From 077ccd874368bd1bf5a07c4fc8cda5ddb4eae7a6 Mon Sep 17 00:00:00 2001 From: Claudio Atzori Date: Mon, 4 May 2020 11:41:46 +0200 Subject: [PATCH 1/9] stats wf properties cleanup --- .../dhp/oa/provision/GraphJoinerTest.java | 42 ----- .../graph/stats/oozie_app/config-default.xml | 6 +- .../dhp/oa/graph/stats/oozie_app/workflow.xml | 166 +++++++++--------- 3 files changed, 86 insertions(+), 128 deletions(-) delete mode 100644 dhp-workflows/dhp-graph-provision/src/test/java/eu/dnetlib/dhp/oa/provision/GraphJoinerTest.java diff --git a/dhp-workflows/dhp-graph-provision/src/test/java/eu/dnetlib/dhp/oa/provision/GraphJoinerTest.java b/dhp-workflows/dhp-graph-provision/src/test/java/eu/dnetlib/dhp/oa/provision/GraphJoinerTest.java deleted file mode 100644 index 1336a1cf7..000000000 --- a/dhp-workflows/dhp-graph-provision/src/test/java/eu/dnetlib/dhp/oa/provision/GraphJoinerTest.java +++ /dev/null @@ -1,42 +0,0 @@ - -package eu.dnetlib.dhp.oa.provision; - -import java.io.IOException; -import java.nio.file.Files; -import java.nio.file.Path; - -import org.junit.jupiter.api.BeforeEach; - -public class GraphJoinerTest { - - private final ClassLoader cl = getClass().getClassLoader(); - private Path workingDir; - private Path inputDir; - private Path outputDir; - - @BeforeEach - public void before() throws IOException { - workingDir = Files.createTempDirectory("promote_action_set"); - inputDir = workingDir.resolve("input"); - outputDir = workingDir.resolve("output"); - } - - private static void copyFiles(Path source, Path target) throws IOException { - Files - .list(source) - .forEach( - f -> { - try { - if (Files.isDirectory(f)) { - Path subTarget = Files.createDirectories(target.resolve(f.getFileName())); - copyFiles(f, subTarget); - } else { - Files.copy(f, target.resolve(f.getFileName())); - } - } catch (IOException e) { - e.printStackTrace(); - throw new RuntimeException(e); - } - }); - } -} diff --git a/dhp-workflows/dhp-stats-update/src/main/resources/eu/dnetlib/dhp/oa/graph/stats/oozie_app/config-default.xml b/dhp-workflows/dhp-stats-update/src/main/resources/eu/dnetlib/dhp/oa/graph/stats/oozie_app/config-default.xml index e2953693f..ba7002cff 100644 --- a/dhp-workflows/dhp-stats-update/src/main/resources/eu/dnetlib/dhp/oa/graph/stats/oozie_app/config-default.xml +++ b/dhp-workflows/dhp-stats-update/src/main/resources/eu/dnetlib/dhp/oa/graph/stats/oozie_app/config-default.xml @@ -15,12 +15,12 @@ oozie.action.sharelib.for.spark spark2 - - hive_metastore_uris + + hiveMetastoreUris thrift://iis-cdh5-test-m3.ocean.icm.edu.pl:9083 - hive_jdbc_url + hiveJdbcUrl jdbc:hive2://iis-cdh5-test-m3.ocean.icm.edu.pl:10000 diff --git a/dhp-workflows/dhp-stats-update/src/main/resources/eu/dnetlib/dhp/oa/graph/stats/oozie_app/workflow.xml b/dhp-workflows/dhp-stats-update/src/main/resources/eu/dnetlib/dhp/oa/graph/stats/oozie_app/workflow.xml index 19f3316d8..6f6389362 100644 --- a/dhp-workflows/dhp-stats-update/src/main/resources/eu/dnetlib/dhp/oa/graph/stats/oozie_app/workflow.xml +++ b/dhp-workflows/dhp-stats-update/src/main/resources/eu/dnetlib/dhp/oa/graph/stats/oozie_app/workflow.xml @@ -14,11 +14,11 @@ the external stats that should be added since they are not included in the graph database - hive_metastore_uris + hiveMetastoreUris hive server metastore URIs - hive_jdbc_url + hiveJdbcUrl hive server jdbc url @@ -29,7 +29,7 @@ hive.metastore.uris - ${hive_metastore_uris} + ${hiveMetastoreUris} @@ -42,7 +42,7 @@ - ${hive_jdbc_url} + ${hiveJdbcUrl} stats_db_name=${stats_db_name} 
openaire_db_name=${openaire_db_name} @@ -53,7 +53,7 @@ - ${hive_jdbc_url} + ${hiveJdbcUrl} stats_db_name=${stats_db_name} openaire_db_name=${openaire_db_name} @@ -64,7 +64,7 @@ - ${hive_jdbc_url} + ${hiveJdbcUrl} stats_db_name=${stats_db_name} openaire_db_name=${openaire_db_name} @@ -75,7 +75,7 @@ - ${hive_jdbc_url} + ${hiveJdbcUrl} stats_db_name=${stats_db_name} openaire_db_name=${openaire_db_name} @@ -86,7 +86,7 @@ - ${hive_jdbc_url} + ${hiveJdbcUrl} stats_db_name=${stats_db_name} openaire_db_name=${openaire_db_name} @@ -97,7 +97,7 @@ - ${hive_jdbc_url} + ${hiveJdbcUrl} stats_db_name=${stats_db_name} openaire_db_name=${openaire_db_name} @@ -108,7 +108,7 @@ - ${hive_jdbc_url} + ${hiveJdbcUrl} stats_db_name=${stats_db_name} openaire_db_name=${openaire_db_name} @@ -119,7 +119,7 @@ - ${hive_jdbc_url} + ${hiveJdbcUrl} stats_db_name=${stats_db_name} openaire_db_name=${openaire_db_name} @@ -130,7 +130,7 @@ - ${hive_jdbc_url} + ${hiveJdbcUrl} stats_db_name=${stats_db_name} openaire_db_name=${openaire_db_name} @@ -141,7 +141,7 @@ - ${hive_jdbc_url} + ${hiveJdbcUrl} stats_db_name=${stats_db_name} openaire_db_name=${openaire_db_name} @@ -152,7 +152,7 @@ - ${hive_jdbc_url} + ${hiveJdbcUrl} stats_db_name=${stats_db_name} openaire_db_name=${openaire_db_name} @@ -163,7 +163,7 @@ - ${hive_jdbc_url} + ${hiveJdbcUrl} stats_db_name=${stats_db_name} openaire_db_name=${openaire_db_name} @@ -174,7 +174,7 @@ - ${hive_jdbc_url} + ${hiveJdbcUrl} stats_db_name=${stats_db_name} openaire_db_name=${openaire_db_name} @@ -185,7 +185,7 @@ - ${hive_jdbc_url} + ${hiveJdbcUrl} stats_db_name=${stats_db_name} openaire_db_name=${openaire_db_name} @@ -196,7 +196,7 @@ - ${hive_jdbc_url} + ${hiveJdbcUrl} stats_db_name=${stats_db_name} openaire_db_name=${openaire_db_name} @@ -207,7 +207,7 @@ - ${hive_jdbc_url} + ${hiveJdbcUrl} stats_db_name=${stats_db_name} openaire_db_name=${openaire_db_name} @@ -218,7 +218,7 @@ - ${hive_jdbc_url} + ${hiveJdbcUrl} stats_db_name=${stats_db_name} openaire_db_name=${openaire_db_name} @@ -229,7 +229,7 @@ - ${hive_jdbc_url} + ${hiveJdbcUrl} stats_db_name=${stats_db_name} openaire_db_name=${openaire_db_name} @@ -240,7 +240,7 @@ - ${hive_jdbc_url} + ${hiveJdbcUrl} stats_db_name=${stats_db_name} openaire_db_name=${openaire_db_name} @@ -251,7 +251,7 @@ - ${hive_jdbc_url} + ${hiveJdbcUrl} stats_db_name=${stats_db_name} openaire_db_name=${openaire_db_name} @@ -262,7 +262,7 @@ - ${hive_jdbc_url} + ${hiveJdbcUrl} stats_db_name=${stats_db_name} openaire_db_name=${openaire_db_name} @@ -273,7 +273,7 @@ - ${hive_jdbc_url} + ${hiveJdbcUrl} stats_db_name=${stats_db_name} openaire_db_name=${openaire_db_name} @@ -284,7 +284,7 @@ - ${hive_jdbc_url} + ${hiveJdbcUrl} stats_db_name=${stats_db_name} openaire_db_name=${openaire_db_name} @@ -295,7 +295,7 @@ - ${hive_jdbc_url} + ${hiveJdbcUrl} stats_db_name=${stats_db_name} openaire_db_name=${openaire_db_name} @@ -306,7 +306,7 @@ - ${hive_jdbc_url} + ${hiveJdbcUrl} stats_db_name=${stats_db_name} openaire_db_name=${openaire_db_name} @@ -317,7 +317,7 @@ - ${hive_jdbc_url} + ${hiveJdbcUrl} stats_db_name=${stats_db_name} openaire_db_name=${openaire_db_name} @@ -328,7 +328,7 @@ - ${hive_jdbc_url} + ${hiveJdbcUrl} stats_db_name=${stats_db_name} openaire_db_name=${openaire_db_name} @@ -339,7 +339,7 @@ - ${hive_jdbc_url} + ${hiveJdbcUrl} stats_db_name=${stats_db_name} openaire_db_name=${openaire_db_name} @@ -350,7 +350,7 @@ - ${hive_jdbc_url} + ${hiveJdbcUrl} stats_db_name=${stats_db_name} openaire_db_name=${openaire_db_name} @@ -361,7 +361,7 @@ - ${hive_jdbc_url} + ${hiveJdbcUrl} 
stats_db_name=${stats_db_name} openaire_db_name=${openaire_db_name} @@ -372,7 +372,7 @@ - ${hive_jdbc_url} + ${hiveJdbcUrl} stats_db_name=${stats_db_name} openaire_db_name=${openaire_db_name} @@ -383,7 +383,7 @@ - ${hive_jdbc_url} + ${hiveJdbcUrl} stats_db_name=${stats_db_name} openaire_db_name=${openaire_db_name} @@ -394,7 +394,7 @@ - ${hive_jdbc_url} + ${hiveJdbcUrl} stats_db_name=${stats_db_name} openaire_db_name=${openaire_db_name} @@ -405,7 +405,7 @@ - ${hive_jdbc_url} + ${hiveJdbcUrl} stats_db_name=${stats_db_name} openaire_db_name=${openaire_db_name} @@ -416,7 +416,7 @@ - ${hive_jdbc_url} + ${hiveJdbcUrl} stats_db_name=${stats_db_name} openaire_db_name=${openaire_db_name} @@ -427,7 +427,7 @@ - ${hive_jdbc_url} + ${hiveJdbcUrl} stats_db_name=${stats_db_name} openaire_db_name=${openaire_db_name} @@ -438,7 +438,7 @@ - ${hive_jdbc_url} + ${hiveJdbcUrl} stats_db_name=${stats_db_name} openaire_db_name=${openaire_db_name} @@ -449,7 +449,7 @@ - ${hive_jdbc_url} + ${hiveJdbcUrl} stats_db_name=${stats_db_name} openaire_db_name=${openaire_db_name} @@ -460,7 +460,7 @@ - ${hive_jdbc_url} + ${hiveJdbcUrl} stats_db_name=${stats_db_name} openaire_db_name=${openaire_db_name} @@ -471,7 +471,7 @@ - ${hive_jdbc_url} + ${hiveJdbcUrl} stats_db_name=${stats_db_name} openaire_db_name=${openaire_db_name} @@ -482,7 +482,7 @@ - ${hive_jdbc_url} + ${hiveJdbcUrl} stats_db_name=${stats_db_name} openaire_db_name=${openaire_db_name} @@ -493,7 +493,7 @@ - ${hive_jdbc_url} + ${hiveJdbcUrl} stats_db_name=${stats_db_name} openaire_db_name=${openaire_db_name} @@ -504,7 +504,7 @@ - ${hive_jdbc_url} + ${hiveJdbcUrl} stats_db_name=${stats_db_name} openaire_db_name=${openaire_db_name} @@ -515,7 +515,7 @@ - ${hive_jdbc_url} + ${hiveJdbcUrl} stats_db_name=${stats_db_name} openaire_db_name=${openaire_db_name} @@ -526,7 +526,7 @@ - ${hive_jdbc_url} + ${hiveJdbcUrl} stats_db_name=${stats_db_name} openaire_db_name=${openaire_db_name} @@ -537,7 +537,7 @@ - ${hive_jdbc_url} + ${hiveJdbcUrl} stats_db_name=${stats_db_name} openaire_db_name=${openaire_db_name} @@ -548,7 +548,7 @@ - ${hive_jdbc_url} + ${hiveJdbcUrl} stats_db_name=${stats_db_name} openaire_db_name=${openaire_db_name} @@ -559,7 +559,7 @@ - ${hive_jdbc_url} + ${hiveJdbcUrl} stats_db_name=${stats_db_name} openaire_db_name=${openaire_db_name} @@ -570,7 +570,7 @@ - ${hive_jdbc_url} + ${hiveJdbcUrl} stats_db_name=${stats_db_name} openaire_db_name=${openaire_db_name} @@ -581,7 +581,7 @@ - ${hive_jdbc_url} + ${hiveJdbcUrl} stats_db_name=${stats_db_name} openaire_db_name=${openaire_db_name} @@ -592,7 +592,7 @@ - ${hive_jdbc_url} + ${hiveJdbcUrl} stats_db_name=${stats_db_name} openaire_db_name=${openaire_db_name} @@ -603,7 +603,7 @@ - ${hive_jdbc_url} + ${hiveJdbcUrl} stats_db_name=${stats_db_name} openaire_db_name=${openaire_db_name} @@ -614,7 +614,7 @@ - ${hive_jdbc_url} + ${hiveJdbcUrl} stats_db_name=${stats_db_name} openaire_db_name=${openaire_db_name} @@ -625,7 +625,7 @@ - ${hive_jdbc_url} + ${hiveJdbcUrl} stats_db_name=${stats_db_name} openaire_db_name=${openaire_db_name} @@ -636,7 +636,7 @@ - ${hive_jdbc_url} + ${hiveJdbcUrl} stats_db_name=${stats_db_name} openaire_db_name=${openaire_db_name} @@ -647,7 +647,7 @@ - ${hive_jdbc_url} + ${hiveJdbcUrl} stats_db_name=${stats_db_name} openaire_db_name=${openaire_db_name} @@ -658,7 +658,7 @@ - ${hive_jdbc_url} + ${hiveJdbcUrl} stats_db_name=${stats_db_name} openaire_db_name=${openaire_db_name} @@ -669,7 +669,7 @@ - ${hive_jdbc_url} + ${hiveJdbcUrl} stats_db_name=${stats_db_name} openaire_db_name=${openaire_db_name} @@ -680,7 
+680,7 @@ - ${hive_jdbc_url} + ${hiveJdbcUrl} stats_db_name=${stats_db_name} openaire_db_name=${openaire_db_name} @@ -691,7 +691,7 @@ - ${hive_jdbc_url} + ${hiveJdbcUrl} stats_db_name=${stats_db_name} openaire_db_name=${openaire_db_name} @@ -702,7 +702,7 @@ - ${hive_jdbc_url} + ${hiveJdbcUrl} stats_db_name=${stats_db_name} openaire_db_name=${openaire_db_name} @@ -713,7 +713,7 @@ - ${hive_jdbc_url} + ${hiveJdbcUrl} stats_db_name=${stats_db_name} openaire_db_name=${openaire_db_name} @@ -724,7 +724,7 @@ - ${hive_jdbc_url} + ${hiveJdbcUrl} stats_db_name=${stats_db_name} openaire_db_name=${openaire_db_name} @@ -735,7 +735,7 @@ - ${hive_jdbc_url} + ${hiveJdbcUrl} stats_db_name=${stats_db_name} openaire_db_name=${openaire_db_name} @@ -746,7 +746,7 @@ - ${hive_jdbc_url} + ${hiveJdbcUrl} stats_db_name=${stats_db_name} openaire_db_name=${openaire_db_name} @@ -757,7 +757,7 @@ - ${hive_jdbc_url} + ${hiveJdbcUrl} stats_db_name=${stats_db_name} openaire_db_name=${openaire_db_name} @@ -768,7 +768,7 @@ - ${hive_jdbc_url} + ${hiveJdbcUrl} stats_db_name=${stats_db_name} openaire_db_name=${openaire_db_name} @@ -779,7 +779,7 @@ - ${hive_jdbc_url} + ${hiveJdbcUrl} stats_db_name=${stats_db_name} openaire_db_name=${openaire_db_name} @@ -790,7 +790,7 @@ - ${hive_jdbc_url} + ${hiveJdbcUrl} stats_db_name=${stats_db_name} openaire_db_name=${openaire_db_name} @@ -801,7 +801,7 @@ - ${hive_jdbc_url} + ${hiveJdbcUrl} stats_db_name=${stats_db_name} openaire_db_name=${openaire_db_name} @@ -813,7 +813,7 @@ - ${hive_jdbc_url} + ${hiveJdbcUrl} stats_db_name=${stats_db_name} openaire_db_name=${openaire_db_name} @@ -824,7 +824,7 @@ - ${hive_jdbc_url} + ${hiveJdbcUrl} stats_db_name=${stats_db_name} openaire_db_name=${openaire_db_name} @@ -835,7 +835,7 @@ - ${hive_jdbc_url} + ${hiveJdbcUrl} stats_db_name=${stats_db_name} openaire_db_name=${openaire_db_name} @@ -846,7 +846,7 @@ - ${hive_jdbc_url} + ${hiveJdbcUrl} stats_db_name=${stats_db_name} openaire_db_name=${openaire_db_name} @@ -857,7 +857,7 @@ - ${hive_jdbc_url} + ${hiveJdbcUrl} stats_db_name=${stats_db_name} openaire_db_name=${openaire_db_name} @@ -868,7 +868,7 @@ - ${hive_jdbc_url} + ${hiveJdbcUrl} stats_db_name=${stats_db_name} openaire_db_name=${openaire_db_name} @@ -879,7 +879,7 @@ - ${hive_jdbc_url} + ${hiveJdbcUrl} stats_db_name=${stats_db_name} openaire_db_name=${openaire_db_name} @@ -890,7 +890,7 @@ - ${hive_jdbc_url} + ${hiveJdbcUrl} stats_db_name=${stats_db_name} openaire_db_name=${openaire_db_name} @@ -901,7 +901,7 @@ - ${hive_jdbc_url} + ${hiveJdbcUrl} stats_db_name=${stats_db_name} openaire_db_name=${openaire_db_name} @@ -912,7 +912,7 @@ - ${hive_jdbc_url} + ${hiveJdbcUrl} stats_db_name=${stats_db_name} openaire_db_name=${openaire_db_name} From bac37b397388b02cbfc363e80d6ac0cfefba4b1e Mon Sep 17 00:00:00 2001 From: Claudio Atzori Date: Mon, 4 May 2020 11:51:17 +0200 Subject: [PATCH 2/9] fixed children expansion in XML records --- .../dhp/oa/provision/XmlIndexingJob.java | 4 +- .../oa/provision/utils/XmlRecordFactory.java | 60 +- .../dhp/oa/provision/template/child.st | 6 +- .../provision/IndexRecordTransformerTest.java | 39 + .../eu/dnetlib/dhp/oa/provision/fields.xml | 166 ++++ .../provision/layoutToRecordTransformer.xsl | 94 +++ .../eu/dnetlib/dhp/oa/provision/record.xml | 750 ++++++++++++++++++ 7 files changed, 1089 insertions(+), 30 deletions(-) create mode 100644 dhp-workflows/dhp-graph-provision/src/test/java/eu/dnetlib/dhp/oa/provision/IndexRecordTransformerTest.java create mode 100644 
dhp-workflows/dhp-graph-provision/src/test/resources/eu/dnetlib/dhp/oa/provision/fields.xml create mode 100644 dhp-workflows/dhp-graph-provision/src/test/resources/eu/dnetlib/dhp/oa/provision/layoutToRecordTransformer.xsl create mode 100644 dhp-workflows/dhp-graph-provision/src/test/resources/eu/dnetlib/dhp/oa/provision/record.xml diff --git a/dhp-workflows/dhp-graph-provision/src/main/java/eu/dnetlib/dhp/oa/provision/XmlIndexingJob.java b/dhp-workflows/dhp-graph-provision/src/main/java/eu/dnetlib/dhp/oa/provision/XmlIndexingJob.java index b9746f153..ede7aa7b4 100644 --- a/dhp-workflows/dhp-graph-provision/src/main/java/eu/dnetlib/dhp/oa/provision/XmlIndexingJob.java +++ b/dhp-workflows/dhp-graph-provision/src/main/java/eu/dnetlib/dhp/oa/provision/XmlIndexingJob.java @@ -113,7 +113,7 @@ public class XmlIndexingJob { }); } - private static String toIndexRecord(Transformer tr, final String record) { + protected static String toIndexRecord(Transformer tr, final String record) { final StreamResult res = new StreamResult(new StringWriter()); try { tr.transform(new StreamSource(new StringReader(record)), res); @@ -135,7 +135,7 @@ public class XmlIndexingJob { * @throws IOException could happen * @throws TransformerException could happen */ - private static String getLayoutTransformer(String format, String fields, String xslt) + protected static String getLayoutTransformer(String format, String fields, String xslt) throws TransformerException { final Transformer layoutTransformer = SaxonTransformerFactory.newInstance(xslt); diff --git a/dhp-workflows/dhp-graph-provision/src/main/java/eu/dnetlib/dhp/oa/provision/utils/XmlRecordFactory.java b/dhp-workflows/dhp-graph-provision/src/main/java/eu/dnetlib/dhp/oa/provision/utils/XmlRecordFactory.java index 2cff2124e..ce1c71312 100644 --- a/dhp-workflows/dhp-graph-provision/src/main/java/eu/dnetlib/dhp/oa/provision/utils/XmlRecordFactory.java +++ b/dhp-workflows/dhp-graph-provision/src/main/java/eu/dnetlib/dhp/oa/provision/utils/XmlRecordFactory.java @@ -9,10 +9,7 @@ import java.io.IOException; import java.io.Serializable; import java.io.StringReader; import java.io.StringWriter; -import java.util.ArrayList; -import java.util.List; -import java.util.Map; -import java.util.Set; +import java.util.*; import java.util.stream.Collectors; import javax.xml.transform.*; @@ -47,7 +44,7 @@ import eu.dnetlib.dhp.schema.oaf.Result; public class XmlRecordFactory implements Serializable { - public static final String REL_SUBTYPE_DEDUP = "dedup"; + private static final String REL_SUBTYPE_DEDUP = "dedup"; private final Map accumulators; private final Set specialDatasourceTypes; @@ -100,8 +97,8 @@ public class XmlRecordFactory implements Serializable { final List relations = je .getLinks() .stream() - .filter(t -> !REL_SUBTYPE_DEDUP.equalsIgnoreCase(t.getRelation().getSubRelType())) - .map(link -> mapRelation(link, templateFactory, contexts)) + .filter(link -> !isDuplicate(link)) + .map(link -> mapRelation(contexts, templateFactory, type, link)) .collect(Collectors.toCollection(ArrayList::new)); final String mainType = ModelSupport.getMainType(type); @@ -936,7 +933,7 @@ public class XmlRecordFactory implements Serializable { metadata.add(XmlSerializationUtils.mapQualifier("datasourcetypeui", dsType)); } - private String mapRelation(Tuple2 link, TemplateFactory templateFactory, Set contexts) { + private List mapFields(Tuple2 link, Set contexts) { final Relation rel = link.getRelation(); final RelatedEntity re = link.getRelatedEntity(); final String targetType = 
link.getRelatedEntity().getType(); @@ -1040,38 +1037,47 @@ public class XmlRecordFactory implements Serializable { default: throw new IllegalArgumentException("invalid target type: " + targetType); } - final DataInfo info = rel.getDataInfo(); - final String scheme = ModelSupport.getScheme(re.getType(), targetType); - - if (StringUtils.isBlank(scheme)) { - throw new IllegalArgumentException( - String.format("missing scheme for: <%s - %s>", re.getType(), targetType)); - } final String accumulatorName = getRelDescriptor(rel.getRelType(), rel.getSubRelType(), rel.getRelClass()); if (accumulators.containsKey(accumulatorName)) { accumulators.get(accumulatorName).add(1); } + return metadata; + } + + private String mapRelation(Set contexts, TemplateFactory templateFactory, EntityType type, Tuple2 link) { + final Relation rel = link.getRelation(); + final String targetType = link.getRelatedEntity().getType(); + final String scheme = ModelSupport.getScheme(type.toString(), targetType); + + if (StringUtils.isBlank(scheme)) { + throw new IllegalArgumentException( + String.format("missing scheme for: <%s - %s>", type.toString(), targetType)); + } + final HashSet fields = Sets.newHashSet(mapFields(link, contexts)); return templateFactory .getRel( - targetType, rel.getTarget(), Sets.newHashSet(metadata), rel.getRelClass(), scheme, info); + targetType, rel.getTarget(), fields, rel.getRelClass(), scheme, rel.getDataInfo()); } private List listChildren( final OafEntity entity, JoinedEntity je, TemplateFactory templateFactory) { - final List children = Lists.newArrayList(); EntityType entityType = EntityType.valueOf(je.getEntity().getType()); - children - .addAll( - je - .getLinks() - .stream() - .filter(link -> REL_SUBTYPE_DEDUP.equalsIgnoreCase(link.getRelation().getSubRelType())) - .map(link -> mapRelation(link, templateFactory, null)) - .collect(Collectors.toCollection(ArrayList::new))); + List children = je + .getLinks() + .stream() + .filter(link -> isDuplicate(link)) + .map(link -> { + final String targetType = link.getRelatedEntity().getType(); + final String name = ModelSupport.getMainType(EntityType.valueOf(targetType)); + final HashSet fields = Sets.newHashSet(mapFields(link, null)); + return templateFactory + .getChild(name, link.getRelatedEntity().getId(), Lists.newArrayList(fields)); + }) + .collect(Collectors.toCollection(ArrayList::new)); if (MainEntityType.result.toString().equals(ModelSupport.getMainType(entityType))) { final List instances = ((Result) entity).getInstance(); @@ -1178,6 +1184,10 @@ public class XmlRecordFactory implements Serializable { return children; } + private boolean isDuplicate(Tuple2 link) { + return REL_SUBTYPE_DEDUP.equalsIgnoreCase(link.getRelation().getSubRelType()); + } + private List listExtraInfo(OafEntity entity) { final List extraInfo = entity.getExtraInfo(); return extraInfo != null diff --git a/dhp-workflows/dhp-graph-provision/src/main/resources/eu/dnetlib/dhp/oa/provision/template/child.st b/dhp-workflows/dhp-graph-provision/src/main/resources/eu/dnetlib/dhp/oa/provision/template/child.st index 89f81e16b..1d3cffea0 100644 --- a/dhp-workflows/dhp-graph-provision/src/main/resources/eu/dnetlib/dhp/oa/provision/template/child.st +++ b/dhp-workflows/dhp-graph-provision/src/main/resources/eu/dnetlib/dhp/oa/provision/template/child.st @@ -1,3 +1,3 @@ -> - $metadata:{ it | $it$ }$ - \ No newline at end of file +<$name$$if(hasId)$ objidentifier="$id$"$else$$endif$> + $metadata:{$it$}$ + \ No newline at end of file diff --git 
a/dhp-workflows/dhp-graph-provision/src/test/java/eu/dnetlib/dhp/oa/provision/IndexRecordTransformerTest.java b/dhp-workflows/dhp-graph-provision/src/test/java/eu/dnetlib/dhp/oa/provision/IndexRecordTransformerTest.java new file mode 100644 index 000000000..b1e39c696 --- /dev/null +++ b/dhp-workflows/dhp-graph-provision/src/test/java/eu/dnetlib/dhp/oa/provision/IndexRecordTransformerTest.java @@ -0,0 +1,39 @@ + +package eu.dnetlib.dhp.oa.provision; + +import java.io.IOException; +import java.io.StringReader; +import java.io.StringWriter; +import java.nio.file.Files; +import java.nio.file.Path; + +import javax.xml.transform.Transformer; +import javax.xml.transform.TransformerException; +import javax.xml.transform.stream.StreamResult; +import javax.xml.transform.stream.StreamSource; + +import org.apache.commons.io.IOUtils; +import org.junit.jupiter.api.BeforeEach; +import org.junit.jupiter.api.Test; + +import eu.dnetlib.dhp.utils.saxon.SaxonTransformerFactory; + +public class IndexRecordTransformerTest { + + @Test + public void testTrasformRecord() throws IOException, TransformerException { + String fields = IOUtils.toString(getClass().getResourceAsStream("fields.xml")); + String record = IOUtils.toString(getClass().getResourceAsStream("record.xml")); + String xslt = IOUtils.toString(getClass().getResourceAsStream("layoutToRecordTransformer.xsl")); + + String transformer = XmlIndexingJob.getLayoutTransformer("DMF", fields, xslt); + + Transformer tr = SaxonTransformerFactory.newInstance(transformer); + + String a = XmlIndexingJob.toIndexRecord(tr, record); + + System.out.println(a); + + } + +} diff --git a/dhp-workflows/dhp-graph-provision/src/test/resources/eu/dnetlib/dhp/oa/provision/fields.xml b/dhp-workflows/dhp-graph-provision/src/test/resources/eu/dnetlib/dhp/oa/provision/fields.xml new file mode 100644 index 000000000..f74da5d07 --- /dev/null +++ b/dhp-workflows/dhp-graph-provision/src/test/resources/eu/dnetlib/dhp/oa/provision/fields.xml @@ -0,0 +1,166 @@ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + \ No newline at end of file diff --git a/dhp-workflows/dhp-graph-provision/src/test/resources/eu/dnetlib/dhp/oa/provision/layoutToRecordTransformer.xsl b/dhp-workflows/dhp-graph-provision/src/test/resources/eu/dnetlib/dhp/oa/provision/layoutToRecordTransformer.xsl new file mode 100644 index 000000000..d814baa8e --- /dev/null +++ b/dhp-workflows/dhp-graph-provision/src/test/resources/eu/dnetlib/dhp/oa/provision/layoutToRecordTransformer.xsl @@ -0,0 +1,94 @@ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + . + + + + + + + + + + + + + + + + + + + + + + + + + + + + + \ No newline at end of file diff --git a/dhp-workflows/dhp-graph-provision/src/test/resources/eu/dnetlib/dhp/oa/provision/record.xml b/dhp-workflows/dhp-graph-provision/src/test/resources/eu/dnetlib/dhp/oa/provision/record.xml new file mode 100644 index 000000000..b617dbea2 --- /dev/null +++ b/dhp-workflows/dhp-graph-provision/src/test/resources/eu/dnetlib/dhp/oa/provision/record.xml @@ -0,0 +1,750 @@ + +
+ dedup_wf_001::113ca964590682d906a3588d3d6b4838 + 2020-03-15T05:46:43.509Z + 2020-03-15T21:17:13.902Z +
+ + + + + + + + + + oai:pubmedcentral.nih.gov:5657733 + oai:doaj.org/article:f26495a4c1d149099049e1a604fa1256 + 10.1177/0963689717714320 + 28933215 + PMC5657733 + 10.1177/0963689717714320 + Extract Protects Model Neurons + from Traumatic Injury + + Jain, Subhash C. + Citron, Bruce A. + Vijayalakshmi Ravindranath + Saykally, Jessica N. + Keeley, Kristen L. + Haris Hatic + 2017-06-01 + Withania somnifera has been used in traditional medicine for a variety + of neural disorders. Recently, chronic neurodegenerative conditions have been + shown to benefit from treatment with this extract. To evaluate the action of + this extract on traumatically injured neurons, the efficacy of W. somnifera root + extract as a neuroprotective agent was examined in cultured model neurons + exposed to an in vitro injury system designed to mimic mild traumatic brain + injury (TBI). Neuronal health was evaluated by staining with annexin V (an + early, apoptotic feature) and monitoring released lactate dehydrogenase activity + (a terminal cell loss parameter). Potential mechanisms underlying the observed + neuroprotection were examined. Additionally, morphological changes were + monitored following injury and treatment. Although no differences were found in + the expression of the antioxidant transcription factor nuclear factor erythroid + 2-like 2 (Nrf2) or other Nrf2-related downstream components, significant changes + were seen in apoptotic signaling. Treatment with the extract resulted in an + increased length of neurites projecting from the neuronal cell body after + injury. W. somnifera extract treatment also resulted in reduced cell death in + the model neuron TBI system. The cell death factor Bax was involved (its + expression was reduced 2-fold by the treatment) and injury-induced reduction in + neurite lengths and numbers was reversed by the treatment. This all indicates + that W. somnifera root extract was neuroprotective and could have therapeutic + potential to target factors involved in secondary injury and long-term sequelae + of mild TBI. 
+ + Withania + somnifera + R + Cell Biology + neuroprotection + SH-SY5Y + Biomedical Engineering + Transplantation + traumatic + brain injury + neurites + Ayurveda + Medicine + + 2018-11-13 + 2017-6-30 + 2017-7-1 + SAGE Publishing + Cell Transplantation, Vol 26 (2017) + Cell Transplantation + + + Cell Transplantation + + + + + true + false + 0.9 + dedup-similarity-result-levenstein + + + + + wt__________::4de25ac59f6cb729d5716260164bb67c + Indian Institute Of Science + + + nih_________::ba7da8316fd53d04a985bc935e438555 + INDIAN INSTITUTE OF SCIENCE + + + dedup_wf_001::0047940c0207b6a83e79cd803ecf17d1 + + MRC - MRC Laboratory of Molecular Biology + LMB + + + rcuk________::2558c4f3132f6907f7b23c69009f0d87 + INDIAN INSTUTUTE OF SCIENCE + + + dedup_wf_001::d2fdc8e80f8b4365091bcea83f918ccf + + University of Delhi + University of Delhi + + + doiboost____::d5177e3ad00bd9288201b60206a0b5d0 + 2017-6-30 + + + + 10.1177/0963689717714320 + + + od_______267::fb470352a4b33af7c83391c02117c4fc + + SAGE Publications + PMC5657733 + 28933215 + 2017-06-01 + 10.1177/0963689717714320 + + + nih_________::24e81ae35bbcb50c778df1039f912617 + + + nih_________::NIH::VETERANS_AFFAIRS + + Preventing TBI-Induced Chronic Functional Loss with a Neuroprotective + Antioxidant + 1I01RX001520-01A1 + + + wt__________::52e59d4aa1c57bda1ec144f409de83fc + Indian Institute of Science + + + dedup_wf_001::0499ff413ba8e7fa686531725ba12338 + IISc + + Indian Institute of Science + + + wt__________::ba1db3669859a46e72f222052a9a26d8 + University of Delhi + + + dedup_wf_001::17c785347dfb060aa115af824b0c6789 + IISc + + Indian Institute of Science Bangalore + + + scholexplore::16181ec1a2484116e8ed6b3348858fe7 + + 28933215 + + + doajarticles::cac994ec6c322070c41474486eb5c595 + 2017-07-01 + + SAGE Publishing + 10.1177/0963689717714320 + + + r37980778c78::39a72c53d5801325784f728b543a49a1 + + 10.1371/journal.pone.0006628 + 2016-01-01 + Figshare + + + rcuk________::23feba2a5ca7f6b6016bf3a45180da50 + University of Delhi + + + + + + + + + + https://www.ncbi.nlm.nih.gov/pubmed/28933215 + + + + + + + 2017-06-01 + + + http://europepmc.org/articles/PMC5657733 + + + + + + + + + http://journals.sagepub.com/doi/full-xml/10.1177/0963689717714320 + + + http://journals.sagepub.com/doi/pdf/10.1177/0963689717714320 + + + https://academic.microsoft.com/#/detail/2588640354 + + + + + + + 2017-07-01 + + + https://doi.org/10.1177/0963689717714320 + + + https://doaj.org/toc/0963-6897 + + + https://doaj.org/toc/1555-3892 + + + + + + + + + http://dx.doi.org/10.1177/0963689717714320 + + + + + + + + + https://journals.sagepub.com/doi/pdf/10.1177/0963689717714320 + + + + + + + + 1 Bryan-Hancock C Harrison J The global burden of traumatic brain + injury: preliminary results from the Global Burden of Disease Project. + Inj Prev. 2010;16(Suppl 1):A17. + + + 2 Gardner RC Yaffe K Epidemiology of mild traumatic brain injury + and neurodegenerative disease. Mol Cell Neurosci. 2015;66(Pt + B):75–80.25748121 + + + + + 3 Stern RA Riley DO Daneshvar DH Nowinski CJ Cantu RC McKee AC + Long-term consequences of repetitive brain trauma: chronic traumatic + encephalopathy. PM R. 2011;3(10 Suppl 2):S460–S467.22035690 + + + + + 4 Mac Donald CL Johnson AM Cooper D Nelson EC Werner NJ Shimony JS + Snyder AZ Raichle ME Witherow JR Fang R Detection of blast-related + traumatic brain injury in U.S. military personnel. N Engl J Med. 
+ 2011;364(22):2091–2100.21631321 + + + + + 5 Hatic H Kane MJ Saykally JN Citron BA Modulation of transcription + factor Nrf2 in an in vitro model of traumatic brain injury. J + Neurotrauma. 2012;29(6):1188–1196.22201269 + + + + + 6 Saykally JN Rachmany L Hatic H Shaer A Rubovitch V Pick CG Citron + BA The nuclear factor erythroid 2-like 2 activator, + tert-butylhydroquinone, improves cognitive performance in mice after + mild traumatic brain injury. Neuroscience. + 2012;223:305–314.22890082 + + + + + 7 Hall ED Vaishnav RA Mustafa AG Antioxidant therapies for + traumatic brain injury. Neurotherapeutics. + 2010;7(1):51–61.20129497 + + + + + 8 Scartezzini P Speroni E Review on some plants of Indian + traditional medicine with antioxidant activity. J Ethnopharmacol. + 2000;71(1–2):23–43.10904144 + + + + + 9 Mishra LC Singh BB Dagenais S Scientific basis for the + therapeutic use of Withania somnifera (ashwagandha): a review. Altern + Med Rev. 2000;5(4):334–346.10956379 + + + + + 10 Singh RH Exploring larger evidence-base for contemporary + Ayurveda. Int J Ayurveda Res. 2010;1(2):65–66.20814517 + + + + + 11 Alam N Hossain M Mottalib MA Sulaiman SA Gan SH Khalil MI + Methanolic extracts of Withania somnifera leaves, fruits and roots + possess antioxidant properties and antibacterial activities. BMC + Complement Altern Med. 2012;12:175.23039061 + + + + + 12 Gupta GL Rana AC Withania somnifera (ashwagandha): a review. + Pharmacognosy Rev. 2007;1(1):129–136. + + + 13 Durg S Dhadde SB Vandal R Shivakumar BS Charan CS Withania + somnifera (ashwagandha) in neurobehavioural disorders induced by brain + oxidative stress in rodents: a systematic review and meta-analysis. J + Pharm Pharmacol. 2015;67(7):879–899.25828061 + + + + + 14 Kuboyama T Tohda C Komatsu K Effects of ashwagandha (roots of + Withania somnifera) on neurodegenerative diseases. Biol Pharm Bull. + 2014;37(6):892–897.24882401 + + + + + 15 Mirjalili MH Moyano E Bonfill M Cusido RM Palazon J Steroidal + lactones from Withania somnifera, an ancient plant for novel medicine. + Molecules. 2009;14(7):2373–2393.19633611 + + + + + 16 Ven Murthy MR Ranjekar PK Ramassamy C Deshpande M Scientific + basis for the use of Indian ayurvedic medicinal plants in the treatment + of neurodegenerative disorders: ashwagandha. Cent Nerv Syst Agents Med + Chem. 2010;10(3):238–246.20528765 + + + + + 17 Singh RH Narsimhamurthy K Singh G Neuronutrient impact of + Ayurvedic Rasayana therapy in brain aging. Biogerontology. + 2008;9(6):369–374.18931935 + + + + + 18 Kulkarni SK Dhir A Withania somnifera: an Indian ginseng. Prog + Neuropsychopharmacol Biol Psychiatry. + 2008;32(5):1093–1105.17959291 + + + + + 19 Cooley K Szczurko O Perri D Mills EJ Bernhardt B Zhou Q Seely D + Naturopathic care for anxiety: a randomized controlled trial + ISRCTN78958974. PLoS One. 2009;4(8):e6628.19718255 + + + + + 20 Chopra A Lavin P Patwardhan B Chitre D A 32-week randomized, + placebo-controlled clinical evaluation of RA-11, an Ayurvedic drug, on + osteoarthritis of the knees. J Clin Rheumatol. + 2004;10(5):236–245.17043520 + + + + + 21 Chaudhary G Sharma U Jagannathan NR Gupta YK Evaluation of + Withania somnifera in a middle cerebral artery occlusion model of stroke + in rats. Clin Exp Pharmacol Physiol. + 2003;30(5–6):399–404.12859433 + + + + 22 Adams JD Jr Yang J Mishra LC Singh BB Effects of ashwagandha in + a rat model of stroke. Altern Ther Health Med. + 2002;8(5):18–19. 
+ + + 23 Baitharu I Jain V Deep SN Hota KB Hota SK Prasad D Ilavazhagan G + Withania somnifera root extract ameliorates hypobaric hypoxia induced + memory impairment in rats. J Ethnopharmacol. + 2013;145(2):431–441.23211660 + + + + + 24 RajaSankar S Manivasagam T Sankar V Prakash S Muthusamy R + Krishnamurti A Surendran S Withania somnifera root extract improves + catecholamines and physiological abnormalities seen in a Parkinson’s + disease model mouse. J Ethnopharmacol. + 2009;125(3):369–373.19666100 + + + + + 25 Pingali U Pilli R Fatima N Effect of standardized aqueous + extract of Withania somnifera on tests of cognitive and psychomotor + performanc e in healthy human participants. Pharmacognosy Res. + 2014;6(1):12–18.24497737 + + + + + 26 Prabhakaran Y Dinakaran SK Macharala SP Ghosh S Karanam SR + Kanthasamy N Avasarala H Molecular docking studies of withanolides + against Cox-2 enzyme. Pak J Pharm Sci. + 2012;25(3):595–598.22713947 + + + + + 27 Mohan R Hammers HJ Bargagna-Mohan P Zhan XH Herbstritt CJ Ruiz A + Zhang L Hanson AD Conner BP Rougas J Withaferin A is a potent inhibitor + of angiogenesis. Angiogenesis. 2004;7(2):115–122.15516832 + + + + + 28 Friedemann T Otto B Klatschke K Schumacher U Tao Y Leung AK + Efferth T Schroder S Coptis chinensis Franch. exhibits neuroprotective + properties against oxidative stress in human neuroblastoma cells. J + Ethnopharmacol. 2014;155(1):607–615.24929105 + + + + + 29 Hu S Han R Mak S Han Y Protection against + 1-methyl-4-phenylpyridinium ion (MPP+)-induced apoptosis by water + extract of ginseng (Panax ginseng C.A. Meyer) in SH-SY5Y cells. J + Ethnopharmacol. 2011;135(1):34–42.21349320 + + + + + 30 Kane MJ Hatic H Delic V Dennis JS Butler CL Saykally JN Citron + BA Modeling the pathobiology of repetitive traumatic brain injury in + immortalized neuronal cell lines. Brain Res. + 2011;1425:123–131.22018688 + + + + 31 Sehgal N Gupta A Valli RK Joshi SD Mills JT Hamel E Khanna P + Jain SC Thakur SS Ravindranath V Withania somnifera reverses Alzheimer’s + disease pathology by enhancing low-density lipoprotein receptor-related + protein in liver. Proc Natl Acad Sci U S A. + 2012;109(9):3510–3515.22308347 + + + + + 32 Arundine M Aarts M Lau A Tymianski M Vulnerability of central + neurons to secondary insults after in vitro mechanical stretch. J + Neurosci. 2004;24(37):8106–8123.15371512 + + + + + 33 Lau A Arundine M Sun HS Jones M Tymianski M Inhibition of + caspase-mediated apoptosis by peroxynitrite in traumatic brain injury. J + Neurosci. 2006;26(45):11540–11553.17093075 + + + + 34 Weber JT Rzigalinski BA Ellis EF Traumatic injury of cortical + neurons causes changes in intracellular calcium stores and capacitative + calcium influx. J Biol Chem. 2001;276(3):1800–1807.11050103 + + + + 35 Ellis EF McKinney JS Willoughby KA Liang S Povlishock JT A new + model for rapid stretch-induced injury of cells in culture: + characterization of the model using astrocytes. J Neurotrauma. + 1995;12(3):325–339.7473807 + + + + 36 Zhang Y Ba Y Liu C Sun G Ding L Gao S Hao J Yu Z Zhang J Zen K + PGC-1alpha induces apoptosis in human epithelial ovarian cancer cells + through a PPARgamma-dependent pathway. Cell Res. + 2007;17(4):363–373.17372612 + + + + 37 Brooks AR Lelkes PI Rubanyi GM Gene expression profiling of + human aortic endothelial cells exposed to disturbed flow and steady + laminar flow. Physiol Genomics. 
2002;9(1):27–41.11948288 + + + + 38 Du Y Villeneuve NF Wang XJ Sun Z Chen W Li J Lou H Wong PK Zhang + DD Oridonin confers protection against arsenic-induced toxicity through + activation of the Nrf2-mediated defensive response. Environ Health + Perspect. 2008;116(9):1154–1161.18795156 + + + + + 39 Pool M Thiemann J Bar-Or A Fournier AE NeuriteTracer: a novel + ImageJ plugin for automated quantification of neurite outgrowth. J + Neurosci Methods. 2008;168(1):134–139.17936365 + + + + + 40 Chen J Wu X Shao B Zhao W Shi W Zhang S Ni L Shen A Increased + expression of TNF receptor-associated factor 6 after rat traumatic brain + injury. Cell Mol Neurobiol. 2011;31(2):269–275.21072581 + + + + 41 Kuboyama T Tohda C Komatsu K Neuritic regeneration and synaptic + reconstruction induced by withanolide A. Br J Pharmacol. + 2005;144(7):961–971.15711595 + + + + + 42 Kuboyama T Tohda C Komatsu K Withanoside IV and its active + metabolite, sominone, attenuate Abeta(25-35)-induced neurodegeneration + Eur J Neurosci. 2006;23(6):1417–1426.16553605 + + + + + 43 Jarrard LE On the role of the hippocampus in learning and memory + in the rat. Behav Neural Biol. 1993;60(1):9–26.8216164 + + + + + 44 Vareed SK Bauer AK Nair KM Liu Y Jayaprakasam B Nair MG + Blood-brain barrier permeability of bioactive withanamides present in + Withania somnifera fruit extract. Phytother Res. + 2014;28(8):1260–1264.24458838 + + + + + + + +
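For reference, a minimal sketch of how the reworked child.st template above renders a dedup sibling element. This is illustrative only: the standalone wrapper class and the sample attribute values are assumptions, the closing </$name$> line is reconstructed (it is cut off in the diff), and the StringTemplate 3.x API (org.antlr.stringtemplate.StringTemplate) is inferred from the $it$ implicit-attribute syntax the template uses.

import org.antlr.stringtemplate.StringTemplate;

public class ChildTemplateSketch {

	public static void main(String[] args) {
		// Same body as the patched child.st, with the reconstructed closing tag.
		String template = "<$name$$if(hasId)$ objidentifier=\"$id$\"$else$$endif$>\n"
			+ "    $metadata:{$it$}$\n"
			+ "</$name$>";

		StringTemplate st = new StringTemplate(template);
		st.setAttribute("name", "result"); // main type of the dedup sibling, as resolved in listChildren
		st.setAttribute("hasId", Boolean.TRUE); // hypothetical flag toggling the objidentifier attribute
		st.setAttribute("id", "dedup_wf_001::113ca964590682d906a3588d3d6b4838"); // sample id from record.xml above
		// In StringTemplate 3.x, repeated setAttribute calls on the same key build a
		// multi-valued attribute, so $metadata:{$it$}$ emits each field once.
		st.setAttribute("metadata", "<title>Extract Protects Model Neurons from Traumatic Injury</title>");
		st.setAttribute("metadata", "<dateofacceptance>2017-06-01</dateofacceptance>");

		// Expected shape:
		// <result objidentifier="dedup_wf_001::113ca964590682d906a3588d3d6b4838">
		//     <title>...</title><dateofacceptance>...</dateofacceptance>
		// </result>
		System.out.println(st.toString());
	}
}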
From 3df703f67d639630369a984ea8f4af2680647b90 Mon Sep 17 00:00:00 2001 From: miconis Date: Mon, 4 May 2020 12:08:12 +0200 Subject: [PATCH 3/9] mergerels added to propagate relations --- .../java/eu/dnetlib/dhp/oa/dedup/SparkPropagateRelation.java | 2 +- .../src/test/java/eu/dnetlib/dhp/oa/dedup/SparkDedupTest.java | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/dhp-workflows/dhp-dedup-openaire/src/main/java/eu/dnetlib/dhp/oa/dedup/SparkPropagateRelation.java b/dhp-workflows/dhp-dedup-openaire/src/main/java/eu/dnetlib/dhp/oa/dedup/SparkPropagateRelation.java index 34611db8e..2d18c9a61 100644 --- a/dhp-workflows/dhp-dedup-openaire/src/main/java/eu/dnetlib/dhp/oa/dedup/SparkPropagateRelation.java +++ b/dhp-workflows/dhp-dedup-openaire/src/main/java/eu/dnetlib/dhp/oa/dedup/SparkPropagateRelation.java @@ -94,7 +94,7 @@ public class SparkPropagateRelation extends AbstractSparkAction { FieldType.TARGET, getDeletedFn()); - save(newRels.union(updated), outputRelationPath, SaveMode.Overwrite); + save(newRels.union(updated).union(mergeRels), outputRelationPath, SaveMode.Overwrite); } private static Dataset processDataset( diff --git a/dhp-workflows/dhp-dedup-openaire/src/test/java/eu/dnetlib/dhp/oa/dedup/SparkDedupTest.java b/dhp-workflows/dhp-dedup-openaire/src/test/java/eu/dnetlib/dhp/oa/dedup/SparkDedupTest.java index a0ae7bc3c..990ac04c0 100644 --- a/dhp-workflows/dhp-dedup-openaire/src/test/java/eu/dnetlib/dhp/oa/dedup/SparkDedupTest.java +++ b/dhp-workflows/dhp-dedup-openaire/src/test/java/eu/dnetlib/dhp/oa/dedup/SparkDedupTest.java @@ -420,7 +420,7 @@ public class SparkDedupTest implements Serializable { long relations = jsc.textFile(testDedupGraphBasePath + "/relation").count(); - assertEquals(826, relations); + assertEquals(5022, relations); // check deletedbyinference final Dataset mergeRels = spark From de5fbe325ca8c4df252072293889e89e73b70f93 Mon Sep 17 00:00:00 2001 From: Claudio Atzori Date: Mon, 4 May 2020 16:00:48 +0200 Subject: [PATCH 4/9] bits of javadoc --- .../java/eu/dnetlib/dhp/schema/oaf/Oaf.java | 3 +++ .../eu/dnetlib/dhp/schema/oaf/Relation.java | 21 +++++++++++++++++++ 2 files changed, 24 insertions(+) diff --git a/dhp-schemas/src/main/java/eu/dnetlib/dhp/schema/oaf/Oaf.java b/dhp-schemas/src/main/java/eu/dnetlib/dhp/schema/oaf/Oaf.java index 4bfc05039..3496492e8 100644 --- a/dhp-schemas/src/main/java/eu/dnetlib/dhp/schema/oaf/Oaf.java +++ b/dhp-schemas/src/main/java/eu/dnetlib/dhp/schema/oaf/Oaf.java @@ -7,6 +7,9 @@ import java.util.Objects; public abstract class Oaf implements Serializable { + /** + * The list of datasource id/name pairs providing this relationship. + */ protected List collectedfrom; private DataInfo dataInfo; diff --git a/dhp-schemas/src/main/java/eu/dnetlib/dhp/schema/oaf/Relation.java b/dhp-schemas/src/main/java/eu/dnetlib/dhp/schema/oaf/Relation.java index 2c282c29e..76503f885 100644 --- a/dhp-schemas/src/main/java/eu/dnetlib/dhp/schema/oaf/Relation.java +++ b/dhp-schemas/src/main/java/eu/dnetlib/dhp/schema/oaf/Relation.java @@ -7,16 +7,37 @@ import java.util.*; import java.util.stream.Collectors; import java.util.stream.Stream; +/** + * Relation models any edge between two nodes in the OpenAIRE graph. It has a source id and a target id + * pointing to graph node identifiers and it is further characterised by the semantic of the link through the fields + * relType, subRelType and relClass. 
Provenance information is modeled according to the dataInfo element and collectedFrom, + * while individual relationship types can provide extra information via the properties field. + */ public class Relation extends Oaf { + /** + * Main relationship classifier, values include 'resultResult', 'resultProject', 'resultOrganization', etc. + */ private String relType; + /** + * Further classifies a relationship, values include 'affiliation', 'similarity', 'supplement', etc. + */ private String subRelType; + /** + * Indicates the direction of the relationship, values include 'isSupplementTo', 'isSupplementedBy', 'merges, 'isMergedIn'. + */ private String relClass; + /** + * The source entity id. + */ private String source; + /** + * The target entity id. + */ private String target; public String getRelType() { From 405f495d549bc818becc06d5795966b45dc09359 Mon Sep 17 00:00:00 2001 From: Claudio Atzori Date: Mon, 4 May 2020 19:18:12 +0200 Subject: [PATCH 5/9] code formatting --- .../main/java/eu/dnetlib/dhp/schema/oaf/Relation.java | 11 ++++++----- 1 file changed, 6 insertions(+), 5 deletions(-) diff --git a/dhp-schemas/src/main/java/eu/dnetlib/dhp/schema/oaf/Relation.java b/dhp-schemas/src/main/java/eu/dnetlib/dhp/schema/oaf/Relation.java index 76503f885..ad5e9cebe 100644 --- a/dhp-schemas/src/main/java/eu/dnetlib/dhp/schema/oaf/Relation.java +++ b/dhp-schemas/src/main/java/eu/dnetlib/dhp/schema/oaf/Relation.java @@ -8,10 +8,10 @@ import java.util.stream.Collectors; import java.util.stream.Stream; /** - * Relation models any edge between two nodes in the OpenAIRE graph. It has a source id and a target id - * pointing to graph node identifiers and it is further characterised by the semantic of the link through the fields - * relType, subRelType and relClass. Provenance information is modeled according to the dataInfo element and collectedFrom, - * while individual relationship types can provide extra information via the properties field. + * Relation models any edge between two nodes in the OpenAIRE graph. It has a source id and a target id pointing to + * graph node identifiers and it is further characterised by the semantic of the link through the fields relType, + * subRelType and relClass. Provenance information is modeled according to the dataInfo element and collectedFrom, while + * individual relationship types can provide extra information via the properties field. */ public class Relation extends Oaf { @@ -26,7 +26,8 @@ public class Relation extends Oaf { private String subRelType; /** - * Indicates the direction of the relationship, values include 'isSupplementTo', 'isSupplementedBy', 'merges, 'isMergedIn'. + * Indicates the direction of the relationship, values include 'isSupplementTo', 'isSupplementedBy', 'merges, + * 'isMergedIn'. 
*/ private String relClass; From f1b7e140368a52bb7f2a916940c92e135ab18df3 Mon Sep 17 00:00:00 2001 From: Claudio Atzori Date: Mon, 4 May 2020 19:18:34 +0200 Subject: [PATCH 6/9] code formatting --- .../raw/MigrateDbEntitiesApplication.java | 84 ++++++++++++------- 1 file changed, 55 insertions(+), 29 deletions(-) diff --git a/dhp-workflows/dhp-graph-mapper/src/main/java/eu/dnetlib/dhp/oa/graph/raw/MigrateDbEntitiesApplication.java b/dhp-workflows/dhp-graph-mapper/src/main/java/eu/dnetlib/dhp/oa/graph/raw/MigrateDbEntitiesApplication.java index 58339fdc5..f5ac56b78 100644 --- a/dhp-workflows/dhp-graph-mapper/src/main/java/eu/dnetlib/dhp/oa/graph/raw/MigrateDbEntitiesApplication.java +++ b/dhp-workflows/dhp-graph-mapper/src/main/java/eu/dnetlib/dhp/oa/graph/raw/MigrateDbEntitiesApplication.java @@ -51,7 +51,7 @@ import eu.dnetlib.dhp.schema.oaf.Software; import eu.dnetlib.dhp.schema.oaf.StructuredProperty; public class MigrateDbEntitiesApplication extends AbstractMigrationApplication - implements Closeable { + implements Closeable { private static final Log log = LogFactory.getLog(MigrateDbEntitiesApplication.class); @@ -61,9 +61,10 @@ public class MigrateDbEntitiesApplication extends AbstractMigrationApplication public static void main(final String[] args) throws Exception { final ArgumentApplicationParser parser = new ArgumentApplicationParser( - IOUtils - .toString(MigrateDbEntitiesApplication.class - .getResourceAsStream("/eu/dnetlib/dhp/oa/graph/migrate_db_entities_parameters.json"))); + IOUtils + .toString( + MigrateDbEntitiesApplication.class + .getResourceAsStream("/eu/dnetlib/dhp/oa/graph/migrate_db_entities_parameters.json"))); parser.parseArgument(args); @@ -76,7 +77,7 @@ public class MigrateDbEntitiesApplication extends AbstractMigrationApplication final boolean processClaims = parser.get("action") != null && parser.get("action").equalsIgnoreCase("claims"); try (final MigrateDbEntitiesApplication smdbe = new MigrateDbEntitiesApplication(hdfsPath, dbUrl, dbUser, - dbPassword)) { + dbPassword)) { if (processClaims) { log.info("Processing claims..."); smdbe.execute("queryClaims.sql", smdbe::processClaims); @@ -107,15 +108,15 @@ public class MigrateDbEntitiesApplication extends AbstractMigrationApplication } public MigrateDbEntitiesApplication( - final String hdfsPath, final String dbUrl, final String dbUser, final String dbPassword) - throws Exception { + final String hdfsPath, final String dbUrl, final String dbUser, final String dbPassword) + throws Exception { super(hdfsPath); this.dbClient = new DbClient(dbUrl, dbUser, dbPassword); this.lastUpdateTimestamp = new Date().getTime(); } public void execute(final String sqlFile, final Function> producer) - throws Exception { + throws Exception { final String sql = IOUtils.toString(getClass().getResourceAsStream("/eu/dnetlib/dhp/oa/graph/sql/" + sqlFile)); final Consumer consumer = rs -> producer.apply(rs).forEach(oaf -> emitOaf(oaf)); @@ -134,7 +135,10 @@ public class MigrateDbEntitiesApplication extends AbstractMigrationApplication ds.setId(createOpenaireId(10, rs.getString("datasourceid"), true)); ds.setOriginalId(Arrays.asList(rs.getString("datasourceid"))); ds - .setCollectedfrom(listKeyValues(createOpenaireId(10, rs.getString("collectedfromid"), true), rs.getString("collectedfromname"))); + .setCollectedfrom( + listKeyValues( + createOpenaireId(10, rs.getString("collectedfromid"), true), + rs.getString("collectedfromname"))); ds.setPid(new ArrayList<>()); ds.setDateofcollection(asString(rs.getDate("dateofcollection"))); 
ds.setDateoftransformation(null); // Value not returned by the SQL query @@ -175,7 +179,7 @@ public class MigrateDbEntitiesApplication extends AbstractMigrationApplication ds.setCertificates(field(rs.getString("certificates"), info)); ds.setPolicies(new ArrayList<>()); // The sql query returns an empty array ds - .setJournal(prepareJournal(rs.getString("officialname"), rs.getString("journal"), info)); // Journal + .setJournal(prepareJournal(rs.getString("officialname"), rs.getString("journal"), info)); // Journal ds.setDataInfo(info); ds.setLastupdatetimestamp(lastUpdateTimestamp); @@ -195,7 +199,10 @@ public class MigrateDbEntitiesApplication extends AbstractMigrationApplication p.setId(createOpenaireId(40, rs.getString("projectid"), true)); p.setOriginalId(Arrays.asList(rs.getString("projectid"))); p - .setCollectedfrom(listKeyValues(createOpenaireId(10, rs.getString("collectedfromid"), true), rs.getString("collectedfromname"))); + .setCollectedfrom( + listKeyValues( + createOpenaireId(10, rs.getString("collectedfromid"), true), + rs.getString("collectedfromname"))); p.setPid(new ArrayList<>()); p.setDateofcollection(asString(rs.getDate("dateofcollection"))); p.setDateoftransformation(asString(rs.getDate("dateoftransformation"))); @@ -212,7 +219,7 @@ public class MigrateDbEntitiesApplication extends AbstractMigrationApplication p.setDuration(field(Integer.toString(rs.getInt("duration")), info)); p.setEcsc39(field(Boolean.toString(rs.getBoolean("ecsc39")), info)); p - .setOamandatepublications(field(Boolean.toString(rs.getBoolean("oamandatepublications")), info)); + .setOamandatepublications(field(Boolean.toString(rs.getBoolean("oamandatepublications")), info)); p.setEcarticle29_3(field(Boolean.toString(rs.getBoolean("ecarticle29_3")), info)); p.setSubjects(prepareListOfStructProps(rs.getArray("subjects"), info)); p.setFundingtree(prepareListFields(rs.getArray("fundingtree"), info)); @@ -249,7 +256,10 @@ public class MigrateDbEntitiesApplication extends AbstractMigrationApplication o.setId(createOpenaireId(20, rs.getString("organizationid"), true)); o.setOriginalId(Arrays.asList(rs.getString("organizationid"))); o - .setCollectedfrom(listKeyValues(createOpenaireId(10, rs.getString("collectedfromid"), true), rs.getString("collectedfromname"))); + .setCollectedfrom( + listKeyValues( + createOpenaireId(10, rs.getString("collectedfromid"), true), + rs.getString("collectedfromname"))); o.setPid(new ArrayList<>()); o.setDateofcollection(asString(rs.getDate("dateofcollection"))); o.setDateoftransformation(asString(rs.getDate("dateoftransformation"))); @@ -264,12 +274,14 @@ public class MigrateDbEntitiesApplication extends AbstractMigrationApplication o.setEclegalperson(field(Boolean.toString(rs.getBoolean("eclegalperson")), info)); o.setEcnonprofit(field(Boolean.toString(rs.getBoolean("ecnonprofit")), info)); o - .setEcresearchorganization(field(Boolean.toString(rs.getBoolean("ecresearchorganization")), info)); + .setEcresearchorganization(field(Boolean.toString(rs.getBoolean("ecresearchorganization")), info)); o.setEchighereducation(field(Boolean.toString(rs.getBoolean("echighereducation")), info)); o - .setEcinternationalorganizationeurinterests(field(Boolean.toString(rs.getBoolean("ecinternationalorganizationeurinterests")), info)); + .setEcinternationalorganizationeurinterests( + field(Boolean.toString(rs.getBoolean("ecinternationalorganizationeurinterests")), info)); o - .setEcinternationalorganization(field(Boolean.toString(rs.getBoolean("ecinternationalorganization")), info)); + 
.setEcinternationalorganization( + field(Boolean.toString(rs.getBoolean("ecinternationalorganization")), info)); o.setEcenterprise(field(Boolean.toString(rs.getBoolean("ecenterprise")), info)); o.setEcsmevalidated(field(Boolean.toString(rs.getBoolean("ecsmevalidated")), info)); o.setEcnutscode(field(Boolean.toString(rs.getBoolean("ecnutscode")), info)); @@ -288,7 +300,8 @@ public class MigrateDbEntitiesApplication extends AbstractMigrationApplication final DataInfo info = prepareDataInfo(rs); final String orgId = createOpenaireId(20, rs.getString("organization"), true); final String dsId = createOpenaireId(10, rs.getString("datasource"), true); - final List collectedFrom = listKeyValues(createOpenaireId(10, rs.getString("collectedfromid"), true), rs.getString("collectedfromname")); + final List collectedFrom = listKeyValues( + createOpenaireId(10, rs.getString("collectedfromid"), true), rs.getString("collectedfromname")); final Relation r1 = new Relation(); r1.setRelType("datasourceOrganization"); @@ -321,7 +334,8 @@ public class MigrateDbEntitiesApplication extends AbstractMigrationApplication final DataInfo info = prepareDataInfo(rs); final String orgId = createOpenaireId(20, rs.getString("resporganization"), true); final String projectId = createOpenaireId(40, rs.getString("project"), true); - final List collectedFrom = listKeyValues(createOpenaireId(10, rs.getString("collectedfromid"), true), rs.getString("collectedfromname")); + final List collectedFrom = listKeyValues( + createOpenaireId(10, rs.getString("collectedfromid"), true), rs.getString("collectedfromname")); final Relation r1 = new Relation(); r1.setRelType("projectOrganization"); @@ -351,10 +365,12 @@ public class MigrateDbEntitiesApplication extends AbstractMigrationApplication public List processClaims(final ResultSet rs) { - final DataInfo info = - dataInfo(false, null, false, false, qualifier("user:claim", "user:claim", "dnet:provenanceActions", "dnet:provenanceActions"), "0.9"); + final DataInfo info = dataInfo( + false, null, false, false, + qualifier("user:claim", "user:claim", "dnet:provenanceActions", "dnet:provenanceActions"), "0.9"); - final List collectedFrom = listKeyValues(createOpenaireId(10, "infrastruct_::openaire", true), "OpenAIRE"); + final List collectedFrom = listKeyValues( + createOpenaireId(10, "infrastruct_::openaire", true), "OpenAIRE"); try { @@ -440,11 +456,15 @@ public class MigrateDbEntitiesApplication extends AbstractMigrationApplication final String inferenceprovenance = rs.getString("inferenceprovenance"); final Boolean inferred = rs.getBoolean("inferred"); final String trust = rs.getString("trust"); - return dataInfo(deletedbyinference, inferenceprovenance, inferred, false, MigrationConstants.ENTITYREGISTRY_PROVENANCE_ACTION, trust); + return dataInfo( + deletedbyinference, inferenceprovenance, inferred, false, + MigrationConstants.ENTITYREGISTRY_PROVENANCE_ACTION, trust); } private Qualifier prepareQualifierSplitting(final String s) { - if (StringUtils.isBlank(s)) { return null; } + if (StringUtils.isBlank(s)) { + return null; + } final String[] arr = s.split("@@@"); return arr.length == 4 ? 
qualifier(arr[0], arr[1], arr[2], arr[3]) : null; } @@ -458,19 +478,23 @@ public class MigrateDbEntitiesApplication extends AbstractMigrationApplication } private StructuredProperty prepareStructProp(final String s, final DataInfo dataInfo) { - if (StringUtils.isBlank(s)) { return null; } + if (StringUtils.isBlank(s)) { + return null; + } final String[] parts = s.split("###"); if (parts.length == 2) { final String value = parts[0]; final String[] arr = parts[1].split("@@@"); - if (arr.length == 4) { return structuredProperty(value, arr[0], arr[1], arr[2], arr[3], dataInfo); } + if (arr.length == 4) { + return structuredProperty(value, arr[0], arr[1], arr[2], arr[3], dataInfo); + } } return null; } private List prepareListOfStructProps( - final Array array, - final DataInfo dataInfo) throws SQLException { + final Array array, + final DataInfo dataInfo) throws SQLException { final List res = new ArrayList<>(); if (array != null) { for (final String s : (String[]) array.getArray()) { @@ -489,8 +513,10 @@ public class MigrateDbEntitiesApplication extends AbstractMigrationApplication final String[] arr = sj.split("@@@"); if (arr.length == 3) { final String issn = StringUtils.isNotBlank(arr[0]) ? arr[0].trim() : null; - final String eissn = StringUtils.isNotBlank(arr[1]) ? arr[1].trim() : null;; - final String lissn = StringUtils.isNotBlank(arr[2]) ? arr[2].trim() : null;; + final String eissn = StringUtils.isNotBlank(arr[1]) ? arr[1].trim() : null; + ; + final String lissn = StringUtils.isNotBlank(arr[2]) ? arr[2].trim() : null; + ; if (issn != null || eissn != null || lissn != null) { return journal(name, issn, eissn, eissn, null, null, null, null, null, null, null, info); } From a2fc37df5f5fe4bccbfc60e99d30658462ed7643 Mon Sep 17 00:00:00 2001 From: Claudio Atzori Date: Mon, 4 May 2020 19:18:59 +0200 Subject: [PATCH 7/9] adjusted parameters --- .../oa/graph/raw_db/oozie_app/workflow.xml | 85 +++++++++++++++---- 1 file changed, 68 insertions(+), 17 deletions(-) diff --git a/dhp-workflows/dhp-graph-mapper/src/main/resources/eu/dnetlib/dhp/oa/graph/raw_db/oozie_app/workflow.xml b/dhp-workflows/dhp-graph-mapper/src/main/resources/eu/dnetlib/dhp/oa/graph/raw_db/oozie_app/workflow.xml index 0730f3a1f..05b85a561 100644 --- a/dhp-workflows/dhp-graph-mapper/src/main/resources/eu/dnetlib/dhp/oa/graph/raw_db/oozie_app/workflow.xml +++ b/dhp-workflows/dhp-graph-mapper/src/main/resources/eu/dnetlib/dhp/oa/graph/raw_db/oozie_app/workflow.xml @@ -1,8 +1,8 @@ - + - migrationPathStep1 - the base path to store hdfs file + contentPath + path location to store (or reuse) content from the aggregator postgresURL @@ -16,6 +16,7 @@ postgresPassword the password postgres + sparkDriverMemory memory for driver process @@ -28,31 +29,81 @@ sparkExecutorCores number of cores used by single executor + + oozieActionShareLibForSpark2 + oozie action sharelib for spark 2.* + + + spark2ExtraListeners + com.cloudera.spark.lineage.NavigatorAppListener + spark 2.* extra listeners classname + + + spark2SqlQueryExecutionListeners + com.cloudera.spark.lineage.NavigatorQueryListener + spark 2.* sql query execution listeners classname + + + spark2YarnHistoryServerAddress + spark 2.* yarn history server address + + + spark2EventLogDir + spark 2.* event log dir location + - + + ${jobTracker} + ${nameNode} + + + mapreduce.job.queuename + ${queueName} + + + oozie.launcher.mapred.job.queue.name + ${oozieLauncherQueueName} + + + oozie.action.sharelib.for.spark + ${oozieActionShareLibForSpark2} + + + + + Action failed, error 
message[${wf:errorMessage(wf:lastErrorNode())}] - - - - - + + + + + + eu.dnetlib.dhp.oa.graph.raw.MigrateDbEntitiesApplication + --hdfsPath${contentPath}/db_records + --postgresUrl${postgresURL} + --postgresUser${postgresUser} + --postgresPassword${postgresPassword} + + - + - ${jobTracker} - ${nameNode} - eu.dnetlib.dhp.migration.step1.MigrateDbEntitiesApplication - -p${migrationPathStep1}/db_records - -pgurl${postgresURL} - -pguser${postgresUser} - -pgpasswd${postgresPassword} + + + + eu.dnetlib.dhp.oa.graph.raw.MigrateDbEntitiesApplication + --hdfsPath${contentPath}/db_claims + --postgresUrl${postgresURL} + --postgresUser${postgresUser} + --postgresPassword${postgresPassword} + --actionclaims From 4a8487165ca98cb61d06a64006c1ea2ab46751fb Mon Sep 17 00:00:00 2001 From: Claudio Atzori Date: Mon, 4 May 2020 19:19:29 +0200 Subject: [PATCH 8/9] using long param names in wf definition --- .../oa/graph/raw_all/oozie_app/workflow.xml | 70 +++++++++---------- 1 file changed, 35 insertions(+), 35 deletions(-) diff --git a/dhp-workflows/dhp-graph-mapper/src/main/resources/eu/dnetlib/dhp/oa/graph/raw_all/oozie_app/workflow.xml b/dhp-workflows/dhp-graph-mapper/src/main/resources/eu/dnetlib/dhp/oa/graph/raw_all/oozie_app/workflow.xml index 9f91380ab..fa015499c 100644 --- a/dhp-workflows/dhp-graph-mapper/src/main/resources/eu/dnetlib/dhp/oa/graph/raw_all/oozie_app/workflow.xml +++ b/dhp-workflows/dhp-graph-mapper/src/main/resources/eu/dnetlib/dhp/oa/graph/raw_all/oozie_app/workflow.xml @@ -115,11 +115,11 @@ eu.dnetlib.dhp.oa.graph.raw.MigrateDbEntitiesApplication - -p${contentPath}/db_claims - -pgurl${postgresURL} - -pguser${postgresUser} - -pgpasswd${postgresPassword} - -aclaims + --hdfsPath${contentPath}/db_claims + --postgresUrl${postgresURL} + --postgresUser${postgresUser} + --postgresPassword${postgresPassword} + --actionclaims @@ -165,10 +165,10 @@ eu.dnetlib.dhp.oa.graph.raw.MigrateDbEntitiesApplication - -p${contentPath}/db_records - -pgurl${postgresURL} - -pguser${postgresUser} - -pgpasswd${postgresPassword} + --hdfsPath${contentPath}/db_records + --postgresUrl${postgresURL} + --postgresUser${postgresUser} + --postgresPassword${postgresPassword} @@ -180,12 +180,12 @@ eu.dnetlib.dhp.oa.graph.raw.MigrateMongoMdstoresApplication - -p${contentPath}/odf_records - -mongourl${mongoURL} - -mongodb${mongoDb} - -fODF - -lstore - -icleaned + --hdfsPath${contentPath}/odf_records + --mongoBaseUrl${mongoURL} + --mongoDb${mongoDb} + --mdFormatODF + --mdLayoutstore + --mdInterpretationcleaned @@ -197,12 +197,12 @@ eu.dnetlib.dhp.oa.graph.raw.MigrateMongoMdstoresApplication - -p${contentPath}/oaf_records - -mongourl${mongoURL} - -mongodb${mongoDb} - -fOAF - -lstore - -icleaned + --hdfsPath${contentPath}/oaf_records + --mongoBaseUrl${mongoURL} + --mongoDb${mongoDb} + --mdFormatOAF + --mdLayoutstore + --mdInterpretationcleaned @@ -231,11 +231,11 @@ --conf spark.yarn.historyServer.address=${spark2YarnHistoryServerAddress} --conf spark.eventLog.dir=${nameNode}${spark2EventLogDir} - -s${contentPath}/db_claims,${contentPath}/oaf_claims,${contentPath}/odf_claims - -t${workingDir}/entities_claim - -pgurl${postgresURL} - -pguser${postgresUser} - -pgpasswd${postgresPassword} + --sourcePaths${contentPath}/db_claims,${contentPath}/oaf_claims,${contentPath}/odf_claims + --targetPath${workingDir}/entities_claim + --postgresUrl${postgresURL} + --postgresUser${postgresUser} + --postgresPassword${postgresPassword} @@ -257,8 +257,8 @@ --conf spark.yarn.historyServer.address=${spark2YarnHistoryServerAddress} --conf 
spark.eventLog.dir=${nameNode}${spark2EventLogDir} - -s${workingDir}/entities_claim - -g${workingDir}/graph_claims + --sourcePath${workingDir}/entities_claim + --graphRawPath${workingDir}/graph_claims @@ -280,11 +280,11 @@ --conf spark.yarn.historyServer.address=${spark2YarnHistoryServerAddress} --conf spark.eventLog.dir=${nameNode}${spark2EventLogDir} - -s${contentPath}/db_records,${contentPath}/oaf_records,${contentPath}/odf_records - -t${workingDir}/entities - -pgurl${postgresURL} - -pguser${postgresUser} - -pgpasswd${postgresPassword} + --sourcePaths${contentPath}/db_records,${contentPath}/oaf_records,${contentPath}/odf_records + --targetPath${workingDir}/entities + --postgresUrl${postgresURL} + --postgresUser${postgresUser} + --postgresPassword${postgresPassword} @@ -307,8 +307,8 @@ --conf spark.eventLog.dir=${nameNode}${spark2EventLogDir} --conf spark.sql.shuffle.partitions=7680 - -s${workingDir}/entities - -g${workingDir}/graph_raw + --sourcePath${workingDir}/entities + --graphRawPath${workingDir}/graph_raw From 0825321d0b5bf73e8cafe4e692c8b076482f9694 Mon Sep 17 00:00:00 2001 From: Claudio Atzori Date: Tue, 5 May 2020 12:39:04 +0200 Subject: [PATCH 9/9] improved unit tests in dhp-aggregation --- .../GenerateNativeStoreSparkJob.java | 146 ++++++++++-------- .../transformation/TransformSparkJobNode.java | 97 +++++++----- .../collection_input_parameters.json | 98 ++++++++++-- .../transformation_input_parameters.json | 86 +++++++++-- .../dhp/collection/CollectionJobTest.java | 86 +++++------ .../transformation/TransformationJobTest.java | 58 ++++--- 6 files changed, 368 insertions(+), 203 deletions(-) diff --git a/dhp-workflows/dhp-aggregation/src/main/java/eu/dnetlib/dhp/collection/GenerateNativeStoreSparkJob.java b/dhp-workflows/dhp-aggregation/src/main/java/eu/dnetlib/dhp/collection/GenerateNativeStoreSparkJob.java index 9811fb707..861ae5201 100644 --- a/dhp-workflows/dhp-aggregation/src/main/java/eu/dnetlib/dhp/collection/GenerateNativeStoreSparkJob.java +++ b/dhp-workflows/dhp-aggregation/src/main/java/eu/dnetlib/dhp/collection/GenerateNativeStoreSparkJob.java @@ -1,17 +1,21 @@ package eu.dnetlib.dhp.collection; +import static eu.dnetlib.dhp.common.SparkSessionSupport.runWithSparkSession; + import java.io.ByteArrayInputStream; import java.nio.charset.StandardCharsets; import java.util.HashMap; import java.util.Map; import java.util.Objects; +import java.util.Optional; import org.apache.commons.cli.*; import org.apache.commons.io.IOUtils; import org.apache.commons.lang3.StringUtils; import org.apache.hadoop.io.IntWritable; import org.apache.hadoop.io.Text; +import org.apache.spark.SparkConf; import org.apache.spark.api.java.JavaPairRDD; import org.apache.spark.api.java.JavaRDD; import org.apache.spark.api.java.JavaSparkContext; @@ -23,6 +27,8 @@ import org.apache.spark.util.LongAccumulator; import org.dom4j.Document; import org.dom4j.Node; import org.dom4j.io.SAXReader; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import com.fasterxml.jackson.databind.ObjectMapper; @@ -35,6 +41,8 @@ import eu.dnetlib.message.MessageType; public class GenerateNativeStoreSparkJob { + private static final Logger log = LoggerFactory.getLogger(GenerateNativeStoreSparkJob.class); + public static MetadataRecord parseRecord( final String input, final String xpath, @@ -78,84 +86,90 @@ public class GenerateNativeStoreSparkJob { final Provenance provenance = jsonMapper.readValue(parser.get("provenance"), Provenance.class); final long dateOfCollection = new Long(parser.get("dateOfCollection")); - 
final SparkSession spark = SparkSession - .builder() - .appName("GenerateNativeStoreSparkJob") - .master(parser.get("master")) - .getOrCreate(); + Boolean isSparkSessionManaged = Optional + .ofNullable(parser.get("isSparkSessionManaged")) + .map(Boolean::valueOf) + .orElse(Boolean.TRUE); + log.info("isSparkSessionManaged: {}", isSparkSessionManaged); final Map ongoingMap = new HashMap<>(); final Map reportMap = new HashMap<>(); final boolean test = parser.get("isTest") == null ? false : Boolean.valueOf(parser.get("isTest")); - final JavaSparkContext sc = new JavaSparkContext(spark.sparkContext()); + SparkConf conf = new SparkConf(); + runWithSparkSession( + conf, + isSparkSessionManaged, + spark -> { + final JavaSparkContext sc = JavaSparkContext.fromSparkContext(spark.sparkContext()); - final JavaPairRDD inputRDD = sc - .sequenceFile(parser.get("input"), IntWritable.class, Text.class); + final JavaPairRDD inputRDD = sc + .sequenceFile(parser.get("input"), IntWritable.class, Text.class); - final LongAccumulator totalItems = sc.sc().longAccumulator("TotalItems"); + final LongAccumulator totalItems = sc.sc().longAccumulator("TotalItems"); + final LongAccumulator invalidRecords = sc.sc().longAccumulator("InvalidRecords"); - final LongAccumulator invalidRecords = sc.sc().longAccumulator("InvalidRecords"); + final MessageManager manager = new MessageManager( + parser.get("rabbitHost"), + parser.get("rabbitUser"), + parser.get("rabbitPassword"), + false, + false, + null); - final MessageManager manager = new MessageManager( - parser.get("rabbitHost"), - parser.get("rabbitUser"), - parser.get("rabbitPassword"), - false, - false, - null); + final JavaRDD mappeRDD = inputRDD + .map( + item -> parseRecord( + item._2().toString(), + parser.get("xpath"), + parser.get("encoding"), + provenance, + dateOfCollection, + totalItems, + invalidRecords)) + .filter(Objects::nonNull) + .distinct(); - final JavaRDD mappeRDD = inputRDD - .map( - item -> parseRecord( - item._2().toString(), - parser.get("xpath"), - parser.get("encoding"), - provenance, - dateOfCollection, - totalItems, - invalidRecords)) - .filter(Objects::nonNull) - .distinct(); + ongoingMap.put("ongoing", "0"); + if (!test) { + manager + .sendMessage( + new Message( + parser.get("workflowId"), "DataFrameCreation", MessageType.ONGOING, ongoingMap), + parser.get("rabbitOngoingQueue"), + true, + false); + } - ongoingMap.put("ongoing", "0"); - if (!test) { - manager - .sendMessage( - new Message( - parser.get("workflowId"), "DataFrameCreation", MessageType.ONGOING, ongoingMap), - parser.get("rabbitOngoingQueue"), - true, - false); - } + final Encoder encoder = Encoders.bean(MetadataRecord.class); + final Dataset mdstore = spark.createDataset(mappeRDD.rdd(), encoder); + final LongAccumulator mdStoreRecords = sc.sc().longAccumulator("MDStoreRecords"); + mdStoreRecords.add(mdstore.count()); + ongoingMap.put("ongoing", "" + totalItems.value()); + if (!test) { + manager + .sendMessage( + new Message( + parser.get("workflowId"), "DataFrameCreation", MessageType.ONGOING, ongoingMap), + parser.get("rabbitOngoingQueue"), + true, + false); + } + mdstore.write().format("parquet").save(parser.get("output")); + reportMap.put("inputItem", "" + totalItems.value()); + reportMap.put("invalidRecords", "" + invalidRecords.value()); + reportMap.put("mdStoreSize", "" + mdStoreRecords.value()); + if (!test) { + manager + .sendMessage( + new Message(parser.get("workflowId"), "Collection", MessageType.REPORT, reportMap), + parser.get("rabbitReportQueue"), + true, + false); 
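+							// the final REPORT message has been sent; the RabbitMQ connection can be released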
+ manager.close(); + } + }); - final Encoder encoder = Encoders.bean(MetadataRecord.class); - final Dataset mdstore = spark.createDataset(mappeRDD.rdd(), encoder); - final LongAccumulator mdStoreRecords = sc.sc().longAccumulator("MDStoreRecords"); - mdStoreRecords.add(mdstore.count()); - ongoingMap.put("ongoing", "" + totalItems.value()); - if (!test) { - manager - .sendMessage( - new Message( - parser.get("workflowId"), "DataFrameCreation", MessageType.ONGOING, ongoingMap), - parser.get("rabbitOngoingQueue"), - true, - false); - } - mdstore.write().format("parquet").save(parser.get("output")); - reportMap.put("inputItem", "" + totalItems.value()); - reportMap.put("invalidRecords", "" + invalidRecords.value()); - reportMap.put("mdStoreSize", "" + mdStoreRecords.value()); - if (!test) { - manager - .sendMessage( - new Message(parser.get("workflowId"), "Collection", MessageType.REPORT, reportMap), - parser.get("rabbitReportQueue"), - true, - false); - manager.close(); - } } } diff --git a/dhp-workflows/dhp-aggregation/src/main/java/eu/dnetlib/dhp/transformation/TransformSparkJobNode.java b/dhp-workflows/dhp-aggregation/src/main/java/eu/dnetlib/dhp/transformation/TransformSparkJobNode.java index 5f39717d0..8737d36ef 100644 --- a/dhp-workflows/dhp-aggregation/src/main/java/eu/dnetlib/dhp/transformation/TransformSparkJobNode.java +++ b/dhp-workflows/dhp-aggregation/src/main/java/eu/dnetlib/dhp/transformation/TransformSparkJobNode.java @@ -1,13 +1,17 @@ package eu.dnetlib.dhp.transformation; +import static eu.dnetlib.dhp.common.SparkSessionSupport.runWithSparkSession; + import java.io.ByteArrayInputStream; import java.util.HashMap; import java.util.Map; import java.util.Objects; +import java.util.Optional; import org.apache.commons.cli.*; import org.apache.commons.io.IOUtils; +import org.apache.spark.SparkConf; import org.apache.spark.sql.Dataset; import org.apache.spark.sql.Encoder; import org.apache.spark.sql.Encoders; @@ -17,8 +21,11 @@ import org.dom4j.Document; import org.dom4j.DocumentException; import org.dom4j.Node; import org.dom4j.io.SAXReader; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import eu.dnetlib.dhp.application.ArgumentApplicationParser; +import eu.dnetlib.dhp.collection.GenerateNativeStoreSparkJob; import eu.dnetlib.dhp.model.mdstore.MetadataRecord; import eu.dnetlib.dhp.transformation.vocabulary.Vocabulary; import eu.dnetlib.dhp.transformation.vocabulary.VocabularyHelper; @@ -29,6 +36,8 @@ import eu.dnetlib.message.MessageType; public class TransformSparkJobNode { + private static final Logger log = LoggerFactory.getLogger(TransformSparkJobNode.class); + public static void main(String[] args) throws Exception { final ArgumentApplicationParser parser = new ArgumentApplicationParser( @@ -40,12 +49,18 @@ public class TransformSparkJobNode { parser.parseArgument(args); + Boolean isSparkSessionManaged = Optional + .ofNullable(parser.get("isSparkSessionManaged")) + .map(Boolean::valueOf) + .orElse(Boolean.TRUE); + log.info("isSparkSessionManaged: {}", isSparkSessionManaged); + final String inputPath = parser.get("input"); final String outputPath = parser.get("output"); final String workflowId = parser.get("workflowId"); final String trasformationRule = extractXSLTFromTR( Objects.requireNonNull(DHPUtils.decompressString(parser.get("transformationRule")))); - final String master = parser.get("master"); + final String rabbitUser = parser.get("rabbitUser"); final String rabbitPassword = parser.get("rabbitPassword"); final String rabbitHost = parser.get("rabbitHost"); 
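Both refactored jobs above delegate session handling to eu.dnetlib.dhp.common.SparkSessionSupport.runWithSparkSession, whose implementation is not shown in this patch series. The following is a minimal sketch of the contract assumed here (the real helper may rely on a throwing functional interface rather than java.util.function.Consumer, since the job bodies call methods that declare checked exceptions):

import java.util.function.Consumer;

import org.apache.spark.SparkConf;
import org.apache.spark.sql.SparkSession;

public class SparkSessionSupport {

	// Runs fn with a SparkSession built from conf. When isSparkSessionManaged
	// is true the session is stopped once fn returns, matching the parameter
	// description "when true will stop SparkSession after job execution";
	// when false the caller keeps control of the session lifecycle.
	public static void runWithSparkSession(
		SparkConf conf, Boolean isSparkSessionManaged, Consumer<SparkSession> fn) {
		SparkSession spark = SparkSession.builder().config(conf).getOrCreate();
		try {
			fn.accept(spark);
		} finally {
			if (Boolean.TRUE.equals(isSparkSessionManaged)) {
				spark.stop();
			}
		}
	}
}

Under this contract the unit tests further down can build a local SparkSession in @BeforeAll, so that the getOrCreate() call inside the job resolves to the same local session.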
@@ -53,46 +68,48 @@ public class TransformSparkJobNode { final long dateOfCollection = new Long(parser.get("dateOfCollection")); final boolean test = parser.get("isTest") == null ? false : Boolean.valueOf(parser.get("isTest")); - final SparkSession spark = SparkSession - .builder() - .appName("TransformStoreSparkJob") - .master(master) - .getOrCreate(); + SparkConf conf = new SparkConf(); + runWithSparkSession( + conf, + isSparkSessionManaged, + spark -> { + final Encoder encoder = Encoders.bean(MetadataRecord.class); + final Dataset mdstoreInput = spark.read().format("parquet").load(inputPath).as(encoder); + final LongAccumulator totalItems = spark.sparkContext().longAccumulator("TotalItems"); + final LongAccumulator errorItems = spark.sparkContext().longAccumulator("errorItems"); + final LongAccumulator transformedItems = spark.sparkContext().longAccumulator("transformedItems"); + final Map vocabularies = new HashMap<>(); + vocabularies.put("dnet:languages", VocabularyHelper.getVocabularyFromAPI("dnet:languages")); + final TransformFunction transformFunction = new TransformFunction( + totalItems, + errorItems, + transformedItems, + trasformationRule, + dateOfCollection, + vocabularies); + mdstoreInput.map(transformFunction, encoder).write().format("parquet").save(outputPath); + if (rabbitHost != null) { + System.out.println("SEND FINAL REPORT"); + final Map reportMap = new HashMap<>(); + reportMap.put("inputItem", "" + totalItems.value()); + reportMap.put("invalidRecords", "" + errorItems.value()); + reportMap.put("mdStoreSize", "" + transformedItems.value()); + System.out.println(new Message(workflowId, "Transform", MessageType.REPORT, reportMap)); + if (!test) { + final MessageManager manager = new MessageManager(rabbitHost, rabbitUser, rabbitPassword, false, + false, + null); + manager + .sendMessage( + new Message(workflowId, "Transform", MessageType.REPORT, reportMap), + rabbitReportQueue, + true, + false); + manager.close(); + } + } + }); - final Encoder encoder = Encoders.bean(MetadataRecord.class); - final Dataset mdstoreInput = spark.read().format("parquet").load(inputPath).as(encoder); - final LongAccumulator totalItems = spark.sparkContext().longAccumulator("TotalItems"); - final LongAccumulator errorItems = spark.sparkContext().longAccumulator("errorItems"); - final LongAccumulator transformedItems = spark.sparkContext().longAccumulator("transformedItems"); - final Map vocabularies = new HashMap<>(); - vocabularies.put("dnet:languages", VocabularyHelper.getVocabularyFromAPI("dnet:languages")); - final TransformFunction transformFunction = new TransformFunction( - totalItems, - errorItems, - transformedItems, - trasformationRule, - dateOfCollection, - vocabularies); - mdstoreInput.map(transformFunction, encoder).write().format("parquet").save(outputPath); - if (rabbitHost != null) { - System.out.println("SEND FINAL REPORT"); - final Map reportMap = new HashMap<>(); - reportMap.put("inputItem", "" + totalItems.value()); - reportMap.put("invalidRecords", "" + errorItems.value()); - reportMap.put("mdStoreSize", "" + transformedItems.value()); - System.out.println(new Message(workflowId, "Transform", MessageType.REPORT, reportMap)); - if (!test) { - final MessageManager manager = new MessageManager(rabbitHost, rabbitUser, rabbitPassword, false, false, - null); - manager - .sendMessage( - new Message(workflowId, "Transform", MessageType.REPORT, reportMap), - rabbitReportQueue, - true, - false); - manager.close(); - } - } } private static String extractXSLTFromTR(final String tr) 
throws DocumentException { diff --git a/dhp-workflows/dhp-aggregation/src/main/resources/eu/dnetlib/dhp/collection/collection_input_parameters.json b/dhp-workflows/dhp-aggregation/src/main/resources/eu/dnetlib/dhp/collection/collection_input_parameters.json index 4b4925f27..4a6aec5ee 100644 --- a/dhp-workflows/dhp-aggregation/src/main/resources/eu/dnetlib/dhp/collection/collection_input_parameters.json +++ b/dhp-workflows/dhp-aggregation/src/main/resources/eu/dnetlib/dhp/collection/collection_input_parameters.json @@ -1,16 +1,86 @@ [ - {"paramName":"mt", "paramLongName":"master", "paramDescription": "should be local or yarn", "paramRequired": true}, - {"paramName":"e", "paramLongName":"encoding", "paramDescription": "the encoding of the input record should be JSON or XML", "paramRequired": true}, - {"paramName":"d", "paramLongName":"dateOfCollection", "paramDescription": "the date when the record has been stored", "paramRequired": true}, - {"paramName":"p", "paramLongName":"provenance", "paramDescription": "the infos about the provenance of the collected records", "paramRequired": true}, - {"paramName":"x", "paramLongName":"xpath", "paramDescription": "the xpath to identify the record ifentifier", "paramRequired": true}, - {"paramName":"i", "paramLongName":"input", "paramDescription": "the path of the sequencial file to read", "paramRequired": true}, - {"paramName":"o", "paramLongName":"output", "paramDescription": "the path of the result DataFrame on HDFS", "paramRequired": true}, - {"paramName":"ru", "paramLongName":"rabbitUser", "paramDescription": "the user to connect with RabbitMq for messaging", "paramRequired": true}, - {"paramName":"rp", "paramLongName":"rabbitPassword", "paramDescription": "the password to connect with RabbitMq for messaging", "paramRequired": true}, - {"paramName":"rh", "paramLongName":"rabbitHost", "paramDescription": "the host of the RabbitMq server", "paramRequired": true}, - {"paramName":"ro", "paramLongName":"rabbitOngoingQueue", "paramDescription": "the name of the ongoing queue", "paramRequired": true}, - {"paramName":"rr", "paramLongName":"rabbitReportQueue", "paramDescription": "the name of the report queue", "paramRequired": true}, - {"paramName":"w", "paramLongName":"workflowId", "paramDescription": "the identifier of the dnet Workflow", "paramRequired": true}, - {"paramName":"t", "paramLongName":"isTest", "paramDescription": "the name of the report queue", "paramRequired": false} + { + "paramName": "issm", + "paramLongName": "isSparkSessionManaged", + "paramDescription": "when true will stop SparkSession after job execution", + "paramRequired": false + }, + { + "paramName": "e", + "paramLongName": "encoding", + "paramDescription": "the encoding of the input record should be JSON or XML", + "paramRequired": true + }, + { + "paramName": "d", + "paramLongName": "dateOfCollection", + "paramDescription": "the date when the record has been stored", + "paramRequired": true + }, + { + "paramName": "p", + "paramLongName": "provenance", + "paramDescription": "the information about the provenance of the collected records", + "paramRequired": true + }, + { + "paramName": "x", + "paramLongName": "xpath", + "paramDescription": "the xpath to identify the record identifier", + "paramRequired": true + }, + { + "paramName": "i", + "paramLongName": "input", + "paramDescription": "the path of the sequential file to read", + "paramRequired": true + }, + { + "paramName": "o", + "paramLongName": "output", + "paramDescription": "the path of the result DataFrame on HDFS",
"paramRequired": true + }, + { + "paramName": "ru", + "paramLongName": "rabbitUser", + "paramDescription": "the user to connect with RabbitMq for messaging", + "paramRequired": true + }, + { + "paramName": "rp", + "paramLongName": "rabbitPassword", + "paramDescription": "the password to connect with RabbitMq for messaging", + "paramRequired": true + }, + { + "paramName": "rh", + "paramLongName": "rabbitHost", + "paramDescription": "the host of the RabbitMq server", + "paramRequired": true + }, + { + "paramName": "ro", + "paramLongName": "rabbitOngoingQueue", + "paramDescription": "the name of the ongoing queue", + "paramRequired": true + }, + { + "paramName": "rr", + "paramLongName": "rabbitReportQueue", + "paramDescription": "the name of the report queue", + "paramRequired": true + }, + { + "paramName": "w", + "paramLongName": "workflowId", + "paramDescription": "the identifier of the dnet Workflow", + "paramRequired": true + }, + { + "paramName": "t", + "paramLongName": "isTest", + "paramDescription": "the name of the report queue", + "paramRequired": false + } ] \ No newline at end of file diff --git a/dhp-workflows/dhp-aggregation/src/main/resources/eu/dnetlib/dhp/transformation/transformation_input_parameters.json b/dhp-workflows/dhp-aggregation/src/main/resources/eu/dnetlib/dhp/transformation/transformation_input_parameters.json index 3af21f53f..4bb5fd56a 100644 --- a/dhp-workflows/dhp-aggregation/src/main/resources/eu/dnetlib/dhp/transformation/transformation_input_parameters.json +++ b/dhp-workflows/dhp-aggregation/src/main/resources/eu/dnetlib/dhp/transformation/transformation_input_parameters.json @@ -1,16 +1,74 @@ [ - {"paramName":"mt", "paramLongName":"master", "paramDescription": "should be local or yarn", "paramRequired": true}, - {"paramName":"d", "paramLongName":"dateOfCollection", "paramDescription": "the date when the record has been stored", "paramRequired": true}, - {"paramName":"i", "paramLongName":"input", "paramDescription": "the path of the sequencial file to read", "paramRequired": true}, - {"paramName":"o", "paramLongName":"output", "paramDescription": "the path of the result DataFrame on HDFS", "paramRequired": true}, - {"paramName":"w", "paramLongName":"workflowId", "paramDescription": "the identifier of the dnet Workflow", "paramRequired": true}, - {"paramName":"tr", "paramLongName":"transformationRule","paramDescription": "the transformation Rule to apply to the input MDStore", "paramRequired": true}, - {"paramName":"ru", "paramLongName":"rabbitUser", "paramDescription": "the user to connect with RabbitMq for messaging", "paramRequired": true}, - {"paramName":"rp", "paramLongName":"rabbitPassword", "paramDescription": "the password to connect with RabbitMq for messaging", "paramRequired": true}, - {"paramName":"rh", "paramLongName":"rabbitHost", "paramDescription": "the host of the RabbitMq server", "paramRequired": true}, - {"paramName":"ro", "paramLongName":"rabbitOngoingQueue", "paramDescription": "the name of the ongoing queue", "paramRequired": true}, - {"paramName":"rr", "paramLongName":"rabbitReportQueue", "paramDescription": "the name of the report queue", "paramRequired": true}, - {"paramName":"t", "paramLongName":"isTest", "paramDescription": "the name of the report queue", "paramRequired": false} - - + { + "paramName": "issm", + "paramLongName": "isSparkSessionManaged", + "paramDescription": "when true will stop SparkSession after job execution", + "paramRequired": false + }, + { + "paramName": "d", + "paramLongName": "dateOfCollection", + 
"paramDescription": "the date when the record has been stored", + "paramRequired": true + }, + { + "paramName": "i", + "paramLongName": "input", + "paramDescription": "the path of the sequencial file to read", + "paramRequired": true + }, + { + "paramName": "o", + "paramLongName": "output", + "paramDescription": "the path of the result DataFrame on HDFS", + "paramRequired": true + }, + { + "paramName": "w", + "paramLongName": "workflowId", + "paramDescription": "the identifier of the dnet Workflow", + "paramRequired": true + }, + { + "paramName": "tr", + "paramLongName": "transformationRule", + "paramDescription": "the transformation Rule to apply to the input MDStore", + "paramRequired": true + }, + { + "paramName": "ru", + "paramLongName": "rabbitUser", + "paramDescription": "the user to connect with RabbitMq for messaging", + "paramRequired": true + }, + { + "paramName": "rp", + "paramLongName": "rabbitPassword", + "paramDescription": "the password to connect with RabbitMq for messaging", + "paramRequired": true + }, + { + "paramName": "rh", + "paramLongName": "rabbitHost", + "paramDescription": "the host of the RabbitMq server", + "paramRequired": true + }, + { + "paramName": "ro", + "paramLongName": "rabbitOngoingQueue", + "paramDescription": "the name of the ongoing queue", + "paramRequired": true + }, + { + "paramName": "rr", + "paramLongName": "rabbitReportQueue", + "paramDescription": "the name of the report queue", + "paramRequired": true + }, + { + "paramName": "t", + "paramLongName": "isTest", + "paramDescription": "the name of the report queue", + "paramRequired": false + } ] \ No newline at end of file diff --git a/dhp-workflows/dhp-aggregation/src/test/java/eu/dnetlib/dhp/collection/CollectionJobTest.java b/dhp-workflows/dhp-aggregation/src/test/java/eu/dnetlib/dhp/collection/CollectionJobTest.java index 44364b30a..c3b05f5c9 100644 --- a/dhp-workflows/dhp-aggregation/src/test/java/eu/dnetlib/dhp/collection/CollectionJobTest.java +++ b/dhp-workflows/dhp-aggregation/src/test/java/eu/dnetlib/dhp/collection/CollectionJobTest.java @@ -9,65 +9,60 @@ import java.nio.file.Path; import org.apache.commons.io.FileUtils; import org.apache.commons.io.IOUtils; -import org.junit.jupiter.api.AfterEach; -import org.junit.jupiter.api.BeforeEach; -import org.junit.jupiter.api.Test; +import org.apache.spark.SparkConf; +import org.apache.spark.sql.SparkSession; +import org.junit.jupiter.api.*; +import org.junit.jupiter.api.io.TempDir; import com.fasterxml.jackson.databind.ObjectMapper; import eu.dnetlib.dhp.model.mdstore.MetadataRecord; import eu.dnetlib.dhp.model.mdstore.Provenance; +import eu.dnetlib.dhp.schema.common.ModelSupport; public class CollectionJobTest { - private Path testDir; + private static SparkSession spark; - @BeforeEach - public void setup() throws IOException { - testDir = Files.createTempDirectory("dhp-collection"); + @BeforeAll + public static void beforeAll() { + SparkConf conf = new SparkConf(); + conf.setAppName(CollectionJobTest.class.getSimpleName()); + conf.setMaster("local"); + spark = SparkSession.builder().config(conf).getOrCreate(); } - @AfterEach - public void teadDown() throws IOException { - FileUtils.deleteDirectory(testDir.toFile()); + @AfterAll + public static void afterAll() { + spark.stop(); } @Test - public void tesCollection() throws Exception { + public void tesCollection(@TempDir Path testDir) throws Exception { final Provenance provenance = new Provenance("pippo", "puppa", "ns_prefix"); + Assertions.assertNotNull(new 
ObjectMapper().writeValueAsString(provenance)); + GenerateNativeStoreSparkJob .main( new String[] { - "-mt", - "local", - "-w", - "wid", - "-e", - "XML", - "-d", - "" + System.currentTimeMillis(), - "-p", - new ObjectMapper().writeValueAsString(provenance), - "-x", - "./*[local-name()='record']/*[local-name()='header']/*[local-name()='identifier']", - "-i", - this.getClass().getResource("/eu/dnetlib/dhp/collection/native.seq").toString(), - "-o", - testDir.toString() + "/store", - "-t", - "true", - "-ru", - "", - "-rp", - "", - "-rh", - "", - "-ro", - "", - "-rr", - "" + "-issm", "true", + "-w", "wid", + "-e", "XML", + "-d", "" + System.currentTimeMillis(), + "-p", new ObjectMapper().writeValueAsString(provenance), + "-x", "./*[local-name()='record']/*[local-name()='header']/*[local-name()='identifier']", + "-i", this.getClass().getResource("/eu/dnetlib/dhp/collection/native.seq").toString(), + "-o", testDir.toString() + "/store", + "-t", "true", + "-ru", "", + "-rp", "", + "-rh", "", + "-ro", "", + "-rr", "" }); - System.out.println(new ObjectMapper().writeValueAsString(provenance)); + + // TODO introduce useful assertions + } @Test @@ -85,9 +80,8 @@ public class CollectionJobTest { null, null); - assert record != null; - System.out.println(record.getId()); - System.out.println(record.getOriginalId()); + assertNotNull(record.getId()); + assertNotNull(record.getOriginalId()); } @Test @@ -112,10 +106,12 @@ public class CollectionJobTest { System.currentTimeMillis(), null, null); - assert record != null; + record.setBody("ciao"); - assert record1 != null; record1.setBody("mondo"); + + assertNotNull(record); + assertNotNull(record1); assertEquals(record, record1); } } diff --git a/dhp-workflows/dhp-aggregation/src/test/java/eu/dnetlib/dhp/transformation/TransformationJobTest.java b/dhp-workflows/dhp-aggregation/src/test/java/eu/dnetlib/dhp/transformation/TransformationJobTest.java index 01c9e3103..98c8cf66c 100644 --- a/dhp-workflows/dhp-aggregation/src/test/java/eu/dnetlib/dhp/transformation/TransformationJobTest.java +++ b/dhp-workflows/dhp-aggregation/src/test/java/eu/dnetlib/dhp/transformation/TransformationJobTest.java @@ -12,10 +12,14 @@ import java.util.Map; import javax.xml.transform.stream.StreamSource; import org.apache.commons.io.IOUtils; +import org.apache.spark.SparkConf; +import org.apache.spark.sql.SparkSession; import org.apache.spark.util.LongAccumulator; import org.dom4j.Document; import org.dom4j.Node; import org.dom4j.io.SAXReader; +import org.junit.jupiter.api.AfterAll; +import org.junit.jupiter.api.BeforeAll; import org.junit.jupiter.api.DisplayName; import org.junit.jupiter.api.Test; import org.junit.jupiter.api.extension.ExtendWith; @@ -23,6 +27,7 @@ import org.junit.jupiter.api.io.TempDir; import org.mockito.Mock; import org.mockito.junit.jupiter.MockitoExtension; import eu.dnetlib.dhp.model.mdstore.MetadataRecord; import eu.dnetlib.dhp.transformation.functions.Cleaner; import eu.dnetlib.dhp.transformation.vocabulary.Vocabulary; @@ -33,6 +38,21 @@ import net.sf.saxon.s9api.*; @ExtendWith(MockitoExtension.class) public class TransformationJobTest { + private static SparkSession spark; + + @BeforeAll + public static void beforeAll() { + SparkConf conf = new SparkConf(); + conf.setAppName(TransformationJobTest.class.getSimpleName()); + conf.setMaster("local"); + spark = SparkSession.builder().config(conf).getOrCreate(); + } + + @AfterAll + public static void afterAll() { + spark.stop(); + } + @Mock private LongAccumulator
accumulator; @@ -78,31 +98,21 @@ public class TransformationJobTest { TransformSparkJobNode .main( new String[] { - "-mt", - "local", - "-i", - mdstore_input, - "-o", - mdstore_output, - "-d", - "1", - "-w", - "1", - "-tr", - xslt, - "-t", - "true", - "-ru", - "", - "-rp", - "", - "-rh", - "", - "-ro", - "", - "-rr", - "" + "-issm", "true", + "-i", mdstore_input, + "-o", mdstore_output, + "-d", "1", + "-w", "1", + "-tr", xslt, + "-t", "true", + "-ru", "", + "-rp", "", + "-rh", "", + "-ro", "", + "-rr", "" }); + + // TODO introduce useful assertions } @Test