diff --git a/dhp-workflows/dhp-graph-mapper/src/main/resources/eu/dnetlib/dhp/oa/graph/dump/oozie_app/workflow.xml b/dhp-workflows/dhp-graph-mapper/src/main/resources/eu/dnetlib/dhp/oa/graph/dump/oozie_app/workflow.xml
index 7321fd076..baf9ae37a 100644
--- a/dhp-workflows/dhp-graph-mapper/src/main/resources/eu/dnetlib/dhp/oa/graph/dump/oozie_app/workflow.xml
+++ b/dhp-workflows/dhp-graph-mapper/src/main/resources/eu/dnetlib/dhp/oa/graph/dump/oozie_app/workflow.xml
@@ -1,18 +1,18 @@
-        <property>
-            <name>sourcePath</name>
-            <description>the source path</description>
-        </property>
-        <property>
-            <name>isLookUpUrl</name>
-            <description>the isLookup service endpoint</description>
-        </property>
-        <property>
-            <name>outputPath</name>
-            <description>the output path</description>
-        </property>
+        <property>
+            <name>sourcePath</name>
+            <description>the source path</description>
+        </property>
+        <property>
+            <name>isLookUpUrl</name>
+            <description>the isLookup service endpoint</description>
+        </property>
+        <property>
+            <name>outputPath</name>
+            <description>the output path</description>
+        </property>
             <name>accessToken</name>
             <description>the access token used for the deposition in Zenodo</description>
@@ -320,6 +320,7 @@
+
             <master>yarn</master>
@@ -344,6 +345,7 @@
+
             <master>yarn</master>
@@ -371,43 +373,42 @@
-
-        <spark xmlns="uri:oozie:spark-action:0.2">
-            <master>yarn</master>
-            <mode>cluster</mode>
-            <name>Split dumped result for community</name>
-            <class>eu.dnetlib.dhp.oa.graph.dump.community.SparkSplitForCommunity</class>
-            <jar>dhp-graph-mapper-${projectVersion}.jar</jar>
-            <spark-opts>
-                --executor-memory=${sparkExecutorMemory}
-                --executor-cores=${sparkExecutorCores}
-                --driver-memory=${sparkDriverMemory}
-                --conf spark.extraListeners=${spark2ExtraListeners}
-                --conf spark.sql.queryExecutionListeners=${spark2SqlQueryExecutionListeners}
-                --conf spark.yarn.historyServer.address=${spark2YarnHistoryServerAddress}
-                --conf spark.eventLog.dir=${nameNode}${spark2EventLogDir}
-                --conf spark.sql.warehouse.dir=${sparkSqlWarehouseDir}
-            </spark-opts>
-            <arg>--sourcePath</arg><arg>${workingDir}/ext</arg>
-            <arg>--outputPath</arg><arg>${workingDir}/split</arg>
-            <arg>--communityMapPath</arg><arg>${workingDir}/communityMap</arg>
-        </spark>
-
-
-    </action>
+
+        <spark xmlns="uri:oozie:spark-action:0.2">
+            <master>yarn</master>
+            <mode>cluster</mode>
+            <name>Split dumped result for community</name>
+            <class>eu.dnetlib.dhp.oa.graph.dump.community.SparkSplitForCommunity</class>
+            <jar>dhp-graph-mapper-${projectVersion}.jar</jar>
+            <spark-opts>
+                --executor-memory=${sparkExecutorMemory}
+                --executor-cores=${sparkExecutorCores}
+                --driver-memory=${sparkDriverMemory}
+                --conf spark.extraListeners=${spark2ExtraListeners}
+                --conf spark.sql.queryExecutionListeners=${spark2SqlQueryExecutionListeners}
+                --conf spark.yarn.historyServer.address=${spark2YarnHistoryServerAddress}
+                --conf spark.eventLog.dir=${nameNode}${spark2EventLogDir}
+                --conf spark.sql.warehouse.dir=${sparkSqlWarehouseDir}
+            </spark-opts>
+            <arg>--sourcePath</arg><arg>${workingDir}/ext</arg>
+            <arg>--outputPath</arg><arg>${workingDir}/split</arg>
+            <arg>--communityMapPath</arg><arg>${workingDir}/communityMap</arg>
+        </spark>
+
+
+    </action>
             <main-class>eu.dnetlib.dhp.oa.graph.dump.MakeTar</main-class>
             <arg>--hdfsPath</arg><arg>${outputPath}</arg>
             <arg>--nameNode</arg><arg>${nameNode}</arg>
-            <arg>--sourcePath</arg><arg>${workingDir}/split</arg>
+            <arg>--sourcePath</arg><arg>${workingDir}/split</arg>
-
             <main-class>eu.dnetlib.dhp.oa.graph.dump.SendToZenodoHDFS</main-class>
@@ -424,8 +425,6 @@
-
-
\ No newline at end of file
diff --git a/dhp-workflows/dhp-graph-mapper/src/main/resources/eu/dnetlib/dhp/oa/graph/dump_whole/oozie_app/workflow.xml b/dhp-workflows/dhp-graph-mapper/src/main/resources/eu/dnetlib/dhp/oa/graph/dump_whole/oozie_app/workflow.xml
index 793c1ed33..e5001bf43 100644
--- a/dhp-workflows/dhp-graph-mapper/src/main/resources/eu/dnetlib/dhp/oa/graph/dump_whole/oozie_app/workflow.xml
+++ b/dhp-workflows/dhp-graph-mapper/src/main/resources/eu/dnetlib/dhp/oa/graph/dump_whole/oozie_app/workflow.xml
@@ -1,18 +1,18 @@
-        <property>
-            <name>sourcePath</name>
-            <description>the source path</description>
-        </property>
-        <property>
-            <name>isLookUpUrl</name>
-            <description>the isLookup service endpoint</description>
-        </property>
-        <property>
-            <name>outputPath</name>
-            <description>the output path</description>
-        </property>
+        <property>
+            <name>sourcePath</name>
+            <description>the source path</description>
+        </property>
+        <property>
+            <name>isLookUpUrl</name>
+            <description>the isLookup service endpoint</description>
+        </property>
+        <property>
+            <name>outputPath</name>
+            <description>the output path</description>
+        </property>
             <name>resultAggregation</name>
             <description>true if all the result type have to be dumped under result. false otherwise</description>
@@ -357,10 +357,8 @@
-
-
@@ -389,7 +387,6 @@
-
             <master>yarn</master>
@@ -418,7 +415,6 @@
-
@@ -530,7 +526,6 @@
-
@@ -568,8 +563,7 @@
-
-
+
             <main-class>eu.dnetlib.dhp.oa.graph.dump.SendToZenodoHDFS</main-class>