diff --git a/dhp-workflows/dhp-propagation/src/main/resources/eu/dnetlib/dhp/countrypropagation/input_countrypropagation_parameters.json b/dhp-workflows/dhp-propagation/src/main/resources/eu/dnetlib/dhp/countrypropagation/input_countrypropagation_parameters.json
index 81fead58fa..068c673ce4 100644
--- a/dhp-workflows/dhp-propagation/src/main/resources/eu/dnetlib/dhp/countrypropagation/input_countrypropagation_parameters.json
+++ b/dhp-workflows/dhp-propagation/src/main/resources/eu/dnetlib/dhp/countrypropagation/input_countrypropagation_parameters.json
@@ -11,18 +11,6 @@
"paramDescription": "the path of the sequencial file to read",
"paramRequired": true
},
- {
- "paramName":"wl",
- "paramLongName":"whitelist",
- "paramDescription": "datasource id that will be considered even if not in the allowed typology list. Split by ;",
- "paramRequired": true
- },
- {
- "paramName":"at",
- "paramLongName":"allowedtypes",
- "paramDescription": "the types of the allowed datasources. Split by ;",
- "paramRequired": true
- },
{
"paramName":"h",
"paramLongName":"hive_metastore_uris",
@@ -40,5 +28,11 @@
"paramLongName":"saveGraph",
"paramDescription": "true if the new version of the graph must be saved",
"paramRequired": true
+ },
+ {
+ "paramName":"tn",
+ "paramLongName":"resultTableName",
+ "paramDescription": "the name of the result table we are currently working on",
+ "paramRequired": true
}
]
\ No newline at end of file
diff --git a/dhp-workflows/dhp-propagation/src/main/resources/eu/dnetlib/dhp/countrypropagation/oozie_app/workflow.xml b/dhp-workflows/dhp-propagation/src/main/resources/eu/dnetlib/dhp/countrypropagation/oozie_app/workflow.xml
index aa1e6dc78e..d91207f46c 100644
--- a/dhp-workflows/dhp-propagation/src/main/resources/eu/dnetlib/dhp/countrypropagation/oozie_app/workflow.xml
+++ b/dhp-workflows/dhp-propagation/src/main/resources/eu/dnetlib/dhp/countrypropagation/oozie_app/workflow.xml
@@ -38,12 +38,152 @@
-
+
Action failed, error message[${wf:errorMessage(wf:lastErrorNode())}]
+
+
+ yarn
+ cluster
+ PrepareDatasourceCountryAssociation
+ eu.dnetlib.dhp.countrypropagation.PrepareDatasourceCountryAssociation
+ dhp-graph-propagation-${projectVersion}.jar
+
+ --executor-cores=${sparkExecutorCoresForJoining}
+ --executor-memory=${sparkExecutorMemoryForJoining}
+ --driver-memory=${sparkDriverMemoryForJoining}
+ --conf spark.extraListeners=${spark2ExtraListeners}
+ --conf spark.sql.queryExecutionListeners=${spark2SqlQueryExecutionListeners}
+ --conf spark.yarn.historyServer.address=${spark2YarnHistoryServerAddress}
+ --conf spark.eventLog.dir=${nameNode}${spark2EventLogDir}
+
+ -mt yarn-cluster
+ --sourcePath${sourcePath}
+ --whitelist${whitelist}
+ --allowedtypes${allowedtypes}
+ --hive_metastore_uris${hive_metastore_uris}
+
+
+
+
+
+
+
+
+
+
+
+
+
+ yarn
+ cluster
+ countryPropagationForPublications
+ eu.dnetlib.dhp.countrypropagation.SparkCountryPropagationJob2
+ dhp-graph-propagation-${projectVersion}.jar
+
+ --executor-cores=${sparkExecutorCoresForJoining}
+ --executor-memory=${sparkExecutorMemoryForJoining}
+ --driver-memory=${sparkDriverMemoryForJoining}
+ --conf spark.extraListeners=${spark2ExtraListeners}
+ --conf spark.sql.queryExecutionListeners=${spark2SqlQueryExecutionListeners}
+ --conf spark.yarn.historyServer.address=${spark2YarnHistoryServerAddress}
+ --conf spark.eventLog.dir=${nameNode}${spark2EventLogDir}
+
+ -mt yarn-cluster
+ --sourcePath${sourcePath}
+ --hive_metastore_uris${hive_metastore_uris}
+ --writeUpdate${writeUpdate}
+ --saveGraph${saveGraph}
+ --resultTableNameeu.dnetlib.dhp.schema.oaf.Publication
+
+
+
+
+
+
+
+ yarn
+ cluster
+ countryPropagationForDataset
+ eu.dnetlib.dhp.countrypropagation.SparkCountryPropagationJob2
+ dhp-graph-propagation-${projectVersion}.jar
+
+ --executor-cores=${sparkExecutorCoresForJoining}
+ --executor-memory=${sparkExecutorMemoryForJoining}
+ --driver-memory=${sparkDriverMemoryForJoining}
+ --conf spark.extraListeners=${spark2ExtraListeners}
+ --conf spark.sql.queryExecutionListeners=${spark2SqlQueryExecutionListeners}
+ --conf spark.yarn.historyServer.address=${spark2YarnHistoryServerAddress}
+ --conf spark.eventLog.dir=${nameNode}${spark2EventLogDir}
+
+ -mt yarn-cluster
+ --sourcePath${sourcePath}
+ --hive_metastore_uris${hive_metastore_uris}
+ --writeUpdate${writeUpdate}
+ --saveGraph${saveGraph}
+ --resultTableNameeu.dnetlib.dhp.schema.oaf.Dataset
+
+
+
+
+
+
+
+ yarn
+ cluster
+ countryPropagationForORP
+ eu.dnetlib.dhp.countrypropagation.SparkCountryPropagationJob2
+ dhp-graph-propagation-${projectVersion}.jar
+
+ --executor-cores=${sparkExecutorCoresForJoining}
+ --executor-memory=${sparkExecutorMemoryForJoining}
+ --driver-memory=${sparkDriverMemoryForJoining}
+ --conf spark.extraListeners=${spark2ExtraListeners}
+ --conf spark.sql.queryExecutionListeners=${spark2SqlQueryExecutionListeners}
+ --conf spark.yarn.historyServer.address=${spark2YarnHistoryServerAddress}
+ --conf spark.eventLog.dir=${nameNode}${spark2EventLogDir}
+
+ -mt yarn-cluster
+ --sourcePath${sourcePath}
+ --hive_metastore_uris${hive_metastore_uris}
+ --writeUpdate${writeUpdate}
+ --saveGraph${saveGraph}
+ --resultTableNameeu.dnetlib.dhp.schema.oaf.OtherResearchProduct
+
+
+
+
+
+
+
+ yarn
+ cluster
+ countryPropagationForSoftware
+ eu.dnetlib.dhp.countrypropagation.SparkCountryPropagationJob2
+ dhp-graph-propagation-${projectVersion}.jar
+
+ --executor-cores=${sparkExecutorCoresForJoining}
+ --executor-memory=${sparkExecutorMemoryForJoining}
+ --driver-memory=${sparkDriverMemoryForJoining}
+ --conf spark.extraListeners=${spark2ExtraListeners}
+ --conf spark.sql.queryExecutionListeners=${spark2SqlQueryExecutionListeners}
+ --conf spark.yarn.historyServer.address=${spark2YarnHistoryServerAddress}
+ --conf spark.eventLog.dir=${nameNode}${spark2EventLogDir}
+
+ -mt yarn-cluster
+ --sourcePath${sourcePath}
+ --hive_metastore_uris${hive_metastore_uris}
+ --writeUpdate${writeUpdate}
+ --saveGraph${saveGraph}
+ --resultTableNameeu.dnetlib.dhp.schema.oaf.Software
+
+
+
+
+
${jobTracker}