Oozie workflow for propagating ORCID identifiers to results through the allowed semantic relations. Its flattened content is as follows.

Workflow parameters:

    sourcePath      - the source path
    allowedsemrels  - the semantic relationships allowed for propagation
    outputPath      - the output path

Kill node message:

    Action failed, error message[${wf:errorMessage(wf:lastErrorNode())}]

Copy actions (distcp, each running on ${jobTracker} / ${nameNode}):

    relation:      ${nameNode}/${sourcePath}/relation      ->  ${nameNode}/${outputPath}/relation
    organization:  ${nameNode}/${sourcePath}/organization  ->  ${nameNode}/${outputPath}/organization
    project:       ${nameNode}/${sourcePath}/project       ->  ${nameNode}/${outputPath}/project
    datasource:    ${nameNode}/${sourcePath}/datasource    ->  ${nameNode}/${outputPath}/datasource

Spark actions (all submitted with master=yarn, mode=cluster, jar=dhp-propagation-${projectVersion}.jar):

Common spark-opts shared by every Spark action:

    --executor-cores=${sparkExecutorCores}
    --executor-memory=${sparkExecutorMemory}
    --driver-memory=${sparkDriverMemory}
    --conf spark.extraListeners=${spark2ExtraListeners}
    --conf spark.sql.queryExecutionListeners=${spark2SqlQueryExecutionListeners}
    --conf spark.yarn.historyServer.address=${spark2YarnHistoryServerAddress}
    --conf spark.eventLog.dir=${nameNode}${spark2EventLogDir}
    --conf spark.dynamicAllocation.enabled=true
    --conf spark.dynamicAllocation.maxExecutors=${spark2MaxExecutors}

Speculation-off spark-opts, applied where noted below:

    --conf spark.speculation=false
    --conf spark.hadoop.mapreduce.map.speculative=false
    --conf spark.hadoop.mapreduce.reduce.speculative=false

1. ORCIDPropagation-PreparePhase1-Publications
   class: eu.dnetlib.dhp.orcidtoresultfromsemrel.PrepareResultOrcidAssociationStep1
   extra spark-opts: --conf spark.sql.shuffle.partitions=3840, plus the speculation-off opts
   args: --sourcePath ${sourcePath}
         --hive_metastore_uris ${hive_metastore_uris}
         --resultTableName eu.dnetlib.dhp.schema.oaf.Publication
         --outputPath ${workingDir}/preparedInfo/targetOrcidAssoc
         --allowedsemrels ${allowedsemrels}

2. ORCIDPropagation-PreparePhase1-Dataset
   class: eu.dnetlib.dhp.orcidtoresultfromsemrel.PrepareResultOrcidAssociationStep1
   extra spark-opts: none
   args: as in step 1, with --resultTableName eu.dnetlib.dhp.schema.oaf.Dataset

3. ORCIDPropagation-PreparePhase1-ORP
   class: eu.dnetlib.dhp.orcidtoresultfromsemrel.PrepareResultOrcidAssociationStep1
   extra spark-opts: none
   args: as in step 1, with --resultTableName eu.dnetlib.dhp.schema.oaf.OtherResearchProduct

4. ORCIDPropagation-PreparePhase1-Software
   class: eu.dnetlib.dhp.orcidtoresultfromsemrel.PrepareResultOrcidAssociationStep1
   extra spark-opts: none
   args: as in step 1, with --resultTableName eu.dnetlib.dhp.schema.oaf.Software

5. ORCIDPropagation-PreparePhase2 (merges the per-type associations produced by steps 1-4)
   class: eu.dnetlib.dhp.orcidtoresultfromsemrel.PrepareResultOrcidAssociationStep2
   extra spark-opts: none
   args: --sourcePath ${workingDir}/preparedInfo/targetOrcidAssoc
         --outputPath ${workingDir}/preparedInfo/mergedOrcidAssoc

6. ORCIDPropagation-Publication
   class: eu.dnetlib.dhp.orcidtoresultfromsemrel.SparkOrcidToResultFromSemRelJob
   extra spark-opts: the speculation-off opts, plus --conf spark.sql.shuffle.partitions=3840
   args: --possibleUpdatesPath ${workingDir}/preparedInfo/mergedOrcidAssoc
         --sourcePath ${sourcePath}/publication
         --hive_metastore_uris ${hive_metastore_uris}
         --resultTableName eu.dnetlib.dhp.schema.oaf.Publication
         --outputPath ${outputPath}/publication
         --saveGraph ${saveGraph}

7. ORCIDPropagation-Dataset
   class: eu.dnetlib.dhp.orcidtoresultfromsemrel.SparkOrcidToResultFromSemRelJob
   extra spark-opts: the speculation-off opts
   args: as in step 6, with --sourcePath ${sourcePath}/dataset,
         --resultTableName eu.dnetlib.dhp.schema.oaf.Dataset, --outputPath ${outputPath}/dataset

8. ORCIDPropagation-ORP
   class: eu.dnetlib.dhp.orcidtoresultfromsemrel.SparkOrcidToResultFromSemRelJob
   extra spark-opts: the speculation-off opts
   args: as in step 6, with --sourcePath ${sourcePath}/otherresearchproduct,
         --resultTableName eu.dnetlib.dhp.schema.oaf.OtherResearchProduct, --outputPath ${outputPath}/otherresearchproduct

9. ORCIDPropagation-Software
   class: eu.dnetlib.dhp.orcidtoresultfromsemrel.SparkOrcidToResultFromSemRelJob
   extra spark-opts: the speculation-off opts
   args: as in step 6, with --sourcePath ${sourcePath}/software,
         --resultTableName eu.dnetlib.dhp.schema.oaf.Software, --outputPath ${outputPath}/software
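Restored to Oozie XML, one of the copy steps above would look roughly like the sketch below. This is a minimal reconstruction assuming the standard uri:oozie:distcp-action:0.2 syntax; the action name and the ok/error transition targets (copy_wait, Kill) are illustrative placeholders, not taken from the original file.

    <!-- Sketch of the relation copy step as an Oozie distcp action.
         Action name and ok/error targets are assumed, not from the source. -->
    <action name="copy_relation">
        <distcp xmlns="uri:oozie:distcp-action:0.2">
            <job-tracker>${jobTracker}</job-tracker>
            <name-node>${nameNode}</name-node>
            <arg>${nameNode}/${sourcePath}/relation</arg>
            <arg>${nameNode}/${outputPath}/relation</arg>
        </distcp>
        <ok to="copy_wait"/>
        <error to="Kill"/>
    </action>

The organization, project, and datasource copy actions differ only in the entity segment of the two paths.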
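Likewise, each Spark step maps onto an Oozie spark action. The sketch below reconstructs step 6 (ORCIDPropagation-Publication) assuming the standard uri:oozie:spark-action:0.2 syntax; again, the action name and the ok/error targets are assumed placeholders rather than values recovered from the original file.

    <!-- Sketch of the ORCIDPropagation-Publication step as an Oozie spark action.
         Action name and ok/error targets are assumed, not from the source. -->
    <action name="propagate_orcid_publication">
        <spark xmlns="uri:oozie:spark-action:0.2">
            <master>yarn</master>
            <mode>cluster</mode>
            <name>ORCIDPropagation-Publication</name>
            <class>eu.dnetlib.dhp.orcidtoresultfromsemrel.SparkOrcidToResultFromSemRelJob</class>
            <jar>dhp-propagation-${projectVersion}.jar</jar>
            <spark-opts>
                --executor-cores=${sparkExecutorCores}
                --executor-memory=${sparkExecutorMemory}
                --driver-memory=${sparkDriverMemory}
                --conf spark.extraListeners=${spark2ExtraListeners}
                --conf spark.sql.queryExecutionListeners=${spark2SqlQueryExecutionListeners}
                --conf spark.yarn.historyServer.address=${spark2YarnHistoryServerAddress}
                --conf spark.eventLog.dir=${nameNode}${spark2EventLogDir}
                --conf spark.dynamicAllocation.enabled=true
                --conf spark.dynamicAllocation.maxExecutors=${spark2MaxExecutors}
                --conf spark.speculation=false
                --conf spark.hadoop.mapreduce.map.speculative=false
                --conf spark.hadoop.mapreduce.reduce.speculative=false
                --conf spark.sql.shuffle.partitions=3840
            </spark-opts>
            <arg>--possibleUpdatesPath</arg><arg>${workingDir}/preparedInfo/mergedOrcidAssoc</arg>
            <arg>--sourcePath</arg><arg>${sourcePath}/publication</arg>
            <arg>--hive_metastore_uris</arg><arg>${hive_metastore_uris}</arg>
            <arg>--resultTableName</arg><arg>eu.dnetlib.dhp.schema.oaf.Publication</arg>
            <arg>--outputPath</arg><arg>${outputPath}/publication</arg>
            <arg>--saveGraph</arg><arg>${saveGraph}</arg>
        </spark>
        <ok to="wait_propagation"/>
        <error to="Kill"/>
    </action>

The alternating <arg> elements carry the option name and its value, which is how Oozie assembles the command line passed to the job's main class.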