sourcePath
the source path
allowedsemrels
the allowed semantic relations
outputPath
the output path
Action failed, error message[${wf:errorMessage(wf:lastErrorNode())}]
${jobTracker}
${nameNode}
${nameNode}/${sourcePath}/relation
${nameNode}/${outputPath}/relation
${jobTracker}
${nameNode}
${nameNode}/${sourcePath}/publication
${nameNode}/${outputPath}/publication
${jobTracker}
${nameNode}
${nameNode}/${sourcePath}/dataset
${nameNode}/${outputPath}/dataset
${jobTracker}
${nameNode}
${nameNode}/${sourcePath}/otherresearchproduct
${nameNode}/${outputPath}/otherresearchproduct
${jobTracker}
${nameNode}
${nameNode}/${sourcePath}/software
${nameNode}/${outputPath}/software
${jobTracker}
${nameNode}
${nameNode}/${sourcePath}/organization
${nameNode}/${outputPath}/organization
${jobTracker}
${nameNode}
${nameNode}/${sourcePath}/project
${nameNode}/${outputPath}/project
${jobTracker}
${nameNode}
${nameNode}/${sourcePath}/datasource
${nameNode}/${outputPath}/datasource
yarn
cluster
PrepareProjectResultsAssociation
eu.dnetlib.dhp.projecttoresult.PrepareProjectResultsAssociation
dhp-enrichment-${projectVersion}.jar
--executor-cores=${sparkExecutorCores}
--executor-memory=${sparkExecutorMemory}
--driver-memory=${sparkDriverMemory}
--conf spark.extraListeners=${spark2ExtraListeners}
--conf spark.sql.queryExecutionListeners=${spark2SqlQueryExecutionListeners}
--conf spark.yarn.historyServer.address=${spark2YarnHistoryServerAddress}
--conf spark.eventLog.dir=${nameNode}${spark2EventLogDir}
--sourcePath${sourcePath}/relation
--allowedsemrels${allowedsemrels}
--hive_metastore_uris${hive_metastore_uris}
--potentialUpdatePath${workingDir}/preparedInfo/potentialUpdates
--alreadyLinkedPath${workingDir}/preparedInfo/alreadyLinked
yarn
cluster
ProjectToResultPropagation
eu.dnetlib.dhp.projecttoresult.SparkResultToProjectThroughSemRelJob
dhp-enrichment-${projectVersion}.jar
--executor-cores=${sparkExecutorCores}
--executor-memory=${sparkExecutorMemory}
--driver-memory=${sparkDriverMemory}
--conf spark.extraListeners=${spark2ExtraListeners}
--conf spark.sql.queryExecutionListeners=${spark2SqlQueryExecutionListeners}
--conf spark.yarn.historyServer.address=${spark2YarnHistoryServerAddress}
--conf spark.eventLog.dir=${nameNode}${spark2EventLogDir}
--conf spark.dynamicAllocation.enabled=true
--conf spark.dynamicAllocation.maxExecutors=${spark2MaxExecutors}
--saveGraph${saveGraph}
--hive_metastore_uris${hive_metastore_uris}
--outputPath${outputPath}/relation
--potentialUpdatePath${workingDir}/preparedInfo/potentialUpdates
--alreadyLinkedPath${workingDir}/preparedInfo/alreadyLinked