projectFileURL
the URL from which to retrieve the projects file
programmeFileURL
the URL from which to retrieve the programme file
outputPath
path where to store the action set
Action failed, error message[${wf:errorMessage(wf:lastErrorNode())}]
eu.dnetlib.dhp.actionmanager.project.csvutils.ReadCSV
--hdfsNameNode${nameNode}
--fileURL${projectFileURL}
--hdfsPath${workingDir}/projects
--classForNameeu.dnetlib.dhp.actionmanager.project.csvutils.CSVProject
eu.dnetlib.dhp.actionmanager.project.csvutils.ReadCSV
--hdfsNameNode${nameNode}
--fileURL${programmeFileURL}
--hdfsPath${workingDir}/programme
--classForNameeu.dnetlib.dhp.actionmanager.project.csvutils.CSVProgramme
yarn
cluster
PrepareProgramme
eu.dnetlib.dhp.actionmanager.project.PrepareProgramme
dhp-aggregation-${projectVersion}.jar
--executor-cores=${sparkExecutorCores}
--executor-memory=${sparkExecutorMemory}
--driver-memory=${sparkDriverMemory}
--conf spark.extraListeners=${spark2ExtraListeners}
--conf spark.sql.queryExecutionListeners=${spark2SqlQueryExecutionListeners}
--conf spark.yarn.historyServer.address=${spark2YarnHistoryServerAddress}
--conf spark.eventLog.dir=${nameNode}${spark2EventLogDir}
--conf spark.sql.shuffle.partitions=3840
--programmePath${workingDir}/programme
--outputPath${workingDir}/preparedProgramme
yarn
cluster
PrepareProjects
eu.dnetlib.dhp.actionmanager.project.PrepareProjects
dhp-aggregation-${projectVersion}.jar
--executor-cores=${sparkExecutorCores}
--executor-memory=${sparkExecutorMemory}
--driver-memory=${sparkDriverMemory}
--conf spark.extraListeners=${spark2ExtraListeners}
--conf spark.sql.queryExecutionListeners=${spark2SqlQueryExecutionListeners}
--conf spark.yarn.historyServer.address=${spark2YarnHistoryServerAddress}
--conf spark.eventLog.dir=${nameNode}${spark2EventLogDir}
--conf spark.sql.shuffle.partitions=3840
--projectPath${workingDir}/projects
--outputPath${workingDir}/preparedProjects
yarn
cluster
ProjectProgrammeAS
eu.dnetlib.dhp.actionmanager.project.SparkAtomicActionJob
dhp-aggregation-${projectVersion}.jar
--executor-cores=${sparkExecutorCores}
--executor-memory=${sparkExecutorMemory}
--driver-memory=${sparkDriverMemory}
--conf spark.extraListeners=${spark2ExtraListeners}
--conf spark.sql.queryExecutionListeners=${spark2SqlQueryExecutionListeners}
--conf spark.yarn.historyServer.address=${spark2YarnHistoryServerAddress}
--conf spark.eventLog.dir=${nameNode}${spark2EventLogDir}
--conf spark.sql.shuffle.partitions=3840
--projectPath${workingDir}/preparedProjects
--programmePath${workingDir}/preparedProgramme
--outputPath${outputPath}