outputGraph
the target graph name (or path)
graphFormat
HIVE
the graph data format
reuseContent
false
should import content from the aggregator or reuse a previous version
contentPath
path location to store (or reuse) content from the aggregator
postgresURL
the postgres URL to access to the database
postgresUser
the postgres user to access the database
postgresPassword
the password of the postgres user
dbSchema
beta
the database schema according to the D-Net infrastructure (beta or production)
mongoURL
mongoDB url, example: mongodb://[username:password@]host[:port]
mongoDb
mongo database
isLookUpUrl
the address of the lookUp service
nsPrefixBlacklist
a blacklist of nsPrefixes (comma separated)
hiveMetastoreUris
hive server metastore URIs
sparkDriverMemory
memory for driver process
sparkExecutorMemory
memory for individual executor
sparkExecutorCores
number of cores used by single executor
oozieActionShareLibForSpark2
oozie action sharelib for spark 2.*
spark2ExtraListeners
com.cloudera.spark.lineage.NavigatorAppListener
spark 2.* extra listeners classname
spark2SqlQueryExecutionListeners
com.cloudera.spark.lineage.NavigatorQueryListener
spark 2.* sql query execution listeners classname
spark2YarnHistoryServerAddress
spark 2.* yarn history server address
spark2EventLogDir
spark 2.* event log dir location
sparkSqlWarehouseDir
spark 2.* db directory location
${jobTracker}
${nameNode}
mapreduce.job.queuename
${queueName}
oozie.launcher.mapred.job.queue.name
${oozieLauncherQueueName}
oozie.action.sharelib.for.spark
${oozieActionShareLibForSpark2}
Action failed, error message[${wf:errorMessage(wf:lastErrorNode())}]
${wf:conf('reuseContent') eq false}
${wf:conf('reuseContent') eq true}
eu.dnetlib.dhp.oa.graph.raw.MigrateDbEntitiesApplication
--hdfsPath${contentPath}/db_claims
--postgresUrl${postgresURL}
--postgresUser${postgresUser}
--postgresPassword${postgresPassword}
--isLookUpUrl${isLookUpUrl}
--actionclaims
--dbschema${dbSchema}
--nsPrefixBlacklist${nsPrefixBlacklist}
eu.dnetlib.dhp.oa.graph.raw.MigrateMongoMdstoresApplication
-p${contentPath}/odf_claims
-mongourl${mongoURL}
-mongodb${mongoDb}
-fODF
-lstore
-iclaim
eu.dnetlib.dhp.oa.graph.raw.MigrateMongoMdstoresApplication
-p${contentPath}/oaf_claims
-mongourl${mongoURL}
-mongodb${mongoDb}
-fOAF
-lstore
-iclaim
eu.dnetlib.dhp.oa.graph.raw.MigrateDbEntitiesApplication
--hdfsPath${contentPath}/db_records
--postgresUrl${postgresURL}
--postgresUser${postgresUser}
--postgresPassword${postgresPassword}
--isLookUpUrl${isLookUpUrl}
--dbschema${dbSchema}
--nsPrefixBlacklist${nsPrefixBlacklist}
eu.dnetlib.dhp.oa.graph.raw.MigrateMongoMdstoresApplication
--hdfsPath${contentPath}/odf_records
--mongoBaseUrl${mongoURL}
--mongoDb${mongoDb}
--mdFormatODF
--mdLayoutstore
--mdInterpretationcleaned
eu.dnetlib.dhp.oa.graph.raw.MigrateMongoMdstoresApplication
--hdfsPath${contentPath}/oaf_records
--mongoBaseUrl${mongoURL}
--mongoDb${mongoDb}
--mdFormatOAF
--mdLayoutstore
--mdInterpretationcleaned
eu.dnetlib.dhp.oa.graph.raw.MigrateMongoMdstoresApplication
--hdfsPath${contentPath}/oaf_records_invisible
--mongoBaseUrl${mongoURL}
--mongoDb${mongoDb}
--mdFormatOAF
--mdLayoutstore
--mdInterpretationintersection
yarn
cluster
GenerateEntities_claim
eu.dnetlib.dhp.oa.graph.raw.GenerateEntitiesApplication
dhp-graph-mapper-${projectVersion}.jar
--executor-memory ${sparkExecutorMemory}
--executor-cores ${sparkExecutorCores}
--driver-memory=${sparkDriverMemory}
--conf spark.extraListeners=${spark2ExtraListeners}
--conf spark.sql.queryExecutionListeners=${spark2SqlQueryExecutionListeners}
--conf spark.yarn.historyServer.address=${spark2YarnHistoryServerAddress}
--conf spark.eventLog.dir=${nameNode}${spark2EventLogDir}
--sourcePaths${contentPath}/db_claims,${contentPath}/oaf_claims,${contentPath}/odf_claims,${contentPath}/oaf_records_invisible
--targetPath${workingDir}/entities_claim
--isLookUpUrl${isLookUpUrl}
yarn
cluster
GenerateGraph_claims
eu.dnetlib.dhp.oa.graph.raw.DispatchEntitiesApplication
dhp-graph-mapper-${projectVersion}.jar
--executor-memory ${sparkExecutorMemory}
--executor-cores ${sparkExecutorCores}
--driver-memory=${sparkDriverMemory}
--conf spark.extraListeners=${spark2ExtraListeners}
--conf spark.sql.queryExecutionListeners=${spark2SqlQueryExecutionListeners}
--conf spark.yarn.historyServer.address=${spark2YarnHistoryServerAddress}
--conf spark.eventLog.dir=${nameNode}${spark2EventLogDir}
--sourcePath${workingDir}/entities_claim
--graphRawPath${workingDir}/graph_claims
yarn
cluster
GenerateEntities
eu.dnetlib.dhp.oa.graph.raw.GenerateEntitiesApplication
dhp-graph-mapper-${projectVersion}.jar
--executor-memory ${sparkExecutorMemory}
--executor-cores ${sparkExecutorCores}
--driver-memory=${sparkDriverMemory}
--conf spark.extraListeners=${spark2ExtraListeners}
--conf spark.sql.queryExecutionListeners=${spark2SqlQueryExecutionListeners}
--conf spark.yarn.historyServer.address=${spark2YarnHistoryServerAddress}
--conf spark.eventLog.dir=${nameNode}${spark2EventLogDir}
--sourcePaths${contentPath}/db_records,${contentPath}/oaf_records,${contentPath}/odf_records
--targetPath${workingDir}/entities
--isLookUpUrl${isLookUpUrl}
yarn
cluster
GenerateGraph
eu.dnetlib.dhp.oa.graph.raw.DispatchEntitiesApplication
dhp-graph-mapper-${projectVersion}.jar
--executor-memory ${sparkExecutorMemory}
--executor-cores ${sparkExecutorCores}
--driver-memory=${sparkDriverMemory}
--conf spark.extraListeners=${spark2ExtraListeners}
--conf spark.sql.queryExecutionListeners=${spark2SqlQueryExecutionListeners}
--conf spark.yarn.historyServer.address=${spark2YarnHistoryServerAddress}
--conf spark.eventLog.dir=${nameNode}${spark2EventLogDir}
--conf spark.sql.shuffle.partitions=7680
--sourcePath${workingDir}/entities
--graphRawPath${workingDir}/graph_raw
${wf:conf('graphFormat') eq 'JSON'}
${wf:conf('graphFormat') eq 'HIVE'}
yarn
cluster
reset_DB
eu.dnetlib.dhp.common.ResetHiveDbApplication
dhp-graph-mapper-${projectVersion}.jar
--executor-memory ${sparkExecutorMemory}
--executor-cores ${sparkExecutorCores}
--driver-memory=${sparkDriverMemory}
--conf spark.extraListeners=${spark2ExtraListeners}
--conf spark.sql.queryExecutionListeners=${spark2SqlQueryExecutionListeners}
--conf spark.yarn.historyServer.address=${spark2YarnHistoryServerAddress}
--conf spark.eventLog.dir=${nameNode}${spark2EventLogDir}
--conf spark.sql.warehouse.dir=${sparkSqlWarehouseDir}
--conf spark.sql.shuffle.partitions=7680
--dbName${outputGraph}
--hiveMetastoreUris${hiveMetastoreUris}
yarn
cluster
MergeClaims_publication
eu.dnetlib.dhp.oa.graph.raw.MergeClaimsApplication
dhp-graph-mapper-${projectVersion}.jar
--executor-memory ${sparkExecutorMemory}
--executor-cores ${sparkExecutorCores}
--driver-memory=${sparkDriverMemory}
--conf spark.extraListeners=${spark2ExtraListeners}
--conf spark.sql.queryExecutionListeners=${spark2SqlQueryExecutionListeners}
--conf spark.yarn.historyServer.address=${spark2YarnHistoryServerAddress}
--conf spark.eventLog.dir=${nameNode}${spark2EventLogDir}
--conf spark.sql.warehouse.dir=${sparkSqlWarehouseDir}
--conf spark.sql.shuffle.partitions=7680
--rawGraphPath${workingDir}/graph_raw
--claimsGraphPath${workingDir}/graph_claims
--outputGraph${outputGraph}
--graphFormat${graphFormat}
--graphTableClassNameeu.dnetlib.dhp.schema.oaf.Publication
--hiveMetastoreUris${hiveMetastoreUris}
yarn
cluster
MergeClaims_dataset
eu.dnetlib.dhp.oa.graph.raw.MergeClaimsApplication
dhp-graph-mapper-${projectVersion}.jar
--executor-memory ${sparkExecutorMemory}
--executor-cores ${sparkExecutorCores}
--driver-memory=${sparkDriverMemory}
--conf spark.extraListeners=${spark2ExtraListeners}
--conf spark.sql.queryExecutionListeners=${spark2SqlQueryExecutionListeners}
--conf spark.yarn.historyServer.address=${spark2YarnHistoryServerAddress}
--conf spark.eventLog.dir=${nameNode}${spark2EventLogDir}
--conf spark.sql.warehouse.dir=${sparkSqlWarehouseDir}
--conf spark.sql.shuffle.partitions=7680
--rawGraphPath${workingDir}/graph_raw
--claimsGraphPath${workingDir}/graph_claims
--outputGraph${outputGraph}
--graphFormat${graphFormat}
--graphTableClassNameeu.dnetlib.dhp.schema.oaf.Dataset
--hiveMetastoreUris${hiveMetastoreUris}
yarn
cluster
MergeClaims_relation
eu.dnetlib.dhp.oa.graph.raw.MergeClaimsApplication
dhp-graph-mapper-${projectVersion}.jar
--executor-memory ${sparkExecutorMemory}
--executor-cores ${sparkExecutorCores}
--driver-memory=${sparkDriverMemory}
--conf spark.extraListeners=${spark2ExtraListeners}
--conf spark.sql.queryExecutionListeners=${spark2SqlQueryExecutionListeners}
--conf spark.yarn.historyServer.address=${spark2YarnHistoryServerAddress}
--conf spark.eventLog.dir=${nameNode}${spark2EventLogDir}
--conf spark.sql.warehouse.dir=${sparkSqlWarehouseDir}
--conf spark.sql.shuffle.partitions=3840
--rawGraphPath${workingDir}/graph_raw
--claimsGraphPath${workingDir}/graph_claims
--outputGraph${outputGraph}
--graphFormat${graphFormat}
--graphTableClassNameeu.dnetlib.dhp.schema.oaf.Relation
--hiveMetastoreUris${hiveMetastoreUris}
yarn
cluster
MergeClaims_software
eu.dnetlib.dhp.oa.graph.raw.MergeClaimsApplication
dhp-graph-mapper-${projectVersion}.jar
--executor-memory ${sparkExecutorMemory}
--executor-cores ${sparkExecutorCores}
--driver-memory=${sparkDriverMemory}
--conf spark.extraListeners=${spark2ExtraListeners}
--conf spark.sql.queryExecutionListeners=${spark2SqlQueryExecutionListeners}
--conf spark.yarn.historyServer.address=${spark2YarnHistoryServerAddress}
--conf spark.eventLog.dir=${nameNode}${spark2EventLogDir}
--conf spark.sql.warehouse.dir=${sparkSqlWarehouseDir}
--conf spark.sql.shuffle.partitions=1920
--rawGraphPath${workingDir}/graph_raw
--claimsGraphPath${workingDir}/graph_claims
--outputGraph${outputGraph}
--graphFormat${graphFormat}
--graphTableClassNameeu.dnetlib.dhp.schema.oaf.Software
--hiveMetastoreUris${hiveMetastoreUris}
yarn
cluster
MergeClaims_otherresearchproduct
eu.dnetlib.dhp.oa.graph.raw.MergeClaimsApplication
dhp-graph-mapper-${projectVersion}.jar
--executor-memory ${sparkExecutorMemory}
--executor-cores ${sparkExecutorCores}
--driver-memory=${sparkDriverMemory}
--conf spark.extraListeners=${spark2ExtraListeners}
--conf spark.sql.queryExecutionListeners=${spark2SqlQueryExecutionListeners}
--conf spark.yarn.historyServer.address=${spark2YarnHistoryServerAddress}
--conf spark.eventLog.dir=${nameNode}${spark2EventLogDir}
--conf spark.sql.warehouse.dir=${sparkSqlWarehouseDir}
--conf spark.sql.shuffle.partitions=1920
--rawGraphPath${workingDir}/graph_raw
--claimsGraphPath${workingDir}/graph_claims
--outputGraph${outputGraph}
--graphFormat${graphFormat}
--graphTableClassNameeu.dnetlib.dhp.schema.oaf.OtherResearchProduct
--hiveMetastoreUris${hiveMetastoreUris}
yarn
cluster
MergeClaims_datasource
eu.dnetlib.dhp.oa.graph.raw.MergeClaimsApplication
dhp-graph-mapper-${projectVersion}.jar
--executor-memory ${sparkExecutorMemory}
--executor-cores ${sparkExecutorCores}
--driver-memory=${sparkDriverMemory}
--conf spark.extraListeners=${spark2ExtraListeners}
--conf spark.sql.queryExecutionListeners=${spark2SqlQueryExecutionListeners}
--conf spark.yarn.historyServer.address=${spark2YarnHistoryServerAddress}
--conf spark.eventLog.dir=${nameNode}${spark2EventLogDir}
--conf spark.sql.warehouse.dir=${sparkSqlWarehouseDir}
--conf spark.sql.shuffle.partitions=200
--rawGraphPath${workingDir}/graph_raw
--claimsGraphPath${workingDir}/graph_claims
--outputGraph${outputGraph}
--graphFormat${graphFormat}
--graphTableClassNameeu.dnetlib.dhp.schema.oaf.Datasource
--hiveMetastoreUris${hiveMetastoreUris}
yarn
cluster
MergeClaims_organization
eu.dnetlib.dhp.oa.graph.raw.MergeClaimsApplication
dhp-graph-mapper-${projectVersion}.jar
--executor-memory ${sparkExecutorMemory}
--executor-cores ${sparkExecutorCores}
--driver-memory=${sparkDriverMemory}
--conf spark.extraListeners=${spark2ExtraListeners}
--conf spark.sql.queryExecutionListeners=${spark2SqlQueryExecutionListeners}
--conf spark.yarn.historyServer.address=${spark2YarnHistoryServerAddress}
--conf spark.eventLog.dir=${nameNode}${spark2EventLogDir}
--conf spark.sql.warehouse.dir=${sparkSqlWarehouseDir}
--conf spark.sql.shuffle.partitions=200
--rawGraphPath${workingDir}/graph_raw
--claimsGraphPath${workingDir}/graph_claims
--outputGraph${outputGraph}
--graphFormat${graphFormat}
--graphTableClassNameeu.dnetlib.dhp.schema.oaf.Organization
--hiveMetastoreUris${hiveMetastoreUris}
yarn
cluster
MergeClaims_project
eu.dnetlib.dhp.oa.graph.raw.MergeClaimsApplication
dhp-graph-mapper-${projectVersion}.jar
--executor-memory ${sparkExecutorMemory}
--executor-cores ${sparkExecutorCores}
--driver-memory=${sparkDriverMemory}
--conf spark.extraListeners=${spark2ExtraListeners}
--conf spark.sql.queryExecutionListeners=${spark2SqlQueryExecutionListeners}
--conf spark.yarn.historyServer.address=${spark2YarnHistoryServerAddress}
--conf spark.eventLog.dir=${nameNode}${spark2EventLogDir}
--conf spark.sql.warehouse.dir=${sparkSqlWarehouseDir}
--conf spark.sql.shuffle.partitions=200
--rawGraphPath${workingDir}/graph_raw
--claimsGraphPath${workingDir}/graph_claims
--outputGraph${outputGraph}
--graphFormat${graphFormat}
--graphTableClassNameeu.dnetlib.dhp.schema.oaf.Project
--hiveMetastoreUris${hiveMetastoreUris}