betaInputGraph the beta graph name (or path) prodInputGraph the production graph name (or path) outputGraph the merged graph name (or path) priority decides from which infrastructure the content must win in case of ID clash inputGraphFormat HIVE the input graph data format outputGraphFormat HIVE the output graph data format hiveMetastoreUris hive server metastore URIs sparkDriverMemory memory for driver process sparkExecutorMemory memory for individual executor sparkExecutorCores number of cores used by single executor oozieActionShareLibForSpark2 oozie action sharelib for spark 2.* spark2ExtraListeners com.cloudera.spark.lineage.NavigatorAppListener spark 2.* extra listeners classname spark2SqlQueryExecutionListeners com.cloudera.spark.lineage.NavigatorQueryListener spark 2.* sql query execution listeners classname spark2YarnHistoryServerAddress spark 2.* yarn history server address spark2EventLogDir spark 2.* event log dir location sparkSqlWarehouseDir spark 2.* db directory location Action failed, error message[${wf:errorMessage(wf:lastErrorNode())}] ${wf:conf('outputGraphFormat') eq 'JSON'} ${wf:conf('outputGraphFormat') eq 'HIVE'} yarn cluster reset_DB eu.dnetlib.dhp.common.ResetHiveDbApplication dhp-graph-mapper-${projectVersion}.jar --executor-memory ${sparkExecutorMemory} --executor-cores ${sparkExecutorCores} --driver-memory=${sparkDriverMemory} --conf spark.extraListeners=${spark2ExtraListeners} --conf spark.sql.queryExecutionListeners=${spark2SqlQueryExecutionListeners} --conf spark.yarn.historyServer.address=${spark2YarnHistoryServerAddress} --conf spark.eventLog.dir=${nameNode}${spark2EventLogDir} --conf spark.sql.warehouse.dir=${sparkSqlWarehouseDir} --conf spark.sql.shuffle.partitions=7680 --dbName ${outputGraph} --hiveMetastoreUris ${hiveMetastoreUris} yarn cluster Merge publications eu.dnetlib.dhp.oa.graph.merge.MergeGraphSparkJob dhp-graph-mapper-${projectVersion}.jar --executor-cores=${sparkExecutorCores} --executor-memory=${sparkExecutorMemory} 
--driver-memory=${sparkDriverMemory} --conf spark.extraListeners=${spark2ExtraListeners} --conf spark.sql.queryExecutionListeners=${spark2SqlQueryExecutionListeners} --conf spark.yarn.historyServer.address=${spark2YarnHistoryServerAddress} --conf spark.eventLog.dir=${nameNode}${spark2EventLogDir} --conf spark.sql.warehouse.dir=${sparkSqlWarehouseDir} --conf spark.sql.shuffle.partitions=7680 --betaInputGraph ${betaInputGraph} --prodInputGraph ${prodInputGraph} --outputGraph ${outputGraph} --inputGraphFormat ${inputGraphFormat} --outputGraphFormat ${outputGraphFormat} --graphTableClassName eu.dnetlib.dhp.schema.oaf.Publication --priority ${priority} --hiveMetastoreUris ${hiveMetastoreUris} yarn cluster Merge datasets eu.dnetlib.dhp.oa.graph.merge.MergeGraphSparkJob dhp-graph-mapper-${projectVersion}.jar --executor-cores=${sparkExecutorCores} --executor-memory=${sparkExecutorMemory} --driver-memory=${sparkDriverMemory} --conf spark.extraListeners=${spark2ExtraListeners} --conf spark.sql.queryExecutionListeners=${spark2SqlQueryExecutionListeners} --conf spark.yarn.historyServer.address=${spark2YarnHistoryServerAddress} --conf spark.eventLog.dir=${nameNode}${spark2EventLogDir} --conf spark.sql.warehouse.dir=${sparkSqlWarehouseDir} --conf spark.sql.shuffle.partitions=7680 --betaInputGraph ${betaInputGraph} --prodInputGraph ${prodInputGraph} --outputGraph ${outputGraph} --inputGraphFormat ${inputGraphFormat} --outputGraphFormat ${outputGraphFormat} --graphTableClassName eu.dnetlib.dhp.schema.oaf.Dataset --priority ${priority} --hiveMetastoreUris ${hiveMetastoreUris} yarn cluster Merge otherresearchproducts eu.dnetlib.dhp.oa.graph.merge.MergeGraphSparkJob dhp-graph-mapper-${projectVersion}.jar --executor-cores=${sparkExecutorCores} --executor-memory=${sparkExecutorMemory} --driver-memory=${sparkDriverMemory} --conf spark.extraListeners=${spark2ExtraListeners} --conf spark.sql.queryExecutionListeners=${spark2SqlQueryExecutionListeners} --conf 
spark.yarn.historyServer.address=${spark2YarnHistoryServerAddress} --conf spark.eventLog.dir=${nameNode}${spark2EventLogDir} --conf spark.sql.warehouse.dir=${sparkSqlWarehouseDir} --conf spark.sql.shuffle.partitions=7680 --betaInputGraph ${betaInputGraph} --prodInputGraph ${prodInputGraph} --outputGraph ${outputGraph} --inputGraphFormat ${inputGraphFormat} --outputGraphFormat ${outputGraphFormat} --graphTableClassName eu.dnetlib.dhp.schema.oaf.OtherResearchProduct --priority ${priority} --hiveMetastoreUris ${hiveMetastoreUris} yarn cluster Merge softwares eu.dnetlib.dhp.oa.graph.merge.MergeGraphSparkJob dhp-graph-mapper-${projectVersion}.jar --executor-cores=${sparkExecutorCores} --executor-memory=${sparkExecutorMemory} --driver-memory=${sparkDriverMemory} --conf spark.extraListeners=${spark2ExtraListeners} --conf spark.sql.queryExecutionListeners=${spark2SqlQueryExecutionListeners} --conf spark.yarn.historyServer.address=${spark2YarnHistoryServerAddress} --conf spark.eventLog.dir=${nameNode}${spark2EventLogDir} --conf spark.sql.warehouse.dir=${sparkSqlWarehouseDir} --conf spark.sql.shuffle.partitions=7680 --betaInputGraph ${betaInputGraph} --prodInputGraph ${prodInputGraph} --outputGraph ${outputGraph} --inputGraphFormat ${inputGraphFormat} --outputGraphFormat ${outputGraphFormat} --graphTableClassName eu.dnetlib.dhp.schema.oaf.Software --priority ${priority} --hiveMetastoreUris ${hiveMetastoreUris} yarn cluster Merge datasources eu.dnetlib.dhp.oa.graph.merge.MergeGraphSparkJob dhp-graph-mapper-${projectVersion}.jar --executor-cores=${sparkExecutorCores} --executor-memory=${sparkExecutorMemory} --driver-memory=${sparkDriverMemory} --conf spark.extraListeners=${spark2ExtraListeners} --conf spark.sql.queryExecutionListeners=${spark2SqlQueryExecutionListeners} --conf spark.yarn.historyServer.address=${spark2YarnHistoryServerAddress} --conf spark.eventLog.dir=${nameNode}${spark2EventLogDir} --conf spark.sql.warehouse.dir=${sparkSqlWarehouseDir} --conf 
spark.sql.shuffle.partitions=7680 --betaInputGraph ${betaInputGraph} --prodInputGraph ${prodInputGraph} --outputGraph ${outputGraph} --inputGraphFormat ${inputGraphFormat} --outputGraphFormat ${outputGraphFormat} --graphTableClassName eu.dnetlib.dhp.schema.oaf.Datasource --priority ${priority} --hiveMetastoreUris ${hiveMetastoreUris} yarn cluster Merge organizations eu.dnetlib.dhp.oa.graph.merge.MergeGraphSparkJob dhp-graph-mapper-${projectVersion}.jar --executor-cores=${sparkExecutorCores} --executor-memory=${sparkExecutorMemory} --driver-memory=${sparkDriverMemory} --conf spark.extraListeners=${spark2ExtraListeners} --conf spark.sql.queryExecutionListeners=${spark2SqlQueryExecutionListeners} --conf spark.yarn.historyServer.address=${spark2YarnHistoryServerAddress} --conf spark.eventLog.dir=${nameNode}${spark2EventLogDir} --conf spark.sql.warehouse.dir=${sparkSqlWarehouseDir} --conf spark.sql.shuffle.partitions=7680 --betaInputGraph ${betaInputGraph} --prodInputGraph ${prodInputGraph} --outputGraph ${outputGraph} --inputGraphFormat ${inputGraphFormat} --outputGraphFormat ${outputGraphFormat} --graphTableClassName eu.dnetlib.dhp.schema.oaf.Organization --priority ${priority} --hiveMetastoreUris ${hiveMetastoreUris} yarn cluster Merge projects eu.dnetlib.dhp.oa.graph.merge.MergeGraphSparkJob dhp-graph-mapper-${projectVersion}.jar --executor-cores=${sparkExecutorCores} --executor-memory=${sparkExecutorMemory} --driver-memory=${sparkDriverMemory} --conf spark.extraListeners=${spark2ExtraListeners} --conf spark.sql.queryExecutionListeners=${spark2SqlQueryExecutionListeners} --conf spark.yarn.historyServer.address=${spark2YarnHistoryServerAddress} --conf spark.eventLog.dir=${nameNode}${spark2EventLogDir} --conf spark.sql.warehouse.dir=${sparkSqlWarehouseDir} --conf spark.sql.shuffle.partitions=7680 --betaInputGraph ${betaInputGraph} --prodInputGraph ${prodInputGraph} --outputGraph ${outputGraph} --inputGraphFormat ${inputGraphFormat} --outputGraphFormat ${outputGraphFormat} 
--graphTableClassName eu.dnetlib.dhp.schema.oaf.Project --priority ${priority} --hiveMetastoreUris ${hiveMetastoreUris} yarn cluster Merge relations eu.dnetlib.dhp.oa.graph.merge.MergeGraphSparkJob dhp-graph-mapper-${projectVersion}.jar --executor-cores=${sparkExecutorCores} --executor-memory=${sparkExecutorMemory} --driver-memory=${sparkDriverMemory} --conf spark.extraListeners=${spark2ExtraListeners} --conf spark.sql.queryExecutionListeners=${spark2SqlQueryExecutionListeners} --conf spark.yarn.historyServer.address=${spark2YarnHistoryServerAddress} --conf spark.eventLog.dir=${nameNode}${spark2EventLogDir} --conf spark.sql.warehouse.dir=${sparkSqlWarehouseDir} --conf spark.sql.shuffle.partitions=7680 --betaInputGraph ${betaInputGraph} --prodInputGraph ${prodInputGraph} --outputGraph ${outputGraph} --inputGraphFormat ${inputGraphFormat} --outputGraphFormat ${outputGraphFormat} --graphTableClassName eu.dnetlib.dhp.schema.oaf.Relation --priority ${priority} --hiveMetastoreUris ${hiveMetastoreUris}