forked from D-Net/dnet-hadoop

commit 9c59dac859 (parent 49737f1087)

    followup changes reorganising the mdstore synchronisation mechanism
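In outline, the changes below: (1) drop a stray blank line in the Crossref2Oaf funder mapping; (2) rework the raw-graph Oozie workflow so that every MigrateMongoMdstoresApplication action uses long-form argument names and writes under a single ${contentPath}/mdstore tree instead of per-format directories, with the aggregated HDFS mdstore paths renamed to *_mdstore_hdfs accordingly; and (3) delete the standalone migration workflows (claims import, DB-entities import, the HDFS-mdstore test import, and the old three-step migration) together with their Oozie configuration files, which appear to be superseded by this reorganisation.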
@@ -501,7 +501,6 @@ case object Crossref2Oaf {
       queue += generateRelation(sourceId, targetId, ModelConstants.IS_PRODUCED_BY)
       queue += generateRelation(targetId, sourceId, ModelConstants.PRODUCES)
 
-
       case "10.13039/501100005416" => generateSimpleRelationFromAward(funder, "rcn_________", a => a)
       case "10.13039/501100002848" => generateSimpleRelationFromAward(funder, "conicytf____", a => a)
       case "10.13039/501100003448" => generateSimpleRelationFromAward(funder, "gsrt________", extractECAward)
@@ -214,16 +214,13 @@
 
     <action name="ImportODF_claims">
         <java>
-            <prepare>
-                <delete path="${contentPath}/odf_claims"/>
-            </prepare>
             <main-class>eu.dnetlib.dhp.oa.graph.raw.MigrateMongoMdstoresApplication</main-class>
-            <arg>-p</arg><arg>${contentPath}/odf_claims</arg>
-            <arg>-mongourl</arg><arg>${mongoURL}</arg>
-            <arg>-mongodb</arg><arg>${mongoDb}</arg>
-            <arg>-f</arg><arg>ODF</arg>
-            <arg>-l</arg><arg>store</arg>
-            <arg>-i</arg><arg>claim</arg>
+            <arg>--hdfsPath</arg><arg>${contentPath}/mdstore</arg>
+            <arg>--mongoBaseUrl</arg><arg>${mongoURL}</arg>
+            <arg>--mongoDb</arg><arg>${mongoDb}</arg>
+            <arg>--mdFormat</arg><arg>ODF</arg>
+            <arg>--mdLayout</arg><arg>store</arg>
+            <arg>--mdInterpretation</arg><arg>claim</arg>
             <arg>--nameNode</arg><arg>${nameNode}</arg>
         </java>
         <ok to="reuse_oaf_claims"/>
@@ -240,16 +237,13 @@
 
     <action name="ImportOAF_claims">
         <java>
-            <prepare>
-                <delete path="${contentPath}/oaf_claims"/>
-            </prepare>
             <main-class>eu.dnetlib.dhp.oa.graph.raw.MigrateMongoMdstoresApplication</main-class>
-            <arg>-p</arg><arg>${contentPath}/oaf_claims</arg>
-            <arg>-mongourl</arg><arg>${mongoURL}</arg>
-            <arg>-mongodb</arg><arg>${mongoDb}</arg>
-            <arg>-f</arg><arg>OAF</arg>
-            <arg>-l</arg><arg>store</arg>
-            <arg>-i</arg><arg>claim</arg>
+            <arg>--hdfsPath</arg><arg>${contentPath}/mdstore</arg>
+            <arg>--mongoBaseUrl</arg><arg>${mongoURL}</arg>
+            <arg>--mongoDb</arg><arg>${mongoDb}</arg>
+            <arg>--mdFormat</arg><arg>OAF</arg>
+            <arg>--mdLayout</arg><arg>store</arg>
+            <arg>--mdInterpretation</arg><arg>claim</arg>
             <arg>--nameNode</arg><arg>${nameNode}</arg>
         </java>
         <ok to="wait_import"/>
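For reference, the renaming applied to the MigrateMongoMdstoresApplication arguments in the two claim-import actions above (the same long forms recur in the record-import actions that follow):

    -p         -> --hdfsPath
    -mongourl  -> --mongoBaseUrl
    -mongodb   -> --mongoDb
    -f         -> --mdFormat
    -l         -> --mdLayout
    -i         -> --mdInterpretation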
@@ -293,11 +287,8 @@
 
     <action name="ImportODF">
         <java>
-            <prepare>
-                <delete path="${contentPath}/odf_records"/>
-            </prepare>
             <main-class>eu.dnetlib.dhp.oa.graph.raw.MigrateMongoMdstoresApplication</main-class>
-            <arg>--hdfsPath</arg><arg>${contentPath}/odf_records</arg>
+            <arg>--hdfsPath</arg><arg>${contentPath}/mdstore</arg>
             <arg>--mongoBaseUrl</arg><arg>${mongoURL}</arg>
             <arg>--mongoDb</arg><arg>${mongoDb}</arg>
             <arg>--mdFormat</arg><arg>ODF</arg>
@@ -319,11 +310,8 @@
 
     <action name="ImportOAF">
         <java>
-            <prepare>
-                <delete path="${contentPath}/oaf_records"/>
-            </prepare>
             <main-class>eu.dnetlib.dhp.oa.graph.raw.MigrateMongoMdstoresApplication</main-class>
-            <arg>--hdfsPath</arg><arg>${contentPath}/oaf_records</arg>
+            <arg>--hdfsPath</arg><arg>${contentPath}/mdstore</arg>
             <arg>--mongoBaseUrl</arg><arg>${mongoURL}</arg>
             <arg>--mongoDb</arg><arg>${mongoDb}</arg>
             <arg>--mdFormat</arg><arg>OAF</arg>
@@ -337,11 +325,8 @@
 
     <action name="ImportOAF_invisible">
        <java>
-            <prepare>
-                <delete path="${contentPath}/oaf_records_invisible"/>
-            </prepare>
             <main-class>eu.dnetlib.dhp.oa.graph.raw.MigrateMongoMdstoresApplication</main-class>
-            <arg>--hdfsPath</arg><arg>${contentPath}/oaf_records_invisible</arg>
+            <arg>--hdfsPath</arg><arg>${contentPath}/mdstore</arg>
             <arg>--mongoBaseUrl</arg><arg>${mongoURL}</arg>
             <arg>--mongoDb</arg><arg>${mongoDb}</arg>
             <arg>--mdFormat</arg><arg>OAF</arg>
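The <prepare><delete .../></prepare> blocks disappear from all five import actions above. Since ImportODF_claims, ImportOAF_claims, ImportODF, ImportOAF and ImportOAF_invisible now all write beneath the shared ${contentPath}/mdstore path, a per-action delete would presumably clobber the output of sibling actions; cleanup of that tree therefore has to happen outside the individual actions.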
@@ -377,7 +362,7 @@
                 --conf spark.yarn.historyServer.address=${spark2YarnHistoryServerAddress}
                 --conf spark.eventLog.dir=${nameNode}${spark2EventLogDir}
             </spark-opts>
-            <arg>--hdfsPath</arg><arg>${contentPath}/odf_records_hdfs</arg>
+            <arg>--hdfsPath</arg><arg>${contentPath}/odf_mdstore_hdfs</arg>
             <arg>--mdstoreManagerUrl</arg><arg>${mdstoreManagerUrl}</arg>
             <arg>--mdFormat</arg><arg>ODF</arg>
             <arg>--mdLayout</arg><arg>store</arg>
@@ -411,7 +396,7 @@
                 --conf spark.yarn.historyServer.address=${spark2YarnHistoryServerAddress}
                 --conf spark.eventLog.dir=${nameNode}${spark2EventLogDir}
             </spark-opts>
-            <arg>--hdfsPath</arg><arg>${contentPath}/oaf_records_hdfs</arg>
+            <arg>--hdfsPath</arg><arg>${contentPath}/oaf_mdstore_hdfs</arg>
             <arg>--mdstoreManagerUrl</arg><arg>${mdstoreManagerUrl}</arg>
             <arg>--mdFormat</arg><arg>OAF</arg>
             <arg>--mdLayout</arg><arg>store</arg>
@@ -544,7 +529,7 @@
                 --conf spark.yarn.historyServer.address=${spark2YarnHistoryServerAddress}
                 --conf spark.eventLog.dir=${nameNode}${spark2EventLogDir}
             </spark-opts>
-            <arg>--sourcePaths</arg><arg>${contentPath}/db_openaire,${contentPath}/db_openorgs,${contentPath}/oaf_records,${contentPath}/odf_records,${contentPath}/oaf_records_hdfs,${contentPath}/odf_records_hdfs,${contentPath}/oaf_records_invisible</arg>
+            <arg>--sourcePaths</arg><arg>${contentPath}/db_openaire,${contentPath}/db_openorgs,${contentPath}/oaf_mdstore_hdfs,${contentPath}/odf_mdstore_hdfs,${contentPath}/mdstore/*/*</arg>
             <arg>--invalidPath</arg><arg>${workingDir}/invalid_records</arg>
             <arg>--isLookupUrl</arg><arg>${isLookupUrl}</arg>
         </spark>
@@ -568,7 +553,7 @@
                 --conf spark.yarn.historyServer.address=${spark2YarnHistoryServerAddress}
                 --conf spark.eventLog.dir=${nameNode}${spark2EventLogDir}
             </spark-opts>
-            <arg>--sourcePaths</arg><arg>${contentPath}/db_openaire,${contentPath}/db_openorgs,${contentPath}/oaf_records,${contentPath}/odf_records,${contentPath}/oaf_records_hdfs,${contentPath}/odf_records_hdfs,${contentPath}/oaf_records_invisible</arg>
+            <arg>--sourcePaths</arg><arg>${contentPath}/db_openaire,${contentPath}/db_openorgs,${contentPath}/oaf_mdstore_hdfs,${contentPath}/odf_mdstore_hdfs,${contentPath}/mdstore/*/*</arg>
             <arg>--targetPath</arg><arg>${workingDir}/entities</arg>
             <arg>--isLookupUrl</arg><arg>${isLookupUrl}</arg>
             <arg>--shouldHashId</arg><arg>${shouldHashId}</arg>
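Both --sourcePaths values replace the per-format record directories with the glob ${contentPath}/mdstore/*/*. The two-level pattern suggests one directory per mdstore and one subdirectory per stored version beneath it, along these lines (identifiers are hypothetical, the actual naming is not visible in this diff):

    ${contentPath}/mdstore/md-00001/v-2021-05-12    <- matched by ${contentPath}/mdstore/*/*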
@@ -1,18 +0,0 @@
-<configuration>
-    <property>
-        <name>jobTracker</name>
-        <value>yarnRM</value>
-    </property>
-    <property>
-        <name>nameNode</name>
-        <value>hdfs://nameservice1</value>
-    </property>
-    <property>
-        <name>oozie.use.system.libpath</name>
-        <value>true</value>
-    </property>
-    <property>
-        <name>oozie.action.sharelib.for.spark</name>
-        <value>spark2</value>
-    </property>
-</configuration>
@@ -1,162 +0,0 @@
-<workflow-app name="import Claims as Graph" xmlns="uri:oozie:workflow:0.5">
-    <parameters>
-        <property>
-            <name>reuseContent</name>
-            <value>false</value>
-            <description>should import content from the aggregator or reuse a previous version</description>
-        </property>
-        <property>
-            <name>contentPath</name>
-            <description>path location to store (or reuse) content from the aggregator</description>
-        </property>
-        <property>
-            <name>postgresURL</name>
-            <description>the postgres URL to access to the database</description>
-        </property>
-        <property>
-            <name>postgresUser</name>
-            <description>the user postgres</description>
-        </property>
-        <property>
-            <name>postgresPassword</name>
-            <description>the password postgres</description>
-        </property>
-        <property>
-            <name>dbSchema</name>
-            <value>beta</value>
-            <description>the database schema according to the D-Net infrastructure (beta or production)</description>
-        </property>
-        <property>
-            <name>mongoURL</name>
-            <description>mongoDB url, example: mongodb://[username:password@]host[:port]</description>
-        </property>
-        <property>
-            <name>mongoDb</name>
-            <description>mongo database</description>
-        </property>
-        <property>
-            <name>isLookupUrl</name>
-            <description>the address of the lookUp service</description>
-        </property>
-        <property>
-            <name>nsPrefixBlacklist</name>
-            <value></value>
-            <description>a blacklist of nsprefixes (comma separeted)</description>
-        </property>
-        <property>
-            <name>sparkDriverMemory</name>
-            <description>memory for driver process</description>
-        </property>
-        <property>
-            <name>sparkExecutorMemory</name>
-            <description>memory for individual executor</description>
-        </property>
-        <property>
-            <name>sparkExecutorCores</name>
-            <description>number of cores used by single executor</description>
-        </property>
-        <property>
-            <name>oozieActionShareLibForSpark2</name>
-            <description>oozie action sharelib for spark 2.*</description>
-        </property>
-        <property>
-            <name>spark2ExtraListeners</name>
-            <value>com.cloudera.spark.lineage.NavigatorAppListener</value>
-            <description>spark 2.* extra listeners classname</description>
-        </property>
-        <property>
-            <name>spark2SqlQueryExecutionListeners</name>
-            <value>com.cloudera.spark.lineage.NavigatorQueryListener</value>
-            <description>spark 2.* sql query execution listeners classname</description>
-        </property>
-        <property>
-            <name>spark2YarnHistoryServerAddress</name>
-            <description>spark 2.* yarn history server address</description>
-        </property>
-        <property>
-            <name>spark2EventLogDir</name>
-            <description>spark 2.* event log dir location</description>
-        </property>
-    </parameters>
-
-    <global>
-        <job-tracker>${jobTracker}</job-tracker>
-        <name-node>${nameNode}</name-node>
-        <configuration>
-            <property>
-                <name>mapreduce.job.queuename</name>
-                <value>${queueName}</value>
-            </property>
-            <property>
-                <name>oozie.launcher.mapred.job.queue.name</name>
-                <value>${oozieLauncherQueueName}</value>
-            </property>
-            <property>
-                <name>oozie.action.sharelib.for.spark</name>
-                <value>${oozieActionShareLibForSpark2}</value>
-            </property>
-        </configuration>
-    </global>
-
-    <start to="ImportDB_claims"/>
-
-    <kill name="Kill">
-        <message>Action failed, error message[${wf:errorMessage(wf:lastErrorNode())}]</message>
-    </kill>
-
-    <action name="ImportDB_claims">
-        <java>
-            <prepare>
-                <delete path="${contentPath}/db_claims"/>
-            </prepare>
-            <main-class>eu.dnetlib.dhp.oa.graph.raw.MigrateDbEntitiesApplication</main-class>
-            <arg>--hdfsPath</arg><arg>${contentPath}/db_claims</arg>
-            <arg>--postgresUrl</arg><arg>${postgresURL}</arg>
-            <arg>--postgresUser</arg><arg>${postgresUser}</arg>
-            <arg>--postgresPassword</arg><arg>${postgresPassword}</arg>
-            <arg>--isLookupUrl</arg><arg>${isLookupUrl}</arg>
-            <arg>--action</arg><arg>claims</arg>
-            <arg>--dbschema</arg><arg>${dbSchema}</arg>
-            <arg>--nsPrefixBlacklist</arg><arg>${nsPrefixBlacklist}</arg>
-        </java>
-        <ok to="ImportODF_claims"/>
-        <error to="Kill"/>
-    </action>
-
-    <action name="ImportODF_claims">
-        <java>
-            <prepare>
-                <delete path="${contentPath}/odf_claims"/>
-            </prepare>
-            <main-class>eu.dnetlib.dhp.oa.graph.raw.MigrateMongoMdstoresApplication</main-class>
-            <arg>-p</arg><arg>${contentPath}/odf_claims</arg>
-            <arg>-mongourl</arg><arg>${mongoURL}</arg>
-            <arg>-mongodb</arg><arg>${mongoDb}</arg>
-            <arg>-f</arg><arg>ODF</arg>
-            <arg>-l</arg><arg>store</arg>
-            <arg>-i</arg><arg>claim</arg>
-        </java>
-        <ok to="ImportOAF_claims"/>
-        <error to="Kill"/>
-    </action>
-
-    <action name="ImportOAF_claims">
-        <java>
-            <prepare>
-                <delete path="${contentPath}/oaf_claims"/>
-            </prepare>
-            <main-class>eu.dnetlib.dhp.oa.graph.raw.MigrateMongoMdstoresApplication</main-class>
-            <arg>-p</arg><arg>${contentPath}/oaf_claims</arg>
-            <arg>-mongourl</arg><arg>${mongoURL}</arg>
-            <arg>-mongodb</arg><arg>${mongoDb}</arg>
-            <arg>-f</arg><arg>OAF</arg>
-            <arg>-l</arg><arg>store</arg>
-            <arg>-i</arg><arg>claim</arg>
-        </java>
-        <ok to="End"/>
-        <error to="Kill"/>
-    </action>
-
-    <end name="End"/>
-</workflow-app>
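The workflow deleted above is the standalone claims import; its ImportODF_claims and ImportOAF_claims actions live on in the main raw-graph workflow modified at the top of this diff, now invoked with the long-form arguments and the shared mdstore target path.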
@@ -1,18 +0,0 @@
-<configuration>
-    <property>
-        <name>jobTracker</name>
-        <value>yarnRM</value>
-    </property>
-    <property>
-        <name>nameNode</name>
-        <value>hdfs://nameservice1</value>
-    </property>
-    <property>
-        <name>oozie.use.system.libpath</name>
-        <value>true</value>
-    </property>
-    <property>
-        <name>oozie.action.sharelib.for.spark</name>
-        <value>spark2</value>
-    </property>
-</configuration>
@@ -1,195 +0,0 @@
-<workflow-app name="import DB entities" xmlns="uri:oozie:workflow:0.5">
-    <parameters>
-        <property>
-            <name>contentPath</name>
-            <description>path location to store (or reuse) content from the aggregator</description>
-        </property>
-        <property>
-            <name>postgresURL</name>
-            <description>the postgres URL to access to the database</description>
-        </property>
-        <property>
-            <name>postgresUser</name>
-            <description>the user postgres</description>
-        </property>
-        <property>
-            <name>postgresPassword</name>
-            <description>the password postgres</description>
-        </property>
-        <property>
-            <name>dbSchema</name>
-            <value>beta</value>
-            <description>the database schema according to the D-Net infrastructure (beta or production)</description>
-        </property>
-        <property>
-            <name>isLookupUrl</name>
-            <description>the address of the lookUp service</description>
-        </property>
-        <property>
-            <name>nsPrefixBlacklist</name>
-            <value></value>
-            <description>a blacklist of nsprefixes (comma separeted)</description>
-        </property>
-        <property>
-            <name>reuseContent</name>
-            <value>false</value>
-            <description>reuse content in the aggregator database</description>
-        </property>
-        <property>
-            <name>sparkDriverMemory</name>
-            <description>memory for driver process</description>
-        </property>
-        <property>
-            <name>sparkExecutorMemory</name>
-            <description>memory for individual executor</description>
-        </property>
-        <property>
-            <name>sparkExecutorCores</name>
-            <description>number of cores used by single executor</description>
-        </property>
-        <property>
-            <name>oozieActionShareLibForSpark2</name>
-            <description>oozie action sharelib for spark 2.*</description>
-        </property>
-        <property>
-            <name>spark2ExtraListeners</name>
-            <value>com.cloudera.spark.lineage.NavigatorAppListener</value>
-            <description>spark 2.* extra listeners classname</description>
-        </property>
-        <property>
-            <name>spark2SqlQueryExecutionListeners</name>
-            <value>com.cloudera.spark.lineage.NavigatorQueryListener</value>
-            <description>spark 2.* sql query execution listeners classname</description>
-        </property>
-        <property>
-            <name>spark2YarnHistoryServerAddress</name>
-            <description>spark 2.* yarn history server address</description>
-        </property>
-        <property>
-            <name>spark2EventLogDir</name>
-            <description>spark 2.* event log dir location</description>
-        </property>
-    </parameters>
-
-    <global>
-        <job-tracker>${jobTracker}</job-tracker>
-        <name-node>${nameNode}</name-node>
-        <configuration>
-            <property>
-                <name>mapreduce.job.queuename</name>
-                <value>${queueName}</value>
-            </property>
-            <property>
-                <name>oozie.launcher.mapred.job.queue.name</name>
-                <value>${oozieLauncherQueueName}</value>
-            </property>
-            <property>
-                <name>oozie.action.sharelib.for.spark</name>
-                <value>${oozieActionShareLibForSpark2}</value>
-            </property>
-        </configuration>
-    </global>
-
-    <start to="reuse_db"/>
-
-    <kill name="Kill">
-        <message>Action failed, error message[${wf:errorMessage(wf:lastErrorNode())}]</message>
-    </kill>
-
-    <decision name="reuse_db">
-        <switch>
-            <case to="ImportDB">${wf:conf('reuseContent') eq false}</case>
-            <case to="GenerateEntities">${wf:conf('reuseContent') eq true}</case>
-            <default to="ImportDB"/>
-        </switch>
-    </decision>
-
-    <action name="ImportDB">
-        <java>
-            <prepare>
-                <delete path="${contentPath}/db_records"/>
-            </prepare>
-            <main-class>eu.dnetlib.dhp.oa.graph.raw.MigrateDbEntitiesApplication</main-class>
-            <arg>--hdfsPath</arg><arg>${contentPath}/db_records</arg>
-            <arg>--postgresUrl</arg><arg>${postgresURL}</arg>
-            <arg>--postgresUser</arg><arg>${postgresUser}</arg>
-            <arg>--postgresPassword</arg><arg>${postgresPassword}</arg>
-            <arg>--isLookupUrl</arg><arg>${isLookupUrl}</arg>
-            <arg>--action</arg><arg>openaire</arg>
-            <arg>--dbschema</arg><arg>${dbSchema}</arg>
-            <arg>--nsPrefixBlacklist</arg><arg>${nsPrefixBlacklist}</arg>
-        </java>
-        <ok to="ImportDB_claims"/>
-        <error to="Kill"/>
-    </action>
-
-    <action name="ImportDB_claims">
-        <java>
-            <prepare>
-                <delete path="${contentPath}/db_claims"/>
-            </prepare>
-            <main-class>eu.dnetlib.dhp.oa.graph.raw.MigrateDbEntitiesApplication</main-class>
-            <arg>--hdfsPath</arg><arg>${contentPath}/db_claims</arg>
-            <arg>--postgresUrl</arg><arg>${postgresURL}</arg>
-            <arg>--postgresUser</arg><arg>${postgresUser}</arg>
-            <arg>--postgresPassword</arg><arg>${postgresPassword}</arg>
-            <arg>--isLookupUrl</arg><arg>${isLookupUrl}</arg>
-            <arg>--dbschema</arg><arg>${dbSchema}</arg>
-            <arg>--action</arg><arg>claims</arg>
-            <arg>--nsPrefixBlacklist</arg><arg>${nsPrefixBlacklist}</arg>
-        </java>
-        <ok to="GenerateEntities"/>
-        <error to="Kill"/>
-    </action>
-
-    <action name="GenerateEntities">
-        <spark xmlns="uri:oozie:spark-action:0.2">
-            <master>yarn</master>
-            <mode>cluster</mode>
-            <name>GenerateEntities</name>
-            <class>eu.dnetlib.dhp.oa.graph.raw.GenerateEntitiesApplication</class>
-            <jar>dhp-graph-mapper-${projectVersion}.jar</jar>
-            <spark-opts>
-                --executor-memory ${sparkExecutorMemory}
-                --executor-cores ${sparkExecutorCores}
-                --driver-memory=${sparkDriverMemory}
-                --conf spark.extraListeners=${spark2ExtraListeners}
-                --conf spark.sql.queryExecutionListeners=${spark2SqlQueryExecutionListeners}
-                --conf spark.yarn.historyServer.address=${spark2YarnHistoryServerAddress}
-                --conf spark.eventLog.dir=${nameNode}${spark2EventLogDir}
-            </spark-opts>
-            <arg>--sourcePaths</arg><arg>${contentPath}/db_records,${contentPath}/db_claims</arg>
-            <arg>--targetPath</arg><arg>${workingDir}/entities</arg>
-            <arg>--isLookupUrl</arg><arg>${isLookupUrl}</arg>
-            <arg>--shouldHashId</arg><arg>true</arg>
-        </spark>
-        <ok to="GenerateGraph"/>
-        <error to="Kill"/>
-    </action>
-
-    <action name="GenerateGraph">
-        <spark xmlns="uri:oozie:spark-action:0.2">
-            <master>yarn</master>
-            <mode>cluster</mode>
-            <name>GenerateGraph</name>
-            <class>eu.dnetlib.dhp.oa.graph.raw.DispatchEntitiesApplication</class>
-            <jar>dhp-graph-mapper-${projectVersion}.jar</jar>
-            <spark-opts>
-                --executor-memory ${sparkExecutorMemory}
-                --executor-cores ${sparkExecutorCores}
-                --driver-memory=${sparkDriverMemory}
-                --conf spark.extraListeners=${spark2ExtraListeners}
-                --conf spark.sql.queryExecutionListeners=${spark2SqlQueryExecutionListeners}
-                --conf spark.yarn.historyServer.address=${spark2YarnHistoryServerAddress}
-                --conf spark.eventLog.dir=${nameNode}${spark2EventLogDir}
-                --conf spark.sql.shuffle.partitions=7680
-            </spark-opts>
-            <arg>--sourcePath</arg><arg>${workingDir}/entities</arg>
-            <arg>--graphRawPath</arg><arg>${workingDir}/graph_aggregator</arg>
-        </spark>
-        <ok to="End"/>
-        <error to="Kill"/>
-    </action>
-
-    <end name="End"/>
-</workflow-app>
@@ -1,18 +0,0 @@
-<configuration>
-    <property>
-        <name>jobTracker</name>
-        <value>yarnRM</value>
-    </property>
-    <property>
-        <name>nameNode</name>
-        <value>hdfs://nameservice1</value>
-    </property>
-    <property>
-        <name>oozie.use.system.libpath</name>
-        <value>true</value>
-    </property>
-    <property>
-        <name>oozie.action.sharelib.for.spark</name>
-        <value>spark2</value>
-    </property>
-</configuration>
@@ -1,157 +0,0 @@
-<workflow-app name="Test Import of Hdfs Stores" xmlns="uri:oozie:workflow:0.5">
-
-    <parameters>
-        <property>
-            <name>graphOutputPath</name>
-            <description>the target path to store raw graph</description>
-        </property>
-        <property>
-            <name>contentPath</name>
-            <description>path location to store (or reuse) content from the aggregator</description>
-        </property>
-        <property>
-            <name>mdstoreManagerUrl</name>
-            <description>the address of the Mdstore Manager</description>
-        </property>
-        <property>
-            <name>isLookupUrl</name>
-            <description>the address of the lookUp service</description>
-        </property>
-        <property>
-            <name>sparkDriverMemory</name>
-            <description>memory for driver process</description>
-        </property>
-        <property>
-            <name>sparkExecutorMemory</name>
-            <description>memory for individual executor</description>
-        </property>
-        <property>
-            <name>sparkExecutorCores</name>
-            <description>number of cores used by single executor</description>
-        </property>
-        <property>
-            <name>oozieActionShareLibForSpark2</name>
-            <description>oozie action sharelib for spark 2.*</description>
-        </property>
-        <property>
-            <name>spark2ExtraListeners</name>
-            <value>com.cloudera.spark.lineage.NavigatorAppListener</value>
-            <description>spark 2.* extra listeners classname</description>
-        </property>
-        <property>
-            <name>spark2SqlQueryExecutionListeners</name>
-            <value>com.cloudera.spark.lineage.NavigatorQueryListener</value>
-            <description>spark 2.* sql query execution listeners classname</description>
-        </property>
-        <property>
-            <name>spark2YarnHistoryServerAddress</name>
-            <description>spark 2.* yarn history server address</description>
-        </property>
-        <property>
-            <name>spark2EventLogDir</name>
-            <description>spark 2.* event log dir location</description>
-        </property>
-    </parameters>
-
-    <global>
-        <job-tracker>${jobTracker}</job-tracker>
-        <name-node>${nameNode}</name-node>
-        <configuration>
-            <property>
-                <name>mapreduce.job.queuename</name>
-                <value>${queueName}</value>
-            </property>
-            <property>
-                <name>oozie.launcher.mapred.job.queue.name</name>
-                <value>${oozieLauncherQueueName}</value>
-            </property>
-            <property>
-                <name>oozie.action.sharelib.for.spark</name>
-                <value>${oozieActionShareLibForSpark2}</value>
-            </property>
-        </configuration>
-    </global>
-
-    <start to="ImportODF_hdfs"/>
-
-    <kill name="Kill">
-        <message>Action failed, error message[${wf:errorMessage(wf:lastErrorNode())}]</message>
-    </kill>
-
-    <action name="ImportODF_hdfs">
-        <spark xmlns="uri:oozie:spark-action:0.2">
-            <master>yarn</master>
-            <mode>cluster</mode>
-            <name>ImportODF_hdfs</name>
-            <class>eu.dnetlib.dhp.oa.graph.raw.MigrateHdfsMdstoresApplication</class>
-            <jar>dhp-graph-mapper-${projectVersion}.jar</jar>
-            <spark-opts>
-                --executor-memory ${sparkExecutorMemory}
-                --executor-cores ${sparkExecutorCores}
-                --driver-memory=${sparkDriverMemory}
-                --conf spark.extraListeners=${spark2ExtraListeners}
-                --conf spark.sql.queryExecutionListeners=${spark2SqlQueryExecutionListeners}
-                --conf spark.yarn.historyServer.address=${spark2YarnHistoryServerAddress}
-                --conf spark.eventLog.dir=${nameNode}${spark2EventLogDir}
-            </spark-opts>
-            <arg>--hdfsPath</arg><arg>${contentPath}/odf_records_hdfs</arg>
-            <arg>--mdstoreManagerUrl</arg><arg>${mdstoreManagerUrl}</arg>
-            <arg>--mdFormat</arg><arg>ODF</arg>
-            <arg>--mdLayout</arg><arg>store</arg>
-            <arg>--mdInterpretation</arg><arg>cleaned</arg>
-        </spark>
-        <ok to="GenerateEntities"/>
-        <error to="Kill"/>
-    </action>
-
-    <action name="GenerateEntities">
-        <spark xmlns="uri:oozie:spark-action:0.2">
-            <master>yarn</master>
-            <mode>cluster</mode>
-            <name>GenerateEntities</name>
-            <class>eu.dnetlib.dhp.oa.graph.raw.GenerateEntitiesApplication</class>
-            <jar>dhp-graph-mapper-${projectVersion}.jar</jar>
-            <spark-opts>
-                --executor-memory ${sparkExecutorMemory}
-                --executor-cores ${sparkExecutorCores}
-                --driver-memory=${sparkDriverMemory}
-                --conf spark.extraListeners=${spark2ExtraListeners}
-                --conf spark.sql.queryExecutionListeners=${spark2SqlQueryExecutionListeners}
-                --conf spark.yarn.historyServer.address=${spark2YarnHistoryServerAddress}
-                --conf spark.eventLog.dir=${nameNode}${spark2EventLogDir}
-            </spark-opts>
-            <arg>--sourcePaths</arg><arg>${contentPath}/odf_records_hdfs</arg>
-            <arg>--targetPath</arg><arg>${workingDir}/entities</arg>
-            <arg>--isLookupUrl</arg><arg>${isLookupUrl}</arg>
-            <arg>--shouldHashId</arg><arg>${shouldHashId}</arg>
-        </spark>
-        <ok to="GenerateGraph"/>
-        <error to="Kill"/>
-    </action>
-
-    <action name="GenerateGraph">
-        <spark xmlns="uri:oozie:spark-action:0.2">
-            <master>yarn</master>
-            <mode>cluster</mode>
-            <name>GenerateGraph</name>
-            <class>eu.dnetlib.dhp.oa.graph.raw.DispatchEntitiesApplication</class>
-            <jar>dhp-graph-mapper-${projectVersion}.jar</jar>
-            <spark-opts>
-                --executor-memory ${sparkExecutorMemory}
-                --executor-cores ${sparkExecutorCores}
-                --driver-memory=${sparkDriverMemory}
-                --conf spark.extraListeners=${spark2ExtraListeners}
-                --conf spark.sql.queryExecutionListeners=${spark2SqlQueryExecutionListeners}
-                --conf spark.yarn.historyServer.address=${spark2YarnHistoryServerAddress}
-                --conf spark.eventLog.dir=${nameNode}${spark2EventLogDir}
-                --conf spark.sql.shuffle.partitions=7680
-            </spark-opts>
-            <arg>--sourcePath</arg><arg>${workingDir}/entities</arg>
-            <arg>--graphRawPath</arg><arg>${workingDir}/graph_raw</arg>
-        </spark>
-        <ok to="End"/>
-        <error to="Kill"/>
-    </action>
-
-    <end name="End"/>
-</workflow-app>
@@ -1,18 +0,0 @@
-<configuration>
-    <property>
-        <name>jobTracker</name>
-        <value>yarnRM</value>
-    </property>
-    <property>
-        <name>nameNode</name>
-        <value>hdfs://nameservice1</value>
-    </property>
-    <property>
-        <name>oozie.use.system.libpath</name>
-        <value>true</value>
-    </property>
-    <property>
-        <name>oozie.action.sharelib.for.spark</name>
-        <value>spark2</value>
-    </property>
-</configuration>
@@ -1,67 +0,0 @@
-<workflow-app name="import regular entities as Graph (step 1)" xmlns="uri:oozie:workflow:0.5">
-    <parameters>
-        <property>
-            <name>migrationPathStep1</name>
-            <description>the base path to store hdfs file</description>
-        </property>
-        <property>
-            <name>mongoURL</name>
-            <description>mongoDB url, example: mongodb://[username:password@]host[:port]</description>
-        </property>
-        <property>
-            <name>mongoDb</name>
-            <description>mongo database</description>
-        </property>
-    </parameters>
-
-    <start to="ResetWorkingPath"/>
-
-    <kill name="Kill">
-        <message>Action failed, error message[${wf:errorMessage(wf:lastErrorNode())}]</message>
-    </kill>
-
-    <action name="ResetWorkingPath">
-        <fs>
-            <delete path='${migrationPathStep1}'/>
-            <mkdir path='${migrationPathStep1}'/>
-        </fs>
-        <ok to="ImportODF"/>
-        <error to="Kill"/>
-    </action>
-
-    <action name="ImportODF">
-        <java>
-            <job-tracker>${jobTracker}</job-tracker>
-            <name-node>${nameNode}</name-node>
-            <main-class>eu.dnetlib.dhp.oa.graph.raw.MigrateMongoMdstoresApplication</main-class>
-            <arg>-p</arg><arg>${migrationPathStep1}</arg>
-            <arg>-mongourl</arg><arg>${mongoURL}</arg>
-            <arg>-mongodb</arg><arg>${mongoDb}</arg>
-            <arg>-f</arg><arg>ODF</arg>
-            <arg>-l</arg><arg>store</arg>
-            <arg>-i</arg><arg>cleaned</arg>
-            <arg>--nameNode</arg><arg>${nameNode}</arg>
-
-        </java>
-        <ok to="ImportOAF"/>
-        <error to="Kill"/>
-    </action>
-
-    <action name="ImportOAF">
-        <java>
-            <job-tracker>${jobTracker}</job-tracker>
-            <name-node>${nameNode}</name-node>
-            <main-class>eu.dnetlib.dhp.oa.graph.raw.MigrateMongoMdstoresApplication</main-class>
-            <arg>-p</arg><arg>${migrationPathStep1}</arg>
-            <arg>-mongourl</arg><arg>${mongoURL}</arg>
-            <arg>-mongodb</arg><arg>${mongoDb}</arg>
-            <arg>-f</arg><arg>OAF</arg>
-            <arg>-l</arg><arg>store</arg>
-            <arg>-i</arg><arg>cleaned</arg>
-            <arg>--nameNode</arg><arg>${nameNode}</arg>
-        </java>
-        <ok to="End"/>
-        <error to="Kill"/>
-    </action>
-
-    <end name="End"/>
-</workflow-app>
@@ -1,18 +0,0 @@
-<configuration>
-    <property>
-        <name>jobTracker</name>
-        <value>yarnRM</value>
-    </property>
-    <property>
-        <name>nameNode</name>
-        <value>hdfs://nameservice1</value>
-    </property>
-    <property>
-        <name>oozie.use.system.libpath</name>
-        <value>true</value>
-    </property>
-    <property>
-        <name>oozie.action.sharelib.for.spark</name>
-        <value>spark2</value>
-    </property>
-</configuration>
@@ -1,65 +0,0 @@
-<workflow-app name="import regular entities as Graph (step 2)" xmlns="uri:oozie:workflow:0.5">
-    <parameters>
-        <property>
-            <name>migrationPathStep1</name>
-            <description>the base path to store hdfs file</description>
-        </property>
-        <property>
-            <name>migrationPathStep2</name>
-            <description>the temporary path to store entities before dispatching</description>
-        </property>
-        <property>
-            <name>isLookupUrl</name>
-            <description>the address of the lookUp service</description>
-        </property>
-
-        <property>
-            <name>sparkDriverMemory</name>
-            <description>memory for driver process</description>
-        </property>
-        <property>
-            <name>sparkExecutorMemory</name>
-            <description>memory for individual executor</description>
-        </property>
-        <property>
-            <name>sparkExecutorCores</name>
-            <description>number of cores used by single executor</description>
-        </property>
-    </parameters>
-
-    <start to="ResetEntities"/>
-
-    <kill name="Kill">
-        <message>Action failed, error message[${wf:errorMessage(wf:lastErrorNode())}]</message>
-    </kill>
-
-    <action name="ResetEntities">
-        <fs>
-            <delete path='${migrationPathStep2}'/>
-            <mkdir path='${migrationPathStep2}'/>
-        </fs>
-        <ok to="GenerateEntities"/>
-        <error to="Kill"/>
-    </action>
-
-    <action name="GenerateEntities">
-        <spark xmlns="uri:oozie:spark-action:0.2">
-            <job-tracker>${jobTracker}</job-tracker>
-            <name-node>${nameNode}</name-node>
-            <master>yarn-cluster</master>
-            <mode>cluster</mode>
-            <name>GenerateEntities</name>
-            <class>eu.dnetlib.dhp.migration.step2.GenerateEntitiesApplication</class>
-            <jar>dhp-aggregation-${projectVersion}.jar</jar>
-            <spark-opts>--executor-memory ${sparkExecutorMemory} --executor-cores ${sparkExecutorCores} --driver-memory=${sparkDriverMemory} --conf spark.extraListeners="com.cloudera.spark.lineage.NavigatorAppListener" --conf spark.sql.queryExecutionListeners="com.cloudera.spark.lineage.NavigatorQueryListener" --conf spark.sql.warehouse.dir="/user/hive/warehouse"</spark-opts>
-            <arg>-mt</arg> <arg>yarn-cluster</arg>
-            <arg>-s</arg><arg>${migrationPathStep1}/db_records,${migrationPathStep1}/oaf_records,${migrationPathStep1}/odf_records</arg>
-            <arg>-t</arg><arg>${migrationPathStep2}/all_entities</arg>
-            <arg>--islookup</arg><arg>${isLookupUrl}</arg>
-        </spark>
-        <ok to="End"/>
-        <error to="Kill"/>
-    </action>
-
-    <end name="End"/>
-</workflow-app>
@@ -1,18 +0,0 @@
-<configuration>
-    <property>
-        <name>jobTracker</name>
-        <value>yarnRM</value>
-    </property>
-    <property>
-        <name>nameNode</name>
-        <value>hdfs://nameservice1</value>
-    </property>
-    <property>
-        <name>oozie.use.system.libpath</name>
-        <value>true</value>
-    </property>
-    <property>
-        <name>oozie.action.sharelib.for.spark</name>
-        <value>spark2</value>
-    </property>
-</configuration>
@@ -1,60 +0,0 @@
-<workflow-app name="import regular entities as Graph (step 3)" xmlns="uri:oozie:workflow:0.5">
-    <parameters>
-
-        <property>
-            <name>migrationPathStep2</name>
-            <description>the temporary path to store entities before dispatching</description>
-        </property>
-        <property>
-            <name>migrationPathStep3</name>
-            <description>the graph Raw base path</description>
-        </property>
-        <property>
-            <name>sparkDriverMemory</name>
-            <description>memory for driver process</description>
-        </property>
-        <property>
-            <name>sparkExecutorMemory</name>
-            <description>memory for individual executor</description>
-        </property>
-        <property>
-            <name>sparkExecutorCores</name>
-            <description>number of cores used by single executor</description>
-        </property>
-    </parameters>
-
-    <start to="ResetGraph"/>
-
-    <kill name="Kill">
-        <message>Action failed, error message[${wf:errorMessage(wf:lastErrorNode())}]</message>
-    </kill>
-
-    <action name="ResetGraph">
-        <fs>
-            <delete path='${migrationPathStep3}'/>
-            <mkdir path='${migrationPathStep3}'/>
-        </fs>
-        <ok to="GenerateGraph"/>
-        <error to="Kill"/>
-    </action>
-
-    <action name="GenerateGraph">
-        <spark xmlns="uri:oozie:spark-action:0.2">
-            <job-tracker>${jobTracker}</job-tracker>
-            <name-node>${nameNode}</name-node>
-            <master>yarn-cluster</master>
-            <mode>cluster</mode>
-            <name>GenerateGraph</name>
-            <class>eu.dnetlib.dhp.migration.step3.DispatchEntitiesApplication</class>
-            <jar>dhp-aggregation-${projectVersion}.jar</jar>
-            <spark-opts>--executor-memory ${sparkExecutorMemory} --executor-cores ${sparkExecutorCores} --driver-memory=${sparkDriverMemory} --conf spark.extraListeners="com.cloudera.spark.lineage.NavigatorAppListener" --conf spark.sql.queryExecutionListeners="com.cloudera.spark.lineage.NavigatorQueryListener" --conf spark.sql.warehouse.dir="/user/hive/warehouse"</spark-opts>
-            <arg>-mt</arg> <arg>yarn-cluster</arg>
-            <arg>-s</arg><arg>${migrationPathStep2}/all_entities</arg>
-            <arg>-g</arg><arg>${migrationPathStep3}</arg>
-        </spark>
-        <ok to="End"/>
-        <error to="Kill"/>
-    </action>
-
-    <end name="End"/>
-</workflow-app>