forked from D-Net/dnet-hadoop
changed to split the generation of the crossref dataset into two steps
This commit is contained in:
parent 95885bcf12
commit 3585e53da3
@@ -142,6 +142,32 @@
                 <arg>--workingPath</arg><arg>${crossrefDumpPath}</arg>
                 <arg>--outputPath</arg><arg>${crossrefDumpPath}/files/</arg>
             </java>
+            <ok to="UnpackCrossrefEntries"/>
+            <error to="Kill"/>
+        </action>
+
+        <action name="UnpackCrossrefEntries">
+            <spark xmlns="uri:oozie:spark-action:0.2">
+                <master>yarn-cluster</master>
+                <mode>cluster</mode>
+                <name>SparkUnpackCrossrefEntries</name>
+                <class>eu.dnetlib.doiboost.crossref.UnpackCrtossrefEntries</class>
+                <jar>dhp-doiboost-${projectVersion}.jar</jar>
+                <spark-opts>
+                    --executor-memory=${sparkExecutorMemory}
+                    --executor-cores=${sparkExecutorCores}
+                    --driver-memory=${sparkDriverMemory}
+                    --conf spark.sql.shuffle.partitions=3840
+                    --conf spark.extraListeners=${spark2ExtraListeners}
+                    --conf spark.sql.queryExecutionListeners=${spark2SqlQueryExecutionListeners}
+                    --conf spark.yarn.historyServer.address=${spark2YarnHistoryServerAddress}
+                    --conf spark.eventLog.dir=${nameNode}${spark2EventLogDir}
+                </spark-opts>
+                <arg>--master</arg><arg>yarn-cluster</arg>
+                <arg>--sourcePath</arg><arg>${crossrefDumpPath}/files</arg>
+                <arg>--targetPath</arg><arg>${crossrefDumpPath}/crossref_unpack/</arg>
+
+            </spark>
             <ok to="GenerateCrossrefDataset"/>
             <error to="Kill"/>
         </action>
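The new action delegates the first of the two steps to the (verbatim-named) class eu.dnetlib.doiboost.crossref.UnpackCrtossrefEntries, which turns the raw dump under ${crossrefDumpPath}/files into one-record-per-line output in ${crossrefDumpPath}/crossref_unpack/. As a rough illustration only, here is a minimal Scala sketch of what such an unpack job could look like, assuming each dump part is a single JSON document with an "items" array (the layout of the public Crossref snapshot); it is not the actual dhp-doiboost implementation, and UnpackCrossrefEntriesSketch and extractItems are hypothetical names:

    // Hypothetical sketch only — not the actual
    // eu.dnetlib.doiboost.crossref.UnpackCrtossrefEntries class.
    // Assumes each dump part is one JSON document with an "items" array.
    import org.apache.hadoop.io.compress.GzipCodec
    import org.apache.spark.sql.SparkSession
    import org.json4s.JArray
    import org.json4s.jackson.JsonMethods.{compact, parse, render}

    object UnpackCrossrefEntriesSketch {

      // Explode one dump blob into one compact JSON string per record.
      def extractItems(blob: String): Seq[String] =
        parse(blob) \ "items" match {
          case JArray(items) => items.map(i => compact(render(i)))
          case _             => Seq.empty
        }

      def main(args: Array[String]): Unit = {
        val spark = SparkSession.builder().appName("SparkUnpackCrossrefEntries").getOrCreate()
        val sourcePath = args(0) // e.g. ${crossrefDumpPath}/files
        val targetPath = args(1) // e.g. ${crossrefDumpPath}/crossref_unpack/

        // wholeTextFiles keeps each dump part intact so it can be parsed
        // as a single JSON document; flatMap then emits one line per record.
        spark.sparkContext
          .wholeTextFiles(sourcePath)
          .flatMap { case (_, content) => extractItems(content) }
          .saveAsTextFile(targetPath, classOf[GzipCodec])
      }
    }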
@@ -155,7 +181,7 @@
                 <jar>dhp-doiboost-${projectVersion}.jar</jar>
                 <spark-opts>
                     --executor-memory=7G
-                    --executor-cores=4
+                    --executor-cores=2
                     --driver-memory=7G
                     --conf spark.sql.shuffle.partitions=3840
                     --conf spark.extraListeners=${spark2ExtraListeners}
@@ -164,7 +190,7 @@
                     --conf spark.eventLog.dir=${nameNode}${spark2EventLogDir}
                 </spark-opts>
                 <arg>--master</arg><arg>yarn-cluster</arg>
-                <arg>--sourcePath</arg><arg>${crossrefDumpPath}/files/</arg>
+                <arg>--sourcePath</arg><arg>${crossrefDumpPath}/crossref_unpack/</arg>
                 <arg>--targetPath</arg><arg>${inputPathCrossref}/crossref_ds</arg>

             </spark>
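With this change the existing GenerateCrossrefDataset action becomes the second step: it now reads the already-unpacked entries from crossref_unpack/ instead of the raw dump files. Again as a rough illustration only, a minimal Scala sketch of such a job, assuming one JSON record per input line with a top-level "DOI" field; CrossrefRecord is a hypothetical stand-in type, and the real dhp-doiboost job may persist a different schema:

    // Hypothetical sketch of the second step — not the actual
    // GenerateCrossrefDataset job in dhp-doiboost.
    import org.apache.spark.sql.SparkSession
    import org.json4s.DefaultFormats
    import org.json4s.jackson.JsonMethods.parse

    // Stand-in record type; the real job may persist a different schema.
    case class CrossrefRecord(doi: String, json: String)

    object GenerateCrossrefDatasetSketch {

      // Key each record by its (lower-cased) DOI, keeping the raw JSON.
      def toRecord(line: String): CrossrefRecord = {
        implicit val formats: DefaultFormats.type = DefaultFormats
        val doi = (parse(line) \ "DOI").extract[String].toLowerCase
        CrossrefRecord(doi, line)
      }

      def main(args: Array[String]): Unit = {
        val spark = SparkSession.builder().appName("GenerateCrossrefDataset").getOrCreate()
        import spark.implicits._

        val sourcePath = args(0) // e.g. ${crossrefDumpPath}/crossref_unpack/
        val targetPath = args(1) // e.g. ${inputPathCrossref}/crossref_ds

        // One typed record per unpacked JSON line, stored as parquet.
        spark.read
          .textFile(sourcePath)
          .map(toRecord)
          .write
          .mode("overwrite")
          .save(targetPath)
      }
    }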
@@ -174,7 +200,8 @@

         <action name="removeFiles">
             <fs>
-                <delete path="${crossrefDumpPath}/files"/>
+<!--                <delete path="${crossrefDumpPath}/files"/>-->
+                <delete path="${crossrefDumpPath}/crossref_unpack/"/>
             </fs>
             <ok to="ResetMagWorkingPath"/>
             <error to="Kill"/>