
added script to download the dump, changed the workflow input parameters

Miriam Baglioni 2021-08-05 10:54:03 +02:00
parent 83c04e5d28
commit 5faeefbda8
3 changed files with 68 additions and 61 deletions

View File: ExtractCrossrefRecords.java

@@ -29,16 +29,16 @@ public class ExtractCrossrefRecords {
 				"/eu/dnetlib/dhp/doiboost/crossref_dump_reader.json")));
 		parser.parseArgument(args);
 		final String hdfsServerUri = parser.get("hdfsServerUri");
-		final String workingPath = parser.get("workingPath");
+		final String workingPath = hdfsServerUri.concat(parser.get("workingPath"));
 		final String outputPath = parser.get("outputPath");
 		final String crossrefFileNameTarGz = parser.get("crossrefFileNameTarGz");
-		Path hdfsreadpath = new Path(hdfsServerUri.concat(crossrefFileNameTarGz));
+		Path hdfsreadpath = new Path(workingPath.concat("/").concat(crossrefFileNameTarGz));
 		Configuration conf = new Configuration();
-		conf.set("fs.defaultFS", hdfsServerUri.concat(workingPath));
+		conf.set("fs.defaultFS", workingPath);
 		conf.set("fs.hdfs.impl", org.apache.hadoop.hdfs.DistributedFileSystem.class.getName());
 		conf.set("fs.file.impl", org.apache.hadoop.fs.LocalFileSystem.class.getName());
-		FileSystem fs = FileSystem.get(URI.create(hdfsServerUri.concat(workingPath)), conf);
+		FileSystem fs = FileSystem.get(URI.create(workingPath), conf);
 		FSDataInputStream crossrefFileStream = fs.open(hdfsreadpath);
 		try (TarArchiveInputStream tais = new TarArchiveInputStream(
 			new GzipCompressorInputStream(crossrefFileStream))) {
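
For context, the effect of the rewiring above is that workingPath now carries the full HDFS URI and the tar.gz is resolved inside it, rather than at the filesystem root. A minimal sketch of the resulting URIs, assuming hypothetical parameter values:

    # Hypothetical inputs:
    #   hdfsServerUri         = hdfs://nameservice1
    #   workingPath           = /data/doiboost/crossref
    #   crossrefFileNameTarGz = crossref.tar.gz
    #
    # After the change the job sets fs.defaultFS to
    # hdfs://nameservice1/data/doiboost/crossref and reads the dump from:
    hdfs dfs -ls hdfs://nameservice1/data/doiboost/crossref/crossref.tar.gz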

View File: download.sh

@@ -0,0 +1,2 @@
+#!/bin/bash
+curl -LSs -H "Crossref-Plus-API-Token: Bearer eyJ0eXAiOiJKV1QiLCJhbGciOiJIUzI1NiJ9.eyJpc3MiOiJodHRwOi8vY3Jvc3NyZWYub3JnLyIsImF1ZCI6Im1kcGx1cyIsImp0aSI6Ijk3YTZkNGVkLTg5MjktNGQ2Yi05NWY1LTY2YmMyNDgzNTRjNCJ9.5DPM4gRibUBYBtrUSpRz3RGHYVB-8f61jQBW_q-r-hs" $1 | hdfs dfs -put $2/$3
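
Note that, as committed, the pipe has no consumer: hdfs dfs -put with a single path argument uploads a local file named $2/$3 and ignores stdin, so the curl output is discarded. A sketch of the presumably intended form, streaming the download straight to HDFS via "-" (token replaced with a placeholder):

    #!/bin/bash
    # $1 = dump URL, $2 = HDFS target directory, $3 = target file name
    curl -LSs -H "Crossref-Plus-API-Token: Bearer <token>" "$1" | hdfs dfs -put - "$2/$3"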

View File: workflow.xml

@@ -1,13 +1,5 @@
-<workflow-app name="read Crossref dump from HDFS" xmlns="uri:oozie:workflow:0.5">
+<workflow-app name="CROSSREF DUMP - DOWNLOAD" xmlns="uri:oozie:workflow:0.5">
     <parameters>
-        <property>
-            <name>crossrefDumpPath</name>
-            <description>the working dir base path</description>
-        </property>
-        <property>
-            <name>inputPathCrossref</name>
-            <description>the working dir base path</description>
-        </property>
         <property>
             <name>sparkDriverMemory</name>
             <description>memory for driver process</description>
@@ -18,27 +10,82 @@
         </property>
         <property>
             <name>sparkExecutorCores</name>
+            <value>2</value>
             <description>number of cores used by single executor</description>
         </property>
+        <!-- Crossref Parameters -->
+        <property>
+            <name>crossrefdumpfilename</name>
+            <description>the Crossref input path</description>
+        </property>
+        <property>
+            <name>crossrefDumpPath</name>
+            <description>the Crossref dump path</description>
+        </property>
     </parameters>
-    <start to="GenerateCrossrefDataset"/>
+    <global>
+        <job-tracker>${jobTracker}</job-tracker>
+        <name-node>${nameNode}</name-node>
+        <configuration>
+            <property>
+                <name>oozie.action.sharelib.for.spark</name>
+                <value>${oozieActionShareLibForSpark2}</value>
+            </property>
+        </configuration>
+    </global>
+    <start to="ImportCrossRef"/>
     <kill name="Kill">
         <message>Action failed, error message[${wf:errorMessage(wf:lastErrorNode())}]</message>
     </kill>
+    <action name="removeFiles">
+        <fs>
+            <delete path="${crossrefDumpPath}/${crossrefdumpfilename}"/>
+            <delete path="${crossrefDumpPath}/files"/>
+            <delete path="${crossrefDumpPath}/crossref_unpack"/>
+        </fs>
+        <ok to="DownloadDump"/>
+        <error to="Kill"/>
+    </action>
+    <action name="DownloadDump">
+        <shell xmlns="uri:oozie:shell-action:0.2">
+            <job-tracker>${jobTracker}</job-tracker>
+            <name-node>${nameNode}</name-node>
+            <configuration>
+                <property>
+                    <name>mapred.job.queue.name</name>
+                    <value>${queueName}</value>
+                </property>
+            </configuration>
+            <exec>download.sh</exec>
+            <argument>${url}</argument>
+            <argument>${crossrefDumpPath}</argument>
+            <argument>${crossrefdumpfilename}</argument>
+            <env-var>HADOOP_USER_NAME=${wf:user()}</env-var>
+            <file>download.sh</file>
+            <capture-output/>
+        </shell>
+        <ok to="ImportCrossRef"/>
+        <error to="Kill"/>
+    </action>
     <action name="ImportCrossRef">
         <java>
             <job-tracker>${jobTracker}</job-tracker>
             <name-node>${nameNode}</name-node>
             <main-class>eu.dnetlib.doiboost.crossref.ExtractCrossrefRecords</main-class>
             <arg>--hdfsServerUri</arg><arg>${nameNode}</arg>
-            <arg>--crossrefFileNameTarGz</arg><arg>${crossrefDumpPath}/crossref.tar.gz</arg>
+            <arg>--crossrefFileNameTarGz</arg><arg>${crossrefdumpfilename}</arg>
             <arg>--workingPath</arg><arg>${crossrefDumpPath}</arg>
-            <arg>--outputPath</arg><arg>${workingDir}/files/</arg>
+            <arg>--outputPath</arg><arg>${crossrefDumpPath}/files/</arg>
         </java>
         <ok to="UnpackCrossrefEntries"/>
         <error to="Kill"/>
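
The hunk above adds a cleanup-and-download chain: removeFiles clears artifacts of a previous run, and DownloadDump ships download.sh to the worker and fetches the dump into ${crossrefDumpPath}. Note, though, that the start node as committed points directly at ImportCrossRef, so the download leg only runs if start is retargeted at removeFiles. A hedged example of submitting the workflow with the Oozie CLI, with placeholder host and property values:

    # job.properties (illustrative values only)
    #   url=https://api.crossref.org/snapshots/monthly/latest/all.json.tar.gz
    #   crossrefDumpPath=/data/doiboost/crossref
    #   crossrefdumpfilename=crossref.tar.gz
    #   queueName=default
    oozie job -oozie http://oozie-host:11000/oozie -config job.properties -run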
@@ -48,7 +95,7 @@
         <spark xmlns="uri:oozie:spark-action:0.2">
             <master>yarn-cluster</master>
             <mode>cluster</mode>
-            <name>SparkGenerateCrossrefDataset</name>
+            <name>SparkUnpackCrossrefEntries</name>
             <class>eu.dnetlib.doiboost.crossref.UnpackCrtossrefEntries</class>
             <jar>dhp-doiboost-${projectVersion}.jar</jar>
             <spark-opts>
@@ -63,56 +110,14 @@
             </spark-opts>
             <arg>--master</arg><arg>yarn-cluster</arg>
             <arg>--sourcePath</arg><arg>${crossrefDumpPath}/files</arg>
-            <arg>--targetPath</arg><arg>${inputPathCrossref}/crossref_ds</arg>
-        </spark>
-        <ok to="GenerateCrossrefDataset"/>
-        <error to="Kill"/>
-    </action>
-    <action name="GenerateCrossrefDataset">
-        <spark xmlns="uri:oozie:spark-action:0.2">
-            <master>yarn-cluster</master>
-            <mode>cluster</mode>
-            <name>SparkGenerateCrossrefDataset</name>
-            <class>eu.dnetlib.doiboost.crossref.GenerateCrossrefDataset</class>
-            <jar>dhp-doiboost-${projectVersion}.jar</jar>
-            <spark-opts>
-                --executor-memory=${sparkExecutorMemory}
-                --executor-cores=${sparkExecutorCores}
-                --driver-memory=${sparkDriverMemory}
-                --conf spark.sql.shuffle.partitions=3840
-                --conf spark.extraListeners=${spark2ExtraListeners}
-                --conf spark.sql.queryExecutionListeners=${spark2SqlQueryExecutionListeners}
-                --conf spark.yarn.historyServer.address=${spark2YarnHistoryServerAddress}
-                --conf spark.eventLog.dir=${nameNode}${spark2EventLogDir}
-            </spark-opts>
-            <arg>--master</arg><arg>yarn-cluster</arg>
-            <arg>--sourcePath</arg><arg>${inputPathCrossref}/crossref_ds</arg>
-            <arg>--targetPath</arg><arg>${inputPathCrossref}/crossref_ds_updates</arg>
+            <arg>--targetPath</arg><arg>${crossrefDumpPath}/crossref_unpack/</arg>
         </spark>
         <ok to="End"/>
         <error to="Kill"/>
     </action>
-    <!--    <action name="removeFiles">-->
-    <!--        <fs>-->
-    <!--            <delete path="${workingDir}/files"/>-->
-    <!--        </fs>-->
-    <!--        <ok to="renameDataset"/>-->
-    <!--        <error to="Kill"/>-->
-    <!--    </action>-->
-    <!--    <action name="renameDataset">-->
-    <!--        <fs>-->
-    <!--            <delete path="${inputPathCrossref}/crossref_ds"/>-->
-    <!--            <move source="${inputPathCrossref}/crossref_ds_updated"-->
-    <!--                  target="${inputPathCrossref}/crossref_ds"/>-->
-    <!--        </fs>-->
-    <!--        <ok to="End"/>-->
-    <!--        <error to="Kill"/>-->
-    <!--    </action>-->
     <end name="End"/>
 </workflow-app>