using long param names in wf definition

Claudio Atzori 2020-05-04 19:19:29 +02:00
parent a2fc37df5f
commit 4a8487165c
1 changed file with 35 additions and 35 deletions

@@ -115,11 +115,11 @@
 <delete path="${contentPath}/db_claims"/>
 </prepare>
 <main-class>eu.dnetlib.dhp.oa.graph.raw.MigrateDbEntitiesApplication</main-class>
-<arg>-p</arg><arg>${contentPath}/db_claims</arg>
-<arg>-pgurl</arg><arg>${postgresURL}</arg>
-<arg>-pguser</arg><arg>${postgresUser}</arg>
-<arg>-pgpasswd</arg><arg>${postgresPassword}</arg>
-<arg>-a</arg><arg>claims</arg>
+<arg>--hdfsPath</arg><arg>${contentPath}/db_claims</arg>
+<arg>--postgresUrl</arg><arg>${postgresURL}</arg>
+<arg>--postgresUser</arg><arg>${postgresUser}</arg>
+<arg>--postgresPassword</arg><arg>${postgresPassword}</arg>
+<arg>--action</arg><arg>claims</arg>
 </java>
 <ok to="ImportODF_claims"/>
 <error to="Kill"/>
@@ -165,10 +165,10 @@
 <delete path="${contentPath}/db_records"/>
 </prepare>
 <main-class>eu.dnetlib.dhp.oa.graph.raw.MigrateDbEntitiesApplication</main-class>
-<arg>-p</arg><arg>${contentPath}/db_records</arg>
-<arg>-pgurl</arg><arg>${postgresURL}</arg>
-<arg>-pguser</arg><arg>${postgresUser}</arg>
-<arg>-pgpasswd</arg><arg>${postgresPassword}</arg>
+<arg>--hdfsPath</arg><arg>${contentPath}/db_records</arg>
+<arg>--postgresUrl</arg><arg>${postgresURL}</arg>
+<arg>--postgresUser</arg><arg>${postgresUser}</arg>
+<arg>--postgresPassword</arg><arg>${postgresPassword}</arg>
 </java>
 <ok to="ImportODF"/>
 <error to="Kill"/>
@@ -180,12 +180,12 @@
 <delete path="${contentPath}/odf_records"/>
 </prepare>
 <main-class>eu.dnetlib.dhp.oa.graph.raw.MigrateMongoMdstoresApplication</main-class>
-<arg>-p</arg><arg>${contentPath}/odf_records</arg>
-<arg>-mongourl</arg><arg>${mongoURL}</arg>
-<arg>-mongodb</arg><arg>${mongoDb}</arg>
-<arg>-f</arg><arg>ODF</arg>
-<arg>-l</arg><arg>store</arg>
-<arg>-i</arg><arg>cleaned</arg>
+<arg>--hdfsPath</arg><arg>${contentPath}/odf_records</arg>
+<arg>--mongoBaseUrl</arg><arg>${mongoURL}</arg>
+<arg>--mongoDb</arg><arg>${mongoDb}</arg>
+<arg>--mdFormat</arg><arg>ODF</arg>
+<arg>--mdLayout</arg><arg>store</arg>
+<arg>--mdInterpretation</arg><arg>cleaned</arg>
 </java>
 <ok to="ImportOAF"/>
 <error to="Kill"/>
@@ -197,12 +197,12 @@
 <delete path="${contentPath}/oaf_records"/>
 </prepare>
 <main-class>eu.dnetlib.dhp.oa.graph.raw.MigrateMongoMdstoresApplication</main-class>
-<arg>-p</arg><arg>${contentPath}/oaf_records</arg>
-<arg>-mongourl</arg><arg>${mongoURL}</arg>
-<arg>-mongodb</arg><arg>${mongoDb}</arg>
-<arg>-f</arg><arg>OAF</arg>
-<arg>-l</arg><arg>store</arg>
-<arg>-i</arg><arg>cleaned</arg>
+<arg>--hdfsPath</arg><arg>${contentPath}/oaf_records</arg>
+<arg>--mongoBaseUrl</arg><arg>${mongoURL}</arg>
+<arg>--mongoDb</arg><arg>${mongoDb}</arg>
+<arg>--mdFormat</arg><arg>OAF</arg>
+<arg>--mdLayout</arg><arg>store</arg>
+<arg>--mdInterpretation</arg><arg>cleaned</arg>
 </java>
 <ok to="wait_import"/>
 <error to="Kill"/>
@@ -231,11 +231,11 @@
 --conf spark.yarn.historyServer.address=${spark2YarnHistoryServerAddress}
 --conf spark.eventLog.dir=${nameNode}${spark2EventLogDir}
 </spark-opts>
-<arg>-s</arg><arg>${contentPath}/db_claims,${contentPath}/oaf_claims,${contentPath}/odf_claims</arg>
-<arg>-t</arg><arg>${workingDir}/entities_claim</arg>
-<arg>-pgurl</arg><arg>${postgresURL}</arg>
-<arg>-pguser</arg><arg>${postgresUser}</arg>
-<arg>-pgpasswd</arg><arg>${postgresPassword}</arg>
+<arg>--sourcePaths</arg><arg>${contentPath}/db_claims,${contentPath}/oaf_claims,${contentPath}/odf_claims</arg>
+<arg>--targetPath</arg><arg>${workingDir}/entities_claim</arg>
+<arg>--postgresUrl</arg><arg>${postgresURL}</arg>
+<arg>--postgresUser</arg><arg>${postgresUser}</arg>
+<arg>--postgresPassword</arg><arg>${postgresPassword}</arg>
 </spark>
 <ok to="GenerateGraph_claims"/>
 <error to="Kill"/>
@@ -257,8 +257,8 @@
 --conf spark.yarn.historyServer.address=${spark2YarnHistoryServerAddress}
 --conf spark.eventLog.dir=${nameNode}${spark2EventLogDir}
 </spark-opts>
-<arg>-s</arg><arg>${workingDir}/entities_claim</arg>
-<arg>-g</arg><arg>${workingDir}/graph_claims</arg>
+<arg>--sourcePath</arg><arg>${workingDir}/entities_claim</arg>
+<arg>--graphRawPath</arg><arg>${workingDir}/graph_claims</arg>
 </spark>
 <ok to="wait_graphs"/>
 <error to="Kill"/>
@@ -280,11 +280,11 @@
 --conf spark.yarn.historyServer.address=${spark2YarnHistoryServerAddress}
 --conf spark.eventLog.dir=${nameNode}${spark2EventLogDir}
 </spark-opts>
-<arg>-s</arg><arg>${contentPath}/db_records,${contentPath}/oaf_records,${contentPath}/odf_records</arg>
-<arg>-t</arg><arg>${workingDir}/entities</arg>
-<arg>-pgurl</arg><arg>${postgresURL}</arg>
-<arg>-pguser</arg><arg>${postgresUser}</arg>
-<arg>-pgpasswd</arg><arg>${postgresPassword}</arg>
+<arg>--sourcePaths</arg><arg>${contentPath}/db_records,${contentPath}/oaf_records,${contentPath}/odf_records</arg>
+<arg>--targetPath</arg><arg>${workingDir}/entities</arg>
+<arg>--postgresUrl</arg><arg>${postgresURL}</arg>
+<arg>--postgresUser</arg><arg>${postgresUser}</arg>
+<arg>--postgresPassword</arg><arg>${postgresPassword}</arg>
 </spark>
 <ok to="GenerateGraph"/>
 <error to="Kill"/>
@@ -307,8 +307,8 @@
 --conf spark.eventLog.dir=${nameNode}${spark2EventLogDir}
 --conf spark.sql.shuffle.partitions=7680
 </spark-opts>
-<arg>-s</arg><arg>${workingDir}/entities</arg>
-<arg>-g</arg><arg>${workingDir}/graph_raw</arg>
+<arg>--sourcePath</arg><arg>${workingDir}/entities</arg>
+<arg>--graphRawPath</arg><arg>${workingDir}/graph_raw</arg>
 </spark>
 <ok to="wait_graphs"/>
 <error to="Kill"/>