forked from D-Net/dnet-hadoop
minor changes
This commit is contained in:
parent 4625b7486e
commit 2148a51fae
@@ -1,18 +1,18 @@
<workflow-app name="dump_community_products" xmlns="uri:oozie:workflow:0.5">

    <parameters>
        <property>
            <name>sourcePath</name>
            <description>the source path</description>
        </property>
        <property>
            <name>isLookUpUrl</name>
            <description>the isLookup service endpoint</description>
        </property>
        <property>
            <name>outputPath</name>
            <description>the output path</description>
        </property>
        <property>
            <name>accessToken</name>
            <description>the access token used for the deposition in Zenodo</description>
@@ -320,6 +320,7 @@
        <ok to="join_extend"/>
        <error to="Kill"/>
    </action>

    <action name="extend_orp">
        <spark xmlns="uri:oozie:spark-action:0.2">
            <master>yarn</master>
@@ -344,6 +345,7 @@
        <ok to="join_extend"/>
        <error to="Kill"/>
    </action>

    <action name="extend_software">
        <spark xmlns="uri:oozie:spark-action:0.2">
            <master>yarn</master>
@@ -371,43 +373,42 @@

    <join name="join_extend" to="splitForCommunities"/>

    <action name="splitForCommunities">
        <spark xmlns="uri:oozie:spark-action:0.2">
            <master>yarn</master>
            <mode>cluster</mode>
            <name>Split dumped result for community</name>
            <class>eu.dnetlib.dhp.oa.graph.dump.community.SparkSplitForCommunity</class>
            <jar>dhp-graph-mapper-${projectVersion}.jar</jar>
            <spark-opts>
                --executor-memory=${sparkExecutorMemory}
                --executor-cores=${sparkExecutorCores}
                --driver-memory=${sparkDriverMemory}
                --conf spark.extraListeners=${spark2ExtraListeners}
                --conf spark.sql.queryExecutionListeners=${spark2SqlQueryExecutionListeners}
                --conf spark.yarn.historyServer.address=${spark2YarnHistoryServerAddress}
                --conf spark.eventLog.dir=${nameNode}${spark2EventLogDir}
                --conf spark.sql.warehouse.dir=${sparkSqlWarehouseDir}
            </spark-opts>
            <arg>--sourcePath</arg><arg>${workingDir}/ext</arg>
            <arg>--outputPath</arg><arg>${workingDir}/split</arg>
            <arg>--communityMapPath</arg><arg>${workingDir}/communityMap</arg>
        </spark>
        <ok to="make_archive"/>
        <error to="Kill"/>
    </action>

    <action name="make_archive">
        <java>
            <main-class>eu.dnetlib.dhp.oa.graph.dump.MakeTar</main-class>
            <arg>--hdfsPath</arg><arg>${outputPath}</arg>
            <arg>--nameNode</arg><arg>${nameNode}</arg>
            <arg>--sourcePath</arg><arg>${workingDir}/split</arg>
        </java>
        <ok to="send_zenodo"/>
        <error to="Kill"/>
    </action>


    <action name="send_zenodo">
        <java>
            <main-class>eu.dnetlib.dhp.oa.graph.dump.SendToZenodoHDFS</main-class>
@@ -424,8 +425,6 @@
        <error to="Kill"/>
    </action>



    <end name="End"/>

</workflow-app>

@@ -1,18 +1,18 @@
<workflow-app name="dump_whole_graph" xmlns="uri:oozie:workflow:0.5">

    <parameters>
        <property>
            <name>sourcePath</name>
            <description>the source path</description>
        </property>
        <property>
            <name>isLookUpUrl</name>
            <description>the isLookup service endpoint</description>
        </property>
        <property>
            <name>outputPath</name>
            <description>the output path</description>
        </property>
        <property>
            <name>resultAggregation</name>
            <description>true if all the result type have to be dumped under result. false otherwise</description>
@@ -357,10 +357,8 @@
        <error to="Kill"/>
    </action>


    <join name="join_dump" to="fork_context"/>


    <fork name="fork_context">
        <path start="create_entities_fromcontext"/>
        <path start="create_relation_fromcontext"/>
@@ -389,7 +387,6 @@
        <error to="Kill"/>
    </action>


    <action name="create_relation_fromorgs">
        <spark xmlns="uri:oozie:spark-action:0.2">
            <master>yarn</master>
@@ -418,7 +415,6 @@

    <join name="join_context" to="fork_extract_relations"/>


    <fork name="fork_extract_relations">
        <path start="rels_from_pubs"/>
        <path start="rels_from_dats"/>
@@ -530,7 +526,6 @@
        <error to="Kill"/>
    </action>


    <join name="join_extract_relations" to="collect_and_save"/>

    <action name="collect_and_save">
@@ -568,8 +563,7 @@
        <ok to="send_zenodo"/>
        <error to="Kill"/>
    </action>


    <action name="send_zenodo">
        <java>
            <main-class>eu.dnetlib.dhp.oa.graph.dump.SendToZenodoHDFS</main-class>