[OpenOrgsWf] cleanup

This commit is contained in:
Claudio Atzori 2021-03-29 17:40:34 +02:00
parent 7f4e9479ec
commit 9237d55d7f
2 changed files with 1 addition and 56 deletions

View File

@ -16,10 +16,6 @@
<name>workingPath</name> <name>workingPath</name>
<description>path for the working directory</description> <description>path for the working directory</description>
</property> </property>
<property>
<name>dedupGraphPath</name>
<description>path for the output graph</description>
</property>
<property> <property>
<name>cutConnectedComponent</name> <name>cutConnectedComponent</name>
<description>max number of elements in a connected component</description> <description>max number of elements in a connected component</description>

View File

@ -214,57 +214,6 @@
<error to="Kill"/> <error to="Kill"/>
</action> </action>
<!--<action name="merge_claims_relation">-->
<!--<spark xmlns="uri:oozie:spark-action:0.2">-->
<!--<master>yarn</master>-->
<!--<mode>cluster</mode>-->
<!--<name>MergeClaims_relation</name>-->
<!--<class>eu.dnetlib.dhp.oa.graph.raw.MergeClaimsApplication</class>-->
<!--<jar>dhp-graph-mapper-${projectVersion}.jar</jar>-->
<!--<spark-opts>-->
<!--&#45;&#45;executor-memory ${sparkExecutorMemory}-->
<!--&#45;&#45;executor-cores ${sparkExecutorCores}-->
<!--&#45;&#45;driver-memory=${sparkDriverMemory}-->
<!--&#45;&#45;conf spark.extraListeners=${spark2ExtraListeners}-->
<!--&#45;&#45;conf spark.sql.queryExecutionListeners=${spark2SqlQueryExecutionListeners}-->
<!--&#45;&#45;conf spark.yarn.historyServer.address=${spark2YarnHistoryServerAddress}-->
<!--&#45;&#45;conf spark.eventLog.dir=${nameNode}${spark2EventLogDir}-->
<!--&#45;&#45;conf spark.sql.shuffle.partitions=3840-->
<!--</spark-opts>-->
<!--<arg>&#45;&#45;rawGraphPath</arg><arg>${workingDir}/graph_raw</arg>-->
<!--<arg>&#45;&#45;claimsGraphPath</arg><arg>${workingDir}/graph_claims</arg>-->
<!--<arg>&#45;&#45;outputRawGaphPath</arg><arg>${graphOutputPath}</arg>-->
<!--<arg>&#45;&#45;graphTableClassName</arg><arg>eu.dnetlib.dhp.schema.oaf.Relation</arg>-->
<!--</spark>-->
<!--<ok to="wait_merge"/>-->
<!--<error to="Kill"/>-->
<!--</action>-->
<!--<action name="merge_claims_organization">-->
<!--<spark xmlns="uri:oozie:spark-action:0.2">-->
<!--<master>yarn</master>-->
<!--<mode>cluster</mode>-->
<!--<name>MergeClaims_organization</name>-->
<!--<class>eu.dnetlib.dhp.oa.graph.raw.MergeClaimsApplication</class>-->
<!--<jar>dhp-graph-mapper-${projectVersion}.jar</jar>-->
<!--<spark-opts>-->
<!--&#45;&#45;executor-memory ${sparkExecutorMemory}-->
<!--&#45;&#45;executor-cores ${sparkExecutorCores}-->
<!--&#45;&#45;driver-memory=${sparkDriverMemory}-->
<!--&#45;&#45;conf spark.extraListeners=${spark2ExtraListeners}-->
<!--&#45;&#45;conf spark.sql.queryExecutionListeners=${spark2SqlQueryExecutionListeners}-->
<!--&#45;&#45;conf spark.yarn.historyServer.address=${spark2YarnHistoryServerAddress}-->
<!--&#45;&#45;conf spark.eventLog.dir=${nameNode}${spark2EventLogDir}-->
<!--&#45;&#45;conf spark.sql.shuffle.partitions=200-->
<!--</spark-opts>-->
<!--<arg>&#45;&#45;rawGraphPath</arg><arg>${workingDir}/graph_raw</arg>-->
<!--<arg>&#45;&#45;claimsGraphPath</arg><arg>${workingDir}/graph_claims</arg>-->
<!--<arg>&#45;&#45;outputRawGaphPath</arg><arg>${graphOutputPath}</arg>-->
<!--<arg>&#45;&#45;graphTableClassName</arg><arg>eu.dnetlib.dhp.schema.oaf.Organization</arg>-->
<!--</spark>-->
<!--<ok to="wait_merge"/>-->
<!--<error to="Kill"/>-->
<!--</action>-->
<end name="End"/> <end name="End"/>
</workflow-app> </workflow-app>