Renamed the jar containing the bulktagging and propagation workflows from dhp-[bulktagging|propagation] to dhp-enrichment; adjusted XML formatting.
commit ec0782e582
parent 1547ca7e15
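The diffs below only show the `<jar>` references in the workflow definitions being updated. The rename itself would normally also be carried by the module's pom.xml, roughly as in the following sketch — the module path, groupId, and parent coordinates are assumptions, since the pom changes are not part of this view:

 <!-- dhp-workflows/dhp-enrichment/pom.xml (assumed location and coordinates) -->
 <parent>
     <groupId>eu.dnetlib.dhp</groupId>
     <artifactId>dhp-workflows</artifactId>
     <version>${projectVersion}</version>
 </parent>
-<artifactId>dhp-bulktag</artifactId>
+<artifactId>dhp-enrichment</artifactId>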
@@ -101,7 +101,7 @@
     <mode>cluster</mode>
     <name>bulkTagging-publication</name>
     <class>eu.dnetlib.dhp.bulktag.SparkBulkTagJob</class>
-    <jar>dhp-bulktag-${projectVersion}.jar</jar>
+    <jar>dhp-enrichment-${projectVersion}.jar</jar>
     <spark-opts>
         --num-executors=${sparkExecutorNumber}
         --executor-memory=${sparkExecutorMemory}
@@ -130,7 +130,7 @@
     <mode>cluster</mode>
     <name>bulkTagging-dataset</name>
     <class>eu.dnetlib.dhp.bulktag.SparkBulkTagJob</class>
-    <jar>dhp-bulktag-${projectVersion}.jar</jar>
+    <jar>dhp-enrichment-${projectVersion}.jar</jar>
     <spark-opts>
         --num-executors=${sparkExecutorNumber}
         --executor-memory=${sparkExecutorMemory}
@@ -159,7 +159,7 @@
     <mode>cluster</mode>
     <name>bulkTagging-orp</name>
     <class>eu.dnetlib.dhp.bulktag.SparkBulkTagJob</class>
-    <jar>dhp-bulktag-${projectVersion}.jar</jar>
+    <jar>dhp-enrichment-${projectVersion}.jar</jar>
    <spark-opts>
         --num-executors=${sparkExecutorNumber}
         --executor-memory=${sparkExecutorMemory}
@@ -188,7 +188,7 @@
     <mode>cluster</mode>
     <name>bulkTagging-software</name>
     <class>eu.dnetlib.dhp.bulktag.SparkBulkTagJob</class>
-    <jar>dhp-bulktag-${projectVersion}.jar</jar>
+    <jar>dhp-enrichment-${projectVersion}.jar</jar>
     <spark-opts>
         --num-executors=${sparkExecutorNumber}
         --executor-memory=${sparkExecutorMemory}
@@ -92,7 +92,7 @@
     <mode>cluster</mode>
     <name>PrepareDatasourceCountryAssociation</name>
     <class>eu.dnetlib.dhp.countrypropagation.PrepareDatasourceCountryAssociation</class>
-    <jar>dhp-propagation-${projectVersion}.jar</jar>
+    <jar>dhp-enrichment-${projectVersion}.jar</jar>
     <spark-opts>
         --executor-cores=${sparkExecutorCores}
         --executor-memory=${sparkExecutorMemory}
@@ -126,7 +126,7 @@
     <mode>cluster</mode>
     <name>prepareResultCountry-Publication</name>
     <class>eu.dnetlib.dhp.countrypropagation.PrepareResultCountrySet</class>
-    <jar>dhp-propagation-${projectVersion}.jar</jar>
+    <jar>dhp-enrichment-${projectVersion}.jar</jar>
     <spark-opts>
         --executor-cores=${sparkExecutorCores}
         --executor-memory=${sparkExecutorMemory}
@@ -156,7 +156,7 @@
     <mode>cluster</mode>
     <name>prepareResultCountry-Dataset</name>
     <class>eu.dnetlib.dhp.countrypropagation.PrepareResultCountrySet</class>
-    <jar>dhp-propagation-${projectVersion}.jar</jar>
+    <jar>dhp-enrichment-${projectVersion}.jar</jar>
     <spark-opts>
         --executor-cores=${sparkExecutorCores}
         --executor-memory=${sparkExecutorMemory}
@@ -186,7 +186,7 @@
     <mode>cluster</mode>
     <name>prepareResultCountry-ORP</name>
     <class>eu.dnetlib.dhp.countrypropagation.PrepareResultCountrySet</class>
-    <jar>dhp-propagation-${projectVersion}.jar</jar>
+    <jar>dhp-enrichment-${projectVersion}.jar</jar>
     <spark-opts>
         --executor-cores=${sparkExecutorCores}
         --executor-memory=${sparkExecutorMemory}
@@ -216,7 +216,7 @@
     <mode>cluster</mode>
     <name>prepareResultCountry-Software</name>
     <class>eu.dnetlib.dhp.countrypropagation.PrepareResultCountrySet</class>
-    <jar>dhp-propagation-${projectVersion}.jar</jar>
+    <jar>dhp-enrichment-${projectVersion}.jar</jar>
     <spark-opts>
         --executor-cores=${sparkExecutorCores}
         --executor-memory=${sparkExecutorMemory}
@@ -255,7 +255,7 @@
     <mode>cluster</mode>
     <name>countryPropagationForPublications</name>
     <class>eu.dnetlib.dhp.countrypropagation.SparkCountryPropagationJob</class>
-    <jar>dhp-propagation-${projectVersion}.jar</jar>
+    <jar>dhp-enrichment-${projectVersion}.jar</jar>
     <spark-opts>
         --executor-cores=${sparkExecutorCores}
         --executor-memory=${sparkExecutorMemory}
@@ -285,7 +285,7 @@
     <mode>cluster</mode>
     <name>countryPropagationForDataset</name>
     <class>eu.dnetlib.dhp.countrypropagation.SparkCountryPropagationJob</class>
-    <jar>dhp-propagation-${projectVersion}.jar</jar>
+    <jar>dhp-enrichment-${projectVersion}.jar</jar>
     <spark-opts>
         --executor-cores=${sparkExecutorCores}
         --executor-memory=${sparkExecutorMemory}
@@ -315,7 +315,7 @@
     <mode>cluster</mode>
     <name>countryPropagationForORP</name>
     <class>eu.dnetlib.dhp.countrypropagation.SparkCountryPropagationJob</class>
-    <jar>dhp-propagation-${projectVersion}.jar</jar>
+    <jar>dhp-enrichment-${projectVersion}.jar</jar>
     <spark-opts>
         --executor-cores=${sparkExecutorCores}
         --executor-memory=${sparkExecutorMemory}
@@ -345,7 +345,7 @@
     <mode>cluster</mode>
     <name>countryPropagationForSoftware</name>
     <class>eu.dnetlib.dhp.countrypropagation.SparkCountryPropagationJob</class>
-    <jar>dhp-propagation-${projectVersion}.jar</jar>
+    <jar>dhp-enrichment-${projectVersion}.jar</jar>
     <spark-opts>
         --executor-cores=${sparkExecutorCores}
         --executor-memory=${sparkExecutorMemory}
@@ -95,7 +95,7 @@
     <mode>cluster</mode>
     <name>ORCIDPropagation-PreparePhase1-Publications</name>
     <class>eu.dnetlib.dhp.orcidtoresultfromsemrel.PrepareResultOrcidAssociationStep1</class>
-    <jar>dhp-propagation-${projectVersion}.jar</jar>
+    <jar>dhp-enrichment-${projectVersion}.jar</jar>
     <spark-opts>
         --executor-cores=${sparkExecutorCores}
         --executor-memory=${sparkExecutorMemory}
@@ -111,16 +111,11 @@
         --conf spark.hadoop.mapreduce.map.speculative=false
         --conf spark.hadoop.mapreduce.reduce.speculative=false
     </spark-opts>
-    <arg>--sourcePath</arg>
-    <arg>${sourcePath}</arg>
-    <arg>--hive_metastore_uris</arg>
-    <arg>${hive_metastore_uris}</arg>
-    <arg>--resultTableName</arg>
-    <arg>eu.dnetlib.dhp.schema.oaf.Publication</arg>
-    <arg>--outputPath</arg>
-    <arg>${workingDir}/preparedInfo/targetOrcidAssoc</arg>
-    <arg>--allowedsemrels</arg>
-    <arg>${allowedsemrels}</arg>
+    <arg>--sourcePath</arg><arg>${sourcePath}</arg>
+    <arg>--hive_metastore_uris</arg><arg>${hive_metastore_uris}</arg>
+    <arg>--resultTableName</arg><arg>eu.dnetlib.dhp.schema.oaf.Publication</arg>
+    <arg>--outputPath</arg><arg>${workingDir}/preparedInfo/targetOrcidAssoc</arg>
+    <arg>--allowedsemrels</arg><arg>${allowedsemrels}</arg>
 </spark>
 <ok to="wait"/>
 <error to="Kill"/>
@@ -132,7 +127,7 @@
     <mode>cluster</mode>
     <name>ORCIDPropagation-PreparePhase1-Dataset</name>
     <class>eu.dnetlib.dhp.orcidtoresultfromsemrel.PrepareResultOrcidAssociationStep1</class>
-    <jar>dhp-propagation-${projectVersion}.jar</jar>
+    <jar>dhp-enrichment-${projectVersion}.jar</jar>
     <spark-opts>
         --executor-cores=${sparkExecutorCores}
         --executor-memory=${sparkExecutorMemory}
@@ -144,16 +139,11 @@
         --conf spark.dynamicAllocation.enabled=true
         --conf spark.dynamicAllocation.maxExecutors=${spark2MaxExecutors}
     </spark-opts>
-    <arg>--sourcePath</arg>
-    <arg>${sourcePath}</arg>
-    <arg>--hive_metastore_uris</arg>
-    <arg>${hive_metastore_uris}</arg>
-    <arg>--resultTableName</arg>
-    <arg>eu.dnetlib.dhp.schema.oaf.Dataset</arg>
-    <arg>--outputPath</arg>
-    <arg>${workingDir}/preparedInfo/targetOrcidAssoc</arg>
-    <arg>--allowedsemrels</arg>
-    <arg>${allowedsemrels}</arg>
+    <arg>--sourcePath</arg><arg>${sourcePath}</arg>
+    <arg>--hive_metastore_uris</arg><arg>${hive_metastore_uris}</arg>
+    <arg>--resultTableName</arg><arg>eu.dnetlib.dhp.schema.oaf.Dataset</arg>
+    <arg>--outputPath</arg><arg>${workingDir}/preparedInfo/targetOrcidAssoc</arg>
+    <arg>--allowedsemrels</arg><arg>${allowedsemrels}</arg>
 </spark>
 <ok to="wait"/>
 <error to="Kill"/>
@@ -165,7 +155,7 @@
     <mode>cluster</mode>
     <name>ORCIDPropagation-PreparePhase1-ORP</name>
     <class>eu.dnetlib.dhp.orcidtoresultfromsemrel.PrepareResultOrcidAssociationStep1</class>
-    <jar>dhp-propagation-${projectVersion}.jar</jar>
+    <jar>dhp-enrichment-${projectVersion}.jar</jar>
     <spark-opts>
         --executor-cores=${sparkExecutorCores}
         --executor-memory=${sparkExecutorMemory}
@@ -177,16 +167,11 @@
         --conf spark.dynamicAllocation.enabled=true
         --conf spark.dynamicAllocation.maxExecutors=${spark2MaxExecutors}
     </spark-opts>
-    <arg>--sourcePath</arg>
-    <arg>${sourcePath}</arg>
-    <arg>--hive_metastore_uris</arg>
-    <arg>${hive_metastore_uris}</arg>
-    <arg>--resultTableName</arg>
-    <arg>eu.dnetlib.dhp.schema.oaf.OtherResearchProduct</arg>
-    <arg>--outputPath</arg>
-    <arg>${workingDir}/preparedInfo/targetOrcidAssoc</arg>
-    <arg>--allowedsemrels</arg>
-    <arg>${allowedsemrels}</arg>
+    <arg>--sourcePath</arg><arg>${sourcePath}</arg>
+    <arg>--hive_metastore_uris</arg><arg>${hive_metastore_uris}</arg>
+    <arg>--resultTableName</arg><arg>eu.dnetlib.dhp.schema.oaf.OtherResearchProduct</arg>
+    <arg>--outputPath</arg><arg>${workingDir}/preparedInfo/targetOrcidAssoc</arg>
+    <arg>--allowedsemrels</arg><arg>${allowedsemrels}</arg>
 </spark>
 <ok to="wait"/>
 <error to="Kill"/>
@@ -198,7 +183,7 @@
     <mode>cluster</mode>
     <name>ORCIDPropagation-PreparePhase1-Software</name>
     <class>eu.dnetlib.dhp.orcidtoresultfromsemrel.PrepareResultOrcidAssociationStep1</class>
-    <jar>dhp-propagation-${projectVersion}.jar</jar>
+    <jar>dhp-enrichment-${projectVersion}.jar</jar>
     <spark-opts>
         --executor-cores=${sparkExecutorCores}
         --executor-memory=${sparkExecutorMemory}
@@ -210,16 +195,11 @@
         --conf spark.dynamicAllocation.enabled=true
         --conf spark.dynamicAllocation.maxExecutors=${spark2MaxExecutors}
     </spark-opts>
-    <arg>--sourcePath</arg>
-    <arg>${sourcePath}</arg>
-    <arg>--hive_metastore_uris</arg>
-    <arg>${hive_metastore_uris}</arg>
-    <arg>--resultTableName</arg>
-    <arg>eu.dnetlib.dhp.schema.oaf.Software</arg>
-    <arg>--outputPath</arg>
-    <arg>${workingDir}/preparedInfo/targetOrcidAssoc</arg>
-    <arg>--allowedsemrels</arg>
-    <arg>${allowedsemrels}</arg>
+    <arg>--sourcePath</arg><arg>${sourcePath}</arg>
+    <arg>--hive_metastore_uris</arg><arg>${hive_metastore_uris}</arg>
+    <arg>--resultTableName</arg><arg>eu.dnetlib.dhp.schema.oaf.Software</arg>
+    <arg>--outputPath</arg><arg>${workingDir}/preparedInfo/targetOrcidAssoc</arg>
+    <arg>--allowedsemrels</arg><arg>${allowedsemrels}</arg>
 </spark>
 <ok to="wait"/>
 <error to="Kill"/>
@@ -233,7 +213,7 @@
     <mode>cluster</mode>
     <name>ORCIDPropagation-PreparePhase2</name>
     <class>eu.dnetlib.dhp.orcidtoresultfromsemrel.PrepareResultOrcidAssociationStep2</class>
-    <jar>dhp-propagation-${projectVersion}.jar</jar>
+    <jar>dhp-enrichment-${projectVersion}.jar</jar>
     <spark-opts>
         --executor-cores=${sparkExecutorCores}
         --executor-memory=${sparkExecutorMemory}
@@ -245,13 +225,10 @@
         --conf spark.dynamicAllocation.enabled=true
         --conf spark.dynamicAllocation.maxExecutors=${spark2MaxExecutors}
     </spark-opts>
-    <arg>--sourcePath</arg>
-    <arg>${workingDir}/preparedInfo/targetOrcidAssoc</arg>
-    <arg>--outputPath</arg>
-    <arg>${workingDir}/preparedInfo/mergedOrcidAssoc</arg>
+    <arg>--sourcePath</arg><arg>${workingDir}/preparedInfo/targetOrcidAssoc</arg>
+    <arg>--outputPath</arg><arg>${workingDir}/preparedInfo/mergedOrcidAssoc</arg>
 </spark>
 <ok to="fork-join-exec-propagation"/>
 <!-- <ok to="End"/>-->
 <error to="Kill"/>
 </action>
@@ -268,7 +245,7 @@
     <mode>cluster</mode>
     <name>ORCIDPropagation-Publication</name>
     <class>eu.dnetlib.dhp.orcidtoresultfromsemrel.SparkOrcidToResultFromSemRelJob</class>
-    <jar>dhp-propagation-${projectVersion}.jar</jar>
+    <jar>dhp-enrichment-${projectVersion}.jar</jar>
     <spark-opts>
         --executor-cores=${sparkExecutorCores}
         --executor-memory=${sparkExecutorMemory}
@@ -284,18 +261,12 @@
         --conf spark.hadoop.mapreduce.reduce.speculative=false
         --conf spark.sql.shuffle.partitions=3840
     </spark-opts>
-    <arg>--possibleUpdatesPath</arg>
-    <arg>${workingDir}/preparedInfo/mergedOrcidAssoc</arg>
-    <arg>--sourcePath</arg>
-    <arg>${sourcePath}/publication</arg>
-    <arg>--hive_metastore_uris</arg>
-    <arg>${hive_metastore_uris}</arg>
-    <arg>--resultTableName</arg>
-    <arg>eu.dnetlib.dhp.schema.oaf.Publication</arg>
-    <arg>--outputPath</arg>
-    <arg>${outputPath}/publication</arg>
-    <arg>--saveGraph</arg>
-    <arg>${saveGraph}</arg>
+    <arg>--possibleUpdatesPath</arg><arg>${workingDir}/preparedInfo/mergedOrcidAssoc</arg>
+    <arg>--sourcePath</arg><arg>${sourcePath}/publication</arg>
+    <arg>--hive_metastore_uris</arg><arg>${hive_metastore_uris}</arg>
+    <arg>--resultTableName</arg><arg>eu.dnetlib.dhp.schema.oaf.Publication</arg>
+    <arg>--outputPath</arg><arg>${outputPath}/publication</arg>
+    <arg>--saveGraph</arg><arg>${saveGraph}</arg>
 </spark>
 <ok to="wait2"/>
 <error to="Kill"/>
@@ -306,7 +277,7 @@
     <mode>cluster</mode>
     <name>ORCIDPropagation-Dataset</name>
     <class>eu.dnetlib.dhp.orcidtoresultfromsemrel.SparkOrcidToResultFromSemRelJob</class>
-    <jar>dhp-propagation-${projectVersion}.jar</jar>
+    <jar>dhp-enrichment-${projectVersion}.jar</jar>
     <spark-opts>
         --executor-cores=${sparkExecutorCores}
         --executor-memory=${sparkExecutorMemory}
@@ -321,18 +292,12 @@
         --conf spark.hadoop.mapreduce.map.speculative=false
         --conf spark.hadoop.mapreduce.reduce.speculative=false
     </spark-opts>
-    <arg>--possibleUpdatesPath</arg>
-    <arg>${workingDir}/preparedInfo/mergedOrcidAssoc</arg>
-    <arg>--sourcePath</arg>
-    <arg>${sourcePath}/dataset</arg>
-    <arg>--hive_metastore_uris</arg>
-    <arg>${hive_metastore_uris}</arg>
-    <arg>--resultTableName</arg>
-    <arg>eu.dnetlib.dhp.schema.oaf.Dataset</arg>
-    <arg>--outputPath</arg>
-    <arg>${outputPath}/dataset</arg>
-    <arg>--saveGraph</arg>
-    <arg>${saveGraph}</arg>
+    <arg>--possibleUpdatesPath</arg><arg>${workingDir}/preparedInfo/mergedOrcidAssoc</arg>
+    <arg>--sourcePath</arg><arg>${sourcePath}/dataset</arg>
+    <arg>--hive_metastore_uris</arg><arg>${hive_metastore_uris}</arg>
+    <arg>--resultTableName</arg><arg>eu.dnetlib.dhp.schema.oaf.Dataset</arg>
+    <arg>--outputPath</arg><arg>${outputPath}/dataset</arg>
+    <arg>--saveGraph</arg><arg>${saveGraph}</arg>
 </spark>
 <ok to="wait2"/>
 <error to="Kill"/>
@@ -343,7 +308,7 @@
     <mode>cluster</mode>
     <name>ORCIDPropagation-ORP</name>
     <class>eu.dnetlib.dhp.orcidtoresultfromsemrel.SparkOrcidToResultFromSemRelJob</class>
-    <jar>dhp-propagation-${projectVersion}.jar</jar>
+    <jar>dhp-enrichment-${projectVersion}.jar</jar>
     <spark-opts>
         --executor-cores=${sparkExecutorCores}
         --executor-memory=${sparkExecutorMemory}
@@ -358,18 +323,12 @@
         --conf spark.hadoop.mapreduce.map.speculative=false
         --conf spark.hadoop.mapreduce.reduce.speculative=false
     </spark-opts>
-    <arg>--possibleUpdatesPath</arg>
-    <arg>${workingDir}/preparedInfo/mergedOrcidAssoc</arg>
-    <arg>--sourcePath</arg>
-    <arg>${sourcePath}/otherresearchproduct</arg>
-    <arg>--hive_metastore_uris</arg>
-    <arg>${hive_metastore_uris}</arg>
-    <arg>--resultTableName</arg>
-    <arg>eu.dnetlib.dhp.schema.oaf.OtherResearchProduct</arg>
-    <arg>--outputPath</arg>
-    <arg>${outputPath}/otherresearchproduct</arg>
-    <arg>--saveGraph</arg>
-    <arg>${saveGraph}</arg>
+    <arg>--possibleUpdatesPath</arg><arg>${workingDir}/preparedInfo/mergedOrcidAssoc</arg>
+    <arg>--sourcePath</arg><arg>${sourcePath}/otherresearchproduct</arg>
+    <arg>--hive_metastore_uris</arg><arg>${hive_metastore_uris}</arg>
+    <arg>--resultTableName</arg><arg>eu.dnetlib.dhp.schema.oaf.OtherResearchProduct</arg>
+    <arg>--outputPath</arg><arg>${outputPath}/otherresearchproduct</arg>
+    <arg>--saveGraph</arg><arg>${saveGraph}</arg>
 </spark>
 <ok to="wait2"/>
 <error to="Kill"/>
@@ -380,7 +339,7 @@
     <mode>cluster</mode>
     <name>ORCIDPropagation-Software</name>
     <class>eu.dnetlib.dhp.orcidtoresultfromsemrel.SparkOrcidToResultFromSemRelJob</class>
-    <jar>dhp-propagation-${projectVersion}.jar</jar>
+    <jar>dhp-enrichment-${projectVersion}.jar</jar>
     <spark-opts>
         --executor-cores=${sparkExecutorCores}
         --executor-memory=${sparkExecutorMemory}
@@ -395,22 +354,19 @@
         --conf spark.hadoop.mapreduce.map.speculative=false
         --conf spark.hadoop.mapreduce.reduce.speculative=false
     </spark-opts>
-    <arg>--possibleUpdatesPath</arg>
-    <arg>${workingDir}/preparedInfo/mergedOrcidAssoc</arg>
-    <arg>--sourcePath</arg>
-    <arg>${sourcePath}/software</arg>
-    <arg>--hive_metastore_uris</arg>
-    <arg>${hive_metastore_uris}</arg>
-    <arg>--resultTableName</arg>
-    <arg>eu.dnetlib.dhp.schema.oaf.Software</arg>
-    <arg>--outputPath</arg>
-    <arg>${outputPath}/software</arg>
-    <arg>--saveGraph</arg>
-    <arg>${saveGraph}</arg>
+    <arg>--possibleUpdatesPath</arg><arg>${workingDir}/preparedInfo/mergedOrcidAssoc</arg>
+    <arg>--sourcePath</arg><arg>${sourcePath}/software</arg>
+    <arg>--hive_metastore_uris</arg><arg>${hive_metastore_uris}</arg>
+    <arg>--resultTableName</arg><arg>eu.dnetlib.dhp.schema.oaf.Software</arg>
+    <arg>--outputPath</arg><arg>${outputPath}/software</arg>
+    <arg>--saveGraph</arg><arg>${saveGraph}</arg>
 </spark>
 <ok to="wait2"/>
 <error to="Kill"/>
 </action>

 <join name="wait2" to="End"/>

 <end name="End"/>

 </workflow-app>
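The argument reformatting in the hunks above is purely cosmetic: Oozie passes each `<arg>` element of a spark action to the job as its own command-line token, so `<arg>--sourcePath</arg><arg>${sourcePath}</arg>` on one line and the same two elements on separate lines yield an identical argv. A minimal Java sketch of the consuming side — the pairwise flag/value parsing is illustrative, not the project's actual argument parser:

import java.util.HashMap;
import java.util.Map;

public class ArgSketch {
    // Each <arg> element in the workflow XML arrives as one entry of args[],
    // regardless of whether the elements shared a line in the XML source.
    public static void main(String[] args) {
        // e.g. args = {"--sourcePath", "/tmp/graph", "--allowedsemrels", "isSupplementedBy"}
        Map<String, String> params = new HashMap<>();
        for (int i = 0; i + 1 < args.length; i += 2) {
            params.put(args[i].replaceFirst("^--", ""), args[i + 1]);
        }
        System.out.println("sourcePath = " + params.get("sourcePath"));
    }
}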
@@ -128,60 +128,59 @@
 <join name="wait" to="prepare_project_results_association"/>

 <action name="prepare_project_results_association">
     <spark xmlns="uri:oozie:spark-action:0.2">
         <master>yarn</master>
         <mode>cluster</mode>
         <name>PrepareProjectResultsAssociation</name>
         <class>eu.dnetlib.dhp.projecttoresult.PrepareProjectResultsAssociation</class>
         <jar>dhp-propagation-${projectVersion}.jar</jar>
         <spark-opts>
             --executor-cores=${sparkExecutorCores}
             --executor-memory=${sparkExecutorMemory}
             --driver-memory=${sparkDriverMemory}
             --conf spark.extraListeners=${spark2ExtraListeners}
             --conf spark.sql.queryExecutionListeners=${spark2SqlQueryExecutionListeners}
             --conf spark.yarn.historyServer.address=${spark2YarnHistoryServerAddress}
             --conf spark.eventLog.dir=${nameNode}${spark2EventLogDir}
         </spark-opts>
         <arg>--sourcePath</arg><arg>${sourcePath}/relation</arg>
         <arg>--allowedsemrels</arg><arg>${allowedsemrels}</arg>
         <arg>--hive_metastore_uris</arg><arg>${hive_metastore_uris}</arg>
         <arg>--potentialUpdatePath</arg><arg>${workingDir}/preparedInfo/potentialUpdates</arg>
         <arg>--alreadyLinkedPath</arg><arg>${workingDir}/preparedInfo/alreadyLinked</arg>
     </spark>
     <ok to="apply_propagation"/>
     <error to="Kill"/>
 </action>

 <action name="apply_propagation">
     <spark xmlns="uri:oozie:spark-action:0.2">
         <master>yarn</master>
         <mode>cluster</mode>
         <name>ProjectToResultPropagation</name>
         <class>eu.dnetlib.dhp.projecttoresult.SparkResultToProjectThroughSemRelJob</class>
-        <jar>dhp-propagation-${projectVersion}.jar</jar>
+        <jar>dhp-enrichment-${projectVersion}.jar</jar>
         <spark-opts>
             --executor-cores=${sparkExecutorCores}
             --executor-memory=${sparkExecutorMemory}
             --driver-memory=${sparkDriverMemory}
             --conf spark.extraListeners=${spark2ExtraListeners}
             --conf spark.sql.queryExecutionListeners=${spark2SqlQueryExecutionListeners}
             --conf spark.yarn.historyServer.address=${spark2YarnHistoryServerAddress}
             --conf spark.eventLog.dir=${nameNode}${spark2EventLogDir}
             --conf spark.dynamicAllocation.enabled=true
             --conf spark.dynamicAllocation.maxExecutors=${spark2MaxExecutors}
         </spark-opts>
         <arg>--saveGraph</arg><arg>${saveGraph}</arg>
         <arg>--hive_metastore_uris</arg><arg>${hive_metastore_uris}</arg>
         <arg>--outputPath</arg><arg>${outputPath}/relation</arg>
         <arg>--potentialUpdatePath</arg><arg>${workingDir}/preparedInfo/potentialUpdates</arg>
         <arg>--alreadyLinkedPath</arg><arg>${workingDir}/preparedInfo/alreadyLinked</arg>
     </spark>
     <ok to="End"/>
     <error to="Kill"/>
 </action>

 <end name="End"/>
@@ -88,7 +88,7 @@
     <mode>cluster</mode>
     <name>Prepare-Community-Result-Organization</name>
     <class>eu.dnetlib.dhp.resulttocommunityfromorganization.PrepareResultCommunitySet</class>
-    <jar>dhp-propagation-${projectVersion}.jar</jar>
+    <jar>dhp-enrichment-${projectVersion}.jar</jar>
     <spark-opts>
         --executor-cores=${sparkExecutorCores}
         --executor-memory=${sparkExecutorMemory}
@@ -122,7 +122,7 @@
     <mode>cluster</mode>
     <name>community2resultfromorganization-Publication</name>
     <class>eu.dnetlib.dhp.resulttocommunityfromorganization.SparkResultToCommunityFromOrganizationJob</class>
-    <jar>dhp-propagation-${projectVersion}.jar</jar>
+    <jar>dhp-enrichment-${projectVersion}.jar</jar>
     <spark-opts>
         --executor-cores=${sparkExecutorCores}
         --executor-memory=${sparkExecutorMemory}
@@ -151,7 +151,7 @@
     <mode>cluster</mode>
     <name>community2resultfromorganization-Dataset</name>
     <class>eu.dnetlib.dhp.resulttocommunityfromorganization.SparkResultToCommunityFromOrganizationJob</class>
-    <jar>dhp-propagation-${projectVersion}.jar</jar>
+    <jar>dhp-enrichment-${projectVersion}.jar</jar>
     <spark-opts>
         --executor-cores=${sparkExecutorCores}
         --executor-memory=${sparkExecutorMemory}
@@ -180,7 +180,7 @@
     <mode>cluster</mode>
     <name>community2resultfromorganization-ORP</name>
     <class>eu.dnetlib.dhp.resulttocommunityfromorganization.SparkResultToCommunityFromOrganizationJob</class>
-    <jar>dhp-propagation-${projectVersion}.jar</jar>
+    <jar>dhp-enrichment-${projectVersion}.jar</jar>
     <spark-opts>
         --executor-cores=${sparkExecutorCores}
         --executor-memory=${sparkExecutorMemory}
@@ -209,7 +209,7 @@
     <mode>cluster</mode>
     <name>community2resultfromorganization-Software</name>
     <class>eu.dnetlib.dhp.resulttocommunityfromorganization.SparkResultToCommunityFromOrganizationJob</class>
-    <jar>dhp-propagation-${projectVersion}.jar</jar>
+    <jar>dhp-enrichment-${projectVersion}.jar</jar>
     <spark-opts>
         --executor-cores=${sparkExecutorCores}
         --executor-memory=${sparkExecutorMemory}
@@ -99,7 +99,7 @@
     <mode>cluster</mode>
     <name>ResultToCommunitySemRel-PreparePhase1-Publications</name>
     <class>eu.dnetlib.dhp.resulttocommunityfromsemrel.PrepareResultCommunitySetStep1</class>
-    <jar>dhp-propagation-${projectVersion}.jar</jar>
+    <jar>dhp-enrichment-${projectVersion}.jar</jar>
     <spark-opts>
         --executor-cores=${sparkExecutorCores}
         --executor-memory=${sparkExecutorMemory}
@@ -128,7 +128,7 @@
     <mode>cluster</mode>
     <name>ResultToCommunitySemRel-PreparePhase1-Dataset</name>
     <class>eu.dnetlib.dhp.resulttocommunityfromsemrel.PrepareResultCommunitySetStep1</class>
-    <jar>dhp-propagation-${projectVersion}.jar</jar>
+    <jar>dhp-enrichment-${projectVersion}.jar</jar>
     <spark-opts>
         --executor-cores=${sparkExecutorCores}
         --executor-memory=${sparkExecutorMemory}
@@ -150,13 +150,14 @@
     <ok to="wait"/>
     <error to="Kill"/>
 </action>

 <action name="join_prepare_otherresearchproduct">
     <spark xmlns="uri:oozie:spark-action:0.2">
         <master>yarn</master>
         <mode>cluster</mode>
         <name>ResultToCommunitySemRel-PreparePhase1-ORP</name>
         <class>eu.dnetlib.dhp.resulttocommunityfromsemrel.PrepareResultCommunitySetStep1</class>
-        <jar>dhp-propagation-${projectVersion}.jar</jar>
+        <jar>dhp-enrichment-${projectVersion}.jar</jar>
         <spark-opts>
             --executor-cores=${sparkExecutorCores}
             --executor-memory=${sparkExecutorMemory}
@@ -185,7 +186,7 @@
     <mode>cluster</mode>
     <name>ResultToCommunitySemRel-PreparePhase1-Software</name>
     <class>eu.dnetlib.dhp.resulttocommunityfromsemrel.PrepareResultCommunitySetStep1</class>
-    <jar>dhp-propagation-${projectVersion}.jar</jar>
+    <jar>dhp-enrichment-${projectVersion}.jar</jar>
     <spark-opts>
         --executor-cores=${sparkExecutorCores}
         --executor-memory=${sparkExecutorMemory}
@@ -216,7 +217,7 @@
     <mode>cluster</mode>
     <name>ResultToCommunityEmRelPropagation-PreparePhase2</name>
     <class>eu.dnetlib.dhp.resulttocommunityfromsemrel.PrepareResultCommunitySetStep2</class>
-    <jar>dhp-propagation-${projectVersion}.jar</jar>
+    <jar>dhp-enrichment-${projectVersion}.jar</jar>
     <spark-opts>
         --executor-cores=${sparkExecutorCores}
         --executor-memory=${sparkExecutorMemory}
@@ -232,9 +233,7 @@
     <arg>--outputPath</arg><arg>${workingDir}/preparedInfo/mergedCommunityAssoc</arg>
 </spark>
 <ok to="fork-join-exec-propagation"/>
 <!-- <ok to="End"/>-->
 <error to="Kill"/>
 </action>

 <fork name="fork-join-exec-propagation">
@@ -250,7 +249,7 @@
     <mode>cluster</mode>
     <name>Result2CommunitySemRelPropagation-Publication</name>
     <class>eu.dnetlib.dhp.resulttocommunityfromsemrel.SparkResultToCommunityThroughSemRelJob</class>
-    <jar>dhp-propagation-${projectVersion}.jar</jar>
+    <jar>dhp-enrichment-${projectVersion}.jar</jar>
     <spark-opts>
         --executor-cores=${sparkExecutorCores}
         --executor-memory=${sparkExecutorMemory}
@@ -279,7 +278,7 @@
     <mode>cluster</mode>
     <name>Result2CommunitySemRelPropagation-Dataset</name>
     <class>eu.dnetlib.dhp.resulttocommunityfromsemrel.SparkResultToCommunityThroughSemRelJob</class>
-    <jar>dhp-propagation-${projectVersion}.jar</jar>
+    <jar>dhp-enrichment-${projectVersion}.jar</jar>
     <spark-opts>
         --executor-cores=${sparkExecutorCores}
         --executor-memory=${sparkExecutorMemory}
@@ -308,7 +307,7 @@
     <mode>cluster</mode>
     <name>Result2CommunitySemRelPropagation-ORP</name>
     <class>eu.dnetlib.dhp.resulttocommunityfromsemrel.SparkResultToCommunityThroughSemRelJob</class>
-    <jar>dhp-propagation-${projectVersion}.jar</jar>
+    <jar>dhp-enrichment-${projectVersion}.jar</jar>
     <spark-opts>
         --executor-cores=${sparkExecutorCores}
         --executor-memory=${sparkExecutorMemory}
@@ -337,7 +336,7 @@
     <mode>cluster</mode>
     <name>Result2CommunitySemRelPropagation-Software</name>
     <class>eu.dnetlib.dhp.resulttocommunityfromsemrel.SparkResultToCommunityThroughSemRelJob</class>
-    <jar>dhp-propagation-${projectVersion}.jar</jar>
+    <jar>dhp-enrichment-${projectVersion}.jar</jar>
     <spark-opts>
         --executor-cores=${sparkExecutorCores}
         --executor-memory=${sparkExecutorMemory}
@@ -131,7 +131,7 @@
     <mode>cluster</mode>
     <name>PrepareResultOrganizationAssociation</name>
     <class>eu.dnetlib.dhp.resulttoorganizationfrominstrepo.PrepareResultInstRepoAssociation</class>
-    <jar>dhp-propagation-${projectVersion}.jar</jar>
+    <jar>dhp-enrichment-${projectVersion}.jar</jar>
     <spark-opts>
         --executor-cores=${sparkExecutorCores}
         --executor-memory=${sparkExecutorMemory}
@@ -163,7 +163,7 @@
     <mode>cluster</mode>
     <name>resultToOrganizationFromInstRepoPropagationForPublications</name>
     <class>eu.dnetlib.dhp.resulttoorganizationfrominstrepo.SparkResultToOrganizationFromIstRepoJob</class>
-    <jar>dhp-propagation-${projectVersion}.jar</jar>
+    <jar>dhp-enrichment-${projectVersion}.jar</jar>
     <spark-opts>
         --executor-cores=${sparkExecutorCores}
         --executor-memory=${sparkExecutorMemory}
@@ -193,7 +193,7 @@
     <mode>cluster</mode>
     <name>resultToOrganizationFromInstRepoPropagationForDataset</name>
     <class>eu.dnetlib.dhp.resulttoorganizationfrominstrepo.SparkResultToOrganizationFromIstRepoJob</class>
-    <jar>dhp-propagation-${projectVersion}.jar</jar>
+    <jar>dhp-enrichment-${projectVersion}.jar</jar>
     <spark-opts>
         --executor-cores=${sparkExecutorCores}
         --executor-memory=${sparkExecutorMemory}
@@ -223,7 +223,7 @@
     <mode>cluster</mode>
     <name>resultToOrganizationFromInstRepoPropagationForORP</name>
     <class>eu.dnetlib.dhp.resulttoorganizationfrominstrepo.SparkResultToOrganizationFromIstRepoJob</class>
-    <jar>dhp-propagation-${projectVersion}.jar</jar>
+    <jar>dhp-enrichment-${projectVersion}.jar</jar>
     <spark-opts>
         --executor-cores=${sparkExecutorCores}
         --executor-memory=${sparkExecutorMemory}
@@ -253,7 +253,7 @@
     <mode>cluster</mode>
     <name>resultToOrganizationFromInstRepoPropagationForSoftware</name>
     <class>eu.dnetlib.dhp.resulttoorganizationfrominstrepo.SparkResultToOrganizationFromIstRepoJob</class>
-    <jar>dhp-propagation-${projectVersion}.jar</jar>
+    <jar>dhp-enrichment-${projectVersion}.jar</jar>
     <spark-opts>
         --executor-cores=${sparkExecutorCores}
         --executor-memory=${sparkExecutorMemory}
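For reference, every ${...} placeholder in these workflows is resolved from the job properties supplied when the workflow is submitted to Oozie. A minimal sketch of such a file — the parameter names are taken from the diffs above, while all values are purely illustrative:

# job.properties (illustrative values only)
projectVersion=1.0.0-SNAPSHOT
sparkExecutorNumber=4
sparkExecutorCores=2
sparkExecutorMemory=6G
sparkDriverMemory=4G
spark2MaxExecutors=50
nameNode=hdfs://nn.example.org:8020
spark2EventLogDir=/user/spark/spark2ApplicationHistory
sourcePath=/tmp/graph/input
outputPath=/tmp/graph/output
workingDir=/tmp/enrichment/working
allowedsemrels=isSupplementedBy;isSupplementTo
saveGraph=true
hive_metastore_uris=thrift://metastore.example.org:9083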