forked from D-Net/dnet-hadoop

Merge pull request 'Use SparkSQL in place of Hive for executing step16-createIndicatorsTables.sql of stats update wf' (#386) from stats_with_spark_sql into beta

Reviewed-on: D-Net/dnet-hadoop#386

Commit f804c58bc7
dhp-stats-update/pom.xml

@@ -8,6 +8,11 @@
     <modelVersion>4.0.0</modelVersion>
     <artifactId>dhp-stats-update</artifactId>
     <dependencies>
+        <dependency>
+            <groupId>eu.dnetlib.dhp</groupId>
+            <artifactId>dhp-common</artifactId>
+            <version>${project.version}</version>
+        </dependency>
         <dependency>
             <groupId>org.apache.spark</groupId>
             <artifactId>spark-core_${scala.binary.version}</artifactId>
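The new dhp-common dependency is presumably what pulls eu.dnetlib.dhp.oozie.RunSQLSparkJob, the driver class the reworked Step16 action below runs, onto the module's classpath.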
[File diff suppressed because it is too large]
eu/dnetlib/dhp/oa/graph/stats/oozie_app/workflow.xml

@@ -64,6 +64,26 @@
         <name>hadoop_user_name</name>
         <description>user name of the wf owner</description>
     </property>
+
+    <property>
+        <name>sparkSqlWarehouseDir</name>
+    </property>
+    <!-- General oozie workflow properties -->
+    <property>
+        <name>sparkClusterOpts</name>
+        <value>--conf spark.network.timeout=600 --conf spark.extraListeners= --conf spark.sql.queryExecutionListeners= --conf spark.yarn.historyServer.address=http://iis-cdh5-test-m3.ocean.icm.edu.pl:18088 --conf spark.eventLog.dir=hdfs://nameservice1/user/spark/applicationHistory</value>
+        <description>spark cluster-wide options</description>
+    </property>
+    <property>
+        <name>sparkResourceOpts</name>
+        <value>--executor-memory=6G --conf spark.executor.memoryOverhead=4G --executor-cores=6 --driver-memory=8G --driver-cores=4</value>
+        <description>spark resource options</description>
+    </property>
+    <property>
+        <name>sparkApplicationOpts</name>
+        <value>--conf spark.sql.shuffle.partitions=3840</value>
+        <description>spark application options</description>
+    </property>
 </parameters>

 <global>
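The three new option groups are not consumed by Hive at all: as the action diff further down shows, they are concatenated verbatim into the <spark-opts> of the new spark action, splitting cluster-wide settings (history server, event log), resource sizing (executor/driver memory and cores), and per-application tuning (shuffle partitions) into independently overridable workflow parameters.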
@@ -82,6 +102,10 @@
         <name>hive.mapjoin.followby.gby.localtask.max.memory.usage</name>
         <value>0.80</value>
     </property>
+    <property>
+        <name>oozie.action.sharelib.for.spark</name>
+        <value>${oozieActionShareLibForSpark2}</value>
+    </property>
     <property>
         <name>mapred.job.queue.name</name>
         <value>analytics</value>
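Pointing oozie.action.sharelib.for.spark at ${oozieActionShareLibForSpark2} makes Oozie ship the Spark 2 sharelib jars with the action, which is needed now that Step16 runs as a spark action rather than a hive2 one.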
@@ -322,12 +346,23 @@
     </action>

     <action name="Step16-createIndicatorsTables">
-        <hive2 xmlns="uri:oozie:hive2-action:0.1">
-            <jdbc-url>${hive_jdbc_url}</jdbc-url>
-            <script>scripts/step16-createIndicatorsTables.sql</script>
-            <param>stats_db_name=${stats_db_name}</param>
-            <param>external_stats_db_name=${external_stats_db_name}</param>
-        </hive2>
+        <spark xmlns="uri:oozie:spark-action:0.2">
+            <master>yarn</master>
+            <mode>cluster</mode>
+            <name>Step16-createIndicatorsTables</name>
+            <class>eu.dnetlib.dhp.oozie.RunSQLSparkJob</class>
+            <jar>dhp-stats-update-${projectVersion}.jar</jar>
+            <spark-opts>
+                --conf spark.sql.warehouse.dir=${sparkSqlWarehouseDir}
+                ${sparkClusterOpts}
+                ${sparkResourceOpts}
+                ${sparkApplicationOpts}
+            </spark-opts>
+            <arg>--hiveMetastoreUris</arg><arg>${hive_metastore_uris}</arg>
+            <arg>--sql</arg><arg>eu/dnetlib/dhp/oa/graph/stats/oozie_app/scripts/step16-createIndicatorsTables.sql</arg>
+            <arg>--stats_db_name</arg><arg>${stats_db_name}</arg>
+            <arg>--external_stats_db_name</arg><arg>${external_stats_db_name}</arg>
+        </spark>
         <ok to="Step16_1-definitions"/>
         <error to="Kill"/>
     </action>
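For readers unfamiliar with this pattern, the sketch below illustrates what a driver class like RunSQLSparkJob has to do to replace the hive2 action: read the bundled SQL script from the classpath, substitute the ${...} parameters passed as arguments, and execute the statements through a Hive-enabled SparkSession. This is a minimal, hypothetical reconstruction, not the actual dhp-common implementation; the package, class name, argument parsing, and the naive semicolon splitting in particular are assumptions.

package eu.dnetlib.dhp.oozie.sketch;                      // hypothetical package

import java.io.InputStream;
import java.nio.charset.StandardCharsets;
import java.util.HashMap;
import java.util.Map;

import org.apache.commons.io.IOUtils;
import org.apache.spark.sql.SparkSession;

// Hypothetical sketch of a RunSQLSparkJob-style driver. It is NOT the actual
// eu.dnetlib.dhp.oozie.RunSQLSparkJob, only an illustration of the mechanism
// the new action relies on: load a SQL script from the classpath, substitute
// ${...} parameters, and run each statement through Spark SQL.
public class RunSqlSketch {

    public static void main(String[] args) throws Exception {
        // naive "--name value" pairing; the real class presumably uses a CLI parser
        Map<String, String> params = new HashMap<>();
        for (int i = 0; i + 1 < args.length; i += 2) {
            params.put(args[i].replaceFirst("^--", ""), args[i + 1]);
        }

        // the script ships inside the workflow jar, hence a classpath resource
        String sql;
        try (InputStream in = RunSqlSketch.class.getClassLoader()
                .getResourceAsStream(params.get("sql"))) {
            if (in == null) {
                throw new IllegalArgumentException("SQL script not found: " + params.get("sql"));
            }
            sql = IOUtils.toString(in, StandardCharsets.UTF_8);
        }

        // replace ${stats_db_name}, ${external_stats_db_name}, ... placeholders
        for (Map.Entry<String, String> e : params.entrySet()) {
            sql = sql.replace("${" + e.getKey() + "}", e.getValue());
        }

        // Hive support plus the metastore URIs let Spark SQL see the same
        // databases the hive2 action used to operate on
        SparkSession spark = SparkSession.builder()
                .appName("Step16-createIndicatorsTables")
                .config("hive.metastore.uris", params.get("hiveMetastoreUris"))
                .enableHiveSupport()
                .getOrCreate();

        // execute statement by statement; splitting on ';' is a simplification
        // that would break on semicolons inside string literals
        for (String stmt : sql.split(";")) {
            if (!stmt.trim().isEmpty()) {
                spark.sql(stmt);
            }
        }

        spark.stop();
    }
}

Packaging the SQL inside the jar and parameterising the database names keeps the script essentially identical to the Hive version while swapping only the execution engine.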