forked from antonis.lempesis/dnet-hadoop

Merge branch 'master' of https://code-repo.d4science.org/D-Net/dnet-hadoop

commit 3af2b8d700

@@ -10,6 +10,37 @@
     <artifactId>dhp-dedup-scholexplorer</artifactId>
+
+    <build>
+        <plugins>
+            <plugin>
+                <groupId>net.alchim31.maven</groupId>
+                <artifactId>scala-maven-plugin</artifactId>
+                <version>4.0.1</version>
+                <executions>
+                    <execution>
+                        <id>scala-compile-first</id>
+                        <phase>initialize</phase>
+                        <goals>
+                            <goal>add-source</goal>
+                            <goal>compile</goal>
+                        </goals>
+                    </execution>
+                    <execution>
+                        <id>scala-test-compile</id>
+                        <phase>process-test-resources</phase>
+                        <goals>
+                            <goal>testCompile</goal>
+                        </goals>
+                    </execution>
+                </executions>
+                <configuration>
+                    <scalaVersion>${scala.version}</scalaVersion>
+                </configuration>
+            </plugin>
+        </plugins>
+
+    </build>
 
     <dependencies>
 
         <dependency>
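The block above wires scala-maven-plugin 4.0.1 into the dhp-dedup-scholexplorer build: add-source and compile run at the initialize phase so Scala sources are compiled ahead of the Java ones, and testCompile is bound to process-test-resources. As a minimal illustration of what this enables, any Scala file dropped under the module's src/main/scala would now be compiled; the package and object name below are made up for the example:

```scala
// Hypothetical file under src/main/scala, picked up by the scala-compile-first execution above.
package example.sketch

object ScalaBuildCheck {
  def main(args: Array[String]): Unit =
    println(s"compiled with Scala ${scala.util.Properties.versionNumberString}")
}
```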
@@ -0,0 +1,3 @@
+# dnet-graph-mapper
+Dnet-graph-mapper is a DNET module responsible
+for importing the first version of the graph into the Hadoop cluster.
@@ -1,11 +1,20 @@
 <workflow-app name="Create Raw Graph Step 1: import Entities from aggregator to HDFS" xmlns="uri:oozie:workflow:0.5">
     <parameters>
+        <property>
+            <name>reuseContent</name>
+            <value>false</value>
+            <description>should import content from the aggregator or reuse a previous version</description>
+        </property>
         <property>
             <name>workingPath</name>
             <description>the working dir base path</description>
         </property>
         <property>
-            <name>targetPath</name>
+            <name>targetXMLPath</name>
             <description>the graph Raw base path</description>
         </property>
+        <property>
+            <name>targetEntityPath</name>
+            <description>the graph Raw base path</description>
+        </property>
         <property>
@@ -29,12 +38,20 @@
             <description>mongo database</description>
         </property>
         <property>
-            <name>user</name>
-            <description>HDFS user</description>
+            <name>entity</name>
+            <description>the entity type</description>
         </property>
     </parameters>
 
-    <start to="ResetWorkingPath"/>
+    <start to="ReuseContent"/>
 
+    <decision name="ReuseContent">
+        <switch>
+            <case to="ResetWorkingPath">${wf:conf('reuseContent') eq false}</case>
+            <case to="ResetTargetPath">${wf:conf('reuseContent') eq true}</case>
+            <default to="ResetWorkingPath"/>
+        </switch>
+    </decision>
+
 
     <kill name="Kill">
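For context, the new ReuseContent decision node routes the run on the reuseContent flag: false means re-import everything from the aggregator, true means keep the previously imported XML and only rebuild the entities. A minimal Scala sketch of that routing (a hypothetical helper, not part of the codebase):

```scala
// Branching encoded by the <decision name="ReuseContent"> switch above;
// Oozie evaluates ${wf:conf('reuseContent')}, here reduced to a Boolean.
def nextNode(reuseContent: Boolean): String =
  if (reuseContent) "ResetTargetPath"  // reuse imported XML, only clear the entity output
  else "ResetWorkingPath"              // wipe the working dir and re-import from the aggregator
```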
@@ -43,8 +60,8 @@
 
     <action name="ResetWorkingPath">
         <fs>
-            <delete path='${targetPath}'/>
-            <mkdir path='${workingPath}'/>
+            <delete path='${workingPath}'/>
+            <mkdir path='${workingPath}/input'/>
         </fs>
         <ok to="ImportEntitiesFromMongo"/>
         <error to="Kill"/>
@@ -56,9 +73,8 @@
             <job-tracker>${jobTracker}</job-tracker>
             <name-node>${nameNode}</name-node>
             <main-class>eu.dnetlib.dhp.sx.graph.ImportDataFromMongo</main-class>
-            <arg>-t</arg><arg>${targetPath}</arg>
+            <arg>-t</arg><arg>${targetXMLPath}</arg>
             <arg>-n</arg><arg>${nameNode}</arg>
-            <arg>-u</arg><arg>${user}</arg>
             <arg>-h</arg><arg>${dbhost}</arg>
             <arg>-p</arg><arg>27017</arg>
             <arg>-dn</arg><arg>${dbName}</arg>
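The java action above runs eu.dnetlib.dhp.sx.graph.ImportDataFromMongo with the Mongo connection settings and writes the aggregator records under ${targetXMLPath} on HDFS. The sketch below is only a simplified, hypothetical rendering of that idea (the record field name "body" and the single plain-text output file are assumptions, not the real implementation):

```scala
import java.io.PrintWriter
import java.net.URI

import com.mongodb.client.MongoClients
import org.apache.hadoop.conf.Configuration
import org.apache.hadoop.fs.{FileSystem, Path}

// Hypothetical sketch: dump the records of one Mongo collection into a file on HDFS.
object MongoToHdfsSketch {
  def main(args: Array[String]): Unit = {
    val Array(targetPath, nameNode, host, port, dbName, collection) = args
    val mongo = MongoClients.create(s"mongodb://$host:$port")
    val fs    = FileSystem.get(URI.create(nameNode), new Configuration())
    val out   = new PrintWriter(fs.create(new Path(targetPath)))
    try {
      val cursor = mongo.getDatabase(dbName).getCollection(collection).find().iterator()
      while (cursor.hasNext)
        out.println(cursor.next().getString("body")) // "body" field name is an assumption
    } finally {
      out.close()
      mongo.close()
    }
  }
}
```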
@@ -66,6 +82,33 @@
             <arg>-l</arg><arg>${layout}</arg>
             <arg>-i</arg><arg>${interpretation}</arg>
         </java>
+        <ok to="ResetTargetPath"/>
         <error to="Kill"/>
     </action>
+
+    <action name="ResetTargetPath">
+        <fs>
+            <delete path='${targetEntityPath}'/>
+        </fs>
+        <ok to="ConvertXML2Entity"/>
+        <error to="Kill"/>
+    </action>
+
+    <action name="ConvertXML2Entity">
+        <spark xmlns="uri:oozie:spark-action:0.2">
+            <job-tracker>${jobTracker}</job-tracker>
+            <name-node>${nameNode}</name-node>
+            <master>yarn-cluster</master>
+            <mode>cluster</mode>
+            <name>Import ${entity} and related entities</name>
+            <class>eu.dnetlib.dhp.sx.graph.SparkScholexplorerGraphImporter</class>
+            <jar>dhp-graph-mapper-${projectVersion}.jar</jar>
+            <spark-opts>--executor-memory ${sparkExecutorMemory} --driver-memory=${sparkDriverMemory} ${sparkExtraOPT}</spark-opts>
+            <arg>-mt</arg> <arg>yarn-cluster</arg>
+            <arg>--sourcePath</arg><arg>${targetXMLPath}</arg>
+            <arg>--targetPath</arg><arg>${targetEntityPath}</arg>
+            <arg>--entity</arg><arg>${entity}</arg>
+        </spark>
+        <ok to="End"/>
+        <error to="Kill"/>
+    </action>
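The new ConvertXML2Entity action submits eu.dnetlib.dhp.sx.graph.SparkScholexplorerGraphImporter on YARN to turn the XML records under ${targetXMLPath} into scholexplorer entities at ${targetEntityPath}. The sketch below only illustrates the shape of such a job (positional arguments plus a read-map-write pass over the records); the conversion itself is a placeholder, not the actual class:

```scala
import org.apache.spark.sql.SparkSession

// Hypothetical sketch of an XML-to-entity Spark job driven by sourcePath/targetPath/entity.
object XmlToEntitySketch {
  def main(args: Array[String]): Unit = {
    // naive positional arguments standing in for the real --sourcePath/--targetPath/--entity parser
    val Array(sourcePath, targetPath, entity) = args
    val spark = SparkSession.builder().appName(s"Import $entity and related entities").getOrCreate()

    spark.sparkContext
      .textFile(sourcePath)                                          // one serialized XML record per line (assumption)
      .map(xml => s"""{"type":"$entity","length":${xml.length}}""")  // placeholder for the real XML-to-entity mapping
      .saveAsTextFile(targetPath)

    spark.stop()
  }
}
```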