forked from D-Net/dnet-hadoop
Fix workflow application path
parent 26328e2a0d
commit 8ef718c363
@@ -24,3 +24,13 @@ mvn package -Poozie-package,deploy,run -Dworkflow.source.dir=eu/dnetlib/dhp/oa/g
```

+Note: edit the property `bip.ranker.tag` of the `pom.xml` file to specify the tag of [BIP-Ranker](https://github.com/athenarc/Bip-Ranker) that you want to use.

+Job info and logs:
+```
+export OOZIE_URL=http://iis-cdh5-test-m3:11000/oozie
+oozie job -info <jobId>
+oozie job -log <jobId>
+```

+where `jobId` is the id of the job returned by the `run_workflow.sh` script.
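The same info/log lookups can also be scripted against Oozie's v1 REST API instead of the CLI. A minimal sketch, assuming the `requests` package and the `OOZIE_URL` shown above; the job id is the one printed by `run_workflow.sh`:

```python
# Sketch: poll an Oozie job over the v1 REST API (mirrors `oozie job -info/-log`).
import sys
import time
import requests

OOZIE_URL = "http://iis-cdh5-test-m3:11000/oozie"

def wait_for_job(job_id: str, poll_seconds: int = 60) -> str:
    """Poll /v1/job/<jobId>?show=info until the workflow leaves PREP/RUNNING."""
    while True:
        info = requests.get(f"{OOZIE_URL}/v1/job/{job_id}", params={"show": "info"}).json()
        status = info["status"]
        print(f"{job_id}: {status}")
        if status not in ("PREP", "RUNNING"):
            return status
        time.sleep(poll_seconds)

def fetch_log(job_id: str) -> str:
    """Equivalent of `oozie job -log <jobId>`: the job log as plain text."""
    return requests.get(f"{OOZIE_URL}/v1/job/{job_id}", params={"show": "log"}).text

if __name__ == "__main__":
    job_id = sys.argv[1]          # the id printed by run_workflow.sh
    if wait_for_job(job_id) != "SUCCEEDED":
        print(fetch_log(job_id))  # dump the log when the run did not succeed
```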
@@ -76,7 +76,7 @@ bipScorePath=${workingDir}/openaire_universe_scores/
checkpointDir=${nameNode}/${workingDir}/check/

# The directory for the doi-based bip graph
-bipGraphFilePath=${nameNode}/${workingDir}/bipdbv8_graph
+# bipGraphFilePath=${nameNode}/${workingDir}/bipdbv8_graph

# The folder from which synonyms of openaire-ids are read
# openaireDataInput=${nameNode}/tmp/beta_provision/graph/21_graph_cleaned/
@@ -89,9 +89,12 @@ synonymFolder=${nameNode}/${workingDir}/openaireid_to_dois/
openaireGraphInputPath=${nameNode}/${workingDir}/openaire_id_graph

# The workflow application path
-wfAppPath=${nameNode}/${oozieWorkflowPath}
+wfAppPath=${oozieTopWfApplicationPath}

# The following is needed as a property of a workflow
-oozie.wf.application.path=${wfAppPath}
+#oozie.wf.application.path=${wfAppPath}
+oozie.wf.application.path=${oozieTopWfApplicationPath}


# Path where the final output should be?
actionSetOutputPath=${workingDir}/bip_actionsets/
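For context on why this property matters: at submission time Oozie reads `workflow.xml` from exactly the HDFS directory named by `oozie.wf.application.path`, so it has to match where the oozie-package deploy step actually placed the application, which is what the switch to `${oozieTopWfApplicationPath}` achieves. A minimal submission sketch over the Oozie v1 REST API; all host, user, and path values are hypothetical placeholders standing in for the real job properties:

```python
# Sketch: submit the workflow via Oozie's v1 REST API. The essential point is
# that oozie.wf.application.path names the HDFS directory holding the deployed
# workflow.xml; every concrete value below is a hypothetical placeholder.
import requests

OOZIE_URL = "http://iis-cdh5-test-m3:11000/oozie"
WF_APP_PATH = "hdfs://nameservice1/user/dnet/bip_ranking/oozie_app"  # hypothetical deployed app path

properties = {
    "user.name": "dnet",                 # hypothetical submitting user
    "nameNode": "hdfs://nameservice1",   # hypothetical
    "oozie.wf.application.path": WF_APP_PATH,
}

# Oozie expects the job configuration as a Hadoop-style XML document.
config_xml = "<configuration>" + "".join(
    f"<property><name>{k}</name><value>{v}</value></property>"
    for k, v in properties.items()
) + "</configuration>"

resp = requests.post(
    f"{OOZIE_URL}/v1/jobs",
    params={"action": "start"},          # submit and start in one call
    data=config_xml,
    headers={"Content-Type": "application/xml;charset=UTF-8"},
)
print(resp.json()["id"])                 # the <jobId> to use with `oozie job -info` / `-log`
```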
@@ -126,7 +126,7 @@
<!-- number of partitions to be used on joins -->
<arg>${sparkShufflePartitions}</arg>
<!-- This needs to point to the file on the hdfs i think -->
-<file>${wfAppPath}/CC.py#CC.py</file>
+<file>${wfAppPath}/bip-ranker/CC.py#CC.py</file>
</spark>

<!-- Do this after finishing okay -->
@@ -171,7 +171,7 @@
<arg>${sparkShufflePartitions}</arg>
<arg>${checkpointDir}</arg>
<!-- This needs to point to the file on the hdfs i think -->
-<file>${wfAppPath}/TAR.py#TAR.py</file>
+<file>${wfAppPath}/bip-ranker/TAR.py#TAR.py</file>
</spark>

<!-- Do this after finishing okay -->
@@ -216,7 +216,7 @@
<arg>${sparkShufflePartitions}</arg>
<arg>3</arg>
<!-- This needs to point to the file on the hdfs i think -->
-<file>${wfAppPath}/CC.py#CC.py</file>
+<file>${wfAppPath}/bip-ranker/CC.py#CC.py</file>
</spark>

<!-- Do this after finishing okay -->
@@ -274,7 +274,7 @@
<arg>${sparkShufflePartitions}</arg>
<arg>dfs</arg>
<!-- This needs to point to the file on the hdfs i think -->
-<file>${wfAppPath}/PageRank.py#PageRank.py</file>
+<file>${wfAppPath}/bip-ranker/PageRank.py#PageRank.py</file>
</spark>

<!-- Do this after finishing okay -->
@@ -324,7 +324,7 @@
<arg>${sparkShufflePartitions}</arg>
<arg>dfs</arg>
<!-- This needs to point to the file on the hdfs i think -->
-<file>${wfAppPath}/AttRank.py#AttRank.py</file>
+<file>${wfAppPath}/bip-ranker/AttRank.py#AttRank.py</file>
</spark>

<!-- Do this after finishing okay -->
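All five workflow hunks make the same fix: each Spark action ships its ranking script through a `<file>` element, where the `#CC.py`-style fragment is the link name the script gets in the action's working directory, so the element has to reference the script's real HDFS location, now under the `bip-ranker/` subdirectory of the workflow application path. A small sanity-check sketch for after deployment; the application path is hypothetical and the `hdfs` CLI is assumed to be on the PATH:

```python
# Sketch: verify the BIP-Ranker scripts referenced by the <file> elements exist
# under the bip-ranker/ subdirectory of the (hypothetical) workflow app path.
# `hdfs dfs -test -e <path>` exits with 0 when the path exists.
import subprocess

WF_APP_PATH = "hdfs://nameservice1/user/dnet/bip_ranking/oozie_app"  # hypothetical ${wfAppPath}
SCRIPTS = ["CC.py", "TAR.py", "PageRank.py", "AttRank.py"]

for script in SCRIPTS:
    path = f"{WF_APP_PATH}/bip-ranker/{script}"
    exists = subprocess.run(["hdfs", "dfs", "-test", "-e", path]).returncode == 0
    print(f"{'OK     ' if exists else 'MISSING'} {path}")
```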