Compare commits

3 Commits

3 changed files with 95 additions and 2 deletions
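
In summary, this compare view introduces Jenkins-based build automation for dnet-hadoop: a connection-properties file for the iis-cdh5-test-gw cluster, a small adjustment to the Elasticsearch settings in IndexOnESJob, and a new deployment Jenkinsfile.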

connection-properties/iis-cdh5-test-gw.properties (new file)

@@ -0,0 +1,26 @@
#dnet-hadoop connection properties for iis-cdh5-test-gw
dhp.hadoop.frontend.temp.dir=/home/jenkins
dhp.hadoop.frontend.user.name=jenkins
dhp.hadoop.frontend.host.name=iis-cdh5-test-gw.ocean.icm.edu.pl
dhp.hadoop.frontend.port.ssh=22
oozieServiceLoc=http://iis-cdh5-test-m3:11000/oozie
jobTracker=yarnRM
nameNode=hdfs://nameservice1
oozie.execution.log.file.location = target/extract-and-run-on-remote-host.log
maven.executable=mvn
#common spark related
sparkDriverMemory=7G
sparkExecutorMemory=7G
sparkExecutorCores=4
#spark 1 related
oozieActionShareLibForSpark1=spark
spark1YarnHistoryServerAddress=http://iis-cdh5-test-gw.ocean.icm.edu.pl:18088
spark1EventLogDir=/user/spark/applicationHistory
#spark 2 related
oozieActionShareLibForSpark2=spark2
spark2YarnHistoryServerAddress=http://iis-cdh5-test-gw.ocean.icm.edu.pl:18089
spark2EventLogDir=/user/spark/spark2ApplicationHistory
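
These properties configure the SSH gateway, Oozie service, and Spark defaults used when building and submitting Oozie packages from the Jenkins host; the Jenkinsfile below passes this file to Maven through the dhpConnectionProperties property.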

IndexOnESJob.java

@@ -48,13 +48,12 @@ public class IndexOnESJob {
final JavaRDD<String> inputRdd = ClusterUtils
    .readPath(spark, eventsPath, Event.class)
    // .limit(10000) // TODO REMOVE
    .map(IndexOnESJob::eventAsJsonString, Encoders.STRING())
    .javaRDD();
final Map<String, String> esCfg = new HashMap<>();
// esCfg.put("es.nodes", "10.19.65.51, 10.19.65.52, 10.19.65.53, 10.19.65.54");
esCfg.put("es.index.auto.create", "false");
esCfg.put("es.nodes", indexHost);
esCfg.put("es.mapping.id", "eventId"); // THE PRIMARY KEY
esCfg.put("es.batch.write.retry.count", "8");

jenkins/deploy/Jenkinsfile (vendored, new file)

@@ -0,0 +1,68 @@
pipeline {
    agent any
    options {
        buildDiscarder(logRotator(numToKeepStr: "5"))
        timeout time: 60, unit: "MINUTES"
    }
    stages {
        stage("Install") {
            steps {
                configFileProvider([configFile(fileId: "dnet-hadoop-build.properties", variable: 'BUILD_PROPERTIES')]) {
                    load "${BUILD_PROPERTIES}"
                    withEnv(["JAVA_HOME=${ tool type: 'jdk', name: "${JDK_VERSION}" }",
                             "PATH+MAVEN=${tool type: 'maven', name: "${MAVEN_VERSION}"}/bin:${env.JAVA_HOME}/bin"]) {
                        sh """
                            mvn clean install -DskipTests
                        """
                    }
                }
            }
        }
        stage("00-Upload-aggregatorGraph") {
            steps {
                configFileProvider([configFile(fileId: "dnet-hadoop-build.properties", variable: 'BUILD_PROPERTIES')]) {
                    load "${BUILD_PROPERTIES}"
                    withEnv(["JAVA_HOME=${ tool type: 'jdk', name: "${JDK_VERSION}" }",
                             "PATH+MAVEN=${tool type: 'maven', name: "${MAVEN_VERSION}"}/bin:${env.JAVA_HOME}/bin"]) {
                        sshagent(['dnet-hadoop-deployment-credentials']) {
                            sh """
                                cd dhp-workflows/dhp-graph-mapper && \
                                mvn clean package -Poozie-package,deploy \
                                    -Dworkflow.source.dir=eu/dnetlib/dhp/oa/graph/raw_all \
                                    -DdhpConnectionProperties=${WORKSPACE}/connection-properties/iis-cdh5-test-gw.properties
                            """
                        }
                    }
                }
            }
        }
        // stage("Deploy") {
        //     steps {
        //         sshagent(['iis-deployment-credentials']) {
        //             sh """
        //                 WORKING_DIR=/tmp/jenkins/`date +%Y-%m-%d` && \
        //                 ssh -o StrictHostKeyChecking=no jenkins@iis-cdh5-test-gw.ocean.icm.edu.pl "\
        //                     rm -rf \${WORKING_DIR} && \
        //                     mkdir -p \${WORKING_DIR}
        //                 " && \
        //                 scp -r ${WORKSPACE}/jenkins/deploy/ jenkins@iis-cdh5-test-gw.ocean.icm.edu.pl:\${WORKING_DIR} && \
        //                 ssh -o StrictHostKeyChecking=no jenkins@iis-cdh5-test-gw.ocean.icm.edu.pl "\
        //                     cd \${WORKING_DIR}/deploy && \
        //                     sh deploy_release.sh
        //                 "
        //             """
        //         }
        //     }
        // }
    }
    post {
        always {
            cleanWs()
        }
    }
}
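
Note that the Jenkins load step executes the referenced file as a Groovy script, so the dnet-hadoop-build.properties managed file is presumably a small Groovy snippet that sets env.JDK_VERSION and env.MAVEN_VERSION for the tool lookups above; the actual values are site-specific and not part of this change.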