#!groovy
import groovy.io.FileType
import static groovy.io.FileType.FILES
import jenkins.model.*
import hudson.FilePath
/**
* Deploy components to the D4Science Infrastructure (dev-only)
*
* Roberto Cirillo (ISTI-CNR)
*/
// related jenkins job: https://jenkins.d4science.org/job/gCubeDeployer/
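// Operating modes (inferred from the stage conditions below; IS_CRON_TMP is
// assumed to be set outside this Jenkinsfile, e.g. as a global Jenkins
// environment variable):
//  - IS_CRON_TMP=True:  upstream/manual triggers only queue the component in a
//    deploy file; the scheduled (cron) run then deploys everything queued there.
//  - IS_CRON_TMP=False: upstream/manual triggers deploy immediately.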
def agent_root_folder = '/var/lib/jenkins'
//def agent_deploy_filename = 'deploy.${env.BUILD_NUMBER}.csv'
//def agent_deploy_backup_filename = 'deploy.${env.BUILD_NUMBER}.bck'
def deployList
def backupList
if (params.deployFile) {
    println "Using custom deploy file"
    deployList = params.deployFile
}
//locate the targetHost file
//String targetHostURL = "https://code-repo.d4science.org/gCubeCI/gCubeDeployer/raw/branch/master/open/gcube-${gCube_release_version}.yaml"
//if (verbose)
// println "Querying ${targetHostURL}"
//load the release file
//def text = targetHostURL.toURL().getText()
pipeline {
    agent {
        label 'ansible'
    }
    options {
        ansiColor('xterm')
    }
    triggers {
        // every fifteen minutes (perhaps at :07, :22, :37, :52)
        // cron('H/15 * * * *')
        // twice a day inside the 9 AM - 5 PM window on weekdays (e.g. around 9:38 AM and 3:38 PM)
        cron('H H(9-16)/6 * * 1-5')
    }
    environment {
        AGENT_ROOT_FOLDER = "${agent_root_folder}"
        DEPLOY_ROOT_FOLDER = "${agent_root_folder}/ansible-repos/ansible-playbooks/d4science-ghn-cluster"
        DEPLOY_FILE_ROOT_FOLDER = "${agent_root_folder}/CD/"
        PIPELINE_BUILD_NUMBER = "${env.BUILD_NUMBER}"
        DEPLOY_FILE = "${DEPLOY_FILE_ROOT_FOLDER}deploy.${PIPELINE_BUILD_NUMBER}.csv"
        BACKUP_FILE = "${DEPLOY_FILE_ROOT_FOLDER}deploy.${PIPELINE_BUILD_NUMBER}.bck"
        TRIGGER_JOB = "${params.TRIGGER_JOB}"
        TRIGGER_VERSION = "${params.TRIGGER_VERSION}"
        TRIGGER_HOST = "${params.TRIGGER_HOST}"
        TRIGGER_CD = "${params.TRIGGER_CD}"
    }
    parameters {
        string(name: 'TRIGGER_JOB',
               defaultValue: '',
               description: 'Name of the service or job to deploy')
        string(name: 'TRIGGER_VERSION',
               defaultValue: '',
               description: 'Service version to deploy')
        string(name: 'TRIGGER_HOST',
               defaultValue: '',
               description: 'Target host / host group where the service will be deployed')
        booleanParam(name: 'TRIGGER_CD',
               defaultValue: true,
               description: 'Set to false to skip the current deploy')
        string(name: 'gCube_release_version',
               defaultValue: '',
               description: 'Leave blank if executed outside a gCube release')
    }
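    // Example of how an upstream pipeline could trigger a deploy here
    // (hypothetical component/host values, shown for illustration only):
    // build(job: 'gCubeDeployer',
    //       parameters: [string(name: 'TRIGGER_JOB', value: 'my-service'),
    //                    string(name: 'TRIGGER_VERSION', value: '1.0.0-SNAPSHOT'),
    //                    string(name: 'TRIGGER_HOST', value: 'myhost.dev.d4science.org')])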
    stages {
        stage('Initialize environment') {
            steps {
                sh '''
                    mkdir -p ${DEPLOY_FILE_ROOT_FOLDER}
                    mkdir -p "${WORKSPACE}/CD"
                    # move any pending deploy files from the agent folder into the workspace
                    find "${DEPLOY_FILE_ROOT_FOLDER}" -type f -exec mv --target-directory="${WORKSPACE}/CD" '{}' +
                '''
            }
        }
        stage('Deploy from system') {
            when {
                allOf {
                    triggeredBy 'TimerTrigger'
                    // maybe we can add a new condition in order to consider the manual execution of this pipeline
                    environment name: 'IS_CRON_TMP', value: 'True'
                }
            }
            steps {
                echo 'Cron build enabled. Deploy from system ongoing'
                script {
                    // parse the deploy file and extract the components to deploy
                    // def components = getComponentsFromCSV(deployList)
                    println "Going to check the deploy file in ${DEPLOY_FILE_ROOT_FOLDER}"
                    def components = []
                    readCSV(file: 'CD/deploy.csv').each { record ->
                        if (record.get(0).startsWith('#'))
                            return
                        components.add([
                            name   : record.get(0),
                            version: record.get(1),
                            host   : record.get(2)
                        ])
                    }
                    println(components)
                    if (components.size() > 0) {
                        def componentSet = components.toSet()
                        for (component in componentSet) {
                            stage("${component.name}") {
                                println "Deploy ongoing of component: $component"
                                catchError(buildResult: 'UNSTABLE', stageResult: 'UNSTABLE') {
                                    checkup("${component.name}", "${component.version}", "${component.host}")
                                    deploy("${component.name}", "${component.version}", "${component.host}")
                                }
                            }
                        }
                    } else {
                        println "No components found"
                    }
                    cleanup(DEPLOY_FILE, BACKUP_FILE)
                }
            }
        }
        stage('Nothing to do') {
            when {
                allOf {
                    triggeredBy 'TimerTrigger'
                    environment name: 'IS_CRON_TMP', value: 'False'
                }
            }
            steps {
                echo 'Do nothing: cron build disabled'
            }
        }
        stage('New pending deploy') {
            when {
                environment name: 'IS_CRON_TMP', value: 'True'
                anyOf {
                    triggeredBy 'BuildUpstreamCause'
                    triggeredBy 'UpstreamCause'
                    triggeredBy 'UserIdCause'
                }
            }
            steps {
                sh '''
                    echo "Cron build enabled. New deploy of ${TRIGGER_JOB} - ${TRIGGER_VERSION} will be added to the deploy file"
                    touch ${DEPLOY_FILE};
                    if grep -q "${TRIGGER_JOB}" ${DEPLOY_FILE}; then
                        echo "component ${TRIGGER_JOB} already added. Nothing to add."
                    else
                        echo "${TRIGGER_JOB},${TRIGGER_VERSION},${TRIGGER_HOST}" >> ${DEPLOY_FILE}
                    fi
                '''
                // this catchError always fails on purpose: it marks the build UNSTABLE to tell
                // the user that the deploy has only been queued, not performed yet
                catchError(buildResult: 'UNSTABLE', stageResult: 'UNSTABLE') {
                    sh 'exit 126'
                }
            }
        }
        stage('Deploy from job') {
            when {
                environment name: 'IS_CRON_TMP', value: 'False'
                expression {
                    env.TRIGGER_CD.toBoolean()
                }
                anyOf {
                    triggeredBy 'BuildUpstreamCause'
                    triggeredBy 'UpstreamCause'
                }
            }
            steps {
                echo "Cron build disabled. New deploy of ${params.TRIGGER_JOB} - ${params.TRIGGER_VERSION} ongoing"
                ansiColor('xterm') {
                    catchError(buildResult: 'FAILURE', stageResult: 'FAILURE') {
                        checkup("${params.TRIGGER_JOB}", "${params.TRIGGER_VERSION}", "${params.TRIGGER_HOST}")
                        deploy("${params.TRIGGER_JOB}", "${params.TRIGGER_VERSION}", "${params.TRIGGER_HOST}")
                    }
                }
            }
        }
        stage('Deploy manually') {
            when {
                allOf {
                    environment name: 'IS_CRON_TMP', value: 'False'
                    triggeredBy 'UserIdCause'
                }
            }
            steps {
                echo "Cron build disabled. Pipeline executed manually. New deploy of ${params.TRIGGER_JOB} - ${params.TRIGGER_VERSION} ongoing"
                ansiColor('xterm') {
                    catchError(buildResult: 'FAILURE', stageResult: 'FAILURE') {
                        checkup("${params.TRIGGER_JOB}", "${params.TRIGGER_VERSION}", "${params.TRIGGER_HOST}")
                        deploy("${params.TRIGGER_JOB}", "${params.TRIGGER_VERSION}", "${params.TRIGGER_HOST}")
                    }
                }
            }
        }
    }
    post {
        always {
            script {
                sh '''
                    echo 'Sending report'
                '''
                //cat ./${ACTION_DEPLOY_FILE}.csv
            }
        }
        success {
            echo 'The deploy pipeline worked!'
            emailext attachLog: true, //attachmentsPattern: "**/${ACTION_DEPLOY_FILE}.csv",
                to: 'roberto.cirillo@isti.cnr.it',
                subject: '[Jenkins-gCubeDeployer] Deploy report',
                body: "${currentBuild.fullDisplayName}. Build time: ${currentBuild.durationString}. See ${env.BUILD_URL}."
        }
        failure {
            echo 'The deploy pipeline has failed'
            emailext attachLog: true,
                to: 'roberto.cirillo@isti.cnr.it',
                subject: "[Jenkins-gCubeDeployer] deploy ${currentBuild.fullDisplayName} failed",
                body: "Something is wrong with ${env.BUILD_URL}"
        }
    }
}
//parse a csv file with rows formatted as: ComponentName,ComponentVersion,ComponentHost
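// e.g. a deploy file could look like this (hypothetical values):
//   #name,version,host
//   my-service,1.0.0-SNAPSHOT,myhost.dev.d4science.org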
def getComponentsFromCSV(def deployList) {
    def components = []
    if (fileExists("${deployList}")) {
        echo 'file found'
        readFile("${deployList}").split('\n').each { line ->
            if (line.startsWith('#') || !line.trim())
                return
            def fields = line.split(',')
            components.add([
                name   : fields[0],
                version: fields[1],
                host   : fields[2]
            ])
        }
    } else {
        echo 'File not found. Failing.'
    }
    return components
}
//launch the ansible deploy script
def deploy(String service, String version, String host) {
    println("Going to deploy the service " + service + " with version: " + version + " on target: " + host)
    // returnStatus (not returnStdout) so the script's exit code is captured
    def statusCode = sh(script: "cd $DEPLOY_ROOT_FOLDER; ./deployService.sh $service $version $host;", returnStatus: true)
    println("Deploy ended with exit code: " + statusCode)
    if (statusCode != 0) {
        error("Deploy of " + service + " failed with exit code " + statusCode)
    }
}
//check the input parameters before deploying: only SNAPSHOT or latest versions are deployable here (dev-only)
def checkup(String service, String version, String host) {
    sh("""
        case "$version" in
            *SNAPSHOT) echo "version contains SNAPSHOT" ;;
            *latest)   echo "version contains latest" ;;
            *)         echo "version is neither SNAPSHOT nor latest. EXIT WITHOUT DEPLOY"; exit 1 ;;
        esac
    """)
}
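// e.g. checkup('my-service', '1.0.0-SNAPSHOT', 'myhost') passes, while
// checkup('my-service', '1.0.0', 'myhost') exits 1 and fails the stage
// (hypothetical names, shown for illustration only)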
//back up and reset the local deploy file
def cleanup(def DEPLOY_FILE, def BACKUP_FILE) {
    sh '''
        echo "cleanup $DEPLOY_FILE";
        if [ -f ${DEPLOY_FILE} ]; then
            if [ -f ${BACKUP_FILE} ]; then
                echo "backup found: ${BACKUP_FILE} going to replace it";
                rm ${BACKUP_FILE};
            fi
            mv ${DEPLOY_FILE} ${BACKUP_FILE};
        else
            echo "no deploy file found; nothing to clean up"
        fi
    '''
}
//experimental test: collect components from every .csv file in a directory.
//Note: java.io.File runs on the Jenkins controller, not on the agent (see the
//commented FilePath alternative below)
@NonCPS
def getComponentsFromCSVDir(def dirPath) {
    def components = []
    File folder = new File(dirPath)
    // folder = new FilePath(Jenkins.getInstance().getComputer(env['NODE_NAME']).getChannel(), dirPath);
    if (folder.exists()) {
        println("processing folder " + folder)
        folder.eachFileRecurse(FileType.FILES) { file ->
            println("checking file: " + file)
            // only process files with a .csv extension
            if (file.name.endsWith(".csv")) {
                println("processing file " + file)
                file.text.split('\n').each { line ->
                    if (line.startsWith('#') || !line.trim())
                        return
                    def fields = line.split(',')
                    components.add([
                        name   : fields[0],
                        version: fields[1],
                        host   : fields[2]
                    ])
                }
            }
            println("removing current deploy file: " + file.name)
            // remove the file here if possible
            // file.delete();
        }
    }
    return components
}
//variant of the above that defers file deletion until after parsing
@NonCPS
def getComponentsFromCSVDir2(def dirPath) {
    def components = []
    def list = []
    def folder = new File(dirPath)
    if (folder.exists()) {
        println("processing folder " + folder)
        folder.eachFileRecurse(FileType.FILES) { file ->
            if (file.name.endsWith(".csv")) {
                println("processing file " + file)
                list << file
                file.text.split('\n').each { line ->
                    if (line.startsWith('#') || !line.trim())
                        return
                    def fields = line.split(',')
                    components.add([
                        name   : fields[0],
                        version: fields[1],
                        host   : fields[2]
                    ])
                }
            }
        }
    }
    println "files to remove:"
    list.each {
        println("removing " + it.path)
        it.delete()
    }
    return components
}
//debug helper: recursively list the content of a directory
@NonCPS
def getComponentsFromCSVDir3(def dirPath) {
    println "checking current folder"
    def dh = new File(dirPath)
    println "current folder path: " + dh.path
    dh.eachFileRecurse {
        println it
    }
    println "checking CD folder"
    // dh.eachFileRecurse(FILES) {
    //     if (it.name.endsWith('.csv')) {
    //         println it
    //     }
    // }
}
//shell-based variant: parse every file in the given directory as a CSV
def getComponentsFromCSVDir4(def dirPath) {
    // double-quoted script so Groovy interpolates ${dirPath}; shell variables are escaped with \$
    sh """
        echo "parsing ${dirPath}";
        for entry in ${dirPath}/*; do
            INPUT=\$entry
            OLDIFS=\$IFS
            IFS=','
            [ ! -f \$INPUT ] && { echo "\$INPUT file not found"; exit 99; }
            while read artifactid version host
            do
                echo "Component name : \$artifactid"
                echo "Component version : \$version"
                echo "Target host : \$host"
            done < \$INPUT
            IFS=\$OLDIFS
        done
    """
}