#!groovy
import org.yaml.snakeyaml.Yaml
import groovy.json.JsonSlurper

/**
 * Deploy gCube components on the D4Science Infrastructure (dev and preprod environments)
 *
 * Roberto Cirillo (ISTI-CNR)
 */

// related jenkins job: https://jenkins.d4science.org/job/gCubeDeployer/

def agent_root_folder = '/var/lib/jenkins'
def verbose = true
def yamlConfig
def text

if ("${params.gCube_release_version}" || "${params.RELEASE_URL}"){
    String releaseURL = '';
    if ("${params.gCube_release_version}"){
        // locate the release file
        releaseURL = "https://code-repo.d4science.org/gCubeCI/gCubeReleases/raw/branch/master/open/gcube-${gCube_release_version}.yaml"
    }else{
        releaseURL = "${params.RELEASE_URL}"
    }
    // load the release file
    text = releaseURL.toURL().getText()
    // parse it
    yamlConfig = new Yaml().load(text)
    if (verbose)
        println yamlConfig.inspect()
    echo "Building gCube v. ${yamlConfig.gCube_release.Version}"
    if (verbose) {
        echo "Found components:"
        yamlConfig.gCube_release.Components.each { println it.key }
    }
}

def deployList
def backupList
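// A minimal sketch of the release YAML layout this script expects (component names,
// groups and versions below are hypothetical examples, not taken from a real release file):
//
//   gCube_release:
//     Version: "5.x.x"
//     Components:
//       Distribution:
//         - name: smartgears-distribution
//           version: "4.0.0"
//       Portlets:
//         - name: example-portlet
//           group_id: org.gcube.portlets
//           version: "1.0.0"
//       Services:
//         - name: example-service
//           version: "2.0.0-SNAPSHOT"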
pipeline {
    agent { label 'ansible' }
    options { ansiColor('xterm') }
    triggers {
        // every fifteen minutes (perhaps at :07, :22, :37, :52)
        // cron('H/15 * * * *')
        // once in every two-hour slot between 9 AM and 5 PM every weekday (perhaps at 10:38 AM, 12:38 PM, 2:38 PM, 4:38 PM)
        cron('H H(9-16)/6 * * 1-5')
    }
    environment {
        AGENT_ROOT_FOLDER = "${agent_root_folder}"
        // folder where all the pending deployments are queued
        PENDING_DEPLOY_FOLDER = "${agent_root_folder}/CD/"
        ANSIBLE_ROOT_FOLDER = "${agent_root_folder}/ansible-repos/ansible-playbooks/d4science-smartgears-services"
        PIPELINE_BUILD_NUMBER = "${env.BUILD_NUMBER}"
        // deploy file related to the current pipeline job
        DEPLOY_FILE = "${PENDING_DEPLOY_FOLDER}deploy.${PIPELINE_BUILD_NUMBER}.csv"
        BACKUP_FILE = "${PENDING_DEPLOY_FOLDER}deploy.${PIPELINE_BUILD_NUMBER}.bck"
        // deploy folder that contains all the pending jobs associated to the current pipeline execution
        DEPLOY_FOLDER = "${WORKSPACE}/CD-${PIPELINE_BUILD_NUMBER}"
        // artifact coordinates
        TRIGGER_JOB = "${params.TRIGGER_JOB}"
        TRIGGER_VERSION = "${params.TRIGGER_VERSION}"
        TRIGGER_HOST = "${params.TRIGGER_HOST}"
        // enable the Continuous Deployment if TRUE
        TRIGGER_CD = "${params.TRIGGER_CD}"
        // generic parameter that can be passed to the ansible job
        TRIGGER_FOO = "${params.TRIGGER_FOO}"
        // useful to identify the remote YAML file
        GCUBE_RELEASE_NUMBER = "${params.gCube_release_version}"
        // category names of the components candidate to the remote deployment
        DEPLOY_CATEGORY = "${env.DEPLOY_CATEGORY}"
        // job name of the release pipeline
        RELEASE_JOB = 'Pipeline-gCubeBuilder'
        // URL to the YAML file, used as an alternative to RELEASE_JOB
        RELEASE_URL = "${params.RELEASE_URL}"
        // username of the ReleaseManager, used for limiting the preproduction deployments
        RELEASE_MANAGER_USER = 'roberto.cirillo'
        // name of the Smartgears distribution group, defined in the yaml file (only for release deployments)
        DISTRIBUTION_CATEGORY = 'Distribution'
        // name of the Portlets group, defined in the yaml file (only for release deployments)
        PORTLET_CATEGORY = 'Portlets'
    }
    parameters {
        string(name: 'TRIGGER_JOB', defaultValue: '', description: 'Name of the service or job to deploy')
        string(name: 'TRIGGER_VERSION', defaultValue: '', description: 'Service version to deploy')
        string(name: 'TRIGGER_HOST', defaultValue: '', description: 'Target host / host group where the service is deployed')
        booleanParam(name: 'TRIGGER_CD', defaultValue: true, description: 'Set to false to skip the current deploy')
        string(name: 'TRIGGER_FOO', defaultValue: '', description: 'Additional parameter passed to the ansible job. Leave blank if not used')
        string(name: 'gCube_release_version', defaultValue: '', description: 'The number of the current gCube release. Leave blank if executed outside a gCube release.')
        string(name: 'RELEASE_URL', defaultValue: '', description: 'URL to the yaml file. Leave blank if executed outside a gCube release.')
    }
    stages {
        stage('Initialize environment') {
            steps {
                sh '''
                date=`date`;
                mkdir -p ${PENDING_DEPLOY_FOLDER}
                mkdir -p "${DEPLOY_FOLDER}"
                find "${PENDING_DEPLOY_FOLDER}" -type f -exec mv --target-directory="${DEPLOY_FOLDER}" '{}' +
                '''
            }
        }
        stage('Deploy from system') {
            when{
                anyOf{
                    allOf{
                        triggeredBy 'TimerTrigger'
                        environment name: 'IS_SCHEDULED', value: 'True'
                    }
                }
            }
            steps {
                echo 'Cron build enabled. \033[31;1;4mDeploy from system ongoing\033[0m'
                script {
                    echo "pipeline was triggered by ${params.TRIGGER_JOB}"
                    println("Going to deploy all the pending deployments")
                    def deployFolder = "CD-${env.BUILD_NUMBER}";
                    parseDeployPendingJobs(deployFolder);
                }
            }
        }
        stage('Nothing to do ') {
            when{
                allOf{
                    triggeredBy 'TimerTrigger'
                    environment name: 'IS_SCHEDULED', value: 'False'
                }
            }
            steps {
                echo '\033[31;1;4mGoing to sleep\033[0m'
                sh ' exit 0; '
            }
        }
        stage('New pending deploy ') {
            when{
                environment name: 'IS_SCHEDULED', value: 'True'
                anyOf{
                    triggeredBy 'BuildUpstreamCause'
                    triggeredBy 'UpstreamCause'
                    triggeredBy 'UserIdCause'
                }
                expression { params.TRIGGER_JOB != "${RELEASE_JOB}" }
            }
            steps {
                sh '''
                echo '\033[31;1;4mNew pending deploy\033[0m'
                echo "Cron build enabled. Adding ${TRIGGER_JOB} - ${TRIGGER_VERSION} to the queue"
                touch $DEPLOY_FILE;
                if grep -q "${TRIGGER_JOB}" ${DEPLOY_FILE}; then
                    echo "component ${TRIGGER_JOB} already added. Nothing to add."
                else
                    echo "${TRIGGER_JOB},${TRIGGER_VERSION},${TRIGGER_HOST}" >> ${DEPLOY_FILE}
                fi
                '''
                // the following catchError always raises an error in order to report UNSTABLE to the final user,
                // since the deploy has not been performed yet. Not sure this is the best approach.
                catchError(buildResult: 'UNSTABLE', stageResult: 'UNSTABLE') {
                    sh("""
                    exit 126;
                    """)
                }
            }
        }
        stage('Deploy from job ') {
            when{
                environment name: 'IS_SCHEDULED', value: 'False'
                expression { env.TRIGGER_CD.toBoolean() }
                anyOf{
                    triggeredBy 'BuildUpstreamCause'
                    triggeredBy 'UpstreamCause'
                }
                expression { params.TRIGGER_JOB != "${RELEASE_JOB}" }
            }
            steps {
                script{
                    echo '\033[31;1;4mNew upstream deploy ongoing\033[0m'
                    echo "Cron build disabled. Component: ${params.TRIGGER_JOB} - ${params.TRIGGER_VERSION}"
                    ansiColor('xterm') {
                        catchError(buildResult: 'UNSTABLE', stageResult: 'UNSTABLE') {
                            checkup("${params.TRIGGER_JOB}", "${params.TRIGGER_VERSION}", "${params.TRIGGER_HOST}");
                            deploy("${params.TRIGGER_JOB}", "${params.TRIGGER_VERSION}", "${params.TRIGGER_HOST}", "${params.TRIGGER_FOO}");
                        }
                    }
                }
            }
        }
        stage('Deploy from gCubeRelease ') {
            when{
                anyOf{
                    triggeredBy 'BuildUpstreamCause'
                    triggeredBy 'UpstreamCause'
                    // added just for testing. To remove.
                    triggeredBy cause: "UserIdCause", detail: "${RELEASE_MANAGER_USER}"
                }
                // we should improve this conditional check
                equals(actual: "${params.TRIGGER_JOB}", expected: "${RELEASE_JOB}")
            }
            steps {
                script{
                    if("${DEPLOY_CATEGORY}"){
                        echo '\033[31;1;4mGoing to deploy gCube Release \033[0m'
                        deployRelease("${DEPLOY_CATEGORY}", yamlConfig);
                    }else
                        println ("CATEGORIES NOT FOUND. ENVIRONMENT NOT SET FOR RELEASE DEPLOYMENT. NOTHING TO DO");
                }
            }
        }
        stage('Deploy manually ') {
            when{
                allOf{
                    environment name: 'IS_SCHEDULED', value: 'False'
                    triggeredBy 'UserIdCause'
                }
                expression { params.TRIGGER_JOB != "${RELEASE_JOB}" }
            }
            steps {
                echo '\033[31;1;4mNew deploy ongoing, manually triggered\033[0m'
                echo "Cron build disabled. Pipeline executed manually. New deploy of ${params.TRIGGER_JOB} - ${params.TRIGGER_VERSION} ongoing"
                catchError(buildResult: 'FAILURE', stageResult: 'FAILURE') {
                    deploy("${params.TRIGGER_JOB}", "${params.TRIGGER_VERSION}", "${params.TRIGGER_HOST}", "${params.TRIGGER_FOO}");
                }
            }
        }
    }
    post {
        always {
            script {
                sh '''
                echo 'Sending report'
                '''
            }
        }
        success {
            echo 'The deploy pipeline worked!'
            emailext attachLog: true, //attachmentsPattern: "**/${ACTION_DEPLOY_FILE}.csv",
                to: 'roberto.cirillo@isti.cnr.it',
                subject: "[Jenkins-gCubeDeployer] Deploy report",
                body: "${currentBuild.fullDisplayName}. Build time: ${currentBuild.durationString}. See ${env.BUILD_URL}. "
        }
        failure {
            echo 'The deploy pipeline has failed'
            emailext attachLog: true,
                to: 'roberto.cirillo@isti.cnr.it',
                subject: "[Jenkins-gCubeDeployer] deploy ${currentBuild.fullDisplayName} failed",
                body: "Something is wrong with ${env.BUILD_URL}"
        }
    }
}

/* CHECKUP METHODS */

// check the input parameters: only SNAPSHOT or "latest" versions can be deployed outside a release
def checkup(String service, String version, String host){
    sh("""
    case "$version" in
        **SNAPSHOT) echo "version contains SNAPSHOT" ;;
        **latest) echo "version contains latest" ;;
        * ) echo "version without SNAPSHOT. EXIT WITHOUT DEPLOY "; exit 1;;
    esac
    """)
}

// check the input parameters: SNAPSHOT versions are rejected because this is a release build
def checkupRelease(String service, String version){
    sh("""
    case "$version" in
        **SNAPSHOT) echo "version contains SNAPSHOT. EXIT because this is a release build" ; exit 1;;
        **latest) echo "version contains latest" ;;
        * ) echo "version without SNAPSHOT. Go ahead";;
    esac
    """)
}

/* DEPLOY METHODS */

// parse all csv files found in the local folder and deploy the components defined inside
def parseDeployPendingJobs( def deployFolder){
    println ("searching files in folder ${deployFolder}");
    def files = findFiles(glob: "${deployFolder}/*.csv")
    if (files == null){
        println ("Nothing to do");
    }else{
        def serviceList = []
        for (def file : files){
            def records = readCSV file: "${file.path}"
            for (def record : records) {
                println("Processing record: "+record)
                serviceList += "${record.get(0)},${record.get(1)},${record.get(2)}";
            }
            sh "rm ${file.path}"
        }
        sh "rm -Rf ${deployFolder}"
        deployJobs(serviceList.unique())
    }
}
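// Example of a pending-deploy CSV consumed by parseDeployPendingJobs() above, one record per
// line in the form serviceName,serviceVersion,targetHost (the values are hypothetical):
//
//   gcat,2.1.0-SNAPSHOT,my-dev-host.d4science.org
//   data-transfer-service,1.3.0-SNAPSHOT,my-dev-cluster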
// Deploy jobs from an input list whose elements have the form: serviceName,serviceVersion,targetHost.
// Create a new stage for each job.
def deployJobs(def serviceList){
    for (def record : serviceList) {
        service = record.split(",");
        stage(service[0]){
            println("Processing deploy: "+service[0])
            // catchError(buildResult: 'UNSTABLE', stageResult: 'UNSTABLE') {
            checkup(service[0], service[1], service[2]);
            // no additional ansible argument is tracked in the CSV queue, so an empty one is passed
            deploy(service[0], service[1], service[2], '');
            // }
        }
    }
}

// Run ansible deploy
def deploy(String service, String version, String host, String foo){
    def now = new Date();
    println("Going to deploy the service "+service+" with version: "+version+" on target: "+host+" with argument: "+foo);
    dir("${ANSIBLE_ROOT_FOLDER}/CD/"){
        sh("""
        git pull;
        """)
        def statusCode = sh( script: "./deployService.sh $service $version $host $foo;", returnStatus: true);
        sh("""
        echo " last exit code \$?";
        """)
        println("Deploy statusCode value: "+statusCode);
        if (statusCode != 0){
            currentBuild.result = 'FAILURE'
            sh("""
            exit $statusCode;
            """)
        }
    }
}

/* DEPLOY RELEASE METHODS */

// collect all the components defined inside one or more yaml categories matching categoryList and invoke the deploy
def deployRelease(def categoryList, def yaml){
    def isPortlet = false
    def releaseList = []
    def portlets = []
    String smartgears
    def categories = categoryList.split(',')
    yaml.gCube_release.Components.each { group_name, component_list ->
        if (categories.contains(group_name)){
            for ( def category : categories){
                if("${group_name}" == "$category"){
                    if (category == "${DISTRIBUTION_CATEGORY}"){
                        // search the smartgears-distribution (or smartgears-distribution-legacy) version in the Distribution component defined inside the yaml file
                        def sg = component_list?.collect{
                            if ("$it.name" == 'smartgears-distribution' || "$it.name" == 'smartgears-distribution-legacy' ){
                                return "$it.version";
                            }
                        }
                        smartgears = (sg != null) ? sg[0] : ''
                    }else if (category == "${PORTLET_CATEGORY}"){
                        portlets += component_list?.collect{
                            if ("${it.group_id}" != "null")
                                return "{\"group_id\": \"$it.group_id\", \"name\": \"$it.name\", \"version\": \"$it.version\", \"extension\": \"war\"} ";
                            else
                                return "{\"name\": \"$it.name\", \"version\": \"$it.version\", \"extension\": \"war\"}"
                        }
                        def jsonContent = "{\"generic_portlets\": "+portlets+"}"
                        echo "json: $jsonContent"
                        writeJSON file: 'portlets.json', json: jsonContent
                        def output = sh(returnStdout: true, script: "mv portlets.json $ANSIBLE_ROOT_FOLDER").trim()
                        isPortlet = true
                    }else{
                        releaseList += component_list?.collect { return "$it.name,$it.version" }
                    }
                    return;
                }
            }
        }
    }
    deployReleaseJobs(releaseList, smartgears, isPortlet);
}

// Deploy only release components, starting from a service list. All the components are passed as: componentName,componentVersion
def deployReleaseJobs(def serviceList, String smartgears, boolean isPortlet){
    if (serviceList || isPortlet){
        println("Going to deploy release components");
        for (def record : serviceList) {
            service = record.split(",");
            stage(service[0]){
                println("Processing RELEASE deploy: service "+service[0]+" with version "+service[1]+" and SG "+smartgears)
                catchError(buildResult: 'UNSTABLE', stageResult: 'FAILURE') {
                    checkupRelease(service[0], service[1]);
                    if (smartgears)
                        deployReleaseJob(service[0], service[1], smartgears);
                    else
                        deployReleaseJob(service[0], service[1], '');
                }
            }
        }
        if (isPortlet){
            stage("portlets"){
                deployReleaseJob('generic-portlet', '', '');
            }
        }
    }else{
        println("Release components not found. Skip this stage");
    }
}
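// For reference, a hypothetical sketch of the portlets.json file produced by deployRelease()
// and moved into ANSIBLE_ROOT_FOLDER for the 'generic-portlet' deployment (field values are
// illustrative only):
//
//   {"generic_portlets": [{"group_id": "org.gcube.portlets", "name": "example-portlet", "version": "1.0.0", "extension": "war"}]}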
Skip this stage"); } } //Run ansible deploy related to a single service def deployReleaseJob(String service, String version, String smartgears){ def now = new Date(); println("Going to deploy "+service+" "+version+" on preproduction "); dir("${ANSIBLE_ROOT_FOLDER}/CD/"){ sh(""" git pull; """) def statusCode = sh( script: "./deployPreprodService.sh $service $version $smartgears ;", returnStatus: true); sh(""" echo " last exit code \$?"; """) println("Deploy ended with status: "+statusCode); if (statusCode != 0){ currentBuild.result = 'FAILURE' sh(""" exit $statusCode; """) } } }
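// Illustrative only: how an upstream Jenkins job could queue a deployment on this pipeline via
// the standard 'build' step. The job path and parameter values below are hypothetical and must
// be adapted to the actual Jenkins layout.
//
// build job: 'gCubeDeployer',
//       wait: false,
//       parameters: [string(name: 'TRIGGER_JOB', value: 'my-service'),
//                    string(name: 'TRIGGER_VERSION', value: '1.0.0-SNAPSHOT'),
//                    string(name: 'TRIGGER_HOST', value: 'my-dev-host'),
//                    booleanParam(name: 'TRIGGER_CD', value: true)]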