// File: gCubeDeployer/Jenkinsfile
// (The lines below were residue from a Gitea "Raw/Normal View/History" web page
//  capture — 338 lines, 12 KiB, Plaintext — kept here only as provenance.)
#!groovy

/**
 * Deploy components to the D4Science Infrastructure (dev-only)
 *
 * Roberto Cirillo (ISTI-CNR)
 */

// related jenkins job: https://jenkins.d4science.org/job/gCubeDeployer/
def agent_root_folder = '/var/lib/jenkins'

// Load the build report's content if a report number was provided.
// params.report_number is a String: test it directly (empty string is falsy
// in Groovy) rather than wrapping it in a GString.
if (params.report_number) {
    // Build the report URL from the release/report parameters (use params.*
    // consistently instead of relying on implicit binding variables).
    String reportURL = "https://code-repo.d4science.org/gCubeCI/gCubeReleases/raw/branch/master/open/${params.gCube_release_version}/build_commits.${params.report_number}.csv"
    println "Pulling the report from Git at ${reportURL}"
    // NOTE(review): network fetch — fails the pipeline if Git is unreachable
    def text = reportURL.toURL().getText()
    // parse the report and extract the component records
    def components = parseBuildCommits(text)
    assert 0 < components.size() : "No component found in build_commits.${params.report_number}.csv"
    for (component in components) {
        // here we need to filter on keywords matching component's section
        println " $component"
    }
}

def deployList
def backupList
// Allow the caller to override the deploy file with a custom one.
if (params.deployFile) {
    println "Using custom deploy file"
    deployList = params.deployFile
}
2022-07-18 10:08:09 +02:00
2022-04-15 14:41:39 +02:00
pipeline {
    agent {
        label 'ansible'
    }
    options {
        ansiColor('xterm')
    }
    triggers {
        // every fifteen minutes (perhaps at :07, :22, :37, :52)
        // cron('H/15 * * * *')
        // once in every two hours slot between 9 AM and 5 PM every weekday (perhaps at 10:38 AM, 12:38 PM, 2:38 PM, 4:38 PM)
        cron('H H(9-16)/6 * * 1-5')
    }
    environment {
        AGENT_ROOT_FOLDER = "${agent_root_folder}"
        PENDING_DEPLOY_FOLDER = "${agent_root_folder}/CD/"
        CD_ROOT_FOLDER = "${agent_root_folder}/ansible-repos/ansible-playbooks/d4science-ghn-cluster/CD"
        PIPELINE_BUILD_NUMBER = "${env.BUILD_NUMBER}"
        // per-build CSV holding the pending deploy requests
        DEPLOY_FILE = "${PENDING_DEPLOY_FOLDER}deploy.${PIPELINE_BUILD_NUMBER}.csv"
        BACKUP_FILE = "${PENDING_DEPLOY_FOLDER}deploy.${PIPELINE_BUILD_NUMBER}.bck"
        DEPLOY_FOLDER = "${WORKSPACE}/CD-${PIPELINE_BUILD_NUMBER}"
        TRIGGER_JOB = "${params.TRIGGER_JOB}"
        TRIGGER_VERSION = "${params.TRIGGER_VERSION}"
        TRIGGER_HOST = "${params.TRIGGER_HOST}"
        TRIGGER_CD = "${params.TRIGGER_CD}"
        GCUBE_RELEASE_NUMBER = "${params.gCube_release_version}"
        REPORT_NUMBER = "${params.report_number}"
    }
    parameters {
        string(name: 'TRIGGER_JOB',
               defaultValue: '',
               description: 'Name of the service or job to deploy')
        string(name: 'TRIGGER_VERSION',
               defaultValue: '',
               description: 'service version to deploy')
        string(name: 'TRIGGER_HOST',
               defaultValue: '',
               description: 'Target Host / Host group where deploy the service')
        booleanParam(name: 'TRIGGER_CD',
               defaultValue: true,
               description: 'Set to false to avoid current deploy')
        string(name: 'gCube_release_version',
               defaultValue: '',
               description: 'The number of the gCube release. Leave blank if executed outside gCube release.')
        string(name: 'report_number',
               defaultValue: '',
               description: 'The build report number on Git to pull the notes. Leave blank if executed outside gCube release.')
        string(name: 'report',
               defaultValue: '',
               description: 'The build report. Leave blank to pull the build report from Git. Leave blank if executed outside gCube release.')
    }
    stages {
        stage('Initialize environment') {
            steps {
                // Move any pending deploy files accumulated by previous builds
                // into this build's private work folder.
                sh '''
                date=`date`;
                mkdir -p ${PENDING_DEPLOY_FOLDER}
                mkdir -p "${DEPLOY_FOLDER}"
                find "${PENDING_DEPLOY_FOLDER}" -type f -exec mv --target-directory="${DEPLOY_FOLDER}" "{}" +
                '''
            }
        }
        stage('Deploy from system') {
            when{
                anyOf{
                    allOf{
                        triggeredBy 'TimerTrigger'
                        // maybe we can add a new condition in order to consider the manual execution of this pipeline
                        environment name: 'IS_SCHEDULED', value: 'True'
                    }
                    // in this case the pipeline is triggered by the gCubeBuilder pipeline
                    // (plain params access: "${${...}}" is not valid Groovy interpolation)
                    equals(actual: params.TRIGGER_JOB, expected: 'gCubeBuilder')
                }
            }
            steps {
                echo 'Cron build enabled. Deploy from system ongoing'
                script {
                    echo "pipeline triggered by ${params.TRIGGER_JOB}"
                    if (params.TRIGGER_JOB == 'gCubeBuilder') {
                        // do something
                        println("Pipeline triggered by gCubeBuilder")
                    } else {
                        def deployFolder = "CD-${env.BUILD_NUMBER}";
                        deployPendingJobs(deployFolder);
                    }
                }
            }
        }
        stage('Nothing to do ') {
            when{
                anyOf{
                    allOf{
                        triggeredBy 'TimerTrigger'
                        environment name: 'IS_SCHEDULED', value: 'False'
                    }
                }
            }
            steps {
                echo 'Do Nothing: cron build disabled'
            }
        }
        stage('New pending deploy ') {
            when{
                environment name: 'IS_SCHEDULED', value: 'True'
                anyOf{
                    triggeredBy 'BuildUpstreamCause'
                    triggeredBy 'UpstreamCause'
                    triggeredBy 'UserIdCause'
                }
            }
            steps {
                // Append the request to the pending-deploy CSV unless the
                // component is already queued.
                sh '''
                echo "Cron build enabled. New deploy of ${TRIGGER_JOB} - ${TRIGGER_VERSION} will be added to the deploy file"
                touch $DEPLOY_FILE;
                if grep -q \"\${TRIGGER_JOB}\" \${DEPLOY_FILE}; then
                    echo "component ${TRIGGER_JOB} already added. Nothing to add."
                else
                    echo "${TRIGGER_JOB},${TRIGGER_VERSION},${TRIGGER_HOST}" >> ${DEPLOY_FILE}
                fi
                '''
                // the following catch give always an error in order to return a feedback UNSTABLE to the final user since the deploy is still not performed
                catchError(buildResult: 'UNSTABLE', stageResult: 'UNSTABLE') {
                    sh("""
                    exit 126;
                    """)
                }
            }
        }
        stage('Deploy from job ') {
            when{
                environment name: 'IS_SCHEDULED', value: 'False';
                expression {
                    env.TRIGGER_CD.toBoolean()
                }
                anyOf{
                    triggeredBy 'BuildUpstreamCause'
                    triggeredBy 'UpstreamCause'
                }
            }
            steps {
                echo "Cron build disabled. New deploy of ${params.TRIGGER_JOB} - ${params.TRIGGER_VERSION} ongoing"
                ansiColor("xterm") {
                    catchError(buildResult: 'SUCCESS', stageResult: 'FAILURE') {
                        checkup("${params.TRIGGER_JOB}", "${params.TRIGGER_VERSION}", "${params.TRIGGER_HOST}");
                        deploy("${params.TRIGGER_JOB}", "${params.TRIGGER_VERSION}", "${params.TRIGGER_HOST}");
                    }
                }
            }
        }
        stage('Deploy manually ') {
            when{
                allOf{
                    environment name: 'IS_SCHEDULED', value: 'False';
                    triggeredBy 'UserIdCause'
                }
            }
            steps {
                echo "Cron build disabled. Pipeline executed Manually. New deploy of ${params.TRIGGER_JOB} - ${params.TRIGGER_VERSION} ongoing"
                ansiColor("xterm") {
                    catchError(buildResult: 'FAILURE', stageResult: 'FAILURE') {
                        deploy("${params.TRIGGER_JOB}", "${params.TRIGGER_VERSION}", "${params.TRIGGER_HOST}");
                    }
                }
            }
        }
    }
    post {
        always {
            script {
                sh '''
                echo 'Sending report'
                '''
                //cat ./${ACTION_DEPLOY_FILE}.csv
            }
        }
        success {
            echo 'The deploy pipeline worked!'
            emailext attachLog: true,//attachmentsPattern: "**/${ACTION_DEPLOY_FILE}.csv",
                to: 'roberto.cirillo@isti.cnr.it',
                subject: "[Jenkins-gCubeDeployer] Deploy report",
                body: "${currentBuild.fullDisplayName}. Build time: ${currentBuild.durationString}. See ${env.BUILD_URL}. "
        }
        failure {
            echo 'The deploy pipeline has failed'
            emailext attachLog: true,
                to: 'roberto.cirillo@isti.cnr.it',
                subject: "[Jenkins-gCubeDeployer] deploy ${currentBuild.fullDisplayName} failed",
                body: "Something is wrong with ${env.BUILD_URL}"
        }
    }
}
//launch ansible deploy
/**
 * Run the ansible deploy script for one component.
 *
 * @param service the component/service name
 * @param version the version to deploy
 * @param host    the target host or host group
 */
def deploy(String service, String version, String host){
    def now = new Date();
    println("Going to deploy the service "+service+" with version: "+version+" on target: "+host);
    // returnStatus (not returnStdout): we want the script's exit code.
    // The original returnStdout captured the output text into statusCode,
    // and the follow-up `echo $?` ran in a new shell where $? was always 0.
    def statusCode = sh( script: "cd $CD_ROOT_FOLDER;./deployService.sh $service $version $host;", returnStatus: true);
    println("Deploy ended with status: "+statusCode);
}
2022-05-06 15:59:59 +02:00
2022-05-09 16:49:22 +02:00
//Implement a new method in order to check the input parameters
/**
 * Guard against deploying release artifacts: only versions ending in
 * SNAPSHOT or latest are allowed; anything else aborts with exit 1.
 */
def checkup(String service, String version, String host){
    sh("""
    case "$version" in
        *SNAPSHOT) echo "version contains SNAPSHOT" ;;
        *latest) echo "version contains latest" ;;
        *) echo "version without SNAPSHOT. EXIT WITHOUT DEPLOY "; exit 1;;
    esac
    """)
}
//parse all csv files found in the local folder and deploy the components defined inside
/**
 * Scan deployFolder for *.csv files; each row is serviceName,version,host.
 * Deploys each distinct service once (first occurrence wins), then removes
 * the processed files and the folder itself.
 */
def deployPendingJobs( def deployFolder){
    println ("searching files in folder ${deployFolder}");
    def files = findFiles(glob: "${deployFolder}/*.csv")
    // findFiles returns an EMPTY ARRAY when nothing matches, never null:
    // Groovy truth on the array covers both cases.
    if (!files){
        println ("Nothing to do");
    }else{
        def serviceList = []   // services already deployed in this run
        for (def file : files){
            def records = readCSV file: "${file.path}"
            for (def record : records) {
                println("Processing record: "+record)
                if(!serviceList.contains(record.get(0))){
                    stage(record.get(0)){
                        println "Deploy on going of component: ${record.get(0)}"
                        catchError(buildResult: 'UNSTABLE', stageResult: 'UNSTABLE') {
                            checkup(record.get(0), record.get(1), record.get(2));
                            deploy(record.get(0), record.get(1), record.get(2));
                        }
                    }
                    serviceList << record.get(0)
                }else{
                    echo "${record.get(0)} already deployed. Deployment skipped."
                }
            }
            sh "rm ${file.path}"
        }
        sh "rm -Rf ${deployFolder}"
    }
}
2022-10-06 11:59:44 +02:00
//parse all csv files found in the local folder and deploy the components defined inside
/**
 * Collect every serviceName,version,host record from the *.csv files in
 * deployFolder, then deploy the de-duplicated list once via deployJobs.
 */
def parsePendingJobs( def deployFolder){
    println ("searching files in folder ${deployFolder}");
    def files = findFiles(glob: "${deployFolder}/*.csv")
    // findFiles returns an EMPTY ARRAY when nothing matches, never null
    if (!files){
        println ("Nothing to do");
    }else{
        def serviceList = []
        for (def file : files){
            def records = readCSV file: "${file.path}"
            for (def record : records) {
                println("Processing record: "+record)
                // Braces are mandatory: "$record.get(0)" would interpolate the
                // 'record.get' PROPERTY, not the method call.
                serviceList << "${record.get(0)},${record.get(1)},${record.get(2)}"
            }
            sh "rm ${file.path}"
        }
        // Deploy once, after all files are parsed (the original re-invoked
        // deployJobs inside the record loop, redeploying the growing list).
        deployJobs(serviceList.unique())
        sh "rm -Rf ${deployFolder}"
    }
}
//Deploy jobs from an input list with the following elements: serviceName,serviceVersion,targetHost.
def deployJobs(def serviceList){
    for (def record : serviceList) {
        println("Processing record: "+record)
        // 'def' keeps the variable method-local (the original leaked it into
        // the script binding). split(",") returns a String[], which has no
        // get(int) method in Groovy — use subscript access instead.
        def service = record.split(",");
        stage(service[0]){
            println "Deploy on going of component: ${service[0]}"
            catchError(buildResult: 'UNSTABLE', stageResult: 'UNSTABLE') {
                checkup(service[0], service[1], service[2]);
                deploy(service[0], service[1], service[2]);
            }
        }
    }
}