#!groovy
/**
 * Deploy components to the D4Science Infrastructure (dev-only)
 *
 * Roberto Cirillo (ISTI-CNR)
 */

// related jenkins job: https://jenkins.d4science.org/job/gCubeDeployer/
def agent_root_folder = '/var/lib/jenkins'
def agent_deploy_filename = 'deploy.csv'
def agent_deploy_backup_filename = 'deploy.bck'
def deployList
def backupList
if (params.deployFile) {
println "Using custom deploy file"
deployList = params.deployFile
} else {
    println "Using local deploy file"
    //load the report from local
    deployList = agent_root_folder+'/'+agent_deploy_filename;
    println "Load from local file ${deployList}"
}
backupList = agent_root_folder+'/'+agent_deploy_backup_filename;
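
// The deploy file is a plain CSV, one component per line: name,version,host;
// lines starting with '#' are treated as comments. A purely illustrative example
// (component and host names are hypothetical):
//
//   my-service,1.2.3-SNAPSHOT,node1.dev.d4science.org
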
pipeline {
agent {
        label 'CD'
    }
    options {
        ansiColor('xterm')
    }
    triggers {
        // every fifteen minutes (perhaps at :07, :22, :37, :52)
        // cron('H/15 * * * *')
        // once a day on the 1st and 15th of every month
        // cron ('H H 1,15 * *')
        // once in every two-hour slot between 9 AM and 5 PM on weekdays (perhaps at 10:38 AM, 12:38 PM, 2:38 PM, 4:38 PM)
        cron('H H(9-16)/2 * * 1-5')
    }
    environment {
        AGENT_ROOT_FOLDER = "${agent_root_folder}"
        DEPLOY_ROOT_FOLDER = "${agent_root_folder}/ansible-repos/ansible-playbooks/d4science-ghn-cluster/CD"
        DEPLOY_FILE = "${agent_root_folder}/${agent_deploy_filename}"
        BACKUP_FILE = "${agent_root_folder}/${agent_deploy_backup_filename}"
        TRIGGER_JOB = "${params.TRIGGER_JOB}"
        TRIGGER_VERSION = "${params.TRIGGER_VERSION}"
        TRIGGER_HOST = "${params.TRIGGER_HOST}"
        TRIGGER_CD = "${params.TRIGGER_CD}"
        // maybe define another parameter here to identify the target host, so that the developer can change it from the job
    }
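    // The TRIGGER_* values above are expected to arrive as job parameters from an upstream
    // pipeline. A hypothetical upstream invocation (names and values are illustrative only)
    // could look like:
    //
    //   build(job: 'gCubeDeployer', wait: false, propagate: false,
    //         parameters: [string(name: 'TRIGGER_JOB', value: 'my-service'),
    //                      string(name: 'TRIGGER_VERSION', value: '1.2.3-SNAPSHOT'),
    //                      string(name: 'TRIGGER_HOST', value: 'node1.dev.d4science.org'),
    //                      string(name: 'TRIGGER_CD', value: 'True')])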
stages {
stage('initialize environment') {
steps {
sh '''
                echo "Pipeline started at $(date)";
                '''
}
}
        stage('Deploy from system') {
            when {
                allOf {
                    triggeredBy 'TimerTrigger'
                    // maybe we can add a new condition in order to consider the manual execution of this pipeline
                    environment name: 'IS_CRON', value: 'True'
                }
            }
            steps {
                echo 'Cron build enabled. Deploy from system ongoing'
                script {
                    // parse the report and extract the data
                    def components = estractComponentsFromCSV(deployList)
                    if (components.size() > 0) {
                        for (component in components) {
                            stage("${component.name}") {
                                println "Deploy ongoing of component: $component"
                                catchError(buildResult: 'UNSTABLE', stageResult: 'UNSTABLE') {
                                    checkup("${component.name}", "${component.version}", "${component.host}");
                                    deploy("${component.name}", "${component.version}", "${component.host}");
                                }
                            }
                        }
                    } else {
                        println "No components found"
                    }
                }
                cleanup(DEPLOY_FILE, BACKUP_FILE);
            }
        }
        stage('Nothing to do by System') {
            when {
                allOf {
                    triggeredBy 'TimerTrigger'
                    environment name: 'IS_CRON', value: 'False'
                }
            }
            steps {
                echo 'Do Nothing: cron build disabled'
            }
        }
        stage('Add new pending deploy') {
            when {
                environment name: 'IS_CRON', value: 'True'
                anyOf {
                    triggeredBy 'BuildUpstreamCause'
                    triggeredBy 'UpstreamCause'
                }
            }
            steps {
                sh '''
                echo "Cron build enabled. New deploy of ${TRIGGER_JOB} - ${TRIGGER_VERSION} will be added to the deploy file"
                touch $DEPLOY_FILE;
                if grep -q "${TRIGGER_JOB}" ${DEPLOY_FILE}; then
                    echo "component ${TRIGGER_JOB} already added. Nothing to add."
                else
                    echo "${TRIGGER_JOB},${TRIGGER_VERSION},${TRIGGER_HOST}" >> ${DEPLOY_FILE}
                fi
                '''
                // force an UNSTABLE result (the sh step deliberately fails and catchError downgrades it),
                // presumably to flag that the deploy has only been queued, not executed
                catchError(buildResult: 'UNSTABLE', stageResult: 'UNSTABLE') {
                    sh("""
                    exit 126;
                    """)
                }
            }
        }
        stage('Deploy from job') {
            when {
                environment name: 'IS_CRON', value: 'False'
                expression {
                    env.TRIGGER_CD.toBoolean()
                }
                // expression {
                //     return env.TRIGGER_VERSION.contains("SNAPSHOT")
                // }
                anyOf {
                    triggeredBy 'BuildUpstreamCause'
                    triggeredBy 'UpstreamCause'
                }
            }
            steps {
                echo "Cron build disabled. New deploy of ${params.TRIGGER_JOB} - ${params.TRIGGER_VERSION} ongoing"
                ansiColor("xterm") {
                    catchError(buildResult: 'FAILURE', stageResult: 'FAILURE') {
                        checkup("${params.TRIGGER_JOB}", "${params.TRIGGER_VERSION}", "${params.TRIGGER_HOST}");
                        deploy("${params.TRIGGER_JOB}", "${params.TRIGGER_VERSION}", "${params.TRIGGER_HOST}");
                    }
                }
            }
        }
    }
    post {
        always {
            script {
                sh '''
                echo 'jobs currently appended:'
                '''
                //cat ./${ACTION_DEPLOY_FILE}.csv
            }
        }
        success {
            echo 'The deploy pipeline worked!'
            emailext attachLog: true, //attachmentsPattern: "**/${ACTION_DEPLOY_FILE}.csv",
                to: 'roberto.cirillo@isti.cnr.it',
                subject: "Deploy report",
                body: "${currentBuild.fullDisplayName}. Build time: ${currentBuild.durationString}. See ${env.BUILD_URL}."
}
failure {
echo 'The deploy pipeline has failed'
emailext attachLog: true,
to: 'roberto.cirillo@isti.cnr.it',
subject: "[Jenkins deploy D4S] deploy ${currentBuild.fullDisplayName} failed",
body: "Something is wrong with ${env.BUILD_URL}"
}
}
}

//parse a csv file formatted in this way: ComponentName,ComponentVersion,ComponentHost
def estractComponentsFromCSV(def deployList) {
    def components = []
    if (fileExists("${deployList}")) {
        echo 'file found'
        readFile("${deployList}").split('\n').each { line ->
            // skip blank lines and comment lines
            if (!line.trim() || line.startsWith('#'))
                return
            def fields = line.split(',')
            components.add([
                name   : fields[0].trim(),
                version: fields[1].trim(),
                host   : fields[2].trim()
            ])
        }
    } else {
        echo 'File not found. Returning an empty list.'
    }
    return components
}
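// For a line like "my-service,1.2.3-SNAPSHOT,node1.dev.d4science.org" (hypothetical values),
// the returned list would contain one entry such as:
//   [name: 'my-service', version: '1.2.3-SNAPSHOT', host: 'node1.dev.d4science.org']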

//Alternative way to append text to a small file (not used)
def appendToFile(String fileName, String line) {
def current = ""
if (fileExists(fileName)) {
current = readFile fileName
}
writeFile file: fileName, text: current + "\n" + line
}
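// Example (hypothetical values): appendToFile(deployList, "my-service,1.2.3-SNAPSHOT,node1.dev.d4science.org")
// rewrites the whole file with the new line appended at the end.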

/**
 * Appends the footer to the deploy file.
 */
def appendFooter(def file) {
    def now = new Date()
    sh("""
    echo "#---" >> $file
    echo "#generated by the gCubeDeploy pipeline" >> $file
    echo "#last update $now" >> $file
    """)
}
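// appendFooter() is currently not invoked; calling appendFooter(DEPLOY_FILE) would leave a
// trailer like the following at the end of the deploy file:
//   #---
//   #generated by the gCubeDeploy pipeline
//   #last update <date>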

def deploy(String service, String version, String host) {
    def now = new Date();
    println("Going to deploy the service "+service+" with version: "+version+" on target: "+host);
    // run deployService.sh and capture its exit code
    def statusCode = sh(script: "cd $DEPLOY_ROOT_FOLDER;./deployService.sh $service $version $host;", returnStatus: true);
    println("Deploy ended with status: "+statusCode);
    if (statusCode != 0) {
        // propagate the failure so the surrounding catchError can mark the stage
        error("Deploy of $service $version on $host ended with exit code $statusCode");
    }
}
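// deployService.sh lives in DEPLOY_ROOT_FOLDER (the d4science-ghn-cluster ansible playbooks checkout);
// it is assumed to take the three positional arguments shown above (service, version, host)
// and to exit non-zero when the deployment fails.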

//Check the input parameters: only SNAPSHOT versions are accepted for deployment
def checkup(String service, String version, String host) {
    sh("""
    case "$version" in
        *SNAPSHOT) echo "version contains SNAPSHOT" ;;
        *) echo "version does not contain SNAPSHOT. Exiting without deploying"; exit 1;;
    esac
    """)
}
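// For example, checkup('any-service', '1.0.0-SNAPSHOT', 'node1') lets the deploy proceed,
// while a release version such as '1.0.0' makes the shell step exit 1, so the caller's
// catchError marks the stage accordingly (names above are purely illustrative).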

def cleanup(def deployFile, def backupFile) {
    sh("""
    echo "cleanup ${deployFile}";
    if [ -f ${deployFile} ]; then
        if [ -f ${backupFile} ]; then
            echo "backup found: ${backupFile} going to replace it";
            rm ${backupFile};
        fi
        mv ${deployFile} ${backupFile};
    else
        echo "deploy file not found. Nothing to clean up"
    fi
    """)
}