2022-04-22 15:38:43 +02:00
|
|
|
#!groovy
|
2022-10-10 15:08:33 +02:00
|
|
|
import org.yaml.snakeyaml.Yaml
|
2023-06-15 15:45:55 +02:00
|
|
|
import groovy.json.JsonSlurper
|
2022-04-22 15:38:43 +02:00
|
|
|
|
2022-04-22 16:35:54 +02:00
|
|
|
/**
|
2023-05-17 09:15:35 +02:00
|
|
|
* Deploy gCube components on D4Science Infrastructure (dev and preprod environments)
|
2022-04-22 16:35:54 +02:00
|
|
|
*
|
2022-04-22 17:02:12 +02:00
|
|
|
* Roberto Cirillo (ISTI-CNR)
|
2022-04-22 16:35:54 +02:00
|
|
|
*/
|
2022-04-22 15:38:43 +02:00
|
|
|
|
2022-04-22 11:41:05 +02:00
|
|
|
// related jenkins job: https://jenkins.d4science.org/job/gCubeDeployer/
|
|
|
|
|
2022-04-26 15:36:19 +02:00
|
|
|
// Folder on the Jenkins agent hosting the CD state and the ansible repositories.
def agent_root_folder = '/var/lib/jenkins'
// When true, dump the parsed release YAML and the component list to the log.
def verbose = true
// Parsed content of the gCube release YAML (set only for release-driven runs).
def yamlConfig
// Raw text of the release YAML file.
def text

// Resolve and parse the release descriptor when this run is part of a gCube release.
// NOTE: test the params directly instead of wrapping them in a GString: an unset
// parameter would render as the literal string "null", which is truthy in Groovy
// and would wrongly enable this branch on the very first run.
if (params.gCube_release_version || params.RELEASE_URL){
    String releaseURL='';
    if (params.gCube_release_version){
        //locate the release file from the release number
        releaseURL = "https://code-repo.d4science.org/gCubeCI/gCubeReleases/raw/branch/master/open/gcube-${gCube_release_version}.yaml"
    }else{
        //an explicit URL to the YAML file has been provided
        releaseURL = "${params.RELEASE_URL}"
    }
    //load the release file
    text = releaseURL.toURL().getText()
    //parsing with snakeyaml
    yamlConfig = new Yaml().load(text)
    if (verbose)
        println yamlConfig.inspect()
    echo "Building gCube v. ${yamlConfig.gCube_release.Version}"
    if (verbose) {
        echo "Found components:"
        yamlConfig.gCube_release.Components.each { println it.key }
    }
}

// Bookkeeping lists (currently unassigned placeholders).
def deployList
def backupList
|
2022-07-18 10:08:09 +02:00
|
|
|
|
2022-04-15 14:41:39 +02:00
|
|
|
// Declarative pipeline: queues deployment requests coming from upstream jobs and
// executes them, either on a timer (batched) or immediately, on dev/preprod.
pipeline {

    // run on an agent able to execute the ansible playbooks
    agent {
        label 'ansible'
    }

    options {
        // colorize console output (ANSI escape sequences are used in the echos below)
        ansiColor('xterm')
    }

    triggers {
        // every fifteen minutes (perhaps at :07, :22, :37, :52)
        // cron('H/15 * * * *')
        // once in every two hours slot between 9 AM and 5 PM every weekday (perhaps at 10:38 AM, 12:38 PM, 2:38 PM, 4:38 PM)
        cron('H H(9-16)/6 * * 1-5')
    }

    environment {
        AGENT_ROOT_FOLDER = "${agent_root_folder}"
        // folder where all the pending deployment will be added
        PENDING_DEPLOY_FOLDER="${agent_root_folder}/CD/"
        ANSIBLE_ROOT_FOLDER = "${agent_root_folder}/ansible-repos/ansible-playbooks/d4science-ghn-cluster"
        PIPELINE_BUILD_NUMBER = "${env.BUILD_NUMBER}"
        // deploy file related to the current pipeline job
        DEPLOY_FILE = "${PENDING_DEPLOY_FOLDER}deploy.${PIPELINE_BUILD_NUMBER}.csv"
        BACKUP_FILE = "${PENDING_DEPLOY_FOLDER}deploy.${PIPELINE_BUILD_NUMBER}.bck"
        // deploy folder that contains all the pending jobs associated to the current pipeline execution
        DEPLOY_FOLDER= "${WORKSPACE}/CD-${PIPELINE_BUILD_NUMBER}"
        // artifacts coordinates
        TRIGGER_JOB= "${params.TRIGGER_JOB}"
        TRIGGER_VERSION= "${params.TRIGGER_VERSION}"
        TRIGGER_HOST="${params.TRIGGER_HOST}"
        // enable the ContinuousDeployment if TRUE
        TRIGGER_CD="${params.TRIGGER_CD}"
        // Useful to identify the remote YAML file
        GCUBE_RELEASE_NUMBER = "${params.gCube_release_version}"
        //Category name of the components candidates to the remote deployment
        DEPLOY_CATEGORY="${env.DEPLOY_CATEGORY}"
        //Job name of the release pipeline
        RELEASE_JOB='Pipeline-gCubeBuilder'
        //URL to the YAML file. Used alternatively to RELEASE_JOB
        RELEASE_URL= "${params.RELEASE_URL}"
        // username of ReleaseManager. Used for limiting the preproduction deployments
        RELEASE_MANAGER_USER='roberto.cirillo'
        // name of Smartgears distribution group, defined in the yaml file ( only for release deployments)
        DISTRIBUTION_CATEGORY='Distribution'
        // name of Portlets group, defined in the yaml file ( only for release deployments)
        PORTLET_CATEGORY='Portlets'
    }

    parameters {
        string(name: 'TRIGGER_JOB',
               defaultValue: '',
               description: 'Name of the service or job to deploy')
        string(name: 'TRIGGER_VERSION',
               defaultValue: '',
               description: 'service version to deploy')
        string(name: 'TRIGGER_HOST',
               defaultValue: '',
               description: 'Target Host / Host group where deploy the service')
        booleanParam(name: 'TRIGGER_CD',
               defaultValue: true,
               description: 'Set to false to avoid current deploy')
        string(name: 'gCube_release_version',
               defaultValue: '',
               description: 'The number of the current gCube release. Leave blank if executed outside gCube release.')
        string(name: 'RELEASE_URL',
               defaultValue: '',
               description: 'URL to the yaml file. Leave blank if executed outside gCube release.')
    }

    stages {
        // Move every pending request file from the shared queue folder into this
        // build's private folder, so concurrent builds cannot pick them up twice.
        stage('Initialize environment') {
            steps {
                sh '''
                date=`date`;
                mkdir -p ${PENDING_DEPLOY_FOLDER}
                mkdir -p "${DEPLOY_FOLDER}"
                find "${PENDING_DEPLOY_FOLDER}" -type f -exec mv --target-directory="${DEPLOY_FOLDER}" '\'{'\'} '\'+
                '''
            }
        }

        // Timer-triggered batch: process every request queued since the last run.
        stage('Deploy from system') {
            when{
                anyOf{
                    allOf{
                        triggeredBy 'TimerTrigger'
                        environment name: 'IS_SCHEDULED', value: 'True'
                    }
                }
            }
            steps {
                echo 'Cron build enabled. \033[31;1;4mDeploy from system ongoing\033[0m'
                script {
                    echo "pipeline was triggered by ${params.TRIGGER_JOB}"
                    println("Going to deploy all the pending deployments")
                    def deployFolder="CD-${env.BUILD_NUMBER}";
                    parseDeployPendingJobs(deployFolder);
                }
            }
        }

        // Timer fired but scheduling is disabled: exit quietly.
        stage('Nothing to do ') {
            when{
                allOf{
                    triggeredBy 'TimerTrigger'
                    environment name: 'IS_SCHEDULED', value: 'False'
                }
            }
            steps {
                echo '\033[31;1;4mGoing to sleep\033[0m'
                sh ' exit 0; '
            }
        }

        // Scheduling enabled and triggered by a job/user: only queue the request
        // (appended to DEPLOY_FILE); the timer-driven run will execute it later.
        stage('New pending deploy ') {
            when{
                environment name: 'IS_SCHEDULED', value: 'True'
                anyOf{
                    triggeredBy 'BuildUpstreamCause'
                    triggeredBy 'UpstreamCause'
                    triggeredBy 'UserIdCause'
                }
                expression { params.TRIGGER_JOB != "${RELEASE_JOB}" }
            }
            steps {
                sh '''
                echo '\033[31;1;4mNew pending deploy\033[0m'
                echo "Cron build enabled. Adding ${TRIGGER_JOB} - ${TRIGGER_VERSION} to the queue"
                touch $DEPLOY_FILE;
                if grep -q \"\${TRIGGER_JOB}\" \${DEPLOY_FILE}; then
                echo "component ${TRIGGER_JOB} already added. Nothing to add."
                else
                echo "${TRIGGER_JOB},${TRIGGER_VERSION},${TRIGGER_HOST}" >> ${DEPLOY_FILE}
                fi
                '''
                // the following catch give always an error in order to return a feedback UNSTABLE to the final user since the deploy is still not performed.
                // Not sure this is the best approach
                catchError(buildResult: 'UNSTABLE', stageResult: 'UNSTABLE') {
                    sh("""
                    exit 126;
                    """)
                }
            }
        }

        // Scheduling disabled and triggered by an upstream job: deploy immediately.
        stage('Deploy from job ') {
            when{
                environment name: 'IS_SCHEDULED', value: 'False';
                expression {
                    env.TRIGGER_CD.toBoolean()
                }
                anyOf{
                    triggeredBy 'BuildUpstreamCause'
                    triggeredBy 'UpstreamCause'
                }
                expression { params.TRIGGER_JOB != "${RELEASE_JOB}" }
            }
            steps {
                script{
                    echo '\033[31;1;4mNew upstream deploy ongoing\033[0m'
                    echo "Cron build disabled. Component: ${params.TRIGGER_JOB} - ${params.TRIGGER_VERSION}"
                    ansiColor('xterm') {
                        catchError(buildResult: 'UNSTABLE', stageResult: 'FAILURE') {
                            checkup("${params.TRIGGER_JOB}", "${params.TRIGGER_VERSION}", "${params.TRIGGER_HOST}");
                            deploy("${params.TRIGGER_JOB}", "${params.TRIGGER_VERSION}", "${params.TRIGGER_HOST}");
                        }
                    }
                }
            }
        }

        // Triggered by the release pipeline (or the release manager): deploy the
        // categories listed in DEPLOY_CATEGORY from the parsed release YAML.
        stage('Deploy from gCubeRelease ') {
            when{
                anyOf{
                    triggeredBy 'BuildUpstreamCause'
                    triggeredBy 'UpstreamCause'
                    // added just for testing . To Remove.
                    triggeredBy cause: "UserIdCause", detail: "${RELEASE_MANAGER_USER}"
                }
                // we should improve this conditional check
                equals(actual: "${params.TRIGGER_JOB}", expected: "${RELEASE_JOB}")
            }
            steps {
                script{
                    if("${DEPLOY_CATEGORY}"){
                        echo '\033[31;1;4mGoing to deploy gCube Release \033[0m'
                        deployRelease("${DEPLOY_CATEGORY}", yamlConfig);
                    }else println ("CATEGORIES NOT FOUND. ENVIRONMENT NOT SET FOR RELEASE DEPLOYMENT. NOTHING TO DO");
                }
            }
        }

        // Scheduling disabled, started by hand: deploy immediately without checkup.
        stage('Deploy manually ') {
            when{
                allOf{
                    environment name: 'IS_SCHEDULED', value: 'False';
                    triggeredBy 'UserIdCause'
                }
                expression { params.TRIGGER_JOB != "${RELEASE_JOB}" }
            }
            steps {
                echo '\033[31;1;4mNew deploy ongoing manually triggered\033[0m'
                echo "Cron build disabled. Pipeline executed Manually. New deploy of ${params.TRIGGER_JOB} - ${params.TRIGGER_VERSION} ongoing"
                catchError(buildResult: 'FAILURE', stageResult: 'FAILURE') {
                    deploy("${params.TRIGGER_JOB}", "${params.TRIGGER_VERSION}", "${params.TRIGGER_HOST}");
                }
            }
        }
    }

    post {
        always {
            script {
                sh '''
                echo 'Sending report'
                '''
            }
        }
        success {
            echo 'The deploy pipeline worked!'
            emailext attachLog: true,//attachmentsPattern: "**/${ACTION_DEPLOY_FILE}.csv",
            to: 'roberto.cirillo@isti.cnr.it',
            subject: "[Jenkins-gCubeDeployer] Deploy report",
            body: "${currentBuild.fullDisplayName}. Build time: ${currentBuild.durationString}. See ${env.BUILD_URL}. "
        }
        failure {
            echo 'The deploy pipeline has failed'
            emailext attachLog: true,
            to: 'roberto.cirillo@isti.cnr.it',
            subject: "[Jenkins-gCubeDeployer] deploy ${currentBuild.fullDisplayName} failed",
            body: "Something is wrong with ${env.BUILD_URL}"
        }
    }
}
|
2023-06-15 11:27:17 +02:00
|
|
|
|
|
|
|
|
2023-06-15 12:02:55 +02:00
|
|
|
/* CHECKUP METHODS */
|
2022-05-06 15:59:59 +02:00
|
|
|
|
2022-05-09 16:49:22 +02:00
|
|
|
//Implement a new method in order to check the input parameters
|
2022-05-09 13:01:55 +02:00
|
|
|
// Guard executed before a standard (dev) deployment: only development builds
// may be deployed, so the version must end with "SNAPSHOT" or "latest".
// Any other version makes the sh step exit 1, failing the step (callers wrap
// this in catchError).
// NOTE(review): only "version" is inspected; "service" and "host" are accepted
// for signature symmetry with deploy() and are currently unused.
def checkup(String service, String version, String host){
    sh("""
    case "$version" in
    **SNAPSHOT) echo "version contains SNAPSHOT" ;;
    **latest) echo "version contains latest" ;;
    * ) echo "version without SNAPSHOT. EXIT WITHOUT DEPLOY "; exit 1;;
    esac
    """)
}
|
2022-10-05 09:59:20 +02:00
|
|
|
|
2022-10-10 13:31:52 +02:00
|
|
|
//Implement a new method in order to check the input parameters
|
|
|
|
// Guard executed before a RELEASE deployment: a release build must never ship
// a SNAPSHOT artifact, so a version ending with "SNAPSHOT" makes the sh step
// exit 1 (failing the step); "latest" and plain release versions go ahead.
// NOTE(review): only "version" is inspected; "service" is currently unused.
def checkupRelease(String service, String version){
    sh("""
    case "$version" in
    **SNAPSHOT) echo "version contains SNAPSHOT. EXIT because this is a release build" ; exit 1;;
    **latest) echo "version contains latest" ;;
    * ) echo "version without SNAPSHOT. Go ahead";;
    esac
    """)
}
|
2023-06-15 11:27:17 +02:00
|
|
|
|
|
|
|
|
|
|
|
/* DEPLOY METHODS*/
|
|
|
|
|
2022-10-06 11:59:44 +02:00
|
|
|
//parse all csv files found in the local folder and deploy the components defined inside
|
2022-10-06 14:25:22 +02:00
|
|
|
//parse all csv files found in the local folder and deploy the components defined inside
// Each CSV row is expected as: serviceName,serviceVersion,targetHost.
// Request files are deleted as they are parsed and the whole folder is removed
// before triggering the deployments, so a request is never executed twice.
def parseDeployPendingJobs( def deployFolder){
    println ("searching files in folder ${deployFolder}");
    def files = findFiles(glob: "${deployFolder}/*.csv")
    // findFiles returns an empty array (never null) when nothing matches, so the
    // original "files == null" test could never take the "Nothing to do" branch
    if (files == null || files.length == 0){
        println ("Nothing to do");
    }else{
        def serviceList = []
        for (def file : files){
            def records = readCSV file: "${file.path}"
            for (def record : records) {
                println("Processing record: "+record)
                serviceList += "${record.get(0)},${record.get(1)},${record.get(2)}";
            }
            // consume the request file
            sh "rm ${file.path}"
        }
        sh "rm -Rf ${deployFolder}"
        // deduplicate identical requests before deploying
        deployJobs(serviceList.unique())
    }
}
|
2023-06-15 12:02:55 +02:00
|
|
|
|
2022-10-13 10:24:42 +02:00
|
|
|
//Deploy jobs from an input list with the following elements: serviceName,serviceVersion,targetHost. Create a new stage for any job
|
2022-10-06 11:59:44 +02:00
|
|
|
//Deploy jobs from an input list with the following elements: serviceName,serviceVersion,targetHost. Create a new stage for any job
def deployJobs(def serviceList){
    for (def record : serviceList) {
        // declare locally: the original bare assignment leaked "service" into
        // the global script binding
        def service = record.split(",");
        stage(service[0]){
            println("Processing deploy: "+service[0])
            // mark the build UNSTABLE (not FAILED) when a single component
            // fails, so the remaining components are still deployed
            catchError(buildResult: 'UNSTABLE', stageResult: 'UNSTABLE') {
                checkup(service[0], service[1], service[2]);
                deploy(service[0], service[1], service[2]);
            }
        }
    }
}
|
2023-06-14 14:59:34 +02:00
|
|
|
|
2023-06-15 11:27:17 +02:00
|
|
|
//Run ansible deploy
|
|
|
|
//Run ansible deploy
// Deploys "service" at "version" on "host" (dev environment) by delegating to
// the deployService.sh ansible wrapper. Fails the step when the wrapper fails.
def deploy(String service, String version, String host){
    def now = new Date();
    println("Going to deploy the service "+service+" with version: "+version+" on target: "+host);
    // returnStatus (instead of returnStdout) captures the wrapper's real exit
    // code: the original logged the script's stdout as a "status" and then
    // echoed $? in a brand new shell, which is always 0
    def statusCode = sh( script: "cd ${ANSIBLE_ROOT_FOLDER}/CD;./deployService.sh $service $version $host;", returnStatus: true);
    println("Deploy ended with status: "+statusCode);
    if (statusCode != 0) {
        // propagate the failure so callers wrapped in catchError() see it
        error("Deploy of ${service} ${version} on ${host} failed with exit code ${statusCode}")
    }
}
|
2023-06-15 11:27:17 +02:00
|
|
|
|
|
|
|
|
|
|
|
/* DEPLOY RELEASE METHODS*/
|
|
|
|
|
|
|
|
// collects all the components defined inside one or more yaml categories matching categoryList and invoke the deploy
|
2022-10-13 10:53:36 +02:00
|
|
|
// collects all the components defined inside one or more yaml categories matching categoryList and invoke the deploy
// categoryList: comma-separated category names (e.g. "MainServices,Portlets").
// yaml: the parsed gCube release descriptor (loaded in the preamble).
// Side effects: for the Portlets category a portlets.json descriptor is written
// and moved into $ANSIBLE_ROOT_FOLDER before the deployments are triggered.
def deployRelease(def categoryList, def yaml){
    def isPortlet= false
    def releaseList = []
    def portlets = []
    String smartgears
    def categories=categoryList.split(',')
    yaml.gCube_release.Components.each { group_name, component_list ->
        if (categories.contains(group_name)){
            for ( def category : categories){
                if("${group_name}" == "$category"){
                    if (category == "${DISTRIBUTION_CATEGORY}"){
                        //searching smartgears-distribution version in Distribution component of defined inside the yaml file
                        def sg= component_list?.collect{ if ("$it.name" == 'smartgears-distribution'){ return "$it.version"; }}
                        smartgears=(sg !=null ) ? sg[0] : ''
                    }else if (category =="${PORTLET_CATEGORY}"){
                        // build one JSON entry per portlet; emit group_id only when
                        // present. The original compared the GString "${it.group_id}"
                        // to null, which is never true (null renders as "null"), so
                        // every portlet got a bogus "group_id": "null" field.
                        portlets += component_list?.collect{ if (it.group_id != null) return "{\"group_id\": \"$it.group_id\", \"name\": \"$it.name\", \"version\": \"$it.version\", \"extension\": \"war\"} "; else return "{\"name\": \"$it.name\", \"version\": \"$it.version\", \"extension\": \"war\"}"}
                        echo "portlets: $portlets"
                        def jsonContent= "{\"generic_portlets\": "+portlets+"}"
                        echo "json: $jsonContent"
                        writeJSON file: 'portlets.json', json: jsonContent
                        // stage the descriptor where the ansible playbooks expect it
                        // ("returnStdoutTrim" is not a documented sh parameter and was dropped)
                        def output = sh(returnStdout: true, script: "mv portlets.json $ANSIBLE_ROOT_FOLDER")
                        isPortlet= true
                    }else{
                        // any other category: collect plain "name,version" components
                        releaseList += component_list?.collect {return "$it.name,$it.version" }
                    }
                    // category matched: stop scanning the remaining categories
                    return;
                }
            }
        }
    }
    deployReleaseJobs(releaseList, smartgears, isPortlet);
}
|
2023-06-15 11:27:17 +02:00
|
|
|
|
|
|
|
//Deploy only release components, starting from a servicelist. All the component are passed as: componentName,componentVersion
|
2023-06-15 12:02:55 +02:00
|
|
|
//Deploy only release components, starting from a servicelist. All the component are passed as: componentName,componentVersion
// "smartgears" carries the smartgears-distribution version (may be null/empty);
// "isPortlet" triggers one extra generic-portlet deployment at the end.
def deployReleaseJobs(def serviceList, String smartgears, boolean isPortlet){
    if (serviceList || isPortlet){
        println("Going to deploy release components");
        for (def record : serviceList) {
            // declare locally: the original bare assignment leaked "service"
            // into the global script binding
            def service = record.split(",");
            stage(service[0]){
                println("Processing RELEASE deploy: service "+service[0]+" with version "+service[1]+" and SG "+smartgears)
                // a single failing component marks the build UNSTABLE but lets
                // the remaining components proceed
                catchError(buildResult: 'UNSTABLE', stageResult: 'FAILURE') {
                    checkupRelease(service[0], service[1]);
                    // pass the smartgears version when available, empty otherwise
                    deployReleaseJob(service[0], service[1], smartgears ?: '');
                }
            }
        }
        if (isPortlet){
            // portlets.json has already been staged by deployRelease()
            deployReleaseJob('generic-portlet', '','');
        }
    }else{
        println("Release components not found. Skip this stage");
    }
}
|
|
|
|
|
|
|
|
//Run ansible deploy related to a single service
|
|
|
|
//Run ansible deploy related to a single service
// Deploys "service" at "version" on the preproduction environment via the
// deployPreprodService.sh wrapper, optionally pinning the smartgears
// distribution version. Fails the step when the wrapper fails.
def deployReleaseJob(String service, String version, String smartgears){
    def now = new Date();
    println("Going to deploy the service "+service+" with version: "+version+" on preproduction ");
    // returnStatus captures the wrapper's real exit code: the original logged
    // stdout as a "status" and echoed $? in a new shell, which is always 0
    def statusCode = sh( script: "cd ${ANSIBLE_ROOT_FOLDER}/CD;git pull;./deployPreprodService.sh $service $version $smartgears ;", returnStatus: true);
    println("Deploy ended with status: "+statusCode);
    if (statusCode != 0) {
        // propagate the failure so callers wrapped in catchError() see it
        error("Preproduction deploy of ${service} ${version} failed with exit code ${statusCode}")
    }
}
|