2022-04-22 15:38:43 +02:00
|
|
|
#!groovy
|
2022-09-16 10:37:54 +02:00
|
|
|
import groovy.io.FileType.*
|
2022-09-09 12:35:22 +02:00
|
|
|
import static groovy.io.FileType.FILES
|
2022-04-22 16:35:54 +02:00
|
|
|
/**
|
|
|
|
* Deploy components to the D4Science Infrastructure (dev-only)
|
|
|
|
*
|
2022-04-22 17:02:12 +02:00
|
|
|
* Roberto Cirillo (ISTI-CNR)
|
2022-04-22 16:35:54 +02:00
|
|
|
*/
|
2022-04-22 15:38:43 +02:00
|
|
|
|
2022-04-22 11:41:05 +02:00
|
|
|
// related jenkins job: https://jenkins.d4science.org/job/gCubeDeployer/
|
|
|
|
|
2022-04-26 15:36:19 +02:00
|
|
|
// related jenkins job: https://jenkins.d4science.org/job/gCubeDeployer/

// base folder of the Jenkins agent running this pipeline
def agent_root_folder = '/var/lib/jenkins'
//def agent_deploy_filename = 'deploy.${env.BUILD_NUMBER}.csv'
//def agent_deploy_backup_filename = 'deploy.${env.BUILD_NUMBER}.bck'

// deploy list may be overridden by the job parameter `deployFile`;
// backupList is declared here for symmetry but is not assigned in this script
def deployList
def backupList
if (params.deployFile) {
    println "Using custom deploy file"
    deployList = params.deployFile
}

//locate the targetHost file
//String targetHostURL = "https://code-repo.d4science.org/gCubeCI/gCubeDeployer/raw/branch/master/open/gcube-${gCube_release_version}.yaml"
//if (verbose)
// println "Querying ${targetHostURL}"
//load the release file
//def text = targetHostURL.toURL().getText()
|
|
|
|
|
|
|
|
|
|
|
|
|
2022-04-15 14:41:39 +02:00
|
|
|
pipeline {
    // run on an agent able to execute the ansible playbooks
    agent {
        label 'ansible'
    }
    options {
        ansiColor('xterm')
    }
    triggers {
        // every fifteen minutes (perhaps at :07, :22, :37, :52)
        cron('H/30 * * * *')
        // once in every two hours slot between 9 AM and 5 PM every weekday (perhaps at 10:38 AM, 12:38 PM, 2:38 PM, 4:38 PM)
        // cron('H H(9-16)/6 * * 1-5')
    }
    environment {
        AGENT_ROOT_FOLDER = "${agent_root_folder}"
        DEPLOY_ROOT_FOLDER = "${agent_root_folder}/ansible-repos/ansible-playbooks/d4science-ghn-cluster"
        DEPLOY_FILE_ROOT_FOLDER = "${agent_root_folder}/CD/"
        PIPELINE_BUILD_NUMBER = "${env.BUILD_NUMBER}"
        // per-build deploy/backup csv files accumulated between cron runs
        DEPLOY_FILE = "${DEPLOY_FILE_ROOT_FOLDER}deploy.${PIPELINE_BUILD_NUMBER}.csv"
        BACKUP_FILE = "${DEPLOY_FILE_ROOT_FOLDER}deploy.${PIPELINE_BUILD_NUMBER}.bck"
        TRIGGER_JOB = "${params.TRIGGER_JOB}"
        TRIGGER_VERSION = "${params.TRIGGER_VERSION}"
        TRIGGER_HOST = "${params.TRIGGER_HOST}"
        TRIGGER_CD = "${params.TRIGGER_CD}"
    }
    parameters {
        string(name: 'TRIGGER_JOB',
               defaultValue: '',
               description: 'Name of the service or job to deploy')
        string(name: 'TRIGGER_VERSION',
               defaultValue: '',
               description: 'service version to deploy')
        string(name: 'TRIGGER_HOST',
               defaultValue: '',
               description: 'Target Host / Host group where deploy the service')
        booleanParam(name: 'TRIGGER_CD',
               defaultValue: true,
               description: 'Set to false to avoid current deploy')
        string(name: 'gCube_release_version',
               defaultValue: '',
               description: 'Leave blank if executed outside gCube release')
    }
    stages {
        stage('Initialize environment') {
            steps {
                sh '''
                date=`date`;
                mkdir -p ${DEPLOY_FILE_ROOT_FOLDER}
                '''
            }
        }
        // timer-triggered run with cron mode on: drain the accumulated deploy files
        stage('Deploy from system') {
            when {
                allOf {
                    triggeredBy 'TimerTrigger'
                    // maybe we can add a new condition in order to consider the manual execution of this pipeline
                    environment name: 'IS_CRON_TMP', value: 'True'
                }
            }
            steps {
                echo 'Cron build enabled. Deploy from system ongoing'
                script {
                    // parse the report and extract the data
                    // def components = getComponentsFromCSV(deployList)
                    println "Going to check the deploy file in ${DEPLOY_FILE_ROOT_FOLDER}"
                    def components = getComponentsFromCSVDir2("${DEPLOY_FILE_ROOT_FOLDER}")
                    if (components.size() > 0) {
                        // deduplicate before deploying
                        def componentSet = components.toSet()
                        for (component in componentSet) {
                            stage(component) {
                                println "Deploy on going of component: $component"
                                catchError(buildResult: 'UNSTABLE', stageResult: 'UNSTABLE') {
                                    checkup("${component.name}", "${component.version}", "${component.host}");
                                    deploy("${component.name}", "${component.version}", "${component.host}");
                                }
                            }
                        }
                    } else {
                        println "No components found"
                    }
                }
                cleanup(DEPLOY_FILE, BACKUP_FILE);
            }
        }
        stage('Nothing to do ') {
            when {
                anyOf {
                    allOf {
                        triggeredBy 'TimerTrigger'
                        environment name: 'IS_CRON_TMP', value: 'False'
                    }
                }
            }
            steps {
                echo 'Do Nothing: cron build disabled'
            }
        }
        // cron mode on and an upstream/user trigger: queue the deploy instead of running it
        stage('New pending deploy ') {
            when {
                environment name: 'IS_CRON_TMP', value: 'True'
                anyOf {
                    triggeredBy 'BuildUpstreamCause'
                    triggeredBy 'UpstreamCause'
                    triggeredBy 'UserIdCause'
                }
            }
            steps {
                sh '''
                echo "Cron build enabled. New deploy of ${TRIGGER_JOB} - ${TRIGGER_VERSION} will be added to the deploy file"
                touch $DEPLOY_FILE;
                if grep -q \"\${TRIGGER_JOB}\" \${DEPLOY_FILE}; then
                echo "component ${TRIGGER_JOB} already added. Nothing to add."
                else
                echo "${TRIGGER_JOB},${TRIGGER_VERSION},${TRIGGER_HOST}" >> ${DEPLOY_FILE}
                fi
                '''
                // the following catch give always an error in order to return a feedback UNSTABLE to the final user since the deploy is still not performed
                catchError(buildResult: 'UNSTABLE', stageResult: 'UNSTABLE') {
                    sh("""
                    exit 126;
                    """)
                }
            }
        }
        // cron mode off and triggered from an upstream build: deploy immediately
        stage('Deploy from job ') {
            when {
                environment name: 'IS_CRON_TMP', value: 'False';
                expression {
                    env.TRIGGER_CD.toBoolean()
                }
                anyOf {
                    triggeredBy 'BuildUpstreamCause'
                    triggeredBy 'UpstreamCause'
                }
            }
            steps {
                echo "Cron build disabled. New deploy of ${params.TRIGGER_JOB} - ${params.TRIGGER_VERSION} ongoing"
                ansiColor("xterm") {
                    catchError(buildResult: 'FAILURE', stageResult: 'FAILURE') {
                        checkup("${params.TRIGGER_JOB}", "${params.TRIGGER_VERSION}", "${params.TRIGGER_HOST}");
                        deploy("${params.TRIGGER_JOB}", "${params.TRIGGER_VERSION}", "${params.TRIGGER_HOST}");
                    }
                }
            }
        }
        // cron mode off and started by a user: deploy immediately
        stage('Deploy manually ') {
            when {
                allOf {
                    environment name: 'IS_CRON_TMP', value: 'False';
                    triggeredBy 'UserIdCause'
                }
            }
            steps {
                echo "Cron build disabled. Pipeline executed Manually. New deploy of ${params.TRIGGER_JOB} - ${params.TRIGGER_VERSION} ongoing"
                ansiColor("xterm") {
                    catchError(buildResult: 'FAILURE', stageResult: 'FAILURE') {
                        checkup("${params.TRIGGER_JOB}", "${params.TRIGGER_VERSION}", "${params.TRIGGER_HOST}");
                        deploy("${params.TRIGGER_JOB}", "${params.TRIGGER_VERSION}", "${params.TRIGGER_HOST}");
                    }
                }
            }
        }
    }
    post {
        always {
            script {
                sh '''
                echo 'Sending report'
                '''
                //cat ./${ACTION_DEPLOY_FILE}.csv
            }
        }
        success {
            echo 'The deploy pipeline worked!'
            emailext attachLog: true,//attachmentsPattern: "**/${ACTION_DEPLOY_FILE}.csv",
                to: 'roberto.cirillo@isti.cnr.it',
                subject: "[Jenkins-gCubeDeployer] Deploy report",
                body: "${currentBuild.fullDisplayName}. Build time: ${currentBuild.durationString}. See ${env.BUILD_URL}. "
        }
        failure {
            echo 'The deploy pipeline has failed'
            emailext attachLog: true,
                to: 'roberto.cirillo@isti.cnr.it',
                subject: "[Jenkins-gCubeDeployer] deploy ${currentBuild.fullDisplayName} failed",
                body: "Something is wrong with ${env.BUILD_URL}"
        }
    }
}
|
|
|
|
|
2022-04-26 17:05:30 +02:00
|
|
|
|
2022-05-10 15:28:02 +02:00
|
|
|
//parse a csv file formatted in this way: ComponentName,ComponentVersion, ComponentHost
/**
 * Reads the deploy file (via the Jenkins readFile step) and returns one map
 * per component line: [name: ..., version: ..., host: ...].
 * Lines starting with '#', blank lines and rows with fewer than 3 fields
 * are skipped (the original crashed on blank/short rows with an index error).
 *
 * @param deployList path of the csv deploy file, relative to the workspace
 * @return list of component maps; empty when the file is missing or empty
 */
def getComponentsFromCSV(def deployList) {
    def components = []
    if (fileExists("${deployList}")) {
        echo 'file found'
        // fix: each{} on a String[] yields a single element per call, so a
        // two-parameter closure (line, count) just bound count to null
        readFile("${deployList}").split('\n').each { line ->
            // skip comments and blank lines
            if (line.startsWith('#') || !line.trim())
                return
            def fields = line.split(',')
            // skip malformed rows that do not carry name,version,host
            if (fields.length < 3)
                return
            components.add([
                name   : fields[0],
                version: fields[1],
                host   : fields[2]
            ])
        }
    } else {
        echo ' File Not found. Failing.'
    }
    return components
}
|
|
|
|
|
2022-05-11 10:02:40 +02:00
|
|
|
//launch ansible deploy
/**
 * Runs the ansible deploy wrapper script for a single service.
 *
 * @param service the service/job name understood by deployService.sh
 * @param version the version to deploy
 * @param host    the target host / host group
 */
def deploy(String service, String version, String host){
    println("Going to deploy the service "+service+" with version: "+version+" on target: "+host);
    // fix: use returnStatus (not returnStdout) so statusCode really is the
    // script's exit code; the original printed the captured stdout as "status"
    // and then echoed $? from a *fresh* shell, which was always 0.
    def statusCode = sh( script: "cd $DEPLOY_ROOT_FOLDER;./deployService.sh $service $version $host;", returnStatus: true);
    println("Deploy ended with status: "+statusCode);
}
|
2022-05-06 15:59:59 +02:00
|
|
|
|
2022-05-09 16:49:22 +02:00
|
|
|
//Implement a new method in order to check the input parameters
// Guard step run before deploy(): only versions ending in "SNAPSHOT" or
// "latest" are allowed on this (dev-only) pipeline; any other version makes
// the shell exit 1, which fails the enclosing catchError block.
// NOTE(review): the `host` parameter is accepted but not checked here.
def checkup(String service, String version, String host){
    sh("""
    case "$version" in
    **SNAPSHOT) echo "version contains SNAPSHOT" ;;
    **latest) echo "version contains latest" ;;
    * ) echo "version without SNAPSHOT. EXIT WITHOUT DEPLOY "; exit 1;;
    esac
    """)
}
|
2022-05-11 10:02:40 +02:00
|
|
|
//clean and update the local deploy file
// Rotates the current deploy file to its backup name: removes a stale backup
// if present, then renames DEPLOY_FILE -> BACKUP_FILE.
// NOTE(review): the body is a single-quoted Groovy string, so ${DEPLOY_FILE}
// and ${BACKUP_FILE} are expanded by the *shell* from the pipeline
// environment, not from the Groovy parameters below — the parameters are
// effectively unused. Works because env defines the same names; confirm intended.
def cleanup(def DEPLOY_FILE, def BACKUP_FILE){
    sh '''
    echo "cleanup $DEPLOY_FILE";
    if [ -f ${DEPLOY_FILE} ]; then
        if [ -f ${BACKUP_FILE} ]; then
            echo "backup found: ${BACKUP_FILE} going to replace it";
            rm ${BACKUP_FILE};
        fi
        mv ${DEPLOY_FILE} ${BACKUP_FILE};
    else
        echo "deploy file empty"
    fi
    '''
}
|
|
|
|
|
2022-08-04 10:25:49 +02:00
|
|
|
//experimental test
/**
 * Recursively scans dirPath for *.csv deploy files and parses every
 * non-comment line into [name: ..., version: ..., host: ...] maps.
 * Runs outside the CPS sandbox (@NonCPS), so it uses plain java.io.File
 * access instead of the Jenkins readFile step (which is a CPS step and
 * cannot be called from @NonCPS code).
 *
 * @param dirPath directory to scan
 * @return list of component maps; empty when the folder is missing/empty
 */
@NonCPS
def getComponentsFromCSVDir(def dirPath){
    def components = []                 // fix: was never declared (MissingPropertyException)
    File folder = new File(dirPath)
    println ("folder ready ");
    if (folder.exists()){               // fix: a non-null File is always truthy; check existence
        println (" processing folder "+folder);
        // FILES comes from the static import of groovy.io.FileType.FILES;
        // the original qualified FileType.FILES, which is not in scope
        folder.eachFileRecurse (FILES) { file ->
            // check it if the file ends with a .csv extension
            println("checking file: "+file);
            if (file.name.endsWith(".csv")) {
                println ("Processing file ");
                file.text.split('\n').each { line ->
                    // skip comments and blank lines
                    if (line.startsWith('#') || !line.trim())
                        return
                    def fields = line.split(',')
                    // skip malformed rows
                    if (fields.length < 3)
                        return
                    components.add([
                        name   : fields[0],
                        version: fields[1],
                        host   : fields[2]
                    ])
                }
            }
            println ("removing current deploy file: "+file.name);
            // remove the file here if possible
            // file.delete();
        }
    }
    return components
}
|
|
|
|
|
2022-09-16 11:36:11 +02:00
|
|
|
/**
 * Recursively scans dirPath for *.csv deploy files, parses every non-comment
 * line into [name: ..., version: ..., host: ...] maps, then deletes the
 * processed csv files (deletion is deferred to after the scan so the
 * directory is not mutated while it is being iterated).
 *
 * @param dirPath directory to scan
 * @return list of component maps; empty when nothing was found
 */
@NonCPS
def getComponentsFromCSVDir2(def dirPath){
    def components = []                 // fix: was never declared (MissingPropertyException)
    def list = []
    def folder = new File(dirPath)
    println ("folder ready ");
    if (folder.exists()){               // fix: a non-null File is always truthy; check existence
        println (" processing folder "+folder);
        // fix: explicit closure parameter — the original referenced an
        // undefined 'file' inside an implicit-it closure, and qualified
        // FileType.FILES which is not in scope (only FILES is imported)
        folder.eachFileRecurse (FILES) { file ->
            if (file.name.endsWith(".csv")) {
                println ("Processing file "+file);
                list << file
                // plain java.io.File access: the Jenkins readFile step
                // cannot run inside @NonCPS code
                file.text.split('\n').each { line ->
                    // skip comments and blank lines
                    if (line.startsWith('#') || !line.trim())
                        return
                    def fields = line.split(',')
                    // skip malformed rows
                    if (fields.length < 3)
                        return
                    components.add([
                        name   : fields[0],
                        version: fields[1],
                        host   : fields[2]
                    ])
                }
            }
            println ("removing current deploy file: "+file.name);
        }
    }
    println "files to remove:";
    list.each {
        println ("removing"+it.path);
        it.delete();
    }
    return components
}
|
|
|
|
|
2022-09-16 11:36:11 +02:00
|
|
|
/**
 * Diagnostic scan: prints every *.csv file found under dirPath.
 *
 * @param dirPath directory to scan; fix: the parameter is now honored — the
 *        original ignored it and always scanned the hard-coded CD folder,
 *        which is kept as a fallback for null/empty input (backward compatible)
 * @return the list of matching csv files (the original returned nothing)
 */
@NonCPS
def getComponentsFromCSVDir3(def dirPath){
    println "checking current folder"
    println "checking CD folder";
    def startDir = new File(dirPath ?: "/var/lib/jenkins/CD")
    def found = []
    startDir.eachFileRecurse(FILES) {
        if (it.name.endsWith('.csv')) {
            println it
            found << it
        }
    }
    return found
}
|
|
|
|
|
|
|
|
|
2022-09-09 14:35:25 +02:00
|
|
|
|
|
|
|
|