Skip to content

Instantly share code, notes, and snippets.

@enriquemanuel
Created August 19, 2020 12:38
Show Gist options
  • Save enriquemanuel/0209bc1d514065477ee4b36c271bc772 to your computer and use it in GitHub Desktop.
build and test jenkins
#!/usr/bin/env groovy
import jenkins.model.Jenkins
// Variable Definitions
// Environments that receive the new AMI automatically once it passes testing.
def LOWER_ENVIRONMENTS = ['dev', 'dev1', 'test','stage', 'qa1', 'qa2', 'build'] as List
// Production environments; only planned/applied after the burn-in check stage.
def HIGHER_ENVIRONMENTS = ['prod', 'production'] as List
// Debug
//def LOWER_ENVIRONMENTS = ['dev', 'dev1', 'test','stage']
//def NEW_AMI = "ami-03e9b6ee35af699df" as String
// The variables below are assigned without 'def', so they live in the script
// binding and are shared between the helper functions and the pipeline stages.
PASM_LOCK_JOB_NAME = "pasm-dynamo-handler"
PASM_MANUAL_LOCK_NAME = "manual_lock"     // flag name: human-set maintenance lock
PASM_CANNOT_RUN_LOCK_NAME = "cannot_run"  // flag name: blocks PASM while applies run
PASM_SET_LOCK = "poll-set"                // MODE: wait for the flag to open, then take it
PASM_CHECK_LOCK = "poll-check"            // MODE: wait until the flag is clear
PASM_CLEAR_LOCK = "clear"                 // MODE: release the flag
DO_PLAN_PROD = false as Boolean           // gate for the prod planning stage
PLAN_ERROR = 0 as Integer                 // total unexpected plan changes across envs
TEMP_ERROR = 0 as Integer                 // per-env scratch counter used by loop_tf_plans
PLANS_WITH_IDS = [:] as Map               // env -> build number of a clean terraform plan
PLANS_WITH_ERRORS = [:] as Map            // env -> build number of a plan with unexpected changes
// Post a colour-coded build notification to Slack.
//
// buildStatus  - build result string; empty/null is treated as 'SUCCESS'
// fullmsg      - optional extra text, prepended to the standard job/build line
// opt_channel  - optional Slack channel; defaults to '#sre-sys-notifications'
def notifySlack(String buildStatus = '', String fullmsg = '', String opt_channel = '') {
    buildStatus = buildStatus ?: 'SUCCESS'
    // Intentionally not 'def'-scoped, matching the rest of this script's style.
    nice_msg = "${buildStatus}: `${env.JOB_NAME}` #${env.BUILD_NUMBER}:\n${env.BUILD_URL}"
    def msg = fullmsg ? "${fullmsg}\n${nice_msg}" : nice_msg
    def channel = opt_channel ?: '#sre-sys-notifications'
    // Attachment colour: green for success, yellow for unstable, red otherwise.
    def palette = ['SUCCESS': '#BDFFC3', 'UNSTABLE': '#FFFE89']
    def color = palette.containsKey(buildStatus) ? palette[buildStatus] : '#FF9FA1'
    println("\n== Send: ${msg} | to Slack == ")
    slackSend(color: color, message: msg, channel: channel)
}
// Block until the named Jenkins job has no build in progress.
//
// Fixes in this revision:
//  * Job#isBuilding() returns a boolean, but the old code compared it to the
//    String 'true', so the wait branch could never be entered.
//  * The old wait loop was 'while( ! true )' (never executes) and called an
//    undefined getJobStatus() helper; it now re-polls isBuilding() directly.
//
// jobName - full name of the job to wait on (e.g. 'pasm-flight-pipeline')
def waitForJobNotRunning(String jobName) {
    println("\n== Getting Job Status for: ${jobName} ==")
    def building = Jenkins.instance.getItemByFullName(jobName).isBuilding()
    println("\n== Current Status: ${building} ==")
    while (building) {
        // NOTE(review): the pipeline 'sleep' step takes seconds by default, so
        // sleep(5000) may pause far longer than intended — confirm the unit.
        sleep(5000)
        building = Jenkins.instance.getItemByFullName(jobName).isBuilding()
    }
}
// Push a downstream job's result (0/1) and duration (seconds) to Datadog via
// the 'dog' CLI, tagged with the environment name. Returns the 0/1 result so
// callers can decide whether to fail the pipeline.
//
// buildResult   - result string of the finished build; null or 'SUCCESS' -> 0
// buildDuration - duration in milliseconds (converted to seconds below)
// metricName    - metric suffix, posted as ami_pipeline.<metricName>.{result,time}
// envName       - value of the 'env' tag
def postPipelineMetrics(String buildResult, long buildDuration, String metricName, String envName) {
    // Ensure ~/.dogrc exists with the API key from the agent's datadog.conf so
    // the 'dog' CLI below can authenticate. Output is discarded on purpose.
    def eatThis = sh (
        script: """
        set +x
        KEY_FILE=/etc/dd-agent/datadog.conf
        DOGRC_FILE=~/.dogrc
        if [ ! -r \$KEY_FILE ]; then
        echo "\$KEY_FILE is not readable!"
        exit 1
        fi
        API_KEY="\$(grep api_key "\$KEY_FILE" | cut -f2 -d' ')"
        if ! grep -Fq "\$API_KEY" \$DOGRC_FILE; then
        echo "Making \$DOGRC_FILE with new/updated key"
        printf "[Connection]\nappkey = irrelevant\napikey = \$API_KEY" > \$DOGRC_FILE
        fi
        """
    )
    // Jenkins leaves currentBuild.result null while a build is still passing,
    // so the post { always } caller used to report a spurious failure metric.
    // Treat null the same as 'SUCCESS'.
    def intBuildResult = (buildResult == null || buildResult == 'SUCCESS') ? 0 : 1
    def secsBuildDuration = buildDuration / 1000
    sh (
        script: """
        /usr/local/bin/dog metric post 'ami_pipeline.${metricName}.result' ${intBuildResult} --tags 'env:${envName}'
        /usr/local/bin/dog metric post 'ami_pipeline.${metricName}.time' ${secsBuildDuration} --tags 'env:${envName}'
        """
    )
    return intBuildResult
}
// For every environment in 'envs': push the new AMI id into the env's SSM
// parameter (job 'update-environment-ami-id'), run 'terraform-plan', and grep
// the plan log for unexpected resource changes. Results accumulate in the
// script-binding variables:
//   PLANS_WITH_IDS    - env -> plan build number, plan showed no unexpected changes
//   PLANS_WITH_ERRORS - env -> plan build number, plan would modify resources
//   PLAN_ERROR        - total count of unexpected changes across all envs
// Fails the pipeline (via error) if either downstream job fails.
def loop_tf_plans(List envs) {
// Reset the cross-stage accumulators before each sweep.
PLAN_ERROR = 0
PLANS_WITH_IDS = [:]
PLANS_WITH_ERRORS = [:]
envs.each { item ->
println("\n== Running update-ami for ${item} ==")
// propagate:false so the metric is posted before we decide to fail.
ssmUpdateBuildInfo = build job: 'update-environment-ami-id',
parameters: [
string(name: 'ENVIRONMENT', value: String.valueOf(item))
],
propagate: false
intBuildResult = postPipelineMetrics(ssmUpdateBuildInfo.result, ssmUpdateBuildInfo.duration, 'ssm_update_env_ami', item)
if (intBuildResult == 1) {
error "ssm_update_env_ami ${item} error."
}
println("\n== Running terraform-plan for ${item} ==")
tfplanBuildInfo = build job: 'terraform-plan',
parameters: [
string(name: 'TF_SERVICES', value: String.valueOf(item))
],
propagate: false
// Copy that exact plan build's terragrunt log into ./<env>/ so we can grep it.
copyArtifacts(projectName: 'terraform-plan',
target: item,
flatten: true,
filter: 'environments/terragrunt.log',
selector: specific(String.valueOf(tfplanBuildInfo.getNumber())))
intBuildResult = postPipelineMetrics(tfplanBuildInfo.result, tfplanBuildInfo.duration, 'terraform_plan', item)
if (intBuildResult == 1) {
error "terraform_plan ${item} error."
}
// Check for errors:
// Debug first
TEMP_ERROR = 0
println("\n === TMP ERROR: ${TEMP_ERROR}")
sh """
ls -lsa
ls -lsa ${item}
"""
// See if we see any error
// Heuristic: count 'arn' lines appearing near ' will be ' in the plan log,
// ignoring the expected autoscaling / launch_configuration churn caused by
// the AMI change itself. Non-zero means the plan touches something else.
TEMP_ERROR = sh(script: "grep -ri -A2 ' will be ' ${item}/ | grep -v 'autoscaling' | grep -v 'launch_configuration' | grep -A2 'resource' | awk -v count=0 '/arn/ {count++} END {print count}'",
returnStdout: true).trim() as Integer
// Print it
println("\n === ${item} with ERROR count: ${TEMP_ERROR} ===")
// Set it in the larger variable
PLAN_ERROR = (PLAN_ERROR) + TEMP_ERROR
// display why we aren't doing this plan
if (TEMP_ERROR > 0) {
PLANS_WITH_ERRORS[item] = String.valueOf(tfplanBuildInfo.getNumber())
// this next line needs debugging
//sh(script: "grep -ri -A1- ' will be ' ${item}/ | grep -v 'autoscaling' | grep -v 'launch_configuration' | grep -A5 'resource'",
// returnStdout: true)
}
if (TEMP_ERROR == 0) {
PLANS_WITH_IDS[item] = String.valueOf(tfplanBuildInfo.getNumber())
}
}
}
// Take the PASM "cannot_run" lock before any terraform apply, so the PASM
// flight pipeline does not start mid-rollout:
//   1. wait until cannot_run is clear, 2. grab it, 3. wait until the manual
//   lock is clear, 4. wait for any in-flight PASM run to finish.
def handle_pasm_locking(){
    // Local helper: one invocation of the lock-handler job per MODE/FLAG pair.
    def pasmLock = { String mode, String flag ->
        build job: PASM_LOCK_JOB_NAME,
            parameters: [
                string(name: 'MODE', value: mode),
                string(name: 'FLAG', value: flag)
            ]
    }
    println("\n== Checking to ensure cannot run lock is not engaged before moving forward... ==")
    pasmLock(PASM_CHECK_LOCK, PASM_CANNOT_RUN_LOCK_NAME)
    println("\n== Waiting for PASM pipeline lock to be open, then grabbing it before running applies... ==")
    pasmLock(PASM_SET_LOCK, PASM_CANNOT_RUN_LOCK_NAME)
    println("\n== Checking to ensure manual lock is not engaged before moving forward... ==")
    pasmLock(PASM_CHECK_LOCK, PASM_MANUAL_LOCK_NAME)
    println("\n== Checking if PASM is running and if yes, waiting... ==")
    waitForJobNotRunning('pasm-flight-pipeline')
}
// Release the "cannot_run" flag taken by handle_pasm_locking() so PASM
// flights may run again.
def pasm_unlocking(){
    println("\n== Release Cannot Run Lock ==")
    def unlockParams = [
        string(name: 'MODE', value: PASM_CLEAR_LOCK),
        string(name: 'FLAG', value: PASM_CANNOT_RUN_LOCK_NAME)
    ]
    build(job: PASM_LOCK_JOB_NAME, parameters: unlockParams)
}
// Run the 'terraform-apply' job for each environment, pointing it at the plan
// build number recorded by loop_tf_plans.
//
// Fix: this function was declared as (List envs, Map env_plan), but both call
// sites in the pipeline invoke it with a single argument, loop_tf_apply(PLANS_WITH_IDS),
// which would throw MissingMethodException at runtime. The signature now also
// accepts just the env->plan Map (its keys become the environment list) while
// remaining compatible with the original two-argument form.
//
// envs     - List of environment names, OR the env->plan Map itself
// env_plan - Map of env -> plan build number (optional when envs is the Map)
def loop_tf_apply(envs, Map env_plan = null) {
    def plans = (env_plan != null) ? env_plan : envs
    def targets = (env_plan != null) ? envs : plans.keySet()
    targets.each { item ->
        println("\n== Applying for ${item} - job id: ${plans[item]} ==")
        // 'propagate' defaults to true, so a failed apply fails this pipeline.
        build job: 'terraform-apply',
            parameters: [
                string(name: 'TF_SERVICES', value: String.valueOf(item)),
                string(name: 'sha1', value: 'master'),
                string(name: 'PLAN_BUILD_NUMBER', value: String.valueOf(plans[item]))
            ]
    }
}
// Declarative pipeline: bake a new base AMI with Packer, run chef integration
// tests against it, roll it through the lower environments with terraform,
// then promote a burned-in AMI to the production environments.
pipeline {
agent { label 'docker' }
stages {
stage('Pre Work: Workspace Cleanup') {
steps {
// Start clean so artifacts copied by earlier runs cannot leak in.
cleanWs()
}
}
stage('Run Packer') {
steps {
script {
println("\n== Running Packer to create new AMI... ==")
// propagate:false lets us post the metric before failing ourselves.
packerJob = build job: 'packer-base', propagate: false
copyArtifacts(projectName: 'packer-base',
selector: specific(String.valueOf(packerJob.getNumber())))
// Extract the new us-east-1 AMI id from the packer output artifact.
NEW_AMI = sh (
script: "awk '/us-east-1:/{print \$2}' output.txt",
returnStdout: true
).trim()
intBuildResult = postPipelineMetrics(packerJob.result, packerJob.duration, 'packer_base', 'base')
if (intBuildResult == 1) {
error "Error: Run Packer | Job: packer-base"
}
}
}
}
stage('Test New AMI') {
steps {
script {
println("\n== Running integration tests against new AMI... ==")
// Surface the AMI id in the build's display name for quick scanning.
currentBuild.displayName = "#${currentBuild.number}.${NEW_AMI}"
chefIntegrationSelected = build job: 'chef-integration-selected',
parameters: [
string(name: 'KITCHEN_AMI', value: String.valueOf(NEW_AMI)),
booleanParam(name: 'RUN_ALL', value: true)
],
propagate: false
intBuildResult = postPipelineMetrics(chefIntegrationSelected.result, chefIntegrationSelected.duration, 'chef_integration_test', 'base')
if (intBuildResult == 1) {
error "Error: Test New AMI | Job: chef_integration_test"
}
}
}
}
stage('Update Main TF AMI') {
steps {
script {
println("\n== Saving new AMI.. ==")
// Record the tested AMI id in the main terraform AMI parameters.
updateAMI = build job: 'update-main-terraform-ami-ids',
parameters: [
string(name: 'AMI', value: String.valueOf(NEW_AMI)),
],
propagate: false
intBuildResult = postPipelineMetrics(updateAMI.result, updateAMI.duration, 'update_main_ami_tf', 'base')
if (intBuildResult == 1) {
error "Error: Update Main TF AMI | Job: update-main-terraform-ami-ids"
}
}
}
}
stage('SSM Update + TF Planning Lower Envs') {
steps {
script {
// Plans every lower env; fills PLAN_ERROR / PLANS_WITH_IDS / PLANS_WITH_ERRORS.
loop_tf_plans(LOWER_ENVIRONMENTS)
println("Plan Error: ${PLAN_ERROR}")
}
}
}
stage ('TF Applying Lower Envs') {
when {
// Only auto-apply when no plan showed unexpected resource changes.
expression { PLAN_ERROR == 0 }
}
steps {
// NOTE(review): two things to confirm at runtime here — plain method calls
// inside 'steps' without a script { } wrapper, and loop_tf_apply being
// invoked with a single Map although it is declared with two parameters.
handle_pasm_locking()
loop_tf_apply(PLANS_WITH_IDS)
pasm_unlocking()
}
}
stage('Show the Envs that wont be applied automatically') {
when {
expression { PLAN_ERROR > 0 }
}
steps {
script {
// When any env's plan had errors nothing is auto-applied, so list the
// envs whose plans WERE clean for manual application. (PLANS_WITH_ERRORS
// is populated by loop_tf_plans but not printed here.)
for (element in PLANS_WITH_IDS) {
echo "Not Applying Env:${element.key} Plan ID:${element.value}"
}
}
}
}
stage('Get Valid AMI for Prod') {
steps {
script {
DO_PLAN_PROD = false
println("\n== We need to validate the AMI has been in lower env for 7 days. ")
println("So we are going to iterate over the previous-image until we get one that has been more than 7 days.")
println("This saves us if this job is run manually or if people want to push an image quicker to prod which hasn't been burned in non-prod")
// Walk back through SSM versions of /terraform/ami/base/previous-image
// until one is old enough, then overwrite previous-image with it.
// NOTE(review): the loop exits at '-lt 5' (5 days) although the log text
// above says 7 days — confirm which burn-in period is intended.
sh """
#!/bin/bash
# get ami to be inserted into prod
difference=0
ami_param_version=\$(aws ssm get-parameter --region us-east-1 --name /terraform/ami/base/previous-image | jq -r '.Parameter.Version')
while [ \$difference -lt 5 ]; do
new_ami=\$(aws ssm get-parameter --region us-east-1 --name /terraform/ami/base/previous-image:\$ami_param_version | jq -r '.Parameter.Value')
new_ami_date=\$(aws ec2 describe-images --region us-east-1 --image-ids \$new_ami | jq '.Images[].CreationDate' | awk -F'T' '{print \$1}' | awk -F'"' '{print \$2}')
echo "Image: \$new_ami was created on \$new_ami_date"
# get an older one and try again
ami_param_version=\$((ami_param_version-1))
# comparison time
now=\$(date +%s)
past=\$(date +%s --date \$new_ami_date)
seconds_difference=\$((\$now-\$past))
difference=\$((\$seconds_difference/(3600*24)))
done
echo "using \$new_ami created on \$new_ami_date"
echo "Updating Env for Prod and Production with the one we should be using"
aws ssm put-parameter --region us-east-1 --name /terraform/ami/base/previous-image --value \$new_ami --type String --overwrite
"""
DO_PLAN_PROD = true
}
}
}
stage('SSM Update + TF Planning Higher Env'){
when {
expression { DO_PLAN_PROD == true }
}
steps {
script {
// Reset the accumulators before planning prod/production.
PLAN_ERROR = 0
TEMP_ERROR = 0
PLANS_WITH_IDS = [:]
loop_tf_plans(HIGHER_ENVIRONMENTS)
println("Plan Error: ${PLAN_ERROR}")
}
}
}
stage ('TF Applying Higher Envs') {
when {
expression { PLAN_ERROR == 0 }
}
steps {
// NOTE(review): same caveats as the lower-env apply stage above.
handle_pasm_locking()
loop_tf_apply(PLANS_WITH_IDS)
pasm_unlocking()
}
}
}
post {
success {
script {
println("\n== AMI Updated correctly to all envs ==")
def msg = "packer-base-pipeline completed correctly and updated all envs"
notifySlack(currentBuild.result, msg, '#sre-terraform-ops')
notifySlack(currentBuild.result)
}
}
failure {
script {
println("\n== AMI Pipeline failed. Please check the error above ==")
def url = String.valueOf(currentBuild.absoluteUrl)
def msg = "packer-base-pipeline failed to execute. Please review ${url} to understand any actions needed."
notifySlack(currentBuild.result, msg, '#sre-terraform-ops')
notifySlack(currentBuild.result)
}
}
always {
cleanWs()
// NOTE(review): currentBuild.result can still be null here on success —
// verify postPipelineMetrics treats null as success. Also note this metric
// name uses dashes while every other call uses underscores.
postPipelineMetrics(currentBuild.result, currentBuild.duration, 'ami-pipeline', 'base')
// NOTE(review): unlock is commented out, so a failure between
// handle_pasm_locking() and pasm_unlocking() leaves the cannot_run lock set.
//pasm_unlocking()
}
}
}
Sign up for free to join this conversation on GitHub. Already have an account? Sign in to comment