import jenkins.automation.builders.BaseJobBuilder

new BaseJobBuilder(
    name: "operations-jira-restart",
    description: "Restarts dev Jira. To be used when Jira is not responding.",
    emails: ["foo@example.com", "bar@example.com"]
).build(this).with {
    scm {
        git("https://[redacted]/SoftwareDevelopment/operations-jenkins-automation.git", "master")
    }
    steps {
        shell("fab restart_jira")
    }
}
import jenkins.automation.builders.BddSecurityJobBuilder

new BddSecurityJobBuilder(
    name: "example-bdd-security-tests",
    description: "Sample BDD security job",
    baseUrl: "http://some-internal-url",
    bddSecurityRepo: "https://github.com/cfpb/some-repo"
).build(this)
job {
    configure { project ->
        project / builders / 'com.checkmarx.jenkins.CxScanBuilder' {
            'serverUrl'('...')
            'username'('...')
            'password'('...')
            //.....
        }
    }
}
import jenkins.automation.builders.*
import jenkins.automation.utils.ScmUtils

def emails = "foo@example.com"
def repos = [
    [name: 'qu', url: "https://github.com/cfpb/qu"],
    [name: 'handbook', url: "https://internal-thing-here"]
]

new SiteMonitorJobBuilder(
    name: "jenkins-outbound-connectivity-check",
    description: "Ensure Jenkins can reach out to the world. Uses a mix of git pulls and HTTP requests. We need this because our network has been known to act a fool from time to time.",
    emails: emails,
    cronSchedule: "@hourly",
    urls: ["http://google.com", "https://other-stuff-here"]
).build(this).with {
    multiscm {
        ScmUtils.project_repos(delegate, repos, false)
    }
}
import jenkins.automation.builders.*
import jenkins.automation.utils.EnvironmentUtils

// the "JAC_ENVIRONMENT" global variable is set in all of our Jenkinses, to either "DEV", "STAGE", or "PROD"
def env = EnvironmentUtils.getInstance("${JAC_ENVIRONMENT}")

if (env.isDev()) {
    new BaseJobBuilder(
        name: "a-sample-job",
        ....
    ).build(this)
}
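// For illustration only: a minimal sketch of the kind of helper EnvironmentUtils is assumed
// to be. Only getInstance(), isDev(), and getEnv() are taken from the snippets above and
// below; the real class lives in cfpb/jenkins-automation and may differ in detail.
class EnvironmentUtilsSketch {
    private final String environment

    private EnvironmentUtilsSketch(String environment) {
        this.environment = environment.toUpperCase()
    }

    // mirrors EnvironmentUtils.getInstance("${JAC_ENVIRONMENT}")
    static EnvironmentUtilsSketch getInstance(String environment) {
        new EnvironmentUtilsSketch(environment)
    }

    boolean isDev() { environment == "DEV" }

    // "DEV" -> "dev"; used as the key when indexing ConfigSlurper results, e.g. config."${env}".globals
    String getEnv() { environment.toLowerCase() }
}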
// config/global-env-vars-config.groovy -- per-host (aws/azure/heroku) and per-environment
// (dev/prod) global variables, consumed by the seed-job script below
environments {
    aws {
        dev {
            globals = """
foo=bar
"""
        }
        prod {
            globals = """
foo=bat
"""
        }
        all {}
    }
    azure { ... same but different }
    heroku { ... same but different }
}
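// Sketch of how the config above is consumed (assumed, based on the seed-job script further
// down): ConfigSlurper's constructor argument selects the matching block under
// `environments { ... }`, so parsing with "aws" exposes the aws sub-tree, and the
// per-environment values are then read off it by key.
def slurped = new ConfigSlurper("aws").parse('''
environments {
    aws {
        dev  { globals = "foo=bar" }
        prod { globals = "foo=bat" }
    }
}
''')
assert slurped.dev.globals == "foo=bar"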
// config/registered-projects-config.groovy -- the repos whose seed jobs this Jenkins should
// generate, again split by host and environment; "all" entries apply to every environment
environments {
    aws {
        dev {
            reposToAutomate = [
                [projectName: "ccdb-jenkins-automation", url: "https://git-internal/ccdb-jenkins-automation"],
                [projectName: "ghe-jenkins-automation", url: "https://git-internal/ops/ghe-jenkins-automation", viewName: "Ops", viewRegex: "ops-.*", emails: "webops@example.com"],
            ]
        }
        prod {
            reposToAutomate = [...]
        }
        all {
            reposToAutomate = [
                [projectName: "jks", url: "https://internal-git/jks/jenkins-management-automation", viewName: "jks", viewRegex: "jks-.*", emails: "webops@example.com"],
            ]
        }
    }
    azure { ... same but different }
    heroku { ... same but different }
}
import jenkins.automation.utils.ScmUtils
import jenkins.automation.utils.EnvironmentUtils

def registrationConfigFile = readFileFromWorkspace('config/registered-projects-config.groovy')
def environmentVarsConfigFile = readFileFromWorkspace('config/global-env-vars-config.groovy')

def defaultEmail = "foo@example.com"
def host = "${JAC_HOST}"
def env = EnvironmentUtils.getInstance("${JAC_ENVIRONMENT}").getEnv()

def registrationConfig = new ConfigSlurper(host.toLowerCase()).parse(registrationConfigFile)
def environmentVarsConfig = new ConfigSlurper(host.toLowerCase()).parse(environmentVarsConfigFile)

// Write the environment-specific global variables out to a properties file under JENKINS_HOME
def globalVariables = environmentVarsConfig."${env}".globals
def path = "${JENKINS_HOME}/globals.properties"
File globalConfigs = new File(path)
if (globalVariables) {
    globalConfigs.write globalVariables
}

// Combine the environment-specific repos with the repos shared by all environments,
// then generate one seed job (and an optional list view) per registered repo
def sharedReposToAutomate = registrationConfig."all".reposToAutomate
def reposToAutomate = registrationConfig."${env}".reposToAutomate
reposToAutomate = reposToAutomate.plus(sharedReposToAutomate)

reposToAutomate.each { project ->
    // every seed job also checks out the shared jenkins-automation library into an 'automation' sub-directory
    def reposToInclude = [
        project,
        [name: "automation", url: 'https://github.com/cfpb/jenkins-automation', sub_directory: 'automation']
    ]

    if (project.viewName) {
        def viewRegex = project.viewRegex ?: "${project.projectName}-.*"
        listView(project.viewName) {
            columns {
                status()
                weather()
                name()
                lastSuccess()
                lastFailure()
                lastDuration()
                buildButton()
            }
            filterBuildQueue()
            filterExecutors()
            jobs {
                regex(/(?i)(${viewRegex})/)
            }
        }
    }

    job(project.projectName + '-seed-job') {
        if (project.disabled) {
            disabled()
        }
        logRotator {
            daysToKeep(90)
        }
        multiscm {
            ScmUtils.project_repos(delegate, reposToInclude, false)
        }
        triggers {
            scm 'H/5 * * * *'
        }
        configure { node ->
            node / publishers << 'hudson.plugins.logparser.LogParserPublisher' {
                unstableOnWarning(true)
                failBuildOnError(false)
                useProjectRule(true)
                projectRulePath('automation/log-parser-rules.txt')
            }
        }
        steps {
            dsl {
                external "jobs/**/*.groovy"
                additionalClasspath "automation/src/main/groovy" + "\r\n" + "src/main/groovy"
                removeAction("DELETE")
                removeViewAction("DELETE")
            }
            shell('echo ${JENKINS_HOME}')
        }
        def recipients = project.emails ?: defaultEmail
        publishers {
            extendedEmail {
                recipientList(recipients)
                triggers {
                    failure {
                        sendTo {
                            recipientList()
                        }
                    }
                    fixed {
                        sendTo {
                            recipientList()
                        }
                    }
                }
            }
        }
    }
}
pipelineJob("pipeline-calls-other-pipeline") {
    logRotator {
        numToKeep(30)
    }
    definition {
        cps {
            sandbox()
            script("""
                node {
                    stage 'Hello world'
                    echo 'Hello World 1'

                    stage "invoke another pipeline"
                    build 'pipeline-being-called'

                    stage 'Goodbye world'
                    echo "Goodbye world"
                }
            """.stripIndent())
        }
    }
}
import javaposse.jobdsl.dsl.DslFactory
import javaposse.jobdsl.dsl.Job
import jenkins.automation.builders.BaseJobBuilder
import jenkins.automation.utils.CommonUtils

class QuTaskBuilder {
    String name
    String description
    String shellCmd
    String cronSchedule
    String scmSchedule
    String gitUrl = Config.quDeployGitUrl
    List<String> emails = Config.emails

    Job build(DslFactory factory) {
        Job baseJob = new BaseJobBuilder(
            ...
        ).build(factory)

        baseJob.with {
            CommonUtils.addInjectGlobalPasswords(delegate)
            CommonUtils.addLogParserPublisher(delegate)
            scm {
                git(gitUrl, "master")
            }
            triggers { ... }
            steps {
                if (shellCmd) {
                    shell(shellCmd)
                }
            }
        }
        return baseJob
    }
}
node {
    stage "Hello world"
    echo "Hello World"

    stage "build"
    build "BuildJob"

    stage "test"
    build "TestJob"

    stage "deploy"
    build "DeployJob"

    stage 'Goodbye world'
    echo "Goodbye world"
}
new QuTaskBuilder(
    name: "qu-find-stuck-aggregations",
    description: """
        redacted
    """.stripIndent(),
    shellCmd: """
        mongo_username=aggregation_manager mongo_password=\${MONGO_AGGREGATION_MANAGER} fab -u deploy -i ${Config.sshKey(env)} find_stuck_processing_aggregations -H $singleTargetHost
        mongo_username=aggregation_manager mongo_password=\${MONGO_AGGREGATION_MANAGER} fab -u deploy -i ${Config.sshKey(env)} find_too_many_unprocessed_aggregations:how_many=3 -H $singleTargetHost
        mongo_username=aggregation_manager mongo_password=\${MONGO_AGGREGATION_MANAGER} fab -u deploy -i ${Config.sshKey(env)} find_incomplete_processed_aggregations -H $singleTargetHost
    """.stripIndent(),
    cronSchedule: '@hourly'
).build(this).with {
    publishers {
        downstream('qu-restart-incomplete-processed-aggregations', 'FAILURE')
    }
}
new QuTaskBuilder(
    name: "qu-restart-incomplete-processed-aggregations",
    description: "Restarts jobs marked as processed but for which no output collection exists",
    shellCmd: "mongo_username=aggregation_manager mongo_password=\${MONGO_AGGREGATION_MANAGER} fab -u deploy -i ${Config.sshKey(env)} restart_incomplete_processed_aggregations -H $singleTargetHost"
).build(this)
new QuTaskBuilder(
    name: "qu-delete-old-aggregations",
    description: "Deletes old, unused aggregations",
    shellCmd: "mongo_username=aggregation_manager mongo_password=\${MONGO_AGGREGATION_MANAGER} fab -u deploy -i ${Config.sshKey(env)} evict_from_aggregation_cache -H $singleTargetHost",
    cronSchedule: '@daily'
).build(this)
new QuTaskBuilder(
    name: "qu-cache-aggregations-for-year",
    description: "Finds and restarts existing aggregations that should be rebuilt because they should contain records for a new year",
    shellCmd: """
        mongo_username=aggregation_manager mongo_password=\${MONGO_AGGREGATION_MANAGER} fab -u deploy -i ${Config.sshKey(env)} cache_aggregations_for_year:year="\${YEAR}" -H $singleTargetHost
        mongo_username=aggregation_manager mongo_password=\${MONGO_AGGREGATION_MANAGER} fab -u deploy -i ${Config.sshKey(env)} restart_aggregations_for_collection:collection_name=census_tracts -H $singleTargetHost
        mongo_username=aggregation_manager mongo_password=\${MONGO_AGGREGATION_MANAGER} fab -u deploy -i ${Config.sshKey(env)} restart_aggregations_for_collection:collection_name=institutions -H $singleTargetHost
    """.stripIndent()
).build(this).with {
    parameters {
        stringParam('YEAR', '2014', 'year of aggregation to cache')
    }
}
new QuMatrixJobBuilder(
    name: "qu-test-deploy-user",
    description: "Checks that the deploy user has access to the application servers",
    shellCmd: "fab -u deploy -i ${Config.sshKey(env)} test_deploy_user -H \$HOST"
).build(this).with {
    triggers {
        cron("H/15 * * * *")
    }
    axes {
        text("HOST", env.isDev() ? Config.quDevHosts : Config.quProdHosts)
    }
}
primeRamHosts = env.isDev() ? "\${qu_mongo_primary_domains}" : "\${qu_mongo_primary_domains},\${qu_mongo_secondary_domains}"

new QuTaskBuilder(
    name: "qu-mongod-prime-ram",
    description: "Ensures RAM is primed. On dev, we only prime primaries because the secondaries are too small to prime. On prod, we prime primaries and secondaries.",
    shellCmd: "mongo_username='agent' mongo_password=\${MONGO_AGENT_PASSWORD} fab -u deploy -i ${Config.sshKey(env)} prime_ram -H ${primeRamHosts}"
).build(this)
job('example-job-from-job-dsl') {
    scm {
        github('jenkinsci/job-dsl-plugin', 'master')
    }
    triggers {
        cron("@hourly")
    }
    steps {
        shell("echo 'Hello World'")
    }
}