Scripts to fetch failed CircleCI pipelines and run failing steps locally. Supports RSpec (parallel), RuboCop, Rswag (Swagger API specs), bundle-audit, and shellcheck
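Usage sketch: the fixer script expects the status script at scripts/circleci_pipeline_status (that path is referenced below); the fixer script's own name, scripts/fix_ci_failures, and the repo layout are assumptions. Both scripts read a CircleCI API token from a .circleci-token file in the repo root.

# One-time setup (token value is a placeholder)
echo "<your CircleCI API token>" > .circleci-token
# Print details of the latest failed pipeline for a branch
./scripts/circleci_pipeline_status -s failed -f "%i %n %u" -b my-branch
# Fetch the failed jobs from the latest failed pipeline on the current branch and re-run them locally
./scripts/fix_ci_failures
# Or target a specific pipeline by number
./scripts/fix_ci_failures <pipeline number>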
#!/bin/bash
set -eo pipefail
CURRENT_DIR="$(realpath "$(dirname "$0")")"
ROOT_DIR="$(realpath "$CURRENT_DIR"/..)"
DEBUG=""
debug() {
if [ "$DEBUG" != "true" ]; then return; fi
echo "[DEBUG] $(date -u +"%Y-%m-%d %H:%M:%S") $*" >&2
}
if ! [ -f "$ROOT_DIR/.circleci-token" ]; then
# echo "Please create a file named .circleci-token in $ROOT_DIR, containing your GitLab API token" >&2
exit 1
fi
CIRCLECI_TOKEN=$(cat "$ROOT_DIR"/.circleci-token)
CIRCLECI_PROJECT_SLUG="<ORGANIZATION>/<REPO>"
function usage() {
cat <<HELP >&2
Usage: $0 [options]
-r Run in foreground (don't daemonize)
-f <format string> Print pipeline details
%i: Pipeline ID
%n: Pipeline Number
%s: Pipeline Status
%u: Pipeline URL
%b: Git Branch
%c: Git Commit
-b <branch> Git branch
-c <commit> Git commit. If branch provided and commit omitted, return most recent pipeline for branch.
-s <status> Pipeline status (success, failed, etc.) Default: any
-d Debug mode
-h Print usage
HELP
}
# ORIG_ARGV="$@"
GIT_BRANCH_PROVIDED="false"
GIT_COMMIT_PROVIDED="false"
STATUS_QUERY=""
FORMAT_STRING=""
while getopts "rf:b:c:s:dh" opt; do
case "$opt" in
r)
RUN_IN_FOREGROUND="true";;
f)
FORMAT_STRING="$OPTARG"
RUN_IN_FOREGROUND="true";;
b)
GIT_BRANCH_PROVIDED="true"
GIT_BRANCH="$OPTARG";;
c)
GIT_COMMIT_PROVIDED="true"
GIT_COMMIT="$OPTARG";;
s)
STATUS_QUERY="$OPTARG"
if [ "$STATUS_QUERY" != "success" ] && [ "$STATUS_QUERY" != "failed" ] && [ "$STATUS_QUERY" != "running" ]; then
echo "Invalid status query: $STATUS_QUERY" >&2
exit 1
fi;;
d)
DEBUG="true";;
h)
usage
exit;;
*)
echo "Unknown option: -$opt" >&2
exit 1;;
esac
done
shift $((OPTIND -1))
if [ -z "$GIT_BRANCH" ]; then
GIT_BRANCH="$(git rev-parse --abbrev-ref HEAD)"
fi
if [ -z "$GIT_COMMIT" ]; then
GIT_COMMIT="$(git rev-parse HEAD)"
fi
ESCAPED_BRANCH=$(ruby -r cgi -e "puts CGI.escape('$GIT_BRANCH')")
if [ -f "$ROOT_DIR/tmp/circleci_ci_status_branch" ]; then
OLD_BRANCH=$(cat "$ROOT_DIR/tmp/circleci_ci_status_branch")
fi
if [ -f "$ROOT_DIR/tmp/circleci_ci_status_commit" ]; then
OLD_COMMIT=$(cat "$ROOT_DIR/tmp/circleci_ci_status_commit")
fi
PIPELINE_STATUS=""
PIPELINE_ID=""
PIPELINE_URL=""
debug "GIT_BRANCH: $GIT_BRANCH"
debug "GIT_COMMIT: $GIT_COMMIT"
debug "GIT_BRANCH_PROVIDED: $GIT_BRANCH_PROVIDED"
debug "GIT_COMMIT_PROVIDED: $GIT_COMMIT_PROVIDED"
debug "STATUS_QUERY: '$STATUS_QUERY'"
debug "FORMAT_STRING: '$FORMAT_STRING'"
update_ci_status() {
LATEST_PIPELINES_RESPONSE=$(curl -s --header "Circle-Token: $CIRCLECI_TOKEN" \
"https://circleci.com/api/v2/project/gh/${CIRCLECI_PROJECT_SLUG}/pipeline?branch=${ESCAPED_BRANCH}")
# echo "$LATEST_PIPELINES_RESPONSE"
PIPELINES_COUNT="$(echo "$LATEST_PIPELINES_RESPONSE" | jq '.items | length')"
debug "PIPELINES_COUNT: $PIPELINES_COUNT"
if [ "$PIPELINES_COUNT" == "0" ]; then
echo "none" > "$ROOT_DIR/tmp/circleci_ci_status"
else
PIPELINE_INDEX=-1
PIPELINE_STATUS="none"
PIPELINE_SHA=""
PIPELINES_CACHE_DIR="$ROOT_DIR/tmp/circleci_pipelines_cache"
mkdir -p "$PIPELINES_CACHE_DIR"
# Performance improvement: Only run jq commands once to build an array, instead of on each iteration
PIPELINE_IDS="$(echo "$LATEST_PIPELINES_RESPONSE" | jq -r "[.items[].id] | join(\" \")")"
read -ra PIPELINE_IDS <<< "$PIPELINE_IDS"
PIPELINE_NUMBERS="$(echo "$LATEST_PIPELINES_RESPONSE" | jq -r "[.items[].number] | join(\" \")")"
read -ra PIPELINE_NUMBERS <<< "$PIPELINE_NUMBERS"
PIPELINE_SHAS="$(echo "$LATEST_PIPELINES_RESPONSE" | jq -r "[.items[].vcs.revision] | join(\" \")")"
read -ra PIPELINE_SHAS <<< "$PIPELINE_SHAS"
debug "PIPELINE_IDS: ${PIPELINE_IDS[*]}"
debug "PIPELINE_NUMBERS: ${PIPELINE_NUMBERS[*]}"
debug "PIPELINE_SHAS: ${PIPELINE_SHAS[*]}"
# Increment PIPELINE_INDEX while it is under PIPELINES_COUNT (the API returns at most 20 pipelines per page).
# Find the first pipeline that matches the status query, or running/failed/success (if no query provided)
while [ "$PIPELINE_INDEX" -lt "$PIPELINES_COUNT" ] && {
[ -n "$STATUS_QUERY" ] && {
[ "$PIPELINE_STATUS" != "$STATUS_QUERY" ]
} || {
[ "$PIPELINE_STATUS" != "success" ] \
&& [ "$PIPELINE_STATUS" != "failed" ] \
&& [ "$PIPELINE_STATUS" != "running" ]
}
}; do
PIPELINE_INDEX=$((PIPELINE_INDEX + 1))
# PIPELINE_ID="$(echo "$LATEST_PIPELINES_RESPONSE" | jq -r ".items[$PIPELINE_INDEX].id")"
# PIPELINE_NUMBER="$(echo "$LATEST_PIPELINES_RESPONSE" | jq -r ".items[$PIPELINE_INDEX].number")"
# PIPELINE_SHA="$(echo "$LATEST_PIPELINES_RESPONSE" | jq -r ".items[$PIPELINE_INDEX].vcs.revision")"
PIPELINE_ID="${PIPELINE_IDS[$PIPELINE_INDEX]}"
PIPELINE_NUMBER="${PIPELINE_NUMBERS[$PIPELINE_INDEX]}"
PIPELINE_SHA="${PIPELINE_SHAS[$PIPELINE_INDEX]}"
PIPELINE_URL="https://app.circleci.com/pipelines/github/${CIRCLECI_PROJECT_SLUG}/${PIPELINE_NUMBER}"
debug "[$PIPELINE_INDEX] PIPELINE_ID: $PIPELINE_ID"
debug "[$PIPELINE_INDEX] PIPELINE_NUMBER: $PIPELINE_NUMBER"
debug "[$PIPELINE_INDEX] PIPELINE_SHA: $PIPELINE_SHA"
debug "[$PIPELINE_INDEX] PIPELINE_URL: $PIPELINE_URL"
# if [ "$DEBUG" == "true" ]; then
# set -x
# fi
# If git branch arg is provided but commit is omitted, return most recent pipeline for branch
if [ "$PIPELINE_SHA" != "$GIT_COMMIT" ] && ! {
[ "$GIT_BRANCH_PROVIDED" == "true" ] && [ "$GIT_COMMIT_PROVIDED" != "true" ];
}; then
continue
fi
# Cache workflows for finished pipelines
WORKFLOWS_STATUS_FILE="$PIPELINES_CACHE_DIR/${PIPELINE_ID}_status.txt"
if [ -f "$WORKFLOWS_STATUS_FILE" ]; then
PIPELINE_STATUS=$(cat "$WORKFLOWS_STATUS_FILE")
debug "Using cached workflow status for pipeline $PIPELINE_ID: $PIPELINE_STATUS"
continue
fi
PIPELINE_CACHE_DIR="$PIPELINES_CACHE_DIR/${PIPELINE_ID}"
mkdir -p "$PIPELINE_CACHE_DIR"
WORKFLOWS_RESPONSE_FILE="$PIPELINE_CACHE_DIR/workflows.json"
if [ -f "$WORKFLOWS_RESPONSE_FILE" ]; then
debug "Using cached workflows response for pipeline $PIPELINE_ID"
WORKFLOWS_RESPONSE=$(cat "$WORKFLOWS_RESPONSE_FILE")
else
debug "Fetching workflows for pipeline $PIPELINE_ID..."
WORKFLOWS_RESPONSE=$(curl -s --header "Circle-Token: $CIRCLECI_TOKEN" \
"https://circleci.com/api/v2/pipeline/${PIPELINE_ID}/workflow")
fi
WORKFLOWS_COUNT="$(echo "$WORKFLOWS_RESPONSE" | jq '.items | length')"
debug "WORKFLOWS_COUNT: $WORKFLOWS_COUNT"
if [ "$WORKFLOWS_COUNT" -lt 2 ]; then
PIPELINE_STATUS="running"
elif [ "$WORKFLOWS_COUNT" -gt 1 ]; then
# Workflow state enum: success, running, not_run, failed, error, failing, on_hold, canceled, unauthorized
# If all workflow states are 'success' or 'on_hold' then pipeline state is 'success'
# If any workflow states are 'running', then pipeline state is 'running'
# If any workflow states are 'failed', 'failing', 'error', or 'unauthorized', then pipeline state is 'failed'
# Ignore these states: 'not_run', 'canceled'
PIPELINE_STATUS="success"
for ((i=0; i<WORKFLOWS_COUNT; i++)); do
WORKFLOW_STATUS="$(echo "$WORKFLOWS_RESPONSE" | jq -r ".items[$i].status")"
WORKFLOW_NAME="$(echo "$WORKFLOWS_RESPONSE" | jq -r ".items[$i].name")"
debug "[$i] WORKFLOW_NAME: $WORKFLOW_NAME"
debug "[$i] WORKFLOW_STATUS: $WORKFLOW_STATUS"
if [ "$WORKFLOW_STATUS" == "running" ]; then
PIPELINE_STATUS="running"
elif [ "$WORKFLOW_STATUS" == "failed" ] || \
[ "$WORKFLOW_STATUS" == "failing" ] || \
[ "$WORKFLOW_STATUS" == "error" ] || \
[ "$WORKFLOW_STATUS" == "unauthorized" ]; then
PIPELINE_STATUS="failed"
elif [ -z "$PIPELINE_STATUS" ] && {
[ "$WORKFLOW_STATUS" == "not_run" ] || \
[ "$WORKFLOW_STATUS" == "canceled" ]
}; then
PIPELINE_STATUS="ignored"
fi
if [ "$PIPELINE_STATUS" != 'success' ]; then break; fi
done
# Save cache if finished
if [ "$PIPELINE_STATUS" != "running" ]; then
echo "$WORKFLOWS_RESPONSE" > "$WORKFLOWS_RESPONSE_FILE"
echo "$PIPELINE_STATUS" > "$WORKFLOWS_STATUS_FILE"
fi
fi
done
# if [ "$DEBUG" == "true" ]; then
# set +x
# fi
if [ "$PIPELINE_STATUS" == "none" ]; then
PIPELINE_ID="-"
PIPELINE_NUMBER="-"
PIPELINE_URL="-"
PIPELINE_SHA="-"
fi
if [ -n "$FORMAT_STRING" ]; then return; fi
echo "$PIPELINE_STATUS" > "$ROOT_DIR/tmp/circleci_ci_status"
fi
if [ -n "$FORMAT_STRING" ]; then return; fi
echo "$GIT_BRANCH" > "$ROOT_DIR/tmp/circleci_ci_status_branch"
echo "$GIT_COMMIT" > "$ROOT_DIR/tmp/circleci_ci_status_commit"
}
if [ "$OLD_BRANCH" != "$GIT_BRANCH" ] || \
[ "$OLD_COMMIT" != "$GIT_COMMIT" ] || \
[ "$RUN_IN_FOREGROUND" == 'true' ]; then
if [ "$RUN_IN_FOREGROUND" == 'true' ]; then
update_ci_status
else
PIPELINE_STATUS="fetching"
if ! [ -f "$ROOT_DIR/tmp/circleci_ci_status_fetching" ]; then
echo > "$ROOT_DIR/tmp/circleci_ci_status_fetching"
(
update_ci_status
rm -f "$ROOT_DIR/tmp/circleci_ci_status_fetching"
) &
fi
fi
else
if [ -f "$ROOT_DIR/tmp/circleci_ci_status" ]; then
PIPELINE_STATUS="$(cat "$ROOT_DIR/tmp/circleci_ci_status")"
else
update_ci_status
fi
fi
if [ -n "$FORMAT_STRING" ]; then
if [ -z "$PIPELINE_ID" ]; then exit; fi
ESCAPED_PIPELINE_URL="$(echo "$PIPELINE_URL" | sed -e 's/[\/&]/\\&/g')"
ESCAPED_GIT_BRANCH="$(echo "$GIT_BRANCH" | sed -e 's/[\/&]/\\&/g')"
echo "$FORMAT_STRING" | sed \
-e "s/%b/$ESCAPED_GIT_BRANCH/g" \
-e "s/%c/$GIT_COMMIT/g" \
-e "s/%i/$PIPELINE_ID/g" \
-e "s/%n/$PIPELINE_NUMBER/g" \
-e "s/%s/$PIPELINE_STATUS/g" \
-e "s/%u/$ESCAPED_PIPELINE_URL/g"
exit
fi
if [ "$PIPELINE_STATUS" == "success" ]; then
echo "\[\033[1;32m\]✔"
elif [ "$PIPELINE_STATUS" == "failed" ]; then
echo "\[\033[1;31m\]✘"
elif [ "$PIPELINE_STATUS" == "none" ]; then
echo "\[\033[1;37m\]-"
elif [ "$PIPELINE_STATUS" == "running" ]; then
echo "\[\033[1;36m\]⚙︎"
elif [ "$PIPELINE_STATUS" == "fetching" ]; then
echo "\[\033[1;34m\]⋯"
else
echo "\[\033[1;33m\]⌁"
fi
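The status glyphs above are wrapped in \[ ... \] prompt escapes, so the script's output is meant to be embedded in a bash prompt. A minimal sketch, assuming the repo lives at ~/projects/myapp and rebuilding PS1 from PROMPT_COMMAND so the escapes are decoded on each prompt (the prompt layout itself is an assumption):

# in ~/.bashrc
__ci_prompt() {
  local ci
  ci="$("$HOME/projects/myapp/scripts/circleci_pipeline_status")"
  PS1="${ci}\[\033[0m\] \w $ "
}
PROMPT_COMMAND=__ci_prompt

Since the script backgrounds the API fetch and caches the last result under tmp/, it returns quickly enough to run on every prompt.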
#!/bin/bash
set -eo pipefail
CURRENT_DIR="$(realpath "$(dirname "$0")")"
ROOT_DIR="$(realpath "$CURRENT_DIR"/..)"
# DEBUG=""
DEBUG=true
debug() {
if [ "$DEBUG" != "true" ]; then return; fi
echo "[DEBUG] $(date -u +"%Y-%m-%d %H:%M:%S") $*" >&2
}
if ! [ -f "$ROOT_DIR/.circleci-token" ]; then
# echo "Please create a file named .circleci-token in $ROOT_DIR, containing your CircleCI API token" >&2
exit 1
fi
CIRCLECI_PROJECT_SLUG="<ORGANIZATION>/<REPO>"
CIRCLECI_TOKEN=$(cat "$ROOT_DIR"/.circleci-token)
PIPELINES_CACHE_DIR="$ROOT_DIR/tmp/circleci_pipelines_cache"
mkdir -p "$PIPELINES_CACHE_DIR"
PIPELINE_FINISHED="true"
cache_response() {
local DESCRIPTION="$1"
local CACHE_FILE="$2"
local URL="$3"
local JQ_COMMAND="${4:-}"
if [ -z "$CACHE_FILE" ]; then echo "cache_response: CACHE_FILE is empty" >&2; exit 1; fi
if [ -z "$URL" ]; then echo "cache_response: URL is empty" >&2; exit 1; fi
if [ -z "$DESCRIPTION" ]; then echo "cache_response: DESCRIPTION is empty" >&2; exit 1; fi
if [ -f "$CACHE_FILE" ]; then
# Remove ROOT_DIR from CACHE_FILE
debug "Using cached $DESCRIPTION (file: ${CACHE_FILE#"$ROOT_DIR"/})"
cat "$CACHE_FILE"
return
fi
local CACHE_DIR
CACHE_DIR="$(dirname "$CACHE_FILE")"
mkdir -p "$CACHE_DIR"
debug "Fetching $DESCRIPTION..."
local RESPONSE
RESPONSE=$(curl -sfL --compressed --header "Circle-Token: $CIRCLECI_TOKEN" "$URL")
if [ -n "$JQ_COMMAND" ]; then
RESPONSE="$(echo "$RESPONSE" | jq -r "$JQ_COMMAND")"
fi
if [ "$PIPELINE_FINISHED" == "true" ]; then
echo "$RESPONSE" > "$CACHE_FILE"
fi
echo "$RESPONSE"
}
PIPELINE_NUMBER="$1"
if [ -n "$PIPELINE_NUMBER" ]; then
PIPELINE_URL="https://app.circleci.com/pipelines/github/${CIRCLECI_PROJECT_SLUG}/${PIPELINE_NUMBER}"
PIPELINE_RESPONSE="$(cache_response \
"pipeline ID for pipeline number ${PIPELINE_NUMBER}" \
"$PIPELINES_CACHE_DIR/pipeline_numbers/${PIPELINE_NUMBER}.json" \
"https://circleci.com/api/v2/project/gh/${CIRCLECI_PROJECT_SLUG}/pipeline/${PIPELINE_NUMBER}")"
PIPELINE_ID="$(echo "$PIPELINE_RESPONSE" | jq -r '.id')"
if [ -z "$PIPELINE_ID" ]; then
echo "Could not find pipeline ID for pipeline number $PIPELINE_NUMBER" >&2
exit 1
fi
else
CURRENT_GIT_BRANCH="$(git rev-parse --abbrev-ref HEAD)"
echo "Fetching latest failed pipeline ID for '$CURRENT_GIT_BRANCH'..." >&2
PIPELINE_ATTRIBUTES="$(./scripts/circleci_pipeline_status -s failed -f "%i %n %u" -b "$CURRENT_GIT_BRANCH")"
if [ -z "$PIPELINE_ATTRIBUTES" ]; then
echo "No failed CircleCI pipeline found for branch '$CURRENT_GIT_BRANCH'"
exit 1
fi
PIPELINE_ID="$(echo "$PIPELINE_ATTRIBUTES" | cut -d' ' -f1)"
PIPELINE_NUMBER="$(echo "$PIPELINE_ATTRIBUTES" | cut -d' ' -f2)"
PIPELINE_URL="$(echo "$PIPELINE_ATTRIBUTES" | cut -d' ' -f3)"
echo "Found latest failed pipeline ID: $PIPELINE_ID" >&2
echo >&2
echo "$PIPELINE_URL" >&2
echo >&2
fi
debug "PIPELINE_NUMBER: $PIPELINE_NUMBER"
debug "PIPELINE_ID: $PIPELINE_ID"
debug "PIPELINE_URL: $PIPELINE_URL"
# Check if pipeline ID is present in current_pipeline_id file (if it exists)
# If not, update it and delete tmp/rspec_examples.txt and *.fixed files
CURRENT_PIPELINE_ID=""
if [ -f "$PIPELINES_CACHE_DIR/current_pipeline_id" ]; then
CURRENT_PIPELINE_ID=$(cat "$PIPELINES_CACHE_DIR/current_pipeline_id")
fi
if [ "$CURRENT_PIPELINE_ID" != "$PIPELINE_ID" ] || [ -n "${RESET:-}" ]; then
echo "New CircleCI pipeline ID: ${PIPELINE_ID}. Deleting tmp/rspec_examples.txt and $PIPELINES_CACHE_DIR/*.fixed..."
rm -f "$ROOT_DIR"/tmp/rspec_examples.txt
rm -f "$PIPELINES_CACHE_DIR"/*.fixed
echo "$PIPELINE_ID" > "$PIPELINES_CACHE_DIR/current_pipeline_id"
fi
WORKFLOWS_RESPONSE="$(cache_response \
"workflows for pipeline ID $PIPELINE_ID" \
"$PIPELINES_CACHE_DIR/$PIPELINE_ID/workflows.json" \
"https://circleci.com/api/v2/pipeline/${PIPELINE_ID}/workflow")"
WORKFLOWS_COUNT="$(echo "$WORKFLOWS_RESPONSE" | jq '.items | length')"
debug "WORKFLOWS_COUNT: $WORKFLOWS_COUNT"
if [ "$WORKFLOWS_COUNT" -lt 2 ]; then
echo "ERROR: Did not find at least 2 workflows for $PIPELINE_ID! (Count: $WORKFLOWS_COUNT)"
exit 1
fi
# Find the generated workflow (not 'setup-workflow')
for ((i=0; i<WORKFLOWS_COUNT; i++)); do
WORKFLOW_STATUS="$(echo "$WORKFLOWS_RESPONSE" | jq -r ".items[$i].status")"
WORKFLOW_NAME="$(echo "$WORKFLOWS_RESPONSE" | jq -r ".items[$i].name")"
WORKFLOW_ID="$(echo "$WORKFLOWS_RESPONSE" | jq -r ".items[$i].id")"
debug "[$i] WORKFLOW_NAME: '$WORKFLOW_NAME'"
debug "[$i] WORKFLOW_STATUS: '$WORKFLOW_STATUS'"
if [ "$WORKFLOW_NAME" != "setup-workflow" ]; then break; fi
done
debug "Found generated workflow: '$WORKFLOW_NAME'"
if [ "$WORKFLOW_STATUS" == "success" ] || [ "$WORKFLOW_STATUS" == "failed" ]; then
PIPELINE_FINISHED=true
else
PIPELINE_FINISHED=false
# Delete cached responses until the workflow has finished
rm -f "$PIPELINES_CACHE_DIR/pipeline_numbers/${PIPELINE_NUMBER}.json"
rm -f "$PIPELINES_CACHE_DIR/$PIPELINE_ID/workflows.json"
fi
JOBS_RESPONSE="$(cache_response \
"jobs for workflow $WORKFLOW_ID" \
"$PIPELINES_CACHE_DIR/$PIPELINE_ID/$WORKFLOW_ID/jobs.json" \
"https://circleci.com/api/v2/workflow/${WORKFLOW_ID}/job")"
failed_job_numbers() {
NAME_QUERY="$1"
echo "$JOBS_RESPONSE" | jq -r ".items \
| map(select(.name | contains(\"${NAME_QUERY}\"))) \
| map(select(.status == \"failed\")) \
| map(.job_number) \
| join(\" \")"
}
FAILED_RSPEC_JOB_NUMBERS="$(failed_job_numbers "rspec")"
read -ra FAILED_RSPEC_JOB_NUMBERS <<< "$FAILED_RSPEC_JOB_NUMBERS"
FAILED_RUBYLINT_JOB_NUMBERS="$(failed_job_numbers "ruby_lint")"
read -ra FAILED_RUBYLINT_JOB_NUMBERS <<< "$FAILED_RUBYLINT_JOB_NUMBERS"
FAILED_RSWAG_JOB_NUMBERS="$(failed_job_numbers "rswag")"
read -ra FAILED_RSWAG_JOB_NUMBERS <<< "$FAILED_RSWAG_JOB_NUMBERS"
FAILED_SHELLCHECK_JOB_NUMBERS="$(failed_job_numbers "shellcheck")"
read -ra FAILED_SHELLCHECK_JOB_NUMBERS <<< "$FAILED_SHELLCHECK_JOB_NUMBERS"
FAILED_RSPEC_AND_RSWAG_JOB_NUMBERS=(
"${FAILED_RSPEC_JOB_NUMBERS[@]}"
"${FAILED_RSWAG_JOB_NUMBERS[@]}"
)
ALL_FAILED_JOB_NUMBERS=(
"${FAILED_RSPEC_JOB_NUMBERS[@]}"
"${FAILED_RUBYLINT_JOB_NUMBERS[@]}"
"${FAILED_RSWAG_JOB_NUMBERS[@]}"
"${FAILED_SHELLCHECK_JOB_NUMBERS[@]}"
)
TOTAL_FAILED_JOBS="${#ALL_FAILED_JOB_NUMBERS[@]}"
debug "FAILED_RSPEC_JOB_NUMBERS: ${FAILED_RSPEC_JOB_NUMBERS[*]}"
debug "FAILED_RUBYLINT_JOB_NUMBERS: ${FAILED_RUBYLINT_JOB_NUMBERS[*]}"
debug "FAILED_RSWAG_JOB_NUMBERS: ${FAILED_RSWAG_JOB_NUMBERS[*]}"
debug "FAILED_SHELLCHECK_JOB_NUMBERS: ${FAILED_SHELLCHECK_JOB_NUMBERS[*]}"
debug "TOTAL_FAILED_JOBS: $TOTAL_FAILED_JOBS"
if [ "$TOTAL_FAILED_JOBS" -eq 0 ]; then
echo "ERROR: No failed jobs found for CircleCI pipeline $PIPELINE_NUMBER ($PIPELINE_ID)"
exit 1
fi
show_failed_job_urls() {
local DESCRIPTION="$1"
local JOB_NUMBERS=("${!2}")
if [ "${#JOB_NUMBERS[@]}" -eq 0 ]; then return; fi
echo "Found ${#JOB_NUMBERS[@]} failed ${DESCRIPTION} jobs:" >&2
for JOB_NUMBER in "${JOB_NUMBERS[@]}"; do
echo " * https://app.circleci.com/pipelines/github/$CIRCLECI_PROJECT_SLUG/$PIPELINE_ID/workflows/$WORKFLOW_ID/jobs/$JOB_NUMBER" >&2
done
echo >&2
}
show_failed_job_urls "RSpec" FAILED_RSPEC_JOB_NUMBERS[@]
show_failed_job_urls "Ruby Lint" FAILED_RUBYLINT_JOB_NUMBERS[@]
show_failed_job_urls "RSwag" FAILED_RSWAG_JOB_NUMBERS[@]
show_failed_job_urls "Shellcheck" FAILED_SHELLCHECK_JOB_NUMBERS[@]
fetch_logs_for_job() {
local JOB_NUMBER="$1"
if [ -f "$PIPELINES_CACHE_DIR/$PIPELINE_ID/$WORKFLOW_ID/$JOB_NUMBER/output.txt" ]; then
echo "Found cached logs for job $JOB_NUMBER" >&2
return
fi
JOB_RESPONSE="$(cache_response \
"data for job $JOB_NUMBER" \
"$PIPELINES_CACHE_DIR/$PIPELINE_ID/$WORKFLOW_ID/$JOB_NUMBER/job.json" \
"https://circleci.com/api/v2/project/gh/$CIRCLECI_PROJECT_SLUG/job/$JOB_NUMBER")"
JOB_PARALLELISM=$(echo "$JOB_RESPONSE" | jq -r '.parallelism')
# NOTE: This uses an internal API endpoint for the web UI
JOB_DATA_RESPONSE="$(cache_response \
"internal data for job $JOB_NUMBER" \
"$PIPELINES_CACHE_DIR/$PIPELINE_ID/$WORKFLOW_ID/$JOB_NUMBER/job_internal.json" \
"https://circleci.com/api/v1.1/project/github/$CIRCLECI_PROJECT_SLUG/$JOB_NUMBER")"
FOUND_FAILED_OUTPUT=false
# Separate logs for parallel jobs
for PARALLEL_INDEX in $(seq 0 $((JOB_PARALLELISM - 1))); do
FAILED_STEP_OUTPUT_URL="$(echo "$JOB_DATA_RESPONSE" | jq -r "\
.steps \
| map(.actions[$PARALLEL_INDEX]) \
| map(select(.status == \"failed\"))\
[0].output_url // empty")"
if [ -z "$FAILED_STEP_OUTPUT_URL" ]; then continue; fi
FOUND_FAILED_OUTPUT=true
cache_response \
"output for job $JOB_NUMBER, parallel run: $PARALLEL_INDEX" \
"$PIPELINES_CACHE_DIR/$PIPELINE_ID/$WORKFLOW_ID/$JOB_NUMBER/output-$PARALLEL_INDEX.txt" \
"$FAILED_STEP_OUTPUT_URL" \
".[].message" > /dev/null
done
if [ "$FOUND_FAILED_OUTPUT" == 'false' ]; then
echo "ERROR: Could not find output URL for any failed steps for job $JOB_NUMBER (parallel jobs: $JOB_PARALLELISM)" >&2
exit 1
fi
}
# Fetch logs for all failed jobs
for JOB_NUMBER in "${ALL_FAILED_JOB_NUMBERS[@]}"; do
fetch_logs_for_job "$JOB_NUMBER"
done
output_file_for_job() {
local JOB_NUMBER="$1"
local PARALLEL_INDEX="${2:-0}"
echo "$PIPELINES_CACHE_DIR/$PIPELINE_ID/$WORKFLOW_ID/$JOB_NUMBER/output-${PARALLEL_INDEX}.txt"
}
fixed_file() {
JOB_NAME="$1"
echo "$PIPELINES_CACHE_DIR/${PIPELINE_NUMBER}-${JOB_NUMBER}-${JOB_NAME}.fixed"
}
# Ruby Lint (RuboCop)
# -----------------------------------------------------
for JOB_NUMBER in "${FAILED_RUBYLINT_JOB_NUMBERS[@]}"; do
OUTPUT_FILE="$(output_file_for_job "$JOB_NUMBER")"
FIXED_FILE="$(fixed_file rubocop)"
if [ -f "$FIXED_FILE" ]; then echo "RuboCop was already fixed."; continue; fi
# set -x
FAILING_RUBOCOP_FILES="$(ruby -e "puts Dir.glob(\"$OUTPUT_FILE\").map { |f|
logs = File.read(f).gsub(/\e\[([;\d]+)?m/, '');
start_regex = /Offenses:/;
end_regex = /\d+ files? inspected, \d+ offenses? detected/;
match_regex = /^(.+):\d+:\d+:/;
next nil if !logs.match?(start_regex) || !logs.match?(end_regex);
logs.
split(start_regex)[1].
split(end_regex)[0].
scan(match_regex)
}.flatten.reject(&:nil?).map { |s|
s.gsub(/\A\p{Space}+|\p{Space}+\z/, '')
}.select { |f| File.exist?(f) }.join(' ')")"
# set +x
if [ -z "$FAILING_RUBOCOP_FILES" ]; then
echo "Could not find any Ruby files in the RuboCop output! Please check the logs manually." >&2
continue
fi
echo "Running 'rubocop -A' to fix Rubocop errors..."
echo "=> bundle exec rubocop -A $FAILING_RUBOCOP_FILES"
if bundle exec rubocop -A "$FAILING_RUBOCOP_FILES"; then
touch "$FIXED_FILE"
echo "RuboCop succeeded!" >&2
else
echo "ERROR: RuboCop failed!" >&2
exit 1
fi
done
# Ruby Lint (Security - bundle-audit and bundle-leak)
# -----------------------------------------------------
for JOB_NUMBER in "${FAILED_RUBYLINT_JOB_NUMBERS[@]}"; do
OUTPUT_FILE="$(output_file_for_job "$JOB_NUMBER")"
FIXED_FILE="$(fixed_file security)"
if [ -f "$FIXED_FILE" ]; then echo "Security was already fixed."; continue; fi
if grep -q "bundler-audit failed" "$OUTPUT_FILE" || grep -q "bundler-leak failed" "$OUTPUT_FILE"; then
echo "Checking for security issues in gems..."
echo "=> ./scripts/ci/security"
if ./scripts/ci/security; then
touch "$FIXED_FILE"
echo "./scripts/ci/security succeeded!" >&2
else
echo "ERROR: ./scripts/ci/security failed!" >&2
exit 1
fi
fi
done
# RSwag (OpenAPI Specs)
# -----------------------------------------------------
for JOB_NUMBER in "${FAILED_RSWAG_JOB_NUMBERS[@]}"; do
OUTPUT_FILE="$(output_file_for_job "$JOB_NUMBER")"
if grep -q "Tasks: TOP => traceroute" "$OUTPUT_FILE"; then
FIXED_FILE="$(fixed_file traceroute)"
if [ -f "$FIXED_FILE" ]; then echo "rake traceroute was already fixed."; continue; fi
echo "rake traceroute failed! Running locally..."
if ./scripts/traceroute; then
touch "$FIXED_FILE"
echo "rake traceroute succeeded!" >&2
else
echo "ERROR: rake traceroute failed!" >&2
exit 1
fi
fi
FIXED_FILE="$(fixed_file rswag)"
if [ -f "$FIXED_FILE" ]; then echo "RSwag was already fixed."; continue; fi
./scripts/ci/update_swagger && touch "$FIXED_FILE"
done
# RSpec (and RSwag API tests)
# -----------------------------------------------------
if [ "${#FAILED_RSPEC_AND_RSWAG_JOB_NUMBERS[@]}" -gt 0 ]; then
RSPEC_OUTPUT_FILES=()
for JOB_NUMBER in "${FAILED_RSPEC_JOB_NUMBERS[@]}"; do
OUTPUT_FILE="$(output_file_for_job "$JOB_NUMBER" "*")"
RSPEC_OUTPUT_FILES+=("$OUTPUT_FILE")
done
for JOB_NUMBER in "${FAILED_RSWAG_JOB_NUMBERS[@]}"; do
OUTPUT_FILE="$(output_file_for_job "$JOB_NUMBER")"
RSPEC_OUTPUT_FILES+=("$OUTPUT_FILE")
done
# set -x
QUOTED_RSPEC_OUTPUT_FILES="$(printf "'%s', " "${RSPEC_OUTPUT_FILES[@]}")"
QUOTED_RSPEC_OUTPUT_FILES="${QUOTED_RSPEC_OUTPUT_FILES%, }"
debug "QUOTED_RSPEC_OUTPUT_FILES: $QUOTED_RSPEC_OUTPUT_FILES"
FAILING_SPECS=$(ruby -e "puts Dir.glob([$QUOTED_RSPEC_OUTPUT_FILES]).map { |f|
logs = File.read(f).gsub(/\e\[([;\d]+)?m/, '');
start_regex = /Failed examples?:/;
match_regex = /rspec '?([^'#\s]+)/;
next nil if !logs.match?(start_regex);
logs.
split(start_regex)[1].
scan(match_regex)
}.flatten.reject(&:nil?).map { |s|
s.gsub(/\A\p{Space}+|\p{Space}+\z/, '')
}.join(' ')")
# set +x
if [ -z "$FAILING_SPECS" ]; then
echo "Could not find any failing specs in the RSpec output! Please check the logs manually." >&2
else
read -ra FAILING_SPECS <<< "$FAILING_SPECS"
# Tip: I needed to use hexdump to figure out how to get rid of ANSI color characters.
debug "FAILING_SPECS: '${FAILING_SPECS[*]}'"
# Persist results and use '--only-failures' so that we stop re-running specs once they pass.
if [ -f "$ROOT_DIR"/tmp/rspec_examples.txt ]; then
FIXED_FILE="$(fixed_file rspec)"
if [ -f "$FIXED_FILE" ]; then
echo "RSpec was already fixed." >&2
else
# Once we have an examples file from the initial run, then we only need the `--only-failures` flag.
# (--only-failures is ignored when passing file names with line numbers.)
# See: https://github.com/rspec/rspec-core/issues/2526
echo "Running failed specs locally..." >&2
echo "=> bundle exec rspec --only-failures" >&2
if bundle exec rspec --only-failures; then
touch "$FIXED_FILE"
echo "RSpec succeeded!" >&2
else
echo "ERROR: RSpec failed!" >&2
exit 1
fi
fi
else
echo "Running failed specs locally..." >&2
echo "=> bundle exec rspec ${FAILING_SPECS[*]}" >&2
bundle exec rspec "${FAILING_SPECS[@]}"
fi
fi
fi
# Shellcheck
# -----------------------------------------------------
if [ "${#FAILED_SHELLCHECK_JOB_NUMBERS[@]}" -gt 0 ]; then
FIXED_FILE="$(fixed_file shellcheck)"
if [ -f "$FIXED_FILE" ]; then
echo "Shellcheck was already fixed." >&2
else
echo "Checking Shellcheck errors (with safe corrections)..." >&2
echo "=> AUTOFIX=true ./scripts/ci/shellcheck" >&2
if AUTOFIX=true ./scripts/ci/shellcheck; then
touch "$FIXED_FILE"
echo "Shellcheck succeeded!" >&2
else
echo "ERROR: Shellcheck failed!" >&2
exit 1
fi
fi
fi
echo >&2
echo "Everything is fixed! Push your changes to run a new CI build." >&2