@droopy4096 · Created April 1, 2021 19:09
#!/bin/bash
# bash (not plain sh) is needed: the script uses process substitution and $'\n'.
set -e
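# Find Azure DevOps pipeline runs that built a given commit and download their
# "values" artifacts into ./downloads/<pipeline definition name>. Requires the
# Azure CLI with the azure-devops extension (`az pipelines ...`) and jq.
#
# Behaviour is driven by environment variables:
#   PROJECT_NAME   - Azure DevOps project to query
#   BRANCH_FILTER  - source branch ref to match (default: refs/heads/develop)
#   FOLDER_NAME    - pipeline folder (definition.path) to match
#   NAME_PREFIX    - pipeline definition name prefix to match
#   SHORT_HASH     - commit hash (or prefix) the runs must have built
#   DEBUG          - if set, use local JSON fixtures instead of calling az
#
# Example invocation (project name, hash and script name are placeholders):
#   PROJECT_NAME=MyProject SHORT_HASH=abc1234 NAME_PREFIX=svc- ./fetch_artifacts.sh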
# MY_JOBS=$(mktemp containers.XXXX)
# ALL_RUNS=$(mktemp runs.XXXX)
MY_JOBS=$(mktemp jobs.XXXXXX)
cleanup(){
  [ -n "${DEBUG}" ] && echo ${MY_JOBS}
  rm ${MY_JOBS}
}
trap cleanup EXIT
export BRANCH_FILTER=${BRANCH_FILTER:-refs/heads/develop}
export FOLDER_NAME=${FOLDER_NAME:-''}
export NAME_PREFIX=${NAME_PREFIX:-''}
export SHORT_HASH=${SHORT_HASH:-""}
PROJECT_NAME=${PROJECT_NAME:-''}
# DEBUG=${DEBUG:-"yes"}
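# When DEBUG is set, the listing functions below read a locally cached
# ${PROJECT_NAME}.json file (saved output of `az pipelines runs list`) instead
# of calling the API, and filtering by pipeline id is done with jq.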
list_jobs(){
  # enumerate ALL jobs in specified project:
  local project_name=${1}
  if [ -z "${DEBUG}" ]
  then
    az pipelines runs list --project ${project_name}
  else
    cat ${project_name}.json
  fi
}
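# Re-query only the given pipeline definition ids. A single call with several
# --pipeline-ids does not behave as expected (see the issue linked below), so the
# workaround is one invocation per id via process substitution, with `jq -s`
# merging the per-pipeline arrays into one list.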
list_specific_jobs(){
  local project_name=${1}
  shift
  local pipeline_ids=$@
  local cmd_base
  local cmd
  if [ -z "${DEBUG}" ]
  then
    #XXX https://github.com/Azure/azure-devops-cli-extension/issues/1113
    # az pipelines runs list --project ${project_name} --pipeline-ids="${pipeline_ids}"
    cmd_base="az pipelines runs list --project ${project_name} --pipeline-ids="
    for pipeline_id in ${pipeline_ids}
    do
      cmd="${cmd} <(${cmd_base}${pipeline_id})"
    done
    #XXX https://github.com/Azure/azure-devops-cli-extension/issues/1113
    # jq -s '.[0].list=[.[].list|add]|.[0]' | az pipelines runs list --project ${project_name} --pipeline-ids="${pipeline_ids}"
    eval "jq -s '.[0]=[.[]|add]|.[0]' ${cmd}"
  else
    # PIPELINE_IDS="${pipeline_ids}" jq '[ .[] as $current_element | select(env.PIPELINE_IDS| split(" ")| contains([$current_element.definition.id]))]' ${project_name}.json
    PIPELINE_IDS="${pipeline_ids}" jq '[env.PIPELINE_IDS | split(" ") | .[] | tonumber ] as $y | [.[] | select(([.definition.id] as $x | $x - ($x-$y) | length ) > 0)]' ${project_name}.json
  fi
}
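# Project each run down to the fields of interest and apply the branch/folder/name
# filters. The duration strips the fractional seconds and UTC offset from
# finishTime/startTime so jq's fromdate can parse them, then subtracts the epoch
# seconds; runs missing either timestamp fall back to a duration of 0.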
abbreviate_list(){
  # condense output down to most important fields:
  local all_jobs=${1}
  jq '[.[] | { pipeline_id: .definition.id, run_id: .id, name: .definition.name, path: .definition.path, sourceBranch: .sourceBranch, sourceVersion: .sourceVersion, status: .status, result: .result, duration: (try ((.finishTime | sub("(?<time>.*)\\..*(?<zone>[+-]..:..)$"; "\(.time)Z") | fromdate )-(.startTime | sub("(?<time>.*)\\..*(?<zone>[+-]..:..)$"; "\(.time)Z") | fromdate)) catch 0) } | select(.sourceBranch==env.BRANCH_FILTER) | select(.path==env.FOLDER_NAME) | select(.name|startswith(env.NAME_PREFIX))]' ${all_jobs}
}
filter_list(){
  # keep only runs on the right branch, in the right folder, with the right name prefix
  # (raw run objects keep name/path under .definition, unlike the flattened output of abbreviate_list):
  local all_jobs=${1}
  jq '[.[] | select(.sourceBranch==env.BRANCH_FILTER) | select(.definition.path==env.FOLDER_NAME) | select(.definition.name|startswith(env.NAME_PREFIX))]' ${all_jobs}
}
find_hash(){
  # filter out anything that is not from desired commit:
  local job_list=${1}
  jq '[.[] | select(try .sourceVersion catch ""|startswith(env.SHORT_HASH))]' ${job_list}
}
not_completed(){
  local job_list=${1}
  jq '[.[] | select(.status != "completed")]' ${job_list}
}
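# Collapse the run list to one entry per pipeline definition name, keeping the
# first run seen for each name. Callers are expected to pass a list ordered
# newest-first (see --query-order FinishTimeDesc below), so "first" is the latest run.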
select_latest(){
  local job_list=${1}
  jq '. as $jobs | [.[].definition.name]|unique as $containers | [ $containers[] as $container | first($jobs |.[] | select(.definition.name == $container))]' ${job_list}
}
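# Single-call variant used by the main flow: let the server filter via a JMESPath
# --query (completed runs whose sourceVersion starts with SHORT_HASH, in the right
# folder, with the right name prefix), ordered newest-first.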
list_completed_jobs(){
  # enumerate RELEVANT jobs in specified project:
  local project_name=${1}
  az pipelines runs list --project ${project_name} --query-order FinishTimeDesc --query "[?sourceVersion != None] | [?status == 'completed'] | [?starts_with(sourceVersion,'${SHORT_HASH}')] | [?definition.path == '${FOLDER_NAME}'] | [?starts_with(definition.name,'${NAME_PREFIX}')]"
}
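# Poll until every run matching the filters reports status "completed". The first
# pass narrows the set down to the relevant definition ids; each iteration then
# re-queries just those ids and sleeps 20s while anything is still in progress.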
wait_for_jobs(){
  # list_jobs ${PROJECT_NAME} | find_hash - | abbreviate_list - | jq '.[] | select(.status !="completed")'
  list_jobs ${PROJECT_NAME} | find_hash - | filter_list - | not_completed - > ${MY_JOBS}
  while [ "0" -lt $(jq '.[] | select(.status !="completed")' ${MY_JOBS} | wc -l) ]
  do
    IDS=$(jq -r -c '.[] | .definition.id' ${MY_JOBS} | xargs echo)
    list_specific_jobs ${PROJECT_NAME} ${IDS} | tee /tmp/trace.json | not_completed - > ${MY_JOBS}
    [ -n "${DEBUG}" ] && (echo "====> current jobs:" && cat ${MY_JOBS})
    echo "waiting for jobs: ${IDS}"
    sleep 20
  done
}
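# Download one named artifact from a single pipeline run into the given path.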
fetch_artifact(){
  local project_name=${1}
  local run_id=${2}
  local artifact_name=${3}
  local download_path=${4}
  az pipelines runs artifact download --run-id ${run_id} --project ${project_name} --artifact-name ${artifact_name} --path ${download_path}
}
_extract_field(){
  # usage: _extract_field <json_string> <field_name>
  local field_name=${2}
  local json_str=${1}
  echo "${json_str}" | jq -r ".${field_name}"
}
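# For each run in the supplied job list, build a compact JSON record (project,
# run id, artifact name "values", target directory named after the pipeline
# definition) and download that artifact under ./downloads/. IFS is switched to
# newline so every `jq -c` record is consumed as a single loop item.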
fetch_artifacts(){
  local job_list=${1}
  local project_name    # .definition.project.name
  local run_id          # .id
  local artifact_name   # values
  local download_path   # .definition.name
  OLD_IFS="${IFS}"
  IFS=$'\n'
  mkdir -p downloads
  for entry_json in $(jq -c '.[] | { project_name: .definition.project.name, run_id: .id, artifact_name: "values", download_path: .definition.name}' ${job_list})
  do
    project_name=$(_extract_field "${entry_json}" project_name)
    run_id=$(_extract_field "${entry_json}" run_id)
    artifact_name=$(_extract_field "${entry_json}" artifact_name)
    download_path="downloads/"$(_extract_field "${entry_json}" download_path)
    fetch_artifact ${project_name} ${run_id} ${artifact_name} ${download_path}
  done
  IFS="${OLD_IFS}"
}
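# Main flow: take the newest completed run per pipeline definition that built
# ${SHORT_HASH}, save them to latest_jobs.json, and download their artifacts.
# wait_for_jobs is defined above but not invoked here; call it first if the runs
# may still be in progress.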
list_completed_jobs ${PROJECT_NAME} | select_latest - | tee latest_jobs.json | fetch_artifacts -