Skip to content

Instantly share code, notes, and snippets.

Last active June 2, 2022 19:47
Show Gist options
  • Save richmilne/b7c6a378157a8879617ca415bdf28472 to your computer and use it in GitHub Desktop.
Save richmilne/b7c6a378157a8879617ca415bdf28472 to your computer and use it in GitHub Desktop.
Approve pull requests on BitBucket Server.
#! /usr/bin/env python3
"""Approve pull requests on BitBucket Server.
If you are trying to approve pull requests on BitBucket from external/3rd-
party tools (like Jenkins) there may be plugins available to do the job
automatically - if you're lucky. The `Bitbucket Approve Plugin`_ seems to be
one such plugin.
Unfortunately, it's only for the *CLOUD*-hosted version of Bitbucket. The
self-hosted version, Bitbucket Server (formerly known as Stash) uses a
different REST api. The API for the Cloud version can be found at
`Cloud API`_ (note the ``/api/2.0`` prefixes), while the API for the Server
version is at `Server API`_ (and introduced here_; note that it's version
1.0, not 2.0).

The approval plugin tries to call a REST endpoint on the cloud version
similar to the following::

    .../2.0/repositories/{project-name}/{repo-name}/commit/{SHA1-commit-id}/approve/

But for the Server version we need a URL of the form::

    .../1.0/projects/{project-key}/repos/{repo-slug}/pull-requests/{id}/approve/
This module aims to map the SHA1-hash of most recent (i.e. HEAD) commit of
the branch in the pull request to the relevant project, repository and pull
request ID, and construct the URL of the approval endpoint.
The constructed URL can then be used with a tool like curl to either add or
remove a user's approval, like this:
Add approval::
curl -X POST -H "X-Atlassian-Token: nocheck" -u user:password http://{BitBucketHost}/rest/api/...-requests/12/approve/
Remove approval::
curl -X DELETE -H "X-Atlassian-Token: nocheck" -u user:password http://{BitBucketHost}/rest/api/...-requests/12/approve/
.. _`Bitbucket Approve Plugin`: (URL lost in transcription)
.. _`Cloud API`: (URL lost in transcription)
.. _`Server API`: (URL lost in transcription)
.. _here: (URL lost in transcription)
"""
# Use this option when calling pylint from command line:
# --ignore-long-lines="^\s*(# )?<?https?://\S+>?$|^\s*(# )?curl.*$|^\.\.\ _.*"
import argparse
import json
import os
import pickle
import string
import subprocess
import sys
from urllib.parse import urlparse
from json.decoder import JSONDecodeError # pylint: disable=ungrouped-imports
def get_hostname(url):
    """Return just the hostname (with port, if any) from a git repo's URL.

    For ssh-style URLs the leading 'user@' is stripped off the netloc.

    >>> get_hostname('http://host:1080/user/repo.git')
    'host:1080'
    >>> get_hostname('ssh://git@host:7999/user/repo.git')
    'host:7999'
    """
    results = urlparse(url)
    host = results.netloc
    if '@' in host:
        # Only ssh-style URLs are expected to carry a 'user@' prefix.
        assert 'ssh' in results.scheme
        host = host[host.find('@')+1:]
    return host
def query_rest_api(credentials, url):
    """Return the parsed JSON response from calling URL with the credentials.

    Credentials must be a string of the form 'user:password'.  The request
    is delegated to the external `curl` binary.  If the response body is
    not valid JSON (e.g. an error page), an empty dict is returned.
    """
    args = ['curl', '-u', credentials, url]
    # NOTE(review): original invocation was truncated in transcription;
    # reconstructed as a captured, non-raising subprocess.run call.
    result = subprocess.run(args, stdout=subprocess.PIPE,
                            stderr=subprocess.PIPE, check=False)
    stdout = result.stdout
    if isinstance(stdout, bytes):
        stdout = stdout.decode('utf8')
    try:
        attrs = json.loads(stdout)
    except JSONDecodeError:
        # Best-effort: callers treat an empty dict as "no results".
        attrs = {}
    return attrs
# pylint: disable=unused-variable
class BitBucketAPIv1(object):
    """Construct BitBucket Server REST endpoint to approve a pull request.

    Sample usage:

    >>> git_url = 'http://localhost:1080'
    >>> creds = 'jenkins:password'
    >>> cache_file = '/tmp/commit-hash-to-pull-ids.pickle'
    >>> commit_id = '7f36c360607496320d155b3bbb67f2f788a03ef5'
    >>> check = ('http://localhost:1080/rest/api/1.0/projects/'
    ...          'Ive-gotta-/repos/slug/'
    ...          'pull-requests/-1/approve/')
    >>> with BitBucketAPIv1(git_url, creds, cache_file) as bb_query:
    ...     ans = bb_query.create_approve_url(commit_id)
    ...     ans == check
    True

    If you don't use the class's context manager (`with...`) you need to
    explicitly make sure the cached results are saved back to the cache
    file, by calling save_cache() yourself.
    """

    def __init__(self, git_url, credentials, cache_file=None):
        """Set up class to query BitBucket Server REST API.

        Meaning of arguments to this class are the same as the arguments to
        this script, use '-h' from the command-line to view them.

        In essence, this module maps commit IDs to the pull requests they
        belong to. As this involves several round-trips to the server, and
        iterating through ALL repositories and pull-requests visible to the
        current user, there's the option to save the result of previous
        lookups in the cache_file."""
        host = get_hostname(git_url)
        self.rest_stub = 'http://%s/rest/' % host
        self.credentials = credentials
        self.user_slug = credentials.split(':')[0]
        if cache_file and os.path.isfile(cache_file):
            with open(cache_file, 'rb') as handle:
                # NOTE(review): pickle.load on a local cache file is fine
                # only as long as the cache file is trusted.
                cache = pickle.load(handle)
        else:
            cache = {}
        self.cache = cache
        self.cache_file = cache_file
        # Make sure our doctest example works!
        self.cache['7f36c360607496320d155b3bbb67f2f788a03ef5'] = [
            'Ive-gotta-', 'slug', -1
        ]
        # {project key: [repo slugs]} - filled in by get_repos()
        self._project_repos = {}

    def get_paged_results(self, url):
        """Iterate through all pages of the Bitbucket server's response.

        All values in the response are `yield`ed along with a sequence
        number, in tuples of the form (seq #, value).
        """
        last_page = False
        start = 0
        count = 0
        # Conditional logic below needed because URL might already contain
        # some parameters, like '?limit=xx' for paging.
        suffix = '%sstart=%%d' % ('&' if '?' in url else '?')
        paged_url = url + suffix
        while not last_page:
            results = query_rest_api(self.credentials, paged_url % start)
            start = results.get('nextPageStart')
            last_page = results.get('isLastPage')
            if start is None:
                # No next page offset: the server must be telling us this
                # is the last page (or we got an empty/error response).
                assert last_page or last_page is None
                last_page = True
            for value in results.get('values', []):
                count += 1
                yield (count, value)

    def get_repos(self):
        """Enumerate all the repositories visible to the current user.

        The repository identifiers/names (along with the IDs of their parent
        projects) are saved in this class for later retrieval."""
        project_repos = {}
        url = self.rest_stub + 'api/1.0/repos'
        for seq, value in self.get_paged_results(url):
            repo_slug = value['slug']
            project_key = value['project']['key']
            repos = project_repos.get(project_key, [])
            repos.append(repo_slug)
            project_repos[project_key] = repos
        self._project_repos = project_repos

    def get_pull_requests(self):
        """Enumerate all the pull requests visible to the current user.

        This method relies on all the user-visible repositories having been
        saved earlier.

        The SHA1-hash of the head of the pull request branch, the id of the
        pull request, and the name of the repository and project to which
        the pull request belongs, are saved to the cache in the form:

            {sha1-hash: [project name, repo name, pull request id]}
        """
        suffix = ('api/1.0/projects/%(project)s/repos/%(repo_slug)s/'
                  'pull-requests')
        for project, repos in self._project_repos.items():
            for repo_slug in repos:
                url = self.rest_stub + suffix % locals()
                for seq, value in self.get_paged_results(url):
                    pull_id = value['id']
                    pull_head = value['fromRef']['latestCommit']
                    self.cache[pull_head] = [project, repo_slug, pull_id]

    def create_approve_url(self, commit_sha1):
        """Create URL to approve pull request whose head is commit_sha1.

        Returns None if no pull request with that head commit is found."""
        suffix = ('api/1.0/projects/%(project)s/repos/%(repo_slug)s/'
                  'pull-requests/%(pull_id)d/approve/')
        # Apparently endpoint above is deprecated. Still works for me,
        # though, but might have to switch to this one:
        # suffx = ('api/1.0/projects/%(project)s/repos/%(repo_slug)s/'
        #          'pull-requests/%(pull_id)d/participants/%(user_slug)s')
        # user_slug = self.user_slug
        params = self.cache.get(commit_sha1)
        if not params:
            # Cache miss: enumerate everything visible to this user,
            # refreshing the cache, then retry the lookup.
            self.get_repos()
            self.get_pull_requests()
            params = self.cache.get(commit_sha1)
        if params:
            project, repo_slug, pull_id = params
            return self.rest_stub + suffix % locals()

    def __enter__(self):
        # The save_cache() method was originally named __del__ - which
        # didn't work, as the open() builtin has already been destroyed by
        # the time the method is called. One suggestion was to make the
        # class a context manager, and handle the clean-up in __exit__.
        return self

    def __exit__(self, exc_type, exc_value, traceback):
        # print('Args:', (exc_type, exc_value, traceback))
        self.save_cache()

    def save_cache(self):
        """Save commit -> project/repos/pull requests found this session."""
        if hasattr(self, 'cache') and hasattr(self, 'cache_file'):
            cache = self.cache
            if cache and self.cache_file:
                with open(self.cache_file, 'wb') as handle:
                    pickle.dump(cache, handle)
def non_blank_string(text):
    """Verify that a command-line argument is a valid, non-blank string."""
    stripped = text.strip()
    if stripped:
        return stripped
    raise ValueError
def valid_sha1_commit_id(text):
    """Verify that the input is a valid, 40-digit, hex number.

    Returns True only when `text` is exactly 40 characters long and every
    character is a hexadecimal digit (either case)."""
    # `and`/`all` short-circuit, unlike the original bitwise `&` on bools,
    # and avoid building throwaway sets; the result is identical.
    return len(text) == 40 and all(c in string.hexdigits for c in text)
def parse_cmd_line_args():
    """Extract and verify arguments from cmd-line and environment vars.

    Each option falls back to an environment variable (the Jenkins
    convention): $GIT_URL, $JENKINS_CREDS and $GIT_COMMIT.  Returns the
    parsed arguments as a dict whose keys match the dests used by main().

    NOTE(review): the add_argument calls were truncated in transcription;
    flags/dests reconstructed from the surviving help strings and from the
    keys main() reads ('GIT_URL', 'CREDENTIALS', 'sha1_commit_id',
    'cache_file') - confirm against the original script."""
    git_commit = os.environ.get('GIT_COMMIT', '')
    git_url = os.environ.get('GIT_URL', '')
    jenkins_creds = os.environ.get('JENKINS_CREDS', '')
    parser = argparse.ArgumentParser(
        # formatter_class=argparse.ArgumentDefaultsHelpFormatter,
        epilog=('REST endpoint will be inserted into environment '
                'as variable $%s and printed to stdout.' % OUTPUT_ENV_VAR))
    parser.add_argument(
        '-u', '--git-url', dest='GIT_URL',
        type=non_blank_string, default=git_url,
        help=('URL of your self-hosted BitBucket (git) Server. '
              'Default: $GIT_URL="%s"' % git_url))
    parser.add_argument(
        '-c', '--credentials', dest='CREDENTIALS',
        type=non_blank_string, default=jenkins_creds,
        help=("A 'user:password' string of the user who will be approving the "
              "pull request. Default: $JENKINS_CREDS=\"%s\"" % jenkins_creds))
    parser.add_argument(
        '-s', '--sha1-commit-id', default=git_commit,
        help=('Commit hash of head of pull request. '
              'Default: $GIT_COMMIT="%s"' % git_commit))
    parser.add_argument(
        '-f', '--cache-file',
        help='Path where commit-id/pull-request mappings will be cached.')
    args = parser.parse_args()
    return vars(args)
def main(cmd_line_args):
    """Wrapper to call BitBucketAPIv1 class and output the results.

    Validates the commit hash, resolves it to an approval URL (if given),
    exports it as $OUTPUT_ENV_VAR and prints shell-consumable output.

    Raises ValueError if the supplied commit hash is malformed."""
    url = cmd_line_args['GIT_URL']
    creds = cmd_line_args['CREDENTIALS']
    commit = cmd_line_args['sha1_commit_id'].strip()
    if commit:
        if not valid_sha1_commit_id(commit):
            msg = 'Invalid SHA1 commit hash "%s". ' % commit
            msg += '(Should be 40-digit hex number.)'
            raise ValueError(msg)
    cache_file = cmd_line_args['cache_file']
    with BitBucketAPIv1(url, creds, cache_file) as bb_query:
        approve_url = None
        if commit:
            approve_url = bb_query.create_approve_url(commit)
        else:
            # Didn't get commit id to look up, so just find the current
            # mapping and save to cache file for later use.
            bb_query.get_repos()
            bb_query.get_pull_requests()
    # The output of this script is often wrapped in a shell command such as
    # "declare -x $( ...)"
    # If this script doesn't return anything, you're executing
    # "declare -x", which dumps ALL vars to command line.
    # "declare -x unset OUTPUT_ENV_VAR" doesn't really hurt, but it doesn't
    # have the effect you want.
    # To keep the cmd quiet, you have to pass it something to declare, hence
    # the dummy declaration '_=_' below
    if approve_url:
        os.environ[OUTPUT_ENV_VAR] = approve_url
        print('%s=%s' % (OUTPUT_ENV_VAR, approve_url))
    else:
        # print('No commit/repos found', end="")
        if OUTPUT_ENV_VAR in os.environ:
            del os.environ[OUTPUT_ENV_VAR]
            # print('; deleting $%s' % OUTPUT_ENV_VAR)
        print('unset %s' % OUTPUT_ENV_VAR)
    print('_=_')
if __name__ == '__main__':
    # Run the module's doctests first, then do the real work with
    # arguments taken from the command line / environment.
    import doctest
    doctest.testmod()
    main(parse_cmd_line_args())
Sign up for free to join this conversation on GitHub. Already have an account? Sign in to comment