Last active
November 15, 2017 20:11
-
-
Save ravishchawla/7ba44ca2b191c0cc15a8e3a013bcfe28 to your computer and use it in GitHub Desktop.
prbot
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
import time; | |
import datetime; | |
import requests; | |
import json; | |
import os; | |
import os.path; | |
import time; | |
''' | |
Bot to post Pull Request changes to a Slack channel. | |
For help or assistance, contact | |
Ravish Chawla <chawlar@vmware.com> | |
or | |
Sai Kiran Vudutala <svudutala@vmware.com> | |
''' | |
def post_to_slack(arr, authors, config):
    """Build and (when enabled) post the open-PR summary to a Slack channel.

    arr     -- list of Slack "attachment" dicts, one per open pull request
    authors -- iterable of repo usernames to @-mention in the message text
    config  -- dict with 'webhook_url', 'username_repl' (repo username ->
               Slack handle overrides) and optionally 'project_title'
    """
    # Mention each author, substituting the Slack handle when an override exists.
    repl = config['username_repl']
    users = ''.join('@' + repl.get(user, user) + ' ' for user in authors)

    if 'project_title' in config:
        bot_name = config['project_title'] + '-PR-Bot'
    else:
        bot_name = 'PR-Bot'

    slack_data = {'text': 'Open Pull Requests for ' + users,
                  'attachments': arr,
                  'username': bot_name,
                  'icon_emoji': ':git:',
                  'link_names': '1'}
    slack_json_data = json.dumps(slack_data)

    webhook_url = config['webhook_url']
    print(users)
    # NOTE(review): the actual webhook POST was commented out in the original
    # (dry-run mode?); left disabled here — uncomment to actually post.
    # resp = requests.post(webhook_url, headers={'Content-Type': 'application/json'}, data=slack_json_data);
def get_pullrequests_by_project(decoded, project_name): | |
prs = []; | |
try: | |
# resp = requests.get(request_url, headers={'Authorization' : config['base64_encoded_passcode']}); | |
# print(resp.text); | |
# decoded = json.loads(resp.text); | |
todays_date = datetime.datetime.now(); | |
authors = set(); | |
# =print(decoded['values'][0]); | |
for x in decoded['values']: | |
if project_name in x['title']: | |
createdDateObj = datetime.datetime.fromtimestamp(x['createdDate'] / 1000.0) | |
createdDate = createdDateObj.strftime('%m-%d-%Y %H:%M'); | |
updatedDate = datetime.datetime.fromtimestamp(x['updatedDate'] / 1000.0).strftime('%m-%d-%Y %H:%M'); | |
total_days = ((todays_date - createdDateObj).days); | |
if (total_days / 30) >= 1.0: | |
days_since = str(int(total_days / 30)) + ' months, ' + str( | |
int(total_days % 7)) + ' weeks and ' + str(int(total_days % 7)) + ' days'; | |
elif (total_days / 7) >= 1.0: | |
days_since = str(int(total_days / 7)) + ' weeks and ' + str(int(total_days % 7)) + ' days'; | |
else: | |
days_since = str(total_days) + ' days'; | |
total_unapproved = 0; | |
total_approved = 0; | |
total_needwork = 0; | |
for reviewer in x['reviewers']: | |
if reviewer['status'] == 'UNAPPROVED': | |
total_unapproved = total_unapproved + 1; | |
elif reviewer['status'] == 'APPROVED': | |
total_approved = total_approved + 1; | |
elif reviewer['status'] == 'NEEDS_WORK': | |
total_needwork = total_needwork + 1; | |
status = str(total_approved) + ' Approved'; | |
merge_status = x['properties']['mergeResult']['outcome']; | |
merge_status = merge_status[0] + merge_status[1:].lower(); | |
if total_needwork > 0: | |
status = status + ' ' + str(total_needwork) + ' Need Work' | |
color = 'warning'; | |
elif total_approved >= 2: | |
color = 'good'; | |
elif total_approved > 0: | |
color = '#439FE0'; | |
else: | |
color = 'danger'; | |
if merge_status == 'Conflicted': | |
color = 'danger'; | |
if color != 'good': | |
authors.add(x['author']['user']['name']); | |
data = {}; | |
data['title'] = x['title']; | |
data['color'] = color; | |
data['title_link'] = x['links']['self'][0]['href']; | |
data['author_name'] = x['author']['user']['displayName']; | |
data['fields'] = []; | |
fieldDict = {}; | |
fieldDict['title'] = 'Last Update'; | |
fieldDict['value'] = updatedDate; | |
fieldDict['short'] = 'true'; | |
data['fields'].append(fieldDict); | |
fieldDict = {}; | |
fieldDict['title'] = 'Days since created'; | |
fieldDict['value'] = days_since; | |
fieldDict['short'] = 'true'; | |
data['fields'].append(fieldDict); | |
fieldDict = {}; | |
fieldDict['title'] = 'Status'; | |
fieldDict['value'] = status; | |
fieldDict['short'] = 'true'; | |
data['fields'].append(fieldDict); | |
fieldDict = {}; | |
fieldDict['title'] = 'Merge status'; | |
fieldDict['value'] = merge_status; | |
fieldDict['short'] = 'true'; | |
data['fields'].append(fieldDict); | |
prs.append(data); | |
except Exception as e: | |
print(e, "Error encountered using APIs"); | |
return prs, authors; | |
def sort_by_date(prs):
    """Sort PR attachments by their 'Last Update' field, oldest first.

    BUG FIX: the original raised IndexError on an empty list and KeyError on
    entries without 'fields' — which main() actually produces via its
    'No Pull Requests are open at this time' placeholder. Such inputs are now
    returned unchanged.
    """
    if not prs or 'fields' not in prs[0]:
        return prs
    positions = [pos for pos, f in enumerate(prs[0]['fields']) if f['title'] == 'Last Update']
    if not positions:
        return prs
    last_update_pos = positions[0]
    return sorted(
        prs,
        key=lambda x: datetime.datetime.strptime(
            x['fields'][last_update_pos]['value'], '%m-%d-%Y %H:%M'))
def get_response(request_url, encoded_password):
    """Issue a GET to *request_url* with *encoded_password* as the Authorization header."""
    auth_headers = {'Authorization': encoded_password}
    return requests.get(request_url, headers=auth_headers)
def get_last_exec_time(current_dir_path):
    """Return the epoch time recorded by update_exec_time(), or 0 when no log exists.

    BUG FIX: builds the path with os.path.join instead of a hard-coded
    backslash (which produced a literal '\\last_exec_time.log' filename on
    non-Windows systems), and closes the log file via a with-block (the
    original leaked the handle).
    """
    log_file_name = os.path.join(current_dir_path, 'last_exec_time.log')
    if not os.path.isfile(log_file_name):
        return 0
    with open(log_file_name) as log_file:
        return json.load(log_file)['last_exec_time']
def update_exec_time(current_dir_path, exec_time):
    """Persist *exec_time* (epoch seconds) to last_exec_time.log in *current_dir_path*.

    BUG FIX: builds the path with os.path.join instead of a hard-coded
    backslash, matching how the log should be found again on any platform.
    """
    log_file_name = os.path.join(current_dir_path, 'last_exec_time.log')
    with open(log_file_name, 'w') as log_file:
        json.dump({'last_exec_time': exec_time}, log_file)
def main():
    """Fetch all open PRs from Stash and post a summary to Slack.

    Throttled: exits without doing anything when the previous run (recorded
    in last_exec_time.log) was less than an hour ago.
    """
    current_time = time.time()
    current_dir_path = os.path.dirname(os.path.realpath(__file__))
    last_exec_time = get_last_exec_time(current_dir_path)
    # Record this run up front, even if we bail out below.
    update_exec_time(current_dir_path, current_time)

    # BUG FIX: the original divided the seconds delta by 60 (minutes) while
    # the variable name and the `< 1` throttle clearly intend hours.
    time_diff_in_hours = int((current_time - last_exec_time) // 3600)
    if time_diff_in_hours < 1:
        exit()

    # BUG FIX: os.path.join instead of a hard-coded backslash, and the config
    # file is closed via a with-block.
    with open(os.path.join(current_dir_path, 'config.json')) as config_file:
        config = json.load(config_file)

    all_prs = []
    flagged_authors = set()
    index = 0
    while True:
        # The Stash API paginates; follow nextPageStart until exhausted.
        request_url = ("https://stash.air-watch.com/rest/api/latest/projects/AIRW/repos/"
                       "Canonical/pull-requests?order=oldest&state=OPEN&role.1=AUTHOR"
                       "&start=%s" % index)
        basic_auth = "Basic " + config['base64_encoded_passcode']
        resp = get_response(request_url, basic_auth)
        decoded = json.loads(resp.text)
        result, authors = get_pullrequests_by_project(decoded, config['project_title'])
        all_prs.extend(result)
        flagged_authors |= authors
        if 'nextPageStart' not in decoded:
            break
        index = decoded['nextPageStart']

    if not all_prs:
        all_prs.append({'title': 'No Pull Requests are open at this time'})
    all_prs = sort_by_date(all_prs)
    post_to_slack(all_prs, flagged_authors, config)
# Script entry point: run the bot only when executed directly, not on import.
if __name__ == '__main__':
    main()
Sign up for free
to join this conversation on GitHub.
Already have an account?
Sign in to comment