
@wangkuiyi
Last active February 4, 2017 08:14
This bash script checks whether any issue in a GitHub repo has gone without comments for more than 30 minutes.
project="wangkuiyi/mapreduce-lite"
if [[ $# -ne 0 ]]; then
project=$1
fi
issues_per_page=25
page=1
until [ $issues_per_page -lt 25 ]; do
echo Parsing page: $page >&2
url=$(printf 'https://github.com/%s/issues?page=%d' $project $page)
url=$url+'&q=is%3Aissue'
echo $url >&2
issues_per_page=$(curl -s "$url" | grep "<a href=\"/$project/issues/" | grep 'class="link-gray-dark' | sed "s/^.*issues\/\([0-9]*\).*$/\1/" | tee issues.cache | wc -l)
if [[ $issues_per_page -eq 0 ]]; then
echo "Waiting for a while before retry talking to Github.com ..." >&2
sleep 61
issues_per_page=$(curl -s "$url" | grep "<a href=\"/$project/issues/" | grep 'class="link-gray-dark' | sed "s/^.*issues\/\([0-9]*\).*$/\1/" | tee issues.cache | wc -l)
fi
echo issues per page: $issues_per_page >&2
let page=page+1
for issue in $(cat issues.cache); do
issue_url=$(printf 'https://github.com/%s/issues/%d' $project $issue)
comments_per_issue=$(curl -s $issue_url | grep -A 3 "commented" | grep 'relative-time' | sed "s/^.*datetime=\"\([^Z]*\).*$/\1/" | tee comments.cache | wc -l)
if [[ $comments_per_issue -eq 1 ]]; then
current_sec=$(date +%s)
comment_time=$(head -n1 comments.cache)
comment_sec=$(date -j -f "%Y-%m-%dT%H:%M:%S" $comment_time +%s)
diff_sec=$(expr $current_sec - $comment_sec)
comment_day=$(date -j -f "%Y-%m-%dT%H:%M:%S" $comment_time +%a)
if [[ $diff_sec -gt 1800 ]]; then
echo "It has been $(expr $diff_sec / 3600) hours since $issue_url was filed at $comment_time $comment_day"
fi
fi
done
sleep 2 # work around the Github abuse detection mechanism.
done
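
For example, assuming the script is saved as check_stale_issues.sh (a hypothetical filename, not given in the gist), it checks the default repo when run with no arguments, or any other public repo named on the command line:

# The filename and the second repo below are illustrative only.
bash check_stale_issues.sh                        # checks wangkuiyi/mapreduce-lite (the default)
bash check_stale_issues.sh some-owner/some-repo   # checks a repo given as the first argument
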
@reyoung commented Feb 4, 2017

#!/bin/bash
project="wangkuiyi/mapreduce-lite"
if [[ $# -ne 0 ]]; then
   project=$1
fi

issues_per_page=25
page=1
until [ $issues_per_page -lt 25 ]; do
    echo Parsing page: $page >&2

    url=$(printf 'https://github.com/%s/issues?page=%d' $project $page)
    url="${url}&q=is%3Aissue"

    echo $url >&2

    issues_per_page=$(curl -s "$url" | grep "<a href=\"/$project/issues/" | grep 'class="link-gray-dark' | sed "s/^.*issues\/\([0-9]*\).*$/\1/" | tee issues.cache | wc -l)
    if [[ $issues_per_page -eq 0 ]]; then
	echo "Waiting for a while before retry talking to Github.com ..." >&2
        sleep 61
	issues_per_page=$(curl -s "$url" | grep "<a href=\"/$project/issues/" | grep 'class="link-gray-dark' | sed "s/^.*issues\/\([0-9]*\).*$/\1/" | tee issues.cache | wc -l)
    fi

    echo issues per page: $issues_per_page >&2
    let page=page+1

    for issue in $(cat issues.cache); do
	issue_url=$(printf 'https://github.com/%s/issues/%d' $project $issue)
	curl -s $issue_url > issue_page.cache
	comments_per_issue=$(cat issue_page.cache | grep -A 3 "commented" | grep 'relative-time' | sed "s/^.*datetime=\"\([^Z]*\).*$/\1/" | tee comments.cache | wc -l)
        author=$(cat issue_page.cache | grep "class=\"author\">" | head -n 1 | sed "s/^.*class=\"author\">\([^<]*\)<\/a.*\$/\1/")
	state=$(cat issue_page.cache  | grep state | head -n 1| sed 's/.*"state state-\([^Z]*\)".*/\1/')
	if [[ $comments_per_issue -eq 1 ]]; then
	    current_sec=$(date +%s)
	    comment_time=$(head -n1 comments.cache)
	    comment_sec=$(date -j -f "%Y-%m-%dT%H:%M:%S" $comment_time +%s)
	    diff_sec=$(expr $current_sec - $comment_sec)
	    comment_day=$(date -j -f "%Y-%m-%dT%H:%M:%S" $comment_time +%a)
	    if [[ $diff_sec -gt 1800 ]]; then
		echo "It has been $(expr $diff_sec / 3600) hours since $issue_url was filed by $author at $comment_time $comment_day, state $state"
	    fi
	fi
    done

    sleep 2 # work around the GitHub abuse detection mechanism.
done

Add author and state to the log.
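
Note that date -j -f is BSD/macOS syntax, so both scripts as written will not run on Linux. A minimal sketch of the equivalent timestamp conversion with GNU date (assuming coreutils is installed) looks like this:

# Sketch for GNU date; -j and -f above are BSD-only flags.
comment_time="2017-02-04T08:14:00"          # example value as read from comments.cache
comment_sec=$(date -d "$comment_time" +%s)  # seconds since the epoch
comment_day=$(date -d "$comment_time" +%a)  # abbreviated weekday name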
