Skip to content

Instantly share code, notes, and snippets.

@iMissile
Last active August 25, 2022 10:09
Show Gist options
  • Star 0 You must be signed in to star a gist
  • Fork 0 You must be signed in to fork a gist
  • Save iMissile/a576a2d55af13f77cf7f34242fb1f02c to your computer and use it in GitHub Desktop.
Jira parsing
library(tidyverse)
library(magrittr)
library(httr2)
library(rvest)
library(glue)
library(jsonlite)
library(stringi)
library(checkmate)
library(tictoc)
library(futile.logger)
library(data.table)
# Technical credentials and connection setup ----
# NOTE(review): the string placeholders below are Russian for
# "YOUR ACCOUNT", "YOUR PASSWORD" and "YOUR JIRA SITE"; replace them before
# running. Prefer Sys.getenv() over hard-coding secrets in the script.
user_id <- "ВАША УЧЕТКА"
user_pwd <- "ВАШ ПАРОЛЬ"
base_url <- "https://САЙТ ДЖИРА/rest/api/latest"
# Base request object with basic auth; every API call below extends it.
base_req <- httr2::request(base_url) %>%
req_auth_basic(user_id, user_pwd)
# Sprint marker: issues are later filtered to those whose sprint name
# contains this tag; it also names the output .xlsx file.
current_sprint_tag <- "Sprint14"
# Log both to the console and to 'jira.log'.
flog.appender(appender.tee("jira.log"))
flog.threshold(INFO)
flog.info("Start batch processing")
# 1. Fetch the list of projects of interest ----------
proj_resp <- base_req %>%
  req_url_path_append("project") %>%
  req_perform()
# Keep only projects whose name carries the team marker
# ("ВАШ МАРКЕР ПРОЕКТОВ" = your project marker), one row per project.
proj_df <- proj_resp %>%
  resp_body_json() %>%
  bind_rows() %>%
  filter(stri_detect_fixed(name, "ВАШ МАРКЕР ПРОЕКТОВ")) %>%
  distinct(self, id, key, name)
# 2. Fetch all issues for the selected projects ----
#' For a given project name, return a data frame of its issues with key
#' fields flattened and the sprint marker extracted from the raw sprint
#' custom field.
#'
#' @param project_tag Project name as accepted by a JQL `project=` clause.
#' @param max_results Page window requested from Jira (default 1000).
#'   No pagination is implemented: the function asserts that the project's
#'   total issue count fits into a single page of this size.
#' @return A data frame with one row per (issue, sprint) pair.
getIssuesInProj <- function(project_tag, max_results = 1000){
  resp <- base_req %>%
    req_url_path_append("search") %>%
    req_url_query(jql = glue("project='{project_tag}'")) %>%
    req_url_query(maxResults = max_results) %>%
    req_perform()
  resp_body <- resp_body_string(resp)
  # Make sure the record count fits inside the response window; the upper
  # bound now tracks max_results instead of a second hard-coded 1000.
  jqr::jq(resp_body, '.total') %T>%
    {flog.info(glue("Project '{project_tag}' has {.} issues"))} %>%
    {assertInt(as.integer(.), upper = max_results)}
  # Merge .fields into the issue object, then keep only the columns we need.
  # NOTE: `.customfield_10101[]?` iterates over every sprint attached to an
  # issue, so multi-sprint issues yield several rows (deduplicated later by
  # the sprint filter downstream).
  resp_body %>%
    jqr::jq('[.issues[] | . + .fields | del(.fields) | {id, key,
issuetype:(.issuetype.name), is_subtask:(.issuetype.subtask), created,
status:.status.name, summary, sprint_raw:(.customfield_10101[]? // ""), progress, project_name:(.project.name)}]') %>%
    fromJSON() %>%
    # extract the sprint marker from a string like 'name=YYYYY Sprint10 1-15.06,'
    mutate(sprint = stri_match_first_regex(sprint_raw, "name=([^,]+),")[, 2]) %>%
    select(-sprint_raw)
}
# Pull issues for every selected project, then keep the current sprint only.
issues_df <-
  map_dfr(proj_df$name, getIssuesInProj) %>%
  filter(stri_detect_fixed(sprint, current_sprint_tag))
# Quick sanity check: issue counts per project.
janitor::tabyl(issues_df, project_name)
# 3. Pull worklogs for the sprint's issues ----
#' For a given issue key, return a data frame of its worklog records with
#' author objects flattened to display names.
#'
#' @param issue_tag Issue key, e.g. "PRJ-123".
#' @param max_results Page window requested from Jira (default 1000).
#'   No pagination is implemented: the function asserts that every worklog
#'   arrived in a single page.
#' @return A data frame with one row per worklog entry.
getWklogInIssue <- function(issue_tag, max_results = 1000){
  resp <- base_req %>%
    req_url_path_append("issue") %>%
    req_url_path_append(issue_tag) %>%
    req_url_path_append("worklog") %>%
    req_url_query(maxResults = max_results) %>%
    req_perform()
  resp_body <- resp_body_string(resp)
  # Completeness check: the number of worklogs actually returned must equal
  # the declared total. (The previous check, `.maxResults == .total`, fails
  # spuriously whenever the server echoes the requested window and the issue
  # has fewer worklogs than max_results.)
  crc_lst <- resp_body %>%
    jqr::jq('{maxResults, total, get_all:((.worklogs | length) == .total)}') %>%
    fromJSON() %T>%
    {flog.info(glue("Issue '{issue_tag}' has {.$total} records"))}
  assertTRUE(crc_lst$get_all)
  # Flatten nested author objects into plain display-name columns.
  resp_body %>%
    jqr::jq('[.worklogs[] | . + {author:.author.displayName} + {updateAuthor:.updateAuthor.displayName}]') %>%
    fromJSON()
}
# Worklogs for every issue of the sprint, with derived time/date columns.
# TODO: filter worklogs by the sprint's date range
wklog_df <- map_dfr(issues_df$key, getWklogInIssue) %>%
  mutate(
    time_hr = timeSpentSeconds/3600,
    timestamp = anytime::anytime(updated),
    date = lubridate::as_date(timestamp)
  ) %>%
  select(date, author, updateAuthor, time_hr, issueId, timestamp)
# 4. Assemble the report: total hours and contributor list per issue ----
# Status lives only in issues_df, so filtering before the join is equivalent
# to filtering after it.
done_issues <- issues_df %>%
  select(key, summary, status, id, project_name) %>%
  filter(status %in% c("Done", "Cancelled"))
report_df <- done_issues %>%
  left_join(wklog_df, by = c("id" = "issueId")) %>%
  group_by(key, summary, project_name, status) %>%
  summarise(
    time_hr = sum(time_hr),
    author = stri_c(unique(author), collapse = "; ")
  ) %>%
  ungroup()
# 5. Validate the aggregation: every issue key must appear exactly once ----
key_counts <- as.data.table(report_df)[, .N, by = key]
assertDataTable(key_counts[N > 1], max.rows = 0)
# 6. Dump the report to Excel, one file per sprint ----
out_path <- here::here("output", glue("{current_sprint_tag}.xlsx"))
openxlsx::write.xlsx(report_df, out_path, asTable = TRUE)
Sign up for free to join this conversation on GitHub. Already have an account? Sign in to comment