@GuillaumePressiat
Last active October 14, 2020 08:47
Collect the SI-DEP and SURSAUD COVID csv files published by SPF, from data.gouv.fr, by querying its API; the result is an Excel file, here filtered to Brittany.
library(dplyr, warn.conflicts = FALSE)
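# Catalogue of the SPF resources to collect on data.gouv.fr: for each dataset,
# its web page, its stable download URL, its dataset API endpoint, the file
# name pattern identifying the resource, the csv delimiter and an include flag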
liste_url <- list(
  indicateurs = list(url_web = "https://www.data.gouv.fr/fr/datasets/indicateurs-de-suivi-de-lepidemie-de-covid-19/",
                     url_stable = "https://www.data.gouv.fr/fr/datasets/r/4acad602-d8b1-4516-bc71-7d5574d5f33e",
                     url_api = "https://www.data.gouv.fr/api/1/datasets/5ee9df5003284f565d561278/",
                     titre = "Indicateurs de suivi de l’épidémie de COVID-19",
                     # file_pattern = "indicateurs-covid19-dep",
                     file_pattern = "indicateurs-open-data-dep",
                     delim = ",",
                     include = TRUE),
  tests_positivite = list(url_web = "https://www.data.gouv.fr/fr/datasets/donnees-relatives-aux-resultats-des-tests-virologiques-covid-19/",
                          url_stable = "https://www.data.gouv.fr/fr/datasets/r/406c6a23-e283-4300-9484-54e78c8ae675",
                          url_api = "https://www.data.gouv.fr/api/1/datasets/5ed117db6c161bd5baf070be",
                          titre = "Données relatives aux résultats des tests virologiques COVID-19 SI-DEP",
                          file_pattern = "sp-pos-quot-dep",
                          delim = ";",
                          include = FALSE),
  tests_capacites = list(url_web = "https://www.data.gouv.fr/fr/datasets/capacite-analytique-de-tests-virologiques-dans-le-cadre-de-lepidemie-covid-19/",
                         url_stable = "https://www.data.gouv.fr/fr/datasets/r/0c230dc3-2d51-4f17-be97-aa9938564b39",
                         url_api = "https://www.data.gouv.fr/api/1/datasets/5ed11705afd28672e40fbc2f/",
                         titre = "Capacité analytique de tests virologiques dans le cadre de l'épidémie COVID-19 SI-DEP",
                         file_pattern = "sp-capa-quot-dep",
                         delim = ";",
                         include = TRUE),
  incidence = list(url_web = "https://www.data.gouv.fr/fr/datasets/taux-dincidence-de-lepidemie-de-covid-19/",
                   url_stable = "https://www.data.gouv.fr/fr/datasets/r/19a91d64-3cd3-42fc-9943-d635491a4d76",
                   url_api = "https://www.data.gouv.fr/api/1/datasets/5ed1175ca00bbe1e4941a46a",
                   titre = "Taux d'incidence de l'épidémie de COVID-19 SI-DEP",
                   file_pattern = "sp-pe-tb-quot-dep",
                   delim = ";",
                   include = TRUE),
  sursaud = list(url_web = "https://www.data.gouv.fr/fr/datasets/donnees-des-urgences-hospitalieres-et-de-sos-medecins-relatives-a-lepidemie-de-covid-19/",
                 url_stable = "https://www.data.gouv.fr/fr/datasets/r/eceb9fb4-3ebc-4da3-828d-f5939712600a",
                 url_api = "https://www.data.gouv.fr/api/1/datasets/5e74ecf52eb7514f2d3b8845",
                 titre = "Données des urgences hospitalières et de SOS médecins relatives à l'épidémie de COVID-19",
                 file_pattern = "sursaud-corona-quot-dep",
                 delim = ";",
                 include = TRUE)
)
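# example value, handy when stepping through get_data() interactively;
# the purrr::map() call below supplies its own type_fichier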
type_fichier <- 'tests_capacites'
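# get_data(): for one entry of liste_url, find the latest matching resource via
# the data.gouv.fr API, download it, tidy it and keep only the Breton
# departments (29 first, then 22, 35, 56)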
get_data <- function(type_fichier, liste_url) {
  cat(type_fichier, "\n")
  url_api <- liste_url[type_fichier] %>% purrr::map('url_api') %>% .[[1]]
  url_stable <- liste_url[type_fichier] %>% purrr::map('url_stable') %>% .[[1]]
  file_pattern <- liste_url[type_fichier] %>% purrr::map('file_pattern') %>% .[[1]]
  file_delim <- liste_url[type_fichier] %>% purrr::map('delim') %>% .[[1]]
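  # query the dataset API, list its resources, and keep the most recently
  # modified one whose URL matches file_pattern (its timestamp is kept as df_date)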
  u <- httr::GET(url_api, config = httr::config(verbose = FALSE))
  url_search <- httr::content(u)$resources

  df_date <- tibble(url = url_search %>% purrr::map_chr('url'),
                    timestamp = url_search %>% purrr::map_chr('last_modified')) %>%
    filter(grepl(file_pattern, url)) %>%
    arrange(desc(timestamp)) %>%
    pull(timestamp) %>%
    .[1] %>%
    lubridate::as_datetime() %>%
    format(., '%Y-%m-%d--%Hh%Mm')

  url_file <- tibble(url = url_search %>% purrr::map_chr('url'),
                     timestamp = url_search %>% purrr::map_chr('last_modified')) %>%
    filter(grepl(file_pattern, url)) %>%
    arrange(desc(timestamp)) %>%
    pull(url) %>%
    .[1]

  file_name <- basename(url_file) %>%
    stringr::str_replace_all("\\.csv", paste0('__', df_date, ".csv"))
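  # download the file from the stable URL (assumes the SI-DEP/downloads/ folder
  # already exists), read it back, then harmonise column names across datasets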
  f <- httr::GET(url_stable, httr::write_disk(paste0("SI-DEP/downloads/", file_name), overwrite = TRUE))
  donnee <- readr::read_delim(paste0("SI-DEP/downloads/", file_name), delim = file_delim)

  names(donnee)[names(donnee) == "departement"] <- "dep"
  names(donnee)[names(donnee) == "extract_date"] <- "jour"
  names(donnee)[names(donnee) == "date_de_passage"] <- "jour"

  donnee <- donnee %>%
    mutate(semaine = paste0('S', lubridate::isoweek(jour)),
           jour_sem = lubridate::wday(jour, week_start = 1, abbr = FALSE, label = TRUE)) %>%
    select(semaine, jour, jour_sem, everything())

  donnee <- donnee %>% arrange(desc(jour))

  donnee <- bind_rows(donnee %>%
                        filter(dep %in% c('29')),
                      donnee %>%
                        filter(dep %in% c('22', '35', '56')))
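  # dataset-specific adjustments: incidence rate per 100 000 inhabitants for
  # the incidence file, keep only the age class "0" (all ages) for SURSAUD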
  if (type_fichier == 'incidence') {
    donnee <- donnee %>%
      mutate(incidence_1e5 = P * 1e5 / pop)
    donnee <- donnee %>% arrange(desc(jour), cl_age90)
  }

  if (type_fichier == 'sursaud') {
    donnee <- donnee %>% filter(sursaud_cl_age_corona == "0")
  }

  retour <- liste_url[type_fichier]
  retour <- modifyList(retour, setNames(list(list(df_date = df_date,
                                                  file_link = paste0("SI-DEP/downloads/", file_name),
                                                  file_name = file_name,
                                                  donnee = donnee)), type_fichier))
  return(retour)
}
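# collect and prepare every dataset declared in liste_url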
data_and_meta <- unique(names(liste_url)) %>%
  purrr::map(get_data, liste_url)
# weekly incidence, computed by hand because the published weekly data is not up to date
incidence_heb <- data_and_meta %>%
  purrr::map('incidence') %>%
  purrr::compact() %>%
  .[[1]] %>%
  .$donnee %>%
  filter(cl_age90 == '0') %>%
  group_by(semaine, dep, pop) %>%
  summarise(P = sum(P),
            nb_jour = n()) %>%
  mutate(incidence_1e5 = P * 1e5 / pop) %>%
  arrange(desc(semaine))

incidence_heb <- bind_rows(
  incidence_heb %>% filter(dep == "29"),
  incidence_heb %>% filter(dep != "29"))
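# oldest and most recent update timestamps across the collected resources
# (the most recent one is used in the output file name)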
data_date_max <- data_and_meta %>%
  purrr::modify_depth(2, 'df_date') %>%
  unlist() %>%
  max()

data_date_min <- data_and_meta %>%
  purrr::modify_depth(2, 'df_date') %>%
  unlist() %>%
  min()
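# metadata sheet content: one row per collected resource,
# plus a row for the hand-made weekly incidence aggregation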
prep_meta <- tibble(
  `Feuillet excel` = data_and_meta %>%
    purrr::modify_depth(2, 'file_pattern') %>%
    unlist(),
  `Titre de la ressource` = data_and_meta %>%
    purrr::modify_depth(2, 'titre') %>%
    unlist(),
  `Date de mise à jour` = data_and_meta %>%
    purrr::modify_depth(2, 'df_date') %>%
    unlist(),
  `URL` = data_and_meta %>%
    purrr::modify_depth(2, 'url_web') %>%
    unlist()) %>%
  bind_rows(tibble(
    `Feuillet excel` = 'incidence_heb',
    `Titre de la ressource` = 'Aggrégation des données sp-pe-tb-quot-dep à la semaine (par DIM)',
    `Date de mise à jour` = data_and_meta %>%
      purrr::map('incidence') %>%
      purrr::map('df_date') %>%
      purrr::compact() %>%
      unlist(),
    URL = data_and_meta %>%
      purrr::map('incidence') %>%
      purrr::map('url_web') %>%
      purrr::compact() %>%
      unlist()
  ))
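# openxlsx writes character columns of class "hyperlink" as clickable links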
class(prep_meta$URL) <- "hyperlink"
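# build the workbook: a metadata sheet first, then one sheet per dataset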
wb <- openxlsx::createWorkbook()
li <- 1:length(data_and_meta)
# i <- li[1]
openxlsx::addWorksheet(wb, sheetName = "Méta-données")
openxlsx::writeDataTable(wb, sheet = "Méta-données", prep_meta, bandedRows = TRUE, tableStyle = "TableStyleDark9")
tab_excel <- function(i) {
  temp <- data_and_meta[i][[1]][[1]]
  openxlsx::addWorksheet(wb, sheetName = temp$file_pattern)
  openxlsx::writeDataTable(wb, sheet = temp$file_pattern, temp$donnee, bandedRows = TRUE, tableStyle = "TableStyleDark9")
}
li %>% purrr::map(tab_excel)
# add the reshape done by the DIM (weekly incidence)
openxlsx::addWorksheet(wb, sheetName = 'incidence_heb')
openxlsx::writeDataTable(wb, sheet = 'incidence_heb', incidence_heb, bandedRows = TRUE, tableStyle = "TableStyleDark9")
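# on the metadata sheet, turn each sheet name into an internal hyperlink;
# freeze the header row and auto-size columns on every sheet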
for (i in 1:(length(data_and_meta) + 2)) {
  if (i <= (length(data_and_meta) + 1)) {
    openxlsx::writeFormula(wb, "Méta-données",
                           startRow = i + 1,
                           x = openxlsx::makeHyperlinkString(sheet = prep_meta$`Feuillet excel`[i], text = prep_meta$`Feuillet excel`[i], row = i, col = 2)
    )
  }
  openxlsx::freezePane(wb, i, firstActiveRow = 2, firstActiveCol = 1)
  openxlsx::setColWidths(wb, sheet = i, cols = 1:23, widths = "auto")
}
openxlsx::saveWorkbook(wb,
                       paste0('SI-DEP/excel/covid_data_spf-si-dep-', data_date_max, '.xlsx'), overwrite = TRUE)