Execute the following in the browser address bar:
https://console.cloud.google.com/bigquery?p={project-id}&page=project
e.g. https://console.cloud.google.com/bigquery?p=bigquery-public-data&page=project
# Load the Massachusetts statewide itemized opioid-shipment extract
# (filename suggests the ARCOS release; confirm against the data source).
# gunzip() decompresses the .tsv.gz in place; fread() then reads the
# resulting TSV quickly into a data.table.
# NOTE(review): removed trailing "| |" table-extraction artifacts that made
# the original lines unparseable.
library(data.table)
library(dplyr)
library(R.utils)

# JUST MASS
mass <- gunzip("arcos-ma-statewide-itemized.tsv.gz")
mass_opioids <- fread(file = "arcos-ma-statewide-itemized.tsv")
glimpse(mass_opioids) # 2,574,240 observations
summary(mass_opioids)
Execute the following in the browser address bar:
https://console.cloud.google.com/bigquery?p={project-id}&page=project
e.g. https://console.cloud.google.com/bigquery?p=bigquery-public-data&page=project
# Scatter plot of the dplyr `starwars` dataset; per the original inline
# comment, every aes() mapping is meant to show in the plotly hover tooltip
# (presumably via plotly::ggplotly(p) — that call is not visible here).
# NOTE(review): the trailing "| |" markers look like table-extraction
# artifacts, and the geom_point()/aes() call is truncated in this view —
# the remaining aesthetics and closing parentheses are missing.
library(dplyr) | |
library(ggplot2) | |
library(plotly) | |
library(viridis) | |
data("starwars") | |
p <- ggplot(starwars) + | |
geom_point( | |
aes( # all 4 aes() arguments will print in the plotly tooltip | |
x = height, # arg 1 |
# File # 1
# https://drive.google.com/open?id=1Lhz23JP4gRW4p_01D7OzaL60CT1wfem-
# File # 2
# https://drive.google.com/open?id=1XOWD8COTdDx30HNLZgrp57WzWUGOtaKA
# NOTE: To load data, you must download both the extract's data and the DDI,
# and also set the working directory to the folder with these files (or change the path below).
#install.packages("tidyverse")
# SimilarWeb traffic API configuration (dataseolabs.com example).
# Docs: https://www.similarweb.com/corp/developer/
# Create your key here: https://pro.similarweb.com/#/account/api-management
# The free tier gives 3 months of web-traffic data.
# NOTE(review): removed trailing "| |" table-extraction artifacts that made
# the original lines unparseable.
library(httr)
library(jsonlite)

# conf: domains whose traffic will be queried
myList <- c("cuisineaz.com", "marmiton.org", "odelices.com", "allrecipes.fr")
# Daily page views of Wikipedia articles before 2015, from the merged
# pagecounts-ez dumps:
# https://dumps.wikimedia.org/other/pagecounts-ez/merged/2012/2012-12/
# The download is a .bz2 archive; decompressing it yields the file below.
# NOTE(review): removed trailing "| |" table-extraction artifacts that made
# the original lines unparseable.
library(chunked)
library(tidyverse)

my_file <- "pagecounts-2012-12-14/pagecounts-2012-12-14"
# Median household income (ACS variable B19013_001) for Illinois counties,
# with geometry = TRUE so the result carries sf geometry for mapping
# (leaflet/viridis are loaded, presumably for a choropleth downstream).
# tigris_use_cache caches the downloaded shapefiles between runs.
# NOTE(review): the trailing "| |" markers look like table-extraction
# artifacts, and the chain is truncated — the %>% at the end has no
# right-hand side in this view.
library(tidycensus) | |
library(leaflet) | |
library(sf) | |
library(viridis) | |
options(tigris_use_cache = TRUE) | |
il1 <- get_acs(geography = "county", | |
variables = c(hhincome = "B19013_001"), | |
state = "IL", | |
geometry = TRUE) %>% |
# googleAuthR setup for read-only access to the Google Calendar API.
# NOTE(review): removed trailing "| |" table-extraction artifacts that made
# the original lines unparseable.
library(googleAuthR)

## set scopes for calendar
options(
  googleAuthR.scopes.selected = "https://www.googleapis.com/auth/calendar.readonly",
  googleAuthR.client_id = "XXXX",    ## add your Google project client Id
  googleAuthR.client_secret = "XXXX" ## add your Google project client secret
)

## make sure calendar API is activated for your Google Project at below URL:
# https://console.cloud.google.com/apis/api/calendar-json.googleapis.com/overview
# Instructions for syncing a local folder with a remote FTP or SFTP server.
# The only requirement is Homebrew. To get git-ftp:
brew update && brew install git-ftp

# Initialize a git repo in the directory you want to sync. Track all files,
# and commit them to your repo:
git init
git add -A && git commit -m "Committed all files"
# Bitbucket Pipelines config: build and install an R package with devtools
# inside the rocker/hadleyverse container.
# NOTE(review): removed trailing "| |" extraction artifacts (which are
# invalid YAML) and restored the standard bitbucket-pipelines.yml
# indentation, which had been flattened.
image: rocker/hadleyverse:latest

pipelines:
  default:
    - step:
        script:
          - cd /opt/atlassian/pipelines/agent/build
          - Rscript -e 'devtools::install_deps(".", dependencies=TRUE, upgrade=FALSE)'
          - Rscript -e 'devtools::build()'
          - Rscript -e 'devtools::install(upgrade_dependencies = FALSE)'