Sharon Machlis (smach)

install.packages("jsonlite", dependencies = TRUE)
install.packages("RCurl", dependencies = TRUE)
library("jsonlite")
library("RCurl")
base_url <- "https://api.parsely.com/v2"
apikey <- "computerworld.com"
api_secret <- "YOUR SECRET KEY"
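
The preview stops once the credentials are set. As a minimal sketch of one possible request, assuming the /analytics/posts endpoint and its apikey/secret/limit query parameters (none of which appear in the original snippet):

## build a request URL from the values defined above and parse the JSON reply
url <- paste0(base_url, "/analytics/posts",
              "?apikey=", apikey,
              "&secret=", api_secret,
              "&limit=10")
posts <- fromJSON(getURL(url))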
## Mapping household income by ward in South Africa - data from the 2011 Census
## Author: Kyle Walker. Please share and re-use as much as you'd like!
library(readxl)
library(rgdal)
library(dplyr)
library(tidyr)
library(magrittr)
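
The preview ends with the library calls. A rough sketch of the workflow those packages imply, with hypothetical file, layer, and column names standing in for the census table and ward shapefile:

## read the ward-level income table and the ward boundaries, then join them
income <- read_excel("ward_income_2011.xlsx")                # hypothetical file name
wards  <- readOGR(dsn = ".", layer = "Wards2011")            # hypothetical shapefile layer
wards@data <- left_join(wards@data, income, by = "ward_id")  # hypothetical join key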
@emad-elsaid
emad-elsaid / mysql-backup.rb
Created March 4, 2014 12:40
Script to back up a MySQL database and send the dump to your email
#!/usr/bin/env ruby
require 'mail'
mysql_username = 'root'      # placeholder credentials -- replace with your own
mysql_password = '123456'
mysql_database = 'test'
# dump the database, then email it; the preview cut off here, so the Mail block
# below is one way to finish (addresses are placeholders)
system("mysqldump --user=#{mysql_username} --password=#{mysql_password} #{mysql_database} > backup.sql")
Mail.deliver do
  from     'backup@example.com'
  to       'you@example.com'
  subject  "MySQL backup of #{mysql_database}"
  add_file 'backup.sql'
end
# Credit to :
@MarkEdmondson1234
MarkEdmondson1234 / google_calendar_demo.R
Last active October 2, 2017 23:30
A demo of calling the Google Calendar API
library(googleAuthR)
## set scopes for calendar
options(googleAuthR.scopes.selected = "https://www.googleapis.com/auth/calendar.readonly",
        googleAuthR.client_id = "XXXX",      ## add your Google project client Id
        googleAuthR.client_secret = "XXXX")  ## add your Google project client secret
## make sure calendar API is activated for your Google Project at below URL:
# https://console.cloud.google.com/apis/api/calendar-json.googleapis.com/overview
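
The snippet configures googleAuthR but stops before any request is made. A minimal sketch of the next step, assuming the standard Calendar v3 events endpoint; the calendar id ("primary") and the parse function are illustrative:

## authenticate, then build and call a GET request against the Calendar API
gar_auth()
list_events <- gar_api_generator(
  "https://www.googleapis.com/calendar/v3/calendars/primary/events",
  "GET",
  data_parse_function = function(x) x$items)
events <- list_events()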
library(tidycensus)
library(leaflet)
library(sf)
library(viridis)
options(tigris_use_cache = TRUE)
il1 <- get_acs(geography = "county",
               variables = c(hhincome = "B19013_001"),
               state = "IL",
               geometry = TRUE) %>%
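  ## the gist preview is cut off at the pipe above; a hedged continuation --
  ## the reprojection and map styling below are assumptions, not the original code
  st_transform(4326)

## map median household income by county ("estimate" is the ACS value column)
pal <- colorNumeric("viridis", domain = il1$estimate)
leaflet(il1) %>%
  addTiles() %>%
  addPolygons(fillColor = ~pal(estimate), color = "#444444",
              weight = 1, fillOpacity = 0.7) %>%
  addLegend(pal = pal, values = il1$estimate,
            title = "Median household income")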
@benmarwick
benmarwick / analyse_large_text_file_chunked.R
Last active April 5, 2018 10:57
Use R to analyse a large text file that is too big to read in all at once
library(chunked)
library(tidyverse)
# I want to look at the daily page views of Wikipedia articles
# before 2015... I can get zipped log files
# from here: https://dumps.wikimedia.org/other/pagecounts-ez/merged/2012/2012-12/
# I get bz file, unzip to get this:
my_file <- 'pagecounts-2012-12-14/pagecounts-2012-12-14'
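
The preview stops at the file path. A rough sketch of the chunked workflow from here, assuming read_table_chunkwise's default parsing gives the header-less, space-delimited log the column names V1..V4; the filter column and threshold are likewise assumptions:

## stream the file in chunks, keep only high-traffic rows, write them back out
read_table_chunkwise(my_file, chunk_size = 100000) %>%
  filter(V3 > 600) %>%                 # V3: assumed view-count column
  write_chunkwise("pagecounts_filtered.csv")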
@pssguy
pssguy / server.R
Created November 29, 2012 20:37
Shiny App allowing online selection of subjects for graphical and tabular presentation of daily Wikipedia search rates
# libraries used. install as necessary
library(shiny)
library(RJSONIO) # acquiring and parsing data
library(ggplot2) # graphs
library(plyr) # manipulating data
library(lubridate) #dates
library(stringr)
trim.leading <- function (x) sub("^\\s+", "", x)  # strip leading whitespace from a string
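
Only the setup lines of server.R appear in the preview. A minimal sketch of how a server function for this kind of app might look; the stats.grok.se endpoint, the input/output ids, and the field names are assumptions, not the original app's code:

## hypothetical server function: fetch daily view counts for the chosen
## article, then expose a plot and a table to ui.R
shinyServer(function(input, output) {
  articleData <- reactive({
    url <- paste0("http://stats.grok.se/json/en/201211/", input$article)
    raw <- fromJSON(paste(readLines(url, warn = FALSE), collapse = ""))
    data.frame(date  = ymd(names(raw$daily_views)),
               views = unlist(raw$daily_views))
  })
  output$viewsPlot <- renderPlot({
    print(ggplot(articleData(), aes(date, views)) + geom_line())
  })
  output$viewsTable <- renderTable(articleData())
})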
@andrewbtran
andrewbtran / top_1_percent
Created August 3, 2018 19:43
top 1 percent ipums
# File # 1
# https://drive.google.com/open?id=1Lhz23JP4gRW4p_01D7OzaL60CT1wfem-
# File # 2
# https://drive.google.com/open?id=1XOWD8COTdDx30HNLZgrp57WzWUGOtaKA
# NOTE: To load data, you must download both the extract's data and the DDI
# and also set the working directory to the folder with these files (or change the path below).
#install.packages("tidyverse")
library(data.table)
library(dplyr)
library(R.utils)
# JUST MASS
mass <- gunzip("arcos-ma-statewide-itemized.tsv.gz")
mass_opioids <- fread(file = 'arcos-ma-statewide-itemized.tsv')
glimpse(mass_opioids) # 2,574,240 observations
summary(mass_opioids)
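
As one hedged next step after the summary, aggregate shipments by county; BUYER_COUNTY and DOSAGE_UNIT are assumed column names in the ARCOS extract, not verified against this particular file:

## total dosage units shipped per Massachusetts county, largest first
mass_opioids %>%
  group_by(BUYER_COUNTY) %>%
  summarize(total_pills = sum(DOSAGE_UNIT, na.rm = TRUE)) %>%
  arrange(desc(total_pills))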