Alfonso alfcrisci

# Go to 'https://developers.facebook.com/tools/explorer' to get your access token
access_token <- "******************* INPUT YOUR ACCESS TOKEN ******************************"
require(RCurl)
require(rjson)
# Facebook Graph API helper adapted from the original post by Romain Francois;
# the gist preview is truncated, so everything after the sprintf() call is a reconstruction of that post.
facebook <- function(path = "me", access_token, options) {
  if (!missing(options)) {
    # Turn a named list of options into a "?key=value&key=value" query string
    options <- sprintf("?%s", paste(names(options), "=", unlist(options), collapse = "&", sep = ""))
  } else {
    options <- ""
  }
  sep <- if (options == "") "?" else "&"  # start the query string, or append to an existing one
  data <- getURL(sprintf("https://graph.facebook.com/%s%s%saccess_token=%s", path, options, sep, access_token))
  fromJSON(data)
}
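# A minimal usage sketch for the helper above (not part of the gist); it assumes
# access_token holds a valid token from the Graph API Explorer.
me      <- facebook("me", access_token)                             # basic profile, as a parsed list
friends <- facebook("me/friends", access_token, list(limit = 25))   # first 25 friends
me$name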
// Google Apps Script: fetch Twitter search results into the active spreadsheet
function getTweets(searchTerm, maxResults, sinceid, languageCode) {
  // Based on Mikael Thuneberg's getTweets, modified by mhawksey to convert the output to JSON;
  // if you include setRowsData this can be used to output chosen entries
  var data = [];
  var idx = 0;
  var ss = SpreadsheetApp.getActiveSpreadsheet();
  var sumSheet = ss.getSheetByName("Readme/Settings");
  if (isConfigured()) {
    var oauthConfig = UrlFetchApp.addOAuthService("twitter");
    oauthConfig.setAccessTokenUrl("https://api.twitter.com/oauth/access_token");
# 18/05/2013
# Keywords: text mining, elections, France, debate, 2nd round
# We use the qdap package (link to be added) and tm to perform the text-mining
# analysis, and the usual packages such as ggplot2 and RColorBrewer to make the
# graphics pretty.
suppressPackageStartupMessages(require(twitteR))
suppressPackageStartupMessages(require(XML))
suppressPackageStartupMessages(require(tm))
suppressPackageStartupMessages(require(rgdal))
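# A minimal sketch (not from the gist) of the kind of tm pipeline the comment above
# describes: build a corpus from a character vector of transcripts, clean it, and
# look at the most frequent terms. The 'transcripts' object is a toy placeholder.
transcripts <- c("exemple de phrase un", "exemple de phrase deux")
corp <- Corpus(VectorSource(transcripts))
corp <- tm_map(corp, content_transformer(tolower))
corp <- tm_map(corp, removePunctuation)
corp <- tm_map(corp, removeWords, stopwords("french"))
tdm  <- TermDocumentMatrix(corp)
head(sort(rowSums(as.matrix(tdm)), decreasing = TRUE), 10)  # ten most frequent terms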
# Load the packages
library(ggplot2)
library(maptools)
library(OpenStreetMap)
gpclibPermit()  # allow maptools to use the gpclib geometry library

# Import the data: download.file() returns a status code (0 on success), not the file contents
download.file("https://raw.github.com/alstat/Analysis-with-Programming/master/2013/R/R-Mapping-Super-Typhoon-Yolanda-Haiyan-Track/TyDatYolanda2013.csv",
              destfile = "/tmp/test.csv", method = "curl")
tydat <- read.csv("/tmp/test.csv", header = TRUE)
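# A rough sketch (not from the gist) of how the track could be plotted once loaded.
# The Longitude/Latitude column names below are assumptions; check names(tydat)
# against the actual CSV before using this.
ggplot(tydat, aes(x = Longitude, y = Latitude)) +
  geom_path(colour = "red") +
  geom_point(size = 1) +
  coord_equal() +
  ggtitle("Super Typhoon Yolanda (Haiyan) track, 2013")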
###################################
## CartoDB 2.0 Install [Working] ##
## Tested on Ubuntu 12.04 ##
###################################
# Change the root password
passwd
# Create a non-root user and give it sudo rights ([username] is a placeholder)
adduser [username]
adduser [username] sudo
# Fetch the full report data for code Tosc-A2; the quotes keep the shell from treating '&' as a job-control operator
curl -s "http://talos.irpi.cnr.it/zareport/za.php?action=get_full_report_data&cod=Tosc-A2"
alfcrisci / corstar.R (forked from aL3xa/corstar.R, last active August 29, 2015)
corstar <- function(x, y = NULL, use = "pairwise", method = "pearson", round = 3, row.labels, col.labels, ...) {
  require(psych)
  ct <- corr.test(x, y, use, method)  # calculate correlations and p-values in one call
  r <- ct$r                           # correlation coefficients
  p <- ct$p                           # p-values
  stars <- ifelse(p < .001, "***", ifelse(p < .01, "** ", ifelse(p < .05, "* ", " ")))  # significance stars
  # The gist preview cuts off here; the rest is a plausible reconstruction, not the original:
  res <- matrix(paste0(round(r, round), stars), ncol = ncol(r), dimnames = dimnames(r))
  if (!missing(row.labels)) rownames(res) <- row.labels
  if (!missing(col.labels)) colnames(res) <- col.labels
  res
}
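# Hypothetical usage (not part of the gist): a starred correlation matrix for a few
# mtcars columns, relying on the reconstructed return value above.
corstar(mtcars[, c("mpg", "hp", "wt")])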
# Install Subversion
sudo apt-get -y install subversion
# Install g++
sudo apt-get -y install g++
# Install the Hierarchical Data Format (HDF) library.
# NOTE: this library is not necessarily needed, but it was required in order for this
# to compile against a clean Ubuntu 12.04 LTS system. I didn't need it on a clean EC2
# Ubuntu 12.10 instance, so it may not be needed everywhere.
# (The gist preview is truncated here; the package name below is an assumption.)
sudo apt-get -y install libhdf5-serial-dev
polygonizer <- function(x, outshape = NULL, gdalformat = 'ESRI Shapefile',
                        pypath = NULL, readpoly = TRUE, quietish = TRUE) {
  # x: an R Raster layer, or the file path to a raster file recognised by GDAL
  # outshape: the path to the output shapefile (if NULL, a temporary file is created)
  # gdalformat: the desired OGR vector format
  # pypath: the path to gdal_polygonize.py (if NULL, an attempt is made to locate the script)
  # readpoly: should the polygon shapefile be read back into R and returned by this function? (logical)
  # quietish: should (some) messages be suppressed? (logical)
  if (isTRUE(readpoly)) require(rgdal)
  if (is.null(pypath)) {
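# Hypothetical usage (not from the gist): convert a small categorical raster to polygons.
# It assumes the raster package is installed and that gdal_polygonize.py can be found
# on this machine; 'r' and its category values are made up for illustration.
library(raster)
r <- raster(matrix(sample(1:3, 100, replace = TRUE), nrow = 10))
pols <- polygonizer(r)
plot(pols)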
alfcrisci / lintemp.R (forked from johnbaums/lintemp.R, last active August 29, 2015)
#### Temporal Interpolation ####################################################
# Perform cell-wise linear interpolation between multiple raster layers, and
# extrapolation beyond the upper limit of the input data. Output is saved in
# .tif format.
#
# Arguments
# s: a RasterStack containing the time slices to be interpolated
#
# xin: a numeric vector that indicates the times associated with the layers of s
#      (in the same order as the layers of s - see names(s))
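# A minimal sketch (not the gist's function) of the cell-wise idea described above:
# pull the cell values out of s as a matrix (one row per cell, one column per time
# slice) and interpolate each row from times xin onto new times xout. 's', 'xin' and
# 'xout' here are toy placeholders.
library(raster)
s    <- stack(replicate(3, raster(matrix(runif(100), nrow = 10))))  # toy 3-layer stack
xin  <- c(2000, 2005, 2010)   # times of the input layers
xout <- 2000:2010             # times we want layers for
vals   <- getValues(s)        # ncell x nlayers matrix of cell values
interp <- t(apply(vals, 1, function(v) approx(xin, v, xout = xout, rule = 2)$y))
# Each column of 'interp' holds the cell values for one element of xout; rule = 2 holds
# the end values constant, whereas the gist extrapolates past the last input time and
# writes each slice to .tif.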