Skip to content

Instantly share code, notes, and snippets.

View sillasgonzaga's full-sized avatar

Sillas Teixeira Gonzaga sillasgonzaga

View GitHub Profile
#1 Baixar série histórica do dólar de 2 anos
library(quantmod)
library(xlsx)
library(plyr)
library(dplyr)
library(corrplot)
@sillasgonzaga
sillasgonzaga / dash_runner.py
Created March 15, 2016 23:59 — forked from ChuckWoodraska/dash_runner.py
Used to sniff traffic for Amazon dash button and play a song through my chromecast.
import time
import sys
import logging
import pychromecast
from scapy.all import *
mac_address = '00:00:00:00:00:00'
def arp_display(pkt):
if pkt[ARP].op == 1:
if pkt[ARP].hwsrc == mac_address:
@sillasgonzaga
sillasgonzaga / pi_accuracy.R
Created July 7, 2016 01:05
Prediction intervals for forecasting models
pi_accuracy <- function(fc, yobs){
# source: http://ellisp.github.io/blog/2016/01/30/hybrid-forecasts
# checks the success of prediction intervals of an object of class
# forecast with actual values
if(length(yobs) != length(fc$mean)){
stop("yobs needs to be the same length as the forecast period.")
}
n <- length(yobs)
yobsm <- cbind(yobs, yobs)
In <- (yobsm > fc$lower & yobsm < fc$upper)
meu_tema <- function(base.size = 9, legend.text.size = 9, axis.text.size = 10, axis.title.size = 11) {
# exemplo da função lm
#ggplot(agua_agregado, aes(x = dif_pressao, y = vazao_massica_media)) +
# geom_point() + geom_smooth(method = "lm") +
# source: http://stackoverflow.com/a/21030800/4577128
# Copy an object to the X clipboard via xclip (Linux only; xclip must be
# installed). Useful for pasting a data frame straight into a spreadsheet.
#
# x         object to copy (anything write.table() accepts)
# sep       field separator; tab by default so spreadsheets split columns
# row.names write row names? (FALSE by default)
# col.names write the header row? (TRUE by default)
#
# Called for its side effect; returns invisibly.
clipboard <- function(x, sep = "\t", row.names = FALSE, col.names = TRUE) {
  con <- pipe("xclip -selection clipboard -i", open = "w")
  # Close the connection even if write.table() errors — the original code
  # leaked the pipe on failure because close() was only reached on success.
  on.exit(close(con), add = TRUE)
  write.table(x, con, sep = sep, row.names = row.names, col.names = col.names)
  invisible(NULL)
}
# funcoes
# pacotes
library(magrittr) # não vivo sem esse pacote
library(rvest) # principal pacote para web-scraping
library(readr) # usado para extrair numeros de texto
library(stringr) # usado para o data cleaning
library(curl) # usado como suporte para o rvest
library(tidyr) # data cleaning
# projeto analise de sentimento
library(tidyr)
library(dplyr)
library(magrittr)
library(stringr)
library(lubridate)
library(ggplot2)
library(tm)
library(SnowballC)
library(wordcloud)
library(stringr)
library(magrittr)
library(lubridate)
library(glue)
library(dplyr)
library(readr)
# exemplo:
extrair_ans_UF <- function(uf, limpar_pasta_temporaria = TRUE) {
# source: https://gist.github.com/corynissen/5389426
getLongURL.curl <- function(shorturl){
# uses curl statement to get expanded url from t.co links (or any link)
# loop through until there's no location attribute... that's the long link.
newurl <- shorturl
url <- ""
while(url != newurl){
data <- system(paste0("curl -I ", newurl), intern=T)
if(sum(grepl("location: ", tolower(data))) == 0){
grafico_tfidf <- function(data, n_grams = 1, remover_nomes_proprios,
agregar_por_noticia, remover_stop_words = TRUE){
data <- data %>%
unnest_tokens(palavra, corpo_noticia, token = "ngrams", n = n_grams, to_lower = FALSE)
if (agregar_por_noticia){
# remover as duplicatas dentre de uma mesma noticia
data %<>% distinct(url, palavra, .keep_all = TRUE)
}