@skillachie
skillachie / storm_articles.py
Last active September 30, 2015 20:52
Collect news articles related to specific events
from news_corpus_builder import NewsCorpusGenerator
from pprint import pprint
import sys

# Location where you want to save the news articles
corpus_dir = '/home/skillachie/Development/event_articles'
# Cap on the number of articles to collect per event category
category_total = 300
# 'file' writes each extracted article to disk under corpus_dir
extractor = NewsCorpusGenerator(corpus_dir, 'file')
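The preview stops before any articles are actually fetched. A minimal sketch of how the generator might be driven, assuming news_corpus_builder exposes google_news_search and generate_corpus methods (the event terms and 'storms' category label are hypothetical):

events = ['hurricane sandy', 'hurricane katrina']
for event in events:
    # search Google News for the event, labelled under a 'storms' category
    article_links = extractor.google_news_search(event, 'storms', category_total)
    # download, extract, and store the article bodies
    extractor.generate_corpus(article_links)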
import pandas as pd

unique = [1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 51, 52, 55]
# All bin edges are distinct here, so labelling the deciles 1-10 works
df1 = pd.qcut(unique, 10, labels=[1, 2, 3, 4, 5, 6, 7, 8, 9, 10])
print(df1)
print('works')

# Represents cases where some deciles simply come back 0/NA: the repeated
# values produce duplicate bin edges and qcut raises ValueError. Need to fix
non_unique = [1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 5, 5, 5]
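One common workaround is to rank the values first so every quantile edge is distinct; newer pandas (>= 0.20) can also drop duplicate edges directly. A sketch, not part of the original gist:

import pandas as pd

non_unique = pd.Series([1] * 13 + [5] * 3)
# rank(method='first') breaks ties, so the ten decile edges are all unique
deciles = pd.qcut(non_unique.rank(method='first'), 10, labels=list(range(1, 11)))
print(deciles.value_counts().sort_index())
# Alternative on pandas >= 0.20: pd.qcut(non_unique, 10, duplicates='drop')
# (fewer than 10 bins come back, so explicit labels cannot be passed)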
import pandas as pd
from pprint import pprint
import numpy as np

# DataFrame.from_csv was removed from pandas; read_csv with index_col=0
# is the equivalent call
df = pd.read_csv('nlp_binary_features_business_days.csv', index_col=0)
# Count the missing values in each column
pprint(df.isnull().sum())
# pprint(df)
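What to do with the gaps depends on the features. Since these are binary features, one plausible cleanup (an assumption, not part of the original snippet) is:

df = df.fillna(0)           # treat a missing binary feature as 0/absent
pprint(df.isnull().sum())   # verify no nulls remain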
@skillachie
skillachie / finsymbols_sp500
Created February 2, 2015 21:34
SP500 Symbols Query
from finsymbols import get_sp500_symbols

# Each entry is a dict of metadata for one S&P 500 company
symbols = get_sp500_symbols()
for symbol in symbols:
    print(symbol['symbol'])
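To avoid re-scraping the symbol list on every run it can be persisted; a minimal sketch using pandas (the filename is arbitrary):

import pandas as pd

df = pd.DataFrame(symbols)
df.to_csv('sp500_symbols.csv', index=False)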
#!/usr/bin/env python
import pika
import sys

connection = pika.BlockingConnection(
    pika.ConnectionParameters(host='localhost'))
channel = connection.channel()
#!/usr/bin/env python
import pika
import sys

# Example publisher code taken from the RabbitMQ topic tutorial
connection = pika.BlockingConnection(
    pika.ConnectionParameters(host='localhost'))
channel = connection.channel()

routing_key = sys.argv[1] if len(sys.argv) > 1 else 'anonymous.info'
message = ' '.join(sys.argv[2:]) or 'Hello World!'
# Assumes the 'data_gateway' topic exchange has already been declared
channel.basic_publish(exchange='data_gateway',
                      routing_key=routing_key,
                      body=message)
connection.close()
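The matching consumer is not shown in the preview. A minimal sketch following the same topic-tutorial pattern, assuming pika >= 1.0 (the server-named queue and the '#' binding key are assumptions):

#!/usr/bin/env python
import pika

connection = pika.BlockingConnection(
    pika.ConnectionParameters(host='localhost'))
channel = connection.channel()
channel.exchange_declare(exchange='data_gateway', exchange_type='topic')

# exclusive, server-named queue bound to every routing key
result = channel.queue_declare(queue='', exclusive=True)
queue_name = result.method.queue
channel.queue_bind(exchange='data_gateway', queue=queue_name, routing_key='#')

def callback(ch, method, properties, body):
    print(" [x] %r:%r" % (method.routing_key, body))

channel.basic_consume(queue=queue_name, on_message_callback=callback,
                      auto_ack=True)
channel.start_consuming()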
@skillachie
skillachie / setup_exchanges_queues.py
Created June 27, 2014 03:29
RabbitMQ Exchange to Exchange Binding With Persistence Using Queues
#!/usr/bin/env python
import pika
import sys

connection = pika.BlockingConnection(
    pika.ConnectionParameters(host='localhost'))
channel = connection.channel()
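The preview cuts off before any bindings are declared. A sketch of what the exchange-to-exchange binding with a durable queue might look like (the exchange names, queue name, and routing keys are assumptions):

# Declare a source and a destination exchange
channel.exchange_declare(exchange='data_gateway', exchange_type='topic',
                         durable=True)
channel.exchange_declare(exchange='analytics', exchange_type='topic',
                         durable=True)

# Exchange-to-exchange binding: everything published to data_gateway
# is forwarded to analytics
channel.exchange_bind(destination='analytics', source='data_gateway',
                      routing_key='#')

# Durable queue on the destination exchange so messages survive restarts
channel.queue_declare(queue='analytics_store', durable=True)
channel.queue_bind(exchange='analytics', queue='analytics_store',
                   routing_key='#')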
@skillachie
skillachie / tasks.py
Created June 15, 2013 18:34
Celery and MongoDB
from celery import Celery
import time

# Specify the MongoDB host and database to connect to
BROKER_URL = 'mongodb://localhost:27017/jobs'
celery = Celery('EOD_TASKS', broker=BROKER_URL)
# Load settings for the backend that stores the results of jobs
celery.config_from_object('celeryconfig')
# celeryconfig.py
from celery.schedules import crontab

CELERY_RESULT_BACKEND = "mongodb"
CELERY_MONGODB_BACKEND_SETTINGS = {
    "host": "127.0.0.1",
    "port": 27017,
    "database": "jobs",
    "taskmeta_collection": "stock_taskmeta_collection",
}
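The crontab import suggests a beat schedule that the preview cuts off. A sketch of how a periodic task could be wired to this config (the task body, name, and schedule are assumptions):

# tasks.py (continued) -- a task the beat scheduler can invoke
@celery.task
def fetch_eod_data(symbol):
    # placeholder for the real end-of-day fetch
    return symbol

# celeryconfig.py (continued) -- run the task every weekday at 18:00
CELERYBEAT_SCHEDULE = {
    'fetch-eod': {
        'task': 'tasks.fetch_eod_data',
        'schedule': crontab(hour=18, minute=0, day_of_week='1-5'),
        'args': ('AAPL',),
    },
}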
@skillachie
skillachie / gist:3803275
Created September 29, 2012 05:36
Example
#!/usr/bin/ruby
require 'rubygems'
require 'nokogiri'
require 'open-uri'
require 'image_sorcery'
require 'mini_magick'

# -- Obtain metainfo via 'fetch-ebook-meta'
# NOTE: cover images for books get saved to the current working directory of the script.