start_streamming_spark.py
# Start the streaming computation, then block until it stops or fails.
spark_streaming_context.start()
spark_streaming_context.awaitTermination()
# awaitTermination() only returns once the context has been stopped, so this
# stop() is a no-op safety net.
spark_streaming_context.stop()
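StreamingContext.stop() also takes two optional flags, useful when the driver should shut down cleanly; a one-line sketch using PySpark's actual parameter names:

# Drain in-flight batches and tear down the underlying SparkContext too.
spark_streaming_context.stop(stopSparkContext=True, stopGraceFully=True)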
spark_to_redshift.py
def toRedshift(time, rdd):
    try:
        sqlContext = getSqlContextInstance(rdd.context)
        schema = StructType([
            StructField('user_id', StringType(), True),
            StructField('device_id', StringType(), True),
            StructField('steps', IntegerType(), True),
            StructField('battery_level', IntegerType(), True),
            StructField('calories_spent', IntegerType(), True),
        ])
        # The preview is truncated here; the schema is presumably applied via
        # sqlContext.createDataFrame(rdd, schema) before writing to Redshift.
    except Exception:
        # Streaming batches can be empty; skip them rather than crash the job.
        pass
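toRedshift relies on a getSqlContextInstance helper the preview does not show; a minimal sketch of the lazily-created singleton from the Spark Streaming programming guide (the helper name comes from the snippet above, the body is an assumption):

def getSqlContextInstance(sparkContext):
    # Assumed implementation: reuse one SQLContext across micro-batches
    # instead of constructing a new one per batch.
    if 'sqlContextSingletonInstance' not in globals():
        globals()['sqlContextSingletonInstance'] = SQLContext(sparkContext)
    return globals()['sqlContextSingletonInstance']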
s3_spark_output.py
# saveAsTextFile is defined on RDDs, not on SparkContext; call it on a
# batch RDD (e.g. inside a foreachRDD callback).
rdd.saveAsTextFile("s3n://parents/activity_log/01010101.txt")
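For the streaming case there is also a DStream-level counterpart, saveAsTextFiles, which writes one output directory per batch interval; a minimal sketch, assuming the py_rdd DStream from spark_context.py (the prefix is illustrative):

# Each batch lands in "s3n://parents/activity_log-<batch time ms>.txt".
py_rdd.saveAsTextFiles("s3n://parents/activity_log", "txt")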
spark_context.py
spark_context = SparkContext(appName=kinesis_app_name)
spark_streaming_context = StreamingContext(spark_context, spark_batch_interval)
sql_context = SQLContext(spark_context)
# The original rebound the result to kinesis_stream, clobbering the
# stream-name variable it passes in; give the DStream its own name.
kinesis_dstream = KinesisUtils.createStream(
    spark_streaming_context, kinesis_app_name, kinesis_stream, kinesis_endpoint,
    aws_region, kinesis_initial_position, kinesis_checkpoint_interval)
# Each Kinesis record arrives as a JSON string; decode it element by element.
py_rdd = kinesis_dstream.map(lambda x: json.loads(x))
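None of the previews show the per-batch handler being attached to the stream; a one-line sketch of the likely wiring, assuming toRedshift from spark_to_redshift.py is in scope:

# Register toRedshift(time, rdd) for every micro-batch; this must happen
# before spark_streaming_context.start().
py_rdd.foreachRDD(toRedshift)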
kinesis_consumer.py
from __future__ import print_function
import datetime
import json
from pyspark import SparkContext
from pyspark.sql import SQLContext, Row
from pyspark.sql.types import *
from pyspark.streaming import StreamingContext
from pyspark.streaming.kinesis import KinesisUtils, InitialPositionInStream
aws_region = 'us-east-1'
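The preview cuts off before the rest of the configuration that spark_context.py consumes; a sketch with placeholder values (only the names are taken from spark_context.py, every value below is an assumption):

kinesis_app_name = 'kinesis_consumer'  # assumed; also names the DynamoDB checkpoint table
kinesis_stream = 'activity_stream'  # assumed stream name
kinesis_endpoint = 'https://kinesis.us-east-1.amazonaws.com'
kinesis_initial_position = InitialPositionInStream.LATEST
kinesis_checkpoint_interval = 10  # seconds, assumed
spark_batch_interval = 10  # seconds, assumed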
request.swift
import HTTP
import File
import HTTPSClient
import JSON
var url: String?
do {
    // Inside a do block use try, not try!, so failures reach the catch.
    let client = try Client(uri: "https://api.github.com:443")
    var response = try client.get("/repos/vsouza/awesome-ios/git/trees/HEAD")
    let buffer = try response.body.becomeBuffer()
    // ... the original snippet is truncated here ...
} catch {
    print("Request failed: \(error)")
}
colorsUtil.swift
//
// ColorsUtil.swift
//
// Created by Vinicius Souza on 5/11/16.
// Copyright © 2016 Vinicius Souza. All rights reserved.
//
import Foundation
import UIKit
GQL.swift
import Foundation
protocol GQLNodeArgument {}
extension String: GQLNodeArgument {}
extension NSNumber: GQLNodeArgument {}
class GQLNode: StringLiteralConvertible, ArrayLiteralConvertible, Printable, DebugPrintable {
let name: String?
parse_aws_s3_billing.py
# -*- coding:utf-8 -*-
'''
Simple script to parse the detailed AWS billing CSV file.
Displays the cost of S3 operations broken down by region, bucket, and usage
type (either storage or network), and sums up the amount of storage used per
bucket. Line items costing less than $1 are filtered out of the output.
See http://docs.aws.amazon.com/awsaccountbilling/latest/about/programaccess.html
for how to set up programmatic access to your billing.
'''
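A minimal sketch of the aggregation the docstring describes, assuming the standard detailed-billing CSV columns ProductName, UsageType, ResourceId, and Cost (the input file name and the exact grouping are assumptions):

import csv
from collections import defaultdict

costs = defaultdict(float)
with open('detailed-billing.csv') as f:  # file name is an assumption
    for row in csv.DictReader(f):
        if row.get('ProductName') == 'Amazon Simple Storage Service':
            # Sum cost per (bucket, usage type) pair.
            key = (row.get('ResourceId', ''), row.get('UsageType', ''))
            costs[key] += float(row.get('Cost') or 0)
for (bucket, usage), cost in sorted(costs.items()):
    if cost >= 1.0:  # mirror the script's $1 cutoff
        print(bucket, usage, round(cost, 2))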
reflection.go
package main

import (
	"fmt"
	"reflect"
)

// Foo carries custom struct tags that can be read back at runtime via
// reflection (reflect.StructTag.Get).
type Foo struct {
	FirstName string `tag_name:"tag 1"`
	LastName  string `tag_name:"tag 2"`
}