View Sendy.md

Sendy

Sendy is a self-hosted email newsletter application that lets you send trackable emails via Amazon Simple Email Service (SES).

Heroku

You can deploy Sendy on Heroku using the following instructions (I assume you've already installed the Heroku Toolbelt).

  1. On Heroku, create a new app.
  2. Clone that app to your desktop
View date.sql
/* Adapted from Tom Cunningham's 'Data Warehousing with MySql' (www.meansandends.com/mysql-data-warehouse) */
###### small-numbers table
DROP TABLE IF EXISTS numbers_small;
CREATE TABLE numbers_small (number INT);
INSERT INTO numbers_small VALUES (0),(1),(2),(3),(4),(5),(6),(7),(8),(9);
###### main numbers table
DROP TABLE IF EXISTS numbers;
CREATE TABLE numbers (number BIGINT);
View gist:fa75903000899293cefd83c18566849e
<?php
/* Caveat: I'm not a PHP programmer, so this may or may
 * not be the most idiomatic code...
 *
 * FPDF is a free PHP library for creating PDFs:
 * http://www.fpdf.org/
 */
require("fpdf.php");
class PDF extends FPDF {
View gist:d73329202559b7e3c083aadb45334729
  1. General Background and Overview
View spark_mnist_mlp.py
from __future__ import print_function
from pyspark import SparkContext, SparkConf
from pyspark.mllib.linalg import DenseVector, VectorUDT
from pyspark.sql import SQLContext
from pyspark.ml.classification import MultilayerPerceptronClassifier
from pyspark.ml.evaluation import MulticlassClassificationEvaluator
from pyspark.sql.types import StructType, StructField, StringType, DoubleType, ArrayType
View LDA_SparkDocs
/*
This example uses Scala. Please see the MLlib documentation for a Java example.
Try running this code in the Spark shell. It may produce different topics each time (since LDA includes some randomization), but it should give topics similar to those listed above.
This example is paired with a blog post on LDA in Spark: http://databricks.com/blog
Spark: http://spark.apache.org/
*/
import scala.collection.mutable
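
The preview stops at the first import. For context, a minimal sketch of the kind of spark-shell session the comment describes might look like the following; the sample data path and k=3 are illustrative, and sc is the shell's SparkContext:

import org.apache.spark.mllib.clustering.LDA
import org.apache.spark.mllib.linalg.Vectors

// Each line of the input file is one document: a space-separated vector of word counts.
val data = sc.textFile("data/mllib/sample_lda_data.txt")
val parsedData = data.map(s => Vectors.dense(s.trim.split(' ').map(_.toDouble)))

// LDA expects an RDD of (documentId, wordCountVector) pairs.
val corpus = parsedData.zipWithIndex.map(_.swap).cache()

// Cluster the corpus into three topics; results vary run to run because of the randomization noted above.
val ldaModel = new LDA().setK(3).run(corpus)

// topicsMatrix is vocabSize x k: each column is one topic's distribution over the vocabulary.
val topics = ldaModel.topicsMatrix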
View IpServices.scala
// Convert a dotted-quad IPv4 address to its numeric (Long) form.
def ipToLong(ipAddress: String): Long = {
  ipAddress.split("\\.").reverse.zipWithIndex.map(a => a._1.toInt * math.pow(256, a._2).toLong).sum
}

// Convert the numeric form back to a dotted-quad string.
def longToIP(long: Long): String = {
  (0 until 4).map(a => long / math.pow(256, a).floor.toInt % 256).reverse.mkString(".")
}
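
A quick round-trip check of the two helpers (the address is arbitrary):

val ip = "192.168.0.1"
val asLong = ipToLong(ip)     // 3232235521
val back = longToIP(asLong)   // "192.168.0.1"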
View install_scala_centos.sh
export SCALA_VERSION=scala-2.11.5
wget http://www.scala-lang.org/files/archive/${SCALA_VERSION}.tgz
# Write the profile script with `sudo tee`: a plain `sudo echo ... > file` fails because
# the redirection is performed by the unprivileged shell, not by sudo.
echo "SCALA_HOME=/usr/local/scala/${SCALA_VERSION}" | sudo tee /etc/profile.d/scala.sh
echo 'export SCALA_HOME' | sudo tee -a /etc/profile.d/scala.sh
sudo mkdir -p /usr/local/scala
sudo cp ${SCALA_VERSION}.tgz /usr/local/scala/
cd /usr/local/scala/
sudo tar xvf ${SCALA_VERSION}.tgz
sudo rm -f ${SCALA_VERSION}.tgz
sudo chown -R root:root /usr/local/scala
View RabbitReceiver.scala
package botkop.sparti.receiver
import com.rabbitmq.client._
import org.apache.spark.Logging
import org.apache.spark.storage.StorageLevel
import org.apache.spark.streaming.StreamingContext
import org.apache.spark.streaming.dstream.ReceiverInputDStream
import org.apache.spark.streaming.receiver.Receiver
import scala.reflect.ClassTag
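
The preview shows only the imports. A receiver built on them typically extends Spark's Receiver and hands each delivered message to store(). The sketch below is illustrative only (host and queue are made-up parameters), not the gist's actual implementation:

class RabbitReceiver(host: String, queue: String)
  extends Receiver[String](StorageLevel.MEMORY_AND_DISK_2) with Logging {

  def onStart(): Unit = {
    // Start consuming on a background thread so onStart() returns immediately.
    new Thread("RabbitMQ Receiver") {
      override def run(): Unit = receive()
    }.start()
  }

  def onStop(): Unit = {
    // Connection cleanup is omitted in this sketch.
  }

  private def receive(): Unit = {
    val factory = new ConnectionFactory()
    factory.setHost(host)
    val channel = factory.newConnection().createChannel()
    channel.queueDeclare(queue, false, false, false, null)

    // handleDelivery is invoked once per message; store() pushes it into the DStream.
    val consumer = new DefaultConsumer(channel) {
      override def handleDelivery(tag: String, envelope: Envelope,
                                  props: AMQP.BasicProperties, body: Array[Byte]): Unit = {
        store(new String(body, "UTF-8"))
      }
    }
    channel.basicConsume(queue, true, consumer)
  }
}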