Eugene Zhulenev (ezhulenev) · 🏠 Working from home

🏠
Working from home
View GitHub Profile
ezhulenev / InstantInsanity.scala
Created April 25, 2017 20:15
Type-Level Instant Insanity in Scala
object InstantInsanity extends App {
  // scalastyle:off
  def undefined[T]: T = ???
  def ⊥[T]: T = undefined

  // Phantom types for the cube-face colours (red, green, blue).
  trait R
  trait G
  trait B
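The preview is cut off at the colour types. A minimal sketch (an assumption, not the gist's encoding) of one building block such type-level puzzles lean on is a "not equal" constraint on colours, in the style of shapeless's =:!=, where the evidence becomes ambiguous exactly when the two types coincide:

  // Hypothetical helper, not from the gist: type-level inequality via
  // deliberately ambiguous implicits on the diagonal.
  trait =!=[A, B]
  object =!= {
    implicit def neq[A, B]: A =!= B = new =!=[A, B] {}
    // Both instances apply when A and B coincide, so A =!= A never resolves.
    implicit def eqAmbig1[A]: A =!= A = ⊥
    implicit def eqAmbig2[A]: A =!= A = ⊥
  }

  def distinctColors[A, B](implicit ev: A =!= B): Unit = ()

  distinctColors[R, G]   // compiles: the two faces have different colours
  // distinctColors[R, R] // rejected at compile time: ambiguous implicit values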
ezhulenev / InstantInsanity.scala
Created April 25, 2017 16:42
Instant Insanity in Scala
object InstantInsanity extends App {
  type Cube = Seq[Char]

  val cubes: Seq[Cube] = Seq("BGWGBR", "WGBWRR", "GWRBRR", "BRGGWW").map(_.toSeq)

  // Rotate a cube 90 degrees over its Z-axis, leaving up and down in place.
  def rot: Cube => Cube = { case Seq(u, f, r, b, l, d) => Seq(u, r, b, l, f, d) }

  // Twist a cube around the axis running from the upper-front-right
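The preview is cut off at the comment above. Only cubes and rot come from the gist; the definitions below follow the classic brute-force treatment of the puzzle and are assumptions about how the solver continues:

  // Twist: one third turn around the diagonal through the up-front-right corner.
  def twist: Cube => Cube = { case Seq(u, f, r, b, l, d) => Seq(f, r, u, l, d, b) }
  // Flip: half-turn exchanging up/down, front/left and back/right faces.
  def flip: Cube => Cube = { case Seq(u, f, r, b, l, d) => Seq(d, l, b, r, f, u) }

  // Candidate orientations of a cube: 4 rotations x 3 twists x 2 flips.
  def orientations(c: Cube): Seq[Cube] =
    for {
      c1 <- Seq(c, rot(c), rot(rot(c)), rot(rot(rot(c))))
      c2 <- Seq(c1, twist(c1), twist(twist(c1)))
      c3 <- Seq(c2, flip(c2))
    } yield c3

  // Only the front, right, back and left faces are visible in the stack.
  def visible: Cube => Seq[Char] = { case Seq(_, f, r, b, l, _) => Seq(f, r, b, l) }

  // Two cubes may be stacked if no visible side shows the same colour twice.
  def compatible(c1: Cube, c2: Cube): Boolean =
    visible(c1).zip(visible(c2)).forall { case (x, y) => x != y }

  def allowed(c: Cube, stack: Seq[Cube]): Boolean = stack.forall(compatible(c, _))

  // Depth-first search over orientations, keeping only compatible stacks.
  def solutions(cs: Seq[Cube]): Seq[Seq[Cube]] = cs match {
    case Seq() => Seq(Seq.empty[Cube])
    case c +: rest =>
      for {
        stack <- solutions(rest)
        o     <- orientations(c)
        if allowed(o, stack)
      } yield o +: stack
  }

  solutions(cubes).headOption.foreach(s => println(s.map(_.mkString).mkString("\n")))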
{-# LANGUAGE FlexibleContexts #-}
{-# LANGUAGE PolyKinds #-}
{-# LANGUAGE RankNTypes #-}
{-# LANGUAGE ScopedTypeVariables #-}
{-# LANGUAGE DataKinds #-}
{-# LANGUAGE GeneralizedNewtypeDeriving #-}
{-# LANGUAGE MultiParamTypeClasses #-}
{-# LANGUAGE RoleAnnotations #-}
{-# LANGUAGE TypeFamilies #-}
{-# LANGUAGE TypeOperators #-}
ezhulenev / spark-thred-safe.scala
Created August 11, 2015 22:16
Thread-safe Spark Sql Context
object ServerSparkContext {
  private[this] lazy val _sqlContext = {
    val conf = new SparkConf()
      .setAppName("....")

    val sc = new SparkContext(conf)

    // TODO: Bug in Spark: http://stackoverflow.com/questions/30323212
    val ctx = new HiveContext(sc)
    ctx.setConf("spark.sql.hive.convertMetastoreParquet", "false")
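The preview stops before the context is returned. A self-contained sketch of the same pattern (an assumption, not the gist's continuation; the object and method names here are made up) relies on the fact that Scala initialises a lazy val exactly once, even under concurrent first access:

import org.apache.spark.{SparkConf, SparkContext}
import org.apache.spark.sql.hive.HiveContext

object SharedSqlContext {
  // Created on first access; lazy val initialisation is synchronised by the
  // runtime, so concurrent callers still see a single context.
  private[this] lazy val underlying: HiveContext = {
    val sc  = new SparkContext(new SparkConf().setAppName("server"))
    val ctx = new HiveContext(sc)
    // Same workaround as in the gist: read Parquet tables through Hive's SerDe
    // instead of Spark's native converter.
    ctx.setConf("spark.sql.hive.convertMetastoreParquet", "false")
    ctx
  }

  // Request threads share the one context; Spark's scheduler supports
  // submitting jobs concurrently from multiple threads.
  def sqlContext: HiveContext = underlying
}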
ezhulenev / folds.scala
Last active August 29, 2015 14:25 — forked from tonymorris/folds.scala
Fold exercises in Scala
trait MyOption[A] {
  def fold[B](n: => B, s: A => B): B

  // Define the usual Option API.
  //
  // * Constructors (on the object)
  //   some
  //   none
  // * methods
  //   map
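The exercise list is cut off. A possible solution sketch for the first few items (assumptions, not the answers from the forked gist) builds the constructors and map from fold alone, grouped in a helper object so it stands on its own next to the trait above:

object MyOptionExercises {
  // Constructors: some always takes the success branch, none the default one.
  def some[A](a: A): MyOption[A] = new MyOption[A] {
    def fold[B](n: => B, s: A => B): B = s(a)
  }
  def none[A]: MyOption[A] = new MyOption[A] {
    def fold[B](n: => B, s: A => B): B = n
  }

  // map, written purely in terms of fold: rewrap the transformed value, or
  // stay empty.
  def map[A, B](o: MyOption[A])(f: A => B): MyOption[B] =
    o.fold(none[B], a => some(f(a)))
}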
ezhulenev / debug.log
Created December 22, 2014 17:23
OrderBook dynamics log
12:20:53.549 [main] INFO c.s.dynamics.DecisionTreeDynamics$ - Load Training data from: /Users/ezhulenev/data/NYSE/EQY_US_NYSE_BOOK_20130403. Filtered: AZ
12:20:53.677 [main] DEBUG c.s.dynamics.DecisionTreeDynamics$ - Training data set [1]:
12:20:53.686 [main] DEBUG c.s.dynamics.DecisionTreeDynamics$ - - OpenBookFile(AZ,A,2013-04-03,/Users/ezhulenev/data/NYSE/EQY_US_NYSE_BOOK_20130403/openbookultraAZ_A20130403_1_of_1)
12:20:53.686 [main] INFO c.s.dynamics.DecisionTreeDynamics$ - Load Validation data from: /Users/ezhulenev/data/NYSE/EQY_US_NYSE_BOOK_20130404. Filtered: AZ
12:20:53.687 [main] DEBUG c.s.dynamics.DecisionTreeDynamics$ - Validation data set [1]:
12:20:53.687 [main] DEBUG c.s.dynamics.DecisionTreeDynamics$ - - OpenBookFile(AZ,A,2013-04-04,/Users/ezhulenev/data/NYSE/EQY_US_NYSE_BOOK_20130404/openbookultraAZ_A20130404_1_of_1)
12:20:53.699 [main] INFO c.s.dynamics.ConfiguredSparkContext - Create spark context. Master: local[2]. App Name: OrderBookDynamics
12:20:55.330 [main] INFO com.scalafi.dyna
ezhulenev / deeptreemap.scala
Created December 4, 2014 22:43
Deep TreeMap conversion
import scala.collection.immutable.TreeMap

trait ToTreeMap[A] {
  type Result
  def treeMap(x: A): Result
}

trait LowerPriorityToTreeMap {
  implicit def plainMap[K, V](implicit ord: Ordering[K]): ToTreeMap[Map[K, V]] =
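The low-priority instance is cut off mid-definition. One hypothetical way to complete the idea (an assumption, not the gist's code) gives the plain-Map instance lowest priority and adds a higher-priority instance that recurses into nested Map values, so Map[K, Map[K2, V]] becomes TreeMap[K, TreeMap[K2, V]]:

trait LowerPriorityToTreeMap {
  // Lowest priority: sort the keys, leave the values untouched.
  implicit def plainMap[K, V](implicit ord: Ordering[K]): ToTreeMap[Map[K, V]] { type Result = TreeMap[K, V] } =
    new ToTreeMap[Map[K, V]] {
      type Result = TreeMap[K, V]
      def treeMap(x: Map[K, V]): TreeMap[K, V] = TreeMap(x.toSeq: _*)
    }
}

object ToTreeMap extends LowerPriorityToTreeMap {
  type Aux[A, R] = ToTreeMap[A] { type Result = R }

  // Higher priority: if the values are themselves convertible, recurse into them.
  implicit def nestedMap[K, V, R](implicit ord: Ordering[K],
                                  inner: Aux[V, R]): Aux[Map[K, V], TreeMap[K, R]] =
    new ToTreeMap[Map[K, V]] {
      type Result = TreeMap[K, R]
      def treeMap(x: Map[K, V]): TreeMap[K, R] =
        TreeMap(x.toSeq.map { case (k, v) => k -> inner.treeMap(v) }: _*)
    }

  def deep[A](x: A)(implicit t: ToTreeMap[A]): t.Result = t.treeMap(x)
}

// ToTreeMap.deep(Map(2 -> Map(3 -> "c", 1 -> "a"))) : TreeMap[Int, TreeMap[Int, String]]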
ezhulenev / gist:329efd28da8ca51d9f5f
Last active September 2, 2016 01:50 — forked from munhitsu/gist:1034876
Python on Mac OS + virtualenvwrapper
# In case you have a non-standard Python installation.
# NOTE: .pydistutils.cfg seems to be incompatible with a brew-installed Python.
# Areas I needed to clean up before installation:
#   ~/Library/Python
#   .local
# Preconditions:
#   Xcode with command line tools installed
xcode-select --install
ezhulenev / svm.scala
Created November 11, 2014 02:51
SVM MLLib
object SVM extends App {
  import org.apache.spark.mllib.classification.SVMWithSGD
  import org.apache.spark.mllib.evaluation.BinaryClassificationMetrics
  import org.apache.spark.mllib.linalg.Vectors
  import org.apache.spark.mllib.regression.LabeledPoint
  import org.apache.spark.{SparkContext, SparkConf}

  import scala.util.Random

  /**
   * Mention of a focus company.
   *
   * @param ticker   ticker of the focus company
   * @param source   source of this mention (Twitter, RSS, etc.)
   * @param sourceId source-specific id
   * @param time     time of the mention
   * @param mentions set of other tickers, including the focus ticker itself
   */
  case class Mention(ticker: Ticker, source: String, sourceId: String, time: DateTime, mentions: Set[Ticker])
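The preview stops at the data model. A sketch of how these imports typically fit together (the toy features below are invented for illustration; they are not the gist's feature extraction from Mention) trains a linear SVM with SGD and reports the area under the ROC curve:

  // Hypothetical example data: random 3-dimensional points labelled by a
  // simple linear rule, standing in for features derived from Mentions.
  val conf = new SparkConf().setAppName("svm-example").setMaster("local[2]")
  val sc   = new SparkContext(conf)

  val points = sc.parallelize(Seq.fill(1000) {
    val features = Array.fill(3)(Random.nextGaussian())
    val label    = if (features.sum > 0) 1.0 else 0.0
    LabeledPoint(label, Vectors.dense(features))
  })

  val Array(training, test) = points.randomSplit(Array(0.7, 0.3))

  val model = SVMWithSGD.train(training.cache(), 100)
  model.clearThreshold() // return raw scores so the ROC curve is meaningful

  val scoreAndLabels = test.map(p => (model.predict(p.features), p.label))
  val metrics = new BinaryClassificationMetrics(scoreAndLabels)
  println(s"Area under ROC = ${metrics.areaUnderROC()}")

  sc.stop()
}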