Skip to content

Instantly share code, notes, and snippets.

View oluies's full-sized avatar

Örjan Angré (Lundberg) oluies

  • Sweden
  • 17:35 (UTC +02:00)
  • X @oluies
View GitHub Profile
@oluies
oluies / brews.txt
Created September 14, 2017 19:34 — forked from anonymous/brews.txt
ack
adns
ansible
ant
apache-spark
argtable
arpack
augeas
autoconf
autojump
@oluies
oluies / after.csv
Created August 8, 2017 12:34 — forked from bfritz/after.csv
rapture-csv in Ammonite REPL
territory_id first_name last_name email employee_id
XMOWSM54 Peter Alexander palexander0@unesco.org E00QTOF
XMRNBM47 Samuel Lopez slopez1@163.com E00UBFA
XMOWMF87 Elizabeth Stone estone2@usatoday.com E00WDYK
XMZWPW22 William Carroll wcarroll3@odnoklassniki.ru E00VDYQ
XMOWRW46 Carolyn Little clittle4@ox.ac.uk E00HUPR
XMZNDX26 Marilyn Robinson mrobinson5@wired.com E00ZJGS
XMZNAI68 Christopher Rogers crogers6@posterous.com E00DCHF
XMONCD74 Anthony Allen aallen7@flickr.com E00ACEQ
XMRNMD81 Martin Baker mbaker8@hatena.ne.jp E00DKRZ
@oluies
oluies / exclude_targetdirs.sh
Created January 21, 2017 18:30 — forked from viktorklang/exclude_targetdirs.sh
Adds all your sbt target dirs as path excludes for Time Machine
#WARNING: Use at your own risk. No warranties expressed or implied. YMMV. Drive responsibly. Eat healthy.
#First, `cd` into the parent dir for all of your `sbt`/`maven` projects (I assume you have one of those)
# Resolve the absolute path of the parent directory, then mark every
# `target` build directory beneath it as excluded from Time Machine
# backups via `tmutil addexclusion -p` (path-based exclusion, needs sudo).
parent_dir="$(cd ..; pwd)"
find "$parent_dir" -type d -name "target" -exec sudo tmutil addexclusion -p {} +
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.combient.sparkjob.tedsds
/**
* Created by olu on 09/03/16.
*/
import org.apache.spark.{SparkContext, SparkConf}
import org.apache.spark.sql.hive.HiveContext
import org.apache.spark.sql.expressions.Window
import org.apache.spark.sql.functions._
@oluies
oluies / gce-vpn-install.sh
Created February 7, 2016 12:51 — forked from greenido/gce-vpn-install.sh
Installing a VPN on GCE
#!/bin/sh
#
# Automatic configuration of a VPN on GCE debian-7-wheezy server.
# Tested only on debian-7-wheezy.
#
# This work is licensed under the Creative Commons Attribution-ShareAlike 3.0
# Unported License: http://creativecommons.org/licenses/by-sa/3.0/
#
# Thanks to: https://github.com/sarfata/voodooprivacy/blob/master/voodoo-vpn.sh for the code/idea
#
@oluies
oluies / Proxy.scala
Created October 3, 2011 07:08 — forked from gkossakowski/Proxy.scala
ScalaProxy example
package test;
import java.lang.{reflect => jreflect}
import scala.reflect.mirror._
/**
* Scala counterpart of java.lang.reflect.InvocationHandler
*/
trait InvocationHandler {
def invoke(proxy: AnyRef, method: Symbol, args: Array[AnyRef]): AnyRef
scala> // Problem: Some java apis return nulls.
scala> System.getenv("PROBABLY_NOT_DEFINED_RUBISH_NAME")
res0: java.lang.String = null
scala> System.getenv("PROBABLY_NOT_DEFINED_RUBISH_NAME").split(",")
java.lang.NullPointerException
at .<init>(<console>:7)
at .<clinit>(<console>)
at RequestResult$.<init>(<console>:9)
trait SqlParser extends scala.util.parsing.combinator.RegexParsers {
// ignore ordinary whitespace, line comments, and inline comments between combinators
override val whiteSpace = "(?sm)(\\s*(?:--.*?$|/\\*((?!\\*/).)*\\*/)\\s*|\\s+)+".r
def sqlStatement: Parser[Statement] =
opt(whiteSpace) ~> positioned( procedureCallStatement
| insertStatementCustom
| insertStatement
| updateStatement
class HeartMonitor(millisUntilDeclaredDead: Long) extends Actor with FSM[Health, Long] {
import System.{currentTimeMillis => now}
val nextTest = 1000L
notifying {
case Transition(Stale, Alive) =>
log.info("HeartMonitor received initial heartbeat")
case Transition(Dead, Alive) =>
log.info("HeartMonitor noticed we are back alive again")
case Transition(_, Dead) =>