Ryan Knight retroryan

  • www.grandcloud.com
  • Park City, Utah
import zhttp.service.{ChannelFactory, Client, EventLoopGroup}
import zio.{Tag, ZEnvironment, ZIO, ZLayer}

trait NumService:
  val get: ZIO[Any, Throwable, Int]

final case class NumServiceLive(c: ChannelFactory, e: EventLoopGroup) extends NumService:
  val url = "https://random-num-x5ht4amjia-uc.a.run.app/"

  val get: ZIO[Any, Throwable, Int] =
    // Hypothetical completion: the gist preview cuts off after `val request =`.
    // Issue the request, read the body, and parse it as an Int, supplying the
    // channel factory and event-loop group this service was constructed with.
    val request = Client.request(url)
    request
      .flatMap(_.bodyAsString)
      .map(_.trim.toInt)
      .provideEnvironment(ZEnvironment(c, e))
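Nothing in the preview shows how the service is wired; a minimal sketch, assuming zhttp's ChannelFactory.auto and EventLoopGroup.auto() layers and ZIO 2's ZLayer.fromFunction (those names come from the libraries, not this gist):

import zhttp.service.{ChannelFactory, EventLoopGroup}
import zio._

object NumServiceDemo extends ZIOAppDefault:
  // Wire the live service from zhttp's default channel and event-loop layers.
  val live: ZLayer[Any, Nothing, NumService] =
    (ChannelFactory.auto ++ EventLoopGroup.auto()) >>> ZLayer.fromFunction(NumServiceLive.apply)

  val run =
    ZIO.serviceWithZIO[NumService](_.get)
      .flatMap(n => Console.printLine(s"fetched: $n"))
      .provide(live)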
retroryan / gist:7448032411c415330cc5fd81ff549b3b
Last active July 13, 2018 07:09
Event-Sourcing with FaunaDB
#create class
CreateClass({ name: "ledger" })

#create index
# note: `unique: true` below is an assumed completion, since the gist preview
# is cut off; the index name implies a uniqueness constraint over each
# client's counter values
CreateIndex(
  {
    name: "UNIQUE_ENTRY_CONSTRAINT",
    source: Class("ledger"),
    terms: [{ field: ["data", "clientId"] }],
    values: [{ field: ["data", "counter"] }],
    unique: true
  }
)
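For illustration, a sketch of appending a ledger event through the faunadb-scala driver; the client setup and data values are hypothetical, but the effect of the index is that replaying the same (clientId, counter) pair fails instead of double-writing the event:

import faunadb.FaunaClient
import faunadb.query._
import scala.concurrent.ExecutionContext.Implicits.global

// Hypothetical client; the secret is a placeholder.
val client = FaunaClient(secret = "YOUR_FAUNA_SECRET")

// The first append succeeds; replaying the same (clientId, counter) pair
// violates UNIQUE_ENTRY_CONSTRAINT, so the returned Future fails instead
// of silently recording the event twice.
val appendEvent =
  client.query(Create(Class("ledger"), Obj("data" -> Obj("clientId" -> 123, "counter" -> 1))))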
retroryan / gist:faa2ad045242c2617f72021bf72a864b
Last active July 13, 2018 06:43
Introduction to Fauna Shell
#Create a Class
CreateClass({ name: "orders" })

#Create an Index
CreateIndex(
  {
    name: "orders_index",
    source: Class("orders"),
    values: [{ field: ["data", "orderId"], reverse: true }, { field: ["ref"] }],
    serialized: true
  }
)
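A matching sketch of reading the index back via the same hypothetical faunadb-scala client; Paginate(Match(Index(...))) is the standard FQL pattern for listing index entries:

import faunadb.query._

// Page through [orderId, ref] pairs; `reverse: true` on the first value
// means the newest orderId comes back first.
val listOrders = client.query(Paginate(Match(Index("orders_index"))))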
15kX9REWJS9v3NmT1JeeUN6NnovQk9WnWz

Keybase proof

I hereby claim:

  • I am retroryan on github.
  • I am binaryknight (https://keybase.io/binaryknight) on keybase.
  • I have a public key ASD4qP7q58L0A88jrHOloskDr5ixa3CT77MWpbiEtxO5Cwo

To claim this, I am signing this object:

override val supervisorStrategy: SupervisorStrategy = {
  val decider: SupervisorStrategy.Decider = {
    // A guest who has had too much caffeine is simply stopped.
    case Guest.CaffeineException =>
      SupervisorStrategy.Stop
    // A frustrated waiter is restarted, but the coffee order is first
    // re-submitted to the barista on behalf of the waiting guest.
    case Waiter.FrustratedException(coffee, guest) =>
      barista.tell(Barista.PrepareCoffee(coffee, guest), sender())
      SupervisorStrategy.Restart
  }
  // Fall back to the default decider for anything not handled above.
  OneForOneStrategy()(decider orElse super.supervisorStrategy.decider)
}
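The exception and message types the decider matches on are not part of the snippet; a hypothetical sketch of what they might look like (names follow the snippet, bodies are assumptions):

import akka.actor.ActorRef

object Guest {
  // Hypothetical: thrown when a guest exceeds their caffeine limit.
  case object CaffeineException extends IllegalStateException("too much caffeine")
}

object Waiter {
  // Hypothetical: carries enough context to re-order the coffee on restart.
  final case class FrustratedException(coffee: String, guest: ActorRef)
      extends IllegalStateException("waiter is frustrated")
}

object Barista {
  final case class PrepareCoffee(coffee: String, guest: ActorRef)
}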
retroryan / spark streaming windowing.scala
Created November 2, 2015 17:54
spark streaming windowing example
import org.apache.spark.streaming.kafka.KafkaUtils
import org.apache.spark.streaming.Time
import kafka.serializer.StringDecoder
import org.joda.time.DateTime
import org.apache.spark.sql.SaveMode
import sqlContext.implicits._ // assumes a sqlContext in scope, e.g. the Spark shell

// Read the ratings topic directly from Kafka as (key, value) string pairs.
val ratingsStream = KafkaUtils.createDirectStream[String, String, StringDecoder, StringDecoder](ssc, kafkaParams, topics)

val msgs = ratingsStream.transform { (rdd, time: Time) =>
  // Hypothetical completion: the gist preview cuts off inside this block.
  // Tag each message with its batch time for the windowing shown below.
  rdd.map { case (_, rating) => (new DateTime(time.milliseconds), rating) }
}
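The preview ends before any window is applied; a sketch of windowing the tagged stream, with arbitrary window and slide durations chosen for illustration:

import org.apache.spark.streaming.{Minutes, Seconds}

// Count messages per rating over a 10-minute window sliding every 30 seconds.
val windowedCounts = msgs
  .map { case (_, rating) => (rating, 1L) }
  .reduceByKeyAndWindow(_ + _, Minutes(10), Seconds(30))

windowedCounts.print()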
import scala.annotation.tailrec

// Convert a list of single-digit strings (e.g. List("1", "2", "3")) to an Int.
def atoi(chList: List[String]): Int = {
  @tailrec
  def atoiAccumulator(chList: List[String], accumulator: Int): Int = chList match {
    case Nil => accumulator
    case head :: tail =>
      // Weight the leading digit by its place value, then recurse on the rest.
      val tensMult = scala.math.pow(10, tail.length).toInt
      val nxtAccumulator = (head.toInt * tensMult) + accumulator
      atoiAccumulator(tail, nxtAccumulator)
  }
  atoiAccumulator(chList, 0)
}
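A quick usage example, assuming the completed function above:

println(atoi(List("4", "2"))) // prints 42: "4" weighs 4 * 10^1, "2" weighs 2 * 10^0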
import scala.sys.process._ // for the "dsetool sparkmaster".!! shell call

class DseSparkContextFactory extends SparkContextFactory {
  import SparkJobUtils._
  type C = SparkContext with ContextLike

  def makeContext(config: Config, contextConfig: Config, contextName: String): C = {
    val conf = configToSparkConf(config, contextConfig, contextName)
    // Ask DSE for the current Spark master URL and point the conf at it.
    val sparkMaster = "dsetool sparkmaster".!!.trim
    conf.set("spark.master", sparkMaster)
    // Hypothetical completion (the gist preview is cut off here), mirroring
    // spark-jobserver's DefaultSparkContextFactory:
    new SparkContext(conf) with ContextLike {
      def isValidJob(job: SparkJobBase): Boolean = job.isInstanceOf[SparkJob]
    }
  }
}
################ TEST RUN 1 Java 8 Async Receiver ###########################
java -jar target/cqltestclient.jar --host=127.0.0.1 --activity=WriteTelemetryAsync:7000000:150:1500 --activity=ReadTelemetryAsync:700000:50:500
5/4/15 9:16:55 PM ==============================================================
-- Counters --------------------------------------------------------------------
ActivityExecutorService.activities
count = 2
ReadTelemetryAsyncActivity.async-pending