import java.util.concurrent.{ExecutorService, Executors}
import cats.effect.{ContextShift, IO, Resource}
import scala.concurrent.{ExecutionContext, ExecutionContextExecutor}

class KafkaContext(cs: ContextShift[IO]) {
  // A thread pool with exactly 1 thread: KafkaConsumer is not thread-safe,
  // so all blocking calls are confined to this single thread.
  private val threadPool = Executors.newFixedThreadPool(1)
  protected val synchronousExecutionContext = ExecutionContext.fromExecutor(threadPool)

  // Evaluate a blocking computation on the dedicated thread, shifting back afterwards.
  def execute[A](f: => A): IO[A] = cs.evalOn(synchronousExecutionContext)(IO(f))
}
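
A minimal usage sketch (not part of the gist) of how execute can wrap a blocking consumer call; the pollOnce helper, the consumer parameter, and the 500 ms timeout are assumptions:

import java.time.Duration
import cats.effect.IO
import org.apache.kafka.clients.consumer.KafkaConsumer

// Hypothetical helper: run one blocking poll on the dedicated Kafka thread.
def pollOnce(ctx: KafkaContext, consumer: KafkaConsumer[String, String]): IO[Int] =
  ctx.execute {
    consumer.poll(Duration.ofMillis(500)).count()
  }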

import cats.effect.{ConcurrentEffect, ExitCode, IO, IOApp}
import scala.concurrent.{ExecutionContext, ExecutionContextExecutor}

object ConsumerApplication extends IOApp {
  // cats-effect 2 wiring: a ContextShift and a ConcurrentEffect built on the global pool
  implicit private val ec: ExecutionContextExecutor = ExecutionContext.global
  private val cs = IO.contextShift(ec)
  implicit private val concurrentEffect: ConcurrentEffect[IO] = IO.ioConcurrentEffect(cs)

  // ConsumerConfig is presumably a case class defined elsewhere in the gist:
  // bootstrap servers, topic, consumer group id
  val config = ConsumerConfig("localhost:9092", "test-topic", "consumer-group-1")
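  // The preview cuts off before the IOApp entry point. A minimal sketch of what
  // run might look like (an assumption, not the original gist code): build the
  // KafkaContext from the snippet above and push the blocking consume loop onto
  // its dedicated thread. consumeFromKafka is the function shown in the next
  // snippet; config.topic is an assumed field name.
  def run(args: List[String]): IO[ExitCode] = {
    val kafkaContext = new KafkaContext(cs)
    kafkaContext.execute(consumeFromKafka(config.topic)).map(_ => ExitCode.Success)
  }
}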

import java.time.Duration
import java.util.{Collections, Properties}
import cats.effect.{IO, Resource, Timer}
import org.apache.kafka.clients.consumer.{KafkaConsumer, ConsumerConfig => KafkaConsumerConfig}
import org.apache.kafka.common.serialization.StringDeserializer
import scala.collection.JavaConverters._
import scala.concurrent.ExecutionContext

def consumeFromKafka(topic: String): Unit = {
  val props = new Properties()
  props.put("bootstrap.servers", "localhost:9094")
  props.put("key.deserializer", "org.apache.kafka.common.serialization.StringDeserializer")
  props.put("value.deserializer", "org.apache.kafka.common.serialization.StringDeserializer")
  props.put("auto.offset.reset", "latest")
  props.put("group.id", "consumer-group")
  val consumer: KafkaConsumer[String, String] = new KafkaConsumer[String, String](props)
  consumer.subscribe(Collections.singletonList(topic))
  while (true) {
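    // The preview ends here. A sketch of the usual poll-and-print loop body,
    // assumed for illustration (timeout and record handling are not from the gist):
    val records = consumer.poll(Duration.ofMillis(1000))
    records.asScala.foreach { record =>
      println(s"offset=${record.offset()}, key=${record.key()}, value=${record.value()}")
    }
  }
}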
PROMPT='$(kube_ps1) '$PROMPT

import sys
import signal

# scf is assumed to be a cflib SyncCrazyflie created elsewhere in the script
def signal_handler(sig, frame):
    print("EMERGENCY LANDING BY USER!")
    scf.cf.commander.send_setpoint(0, 0, 0, 0)  # zero setpoint cuts the thrust
    sys.exit(0)

# registration assumed (the preview ends above):
signal.signal(signal.SIGINT, signal_handler)

dosht / CassandraSparkExamples.scala
Created July 14, 2015 16:47
Some Spark/Cassandra Examples
package exampels
import com.datastax.spark.connector.cql.CassandraConnector
import org.apache.spark.{ SparkConf, SparkContext }
import org.apache.spark.sql.cassandra.CassandraSQLContext
import org.apache.spark.SparkContext._
import com.datastax.spark.connector._
object CassandraSparkExamples {
  // Connection
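  // The preview ends at the connection section. A hedged sketch of a typical
  // spark-cassandra-connector setup; the host, keyspace, and table names are
  // assumptions, not taken from the gist.
  def main(args: Array[String]): Unit = {
    val conf = new SparkConf(true)
      .set("spark.cassandra.connection.host", "127.0.0.1")
    val sc = new SparkContext("local[2]", "CassandraSparkExamples", conf)

    // Raw CQL through the connector-managed session
    CassandraConnector(conf).withSessionDo { session =>
      session.execute("SELECT release_version FROM system.local")
    }

    // Read a table as an RDD via the implicits from com.datastax.spark.connector._
    val rdd = sc.cassandraTable("test_keyspace", "key_value")
    println(rdd.count())

    sc.stop()
  }
}
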
dosht / Iris.scala
Created May 17, 2015 07:22
DL4J Scala Example
import org.deeplearning4j.nn.conf.NeuralNetConfiguration;
import org.deeplearning4j.nn.conf.layers.RBM;
import org.deeplearning4j.nn.weights.WeightInit;
import org.deeplearning4j.nn.conf.distribution.UniformDistribution;
import org.deeplearning4j.nn.api.OptimizationAlgorithm;
import org.nd4j.linalg.lossfunctions.LossFunctions;
import org.deeplearning4j.nn.conf.`override`.ClassifierOverride;
import org.deeplearning4j.nn.multilayer.MultiLayerNetwork;
import org.deeplearning4j.optimize.listeners.ScoreIterationListener;
import org.deeplearning4j.optimize.api.IterationListener;
dosht / styles.py
Created August 30, 2014 19:07
Which way do you prefer?
# Partition the following data into 2 lists: males and females
import random

users = [{
    "name": "user_%s" % x,
    # roughly a quarter of the users end up female
    "gender": 'f' if x % 3 == random.randint(0, 3) else 'm'
} for x in range(1, 100)]
# Which way do you prefer?
# 1
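# (the preview cuts off here; the two styles below are illustrative sketches,
#  not necessarily the ones the original gist compares)
males = [u for u in users if u["gender"] == 'm']
females = [u for u in users if u["gender"] == 'f']

# 2
males, females = [], []
for u in users:
    (males if u["gender"] == 'm' else females).append(u)
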
dosht / gist:4d5445c92e71d46af971
Last active August 29, 2015 14:03
XHR polling and streaming for sockjs
package com.cloud9ers.play2.sockjs.transports
import scala.Array.canBuildFrom
import scala.concurrent.Promise
import scala.concurrent.duration.DurationInt
import org.codehaus.jackson.JsonParseException
import com.cloud9ers.play2.sockjs.{ JsonCodec, Session, SockJsFrames }
import akka.actor.{ ActorRef, PoisonPill, Props, actorRef2Scala }
import play.api.libs.concurrent.Execution.Implicits.defaultContext
import play.api.libs.iteratee.Concurrent