Jakub Liska (l15k4), GitHub gists
// Stress-test off-heap allocation with scala-offheap: fill an on-heap
// array with ~268M small off-heap byte arrays allocated via malloc.
object Play extends App {
  implicit val alloc = offheap.malloc
  val arr = new scala.Array[offheap.Array[Byte]](Int.MaxValue / 8)
  var counter = 0
  while (counter < Int.MaxValue / 8) {
    arr(counter) = offheap.Array.fromArray("foobar".getBytes)
    counter += 1
  }
}
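A hedged read-back sketch, assuming offheap.Array exposes length and apply as in scala-offheap's README (the index and charset are illustrative). Note the malloc allocator never frees this memory; scala-offheap also offers scoped Region allocation when deterministic cleanup matters.

// Copy the first off-heap array back to the heap and decode it.
val first = arr(0)
val onHeap = new scala.Array[Byte](first.length)
var i = 0
while (i < first.length) { onHeap(i) = first(i); i += 1 }
println(new String(onHeap, "UTF-8")) // prints "foobar"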
package example

import java.io.File
import java.nio.charset.StandardCharsets
import java.nio.file.Path
import java.util.UUID

import akka.actor.ActorSystem
import akka.stream._
import akka.stream.scaladsl.{FileIO, Flow, Framing, Keep, Sink, Source}
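These imports are the header of an akka-streams gist whose body is cut off; a minimal sketch of the kind of pipeline they suggest (the file path, frame length, and stream shape are assumptions, not the original gist body):

import akka.util.ByteString

object FramingExample extends App {
  implicit val system = ActorSystem("example")
  implicit val materializer = ActorMaterializer()

  // Read a file, split it into newline-delimited frames, decode and print them.
  FileIO.fromPath(new File("/tmp/input.txt").toPath)
    .via(Framing.delimiter(ByteString("\n"), maximumFrameLength = 8192, allowTruncation = true))
    .map(_.utf8String)
    .runWith(Sink.foreach(println))
    .onComplete(_ => system.terminate())(system.dispatcher)
}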
package example

import cats.implicits._
import com.google.common.hash.Hashing
import com.rklaehn.abc._
import com.rklaehn.sonicreducer.Reducer

import org.scalameter.Bench.{HTMLReport, OfflineReport}
import org.scalameter.{Bench, KeyValue}
import org.scalameter.api._
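The imports above set up a ScalaMeter benchmark suite (the abc/sonic-reducer imports hint the original measured Reducer-based collections); a minimal sketch of the DSL they point at, with an assumed generator range and measured operation:

object ReduceBench extends OfflineReport {
  // Input sizes to benchmark over: 10k to 50k in steps of 10k.
  val sizes: Gen[Int] = Gen.range("size")(10000, 50000, 10000)

  performance of "Array" in {
    measure method "sum" in {
      using(sizes) in { n => (0 until n).toArray.sum }
    }
  }
}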
// Write records as Parquet groups using parquet-mr's example Group API.
val writer = new ParquetWriter[Group](
  outFile, new GroupWriteSupport, codec, blockSize, pageSize, dictPageSize,
  true, true, ParquetProperties.WriterVersion.PARQUET_2_0, configuration)
try {
  records.foreach { case ClientRecord(cid, diid, gwid, dvc, geo, timestamp, kvs) =>
    val group = new SimpleGroup(schema)
      .append("timestamp", timestamp)
      .append("cid", cid)
      .append("diid", diid)
      .append("gwid", gwid)
    writer.write(group)
  }
} finally writer.close()
{
  "type" : "index_hadoop",
  "spec" : {
    "dataSchema" : {
      "dataSource" : "daily-data-source",
      "parser" : {
        "type" : "hadoopyString",
        "parseSpec" : {
          "format" : "json",
          "timestampSpec" : {
#!/usr/bin/env bash
# Resolve the repo root whether the script is run from bin/ or the root itself.
ROOT_DIR=$([[ $PWD == */bin ]] && dirname "$PWD" || echo "$PWD")
DRUID_VERSION="0.9.2-SNAPSHOT"
DRUID_PATH="${ROOT_DIR}/dist/druid-${DRUID_VERSION}"
MYSQL_PATH="${ROOT_DIR}/dist/mysql-metadata-storage-${DRUID_VERSION}.tar.gz"
[[ -d ${DRUID_PATH} && -f ${MYSQL_PATH} ]] || {
  echo "Druid distribution should be here $DRUID_PATH !!!" >&2
  exit 1
}
[18:32:44] $ docker-compose -f kafka.yml up -d kafka
Creating docker_kafka_1
[18:32:51] $ kafka-topics.sh --create --zookeeper localhost:2181 --replication-factor 1 --partitions 1 --topic test
Created topic "test".
[18:33:35] 127 $ kafka-topics.sh --list --zookeeper localhost:2181
test
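With Kafka running in docker-compose and the test topic created, the akka-stream-kafka snippets below produce to and consume from that topic.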
$ tree
.
├── root
│   ├── project-a
│   │   ├── doc
│   │   │   ├── bar.md
│   │   │   ├── foo.md
│   │   │   └── readme.md
│   │   └── readme.md -> doc/readme.md
│   ├── project-b
│   │   ├── doc
val msgCount = 10

val producerSettings =
  ProducerSettings(system, new StringSerializer, new StringSerializer)
    .withBootstrapServers(s"$KafkaHost:9092")
    .withProperty("batch.size", "0") // disable producer batching so each record is sent immediately

def consumerSettings(topic: String, clientId: String) =
  ConsumerSettings(system, new StringDeserializer, new StringDeserializer)
    .withBootstrapServers(s"$KafkaHost:9092")
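A hedged round-trip sketch wiring the settings above into akka-stream-kafka's plain sink and source (0.12+ API; the topic, group id, and offset reset are assumptions):

import akka.kafka.Subscriptions
import akka.kafka.scaladsl.{Consumer, Producer}
import org.apache.kafka.clients.producer.ProducerRecord

// Produce msgCount messages, then consume and print them back.
Source(1 to msgCount)
  .map(n => new ProducerRecord[String, String]("test", n.toString))
  .runWith(Producer.plainSink(producerSettings))

Consumer.plainSource(
    consumerSettings("test", "client-1")
      .withGroupId("group-1")
      .withProperty(ConsumerConfig.AUTO_OFFSET_RESET_CONFIG, "earliest"),
    Subscriptions.topics("test"))
  .take(msgCount.toLong)
  .runWith(Sink.foreach(record => println(record.value)))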
package com.example
import akka.kafka.scaladsl.{Consumer, Producer}
import akka.kafka.{ConsumerSettings, ProducerSettings}
import akka.stream.scaladsl.{Sink, Source}
import kafka.utils.ZkUtils
import org.apache.kafka.clients.consumer.ConsumerConfig
import org.apache.kafka.clients.producer.ProducerRecord
import org.apache.kafka.common.serialization.{StringDeserializer, StringSerializer}
import org.scalatest.BeforeAndAfterEach
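A minimal sketch of the test skeleton these imports suggest; the spec name, topic, timeouts, and AdminUtils calls are assumptions, not the gist body:

import kafka.admin.AdminUtils
import org.scalatest.WordSpecLike

class KafkaRoundTripSpec extends WordSpecLike with BeforeAndAfterEach {
  var zkUtils: ZkUtils = _

  // Recreate the test topic around each test so runs stay isolated.
  override def beforeEach(): Unit = {
    zkUtils = ZkUtils("localhost:2181", 10000, 10000, isZkSecurityEnabled = false)
    AdminUtils.createTopic(zkUtils, "test", partitions = 1, replicationFactor = 1)
  }

  override def afterEach(): Unit = {
    AdminUtils.deleteTopic(zkUtils, "test")
    zkUtils.close()
  }
}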