scalactic • Apple Inc.
@scalactic
scalactic / setup.css
Created November 14, 2022 19:14
CSS setup: global reset with border-box sizing
/* Reset default margins and padding, and size every element with border-box */
*, *::before, *::after {
  margin: 0;
  padding: 0;
  box-sizing: border-box;
}
@scalactic
scalactic / AtmCardSkimming.scala
Last active April 24, 2022 13:49
Fraud Detection on ATM Card Skimming: A case study
import org.apache.flink.api.common.state.{ValueState, ValueStateDescriptor}
import org.apache.flink.configuration.Configuration
import org.apache.flink.streaming.api.functions.co.RichCoFlatMapFunction
import org.apache.flink.streaming.api.scala.{DataStream, StreamExecutionEnvironment}
import org.apache.flink.streaming.api.scala._
import org.apache.flink.streaming.connectors.kafka.FlinkKafkaProducer
import org.apache.flink.util.Collector
import org.apache.hadoop.conf.{Configuration => HadoopConf}
import org.apache.hadoop.hbase.{HBaseConfiguration, TableName}
import org.apache.hadoop.hbase.client.{Connection, ConnectionFactory, Get, Put}
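The preview above shows only the imports; a minimal sketch of how they could fit together for the case study, keeping a per-card flag in ValueState while connecting a transaction stream with a skimming-alert stream (class, field, and stream names are illustrative assumptions, not the gist's actual code):

// Illustrative skeleton only: flags a card when an alert arrives on stream 2,
// then marks later transactions for that card as suspicious on stream 1.
// Intended for use on a connected, keyed-by-cardId pair of streams.
class SkimmingDetector extends RichCoFlatMapFunction[(String, Double), (String, String), String] {

  private var flagged: ValueState[java.lang.Boolean] = _

  override def open(parameters: Configuration): Unit = {
    flagged = getRuntimeContext.getState(
      new ValueStateDescriptor[java.lang.Boolean]("card-flagged", classOf[java.lang.Boolean]))
  }

  // transactions: (cardId, amount)
  override def flatMap1(txn: (String, Double), out: Collector[String]): Unit =
    if (flagged.value() != null && flagged.value())
      out.collect(s"suspicious transaction on card ${txn._1}: ${txn._2}")

  // skimming alerts: (cardId, reason)
  override def flatMap2(alert: (String, String), out: Collector[String]): Unit =
    flagged.update(true)
}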
def withOpen(fileName: String, mode: String, encoding: String)(fp: Iterator[String] => Unit): Unit = {
  // `mode` only mirrors Python's open() signature; Source always reads, using the given encoding
  import scala.io.Source
  val source = Source.fromFile(fileName, encoding)
  try fp(source.getLines())
  finally source.close()
}
withOpen("myfile.txt", "r", "utf-8") { lines =>
  lines.foreach(println)
}
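On Scala 2.13+, the same loan pattern is available out of the box through scala.util.Using, which also closes the source if the body throws; a rough equivalent of the call above:

import scala.io.Source
import scala.util.Using

// Standard-library loan pattern: the source is closed automatically, the result comes back as a Try
Using(Source.fromFile("myfile.txt", "utf-8")) { source =>
  source.getLines().foreach(println)
}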
test_cases = [
([-1, 0, 0, 1, 1, 2], [1, 2, 2, 1, 1, 1]),
([-1, 0, 1, 2], [1, 4, 3, 4]),
([-1, 0, 0, 0], [10, 11, 10, 10]),
([-1, 0], [20, 100]),
([-1, 0, 0, 0, 0, 3, 4, 6, 0, 3], [298, 2187, 5054, 266, 1989, 6499, 5450, 2205, 5893, 8095]),
([-1, 0, 1, 2, 1, 0, 5, 2, 0, 0], [8475, 6038, 8072, 7298, 5363, 9732, 3786, 5521, 8295, 6186]),
([-1, 0, 1, 2, 3, 4, 5, 6, 7, 8], [8618, 5774, 7046, 459, 2279, 2894, 798, 2328, 1011, 2780])
]
@scalactic
scalactic / ILogging.scala
Last active September 26, 2021 09:00
Simple Scala logging trait wrapping java.util.logging
import java.sql.Timestamp
import java.util.logging.{ConsoleHandler, Formatter, Level, LogRecord, Logger}

trait ILogging {
  private def setupLogger(level: Level): Logger = {
    object SimpleFormatter extends Formatter {
      override def format(record: LogRecord): String =
        s"[${new Timestamp(record.getMillis)}] [${record.getLevel}] [${record.getLoggerName}] ${record.getMessage} \n"
    }
    val logger: Logger = Logger.getLogger(this.getClass.getName)
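The gist preview cuts off inside setupLogger; a plausible completion, assuming a single ConsoleHandler wired to the custom formatter (a sketch, not the gist's actual remainder):

    logger.setUseParentHandlers(false)   // avoid duplicate output through the root logger
    val handler = new ConsoleHandler
    handler.setFormatter(SimpleFormatter)
    handler.setLevel(level)
    logger.addHandler(handler)
    logger.setLevel(level)
    logger
  }

  // illustrative: expose a ready-to-use logger to the class mixing in the trait
  protected lazy val log: Logger = setupLogger(Level.INFO)
}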
@scalactic
scalactic / SparkGenerateSchema.scala
Last active July 14, 2021 15:12
Generate schema from case class in spark
import org.apache.spark.sql.types.{StructType, ArrayType}
import org.apache.spark.sql.catalyst.ScalaReflection
/** Simple schema */
case class A(key: String, time: java.sql.Timestamp, date: java.sql.Date, decimal: java.math.BigDecimal, map: Map[String, Int], nested: Seq[Map[String, Seq[Int]]])
val schema = ScalaReflection.schemaFor[A].dataType.asInstanceOf[StructType]
schema.printTreeString
/** Array schema */
val arrSchema = ScalaReflection.schemaFor[Seq[A]].dataType.asInstanceOf[ArrayType]
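An alternative that avoids the internal ScalaReflection API, assuming Spark 2.x or later, derives the same StructType through the public Encoders entry point:

import org.apache.spark.sql.Encoders

// Same schema as above, derived from the case class via its product encoder
val schemaFromEncoder: StructType = Encoders.product[A].schema
schemaFromEncoder.printTreeString()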
@scalactic
scalactic / mergesort.js
Created July 9, 2021 17:30
Merge sort in JavaScript
function merge(arr, l, m, r) {
  let i, j, k;
  let n1 = m - l + 1;
  let n2 = r - m;
  /* create temp arrays */
  let L = new Array(n1);
  let R = new Array(n2);
  /* Copy data to temp arrays L[] and R[] */
  for (i = 0; i < n1; i++)
    L[i] = arr[l + i];
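The JavaScript preview stops partway through the merge step; for comparison, a compact Scala sketch of the same split-and-merge idea (illustrative, not a translation of the gist):

// Recursive merge sort: split the list in half, sort each half, merge the sorted halves
def mergeSort(xs: List[Int]): List[Int] = {
  def merge(a: List[Int], b: List[Int]): List[Int] = (a, b) match {
    case (Nil, _) => b
    case (_, Nil) => a
    case (x :: xt, y :: yt) =>
      if (x <= y) x :: merge(xt, b) else y :: merge(a, yt)
  }
  val half = xs.length / 2
  if (half == 0) xs
  else {
    val (left, right) = xs.splitAt(half)
    merge(mergeSort(left), mergeSort(right))
  }
}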
@scalactic
scalactic / SparkDFWithSchema.scala
Created July 8, 2021 13:19
Create Spark DataFrame with given schema.
import org.apache.spark.sql.Row
import org.apache.spark.sql.types._
val schema = StructType(
  Array(
    StructField("key", StringType, nullable = false),
    StructField("time", IntegerType, nullable = false)
  )
)
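Given that explicit schema, a DataFrame can be built from Rows (a small sketch using the Row import above; the values are made up):

// Build a DataFrame that conforms to the schema defined above
val rows = Seq(Row("a", 1), Row("b", 2))
val dfWithSchema = spark.createDataFrame(spark.sparkContext.parallelize(rows), schema)
dfWithSchema.printSchema()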
//spark-shell --conf spark.sql.sources.partitionOverwriteMode=dynamic
import org.apache.spark.sql.SaveMode
val data = Seq((1,2,"20210701"),(1,3,"20210701"),(3,4,"20210702"),(3,5,"20210702"))
val df = spark.createDataFrame(data).toDF("col_1", "col_2", "prt_date")
// this will create the table, with partitions 20210701, 20210702
df
.write
.partitionBy("prt_date")
@scalactic
scalactic / encrypt_decrypt_AES.scala
Last active June 28, 2021 17:05
Scala encrypt/decrypt AES
import javax.crypto.{Cipher, KeyGenerator}
import javax.crypto.spec.SecretKeySpec
import java.util.Base64
val keyGenerator = KeyGenerator.getInstance("AES")
keyGenerator.init(128)
val secretKey = keyGenerator.generateKey
val encodedKey = Base64.getEncoder.encodeToString(secretKey.getEncoded)
val decodedKey = Base64.getDecoder.decode(encodedKey)
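The preview stops after the key handling; a minimal round-trip sketch using the imports above (the bare "AES" transformation defaults to ECB with PKCS5 padding, fine for a demo but not for production):

// Encrypt with the generated key, then decrypt with the key rebuilt from its decoded bytes
val plaintext = "hello scala"

val encryptCipher = Cipher.getInstance("AES")
encryptCipher.init(Cipher.ENCRYPT_MODE, secretKey)
val cipherText = Base64.getEncoder.encodeToString(encryptCipher.doFinal(plaintext.getBytes("UTF-8")))

val restoredKey = new SecretKeySpec(decodedKey, "AES")
val decryptCipher = Cipher.getInstance("AES")
decryptCipher.init(Cipher.DECRYPT_MODE, restoredKey)
val decrypted = new String(decryptCipher.doFinal(Base64.getDecoder.decode(cipherText)), "UTF-8")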