Jonathan Winandy (ahoy-jon)

/*
* Copyright 2017-2019 John A. De Goes and the ZIO Contributors
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
diff --git a/core/shared/src/main/scala/scalaz/zio/clock/package.scala b/core/shared/src/main/scala/scalaz/zio/clock/package.scala
index 5691082e..33cfcb61 100644
--- a/core/shared/src/main/scala/scalaz/zio/clock/package.scala
+++ b/core/shared/src/main/scala/scalaz/zio/clock/package.scala
@@ -17,28 +17,37 @@
package scalaz.zio
import scalaz.zio.duration.Duration
-
import java.util.concurrent.TimeUnit
import org.apache.spark.sql.{Column, DataFrame, SparkSession}

// Abstract members: an active session and a DataFrame provided by the caller.
val ss: SparkSession
val df: DataFrame
import ss.implicits._

// Type-level evidence used to keep Spark Columns out of an implicit resolution.
trait NotTypeColumn[C]
object NotTypeColumn {
  implicit def notc[C <: Column]: NotTypeColumn[C] = new NotTypeColumn[C] {}
}
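// The gist preview cuts off here. One common way this kind of "not a Column"
// evidence is completed (an assumption, not shown above) is the implicit-ambiguity
// trick: a default instance for every type plus two equally specific instances for
// Column subtypes, so that resolution fails exactly when C is a Spark Column.
// A minimal sketch under that assumption:
object NotColumnSketch {
  import org.apache.spark.sql.Column

  trait NotColumn[C]
  object NotColumn {
    implicit def default[C]: NotColumn[C] = new NotColumn[C] {}
    implicit def ambiguous1[C <: Column]: NotColumn[C] = new NotColumn[C] {}
    implicit def ambiguous2[C <: Column]: NotColumn[C] = new NotColumn[C] {}
  }

  // Compiles for ordinary values, fails to compile when C is a Column.
  def notAColumn[C: NotColumn](c: C): C = c
}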
package io.univalence
import io.univalence.Fk.Encoder.{BindedTypedExprEncoder, SimpleEncoder}
import io.univalence.Fk.Expr.Ops
import io.univalence.Fk.TypedExpr.TypedOr
import org.json4s.JsonAST.{JArray, JString, JValue}
import scala.language.{dynamics, implicitConversions}
object Fk {
def day1(input: Seq[Int]): (Int, Int) = {
  // Part 1: the resulting frequency is simply the sum of all changes.
  val part1 = input.sum
  // Part 2: cycle through the changes and find the first cumulative frequency seen twice.
  val part2 = {
    val freq: Seq[Int]   = Stream.continually(input).flatten
    val states: Seq[Int] = freq.scanLeft(0)(_ + _)
    val find: Either[Int, Set[Int]] = foldLeftUntil(states)(Set.empty[Int])(
      (set, a) => if (set(a)) Left(a) else Right(set + a))
    find.left.getOrElse(sys.error("no repeated frequency")) // preview is cut off here; closing assumed
  }
  (part1, part2)
}
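// foldLeftUntil is not defined in this preview; a plausible sketch of what it
// presumably does (an assumption, not the gist's actual code): fold from the
// left, short-circuiting with Left as soon as the step function returns one.
// Works on the lazy, infinite Stream above because it never forces the tail
// once a repeat is found.
def foldLeftUntil[A, B, C](xs: Seq[A])(zero: B)(f: (B, A) => Either[C, B]): Either[C, B] =
  if (xs.isEmpty) Right(zero)
  else
    f(zero, xs.head) match {
      case Left(stop)  => Left(stop)
      case Right(next) => foldLeftUntil(xs.tail)(next)(f)
    }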
package testMacWire
import com.softwaremill.macwire._
class DatabaseAccess()
class SecurityFilter()
class UserFinder(databaseAccess: DatabaseAccess, securityFilter: SecurityFilter)
class UserStatusReader(userFinder: UserFinder)
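// The wiring itself is cut off in the preview above. A minimal sketch of how
// these classes are typically assembled with MacWire (module name assumed):
// wire[T] expands at compile time into a constructor call whose arguments are
// found among the values in scope.
object UserModule {
  lazy val databaseAccess: DatabaseAccess     = wire[DatabaseAccess]
  lazy val securityFilter: SecurityFilter     = wire[SecurityFilter]
  lazy val userFinder: UserFinder             = wire[UserFinder]
  lazy val userStatusReader: UserStatusReader = wire[UserStatusReader]
}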
package utils

import org.scalatest.FunSuite
import scalaz.zio.{IO, RTS, Ref}
import utils.CircuitBreaker.{Closed, Open, Status}

final class CircuitBreaker[+BreakingFailure](ref: Ref[CircuitBreakerStatus],
                                             initStatus: CircuitBreakerStatus,
                                             whenOpen: IO[BreakingFailure, Nothing]) {
  def protect[E >: BreakingFailure, A](io: IO[E, A]): IO[E, A] = {
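    // The actual body of protect is cut off in this preview. A rough, assumed
    // sketch (not the gist's implementation): check the breaker's status first
    // and either short-circuit with whenOpen or run the effect. This presumes
    // CircuitBreakerStatus has Open and Closed cases, as the import of Closed
    // and Open above suggests.
    //
    //   def protect[E >: BreakingFailure, A](io: IO[E, A]): IO[E, A] =
    //     ref.get.flatMap {
    //       case Open   => whenOpen // breaker is open: refuse to run the effect
    //       case Closed => io       // breaker is closed: run it normally
    //     }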
package io.univalence
import fr.jetoile.hadoopunit.HadoopUtils
import org.apache.spark.sql.SparkSession
object TestSpark {
def main(args: Array[String]): Unit = {
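    // (The rest of main is cut off in the preview; below is an assumed sketch,
    // not the gist's actual code.) Start a local SparkSession for testing; the
    // HadoopUnit bootstrap via HadoopUtils is omitted because its API is not
    // shown above.
    val spark = SparkSession
      .builder()
      .master("local[*]")
      .appName("TestSpark")
      .getOrCreate()

    spark.range(10).show() // quick smoke test that the local session works
    spark.stop()
  }
}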