Skip to content

Instantly share code, notes, and snippets.

View ASRagab's full-sized avatar
👨‍💻

Ahmad Ragab ASRagab

👨‍💻
View GitHub Profile
@ASRagab
ASRagab / gist:5938686
Created July 6, 2013 04:54
Processing gist which draws a bubble chart — manually, or as manually as one gets in Processing
// Sketch-level state: the companies to draw and the font used for labels.
// NOTE(review): initialization happens inside setup() — not fully visible here.
Company[] companies;
PFont font;
void setup()
{
String[] company = loadStrings("Testdata.csv");
size(800,800);
background(255);
noStroke();
smooth();
@ASRagab
ASRagab / BerkeleyData.scala
Created November 15, 2015 05:35
Apache Flink Local: Berkeley Data and Simpson's Paradox
import org.apache.flink.api.scala._
object BerkeleyData {
def main(args: Array[String]) {
val env: ExecutionEnvironment = ExecutionEnvironment.getExecutionEnvironment
val path = "/path/to/Berkeley.csv"
case class Admission(status: String, gender: String, dept: String, number: Double = 0.0)
val data = env.readCsvFile[Admission](
trait Database[S, T] {
/**
 * Converts an optional collection of persistence-layer rows into DTOs.
 *
 * An absent result (`None`) is treated the same as an empty result set:
 * the caller always receives a `Some`-wrapped sequence, never `None`.
 *
 * @param action the (possibly absent) rows to convert
 * @param toDTO  implicit mapping from a row type `S` to its DTO type `T`
 * @return `Some` of the converted rows, or `Some(Seq.empty)` when absent
 */
def objectsFrom[U <: Option[Seq[S]]](action: U)(implicit toDTO: S => T) : Option[Seq[T]] = {
  Some(action.fold(Seq.empty[T])(_.map(toDTO)))
}
def rowsFrom[V <: Option[Seq[T]]](action: V)(implicit toDAO: T => S) : Option[Seq[S]] = {
@ASRagab
ASRagab / build.sbt
Last active February 22, 2016 02:48
Sample build.sbt for a cross-compiled Scala.js project with ScalaTest
// Root build definition for a cross-compiled Scala.js project.
name := "scalajs root project"
scalaVersion := "2.11.7"
// Aggregate-only root: builds/tests the JVM and JS sub-projects
// (crossedJVM / crossedJS, defined elsewhere in the build) but
// publishes no artifact of its own.
lazy val root = project.in(file(".")).
aggregate(crossedJVM, crossedJS).
settings(
// Disable publishing for the root aggregator.
publish := {},
publishLocal := {}
)
@ASRagab
ASRagab / ConnectionLevel.scala
Created February 24, 2016 00:45
Simple connection-level Akka HTTP example
package httpclient
import akka.actor.ActorSystem
import akka.http.scaladsl.Http
import akka.http.scaladsl.marshallers.sprayjson.SprayJsonSupport
import akka.http.scaladsl.model.StatusCodes._
import akka.http.scaladsl.model.{HttpRequest, HttpResponse}
import akka.http.scaladsl.unmarshalling.Unmarshal
import akka.stream.ActorMaterializer
import akka.stream.scaladsl._
[error] Exception in thread "main" java.lang.ExceptionInInitializerError
[error] at org.nd4j.nativeblas.NativeOpsHolder.<init>(NativeOpsHolder.java:14)
[error] at org.nd4j.nativeblas.NativeOpsHolder.<clinit>(NativeOpsHolder.java:9)
[error] at org.nd4j.linalg.jcublas.ops.executioner.JCudaExecutioner.<clinit>(JCudaExecutioner.java:65)
[error] at java.lang.Class.forName0(Native Method)
[error] at java.lang.Class.forName(Class.java:264)
[error] at org.nd4j.linalg.factory.Nd4j.initWithBackend(Nd4j.java:5202)
[error] at org.nd4j.linalg.factory.Nd4j.initContext(Nd4j.java:5149)
[error] at org.nd4j.linalg.factory.Nd4j.<clinit>(Nd4j.java:169)
[error] at Main$.delayedEndpoint$Main$1(Main.scala:13)
@ASRagab
ASRagab / build.sbt
Last active August 9, 2016 05:58
DeepLearning4J
lazy val nd4jVersion = SettingKey[String]("nd4jVersion")
lazy val root = (project in file(".")).settings(
scalaVersion := "2.11.8",
name := "nd4sTest",
version := "0.5.0",
organization := "org.nd4j",
resolvers += "Local Maven Repository" at "file:///" + Path.userHome.absolutePath + "/.m2/repository",
nd4jVersion := "0.5.0",
libraryDependencies ++= Seq(
@ASRagab
ASRagab / build.sbt
Last active November 20, 2016 06:17
Correct dependencies for Scala + Processing 3, including native libraries (allows the OpenGL device to be found for the P2D and P3D rendering modes)
// Build settings for a Scala + Processing 3 sketch.
name := "GeneticAlgorithm"
version := "1.0"
scalaVersion := "2.11.8"
// Pin Processing core, its JOGL (OpenGL native) backend, and the video
// library to mutually compatible versions, referenced by the dependency
// list further down.
val processingVersion = "3.2.3"
val joglVersion = "2.3.2"
val processingVideoVersion = "3.0.2"
// A duck at grid position (x, y), facing `direction`.
case class Duck(x: Int, y: Int, direction: String)
// Pond dimensions, parsed from the first line of standard input in getInput.
case class Pond(x: Int, y: Int)
object GoldenPond extends App {
def getInput: (Pond, Seq[(Duck, List[String])]) = {
val scanner = new java.util.Scanner(System.in)
val pond = scanner.nextLine.split(" ").toList.map(_.toInt).take(2) match {
case Nil => throw new Exception("Not enough coordinates for pond")
case List(a, b) => Pond(a, b)
@ASRagab
ASRagab / build.sbt
Created April 30, 2017 04:48
build.sbt for Spark 2.1.0 on Scala 2.11.11 that lets sbt-assembly merge properly and enables 'sbt run'
name := "spark-mllib-test"
version := "1.0"
// Spark 2.1.0 artifacts are published for Scala 2.11.
scalaVersion := "2.11.11"
// Single source of truth for all Spark module versions below.
val sparkVersion = "2.1.0"
libraryDependencies ++= Seq (
"org.apache.spark" %% "spark-core" % sparkVersion % "provided",