Yuhao Yang (hhbyyh) - Sunnyvale, CA, U.S.
import time
import tensorflow as tf
from bigdl.optim.optimizer import *
from zoo import init_nncontext
from zoo.pipeline.api.net import TFOptimizer, TFDataset

num_classes = 10

def get_deep_cnn_model():
    # Architecture from: https://github.com/keras-team/keras/blob/master/examples/cifar10_cnn.py
    # The gist preview truncates here; the body below is a compact
    # reconstruction of that reference model, not the gist's original code.
    model = tf.keras.Sequential([
        tf.keras.layers.Conv2D(32, (3, 3), padding='same', activation='relu'),
        tf.keras.layers.MaxPooling2D(),
        tf.keras.layers.Flatten(),
        tf.keras.layers.Dense(num_classes, activation='softmax')])
    return model
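# Hypothetical continuation for context, not from the gist: how TFDataset and
# TFOptimizer are typically wired together to train a model on Spark. The API
# names follow early Analytics Zoo releases and may differ across versions;
# train_rdd and all parameter values below are illustrative assumptions.
sc = init_nncontext("cifar10_deep_cnn")
# train_rdd: an RDD of (image ndarray [32, 32, 3], integer label) pairs,
# prepared elsewhere; its construction is omitted here.
dataset = TFDataset.from_rdd(train_rdd,
                             names=["features", "labels"],
                             shapes=[[32, 32, 3], []],
                             types=[tf.float32, tf.int32],
                             batch_size=256)
images, labels = dataset.tensors
logits = get_deep_cnn_model()(images)
loss = tf.losses.sparse_softmax_cross_entropy(labels=labels, logits=logits)
optimizer = TFOptimizer(loss, Adam(1e-3))      # Adam comes from bigdl.optim.optimizer
optimizer.optimize(end_trigger=MaxEpoch(10))   # MaxEpoch likewise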
hhbyyh / Smote on Spark (created June 29, 2017 00:17)
package org.apache.spark.ml.feature

import org.apache.spark.ml.linalg.BLAS.axpy
import org.apache.spark.ml.linalg._
import org.apache.spark.rdd.RDD
import org.apache.spark.sql.SparkSession

import scala.util.Random

/**
 * SMOTE (Synthetic Minority Over-sampling Technique) on Spark.
 * (The gist preview truncates at this doc comment; the summary line above is
 * reconstructed from the gist title.)
 */
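// The preview cuts off here. As a rough illustration only, not the gist's
// actual implementation: SMOTE creates synthetic minority-class samples by
// interpolating between a minority point and one of its k nearest minority
// neighbors: synthetic = x + r * (neighbor - x), with r uniform in [0, 1).
// The sketch below assumes the same package placement, which the gist uses
// so that the Spark-internal ml.linalg.BLAS.axpy is accessible.
object SmoteSketch {
  // Hypothetical helper: one synthetic point between x and a chosen neighbor.
  def synthesize(x: Vector, neighbor: Vector, rng: Random): Vector = {
    val diff = neighbor.copy.toDense            // diff = neighbor
    axpy(-1.0, x, diff)                         // diff = neighbor - x
    val synthetic = x.copy.toDense              // synthetic = x
    axpy(rng.nextDouble(), diff, synthetic)     // synthetic = x + r * (neighbor - x)
    synthetic
  }
}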
/**
 * Created by yuhao on 3/19/17.
 */
object InputProcess {

  def main(args: Array[String]): Unit = {
    // input for model: 13 (row) * 398 (columns)
    val input = "[[ 43.]\n [ 51.]\n [ 67.]\n [ 50.]\n [ 53.]\n [ 49.]\n [ 57.]\n [ 55.]\n [ 56.]\n [ 51.]\n [ 54.]\n [ 50.]\n [ 58.]\n [ 52.]\n [ 52.]\n [ 49.]\n [ 52.]\n [ 43.]\n [ 33.]\n [ 36.]\n [ 34.]\n [ 28.]\n [ 26.]\n [ 22.]\n [ 19.]\n [ 47.]\n [ 37.]\n [ 12.]\n [ 14.]\n [ 13.]\n [ 15.]\n [ 18.]\n [ 41.]\n [ 44.]\n [ 61.]\n [ 65.]\n [ 83.]\n [ 98.]\n [ 87.]\n [ 83.]\n [ 85.]\n [ 77.]\n [ 76.]\n [ 91.]\n [ 102.]\n [ 88.]\n [ 85.]\n [ 78.]\n [ 73.]\n [ 75.]\n [ 82.]\n [ 83.]\n [ 90.]\n [ 90.]\n [ 98.]\n [ 97.]\n [ 90.]\n [ 98.]\n [ 98.]\n [ 85.]\n [ 89.]\n [ 90.]\n [ 79.]\n [ 85.]\n [ 68.]\n [ 72.]\n [ 57.]\n [ 43.]\n [ 62.]\n [ 57.]\n [ 42.]\n [ 42.]\n [ 77.]\n [ 80.]\n [ 102.]\n [ 127.]\n [ 131.]\n [ 117.]\n [ 106.]\n [ 116.]\n [ 104.]\n [ 106.]\n [ 95.]\n [ 81.]\n [ 72.]\n [
import org.apache.log4j.{Level, Logger}
import org.apache.spark.ml.fpm.FPGrowth
import org.apache.spark.sql.SparkSession

object KosarakTest {
  def main(args: Array[String]): Unit = {
    Logger.getLogger("org").setLevel(Level.WARN)
/*
 * Licensed to the Apache Software Foundation (ASF) under one or more
 * contributor license agreements.  See the NOTICE file distributed with
 * this work for additional information regarding copyright ownership.
 * The ASF licenses this file to You under the Apache License, Version 2.0
 * (the "License"); you may not use this file except in compliance with
 * the License.  You may obtain a copy of the License at
 *
 *    http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */