This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
# Export a TensorFlow model in the SavedModel format (TF 1.x builder API).
# NOTE(review): removed the trailing "| |" gutter artifacts left over from a
# web-page copy/paste — they made every line a syntax error.
import tensorflow as tf
from tensorflow.python.saved_model import signature_constants
from tensorflow.python.saved_model import tag_constants
import argparse

# 'models/1': the trailing "1" is the model version number expected by
# TensorFlow Serving's directory layout.
export_dir = 'models/1'
builder = tf.saved_model.builder.SavedModelBuilder(export_dir)
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
# Install the Confluent 3.1 apt repository and start the Confluent stack.
# NOTE(review): removed the trailing "| |" gutter artifacts from the scrape;
# they would have been parsed as broken pipelines by the shell.
wget -qO - http://packages.confluent.io/deb/3.1/archive.key | sudo apt-key add -
sudo add-apt-repository "deb [arch=amd64] http://packages.confluent.io/deb/3.1 stable main"
# Starting Confluent with
# (replace the placeholder below with your actual installation path)
cd <path to your Confluent Installation>/bin/
./confluent start
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
// Helper object building the Kafka consumer configuration shared by the
// streaming jobs in this project.
// NOTE(review): this snippet is truncated — the Map, method, and object are
// never closed in the visible source, and the trailing "| |" characters are
// copy/paste artifacts from a web-page line gutter. Left byte-identical
// because the missing tail cannot be reconstructed safely from here.
import org.apache.kafka.clients.consumer.ConsumerConfig | |
import org.apache.kafka.common.serialization.StringDeserializer | |
object KafkaSupport { | |
// Builds a String->String consumer config keyed by the standard
// ConsumerConfig constants; brokers is the bootstrap-server list,
// groupID the consumer-group id.
def getKafkaConsumerConfig(brokers: String, groupID: String): Map[String, String] = { | |
Map[String, String]( | |
ConsumerConfig.BOOTSTRAP_SERVERS_CONFIG -> brokers, | |
ConsumerConfig.GROUP_ID_CONFIG -> groupID, |
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
// Entry point wiring a local SparkSession to a 10-second-batch
// StreamingContext for the embedding stream.
// NOTE(review): snippet is truncated — the object is never closed in the
// visible source, and the trailing "| |" characters are scrape artifacts.
// Also note the `App` trait is discouraged for non-trivial entry points
// (initialization-order pitfalls); consider a `main` method when the full
// file is available.
object SparkStreamingOnEmbeddings extends App { | |
// local[*]: use all available cores on this machine.
val spark = SparkSession.builder | |
.master("local[*]") | |
.appName("StreamEmbeddings") | |
.getOrCreate() | |
val sc = spark.sparkContext | |
// 10-second micro-batch interval for the DStream API.
val ssc = new StreamingContext(sc, Seconds(10)) | |
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
// Given an incoming message to train a Streaming Machine Learning Model:
// publish `max` numbered signature records to the training topic.
// (`max` and `streams` are defined elsewhere in this file — not visible here.)
val signaturesToTrainModelsOn = 1 to max
val trainTopic = "signatures"
signaturesToTrainModelsOn.foreach { n =>
  streams.send(new ProducerRecord(trainTopic, "sigs", s"$n"))
}

// Given an incoming message to predict on: draw a pseudo-random value.
// Fixed: `maths.random()` is not a Scala identifier — the standard library
// function is `scala.math.random()` (available unqualified as `math.random()`).
val signaturesToPredictOn = math.random()
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
import org.apache.kafka.clients.producer.KafkaProducer
// Fixed import typo: the Kafka Streams builder class is `StreamsBuilder`;
// there is no `StreamBuilder` in org.apache.kafka.streams.
import org.apache.kafka.streams.{StreamsBuilder, StreamsConfig}
import java.util.Properties

// Kafka Streams configuration for the embedding stream application,
// pointing at a single local broker.
val config: Properties = {
  val p = new Properties()
  p.put(StreamsConfig.APPLICATION_ID_CONFIG, "simple-confluent-stream")
  p.put(StreamsConfig.CLIENT_ID_CONFIG, "simple-confluent-stream-client")
  p.put(StreamsConfig.BOOTSTRAP_SERVERS_CONFIG, "localhost:9092")
  // NOTE(review): the original snippet was truncated here — serde defaults
  // may have been set in the missing lines; confirm against the full source.
  p
}
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
// Imports for the Spark Streaming + Kafka 0.10 integration job using
// streaming k-means. Trailing "| |" characters are scrape artifacts.
package com.sparkserver.sparkstream | |
import org.apache.spark.mllib.clustering.StreamingKMeansModel | |
import org.apache.spark.streaming.{ Seconds, StreamingContext } | |
import org.apache.spark.streaming.kafka010.LocationStrategies.PreferConsistent | |
import org.apache.spark.streaming.kafka010.ConsumerStrategies.Subscribe | |
//import org.apache.spark.{ SparkConf, SparkContext } | |
import org.apache.spark.streaming.kafka010._ | |
// NOTE(review): sun.misc.BASE64Decoder is a JDK-internal API (removed in
// JDK 9+); prefer java.util.Base64, which the decode helper in this file
// already uses.
import sun.misc.BASE64Decoder | |
import org.apache.spark.mllib.clustering.StreamingKMeans
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
/**
 * Creates a Kafka topic with 6 partitions and replication factor 3,
 * blocking until the broker confirms creation. A topic that already
 * exists is treated as success.
 *
 * Fixes over the original snippet:
 *  - the inner `val topic` shadowed the `topic` parameter and was then
 *    passed as its own map key; `CreateTopicsResult.values()` is keyed
 *    by the topic *name*.
 *  - `case e: InterruptedException with ExecutionException` is an
 *    intersection pattern that only matches an exception which is both
 *    types at once — i.e. never; the intent is either-or, which needs
 *    an `|` alternative pattern.
 *  - a nested `case` inside the handler body was a syntax error; a
 *    TopicExistsException arrives as the *cause* of an
 *    ExecutionException and must be checked via getCause.
 */
def createTopic(topic: String, client: AdminClient) = {
  val newTopic = new NewTopic(topic, 6, 3.toShort)
  try {
    // get() blocks until the creation future completes.
    client.createTopics(List(newTopic).asJavaCollection).values().get(topic).get()
  } catch {
    case e @ (_: InterruptedException | _: ExecutionException) =>
      // An already-existing topic is benign; anything else is fatal here.
      if (!e.getCause.isInstanceOf[TopicExistsException])
        throw new RuntimeException(e.getMessage, e)
  }
}
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
// Imports for the PMML-based model server (JPMML evaluator stack).
// Trailing "| |" characters are scrape artifacts from a web-page gutter.
package com.mlserver.reactivefacenet.modelserver | |
import java.io.InputStream | |
import com.mlserver.reactivefacenet.utility.ResourceUtils | |
import org.dmg.pmml.{FieldName, PMML} | |
import org.jpmml.evaluator.{Computable, FieldValue, ModelEvaluatorFactory} | |
import org.jpmml.model.PMMLUtil | |
import org.jpmml.evaluator.visitors._ | |
// NOTE(review): sun.misc.BASE64Decoder is a JDK-internal API (removed in
// JDK 9+); prefer java.util.Base64.
import sun.misc.BASE64Decoder
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
/**
 * Decodes a Base64-encoded image string and writes the decoded image to
 * "Image.jpeg" in the current working directory.
 *
 * @param src Base64 text containing the raw bytes of an image.
 * @throws IllegalArgumentException if `src` is not valid Base64 or does
 *         not decode to a readable image.
 */
def decodeImageString(src: String): Unit = {
  // Decode Image String from base64 into an in-memory stream.
  val imageBytes = new ByteArrayInputStream(java.util.Base64.getDecoder.decode(src))
  val image = ImageIO.read(imageBytes)
  // ImageIO.read returns null when the bytes are not a decodable image;
  // fail fast with a clear message (ImageIO.write would otherwise throw
  // the same IllegalArgumentException with an opaque "image == null!").
  require(image != null, "src does not contain a decodable image")
  val outputFile = new File("Image.jpeg")
  ImageIO.write(image, "jpeg", outputFile)
}