Skip to content

Instantly share code, notes, and snippets.

@dacr
Last active April 2, 2023 10:10
Show Gist options
  • Star 0 You must be signed in to star a gist
  • Fork 0 You must be signed in to fork a gist
  • Save dacr/95617ea40df7fbb687e27aa124807c1f to your computer and use it in GitHub Desktop.
publish raspberry pi 1-wire DS18B20 temperature sensors to a remote secured kafka / published by https://github.com/dacr/code-examples-manager #b89f39a0-0eee-4361-bdc5-23d814cd8e37/b4e87b63b79c360a6e031611f2ebfd601c44892d
#!/usr/bin/env amm
// summary : publish raspberry pi 1-wire DS18B20 temperature sensors to a remote secured kafka
// keywords : kafka, raspberrypi, sensors, temperature, 1wire, domotic, DS18B20
// publish : gist
// authors : David Crosson
// license : Apache NON-AI License Version 2.0 (https://raw.githubusercontent.com/non-ai-licenses/non-ai-licenses/main/NON-AI-APACHE2)
// id : b89f39a0-0eee-4361-bdc5-23d814cd8e37
// created-on : 2020-04-23T21:27:19Z
// managed-by : https://github.com/dacr/code-examples-manager
// execution : scala ammonite script (http://ammonite.io/) - run as follow 'amm scriptname.sc'
import $ivy.`org.apache.kafka:kafka-clients:2.5.0`
import $ivy.`org.json4s::json4s-native:3.6.7`
import $ivy.`org.json4s::json4s-ext:3.6.7`
import $ivy.`com.github.pathikrit::better-files:3.8.0`
import java.io.FileInputStream
import org.apache.kafka.clients.producer.{ProducerConfig, KafkaProducer, ProducerRecord}
import org.json4s.DefaultFormats
import org.json4s.native.Serialization.{write}
import java.util.Date
import scala.util.Properties.envOrElse
import better.files._
// json4s formats for `write(...)` below; `lossless` keeps full millisecond
// precision when serializing java.util.Date timestamps.
implicit val formats = DefaultFormats.lossless
// Kafka producer configured from an external properties file (TLS/SASL secrets
// live there, not in this script), with broker address and serializers set here.
val producer = {
import ProducerConfig._
val props = new java.util.Properties()
// KAFKA_CONNECT_PROPERTIES_FILE overrides the default "connect.properties";
// NOTE(review): the FileInputStream is never closed — acceptable for a
// one-shot script, but a leak if this block is reused elsewhere.
props.load(new FileInputStream(envOrElse("KAFKA_CONNECT_PROPERTIES_FILE","connect.properties")))
// Set after load() so the environment variable wins over the properties file.
props.setProperty(BOOTSTRAP_SERVERS_CONFIG, envOrElse("KAFKA_REMOTE_BROKER", "127.0.0.1:9093"))
// Both key and value are sent as plain strings (value is a JSON document).
props.setProperty(KEY_SERIALIZER_CLASS_CONFIG, "org.apache.kafka.common.serialization.StringSerializer")
props.setProperty(VALUE_SERIALIZER_CLASS_CONFIG, "org.apache.kafka.common.serialization.StringSerializer")
new KafkaProducer[String, String](props)
}
/** One sensor reading, serialized to JSON and published to Kafka.
  *
  * @param timestamp wall-clock time the reading was taken
  * @param place     human-readable location label of the sensor
  * @param id        1-wire device id (directory name under /sys/bus/w1/devices, e.g. "28-...")
  * @param sensor    kind of measurement (always "temperature" in this script)
  * @param value     measured value in degrees Celsius
  */
final case class TemperatureMeasure(
timestamp:Date,
place:String,
id:String,
sensor:String,
value:Double,
)
// Linux 1-wire sysfs directory; each DS18B20 appears as a "28-*" symlink.
val w1SensorDir = file"/sys/bus/w1/devices"
// DOTALL regex: extracts the "t=<milli-degrees-Celsius>" field from w1_slave content.
val TempRE = """(?s).*t=([+-]?\d+).*""".r
// Poll forever; try/finally guarantees the producer is flushed and closed if the
// loop ever dies on an exception (the bare `producer.close()` after `while(true)`
// in the original was unreachable dead code).
try {
  while (true) {
    // Re-scan each cycle so sensors hot-plugged after startup are picked up.
    val sensorFiles =
      w1SensorDir
        .collectChildren(f => f.isSymbolicLink && f.name.startsWith("28"))
        .toList
        .map(_ / "w1_slave")
        .filter(_.exists)
    sensorFiles.foreach { inputFile =>
      val sensorInfo = inputFile.contentAsString
      sensorInfo match {
        case TempRE(tempAsString) =>
          // Parent directory name is the unique 1-wire device id.
          val id = inputFile.parent.name
          // Kernel reports milli-degrees Celsius; convert to degrees.
          val temperatureMeasure = TemperatureMeasure(new Date(), "grange", id, "temperature", tempAsString.toDouble / 1000d)
          // NOTE(review): record key is the empty string, so all readings land in
          // partitions without per-sensor ordering; consider keying by `id` — not
          // changed here to preserve existing partitioning behavior.
          val record = new ProducerRecord("lerocher-sensors", "", write(temperatureMeasure))
          //println(temperatureMeasure)
          producer.send(record)
        case _ =>
          // CRC failure or unexpected w1_slave format: log and skip this sensor.
          System.err.println(s"Can't parse $sensorInfo")
      }
    }
    // Sampling period: one sweep every 30 seconds.
    Thread.sleep(30000L)
  }
} finally {
  producer.close()
}
Sign up for free to join this conversation on GitHub. Already have an account? Sign in to comment