Skip to content

Instantly share code, notes, and snippets.

View markncooper's full-sized avatar

Mark Cooper markncooper

  • Demandbase
  • San Francisco, CA
View GitHub Profile
// ***************************************************************************
// MESH CONFIGURATION — "explicit master" variant of this sketch
// ***************************************************************************
#include <painlessMesh.h>
#include <ArduinoJson.h> // For message formatting
// NOTE(review): the define below is ACTIVE in this copy — flash this variant to
// exactly ONE device; that device becomes the explicit master.
#define IS_EXPLICIT_MASTER true // Active here: this device is the explicit master
// If commented out, the mesh root node will act as master.
#define MESH_PREFIX "LED_Sync_Mesh" // Must be unique for your mesh network
// ***************************************************************************
// MESH CONFIGURATION — default variant (IS_EXPLICIT_MASTER left commented out)
// ***************************************************************************
#include <painlessMesh.h>
#include <ArduinoJson.h> // For message formatting
// #define IS_EXPLICIT_MASTER true // <<< UNCOMMENT AND SET TO 'true' FOR ONE DEVICE TO BE MASTER
// If commented out, the mesh root node will act as master.
#define MESH_PREFIX "LED_Sync_Mesh" // Must be unique for your mesh network
# GNU screen configuration fragment.
# Log each window's output to screenlogs/log.<n>, where %n expands to the
# window number; deflog turns logging on for all new windows.
logfile screenlogs/log.%n
deflog on
# Keep 10000 lines of scrollback per window.
defscrollback 10000
# Create an initial window numbered 1 at startup.
screen 1
#other settings
# Always show the status line on the terminal's last line.
hardstatus alwayslastline
# Status-line format: [ hostname ][ window list, current window highlighted ][ date time ].
hardstatus string '%{= kG}[ %{G}%H %{g}][%= %{= kw}%?%-Lw%?%{r}(%{W}%n*%f%t%?(%u)%?%{r})%{w}%?%+Lw%?%?%= %{g}][%{B} %d/%m %{W}%c %{g}]'
# No visual bell; detach (instead of terminating) when the terminal hangs up.
vbell off
autodetach on
#Binds
#!/usr/bin/python
# NOTE(review): shebang targets bare "python" (often Python 2 on older
# systems) — confirm the intended interpreter for this script.
import time
import os
import argparse

# Command-line interface: an output root directory and an image count are
# both mandatory.
parser = argparse.ArgumentParser()
parser.add_argument('--outroot', '-o', dest='outroot', required=True,
                    help='Root directory where output will be written')
# type=int so args.num_images arrives as a number; the original parsed it as
# a raw string, forcing every consumer to cast it before use.
parser.add_argument('--num_images', '-n', dest='num_images', required=True,
                    type=int, help='Number of images to process/generate')
args = parser.parse_args()
import org.json4s._
import org.json4s.jackson.JsonMethods._
import scala.io.Source
import scala.xml.XML
/** A table row: an ordered sequence of labelled data cells. */
case class Row(dataCells: Seq[Cell])
/** A single labelled value within a [[Row]]. */
case class Cell(label: String, value: String)
// Mixin that holds a SparkSession/SparkContext for ScalaTest suites.
// NOTE(review): this excerpt is truncated — the trait's closing brace and any
// beforeAll/afterAll lifecycle code are not visible here; confirm setup and
// teardown in the full file.
trait SimpleSparkRunner extends BeforeAndAfterAll {
// Self-type: this trait can only be mixed into a ScalaTest Suite.
this: Suite =>
// Mutable holders — presumably assigned in beforeAll (not visible in this
// excerpt), hence the scalastyle var.field opt-outs.
var spark: SparkSession = _ // scalastyle:off var.field
var sc: SparkContext = _ // scalastyle:off var.field
// Thread count for the local Spark master; override in subclasses as needed.
def threads: Int = 1
// Test spec that mixes in SimpleSparkRunner to get a local Spark context.
// NOTE(review): this excerpt is truncated — the HOCON string and the class
// body continue beyond the visible lines.
class MySimpleJobWrapper extends FlatSpec
with SimpleSparkRunner {
// Inline HOCON configuration for Google Cloud settings (Typesafe Config).
val appConfig = ConfigFactory.parseString(
"""
| google-cloud-config {
| project-id = "my-project"
| dataset-id = "my-dataset"
| tempbucket-id = "trolo-bucket"
Exception in thread "main" java.lang.NoClassDefFoundError: org/apache/spark/scheduler/SparkListenerInterface
at java.lang.Class.forName0(Native Method)
at java.lang.Class.forName(Class.java:264)
at com.intellij.rt.execution.application.AppMain.main(AppMain.java:123)
Caused by: java.lang.ClassNotFoundException: org.apache.spark.scheduler.SparkListenerInterface
at java.net.URLClassLoader.findClass(URLClassLoader.java:381)
at java.lang.ClassLoader.loadClass(ClassLoader.java:424)
at sun.misc.Launcher$AppClassLoader.loadClass(Launcher.java:331)
at java.lang.ClassLoader.loadClass(ClassLoader.java:357)
... 3 more
-- BigQuery legacy SQL: MSEC_TO_TIMESTAMP converts a millisecond Unix-epoch
-- value to a TIMESTAMP (the standard-SQL equivalent is TIMESTAMP_MILLIS).
-- Emits the two converted columns alongside every original column (*).
SELECT
MSEC_TO_TIMESTAMP(created_at) AS created_at_dt,
MSEC_TO_TIMESTAMP(updated_at) AS updated_at_dt,
*
FROM brigade_prod.mysql_brigade_profiles
package practice
import scala.collection.mutable
import scala.util.Random
/** A directed flight segment from `start` to `end` (presumably location codes — confirm against usage in Reassembly). */
case class Flight(start: String, end: String)
object Reassembly {
def main(args: Array[String]): Unit = {