Skip to content

Instantly share code, notes, and snippets.

@akonopko
Created June 23, 2016 07:04
Show Gist options
  • Save akonopko/495f2ece5398700e000ec63dafb61c2b to your computer and use it in GitHub Desktop.
import com.memsql.spark.connector.MemSQLContext
import com.memsql.spark.connector.dataframe.JsonValue
import org.apache.log4j.{Level, Logger}
import org.apache.spark.sql.Row
import org.apache.spark.sql.types._
import org.apache.spark.{SparkConf, SparkContext}
/** Minimal reproduction: applies a UDF returning a MemSQL [[JsonValue]] to a
  * DataFrame column, then forces evaluation with `first()`.
  *
  * Runs a local 4-core Spark application against a MemSQL instance expected at
  * 127.0.0.1:3306 (user `root`).
  */
object TestError {

  def main(args: Array[String]): Unit = {
    // Silence Spark/Jetty logging noise so the reproduced error is visible.
    Logger.getLogger("org.apache.spark").setLevel(Level.OFF)
    Logger.getLogger("org.eclipse.jetty.server").setLevel(Level.OFF)

    val conf = new SparkConf()
      .setAppName("Test")
      .setMaster("local[4]")
      .set("spark.executor.memory", "2g")
      .set("memsql.host", "127.0.0.1")
      .set("memsql.port", "3306")
      .set("memsql.user", "root")

    val sc = new SparkContext(conf)
    try {
      val sqlContext = new MemSQLContext(sc)
      import org.apache.spark.sql.functions._

      // Two-row, two-string-column test DataFrame.
      val testRdd = sc.parallelize(Array(Row("foo", "bar"), Row("baz", "qux")))
      // NOTE: this is a schema (StructType), not an RDD — renamed accordingly.
      val schema = StructType(Seq(
        StructField("col1", StringType, false),
        StructField("col2", StringType, false)
      ))
      val df = sqlContext.createDataFrame(testRdd, schema)

      /** Stub converter: ignores its input and always yields an empty JSON
        * array value. `Any` is kept so the UDF accepts any column type.
        */
      def convertMapToJsonValue(map: Any) = {
        new JsonValue("[]")
      }

      val mapToJson = udf(convertMapToJsonValue _)

      // Replace col1 with the UDF result and force evaluation.
      val dfConvertedMacros = df.withColumn("col1", mapToJson(df("col1")))
      dfConvertedMacros.first()
    } finally {
      // Always release the SparkContext, even if the job above fails.
      sc.stop()
    }
  }
}
Sign up for free to join this conversation on GitHub. Already have an account? Sign in to comment