Created
June 23, 2016 07:04
-
-
Save akonopko/495f2ece5398700e000ec63dafb61c2b to your computer and use it in GitHub Desktop.
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
import com.memsql.spark.connector.MemSQLContext | |
import com.memsql.spark.connector.dataframe.JsonValue | |
import org.apache.log4j.{Level, Logger} | |
import org.apache.spark.sql.Row | |
import org.apache.spark.sql.types._ | |
import org.apache.spark.{SparkConf, SparkContext} | |
/**
 * Minimal reproduction app: writes a two-row DataFrame through a UDF that
 * wraps a column value in a MemSQL `JsonValue`, then materializes the first
 * row. Intended to surface the connector error this gist documents.
 */
object TestError {

  def main(args: Array[String]): Unit = {
    // Silence Spark/Jetty logging so the reproduced error is easy to spot.
    Logger.getLogger("org.apache.spark").setLevel(Level.OFF)
    Logger.getLogger("org.eclipse.jetty.server").setLevel(Level.OFF)

    val conf = new SparkConf()
      .setAppName("Test")
      .setMaster("local[4]")
      .set("spark.executor.memory", "2g")
      .set("memsql.host", "127.0.0.1")
      .set("memsql.port", "3306")
      .set("memsql.user", "root")

    val sc = new SparkContext(conf)
    try {
      val sqlContext = new MemSQLContext(sc)
      import org.apache.spark.sql.functions._

      // Two string columns, both non-nullable.
      val testRdd = sc.parallelize(Array(Row("foo", "bar"), Row("baz", "qux")))
      val schemaRdd = StructType(Seq(
        StructField("col1", StringType, false),
        StructField("col2", StringType, false)))
      val df = sqlContext.createDataFrame(testRdd, schemaRdd)

      // Ignores its input and always produces an empty JSON array; the point
      // is only that the UDF's return type is the connector's JsonValue.
      def convertMapToJsonValue(map: Any) = {
        new JsonValue("[]")
      }
      val mapToJson = udf(convertMapToJsonValue _)

      // Replace col1 with the JsonValue-typed column and force evaluation.
      val dfConvertedMacros = df.withColumn("col1", mapToJson(df("col1")))
      dfConvertedMacros.first()
    } finally {
      // Always release the local Spark context, even if the repro throws.
      sc.stop()
    }
  }
}
Sign up for free
to join this conversation on GitHub.
Already have an account?
Sign in to comment