-
-
Save lo/87d2ddff9d7d8d0e36a3e27e5ac60a01 to your computer and use it in GitHub Desktop.
Converts an Array[org.apache.spark.sql.Row] into an Array[Map[String, Any]] keyed by field name, recursing into nested structs and arrays.
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
import org.apache.spark.sql.catalyst.expressions.Row | |
import org.apache.spark.sql.catalyst.types._ | |
import scala.collection.mutable.{ArrayBuffer} | |
/**
 * Converts Spark SQL [[Row]] values into plain Scala `Map[String, Any]`
 * structures, using the accompanying [[StructType]] schema to name each
 * field and to recurse into nested structs and array columns.
 *
 * NOTE(review): written against the Spark 1.x catalyst API, where `Row`
 * extended `Seq[Any]` (hence `schema.zip(r)` below) and array columns were
 * materialized as `ArrayBuffer` — confirm before porting to newer Spark.
 */
object SparkRowFormatter {

  /**
   * Formats every row in `rowArr` as a map from field name to value,
   * according to `schema`.
   *
   * @param rowArr rows to convert
   * @param schema schema describing the fields of each row
   * @return one `Map[String, Any]` per input row, in the same order
   */
  def formatRowsWithSchema(rowArr: Array[Row], schema: StructType): Array[Map[String, Any]] = {
    rowArr.map(r => formatStruct(schema.fields, r))
  }

  /**
   * Pairs each schema field with the positionally corresponding row value
   * and merges the per-field single-entry maps into one map for the struct.
   * Relies on the i-th row value lining up with the i-th schema field.
   */
  private def formatStruct(schema: Seq[StructField], r: Row): Map[String, Any] = {
    val paired = schema.zip(r)
    paired.foldLeft(Map[String, Any]())((s, p) => s ++ formatItem(p))
  }

  /**
   * Formats one (field, value) pair as a single-entry map keyed by the
   * field's name. Nulls pass through unchanged; nested structs and arrays
   * recurse into [[formatStruct]] / [[formatArray]]; scalars are kept as-is.
   */
  private def formatItem(p: (StructField, Any)): Map[String, Any] = {
    p match {
      case (sf, a) =>
        sf.dataType match {
          case ArrayType(et, _) =>
            // Array columns arrive as ArrayBuffer in this Spark version.
            Map(sf.name -> (if (a == null) a else formatArray(et, a.asInstanceOf[ArrayBuffer[Any]])))
          case StructType(s) =>
            Map(sf.name -> (if (a == null) a else formatStruct(s, a.asInstanceOf[Row])))
          case _ => Map(sf.name -> a)
        }
    }
  }

  /**
   * Formats each array element according to the element type `et`,
   * recursing into nested structs and nested arrays. Arrays of scalar
   * types are returned untouched.
   */
  private def formatArray(et: DataType, arr: ArrayBuffer[Any]): Seq[Any] = {
    et match {
      case StructType(s) => arr.map(e => formatStruct(s, e.asInstanceOf[Row]))
      case ArrayType(t, _) =>
        arr.map(e => formatArray(t, e.asInstanceOf[ArrayBuffer[Any]]))
      case _ => arr
    }
  }
}
Sign up for free
to join this conversation on GitHub.
Already have an account?
Sign in to comment