
@girisandeep
Last active February 10, 2023 15:59
Create a custom accumulator using Scala for Spark 2.x
// Holds the two running totals; must be Serializable so instances can be
// shipped to the executors inside task closures.
class MyComplex(var x: Int, var y: Int) extends Serializable {

  def reset(): Unit = {
    x = 0
    y = 0
  }

  def add(p: MyComplex): MyComplex = {
    x = x + p.x
    y = y + p.y
    this
  }
}
import org.apache.spark.util.AccumulatorV2

// The accumulator should be a class, not an object: Spark calls copy() to give
// each task its own instance and merges the per-task results back on the driver.
class ComplexAccumulatorV2 extends AccumulatorV2[MyComplex, MyComplex] {

  private val myc: MyComplex = new MyComplex(0, 0)

  def reset(): Unit = {
    myc.reset()
  }

  def add(v: MyComplex): Unit = {
    myc.add(v)
  }

  def value: MyComplex = myc

  def isZero: Boolean = myc.x == 0 && myc.y == 0

  // Return a fresh accumulator carrying the current state; returning `this`
  // (or a shared singleton) would make every copy share one instance.
  def copy(): AccumulatorV2[MyComplex, MyComplex] = {
    val acc = new ComplexAccumulatorV2
    acc.myc.add(myc)
    acc
  }

  def merge(other: AccumulatorV2[MyComplex, MyComplex]): Unit = {
    myc.add(other.value)
  }
}
// Register a single instance with the SparkContext before using it in tasks.
val ca = new ComplexAccumulatorV2
sc.register(ca, "mycomplexacc")

// Using the custom accumulator. foreach is an action, so the updates run
// immediately; a transformation such as map is lazy and would not touch the
// accumulator until some action is invoked on the resulting RDD.
val rdd = sc.parallelize(1 to 10)
rdd.foreach(x => ca.add(new MyComplex(x, x)))

ca.value.x // 55 = 1 + 2 + ... + 10
ca.value.y // 55
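
A small reuse sketch (a minimal addition, assuming the rdd and ca defined above in the same spark-shell session): reset() zeroes the driver-side copy, so the same accumulator can be reused across jobs.

ca.reset()
ca.isZero // true: both components are back to 0
rdd.foreach(x => ca.add(new MyComplex(1, 2)))
ca.value.x // 10: one (1, 2) added per element
ca.value.y // 20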
ptakdhat commented Nov 6, 2019

Hey Sandeep, I tried the above code, but it is not giving the value of 'res.count'. Can you help me with it?
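
The likely cause is lazy evaluation: map is a transformation, so neither the mapped function nor the accumulator updates run until an action is invoked; calling count on the mapped RDD should return 10 and populate the accumulator as a side effect. A minimal illustration, assuming the sc and ca defined above:

ca.reset()
val lazyRes = sc.parallelize(1 to 10).map(x => { ca.add(new MyComplex(x, x)); x })
ca.value.x // still 0: nothing has executed yet
lazyRes.count // the action runs the map and returns 10
ca.value.x // now 55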
