I hereby claim:
- I am andrearota on github.
- I am arota (https://keybase.io/arota) on keybase.
- I have a public key whose fingerprint is AE75 DD6A 531D 4FFB 4042 6E81 28E3 8D6F 7239 5237
To claim this, I am signing this object:
// Every 500 ms, click through Facebook's messaging UI to delete the
// currently selected conversation: open the actions menu, open the
// selector dropdown, pick the delete item (8th anchor), then confirm
// via the delete_conversation control.
// NOTE(review): each lookup assumes the element exists; if the class
// names change or the menu is closed, [0].click() will throw — confirm
// the selectors against the current page markup.
setInterval(function () {
  var menuToggle = document.getElementsByClassName('_k_')[0];
  menuToggle.click();
  var selector = document.getElementsByClassName('uiSelectorButton uiButton uiButtonOverlay')[0];
  selector.click();
  var deleteItem = document.getElementsByClassName('itemAnchor')[7];
  deleteItem.click();
  document.getElementsByName('delete_conversation')[0].click();
}, 500);
# File: /etc/asound.conf
# .alsaequal.bin is created automatically when you tune the equalizer with "alsamixer -D equal"
ctl.equal {
  type equal;
  controls "/home/pi/.alsaequal.bin"
}
pcm.plugequal {
  type equal;
I hereby claim:
To claim this, I am signing this object:
import net.liftweb.json._ | |
case class Entry(name: String, job: String, scores: Array[Double], weights: Array[Double]) | |
object ParsingJson { | |
def main(args: Array[String]) = { | |
implicit val formats = DefaultFormats | |
# Extract the embedded subtitles from every .mkv file in the current
# directory tree. The subtitle track is assumed to be stream 0:2 —
# adjust the -map argument to match your files.
find . -name "*.mkv" -exec ffmpeg -i {} -map 0:2 {}.srt \;

# Drop the intermediate ".mkv" from the generated names
# (movie.mkv.srt -> movie.srt) so that VLC and other players load
# the subtitles automatically alongside the video.
for subfile in *.mkv.srt; do
  mv -v -- "$subfile" "${subfile%.mkv.srt}.srt"
done
// Problem: creating a Spark UDF that takes extra parameters at invocation time.
// Solution: use currying.
// http://stackoverflow.com/questions/35546576/how-can-i-pass-extra-parameters-to-udfs-in-sparksql
// We want to create hideTabooValues, a Spark UDF that sets to -1 any field that contains one of the given taboo values.
// E.g. forbiddenValues = [1, 2, 3]
//      dataframe = [1, 2, 3, 4, 5, 6]
//      dataframe.select(hideTabooValues(forbiddenValues)) :> [-1, -1, -1, 4, 5, 6]
//
// Implementing this in Spark, we find two major issues:
@echo on
REM Boots the Docker machine and starts the container at system startup.
REM Place (or schedule) this file in the Windows startup folder so it
REM runs automatically at logon.

REM Make the Docker Toolbox binaries reachable from this script.
set PATH=%PATH%;"C:\Program Files\Docker Toolbox\"

REM Name of the VM configuration that hosts the Docker daemon.
REM The docker machine referenced below must already exist!
from pyspark.sql import SparkSession, functions as f
from pyspark.sql.types import IntegerType
import sys

if __name__ == '__main__':
    # Obtain the (singleton) SparkSession, creating it on first use.
    spark = SparkSession.builder.getOrCreate()
    # Print runtime versions — handy for diagnosing environment mismatches.
    print('Spark version:', spark.version)
    print('Python version:', sys.version)