Mahendra Kutare (imaxxs)
➜ drive git:(main) ✗ npm install firebase-functions@latest firebase-admin@latest --save
added 203 packages, removed 2 packages, and audited 1624 packages in 12s
120 packages are looking for funding
run `npm fund` for details
53 vulnerabilities (12 moderate, 38 high, 3 critical)
To address issues that do not require attention, run:
imaxxs / slack-pagerduty-oncall.py
Created February 13, 2020 00:41 — forked from devdazed/slack-pagerduty-oncall.py
Updates a Slack User Group with People that are on call in PagerDuty
#!/usr/bin/env python
from __future__ import print_function
import json
import logging
from urllib2 import Request, urlopen, URLError, HTTPError
from base64 import b64decode
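
For context, a minimal sketch of the idea named in the gist's description, written against the public PagerDuty REST API (GET /oncalls) and the Slack Web API (users.lookupByEmail, usergroups.users.update); the tokens, the user-group id, and the email-based mapping below are illustrative assumptions, not code taken from the gist:

# Hypothetical sketch (not the gist's code): sync PagerDuty on-call users
# into a Slack user group. Tokens and the user-group id are placeholders.
import requests

PAGERDUTY_TOKEN = "pd-api-token"        # placeholder
SLACK_TOKEN = "xoxb-slack-bot-token"    # placeholder
SLACK_USERGROUP_ID = "S0123456789"      # placeholder Slack user-group id

def pagerduty_oncall_emails():
    # List everyone currently on call; include[]=users returns full user objects.
    resp = requests.get(
        "https://api.pagerduty.com/oncalls",
        headers={
            "Authorization": "Token token=" + PAGERDUTY_TOKEN,
            "Accept": "application/vnd.pagerduty+json;version=2",
        },
        params={"include[]": "users"},
    )
    resp.raise_for_status()
    return {oc["user"]["email"] for oc in resp.json()["oncalls"] if oc.get("user")}

def slack_user_ids(emails):
    # Map e-mail addresses to Slack user ids via users.lookupByEmail.
    ids = []
    for email in emails:
        data = requests.get(
            "https://slack.com/api/users.lookupByEmail",
            headers={"Authorization": "Bearer " + SLACK_TOKEN},
            params={"email": email},
        ).json()
        if data.get("ok"):
            ids.append(data["user"]["id"])
    return ids

def update_usergroup(user_ids):
    # Replace the user group's membership with the current on-call users.
    requests.post(
        "https://slack.com/api/usergroups.users.update",
        headers={"Authorization": "Bearer " + SLACK_TOKEN},
        data={"usergroup": SLACK_USERGROUP_ID, "users": ",".join(user_ids)},
    ).raise_for_status()

if __name__ == "__main__":
    ids = slack_user_ids(pagerduty_oncall_emails())
    if ids:  # avoid emptying the group if no ids could be resolved
        update_usergroup(ids)
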
imaxxs / build.sbt
Created October 5, 2016 19:15 — forked from mneedham/build.sbt
name := "playground"
version := "1.0"
scalaVersion := "2.10.4"
libraryDependencies += "org.apache.spark" %% "spark-core" % "1.1.0"
libraryDependencies += "net.sf.opencsv" % "opencsv" % "2.3"
package spark.example
import org.apache.spark.SparkContext
import org.apache.spark.SparkContext._
import org.apache.spark.SparkConf
object SparkGrep {
  def main(args: Array[String]) {
    if (args.length < 3) {
      System.err.println("Usage: SparkGrep <host> <input_file> <match_term>")
public class HCatInputFormat extends InputFormat<SerializableWritable<Writable>, HCatRecord> {

  private final org.apache.hcatalog.mapreduce.HCatInputFormat input;

  public HCatInputFormat() {
    input = new org.apache.hcatalog.mapreduce.HCatInputFormat();
  }

  @Override
  public RecordReader<SerializableWritable<Writable>, HCatRecord> createRecordReader(
imaxxs / gist:d97bc120a1ccf543d308
Created November 17, 2014 07:26
CBS New Json Format
{
  "video": {
    "id": "2d3b187f-058b-4e85-9384-8347f81df2b6",
    "medias": {
      "desktop": {
        "pid": "eZapbH_FCGp2",
        "bitrate": 764000,
        "uri": "rtmp:\/\/cp98363.edgefcs.net\/ondemand\/?auth=cbs\u0026aifp=v001\u0026slist=media\/2014\/11\/16\/359441475571\/\u003Cbreak\u003Emedia\/2014\/11\/16\/359441475571\/FTN_Commentary_1116_309323_796.mp4"
      },
      "tablet": {
imaxxs / gist:206020e8ad76b7408b5a
Created November 17, 2014 07:23
CBS News Json Format
{
  video: {
    id: "2d3b187f-058b-4e85-9384-8347f81df2b6",
    medias: {
      desktop: {
        pid: "eZapbH_FCGp2",
        bitrate: 764000,
        uri: "rtmp://cp98363.edgefcs.net/ondemand/?auth=cbs&aifp=v001&slist=media/2014/11/16/359441475571/<break>media/2014/11/16/359441475571/FTN_Commentary_1116_309323_796.mp4"
      },
      tablet: {
STORE cleaned_table_3 INTO 'hdfs://localhost/queryResults/dcdabb39a7f14304996378a825182a16/b29175766a4a58b62cf384fc67e0c7b56d8dd4da/cleaned_table_3.csv' USING PigStorage(',');
2013-11-21 01:02:38,955 [pool-3-thread-2] INFO com.trifacta.hadoopdata.monitor.pig_runner.BatchPigRunner - pig -P /tmp/pig-6923067481220530956.properties -l /tmp/pig-9019460696332228699.log -f /tmp/pig-script-3626976431948664845.pig
2013-11-21 01:02:38,963 [pool-3-thread-2] INFO org.apache.pig.Main - Apache Pig version 0.11.0-cdh4.3.0 (rexported) compiled May 27 2013, 20:40:22
2013-11-21 01:02:38,963 [pool-3-thread-2] INFO org.apache.pig.Main - Logging error messages to: /tmp/pig-9019460696332228699.log
2013-11-21 01:02:38,963 [pool-3-thread-2] WARN org.apache.hadoop.conf.Configuration - fs.default.name is deprecated. Instead, use fs.defaultFS
2013-11-21 01:02:38,969 [pool-3-thread-2] WARN org.apache.hadoop.conf.Configuration - fs.default.name is deprecated. Instead, use fs.defaultFS
2013-11-21 01:02:38,969 [pool-3-thread-2] WARN
register /vagrant/pig-udfs/target/hadoopdata-udfs-1.0-SNAPSHOT.jar;
original_528bd071475a011915000003_1 = LOAD 'hdfs://localhost/f77be6ba92214126bdcbaf2bb1290e2f_5000.txt' USING com.trifacta.hadoopdata.pig.TrifactaStorage('\n', '--maxRecordLength 1048576') AS column1:chararray;
cleaned_table = FOREACH original_528bd071475a011915000003_1 GENERATE flatten(com.trifacta.hadoopdata.wrangler.udf.SplitUDF($0, '()(,)()', 4999, false, 2)) AS (column2:chararray, column3:chararray, column4:chararray, column5:chararray, column6:chararray, column7:chararray, column8:chararray, column9:chararray, column10:chararray, column11:chararray, column12:chararray, column13:chararray, column14:chararray, column15:chararray, column16:chararray, column17:chararray, column18:chararray, column19:chararray, column20:chararray, column21:chararray, column22:chararray, column23:chararray, column24:chararray, column25:chararray, column26:chararray, column27:chararray, column28:chararray, column29:chararray, column30:chararray, column31:char