Skip to content

Instantly share code, notes, and snippets.

api.get('/route', function (request) {
/* .... Body of the router */
return data.toString("base64") // 1. route body should return response in Base64 String format.
},{ // <-- 2. params required for binary response.
success:
{
contentType: 'image/png',
@infinitylogesh
infinitylogesh / aws-lambda-binary.js
Last active October 10, 2018 01:07
Building an AWS Lambda service that returns binary data (an image) as a response, without requiring an Accept header.
// Image-processing library (third-party "sharp"), used to resize images.
let sh = require("sharp")
// Claudia API Builder gives the Lambda an Express-like routing API; the
// builder instance is what this module exports.
var ApiBuilder = require('claudia-api-builder'),
api = module.exports = new ApiBuilder();
// HTTP client; `encoding: null` makes `request` yield raw Buffers instead of
// strings — required when fetching binary (image) payloads.
var req = require('request').defaults({ encoding: null });
// Re-exports the api (redundant with the chained assignment above; kept as-is).
module.exports = api;
/*
Router to resize the image
@infinitylogesh
infinitylogesh / StringSearch.sc
Created March 31, 2017 09:16
Binary-search implementation of string search. Can be used to search for a string in a huge sorted list of strings.
/*
* Searches the huge Sorted String list using binary search.
*
* @param searchTerm string to be searched for
* @param StringSeq the huge sorted sequence of strings
* @return the position of the searchTerm in the sequence, or -1 if not found
* */
def findEff(searchTerm:String,StringSeq:Seq[String],position:Int = 0):Int = {
// Summary statistics over the parsed access-log DataFrame.
// NOTE(review): `logs` is defined later in this transcript (spark-shell
// output is out of order on this page); `groupBy()` with no columns
// aggregates over the whole DataFrame.
// Maximum size of a Log.
logs.groupBy().max("size").show();
// Minimum Size of a Log
logs.groupBy().min("size").show();
// average Size of a Log
logs.groupBy().avg("size").show();
// Count of each response codes.
logs.select("response").groupBy("response").count().show();
// One parsed access-log line: client IP, date string, time-of-day components,
// HTTP method, request URI, protocol name, response status code and response
// size in bytes.
case class log(
  ip: String,
  date: String,
  hour: Int,
  min: Int,
  sec: Int,
  methodType: String,
  uri: String,
  protocol: String,
  response: Int,
  size: Int
)
// Parses a single access-log line with the supplied regex and lifts the ten
// captured groups into a `log` record. Throws scala.MatchError when the line
// does not conform to the pattern, so callers are expected to pre-filter.
def parseLog(line: String, logRegex: scala.util.matching.Regex): log =
  line match {
    case logRegex(ip, date, hour, min, sec, methodType, uri, protocol, response, size) =>
      // hour/min/sec/response are numeric per the log format; `size` goes
      // through assertInt (defined elsewhere) — presumably to handle the
      // Apache "-" size marker; TODO confirm against assertInt's definition.
      log(ip, date, hour.toInt, min.toInt, sec.toInt, methodType, uri,
        protocol, response.toInt, assertInt(size))
  }
// Regex for Apache-style log lines: captures IP, date, hour, minute, second,
// HTTP method, URI, protocol, response code and size as whitespace-delimited
// tokens.
val logRegex = """^(\S+) - - \[(\S+):(\S+):(\S+):(\S+) -\S+] "(\S+) (\S+) (\S+)\/\S+ (\S+) (\S+)""".r;
// Load the raw log file into an RDD of lines. NOTE(review): "..FILE_PATH" is
// a placeholder — replace with a real path before running.
val logFile = sc.textFile("..FILE_PATH/access_log");
// Keep only lines the pattern accepts, then parse each into a `log` row and
// convert to a DataFrame. NOTE(review): String.matches requires the WHOLE
// line to match the pattern — lines with trailing text after the size field
// would be dropped; confirm against the actual log format.
val logs = logFile.filter(line=>line.matches(logRegex.toString)).map(line=>parseLog(line,logRegex)).toDF();
// Regex for Apache-style log lines: captures IP, date, hour, minute, second,
// HTTP method, URI, protocol, response code and size as whitespace-delimited
// tokens.
val logRegex = """^(\S+) - - \[(\S+):(\S+):(\S+):(\S+) -\S+] "(\S+) (\S+) (\S+)\/\S+ (\S+) (\S+)""".r;
// Load the raw log file into an RDD of lines via the Spark context `sc`
// (available in spark-shell).
val logFile = sc.textFile("../hacks/spark/feelers/loganalyzer/access_log");
// Keep only lines the pattern accepts, then parse each into a `log` row and
// convert to a DataFrame. NOTE(review): String.matches requires a whole-line
// match against the pattern — verify no valid lines are silently dropped.
val logs = logFile.filter(line=>line.matches(logRegex.toString)).map(line=>parseLog(line,logRegex)).toDF();
// NOTE(review): this run of statements repeats the same regex/load/parse
// pipeline — these look like duplicated lines from a spark-shell transcript
// captured by the page scrape; keep only one copy when consolidating.
// Regex pattern to parse the log
val logRegex = """^(\S+) - - \[(\S+):(\S+):(\S+):(\S+) -\S+] "(\S+) (\S+) (\S+)\/\S+ (\S+) (\S+)""".r;
// File is loaded into Spark context.
val logFile = sc.textFile("../hacks/spark/feelers/loganalyzer/access_log");
// LogFile RDD is filtered ( if a line doesn't conform to the regex pattern ) and converted to dataframe.
val logs = logFile.filter(line=>line.matches(logRegex.toString)).map(line=>parseLog(line,logRegex)).toDF();
// NOTE(review): two identical `val logRegex` definitions — duplicate lines
// from a REPL transcript. In a compiled Scala file, redefining the same `val`
// name in one scope would not compile; delete the extras when consolidating.
val logRegex = """^(\S+) - - \[(\S+):(\S+):(\S+):(\S+) -\S+] "(\S+) (\S+) (\S+)\/\S+ (\S+) (\S+)""".r;
val logRegex = """^(\S+) - - \[(\S+):(\S+):(\S+):(\S+) -\S+] "(\S+) (\S+) (\S+)\/\S+ (\S+) (\S+)""".r;