LambdaSamIntroFn:
  Type: 'AWS::Serverless::Function'
  Properties:
    FunctionName: sam_intro_fn
    Handler: publishtosns.lambda_handler
    Runtime: python3.9
    CodeUri: .
    Description: ''
    MemorySize: 128
    Timeout: 3
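
The handler below parses an S3 event, so the function also needs an event source. A minimal sketch of what that wiring could look like under the function's Properties, assuming the intent is to fire on object creation in SrcBucket (the logical event name is hypothetical, not part of the original template):

    Events:
      S3ObjectCreated:                # hypothetical logical name
        Type: S3
        Properties:
          Bucket: !Ref SrcBucket
          Events: s3:ObjectCreated:*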
import boto3
import urllib.parse

s3 = boto3.client('s3')
sns = boto3.client('sns')

def lambda_handler(event, context):
    # Extract the bucket name and URL-decoded object key from the S3 event record.
    source_bucket = event['Records'][0]['s3']['bucket']['name']
    object_key = urllib.parse.unquote_plus(event['Records'][0]['s3']['object']['key'])
    # The original snippet ends here; the module name (publishtosns) suggests the
    # key is then published to an SNS topic via sns.publish(...).
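
For a quick local check, the handler can be driven with a hand-built event; a minimal sketch (the bucket and key values are placeholders):

# Minimal S3 put-event shape for exercising lambda_handler locally.
test_event = {
    'Records': [
        {'s3': {'bucket': {'name': 'sam-intro-bck'},
                'object': {'key': 'some%20file.txt'}}}
    ]
}
lambda_handler(test_event, None)  # context is unused here, so None is enough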
LambdaRole:
  Type: 'AWS::IAM::Role'
  Properties:
    AssumeRolePolicyDocument:
      Version: "2012-10-17"
      Statement:
        - Effect: Allow
          Principal:
            Service:
              - lambda.amazonaws.com
          Action: 'sts:AssumeRole'
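
The role above only establishes trust; the handler also needs permission to read the uploaded object and to publish to SNS. One way to grant that is an inline policy on the same role, sketched below (the policy name is hypothetical, and the SNS resource should be scoped to the real topic ARN):

    Policies:
      - PolicyName: sam-intro-fn-policy   # hypothetical name
        PolicyDocument:
          Version: "2012-10-17"
          Statement:
            - Effect: Allow
              Action: 's3:GetObject'
              Resource: !Sub '${SrcBucket.Arn}/*'
            - Effect: Allow
              Action: 'sns:Publish'
              Resource: '*'               # scope this down in practice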
AWSTemplateFormatVersion: '2010-09-09'
Transform: AWS::Serverless-2016-10-31
Description: An AWS Serverless Specification template for aws_sam_intro project
Resources:
  SrcBucket:
    Type: AWS::S3::Bucket
    Properties:
      BucketName: sam-intro-bck
      PublicAccessBlockConfiguration:
        BlockPublicAcls: true
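
With the template in place, the stack is typically built and deployed with the SAM CLI; on the first run, --guided prompts for the stack name, region, and deployment bucket:

sam build
sam deploy --guided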
Alongside the SAM template, the gist includes a small Spark practice program. The first variant reads the sample file as a DataFrame:

import org.apache.log4j.{Level, Logger}
import org.apache.spark.sql.SparkSession
import org.apache.spark.sql.expressions.Window
import org.apache.spark.sql.functions.{row_number, rank} // used in the ranking sketch at the end

object Practice {
  def main(args: Array[String]): Unit = {
    // Silence Spark's verbose logging before anything else runs.
    Logger.getLogger("org").setLevel(Level.OFF)

    val spark = SparkSession.builder().master("local").appName("practice").getOrCreate()

    // Read the sample file as an untyped DataFrame (columns default to _c0, _c1, ...).
    val datadf = spark.read.csv("C:\\testdata\\sample.txt")
    println("Initial Data:")
    datadf.show()
  }
}
A second variant reads the same file as a plain RDD instead of a DataFrame:

import org.apache.log4j.{Level, Logger}
import org.apache.spark.sql.SparkSession

object Practice {
  def main(args: Array[String]): Unit = {
    Logger.getLogger("org").setLevel(Level.OFF)

    val spark = SparkSession.builder().master("local").appName("practice").getOrCreate()

    // textFile returns an RDD[String], one element per line of the file.
    val data = spark.sparkContext.textFile("C:\\testdata\\sample.txt")
    println("Initial Data:")
    data.collect().foreach(println)
  }
}
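
The first variant imports Window, row_number, and rank but ends before using them. A sketch of how they would typically be applied to datadf, assuming sample.txt yields a grouping column _c0 and an ordering column _c1 (both column names are assumptions about the file's shape):

// Hypothetical continuation: number and rank rows within each _c0 group, ordered by _c1.
val w = Window.partitionBy("_c0").orderBy("_c1")
val ranked = datadf
  .withColumn("row_num", row_number().over(w))
  .withColumn("rank", rank().over(w))
ranked.show()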