Skip to content

Instantly share code, notes, and snippets.

import akka.actor.Actor
import akka.actor.Props
import akka.event.Logging
// Minimal Akka actor example: replies to "hello" and logs any other message.
class DudeA extends Actor {
// Actor-scoped logger created from the enclosing actor system.
val log = Logging(context.system, this)
def receive = {
// Reply to the original sender of the "hello" message with a German greeting.
case "hello" => sender() ! "Hallo, Grüß Dich!"
// NOTE(review): gist preview is truncated here -- the closing braces of
// `receive` and of the class are not visible in this excerpt.
case _ => log.info("Hmm...")
// Excerpt illustrating how Akka implements sender(): it simply delegates to
// the ActorContext, yielding the sender ActorRef of the message currently
// being processed.
trait Actor {
// NOTE(review): `ActorRef` and `context` are declared elsewhere in Akka;
// this snippet is a partial excerpt of the real akka.actor.Actor trait.
final def sender(): ActorRef = context.sender()
}
@diogoaurelio
diogoaurelio / redshift_lambda_data_loader.py
Last active March 8, 2022 19:38
Sample code for an S3-triggered AWS Lambda function that issues a COPY command to load data into a Redshift DB
import urllib
import pg8000
import boto3
import os
import logging
# Runtime configuration read from the Lambda's environment variables.
# NOTE(review): os.environ[...] raises KeyError at import time if any of these
# variables is unset -- presumably intentional fail-fast behavior; confirm.
IAM_ROLE = os.environ['IAM_ROLE']  # presumably the IAM role ARN used by the Redshift COPY -- verify
DB_NAME = os.environ['DB_NAME']
DB_USER = os.environ['DB_USER']
DB_PORT = os.environ['DB_PORT']
@diogoaurelio
diogoaurelio / lambda_module_instantiation.tf
Last active September 16, 2018 11:44
lambda_module_instantiation.tf
# Instantiates a reusable Lambda module for the Redshift data-loader function.
module "redshift_loader_lambda" {
source = "github.com/diogoaurelio/terraform-aws-lambda-module"
# NOTE(review): `version` is only honored for Terraform Registry sources; for a
# GitHub source the revision should be pinned via `?ref=v0.0.1` on the source URL.
version = "v0.0.1"
aws_region = "${var.aws_region}"
environment = "${var.environment}"
project = "${var.project}"
# NOTE(review): gist preview truncated -- remaining module arguments and the
# closing brace are not visible in this excerpt.
lambda_unique_function_name = "${var.redshift_loader_lambda_unique_function_name}"
@diogoaurelio
diogoaurelio / locals.tf
Last active September 16, 2018 14:37
lambda_locals_env_vars.tf
################################################################################
# Locals used for different Lambdas Environmental Variables
################################################################################
# Environment variables to be injected into the Redshift loader Lambda.
locals {
redshift_loader_lambda_env_vars = {
ENVIRONMENT = "${var.environment}"
REGION = "${var.aws_region}"
# NOTE(review): presumably the role assumed for the Redshift COPY command --
# confirm against the Lambda code. Snippet truncated; closing braces not visible.
IAM_ROLE = "${var.redshift_data_loader_lambda_iam_role}"
@diogoaurelio
diogoaurelio / lambda_permissions.tf
Created September 16, 2018 15:00
lambda_permissions.tf
################################################################################
# AWS Lambda IAM Policy document definitions
################################################################################
data "aws_iam_policy_document" "this" {
statement {
effect = "Allow"
actions = [
"s3:GetBucketLocation",
-- Redshift - check currently running queries; useful for misbehaving queries.
-- ('--' is the valid comment delimiter in Redshift; '#' is a MySQL-ism.)
SELECT
userid
, query                          -- query id
, pid
, starttime
, LEFT(text, 100) AS query_text  -- first 100 chars of the SQL text; renamed so the
                                 -- result set does not contain two columns named "query"
FROM stv_inflight;
# Demo multi-line string; the "\n" escapes embed literal newline characters
# in the value (triple quotes also preserve the surrounding line breaks).
# NOTE(review): "seperated" is a typo ("separated") inside the literal itself;
# left untouched here because it is runtime data, not a comment.
my_string = """
your string \n seperated \n by \n new \n lines.
"""
# Hard-coded AWS lookup parameters -- presumably placeholders for fetching an
# encrypted parameter (SSM/KMS); TODO confirm against the surrounding script.
account_id = '12345678910'  # placeholder AWS account id
region = 'eu-west-1'
parameter_name = 'some-secret-name'  # presumably an SSM parameter name -- verify
key_id = 'your-key-id'  # presumably a KMS key id -- verify
@diogoaurelio
diogoaurelio / pyspark_demo_app.py
Created October 21, 2018 15:06
PySpark 2.3 example
# create the general function
def _amount_spent(quantity: int, price: float) -> float:
"""
Calculates the product between two variables
:param quantity: (float/int)
:param price: (float/int)
:return:
(float/int)
"""
return quantity * price
@diogoaurelio
diogoaurelio / pyspark_demo_app_unit_test.py
Created October 21, 2018 15:09
PySpark demo Application example unit test
from tests.test_utils.test_spark import spark_session
from pyspark.sql import DataFrame, Row, SparkSession
from pyspark.sql.functions import col
from src.job import amount_spent_udf
def test_amount_spent_udf(spark_session: SparkSession) -> None:
input_df = spark_session.createDataFrame([
Row(customer_name="Geoffrey", date="2016-04-22", category="Foo", product_name="Bar", quantity=1, price=2.00),