# Minimal nginx image that serves a one-line "My app!" page
FROM nginx:alpine
RUN echo "My app!" > /usr/share/nginx/html/index.html
mikeapted / _r-lambda.md
Last active November 4, 2019 12:53
Custom R runtime for AWS Lambda with support for MSSQL database connections

Download the repo from Appsilon

git clone https://github.com/Appsilon/r-lambda-workflow
cd r-lambda-workflow

Set up a Python virtual environment (may not be necessary); one way to create it is sketched below.
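A minimal sketch using the standard-library venv module (equivalent to running python3 -m venv venv); the directory name venv is arbitrary, not from the gist:

import venv

# Creates ./venv with pip installed; activate it with `source venv/bin/activate`
# before installing the workflow's dependencies.
venv.create('venv', with_pip=True)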

mikeapted / rekognition_webcam.py
Created August 24, 2019 01:33
Sample Python code for sentiment analysis with Rekognition from laptop webcam
import time
import cv2
import boto3
# Get the Client
session = boto3.Session()
rekog_client = session.client("rekognition", region_name='us-east-1')
# Webcam capture size and the factor used to shrink frames before upload
width = 1280
height = 720
scale_factor = 0.1
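The preview stops here. A possible continuation (an assumption, not shown in the gist) grabs frames from the default webcam with OpenCV, shrinks them by scale_factor, and sends the JPEG bytes to Rekognition's detect_faces with Attributes=['ALL'] to read per-face emotion scores:

cap = cv2.VideoCapture(0)          # default laptop webcam
cap.set(cv2.CAP_PROP_FRAME_WIDTH, width)
cap.set(cv2.CAP_PROP_FRAME_HEIGHT, height)

while True:
    ok, frame = cap.read()
    if not ok:
        break
    # Shrink the frame before upload to keep the request small
    small = cv2.resize(frame, None, fx=scale_factor, fy=scale_factor)
    ok, jpg = cv2.imencode('.jpg', small)
    if not ok:
        continue
    result = rekog_client.detect_faces(
        Image={'Bytes': jpg.tobytes()},
        Attributes=['ALL'])        # 'ALL' includes the Emotions attribute
    for face in result['FaceDetails']:
        top = max(face['Emotions'], key=lambda e: e['Confidence'])
        print(top['Type'], round(top['Confidence'], 1))
    time.sleep(1)

cap.release()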
{
"AWSTemplateFormatVersion": "2010-09-09",
"Description": "PA16 2018-12-13 - @akirmak - RevHist: PA16: sagemaker notebook role type fixed. PA15:-(parameters added for AcctId and S3 bucket's name initials)",
"Parameters": {
"yourInitials": {
"Description": "Your Initials to be used in the s3-bucket created. All in small letters pls. e.g. It shall be 'fs' for Frank Sinatra",
"Type": "String",
"MinLength": "2",
"MaxLength": "5"
}
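For context, a minimal boto3 sketch of launching this template with the yourInitials parameter; the local file name, stack name, and capabilities are assumptions, not from the gist:

import boto3

cfn = boto3.client('cloudformation', region_name='us-east-1')

# 'sagemaker-lab.json' and the stack name are hypothetical; only the
# yourInitials parameter comes from the template above.
with open('sagemaker-lab.json') as f:
    template_body = f.read()

cfn.create_stack(
    StackName='pa16-analytics-lab',
    TemplateBody=template_body,
    Parameters=[{'ParameterKey': 'yourInitials', 'ParameterValue': 'fs'}],
    Capabilities=['CAPABILITY_IAM'])   # assumed: the template creates IAM roles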
{
"productName" : "{{commerce.productName}}",
"color" : "{{commerce.color}}",
"department" : "{{commerce.department}}",
"product" : "{{commerce.product}}",
"imageUrl": "{{image.imageUrl}}",
"dateSoldSince": "{{date.past}}",
"dateSoldUntil": "{{date.future}}",
"price": {{random.number(
{
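This looks like a faker.js-style record template (as used by the Amazon Kinesis Data Generator). A rough Python equivalent that builds one such record and sends it to a Kinesis stream; the stream name, field values, and price range are placeholders, not from the template:

import json
import random
import datetime
import boto3

# The field values below are placeholders; the real template fills them with
# faker.js helpers such as {{commerce.productName}} and {{date.past}}.
record = {
    "productName": random.choice(["Ergonomic Steel Chair", "Small Wooden Table"]),
    "color": random.choice(["red", "green", "blue"]),
    "department": random.choice(["Garden", "Toys", "Books"]),
    "product": random.choice(["Chair", "Table", "Lamp"]),
    "imageUrl": "http://lorempixel.com/640/480",
    "dateSoldSince": (datetime.datetime.utcnow() - datetime.timedelta(days=30)).isoformat(),
    "dateSoldUntil": (datetime.datetime.utcnow() + datetime.timedelta(days=30)).isoformat(),
    "price": random.randint(10, 150),
}

# Hypothetical delivery to a Kinesis stream named 'your-stream-name'.
kinesis = boto3.client('kinesis', region_name='us-east-1')
kinesis.put_record(StreamName='your-stream-name',
                   Data=json.dumps(record).encode('utf-8'),
                   PartitionKey=record["department"])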
{
"AWSTemplateFormatVersion":"2010-09-09",
"Description":"Creates resources necessary to replicate SQLServer database using AWS Database Migration Service to S3 Data lake.",
"Parameters":{
"KeyName":{
"Description":"",
"Type":"AWS::EC2::KeyPair::KeyName"
}
},
"Mappings" : {
import boto3
import json

def lambda_handler(event, context):
    # CSV payload: "<sensor id>,<sensor value>" pairs to score against the k-means model
    payload = '1,13000 \n 1,20000 \n 2,3500 \n 2,5000 \n 3,3000 \n 3,3300 \n 4,2 \n 4,10'
    endpoint_name = 'YourInitials-kmeans-anomalydetection'
    runtime = boto3.client('runtime.sagemaker')
    response = runtime.invoke_endpoint(EndpointName=endpoint_name,
                                       ContentType='text/csv',
                                       Body=payload)
    # The endpoint returns JSON; pass it straight back to the caller
    return json.loads(response['Body'].read().decode())
import sys
from awsglue.transforms import *
from awsglue.utils import getResolvedOptions
from pyspark.context import SparkContext
from awsglue.context import GlueContext
from awsglue.job import Job
## @params: [JOB_NAME]
args = getResolvedOptions(sys.argv, ['JOB_NAME'])

# Standard Glue job initialization
sc = SparkContext()
glueContext = GlueContext(sc)
spark = glueContext.spark_session
job = Job(glueContext)
job.init(args['JOB_NAME'], args)
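The preview cuts off before the transform itself. A hedged sketch of what a CSV-to-Parquet step in this job might look like; the catalog database, table name, and S3 path are invented for illustration:

# Hypothetical names; the real gist's catalog database/table and output path are not shown above.
source = glueContext.create_dynamic_frame.from_catalog(
    database="yourinitial_bigdata",
    table_name="raw_csv")

glueContext.write_dynamic_frame.from_options(
    frame=source,
    connection_type="s3",
    connection_options={"path": "s3://your-datalake-bucket/parquet/"},
    format="parquet")

job.commit()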
select sensorname, sensorvalue, anomalyscore from YourInitial_bigdata.analytic_csv2parquet where anomalyscore > 2 limit 10;
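The same query can be run programmatically with boto3; the database name is taken from the query above, while the region and the S3 output location are placeholders:

import boto3

athena = boto3.client('athena', region_name='us-east-1')

query = ("select sensorname, sensorvalue, anomalyscore "
         "from YourInitial_bigdata.analytic_csv2parquet "
         "where anomalyscore > 2 limit 10;")

# The output bucket below is a placeholder, not from the original gist.
response = athena.start_query_execution(
    QueryString=query,
    QueryExecutionContext={'Database': 'YourInitial_bigdata'},
    ResultConfiguration={'OutputLocation': 's3://your-query-results-bucket/athena/'})
print(response['QueryExecutionId'])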