-- Athena DDL: Iceberg table partitioned by dt
CREATE TABLE IF NOT EXISTS mydatabase.user_transactions (
    dt date,
    user_id int,
    total_cost_usd float,
    registration_date string
)
PARTITIONED BY (dt)
LOCATION 's3://datalake.staging.aws/data/myschema/optimized-data-iceberg-parquet/'
TBLPROPERTIES (
    'table_type'='ICEBERG',
    'format'='parquet' -- assumed from the S3 path; adjust if the data is stored in another format
);
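For reference, a quick usage sketch against the Iceberg table above (the date literal and the aggregation are purely illustrative); filtering on dt lets Athena prune partitions:

-- Illustrative query; replace the dt value with a real partition date
SELECT user_id,
       SUM(total_cost_usd) AS total_spend_usd
FROM mydatabase.user_transactions
WHERE dt = DATE '2024-01-01'
GROUP BY user_id;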
# simple_stack.yaml
# AWS CloudFormation example for a data pipeline
AWSTemplateFormatVersion: '2010-09-09'
Description: An example template for a Step Functions state machine.
Resources:
  LambdaExecutionRole:
    Type: "AWS::IAM::Role"
    Properties:
      AssumeRolePolicyDocument:
        Version: "2012-10-17"
        Statement:
          - Effect: Allow
            Principal:
              Service: lambda.amazonaws.com
            Action: "sts:AssumeRole"
aws \
cloudformation deploy \
--template-file simple_stack.yaml \
--stack-name SimpleDataPipeline \
--capabilities CAPABILITY_IAM
AWSTemplateFormatVersion: 2010-09-09
Description: >-
  AWS CloudFormation Sample Template: Sample template showing how to
  create an Amazon RDS Database Instance with provisioned IOPS. **WARNING** This
  template creates an Amazon Relational Database Service database instance. You
  will be billed for the AWS resources used if you create a stack from this
  template.
Parameters:
  DBUser:
    Default: root
aws --profile mds \
cloudformation deploy \
--template-file AWS_cfn_postgres_micro.yaml \
--stack-name MyPostgresDB \
--capabilities CAPABILITY_IAM
aws --profile mds \
cloudformation deploy \
--template-file AWS_cfn_postgres_micro.yaml \
--stack-name MyMySQLDB \
--capabilities CAPABILITY_IAM
aws \
cloudformation deploy \
--template-file AWS_cfn_mysql_small.yaml \
--stack-name MyMySQLDB \
--capabilities CAPABILITY_IAM
USE gcs_hive.default;

-- Trino (Hive connector): the schema must exist before the external table can be created in it
CREATE SCHEMA gcs_hive.fb WITH (location = 'gs://firebase-events-archive-avro');

-- Example columns only; adjust them to match the schema of the exported Avro files
CREATE TABLE gcs_hive.fb.events_raw (
    event_name varchar,
    event_timestamp bigint
)
WITH (
    external_location = 'gs://firebase-events-archive-avro',
    format = 'AVRO' -- or 'PARQUET'
);
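A small sanity-check query, assuming the events_raw table above was created with the illustrative columns shown:

-- Counts rows per event to confirm Trino can read the Avro files from GCS
SELECT event_name, COUNT(*) AS events
FROM gcs_hive.fb.events_raw
GROUP BY event_name
ORDER BY events DESC;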
CREATE OR REPLACE EXTERNAL TABLE source.custom_hive_partitioned_table
WITH PARTITION COLUMNS (
    dt STRING, -- column order must match the external path
    lang STRING)
OPTIONS (
    uris = ['gs://events-export-avro/public-project/avro_external_test/*'],
    format = 'AVRO',
    hive_partition_uri_prefix = 'gs://events-export-avro/public-project/avro_external_test',
    require_hive_partition_filter = false);
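As a usage sketch (the dt and lang values below are made up), filtering on the declared partition columns lets BigQuery prune to the matching Hive partitions under the URI prefix:

-- Scans only the partitions whose dt/lang path segments match the filter
SELECT COUNT(*) AS row_count
FROM source.custom_hive_partitioned_table
WHERE dt = '2024-01-01'
  AND lang = 'en';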