example postgres macros I've used
#!/bin/bash
# Plugins handle the upsert${table} and upsert${table}Batch functions.
# These macros expect $schema and $SQL_DIR to be set by the calling script.

# Add a `create${table}Batch` function to the schema
# for the given $table, allowing for bulk creates
function batch_create {
  table=$1
  inputName=$2
  echo "
    -- batch insert function
    create function ${schema}.create_${table}_batch (
      ${inputName} ${schema}.${table}[]
    ) returns setof ${schema}.${table} as \$batch$
      insert into ${schema}.${table}
      select * from unnest(${inputName})
      returning *;
    \$batch$ LANGUAGE sql;
  "
}
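
# For illustration, with schema=app (an assumed schema name),
# `batch_create 'task' 'tasks'` emits:
#
#   create function app.create_task_batch (
#     tasks app.task[]
#   ) returns setof app.task as $batch$
#     insert into app.task
#     select * from unnest(tasks)
#     returning *;
#   $batch$ LANGUAGE sql;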

# Add a `delete${table}Batch` function to the schema
# for the given $table, allowing for bulk deletion
function batch_delete {
  table=$1
  inputName="${1}_ids"
  tid="public.temporal_id"
  echo "
    -- batch delete function
    create function ${schema}.delete_${table}_batch (
      ${inputName} ${tid}[]
    ) returns ${tid}[] as \$batch$
      with deleted_records as (
        delete from ${schema}.${table}
        where id = any(${inputName})
        returning id
      )
      select array_agg(id) as deleted from deleted_records;
    \$batch$ LANGUAGE sql;
  "
}
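
# For illustration, the emitted function is callable directly, e.g.
# (assuming schema=app, and assuming public.temporal_id is the
# (entity_id, timestamp) composite built in temporal_reference_api below):
#
#   select app.delete_task_batch(array[
#     ('00000000-0000-0000-0000-000000000000'::uuid, now())::public.temporal_id
#   ]);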

# Add timetravel triggers for a given $table,
# transforming deletions into updates
# and adding historical versions after "true" updates
function timetravel {
  table=$1
  echo "
    -- manages valid_from and valid_until,
    -- transforms deletions into updates (if they make it that far)
    CREATE TRIGGER ${table}_before
      BEFORE INSERT OR UPDATE ON ${schema}.${table}
      FOR EACH ROW EXECUTE PROCEDURE process_timetravel_before();

    -- insert historical records after user updates,
    -- i.e. when (pg_trigger_depth() = 0)
    CREATE TRIGGER ${table}_after
      AFTER UPDATE ON ${schema}.${table}
      FOR EACH ROW
      WHEN (PG_TRIGGER_DEPTH() = 0)
      EXECUTE PROCEDURE process_timetravel_after();

    -- rewrite deletes into updates on the physical row (ctid);
    -- the rule is unconditional and uses RETURNING * so that
    -- postgraphile still receives the deleted row
    CREATE OR REPLACE RULE ${table}_archive AS
      ON DELETE TO ${schema}.${table}
      DO INSTEAD
        UPDATE ${schema}.${table}
        SET valid_until = NOW()
        WHERE ctid = OLD.ctid
        RETURNING *;
  "
}
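
# load_sql is used below but isn't included in this gist;
# a minimal sketch, assuming it just reads the table's SQL file:
function load_sql {
  cat "$SQL_DIR/$1.sql"
}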

function load_with_timetravel {
  table=$1
  # the @omit smart comments hide the generated id columns
  # from postgraphile's create, update, and delete mutations
  echo "`load_sql $table`
    `timetravel ${table}`
    comment on column ${schema}.${table}.id is E'@omit create,update,delete';
    comment on column ${schema}.${table}.version_id is E'@omit create,update,delete';
  "
}

function define_mixin {
  mixin_var_name="with_$1"
  file_name="$SQL_DIR/$1.sql"
  echo "
    -- abuse the \gset command to define a mixin variable $mixin_var_name
    select '$(
      # escape single quotes for the SQL string literal
      sed "s/'/''/g" "$file_name"
    )' $mixin_var_name \gset
  "
}
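
# For illustration, `define_mixin 'entity_mixin'` emits psql along the lines of
# (this column list is invented; the real one lives in $SQL_DIR/entity_mixin.sql):
#
#   select 'entity_id uuid not null,
#   valid_from timestamp with time zone not null' with_entity_mixin \gset
#
# after which later table definitions can splice the columns in
# via the psql variable :with_entity_mixin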

function temporal_reference {
  table=$1
  temporal_field=$2
  echo "
    _${table}_entity_id uuid not null,
    _${table}_${temporal_field} timestamp with time zone not null
  "
}

function temporal_foreign_key {
  table=$1
  temporal_field=$2
  # note: postgres requires a unique constraint on
  # (entity_id, ${temporal_field}) in the referenced table;
  # ON UPDATE CASCADE presumably keeps references in sync
  # when timetravel rewrites the referenced timestamp
  echo "
    FOREIGN KEY (_${table}_entity_id, _${table}_${temporal_field})
      REFERENCES ${table}(entity_id, ${temporal_field})
      ON UPDATE CASCADE
  "
}

function temporal_reference_api {
  schema=$1
  table=$2
  reference=$3
  temporal_field=$4
  if [ "valid_from" = "$temporal_field" ]; then
    field="version_id"
  else
    field="id"
  fi
  entity_id="_${reference}_entity_id"
  timestamp="_${reference}_${temporal_field}"
  echo "
    comment on column ${schema}.${table}.${entity_id} is E'@omit read';
    comment on column ${schema}.${table}.${timestamp} is E'@omit read';
    create function ${schema}.${table}_${field}(e ${schema}.${table})
    returns temporal_id as \$\$
      select (e.${entity_id}, e.${timestamp})::temporal_id
    \$\$
    language sql stable;
  "
}
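
# For illustration, a hypothetical referencing table could combine the three
# macros above, e.g. pinning a task to the calendar_event version it was
# created against (names here are invented for the example):
#
#   create table ${schema}.task (
#     -- ...task columns...
#     `temporal_reference 'calendar_event' 'valid_from'`,
#     `temporal_foreign_key 'calendar_event' 'valid_from'`
#   );
#   `temporal_reference_api "$schema" 'task' 'calendar_event' 'valid_from'`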

micimize commented Apr 4, 2020
simplified usage:

psql "$db_url" <<SQL

\set ON_ERROR_STOP on

BEGIN;

set search_path = public;

create schema ${schema};
create schema if not exists internal;

-- we abuse the \gset multiline raw variable syntax for this:
-- defines a list of fields that can be "mixed in" to other tables via :with_entity_mixin
`define_mixin 'entity_mixin'`

\i ${SQL_DIR}/timetravel.sql

-- we set the search path to write to the target schema, 
-- but read from public for type info, etc
set search_path = ${schema}, public;

`load_with_timetravel 'app_user'`

`load_sql 'authentication_functions'`

`load_with_timetravel 'task'`
`batch_create 'task' 'tasks'`
`batch_delete 'task'`

`load_with_timetravel 'calendar_event'`
`batch_create 'calendar_event' 'calendar_events'`
`batch_delete 'calendar_event'`

COMMIT;

SQL
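
With ON_ERROR_STOP set and the whole script wrapped in BEGIN/COMMIT, the schema build is all-or-nothing: any failing statement aborts psql before the COMMIT, so a broken migration leaves nothing half-applied.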
