Skip to content

Instantly share code, notes, and snippets.

@cj

cj/schema.rb Secret

Created March 26, 2021 17:43
Show Gist options
  • Save cj/bb62776913387c17ca1dc9c1c3ae394c to your computer and use it in GitHub Desktop.
# frozen_string_literal: true
# This file is auto-generated from the current state of the database. Instead
# of editing this file, please use the migrations feature of Active Record to
# incrementally modify your database, and then regenerate this schema definition.
#
# This file is the source Rails uses to define your schema when running `bin/rails
# db:schema:load`. When creating a new database, `bin/rails db:schema:load` tends to
# be faster and is potentially less error prone than running all of your
# migrations from scratch. Old migrations may fail to apply correctly if those
# migrations use external dependencies or application code.
#
# It's strongly recommended that you check this file into your version control system.
# Schema definition for a PostgreSQL database. The version stamp is the
# timestamp of the most recently applied migration (2021-03-24 17:53:00).
ActiveRecord::Schema.define(version: 2021_03_24_175300) do
# These are extensions that must be enabled in order to support this database
# - citext:    case-insensitive text, used for most name/address columns below
# - hstore:    key/value type; the logidze_logger function below relies on
#              hstore(...) and hstore_to_jsonb_loose(...)
# - pgcrypto:  provides gen_random_uuid(), the default for every UUID primary key
# - plpgsql:   procedural language used by the logidze_* functions
# - postgis:   geographic types (addresses.coords geography column)
# - uuid-ossp: additional UUID generators (not visibly used by this dump —
#   presumably kept for compatibility; TODO confirm it is still needed)
enable_extension "citext"
enable_extension "hstore"
enable_extension "pgcrypto"
enable_extension "plpgsql"
enable_extension "postgis"
enable_extension "uuid-ossp"
# These are custom enum types that must be created before they can be used in the schema definition
# NOTE(review): `create_enum` / `t.enum` are not stock Rails 6 schema DSL —
# presumably provided by a gem such as activerecord-postgres_enum; confirm
# against the Gemfile before regenerating this dump.
create_enum "address_carrier_route_type", ["city_delivery", "rural_route", "highway_contract", "po_box", "general_delivery"]
create_enum "address_record_type", ["street", "highrise", "firm", "po_box", "rural_route", "general_delivery"]
create_enum "address_type", ["residential", "commercial"]
create_enum "address_zip_code_type", ["standard", "po_box", "unique", "military"]
create_enum "order_status", ["new", "in_review", "processing", "completed"]
create_enum "order_type", ["nhd", "flood"]
# ActiveStorage attachment join table: links a polymorphic record
# (record_type/record_id) to a blob. Uses UUID primary keys via pgcrypto.
# `integration_id` is an addition on top of the stock Rails table (indexed
# below, FK to integrations is not declared in this dump).
create_table "active_storage_attachments", id: :uuid, default: -> { "gen_random_uuid()" }, force: :cascade do |t|
t.string("name", null: false)
t.string("record_type", null: false)
t.uuid("record_id", null: false)
t.uuid("blob_id", null: false)
t.datetime("created_at", null: false)
t.uuid("integration_id")
t.index(["blob_id"], name: "index_active_storage_attachments_on_blob_id")
t.index(["integration_id"], name: "index_active_storage_attachments_on_integration_id")
# Uniqueness guard: a given record can hold a given blob under a given name once.
t.index(["record_type", "record_id", "name", "blob_id"], name: "index_active_storage_attachments_uniqueness", unique: true)
end
# ActiveStorage blob metadata; the `key` uniquely identifies the stored object
# in the storage service named by `service_name`.
create_table "active_storage_blobs", id: :uuid, default: -> { "gen_random_uuid()" }, force: :cascade do |t|
t.string("key", null: false)
t.string("filename", null: false)
t.string("content_type")
t.text("metadata")
t.string("service_name", null: false)
t.bigint("byte_size", null: false)
t.string("checksum", null: false)
t.datetime("created_at", null: false)
t.index(["key"], name: "index_active_storage_blobs_on_key", unique: true)
end
# ActiveStorage variant tracking (Rails 6.1+): one row per (blob, variation).
create_table "active_storage_variant_records", id: :uuid, default: -> { "gen_random_uuid()" }, force: :cascade do |t|
t.uuid("blob_id", null: false)
t.string("variation_digest", null: false)
t.index(["blob_id", "variation_digest"], name: "index_active_storage_variant_records_uniqueness", unique: true)
end
# Normalized postal addresses. Component columns (primary_number, carrier_route,
# delivery_point_barcode, county_fips, ...) mirror a USPS-style verification
# payload — presumably populated from an address-verification API; the raw
# provider response is kept in `original` (jsonb). Most text columns are citext
# so lookups are case-insensitive. Nearly every column is individually indexed.
create_table "addresses", id: :uuid, default: -> { "gen_random_uuid()" }, force: :cascade do |t|
t.uuid("integration_id")
t.citext("primary_number")
t.citext("street_predirection")
t.citext("street_name")
t.citext("street_suffix")
t.citext("street_postdirection")
t.citext("secondary_designator")
t.citext("secondary_number")
t.citext("pmb_designator")
t.citext("pmb_number")
t.citext("extra_secondary_designator")
t.citext("extra_secondary_number")
t.citext("city")
t.citext("state")
t.citext("zip_code")
t.citext("zip_code_plus_4")
t.enum("zip_code_type", as: "address_zip_code_type")
t.citext("delivery_point_barcode")
# NOTE(review): a column literally named "type" will collide with Rails STI
# conventions unless the model sets inheritance_column — verify in the model.
t.enum("type", as: "address_type")
t.enum("record_type", as: "address_record_type")
t.boolean("default_building_address")
t.citext("county")
t.citext("county_fips")
t.citext("carrier_route")
t.enum("carrier_route_type", as: "address_carrier_route_type")
t.citext("apn")
# PostGIS point (WGS 84 / SRID 4326); longitude/latitude are also stored
# redundantly as plain floats (and are NOT NULL while coords is nullable).
t.geography("coords", limit: { srid: 4326, type: "st_point", geographic: true })
t.float("longitude", null: false)
t.float("latitude", null: false)
# NOTE(review): default is the *string* "{}" rather than an empty-object
# literal — confirm the actual DB default is '{}'::jsonb.
t.jsonb("original", default: "{}", null: false)
t.uuid("created_by_id")
t.uuid("updated_by_id")
t.datetime("created_at", precision: 6, null: false)
t.datetime("updated_at", precision: 6, null: false)
t.index(["apn"], name: "index_addresses_on_apn")
t.index(["carrier_route"], name: "index_addresses_on_carrier_route")
t.index(["carrier_route_type"], name: "index_addresses_on_carrier_route_type")
t.index(["city"], name: "index_addresses_on_city")
# GiST index for spatial queries on the geography point.
t.index(["coords"], name: "index_addresses_on_coords", using: :gist)
t.index(["county"], name: "index_addresses_on_county")
t.index(["county_fips"], name: "index_addresses_on_county_fips")
t.index(["created_by_id"], name: "index_addresses_on_created_by_id")
t.index(["delivery_point_barcode"], name: "index_addresses_on_delivery_point_barcode")
t.index(["extra_secondary_designator"], name: "index_addresses_on_extra_secondary_designator")
t.index(["extra_secondary_number"], name: "index_addresses_on_extra_secondary_number")
t.index(["integration_id"], name: "index_addresses_on_integration_id")
t.index(["latitude", "longitude"], name: "index_addresses_on_latitude_and_longitude")
# GIN index supports containment queries (@>, ?) over the raw provider payload.
t.index(["original"], name: "index_addresses_on_original", using: :gin)
t.index(["pmb_designator"], name: "index_addresses_on_pmb_designator")
t.index(["pmb_number"], name: "index_addresses_on_pmb_number")
t.index(["primary_number"], name: "index_addresses_on_primary_number")
t.index(["record_type"], name: "index_addresses_on_record_type")
t.index(["secondary_designator"], name: "index_addresses_on_secondary_designator")
t.index(["secondary_number"], name: "index_addresses_on_secondary_number")
t.index(["state"], name: "index_addresses_on_state")
t.index(["street_name"], name: "index_addresses_on_street_name")
t.index(["street_postdirection"], name: "index_addresses_on_street_postdirection")
t.index(["street_predirection"], name: "index_addresses_on_street_predirection")
t.index(["street_suffix"], name: "index_addresses_on_street_suffix")
t.index(["type"], name: "index_addresses_on_type")
t.index(["updated_by_id"], name: "index_addresses_on_updated_by_id")
t.index(["zip_code"], name: "index_addresses_on_zip_code")
t.index(["zip_code_plus_4"], name: "index_addresses_on_zip_code_plus_4")
t.index(["zip_code_type"], name: "index_addresses_on_zip_code_type")
end
# Habtm join tables (no primary key). NOTE(review): the composite indexes on
# all three join tables below are not unique, so duplicate pairs are possible
# at the DB level — confirm whether uniqueness is intended.
create_table "addresses_branches", id: false, force: :cascade do |t|
t.uuid("branch_id", null: false)
t.uuid("address_id", null: false)
t.index(["branch_id", "address_id"], name: "index_addresses_branches_on_branch_id_and_address_id")
end
create_table "addresses_companies", id: false, force: :cascade do |t|
t.uuid("company_id", null: false)
t.uuid("address_id", null: false)
t.index(["company_id", "address_id"], name: "index_addresses_companies_on_company_id_and_address_id")
end
create_table "addresses_orders", id: false, force: :cascade do |t|
t.uuid("order_id", null: false)
t.uuid("address_id", null: false)
t.index(["order_id", "address_id"], name: "index_addresses_orders_on_order_id_and_address_id")
end
# Branch of a company (branches belong_to companies via company_id FK below).
# All three tables in this group carry the shared audit pattern:
# integration_id + created_by_id/updated_by_id (FKs to users) + timestamps.
create_table "branches", id: :uuid, default: -> { "gen_random_uuid()" }, force: :cascade do |t|
t.citext("name")
t.uuid("company_id", null: false)
t.uuid("integration_id")
t.uuid("created_by_id")
t.uuid("updated_by_id")
t.datetime("created_at", precision: 6, null: false)
t.datetime("updated_at", precision: 6, null: false)
t.index(["company_id"], name: "index_branches_on_company_id")
t.index(["created_by_id"], name: "index_branches_on_created_by_id")
t.index(["integration_id"], name: "index_branches_on_integration_id")
# NOTE(review): branch names are globally unique (not scoped to company_id) —
# two companies cannot both have a "Main" branch. Confirm this is intended.
t.index(["name"], name: "index_branches_on_name", unique: true)
t.index(["updated_by_id"], name: "index_branches_on_updated_by_id")
end
create_table "companies", id: :uuid, default: -> { "gen_random_uuid()" }, force: :cascade do |t|
t.citext("name", null: false)
t.uuid("integration_id")
t.uuid("created_by_id")
t.uuid("updated_by_id")
t.datetime("created_at", precision: 6, null: false)
t.datetime("updated_at", precision: 6, null: false)
t.index(["created_by_id"], name: "index_companies_on_created_by_id")
t.index(["integration_id"], name: "index_companies_on_integration_id")
# citext column, so uniqueness is case-insensitive.
t.index(["name"], name: "index_companies_on_name", unique: true)
t.index(["updated_by_id"], name: "index_companies_on_updated_by_id")
end
# Third-party integration configuration. `secrets_ciphertext` suggests
# encrypted-at-rest credentials (presumably via a gem like lockbox — TODO
# confirm); `settings` is free-form jsonb with a GIN index.
create_table "integrations", id: :uuid, default: -> { "gen_random_uuid()" }, force: :cascade do |t|
t.citext("name")
t.text("description")
t.text("documentation_url")
t.jsonb("settings", default: "{}", null: false)
t.text("secrets_ciphertext")
t.uuid("created_by_id")
t.uuid("updated_by_id")
t.datetime("created_at", precision: 6, null: false)
t.datetime("updated_at", precision: 6, null: false)
t.index(["created_by_id"], name: "index_integrations_on_created_by_id")
t.index(["name"], name: "index_integrations_on_name", unique: true)
t.index(["settings"], name: "index_integrations_on_settings", using: :gin)
t.index(["updated_by_id"], name: "index_integrations_on_updated_by_id")
end
# Orders: typed via the order_type enum ("nhd"/"flood"), lifecycle via
# order_status (defaults to "new"). `integration_reference` holds the external
# system's identifiers as jsonb (GIN-indexed).
create_table "orders", id: :uuid, default: -> { "gen_random_uuid()" }, force: :cascade do |t|
# NOTE(review): column named "type" — collides with Rails STI unless the model
# overrides inheritance_column; verify in the Order model.
t.enum("type", as: "order_type")
t.enum("status", default: "new", as: "order_status")
t.uuid("integration_id")
t.jsonb("integration_reference", default: "{}", null: false)
t.uuid("user_id")
t.uuid("company_id")
t.uuid("branch_id")
t.boolean("life_of_loan_enabled", default: false)
t.uuid("created_by_id")
t.uuid("updated_by_id")
t.datetime("created_at", precision: 6, null: false)
t.datetime("updated_at", precision: 6, null: false)
t.index(["branch_id"], name: "index_orders_on_branch_id")
t.index(["company_id"], name: "index_orders_on_company_id")
t.index(["created_by_id"], name: "index_orders_on_created_by_id")
t.index(["integration_id"], name: "index_orders_on_integration_id")
t.index(["integration_reference"], name: "index_orders_on_integration_reference", using: :gin)
t.index(["status"], name: "index_orders_on_status")
t.index(["type"], name: "index_orders_on_type")
t.index(["updated_by_id"], name: "index_orders_on_updated_by_id")
t.index(["user_id"], name: "index_orders_on_user_id")
end
# Roles with optional polymorphic resource scoping (resource_type/resource_id)
# — the shape used by the rolify gem (presumably; confirm against the Gemfile).
create_table "roles", id: :uuid, default: -> { "gen_random_uuid()" }, force: :cascade do |t|
t.string("name")
t.uuid("integration_id")
t.string("resource_type")
t.uuid("resource_id")
t.uuid("created_by_id")
t.uuid("updated_by_id")
t.datetime("created_at", precision: 6, null: false)
t.datetime("updated_at", precision: 6, null: false)
t.index(["created_by_id"], name: "index_roles_on_created_by_id")
t.index(["integration_id"], name: "index_roles_on_integration_id")
t.index(["name", "resource_type", "resource_id"], name: "index_roles_on_name_and_resource_type_and_resource_id")
t.index(["resource_type", "resource_id"], name: "index_roles_on_resource")
t.index(["updated_by_id"], name: "index_roles_on_updated_by_id")
end
# Users: Devise-style columns (encrypted_password, reset_password_*,
# confirmation_*, lockable counters, invitation_* as in devise_invitable).
# Emails are unique per integration, not globally (composite index below).
create_table "users", id: :uuid, default: -> { "gen_random_uuid()" }, force: :cascade do |t|
t.citext("first_name", null: false)
t.citext("last_name", null: false)
t.citext("email", default: "", null: false)
t.uuid("integration_id", null: false)
t.uuid("company_id")
t.uuid("branch_id")
t.string("encrypted_password", default: "", null: false)
t.string("reset_password_token")
t.datetime("reset_password_sent_at")
t.datetime("remember_created_at")
t.integer("sign_in_count", default: 0, null: false)
t.datetime("current_sign_in_at")
t.datetime("last_sign_in_at")
t.string("current_sign_in_ip")
t.string("last_sign_in_ip")
t.string("confirmation_token")
t.datetime("confirmed_at")
t.datetime("confirmation_sent_at")
t.string("unconfirmed_email")
t.integer("failed_attempts", default: 0, null: false)
t.string("unlock_token")
t.datetime("locked_at")
t.string("time_zone")
t.uuid("created_by_id")
t.uuid("updated_by_id")
t.datetime("created_at", precision: 6, null: false)
t.datetime("updated_at", precision: 6, null: false)
# Invitation token stored encrypted (ciphertext suffix), unlike the plain
# Devise token columns above.
t.string("invitation_token_ciphertext")
t.datetime("invitation_created_at")
t.datetime("invitation_sent_at")
t.datetime("invitation_accepted_at")
t.integer("invitation_limit")
t.string("invited_by_type")
t.uuid("invited_by_id")
t.integer("invitations_count", default: 0)
t.index(["branch_id"], name: "index_users_on_branch_id")
t.index(["company_id"], name: "index_users_on_company_id")
t.index(["confirmation_token"], name: "index_users_on_confirmation_token", unique: true)
t.index(["created_by_id"], name: "index_users_on_created_by_id")
t.index(["email", "integration_id"], name: "index_users_on_email_and_integration_id", unique: true)
t.index(["first_name", "last_name"], name: "index_users_on_first_name_and_last_name")
t.index(["first_name"], name: "index_users_on_first_name")
t.index(["integration_id"], name: "index_users_on_integration_id")
t.index(["invitation_token_ciphertext"], name: "index_users_on_invitation_token_ciphertext", unique: true)
t.index(["invited_by_id"], name: "index_users_on_invited_by_id")
t.index(["invited_by_type", "invited_by_id"], name: "index_users_on_invited_by")
t.index(["last_name"], name: "index_users_on_last_name")
t.index(["reset_password_token"], name: "index_users_on_reset_password_token", unique: true)
t.index(["time_zone"], name: "index_users_on_time_zone")
t.index(["unlock_token"], name: "index_users_on_unlock_token", unique: true)
t.index(["updated_by_id"], name: "index_users_on_updated_by_id")
end
# users <-> roles habtm join table. NOTE(review): unlike the other join tables,
# user_id/role_id are nullable here, and the composite index is not unique.
create_table "users_roles", id: false, force: :cascade do |t|
t.uuid("user_id")
t.uuid("role_id")
t.index(["user_id", "role_id"], name: "index_users_roles_on_user_id_and_role_id")
end
# Foreign keys. Polymorphic pairs (active_storage_attachments.record,
# roles.resource, users.invited_by) intentionally have no FK. Every table's
# created_by_id/updated_by_id audit columns point at users, including users
# itself (self-referential).
add_foreign_key "active_storage_attachments", "active_storage_blobs", column: "blob_id"
add_foreign_key "active_storage_variant_records", "active_storage_blobs", column: "blob_id"
add_foreign_key "addresses", "integrations"
add_foreign_key "addresses", "users", column: "created_by_id"
add_foreign_key "addresses", "users", column: "updated_by_id"
add_foreign_key "branches", "companies"
add_foreign_key "branches", "integrations"
add_foreign_key "branches", "users", column: "created_by_id"
add_foreign_key "branches", "users", column: "updated_by_id"
add_foreign_key "companies", "integrations"
add_foreign_key "companies", "users", column: "created_by_id"
add_foreign_key "companies", "users", column: "updated_by_id"
add_foreign_key "integrations", "users", column: "created_by_id"
add_foreign_key "integrations", "users", column: "updated_by_id"
add_foreign_key "orders", "integrations"
add_foreign_key "orders", "users", column: "created_by_id"
add_foreign_key "orders", "users", column: "updated_by_id"
add_foreign_key "roles", "integrations"
add_foreign_key "roles", "users", column: "created_by_id"
add_foreign_key "roles", "users", column: "updated_by_id"
add_foreign_key "users", "integrations"
add_foreign_key "users", "users", column: "created_by_id"
add_foreign_key "users", "users", column: "updated_by_id"
# NOTE(review): no FKs are declared for orders.user_id/company_id/branch_id,
# users.company_id/branch_id, branches/companies join tables, or
# active_storage_attachments.integration_id — confirm whether that is intended.
# plpgsql helpers for trigger-based audit logging (naming matches the Logidze
# gem — presumably dumped by the fx/scenic-style `create_function` DSL; TODO
# confirm against the Gemfile). The heredoc bodies are raw SQL and must be kept
# byte-for-byte as generated.
#
# logidze_compact_history(log_data, cutoff): repeatedly merges the two oldest
# entries of log_data->'h' (keeping the newer ts/v, concatenating the 'c'
# change sets, preserving 'm' metadata) and drops the oldest, `cutoff` times.
# Used below by logidze_logger to enforce a history size limit.
create_function :logidze_compact_history, sql_definition: <<-SQL
CREATE OR REPLACE FUNCTION public.logidze_compact_history(log_data jsonb, cutoff integer DEFAULT 1)
RETURNS jsonb
LANGUAGE plpgsql
AS $function$
DECLARE
merged jsonb;
BEGIN
LOOP
merged := jsonb_build_object(
'ts',
log_data#>'{h,1,ts}',
'v',
log_data#>'{h,1,v}',
'c',
(log_data#>'{h,0,c}') || (log_data#>'{h,1,c}')
);
IF (log_data#>'{h,1}' ? 'm') THEN
merged := jsonb_set(merged, ARRAY['m'], log_data#>'{h,1,m}');
END IF;
log_data := jsonb_set(
log_data,
'{h}',
jsonb_set(
log_data->'h',
'{1}',
merged
) - 0
);
cutoff := cutoff - 1;
EXIT WHEN cutoff <= 0;
END LOOP;
return log_data;
END;
$function$
SQL
# logidze_filter_keys(obj, keys, include_columns): with include_columns=true,
# returns only the listed keys of obj; otherwise returns obj minus those keys.
# Used to honor column whitelists/blacklists in the logger and snapshot below.
create_function :logidze_filter_keys, sql_definition: <<-SQL
CREATE OR REPLACE FUNCTION public.logidze_filter_keys(obj jsonb, keys text[], include_columns boolean DEFAULT false)
RETURNS jsonb
LANGUAGE plpgsql
AS $function$
DECLARE
res jsonb;
key text;
BEGIN
res := '{}';
IF include_columns THEN
FOREACH key IN ARRAY keys
LOOP
IF obj ? key THEN
res = jsonb_insert(res, ARRAY[key], obj->key);
END IF;
END LOOP;
ELSE
res = obj;
FOREACH key IN ARRAY keys
LOOP
res = res - key;
END LOOP;
END IF;
RETURN res;
END;
$function$
SQL
# logidze_logger(): trigger function that maintains a `log_data` jsonb column
# of shape {"v": current_version, "h": [{"v", "ts", "c", "m"?}, ...]} on the
# row itself. Trigger arguments (TG_ARGV): 0=history_limit, 1=ts_column,
# 2=columns filter, 3=include_columns flag, 4=debounce_time (ms). On INSERT it
# seeds a full snapshot; on UPDATE it appends a diff computed via
# hstore(NEW.*) - hstore(OLD.*) (hence the hstore extension above), debounces
# rapid successive versions, truncates history past undo'd versions, and
# compacts history beyond history_limit via logidze_compact_history.
# NOTE(review): no table in this dump declares a log_data column or attaches
# this trigger — presumably added by later migrations; confirm.
# The heredoc body is raw SQL and must remain byte-for-byte as generated.
create_function :logidze_logger, sql_definition: <<-SQL
CREATE OR REPLACE FUNCTION public.logidze_logger()
RETURNS trigger
LANGUAGE plpgsql
AS $function$
DECLARE
changes jsonb;
version jsonb;
snapshot jsonb;
new_v integer;
size integer;
history_limit integer;
debounce_time integer;
current_version integer;
merged jsonb;
iterator integer;
item record;
columns text[];
include_columns boolean;
ts timestamp with time zone;
ts_column text;
BEGIN
ts_column := NULLIF(TG_ARGV[1], 'null');
columns := NULLIF(TG_ARGV[2], 'null');
include_columns := NULLIF(TG_ARGV[3], 'null');
IF TG_OP = 'INSERT' THEN
-- always exclude log_data column
changes := to_jsonb(NEW.*) - 'log_data';
IF columns IS NOT NULL THEN
snapshot = logidze_snapshot(changes, ts_column, columns, include_columns);
ELSE
snapshot = logidze_snapshot(changes, ts_column);
END IF;
IF snapshot#>>'{h, -1, c}' != '{}' THEN
NEW.log_data := snapshot;
END IF;
ELSIF TG_OP = 'UPDATE' THEN
IF OLD.log_data is NULL OR OLD.log_data = '{}'::jsonb THEN
-- always exclude log_data column
changes := to_jsonb(NEW.*) - 'log_data';
IF columns IS NOT NULL THEN
snapshot = logidze_snapshot(changes, ts_column, columns, include_columns);
ELSE
snapshot = logidze_snapshot(changes, ts_column);
END IF;
IF snapshot#>>'{h, -1, c}' != '{}' THEN
NEW.log_data := snapshot;
END IF;
RETURN NEW;
END IF;
history_limit := NULLIF(TG_ARGV[0], 'null');
debounce_time := NULLIF(TG_ARGV[4], 'null');
current_version := (NEW.log_data->>'v')::int;
IF ts_column IS NULL THEN
ts := statement_timestamp();
ELSE
ts := (to_jsonb(NEW.*)->>ts_column)::timestamp with time zone;
IF ts IS NULL OR ts = (to_jsonb(OLD.*)->>ts_column)::timestamp with time zone THEN
ts := statement_timestamp();
END IF;
END IF;
IF NEW = OLD THEN
RETURN NEW;
END IF;
IF current_version < (NEW.log_data#>>'{h,-1,v}')::int THEN
iterator := 0;
FOR item in SELECT * FROM jsonb_array_elements(NEW.log_data->'h')
LOOP
IF (item.value->>'v')::int > current_version THEN
NEW.log_data := jsonb_set(
NEW.log_data,
'{h}',
(NEW.log_data->'h') - iterator
);
END IF;
iterator := iterator + 1;
END LOOP;
END IF;
changes := '{}';
IF (coalesce(current_setting('logidze.full_snapshot', true), '') = 'on') THEN
changes = hstore_to_jsonb_loose(hstore(NEW.*));
ELSE
changes = hstore_to_jsonb_loose(
hstore(NEW.*) - hstore(OLD.*)
);
END IF;
changes = changes - 'log_data';
IF columns IS NOT NULL THEN
changes = logidze_filter_keys(changes, columns, include_columns);
END IF;
IF changes = '{}' THEN
RETURN NEW;
END IF;
new_v := (NEW.log_data#>>'{h,-1,v}')::int + 1;
size := jsonb_array_length(NEW.log_data->'h');
version := logidze_version(new_v, changes, ts);
IF (
debounce_time IS NOT NULL AND
(version->>'ts')::bigint - (NEW.log_data#>'{h,-1,ts}')::text::bigint <= debounce_time
) THEN
-- merge new version with the previous one
new_v := (NEW.log_data#>>'{h,-1,v}')::int;
version := logidze_version(new_v, (NEW.log_data#>'{h,-1,c}')::jsonb || changes, ts);
-- remove the previous version from log
NEW.log_data := jsonb_set(
NEW.log_data,
'{h}',
(NEW.log_data->'h') - (size - 1)
);
END IF;
NEW.log_data := jsonb_set(
NEW.log_data,
ARRAY['h', size::text],
version,
true
);
NEW.log_data := jsonb_set(
NEW.log_data,
'{v}',
to_jsonb(new_v)
);
IF history_limit IS NOT NULL AND history_limit <= size THEN
NEW.log_data := logidze_compact_history(NEW.log_data, size - history_limit + 1);
END IF;
END IF;
return NEW;
END;
$function$
SQL
# logidze_snapshot(item, ts_column, columns, include_columns): builds the
# initial log_data value {"v": 1, "h": [version-1 entry]} from a full row
# snapshot, using ts_column (falling back to statement_timestamp()) for the
# entry timestamp and logidze_filter_keys for optional column filtering.
# Heredoc body is raw SQL — keep byte-for-byte as generated.
create_function :logidze_snapshot, sql_definition: <<-SQL
CREATE OR REPLACE FUNCTION public.logidze_snapshot(item jsonb, ts_column text DEFAULT NULL::text, columns text[] DEFAULT NULL::text[], include_columns boolean DEFAULT false)
RETURNS jsonb
LANGUAGE plpgsql
AS $function$
DECLARE
ts timestamp with time zone;
BEGIN
IF ts_column IS NULL THEN
ts := statement_timestamp();
ELSE
ts := coalesce((item->>ts_column)::timestamp with time zone, statement_timestamp());
END IF;
IF columns IS NOT NULL THEN
item := logidze_filter_keys(item, columns, include_columns);
END IF;
return json_build_object(
'v', 1,
'h', jsonb_build_array(
logidze_version(1, item, ts)
)
);
END;
$function$
SQL
# logidze_version(v, data, ts): builds a single history entry
# {"ts": epoch-ms, "v": v, "c": data}, attaching "m" metadata when the
# logidze.meta session setting is present.
create_function :logidze_version, sql_definition: <<-SQL
CREATE OR REPLACE FUNCTION public.logidze_version(v bigint, data jsonb, ts timestamp with time zone)
RETURNS jsonb
LANGUAGE plpgsql
AS $function$
DECLARE
buf jsonb;
BEGIN
buf := jsonb_build_object(
'ts',
(extract(epoch from ts) * 1000)::bigint,
'v',
v,
'c',
data
);
IF coalesce(current_setting('logidze.meta', true), '') <> '' THEN
buf := jsonb_insert(buf, '{m}', current_setting('logidze.meta')::jsonb);
END IF;
RETURN buf;
END;
$function$
SQL
end
Sign up for free to join this conversation on GitHub. Already have an account? Sign in to comment