Skip to content

Instantly share code, notes, and snippets.

@DanielFGray
Last active October 21, 2023 17:15
Show Gist options
  • Save DanielFGray/dc3d626f9d41ee7e394385073fcda25e to your computer and use it in GitHub Desktop.
begin;
-- user accounts
create extension if not exists citext; -- case-insensitive text, used below for usernames and emails
-- account identity only; credentials and contact info live in side tables
create table users (
user_id int primary key generated always as identity,
username citext unique not null,
created_at timestamptz not null default now()
);
-- no separate index on username: the unique constraint above already builds
-- a btree index that serves equality lookups, so the old explicit one was redundant
-- kept in a separate table so a careless "select * from users" can never leak credentials
create table user_secrets (
user_id int primary key references users,
-- nullable: presumably accounts without a local password (e.g. oauth-only) -- TODO confirm
password_hash text
);
create table user_emails (
email_id int primary key generated always as identity,
-- not null + cascade: an email row is meaningless without its owner, and the
-- original nullable fk with no delete action would have left orphan rows
user_id int not null references users on delete cascade,
-- the check is a cheap sanity filter, not full rfc validation
email citext not null check(email ~ '[^@]+@[^@]+\.[^@]+'),
-- each address may only be claimed once system-wide
unique (email)
);
create index on user_emails (user_id); -- "all emails for this user" lookups
create table user_payments (
payment_id int primary key generated always as identity,
user_id int references users,
-- WARNING(review): storing raw card numbers and expirations in plaintext
-- violates PCI DSS; outside a toy schema store a payment-processor token instead
cc_number text not null,
expiration text not null
);
-- the auction stuff
create table auctions (
auction_id int primary key generated always as identity,
owner_id int references users on delete set null, -- keep auction history when the owner is removed
title text not null,
description text not null,
minimum_bid int not null check(minimum_bid > 0), -- presumably whole currency units -- TODO confirm
-- optional buy-it-now price; the check closes the original gap where it
-- could be set below the minimum bid
buy_it_price int check(buy_it_price is null or buy_it_price >= minimum_bid),
created_at timestamptz not null default now(),
closes_at timestamptz not null default now() + interval '5 days' -- explicit interval literal
);
create index on auctions(owner_id); -- "auctions owned by this user" lookups
create table auction_files (
file_id int primary key generated always as identity,
auction_id int not null references auctions on delete cascade, -- files die with their auction
path text not null, -- presumably a filesystem/object-store location -- TODO confirm
created_at timestamptz not null default now()
);
create index on auction_files(auction_id); -- "all files for this auction" and fk cascade support
create table bids (
bid_id int primary key generated always as identity,
auction_id int not null references auctions on delete cascade,
-- the original "not null ... on delete set null" was self-contradictory:
-- deleting a user would try to set user_id to null and immediately violate
-- the not-null constraint. Nullable here so bid history survives account deletion.
user_id int references users on delete set null,
amount int not null check(amount > 0),
created_at timestamptz not null default now()
);
create index on bids(auction_id);
create index on bids(user_id);
create index on bids(created_at desc); -- supports the "latest bids first" ordering
-- trigger to keep data sane: a new bid must beat the current highest bid
-- for its auction and meet the auction's minimum bid
create or replace function tg__increasing_bids() returns trigger as $$
declare
v_highest_bid int;
v_minimum_bid int;
begin
-- answers the old inline TODO: one round trip fetches both the auction's
-- minimum and the current highest bid, instead of two separate selects per insert
select a.minimum_bid, max(b.amount)
into v_minimum_bid, v_highest_bid
from auctions a
left join bids b on b.auction_id = a.auction_id
where a.auction_id = NEW.auction_id
group by a.minimum_bid;
-- v_highest_bid is null when there are no prior bids; "null >= NEW.amount" is
-- unknown (treated as false), so a first bid is judged against the minimum only
if v_highest_bid >= NEW.amount or v_minimum_bid > NEW.amount then
raise exception 'bid too low' using errcode = 'NENUF'; -- custom errcode so clients can catch it
end if;
-- NOTE(review): two concurrent inserts can both read the same highest bid and
-- both pass; serialize per auction (e.g. pg_advisory_xact_lock) if that matters
return NEW;
end;
$$ language plpgsql;
create trigger _500_increasing_bids
before insert on bids
for each row
execute procedure tg__increasing_bids();
-- some fake data
insert into users
(username)
values
('alice'),
('bob'),
('charlie'),
('dan'),
('eli');
-- NOTE(review): the hard-coded owner/user/auction ids below assume a fresh
-- identity sequence starting at 1; acceptable for this throwaway test script
insert into auctions
(owner_id, title, description, minimum_bid, buy_it_price)
values
(1, 'xbox 360', 'still works', 20, 200),
(3, 'ps5', 'brand new, mint condition', 500, null);
insert into bids (auction_id, user_id, amount) values
(1, 2, 20),
(2, 4, 500);
analyze; -- refresh planner stats; spelled "analyze" to match the explain below
set local enable_seqscan = off; -- discourage sequential scans so the plan reveals whether the indexes are used
-- example query fetching all latest bids
explain analyze
select
bidder,
amount,
title,
u.username as owner
from bids
join auctions using(auction_id)
-- left join: auctions.owner_id is "on delete set null", so ownerless auctions
-- exist; the original inner join would silently drop their bids
left join users u on u.user_id = auctions.owner_id
-- explicit "cross join lateral" replaces the error-prone implicit comma join
cross join lateral (
select username as bidder from users where user_id = bids.user_id
) as get_username
order by bids.created_at desc;
-- rollback both undoes the test data and ends the transaction; the old
-- trailing "end;" (an alias for commit) only raised "no transaction in progress"
rollback; -- destroy everything, this was just a test
Sign up for free to join this conversation on GitHub. Already have an account? Sign in to comment