Generic data auditing using triggers - since Oracle Autonomous Database does not have Flashback Data Archive (FDA)
--liquibase formatted sql
--changeset ilmker:table_audit_data_archive
--comment: Since the Autonomous Database does not support FDA, a custom data versioning solution is needed. You can make this an immutable table if you want.
create table audit_data_archive (
id raw(16) default sys_guid() not null, -- don't want it to be primary key to avoid global index on it.
table_name varchar2(120) not null,
table_id number(20) not null,
table_action varchar(1) not null,
change_time_utc timestamp default sys_extract_utc(systimestamp) not null,
txn_instance number(4) default to_number(sys_context('USERENV','INSTANCE')),
txn_local_transaction_id varchar2(100), --default DBMS_TRANSACTION.LOCAL_TRANSACTION_ID
change_context varchar2(4000) check (change_context is json),
new_row blob check (new_row is json)
) pctfree 0 partition by range (change_time_utc) interval (numtoyminterval(1, 'MONTH')) (
partition p_first values less than (timestamp'2022-05-01 00:00:00')
);
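-- Illustrative example (not part of the changeset, values are made up): once audit rows exist,
-- the history of a single row can be read back with the JSON functions, e.g. for an audited
-- table T1 with a numeric column A:
--   SELECT change_time_utc, table_action,
--          JSON_VALUE(new_row, '$.A' RETURNING NUMBER) AS a
--   FROM audit_data_archive
--   WHERE table_name = 'T1' AND table_id = 42
--   ORDER BY change_time_utc;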
create global temporary table audit_changed_rows (
table_action varchar(1) not null,
table_name varchar2(120) not null,
table_id number(20) not null
) on commit delete rows;
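-- How the two tables work together: a row-level trigger on each audited table only records
-- (table name, id, action) into audit_changed_rows; at the end of the statement flush_audit
-- snapshots the current row content as JSON into audit_data_archive and clears the temporary table.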
--changeset ilmker:package_audit_data_archive_util splitStatements:false runOnChange:true stripComments:false
CREATE OR REPLACE PACKAGE audit_data_archive_util IS
-- Simple generic data auditing package since Oracle Autonomous Database does not have Flashback Data Archive
-- 2022 Ilmar Kerm
PROCEDURE flush_audit;
PROCEDURE cleanup;
END;
--changeset ilmker:package_body_audit_data_archive_util splitStatements:false runOnChange:true stripComments:false
create or replace PACKAGE BODY audit_data_archive_util IS
-- Simple generic data auditing package since Oracle Autonomous Database does not have Flashback Data Archive
-- 2022 Ilmar Kerm
FUNCTION audit_column_list_json(p_table_name IN user_tables.table_name%type) RETURN varchar2 RESULT_CACHE IS
v_result VARCHAR2(3500);
BEGIN
-- Get the list of all non-LOB columns of a table, for use in the JSON_OBJECT function
-- The only reason is to avoid saving images and documents into the history table, to save disk space
-- If this is not needed, simply make the function return '*'
SELECT listagg(''''||column_name||''' value "'||column_name||'"',',') within group (order by column_id)
INTO v_result
FROM user_tab_columns WHERE table_name = p_table_name and data_type not in ('BLOB','CLOB','LONG','XMLTYPE','JSON');
RETURN v_result;
END;
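-- Illustrative example of the generated column list (for the T1 test table created below):
--   'ID' value "ID",'A' value "A",'B' value "B",'C' value "C",'D' value "D"
-- This string is embedded into the JSON_OBJECT(...) call in the dynamic SQL of flush_audit.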
FUNCTION current_context RETURN audit_data_archive.change_context%type IS
type st_type is table of varchar2(32);
st st_type:= st_type('ACTION','MODULE','CLIENT_IDENTIFIER','CLIENT_INFO','CLIENT_PROGRAM_NAME','CURRENT_USER','SCHEDULER_JOB','HOST');
v_json JSON_OBJECT_T;
BEGIN
-- Format some current session information as JSON
v_json:= JSON_OBJECT_T();
FOR i in 1..st.count LOOP
IF SYS_CONTEXT('USERENV', st(i)) IS NOT NULL THEN
v_json.put(st(i), SYS_CONTEXT('USERENV', st(i)));
END IF;
END LOOP;
RETURN v_json.to_string;
END;
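-- Illustrative example of the returned context JSON (only attributes that are set are included,
-- the values below are made up):
--   {"MODULE":"SQL Developer","CURRENT_USER":"APP_OWNER","HOST":"app-host-01"}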
PROCEDURE flush_audit IS
v_context audit_data_archive.change_context%type;
v_txn_id audit_data_archive.txn_local_transaction_id%type;
v_change_time audit_data_archive.change_time_utc%type;
BEGIN
-- Write audit records
-- NB! A single statement can modify data in multiple tables (a multi-table insert, for example)
v_context:= current_context;
v_txn_id:= DBMS_TRANSACTION.LOCAL_TRANSACTION_ID;
v_change_time:= sys_extract_utc(systimestamp); -- so that all records changed by the same statement get the same timestamp
--
FOR rec IN (SELECT DISTINCT table_name FROM audit_changed_rows WHERE table_action IN ('I','U')) LOOP
-- Add inserted and updated records
EXECUTE IMMEDIATE '
INSERT INTO audit_data_archive (table_name, table_id, table_action, txn_local_transaction_id, change_context, change_time_utc, new_row)
SELECT a.table_name, a.table_id, a.table_action, :1, :2, :3, t.new_row
FROM audit_changed_rows a JOIN (
-- This is just here as a subquery in order to avoid namespace issues with generated JSON_OBJECT column list
SELECT id, JSON_OBJECT('||audit_column_list_json(rec.table_name)||' NULL ON NULL RETURNING BLOB) new_row
FROM "'||rec.table_name||'"
) t ON t.id = a.table_id
WHERE a.table_action IN (''I'',''U'') AND a.table_name = :4
' USING v_txn_id, v_context, v_change_time, rec.table_name;
END LOOP;
-- Add deleted records
INSERT INTO audit_data_archive (table_name, table_id, table_action, txn_local_transaction_id, change_context, change_time_utc)
SELECT table_name, table_id, table_action, v_txn_id, v_context, v_change_time
FROM audit_changed_rows WHERE table_action = 'D';
-- Clean up manually, since the same transaction might start changing rows again and the temporary table is only cleared automatically at the end of the transaction
DELETE FROM audit_changed_rows;
END;
PROCEDURE cleanup IS
v_retention NUMBER;
v_drop_time TIMESTAMP;
v_parttime TIMESTAMP;
BEGIN
-- Drop old audit partitions that are older than retention time
EXECUTE IMMEDIATE 'ALTER SESSION SET ddl_lock_timeout=30';
-- Retention time calculation
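-- parameter_util.get_value_n is an external configuration helper (not included in this gist);
-- if it returns NULL the retention defaults to 3 months, and values below 2 are raised to 2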
v_retention:= NVL(parameter_util.get_value_n('audit.retention_months'), 3);
IF v_retention < 2 THEN
v_retention:= 2;
END IF;
v_drop_time:= TRUNC(sys_extract_utc(systimestamp), 'MM') - NUMTOYMINTERVAL(v_retention-1, 'MONTH');
--
FOR rec IN (SELECT partition_name, high_value FROM user_tab_partitions WHERE table_name = 'AUDIT_DATA_ARCHIVE' AND partition_position > 1 ORDER BY partition_position) LOOP
-- This converts LONG into usable TIMESTAMP
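-- (high_value is a LONG holding a literal such as TIMESTAMP' 2022-06-01 00:00:00' - an
--  illustrative boundary value - so executing it inside an anonymous block and assigning it
--  to an OUT bind yields a proper TIMESTAMP)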
EXECUTE IMMEDIATE 'BEGIN :1 := '||rec.high_value||'; END;' USING OUT v_parttime;
IF v_parttime <= v_drop_time THEN
--dbms_output.put_line(rec.partition_name);
EXECUTE IMMEDIATE 'ALTER TABLE AUDIT_DATA_ARCHIVE DROP PARTITION "'||rec.partition_name||'"';
ELSE
EXIT;
END IF;
END LOOP;
END;
END;
--changeset ilmker:job_audit_data_archive_cleanup splitStatements:false runOnChange:true stripComments:false
--comment: Create scheduler job to remove old audit records
DECLARE
p_job_name VARCHAR2(32):= 'AUDIT_DATA_ARCHIVE_CLEANUP';
p_cnt NUMBER;
BEGIN
SELECT count(*) INTO p_cnt FROM user_scheduler_jobs WHERE job_name = p_job_name;
IF p_cnt > 0 THEN
dbms_scheduler.drop_job(job_name=>p_job_name);
END IF;
dbms_scheduler.create_job(job_name=>p_job_name, job_type=>'PLSQL_BLOCK', job_action=>'BEGIN audit_data_archive_util.cleanup; END;', start_date=>systimestamp+interval '5' minute, repeat_interval=>'FREQ=MONTHLY', enabled=>true);
END;
--changeset ilmker:table_t1
--comment: Just a small table to test auditing
create table t1 (
id number primary key,
a number,
b varchar2(10),
c timestamp,
d timestamp with time zone
);
--changeset ilmker:trigger_t1_audit splitStatements:false runOnChange:true stripComments:false
CREATE OR REPLACE TRIGGER audit_t1 FOR insert OR update OR delete ON t1 COMPOUND TRIGGER
AFTER EACH ROW IS
p_action varchar(1);
p_table varchar2(32):= 'T1';
p_id audit_changed_rows.table_id%type;
BEGIN
-- Record the fact that something was changed in the current row
p_action:= CASE WHEN inserting THEN 'I' WHEN updating THEN 'U' WHEN deleting THEN 'D' END;
p_id:= CASE WHEN deleting THEN :OLD.id ELSE :NEW.id END;
INSERT INTO audit_changed_rows (table_action, table_name, table_id) VALUES (p_action, p_table, p_id);
END AFTER EACH ROW;
AFTER STATEMENT IS
BEGIN
-- Execute auditing
audit_data_archive_util.flush_audit;
END AFTER STATEMENT;
END;
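-- Illustrative smoke test (kept as comments so it is not part of the changelog; values are made up):
--   INSERT INTO t1 (id, a, b) VALUES (1, 10, 'first');
--   UPDATE t1 SET a = 20 WHERE id = 1;
--   DELETE FROM t1 WHERE id = 1;
--   -- Expect one 'I', one 'U' (both with the row snapshot in new_row) and one 'D' record:
--   SELECT table_action, change_time_utc, json_serialize(new_row) AS new_row
--   FROM audit_data_archive
--   WHERE table_name = 'T1' AND table_id = 1
--   ORDER BY change_time_utc;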