Douglas Moore (dmoore247)
<!DOCTYPE html>
<html lang="en">
<head>
<meta charset="UTF-8">
<meta name="viewport" content="width=device-width, initial-scale=1.0">
<title>PHI Privacy Enhancement</title>
<!-- Tailwind CSS CDN -->
<script src="https://cdn.tailwindcss.com"></script>
<link href="https://fonts.googleapis.com/css2?family=Inter:wght@400;500;600;700&display=swap" rel="stylesheet">
<style>
dmoore247 / databricks-backup-notebook.py (created April 19, 2025)
Unity Catalog metadata backup script
#
# For every table in the catalog's information_schema, write a backup copy to the storage location.
# Assumes catalog_name and storage_location are defined earlier in the notebook (e.g. via widgets).
#
table_list = spark.catalog.listTables(f"`{catalog_name}`.information_schema")
for table in table_list:
    print(f'backing up {table.catalog}.information_schema.{table.name} to {storage_location}/{table.name}...')
    info_schema_table_df = spark.sql(f"SELECT * FROM {table.catalog}.information_schema.{table.name}")
    info_schema_table_df.write.format("delta").mode("overwrite").save(f"{storage_location}/{table.name}")
print('backup complete')
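
Each backup lands as a standalone Delta table under the storage location, so any one of them can be read straight back with the Delta reader. Below is a minimal verification sketch, assuming the same storage_location variable is in scope and that the information_schema "tables" table was among the backups; the chosen table name is just one example target.

# Minimal verification sketch (assumes the storage_location used above and
# an active Spark session; 'tables' is one of the backed-up targets):
tables_backup_df = spark.read.format("delta").load(f"{storage_location}/tables")
print(f"'tables' backup contains {tables_backup_df.count()} rows")
tables_backup_df.limit(5).show(truncate=False)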

from sqlglot import exp

def update_from_to_merge_into(expression: exp.Expression) -> exp.Expression:
    """Transform an UPDATE ... FROM statement into an equivalent MERGE INTO."""
    from_expression = expression.find(exp.From)
    if isinstance(expression, exp.Update) and from_expression:
        joins = from_expression.find_all(exp.Join)
        n_joins = len(list(joins))
        # Only the single-join case is handled; the join's ON clause supplies the MERGE condition.
        if n_joins == 1:
            join = from_expression.find(exp.Join)
            if join:
                on = join.args.get("on")