Read all table comments from a database and write to JSON files
#
# Read all table comments from a database and write them to JSON files
# * Tables without a comment are skipped
# * Output file name format: <schema>.<table>.json
#
import json
import os

from sqlalchemy import MetaData, Table, create_engine, inspect

# --- CONFIG -----------------------------------------------------------------------
db_connection_url = 'postgresql+psycopg2://<USER>:<PASS>@<HOST>:<PORT>/egon-data'
outpath = '/path/to/store/jsons/'
# Exclude system schemas
exclude_schemas = ['pg_catalog', 'information_schema']
# Set fields id and publicationDate to some value to make OMI happy
fill_id_and_pubdate = False
# ----------------------------------------------------------------------------------

con = create_engine(db_connection_url)
inspector = inspect(con)

for schema in [s for s in inspector.get_schema_names() if s not in exclude_schemas]:
    for table in inspector.get_table_names(schema=schema):
        # Reflect the table so its comment becomes available
        table_obj = Table(table,
                          MetaData(),
                          schema=schema,
                          autoload_with=con)
        outfile = os.path.join(outpath, f'{schema}.{table}.json')
        if table_obj.comment is not None:
            with open(outfile, 'w', encoding='utf-8') as f:
                # Table comments are expected to hold metadata as a JSON string
                comment = json.loads(table_obj.comment)
                if fill_id_and_pubdate:
                    comment['id'] = 'none'
                    comment['publicationDate'] = '2021-09-23'
                json.dump(comment, f, ensure_ascii=False, indent=4)
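Note that the script assumes every table comment contains a valid JSON string (presumably the metadata used with OMI); json.loads will raise an error on any non-JSON comment. As an optional, illustrative sanity check (not part of the original gist), the exported files can be re-read afterwards to confirm that each one parses as JSON; outpath below is assumed to be the same directory as configured above.

# Optional sanity check: re-read every exported file and confirm it parses as JSON
import json
import os

outpath = '/path/to/store/jsons/'  # same directory as configured in the script above

for fname in sorted(os.listdir(outpath)):
    if fname.endswith('.json'):
        with open(os.path.join(outpath, fname), encoding='utf-8') as f:
            json.load(f)  # raises json.JSONDecodeError if the file is not valid JSON
        print(f'OK: {fname}')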