@mortenpi
Last active September 6, 2015 20:38
A quick hack that generates a sqlite database from the yumdb and the repository cache, to make it easier to see which packages are installed and which are available.
#!/usr/bin/env python3
import argparse
import os
import os.path
import re
import sqlite3


def yumdb_packages(path):
    # The yumdb groups package directories under single-character prefix
    # directories; walk them in sorted order and yield the entry names.
    dirs = os.listdir(path)
    dirs.sort()
    for d in dirs:
        for pname in sorted(os.listdir(os.path.join(path, d))):
            yield pname


def scan_yumdb(conn, path, tablename):
    if tablename is None:
        tablename = 'installed'
    print('Scanning YUMDB at {}'.format(path))
    c = conn.cursor()
    c.execute('CREATE TABLE {} (name, version, release, arch, hash)'.format(tablename))
    conn.commit()
    # yumdb entries are named <hash>-<name>-<version>-<release>-<arch>
    r = re.compile('([0-9a-f]+)-([A-Za-z0-9._+-]+)-([A-Za-z0-9.+]+)-([A-Za-z0-9._]+)-([a-z0-9_]+)')
    for p in yumdb_packages(path):
        m = r.match(p)
        if m is None:
            print('ERROR: no match for `{}`'.format(p))
            continue
        #hash,name,version,release,arch = m.groups()
        c.execute('INSERT INTO {} (hash,name,version,release,arch) VALUES (?,?,?,?,?)'.format(tablename), m.groups())
    conn.commit()


def merge_single_cache(conn, repo, dbfile, tablename):
    # Copy the packages table of a single repository cache database into the
    # output table, tagging each row with the repository name.
    print('> merging: {}'.format(dbfile))
    c = conn.cursor()
    connin = sqlite3.connect(dbfile)
    cin = connin.cursor()
    cin.execute('SELECT name,arch,version,release FROM packages')
    for pkg in cin.fetchall():
        #print((repo,) + pkg)
        c.execute('INSERT INTO {} (repo,name,arch,version,release) VALUES (?,?,?,?,?)'.format(tablename), (repo,) + pkg)
    conn.commit()
    connin.close()


def merge_caches(conn, path, tablename):
    if tablename is None:
        tablename = 'available'
    print('Merging cache at {}'.format(path))
    c = conn.cursor()
    print('Creating table `{}`'.format(tablename))
    c.execute('CREATE TABLE {} (repo, name, version, release, arch)'.format(tablename))
    conn.commit()
    # Each subdirectory of the cache is treated as a repository; merge every
    # .sqlite database found directly inside it.
    for d in os.listdir(path):
        if not os.path.isdir(os.path.join(path, d)):
            continue
        for f in os.listdir(os.path.join(path, d)):
            if not f.endswith('.sqlite'):
                continue
            merge_single_cache(conn, d, os.path.join(path, d, f), tablename)


if __name__ == "__main__":
    parser = argparse.ArgumentParser(description='Scan yum info into a sqlite database.')
    parser.add_argument('--cache', action='store_true', help='Scan a repo cache instead.')
    parser.add_argument('-n', '--name', type=str, help='Name of the table.')
    parser.add_argument('path', type=str, help='Path to the yumdb (or cache).')
    parser.add_argument('database', type=str, help='Name of the output sqlite database.')
    args = parser.parse_args()
    if os.path.exists(args.database):
        print('INFO: Adding to an existing database.')
    conn = sqlite3.connect(args.database)
    if args.cache:
        merge_caches(conn, os.path.abspath(args.path), tablename=args.name)
    else:
        scan_yumdb(conn, os.path.abspath(args.path), tablename=args.name)
    conn.close()
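
For reference, here is a minimal sketch (not part of the original gist) of one way the resulting database can be queried, assuming the script was run twice into the same database file: once against the yumdb (typically /var/lib/yum/yumdb) and once with --cache against a cache directory whose per-repository subdirectories contain *.sqlite files. It also assumes the default table names installed and available; the helper name orphaned_packages is made up for this example.

#!/usr/bin/env python3
# Sketch: list installed packages whose name does not appear in any repository
# table of the database produced by the script above. Assumes default table
# names ('installed', 'available'); adjust if --name was used.
import sqlite3
import sys


def orphaned_packages(dbfile, installed='installed', available='available'):
    conn = sqlite3.connect(dbfile)
    c = conn.cursor()
    # Table names are interpolated because sqlite parameters cannot name
    # tables; they come from trusted local input here, as in the script above.
    c.execute('SELECT name, version, release, arch FROM {inst} '
              'WHERE name NOT IN (SELECT name FROM {avail})'.format(
                  inst=installed, avail=available))
    rows = c.fetchall()
    conn.close()
    return rows


if __name__ == '__main__':
    for name, version, release, arch in orphaned_packages(sys.argv[1]):
        print('{}-{}-{}.{}'.format(name, version, release, arch))

Matching on name only is a deliberate simplification: it flags installed packages that no configured repository offers at all, regardless of version.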