@malcolmgreaves
Forked from nk9/largestFiles.py
Last active August 19, 2024 03:12
Python script to find the largest files in a git repository.
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
#
# Updated to use Python 3 by Malcolm Greaves.
#
# Python script to find the largest files in a git repository.
# The general method is based on the script in this blog post:
# http://stubbisms.wordpress.com/2009/07/10/git-script-to-show-largest-pack-objects-and-trim-your-waist-line/
#
# The above script worked for me, but was very slow on my 11GB repository. This version has a bunch
# of changes to speed things up to a more reasonable time. It takes less than a minute on repos with 250K objects.
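#
# Example usage (invocations assumed from the command-line arguments defined below):
#
#   python3 largestFiles.py                        # list the 10 largest objects (the default)
#   python3 largestFiles.py -c 25                  # list the 25 largest objects
#   python3 largestFiles.py --files-exceeding 500  # list every object whose pack size exceeds 500 kB
#   python3 largestFiles.py -p                     # sort by on-disk (uncompressed) size instead of pack size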
#
# The MIT License (MIT)
# Copyright (c) 2015 Nick Kocharhook
#
# Permission is hereby granted, free of charge, to any person obtaining a copy of this software and
# associated documentation files (the "Software"), to deal in the Software without restriction,
# including without limitation the rights to use, copy, modify, merge, publish, distribute,
# sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in all copies or
# substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT
# NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM,
# DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT
# OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
from subprocess import check_output, CalledProcessError, Popen, PIPE
import argparse
import signal
import sys
sortByOnDiskSize = False


def main():
    global sortByOnDiskSize

    signal.signal(signal.SIGINT, signal_handler)

    args = parseArguments()
    sortByOnDiskSize = args.sortByOnDiskSize
    sizeLimit = 1024*args.filesExceeding

    if args.filesExceeding > 0:
        print(f"Finding objects larger than {args.filesExceeding}kB…")
    else:
        print(f"Finding the {args.matchCount} largest objects…")

    blobs = getTopBlobs(args.matchCount, sizeLimit)
    populateBlobPaths(blobs)
    printOutBlobs(blobs)


def getTopBlobs(count, sizeLimit):
    sortColumn = 4
    if sortByOnDiskSize:
        sortColumn = 3

    verifyPack = f"git verify-pack -v `git rev-parse --git-dir`/objects/pack/pack-*.idx | grep blob | sort -k{sortColumn}nr"
    output = check_output(verifyPack, shell=True).decode('utf-8').split("\n")[:-1]

    blobs = dict()
    compareBlob = Blob(f"a b {sizeLimit} {sizeLimit} c")  # use __lt__ to do the appropriate comparison

    for objLine in output:
        blob = Blob(objLine)

        if sizeLimit > 0:
            if compareBlob < blob:
                blobs[blob.sha1] = blob
            else:
                break
        else:
            blobs[blob.sha1] = blob
            if len(blobs) == count:
                break

    return blobs


def populateBlobPaths(blobs):
    if len(blobs) > 0:
        print("Finding object paths…")

        outstandingKeys = set(blobs.keys())

        # Only include revs which have a path. Other revs aren't blobs.
        revList = "git rev-list --all --objects | awk '$2 {print}'"
        allObjectLines = check_output(revList, shell=True).decode('utf-8').split("\n")[:-1]

        for line in allObjectLines:
            cols = line.split()
            sha1, path = cols[0], " ".join(cols[1:])

            if sha1 in outstandingKeys:
                outstandingKeys.remove(sha1)
                blobs[sha1].path = path

                # short-circuit the search if we're done
                if not len(outstandingKeys):
                    break


def printOutBlobs(blobs):
    if len(blobs) > 0:
        csvLines = ["size,pack,hash,path"]
        for blob in sorted(blobs.values(), reverse=True):
            csvLines.append(blob.csvLine())

        # Align the CSV into a table with `column -t`, splitting columns on commas only.
        p = Popen(["column", "-t", "-s", ","], stdin=PIPE, stdout=PIPE, stderr=PIPE)
        lines = "\n".join(csvLines)+"\n"
        stdout, stderr = p.communicate(lines.encode("UTF-8"))

        print("\nAll sizes in kB. The pack column is the compressed size of the object inside the pack file.\n")
        print(stdout.decode('utf-8').rstrip('\n'))
    else:
        print("No files found which match those criteria.")


def parseArguments():
    parser = argparse.ArgumentParser(description='List the largest files in a git repository')
    parser.add_argument('-c', '--match-count', dest='matchCount', type=int, default=10,
                        help='The number of files to return. Default is 10. Ignored if --files-exceeding is used.')
    parser.add_argument('--files-exceeding', dest='filesExceeding', type=int, default=0,
                        help='The cutoff amount, in kB. Files with a pack size (or physical size, with -p) larger than this will be printed.')
    parser.add_argument('-p', '--physical-sort', dest='sortByOnDiskSize', action='store_true', default=False,
                        help='Sort by the on-disk size of the files. Default is to sort by the pack size.')

    return parser.parse_args()


def signal_handler(signal, frame):
    print('Caught Ctrl-C. Exiting.')
    sys.exit(0)


class Blob(object):
    sha1 = ''
    size = 0        # uncompressed size, in bytes (column 3 of `git verify-pack -v`)
    packedSize = 0  # size inside the pack file, in bytes (column 4 of `git verify-pack -v`)
    path = ''

    def __init__(self, line):
        cols = line.split()
        self.sha1, self.size, self.packedSize = cols[0], int(cols[2]), int(cols[3])

    def __repr__(self):
        return f"{self.sha1} - {self.size} - {self.packedSize} - {self.path}"

    def __lt__(self, other):
        if sortByOnDiskSize:
            return self.size < other.size
        else:
            return self.packedSize < other.packedSize

    def csvLine(self):
        return f"{int(self.size/1024)},{int(self.packedSize/1024)},{self.sha1},{self.path}"


if __name__ == '__main__':
    main()
@ivakyb commented Feb 20, 2020

@jordangrant you can try git gc to remove unreachable objects.
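
For example, a minimal sketch in the same subprocess style as the script above (the --prune=now argument asks git to drop unreachable objects immediately instead of waiting out the default grace period):

    from subprocess import check_output
    # Repack the current repository and prune unreachable objects right away.
    check_output("git gc --prune=now", shell=True)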

@jtheletter

File "largestFiles.py", line 47
    print "Finding objects larger than {}kB…".format(args.filesExceeding)
                                            ^
SyntaxError: invalid syntax

Python 3.7.3
macOS Catalina 10.15.4
git version 2.23.0
