
@nk9
Last active November 14, 2023 09:47
Python script to find the largest files in a git repository.
#!/usr/bin/python
# -*- coding: utf-8 -*-
# Python script to find the largest files in a git repository.
# The general method is based on the script in this blog post:
# http://stubbisms.wordpress.com/2009/07/10/git-script-to-show-largest-pack-objects-and-trim-your-waist-line/
#
# The above script worked for me, but was very slow on my 11GB repository. This version has a bunch
# of changes to speed things up to a more reasonable time. It takes less than a minute on repos with 250K objects.
#
# The MIT License (MIT)
# Copyright (c) 2015 Nick Kocharhook
#
# Permission is hereby granted, free of charge, to any person obtaining a copy of this software and
# associated documentation files (the "Software"), to deal in the Software without restriction,
# including without limitation the rights to use, copy, modify, merge, publish, distribute,
# sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in all copies or
# substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT
# NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM,
# DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT
# OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
from subprocess import check_output, CalledProcessError, Popen, PIPE
import argparse
import signal
import sys
sortByOnDiskSize = False
def main():
    global sortByOnDiskSize

    signal.signal(signal.SIGINT, signal_handler)

    args = parseArguments()
    sortByOnDiskSize = args.sortByOnDiskSize
    sizeLimit = 1024*args.filesExceeding

    if args.filesExceeding > 0:
        print "Finding objects larger than {}kB…".format(args.filesExceeding)
    else:
        print "Finding the {} largest objects…".format(args.matchCount)

    blobs = getTopBlobs(args.matchCount, sizeLimit)

    populateBlobPaths(blobs)
    printOutBlobs(blobs)


def getTopBlobs(count, sizeLimit):
    sortColumn = 4

    if sortByOnDiskSize:
        sortColumn = 3

    # List every object in the pack files, keep only blobs, and sort by the
    # chosen size column, descending.
    verifyPack = "git verify-pack -v `git rev-parse --git-dir`/objects/pack/pack-*.idx | grep blob | sort -k{}nr".format(sortColumn)
    output = check_output(verifyPack, shell=True).split("\n")[:-1]

    blobs = dict()
    compareBlob = Blob("a b {} {} c".format(sizeLimit, sizeLimit))  # use __lt__ to do the appropriate comparison

    for objLine in output:
        blob = Blob(objLine)

        if sizeLimit > 0:
            if compareBlob < blob:
                blobs[blob.sha1] = blob
            else:
                break
        else:
            blobs[blob.sha1] = blob

            if len(blobs) == count:
                break

    return blobs


def populateBlobPaths(blobs):
    if len(blobs):
        print "Finding object paths…"

        # Only include revs which have a path. Other revs aren't blobs.
        revList = "git rev-list --all --objects | awk '$2 {print}'"
        allObjectLines = check_output(revList, shell=True).split("\n")[:-1]

        outstandingKeys = blobs.keys()

        for line in allObjectLines:
            cols = line.split()
            sha1, path = cols[0], " ".join(cols[1:])

            if sha1 in outstandingKeys:
                outstandingKeys.remove(sha1)
                blobs[sha1].path = path

                # Short-circuit the search if we're done
                if not len(outstandingKeys):
                    break


def printOutBlobs(blobs):
    if len(blobs):
        csvLines = ["size,pack,hash,path"]

        for blob in sorted(blobs.values(), reverse=True):
            csvLines.append(blob.csvLine())

        # Pipe the CSV through `column` to align the output into columns.
        p = Popen(["column", "-t", "-s", "','"], stdin=PIPE, stdout=PIPE, stderr=PIPE)
        stdout, stderr = p.communicate("\n".join(csvLines)+"\n")

        print "\nAll sizes in kB. The pack column is the compressed size of the object inside the pack file.\n"
        print stdout.rstrip('\n')
    else:
        print "No files found which match those criteria."


def parseArguments():
    parser = argparse.ArgumentParser(description='List the largest files in a git repository')
    parser.add_argument('-c', '--match-count', dest='matchCount', type=int, default=10,
                        help='The number of files to return. Default is 10. Ignored if --files-exceeding is used.')
    parser.add_argument('--files-exceeding', dest='filesExceeding', type=int, default=0,
                        help='The cutoff amount, in kB. Files with a pack size (or physical size, with -p) larger than this will be printed.')
    parser.add_argument('-p', '--physical-sort', dest='sortByOnDiskSize', action='store_true', default=False,
                        help='Sort by the on-disk size of the files. Default is to sort by the pack size.')

    return parser.parse_args()


def signal_handler(signal, frame):
    print('Caught Ctrl-C. Exiting.')
    sys.exit(0)


# Represents one blob line from the `git verify-pack -v` output.
class Blob(object):
    sha1 = ''
    size = 0
    packedSize = 0
    path = ''

    def __init__(self, line):
        cols = line.split()
        self.sha1, self.size, self.packedSize = cols[0], int(cols[2]), int(cols[3])

    def __repr__(self):
        return '{} - {} - {} - {}'.format(self.sha1, self.size, self.packedSize, self.path)

    def __lt__(self, other):
        if sortByOnDiskSize:
            return self.size < other.size
        else:
            return self.packedSize < other.packedSize

    def csvLine(self):
        return "{},{},{},{}".format(self.size/1024, self.packedSize/1024, self.sha1, self.path)


# Default function is main()
if __name__ == '__main__':
    main()
@tony

tony commented Aug 3, 2016

Can you license this MIT?

@nk9
Author

nk9 commented Nov 2, 2016

Done!

@fidergo-stephane-gourichon

Problem

This fails in locales where a space is used as the digit group separator, for example when LANG=fr_FR.UTF-8, because that confuses the sorting.
Observed behavior: the script returns a list of objects which aren't the biggest at all, and they are reported with zero size.

Workaround

Setting LC_ALL to C before calling the script works.

Solution

It would be more robust if the script did this internally.
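
For example (just a sketch, untested), the check_output call in getTopBlobs could force the C locale for its child shell, so the names below are only illustrative:

import os

# Sketch: run the pipeline with LC_ALL=C so `sort -k4nr` parses the size
# columns numerically regardless of the user's LANG/LC_ALL.
cLocaleEnv = dict(os.environ, LC_ALL="C")
output = check_output(verifyPack, shell=True, env=cLocaleEnv).split("\n")[:-1]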

@bassjacob

This works really nicely, thank you 😄

@Chenzo

Chenzo commented Feb 22, 2017

OMG. Thank you. Someone checked in 250 meg MP4s and I couldn't find them! This totally fixed that!

@nk9
Author

nk9 commented Mar 7, 2017

Sorry for the delay @fidergo-stephane-gourichon, I just noticed your comment. I can't get the problem to reproduce, so I can't be sure a fix works. Can you show on the command line how to reproduce it?

bash-3.2$ export LC_ALL="fr_FR.UTF-8"
bash-3.2$ echo $LC_ALL
fr_FR.UTF-8
bash-3.2$ ~/bin/largestFiles.py -c2
Finding the 2 largest objects…
Finding object paths…

All sizes in kB. The pack column is the compressed size of the object inside the pack file.

size   pack   hash                                      path
23848  23717  8f93badb15b5467734d7429711fef8d16dd5390f  Umatilla/2002/05212002SOV.pdf
23319  19298  fa9a43793153f16021af4e1b1e550dfae33c09cb  Crook/2016 May Primary Precinct Level Results.pdf

@fidergo-stephane-gourichon

@nk9 I just saw your comment. Here's a repro on my machine and explanations below.

Reproduced on Ubuntu 16.04 AMD64 and Debian.
I guess any ordinary Linux distribution with glibc will behave the same.
Also, any git repository will do; I just ran git clone https://github.com/git/git.

python ~/bin/largestFiles.py 

Finding the 10 largest objects…
Finding object paths…

All sizes in kB. The pack column is the compressed size of the object inside the pack file.

size  pack  hash                                      path
1     0     3c65edb5c44e1fa57beef851d56a9674a5be1b72  fast-import.c
0     0     78499db81bd130fcf64a5a5e7adf85e1b522d230  Documentation/git.txt
0     0     67e98a6323e2dd4e3b4b8003806f7dffaff29a64  parse-options.c
0     0     2c87886c444a7dbaa9dc74e5f9e2ae84bdbdb00e  Makefile
0     0     ece009662379dd26a523cb082b50c11109c96b14  fast-import.c
0     0     aec924b2e1d669b2554e0c0e8f6d4b88ab3cfce5  t/t5801-remote-helpers.sh
0     0     3e53fbd623f14446de6aabb79614ce34ef6e0cfe  git-gui
0     0     80d795dc567553fe3a89b34e200d881026de1aba  Documentation/git.txt
0     0     70ff904717c2ffed12a70b988b1aa4dea3a896e2  sha1_file.c
0     0     673a1841793ff5cd5b3422471dfaebf049b5b63e  git-fetch.sh

Now with the fix:

export LC_ALL=C ; python ~/bin/largestFiles.py 

Finding the 10 largest objects…
Finding object paths…

All sizes in kB. The pack column is the compressed size of the object inside the pack file.

size  pack  hash                                      path
412   371   ba9aaa145ccd24ef760cf31c74d8f7ca1a2e47b0  t/t0013/shattered-1.pdf
487   122   28c81d7377c9d32773baaa936159cc12a3ffc7fd  po/sv.po
449   121   4c0c4756e0dcc6a5eed52ef85ef83d8156a85995  po/zh_CN.po
472   119   913db393dca5feabe9589627a30dc3a63a808674  po/de.po
457   112   04ace38b8af29c759b9749fd8a474650c1665c19  po/vi.po
437   111   e783680573b54659c2edabf1b18920540b1ecb69  po/sv.po
424   99    171f813cd3a889e8761c1658bc94ae2ce61fe3f9  po/bg.po
423   98    ccdd11eea98b36e0d0737d572e70607f54f224c0  po/bg.po
411   95    155def5ba5779c43f89f6970f5782e31cc0752cc  po/bg.po
248   94    fb0e0de441da521168c99e771cbc9787dd035800  po/fr.po

The missing trick (most probably)

This is probably why you could not see a similar result on your machine.
It is not enough to set LC_ALL to some locale name. The locale has to actually be installed on the system; otherwise the code falls back to the C locale, which does not exhibit the problem.

This command shows possible locales and allows you to choose which to install:

sudo dpkg-reconfigure locales

I used the one you mentioned, fr_FR.UTF-8, which was probably not installed on your system.
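
A quick way to check from Python whether a locale is actually available on the system (just a sketch):

import locale

# setlocale() raises locale.Error when the requested locale isn't installed.
try:
    locale.setlocale(locale.LC_ALL, "fr_FR.UTF-8")
    print "fr_FR.UTF-8 is installed"
except locale.Error:
    print "fr_FR.UTF-8 is NOT installed"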

(If that's not enough, you can fire up a small virtual machine with a plain Ubuntu or a lighter Debian install, no X or graphical session needed, and set the localization options there to reproduce the issue.)

Hope this helps.

@skyl

skyl commented Apr 18, 2018

I wonder how hard it would be to make this look into submodules ...

@T3rm1

T3rm1 commented Dec 13, 2018

Why is this written for Python 2 :O

@malcolmgreaves

@nk9 Thank you for writing this! I updated this to use Python 3 here [1].

[1] https://gist.github.com/malcolmgreaves/39e33e9b161916cb92ae0fdcfea91d64

@nk9
Author

nk9 commented May 8, 2020

No real idea, and I'm not going to be supporting this code. But Google suggests you are in the wrong directory.

@beliaev-maksim

beliaev-maksim commented Apr 21, 2021

Just run this one-liner in Git Bash:

git rev-list --objects --all |
  git cat-file --batch-check='%(objecttype) %(objectname) %(objectsize) %(rest)' |
  sed -n 's/^blob //p' |
  sort --numeric-sort --key=2 |
  cut -c 1-12,41- |
  $(command -v gnumfmt || echo numfmt) --field=2 --to=iec-i --suffix=B --padding=7 --round=nearest

@mk2s

mk2s commented May 3, 2021

That one-liner worked, and was very fast. Thank you.

@hholst80

hholst80 commented May 24, 2022

--- largestFiles.py     2022-05-24 12:52:39.271688402 +0200
+++ largestFiles.py3    2022-05-24 12:51:52.700100072 +0200
@@ -38,15 +38,15 @@
        global sortByOnDiskSize

        signal.signal(signal.SIGINT, signal_handler)
-
+
        args = parseArguments()
        sortByOnDiskSize = args.sortByOnDiskSize
        sizeLimit = 1024*args.filesExceeding

        if args.filesExceeding > 0:
-               print "Finding objects larger than {}kB…".format(args.filesExceeding)
+               print("Finding objects larger than {}kB…".format(args.filesExceeding))
        else:
-               print "Finding the {} largest objects…".format(args.matchCount)
+               print("Finding the {} largest objects…".format(args.matchCount))

        blobs = getTopBlobs(args.matchCount, sizeLimit)

@@ -55,12 +55,12 @@

 def getTopBlobs(count, sizeLimit):
        sortColumn = 4
-
+
        if sortByOnDiskSize:
                sortColumn = 3

        verifyPack = "git verify-pack -v `git rev-parse --git-dir`/objects/pack/pack-*.idx | grep blob | sort -k{}nr".format(sortColumn)
-       output = check_output(verifyPack, shell=True).split("\n")[:-1]
+       output = check_output(verifyPack, shell=True).decode().split("\n")[:-1]

        blobs = dict()
        compareBlob = Blob("a b {} {} c".format(sizeLimit, sizeLimit)) # use __lt__ to do the appropriate comparison
@@ -84,13 +84,13 @@

 def populateBlobPaths(blobs):
        if len(blobs):
-               print "Finding object paths…"
+               print("Finding object paths…")

                # Only include revs which have a path. Other revs aren't blobs.
                revList = "git rev-list --all --objects | awk '$2 {print}'"
-               allObjectLines = check_output(revList, shell=True).split("\n")[:-1]
+               allObjectLines = check_output(revList, shell=True).decode().split("\n")[:-1]

-               outstandingKeys = blobs.keys()
+               outstandingKeys = set(blobs.keys())

                for line in allObjectLines:
                        cols = line.split()
@@ -113,12 +113,14 @@
                        csvLines.append(blob.csvLine())

                p = Popen(["column", "-t", "-s", "','"], stdin=PIPE, stdout=PIPE, stderr=PIPE)
-               stdout, stderr = p.communicate("\n".join(csvLines)+"\n")
+               _ = "\n".join(csvLines)+"\n"
+               stdout, stderr = p.communicate(_.encode())
+               stdout = stdout.decode()

-               print "\nAll sizes in kB. The pack column is the compressed size of the object inside the pack file.\n"
-               print stdout.rstrip('\n')
+               print("\nAll sizes in kB. The pack column is the compressed size of the object inside the pack file.\n")
+               print(stdout.rstrip('\n'))
        else:
-               print "No files found which match those criteria."
+               print("No files found which match those criteria.")


 def parseArguments():

@hholst80

Just run this one-liner in Git Bash:

git rev-list --objects --all |
  git cat-file --batch-check='%(objecttype) %(objectname) %(objectsize) %(rest)' |
  sed -n 's/^blob //p' |
  sort --numeric-sort --key=2 |
  cut -c 1-12,41- |
  $(command -v gnumfmt || echo numfmt) --field=2 --to=iec-i --suffix=B --padding=7 --round=nearest

Master-class shell skills. Thanks for sharing!
