Skip to content

Instantly share code, notes, and snippets.

@hao-yu
Created March 28, 2022 10:45
  • Star 0 You must be signed in to star a gist
  • Fork 0 You must be signed in to fork a gist
Star You must be signed in to star a gist
Embed
What would you like to do?
from pulpcore.app.models.content import Artifact, ContentArtifact
from pulp_file.app.models import FileContent
from hashlib import sha256
from django.db import connection
from multiprocessing import Process
from time import sleep
# Build 20k FileContent/Artifact pairs plus 5 parallel sets of
# ContentArtifacts pointing at them, to be bulk_update()d concurrently below.
artifacts = {}  # filename -> Artifact
content = {}  # filename -> FileContent
print(">>>BUILDING CONTENT/CA/ARTIFACTS...")
for i in range(20000):
    path = f'/tmp/{i:06d}.txt'
    content_path = f'{i:06d}.txt'
    # Each file's payload is its own path, so every sha256 digest is unique.
    with open(path, "w") as f:
        f.write(path)
    with open(path, "rb") as f:
        sum256 = sha256(f.read()).hexdigest()
    fc = FileContent(relative_path=content_path, digest=sum256)
    fc.save()
    content[path] = fc
    # NOTE(review): size=i is not the real on-disk size of the file;
    # presumably irrelevant for this repro — confirm Artifact doesn't validate it.
    a = Artifact(file=path, sha256=sum256, size=i)
    a.save()
    artifacts[path] = a

# 5 lists of ContentArtifacts, one per relative-path prefix "0/".."4/".
# (Original comment said "10 lists" but only 5 were ever built.)
ca_lists = [[] for _ in range(5)]
for k in content.keys():
    for i in range(5):
        # BUG FIX: the original passed `fc` here — the stale loop variable
        # from the build loop above, i.e. always the LAST FileContent saved.
        # Each ContentArtifact must reference its own content object,
        # matching the content[k] already used for relative_path.
        attrs = {"content": content[k], "relative_path": f'{i}/{content[k].relative_path}'}
        ca = ContentArtifact(**attrs)
        ca.save()
        # Assigned only after save() so bulk_update(["artifact"]) below
        # actually has a pending change to write.
        ca.artifact = artifacts[k]
        ca_lists[i].append(ca)
def bulk_doit(batch):
    """Bulk-update the 'artifact' FK for every ContentArtifact in *batch*.

    Runs inside a forked child process, so the DB connection inherited
    from the parent is discarded and a fresh one is opened first.
    """
    print(">>> ENTER...")
    # A connection shared across fork() is unusable; cycle it before
    # touching the ORM from this process.
    connection.close()
    connection.connect()
    ContentArtifact.objects.bulk_update(batch, ["artifact"], batch_size=500)
    print(">>> EXIT...")
# Repeat a few times to make sure the deadlock issue can be reproduced.
for attempt in range(3):
    print("Try round: %s" % str(attempt + 1))
    # Ensure there are something to update when rerunning this part.
    connection.connect()
    ContentArtifact.objects.filter().update(artifact_id=None)
    processes = []
    for i in range(5):
        # NOTE(review): every worker is handed ca_lists[0], so all 20
        # processes hammer the same rows — presumably deliberate to force
        # the deadlock, but `i` is unused here; confirm ca_lists[i]
        # wasn't intended.
        for _ in range(4):
            worker = Process(target=bulk_doit, args=(ca_lists[0],))
            processes.append(worker)
            worker.start()
        sleep(3)
    for worker in processes:
        worker.join()
Sign up for free to join this conversation on GitHub. Already have an account? Sign in to comment