Enable deferred PROTECT for model relations, and pass the source instance to the pre_delete and post_delete signals.
__author__ = 'Aron Podrigal'
from operator import attrgetter
from django.db import models, transaction, router
from django.db.models import sql, signals
from django.db.models.deletion import Collector, ProtectedError
from django.db.models.query import QuerySet
from django.utils import six


def DEFERRED_PROTECT(collector, field, sub_objs, using):
    """
    Function to be passed to related fields as the on_delete argument:

        my_field = models.ForeignKey(ToModel, on_delete=DEFERRED_PROTECT)

    This protects the foreign object from being deleted, unless the
    referencing object is itself being deleted as part of the cascade.
    For example:

        class A(MyModelBase):
            ...

        class B(MyModelBase):
            a = models.ForeignKey(A)

        class C(MyModelBase):
            a = models.ForeignKey(A)
            b = models.ForeignKey(B, on_delete=DEFERRED_PROTECT)

    Deleting an instance of `A` cascade-deletes its related `B` and `C`
    objects, so if `C.b` was reached through the cascade from `A`, the
    PROTECT on `C.b` is ignored.

    This requires the use of our custom Manager and QuerySet.
    """
    collector.protected_objects.setdefault(
        sub_objs[0]._meta.model, ([], field))[0].extend(sub_objs)

class MyCollector(Collector):
    def __init__(self, using):
        super(MyCollector, self).__init__(using)
        # Track the source instance which started the deletion.
        self.source_instance = None
        # Track the instances that are protected through a DEFERRED_PROTECT
        # foreign key. Before doing the actual deletion we check whether
        # these objects are also collected for deletion through another,
        # unprotected foreign key; if they are, they get deleted after all.
        # Format: {model: ([sub_objs], field)}
        self.protected_objects = {}
    def delete(self):
        """
        Extend the standard delete to:

        1. Check whether any protected related objects should stop the
           collected objects from being deleted.
        2. Pass the source object to the pre_delete and post_delete signals,
           to allow different processing depending on whether the delete is
           a direct delete or a cascade from a related object.
        """
        # sort instance collections
        for model, instances in self.data.items():
            self.data[model] = sorted(instances, key=attrgetter("pk"))

        # if possible, bring the models in an order suitable for databases that
        # don't support transactions or cannot defer constraint checks until the
        # end of a transaction.
        self.sort()

        # Raise ProtectedError for any DEFERRED_PROTECT sub-object that is not
        # also being deleted through another (unprotected) foreign key.
        for model, protected in self.protected_objects.items():
            sub_objs, field = protected
            protected_objs = [obj for obj in sub_objs
                              if obj not in self.data.get(model, set())]
            if protected_objs:
                raise ProtectedError(
                    "Cannot delete some instances of model '%s' because "
                    "they are referenced through a protected foreign key: '%s.%s'" % (
                        field.rel.to.__name__,
                        protected_objs[0].__class__.__name__,
                        field.name,
                    ),
                    protected_objs
                )

        with transaction.commit_on_success_unless_managed(using=self.using):
            # send pre_delete signals
            for model, obj in self.instances_with_model():
                if not model._meta.auto_created:
                    signals.pre_delete.send(
                        sender=model, instance=obj, using=self.using,
                        source=self.source_instance
                    )

            # fast deletes
            for qs in self.fast_deletes:
                qs._raw_delete(using=self.using)

            # update fields
            for model, instances_for_fieldvalues in six.iteritems(self.field_updates):
                query = sql.UpdateQuery(model)
                for (field, value), instances in six.iteritems(instances_for_fieldvalues):
                    query.update_batch([obj.pk for obj in instances],
                                       {field.name: value}, self.using)

            # reverse instance collections
            for instances in six.itervalues(self.data):
                instances.reverse()

            # delete instances
            for model, instances in six.iteritems(self.data):
                query = sql.DeleteQuery(model)
                pk_list = [obj.pk for obj in instances]
                query.delete_batch(pk_list, self.using)

                if not model._meta.auto_created:
                    for obj in instances:
                        signals.post_delete.send(
                            sender=model, instance=obj, using=self.using,
                            source=self.source_instance
                        )

        # update collected instances
        for model, instances_for_fieldvalues in six.iteritems(self.field_updates):
            for (field, value), instances in six.iteritems(instances_for_fieldvalues):
                for obj in instances:
                    setattr(obj, field.attname, value)

        for model, instances in six.iteritems(self.data):
            for instance in instances:
                setattr(instance, model._meta.pk.attname, None)

class MyQuerySet(QuerySet):
    """
    Custom QuerySet that uses MyCollector to collect related objects.
    """
    def delete(self):
        """
        Deletes the records in the current QuerySet.
        """
        assert self.query.can_filter(), \
            "Cannot use 'limit' or 'offset' with delete."

        del_query = self._clone()

        # The delete is actually 2 queries - one to find related objects,
        # and one to delete. Make sure that the discovery of related
        # objects is performed on the same database as the deletion.
        del_query._for_write = True

        # Disable non-supported fields.
        del_query.query.select_for_update = False
        del_query.query.select_related = False
        del_query.query.clear_ordering(force_empty=True)

        collector = MyCollector(using=del_query.db)
        collector.collect(del_query)
        # Record the QuerySet that initiated this delete so the signals
        # receive it as `source`.
        collector.source_instance = self
        collector.delete()

        # Clear the result cache, in case this QuerySet gets reused.
        self._result_cache = None
    delete.alters_data = True

class MyManager(models.Manager):
    """
    A manager to customize the delete behavior.

    .delete() sends the pre_delete and post_delete signals with an
    additional parameter `source`, which is the instance .delete() was
    called on. `source` may be either a model instance or the manager's
    QuerySet instance.
    """
    def get_queryset(self):
        return MyQuerySet(self.model, using=self._db)

class MyModelBase(models.Model):
    """
    Abstract model base class that implements our custom delete functionality.
    """
    objects = MyManager()

    class Meta:
        # Abstract so that subclasses inherit the custom manager and delete().
        abstract = True

    def delete(self, using=None):
        using = using or router.db_for_write(self.__class__, instance=self)
        assert self._get_pk_val() is not None, (
            "%s object can't be deleted because its %s attribute is set to None."
            % (self._meta.object_name, self._meta.pk.attname))

        collector = MyCollector(using=using)
        collector.collect([self])
        # Record the instance that initiated this delete so the signals
        # receive it as `source`.
        collector.source_instance = self
        collector.delete()
    delete.alters_data = True
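
# ---------------------------------------------------------------------------
# Example usage (e.g. an app's models.py), assuming the code above is
# importable as a module named `modelbase`.
# ---------------------------------------------------------------------------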
from django.db import models
from django.db.models import signals

import modelbase


class A(modelbase.MyModelBase):
    pass


class B(modelbase.MyModelBase):
    a = models.ForeignKey(A)


class C(modelbase.MyModelBase):
    a = models.ForeignKey(A)
    b = models.ForeignKey(B, on_delete=modelbase.DEFERRED_PROTECT)


def pre_delete(sender, **kwargs):
    """
    Example pre_delete / post_delete handler that takes into account
    where the delete came from.
    """
    # Get the instance that caused the delete (`source` may be a model
    # instance or a QuerySet).
    source = kwargs['source']
    instance = kwargs['instance']
    source_instance = source[0] if hasattr(source, 'model') else source

    # If the sender's model differs from the source's model, this delete was
    # cascaded from a related object, so do nothing here.
    if instance._meta.model != source_instance._meta.model:
        return

    # Otherwise the delete was called directly on this model;
    # e.g. continue deleting some files.
    instance.delete_all_files()


signals.pre_delete.connect(pre_delete, sender=A)
signals.pre_delete.connect(pre_delete, sender=B)
signals.pre_delete.connect(pre_delete, sender=C)
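
A quick sketch of the behaviour this is expected to produce at runtime, assuming the models above are installed in an app and migrated, and that the `delete_all_files()` call in the example handler is replaced by (or defined as) a real method on the models:

from django.db.models.deletion import ProtectedError

a = A.objects.create()
b = B.objects.create(a=a)
c = C.objects.create(a=a, b=b)

# Deleting `a` cascades to `b` and `c`. The DEFERRED_PROTECT on C.b does not
# raise, because the referencing `c` is itself collected for deletion through
# the unprotected C.a cascade.
a.delete()

# Deleting a B directly while a C still references it is different: that C is
# not part of any cascade, so the deferred protection kicks in.
a = A.objects.create()
b = B.objects.create(a=a)
C.objects.create(a=a, b=b)
try:
    b.delete()
except ProtectedError:
    pass  # C.b protected `b` from being deleted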