Cascade soft-deletes to dependent objects, add object journal for debugging
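
Soft deletes in PlCoreBase.delete() now walk the dependency graph with a
Collector that has fast-delete disabled (XOSCollector), marking every
dependent object deleted instead of leaving it behind. The admin's bulk
"delete selected" action is wrapped so it calls each model's delete()
rather than the queryset's bulk delete. A JournalEntry model and a
/api/utility/object_journal/ endpoint record save/delete/reap events for
debugging when journaling is enabled in the config, and the reaper now
skips objects that still need a backend delete or that still have
soft-deleted dependents.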

Change-Id: Idd77e55ed1d763bf6448a481f573abca1147ad58
diff --git a/xos/api/utility/object_journal.py b/xos/api/utility/object_journal.py
new file mode 100644
index 0000000..bc01851
--- /dev/null
+++ b/xos/api/utility/object_journal.py
@@ -0,0 +1,85 @@
+import json
+from django.http import HttpResponse
+from rest_framework.decorators import api_view
+from rest_framework.response import Response
+from rest_framework.reverse import reverse
+from rest_framework import serializers
+from rest_framework import generics
+from rest_framework import status
+from rest_framework import filters
+from rest_framework import pagination
+from core.models import *
+from xos.apibase import XOSListCreateAPIView, XOSRetrieveUpdateDestroyAPIView, XOSPermissionDenied
+from api.xosapi_helpers import PlusModelSerializer, XOSViewSet, ReadOnlyField
+from django.db import models
+
+class JournalEntrySerializer(PlusModelSerializer):
+    id = serializers.CharField(read_only=True)
+    objClassName = serializers.CharField()
+    objId = serializers.CharField()
+    objUnicode = serializers.CharField()
+    operation = serializers.CharField()
+    msg = serializers.CharField()
+    timestamp = serializers.DateTimeField()
+
+    class Meta:
+        model = JournalEntry
+        fields = ('id', 'objClassName', 'objId', 'objUnicode', 'operation', 'msg', 'timestamp')
+
+# Note: Supports order and limit
+#           /api/utility/object_journal/?ordering=-timestamp&limit=100
+
+class ObjectJournalViewSet(XOSViewSet):
+    base_name = "object_journal"
+    method_name = "object_journal"
+    method_kind = "viewset"
+    serializer_class = JournalEntrySerializer
+    queryset = JournalEntry.objects.all()
+    ordering_filter = filters.OrderingFilter()
+    filter_backends = (filters.DjangoFilterBackend,)
+    filter_fields = ('id', 'objClassName', 'objId', 'objUnicode', 'operation', 'timestamp')
+    pagination_class = pagination.LimitOffsetPagination
+
+    @classmethod
+    def get_urlpatterns(self, api_path="^"):
+        patterns = super(ObjectJournalViewSet, self).get_urlpatterns(api_path=api_path)
+
+        #patterns.append( self.list_url("html/$", {"get": "get_oj_html"}, "oj_html") )
+
+        return patterns
+
+    def filter_queryset(self, queryset):
+        queryset = super(ObjectJournalViewSet, self).filter_queryset(queryset)
+        return self.ordering_filter.filter_queryset(self.request, queryset, self)
+
+    def list(self, request):
+        if hasattr(self.request,"query_params"):
+            format = self.request.query_params.get('output_format', "json")
+            limit = int(self.request.query_params.get('limit', 1000000))
+        else:
+            format = "json"
+            limit = 1000000
+
+        if format=="json":
+            return super(ObjectJournalViewSet, self).list(request)
+
+        if format=="html":
+            lines=[]
+            for obj in self.filter_queryset(self.get_queryset()).all():
+                lines.append("<tr><td>%s</td><td>%s</td><td>%s</td><td>%s</td><td>%s</td><td>%s</td></tr>" % (obj.id, obj.objClassName, obj.objId, obj.objUnicode, obj.operation, obj.msg or ""))
+            lines = lines[:limit]
+            return HttpResponse("<html><head></head><body><table>%s</table></body></html>" % "\n".join(lines), content_type="text/html")
+
+        if format=="ascii":
+            lines=[]
+            for obj in self.filter_queryset(self.get_queryset()).all():
+                lines.append("%-10s %-24s %-10s %-32s %-48s %s" % (obj.id, obj.objClassName, obj.objId, str(obj.objUnicode)[:32], obj.operation, obj.msg or ""))
+            lines = lines[:limit]
+            return HttpResponse("\n".join(lines), content_type="text/plain")
+
+        return HttpResponse("unknown format")
+
+
+
+
+
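
A minimal sketch (not part of the patch) of querying the endpoint above from a
client. The host and credentials are placeholders; ordering, limit and
output_format are the query parameters handled by ObjectJournalViewSet.list():

    # Sketch only; host and credentials below are placeholders.
    import requests

    resp = requests.get(
        "http://xos.example.org/api/utility/object_journal/",
        params={"ordering": "-timestamp", "limit": 100, "output_format": "ascii"},
        auth=("admin@example.org", "letmein"),
    )
    print(resp.text)
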
diff --git a/xos/core/admin.py b/xos/core/admin.py
index 2ba283d..109150f 100644
--- a/xos/core/admin.py
+++ b/xos/core/admin.py
@@ -21,6 +21,8 @@
 from django.utils.html import conditional_escape, format_html
 from django.utils.safestring import mark_safe
 from django.utils.text import capfirst
+from django.utils.translation import ugettext, ugettext_lazy
+from django.contrib.admin.actions import delete_selected
 from suit.widgets import LinkedSelect
 
 # thread locals necessary to work around a django-suit issue
@@ -295,6 +297,40 @@
 
         return inlines
 
+    # see http://stackoverflow.com/questions/1471909/django-model-delete-not-triggered
+    class SafeDeleteQuerysetWrapper(object):
+        def __init__(self, wrapped_queryset):
+            self.wrapped_queryset = wrapped_queryset
+
+        def _safe_delete(self):
+            for obj in list(self.wrapped_queryset):
+                obj.delete()
+
+        def __getattr__(self, attr):
+            if attr == 'delete':
+                return self._safe_delete
+            else:
+                return getattr(self.wrapped_queryset, attr)
+
+        def __iter__(self):
+            for obj in list(self.wrapped_queryset):
+                yield obj
+
+        def __getitem__(self, index):
+            return self.wrapped_queryset[index]
+
+        def __len__(self):
+            return len(self.wrapped_queryset)
+
+    def get_actions(self, request):
+        actions = super(XOSAdminMixin, self).get_actions(request)
+        actions['delete_selected'] = (XOSAdminMixin.action_safe_bulk_delete, 'delete_selected', ugettext_lazy("Delete selected %(verbose_name_plural)s"))
+        return actions
+
+    def action_safe_bulk_delete(self, request, queryset):
+        wrapped_queryset = XOSAdminMixin.SafeDeleteQuerysetWrapper(queryset)
+        return delete_selected(self, request, wrapped_queryset)
+
 
 class ReadOnlyAwareAdmin(XOSAdminMixin, admin.ModelAdmin):
     # Note: Make sure XOSAdminMixin is listed before
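
The wrapper above exists because QuerySet.delete() issues a bulk SQL delete and
never calls Model.delete(), so the stock delete_selected admin action would
bypass the soft-delete logic in PlCoreBase.delete(). A minimal sketch (not part
of the patch) of the wrapper's behavior, with Slice only as an example model:

    # Sketch only; Slice is just an example model.
    from core.admin import XOSAdminMixin
    from core.models import Slice

    wrapped = XOSAdminMixin.SafeDeleteQuerysetWrapper(Slice.objects.all())
    print(len(wrapped))  # non-delete attributes fall through to the wrapped queryset
    wrapped.delete()     # intercepted: calls Slice.delete() per instance, i.e. a soft delete
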
diff --git a/xos/core/models/__init__.py b/xos/core/models/__init__.py
index 3162b35..67489a4 100644
--- a/xos/core/models/__init__.py
+++ b/xos/core/models/__init__.py
@@ -32,3 +32,4 @@
 from .network import Network, NetworkParameterType, NetworkParameter, Port, NetworkTemplate, Router, NetworkSlice, ControllerNetwork, AddressPool
 from .billing import Account, Invoice, Charge, UsableObject, Payment
 from .program import Program
+from .journal import JournalEntry, journal_object
diff --git a/xos/core/models/journal.py b/xos/core/models/journal.py
new file mode 100644
index 0000000..6c25033
--- /dev/null
+++ b/xos/core/models/journal.py
@@ -0,0 +1,43 @@
+from django.db import models
+from django.utils import timezone
+
+from xos.config import Config
+from xos.exceptions import *
+from operator import attrgetter
+import json
+
+class JournalEntry(models.Model):
+    objClassName = models.CharField(max_length=64)
+    objId = models.CharField(max_length=64, null=True, blank=True)  # null=True, for objects journaled before save
+    objUnicode = models.CharField(max_length=1024)
+    operation = models.CharField(max_length=64)
+    msg = models.TextField(blank=True, null=True)
+    timestamp = models.DateTimeField(default=timezone.now)
+
+    def __unicode__(self): return u'%s %s[%s]' % (self.operation, self.objClassName, self.objId)
+
+    def can_update(self, user):
+        return True
+
+
+def journal_object(o, operation, msg=None, timestamp=None):
+    # do not journal unless it has been explicitly enabled
+    if not getattr(Config(), "debug_enable_journal", None):
+        return
+
+    # ignore objects that generate too much noise
+    if o.__class__.__name__ in ["Diag"]:
+        return
+
+    if not timestamp:
+        timestamp = timezone.now()
+
+    j = JournalEntry(objClassName = o.__class__.__name__,
+                     objId = o.id,
+                     objUnicode = str(o),
+                     operation = operation,
+                     msg = msg, timestamp = timestamp)
+    j.save()
+
+
+
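
Journaling is opt-in. A minimal sketch (not part of the patch) of recording and
reading entries from a Django shell, assuming the debug_enable_journal flag has
been set in the XOS config (its exact location in the config file is an
assumption here; journal_object() only checks the attribute on Config()):

    # Sketch only; assumes Config().debug_enable_journal evaluates truthy.
    from core.models import Slice, JournalEntry, journal_object

    s = Slice.objects.first()
    if s is not None:
        journal_object(s, "debug.manual", msg="inspected from the shell")
    print(JournalEntry.objects.filter(objClassName="Slice").count())
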
diff --git a/xos/core/models/plcorebase.py b/xos/core/models/plcorebase.py
index 163bb53..9d3d06d 100644
--- a/xos/core/models/plcorebase.py
+++ b/xos/core/models/plcorebase.py
@@ -5,6 +5,7 @@
 import threading
 from django import db
 from django.db import models
+from django.db import transaction
 from django.forms.models import model_to_dict
 from django.core.urlresolvers import reverse
 from django.forms.models import model_to_dict
@@ -12,6 +13,7 @@
 from django.core.exceptions import PermissionDenied
 from model_autodeletion import ephemeral_models
 from cgi import escape as html_escape
+from journal import journal_object
 
 import redis
 from redis import ConnectionError
@@ -194,6 +196,14 @@
                 return
         raise Exception("Field value %s is not in %s" % (field, str(choices)))
 
+# For cascading deletes, we need a Collector that doesn't do fastdelete,
+# so we get a full list of models.
+from django.db.models.deletion import Collector
+from django.db import router
+class XOSCollector(Collector):
+  def can_fast_delete(self, *args, **kwargs):
+    return False
+
 class PlCoreBase(models.Model, PlModelMixIn):
     objects = PlCoreBaseManager()
     deleted_objects = PlCoreBaseDeletionManager()
@@ -209,6 +219,9 @@
     backend_register = models.CharField(max_length=1024,
                                       default="{}", null=True)
 
+    # If True, the backend must delete this object before the reaper may purge it
+    backend_need_delete = models.BooleanField(default=False)
+
     backend_status = models.CharField(max_length=1024,
                                       default="0 - Provisioning in progress")
     deleted = models.BooleanField(default=False)
@@ -248,16 +261,33 @@
             pass
 
         if (purge):
+            journal_object(self, "delete.purge")
             super(PlCoreBase, self).delete(*args, **kwds)
         else:
-            if (not self.write_protect):
+            if (not self.write_protect ):
                 self.deleted = True
                 self.enacted=None
                 self.policed=None
+                journal_object(self, "delete.mark_deleted")
                 self.save(update_fields=['enacted','deleted','policed'], silent=silent)
 
-
+                collector = XOSCollector(using=router.db_for_write(self.__class__, instance=self))
+                collector.collect([self])
+                with transaction.atomic():
+                    for (k, models) in collector.data.items():
+                        for model in models:
+                            if model.deleted:
+                                # in case it's already been deleted, don't delete again
+                                continue
+                            model.deleted = True
+                            model.enacted=None
+                            model.policed=None
+                            journal_object(model, "delete.cascade.mark_deleted", msg="root = %r" % self)
+                            model.save(update_fields=['enacted','deleted','policed'], silent=silent)
+
     def save(self, *args, **kwargs):
+        journal_object(self, "plcorebase.save")
+
         # let the user specify silence as either a kwarg or an instance varible
         silent = self.silent
         if "silent" in kwargs:
@@ -296,10 +326,12 @@
             except:
                 changed_fields.append('__lookup_error')
 
-        
+        journal_object(self, "plcorebase.save.super_save")
 
         super(PlCoreBase, self).save(*args, **kwargs)
 
+        journal_object(self, "plcorebase.save.super_save_returned")
+
         try:
             r = redis.Redis("redis")
             payload = json.dumps({'pk':self.pk,'changed_fields':changed_fields})
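
The cascade reuses Django's own delete Collector with fast-delete turned off,
so collector.data maps each model class to the instances a real delete would
remove; those are the objects the loop above marks deleted. A minimal sketch
(not part of the patch) of inspecting that set, with Slice only as an example:

    # Sketch only; lists what deleting `obj` would cascade to.
    from django.db import router
    from core.models import Slice
    from core.models.plcorebase import XOSCollector

    obj = Slice.objects.first()
    if obj is not None:
        collector = XOSCollector(using=router.db_for_write(Slice, instance=obj))
        collector.collect([obj])
        for model_class, instances in collector.data.items():
            print("%s: %d" % (model_class.__name__, len(instances)))
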
diff --git a/xos/core/models/user.py b/xos/core/models/user.py
index a081afd..e3e5fd2 100644
--- a/xos/core/models/user.py
+++ b/xos/core/models/user.py
@@ -141,6 +141,7 @@
     policed = models.DateTimeField(null=True, default=None)
     backend_status = StrippedCharField(max_length=1024,
                                        default="Provisioning in progress")
+    backend_need_delete = models.BooleanField(default=False)
     deleted = models.BooleanField(default=False)
     write_protect = models.BooleanField(default=False)
     lazy_blocked = models.BooleanField(default=False)
diff --git a/xos/synchronizers/base/syncstep.py b/xos/synchronizers/base/syncstep.py
index d1109e6..a8951e9 100644
--- a/xos/synchronizers/base/syncstep.py
+++ b/xos/synchronizers/base/syncstep.py
@@ -225,7 +225,9 @@
                     for f in failed:
                         self.check_dependencies(o,f) # Raises exception if failed
                     if (deletion):
+                        journal_object(o,"syncstep.call.delete_record")
                         self.delete_record(o)
+                        journal_object(o,"syncstep.call.delete_purge")
                         o.delete(purge=True)
                     else:
                         new_enacted = timezone.now()
@@ -234,6 +236,13 @@
                         except AttributeError:
                             run_always = False
 
+                        # Mark this as an object that will require a backend delete. Do
+                        # this now rather than after the syncstep, so the flag is set even if sync fails.
+                        if not (o.backend_need_delete):
+                            o.backend_need_delete = True
+                            o.save(update_fields=['backend_need_delete'])
+
+                        journal_object(o,"syncstep.call.sync_record")
                         self.sync_record(o)
 
 #                         if (not run_always):
@@ -244,6 +253,7 @@
                         scratchpad = {'next_run':0, 'exponent':0, 'last_success':time.time()}
                         o.backend_register = json.dumps(scratchpad)
                         o.backend_status = "1 - OK"
+                        journal_object(o,"syncstep.call.save_update")
                         o.save(update_fields=['enacted','backend_status','backend_register'])
                 except (InnocuousException,Exception,DeferredException) as e:
                     logger.log_exc("sync step failed!",extra=o.tologdict())
diff --git a/xos/synchronizers/model_policy.py b/xos/synchronizers/model_policy.py
index 0737d63..cfc9e90 100644
--- a/xos/synchronizers/model_policy.py
+++ b/xos/synchronizers/model_policy.py
@@ -6,6 +6,7 @@
 from django.db.transaction import atomic
 from django.dispatch import receiver
 from django.utils import timezone
+from django.db import models as django_models
 from generate.dependency_walker import *
 from xos.logger import Logger, logging
 
@@ -144,6 +145,21 @@
         if (time.time()-start<1):
             time.sleep(1)
 
+from core.models.plcorebase import XOSCollector
+from django.db import router
+def has_deleted_dependencies(m):
+    # Check whether deleting 'm' would cascade to any other objects (normally
+    # ones already marked 'deleted' by the soft-delete cascade but not yet purged).
+    collector = XOSCollector(using=router.db_for_write(m.__class__, instance=m))
+    collector.collect([m])
+    deps=[]
+    for (k, models) in collector.data.items():
+        for model in models:
+            if model==m:
+                continue
+            deps.append(model)
+    return deps
+
 def run_policy_once():
         from core.models import Instance,Slice,Controller,Network,User,SlicePrivilege,Site,SitePrivilege,Image,ControllerSlice,ControllerUser,ControllerSite
         models = [Controller, Site, SitePrivilege, Image, ControllerSlice, ControllerSite, ControllerUser, User, Slice, Network, Instance, SlicePrivilege]
@@ -167,14 +183,31 @@
             execute_model_policy(o, True)
 
         # Reap non-sync'd models here
-        reaped = [Slice]
+        # models_to_reap = [Slice,Network,NetworkSlice]
 
-        for m in reaped:
+        models_to_reap = django_models.get_models(include_auto_created=False)
+        for m in models_to_reap:
+            if not hasattr(m, "deleted_objects"):
+                continue
+
             dobjs = m.deleted_objects.all()
             for d in dobjs:
+                if hasattr(d,"_meta") and hasattr(d._meta,"proxy") and d._meta.proxy:
+                    # skip proxy objects; we'll get the base instead
+                    continue
+                if getattr(d, "backend_need_delete", False):
+                    journal_object(d, "reaper.need_delete")
+                    print "Reaper: skipping %r because it has need_delete set" % d
+                    continue
+                deleted_deps = has_deleted_dependencies(d)
+                if deleted_deps:
+                    journal_object(d, "reaper.has_deleted_dependencies", msg=",".join([str(m) for m in deleted_deps]))
+                    print 'Reaper: cannot purge object %r because it has deleted dependencies: %s' % (d, ",".join([str(m) for m in deleted_deps]))
+                    continue
                 deps = walk_inv_deps(noop, d)
                 if (not deps):
-                    print 'Purging object %r'%d
+                    journal_object(d, "reaper.purge")
+                    print 'Reaper: purging object %r'%d
                     d.delete(purge=True)
 
         try:
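
With these checks, the reaper purges a soft-deleted object only once it no
longer needs a backend delete, has no soft-deleted cascade dependents, and has
no inverse dependencies from walk_inv_deps(). A minimal sketch (not part of the
patch) of checking from a shell why an object is still waiting; the import path
for model_policy is an assumption and may differ in your tree:

    # Sketch only; Network is just an example model.
    from core.models import Network
    from synchronizers.model_policy import has_deleted_dependencies  # assumed import path

    d = Network.deleted_objects.all().first()
    if d is not None:
        print("backend_need_delete: %s" % d.backend_need_delete)
        print("cascade dependents: %s" % has_deleted_dependencies(d))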