CORD-1045 fix user delete cascade and redis

Change-Id: Id4f3d9141cc949a40a6eb6401e5a035358e352ff
diff --git a/xos/core/models/plcorebase.py b/xos/core/models/plcorebase.py
index c4cdf21..5d5ae54 100644
--- a/xos/core/models/plcorebase.py
+++ b/xos/core/models/plcorebase.py
@@ -1,6 +1,7 @@
 import datetime
 import json
 import os
+import pytz
 import inspect
 import sys
 import threading
@@ -15,12 +16,16 @@
 from model_autodeletion import ephemeral_models
 from cgi import escape as html_escape
 from journal import journal_object
+from django.db.models.deletion import Collector
+from django.db import router
 
 import redis
 from redis import ConnectionError
 
-
 def date_handler(obj):
+    if isinstance(obj, pytz.tzfile.DstTzInfo):
+        # json can't serialize DstTzInfo
+        return str(obj)
     return obj.isoformat() if hasattr(obj, 'isoformat') else obj
 
 try:
@@ -201,10 +206,62 @@
                 return
         raise Exception("Field value %s is not in %s" % (field, str(choices)))
 
+    def serialize_for_redis(self):
+        """ Serialize the object for posting to redis.
+
+            The API serializes ForeignKey fields by naming them <name>_id
+            whereas model_to_dict leaves them with the original name. Modify
+            the results of model_to_dict to provide the same fieldnames.
+        """
+
+        field_types = {}
+        for f in self._meta.fields:
+            field_types[f.name] = f.get_internal_type()
+
+        fields = model_to_dict(self)
+        for k in fields.keys():
+            if field_types.get(k,None) == "ForeignKey":
+                new_key_name = "%s_id" % k
+                if (k in fields) and (new_key_name not in fields):
+                    fields[new_key_name] = fields[k]
+                    del fields[k]
+
+        return fields
+
+    def push_redis_event(self):
+        # Transmit update via Redis
+        changed_fields = []
+
+        if self.pk is not None:
+            my_model = type(self)
+            try:
+                orig = my_model.objects.get(pk=self.pk)
+
+                for f in my_model._meta.fields:
+                    oval = getattr(orig, f.name)
+                    nval = getattr(self, f.name)
+                    if oval != nval:
+                        changed_fields.append(f.name)
+            except:
+                changed_fields.append('__lookup_error')
+
+        try:
+            r = redis.Redis("redis")
+            # NOTE the redis event has been extended with model properties to facilitate the support of real time notification in the UI
+            # keep this monitored for performance reasons and eventually revert it back to fetch model properties via the REST API
+            model = self.serialize_for_redis()
+            bases = inspect.getmro(self.__class__)
+            bases = [x for x in bases if issubclass(x, PlCoreBase)]
+            class_names = ",".join([x.__name__ for x in bases])
+            model['class_names'] = class_names
+            payload = json.dumps({'pk': self.pk, 'changed_fields': changed_fields, 'object': model}, default=date_handler)
+            r.publish(self.__class__.__name__, payload)
+        except ConnectionError:
+            # Redis not running.
+            pass
+
 # For cascading deletes, we need a Collector that doesn't do fastdelete,
 # so we get a full list of models.
-from django.db.models.deletion import Collector
-from django.db import router
 class XOSCollector(Collector):
   def can_fast_delete(self, *args, **kwargs):
     return False
@@ -303,28 +360,6 @@
                             journal_object(model, "delete.cascade.mark_deleted", msg="root = %r" % self)
                             model.save(update_fields=['enacted','deleted','policed'], silent=silent)
 
-    def serialize_for_redis(self):
-        """ Serialize the object for posting to redis.
-
-            The API serializes ForeignKey fields by naming them <name>_id
-            whereas model_to_dict leaves them with the original name. Modify
-            the results of model_to_dict to provide the same fieldnames.
-        """
-
-        field_types = {}
-        for f in self._meta.fields:
-            field_types[f.name] = f.get_internal_type()
-
-        fields = model_to_dict(self)
-        for k in fields.keys():
-            if field_types.get(k,None) == "ForeignKey":
-                new_key_name = "%s_id" % k
-                if (k in fields) and (new_key_name not in fields):
-                    fields[new_key_name] = fields[k]
-                    del fields[k]
-
-        return fields
-
     def save(self, *args, **kwargs):
         journal_object(self, "plcorebase.save")
 
@@ -358,42 +393,13 @@
         if (caller_kind!="synchronizer") or always_update_timestamp:
             self.updated = timezone.now()
 
-        # Transmit update via Redis
-        changed_fields = []
-
-        if self.pk is not None:
-            my_model = type(self)
-            try:
-                orig = my_model.objects.get(pk=self.pk)
-
-                for f in my_model._meta.fields:
-                    oval = getattr(orig, f.name)
-                    nval = getattr(self, f.name)
-                    if oval != nval:
-                        changed_fields.append(f.name)
-            except:
-                changed_fields.append('__lookup_error')
-
         journal_object(self, "plcorebase.save.super_save")
 
         super(PlCoreBase, self).save(*args, **kwargs)
 
         journal_object(self, "plcorebase.save.super_save_returned")
 
-        try:
-            r = redis.Redis("redis")
-            # NOTE the redis event has been extended with model properties to facilitate the support of real time notification in the UI
-            # keep this monitored for performance reasons and eventually revert it back to fetch model properties via the REST API
-            model = self.serialize_for_redis()
-            bases = inspect.getmro(self.__class__)
-            bases = [x for x in bases if issubclass(x, PlCoreBase)]
-            class_names = ",".join([x.__name__ for x in bases])
-            model['class_names'] = class_names
-            payload = json.dumps({'pk': self.pk, 'changed_fields': changed_fields, 'object': model}, default=date_handler)
-            r.publish(self.__class__.__name__, payload)
-        except ConnectionError:
-            # Redis not running.
-            pass
+        self.push_redis_event()
 
         # This is a no-op if observer_disabled is set
         # if not silent:
diff --git a/xos/core/models/user.py b/xos/core/models/user.py
index 07d59e6..89464bf 100644
--- a/xos/core/models/user.py
+++ b/xos/core/models/user.py
@@ -8,15 +8,21 @@
 
 from core.middleware import get_request
 from core.models import DashboardView, PlCoreBase, PlModelMixIn, Site, ModelLink
-from core.models.plcorebase import StrippedCharField
+from core.models.plcorebase import StrippedCharField, XOSCollector
 from django.contrib.auth.models import AbstractBaseUser, BaseUserManager
 from django.core.exceptions import PermissionDenied
 from django.core.mail import EmailMultiAlternatives
 from django.db import models
+from django.db import transaction
+from django.db import router
 from django.db.models import F, Q
 from django.forms.models import model_to_dict
 from django.utils import timezone
 from timezones.fields import TimeZoneField
+from journal import journal_object
+
+import redis
+from redis import ConnectionError
 
 # ------ from plcorebase.py ------
 try:
@@ -170,6 +176,7 @@
     def __init__(self, *args, **kwargs):
         super(User, self).__init__(*args, **kwargs)
         self._initial = self._dict  # for PlModelMixIn
+        self.silent = False
 
     def isReadOnlyUser(self):
         return self.is_readonly
@@ -182,24 +189,6 @@
         # The user is identified by their email address
         return self.email
 
-    def delete(self, *args, **kwds):
-        # so we have something to give the observer
-        purge = kwds.get('purge', False)
-        if purge:
-            del kwds['purge']
-        try:
-            purge = purge or observer_disabled
-        except NameError:
-            pass
-
-        if (purge):
-            super(User, self).delete(*args, **kwds)
-        else:
-            if (not self.write_protect):
-                self.deleted = True
-                self.enacted = None
-                self.save(update_fields=['enacted', 'deleted'])
-
     @property
     def keyname(self):
         return self.email[:self.email.find('@')]
@@ -248,13 +237,58 @@
 #            roles[slice_membership.role.role_type].append(slice_membership.slice.name)
 #        return roles
 
+    def delete(self, *args, **kwds):
+        # so we have something to give the observer
+        purge = kwds.get('purge',False)
+        if purge:
+            del kwds['purge']
+        silent = kwds.get('silent',False)
+        if silent:
+            del kwds['silent']
+        try:
+            purge = purge or observer_disabled
+        except NameError:
+            pass
+
+        if (purge):
+            journal_object(self, "delete.purge")
+            super(User, self).delete(*args, **kwds)
+        else:
+            if (not self.write_protect ):
+                self.deleted = True
+                self.enacted=None
+                self.policed=None
+                journal_object(self, "delete.mark_deleted")
+                self.save(update_fields=['enacted','deleted','policed'], silent=silent)
+
+                collector = XOSCollector(using=router.db_for_write(self.__class__, instance=self))
+                collector.collect([self])
+                with transaction.atomic():
+                    for (k, models) in collector.data.items():
+                        for model in models:
+                            if model.deleted:
+                                # in case it's already been deleted, don't delete again
+                                continue
+                            model.deleted = True
+                            model.enacted=None
+                            model.policed=None
+                            journal_object(model, "delete.cascade.mark_deleted", msg="root = %r" % self)
+                            model.save(update_fields=['enacted','deleted','policed'], silent=silent)
+
     def save(self, *args, **kwargs):
+        journal_object(self, "plcorebase.save")
+
         if not self.id:
             self.set_password(self.password)
         if self.is_active and self.is_registering:
             self.send_temporary_password()
             self.is_registering = False
 
+        # let the user specify silence as either a kwarg or an instance variable
+        silent = self.silent
+        if "silent" in kwargs:
+            silent=silent or kwargs.pop("silent")
+
         caller_kind = "unknown"
 
         if ('synchronizer' in threading.current_thread().name):
@@ -267,11 +301,33 @@
         if "always_update_timestamp" in kwargs:
             always_update_timestamp = always_update_timestamp or kwargs.pop("always_update_timestamp")
 
-        # TODO: plCoreBase logic for updating timestamps is missing
+        # SMBAKER: if an object is trying to delete itself, or if the observer
+        # is updating an object's backend_* fields, then let it slip past the
+        # composite key check.
+        ignore_composite_key_check=False
+        if "update_fields" in kwargs:
+            ignore_composite_key_check=True
+            for field in kwargs["update_fields"]:
+                if not (field in ["backend_register", "backend_status", "deleted", "enacted", "updated"]):
+                    ignore_composite_key_check=False
+
+        if (caller_kind!="synchronizer") or always_update_timestamp:
+            self.updated = timezone.now()
 
         self.username = self.email
+
+        journal_object(self, "plcorebase.save.super_save")
+
         super(User, self).save(*args, **kwargs)
 
+        journal_object(self, "plcorebase.save.super_save_returned")
+
+        self.push_redis_event()
+
+        # This is a no-op if observer_disabled is set
+        # if not silent:
+        #    notify_observer()
+
         self._initial = self._dict
 
     def send_temporary_password(self):