
plone3 3.1.7
DummyTools.py
from Acquisition import aq_base
from copy import deepcopy
from StringIO import StringIO
from OFS.SimpleItem import SimpleItem
from Products.CMFDefault.DublinCore import DefaultDublinCoreImpl
from Products.CMFEditions.utilities import dereference
from Products.CMFEditions.ArchivistTool import ObjectData
from Products.CMFEditions.ArchivistTool import PreparedObject
from Products.CMFEditions.ArchivistTool import AttributeAdapter
from Products.CMFEditions.ArchivistTool import VersionData
from Products.CMFEditions.interfaces.IArchivist import ArchivistError
from Products.CMFEditions.interfaces.IStorage import IStreamableReference
from Products.CMFEditions.interfaces.IStorage import IStorage
from Products.CMFEditions.interfaces.IStorage import IPurgeSupport
from Products.CMFEditions.interfaces.IPurgePolicy import IPurgePolicy
from Products.CMFEditions.interfaces.IStorage import StorageUnregisteredError
from Products.CMFEditions.interfaces.IStorage import StorageRetrieveError
from Products.CMFCore.utils import getToolByName
from cPickle import Pickler, Unpickler
from DateTime import DateTime
import types

# Make alog module level so that it survives transaction rollbacks
alog = []

class Dummy(SimpleItem, DefaultDublinCoreImpl):
    pass

class UniqueIdError(Exception):
    pass

class DummyBaseTool(SimpleItem):
    def getId(self):
        return self.id

def deepCopy(obj):
    stream = StringIO()
    p = Pickler(stream, 1)
    p.dump(obj)
    stream.seek(0)
    u = Unpickler(stream)
    return u.load()

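
# Illustrative usage sketch (not part of the original module): deepCopy
# round-trips a value through a pickle stream, so the copy shares no mutable
# state with the original.
def _example_deepCopy():
    original = {'title': 'doc', 'tags': ['a', 'b']}
    clone = deepCopy(original)
    assert clone == original                      # equal by value ...
    assert clone is not original                  # ... but a distinct object
    assert clone['tags'] is not original['tags']
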
def notifyModified(obj):
    """Notify the object as modified.

    Busy-waits until DateTime() returns a time different from the object's
    current modification time, then notifies the object as modified
    (faster than time.sleep(2)).
    """
    t = obj.modified()
    while t == DateTime(): pass
    obj.notifyModified()

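
# Illustrative usage sketch (not part of the original module): notifyModified
# spins until DateTime() ticks past the stored modification time, so the
# object's modification date is guaranteed to change.  Assumes `doc` is an
# initialized DublinCore-ish object (e.g. a Dummy from a test fixture) with a
# valid modification date.
def _example_notifyModified(doc):
    before = doc.modified()
    notifyModified(doc)
    assert doc.modified() != before
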

class DummyArchivist(SimpleItem):
    """Archivist simulating modifiers and history storage.
    """
    id = 'portal_archivist'

    def getId(self):
        return self.id

    def __init__(self):
        self._archive = {}
        self._counter = 0
        self.reset_log()
        self.alog_indent = ''

    def log(self, msg):
        alog.append(msg)

    def get_log(self):
        return "\n".join(alog)

    def reset_log(self):
        global alog
        alog = []

    def prepare(self, obj, app_metadata=None, sys_metadata={}):
        obj, history_id = dereference(obj)
        if history_id is None:
            # object isn't under version control yet
            # A working copy that is under version control needs to have a
            # history_id, a version_id (starting with 0) and a location_id
            # (the current implementation can't handle multiple locations
            # yet; nevertheless set the location id to a well known
            # default value)
            portal_hidhandler = getToolByName(obj, 'portal_historyidhandler')
            history_id = portal_hidhandler.register(obj)
            version_id = obj.version_id = 0
            obj.location_id = 0
            is_registered = False
        else:
            version_id = len(self.queryHistory(obj))
            is_registered = True

        base_obj = aq_base(obj)
        doc1_inside = getattr(base_obj, 'doc1_inside', None)
        doc2_inside = getattr(base_obj, 'doc2_inside', None)
        doc3_outside = getattr(base_obj, 'doc3_outside', None)

        # simulate clone modifiers
        icrefs = []
        ocrefs = []
        clone = deepCopy(base_obj)
        if doc1_inside is not None:
            icrefs.append(AttributeAdapter(clone, 'doc1_inside'))
        if doc2_inside is not None:
            icrefs.append(AttributeAdapter(clone, 'doc2_inside'))
        if doc3_outside is not None:
            ocrefs.append(AttributeAdapter(clone, 'doc3_outside'))
        crefs = icrefs + ocrefs

        # simulate before save modifier
        iorefs = []
        oorefs = []
        if doc1_inside is not None:
            iorefs.append(getattr(obj, 'doc1_inside'))
        if doc2_inside is not None:
            iorefs.append(getattr(obj, 'doc2_inside'))
        if doc3_outside is not None:
            oorefs.append(getattr(obj, 'doc3_outside'))
        orefs = iorefs + oorefs
        for cref in crefs:
            cref.setAttribute(VersionAwareReference())

        # log
        if sys_metadata['originator'] is None:
            self.log("")
        if orefs:
            self.log("%sprepare %s: hid=%s, refs=(%s)"
                        % (self.alog_indent,
                           obj.getId(),
                           history_id,
                           ', '.join([ref.getId() for ref in orefs])))
        else:
            self.log("%sprepare %s: hid=%s"
                        % (self.alog_indent, obj.getId(), history_id))
        self.alog_indent += '  '

        # prepare object structure
        original_info = ObjectData(obj, iorefs, oorefs)
        clone_info = ObjectData(clone, icrefs, ocrefs)

        approxSize = None

        return PreparedObject(history_id, original_info, clone_info, (),
                              app_metadata, sys_metadata, is_registered, approxSize)

    def register(self, prepared_obj):
        # log
        self.log("%sregister %s: hid=%s, is_registered=%s"
                    % (self.alog_indent,
                       prepared_obj.original.object.getId(),
                       prepared_obj.history_id,
                       prepared_obj.is_registered))

        if not prepared_obj.is_registered:
            # new empty history
            self._archive[prepared_obj.history_id] = []
            self.save(prepared_obj)

    def save(self, prepared_obj, autoregister=False):
        if not prepared_obj.is_registered:
            if not autoregister:
                raise ArchivistError("not registered: %s " % prepared_obj.original.object)
            self._archive[prepared_obj.history_id] = []

        # log
        self.alog_indent = self.alog_indent[0:-2]

        irefs = [ref.getAttribute() for ref in prepared_obj.clone.inside_refs]
        orefs = [ref.getAttribute() for ref in prepared_obj.clone.outside_refs]
        irefs_prep = ['{hid:%s, vid:%s}' % (r.history_id, r.version_id)
                      for r in irefs]
        orefs_prep = ['{hid:%s, vid:%s}' % (r.history_id, r.version_id)
                      for r in orefs]
        irefs = ', '.join(irefs_prep)
        orefs = ', '.join(orefs_prep)
        if irefs:
            irefs = "irefs=(%s), " % irefs
        if orefs:
            orefs = "orefs=(%s), " % orefs
        refs = irefs + orefs
        self.log("%ssave    %s: hid=%s, %sisreg=%s, auto=%s"
                    % (self.alog_indent,
                       prepared_obj.original.object.getId(),
                       prepared_obj.history_id,
                       refs,
                       prepared_obj.is_registered,
                       autoregister))

        # save in the format the data needs to be retrieved
        svdata = {
            'clone': prepared_obj.clone,
            'referenced_data': prepared_obj.referenced_data,
            'metadata': prepared_obj.metadata,
        }
        # storage simulation
        self._archive[prepared_obj.history_id].append(svdata)

    def retrieve(self, obj=None, history_id=None, selector=None, preserve=(),
                 countPurged=True):
        obj, history_id = dereference(obj, history_id, self)
        if selector is None:
            selector = len(self._archive[history_id]) - 1  #HEAD

        self.log("%sretrieve %s: hid=%s, selector=%s"
                    % (self.alog_indent, obj.getId(), history_id, selector))

        data = self._archive[history_id][selector]
        attr_handling_references = ['_objects']
        attr_handling_references.extend(data['clone'].object.objectIds())
        attr_handling_references.extend(obj.objectIds())
        vdata = VersionData(data['clone'],
                    [],
                    attr_handling_references,
                    data['referenced_data'],
                    data['metadata'])

        return deepCopy(vdata)

    def getHistory(self, obj=None, history_id=None, preserve=()):
        obj, history_id = dereference(obj, history_id, self)
        return [deepCopy(obj) for obj in self._archive[history_id]]

    def queryHistory(self, obj=None, history_id=None,
                     preserve=(), default=[]):
        try:
            history = self.getHistory(obj=obj, history_id=history_id, preserve=preserve)
        except KeyError:
            return default
        if history:
            return history
        return default

    def isUpToDate(self, obj=None, history_id=None, selector=None):
        obj = dereference(obj=obj, history_id=history_id, zodb_hook=self)[0]
        mem = self.retrieve(obj=obj, history_id=history_id, selector=selector)
        return mem.data.object.modified() == obj.modified()

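
# Illustrative usage sketch (not part of the original module): the call
# sequence the CMFEditions tests drive against DummyArchivist.  Assumes
# `archivist` is the acquisition-wrapped portal_archivist tool, `doc` is a
# folderish content object (retrieve touches objectIds()) and the other dummy
# tools (portal_historyidhandler, portal_modifier) are installed as the test
# fixtures arrange.
def _example_archivist_roundtrip(archivist, doc):
    prepared = archivist.prepare(doc, app_metadata=None,
                                 sys_metadata={'originator': None})
    archivist.save(prepared, autoregister=True)   # registers on first save
    history = archivist.queryHistory(doc)         # one entry per saved version
    head = archivist.retrieve(doc)                # VersionData of the newest version
    return len(history), head
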

class VersionAwareReference:
    def __init__(self, **info):
        self.history_id = None
        self.version_id = None
        self.info = info

    def setReference(self, target_obj, remove_info=True):
        portal_hidhandler = getToolByName(target_obj, 'portal_historyidhandler')
        portal_archivist = getToolByName(target_obj, 'portal_archivist')
        self.history_id = portal_hidhandler.queryUid(target_obj)
        self.version_id = len(portal_archivist.queryHistory(target_obj))-1
        self.location_id = 1 # only one location possible currently
        if remove_info and hasattr(self, 'info'):
            self.info = None

class DummyModifier(DummyBaseTool):
    __implements__ = ()
    id = 'portal_modifier'

    def beforeSaveModifier(self, obj, clone):
        return {}, [], [] # XXX 2nd and 3rd shall be lists

    def afterRetrieveModifier(self, obj, repo_clone, preserve=()):
        preserved = {}
        # just a dead simple test implementation
        for key in preserve:
            preserved[key] = key
        return [], [], preserved

    def getReferencedAttributes(self, obj):
        return {}

    def reattachReferencedAttributes(self, object, referenced_data):
        # nothing to do
        return

    def getOnCloneModifiers(self, obj):
        return None

class FolderishContentObjectModifier(DummyBaseTool):
    """This is a full-fledged modifier.
    """

    __implements__ = ()
    id = 'portal_modifier'

    def getReferencedAttributes(self, obj):
        # we declare the title to be a big blob we don't want to be
        # pickled and unpickled by the archivist
        return {'title': obj.title}

    def getOnCloneModifiers(self, obj):
        """Removes children whose ids end with '_inside' or '_outside'.

        Replaces object manager sub-objects whose ids end with '_inside'
        or '_outside' by an uninitialized 'IVersionAwareReference'.
        All other children get versioned with the parent.
        """
        portal_archivist = getToolByName(obj, 'portal_archivist')
        VersionAwareReference = portal_archivist.classes.VersionAwareReference

        # do not pickle the object managers subobjects
        refs = {}
        outside_refs = []
        inside_refs = []
        for name, sub in obj.objectItems():
            pyid = id(aq_base(sub))
            if name.endswith('_inside'):
                inside_refs.append(sub)
                refs[pyid] = True
            elif name.endswith('_outside'):
                outside_refs.append(sub)
                refs[pyid] = True

        # do not pickle the big blob attributes
        base_obj = aq_base(obj)
        for attr in self.getReferencedAttributes(obj).keys():
            try:
                pyid = id(getattr(base_obj, attr))
            except AttributeError:
                pass
            else:
                refs[pyid] = False

        def persistent_id(obj):
            if id(obj) in refs:
                return id(obj)
            return None

        def persistent_load(pid):
            if pid in refs:
                if refs[pid]:
                    # references
                    return VersionAwareReference()
                else:
                    # just directly passed attributes
                    return None
            # should never reach this!
            assert False

        return persistent_id, persistent_load, inside_refs, outside_refs, ''

    def beforeSaveModifier(self, obj, clone):
        """Returns all uninitialized 'IVersionAwareReference' objects.

        This always goes in conjunction with 'getOnCloneModifiers'.
        """
        portal_archivist = getToolByName(obj, 'portal_archivist')
        AttributeAdapter = portal_archivist.classes.AttributeAdapter

        # just return adapters to the attributes that were replaced by
        # an uninitialized 'IVersionAwareReference' object
        outside_refs = []
        inside_refs = []
        for name in clone.objectIds():
            if name.endswith('_inside'):
                inside_refs.append(AttributeAdapter(clone, name))
            elif name.endswith('_outside'):
                outside_refs.append(AttributeAdapter(clone, name))

        return {}, inside_refs, outside_refs

    def afterRetrieveModifier(self, obj, repo_clone, preserve=()):
        preserved = {}
        # just a dead simple test implementation
        for key in preserve:
            preserved[key] = key

        ref_names = self._getAttributeNamesHandlingSubObjects(obj)
        return [], ref_names, {}

    def reattachReferencedAttributes(self, object, referenced_data):
        # just a dead simple test implementation
        for key, value in referenced_data.items():
            setattr(object, key, value)

    def _getAttributeNamesHandlingSubObjects(self, obj):
        # list.extend() returns None, so build the result by concatenation
        return ['_objects'] + list(obj.objectIds())

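
# Illustrative sketch (not part of the original module): how the
# persistent_id/persistent_load pair returned by getOnCloneModifiers plugs
# into pickling.  This plain-Python analogue replaces a selected sub-object
# by a placeholder while dumping and substitutes it again while loading.
def _example_persistent_pickling():
    blob = ['pretend this is a big attachment']
    container = {'title': 'doc', 'attachment': blob}
    refs = {id(blob): 'attachment-ref'}

    def persistent_id(obj):
        # non-None return values are stored as references, not pickled inline
        return refs.get(id(obj))

    def persistent_load(pid):
        return '<placeholder for %s>' % pid

    stream = StringIO()
    pickler = Pickler(stream, 1)
    pickler.persistent_id = persistent_id
    pickler.dump(container)
    stream.seek(0)
    unpickler = Unpickler(stream)
    unpickler.persistent_load = persistent_load
    clone = unpickler.load()
    assert clone['attachment'] == '<placeholder for attachment-ref>'
    return clone
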

class DummyHistoryIdHandler(DummyBaseTool):
    id = 'portal_historyidhandler'

    UID_ATTRIBUTE_NAME = 'editions_uhid'

    uhid_counter = 0

    UniqueIdError = UniqueIdError

    objectRegistry = {}

    def register(self, obj):
        uhid = self.queryUid(obj)
        if uhid is None:
            self.uhid_counter += 1
            uhid = self.uhid_counter
            setattr(obj, self.UID_ATTRIBUTE_NAME, uhid)
            self.objectRegistry[uhid] = obj
        return uhid

    def queryUid(self, obj, default=None):
        return getattr(aq_base(obj), self.UID_ATTRIBUTE_NAME, default)

    def getUid(self, obj):
        uid = self.queryUid(obj, default=None)
        if uid is None:
            raise UniqueIdError("'%s' has no unique id attached." % obj)
        return uid

    def queryObject(self, uid, default=None):
        try:
            return self.objectRegistry[uid]
        except KeyError:
            return default

#    def setUid(self, obj, uid, check_uniqueness=True):
#        setattr(obj, self.UID_ATTRIBUTE_NAME, uid)

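
# Illustrative usage sketch (not part of the original module): the dummy
# history id handler stamps an `editions_uhid` attribute on the object and
# keeps a class-level registry.  `Anything` is a hypothetical stand-in for a
# content object; any instance that accepts attributes will do.
def _example_historyidhandler():
    class Anything:
        pass

    handler = DummyHistoryIdHandler()
    obj = Anything()
    assert handler.queryUid(obj) is None        # not registered yet
    uid = handler.register(obj)                 # stamps editions_uhid
    assert handler.getUid(obj) == uid
    assert handler.queryObject(uid) is obj
    assert handler.register(obj) == uid         # registering twice is a no-op
    return uid
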
class StorageVersionData:
    def __init__(self, object, referenced_data, metadata):
        self.object = object
        self.referenced_data = referenced_data
        self.metadata = metadata
    def isValid(self):
        return not isinstance(self.object, Removed)

class Removed:
    """Indicates removal of version data (a purged version).
    """

    def __init__(self, reason, metadata):
        """Store removal info.
        """
        self.reason = reason
        self.metadata = metadata

class MemoryStorage(DummyBaseTool):

    __implements__ = (IStorage, IPurgeSupport)
    id = 'portal_historiesstorage'


    def __init__(self):
        self._histories = {}

    def register(self, history_id, object, referenced_data={}, metadata=None):
        histories = self._histories
        if history_id not in histories:
            return self._save(history_id, object, referenced_data, metadata)

    def save(self, history_id, object, referenced_data={}, metadata=None):
        # delegate the decision what to purge to the purge policy tool
        # if it exists. If the call returns ``False`` do not save the current
        # version.
        policy = getToolByName(self, 'portal_purgepolicy', None)
        if policy is not None:
            if not policy.beforeSaveHook(history_id, object, metadata):
                return len(self._histories[history_id]) - 1

        if history_id not in self._histories:
            raise StorageUnregisteredError(
                "Saving or retrieving an unregistered object is not "
                "possible. Register the object with history id '%s' first. "
                % history_id)

        return self._save(history_id, object, referenced_data, metadata)


    def _save(self, history_id, object, referenced_data={}, metadata=None):
        histories = self._histories
        cloned_referenced_data = {}

        for key, ref in referenced_data.items():
            # a real storage may treat IStreamableReference obj differently
            if IStreamableReference.isImplementedBy(ref):
                cloned_referenced_data[key] = deepCopy(ref.getObject())
            else:
                cloned_referenced_data[key] = deepCopy(ref)
        vdata = StorageVersionData(object=deepCopy(object),
                                   referenced_data=cloned_referenced_data,
                                   metadata=metadata)
        if history_id in histories.keys():
            histories[history_id].append(vdata)
        else:
            histories[history_id] = [vdata]

        return len(histories[history_id]) - 1

    def retrieve(self, history_id, selector=None,
                 countPurged=True, substitute=True):
        if selector is None:
            selector = len(self._getHistory(history_id)) - 1

        if countPurged:
            try:
                vdata = self._getHistory(history_id)[selector]
            except IndexError:
                raise StorageRetrieveError("Retrieving non existing version %s"
                                           % selector)

            vdata.referenced_data = deepcopy(vdata.referenced_data)
            if substitute and isinstance(vdata.object, Removed):
                # delegate retrieving to purge policy if one is available
                # if none is available just return "the removed object"
                policy = getToolByName(self, 'portal_purgepolicy', None)
                if policy is not None:
                    vdata = policy.retrieveSubstitute(history_id, selector, vdata)
            return vdata
        else:
            valid = 0
            history = self._getHistory(history_id)
            for vdata in history:
                if isinstance(vdata.object, Removed):
                    continue
                if valid == selector:
                    return vdata
                valid += 1
            raise StorageRetrieveError("Retrieving non existing version %s"
                                       % selector)

    def getHistory(self, history_id, preserve=(), countPurged=True,
                   substitute=True):
        history = []
        sel = 0

        while True:
            try:
                vdata = self.retrieve(history_id, sel, countPurged, substitute)
            except StorageRetrieveError:
                break
            history.append(vdata)
            sel += 1

        return HistoryList(history)

    def isRegistered(self, history_id):
        return history_id in self._histories

    def getModificationDate(self, history_id, selector=None,
                            countPurged=True, substitute=True):
        vdata = self.retrieve(history_id, selector, countPurged, substitute)
        return vdata.object.object.modified()

    def purge(self, history_id, selector, metadata={}, countPurged=True):
        """See ``IPurgeSupport``
        """
        histories = self._histories
        history = histories[history_id]
        vdata = self.retrieve(history_id, selector, countPurged,
                              substitute=False)
        selector = history.index(vdata)
        if not isinstance(vdata.object, Removed):
            # prepare replacement for the deleted object and metadata
            removedInfo = Removed("purged", metadata)

            # digging into ZVC internals: remove the stored object
            history[selector] = StorageVersionData(removedInfo, None, metadata)

    def _getHistory(self, history_id):
        try:
            history = self._histories[history_id]
        except KeyError:
            raise StorageUnregisteredError(
                "Saving or retrieving an unregistered object is not "
                "possible. Register the object with history id '%s' first. "
                % history_id)
        return history
#        return HistoryList(history)

    def _getLength(self, history_id, countPurged=True):
        """Returns the length of the history
        """
        histories = self._histories
        history = self._getHistory(history_id)
        if countPurged:
            return len(history)

        length = 0
        for vdata in history:
            if not isinstance(vdata.object, Removed):
                length += 1

        return length

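
# Illustrative usage sketch (not part of the original module): exercising the
# in-memory storage with plain picklable payloads.  Standing alone (outside a
# portal) getToolByName finds no purge policy, so every saved version is kept
# until it is purged explicitly.
def _example_memorystorage():
    storage = MemoryStorage()
    storage.register(1, {'text': 'v0'}, metadata={'comment': 'initial'})
    storage.save(1, {'text': 'v1'}, metadata={'comment': 'second'})

    assert storage.isRegistered(1)
    head = storage.retrieve(1)                   # selector=None means newest
    assert head.object == {'text': 'v1'}

    storage.purge(1, 0, metadata={'sys_metadata': {'comment': 'gone'}})
    assert not storage.retrieve(1, 0).isValid()  # replaced by a Removed marker
    assert len(storage.getHistory(1)) == 2
    assert len(storage.getHistory(1, countPurged=False)) == 1
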

class HistoryList(types.ListType):
    """List of versions; a ``None`` selector maps to the newest entry.
    """
    def __getitem__(self, selector):
        if selector is None:
            selector = -1
        try:
            return types.ListType.__getitem__(self, selector)
        except IndexError:
            raise StorageRetrieveError("Retrieving non existing version %s" % selector)

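
# Illustrative usage sketch (not part of the original module): HistoryList
# maps a ``None`` selector to the newest entry and turns missing versions
# into StorageRetrieveError instead of IndexError.
def _example_historylist():
    history = HistoryList(['v0', 'v1'])
    assert history[None] == 'v1'                 # None selects the newest entry
    assert history[0] == 'v0'
    try:
        history[5]
    except StorageRetrieveError:
        pass                                     # out-of-range selector
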

class DummyPurgePolicy(DummyBaseTool):
    """Dummy Purge Policy
    """
    __implements__ = IPurgePolicy
    id = 'portal_purgepolicy'

    def beforeSaveHook(self, history_id, obj, metadata={}):
        """Purge old versions

        Purges old versions so that at most two versions reside in
        the history.
        """
        storage = getToolByName(self, 'portal_historiesstorage')
        currentVersion = len(storage.getHistory(history_id))
        while True:
            length = len(storage.getHistory(history_id, countPurged=False))
            if length < 2:
                break
            comment = "purged on save of version %s" % currentVersion
            metadata = {"sys_metadata": {"comment": comment}}
            storage.purge(history_id, 0, metadata, countPurged=False)

        return True

    def retrieveSubstitute(self, history_id, selector, default=None):
        """Retrieves the next older version
        """
        storage = getToolByName(self, 'portal_historiesstorage')
        while selector:
            selector -= 1
            data = storage.retrieve(history_id, selector, substitute=False)
            if data.isValid():
                return data
        return default

class PurgePolicyTestDummyStorage(DummyBaseTool):
    """Partial Storage used for PurgePolicy Tests
    """

    __implements__ = (IStorage, IPurgeSupport)
    id = 'portal_historiesstorage'

    def __init__(self):
        self.history = []

    def save(self, history_id, obj):
        self.history.append(obj)

    def getHistory(self, history_id, preserve=(), countPurged=True,
                   substitute=True):
        return self.history

    def purge(self, history_id, selector, metadata={},
              countPurged=True):
        del self.history[selector]

    def retrieve(self, history_id, selector=None,
                 countPurged=True, substitute=True):
        if selector >= len(self.history):
            raise StorageRetrieveError()
        return self.history[selector]


class DummyData(object):
    def __init__(self, data):
        self.data = data

    def isValid(self):
        return True


class RemovedData(object):
    def isValid(self):
        return False
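

# Illustrative usage sketch (not part of the original module): the partial
# storage together with DummyData/RemovedData mimics a history in which some
# versions were purged.  DummyPurgePolicy.retrieveSubstitute walks backwards
# over exactly this isValid() contract to find the next older valid version.
def _example_purge_fixture():
    storage = PurgePolicyTestDummyStorage()
    storage.save(1, DummyData(0))      # version 0, still valid
    storage.save(1, RemovedData())     # version 1, purged
    storage.save(1, RemovedData())     # version 2, purged

    assert len(storage.getHistory(1)) == 3
    assert storage.retrieve(1, 0).isValid()
    assert not storage.retrieve(1, 2).isValid()

    storage.purge(1, 1)                # drop version 1 from the history
    assert len(storage.getHistory(1)) == 2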