Back to index

plone3  3.1.7
ApeSupport.py
Go to the documentation of this file.
00001 """ \
00002 Bring generic Ape Support to Archetypes.
00003 
00004 The goal of this module is to implement generic mapping of Archetypes Schema
00005 to real tables with real columns for each field in the schema.
00006 
00007 **Experimental**
00008 
00009 This code works with Ape 1.0
00010 
What's working so far:
00012 
00013  - The following types are handled so far:
00014 
00015   - string,
00016 
00017   - int.
00018 
What's not working so far:
00020 
 - References and images are not yet supported
00022 
00023  - Renaming of objects generates errors
00024 
00025 ArchGenXML has support for APE:
00026 
 When you invoke ArchGenXML with the option --ape-support, the outline_od.xmi
 sample works correctly with APE: all the ape_config and serializer/gateway
 code is generated for you.
00030  
00031 ApeSupport is tested with Ape 1.0 and PostgreSQL
00032 """
00033 
00034 from Products.Archetypes.BaseUnit import BaseUnit
00035 from Products.Archetypes.atapi import *
00036 from types import ClassType
00037 
00038 from apelib.core.interfaces import ISerializer
00039 from apelib.sql.sqlbase import SQLGatewayBase
00040 from apelib.sql.structure import RowSequenceSchema
00041 from apelib.sql.properties import SQLFixedProperties
00042 from apelib.zodb3.serializers import RemainingState as RemainingBase
00043 from apelib.zodb3.serializers import encode_to_text
00044 
00045 
00046 from apelib.core.interfaces \
00047      import ISerializer, IFullSerializationEvent, IFullDeserializationEvent
00048 from Persistence import Persistent, PersistentMapping
00049 from StringIO import StringIO
00050 from cPickle import Pickler, UnpickleableError
00051 import os
00052 
00053 
00054 #map types between APE and Archetypes Schemas
00055 
# Maps an Archetypes field type (key) to the APE/SQL column type (value).
# Consulted by AtType2ApeType() as a fallback table; unknown field types
# pass through unchanged there.
typemap={
    'text':'string',
    'datetime':'string',  # DateTime values are stored in string form
    'boolean':'int',
    'integer':'int',
    #'reference':'string:list',
    # NOTE: this entry is unreachable -- AtType2ApeType() returns None
    # for 'computed' fields before it ever consults this table.
    'computed':'string' #ouch!!
}
00064 
def AtType2ApeType(f):
    """Return the APE column type for the Archetypes field ``f``.

    Returns None for 'computed' fields; callers (AtSchema2ApeSchema,
    ArcheSerializer.serialize) treat that as "leave this field out".
    For unknown field types the type name itself is returned unchanged.
    """
    t = f._properties['type']
    if t == 'reference':
        # Multi-valued and single-valued references are (for now) both
        # stored as plain strings -- the original branched on
        # f.multiValued but returned 'string' either way.  Real
        # reference support is not implemented yet (see module docstring).
        return 'string'
    if t == 'computed':
        # Computed fields have no stored value; skip them entirely.
        # (This makes the 'computed' entry in ``typemap`` unreachable.)
        return None
    return typemap.get(t, t)
00077 
def AtSchema2ApeSchema(atschema):
    """Translate an Archetypes schema into an APE RowSequenceSchema.

    Returns a ``(schema, column_defs)`` pair, where ``column_defs`` is a
    tuple of ``(name, type, pkey)`` triples mirroring the schema rows.
    The 'id' field becomes the primary key; metadata fields and fields
    with no APE type mapping are skipped.
    """
    schema = RowSequenceSchema()
    column_defs = []
    for field in atschema.fields():
        # Metadata fields are not mapped to table columns.
        if field.isMetadata:
            continue
        fname = field.getName()
        ape_type = AtType2ApeType(field)
        if not ape_type:
            # No usable mapping (e.g. computed fields) -- leave it out.
            continue
        pkey = int(fname == 'id')
        schema.add(fname, ape_type, pkey)
        column_defs.append((fname, ape_type, pkey))
    return schema, tuple(column_defs)
00095 
00096 # creates a generic gateway instance based on
00097 # the klass's Schema
def constructGateway(klass):
    """Build a generic SQL gateway for ``klass``: one fixed-properties
    table, named after the class (lowercased), with a column per
    non-metadata field of the class's Archetypes schema.
    """
    table_name = klass.__name__.lower()
    # Only the RowSequenceSchema (first element) is needed here; the
    # column-definition tuple was previously unpacked and never used.
    schema = AtSchema2ApeSchema(klass.schema)[0]
    return SQLFixedProperties('db', table_name, schema)
00103 
00104 # creates a generic serializer instance based on
00105 # the klass's Schema
def constructSerializer(klass):
    """Build an ArcheSerializer configured for ``klass``'s schema."""
    serializer = ArcheSerializer()
    serializer.klass = klass
    schema, column_defs = AtSchema2ApeSchema(klass.schema)
    serializer.schema = schema
    return serializer
00111 
00112 
00113 # generic Serializer class
00114 # which reflects the class's Schema
class ArcheSerializer:
    """APE serializer that maps an object's Archetypes schema to rows.

    ``klass`` and ``schema`` are filled in per instance by
    constructSerializer().
    """

    __implements__ = ISerializer

    # Default (empty) schema; replaced per instance by constructSerializer().
    schema = RowSequenceSchema()

    def getSchema(self):
        """Return the RowSequenceSchema describing the serialized rows."""
        return self.schema

    def can_serialize(self, object):
        # NOTE(review): ``Master`` is not defined anywhere in this
        # module; if it does not arrive via the ``atapi`` star import
        # this raises NameError.  ``self.klass`` looks like the intended
        # check -- confirm before changing.
        return isinstance(object, Master)

    def serialize(self, event):
        """Dump the non-metadata schema fields of ``event.obj`` as a
        list of ``(name, type, value)`` triples, telling APE each
        attribute has been handled."""
        rows = []
        for field in event.obj.Schema().fields():
            if field.isMetadata:
                continue
            fname = field.getName()
            ape_type = AtType2ApeType(field)
            if not ape_type:
                # Field has no APE mapping (e.g. computed) -- skip it.
                continue
            # Mark the attribute as covered by this serializer.
            event.ignore(fname)
            value = field.getAccessor(event.obj)()
            rows.append((fname, ape_type, value))
        return rows

    def deserialize(self, event, state):
        """Write each ``(name, type, value)`` row back onto the object's
        __dict__ (the type component is ignored here)."""
        for fname, ape_type, value in state:
            event.obj.__dict__[fname] = value
00146 
00147 # this replacement of RemainingState is necessary in order to
00148 # replace the BaseUnit members by string data because
00149 # Baseunits are not pickleable (dunno why)
00150 # overloaded the serialize method in order to clean the __dict__
00151 # correctly
class RemainingState(RemainingBase):
    # Replacement for apelib's RemainingState: BaseUnit members are not
    # pickleable, so serialize() works on a cleaned copy of __dict__ in
    # which every BaseUnit has been replaced by its raw string data.

    def cleanDictCopy(self,dict):
        '''Return a copy of ``dict`` in which every BaseUnit value has
        been replaced by its raw string data (recursing into nested
        dicts and PersistentMappings), because BaseUnit instances are
        not pickleable.'''
        res={}

        for k in dict.keys():
            v=dict[k]
            # Recurse into plain dicts and PersistentMappings so nested
            # BaseUnits are converted as well.
            if type(v) == type({}) or ec_isinstance(v,PersistentMapping):
                v1=self.cleanDictCopy(v)
            elif ec_isinstance(v,BaseUnit):
                v1=v.getRaw()
            else:
                v1=v
            res[k]=v1

        return res


    def serialize(self, event):
        '''Pickle the object's remaining (not otherwise serialized)
        attributes to text, after converting BaseUnits via
        cleanDictCopy().  Overloads RemainingBase.serialize() so the
        __dict__ is cleaned correctly first.'''
        assert IFullSerializationEvent.isImplementedBy(event)
        assert isinstance(event.obj, Persistent)

        # Allow pickling of cyclic references to the object.
        event.serialized('self', event.obj, False)

        # Work on a cleaned copy so BaseUnits never reach the pickler.
        state = self.cleanDictCopy(event.obj.__dict__)
        # Volatile (_v_*) attributes are never persisted.
        for key in state.keys():
            if key.startswith('_v_'):
                del state[key]
        # Drop attributes other serializers already handled.
        # NOTE: 'get_seralized_attributes' (sic) appears to be apelib's
        # actual spelling -- do not "fix" the name here; verify against
        # the installed apelib before changing.
        for attrname in event.get_seralized_attributes():
            if state.has_key(attrname):
                del state[attrname]
        if not state:
            # No data needs to be stored
            return ''

        outfile = StringIO()
        p = Pickler(outfile, 1)  # Binary pickle
        # Persistent objects discovered inside the remainder; handed to
        # ZODB at the end via event.upos.
        unmanaged = []

        def persistent_id(ob, identify_internal=event.identify_internal,
                          unmanaged=unmanaged):
            ref = identify_internal(ob)
            if ref is None:
                if hasattr(ob, '_p_oid'):
                    # Persistent objects that end up in the remainder
                    # are unmanaged.  Tell ZODB about them so that
                    # ZODB can deal with them specially.
                    unmanaged.append(ob)
            return ref

        # Preserve order to a reasonable extent by storing a list
        # instead of a dictionary.
        state_list = state.items()
        state_list.sort()
        p.persistent_id = persistent_id
        try:
            p.dump(state_list)
        except UnpickleableError, exc:
            # Try to reveal which attribute is unpickleable by
            # re-pickling each attribute value on its own.
            attrname = None
            attrvalue = None
            for key, value in state_list:
                del unmanaged[:]
                outfile.seek(0)
                outfile.truncate()
                p = Pickler(outfile)
                p.persistent_id = persistent_id
                try:
                    p.dump(value)
                except UnpickleableError:
                    attrname = key
                    attrvalue = value
                    break
            if attrname is not None:
                # Provide a more informative exception.
                # NOTE(review): if APE_TRACE_UNPICKLEABLE is NOT set,
                # this branch raises nothing -- the original error is
                # swallowed and dumping continues on the now partially
                # rewritten stream.  Confirm this is intended.
                if os.environ.get('APE_TRACE_UNPICKLEABLE'):
                    # Provide an opportunity to examine
                    # the "attrvalue" attribute.
                    raise RuntimeError(
                        'Unable to pickle the %s attribute, %s, '
                        'of %s at %s.  %s.' % (
                        repr(attrname), repr(attrvalue), repr(event.obj),
                        repr(event.oid), str(exc)))
            else:
                # Couldn't help.
                raise

        p.persistent_id = lambda ob: None  # Stop recording references
        p.dump(unmanaged)
        event.upos.extend(unmanaged)

        s = outfile.getvalue()
        return encode_to_text(s, state.keys(), len(unmanaged))
00248 
00249 # helper functions for issubclass and isinstance
00250 # with extension classes.
00251 # borrowed from Greg Ward (thanx Greg :)
def ec_issubclass(class1, class2):
    """A version of 'issubclass' that works with extension classes
    as well as regular Python classes.

    Raises TypeError when either argument is neither a classic class
    nor something exposing a '__bases__' attribute.
    """
    # Both class objects are regular (classic) Python classes, so defer
    # to the builtin.  The original called __builtin__.issubclass, but
    # __builtin__ is never imported in this module, which raised
    # NameError; the builtin is reachable directly since nothing here
    # shadows the name.
    if type(class1) is ClassType and type(class2) is ClassType:
        return issubclass(class1, class2)

    # Both so-called class objects have a '__bases__' attribute: ie.,
    # they aren't regular Python classes, but they sure look like them.
    # Assume they are extension classes and reimplement what the builtin
    # 'issubclass()' does behind the scenes.
    elif hasattr(class1, '__bases__') and hasattr(class2, '__bases__'):
        # XXX it appears that "ec.__class__ is type(ec)" for an
        # extension class 'ec': could we/should we use this as an
        # additional check for extension classes?

        # Breadth-first traversal of class1's superclass tree.  Order
        # doesn't matter because we're just looking for a "yes/no"
        # answer from the tree.
        stack = [class1]
        while stack:
            if stack[0] is class2:
                return 1
            stack.extend(list(stack[0].__bases__))
            del stack[0]
        return 0

    # Not a regular class, not an extension class: blow up for
    # consistency with the builtin 'issubclass()'.
    else:
        raise TypeError("arguments must be class or ExtensionClass objects")
00288 
00289 # ec_issubclass ()
00290 
def ec_isinstance(object, klass):
    """A version of 'isinstance' that works with extension classes
    as well as regular Python classes."""
    # Classic classes are handled by the builtin directly.
    if type(klass) is ClassType:
        return isinstance(object, klass)
    # Otherwise walk the (extension-)class hierarchy by hand.
    if hasattr(object, '__class__'):
        return ec_issubclass(object.__class__, klass)
    return 0