plone3  3.1.7
utils.py
##########################################################
#
# Licensed under the terms of the GNU Public License
# (see docs/LICENSE.GPL)
#
# Copyright (c) 2005:
#   - The Open Planning Project (http://www.openplans.org/)
#   - Whit Morriss <whit at www.openplans.org>
#   - and contributors
#
##########################################################
from normalize import titleToNormalizedId as normalize
from zope.component import getMultiAdapter
from wicked.interfaces import IWickedFilter, WickedEvent

def linkcache(func):
    """Decorator that caches link lookup results on the wicked filter's
    cache, keyed by the normalized chunk."""
    def cache(wfilter, chunk, normalized):
        # the cache depends on both the query and the match;
        # this could use some untangling (generic function?)
        value = wfilter.cache.get(normalized)
        if not value:
            value = func(wfilter, chunk, normalized)
            if value:
                uid = value[0]['uid']
                wfilter.cache.set((normalized, uid), value)
        return value
    return cache

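# Illustrative sketch (not part of the original module): linkcache is meant
# to wrap a lookup method with the positional signature
# (self, chunk, normalized) on a filter whose `cache` attribute supports the
# get()/set() calls used above, and whose return value is a list of
# packBrain()-style dicts (so value[0]['uid'] exists).  The names below are
# hypothetical.
#
#   class ExampleFilter(object):
#       cache = SomeLinkCache()              # hypothetical cache with get/set
#
#       @linkcache
#       def getLinks(self, chunk, normalized):
#           return [packBrain(b) for b in self.query(normalized)]
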
def getWicked(field, instance, event=None):
    """Return the IWickedFilter multi-adapter for (field, instance, event),
    creating a default WickedEvent when no event is passed."""
    if not event:
        event = WickedEvent()
    return getMultiAdapter((field, instance, event), IWickedFilter)


def match(query):
    """Decorator for query methods: when best_match is true (the default),
    reduce the query results to the single best match via getMatch()."""
    def match(self, best_match=True):
        data = query(self)
        if data and best_match:
            return [getMatch(self.chunk, data, normalled=self.normalled)]
        return data
    return match


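# Illustrative sketch (not part of the original module): `match` wraps a
# query method on an object that exposes `chunk` and `normalled` attributes;
# with best_match=True the raw results are reduced to the single best brain
# via getMatch().  The names below are hypothetical.
#
#   class ExampleFinder(object):
#       chunk = 'Front Page'
#       normalled = 'front-page'
#
#       @match
#       def search(self):
#           return catalog(getId=self.normalled)   # hypothetical catalog query
#
#   ExampleFinder().search()                  # -> [best matching brain]
#   ExampleFinder().search(best_match=False)  # -> raw query results
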
def cleanUID(brain):
    """Take a catalog brain and return a hashable identifier: the UID, or
    the record id when the UID index holds Missing.Value."""
    from Missing import Value
    uid = brain.UID
    if uid is Value:
        uid = brain.data_record_id_
    return uid


def packBrain(brain):
    """
    Converts catalog data objects into template-ready dictionaries.

    >>> from Missing import Value
    >>> class brain(object):
    ...     def getPath(self):
    ...         return '/path'
    ...     data_record_id_=123
    ...     UID='unique'
    ...     getIcon='/donkey.gif'
    >>> packBrain(brain())['uid']
    'unique'

    packBrain needs to filter out Missing.Value objects that may creep in.

    >>> fbrain = brain()
    >>> brain.UID=Value
    >>> packBrain(fbrain)['uid']
    123
    """
    # Missing values are not hashable, nor can they be dict keys; they
    # should never see the light of day.  In the rare case that the UID
    # index is updated for an object, substitute the record id.

    uid = cleanUID(brain)

    return dict(path=brain.getPath(),
                icon=brain.getIcon,
                uid=uid,
                rid=brain.data_record_id_)


def getMatch(chunk, brains, normalled=None):
    """
    Given a set of query results and the wicked link text, return
    the single brain that represents the correct object to link to, or
    None.

    Assumes that brains are already ordered oldest to newest, so
    the first absolute match is the one returned.  Matches on id
    take priority over matches on title.

    All strings are normalized before comparison.

    >>> from testing.general import pdo
    >>> mkbrain = lambda i: pdo(getId='-'.join([str(x) for x in i]), Title='%s %s' %i, created=i[1])
    >>> seed = zip('abc', range(3))
    >>> brains = [mkbrain(i) for i in seed]
    >>> chunk = ''
    >>> normalled = ''

    Test null conditions

    >>> getMatch(chunk, brains)
    >>> getMatch(chunk, brains, normalled)
    >>> getMatch(chunk, brains[:1], normalled)

    Test single brain matches

    >>> getMatch('', brains[:1], 'a-0').getId
    'a-0'

    >>> getMatch(brains[0].getId, brains[:1], 'blah').getId
    'a-0'

    >>> getMatch(brains[0].Title, brains[:1]).getId
    'a-0'

    Test multi-brain matches: brain 0 should win over brain 3
    for all matches

    >>> from copy import copy
    >>> newbrain = copy(brains[0])
    >>> newbrain.update(dict(created=3))
    >>> brains = brains + [newbrain]
    >>> getMatch('', brains, 'a-0').created
    0

    >>> getMatch(brains[0].getId, brains).created
    0

    >>> getMatch(brains[0].Title, brains).created
    0

    Test title to chunk match

    >>> brains[3].Title='A unique title'
    >>> getMatch(brains[3].Title, brains).Title
    'A unique title'
    """
    normalled_chunk = normalled
    if not normalled_chunk:
        normalled_chunk = normalize(chunk)
    if not isinstance(brains, list):
        # make a copy to avoid AdvancedQuery sequencing issues
        brains = [x for x in brains]

    # inspect the single-result case

    if len(brains) == 1 and (brains[0].getId == normalled_chunk
                             or brains[0].getId.strip() == chunk.strip()
                             or normalize(brains[0].Title) == normalled_chunk):
        return brains[0]

    # first, match on id

    # reversing the brains into a dict clobbers younger matches with
    # the same id.  we'll match against the normalled chunk, then the
    # raw chunk (for single-word chunks)

    btup = [(brain.getId, brain) for brain in brains]
    id_dict = dict(reversed(btup))
    for unk in (normalled_chunk, chunk):
        if unk in id_dict:
            return id_dict[unk]

    # second, match on Title
    brains = [brain for brain in brains
              if normalize(brain.Title) == normalled_chunk]

    return brains and brains[0] or None


def counter():
    count = 0
    while True:
        count += 1
        yield count


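# Example (illustrative): counter() yields 1, 2, 3, ... on successive calls.
#
#   >>> tick = counter()
#   >>> tick.next(), tick.next(), tick.next()
#   (1, 2, 3)
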
_marker = object()

class Memoizer(object):
    """Per-instance memoization helpers; cached values are stored on the
    instance under `propname`."""
    propname = '_mp_cache'

    def clearbefore(self, func):
        def clear(*args, **kwargs):
            inst = args[0]
            if hasattr(inst, self.propname):
                delattr(inst, self.propname)
            return func(*args, **kwargs)
        return clear

    def memoizedproperty(self, func):
        return property(self.memoize(func))

    def memoize(self, func):
        def memogetter(*args, **kwargs):
            inst = args[0]
            cache = getattr(inst, self.propname, dict())
            # key is derived from the function name, the positional
            # arguments and the keyword names
            key = hash((func.__name__, args, frozenset(kwargs)))
            val = cache.get(key, _marker)
            if val is _marker:
                val = func(*args, **kwargs)
                cache[key] = val
                setattr(inst, self.propname, cache)
            return val
        return memogetter


_m = Memoizer()
memoize = _m.memoize
memoizedproperty = _m.memoizedproperty
clearbefore = _m.clearbefore


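# Illustrative sketch (not part of the original module): how the module-level
# helpers combine on a class.  Results are cached per instance under the
# `_mp_cache` attribute; `clearbefore` drops that cache before a mutating
# call so the next access recomputes.  The names below are hypothetical.
#
#   class ExampleClient(object):
#
#       @memoizedproperty
#       def settings(self):
#           return load_settings()       # hypothetical expensive load
#
#       @memoize
#       def lookup(self, key):
#           return backend_fetch(key)    # hypothetical backend call
#
#       @clearbefore
#       def save(self, **values):
#           backend_store(values)        # cache is cleared before this runs
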
def test_suite():
    import unittest
    from zope.testing import doctest
    optionflags = doctest.REPORT_ONLY_FIRST_FAILURE | doctest.ELLIPSIS
    return doctest.DocTestSuite('wicked.utils',
                                optionflags=optionflags)
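

# Optional convenience (not part of the original module): run the doctest
# suite above directly, assuming zope.testing is importable.
#
#   if __name__ == '__main__':
#       import unittest
#       unittest.main(defaultTest='test_suite')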