
moin  1.9.0~rc2
MoinMoin.support.pygments.lexer.RegexLexerMeta Class Reference
Inheritance diagram for MoinMoin.support.pygments.lexer.RegexLexerMeta (graph image omitted).
Collaboration diagram for MoinMoin.support.pygments.lexer.RegexLexerMeta (graph image omitted).


Public Member Functions

def process_tokendef
def __call__
def __new__

Private Member Functions

def _process_state

Detailed Description

Metaclass for RegexLexer; it creates the self._tokens attribute from
self.tokens on the first instantiation.

Definition at line 346 of file lexer.py.
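
For orientation, here is a minimal sketch of how this metaclass is exercised; TinyLexer and its rules are invented for illustration, and the imports assume a standalone Pygments install (inside MoinMoin the path would be MoinMoin.support.pygments):

    from pygments.lexer import RegexLexer
    from pygments.token import Keyword, Text

    class TinyLexer(RegexLexer):
        name = 'Tiny'
        tokens = {
            'root': [
                (r'if|else', Keyword),
                (r'\s+', Text),
            ],
        }

    # `tokens` is not compiled when the class is created; RegexLexerMeta.__call__
    # builds `_tokens` lazily on the first instantiation
    assert '_tokens' not in TinyLexer.__dict__
    TinyLexer()
    assert '_tokens' in TinyLexer.__dict__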


Member Function Documentation

def MoinMoin.support.pygments.lexer.RegexLexerMeta.__call__ (   cls,
  *args,
  **kwds 
)

Definition at line 423 of file lexer.py.

    def __call__(cls, *args, **kwds):
        if not hasattr(cls, '_tokens'):
            cls._all_tokens = {}
            cls._tmpname = 0
            if hasattr(cls, 'token_variants') and cls.token_variants:
                # don't process yet
                pass
            else:
                cls._tokens = cls.process_tokendef('', cls.tokens)

        return type.__call__(cls, *args, **kwds)

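A brief, hypothetical check of the caching behaviour (continuing the TinyLexer sketch from the Detailed Description): compilation runs once per class, and later instantiations reuse the table.

    a = TinyLexer()
    b = TinyLexer()
    # process_tokendef ran only on the first call; the compiled table is
    # cached on the class and shared by every instance
    assert TinyLexer._tokens is TinyLexer._all_tokens['']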

def MoinMoin.support.pygments.lexer.LexerMeta.__new__ (   cls,
  name,
  bases,
  d 
) [inherited]

Definition at line 38 of file lexer.py.

    def __new__(cls, name, bases, d):
        if 'analyse_text' in d:
            d['analyse_text'] = make_analysator(d['analyse_text'])
        return type.__new__(cls, name, bases, d)

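make_analysator wraps a plain analyse_text function defined in a lexer's class body into a static method that returns a float clamped to the range 0.0..1.0. A minimal sketch (SniffLexer and its heuristic are invented for illustration):

    from pygments.lexer import RegexLexer
    from pygments.token import Text

    class SniffLexer(RegexLexer):
        name = 'Sniff'
        tokens = {'root': [(r'.+', Text)]}

        def analyse_text(text):
            # plain function here; LexerMeta.__new__ replaces it with a
            # make_analysator wrapper before the class is created
            return 5  # deliberately out of range

    print(SniffLexer.analyse_text('anything'))  # 1.0 after clamping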

def MoinMoin.support.pygments.lexer.RegexLexerMeta._process_state (   cls,
  unprocessed,
  processed,
  state 
) [private]

Definition at line 352 of file lexer.py.

    def _process_state(cls, unprocessed, processed, state):
        assert type(state) is str, "wrong state name %r" % state
        assert state[0] != '#', "invalid state name %r" % state
        if state in processed:
            return processed[state]
        tokens = processed[state] = []
        rflags = cls.flags
        for tdef in unprocessed[state]:
            if isinstance(tdef, include):
                # it's a state reference
                assert tdef != state, "circular state reference %r" % state
                tokens.extend(cls._process_state(unprocessed, processed, str(tdef)))
                continue

            assert type(tdef) is tuple, "wrong rule def %r" % tdef

            try:
                rex = re.compile(tdef[0], rflags).match
            except Exception, err:
                raise ValueError("uncompilable regex %r in state %r of %r: %s" %
                                 (tdef[0], state, cls, err))

            assert type(tdef[1]) is _TokenType or callable(tdef[1]), \
                   'token type must be simple type or callable, not %r' % (tdef[1],)

            if len(tdef) == 2:
                new_state = None
            else:
                tdef2 = tdef[2]
                if isinstance(tdef2, str):
                    # an existing state
                    if tdef2 == '#pop':
                        new_state = -1
                    elif tdef2 in unprocessed:
                        new_state = (tdef2,)
                    elif tdef2 == '#push':
                        new_state = tdef2
                    elif tdef2[:5] == '#pop:':
                        new_state = -int(tdef2[5:])
                    else:
                        assert False, 'unknown new state %r' % tdef2
                elif isinstance(tdef2, combined):
                    # combine a new state from existing ones
                    new_state = '_tmp_%d' % cls._tmpname
                    cls._tmpname += 1
                    itokens = []
                    for istate in tdef2:
                        assert istate != state, 'circular state ref %r' % istate
                        itokens.extend(cls._process_state(unprocessed,
                                                          processed, istate))
                    processed[new_state] = itokens
                    new_state = (new_state,)
                elif isinstance(tdef2, tuple):
                    # push more than one state
                    for state in tdef2:
                        assert (state in unprocessed or
                                state in ('#pop', '#push')), \
                               'unknown new state ' + state
                    new_state = tdef2
                else:
                    assert False, 'unknown new state def %r' % tdef2
            tokens.append((rex, tdef[1], new_state))
        return tokens
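
Every transition form this method accepts can be exercised with a small, hypothetical token table; each construct below maps onto one of the branches above (a minimal sketch, with invented state and rule names):

    from pygments.lexer import RegexLexer, include, combined
    from pygments.token import Comment, Keyword, String, Text

    class StatefulLexer(RegexLexer):
        name = 'Stateful'
        tokens = {
            'whitespace': [
                (r'\s+', Text),
            ],
            'root': [
                include('whitespace'),         # include: rules inlined by recursion
                (r'"', String, 'string'),      # existing state: new_state = ('string',)
                (r'/\*', Comment, 'comment'),
                (r'<<', Keyword, ('string', 'string')),  # tuple: push two states
                (r'def', Keyword, combined('whitespace', 'string')),  # anonymous _tmp_N state
            ],
            'string': [
                (r'"', String, '#pop'),        # '#pop' becomes new_state = -1
                (r'[^"]+', String),
            ],
            'comment': [
                (r'/\*', Comment, '#push'),    # '#push' re-enters this state (nesting)
                (r'\*/', Comment, '#pop:1'),   # '#pop:n' becomes new_state = -n
                (r'[^*/]+', Comment),
                (r'[*/]', Comment),
            ],
        }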

def MoinMoin.support.pygments.lexer.RegexLexerMeta.process_tokendef (   cls,
  name,
  tokendefs = None 
)

Definition at line 416 of file lexer.py.

    def process_tokendef(cls, name, tokendefs=None):
        processed = cls._all_tokens[name] = {}
        tokendefs = tokendefs or cls.tokens[name]
        for state in tokendefs.keys():
            cls._process_state(tokendefs, processed, state)
        return processed
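
The processed table maps each state name to a list of (match function, token type, new state) triples. Continuing the hypothetical StatefulLexer sketch above:

    StatefulLexer()  # first instantiation triggers process_tokendef('')
    rexmatch, ttype, new_state = StatefulLexer._tokens['string'][0]
    assert new_state == -1              # '#pop' was rewritten to -1
    assert rexmatch('"x"') is not None  # bound .match of the compiled r'"'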


The documentation for this class was generated from the following file:
lexer.py