
python3.2 3.2.2
asdl.ASDLParser Class Reference
Inheritance diagram for asdl.ASDLParser: asdl.ASDLParser derives from spark.GenericParser.


Public Member Functions

def __init__
def typestring
def error
def p_module_0
def p_module
def p_version
def p_definition_0
def p_definition_1
def p_definition
def p_type_0
def p_type_1
def p_type_2
def p_product
def p_sum_0
def p_sum_1
def p_sum_2
def p_constructor_0
def p_constructor_1
def p_fields_0
def p_fields_1
def p_field_0
def p_field_1
def p_field_2
def p_field_3
def p_field_4
def p_field_5
def __getstate__
def __setstate__
def preprocess
def addRule
def collectRules
def augment
def computeNull
def makeState0
def finalState
def makeNewRules
def parse
def isnullable
def skip
def makeState
def goto
def gotoT
def gotoST
def add
def makeSet
def makeSet_fast
def predecessor
def causal
def deriveEpsilon
def buildTree
def ambiguity
def resolve

Public Attributes

 rules
 rule2func
 rule2name
 ruleschanged
 newrules
 new2old
 cores
 states
 nullable
 links
_PyObject_HEAD_EXTRA Py_ssize_t ob_refcnt
struct _typeobject *ob_type

Detailed Description

Definition at line 106 of file asdl.py.
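
ASDLParser is a SPARK GenericParser whose start symbol is "module" (see __init__ below). The p_* methods carry the grammar in their doc strings, and parse() turns a token stream into Module/Type/Sum/Product/Constructor/Field objects. A minimal usage sketch; the tokens variable is an assumption here (asdl.py's own scanner produces real tokens with type, value and lineno attributes):

    parser = asdl.ASDLParser()
    tree = parser.parse(tokens)   # raises ASDLSyntaxError on a bad token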


Constructor & Destructor Documentation

def asdl.ASDLParser.__init__ (   self)

Definition at line 107 of file asdl.py.

00107 
00108     def __init__(self):
00109         super(ASDLParser, self).__init__("module")


Member Function Documentation

def spark.GenericParser.__getstate__ (   self) [inherited]

Definition at line 132 of file spark.py.

00132 
00133     def __getstate__(self):
00134         if self.ruleschanged:
00135             #
00136             #  XXX - duplicated from parse()
00137             #
00138             self.computeNull()
00139             self.newrules = {}
00140             self.new2old = {}
00141             self.makeNewRules()
00142             self.ruleschanged = 0
00143             self.edges, self.cores = {}, {}
00144             self.states = { 0: self.makeState0() }
00145             self.makeState(0, self._BOF)
00146         #
00147         #  XXX - should find a better way to do this..
00148         #
00149         changes = 1
00150         while changes:
00151             changes = 0
00152             for k, v in self.edges.items():
00153                 if v is None:
00154                     state, sym = k
00155                     if state in self.states:
00156                         self.goto(state, sym)
00157                         changes = 1
00158         rv = self.__dict__.copy()
00159         for s in self.states.values():
00160             del s.items
00161         del rv['rule2func']
00162         del rv['nullable']
00163         del rv['cores']
00164         return rv

def spark.GenericParser.__setstate__ (   self,
  D 
) [inherited]

Definition at line 165 of file spark.py.

00165 
00166     def __setstate__(self, D):
00167         self.rules = {}
00168         self.rule2func = {}
00169         self.rule2name = {}
00170         self.collectRules()
00171         start = D['rules'][self._START][0][1][1]        # Blech.
00172         self.augment(start)
00173         D['rule2func'] = self.rule2func
00174         D['makeSet'] = self.makeSet_fast
00175         self.__dict__ = D
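
Together, __getstate__() and __setstate__() make a built parser picklable: __getstate__() forces the state machine to exist, then drops what cannot or need not be pickled (rule2func, nullable, cores, each state's items), while __setstate__() re-collects the p_* rules to rebuild rule2func and swaps in makeSet_fast. A hedged round-trip sketch:

    import pickle

    p = asdl.ASDLParser()
    blob = pickle.dumps(p)    # state machine built, transient parts stripped
    p2 = pickle.loads(blob)   # rules re-collected, makeSet_fast installed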

def spark.GenericParser.add (   self,
  set,
  item,
  i = None,
  predecessor = None,
  causal = None 
) [inherited]

Definition at line 470 of file spark.py.

00470 
00471     def add(self, set, item, i=None, predecessor=None, causal=None):
00472         if predecessor is None:
00473             if item not in set:
00474                 set.append(item)
00475         else:
00476             key = (item, i)
00477             if item not in set:
00478                 self.links[key] = []
00479                 set.append(item)
00480             self.links[key].append((predecessor, causal))

def spark.GenericParser.addRule (   self,
  doc,
  func,
  _preprocess = 1 
) [inherited]

Definition at line 183 of file spark.py.

00183 
00184     def addRule(self, doc, func, _preprocess=1):
00185         fn = func
00186         rules = doc.split()
00187 
00188         index = []
00189         for i in range(len(rules)):
00190             if rules[i] == '::=':
00191                 index.append(i-1)
00192         index.append(len(rules))
00193 
00194         for i in range(len(index)-1):
00195             lhs = rules[index[i]]
00196             rhs = rules[index[i]+2:index[i+1]]
00197             rule = (lhs, tuple(rhs))
00198 
00199             if _preprocess:
00200                 rule, fn = self.preprocess(rule, func)
00201 
00202             if lhs in self.rules:
00203                 self.rules[lhs].append(rule)
00204             else:
00205                 self.rules[lhs] = [ rule ]
00206             self.rule2func[rule] = fn
00207             self.rule2name[rule] = func.__name__[2:]
00208         self.ruleschanged = 1
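
The doc argument is a rule doc string; every '::=' starts a new rule whose left-hand side is the word just before it, so one doc string may hold several rules. A standalone sketch of the same splitting (split_rules is a hypothetical helper, not part of spark.py):

    def split_rules(doc):
        # Mirror of addRule(): each '::=' marks a rule whose LHS is the
        # preceding word; a sentinel index closes the last rule.
        words = doc.split()
        starts = [i - 1 for i, w in enumerate(words) if w == '::=']
        starts.append(len(words))
        return [(words[s], tuple(words[s + 2:starts[k + 1]]))
                for k, s in enumerate(starts[:-1])]

    print(split_rules(" fields ::= field , fields "))
    # [('fields', ('field', ',', 'fields'))]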

def spark.GenericParser.ambiguity (   self,
  rules 
) [inherited]

Definition at line 659 of file spark.py.

00659 
00660     def ambiguity(self, rules):
00661         #
00662         #  XXX - problem here and in collectRules() if the same rule
00663         #        appears in >1 method.  Also undefined results if rules
00664         #        causing the ambiguity appear in the same method.
00665         #
00666         sortlist = []
00667         name2index = {}
00668         for i in range(len(rules)):
00669             lhs, rhs = rule = rules[i]
00670             name = self.rule2name[self.new2old[rule]]
00671             sortlist.append((len(rhs), name))
00672             name2index[name] = i
00673         sortlist.sort()
00674         list = [b for a, b in sortlist]
00675         return rules[name2index[self.resolve(list)]]

def spark.GenericParser.augment (   self,
  start 
) [inherited]

Definition at line 216 of file spark.py.

00216 
00217     def augment(self, start):
00218         rule = '%s ::= %s %s' % (self._START, self._BOF, start)
00219         self.addRule(rule, lambda args: args[1], 0)

def spark.GenericParser.buildTree (   self,
  nt,
  item,
  tokens,
  k 
) [inherited]

Definition at line 626 of file spark.py.

00626 
00627     def buildTree(self, nt, item, tokens, k):
00628         state, parent = item
00629 
00630         choices = []
00631         for rule in self.states[state].complete:
00632             if rule[0] == nt:
00633                 choices.append(rule)
00634         rule = choices[0]
00635         if len(choices) > 1:
00636             rule = self.ambiguity(choices)
00637         #output(rule)
00638 
00639         rhs = rule[1]
00640         attr = [None] * len(rhs)
00641 
00642         for i in range(len(rhs)-1, -1, -1):
00643             sym = rhs[i]
00644             if sym not in self.newrules:
00645                 if sym != self._BOF:
00646                     attr[i] = tokens[k-1]
00647                     key = (item, k)
00648                     item, k = self.predecessor(key, None)
00649             #elif self.isnullable(sym):
00650             elif self._NULLABLE == sym[0:len(self._NULLABLE)]:
00651                 attr[i] = self.deriveEpsilon(sym)
00652             else:
00653                 key = (item, k)
00654                 why = self.causal(key)
00655                 attr[i] = self.buildTree(sym, why[0],
00656                                          tokens, why[1])
00657                 item, k = self.predecessor(key, why)
00658         return self.rule2func[self.new2old[rule]](attr)

def spark.GenericParser.causal (   self,
  key 
) [inherited]

Definition at line 600 of file spark.py.

00600 
00601     def causal(self, key):
00602         links = self.links[key]
00603         if len(links) == 1:
00604             return links[0][1]
00605         choices = []
00606         rule2cause = {}
00607         for p, c in links:
00608             rule = c[2]
00609             choices.append(rule)
00610             rule2cause[rule] = c
00611         return rule2cause[self.ambiguity(choices)]

def spark.GenericParser.collectRules (   self) [inherited]

Definition at line 209 of file spark.py.

00209 
00210     def collectRules(self):
00211         for name in _namelist(self):
00212             if name[:2] == 'p_':
00213                 func = getattr(self, name)
00214                 doc = func.__doc__
00215                 self.addRule(doc, func)
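
collectRules() is what ties method names to the grammar: every method whose name begins with p_ contributes the rules in its doc string. A self-contained toy parser built the same way; this sketch assumes spark.py is importable and relies on the base typestring() returning None, so plain strings can act as tokens:

    from spark import GenericParser

    class CountParser(GenericParser):
        # Counts a run of 'x' tokens; the doc strings *are* the grammar.
        def __init__(self):
            GenericParser.__init__(self, 'seq')

        def p_seq_0(self, args):
            ' seq ::= x '
            return 1

        def p_seq_1(self, args):
            ' seq ::= x seq '
            return 1 + args[1]

    print(CountParser().parse(['x', 'x', 'x']))   # 3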

def spark.GenericParser.computeNull (   self) [inherited]

Definition at line 220 of file spark.py.

00220 
00221     def computeNull(self):
00222         self.nullable = {}
00223         tbd = []
00224 
00225         for rulelist in self.rules.values():
00226             lhs = rulelist[0][0]
00227             self.nullable[lhs] = 0
00228             for rule in rulelist:
00229                 rhs = rule[1]
00230                 if len(rhs) == 0:
00231                     self.nullable[lhs] = 1
00232                     continue
00233                 #
00234                 #  We only need to consider rules which
00235                 #  consist entirely of nonterminal symbols.
00236                 #  This should be a savings on typical
00237                 #  grammars.
00238                 #
00239                 for sym in rhs:
00240                     if sym not in self.rules:
00241                         break
00242                 else:
00243                     tbd.append(rule)
00244         changes = 1
00245         while changes:
00246             changes = 0
00247             for lhs, rhs in tbd:
00248                 if self.nullable[lhs]:
00249                     continue
00250                 for sym in rhs:
00251                     if not self.nullable[sym]:
00252                         break
00253                 else:
00254                     self.nullable[lhs] = 1
00255                     changes = 1
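
computeNull() seeds nullable[lhs] from the empty right-hand sides, then iterates to a fixed point over the rules built purely from nonterminals. The same computation in standalone form, on a toy grammar where an empty tuple is an epsilon rule:

    def nullable_fixpoint(rules):
        # rules: {lhs: [rhs_tuple, ...]}; symbols absent from rules are
        # terminals and are never nullable.
        nullable = {lhs: any(len(rhs) == 0 for rhs in rhss)
                    for lhs, rhss in rules.items()}
        changes = True
        while changes:
            changes = False
            for lhs, rhss in rules.items():
                if nullable[lhs]:
                    continue
                if any(all(nullable.get(sym, False) for sym in rhs)
                       for rhs in rhss):
                    nullable[lhs] = changes = True
        return nullable

    print(nullable_fixpoint({'A': [()], 'B': [('A', 'A')], 'C': [('B', 'x')]}))
    # {'A': True, 'B': True, 'C': False}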

def spark.GenericParser.deriveEpsilon (   self,
  nt 
) [inherited]

Definition at line 612 of file spark.py.

00612 
00613     def deriveEpsilon(self, nt):
00614         if len(self.newrules[nt]) > 1:
00615             rule = self.ambiguity(self.newrules[nt])
00616         else:
00617             rule = self.newrules[nt][0]
00618         #output(rule)
00619 
00620         rhs = rule[1]
00621         attr = [None] * len(rhs)
00622 
00623         for i in range(len(rhs)-1, -1, -1):
00624             attr[i] = self.deriveEpsilon(rhs[i])
00625         return self.rule2func[self.new2old[rule]](attr)

def asdl.ASDLParser.error (   self,
  tok 
)

Reimplemented from spark.GenericParser.

Definition at line 113 of file asdl.py.

00113 
00114     def error(self, tok):
00115         raise ASDLSyntaxError(tok.lineno, tok)

def spark.GenericParser.finalState (   self,
  tokens 
) [inherited]

Definition at line 262 of file spark.py.

00262 
00263     def finalState(self, tokens):
00264         #
00265         #  Yuck.
00266         #
00267         if len(self.newrules[self._START]) == 2 and len(tokens) == 0:
00268             return 1
00269         start = self.rules[self._START][0][1][1]
00270         return self.goto(1, start)

def spark.GenericParser.goto (   self,
  state,
  sym 
) [inherited]

Definition at line 443 of file spark.py.

00443 
00444     def goto(self, state, sym):
00445         key = (state, sym)
00446         if key not in self.edges:
00447             #
00448             #  No transitions from state on sym.
00449             #
00450             return None
00451 
00452         rv = self.edges[key]
00453         if rv is None:
00454             #
00455             #  Target state isn't generated yet.  Remedy this.
00456             #
00457             rv = self.makeState(state, sym)
00458             self.edges[key] = rv
00459         return rv

def spark.GenericParser.gotoST (   self,
  state,
  st 
) [inherited]

Definition at line 463 of file spark.py.

00463 
00464     def gotoST(self, state, st):
00465         rv = []
00466         for t in self.states[state].T:
00467             if st == t:
00468                 rv.append(self.goto(state, t))
00469         return rv

def spark.GenericParser.gotoT (   self,
  state,
  t 
) [inherited]

Definition at line 460 of file spark.py.

00460 
00461     def gotoT(self, state, t):
00462         return [self.goto(state, t)]

def spark.GenericParser.isnullable (   self,
  sym 
) [inherited]

Definition at line 348 of file spark.py.

00348 
00349     def isnullable(self, sym):
00350         #
00351         #  For symbols in G_e only.  If we weren't supporting 1.5,
00352         #  could just use sym.startswith().
00353         #
00354         return self._NULLABLE == sym[0:len(self._NULLABLE)]

def spark.GenericParser.makeNewRules (   self) [inherited]

Definition at line 271 of file spark.py.

00271 
00272     def makeNewRules(self):
00273         worklist = []
00274         for rulelist in self.rules.values():
00275             for rule in rulelist:
00276                 worklist.append((rule, 0, 1, rule))
00277 
00278         for rule, i, candidate, oldrule in worklist:
00279             lhs, rhs = rule
00280             n = len(rhs)
00281             while i < n:
00282                 sym = rhs[i]
00283                 if sym not in self.rules or \
00284                    not self.nullable[sym]:
00285                     candidate = 0
00286                     i = i + 1
00287                     continue
00288 
00289                 newrhs = list(rhs)
00290                 newrhs[i] = self._NULLABLE+sym
00291                 newrule = (lhs, tuple(newrhs))
00292                 worklist.append((newrule, i+1,
00293                                  candidate, oldrule))
00294                 candidate = 0
00295                 i = i + 1
00296             else:
00297                 if candidate:
00298                     lhs = self._NULLABLE+lhs
00299                     rule = (lhs, rhs)
00300                 if lhs in self.newrules:
00301                     self.newrules[lhs].append(rule)
00302                 else:
00303                     self.newrules[lhs] = [ rule ]
00304                 self.new2old[rule] = oldrule
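
Reading the worklist loop: each nullable symbol position spawns a variant whose symbol is prefixed with self._NULLABLE, and a rule in which every symbol was marked becomes a rule for the _NULLABLE version of its own LHS; new2old maps every variant back to the original, so the semantic action fires exactly once. Sketch (marker spelling is internal):

    # S ::= A b, with A nullable, also yields the variant
    #   S ::= <A> b        where '<A>' stands for self._NULLABLE + 'A'
    # and each generated rule satisfies new2old[variant] == ('S', ('A', 'b'))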

def spark.GenericParser.makeSet (   self,
  token,
  sets,
  i 
) [inherited]

Definition at line 481 of file spark.py.

00481 
00482     def makeSet(self, token, sets, i):
00483         cur, next = sets[i], sets[i+1]
00484 
00485         ttype = token is not None and self.typestring(token) or None
00486         if ttype is not None:
00487             fn, arg = self.gotoT, ttype
00488         else:
00489             fn, arg = self.gotoST, token
00490 
00491         for item in cur:
00492             ptr = (item, i)
00493             state, parent = item
00494             add = fn(state, arg)
00495             for k in add:
00496                 if k is not None:
00497                     self.add(next, (k, parent), i+1, ptr)
00498                     nk = self.goto(k, None)
00499                     if nk is not None:
00500                         self.add(next, (nk, i+1))
00501 
00502             if parent == i:
00503                 continue
00504 
00505             for rule in self.states[state].complete:
00506                 lhs, rhs = rule
00507                 for pitem in sets[parent]:
00508                     pstate, pparent = pitem
00509                     k = self.goto(pstate, lhs)
00510                     if k is not None:
00511                         why = (item, i, rule)
00512                         pptr = (pitem, parent)
00513                         self.add(cur, (k, pparent),
00514                                  i, pptr, why)
00515                         nk = self.goto(k, None)
00516                         if nk is not None:
00517                             self.add(cur, (nk, i))

def spark.GenericParser.makeSet_fast (   self,
  token,
  sets,
  i 
) [inherited]

Definition at line 518 of file spark.py.

00518 
00519     def makeSet_fast(self, token, sets, i):
00520         #
00521         #  Call *only* when the entire state machine has been built!
00522         #  It relies on self.edges being filled in completely, and
00523         #  then duplicates and inlines code to boost speed at the
00524         #  cost of extreme ugliness.
00525         #
00526         cur, next = sets[i], sets[i+1]
00527         ttype = token is not None and self.typestring(token) or None
00528 
00529         for item in cur:
00530             ptr = (item, i)
00531             state, parent = item
00532             if ttype is not None:
00533                 k = self.edges.get((state, ttype), None)
00534                 if k is not None:
00535                     #self.add(next, (k, parent), i+1, ptr)
00536                     #INLINED --v
00537                     new = (k, parent)
00538                     key = (new, i+1)
00539                     if new not in next:
00540                         self.links[key] = []
00541                         next.append(new)
00542                     self.links[key].append((ptr, None))
00543                     #INLINED --^
00544                     #nk = self.goto(k, None)
00545                     nk = self.edges.get((k, None), None)
00546                     if nk is not None:
00547                         #self.add(next, (nk, i+1))
00548                         #INLINED --v
00549                         new = (nk, i+1)
00550                         if new not in next:
00551                             next.append(new)
00552                         #INLINED --^
00553             else:
00554                 add = self.gotoST(state, token)
00555                 for k in add:
00556                     if k is not None:
00557                         self.add(next, (k, parent), i+1, ptr)
00558                         #nk = self.goto(k, None)
00559                         nk = self.edges.get((k, None), None)
00560                         if nk is not None:
00561                             self.add(next, (nk, i+1))
00562 
00563             if parent == i:
00564                 continue
00565 
00566             for rule in self.states[state].complete:
00567                 lhs, rhs = rule
00568                 for pitem in sets[parent]:
00569                     pstate, pparent = pitem
00570                     #k = self.goto(pstate, lhs)
00571                     k = self.edges.get((pstate, lhs), None)
00572                     if k is not None:
00573                         why = (item, i, rule)
00574                         pptr = (pitem, parent)
00575                         #self.add(cur, (k, pparent),
00576                         #        i, pptr, why)
00577                         #INLINED --v
00578                         new = (k, pparent)
00579                         key = (new, i)
00580                         if new not in cur:
00581                             self.links[key] = []
00582                             cur.append(new)
00583                         self.links[key].append((pptr, why))
00584                         #INLINED --^
00585                         #nk = self.goto(k, None)
00586                         nk = self.edges.get((k, None), None)
00587                         if nk is not None:
00588                             #self.add(cur, (nk, i))
00589                             #INLINED --v
00590                             new = (nk, i)
00591                             if new not in cur:
00592                                 cur.append(new)
00593                             #INLINED --^

def spark.GenericParser.makeState (   self,
  state,
  sym 
) [inherited]

Definition at line 363 of file spark.py.

00363 
00364     def makeState(self, state, sym):
00365         assert sym is not None
00366         #
00367         #  Compute \epsilon-kernel state's core and see if
00368         #  it exists already.
00369         #
00370         kitems = []
00371         for rule, pos in self.states[state].items:
00372             lhs, rhs = rule
00373             if rhs[pos:pos+1] == (sym,):
00374                 kitems.append((rule, self.skip(rule, pos+1)))
00375         core = kitems
00376 
00377         core.sort()
00378         tcore = tuple(core)
00379         if tcore in self.cores:
00380             return self.cores[tcore]
00381         #
00382         #  Nope, doesn't exist.  Compute it and the associated
00383         #  \epsilon-nonkernel state together; we'll need it right away.
00384         #
00385         k = self.cores[tcore] = len(self.states)
00386         K, NK = _State(k, kitems), _State(k+1, [])
00387         self.states[k] = K
00388         predicted = {}
00389 
00390         edges = self.edges
00391         rules = self.newrules
00392         for X in K, NK:
00393             worklist = X.items
00394             for item in worklist:
00395                 rule, pos = item
00396                 lhs, rhs = rule
00397                 if pos == len(rhs):
00398                     X.complete.append(rule)
00399                     continue
00400 
00401                 nextSym = rhs[pos]
00402                 key = (X.stateno, nextSym)
00403                 if nextSym not in rules:
00404                     if key not in edges:
00405                         edges[key] = None
00406                         X.T.append(nextSym)
00407                 else:
00408                     edges[key] = None
00409                     if nextSym not in predicted:
00410                         predicted[nextSym] = 1
00411                         for prule in rules[nextSym]:
00412                             ppos = self.skip(prule)
00413                             new = (prule, ppos)
00414                             NK.items.append(new)
00415             #
00416             #  Problem: we know K needs generating, but we
00417             #  don't yet know about NK.  Can't commit anything
00418             #  regarding NK to self.edges until we're sure.  Should
00419             #  we delay committing on both K and NK to avoid this
00420             #  hacky code?  This creates other problems..
00421             #
00422             if X is K:
00423                 edges = {}
00424 
00425         if NK.items == []:
00426             return k
00427 
00428         #
00429         #  Check for \epsilon-nonkernel's core.  Unfortunately we
00430         #  need to know the entire set of predicted nonterminals
00431         #  to do this without accidentally duplicating states.
00432         #
00433         core = sorted(predicted.keys())
00434         tcore = tuple(core)
00435         if tcore in self.cores:
00436             self.edges[(k, None)] = self.cores[tcore]
00437             return k
00438 
00439         nk = self.cores[tcore] = self.edges[(k, None)] = NK.stateno
00440         self.edges.update(edges)
00441         self.states[nk] = NK
00442         return k

def spark.GenericParser.makeState0 (   self) [inherited]

Definition at line 256 of file spark.py.

00256 
00257     def makeState0(self):
00258         s0 = _State(0, [])
00259         for rule in self.newrules[self._START]:
00260             s0.items.append((rule, 0))
00261         return s0

def asdl.ASDLParser.p_constructor_0 (   self,
  id 
)

Definition at line 192 of file asdl.py.

00192 
00193     def p_constructor_0(self, id):
00194         " constructor ::= Id "
00195         return Constructor(id[0])

def asdl.ASDLParser.p_constructor_1 (   self,
  info 
)

Definition at line 196 of file asdl.py.

00196 
00197     def p_constructor_1(self, info):
00198         " constructor ::= Id ( fields ) "
00199         id, _0, fields, _1 = info
00200         # XXX can't I just construct things in the right order?
00201         fields.reverse()
00202         return Constructor(id, fields)

def asdl.ASDLParser.p_definition (   self,
  info 
)

Definition at line 148 of file asdl.py.

00148 
00149     def p_definition(self, info):
00150         " definition ::= Id = type "
00151         id, _, type = info
00152         return [Type(id, type)]

def asdl.ASDLParser.p_definition_0 (   self,
  definition 
)

Definition at line 140 of file asdl.py.

00140 
00141     def p_definition_0(self, definition):
00142         " definitions ::= definition "
00143         return definition[0]

def asdl.ASDLParser.p_definition_1 (   self,
  definitions 
)

Definition at line 144 of file asdl.py.

00144 
00145     def p_definition_1(self, definitions):
00146         " definitions ::= definition definitions "
00147         return definitions[0] + definitions[1]

def asdl.ASDLParser.p_field_0 (   self,
  type_ 
)

Definition at line 212 of file asdl.py.

00212 
00213     def p_field_0(self, type_):
00214         " field ::= Id "
00215         return Field(type_[0])

def asdl.ASDLParser.p_field_1 (   self,
  info 
)

Definition at line 216 of file asdl.py.

00216 
00217     def p_field_1(self, info):
00218         " field ::= Id Id "
00219         type, name = info
00220         return Field(type, name)

def asdl.ASDLParser.p_field_2 (   self,
  info 
)

Definition at line 221 of file asdl.py.

00221 
00222     def p_field_2(self, info):
00223         " field ::= Id * Id "
00224         type, _, name = info
00225         return Field(type, name, seq=True)

def asdl.ASDLParser.p_field_3 (   self,
  info 
)

Definition at line 226 of file asdl.py.

00226 
00227     def p_field_3(self, info):
00228         " field ::= Id ? Id "
00229         type, _, name = info
00230         return Field(type, name, opt=True)

def asdl.ASDLParser.p_field_4 (   self,
  type_ 
)

Definition at line 231 of file asdl.py.

00231 
00232     def p_field_4(self, type_):
00233         " field ::= Id * "
00234         return Field(type_[0], seq=True)

def asdl.ASDLParser.p_field_5 (   self,
  type_ 
)

Definition at line 235 of file asdl.py.

00235 
00236     def p_field_5(self, type_):
00237         " field ::= Id ? "
00238         return Field(type_[0], opt=True)
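
Taken together, the six field productions cover every ASDL field form: a bare type, type plus name, and the * (sequence) and ? (optional) modifiers with or without a name. Mapped onto concrete syntax (the names here are illustrative):

    # expr value     -> p_field_1: Field('expr', 'value')
    # expr* targets  -> p_field_2: Field('expr', 'targets', seq=True)
    # expr? msg      -> p_field_3: Field('expr', 'msg', opt=True)
    # expr           -> p_field_0: Field('expr')
    # expr*          -> p_field_4: Field('expr', seq=True)
    # expr?          -> p_field_5: Field('expr', opt=True)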

def asdl.ASDLParser.p_fields_0 (   self,
  field 
)

Definition at line 203 of file asdl.py.

00203 
00204     def p_fields_0(self, field):
00205         " fields ::= field "
00206         return [field[0]]

def asdl.ASDLParser.p_fields_1 (   self,
  info 
)

Definition at line 207 of file asdl.py.

00207 
00208     def p_fields_1(self, info):
00209         " fields ::= field , fields "
00210         field, _, fields = info
00211         return fields + [field]
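
Because the rule is right-recursive and the action appends the head field after the tail, the resulting list is reversed; the fields.reverse() calls in p_product() and p_constructor_1() undo this. Trace for fields a, b, c:

    # fields(c)        -> [c]
    # fields(b, c)     -> [c] + [b]     = [c, b]
    # fields(a, b, c)  -> [c, b] + [a]  = [c, b, a]   (callers reverse)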

def asdl.ASDLParser.p_module (   self,
  info 
)

Definition at line 124 of file asdl.py.

00124 
00125     def p_module(self, info):
00126         " module ::= Id Id version { definitions } "
00127         module, name, version, _0, definitions, _1 = info
00128         if module.value != "module":
00129             raise ASDLSyntaxError(module.lineno,
00130                                   msg="expected 'module', found %s" % module)
00131         return Module(name, definitions, version)
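
For reference, a minimal input for this production looks as follows (module name and definitions are made up; the leading word must literally be 'module', which is checked explicitly because Id matches any identifier):

    source = '''
    module M version "1" {
        stmt = Pass | Expr(expr value)
        expr = Name(identifier id)
    }
    '''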

def asdl.ASDLParser.p_module_0 (   self,
  info 
)

Definition at line 116 of file asdl.py.

00116 
00117     def p_module_0(self, info):
00118         " module ::= Id Id version { } "
00119         module, name, version, _0, _1 = info
00120         if module.value != "module":
00121             raise ASDLSyntaxError(module.lineno,
00122                                   msg="expected 'module', found %s" % module)
00123         return Module(name, None, version)

def asdl.ASDLParser.p_product (   self,
  info 
)

Definition at line 171 of file asdl.py.

00171 
00172     def p_product(self, info):
00173         " product ::= ( fields ) "
00174         _0, fields, _1 = info
00175         # XXX can't I just construct things in the right order?
00176         fields.reverse()
00177         return Product(fields)

def asdl.ASDLParser.p_sum_0 (   self,
  constructor 
)

Definition at line 178 of file asdl.py.

00178 
00179     def p_sum_0(self, constructor):
00180         " sum ::= constructor "
00181         return [constructor[0]]

def asdl.ASDLParser.p_sum_1 (   self,
  info 
)

Definition at line 182 of file asdl.py.

00182 
00183     def p_sum_1(self, info):
00184         " sum ::= constructor | sum "
00185         constructor, _, sum = info
00186         return [constructor] + sum

def asdl.ASDLParser.p_sum_2 (   self,
  info 
)

Definition at line 187 of file asdl.py.

00187 
00188     def p_sum_2(self, info):
00189         " sum ::= constructor | sum "
00190         constructor, _, sum = info
00191         return [constructor] + sum

def asdl.ASDLParser.p_type_0 (   self,
  product 
)

Definition at line 153 of file asdl.py.

00153 
00154     def p_type_0(self, product):
00155         " type ::= product "
00156         return product[0]

def asdl.ASDLParser.p_type_1 (   self,
  sum 
)

Definition at line 157 of file asdl.py.

00157 
00158     def p_type_1(self, sum):
00159         " type ::= sum "
00160         return Sum(sum[0])

def asdl.ASDLParser.p_type_2 (   self,
  info 
)

Definition at line 161 of file asdl.py.

00161 
00162     def p_type_2(self, info):
00163         " type ::= sum Id ( fields ) "
00164         sum, id, _0, attributes, _1 = info
00165         if id.value != "attributes":
00166             raise ASDLSyntaxError(id.lineno,
00167                                   msg="expected attributes, found %s" % id)
00168         if attributes:
00169             attributes.reverse()
00170         return Sum(sum, attributes)

def asdl.ASDLParser.p_version (   self,
  info 
)

Definition at line 132 of file asdl.py.

00132 
00133     def p_version(self, info):
00134         "version ::= Id String"
00135         version, V = info
00136         if version.value != "version":
00137             raise ASDLSyntaxError(version.lineno,
00138                                   msg="expected 'version', found %s" % version)
00139         return V

def spark.GenericParser.parse (   self,
  tokens 
) [inherited]

Definition at line 312 of file spark.py.

00312 
00313     def parse(self, tokens):
00314         sets = [ [(1,0), (2,0)] ]
00315         self.links = {}
00316 
00317         if self.ruleschanged:
00318             self.computeNull()
00319             self.newrules = {}
00320             self.new2old = {}
00321             self.makeNewRules()
00322             self.ruleschanged = 0
00323             self.edges, self.cores = {}, {}
00324             self.states = { 0: self.makeState0() }
00325             self.makeState(0, self._BOF)
00326 
00327         for i in range(len(tokens)):
00328             sets.append([])
00329 
00330             if sets[i] == []:
00331                 break
00332             self.makeSet(tokens[i], sets, i)
00333         else:
00334             sets.append([])
00335             self.makeSet(None, sets, len(tokens))
00336 
00337         #_dump(tokens, sets, self.states)
00338 
00339         finalitem = (self.finalState(tokens), 0)
00340         if finalitem not in sets[-2]:
00341             if len(tokens) > 0:
00342                 self.error(tokens[i-1])
00343             else:
00344                 self.error(None)
00345 
00346         return self.buildTree(self._START, finalitem,
00347                               tokens, len(sets)-2)
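
parse() drives an Earley-style recognizer: sets[i] holds the items still alive after reading i tokens, makeSet() extends the chart one token at a time, and an item for finalState() with parent 0 must survive into the last real set, otherwise error() is called with the offending token. Hedged call sketch (tokens come from asdl.py's scanner):

    parser = asdl.ASDLParser()
    try:
        module = parser.parse(tokens)
    except ASDLSyntaxError as err:   # raised by ASDLParser.error() above
        print(err)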

def spark.GenericParser.predecessor (   self,
  key,
  causal 
) [inherited]

Definition at line 594 of file spark.py.

00594 
00595     def predecessor(self, key, causal):
00596         for p, c in self.links[key]:
00597             if c == causal:
00598                 return p
00599         assert 0

def spark.GenericParser.preprocess (   self,
  rule,
  func 
) [inherited]

Reimplemented in spark.GenericASTMatcher, and spark.GenericASTBuilder.

Definition at line 181 of file spark.py.

00181 
00182     def preprocess(self, rule, func):       return rule, func

def spark.GenericParser.resolve (   self,
  list 
) [inherited]

Reimplemented in spark.GenericASTMatcher.

Definition at line 676 of file spark.py.

00676 
00677     def resolve(self, list):
00678         #
00679         #  Resolve ambiguity in favor of the shortest RHS.
00680         #  Since we walk the tree from the top down, this
00681         #  should effectively resolve in favor of a "shift".
00682         #
00683         return list[0]
00684 
00685 #
00686 #  GenericASTBuilder automagically constructs a concrete/abstract syntax tree
00687 #  for a given input.  The extra argument is a class (not an instance!)
00688 #  which supports the "__setslice__" and "__len__" methods.
00689 #
00690 #  XXX - silently overrides any user code in methods.
00691 #
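
ambiguity() hands resolve() the candidate rule names pre-sorted by right-hand-side length, so returning list[0] picks the shortest RHS. A subclass may override this; a hypothetical variant preferring the longest RHS instead:

    class GreedyASDLParser(asdl.ASDLParser):
        # Hypothetical override: resolve ambiguities toward the longest RHS.
        def resolve(self, list):
            return list[-1]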

def spark.GenericParser.skip (   self,
  hs,
  pos = 0 
) [inherited]

Definition at line 355 of file spark.py.

00355 
00356     def skip(self, hs, pos=0):
00357         n = len(hs[1])
00358         while pos < n:
00359             if not self.isnullable(hs[1][pos]):
00360                 break
00361             pos = pos + 1
00362         return pos

def asdl.ASDLParser.typestring (   self,
  tok 
)

Reimplemented from spark.GenericParser.

Definition at line 110 of file asdl.py.

00110 
00111     def typestring(self, tok):
00112         return tok.type


Member Data Documentation

spark.GenericParser.cores [inherited]

Definition at line 142 of file spark.py.

spark.GenericParser.links [inherited]

Definition at line 314 of file spark.py.

spark.GenericParser.new2old [inherited]

Definition at line 139 of file spark.py.

spark.GenericParser.newrules [inherited]

Definition at line 138 of file spark.py.

spark.GenericParser.nullable [inherited]

Definition at line 221 of file spark.py.

_PyObject_HEAD_EXTRA Py_ssize_t _object::ob_refcnt [inherited]

Definition at line 107 of file object.h.

struct _typeobject* _object::ob_type [inherited]

Definition at line 108 of file object.h.

spark.GenericParser.rule2func [inherited]

Definition at line 117 of file spark.py.

spark.GenericParser.rule2name [inherited]

Definition at line 118 of file spark.py.

spark.GenericParser.rules [inherited]

Definition at line 116 of file spark.py.

spark.GenericParser.ruleschanged [inherited]

Definition at line 121 of file spark.py.

spark.GenericParser.states [inherited]

Definition at line 143 of file spark.py.


The documentation for this class was generated from the following file:

asdl.py