Back to index

python3.2  3.2.2
Public Member Functions | Public Attributes
spark.GenericASTMatcher Class Reference
Inheritance diagram for spark.GenericASTMatcher:
Inheritance graph
[legend]
Collaboration diagram for spark.GenericASTMatcher:
Collaboration graph
[legend]

List of all members.

Public Member Functions

def __init__
def preprocess
def foundMatch
def match_r
def match
def resolve
def __getstate__
def __setstate__
def addRule
def collectRules
def augment
def computeNull
def makeState0
def finalState
def makeNewRules
def typestring
def error
def parse
def isnullable
def skip
def makeState
def goto
def gotoT
def gotoST
def add
def makeSet
def makeSet_fast
def predecessor
def causal
def deriveEpsilon
def buildTree
def ambiguity

Public Attributes

 ast
 input
 rules
 rule2func
 rule2name
 ruleschanged
 newrules
 new2old
 cores
 states
 nullable
 links

Detailed Description

Definition at line 790 of file spark.py.


Constructor & Destructor Documentation

def spark.GenericASTMatcher.__init__ (   self,
  start,
  ast 
)

Definition at line 791 of file spark.py.

def __init__(self, start, ast):
    """Initialize the matcher for the *start* symbol and remember the
    AST that match() will walk by default."""
    GenericParser.__init__(self, start)
    self.ast = ast

Here is the caller graph for this function:


Member Function Documentation

def spark.GenericParser.__getstate__ (   self) [inherited]

Definition at line 132 of file spark.py.

def __getstate__(self):
    """Return a picklable snapshot of the parser.

    Forces full table construction first, then strips the entries that
    cannot (or need not) be pickled: rule2func, nullable and cores.
    """
    if self.ruleschanged:
        #
        #  XXX - duplicated from parse()
        #
        self.computeNull()
        self.newrules = {}
        self.new2old = {}
        self.makeNewRules()
        self.ruleschanged = 0
        self.edges, self.cores = {}, {}
        self.states = {0: self.makeState0()}
        self.makeState(0, self._BOF)
    #
    #  XXX - should find a better way to do this..
    #
    # Keep generating successor states until no edge is left unresolved.
    changes = 1
    while changes:
        changes = 0
        for key, dest in self.edges.items():
            if dest is None:
                state, sym = key
                if state in self.states:
                    self.goto(state, sym)
                    changes = 1
    rv = self.__dict__.copy()
    for s in self.states.values():
        del s.items
    del rv['rule2func']
    del rv['nullable']
    del rv['cores']
    return rv

Here is the call graph for this function:

def spark.GenericParser.__setstate__ (   self,
  D 
) [inherited]

Definition at line 165 of file spark.py.

def __setstate__(self, D):
    """Rebuild the unpicklable pieces dropped by __getstate__(), then
    adopt *D* as the instance dictionary."""
    self.rules = {}
    self.rule2func = {}
    self.rule2name = {}
    self.collectRules()
    start = D['rules'][self._START][0][1][1]        # Blech.
    self.augment(start)
    D['rule2func'] = self.rule2func
    D['makeSet'] = self.makeSet_fast
    self.__dict__ = D

Here is the call graph for this function:

def spark.GenericParser.add (   self,
  set,
  item,
  i = None,
  predecessor = None,
  causal = None 
) [inherited]

Definition at line 470 of file spark.py.

def add(self, set, item, i=None, predecessor=None, causal=None):
    """Insert *item* into the Earley *set* (without duplicates).

    When a *predecessor* is supplied, also record the
    (predecessor, causal) back-link under (item, i) for tree building.
    """
    if predecessor is None:
        if item not in set:
            set.append(item)
        return
    key = (item, i)
    if item not in set:
        self.links[key] = []
        set.append(item)
    self.links[key].append((predecessor, causal))

Here is the call graph for this function:

Here is the caller graph for this function:

def spark.GenericParser.addRule (   self,
  doc,
  func,
  _preprocess = 1 
) [inherited]

Definition at line 183 of file spark.py.

def addRule(self, doc, func, _preprocess=1):
    """Parse one or more 'lhs ::= rhs...' productions out of *doc* and
    register them, all mapped to *func*."""
    fn = func
    rules = doc.split()

    # Index of each lhs token (the word just before '::='), plus a
    # sentinel so consecutive productions can be sliced out.
    index = [j - 1 for j, tok in enumerate(rules) if tok == '::=']
    index.append(len(rules))

    for j in range(len(index) - 1):
        lhs = rules[index[j]]
        rhs = rules[index[j] + 2:index[j + 1]]
        rule = (lhs, tuple(rhs))

        if _preprocess:
            rule, fn = self.preprocess(rule, func)

        self.rules.setdefault(lhs, []).append(rule)
        self.rule2func[rule] = fn
        self.rule2name[rule] = func.__name__[2:]
    self.ruleschanged = 1

Here is the call graph for this function:

Here is the caller graph for this function:

def spark.GenericParser.ambiguity (   self,
  rules 
) [inherited]

Definition at line 659 of file spark.py.

def ambiguity(self, rules):
    """Choose one of several completed candidate rules via resolve()."""
    #
    #  XXX - problem here and in collectRules() if the same rule
    #        appears in >1 method.  Also undefined results if rules
    #        causing the ambiguity appear in the same method.
    #
    sortlist = []
    name2index = {}
    for idx, rule in enumerate(rules):
        lhs, rhs = rule
        name = self.rule2name[self.new2old[rule]]
        sortlist.append((len(rhs), name))
        name2index[name] = idx
    sortlist.sort()
    # Names ordered by RHS length (then name); resolve() picks one.
    names = [name for _, name in sortlist]
    return rules[name2index[self.resolve(names)]]

Here is the call graph for this function:

Here is the caller graph for this function:

def spark.GenericParser.augment (   self,
  start 
) [inherited]

Definition at line 216 of file spark.py.

def augment(self, start):
    """Add the synthetic start production: START ::= BOF <start>."""
    rule = '%s ::= %s %s' % (self._START, self._BOF, start)
    self.addRule(rule, lambda args: args[1], 0)

Here is the call graph for this function:

Here is the caller graph for this function:

def spark.GenericParser.buildTree (   self,
  nt,
  item,
  tokens,
  k 
) [inherited]

Definition at line 626 of file spark.py.

def buildTree(self, nt, item, tokens, k):
    """Reconstruct the parse tree for nonterminal *nt* ending at input
    position *k*, walking the completed item's RHS right-to-left and
    invoking the user's rule functions bottom-up."""
    state, parent = item

    # All completed rules of this state that derive nt.
    choices = [rule for rule in self.states[state].complete if rule[0] == nt]
    rule = choices[0]
    if len(choices) > 1:
        rule = self.ambiguity(choices)
    #output(rule)

    rhs = rule[1]
    attr = [None] * len(rhs)

    for i in range(len(rhs) - 1, -1, -1):
        sym = rhs[i]
        if sym not in self.newrules:
            # Terminal symbol: consume a token (except the BOF marker).
            if sym != self._BOF:
                attr[i] = tokens[k - 1]
                key = (item, k)
                item, k = self.predecessor(key, None)
        #elif self.isnullable(sym):
        elif self._NULLABLE == sym[0:len(self._NULLABLE)]:
            attr[i] = self.deriveEpsilon(sym)
        else:
            key = (item, k)
            why = self.causal(key)
            attr[i] = self.buildTree(sym, why[0], tokens, why[1])
            item, k = self.predecessor(key, why)
    return self.rule2func[self.new2old[rule]](attr)

Here is the call graph for this function:

Here is the caller graph for this function:

def spark.GenericParser.causal (   self,
  key 
) [inherited]

Definition at line 600 of file spark.py.

def causal(self, key):
    """Return the cause recorded for the item at *key*; when several
    back-links exist, disambiguate via ambiguity()."""
    links = self.links[key]
    if len(links) == 1:
        return links[0][1]
    choices = []
    rule2cause = {}
    for _pred, cause in links:
        rule = cause[2]
        choices.append(rule)
        rule2cause[rule] = cause
    return rule2cause[self.ambiguity(choices)]

Here is the call graph for this function:

Here is the caller graph for this function:

def spark.GenericParser.collectRules (   self) [inherited]

Definition at line 209 of file spark.py.

def collectRules(self):
    """Register the docstring of every p_* method as grammar rules."""
    for name in _namelist(self):
        if name.startswith('p_'):
            func = getattr(self, name)
            self.addRule(func.__doc__, func)

Here is the call graph for this function:

Here is the caller graph for this function:

def spark.GenericParser.computeNull (   self) [inherited]

Definition at line 220 of file spark.py.

def computeNull(self):
    """Compute self.nullable: for each LHS symbol, whether it can
    derive the empty string (1) or not (0)."""
    self.nullable = {}
    tbd = []

    for rulelist in self.rules.values():
        lhs = rulelist[0][0]
        self.nullable[lhs] = 0
        for rule in rulelist:
            rhs = rule[1]
            if len(rhs) == 0:
                self.nullable[lhs] = 1
                continue
            #
            #  We only need to consider rules which
            #  consist entirely of nonterminal symbols.
            #  This should be a savings on typical
            #  grammars.
            #
            if all(sym in self.rules for sym in rhs):
                tbd.append(rule)

    # Fixed point: keep marking LHSes whose whole RHS is nullable.
    changes = 1
    while changes:
        changes = 0
        for lhs, rhs in tbd:
            if self.nullable[lhs]:
                continue
            if all(self.nullable[sym] for sym in rhs):
                self.nullable[lhs] = 1
                changes = 1

Here is the caller graph for this function:

def spark.GenericParser.deriveEpsilon (   self,
  nt 
) [inherited]

Definition at line 612 of file spark.py.

def deriveEpsilon(self, nt):
    """Build the attribute value for nullable nonterminal *nt* by
    recursively deriving epsilon through its rules."""
    candidates = self.newrules[nt]
    if len(candidates) > 1:
        rule = self.ambiguity(candidates)
    else:
        rule = candidates[0]
    #output(rule)

    rhs = rule[1]
    # Derive right-to-left, as the original index loop did, then put
    # the attributes back in left-to-right order.
    attr = [self.deriveEpsilon(sym) for sym in reversed(rhs)]
    attr.reverse()
    return self.rule2func[self.new2old[rule]](attr)

Here is the call graph for this function:

Here is the caller graph for this function:

def spark.GenericParser.error (   self,
  token 
) [inherited]

Reimplemented in asdl.ASDLParser.

Definition at line 308 of file spark.py.

def error(self, token):
    """Report a syntax error at *token* and abort the program."""
    output("Syntax error at or near `%s' token" % token)
    raise SystemExit

Here is the call graph for this function:

Here is the caller graph for this function:

def spark.GenericParser.finalState (   self,
  tokens 
) [inherited]

Definition at line 262 of file spark.py.

def finalState(self, tokens):
    """Return the state number an accepting parse must reach."""
    #
    #  Yuck.
    #
    # Special case: empty input is accepted straight out of state 1.
    if len(self.newrules[self._START]) == 2 and len(tokens) == 0:
        return 1
    start = self.rules[self._START][0][1][1]
    return self.goto(1, start)

Here is the call graph for this function:

def spark.GenericASTMatcher.foundMatch (   self,
  args,
  func 
)

Definition at line 805 of file spark.py.

def foundMatch(self, args, func):
    """Invoke *func* on the matched node (the last argument) and return
    that node so matching can continue upward."""
    func(args[-1])
    return args[-1]

Here is the caller graph for this function:

def spark.GenericParser.goto (   self,
  state,
  sym 
) [inherited]

Definition at line 443 of file spark.py.

def goto(self, state, sym):
    """Follow the transition from *state* on *sym*, generating the
    target state lazily; None when no transition exists."""
    key = (state, sym)
    if key not in self.edges:
        # No transitions from state on sym.
        return None

    target = self.edges[key]
    if target is None:
        # Target state isn't generated yet.  Remedy this.
        target = self.makeState(state, sym)
        self.edges[key] = target
    return target

Here is the call graph for this function:

Here is the caller graph for this function:

def spark.GenericParser.gotoST (   self,
  state,
  st 
) [inherited]

Definition at line 463 of file spark.py.

def gotoST(self, state, st):
    """Transitions from *state* on every listed terminal equal to *st*."""
    return [self.goto(state, t) for t in self.states[state].T if st == t]

Here is the call graph for this function:

Here is the caller graph for this function:

def spark.GenericParser.gotoT (   self,
  state,
  t 
) [inherited]

Definition at line 460 of file spark.py.

def gotoT(self, state, t):
    """Single-element list form of goto(), for typed terminal *t*."""
    return [self.goto(state, t)]

Here is the call graph for this function:

Here is the caller graph for this function:

def spark.GenericParser.isnullable (   self,
  sym 
) [inherited]

Definition at line 348 of file spark.py.

def isnullable(self, sym):
    """True when *sym* is a synthetic nullable symbol of G_e."""
    # For symbols in G_e only.  If we weren't supporting 1.5,
    # could just use sym.startswith().
    return self._NULLABLE == sym[0:len(self._NULLABLE)]

Here is the caller graph for this function:

def spark.GenericParser.makeNewRules (   self) [inherited]

Definition at line 271 of file spark.py.

def makeNewRules(self):
    """Expand the grammar into G_e: for each rule, generate variants in
    which nullable nonterminals are replaced by their synthetic
    nullable counterparts; fully-nullable variants are registered
    under the nullable form of their LHS."""
    worklist = []
    for rulelist in self.rules.values():
        for rule in rulelist:
            worklist.append((rule, 0, 1, rule))

    for rule, i, candidate, oldrule in worklist:
        lhs, rhs = rule
        n = len(rhs)
        while i < n:
            sym = rhs[i]
            if sym not in self.rules or not self.nullable[sym]:
                candidate = 0
                i = i + 1
                continue

            # sym is nullable: queue a variant with the marker in place.
            newrhs = list(rhs)
            newrhs[i] = self._NULLABLE + sym
            newrule = (lhs, tuple(newrhs))
            worklist.append((newrule, i + 1, candidate, oldrule))
            candidate = 0
            i = i + 1

        # (The original had a vestigial while-else here; the loop has no
        # break, so this always runs.)
        if candidate:
            lhs = self._NULLABLE + lhs
            rule = (lhs, rhs)
        if lhs in self.newrules:
            self.newrules[lhs].append(rule)
        else:
            self.newrules[lhs] = [rule]
        self.new2old[rule] = oldrule

Here is the call graph for this function:

def spark.GenericParser.makeSet (   self,
  token,
  sets,
  i 
) [inherited]

Definition at line 481 of file spark.py.

def makeSet(self, token, sets, i):
    """One Earley scan/complete step: extend sets[i+1] from sets[i] on
    *token*, and complete finished items back into sets[i]."""
    cur, nxt = sets[i], sets[i + 1]

    ttype = token is not None and self.typestring(token) or None
    if ttype is not None:
        fn, arg = self.gotoT, ttype
    else:
        fn, arg = self.gotoST, token

    # Note: self.add() may append to cur while we iterate it; that is
    # intentional (newly completed items are processed too).
    for item in cur:
        ptr = (item, i)
        state, parent = item
        for k in fn(state, arg):
            if k is not None:
                self.add(nxt, (k, parent), i + 1, ptr)
                nk = self.goto(k, None)
                if nk is not None:
                    self.add(nxt, (nk, i + 1))

        if parent == i:
            continue

        # Completion: propagate finished rules back to parent items.
        for rule in self.states[state].complete:
            lhs, rhs = rule
            for pitem in sets[parent]:
                pstate, pparent = pitem
                k = self.goto(pstate, lhs)
                if k is not None:
                    why = (item, i, rule)
                    pptr = (pitem, parent)
                    self.add(cur, (k, pparent), i, pptr, why)
                    nk = self.goto(k, None)
                    if nk is not None:
                        self.add(cur, (nk, i))

Here is the call graph for this function:

def spark.GenericParser.makeSet_fast (   self,
  token,
  sets,
  i 
) [inherited]

Definition at line 518 of file spark.py.

def makeSet_fast(self, token, sets, i):
    """Speed-tuned clone of makeSet().

    Call *only* when the entire state machine has been built!  It
    relies on self.edges being filled in completely, and then
    duplicates and inlines code to boost speed at the cost of extreme
    ugliness.
    """
    cur, next = sets[i], sets[i + 1]
    ttype = token is not None and self.typestring(token) or None

    for item in cur:
        ptr = (item, i)
        state, parent = item
        if ttype is not None:
            k = self.edges.get((state, ttype), None)
            if k is not None:
                #self.add(next, (k, parent), i+1, ptr)
                #INLINED --v
                new = (k, parent)
                key = (new, i + 1)
                if new not in next:
                    self.links[key] = []
                    next.append(new)
                self.links[key].append((ptr, None))
                #INLINED --^
                #nk = self.goto(k, None)
                nk = self.edges.get((k, None), None)
                if nk is not None:
                    #self.add(next, (nk, i+1))
                    #INLINED --v
                    new = (nk, i + 1)
                    if new not in next:
                        next.append(new)
                    #INLINED --^
        else:
            add = self.gotoST(state, token)
            for k in add:
                if k is not None:
                    self.add(next, (k, parent), i + 1, ptr)
                    #nk = self.goto(k, None)
                    nk = self.edges.get((k, None), None)
                    if nk is not None:
                        self.add(next, (nk, i + 1))

        if parent == i:
            continue

        # Completion pass, with goto()/add() inlined as above.
        for rule in self.states[state].complete:
            lhs, rhs = rule
            for pitem in sets[parent]:
                pstate, pparent = pitem
                #k = self.goto(pstate, lhs)
                k = self.edges.get((pstate, lhs), None)
                if k is not None:
                    why = (item, i, rule)
                    pptr = (pitem, parent)
                    #self.add(cur, (k, pparent),
                    #        i, pptr, why)
                    #INLINED --v
                    new = (k, pparent)
                    key = (new, i)
                    if new not in cur:
                        self.links[key] = []
                        cur.append(new)
                    self.links[key].append((pptr, why))
                    #INLINED --^
                    #nk = self.goto(k, None)
                    nk = self.edges.get((k, None), None)
                    if nk is not None:
                        #self.add(cur, (nk, i))
                        #INLINED --v
                        new = (nk, i)
                        if new not in cur:
                            cur.append(new)
                        #INLINED --^

Here is the call graph for this function:

Here is the caller graph for this function:

def spark.GenericParser.makeState (   self,
  state,
  sym 
) [inherited]

Definition at line 363 of file spark.py.

def makeState(self, state, sym):
    """Create (or find) the successor of *state* on *sym*.

    Builds the epsilon-kernel state and, when new items are predicted,
    the paired epsilon-nonkernel state as well.  Returns the kernel
    state's number.
    """
    assert sym is not None
    #
    #  Compute \epsilon-kernel state's core and see if
    #  it exists already.
    #
    kitems = []
    for rule, pos in self.states[state].items:
        lhs, rhs = rule
        if rhs[pos:pos + 1] == (sym,):
            kitems.append((rule, self.skip(rule, pos + 1)))
    core = kitems

    core.sort()
    tcore = tuple(core)
    if tcore in self.cores:
        return self.cores[tcore]
    #
    #  Nope, doesn't exist.  Compute it and the associated
    #  \epsilon-nonkernel state together; we'll need it right away.
    #
    k = self.cores[tcore] = len(self.states)
    K, NK = _State(k, kitems), _State(k + 1, [])
    self.states[k] = K
    predicted = {}

    edges = self.edges
    rules = self.newrules
    for X in K, NK:
        worklist = X.items
        for item in worklist:
            rule, pos = item
            lhs, rhs = rule
            if pos == len(rhs):
                X.complete.append(rule)
                continue

            nextSym = rhs[pos]
            key = (X.stateno, nextSym)
            if nextSym not in rules:
                if key not in edges:
                    edges[key] = None
                    X.T.append(nextSym)
            else:
                edges[key] = None
                if nextSym not in predicted:
                    predicted[nextSym] = 1
                    for prule in rules[nextSym]:
                        ppos = self.skip(prule)
                        new = (prule, ppos)
                        NK.items.append(new)
        #
        #  Problem: we know K needs generating, but we
        #  don't yet know about NK.  Can't commit anything
        #  regarding NK to self.edges until we're sure.  Should
        #  we delay committing on both K and NK to avoid this
        #  hacky code?  This creates other problems..
        #
        if X is K:
            edges = {}

    if NK.items == []:
        return k

    #
    #  Check for \epsilon-nonkernel's core.  Unfortunately we
    #  need to know the entire set of predicted nonterminals
    #  to do this without accidentally duplicating states.
    #
    core = sorted(predicted.keys())
    tcore = tuple(core)
    if tcore in self.cores:
        self.edges[(k, None)] = self.cores[tcore]
        return k

    nk = self.cores[tcore] = self.edges[(k, None)] = NK.stateno
    self.edges.update(edges)
    self.states[nk] = NK
    return k

Here is the call graph for this function:

Here is the caller graph for this function:

def spark.GenericParser.makeState0 (   self) [inherited]

Definition at line 256 of file spark.py.

def makeState0(self):
    """Build state 0, seeded with every augmented start rule at dot 0."""
    s0 = _State(0, [])
    for rule in self.newrules[self._START]:
        s0.items.append((rule, 0))
    return s0

def spark.GenericASTMatcher.match (   self,
  ast = None 
)

Definition at line 822 of file spark.py.

def match(self, ast=None):
    """Flatten *ast* (defaulting to the AST given at construction) into
    self.input and run the parser over it."""
    if ast is None:
        ast = self.ast
    self.input = []

    self.match_r(ast)
    self.parse(self.input)

def spark.GenericASTMatcher.match_r (   self,
  node 
)

Definition at line 809 of file spark.py.

00809 
00810     def match_r(self, node):
00811         self.input.insert(0, node)
00812         children = 0
00813 
00814         for child in node:
00815             if children == 0:
00816                 self.input.insert(0, '(')
00817             children = children + 1
00818             self.match_r(child)
00819 
00820         if children > 0:
00821             self.input.insert(0, ')')

Here is the call graph for this function:

Here is the caller graph for this function:

def spark.GenericParser.parse (   self,
  tokens 
) [inherited]

Definition at line 312 of file spark.py.

def parse(self, tokens):
    """Run the Earley parse over *tokens* and return the built tree.

    Rebuilds the grammar tables first when rules changed; calls
    error() (which does not return) on a failed parse.
    """
    sets = [[(1, 0), (2, 0)]]
    self.links = {}

    if self.ruleschanged:
        self.computeNull()
        self.newrules = {}
        self.new2old = {}
        self.makeNewRules()
        self.ruleschanged = 0
        self.edges, self.cores = {}, {}
        self.states = {0: self.makeState0()}
        self.makeState(0, self._BOF)

    for i in range(len(tokens)):
        sets.append([])

        if sets[i] == []:
            break
        self.makeSet(tokens[i], sets, i)
    else:
        # Consumed every token: one final completion-only step.
        sets.append([])
        self.makeSet(None, sets, len(tokens))

    #_dump(tokens, sets, self.states)

    finalitem = (self.finalState(tokens), 0)
    if finalitem not in sets[-2]:
        if len(tokens) > 0:
            self.error(tokens[i - 1])
        else:
            self.error(None)

    return self.buildTree(self._START, finalitem,
                          tokens, len(sets) - 2)

Here is the caller graph for this function:

def spark.GenericParser.predecessor (   self,
  key,
  causal 
) [inherited]

Definition at line 594 of file spark.py.

def predecessor(self, key, causal):
    """Return the predecessor whose recorded cause equals *causal*."""
    for pred, cause in self.links[key]:
        if cause == causal:
            return pred
    assert 0

Here is the caller graph for this function:

def spark.GenericASTMatcher.preprocess (   self,
  rule,
  func 
)

Reimplemented from spark.GenericParser.

Definition at line 795 of file spark.py.

def preprocess(self, rule, func):
    """Reverse the rule's RHS (matching walks the flattened AST in
    reverse) and wrap *func* so a match calls foundMatch()."""
    def rebind(func, self=self):
        # Bind func/self as defaults so each rule keeps its own handler.
        def wrapper(args, func=func, self=self):
            return self.foundMatch(args, func)
        return wrapper
    lhs, rhs = rule

    return (lhs, tuple(reversed(rhs))), rebind(func)

Here is the call graph for this function:

def spark.GenericASTMatcher.resolve (   self,
  list 
)

Reimplemented from spark.GenericParser.

Definition at line 830 of file spark.py.

def resolve(self, list):
    """Resolve ambiguity in favor of the longest RHS: the candidates
    arrive sorted by RHS length, so take the last one."""
    return list[-1]

def spark.GenericParser.skip (   self,
  hs,
  pos = 0 
) [inherited]

Definition at line 355 of file spark.py.

def skip(self, hs, pos=0):
    """Advance *pos* past leading nullable symbols of the rule's RHS
    (hs[1]) and return the resulting position."""
    rhs = hs[1]
    n = len(rhs)
    while pos < n and self.isnullable(rhs[pos]):
        pos = pos + 1
    return pos

Here is the call graph for this function:

Here is the caller graph for this function:

def spark.GenericParser.typestring (   self,
  token 
) [inherited]

Reimplemented in asdl.ASDLParser.

Definition at line 305 of file spark.py.

def typestring(self, token):
    """Hook for subclasses: return the token's type string, or None to
    fall back to equality-based terminal scanning."""
    return None

Here is the caller graph for this function:


Member Data Documentation

Definition at line 793 of file spark.py.

Definition at line 142 of file spark.py.

Definition at line 825 of file spark.py.

Definition at line 314 of file spark.py.

Definition at line 139 of file spark.py.

Definition at line 138 of file spark.py.

Definition at line 221 of file spark.py.

Definition at line 117 of file spark.py.

Definition at line 118 of file spark.py.

Definition at line 116 of file spark.py.

Definition at line 121 of file spark.py.

Definition at line 143 of file spark.py.


The documentation for this class was generated from the following file: