     List,
     Optional,
     Text,
+    Iterator,
     Tuple,
+    TypeVar,
+    Generic,
     Union,
 )
+from dataclasses import dataclass, field
 
 # Pgen imports
 from . import grammar, parse, token, tokenize, pgen
 from logging import Logger
 from blib2to3.pytree import _Convert, NL
 from blib2to3.pgen2.grammar import Grammar
+from contextlib import contextmanager
 
 Path = Union[str, "os.PathLike[str]"]
 
 
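+# Buffer of tokens consumed during one speculative look-ahead; lock()
+# pins the half-open range [start, end) once the look-ahead finishes.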
+@dataclass
+class ReleaseRange:
+    start: int
+    end: Optional[int] = None
+    tokens: List[Any] = field(default_factory=list)
+
+    def lock(self) -> None:
+        total_eaten = len(self.tokens)
+        self.end = self.start + total_eaten
+
+
+class TokenProxy:
+    def __init__(self, generator: Any) -> None:
+        self._tokens = generator
+        self._counter = 0
+        self._release_ranges: List[ReleaseRange] = []
+
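+    # Open a speculative look-ahead window: tokens eaten inside the
+    # "with" block are buffered and replayed by __next__ afterwards.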
+    @contextmanager
+    def release(self) -> Iterator["TokenProxy"]:
+        release_range = ReleaseRange(self._counter)
+        self._release_ranges.append(release_range)
+        try:
+            yield self
+        finally:
+            # Lock the last release range to the final position that
+            # has been eaten.
+            release_range.lock()
+
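+    # Pull tokens from the stream until index "point" of the current
+    # release range is available, then return the token at that index.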
+    def eat(self, point: int) -> Any:
+        eaten_tokens = self._release_ranges[-1].tokens
+        if point < len(eaten_tokens):
+            return eaten_tokens[point]
+        else:
+            while point >= len(eaten_tokens):
+                token = next(self._tokens)
+                eaten_tokens.append(token)
+            return token
+
+    def __iter__(self) -> "TokenProxy":
+        return self
+
+    def __next__(self) -> Any:
+        # If the current position has already been looked ahead (buffered
+        # in a release range), return the buffered token; otherwise pull
+        # the next token from the underlying producer.
+        for release_range in self._release_ranges:
+            assert release_range.end is not None
+
+            start, end = release_range.start, release_range.end
+            if start <= self._counter < end:
+                token = release_range.tokens[self._counter - start]
+                break
+        else:
+            token = next(self._tokens)
+        self._counter += 1
+        return token
+
+    def can_advance(self, to: int) -> bool:
+        # Try to eat; fail if we can't. The eat operation is cached, so
+        # there won't be any additional cost of eating here.
+        try:
+            self.eat(to)
+        except StopIteration:
+            return False
+        else:
+            return True
+
+
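(For orientation: a minimal sketch of the look-ahead pattern TokenProxy
enables. The token values are illustrative; only the TokenProxy API added
above is real.)

    proxy = TokenProxy(iter(["a", "b", "c"]))

    with proxy.release() as peek:
        # Buffers "a" and "b" without consuming them from the caller's
        # point of view; can_advance() returns False on a short stream.
        if peek.can_advance(1):
            first, second = peek.eat(0), peek.eat(1)

    # Iteration replays the buffered tokens before touching the stream.
    assert next(proxy) == "a"
    assert next(proxy) == "b"
    assert next(proxy) == "c"
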
 class Driver(object):
     def __init__(
         self,
@@ -57,14 +130,18 @@ def __init__(
     def parse_tokens(self, tokens: Iterable[Any], debug: bool = False) -> NL:
         """Parse a series of tokens and return the syntax tree."""
         # XXX Move the prefix computation into a wrapper around tokenize.
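+        # Wrap the raw token iterator so the parser can look ahead and
+        # back-track through one shared, buffered stream.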
+        proxy = TokenProxy(tokens)
+
         p = parse.Parser(self.grammar, self.convert)
-        p.setup()
+        p.setup(proxy=proxy)
+
         lineno = 1
         column = 0
         indent_columns = []
         type = value = start = end = line_text = None
         prefix = ""
-        for quintuple in tokens:
+
+        for quintuple in proxy:
             type, value, start, end, line_text = quintuple
             if start != (lineno, column):
                 assert (lineno, column) <= start, ((lineno, column), start)
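(Note: p.setup(proxy=proxy) implies a matching signature change to
parse.Parser.setup, which lives outside this hunk; presumably the parser
stores the proxy so that ambiguous grammar points can peek ahead via
proxy.release() / can_advance() before committing to an alternative.)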