
Source Code for Module lepl.lexer.stream

# The contents of this file are subject to the Mozilla Public License
# (MPL) Version 1.1 (the "License"); you may not use this file except
# in compliance with the License. You may obtain a copy of the License
# at http://www.mozilla.org/MPL/
#
# Software distributed under the License is distributed on an "AS IS"
# basis, WITHOUT WARRANTY OF ANY KIND, either express or implied. See
# the License for the specific language governing rights and
# limitations under the License.
#
# The Original Code is LEPL (http://www.acooke.org/lepl)
# The Initial Developer of the Original Code is Andrew Cooke.
# Portions created by the Initial Developer are Copyright (C) 2009-2010
# Andrew Cooke (andrew@acooke.org). All Rights Reserved.
#
# Alternatively, the contents of this file may be used under the terms
# of the LGPL license (the GNU Lesser General Public License,
# http://www.gnu.org/licenses/lgpl.html), in which case the provisions
# of the LGPL License are applicable instead of those above.
#
# If you wish to allow use of your version of this file only under the
# terms of the LGPL License and not to allow others to use your version
# of this file under the MPL, indicate your decision by deleting the
# provisions above and replace them with the notice and other provisions
# required by the LGPL License.  If you do not delete the provisions
# above, a recipient may use your version of this file under either the
# MPL or the LGPL License.

'''
Stream support for lexers.
'''


from lepl.stream.iter import base_iterable_factory
from lepl.stream.core import OFFSET, s_delta, s_line, HashKey, s_key, s_next
from lepl.stream.facade import HelperFacade
from lepl.support.lib import fmt, LogMixin

class TokenHelper(base_iterable_factory(lambda cons: cons.head[1], '<token>')):
    '''
    This wraps a sequence of values generated by the lexer. The sequence
    is a source of (tokens, stream) instances, where the stream was generated
    from the source.

    It follows that the `value` returned by s_next is also (tokens, stream).
    This is interpreted by `Token` which forwards `stream` to sub-matchers.

    Implementation is vaguely similar to `IterableHelper`, in that we use
    a `Cons` based linked list to allow memory handling. However, instead
    of a "line" of data, each node contains, again, (tokens, stream) and
    there is no need to store the line_stream explicitly in the state.
    '''
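    # Illustrative sketch (drawn from the docstring above; not part of the
    # original source): each Cons node holds a (tokens, stream) pair, so
    # consuming one entry from a stream built on this helper looks roughly like
    #
    #     (tokens, stream), next_stream = s_next(token_stream)
    #
    # where `tokens` holds the matching token IDs and `stream` is the
    # sub-stream that `Token` hands on to its sub-matchers.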
    def __init__(self, id=None, factory=None, max=None, global_kargs=None,
                 cache_level=None, delta=None, len=None):
        super(TokenHelper, self).__init__(id=id, factory=factory,
                                          max=max, global_kargs=global_kargs,
                                          cache_level=cache_level, delta=delta)
        self._len = len

    def key(self, cons, other):
        try:
            (tokens, line_stream) = cons.head
            key = s_key(line_stream, other)
        except StopIteration:
            self._debug('Default hash (EOS)')
            tokens = '<EOS>'
            key = HashKey(self.id, other)
        #self._debug(fmt('Hash at {0!r} {1}', tokens, hash(key)))
        return key

    def next(self, cons, count=1):
        assert count == 1
        s_next(cons.head[1], count=0) # ping max
        # the value is the (tokens, stream) pair at the head of the Cons list;
        # the remaining stream is the tail paired with this helper
        return (cons.head, (cons.tail, self))

    def line(self, cons, empty_ok):
        '''
        This doesn't have much meaning in terms of tokens, but might be
        used for some debug output, so return something vaguely useful.
        '''
        try:
            # implement in terms of next so that filtering works as expected
            ((_, line_stream), _) = self.next(cons)
            return s_line(line_stream, empty_ok)
        except StopIteration:
            if empty_ok:
                raise TypeError('Token stream cannot return an empty line')
            else:
                raise

    def len(self, cons):
        if self._len is None:
            self._error('len(tokens)')
            raise TypeError
        else:
            try:
                (_, line_stream) = cons.head
                # length remaining in the underlying stream, measured from
                # the current offset
                return self._len - s_delta(line_stream)[OFFSET]
            except StopIteration:
                return 0

    def stream(self, state, value, id_=None):
        raise TypeError


class FilteredTokenHelper(LogMixin, HelperFacade):
    '''
    Used by `RestrictTokensBy` to filter tokens from the delegate.

    This filters a list of token IDs in order. If the entire list does
    not match then the next token is returned (even if it appears in the
    list).
    '''
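    # Rough behaviour sketch (hypothetical token IDs; not part of the original
    # source): with ids = ('A', 'B'), a token stream A B C ... yields C, since
    # A and B are both discarded in order, while a stream A C ... yields A
    # again, because the full list failed to match.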
    def __init__(self, delegate, *ids):
        super(FilteredTokenHelper, self).__init__(delegate)
        self._ids = ids
        self._debug(fmt('Filtering tokens {0}', ids))

    def next(self, state, count=1):

        def add_self(response):
            '''
            Replace the previous helper with this one, which will then
            delegate to the previous when needed.
            '''
            ((tokens, token), (state, _)) = response
            self._debug(fmt('Return {0}', tokens))
            return ((tokens, token), (state, self))

        if count != 1:
            raise TypeError('Filtered tokens must be read singly')
        discard = list(reversed(self._ids))
        start = state
        while discard:
            ((tokens, _), (state, _)) = \
                super(FilteredTokenHelper, self).next(state)
            if discard[-1] in tokens:
                self._debug(fmt('Discarding token {0}', discard[-1]))
                discard.pop()
            else:
                self._debug(fmt('Failed to discard token {0}: {1}',
                                discard[-1], tokens))
                return add_self(super(FilteredTokenHelper, self).next(start))
        return add_self(super(FilteredTokenHelper, self).next(state))
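
# Illustrative sketch (not part of the original module): the matching logic in
# FilteredTokenHelper.next can be mimicked on plain lists of token-ID tuples.
# The helper below is hypothetical and exists only to show the behaviour; it
# returns the index of the token that would be reported.
def _demo_filter(token_ids, stream):
    discard = list(reversed(token_ids))
    index = 0
    while discard:
        tokens = stream[index]       # token IDs at the current position
        index += 1
        if discard[-1] in tokens:
            discard.pop()            # this ID matched; look for the next one
        else:
            return 0                 # whole list did not match; report first token
    return index                     # report the token after the matched prefix

# For example, _demo_filter(('A', 'B'), [('A',), ('B',), ('C',)]) == 2 and
# _demo_filter(('A', 'B'), [('A',), ('C',)]) == 0.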