Class: Antlr4::Runtime::BufferedTokenStream

Inherits:
TokenStream show all
Defined in:
lib/antlr4/runtime/buffered_token_stream.rb

Direct Known Subclasses

CommonTokenStream

Constant Summary

Constants inherited from IntStream

IntStream::EOF, IntStream::UNKNOWN_SOURCE_NAME

Instance Attribute Summary collapse

Instance Method Summary collapse

Constructor Details

#initialize(token_source) ⇒ BufferedTokenStream

Returns a new instance of BufferedTokenStream

Raises:

  • (NilPointerException)

4
5
6
7
8
9
10
11
# File 'lib/antlr4/runtime/buffered_token_stream.rb', line 4

# Build a buffered stream over +token_source+.
#
# @param token_source [Object] the token source (lexer) to buffer from
# @raise [NilPointerException] when token_source is nil
def initialize(token_source)
  if token_source.nil?
    raise NilPointerException, 'token_source cannot be nil'
  end

  @token_source = token_source
  @tokens = []          # buffered tokens
  @ptr = -1             # -1 marks "not yet initialized"
  @fetched_eof = false  # set once EOF has been buffered
end

Instance Attribute Details

#token_source(tokenSource) ⇒ Object (readonly)

Returns the value of attribute token_source


13
14
15
# File 'lib/antlr4/runtime/buffered_token_stream.rb', line 13

# The underlying token source feeding this stream (read-only accessor).
def token_source
  @token_source
end

#tokens ⇒ Object (readonly)

Returns the value of attribute tokens


162
163
164
# File 'lib/antlr4/runtime/buffered_token_stream.rb', line 162

# The list of tokens buffered so far (read-only accessor).
def tokens
  @tokens
end

Instance Method Details

#adjust_seek_index(i) ⇒ Object


142
143
144
# File 'lib/antlr4/runtime/buffered_token_stream.rb', line 142

# Hook for subclasses (e.g. CommonTokenStream) to remap a seek target;
# the base implementation returns +i+ unchanged.
def adjust_seek_index(i)
  i
end

#consume ⇒ Object


40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
# File 'lib/antlr4/runtime/buffered_token_stream.rb', line 40

# Advance the stream pointer by one token; raises if positioned at EOF.
#
# @raise [IllegalStateException] when an attempt is made to consume EOF
def consume
  # We may skip the (potentially fetching) EOF check when @ptr already
  # indexes a fetched token — excluding a fetched EOF at the very end.
  skip_eof_check =
    if @ptr < 0
      false # not yet initialized
    elsif @fetched_eof
      # last buffered token is EOF; skip unless @ptr points at it
      @ptr < @tokens.length - 1
    else
      # no EOF buffered yet; skip if @ptr indexes a fetched token
      @ptr < @tokens.length
    end

  if !skip_eof_check && la(1) == EOF
    raise IllegalStateException, 'cannot consume EOF'
  end

  @ptr = adjust_seek_index(@ptr + 1) if sync(@ptr + 1)
end

#fetch(n) ⇒ Object


72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
# File 'lib/antlr4/runtime/buffered_token_stream.rb', line 72

# Pull up to +n+ fresh tokens from @token_source into the buffer,
# stopping early (and recording @fetched_eof) when EOF is reached.
#
# @param n [Integer] number of tokens requested
# @return [Integer] number of tokens actually fetched (0 once EOF was seen)
def fetch(n)
  return 0 if @fetched_eof

  i = 0
  while i < n
    t = @token_source.next_token
    # NOTE(review): setTokenIndex is Java-style naming — confirm WritableToken
    # actually responds to it (Ruby convention would be `token_index=`).
    t.setTokenIndex(@tokens.length) if t.is_a? WritableToken
    @tokens << t
    if t.type == Token::EOF
      @fetched_eof = true
      return i + 1
    end
    i += 1
  end

  n
end

#fill ⇒ Object


323
324
325
326
327
328
329
330
# File 'lib/antlr4/runtime/buffered_token_stream.rb', line 323

# Eagerly buffer the entire token stream, fetching 1000 tokens at a time
# until a short fetch signals EOF.
def fill
  lazy_init
  chunk = 1000
  loop { break if fetch(chunk) < chunk }
end

#filter_for_channel(from, to, channel) ⇒ Object


266
267
268
269
270
271
272
273
274
275
276
277
278
279
280
281
# File 'lib/antlr4/runtime/buffered_token_stream.rb', line 266

# Collect tokens in @tokens[from..to] belonging to +channel+.
# A channel of -1 matches every token NOT on the default channel.
#
# @return [Array, nil] matching tokens, or nil when none matched
def filter_for_channel(from, to, channel)
  hidden = []
  i = from
  while i <= to
    t = @tokens[i]
    if channel == -1
      # Fixed: Ruby Array has no #add (use <<), and constants are reached
      # with ::, not a method call.
      hidden << t if t.channel != Lexer::DEFAULT_TOKEN_CHANNEL
    else
      hidden << t if t.channel == channel
    end
    i += 1
  end
  return nil if hidden.empty?

  hidden
end

#get(i) ⇒ Object


90
91
92
93
94
95
96
# File 'lib/antlr4/runtime/buffered_token_stream.rb', line 90

# Random access to the buffered token at index +i+.
#
# @raise [IndexOutOfBoundsException] if +i+ is outside 0...@tokens.length
def get(i)
  if i < 0 || i >= @tokens.length
    # Fixed: String#+ with an Integer raises TypeError; interpolate instead.
    raise IndexOutOfBoundsException, "token index #{i} out of range 0..#{@tokens.length - 1}"
  end

  @tokens[i]
end

#get_list(start, stop) ⇒ Object


98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
# File 'lib/antlr4/runtime/buffered_token_stream.rb', line 98

# Tokens in the inclusive range start..stop, stopping before EOF.
#
# @return [Array, nil] nil when either bound is negative
def get_list(start, stop)
  return nil if start < 0 || stop < 0

  lazy_init
  subset = []
  stop = @tokens.length - 1 if stop >= @tokens.length
  i = start
  while i <= stop
    t = @tokens[i]
    break if t.type == Token::EOF

    # Fixed: Ruby Array has no #add; use <<.
    subset << t
    i += 1
  end
  subset
end

#get_tokens2(start, stop, ttype) ⇒ Object


184
185
186
187
188
# File 'lib/antlr4/runtime/buffered_token_stream.rb', line 184

# Tokens in start..stop having exactly the single type +ttype+.
def get_tokens2(start, stop, ttype)
  tokens1(start, stop, Set[ttype])
end

#hidden_tokens_to_left(token_index, channel) ⇒ Object


241
242
243
244
245
246
247
248
249
250
251
252
253
254
255
256
257
258
259
260
# File 'lib/antlr4/runtime/buffered_token_stream.rb', line 241

# Hidden tokens on +channel+ between the previous default-channel token
# and the token at +token_index+.
#
# @raise [IndexOutOfBoundsException] if token_index is out of range
# @return [Array, nil] nil when there are no such tokens
def hidden_tokens_to_left(token_index, channel)
  lazy_init
  if token_index < 0 || token_index >= tokens.size
    # Fixed: Integer#+ with a String raises TypeError; interpolate instead.
    raise IndexOutOfBoundsException, "#{token_index} not in 0..#{@tokens.length - 1}"
  end

  # obviously no tokens can appear before the first token
  return nil if token_index == 0

  # Fixed: constant lookup requires ::, not a method call.
  prev_on_channel = previous_token_on_channel(token_index - 1, Lexer::DEFAULT_TOKEN_CHANNEL)
  return nil if prev_on_channel == token_index - 1

  # if none onchannel to left, prev_on_channel=-1 then from=0
  from = prev_on_channel + 1
  to = token_index - 1

  filter_for_channel(from, to, channel)
end

#hidden_tokens_to_left1(token_index) ⇒ Object


262
263
264
# File 'lib/antlr4/runtime/buffered_token_stream.rb', line 262

# Convenience form of #hidden_tokens_to_left with channel == -1
# (i.e. any channel other than the default channel).
def hidden_tokens_to_left1(token_index)
  hidden_tokens_to_left(token_index, -1)
end

#hidden_tokens_to_right(token_index, channel) ⇒ Object


223
224
225
226
227
228
229
230
231
232
233
234
235
# File 'lib/antlr4/runtime/buffered_token_stream.rb', line 223

# Hidden tokens on +channel+ between the token at +token_index+ and the
# next default-channel token (or end of buffer).
#
# @raise [IndexOutOfBoundsException] if token_index is out of range
# @return [Array, nil] nil when there are no such tokens
def hidden_tokens_to_right(token_index, channel)
  lazy_init
  if token_index < 0 || token_index >= tokens.size
    # Fixed: Integer#+ with a String raises TypeError; interpolate instead.
    raise IndexOutOfBoundsException, "#{token_index} not in 0..#{@tokens.length - 1}"
  end

  # Fixed: constant lookup requires ::, not a method call.
  next_on_channel = next_token_on_channel(token_index + 1, Lexer::DEFAULT_TOKEN_CHANNEL)
  from = token_index + 1
  # if none onchannel to right, next_on_channel=-1 so set to = last token
  to = next_on_channel == -1 ? size - 1 : next_on_channel

  filter_for_channel(from, to, channel)
end

#hidden_tokens_to_right2(token_index) ⇒ Object


237
238
239
# File 'lib/antlr4/runtime/buffered_token_stream.rb', line 237

# Convenience form of #hidden_tokens_to_right with channel == -1
# (i.e. any channel other than the default channel).
def hidden_tokens_to_right2(token_index)
  hidden_tokens_to_right(token_index, -1)
end

#index ⇒ Object


15
16
17
# File 'lib/antlr4/runtime/buffered_token_stream.rb', line 15

# @return [Integer] index of the current token (-1 before initialization)
def index
  @ptr
end

#la(i) ⇒ Object


115
116
117
# File 'lib/antlr4/runtime/buffered_token_stream.rb', line 115

# Look-ahead token type: type of the token +i+ positions ahead.
def la(i)
  lt(i).type
end

#lazy_init ⇒ Object


146
147
148
# File 'lib/antlr4/runtime/buffered_token_stream.rb', line 146

# Run one-time #setup the first time the stream is touched
# (@ptr == -1 marks "not yet initialized").
def lazy_init
  return unless @ptr == -1

  setup
end

#lb(k) ⇒ Object


119
120
121
122
123
# File 'lib/antlr4/runtime/buffered_token_stream.rb', line 119

# Look-behind: the token +k+ positions before the current one,
# or nil when that would fall off the front of the buffer.
def lb(k)
  behind = @ptr - k
  return nil if behind < 0

  @tokens[behind]
end

#lt(k) ⇒ Object


125
126
127
128
129
130
131
132
133
134
135
136
137
138
139
140
# File 'lib/antlr4/runtime/buffered_token_stream.rb', line 125

# Look-ahead: the token +k+ positions ahead of the current one.
# k == 0 yields nil; k < 0 delegates to look-behind (#lb); an index past
# the buffered end yields the last buffered token (EOF).
def lt(k)
  lazy_init
  return nil if k == 0

  return lb(-k) if k < 0

  i = @ptr + k - 1
  sync(i)
  if i >= @tokens.length # return EOF token
    # EOF must be last token. Fixed: Ruby Array has no #get; use [] indexing.
    return @tokens[@tokens.length - 1]
  end

  #    if ( i>range ) range = i
  @tokens[i]
end

#mark ⇒ Object


19
20
21
# File 'lib/antlr4/runtime/buffered_token_stream.rb', line 19

# Always returns the constant marker 0 — buffered streams keep every
# token, so no real marker is needed.
def mark
  0
end

#next_token_on_channel(i, channel) ⇒ Object


190
191
192
193
194
195
196
197
198
199
200
201
202
203
204
# File 'lib/antlr4/runtime/buffered_token_stream.rb', line 190

# Index of the first token at or after +i+ on +channel+; EOF matches any
# channel, and an index past the buffered end yields the last index.
def next_token_on_channel(i, channel)
  sync(i)
  return size - 1 if i >= size

  until @tokens[i].channel == channel
    # EOF terminates the scan regardless of channel
    return i if @tokens[i].type == Token::EOF

    i += 1
    sync(i)
  end

  i
end

#previous_token_on_channel(i, channel) ⇒ Object


206
207
208
209
210
211
212
213
214
215
216
217
218
219
220
221
# File 'lib/antlr4/runtime/buffered_token_stream.rb', line 206

# Index of the nearest token at or before +i+ on +channel+ (EOF counts as
# every channel); returns -1 when none exists. An index past the buffered
# end yields the last index.
def previous_token_on_channel(i, channel)
  sync(i)
  # the EOF token is on every channel
  return size - 1 if i >= size

  i.downto(0) do |j|
    tok = @tokens[j]
    return j if tok.type == Token::EOF || tok.channel == channel
  end

  -1
end

#release(marker) ⇒ Object


23
24
25
# File 'lib/antlr4/runtime/buffered_token_stream.rb', line 23

# No-op: buffered streams retain all tokens, so there is nothing to
# release for a marker obtained from #mark.
def release(marker)
  ;
end

#reset ⇒ Object


27
28
29
# File 'lib/antlr4/runtime/buffered_token_stream.rb', line 27

# Rewind the stream to the first token.
def reset
  seek(0)
end

#seek(index) ⇒ Object


31
32
33
34
# File 'lib/antlr4/runtime/buffered_token_stream.rb', line 31

# Move the stream pointer to +index+ (after lazy initialization),
# letting subclasses remap the target via #adjust_seek_index.
def seek(index)
  lazy_init
  @ptr = adjust_seek_index(index)
end

#setup ⇒ Object


150
151
152
153
# File 'lib/antlr4/runtime/buffered_token_stream.rb', line 150

# First-use initialization: ensure token 0 is buffered and position the
# pointer on the (possibly adjusted) first token.
def setup
  sync(0)
  @ptr = adjust_seek_index(0)
end

#size ⇒ Object


36
37
38
# File 'lib/antlr4/runtime/buffered_token_stream.rb', line 36

# @return [Integer] number of tokens buffered so far
def size
  @tokens.length
end

#source_name ⇒ Object


283
284
285
# File 'lib/antlr4/runtime/buffered_token_stream.rb', line 283

# Delegates to the underlying token source's name.
def source_name
  @token_source.get_source_name
end

#sync(i) ⇒ Object


61
62
63
64
65
66
67
68
69
70
# File 'lib/antlr4/runtime/buffered_token_stream.rb', line 61

# Ensure the token at index +i+ is buffered, fetching on demand.
#
# @return [Boolean] true when index +i+ is available after the call
def sync(i)
  missing = i - @tokens.length + 1 # how many more elements we need?
  return true unless missing > 0

  fetch(missing) >= missing
end

#text ⇒ Object


287
288
289
# File 'lib/antlr4/runtime/buffered_token_stream.rb', line 287

# Concatenated text of every buffered token (entire stream interval).
def text
  text2(Interval.of(0, size - 1))
end

#text2(interval) ⇒ Object


291
292
293
294
295
296
297
298
299
300
301
302
303
304
305
306
307
308
309
# File 'lib/antlr4/runtime/buffered_token_stream.rb', line 291

# Concatenated text of the tokens inside +interval+ (stops before EOF).
#
# @param interval [Interval] endpoints read via +a+ and +b+
# @return [String] '' when either endpoint is negative
def text2(interval)
  start = interval.a
  stop = interval.b
  return '' if start < 0 || stop < 0

  fill
  stop = @tokens.length - 1 if stop >= @tokens.length

  buf = ''
  i = start
  while i <= stop
    t = @tokens[i]
    break if t.type == Token::EOF

    buf << t.text
    # Fixed: was `buf << i += 1`, which appended the incremented index to
    # the buffer as a codepoint; only the increment was intended.
    i += 1
  end
  buf
end

#text3(ctx) ⇒ Object


311
312
313
# File 'lib/antlr4/runtime/buffered_token_stream.rb', line 311

# Text covered by a rule context's source interval.
def text3(ctx)
  text2(ctx.source_interval)
end

#text4(start, stop) ⇒ Object


315
316
317
318
319
320
321
# File 'lib/antlr4/runtime/buffered_token_stream.rb', line 315

# Text spanning two tokens (inclusive); '' when either token is nil.
def text4(start, stop)
  return '' if start.nil? || stop.nil?

  text2(Interval.of(start.index, stop.index))
end

#tokens1(start, stop, types = nil) ⇒ Object


164
165
166
167
168
169
170
171
172
173
174
175
176
177
178
179
180
181
182
# File 'lib/antlr4/runtime/buffered_token_stream.rb', line 164

# Tokens in start..stop whose type is in +types+ (all types when nil).
#
# @raise [IndexOutOfBoundsException] when either bound is out of range
# @return [Array, nil] nil when start > stop or when nothing matched
def tokens1(start, stop, types = nil)
  lazy_init
  if start < 0 || stop >= @tokens.length || stop < 0 || start >= @tokens.length
    # Fixed: String#+ with Integers raises TypeError; interpolate instead.
    raise IndexOutOfBoundsException,
          "start #{start} or stop #{stop} not in 0..#{@tokens.length - 1}"
  end
  return nil if start > stop

  filtered_tokens = []
  i = start
  while i <= stop
    t = @tokens[i]
    # Fixed: Ruby Array has no #add; use <<.
    filtered_tokens << t if types.nil? || types.include?(t.type)
    i += 1
  end
  filtered_tokens = nil if filtered_tokens.empty?
  filtered_tokens
end