Class: CTokenizer::LexerBase

Inherits:
Object
  • Object
show all
Includes:
CTokenizer
Defined in:
lib/dbc/ctokenizer.rb

Overview

Wraps a lexer and uses it to produce new tokens.

Constant Summary

Constants included from CTokenizer

EOF_TOKEN

Instance Attribute Summary collapse

Instance Method Summary collapse

Methods included from CTokenizer

check_token, #collect, #each, #error, error, line_count, #parse_error, #to_a, #token_error, #warning, whitespace?

Constructor Details

#initialize(str, file = nil, line = 1) ⇒ LexerBase

Returns a new instance of LexerBase.



218
219
220
221
222
223
224
# File 'lib/dbc/ctokenizer.rb', line 218

# Wraps a token source. When given raw source text, a new Lexer is
# constructed over it; any other object is assumed to already be a
# lexer-like token source and is used directly.
#
# @param str [String, Object] raw source text, or an existing lexer
# @param file [String, nil] file name used for error reporting (only
#   used when +str+ is a String)
# @param line [Integer] starting line number (only used when +str+ is
#   a String)
def initialize(str, file=nil, line=1)
	# is_a? covers String subclasses, same as the old `class <= String` test
	if str.is_a?(String)
		@source = Lexer.new(str, file, line)
	else
		@source = str
	end
end

Instance Attribute Details

#sourceObject (readonly)

Returns the value of attribute source.



226
227
228
# File 'lib/dbc/ctokenizer.rb', line 226

# The wrapped token source (reader).
#
# @return [Object] the underlying lexer
def source = @source

Instance Method Details

#empty?Boolean

Returns:

  • (Boolean)


251
252
253
# File 'lib/dbc/ctokenizer.rb', line 251

# Delegates to the wrapped source.
#
# @return [Boolean] true when the source is exhausted
def empty? = @source.empty?

#fileObject



240
241
242
# File 'lib/dbc/ctokenizer.rb', line 240

# Delegates to the wrapped source.
#
# @return [Object] the current file name reported by the source
def file = @source.file

#lineObject



243
244
245
# File 'lib/dbc/ctokenizer.rb', line 243

# Delegates to the wrapped source.
#
# @return [Object] the current line number reported by the source
def line = @source.line

#match?(regexp) ⇒ Boolean

Returns:

  • (Boolean)


232
233
234
# File 'lib/dbc/ctokenizer.rb', line 232

# Delegates to the wrapped source.
#
# @param regexp [Regexp] pattern to test against the source
# @return [Boolean] whether the source matches +regexp+
def match?(regexp) = @source.match?(regexp)

#post_matchObject



236
237
238
# File 'lib/dbc/ctokenizer.rb', line 236

# Delegates to the wrapped source.
#
# @return [Object] the portion of input after the last match
def post_match = @source.post_match

#scan(regexp) ⇒ Object



228
229
230
# File 'lib/dbc/ctokenizer.rb', line 228

# Delegates to the wrapped source.
#
# @param regexp [Regexp] pattern to scan with
# @return [Object] whatever the wrapped source's +scan+ returns
def scan(regexp) = @source.scan(regexp)

#shiftObject



247
248
249
# File 'lib/dbc/ctokenizer.rb', line 247

# Delegates to the wrapped source.
#
# @return [Object] the next token taken from the source
def shift = @source.shift