Class: CTokenizer::LexerBase

Inherits:
Object
  • Object
show all
Includes:
CTokenizer
Defined in:
lib/dbc/ctokenizer.rb

Overview

Wraps a lexer and uses it to produce new tokens.

Constant Summary

Constants included from CTokenizer

EOF_TOKEN

Instance Attribute Summary collapse

Instance Method Summary collapse

Methods included from CTokenizer

check_token, #collect, #each, #error, error, line_count, #parse_error, #to_a, #token_error, #warning, whitespace?

Constructor Details

#initialize(str, file = nil, line = 1) ⇒ LexerBase

Returns a new instance of LexerBase.



219
220
221
222
223
224
225
# File 'lib/dbc/ctokenizer.rb', line 219

# Builds a LexerBase around a token source.
#
# str  - a String of source text, in which case a new Lexer is created
#        over it, or an already-constructed lexer/token source, which is
#        used directly (allows lexers to be chained/wrapped).
# file - optional file name forwarded to the new Lexer (default nil)
# line - starting line number forwarded to the new Lexer (default 1)
def initialize(str, file=nil, line=1)
	# is_a? covers String and its subclasses, same as the old
	# `str.class <= String` check but idiomatic.
	if str.is_a?(String)
		@source = Lexer.new(str, file, line)
	else
		@source = str
	end
end

Instance Attribute Details

#source ⇒ Object (readonly)

Returns the value of attribute source.



227
228
229
# File 'lib/dbc/ctokenizer.rb', line 227

# Read-only accessor: returns the token source wrapped by this lexer
# (assigned in #initialize; a Lexer when built from a String).
def source
  @source
end

Instance Method Details

#empty? ⇒ Boolean

Returns:

  • (Boolean)


252
253
254
# File 'lib/dbc/ctokenizer.rb', line 252

# True when the wrapped source reports no remaining input.
# Delegates directly to the underlying source's #empty?.
def empty?
  @source.empty?
end

#file ⇒ Object



241
242
243
# File 'lib/dbc/ctokenizer.rb', line 241

# File name associated with the wrapped source, as reported by the
# underlying source's #file (nil when it was built without one).
def file
  @source.file
end

#line ⇒ Object



244
245
246
# File 'lib/dbc/ctokenizer.rb', line 244

# Current line number of the wrapped source (delegates to the
# underlying source's #line).
def line
  @source.line
end

#match?(regexp) ⇒ Boolean

Returns:

  • (Boolean)


233
234
235
# File 'lib/dbc/ctokenizer.rb', line 233

# True if the wrapped source matches +regexp+ at its current position;
# pure delegation, no token is consumed here.
def match?(regexp)
  @source.match?(regexp)
end

#post_match ⇒ Object



237
238
239
# File 'lib/dbc/ctokenizer.rb', line 237

# Text remaining after the wrapped source's last match (delegates to
# the underlying source's #post_match).
def post_match
  @source.post_match
end

#scan(regexp) ⇒ Object



229
230
231
# File 'lib/dbc/ctokenizer.rb', line 229

# Scans the wrapped source with +regexp+, returning whatever the
# underlying source's #scan returns.
def scan(regexp)
  @source.scan(regexp)
end

#shift ⇒ Object



248
249
250
# File 'lib/dbc/ctokenizer.rb', line 248

# Removes and returns the next token from the wrapped source
# (delegates to the underlying source's #shift).
def shift
  @source.shift
end