Module: CTokenizer

Included in:
Lexer, LexerBase, Preprocessor::AllTokens, Preprocessor::SourceTokens
Defined in:
lib/caphir/ctokenizer.rb

Defined Under Namespace

Modules: Expression, Scoped, Sourced Classes: CLexer, CPLexer, Error, Lexer, LexerBase, SkipMacros, Splitter

Constant Summary collapse

EOF_TOKEN =

End of File token. An empty string is used so that this token behaves like a normal token, which removes a special case from the token-handling code.

[false, ''.freeze].freeze
C_RESERVED =
c_reserved_symbol.dup
CP_RESERVED =
cp_reserved_symbol.dup

Class Method Summary collapse

Instance Method Summary collapse

Class Method Details

.error(file, line, msg) ⇒ Object

Raises:


65
66
67
# File 'lib/caphir/ctokenizer.rb', line 65

# Raises a CTokenizer::Error annotated with the given source location.
# +file+ and +line+ describe where the problem occurred; +msg+ becomes
# the exception message.
def CTokenizer.error(file, line, msg)
	exc = CTokenizer::Error.new(file, line)
	raise exc, msg
end

.line_count(str) ⇒ Object


69
70
71
72
73
# File 'lib/caphir/ctokenizer.rb', line 69

# Returns the number of line breaks in +str+.
# "\n" characters are counted first; "\r" is only consulted when no
# "\n" is present (e.g. classic Mac line endings).
def CTokenizer.line_count(str)
	newlines = str.count("\n")
	return newlines unless newlines == 0
	str.count("\r")
end

Instance Method Details

#collectObject


110
111
112
113
114
115
116
# File 'lib/caphir/ctokenizer.rb', line 110

# Like Enumerable#collect, but destructive: shifts every remaining
# token off the tokenizer, yields each one to the block, and returns
# the array of block results.
def collect
	mapped = []
	mapped << yield(self.shift) until self.empty?
	mapped
end

#eachObject


103
104
105
106
107
108
# File 'lib/caphir/ctokenizer.rb', line 103

# Yields every remaining token in order (destructively, via shift)
# and returns self, mirroring Enumerable#each.
def each
	yield(self.shift) until self.empty?
	self
end

#error(msg) ⇒ Object


75
76
77
# File 'lib/caphir/ctokenizer.rb', line 75

# Raises a CTokenizer::Error at the tokenizer's current position.
# Relies on the including class to provide #file and #line.
def error(msg)
	where_file, where_line = file, line
	CTokenizer.error(where_file, where_line, msg)
end

#parse_error(token) ⇒ Object


87
88
89
# File 'lib/caphir/ctokenizer.rb', line 87

# Raises a tokenizer error reporting that +token+ could not be parsed.
def parse_error(token)
	error("parse error on token: #{token}")
end

#to_aObject


95
96
97
98
99
100
101
# File 'lib/caphir/ctokenizer.rb', line 95

# Drains the tokenizer into an Array of tokens. Destructive: the
# tokenizer is empty afterwards.
def to_a
	tokens = []
	tokens << self.shift until self.empty?
	tokens
end

#token_error(token) ⇒ Object


79
80
81
# File 'lib/caphir/ctokenizer.rb', line 79

# Raises a tokenizer error reporting that +token+ was not recognized.
def token_error(token)
	error("unrecognized token: #{token}")
end

#unmatched_error(token) ⇒ Object


83
84
85
# File 'lib/caphir/ctokenizer.rb', line 83

# Raises a tokenizer error reporting an unmatched delimiter +token+
# (e.g. a parenthesis or brace with no partner).
def unmatched_error(token)
	error("unmatched '#{token}'")
end

#warning(msg) ⇒ Object


91
92
93
# File 'lib/caphir/ctokenizer.rb', line 91

# Emits a non-fatal diagnostic on standard error, prefixed with
# "file:line: " when a file is known, or just "line: " otherwise.
# Relies on the including class to provide #file and #line.
def warning(msg)
	location = file ? "#{file}:" : ''
	warn "#{location}#{line}: #{msg}"
end