Class: Preprocessor::Parser

Inherits:
CTokenizer::LexerBase
Includes:
CTokenizer, Resolve
Defined in:
lib/dbc/preprocessor.rb

Constant Summary

Constants included from CTokenizer

CTokenizer::EOF_TOKEN

Instance Attribute Summary

Attributes inherited from CTokenizer::LexerBase

#source

Instance Method Summary

Methods included from Resolve

#args_given?, #resolve, #resolving?

Methods included from CTokenizer

check_token, #collect, #each, #error, error, line_count, #parse_error, #to_a, #token_error, #warning, whitespace?

Methods inherited from CTokenizer::LexerBase

#file, #line, #match?, #post_match, #scan

Constructor Details

#initialize(search_path, search_path_limited, source, file = nil, line = 1) ⇒ Parser

Returns a new instance of Parser.



# File 'lib/dbc/preprocessor.rb', line 136

def initialize(search_path, search_path_limited, source, file=nil, line=1)
	if source.class <= String
		source = CTokenizer::Lexer.new(source, file, line)
	end

	@search_path = search_path
	@search_path_limited = search_path_limited
	
	@cond_comp = [true] # conditional compile is true at base

	@source = Tokens.new(source)
	@macro_tokens = CTokenizer::Lexer.new('', file, line)
	@defines = { '__FILE__' => FileMacro, '__LINE__' => LineMacro }
	@resolving = {}
	
	@parser = StatementParser.new(@defines)
end
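
A minimal construction sketch (not taken from the library's own examples): the include path, file name, and C fragment below are placeholders, and the require path is inferred from the file location shown above.

require 'dbc/preprocessor'

# Placeholder inputs; any search paths and source text would do.
include_dirs = ['/usr/include']
c_source     = "#define MAX 16\nint buf[MAX];\n"

cpp = Preprocessor::Parser.new(include_dirs, [], c_source, 'example.c')
# A String source is wrapped in a CTokenizer::Lexer automatically;
# an already-constructed lexer may be passed instead.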

Instance Method Details

#add_tokens(tokens) ⇒ Object



# File 'lib/dbc/preprocessor.rb', line 154

def add_tokens(tokens)
	post_m = @macro_tokens.post_match
	tokens << post_m if post_m
	@macro_tokens = CTokenizer::Lexer.new(tokens, @source.file, @source.line)
end
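
A hedged sketch of queuing expansion text: the String argument is inferred from the way CTokenizer::Lexer is built from a String elsewhere in this class, so treat the call as illustrative rather than authoritative.

# Queue already-expanded text so it is consumed before the main source;
# any text left unscanned in the previous macro lexer is appended first.
cpp.add_tokens('((x) * (x))')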

#base? ⇒ Boolean

Returns:

  • (Boolean)


# File 'lib/dbc/preprocessor.rb', line 173

def base?
	@source.base?
end

#define(macro, params = nil, tokens = nil) ⇒ Object



# File 'lib/dbc/preprocessor.rb', line 164

def define(macro, params=nil, tokens=nil)
	self.error("cannot use 'defined' as a macro name") if macro == 'defined'
	@defines[macro] = Define.new(params, tokens)
end
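
The simplest call registers a bare flag macro, leaving params and tokens at their nil defaults; the exact shape expected for a replacement list depends on Define, which is not shown here.

# Illustrative: an object-like macro with no replacement body.
cpp.define('DEBUG')

# 'defined' is rejected as a macro name (see the guard above).
# cpp.define('defined')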

#defined?(macro) ⇒ Boolean

Returns:

  • (Boolean)


# File 'lib/dbc/preprocessor.rb', line 160

def defined?(macro)
	@defines.include?(macro)
end
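
Since defined? is a straight hash lookup, a quick check against the constructor's predefined names and a user macro behaves as expected:

cpp.defined?('__FILE__')  #=> true (predefined in the constructor)
cpp.define('DEBUG')
cpp.defined?('DEBUG')     #=> true
cpp.defined?('NDEBUG')    #=> false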

#empty? ⇒ Boolean

Returns:

  • (Boolean)


# File 'lib/dbc/preprocessor.rb', line 177

def empty?
	@source.empty? and @macro_tokens.empty?
end

#shift ⇒ Object



# File 'lib/dbc/preprocessor.rb', line 181

def shift
	if @macro_tokens.empty?
		t = nil
		loop do
			start_line = @source.start_line
			t = @source.shift
			if t[1] == '#'
				# raise token error, unless we are at the start of the line
				# or we are ignoring these tokens.
				self.token_error('#') unless start_line or not @cond_comp.last
				# replace t with whitespace
				t = self.parse_statement
			end
			# if @cond_comp.last is true then
			# we need to process the tokens
			break if @cond_comp.last or !t[0]
		end # loop
	else
		t = @macro_tokens.shift
	end # if
	resolve(t)
end
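
A drain loop is one way to drive the preprocessor; the shifted values pass through #resolve (from the Resolve module), so they are collected opaquely here rather than assuming a particular token shape.

output = []
output << cpp.shift until cpp.empty?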

#undef(macro) ⇒ Object



# File 'lib/dbc/preprocessor.rb', line 169

def undef(macro)
	@defines.delete(macro)
end
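
Removing a definition deletes the hash entry, so a later defined? check reflects it:

cpp.define('DEBUG')
cpp.undef('DEBUG')
cpp.defined?('DEBUG')  #=> false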