Class: Logicality::Lexer::RegexpLexer
- Inherits: Object
- Includes: Grammar
- Defined in: lib/logicality/lexer/regexp_lexer.rb
Constant Summary
Constants included from Grammar:
Grammar::AND_OP, Grammar::LEFT_PAREN, Grammar::NOT_OP, Grammar::OR_OP, Grammar::RIGHT_PAREN, Grammar::VALUE
Instance Attribute Summary

- #expression ⇒ Object

Class Method Summary

- .invalid_pattern ⇒ Object
- .invalid_regexp ⇒ Object
- .pattern ⇒ Object
- .regexp ⇒ Object

Instance Method Summary

- #initialize(expression) ⇒ RegexpLexer constructor
- #next_token ⇒ Object
- #reset ⇒ Object
Constructor Details
#initialize(expression) ⇒ RegexpLexer
Returns a new instance of RegexpLexer.
# File 'lib/logicality/lexer/regexp_lexer.rb', line 36

def initialize(expression)
  raise ArgumentError, 'Expression is required' unless expression && expression.to_s.length > 0

  @expression = expression.to_s

  if invalid_matches.length > 0
    raise ArgumentError, "Invalid syntax: #{invalid_matches}"
  end

  reset
end
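A minimal construction sketch; the '&&' operator spelling is an assumption inferred from the Grammar::AND_OP constant name, not confirmed by this page:

# Hypothetical expression; the operator spelling is assumed from Grammar::AND_OP.
lexer = Logicality::Lexer::RegexpLexer.new('a && b')

# A nil or empty expression raises ArgumentError ('Expression is required'),
# and an expression containing characters outside the grammar raises
# ArgumentError with an 'Invalid syntax' message.
Logicality::Lexer::RegexpLexer.new('')  # => ArgumentError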
Instance Attribute Details
#expression ⇒ Object
Returns the value of attribute expression.
# File 'lib/logicality/lexer/regexp_lexer.rb', line 34

def expression
  @expression
end
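The attribute simply echoes the string-coerced expression passed to the constructor. For example (expression syntax assumed as above):

lexer = Logicality::Lexer::RegexpLexer.new('a && b')  # assumed operator spelling
lexer.expression                                      # => "a && b"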
Class Method Details
.invalid_pattern ⇒ Object
# File 'lib/logicality/lexer/regexp_lexer.rb', line 15

def invalid_pattern
  "#{pattern}|(\\s*)"
end
.invalid_regexp ⇒ Object
# File 'lib/logicality/lexer/regexp_lexer.rb', line 19

def invalid_regexp
  Regexp.new(invalid_pattern)
end
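The relationship between the two class methods follows directly from the source above: invalid_pattern is the grammar alternation with a whitespace alternative appended, and invalid_regexp compiles it. A sketch:

# invalid_pattern is pattern plus an empty/whitespace alternative, so the
# lexer can treat anything matched by neither as a syntax error.
Logicality::Lexer::RegexpLexer.invalid_pattern ==
  "#{Logicality::Lexer::RegexpLexer.pattern}|(\\s*)"   # => true
Logicality::Lexer::RegexpLexer.invalid_regexp.class    # => Regexp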
.pattern ⇒ Object
# File 'lib/logicality/lexer/regexp_lexer.rb', line 23

def pattern
  Grammar.constants.map { |c| Grammar.const_get(c).source }
         .join('|')
end
.regexp ⇒ Object
# File 'lib/logicality/lexer/regexp_lexer.rb', line 28

def regexp
  Regexp.new(pattern)
end
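.pattern joins the source of every Grammar constant into one alternation, and .regexp compiles it, so an equivalent regexp can be built by hand. A sketch, assuming Grammar resolves to Logicality::Lexer::Grammar and its constants are Regexp objects (as the .source call implies):

sources = Logicality::Lexer::Grammar.constants.map do |name|
  Logicality::Lexer::Grammar.const_get(name).source
end

# Regexp#== compares source and options, so this mirrors .regexp exactly.
Regexp.new(sources.join('|')) == Logicality::Lexer::RegexpLexer.regexp  # => true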
Instance Method Details
#next_token ⇒ Object
# File 'lib/logicality/lexer/regexp_lexer.rb', line 48

def next_token
  return nil if index > matches.length - 1

  increment

  scan_array = matches[index]

  return nil unless scan_array

  tokens = scan_array.map.with_index do |value, index|
    const = Grammar.constants[index]

    value ? Token.new(const, value) : nil
  end.compact

  if tokens.length > 1
    raise ArgumentError, "Too many tokens found for: #{scan_array}"
  elsif tokens.length == 0
    raise ArgumentError, "Cannot tokenize: #{scan_array}"
  end

  tokens.first
end
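#next_token walks the pre-scanned matches one element per call and returns nil once the input is exhausted, which makes it easy to drain in a loop. A hedged iteration sketch (the expression syntax is assumed, as before):

lexer = Logicality::Lexer::RegexpLexer.new('a && b')  # assumed operator spelling

while (token = lexer.next_token)
  # Each token pairs a Grammar constant name (e.g. :VALUE, :AND_OP) with the
  # matched text, as built by Token.new(const, value) in the source above.
  p token
end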
#reset ⇒ Object
# File 'lib/logicality/lexer/regexp_lexer.rb', line 71

def reset
  @index = -1

  self
end
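#reset rewinds the internal index to -1 and returns self, so a fully drained lexer can be reused without rebuilding it. Sketch (expression syntax assumed as before):

lexer = Logicality::Lexer::RegexpLexer.new('a && b')
nil while lexer.next_token   # drain the token stream

lexer.reset                  # index back to -1; returns the lexer itself
lexer.next_token             # yields the first token again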