Class: ABNF::Parser::Compiler::Tokenizer
- Inherits: Object
- Defined in: lib/abnf/parser/compiler/tokenizer.rb
Constant Summary
- C_NL = "(?:;[[:graph:][:blank:]]*)?\\r\\n".freeze
- C_WSP = "(?:(?:#{C_NL})?[[:blank:]])".freeze
Instance Attribute Summary
- #abnf ⇒ Object (readonly)
  Returns the value of attribute abnf.
Class Method Summary
- .build(abnf) ⇒ Object
- .call(abnf) ⇒ Object
Instance Method Summary
- #apply(regexp) ⇒ Object
- #call(&block) ⇒ Object
- #each(&block) ⇒ Object
- #initialize(abnf) ⇒ Tokenizer (constructor)
  A new instance of Tokenizer.
- #next ⇒ Object
- #possible_tokens ⇒ Object
- #to_enum ⇒ Object
Constructor Details
#initialize(abnf) ⇒ Tokenizer
Returns a new instance of Tokenizer.
# File 'lib/abnf/parser/compiler/tokenizer.rb', line 10

def initialize abnf
  @abnf = abnf
end
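The tokenizer consumes its input destructively (see #apply), so the instance is normally handed a private copy of the string, as .build does. A minimal sketch with an illustrative grammar line:

source    = "rule = %x41\r\n"
tokenizer = ABNF::Parser::Compiler::Tokenizer.new(source.dup)
tokenizer.abnf # => "rule = %x41\r\n" (this copy is sliced away as tokens are produced)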
Instance Attribute Details
#abnf ⇒ Object (readonly)
Returns the value of attribute abnf.
# File 'lib/abnf/parser/compiler/tokenizer.rb', line 8

def abnf
  @abnf
end
Class Method Details
.build(abnf) ⇒ Object
# File 'lib/abnf/parser/compiler/tokenizer.rb', line 14

def self.build abnf
  new abnf.dup
end
.call(abnf) ⇒ Object
# File 'lib/abnf/parser/compiler/tokenizer.rb', line 18

def self.call abnf
  instance = build abnf
  instance.()
end
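.call is the usual one-shot entry point: .build duplicates the input so the caller's string is left untouched, and the instance's #call returns the full token list. A sketch, where the grammar line is illustrative and the Token#type reader is an assumption (the class only documents Token.new here):

tokens = ABNF::Parser::Compiler::Tokenizer.("rule = %x41\r\n")
tokens.map(&:type)
# => ["rulename", "defined-as", "hex-val-SEQUENCE", "c-nl"]  (assuming Token exposes #type)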
Instance Method Details
#apply(regexp) ⇒ Object
# File 'lib/abnf/parser/compiler/tokenizer.rb', line 23

def apply regexp
  match_data = regexp.match abnf
  return if match_data.nil? or not match_data.pre_match.empty?
  abnf.slice! 0, match_data.to_s.size
  match_data
end
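#apply only succeeds when regexp matches at the very start of the remaining input (pre_match must be empty); on success it slices the matched text off the front of abnf and returns the MatchData, otherwise it returns nil and leaves the buffer untouched. A sketch using the rulename pattern from #possible_tokens:

tokenizer = ABNF::Parser::Compiler::Tokenizer.build("rule = %x41\r\n")
md = tokenizer.apply(%r{\A[[:alpha:]][-[:alnum:]]*}n)
md.to_s        # => "rule"
tokenizer.abnf # => " = %x41\r\n" (the matched prefix has been consumed)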
#call(&block) ⇒ Object
# File 'lib/abnf/parser/compiler/tokenizer.rb', line 30

def call &block
  tokens = []

  each do |token|
    tokens << token
  end

  tokens
end
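#call drains the buffer through #each and returns the accumulated tokens as an Array; afterwards abnf is empty. A sketch with an illustrative grammar line:

tokenizer = ABNF::Parser::Compiler::Tokenizer.build("rule = %x41\r\n")
tokens    = tokenizer.call
tokens.size    # => 4 for this line (rulename, defined-as, hex-val-SEQUENCE, c-nl)
tokenizer.abnf # => ""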
#each(&block) ⇒ Object
# File 'lib/abnf/parser/compiler/tokenizer.rb', line 40

def each &block
  until abnf.empty?
    length = abnf.bytesize
    token = self.next

    block.(token) if block

    fail "Parse error" unless abnf.bytesize < length
  end
end
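#each repeatedly takes the next token until the buffer is empty, yielding each one when a block is given; the bytesize check guarantees forward progress, so input none of the patterns can tokenize raises "Parse error" instead of looping forever. A sketch:

tokenizer = ABNF::Parser::Compiler::Tokenizer.build("rule = %x41\r\n")
tokenizer.each { |token| p token } # yields each token in order

ABNF::Parser::Compiler::Tokenizer.build("???").each { }
# => RuntimeError: Parse error (nothing matches "?", so no progress is made)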
#next ⇒ Object
# File 'lib/abnf/parser/compiler/tokenizer.rb', line 51

def next
  possible_tokens.each do |type, regexp|
    match_data = apply regexp
    next unless match_data

    token = Token.new type, match_data.to_s
    return token
  end

  nil
end
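#next tries the patterns from #possible_tokens in order and returns a Token built from the first one that matches at the head of the buffer, or nil when nothing matches; each successful call shortens abnf. A sketch (Token contents shown informally):

tokenizer = ABNF::Parser::Compiler::Tokenizer.build("rule = %x41\r\n")
tokenizer.next # => Token for "rule" (type "rulename")
tokenizer.next # => Token for " = "  (type "defined-as")
tokenizer.abnf # => "%x41\r\n"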
#possible_tokens ⇒ Object
# File 'lib/abnf/parser/compiler/tokenizer.rb', line 63

def possible_tokens
  @@possible_tokens ||= {
    'prose-val'.freeze         => %r{\A<[\x20-\x3D\x3F-\x7E]*>}n,
    'hex-val-RANGE'.freeze     => %r{\A%x[[:xdigit:]]+-[[:xdigit:]]+}n,
    'hex-val-SEQUENCE'.freeze  => %r{\A%x[[:xdigit:]]+(?:\.[[:xdigit:]]+)*}n,
    'dec-val-RANGE'.freeze     => %r{\A%d[[:digit:]]+-[[:digit:]]+}n,
    'dec-val-SEQUENCE'.freeze  => %r{\A%d[[:digit:]]+(?:\.[[:digit:]]+)*}n,
    'bin-val-RANGE'.freeze     => %r{\A%b[01]+-[01]+}n,
    'bin-val-SEQUENCE'.freeze  => %r{\A%b[01]+(?:\.[01]+)*}n,
    'char-val'.freeze          => %r{\A"[\x20-\x21\x23-\x7E]*"}n,
    'option-START'.freeze      => %r{\A\[#{C_WSP}*}n,
    'option-STOP'.freeze       => %r{\A#{C_WSP}*\]}n,
    'group-START'.freeze       => %r{\A\(#{C_WSP}*}n,
    'group-STOP'.freeze        => %r{\A#{C_WSP}*\)}n,
    'repeat-RANGE'.freeze      => %r{\A[[:digit:]]*\*[[:digit:]]*}n,
    'repeat-EXACT'.freeze      => %r{\A[[:digit:]]+}n,
    'alternation-SLASH'.freeze => %r{\A#{C_WSP}*/#{C_WSP}*}n,
    'defined-as'.freeze        => %r{\A#{C_WSP}*=/?#{C_WSP}*}n,
    'rulename'.freeze          => %r{\A[[:alpha:]][-[:alnum:]]*}n,
    'c-wsp'.freeze             => %r{\A(?:(?:#{C_NL})?[[:blank:]])+}n,
    'c-nl'.freeze              => %r{\A#{C_NL}}n,
  }
end
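Because #next tries the hash in insertion order, the more specific patterns are listed before the ones they overlap with: the hex-, dec- and bin-val RANGE forms before their SEQUENCE forms, and repeat-RANGE before repeat-EXACT. A quick illustration of why that ordering matters:

tokenizer = ABNF::Parser::Compiler::Tokenizer.build("%x41-5A")
tokenizer.next # consumes all of "%x41-5A" as hex-val-RANGE rather than stopping at "%x41"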
#to_enum ⇒ Object
# File 'lib/abnf/parser/compiler/tokenizer.rb', line 87

def to_enum
  Enumerator::Lazy.new self do |yielder, token|
    yielder << token
  end
end
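#to_enum wraps the (destructive) #each in a lazy enumerator, so tokens are produced on demand rather than all at once. A sketch:

tokenizer = ABNF::Parser::Compiler::Tokenizer.build("rule = %x41\r\n")
tokenizer.to_enum.first(2) # produces just the first two tokens lazily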