Class: SQLPP::Tokenizer
- Inherits: Object
- Defined in: lib/sqlpp/tokenizer.rb
Defined Under Namespace
Classes: EOFError, Exception, Token, UnexpectedCharacter
Constant Summary
- KEYWORDS =
%w( and as asc between by case cross desc distinct else end first from full group having ilike in inner is join last left like limit not null nulls offset on or order outer right select then when where )
- KEYWORDS_REGEX =
Regexp.new('\b(' + KEYWORDS.join('|') + ')\b', Regexp::IGNORECASE)
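For orientation, a quick sketch of how these constants behave (the require path is an assumption about the gem's entry point; inside the tokenizer the regexp is applied through StringScanner#scan, which anchors it at the current scan position):

require "sqlpp" # assumed entry point for the gem

SQLPP::Tokenizer::KEYWORDS_REGEX.match?("SELECT")    # => true, matching is case-insensitive
SQLPP::Tokenizer::KEYWORDS_REGEX.match?("selection") # => false, the \b boundaries reject partial words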
Instance Method Summary
- #_scan ⇒ Object
- #_scan_to_delim(delim, pos) ⇒ Object
- #initialize(string) ⇒ Tokenizer (constructor): A new instance of Tokenizer.
- #next ⇒ Object
- #peek ⇒ Object
- #push(token) ⇒ Object
Constructor Details
#initialize(string) ⇒ Tokenizer
Returns a new instance of Tokenizer.
# File 'lib/sqlpp/tokenizer.rb', line 55

def initialize(string)
  @scanner = StringScanner.new(string)
  @buffer = []
end
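A minimal usage sketch (the require path below is an assumption; the constructor itself only wraps the string in a StringScanner and sets up an empty pushback buffer):

require "sqlpp" # assumed to load this class from lib/sqlpp/tokenizer.rb

tokenizer = SQLPP::Tokenizer.new("select id from users")
p tokenizer.next # first Token from the stream, a :key token for `select` (see #_scan)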
Instance Method Details
#_scan ⇒ Object
# File 'lib/sqlpp/tokenizer.rb', line 77

def _scan
  pos = @scanner.pos

  if @scanner.eos?
    Token.new(:eof, nil, pos)
  elsif (key = @scanner.scan(KEYWORDS_REGEX))
    Token.new(:key, key.downcase.to_sym, pos)
  elsif (num = @scanner.scan(/\d+(?:\.\d+)?/))
    Token.new(:lit, num, pos)
  elsif (id = @scanner.scan(/\w+/))
    Token.new(:id, id, pos)
  elsif (punct = @scanner.scan(/<=|<>|!=|>=|::/))
    Token.new(:punct, punct, pos)
  elsif (punct = @scanner.scan(/[<>=\(\).*,\/+\-\[\]]/))
    Token.new(:punct, punct, pos)
  elsif (delim = @scanner.scan(/["`]/))
    contents = _scan_to_delim(delim, pos)
    Token.new(:id, "#{delim}#{contents}#{delim}", pos)
  elsif @scanner.scan(/'/)
    contents = _scan_to_delim("'", pos)
    Token.new(:lit, "'#{contents}'", pos)
  elsif (space = @scanner.scan(/\s+/))
    Token.new(:space, space, pos)
  else
    raise UnexpectedCharacter, @scanner.rest
  end
end
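As an illustration of the branches above, driven through the public #next (Token's readers aren't documented on this page, so #type is assumed here to expose the first argument passed to Token.new):

t = SQLPP::Tokenizer.new(%q{select count(*) from "Users" where name like 'a%'})

until (token = t.next).type == :eof # Token#type reader is an assumption
  p token
end
# The stream mixes :key tokens ("select", "from", "where", "like"), :space
# tokens for the whitespace, :id tokens ("count", "name", and "Users" with its
# double quotes preserved), :punct tokens ("(", "*", ")"), and a :lit token
# ("'a%'" with its single quotes preserved), followed by a single :eof.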
#_scan_to_delim(delim, pos) ⇒ Object
# File 'lib/sqlpp/tokenizer.rb', line 105

def _scan_to_delim(delim, pos)
  escape, if_peek = case delim
                    when '"', '`' then ["\\", nil]
                    when "'" then ["'", "'"]
                    end

  string = ""

  loop do
    ch = @scanner.getch

    if ch == escape && (if_peek.nil? || @scanner.peek(1) == if_peek)
      ch << @scanner.getch
    end

    case ch
    when nil then raise EOFError, "end of input reached in string started at #{pos} with #{delim.inspect}"
    when delim then return string
    else string << ch
    end
  end
end
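The escape table above means a backslash escapes the closing delimiter inside double- or backtick-quoted identifiers, while a doubled single quote is the escape inside string literals; running off the end of the input raises EOFError. A small sketch of both escape cases:

# The doubled '' is consumed as an escape, so the whole literal comes back as
# one :lit token, "'it''s'", rather than ending at the second quote.
p SQLPP::Tokenizer.new("'it''s'").next

# Inside a double-quoted identifier, \" is kept and scanning continues to the
# real closing quote, yielding one :id token with the quotes preserved.
p SQLPP::Tokenizer.new('"weird\\"name"').next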
#next ⇒ Object
# File 'lib/sqlpp/tokenizer.rb', line 60

def next
  if @buffer.any?
    @buffer.pop
  else
    _scan
  end
end
#peek ⇒ Object
# File 'lib/sqlpp/tokenizer.rb', line 68

def peek
  push(self.next)
end
#push(token) ⇒ Object
# File 'lib/sqlpp/tokenizer.rb', line 72

def push(token)
  @buffer.push(token)
  token
end
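Taken together, #next, #peek and #push give simple lookahead: #peek consumes a token and immediately pushes it back, and because #next pops the buffer before touching the scanner, a pushed token is exactly what the following #next returns (multiple pushes come back most recently pushed first). A short sketch:

t = SQLPP::Tokenizer.new("select 42")

ahead = t.peek        # the :key token for `select`, left available for #next
again = t.next        # pops the buffered token rather than scanning further
ahead.equal?(again)   # => true, same Token object

space = t.next        # the :space token between `select` and `42`
t.push(space)         # hand it back for later
t.next.equal?(space)  # => true, #next returns the pushed token first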