Class: Saga::Tokenizer

- Inherits: Object
- Defined in: lib/saga/tokenizer.rb
Constant Summary

- RE_STORY = /\./.freeze
- RE_DEFINITION = /\A[[:alpha:]]([[:alpha:]]|[\s-])+:/.freeze
- RE_STORY_NUMBER = /\#(\d+)/.freeze
- RE_STORY_ITERATION = /i(\d+)/.freeze
- RE_STORY_ESTIMATE_PART = /(\d+)(d|w|h|)/.freeze
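For orientation, a small illustrative sketch of what these patterns match; the sample strings are assumptions, not taken from the gem:

Saga::Tokenizer::RE_STORY_NUMBER.match('#12')[1]          # => "12"  (story id)
Saga::Tokenizer::RE_STORY_ITERATION.match('i3')[1]        # => "3"   (iteration)
Saga::Tokenizer::RE_STORY_ESTIMATE_PART.match('5d').to_a  # => ["5d", "5", "d"]  (value and unit)
Saga::Tokenizer::RE_DEFINITION =~ 'Customer: someone who buys the product'  # => 0 (a definition line)
Saga::Tokenizer::RE_STORY =~ 'As a customer I want to order books.'  # => index of the '.' (any line with a period can be a story)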
Instance Attribute Summary

- #current_section ⇒ Object
  Returns the value of attribute current_section.
Class Method Summary
- .interval(input) ⇒ Object
- .tokenize_author(input) ⇒ Object
- .tokenize_definition(input) ⇒ Object
- .tokenize_story(input) ⇒ Object
- .tokenize_story_attributes(input) ⇒ Object
Instance Method Summary

- #expect_stories? ⇒ Boolean
- #initialize(parser) ⇒ Tokenizer (constructor)
  A new instance of Tokenizer.
- #process(input) ⇒ Object
- #process_line(input, index = 0) ⇒ Object
Constructor Details
#initialize(parser) ⇒ Tokenizer
Returns a new instance of Tokenizer.
# File 'lib/saga/tokenizer.rb', line 8

def initialize(parser)
  @parser = parser
  @current_section = nil
end
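The tokenizer does not constrain the parser it is given; the object only needs to respond to the handle_* callbacks dispatched from #process_line. A minimal sketch, assuming a hand-rolled collector (CollectingParser is hypothetical, not part of the gem):

# Hypothetical stand-in for a parser; it only implements the handle_*
# callbacks that #process_line dispatches to, recording each call.
class CollectingParser
  attr_reader :events

  def initialize
    @events = []
  end

  %i[handle_string handle_definition handle_story handle_nested_story
     handle_notes handle_author].each do |callback|
    define_method(callback) { |payload| @events << [callback, payload] }
  end
end

tokenizer = Saga::Tokenizer.new(CollectingParser.new)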
Instance Attribute Details
#current_section ⇒ Object
Returns the value of attribute current_section.
# File 'lib/saga/tokenizer.rb', line 3

def current_section
  @current_section
end
Class Method Details
.interval(input) ⇒ Object
# File 'lib/saga/tokenizer.rb', line 44

def self.interval(input)
  case input.strip
  when 'd'
    :days
  when 'w'
    :weeks
  else
    :hours
  end
end
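Usage is straightforward: the unit letter from an estimate token maps to an interval symbol, with hours as the fallback. Illustrative calls:

Saga::Tokenizer.interval('d')  # => :days
Saga::Tokenizer.interval('w')  # => :weeks
Saga::Tokenizer.interval('')   # => :hours (also the fallback for 'h')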
.tokenize_author(input) ⇒ Object
# File 'lib/saga/tokenizer.rb', line 106

def self.tokenize_author(input)
  author = {}
  parts = input[1..-1].split(',')
  author[:name] = parts[0].strip if parts[0]
  author[:email] = parts[1].strip if parts[1]
  author[:company] = parts[2].strip if parts[2]
  author[:website] = parts[3].strip if parts[3]
  author
end
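Given an author bullet as handled in #process_line (the leading '-' is dropped via input[1..-1], the rest split on commas), the method builds a hash. The example values below are made up:

Saga::Tokenizer.tokenize_author('- Jane Doe, jane@example.com, Acme Corp, example.com')
# => { name: "Jane Doe", email: "jane@example.com",
#      company: "Acme Corp", website: "example.com" }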
.tokenize_definition(input) ⇒ Object
# File 'lib/saga/tokenizer.rb', line 98

def self.tokenize_definition(input)
  if match = /^([^:]+)\s*:\s*(.+)\s*$/.match(input)
    { title: match[1], definition: match[2] }
  else
    {}
  end
end
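A quick illustrative call (sample text assumed):

Saga::Tokenizer.tokenize_definition('Customer: someone who buys the product')
# => { title: "Customer", definition: "someone who buys the product" }
Saga::Tokenizer.tokenize_definition('no colon here')
# => {}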
.tokenize_story(input) ⇒ Object
# File 'lib/saga/tokenizer.rb', line 87

def self.tokenize_story(input)
  parts = input.split(' - ')
  if parts.length > 1
    story = tokenize_story_attributes(parts[-1])
    story[:description] = parts[0..-2].join('-').strip
    story
  else
    { description: input.strip }
  end
end
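The description is everything before the final ' - ' separator; whatever follows it is handed to .tokenize_story_attributes. An illustrative call (story text assumed):

Saga::Tokenizer.tokenize_story('As a customer I want to order books - #12 i1 3d todo')
# => { id: 12, iteration: 1, estimate: [3, :days], status: "todo",
#      description: "As a customer I want to order books" }
Saga::Tokenizer.tokenize_story('Just a description with no attributes')
# => { description: "Just a description with no attributes" }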
.tokenize_story_attributes(input) ⇒ Object
# File 'lib/saga/tokenizer.rb', line 59

def self.tokenize_story_attributes(input)
  return {} if input.nil?

  attributes = {}
  rest = []
  parts = input.split(/\s/)
  parts.each do |part|
    if part.strip == ''
      next
    elsif match = RE_STORY_NUMBER.match(part)
      attributes[:id] = match[1].to_i
    elsif match = RE_STORY_ITERATION.match(part)
      attributes[:iteration] = match[1].to_i
    elsif match = /#{RE_STORY_ESTIMATE_PART}-#{RE_STORY_ESTIMATE_PART}/.match(part)
      estimate = "#{match[1, 2].join}-#{match[3, 2].join}"
      attributes[:estimate] = [estimate, :range]
    elsif match = RE_STORY_ESTIMATE_PART.match(part)
      attributes[:estimate] = [match[1].to_i, interval(match[2])]
    else
      rest << part
    end
  end
  attributes[:status] = rest.join(' ') unless rest.empty?
  attributes
end
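Estimates may be a single value with a unit or a range; a range is kept as a string tagged :range, and any tokens that match nothing become the status. Illustrative (token values assumed):

Saga::Tokenizer.tokenize_story_attributes('#3 i2 2d-5d waiting for design')
# => { id: 3, iteration: 2, estimate: ["2d-5d", :range],
#      status: "waiting for design" }
Saga::Tokenizer.tokenize_story_attributes(nil)
# => {}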
Instance Method Details
#expect_stories? ⇒ Boolean
# File 'lib/saga/tokenizer.rb', line 13

def expect_stories?
  %w[story stories].include?(current_section.to_s)
end
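With the default nil section the predicate is false; it only returns true once current_section is 'story' or 'stories' (however that gets assigned during parsing). A minimal sketch, reusing the hypothetical CollectingParser from the constructor example:

tokenizer = Saga::Tokenizer.new(CollectingParser.new)
tokenizer.expect_stories?  # => false (current_section is still nil)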
#process(input) ⇒ Object
# File 'lib/saga/tokenizer.rb', line 38

def process(input)
  input.split("\n").each_with_index do |line, index|
    process_line(line, index)
  end
end
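#process is just a line splitter: it feeds each line of the input, together with its index, to #process_line. Illustrative (document text assumed, CollectingParser is the hypothetical parser from the constructor example):

tokenizer = Saga::Tokenizer.new(CollectingParser.new)
tokenizer.process("Customer: someone who buys the product\n- Jane Doe, jane@example.com")
# Equivalent to calling:
#   tokenizer.process_line("Customer: someone who buys the product", 0)
#   tokenizer.process_line("- Jane Doe, jane@example.com", 1)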
#process_line(input, index = 0) ⇒ Object
# File 'lib/saga/tokenizer.rb', line 17

def process_line(input, index = 0)
  if input[0, 2] == '  '
    @parser.handle_notes(input.strip)
  elsif input[0, 3] == '|  '
    @parser.handle_notes(input[1..-1].strip)
  elsif input[0, 1] == '|'
    @parser.handle_nested_story(self.class.tokenize_story(input[1..-1]))
  elsif input[0, 1] == '-'
    @parser.handle_author(self.class.tokenize_author(input))
  elsif input =~ RE_DEFINITION
    @parser.handle_definition(self.class.tokenize_definition(input))
  elsif expect_stories? && input =~ RE_STORY
    @parser.handle_story(self.class.tokenize_story(input))
  else
    @parser.handle_string(input)
  end
rescue StandardError
  $stderr.write "On line #{index}: #{input.inspect}:"
  raise
end
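The leading characters of a line decide which parser callback receives it. A sketch of the dispatch (sample lines assumed; CollectingParser is the hypothetical collector from the constructor example):

parser = CollectingParser.new
tokenizer = Saga::Tokenizer.new(parser)

tokenizer.process_line('Customer: someone who buys the product')  # -> handle_definition
tokenizer.process_line('- Jane Doe, jane@example.com')            # -> handle_author
tokenizer.process_line('|Nested story text - #4')                 # -> handle_nested_story
tokenizer.process_line('  indented note')                         # -> handle_notes
tokenizer.process_line('Anything else')                           # -> handle_string

parser.events.map(&:first)
# => [:handle_definition, :handle_author, :handle_nested_story, :handle_notes, :handle_string]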