Class: Spellr::Tokenizer

Inherits:
Object
Defined in:
lib/spellr/tokenizer.rb

Instance Attribute Summary

Instance Method Summary

Constructor Details

#initialize(file, start_at: nil, skip_key: true) ⇒ Tokenizer

Returns a new instance of Tokenizer.



# File 'lib/spellr/tokenizer.rb', line 12

def initialize(file, start_at: nil, skip_key: true)
  @start_at = start_at || ColumnLocation.new(line_location: LineLocation.new(file))
  @file = file.is_a?(StringIO) || file.is_a?(IO) ? file : ::File.new(file)
  @file.pos = @start_at.line_location.byte_offset

  @line_tokenizer = LineTokenizer.new('', skip_key: skip_key)
end
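
A minimal construction sketch, assuming a plain IO/StringIO (or a file path handed to ::File.new) is an acceptable file argument, as the constructor's type check suggests; the input below is hypothetical.

require 'stringio'
require 'spellr/tokenizer'

io = StringIO.new("first line\nsecond line\n") # hypothetical input
tokenizer = Spellr::Tokenizer.new(io)          # start_at: and skip_key: keep their defaults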

Instance Attribute Details

#file ⇒ Object (readonly)

Returns the value of attribute file.



# File 'lib/spellr/tokenizer.rb', line 10

def file
  @file
end

Instance Method Details

#each_line_with_stats ⇒ Object

Yields each line of the file together with its line number and the running character and byte offsets.



# File 'lib/spellr/tokenizer.rb', line 54

def each_line_with_stats # rubocop:disable Metrics/MethodLength
  char_offset = @start_at.line_location.char_offset
  byte_offset = @start_at.line_location.byte_offset

  file.each_line.with_index(@start_at.line_location.line_number) do |line, line_number|
    yield line, line_number, char_offset, byte_offset

    char_offset += line.length
    byte_offset += line.bytesize
  end
ensure
  file.close
end
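
A usage sketch (hypothetical path); the block receives each raw line, its line number counted from the start location, and the running character and byte offsets, and the file is closed once iteration ends.

tokenizer = Spellr::Tokenizer.new('README.md') # hypothetical path
tokenizer.each_line_with_stats do |line, line_number, char_offset, byte_offset|
  puts "#{line_number} (+#{char_offset} chars, +#{byte_offset} bytes): #{line.chomp}"
end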

#each_term(&block) ⇒ Object



# File 'lib/spellr/tokenizer.rb', line 28

def each_term(&block)
  file.each_line do |line|
    prepare_tokenizer_for_line(line).each_term(&block)
  end
ensure
  file.close
end
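
A usage sketch; each term found in the file is yielded in turn, and the file is closed afterwards (hypothetical path).

Spellr::Tokenizer.new('README.md').each_term do |term|
  puts term
end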

#each_token(skip_term_proc: nil) ⇒ Object

Yields each token on every line, with the token's line location attached; terms matched by skip_term_proc are skipped.



# File 'lib/spellr/tokenizer.rb', line 36

def each_token(skip_term_proc: nil) # rubocop:disable Metrics/MethodLength
  each_line_with_stats do |line, line_number, char_offset, byte_offset|
    prepare_tokenizer_for_line(line).each_token(skip_term_proc: skip_term_proc) do |token|
      token.line = prepare_line(line, line_number, char_offset, byte_offset)

      yield token
    end
  end
end
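
A usage sketch; each yielded token carries its line via #prepare_line. The skip_term_proc contract is assumed here to be "receives a term, returns truthy to skip it"; the proc body and path are illustrative.

skip_known = ->(term) { %w[spellr tokenizer].include?(term.to_s.downcase) } # hypothetical skip list
Spellr::Tokenizer.new('README.md').each_token(skip_term_proc: skip_known) do |token|
  puts token
end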

#map(&block) ⇒ Object



# File 'lib/spellr/tokenizer.rb', line 24

def map(&block)
  enum_for(:each_token).map(&block)
end
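
A convenience wrapper over #each_token; a small sketch mapping every token to a plain String (hypothetical path).

strings = Spellr::Tokenizer.new('README.md').map(&:to_s)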

#normalized_terms ⇒ Object



# File 'lib/spellr/tokenizer.rb', line 68

def normalized_terms
  enum_for(:each_term).map(&:spellr_normalize).uniq.sort
end
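
A sketch of collecting the de-duplicated, sorted, normalized terms, e.g. to seed a wordlist (hypothetical path).

puts Spellr::Tokenizer.new('README.md').normalized_terms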

#prepare_line(line, line_number, char_offset, byte_offset) ⇒ Object



# File 'lib/spellr/tokenizer.rb', line 46

def prepare_line(line, line_number, char_offset, byte_offset)
  line_location = LineLocation.new(
    file, line_number, char_offset: char_offset, byte_offset: byte_offset
  )
  column_location = ColumnLocation.new(line_location: line_location)
  Token.new(line, location: column_location)
end
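
Used internally by #each_token to attach a location-aware Token to each yielded token; a sketch with illustrative offsets (hypothetical path).

tokenizer = Spellr::Tokenizer.new('README.md')                 # hypothetical path
line_token = tokenizer.prepare_line("third line\n", 3, 22, 22) # line number and offsets are illustrative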

#terms ⇒ Object

Returns every term in the file as an array.



# File 'lib/spellr/tokenizer.rb', line 20

def terms # leftovers:test
  enum_for(:each_term).to_a
end
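
A sketch collecting every term into an array (hypothetical path).

all_terms = Spellr::Tokenizer.new('README.md').terms
puts all_terms.length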