Class: Cocoadex::Tokenizer

Inherits:
Object
Defined in:
lib/cocoadex/tokenizer.rb

Class Method Summary

Class Method Details

.data_path ⇒ Object

Cache storage location

# File 'lib/cocoadex/tokenizer.rb', line 6

def self.data_path
  Cocoadex.config_file("data/store.blob")
end
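
A quick sketch of what a caller sees (the resolved path depends on how Cocoadex.config_file is configured on the machine, so the value in the comment is only an assumption):

require 'cocoadex'   # assumes the gem is loadable as 'cocoadex'

Cocoadex::Tokenizer.data_path
# => something like "~/.cocoadex/data/store.blob", per Cocoadex.config_file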

.fuzzy_match(text) ⇒ Object

Find all tokens whose term starts with the given text. If one of them is an exact match, return only that token (wrapped in an array) instead of the entire list

# File 'lib/cocoadex/tokenizer.rb', line 29

def self.fuzzy_match text
  subset = tokens.select {|t| t.term.start_with? text }
  if token = subset_match(subset, text)
    [token]
  else
    subset
  end
end
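
A usage sketch, assuming a docset has already been tokenized and persisted; the search terms are illustrative only:

require 'cocoadex'

# Prefix search: every cached token whose term starts with "NSStr"
candidates = Cocoadex::Tokenizer.fuzzy_match("NSStr")

# If some token's term is exactly "NSString", only that token is returned,
# wrapped in a one-element array
exact = Cocoadex::Tokenizer.fuzzy_match("NSString")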

.loaded? ⇒ Boolean

Returns:

  • (Boolean)

# File 'lib/cocoadex/tokenizer.rb', line 42

def self.loaded?
  File.exists?(data_path)
end
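
A sketch of the guard this enables before relying on the cache (nothing beyond the methods documented here is assumed):

require 'cocoadex'

unless Cocoadex::Tokenizer.loaded?
  # No blob on disk yet, so tokens will start out as an empty array
  warn "No token cache found at #{Cocoadex::Tokenizer.data_path}"
end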

.match(text) ⇒ Object

Find the first token with a term identical to a string

# File 'lib/cocoadex/tokenizer.rb', line 16

def self.match text
  subset_match(tokens, text)
end
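
A usage sketch; the term is illustrative and assumes a populated cache:

require 'cocoadex'

token = Cocoadex::Tokenizer.match("NSArray")  # a single token, or nil
Cocoadex::Tokenizer.untokenize([token]) if token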

.persist ⇒ Object

Write the token cache to disk

# File 'lib/cocoadex/tokenizer.rb', line 38

def self.persist
  Serializer.write_array(data_path, tokens, :overwrite)
end
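
A sketch of the index-then-persist flow; the docset label, path, and id are illustrative assumptions about what a caller would pass:

require 'cocoadex'

Cocoadex::Tokenizer.tokenize_class("iOS", "docs/NSString/index.html", 1)
Cocoadex::Tokenizer.persist   # writes the in-memory tokens to data_path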

.subset_match(subset, text) ⇒ Object

Find the first token in a subset with a term identical to a string

# File 'lib/cocoadex/tokenizer.rb', line 22

def self.subset_match subset, text
  subset.detect {|t| t.term == text }
end
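
A minimal sketch of the detect semantics, reusing the shared token cache:

require 'cocoadex'

subset = Cocoadex::Tokenizer.tokens.select {|t| t.term.start_with?("NS") }
Cocoadex::Tokenizer.subset_match(subset, "NSNumber")
# => the first token whose term is exactly "NSNumber", or nil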

.tokenize_class(docset, path, id) ⇒ Object

Find all searchable keywords in a class and add to cache

# File 'lib/cocoadex/tokenizer.rb', line 48

def self.tokenize_class docset, path, id
  klass = Cocoadex::Class.new(path)
  properties = {
    :method   => klass.methods,
    :property => klass.properties
  }
  tokenize(docset, klass, :class, id, properties)
end
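
A usage sketch; the docset label, HTML path, and id are hypothetical values standing in for whatever a docset scanner would supply:

require 'cocoadex'

Cocoadex::Tokenizer.tokenize_class("iOS", "docs/NSString/index.html", 42)
# The class itself plus its methods and properties become searchable
# via match and fuzzy_match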

.tokenize_ref(docset, path, id) ⇒ Object

Find all searchable keywords in a reference and add to cache

# File 'lib/cocoadex/tokenizer.rb', line 59

def self.tokenize_ref docset, path, id
  ref = Cocoadex::GenericRef.new(path)
  properties = {
    :constant    => ref.constants,
    :data_type   => ref.data_types,
    :result_code => ref.result_codes,
    :const_group => ref.const_groups,
    :function    => ref.functions,
    :callback    => ref.callbacks
  }
  tokenize(docset, ref, :ref, id, properties)
end
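
The same pattern for a generic reference page; the arguments are again hypothetical:

require 'cocoadex'

Cocoadex::Tokenizer.tokenize_ref("OS X", "docs/FoundationFunctions/index.html", 43)
# Constants, data types, result codes, constant groups, functions, and
# callbacks from the reference become searchable tokens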

.tokens ⇒ Object

All indexed searchable keys

# File 'lib/cocoadex/tokenizer.rb', line 11

def self.tokens
  @store ||= loaded? ? Serializer.read(data_path) : []
end
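
A sketch showing the lazy, memoized load (the blob at data_path is read at most once per process because the result is cached in @store):

require 'cocoadex'

Cocoadex::Tokenizer.tokens.size   # first call reads data_path if it exists
Cocoadex::Tokenizer.tokens.size   # later calls reuse the memoized array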

.untokenize(keys) ⇒ Object

Create Cocoadex model objects from tokenized keyword references

# File 'lib/cocoadex/tokenizer.rb', line 74

def self.untokenize keys
  keys.map do |key|
    case key.type
    when :class
      Cocoadex::Class.new(key.url)
    when :ref
      Cocoadex::GenericRef.new(key.url)
    when :data_type,   :result_code, :function,
         :const_group, :constant, :callback

      if class_key = tokens.detect {|k| k.id == key.fk}
        ref = Cocoadex::GenericRef.new(class_key.url)
        list = case key.type
          when :result_code then ref.result_codes
          when :data_type   then ref.data_types
          when :const_group then ref.const_groups
          when :constant    then ref.constants
          when :function    then ref.functions
          when :callback    then ref.callbacks
        end
        list.detect {|m| m.name == key.term}
      end
    when :method, :property
      if class_key = tokens.detect {|k| k.id == key.fk}
        klass = Cocoadex::Class.new(class_key.url)
        list = key.type == :method ? klass.methods : klass.properties
        list.detect {|m| m.name == key.term}
      end
    end
  end
end
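
An end-to-end sketch combining lookup and hydration; the search term is illustrative and assumes a populated cache:

require 'cocoadex'

keys   = Cocoadex::Tokenizer.fuzzy_match("NSJSONSerialization")
models = Cocoadex::Tokenizer.untokenize(keys)
# models mixes Cocoadex::Class, Cocoadex::GenericRef, and their members
# (methods, properties, constants, functions, ...) according to each key's type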