Class: TokenTrieNER

Inherits: NER < Object
Defined in:
lib/rbbt/ner/token_trieNER.rb

Defined Under Namespace

Modules: EnumeratedArray Classes: Code

Instance Attribute Summary collapse

Class Method Summary collapse

Instance Method Summary collapse

Methods inherited from NER

#entities

Constructor Details

#initialize(type = nil, file = nil, options = {}) ⇒ TokenTrieNER

Returns a new instance of TokenTrieNER.



244
245
246
247
248
249
250
251
252
253
254
255
256
257
258
259
260
261
262
263
264
265
# File 'lib/rbbt/ner/token_trieNER.rb', line 244

# Build a TokenTrieNER over zero or more dictionary sources.
#
# type::    default entity type assigned to codes merged from +file+
# file::    a single dictionary source (TSV/Hash/path) or an Array of them
# options:: :longest_match, :no_clean, :slack, :split_at, plus :persist_*
#           keys that are pulled out and forwarded to Persist.persist_tsv
def initialize(type = nil, file = nil, options = {})
  options = Misc.add_defaults options, :longest_match => true, :no_clean => false, :slack => nil, :split_at => nil,
    :persist => false
  # Fix: extract :slack from the options hash. The previous
  # `@slack = slack` called the (still nil) attr reader, so the
  # :slack option was silently ignored and leaked into the options
  # passed on to Persist.persist_tsv below.
  @slack = options.delete :slack
  @longest_match = options.delete :longest_match
  @split_at = options.delete :split_at
  @no_clean = options.delete :no_clean

  file = [] if file.nil?
  file = [file] unless Array === file
  persist_options = Misc.pull_keys options, :persist
  # The trie is built once and persisted; on a cache hit the block is
  # skipped and the stored index is loaded instead.
  @index = Persist.persist_tsv(file, options, persist_options) do |data|
    data.serializer = :marshal if data.respond_to? :serializer and data.serializer == :type

    @index = data
    file.each do |f|
      merge(f, type)
    end

    @index
  end
end

Instance Attribute Details

#indexObject

Returns the value of attribute index.



243
244
245
# File 'lib/rbbt/ner/token_trieNER.rb', line 243

# Reader for the trie index (nested Hash or persisted TSV store).
def index
  @index
end

#longest_matchObject

Returns the value of attribute longest_match.



243
244
245
# File 'lib/rbbt/ner/token_trieNER.rb', line 243

# Reader for the longest-match flag (true: prefer the longest entry).
def longest_match
  @longest_match
end

#no_cleanObject

Returns the value of attribute no_clean.



243
244
245
# File 'lib/rbbt/ner/token_trieNER.rb', line 243

# Reader for the no_clean flag (skip token normalization when true).
def no_clean
  @no_clean
end

#slackObject

Returns the value of attribute slack.



243
244
245
# File 'lib/rbbt/ner/token_trieNER.rb', line 243

# Reader for the slack Proc used to skip optional tokens while matching.
def slack
  @slack
end

#split_atObject

Returns the value of attribute split_at.



243
244
245
# File 'lib/rbbt/ner/token_trieNER.rb', line 243

# Reader for the tokenizer split pattern (nil means the built-in default).
def split_at
  @split_at
end

#typeObject

Returns the value of attribute type.



243
244
245
# File 'lib/rbbt/ner/token_trieNER.rb', line 243

# Reader for the default entity type attached to merged codes.
def type
  @type
end

Class Method Details

.clean(token) ⇒ Object



8
9
10
11
12
13
14
# File 'lib/rbbt/ner/token_trieNER.rb', line 8

# Normalize a token for trie storage and lookup: tokens longer than
# three characters are downcased and have their first hyphen removed;
# shorter tokens (acronym-sized) pass through untouched.
def self.clean(token)
  return token unless token.length > 3
  token.downcase.sub(/-/,'')
end

.find(index, tokens, longest_match = true, slack = nil, first = true) ⇒ Object



201
202
203
204
205
206
207
208
209
210
211
212
213
214
215
216
217
218
219
220
221
222
223
224
225
226
227
228
229
230
# File 'lib/rbbt/ner/token_trieNER.rb', line 201

# Walk the trie +index+ following successive tokens taken from the
# +tokens+ cursor (an EnumeratedArray supporting next/left?/back).
# Returns [codes, matched_tokens] when a dictionary entry matches at
# the current position, or nil after rewinding consumed tokens via
# find_fail. With longest_match the deepest terminating node wins;
# otherwise the first :END encountered is returned immediately.
def self.find(index, tokens, longest_match = true, slack = nil, first = true)
  head = tokens.next
  
  # Descend one trie level using the current token
  next_index = follow(index, head)


  # No edge for this token: try slack-skipping / rewind in find_fail
  return find_fail(index, tokens, head, longest_match, slack, first) if next_index.nil?

  if not tokens.left?
    # Input exhausted: only a match if this node terminates an entry
    if next_index.include? :END
      return [next_index[:END], [head]]
    else
      return find_fail(index, tokens, head, longest_match, slack, first)
    end
  else

    # Shortest-match mode: accept the first terminating node
    return [next_index[:END], [head]] if next_index.include?(:END) and not longest_match

    matches = find(next_index, tokens, longest_match, slack, false) # Recursion

    if not matches.nil?
      # A deeper match succeeded: prepend this token to its span
      matches.last.unshift head
      return matches
    end
    
    # No longer match available; settle for one ending at this node
    return [next_index[:END], [head]] if next_index.include?(:END)

    return find_fail(index, tokens, head, longest_match, slack, first)
  end
end

.find_fail(index, tokens, head, longest_match, slack, first) ⇒ Object



188
189
190
191
192
193
194
195
196
197
198
199
# File 'lib/rbbt/ner/token_trieNER.rb', line 188

# Fallback when +head+ has no edge in the trie: if a slack Proc
# accepts the token (and we are already inside a candidate match,
# i.e. not at its first token), skip it and continue matching from
# the same trie node; otherwise rewind the cursor and report failure.
def self.find_fail(index, tokens, head, longest_match, slack, first)
  if Proc === slack and not first and not head.nil? and tokens.left? and slack.call(head) 
    matches = find(index, tokens, longest_match, slack, false) # Recursion
    if not matches.nil?
      # The slack-skipped token still belongs to the matched span
      matches.last.unshift head
      return matches
    end
  end

  # Undo the consumption of head so the caller can resume elsewhere
  tokens.back
  return nil
end

.follow(index, head) ⇒ Object

{{{ Matching



172
173
174
175
176
177
178
179
180
181
182
183
184
185
186
# File 'lib/rbbt/ner/token_trieNER.rb', line 172

# Return the sub-trie reached from +index+ by +head+, or nil when no
# edge matches. Exact key lookup wins; otherwise Proc keys stored
# under :PROCS are tried against the token. Proc edges are skipped
# for TokyoCabinet-backed indices (NOTE(review): presumably because
# Procs cannot be marshaled into the store — confirm).
def self.follow(index, head)
  # Removed the unused local `res` the original declared and never read.
  return index[head] if index.include? head

  return nil unless (not TokyoCabinet::HDB === index ) and index.include? :PROCS

  index[:PROCS].each do |key,value|
    return value if key.call(head)
  end

  nil
end

.index_for_tokens(tokens, code, type = nil, slack = nil) ⇒ Object



97
98
99
100
101
102
103
104
105
106
107
108
109
110
# File 'lib/rbbt/ner/token_trieNER.rb', line 97

# Recursively build a nested-Hash trie branch for the tokens left on
# the cursor, terminating in {:END => [Code]} once the entry's tokens
# are exhausted. When a token satisfies the slack Proc the remainder
# is indexed both under that token and merged in directly, making the
# slack token optional during matching. The cursor position is
# restored (tokens.back) before returning, so each frame is net-neutral.
def self.index_for_tokens(tokens, code, type = nil, slack = nil)
  if not tokens.left?
    {:END => [Code.new(code, type)]}
  else
    head = tokens.next
    if (slack.nil? or not slack.call(head))
      res = {head => index_for_tokens(tokens, code, type, slack)}
    else
      # Slack token: also merge the branch that skips it entirely
      res = {head => index_for_tokens(tokens, code, type, slack)}.merge(index_for_tokens(tokens, code, type, slack))
    end
    tokens.back
    res
  end
end

.make_match(match_tokens, type, codes) ⇒ Object



232
233
234
235
236
237
238
239
240
241
# File 'lib/rbbt/ner/token_trieNER.rb', line 232

# Reassemble the matched tokens into a single string, restoring the
# whitespace gaps implied by their offsets, and wrap it as a
# NamedEntity anchored at the first token's offset.
def self.make_match(match_tokens, type, codes)
  base_offset = match_tokens.first.offset
  text = ""
  match_tokens.each do |tok|
    gap = tok.offset - (base_offset + text.length)
    # Pad with spaces when the next token starts past the current end
    text << " " * gap if gap > 0
    # Prefer the original (uncleaned) surface form when available
    text << ((tok.respond_to?(:original) and not tok.original.nil?) ? tok.original : tok)
  end

  NamedEntity.setup(text, base_offset, type, codes)
end

.merge(index1, index2) ⇒ Object



112
113
114
115
116
117
118
119
120
121
122
123
124
125
126
127
128
129
130
131
# File 'lib/rbbt/ner/token_trieNER.rb', line 112

# Destructively merge trie +index2+ into +index1+ and return +index1+.
# :END code lists are concatenated without duplicates (compared by
# to_s); shared keys are merged recursively; new keys are copied over.
# Persisted indices are switched to write mode first and back to read
# mode afterwards, when they support it.
def self.merge(index1, index2)
  index1.write if index1.respond_to? :write and not index1.write?
  index2.each do |key, sub|
    if key == :END
      existing = index1[:END] || []
      known = existing.collect { |e| e.to_s }
      existing += sub.reject { |code| known.include? code.to_s }
      existing.uniq!
      index1[:END] = existing
    elsif index1.include? key
      index1[key] = merge(index1[key], sub)
    else
      index1[key] = sub
    end
  end
  index1.read if index1.respond_to? :read

  index1
end

.prepare_token(token, start, extend_to_token = true, no_clean = false) ⇒ Object



16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
# File 'lib/rbbt/ner/token_trieNER.rb', line 16

# Wrap a raw token for indexing or matching.
#
# When extend_to_token is true the result is a Token carrying the
# lookup text, its character offset +start+ and the original surface
# string; otherwise the bare lookup string is returned. no_clean
# suppresses the clean() normalization.
#
# Fix: the no_clean + extend_to_token branch previously still called
# clean(token) — byte-identical to the cleaning branch — so no_clean
# had no effect whenever extend_to_token was set.
def self.prepare_token(token, start, extend_to_token = true, no_clean = false)
  if no_clean
    if extend_to_token
      Token.setup(token, start, token)
    else
      token
    end
  else
    if extend_to_token
      Token.setup(clean(token), start, token)
    else
      clean(token)
    end
  end
end

.process(index, hash, type = nil, slack = nil, split_at = nil, no_clean = false) ⇒ Object



133
134
135
136
137
138
139
140
141
142
143
144
145
146
147
148
149
150
151
152
153
154
155
156
157
158
159
160
161
162
163
164
165
166
167
168
# File 'lib/rbbt/ner/token_trieNER.rb', line 133

# Feed every code => name(s) pair of +hash+ into the trie +index+.
# Each name is tokenized (unless already an Array of tokens), turned
# into a trie branch and accumulated in a temporary in-memory trie;
# the temporary trie is merged into +index+ once per ~1% of the input
# to limit the number of merges into a possibly persisted store.
def self.process(index, hash, type = nil, slack = nil, split_at = nil, no_clean = false)

  # Batch size: roughly one flush per percent of the input
  chunk_size = hash.size / 100
  items_in_chunk = 0
  tmp_index = {}
  hash.send(hash.respond_to?(:through)? :through : :each) do |code, names|
    names = Array === names ? names : [names]
    names.flatten! if Array === names.first and not Token === names.first.first

    if names.empty?
      # Fall back to the code itself as its own name, except for TSVs
      # that declare real name fields
      names.unshift code unless TSV === hash and not (hash.fields.nil? or hash.fields.empty?)
    end

    names.each do |name|
      # Ignore empty and single-character names
      next if name.empty? or (String === name and name.length < 2)

      tokens = Array === name ? name : tokenize(name, false, split_at, no_clean) 
      tokens.extend EnumeratedArray

      token_index = index_for_tokens(tokens, code, type, slack)

      tmp_index = merge(tmp_index, token_index) unless tokens.empty?

      items_in_chunk += 1

      if items_in_chunk > chunk_size
        # Flush the batch into the main index and start a fresh one
        index = merge(index, tmp_index)
        tmp_index = {}
        items_in_chunk = 0
      end
    end
  end
  # Flush whatever remains in the final partial batch
  index = merge(index, tmp_index)

  index
end

.tokenize(text, extend_to_token = true, split_at = nil, no_clean = false, start = 0) ⇒ Object



32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
# File 'lib/rbbt/ner/token_trieNER.rb', line 32

# Split +text+ into prepared tokens. The default pattern splits on
# whitespace and turns common punctuation (captured group) into
# stand-alone tokens. +start+ seeds the running character offset so
# each token records its absolute position in the original text.
def self.tokenize(text, extend_to_token = true, split_at = nil, no_clean = false, start = 0)
  split_at = /\s|(\(|\)|[-."':,])/ if split_at.nil?

  tokens = []
  while matchdata = text.match(split_at)
    # Text before the separator becomes a token
    tokens << prepare_token(matchdata.pre_match, start, extend_to_token, no_clean) unless matchdata.pre_match.empty?
    # A captured separator (punctuation, not whitespace) is a token too
    tokens << prepare_token(matchdata.captures.first, start + matchdata.begin(1), extend_to_token, no_clean) if matchdata.captures.any? and not matchdata.captures.first.empty?
    start += matchdata.end(0)
    text = matchdata.post_match
  end
   
  # Fix: forward no_clean here as well — it was previously dropped,
  # silently re-enabling cleaning for the trailing token only.
  tokens << prepare_token(text, start, extend_to_token, no_clean) unless text.empty?

  tokens
end

Instance Method Details

#match(text) ⇒ Object



293
294
295
296
297
298
299
300
301
302
303
304
305
306
307
308
309
310
311
312
# File 'lib/rbbt/ner/token_trieNER.rb', line 293

# Scan +text+ (or an already-tokenized Array) against the trie and
# return every NamedEntity match found. After a failed lookup the
# cursor advances one token and matching restarts from there, so
# overlapping starts are tried at every position.
def match(text)
  tokens = Array === text ? text : TokenTrieNER.tokenize(text, true, split_at, no_clean)

  tokens.extend EnumeratedArray
  tokens.pos = 0

  matches = []
  while tokens.left?
    new_matches = TokenTrieNER.find(@index, tokens, longest_match, slack) 

    if new_matches
      # find returns [codes, matched_tokens]; fold the codes' types
      # and identifiers into a single NamedEntity
      codes, match_tokens = new_matches
      matches << TokenTrieNER.make_match(match_tokens, codes.collect{|c| c.type}, codes.collect{|c| c.code})
    else
      # No entry starts at this token: move one step forward
      tokens.advance
    end
  end

  matches
end

#merge(new, type = nil) ⇒ Object



267
268
269
270
271
272
273
274
275
276
277
278
279
280
281
282
283
284
285
286
287
288
289
290
291
# File 'lib/rbbt/ner/token_trieNER.rb', line 267

# Merge a new source of dictionary entries into this NER's index.
#
# new::  another TokenTrieNER (tries merged directly), a TSV
#        (processed row by row), a plain Hash trie, or a String path
#        to a file opened as a :flat TSV
# type:: entity type recorded for codes coming from +new+
def merge(new, type = nil)
  case
  when TokenTrieNER === new
    Log.debug "TokenTrieNER merging other TokenTrieNER"
    TokenTrieNER.merge(@index, new.index)
  when TSV === new
    Log.debug "TokenTrieNER merging TSV"
    new.with_unnamed do
      new.with_monitor({:step => 1000, :desc => "Processing TSV into TokenTrieNER"}) do
        TokenTrieNER.process(@index, new, type, slack, split_at, no_clean)
      end
    end
  when Hash === new
    Log.debug "TokenTrieNER merging Hash"
    TokenTrieNER.merge(@index, new)
  when String === new
    Log.debug "TokenTrieNER merging file: #{ new }"
    new = TSV.open(new, :flat)
    new.with_unnamed do
      new.with_monitor({:step => 1000, :desc => "Processing TSV into TokenTrieNER"}) do
        TokenTrieNER.process(@index, new, type, slack, split_at, no_clean)
      end
    end
  end
end