Class: ODBA::Storage

Inherits:
Object
Includes:
Singleton
Defined in:
lib/odba/storage.rb

Overview

:nodoc: all

Constant Summary

BULK_FETCH_STEP =
2500
TABLES =
[
  # in table 'object', the isolated dumps of all objects are stored
  ['object', <<-'SQL'],
  ['prefetchable_index', <<-SQL],
CREATE INDEX prefetchable_index ON object(prefetchable);
  SQL
  ['extent_index', <<-SQL],
CREATE INDEX extent_index ON object(extent);
  SQL
  # helper table 'object_connection'
  ['object_connection', <<-'SQL'],
  ['target_id_index', <<-SQL],
CREATE INDEX target_id_index ON object_connection(target_id);
  SQL
  # helper table 'collection'
  ['collection', <<-'SQL'],
]
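
The heredoc bodies above hold the DDL for the backing tables. The following is only a minimal sketch of the columns the methods below rely on, inferred from their queries; the constant names and the exact DDL (types, keys, constraints) are assumptions, not the gem's literal statements:

  OBJECT_SQL = <<-'SQL'             # hypothetical reconstruction
    CREATE TABLE object (
      odba_id INTEGER PRIMARY KEY,  -- restore, store, delete_persistable
      content TEXT,                 -- the serialized dump
      name TEXT,                    -- restore_named
      prefetchable BOOLEAN,         -- restore_prefetchable
      extent TEXT                   -- extent_count, extent_ids (added by setup on older schemas)
    );
  SQL
  OBJECT_CONNECTION_SQL = <<-'SQL'  # hypothetical reconstruction
    CREATE TABLE object_connection (
      origin_id INTEGER,            -- ensure_object_connections
      target_id INTEGER             -- retrieve_connected_objects
    );
  SQL
  COLLECTION_SQL = <<-'SQL'         # hypothetical reconstruction
    CREATE TABLE collection (
      odba_id INTEGER,              -- collection_store/_fetch/_remove
      key TEXT,                     -- serialized key dump
      value TEXT                    -- serialized value dump
    );
  SQL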

Instance Attribute Summary

Instance Method Summary

Constructor Details

#initialize ⇒ Storage

Returns a new instance of Storage.



# File 'lib/odba/storage.rb', line 48

def initialize
	@id_mutex = Mutex.new
end

Instance Attribute Details

#dbi ⇒ Object



# File 'lib/odba/storage.rb', line 201

def dbi
	Thread.current[:txn] || @dbi
end

Instance Method Details

#bulk_restore(bulk_fetch_ids) ⇒ Object



# File 'lib/odba/storage.rb', line 51

def bulk_restore(bulk_fetch_ids)
	if(bulk_fetch_ids.empty?)
		[]
	else
		bulk_fetch_ids = bulk_fetch_ids.uniq
		rows = []
		while(!(ids = bulk_fetch_ids.slice!(0, BULK_FETCH_STEP)).empty?)
			sql = <<-SQL
				SELECT odba_id, content FROM object 
				WHERE odba_id IN (#{ids.join(',')})
			SQL
			rows.concat(self.dbi.select_all(sql))
		end
		rows
	end
end
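
A hypothetical call, assuming the singleton already holds a connected DBI handle; ids are deduplicated and fetched in batches of BULK_FETCH_STEP:

  storage = ODBA::Storage.instance
  rows = storage.bulk_restore([1, 2, 3, 2])   # duplicates collapse to one fetch
  rows.each { |row|
    odba_id, content = row[0], row[1]         # content is the stored dump for odba_id
  }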

#collection_fetch(odba_id, key_dump) ⇒ Object



# File 'lib/odba/storage.rb', line 67

def collection_fetch(odba_id, key_dump)
  sql = <<-SQL
    SELECT value FROM collection 
    WHERE odba_id = ? AND key = ?
  SQL
  row = self.dbi.select_one(sql, odba_id, key_dump)
  row.first unless row.nil?
end

#collection_remove(odba_id, key_dump) ⇒ Object



# File 'lib/odba/storage.rb', line 75

def collection_remove(odba_id, key_dump)
  self.dbi.do <<-SQL, odba_id, key_dump
    DELETE FROM collection
    WHERE odba_id = ? AND key = ?
  SQL
end

#collection_store(odba_id, key_dump, value_dump) ⇒ Object



# File 'lib/odba/storage.rb', line 81

def collection_store(odba_id, key_dump, value_dump)
  self.dbi.do <<-SQL, odba_id, key_dump, value_dump
    INSERT INTO collection (odba_id, key, value)
    VALUES (?, ?, ?)
  SQL
end
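
A sketch of the collection round trip; the key and value dumps are whatever serialization the caller uses (Marshal here is only an example):

  storage    = ODBA::Storage.instance
  key_dump   = Marshal.dump(:color)
  value_dump = Marshal.dump('red')
  storage.collection_store(12, key_dump, value_dump)
  storage.collection_fetch(12, key_dump)   # => value_dump, or nil if absent
  storage.collection_remove(12, key_dump)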

#condition_index_delete(index_name, origin_id, search_terms, target_id = nil) ⇒ Object



# File 'lib/odba/storage.rb', line 87

def condition_index_delete(index_name, origin_id, 
                           search_terms, target_id=nil)
  values = []
  sql = "DELETE FROM #{index_name}"
  if(origin_id)
    sql << " WHERE origin_id = ?"
  else
    sql << " WHERE origin_id IS ?"
  end
  search_terms.each { |key, value|
    sql << " AND %s = ?" % key
    values << value
  }
  if(target_id)
    sql << " AND target_id = ?"
    values << target_id
  end
  self.dbi.do sql, origin_id, *values
end

#condition_index_ids(index_name, id, id_name) ⇒ Object



# File 'lib/odba/storage.rb', line 106

def condition_index_ids(index_name, id, id_name)
  sql = <<-SQL
    SELECT DISTINCT *
    FROM #{index_name}
    WHERE #{id_name}=?
  SQL
  self.dbi.select_all(sql, id)
end

#create_condition_index(table_name, definition) ⇒ Object



# File 'lib/odba/storage.rb', line 132

def create_condition_index(table_name, definition)
  self.dbi.do <<-SQL
CREATE TABLE #{table_name} (
  origin_id INTEGER,
  #{definition.collect { |*pair| pair.join(' ') }.join(",\n  ") },
  target_id INTEGER
);
  SQL
  #index origin_id
  self.dbi.do <<-SQL
CREATE INDEX origin_id_#{table_name} ON #{table_name}(origin_id);
  SQL
  #index search_term
  definition.each { |name, datatype|
    self.dbi.do <<-SQL
CREATE INDEX #{name}_#{table_name} ON #{table_name}(#{name});
    SQL
  }
  #index target_id
  self.dbi.do <<-SQL
CREATE INDEX target_id_#{table_name} ON #{table_name}(target_id);
  SQL
end
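
A hypothetical definition argument: pairs of column name and SQL datatype. The call below would create a table condition_index_widgets(origin_id, color, size, target_id) plus one index per column; the table and column names are made up for illustration:

  definition = [['color', 'TEXT'], ['size', 'INTEGER']]
  ODBA::Storage.instance.create_condition_index('condition_index_widgets', definition)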

#create_dictionary_map(language) ⇒ Object



# File 'lib/odba/storage.rb', line 114

def create_dictionary_map(language)
  %w{lhword lpart_hword lword}.each { |token|
    self.dbi.do <<-SQL
      INSERT INTO pg_ts_cfgmap (ts_name, tok_alias, dict_name)
      VALUES ('default_#{language}', '#{token}',
      '{#{language}_ispell,#{language}_stem}')
    SQL
  }
  [ 'url', 'host', 'sfloat', 'uri', 'int', 'float', 'email',
    'word', 'hword', 'nlword', 'nlpart_hword', 'part_hword',
    'nlhword', 'file', 'uint', 'version' 
  ].each { |token|
    self.dbi.do <<-SQL
      INSERT INTO pg_ts_cfgmap (ts_name, tok_alias, dict_name)
      VALUES ('default_#{language}', '#{token}', '{simple}')
    SQL
  }
end

#create_fulltext_index(table_name) ⇒ Object



# File 'lib/odba/storage.rb', line 155

def create_fulltext_index(table_name)
  self.dbi.do <<-SQL
CREATE TABLE #{table_name} (
  origin_id INTEGER,
  search_term tsvector,
  target_id INTEGER
);
  SQL
  #index origin_id
  self.dbi.do <<-SQL
CREATE INDEX origin_id_#{table_name} ON #{table_name}(origin_id);
  SQL
  #index search_term
  self.dbi.do <<-SQL
CREATE INDEX search_term_#{table_name}
ON #{table_name} USING gist(search_term);
  SQL
  #index target_id
  self.dbi.do <<-SQL
CREATE INDEX target_id_#{table_name} ON #{table_name}(target_id);
  SQL
end

#create_index(table_name) ⇒ Object



# File 'lib/odba/storage.rb', line 177

def create_index(table_name)
  self.dbi.do <<-SQL
    CREATE TABLE #{table_name} (
      origin_id INTEGER,
      search_term TEXT,
      target_id INTEGER
    );
  SQL
  #index origin_id
  self.dbi.do <<-SQL
    CREATE INDEX origin_id_#{table_name}
    ON #{table_name}(origin_id)
  SQL
  #index search_term
  self.dbi.do <<-SQL
    CREATE INDEX search_term_#{table_name}
    ON #{table_name}(search_term)
  SQL
  #index target_id
  self.dbi.do <<-SQL
    CREATE INDEX target_id_#{table_name}
    ON #{table_name}(target_id)
  SQL
end

#delete_index_element(index_name, odba_id, id_name) ⇒ Object



# File 'lib/odba/storage.rb', line 207

def delete_index_element(index_name, odba_id, id_name)
  self.dbi.do <<-SQL, odba_id
    DELETE FROM #{index_name} WHERE #{id_name} = ?
  SQL
end

#delete_persistable(odba_id) ⇒ Object



# File 'lib/odba/storage.rb', line 212

def delete_persistable(odba_id)
  # delete origin from connections
  self.dbi.do <<-SQL, odba_id
    DELETE FROM object_connection WHERE origin_id = ?
  SQL
  # delete target from connections
  self.dbi.do <<-SQL, odba_id
    DELETE FROM object_connection WHERE target_id = ?
  SQL
  # delete from collections
  self.dbi.do <<-SQL, odba_id
    DELETE FROM collection WHERE odba_id = ?
  SQL
  # delete from objects
  self.dbi.do <<-SQL, odba_id
    DELETE FROM object WHERE odba_id = ?
  SQL
end

#drop_index(index_name) ⇒ Object



# File 'lib/odba/storage.rb', line 204

def drop_index(index_name)
	self.dbi.do "DROP TABLE #{index_name}"
end

#ensure_object_connections(origin_id, target_ids) ⇒ Object



# File 'lib/odba/storage.rb', line 230

def ensure_object_connections(origin_id, target_ids)
  sql = <<-SQL
    SELECT target_id FROM object_connection
    WHERE origin_id = ?
  SQL
  target_ids.uniq!
  update_ids = target_ids
  old_ids = []
  ## use self.dbi instead of @dbi to get information about
  ## object_connections previously stored within this transaction
  if(rows = self.dbi.select_all(sql, origin_id))
    old_ids = rows.collect { |row| row[0] }
    old_ids.uniq!
    delete_ids = old_ids - target_ids
    update_ids = target_ids - old_ids
    unless(delete_ids.empty?)
      while(!(ids = delete_ids.slice!(0, BULK_FETCH_STEP)).empty?)
        self.dbi.do <<-SQL, origin_id
          DELETE FROM object_connection
          WHERE origin_id = ? AND target_id IN (#{ids.join(',')})
        SQL
      end
    end
  end
  sth = self.dbi.prepare <<-SQL
    INSERT INTO object_connection (origin_id, target_id)
    VALUES (?, ?)
  SQL
  update_ids.each { |id|
    sth.execute(origin_id, id)
  }
  sth.finish
end
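
A sketch of the reconciliation this performs: previously stored targets missing from the new list are deleted, new ones are inserted, existing ones are left untouched:

  storage = ODBA::Storage.instance
  storage.ensure_object_connections(42, [7, 9])    # stores 42->7, 42->9
  storage.ensure_object_connections(42, [9, 11])   # deletes 42->7, adds 42->11
  storage.retrieve_connected_objects(9)            # rows containing origin_id 42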

#ensure_target_id_index(table_name) ⇒ Object



# File 'lib/odba/storage.rb', line 263

def ensure_target_id_index(table_name)
  #index target_id
  self.dbi.do <<-SQL
    CREATE INDEX target_id_#{table_name}
    ON #{table_name}(target_id)
  SQL
rescue
end

#extent_count(klass) ⇒ Object



# File 'lib/odba/storage.rb', line 271

def extent_count(klass)
  self.dbi.select_one(<<-EOQ, klass.to_s).first
    SELECT COUNT(odba_id) FROM object WHERE extent = ?
  EOQ
end

#extent_ids(klass) ⇒ Object



# File 'lib/odba/storage.rb', line 276

def extent_ids(klass)
  self.dbi.select_all(<<-EOQ, klass.to_s).flatten
    SELECT odba_id FROM object WHERE extent = ?
  EOQ
end

#fulltext_index_delete(index_name, id, id_name) ⇒ Object



# File 'lib/odba/storage.rb', line 281

def fulltext_index_delete(index_name, id, id_name)
  self.dbi.do <<-SQL, id
    DELETE FROM #{index_name}
    WHERE #{id_name} = ?
  SQL
end

#fulltext_index_target_ids(index_name, origin_id) ⇒ Object



# File 'lib/odba/storage.rb', line 287

def fulltext_index_target_ids(index_name, origin_id)
  sql = <<-SQL
    SELECT DISTINCT target_id
    FROM #{index_name}
    WHERE origin_id=?
  SQL
  self.dbi.select_all(sql, origin_id)
end

#generate_dictionary(language, locale, dict_dir) ⇒ Object



# File 'lib/odba/storage.rb', line 295

def generate_dictionary(language, locale, dict_dir)
  # setup configuration
  self.dbi.do <<-SQL
    INSERT INTO pg_ts_cfg (ts_name, prs_name, locale)
    VALUES ('default_#{language}', 'default', '#{locale}');
  SQL
  # insert path to dictionary
  sql = <<-SQL
    INSERT INTO pg_ts_dict (
      SELECT '#{language}_ispell', dict_init, ?, dict_lexize
      FROM pg_ts_dict
      WHERE dict_name = 'ispell_template'
    );
  SQL
  prepath = File.expand_path("fulltext", dict_dir)
  path = %w{Aff Dict Stop}.collect { |type|
    sprintf('%sFile="%s.%s"', type, prepath, type.downcase)
  }.join(',')
  self.dbi.do sql, path
  create_dictionary_map(language)
  self.dbi.do <<-SQL
    INSERT INTO pg_ts_dict (
      dict_name, dict_init, dict_lexize
    )
    VALUES (
      '#{language}_stem', 'dinit_#{language}(internal)',
      'snb_lexize(internal, internal, int4)'
    );
  SQL
end

#index_delete_origin(index_name, odba_id, term) ⇒ Object



# File 'lib/odba/storage.rb', line 325

def index_delete_origin(index_name, odba_id, term)
  self.dbi.do <<-SQL, odba_id, term
    DELETE FROM #{index_name} 
    WHERE origin_id = ?
    AND search_term = ?
  SQL
end

#index_delete_target(index_name, origin_id, search_term, target_id) ⇒ Object



# File 'lib/odba/storage.rb', line 332

def index_delete_target(index_name, origin_id, search_term, target_id)
  self.dbi.do <<-SQL, origin_id, search_term, target_id
    DELETE FROM #{index_name} 
    WHERE origin_id = ?
    AND search_term = ?
    AND target_id = ?
  SQL
end

#index_fetch_keys(index_name, length = nil) ⇒ Object



# File 'lib/odba/storage.rb', line 340

def index_fetch_keys(index_name, length=nil)
  expr = if(length)
           "substr(search_term, 1, #{length})"
         else
           "search_term"
         end
  sql = <<-SQL
    SELECT DISTINCT #{expr} AS key
    FROM #{index_name}
    ORDER BY key
  SQL
  self.dbi.select_all(sql).flatten
end

#index_matches(index_name, substring, limit = nil, offset = 0) ⇒ Object



# File 'lib/odba/storage.rb', line 353

def index_matches(index_name, substring, limit=nil, offset=0)
  sql = <<-SQL
    SELECT DISTINCT search_term AS key
    FROM #{index_name}
    WHERE search_term LIKE ?
    ORDER BY key
  SQL
  if limit
    sql << "LIMIT #{limit}\n"
  end
  if offset > 0
    sql << "OFFSET #{offset}\n"
  end
  self.dbi.select_all(sql, substring + '%').flatten
end

#index_origin_ids(index_name, target_id) ⇒ Object



# File 'lib/odba/storage.rb', line 368

def index_origin_ids(index_name, target_id)
  sql = <<-SQL
    SELECT DISTINCT origin_id, search_term
    FROM #{index_name}
    WHERE target_id=?
  SQL
  self.dbi.select_all(sql, target_id)
end

#index_target_ids(index_name, origin_id) ⇒ Object



# File 'lib/odba/storage.rb', line 376

def index_target_ids(index_name, origin_id)
  sql = <<-SQL
    SELECT DISTINCT target_id, search_term
    FROM #{index_name}
    WHERE origin_id=?
  SQL
  self.dbi.select_all(sql, origin_id)
end

#max_id ⇒ Object



# File 'lib/odba/storage.rb', line 384

def max_id
  @id_mutex.synchronize do
    ensure_next_id_set
    @next_id
  end
end

#next_id ⇒ Object



# File 'lib/odba/storage.rb', line 390

def next_id
  @id_mutex.synchronize do
    ensure_next_id_set
    @next_id += 1
  end
end

#remove_dictionary(language) ⇒ Object



# File 'lib/odba/storage.rb', line 412

def remove_dictionary(language)
  # remove configuration
  self.dbi.do <<-SQL
    DELETE FROM pg_ts_cfg
    WHERE ts_name='default_#{language}'
  SQL
  # remove dictionaries
  self.dbi.do <<-SQL
    DELETE FROM pg_ts_dict
    WHERE dict_name IN ('#{language}_ispell', '#{language}_stem')
  SQL
  # remove tokens
  self.dbi.do <<-SQL
    DELETE FROM pg_ts_cfgmap
    WHERE ts_name='default_#{language}'
  SQL
end

#reserve_next_id(reserved_id) ⇒ Object



# File 'lib/odba/storage.rb', line 401

def reserve_next_id(reserved_id)
  @id_mutex.synchronize do
    ensure_next_id_set
    if @next_id < reserved_id
      @next_id = reserved_id
    else
      raise OdbaDuplicateIdError,
            "The id '#{reserved_id}' has already been assigned"
    end
  end
end
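
A sketch of how the id methods interact, assuming ensure_next_id_set only seeds @next_id from the database while it is still unset:

  storage = ODBA::Storage.instance
  storage.update_max_id(1000)     # force the counter
  storage.next_id                 # => 1001
  storage.reserve_next_id(1005)   # ok: moves the counter forward
  storage.reserve_next_id(900)    # raises OdbaDuplicateIdError
  storage.max_id                  # => 1005, without incrementing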

#restore(odba_id) ⇒ Object



# File 'lib/odba/storage.rb', line 429

def restore(odba_id)
	row = self.dbi.select_one("SELECT content FROM object WHERE odba_id = ?", odba_id)
	row.first unless row.nil?
end

#restore_collection(odba_id) ⇒ Object



# File 'lib/odba/storage.rb', line 513

def restore_collection(odba_id)
	self.dbi.select_all <<-EOQ
		SELECT key, value FROM collection WHERE odba_id = #{odba_id}
	EOQ
end

#restore_named(name) ⇒ Object



# File 'lib/odba/storage.rb', line 518

def restore_named(name)
	row = self.dbi.select_one("SELECT content FROM object WHERE name = ?", 
		name)
	row.first unless row.nil?
end

#restore_prefetchable ⇒ Object



# File 'lib/odba/storage.rb', line 523

def restore_prefetchable
	self.dbi.select_all <<-EOQ
		SELECT odba_id, content FROM object WHERE prefetchable = true
	EOQ
end

#retrieve_connected_objects(target_id) ⇒ Object



# File 'lib/odba/storage.rb', line 433

def retrieve_connected_objects(target_id)
	sql = <<-SQL 
		SELECT origin_id FROM object_connection 
		WHERE target_id = ?
	SQL
	self.dbi.select_all(sql, target_id)
end

#retrieve_from_condition_index(index_name, conditions, limit = nil) ⇒ Object



# File 'lib/odba/storage.rb', line 440

def retrieve_from_condition_index(index_name, conditions, limit=nil)
  sql = <<-EOQ
    SELECT target_id, COUNT(target_id) AS relevance
    FROM #{index_name}
    WHERE TRUE
  EOQ
  values = []
  lines = conditions.collect { |name, info|
    val = nil
    condition = nil
    if(info.is_a?(Hash))
      condition = info['condition']
      if(val = info['value']) 
        if(/i?like/i.match(condition))
          val += '%'
        end
        condition = "#{condition || '='} ?"
        values.push(val.to_s)
      end
    elsif(info)
      condition = "= ?"
      values.push(info.to_s)
    end
    sql << <<-EOQ
      AND #{name} #{condition || 'IS NULL'}
    EOQ
  }
  sql << "        GROUP BY target_id\n"
  if(limit)
    sql << " LIMIT #{limit}"
  end
  self.dbi.select_all(sql, *values)
end

#retrieve_from_fulltext_index(index_name, search_term, dict, limit = nil) ⇒ Object



# File 'lib/odba/storage.rb', line 473

def retrieve_from_fulltext_index(index_name, search_term, dict, limit=nil)
  ## this combination of gsub statements solves the problem of
  #  properly escaping strings of this form: "(2:1)" into
  #  '\(2\:1\)' (see test_retrieve_from_fulltext_index)
  term = search_term.strip.gsub(/\s+/, '&').gsub(/&+/, '&')\
    .gsub(/[():]/i, '\\ \\&').gsub(/\s/, '')
  sql = <<-EOQ
    SELECT target_id,
      max(ts_rank(search_term, to_tsquery(?, ?))) AS relevance
    FROM #{index_name}
    WHERE search_term @@ to_tsquery(?, ?)
    GROUP BY target_id
    ORDER BY relevance DESC
  EOQ
  if(limit)
    sql << " LIMIT #{limit}"
  end
  self.dbi.select_all(sql, dict, term, dict, term)
rescue DBI::ProgrammingError => e
  warn("ODBA::Storage.retrieve_from_fulltext_index rescued a DBI::ProgrammingError(#{e.message}). Query:")
  warn("self.dbi.select_all(#{sql}, #{dict}, #{term}, #{dict}, #{term})")
  warn("returning empty result")
  []
end

#retrieve_from_index(index_name, search_term, exact = nil, limit = nil) ⇒ Object



# File 'lib/odba/storage.rb', line 497

def retrieve_from_index(index_name, search_term, 
                        exact=nil, limit=nil)
  unless(exact)
    search_term = search_term + "%"
  end
  sql = <<-EOQ
    SELECT target_id, COUNT(target_id) AS relevance
    FROM #{index_name}
    WHERE search_term LIKE ?
    GROUP BY target_id
  EOQ
  if(limit)
    sql << " LIMIT #{limit}"
  end
  self.dbi.select_all(sql, search_term)	 
end
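
Hypothetical queries against a plain index table ('widget_index' is an invented name); rows come back as [target_id, relevance] pairs:

  storage = ODBA::Storage.instance
  storage.retrieve_from_index('widget_index', 'alu')              # LIKE 'alu%'
  storage.retrieve_from_index('widget_index', 'aluminium', true)  # exact term
  storage.retrieve_from_index('widget_index', 'alu', nil, 10)     # at most 10 rows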

#setup ⇒ Object



# File 'lib/odba/storage.rb', line 528

def setup
  TABLES.each { |name, definition|
    self.dbi.do(definition) rescue DBI::ProgrammingError
  }
  unless(self.dbi.columns('object').any? { |col| col.name == 'extent' })
    self.dbi.do <<-EOS
ALTER TABLE object ADD COLUMN extent TEXT;
CREATE INDEX extent_index ON object(extent);
    EOS
  end
end
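
A hypothetical bootstrap, assuming ruby-dbi with the Pg driver and that the dbi attribute also has a writer (connection parameters are placeholders):

  require 'dbi'
  require 'odba'

  storage = ODBA::Storage.instance
  storage.dbi = DBI.connect('DBI:Pg:odba_test', 'odba', 'secret')
  storage.setup   # creates object, object_connection and collection if missing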

#store(odba_id, dump, name, prefetchable, klass) ⇒ Object



# File 'lib/odba/storage.rb', line 539

def store(odba_id, dump, name, prefetchable, klass)
  sql = "SELECT name FROM object WHERE odba_id = ?"
  if(row = self.dbi.select_one(sql, odba_id))
    name ||= row['name']
    self.dbi.do <<-SQL, dump, name, prefetchable, klass.to_s, odba_id
      UPDATE object SET
        content = ?,
        name = ?,
        prefetchable = ?,
        extent = ?
      WHERE odba_id = ?
    SQL
  else
    self.dbi.do <<-SQL, odba_id, dump, name, prefetchable, klass.to_s
      INSERT INTO object (odba_id, content, name, prefetchable, extent)
      VALUES (?, ?, ?, ?, ?)
    SQL
  end
end
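
A sketch of a store/restore round trip; the dump is an opaque string from the caller's point of view (ODBA itself stores marshalled persistables):

  storage = ODBA::Storage.instance
  dump = Marshal.dump(['some', 'state'])
  storage.store(123, dump, 'my_name', false, Array)   # INSERT on first call
  storage.store(123, dump, nil, false, Array)         # UPDATE, keeps the stored name
  storage.restore(123)                                # => dump
  storage.restore_named('my_name')                    # => dump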

#transaction(&block) ⇒ Object



# File 'lib/odba/storage.rb', line 558

def transaction(&block)
  dbi = nil
  retval = nil
  @dbi.transaction { |dbi|
    ## this should not be necessary anymore:
    #dbi['AutoCommit'] = false
    Thread.current[:txn] = dbi
    retval = block.call
  }
  retval
ensure
  ## this should not be necessary anymore:
  #dbi['AutoCommit'] = true
  Thread.current[:txn] = nil
end
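
A usage sketch: every storage call made inside the block goes through the same DBI transaction handle, because #dbi prefers Thread.current[:txn] over @dbi; DBI commits when the block returns and rolls back if it raises:

  storage = ODBA::Storage.instance
  storage.transaction do
    id = storage.next_id
    storage.store(id, Marshal.dump(:payload), nil, false, Symbol)
    storage.ensure_object_connections(id, [])
  end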

#update_condition_index(index_name, origin_id, search_terms, target_id) ⇒ Object



# File 'lib/odba/storage.rb', line 573

def update_condition_index(index_name, origin_id, search_terms, target_id)
  keys = []
  vals = []
  search_terms.each { |key, val|
    keys.push(key)
    vals.push(val)
  }
  if(target_id)
    self.dbi.do <<-SQL, origin_id, target_id, *vals
INSERT INTO #{index_name} (origin_id, target_id, #{keys.join(', ')})
VALUES (?, ?#{', ?' * keys.size})
    SQL
  else
    key_str = keys.collect { |key| "#{key}=?" }.join(', ')
    self.dbi.do <<-SQL, *(vals.push(origin_id))
UPDATE #{index_name} SET #{key_str}
WHERE origin_id = ?
    SQL
  end
end
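
A sketch against the hypothetical condition index created above: passing a target_id inserts a new row, passing nil updates the search terms of the rows already stored for that origin:

  terms = [['color', 'red'], ['size', 2]]
  storage = ODBA::Storage.instance
  storage.update_condition_index('condition_index_widgets', 42, terms, 7)   # INSERT
  storage.update_condition_index('condition_index_widgets', 42, terms, nil) # UPDATE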

#update_fulltext_index(index_name, origin_id, search_term, target_id, dict) ⇒ Object



# File 'lib/odba/storage.rb', line 593

def update_fulltext_index(index_name, origin_id, search_term, target_id, dict)
  search_term = search_term.gsub(/\s+/, ' ').strip
  if(target_id)
    self.dbi.do <<-SQL, origin_id, dict, search_term, target_id
INSERT INTO #{index_name} (origin_id, search_term, target_id)
VALUES (?, to_tsvector(?, ?), ?)
    SQL
  else
    self.dbi.do <<-SQL, dict, search_term, origin_id
UPDATE #{index_name} SET search_term=to_tsvector(?, ?)
WHERE origin_id=?
    SQL
  end
end

#update_index(index_name, origin_id, search_term, target_id) ⇒ Object



# File 'lib/odba/storage.rb', line 607

def update_index(index_name, origin_id, search_term, target_id)
    if(target_id)
      self.dbi.do <<-SQL, origin_id, search_term, target_id
        INSERT INTO #{index_name} (origin_id, search_term, target_id) 
        VALUES (?, ?, ?)
      SQL
    else
      self.dbi.do <<-SQL, search_term, origin_id
        UPDATE #{index_name} SET search_term=?
        WHERE origin_id=?
      SQL
    end
end

#update_max_id(id) ⇒ Object



# File 'lib/odba/storage.rb', line 396

def update_max_id(id)
  @id_mutex.synchronize do
    @next_id = id
  end
end