Class: SolidCacheMongoid::Entry

Inherits:
Object
Includes:
Mongoid::Locker, Expiration, Size, Record
Defined in:
app/models/solid_cache_mongoid/entry.rb,
app/models/solid_cache_mongoid/entry/size.rb,
app/models/solid_cache_mongoid/entry/expiration.rb,
app/models/solid_cache_mongoid/entry/size/estimate.rb,
app/models/solid_cache_mongoid/entry/size/moving_average_estimate.rb

Defined Under Namespace

Modules: Expiration, Size

Constant Summary

ESTIMATED_ROW_OVERHEAD = 140
  The estimated cost of an extra row in bytes, including fixed-size columns, overhead, indexes and free space. Based on experimentation with SQLite, MySQL and PostgreSQL. A bit high for SQLite (more like 90 bytes), but about right for MySQL/PostgreSQL.

ESTIMATED_ENCRYPTION_OVERHEAD = 170
  Assuming MessagePack serialization.

KEY_HASH_ID_RANGE = -(2**63)..(2**63 - 1)

MULTI_BATCH_SIZE = 1000
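
How these overheads feed into per-entry size accounting is handled by the Size module (not shown on this page); a minimal sketch of the idea, assuming the estimate is key bytes + value bytes + fixed row overhead, plus the encryption overhead when encryption is in use:

# Illustrative only -- not the gem's actual Entry::Size implementation.
def estimated_entry_bytes(key, value, encrypted: false)
  bytes = key.bytesize + value.bytesize + SolidCacheMongoid::Entry::ESTIMATED_ROW_OVERHEAD
  bytes += SolidCacheMongoid::Entry::ESTIMATED_ENCRYPTION_OVERHEAD if encrypted
  bytes
end

estimated_entry_bytes("users/1", "cached payload")  # => 161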

Class Method Summary

Class Method Details

.clear_delete ⇒ Object



# File 'app/models/solid_cache_mongoid/entry.rb', line 78

def clear_delete
  without_query_cache do
    delete_all
  end
  nil
end
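
A usage sketch (assuming the gem and a Mongoid connection are configured):

# Deletes every entry document, bypassing the query cache, and returns nil.
SolidCacheMongoid::Entry.clear_delete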

.clear_truncate ⇒ Object



# File 'app/models/solid_cache_mongoid/entry.rb', line 73

def clear_truncate
  delete_all
  nil
end
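
MongoDB has no TRUNCATE, so in this port clear_truncate also clears via delete_all; a usage sketch:

SolidCacheMongoid::Entry.clear_truncate  # => nil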

.delete_by_key(*keys) ⇒ Object



# File 'app/models/solid_cache_mongoid/entry.rb', line 67

def delete_by_key(*keys)
  without_query_cache do
    where(:key_hash.in => key_hashes_for(keys)).delete_all
  end
end
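
A usage sketch; Mongoid's delete_all returns the number of documents removed:

SolidCacheMongoid::Entry.delete_by_key("users/1", "users/2")  # => 2 if both keys existed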

.id_range ⇒ Object



# File 'app/models/solid_cache_mongoid/entry.rb', line 107

def id_range
  without_query_cache { count }
end
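
In this port the "range" is approximated by the number of entry documents; a usage sketch:

SolidCacheMongoid::Entry.id_range  # => Integer (entry count)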

.lock_and_write(key, &block) ⇒ Object



# File 'app/models/solid_cache_mongoid/entry.rb', line 85

def lock_and_write(key, &block)
  without_query_cache do
    entry = where(key_hash: key_hash_for(key)).first

    if entry
      entry.with_lock do
        entry_key = entry.key.data
        entry_value = entry.value.data

        current_value = entry_key == key ? entry_value : nil
        new_value = block.call(current_value)
        write(key, new_value) if new_value
        new_value
      end
    else
      new_value = block.call(nil)
      write(key, new_value) if new_value
      new_value
    end
  end
end
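
A usage sketch of the read-modify-write pattern this enables, assuming string-serialized values (the block receives the current value, or nil, and its truthy return value is written back):

# Atomically increment a counter stored under "counter".
SolidCacheMongoid::Entry.lock_and_write("counter") do |current|
  ((current&.to_i || 0) + 1).to_s
end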

.read(key) ⇒ Object



# File 'app/models/solid_cache_mongoid/entry.rb', line 44

def read(key)
  read_multi([key])[key]
end
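
A usage sketch; reads delegate to read_multi:

SolidCacheMongoid::Entry.read("users/1")  # => the stored value string, or nil when the key is absent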

.read_multi(keys) ⇒ Object



# File 'app/models/solid_cache_mongoid/entry.rb', line 48

def read_multi(keys)
  without_query_cache do
    {}.tap do |results|
      keys.each_slice(MULTI_BATCH_SIZE).each do |keys_batch|
        key_hashes = key_hashes_for(keys_batch)

        where(:key_hash.in => key_hashes)
          .only(:key, :value)
          .each do |entry|
            # Convert BSON::Binary back to a String
            key_str = entry.key.data
            value_str = entry.value.data
            results[key_str] = value_str
          end
      end
    end
  end
end
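
A usage sketch; the result only contains the keys that were found:

SolidCacheMongoid::Entry.read_multi(["users/1", "users/2"])
# => { "users/1" => "serialized user 1", "users/2" => "serialized user 2" }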

.write(key, value) ⇒ Object



# File 'app/models/solid_cache_mongoid/entry.rb', line 23

def write(key, value)
  write_multi([ { key: key, value: value } ])
end
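
A usage sketch; keys and values are stored as binary, so pre-serialized strings are expected:

SolidCacheMongoid::Entry.write("settings/theme", "dark")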

.write_multi(payloads) ⇒ Object



# File 'app/models/solid_cache_mongoid/entry.rb', line 27

def write_multi(payloads)
  without_query_cache do
    payloads.each_slice(MULTI_BATCH_SIZE).each do |payload_batch|
      add_key_hash_and_byte_size(payload_batch).each do |payload|
        # Convert key and value to BSON::Binary
        key = payload.delete(:key)
        value = payload.delete(:value)
        obj = where(key_hash: payload[:key_hash]).first_or_initialize
        obj.assign_attributes(payload)
        obj.key = BSON::Binary.new(key)
        obj.value = BSON::Binary.new(value)
        obj.save!
      end
    end
  end
end
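
A usage sketch of batch writes; each payload is a hash with :key and :value, and key_hash/byte_size are filled in internally:

SolidCacheMongoid::Entry.write_multi([
  { key: "users/1", value: "serialized user 1" },
  { key: "users/2", value: "serialized user 2" }
])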