Class: Logster::RedisStore
Constant Summary
collapse
- ENV_PREFIX =
"logster-env-"
- BULK_ENV_GET_LUA =
<<~LUA
local results = {};
for i = 1, table.getn(KEYS), 1 do
results[i] = { KEYS[i], redis.call('LRANGE', KEYS[i], 0, -1) };
end
return results;
LUA
Instance Attribute Summary collapse
Attributes inherited from BaseStore
#allow_custom_patterns, #ignore, #level, #max_retention, #skip_empty
Instance Method Summary
collapse
-
#bulk_delete(message_keys, grouping_keys) ⇒ Object
-
#bulk_get(message_keys, with_env: true) ⇒ Object
-
#clear ⇒ Object
-
#clear_all ⇒ Object
Delete everything, including protected messages (use in tests).
-
#count ⇒ Object
-
#delete(msg) ⇒ Object
-
#find_pattern_groups(load_messages: false) ⇒ Object
-
#get(message_key, load_env: true) ⇒ Object
-
#get_all_ignore_count ⇒ Object
-
#get_all_messages(with_env: true) ⇒ Object
-
#get_env(message_key) ⇒ Object
-
#get_patterns(set_name) ⇒ Object
-
#increment_ignore_count(pattern) ⇒ Object
-
#initialize(redis = nil) ⇒ RedisStore
constructor
A new instance of RedisStore.
-
#insert_pattern(set_name, pattern) ⇒ Object
-
#latest(opts = {}) ⇒ Object
-
#protect(message_key) ⇒ Object
-
#rate_limited?(ip_address, perform: false, limit: 60) ⇒ Boolean
-
#rate_limits ⇒ Object
-
#register_rate_limit_per_hour(severities, limit, &block) ⇒ Object
-
#register_rate_limit_per_minute(severities, limit, &block) ⇒ Object
-
#remove_ignore_count(pattern) ⇒ Object
-
#remove_pattern(set_name, pattern) ⇒ Object
-
#remove_pattern_group(pattern) ⇒ Object
-
#replace_and_bump(message) ⇒ Object
-
#save(message) ⇒ Object
-
#save_pattern_group(group, redis: @redis) ⇒ Object
-
#similar_key(message) ⇒ Object
-
#solve(message_key) ⇒ Object
-
#solved ⇒ Object
-
#unprotect(message_key) ⇒ Object
Methods inherited from BaseStore
#clear_patterns_cache, #report
Constructor Details
#initialize(redis = nil) ⇒ RedisStore
Returns a new instance of RedisStore.
15
16
17
18
19
20
21
|
# File 'lib/logster/redis_store.rb', line 15
# Builds a store backed by the given Redis client.
#
# @param redis [Redis, nil] client to use; a new default Redis connection is
#   created when none is supplied.
def initialize(redis = nil)
  super()
  @redis = redis || Redis.new
  @max_backlog = 1000          # max messages retained in the backlog list
  @redis_prefix = nil          # key namespace; resolved lazily in #redis_prefix
  @redis_raw_connection = nil
end
|
Instance Attribute Details
#max_backlog ⇒ Object
Returns the value of attribute max_backlog.
12
13
14
|
# File 'lib/logster/redis_store.rb', line 12
# @return [Integer] maximum number of messages kept in the backlog (default 1000).
def max_backlog
  @max_backlog
end
|
#redis ⇒ Object
Returns the value of attribute redis.
12
13
14
|
# File 'lib/logster/redis_store.rb', line 12
# @return [Redis] the underlying Redis client this store writes to.
def redis
  @redis
end
|
#redis_prefix ⇒ Object
310
311
312
313
314
|
# File 'lib/logster/redis_store.rb', line 310
# Resolves the key-namespace prefix for this store.
#
# The configured prefix may be a plain value or a callable (evaluated on every
# call, e.g. for multisite setups). Falls back to "default" when unset.
#
# @return [String] the effective prefix
def redis_prefix
  return 'default'.freeze unless @redis_prefix
  @prefix_is_proc ||= @redis_prefix.respond_to?(:call)
  if @prefix_is_proc
    @redis_prefix.call
  else
    @redis_prefix
  end
end
|
#redis_raw_connection ⇒ Object
Returns the value of attribute redis_raw_connection.
12
13
14
|
# File 'lib/logster/redis_store.rb', line 12
# @return [Object, nil] optional raw Redis connection (unused unless configured).
def redis_raw_connection
  @redis_raw_connection
end
|
Instance Method Details
#bulk_delete(message_keys, grouping_keys) ⇒ Object
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
|
# File 'lib/logster/redis_store.rb', line 56
# Deletes a batch of messages and their grouping-hash entries in one MULTI
# transaction, also pruning the deleted messages from any pattern groups.
#
# @param message_keys [Array<String>] keys of messages to remove
# @param grouping_keys [Array<String>] fields to remove from the grouping hash
def bulk_delete(message_keys, grouping_keys)
  # Guard: HDEL with an empty field list raises a Redis arity error.
  return if message_keys.empty? && grouping_keys.empty?
  groups = find_pattern_groups(load_messages: true)
  @redis.multi do |pipeline|
    groups.each do |group|
      group.messages = group.messages.reject { |m| message_keys.include?(m.key) }
      save_pattern_group(group, redis: pipeline) if group.changed?
    end
    pipeline.hdel(hash_key, message_keys) if message_keys.any?
    pipeline.hdel(grouping_key, grouping_keys) if grouping_keys.any?
    message_keys.each do |k|
      # LREM count -1 removes the newest (tail-most) occurrence of the key.
      pipeline.lrem(list_key, -1, k)
      delete_env(k, redis: pipeline)
    end
  end
end
|
#bulk_get(message_keys, with_env: true) ⇒ Object
247
248
249
250
251
252
253
254
255
256
257
258
259
260
261
262
263
264
265
266
267
268
269
270
271
272
273
274
|
# File 'lib/logster/redis_store.rb', line 247
# Fetches many messages by key with a bounded number of Redis round-trips.
#
# @param message_keys [Array<String>, nil] message keys to load
# @param with_env [Boolean] when true, env payloads are loaded in one Lua call
# @return [Array<Message>] found messages; missing keys are silently dropped
def bulk_get(message_keys, with_env: true)
  return [] if !message_keys || message_keys.size == 0
  envs = nil
  if with_env
    envs = {}
    # One EVAL of BULK_ENV_GET_LUA LRANGEs every env list server-side,
    # avoiding one round-trip per message.
    @redis.eval(
      BULK_ENV_GET_LUA,
      keys: message_keys.map { |k| env_prefix(k, with_namespace: true) }
    ).to_h.each do |k, v|
      next if v.size == 0
      # A single env entry is exposed as one hash, multiple as an array.
      parsed = v.size == 1 ? JSON.parse(v[0]) : v.map { |e| JSON.parse(e) }
      envs[env_unprefix(k, with_namespace: true)] = parsed
    end
  end
  messages = @redis.hmget(hash_key, message_keys).map! do |json|
    next if !json || json.size == 0
    message = Message.from_json(json)
    if with_env && envs
      env = envs[message.key]
      # Only attach the side-loaded env when the message carries none itself.
      if !message.env || message.env.size == 0
        message.env = env || {}
      end
    end
    message
  end
  # Drop nils from keys that disappeared between EVAL and HMGET.
  messages.compact!
  messages
end
|
#clear ⇒ Object
165
166
167
168
169
170
171
172
173
174
175
176
177
178
179
180
181
182
183
184
185
186
187
188
189
190
191
192
193
194
195
196
197
198
199
200
201
202
|
# File 'lib/logster/redis_store.rb', line 165
# Clears all messages and related bookkeeping, preserving protected messages.
#
# Protected messages are re-saved into a fresh hash and the backlog list is
# rebuilt in sorted order; pattern groups are pruned to surviving keys.
def clear
  RedisRateLimiter.clear_all(@redis)
  @redis.del(solved_key)
  # Snapshot the backlog keys before deleting the list so envs can be purged.
  all_keys = @redis.lrange(list_key, 0, -1)
  @redis.del(list_key)
  protected_keys = @redis.smembers(protected_key) || []
  if protected_keys.empty?
    # Nothing to keep: wipe messages, envs, groups and the grouping hash.
    @redis.del(hash_key)
    all_keys.each { |k| delete_env(k) }
    @redis.del(pattern_groups_key)
    @redis.del(grouping_key)
  else
    # Keep protected messages: read them out, recreate the hash with only them.
    protected_messages = @redis.mapped_hmget(hash_key, *protected_keys)
    @redis.del(hash_key)
    @redis.mapped_hmset(hash_key, protected_messages)
    (all_keys - protected_keys).each { |k| delete_env(k) }
    sorted = protected_messages
      .values
      .map { |string|
        # NOTE(review): unparseable entries are silently dropped here.
        Message.from_json(string) rescue nil
      }
      .compact
      .sort
      .map(&:key)
    # Rebuild the backlog list in sorted order via a single pipeline.
    @redis.pipelined do |pipeline|
      sorted.each do |message_key|
        pipeline.rpush(list_key, message_key)
      end
    end
    # Prune pattern groups down to the messages that survived.
    find_pattern_groups(load_messages: true).each do |group|
      group.messages = group.messages.select { |m| sorted.include?(m.key) }
      save_pattern_group(group) if group.changed?
    end
  end
end
|
#clear_all ⇒ Object
Delete everything, including protected messages (use in tests)
206
207
208
209
210
211
212
213
214
215
216
217
218
219
220
221
222
|
# File 'lib/logster/redis_store.rb', line 206
# Deletes everything, including protected messages — intended for tests.
def clear_all
  @redis.lrange(list_key, 0, -1).each { |k| delete_env(k) }
  @redis.del(list_key)
  @redis.del(protected_key)
  @redis.del(hash_key)
  @redis.del(grouping_key)
  @redis.del(solved_key)
  @redis.del(ignored_logs_count_key)
  @redis.del(pattern_groups_key)
  Logster::Pattern.child_classes.each do |klass|
    @redis.del(klass.set_name)
  end
  # KEYS is a full keyspace scan — acceptable here since this is test-only.
  @redis.keys.each do |key|
    @redis.del(key) if key.include?(Logster::RedisRateLimiter::PREFIX)
    @redis.del(key) if key.start_with?(ip_rate_limit_key(""))
  end
end
|
#count ⇒ Object
93
94
95
|
# File 'lib/logster/redis_store.rb', line 93
# @return [Integer] number of messages currently in the backlog list.
def count
  @redis.llen(list_key)
end
|
#delete(msg) ⇒ Object
42
43
44
45
46
47
48
49
50
51
52
53
54
|
# File 'lib/logster/redis_store.rb', line 42
# Deletes a single message and all its bookkeeping in one MULTI transaction.
#
# @param msg [Message] the message to remove
def delete(msg)
  # Only load groups whose pattern matches this message's text.
  groups = find_pattern_groups() { |pat| msg.message =~ pat }
  @redis.multi do |pipeline|
    groups.each do |group|
      group.remove_message(msg)
      save_pattern_group(group, redis: pipeline) if group.changed?
    end
    pipeline.hdel(hash_key, msg.key)
    delete_env(msg.key, redis: pipeline)
    pipeline.hdel(grouping_key, msg.grouping_key)
    # LREM count -1 removes the newest (tail-most) occurrence of the key.
    pipeline.lrem(list_key, -1, msg.key)
  end
end
|
#find_pattern_groups(load_messages: false) ⇒ Object
359
360
361
362
363
364
365
366
367
368
369
370
371
372
373
374
375
376
377
378
379
380
381
382
383
384
385
|
# File 'lib/logster/redis_store.rb', line 359
# Loads the grouping-pattern groups stored in Redis.
#
# An optional block filters which patterns are considered.
#
# @param load_messages [Boolean] when true, each group's messages are bulk-loaded
# @return [Array<Logster::Group>] groups that exist in Redis for the patterns
def find_pattern_groups(load_messages: false)
  patterns = @patterns_cache.fetch(Logster::GroupingPattern::CACHE_KEY) do
    Logster::GroupingPattern.find_all(store: self)
  end
  patterns = patterns.select { |pattern| !block_given? || yield(pattern) }
  return [] if patterns.empty?
  # A group's key is its pattern's #inspect; build a lookup hash once instead
  # of the previous O(n^2) Array#index scan per group (which also raised on a
  # missing key via patterns[nil]).
  patterns_by_key = {}
  patterns.each { |pattern| patterns_by_key[pattern.inspect] = pattern }
  jsons = @redis.hmget(pattern_groups_key, patterns_by_key.keys)
  groups = jsons.map do |json|
    next if !json || json.size == 0
    group = Logster::Group.from_json(json)
    group.pattern = patterns_by_key[group.key]
    group.messages = bulk_get(group.messages_keys, with_env: false) if load_messages
    group
  end
  groups.compact
end
|
#get(message_key, load_env: true) ⇒ Object
224
225
226
227
228
229
230
231
232
233
|
# File 'lib/logster/redis_store.rb', line 224
# Loads one message by key.
#
# @param message_key [String] key of the message
# @param load_env [Boolean] when true, the env payload is fetched as well
# @return [Message, nil] nil when the key is unknown
def get(message_key, load_env: true)
  json = @redis.hget(hash_key, message_key)
  return nil if !json
  message = Message.from_json(json)
  message.env = get_env(message_key) || {} if load_env
  message
end
|
#get_all_ignore_count ⇒ Object
340
341
342
|
# File 'lib/logster/redis_store.rb', line 340
# @return [Hash{String=>String}] ignore-pattern => count of suppressed logs.
def get_all_ignore_count
  @redis.hgetall(ignored_logs_count_key)
end
|
#get_all_messages(with_env: true) ⇒ Object
235
236
237
|
# File 'lib/logster/redis_store.rb', line 235
# Loads every message in the backlog, in list order.
#
# @param with_env [Boolean] forwarded to #bulk_get
# @return [Array<Message>]
def get_all_messages(with_env: true)
  bulk_get(@redis.lrange(list_key, 0, -1), with_env: with_env)
end
|
#get_env(message_key) ⇒ Object
276
277
278
279
280
|
# File 'lib/logster/redis_store.rb', line 276
# Loads the env payload(s) recorded for a message.
#
# @param message_key [String]
# @return [Hash, Array<Hash>, nil] a single hash when one env entry exists,
#   an array of hashes for several, nil when there are none
def get_env(message_key)
  rows = @redis.lrange(env_prefix(message_key), 0, -1)
  return nil if !rows || rows.empty?
  return JSON.parse(rows.first) if rows.size == 1
  rows.map { |json| JSON.parse(json) }
end
|
#get_patterns(set_name) ⇒ Object
328
329
330
|
# File 'lib/logster/redis_store.rb', line 328
# @param set_name [String] Redis set holding serialized patterns
# @return [Array<String>] members of the pattern set.
def get_patterns(set_name)
  @redis.smembers(set_name)
end
|
#increment_ignore_count(pattern) ⇒ Object
332
333
334
|
# File 'lib/logster/redis_store.rb', line 332
# Increments the suppressed-log counter for an ignore pattern.
def increment_ignore_count(pattern)
  @redis.hincrby(ignored_logs_count_key, pattern, 1)
end
|
#insert_pattern(set_name, pattern) ⇒ Object
320
321
322
|
# File 'lib/logster/redis_store.rb', line 320
# Adds a serialized pattern to the named Redis set.
def insert_pattern(set_name, pattern)
  @redis.sadd(set_name, [pattern])
end
|
#latest(opts = {}) ⇒ Object
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
122
123
124
125
126
127
128
129
130
131
132
133
134
135
136
137
138
139
140
141
142
143
144
145
146
147
148
149
150
151
152
153
154
155
156
157
158
159
160
161
162
163
|
# File 'lib/logster/redis_store.rb', line 107
# Pages through the backlog returning up to +limit+ messages, newest-first by
# default, collapsing messages that belong to a pattern group into one group
# entry per page.
#
# @param opts [Hash] :limit, :severity (array), :before / :after (message keys),
#   :search, :with_env, :known_groups (group keys already shown to the client)
# @return [Array] messages and/or serialized groups
def latest(opts = {})
  limit = opts[:limit] || 50
  severity = opts[:severity]
  before = opts[:before]
  after = opts[:after]
  search = opts[:search]
  with_env = opts.key?(:with_env) ? opts[:with_env] : true
  known_groups = opts[:known_groups]&.dup || []
  # find_location (defined elsewhere) converts before/after into LRANGE bounds.
  start, finish = find_location(before, after, limit)
  return [] unless start && finish
  results = []
  pattern_groups = find_pattern_groups(load_messages: true)
  # Walk toward newer entries when paging with :after, older otherwise.
  direction = after ? 1 : -1
  begin
    keys = @redis.lrange(list_key, start, finish) || []
    break if !keys || keys.count <= 0
    rows = bulk_get(keys, with_env: with_env).reverse
    temp = []
    rows.each do |row|
      break if before && before == row.key
      row = nil if severity && !severity.include?(row.severity)
      row = filter_search(row, search)
      if row
        matches_pattern = pattern_groups.any? { |g| row.message =~ g.pattern }
        group = pattern_groups.find { |g| g.messages_keys.include?(row.key) }
        if group && !known_groups.include?(group.key)
          # First grouped message seen for this group: emit the group once.
          known_groups << group.key
          temp << serialize_group(group, row.key)
        elsif !matches_pattern
          temp << row
        end
        # else: grouped message whose group was already emitted — skipped.
      end
    end
    temp.reverse!
    if direction == -1
      results = temp + results
    else
      results += temp
    end
    # Slide the LRANGE window one page in the paging direction.
    start += limit * direction
    finish += limit * direction
    finish = -1 if finish > -1
  end while rows.length > 0 && results.length < limit && start < 0
  results
end
|
#protect(message_key) ⇒ Object
282
283
284
285
286
287
|
# File 'lib/logster/redis_store.rb', line 282
# Marks a message as protected so #clear and trimming keep it.
#
# @param message_key [String]
# @return [Object, nil] nil when the message does not exist
def protect(message_key)
  message = get(message_key, load_env: false)
  return unless message
  message.protected = true
  update_message(message)
end
|
#rate_limited?(ip_address, perform: false, limit: 60) ⇒ Boolean
344
345
346
347
348
349
350
351
352
353
354
355
356
357
|
# File 'lib/logster/redis_store.rb', line 344
# Checks (and optionally starts) a per-IP rate-limit window.
#
# @param ip_address [String]
# @param perform [Boolean] when true and not yet limited, start a window
# @param limit [Integer] window length in seconds
# @return [Boolean] whether the IP is currently rate limited
def rate_limited?(ip_address, perform: false, limit: 60)
  key = ip_rate_limit_key(ip_address)
  limited = @redis.call([:exists, key])
  # Older redis-rb returns true/false for EXISTS; newer returns an Integer
  # count — normalize the Integer form to a boolean.
  if Integer === limited
    limited = limited != 0
  end
  if perform && !limited
    # Empty sentinel value; only the key's TTL matters.
    @redis.setex key, limit, ""
  end
  limited
end
|
#rate_limits ⇒ Object
316
317
318
|
# File 'lib/logster/redis_store.rb', line 316
# @return [Hash] memoized registry of configured rate limiters.
def rate_limits
  @rate_limits ||= {}
end
|
#register_rate_limit_per_hour(severities, limit, &block) ⇒ Object
306
307
308
|
# File 'lib/logster/redis_store.rb', line 306
# Registers a per-hour (3600s window) rate limit for the given severities.
def register_rate_limit_per_hour(severities, limit, &block)
  register_rate_limit(severities, limit, 3600, block)
end
|
#register_rate_limit_per_minute(severities, limit, &block) ⇒ Object
302
303
304
|
# File 'lib/logster/redis_store.rb', line 302
# Registers a per-minute (60s window) rate limit for the given severities.
def register_rate_limit_per_minute(severities, limit, &block)
  register_rate_limit(severities, limit, 60, block)
end
|
#remove_ignore_count(pattern) ⇒ Object
336
337
338
|
# File 'lib/logster/redis_store.rb', line 336
# Removes the suppressed-log counter for an ignore pattern.
def remove_ignore_count(pattern)
  @redis.hdel(ignored_logs_count_key, pattern)
end
|
#remove_pattern(set_name, pattern) ⇒ Object
324
325
326
|
# File 'lib/logster/redis_store.rb', line 324
# Removes a serialized pattern from the named Redis set.
def remove_pattern(set_name, pattern)
  @redis.srem(set_name, [pattern])
end
|
#remove_pattern_group(pattern) ⇒ Object
395
396
397
|
# File 'lib/logster/redis_store.rb', line 395
# Deletes the stored group for a pattern; groups are keyed by pattern#inspect.
def remove_pattern_group(pattern)
  @redis.hdel(pattern_groups_key, pattern.inspect)
end
|
#replace_and_bump(message) ⇒ Object
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
|
# File 'lib/logster/redis_store.rb', line 72
# Overwrites an existing message and moves it to the newest end of the backlog.
#
# @param message [Message]
# @return [Boolean] false when the message no longer exists, true otherwise
def replace_and_bump(message)
  # NOTE(review): existence check and MULTI are not atomic — a concurrent
  # delete between them would recreate the hash entry.
  exists = @redis.hexists(hash_key, message.key)
  return false unless exists
  @redis.multi do |pipeline|
    # Env is stored separately; exclude it from the message JSON.
    pipeline.hset(hash_key, message.key, message.to_json(exclude_env: true))
    push_env(message.key, message.env_buffer, redis: pipeline) if message.has_env_buffer?
    # Remove and re-append so the key moves to the tail ("bump").
    pipeline.lrem(list_key, -1, message.key)
    pipeline.rpush(list_key, message.key)
  end
  message.env_buffer = [] if message.has_env_buffer?
  check_rate_limits(message.severity)
  true
end
|
#save(message) ⇒ Object
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
|
# File 'lib/logster/redis_store.rb', line 23
# Persists a new message unless one of its solved keys is marked solved.
#
# @param message [Message]
# @return [Boolean] false when suppressed as already solved, true otherwise
def save(message)
  if keys = message.solved_keys
    keys.each do |solved|
      # Skip saving entirely if this error has been marked solved.
      return false if @redis.hget(solved_key, solved)
    end
  end
  @redis.multi do |pipeline|
    pipeline.hset(grouping_key, message.grouping_key, message.key)
    pipeline.rpush(list_key, message.key)
    update_message(message, save_env: true, redis: pipeline)
  end
  # trim enforces @max_backlog on the list (defined elsewhere).
  trim
  check_rate_limits(message.severity)
  true
end
|
#save_pattern_group(group, redis: @redis) ⇒ Object
387
388
389
390
391
392
393
|
# File 'lib/logster/redis_store.rb', line 387
# Persists a pattern group; an empty group is deleted rather than stored.
#
# @param group [Logster::Group]
# @param redis [Redis] connection or pipeline to issue the command on
def save_pattern_group(group, redis: @redis)
  if group.messages_keys.empty?
    redis.hdel(pattern_groups_key, group.key)
  else
    redis.hset(pattern_groups_key, group.key, group.to_json)
  end
end
|
#similar_key(message) ⇒ Object
89
90
91
|
# File 'lib/logster/redis_store.rb', line 89
# @return [String, nil] key of an existing message with the same grouping key.
def similar_key(message)
  @redis.hget(grouping_key, message.grouping_key)
end
|
#solve(message_key) ⇒ Object
97
98
99
100
101
102
103
104
105
|
# File 'lib/logster/redis_store.rb', line 97
# Marks a message's solved keys as solved (with a unix timestamp), then
# clears matching messages via clear_solved (defined elsewhere).
#
# @param message_key [String]
def solve(message_key)
  if (message = get(message_key)) && (keys = message.solved_keys)
    keys.each do |s_key|
      @redis.hset(solved_key, s_key, Time.now.to_f.to_i)
    end
  end
  clear_solved
end
|
#solved ⇒ Object
298
299
300
|
# File 'lib/logster/redis_store.rb', line 298
# @return [Array<String>] all solved keys currently recorded.
def solved
  @redis.hkeys(solved_key) || []
end
|
#unprotect(message_key) ⇒ Object
289
290
291
292
293
294
295
296
|
# File 'lib/logster/redis_store.rb', line 289
# Clears the protected flag on a message.
#
# @param message_key [String]
# @raise [RuntimeError] when the message no longer exists
def unprotect(message_key)
  message = get(message_key, load_env: false)
  raise "Message already deleted" if !message
  message.protected = false
  update_message(message)
end
|