Class: Logster::RedisStore
Constant Summary
collapse
- ENV_PREFIX =
"logster-env-"
- BULK_ENV_GET_LUA =
<<~LUA
local results = {};
for i = 1, table.getn(KEYS), 1 do
results[i] = { KEYS[i], redis.call('LRANGE', KEYS[i], 0, -1) };
end
return results;
LUA
Instance Attribute Summary collapse
Attributes inherited from BaseStore
#allow_custom_patterns, #ignore, #level, #max_retention, #skip_empty
Instance Method Summary
collapse
-
#bulk_delete(message_keys, grouping_keys) ⇒ Object
-
#bulk_get(message_keys, with_env: true) ⇒ Object
-
#clear ⇒ Object
-
#clear_all ⇒ Object
Delete everything, including protected messages (use in tests).
-
#count ⇒ Object
-
#delete(msg) ⇒ Object
-
#find_pattern_groups(load_messages: false) ⇒ Object
-
#get(message_key, load_env: true) ⇒ Object
-
#get_all_ignore_count ⇒ Object
-
#get_all_messages(with_env: true) ⇒ Object
-
#get_env(message_key) ⇒ Object
-
#get_patterns(set_name) ⇒ Object
-
#increment_ignore_count(pattern) ⇒ Object
-
#initialize(redis = nil) ⇒ RedisStore
constructor
A new instance of RedisStore.
-
#insert_pattern(set_name, pattern) ⇒ Object
-
#latest(opts = {}) ⇒ Object
-
#protect(message_key) ⇒ Object
-
#rate_limited?(ip_address, perform: false, limit: 60) ⇒ Boolean
-
#rate_limits ⇒ Object
-
#register_rate_limit_per_hour(severities, limit, &block) ⇒ Object
-
#register_rate_limit_per_minute(severities, limit, &block) ⇒ Object
-
#remove_ignore_count(pattern) ⇒ Object
-
#remove_pattern(set_name, pattern) ⇒ Object
-
#remove_pattern_group(pattern) ⇒ Object
-
#replace_and_bump(message) ⇒ Object
-
#save(message) ⇒ Object
-
#save_pattern_group(group, redis: @redis) ⇒ Object
-
#similar_key(message) ⇒ Object
-
#solve(message_key) ⇒ Object
-
#solved ⇒ Object
-
#unprotect(message_key) ⇒ Object
Methods inherited from BaseStore
#clear_patterns_cache, #report
Constructor Details
#initialize(redis = nil) ⇒ RedisStore
Returns a new instance of RedisStore.
15
16
17
18
19
20
21
|
# File 'lib/logster/redis_store.rb', line 15
# Builds a store backed by the given Redis client.
#
# @param redis [Redis, nil] client to use; a fresh default Redis
#   connection is created when nil
def initialize(redis = nil)
  super()
  @redis = redis || Redis.new
  @max_backlog = 1000 # maximum number of messages retained in the list
  @redis_prefix = nil # optional namespace (string or callable); see #redis_prefix
  @redis_raw_connection = nil
end
|
Instance Attribute Details
#max_backlog ⇒ Object
Returns the value of attribute max_backlog.
12
13
14
|
# File 'lib/logster/redis_store.rb', line 12
# Maximum number of messages kept in the backlog list (defaults to 1000).
def max_backlog
  @max_backlog
end
|
#redis ⇒ Object
Returns the value of attribute redis.
12
13
14
|
# File 'lib/logster/redis_store.rb', line 12
# The underlying Redis client this store operates on.
def redis
  @redis
end
|
#redis_prefix ⇒ Object
309
310
311
312
313
|
# File 'lib/logster/redis_store.rb', line 309
# Resolves the key namespace for this store.
#
# @return [String] "default" when no prefix is configured; otherwise the
#   configured prefix, calling it first when it is a callable
def redis_prefix
  prefix = @redis_prefix
  return "default".freeze unless prefix
  # ||= only sticks once true; a false result is re-checked on each call,
  # which keeps a later switch to a callable prefix working
  @prefix_is_proc ||= prefix.respond_to?(:call)
  @prefix_is_proc ? prefix.call : prefix
end
|
#redis_raw_connection ⇒ Object
Returns the value of attribute redis_raw_connection.
12
13
14
|
# File 'lib/logster/redis_store.rb', line 12
# Optional raw Redis connection, settable by the embedding application.
def redis_raw_connection
  @redis_raw_connection
end
|
Instance Method Details
#bulk_delete(message_keys, grouping_keys) ⇒ Object
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
|
# File 'lib/logster/redis_store.rb', line 54
# Deletes many messages in one MULTI transaction: prunes them from any
# pattern groups, then removes their hash/grouping entries, list entries
# and stored env payloads.
#
# @param message_keys [Array<String>] keys of the messages to delete
# @param grouping_keys [Array<String>] grouping-hash fields to delete
def bulk_delete(message_keys, grouping_keys)
  groups = find_pattern_groups(load_messages: true)
  @redis.multi do |pipeline|
    groups.each do |group|
      group.messages = group.messages.reject { |m| message_keys.include?(m.key) }
      save_pattern_group(group, redis: pipeline) if group.changed?
    end
    pipeline.hdel(hash_key, message_keys)
    pipeline.hdel(grouping_key, grouping_keys)
    message_keys.each do |k|
      # count -1 removes the last (most recent) occurrence from the list
      pipeline.lrem(list_key, -1, k)
      delete_env(k, redis: pipeline)
    end
  end
end
|
#bulk_get(message_keys, with_env: true) ⇒ Object
242
243
244
245
246
247
248
249
250
251
252
253
254
255
256
257
258
259
260
261
262
263
264
265
266
267
268
269
270
271
272
273
|
# File 'lib/logster/redis_store.rb', line 242
# Fetches many messages at once, optionally hydrating their env payloads
# via a single Lua round trip instead of one LRANGE call per message.
#
# @param message_keys [Array<String>, nil] message keys to load
# @param with_env [Boolean] when true, attach stored env data to each message
# @return [Array<Message>] the found messages (missing keys are skipped)
def bulk_get(message_keys, with_env: true)
  return [] if !message_keys || message_keys.size == 0
  envs = nil
  if with_env
    envs = {}
    # BULK_ENV_GET_LUA returns [[key, [env_json, ...]], ...] for every key
    @redis
      .eval(
        BULK_ENV_GET_LUA,
        keys: message_keys.map { |k| env_prefix(k, with_namespace: true) },
      )
      .to_h
      .each do |k, v|
        next if v.size == 0
        # a single stored env parses to a hash, multiple to an array of hashes
        parsed = v.size == 1 ? JSON.parse(v[0]) : v.map { |e| JSON.parse(e) }
        envs[env_unprefix(k, with_namespace: true)] = parsed
      end
  end
  messages =
    @redis
      .hmget(hash_key, message_keys)
      .map! do |json|
        next if !json || json.size == 0
        message = Message.from_json(json)
        if with_env && envs
          env = envs[message.key]
          # only overwrite when the message carries no inline env of its own
          message.env = env || {} if !message.env || message.env.size == 0
        end
        message
      end
  messages.compact!
  messages
end
|
#clear ⇒ Object
161
162
163
164
165
166
167
168
169
170
171
172
173
174
175
176
177
178
179
180
181
182
183
184
185
186
187
188
189
190
191
192
193
194
195
196
197
198
199
200
201
|
# File 'lib/logster/redis_store.rb', line 161
# Clears rate-limit state, solved keys and all unprotected messages;
# protected messages survive and the message list is rebuilt from them.
def clear
  RedisRateLimiter.clear_all(@redis)
  @redis.del(solved_key)
  all_keys = @redis.lrange(list_key, 0, -1)
  @redis.del(list_key)
  protected_keys = @redis.smembers(protected_key) || []
  if protected_keys.empty?
    # nothing to preserve: drop every message, env and group wholesale
    @redis.del(hash_key)
    all_keys.each { |k| delete_env(k) }
    @redis.del(pattern_groups_key)
    @redis.del(grouping_key)
  else
    # snapshot the protected messages, wipe the hash, then restore them
    protected_messages = @redis.mapped_hmget(hash_key, *protected_keys)
    @redis.del(hash_key)
    @redis.mapped_hmset(hash_key, protected_messages)
    (all_keys - protected_keys).each { |k| delete_env(k) }
    # rebuild the ordered key list from the survivors; entries that fail
    # to parse are silently discarded
    sorted =
      protected_messages
        .values
        .map do |string|
          begin
            Message.from_json(string)
          rescue StandardError
            nil
          end
        end
        .compact
        .sort
        .map(&:key)
    @redis.pipelined do |pipeline|
      sorted.each { |message_key| pipeline.rpush(list_key, message_key) }
    end
    # prune pattern-group membership down to the surviving messages
    find_pattern_groups(load_messages: true).each do |group|
      group.messages = group.messages.select { |m| sorted.include?(m.key) }
      save_pattern_group(group) if group.changed?
    end
  end
end
|
#clear_all ⇒ Object
Delete everything, including protected messages (use in tests)
205
206
207
208
209
210
211
212
213
214
215
216
217
218
219
|
# File 'lib/logster/redis_store.rb', line 205
# Deletes everything, including protected messages (use in tests).
def clear_all
  @redis.lrange(list_key, 0, -1).each { |k| delete_env(k) }
  @redis.del(list_key)
  @redis.del(protected_key)
  @redis.del(hash_key)
  @redis.del(grouping_key)
  @redis.del(solved_key)
  @redis.del(ignored_logs_count_key)
  @redis.del(pattern_groups_key)
  Logster::Pattern.child_classes.each { |klass| @redis.del(klass.set_name) }
  # NOTE(review): KEYS scans the entire keyspace and blocks Redis — only
  # acceptable here because this method is intended for test environments
  @redis.keys.each do |key|
    @redis.del(key) if key.include?(Logster::RedisRateLimiter::PREFIX)
    @redis.del(key) if key.start_with?(ip_rate_limit_key(""))
  end
end
|
#count ⇒ Object
91
92
93
|
# File 'lib/logster/redis_store.rb', line 91
# Number of messages currently in the backlog list.
def count
  @redis.llen(list_key)
end
|
#delete(msg) ⇒ Object
40
41
42
43
44
45
46
47
48
49
50
51
52
|
# File 'lib/logster/redis_store.rb', line 40
# Deletes a single message atomically: removes it from any pattern group
# whose pattern matches its text, then from the message hash, env store,
# grouping hash and ordered list.
#
# @param msg [Message] the message to remove
def delete(msg)
  matching_groups = find_pattern_groups { |pattern| msg.message =~ pattern }
  @redis.multi do |pipeline|
    matching_groups.each do |group|
      group.remove_message(msg)
      save_pattern_group(group, redis: pipeline) if group.changed?
    end
    pipeline.hdel(hash_key, msg.key)
    delete_env(msg.key, redis: pipeline)
    pipeline.hdel(grouping_key, msg.grouping_key)
    pipeline.lrem(list_key, -1, msg.key)
  end
end
|
#find_pattern_groups(load_messages: false) ⇒ Object
354
355
356
357
358
359
360
361
362
363
364
365
366
367
368
369
370
371
372
373
374
375
376
377
378
379
380
|
# File 'lib/logster/redis_store.rb', line 354
# Loads all grouping-pattern groups, optionally filtered by a block over
# the patterns and optionally with their member messages hydrated.
#
# @param load_messages [Boolean] when true, populate each group's messages
# @yield [pattern] optional filter; only matching patterns are loaded
# @return [Array<Logster::Group>]
def find_pattern_groups(load_messages: false)
  patterns =
    @patterns_cache.fetch(Logster::GroupingPattern::CACHE_KEY) do
      Logster::GroupingPattern.find_all(store: self)
    end
  patterns =
    patterns.select do |pattern|
      if block_given?
        yield(pattern)
      else
        true
      end
    end
  return [] if patterns.size == 0
  # groups are stored keyed by the pattern's #inspect representation
  mapped = patterns.map(&:inspect)
  jsons = @redis.hmget(pattern_groups_key, mapped)
  jsons.map! do |json|
    if json && json.size > 0
      group = Logster::Group.from_json(json)
      # map the group back to its pattern object via the inspect string
      group.pattern = patterns[mapped.index(group.key)]
      group.messages = bulk_get(group.messages_keys, with_env: false) if load_messages
      group
    end
  end
  jsons.compact!
  jsons
end
|
#get(message_key, load_env: true) ⇒ Object
221
222
223
224
225
226
227
228
|
# File 'lib/logster/redis_store.rb', line 221
# Loads a single message by key.
#
# @param message_key [String]
# @param load_env [Boolean] when true, also fetch the stored env payload
# @return [Message, nil] nil when the key is unknown
def get(message_key, load_env: true)
  json = @redis.hget(hash_key, message_key)
  return nil if json.nil?
  message = Message.from_json(json)
  if load_env
    message.env = get_env(message_key) || {}
  end
  message
end
|
#get_all_ignore_count ⇒ Object
339
340
341
|
# File 'lib/logster/redis_store.rb', line 339
# Per-pattern counts of ignored log messages, as a Hash of pattern => count.
def get_all_ignore_count
  @redis.hgetall(ignored_logs_count_key)
end
|
#get_all_messages(with_env: true) ⇒ Object
230
231
232
|
# File 'lib/logster/redis_store.rb', line 230
# Loads every message currently in the backlog, oldest first.
#
# @param with_env [Boolean] when true, attach stored env data
# @return [Array<Message>]
def get_all_messages(with_env: true)
  all_keys = @redis.lrange(list_key, 0, -1)
  bulk_get(all_keys, with_env: with_env)
end
|
#get_env(message_key) ⇒ Object
275
276
277
278
279
|
# File 'lib/logster/redis_store.rb', line 275
# Loads the stored env payload(s) for a message.
#
# @param message_key [String]
# @return [Hash, Array<Hash>, nil] a single parsed env, an array when
#   several are stored, or nil when none exist
def get_env(message_key)
  rows = @redis.lrange(env_prefix(message_key), 0, -1)
  return nil if !rows || rows.empty?
  return JSON.parse(rows.first) if rows.size == 1
  rows.map { |json| JSON.parse(json) }
end
|
#get_patterns(set_name) ⇒ Object
327
328
329
|
# File 'lib/logster/redis_store.rb', line 327
# All patterns stored in the given Redis set.
def get_patterns(set_name)
  @redis.smembers(set_name)
end
|
#increment_ignore_count(pattern) ⇒ Object
331
332
333
|
# File 'lib/logster/redis_store.rb', line 331
# Increments the ignored-message counter for the given pattern by one.
def increment_ignore_count(pattern)
  @redis.hincrby(ignored_logs_count_key, pattern, 1)
end
|
#insert_pattern(set_name, pattern) ⇒ Object
319
320
321
|
# File 'lib/logster/redis_store.rb', line 319
# Adds a pattern to the given Redis set.
def insert_pattern(set_name, pattern)
  @redis.sadd(set_name, [pattern])
end
|
#latest(opts = {}) ⇒ Object
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
122
123
124
125
126
127
128
129
130
131
132
133
134
135
136
137
138
139
140
141
142
143
144
145
146
147
148
149
150
151
152
153
154
155
156
157
158
159
|
# File 'lib/logster/redis_store.rb', line 103
# Returns the latest messages honoring filters and key-based paging.
#
# @param opts [Hash]
# @option opts [Integer] :limit page size (default 50)
# @option opts [Array] :severity allowed severity levels
# @option opts [String] :before only messages older than this key
# @option opts [String] :after only messages newer than this key
# @option opts :search text/pattern filter applied via filter_search
# @option opts [Boolean] :with_env load env payloads (default true)
# @option opts [Array] :known_groups group keys the client already has;
#   those groups are not re-emitted
def latest(opts = {})
  limit = opts[:limit] || 50
  severity = opts[:severity]
  before = opts[:before]
  after = opts[:after]
  search = opts[:search]
  with_env = opts.key?(:with_env) ? opts[:with_env] : true
  known_groups = opts[:known_groups]&.dup || []
  start, finish = find_location(before, after, limit)
  return [] unless start && finish
  results = []
  pattern_groups = find_pattern_groups(load_messages: true)
  # walk forwards through the list when paging after a key, backwards otherwise
  direction = after ? 1 : -1
  begin
    keys = @redis.lrange(list_key, start, finish) || []
    break if !keys || keys.count <= 0
    # reverse so the newest message of the window is processed first
    rows = bulk_get(keys, with_env: with_env).reverse
    temp = []
    rows.each do |row|
      break if before && before == row.key
      row = nil if severity && !severity.include?(row.severity)
      row = filter_search(row, search)
      if row
        matches_pattern = pattern_groups.any? { |g| row.message =~ g.pattern }
        group = pattern_groups.find { |g| g.messages_keys.include?(row.key) }
        if group && !known_groups.include?(group.key)
          # emit the whole group once, represented by this row
          known_groups << group.key
          temp << serialize_group(group, row.key)
        elsif !matches_pattern
          # grouped-but-already-known rows are suppressed entirely
          temp << row
        end
      end
    end
    temp.reverse!
    if direction == -1
      results = temp + results
    else
      results += temp
    end
    # slide the window one page in the direction of travel
    start += limit * direction
    finish += limit * direction
    finish = -1 if finish > -1
  end while rows.length > 0 && results.length < limit && start < 0
  results
end
|
#protect(message_key) ⇒ Object
281
282
283
284
285
286
|
# File 'lib/logster/redis_store.rb', line 281
# Marks a message as protected so it survives #clear.
#
# @param message_key [String]
def protect(message_key)
  message = get(message_key, load_env: false)
  return unless message
  message.protected = true
  update_message(message)
end
|
#rate_limited?(ip_address, perform: false, limit: 60) ⇒ Boolean
343
344
345
346
347
348
349
350
351
352
|
# File 'lib/logster/redis_store.rb', line 343
# Checks (and optionally starts) a per-IP rate-limit window.
#
# @param ip_address [String]
# @param perform [Boolean] when true, open a limit window if none is active
# @param limit [Integer] window length in seconds
# @return [Boolean] true if the IP is currently rate limited
def rate_limited?(ip_address, perform: false, limit: 60)
  key = ip_rate_limit_key(ip_address)
  # raw EXISTS: newer redis clients return an Integer, some older ones a
  # Boolean — normalize the Integer case below
  limited = @redis.call([:exists, key])
  limited = limited != 0 if Integer === limited
  @redis.setex key, limit, "" if perform && !limited
  limited
end
|
#rate_limits ⇒ Object
315
316
317
|
# File 'lib/logster/redis_store.rb', line 315
# Registered rate limiters, lazily initialized per store instance.
def rate_limits
  @rate_limits ||= {}
end
|
#register_rate_limit_per_hour(severities, limit, &block) ⇒ Object
305
306
307
|
# File 'lib/logster/redis_store.rb', line 305
# Registers a limit of `limit` messages per hour for the given severities.
def register_rate_limit_per_hour(severities, limit, &block)
  register_rate_limit(severities, limit, 3600, block)
end
|
#register_rate_limit_per_minute(severities, limit, &block) ⇒ Object
301
302
303
|
# File 'lib/logster/redis_store.rb', line 301
# Registers a limit of `limit` messages per minute for the given severities.
def register_rate_limit_per_minute(severities, limit, &block)
  register_rate_limit(severities, limit, 60, block)
end
|
#remove_ignore_count(pattern) ⇒ Object
335
336
337
|
# File 'lib/logster/redis_store.rb', line 335
# Removes the ignored-message counter for the given pattern.
def remove_ignore_count(pattern)
  @redis.hdel(ignored_logs_count_key, pattern)
end
|
#remove_pattern(set_name, pattern) ⇒ Object
323
324
325
|
# File 'lib/logster/redis_store.rb', line 323
# Removes a pattern from the given Redis set.
def remove_pattern(set_name, pattern)
  @redis.srem(set_name, [pattern])
end
|
#remove_pattern_group(pattern) ⇒ Object
390
391
392
|
# File 'lib/logster/redis_store.rb', line 390
# Deletes the stored group for a pattern; groups are keyed by the
# pattern's #inspect representation (matching #find_pattern_groups).
def remove_pattern_group(pattern)
  @redis.hdel(pattern_groups_key, pattern.inspect)
end
|
#replace_and_bump(message) ⇒ Object
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
|
# File 'lib/logster/redis_store.rb', line 70
# Re-saves an existing message and bumps it to the most-recent position
# in the list.
#
# @param message [Message]
# @return [Boolean] true when updated, false when the key no longer exists
def replace_and_bump(message)
  exists = @redis.hexists(hash_key, message.key)
  return false unless exists
  @redis.multi do |pipeline|
    pipeline.hset(hash_key, message.key, message.to_json(exclude_env: true))
    push_env(message.key, message.env_buffer, redis: pipeline) if message.has_env_buffer?
    # remove then re-append so the key moves to the tail (newest) position
    pipeline.lrem(list_key, -1, message.key)
    pipeline.rpush(list_key, message.key)
  end
  # the buffered env was flushed inside the transaction above
  message.env_buffer = [] if message.has_env_buffer?
  check_rate_limits(message.severity)
  true
end
|
#save(message) ⇒ Object
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
|
# File 'lib/logster/redis_store.rb', line 23
# Persists a new message unless one of its solved keys is marked solved.
#
# @param message [Message]
# @return [Boolean] true when saved, false when suppressed as solved
def save(message)
  if keys = message.solved_keys
    # suppress messages whose fingerprint was previously marked solved
    keys.each { |solved| return false if @redis.hget(solved_key, solved) }
  end
  @redis.multi do |pipeline|
    pipeline.hset(grouping_key, message.grouping_key, message.key)
    pipeline.rpush(list_key, message.key)
    update_message(message, save_env: true, redis: pipeline)
  end
  # enforce max_backlog after the insert
  trim
  check_rate_limits(message.severity)
  true
end
|
#save_pattern_group(group, redis: @redis) ⇒ Object
382
383
384
385
386
387
388
|
# File 'lib/logster/redis_store.rb', line 382
# Persists a pattern group, or deletes its stored entry when the group
# no longer holds any message keys.
#
# @param group [Logster::Group]
# @param redis the connection or pipeline to issue commands on
def save_pattern_group(group, redis: @redis)
  if group.messages_keys.empty?
    redis.hdel(pattern_groups_key, group.key)
  else
    redis.hset(pattern_groups_key, group.key, group.to_json)
  end
end
|
#similar_key(message) ⇒ Object
87
88
89
|
# File 'lib/logster/redis_store.rb', line 87
# Key of an existing message with the same grouping key, if any.
def similar_key(message)
  @redis.hget(grouping_key, message.grouping_key)
end
|
#solve(message_key) ⇒ Object
95
96
97
98
99
100
101
|
# File 'lib/logster/redis_store.rb', line 95
# Marks every solved key of the given message as solved (stamped with the
# current epoch time), then purges matching messages via clear_solved.
#
# @param message_key [String]
def solve(message_key)
  message = get(message_key)
  solved_keys = message&.solved_keys
  solved_keys&.each { |s_key| @redis.hset(solved_key, s_key, Time.now.to_f.to_i) }
  clear_solved
end
|
#solved ⇒ Object
297
298
299
|
# File 'lib/logster/redis_store.rb', line 297
# All keys that have been marked solved (empty array when none).
def solved
  @redis.hkeys(solved_key) || []
end
|
#unprotect(message_key) ⇒ Object
288
289
290
291
292
293
294
295
|
# File 'lib/logster/redis_store.rb', line 288
# Clears the protected flag on a message so #clear may remove it.
#
# @param message_key [String]
# @raise [RuntimeError] when the message no longer exists
def unprotect(message_key)
  message = get(message_key, load_env: false)
  raise "Message already deleted" unless message
  message.protected = false
  update_message(message)
end
|