Module: NetworkResiliency::StatsEngine

Extended by:
StatsEngine
Included in:
StatsEngine
Defined in:
lib/network_resiliency/stats_engine.rb

Constant Summary collapse

LOCK =
Thread::Mutex.new
STATS =
{}
SYNC_LIMIT =
100

Instance Method Summary collapse

Instance Method Details

#add(key, value) ⇒ Object



9
10
11
12
13
14
15
# File 'lib/network_resiliency/stats_engine.rb', line 9

# Record a locally observed value for +key+.
#
# The [ local, remote ] Stats pair is lazily created while holding
# the lock; the value itself is appended to the local stats after
# the lock has been released.
def add(key, value)
  pair = synchronize do
    STATS[key] ||= [ Stats.new, Stats.new ]
  end

  pair.first << value
end

#get(key) ⇒ Object



17
18
19
20
21
22
23
# File 'lib/network_resiliency/stats_engine.rb', line 17

# Fetch the combined stats for +key+: locally accumulated samples
# merged with the most recently synced remote stats. Lazily creates
# an empty [ local, remote ] pair for unseen keys.
def get(key)
  pair = synchronize do
    STATS[key] ||= [ Stats.new, Stats.new ]
  end

  pair.first + pair.last
end

#reset ⇒ Object



25
26
27
# File 'lib/network_resiliency/stats_engine.rb', line 25

# Discard every key's stats, local and remote alike.
def reset
  synchronize do
    STATS.clear
  end
end

#sync(redis) ⇒ Object



29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
# File 'lib/network_resiliency/stats_engine.rb', line 29

# Flush locally accumulated stats to redis and refresh the cached
# remote stats with the aggregated values redis returns.
#
# Concurrency: only one sync runs at a time. The @syncing flag is
# claimed under the lock; a concurrent caller returns [] immediately.
# The flag is released in the ensure clause, and only by the thread
# that claimed it.
#
# At most SYNC_LIMIT keys are synced per call, prioritized by local
# sample count; keys with no new local data that already have remote
# stats are skipped entirely.
#
# @param redis [Object] connection passed through to Stats.sync
# @return [Array] keys whose remote stats were refreshed; [] when
#   there was nothing to sync or another sync was already running
def sync(redis)
  dirty_keys = {}

  # select data to be synced
  data = synchronize do
    # ensure sync does not run concurrently
    # (early return from inside the lock is safe: Mutex#synchronize
    # releases in an ensure)
    return [] if @syncing
    @syncing = Thread.current

    # key => number of pending local samples
    dirty_keys = STATS.map do |key, (local, remote)|
      # skip if no new local stats and remote already synced
      next if local.n == 0 && remote.n > 0

      [ key, local.n ]
    end.compact.to_h

    # select keys to sync, prioritizing most used
    keys = dirty_keys.sort_by do |key, weight|
      -weight
    end.take(SYNC_LIMIT).map(&:first)

    # update stats for keys being synced
    keys.map do |key|
      local, remote = STATS[key]

      remote << local # update remote stats until sync completes
      STATS[key][0] = Stats.new # reset local stats

      [ key, local ]
    end.to_h
  end

  # telemetry: how many keys were selected, whether the selection was
  # empty, and whether it was truncated by SYNC_LIMIT
  NetworkResiliency.statsd&.distribution(
    "network_resiliency.sync.keys",
    data.size,
    tags: {
      empty: data.empty?,
      truncated: data.size < dirty_keys.size,
    }.select { |_, v| v },
    sample_rate: SAMPLE_RATE[:sync],
  )

  # telemetry: keys that actually had pending local samples
  NetworkResiliency.statsd&.distribution(
    "network_resiliency.sync.keys.dirty",
    dirty_keys.select { |_, n| n > 0 }.count,
    sample_rate: SAMPLE_RATE[:sync],
  )

  return [] if data.empty?

  # sync data to redis
  # (timed only when statsd is configured; the branch avoids the
  # instrumentation wrapper otherwise)
  remote_stats = if NetworkResiliency.statsd
    NetworkResiliency.statsd&.time("network_resiliency.sync") do
      Stats.sync(redis, **data)
    end
  else
    Stats.sync(redis, **data)
  end

  # integrate new remote stats
  synchronize do
    remote_stats.each do |key, stats|
      local, remote = STATS[key]

      # replace the provisional remote merge (done above) with the
      # authoritative aggregate returned by redis; local is unused here
      remote.reset
      remote << stats
    end
  end

  remote_stats.keys
ensure
  # release sync lock
  @syncing = nil if @syncing == Thread.current
end

#syncing? ⇒ Boolean

Returns:

  • (Boolean)


104
105
106
# File 'lib/network_resiliency/stats_engine.rb', line 104

# Report whether a sync is currently in progress, i.e. whether some
# thread has claimed the @syncing lock set by #sync.
#
# @return [Boolean]
def syncing?
  @syncing ? true : false
end