Class: Redis::TimeSeries::Info

Inherits:
Struct
  • Object
Defined in:
lib/redis/time_series/info.rb

Overview

The Info struct wraps the result of the TS.INFO command, providing method access to each property. It also applies some light parsing to the reply: camelCase property keys are converted to snake_case, and compaction rules are instantiated as Rule objects where present.

All properties of the struct are also available on a TimeSeries object itself via delegation.
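
A minimal usage sketch (the 'temperature' key and the sample values are hypothetical; it assumes the gem is installed and a RedisTimeSeries-enabled server is reachable on the default connection):

require 'redis-time-series' # gem/require name assumed

ts = Redis::TimeSeries.new('temperature')

info = ts.info                # assumes TimeSeries exposes its Info via #info
info.total_samples            # => 120   (snake_cased from "totalSamples")
info.labels                   # => {"sensor_id"=>2}
info.rules                    # => the series' compaction rules, as Rule objects

# The same properties can be read from the series itself via delegation:
ts.memory_usage               # => same value as info.memory_usage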

Instance Attribute Summary

  #chunk_count ⇒ Integer (readonly)
  #chunk_size ⇒ Integer (readonly)
  #chunk_type ⇒ String (readonly)
  #duplicate_policy ⇒ Object
  #first_timestamp ⇒ Integer (readonly)
  #labels ⇒ Hash (readonly)
  #last_timestamp ⇒ Integer (readonly)
  #max_samples_per_chunk ⇒ Integer (readonly)
  #memory_usage ⇒ Integer (readonly)
  #retention_time ⇒ Integer (readonly)
  #rules ⇒ Array<Rule> (readonly)
  #series ⇒ TimeSeries (readonly)
  #source_key ⇒ String? (readonly)
  #total_samples ⇒ Integer (readonly) (also: #count, #length, #size)

Class Method Summary

  .parse(series:, data:) ⇒ Info

Instance Method Summary

  #source ⇒ TimeSeries?

Instance Attribute Details

#chunk_count ⇒ Integer (readonly)

Returns the value of attribute chunk_count.

The source listing below is the full Info struct definition. It defines this and every other attribute in this section, so it is reproduced only once here.

# File 'lib/redis/time_series/info.rb', line 42

Info = Struct.new(
  :chunk_count,
  :chunk_size,
  :chunk_type,
  :duplicate_policy,
  :first_timestamp,
  :labels,
  :last_timestamp,
  :max_samples_per_chunk,
  :memory_usage,
  :retention_time,
  :rules,
  :series,
  :source_key,
  :total_samples,
  keyword_init: true
) do
  class << self
    # @api private
    # @return [Info]
    def parse(series:, data:)
      build_hash(data)
        .merge(series: series)
        .then(&method(:parse_labels))
        .then(&method(:parse_policies))
        .then(&method(:parse_rules))
        .then(&method(:new))
    end

    private

    def build_hash(data)
      data.each_slice(2).reduce({}) do |h, (key, value)|
        # Convert camelCase info keys to snake_case
        key = key.gsub(/(.)([A-Z])/,'\1_\2').downcase.to_sym
        # Skip unknown properties
        next h unless members.include?(key)
        h.merge(key => value)
      end
    end

    def parse_labels(hash)
      hash[:labels] = hash[:labels].to_h.transform_values { |v| v.to_i.to_s == v ? v.to_i : v }
      hash
    end

    def parse_policies(hash)
      hash[:duplicate_policy] = DuplicatePolicy.new(hash[:duplicate_policy]) if hash[:duplicate_policy]
      hash
    end

    def parse_rules(hash)
      hash[:rules] = hash[:rules].map { |d| Rule.new(source: hash[:series], data: d) }
      hash
    end
  end

  alias count total_samples
  alias length total_samples
  alias size total_samples

  # If this series is the destination of a compaction rule, returns the source series of the data.
  # @return [TimeSeries, nil] the series referred to by {source_key}
  def source
    return unless source_key
    @source ||= TimeSeries.new(source_key, redis: series.redis)
  end
end

#chunk_size ⇒ Integer (readonly)

Returns the value of attribute chunk_size.

#chunk_type ⇒ String (readonly)

Returns the value of attribute chunk_type.

#duplicate_policy ⇒ Object

Returns the value of attribute duplicate_policy.



# File 'lib/redis/time_series/info.rb', line 42

def duplicate_policy
  @duplicate_policy
end
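
Note that Info.parse wraps the raw policy value in a DuplicatePolicy object (see parse_policies in the struct source under #chunk_count), so the parsed attribute is not a plain String. A minimal sketch, with a hypothetical series key and the #info reader assumed as in the overview sketch:

Redis::TimeSeries.new('temperature').info.duplicate_policy
# => a Redis::TimeSeries::DuplicatePolicy instance
#    (left unchanged, typically nil, when the series reports no policy)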

#first_timestamp ⇒ Integer (readonly)

Returns the value of attribute first_timestamp.

#labels ⇒ Hash (readonly)

Returns the value of attribute labels. Integer-like label values are cast to Integers when the TS.INFO reply is parsed (see parse_labels in the struct source under #chunk_count); a standalone sketch of that coercion follows.
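
For reference, a standalone sketch of the value coercion applied by parse_labels (the hash contents are hypothetical): values that look like integers come back as Integers, anything else stays a String.

{ 'sensor_id' => '2', 'region' => 'us-east-1' }.transform_values do |v|
  v.to_i.to_s == v ? v.to_i : v
end
# => {"sensor_id"=>2, "region"=>"us-east-1"}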

#last_timestamp ⇒ Integer (readonly)

Returns the value of attribute last_timestamp.

#max_samples_per_chunk ⇒ Integer (readonly)

Returns the value of attribute max_samples_per_chunk.

#memory_usage ⇒ Integer (readonly)

Returns the value of attribute memory_usage.

#retention_time ⇒ Integer (readonly)

Returns the value of attribute retention_time.

#rules ⇒ Array<Rule> (readonly)

Returns the value of attribute rules. Compaction rules are instantiated as Rule objects when the TS.INFO reply is parsed (see parse_rules in the struct source under #chunk_count).

#series ⇒ TimeSeries (readonly)

Returns the value of attribute series: the TimeSeries object this Info describes. Unlike the other members, it is supplied by Info.parse rather than read from the TS.INFO reply.

#source_key ⇒ String? (readonly)

Returns the value of attribute source_key: the key of the source series when this series is the destination of a compaction rule (see #source).

#total_samples ⇒ Integer (readonly) Also known as: count, length, size

Returns the value of attribute total_samples.

Class Method Details

.parse(series:, data:) ⇒ Info

This method is part of a private API. You should avoid using this method if possible, as it may be removed or be changed in the future.



# File 'lib/redis/time_series/info.rb', line 62

def parse(series:, data:)
  build_hash(data)
    .merge(series: series)
    .then(&method(:parse_labels))
    .then(&method(:parse_policies))
    .then(&method(:parse_rules))
    .then(&method(:new))
end
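
The private build_hash helper turns the flat key/value array returned by TS.INFO into a Hash keyed by snake_case symbols, dropping any keys that are not struct members. A standalone sketch of the key conversion (the input keys mirror typical TS.INFO field names):

%w[chunkCount maxSamplesPerChunk retentionTime sourceKey].map do |key|
  key.gsub(/(.)([A-Z])/, '\1_\2').downcase.to_sym
end
# => [:chunk_count, :max_samples_per_chunk, :retention_time, :source_key]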

Instance Method Details

#source ⇒ TimeSeries?

If this series is the destination of a compaction rule, returns the source series of the data.



# File 'lib/redis/time_series/info.rb', line 105

def source
  return unless source_key
  @source ||= TimeSeries.new(source_key, redis: series.redis)
end
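
A minimal sketch, assuming 'temperature_avg' was created as the destination of a compaction rule whose source is 'temperature' (both keys are hypothetical, and the #info reader is assumed as in the overview sketch):

avg = Redis::TimeSeries.new('temperature_avg')

avg.info.source_key   # => "temperature"
avg.info.source       # => a Redis::TimeSeries wrapping "temperature", built with
                      #    the same redis connection as the destination series

# For a series that is not a compaction destination, the
# `return unless source_key` guard makes #source return nil.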