Class: LogStash::Outputs::TreasureData

Inherits: Base < Object
Includes: Stud::Buffer
Defined in: lib/logstash/outputs/treasure_data.rb

Constant Summary

IMPORT_SIZE_LIMIT = 32 * 1024 * 1024
RECORD_KEYS_LIMIT = 512
RECORD_SIZE_LIMIT = 32 * 1024 * 1024
UUID_FORMAT = /\A[0-9a-f]{8}-[0-9a-f]{4}-[0-9a-f]{4}-[0-9a-f]{4}-[0-9a-f]{12}\z/
VERSION = Gem::Specification.load(File.expand_path('../../../../logstash-output-treasure_data.gemspec', __FILE__)).version
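
RECORD_KEYS_LIMIT and RECORD_SIZE_LIMIT bound each record in #receive below; IMPORT_SIZE_LIMIT presumably bounds a whole import chunk. UUID_FORMAT recognizes the lowercase hyphenated UUIDs used to tag chunks. A minimal sketch of that tagging check, assuming the uuid gem (which the plugin drives via UUID.new):

require 'uuid'

UUID_FORMAT = /\A[0-9a-f]{8}-[0-9a-f]{4}-[0-9a-f]{4}-[0-9a-f]{4}-[0-9a-f]{12}\z/

chunk_id = UUID.new.generate        # e.g. "2d931510-d99f-494a-8c67-87feb05e1594"
UUID_FORMAT.match?(chunk_id)        # => true
UUID_FORMAT.match?('not-a-uuid')    # => false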

Instance Method Summary

Instance Method Details

#close ⇒ Object



# File 'lib/logstash/outputs/treasure_data.rb', line 164

def close
  buffer_flush(final: true)
end

#ensure_table_exists(client, database, table) ⇒ Object



# File 'lib/logstash/outputs/treasure_data.rb', line 142

def ensure_table_exists(client, database, table)
  begin
    client.create_log_table(database, table)
  rescue TreasureData::NotFoundError => e
    client.create_database(database)
    client.create_log_table(database, table)
  rescue TreasureData::AlreadyExistsError => e
    # ignore
  end
end
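
The rescue clauses come from the td-client gem: TreasureData::NotFoundError signals that the database itself is missing, so it is created and table creation retried, while TreasureData::AlreadyExistsError means another writer created the table concurrently and can be safely ignored. A standalone sketch of the same pattern, assuming the td-client gem and an API key in TD_API_KEY (database and table names here are illustrative):

require 'td-client'

client = TreasureData::Client.new(ENV['TD_API_KEY'])
begin
  client.create_log_table('production', 'events')
rescue TreasureData::NotFoundError
  # the database was missing: create it, then retry the table
  client.create_database('production')
  client.create_log_table('production', 'events')
rescue TreasureData::AlreadyExistsError
  # table (or database) already exists; nothing to do
end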

#flush(events, teardown = false) ⇒ Object



# File 'lib/logstash/outputs/treasure_data.rb', line 104

def flush(events, teardown = false)
  @logger.debug "flushing #{events} events (may include chunk uuid)"
  return if events.size < 1
  if UUID_FORMAT !~ events.first
    new_uuid = @uuid.generate
    @logger.debug "adding chunk uuid #{new_uuid}"
    events.unshift new_uuid
  end

  uuid = events.first
  io = StringIO.new
  @logger.debug "gzipping chunk #{uuid}"
  Zlib::GzipWriter.wrap(io){|f|
    events.each do |row|
      unless row == uuid
        f.write row
      end
    end
    f.finish
  }
  data = io.string
  @logger.debug "sending gzipped chunk #{uuid}, #{data.bytesize} bytes"
  begin
    @client.import(@database, @table, "msgpack.gz", data, data.bytesize, uuid)

  rescue TreasureData::NotFoundError => e
    raise unless @auto_create_table

    @logger.info "creating missing table #{@table} on database #{@database} for chunk #{uuid}"
    ensure_table_exists(@client, @database, @table)
    @logger.info "retrying upload chunk #{uuid}"
    retry
  end

  @logger.debug "done #{uuid}"
end
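
The payload handed to Client#import is simply the msgpack rows concatenated and gzipped; the chunk UUID travels separately as td-client's unique_id argument, which appears to let the service deduplicate a chunk that is retried after ensure_table_exists. A minimal sketch of the same payload assembly, assuming the msgpack gem (rows here are illustrative):

require 'zlib'
require 'stringio'
require 'msgpack'

rows = [
  { 'time' => Time.now.to_i, 'message' => 'first'  }.to_msgpack,
  { 'time' => Time.now.to_i, 'message' => 'second' }.to_msgpack
]

io = StringIO.new
Zlib::GzipWriter.wrap(io) do |gz|
  rows.each { |row| gz.write(row) }
  gz.finish
end

data = io.string
# data is now an msgpack.gz payload of data.bytesize bytes,
# ready to pass to TreasureData::Client#import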

#on_flush_error(e) ⇒ Object



# File 'lib/logstash/outputs/treasure_data.rb', line 154

def on_flush_error(e)
  @logger.warn "flush error #{e.class}: #{e.message}"
end

#on_full_buffer_error(opts = {}) ⇒ Object



# File 'lib/logstash/outputs/treasure_data.rb', line 159

def on_full_buffer_error(opts={})
  @logger.warn "buffer exceeds limits: pending:#{opts[:pending_count]}, outgoing:#{opts[:outgoing_count]}"
end
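
#close, #flush, #on_flush_error, and #on_full_buffer_error together form this class's contract with Stud::Buffer: buffer_receive enqueues rows, Stud::Buffer batches them and invokes #flush, and the error hooks are called on failure. A minimal sketch of that contract in isolation, assuming the stud gem (class name and values are illustrative):

require 'stud/buffer'

class TinySink
  include Stud::Buffer

  def initialize
    buffer_initialize(max_items: 100, max_interval: 5)
  end

  def push(item)
    buffer_receive(item)          # enqueue; flushed in batches
  end

  def flush(items, teardown = false)
    puts "flushing #{items.size} items"
  end

  def on_flush_error(e)           # invoked when #flush raises
    warn "flush failed: #{e.class}: #{e.message}"
  end

  def close
    buffer_flush(final: true)     # drain everything on shutdown
  end
end

sink = TinySink.new
sink.push('row')
sink.close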

#receive(event) ⇒ Object



# File 'lib/logstash/outputs/treasure_data.rb', line 85

def receive(event)
  record = event.clone
  @logger.debug "receive a event"

  record['time'] ||= (record.timestamp.to_i || Time.now.to_i)
  if record.to_hash.size > RECORD_KEYS_LIMIT
    raise "Too many number of keys (#{record.keys.size} keys)"
  end

  row = record.to_msgpack
  if row.bytesize > RECORD_SIZE_LIMIT
    raise "Too large record (#{row.bytesize} bytes with keys:#{record.keys.join(',')})"
  end

  buffer_receive(row)
  @logger.debug "buffered a event"
end
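
Each event is cloned, given a time field (the event timestamp, else wall-clock time), validated against the two record limits, serialized with msgpack, and queued via Stud::Buffer's buffer_receive. The same validation on a bare hash, as a plain-Ruby sketch (limits copied from the constants above):

require 'msgpack'

RECORD_KEYS_LIMIT = 512
RECORD_SIZE_LIMIT = 32 * 1024 * 1024

record = { 'message' => 'hello', 'host' => 'web01' }
record['time'] ||= Time.now.to_i    # default the time field

if record.keys.size > RECORD_KEYS_LIMIT
  raise "Too many keys (#{record.keys.size} keys)"
end

row = record.to_msgpack
if row.bytesize > RECORD_SIZE_LIMIT
  raise "Record too large (#{row.bytesize} bytes)"
end

# row would now go to buffer_receive(row)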

#register ⇒ Object



# File 'lib/logstash/outputs/treasure_data.rb', line 58

def register
  @empty_gz_data = TreasureData::API.create_empty_gz_data
  @user_agent = "logstash-output-treasure_data: #{VERSION}".freeze
  @uuid = UUID.new

  TreasureData::API.validate_database_name(@database)
  TreasureData::API.validate_table_name(@table)

  client_opts = {
    ssl: @use_ssl,
    http_proxy: @http_proxy,
    user_agent: @user_agent,
    endpoint: @endpoint,
    connect_timeout: @connect_timeout,
    read_timeout: @read_timeout,
    send_timeout: @send_timeout
  }
  @client = TreasureData::Client.new(@apikey.value, client_opts)

  buffer_initialize(
    max_items: @flush_size,
    max_interval: @flush_interval,
    logger: @logger
  )
end
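
#register validates the configured database and table names, builds the td-client connection, and initializes the Stud::Buffer with the configured flush size and interval. A sketch of the equivalent direct client construction, with illustrative option values (these are not the plugin's config defaults):

require 'td-client'

# Values the plugin takes from its Logstash config
# (@use_ssl, @endpoint, @connect_timeout, and so on).
client_opts = {
  ssl: true,
  http_proxy: nil,
  user_agent: 'logstash-output-treasure_data: 0.0.1',  # hypothetical version string
  endpoint: 'api.treasuredata.com',                    # assumed endpoint
  connect_timeout: 60,
  read_timeout: 600,
  send_timeout: 600
}
client = TreasureData::Client.new(ENV['TD_API_KEY'], client_opts)

TreasureData::API.validate_database_name('my_database')  # raises on an invalid name
TreasureData::API.validate_table_name('www_access')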