Class: LogStash::Outputs::Kinesis

Inherits:
  LogStash::Outputs::Base
    • Object
Defined in:
  lib/logstash/outputs/kinesis.rb

Overview

Sends log events to an AWS Kinesis stream. This output plugin uses the official Amazon Kinesis Producer Library (KPL). Most of the configuration options in this plugin are passed straight through to KinesisProducerConfiguration (https://github.com/awslabs/amazon-kinesis-producer/blob/v0.10.0/java/amazon-kinesis-producer/src/main/java/com/amazonaws/services/kinesis/producer/KinesisProducerConfiguration.java#L38).
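
For orientation, a minimal pipeline configuration might look like the sketch below. The option names mirror the instance variables used in the source listings that follow (stream_name, region, and so on), as is conventional for Logstash plugins; the values are placeholders, not defaults.

output {
  kinesis {
    # Placeholder stream and region - adjust for your account.
    stream_name => "my-stream"
    region => "us-east-1"
  }
}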

Constant Summary

KPL = com.amazonaws.services.kinesis.producer
AWSAuth = com.amazonaws.auth
ByteBuffer = java.nio.ByteBuffer

Instance Method Summary

Instance Method Details

#close ⇒ Object



# File 'lib/logstash/outputs/kinesis.rb', line 112

def close
  # Flush any buffered records, then shut down the KPL child process.
  @producer.flushSync()
  @producer.destroy()
end

#create_credentials_provider ⇒ Object



# File 'lib/logstash/outputs/kinesis.rb', line 167

def create_credentials_provider
  provider = AWSAuth.DefaultAWSCredentialsProviderChain.new()
  if @access_key and @secret_key
    provider = BasicCredentialsProvider.new(AWSAuth.BasicAWSCredentials.new(@access_key, @secret_key))
  end
  if @role_arn
    provider = create_sts_provider(provider, @role_arn)
  end
  provider
end
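
In pipeline terms: credentials come from the AWS default provider chain unless both access_key and secret_key are set, in which case static credentials are used instead. A minimal sketch, assuming the option names match the instance variables above:

output {
  kinesis {
    stream_name => "my-stream"
    region => "us-east-1"
    # Optional static credentials; omit both to use the default provider chain.
    access_key => "YOUR_ACCESS_KEY_ID"
    secret_key => "YOUR_SECRET_ACCESS_KEY"
  }
}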

#create_kpl_config ⇒ Object



# File 'lib/logstash/outputs/kinesis.rb', line 117

def create_kpl_config
  config = KPL.KinesisProducerConfiguration::new()

  credentials_provider = create_credentials_provider
  metrics_credentials_provider = create_metrics_credentials_provider

  config.setAggregationEnabled(@aggregation_enabled)
  config.setAggregationMaxCount(@aggregation_max_count)
  config.setAggregationMaxSize(@aggregation_max_size)
  config.setCollectionMaxCount(@collection_max_count)
  config.setCollectionMaxSize(@collection_max_size)
  config.setConnectTimeout(@connect_timeout)
  config.setCredentialsProvider(credentials_provider)
  config.setCredentialsRefreshDelay(@credentials_refresh_delay)
  config.setCustomEndpoint(@custom_endpoint) if !@custom_endpoint.nil?
  config.setFailIfThrottled(@fail_if_throttled)
  config.setLogLevel(@log_level)
  config.setMaxConnections(@max_connections)
  config.setMetricsCredentialsProvider(metrics_credentials_provider)
  config.setMetricsGranularity(@metrics_granularity)
  config.setMetricsLevel(@metrics_level)
  config.setMetricsNamespace(@metrics_namespace)
  config.setMetricsUploadDelay(@metrics_upload_delay)
  config.setMinConnections(@min_connections)
  config.setNativeExecutable(@native_executable) if !@native_executable.nil?
  config.setKinesisPort(@port)
  config.setRateLimit(@rate_limit)
  config.setRecordMaxBufferedTime(@record_max_buffered_time)
  config.setRecordTtl(@record_ttl)
  config.setRegion(@region)
  config.setRequestTimeout(@request_timeout)
  config.setTempDirectory(@temp_directory) if !@temp_directory.nil?
  config.setVerifyCertificate(@verify_certificate)

  config
end
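
Most of the setters above map directly to plugin options named after the instance variables (snake_case). A sketch of tuning a few of the pass-through KPL settings, assuming that convention (the values are illustrative, not defaults):

output {
  kinesis {
    stream_name => "my-stream"
    region => "us-east-1"
    # Pass-through KPL tuning.
    aggregation_enabled => true
    record_max_buffered_time => 1000
    record_ttl => 30000
    max_connections => 4
  }
}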

#create_metrics_credentials_provider ⇒ Object



# File 'lib/logstash/outputs/kinesis.rb', line 178

def create_metrics_credentials_provider
  provider = AWSAuth.DefaultAWSCredentialsProviderChain.new()
  if @metrics_access_key and @metrics_secret_key
    provider = BasicCredentialsProvider.new(AWSAuth.BasicAWSCredentials.new(@metrics_access_key, @metrics_secret_key))
  end
  if @metrics_role_arn
    provider = create_sts_provider(provider, @metrics_role_arn)
  end
  provider
end
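
The KPL uses these credentials to publish CloudWatch metrics; #register (below) falls back to access_key/secret_key when the metrics_* options are unset. A sketch of supplying separate metrics credentials, assuming the option names match the instance variables above:

output {
  kinesis {
    stream_name => "my-stream"
    region => "us-east-1"
    # Separate credentials for CloudWatch metrics (illustrative values).
    metrics_access_key => "METRICS_ACCESS_KEY_ID"
    metrics_secret_key => "METRICS_SECRET_ACCESS_KEY"
  }
}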

#create_sts_provider(base_provider, arn) ⇒ Object



# File 'lib/logstash/outputs/kinesis.rb', line 154

def create_sts_provider(base_provider, arn)
  client_config = com.amazonaws.ClientConfiguration.new()
  if @sts_proxy_host
    client_config.setProxyHost(@sts_proxy_host)
  end
  if @sts_proxy_port
    client_config.setProxyPort(@sts_proxy_port)
  end
  provider = AWSAuth.STSAssumeRoleSessionCredentialsProvider.new(
    base_provider, arn, "logstash-output-kinesis", client_config)
  provider
end
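
Setting role_arn (or metrics_role_arn) wraps the provider built above in an STS assume-role provider, optionally reached through a proxy. A sketch, assuming the option names match the instance variables:

output {
  kinesis {
    stream_name => "my-stream"
    region => "us-east-1"
    # Assume an IAM role for Kinesis access; the proxy settings are optional.
    role_arn => "arn:aws:iam::123456789012:role/example-role"
    sts_proxy_host => "proxy.example.internal"
    sts_proxy_port => 3128
  }
}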

#receive(event) ⇒ Object



# File 'lib/logstash/outputs/kinesis.rb', line 84

def receive(event)
  return unless output?(event)

  if @randomized_partition_key
    event["[@metadata][partition_key]"] = SecureRandom.uuid
  else
    # Seed the array with an empty string; calling .join on an empty array
    # would otherwise yield a US-ASCII encoded string.
    partition_key_parts = [""]

    @event_partition_keys.each do |partition_key_name|
      if not event[partition_key_name].nil? and event[partition_key_name].length > 0
        partition_key_parts << event[partition_key_name].to_s
        break
      end
    end

    event["[@metadata][partition_key]"] = (partition_key_parts * "-").to_s[/.+/m] || "-"
  end

  begin
    @codec.encode(event)
  rescue => e
    @logger.warn("Error encoding event", :exception => e, :event => event)
  end
end
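
In short: the partition key is either a random UUID (randomized_partition_key) or derived from the first non-empty field listed in event_partition_keys, with "-" as the fallback. A sketch of both styles, assuming the option names match the instance variables above:

output {
  kinesis {
    stream_name => "my-stream"
    region => "us-east-1"
    # Derive the partition key from the first non-empty field listed...
    event_partition_keys => ["[host]", "[message]"]
    # ...or distribute records randomly across shards instead:
    # randomized_partition_key => true
  }
}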

#register ⇒ Object



# File 'lib/logstash/outputs/kinesis.rb', line 75

def register
  @metrics_access_key ||= @access_key
  @metrics_secret_key ||= @secret_key

  @producer = KPL.KinesisProducer::new(create_kpl_config)
  @codec.on_event(&method(:send_record))
end
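
Because the codec's on_event hook is wired to #send_record, whatever the configured codec emits becomes the Kinesis record payload. A sketch of selecting a JSON payload via the standard codec option:

output {
  kinesis {
    stream_name => "my-stream"
    region => "us-east-1"
    # The codec output is what gets written to the stream.
    codec => json
  }
}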

#send_record(event, payload) ⇒ Object



# File 'lib/logstash/outputs/kinesis.rb', line 189

def send_record(event, payload)
  begin
    event_blob = ByteBuffer::wrap(payload.to_java_bytes)
    @producer.addUserRecord(@stream_name, event["[@metadata][partition_key]"], event_blob)
  rescue => e
    @logger.warn("Error writing event to Kinesis", :exception => e)
  end

  # Apply back-pressure: block if the KPL has too many records still in flight.
  num = @producer.getOutstandingRecordsCount()
  if num > @max_pending_records
    @logger.warn("Kinesis is too busy - blocking until things have cleared up")
    @producer.flushSync()
    @logger.info("Okay - I've stopped blocking now")
  end
end
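
When the number of records the KPL still has in flight exceeds max_pending_records, the plugin blocks the pipeline until the backlog drains, trading throughput for bounded buffering. A sketch of raising that threshold, assuming the option name matches the instance variable above:

output {
  kinesis {
    stream_name => "my-stream"
    region => "us-east-1"
    # Allow a larger in-flight backlog before blocking (illustrative value).
    max_pending_records => 10000
  }
}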