Class: LogStash::Outputs::ElasticSearch::HttpClient
- Inherits: Object
  - Object
  - LogStash::Outputs::ElasticSearch::HttpClient
- Defined in:
  - lib/logstash/outputs/elasticsearch/http_client.rb
  - lib/logstash/outputs/elasticsearch/http_client/pool.rb
  - lib/logstash/outputs/elasticsearch/http_client/manticore_adapter.rb
Defined Under Namespace
Classes: ManticoreAdapter, Pool
Constant Summary
- DEFAULT_HEADERS =
{ "Content-Type" => "application/json", 'x-elastic-product-origin' => 'logstash-output-elasticsearch' }
Instance Attribute Summary
-
#action_count ⇒ Object
readonly
Returns the value of attribute action_count.
-
#client ⇒ Object
readonly
Returns the value of attribute client.
-
#logger ⇒ Object
readonly
Returns the value of attribute logger.
-
#options ⇒ Object
readonly
Returns the value of attribute options.
-
#pool ⇒ Object
readonly
Returns the value of attribute pool.
-
#recv_count ⇒ Object
readonly
Returns the value of attribute recv_count.
Instance Method Summary
- #alive_urls_count ⇒ Object
- #build_adapter(options) ⇒ Object
- #build_pool(options) ⇒ Object
- #build_url_template ⇒ Object
- #bulk(actions) ⇒ Object
- #bulk_send(body_stream, batch_actions, headers = {}) ⇒ Object
- #calculate_property(uris, property, default, sniff_check) ⇒ Object
- #client_settings ⇒ Object
- #close ⇒ Object
-
#compression_level? ⇒ Boolean
Returns true if compression_level is in 1..9; returns false if it is 0.
- #emulate_batch_error_response(actions, http_code, reason) ⇒ Object
- #exists?(path, use_get = false) ⇒ Boolean
- #get(path) ⇒ Object
- #get_ilm_endpoint ⇒ Object
- #get_xpack_info ⇒ Object
- #gzip_writer(io) ⇒ Object
- #host_to_url(h) ⇒ Object
- #ilm_policy_exists?(name) ⇒ Boolean
- #ilm_policy_put(name, policy) ⇒ Object
-
#initialize(options = {}) ⇒ HttpClient
constructor
The `options` is a hash where the following symbol keys have meaning.
- #join_bulk_responses(bulk_responses) ⇒ Object
- #last_es_version ⇒ Object
- #maximum_seen_major_version ⇒ Object
- #password ⇒ Object
- #path ⇒ Object
- #port ⇒ Object
- #post(path, params = {}, body_string) ⇒ Object
- #prepare_user_agent ⇒ Object
-
#rollover_alias_exists?(name) ⇒ Boolean
Check whether the rollover alias already exists.
-
#rollover_alias_put(alias_name, alias_definition) ⇒ Object
Create a new rollover alias.
- #scheme ⇒ Object
- #serverless? ⇒ Boolean
- #sniffing ⇒ Object
- #ssl_options ⇒ Object
- #template_exists?(template_endpoint, name) ⇒ Boolean
- #template_install(template_endpoint, name, template, force = false) ⇒ Object
- #template_put(template_endpoint, name, template) ⇒ Object
-
#update_action_builder(args, source) ⇒ Object
Build a bulk item for an Elasticsearch update action.
- #uris ⇒ Object
- #user ⇒ Object
Constructor Details
#initialize(options = {}) ⇒ HttpClient
The `options` is a hash where the following symbol keys have meaning:

- `:hosts` - array of String. Set a list of hosts to use for communication.
- `:port` - number. Set the port to use to communicate with Elasticsearch.
- `:user` - String. The user to use for authentication.
- `:password` - String. The password to use for authentication.
- `:timeout` - Float. A duration value, in seconds, after which a socket operation or request will be aborted if not yet successful.
- `:client_settings` - a hash; see below for keys.

The `client_settings` key is a hash that can contain other settings:

- `:ssl` - Boolean. Enable or disable SSL/TLS.
- `:proxy` - String. Choose an HTTP proxy to use.
- `:path` - String. The leading path for prefixing Elasticsearch requests. This is sometimes used if you are proxying Elasticsearch access through a special HTTP path, such as using mod_rewrite.
- `:headers` - Hash. Pairs of headers and their values.
# File 'lib/logstash/outputs/elasticsearch/http_client.rb', line 53

def initialize(options={})
  @logger = options[:logger]
  @metric = options[:metric]
  @bulk_request_metrics = @metric.namespace(:bulk_requests)
  @bulk_response_metrics = @bulk_request_metrics.namespace(:responses)

  # Again, in case we use DEFAULT_OPTIONS in the future, uncomment this.
  # @options = DEFAULT_OPTIONS.merge(options)
  @options = options

  @url_template = build_url_template

  @pool = build_pool(@options)
  # mutex to prevent requests and sniffing to access the
  # connection pool at the same time
  @bulk_path = @options[:bulk_path]
end
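For orientation, below is a minimal sketch of an options hash shaped like the keys documented above. It is illustrative only: in practice the plugin assembles these options internally, the constructor also reads keys such as `:logger`, `:metric` and `:bulk_path` (omitted here), and `:ssl` is consumed by the accessor methods below as a hash of settings (for example `:enabled`) rather than a bare Boolean.

  options = {
    :hosts    => ["127.0.0.1"],
    :port     => 9200,
    :user     => "elastic",
    :password => "changeme",            # placeholder credentials
    :timeout  => 60.0,
    :client_settings => {
      :ssl     => { :enabled => false },
      :path    => "/",
      :headers => { "X-Example" => "example" }  # hypothetical extra header
    }
  }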
Instance Attribute Details
#action_count ⇒ Object (readonly)
Returns the value of attribute action_count.
# File 'lib/logstash/outputs/elasticsearch/http_client.rb', line 28

def action_count
  @action_count
end
#client ⇒ Object (readonly)
Returns the value of attribute client.
# File 'lib/logstash/outputs/elasticsearch/http_client.rb', line 28

def client
  @client
end
#logger ⇒ Object (readonly)
Returns the value of attribute logger.
# File 'lib/logstash/outputs/elasticsearch/http_client.rb', line 28

def logger
  @logger
end
#options ⇒ Object (readonly)
Returns the value of attribute options.
# File 'lib/logstash/outputs/elasticsearch/http_client.rb', line 28

def options
  @options
end
#pool ⇒ Object (readonly)
Returns the value of attribute pool.
# File 'lib/logstash/outputs/elasticsearch/http_client.rb', line 28

def pool
  @pool
end
#recv_count ⇒ Object (readonly)
Returns the value of attribute recv_count.
# File 'lib/logstash/outputs/elasticsearch/http_client.rb', line 28

def recv_count
  @recv_count
end
Instance Method Details
#alive_urls_count ⇒ Object
# File 'lib/logstash/outputs/elasticsearch/http_client.rb', line 102

def alive_urls_count
  @pool.alive_urls_count
end
#build_adapter(options) ⇒ Object
# File 'lib/logstash/outputs/elasticsearch/http_client.rb', line 322

def build_adapter(options)
  timeout = options[:timeout] || 0

  adapter_options = {
    :socket_timeout => timeout,
    :request_timeout => timeout,
  }

  adapter_options[:user_agent] = prepare_user_agent

  adapter_options[:proxy] = client_settings[:proxy] if client_settings[:proxy]

  adapter_options[:check_connection_timeout] = client_settings[:check_connection_timeout] if client_settings[:check_connection_timeout]

  # Having this explicitly set to nil is an error
  if client_settings[:pool_max]
    adapter_options[:pool_max] = client_settings[:pool_max]
  end

  # Having this explicitly set to nil is an error
  if client_settings[:pool_max_per_route]
    adapter_options[:pool_max_per_route] = client_settings[:pool_max_per_route]
  end

  adapter_options[:ssl] = ssl_options if self.scheme == 'https'

  adapter_options[:headers] = client_settings[:headers] if client_settings[:headers]

  ::LogStash::Outputs::ElasticSearch::HttpClient::ManticoreAdapter.new(@logger, adapter_options)
end
#build_pool(options) ⇒ Object
# File 'lib/logstash/outputs/elasticsearch/http_client.rb', line 365

def build_pool(options)
  adapter = build_adapter(options)

  pool_options = {
    :license_checker => options[:license_checker],
    :sniffing => sniffing,
    :sniffer_delay => options[:sniffer_delay],
    :sniffing_path => options[:sniffing_path],
    :healthcheck_path => options[:healthcheck_path],
    :resurrect_delay => options[:resurrect_delay],
    :url_normalizer => self.method(:host_to_url),
    :metric => options[:metric]
  }
  pool_options[:scheme] = self.scheme if self.scheme

  pool_class = ::LogStash::Outputs::ElasticSearch::HttpClient::Pool
  full_urls = @options[:hosts].map {|h| host_to_url(h) }
  pool = pool_class.new(@logger, adapter, full_urls, pool_options)
  pool.start
  pool
end
#build_url_template ⇒ Object
# File 'lib/logstash/outputs/elasticsearch/http_client.rb', line 71

def build_url_template
  {
    :scheme => self.scheme,
    :user => self.user,
    :password => self.password,
    :host => "URLTEMPLATE",
    :port => self.port,
    :path => self.path
  }
end
#bulk(actions) ⇒ Object
# File 'lib/logstash/outputs/elasticsearch/http_client.rb', line 106

def bulk(actions)
  @action_count ||= 0
  @action_count += actions.size

  return if actions.empty?

  bulk_actions = actions.collect do |action, args, source|
    args, source = update_action_builder(args, source) if action == 'update'

    if source && action != 'delete'
      next [ { action => args }, source ]
    else
      next { action => args }
    end
  end

  body_stream = StringIO.new
  if compression_level?
    body_stream.set_encoding "BINARY"
    stream_writer = gzip_writer(body_stream)
  else
    stream_writer = body_stream
  end

  bulk_responses = []
  batch_actions = []
  bulk_actions.each_with_index do |action, index|
    as_json = action.is_a?(Array) ?
                action.map {|line| LogStash::Json.dump(line)}.join("\n") :
                LogStash::Json.dump(action)
    as_json << "\n"
    as_json.scrub! # ensure generated JSON is valid UTF-8

    if (stream_writer.pos + as_json.bytesize) > TARGET_BULK_BYTES && stream_writer.pos > 0
      stream_writer.flush # ensure writer has sync'd buffers before reporting sizes
      logger.debug("Sending partial bulk request for batch with one or more actions remaining.",
                   :action_count => batch_actions.size,
                   :payload_size => stream_writer.pos,
                   :content_length => body_stream.size,
                   :batch_offset => (index + 1 - batch_actions.size))

      headers = {
        EVENT_COUNT_HEADER => batch_actions.size.to_s,
        UNCOMPRESSED_LENGTH_HEADER => stream_writer.pos.to_s
      }

      bulk_responses << bulk_send(body_stream, batch_actions, headers)
      body_stream.truncate(0) && body_stream.seek(0)
      stream_writer = gzip_writer(body_stream) if compression_level?
      batch_actions.clear
    end
    stream_writer.write(as_json)
    batch_actions << action
  end

  stream_writer.close if compression_level?

  logger.debug("Sending final bulk request for batch.",
               :action_count => batch_actions.size,
               :payload_size => stream_writer.pos,
               :content_length => body_stream.size,
               :batch_offset => (actions.size - batch_actions.size))

  if body_stream.size > 0
    headers = {
      EVENT_COUNT_HEADER => batch_actions.size.to_s,
      UNCOMPRESSED_LENGTH_HEADER => stream_writer.pos.to_s
    }
    bulk_responses << bulk_send(body_stream, batch_actions, headers)
  end

  body_stream.close unless compression_level?

  join_bulk_responses(bulk_responses)
end
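As a sketch of the serialization step inside #bulk: each (action, args, source) tuple becomes one or two newline-terminated JSON lines (two when a source document is present and the action is not a delete). The snippet below reproduces that step using the standard library's JSON module as a stand-in for LogStash::Json; the index name and document are made up.

  require 'json' # stand-in for LogStash::Json in this sketch

  # One (action, args, source) tuple as #bulk receives them.
  action, args, source = 'index', { :_index => 'logs-example' }, { 'message' => 'hello' }

  # Non-delete actions with a source serialize to a pair of NDJSON lines.
  pair = [ { action => args }, source ]
  as_json = pair.map { |line| JSON.dump(line) }.join("\n") << "\n"
  # => "{\"index\":{\"_index\":\"logs-example\"}}\n{\"message\":\"hello\"}\n"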
#bulk_send(body_stream, batch_actions, headers = {}) ⇒ Object
# File 'lib/logstash/outputs/elasticsearch/http_client.rb', line 194

def bulk_send(body_stream, batch_actions, headers = {})
  params = compression_level? ?
             {:headers => headers.merge("Content-Encoding" => "gzip") } :
             {:headers => headers }

  begin
    response = @pool.post(@bulk_path, params, body_stream.string)
    @bulk_response_metrics.increment(response.code.to_s)
  rescue ::LogStash::Outputs::ElasticSearch::HttpClient::Pool::BadResponseCodeError => e
    @bulk_response_metrics.increment(e.response_code.to_s)
    raise e unless e.response_code == 413
    # special handling for 413, treat it as a document level issue
    logger.warn("Bulk request rejected: `413 Payload Too Large`",
                :action_count => batch_actions.size, :content_length => body_stream.size)
    return emulate_batch_error_response(batch_actions, 413, 'payload_too_large')
  rescue => e # it may be a network issue instead, re-raise
    raise e
  end

  LogStash::Json.load(response.body)
end
#calculate_property(uris, property, default, sniff_check) ⇒ Object
# File 'lib/logstash/outputs/elasticsearch/http_client.rb', line 240

def calculate_property(uris, property, default, sniff_check)
  values = uris.map(&property).uniq

  if sniff_check && values.size > 1
    raise LogStash::ConfigurationError, "Cannot have multiple values for #{property} in hosts when sniffing is enabled!"
  end

  uri_value = values.first

  default = nil if default.is_a?(String) && default.empty? # Blanks are as good as nil
  uri_value = nil if uri_value.is_a?(String) && uri_value.empty?

  if default && uri_value && (default != uri_value)
    raise LogStash::ConfigurationError, "Explicit value for '#{property}' was declared, but it is different in one of the URLs given! Please make sure your URLs are inline with explicit values. The URLs have the property set to '#{uri_value}', but it was also set to '#{default}' explicitly"
  end

  uri_value || default
end
#client_settings ⇒ Object
# File 'lib/logstash/outputs/elasticsearch/http_client.rb', line 308

def client_settings
  @_client_settings ||= @options[:client_settings] || {}
end
#close ⇒ Object
# File 'lib/logstash/outputs/elasticsearch/http_client.rb', line 236

def close
  @pool.close
end
#compression_level? ⇒ Boolean
Returns true if compression_level is in 1..9; returns false if it is 0.
# File 'lib/logstash/outputs/elasticsearch/http_client.rb', line 318

def compression_level?
  client_settings.fetch(:compression_level) > 0
end
#emulate_batch_error_response(actions, http_code, reason) ⇒ Object
# File 'lib/logstash/outputs/elasticsearch/http_client.rb', line 213

def emulate_batch_error_response(actions, http_code, reason)
  {
    "errors" => true,
    "items" => actions.map do |action|
      action = action.first if action.is_a?(Array)
      request_action, request_parameters = action.first
      {
        request_action => {"status" => http_code, "error" => { "type" => reason }}
      }
    end
  }
end
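For reference, the emulated response has the same "errors"/"items" shape as a real bulk response, with every item carrying the given status and reason. A sketch of the result for a two-action batch rejected with 413 (example action names only):

  emulated = {
    "errors" => true,
    "items"  => [
      { "index" => { "status" => 413, "error" => { "type" => "payload_too_large" } } },
      { "index" => { "status" => 413, "error" => { "type" => "payload_too_large" } } }
    ]
  }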
#exists?(path, use_get = false) ⇒ Boolean
# File 'lib/logstash/outputs/elasticsearch/http_client.rb', line 421

def exists?(path, use_get=false)
  response = use_get ? @pool.get(path) : @pool.head(path)
  response.code >= 200 && response.code <= 299
rescue ::LogStash::Outputs::ElasticSearch::HttpClient::Pool::BadResponseCodeError => e
  return false if e.response_code == 404
  raise e
end
#get(path) ⇒ Object
# File 'lib/logstash/outputs/elasticsearch/http_client.rb', line 226

def get(path)
  response = @pool.get(path)
  LogStash::Json.load(response.body)
end
#get_ilm_endpoint ⇒ Object
# File 'lib/logstash/outputs/elasticsearch/http_client.rb', line 465

def get_ilm_endpoint
  @pool.get("/_ilm/policy")
end
#get_xpack_info ⇒ Object
# File 'lib/logstash/outputs/elasticsearch/http_client.rb', line 461

def get_xpack_info
  get("/_xpack")
end
#gzip_writer(io) ⇒ Object
# File 'lib/logstash/outputs/elasticsearch/http_client.rb', line 180

def gzip_writer(io)
  fail(ArgumentError, "Cannot create gzip writer on IO with unread bytes") unless io.eof?
  fail(ArgumentError, "Cannot create gzip writer on non-empty IO") unless io.pos == 0

  Zlib::GzipWriter.new(io, client_settings.fetch(:compression_level), Zlib::DEFAULT_STRATEGY)
end
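A minimal sketch of the compression path this writer enables: NDJSON is written through a Zlib::GzipWriter wrapped around an empty binary StringIO, and the resulting bytes are what #bulk_send posts with a Content-Encoding: gzip header. The level 6 below is only an example value for compression_level.

  require 'stringio'
  require 'zlib'

  io = StringIO.new
  io.set_encoding "BINARY"
  gz = Zlib::GzipWriter.new(io, 6, Zlib::DEFAULT_STRATEGY)
  gz.write(%Q({"index":{}}\n{"message":"hello"}\n))
  gz.close

  compressed_body = io.string  # gzip-compressed bulk payload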
#host_to_url(h) ⇒ Object
# File 'lib/logstash/outputs/elasticsearch/http_client.rb', line 387

def host_to_url(h)
  # Never override the calculated scheme
  raw_scheme = @url_template[:scheme] || 'http'

  raw_user = h.user || @url_template[:user]
  raw_password = h.password || @url_template[:password]
  postfixed_userinfo = raw_user && raw_password ? "#{raw_user}:#{raw_password}@" : nil

  raw_host = h.host # Always replace this!
  raw_port = h.port || @url_template[:port]

  raw_path = !h.path.nil? && !h.path.empty? && h.path != "/" ? h.path : @url_template[:path]
  prefixed_raw_path = raw_path && !raw_path.empty? ? raw_path : "/"

  parameters = client_settings[:parameters]
  raw_query = if parameters && !parameters.empty?
                combined = h.query ? Hash[URI::decode_www_form(h.query)].merge(parameters) : parameters
                query_str = combined.flat_map {|k,v|
                  values = Array(v)
                  values.map {|av| "#{k}=#{av}"}
                }.join("&")
                query_str
              else
                h.query
              end
  prefixed_raw_query = raw_query && !raw_query.empty? ? "?#{raw_query}" : nil

  raw_url = "#{raw_scheme}://#{postfixed_userinfo}#{raw_host}:#{raw_port}#{prefixed_raw_path}#{prefixed_raw_query}"

  ::LogStash::Util::SafeURI.new(raw_url)
end
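To illustrate the normalization: defaults from the URL template fill in whatever a host entry omits, while anything the entry specifies is kept. A sketch with made-up hosts, assuming a template of scheme "http", port 9200 and path "/":

  raw_scheme, userinfo, raw_host, raw_port, raw_path = 'http', nil, 'es1.example.com', 9200, '/'
  raw_url = "#{raw_scheme}://#{userinfo}#{raw_host}:#{raw_port}#{raw_path}"
  # => "http://es1.example.com:9200/"
  # A host entry like "https://user:secret@es2.example.com:9243/proxy" keeps its own
  # scheme, userinfo, port and path instead of the template values.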
#ilm_policy_exists?(name) ⇒ Boolean
# File 'lib/logstash/outputs/elasticsearch/http_client.rb', line 469

def ilm_policy_exists?(name)
  exists?("/_ilm/policy/#{name}", true)
end
#ilm_policy_put(name, policy) ⇒ Object
# File 'lib/logstash/outputs/elasticsearch/http_client.rb', line 473

def ilm_policy_put(name, policy)
  path = "_ilm/policy/#{name}"
  logger.info("Installing ILM policy #{policy}", name: name)
  @pool.put(path, nil, LogStash::Json.dump(policy))
end
#join_bulk_responses(bulk_responses) ⇒ Object
# File 'lib/logstash/outputs/elasticsearch/http_client.rb', line 187

def join_bulk_responses(bulk_responses)
  {
    "errors" => bulk_responses.any? {|r| r["errors"] == true},
    "items" => bulk_responses.reduce([]) {|m,r| m.concat(r.fetch("items", []))}
  }
end
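A sketch of how two partial responses fold into one: the joined response reports "errors" => true if any partial response did, and concatenates the "items" arrays in order (example statuses only):

  bulk_responses = [
    { "errors" => false, "items" => [ { "index" => { "status" => 201 } } ] },
    { "errors" => true,  "items" => [ { "index" => { "status" => 429 } } ] }
  ]

  joined = {
    "errors" => bulk_responses.any? { |r| r["errors"] == true },
    "items"  => bulk_responses.reduce([]) { |m, r| m.concat(r.fetch("items", [])) }
  }
  # => { "errors" => true, "items" => [ {"index"=>{"status"=>201}}, {"index"=>{"status"=>429}} ] }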
#last_es_version ⇒ Object
# File 'lib/logstash/outputs/elasticsearch/http_client.rb', line 90

def last_es_version
  @pool.last_es_version
end
#maximum_seen_major_version ⇒ Object
# File 'lib/logstash/outputs/elasticsearch/http_client.rb', line 94

def maximum_seen_major_version
  @pool.maximum_seen_major_version
end
#password ⇒ Object
# File 'lib/logstash/outputs/elasticsearch/http_client.rb', line 267

def password
  calculate_property(uris, :password, @options[:password], sniffing)
end
#path ⇒ Object
# File 'lib/logstash/outputs/elasticsearch/http_client.rb', line 271

def path
  calculated = calculate_property(uris, :path, client_settings[:path], sniffing)
  calculated = "/#{calculated}" if calculated && !calculated.start_with?("/")
  calculated
end
#port ⇒ Object
# File 'lib/logstash/outputs/elasticsearch/http_client.rb', line 297

def port
  # We don't set the 'default' here because the default is what the user
  # indicated, so we use an || outside of calculate_property. This lets people
  # Enter things like foo:123, bar and wind up with foo:123, bar:9200
  calculate_property(uris, :port, nil, sniffing) || 9200
end
#post(path, params = {}, body_string) ⇒ Object
# File 'lib/logstash/outputs/elasticsearch/http_client.rb', line 231

def post(path, params = {}, body_string)
  response = @pool.post(path, params, body_string)
  LogStash::Json.load(response.body)
end
#prepare_user_agent ⇒ Object
# File 'lib/logstash/outputs/elasticsearch/http_client.rb', line 353

def prepare_user_agent
  os_name = java.lang.System.getProperty('os.name')
  os_version = java.lang.System.getProperty('os.version')
  os_arch = java.lang.System.getProperty('os.arch')
  jvm_vendor = java.lang.System.getProperty('java.vendor')
  jvm_version = java.lang.System.getProperty('java.version')

  plugin_version = Gem.loaded_specs['logstash-output-elasticsearch'].version
  # example: Logstash/7.14.1 (OS=Linux-5.4.0-84-generic-amd64; JVM=AdoptOpenJDK-11.0.11) logstash-output-elasticsearch/11.0.1
  "Logstash/#{LOGSTASH_VERSION} (OS=#{os_name}-#{os_version}-#{os_arch}; JVM=#{jvm_vendor}-#{jvm_version}) logstash-output-elasticsearch/#{plugin_version}"
end
#rollover_alias_exists?(name) ⇒ Boolean
Check whether the rollover alias already exists.
# File 'lib/logstash/outputs/elasticsearch/http_client.rb', line 444

def rollover_alias_exists?(name)
  exists?(name)
end
#rollover_alias_put(alias_name, alias_definition) ⇒ Object
Create a new rollover alias
# File 'lib/logstash/outputs/elasticsearch/http_client.rb', line 449

def rollover_alias_put(alias_name, alias_definition)
  @pool.put(CGI::escape(alias_name), nil, LogStash::Json.dump(alias_definition))
  logger.info("Created rollover alias", name: alias_name)
  # If the rollover alias already exists, ignore the error that comes back from Elasticsearch
rescue ::LogStash::Outputs::ElasticSearch::HttpClient::Pool::BadResponseCodeError => e
  if e.response_code == 400
    logger.info("Rollover alias already exists, skipping", name: alias_name)
    return
  end
  raise e
end
#scheme ⇒ Object
# File 'lib/logstash/outputs/elasticsearch/http_client.rb', line 277

def scheme
  explicit_scheme = if ssl_options && ssl_options.has_key?(:enabled)
                      ssl_options[:enabled] ? 'https' : 'http'
                    else
                      nil
                    end

  calculated_scheme = calculate_property(uris, :scheme, explicit_scheme, sniffing)

  if calculated_scheme && calculated_scheme !~ /https?/
    raise LogStash::ConfigurationError, "Bad scheme '#{calculated_scheme}' found should be one of http/https"
  end

  if calculated_scheme && explicit_scheme && calculated_scheme != explicit_scheme
    raise LogStash::ConfigurationError, "SSL option was explicitly set to #{ssl_options[:enabled]} but a URL was also declared with a scheme of '#{explicit_scheme}'. Please reconcile this"
  end

  calculated_scheme # May be nil if explicit_scheme is nil!
end
#serverless? ⇒ Boolean
# File 'lib/logstash/outputs/elasticsearch/http_client.rb', line 98

def serverless?
  @pool.serverless?
end
#sniffing ⇒ Object
# File 'lib/logstash/outputs/elasticsearch/http_client.rb', line 259

def sniffing
  @options[:sniffing]
end
#ssl_options ⇒ Object
# File 'lib/logstash/outputs/elasticsearch/http_client.rb', line 312

def ssl_options
  @_ssl_options ||= client_settings.fetch(:ssl, {})
end
#template_exists?(template_endpoint, name) ⇒ Boolean
# File 'lib/logstash/outputs/elasticsearch/http_client.rb', line 429

def template_exists?(template_endpoint, name)
  exists?("/#{template_endpoint}/#{name}")
end
#template_install(template_endpoint, name, template, force = false) ⇒ Object
# File 'lib/logstash/outputs/elasticsearch/http_client.rb', line 82

def template_install(template_endpoint, name, template, force=false)
  if template_exists?(template_endpoint, name) && !force
    @logger.debug("Found existing Elasticsearch template, skipping template management", name: name)
    return
  end
  template_put(template_endpoint, name, template)
end
#template_put(template_endpoint, name, template) ⇒ Object
# File 'lib/logstash/outputs/elasticsearch/http_client.rb', line 433

def template_put(template_endpoint, name, template)
  path = "#{template_endpoint}/#{name}"
  logger.info("Installing Elasticsearch template", name: name)
  @pool.put(path, nil, LogStash::Json.dump(template))
rescue ::LogStash::Outputs::ElasticSearch::HttpClient::Pool::BadResponseCodeError => e
  raise e unless e.response_code == 404
end
#update_action_builder(args, source) ⇒ Object
Build a bulk item for an Elasticsearch update action.
# File 'lib/logstash/outputs/elasticsearch/http_client.rb', line 481

def update_action_builder(args, source)
  args = args.clone()
  if args[:_script]
    # Use the event as a hash from your script with variable name defined
    # by script_var_name (default: "event")
    # Ex: event["@timestamp"]
    source_orig = source
    source = { 'script' => {'params' => { @options[:script_var_name] => source_orig }} }
    if @options[:scripted_upsert]
      source['scripted_upsert'] = true
      source['upsert'] = {}
    elsif @options[:doc_as_upsert]
      source['upsert'] = source_orig
    else
      source['upsert'] = args.delete(:_upsert) if args[:_upsert]
    end
    case @options[:script_type]
    when 'indexed'
      source['script']['id'] = args.delete(:_script)
    when 'file'
      source['script']['file'] = args.delete(:_script)
    when 'inline'
      source['script']['inline'] = args.delete(:_script)
    end
    source['script']['lang'] = @options[:script_lang] if @options[:script_lang] != ''
  else
    source = { 'doc' => source }
    if @options[:doc_as_upsert]
      source['doc_as_upsert'] = true
    else
      source['upsert'] = args.delete(:_upsert) if args[:_upsert]
    end
  end
  [args, source]
end
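To make the transformation concrete, here is a sketch of the source document this builder produces in two common configurations; the field values and the stored-script id are made up:

  # doc_as_upsert, no script: the event becomes the partial doc and an upsert flag is set
  source = { 'doc' => { 'message' => 'hello' }, 'doc_as_upsert' => true }

  # scripted update with script_type 'indexed', script_var_name 'event' and scripted_upsert
  source = {
    'script' => {
      'params' => { 'event' => { 'message' => 'hello' } },
      'id'     => 'my-stored-script'   # hypothetical stored script id
    },
    'scripted_upsert' => true,
    'upsert' => {}
  }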
#uris ⇒ Object
# File 'lib/logstash/outputs/elasticsearch/http_client.rb', line 304

def uris
  @options[:hosts]
end
#user ⇒ Object
# File 'lib/logstash/outputs/elasticsearch/http_client.rb', line 263

def user
  calculate_property(uris, :user, @options[:user], sniffing)
end