Class: LogStash::Outputs::ElasticSearch::HttpClient::Pool
- Inherits: Object
- Defined in: lib/logstash/outputs/elasticsearch/http_client/pool.rb
Defined Under Namespace
Classes: BadResponseCodeError, HostUnreachableError, NoConnectionAvailableError
Constant Summary

- ROOT_URI_PATH = '/'.freeze
- LICENSE_PATH = '/_license'.freeze
- VERSION_6_TO_7 = ::Gem::Requirement.new([">= 6.0.0", "< 7.0.0"])
- VERSION_7_TO_7_14 = ::Gem::Requirement.new([">= 7.0.0", "< 7.14.0"])
- DEFAULT_OPTIONS = { :healthcheck_path => ROOT_URI_PATH, :sniffing_path => "/_nodes/http", :bulk_path => "/_bulk", :scheme => 'http', :resurrect_delay => 5, :sniffing => false, :sniffer_delay => 10 }.freeze
- BUILD_FLAVOUR_SERVERLESS = 'serverless'.freeze
- ES1_SNIFF_RE_URL = /\[([^\/]*)?\/?([^:]*):([0-9]+)\]/
- ES2_AND_ABOVE_SNIFF_RE_URL = /([^\/]*)?\/?([^:]*):([0-9]+)/
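The two VERSION_* constants are ordinary Gem::Requirement objects, so version gating elsewhere in the class reduces to satisfied_by? checks. A quick illustration (the sample version strings are arbitrary, not taken from the docs):

  VERSION_6_TO_7.satisfied_by?(::Gem::Version.new("6.8.2"))      # => true
  VERSION_7_TO_7_14.satisfied_by?(::Gem::Version.new("7.14.0"))  # => false (upper bound is exclusive)
  VERSION_7_TO_7_14.satisfied_by?(::Gem::Version.new("7.13.4"))  # => true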
Instance Attribute Summary

- #adapter ⇒ Object (readonly): Returns the value of attribute adapter.
- #bulk_path ⇒ Object (readonly): Returns the value of attribute bulk_path.
- #healthcheck_path ⇒ Object (readonly): Returns the value of attribute healthcheck_path.
- #license_checker ⇒ Object (readonly): Used by the pool specs.
- #logger ⇒ Object (readonly): Returns the value of attribute logger.
- #resurrect_delay ⇒ Object (readonly): Returns the value of attribute resurrect_delay.
- #sniffer_delay ⇒ Object (readonly): Returns the value of attribute sniffer_delay.
- #sniffing ⇒ Object (readonly): Returns the value of attribute sniffing.
- #sniffing_path ⇒ Object (readonly): Returns the value of attribute sniffing_path.
Instance Method Summary
- #add_url(url) ⇒ Object
- #address_str_to_uri(addr_str) ⇒ Object
- #alive_urls_count ⇒ Object
- #check_sniff ⇒ Object: Sniffs and returns the results.
- #close ⇒ Object
- #elasticsearch?(url) ⇒ Boolean
- #empty_url_meta ⇒ Object
- #get_connection ⇒ Object
- #get_es_version(url) ⇒ Object
- #get_license(url) ⇒ Hash: Retrieve ES node license information.
- #health_check_request(url) ⇒ Object
- #healthcheck!(register_phase = true) ⇒ Object
- #in_use_connections ⇒ Object
- #initialize(logger, adapter, initial_urls = [], options = {}) ⇒ Pool (constructor): A new instance of Pool.
- #last_es_version ⇒ Object
- #major_version(version_string) ⇒ Object
- #mark_dead(url, error) ⇒ Object
- #maximum_seen_major_version ⇒ Object
- #normalize_url(uri) ⇒ Object
- #perform_request(method, path, params = {}, body = nil) ⇒ Object
- #perform_request_to_url(url, method, path, params = {}, body = nil) ⇒ Object
- #remove_url(url) ⇒ Object
- #resurrectionist_alive? ⇒ Boolean
- #return_connection(url) ⇒ Object
- #serverless? ⇒ Boolean
- #size ⇒ Object
- #sniff(nodes) ⇒ Object
- #sniff! ⇒ Object: Sniffs the cluster then updates the internal URLs.
- #sniffer_alive? ⇒ Boolean
- #start ⇒ Object
- #start_resurrectionist ⇒ Object
- #start_sniffer ⇒ Object
- #stop_resurrectionist ⇒ Object
- #stop_sniffer ⇒ Object
- #until_stopped(task_name, delay) ⇒ Object
- #update_initial_urls ⇒ Object
- #update_urls(new_urls) ⇒ Object
- #url_info ⇒ Object
- #url_meta(url) ⇒ Object
- #urls ⇒ Object
- #valid_tagline?(version_info) ⇒ Boolean
- #wait_for_in_use_connections ⇒ Object
- #with_connection ⇒ Object
Constructor Details
#initialize(logger, adapter, initial_urls = [], options = {}) ⇒ Pool
Returns a new instance of Pool.
# File 'lib/logstash/outputs/elasticsearch/http_client/pool.rb', line 53

def initialize(logger, adapter, initial_urls = [], options = {})
  @logger = logger
  @adapter = adapter
  @metric = options[:metric]
  @initial_urls = initial_urls

  raise ArgumentError, "No URL Normalizer specified!" unless options[:url_normalizer]
  @url_normalizer = options[:url_normalizer]
  DEFAULT_OPTIONS.merge(options).tap do |merged|
    @bulk_path = merged[:bulk_path]
    @sniffing_path = merged[:sniffing_path]
    @healthcheck_path = merged[:healthcheck_path]
    @resurrect_delay = merged[:resurrect_delay]
    @sniffing = merged[:sniffing]
    @sniffer_delay = merged[:sniffer_delay]
  end

  # Used for all concurrent operations in this class
  @state_mutex = Mutex.new

  # Holds metadata about all URLs
  @url_info = {}
  @stopping = false

  @license_checker = options[:license_checker] || LogStash::PluginMixins::ElasticSearch::NoopLicenseChecker::INSTANCE

  @last_es_version = Concurrent::AtomicReference.new
  @build_flavour = Concurrent::AtomicReference.new
end
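For orientation, a minimal construction sketch. The logger, adapter, and URL normalizer below are placeholders; in the plugin they are supplied by the HttpClient that owns the pool, and the normalizer must return a LogStash::Util::SafeURI.

  urls = [::LogStash::Util::SafeURI.new("http://localhost:9200")]

  pool = LogStash::Outputs::ElasticSearch::HttpClient::Pool.new(
    logger,                               # any LogStash-style logger (placeholder)
    adapter,                              # e.g. the plugin's Manticore adapter (placeholder)
    urls,
    :url_normalizer => ->(uri) { uri },   # identity is enough when the URLs are already SafeURIs
    :resurrect_delay => 5,
    :sniffing => false
  )

  pool.start   # seeds @url_info from initial_urls, health-checks them, starts background threads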
Instance Attribute Details
#adapter ⇒ Object (readonly)
Returns the value of attribute adapter.
# File 'lib/logstash/outputs/elasticsearch/http_client/pool.rb', line 32

def adapter
  @adapter
end
#bulk_path ⇒ Object (readonly)
Returns the value of attribute bulk_path.
# File 'lib/logstash/outputs/elasticsearch/http_client/pool.rb', line 32

def bulk_path
  @bulk_path
end
#healthcheck_path ⇒ Object (readonly)
Returns the value of attribute healthcheck_path.
# File 'lib/logstash/outputs/elasticsearch/http_client/pool.rb', line 32

def healthcheck_path
  @healthcheck_path
end
#license_checker ⇒ Object (readonly)
license_checker is used by the pool specs.
# File 'lib/logstash/outputs/elasticsearch/http_client/pool.rb', line 33

def license_checker
  @license_checker
end
#logger ⇒ Object (readonly)
Returns the value of attribute logger.
# File 'lib/logstash/outputs/elasticsearch/http_client/pool.rb', line 32

def logger
  @logger
end
#resurrect_delay ⇒ Object (readonly)
Returns the value of attribute resurrect_delay.
# File 'lib/logstash/outputs/elasticsearch/http_client/pool.rb', line 32

def resurrect_delay
  @resurrect_delay
end
#sniffer_delay ⇒ Object (readonly)
Returns the value of attribute sniffer_delay.
# File 'lib/logstash/outputs/elasticsearch/http_client/pool.rb', line 32

def sniffer_delay
  @sniffer_delay
end
#sniffing ⇒ Object (readonly)
Returns the value of attribute sniffing.
# File 'lib/logstash/outputs/elasticsearch/http_client/pool.rb', line 32

def sniffing
  @sniffing
end
#sniffing_path ⇒ Object (readonly)
Returns the value of attribute sniffing_path.
# File 'lib/logstash/outputs/elasticsearch/http_client/pool.rb', line 32

def sniffing_path
  @sniffing_path
end
Instance Method Details
#add_url(url) ⇒ Object
# File 'lib/logstash/outputs/elasticsearch/http_client/pool.rb', line 390

def add_url(url)
  @url_info[url] ||= empty_url_meta
end
#address_str_to_uri(addr_str) ⇒ Object
# File 'lib/logstash/outputs/elasticsearch/http_client/pool.rb', line 199

def address_str_to_uri(addr_str)
  matches = addr_str.match(ES1_SNIFF_RE_URL) || addr_str.match(ES2_AND_ABOVE_SNIFF_RE_URL)
  if matches
    host = matches[1].empty? ? matches[2] : matches[1]
    ::LogStash::Util::SafeURI.new("#{host}:#{matches[3]}")
  end
end
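To make the two sniffing regexes concrete, here is how typical publish_address strings break down (the sample addresses are invented for illustration):

  "inet[myhost/127.0.0.1:9200]".match(ES1_SNIFF_RE_URL).captures
  # => ["myhost", "127.0.0.1", "9200"]   (pre-2.0 bracketed form; the name before the slash wins)

  "127.0.0.1:9200".match(ES2_AND_ABOVE_SNIFF_RE_URL).captures
  # => ["127.0.0.1", "", "9200"]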
#alive_urls_count ⇒ Object
# File 'lib/logstash/outputs/elasticsearch/http_client/pool.rb', line 120

def alive_urls_count
  @state_mutex.synchronize { @url_info.values.select {|v| v[:state] == :alive }.count }
end
#check_sniff ⇒ Object
Sniffs and returns the results. Does not update internal URLs!
# File 'lib/logstash/outputs/elasticsearch/http_client/pool.rb', line 174

def check_sniff
  _, url_meta, resp = perform_request(:get, @sniffing_path)
  @metric.increment(:sniff_requests)
  parsed = LogStash::Json.load(resp.body)
  nodes = parsed['nodes']
  if !nodes || nodes.empty?
    @logger.warn("Sniff returned no nodes! Will not update hosts.")
    return nil
  else
    sniff(nodes)
  end
end
#close ⇒ Object
# File 'lib/logstash/outputs/elasticsearch/http_client/pool.rb', line 93

def close
  @state_mutex.synchronize { @stopping = true }

  logger.debug "Stopping sniffer"
  stop_sniffer

  logger.debug "Stopping resurrectionist"
  stop_resurrectionist

  logger.debug "Waiting for in use manticore connections"
  wait_for_in_use_connections

  logger.debug("Closing adapter #{@adapter}")
  @adapter.close
end
#elasticsearch?(url) ⇒ Boolean
# File 'lib/logstash/outputs/elasticsearch/http_client/pool.rb', line 278

def elasticsearch?(url)
  begin
    response = perform_request_to_url(url, :get, ROOT_URI_PATH)
  rescue ::LogStash::Outputs::ElasticSearch::HttpClient::Pool::BadResponseCodeError => e
    return false if response.code == 401 || response.code == 403
    raise e
  end

  version_info = LogStash::Json.load(response.body)
  return false if version_info['version'].nil?

  version = ::Gem::Version.new(version_info["version"]['number'])
  return false if version < ::Gem::Version.new('6.0.0')

  if VERSION_6_TO_7.satisfied_by?(version)
    return valid_tagline?(version_info)
  elsif VERSION_7_TO_7_14.satisfied_by?(version)
    build_flavor = version_info["version"]['build_flavor']
    return false if build_flavor.nil? || build_flavor != 'default' || !valid_tagline?(version_info)
  else
    # case >= 7.14
    lower_headers = response.headers.transform_keys {|key| key.to_s.downcase }
    product_header = lower_headers['x-elastic-product']
    return false if product_header != 'Elasticsearch'
  end
  return true
rescue => e
  logger.error("Unable to retrieve Elasticsearch version", url: url.sanitized.to_s, exception: e.class, message: e.message)
  false
end
#empty_url_meta ⇒ Object
# File 'lib/logstash/outputs/elasticsearch/http_client/pool.rb', line 398

def empty_url_meta
  {
    :in_use => 0,
    :state => :unknown
  }
end
#get_connection ⇒ Object
# File 'lib/logstash/outputs/elasticsearch/http_client/pool.rb', line 444

def get_connection
  @state_mutex.synchronize do
    # The goal here is to pick a random connection from the least-in-use connections
    # We want some randomness so that we don't hit the same node over and over, but
    # we also want more 'fair' behavior in the event of high concurrency
    eligible_set = nil
    lowest_value_seen = nil
    @url_info.each do |url, meta|
      meta_in_use = meta[:in_use]
      next if meta[:state] == :dead

      if lowest_value_seen.nil? || meta_in_use < lowest_value_seen
        lowest_value_seen = meta_in_use
        eligible_set = [[url, meta]]
      elsif lowest_value_seen == meta_in_use
        eligible_set << [url, meta]
      end
    end

    return nil if eligible_set.nil?

    pick, pick_meta = eligible_set.sample
    pick_meta[:in_use] += 1

    [pick, pick_meta]
  end
end
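The same least-in-use selection, restated as a standalone sketch over sample data (the URLs and counters are invented; the real method works on @url_info under the state mutex):

  url_info = {
    "http://es1:9200" => { :in_use => 2, :state => :alive },
    "http://es2:9200" => { :in_use => 0, :state => :alive },
    "http://es3:9200" => { :in_use => 0, :state => :dead  },
  }

  eligible = url_info.reject { |_url, meta| meta[:state] == :dead }   # drop dead nodes
                     .group_by { |_url, meta| meta[:in_use] }         # bucket by in-use count
                     .min_by(&:first)                                 # keep the least-used bucket
                     .last

  pick, meta = eligible.sample   # random choice among the ties
  # here only "http://es2:9200" qualifies; es1 is busier and es3 is dead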
#get_es_version(url) ⇒ Object
# File 'lib/logstash/outputs/elasticsearch/http_client/pool.rb', line 479

def get_es_version(url)
  response = perform_request_to_url(url, :get, ROOT_URI_PATH)

  return nil unless (200..299).cover?(response.code)

  response = LogStash::Json.load(response.body)
  response.fetch('version', {})
end
#get_license(url) ⇒ Hash
Retrieve ES node license information
# File 'lib/logstash/outputs/elasticsearch/http_client/pool.rb', line 226

def get_license(url)
  response = perform_request_to_url(url, :get, LICENSE_PATH)
  LogStash::Json.load(response.body)
rescue => e
  logger.error("Unable to get license information", url: url.sanitized.to_s, exception: e.class, message: e.message)
  {}
end
#health_check_request(url) ⇒ Object
# File 'lib/logstash/outputs/elasticsearch/http_client/pool.rb', line 234

def health_check_request(url)
  response = perform_request_to_url(url, :head, @healthcheck_path)
  raise BadResponseCodeError.new(response.code, url, nil, response.body) unless (200..299).cover?(response.code)
end
#healthcheck!(register_phase = true) ⇒ Object
# File 'lib/logstash/outputs/elasticsearch/http_client/pool.rb', line 239

def healthcheck!(register_phase = true)
  # Try to keep locking granularity low such that we don't affect IO...
  @state_mutex.synchronize { @url_info.select {|url, meta| meta[:state] != :alive } }.each do |url, meta|
    begin
      logger.debug("Running health check to see if an Elasticsearch connection is working",
                   :healthcheck_url => url.sanitized.to_s, :path => @healthcheck_path)
      health_check_request(url)

      # when called from resurrectionist skip the product check done during register phase
      if register_phase
        if !elasticsearch?(url)
          raise LogStash::ConfigurationError, "Could not connect to a compatible version of Elasticsearch"
        end
      end
      # If no exception was raised it must have succeeded!
      logger.warn("Restored connection to ES instance", url: url.sanitized.to_s)

      # We reconnected to this node, check its ES version
      version_info = get_es_version(url)
      es_version = version_info.fetch('number', nil)
      build_flavour = version_info.fetch('build_flavor', nil)

      if es_version.nil?
        logger.warn("Failed to retrieve Elasticsearch version data from connected endpoint, connection aborted", :url => url.sanitized.to_s)
        next
      end

      @state_mutex.synchronize do
        meta[:version] = es_version
        set_last_es_version(es_version, url)
        set_build_flavour(build_flavour)

        alive = @license_checker.appropriate_license?(self, url)
        meta[:state] = alive ? :alive : :dead
      end
    rescue HostUnreachableError, BadResponseCodeError => e
      logger.warn("Attempted to resurrect connection to dead ES instance, but got an error", url: url.sanitized.to_s, exception: e.class, message: e.message)
    end
  end
end
#in_use_connections ⇒ Object
# File 'lib/logstash/outputs/elasticsearch/http_client/pool.rb', line 116

def in_use_connections
  @state_mutex.synchronize { @url_info.values.select {|v| v[:in_use] > 0 } }
end
#last_es_version ⇒ Object
# File 'lib/logstash/outputs/elasticsearch/http_client/pool.rb', line 488

def last_es_version
  @last_es_version.get
end
#major_version(version_string) ⇒ Object
# File 'lib/logstash/outputs/elasticsearch/http_client/pool.rb', line 187

def major_version(version_string)
  version_string.split('.').first.to_i
end
#mark_dead(url, error) ⇒ Object
# File 'lib/logstash/outputs/elasticsearch/http_client/pool.rb', line 424

def mark_dead(url, error)
  @state_mutex.synchronize do
    meta = @url_info[url]
    # In case a sniff happened removing the metadata just before there's nothing to mark
    # This is an extreme edge case, but it can happen!
    return unless meta

    logger.warn("Marking url as dead. Last error: [#{error.class}] #{error.message}",
                :url => url, :error_message => error.message, :error_class => error.class.name)

    meta[:state] = :dead
    meta[:last_error] = error
    meta[:last_errored_at] = Time.now
  end
end
#maximum_seen_major_version ⇒ Object
# File 'lib/logstash/outputs/elasticsearch/http_client/pool.rb', line 492

def maximum_seen_major_version
  @state_mutex.synchronize { @maximum_seen_major_version }
end
#normalize_url(uri) ⇒ Object
# File 'lib/logstash/outputs/elasticsearch/http_client/pool.rb', line 340

def normalize_url(uri)
  u = @url_normalizer.call(uri)
  if !u.is_a?(::LogStash::Util::SafeURI)
    raise "URL Normalizer returned a '#{u.class}' rather than a SafeURI! This shouldn't happen!"
  end
  u
end
#perform_request(method, path, params = {}, body = nil) ⇒ Object
# File 'lib/logstash/outputs/elasticsearch/http_client/pool.rb', line 322

def perform_request(method, path, params={}, body=nil)
  with_connection do |url, url_meta|
    resp = perform_request_to_url(url, method, path, params, body)
    [url, url_meta, resp]
  end
end
#perform_request_to_url(url, method, path, params = {}, body = nil) ⇒ Object
# File 'lib/logstash/outputs/elasticsearch/http_client/pool.rb', line 336

def perform_request_to_url(url, method, path, params={}, body=nil)
  @adapter.perform_request(url, method, path, params, body)
end
#remove_url(url) ⇒ Object
# File 'lib/logstash/outputs/elasticsearch/http_client/pool.rb', line 394

def remove_url(url)
  @url_info.delete(url)
end
#resurrectionist_alive? ⇒ Boolean
# File 'lib/logstash/outputs/elasticsearch/http_client/pool.rb', line 318

def resurrectionist_alive?
  @resurrectionist ? @resurrectionist.alive? : nil
end
#return_connection(url) ⇒ Object
# File 'lib/logstash/outputs/elasticsearch/http_client/pool.rb', line 472

def return_connection(url)
  @state_mutex.synchronize do
    info = @url_info[url]
    info[:in_use] -= 1 if info # Guard against the condition where the connection has already been deleted
  end
end
#serverless? ⇒ Boolean
# File 'lib/logstash/outputs/elasticsearch/http_client/pool.rb', line 496

def serverless?
  @build_flavour.get == BUILD_FLAVOUR_SERVERLESS
end
#size ⇒ Object
# File 'lib/logstash/outputs/elasticsearch/http_client/pool.rb', line 386

def size
  @state_mutex.synchronize { @url_info.size }
end
#sniff(nodes) ⇒ Object
# File 'lib/logstash/outputs/elasticsearch/http_client/pool.rb', line 191

def sniff(nodes)
  nodes.map do |id, info|
    # Skip master-only nodes
    next if info["roles"] && info["roles"] == ["master"]
    address_str_to_uri(info["http"]["publish_address"]) if info["http"]
  end.compact
end
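To see what #sniff consumes and skips, here is a trimmed-down version of the parsed nodes hash that _nodes/http returns (node ids and addresses are invented for illustration):

  nodes = {
    "node-a" => { "roles" => ["master"],
                  "http"  => { "publish_address" => "10.0.0.1:9200" } },
    "node-b" => { "roles" => ["data", "ingest"],
                  "http"  => { "publish_address" => "10.0.0.2:9200" } },
    "node-c" => { "roles" => ["data"] }   # no http section exposed
  }

  pool.sniff(nodes)
  # => [SafeURI for 10.0.0.2:9200]; node-a is master-only, node-c has no publish address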
#sniff! ⇒ Object
Sniffs the cluster then updates the internal URLs
# File 'lib/logstash/outputs/elasticsearch/http_client/pool.rb', line 167

def sniff!
  update_urls(check_sniff)
end
#sniffer_alive? ⇒ Boolean
# File 'lib/logstash/outputs/elasticsearch/http_client/pool.rb', line 211

def sniffer_alive?
  @sniffer ? @sniffer.alive? : nil
end
#start ⇒ Object
# File 'lib/logstash/outputs/elasticsearch/http_client/pool.rb', line 83

def start
  update_initial_urls
  start_resurrectionist
  start_sniffer if @sniffing
end
#start_resurrectionist ⇒ Object
# File 'lib/logstash/outputs/elasticsearch/http_client/pool.rb', line 215

def start_resurrectionist
  @resurrectionist = Thread.new do
    until_stopped("resurrection", @resurrect_delay) do
      healthcheck!(false)
    end
  end
end
#start_sniffer ⇒ Object
# File 'lib/logstash/outputs/elasticsearch/http_client/pool.rb', line 153

def start_sniffer
  @sniffer = Thread.new do
    until_stopped("sniffing", sniffer_delay) do
      begin
        sniff!
      rescue NoConnectionAvailableError => e
        @state_mutex.synchronize { # Synchronize around @url_info
          logger.warn("Elasticsearch output attempted to sniff for new connections but cannot. No living connections are detected. Pool contains the following current URLs", :url_info => @url_info)
        }
      end
    end
  end
end
#stop_resurrectionist ⇒ Object
# File 'lib/logstash/outputs/elasticsearch/http_client/pool.rb', line 314

def stop_resurrectionist
  @resurrectionist.join if @resurrectionist
end
#stop_sniffer ⇒ Object
# File 'lib/logstash/outputs/elasticsearch/http_client/pool.rb', line 207

def stop_sniffer
  @sniffer.join if @sniffer
end
#until_stopped(task_name, delay) ⇒ Object
# File 'lib/logstash/outputs/elasticsearch/http_client/pool.rb', line 132

def until_stopped(task_name, delay)
  last_done = Time.now
  until @state_mutex.synchronize { @stopping }
    begin
      now = Time.now
      if (now - last_done) >= delay
        last_done = now
        yield
      end
      sleep 1
    rescue => e
      logger.warn(
        "Error while performing #{task_name}",
        :error_message => e.message,
        :class => e.class.name,
        :backtrace => e.backtrace
      )
    end
  end
end
#update_initial_urls ⇒ Object
# File 'lib/logstash/outputs/elasticsearch/http_client/pool.rb', line 89

def update_initial_urls
  update_urls(@initial_urls)
end
#update_urls(new_urls) ⇒ Object
# File 'lib/logstash/outputs/elasticsearch/http_client/pool.rb', line 348

def update_urls(new_urls)
  return if new_urls.nil?

  # Normalize URLs
  new_urls = new_urls.map(&method(:normalize_url))

  # Used for logging nicely
  state_changes = {:removed => [], :added => []}
  @state_mutex.synchronize do
    # Add new connections
    new_urls.each do |url|
      # URI objects don't have real hash equality! So, since this isn't perf sensitive we do a linear scan
      unless @url_info.keys.include?(url)
        state_changes[:added] << url
        add_url(url)
      end
    end

    # Delete connections not in the new list
    @url_info.each do |url,_|
      unless new_urls.include?(url)
        state_changes[:removed] << url
        remove_url(url)
      end
    end
  end

  if state_changes[:removed].size > 0 || state_changes[:added].size > 0
    logger.info? && logger.info("Elasticsearch pool URLs updated", :changes => state_changes)
  end

  # Run an inline healthcheck anytime URLs are updated
  # This guarantees that during startup / post-startup
  # sniffing we don't have idle periods waiting for the
  # periodic sniffer to allow new hosts to come online
  healthcheck!
end
#url_info ⇒ Object
# File 'lib/logstash/outputs/elasticsearch/http_client/pool.rb', line 124

def url_info
  @state_mutex.synchronize { @url_info }
end
#url_meta(url) ⇒ Object
# File 'lib/logstash/outputs/elasticsearch/http_client/pool.rb', line 438

def url_meta(url)
  @state_mutex.synchronize do
    @url_info[url]
  end
end
#urls ⇒ Object
# File 'lib/logstash/outputs/elasticsearch/http_client/pool.rb', line 128

def urls
  url_info.keys
end
#valid_tagline?(version_info) ⇒ Boolean
# File 'lib/logstash/outputs/elasticsearch/http_client/pool.rb', line 309

def valid_tagline?(version_info)
  tagline = version_info['tagline']
  tagline == "You Know, for Search"
end
#wait_for_in_use_connections ⇒ Object
# File 'lib/logstash/outputs/elasticsearch/http_client/pool.rb', line 109

def wait_for_in_use_connections
  until in_use_connections.empty?
    logger.info "Blocked on shutdown to in use connections #{@state_mutex.synchronize {@url_info}}"
    sleep 1
  end
end
#with_connection ⇒ Object
# File 'lib/logstash/outputs/elasticsearch/http_client/pool.rb', line 405

def with_connection
  url, url_meta = get_connection

  # Custom error class used here so that users may retry attempts if they receive this error
  # should they choose to
  raise NoConnectionAvailableError, "No Available connections" unless url
  yield url, url_meta
rescue HostUnreachableError => e
  # Mark the connection as dead here since this is likely not transient
  mark_dead(url, e)
  raise e
rescue BadResponseCodeError => e
  # These aren't discarded from the pool because these are often very transient
  # errors
  raise e
ensure
  return_connection(url)
end
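The comment above notes that NoConnectionAvailableError exists so callers may retry if they choose. A sketch of one way a caller could do that through #perform_request, which wraps #with_connection (the one-second back-off is an arbitrary choice for illustration, not something the pool enforces):

  begin
    url, _meta, response = pool.perform_request(:get, "/_cluster/health")
  rescue LogStash::Outputs::ElasticSearch::HttpClient::Pool::NoConnectionAvailableError
    sleep 1   # give the resurrectionist a chance to revive a connection
    retry
  end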