Class: Fluent::Plugin::BigQueryLoadOutput
- Inherits: BigQueryBaseOutput
  - Object
  - Output
  - BigQueryBaseOutput
  - Fluent::Plugin::BigQueryLoadOutput
- Defined in: lib/fluent/plugin/out_bigquery_load.rb
Instance Method Summary
- #buffer ⇒ Object: Buffer section defaults.
- #configure(conf) ⇒ Object
- #format(tag, time, record) ⇒ Object: defined for the Fluent::Plugin::Output#implement? check.
- #prefer_delayed_commit ⇒ Object
- #start ⇒ Object
- #try_write(chunk) ⇒ Object
- #write(chunk) ⇒ Object
Methods inherited from BigQueryBaseOutput
#fetch_schema, #fetch_schema_target_table, #get_schema, #multi_workers_ready?, #request_timeout_sec, #time_partitioning_type, #writer
Instance Method Details
#buffer ⇒ Object
Buffer section defaults.
# File 'lib/fluent/plugin/out_bigquery_load.rb', line 25

config_section :buffer do
  config_set_default :@type, "file"
  config_set_default :flush_mode, :interval
  config_set_default :flush_interval, 3600 # 1h
  config_set_default :flush_thread_interval, 5
  config_set_default :flush_thread_burst_interval, 5
  config_set_default :chunk_limit_size, 1 * 1024 ** 3 # 1GB
  config_set_default :total_limit_size, 32 * 1024 ** 3 # 32GB
  config_set_default :delayed_commit_timeout, 1800 # 30m
end
#configure(conf) ⇒ Object
# File 'lib/fluent/plugin/out_bigquery_load.rb', line 37

def configure(conf)
  super

  placeholder_params = "project=#{@project}/dataset=#{@dataset}/table=#{@tablelist.join(",")}/fetch_schema_table=#{@fetch_schema_table}"
  placeholder_validate!(:bigquery_load, placeholder_params)
end
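placeholder_validate! rejects a configuration whose project/dataset/table/fetch_schema_table values contain placeholders that cannot be satisfied by the buffer configuration (chunk keys, tag, timekey). The snippet below is a hypothetical illustration of that kind of check, not the plugin's validator; the regular expression, sample values, and error message are assumptions.

# Hypothetical illustration only: a ${...} placeholder in the table name
# needs a matching buffer chunk key to be resolvable at flush time.
params = "project=my-project/dataset=logs/table=access_${tag}"
chunk_keys = ["tag"]

params.scan(/\$\{([^}]+)\}/).flatten.each do |key|
  raise "no chunk key for placeholder ${#{key}}" unless chunk_keys.include?(key)
end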
#format(tag, time, record) ⇒ Object
Defined for the Fluent::Plugin::Output#implement? check.
# File 'lib/fluent/plugin/out_bigquery_load.rb', line 60

def format(tag, time, record)
  super
end
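The body is a bare super; the override exists only so that #format is defined directly on this class rather than merely inherited, which is what Fluent::Plugin::Output#implement?-style feature detection looks at. A minimal standalone sketch of that idea (not the fluentd source):

# Minimal sketch: detection based on a class's *own* methods ignores
# methods that are only inherited.
class Base
  def format(tag, time, record)
    "formatted by Base"
  end
end

class Child < Base
  def format(tag, time, record)
    super # defined here only so it appears in Child.instance_methods(false)
  end
end

p Base.instance_methods(false).include?(:format)  # => true
p Child.instance_methods(false).include?(:format) # => true; false without the override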
#prefer_delayed_commit ⇒ Object
# File 'lib/fluent/plugin/out_bigquery_load.rb', line 55

def prefer_delayed_commit
  @use_delayed_commit
end
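Because both #write and #try_write are implemented, Fluentd consults this flag to choose between the synchronous and the delayed-commit flush path. A hypothetical sketch of that selection (the real decision happens inside Fluentd's Output, not in this plugin):

# Hypothetical illustration only.
def pick_flush_method(output)
  if output.prefer_delayed_commit
    output.method(:try_write) # async: the chunk is committed later, from the poll timer
  else
    output.method(:write)     # sync: blocks until the load job finishes
  end
end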
#start ⇒ Object
# File 'lib/fluent/plugin/out_bigquery_load.rb', line 44

def start
  super

  if prefer_delayed_commit
    @polling_targets = []
    @polling_mutex = Mutex.new
    log.debug("start load job polling")
    timer_execute(:polling_bigquery_load_job, @wait_job_interval, &method(:poll))
  end
end
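#poll itself is not shown on this page. The sketch below is a hypothetical reconstruction of the delayed-commit flow implied by #start and #try_write; names not used elsewhere on this page (for example job_reference.chunk_id) are assumptions.

# Hypothetical sketch, not the plugin's actual #poll implementation: drain the
# queued job references and report each finished load job back to Fluentd.
def poll
  pending = @polling_mutex.synchronize { @polling_targets.dup }
  pending.each do |job_reference|
    response = writer.fetch_load_job(job_reference)
    next unless response # load job still running; check again on the next tick

    writer.commit_load_job(job_reference.chunk_id_hex, response)
    # commit_write is Fluentd's delayed-commit API; assumes the job reference
    # also carries the raw chunk id.
    commit_write(job_reference.chunk_id)
    @polling_mutex.synchronize { @polling_targets.delete(job_reference) }
  end
end

If a load job does not finish before delayed_commit_timeout (1800 seconds by default, see #buffer), Fluentd rolls the chunk back and flushes it again.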
#try_write(chunk) ⇒ Object
# File 'lib/fluent/plugin/out_bigquery_load.rb', line 98

def try_write(chunk)
  job_reference = do_write(chunk)
  @polling_mutex.synchronize do
    @polling_targets << job_reference
  end
rescue Fluent::BigQuery::Error => e
  raise if e.retryable?

  @retry_mutex.synchronize do
    if @secondary
      # TODO: find better way
      # Reset the retry state so that, with secondary_threshold set to
      # Float::EPSILON, the chunk is handed to the secondary output on the
      # next attempt instead of being retried here.
      @retry = retry_state_create(
        :output_retries, @buffer_config.retry_type, @buffer_config.retry_wait, @buffer_config.retry_timeout,
        forever: false, max_steps: @buffer_config.retry_max_times,
        backoff_base: @buffer_config.retry_exponential_backoff_base,
        max_interval: @buffer_config.retry_max_interval,
        secondary: true, secondary_threshold: Float::EPSILON,
        randomize: @buffer_config.retry_randomize
      )
    else
      # No secondary output: max_steps 0 exhausts retries immediately for this
      # non-retryable error.
      @retry = retry_state_create(
        :output_retries, @buffer_config.retry_type, @buffer_config.retry_wait, @buffer_config.retry_timeout,
        forever: false, max_steps: 0,
        backoff_base: @buffer_config.retry_exponential_backoff_base,
        max_interval: @buffer_config.retry_max_interval,
        randomize: @buffer_config.retry_randomize
      )
    end
  end

  raise
end
#write(chunk) ⇒ Object
# File 'lib/fluent/plugin/out_bigquery_load.rb', line 64

def write(chunk)
  job_reference = do_write(chunk)

  until response = writer.fetch_load_job(job_reference)
    sleep @wait_job_interval
  end

  writer.commit_load_job(job_reference.chunk_id_hex, response)
rescue Fluent::BigQuery::Error => e
  raise if e.retryable?

  @retry_mutex.synchronize do
    if @secondary
      # TODO: find better way
      # Same non-retryable-error handling as #try_write: route the chunk to
      # the secondary output on the next attempt.
      @retry = retry_state_create(
        :output_retries, @buffer_config.retry_type, @buffer_config.retry_wait, @buffer_config.retry_timeout,
        forever: false, max_steps: @buffer_config.retry_max_times,
        backoff_base: @buffer_config.retry_exponential_backoff_base,
        max_interval: @buffer_config.retry_max_interval,
        secondary: true, secondary_threshold: Float::EPSILON,
        randomize: @buffer_config.retry_randomize
      )
    else
      # No secondary output configured: fail the chunk immediately (max_steps 0).
      @retry = retry_state_create(
        :output_retries, @buffer_config.retry_type, @buffer_config.retry_wait, @buffer_config.retry_timeout,
        forever: false, max_steps: 0,
        backoff_base: @buffer_config.retry_exponential_backoff_base,
        max_interval: @buffer_config.retry_max_interval,
        randomize: @buffer_config.retry_randomize
      )
    end
  end

  raise
end