Class: Embulk::Input::Zendesk::Plugin
- Inherits: InputPlugin
  - Object
  - InputPlugin
  - Embulk::Input::Zendesk::Plugin
- Defined in: lib/embulk/input/zendesk/plugin.rb
Class Method Summary
- .config_to_task(config) ⇒ Object
- .guess(config) ⇒ Object
- .resume(task, columns, count, &control) ⇒ Object
- .transaction(config, &control) ⇒ Object
Instance Method Summary
- #init ⇒ Object
- #run ⇒ Object
Class Method Details
.config_to_task(config) ⇒ Object
# File 'lib/embulk/input/zendesk/plugin.rb', line 84

def self.config_to_task(config)
  {
    login_url: config.param("login_url", :string),
    auth_method: config.param("auth_method", :string, default: "basic"),
    target: config.param("target", :string),
    username: config.param("username", :string, default: nil),
    password: config.param("password", :string, default: nil),
    token: config.param("token", :string, default: nil),
    access_token: config.param("access_token", :string, default: nil),
    start_time: config.param("start_time", :string, default: nil),
    retry_limit: config.param("retry_limit", :integer, default: 5),
    retry_initial_wait_sec: config.param("retry_initial_wait_sec", :integer, default: 4),
    incremental: config.param("incremental", :bool, default: true),
    schema: config.param(:columns, :array, default: []),
    includes: config.param(:includes, :array, default: []),
  }
end
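The defaults above behave like a keyed lookup with a fallback. The sketch below is illustrative only: a plain Ruby Hash and a lambda stand in for Embulk's ConfigSource (an assumption for this example; the real config.param also enforces the declared :string / :integer / :bool types), and the config values are hypothetical.

# Illustrative sketch, not the plugin's API: a Hash plus Hash#fetch emulates
# config.param's default handling.
config = {
  "login_url"   => "https://example.zendesk.com",
  "auth_method" => "token",
  "target"      => "tickets",
  "username"    => "agent@example.com",
  "token"       => "abc123",
}

param = ->(key, default = nil) { config.fetch(key, default) }

task = {
  login_url:   param.call("login_url"),
  auth_method: param.call("auth_method", "basic"),
  target:      param.call("target"),
  retry_limit: param.call("retry_limit", 5),    # key absent, so the default applies
  incremental: param.call("incremental", true), # key absent, so the default applies
}

p task[:retry_limit] # => 5
p task[:incremental] # => true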
.guess(config) ⇒ Object
# File 'lib/embulk/input/zendesk/plugin.rb', line 39

def self.guess(config)
  task = config_to_task(config)
  client = Client.new(task)
  client.validate_config

  records = []
  client.public_send(task[:target]) do |record|
    records << record
  end

  columns = Guess::SchemaGuess.from_hash_records(records).map do |column|
    hash = column.to_h
    hash.delete(:index)
    hash.delete(:format) unless hash[:format]

    # NOTE: Embulk 0.8.1 guesses Hash and Hashes in Array as string.
    # https://github.com/embulk/embulk/issues/379
    # This is a workaround for that.
    if records.any? {|r| [Array, Hash].include?(r[hash[:name]].class) }
      hash[:type] = :json
    end

    case hash[:name]
    when /_id$/
      # NOTE: sometimes *_id is guessed as a timestamp with format %d%m%Y (e.g. 21031998),
      # so all *_id columns should be type :string
      hash[:type] = :string
      hash.delete(:format) # present if the guessed type was :timestamp
    when "id"
      hash[:type] = :long
      hash.delete(:format) # present if the guessed type was :timestamp
    end

    hash
  end

  task[:includes].each do |ent|
    columns << {
      name: ent,
      type: :json
    }
  end

  return {"columns" => columns.compact}
end
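The column-name overrides are the interesting part of this method. The sketch below applies only those overrides to plain hashes; Guess::SchemaGuess itself is not invoked, and the guessed input rows are hypothetical.

# Illustrative sketch of the *_id / id overrides above, applied to plain hashes.
guessed = [
  { name: "id",           type: :timestamp, format: "%d%m%Y" },
  { name: "requester_id", type: :timestamp, format: "%d%m%Y" },
  { name: "subject",      type: :string },
]

columns = guessed.map do |hash|
  hash = hash.dup
  case hash[:name]
  when /_id$/
    hash[:type] = :string
    hash.delete(:format)
  when "id"
    hash[:type] = :long
    hash.delete(:format)
  end
  hash
end

p columns # "requester_id" becomes :string, "id" becomes :long, "subject" is untouched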
.resume(task, columns, count, &control) ⇒ Object
# File 'lib/embulk/input/zendesk/plugin.rb', line 28

def self.resume(task, columns, count, &control)
  task_reports = yield(task, columns, count)

  report = task_reports.first

  next_config_diff = {}
  if report[:start_time]
    next_config_diff[:start_time] = report[:start_time]
  end
  return next_config_diff
end
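In other words, the config diff only carries start_time forward when the first task report produced one. A minimal sketch of that bookkeeping, using a hypothetical helper name and sample reports:

# Illustrative sketch of how resume builds the config diff from the first
# task report (plain Ruby; no Embulk runtime involved).
def build_config_diff(task_reports)
  report = task_reports.first
  diff = {}
  diff[:start_time] = report[:start_time] if report[:start_time]
  diff
end

p build_config_diff([{ start_time: "2016-01-01 00:00:00+0000" }]) # diff contains :start_time
p build_config_diff([{}])                                         # => {}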
.transaction(config, &control) ⇒ Object
# File 'lib/embulk/input/zendesk/plugin.rb', line 9

def self.transaction(config, &control)
  task = config_to_task(config)
  client = Client.new(task)
  client.validate_config

  columns = task[:schema].map do |column|
    name = column["name"]
    type = column["type"].to_sym

    Column.new(nil, name, type, column["format"])
  end

  if task[:incremental] && !Client::AVAILABLE_INCREMENTAL_EXPORT.include?(task[:target])
    Embulk.logger.warn "target: #{task[:target]} doesn't support the incremental export API; the start_time option will be ignored"
  end

  resume(task, columns, 1, &control)
end
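The schema-to-columns mapping can be shown in isolation. The sketch below uses a Struct in place of Embulk's Column class, mirroring the four-argument call above (index, name, type, format); the Struct and the sample schema are assumptions for illustration only.

# Illustrative sketch: a Struct stands in for Embulk's Column class.
Column = Struct.new(:index, :name, :type, :format)

schema = [
  { "name" => "id",         "type" => "long" },
  { "name" => "created_at", "type" => "timestamp", "format" => "%Y-%m-%dT%H:%M:%S%z" },
]

columns = schema.map do |column|
  Column.new(nil, column["name"], column["type"].to_sym, column["format"])
end

p columns.map(&:type) # => [:long, :timestamp]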
Instance Method Details
#init ⇒ Object
# File 'lib/embulk/input/zendesk/plugin.rb', line 102

def init
  @start_time = Time.parse(task[:start_time]) if task[:start_time]
end
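Time.parse comes from the standard library's "time" extension (the require is assumed to happen elsewhere in the plugin) and accepts the "%Y-%m-%d %H:%M:%S%z" strings that #run writes into the task report, for example:

require "time"

p Time.parse("2016-01-01 00:00:00+0000").to_i # => 1451606400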
#run ⇒ Object
# File 'lib/embulk/input/zendesk/plugin.rb', line 106

def run
  method = task[:target]
  args = [preview?]

  if @start_time
    args << @start_time.to_i
  end

  mutex = Mutex.new
  fetching_start_at = Time.now
  last_data = client.public_send(method, *args) do |record|
    record = (record)
    values = extract_values(record)
    mutex.synchronize do
      page_builder.add(values)
    end

    break if preview? # NOTE: preview takes care of only 1 record; subresource fetching is slow.
  end
  page_builder.finish

  task_report = {}
  if task[:incremental]
    if last_data && last_data["end_time"]
      # NOTE: start_time is compared with ">=", not ">".
      # If we used end_time as the next start_time, we would fetch the last
      # record again; end_time + 1 is a workaround for that.
      next_start_time = Time.at(last_data["end_time"] + 1)
      task_report[:start_time] = next_start_time.strftime("%Y-%m-%d %H:%M:%S%z")
    else
      # Sometimes neither a record nor an end_time is fetched by the job, but we
      # should still generate start_time in config_diff.
      task_report[:start_time] = fetching_start_at
    end
  end

  task_report
end
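The incremental bookkeeping boils down to "next start_time = last end_time plus one second". A minimal sketch, assuming a hypothetical end_time value from Zendesk's incremental export API (the .utc call is added here only to make the printed value deterministic; the method above formats local time):

require "time"

last_end_time = 1_451_606_399 # hypothetical "end_time" in Unix epoch seconds
next_start_time = Time.at(last_end_time + 1).utc
p next_start_time.strftime("%Y-%m-%d %H:%M:%S%z") # => "2016-01-01 00:00:00+0000"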