Class: ThinkingData::TDBatchConsumer

Inherits:
Object
  • Object
show all
Defined in:
lib/thinkingdata-ruby/td_batch_consumer.rb

Overview

Buffers event data and uploads it to the TE server in batches over HTTP.

Constant Summary collapse

DEFAULT_LENGTH =

Default flush threshold: number of buffered events that triggers an upload.

20
MAX_LENGTH =
2000

Instance Method Summary collapse

Constructor Details

#initialize(server_url, app_id, max_buffer_length = DEFAULT_LENGTH) ⇒ TDBatchConsumer

Initializes the batch consumer: parses the server URL, stores the app id, enables compression by default, and caps the buffer length at MAX_LENGTH.



16
17
18
19
20
21
22
23
24
# File 'lib/thinkingdata-ruby/td_batch_consumer.rb', line 16

# Creates a batch consumer that buffers events and uploads them over HTTP.
#
# @param server_url [String] receiver URL of the TE server
# @param app_id [String] project app id attached to every request
# @param max_buffer_length [Integer] flush threshold, capped at MAX_LENGTH
def initialize(server_url, app_id, max_buffer_length = DEFAULT_LENGTH)
  uri = URI.parse(server_url)
  uri.path = '/sync_server'
  @server_uri = uri
  @app_id = app_id
  @compress = true
  @buffers = []
  # Never allow a buffer larger than the hard MAX_LENGTH cap.
  @max_length = max_buffer_length > MAX_LENGTH ? MAX_LENGTH : max_buffer_length
  TDLog.info("TDBatchConsumer init success. ServerUrl: #{server_url}, appId: #{app_id}")
end

Instance Method Details

#_set_compress(compress) ⇒ Object

Deprecated.

Please use {#set_compress} instead.

Enables or disables gzip compression of the HTTP request body.

Parameters:

  • compress (Boolean)

    compress or not



30
31
32
# File 'lib/thinkingdata-ruby/td_batch_consumer.rb', line 30

# @deprecated Use {#set_compress} instead.
#
# Enables or disables gzip compression of the HTTP request body.
#
# @param compress [Boolean] compress or not
def _set_compress(compress)
  # Delegate to the supported API so both entry points stay in sync.
  set_compress(compress)
end

#add(message) ⇒ Object



41
42
43
44
45
# File 'lib/thinkingdata-ruby/td_batch_consumer.rb', line 41

# Enqueues one event into the in-memory buffer. Once the buffer reaches
# the configured maximum length, all buffered events are flushed.
#
# @param message [Object] event data to enqueue (serialized to JSON on flush)
def add(message)
  TDLog.info("Enqueue data to buffer. buffer size: #{@buffers.length}, data: #{message}")
  @buffers.push(message)
  return if @buffers.length < @max_length
  flush
end

#closeObject



47
48
49
50
# File 'lib/thinkingdata-ruby/td_batch_consumer.rb', line 47

# Flushes any remaining buffered events, then logs that the consumer
# has been closed.
def close
  flush
  TDLog.info("TDBatchConsumer close.")
end

#flushObject



52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
# File 'lib/thinkingdata-ruby/td_batch_consumer.rb', line 52

# Sends all buffered events to the TE server, then clears the buffer.
#
# Events are sent in chunks of at most @max_length. Each chunk is
# serialized to JSON and, when @compress is enabled, gzip-compressed
# before upload.
#
# @raise [ConnectionError] if the HTTP request cannot be completed
# @raise [ServerError] if the server response is unparseable JSON, or
#   reports a missing/non-zero 'code' (any non-200 HTTP status leaves
#   the parsed result empty and therefore also raises)
def flush
  TDLog.info("TDBatchConsumer flush data.")
  @buffers.each_slice(@max_length) do |chunk|
    json = chunk.to_json
    if @compress
      # Gzip the JSON payload in memory. NOTE: the previous code called
      # StringIO.new("w"), seeding the buffer with the string "w" (a mode
      # string passed as content); it only worked because the gzip output
      # overwrote it from position 0. A fresh, empty StringIO is correct.
      wio = StringIO.new
      gzip_io = Zlib::GzipWriter.new(wio)
      gzip_io.write(json)
      gzip_io.close
      data = wio.string
    else
      data = json
    end
    compress_type = @compress ? 'gzip' : 'none'
    headers = {'Content-Type' => 'application/plaintext',
               'appid' => @app_id,
               'compress' => compress_type,
               'TE-Integration-Type' => 'Ruby',
               'TE-Integration-Version' => ThinkingData::VERSION,
               'TE-Integration-Count' => @buffers.count,
               'TA_Integration-Extra' => 'batch'}
    request = CaseSensitivePost.new(@server_uri.request_uri, headers)
    request.body = data

    TDLog.info("Send data, request: #{data}")
    begin
      response_code, response_body = _request(@server_uri, request)
      TDLog.info("Send data, response: #{response_body}")
    rescue => e
      raise ConnectionError.new("Could not connect to TE server, with error \"#{e.message}\".")
    end

    result = {}
    if response_code.to_i == 200
      begin
        result = JSON.parse(response_body.to_s)
      rescue JSON::JSONError
        raise ServerError.new("Could not interpret TE server response: '#{response_body}'")
      end
    end

    # Any non-200 status leaves result == {} so result['code'] is nil,
    # which also triggers this failure path.
    if result['code'] != 0
      raise ServerError.new("Could not write to TE, server responded with #{response_code} returning: '#{response_body}'")
    end
  end
  # Only reached when every chunk was accepted; a raise above keeps the
  # buffered events for a later retry.
  @buffers = []
end

#set_compress(compress) ⇒ Object

Enables or disables gzip compression of the HTTP request body.

Parameters:

  • compress (Boolean)

    compress or not



37
38
39
# File 'lib/thinkingdata-ruby/td_batch_consumer.rb', line 37

# Enables or disables gzip compression of the HTTP request body.
# Compression is enabled by default in #initialize.
#
# @param compress [Boolean] compress or not
def set_compress(compress)
  @compress = compress
end