Class: Fluent::SolrOutput

Inherits: BufferedOutput
Includes: SetTagKeyMixin, SetTimeKeyMixin
Defined in: lib/fluent/plugin/out_solr.rb

Constant Summary

DEFAULT_COLLECTION = 'collection1'
DEFAULT_IGNORE_UNDEFINED_FIELDS = false
DEFAULT_STRING_FIELD_VALUE_MAX_LENGTH = -1
DEFAULT_TAG_FIELD = 'tag'
DEFAULT_TIMESTAMP_FIELD = 'event_timestamp'
DEFAULT_FLUSH_SIZE = 100
DEFAULT_COMMIT_WITH_FLUSH = true
MODE_STANDALONE = 'Standalone'
MODE_SOLRCLOUD = 'SolrCloud'

Instance Method Summary

Constructor Details

#initialize ⇒ SolrOutput

Returns a new instance of SolrOutput.



# File 'lib/fluent/plugin/out_solr.rb', line 55

def initialize
  super
end

Instance Method Details

#configure(conf) ⇒ Object



# File 'lib/fluent/plugin/out_solr.rb', line 59

def configure(conf)
  super
end

#format(tag, time, record) ⇒ Object



# File 'lib/fluent/plugin/out_solr.rb', line 93

def format(tag, time, record)
  [tag, time, record].to_msgpack
end
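
For reference, a minimal sketch of how a buffered entry produced by #format could be unpacked, assuming the msgpack gem (which Fluentd itself depends on); the tag and record values are made up:

require 'msgpack'

# Pack a (tag, time, record) triple the way #format does.
packed = ['app.access', Time.now.to_i, { 'message' => 'GET /index.html' }].to_msgpack

# A buffer chunk concatenates such triples; unpacking one yields the triple back.
tag, time, record = MessagePack.unpack(packed)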

#get_fields ⇒ Object



# File 'lib/fluent/plugin/out_solr.rb', line 196

def get_fields
  response = nil

  if @mode == MODE_STANDALONE then
    response = @solr.get 'schema/fields'
  elsif @mode == MODE_SOLRCLOUD then
    response = @solr.get 'schema/fields', collection: @collection
  end

  fields = []
  response['fields'].each do |field|
    fields.push(field['name'])
  end
  log.debug "Fields: #{fields}"

  return fields

  rescue Exception => e
    log.warn "An error occurred while getting fields: #{e.message}".slice(0, 1024)
end
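
A hedged sketch of the underlying Schema API call in standalone mode, assuming a Solr core reachable at the hypothetical URL http://localhost:8983/solr/collection1:

require 'rsolr'

# Standalone mode: the client is bound to a single core/collection URL.
solr = RSolr.connect :url => 'http://localhost:8983/solr/collection1'

# GET .../schema/fields returns a hash whose 'fields' entry lists the defined fields.
response = solr.get 'schema/fields'
field_names = response['fields'].map { |field| field['name'] }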

#get_unique_key ⇒ Object



# File 'lib/fluent/plugin/out_solr.rb', line 178

def get_unique_key
  response = nil

  if @mode == MODE_STANDALONE then
    response = @solr.get 'schema/uniquekey'
  elsif @mode == MODE_SOLRCLOUD then
    response = @solr.get 'schema/uniquekey', collection: @collection
  end

  unique_key = response['uniqueKey']
  log.debug "Unique key: #{unique_key}"

  return unique_key

  rescue Exception => e
    log.warn "An error occurred while getting unique key: #{e.message}".slice(0, 1024)
end
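
The uniquekey endpoint works the same way; a minimal sketch against the same hypothetical standalone core:

require 'rsolr'

solr = RSolr.connect :url => 'http://localhost:8983/solr/collection1'

# The response looks roughly like { 'responseHeader' => {...}, 'uniqueKey' => 'id' }.
unique_key = solr.get('schema/uniquekey')['uniqueKey']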

#shutdown ⇒ Object



# File 'lib/fluent/plugin/out_solr.rb', line 85

def shutdown
  super

  unless @zk.nil? then
    @zk.close
  end
end

#start ⇒ Object



# File 'lib/fluent/plugin/out_solr.rb', line 63

def start
  super

  # Pick the mode from the configuration: a url means Standalone, a zk_host means SolrCloud.
  @mode = nil
  if ! @url.nil? then
    @mode = MODE_STANDALONE
  elsif ! @zk_host.nil?
    @mode = MODE_SOLRCLOUD
  end

  @solr = nil
  @zk = nil

  if @mode == MODE_STANDALONE then
    @solr = RSolr.connect :url => @url
  elsif @mode == MODE_SOLRCLOUD then
    @zk = ZK.new(@zk_host)
    cloud_connection = RSolr::Cloud::Connection.new(@zk)
    @solr = RSolr::Client.new(cloud_connection, read_timeout: 60, open_timeout: 60)
  end
end
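
A minimal sketch of the SolrCloud branch, assuming the zk, rsolr and rsolr-cloud gems and a ZooKeeper ensemble at the hypothetical address localhost:2181:

require 'zk'
require 'rsolr'
require 'rsolr/cloud'

# Discover live Solr nodes and collection state via ZooKeeper.
zk   = ZK.new('localhost:2181')
conn = RSolr::Cloud::Connection.new(zk)
solr = RSolr::Client.new(conn, read_timeout: 60, open_timeout: 60)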

#update(documents) ⇒ Object



# File 'lib/fluent/plugin/out_solr.rb', line 166

def update(documents)
  if @mode == MODE_STANDALONE then
    @solr.add documents, :params => {:commit => @commit_with_flush}
    log.debug "Added #{documents.count} document(s) to Solr"
  elsif @mode == MODE_SOLRCLOUD then
    @solr.add documents, collection: @collection, :params => {:commit => @commit_with_flush}
    log.debug "Added #{documents.count} document(s) to Solr"
  end
  rescue Exception => e
    log.warn "An error occurred while indexing: #{e.message}".slice(0, 1024)
end
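
For reference, a hedged sketch of the standalone add call; the connection URL and document are illustrative:

require 'rsolr'

solr = RSolr.connect :url => 'http://localhost:8983/solr/collection1'
documents = [{ 'id' => '1', 'tag' => 'app.access', 'event_timestamp' => '2000-01-01T00:00:00Z' }]

# commit=true is sent as a request parameter, mirroring commit_with_flush.
solr.add documents, :params => { :commit => true }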

#write(chunk) ⇒ Object



# File 'lib/fluent/plugin/out_solr.rb', line 97

def write(chunk)
  documents = []

  @fields = @defined_fields.nil? ? get_fields : @defined_fields
  @unique_key = @unique_key_field.nil? ? get_unique_key : @unique_key_field

  chunk.msgpack_each do |tag, time, record|
    # Make sure every document carries the schema's unique key; generate a UUID when it is missing.
    unless record.has_key?(@unique_key) then
      record.merge!({@unique_key => SecureRandom.uuid})
    end

    unless record.has_key?(@tag_field) then
      record.merge!({@tag_field => tag})
    end

    # Normalize the timestamp to ISO 8601 UTC; fall back to the event time if parsing fails.
    if record.has_key?(@timestamp_field) then
      begin
        event_timestamp_dt = DateTime.strptime(record[@timestamp_field], "%d/%b/%Y:%H:%M:%S %z").to_s
        record.merge!({@timestamp_field => Time.parse(event_timestamp_dt.to_s).utc.strftime('%FT%TZ')})
      rescue
        record.merge!({@timestamp_field => Time.at(time).utc.strftime('%FT%TZ')})
      end
    else
      record.merge!({@timestamp_field => Time.at(time).utc.strftime('%FT%TZ')})
    end

    # Optionally drop keys that are not defined in the Solr schema.
    if @ignore_undefined_fields then
      record.each_key do |key|
        unless @fields.include?(key) then
          record.delete(key)
        end
      end
    end

    # Truncate string values (including string elements of array values) that exceed the configured maximum length.
    if @string_field_value_max_length >= 0 then
      record.each_key do |key|
        if record[key].instance_of?(Array) then
          values = []
          record[key].each do |value|
            if value.instance_of?(String) then
              if value.length > @string_field_value_max_length then
                log.warn "#{key} is too long (#{value.length}, max is #{@string_field_value_max_length})."
                values.push(value.slice(0, @string_field_value_max_length))
              else
                values.push(value)
              end
            end
          end
          record[key] = values
        elsif record[key].instance_of?(String) then
          if record[key].length > @string_field_value_max_length then
            log.warn "#{key} is too long (#{record[key].length}, max is #{@string_field_value_max_length})."
            record[key] = record[key].slice(0, @string_field_value_max_length)
          end
        end
      end
    end

    documents << record

    if documents.count >= @flush_size
      update documents
      documents.clear
    end
  end

  update documents unless documents.empty?
end
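
The timestamp handling in #write first tries the Apache access-log format and otherwise falls back to the event time. A small standalone sketch of that conversion, with a made-up log value:

require 'date'
require 'time'

raw = '10/Oct/2000:13:55:36 -0700'  # Apache access-log style timestamp

parsed = DateTime.strptime(raw, '%d/%b/%Y:%H:%M:%S %z')
iso    = Time.parse(parsed.to_s).utc.strftime('%FT%TZ')
# => "2000-10-10T20:55:36Z", the ISO 8601 UTC form expected by Solr date fields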