Class: Fluent::SolrOutput

Inherits:
BufferedOutput < Object
Includes:
SetTagKeyMixin, SetTimeKeyMixin
Defined in:
lib/fluent/plugin/out_solr.rb

Constant Summary collapse

DEFAULT_COLLECTION =
'collection1'
DEFAULT_IGNORE_UNDEFINED_FIELDS =
false
DEFAULT_STRING_FIELD_VALUE_MAX_LENGTH =
-1
DEFAULT_TAG_FIELD =
'tag'
DEFAULT_TIME_FIELD =
'time'
DEFAULT_TIME_FORMAT =
'%FT%TZ'
DEFAULT_TIME_OUTPUT_FORMAT =
'%FT%TZ'
DEFAULT_FLUSH_SIZE =
100
DEFAULT_COMMIT_WITH_FLUSH =
true
MODE_STANDALONE =
'Standalone'
MODE_SOLRCLOUD =
'SolrCloud'

Instance Method Summary collapse

Constructor Details

#initialize ⇒ SolrOutput

Returns a new instance of SolrOutput.



61
62
63
# File 'lib/fluent/plugin/out_solr.rb', line 61

def initialize
  super
end

Instance Method Details

#configure(conf) ⇒ Object



65
66
67
# File 'lib/fluent/plugin/out_solr.rb', line 65

def configure(conf)
  super
end

#format(tag, time, record) ⇒ Object



99
100
101
# File 'lib/fluent/plugin/out_solr.rb', line 99

def format(tag, time, record)
  [tag, time, record].to_msgpack
end

#get_fields ⇒ Object



212
213
214
215
216
217
218
219
220
221
222
223
224
225
226
227
228
229
230
231
# File 'lib/fluent/plugin/out_solr.rb', line 212

def get_fields
  response = nil

  if @mode == MODE_STANDALONE then
    response = @solr.get 'schema/fields'
  elsif @mode == MODE_SOLRCLOUD then
    response = @solr.get 'schema/fields', collection: @collection
  end

  fields = []
  response['fields'].each do |field|
    fields.push(field['name'])
  end
  log.debug "Fields: #{fields}"

  return fields

  rescue Exception => e
    log.warn "An error occurred while getting fields"
end

#get_unique_key ⇒ Object



194
195
196
197
198
199
200
201
202
203
204
205
206
207
208
209
210
# File 'lib/fluent/plugin/out_solr.rb', line 194

def get_unique_key
  response = nil

  if @mode == MODE_STANDALONE then
    response = @solr.get 'schema/uniquekey'
  elsif @mode == MODE_SOLRCLOUD then
    response = @solr.get 'schema/uniquekey', collection: @collection
  end

  unique_key = response['uniqueKey']
  log.debug "Unique key: #{unique_key}"

  return unique_key

  rescue Exception => e
    log.warn "An error occurred while getting unique key"
end

#shutdown ⇒ Object



91
92
93
94
95
96
97
# File 'lib/fluent/plugin/out_solr.rb', line 91

def shutdown
  super

  unless @zk.nil? then
    @zk.close
  end
end

#start ⇒ Object



69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
# File 'lib/fluent/plugin/out_solr.rb', line 69

def start
  super

  @mode = nil
  if ! @url.nil? then
    @mode = MODE_STANDALONE
  elsif ! @zk_host.nil?
    @mode = MODE_SOLRCLOUD
  end

  @solr = nil
  @zk = nil

  if @mode == MODE_STANDALONE then
    @solr = RSolr.connect :url => @url
  elsif @mode == MODE_SOLRCLOUD then
    @zk = ZK.new(@zk_host)
    cloud_connection = RSolr::Cloud::Connection.new(@zk)
    @solr = RSolr::Client.new(cloud_connection, read_timeout: 60, open_timeout: 60)
  end
end

#update(documents) ⇒ Object



182
183
184
185
186
187
188
189
190
191
192
# File 'lib/fluent/plugin/out_solr.rb', line 182

def update(documents)
  if @mode == MODE_STANDALONE then
    @solr.add documents, :params => {:commit => @commit_with_flush}
    log.debug "Added #{documents.count} document(s) to Solr"
  elsif @mode == MODE_SOLRCLOUD then
    @solr.add documents, collection: @collection, :params => {:commit => @commit_with_flush}
    log.debug "Added #{documents.count} document(s) to Solr"
  end
  rescue Exception => e
    log.warn "An error occurred while indexing"
end

#write(chunk) ⇒ Object



103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
122
123
124
125
126
127
128
129
130
131
132
133
134
135
136
137
138
139
140
141
142
143
144
145
146
147
148
149
150
151
152
153
154
155
156
157
158
159
160
161
162
163
164
165
166
167
168
169
170
171
172
173
174
175
176
177
178
179
180
# File 'lib/fluent/plugin/out_solr.rb', line 103

def write(chunk)
  documents = []

  @fields = @defined_fields.nil? ? get_fields : @defined_fields
  @unique_key = @unique_key_field.nil? ? get_unique_key : @unique_key_field

  chunk.msgpack_each do |tag, time, record|
    unless record.has_key?(@unique_key) then
      record.merge!({@unique_key => SecureRandom.uuid})
    end

    unless record.has_key?(@tag_field) then
      record.merge!({@tag_field => tag})
    end

    if record.has_key?(@time_field) then
      begin
        event_timestamp_dt = Time.strptime(record[@time_field], @time_format).to_s
        record.merge!({@time_field => Time.parse(event_timestamp_dt.to_s).utc.strftime(@time_output_format)})
      rescue
        record.merge!({@time_field => Time.at(time).utc.strftime(@time_output_format)})
      end
    else
      record.merge!({@time_field => Time.at(time).utc.strftime(@time_output_format)})
    end

    if @ignore_undefined_fields then
      record.each_key do |key|
        unless @fields.include?(key) then
          record.delete(key)
        end
      end
    end

    if @string_field_value_max_length >= 0 then
      record.each_key do |key|
        if record[key].instance_of?(Array) then
          values = []
          record[key].each do |value|
            if value.instance_of?(String) then
              if value.length > @string_field_value_max_length then
                log.warn "#{key} is too long (#{value.length}, max is #{@string_field_value_max_length})."
                values.push(value.slice(0, @string_field_value_max_length))
              else
                values.push(value)
              end
            end
          end
          record[key] = values
        elsif record[key].instance_of?(String) then
          if record[key].length > @string_field_value_max_length then
            log.warn "#{key} is too long (#{record[key].length}, max is #{@string_field_value_max_length})."
            record[key] = record[key].slice(0, @string_field_value_max_length)
          end
        end
      end
    end

    #
    # delete reserved fields
    # https://cwiki.apache.org/confluence/display/solr/Defining+Fields
    #
    record.each_key do |key|
      if key[0] == '_' and key[-1] == '_' then
        record.delete(key)
      end
    end

    documents << record

    if documents.count >= @flush_size
      update documents
      documents.clear
    end
  end

  update documents unless documents.empty?
end