Class: Sequent::Core::RecordSessions::ReplayEventsSession

Inherits:
Object
Defined in:
lib/sequent/core/record_sessions/replay_events_session.rb

Overview

Session objects are used to update view state

The ReplayEventsSession is optimized for bulk loading records in a Postgres database using CSV import.

After a lot of experimenting this turned out to be the fastest way to do bulk inserts into the database. You can tweak the number of records collected in the CSV before it is flushed to the database via insert_with_csv_size to gain (or lose) speed.

It is highly recommended to create indices on the in-memory record_store to speed up processing. By default all records are indexed by aggregate_id if they have such a property.

Example:

class InvoiceEventHandler < Sequent::Core::BaseEventHandler
  on RecipientMovedEvent do |event|
    update_all_records InvoiceRecord, recipient_id: event.recipient.aggregate_id do |record|
      record.recipient_street = event.recipient.street
    end
  end
end

In this case it is wise to create an index on InvoiceRecord on recipient_id, just as you would in the database.

Example:

ReplayEventsSession.new(
  50,
  {InvoiceRecord => [[:recipient_id]]}
)

Defined Under Namespace

Modules: InitStruct

Instance Attribute Summary

Class Method Summary

Instance Method Summary

Constructor Details

#initialize(insert_with_csv_size = 50, indices = {}) ⇒ ReplayEventsSession

insert_with_csv_size: the number of records to insert in a single batch.

indices: Hash of in-memory indices to create. Greatly speeds up replaying.

The key is the 'Record' class to index.
The value is a list of column lists to index on, e.g. [[:first_index_column], [:another_index, :with_two_columns]]


# File 'lib/sequent/core/record_sessions/replay_events_session.rb', line 60

def initialize(insert_with_csv_size = 50, indices = {})
  @insert_with_csv_size = insert_with_csv_size
  @record_store = Hash.new { |h, k| h[k] = Set.new }
  @record_index = {}
  @indices = indices
end
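
A minimal construction sketch, reusing the InvoiceRecord view model from the overview (the batch size of 1000 is an arbitrary tuning choice):

session = ReplayEventsSession.new(
  1000,                                 # flush to Postgres in batches of 1000 rows
  {InvoiceRecord => [[:recipient_id]]}  # in-memory index on recipient_id
)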

Instance Attribute Details

#insert_with_csv_size ⇒ Object

Returns the value of attribute insert_with_csv_size.



# File 'lib/sequent/core/record_sessions/replay_events_session.rb', line 40

def insert_with_csv_size
  @insert_with_csv_size
end

#record_store ⇒ Object (readonly)

Returns the value of attribute record_store.



# File 'lib/sequent/core/record_sessions/replay_events_session.rb', line 39

def record_store
  @record_store
end

Class Method Details

.struct_cache ⇒ Object



# File 'lib/sequent/core/record_sessions/replay_events_session.rb', line 42

def self.struct_cache
  @struct_cache ||= {}
end

Instance Method Details

#clear ⇒ Object



# File 'lib/sequent/core/record_sessions/replay_events_session.rb', line 274

def clear
  @record_store.clear
  @record_index.clear
end

#commit ⇒ Object



# File 'lib/sequent/core/record_sessions/replay_events_session.rb', line 208

def commit
  begin
    @record_store.each do |clazz, records|
      if records.size > @insert_with_csv_size
        csv = CSV.new("")
        column_names = clazz.column_names.reject { |name| name == "id" }
        records.each do |obj|
          begin
            csv << column_names.map do |column_name|
              obj[column_name]
            end
          end
        end

        buf = ''
        conn = ActiveRecord::Base.connection.raw_connection
        copy_data = StringIO.new csv.string
        conn.transaction do
          conn.exec("COPY #{clazz.table_name} (#{column_names.join(",")}) FROM STDIN WITH csv")
          begin
            while copy_data.read(1024, buf)
              ### Uncomment this to test error-handling for exceptions from the reader side:
              # raise Errno::ECONNRESET, "socket closed while reading"
              until conn.put_copy_data(buf)
                sleep 0.1
              end
            end
          rescue Errno => err
            errmsg = "%s while reading copy data: %s" % [err.class.name, err.message]
            conn.put_copy_end(errmsg)
          ensure
            conn.put_copy_end
            copy_data.close
            while res = conn.get_result
              status = res.res_status(res.result_status)
              if status != "PGRES_COMMAND_OK"
                raise "Postgres copy command failed: #{status}, #{res.error_message}"
              end
            end
          end
        end

      else

        clazz.unscoped do
          inserts = []
          column_names = clazz.column_names.reject { |name| name == "id" }
          prepared_values = (1..column_names.size).map { |i| "$#{i}" }.join(",")
          records.each do |r|
            values = column_names.map { |name| r[name.to_sym] }
            inserts << values
          end
          sql = %Q{insert into #{clazz.table_name} (#{column_names.join(",")}) values (#{prepared_values})}
          inserts.each do |insert|
            clazz.connection.raw_connection.async_exec(sql, insert)
          end
        end
      end
    end


  ensure
    clear
  end
end
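
A usage sketch, assuming InvoiceRecord is an ActiveRecord model backed by a Postgres table: records exist only in memory until commit is called, which writes them out (COPY for batches larger than insert_with_csv_size, prepared inserts otherwise) and then clears the session.

session = ReplayEventsSession.new(50)
session.create_record(InvoiceRecord, aggregate_id: "invoice-1", recipient_id: "recipient-1")
session.create_record(InvoiceRecord, aggregate_id: "invoice-2", recipient_id: "recipient-1")
session.commit   # flushes to Postgres and clears the in-memory store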

#create_or_update_record(record_class, values, created_at = Time.now) {|record| ... } ⇒ Object

Yields:

  • (record)


# File 'lib/sequent/core/record_sessions/replay_events_session.rb', line 125

def create_or_update_record(record_class, values, created_at = Time.now)
  record = get_record(record_class, values)
  unless record
    record = create_record(record_class, values.merge(created_at: created_at))
  end
  yield record if block_given?
  record
end
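
A hedged sketch, assuming an event that carries the invoice's aggregate_id and a recipient (as in the overview's RecipientMovedEvent). The values hash doubles as the lookup clause, so the record is only created when no matching InvoiceRecord exists yet.

record = session.create_or_update_record(
  InvoiceRecord,
  {aggregate_id: event.aggregate_id},
  event.created_at
) do |record|
  record.recipient_id = event.recipient.aggregate_id
end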

#create_record(record_class, values) {|record| ... } ⇒ Object

Yields:

  • (record)


# File 'lib/sequent/core/record_sessions/replay_events_session.rb', line 76

def create_record(record_class, values)
  column_names = record_class.column_names
  values.merge!(updated_at: values[:created_at]) if column_names.include?("updated_at")
  struct_class_name = "#{record_class.to_s}Struct"
  if self.class.struct_cache.has_key?(struct_class_name)
    struct_class = self.class.struct_cache[struct_class_name]
  else

    # We create a struct on the fly.
    # Since the replay happens in memory we implement the ==, eql? and hash methods
    # to point to the same object. A record is the same if and only if they point to
    # the same object. These methods are necessary since we use Set instead of [].
    class_def = <<-EOD
      #{struct_class_name} = Struct.new(*#{column_names.map(&:to_sym)})
      class #{struct_class_name}
        include InitStruct
        def ==(other)
          return true if self.equal?(other)
          super
        end
        def eql?(other)
          self == other
        end
        def hash
          self.object_id.hash
        end
      end
    EOD
    eval("#{class_def}")
    struct_class = ReplayEventsSession.const_get(struct_class_name)
    self.class.struct_cache[struct_class_name] = struct_class
  end
  record = struct_class.new.set_values(values)

  yield record if block_given?
  @record_store[record_class] << record
  if record.respond_to? :aggregate_id
    @record_index[[record_class, record.aggregate_id]] = record
  end

  if indexed?(record_class)
    do_with_cache_keys(record_class, record) do |key|
      @record_index[key] = [] unless @record_index.has_key?(key)
      @record_index[key] << record
    end
  end
  record
end
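
A hedged usage sketch, again assuming the RecipientMovedEvent and InvoiceRecord from the overview; the returned record is an in-memory struct, not an ActiveRecord instance, so it only reaches the database on commit.

record = session.create_record(
  InvoiceRecord,
  aggregate_id: "invoice-1",
  recipient_id: event.recipient.aggregate_id,
  created_at: event.created_at
)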

#delete_all_records(record_class, where_clause) ⇒ Object



# File 'lib/sequent/core/record_sessions/replay_events_session.rb', line 145

def delete_all_records(record_class, where_clause)
  find_records(record_class, where_clause).each do |record|
    delete_record(record_class, record)
  end
end
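
For example, to drop every invoice projection for a recipient from the in-memory store (column names as in the overview):

session.delete_all_records(InvoiceRecord, recipient_id: event.recipient.aggregate_id)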

#delete_record(record_class, record) ⇒ Object



# File 'lib/sequent/core/record_sessions/replay_events_session.rb', line 151

def delete_record(record_class, record)
  @record_store[record_class].delete(record)
  if indexed?(record_class)
    do_with_cache_keys(record_class, record) do |key|
      @record_index[key].delete(record) if @record_index.has_key?(key)
    end
  end
end

#do_with_record(record_class, where_clause) {|record| ... } ⇒ Object

Yields:

  • (record)


# File 'lib/sequent/core/record_sessions/replay_events_session.rb', line 175

def do_with_record(record_class, where_clause)
  record = get_record!(record_class, where_clause)
  yield record
end

#do_with_records(record_class, where_clause) ⇒ Object



# File 'lib/sequent/core/record_sessions/replay_events_session.rb', line 168

def do_with_records(record_class, where_clause)
  records = find_records(record_class, where_clause)
  records.each do |record|
    yield record
  end
end

#find_records(record_class, where_clause) ⇒ Object



# File 'lib/sequent/core/record_sessions/replay_events_session.rb', line 180

def find_records(record_class, where_clause)
  if where_clause.has_key? :aggregate_id and where_clause.size == 1
    [@record_index[[record_class, where_clause[:aggregate_id]]]].compact
  elsif use_index?(record_class, where_clause)
    values = get_index(record_class, where_clause).map { |field| where_clause[field] }
    @record_index[[record_class, *values]] || []
  else
    @record_store[record_class].select do |record|
      where_clause.all? do |k, v|
        expected_value = v.kind_of?(Symbol) ? v.to_s : v
        actual_value = record[k.to_sym]
        actual_value = actual_value.to_s if actual_value.kind_of? Symbol
        if expected_value.kind_of?(Array)
          expected_value.include?(actual_value)
        else
          actual_value == expected_value
        end
      end

    end
  end.dup
end
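
A few hedged lookups against the indices configured in the constructor example (the status column in the last call is hypothetical): an aggregate_id-only clause or a clause covered by a configured index is answered from @record_index; any other clause scans every in-memory record of that class.

# Served by the built-in aggregate_id index:
session.find_records(InvoiceRecord, aggregate_id: "invoice-1")

# Served by the configured [:recipient_id] index:
session.find_records(InvoiceRecord, recipient_id: "recipient-1")

# Falls back to a full scan of the in-memory InvoiceRecord set:
session.find_records(InvoiceRecord, recipient_id: "recipient-1", status: :open)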

#get_record(record_class, where_clause) ⇒ Object



# File 'lib/sequent/core/record_sessions/replay_events_session.rb', line 140

def get_record(record_class, where_clause)
  results = find_records(record_class, where_clause)
  results.empty? ? nil : results.first
end

#get_record!(record_class, where_clause) ⇒ Object



# File 'lib/sequent/core/record_sessions/replay_events_session.rb', line 134

def get_record!(record_class, where_clause)
  record = get_record(record_class, where_clause)
  raise("record #{record_class} not found for #{where_clause}, store: #{@record_store[record_class]}") unless record
  record
end

#last_record(record_class, where_clause) ⇒ Object



# File 'lib/sequent/core/record_sessions/replay_events_session.rb', line 203

def last_record(record_class, where_clause)
  results = find_records(record_class, where_clause)
  results.empty? ? nil : results.last
end

#update_all_records(record_class, where_clause, updates) ⇒ Object



# File 'lib/sequent/core/record_sessions/replay_events_session.rb', line 160

def update_all_records(record_class, where_clause, updates)
  find_records(record_class, where_clause).each do |record|
    updates.each_pair do |k, v|
      record[k.to_sym] = v
    end
  end
end
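
Unlike the block form shown in the overview's event handler, the session method takes an updates hash of column => new value, sketched here with the overview's columns:

session.update_all_records(
  InvoiceRecord,
  {recipient_id: event.recipient.aggregate_id},   # where_clause
  {recipient_street: event.recipient.street}      # updates applied to each match
)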

#update_record(record_class, event, where_clause = {aggregate_id: event.aggregate_id}, options = {}) {|record| ... } ⇒ Object

Yields:

  • (record)


# File 'lib/sequent/core/record_sessions/replay_events_session.rb', line 67

def update_record(record_class, event, where_clause = {aggregate_id: event.aggregate_id}, options = {}, &block)
  defaults = {update_sequence_number: true}
  args = defaults.merge(options)
  record = get_record!(record_class, where_clause)
  record.updated_at = event.created_at if record.respond_to?(:updated_at)
  yield record if block_given?
  record.sequence_number = event.sequence_number if args[:update_sequence_number]
end
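
A hedged sketch with the overview's RecipientMovedEvent: by default the record is looked up by the event's aggregate_id and the event's sequence_number is copied onto it; pass a custom where_clause and update_sequence_number: false to opt out of that bookkeeping.

session.update_record(InvoiceRecord, event) do |record|
  record.recipient_street = event.recipient.street
end

session.update_record(
  InvoiceRecord,
  event,
  {recipient_id: event.recipient.aggregate_id},
  update_sequence_number: false
) do |record|
  record.recipient_street = event.recipient.street
end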