Class: Sequent::Core::RecordSessions::ReplayEventsSession
- Inherits: Object
- Defined in: lib/sequent/core/record_sessions/replay_events_session.rb
Overview
Session objects are used to update view state.
The ReplayEventsSession is optimized for bulk loading records into a Postgres database using CSV import.
After a lot of experimenting this turned out to be the fastest way to do bulk inserts in the database. You can tweak the number of records buffered in the CSV via insert_with_csv_size before it flushes to the database to gain (or lose) speed.
It is highly recommended to create indices on the in-memory record_store to speed up processing. By default, all records are indexed by aggregate_id if they have such a property.
Example:

  class InvoiceEventHandler < Sequent::Core::Projector
    on RecipientMovedEvent do |event|
      update_all_records InvoiceRecord, recipient_id: event.recipient.aggregate_id do |record|
        record.recipient_street = event.recipient.street
      end
    end
  end
In this case it is wise to create an index on InvoiceRecord on recipient_id, just like you would in the database.
Example:

  ReplayEventsSession.new(
    50,
    {InvoiceRecord => [[:recipient_id]]}
  )
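As a rough end-to-end sketch (InvoiceRecord, its columns and the values below are hypothetical, and session stands for the ReplayEventsSession instance handed to the projectors): records are accumulated in memory while events are replayed and only hit Postgres when commit is called.

  session = ReplayEventsSession.new(
    1000,
    {InvoiceRecord => [[:recipient_id]]}
  )
  session.create_record(InvoiceRecord, aggregate_id: 'invoice-1', recipient_id: 'recipient-1')
  session.update_all_records(InvoiceRecord, {recipient_id: 'recipient-1'}, recipient_street: 'Main Street 1')
  session.commit # flushes everything to Postgres and clears the in-memory store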
Defined Under Namespace
Modules: InitStruct
Instance Attribute Summary

- #insert_with_csv_size ⇒ Object
  Returns the value of attribute insert_with_csv_size.
- #record_store ⇒ Object (readonly)
  Returns the value of attribute record_store.
Class Method Summary

- .struct_cache ⇒ Object
Instance Method Summary
- #clear ⇒ Object
- #commit ⇒ Object
- #create_or_update_record(record_class, values, created_at = Time.now) {|record| ... } ⇒ Object
- #create_record(record_class, values) {|record| ... } ⇒ Object
- #delete_all_records(record_class, where_clause) ⇒ Object
- #delete_record(record_class, record) ⇒ Object
- #do_with_record(record_class, where_clause) {|record| ... } ⇒ Object
- #do_with_records(record_class, where_clause) ⇒ Object
- #find_records(record_class, where_clause) ⇒ Object
- #get_record(record_class, where_clause) ⇒ Object
- #get_record!(record_class, where_clause) ⇒ Object
- #initialize(insert_with_csv_size = 50, indices = {}) ⇒ ReplayEventsSession constructor
  insert_with_csv_size: number of records to insert in a single batch.
- #last_record(record_class, where_clause) ⇒ Object
- #update_all_records(record_class, where_clause, updates) ⇒ Object
- #update_record(record_class, event, where_clause = {aggregate_id: event.aggregate_id}, options = {}) {|record| ... } ⇒ Object
Constructor Details
#initialize(insert_with_csv_size = 50, indices = {}) ⇒ ReplayEventsSession
insert_with_csv_size: number of records to insert in a single batch.
indices: Hash of indices to create in memory. Greatly speeds up the replaying.
  The key corresponds to the name of the 'Record'.
  The value contains a list of lists specifying which columns to index, e.g. [[:first_index_column], [:another_index, :with_two_columns]].
# File 'lib/sequent/core/record_sessions/replay_events_session.rb', line 60

def initialize(insert_with_csv_size = 50, indices = {})
  @insert_with_csv_size = insert_with_csv_size
  @record_store = Hash.new { |h, k| h[k] = Set.new }
  @record_index = {}
  @indices = indices
end
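A sketch of the indices argument (the record classes and column names below are hypothetical): keys are record classes, values are lists of column lists, one per in-memory index.

  indices = {
    InvoiceRecord => [[:recipient_id], [:recipient_id, :status]], # two indices, the second on a column pair
    PaymentRecord => [[:invoice_id]],
  }
  session = ReplayEventsSession.new(50, indices)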
Instance Attribute Details
#insert_with_csv_size ⇒ Object
Returns the value of attribute insert_with_csv_size.
# File 'lib/sequent/core/record_sessions/replay_events_session.rb', line 40

def insert_with_csv_size
  @insert_with_csv_size
end
#record_store ⇒ Object (readonly)
Returns the value of attribute record_store.
# File 'lib/sequent/core/record_sessions/replay_events_session.rb', line 39

def record_store
  @record_store
end
Class Method Details
.struct_cache ⇒ Object
# File 'lib/sequent/core/record_sessions/replay_events_session.rb', line 42

def self.struct_cache
  @struct_cache ||= {}
end
Instance Method Details
#clear ⇒ Object
# File 'lib/sequent/core/record_sessions/replay_events_session.rb', line 274

def clear
  @record_store.clear
  @record_index.clear
end
#commit ⇒ Object
# File 'lib/sequent/core/record_sessions/replay_events_session.rb', line 208

def commit
  begin
    @record_store.each do |clazz, records|
      if records.size > @insert_with_csv_size
        csv = CSV.new("")
        column_names = clazz.column_names.reject { |name| name == "id" }
        records.each do |obj|
          begin
            csv << column_names.map do |column_name|
              obj[column_name]
            end
          end
        end

        buf = ''
        conn = ActiveRecord::Base.connection.raw_connection
        copy_data = StringIO.new csv.string
        conn.transaction do
          conn.exec("COPY #{clazz.table_name} (#{column_names.join(",")}) FROM STDIN WITH csv")
          begin
            while copy_data.read(1024, buf)
              ### Uncomment this to test error-handling for exceptions from the reader side:
              # raise Errno::ECONNRESET, "socket closed while reading"
              until conn.put_copy_data(buf)
                sleep 0.1
              end
            end
          rescue Errno => err
            errmsg = "%s while reading copy data: %s" % [err.class.name, err.message]
            conn.put_copy_end(errmsg)
          ensure
            conn.put_copy_end
            copy_data.close
            while res = conn.get_result
              status = res.res_status(res.result_status)
              if status != "PGRES_COMMAND_OK"
                raise "Postgres copy command failed: #{status}, #{res.error_message}"
              end
            end
          end
        end
      else
        clazz.unscoped do
          inserts = []
          column_names = clazz.column_names.reject { |name| name == "id" }
          prepared_values = (1..column_names.size).map { |i| "$#{i}" }.join(",")
          records.each do |r|
            values = column_names.map { |name| r[name.to_sym] }
            inserts << values
          end
          sql = %Q{insert into #{clazz.table_name} (#{column_names.join(",")}) values (#{prepared_values})}
          inserts.each do |insert|
            clazz.connection.raw_connection.async_exec(sql, insert)
          end
        end
      end
    end
  ensure
    clear
  end
end
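commit takes one of two paths per record class: when the number of pending records exceeds insert_with_csv_size they are streamed to Postgres with COPY ... FROM STDIN as CSV, otherwise they are inserted row by row. A hedged tuning sketch (the threshold below is illustrative, not a recommendation):

  session = ReplayEventsSession.new(1000)
  # ... replay events, letting projectors fill the in-memory record_store ...
  session.commit # classes with more than 1000 pending records go through CSV COPY, the rest through plain inserts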
#create_or_update_record(record_class, values, created_at = Time.now) {|record| ... } ⇒ Object
# File 'lib/sequent/core/record_sessions/replay_events_session.rb', line 127

def create_or_update_record(record_class, values, created_at = Time.now)
  record = get_record(record_class, values)
  unless record
    record = create_record(record_class, values.merge(created_at: created_at))
  end
  yield record if block_given?
  record
end
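create_or_update_record looks the record up by the given values and creates it only when missing; the optional block runs either way, which makes it a convenient in-memory upsert. A sketch inside an event handler, assuming session is a ReplayEventsSession and InvoiceRecord a hypothetical record class:

  session.create_or_update_record(InvoiceRecord, aggregate_id: event.aggregate_id) do |record|
    record.total_amount = event.amount
  end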
#create_record(record_class, values) {|record| ... } ⇒ Object
# File 'lib/sequent/core/record_sessions/replay_events_session.rb', line 76

def create_record(record_class, values)
  column_names = record_class.column_names
  values = record_class.column_defaults.merge(values)
  values.merge!(updated_at: values[:created_at]) if column_names.include?("updated_at")
  struct_class_name = "#{record_class.to_s}Struct"
  if self.class.struct_cache.has_key?(struct_class_name)
    struct_class = self.class.struct_cache[struct_class_name]
  else
    if column_names.include? :aggregate_id
      @indices[record_class] ||= []
      @indices[record_class] << [:aggregate_id]
    end
    # We create a struct on the fly.
    # Since the replay happens in memory we implement the ==, eql? and hash methods
    # to point to the same object. A record is the same if and only if they point to
    # the same object. These methods are necessary since we use Set instead of [].
    class_def = <<-EOD
      #{struct_class_name} = Struct.new(*#{column_names.map(&:to_sym)})
      class #{struct_class_name}
        include InitStruct
        def ==(other)
          return true if self.equal?(other)
          super
        end
        def eql?(other)
          self == other
        end
        def hash
          self.object_id.hash
        end
      end
    EOD
    eval("#{class_def}")
    struct_class = ReplayEventsSession.const_get(struct_class_name)
    self.class.struct_cache[struct_class_name] = struct_class
  end
  record = struct_class.new.set_values(values)
  yield record if block_given?
  @record_store[record_class] << record
  if indexed?(record_class)
    do_with_cache_keys(record_class, record) do |key|
      @record_index[key] = [] unless @record_index.has_key?(key)
      @record_index[key] << record
    end
  end
  record
end
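The returned record is not an ActiveRecord instance but a Struct generated on the fly (named after the record class with a Struct suffix) and cached in struct_cache. A sketch with hypothetical names:

  record = session.create_record(InvoiceRecord, aggregate_id: 'invoice-1', amount: 10)
  record.aggregate_id              # => 'invoice-1'
  record.is_a?(ActiveRecord::Base) # => false; it is a generated InvoiceRecordStruct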
#delete_all_records(record_class, where_clause) ⇒ Object
# File 'lib/sequent/core/record_sessions/replay_events_session.rb', line 147

def delete_all_records(record_class, where_clause)
  find_records(record_class, where_clause).each do |record|
    delete_record(record_class, record)
  end
end
#delete_record(record_class, record) ⇒ Object
# File 'lib/sequent/core/record_sessions/replay_events_session.rb', line 153

def delete_record(record_class, record)
  @record_store[record_class].delete(record)
  if indexed?(record_class)
    do_with_cache_keys(record_class, record) do |key|
      @record_index[key].delete(record) if @record_index.has_key?(key)
    end
  end
end
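Both deletion helpers remove records from the record_store and keep the in-memory indices in sync. A sketch with hypothetical names:

  session.delete_all_records(InvoiceRecord, recipient_id: 'recipient-1')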
#do_with_record(record_class, where_clause) {|record| ... } ⇒ Object
# File 'lib/sequent/core/record_sessions/replay_events_session.rb', line 177

def do_with_record(record_class, where_clause)
  record = get_record!(record_class, where_clause)
  yield record
end
#do_with_records(record_class, where_clause) ⇒ Object
# File 'lib/sequent/core/record_sessions/replay_events_session.rb', line 170

def do_with_records(record_class, where_clause)
  records = find_records(record_class, where_clause)
  records.each do |record|
    yield record
  end
end
#find_records(record_class, where_clause) ⇒ Object
# File 'lib/sequent/core/record_sessions/replay_events_session.rb', line 182

def find_records(record_class, where_clause)
  if use_index?(record_class, where_clause)
    values = get_index(record_class, where_clause).map { |field| where_clause[field] }
    @record_index[[record_class, *values]] || []
  else
    @record_store[record_class].select do |record|
      where_clause.all? do |k, v|
        expected_value = v.kind_of?(Symbol) ? v.to_s : v
        actual_value = record[k.to_sym]
        actual_value = actual_value.to_s if actual_value.kind_of? Symbol
        if expected_value.kind_of?(Array)
          expected_value.include?(actual_value)
        else
          actual_value == expected_value
        end
      end
    end
  end.dup
end
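Where clauses are matched in memory: symbols are compared as strings, an Array value behaves like a SQL IN clause, and a clause that can be served by a configured index is answered from record_index instead of scanning the whole store. A sketch with hypothetical names:

  session.find_records(InvoiceRecord, status: 'open')
  session.find_records(InvoiceRecord, aggregate_id: ['invoice-1', 'invoice-2'])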
#get_record(record_class, where_clause) ⇒ Object
# File 'lib/sequent/core/record_sessions/replay_events_session.rb', line 142

def get_record(record_class, where_clause)
  results = find_records(record_class, where_clause)
  results.empty? ? nil : results.first
end
#get_record!(record_class, where_clause) ⇒ Object
# File 'lib/sequent/core/record_sessions/replay_events_session.rb', line 136

def get_record!(record_class, where_clause)
  record = get_record(record_class, where_clause)
  raise("record #{record_class} not found for #{where_clause}, store: #{@record_store[record_class]}") unless record
  record
end
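get_record returns nil when nothing matches, while get_record! raises with the where clause and the current contents of the store. A sketch with hypothetical names:

  session.get_record(InvoiceRecord, aggregate_id: 'missing')  # => nil
  session.get_record!(InvoiceRecord, aggregate_id: 'missing') # raises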
#last_record(record_class, where_clause) ⇒ Object
# File 'lib/sequent/core/record_sessions/replay_events_session.rb', line 203

def last_record(record_class, where_clause)
  results = find_records(record_class, where_clause)
  results.empty? ? nil : results.last
end
#update_all_records(record_class, where_clause, updates) ⇒ Object
# File 'lib/sequent/core/record_sessions/replay_events_session.rb', line 162

def update_all_records(record_class, where_clause, updates)
  find_records(record_class, where_clause).each do |record|
    updates.each_pair do |k, v|
      record[k.to_sym] = v
    end
  end
end
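The updates argument is a hash; each key/value pair is assigned on every matching in-memory record. A sketch inside an event handler, with hypothetical names:

  session.update_all_records(
    InvoiceRecord,
    {recipient_id: event.recipient.aggregate_id},
    recipient_street: event.recipient.street
  )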
#update_record(record_class, event, where_clause = {aggregate_id: event.aggregate_id}, options = {}) {|record| ... } ⇒ Object
# File 'lib/sequent/core/record_sessions/replay_events_session.rb', line 67

def update_record(record_class, event, where_clause = {aggregate_id: event.aggregate_id}, options = {}, &block)
  defaults = {update_sequence_number: true}
  args = defaults.merge(options)
  record = get_record!(record_class, where_clause)
  record.updated_at = event.created_at if record.respond_to?(:updated_at)
  yield record if block_given?
  record.sequence_number = event.sequence_number if args[:update_sequence_number]
end
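update_record fetches the record matching where_clause (by default the event's aggregate_id), sets updated_at when the record has it and, unless update_sequence_number: false is passed, copies sequence_number from the event, yielding the record for further changes. A sketch inside an event handler, with hypothetical names:

  session.update_record(InvoiceRecord, event) do |record|
    record.status = 'paid'
  end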