Class: PgShrink::Database::Postgres
Constant Summary
DEFAULT_OPTS =
  {
    postgres_url: nil,
    host: 'localhost',
    port: nil,
    user: 'postgres',
    password: nil,
    database: 'test',
    batch_size: 10000
  }.freeze
Instance Attribute Summary

#connection ⇒ Object
Instance Method Summary
#batch_size, #connection_string, #delete_records, #get_records, #records_in_batches, #update_records

Methods inherited from PgShrink::Database
#filter!, #filter_table, #remove_table, #sanitize!, #shrink!, #table, #tables
Constructor Details
#initialize(opts) ⇒ Postgres
Returns a new instance of Postgres.
# File 'lib/pg_shrink/database/postgres.rb', line 34

def initialize(opts)
  @opts = DEFAULT_OPTS.merge(opts.symbolize_keys)
  @connection = Sequel.connect(connection_string)
end
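A minimal usage sketch, assuming the pg_shrink gem is installed; the option values below are illustrative, not required:

require 'pg_shrink'

# Discrete options are merged over DEFAULT_OPTS.
db = PgShrink::Database::Postgres.new(
  host: 'localhost',
  user: 'postgres',
  database: 'test',
  batch_size: 5000
)

# Alternatively, a full URL takes precedence over the individual options.
db = PgShrink::Database::Postgres.new(postgres_url: 'postgres://postgres@localhost/test')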
Instance Attribute Details
#connection ⇒ Object
Returns the value of attribute connection.
# File 'lib/pg_shrink/database/postgres.rb', line 7

def connection
  @connection
end
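Because connection is the underlying Sequel database handle, ordinary Sequel calls can be issued through it directly. A sketch (the users table is an assumption):

db.connection[:users].count               # Sequel dataset on the users table
db.connection["select version()"].first   # raw SQL through Sequel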
Instance Method Details
#batch_size ⇒ Object
# File 'lib/pg_shrink/database/postgres.rb', line 30

def batch_size
  @opts[:batch_size]
end
#connection_string ⇒ Object
# File 'lib/pg_shrink/database/postgres.rb', line 18

def connection_string
  if @opts[:postgres_url]
    @opts[:postgres_url]
  else
    str = "postgres://#{@opts[:user]}"
    str << ":#{@opts[:password]}" if @opts[:password]
    str << "@#{@opts[:host]}"
    str << ":#{@opts[:port]}" if @opts[:port]
    str << "/#{@opts[:database]}"
  end
end
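A sketch of the URLs this assembles, with illustrative option values:

# user: 'app', password: 'secret', host: 'db.internal', port: 5433, database: 'app_test'
#   => "postgres://app:secret@db.internal:5433/app_test"
#
# Defaults only (no password, no port):
#   => "postgres://postgres@localhost/test"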
#delete_records(table_name, condition_to_delete) ⇒ Object
# File 'lib/pg_shrink/database/postgres.rb', line 87

def delete_records(table_name, condition_to_delete)
  self.connection.from(table_name).where(condition_to_delete).delete
end
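A hedged usage sketch; the table and column names are hypothetical:

# Remove rows matching a Sequel-style condition hash.
db.delete_records(:users, test_account: true)

# Any condition accepted by Sequel's #where works, e.g. an IN list.
db.delete_records(:events, user_id: [1, 2, 3])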
#get_records(table_name, opts) ⇒ Object
# File 'lib/pg_shrink/database/postgres.rb', line 83

def get_records(table_name, opts)
  self.connection.from(table_name).where(opts).all
end
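For illustration (table and column names assumed), the result is an array of row hashes keyed by column name:

admins = db.get_records(:users, role: 'admin')
# => [{:id => 7, :role => "admin", ...}, ...]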
#records_in_batches(table_name) ⇒ Object
WARNING! This assumes the database is not changing during the run. If requirements change, we may need to insert a lock.
# File 'lib/pg_shrink/database/postgres.rb', line 41

def records_in_batches(table_name)
  table = self.table(table_name)
  primary_key = table.primary_key
  max_id = self.connection["select max(#{primary_key}) from #{table_name}"].
           first[:max]
  i = 1
  while i <= max_id do
    sql = "select * from #{table_name} where " +
          "#{primary_key} >= #{i} and #{primary_key} < #{i + batch_size}"
    batch = self.connection[sql].all
    yield(batch)
    i = i + batch_size
  end
end
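A sketch of iterating a table in batches (table name assumed); each yielded batch is an array of row hashes covering one primary-key window of batch_size ids:

db.records_in_batches(:users) do |batch|
  puts "processing #{batch.size} users"
end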
#update_records(table_name, old_records, new_records) ⇒ Object
# File 'lib/pg_shrink/database/postgres.rb', line 56

def update_records(table_name, old_records, new_records)
  table = self.table(table_name)
  primary_key = table.primary_key
  old_records_by_key = old_records.index_by {|r| r[primary_key]}
  new_records_by_key = new_records.index_by {|r| r[primary_key]}
  if (new_records_by_key.keys - old_records_by_key.keys).size > 0
    raise "Bad voodoo! New records have primary keys not in old records!"
  end
  deleted_record_ids = old_records_by_key.keys - new_records_by_key.keys
  if deleted_record_ids.any?
    raise "Bad voodoo! Some records missing in new records!"
  end
  new_records.each do |rec|
    if old_records_by_key[rec[primary_key]] != rec
      self.connection.from(table_name).
           where(primary_key => rec[primary_key]).
           update(rec)
    end
  end
end
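A hedged sketch combining records_in_batches and update_records to rewrite a column in place; the table name, column, and obfuscation rule are assumptions, not part of the API:

db.records_in_batches(:users) do |old_batch|
  new_batch = old_batch.map do |row|
    # Replace each email with a deterministic placeholder (assumes an :id primary key).
    row.merge(email: "user#{row[:id]}@example.com")
  end
  db.update_records(:users, old_batch, new_batch)
end

Only rows that actually changed are written back, since update_records skips records equal to their originals.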