Module: Perus::Server::DB
- Defined in:
- lib/perus/server/db.rb
Class Method Summary collapse
Class Method Details
.cleanup ⇒ Object
60 61 62 63 64 65 66 67 68 69 70 71 72 73 74 75 76 77 78 79 80 81 82 83 84 85 86 87 88 89 |
# File 'lib/perus/server/db.rb', line 60

# Deletes values older than the configured retention window, then removes
# metric records that are orphaned: file-backed metrics whose file is
# missing or older than the cutoff, and plain metrics with no remaining
# values. Invoked hourly by the cleanup timer created in DB.start.
def self.cleanup
    puts 'Cleaning old values and metrics'
    keep_hours = Server.options.keep_hours # NOTE(review): receiver method lost in extraction; 'options' assumed — confirm against Server

    # remove old values. cutoff is a unix timestamp keep_hours in the
    # past (hours * 60 * 60 seconds — reconstructed; extraction showed
    # only '* 60', which would be minutes).
    cutoff = Time.now.to_i - (keep_hours * 60 * 60)

    # bind the cutoff instead of interpolating it into the SQL string
    values = Value.where(Sequel.lit('timestamp < ?', cutoff))
    puts "Deleting #{values.count} values"
    values.each(&:destroy)

    # remove metrics from systems if they no longer have any values
    empty_deleted = 0
    file_deleted = 0

    Metric.each do |metric|
        if metric.file
            path = metric.path
            # File.exist? replaces the deprecated File.exists? alias.
            # A file-backed metric is stale when its file is gone or its
            # mtime predates the retention cutoff.
            if !File.exist?(path) || File.mtime(path).to_i < cutoff
                metric.destroy
                file_deleted += 1
            end
        elsif metric.values_dataset.empty?
            metric.destroy
            empty_deleted += 1
        end
    end

    puts "#{empty_deleted} metrics were deleted as they had no values"
    puts "#{file_deleted} metrics were deleted as they had old files"
end
.db ⇒ Object
7 8 9 |
# File 'lib/perus/server/db.rb', line 7

# Returns the Sequel database handle created by DB.start, or nil if
# start has not been called yet.
def self.db
    @db
end
.start ⇒ Object
11 12 13 14 15 16 17 18 19 20 21 22 23 24 25 26 27 28 29 30 31 32 33 34 35 36 37 38 39 40 41 42 43 44 45 46 47 48 49 50 51 52 53 54 55 56 57 58 |
# File 'lib/perus/server/db.rb', line 11

# Connects to (or creates) the sqlite database, runs any pending
# migrations, loads the model classes, and schedules the recurring
# vacuum and cleanup background tasks. Must run before any model is used.
def self.start
    puts 'Loading database'
    Sequel.extension :migration
    Sequel.extension :inflector

    # connect/create the database and run any new migrations
    @db = Sequel.sqlite(Server.options.db_path, integer_booleans: true) # NOTE(review): receiver method lost in extraction; 'options' assumed — confirm against Server
    Sequel::Migrator.run(@db, File.join(__dir__, 'migrations'))

    # load models - these rely on an existing db connection, so they are
    # required here rather than at the top of the file. Order preserved
    # from the original require list.
    %w[
        system config value group error alert action metric script
        command_config script_command config_metric
    ].each do |model|
        require File.join(__dir__, 'models', model)
    end

    # attempt to run vacuum twice a day. this is done to increase
    # performance rather than reclaim unused space. as old values and
    # metrics are deleted the data become very fragmented. vacuuming
    # restructures the db so system records in the values index should
    # be sequentially stored
    vacuum_task = Concurrent::TimerTask.new do
        @db.execute('vacuum')
    end

    # fire every 12 hours
    vacuum_task.execution_interval = 60 * 60 * 12
    vacuum_task.execute

    # a fixed number of hours of data are kept in the database. once an
    # hour, old values and files are removed. if all values of a metric
    # are removed from a system, the accompanying metric record is also
    # removed.
    cleanup_task = Concurrent::TimerTask.new do
        Perus::Server::DB.cleanup
    end

    # fire every hour
    cleanup_task.execution_interval = 60 * 60
    cleanup_task.execute
end