Module: ContentServer
- Defined in:
- lib/content_server/backup_server.rb,
lib/content_server.rb,
lib/content_server/version.rb,
lib/content_server/queue_copy.rb,
lib/content_server/file_streamer.rb,
lib/content_server/queue_indexer.rb,
lib/content_server/content_server.rb,
lib/content_server/remote_content.rb,
lib/content_server/content_receiver.rb
Overview
Content server. Monitors files, indexes local files, listens to backup server content, and copies changes and new files to the backup server.
Defined Under Namespace
Classes: ContentDataReceiver, ContentDataSender, FileCopyClient, FileCopyServer, FileReceiver, FileStreamer, QueueIndexer, RemoteContentClient, RemoteContentServer, Stream
Constant Summary collapse
- VERSION =
"1.0.3"
Class Method Summary collapse
Class Method Details
.run_backup_server ⇒ Object
# File 'lib/content_server/backup_server.rb', line 29

# Runs the backup server main loop.
#
# Starts worker threads that: monitor the configured directories, index
# local files, persist the local content index to disk, receive the remote
# content server's index, and periodically compare local vs. remote content,
# requesting copies of anything missing locally.
#
# Blocks forever joining the worker threads; never returns in normal
# operation.
#
# @return [void] never returns under normal operation
def run_backup_server
  Log.info('Start backup server')
  Thread.abort_on_exception = true
  all_threads = []

  # Create general tmp dir.
  FileUtils.mkdir_p(Params['tmp_path']) unless File.directory?(Params['tmp_path'])
  # Init tmp content data file (written first, then atomically moved into place).
  tmp_content_data_file = Params['tmp_path'] + '/backup.data'

  if Params['enable_monitoring']
    Log.info("Initializing monitoring of process params on port:#{Params['process_monitoring_web_port']}")
    Params['process_vars'] = ThreadSafeHash::ThreadSafeHash.new
    Params['process_vars'].set('server_name', 'backup_server')
  end

  # Initialize/Start monitoring.
  Log.info('Start monitoring following directories:')
  Params['monitoring_paths'].each do |path|
    Log.info("  Path:'#{path['path']}'")
  end
  monitoring_events = Queue.new
  fm = FileMonitoring::FileMonitoring.new
  fm.set_event_queue(monitoring_events)
  # Start monitoring and writing changes to queue.
  all_threads << Thread.new do
    fm.monitor_files
  end

  # Initialize/Start local indexer.
  Log.debug1('Start indexer')
  local_server_content_data_queue = Queue.new
  queue_indexer = QueueIndexer.new(monitoring_events,
                                   local_server_content_data_queue,
                                   Params['local_content_data_path'])
  # Start indexing on demand and write changes to queue.
  all_threads << queue_indexer.run

  # Initialize/Start backup server content data sender.
  Log.debug1('Start backup server content data sender')
  local_dynamic_content_data = ContentData::DynamicContentData.new
  #content_data_sender = ContentDataSender.new(
  #    Params['remote_server'],
  #    Params['remote_listening_port'])
  # Drain the indexer queue into the dynamic content data snapshot.
  all_threads << Thread.new do
    loop do
      Log.debug1 'Waiting on local server content data queue.'
      cd = local_server_content_data_queue.pop
      # content_data_sender.send_content_data(cd)
      local_dynamic_content_data.update(cd)
    end
  end

  # Start dump local content data to file thread.
  Log.debug1('Start dump local content data to file thread')
  all_threads << Thread.new do
    last_data_flush_time = nil
    loop do
      if last_data_flush_time.nil? || last_data_flush_time + Params['data_flush_delay'] < Time.now.to_i
        Log.info "Writing local content data to #{Params['local_content_data_path']}."
        local_dynamic_content_data.last_content_data.to_file(tmp_content_data_file)
        sleep(0.1)  # Added to prevent mv access issue.
        ::FileUtils.mv(tmp_content_data_file, Params['local_content_data_path'])
        last_data_flush_time = Time.now.to_i
      end
      sleep(1)
    end
  end

  # BUGFIX: the method name was missing ("File.(...)"), which invokes
  # File.call and raises NoMethodError. The intent is an absolute path.
  Params['backup_destination_folder'] = File.expand_path(Params['monitoring_paths'][0]['path'])
  Log.info("backup_destination_folder is:#{Params['backup_destination_folder']}")
  content_server_dynamic_content_data = ContentData::DynamicContentData.new
  remote_content = ContentServer::RemoteContentClient.new(content_server_dynamic_content_data,
                                                          Params['content_server_hostname'],
                                                          Params['content_server_data_port'],
                                                          Params['backup_destination_folder'])
  all_threads.concat(remote_content.run())

  file_copy_client = FileCopyClient.new(Params['content_server_hostname'],
                                        Params['content_server_files_port'],
                                        local_dynamic_content_data)
  all_threads.concat(file_copy_client.threads)

  # Every backup_check_delay seconds, diff remote content against local and
  # request copies of any contents the backup is missing.
  Log.info('Start remote and local contents comparator')
  all_threads << Thread.new do
    loop do
      sleep(Params['backup_check_delay'])
      local_cd = local_dynamic_content_data.last_content_data()
      remote_cd = content_server_dynamic_content_data.last_content_data()
      diff = ContentData::ContentData.remove(local_cd, remote_cd)
      if !diff.empty?
        Log.info('Start sync check. Backup and remote contents need a sync:')
        Log.debug2("Backup content:\n#{local_cd}")
        Log.debug2("Remote content:\n#{remote_cd}")
        Log.info("Missing contents:\n#{diff}")
        Log.info('Requesting copy files')
        file_copy_client.request_copy(diff)
      else
        Log.info("Start sync check. Local and remote contents are equal. No sync required.")
      end
    end
  end

  # Start process vars thread: periodically publishes queue sizes and live
  # object counts for the monitoring web page.
  if Params['enable_monitoring']
    monitoring_info = MonitoringInfo::MonitoringInfo.new()
    all_threads << Thread.new do
      last_data_flush_time = nil
      mutex = Mutex.new
      loop do
        if last_data_flush_time.nil? || last_data_flush_time + Params['process_vars_delay'] < Time.now
          Params['process_vars'].set('time', Time.now)
          Log.info("process_vars:monitoring queue size:#{monitoring_events.size}")
          Params['process_vars'].set('monitoring queue', monitoring_events.size)
          # BUGFIX: this log line printed monitoring_events.size; the value
          # stored just below is the content data queue size.
          Log.info("process_vars:content data queue size:#{local_server_content_data_queue.size}")
          Params['process_vars'].set('content data queue', local_server_content_data_queue.size)
          #enable following line to see full list of object:count
          #obj_array = ''
          total_obj_count = 0
          string_count = 0
          file_count = 0
          dir_count = 0
          content_count = 0
          index_agent_count = 0
          indexer_patterns_count = 0
          mutex.synchronize do
            ObjectSpace.each_object(Class) do |obj|
              obj_count_per_class = ObjectSpace.each_object(obj).count
              #enable following line to see full list of object:count
              #obj_array = "#{obj_array} * #{obj.name}:#{obj_count_per_class}"
              total_obj_count = total_obj_count + obj_count_per_class
            end
            string_count = ObjectSpace.each_object(String).count
            file_count = ObjectSpace.each_object(::FileMonitoring::FileStat).count
            dir_count = ObjectSpace.each_object(::FileMonitoring::DirStat).count
            content_count = ObjectSpace.each_object(::ContentData::ContentData).count
            index_agent_count = ObjectSpace.each_object(::FileIndexing::IndexAgent).count
            indexer_patterns_count = ObjectSpace.each_object(::FileIndexing::IndexerPatterns).count
          end
          #enable following line to see full list of object:count
          #Params['process_vars'].set('Live objs full', obj_array)
          Log.info("process_vars:Live objs cnt:#{total_obj_count}")
          Log.info("process_vars:Live String obj cnt:#{string_count}")
          Log.info("process_vars:Live File obj cnt:#{file_count}")
          Log.info("process_vars:Live Dir obj cnt:#{dir_count}")
          Log.info("process_vars:Live Content data obj cnt:#{content_count}")
          Log.info("process_vars:Live index agent obj cnt:#{index_agent_count}")
          Log.info("process_vars:Live indexer patterns obj cnt:#{indexer_patterns_count}")
          Params['process_vars'].set('Live objs cnt', total_obj_count)
          Params['process_vars'].set('Live String obj cnt', string_count)
          Params['process_vars'].set('Live File obj cnt', file_count)
          Params['process_vars'].set('Live Dir obj cnt', dir_count)
          Params['process_vars'].set('Live Content data obj cnt', content_count)
          Params['process_vars'].set('Live index agent obj cnt', index_agent_count)
          Params['process_vars'].set('Live indexer patterns obj cnt', indexer_patterns_count)
          last_data_flush_time = Time.now
        end
        sleep(0.3)
      end
    end
  end

  all_threads.each { |t| t.abort_on_exception = true }
  all_threads.each { |t| t.join }
  # Should never reach this line.
end
.run_content_server ⇒ Object
# File 'lib/content_server/content_server.rb', line 26

# Runs the content server main loop.
#
# Starts worker threads that: monitor the configured directories, index
# local files, persist the local content index to disk, serve the content
# index to backup servers, and serve file copies on demand.
#
# Blocks forever joining the worker threads; never returns in normal
# operation.
#
# @return [void] never returns under normal operation
def run_content_server
  Log.info('Content server start')
  all_threads = []

  # Create general tmp dir.
  FileUtils.mkdir_p(Params['tmp_path']) unless File.directory?(Params['tmp_path'])
  # Init tmp content data file (written first, then atomically moved into place).
  tmp_content_data_file = Params['tmp_path'] + '/content.data'

  if Params['enable_monitoring']
    Log.info("Initializing monitoring of process params on port:#{Params['process_monitoring_web_port']}")
    Params['process_vars'] = ThreadSafeHash::ThreadSafeHash.new
    Params['process_vars'].set('server_name', 'content_server')
  end

  # Initialize/Start monitoring.
  Log.info('Start monitoring following directories:')
  Params['monitoring_paths'].each do |path|
    Log.info("  Path:'#{path['path']}'")
  end
  monitoring_events = Queue.new
  fm = FileMonitoring::FileMonitoring.new
  fm.set_event_queue(monitoring_events)
  # Start monitoring and writing changes to queue.
  all_threads << Thread.new do
    fm.monitor_files
  end

  # Initialize/Start local indexer.
  Log.debug1('Start indexer')
  local_server_content_data_queue = Queue.new
  queue_indexer = QueueIndexer.new(monitoring_events,
                                   local_server_content_data_queue,
                                   Params['local_content_data_path'])
  # Start indexing on demand and write changes to queue.
  all_threads << queue_indexer.run

  # Initialize/Start content data comparator.
  Log.debug1('Start content data comparator')
  local_dynamic_content_data = ContentData::DynamicContentData.new
  all_threads << Thread.new do
    # TODO(kolman): Seems like redundant, check how to update dynamic directly.
    loop do
      # Note: This thread should be the only consumer of local_server_content_data_queue.
      Log.debug1 'Waiting on local server content data.'
      local_server_content_data = local_server_content_data_queue.pop
      local_dynamic_content_data.update(local_server_content_data)
    end
  end

  # Start dump local content data to file thread.
  Log.debug1('Start dump local content data to file thread')
  all_threads << Thread.new do
    last_data_flush_time = nil
    loop do
      if last_data_flush_time.nil? || last_data_flush_time + Params['data_flush_delay'] < Time.now.to_i
        Log.info "Writing local content data to #{Params['local_content_data_path']}."
        local_dynamic_content_data.last_content_data.to_file(tmp_content_data_file)
        sleep(0.1)  # Added to prevent mv access issue.
        ::FileUtils.mv(tmp_content_data_file, Params['local_content_data_path'])
        last_data_flush_time = Time.now.to_i
      end
      sleep(1)
    end
  end

  # Serve the local content index to remote (backup) clients.
  remote_content_client = RemoteContentServer.new(local_dynamic_content_data,
                                                  Params['local_content_data_port'])
  all_threads << remote_content_client.tcp_thread

  # Start copying files on demand.
  Log.debug1('Start copy data on demand')
  copy_files_events = Queue.new
  # TODO(kolman): Remove this initialization and merge to FileCopyServer.
  copy_server = FileCopyServer.new(copy_files_events, Params['local_files_port'])
  all_threads.concat(copy_server.run())

  # Start process vars thread: periodically publishes queue sizes and live
  # object counts for the monitoring web page.
  if Params['enable_monitoring']
    monitoring_info = MonitoringInfo::MonitoringInfo.new()
    all_threads << Thread.new do
      last_data_flush_time = nil
      mutex = Mutex.new
      loop do
        if last_data_flush_time.nil? || last_data_flush_time + Params['process_vars_delay'] < Time.now
          Params['process_vars'].set('time', Time.now)
          Log.info("process_vars:monitoring queue size:#{monitoring_events.size}")
          Params['process_vars'].set('monitoring queue', monitoring_events.size)
          # BUGFIX: this log line printed monitoring_events.size; the value
          # stored just below is the content data queue size.
          Log.info("process_vars:content data queue size:#{local_server_content_data_queue.size}")
          Params['process_vars'].set('content data queue', local_server_content_data_queue.size)
          Log.info("process_vars:copy files events queue size:#{copy_files_events.size}")
          Params['process_vars'].set('copy files events queue', copy_files_events.size)
          #enable following line to see full list of object:count
          #obj_array = ''
          total_obj_count = 0
          string_count = 0
          file_count = 0
          dir_count = 0
          content_count = 0
          index_agent_count = 0
          indexer_patterns_count = 0
          mutex.synchronize do
            ObjectSpace.each_object(Class) do |obj|
              obj_count_per_class = ObjectSpace.each_object(obj).count
              #enable following line to see full list of object:count
              #obj_array = "#{obj_array} * #{obj.name}:#{obj_count_per_class}"
              total_obj_count = total_obj_count + obj_count_per_class
            end
            string_count = ObjectSpace.each_object(String).count
            file_count = ObjectSpace.each_object(::FileMonitoring::FileStat).count
            dir_count = ObjectSpace.each_object(::FileMonitoring::DirStat).count
            content_count = ObjectSpace.each_object(::ContentData::ContentData).count
            index_agent_count = ObjectSpace.each_object(::FileIndexing::IndexAgent).count
            indexer_patterns_count = ObjectSpace.each_object(::FileIndexing::IndexerPatterns).count
          end
          #enable following line to see full list of object:count
          #Params['process_vars'].set('Live objs full', obj_array)
          Log.info("process_vars:Live objs cnt:#{total_obj_count}")
          Log.info("process_vars:Live String obj cnt:#{string_count}")
          Log.info("process_vars:Live File obj cnt:#{file_count}")
          Log.info("process_vars:Live Dir obj cnt:#{dir_count}")
          Log.info("process_vars:Live Content data obj cnt:#{content_count}")
          Log.info("process_vars:Live index agent obj cnt:#{index_agent_count}")
          Log.info("process_vars:Live indexer patterns obj cnt:#{indexer_patterns_count}")
          Params['process_vars'].set('Live objs cnt', total_obj_count)
          Params['process_vars'].set('Live String obj cnt', string_count)
          Params['process_vars'].set('Live File obj cnt', file_count)
          Params['process_vars'].set('Live Dir obj cnt', dir_count)
          Params['process_vars'].set('Live Content data obj cnt', content_count)
          Params['process_vars'].set('Live index agent obj cnt', index_agent_count)
          Params['process_vars'].set('Live indexer patterns obj cnt', indexer_patterns_count)
          last_data_flush_time = Time.now
        end
        sleep(0.3)
      end
    end
  end

  # Finalize server threads.
  all_threads.each { |t| t.abort_on_exception = true }
  all_threads.each { |t| t.join }
  # Should never reach this line.
end