Class: Bolt::Applicator
- Inherits:
-
Object
- Object
- Bolt::Applicator
- Defined in:
- lib/bolt/applicator.rb
Instance Method Summary collapse
- #apply(args, apply_body, scope) ⇒ Object
- #apply_ast(raw_ast, targets, options, plan_vars = {}) ⇒ Object
- #build_plugin_tarball ⇒ Object
- #catalog_apply_task ⇒ Object
- #compile(target, catalog_input) ⇒ Object
-
#count_statements(ast) ⇒ Object
Count the number of top-level statements in the AST.
- #custom_facts_task ⇒ Object
-
#initialize(inventory, executor, modulepath, plugin_dirs, project, pdb_client, hiera_config, max_compiles, apply_settings) ⇒ Applicator
constructor
A new instance of Applicator.
- #plugins ⇒ Object
- #query_resources_task ⇒ Object
- #validate_hiera_config(hiera_config) ⇒ Object
Constructor Details
#initialize(inventory, executor, modulepath, plugin_dirs, project, pdb_client, hiera_config, max_compiles, apply_settings) ⇒ Applicator
Returns a new instance of Applicator.
17 18 19 20 21 22 23 24 25 26 27 28 29 30 31 32 |
# File 'lib/bolt/applicator.rb', line 17 def initialize(inventory, executor, modulepath, plugin_dirs, project, pdb_client, hiera_config, max_compiles, apply_settings) # lazy-load expensive gem code require 'concurrent' @inventory = inventory @executor = executor @modulepath = modulepath || [] @plugin_dirs = plugin_dirs @project = project @pdb_client = pdb_client @hiera_config = hiera_config ? validate_hiera_config(hiera_config) : nil @apply_settings = apply_settings || {} @pool = Concurrent::ThreadPoolExecutor.new(max_threads: max_compiles) @logger = Logging.logger[self] end |
Instance Method Details
#apply(args, apply_body, scope) ⇒ Object
143 144 145 146 147 148 149 150 151 152 153 154 155 156 157 158 159 160 161 162 163 |
# File 'lib/bolt/applicator.rb', line 143 def apply(args, apply_body, scope) raise(ArgumentError, 'apply requires a TargetSpec') if args.empty? raise(ArgumentError, 'apply requires at least one statement in the apply block') if apply_body.nil? type0 = Puppet.lookup(:pal_script_compiler).type('TargetSpec') Puppet::Pal.assert_type(type0, args[0], 'apply targets') @executor.report_function_call('apply') options = {} if args.count > 1 type1 = Puppet.lookup(:pal_script_compiler).type('Hash[String, Data]') Puppet::Pal.assert_type(type1, args[1], 'apply options') options = args[1].transform_keys { |k| k.sub(/^_/, '').to_sym } end plan_vars = scope.to_hash(true, true) targets = @inventory.get_targets(args[0]) apply_ast(apply_body, targets, options, plan_vars) end
#apply_ast(raw_ast, targets, options, plan_vars = {}) ⇒ Object
177 178 179 180 181 182 183 184 185 186 187 188 189 190 191 192 193 194 195 196 197 198 199 200 201 202 203 204 205 206 207 208 209 210 211 212 213 214 215 216 217 218 219 220 221 222 223 224 225 226 227 228 229 230 231 232 233 234 235 236 237 238 239 240 241 242 243 244 245 246 247 248 249 250 251 252 253 254 255 256 257 258 259 260 261 262 263 264 265 266 267 268 269 270 271 272 273 274 275 276 277 278 279 |
# File 'lib/bolt/applicator.rb', line 177 def apply_ast(raw_ast, targets, options, plan_vars = {}) ast = Puppet::Pops::Serialization::ToDataConverter.convert(raw_ast, rich_data: true, symbol_to_string: true) # Serialize as pcore for *Result* objects plan_vars = Puppet::Pops::Serialization::ToDataConverter.convert(plan_vars, rich_data: true, symbol_as_string: true, type_by_reference: true, local_reference: true) bolt_project = @project if @project&.name scope = { code_ast: ast, modulepath: @modulepath, project: bolt_project.to_h, pdb_config: @pdb_client.config.to_hash, hiera_config: @hiera_config, plan_vars: plan_vars, # This data isn't available on the target config hash config: @inventory.transport_data_get } description = options[:description] || 'apply catalog' required_modules = options[:required_modules].nil? ? nil : Array(options[:required_modules]) if required_modules&.any? @logger.debug("Syncing only required modules: #{required_modules.join(',')}.") end @plugin_tarball = Concurrent::Delay.new do build_plugin_tarball do |mod| next unless required_modules.nil? || required_modules.include?(mod.name) search_dirs = [] search_dirs << mod.plugins if mod.plugins? search_dirs << mod.pluginfacts if mod.pluginfacts? search_dirs << mod.files if mod.files? type_files = "#{mod.path}/types" search_dirs << type_files if File.exist?(type_files) search_dirs end end r = @executor.log_action(description, targets) do futures = targets.map do |target| Concurrent::Future.execute(executor: @pool) do @executor.with_node_logging("Compiling manifest block", [target]) do compile(target, scope) end end end result_promises = targets.zip(futures).flat_map do |target, future| @executor.queue_execute([target]) do |transport, batch| @executor.with_node_logging("Applying manifest block", batch) do catalog = future.value if future.rejected? 
batch.map do |batch_target| # If an unhandled exception occurred, wrap it in an ApplyError error = if future.reason.is_a?(Bolt::ApplyError) future.reason else Bolt::ApplyError.new(batch_target, future.reason.message) end result = Bolt::ApplyResult.new(batch_target, error: error.to_h) @executor.publish_event(type: :node_result, result: result) result end else arguments = { 'catalog' => Puppet::Pops::Types::PSensitiveType::Sensitive.new(catalog), 'plugins' => Puppet::Pops::Types::PSensitiveType::Sensitive.new(plugins), 'apply_settings' => @apply_settings, '_task' => catalog_apply_task.name, '_noop' => options[:noop] } callback = proc do |event| if event[:type] == :node_result event = event.merge(result: ApplyResult.from_task_result(event[:result])) end @executor.publish_event(event) end # Respect the run_as default set on the executor options[:run_as] = @executor.run_as if @executor.run_as && !options.key?(:run_as) results = transport.batch_task(batch, catalog_apply_task, arguments, options, &callback) Array(results).map { |result| ApplyResult.from_task_result(result) } end end end end @executor.await_results(result_promises) end # Allow for report to exclude event metrics (apply_result doesn't require it to be present) resource_counts = r.ok_set.map { |result| result.event_metrics&.fetch('total') }.compact @executor.report_apply(count_statements(raw_ast), resource_counts) if !r.ok && !options[:catch_errors] raise Bolt::ApplyFailure, r end r end |
#build_plugin_tarball ⇒ Object
286 287 288 289 290 291 292 293 294 295 296 297 298 299 300 301 302 303 304 305 306 307 308 309 310 311 312 313 314 315 316 317 318 319 320 321 322 323 |
# File 'lib/bolt/applicator.rb', line 286 def build_plugin_tarball # lazy-load expensive gem code require 'minitar' require 'zlib' start_time = Time.now sio = StringIO.new output = Minitar::Output.new(Zlib::GzipWriter.new(sio)) Puppet.lookup(:current_environment).override_with(modulepath: @plugin_dirs).modules.each do |mod| search_dirs = yield mod parent = Pathname.new(mod.path).parent files = Find.find(*search_dirs).select { |file| File.file?(file) } files.each do |file| tar_path = Pathname.new(file).relative_path_from(parent) @logger.debug("Packing plugin #{file} to #{tar_path}") stat = File.stat(file) content = File.binread(file) output.tar.add_file_simple( tar_path.to_s, data: content, size: content.size, mode: stat.mode & 0o777, mtime: stat.mtime ) end end duration = Time.now - start_time @logger.debug("Packed plugins in #{duration * 1000} ms") output.close Base64.encode64(sio.string) ensure output&.close end |
#catalog_apply_task ⇒ Object
51 52 53 54 55 56 57 58 59 60 61 62 |
# File 'lib/bolt/applicator.rb', line 51 def catalog_apply_task @catalog_apply_task ||= begin path = File.join(libexec, 'apply_catalog.rb') file = { 'name' => 'apply_catalog.rb', 'path' => path } metadata = { 'supports_noop' => true, 'input_method' => 'stdin', 'implementations' => [ { 'name' => 'apply_catalog.rb' }, { 'name' => 'apply_catalog.rb', 'remote' => true } ] } Bolt::Task.new('apply_helpers::apply_catalog', metadata, [file]) end end |
#compile(target, catalog_input) ⇒ Object
78 79 80 81 82 83 84 85 86 87 88 89 90 91 92 93 94 95 96 97 98 99 100 101 102 103 104 105 106 107 108 109 110 111 112 113 114 115 116 117 118 119 120 121 122 123 124 125 126 127 128 129 |
# File 'lib/bolt/applicator.rb', line 78 def compile(target, catalog_input) # This simplified Puppet node object is what .local uses to determine the # certname of the target node = Puppet::Node.from_data_hash('name' => target.name, 'parameters' => { 'clientcert' => target.name }) trusted = Puppet::Context::TrustedInformation.local(node) catalog_input[:target] = { name: target.name, facts: @inventory.facts(target).merge('bolt' => true), variables: @inventory.vars(target), trusted: trusted.to_h } bolt_catalog_exe = File.join(libexec, 'bolt_catalog') old_path = ENV['PATH'] ENV['PATH'] = "#{RbConfig::CONFIG['bindir']}#{File::PATH_SEPARATOR}#{old_path}" out, err, stat = Open3.capture3('ruby', bolt_catalog_exe, 'compile', stdin_data: catalog_input.to_json) ENV['PATH'] = old_path # If bolt_catalog does not return valid JSON, we should print stderr to # see what happened print_logs = stat.success? result = begin JSON.parse(out) rescue JSON::ParserError print_logs = true { 'message' => "Something's gone terribly wrong! STDERR is logged." } end # Any messages logged by Puppet will be on stderr as JSON hashes, so we # parse those and store them here. Any message on stderr that is not # properly JSON formatted is assumed to be an error message. If # compilation was successful, we print the logs as they may include # important warnings. If compilation failed, we don't print the logs as # they are likely redundant with the error that caused the failure, which # will be handled separately. logs = err.lines.map do |line| JSON.parse(line) rescue JSON::ParserError { 'level' => 'err', 'message' => line } end if print_logs logs.each do |log| bolt_level = Bolt::Util::PuppetLogLevel::MAPPING[log['level'].to_sym] message = log['message'].chomp @logger.send(bolt_level, "#{target.name}: #{message}") end end raise ApplyError.new(target.name, result['message']) unless stat.success? result end |
#count_statements(ast) ⇒ Object
Count the number of top-level statements in the AST.
166 167 168 169 170 171 172 173 174 175 |
# File 'lib/bolt/applicator.rb', line 166 def count_statements(ast) case ast when Puppet::Pops::Model::Program count_statements(ast.body) when Puppet::Pops::Model::BlockExpression ast.statements.count else 1 end end |
#custom_facts_task ⇒ Object
38 39 40 41 42 43 44 45 46 47 48 49 |
# File 'lib/bolt/applicator.rb', line 38 def custom_facts_task @custom_facts_task ||= begin path = File.join(libexec, 'custom_facts.rb') file = { 'name' => 'custom_facts.rb', 'path' => path } metadata = { 'supports_noop' => true, 'input_method' => 'stdin', 'implementations' => [ { 'name' => 'custom_facts.rb' }, { 'name' => 'custom_facts.rb', 'remote' => true } ] } Bolt::Task.new('apply_helpers::custom_facts', metadata, [file]) end end |
#plugins ⇒ Object
281 282 283 284 |
# File 'lib/bolt/applicator.rb', line 281 def plugins @plugin_tarball.value || raise(Bolt::Error.new("Failed to pack module plugins: #{@plugin_tarball.reason}", 'bolt/plugin-error')) end |
#query_resources_task ⇒ Object
64 65 66 67 68 69 70 71 72 73 74 75 76 |
# File 'lib/bolt/applicator.rb', line 64 def query_resources_task @query_resources_task ||= begin path = File.join(libexec, 'query_resources.rb') file = { 'name' => 'query_resources.rb', 'path' => path } metadata = { 'supports_noop' => true, 'input_method' => 'stdin', 'implementations' => [ { 'name' => 'query_resources.rb' }, { 'name' => 'query_resources.rb', 'remote' => true } ] } Bolt::Task.new('apply_helpers::query_resources', metadata, [file]) end end |
#validate_hiera_config(hiera_config) ⇒ Object
131 132 133 134 135 136 137 138 139 140 141 |
# File 'lib/bolt/applicator.rb', line 131 def validate_hiera_config(hiera_config) if File.exist?(File.path(hiera_config)) data = File.open(File.path(hiera_config), "r:UTF-8") { |f| YAML.safe_load(f.read, [Symbol]) } if data.nil? return nil elsif data['version'] != 5 raise Bolt::ParseError, "Hiera v5 is required, found v#{data['version'] || 3} in #{hiera_config}" end hiera_config end end |