Module: Gitchefsync

Extended by:
Configuration
Defined in:
lib/gitchefsync/log.rb,
lib/gitchefsync.rb,
lib/gitchefsync/opts.rb,
lib/gitchefsync/audit.rb,
lib/gitchefsync/common.rb,
lib/gitchefsync/config.rb,
lib/gitchefsync/errors.rb,
lib/gitchefsync/notify.rb,
lib/gitchefsync/io_util.rb,
lib/gitchefsync/version.rb,
lib/gitchefsync/env_sync.rb,
lib/gitchefsync/git_util.rb,
lib/gitchefsync/schedule.rb,
lib/gitchefsync/knife_util.rb

Overview

Git helper module

Defined Under Namespace

Modules: Configuration, FS, Git, Log, Parser Classes: Audit, AuditError, AuditItem, BerksError, BerksLockError, CmdError, ConfigError, Cookbook, EnvRepo, EnvSync, Error, FrozenError, GitError, InvalidTar, KnifeError, KnifeUtil, NoBerksError, NoGitGroups, NoMetaDataError, Notification, ScheduleSync, ValidationError

Constant Summary

VERSION =
"0.6.2"

Constants included from Configuration

Configuration::REL_BRANCH

Class Method Summary

Methods included from Configuration

configuration, configure, initialize, log, logger, options, parseAndConfigure

Class Method Details

.checkGit ⇒ Object

checks whether git is installed and on the path; raises GitError if not



# File 'lib/gitchefsync/common.rb', line 26

def self.checkGit
  include Git
  if Git.hasGit == false
    logger.error "event_id=git_error:msg=Git was not found on the path"
    raise GitError, "Git was not detected"
  end
end

.checkProjectConfig(cmd, log) ⇒ Object

Verify .gitchefsync at HEAD of default_branch



# File 'lib/gitchefsync/common.rb', line 50

def self.checkProjectConfig(cmd, log)
  begin
    proc_cmd, cmd_line = cmd
    return proc_cmd.call(cmd_line)
  rescue GitError, CmdError
    proc_log, msg = log
    proc_log.call(msg)
  end
  return
end
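
For illustration, a minimal sketch of how the (proc, argument) pairs can be built, mirroring the way pullProject (below) constructs its cmd and log tuples; the URL and message here are hypothetical placeholders:

cmd_line = "wget -qO - https://gitlab.example.com/group/repo/raw/master/.gitchefsync.yml"
cmd = [ Proc.new { |c| FS.cmd c }, cmd_line ]

msg = "event_id=project_missing_.gitchefsync.yml:project=group/repo"
log = [ Proc.new { |m| logger.info "#{m}" }, msg ]

# returns the command output, or nil when the command raised and msg was logged
check = Gitchefsync.checkProjectConfig(cmd, log)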

.getAllGroupIDs(group_names = [], group_ids = []) ⇒ Object

Get subset of known groups (determined by configured gitlab-token)



# File 'lib/gitchefsync/common.rb', line 141

def self.getAllGroupIDs(group_names=[], group_ids=[])
  groups = Array.new
  done = false
  page, per_page = 1, 100
  while !done do
    page_opts = { :per_page => per_page, :page => page}
    known_groups = Gitlab.groups(page_opts)
    if (known_groups.length == 0)
      done = true
    else
      page += 1
    end
    known_groups.each do |group|
      name = group.to_hash['name']
      id = group.to_hash['id']
      if (group_names.include? name) || (group_ids.include? id)
        groups << id
      end
    end
  end
  groups.uniq!
  groups
end
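
A usage sketch, assuming Gitlab.private_token has already been set; the group name and id below are hypothetical:

# resolve the ids of any known groups named "chef-cookbooks", plus group id 42,
# out of all groups visible to the configured token
ids = Gitchefsync.getAllGroupIDs(["chef-cookbooks"], [42])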

.gitCleanup ⇒ Object

cycles through the working directory and checks, for each repo, whether its remote repository still exists; if the remote was deleted, the staged tarballs associated with that cookbook are removed



# File 'lib/gitchefsync.rb', line 153

def self.gitCleanup
  include Git,FS
  cookbook_dirs = Dir.entries(@git_local).reject! {|item| item.start_with?(".") }
  cookbook_dirs.each do |dir|
    if !Git.remoteExists(dir,@rel_branch)
      
      #delete tar balls associated with this repo, the directory name 
      #subsequent calls to "reconcile" will clean up
      cookbook = KnifeUtil.new(@knife,dir).parseMetaData(dir)
      if cookbook != nil
        #remove all files associated with this cookbook name
        files = @stage_dir +"/" + cookbook.name() + "-*tar.gz"
        FS.cmd("rm -fr #{files}")
    
      end
    end
  end      
end

.gitDelta(path, remote_ref) ⇒ Object



# File 'lib/gitchefsync/common.rb', line 34

def self.gitDelta(path, remote_ref)
  include Git
  env_repo = @config['git_env_repo']

  local = Git.cmd "cd #{path} && #{@git_bin} rev-parse HEAD"
  #logger.debug "local #{local}: path=#{path}"
  remote = Git.cmd "cd #{path} && #{@git_bin} ls-remote origin #{remote_ref}"
  return false if remote.empty?
  remote = remote.split(/\s/)[0]

  delta = (local.chomp != remote.chomp)
  logger.debug "event_id=gitDelta:local=#{local.chomp}:remote=#{remote.chomp}:delta=#{delta}"
  delta
end
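
A usage sketch (path and ref are hypothetical):

# true when the local HEAD differs from the remote ref's tip,
# false when they match or the remote ref does not exist
if Gitchefsync.gitDelta("/var/chef/git/cookbooks_apache2", "refs/heads/master")
  # repository needs a pull / restage
end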

.help ⇒ Object

Prints a summary of actions and CLI options



# File 'lib/gitchefsync.rb', line 38

def self.help
  puts "Usage: gitchefsync [operation] -c config_file.json -t gitlab_token [--login=gitlabuser --password=gitlabpassword --syslog]"
  puts "\tgitchefsync runMasterSync -c config_file.json -t gitlab_token"
  puts "\tgitchefsync runSousSync -c config_file.json"
  puts "\tgitchefsync syncCookbooks -c config_file.json -t gitlab_token"
  puts "\tgitchefsync syncCookbooksLocal -c config_file.json"
  puts "\tgitchefsync syncEnv -c config_file.json -t gitlab_token"
  puts "\tgitchefsync stagedUpload -c config_file.json"
  puts "\tgitchefsync reconcile -c config_file.json -t gitlab_token"
  puts "\tgitchefsync gitCleanup -c config_file.json -t gitlab_token"
  puts "\tgitchefsync trimAudit -c config_file.json"
end
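
The -c flag points at a JSON configuration file. Below is a hedged sketch of such a file, using only keys that methods in this module actually read; it is not the full schema, and all values are placeholders:

{
  "stage_dir": "/var/chef/stage",
  "gitlab_autodiscover": false,
  "gitlab_group_names": ["cookbooks"],
  "gitlab_group_ids": [],
  "gitlab_url_type": "http",
  "cookbook_repo_list": ["https://gitlab.example.com/group/repo.git"],
  "git_env_repo": "https://gitlab.example.com/group/chef-repo.git",
  "sync_local": false,
  "force_package": false,
  "smtp_server": "smtp.example.com",
  "default_notify_email": "ops@example.com"
}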

.included(base) ⇒ Object



# File 'lib/gitchefsync/common.rb', line 21

def self.included base
  base.extend ClassMethods
end

.init(opts) ⇒ Object



# File 'lib/gitchefsync.rb', line 500

def self.init(opts)
  configure(opts)

end

.mergeEnvRepos ⇒ Object

Merges environment repos together by introspecting the working directory for chef-repo directories. It is assumed that all repositories have already been pulled.



# File 'lib/gitchefsync/env_sync.rb', line 358

def self.mergeEnvRepos
  include FS,Git

  global_env_path =  @git_local + "/global_chef_env"

  working_dir = Dir.entries(@git_local).reject! {|item| item.start_with?(".") || item.eql?("global_chef_env")}
  working_dir.each  do |dir|
    path = File.join(@git_local, dir)
    chef_repo_dir = path + "/chef-repo"
    if Dir.exists?(chef_repo_dir)
      logger.info("event_id=processing_child_env_repo:dir=#{dir}")
      begin
        #add this repository as a remote
        Git.cmd "cd #{global_env_path} && #{@git_bin} remote add #{dir} file://#{File.join(path,dir)}"
      rescue Exception => e
        logger.info "event_id=git_remote_already_exists:#{e.message}"
      end
      begin
        #Merge the content via pull
        logger.info "event_id=env_merge:src=#{dir}"
        output = Git.cmd "cd #{global_env_path} && #{@git_bin} pull #{dir} master"
        logger.info "event_id=env_merge_success:msg=#{output}"
      rescue Exception => e
        logger.error "event_id=env:output=#{output}"
        Git.cmd "cd #{global_env_path} && #{@git_bin} reset --hard origin/master"
      end
    end
  end
end

.notifyFromAudit ⇒ Object



# File 'lib/gitchefsync.rb', line 63

def self.notifyFromAudit
  notification = Notification.new(@config['smtp_server'])

  notification.singleNotifyFromAudit(@audit_dir, 'cb', @config['default_notify_email'])
  notification.singleNotifyFromAudit(@audit_dir, 'env', @config['default_notify_email'])

  notification.close
end

.processCookbook(path, audit) ⇒ Object

Processes the cookbook at the given path (from the working directory). If the cookbook already exists on the server it is neither packaged nor uploaded; one other condition may be added later to force this packaging behaviour, and hence rsync.



# File 'lib/gitchefsync.rb', line 260

def self.processCookbook(path,audit)
  knifeUtil = KnifeUtil.new(@knife, @git_local)
  cookbook = knifeUtil.parseMetaData(path)
  logger.debug "event_id=processing:cookbook=#{cookbook}"
  
  
  if cookbook != nil
    stage_tar = @config['stage_dir'] +"/" + cookbook.berksTar()
    tar_exists = File.exists?(stage_tar)
  end
  
  begin
    
    #get some git historical info
    extra = Hash.new
    extra['sha'] = (Git.cmd "cd #{path} && git log -1 --pretty=%H").chomp
    extra['author_email'] = (Git.cmd "cd #{path} && git log -1 --pretty=%ce").chomp
    extra['date'] = (Git.cmd "cd #{path} && git log -1 --pretty=%cd").chomp
    extra['subject'] = (Git.cmd "cd #{path} && git log -1 --pretty=%s").chomp
      
    if  (cookbook !=nil && (!knifeUtil.isCBinList(cookbook, self.serverCookbooks()) || !tar_exists ))
      berks_tar = self.stageBerks(path ,  @config['stage_dir'])
      #upload cookbooks still puts a Berksfile, will refactor this method
      self.uploadBerks(path)
      logger.debug("event_id=staging:cookbook=#{cookbook}:berks_tar=#{berks_tar}")
      self.stageCBUpload(berks_tar, @stage_cb_dir, knifeUtil, self.serverCookbooks())
      audit.addCookbook(cookbook,"UPDATE",nil,extra) if berks_tar.nil?
      logger.info "event_id=cookbook_staged:cookbook=#{cookbook}"

    elsif cookbook !=nil && @config['force_package']
      logger.info "event_id=cookbook_force_package:cookbook=#{cookbook}"
      self.stageBerks(path, @config['stage_dir'])
    elsif cookbook != nil
      audit.addCookbook(cookbook, "EXISTING",nil,extra)
      logger.info "event_id=cookbook_untouched:cookbook=#{cookbook}"
    end
  rescue BerksError => e
    logger.error "event_id=berks_package_failure:msg=#{e.message}:trace=#{e.backtrace}"
    audit.addCookbook(cookbook, "ERROR", e,  extra)
  end
  
  Git.cmd "cd #{path} && git clean -xdf"
  return cookbook
end

.pullAllProjects ⇒ Object

Pulls all known projects (determined by configured gitlab-token)



# File 'lib/gitchefsync/common.rb', line 122

def self.pullAllProjects
  all_projects = []
  done = false
  page, per_page = 1, 100
  while !done do 
    page_opts = { :per_page => per_page, :page => page}
    repos = Gitlab.projects(page_opts)
    if (repos.length == 0)
      done = true
    else 
      page += 1
    end
    repos.each do |project|
      pullProject(project.to_hash, true)
    end
  end
end

.pullCookbooks ⇒ Object

Pulls all the cookbooks configured via the policy in sync_config.json. With auto-discovery, pulls every project, and every project from each group, that this user has access to.



# File 'lib/gitchefsync.rb', line 106

def self.pullCookbooks
  Gitlab.private_token = @token
  
  group_names = (@config['gitlab_group_names'] or [])
  group_ids = (@config['gitlab_group_ids'] or [])

  if @config['gitlab_autodiscover']
    # Find all projects known by gitlab-token
    # Determine which of these projects contains .gitchefsync.yml at HEAD of default branch
    self.pullAllProjects
  else 
    logger.debug "Synchronizing group names: #{group_names}"
    logger.debug "Synchronizing group ids: #{group_ids}"
    self.getAllGroupIDs(group_names, group_ids).each do |groupid|
      group = Gitlab.group groupid
      projects = group.to_hash['projects']
      projects.each do |project|
        self.pullProject(project)
      end
    end
  end

  repo_list = @config['cookbook_repo_list']
  
  #explicit set list of cookbook repositories
  if repo_list != nil
    logger.info "event_id=repo_list_sync:config=#{@config['cookbook_repo_list']}"
    repo_list.each do |repo|
      #match the "path: full_path/repo.git"
      match = repo.split('/')
      if match == nil
        raise GitError, "Can not parse #{repo}"
      end
      path = match[match.length-1]
      path = path[0..path.length-5]
      begin
        project_path = @git_local + "/" + path
        self.updateGit(project_path, repo)
      rescue GitError => e
        logger.error "event_id=git_error:msg=#{e.message}:trace=#{e.backtrace}"
        logger.error "event_id=remove_project_path: #{project_path}"
        FS.cmd "rm -rf #{project_path}"
      end
    end
  end
end

.pullProject(project, verify_yml = false) ⇒ Object

Parameters:

  • project
    • Gitlab project object



# File 'lib/gitchefsync/common.rb', line 62

def self.pullProject(project, verify_yml=false)
  
  #MAND-791 skip private repositories
  if !project['public']
    logger.warn "event_id=private_project_detected:project=#{project['path_with_namespace']}"
  end
  p_name = project['path_with_namespace'].split('/').join('_')
  project_path = File.join(@git_local, p_name)

  default_branch =  project['default_branch']

  url_type = @config['gitlab_url_type']
  # "http" is default if @config['gitlab_url_type'] not configured
  url_type ||= "http"

  if url_type.eql?("http")
    project_url = project['http_url_to_repo']
    # Verify .gitchefsync at HEAD of default_branch using `wget -O - url`
    cmd_line = "wget -qO - #{project['web_url']}/raw/#{default_branch}/.gitchefsync.yml"
    cmd = [ Proc.new{ |cmd_line| FS.cmd cmd_line }, cmd_line ]

  elsif url_type.eql?("ssh")
    project_url = project['ssh_url_to_repo']
    # Verify .gitchefsync at HEAD of default_branch using `git archive` (not currently supported/enabled using https protocol)
    cmd_line = "git archive --remote=#{project_url} #{default_branch}: .gitchefsync.yml"
    cmd = [ Proc.new { |cmd_line| Git.cmd("#{cmd_line}") }, cmd_line ]
  end

  msg = "event_id=project_missing_.gitchefsync.yml:project_url=#{project_url}:default_branch=#{default_branch}"
  proc_log = Proc.new { |msg| logger.info "#{msg}" }
  log = [ proc_log, msg ]

  if verify_yml
    check = checkProjectConfig(cmd, log)
    if check.nil? || check.empty?
      proc_log.call(msg)
      return
    end
  end

  begin
    self.updateGit(project_path, project_url)
  rescue GitError => e
    logger.error "event_id=git_error:msg=#{e.message}:trace=#{e.backtrace}"
    logger.error "event_id=remove_project_path: #{project_path}"
    FS.cmd "rm -rf #{project_path}"
  end
end
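
A usage sketch with the minimal hash keys this method reads; all values below are hypothetical:

project = {
  'public'              => true,
  'path_with_namespace' => 'cookbooks/apache2',
  'default_branch'      => 'master',
  'web_url'             => 'https://gitlab.example.com/cookbooks/apache2',
  'http_url_to_repo'    => 'https://gitlab.example.com/cookbooks/apache2.git',
  'ssh_url_to_repo'     => 'git@gitlab.example.com:cookbooks/apache2.git'
}
# verify_yml=true skips the clone when .gitchefsync.yml is absent at HEAD
Gitchefsync.pullProject(project, true)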

.reconcile ⇒ Object

Compares git state against the staging directory. WARN: the stage directory should be populated first, since cookbooks can be deleted when the staging directory is empty; the latest audit file is checked to verify that at least one successful run has occurred.

Does a two-way compare of the cookbook list on the chef server against the berks tar packages found in the staging directory: cookbooks missing from the server are added, and cookbooks no longer staged are deleted. No audit file is generated; TODO: move the cookbook audit object to module scope.



# File 'lib/gitchefsync.rb', line 516

def self.reconcile

  #Validation
  if Audit.new(@audit_dir, 'cb').latest == nil
    logger.warn "event_id=reconcile_no_audit_detected"
    return
  end

  logger.info "event_id=reconcile:dir=#{@stage_dir}"
  knifeUtil = KnifeUtil.new(@knife, @git_local)
  #Here is what is in the server
  listCB = knifeUtil.listCookbooks

  list_stage = Array.new
  tmp_dir = @stage_cb_dir + "/.tarxf"
  FS.cmd("mkdir -p #{tmp_dir}")

  #Compile what is happening in the stage directory
  stage = @stage_dir + "/*tar.gz"
  Dir.glob(stage).each  do |file|

    begin
      logger.debug "event_id=reconcile_file:file=#{file}"

      FS.cmd "tar -tf #{file} | grep metadata.rb | tar -xf #{file} -C #{tmp_dir}"
      local_list = Array.new
      files  = tmp_dir + "/cookbooks/*/metadata.rb"
      Dir.glob(files) do |metadata|
        cookbook = knifeUtil.parseMetaData(File.expand_path("..", metadata))
        if cookbook !=nil
          list_stage << cookbook
          local_list << cookbook
        end
      end
      #of the local list do we have all of them in chef?
      add_list = knifeUtil.subtract(local_list,listCB)
      logger.debug "local_list #{local_list} delta: #{add_list}"

      self.stageCBUpload(file,@stage_cb_dir,knifeUtil,listCB,true) if !add_list.empty?()
    rescue KnifeError => e
      logger.warn "#{e.message}"
    ensure
      #finally remove what was in the berks tar and in the working tarxf dir
      FS.cmd("rm -fr #{tmp_dir}/*")
    end

  end
  #From the full list of local cookbooks (local_list)
  #we have both sides (what is local) and what is on server
  del_list = knifeUtil.subtract(listCB,list_stage)

  if !del_list.empty?
    logger.warn "event_id=del_cb_pending:cb=#{del_list}"
    del_list.each do |cb|
      logger.debug "deleting: #{cb}"
      #deletion of cookbook - this currently doesn't check node usage
      #so could have deterious side effects
      knifeUtil.delCookbook(cb)
    end
  end
end
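
Assuming KnifeUtil#subtract returns the entries of its first list that are missing from the second, the two-way compare works out as in this illustrative example:

# server list (listCB):      [apache2-1.0.0, mysql-2.0.0]
# staged tars (list_stage):  [mysql-2.0.0, nginx-0.1.0]
#
# knifeUtil.subtract(local_list, listCB)  => [nginx-0.1.0]    # staged but not on server: upload
# knifeUtil.subtract(listCB, list_stage)  => [apache2-1.0.0]  # on server but not staged: delete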

.runMasterSync ⇒ Object



# File 'lib/gitchefsync/schedule.rb', line 147

def self.runMasterSync
  scheduleSync = ScheduleSync.new()
  scheduleSync.master
end

.runSousSync ⇒ Object



# File 'lib/gitchefsync/schedule.rb', line 152

def self.runSousSync
  #force local sync
  options[:config]['sync_local'] = "true"
    
  #Make sure sous sync only runs on the primary node
  drbd_connection_state = FS.cmd("sudo drbdadm cstate chef_disk", 
      { "TERM" => "xterm", "PATH" => "/usr/sbin:/usr/bin:/bin:/sbin" })
  drbd_role = FS.cmd("sudo drbdadm role chef_disk", 
      { "TERM" => "xterm", "PATH" => "/usr/sbin:/usr/bin:/bin:/sbin" })
  
  drbd_connection_state.delete!("\n")
  drbd_role.delete!("\n")
  
  connected = drbd_connection_state.match(/Connected/)
  role = drbd_role.match(/^Primary/)
  
  if connected and role
    Gitchefsync.logger.info "event_id=proceed_to_sous_sync:drbd_connection_state=#{connected}:drbd_role=#{role}"
    scheduleSync = ScheduleSync.new()
    scheduleSync.sous
  else
    Gitchefsync.logger.fatal "event_id=abort_sous_sync:drbd_connection_state=#{connected}:drbd_role=#{role}"
    exit 1
  end
end

.serverCookbooks ⇒ Object

central place to get cookbooks from the server; the list is determined once, and will (eventually) be thread safe



# File 'lib/gitchefsync/common.rb', line 113

def self.serverCookbooks
  if @serverCookbooks.nil?
    knifeUtil = KnifeUtil.new(@knife, @git_local)
    @serverCookbooks = knifeUtil.listCookbooks()
  end
  @serverCookbooks
end
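
Since the memoization above is not yet synchronized, here is a hedged sketch of what the eventual thread-safe version could look like, using a Mutex; this is an assumption about future direction, not the current implementation:

@server_cb_mutex = Mutex.new

def self.serverCookbooks
  @server_cb_mutex.synchronize do
    # ||= keeps the single-fetch behaviour while guarding the race
    @serverCookbooks ||= KnifeUtil.new(@knife, @git_local).listCookbooks()
  end
end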

.stageBerks(path, stage_dir) ⇒ Object

does an install and package of the berks cookbook into a staging directory; returns the path to the berks tar file



# File 'lib/gitchefsync.rb', line 348

def self.stageBerks(path, stage_dir)
  include FS
  
  begin
    if File.exists?(File.join(path,"Berksfile.lock"))
      raise BerksLockError, "Berks lock file found"
    end
    if File.exists?(File.join(path, "Berksfile"))
      logger.debug "event_id=Stage_cookbook:path=#{path}"
      
            
      #get the name from metadata if available
      cookbook = KnifeUtil.new(@knife,path).parseMetaData(path)
      if cookbook != nil
        #remove residual tar - this could be problematic if there is are tars in the 
        #cookbook
        FS.cmd "rm -f #{path}/#{cookbook.berksTar}"
        
        #Since cmdBerks doesn't raise exception must provide alternate check
        out = FS.cmdBerks "cd #{path} && #{@berks} package #{cookbook.berksTar}"
        logger.info "event_id=berks_package=#{out}"
        if File.exists? "#{path}/#{cookbook.berksTar}"
          
          # empty tarballs in staging produced errors in staged upload
          # this can happen when Berksfile is a blank file
          file_count = FS.cmd "tar tf #{path}/#{cookbook.berksTar} | wc -l"
          if file_count.to_i > 1
            FS.cmd "mv #{path}/#{cookbook.berksTar} #{stage_dir}"
          else 
            logger.info "event_id=berks_package_produced_empty_tarball: #{path}/#{cookbook.berksTar}"
            FS.cmd "rm -f #{path}/#{cookbook.berksTar}" 
            raise BerksError.new("`berks package` produced empty tarball: #{path}/#{cookbook.berksTar}")
          end
          
        else
          logger.info "event_id=berks_package_failed: #{path}/#{cookbook.berksTar}" 
          raise BerksError.new("Something went wrong generating berks file: #{out}")
        end
      
        return "#{stage_dir}/#{cookbook.berksTar}"
      end
    else
      raise NoBerksError, "Unable to locate Berks file for #{path}"
    end
  
  rescue NoBerksError,BerksLockError => e 
    raise e
  rescue Exception => e
    raise BerksError.new(e.message) 
  end
  
end
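
A usage sketch with hypothetical paths; the return value is nil when no cookbook metadata could be parsed at the path:

begin
  tar = Gitchefsync.stageBerks("/var/chef/git/cookbooks_apache2", "/var/chef/stage")
  puts "staged: #{tar}" unless tar.nil?
rescue Gitchefsync::NoBerksError, Gitchefsync::BerksLockError => e
  # no Berksfile present, or a committed lock file blocks packaging
  puts e.message
rescue Gitchefsync::BerksError => e
  # packaging failed or produced an empty tarball
  puts e.message
end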

.stageCBUpload(file, cookbook_dir, knifeUtil, listCB, forceUpload = false) ⇒ Object

Extracts and uploads cookbooks via knife from the staging directory. Passing in the knife and list instances is not ideal; slated for later refactoring.



# File 'lib/gitchefsync.rb', line 447

def self.stageCBUpload(file, cookbook_dir, knifeUtil, listCB, forceUpload = false)
  begin
    logger.info "knife_cookbook_upload:file=#{file}:dest=#{cookbook_dir}"
    match = File.basename(file).match(/(.*)-(\d+\.\d+\.\d+)/)

    if match ==nil || match.length != 3
      logger.error "event_id=invalid_tar:file=#{file}"
      raise InvalidTar, "Invalid tar name #{file}"
    end

    #logger.debug "In chef server? #{knifeUtil.inList(match[1],match[2],listCB)}"
    
   
    if !knifeUtil.inList(match[1],match[2],listCB) || forceUpload
      logger.info "event_id=stage_upload:cookbook=#{match[1]}:ver=#{match[2]}:dir=#{cookbook_dir}"
      FS.cmd "tar -xf #{file} -C #{cookbook_dir}"
      new_cb_list = Array.new       
      cb_dir = Dir.entries(cookbook_dir + "/cookbooks")
      cb_dir.each do |dir|
        
        begin
          cb_info = knifeUtil.parseMetaData(cookbook_dir + "/cookbooks/" + dir)
          if knifeUtil.inList(cb_info.name(),cb_info.version,listCB)
            logger.debug "event_id=del_cb_in_server:name=#{cb_info}"
            FS.cmdNoError "rm -fr #{cookbook_dir}/cookbooks/#{cb_info.name}"
          else
            #TODO: add this as a concat method in knife_util class
             new_cb_list << cb_info
          end
        rescue NoMetaDataError => e
          logger.debug "no metadata on #{dir}"
        end
      end
      out = FS.cmd "cd #{cookbook_dir} && #{@knife} cookbook upload -a --cookbook-path=#{cookbook_dir}/cookbooks"
      listCB.concat(new_cb_list)
      logger.debug "event_id=stage_upload_output=\n#{out}"
    else
      logger.info "event_id=stage_no_upload:cookbook=#{match[1]}:ver=#{match[2]}"
    end
  rescue CmdError => e
    #logger.error "event_id=cmd_err:#{e.message}"
    
    raise KnifeError.new(e.message)
  rescue InvalidTar => e
    logger.error "event_id=invalid_tar:msg=Continuing on invalid tar"
  ensure
    if File.exists?(cookbook_dir)
      FS.cmd "rm -fr #{cookbook_dir}/*"
    end
  end
 
end
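
The tar name must embed a semantic version for the match above to succeed; for example:

File.basename("apache2-1.2.3.tar.gz").match(/(.*)-(\d+\.\d+\.\d+)/)
# => match[1] = "apache2", match[2] = "1.2.3"
# "apache2.tar.gz" yields nil (no x.y.z component), so InvalidTar is raised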

.stagedUpload ⇒ Object

Finds all versions of cookbooks on the server via a knife command. For each tar in the stage directory a knife upload is attempted, skipped when that cookbook and version already exist on the server. Each tar file is extracted to a cookbook directory where a knife upload -a is attempted on the entire directory; the directory is cleaned as each tar is processed.



# File 'lib/gitchefsync.rb', line 409

def self.stagedUpload 
  include FS

 
  #read in the latest audit - fail on non-null exceptions
  audit = Audit.new(@audit_dir,'cb' )
   
  json = audit.parseLatest 
  if json != nil && audit.hasError(json)
    logger.error "event_id=audit_error:audit=#{json}"
    
    #Do not raise AuditError because it halts entire service
    #Read MAND-613 for more information
    #TODO: MAND-614 - Notification needed for errors in gitchefsync audit file
    #raise AuditError
  end
 
  cookbook_dir = @stage_cb_dir
  
  FS.cmd "mkdir -p #{cookbook_dir}"
  FS.knifeReady(cookbook_dir,@options[:knife_config])
  
  #Check on what is uploaded, knife util creates a list for us
  knifeUtil = KnifeUtil.new(@knife, cookbook_dir)
  listCB = knifeUtil.listCookbooks
  logger.debug "list: #{listCB}"
  stage = @stage_dir + "/*tar.gz"

  Dir.glob(stage).each  do |file|
    logger.debug "event_id=stage_upload:file=#{file}"
    stageCBUpload(file, cookbook_dir, knifeUtil, listCB)
  end
end

.syncCookbooks ⇒ Object

Performs a git synchronization of cookbooks, pulling from the gitlab groups configured in sync-config.json. For each configured group, pulls all associated projects into the working_directory specified in sync-config.json.

Each repository pulls only from the configured release branch and fetches the tags associated with that branch. The last step is to invoke syncCookbooksLocal.



# File 'lib/gitchefsync.rb', line 83

def self.syncCookbooks 
  include  FS
  
  
  
  FS.knifeReady(@options[:git_local], @options[:knife_config])
  @knife_ready = true
  
  if !@config['sync_local'] 
   
    self.pullCookbooks()
  else
    logger.warn "event_id=Skip_cookbook_git_sync:path=#{@git_local}"
  end
  #git is synchronized (other than deletion - see gitCleanup if you want to clean up)
  #move to synchronization on the local file system
  self.syncCookbooksLocal
end

.syncCookbooksLocal ⇒ Object

For each repository in the working directory (defined by sync-config.json), checks out each tag, then:

  1. upload to the configured chef server via a berks upload

  2. package the cookbook in the stage_dir (defined in sync-config.json)

  3. create an audit of each cookbook that was created

param options - the list of options



# File 'lib/gitchefsync.rb', line 179

def self.syncCookbooksLocal 
  include FS,Git
 
  logger.info "event_id=stage_cookbooks:git_local=#{@git_local}"
  FS.knifeReady(@options[:git_local], @options[:knife_config]) unless @knife_ready
  ret_status = Hash.new
  
  
  #not sure if this should be globally governed?
  audit = Audit.new(@audit_dir, 'cb')
 
  
  knifeUtil = KnifeUtil.new(@knife, @git_local)
  #Have a delta point: interact with the chef server to identify delta
  listCB = knifeUtil.listCookbooks
  #list of cookbooks processed
  list_processed = Array.new
  cookbook_dirs = Dir.entries(@git_local).reject! {|item| item.start_with?(".") }
  cookbook_dirs.each do |dir|

    path = File.join(@git_local, dir)
    
    arr_tags = Git.branchTags(path, @rel_branch)
    
    
    #match tag against version in metadata.rb
    #possible error condition
    arr_tags.each do |tag|
      
      
      begin
        logger.debug "event_id=git_checkout:path=#{path}:tag=#{tag}"
        Git.cmd "cd #{path} && #{@git_bin} checkout #{tag}"
        
        cb = self.processCookbook(path,audit)
        list_processed << @stage_dir + "/" + cb.berksTar() unless cb.nil?
        
      rescue NoMetaDataError => e
        #No audit written on failure to parse metadata
        logger.info "event_id=nometadata:dir=#{dir}"
        next
      rescue KnifeError => e
        #No audit written on failure to parse metadata
        logger.error "event_id=cmd_error:#{e.message}:trace=#{e.backtrace}"
        next
      rescue NoBerksError => e
        #No audit written on no Berks file
        logger.error "event_id=cmd_error:#{e.message}:trace=#{e.backtrace}"
        next
      
      rescue Exception => e
        
        logger.error "event_id=git_error:msg=#{e.message}:trace=#{e.backtrace}"
        cookbook = Cookbook.new(dir,tag) if cookbook.nil?
        audit.addCookbook(cookbook,"ERROR",e)
        next
      end
    end
  end
  
  #compared list_processed with what is in stage
  #and delete the tars
  stage = @stage_dir + "/*tar.gz"
  existing = Dir.glob(stage)
  to_del = existing - list_processed
  logger.info "event_id=list_tar_delta:del_list=#{to_del}"
  to_del.each do |file| 
    File.delete(file)
  end
  #reconcile method will actually delete the cookbooks from server
   
  #write out the audit file
  audit.write
  #clean the audit files
  audit.trim(@audit_keep_trim)
end

.syncEnv ⇒ Object

sync all environment, data_bags and roles json in repo(s)



# File 'lib/gitchefsync/env_sync.rb', line 337

def self.syncEnv
  logger.info "event_id=env_sync_start"

  #TODO: Auto discover `chef-repo` type repositories known by chefbot
  url = @config['git_env_repo']
  url.gsub!("http://", "https://") if url.start_with? "http://"
  envRepo = EnvRepo.new(https_url_to_repo=url)
  if !@config['sync_local']
    envRepo.sync_repo
  else
    logger.info "event_id=Skip_syncing_env_repos_from_git"
  end

  envSync = EnvSync.new(repo_list=[envRepo])
  logger.info "event_id=start_to_update_json_files"
  envSync.update_json_files
end

.trimAudit ⇒ Object

trims the environment and cookbook audits, keeping @audit_keep_trim number of files



# File 'lib/gitchefsync.rb', line 53

def self.trimAudit
  logger.debug("event_id=trim_audit_files:keep=#{@audit_keep_trim}")
  audit = Audit.new(@audit_dir, 'env' )
  audit.trim(@audit_keep_trim)
  
  audit = Audit.new(@audit_dir, 'cb' )
  audit.trim(@audit_keep_trim)
  
end

.updateGit(project_path, git_path) ⇒ Object

TODO: change from git to git with path



# File 'lib/gitchefsync/common.rb', line 166

def self.updateGit (project_path, git_path)
  include Git
  begin
    branch = @rel_branch
    logger.debug "using release branch: #{branch}"
    _git = @git_bin
    if !Git.gitInit(project_path)
      FS.cmd "mkdir #{project_path}"
      logger.debug "event_id=git_int:path=#{git_path}:project_path=#{project_path}"
      Git.cmd "cd #{project_path} && #{_git} init"
      Git.cmd "cd #{project_path} && #{_git} remote add origin #{git_path}"
      Git.cmd "cd #{project_path} && #{_git} pull origin #{branch}"
    else
      logger.info "event_id=git_repo_exists:project_path=#{project_path}"
      #wipe tags and re-fetch them
      tags = Git.cmd "cd #{project_path} && git tag"
      arr_tags = tags.split(/\n/)
      arr_tags.each do |tag|
        Git.cmd "cd #{project_path} && git tag -d #{tag}"
      end
    end

    #get all commits and tags
    Git.cmd "cd #{project_path} && #{_git} clean -xdf"
    Git.cmd "cd #{project_path} && #{_git} checkout #{branch}"
    Git.cmd "cd #{project_path} && #{_git} pull origin #{branch}"

    git_tags = Git.cmd "cd #{project_path} && #{_git} fetch --tags"

  rescue CmdError => e
    raise GitError, "An error occurred synchronizing cookbooks #{e.message}"
  end
end
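
A usage sketch with hypothetical paths; on the first run the repo is cloned into project_path via init + pull, and on later runs it is cleaned, switched back to the release branch, and its tags are refreshed:

Gitchefsync.updateGit("/var/chef/git/cookbooks_apache2",
                      "git@gitlab.example.com:cookbooks/apache2.git")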

.uploadBerks(path) ⇒ Object

does a berks upload of the path; this ends up using the sources in the Berksfile, which is not good for the production sync



# File 'lib/gitchefsync.rb', line 308

def self.uploadBerks(path)
  include FS
  
  begin
    if File.exists?(File.join(path, "Berksfile"))
      logger.debug "Berkshelf originally used in this tagged version of cookbook"
    elsif File.exists?(File.join(path, "metadata.rb"))
      logger.debug "Berkshelf was not originally used in this tagged version of cookbook"
      logger.info "event_id=create_berks:path=#{path}"
      berksfile = File.new(File.join(path, "Berksfile"), "w")

      version = FS.cmd "#{@berks} -v"
      if version.start_with?("3.")
          berksfile.puts("source \"https:\/\/api.berkshelf.com\"\nmetadata")
      else
          berksfile.puts("site :opscode\nmetadata")
      end
      berksfile.close
    else
      raise NoBerksError, "Unable to locate Berks file for #{path}"
    end
    
    if @berks_upload
      logger.info "event_id=berks_install_upload&cookbook=#{path}"
              
      out = FS.cmdBerks "cd #{path} && rm -f Berksfile.lock && #{@berks} install && #{@berks} upload"
      
      logger.info "event_id=berks_upload=#{out}"
    else
      logger.debug "event_id=no_berks_upload&cookbook=#{path}"
    end
  rescue Exception => e
    raise BerksError.new(e.message)
  end
end