# File 'lib/opzworks/commands/berks.rb', line 17

def self.run
  options = Trollop.options do
    banner <<-EOS.unindent
      #{BERKS.banner}

        opzworks berks stack1 stack2 ...

      The stack name can be passed as any unique regex. If there is
      more than one match, it will simply be skipped.

      Options:
    EOS
    opt :update, 'Trigger update_custom_cookbooks on stack after uploading a new cookbook tarball.', default: true
  end
  ARGV.empty? ? Trollop.die('no stacks specified') : false

  config = OpzWorks.config

  aws_credentials_provider = Aws::SharedCredentials.new(profile_name: config.aws_profile)
  s3 = Aws::S3::Resource.new(region: config.aws_region, credentials: aws_credentials_provider)

  opsworks = Aws::OpsWorks::Client.new(region: config.aws_region, profile: config.aws_profile)
  response = opsworks.describe_stacks

  # loops over inputs
  ARGV.each do |opt|
    var = populate_stack(opt, response)
    next if var == false

    hash = {
      'PROJECT:'  => @project,
      'STACK ID:' => @stack_id,
      'S3 PATH:'  => @s3_path,
      'BRANCH:'   => @branch
    }
    puts "\n"
    hash.each { |k, v| printf("%-25s %-25s\n", k.foreground(:green), v.foreground(:red)) }

    var = manage_berks_repos
    next if var == false

    berks_cook_path  = config.berks_base_path || '/tmp'
    cook_path        = "#{berks_cook_path}/#{@project}-#{@branch}"
    install_path     = "#{cook_path}/cookbooks-#{@project}-#{@branch}"
    cookbook_tarball = config.berks_tarball_name || 'cookbooks.tgz'
    cookbook_upload  = "#{cook_path}/#{cookbook_tarball}"
    s3_bucket        = config.berks_s3_bucket || 'opzworks'
    overrides        = 'overrides'

    if File.exist?("#{@target_path}/Berksfile.opsworks")
      puts 'Remote management berksfile detected, not building local berkshelf.'.foreground(:yellow)

      FileUtils.mkdir_p(install_path) unless File.directory?(install_path)
      FileUtils.copy("#{@target_path}/Berksfile.opsworks", "#{install_path}/Berksfile")
    else
      # berks
      #
      puts 'Running berks install'.foreground(:blue)
      run_local <<-BASH
        cd #{@target_path}
        berks update
      BASH
      run_local <<-BASH
        cd #{@target_path}
        berks vendor #{install_path}
      BASH
    end

    # if there's an overrides file, just pull it and stuff the contents into the
    # upload repo; the line is assumed to be a git repo. This is done to override
    # opsworks templates without destroying the upstream cookbook.
    #
    # For example, to override the default nginx cookbook's nginx.conf, create a git
    # repo with the directory structure nginx/templates/default and place your
    # custom nginx.conf.erb in it.
    #
    if File.file?("#{@target_path}/#{overrides}")
      FileUtils.mkdir_p(install_path) unless File.directory?(install_path)
      File.open("#{@target_path}/#{overrides}") do |f|
        f.each_line do |line|
          puts "Copying override #{line}".foreground(:blue)
          `cd #{install_path} && git clone #{line}`
        end
      end
    end

    puts 'Committing changes and pushing'.foreground(:blue)
    system "cd #{@target_path} && git commit -am 'berks update'; git push origin #{@branch}"

    puts 'Creating tarball of cookbooks'.foreground(:blue)
    FileUtils.mkdir_p(cook_path)
    run_local "tar czf #{cookbook_upload} -C #{install_path} ."

    # upload
    #
    puts 'Uploading to S3'.foreground(:blue)

    begin
      obj = s3.bucket(s3_bucket).object("#{@s3_path}/#{cookbook_tarball}")
      obj.upload_file(cookbook_upload)
    rescue StandardError => e
      puts "Caught exception while uploading to S3 bucket #{s3_bucket}: #{e}".foreground(:red)
      puts 'Cleaning up before exiting'.foreground(:blue)
      FileUtils.rm(cookbook_upload)
      FileUtils.rm_rf(install_path)
      abort
    else
      puts "Completed successful upload of #{@s3_path}/#{cookbook_tarball} to #{s3_bucket}!".foreground(:green)
    end

    # cleanup
    #
    puts 'Cleaning up'.foreground(:blue)
    FileUtils.rm(cookbook_upload)
    FileUtils.rm_rf(install_path)
    puts 'Done!'.foreground(:green)

    # update remote cookbooks
    #
    if options[:update] == true
      puts "Triggering update_custom_cookbooks for remote stack (#{@stack_id})".foreground(:blue)

      hash = {}
      hash[:comment]  = 'shake and bake'
      hash[:stack_id] = @stack_id
      hash[:command]  = { name: 'update_custom_cookbooks' }

      begin
        opsworks.create_deployment(hash)
      rescue Aws::OpsWorks::Errors::ServiceError => e
        puts 'Caught error while attempting to trigger deployment: '.foreground(:red)
        puts e
      end
    else
      puts 'Update custom cookbooks skipped via --no-update switch.'.foreground(:blue)
    end
  end
end
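
A note on the overrides mechanism described in the comments above: each line of the overrides file is treated as a git repository and cloned directly into the vendored cookbook directory, so a repository laid out like nginx/templates/default/nginx.conf.erb shadows the upstream cookbook's template in the uploaded tarball. The snippet below is a minimal standalone sketch of that loop only, not part of the gem; the paths and repository URL are hypothetical.

  # Illustrative sketch of the overrides handling, with hypothetical values.
  require 'fileutils'

  target_path  = '/path/to/stack/repo'                              # hypothetical checkout
  install_path = '/tmp/myproject-master/cookbooks-myproject-master' # hypothetical vendor dir
  overrides    = File.join(target_path, 'overrides')

  if File.file?(overrides)
    FileUtils.mkdir_p(install_path)
    File.readlines(overrides).map(&:strip).reject(&:empty?).each do |repo|
      # e.g. repo = 'git@github.com:example-org/opsworks-nginx-overrides.git' (hypothetical)
      system('git', 'clone', repo, chdir: install_path)
    end
  end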