Module: GoodData::Bam::Commands

Includes:
CloudConnect
Defined in:
lib/commands/commands.rb,
lib/commands/deployment.rb,
lib/commands/validators.rb,
lib/commands/gd_commands.rb,
lib/commands/sf_commands.rb,
lib/commands/docs_commands.rb,
lib/commands/scaffold_commands.rb

Constant Summary

VALIDATORS_HOME = "validators-project"
DOWNLOADERS_HOME = "downloaders-project"
ETL_HOME = "etl-project"

Class Method Summary

Class Method Details

.clean_files(files, now) ⇒ Object



# File 'lib/commands/deployment.rb', line 22

def self.clean_files(files, now)
  upload_paths = files.map do |file|
    file = Pathname(file)
    ext = file.extname
    "#{file.to_s.chomp(ext)}_#{now}#{ext}"
  end

  upload_paths.each do |f|
    system "curl -u #{PARAMS[:gd_login]}:#{PARAMS[:gd_pass]} -k  -X DELETE https://secure-di.gooddata.com/project-uploads/#{PARAMS[:project_pid]}/#{f}"
    puts "Uploaded #{f[1]}"
  end
end
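
A minimal sketch of how clean_files pairs with upload_files: both derive the remote names from the same timestamp. The file name is hypothetical; PARAMS must carry :gd_login, :gd_pass and :project_pid.

now = Time.now.to_i.to_s
GoodData::Bam::Commands.upload_files(["data/users.csv"], now)
# ... run whatever needs the uploaded copy ...
GoodData::Bam::Commands.clean_files(["data/users.csv"], now)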

.clobber_contract_project(home, params = {}) ⇒ Object



# File 'lib/commands/commands.rb', line 56

def self.clobber_contract_project(home, params={})
  home = Pathname(home)
  paths = Utils::get_paths(home + VALIDATORS_HOME)
  clobber_project(paths)
end

.clobber_downloaders_project(home, params = {}) ⇒ Object



# File 'lib/commands/commands.rb', line 74

def self.clobber_downloaders_project(home, params={})
  home = Pathname(home)
  paths = Utils::get_paths(home + DOWNLOADERS_HOME)
  clobber_project(paths)
end

.clobber_etl_project(home, params = {}) ⇒ Object



# File 'lib/commands/commands.rb', line 68

def self.clobber_etl_project(home, params={})
  home = Pathname(home)
  paths = Utils::get_paths(home + ETL_HOME)
  clobber_project(paths)
end

.clobber_project(paths) ⇒ Object



# File 'lib/commands/commands.rb', line 86

def self.clobber_project(paths)
  FileUtils::rm_rf(paths[:home_path])
end

.clone_backup(home, params) ⇒ Object



# File 'lib/commands/commands.rb', line 132

def self.clone_backup(home, params)
  home = Pathname(home)
  new_params = params.clone
  only = params[:only]
  new_params[:additional_params][:GDC_EVENTSTORE] = params[:new_es]
  clobber_downloaders_project(home)
  project = GoodData::Bam::Project.build_project(home, new_params)
  
  require 'aws-sdk'
  AWS.config(
    :access_key_id     => params[:additional_params][:S3_ACCESS_KEY_ID],
    :secret_access_key => params[:additional_params][:S3_SECRET_ACCESS_KEY]
  )

  s3_interface_from = AWS::S3.new()
  bucket_from = s3_interface_from.buckets[params[:additional_params][:S3_BUCKETNAME]]

  files_in_from_bucket = bucket_from.objects.map {|o| o.key}

  incremental_taps = Taps.get_incremental(project[:taps])

  taps_to_process = if only.blank?
    incremental_taps
  else
    temp = Taps.get_incremental([Project::find_tap_by_id(project, only)].reject {|x| x.nil?})
    fail "There is no such tap \"#{only}\"" if temp.empty?
    temp
  end
  
  taps_to_process.each do |tap|
    what = tap[:id]
    Dir.mktmpdir do |dir|
      dir = Pathname(dir)
      files_in_from_bucket.grep(/#{params[:project_pid]}\/#{what}\/#{what}/).each do |file_name|
        puts "Downloading #{file_name}"
        o = bucket_from.objects[file_name]
        local_file_name = file_name.split("/").last
        File.open(dir + local_file_name, "w") do |local_file|
          local_file.write(o.read)
        end
      end
      
      full_file_name = "full_" + what + ".csv"
      # Seed the merged file with a placeholder header line, then append every
      # downloaded part with its own header stripped.
      system "echo \"line to be skipped\" > #{dir + full_file_name}"

      system "for i in #{dir + what}*
        do
          if test -f \"$i\"
          then
             echo \"Appending $i\"
             tail -n +2 \"$i\" >> #{dir + full_file_name}
          fi
      done"
      FileUtils::cp(dir + full_file_name, full_file_name)
    end
  end
end
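
A hypothetical invocation, shown only to illustrate which keys the method reads; the S3 credentials, bucket name and event store id are placeholders.

params = PARAMS.merge(
  :new_es => "es_backup",
  :only   => nil,   # or a single tap id to restrict the restore
  :additional_params => PARAMS[:additional_params].merge(
    :S3_ACCESS_KEY_ID     => "AKIA...",
    :S3_SECRET_ACCESS_KEY => "secret",
    :S3_BUCKETNAME        => "my-backup-bucket"
  )
)
GoodData::Bam::Commands.clone_backup(".", params)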

.clone_project(pid, options = {}) ⇒ Object



# File 'lib/commands/scaffold_commands.rb', line 37

def self.clone_project(pid, options={})
  project_name = options[:project_name]
  fail "project name has to be filled in" if project_name.blank?
  with_users  = options[:with_users]

  export = {
    :exportProject => {
      :exportUsers => with_users ? 1 : 0,
      :exportData => 1
    }
  }

  result = GoodData.post("/gdc/md/#{pid}/maintenance/export", export)
  token = result["exportArtifact"]["token"]
  status_url = result["exportArtifact"]["status"]["uri"]

  state = GoodData.get(status_url)["taskState"]["status"]
  while state == "RUNNING"
    sleep 5
    result = GoodData.get(status_url) 
    state = result["taskState"]["status"]
  end

  old_project = GoodData::Project[pid]

  pr = {
    :project => {
      :content => {
        :guidedNavigation => 1,
        :driver => "Pg",
        :authorizationToken => options[:token]
      },
      :meta => {
        :title => project_name,
        :summary => "Testing Project"
      }
    }
  }
  result = GoodData.post("/gdc/projects/", pr)
  uri = result["uri"]
  while(GoodData.get(uri)["project"]["content"]["state"] == "LOADING")
    sleep(5)
  end

  new_project = GoodData::Project[uri]

  import = {
    :importProject => {
      :token => token
    }
  }

  result = GoodData.post("/gdc/md/#{new_project.obj_id}/maintenance/import", import)
  status_url = result["uri"]
  state = GoodData.get(status_url)["taskState"]["status"]
  while state == "RUNNING"
    sleep 5
    result = GoodData.get(status_url) 
    state = result["taskState"]["status"]
  end
  GoodData.post "/gdc/projects/#{new_project.obj_id}/eventStore/stores", {:store => {:storeId => "es_0"}}
  new_project.obj_id
end
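
A usage sketch; the source PID and authorization token are placeholders.

GoodData::Bam::Commands.connect_to_gd
new_pid = GoodData::Bam::Commands.clone_project("source_project_pid",
  :project_name => "My project (clone)",
  :with_users   => false,
  :token        => "authorization_token")
puts "Cloned into #{new_pid}"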

.connect_to_gd(options = {}) ⇒ Object



# File 'lib/commands/gd_commands.rb', line 5

def self.connect_to_gd(options={})
  server = options[:server]
  GoodData.connect(PARAMS[:gd_login], PARAMS[:gd_pass], options)
  GoodData.project = PARAMS[:project_pid] if !PARAMS[:project_pid].nil? && !PARAMS[:project_pid].empty?
end
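
Credentials are taken from PARAMS, so a typical call needs no arguments; a sketch:

# Assumes PARAMS[:gd_login], PARAMS[:gd_pass] and (optionally) PARAMS[:project_pid] are set.
GoodData::Bam::Commands.connect_to_gd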

.create_email_channel(options = {}, &block) ⇒ Object



# File 'lib/commands/deployment.rb', line 188

def self.create_email_channel(options={}, &block)
  email = options[:email]

  data = {
    :channelConfiguration => {
      :configuration => {
        :emailConfiguration => {
          :to => email
        }
      },
      :meta => {
        :title => "temporary email channel"
      }
    }
  }
  profile_id = GoodData.connection.user["profile"].split("/").last
  res = GoodData.post("/gdc/account/profile/#{profile_id}/channelConfigurations", data)
  self_link = res["channelConfiguration"]["meta"]["uri"]
  if block
    begin
      block.call(res)
    ensure
      GoodData.delete(self_link)
    end
  else
    res
  end
end
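
A sketch of the block form, which removes the temporary channel once the block finishes; the address is a placeholder.

GoodData::Bam::Commands.connect_to_gd
GoodData::Bam::Commands.create_email_channel(:email => "jane.doe@example.com") do |channel|
  puts channel["channelConfiguration"]["meta"]["uri"]
  # use the channel here; it is deleted in the ensure clause
end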

.create_project(options = {}) ⇒ Object



# File 'lib/commands/gd_commands.rb', line 70

def self.create_project(options={})
  project_name = options[:project_name]

  pr = {
    :project => {
      :content => {
        :guidedNavigation => 1,
        :driver => "Pg", 
        :authorizationToken => options[:token]
      },
      :meta => {
        :title => project_name,
        :summary => "Testing Project",
        :projectTemplate => options[:template]
      }
    }
  }

  result = GoodData.post("/gdc/projects/", pr)
  uri = result["uri"]
  while(GoodData.get(uri)["project"]["content"]["state"] == "LOADING")
    sleep(5)
  end
  p = GoodData::Project[uri]
  p.obj_id
end
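
A sketch; the authorization token is a placeholder and :template may be left out.

pid = GoodData::Bam::Commands.create_project(
  :project_name => "BAM test project",
  :token        => "authorization_token",
  :template     => nil)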

.deploy(dir, options = {}, &block) ⇒ Object



# File 'lib/commands/deployment.rb', line 134

def self.deploy(dir, options={}, &block) 
  verbose = options[:verbose] || false
  if block
    begin
      res = deploy_graph(dir, options)
      block.call(res)
    ensure
      self_link = res && res["process"] && res["process"]["links"]["self"]
      GoodData.delete(self_link) unless self_link.nil?
    end
  else
    deploy_graph(dir, options)
  end
end
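
A sketch of the block form, which deploys the directory, yields the process and deletes it afterwards; the path and name are placeholders and a prior connect_to_gd call is assumed.

GoodData::Bam::Commands.deploy("./etl-project",
  :project_pid => PARAMS[:project_pid],
  :name        => "temporary process",
  :verbose     => true) do |process|
  puts process["process"]["links"]["self"]
end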

.deploy_graph(dir, options = {}) ⇒ Object



# File 'lib/commands/deployment.rb', line 149

def self.deploy_graph(dir, options={}) 
  dir = Pathname(dir)
  deploy_name = options[:name] || options[:project_name]
  verbose = options[:verbose] || false
  project_pid = options[:project_pid]
  
  puts HighLine::color("Deploying #{dir}", HighLine::BOLD) if verbose
  res = nil

  Tempfile.open("deploy-graph-archive") do |temp|
    Zip::OutputStream.open(temp.path) do |zio|
      Dir.glob(dir + "**/*") do |item|
        puts "including #{item}" if verbose
        unless File.directory?(item)
          zio.put_next_entry(item)
          zio.print IO.read(item)
        end
      end
    end

    GoodData.upload_to_user_webdav(temp.path, options)
    process_id = options[:process]

    data = {
        :process => {
          :name => deploy_name,
          :path => "/uploads/#{File.basename(temp.path)}"
        }
      }
    res = if process_id.nil?
      GoodData.post("/gdc/projects/#{project_pid}/dataload/processes", data)
    else
      GoodData.put("/gdc/projects/#{project_pid}/dataload/processes/#{process_id}", data)
    end
  end
  puts HighLine::color("Deploy DONE #{dir}", HighLine::BOLD) if verbose
  res
end

.execute_process(link, dir, options = {}) ⇒ Object



# File 'lib/commands/deployment.rb', line 35

def self.execute_process(link, dir, options={})
  dir = Pathname(dir)
  result = GoodData.post(link, {
    :execution => {
     :graph => (dir + "graphs/main.grf").to_s,
     :params => {}  
    }
  })
  begin
    GoodData.poll(result, "executionTask")
  rescue RestClient::RequestFailed => e

  ensure
    result = GoodData.get(result["executionTask"]["links"]["detail"])
    if result["executionDetail"]["status"] == "ERROR"
      fail "Runing process failed. You can look at a log here #{result["executionDetail"]["logFileName"]}"
    end
  end
  result
end
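
A deploy-then-execute sketch; the directory is a placeholder and a prior connect_to_gd call is assumed.

res = GoodData::Bam::Commands.deploy_graph("./etl-project",
  :project_pid => PARAMS[:project_pid],
  :name        => "bam etl")
GoodData::Bam::Commands.execute_process(res["process"]["links"]["executions"], "./etl-project")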

.generate(home, params) ⇒ Object



# File 'lib/commands/commands.rb', line 196

def self.generate(home, params)
  # fail "The flow you specified was not found" if flows.empty? && !only_flow.nil?
  clobber_etl_project(home)
  setup_etl_project(home, params)
  GoodData::Bam::Generators::Etl.generate(home, params)
end

.generate_backup_restoration(home, params) ⇒ Object



# File 'lib/commands/commands.rb', line 106

def self.generate_backup_restoration(home, params)
  home = Pathname(home)
  new_params = params.clone
  only = params[:only]
  new_params[:additional_params][:GDC_EVENTSTORE] = params[:new_es]
  clobber_downloaders_project(home)
  project = GoodData::Bam::Project.build_project(home, new_params)
  
  require 'aws-sdk'
  AWS.config(
    :access_key_id     => params[:additional_params][:S3_ACCESS_KEY_ID],
    :secret_access_key => params[:additional_params][:S3_SECRET_ACCESS_KEY]
  )

  s3_interface_from = AWS::S3.new()
  bucket_from = s3_interface_from.buckets[params[:additional_params][:S3_BUCKETNAME]]

  files_in_from_bucket = bucket_from.objects.map {|o| o.key}

  incremental_taps = Taps.get_incremental(project[:taps])

  setup_downloaders_project(home, new_params)
  GoodData::Bam::Generators::Downloaders.generate_backup_restoration_graph(home + DOWNLOADERS_HOME + "graphs", project, new_params)

end

.generate_docs(path = ".") ⇒ Object




def self.generate_docs(path=".")
  project = GoodData::Bam::Project.build_project(path)

  taps = ""
  project[:taps].each do |tap|
    taps += "####{tap[:object]}"
    taps += "\n"
    tap[:fields].each do |f|
      if f[:acts_as]
        taps += "    #{f[:name]} -> #{f[:acts_as].join(", ")}"
      else
        taps += "    #{f[:name]}"
      end
      taps += "\n"
    end

    taps += "\n"
  end

  sinks = ""
  project[:sinks].each do |sink|
    name = sink[:gd_name] || sink[:id]
    sinks += "####{name}\n"
    sink[:fields].each do |field|
      name = field[:name] || "#{field[:schema]}:#{field[:ref]}"
      type = field[:type]
      sinks += "    #{type.upcase} #{field[:meta]} => #{name}\n"
    end
    sinks += "\n"

  end

  GoodData::Bam::Utils.render_template("README.md.erb", PARAMS.merge(:taps => taps, :sinks => sinks), :to_file => 'README.md', :root => Pathname(path))
end
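
A one-line sketch; it renders README.md from the README.md.erb template in the given project directory.

GoodData::Bam::Commands.generate_docs(".")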

.generate_downloaders(home, params) ⇒ Object



# File 'lib/commands/commands.rb', line 90

def self.generate_downloaders(home, params)
  home = Pathname(home)
  clobber_downloaders_project(home)
  project = GoodData::Bam::Project.build_project(home, params)
  setup_downloaders_project(home, params)
  GoodData::Bam::Generators::Downloaders.generate_downloaders(home + DOWNLOADERS_HOME + "graphs", project, params)
end

.generate_graph_template(name, target) ⇒ Object



# File 'lib/commands/commands.rb', line 11

def self.generate_graph_template(name, target)
  template_name = "#{name}_template.grf.erb"
  render_template(template_name, PARAMS, :to_file => USER_DEFINED_GRAPHS_ROOT + target)
end

.generate_history_downloaders(home, params) ⇒ Object



# File 'lib/commands/commands.rb', line 98

def self.generate_history_downloaders(home, params)
  home = Pathname(home)
  clobber_downloaders_project(home)
  project = GoodData::Bam::Project.build_project(home, params)
  setup_downloaders_project(home, params)
  GoodData::Bam::Generators::Downloaders.generate_history_downloaders(home + DOWNLOADERS_HOME + "graphs", project, params)
end

.generate_validators(home, params) ⇒ Object




# File 'lib/commands/commands.rb', line 20

def self.generate_validators(home, params)
  home = Pathname(home)
  clobber_contract_project(home, params)
  setup_contract_project(home, params)

  project = GoodData::Bam::Project.build_project(home, params)
  file_taps = Taps.get_file(project[:taps])
  file_taps = file_taps.find_all {|t| t[:validation_source]}
  file_taps.each do |t|
    puts "Will generate validator for tap \"#{t[:id]}\""
  end
  GoodData::Bam::Validators.generate_checker_file_list(home + VALIDATORS_HOME + "graphs/main.grf", file_taps)

  file_taps.each do |tap|
    id = tap[:id]
    GoodData::Bam::Validators.generate_contract(home + VALIDATORS_HOME + "graphs/#{id}_checker.grf", tap)
    Helpers::loop_over_file(home + VALIDATORS_HOME + "graphs/#{id}_loop.grf", {
      :token => id,
      :file_to_loop => "data/#{id}_files_to_read.csv",
      :graph_to_run => "graphs/#{id}_checker.grf"
    })
    
    GoodData::Bam::Validators.generate_checker_success(home + VALIDATORS_HOME + "graphs/#{id}_success.grf", tap)
    GoodData::Bam::Validators.backup_files_from_glob(home + VALIDATORS_HOME + "graphs/#{id}_failure.grf", {
      :files_to_backup => ["${DATA}/validation_output_*.csv", "${DATA}/*_files_to_read.csv"],
      :remote_url => "`\"https://\" + replace(replace(\"${GD_LOGIN}\",\"@\",\"%40\"),\"\\\\+\",\"%2B\") + \":${GD_PASSWORD}@${GDC_WEBDAV_HOST}/project-uploads/${GDC_PROJECT_ID}/validation_reports/report_${NOW}/\"`"
    })
  end
end

.get_sf_client(params) ⇒ Object



# File 'lib/commands/sf_commands.rb', line 35

def self.get_sf_client(params)
  Salesforce::Client.new(params[:sf_login], params[:sf_password] + params[:sf_token], :server => params[:sf_server])
end

.model_sync(path, options) ⇒ Object



# File 'lib/commands/gd_commands.rb', line 22

def self.model_sync(path, options)
  dry_run = options[:dry]
  project = GoodData::Bam::Project.build_project(path, options)
  datasets = project[:sinks]
  model_update_dir = Pathname('model_update')
  cl_home = ENV['CL_HOME'] || PARAMS['CL_HOME'] || fail("Home of cl tool cannot be found. Either set up CL_HOME in your env with 'export CL_HOME=path/to/cl or set it up in your params.json. Point to the directory of CL not to the bin dir.'")
  cl_home = Pathname(cl_home) + 'bin/gdi.sh'

  FileUtils::mkdir_p(model_update_dir)
  File.open(model_update_dir + 'dummy', 'w')
  FileUtils::cd(model_update_dir) do
    datasets.each do |ds|
      dataset_path = Pathname("cl_file_#{ds[:id]}")
      File.open(dataset_path, "w") do |temp|
        builder = Builder::XmlMarkup.new(:target=>temp, :indent=>2)
        builder.schema do |builder|
          builder.name(ds[:gd_name])
          builder.title(ds[:gd_name])
          builder.columns do |b|
            ds[:fields].each do |f|
              builder.column do |builder|
                builder.title(f[:name])
                builder.ldmType(f[:type].upcase)
                builder.reference(f[:for]) if f.has_key?(:for)
                builder.reference(f[:ref]) if f.has_key?(:ref)
                builder.schemaReference(f[:schema]) if f.has_key?(:schema)
                if f[:type] == "date"
                  builder.schemaReference("#{f[:dd]}")
                  builder.name("#{f[:name]}")
                else
                  builder.name(f[:name] || f[:ref])
                end
              end
            end
          end
        end
      end
      template_name = dry_run ? "update_dataset_dry.script.erb" : "update_dataset.script.erb"
      Utils::render_template(template_name, PARAMS.merge({"config_file" => dataset_path.expand_path}), :to_file => 'update_dataset.script')
      puts "Generate #{ds[:id]}"

      system("#{cl_home} update_dataset.script --username #{PARAMS[:gd_login]} --password #{PARAMS[:gd_pass]}")
      File.delete(dataset_path)        
    end
  end
  FileUtils::rm_rf(model_update_dir)
end
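
A sketch; it assumes CL_HOME points at the CL tool directory (not its bin subdirectory) and GoodData credentials are in PARAMS.

GoodData::Bam::Commands.model_sync(".", :dry => true)   # renders the dry-run script template
GoodData::Bam::Commands.model_sync(".", {})             # renders and runs the update scripts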

.run(dir, options = {}) ⇒ Object



# File 'lib/commands/deployment.rb', line 56

def self.run(dir, options={})
  email = options[:email]
  verbose = options[:v]

  dir = Pathname(dir)

  deploy(dir, options.merge(:name => "Temporary deploy[#{dir}][#{options[:project_name]}]")) do |deploy_response|
    puts HighLine::color("Executing", HighLine::BOLD) if verbose
    if email.nil?
      result = execute_process(deploy_response["process"]["links"]["executions"], dir, options)
    else
      create_email_channel(options) do |channel_response|
        subscribe_on_finish(:success, channel_response, deploy_response, options)
        result = execute_process(deploy_response["process"]["links"]["executions"], dir)
      end
    end
  end
end
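
A sketch of a temporary deploy-and-run with an optional e-mail notification; the path and address are placeholders.

GoodData::Bam::Commands.connect_to_gd
GoodData::Bam::Commands.run("./etl-project",
  :project_pid  => PARAMS[:project_pid],
  :project_name => PARAMS[:project_name],
  :email        => "jane.doe@example.com",   # omit to skip the notification channel
  :v            => true)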

.run_validator(process, files, params) ⇒ Object



# File 'lib/commands/validators.rb', line 7

def self.run_validator(process, files, params)
  now = Time.now.to_i.to_s
  upload_files(files, now)
  begin
    execute_process("/gdc/projects/#{params[:project_pid]}/dataload/processes/#{process}/executions", VALIDATORS_HOME)
  rescue
    clean_files(files, now)
    exit_now!("Data validation failed failed. Please look here for details. \"https://secure-di.gooddata.com/project-uploads/#{params[:project_pid]}/validation_reports/\"")
  end
end
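
A hypothetical call; the process id belongs to an already deployed validators process and the file list matches the taps' validation sources.

GoodData::Bam::Commands.run_validator("validator_process_id", ["data/users.csv"], PARAMS)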

.set_up_debug(project, flow, graph) ⇒ Object



# File 'lib/commands/gd_commands.rb', line 11

def self.set_up_debug(project, flow, graph)
  fail "Project \"#{project}\" was not found" unless File.exist?(project)
  fail "Project \"#{project}\" is not a directory" unless File.directory?(project)
  # TODO: make the program really check that the flow and graph exist before writing to the file
  project = Pathname(project)
  File.open(project + 'params.prm', 'w') do |f|
    f << "FLOW=#{flow}\n"
    f << "NAME=#{graph}\n"
  end
end

.setup_bash_structure(name) ⇒ Object



# File 'lib/commands/scaffold_commands.rb', line 10

def self.setup_bash_structure(name)
  fail "Directory \"#{name}\" already exists. Please remove it if you want to move forward." if File.exist?(name)
  FileUtils::mkdir_p name
  FileUtils::cd(name) do
    Utils::render_template("params.json.erb", PARAMS, :to_file => 'params.json')
    ['flows', 'sinks', 'taps'].each do |dir|
      FileUtils::mkdir_p dir
    end

    setup_flow('user')
    Utils::render_template("tap.json.erb", PARAMS, :to_file => 'taps/source_example.json')
    Utils::render_template("sink.json.erb", PARAMS, :to_file => 'sinks/dataset_example.json')
  end
end
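
A sketch of scaffolding a new BAM project in the current directory; the project name is a placeholder.

GoodData::Bam::Commands.setup_bash_structure("my_bam_project")
# creates my_bam_project/params.json, flows/user.rb, taps/source_example.json and sinks/dataset_example.json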

.setup_contract_project(home, params = {}) ⇒ Object



# File 'lib/commands/commands.rb', line 50

def self.setup_contract_project(home, params={})
  home = Pathname(home)
  paths = Utils::get_paths(home + VALIDATORS_HOME)
  Utils::setup_basic_project(paths, params)
end

.setup_downloaders_project(home, params = {}) ⇒ Object



# File 'lib/commands/commands.rb', line 80

def self.setup_downloaders_project(home, params={})
  home = Pathname(home)
  paths = Utils::get_paths(home + DOWNLOADERS_HOME)
  Utils::setup_basic_project(paths, params)
end

.setup_etl_project(home, params = {}) ⇒ Object



# File 'lib/commands/commands.rb', line 62

def self.setup_etl_project(home, params={})
  home = Pathname(home)
  paths = Utils::get_paths(home + ETL_HOME)
  Utils::setup_basic_project(paths, params)
end

.setup_flow(name) ⇒ Object



# File 'lib/commands/scaffold_commands.rb', line 25

def self.setup_flow(name)
  Utils::render_template("flow.rb.erb", {:flow_name => name}, :to_file => "flows/#{name}.rb")
end

.setup_sink(name) ⇒ Object



# File 'lib/commands/scaffold_commands.rb', line 33

def self.setup_sink(name)
  Utils::render_template("sink.json.erb", {:sink_name => name}, :to_file => "sinks/#{name}.json")
end

.setup_tap(name) ⇒ Object



# File 'lib/commands/scaffold_commands.rb', line 29

def self.setup_tap(name)
  Utils::render_template("tap.json.erb", {:tap_name => name}, :to_file => "taps/#{name}.json")
end

.sf_jack_in(params) ⇒ Object



# File 'lib/commands/sf_commands.rb', line 26

def self.sf_jack_in(params)
  client = get_sf_client(params)
  client.pry
end

.sf_validate_connection(params) ⇒ Object



# File 'lib/commands/sf_commands.rb', line 31

def self.sf_validate_connection(params)
  get_sf_client(params)
end

.subscribe_on_finish(event_type, channel, deploy, options = {}) ⇒ Object



# File 'lib/commands/deployment.rb', line 75

def self.subscribe_on_finish(event_type, channel, deploy, options={})
  channel_uri = channel["channelConfiguration"]["meta"]["uri"]
  process_id = deploy["process"]["links"]["self"].split('/').last
  event_id = case event_type
    when :success
      "dataload.process.finish.ok"
    when :failure
      "dataload.process.finish.error"
    else
      fail "You specified unknown event \"#{event_type}\""
    end

  templates = {
    :success => {
      :message => "Just wanted to let you know that ETL for \"#{deploy["process"]["name"]}\" Succeeded",
      :subject => "GoodData ETL SUCCESS: process with name \"#{deploy["process"]["name"]}\""
    },
    :failure => {
      :message => "Just wanted to let you know that ETL for \"#{deploy["process"]["name"]}\" Failed",
      :subject => "GoodData ETL FAILURE: process with name \"#{deploy["process"]["name"]}\""
    }
  }

  templates_to_be_used = templates[event_type]

  data = {
    :subscription => {
      :triggers => [
        {
          :projectEventTrigger => {
            :types => [event_id]
          }
        }
      ],
      :condition => {
        :condition => {
          :expression => "params.PROCESS_ID==\"#{process_id}\""
        }
      },
      :subject => {
        :template => {
          :expression => templates_to_be_used[:subject]
        }
      },
      :message => {
        :template => {
          :expression => templates_to_be_used[:message]
        }
      },
      :channels => [channel_uri],
      :meta => {
        :title => "Notification for process #{process_id}"
      }
    }
  }
  profile_id = GoodData.connection.user["profile"].split("/").last
  GoodData.post("/gdc/projects/#{options[:project_pid]}/users/#{profile_id}/subscriptions", data)
end
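
A sketch combining the temporary channel with a failure subscription; deploy_response stands for the hash returned by deploy_graph.

GoodData::Bam::Commands.create_email_channel(:email => "jane.doe@example.com") do |channel|
  GoodData::Bam::Commands.subscribe_on_finish(:failure, channel, deploy_response,
    :project_pid => PARAMS[:project_pid])
end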

.upload_files(files, now) ⇒ Object



# File 'lib/commands/deployment.rb', line 5

def self.upload_files(files, now)
  upload_paths = files.map do |file|
    file = Pathname(file)
    ext = file.extname
    "#{file.to_s.chomp(ext)}_#{now}#{ext}"
  end

  Dir.mktmpdir do |dir|
    dir = Pathname(dir)
    files.zip(upload_paths).each do |f|
      FileUtils::cp(f[0], dir + f[1])
      system "curl -u #{PARAMS[:gd_login]}:#{PARAMS[:gd_pass]} -k -T #{dir+f[1]} https://secure-di.gooddata.com/project-uploads/#{PARAMS[:project_pid]}/"
      puts "Uploaded #{f[1]}"
    end
  end
end

.validate_sf_metadata(sf_client, taps) ⇒ Object



# File 'lib/commands/sf_commands.rb', line 11

def self.validate_sf_metadata(sf_client, taps)
  taps.reduce({}) do |memo, tap|
    sf_object = tap[:object]
    u = sf_client.describe(sf_object)
    sf_fields = u[:describeSObjectResponse][:result][:fields].map {|field| field[:name]}
    fields_to_validate = tap[:fields].map {|field| field[:name]}
    if memo.has_key?(sf_object) 
      memo[sf_object] = memo[sf_object].concat(fields_to_validate - sf_fields)
    else
      memo[sf_object] = (fields_to_validate - sf_fields)
    end
    memo
  end
end
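
A sketch of reporting fields that taps reference but Salesforce does not expose; it assumes the project's taps are Salesforce taps.

client  = GoodData::Bam::Commands.get_sf_client(PARAMS)
project = GoodData::Bam::Project.build_project(".", PARAMS)
missing = GoodData::Bam::Commands.validate_sf_metadata(client, project[:taps])
missing.each do |object, fields|
  puts "#{object} is missing: #{fields.join(', ')}" unless fields.empty?
end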

.validate_sf_taps(home, params) ⇒ Object



# File 'lib/commands/sf_commands.rb', line 5

def self.validate_sf_taps(home, params)
  project = GoodData::Bam::Project.build_project(home, params)
  client = get_sf_client(params)
  validate_sf_metadata(client, Taps.get_salesforce(project[:taps]))
end