Class: GoogleBigquery::Jobs
- Defined in:
- lib/google_bigquery/jobs.rb
Instance Attribute Summary
Attributes inherited from Client
Class Method Summary collapse
- .copy ⇒ Object
-
.export(project_id, dataset_id, table_id, bucket_location) ⇒ Object
Exports table data from BigQuery to a Cloud Storage location. TODO: get mappings for formatting options.
-
.get(project_id, job_id) ⇒ Object
Retrieves the specified job by ID.
-
.getQueryResults(project_id, job_id, params = {}) ⇒ Object
Retrieves the results of a query job.
- .import ⇒ Object
-
.insert(project_id, body = {}) ⇒ Object
Starts a new asynchronous job.
-
.list(project_id, params = {}) ⇒ Object
Lists all the Jobs in the specified project that were started by the user.
-
.load(project_id, dataset_id, table_id, sources, fields) ⇒ Object
TODO: get mappings for formatting options.
-
.query(project_id, body = {}) ⇒ Object
Runs a BigQuery SQL query synchronously and returns query results if the query completes within a specified timeout.
Instance Method Summary collapse
-
#initialize(client = nil, opts = {}) ⇒ Jobs
constructor
A new instance of Jobs.
Methods inherited from Client
#defaults_options, #parse_response, parse_response
Constructor Details
#initialize(client = nil, opts = {}) ⇒ Jobs
Returns a new instance of Jobs.
4 5 6 |
# File 'lib/google_bigquery/jobs.rb', line 4 def initialize(client=nil, opts={}) @client = client end |
Class Method Details
.copy ⇒ Object
146 147 |
# File 'lib/google_bigquery/jobs.rb', line 146 def self.copy() end |
.export(project_id, dataset_id, table_id, bucket_location) ⇒ Object
Exports table data from BigQuery to a Cloud Storage location. TODO: get mappings for formatting options
59 60 61 62 63 64 65 66 67 68 69 70 71 72 73 74 75 76 77 78 79 80 81 82 83 84 85 86 87 88 89 90 91 92 93 94 95 96 97 98 |
# File 'lib/google_bigquery/jobs.rb', line 59 def self.export(project_id, dataset_id, table_id, bucket_location) body = {'projectId'=> project_id, 'configuration'=> { 'extract'=> { 'sourceTable'=> { 'projectId'=> project_id, 'datasetId'=> dataset_id, 'tableId'=> table_id }, 'destinationUri'=> "gs://#{bucket_location}", 'destinationFormat'=> 'NEWLINE_DELIMITED_JSON' } } } res = GoogleBigquery::Auth.client.execute( :api_method=> GoogleBigquery::Auth.api.jobs.insert, :body_object=> body, :parameters=> {"projectId"=> project_id} ) job_id = JSON.parse(res.body)["jobReference"]["jobId"] puts 'Waiting for export to complete..' loop do status = JSON.parse(self.get(project_id, job_id).body) if 'DONE' == status['status']['state'] puts "Done exporting!" if status["status"]["errors"] puts status["status"]["errors"].map{|o| "#{o['reason']} : #{o['message']}"} end return end sleep(10) end end |
.get(project_id, job_id) ⇒ Object
Retrieves the specified job by ID.
21 22 23 24 25 26 27 |
# File 'lib/google_bigquery/jobs.rb', line 21 def self.get(project_id , job_id) res = GoogleBigquery::Auth.client.execute( :api_method=> GoogleBigquery::Auth.api.jobs.get, :parameters=> {"projectId"=> project_id, "jobId"=>job_id} ) parse_response(res) end |
.getQueryResults(project_id, job_id, params = {}) ⇒ Object
Retrieves the results of a query job.
30 31 32 33 34 35 36 |
# File 'lib/google_bigquery/jobs.rb', line 30 def self.getQueryResults(project_id , job_id, params={}) res = GoogleBigquery::Auth.client.execute( :api_method=> GoogleBigquery::Auth.api.jobs.get_query_results, :parameters=> {"projectId"=> project_id, "jobId"=>job_id}.merge(params) ) parse_response(res) end |
.import ⇒ Object
143 144 |
# File 'lib/google_bigquery/jobs.rb', line 143 def self.import() end |
.insert(project_id, body = {}) ⇒ Object
Starts a new asynchronous job.
39 40 41 42 43 44 45 46 |
# File 'lib/google_bigquery/jobs.rb', line 39 def self.insert(project_id, body={}) res = GoogleBigquery::Auth.client.execute( :api_method=> GoogleBigquery::Auth.api.jobs.insert, :body_object=> body, :parameters=> {"projectId"=> project_id} ) parse_response(res) end |
.list(project_id, params = {}) ⇒ Object
Lists all the Jobs in the specified project that were started by the user.
49 50 51 52 53 54 55 |
# File 'lib/google_bigquery/jobs.rb', line 49 def self.list(project_id, params={}) res = GoogleBigquery::Auth.client.execute( :api_method=> GoogleBigquery::Auth.api.jobs.list, :parameters=> {"projectId"=> project_id}.merge(params) ) parse_response(res) end |
.load(project_id, dataset_id, table_id, sources, fields) ⇒ Object
TODO: get mappings for formatting options
101 102 103 104 105 106 107 108 109 110 111 112 113 114 115 116 117 118 119 120 121 122 123 124 125 126 127 128 129 130 131 132 133 134 135 136 137 138 139 140 141 |
# File 'lib/google_bigquery/jobs.rb', line 101 def self.load(project_id, dataset_id, table_id, sources, fields) body = { 'projectId'=> project_id, 'configuration'=> { 'load'=> { 'sourceFormat' => "NEWLINE_DELIMITED_JSON", 'sourceUri' => sources.first, 'sourceUris' => sources, 'destinationTable'=> { 'projectId'=> project_id, 'datasetId'=> dataset_id, 'tableId'=> table_id } } } } res = GoogleBigquery::Auth.client.execute( :api_method=> GoogleBigquery::Auth.api.jobs.insert, :body_object=> body, :parameters=> {"projectId"=> project_id} ) #binding.pry job_id = JSON.parse(res.body)["jobReference"]["jobId"] puts 'Waiting for import to complete..' loop do status = JSON.parse(self.get(project_id, job_id).body) if 'DONE' == status['status']['state'] puts "Done loading!" if status["status"]["errors"] puts status["status"]["errors"].map{|o| "#{o['reason']} : #{o['message']}"} end return end sleep(10) end end |
.query(project_id, body = {}) ⇒ Object
Runs a BigQuery SQL query synchronously and returns query results if the query completes within a specified timeout.
10 11 12 13 14 15 16 17 18 |
# File 'lib/google_bigquery/jobs.rb', line 10 def self.query(project_id, body={}) res = GoogleBigquery::Auth.client.execute( :api_method=> GoogleBigquery::Auth.api.jobs.query, :body_object=> body, :parameters=> {"projectId"=> project_id} ) parse_response(res) end |