Class: Gcloud::Bigquery::Connection
- Inherits:
-
Object
- Object
- Gcloud::Bigquery::Connection
- Defined in:
- lib/gcloud/bigquery/connection.rb
Overview
Represents the connection to Bigquery, as well as expose the API calls.
Constant Summary collapse
- API_VERSION =
:nodoc:
"v2"
Instance Attribute Summary collapse
-
#credentials ⇒ Object
:nodoc:.
-
#project ⇒ Object
Returns the value of attribute project.
Instance Method Summary collapse
- #copy_table(source, target, options = {}) ⇒ Object
-
#delete_dataset(dataset_id, options = {}) ⇒ Object
Deletes the dataset specified by the datasetId value.
-
#delete_table(dataset_id, table_id) ⇒ Object
Deletes the table specified by tableId from the dataset.
- #extract_table(table, storage_files, options = {}) ⇒ Object
-
#get_dataset(dataset_id) ⇒ Object
Returns the dataset specified by datasetID.
-
#get_job(job_id) ⇒ Object
Returns the job specified by jobID.
- #get_project_table(project_id, dataset_id, table_id) ⇒ Object
-
#get_table(dataset_id, table_id) ⇒ Object
Gets the specified table resource by table ID.
-
#initialize(project, credentials) ⇒ Connection
constructor
Creates a new Connection instance.
-
#insert_dataset(dataset_id, options = {}) ⇒ Object
Creates a new empty dataset.
- #insert_job(config) ⇒ Object
-
#insert_table(dataset_id, table_id, options = {}) ⇒ Object
Creates a new, empty table in the dataset.
- #insert_tabledata(dataset_id, table_id, rows, options = {}) ⇒ Object
-
#job_query_results(job_id, options = {}) ⇒ Object
Returns the query data for the job.
- #link_table(table, urls, options = {}) ⇒ Object
-
#list_datasets(options = {}) ⇒ Object
Lists all datasets in the specified project to which you have been granted the READER dataset role.
-
#list_jobs(options = {}) ⇒ Object
Lists all jobs in the specified project to which you have been granted the READER job role.
-
#list_tabledata(dataset_id, table_id, options = {}) ⇒ Object
Retrieves data from the table.
-
#list_tables(dataset_id, options = {}) ⇒ Object
Lists all tables in the specified dataset.
- #load_multipart(table, file, options = {}) ⇒ Object
- #load_resumable(table, file, chunk_size = nil, options = {}) ⇒ Object
- #load_table(table, storage_url, options = {}) ⇒ Object
-
#patch_dataset(dataset_id, options = {}) ⇒ Object
Updates information in an existing dataset, only replacing fields that are provided in the submitted dataset resource.
-
#patch_table(dataset_id, table_id, options = {}) ⇒ Object
Updates information in an existing table, replacing fields that are provided in the submitted table resource.
- #query(query, options = {}) ⇒ Object
- #query_job(query, options = {}) ⇒ Object
Constructor Details
#initialize(project, credentials) ⇒ Connection
Creates a new Connection instance.
33 34 35 36 37 38 39 40 |
# File 'lib/gcloud/bigquery/connection.rb', line 33

# Creates a new Connection instance.
# NOTE(review): extraction dropped the `authorization` identifier
# (`@client. = ...`); restored per Google::APIClient's writer.
def initialize project, credentials #:nodoc:
  @project = project
  @credentials = credentials
  @client = Google::APIClient.new application_name: "gcloud-ruby",
                                  application_version: Gcloud::VERSION
  @client.authorization = @credentials.client
  @bigquery = @client.discovered_api "bigquery", API_VERSION
end
Instance Attribute Details
#credentials ⇒ Object
:nodoc:
29 30 31 |
# File 'lib/gcloud/bigquery/connection.rb', line 29

# Reader for the credentials used by this connection. #:nodoc:
def credentials
  @credentials
end
#project ⇒ Object
Returns the value of attribute project.
28 29 30 |
# File 'lib/gcloud/bigquery/connection.rb', line 28

# Reader for the project identifier this connection targets.
def project
  @project
end
Instance Method Details
#copy_table(source, target, options = {}) ⇒ Object
260 261 262 263 264 265 266 |
# File 'lib/gcloud/bigquery/connection.rb', line 260

# Starts a job that copies +source+ to +target+ via jobs.insert.
# NOTE(review): the `options` identifier was lost in extraction; restored.
def copy_table source, target, options = {}
  @client.execute(
    api_method: @bigquery.jobs.insert,
    parameters: { projectId: @project },
    body_object: copy_table_config(source, target, options)
  )
end
#delete_dataset(dataset_id, options = {}) ⇒ Object
Deletes the dataset specified by the datasetId value. Before you can delete a dataset, you must delete all its tables, either manually or by specifying force: true in options. Immediately after deletion, you can create another dataset with the same name.
100 101 102 103 104 105 106 107 |
# File 'lib/gcloud/bigquery/connection.rb', line 100

# Deletes the dataset specified by the datasetId value. Passing
# +force: true+ sets deleteContents so non-empty datasets are removed.
# NOTE(review): the `options` identifier was lost in extraction; restored.
def delete_dataset dataset_id, options = {}
  @client.execute(
    api_method: @bigquery.datasets.delete,
    parameters: { projectId: @project, datasetId: dataset_id,
                  deleteContents: options[:force]
                }.delete_if { |_, v| v.nil? }
  )
end
#delete_table(dataset_id, table_id) ⇒ Object
Deletes the table specified by tableId from the dataset. If the table contains data, all the data will be deleted.
167 168 169 170 171 172 173 |
# File 'lib/gcloud/bigquery/connection.rb', line 167

# Deletes the table specified by tableId from the dataset; any data
# the table holds is deleted with it.
def delete_table dataset_id, table_id
  @client.execute(
    api_method: @bigquery.tables.delete,
    parameters: { projectId: @project,
                  datasetId: dataset_id,
                  tableId: table_id }
  )
end
#extract_table(table, storage_files, options = {}) ⇒ Object
276 277 278 279 280 281 282 |
# File 'lib/gcloud/bigquery/connection.rb', line 276

# Starts a job that extracts +table+ to the given storage files.
# NOTE(review): the `options` identifier was lost in extraction; restored.
def extract_table table, storage_files, options = {}
  @client.execute(
    api_method: @bigquery.jobs.insert,
    parameters: { projectId: @project },
    body_object: extract_table_config(table, storage_files, options)
  )
end
#get_dataset(dataset_id) ⇒ Object
Returns the dataset specified by datasetID.
60 61 62 63 64 65 |
# File 'lib/gcloud/bigquery/connection.rb', line 60

# Returns the dataset specified by datasetID.
def get_dataset dataset_id
  @client.execute(
    api_method: @bigquery.datasets.get,
    parameters: { projectId: @project, datasetId: dataset_id }
  )
end
#get_job(job_id) ⇒ Object
Returns the job specified by jobID.
213 214 215 216 217 218 |
# File 'lib/gcloud/bigquery/connection.rb', line 213

# Returns the job specified by jobID.
def get_job job_id
  @client.execute(
    api_method: @bigquery.jobs.get,
    parameters: { projectId: @project, jobId: job_id }
  )
end
#get_project_table(project_id, dataset_id, table_id) ⇒ Object
125 126 127 128 129 130 131 |
# File 'lib/gcloud/bigquery/connection.rb', line 125

# Fetches a table resource from an arbitrary project (not only the
# connection's own @project).
def get_project_table project_id, dataset_id, table_id
  @client.execute(
    api_method: @bigquery.tables.get,
    parameters: { projectId: project_id,
                  datasetId: dataset_id,
                  tableId: table_id }
  )
end
#get_table(dataset_id, table_id) ⇒ Object
Gets the specified table resource by table ID. This method does not return the data in the table, it only returns the table resource, which describes the structure of this table.
138 139 140 |
# File 'lib/gcloud/bigquery/connection.rb', line 138

# Gets the specified table resource by table ID. Returns only the
# table resource (structure), not the data it contains.
def get_table dataset_id, table_id
  get_project_table @project, dataset_id, table_id
end
#insert_dataset(dataset_id, options = {}) ⇒ Object
Creates a new empty dataset.
69 70 71 72 73 74 75 |
# File 'lib/gcloud/bigquery/connection.rb', line 69

# Creates a new empty dataset.
# NOTE(review): the `options` identifier was lost in extraction; restored.
def insert_dataset dataset_id, options = {}
  @client.execute(
    api_method: @bigquery.datasets.insert,
    parameters: { projectId: @project },
    body_object: insert_dataset_request(dataset_id, options)
  )
end
#insert_job(config) ⇒ Object
220 221 222 223 224 225 226 |
# File 'lib/gcloud/bigquery/connection.rb', line 220

# Submits a job with the given configuration hash via jobs.insert.
def insert_job config
  @client.execute(
    api_method: @bigquery.jobs.insert,
    parameters: { projectId: @project },
    body_object: { "configuration" => config }
  )
end
#insert_table(dataset_id, table_id, options = {}) ⇒ Object
Creates a new, empty table in the dataset.
144 145 146 147 148 149 150 |
# File 'lib/gcloud/bigquery/connection.rb', line 144

# Creates a new, empty table in the dataset.
# NOTE(review): the `options` identifier was lost in extraction; restored.
def insert_table dataset_id, table_id, options = {}
  @client.execute(
    api_method: @bigquery.tables.insert,
    parameters: { projectId: @project, datasetId: dataset_id },
    body_object: insert_table_request(dataset_id, table_id, options)
  )
end
#insert_tabledata(dataset_id, table_id, rows, options = {}) ⇒ Object
191 192 193 194 195 196 197 198 199 |
# File 'lib/gcloud/bigquery/connection.rb', line 191

# Streams +rows+ into the table via tabledata.insertAll.
# NOTE(review): the `options` identifier was lost in extraction; restored.
def insert_tabledata dataset_id, table_id, rows, options = {}
  @client.execute(
    api_method: @bigquery.tabledata.insert_all,
    parameters: { projectId: @project,
                  datasetId: dataset_id,
                  tableId: table_id },
    body_object: insert_tabledata_rows(rows, options)
  )
end
#job_query_results(job_id, options = {}) ⇒ Object
Returns the query data for the job.
246 247 248 249 250 251 252 253 254 255 256 257 258 |
# File 'lib/gcloud/bigquery/connection.rb', line 246

# Returns the query data for the job. Recognized options: :token,
# :max, :start, :timeout; nil values are dropped from the request.
# NOTE(review): the `options` receiver was lost in extraction; restored.
def job_query_results job_id, options = {}
  params = { projectId: @project, jobId: job_id,
             pageToken: options.delete(:token),
             maxResults: options.delete(:max),
             startIndex: options.delete(:start),
             timeoutMs: options.delete(:timeout)
           }.delete_if { |_, v| v.nil? }
  @client.execute(
    api_method: @bigquery.jobs.get_query_results,
    parameters: params
  )
end
#link_table(table, urls, options = {}) ⇒ Object
268 269 270 271 272 273 274 |
# File 'lib/gcloud/bigquery/connection.rb', line 268

# Starts a job that links +table+ to the given source URLs.
# NOTE(review): the `options` identifier was lost in extraction; restored.
def link_table table, urls, options = {}
  @client.execute(
    api_method: @bigquery.jobs.insert,
    parameters: { projectId: @project },
    body_object: link_table_config(table, urls, options)
  )
end
#list_datasets(options = {}) ⇒ Object
Lists all datasets in the specified project to which you have been granted the READER dataset role.
45 46 47 48 49 50 51 52 53 54 55 56 |
# File 'lib/gcloud/bigquery/connection.rb', line 45

# Lists all datasets in the project to which the caller has been
# granted the READER dataset role. Options: :all, :token, :max.
# NOTE(review): the `options` identifier was lost in extraction; restored.
def list_datasets options = {}
  params = { projectId: @project,
             all: options.delete(:all),
             pageToken: options.delete(:token),
             maxResults: options.delete(:max)
           }.delete_if { |_, v| v.nil? }
  @client.execute(
    api_method: @bigquery.datasets.list,
    parameters: params
  )
end
#list_jobs(options = {}) ⇒ Object
Lists all jobs in the specified project to which you have been granted the READER job role.
204 205 206 207 208 209 |
# File 'lib/gcloud/bigquery/connection.rb', line 204

# Lists all jobs in the project to which the caller has been granted
# the READER job role.
# NOTE(review): the `options` identifier was lost in extraction; restored.
def list_jobs options = {}
  @client.execute(
    api_method: @bigquery.jobs.list,
    parameters: list_jobs_params(options)
  )
end
#list_tabledata(dataset_id, table_id, options = {}) ⇒ Object
Retrieves data from the table.
177 178 179 180 181 182 183 184 185 186 187 188 189 |
# File 'lib/gcloud/bigquery/connection.rb', line 177

# Retrieves data from the table. Options: :token, :max, :start;
# nil values are dropped from the request.
# NOTE(review): the `options` receiver was lost in extraction; restored.
def list_tabledata dataset_id, table_id, options = {}
  params = { projectId: @project,
             datasetId: dataset_id, tableId: table_id,
             pageToken: options.delete(:token),
             maxResults: options.delete(:max),
             startIndex: options.delete(:start)
           }.delete_if { |_, v| v.nil? }
  @client.execute(
    api_method: @bigquery.tabledata.list,
    parameters: params
  )
end
#list_tables(dataset_id, options = {}) ⇒ Object
Lists all tables in the specified dataset. Requires the READER dataset role.
112 113 114 115 116 117 118 119 120 121 122 123 |
# File 'lib/gcloud/bigquery/connection.rb', line 112

# Lists all tables in the specified dataset. Requires the READER
# dataset role. Options: :token, :max.
# NOTE(review): the `options` identifier was lost in extraction; restored.
def list_tables dataset_id, options = {}
  params = { projectId: @project, datasetId: dataset_id,
             pageToken: options.delete(:token),
             maxResults: options.delete(:max)
           }.delete_if { |_, v| v.nil? }
  @client.execute(
    api_method: @bigquery.tables.list,
    parameters: params
  )
end
#load_multipart(table, file, options = {}) ⇒ Object
293 294 295 296 297 298 299 300 301 302 |
# File 'lib/gcloud/bigquery/connection.rb', line 293

# Loads a local file into +table+ using a multipart upload.
# NOTE(review): the `options` identifier was lost in extraction; restored.
def load_multipart table, file, options = {}
  media = load_media file
  @client.execute(
    api_method: @bigquery.jobs.insert,
    media: media,
    parameters: { projectId: @project, uploadType: "multipart" },
    body_object: load_table_config(table, nil, file, options)
  )
end
#load_resumable(table, file, chunk_size = nil, options = {}) ⇒ Object
304 305 306 307 308 309 310 311 312 313 314 315 316 |
# File 'lib/gcloud/bigquery/connection.rb', line 304

# Loads a local file into +table+ using a resumable upload, retrying
# the upload request until it is no longer resumable.
# NOTE(review): the `options` identifier was lost in extraction; restored.
def load_resumable table, file, chunk_size = nil, options = {}
  media = load_media file, chunk_size
  result = @client.execute(
    api_method: @bigquery.jobs.insert,
    media: media,
    parameters: { projectId: @project, uploadType: "resumable" },
    body_object: load_table_config(table, nil, file, options)
  )
  upload = result.resumable_upload
  result = @client.execute upload while upload.resumable?
  result
end
#load_table(table, storage_url, options = {}) ⇒ Object
284 285 286 287 288 289 290 291 |
# File 'lib/gcloud/bigquery/connection.rb', line 284

# Starts a job that loads data into +table+ from the given storage URL.
# NOTE(review): the `options` identifier was lost in extraction; restored.
def load_table table, storage_url, options = {}
  @client.execute(
    api_method: @bigquery.jobs.insert,
    parameters: { projectId: @project },
    body_object: load_table_config(
      table, storage_url, Array(storage_url).first, options)
  )
end
#patch_dataset(dataset_id, options = {}) ⇒ Object
Updates information in an existing dataset, only replacing fields that are provided in the submitted dataset resource.
80 81 82 83 84 85 86 87 88 89 90 91 92 |
# File 'lib/gcloud/bigquery/connection.rb', line 80

# Updates information in an existing dataset, only replacing fields
# that are provided in the submitted dataset resource. Options:
# :project_id, :name, :description, :default_expiration.
# NOTE(review): the `options` receiver was lost in extraction; restored.
def patch_dataset dataset_id, options = {}
  project_id = options[:project_id] || @project
  body = { friendlyName: options[:name],
           description: options[:description],
           defaultTableExpirationMs: options[:default_expiration]
         }.delete_if { |_, v| v.nil? }
  @client.execute(
    api_method: @bigquery.datasets.patch,
    parameters: { projectId: project_id, datasetId: dataset_id },
    body_object: body
  )
end
#patch_table(dataset_id, table_id, options = {}) ⇒ Object
Updates information in an existing table, replacing fields that are provided in the submitted table resource.
155 156 157 158 159 160 161 162 |
# File 'lib/gcloud/bigquery/connection.rb', line 155

# Updates information in an existing table, replacing fields that are
# provided in the submitted table resource.
# NOTE(review): the `options` identifier was lost in extraction; restored.
def patch_table dataset_id, table_id, options = {}
  @client.execute(
    api_method: @bigquery.tables.patch,
    parameters: { projectId: @project,
                  datasetId: dataset_id,
                  tableId: table_id },
    body_object: patch_table_request(options)
  )
end
#query(query, options = {}) ⇒ Object
236 237 238 239 240 241 242 |
# File 'lib/gcloud/bigquery/connection.rb', line 236

# Runs a synchronous query via jobs.query.
# NOTE(review): the `options` identifier was lost in extraction; restored.
def query query, options = {}
  @client.execute(
    api_method: @bigquery.jobs.query,
    parameters: { projectId: @project },
    body_object: query_config(query, options)
  )
end
#query_job(query, options = {}) ⇒ Object
228 229 230 231 232 233 234 |
# File 'lib/gcloud/bigquery/connection.rb', line 228

# Starts an asynchronous query job via jobs.insert.
# NOTE(review): the `options` identifier was lost in extraction; restored.
def query_job query, options = {}
  @client.execute(
    api_method: @bigquery.jobs.insert,
    parameters: { projectId: @project },
    body_object: query_table_config(query, options)
  )
end