Class: Gcloud::Bigquery::Service
- Inherits:
-
Object
- Object
- Gcloud::Bigquery::Service
- Defined in:
- lib/gcloud/bigquery/service.rb
Constant Summary collapse
- API =
Alias to the Google Client API module
Google::Apis::BigqueryV2
Instance Attribute Summary collapse
- #credentials ⇒ Object
-
#mocked_service ⇒ Object
Returns the value of attribute mocked_service.
- #project ⇒ Object
Class Method Summary collapse
-
.table_ref_from_s(str, default_table_ref) ⇒ Object
Extracts at least ‘tbl` group, and possibly `dts` and `prj` groups, from strings in the formats: “my_table”, “my_dataset.my_table”, or “my-project:my_dataset.my_table”.
Instance Method Summary collapse
- #copy_table(source, target, options = {}) ⇒ Object
-
#delete_dataset(dataset_id, force = nil) ⇒ Object
Deletes the dataset specified by the datasetId value.
-
#delete_table(dataset_id, table_id) ⇒ Object
Deletes the table specified by tableId from the dataset.
- #extract_table(table, storage_files, options = {}) ⇒ Object
-
#get_dataset(dataset_id) ⇒ Object
Returns the dataset specified by datasetID.
-
#get_job(job_id) ⇒ Object
Returns the job specified by jobID.
- #get_project_table(project_id, dataset_id, table_id) ⇒ Object
-
#get_table(dataset_id, table_id) ⇒ Object
Gets the specified table resource by table ID.
-
#initialize(project, credentials, retries: nil, timeout: nil) ⇒ Service
constructor
Creates a new Service instance.
-
#insert_dataset(new_dataset_gapi) ⇒ Object
Creates a new empty dataset.
- #insert_job(config) ⇒ Object
-
#insert_table(dataset_id, new_table_gapi) ⇒ Object
Creates a new, empty table in the dataset.
- #insert_tabledata(dataset_id, table_id, rows, options = {}) ⇒ Object
- #inspect ⇒ Object
-
#job_query_results(job_id, options = {}) ⇒ Object
Returns the query data for the job.
-
#list_datasets(options = {}) ⇒ Object
Lists all datasets in the specified project to which you have been granted the READER dataset role.
-
#list_jobs(options = {}) ⇒ Object
Lists all jobs in the specified project to which you have been granted the READER job role.
-
#list_tabledata(dataset_id, table_id, options = {}) ⇒ Object
Retrieves data from the table.
-
#list_tables(dataset_id, options = {}) ⇒ Object
Lists all tables in the specified dataset.
- #load_table_file(dataset_id, table_id, file, options = {}) ⇒ Object
- #load_table_gs_url(dataset_id, table_id, url, options = {}) ⇒ Object
-
#patch_dataset(dataset_id, patched_dataset_gapi) ⇒ Object
Updates information in an existing dataset, only replacing fields that are provided in the submitted dataset resource.
-
#patch_table(dataset_id, table_id, patched_table_gapi) ⇒ Object
Updates information in an existing table, replacing fields that are provided in the submitted table resource.
- #query(query, options = {}) ⇒ Object
- #query_job(query, options = {}) ⇒ Object
- #service ⇒ Object
Constructor Details
#initialize(project, credentials, retries: nil, timeout: nil) ⇒ Service
Creates a new Service instance.
40 41 42 43 44 45 46 47 48 49 50 |
# File 'lib/gcloud/bigquery/service.rb', line 40 def initialize project, credentials, retries: nil, timeout: nil @project = project @credentials = credentials @service = API::BigqueryService.new @service.client_options.application_name = "gcloud-ruby" @service.client_options.application_version = Gcloud::VERSION @service.request_options.retries = retries || 3 @service.request_options.timeout_sec = timeout if timeout @service.authorization = @credentials.client end |
Instance Attribute Details
#credentials ⇒ Object
36 37 38 |
# File 'lib/gcloud/bigquery/service.rb', line 36 def credentials @credentials end |
#mocked_service ⇒ Object
Returns the value of attribute mocked_service.
56 57 58 |
# File 'lib/gcloud/bigquery/service.rb', line 56 def mocked_service @mocked_service end |
#project ⇒ Object
33 34 35 |
# File 'lib/gcloud/bigquery/service.rb', line 33 def project @project end |
Class Method Details
.table_ref_from_s(str, default_table_ref) ⇒ Object
Extracts at least ‘tbl` group, and possibly `dts` and `prj` groups, from strings in the formats: “my_table”, “my_dataset.my_table”, or “my-project:my_dataset.my_table”. Then merges project_id and dataset_id from the default table if they are missing.
277 278 279 280 281 282 283 284 285 286 287 288 289 290 |
# File 'lib/gcloud/bigquery/service.rb', line 277 def self.table_ref_from_s str, default_table_ref str = str.to_s m = /\A(((?<prj>\S*):)?(?<dts>\S*)\.)?(?<tbl>\S*)\z/.match str unless m fail ArgumentError, "unable to identify table from #{str.inspect}" end str_table_ref_hash = { project_id: m["prj"], dataset_id: m["dts"], table_id: m["tbl"] }.delete_if { |_, v| v.nil? } new_table_ref_hash = default_table_ref.to_h.merge str_table_ref_hash Google::Apis::BigqueryV2::TableReference.new new_table_ref_hash end |
Instance Method Details
#copy_table(source, target, options = {}) ⇒ Object
244 245 246 247 248 |
# File 'lib/gcloud/bigquery/service.rb', line 244 def copy_table source, target, options = {} service.insert_job @project, copy_table_config(source, target, options) rescue Google::Apis::Error => e raise Gcloud::Error.from_error(e) end |
#delete_dataset(dataset_id, force = nil) ⇒ Object
Deletes the dataset specified by the datasetId value. Before you can delete a dataset, you must delete all its tables, either manually or by specifying force: true in options. Immediately after deletion, you can create another dataset with the same name.
100 101 102 103 104 |
# File 'lib/gcloud/bigquery/service.rb', line 100 def delete_dataset dataset_id, force = nil service.delete_dataset @project, dataset_id, delete_contents: force rescue Google::Apis::Error => e raise Gcloud::Error.from_error(e) end |
#delete_table(dataset_id, table_id) ⇒ Object
Deletes the table specified by tableId from the dataset. If the table contains data, all the data will be deleted.
153 154 155 156 157 |
# File 'lib/gcloud/bigquery/service.rb', line 153 def delete_table dataset_id, table_id service.delete_table @project, dataset_id, table_id rescue Google::Apis::Error => e raise Gcloud::Error.from_error(e) end |
#extract_table(table, storage_files, options = {}) ⇒ Object
250 251 252 253 254 255 |
# File 'lib/gcloud/bigquery/service.rb', line 250 def extract_table table, storage_files, options = {} service.insert_job \ @project, extract_table_config(table, storage_files, options) rescue Google::Apis::Error => e raise Gcloud::Error.from_error(e) end |
#get_dataset(dataset_id) ⇒ Object
Returns the dataset specified by datasetID.
71 72 73 74 75 |
# File 'lib/gcloud/bigquery/service.rb', line 71 def get_dataset dataset_id service.get_dataset @project, dataset_id rescue Google::Apis::Error => e raise Gcloud::Error.from_error(e) end |
#get_job(job_id) ⇒ Object
Returns the job specified by jobID.
203 204 205 206 207 |
# File 'lib/gcloud/bigquery/service.rb', line 203 def get_job job_id service.get_job @project, job_id rescue Google::Apis::Error => e raise Gcloud::Error.from_error(e) end |
#get_project_table(project_id, dataset_id, table_id) ⇒ Object
116 117 118 119 120 |
# File 'lib/gcloud/bigquery/service.rb', line 116 def get_project_table project_id, dataset_id, table_id service.get_table project_id, dataset_id, table_id rescue Google::Apis::Error => e raise Gcloud::Error.from_error(e) end |
#get_table(dataset_id, table_id) ⇒ Object
Gets the specified table resource by table ID. This method does not return the data in the table, it only returns the table resource, which describes the structure of this table.
127 128 129 130 131 |
# File 'lib/gcloud/bigquery/service.rb', line 127 def get_table dataset_id, table_id get_project_table @project, dataset_id, table_id rescue Google::Apis::Error => e raise Gcloud::Error.from_error(e) end |
#insert_dataset(new_dataset_gapi) ⇒ Object
Creates a new empty dataset.
79 80 81 82 83 |
# File 'lib/gcloud/bigquery/service.rb', line 79 def insert_dataset new_dataset_gapi service.insert_dataset @project, new_dataset_gapi rescue Google::Apis::Error => e raise Gcloud::Error.from_error(e) end |
#insert_job(config) ⇒ Object
209 210 211 212 213 214 215 216 |
# File 'lib/gcloud/bigquery/service.rb', line 209 def insert_job config job_object = API::Job.new( configuration: config ) service.insert_job @project, job_object rescue Google::Apis::Error => e raise Gcloud::Error.from_error(e) end |
#insert_table(dataset_id, new_table_gapi) ⇒ Object
Creates a new, empty table in the dataset.
135 136 137 138 139 |
# File 'lib/gcloud/bigquery/service.rb', line 135 def insert_table dataset_id, new_table_gapi service.insert_table @project, dataset_id, new_table_gapi rescue Google::Apis::Error => e raise Gcloud::Error.from_error(e) end |
#insert_tabledata(dataset_id, table_id, rows, options = {}) ⇒ Object
170 171 172 173 174 175 176 177 178 179 180 181 182 183 184 185 186 187 |
# File 'lib/gcloud/bigquery/service.rb', line 170 def insert_tabledata dataset_id, table_id, rows, options = {} insert_rows = Array(rows).map do |row| Google::Apis::BigqueryV2::InsertAllTableDataRequest::Row.new( insert_id: Digest::MD5.base64digest(row.inspect), # Hash[row.map{|(k,v)| [k.to_s,v]}] for Hash<String,Object> json: row ) end insert_req = Google::Apis::BigqueryV2::InsertAllTableDataRequest.new( rows: insert_rows, ignore_unknown_values: options[:ignore_unknown], skip_invalid_rows: options[:skip_invalid] ) service.insert_all_table_data @project, dataset_id, table_id, insert_req rescue Google::Apis::Error => e raise Gcloud::Error.from_error(e) end |
#inspect ⇒ Object
292 293 294 |
# File 'lib/gcloud/bigquery/service.rb', line 292 def inspect "#{self.class}(#{@project})" end |
#job_query_results(job_id, options = {}) ⇒ Object
Returns the query data for the job.
233 234 235 236 237 238 239 240 241 242 |
# File 'lib/gcloud/bigquery/service.rb', line 233 def job_query_results job_id, options = {} service.get_job_query_results @project, job_id, max_results: options.delete(:max), page_token: options.delete(:token), start_index: options.delete(:start), timeout_ms: options.delete(:timeout) rescue Google::Apis::Error => e raise Gcloud::Error.from_error(e) end |
#list_datasets(options = {}) ⇒ Object
Lists all datasets in the specified project to which you have been granted the READER dataset role.
61 62 63 64 65 66 67 |
# File 'lib/gcloud/bigquery/service.rb', line 61 def list_datasets options = {} service.list_datasets \ @project, all: options[:all], max_results: options[:max], page_token: options[:token] rescue Google::Apis::Error => e raise Gcloud::Error.from_error(e) end |
#list_jobs(options = {}) ⇒ Object
Lists all jobs in the specified project to which you have been granted the READER job role.
192 193 194 195 196 197 198 199 |
# File 'lib/gcloud/bigquery/service.rb', line 192 def list_jobs options = {} service.list_jobs \ @project, all_users: options[:all], max_results: options[:max], page_token: options[:token], projection: "full", state_filter: options[:filter] rescue Google::Apis::Error => e raise Gcloud::Error.from_error(e) end |
#list_tabledata(dataset_id, table_id, options = {}) ⇒ Object
Retrieves data from the table.
161 162 163 164 165 166 167 168 |
# File 'lib/gcloud/bigquery/service.rb', line 161 def list_tabledata dataset_id, table_id, options = {} service.list_table_data @project, dataset_id, table_id, max_results: options.delete(:max), page_token: options.delete(:token), start_index: options.delete(:start) rescue Google::Apis::Error => e raise Gcloud::Error.from_error(e) end |
#list_tables(dataset_id, options = {}) ⇒ Object
Lists all tables in the specified dataset. Requires the READER dataset role.
109 110 111 112 113 114 |
# File 'lib/gcloud/bigquery/service.rb', line 109 def list_tables dataset_id, options = {} service.list_tables @project, dataset_id, max_results: options[:max], page_token: options[:token] rescue Google::Apis::Error => e raise Gcloud::Error.from_error(e) end |
#load_table_file(dataset_id, table_id, file, options = {}) ⇒ Object
264 265 266 267 268 269 270 |
# File 'lib/gcloud/bigquery/service.rb', line 264 def load_table_file dataset_id, table_id, file, options = {} service.insert_job \ @project, load_table_file_config(dataset_id, table_id, file, options), upload_source: file, content_type: mime_type_for(file) rescue Google::Apis::Error => e raise Gcloud::Error.from_error(e) end |
#load_table_gs_url(dataset_id, table_id, url, options = {}) ⇒ Object
257 258 259 260 261 262 |
# File 'lib/gcloud/bigquery/service.rb', line 257 def load_table_gs_url dataset_id, table_id, url, options = {} service.insert_job \ @project, load_table_url_config(dataset_id, table_id, url, options) rescue Google::Apis::Error => e raise Gcloud::Error.from_error(e) end |
#patch_dataset(dataset_id, patched_dataset_gapi) ⇒ Object
Updates information in an existing dataset, only replacing fields that are provided in the submitted dataset resource.
88 89 90 91 92 |
# File 'lib/gcloud/bigquery/service.rb', line 88 def patch_dataset dataset_id, patched_dataset_gapi service.patch_dataset @project, dataset_id, patched_dataset_gapi rescue Google::Apis::Error => e raise Gcloud::Error.from_error(e) end |
#patch_table(dataset_id, table_id, patched_table_gapi) ⇒ Object
Updates information in an existing table, replacing fields that are provided in the submitted table resource.
144 145 146 147 148 |
# File 'lib/gcloud/bigquery/service.rb', line 144 def patch_table dataset_id, table_id, patched_table_gapi service.patch_table @project, dataset_id, table_id, patched_table_gapi rescue Google::Apis::Error => e raise Gcloud::Error.from_error(e) end |
#query(query, options = {}) ⇒ Object
225 226 227 228 229 |
# File 'lib/gcloud/bigquery/service.rb', line 225 def query query, options = {} service.query_job @project, query_config(query, options) rescue Google::Apis::Error => e raise Gcloud::Error.from_error(e) end |
#query_job(query, options = {}) ⇒ Object
218 219 220 221 222 223 |
# File 'lib/gcloud/bigquery/service.rb', line 218 def query_job query, options = {} config = query_table_config(query, options) service.insert_job @project, config rescue Google::Apis::Error => e raise Gcloud::Error.from_error(e) end |
#service ⇒ Object
52 53 54 55 |
# File 'lib/gcloud/bigquery/service.rb', line 52 def service return mocked_service if mocked_service @service end |