/*!
 * Copyright 2014 Google Inc. All Rights Reserved.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
/// <reference types="node" />
import { ServiceObject, ResponseCallback, SetMetadataResponse } from '@google-cloud/common';
import { ResourceStream } from '@google-cloud/paginator';
import { BigQuery, Job, Dataset, Query, SimpleQueryRowsResponse, SimpleQueryRowsCallback, ResourceCallback, RequestCallback, PagedResponse, PagedCallback, JobRequest, PagedRequest } from '.';
import { Duplex, Writable } from 'stream';
import { JobMetadata } from './job';
import bigquery from './types';
import { IntegerTypeCastOptions } from './bigquery';
import { RowQueue } from './rowQueue';
export interface File {
    bucket: any;
    kmsKeyName?: string;
    userProject?: string;
    name: string;
    generation?: number;
}
export type JobMetadataCallback = RequestCallback<JobMetadata>;
export type JobMetadataResponse = [JobMetadata];
export type RowMetadata = any;
export type InsertRowsOptions = bigquery.ITableDataInsertAllRequest & {
    createInsertId?: boolean;
    partialRetries?: number;
    raw?: boolean;
    schema?: string | {};
};
export type InsertRowsResponse = [
    bigquery.ITableDataInsertAllResponse | bigquery.ITable
];
export type InsertRowsCallback = RequestCallback<bigquery.ITableDataInsertAllResponse | bigquery.ITable>;
export type RowsResponse = PagedResponse<RowMetadata, GetRowsOptions, bigquery.ITableDataList | bigquery.ITable>;
export type RowsCallback = PagedCallback<RowMetadata, GetRowsOptions, bigquery.ITableDataList | bigquery.ITable>;
export interface InsertRow {
    insertId?: string;
    json?: bigquery.IJsonObject;
}
export type TableRow = bigquery.ITableRow;
export type TableRowField = bigquery.ITableCell;
export type TableRowValue = string | TableRow;
export type GetRowsOptions = PagedRequest<bigquery.tabledata.IListParams> & {
    wrapIntegers?: boolean | IntegerTypeCastOptions;
    parseJSON?: boolean;
};
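/*
 * A sketch of how the row-reading options above are typically used (values
 * are illustrative): `wrapIntegers` returns INT64 values wrapped so their
 * precision is preserved (or custom-cast via IntegerTypeCastOptions), and
 * `parseJSON` parses JSON columns into objects.
 *
 *   table.getRows({wrapIntegers: true, parseJSON: true}).then(([rows]) => {});
 */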
export type JobLoadMetadata = JobRequest<bigquery.IJobConfigurationLoad> & {
    format?: string;
};
export type CreateExtractJobOptions = JobRequest<bigquery.IJobConfigurationExtract> & {
    format?: 'CSV' | 'JSON' | 'AVRO' | 'PARQUET' | 'ORC';
    gzip?: boolean;
};
export type JobResponse = [Job, bigquery.IJob];
export type JobCallback = ResourceCallback<Job, bigquery.IJob>;
export type CreateCopyJobMetadata = CopyTableMetadata;
export type SetTableMetadataOptions = TableMetadata;
export type CopyTableMetadata = JobRequest<bigquery.IJobConfigurationTableCopy>;
export type TableMetadata = bigquery.ITable & {
    name?: string;
    schema?: string | TableField[] | TableSchema;
    partitioning?: string;
    view?: string | ViewDefinition;
};
export type ViewDefinition = bigquery.IViewDefinition;
export type FormattedMetadata = bigquery.ITable;
export type TableSchema = bigquery.ITableSchema;
export type TableField = bigquery.ITableFieldSchema;
export interface PartialInsertFailure {
    message: string;
    reason: string;
    row: RowMetadata;
}
export type Policy = bigquery.IPolicy;
export type GetPolicyOptions = bigquery.IGetPolicyOptions;
export type SetPolicyOptions = Omit<bigquery.ISetIamPolicyRequest, 'policy'>;
export type PolicyRequest = bigquery.IGetIamPolicyRequest;
export type PolicyResponse = [Policy];
export type PolicyCallback = RequestCallback<PolicyResponse>;
export type PermissionsResponse = [bigquery.ITestIamPermissionsResponse];
export type PermissionsCallback = RequestCallback<PermissionsResponse>;
export interface InsertStreamOptions {
    insertRowsOptions?: InsertRowsOptions;
    batchOptions?: RowBatchOptions;
}
export interface RowBatchOptions {
    maxBytes: number;
    maxRows: number;
    maxMilliseconds: number;
}
export interface TableOptions {
    location?: string;
}
/**
 * Table objects are returned by methods such as
 * {@link Dataset#table}, {@link Dataset#createTable}, and
 * {@link Dataset#getTables}.
 *
 * @class
 * @param {Dataset} dataset {@link Dataset} instance.
 * @param {string} id The ID of the table.
 * @param {object} [options] Table options.
 * @param {string} [options.location] The geographic location of the table, by
 *     default this value is inherited from the dataset. This can be used to
 *     configure the location of all jobs created through a table instance. It
 *     cannot be used to set the actual location of the table. This value will
 *     be superseded by any API responses containing location data for the
 *     table.
 *
 * @example
 * ```
 * const {BigQuery} = require('@google-cloud/bigquery');
 * const bigquery = new BigQuery();
 * const dataset = bigquery.dataset('my-dataset');
 *
 * const table = dataset.table('my-table');
 * ```
 */
declare class Table extends ServiceObject {
    dataset: Dataset;
    bigQuery: BigQuery;
    location?: string;
    rowQueue?: RowQueue;
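    /**
     * Stream the table's rows as a readable object stream. A minimal usage
     * sketch (the table name is illustrative):
     *
     * ```
     * dataset.table('my-table').createReadStream()
     *   .on('error', console.error)
     *   .on('data', (row) => {
     *     // `row` is an object keyed by the table's column names.
     *   })
     *   .on('end', () => {
     *     // All rows have been retrieved.
     *   });
     * ```
     */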
    createReadStream(options?: GetRowsOptions): ResourceStream<RowMetadata>;
    constructor(dataset: Dataset, id: string, options?: TableOptions);
    /**
     * Convert a comma-separated name:type string to a table schema object.
     *
     * @static
     * @private
     *
     * @param {string} str Comma-separated schema string.
     * @returns {object} Table schema in the format the API expects.
     */
    static createSchemaFromString_(str: string): TableSchema;
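    /*
     * Illustrative sketch of the assumed behavior (not a documented contract):
     * createSchemaFromString_('name:string, age:integer') yields roughly
     * {fields: [{name: 'name', type: 'STRING'}, {name: 'age', type: 'INTEGER'}]}.
     */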
    /**
     * Convert a row entry from native types to their encoded types that the API
     * expects.
     *
     * @static
     * @private
     *
     * @param {*} value The value to be converted.
     * @returns {*} The converted value.
     */
    static encodeValue_(value?: {} | null): {} | null;
    /**
     * @private
     */
    static formatMetadata_(options: TableMetadata): FormattedMetadata;
    /**
     * @callback JobMetadataCallback
     * @param {?Error} err Request error, if any.
     * @param {object} apiResponse The full API response.
     */
    /**
     * @typedef {array} JobMetadataResponse
     * @property {object} 0 The full API response.
     */
    /**
     * Copy data from one table to another, optionally creating that table.
     *
     * @param {Table} destination The destination table.
     * @param {object} [metadata] Metadata to set with the copy operation. The
     *     metadata object should be in the format of a
     *     {@link https://cloud.google.com/bigquery/docs/reference/rest/v2/Job#JobConfigurationTableCopy| `JobConfigurationTableCopy`}
     *     object.
     * @param {string} [metadata.jobId] Custom id for the underlying job.
     * @param {string} [metadata.jobPrefix] Prefix to apply to the underlying job
     *     id.
     * @param {JobMetadataCallback} [callback] The callback function.
     * @param {?error} callback.err An error returned while making this request
     * @param {object} callback.apiResponse The full API response.
     * @returns {Promise<JobMetadataResponse>}
     *
     * @throws {Error} If a destination other than a Table object is provided.
     *
     * @example
     * ```
     * const {BigQuery} = require('@google-cloud/bigquery');
     * const bigquery = new BigQuery();
     * const dataset = bigquery.dataset('my-dataset');
     *
     * const table = dataset.table('my-table');
     * const yourTable = dataset.table('your-table');
     *
     * table.copy(yourTable, (err, apiResponse) => {});
     *
     * //-
     * // See https://cloud.google.com/bigquery/docs/reference/rest/v2/Job#JobConfigurationTableCopy
     * // for all available options.
     * //-
     * const metadata = {
     *   createDisposition: 'CREATE_NEVER',
     *   writeDisposition: 'WRITE_TRUNCATE'
     * };
     *
     * table.copy(yourTable, metadata, (err, apiResponse) => {});
     *
     * //-
     * // If the callback is omitted, we'll return a Promise.
     * //-
     * table.copy(yourTable, metadata).then((data) => {
     *   const apiResponse = data[0];
     * });
     * ```
     */
    copy(destination: Table, metadata?: CopyTableMetadata): Promise<JobMetadataResponse>;
    copy(destination: Table, metadata: CopyTableMetadata, callback: JobMetadataCallback): void;
    copy(destination: Table, callback: JobMetadataCallback): void;
    /**
     * @callback JobMetadataCallback
     * @param {?Error} err Request error, if any.
     * @param {object} apiResponse The full API response.
     */
    /**
     * @typedef {array} JobMetadataResponse
     * @property {object} 0 The full API response.
     */
    /**
     * Copy data from multiple tables into this table.
     *
     * @param {Table|Table[]} sourceTables The
     *     source table(s) to copy data from.
     * @param {object} [metadata] Metadata to set with the copy operation. The
     *     metadata object should be in the format of a
     *     {@link https://cloud.google.com/bigquery/docs/reference/rest/v2/Job#JobConfigurationTableCopy| `JobConfigurationTableCopy`}
     *     object.
     * @param {string} [metadata.jobId] Custom id for the underlying job.
     * @param {string} [metadata.jobPrefix] Prefix to apply to the underlying job
     *     id.
     * @param {JobMetadataCallback} [callback] The callback function.
     * @param {?error} callback.err An error returned while making this request
     * @param {object} callback.apiResponse The full API response.
     * @returns {Promise<JobMetadataResponse>}
     *
     * @throws {Error} If a source other than a Table object is provided.
     *
     * @example
     * ```
     * const {BigQuery} = require('@google-cloud/bigquery');
     * const bigquery = new BigQuery();
     * const dataset = bigquery.dataset('my-dataset');
     * const table = dataset.table('my-table');
     *
     * const sourceTables = [
     *   dataset.table('your-table'),
     *   dataset.table('your-second-table')
     * ];
     *
     * table.copyFrom(sourceTables, (err, apiResponse) => {});
     *
     * //-
     * // See https://cloud.google.com/bigquery/docs/reference/rest/v2/Job#JobConfigurationTableCopy
     * // for all available options.
     * //-
     * const metadata = {
     *   createDisposition: 'CREATE_NEVER',
     *   writeDisposition: 'WRITE_TRUNCATE'
     * };
     *
     * table.copyFrom(sourceTables, metadata, (err, apiResponse) => {});
     *
     * //-
     * // If the callback is omitted, we'll return a Promise.
     * //-
     * table.copyFrom(sourceTables, metadata).then((data) => {
     *   const apiResponse = data[0];
     * });
     * ```
     */
    copyFrom(sourceTables: Table | Table[], metadata?: CopyTableMetadata): Promise<JobMetadataResponse>;
    copyFrom(sourceTables: Table | Table[], metadata: CopyTableMetadata, callback: JobMetadataCallback): void;
    copyFrom(sourceTables: Table | Table[], callback: JobMetadataCallback): void;
    /**
     * Copy data from one table to another, optionally creating that table.
     *
     * See {@link https://cloud.google.com/bigquery/docs/reference/v2/jobs/insert| Jobs: insert API Documentation}
     *
     * @param {Table} destination The destination table.
     * @param {object} [metadata] Metadata to set with the copy operation. The
     *     metadata object should be in the format of a
     *     {@link https://cloud.google.com/bigquery/docs/reference/rest/v2/Job#JobConfigurationTableCopy| `JobConfigurationTableCopy`}
     *     object.
     * @param {string} [metadata.jobId] Custom job id.
     * @param {string} [metadata.jobPrefix] Prefix to apply to the job id.
     * @param {JobCallback} [callback] The callback function.
     * @param {?error} callback.err An error returned while making this request
     * @param {Job} callback.job The job used to copy your table.
     * @param {object} callback.apiResponse The full API response.
     * @returns {Promise<JobResponse>}
     *
     * @throws {Error} If a destination other than a Table object is provided.
     *
     * @example
     * ```
     * const {BigQuery} = require('@google-cloud/bigquery');
     * const bigquery = new BigQuery();
     * const dataset = bigquery.dataset('my-dataset');
     * const table = dataset.table('my-table');
     *
     * const yourTable = dataset.table('your-table');
     * table.createCopyJob(yourTable, (err, job, apiResponse) => {
     *   // `job` is a Job object that can be used to check the status of the
     *   // request.
     * });
     *
     * //-
     * // See https://cloud.google.com/bigquery/docs/reference/rest/v2/Job#JobConfigurationTableCopy
     * // for all available options.
     * //-
     * const metadata = {
     *   createDisposition: 'CREATE_NEVER',
     *   writeDisposition: 'WRITE_TRUNCATE'
     * };
     *
     * table.createCopyJob(yourTable, metadata, (err, job, apiResponse) => {});
     *
     * //-
     * // If the callback is omitted, we'll return a Promise.
     * //-
     * table.createCopyJob(yourTable, metadata).then((data) => {
     *   const job = data[0];
     *   const apiResponse = data[1];
     * });
     * ```
     */
    createCopyJob(destination: Table, metadata?: CreateCopyJobMetadata): Promise<JobResponse>;
    createCopyJob(destination: Table, metadata: CreateCopyJobMetadata, callback: JobCallback): void;
    createCopyJob(destination: Table, callback: JobCallback): void;
    /**
     * Copy data from multiple tables into this table.
     *
     * See {@link https://cloud.google.com/bigquery/docs/reference/v2/jobs/insert| Jobs: insert API Documentation}
     *
     * @param {Table|Table[]} sourceTables The
     *     source table(s) to copy data from.
     * @param {object} [metadata] Metadata to set with the copy operation. The
     *     metadata object should be in the format of a
     *     {@link https://cloud.google.com/bigquery/docs/reference/rest/v2/Job#JobConfigurationTableCopy| `JobConfigurationTableCopy`}
     *     object.
     * @param {string} [metadata.jobId] Custom job id.
     * @param {string} [metadata.jobPrefix] Prefix to apply to the job id.
     * @param {JobCallback} [callback] The callback function.
     * @param {?error} callback.err An error returned while making this request
     * @param {Job} callback.job The job used to copy your table.
     * @param {object} callback.apiResponse The full API response.
     * @returns {Promise<JobResponse>}
     *
     * @throws {Error} If a source other than a Table object is provided.
     *
     * @example
     * ```
     * const {BigQuery} = require('@google-cloud/bigquery');
     * const bigquery = new BigQuery();
     * const dataset = bigquery.dataset('my-dataset');
     * const table = dataset.table('my-table');
     *
     * const sourceTables = [
     *   dataset.table('your-table'),
     *   dataset.table('your-second-table')
     * ];
     *
     * const callback = (err, job, apiResponse) => {
     *   // `job` is a Job object that can be used to check the status of the
     *   // request.
     * };
     *
     * table.createCopyFromJob(sourceTables, callback);
     *
     * //-
     * // See https://cloud.google.com/bigquery/docs/reference/rest/v2/Job#JobConfigurationTableCopy
     * // for all available options.
     * //-
     * const metadata = {
     *   createDisposition: 'CREATE_NEVER',
     *   writeDisposition: 'WRITE_TRUNCATE'
     * };
     *
     * table.createCopyFromJob(sourceTables, metadata, callback);
     *
     * //-
     * // If the callback is omitted, we'll return a Promise.
     * //-
     * table.createCopyFromJob(sourceTables, metadata).then((data) => {
     *   const job = data[0];
     *   const apiResponse = data[1];
     * });
     * ```
     */
    createCopyFromJob(source: Table | Table[], metadata?: CopyTableMetadata): Promise<JobResponse>;
    createCopyFromJob(source: Table | Table[], metadata: CopyTableMetadata, callback: JobCallback): void;
    createCopyFromJob(source: Table | Table[], callback: JobCallback): void;
    /**
     * Export table to Cloud Storage.
     *
     * See {@link https://cloud.google.com/bigquery/docs/reference/v2/jobs/insert| Jobs: insert API Documentation}
     *
     * @param {string|File} destination Where the file should be exported
     *     to. A string or a {@link
     *     https://googleapis.dev/nodejs/storage/latest/File.html File}
     *     object.
     * @param {object} [options] The configuration object.
     * @param {string} [options.format="CSV"] The format to export the data in.
     *     Allowed options are "AVRO", "CSV", "JSON", "ORC", or "PARQUET".
     * @param {boolean} [options.gzip] Specify if you would like the file compressed
     *     with GZIP. Default: false.
     * @param {string} [options.jobId] Custom job id.
     * @param {string} [options.jobPrefix] Prefix to apply to the job id.
     * @param {JobCallback} [callback] The callback function.
     * @param {?error} callback.err An error returned while making this request
     * @param {Job} callback.job The job used to export the table.
     * @param {object} callback.apiResponse The full API response.
     * @returns {Promise<JobResponse>}
     *
     * @throws {Error} If destination isn't a File object.
     * @throws {Error} If destination format isn't recognized.
     *
     * @example
     * ```
     * const {Storage} = require('@google-cloud/storage');
     * const {BigQuery} = require('@google-cloud/bigquery');
     * const bigquery = new BigQuery();
     * const dataset = bigquery.dataset('my-dataset');
     * const table = dataset.table('my-table');
     *
     * const storage = new Storage({
     *   projectId: 'grape-spaceship-123'
     * });
     * const extractedFile = storage.bucket('institutions').file('2014.csv');
     *
     * function callback(err, job, apiResponse) {
     *   // `job` is a Job object that can be used to check the status of the
     *   // request.
     * }
     *
     * //-
     * // To use the default options, just pass a {@link
     * // https://googleapis.dev/nodejs/storage/latest/File.html File}
     * // object.
     * //
     * // Note: The exported format type will be inferred by the file's extension.
     * // If you wish to override this, or provide an array of destination files,
     * // you must provide an `options` object.
     * //-
     * table.createExtractJob(extractedFile, callback);
     *
     * //-
     * // If you need more customization, pass an `options` object.
     * //-
     * const options = {
     *   format: 'json',
     *   gzip: true
     * };
     *
     * table.createExtractJob(extractedFile, options, callback);
     *
     * //-
     * // You can also specify multiple destination files.
     * //-
     * table.createExtractJob([
     *   storage.bucket('institutions').file('2014.json'),
     *   storage.bucket('institutions-copy').file('2014.json')
     * ], options, callback);
     *
     * //-
     * // If the callback is omitted, we'll return a Promise.
     * //-
     * table.createExtractJob(extractedFile, options).then((data) => {
     *   const job = data[0];
     *   const apiResponse = data[1];
     * });
     * ```
     */
    createExtractJob(destination: File, options?: CreateExtractJobOptions): Promise<JobResponse>;
    createExtractJob(destination: File, options: CreateExtractJobOptions, callback: JobCallback): void;
    createExtractJob(destination: File, callback: JobCallback): void;
    /**
     * Load data from a local file or Storage {@link
     * https://googleapis.dev/nodejs/storage/latest/File.html File}.
     *
     * By loading data this way, you create a load job that will run your data
     * load asynchronously. If you would like instantaneous access to your data,
     * insert it using {@link Table#insert}.
     *
     * Note: The file type will be inferred by the given file's extension. If you
     * wish to override this, you must provide `metadata.format`.
     *
     * See {@link https://cloud.google.com/bigquery/docs/reference/v2/jobs/insert| Jobs: insert API Documentation}
     *
     * @param {string|File|File[]} source The source file to load. A string (path)
     *     to a local file, or one or more {@link
     *     https://googleapis.dev/nodejs/storage/latest/File.html File}
     *     objects.
     * @param {object} [metadata] Metadata to set with the load operation. The
     *     metadata object should be in the format of the
     *     {@link https://cloud.google.com/bigquery/docs/reference/rest/v2/Job#JobConfigurationLoad| `configuration.load`}
     *     property of a Jobs resource.
     * @param {string} [metadata.format] The format the data being loaded is in.
     *     Allowed options are "AVRO", "CSV", "JSON", "ORC", or "PARQUET".
     * @param {string} [metadata.jobId] Custom job id.
     * @param {string} [metadata.jobPrefix] Prefix to apply to the job id.
     * @param {JobCallback} [callback] The callback function.
     * @param {?error} callback.err An error returned while making this request
     * @param {Job} callback.job The job used to load your data.
     * @param {object} callback.apiResponse The full API response.
     * @returns {Promise<JobResponse>}
     *
     * @throws {Error} If the source isn't a string file name or a File instance.
     *
     * @example
     * ```
     * const {Storage} = require('@google-cloud/storage');
     * const {BigQuery} = require('@google-cloud/bigquery');
     * const bigquery = new BigQuery();
     * const dataset = bigquery.dataset('my-dataset');
     * const table = dataset.table('my-table');
     *
     * //-
     * // Load data from a local file.
     * //-
     * const callback = (err, job, apiResponse) => {
     *   // `job` is a Job object that can be used to check the status of the
     *   // request.
     * };
     *
     * table.createLoadJob('./institutions.csv', callback);
     *
     * //-
     * // You may also pass in metadata in the format of a Jobs resource. See
     * // (https://cloud.google.com/bigquery/docs/reference/rest/v2/Job#JobConfigurationLoad)
     * // for a full list of supported values.
     * //-
     * const metadata = {
     *   encoding: 'ISO-8859-1',
     *   sourceFormat: 'NEWLINE_DELIMITED_JSON'
     * };
     *
     * table.createLoadJob('./my-data.csv', metadata, callback);
     *
     * //-
     * // Load data from a file in your Cloud Storage bucket.
     * //-
     * const storage = new Storage({
     *   projectId: 'grape-spaceship-123'
     * });
     * const data = storage.bucket('institutions').file('data.csv');
     * table.createLoadJob(data, callback);
     *
     * //-
     * // Load data from multiple files in your Cloud Storage bucket(s).
     * //-
     * table.createLoadJob([
     *   storage.bucket('institutions').file('2011.csv'),
     *   storage.bucket('institutions').file('2012.csv')
     * ], callback);
     *
     * //-
     * // If the callback is omitted, we'll return a Promise.
     * //-
     * table.createLoadJob(data).then((data) => {
     *   const job = data[0];
     *   const apiResponse = data[1];
     * });
     * ```
     */
    createLoadJob(source: string | File | File[], metadata?: JobLoadMetadata): Promise<JobResponse>;
    createLoadJob(source: string | File | File[], metadata: JobLoadMetadata, callback: JobCallback): void;
    createLoadJob(source: string | File | File[], callback: JobCallback): void;
    /**
     * @param {string | File | File[]} source
     * @param {JobLoadMetadata} metadata
     * @returns {Promise<JobResponse>}
     * @private
     */
    _createLoadJob(source: string | File | File[], metadata: JobLoadMetadata): Promise<JobResponse>;
    /**
     * Run a query as a job. No results are immediately returned. Instead, your
     * callback will be executed with a {@link Job} object that you must
     * ping for the results. See the Job documentation for explanations of how to
     * check on the status of the job.
     *
     * See {@link BigQuery#createQueryJob} for full documentation of this method.
     */
    createQueryJob(options: Query): Promise<JobResponse>;
    createQueryJob(options: Query, callback: JobCallback): void;
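    /*
     * A minimal sketch (the query text is illustrative): the returned Job can
     * be polled for results once it completes.
     *
     *   table.createQueryJob({query: 'SELECT * FROM `my-dataset.my-table` LIMIT 10'})
     *     .then(([job]) => job.getQueryResults())
     *     .then(([rows]) => {
     *       // `rows` holds the query results.
     *     });
     */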
    /**
     * Run a query scoped to your dataset as a readable object stream.
     *
     * See {@link BigQuery#createQueryStream} for full documentation of this
     * method.
     *
     * @param {object} query See {@link BigQuery#createQueryStream} for full
     *     documentation of this method.
     * @returns {stream} See {@link BigQuery#createQueryStream} for full
     *     documentation of this method.
     */
    createQueryStream(query: Query): Duplex;
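    /*
     * A minimal sketch (the query text is illustrative):
     *
     *   table.createQueryStream({query: 'SELECT * FROM `my-dataset.my-table`'})
     *     .on('error', console.error)
     *     .on('data', (row) => {})
     *     .on('end', () => {});
     */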
    /**
     * Creates a write stream. Unlike the public version, this will not
     * automatically poll the underlying job.
     *
     * @private
     *
     * @param {string|object} [metadata] Metadata to set with the load operation.
     *     The metadata object should be in the format of the
     *     {@link https://cloud.google.com/bigquery/docs/reference/rest/v2/Job#JobConfigurationLoad| `configuration.load`}
     *     property of a Jobs resource. If a string is given, it will be used
     *     as the filetype.
     * @param {string} [metadata.jobId] Custom job id.
     * @param {string} [metadata.jobPrefix] Prefix to apply to the job id.
     * @returns {WritableStream}
     */
    createWriteStream_(metadata: JobLoadMetadata | string): Writable;
    /**
     * Load data into your table from a readable stream of AVRO, CSV, JSON, ORC,
     * or PARQUET data.
     *
     * See {@link https://cloud.google.com/bigquery/docs/reference/v2/jobs/insert| Jobs: insert API Documentation}
     *
     * @param {string|object} [metadata] Metadata to set with the load operation.
     *     The metadata object should be in the format of the
     *     {@link https://cloud.google.com/bigquery/docs/reference/rest/v2/Job#JobConfigurationLoad| `configuration.load`}
     *     property of a Jobs resource. If a string is given,
     *     it will be used as the filetype.
     * @param {string} [metadata.jobId] Custom job id.
     * @param {string} [metadata.jobPrefix] Prefix to apply to the job id.
     * @returns {WritableStream}
     *
     * @throws {Error} If source format isn't recognized.
     *
     * @example
     * ```
     * const {BigQuery} = require('@google-cloud/bigquery');
     * const bigquery = new BigQuery();
     * const dataset = bigquery.dataset('my-dataset');
     * const table = dataset.table('my-table');
     *
     * //-
     * // Load data from a CSV file.
     * //-
     * const request = require('request');
     *
     * const csvUrl = 'http://goo.gl/kSE7z6';
     *
     * const metadata = {
     *   allowJaggedRows: true,
     *   skipLeadingRows: 1
     * };
     *
     * request.get(csvUrl)
     *   .pipe(table.createWriteStream(metadata))
     *   .on('job', (job) => {
     *     // `job` is a Job object that can be used to check the status of the
     *     // request.
     *   })
     *   .on('complete', (job) => {
     *     // The job has completed successfully.
     *   });
     *
     * //-
     * // Load data from a JSON file.
     * //-
     * const fs = require('fs');
     *
     * fs.createReadStream('./test/testdata/testfile.json')
     *   .pipe(table.createWriteStream('json'))
     *   .on('job', (job) => {
     *     // `job` is a Job object that can be used to check the status of the
     *     // request.
     *   })
     *   .on('complete', (job) => {
     *     // The job has completed successfully.
     *   });
     * ```
     */
    createWriteStream(metadata: JobLoadMetadata | string): Writable;
    /**
     * Export table to Cloud Storage.
     *
     * @param {string|File} destination Where the file should be exported
     *     to. A string or a {@link
     *     https://googleapis.dev/nodejs/storage/latest/File.html File}.
     * @param {object} [options] The configuration object.
     * @param {string} [options.format="CSV"] The format to export the data in.
     *     Allowed options are "AVRO", "CSV", "JSON", "ORC" or "PARQUET".
     * @param {boolean} [options.gzip] Specify if you would like the file compressed
     *     with GZIP. Default: false.
     * @param {string} [options.jobId] Custom id for the underlying job.
     * @param {string} [options.jobPrefix] Prefix to apply to the underlying job id.
     * @param {JobMetadataCallback} [callback] The callback function.
     * @param {?error} callback.err An error returned while making this request
     * @param {object} callback.apiResponse The full API response.
     * @returns {Promise<JobMetadataResponse>}
     *
     * @throws {Error} If destination isn't a File object.
     * @throws {Error} If destination format isn't recognized.
     *
     * @example
     * ```
     * const {Storage} = require('@google-cloud/storage');
     * const {BigQuery} = require('@google-cloud/bigquery');
     * const bigquery = new BigQuery();
     * const dataset = bigquery.dataset('my-dataset');
     * const table = dataset.table('my-table');
     *
     * const storage = new Storage({
     *   projectId: 'grape-spaceship-123'
     * });
     * const extractedFile = storage.bucket('institutions').file('2014.csv');
     *
     * //-
     * // To use the default options, just pass a {@link
     * // https://googleapis.dev/nodejs/storage/latest/File.html File}
     * // object.
     * //
     * // Note: The exported format type will be inferred by the file's extension.
     * // If you wish to override this, or provide an array of destination files,
     * // you must provide an `options` object.
     * //-
     * table.extract(extractedFile, (err, apiResponse) => {});
     *
     * //-
     * // If you need more customization, pass an `options` object.
     * //-
     * const options = {
     *   format: 'json',
     *   gzip: true
     * };
     *
     * table.extract(extractedFile, options, (err, apiResponse) => {});
     *
     * //-
     * // You can also specify multiple destination files.
     * //-
     * table.extract([
     *   storage.bucket('institutions').file('2014.json'),
     *   storage.bucket('institutions-copy').file('2014.json')
     * ], options, (err, apiResponse) => {});
     *
     * //-
     * // If the callback is omitted, we'll return a Promise.
     * //-
     * table.extract(extractedFile, options).then((data) => {
     *   const apiResponse = data[0];
     * });
     * ```
     */
    extract(destination: File, options?: CreateExtractJobOptions): Promise<JobMetadataResponse>;
    extract(destination: File, options: CreateExtractJobOptions, callback?: JobMetadataCallback): void;
    extract(destination: File, callback?: JobMetadataCallback): void;
    /**
     * @callback RowsCallback
     * @param {?Error} err Request error, if any.
     * @param {array} rows The rows.
     * @param {object} apiResponse The full API response.
     */
    /**
     * @typedef {array} RowsResponse
     * @property {array} 0 The rows.
     */
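    /**
     * Retrieve the rows of the table. A minimal usage sketch (option values
     * are illustrative):
     *
     * ```
     * table.getRows((err, rows) => {
     *   // `rows` is an array of objects keyed by column name.
     * });
     *
     * // Or with paging options and a Promise:
     * table.getRows({maxResults: 100}).then(([rows]) => {});
     * ```
     */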
    getRows(options?: GetRowsOptions): Promise<RowsResponse>;
    getRows(options: GetRowsOptions, callback: RowsCallback): void;
    getRows(callback: RowsCallback): void;
    /**
     * @callback InsertRowsCallback
     * @param {?Error} err Request error, if any.
     * @param {?Error} err.errors If present, these represent partial
     *     failures. It's possible for part of your request to be completed
     *     successfully, while the other part was not.
     * @param {object} apiResponse The full API response.
     */
    /**
     * @typedef {array} InsertRowsResponse
     * @property {object} 0 The full API response.
     */
    /**
     * Stream data into BigQuery one record at a time without running a load job.
     *
     * If you need to create an entire table from a file, consider using
     * {@link Table#load} instead.
     *
     * Note, if a table was recently created, inserts may fail until the table
     * is consistent within BigQuery. If a `schema` is supplied, this method will
     * automatically retry those failed inserts, and it will even create the
     * table with the provided schema if it does not exist.
     *
     * See {@link https://cloud.google.com/bigquery/docs/reference/v2/tabledata/insertAll| Tabledata: insertAll API Documentation}
     * See {@link https://cloud.google.com/bigquery/quotas#streaming_inserts| Streaming Insert Limits}
     * See {@link https://developers.google.com/bigquery/troubleshooting-errors| Troubleshooting Errors}
     *
     * @param {object|object[]} rows The rows to insert into the table.
     * @param {object} [options] Configuration object.
     * @param {boolean} [options.createInsertId=true] Automatically insert a
     *     default row id when one is not provided.
     * @param {boolean} [options.ignoreUnknownValues=false] Accept rows that contain
     *     values that do not match the schema. The unknown values are ignored.
     * @param {number} [options.partialRetries=3] Number of times to retry
     *     inserting rows for cases of partial failures.
     * @param {boolean} [options.raw] If `true`, the `rows` argument is expected to
     *     be formatted according to the
     *     {@link https://cloud.google.com/bigquery/docs/reference/v2/tabledata/insertAll| specification}.
     * @param {string|object} [options.schema] If provided will automatically
     *     create a table if it doesn't already exist. Note that this can take
     *     longer than 2 minutes to complete. A comma-separated list of
     *     name:type pairs.
     *     Valid types are "string", "integer", "float", "boolean", and
     *     "timestamp". If the type is omitted, it is assumed to be "string".
     *     Example: "name:string, age:integer". Schemas can also be specified as a
     *     JSON array of fields, which allows for nested and repeated fields. See
     *     a {@link http://goo.gl/sl8Dmg| Table resource} for more detailed information.
     * @param {boolean} [options.skipInvalidRows=false] Insert all valid rows of a
     *     request, even if invalid rows exist.
     * @param {string} [options.templateSuffix] Treat the destination table as a
     *     base template, and insert the rows into an instance table named
     *     "{destination}{templateSuffix}". BigQuery will manage creation of
     *     the instance table, using the schema of the base template table. See
     *     {@link https://cloud.google.com/bigquery/streaming-data-into-bigquery#template-tables| Automatic table creation using template tables}
     *     for considerations when working with template tables.
     * @param {InsertRowsCallback} [callback] The callback function.
     * @param {?error} callback.err An error returned while making this request.
     * @param {object[]} callback.err.errors If present, these represent partial
     *     failures. It's possible for part of your request to be completed
     *     successfully, while the other part was not.
     * @param {object} callback.apiResponse The full API response.
     * @returns {Promise<InsertRowsResponse>}
     *
     * @example
     * ```
     * const {BigQuery} = require('@google-cloud/bigquery');
     * const bigquery = new BigQuery();
     * const dataset = bigquery.dataset('my-dataset');
     * const table = dataset.table('my-table');
     *
     * //-
     * // Insert a single row.
     * //-
     * table.insert({
     *   INSTNM: 'Motion Picture Institute of Michigan',
     *   CITY: 'Troy',
     *   STABBR: 'MI'
     * }, insertHandler);
     *
     * //-
     * // Insert multiple rows at a time.
     * //-
     * const rows = [
     *   {
     *     INSTNM: 'Motion Picture Institute of Michigan',
     *     CITY: 'Troy',
     *     STABBR: 'MI'
     *   },
     *   // ...
     * ];
     *
     * table.insert(rows, insertHandler);
     *
     * //-
     * // Insert a row according to the <a href="https://cloud.google.com/bigquery/docs/reference/v2/tabledata/insertAll">specification</a>.
     * //-
     * const row = {
     *   insertId: '1',
     *   json: {
     *     INSTNM: 'Motion Picture Institute of Michigan',
     *     CITY: 'Troy',
     *     STABBR: 'MI'
     *   }
     * };
     *
     * const options = {
     *   raw: true
     * };
     *
     * table.insert(row, options, insertHandler);
     *
     * //-
     * // Handling the response. See <a href="https://developers.google.com/bigquery/troubleshooting-errors">Troubleshooting Errors</a> for best practices on how to handle errors.
     * //-
     * function insertHandler(err, apiResponse) {
     *   if (err) {
     *     // An API error or partial failure occurred.
     *
     *     if (err.name === 'PartialFailureError') {
     *       // Some rows failed to insert, while others may have succeeded.
     *
     *       // err.errors (object[]):
     *       // err.errors[].row (original row object passed to `insert`)
     *       // err.errors[].errors[].reason
     *       // err.errors[].errors[].message
     *     }
     *   }
     * }
     *
     * //-
     * // If the callback is omitted, we'll return a Promise.
     * //-
     * table.insert(rows)
     *   .then((data) => {
     *     const apiResponse = data[0];
     *   })
     *   .catch((err) => {
     *     // An API error or partial failure occurred.
     *
     *     if (err.name === 'PartialFailureError') {
     *       // Some rows failed to insert, while others may have succeeded.
     *
     *       // err.errors (object[]):
     *       // err.errors[].row (original row object passed to `insert`)
     *       // err.errors[].errors[].reason
     *       // err.errors[].errors[].message
     *     }
     *   });
     * ```
     */
    insert(rows: RowMetadata | RowMetadata[], options?: InsertRowsOptions): Promise<InsertRowsResponse>;
    insert(rows: RowMetadata | RowMetadata[], options: InsertRowsOptions, callback: InsertRowsCallback): void;
    insert(rows: RowMetadata | RowMetadata[], callback: InsertRowsCallback): void;
    /**
     * Insert rows with retries, creating the table if it does not exist.
     *
     * @param {RowMetadata | RowMetadata[]} rows
     * @param {InsertRowsOptions} options
     * @returns {Promise<bigquery.ITableDataInsertAllResponse | bigquery.ITable>}
     * @private
     */
    private _insertAndCreateTable;
    /**
     * This method will attempt to insert rows while retrying any partial failures
     * that occur along the way. Because partial insert failures are returned
     * differently, we can't depend on our usual retry strategy.
     *
     * @private
     *
     * @param {RowMetadata|RowMetadata[]} rows The rows to insert.
     * @param {InsertRowsOptions} options Insert options.
     * @returns {Promise<bigquery.ITableDataInsertAllResponse>}
     */
    private _insertWithRetry;
    /**
     * This method does the bulk of the work for processing options and making the
     * network request.
     *
     * @private
     *
     * @param {RowMetadata|RowMetadata[]} rows The rows to insert.
     * @param {InsertRowsOptions} options Insert options.
     * @returns {Promise<bigquery.ITableDataInsertAllResponse>}
     */
    private _insert;
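    /**
     * Create a writable stream that batches rows and inserts them via the
     * streaming-insert API. A minimal usage sketch (batch values and row
     * contents are illustrative):
     *
     * ```
     * const stream = table.createInsertStream({
     *   batchOptions: {maxBytes: 1024 * 1024, maxRows: 500, maxMilliseconds: 10000}
     * });
     * stream.write({name: 'Ada', age: 36});
     * stream.end();
     * ```
     */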
    createInsertStream(options?: InsertStreamOptions): Writable;
    load(source: string | File | File[], metadata?: JobLoadMetadata): Promise<JobMetadataResponse>;
    load(source: string | File | File[], metadata: JobLoadMetadata, callback: JobMetadataCallback): void;
    load(source: string | File | File[], callback: JobMetadataCallback): void;
    /**
     * Load data from a local file or Storage {@link
     * https://googleapis.dev/nodejs/storage/latest/File.html File}.
     *
     * By loading data this way, you create a load job that will run your data
     * load asynchronously. If you would like instantaneous access to your data,
     * insert it using {@link Table#insert}.
     *
     * Note: The file type will be inferred by the given file's extension. If you
     * wish to override this, you must provide `metadata.format`.
     *
     * @param {string|File|File[]} source The source file to load. A filepath as a
     *     string or one or more {@link
     *     https://googleapis.dev/nodejs/storage/latest/File.html File}
     *     objects.
     * @param {object} [metadata] Metadata to set with the load operation. The
     *     metadata object should be in the format of the
     *     {@link https://cloud.google.com/bigquery/docs/reference/rest/v2/Job#JobConfigurationLoad| `configuration.load`}
     *     property of a Jobs resource.
     * @param {string} [metadata.format] The format the data being loaded is in.
     *     Allowed options are "AVRO", "CSV", "JSON", "ORC", or "PARQUET".
     * @param {string} [metadata.jobId] Custom id for the underlying job.
     * @param {string} [metadata.jobPrefix] Prefix to apply to the underlying job
     *     id.
     * @param {JobMetadataCallback} [callback] The callback function.
     * @param {?error} callback.err An error returned while making this request
     * @param {object} callback.apiResponse The full API response.
     * @returns {Promise<JobMetadataResponse>}
     *
     * @throws {Error} If the source isn't a string file name or a File instance.
     *
     * @example
     * ```
     * const {BigQuery} = require('@google-cloud/bigquery');
     * const bigquery = new BigQuery();
     * const dataset = bigquery.dataset('my-dataset');
     * const table = dataset.table('my-table');
     *
     * //-
     * // Load data from a local file.
     * //-
     * table.load('./institutions.csv', (err, apiResponse) => {});
     *
     * //-
     * // You may also pass in metadata in the format of a Jobs resource. See
     * // (https://cloud.google.com/bigquery/docs/reference/rest/v2/Job#JobConfigurationLoad)
     * // for a full list of supported values.
     * //-
     * const metadata = {
     *   encoding: 'ISO-8859-1',
     *   sourceFormat: 'NEWLINE_DELIMITED_JSON'
     * };
     *
     * table.load('./my-data.csv', metadata, (err, apiResponse) => {});
     *
     * //-
     * // Load data from a file in your Cloud Storage bucket.
     * //-
     * const {Storage} = require('@google-cloud/storage');
     * const gcs = new Storage({
     *   projectId: 'grape-spaceship-123'
     * });
     * const data = gcs.bucket('institutions').file('data.csv');
     * table.load(data, (err, apiResponse) => {});
     *
     * //-
     * // Load data from multiple files in your Cloud Storage bucket(s).
     * //-
     * table.load([
     *   gcs.bucket('institutions').file('2011.csv'),
     *   gcs.bucket('institutions').file('2012.csv')
     * ], function(err, apiResponse) {});
     *
     * //-
     * // If the callback is omitted, we'll return a Promise.
     * //-
     * table.load(data).then(function(data) {
     *   const apiResponse = data[0];
     * });
     * ```
     */
    load(source: string | File | File[], metadata?: JobLoadMetadata): Promise<JobMetadataResponse>;
    load(source: string | File | File[], metadata: JobLoadMetadata, callback: JobMetadataCallback): void;
    load(source: string | File | File[], callback: JobMetadataCallback): void;
    /**
     * Run a query scoped to your dataset.
     *
     * See {@link BigQuery#query} for full documentation of this method.
     * @param {object} query See {@link BigQuery#query} for full documentation of this method.
     * @param {function} [callback] See {@link BigQuery#query} for full documentation of this method.
     * @returns {Promise<SimpleQueryRowsResponse>}
     */
    query(query: Query): Promise<SimpleQueryRowsResponse>;
    query(query: string): Promise<SimpleQueryRowsResponse>;
    query(query: Query, callback: SimpleQueryRowsCallback): void;
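    /*
     * A minimal sketch (the query text is illustrative):
     *
     *   table.query('SELECT COUNT(*) AS total FROM `my-dataset.my-table`')
     *     .then(([rows]) => {
     *       // `rows` is an array of result objects.
     *     });
     */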
    /**
     * Set the metadata on the table.
     *
     * See {@link https://cloud.google.com/bigquery/docs/reference/v2/tables/patch| Tables: patch API Documentation}
     *
     * @param {object} metadata The metadata key/value object to set.
     * @param {string} metadata.description A user-friendly description of the
     *     table.
     * @param {string} metadata.name A descriptive name for the table.
     * @param {string|object} metadata.schema A comma-separated list of name:type
     *     pairs. Valid types are "string", "integer", "float", "boolean",
     *     "bytes", "record", and "timestamp". If the type is omitted, it is assumed
     *     to be "string". Example: "name:string, age:integer". Schemas can also be
     *     specified as a JSON array of fields, which allows for nested and
     *     repeated fields. See a {@link http://goo.gl/sl8Dmg| Table resource} for more
     *     detailed information.
     * @param {function} [callback] The callback function.
     * @param {?error} callback.err An error returned while making this request.
     * @param {object} callback.apiResponse The full API response.
     * @returns {Promise<common.SetMetadataResponse>}
     *
     * @example
     * ```
     * const {BigQuery} = require('@google-cloud/bigquery');
     * const bigquery = new BigQuery();
     * const dataset = bigquery.dataset('my-dataset');
     * const table = dataset.table('my-table');
     *
     * const metadata = {
     *   name: 'My recipes',
     *   description: 'A table for storing my recipes.',
     *   schema: 'name:string, servings:integer, cookingTime:float, quick:boolean'
     * };
     *
     * table.setMetadata(metadata, (err, metadata, apiResponse) => {});
     *
     * //-
     * // If the callback is omitted, we'll return a Promise.
     * //-
     * table.setMetadata(metadata).then((data) => {
     *   const metadata = data[0];
     *   const apiResponse = data[1];
     * });
     * ```
     */
    setMetadata(metadata: SetTableMetadataOptions): Promise<SetMetadataResponse>;
    setMetadata(metadata: SetTableMetadataOptions, callback: ResponseCallback): void;
    /**
     * Get the IAM access control policy for the table.
     * @returns {Promise<PolicyResponse>}
     */
    getIamPolicy(optionsOrCallback?: GetPolicyOptions | PolicyCallback): Promise<PolicyResponse>;
    getIamPolicy(options: GetPolicyOptions, callback: PolicyCallback): void;
    /**
     * Set the IAM access control policy for the table.
     * @returns {Promise<PolicyResponse>}
     */
    setIamPolicy(policy: Policy, options?: SetPolicyOptions): Promise<PolicyResponse>;
    setIamPolicy(policy: Policy, options: SetPolicyOptions, callback: PolicyCallback): void;
    setIamPolicy(policy: Policy, callback: PolicyCallback): void;
    /**
     * Test whether the caller holds the given IAM permissions on the table.
     * @returns {Promise<PermissionsResponse>}
     */
    testIamPermissions(permissions: string | string[]): Promise<PermissionsResponse>;
    testIamPermissions(permissions: string | string[], callback: PermissionsCallback): void;
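    /*
     * A minimal IAM sketch, inside an async function (role, member, and
     * permission values are illustrative):
     *
     *   const [policy] = await table.getIamPolicy();
     *   policy.bindings = policy.bindings || [];
     *   policy.bindings.push({
     *     role: 'roles/bigquery.dataViewer',
     *     members: ['user:someone@example.com'],
     *   });
     *   await table.setIamPolicy(policy);
     *
     *   const [response] = await table.testIamPermissions(['bigquery.tables.get']);
     *   // response.permissions lists the permissions the caller holds.
     */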
}
/**
 * Reference to the {@link Table} class.
 * @name module:@google-cloud/bigquery.Table
 * @see Table
 */
export { Table };