/*!
 * Copyright 2014 Google Inc. All Rights Reserved.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
/// <reference types="node" />
import { ServiceObject, ResponseCallback, SetMetadataResponse } from '@google-cloud/common';
import { ResourceStream } from '@google-cloud/paginator';
import { BigQuery, Job, Dataset, Query, SimpleQueryRowsResponse, SimpleQueryRowsCallback, ResourceCallback, RequestCallback, PagedResponse, PagedCallback, JobRequest, PagedRequest } from '.';
import { Duplex, Writable } from 'stream';
import { JobMetadata } from './job';
import bigquery from './types';
import { IntegerTypeCastOptions } from './bigquery';
import { RowQueue } from './rowQueue';
export interface File {
    bucket: any;
    kmsKeyName?: string;
    userProject?: string;
    name: string;
    generation?: number;
}
export type JobMetadataCallback = RequestCallback<JobMetadata>;
export type JobMetadataResponse = [JobMetadata];
export type RowMetadata = any;
export type InsertRowsOptions = bigquery.ITableDataInsertAllRequest & {
    createInsertId?: boolean;
    partialRetries?: number;
    raw?: boolean;
    schema?: string | {};
};
export type InsertRowsResponse = [
    bigquery.ITableDataInsertAllResponse | bigquery.ITable
];
export type InsertRowsCallback = RequestCallback<bigquery.ITableDataInsertAllResponse | bigquery.ITable>;
export type RowsResponse = PagedResponse<RowMetadata, GetRowsOptions, bigquery.ITableDataList | bigquery.ITable>;
export type RowsCallback = PagedCallback<RowMetadata, GetRowsOptions, bigquery.ITableDataList | bigquery.ITable>;
export interface InsertRow {
    insertId?: string;
    json?: bigquery.IJsonObject;
}
export type TableRow = bigquery.ITableRow;
export type TableRowField = bigquery.ITableCell;
export type TableRowValue = string | TableRow;
export type GetRowsOptions = PagedRequest<bigquery.tabledata.IListParams> & {
    wrapIntegers?: boolean | IntegerTypeCastOptions;
};
export type JobLoadMetadata = JobRequest<bigquery.IJobConfigurationLoad> & {
    format?: string;
};
export type CreateExtractJobOptions = JobRequest<bigquery.IJobConfigurationExtract> & {
    format?: 'CSV' | 'JSON' | 'AVRO' | 'PARQUET' | 'ORC';
    gzip?: boolean;
};
export type JobResponse = [Job, bigquery.IJob];
export type JobCallback = ResourceCallback<Job, bigquery.IJob>;
export type CreateCopyJobMetadata = CopyTableMetadata;
export type SetTableMetadataOptions = TableMetadata;
export type CopyTableMetadata = JobRequest<bigquery.IJobConfigurationTableCopy>;
export type TableMetadata = bigquery.ITable & {
    name?: string;
    schema?: string | TableField[] | TableSchema;
    partitioning?: string;
    view?: string | ViewDefinition;
};
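/*
 * A minimal sketch of a `TableMetadata` value. The shorthand semantics shown
 * here are assumptions, not guarantees: a string `schema` read as
 * comma-separated name:type pairs, and `partitioning` read as a
 * time-partitioning type.
 *
 * const metadata: TableMetadata = {
 *     name: 'My table',
 *     schema: 'name:string, age:integer',
 *     partitioning: 'DAY',
 * };
 */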
export type ViewDefinition = bigquery.IViewDefinition;
export type FormattedMetadata = bigquery.ITable;
export type TableSchema = bigquery.ITableSchema;
export type TableField = bigquery.ITableFieldSchema;
export interface PartialInsertFailure {
    message: string;
    reason: string;
    row: RowMetadata;
}
export type Policy = bigquery.IPolicy;
export type GetPolicyOptions = bigquery.IGetPolicyOptions;
export type SetPolicyOptions = Omit<bigquery.ISetIamPolicyRequest, 'policy'>;
export type PolicyRequest = bigquery.IGetIamPolicyRequest;
export type PolicyResponse = [Policy];
export type PolicyCallback = RequestCallback<PolicyResponse>;
export type PermissionsResponse = [bigquery.ITestIamPermissionsResponse];
export type PermissionsCallback = RequestCallback<PermissionsResponse>;
export interface InsertStreamOptions {
    insertRowsOptions?: InsertRowsOptions;
    batchOptions?: RowBatchOptions;
}
export interface RowBatchOptions {
    maxBytes: number;
    maxRows: number;
    maxMilliseconds: number;
}
export interface TableOptions {
    location?: string;
}
/**
 * Table objects are returned by methods such as
 * {@link Dataset#table}, {@link Dataset#createTable}, and
 * {@link Dataset#getTables}.
 *
 * @class
 * @param {Dataset} dataset {@link Dataset} instance.
 * @param {string} id The ID of the table.
 * @param {object} [options] Table options.
 * @param {string} [options.location] The geographic location of the table, by
 *     default this value is inherited from the dataset. This can be used to
 *     configure the location of all jobs created through a table instance. It
 *     cannot be used to set the actual location of the table. This value will
 *     be superseded by any API responses containing location data for the
 *     table.
 *
 * @example
 * ```
 * const {BigQuery} = require('@google-cloud/bigquery');
 * const bigquery = new BigQuery();
 * const dataset = bigquery.dataset('my-dataset');
 *
 * const table = dataset.table('my-table');
 * ```
 */
declare class Table extends ServiceObject {
    dataset: Dataset;
    bigQuery: BigQuery;
    location?: string;
    rowQueue?: RowQueue;
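    /**
     * Create a readable stream of the rows of data in your table. A minimal
     * usage sketch follows; the row handling shown is illustrative, and the
     * stream emits the standard readable-stream events.
     *
     * @example
     * ```
     * table.createReadStream()
     *   .on('error', console.error)
     *   .on('data', (row) => {
     *     // `row` is a RowMetadata object for a single table row.
     *   })
     *   .on('end', () => {
     *     // All rows retrieved.
     *   });
     * ```
     */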
    createReadStream(options?: GetRowsOptions): ResourceStream<RowMetadata>;
    constructor(dataset: Dataset, id: string, options?: TableOptions);
    /**
     * Convert a comma-separated name:type string to a table schema object.
     *
     * @static
     * @private
     *
     * @param {string} str Comma-separated schema string.
     * @returns {object} Table schema in the format the API expects.
     */
    static createSchemaFromString_(str: string): TableSchema;
    /**
     * Convert a row entry from native types to their encoded types that the API
     * expects.
     *
     * @static
     * @private
     *
     * @param {*} value The value to be converted.
     * @returns {*} The converted value.
     */
    static encodeValue_(value?: {} | null): {} | null;
    /**
     * @private
     */
    static formatMetadata_(options: TableMetadata): FormattedMetadata;
    /**
     * @callback JobMetadataCallback
     * @param {?Error} err Request error, if any.
     * @param {object} apiResponse The full API response.
     */
    /**
     * @typedef {array} JobMetadataResponse
     * @property {object} 0 The full API response.
     */
    /**
     * Copy data from one table to another, optionally creating that table.
     *
     * @param {Table} destination The destination table.
     * @param {object} [metadata] Metadata to set with the copy operation. The
     *     metadata object should be in the format of a
     *     {@link https://cloud.google.com/bigquery/docs/reference/rest/v2/Job#JobConfigurationTableCopy| `JobConfigurationTableCopy`}
     *     object.
     * @param {string} [metadata.jobId] Custom id for the underlying job.
     * @param {string} [metadata.jobPrefix] Prefix to apply to the underlying job
     *     id.
     * @param {JobMetadataCallback} [callback] The callback function.
     * @param {?error} callback.err An error returned while making this request.
     * @param {object} callback.apiResponse The full API response.
     * @returns {Promise<JobMetadataResponse>}
     *
     * @throws {Error} If a destination other than a Table object is provided.
     *
     * @example
     * ```
     * const {BigQuery} = require('@google-cloud/bigquery');
     * const bigquery = new BigQuery();
     * const dataset = bigquery.dataset('my-dataset');
     *
     * const table = dataset.table('my-table');
     * const yourTable = dataset.table('your-table');
     *
     * table.copy(yourTable, (err, apiResponse) => {});
     *
     * //-
     * // See https://cloud.google.com/bigquery/docs/reference/rest/v2/Job#JobConfigurationTableCopy
     * // for all available options.
     * //-
     * const metadata = {
     *   createDisposition: 'CREATE_NEVER',
     *   writeDisposition: 'WRITE_TRUNCATE'
     * };
     *
     * table.copy(yourTable, metadata, (err, apiResponse) => {});
     *
     * //-
     * // If the callback is omitted, we'll return a Promise.
     * //-
     * table.copy(yourTable, metadata).then((data) => {
     *   const apiResponse = data[0];
     * });
     * ```
     */
    copy(destination: Table, metadata?: CopyTableMetadata): Promise<JobMetadataResponse>;
    copy(destination: Table, metadata: CopyTableMetadata, callback: JobMetadataCallback): void;
    copy(destination: Table, callback: JobMetadataCallback): void;
    /**
     * @callback JobMetadataCallback
     * @param {?Error} err Request error, if any.
     * @param {object} apiResponse The full API response.
     */
    /**
     * @typedef {array} JobMetadataResponse
     * @property {object} 0 The full API response.
     */
    /**
     * Copy data from multiple tables into this table.
     *
     * @param {Table|Table[]} sourceTables The source table(s) to copy data from.
     * @param {object} [metadata] Metadata to set with the copy operation. The
     *     metadata object should be in the format of a
     *     {@link https://cloud.google.com/bigquery/docs/reference/rest/v2/Job#JobConfigurationTableCopy| `JobConfigurationTableCopy`}
     *     object.
     * @param {string} [metadata.jobId] Custom id for the underlying job.
     * @param {string} [metadata.jobPrefix] Prefix to apply to the underlying job
     *     id.
     * @param {JobMetadataCallback} [callback] The callback function.
     * @param {?error} callback.err An error returned while making this request.
     * @param {object} callback.apiResponse The full API response.
     * @returns {Promise<JobMetadataResponse>}
     *
     * @throws {Error} If a source other than a Table object is provided.
     *
     * @example
     * ```
     * const {BigQuery} = require('@google-cloud/bigquery');
     * const bigquery = new BigQuery();
     * const dataset = bigquery.dataset('my-dataset');
     * const table = dataset.table('my-table');
     *
     * const sourceTables = [
     *   dataset.table('your-table'),
     *   dataset.table('your-second-table')
     * ];
     *
     * table.copyFrom(sourceTables, (err, apiResponse) => {});
     *
     * //-
     * // See https://cloud.google.com/bigquery/docs/reference/rest/v2/Job#JobConfigurationTableCopy
     * // for all available options.
     * //-
     * const metadata = {
     *   createDisposition: 'CREATE_NEVER',
     *   writeDisposition: 'WRITE_TRUNCATE'
     * };
     *
     * table.copyFrom(sourceTables, metadata, (err, apiResponse) => {});
     *
     * //-
     * // If the callback is omitted, we'll return a Promise.
     * //-
     * table.copyFrom(sourceTables, metadata).then((data) => {
     *   const apiResponse = data[0];
     * });
     * ```
     */
    copyFrom(sourceTables: Table | Table[], metadata?: CopyTableMetadata): Promise<JobMetadataResponse>;
    copyFrom(sourceTables: Table | Table[], metadata: CopyTableMetadata, callback: JobMetadataCallback): void;
    copyFrom(sourceTables: Table | Table[], callback: JobMetadataCallback): void;
    /**
     * Copy data from one table to another, optionally creating that table.
     *
     * See {@link https://cloud.google.com/bigquery/docs/reference/v2/jobs/insert| Jobs: insert API Documentation}
     *
     * @param {Table} destination The destination table.
     * @param {object} [metadata] Metadata to set with the copy operation. The
     *     metadata object should be in the format of a
     *     {@link https://cloud.google.com/bigquery/docs/reference/rest/v2/Job#JobConfigurationTableCopy| `JobConfigurationTableCopy`}
     *     object.
     * @param {string} [metadata.jobId] Custom job id.
     * @param {string} [metadata.jobPrefix] Prefix to apply to the job id.
     * @param {JobCallback} [callback] The callback function.
     * @param {?error} callback.err An error returned while making this request.
     * @param {Job} callback.job The job used to copy your table.
     * @param {object} callback.apiResponse The full API response.
     * @returns {Promise<JobResponse>}
     *
     * @throws {Error} If a destination other than a Table object is provided.
     *
     * @example
     * ```
     * const {BigQuery} = require('@google-cloud/bigquery');
     * const bigquery = new BigQuery();
     * const dataset = bigquery.dataset('my-dataset');
     * const table = dataset.table('my-table');
     *
     * const yourTable = dataset.table('your-table');
     * table.createCopyJob(yourTable, (err, job, apiResponse) => {
     *   // `job` is a Job object that can be used to check the status of the
     *   // request.
     * });
     *
     * //-
     * // See https://cloud.google.com/bigquery/docs/reference/rest/v2/Job#JobConfigurationTableCopy
     * // for all available options.
     * //-
     * const metadata = {
     *   createDisposition: 'CREATE_NEVER',
     *   writeDisposition: 'WRITE_TRUNCATE'
     * };
     *
     * table.createCopyJob(yourTable, metadata, (err, job, apiResponse) => {});
     *
     * //-
     * // If the callback is omitted, we'll return a Promise.
     * //-
     * table.createCopyJob(yourTable, metadata).then((data) => {
     *   const job = data[0];
     *   const apiResponse = data[1];
     * });
     * ```
     */
    createCopyJob(destination: Table, metadata?: CreateCopyJobMetadata): Promise<JobResponse>;
    createCopyJob(destination: Table, metadata: CreateCopyJobMetadata, callback: JobCallback): void;
    createCopyJob(destination: Table, callback: JobCallback): void;
    /**
     * Copy data from multiple tables into this table.
     *
     * See {@link https://cloud.google.com/bigquery/docs/reference/v2/jobs/insert| Jobs: insert API Documentation}
     *
     * @param {Table|Table[]} sourceTables The source table(s) to copy data from.
     * @param {object} [metadata] Metadata to set with the copy operation. The
     *     metadata object should be in the format of a
     *     {@link https://cloud.google.com/bigquery/docs/reference/rest/v2/Job#JobConfigurationTableCopy| `JobConfigurationTableCopy`}
     *     object.
     * @param {string} [metadata.jobId] Custom job id.
     * @param {string} [metadata.jobPrefix] Prefix to apply to the job id.
     * @param {JobCallback} [callback] The callback function.
     * @param {?error} callback.err An error returned while making this request.
     * @param {Job} callback.job The job used to copy your table.
     * @param {object} callback.apiResponse The full API response.
     * @returns {Promise<JobResponse>}
     *
     * @throws {Error} If a source other than a Table object is provided.
     *
     * @example
     * ```
     * const {BigQuery} = require('@google-cloud/bigquery');
     * const bigquery = new BigQuery();
     * const dataset = bigquery.dataset('my-dataset');
     * const table = dataset.table('my-table');
     *
     * const sourceTables = [
     *   dataset.table('your-table'),
     *   dataset.table('your-second-table')
     * ];
     *
     * const callback = (err, job, apiResponse) => {
     *   // `job` is a Job object that can be used to check the status of the
     *   // request.
     * };
     *
     * table.createCopyFromJob(sourceTables, callback);
     *
     * //-
     * // See https://cloud.google.com/bigquery/docs/reference/rest/v2/Job#JobConfigurationTableCopy
     * // for all available options.
     * //-
     * const metadata = {
     *   createDisposition: 'CREATE_NEVER',
     *   writeDisposition: 'WRITE_TRUNCATE'
     * };
     *
     * table.createCopyFromJob(sourceTables, metadata, callback);
     *
     * //-
     * // If the callback is omitted, we'll return a Promise.
     * //-
     * table.createCopyFromJob(sourceTables, metadata).then((data) => {
     *   const job = data[0];
     *   const apiResponse = data[1];
     * });
     * ```
     */
    createCopyFromJob(source: Table | Table[], metadata?: CopyTableMetadata): Promise<JobResponse>;
    createCopyFromJob(source: Table | Table[], metadata: CopyTableMetadata, callback: JobCallback): void;
    createCopyFromJob(source: Table | Table[], callback: JobCallback): void;
    /**
     * Export table to Cloud Storage.
     *
     * See {@link https://cloud.google.com/bigquery/docs/reference/v2/jobs/insert| Jobs: insert API Documentation}
     *
     * @param {string|File} destination Where the file should be exported
     *     to. A string or a {@link
     *     https://googleapis.dev/nodejs/storage/latest/File.html File}
     *     object.
     * @param {object} [options] The configuration object.
     * @param {string} [options.format="CSV"] The format to export the data in.
     *     Allowed options are "CSV", "JSON", "AVRO", "PARQUET", or "ORC".
     * @param {boolean} [options.gzip] Specify if you would like the file
     *     compressed with GZIP. Default: false.
     * @param {string} [options.jobId] Custom job id.
     * @param {string} [options.jobPrefix] Prefix to apply to the job id.
     * @param {JobCallback} [callback] The callback function.
     * @param {?error} callback.err An error returned while making this request.
     * @param {Job} callback.job The job used to export the table.
     * @param {object} callback.apiResponse The full API response.
     * @returns {Promise<JobResponse>}
     *
     * @throws {Error} If destination isn't a File object.
     * @throws {Error} If destination format isn't recognized.
     *
     * @example
     * ```
     * const {Storage} = require('@google-cloud/storage');
     * const {BigQuery} = require('@google-cloud/bigquery');
     * const bigquery = new BigQuery();
     * const dataset = bigquery.dataset('my-dataset');
     * const table = dataset.table('my-table');
     *
     * const storage = new Storage({
     *   projectId: 'grape-spaceship-123'
     * });
     * const extractedFile = storage.bucket('institutions').file('2014.csv');
     *
     * function callback(err, job, apiResponse) {
     *   // `job` is a Job object that can be used to check the status of the
     *   // request.
     * }
     *
     * //-
     * // To use the default options, just pass a {@link
     * // https://googleapis.dev/nodejs/storage/latest/File.html File}
     * // object.
     * //
     * // Note: The exported format type will be inferred from the file's
     * // extension. If you wish to override this, or provide an array of
     * // destination files, you must provide an `options` object.
     * //-
     * table.createExtractJob(extractedFile, callback);
     *
     * //-
     * // If you need more customization, pass an `options` object.
     * //-
     * const options = {
     *   format: 'json',
     *   gzip: true
     * };
     *
     * table.createExtractJob(extractedFile, options, callback);
     *
     * //-
     * // You can also specify multiple destination files.
     * //-
     * table.createExtractJob([
     *   storage.bucket('institutions').file('2014.json'),
     *   storage.bucket('institutions-copy').file('2014.json')
     * ], options, callback);
     *
     * //-
     * // If the callback is omitted, we'll return a Promise.
     * //-
     * table.createExtractJob(extractedFile, options).then((data) => {
     *   const job = data[0];
     *   const apiResponse = data[1];
     * });
     * ```
     */
    createExtractJob(destination: File, options?: CreateExtractJobOptions): Promise<JobResponse>;
    createExtractJob(destination: File, options: CreateExtractJobOptions, callback: JobCallback): void;
    createExtractJob(destination: File, callback: JobCallback): void;
    /**
     * Load data from a local file or Storage {@link
     * https://googleapis.dev/nodejs/storage/latest/File.html File}.
     *
     * By loading data this way, you create a load job that will run your data
     * load asynchronously. If you would like instantaneous access to your data,
     * insert it using {@link Table#insert}.
     *
     * Note: The file type will be inferred from the given file's extension. If
     * you wish to override this, you must provide `metadata.format`.
     *
     * See {@link https://cloud.google.com/bigquery/docs/reference/v2/jobs/insert| Jobs: insert API Documentation}
     *
     * @param {string|File|File[]} source The source file to load. A string (path)
     *     to a local file, or one or more {@link
     *     https://googleapis.dev/nodejs/storage/latest/File.html File}
     *     objects.
     * @param {object} [metadata] Metadata to set with the load operation. The
     *     metadata object should be in the format of the
     *     {@link https://cloud.google.com/bigquery/docs/reference/rest/v2/Job#JobConfigurationLoad| `configuration.load`}
     *     property of a Jobs resource.
     * @param {string} [metadata.format] The format the data being loaded is in.
     *     Allowed options are "AVRO", "CSV", "JSON", "ORC", or "PARQUET".
     * @param {string} [metadata.jobId] Custom job id.
     * @param {string} [metadata.jobPrefix] Prefix to apply to the job id.
     * @param {JobCallback} [callback] The callback function.
     * @param {?error} callback.err An error returned while making this request.
     * @param {Job} callback.job The job used to load your data.
     * @param {object} callback.apiResponse The full API response.
     * @returns {Promise<JobResponse>}
     *
     * @throws {Error} If the source isn't a string file name or a File instance.
     *
     * @example
     * ```
     * const {Storage} = require('@google-cloud/storage');
     * const {BigQuery} = require('@google-cloud/bigquery');
     * const bigquery = new BigQuery();
     * const dataset = bigquery.dataset('my-dataset');
     * const table = dataset.table('my-table');
     *
     * //-
     * // Load data from a local file.
     * //-
     * const callback = (err, job, apiResponse) => {
     *   // `job` is a Job object that can be used to check the status of the
     *   // request.
     * };
     *
     * table.createLoadJob('./institutions.csv', callback);
     *
     * //-
     * // You may also pass in metadata in the format of a Jobs resource. See
     * // (https://cloud.google.com/bigquery/docs/reference/rest/v2/Job#JobConfigurationLoad)
     * // for a full list of supported values.
     * //-
     * const metadata = {
     *   encoding: 'ISO-8859-1',
     *   sourceFormat: 'NEWLINE_DELIMITED_JSON'
     * };
     *
     * table.createLoadJob('./my-data.csv', metadata, callback);
     *
     * //-
     * // Load data from a file in your Cloud Storage bucket.
     * //-
     * const storage = new Storage({
     *   projectId: 'grape-spaceship-123'
     * });
     * const data = storage.bucket('institutions').file('data.csv');
     * table.createLoadJob(data, callback);
     *
     * //-
     * // Load data from multiple files in your Cloud Storage bucket(s).
     * //-
     * table.createLoadJob([
     *   storage.bucket('institutions').file('2011.csv'),
     *   storage.bucket('institutions').file('2012.csv')
     * ], callback);
     *
     * //-
     * // If the callback is omitted, we'll return a Promise.
     * //-
     * table.createLoadJob(data).then((data) => {
     *   const job = data[0];
     *   const apiResponse = data[1];
     * });
     * ```
     */
    createLoadJob(source: string | File | File[], metadata?: JobLoadMetadata): Promise<JobResponse>;
    createLoadJob(source: string | File | File[], metadata: JobLoadMetadata, callback: JobCallback): void;
    createLoadJob(source: string | File | File[], callback: JobCallback): void;
    /**
     * @param {string | File | File[]} source
     * @param {JobLoadMetadata} metadata
     * @returns {Promise<JobResponse>}
     * @private
     */
    _createLoadJob(source: string | File | File[], metadata: JobLoadMetadata): Promise<JobResponse>;
    /**
     * Run a query as a job. No results are immediately returned. Instead, your
     * callback will be executed with a {@link Job} object that you must
     * ping for the results. See the Job documentation for explanations of how to
     * check on the status of the job.
     *
     * See {@link BigQuery#createQueryJob} for full documentation of this method.
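     *
     * @example
     * ```
     * // A minimal sketch: start a query job, then poll the returned Job for
     * // results. The query string is a placeholder.
     * table.createQueryJob({
     *   query: 'SELECT name FROM `my-dataset.my-table` LIMIT 100'
     * }).then(([job]) => job.getQueryResults())
     *   .then(([rows]) => {
     *     // `rows` contains the query results.
     *   });
     * ```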
     */
    createQueryJob(options: Query): Promise<JobResponse>;
    createQueryJob(options: Query, callback: JobCallback): void;
    /**
     * Run a query scoped to your dataset as a readable object stream.
     *
     * See {@link BigQuery#createQueryStream} for full documentation of this
     * method.
     *
     * @param {object} query See {@link BigQuery#createQueryStream} for full
     *     documentation of this method.
     * @returns {stream} See {@link BigQuery#createQueryStream} for full
     *     documentation of this method.
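     *
     * @example
     * ```
     * // A minimal sketch; the query string is a placeholder.
     * table.createQueryStream({query: 'SELECT name FROM `my-dataset.my-table`'})
     *   .on('error', console.error)
     *   .on('data', (row) => {
     *     // `row` is a single result row object.
     *   })
     *   .on('end', () => {
     *     // All rows retrieved.
     *   });
     * ```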
     */
    createQueryStream(query: Query): Duplex;
    /**
     * Creates a write stream. Unlike the public version, this will not
     * automatically poll the underlying job.
     *
     * @private
     *
     * @param {string|object} [metadata] Metadata to set with the load operation.
     *     The metadata object should be in the format of the
     *     {@link https://cloud.google.com/bigquery/docs/reference/rest/v2/Job#JobConfigurationLoad| `configuration.load`}
     *     property of a Jobs resource. If a string is given, it will be used
     *     as the filetype.
     * @param {string} [metadata.jobId] Custom job id.
     * @param {string} [metadata.jobPrefix] Prefix to apply to the job id.
     * @returns {WritableStream}
     */
    createWriteStream_(metadata: JobLoadMetadata | string): Writable;
    /**
     * Load data into your table from a readable stream of AVRO, CSV, JSON, ORC,
     * or PARQUET data.
     *
     * See {@link https://cloud.google.com/bigquery/docs/reference/v2/jobs/insert| Jobs: insert API Documentation}
     *
     * @param {string|object} [metadata] Metadata to set with the load operation.
     *     The metadata object should be in the format of the
     *     {@link https://cloud.google.com/bigquery/docs/reference/rest/v2/Job#JobConfigurationLoad| `configuration.load`}
     *     property of a Jobs resource. If a string is given,
     *     it will be used as the filetype.
     * @param {string} [metadata.jobId] Custom job id.
     * @param {string} [metadata.jobPrefix] Prefix to apply to the job id.
     * @returns {WritableStream}
     *
     * @throws {Error} If source format isn't recognized.
     *
     * @example
     * ```
     * const {BigQuery} = require('@google-cloud/bigquery');
     * const bigquery = new BigQuery();
     * const dataset = bigquery.dataset('my-dataset');
     * const table = dataset.table('my-table');
     *
     * //-
     * // Load data from a CSV file.
     * //-
     * const request = require('request');
     *
     * const csvUrl = 'http://goo.gl/kSE7z6';
     *
     * const metadata = {
     *   allowJaggedRows: true,
     *   skipLeadingRows: 1
     * };
     *
     * request.get(csvUrl)
     *   .pipe(table.createWriteStream(metadata))
     *   .on('job', (job) => {
     *     // `job` is a Job object that can be used to check the status of the
     *     // request.
     *   })
     *   .on('complete', (job) => {
     *     // The job has completed successfully.
     *   });
     *
     * //-
     * // Load data from a JSON file.
     * //-
     * const fs = require('fs');
     *
     * fs.createReadStream('./test/testdata/testfile.json')
     *   .pipe(table.createWriteStream('json'))
     *   .on('job', (job) => {
     *     // `job` is a Job object that can be used to check the status of the
     *     // request.
     *   })
     *   .on('complete', (job) => {
     *     // The job has completed successfully.
     *   });
     * ```
     */
    createWriteStream(metadata: JobLoadMetadata | string): Writable;
    /**
     * Export table to Cloud Storage.
     *
     * @param {string|File} destination Where the file should be exported
     *     to. A string or a {@link
     *     https://googleapis.dev/nodejs/storage/latest/File.html File}.
     * @param {object} [options] The configuration object.
     * @param {string} [options.format="CSV"] The format to export the data in.
     *     Allowed options are "AVRO", "CSV", "JSON", "ORC", or "PARQUET".
     * @param {boolean} [options.gzip] Specify if you would like the file
     *     compressed with GZIP. Default: false.
     * @param {string} [options.jobId] Custom id for the underlying job.
     * @param {string} [options.jobPrefix] Prefix to apply to the underlying job id.
     * @param {JobMetadataCallback} [callback] The callback function.
     * @param {?error} callback.err An error returned while making this request.
     * @param {object} callback.apiResponse The full API response.
     * @returns {Promise<JobMetadataResponse>}
     *
     * @throws {Error} If destination isn't a File object.
     * @throws {Error} If destination format isn't recognized.
     *
     * @example
     * ```
     * const {Storage} = require('@google-cloud/storage');
     * const {BigQuery} = require('@google-cloud/bigquery');
     * const bigquery = new BigQuery();
     * const dataset = bigquery.dataset('my-dataset');
     * const table = dataset.table('my-table');
     *
     * const storage = new Storage({
     *   projectId: 'grape-spaceship-123'
     * });
     * const extractedFile = storage.bucket('institutions').file('2014.csv');
     *
     * //-
     * // To use the default options, just pass a {@link
     * // https://googleapis.dev/nodejs/storage/latest/File.html File}
     * // object.
     * //
     * // Note: The exported format type will be inferred from the file's
     * // extension. If you wish to override this, or provide an array of
     * // destination files, you must provide an `options` object.
     * //-
     * table.extract(extractedFile, (err, apiResponse) => {});
     *
     * //-
     * // If you need more customization, pass an `options` object.
     * //-
     * const options = {
     *   format: 'json',
     *   gzip: true
     * };
     *
     * table.extract(extractedFile, options, (err, apiResponse) => {});
     *
     * //-
     * // You can also specify multiple destination files.
     * //-
     * table.extract([
     *   storage.bucket('institutions').file('2014.json'),
     *   storage.bucket('institutions-copy').file('2014.json')
     * ], options, (err, apiResponse) => {});
     *
     * //-
     * // If the callback is omitted, we'll return a Promise.
     * //-
     * table.extract(extractedFile, options).then((data) => {
     *   const apiResponse = data[0];
     * });
     * ```
     */
    extract(destination: File, options?: CreateExtractJobOptions): Promise<JobMetadataResponse>;
    extract(destination: File, options: CreateExtractJobOptions, callback?: JobMetadataCallback): void;
    extract(destination: File, callback?: JobMetadataCallback): void;
    /**
     * @callback RowsCallback
     * @param {?Error} err Request error, if any.
     * @param {array} rows The rows.
     * @param {object} apiResponse The full API response.
     */
    /**
     * @typedef {array} RowsResponse
     * @property {array} 0 The rows.
     */
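    /**
     * Retrieve data from the table via the
     * {@link https://cloud.google.com/bigquery/docs/reference/v2/tabledata/list| Tabledata: list}
     * API.
     *
     * @param {object} [options] The configuration object.
     * @param {RowsCallback} [callback] The callback function.
     * @returns {Promise<RowsResponse>}
     *
     * @example
     * ```
     * // A minimal sketch. `maxResults` is one of the standard tabledata.list
     * // paging parameters accepted by GetRowsOptions.
     * table.getRows({maxResults: 100}, (err, rows) => {
     *   if (!err) {
     *     // `rows` is an array of row objects.
     *   }
     * });
     *
     * // If the callback is omitted, we'll return a Promise.
     * table.getRows().then(([rows]) => {});
     * ```
     */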
    getRows(options?: GetRowsOptions): Promise<RowsResponse>;
    getRows(options: GetRowsOptions, callback: RowsCallback): void;
    getRows(callback: RowsCallback): void;
    /**
     * @callback InsertRowsCallback
     * @param {?Error} err Request error, if any.
     * @param {?Error} err.errors If present, these represent partial
     *     failures. It's possible for part of your request to be completed
     *     successfully, while the other part was not.
     * @param {object} apiResponse The full API response.
     */
    /**
     * @typedef {array} InsertRowsResponse
     * @property {object} 0 The full API response.
     */
    /**
     * Stream data into BigQuery one record at a time without running a load job.
     *
     * If you need to create an entire table from a file, consider using
     * {@link Table#load} instead.
     *
     * Note: if a table was recently created, inserts may fail until the table
     * is consistent within BigQuery. If a `schema` is supplied, this method will
     * automatically retry those failed inserts, and it will even create the
     * table with the provided schema if it does not exist.
     *
     * See {@link https://cloud.google.com/bigquery/docs/reference/v2/tabledata/insertAll| Tabledata: insertAll API Documentation}
     * See {@link https://cloud.google.com/bigquery/quotas#streaming_inserts| Streaming Insert Limits}
     * See {@link https://developers.google.com/bigquery/troubleshooting-errors| Troubleshooting Errors}
     *
     * @param {object|object[]} rows The rows to insert into the table.
     * @param {object} [options] Configuration object.
     * @param {boolean} [options.createInsertId=true] Automatically insert a
     *     default row id when one is not provided.
     * @param {boolean} [options.ignoreUnknownValues=false] Accept rows that
     *     contain values that do not match the schema. The unknown values are
     *     ignored.
     * @param {number} [options.partialRetries=3] Number of times to retry
     *     inserting rows for cases of partial failures.
     * @param {boolean} [options.raw] If `true`, the `rows` argument is expected
     *     to be formatted according to the
     *     {@link https://cloud.google.com/bigquery/docs/reference/v2/tabledata/insertAll| specification}.
     * @param {string|object} [options.schema] If provided, will automatically
     *     create a table if it doesn't already exist. Note that this can take
     *     longer than 2 minutes to complete. A comma-separated list of
     *     name:type pairs.
     *     Valid types are "string", "integer", "float", "boolean", and
     *     "timestamp". If the type is omitted, it is assumed to be "string".
     *     Example: "name:string, age:integer". Schemas can also be specified as a
     *     JSON array of fields, which allows for nested and repeated fields. See
     *     a {@link http://goo.gl/sl8Dmg| Table resource} for more detailed
     *     information.
     * @param {boolean} [options.skipInvalidRows=false] Insert all valid rows of a
     *     request, even if invalid rows exist.
     * @param {string} [options.templateSuffix] Treat the destination table as a
     *     base template, and insert the rows into an instance table named
     *     "{destination}{templateSuffix}". BigQuery will manage creation of
     *     the instance table, using the schema of the base template table. See
     *     {@link https://cloud.google.com/bigquery/streaming-data-into-bigquery#template-tables| Automatic table creation using template tables}
     *     for considerations when working with template tables.
     * @param {InsertRowsCallback} [callback] The callback function.
     * @param {?error} callback.err An error returned while making this request.
     * @param {object[]} callback.err.errors If present, these represent partial
     *     failures. It's possible for part of your request to be completed
     *     successfully, while the other part was not.
     * @param {object} callback.apiResponse The full API response.
     * @returns {Promise<InsertRowsResponse>}
     *
     * @example
     * ```
     * const {BigQuery} = require('@google-cloud/bigquery');
     * const bigquery = new BigQuery();
     * const dataset = bigquery.dataset('my-dataset');
     * const table = dataset.table('my-table');
     *
     * //-
     * // Insert a single row.
     * //-
     * table.insert({
     *   INSTNM: 'Motion Picture Institute of Michigan',
     *   CITY: 'Troy',
     *   STABBR: 'MI'
     * }, insertHandler);
     *
     * //-
     * // Insert multiple rows at a time.
     * //-
     * const rows = [
     *   {
     *     INSTNM: 'Motion Picture Institute of Michigan',
     *     CITY: 'Troy',
     *     STABBR: 'MI'
     *   },
     *   // ...
     * ];
     *
     * table.insert(rows, insertHandler);
     *
     * //-
     * // Insert a row according to the <a href="https://cloud.google.com/bigquery/docs/reference/v2/tabledata/insertAll">specification</a>.
     * //-
     * const row = {
     *   insertId: '1',
     *   json: {
     *     INSTNM: 'Motion Picture Institute of Michigan',
     *     CITY: 'Troy',
     *     STABBR: 'MI'
     *   }
     * };
     *
     * const options = {
     *   raw: true
     * };
     *
     * table.insert(row, options, insertHandler);
     *
     * //-
     * // Handling the response. See <a href="https://developers.google.com/bigquery/troubleshooting-errors">Troubleshooting Errors</a> for best practices on how to handle errors.
     * //-
     * function insertHandler(err, apiResponse) {
     *   if (err) {
     *     // An API error or partial failure occurred.
     *
     *     if (err.name === 'PartialFailureError') {
     *       // Some rows failed to insert, while others may have succeeded.
     *
     *       // err.errors (object[]):
     *       // err.errors[].row (original row object passed to `insert`)
     *       // err.errors[].errors[].reason
     *       // err.errors[].errors[].message
     *     }
     *   }
     * }
     *
     * //-
     * // If the callback is omitted, we'll return a Promise.
     * //-
     * table.insert(rows)
     *   .then((data) => {
     *     const apiResponse = data[0];
     *   })
     *   .catch((err) => {
     *     // An API error or partial failure occurred.
     *
     *     if (err.name === 'PartialFailureError') {
     *       // Some rows failed to insert, while others may have succeeded.
     *
     *       // err.errors (object[]):
     *       // err.errors[].row (original row object passed to `insert`)
     *       // err.errors[].errors[].reason
     *       // err.errors[].errors[].message
     *     }
     *   });
     * ```
     */
    insert(rows: RowMetadata | RowMetadata[], options?: InsertRowsOptions): Promise<InsertRowsResponse>;
    insert(rows: RowMetadata | RowMetadata[], options: InsertRowsOptions, callback: InsertRowsCallback): void;
    insert(rows: RowMetadata | RowMetadata[], callback: InsertRowsCallback): void;
    /**
     * Insert rows with retries, creating the table if it does not already exist.
     *
     * @param {RowMetadata | RowMetadata[]} rows
     * @param {InsertRowsOptions} options
     * @returns {Promise<bigquery.ITableDataInsertAllResponse | bigquery.ITable>}
     * @private
     */
    private _insertAndCreateTable;
    /**
     * This method will attempt to insert rows while retrying any partial failures
     * that occur along the way. Because partial insert failures are returned
     * differently, we can't depend on our usual retry strategy.
     *
     * @private
     *
     * @param {RowMetadata|RowMetadata[]} rows The rows to insert.
     * @param {InsertRowsOptions} options Insert options.
     * @returns {Promise<bigquery.ITableDataInsertAllResponse>}
     */
    private _insertWithRetry;
    /**
     * This method does the bulk of the work for processing options and making the
     * network request.
     *
     * @private
     *
     * @param {RowMetadata|RowMetadata[]} rows The rows to insert.
     * @param {InsertRowsOptions} options Insert options.
     * @returns {Promise<bigquery.ITableDataInsertAllResponse>}
     */
    private _insert;
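    /**
     * Create a writable stream that batches rows and inserts them into the
     * table via the streaming insert API.
     *
     * @example
     * ```
     * // A minimal sketch. The batch limits shown are illustrative values,
     * // not library defaults.
     * const stream = table.createInsertStream({
     *   batchOptions: {maxBytes: 1024 * 1024, maxRows: 500, maxMilliseconds: 10000}
     * });
     * stream.on('error', console.error);
     * stream.write({name: 'Ada', age: 36});
     * stream.end();
     * ```
     */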
    createInsertStream(options?: InsertStreamOptions): Writable;
    /**
     * Load data from a local file or Storage {@link
     * https://googleapis.dev/nodejs/storage/latest/File.html File}.
     *
     * By loading data this way, you create a load job that will run your data
     * load asynchronously. If you would like instantaneous access to your data,
     * insert it using {@link Table#insert}.
     *
     * Note: The file type will be inferred from the given file's extension. If
     * you wish to override this, you must provide `metadata.format`.
     *
     * @param {string|File} source The source file to load. A filepath as a string
     *     or a {@link
     *     https://googleapis.dev/nodejs/storage/latest/File.html File}
     *     object.
     * @param {object} [metadata] Metadata to set with the load operation. The
     *     metadata object should be in the format of the
     *     {@link https://cloud.google.com/bigquery/docs/reference/rest/v2/Job#JobConfigurationLoad| `configuration.load`}
     *     property of a Jobs resource.
     * @param {string} [metadata.format] The format the data being loaded is in.
     *     Allowed options are "AVRO", "CSV", "JSON", "ORC", or "PARQUET".
     * @param {string} [metadata.jobId] Custom id for the underlying job.
     * @param {string} [metadata.jobPrefix] Prefix to apply to the underlying job
     *     id.
     * @param {JobMetadataCallback} [callback] The callback function.
     * @param {?error} callback.err An error returned while making this request.
     * @param {object} callback.apiResponse The full API response.
     * @returns {Promise<JobMetadataResponse>}
     *
     * @throws {Error} If the source isn't a string file name or a File instance.
     *
     * @example
     * ```
     * const {BigQuery} = require('@google-cloud/bigquery');
     * const bigquery = new BigQuery();
     * const dataset = bigquery.dataset('my-dataset');
     * const table = dataset.table('my-table');
     *
     * //-
     * // Load data from a local file.
     * //-
     * table.load('./institutions.csv', (err, apiResponse) => {});
     *
     * //-
     * // You may also pass in metadata in the format of a Jobs resource. See
     * // (https://cloud.google.com/bigquery/docs/reference/rest/v2/Job#JobConfigurationLoad)
     * // for a full list of supported values.
     * //-
     * const metadata = {
     *   encoding: 'ISO-8859-1',
     *   sourceFormat: 'NEWLINE_DELIMITED_JSON'
     * };
     *
     * table.load('./my-data.csv', metadata, (err, apiResponse) => {});
     *
     * //-
     * // Load data from a file in your Cloud Storage bucket.
     * //-
     * const {Storage} = require('@google-cloud/storage');
     * const storage = new Storage({
     *   projectId: 'grape-spaceship-123'
     * });
     * const data = storage.bucket('institutions').file('data.csv');
     * table.load(data, (err, apiResponse) => {});
     *
     * //-
     * // Load data from multiple files in your Cloud Storage bucket(s).
     * //-
     * table.load([
     *   storage.bucket('institutions').file('2011.csv'),
     *   storage.bucket('institutions').file('2012.csv')
     * ], (err, apiResponse) => {});
     *
     * //-
     * // If the callback is omitted, we'll return a Promise.
     * //-
     * table.load(data).then((data) => {
     *   const apiResponse = data[0];
     * });
     * ```
     */
    load(source: string | File | File[], metadata?: JobLoadMetadata): Promise<JobMetadataResponse>;
    load(source: string | File | File[], metadata: JobLoadMetadata, callback: JobMetadataCallback): void;
    load(source: string | File | File[], callback: JobMetadataCallback): void;
    /**
     * Run a query scoped to your dataset.
     *
     * See {@link BigQuery#query} for full documentation of this method.
     * @param {object} query See {@link BigQuery#query} for full documentation of this method.
     * @param {function} [callback] See {@link BigQuery#query} for full documentation of this method.
     * @returns {Promise<SimpleQueryRowsResponse>}
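     *
     * @example
     * ```
     * // A minimal sketch; the query string is a placeholder.
     * table.query('SELECT name FROM `my-dataset.my-table` LIMIT 10')
     *   .then(([rows]) => {
     *     // `rows` contains the query results.
     *   });
     * ```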
     */
    query(query: Query): Promise<SimpleQueryRowsResponse>;
    query(query: string): Promise<SimpleQueryRowsResponse>;
    query(query: Query, callback: SimpleQueryRowsCallback): void;
    /**
     * Set the metadata on the table.
     *
     * See {@link https://cloud.google.com/bigquery/docs/reference/v2/tables/patch| Tables: patch API Documentation}
     *
     * @param {object} metadata The metadata key/value object to set.
     * @param {string} metadata.description A user-friendly description of the
     *     table.
     * @param {string} metadata.name A descriptive name for the table.
     * @param {string|object} metadata.schema A comma-separated list of name:type
     *     pairs. Valid types are "string", "integer", "float", "boolean",
     *     "bytes", "record", and "timestamp". If the type is omitted, it is
     *     assumed to be "string". Example: "name:string, age:integer". Schemas
     *     can also be specified as a JSON array of fields, which allows for
     *     nested and repeated fields. See a {@link http://goo.gl/sl8Dmg| Table
     *     resource} for more detailed information.
     * @param {function} [callback] The callback function.
     * @param {?error} callback.err An error returned while making this request.
     * @param {object} callback.apiResponse The full API response.
     * @returns {Promise<common.SetMetadataResponse>}
     *
     * @example
     * ```
     * const {BigQuery} = require('@google-cloud/bigquery');
     * const bigquery = new BigQuery();
     * const dataset = bigquery.dataset('my-dataset');
     * const table = dataset.table('my-table');
     *
     * const metadata = {
     *   name: 'My recipes',
     *   description: 'A table for storing my recipes.',
     *   schema: 'name:string, servings:integer, cookingTime:float, quick:boolean'
     * };
     *
     * table.setMetadata(metadata, (err, metadata, apiResponse) => {});
     *
     * //-
     * // If the callback is omitted, we'll return a Promise.
     * //-
     * table.setMetadata(metadata).then((data) => {
     *   const metadata = data[0];
     *   const apiResponse = data[1];
     * });
     * ```
     */
    setMetadata(metadata: SetTableMetadataOptions): Promise<SetMetadataResponse>;
    setMetadata(metadata: SetTableMetadataOptions, callback: ResponseCallback): void;
    /**
     * Get the IAM access control policy for the table.
     *
     * @param {GetPolicyOptions} [options] Request configuration options.
     * @param {PolicyCallback} [callback] The callback function.
     * @returns {Promise<PolicyResponse>}
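     *
     * @example
     * ```
     * // A minimal sketch of reading the table's IAM policy.
     * table.getIamPolicy().then(([policy]) => {
     *   // `policy` lists the table's IAM bindings.
     * });
     * ```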
     */
    getIamPolicy(optionsOrCallback?: GetPolicyOptions | PolicyCallback): Promise<PolicyResponse>;
    getIamPolicy(options: GetPolicyOptions, callback: PolicyCallback): void;
    /**
     * Set the IAM access control policy for the table.
     *
     * @param {Policy} policy The policy to set.
     * @param {SetPolicyOptions} [options] Request configuration options.
     * @param {PolicyCallback} [callback] The callback function.
     * @returns {Promise<PolicyResponse>}
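     *
     * @example
     * ```
     * // A minimal sketch: read the current policy, add a binding, and write
     * // it back. The role and member strings are placeholders.
     * table.getIamPolicy().then(([policy]) => {
     *   policy.bindings = policy.bindings || [];
     *   policy.bindings.push({
     *     role: 'roles/bigquery.dataViewer',
     *     members: ['user:example@example.com'],
     *   });
     *   return table.setIamPolicy(policy);
     * });
     * ```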
     */
    setIamPolicy(policy: Policy, options?: SetPolicyOptions): Promise<PolicyResponse>;
    setIamPolicy(policy: Policy, options: SetPolicyOptions, callback: PolicyCallback): void;
    setIamPolicy(policy: Policy, callback: PolicyCallback): void;
    /**
     * Check which of the given permissions the caller has on the table.
     *
     * @param {string|string[]} permissions The permissions to test.
     * @param {PermissionsCallback} [callback] The callback function.
     * @returns {Promise<PermissionsResponse>}
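     *
     * @example
     * ```
     * // A minimal sketch; `bigquery.tables.get` is a standard table-level
     * // permission name.
     * table.testIamPermissions('bigquery.tables.get').then(([response]) => {
     *   // `response.permissions` holds the subset the caller is granted.
     * });
     * ```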
     */
    testIamPermissions(permissions: string | string[]): Promise<PermissionsResponse>;
    testIamPermissions(permissions: string | string[], callback: PermissionsCallback): void;
}
/**
 * Reference to the {@link Table} class.
 * @name module:@google-cloud/bigquery.Table
 * @see Table
 */
export { Table };