/*!
 * Copyright 2014 Google Inc. All Rights Reserved.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
/// <reference types="node" />
import { ServiceObject, ResponseCallback, SetMetadataResponse } from '@google-cloud/common';
import { ResourceStream } from '@google-cloud/paginator';
import { BigQuery, Job, Dataset, Query, SimpleQueryRowsResponse, SimpleQueryRowsCallback, ResourceCallback, RequestCallback, PagedResponse, PagedCallback, JobRequest, PagedRequest } from '.';
import { Duplex, Writable } from 'stream';
import { JobMetadata } from './job';
import bigquery from './types';
import { IntegerTypeCastOptions } from './bigquery';
import { RowQueue } from './rowQueue';
export interface File {
    bucket: any;
    kmsKeyName?: string;
    userProject?: string;
    name: string;
    generation?: number;
}
export declare type JobMetadataCallback = RequestCallback<JobMetadata>;
export declare type JobMetadataResponse = [JobMetadata];
export declare type RowMetadata = any;
export declare type InsertRowsOptions = bigquery.ITableDataInsertAllRequest & {
    createInsertId?: boolean;
    partialRetries?: number;
    raw?: boolean;
    schema?: string | {};
};
export declare type InsertRowsResponse = [bigquery.ITableDataInsertAllResponse | bigquery.ITable];
export declare type InsertRowsCallback = RequestCallback<bigquery.ITableDataInsertAllResponse | bigquery.ITable>;
export declare type RowsResponse = PagedResponse<RowMetadata, GetRowsOptions, bigquery.ITableDataList | bigquery.ITable>;
export declare type RowsCallback = PagedCallback<RowMetadata, GetRowsOptions, bigquery.ITableDataList | bigquery.ITable>;
export interface InsertRow {
    insertId?: string;
    json?: bigquery.IJsonObject;
}
export declare type TableRow = bigquery.ITableRow;
export declare type TableRowField = bigquery.ITableCell;
export declare type TableRowValue = string | TableRow;
export declare type GetRowsOptions = PagedRequest<bigquery.tabledata.IListParams> & {
    wrapIntegers?: boolean | IntegerTypeCastOptions;
};
export declare type JobLoadMetadata = JobRequest<bigquery.IJobConfigurationLoad> & {
    format?: string;
};
export declare type CreateExtractJobOptions = JobRequest<bigquery.IJobConfigurationExtract> & {
    format?: 'CSV' | 'JSON' | 'AVRO' | 'PARQUET' | 'ORC';
    gzip?: boolean;
};
export declare type JobResponse = [Job, bigquery.IJob];
export declare type JobCallback = ResourceCallback<Job, bigquery.IJob>;
export declare type CreateCopyJobMetadata = CopyTableMetadata;
export declare type SetTableMetadataOptions = TableMetadata;
export declare type CopyTableMetadata = JobRequest<bigquery.IJobConfigurationTableCopy>;
export declare type TableMetadata = bigquery.ITable & {
    name?: string;
    schema?: string | TableField[] | TableSchema;
    partitioning?: string;
    view?: string | ViewDefinition;
};
export declare type ViewDefinition = bigquery.IViewDefinition;
export declare type FormattedMetadata = bigquery.ITable;
export declare type TableSchema = bigquery.ITableSchema;
export declare type TableField = bigquery.ITableFieldSchema;
export interface PartialInsertFailure {
    message: string;
    reason: string;
    row: RowMetadata;
}
export declare type Policy = bigquery.IPolicy;
export declare type GetPolicyOptions = bigquery.IGetPolicyOptions;
export declare type SetPolicyOptions = Omit<bigquery.ISetIamPolicyRequest, 'policy'>;
export declare type PolicyRequest = bigquery.IGetIamPolicyRequest;
export declare type PolicyResponse = [Policy];
export declare type PolicyCallback = RequestCallback<PolicyResponse>;
export declare type PermissionsResponse = [bigquery.ITestIamPermissionsResponse];
export declare type PermissionsCallback = RequestCallback<PermissionsResponse>;
export interface InsertStreamOptions {
    insertRowsOptions?: InsertRowsOptions;
    batchOptions?: RowBatchOptions;
}
export interface RowBatchOptions {
    maxBytes: number;
    maxRows: number;
    maxMilliseconds: number;
}
export interface TableOptions {
    location?: string;
}
/**
 * Table objects are returned by methods such as
 * {@link Dataset#table}, {@link Dataset#createTable}, and
 * {@link Dataset#getTables}.
 *
 * @class
 * @param {Dataset} dataset {@link Dataset} instance.
 * @param {string} id The ID of the table.
 * @param {object} [options] Table options.
 * @param {string} [options.location] The geographic location of the table, by
 *     default this value is inherited from the dataset. This can be used to
 *     configure the location of all jobs created through a table instance. It
 *     cannot be used to set the actual location of the table. This value will
 *     be superseded by any API responses containing location data for the
 *     table.
 *
 * @example
 * ```
 * const {BigQuery} = require('@google-cloud/bigquery');
 * const bigquery = new BigQuery();
 * const dataset = bigquery.dataset('my-dataset');
 *
 * const table = dataset.table('my-table');
 * ```
 */
declare class Table extends ServiceObject {
    dataset: Dataset;
    bigQuery: BigQuery;
    location?: string;
    rowQueue?: RowQueue;
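    /**
     * Create a readable stream of the rows of data in your table.
     *
     * See {@link https://cloud.google.com/bigquery/docs/reference/v2/tabledata/list| Tabledata: list API Documentation}
     *
     * @param {object} [options] Configuration object. See {@link Table#getRows}
     *     for the available options.
     * @returns {ReadableStream}
     *
     * @example
     * ```
     * // A minimal sketch (assuming the table already contains rows):
     * table.createReadStream()
     *   .on('error', console.error)
     *   .on('data', (row) => {})
     *   .on('end', () => {
     *     // All rows have been retrieved.
     *   });
     * ```
     */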
    createReadStream(options?: GetRowsOptions): ResourceStream<RowMetadata>;
    constructor(dataset: Dataset, id: string, options?: TableOptions);
    /**
     * Convert a comma-separated name:type string to a table schema object.
     *
     * @static
     * @private
     *
     * @param {string} str Comma-separated schema string.
     * @returns {object} Table schema in the format the API expects.
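     *
     * @example
     * ```
     * // An illustrative sketch of the conversion:
     * // 'name:string, age:integer' ->
     * //   {fields: [{name: 'name', type: 'STRING'}, {name: 'age', type: 'INTEGER'}]}
     * ```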
     */
    static createSchemaFromString_(str: string): TableSchema;
    /**
     * Convert a row entry from native types to their encoded types that the API
     * expects.
     *
     * @static
     * @private
     *
     * @param {*} value The value to be converted.
     * @returns {*} The converted value.
     */
    static encodeValue_(value?: {} | null): {} | null;
    /**
     * @private
     */
    static formatMetadata_(options: TableMetadata): FormattedMetadata;
    /**
     * @callback JobMetadataCallback
     * @param {?Error} err Request error, if any.
     * @param {object} apiResponse The full API response.
     */
    /**
     * @typedef {array} JobMetadataResponse
     * @property {object} 0 The full API response.
     */
    /**
     * Copy data from one table to another, optionally creating that table.
     *
     * @param {Table} destination The destination table.
     * @param {object} [metadata] Metadata to set with the copy operation. The
     *     metadata object should be in the format of a
     *     {@link https://cloud.google.com/bigquery/docs/reference/rest/v2/Job#JobConfigurationTableCopy| `JobConfigurationTableCopy`}
     *     object.
     * @param {string} [metadata.jobId] Custom id for the underlying job.
     * @param {string} [metadata.jobPrefix] Prefix to apply to the underlying job
     *     id.
     * @param {JobMetadataCallback} [callback] The callback function.
     * @param {?error} callback.err An error returned while making this request
     * @param {object} callback.apiResponse The full API response.
     * @returns {Promise<JobMetadataResponse>}
     *
     * @throws {Error} If a destination other than a Table object is provided.
     *
     * @example
     * ```
     * const {BigQuery} = require('@google-cloud/bigquery');
     * const bigquery = new BigQuery();
     * const dataset = bigquery.dataset('my-dataset');
     *
     * const table = dataset.table('my-table');
     * const yourTable = dataset.table('your-table');
     *
     * table.copy(yourTable, (err, apiResponse) => {});
     *
     * //-
     * // See https://cloud.google.com/bigquery/docs/reference/rest/v2/Job#JobConfigurationTableCopy
     * // for all available options.
     * //-
     * const metadata = {
     *   createDisposition: 'CREATE_NEVER',
     *   writeDisposition: 'WRITE_TRUNCATE'
     * };
     *
     * table.copy(yourTable, metadata, (err, apiResponse) => {});
     *
     * //-
     * // If the callback is omitted, we'll return a Promise.
     * //-
     * table.copy(yourTable, metadata).then((data) => {
     *   const apiResponse = data[0];
     * });
     * ```
     */
    copy(destination: Table, metadata?: CopyTableMetadata): Promise<JobMetadataResponse>;
    copy(destination: Table, metadata: CopyTableMetadata, callback: JobMetadataCallback): void;
    copy(destination: Table, callback: JobMetadataCallback): void;
    /**
     * @callback JobMetadataCallback
     * @param {?Error} err Request error, if any.
     * @param {object} apiResponse The full API response.
     */
    /**
     * @typedef {array} JobMetadataResponse
     * @property {object} 0 The full API response.
     */
    /**
     * Copy data from multiple tables into this table.
     *
     * @param {Table|Table[]} sourceTables The
     *     source table(s) to copy data from.
     * @param {object=} metadata Metadata to set with the copy operation. The
     *     metadata object should be in the format of a
     *     {@link https://cloud.google.com/bigquery/docs/reference/rest/v2/Job#JobConfigurationTableCopy| `JobConfigurationTableCopy`}
     *     object.
     * @param {string} [metadata.jobId] Custom id for the underlying job.
     * @param {string} [metadata.jobPrefix] Prefix to apply to the underlying job
     *     id.
     * @param {JobMetadataCallback} [callback] The callback function.
     * @param {?error} callback.err An error returned while making this request
     * @param {object} callback.apiResponse The full API response.
     * @returns {Promise<JobMetadataResponse>}
     *
     * @throws {Error} If a source other than a Table object is provided.
     *
     * @example
     * ```
     * const {BigQuery} = require('@google-cloud/bigquery');
     * const bigquery = new BigQuery();
     * const dataset = bigquery.dataset('my-dataset');
     * const table = dataset.table('my-table');
     *
     * const sourceTables = [
     *   dataset.table('your-table'),
     *   dataset.table('your-second-table')
     * ];
     *
     * table.copyFrom(sourceTables, (err, apiResponse) => {});
     *
     * //-
     * // See https://cloud.google.com/bigquery/docs/reference/rest/v2/Job#JobConfigurationTableCopy
     * // for all available options.
     * //-
     * const metadata = {
     *   createDisposition: 'CREATE_NEVER',
     *   writeDisposition: 'WRITE_TRUNCATE'
     * };
     *
     * table.copyFrom(sourceTables, metadata, (err, apiResponse) => {});
     *
     * //-
     * // If the callback is omitted, we'll return a Promise.
     * //-
     * table.copyFrom(sourceTables, metadata).then((data) => {
     *   const apiResponse = data[0];
     * });
     * ```
     */
    copyFrom(sourceTables: Table | Table[], metadata?: CopyTableMetadata): Promise<JobMetadataResponse>;
    copyFrom(sourceTables: Table | Table[], metadata: CopyTableMetadata, callback: JobMetadataCallback): void;
    copyFrom(sourceTables: Table | Table[], callback: JobMetadataCallback): void;
    /**
     * Copy data from one table to another, optionally creating that table.
     *
     * See {@link https://cloud.google.com/bigquery/docs/reference/v2/jobs/insert| Jobs: insert API Documentation}
     *
     * @param {Table} destination The destination table.
     * @param {object} [metadata] Metadata to set with the copy operation. The
     *     metadata object should be in the format of a
     *     {@link https://cloud.google.com/bigquery/docs/reference/rest/v2/Job#JobConfigurationTableCopy| `JobConfigurationTableCopy`}
     *     object.
     * @param {string} [metadata.jobId] Custom job id.
     * @param {string} [metadata.jobPrefix] Prefix to apply to the job id.
     * @param {JobCallback} [callback] The callback function.
     * @param {?error} callback.err An error returned while making this request
     * @param {Job} callback.job The job used to copy your table.
     * @param {object} callback.apiResponse The full API response.
     * @returns {Promise<JobResponse>}
     *
     * @throws {Error} If a destination other than a Table object is provided.
     *
     * @example
     * ```
     * const {BigQuery} = require('@google-cloud/bigquery');
     * const bigquery = new BigQuery();
     * const dataset = bigquery.dataset('my-dataset');
     * const table = dataset.table('my-table');
     *
     * const yourTable = dataset.table('your-table');
     * table.createCopyJob(yourTable, (err, job, apiResponse) => {
     *   // `job` is a Job object that can be used to check the status of the
     *   // request.
     * });
     *
     * //-
     * // See https://cloud.google.com/bigquery/docs/reference/rest/v2/Job#JobConfigurationTableCopy
     * // for all available options.
     * //-
     * const metadata = {
     *   createDisposition: 'CREATE_NEVER',
     *   writeDisposition: 'WRITE_TRUNCATE'
     * };
     *
     * table.createCopyJob(yourTable, metadata, (err, job, apiResponse) => {});
     *
     * //-
     * // If the callback is omitted, we'll return a Promise.
     * //-
     * table.createCopyJob(yourTable, metadata).then((data) => {
     *   const job = data[0];
     *   const apiResponse = data[1];
     * });
     * ```
     */
    createCopyJob(destination: Table, metadata?: CreateCopyJobMetadata): Promise<JobResponse>;
    createCopyJob(destination: Table, metadata: CreateCopyJobMetadata, callback: JobCallback): void;
    createCopyJob(destination: Table, callback: JobCallback): void;
    /**
     * Copy data from multiple tables into this table.
     *
     * See {@link https://cloud.google.com/bigquery/docs/reference/v2/jobs/insert| Jobs: insert API Documentation}
     *
     * @param {Table|Table[]} sourceTables The
     *     source table(s) to copy data from.
     * @param {object} [metadata] Metadata to set with the copy operation. The
     *     metadata object should be in the format of a
     *     {@link https://cloud.google.com/bigquery/docs/reference/rest/v2/Job#JobConfigurationTableCopy| `JobConfigurationTableCopy`}
     *     object.
     * @param {string} [metadata.jobId] Custom job id.
     * @param {string} [metadata.jobPrefix] Prefix to apply to the job id.
     * @param {JobCallback} [callback] The callback function.
     * @param {?error} callback.err An error returned while making this request
     * @param {Job} callback.job The job used to copy your table.
     * @param {object} callback.apiResponse The full API response.
     * @returns {Promise<JobResponse>}
     *
     * @throws {Error} If a source other than a Table object is provided.
     *
     * @example
     * ```
     * const {BigQuery} = require('@google-cloud/bigquery');
     * const bigquery = new BigQuery();
     * const dataset = bigquery.dataset('my-dataset');
     * const table = dataset.table('my-table');
     *
     * const sourceTables = [
     *   dataset.table('your-table'),
     *   dataset.table('your-second-table')
     * ];
     *
     * const callback = (err, job, apiResponse) => {
     *   // `job` is a Job object that can be used to check the status of the
     *   // request.
     * };
     *
     * table.createCopyFromJob(sourceTables, callback);
     *
     * //-
     * // See https://cloud.google.com/bigquery/docs/reference/rest/v2/Job#JobConfigurationTableCopy
     * // for all available options.
     * //-
     * const metadata = {
     *   createDisposition: 'CREATE_NEVER',
     *   writeDisposition: 'WRITE_TRUNCATE'
     * };
     *
     * table.createCopyFromJob(sourceTables, metadata, callback);
     *
     * //-
     * // If the callback is omitted, we'll return a Promise.
     * //-
     * table.createCopyFromJob(sourceTables, metadata).then((data) => {
     *   const job = data[0];
     *   const apiResponse = data[1];
     * });
     * ```
     */
    createCopyFromJob(source: Table | Table[], metadata?: CopyTableMetadata): Promise<JobResponse>;
    createCopyFromJob(source: Table | Table[], metadata: CopyTableMetadata, callback: JobCallback): void;
    createCopyFromJob(source: Table | Table[], callback: JobCallback): void;
    /**
     * Export table to Cloud Storage.
     *
     * See {@link https://cloud.google.com/bigquery/docs/reference/v2/jobs/insert| Jobs: insert API Documentation}
     *
     * @param {string|File} destination Where the file should be exported
     *     to. A string or a {@link
     *     https://googleapis.dev/nodejs/storage/latest/File.html File}
     *     object.
     * @param {object} [options] The configuration object.
     * @param {string} [options.format="CSV"] The format to export the data in.
     *     Allowed options are "AVRO", "CSV", "JSON", "ORC", or "PARQUET".
     * @param {boolean} [options.gzip] Specify if you would like the file compressed
     *     with GZIP. Default: false.
     * @param {string} [options.jobId] Custom job id.
     * @param {string} [options.jobPrefix] Prefix to apply to the job id.
     * @param {JobCallback} [callback] The callback function.
     * @param {?error} callback.err An error returned while making this request
     * @param {Job} callback.job The job used to export the table.
     * @param {object} callback.apiResponse The full API response.
     * @returns {Promise<JobResponse>}
     *
     * @throws {Error} If destination isn't a File object.
     * @throws {Error} If destination format isn't recognized.
     *
     * @example
     * ```
     * const {Storage} = require('@google-cloud/storage');
     * const {BigQuery} = require('@google-cloud/bigquery');
     * const bigquery = new BigQuery();
     * const dataset = bigquery.dataset('my-dataset');
     * const table = dataset.table('my-table');
     *
     * const storage = new Storage({
     *   projectId: 'grape-spaceship-123'
     * });
     * const extractedFile = storage.bucket('institutions').file('2014.csv');
     *
     * function callback(err, job, apiResponse) {
     *   // `job` is a Job object that can be used to check the status of the
     *   // request.
     * }
     *
     * //-
     * // To use the default options, just pass a {@link
     * // https://googleapis.dev/nodejs/storage/latest/File.html File}
     * // object.
     * //
     * // Note: The exported format type will be inferred by the file's extension.
     * // If you wish to override this, or provide an array of destination files,
     * // you must provide an `options` object.
     * //-
     * table.createExtractJob(extractedFile, callback);
     *
     * //-
     * // If you need more customization, pass an `options` object.
     * //-
     * const options = {
     *   format: 'json',
     *   gzip: true
     * };
     *
     * table.createExtractJob(extractedFile, options, callback);
     *
     * //-
     * // You can also specify multiple destination files.
     * //-
     * table.createExtractJob([
     *   storage.bucket('institutions').file('2014.json'),
     *   storage.bucket('institutions-copy').file('2014.json')
     * ], options, callback);
     *
     * //-
     * // If the callback is omitted, we'll return a Promise.
     * //-
     * table.createExtractJob(extractedFile, options).then((data) => {
     *   const job = data[0];
     *   const apiResponse = data[1];
     * });
     * ```
     */
    createExtractJob(destination: File, options?: CreateExtractJobOptions): Promise<JobResponse>;
    createExtractJob(destination: File, options: CreateExtractJobOptions, callback: JobCallback): void;
    createExtractJob(destination: File, callback: JobCallback): void;
    /**
     * Load data from a local file or Storage {@link
     * https://googleapis.dev/nodejs/storage/latest/File.html File}.
     *
     * By loading data this way, you create a load job that will run your data
     * load asynchronously. If you would like instantaneous access to your data,
     * insert it using {@link Table#insert}.
     *
     * Note: The file type will be inferred by the given file's extension. If you
     * wish to override this, you must provide `metadata.format`.
     *
     * See {@link https://cloud.google.com/bigquery/docs/reference/v2/jobs/insert| Jobs: insert API Documentation}
     *
     * @param {string|File|File[]} source The source file to load. A string (path)
     *     to a local file, or one or more {@link
     *     https://googleapis.dev/nodejs/storage/latest/File.html File}
     *     objects.
     * @param {object} [metadata] Metadata to set with the load operation. The
     *     metadata object should be in the format of the
     *     {@link https://cloud.google.com/bigquery/docs/reference/rest/v2/Job#JobConfigurationLoad| `configuration.load`}
     *     property of a Jobs resource.
     * @param {string} [metadata.format] The format the data being loaded is in.
     *     Allowed options are "AVRO", "CSV", "JSON", "ORC", or "PARQUET".
     * @param {string} [metadata.jobId] Custom job id.
     * @param {string} [metadata.jobPrefix] Prefix to apply to the job id.
     * @param {JobCallback} [callback] The callback function.
     * @param {?error} callback.err An error returned while making this request
     * @param {Job} callback.job The job used to load your data.
     * @param {object} callback.apiResponse The full API response.
     * @returns {Promise<JobResponse>}
     *
     * @throws {Error} If the source isn't a string file name or a File instance.
     *
     * @example
     * ```
     * const {Storage} = require('@google-cloud/storage');
     * const {BigQuery} = require('@google-cloud/bigquery');
     * const bigquery = new BigQuery();
     * const dataset = bigquery.dataset('my-dataset');
     * const table = dataset.table('my-table');
     *
     * //-
     * // Load data from a local file.
     * //-
     * const callback = (err, job, apiResponse) => {
     *   // `job` is a Job object that can be used to check the status of the
     *   // request.
     * };
     *
     * table.createLoadJob('./institutions.csv', callback);
     *
     * //-
     * // You may also pass in metadata in the format of a Jobs resource. See
     * // (https://cloud.google.com/bigquery/docs/reference/rest/v2/Job#JobConfigurationLoad)
     * // for a full list of supported values.
     * //-
     * const metadata = {
     *   encoding: 'ISO-8859-1',
     *   sourceFormat: 'NEWLINE_DELIMITED_JSON'
     * };
     *
     * table.createLoadJob('./my-data.csv', metadata, callback);
     *
     * //-
     * // Load data from a file in your Cloud Storage bucket.
     * //-
     * const storage = new Storage({
     *   projectId: 'grape-spaceship-123'
     * });
     * const data = storage.bucket('institutions').file('data.csv');
     * table.createLoadJob(data, callback);
     *
     * //-
     * // Load data from multiple files in your Cloud Storage bucket(s).
     * //-
     * table.createLoadJob([
     *   storage.bucket('institutions').file('2011.csv'),
     *   storage.bucket('institutions').file('2012.csv')
     * ], callback);
     *
     * //-
     * // If the callback is omitted, we'll return a Promise.
     * //-
     * table.createLoadJob(data).then((data) => {
     *   const job = data[0];
     *   const apiResponse = data[1];
     * });
     * ```
     */
    createLoadJob(source: string | File, metadata?: JobLoadMetadata): Promise<JobResponse>;
    createLoadJob(source: string | File, metadata: JobLoadMetadata, callback: JobCallback): void;
    createLoadJob(source: string | File, callback: JobCallback): void;
    /**
     * @param {string | File | File[]} source
     * @param {JobLoadMetadata} metadata
     * @returns {Promise<JobResponse>}
     * @private
     */
    _createLoadJob(source: string | File | File[], metadata: JobLoadMetadata): Promise<JobResponse>;
    /**
     * Run a query as a job. No results are immediately returned. Instead, your
     * callback will be executed with a {@link Job} object that you must
     * ping for the results. See the Job documentation for explanations of how to
     * check on the status of the job.
     *
     * See {@link BigQuery#createQueryJob} for full documentation of this method.
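     *
     * @example
     * ```
     * // A minimal sketch: start a query job, then wait for its results.
     * const [job] = await table.createQueryJob({
     *   query: 'SELECT * FROM `my-dataset.my-table` LIMIT 10',
     * });
     * const [rows] = await job.getQueryResults();
     * ```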
     */
    createQueryJob(options: Query): Promise<JobResponse>;
    createQueryJob(options: Query, callback: JobCallback): void;
    /**
     * Run a query scoped to your dataset as a readable object stream.
     *
     * See {@link BigQuery#createQueryStream} for full documentation of this
     * method.
     *
     * @param {object} query See {@link BigQuery#createQueryStream} for full
     *     documentation of this method.
     * @returns {stream} See {@link BigQuery#createQueryStream} for full
     *     documentation of this method.
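     *
     * @example
     * ```
     * // A minimal sketch: stream query results row by row.
     * table.createQueryStream({query: 'SELECT * FROM `my-dataset.my-table`'})
     *   .on('error', console.error)
     *   .on('data', (row) => {})
     *   .on('end', () => {});
     * ```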
     */
    createQueryStream(query: Query): Duplex;
    /**
     * Creates a write stream. Unlike the public version, this will not
     * automatically poll the underlying job.
     *
     * @private
     *
     * @param {string|object} [metadata] Metadata to set with the load operation.
     *     The metadata object should be in the format of the
     *     {@link https://cloud.google.com/bigquery/docs/reference/rest/v2/Job#JobConfigurationLoad| `configuration.load`}
     *     property of a Jobs resource. If a string is given, it will be used
     *     as the filetype.
     * @param {string} [metadata.jobId] Custom job id.
     * @param {string} [metadata.jobPrefix] Prefix to apply to the job id.
     * @returns {WritableStream}
     */
    createWriteStream_(metadata: JobLoadMetadata | string): Writable;
    /**
     * Load data into your table from a readable stream of AVRO, CSV, JSON, ORC,
     * or PARQUET data.
     *
     * See {@link https://cloud.google.com/bigquery/docs/reference/v2/jobs/insert| Jobs: insert API Documentation}
     *
     * @param {string|object} [metadata] Metadata to set with the load operation.
     *     The metadata object should be in the format of the
     *     {@link https://cloud.google.com/bigquery/docs/reference/rest/v2/Job#JobConfigurationLoad| `configuration.load`}
     *     property of a Jobs resource. If a string is given,
     *     it will be used as the filetype.
     * @param {string} [metadata.jobId] Custom job id.
     * @param {string} [metadata.jobPrefix] Prefix to apply to the job id.
     * @returns {WritableStream}
     *
     * @throws {Error} If source format isn't recognized.
     *
     * @example
     * ```
     * const {BigQuery} = require('@google-cloud/bigquery');
     * const bigquery = new BigQuery();
     * const dataset = bigquery.dataset('my-dataset');
     * const table = dataset.table('my-table');
     *
     * //-
     * // Load data from a CSV file.
     * //-
     * const request = require('request');
     *
     * const csvUrl = 'http://goo.gl/kSE7z6';
     *
     * const metadata = {
     *   allowJaggedRows: true,
     *   skipLeadingRows: 1
     * };
     *
     * request.get(csvUrl)
     *   .pipe(table.createWriteStream(metadata))
     *   .on('job', (job) => {
     *     // `job` is a Job object that can be used to check the status of the
     *     // request.
     *   })
     *   .on('complete', (job) => {
     *     // The job has completed successfully.
     *   });
     *
     * //-
     * // Load data from a JSON file.
     * //-
     * const fs = require('fs');
     *
     * fs.createReadStream('./test/testdata/testfile.json')
     *   .pipe(table.createWriteStream('json'))
     *   .on('job', (job) => {
     *     // `job` is a Job object that can be used to check the status of the
     *     // request.
     *   })
     *   .on('complete', (job) => {
     *     // The job has completed successfully.
     *   });
     * ```
     */
    createWriteStream(metadata: JobLoadMetadata | string): Writable;
    /**
     * Export table to Cloud Storage.
     *
     * @param {string|File} destination Where the file should be exported
     *     to. A string or a {@link
     *     https://googleapis.dev/nodejs/storage/latest/File.html File}.
     * @param {object} [options] The configuration object.
     * @param {string} [options.format="CSV"] The format to export the data in.
     *     Allowed options are "AVRO", "CSV", "JSON", "ORC" or "PARQUET".
     * @param {boolean} [options.gzip] Specify if you would like the file compressed
     *     with GZIP. Default: false.
     * @param {string} [options.jobId] Custom id for the underlying job.
     * @param {string} [options.jobPrefix] Prefix to apply to the underlying job id.
     * @param {JobMetadataCallback} [callback] The callback function.
     * @param {?error} callback.err An error returned while making this request
     * @param {object} callback.apiResponse The full API response.
     * @returns {Promise<JobMetadataResponse>}
     *
     * @throws {Error} If destination isn't a File object.
     * @throws {Error} If destination format isn't recognized.
     *
     * @example
     * ```
     * const {Storage} = require('@google-cloud/storage');
     * const {BigQuery} = require('@google-cloud/bigquery');
     * const bigquery = new BigQuery();
     * const dataset = bigquery.dataset('my-dataset');
     * const table = dataset.table('my-table');
     *
     * const storage = new Storage({
     *   projectId: 'grape-spaceship-123'
     * });
     * const extractedFile = storage.bucket('institutions').file('2014.csv');
     *
     * //-
     * // To use the default options, just pass a {@link
     * // https://googleapis.dev/nodejs/storage/latest/File.html File}
     * // object.
     * //
     * // Note: The exported format type will be inferred by the file's extension.
     * // If you wish to override this, or provide an array of destination files,
     * // you must provide an `options` object.
     * //-
     * table.extract(extractedFile, (err, apiResponse) => {});
     *
     * //-
     * // If you need more customization, pass an `options` object.
     * //-
     * const options = {
     *   format: 'json',
     *   gzip: true
     * };
     *
     * table.extract(extractedFile, options, (err, apiResponse) => {});
     *
     * //-
     * // You can also specify multiple destination files.
     * //-
     * table.extract([
     *   storage.bucket('institutions').file('2014.json'),
     *   storage.bucket('institutions-copy').file('2014.json')
     * ], options, (err, apiResponse) => {});
     *
     * //-
     * // If the callback is omitted, we'll return a Promise.
     * //-
     * table.extract(extractedFile, options).then((data) => {
     *   const apiResponse = data[0];
     * });
     * ```
     */
    extract(destination: File, options?: CreateExtractJobOptions): Promise<JobMetadataResponse>;
    extract(destination: File, options: CreateExtractJobOptions, callback?: JobMetadataCallback): void;
    extract(destination: File, callback?: JobMetadataCallback): void;
    /**
     * @callback RowsCallback
     * @param {?Error} err Request error, if any.
     * @param {array} rows The rows.
     * @param {object} apiResponse The full API response.
     */
    /**
     * @typedef {array} RowsResponse
     * @property {array} 0 The rows.
     */
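    /**
     * Retrieve rows from the table.
     *
     * See {@link https://cloud.google.com/bigquery/docs/reference/v2/tabledata/list| Tabledata: list API Documentation}
     *
     * @param {object} [options] The configuration object. Paging options are
     *     described in the API documentation above.
     * @param {boolean|IntegerTypeCastOptions} [options.wrapIntegers] Wrap values
     *     of 'INT64' type in wrapper objects rather than plain numbers.
     * @param {RowsCallback} [callback] The callback function.
     * @returns {Promise<RowsResponse>}
     *
     * @example
     * ```
     * // A minimal sketch (assuming the table already contains rows):
     * const [rows] = await table.getRows();
     * rows.forEach((row) => console.log(row));
     * ```
     */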
    getRows(options?: GetRowsOptions): Promise<RowsResponse>;
    getRows(options: GetRowsOptions, callback: RowsCallback): void;
    getRows(callback: RowsCallback): void;
    /**
     * @callback InsertRowsCallback
     * @param {?Error} err Request error, if any.
     * @param {?Error} err.errors If present, these represent partial
     *     failures. It's possible for part of your request to be completed
     *     successfully, while the other part was not.
     * @param {object} apiResponse The full API response.
     */
    /**
     * @typedef {array} InsertRowsResponse
     * @property {object} 0 The full API response.
     */
    /**
     * Stream data into BigQuery one record at a time without running a load job.
     *
     * If you need to create an entire table from a file, consider using
     * {@link Table#load} instead.
     *
     * Note, if a table was recently created, inserts may fail until the table
     * is consistent within BigQuery. If a `schema` is supplied, this method will
     * automatically retry those failed inserts, and it will even create the
     * table with the provided schema if it does not exist.
     *
     * See {@link https://cloud.google.com/bigquery/docs/reference/v2/tabledata/insertAll| Tabledata: insertAll API Documentation}
     * See {@link https://cloud.google.com/bigquery/quotas#streaming_inserts| Streaming Insert Limits}
     * See {@link https://developers.google.com/bigquery/troubleshooting-errors| Troubleshooting Errors}
     *
     * @param {object|object[]} rows The rows to insert into the table.
     * @param {object} [options] Configuration object.
     * @param {boolean} [options.createInsertId=true] Automatically insert a
     *     default row id when one is not provided.
     * @param {boolean} [options.ignoreUnknownValues=false] Accept rows that contain
     *     values that do not match the schema. The unknown values are ignored.
     * @param {number} [options.partialRetries=3] Number of times to retry
     *     inserting rows for cases of partial failures.
     * @param {boolean} [options.raw] If `true`, the `rows` argument is expected to
     *     be formatted according to the
     *     {@link https://cloud.google.com/bigquery/docs/reference/v2/tabledata/insertAll| specification}.
     * @param {string|object} [options.schema] If provided will automatically
     *     create a table if it doesn't already exist. Note that this can take
     *     longer than 2 minutes to complete. A comma-separated list of
     *     name:type pairs.
     *     Valid types are "string", "integer", "float", "boolean", and
     *     "timestamp". If the type is omitted, it is assumed to be "string".
     *     Example: "name:string, age:integer". Schemas can also be specified as a
     *     JSON array of fields, which allows for nested and repeated fields. See
     *     a {@link http://goo.gl/sl8Dmg| Table resource} for more detailed information.
     * @param {boolean} [options.skipInvalidRows=false] Insert all valid rows of a
     *     request, even if invalid rows exist.
     * @param {string} [options.templateSuffix] Treat the destination table as a
     *     base template, and insert the rows into an instance table named
     *     "{destination}{templateSuffix}". BigQuery will manage creation of
     *     the instance table, using the schema of the base template table. See
     *     {@link https://cloud.google.com/bigquery/streaming-data-into-bigquery#template-tables| Automatic table creation using template tables}
     *     for considerations when working with template tables.
     * @param {InsertRowsCallback} [callback] The callback function.
     * @param {?error} callback.err An error returned while making this request.
     * @param {object[]} callback.err.errors If present, these represent partial
     *     failures. It's possible for part of your request to be completed
     *     successfully, while the other part was not.
     * @param {object} callback.apiResponse The full API response.
     * @returns {Promise<InsertRowsResponse>}
     *
     * @example
     * ```
     * const {BigQuery} = require('@google-cloud/bigquery');
     * const bigquery = new BigQuery();
     * const dataset = bigquery.dataset('my-dataset');
     * const table = dataset.table('my-table');
     *
     * //-
     * // Insert a single row.
     * //-
     * table.insert({
     *   INSTNM: 'Motion Picture Institute of Michigan',
     *   CITY: 'Troy',
     *   STABBR: 'MI'
     * }, insertHandler);
     *
     * //-
     * // Insert multiple rows at a time.
     * //-
     * const rows = [
     *   {
     *     INSTNM: 'Motion Picture Institute of Michigan',
     *     CITY: 'Troy',
     *     STABBR: 'MI'
     *   },
     *   // ...
     * ];
     *
     * table.insert(rows, insertHandler);
     *
     * //-
     * // Insert a row according to the <a href="https://cloud.google.com/bigquery/docs/reference/v2/tabledata/insertAll">specification</a>.
     * //-
     * const row = {
     *   insertId: '1',
     *   json: {
     *     INSTNM: 'Motion Picture Institute of Michigan',
     *     CITY: 'Troy',
     *     STABBR: 'MI'
     *   }
     * };
     *
     * const options = {
     *   raw: true
     * };
     *
     * table.insert(row, options, insertHandler);
     *
     * //-
     * // Handling the response. See <a href="https://developers.google.com/bigquery/troubleshooting-errors">Troubleshooting Errors</a> for best practices on how to handle errors.
     * //-
     * function insertHandler(err, apiResponse) {
     *   if (err) {
     *     // An API error or partial failure occurred.
     *
     *     if (err.name === 'PartialFailureError') {
     *       // Some rows failed to insert, while others may have succeeded.
     *
     *       // err.errors (object[]):
     *       // err.errors[].row (original row object passed to `insert`)
     *       // err.errors[].errors[].reason
     *       // err.errors[].errors[].message
     *     }
     *   }
     * }
     *
     * //-
     * // If the callback is omitted, we'll return a Promise.
     * //-
     * table.insert(rows)
     *   .then((data) => {
     *     const apiResponse = data[0];
     *   })
     *   .catch((err) => {
     *     // An API error or partial failure occurred.
     *
     *     if (err.name === 'PartialFailureError') {
     *       // Some rows failed to insert, while others may have succeeded.
     *
     *       // err.errors (object[]):
     *       // err.errors[].row (original row object passed to `insert`)
     *       // err.errors[].errors[].reason
     *       // err.errors[].errors[].message
     *     }
     *   });
     * ```
     */
    insert(rows: RowMetadata | RowMetadata[], options?: InsertRowsOptions): Promise<InsertRowsResponse>;
    insert(rows: RowMetadata | RowMetadata[], options: InsertRowsOptions, callback: InsertRowsCallback): void;
    insert(rows: RowMetadata | RowMetadata[], callback: InsertRowsCallback): void;
    /**
     * Insert rows with retries, creating the table if it does not already exist.
     *
     * @param {RowMetadata | RowMetadata[]} rows
     * @param {InsertRowsOptions} options
     * @returns {Promise<bigquery.ITableDataInsertAllResponse | bigquery.ITable>}
     * @private
     */
    private _insertAndCreateTable;
    /**
     * This method will attempt to insert rows while retrying any partial failures
     * that occur along the way. Because partial insert failures are returned
     * differently, we can't depend on our usual retry strategy.
     *
     * @private
     *
     * @param {RowMetadata|RowMetadata[]} rows The rows to insert.
     * @param {InsertRowsOptions} options Insert options.
     * @returns {Promise<bigquery.ITableDataInsertAllResponse>}
     */
    private _insertWithRetry;
    /**
     * This method does the bulk of the work for processing options and making the
     * network request.
     *
     * @private
     *
     * @param {RowMetadata|RowMetadata[]} rows The rows to insert.
     * @param {InsertRowsOptions} options Insert options.
     * @returns {Promise<bigquery.ITableDataInsertAllResponse>}
     */
    private _insert;
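    /**
     * Create a writable stream that batches rows and inserts them into the
     * table via the streaming insert API. See {@link Table#insert} for the
     * options that apply to each batch of rows.
     *
     * @param {InsertStreamOptions} [options] Configuration for the underlying
     *     row queue: `batchOptions` ({@link RowBatchOptions}) and
     *     `insertRowsOptions` ({@link InsertRowsOptions}).
     * @returns {WritableStream}
     *
     * @example
     * ```
     * // A minimal sketch: write plain row objects, then end the stream.
     * const stream = table.createInsertStream({
     *   batchOptions: {maxBytes: 1024 * 1024, maxRows: 300, maxMilliseconds: 10000}
     * });
     * stream.write({name: 'Ada', age: 36});
     * stream.end();
     * ```
     */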
    createInsertStream(options?: InsertStreamOptions): Writable;
    load(source: string | File, metadata?: JobLoadMetadata): Promise<JobMetadataResponse>;
    load(source: string | File, metadata: JobLoadMetadata, callback: JobMetadataCallback): void;
    load(source: string | File, callback: JobMetadataCallback): void;
    /**
     * Load data from a local file or Storage {@link
     * https://googleapis.dev/nodejs/storage/latest/File.html File}.
     *
     * By loading data this way, you create a load job that will run your data
     * load asynchronously. If you would like instantaneous access to your data,
     * insert it using {@link Table#insert}.
     *
     * Note: The file type will be inferred by the given file's extension. If you
     * wish to override this, you must provide `metadata.format`.
     *
     * @param {string|File} source The source file to load. A filepath as a string
     *     or a {@link
     *     https://googleapis.dev/nodejs/storage/latest/File.html File}
     *     object.
     * @param {object} [metadata] Metadata to set with the load operation. The
     *     metadata object should be in the format of the
     *     {@link https://cloud.google.com/bigquery/docs/reference/rest/v2/Job#JobConfigurationLoad| `configuration.load`}
     *     property of a Jobs resource.
     * @param {string} [metadata.format] The format the data being loaded is in.
     *     Allowed options are "AVRO", "CSV", "JSON", "ORC", or "PARQUET".
     * @param {string} [metadata.jobId] Custom id for the underlying job.
     * @param {string} [metadata.jobPrefix] Prefix to apply to the underlying job
     *     id.
     * @param {JobMetadataCallback} [callback] The callback function.
     * @param {?error} callback.err An error returned while making this request
     * @param {object} callback.apiResponse The full API response.
     * @returns {Promise<JobMetadataResponse>}
     *
     * @throws {Error} If the source isn't a string file name or a File instance.
     *
     * @example
     * ```
     * const {BigQuery} = require('@google-cloud/bigquery');
     * const bigquery = new BigQuery();
     * const dataset = bigquery.dataset('my-dataset');
     * const table = dataset.table('my-table');
     *
     * //-
     * // Load data from a local file.
     * //-
     * table.load('./institutions.csv', (err, apiResponse) => {});
     *
     * //-
     * // You may also pass in metadata in the format of a Jobs resource. See
     * // (https://cloud.google.com/bigquery/docs/reference/rest/v2/Job#JobConfigurationLoad)
     * // for a full list of supported values.
     * //-
     * const metadata = {
     *   encoding: 'ISO-8859-1',
     *   sourceFormat: 'NEWLINE_DELIMITED_JSON'
     * };
     *
     * table.load('./my-data.csv', metadata, (err, apiResponse) => {});
     *
     * //-
     * // Load data from a file in your Cloud Storage bucket.
     * //-
     * const {Storage} = require('@google-cloud/storage');
     * const gcs = new Storage({
     *   projectId: 'grape-spaceship-123'
     * });
     * const data = gcs.bucket('institutions').file('data.csv');
     * table.load(data, (err, apiResponse) => {});
     *
     * //-
     * // Load data from multiple files in your Cloud Storage bucket(s).
     * //-
     * table.load([
     *   gcs.bucket('institutions').file('2011.csv'),
     *   gcs.bucket('institutions').file('2012.csv')
     * ], (err, apiResponse) => {});
     *
     * //-
     * // If the callback is omitted, we'll return a Promise.
     * //-
     * table.load(data).then((data) => {
     *   const apiResponse = data[0];
     * });
     * ```
     */
    load(source: string | File, metadata?: JobLoadMetadata): Promise<JobMetadataResponse>;
    load(source: string | File, metadata: JobLoadMetadata, callback: JobMetadataCallback): void;
    load(source: string | File, callback: JobMetadataCallback): void;
    /**
     * Run a query scoped to your dataset.
     *
     * See {@link BigQuery#query} for full documentation of this method.
     * @param {object} query See {@link BigQuery#query} for full documentation of this method.
     * @param {function} [callback] See {@link BigQuery#query} for full documentation of this method.
     * @returns {Promise<SimpleQueryRowsResponse>}
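     *
     * @example
     * ```
     * // A minimal sketch: run a query and read the resulting rows.
     * const [rows] = await table.query('SELECT * FROM `my-dataset.my-table` LIMIT 10');
     * ```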
     */
    query(query: Query): Promise<SimpleQueryRowsResponse>;
    query(query: string): Promise<SimpleQueryRowsResponse>;
    query(query: Query, callback: SimpleQueryRowsCallback): void;
    /**
     * Set the metadata on the table.
     *
     * See {@link https://cloud.google.com/bigquery/docs/reference/v2/tables/patch| Tables: patch API Documentation}
     *
     * @param {object} metadata The metadata key/value object to set.
     * @param {string} metadata.description A user-friendly description of the
     *     table.
     * @param {string} metadata.name A descriptive name for the table.
     * @param {string|object} metadata.schema A comma-separated list of name:type
     *     pairs. Valid types are "string", "integer", "float", "boolean",
     *     "bytes", "record", and "timestamp". If the type is omitted, it is assumed
     *     to be "string". Example: "name:string, age:integer". Schemas can also be
     *     specified as a JSON array of fields, which allows for nested and
     *     repeated fields. See a {@link http://goo.gl/sl8Dmg| Table resource} for more
     *     detailed information.
     * @param {function} [callback] The callback function.
     * @param {?error} callback.err An error returned while making this request.
     * @param {object} callback.apiResponse The full API response.
     * @returns {Promise<common.SetMetadataResponse>}
     *
     * @example
     * ```
     * const {BigQuery} = require('@google-cloud/bigquery');
     * const bigquery = new BigQuery();
     * const dataset = bigquery.dataset('my-dataset');
     * const table = dataset.table('my-table');
     *
     * const metadata = {
     *   name: 'My recipes',
     *   description: 'A table for storing my recipes.',
     *   schema: 'name:string, servings:integer, cookingTime:float, quick:boolean'
     * };
     *
     * table.setMetadata(metadata, (err, metadata, apiResponse) => {});
     *
     * //-
     * // If the callback is omitted, we'll return a Promise.
     * //-
     * table.setMetadata(metadata).then((data) => {
     *   const metadata = data[0];
     *   const apiResponse = data[1];
     * });
     * ```
     */
    setMetadata(metadata: SetTableMetadataOptions): Promise<SetMetadataResponse>;
    setMetadata(metadata: SetTableMetadataOptions, callback: ResponseCallback): void;
    /**
     * Get the IAM access control policy for the table.
     * @returns {Promise<PolicyResponse>}
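     *
     * @example
     * ```
     * // A minimal sketch: inspect the table's current IAM bindings.
     * const [policy] = await table.getIamPolicy();
     * console.log(policy.bindings);
     * ```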
     */
    getIamPolicy(optionsOrCallback?: GetPolicyOptions | PolicyCallback): Promise<PolicyResponse>;
    getIamPolicy(options: GetPolicyOptions, callback: PolicyCallback): void;
    /**
     * Set the IAM access control policy for the table.
     * @returns {Promise<PolicyResponse>}
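     *
     * @example
     * ```
     * // A minimal sketch: grant a viewer role, starting from the current
     * // policy. The member address is illustrative.
     * const [policy] = await table.getIamPolicy();
     * policy.bindings = policy.bindings || [];
     * policy.bindings.push({
     *   role: 'roles/bigquery.dataViewer',
     *   members: ['user:jane@example.com'],
     * });
     * const [updatedPolicy] = await table.setIamPolicy(policy);
     * ```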
     */
    setIamPolicy(policy: Policy, options?: SetPolicyOptions): Promise<PolicyResponse>;
    setIamPolicy(policy: Policy, options: SetPolicyOptions, callback: PolicyCallback): void;
    setIamPolicy(policy: Policy, callback: PolicyCallback): void;
    /**
     * Test which of the given IAM permissions the caller has on the table.
     * @returns {Promise<PermissionsResponse>}
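     *
     * @example
     * ```
     * // A minimal sketch: check a single permission.
     * const [response] = await table.testIamPermissions(['bigquery.tables.get']);
     * console.log(response.permissions); // A subset of the permissions asked for.
     * ```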
     */
    testIamPermissions(permissions: string | string[]): Promise<PermissionsResponse>;
    testIamPermissions(permissions: string | string[], callback: PermissionsCallback): void;
}
/**
 * Reference to the {@link Table} class.
 * @name module:@google-cloud/bigquery.Table
 * @see Table
 */
export { Table };