diff --git a/CHANGELOG.md b/CHANGELOG.md index 4b9ea2b9..3b2b78d6 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -4,6 +4,14 @@ [1]: https://www.npmjs.com/package/@google-cloud/bigquery?activeTab=versions +## [7.1.0](https://github.com/googleapis/nodejs-bigquery/compare/v7.0.0...v7.1.0) (2023-08-01) + + +### Features + +* Support and optionally parse JSON field ([#1229](https://github.com/googleapis/nodejs-bigquery/issues/1229)) ([cd11447](https://github.com/googleapis/nodejs-bigquery/commit/cd114470150cd58dffd4a7c511021eac19ab94d5)) +* Support create/list datasets on a different project ([#1230](https://github.com/googleapis/nodejs-bigquery/issues/1230)) ([86c63fb](https://github.com/googleapis/nodejs-bigquery/commit/86c63fb7079b2fb5b70bae13ed20267cebb3c99d)) + ## [7.0.0](https://github.com/googleapis/nodejs-bigquery/compare/v6.2.1...v7.0.0) (2023-07-31) diff --git a/package.json b/package.json index 59e79d29..e4eff143 100644 --- a/package.json +++ b/package.json @@ -1,7 +1,7 @@ { "name": "@google-cloud/bigquery", "description": "Google BigQuery Client Library for Node.js", - "version": "7.0.0", + "version": "7.1.0", "license": "Apache-2.0", "author": "Google LLC", "engines": { diff --git a/samples/insertingDataTypes.js b/samples/insertingDataTypes.js index 41d2b684..f501fbf6 100644 --- a/samples/insertingDataTypes.js +++ b/samples/insertingDataTypes.js @@ -45,6 +45,10 @@ function main(datasetId = 'my_dataset', tableId = 'my_table') { name: 'school', type: 'BYTES', }, + { + name: 'metadata', + type: 'JSON', + }, { name: 'location', type: 'GEOGRAPHY', @@ -108,7 +112,11 @@ function main(datasetId = 'my_dataset', tableId = 'my_table') { const bqTimestamp = bigquery.timestamp('2020-04-27T18:07:25.356Z'); const bqGeography = bigquery.geography('POINT(1 2)'); const schoolBuffer = Buffer.from('Test University'); - + // a JSON field needs to be converted to a string + const metadata = JSON.stringify({ + owner: 'John Doe', + contact: 'johndoe@example.com', + }); // Rows to 
be inserted into table const rows = [ { @@ -116,6 +124,7 @@ function main(datasetId = 'my_dataset', tableId = 'my_table') { age: '30', location: bqGeography, school: schoolBuffer, + metadata: metadata, measurements: [50.05, 100.5], datesTimes: { day: bqDate, diff --git a/samples/listDatasets.js b/samples/listDatasets.js index 961d6ab4..0f5b8374 100644 --- a/samples/listDatasets.js +++ b/samples/listDatasets.js @@ -14,17 +14,22 @@ 'use strict'; -function main() { +function main(projectId) { // [START bigquery_list_datasets] // Import the Google Cloud client library const {BigQuery} = require('@google-cloud/bigquery'); const bigquery = new BigQuery(); async function listDatasets() { - // Lists all datasets in current GCP project. + /** + * TODO(developer): Uncomment the following lines before running the sample. + */ + // const projectId = "my_project_id"; - // Lists all datasets in the specified project - const [datasets] = await bigquery.getDatasets(); + // Lists all datasets in the specified project. + // If projectId is not specified, this method will take + // the projectId from the authenticated BigQuery Client. 
+ const [datasets] = await bigquery.getDatasets({projectId}); console.log('Datasets:'); datasets.forEach(dataset => console.log(dataset.id)); } diff --git a/samples/package.json b/samples/package.json index 656dfda9..374db9ae 100644 --- a/samples/package.json +++ b/samples/package.json @@ -16,7 +16,7 @@ "test": "mocha --timeout 200000" }, "dependencies": { - "@google-cloud/bigquery": "^7.0.0", + "@google-cloud/bigquery": "^7.1.0", "@google-cloud/storage": "^6.0.0", "google-auth-library": "^9.0.0", "readline-promise": "^1.0.4", diff --git a/samples/test/datasets.test.js b/samples/test/datasets.test.js index fd4aecd1..cc0e1b2c 100644 --- a/samples/test/datasets.test.js +++ b/samples/test/datasets.test.js @@ -98,6 +98,12 @@ describe('Datasets', () => { assert.match(output, new RegExp(datasetId)); }); + it('should list datasets on a different project', async () => { + const output = execSync('node listDatasets.js bigquery-public-data'); + assert.match(output, /Datasets:/); + assert.match(output, new RegExp('usa_names')); + }); + it('should retrieve a dataset if it exists', async () => { const output = execSync(`node getDataset.js ${datasetId}`); assert.include(output, 'Dataset:'); diff --git a/src/bigquery.ts b/src/bigquery.ts index 8fd9e59c..c304a2b0 100644 --- a/src/bigquery.ts +++ b/src/bigquery.ts @@ -104,6 +104,7 @@ export type Query = JobRequest & { jobTimeoutMs?: number; pageToken?: string; wrapIntegers?: boolean | IntegerTypeCastOptions; + parseJSON?: boolean; }; export type QueryParamTypeStruct = { @@ -122,11 +123,17 @@ export type QueryParamTypes = export type QueryOptions = QueryResultsOptions; export type QueryStreamOptions = { wrapIntegers?: boolean | IntegerTypeCastOptions; + parseJSON?: boolean; +}; +export type DatasetResource = bigquery.IDataset & { + projectId?: string; }; -export type DatasetResource = bigquery.IDataset; export type ValueType = bigquery.IQueryParameterType; -export type GetDatasetsOptions = PagedRequest; +export type 
GetDatasetsOptions = PagedRequest & { + projectId?: string; +}; + export type DatasetsResponse = PagedResponse< Dataset, GetDatasetsOptions, @@ -476,24 +483,29 @@ export class BigQuery extends Service { * * @param {object} schema * @param {array} rows - * @param {boolean|IntegerTypeCastOptions} wrapIntegers Wrap values of + * @param {object} options + * @param {boolean|IntegerTypeCastOptions} options.wrapIntegers Wrap values of * 'INT64' type in {@link BigQueryInt} objects. * If a `boolean`, this will wrap values in {@link BigQueryInt} objects. * If an `object`, this will return a value returned by * `wrapIntegers.integerTypeCastFunction`. * Please see {@link IntegerTypeCastOptions} for options descriptions. - * @param {array} selectedFields List of fields to return. + * @param {array} options.selectedFields List of fields to return. * If unspecified, all fields are returned. + * @param {boolean} options.parseJSON Parse a 'JSON' field into a JSON object. * @returns Fields using their matching names from the table's schema. 
*/ static mergeSchemaWithRows_( schema: TableSchema | TableField, rows: TableRow[], - wrapIntegers: boolean | IntegerTypeCastOptions, - selectedFields?: string[] + options: { + wrapIntegers: boolean | IntegerTypeCastOptions; + selectedFields?: string[]; + parseJSON?: boolean; + } ) { - if (selectedFields && selectedFields!.length > 0) { - const selectedFieldsArray = selectedFields!.map(c => { + if (options.selectedFields && options.selectedFields!.length > 0) { + const selectedFieldsArray = options.selectedFields!.map(c => { return c.split('.'); }); @@ -505,7 +517,7 @@ export class BigQuery extends Service { .map(c => c!.toLowerCase()) .indexOf(field.name!.toLowerCase()) >= 0 ); - selectedFields = selectedFieldsArray + options.selectedFields = selectedFieldsArray .filter(c => c.length > 0) .map(c => c.join('.')); } @@ -518,10 +530,10 @@ export class BigQuery extends Service { let value = field.v; if (schemaField.mode === 'REPEATED') { value = (value as TableRowField[]).map(val => { - return convert(schemaField, val.v, wrapIntegers, selectedFields); + return convert(schemaField, val.v, options); }); } else { - value = convert(schemaField, value, wrapIntegers, selectedFields); + value = convert(schemaField, value, options); } // eslint-disable-next-line @typescript-eslint/no-explicit-any const fieldObject: any = {}; @@ -534,8 +546,11 @@ export class BigQuery extends Service { schemaField: TableField, // eslint-disable-next-line @typescript-eslint/no-explicit-any value: any, - wrapIntegers: boolean | IntegerTypeCastOptions, - selectedFields?: string[] + options: { + wrapIntegers: boolean | IntegerTypeCastOptions; + selectedFields?: string[]; + parseJSON?: boolean; + } ) { if (is.null(value)) { return value; @@ -558,6 +573,7 @@ export class BigQuery extends Service { } case 'INTEGER': case 'INT64': { + const {wrapIntegers} = options; value = wrapIntegers ? typeof wrapIntegers === 'object' ? 
BigQuery.int( @@ -580,8 +596,7 @@ export class BigQuery extends Service { value = BigQuery.mergeSchemaWithRows_( schemaField, value, - wrapIntegers, - selectedFields + options ).pop(); break; } @@ -605,6 +620,11 @@ export class BigQuery extends Service { value = BigQuery.geography(value); break; } + case 'JSON': { + const {parseJSON} = options; + value = parseJSON ? JSON.parse(value) : value; + break; + } default: break; } @@ -1242,35 +1262,36 @@ export class BigQuery extends Service { const callback = typeof optionsOrCallback === 'function' ? optionsOrCallback : cb; - this.request( - { - method: 'POST', - uri: '/datasets', - json: extend( - true, - { - location: this.location, + const reqOpts: DecorateRequestOptions = { + method: 'POST', + uri: '/datasets', + json: extend( + true, + { + location: this.location, + }, + options, + { + datasetReference: { + datasetId: id, }, - options, - { - datasetReference: { - datasetId: id, - }, - } - ), - }, - (err, resp) => { - if (err) { - callback!(err, null, resp); - return; } + ), + }; + if (options.projectId) { + reqOpts.projectId = options.projectId; + } + this.request(reqOpts, (err, resp) => { + if (err) { + callback!(err, null, resp); + return; + } - const dataset = this.dataset(id); - dataset.metadata = resp; + const dataset = this.dataset(id, options); + dataset.metadata = resp; - callback!(null, dataset, resp); - } - ); + callback!(null, dataset, resp); + }); } /** @@ -1311,6 +1332,7 @@ export class BigQuery extends Service { * the format of the {@link https://cloud.google.com/bigquery/docs/reference/rest/v2/datasets#DatasetReference| `DatasetReference`} * @param {boolean} [options.wrapIntegers] Optionally wrap INT64 in BigQueryInt * or custom INT64 value type. + * @param {boolean} [options.parseJSON] Optionally parse JSON as a JSON Object. * @param {object|array} [options.params] Option to provide query prarameters. * @param {JobCallback} [callback] The callback function. 
* @param {?error} callback.err An error returned while making this request. @@ -1648,6 +1670,7 @@ export class BigQuery extends Service { * * @param {string} id ID of the dataset. * @param {object} [options] Dataset options. + * @param {string} [options.projectId] The GCP project ID. * @param {string} [options.location] The geographic location of the dataset. * Required except for US and EU. * @@ -1670,12 +1693,13 @@ export class BigQuery extends Service { } /** - * List all or some of the datasets in your project. + * List all or some of the datasets in a project. * * See {@link https://cloud.google.com/bigquery/docs/reference/v2/datasets/list| Datasets: list API Documentation} * * @param {object} [options] Configuration object. * @param {boolean} [options.all] List all datasets, including hidden ones. + * @param {string} [options.projectId] The GCP project ID. * @param {boolean} [options.autoPaginate] Have pagination handled automatically. * Default: true. * @param {number} [options.maxApiCalls] Maximum number of API calls to make. @@ -1730,40 +1754,45 @@ export class BigQuery extends Service { const callback = typeof optionsOrCallback === 'function' ? 
optionsOrCallback : cb; - this.request( - { - uri: '/datasets', - qs: options, - }, - (err, resp) => { - if (err) { - callback!(err, null, null, resp); - return; - } + const reqOpts: DecorateRequestOptions = { + uri: '/datasets', + qs: options, + }; + if (options.projectId) { + reqOpts.projectId = options.projectId; + } + this.request(reqOpts, (err, resp) => { + if (err) { + callback!(err, null, null, resp); + return; + } - let nextQuery: GetDatasetsOptions | null = null; + let nextQuery: GetDatasetsOptions | null = null; - if (resp.nextPageToken) { - nextQuery = Object.assign({}, options, { - pageToken: resp.nextPageToken, - }); - } + if (resp.nextPageToken) { + nextQuery = Object.assign({}, options, { + pageToken: resp.nextPageToken, + }); + } - // eslint-disable-next-line @typescript-eslint/no-explicit-any - const datasets = (resp.datasets || []).map( - (dataset: bigquery.IDataset) => { - const ds = this.dataset(dataset.datasetReference!.datasetId!, { - location: dataset.location!, - }); - - ds.metadata = dataset!; - return ds; + // eslint-disable-next-line @typescript-eslint/no-explicit-any + const datasets = (resp.datasets || []).map( + (dataset: bigquery.IDataset) => { + const dsOpts: DatasetOptions = { + location: dataset.location!, + }; + if (options.projectId) { + dsOpts.projectId = options.projectId; } - ); + const ds = this.dataset(dataset.datasetReference!.datasetId!, dsOpts); - callback!(null, datasets, nextQuery, resp); - } - ); + ds.metadata = dataset!; + return ds; + } + ); + + callback!(null, datasets, nextQuery, resp); + }); } /** @@ -2041,6 +2070,7 @@ export class BigQuery extends Service { typeof query === 'object' ? 
{ wrapIntegers: query.wrapIntegers, + parseJSON: query.parseJSON, } : {}; const callback = @@ -2073,13 +2103,14 @@ export class BigQuery extends Service { return; } - const {location, maxResults, pageToken, wrapIntegers} = query; + const {location, maxResults, pageToken, wrapIntegers, parseJSON} = query; const opts = { location, maxResults, pageToken, wrapIntegers, + parseJSON, autoPaginate: false, }; @@ -2087,6 +2118,7 @@ export class BigQuery extends Service { delete query.maxResults; delete query.pageToken; delete query.wrapIntegers; + delete query.parseJSON; this.query(query, opts, callback); } diff --git a/src/dataset.ts b/src/dataset.ts index 92580538..9bc8fd44 100644 --- a/src/dataset.ts +++ b/src/dataset.ts @@ -109,6 +109,7 @@ export type TableCallback = ResourceCallback; * @param {BigQuery} bigQuery {@link BigQuery} instance. * @param {string} id The ID of the Dataset. * @param {object} [options] Dataset options. + * @param {string} [options.projectId] The GCP project ID. * @param {string} [options.location] The geographic location of the dataset. * Defaults to US. * @@ -372,7 +373,12 @@ class Dataset extends ServiceObject { typeof optionsOrCallback === 'function' ? (optionsOrCallback as DatasetCallback) : cb; - options = extend({}, options, {location: this.location}); + if (this.location) { + options = extend({}, options, {location: this.location}); + } + if (this.projectId) { + options = extend({}, options, {projectId: this.projectId}); + } return bigQuery.createDataset(id, options, callback!); }, }); diff --git a/src/job.ts b/src/job.ts index 4e062435..fd6a1481 100644 --- a/src/job.ts +++ b/src/job.ts @@ -49,6 +49,7 @@ export type CancelResponse = [bigquery.IJobCancelResponse]; export type QueryResultsOptions = { job?: Job; wrapIntegers?: boolean | IntegerTypeCastOptions; + parseJSON?: boolean; } & PagedRequest; /** @@ -538,6 +539,8 @@ class Job extends Operation { const wrapIntegers = qs.wrapIntegers ? 
qs.wrapIntegers : false; delete qs.wrapIntegers; + const parseJSON = qs.parseJSON ? qs.parseJSON : false; + delete qs.parseJSON; delete qs.job; @@ -559,11 +562,10 @@ class Job extends Operation { let rows: any = []; if (resp.schema && resp.rows) { - rows = BigQuery.mergeSchemaWithRows_( - resp.schema, - resp.rows, - wrapIntegers - ); + rows = BigQuery.mergeSchemaWithRows_(resp.schema, resp.rows, { + wrapIntegers, + parseJSON, + }); } let nextQuery: QueryResultsOptions | null = null; diff --git a/src/table.ts b/src/table.ts index d4a6a03f..cab72804 100644 --- a/src/table.ts +++ b/src/table.ts @@ -114,6 +114,7 @@ export type TableRowValue = string | TableRow; export type GetRowsOptions = PagedRequest & { wrapIntegers?: boolean | IntegerTypeCastOptions; + parseJSON?: boolean; }; export type JobLoadMetadata = JobRequest & { @@ -1811,6 +1812,8 @@ class Table extends ServiceObject { typeof optionsOrCallback === 'function' ? optionsOrCallback : cb; const wrapIntegers = options.wrapIntegers ? options.wrapIntegers : false; delete options.wrapIntegers; + const parseJSON = options.parseJSON ? options.parseJSON : false; + delete options.parseJSON; const onComplete = ( err: Error | null, rows: TableRow[] | null, @@ -1821,12 +1824,13 @@ class Table extends ServiceObject { callback!(err, null, null, resp); return; } - rows = BigQuery.mergeSchemaWithRows_( - this.metadata.schema, - rows || [], - wrapIntegers, - options.selectedFields ? options.selectedFields!.split(',') : [] - ); + rows = BigQuery.mergeSchemaWithRows_(this.metadata.schema, rows || [], { + wrapIntegers: wrapIntegers, + selectedFields: options.selectedFields + ? 
options.selectedFields!.split(',') + : [], + parseJSON, + }); callback!(null, rows, nextQuery, resp); }; diff --git a/test/bigquery.ts b/test/bigquery.ts index 791a83af..8e108c05 100644 --- a/test/bigquery.ts +++ b/test/bigquery.ts @@ -148,6 +148,7 @@ afterEach(() => sandbox.restore()); describe('BigQuery', () => { const JOB_ID = 'JOB_ID'; const PROJECT_ID = 'test-project'; + const ANOTHER_PROJECT_ID = 'another-test-project'; const LOCATION = 'asia-northeast1'; // eslint-disable-next-line @typescript-eslint/no-explicit-any @@ -610,11 +611,9 @@ describe('BigQuery', () => { }); const rawRows = rows.map(x => x.raw); - const mergedRows = BigQuery.mergeSchemaWithRows_( - schemaObject, - rawRows, - false - ); + const mergedRows = BigQuery.mergeSchemaWithRows_(schemaObject, rawRows, { + wrapIntegers: false, + }); mergedRows.forEach((mergedRow: {}, index: number) => { assert.deepStrictEqual(mergedRow, rows[index].expected); @@ -644,24 +643,54 @@ describe('BigQuery', () => { sandbox.stub(BigQuery, 'int').returns(fakeInt); - let mergedRows = BigQuery.mergeSchemaWithRows_( - SCHEMA_OBJECT, - rows.raw, - wrapIntegersBoolean - ); + let mergedRows = BigQuery.mergeSchemaWithRows_(SCHEMA_OBJECT, rows.raw, { + wrapIntegers: wrapIntegersBoolean, + }); mergedRows.forEach((mergedRow: {}) => { assert.deepStrictEqual(mergedRow, rows.expectedBool); }); - mergedRows = BigQuery.mergeSchemaWithRows_( - SCHEMA_OBJECT, - rows.raw, - wrapIntegersObject - ); + mergedRows = BigQuery.mergeSchemaWithRows_(SCHEMA_OBJECT, rows.raw, { + wrapIntegers: wrapIntegersObject, + }); mergedRows.forEach((mergedRow: {}) => { assert.deepStrictEqual(mergedRow, rows.expectedObj); }); }); + + it('should parse json with option', () => { + const jsonValue = {name: 'John Doe'}; + + const SCHEMA_OBJECT = { + fields: [{name: 'json_field', type: 'JSON'}], + } as {fields: TableField[]}; + + const rows = { + raw: { + f: [{v: JSON.stringify(jsonValue)}], + }, + expectedParsed: { + json_field: jsonValue, + }, + expectedRaw: 
{ + json_field: JSON.stringify(jsonValue), + }, + }; + + let mergedRows = BigQuery.mergeSchemaWithRows_(SCHEMA_OBJECT, rows.raw, { + parseJSON: false, + }); + mergedRows.forEach((mergedRow: {}) => { + assert.deepStrictEqual(mergedRow, rows.expectedRaw); + }); + + mergedRows = BigQuery.mergeSchemaWithRows_(SCHEMA_OBJECT, rows.raw, { + parseJSON: true, + }); + mergedRows.forEach((mergedRow: {}) => { + assert.deepStrictEqual(mergedRow, rows.expectedParsed); + }); + }); }); describe('date', () => { @@ -1564,6 +1593,30 @@ describe('BigQuery', () => { bq.createDataset(DATASET_ID, assert.ifError); }); + it('should create a dataset on a different project', done => { + bq.makeAuthenticatedRequest = (reqOpts: DecorateRequestOptions) => { + assert.strictEqual(reqOpts.method, 'POST'); + assert.strictEqual(reqOpts.projectId, ANOTHER_PROJECT_ID); + assert.strictEqual( + reqOpts.uri, + `https://bigquery.googleapis.com/bigquery/v2/projects/${ANOTHER_PROJECT_ID}/datasets` + ); + assert.deepStrictEqual(reqOpts.json.datasetReference, { + datasetId: DATASET_ID, + }); + + done(); + }; + + bq.createDataset( + DATASET_ID, + { + projectId: ANOTHER_PROJECT_ID, + }, + assert.ifError + ); + }); + it('should send the location if available', done => { const bq = new BigQuery({ projectId: PROJECT_ID, @@ -2486,6 +2539,20 @@ describe('BigQuery', () => { done(); }); }); + + it('should fetch datasets from a different project', done => { + const queryObject = {projectId: ANOTHER_PROJECT_ID}; + + bq.makeAuthenticatedRequest = (reqOpts: DecorateRequestOptions) => { + assert.strictEqual( + reqOpts.uri, + `https://bigquery.googleapis.com/bigquery/v2/projects/${ANOTHER_PROJECT_ID}/datasets` + ); + done(); + }; + + bq.getDatasets(queryObject, assert.ifError); + }); }); describe('getJobs', () => { @@ -2742,12 +2809,14 @@ describe('BigQuery', () => { const query = { query: QUERY_STRING, wrapIntegers: true, + parseJSON: true, }; bq.query(query, (err: Error, rows: {}, resp: {}) => { assert.ifError(err); 
assert.deepEqual(queryResultsOpts, { job: fakeJob, wrapIntegers: true, + parseJSON: true, }); assert.strictEqual(rows, FAKE_ROWS); assert.strictEqual(resp, FAKE_RESPONSE); @@ -2790,11 +2859,12 @@ describe('BigQuery', () => { describe('queryAsStream_', () => { let queryStub: SinonStub; - let defaultOpts = { + const defaultOpts = { location: undefined, maxResults: undefined, pageToken: undefined, wrapIntegers: undefined, + parseJSON: undefined, autoPaginate: false, }; @@ -2811,12 +2881,14 @@ describe('BigQuery', () => { }); it('should call query correctly with a Query object', done => { - const query = {query: 'SELECT', wrapIntegers: true}; + const query = {query: 'SELECT', wrapIntegers: true, parseJSON: true}; bq.queryAsStream_(query, done); - defaultOpts = extend(defaultOpts, {wrapIntegers: true}); - assert( - queryStub.calledOnceWithExactly(query, defaultOpts, sinon.match.func) - ); + const opts = { + ...defaultOpts, + wrapIntegers: true, + parseJSON: true, + }; + assert(queryStub.calledOnceWithExactly(query, opts, sinon.match.func)); }); it('should query as job if supplied', done => { @@ -2842,11 +2914,29 @@ describe('BigQuery', () => { bq.queryAsStream_(query, done); - defaultOpts = extend(defaultOpts, {wrapIntegers}); + const opts = { + ...defaultOpts, + wrapIntegers, + }; - assert( - queryStub.calledOnceWithExactly(query, defaultOpts, sinon.match.func) - ); + assert(queryStub.calledOnceWithExactly(query, opts, sinon.match.func)); + }); + + it('should pass parseJSON if supplied', done => { + const parseJSON = true; + const query = { + query: 'SELECT', + parseJSON, + }; + + bq.queryAsStream_(query, done); + + const opts = { + ...defaultOpts, + parseJSON, + }; + + assert(queryStub.calledOnceWithExactly(query, opts, sinon.match.func)); }); }); diff --git a/test/dataset.ts b/test/dataset.ts index 4fea70fc..0472f90b 100644 --- a/test/dataset.ts +++ b/test/dataset.ts @@ -203,6 +203,20 @@ describe('BigQuery/Dataset', () => { ds.location = LOCATION; 
config.createMethod(DATASET_ID, done); }); + + it('should pass the projectId', done => { + bq.createDataset = ( + id: string, + options: DatasetOptions, + callback: Function + ) => { + assert.strictEqual(options.projectId, 'project-id'); + callback(); // the done fn + }; + + ds.projectId = 'project-id'; + config.createMethod(DATASET_ID, done); + }); }); describe('projectId override interceptor', () => { diff --git a/test/job.ts b/test/job.ts index 80771ec2..186939a9 100644 --- a/test/job.ts +++ b/test/job.ts @@ -222,11 +222,7 @@ describe('BigQuery/Job', () => { callback(null, RESPONSE); }; - BIGQUERY.mergeSchemaWithRows_ = ( - schema: {}, - rows: {}, - wrapIntegers: {} - ) => { + BIGQUERY.mergeSchemaWithRows_ = (schema: {}, rows: {}, options: {}) => { return rows; }; }); @@ -321,7 +317,7 @@ describe('BigQuery/Job', () => { sandbox .stub(BigQuery, 'mergeSchemaWithRows_') - .callsFake((schema, rows, wrapIntegers) => { + .callsFake((schema, rows, {wrapIntegers}) => { assert.strictEqual(schema, response.schema); assert.strictEqual(rows, response.rows); assert.strictEqual(wrapIntegers, false); @@ -353,7 +349,7 @@ describe('BigQuery/Job', () => { sandbox .stub(BigQuery, 'mergeSchemaWithRows_') - .callsFake((schema, rows, wrapIntegers) => { + .callsFake((schema, rows, {wrapIntegers}) => { assert.strictEqual(schema, response.schema); assert.strictEqual(rows, response.rows); assert.strictEqual(wrapIntegers, true); @@ -363,6 +359,34 @@ describe('BigQuery/Job', () => { job.getQueryResults(options, assert.ifError); }); + it('it should parse JSON', done => { + const response = { + schema: {}, + rows: [], + }; + + const mergedRows: Array<{}> = []; + + const options = {parseJSON: true}; + const expectedOptions = Object.assign({location: undefined}); + + BIGQUERY.request = (reqOpts: DecorateRequestOptions) => { + assert.deepStrictEqual(reqOpts.qs, expectedOptions); + done(); + }; + + sandbox + .stub(BigQuery, 'mergeSchemaWithRows_') + .callsFake((schema, rows, {parseJSON}) => { + 
assert.strictEqual(schema, response.schema); + assert.strictEqual(rows, response.rows); + assert.strictEqual(parseJSON, true); + return mergedRows; + }); + + job.getQueryResults(options, assert.ifError); + }); + it('should return the query when the job is not complete', done => { BIGQUERY.request = ( reqOpts: DecorateRequestOptions, diff --git a/test/table.ts b/test/table.ts index 3aa1176d..b78e7c32 100644 --- a/test/table.ts +++ b/test/table.ts @@ -2022,10 +2022,10 @@ describe('BigQuery/Table', () => { sandbox.restore(); sandbox .stub(BigQuery, 'mergeSchemaWithRows_') - .callsFake((schema_, rows_, wrapIntegers_) => { + .callsFake((schema_, rows_, options_) => { assert.strictEqual(schema_, schema); assert.strictEqual(rows_, rows); - assert.strictEqual(wrapIntegers_, wrapIntegers); + assert.strictEqual(options_.wrapIntegers, wrapIntegers); return mergedRows; }); }); @@ -2091,10 +2091,10 @@ describe('BigQuery/Table', () => { sandbox.restore(); sandbox .stub(BigQuery, 'mergeSchemaWithRows_') - .callsFake((schema_, rows_, wrapIntegers_) => { + .callsFake((schema_, rows_, options_) => { assert.strictEqual(schema_, schema); assert.strictEqual(rows_, rows); - assert.strictEqual(wrapIntegers_, wrapIntegers); + assert.strictEqual(options_.wrapIntegers, wrapIntegers); return merged; }); @@ -2264,8 +2264,30 @@ describe('BigQuery/Table', () => { sandbox.restore(); sandbox .stub(BigQuery, 'mergeSchemaWithRows_') - .callsFake((schema_, rows_, wrapIntegers_) => { - assert.strictEqual(wrapIntegers_, wrapIntegers); + .callsFake((schema_, rows_, options_) => { + assert.strictEqual(options_.wrapIntegers, wrapIntegers); + return merged; + }); + + table.getRows(options, done); + }); + + it('should parse json', done => { + const options = { + parseJSON: true, + }; + const merged = [{name: 'stephen'}]; + + table.request = (reqOpts: DecorateRequestOptions, callback: Function) => { + assert.deepStrictEqual(reqOpts.qs, {}); + callback(null, {}); + }; + + sandbox.restore(); + sandbox + 
.stub(BigQuery, 'mergeSchemaWithRows_') + .callsFake((schema_, rows_, options_) => { + assert.strictEqual(options_.parseJSON, true); return merged; });