diff --git a/src/bigquery.ts b/src/bigquery.ts index 09b1f518..a4cfbd03 100644 --- a/src/bigquery.ts +++ b/src/bigquery.ts @@ -121,6 +121,7 @@ export type Query = JobRequest & { pageToken?: string; wrapIntegers?: boolean | IntegerTypeCastOptions; parseJSON?: boolean; + skipWrapCustomTypes?: boolean; // Overrides default job creation mode set on the client. jobCreationMode?: JobCreationMode; }; @@ -141,6 +142,7 @@ export type QueryParamTypes = export type QueryOptions = QueryResultsOptions; export type QueryStreamOptions = { wrapIntegers?: boolean | IntegerTypeCastOptions; + skipWrapCustomTypes?: boolean; parseJSON?: boolean; }; export type DatasetResource = bigquery.IDataset & { @@ -579,7 +582,8 @@ export class BigQuery extends Service { * Please see {@link IntegerTypeCastOptions} for options descriptions. * @param {array} options.selectedFields List of fields to return. * If unspecified, all fields are returned. - * @param {array} options.parseJSON parse a 'JSON' field into a JSON object. + * @param {boolean} options.parseJSON parse a 'JSON' field into a JSON object. + * @param {boolean} options.skipWrapCustomTypes skip converting values into BigQuery custom types. * @returns Fields using their matching names from the table's schema. */ static mergeSchemaWithRows_( @@ -587,6 +591,7 @@ export class BigQuery extends Service { rows: TableRow[], options: { wrapIntegers: boolean | IntegerTypeCastOptions; + skipWrapCustomTypes?: boolean; selectedFields?: string[]; parseJSON?: boolean; }, @@ -1429,6 +1434,7 @@ export class BigQuery extends Service { * @param {boolean} [options.wrapIntegers] Optionally wrap INT64 in BigQueryInt * or custom INT64 value type. * @param {boolean} [options.parseJSON] Optionally parse JSON as a JSON Object. + * @param {boolean} [options.skipWrapCustomTypes] Optionally skip wrapping values into BigQuery Custom Types. * @param {object|array} [options.params] Option to provide query prarameters. 
* @param {JobCallback} [callback] The callback function. * @param {?error} callback.err An error returned while making this request. @@ -2202,6 +2208,7 @@ export class BigQuery extends Service { ? { wrapIntegers: query.wrapIntegers, parseJSON: query.parseJSON, + skipWrapCustomTypes: query.skipWrapCustomTypes, } : {}; const callback = @@ -2241,6 +2248,7 @@ export class BigQuery extends Service { if (res.schema && res.rows) { rows = BigQuery.mergeSchemaWithRows_(res.schema, res.rows, { wrapIntegers: options.wrapIntegers || false, + skipWrapCustomTypes: options.skipWrapCustomTypes, parseJSON: options.parseJSON, }); delete res.rows; @@ -2408,13 +2416,21 @@ export class BigQuery extends Service { return; } - const {location, maxResults, pageToken, wrapIntegers, parseJSON} = query; + const { + location, + maxResults, + pageToken, + wrapIntegers, + parseJSON, + skipWrapCustomTypes, + } = query; const opts = { location, maxResults, pageToken, wrapIntegers, + skipWrapCustomTypes, parseJSON, autoPaginate: false, }; @@ -2423,6 +2439,7 @@ export class BigQuery extends Service { delete query.maxResults; delete query.pageToken; delete query.wrapIntegers; + delete query.skipWrapCustomTypes; delete query.parseJSON; this.query(query, opts, callback); @@ -2462,6 +2479,7 @@ function convertSchemaFieldValue( value: any, options: { wrapIntegers: boolean | IntegerTypeCastOptions; + skipWrapCustomTypes?: boolean; selectedFields?: string[]; parseJSON?: boolean; }, @@ -2469,7 +2487,10 @@ function convertSchemaFieldValue( if (is.null(value)) { return value; } - + let {skipWrapCustomTypes, wrapIntegers, parseJSON} = options; + if (skipWrapCustomTypes) { + wrapIntegers = false; + } switch (schemaField.type) { case 'BOOLEAN': case 'BOOL': { @@ -2487,7 +2508,6 @@ function convertSchemaFieldValue( } case 'INTEGER': case 'INT64': { - const {wrapIntegers} = options; value = wrapIntegers ? typeof wrapIntegers === 'object' ? 
BigQuery.int( @@ -2498,12 +2518,12 @@ function convertSchemaFieldValue( : Number(value); break; } + case 'BIGNUMERIC': case 'NUMERIC': { value = new Big(value); - break; - } - case 'BIGNUMERIC': { - value = new Big(value); + if (skipWrapCustomTypes) { + value = value.toFixed(); + } break; } case 'RECORD': { @@ -2512,36 +2532,52 @@ function convertSchemaFieldValue( } case 'DATE': { value = BigQuery.date(value); + if (skipWrapCustomTypes) { + value = value.value; + } break; } case 'DATETIME': { value = BigQuery.datetime(value); + if (skipWrapCustomTypes) { + value = value.value; + } break; } case 'TIME': { value = BigQuery.time(value); + if (skipWrapCustomTypes) { + value = value.value; + } break; } case 'TIMESTAMP': { const pd = new PreciseDate(); pd.setFullTime(PreciseDate.parseFull(BigInt(value) * BigInt(1000))); value = BigQuery.timestamp(pd); + if (skipWrapCustomTypes) { + value = value.value; + } break; } case 'GEOGRAPHY': { value = BigQuery.geography(value); + if (skipWrapCustomTypes) { + value = value.value; + } break; } case 'JSON': { - const {parseJSON} = options; value = parseJSON ? JSON.parse(value) : value; break; } case 'RANGE': { - value = BigQueryRange.fromSchemaValue_( - value, - schemaField.rangeElementType!.type!, - ); + value = skipWrapCustomTypes + ? 
value + : BigQueryRange.fromSchemaValue_( + value, + schemaField.rangeElementType!.type!, + ); break; } default: @@ -2645,6 +2681,7 @@ export class BigQueryRange { } return convertSchemaFieldValue({type: elementType}, value, { wrapIntegers: false, + skipWrapCustomTypes: false, }); }; return BigQuery.range( diff --git a/src/job.ts b/src/job.ts index d39f950b..d933267a 100644 --- a/src/job.ts +++ b/src/job.ts @@ -50,6 +50,7 @@ export type CancelResponse = [bigquery.IJobCancelResponse]; export type QueryResultsOptions = { job?: Job; wrapIntegers?: boolean | IntegerTypeCastOptions; + skipWrapCustomTypes?: boolean; parseJSON?: boolean; } & PagedRequest & { /** @@ -556,6 +557,10 @@ class Job extends Operation { const wrapIntegers = qs.wrapIntegers ? qs.wrapIntegers : false; delete qs.wrapIntegers; + const skipWrapCustomTypes = qs.skipWrapCustomTypes + ? qs.skipWrapCustomTypes + : false; + delete qs.skipWrapCustomTypes; const parseJSON = qs.parseJSON ? qs.parseJSON : false; delete qs.parseJSON; @@ -597,6 +602,7 @@ class Job extends Operation { if (resp.schema && resp.rows) { rows = BigQuery.mergeSchemaWithRows_(resp.schema, resp.rows, { wrapIntegers, + skipWrapCustomTypes, parseJSON, }); } diff --git a/src/table.ts b/src/table.ts index ce436545..33eab382 100644 --- a/src/table.ts +++ b/src/table.ts @@ -113,6 +113,7 @@ export type TableRowValue = string | TableRow; export type GetRowsOptions = PagedRequest & { wrapIntegers?: boolean | IntegerTypeCastOptions; + skipWrapCustomTypes?: boolean; parseJSON?: boolean; }; @@ -1851,6 +1852,10 @@ class Table extends ServiceObject { typeof optionsOrCallback === 'function' ? optionsOrCallback : cb; const wrapIntegers = options.wrapIntegers ? options.wrapIntegers : false; delete options.wrapIntegers; + const skipWrapCustomTypes = options.skipWrapCustomTypes + ? options.skipWrapCustomTypes + : false; + delete options.skipWrapCustomTypes; const parseJSON = options.parseJSON ? 
options.parseJSON : false; delete options.parseJSON; const selectedFields = options.selectedFields @@ -1868,6 +1873,7 @@ class Table extends ServiceObject { } rows = BigQuery.mergeSchemaWithRows_(this.metadata.schema, rows || [], { wrapIntegers, + skipWrapCustomTypes, selectedFields, parseJSON, }); diff --git a/test/bigquery.ts b/test/bigquery.ts index b53a2b89..57c11551 100644 --- a/test/bigquery.ts +++ b/test/bigquery.ts @@ -444,52 +444,53 @@ describe('BigQuery', () => { } as {fields: TableField[]}; beforeEach(() => { - sandbox.stub(BigQuery, 'date').callsFake(input => { + sandbox.stub(BigQuery, 'date').callsFake(value => { return { type: 'fakeDate', - input, + value, }; }); - sandbox.stub(BigQuery, 'datetime').callsFake(input => { + sandbox.stub(BigQuery, 'datetime').callsFake(value => { return { type: 'fakeDatetime', - input, + value, }; }); - sandbox.stub(BigQuery, 'time').callsFake(input => { + sandbox.stub(BigQuery, 'time').callsFake(value => { return { type: 'fakeTime', - input, + value, }; }); - sandbox.stub(BigQuery, 'timestamp').callsFake(input => { + sandbox.stub(BigQuery, 'timestamp').callsFake(value => { return { type: 'fakeTimestamp', - input, + value, }; }); - sandbox.stub(BigQuery, 'geography').callsFake(input => { + sandbox.stub(BigQuery, 'geography').callsFake(value => { return { type: 'fakeGeography', - input, + value, }; }); - sandbox.stub(BigQuery, 'range').callsFake((input, elementType) => { + sandbox.stub(BigQuery, 'range').callsFake((value, elementType) => { return { type: 'fakeRange', - input, elementType, + value, }; }); }); - it('should merge the schema and flatten the rows', () => { + describe('should merge the schema and flatten the rows', () => { const now = new Date(); + const pd = new PreciseDate(BigInt(now.valueOf()) * BigInt(1_000_000)); const buffer = Buffer.from('test'); const rows = [ @@ -551,7 +552,7 @@ describe('BigQuery', () => { id: 3, name: 'Milo', dob: { - input: new PreciseDate(BigInt(now.valueOf()) * BigInt(1_000_000)), 
+ value: pd, type: 'fakeTimestamp', }, has_claws: false, @@ -572,36 +573,63 @@ describe('BigQuery', () => { }, ], date: { - input: 'date-input', + value: 'date-input', type: 'fakeDate', }, datetime: { - input: 'datetime-input', + value: 'datetime-input', type: 'fakeDatetime', }, time: { - input: 'time-input', + value: 'time-input', type: 'fakeTime', }, geography: { - input: 'geography-input', + value: 'geography-input', type: 'fakeGeography', }, range: { + elementType: 'DATETIME', type: 'fakeRange', - input: { + value: { end: { - input: '2020-12-31 12:00:00+08', + value: '2020-12-31 12:00:00+08', type: 'fakeDatetime', }, start: { - input: '2020-10-01 12:00:00+08', + value: '2020-10-01 12:00:00+08', type: 'fakeDatetime', }, }, - elementType: 'DATETIME', }, }, + expectedWithoutTypes: { + id: 3, + name: 'Milo', + dob: pd, + has_claws: false, + has_fangs: true, + hair_count: 5.222330009847, + teeth_count: 30.2232138, + numeric_col: '3.14', + bignumeric_col: '9.9876543210123456789', + arr: [10], + arr2: [2], + nullable: null, + buffer, + objects: [ + { + nested_object: { + nested_property: 'nested_value', + }, + }, + ], + date: 'date-input', + datetime: 'datetime-input', + time: 'time-input', + geography: 'geography-input', + range: '[2020-10-01 12:00:00+08, 2020-12-31 12:00:00+08)', + }, }, ]; @@ -677,12 +705,33 @@ describe('BigQuery', () => { }); const rawRows = rows.map(x => x.raw); - const mergedRows = BigQuery.mergeSchemaWithRows_(schemaObject, rawRows, { - wrapIntegers: false, + + it('with custom types', () => { + const mergedRows = BigQuery.mergeSchemaWithRows_( + schemaObject, + rawRows, + { + wrapIntegers: false, + }, + ); + + mergedRows.forEach((mergedRow: {}, index: number) => { + assert.deepStrictEqual(mergedRow, rows[index].expected); + }); }); - mergedRows.forEach((mergedRow: {}, index: number) => { - assert.deepStrictEqual(mergedRow, rows[index].expected); + it('without custom types', () => { + const mergedRows = BigQuery.mergeSchemaWithRows_( + 
schemaObject, + rawRows, + { + skipWrapCustomTypes: true, + }, + ); + + mergedRows.forEach((mergedRow: {}, index: number) => { + assert.deepStrictEqual(mergedRow, rows[index].expectedWithoutTypes); + }); }); }); @@ -3263,6 +3312,7 @@ describe('BigQuery', () => { query: QUERY_STRING, wrapIntegers: true, parseJSON: true, + skipWrapCustomTypes: true, }; bq.query(query, (err: Error, rows: {}, resp: {}) => { assert.ifError(err); @@ -3270,6 +3320,7 @@ describe('BigQuery', () => { job: fakeJob, wrapIntegers: true, parseJSON: true, + skipWrapCustomTypes: true, }); assert.strictEqual(rows, FAKE_ROWS); assert.strictEqual(resp, FAKE_RESPONSE); @@ -3486,6 +3537,7 @@ describe('BigQuery', () => { maxResults: undefined, pageToken: undefined, wrapIntegers: undefined, + skipWrapCustomTypes: undefined, parseJSON: undefined, autoPaginate: false, }; @@ -3503,10 +3555,16 @@ describe('BigQuery', () => { }); it('should call query correctly with a Query object', done => { - const query = {query: 'SELECT', wrapIntegers: true, parseJSON: true}; + const query = { + query: 'SELECT', + wrapIntegers: true, + parseJSON: true, + skipWrapCustomTypes: true, + }; bq.queryAsStream_(query, done); const opts = { ...defaultOpts, + skipWrapCustomTypes: true, wrapIntegers: true, parseJSON: true, }; @@ -3560,6 +3618,23 @@ describe('BigQuery', () => { assert(queryStub.calledOnceWithExactly(query, opts, sinon.match.func)); }); + + it('should pass skipWrapCustomTypes if supplied', done => { + const skipWrapCustomTypes = true; + const query = { + query: 'SELECT', + skipWrapCustomTypes, + }; + + bq.queryAsStream_(query, done); + + const opts = { + ...defaultOpts, + skipWrapCustomTypes, + }; + + assert(queryStub.calledOnceWithExactly(query, opts, sinon.match.func)); + }); }); describe('#sanitizeEndpoint', () => { diff --git a/test/job.ts b/test/job.ts index ced932d4..5911526c 100644 --- a/test/job.ts +++ b/test/job.ts @@ -327,10 +327,11 @@ describe('BigQuery/Job', () => { sandbox .stub(BigQuery, 
'mergeSchemaWithRows_') - .callsFake((schema, rows, {wrapIntegers}) => { + .callsFake((schema, rows, {wrapIntegers, skipWrapCustomTypes}) => { assert.strictEqual(schema, response.schema); assert.strictEqual(rows, response.rows); assert.strictEqual(wrapIntegers, false); + assert.strictEqual(skipWrapCustomTypes, false); return mergedRows; }); @@ -372,6 +373,37 @@ describe('BigQuery/Job', () => { job.getQueryResults(options, assert.ifError); }); + it('it should skip wrapping with custom types', done => { + const response = { + schema: {}, + rows: [], + }; + + const mergedRows: Array<{}> = []; + + const options = {skipWrapCustomTypes: true}; + const expectedOptions = Object.assign({ + location: undefined, + 'formatOptions.useInt64Timestamp': true, + }); + + BIGQUERY.request = (reqOpts: DecorateRequestOptions) => { + assert.deepStrictEqual(reqOpts.qs, expectedOptions); + done(); + }; + + sandbox + .stub(BigQuery, 'mergeSchemaWithRows_') + .callsFake((schema, rows, {skipWrapCustomTypes}) => { + assert.strictEqual(schema, response.schema); + assert.strictEqual(rows, response.rows); + assert.strictEqual(skipWrapCustomTypes, true); + return mergedRows; + }); + + job.getQueryResults(options, assert.ifError); + }); + it('it should parse JSON', done => { const response = { schema: {},