127 changes: 127 additions & 0 deletions system-test/managed_writer_client_test.ts
@@ -694,6 +694,133 @@
}
});

it('should invoke appendRows with picosecond precision timestamp without errors', async () => {
const picosTableId = generateUuid();
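// Schema with a TIMESTAMP column declared at picosecond precision
// (timestampPrecision: 12, i.e. 12 fractional digits). Typed as `any`
// because timestampPrecision may not be in the published typings yet.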
const picosSchema: any = {
fields: [
{
name: 'customer_name',
type: 'STRING',
mode: 'REQUIRED',
},
{
name: 'row_num',
type: 'INTEGER',
mode: 'REQUIRED',
},
{
name: 'created_at',
type: 'TIMESTAMP',
mode: 'NULLABLE',
timestampPrecision: 12,
},
],
};
const [table] = await bigquery
.dataset(datasetId)
.createTable(picosTableId, {schema: picosSchema});
const picosParent = `projects/${projectId}/datasets/${datasetId}/tables/${table.id}`;

bqWriteClient.initialize().catch(err => {
throw err;
});
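// A pending-type stream buffers appended rows server-side; nothing becomes
// visible until the stream is finalized and batch-committed below.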
const streamType: WriteStream['type'] = managedwriter.PendingStream;
const client = new WriterClient();
client.setClient(bqWriteClient);

// Convert the BigQuery schema into a proto2 descriptor so the JSONWriter
// can serialize plain JSON rows into protobuf append requests.
const storageSchema =
adapt.convertBigQuerySchemaToStorageTableSchema(picosSchema);
const protoDescriptor: DescriptorProto =
adapt.convertStorageSchemaToProto2Descriptor(storageSchema, 'root');

// Row 1
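// created_at carries 12 fractional digits to exercise picosecond precision end to end.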
const row1 = {
customer_name: 'Ada Lovelace',
row_num: 1,
created_at: '2023-10-10 12:00:00.123456789012',
};

// Appending at an explicit offset lets the server detect and drop duplicate retries.
const offset: IInt64Value['value'] = '0';

const streamId = await client.createWriteStream({
streamType,
destinationTable: picosParent,
});
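// Expected response: the append acknowledged at offset 0 on the new stream.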
const appendRowsResponsesResult: AppendRowsResponse[] = [
{
appendResult: {
offset: {
value: offset,
},
},
writeStream: streamId,
},
];
try {
const connection = await client.createStreamConnection({
streamId,
});
const writer = new JSONWriter({
connection,
protoDescriptor,
});
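// appendRows returns a pending write; getResult() resolves with the server's response.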
const pw = writer.appendRows([row1], offset);
const result = await pw.getResult();
const responses: AppendRowsResponse[] = [
{
appendResult: result.appendResult,
writeStream: result.writeStream,
},
];

assert.deepEqual(appendRowsResponsesResult, responses);

const res = await connection.finalize();
connection.close();
assert.equal(res?.rowCount, 1);

// Batch-commit the finalized stream so its buffered rows become visible atomically.
const commitResponse = await client.batchCommitWriteStream({
parent: picosParent,
writeStreams: [streamId],
});
assert.equal(commitResponse.streamErrors?.length, 0);

writer.close();

// Now read to make sure the written data is correct:
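// Requesting ISO8601_STRING output preserves the sub-microsecond digits that
// the default numeric timestamp encoding cannot represent.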
const options: {[key: string]: any} = {};
const timestampOutputFormat = 'ISO8601_STRING';
const useInt64Timestamp = false;
// Must match row1.created_at, rendered as ISO 8601 with all 12 fractional digits.
const expectedTsValue = '2023-10-10T12:00:00.123456789012Z';
options['formatOptions.timestampOutputFormat'] = timestampOutputFormat;
options['formatOptions.useInt64Timestamp'] = useInt64Timestamp;

// Issue a raw tabledata.list request so the formatOptions query parameters
// can be passed through directly.
await new Promise<void>((resolve, reject) => {
(table as any).request(
{
uri: '/data',
qs: options,
},
(err: any, resp: any) => {
if (err) {
reject(err);
return;
}
try {
assert(resp.rows && resp.rows.length > 0);
assert.strictEqual(resp.rows[0].f[0].v, expectedTsValue);
resolve();
} catch (e) {
reject(e);
}
},
);
});
} finally {
client.close();
}
});

it('should update proto descriptor automatically with appendRows without errors', async () => {
bqWriteClient.initialize().catch(err => {
throw err;
2 changes: 1 addition & 1 deletion system-test/timestamp_output_format.ts
@@ -27,7 +27,7 @@ interface TestCase {
expectedTsValue?: string;
}

-describe.only('Timestamp Output Format System Tests', () => {
+describe('Timestamp Output Format System Tests', () => {
const datasetId = `timestamp_test_${randomUUID().replace(/-/g, '_')}`;
const tableId = `timestamp_table_${randomUUID().replace(/-/g, '_')}`;
const dataset = bigquery.dataset(datasetId);