diff --git a/bigquery/system-test/tables.test.js b/bigquery/system-test/tables.test.js index cbfa23e779..71e65ab052 100644 --- a/bigquery/system-test/tables.test.js +++ b/bigquery/system-test/tables.test.js @@ -36,37 +36,47 @@ var options = { schema: 'Name:string, Age:integer, Weight:float, IsMagic:boolean', rows: rows }; +var srcDataset = options.dataset; +var srcTable = options.table; +var destDataset = generateUuid(); +var destTable = generateUuid(); describe('bigquery:tables', function () { before(function (done) { // Create bucket storage.createBucket(options.bucket, function (err, bucket) { assert.ifError(err, 'bucket creation succeeded'); - + // Upload data.csv bucket.upload(options.localFilePath, function (err) { assert.ifError(err, 'file upload succeeded'); - - // Create dataset - bigquery.createDataset(options.dataset, function (err, dataset) { - assert.ifError(err, 'dataset creation succeeded'); - done(); + // Create srcDataset + bigquery.createDataset(srcDataset, function (err) { + assert.ifError(err, 'srcDataset creation succeeded'); + // Create destDataset + bigquery.createDataset(destDataset, function (err) { + assert.ifError(err, 'destDataset creation succeeded'); + done(); + }); }); }); }); }); after(function (done) { - // Delete testing dataset/table - bigquery.dataset(options.dataset).delete({ force: true }, function () { - // Delete files - storage.bucket(options.bucket).deleteFiles({ force: true }, function (err) { - if (err) { - return done(err); - } - // Delete bucket - setTimeout(function () { - storage.bucket(options.bucket).delete(done); - }, 2000); + // Delete srcDataset + bigquery.dataset(srcDataset).delete({ force: true }, function () { + // Delete destDataset + bigquery.dataset(destDataset).delete({ force: true }, function () { + // Delete files + storage.bucket(options.bucket).deleteFiles({ force: true }, function (err) { + if (err) { + return done(err); + } + // Delete bucket + setTimeout(function () { + 
storage.bucket(options.bucket).delete(done); + }, 2000); + }); }); }); }); @@ -157,6 +167,30 @@ describe('bigquery:tables', function () { }); }); + describe('copyTable', function () { + it('should copy a table between datasets', function (done) { + program.copyTable(srcDataset, srcTable, destDataset, destTable, function (err, metadata) { + assert.equal(err, null); + assert.deepEqual(metadata.status, { state: 'DONE' }); + + bigquery.dataset(srcDataset).table(srcTable).exists( + function (err, exists) { + assert.equal(err, null); + assert.equal(exists, true, 'srcTable exists'); + + bigquery.dataset(destDataset).table(destTable).exists( + function (err, exists) { + assert.equal(err, null); + assert.equal(exists, true, 'destTable exists'); + done(); + } + ); + } + ); + }); + }); + }); + describe('deleteTable', function () { it('should delete table', function (done) { program.deleteTable(options, function (err) { diff --git a/bigquery/tables.js b/bigquery/tables.js index c2d7605502..008497bc01 100644 --- a/bigquery/tables.js +++ b/bigquery/tables.js @@ -104,6 +104,27 @@ function deleteTable (options, callback) { } // [END delete_table] +function copyTable (srcDataset, srcTable, destDataset, destTable, callback) { + var bigquery = BigQuery(); + + var srcTableObj = bigquery.dataset(srcDataset).table(srcTable); + var destTableObj = bigquery.dataset(destDataset).table(destTable); + + srcTableObj.copy(destTableObj, function (err, job) { + if (err) { + return callback(err); + } + + console.log('Started job: %s', job.id); + job + .on('error', callback) + .on('complete', function (metadata) { + console.log('Completed job: %s', job.id); + return callback(null, metadata); + }); + }); +} + // [START import_file] /** * Load a csv file into a BigQuery table. 
@@ -219,6 +240,7 @@ var program = module.exports = { importFile: importFile, exportTableToGCS: exportTableToGCS, insertRowsAsStream: insertRowsAsStream, + copyTable: copyTable, main: function (args) { // Run the command-line program cli.help().strict().parse(args).argv; @@ -236,6 +258,18 @@ cli .command('delete <dataset> <table>', 'Delete a table in the specified dataset.', {}, function (options) { program.deleteTable(utils.pick(options, ['dataset', 'table']), utils.makeHandler()); }) + .command('copy <srcDataset> <srcTable> <destDataset> <destTable>', + 'Make a copy of an existing table.', {}, + function (options) { + program.copyTable( + options.srcDataset, + options.srcTable, + options.destDataset, + options.destTable, + utils.makeHandler() + ); + } + ) .command('import <dataset> <table> <file>
', 'Import data from a local file or a Google Cloud Storage file into BigQuery.', { bucket: { alias: 'b', @@ -325,6 +359,10 @@ cli 'node $0 insert my_dataset my_table json_file', 'Insert the JSON objects contained in json_file (one per line) into my_dataset:my_table.' ) + .example( + 'node $0 copy src_dataset src_table dest_dataset dest_table', + 'Copy src_dataset:src_table to dest_dataset:dest_table.' + ) .wrap(100) .recommendCommands() .epilogue('For more information, see https://cloud.google.com/bigquery/docs'); diff --git a/bigquery/test/tables.test.js b/bigquery/test/tables.test.js index cec83caebb..267cc27886 100644 --- a/bigquery/test/tables.test.js +++ b/bigquery/test/tables.test.js @@ -16,9 +16,13 @@ var proxyquire = require('proxyquire').noCallThru(); var bucket = 'bucket'; var file = 'file'; -var job = 'job'; +var jobId = 'job'; var dataset = 'dataset'; var table = 'table'; +var srcDataset = dataset; +var srcTable = table; +var destDataset = dataset + '_dest'; +var destTable = table + '_dest'; var format = 'JSON'; var schema = 'schema'; var jsonArray = [ @@ -46,12 +50,14 @@ function getSample () { var fileMock = {}; var metadataMock = { status: { state: 'DONE' } }; var jobMock = { - id: job, + id: jobId, getMetadata: sinon.stub().yields(null, metadataMock), on: sinon.stub().returnsThis() }; + jobMock.on.withArgs('complete').yields(metadataMock); var tableMock = { export: sinon.stub().yields(null, jobMock), + copy: sinon.stub().yields(null, jobMock), delete: sinon.stub().yields(null), import: sinon.stub().yields(null, jobMock), insert: sinon.stub().yields(null, errorList) @@ -220,7 +226,6 @@ describe('bigquery:tables', function () { table: table, file: file }; - sample.mocks.job.on.withArgs('complete').yields(sample.mocks.metadata); sample.program.importFile(options, callback); @@ -243,7 +248,6 @@ describe('bigquery:tables', function () { bucket: bucket, format: format }; - sample.mocks.job.on.withArgs('complete').yields(sample.mocks.metadata); 
sample.program.importFile(options, callback); @@ -269,6 +273,38 @@ describe('bigquery:tables', function () { }); }); + describe('copyTable', function () { + it('should copy a table', function () { + var sample = getSample(); + var callback = sinon.stub(); + + sample.program.copyTable(srcDataset, srcTable, destDataset, destTable, callback); + + assert.equal(sample.mocks.table.copy.calledOnce, true); + assert.deepEqual( + sample.mocks.table.copy.firstCall.args.slice(0, -1), + [sample.mocks.table] + ); + assert.equal(callback.calledOnce, true); + assert.deepEqual(callback.firstCall.args, [null, sample.mocks.metadata]); + assert.equal(console.log.calledTwice, true); + assert.equal(console.log.calledWith('Started job: %s', sample.mocks.job.id), true); + assert.equal(console.log.calledWith('Completed job: %s', sample.mocks.job.id), true); + }); + + it('should handle error', function () { + var error = new Error('error'); + var sample = getSample(); + var callback = sinon.stub(); + sample.mocks.table.copy.yields(error); + + sample.program.copyTable(srcDataset, srcTable, destDataset, destTable, callback); + + assert.equal(callback.calledOnce, true); + assert.deepEqual(callback.firstCall.args, [error]); + }); + }); + describe('exportTableToGCS', function () { it('should export to a table', function () { var sample = getSample(); @@ -281,7 +317,6 @@ describe('bigquery:tables', function () { gzip: true }; var callback = sinon.stub(); - sample.mocks.job.on.withArgs('complete').yields(sample.mocks.metadata); sample.program.exportTableToGCS(options, callback); @@ -389,6 +424,17 @@ describe('bigquery:tables', function () { }]); }); + it('should call copyTable', function () { + var program = getSample().program; + program.copyTable = sinon.stub(); + + program.main(['copy', srcDataset, srcTable, destDataset, destTable]); + assert.equal(program.copyTable.calledOnce, true); + assert.deepEqual(program.copyTable.firstCall.args.slice(0, -1), + [srcDataset, srcTable, destDataset, 
destTable] + ); + }); + it('should call exportTableToGCS', function () { var program = getSample().program; program.exportTableToGCS = sinon.stub();