diff --git a/lib/common/util.js b/lib/common/util.js index a1df22a6232..77b0efb8fd0 100644 --- a/lib/common/util.js +++ b/lib/common/util.js @@ -197,57 +197,57 @@ util.parseApiResp = parseApiResp; * outgoing writable stream. * * @param {Duplexify} dup - Duplexify stream. - * @param {object} options - Configuration object. - * @param {module:common/connection} options.connection - A connection instance, + * @param {object} config - Configuration object. + * @param {module:common/connection} config.connection - A connection instance, * used to get a token with and send the request through. - * @param {object} options.metadata - Metadata to send at the head of the + * @param {object} config.metadata - Metadata to send at the head of the * request. - * @param {object} options.request - Request object, in the format of a standard + * @param {object} config.request - Request object, in the format of a standard * Node.js http.request() object. - * @param {string=} options.request.method - Default: "POST". - * @param {string=} options.request.qs.uploadType - Default: "multipart". - * @param {string=} options.streamContentType - Default: + * @param {string=} config.request.method - Default: "POST". + * @param {string=} config.request.qs.uploadType - Default: "multipart". + * @param {string=} config.streamContentType - Default: * "application/octet-stream". * @param {function} onComplete - Callback, executed after the writable Request * stream has completed. */ -function makeWritableStream(dup, options, onComplete) { +function makeWritableStream(dup, config, onComplete) { onComplete = onComplete || noop; - options = options || {}; var writeStream = through(); dup.setWritable(writeStream); + var metadata = config.metadata || {}; + var simpleUpload = config.simpleUpload; + var contentType = metadata.contentType || 'application/octet-stream'; + var defaultReqOpts = { method: 'POST', qs: { - uploadType: 'multipart' + uploadType: simpleUpload ? 
'media' : 'multipart' } }; - var metadata = options.metadata || {}; - - var reqOpts = extend(true, defaultReqOpts, options.request, { - multipart: [ - { - 'Content-Type': 'application/json', - body: JSON.stringify(metadata) - }, - { - 'Content-Type': metadata.contentType || 'application/octet-stream', - body: writeStream - } - ] - }); + if (simpleUpload) { + defaultReqOpts.headers = { + 'Content-Type': contentType + }; - options.makeAuthorizedRequest(reqOpts, { + if (metadata.contentLength) { + defaultReqOpts.headers['Content-Length'] = metadata.contentLength; + } + } + + var reqOpts = extend(true, defaultReqOpts, config.request); + + config.makeAuthorizedRequest(reqOpts, { onAuthorized: function(err, authorizedReqOpts) { if (err) { dup.destroy(err); return; } - request(authorizedReqOpts, function(err, resp, body) { + var req = request(authorizedReqOpts, function(err, resp, body) { util.handleResp(err, resp, body, function(err, data) { if (err) { dup.destroy(err); @@ -257,6 +257,21 @@ function makeWritableStream(dup, options, onComplete) { onComplete(data); }); }); + + if (simpleUpload) { + writeStream.pipe(req); + } else { + req.multipart([ + { + 'Content-Type': 'application/json', + body: JSON.stringify(metadata || {}) + }, + { + 'Content-Type': contentType, + body: writeStream + } + ]); + } } }); } diff --git a/lib/storage/bucket.js b/lib/storage/bucket.js index 027eacb6e71..0edba87629d 100644 --- a/lib/storage/bucket.js +++ b/lib/storage/bucket.js @@ -996,6 +996,7 @@ Bucket.prototype.upload = function(localPath, options, callback) { } resumable = fd.size > RESUMABLE_THRESHOLD; + metadata.contentLength = fd.size; upload(); }); diff --git a/lib/storage/file.js b/lib/storage/file.js index dfeb29fc17c..74ced308d52 100644 --- a/lib/storage/file.js +++ b/lib/storage/file.js @@ -620,17 +620,25 @@ File.prototype.createResumableUpload = function(metadata, callback) { * NOTE: Writable streams will emit the `complete` event when the file is fully * uploaded. 
* - * @resource [Upload Options (Simple or Resumable)]{@link https://cloud.google.com/storage/docs/json_api/v1/how-tos/upload#uploads} + * @resource [Upload Options (Simple, Multipart, or Resumable)]{@link https://cloud.google.com/storage/docs/json_api/v1/how-tos/upload#uploads} * @resource [Objects: insert API Documentation]{@link https://cloud.google.com/storage/docs/json_api/v1/objects/insert} * * @param {object=} options - Configuration object. - * @param {boolean} options.gzip - Automatically gzip the file. This will set * `options.metadata.contentEncoding` to `gzip`. * @param {object} options.metadata - Set the metadata for this file. + * @param {number} options.metadata.contentLength - The length of the content + * being uploaded. + * @param {string} options.metadata.contentType - The type of content that + * represents the file being uploaded. Default: `application/octet-stream`. + * @param {object} options.metadata.metadata - Custom metadata to set for this + * file. + * @param {boolean} options.gzip - Automatically gzip the file. This will set * @param {boolean} options.resumable - Force a resumable upload. NOTE: When * working with streams, the file format and size is unknown until it's * completely consumed. Because of this, it's best for you to be explicit * for what makes sense given your input. + * @param {boolean} options.simple - Use a simple upload technique. Metadata + * other than * @param {string|boolean} options.validation - Possible values: `"md5"`, * `"crc32c"`, or `false`. By default, data integrity is validated with an * MD5 checksum for maximum reliability. 
CRC32c will provide better @@ -648,7 +656,7 @@ File.prototype.createResumableUpload = function(metadata, callback) { * var fs = require('fs'); * var image = myBucket.file('image.png'); * - * fs.createReadStream('/Users/stephen/Photos/birthday-at-the-zoo/panda.jpg') + * fs.createReadStream('panda.jpg') * .pipe(image.createWriteStream()) * .on('error', function(err) {}) * .on('finish', function() { @@ -661,7 +669,7 @@ File.prototype.createResumableUpload = function(metadata, callback) { * var fs = require('fs'); * var htmlFile = myBucket.file('index.html'); * - * fs.createReadStream('/Users/stephen/site/index.html') + * fs.createReadStream('site/index.html') * .pipe(htmlFile.createWriteStream({ gzip: true })) * .on('error', function(err) {}) * .on('finish', function() { @@ -684,7 +692,7 @@ File.prototype.createResumableUpload = function(metadata, callback) { * var fs = require('fs'); * var image = myBucket.file('image.png'); * - * fs.createReadStream('/Users/stephen/Photos/birthday-at-the-zoo/panda.jpg') + * fs.createReadStream('panda.jpg') * .pipe(image.createWriteStream({ * metadata: { * contentType: 'image/jpeg', @@ -697,6 +705,35 @@ File.prototype.createResumableUpload = function(metadata, callback) { * .on('finish', function() { * // The file upload is complete. * }); + * + * //- + * //

<h4>Uploading a File using the Simple Upload Technique</h4>

+ * // + * // By default, streams will use a multipart upload with chunked transfer- + * // encoding when being piped to the remote endpoint. This can be convenient, + * // but unnecessary when you're only transferring small files, don't need to + * // set any metadata, and you know the size of the content ahead of time. You + * // can use a simple upload technique by setting `simple: true` and disable + * // chunked transfer-encoding by setting `metadata.contentLength`. + * // + * // *Metadata properties other than `metadata.contentLength` and + * // `metadata.contentType` will be ignored when using simple uploads.* + * //- + * var fs = require('fs'); + * var image = myBucket.file('image.png'); + * + * fs.createReadStream('panda.jpg') + * .pipe(image.createWriteStream({ + * simple: true, + * metadata: { + * contentLength: 134888, + * contentType: 'image/jpg' + * } + * })) + * .on('error', function(err) {}) + * .on('finish', function() { + * // The file upload is complete. + * }); */ File.prototype.createWriteStream = function(options) { options = options || {}; @@ -736,8 +773,8 @@ File.prototype.createWriteStream = function(options) { // Wait until we've received data to determine what upload technique to use. stream.on('writing', function() { - if (options.resumable === false) { - self.startSimpleUpload_(fileWriteStream, metadata); + if (options.resumable === false || options.simple === true) { + self.startSimpleUpload_(fileWriteStream, metadata, options); } else { self.startResumableUpload_(fileWriteStream, metadata); } @@ -1397,14 +1434,15 @@ File.prototype.startResumableUpload_ = function(dup, metadata) { /** * Takes a readable stream and pipes it to a remote file. Unlike * `startResumableUpload_`, which uses the resumable upload technique, this - * method uses a simple upload (all or nothing). + * method uses a simple (or multipart) upload (all or nothing). * * @param {Duplexify} dup - Duplexify stream of data to pipe to the file. 
- * @param {object=} metadata - Optional metadata to set on the file. + * @param {object} metadata - Optional metadata to set on the file. + * @param {object} options - The original options passed to `createWriteStream`. * * @private */ -File.prototype.startSimpleUpload_ = function(dup, metadata) { +File.prototype.startSimpleUpload_ = function(dup, metadata, options) { var self = this; var reqOpts = { @@ -1424,7 +1462,8 @@ File.prototype.startSimpleUpload_ = function(dup, metadata) { util.makeWritableStream(dup, { makeAuthorizedRequest: self.bucket.storage.makeAuthorizedRequest_, metadata: metadata, - request: reqOpts + request: reqOpts, + simpleUpload: options.simple }, function(data) { self.metadata = data; dup.emit('complete'); diff --git a/system-test/storage.js b/system-test/storage.js index e7960f99ffd..8fad0dad3bc 100644 --- a/system-test/storage.js +++ b/system-test/storage.js @@ -590,6 +590,31 @@ describe('storage', function() { }); }); + it('should write without chunked transfer-encoding', function(done) { + var file = bucket.file('LargeFile'); + + fs.stat(files.big.path, function(err, fd) { + var metadata = { + contentLength: fd.size, + contentType: 'image/png' + }; + + var ws = file.createWriteStream({ + simple: true, + metadata: metadata + }); + + fs.createReadStream(files.big.path) + .pipe(ws) + .on('error', done) + .on('finish', function() { + assert.equal(file.metadata.size, metadata.contentLength); + assert.equal(file.metadata.contentType, metadata.contentType); + file.delete(done); + }); + }); + }); + it('should write metadata', function(done) { var options = { metadata: { contentType: 'image/png' },