diff --git a/README.md b/README.md
index f7a685d..8060c4d 100644
--- a/README.md
+++ b/README.md
@@ -136,15 +136,13 @@ var UploadStreamObject = new Uploader(
 
   },
   function (err, uploadStream) {
-    uploadStream.maxPartSize(20971520) //20 MB
-
     uploadStream.on('uploaded', function (data) {
       console.log('done');
     });
 
     read.pipe(uploadStream);
   }
-);
+).maxPartSize(20971520) //20 MB
 ```
 
 ### stream.concurrentParts(numberOfParts)
@@ -161,7 +159,6 @@ var UploadStreamObject = new Uploader(
 
   },
   function (err, uploadStream) {
-    uploadStream.concurrentParts(5)
 
     uploadStream.on('uploaded', function (data) {
       console.log('done');
@@ -169,7 +166,7 @@ var UploadStreamObject = new Uploader(
 
     read.pipe(uploadStream);
   }
-);
+).concurrentParts(5);
 ```
 
 ### Tuning configuration of the AWS SDK
diff --git a/lib/s3-upload-stream.js b/lib/s3-upload-stream.js
index 58bd721..dc30e1c 100644
--- a/lib/s3-upload-stream.js
+++ b/lib/s3-upload-stream.js
@@ -72,6 +72,7 @@ module.exports = {
       partSize = 5242880;
 
     self.partSizeThreshold = partSize;
+    return self;
   };
 
   // Set the maximum amount of data that we will keep in memory before flushing it to S3 as a part
@@ -81,6 +82,7 @@ module.exports = {
       parts = 1;
 
     self.concurrentPartThreshold = parts;
+    return self;
   };
 
   // Handler to receive data and upload it to S3.
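Since both setters now end in `return self;`, they can also be chained together on the same `Uploader`, not just called one at a time as the updated README examples show. Below is a minimal sketch of that combined fluent usage. It assumes the `Uploader` export and the constructor arguments from the README's setup section (AWS connection details, S3 destination, completion callback), which are not part of this diff; the config values and the file path are placeholders.

```js
var Uploader = require('s3-upload-stream').Uploader,
    fs       = require('fs');

// Any readable stream works; a file stream is used here for illustration.
var read = fs.createReadStream('/path/to/a/file');

var UploadStreamObject = new Uploader(
  // AWS connection details (placeholder values)
  {
    "accessKeyId": "REDACTED",
    "secretAccessKey": "REDACTED",
    "region": "us-east-1"
  },
  // S3 destination for the upload
  {
    "Bucket": "your-bucket-name",
    "Key": "uploaded-file-name"
  },
  function (err, uploadStream) {
    uploadStream.on('uploaded', function (data) {
      console.log('done');
    });

    read.pipe(uploadStream);
  }
)
.maxPartSize(20971520)  // 20 MB parts instead of the 5 MB minimum
.concurrentParts(5);    // hold up to 5 parts in memory at once
```

Before this change the setters returned `undefined`, so each had to be a standalone statement inside the callback; returning `self` is what lets the two calls above stack on the constructor expression.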