
Commit 4926d4e

Adding tests for the functionality to adjust partSize dynamically, and fixing that feature.
1 parent 3297225 commit 4926d4e

File tree: 3 files changed, 44 additions and 3 deletions

- README.md
- lib/s3-upload-stream.js
- tests/test.js

README.md (6 additions, 0 deletions)

````diff
@@ -147,3 +147,9 @@ var UploadStreamObject = new Uploader(
 ```
 npm install s3-upload-stream
 ```
+
+### Running Tests
+
+```
+npm test
+```
````

lib/s3-upload-stream.js (3 additions, 3 deletions)

```diff
@@ -43,15 +43,15 @@ module.exports = {
   self.receivedSize = 0;
   self.uploadedSize = 0;
   self.currentPart = Buffer(0);
-  self.maxPartSize = 5242880;
+  self.partSizeThreshold = 5242880;
 
   // Set the maximum amount of data that we will keep in memory before flushing it to S3 as a part
   // of the multipart upload
   self.maxPartSize = function (partSize) {
     if (partSize < 5242880)
       partSize = 5242880;
 
-    self.maxPartSize = partSize;
+    self.partSizeThreshold = partSize;
   };
 
   // Handler to receive data and upload it to S3.
@@ -60,7 +60,7 @@
 
   // If the current Part buffer is getting too large, or the stream piped in has ended then flush
   // the Part buffer downstream to S3 via the multipart upload API.
-  if (self.currentPart.length > self.maxPartSize)
+  if (self.currentPart.length > self.partSizeThreshold)
     self.flushPart(next);
   else
     next();
```
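The rename is the actual fix: in the old code the numeric default and the setter both used the name `maxPartSize`, so defining the setter discarded the default and calling it replaced the setter with a number. Storing the threshold in `partSizeThreshold` removes that collision while keeping `maxPartSize()` as the public way to tune how much data is buffered per part. A minimal usage sketch follows; the `Uploader` export name is taken from the README excerpt above, while the `aws-sdk` client, credential setup, and file path are assumptions for illustration only, not part of this diff:

```javascript
// Sketch only: export name, aws-sdk client, and paths are assumptions.
var Uploader = require('s3-upload-stream').Uploader,
    AWS      = require('aws-sdk'),
    fs       = require('fs');

var upload = new Uploader(
  { s3Client: new AWS.S3() },
  {
    "Bucket": "your-bucket-name",   // hypothetical destination
    "Key": "your-file-name"
  },
  function (err, uploadStream) {
    if (err) throw err;
    // The callback hands back the writable stream; pipe a source into it.
    fs.createReadStream('/path/to/a/local/file').pipe(uploadStream);
  }
);

// Buffer up to 20 MB in memory before each part is flushed to S3.
// Values below 5242880 bytes (S3's 5 MB multipart minimum) are clamped up.
upload.maxPartSize(20971520);
```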

tests/test.js (35 additions, 0 deletions)

```diff
@@ -162,6 +162,41 @@ describe('Creating upload stream', function () {
   });
 });
 
+describe('Stream Methods', function () {
+  var uploadStream, uploadObject;
+
+  before(function (done) {
+    uploadObject = new UploadStream(
+      {
+        s3Client: new AWSstub.S3()
+      },
+      {
+        "Bucket": "test-bucket-name",
+        "Key": "test-file-name"
+      },
+      function (err, data) {
+        expect(err).to.equal(null);
+        uploadStream = data;
+        done();
+      }
+    );
+  });
+
+  describe('Setting max part size to a value greater than 5 MB', function () {
+    it('max part size should be set to that value', function () {
+      uploadObject.maxPartSize(20971520);
+      expect(uploadObject.partSizeThreshold).to.equal(20971520);
+    });
+  });
+
+  describe('Setting max part size to a value less than 5 MB', function () {
+    it('max part size should be set to 5 MB exactly', function () {
+      uploadObject.maxPartSize(4242880);
+      expect(uploadObject.partSizeThreshold).to.equal(5242880);
+    });
+  });
+});
+
 describe('Piping data into the upload stream', function () {
   var uploadStream, uploadObject;
```
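The two new cases assert only on the stored threshold: values above 5 MB are kept as given, and anything lower is raised to 5242880 bytes, which matches S3's minimum multipart part size. A boundary-value case would round this out; the sketch below is not part of this commit and simply reuses the mocha/chai-style API already in the file, relying on the fact that the setter clamps only values strictly below 5242880:

```javascript
// Hypothetical extra case, not in this commit: exactly 5 MB should be
// stored as-is, because the setter only clamps values below 5242880.
describe('Setting max part size to exactly 5 MB', function () {
  it('max part size should remain 5 MB', function () {
    uploadObject.maxPartSize(5242880);
    expect(uploadObject.partSizeThreshold).to.equal(5242880);
  });
});
```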
