
Commit eaae0fc

Fixing usage error in docs and examples.
1 parent 72fb52d commit eaae0fc

File tree

6 files changed: +27 additions, -20 deletions


CHANGELOG.md

Lines changed: 4 additions & 0 deletions

@@ -1,6 +1,10 @@
 Changelog
 =========
 
+#### 1.0.1 (2014-09-26)
+
+Fixed error in usage in the documentation and examples. The examples did not use the "new" keyword when creating the upload stream, so there were scope issues when doing parallel uploads. This has been clarified and corrected in the documentation and examples.
+
 #### 1.0.0 (2014-09-15)
 
 Major overhaul of the functional interface. Breaks compatability with older versions of the module in favor of a cleaner, more streamlined approach. A migration guide for users of older versions of the module has been included in the documentation.
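The 1.0.1 fix is purely about how the stream is constructed. A minimal sketch of the corrected pattern follows, assuming the `client`/`upload` API shown in the README hunks below; the file names, path, and bucket are placeholders. Creating each stream with `new` gives every parallel upload its own instance, which avoids the shared-state scope issue this changelog entry describes.

```js
var fs = require('fs'),
    AWS = require('aws-sdk'),
    s3Stream = require('s3-upload-stream');

s3Stream.client(new AWS.S3());

// Placeholder file names; each iteration creates its own upload stream.
['first-file.txt', 'second-file.txt'].forEach(function (name) {
  // Without "new", parallel uploads ended up sharing internal state,
  // which is the scope issue the 1.0.1 entry above refers to.
  var upload = new s3Stream.upload({
    "Bucket": "bucket-name",
    "Key": name
  });

  fs.createReadStream('/path/to/' + name).pipe(upload);
});
```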

README.md

Lines changed: 8 additions & 8 deletions

@@ -6,13 +6,13 @@ A pipeable write stream which uploads to Amazon S3 using the multipart file uplo
 
 ### Changelog
 
-#### 1.0.0 (2014-09-15)
+#### 1.0.1 (2014-09-26)
 
-Major overhaul of the functional interface. Breaks compatability with older versions of the module in favor of a cleaner, more streamlined approach. A migration guide for users of older versions of the module has been included in the documentation.
+Fixed error in usage in the documentation and examples. The examples did not use the "new" keyword when creating the upload stream, so there were scope issues when doing parallel uploads. This has been clarified and corrected in the documentation and examples.
 
-#### 0.6.2 (2014-08-31)
+#### 1.0.0 (2014-09-15)
 
-Upgrading the AWS SDK dependency to the latest version. Fixes issue #11
+Major overhaul of the functional interface. Breaks compatability with older versions of the module in favor of a cleaner, more streamlined approach. A migration guide for users of older versions of the module has been included in the documentation.
 
 [Historical Changelogs](CHANGELOG.md)
 
@@ -44,7 +44,7 @@ s3Stream.client(new AWS.S3());
 // Create the streams
 var read = fs.createReadStream('/path/to/a/file');
 var compress = zlib.createGzip();
-var upload = s3Stream.upload({
+var upload = new s3Stream.upload({
   "Bucket": "bucket-name",
   "Key": "key-name"
 });
@@ -118,7 +118,7 @@ var s3Stream = require('s3-upload-stream'),
 s3Stream.client(new AWS.S3());
 
 var read = fs.createReadStream('/path/to/a/file');
-var upload = s3Client.upload({
+var upload = new s3Client.upload({
   "Bucket": "bucket-name",
   "Key": "key-name",
   "ACL": "public-read",
@@ -144,7 +144,7 @@ var s3Stream = require('s3-upload-stream'),
 s3Stream.client(new AWS.S3());
 
 var read = fs.createReadStream('/path/to/a/file');
-var upload = s3Client.upload({
+var upload = new s3Client.upload({
   "Bucket": "bucket-name",
   "Key": "key-name"
 });
@@ -167,7 +167,7 @@ var s3Stream = require('s3-upload-stream'),
 s3Stream.client(new AWS.S3());
 
 var read = fs.createReadStream('/path/to/a/file');
-var upload = s3Client.upload({
+var upload = new s3Client.upload({
   "Bucket": "bucket-name",
   "Key": "key-name"
 });
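For reference, a short sketch of the full pipeline these README hunks describe, with the `new` keyword applied; the file path, bucket, and key are placeholders, and the gzip stage is the one already set up in the compression hunk above.

```js
var fs = require('fs'),
    zlib = require('zlib'),
    AWS = require('aws-sdk'),
    s3Stream = require('s3-upload-stream');

s3Stream.client(new AWS.S3());

// Create the streams (placeholder path, bucket, and key).
var read = fs.createReadStream('/path/to/a/file');
var compress = zlib.createGzip();
var upload = new s3Stream.upload({
  "Bucket": "bucket-name",
  "Key": "key-name"
});

// Gzip the file on the way into the S3 multipart upload stream.
read.pipe(compress).pipe(upload);
```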

examples/upload.js

Lines changed: 1 addition & 1 deletion

@@ -13,7 +13,7 @@ s3Stream.client(new AWS.S3());
 // Create the streams
 var read = fs.createReadStream('../lib/s3-upload-stream.js');
 var compress = zlib.createGzip();
-var upload = s3Stream.upload({
+var upload = new s3Stream.upload({
   "Bucket": "bucket-name",
   "Key": "key-name"
 });

lib/s3-upload-stream.js

Lines changed: 5 additions & 2 deletions

@@ -15,7 +15,7 @@ module.exports = {
     var e = new events.EventEmitter();
 
     // Create the writeable stream interface.
-    self.ws = Writable({
+    self.ws = new Writable({
       highWaterMark: 4194304 // 4 MB
     });
 
@@ -210,8 +210,11 @@ module.exports = {
       function (err, result) {
         if (err)
           self.abortUpload('Failed to complete the multipart upload on S3: ' + JSON.stringify(err));
-        else
+        else {
+          // Emit both events for backwards compatability, and to follow the spec.
           self.ws.emit('uploaded', result);
+          self.ws.emit('finished', result);
+        }
       }
     );
   };
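Since the completion handler above now emits two events with the same result object, a consumer can listen for either name. A small sketch, assuming the events surface on the stream returned by `upload` (bucket and key are placeholders):

```js
var upload = new s3Stream.upload({
  "Bucket": "bucket-name",
  "Key": "key-name"
});

// Pre-1.0.1 consumers keep working against the original event name.
upload.on('uploaded', function (details) {
  console.log('uploaded:', details);
});

// The 'finished' event added in this commit carries the same details.
upload.on('finished', function (details) {
  console.log('finished:', details);
});
```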

package.json

Lines changed: 1 addition & 1 deletion

@@ -1,7 +1,7 @@
 {
   "name": "s3-upload-stream",
   "description": "Writeable stream for uploading content of unknown size to S3 via the multipart API.",
-  "version": "1.0.0",
+  "version": "1.0.1",
   "author": {
     "name": "Nathan Peck",
     "email": "[email protected]"

tests/test.js

Lines changed: 8 additions & 8 deletions

@@ -114,7 +114,7 @@ describe('Creating upload stream', function () {
 
   it('should throw an error', function (done) {
     try {
-      uploadStream = s3Stream.upload({
+      uploadStream = new s3Stream.upload({
        "Bucket": "test-bucket-name",
        "Key": "test-file-name"
      });
@@ -133,7 +133,7 @@ describe('Creating upload stream', function () {
   before(function (done) {
     s3Stream.client(new AWSstub.S3());
 
-    uploadStream = s3Stream.upload({
+    uploadStream = new s3Stream.upload({
      "Bucket": "test-bucket-name",
      "Key": "test-file-name"
    });
@@ -155,7 +155,7 @@ describe('Stream Methods', function () {
   var uploadStream;
 
   before(function (done) {
-    uploadStream = s3Stream.upload({
+    uploadStream = new s3Stream.upload({
      "Bucket": "test-bucket-name",
      "Key": "test-file-name"
    });
@@ -208,7 +208,7 @@ describe('Piping data into the writable upload stream', function () {
   var uploadStream;
 
   before(function (done) {
-    uploadStream = s3Stream.upload({
+    uploadStream = new s3Stream.upload({
      "Bucket": "test-bucket-name",
      "Key": "test-file-name"
    });
@@ -264,7 +264,7 @@ describe('Piping data into the writable upload stream', function () {
 describe('S3 Error catching', function () {
   describe('Error creating multipart upload', function () {
     it('should emit an error', function (done) {
-      var uploadStream = s3Stream.upload({
+      var uploadStream = new s3Stream.upload({
        "Bucket": "test-bucket-name",
        "Key": "create-fail"
      });
@@ -279,7 +279,7 @@ describe('S3 Error catching', function () {
   var uploadStream;
 
   before(function (done) {
-    uploadStream = s3Stream.upload({
+    uploadStream = new s3Stream.upload({
      "Bucket": "test-bucket-name",
      "Key": "upload-fail"
    });
@@ -308,7 +308,7 @@ describe('S3 Error catching', function () {
   var uploadStream;
 
   before(function (done) {
-    uploadStream = s3Stream.upload({
+    uploadStream = new s3Stream.upload({
      "Bucket": "test-bucket-name",
      "Key": "complete-fail"
    });
@@ -337,7 +337,7 @@ describe('S3 Error catching', function () {
   var uploadStream;
 
   before(function (done) {
-    uploadStream = s3Stream.upload({
+    uploadStream = new s3Stream.upload({
      "Bucket": "test-bucket-name",
      "Key": "abort-fail"
    });
