diff --git a/.istanbul.yml b/.istanbul.yml index 8bb4ab0d88..c0890a3f1a 100644 --- a/.istanbul.yml +++ b/.istanbul.yml @@ -1,2 +1,2 @@ instrumentation: - excludes: ["**/spec/**", "**/PostgresStorageAdapter.js"] + excludes: ["**/spec/**"] diff --git a/.travis.yml b/.travis.yml index 3acf7f1295..7317ebc480 100644 --- a/.travis.yml +++ b/.travis.yml @@ -27,6 +27,7 @@ branches: - /^[0-9]+.[0-9]+.[0-9]+(-.*)?$/ cache: directories: + - node_modules - "$HOME/.mongodb/versions" after_script: - bash <(curl -s https://codecov.io/bash) diff --git a/package.json b/package.json index b1698f5795..8c06e42efb 100644 --- a/package.json +++ b/package.json @@ -32,7 +32,7 @@ "mongodb": "2.2.5", "multer": "1.2.0", "parse": "1.9.1", - "parse-server-fs-adapter": "1.0.0", + "parse-server-fs-adapter": "1.0.1", "parse-server-push-adapter": "1.0.4", "parse-server-s3-adapter": "1.0.4", "parse-server-simple-mailgun-adapter": "1.0.0", diff --git a/spec/ParseAPI.spec.js b/spec/ParseAPI.spec.js index 1a0f7b3ace..6160c03c41 100644 --- a/spec/ParseAPI.spec.js +++ b/spec/ParseAPI.spec.js @@ -212,7 +212,7 @@ describe('miscellaneous', function() { }); }); - it('ensure that if you try to sign up a user with a unique username and email, but duplicates in some other field that has a uniqueness constraint, you get a regular duplicate value error', done => { +it_exclude_dbs(['postgres'])('ensure that if you try to sign up a user with a unique username and email, but duplicates in some other field that has a uniqueness constraint, you get a regular duplicate value error', done => { let config = new Config('test'); config.database.adapter.addFieldIfNotExists('_User', 'randomField', { type: 'String' }) .then(() => config.database.adapter.ensureUniqueness('_User', userSchema, ['randomField'])) @@ -233,6 +233,7 @@ describe('miscellaneous', function() { return user.signUp() }) .catch(error => { + console.error(error); expect(error.code).toEqual(Parse.Error.DUPLICATE_VALUE); done(); }); @@ -816,7 +817,7 @@ describe('miscellaneous', function() { }); }); - it_exclude_dbs(['postgres'])('should return the updated fields on PUT', done => { + it('should return the updated fields on PUT', done => { let obj = new Parse.Object('GameScore'); obj.save({a:'hello', c: 1, d: ['1'], e:['1'], f:['1','2']}).then(( ) => { var headers = { diff --git a/spec/ParseFile.spec.js b/spec/ParseFile.spec.js index e329a3136a..00cc20c1ea 100644 --- a/spec/ParseFile.spec.js +++ b/spec/ParseFile.spec.js @@ -11,8 +11,8 @@ for (var i = 0; i < str.length; i++) { data.push(str.charCodeAt(i)); } -describe_only_db('mongo')('Parse.File testing', () => { - describe_only_db('mongo')('creating files', () => { +describe('Parse.File testing', () => { + describe('creating files', () => { it('works with Content-Type', done => { var headers = { 'Content-Type': 'application/octet-stream', @@ -88,7 +88,7 @@ describe_only_db('mongo')('Parse.File testing', () => { }); }); - it_exclude_dbs(['postgres'])('supports REST end-to-end file create, read, delete, read', done => { + it('supports REST end-to-end file create, read, delete, read', done => { var headers = { 'Content-Type': 'image/jpeg', 'X-Parse-Application-Id': 'test', @@ -204,7 +204,7 @@ describe_only_db('mongo')('Parse.File testing', () => { }); }); - it_exclude_dbs(['postgres'])("save file", done => { + it("save file", done => { var file = new Parse.File("hello.txt", data, "text/plain"); ok(!file.url()); file.save(expectSuccess({ @@ -273,7 +273,7 @@ describe_only_db('mongo')('Parse.File testing', () => { }, done)); }); - 
it_exclude_dbs(['postgres'])("autosave file in object", done => { + it("autosave file in object", done => { var file = new Parse.File("hello.txt", data, "text/plain"); ok(!file.url()); var object = new Parse.Object("TestObject"); @@ -506,7 +506,7 @@ describe_only_db('mongo')('Parse.File testing', () => { }); }); - it_exclude_dbs(['postgres'])('creates correct url for old files hosted on files.parsetfss.com', done => { + it('creates correct url for old files hosted on files.parsetfss.com', done => { var file = { __type: 'File', url: 'http://irrelevant.elephant/', @@ -529,7 +529,7 @@ describe_only_db('mongo')('Parse.File testing', () => { }); }); - it_exclude_dbs(['postgres'])('creates correct url for old files hosted on files.parse.com', done => { + it('creates correct url for old files hosted on files.parse.com', done => { var file = { __type: 'File', url: 'http://irrelevant.elephant/', diff --git a/spec/ParseHooks.spec.js b/spec/ParseHooks.spec.js index f3379151e9..6f7a190a05 100644 --- a/spec/ParseHooks.spec.js +++ b/spec/ParseHooks.spec.js @@ -15,7 +15,7 @@ app.use(bodyParser.json({ 'type': '*/*' })) app.listen(12345); describe('Hooks', () => { - it_exclude_dbs(['postgres'])("should have no hooks registered", (done) => { + it("should have no hooks registered", (done) => { Parse.Hooks.getFunctions().then((res) => { expect(res.constructor).toBe(Array.prototype.constructor); done(); @@ -25,7 +25,7 @@ describe('Hooks', () => { }); }); - it_exclude_dbs(['postgres'])("should have no triggers registered", (done) => { + it("should have no triggers registered", (done) => { Parse.Hooks.getTriggers().then( (res) => { expect(res.constructor).toBe(Array.prototype.constructor); done(); @@ -35,7 +35,7 @@ describe('Hooks', () => { }); }); - it_exclude_dbs(['postgres'])("should CRUD a function registration", (done) => { + it("should CRUD a function registration", (done) => { // Create Parse.Hooks.createFunction("My-Test-Function", "http://someurl") .then(response => { @@ -76,7 +76,7 @@ describe('Hooks', () => { }) }); - it_exclude_dbs(['postgres'])("should CRUD a trigger registration", (done) => { + it("should CRUD a trigger registration", (done) => { // Create Parse.Hooks.createTrigger("MyClass","beforeDelete", "http://someurl").then((res) => { expect(res.className).toBe("MyClass"); @@ -142,7 +142,7 @@ describe('Hooks', () => { }) }); - it_exclude_dbs(['postgres'])("should fail trying to create two times the same function", (done) => { + it("should fail trying to create two times the same function", (done) => { Parse.Hooks.createFunction("my_new_function", "http://url.com").then( () => { return Parse.Hooks.createFunction("my_new_function", "http://url.com") }, () => { @@ -165,7 +165,7 @@ describe('Hooks', () => { }) }); - it_exclude_dbs(['postgres'])("should fail trying to create two times the same trigger", (done) => { + it("should fail trying to create two times the same trigger", (done) => { Parse.Hooks.createTrigger("MyClass", "beforeSave", "http://url.com").then( () => { return Parse.Hooks.createTrigger("MyClass", "beforeSave", "http://url.com") }, () => { @@ -188,7 +188,7 @@ describe('Hooks', () => { }) }); - it_exclude_dbs(['postgres'])("should fail trying to update a function that don't exist", (done) => { + it("should fail trying to update a function that don't exist", (done) => { Parse.Hooks.updateFunction("A_COOL_FUNCTION", "http://url.com").then( () => { fail("Should not succeed") }, (err) => { @@ -213,7 +213,7 @@ describe('Hooks', () => { }); }); - it_exclude_dbs(['postgres'])("should fail 
trying to update a trigger that don't exist", (done) => { + it("should fail trying to update a trigger that don't exist", (done) => { Parse.Hooks.updateTrigger("AClassName","beforeSave", "http://url.com").then( () => { fail("Should not succeed") }, (err) => { @@ -269,7 +269,7 @@ describe('Hooks', () => { }); - it_exclude_dbs(['postgres'])("should create hooks and properly preload them", (done) => { + it("should create hooks and properly preload them", (done) => { var promises = []; for (var i = 0; i<5; i++) { @@ -304,7 +304,7 @@ describe('Hooks', () => { }) }); - it_exclude_dbs(['postgres'])("should run the function on the test server", (done) => { + it("should run the function on the test server", (done) => { app.post("/SomeFunction", function(req, res) { res.json({success:"OK!"}); @@ -326,7 +326,7 @@ describe('Hooks', () => { }); }); - it_exclude_dbs(['postgres'])("should run the function on the test server", (done) => { + it("should run the function on the test server", (done) => { app.post("/SomeFunctionError", function(req, res) { res.json({error: {code: 1337, error: "hacking that one!"}}); @@ -353,7 +353,7 @@ describe('Hooks', () => { }); }); - it_exclude_dbs(['postgres'])("should provide X-Parse-Webhook-Key when defined", (done) => { + it("should provide X-Parse-Webhook-Key when defined", (done) => { app.post("/ExpectingKey", function(req, res) { if (req.get('X-Parse-Webhook-Key') === 'hook') { res.json({success: "correct key provided"}); @@ -378,7 +378,7 @@ describe('Hooks', () => { }); }); - it_exclude_dbs(['postgres'])("should not pass X-Parse-Webhook-Key if not provided", (done) => { + it("should not pass X-Parse-Webhook-Key if not provided", (done) => { reconfigureServer({ webhookKey: undefined }) .then(() => { app.post("/ExpectingKeyAlso", function(req, res) { @@ -411,7 +411,7 @@ describe('Hooks', () => { }); - it_exclude_dbs(['postgres'])("should run the beforeSave hook on the test server", (done) => { + it("should run the beforeSave hook on the test server", (done) => { var triggerCount = 0; app.post("/BeforeSaveSome", function(req, res) { triggerCount++; @@ -438,7 +438,7 @@ describe('Hooks', () => { }); }); - it_exclude_dbs(['postgres'])("beforeSave hooks should correctly handle responses containing entire object", (done) => { + it("beforeSave hooks should correctly handle responses containing entire object", (done) => { app.post("/BeforeSaveSome2", function(req, res) { var object = Parse.Object.fromJSON(req.body.object); object.set('hello', "world"); @@ -458,7 +458,7 @@ describe('Hooks', () => { }); }); - it_exclude_dbs(['postgres'])("should run the afterSave hook on the test server", (done) => { + it("should run the afterSave hook on the test server", (done) => { var triggerCount = 0; var newObjectId; app.post("/AfterSaveSome", function(req, res) { diff --git a/spec/ParseObject.spec.js b/spec/ParseObject.spec.js index 0a281557ba..888b3cf77b 100644 --- a/spec/ParseObject.spec.js +++ b/spec/ParseObject.spec.js @@ -591,14 +591,19 @@ describe('Parse.Object testing', () => { var objectId = x1.id; var x2 = new Parse.Object('X', {objectId: objectId}); x2.addUnique('stuff', 2); - x2.addUnique('stuff', 3); - expect(x2.get('stuff')).toEqual([2, 3]); + x2.addUnique('stuff', 4); + expect(x2.get('stuff')).toEqual([2, 4]); return x2.save(); }).then(() => { var query = new Parse.Query('X'); return query.get(x1.id); }).then((x3) => { - expect(x3.get('stuff')).toEqual([1, 2, 3]); + let stuff = x3.get('stuff'); + let expected = [1, 2, 4]; + expect(stuff.length).toBe(expected.length); + for 
(var i of stuff) { + expect(expected.indexOf(i) >= 0).toBe(true); + } done(); }, (error) => { on_db('mongo', () => { @@ -625,15 +630,21 @@ describe('Parse.Object testing', () => { var query = new Parse.Query('X'); return query.get(x1.id); }).then((x3) => { - expect(x3.get('stuff')).toEqual([1, {'hello': 'world'}, {'foo': 'bar'}, {'bar': 'baz'}]); + let stuff = x3.get('stuff'); + let target = [1, {'hello': 'world'}, {'foo': 'bar'}, {'bar': 'baz'}]; + expect(stuff.length).toEqual(target.length); + let found = 0; + for (let thing in target) { + for (let st in stuff) { + if (st == thing) { + found++; + } + } + } + expect(found).toBe(target.length); done(); }, (error) => { - on_db('mongo', () => { - jfail(error); - }); - on_db('postgres', () => { - expect(error.message).toEqual("Postgres does not support AddUnique operator."); - }); + jfail(error); done(); }); }); @@ -654,6 +665,7 @@ describe('Parse.Object testing', () => { expect(x3.get('stuff')).toEqual([1, {'foo': 'bar'}]); done(); }, (error) => { + console.error(error); on_db('mongo', () => { jfail(error); }); diff --git a/spec/ParseQuery.spec.js b/spec/ParseQuery.spec.js index e917510713..2d6cbc857a 100644 --- a/spec/ParseQuery.spec.js +++ b/spec/ParseQuery.spec.js @@ -185,7 +185,7 @@ describe('Parse.Query testing', () => { }); }); - it_exclude_dbs(['postgres'])("containsAll number array queries", function(done) { + it("containsAll number array queries", function(done) { var NumberSet = Parse.Object.extend({ className: "NumberSet" }); var objectsList = []; @@ -211,7 +211,7 @@ describe('Parse.Query testing', () => { }); }); - it_exclude_dbs(['postgres'])("containsAll string array queries", function(done) { + it("containsAll string array queries", function(done) { var StringSet = Parse.Object.extend({ className: "StringSet" }); var objectsList = []; @@ -872,7 +872,7 @@ describe('Parse.Query testing', () => { }); }); - it("order by descending number and string", function(done) { + it_exclude_dbs(['postgres'])("order by descending number and string", function(done) { var strings = ["a", "b", "c", "d"]; var makeBoxedNumber = function(num, i) { return new BoxedNumber({ number: num, string: strings[i] }); diff --git a/spec/ParseRelation.spec.js b/spec/ParseRelation.spec.js index 9decbf1cb4..054ac86e13 100644 --- a/spec/ParseRelation.spec.js +++ b/spec/ParseRelation.spec.js @@ -331,6 +331,9 @@ describe('Parse.Relation testing', () => { done(); }); }); + }).catch(err => { + jfail(err); + done(); }); }); diff --git a/spec/ParseRole.spec.js b/spec/ParseRole.spec.js index 0ca2d124ed..16e4adc832 100644 --- a/spec/ParseRole.spec.js +++ b/spec/ParseRole.spec.js @@ -76,7 +76,7 @@ describe('Parse Role testing', () => { return role.save({}, { useMasterKey: true }); }; - it("should not recursively load the same role multiple times", (done) => { + it_exclude_dbs(['postgres'])("should not recursively load the same role multiple times", (done) => { var rootRole = "RootRole"; var roleNames = ["FooRole", "BarRole", "BazRole"]; var allRoles = [rootRole].concat(roleNames); diff --git a/spec/ParseUser.spec.js b/spec/ParseUser.spec.js index 838d334c2d..829d12158d 100644 --- a/spec/ParseUser.spec.js +++ b/spec/ParseUser.spec.js @@ -137,7 +137,7 @@ describe('Parse.User testing', () => { }) }); - it_exclude_dbs(['postgres'])("user login with files", (done) => { + it("user login with files", (done) => { let file = new Parse.File("yolo.txt", [1,2,3], "text/plain"); file.save().then((file) => { return Parse.User.signUp("asdf", "zxcv", { "file" : file }); @@ -1118,7 +1118,7 
@@ describe('Parse.User testing', () => { }); }); - it_exclude_dbs(['postgres'])('log in with provider with files', done => { + it('log in with provider with files', done => { let provider = getMockFacebookProvider(); Parse.User._registerAuthenticationProvider(provider); let file = new Parse.File("yolo.txt", [1, 2, 3], "text/plain"); @@ -1468,7 +1468,7 @@ describe('Parse.User testing', () => { }); }); - it_exclude_dbs(['postgres'])("link multiple providers", (done) => { + it("link multiple providers", (done) => { var provider = getMockFacebookProvider(); var mockProvider = getMockMyOauthProvider(); Parse.User._registerAuthenticationProvider(provider); @@ -1504,7 +1504,7 @@ describe('Parse.User testing', () => { }); }); - it_exclude_dbs(['postgres'])("link multiple providers and updates token", (done) => { + it("link multiple providers and updates token", (done) => { var provider = getMockFacebookProvider(); var secondProvider = getMockFacebookProviderWithIdToken('8675309', 'jenny_valid_token'); @@ -1545,7 +1545,7 @@ describe('Parse.User testing', () => { }); }); - it_exclude_dbs(['postgres'])("link multiple providers and update token", (done) => { + it("link multiple providers and update token", (done) => { var provider = getMockFacebookProvider(); var mockProvider = getMockMyOauthProvider(); Parse.User._registerAuthenticationProvider(provider); @@ -1820,7 +1820,7 @@ describe('Parse.User testing', () => { }); }); - xit("querying for users doesn't get session tokens", (done) => { + it("querying for users doesn't get session tokens", (done) => { Parse.Promise.as().then(function() { return Parse.User.signUp("finn", "human", { foo: "bar" }); diff --git a/spec/PurchaseValidation.spec.js b/spec/PurchaseValidation.spec.js index 6574534a1c..c1a2f39402 100644 --- a/spec/PurchaseValidation.spec.js +++ b/spec/PurchaseValidation.spec.js @@ -26,7 +26,7 @@ describe("test validate_receipt endpoint", () => { }); }) - it_exclude_dbs(['postgres'])("should bypass appstore validation", (done) => { + it("should bypass appstore validation", (done) => { request.post({ headers: { @@ -170,7 +170,7 @@ describe("test validate_receipt endpoint", () => { }) }); - it_exclude_dbs(['postgres'])("should be able to update a _Product", (done) => { + it("should be able to update a _Product", (done) => { var query = new Parse.Query("_Product"); query.first().then(function(product) { if (!product) { @@ -188,7 +188,7 @@ describe("test validate_receipt endpoint", () => { }); }); - it_exclude_dbs(['postgres'])("should not be able to remove a require key in a _Product", (done) => { + it("should not be able to remove a require key in a _Product", (done) => { var query = new Parse.Query("_Product"); query.first().then(function(product){ if (!product) { diff --git a/spec/Schema.spec.js b/spec/Schema.spec.js index 8148541953..551d974c6e 100644 --- a/spec/Schema.spec.js +++ b/spec/Schema.spec.js @@ -211,7 +211,7 @@ describe('SchemaController', () => { }); }); - it_exclude_dbs(['postgres'])('will resolve class creation races appropriately', done => { + it('will resolve class creation races appropriately', done => { // If two callers race to create the same schema, the response to the // race loser should be the same as if they hadn't been racing. 
config.database.loadSchema() @@ -617,7 +617,7 @@ describe('SchemaController', () => { }); }); - it_exclude_dbs(['postgres'])('refuses to delete fields that dont exist', done => { + it('refuses to delete fields that dont exist', done => { hasAllPODobject().save() .then(() => config.database.loadSchema()) .then(schema => schema.deleteField('missingField', 'HasAllPOD')) @@ -628,7 +628,7 @@ describe('SchemaController', () => { }); }); - it_exclude_dbs(['postgres'])('drops related collection when deleting relation field', done => { + it('drops related collection when deleting relation field', done => { var obj1 = hasAllPODobject(); obj1.save() .then(savedObj1 => { @@ -659,7 +659,7 @@ describe('SchemaController', () => { }); }); - it_exclude_dbs(['postgres'])('can delete relation field when related _Join collection not exist', done => { + it('can delete relation field when related _Join collection not exist', done => { config.database.loadSchema() .then(schema => { schema.addClassIfNotExists('NewClass', { @@ -688,7 +688,13 @@ describe('SchemaController', () => { }) .then(() => config.database.collectionExists('_Join:relationField:NewClass')) .then(exist => { - expect(exist).toEqual(false); + on_db('postgres', () => { + // We create the table when creating the column + expect(exist).toEqual(true); + }, () => { + expect(exist).toEqual(false); + }); + }) .then(() => schema.deleteField('relationField', 'NewClass', config.database)) .then(() => schema.reloadData()) @@ -705,7 +711,7 @@ describe('SchemaController', () => { }); }); - it_exclude_dbs(['postgres'])('can delete string fields and resave as number field', done => { + it('can delete string fields and resave as number field', done => { Parse.Object.disableSingleInstance(); var obj1 = hasAllPODobject(); var obj2 = hasAllPODobject(); @@ -733,7 +739,7 @@ describe('SchemaController', () => { }); }); - it_exclude_dbs(['postgres'])('can delete pointer fields and resave as string', done => { + it('can delete pointer fields and resave as string', done => { Parse.Object.disableSingleInstance(); var obj1 = new Parse.Object('NewClass'); obj1.save() diff --git a/spec/ValidationAndPasswordsReset.spec.js b/spec/ValidationAndPasswordsReset.spec.js index fc4c329983..e959f493de 100644 --- a/spec/ValidationAndPasswordsReset.spec.js +++ b/spec/ValidationAndPasswordsReset.spec.js @@ -187,7 +187,7 @@ describe("Custom Pages, Email Verification, Password Reset", () => { }); }); - it_exclude_dbs(['postgres'])('does send with a simple adapter', done => { + it('does send with a simple adapter', done => { var calls = 0; var emailAdapter = { sendMail: function(options){ @@ -675,7 +675,7 @@ describe("Custom Pages, Email Verification, Password Reset", () => { }); }); - it_exclude_dbs(['postgres'])('should send a password reset link', done => { + it('should send a password reset link', done => { var user = new Parse.User(); var emailAdapter = { sendVerificationEmail: () => Promise.resolve(), @@ -740,7 +740,7 @@ describe("Custom Pages, Email Verification, Password Reset", () => { }); }); - it_exclude_dbs(['postgres'])('should programatically reset password', done => { + it('should programatically reset password', done => { var user = new Parse.User(); var emailAdapter = { sendVerificationEmail: () => Promise.resolve(), diff --git a/spec/helper.js b/spec/helper.js index bfec460836..a42f324ec2 100644 --- a/spec/helper.js +++ b/spec/helper.js @@ -3,6 +3,17 @@ jasmine.DEFAULT_TIMEOUT_INTERVAL = process.env.PARSE_SERVER_TEST_TIMEOUT || 5000; +global.on_db = (db, callback, 
elseCallback) => { + if (process.env.PARSE_SERVER_TEST_DB == db) { + return callback(); + } else if (!process.env.PARSE_SERVER_TEST_DB && db == 'mongo') { + return callback(); + } + if (elseCallback) { + elseCallback(); + } +} + var cache = require('../src/cache').default; var express = require('express'); var facebook = require('../src/authDataManager/facebook'); @@ -11,6 +22,7 @@ var path = require('path'); var TestUtils = require('../src/TestUtils'); var MongoStorageAdapter = require('../src/Adapters/Storage/Mongo/MongoStorageAdapter'); const GridStoreAdapter = require('../src/Adapters/Files/GridStoreAdapter').GridStoreAdapter; +const FSAdapter = require('parse-server-fs-adapter'); const PostgresStorageAdapter = require('../src/Adapters/Storage/Postgres/PostgresStorageAdapter'); const mongoURI = 'mongodb://localhost:27017/parseServerMongoAdapterTestDatabase'; @@ -40,7 +52,14 @@ if (process.env.PARSE_SERVER_TEST_DB === 'postgres') { var port = 8378; -let gridStoreAdapter = new GridStoreAdapter(mongoURI); +let filesAdapter; + +on_db('mongo', () => { + filesAdapter = new GridStoreAdapter(mongoURI); +}, () => { + filesAdapter = new FSAdapter(); +}); + let logLevel; let silent = true; if (process.env.VERBOSE) { @@ -53,7 +72,7 @@ if (process.env.PARSE_SERVER_LOG_LEVEL) { } // Default server configuration for tests. var defaultConfiguration = { - filesAdapter: gridStoreAdapter, + filesAdapter, serverURL: 'http://localhost:' + port + '/1', databaseAdapter, appId: 'test', @@ -383,16 +402,6 @@ global.describe_only_db = db => { } } -global.on_db = (db, callback, elseCallback) => { - if (process.env.PARSE_SERVER_TEST_DB == db) { - return callback(); - } else if (!process.env.PARSE_SERVER_TEST_DB && db == 'mongo') { - return callback(); - } - if (elseCallback) { - elseCallback(); - } -} var libraryCache = {}; jasmine.mockLibrary = function(library, name, mock) { diff --git a/spec/schemas.spec.js b/spec/schemas.spec.js index 20f9319c1b..9f53307817 100644 --- a/spec/schemas.spec.js +++ b/spec/schemas.spec.js @@ -170,7 +170,7 @@ describe('schemas', () => { }); }); - it_exclude_dbs(['postgres'])('responds with a list of schemas after creating objects', done => { + it('responds with a list of schemas after creating objects', done => { var obj1 = hasAllPODobject(); obj1.save().then(savedObj1 => { var obj2 = new Parse.Object('HasPointersAndRelations'); @@ -193,7 +193,7 @@ describe('schemas', () => { }); }); - it_exclude_dbs(['postgres'])('responds with a single schema', done => { + it('responds with a single schema', done => { var obj = hasAllPODobject(); obj.save().then(() => { request.get({ @@ -207,7 +207,7 @@ describe('schemas', () => { }); }); - it_exclude_dbs(['postgres'])('treats class names case sensitively', done => { + it('treats class names case sensitively', done => { var obj = hasAllPODobject(); obj.save().then(() => { request.get({ @@ -462,7 +462,7 @@ describe('schemas', () => { }); }); - it_exclude_dbs(['postgres'])('refuses to put to existing fields, even if it would not be a change', done => { + it('refuses to put to existing fields, even if it would not be a change', done => { var obj = hasAllPODobject(); obj.save() .then(() => { @@ -484,7 +484,7 @@ describe('schemas', () => { }) }); - it_exclude_dbs(['postgres'])('refuses to delete non-existent fields', done => { + it('refuses to delete non-existent fields', done => { var obj = hasAllPODobject(); obj.save() .then(() => { @@ -506,7 +506,7 @@ describe('schemas', () => { }); }); - it_exclude_dbs(['postgres'])('refuses to add a geopoint to a 
class that already has one', done => { + it('refuses to add a geopoint to a class that already has one', done => { var obj = hasAllPODobject(); obj.save() .then(() => { @@ -552,7 +552,7 @@ describe('schemas', () => { }); }); - it_exclude_dbs(['postgres'])('allows you to delete and add a geopoint in the same request', done => { + it('allows you to delete and add a geopoint in the same request', done => { var obj = new Parse.Object('NewClass'); obj.set('geo1', new Parse.GeoPoint({latitude: 0, longitude: 0})); obj.save() @@ -584,7 +584,7 @@ describe('schemas', () => { }) }); - it_exclude_dbs(['postgres'])('put with no modifications returns all fields', done => { + it('put with no modifications returns all fields', done => { var obj = hasAllPODobject(); obj.save() .then(() => { @@ -757,7 +757,7 @@ describe('schemas', () => { }); }); - it_exclude_dbs(['postgres'])('will not delete any fields if the additions are invalid', done => { + it('will not delete any fields if the additions are invalid', done => { var obj = hasAllPODobject(); obj.save() .then(() => { @@ -798,7 +798,7 @@ describe('schemas', () => { }); }); - it_exclude_dbs(['postgres'])('refuses to delete non-empty collection', done => { + it('refuses to delete non-empty collection', done => { var obj = hasAllPODobject(); obj.save() .then(() => { @@ -829,7 +829,7 @@ describe('schemas', () => { }) }); - it_exclude_dbs(['postgres'])('does not fail when deleting nonexistant collections', done => { + it('does not fail when deleting nonexistant collections', done => { request.del({ url: 'http://localhost:8378/1/schemas/Missing', headers: masterKeyHeaders, @@ -841,7 +841,7 @@ describe('schemas', () => { }); }); - it_exclude_dbs(['postgres'])('deletes collections including join tables', done => { + it('deletes collections including join tables', done => { var obj = new Parse.Object('MyClass'); obj.set('data', 'data'); obj.save() @@ -892,7 +892,7 @@ describe('schemas', () => { }); }); - it_exclude_dbs(['postgres'])('deletes schema when actual collection does not exist', done => { + it('deletes schema when actual collection does not exist', done => { request.post({ url: 'http://localhost:8378/1/schemas/NewClassForDelete', headers: masterKeyHeaders, @@ -920,7 +920,7 @@ describe('schemas', () => { }); }); - it_exclude_dbs(['postgres'])('deletes schema when actual collection exists', done => { + it('deletes schema when actual collection exists', done => { request.post({ url: 'http://localhost:8378/1/schemas/NewClassForDelete', headers: masterKeyHeaders, @@ -1582,7 +1582,7 @@ describe('schemas', () => { }) }) - it_exclude_dbs(['postgres'])('gives correct response when deleting a schema with CLPs (regression test #1919)', done => { + it('gives correct response when deleting a schema with CLPs (regression test #1919)', done => { new Parse.Object('MyClass').save({ data: 'foo'}) .then(obj => obj.destroy()) .then(() => setPermissionsOnClass('MyClass', { find: {}, get: {} }, true)) diff --git a/src/Adapters/Storage/Postgres/PostgresStorageAdapter.js b/src/Adapters/Storage/Postgres/PostgresStorageAdapter.js index 2a4590d19c..b9acedb2ad 100644 --- a/src/Adapters/Storage/Postgres/PostgresStorageAdapter.js +++ b/src/Adapters/Storage/Postgres/PostgresStorageAdapter.js @@ -18,7 +18,7 @@ const parseTypeToPostgresType = type => { case 'String': return 'text'; case 'Date': return 'timestamp with time zone'; case 'Object': return 'jsonb'; - case 'File': return 'jsonb'; + case 'File': return 'text'; case 'Boolean': return 'boolean'; case 'Pointer': return 'char(10)'; 
case 'Number': return 'double precision'; @@ -45,6 +45,9 @@ const toPostgresValue = value => { if (value.__type === 'Date') { return value.iso; } + if (value.__type === 'File') { + return value.name; + } } return value; } @@ -107,6 +110,19 @@ const toPostgresSchema = (schema) => { return schema; } +// Returns the list of join tables on a schema +const joinTablesForSchema = (schema) => { + let list = []; + if (schema) { + Object.keys(schema.fields).forEach((field) => { + if (schema.fields[field].type === 'Relation') { + list.push(`_Join:${field}:${schema.className}`); + } + }); + } + return list; +} + const buildWhereClause = ({ schema, query, index }) => { let patterns = []; let values = []; @@ -214,6 +230,26 @@ const buildWhereClause = ({ schema, query, index }) => { } } + if (Array.isArray(fieldValue.$all) && schema.fields[fieldName].type === 'Array') { + let inPatterns = []; + let allowNull = false; + values.push(fieldName); + fieldValue.$all.forEach((listElem, listIndex) => { + if (listElem === null ) { + allowNull = true; + } else { + values.push(listElem); + inPatterns.push(`$${index + 1 + listIndex - (allowNull ? 1 : 0)}`); + } + }); + if (allowNull) { + patterns.push(`($${index}:name IS NULL OR $${index}:name @> array_to_json(ARRAY[${inPatterns.join(',')}]))::jsonb`); + } else { + patterns.push(`$${index}:name @> json_build_array(${inPatterns.join(',')})::jsonb`); + } + index = index + 1 + inPatterns.length; + } + if (typeof fieldValue.$exists !== 'undefined') { if (fieldValue.$exists) { patterns.push(`$${index}:name IS NOT NULL`); @@ -302,25 +338,30 @@ export class PostgresStorageAdapter { }; classExists(name) { - return notImplemented(); + return this._client.one(`SELECT EXISTS (SELECT 1 FROM information_schema.tables WHERE table_name = $1)`, [name]).then((res) => { + return res.exists; + }); } setClassLevelPermissions(className, CLPs) { return this._ensureSchemaCollectionExists().then(() => { const values = [className, 'schema', 'classLevelPermissions', CLPs] return this._client.none(`UPDATE "_SCHEMA" SET $2:name = json_object_set_key($2:name, $3::text, $4::jsonb) WHERE "className"=$1 `, values); - }).catch((err) => { - console.error("ERR!!!", err); - return Promise.reject(err); - }) + }); } createClass(className, schema) { return this.createTable(className, schema) .then(() => this._client.none('INSERT INTO "_SCHEMA" ("className", "schema", "isParseClass") VALUES ($, $, true)', { className, schema })) - .then(() => { + .then(() => { return toParseSchema(schema) - }); + }) + .catch((err) => { + if (err.code === PostgresUniqueIndexViolationError && err.detail.includes(className)) { + throw new Parse.Error(Parse.Error.INVALID_CLASS_NAME, `Class ${className} already exists.`) + } + throw err; + }) } // Just create a table, do not insert in schema @@ -332,6 +373,7 @@ export class PostgresStorageAdapter { if (className === '_User') { fields._email_verify_token_expires_at = {type: 'Date'}; fields._email_verify_token = {type: 'String'}; + fields._perishable_token = {type: 'String'}; } let index = 2; let relations = []; @@ -349,9 +391,12 @@ export class PostgresStorageAdapter { valuesArray.push(fieldName); valuesArray.push(parseTypeToPostgresType(parseType)); patternsArray.push(`$${index}:name $${index+1}:raw`); + if (fieldName === 'objectId') { + patternsArray.push(`PRIMARY KEY ($${index}:name)`) + } index = index+2; }); - const qs = `CREATE TABLE $1:name (${patternsArray.join(',')}, PRIMARY KEY ("objectId"))`; + const qs = `CREATE TABLE $1:name (${patternsArray.join(',')})`; const 
values = [className, ...valuesArray]; return this._ensureSchemaCollectionExists() .then(() => this._client.none(qs, values)) @@ -364,7 +409,7 @@ export class PostgresStorageAdapter { }).then(() => { // Create the relation tables return Promise.all(relations.map((fieldName) => { - return this._client.none('CREATE TABLE IF NOT EXISTS $ ("relatedId" varChar(120), "owningId" varChar(120), PRIMARY KEY("relatedId", "owningId") )', {joinTable: `_Join:${fieldName}:${className}`}) + return this._client.none('CREATE TABLE IF NOT EXISTS $ ("relatedId" varChar(120), "owningId" varChar(120), PRIMARY KEY("relatedId", "owningId") )', {joinTable: `_Join:${fieldName}:${className}`}); })); }); } @@ -396,7 +441,7 @@ export class PostgresStorageAdapter { return promise.then(() => { return t.any('SELECT "schema" FROM "_SCHEMA" WHERE "className" = $', {className}); }).then(result => { - if (fieldName in result[0].schema) { + if (fieldName in result[0].schema.fields) { throw "Attempted to add a field that already exists"; } else { result[0].schema.fields[fieldName] = type; @@ -412,7 +457,14 @@ export class PostgresStorageAdapter { // Drops a collection. Resolves with true if it was a Parse Schema (eg. _User, Custom, etc.) // and resolves with false if it wasn't (eg. a join table). Rejects if deletion was impossible. deleteClass(className) { - return notImplemented(); + return Promise.resolve().then(() => { + let operations = [[`DROP TABLE IF EXISTS $1:name`, [className]], + [`DELETE FROM "_SCHEMA" WHERE "className"=$1`, [className]]]; + return this._client.tx(t=>t.batch(operations.map(statement=>t.none(statement[0], statement[1])))); + }).then(() => { + // resolves with false when _Join table + return className.indexOf('_Join:') != 0; + }); } // Delete all data known to this adapter. Used for testing. @@ -422,12 +474,7 @@ export class PostgresStorageAdapter { return this._client.any('SELECT * FROM "_SCHEMA"') .then(results => { let joins = results.reduce((list, schema) => { - Object.keys(schema.schema.fields).forEach((field) => { - if (schema.schema.fields[field].type === 'Relation') { - list.push(`_Join:${field}:${schema.className}`); - } - }) - return list; + return list.concat(joinTablesForSchema(schema.schema)); }, []); const classes = ['_SCHEMA','_PushStatus','_Hooks','_GlobalConfig', ...results.map(result => result.className), ...joins]; return this._client.tx(t=>t.batch(classes.map(className=>t.none('DROP TABLE IF EXISTS $', { className })))); @@ -457,7 +504,36 @@ export class PostgresStorageAdapter { // Returns a Promise. deleteFields(className, schema, fieldNames) { - return notImplemented(); + debug('deleteFields', className, fieldNames); + return Promise.resolve() + .then(() => { + fieldNames = fieldNames.reduce((list, fieldName) => { + let field = schema.fields[fieldName] + if (field.type !== 'Relation') { + list.push(fieldName); + } + delete schema.fields[fieldName]; + return list; + }, []); + + let values = [className, ...fieldNames]; + let columns = fieldNames.map((name, idx) => { + return `$${idx+2}:name`; + }).join(','); + + let doBatch = (t) => { + let batch = [ + t.none('UPDATE "_SCHEMA" SET "schema"=$ WHERE "className"=$', {schema, className}) + ]; + if (values.length > 1) { + batch.push(t.none(`ALTER TABLE $1:name DROP COLUMN ${columns}`, values)); + } + return batch; + } + return this._client.tx((t) => { + return t.batch(doBatch(t)); + }); + }); } // Return a promise for all schemas known to this adapter, in Parse format. 
In case the @@ -473,6 +549,7 @@ export class PostgresStorageAdapter { // this adapter doesn't know about the schema, return a promise that rejects with // undefined as the reason. getClass(className) { + debug('getClass', className); return this._client.any('SELECT * FROM "_SCHEMA" WHERE "className"=$', { className }) .then(result => { if (result.length === 1) { @@ -509,6 +586,9 @@ export class PostgresStorageAdapter { if (fieldName == '_email_verify_token_expires_at') { valuesArray.push(object[fieldName].iso); } + if (fieldName == '_perishable_token') { + valuesArray.push(object[fieldName].iso); + } return; } switch (schema.fields[fieldName].type) { @@ -529,9 +609,11 @@ export class PostgresStorageAdapter { case 'String': case 'Number': case 'Boolean': - case 'File': valuesArray.push(object[fieldName]); break; + case 'File': + valuesArray.push(object[fieldName].name); + break; case 'GeoPoint': // pop the point and process later geoPoints[fieldName] = object[fieldName]; @@ -544,8 +626,16 @@ export class PostgresStorageAdapter { }); columnsArray = columnsArray.concat(Object.keys(geoPoints)); - let initialValues = valuesArray.map((val, index) => `$${index + 2 + columnsArray.length}${(['_rperm','_wperm'].includes(columnsArray[index])) ? '::text[]' : ''}`); - + let initialValues = valuesArray.map((val, index) => { + let termination = ''; + let fieldName = columnsArray[index]; + if (['_rperm','_wperm'].includes(fieldName)) { + termination = '::text[]'; + } else if (schema.fields[fieldName] && schema.fields[fieldName].type === 'Array') { + termination = '::jsonb'; + } + return `$${index + 2 + columnsArray.length}${termination}`; + }); let geoPointsInjects = Object.keys(geoPoints).map((key, idx) => { let value = geoPoints[key]; valuesArray.push(value.latitude, value.longitude); @@ -608,33 +698,58 @@ export class PostgresStorageAdapter { let values = [className] let index = 2; schema = toPostgresSchema(schema); + // Resolve authData first, + // So we don't end up with multiple key updates for (let fieldName in update) { - let fieldValue = update[fieldName]; - var authDataMatch = fieldName.match(/^_auth_data_([a-zA-Z0-9_]+)$/); + let authDataMatch = fieldName.match(/^_auth_data_([a-zA-Z0-9_]+)$/); if (authDataMatch) { var provider = authDataMatch[1]; let value = update[fieldName]; delete update[fieldName]; - fieldName = 'authData'; - updatePatterns.push(`$${index}:name = json_object_set_key($${index}:name, $${index+1}::text, $${index+2}::jsonb)`); - values.push(fieldName, provider, value); - index += 3; + update['authData'] = update['authData'] || {}; + update['authData'][provider] = value; + } + } + + for (let fieldName in update) { + let fieldValue = update[fieldName]; + if (fieldName == 'authData') { + // This recursively sets the json_object + // Only 1 level deep + let generate = (jsonb, key, value) => { + return `json_object_set_key(${jsonb}, ${key}, ${value})::jsonb`;  + } + let lastKey = `$${index}:name`; + let fieldNameIndex = index; + index+=1; + values.push(fieldName); + let update = Object.keys(fieldValue).reduce((lastKey, key) => { + let str = generate(lastKey, `$${index}::text`, `$${index+1}::jsonb`) + index+=2; + values.push(key, fieldValue[key]); + return str; + }, lastKey); + updatePatterns.push(`$${fieldNameIndex}:name = ${update}`); } else if (fieldValue.__op === 'Increment') { updatePatterns.push(`$${index}:name = COALESCE($${index}:name, 0) + $${index + 1}`); values.push(fieldName, fieldValue.amount); index += 2; } else if (fieldValue.__op === 'Add') { - 
updatePatterns.push(`$${index}:name = COALESCE($${index}:name, '[]'::jsonb) || $${index + 1}`); - values.push(fieldName, fieldValue.objects); + updatePatterns.push(`$${index}:name = array_add(COALESCE($${index}:name, '[]'::jsonb), $${index + 1}::jsonb)`); + values.push(fieldName, JSON.stringify(fieldValue.objects)); index += 2; } else if (fieldValue.__op === 'Delete') { updatePatterns.push(`$${index}:name = $${index + 1}`) values.push(fieldName, null); index += 2; } else if (fieldValue.__op === 'Remove') { - return Promise.reject(new Parse.Error(Parse.Error.OPERATION_FORBIDDEN, 'Postgres does not support Remove operator.')); + updatePatterns.push(`$${index}:name = array_remove(COALESCE($${index}:name, '[]'::jsonb), $${index + 1}::jsonb)`) + values.push(fieldName, JSON.stringify(fieldValue.objects)); + index += 2; } else if (fieldValue.__op === 'AddUnique') { - return Promise.reject(new Parse.Error(Parse.Error.OPERATION_FORBIDDEN, 'Postgres does not support AddUnique operator.')); + updatePatterns.push(`$${index}:name = array_add_unique(COALESCE($${index}:name, '[]'::jsonb), $${index + 1}::jsonb)`); + values.push(fieldName, JSON.stringify(fieldValue.objects)); + index += 2; } else if (fieldName === 'updatedAt') { //TODO: stop special casing this. It should check for __type === 'Date' and use .iso updatePatterns.push(`$${index}:name = $${index + 1}`) values.push(fieldName, fieldValue); @@ -655,6 +770,10 @@ export class PostgresStorageAdapter { updatePatterns.push(`$${index}:name = $${index + 1}`); values.push(fieldName, toPostgresValue(fieldValue)); index += 2; + } else if (fieldValue.__type === 'File') { + updatePatterns.push(`$${index}:name = $${index + 1}`); + values.push(fieldName, toPostgresValue(fieldValue)); + index += 2; } else if (fieldValue.__type === 'GeoPoint') { updatePatterns.push(`$${index}:name = POINT($${index + 1}, $${index + 2})`); values.push(fieldName, fieldValue.latitude, fieldValue.longitude); @@ -767,6 +886,12 @@ export class PostgresStorageAdapter { longitude: object[fieldName].y } } + if (object[fieldName] && schema.fields[fieldName].type === 'File') { + object[fieldName] = { + __type: 'File', + name: object[fieldName] + } + } }); //TODO: remove this reliance on the mongo format. DB adapter shouldn't know there is a difference between created at and any other date field. if (object.createdAt) { @@ -821,26 +946,47 @@ export class PostgresStorageAdapter { // Executes a count. count(className, schema, query) { + debug('count', className, query); let values = [className]; let where = buildWhereClause({ schema, query, index: 2 }); values.push(...where.values); const wherePattern = where.pattern.length > 0 ? 
`WHERE ${where.pattern}` : ''; const qs = `SELECT count(*) FROM $1:name ${wherePattern}`; - return this._client.one(qs, values, a => +a.count); + return this._client.one(qs, values, a => +a.count).catch((err) => { + if (err.code === PostgresRelationDoesNotExistError) { + return 0; + } + throw err; + }); } performInitialization({ VolatileClassesSchemas }) { let now = new Date().getTime(); debug('performInitialization'); let promises = VolatileClassesSchemas.map((schema) => { - return this.createTable(schema.className, schema); + return this.createTable(schema.className, schema).catch((err) =>{ + if (err.code === PostgresDuplicateRelationError || err.code == Parse.Error.INVALID_CLASS_NAME) { + return Promise.resolve(); + } + throw err; + }); }); - return Promise.all(promises).then(() => { - return this._client.any(json_object_set_key).catch((err) => { - console.error(err); - }) + return Promise.all([ + this._client.any(json_object_set_key).catch((err) => { + console.error(err); + }), + this._client.any(array_add).catch((err) => { + console.error(err); + }), + this._client.any(array_add_unique).catch((err) => { + console.error(err); + }), + this._client.any(array_remove).catch((err) => { + console.error(err); + }) + ]); }).then(() => { debug(`initialzationDone in ${new Date().getTime() - now}`); }) @@ -870,5 +1016,41 @@ SELECT concat(\'{\', string_agg(to_json("key") || \':\' || "value", \',\'), \'}\ SELECT "key_to_set", to_json("value_to_set")::jsonb) AS "fields"\ $function$;' +const array_add = `CREATE OR REPLACE FUNCTION "array_add"( + "array" jsonb, + "values" jsonb +) + RETURNS jsonb + LANGUAGE sql + IMMUTABLE + STRICT +AS $function$ + SELECT array_to_json(ARRAY(SELECT unnest(ARRAY(SELECT DISTINCT jsonb_array_elements("array")) || ARRAY(SELECT jsonb_array_elements("values")))))::jsonb; +$function$;`; + +const array_add_unique = `CREATE OR REPLACE FUNCTION "array_add_unique"( + "array" jsonb, + "values" jsonb +) + RETURNS jsonb + LANGUAGE sql + IMMUTABLE + STRICT +AS $function$ + SELECT array_to_json(ARRAY(SELECT DISTINCT unnest(ARRAY(SELECT DISTINCT jsonb_array_elements("array")) || ARRAY(SELECT DISTINCT jsonb_array_elements("values")))))::jsonb; +$function$;`; + +const array_remove = `CREATE OR REPLACE FUNCTION "array_remove"( + "array" jsonb, + "values" jsonb +) + RETURNS jsonb + LANGUAGE sql + IMMUTABLE + STRICT +AS $function$ + SELECT array_to_json(ARRAY(SELECT * FROM jsonb_array_elements("array") as elt WHERE elt NOT IN (SELECT * FROM (SELECT jsonb_array_elements("values")) AS sub)))::jsonb; +$function$;`; + export default PostgresStorageAdapter; module.exports = PostgresStorageAdapter; // Required for tests diff --git a/src/Controllers/SchemaController.js b/src/Controllers/SchemaController.js index 50f7a94202..ee98ad3d81 100644 --- a/src/Controllers/SchemaController.js +++ b/src/Controllers/SchemaController.js @@ -86,6 +86,12 @@ const defaultColumns = Object.freeze({ "errorMessage": {type:'Object'}, "sentPerType": {type:'Object'}, "failedPerType":{type:'Object'}, + }, + _Hooks: { + "functionName": {type:'String'}, + "className": {type:'String'}, + "triggerName": {type:'String'}, + "url": {type:'String'} } }); @@ -258,13 +264,13 @@ const injectDefaultSchema = ({className, fields, classLevelPermissions}) => ({ classLevelPermissions, }); -const VolatileClassesSchemas = volatileClasses.map((className) => { - return convertSchemaToAdapterSchema(injectDefaultSchema({ - className, +const _HooksSchema = {className: "_Hooks", fields: defaultColumns._Hooks}; +const _PushStatusSchema = 
convertSchemaToAdapterSchema(injectDefaultSchema({ + className: "_PushStatus", fields: {}, classLevelPermissions: {} - })); -}); +})); +const VolatileClassesSchemas = [_HooksSchema, _PushStatusSchema]; const dbTypeMatchesObjectType = (dbType, objectType) => { if (dbType.type !== objectType.type) return false;
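
With `on_db` hoisted to the top of `spec/helper.js`, the test harness can pick a files adapter per backend: GridStore only works against MongoDB, so non-mongo runs fall back to `parse-server-fs-adapter`, which is why the `ParseFile` suites no longer need `describe_only_db('mongo')`. A minimal sketch of that selection, using the same modules and mongo URI the helper already declares:

```js
// Sketch of the per-database files-adapter selection done in spec/helper.js.
// GridStoreAdapter stores files in MongoDB's GridFS, so any non-mongo test
// database (e.g. PARSE_SERVER_TEST_DB=postgres) gets the filesystem adapter.
const GridStoreAdapter = require('../src/Adapters/Files/GridStoreAdapter').GridStoreAdapter;
const FSAdapter = require('parse-server-fs-adapter');

const mongoURI = 'mongodb://localhost:27017/parseServerMongoAdapterTestDatabase';

let filesAdapter;
on_db('mongo', () => {
  filesAdapter = new GridStoreAdapter(mongoURI);
}, () => {
  filesAdapter = new FSAdapter();
});

// filesAdapter is then passed straight into the default server configuration.
```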
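The `containsAll` queries that are now enabled on Postgres go through the new `$all` branch in `buildWhereClause`, which leans on the jsonb containment operator `@>`. A simplified sketch of that branch, assuming pg-promise-style `$n` / `$n:name` placeholders and ignoring the null-element special case the adapter also handles:

```js
// Simplified sketch (not the adapter's exact code) of how a {$all: [...]}
// constraint on an Array column becomes a jsonb containment test.
function buildContainsAll(fieldName, all, index) {
  const values = [fieldName];
  const inPatterns = [];
  all.forEach((elem, i) => {
    values.push(elem);
    inPatterns.push(`$${index + 1 + i}`);
  });
  // e.g. "numbers" @> json_build_array($3,$4,$5)::jsonb
  const pattern = `$${index}:name @> json_build_array(${inPatterns.join(',')})::jsonb`;
  return { pattern, values, nextIndex: index + 1 + inPatterns.length };
}

// Example: a containsAll query on a "numbers" array column, starting at $2.
console.log(buildContainsAll('numbers', [1, 2, 3], 2));
// => pattern: '$2:name @> json_build_array($3,$4,$5)::jsonb',
//    values: ['numbers', 1, 2, 3], nextIndex: 6
```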
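The `Add`, `AddUnique`, and `Remove` update operators stop being rejected on Postgres and are rewritten to the jsonb helper functions installed during `performInitialization`. The plain-JS equivalents below only illustrate the semantics those SQL functions implement; element order is not guaranteed because the SQL versions use `DISTINCT`, which is also why the `addUnique` assertions in `ParseObject.spec.js` were rewritten to be order-insensitive:

```js
// Illustration of the semantics of array_add, array_add_unique and
// array_remove; the real implementations operate on jsonb inside Postgres,
// use jsonb equality (so objects compare by value), and may reorder elements.
function arrayAdd(arr, values) {
  // array_add: dedupe the stored array, then append the incoming values.
  return [...new Set(arr)].concat(values);
}

function arrayAddUnique(arr, values) {
  // array_add_unique: keep one copy of every element across both inputs.
  return [...new Set([...arr, ...values])];
}

function arrayRemove(arr, values) {
  // array_remove: drop every stored element that also appears in values.
  return arr.filter(elt => !values.includes(elt));
}

console.log(arrayAdd([1, 2, 2], [2, 3]));     // [ 1, 2, 2, 3 ]
console.log(arrayAddUnique([1, 2], [2, 4]));  // [ 1, 2, 4 ]
console.log(arrayRemove([1, 2, 4], [2]));     // [ 1, 4 ]
```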
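`updateObjectsByQuery` also stops emitting one SET clause per `_auth_data_<provider>` key: providers are folded into a single `authData` object first, and one nested `json_object_set_key(...)` expression is built for that column. A simplified sketch of the pattern builder (placeholder numbering is illustrative):

```js
// Simplified sketch (not the adapter verbatim) of folding several providers
// into a single nested json_object_set_key expression so "authData" is only
// assigned once in the UPDATE statement.
function buildAuthDataUpdate(authData, startIndex) {
  const values = ['authData'];
  let index = startIndex + 1;
  let expr = `$${startIndex}:name`;
  Object.keys(authData).forEach(provider => {
    expr = `json_object_set_key(${expr}, $${index}::text, $${index + 1}::jsonb)::jsonb`;
    values.push(provider, authData[provider]);
    index += 2;
  });
  return { pattern: `$${startIndex}:name = ${expr}`, values, nextIndex: index };
}

console.log(buildAuthDataUpdate({ facebook: { id: '8675309' }, twitter: { id: 'abc' } }, 2).pattern);
// => $2:name = json_object_set_key(json_object_set_key($2:name, $3::text, $4::jsonb)::jsonb, $5::text, $6::jsonb)::jsonb
```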
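File columns switch from `jsonb` to `text`: only the file name is persisted, and the adapter re-wraps it as a `{__type: 'File', name}` pointer when reading rows back, so the layer above can expand it into a full file reference. A small round-trip sketch using the extended `toPostgresValue`:

```js
// Write side: toPostgresValue now flattens File (and Date) objects to the
// scalar value that actually gets stored in the text column.
const toPostgresValue = value => {
  if (typeof value === 'object') {
    if (value.__type === 'Date') {
      return value.iso;
    }
    if (value.__type === 'File') {
      return value.name;
    }
  }
  return value;
};

const stored = toPostgresValue({ __type: 'File', name: 'hello.txt' });
console.log(stored); // 'hello.txt'

// Read side: the find() path wraps the stored name back into a File pointer
// whenever the schema says the column is a File.
const restored = { __type: 'File', name: stored };
console.log(restored); // { __type: 'File', name: 'hello.txt' }
```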
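Class deletion is implemented as well: `deleteClass` drops the table and its `_SCHEMA` row in one transaction batch, resolving `false` for `_Join:` tables, and the relation join tables a schema owns are computed by the new `joinTablesForSchema` helper that `deleteAllClasses` now reuses. For reference, the helper with a small example:

```js
// The factored-out helper that lists the "_Join:<field>:<className>" tables
// created for Relation fields, so cleanup paths can drop them too.
const joinTablesForSchema = (schema) => {
  let list = [];
  if (schema) {
    Object.keys(schema.fields).forEach((field) => {
      if (schema.fields[field].type === 'Relation') {
        list.push(`_Join:${field}:${schema.className}`);
      }
    });
  }
  return list;
};

// Example schema with one Relation field:
console.log(joinTablesForSchema({
  className: 'Team',
  fields: { name: { type: 'String' }, members: { type: 'Relation' } },
}));
// => ['_Join:members:Team']
```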
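`deleteFields` likewise goes from `notImplemented()` to a transactional implementation: Relation fields only need their `_SCHEMA` entry removed (their data lives in the join table), while every other field becomes part of an `ALTER TABLE ... DROP COLUMN` issued alongside the schema rewrite. A simplified sketch of how the column list and SQL are assembled, again with pg-promise-style placeholders:

```js
// Simplified sketch of the deleteFields column handling: relation fields are
// filtered out (no column to drop), the rest become numbered identifiers.
function buildDropColumns(className, schema, fieldNames) {
  const droppable = fieldNames.filter(name => schema.fields[name].type !== 'Relation');
  fieldNames.forEach(name => { delete schema.fields[name]; });

  const values = [className, ...droppable];
  const columns = droppable.map((name, idx) => `$${idx + 2}:name`).join(',');
  const sql = droppable.length > 0
    ? `ALTER TABLE $1:name DROP COLUMN ${columns}`
    : null; // nothing to drop; only the stored schema is updated
  return { sql, values, schema };
}

const schema = { fields: { foo: { type: 'String' }, rel: { type: 'Relation' } } };
console.log(buildDropColumns('MyClass', schema, ['foo', 'rel']));
// => sql: 'ALTER TABLE $1:name DROP COLUMN $2:name', values: ['MyClass', 'foo']
```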
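Finally, `count` becomes tolerant of tables that don't exist yet: an undefined-table error is treated as a count of zero instead of failing the request, and `performInitialization` similarly ignores duplicate-relation errors when pre-creating the volatile classes. The error-code constants aren't shown in this excerpt; `42P01` is the standard Postgres SQLSTATE for `undefined_table`, which is presumably what `PostgresRelationDoesNotExistError` holds. A hedged sketch of the wrapper:

```js
// Sketch only: treat "relation does not exist" as an empty result.
// 42P01 is Postgres's SQLSTATE for undefined_table (assumed to be the value
// behind PostgresRelationDoesNotExistError in the adapter).
const PostgresRelationDoesNotExistError = '42P01';

function countOrZero(runCount) {
  return runCount().catch(err => {
    if (err.code === PostgresRelationDoesNotExistError) {
      return 0;
    }
    throw err;
  });
}

// Usage, mirroring the adapter's count():
//   countOrZero(() => this._client.one(qs, values, a => +a.count))
countOrZero(() => Promise.reject({ code: '42P01' })).then(n => console.log(n)); // 0
```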