diff --git a/spec/GridFSBucketStorageAdapter.spec.js b/spec/GridFSBucketStorageAdapter.spec.js
index 3b8c8016e9..92f7aae388 100644
--- a/spec/GridFSBucketStorageAdapter.spec.js
+++ b/spec/GridFSBucketStorageAdapter.spec.js
@@ -44,9 +44,7 @@ describe_only_db('mongo')('GridFSBucket and GridStore interop', () => {
     await expectMissingFile(encryptedAdapter, 'myFileName');
     const originalString = 'abcdefghi';
     await encryptedAdapter.createFile('myFileName', originalString);
-    const unencryptedResult = await unencryptedAdapter.getFileData(
-      'myFileName'
-    );
+    const unencryptedResult = await unencryptedAdapter.getFileData('myFileName');
     expect(unencryptedResult.toString('utf8')).not.toBe(originalString);
     const encryptedResult = await encryptedAdapter.getFileData('myFileName');
     expect(encryptedResult.toString('utf8')).toBe(originalString);
@@ -71,10 +69,7 @@ describe_only_db('mongo')('GridFSBucket and GridStore interop', () => {
     const unencryptedResult2 = await unencryptedAdapter.getFileData(fileName2);
     expect(unencryptedResult2.toString('utf8')).toBe(data2);
     //Check if encrypted adapter can read data and make sure it's not the same as unEncrypted adapter
-    const {
-      rotated,
-      notRotated,
-    } = await encryptedAdapter.rotateEncryptionKey();
+    const { rotated, notRotated } = await encryptedAdapter.rotateEncryptionKey();
     expect(rotated.length).toEqual(2);
     expect(
       rotated.filter(function (value) {
@@ -101,30 +96,18 @@ describe_only_db('mongo')('GridFSBucket and GridStore interop', () => {

   it('should rotate key of all old encrypted GridFS files to encrypted files', async () => {
     const oldEncryptionKey = 'oldKeyThatILoved';
-    const oldEncryptedAdapter = new GridFSBucketAdapter(
-      databaseURI,
-      {},
-      oldEncryptionKey
-    );
-    const encryptedAdapter = new GridFSBucketAdapter(
-      databaseURI,
-      {},
-      'newKeyThatILove'
-    );
+    const oldEncryptedAdapter = new GridFSBucketAdapter(databaseURI, {}, oldEncryptionKey);
+    const encryptedAdapter = new GridFSBucketAdapter(databaseURI, {}, 'newKeyThatILove');
     const fileName1 = 'file1.txt';
     const data1 = 'hello world';
     const fileName2 = 'file2.txt';
     const data2 = 'hello new world';
     //Store unecrypted files
     await oldEncryptedAdapter.createFile(fileName1, data1);
-    const oldEncryptedResult1 = await oldEncryptedAdapter.getFileData(
-      fileName1
-    );
+    const oldEncryptedResult1 = await oldEncryptedAdapter.getFileData(fileName1);
     expect(oldEncryptedResult1.toString('utf8')).toBe(data1);
     await oldEncryptedAdapter.createFile(fileName2, data2);
-    const oldEncryptedResult2 = await oldEncryptedAdapter.getFileData(
-      fileName2
-    );
+    const oldEncryptedResult2 = await oldEncryptedAdapter.getFileData(fileName2);
     expect(oldEncryptedResult2.toString('utf8')).toBe(data2);
     //Check if encrypted adapter can read data and make sure it's not the same as unEncrypted adapter
     const { rotated, notRotated } = await encryptedAdapter.rotateEncryptionKey({
@@ -170,11 +153,7 @@ describe_only_db('mongo')('GridFSBucket and GridStore interop', () => {

   it('should rotate key of all old encrypted GridFS files to unencrypted files', async () => {
     const oldEncryptionKey = 'oldKeyThatILoved';
-    const oldEncryptedAdapter = new GridFSBucketAdapter(
-      databaseURI,
-      {},
-      oldEncryptionKey
-    );
+    const oldEncryptedAdapter = new GridFSBucketAdapter(databaseURI, {}, oldEncryptionKey);
     const unEncryptedAdapter = new GridFSBucketAdapter(databaseURI);
     const fileName1 = 'file1.txt';
     const data1 = 'hello world';
@@ -182,20 +161,13 @@ describe_only_db('mongo')('GridFSBucket and GridStore interop', () => {
     const data2 = 'hello new world';
     //Store unecrypted files
     await oldEncryptedAdapter.createFile(fileName1, data1);
-    const oldEncryptedResult1 = await oldEncryptedAdapter.getFileData(
-      fileName1
-    );
+    const oldEncryptedResult1 = await oldEncryptedAdapter.getFileData(fileName1);
     expect(oldEncryptedResult1.toString('utf8')).toBe(data1);
     await oldEncryptedAdapter.createFile(fileName2, data2);
-    const oldEncryptedResult2 = await oldEncryptedAdapter.getFileData(
-      fileName2
-    );
+    const oldEncryptedResult2 = await oldEncryptedAdapter.getFileData(fileName2);
     expect(oldEncryptedResult2.toString('utf8')).toBe(data2);
     //Check if unEncrypted adapter can read data and make sure it's not the same as oldEncrypted adapter
-    const {
-      rotated,
-      notRotated,
-    } = await unEncryptedAdapter.rotateEncryptionKey({
+    const { rotated, notRotated } = await unEncryptedAdapter.rotateEncryptionKey({
       oldKey: oldEncryptionKey,
     });
     expect(rotated.length).toEqual(2);
@@ -238,16 +210,8 @@ describe_only_db('mongo')('GridFSBucket and GridStore interop', () => {

   it('should only encrypt specified fileNames', async () => {
     const oldEncryptionKey = 'oldKeyThatILoved';
-    const oldEncryptedAdapter = new GridFSBucketAdapter(
-      databaseURI,
-      {},
-      oldEncryptionKey
-    );
-    const encryptedAdapter = new GridFSBucketAdapter(
-      databaseURI,
-      {},
-      'newKeyThatILove'
-    );
+    const oldEncryptedAdapter = new GridFSBucketAdapter(databaseURI, {}, oldEncryptionKey);
+    const encryptedAdapter = new GridFSBucketAdapter(databaseURI, {}, 'newKeyThatILove');
     const unEncryptedAdapter = new GridFSBucketAdapter(databaseURI);
     const fileName1 = 'file1.txt';
     const data1 = 'hello world';
@@ -255,14 +219,10 @@ describe_only_db('mongo')('GridFSBucket and GridStore interop', () => {
     const data2 = 'hello new world';
     //Store unecrypted files
     await oldEncryptedAdapter.createFile(fileName1, data1);
-    const oldEncryptedResult1 = await oldEncryptedAdapter.getFileData(
-      fileName1
-    );
+    const oldEncryptedResult1 = await oldEncryptedAdapter.getFileData(fileName1);
     expect(oldEncryptedResult1.toString('utf8')).toBe(data1);
     await oldEncryptedAdapter.createFile(fileName2, data2);
-    const oldEncryptedResult2 = await oldEncryptedAdapter.getFileData(
-      fileName2
-    );
+    const oldEncryptedResult2 = await oldEncryptedAdapter.getFileData(fileName2);
     expect(oldEncryptedResult2.toString('utf8')).toBe(data2);
     //Inject unecrypted file to see if causes an issue
     const fileName3 = 'file3.txt';
@@ -318,16 +278,8 @@ describe_only_db('mongo')('GridFSBucket and GridStore interop', () => {

   it("should return fileNames of those it can't encrypt with the new key", async () => {
     const oldEncryptionKey = 'oldKeyThatILoved';
-    const oldEncryptedAdapter = new GridFSBucketAdapter(
-      databaseURI,
-      {},
-      oldEncryptionKey
-    );
-    const encryptedAdapter = new GridFSBucketAdapter(
-      databaseURI,
-      {},
-      'newKeyThatILove'
-    );
+    const oldEncryptedAdapter = new GridFSBucketAdapter(databaseURI, {}, oldEncryptionKey);
+    const encryptedAdapter = new GridFSBucketAdapter(databaseURI, {}, 'newKeyThatILove');
     const unEncryptedAdapter = new GridFSBucketAdapter(databaseURI);
     const fileName1 = 'file1.txt';
     const data1 = 'hello world';
@@ -335,14 +287,10 @@ describe_only_db('mongo')('GridFSBucket and GridStore interop', () => {
     const data2 = 'hello new world';
     //Store unecrypted files
     await oldEncryptedAdapter.createFile(fileName1, data1);
-    const oldEncryptedResult1 = await oldEncryptedAdapter.getFileData(
-      fileName1
-    );
+    const oldEncryptedResult1 = await oldEncryptedAdapter.getFileData(fileName1);
     expect(oldEncryptedResult1.toString('utf8')).toBe(data1);
     await oldEncryptedAdapter.createFile(fileName2, data2);
-    const oldEncryptedResult2 = await oldEncryptedAdapter.getFileData(
-      fileName2
-    );
+    const oldEncryptedResult2 = await oldEncryptedAdapter.getFileData(fileName2);
     expect(oldEncryptedResult2.toString('utf8')).toBe(data2);
     //Inject unecrypted file to see if causes an issue
     const fileName3 = 'file3.txt';
diff --git a/spec/RedisCacheAdapter.spec.js b/spec/RedisCacheAdapter.spec.js
index 4991d2b937..09b3506099 100644
--- a/spec/RedisCacheAdapter.spec.js
+++ b/spec/RedisCacheAdapter.spec.js
@@ -1,5 +1,5 @@
-const RedisCacheAdapter = require('../lib/Adapters/Cache/RedisCacheAdapter').default;
-const Config = require('../lib/Config');
+const RedisCacheAdapter = require('../lib/Adapters/Cache/RedisCacheAdapter')
+  .default;

 /*
 To run this test part of the complete suite
@@ -173,356 +173,3 @@ describe_only(() => {
       .then(done);
   });
 });
-
-describe_only(() => {
-  return process.env.PARSE_SERVER_TEST_CACHE === 'redis';
-})('Redis Performance', function () {
-  let cacheAdapter;
-  let getSpy;
-  let putSpy;
-  let delSpy;
-
-  beforeEach(async () => {
-    cacheAdapter = new RedisCacheAdapter();
-    await reconfigureServer({
-      cacheAdapter,
-    });
-    await cacheAdapter.clear();
-
-    getSpy = spyOn(cacheAdapter, 'get').and.callThrough();
-    putSpy = spyOn(cacheAdapter, 'put').and.callThrough();
-    delSpy = spyOn(cacheAdapter, 'del').and.callThrough();
-  });
-
-  it('test new object', async () => {
-    const object = new TestObject();
-    object.set('foo', 'bar');
-    await object.save();
-    expect(getSpy.calls.count()).toBe(3);
-    expect(putSpy.calls.count()).toBe(3);
-    expect(delSpy.calls.count()).toBe(1);
-
-    const keys = await cacheAdapter.getAllKeys();
-    expect(keys.length).toBe(0);
-  });
-
-  it('test new object multiple fields', async () => {
-    const container = new Container({
-      dateField: new Date(),
-      arrayField: [],
-      numberField: 1,
-      stringField: 'hello',
-      booleanField: true,
-    });
-    await container.save();
-    expect(getSpy.calls.count()).toBe(3);
-    expect(putSpy.calls.count()).toBe(3);
-    expect(delSpy.calls.count()).toBe(1);
-
-    const keys = await cacheAdapter.getAllKeys();
-    expect(keys.length).toBe(0);
-  });
-
-  it('test update existing fields', async () => {
-    const object = new TestObject();
-    object.set('foo', 'bar');
-    await object.save();
-
-    getSpy.calls.reset();
-    putSpy.calls.reset();
-
-    object.set('foo', 'barz');
-    await object.save();
-    expect(getSpy.calls.count()).toBe(3);
-    expect(putSpy.calls.count()).toBe(1);
-    expect(delSpy.calls.count()).toBe(2);
-
-    const keys = await cacheAdapter.getAllKeys();
-    expect(keys.length).toBe(0);
-  });
-
-  it('test saveAll / destroyAll', async () => {
-    const object = new TestObject();
-    await object.save();
-
-    getSpy.calls.reset();
-    putSpy.calls.reset();
-
-    const objects = [];
-    for (let i = 0; i < 10; i++) {
-      const object = new TestObject();
-      object.set('number', i);
-      objects.push(object);
-    }
-    await Parse.Object.saveAll(objects);
-    expect(getSpy.calls.count()).toBe(21);
-    expect(putSpy.calls.count()).toBe(11);
-
-    getSpy.calls.reset();
-    putSpy.calls.reset();
-
-    await Parse.Object.destroyAll(objects);
-    expect(getSpy.calls.count()).toBe(11);
-    expect(putSpy.calls.count()).toBe(1);
-    expect(delSpy.calls.count()).toBe(3);
-
-    const keys = await cacheAdapter.getAllKeys();
-    expect(keys.length).toBe(0);
-  });
-
-  it('test saveAll / destroyAll batch', async () => {
-    const object = new TestObject();
-    await object.save();
-
-    getSpy.calls.reset();
-    putSpy.calls.reset();
-
-    const objects = [];
-    for (let i = 0; i < 10; i++) {
-      const object = new TestObject();
-      object.set('number', i);
-      objects.push(object);
-    }
-    await Parse.Object.saveAll(objects, { batchSize: 5 });
-    expect(getSpy.calls.count()).toBe(22);
-    expect(putSpy.calls.count()).toBe(7);
-
-    getSpy.calls.reset();
-    putSpy.calls.reset();
-
-    await Parse.Object.destroyAll(objects, { batchSize: 5 });
-    expect(getSpy.calls.count()).toBe(12);
-    expect(putSpy.calls.count()).toBe(2);
-    expect(delSpy.calls.count()).toBe(5);
-
-    const keys = await cacheAdapter.getAllKeys();
-    expect(keys.length).toBe(0);
-  });
-
-  it('test add new field to existing object', async () => {
-    const object = new TestObject();
-    object.set('foo', 'bar');
-    await object.save();
-
-    getSpy.calls.reset();
-    putSpy.calls.reset();
-
-    object.set('new', 'barz');
-    await object.save();
-    expect(getSpy.calls.count()).toBe(3);
-    expect(putSpy.calls.count()).toBe(2);
-    expect(delSpy.calls.count()).toBe(2);
-
-    const keys = await cacheAdapter.getAllKeys();
-    expect(keys.length).toBe(0);
-  });
-
-  it('test add multiple fields to existing object', async () => {
-    const object = new TestObject();
-    object.set('foo', 'bar');
-    await object.save();
-
-    getSpy.calls.reset();
-    putSpy.calls.reset();
-
-    object.set({
-      dateField: new Date(),
-      arrayField: [],
-      numberField: 1,
-      stringField: 'hello',
-      booleanField: true,
-    });
-    await object.save();
-    expect(getSpy.calls.count()).toBe(3);
-    expect(putSpy.calls.count()).toBe(2);
-    expect(delSpy.calls.count()).toBe(2);
-
-    const keys = await cacheAdapter.getAllKeys();
-    expect(keys.length).toBe(0);
-  });
-
-  it('test user', async () => {
-    const user = new Parse.User();
-    user.setUsername('testing');
-    user.setPassword('testing');
-    await user.signUp();
-
-    expect(getSpy.calls.count()).toBe(8);
-    expect(putSpy.calls.count()).toBe(2);
-    expect(delSpy.calls.count()).toBe(1);
-
-    const keys = await cacheAdapter.getAllKeys();
-    expect(keys.length).toBe(0);
-  });
-
-  it('test allowClientCreation false', async () => {
-    const object = new TestObject();
-    await object.save();
-    await reconfigureServer({
-      cacheAdapter,
-      allowClientClassCreation: false,
-    });
-    await cacheAdapter.clear();
-
-    getSpy.calls.reset();
-    putSpy.calls.reset();
-    delSpy.calls.reset();
-
-    object.set('foo', 'bar');
-    await object.save();
-    expect(getSpy.calls.count()).toBe(4);
-    expect(putSpy.calls.count()).toBe(2);
-
-    getSpy.calls.reset();
-    putSpy.calls.reset();
-
-    const query = new Parse.Query(TestObject);
-    await query.get(object.id);
-    expect(getSpy.calls.count()).toBe(3);
-    expect(putSpy.calls.count()).toBe(1);
-    expect(delSpy.calls.count()).toBe(2);
-
-    const keys = await cacheAdapter.getAllKeys();
-    expect(keys.length).toBe(0);
-  });
-
-  it('test query', async () => {
-    const object = new TestObject();
-    object.set('foo', 'bar');
-    await object.save();
-
-    getSpy.calls.reset();
-    putSpy.calls.reset();
-    delSpy.calls.reset();
-
-    const query = new Parse.Query(TestObject);
-    await query.get(object.id);
-    expect(getSpy.calls.count()).toBe(2);
-    expect(putSpy.calls.count()).toBe(1);
-    expect(delSpy.calls.count()).toBe(1);
-
-    const keys = await cacheAdapter.getAllKeys();
-    expect(keys.length).toBe(0);
-  });
-
-  it('test query include', async () => {
-    const child = new TestObject();
-    await child.save();
-
-    const object = new TestObject();
-    object.set('child', child);
-    await object.save();
-
-    getSpy.calls.reset();
-    putSpy.calls.reset();
-
-    const query = new Parse.Query(TestObject);
-    query.include('child');
-    await query.get(object.id);
-
-    expect(getSpy.calls.count()).toBe(4);
-    expect(putSpy.calls.count()).toBe(1);
-    expect(delSpy.calls.count()).toBe(3);
-
-    const keys = await cacheAdapter.getAllKeys();
-    expect(keys.length).toBe(0);
-  });
-
-  it('query relation without schema', async () => {
-    const child = new Parse.Object('ChildObject');
-    await child.save();
-
-    const parent = new Parse.Object('ParentObject');
-    const relation = parent.relation('child');
-    relation.add(child);
-    await parent.save();
-
-    getSpy.calls.reset();
-    putSpy.calls.reset();
-
-    const objects = await relation.query().find();
-    expect(objects.length).toBe(1);
-    expect(objects[0].id).toBe(child.id);
-
-    expect(getSpy.calls.count()).toBe(2);
-    expect(putSpy.calls.count()).toBe(1);
-    expect(delSpy.calls.count()).toBe(3);
-
-    const keys = await cacheAdapter.getAllKeys();
-    expect(keys.length).toBe(0);
-  });
-
-  it('test delete object', async () => {
-    const object = new TestObject();
-    object.set('foo', 'bar');
-    await object.save();
-
-    getSpy.calls.reset();
-    putSpy.calls.reset();
-    delSpy.calls.reset();
-
-    await object.destroy();
-    expect(getSpy.calls.count()).toBe(2);
-    expect(putSpy.calls.count()).toBe(1);
-    expect(delSpy.calls.count()).toBe(1);
-
-    const keys = await cacheAdapter.getAllKeys();
-    expect(keys.length).toBe(0);
-  });
-
-  it('test schema update class', async () => {
-    const container = new Container();
-    await container.save();
-
-    getSpy.calls.reset();
-    putSpy.calls.reset();
-    delSpy.calls.reset();
-
-    const config = Config.get('test');
-    const schema = await config.database.loadSchema();
-    await schema.reloadData();
-
-    const levelPermissions = {
-      find: { '*': true },
-      get: { '*': true },
-      create: { '*': true },
-      update: { '*': true },
-      delete: { '*': true },
-      addField: { '*': true },
-      protectedFields: { '*': [] },
-    };
-
-    await schema.updateClass(
-      'Container',
-      {
-        fooOne: { type: 'Number' },
-        fooTwo: { type: 'Array' },
-        fooThree: { type: 'Date' },
-        fooFour: { type: 'Object' },
-        fooFive: { type: 'Relation', targetClass: '_User' },
-        fooSix: { type: 'String' },
-        fooSeven: { type: 'Object' },
-        fooEight: { type: 'String' },
-        fooNine: { type: 'String' },
-        fooTeen: { type: 'Number' },
-        fooEleven: { type: 'String' },
-        fooTwelve: { type: 'String' },
-        fooThirteen: { type: 'String' },
-        fooFourteen: { type: 'String' },
-        fooFifteen: { type: 'String' },
-        fooSixteen: { type: 'String' },
-        fooEighteen: { type: 'String' },
-        fooNineteen: { type: 'String' },
-      },
-      levelPermissions,
-      {},
-      config.database
-    );
-    expect(getSpy.calls.count()).toBe(3);
-    expect(putSpy.calls.count()).toBe(3);
-    expect(delSpy.calls.count()).toBe(0);
-
-    const keys = await cacheAdapter.getAllKeys();
-    expect(keys.length).toBe(1);
-  });
-});
diff --git a/spec/SchemaCache.spec.js b/spec/SchemaCache.spec.js
deleted file mode 100644
index 5a4a517395..0000000000
--- a/spec/SchemaCache.spec.js
+++ /dev/null
@@ -1,75 +0,0 @@
-const CacheController = require('../lib/Controllers/CacheController.js').default;
-const InMemoryCacheAdapter = require('../lib/Adapters/Cache/InMemoryCacheAdapter').default;
-const SchemaCache = require('../lib/Controllers/SchemaCache').default;
-
-describe('SchemaCache', () => {
-  let cacheController;
-
-  beforeEach(() => {
-    const cacheAdapter = new InMemoryCacheAdapter({});
-    cacheController = new CacheController(cacheAdapter, 'appId');
-  });
-
-  it('can retrieve a single schema after all schemas stored', done => {
-    const schemaCache = new SchemaCache(cacheController);
-    const allSchemas = [
-      {
-        className: 'Class1',
-      },
-      {
-        className: 'Class2',
-      },
-    ];
-    schemaCache
-      .setAllClasses(allSchemas)
-      .then(() => {
-        return schemaCache.getOneSchema('Class2');
-      })
-      .then(schema => {
-        expect(schema).not.toBeNull();
-        done();
-      });
-  });
-
-  it("doesn't persist cached data by default", done => {
-    const schemaCache = new SchemaCache(cacheController);
-    const schema = {
-      className: 'Class1',
-    };
-    schemaCache.setAllClasses([schema]).then(() => {
-      const anotherSchemaCache = new SchemaCache(cacheController);
-      return anotherSchemaCache.getOneSchema(schema.className).then(schema => {
-        expect(schema).toBeNull();
-        done();
-      });
-    });
-  });
-
-  it('can persist cached data', done => {
-    const schemaCache = new SchemaCache(cacheController, 5000, true);
-    const schema = {
-      className: 'Class1',
-    };
-    schemaCache.setAllClasses([schema]).then(() => {
-      const anotherSchemaCache = new SchemaCache(cacheController, 5000, true);
-      return anotherSchemaCache.getOneSchema(schema.className).then(schema => {
-        expect(schema).not.toBeNull();
-        done();
-      });
-    });
-  });
-
-  it('should not store if ttl is null', async () => {
-    const ttl = null;
-    const schemaCache = new SchemaCache(cacheController, ttl);
-    expect(await schemaCache.getAllClasses()).toBeNull();
-    expect(await schemaCache.setAllClasses()).toBeNull();
-    expect(await schemaCache.getOneSchema()).toBeNull();
-  });
-
-  it('should convert string ttl to number', async () => {
-    const ttl = '5000';
-    const schemaCache = new SchemaCache(cacheController, ttl);
-    expect(schemaCache.ttl).toBe(5000);
-  });
-});
diff --git a/spec/dev.js b/spec/dev.js
index c58879a533..9b1559464c 100644
--- a/spec/dev.js
+++ b/spec/dev.js
@@ -4,12 +4,9 @@ const Parse = require('parse/node');
 const className = 'AnObject';
 const defaultRoleName = 'tester';

-let schemaCache;
-
 module.exports = {
   /* AnObject */
   className,
-  schemaCache,

   /**
    * Creates and returns new user.
diff --git a/src/Adapters/Auth/google.js b/src/Adapters/Auth/google.js
index 75671e2c6b..8691cf9cae 100644
--- a/src/Adapters/Auth/google.js
+++ b/src/Adapters/Auth/google.js
@@ -39,9 +39,7 @@ function getGoogleKeyByKeyId(keyId) {

         if (expire) {
           cache = Object.assign({}, pems, {
-            expiresAt: new Date(
-              new Date().getTime() + Number(expire[1]) * 1000
-            ),
+            expiresAt: new Date(new Date().getTime() + Number(expire[1]) * 1000),
           });
         }
       }
@@ -57,10 +55,7 @@ function getHeaderFromToken(token) {
   const decodedToken = jwt.decode(token, { complete: true });

   if (!decodedToken) {
-    throw new Parse.Error(
-      Parse.Error.OBJECT_NOT_FOUND,
-      `provided token does not decode as JWT`
-    );
+    throw new Parse.Error(Parse.Error.OBJECT_NOT_FOUND, `provided token does not decode as JWT`);
   }

   return decodedToken.header;
@@ -68,10 +63,7 @@ function getHeaderFromToken(token) {

 async function verifyIdToken({ id_token: token, id }, { clientId }) {
   if (!token) {
-    throw new Parse.Error(
-      Parse.Error.OBJECT_NOT_FOUND,
-      `id token is invalid for this user.`
-    );
+    throw new Parse.Error(Parse.Error.OBJECT_NOT_FOUND, `id token is invalid for this user.`);
   }

   const { kid: keyId, alg: algorithm } = getHeaderFromToken(token);
@@ -96,10 +88,7 @@ async function verifyIdToken({ id_token: token, id }, { clientId }) {
   }

   if (jwtClaims.sub !== id) {
-    throw new Parse.Error(
-      Parse.Error.OBJECT_NOT_FOUND,
-      `auth data is invalid for this user.`
-    );
+    throw new Parse.Error(Parse.Error.OBJECT_NOT_FOUND, `auth data is invalid for this user.`);
   }

   if (clientId && jwtClaims.aud !== clientId) {
@@ -140,9 +129,7 @@ function rsaPublicKeyToPEM(modulusB64, exponentB64) {
   const encodedExplen = encodeLengthHex(explen);
   const encodedPubkey =
     '30' +
-    encodeLengthHex(
-      modlen + explen + encodedModlen.length / 2 + encodedExplen.length / 2 + 2
-    ) +
+    encodeLengthHex(modlen + explen + encodedModlen.length / 2 + encodedExplen.length / 2 + 2) +
     '02' +
     encodedModlen +
     modulusHex +
diff --git a/src/Adapters/Auth/index.js b/src/Adapters/Auth/index.js
index d0da98ab3c..00637d1131 100755
--- a/src/Adapters/Auth/index.js
+++ b/src/Adapters/Auth/index.js
@@ -92,11 +92,7 @@ function loadAuthAdapter(provider, authOptions) {

   // Try the configuration methods
   if (providerOptions) {
-    const optionalAdapter = loadAdapter(
-      providerOptions,
-      undefined,
-      providerOptions
-    );
+    const optionalAdapter = loadAdapter(providerOptions, undefined, providerOptions);
     if (optionalAdapter) {
       ['validateAuthData', 'validateAppId'].forEach(key => {
         if (optionalAdapter[key]) {
@@ -128,10 +124,7 @@ module.exports = function (authOptions = {}, enableAnonymousUsers = true) {
       return;
     }

-    const { adapter, appIds, providerOptions } = loadAuthAdapter(
-      provider,
-      authOptions
-    );
+    const { adapter, appIds, providerOptions } = loadAuthAdapter(provider, authOptions);
     return authDataValidator(adapter, appIds, providerOptions);
   };
diff --git a/src/Adapters/Auth/instagram.js b/src/Adapters/Auth/instagram.js
index 0c1379d4ca..6d61413bf0 100644
--- a/src/Adapters/Auth/instagram.js
+++ b/src/Adapters/Auth/instagram.js
@@ -11,10 +11,7 @@ function validateAuthData(authData) {
     if (response && response.data && response.data.id == authData.id) {
       return;
     }
-    throw new Parse.Error(
-      Parse.Error.OBJECT_NOT_FOUND,
-      'Instagram auth is invalid for this user.'
-    );
+    throw new Parse.Error(Parse.Error.OBJECT_NOT_FOUND, 'Instagram auth is invalid for this user.');
   });
 }

diff --git a/src/Adapters/Auth/keycloak.js b/src/Adapters/Auth/keycloak.js
index 1223eac36b..037542f7af 100644
--- a/src/Adapters/Auth/keycloak.js
+++ b/src/Adapters/Auth/keycloak.js
@@ -37,12 +37,7 @@ const { Parse } = require('parse/node');
 const httpsRequest = require('./httpsRequest');

 const arraysEqual = (_arr1, _arr2) => {
-  if (
-    !Array.isArray(_arr1) ||
-    !Array.isArray(_arr2) ||
-    _arr1.length !== _arr2.length
-  )
-    return false;
+  if (!Array.isArray(_arr1) || !Array.isArray(_arr2) || _arr1.length !== _arr2.length) return false;

   var arr1 = _arr1.concat().sort();
   var arr2 = _arr2.concat().sort();
@@ -54,21 +49,12 @@ const arraysEqual = (_arr1, _arr2) => {
   return true;
 };

-const handleAuth = async (
-  { access_token, id, roles, groups } = {},
-  { config } = {}
-) => {
+const handleAuth = async ({ access_token, id, roles, groups } = {}, { config } = {}) => {
   if (!(access_token && id)) {
-    throw new Parse.Error(
-      Parse.Error.OBJECT_NOT_FOUND,
-      'Missing access token and/or User id'
-    );
+    throw new Parse.Error(Parse.Error.OBJECT_NOT_FOUND, 'Missing access token and/or User id');
   }
   if (!config || !(config['auth-server-url'] && config['realm'])) {
-    throw new Parse.Error(
-      Parse.Error.OBJECT_NOT_FOUND,
-      'Missing keycloak configuration'
-    );
+    throw new Parse.Error(Parse.Error.OBJECT_NOT_FOUND, 'Missing keycloak configuration');
   }
   try {
     const response = await httpsRequest.get({
@@ -87,10 +73,7 @@ const handleAuth = async (
     ) {
       return;
     }
-    throw new Parse.Error(
-      Parse.Error.OBJECT_NOT_FOUND,
-      'Invalid authentication'
-    );
+    throw new Parse.Error(Parse.Error.OBJECT_NOT_FOUND, 'Invalid authentication');
   } catch (e) {
     if (e instanceof Parse.Error) {
       throw e;
diff --git a/src/Adapters/Auth/vkontakte.js b/src/Adapters/Auth/vkontakte.js
index fe9913ab8c..46fd1248ae 100644
--- a/src/Adapters/Auth/vkontakte.js
+++ b/src/Adapters/Auth/vkontakte.js
@@ -11,10 +11,7 @@ function validateAuthData(authData, params) {
     if (response && response.access_token) {
       return request(
         'api.vk.com',
-        'method/users.get?access_token=' +
-          authData.access_token +
-          '&v=' +
-          params.apiVersion
+        'method/users.get?access_token=' + authData.access_token + '&v=' + params.apiVersion
       ).then(function (response) {
         if (
           response &&
@@ -24,16 +21,10 @@ function validateAuthData(authData, params) {
         ) {
           return;
         }
-        throw new Parse.Error(
-          Parse.Error.OBJECT_NOT_FOUND,
-          'Vk auth is invalid for this user.'
-        );
+        throw new Parse.Error(Parse.Error.OBJECT_NOT_FOUND, 'Vk auth is invalid for this user.');
       });
     }
-    throw new Parse.Error(
-      Parse.Error.OBJECT_NOT_FOUND,
-      'Vk appIds or appSecret is incorrect.'
-    );
+    throw new Parse.Error(Parse.Error.OBJECT_NOT_FOUND, 'Vk appIds or appSecret is incorrect.');
   });
 }

diff --git a/src/Adapters/Files/GridFSBucketAdapter.js b/src/Adapters/Files/GridFSBucketAdapter.js
index bf9f119f4d..84876fad6f 100644
--- a/src/Adapters/Files/GridFSBucketAdapter.js
+++ b/src/Adapters/Files/GridFSBucketAdapter.js
@@ -28,11 +28,7 @@ export class GridFSBucketAdapter extends FilesAdapter {
     this._algorithm = 'aes-256-gcm';
     this._encryptionKey =
       encryptionKey !== undefined
-        ? crypto
-            .createHash('sha256')
-            .update(String(encryptionKey))
-            .digest('base64')
-            .substr(0, 32)
+        ? crypto.createHash('sha256').update(String(encryptionKey)).digest('base64').substr(0, 32)
         : null;
     const defaultMongoOptions = {
       useNewUrlParser: true,
@@ -43,13 +39,12 @@ export class GridFSBucketAdapter extends FilesAdapter {

   _connect() {
     if (!this._connectionPromise) {
-      this._connectionPromise = MongoClient.connect(
-        this._databaseURI,
-        this._mongoOptions
-      ).then(client => {
-        this._client = client;
-        return client.db(client.s.options.dbName);
-      });
+      this._connectionPromise = MongoClient.connect(this._databaseURI, this._mongoOptions).then(
+        client => {
+          this._client = client;
+          return client.db(client.s.options.dbName);
+        }
+      );
     }
     return this._connectionPromise;
   }
@@ -68,11 +63,7 @@ export class GridFSBucketAdapter extends FilesAdapter {
     if (this._encryptionKey !== null) {
       try {
         const iv = crypto.randomBytes(16);
-        const cipher = crypto.createCipheriv(
-          this._algorithm,
-          this._encryptionKey,
-          iv
-        );
+        const cipher = crypto.createCipheriv(this._algorithm, this._encryptionKey, iv);
         const encryptedResult = Buffer.concat([
           cipher.update(data),
           cipher.final(),
@@ -126,16 +117,9 @@ export class GridFSBucketAdapter extends FilesAdapter {
           const authTag = data.slice(authTagLocation);
           const iv = data.slice(ivLocation, authTagLocation);
           const encrypted = data.slice(0, ivLocation);
-          const decipher = crypto.createDecipheriv(
-            this._algorithm,
-            this._encryptionKey,
-            iv
-          );
+          const decipher = crypto.createDecipheriv(this._algorithm, this._encryptionKey, iv);
           decipher.setAuthTag(authTag);
-          const decrypted = Buffer.concat([
-            decipher.update(encrypted),
-            decipher.final(),
-          ]);
+          const decrypted = Buffer.concat([decipher.update(encrypted), decipher.final()]);
           return resolve(decrypted);
         } catch (err) {
           return reject(err);
@@ -160,10 +144,7 @@ export class GridFSBucketAdapter extends FilesAdapter {
         options.oldKey
       );
     } else {
-      oldKeyFileAdapter = new GridFSBucketAdapter(
-        this._databaseURI,
-        this._mongoOptions
-      );
+      oldKeyFileAdapter = new GridFSBucketAdapter(this._databaseURI, this._mongoOptions);
     }
     if (options.fileNames !== undefined) {
       fileNames = options.fileNames;
@@ -186,9 +167,7 @@ export class GridFSBucketAdapter extends FilesAdapter {
           this.createFile(fileName, plainTextData)
             .then(() => {
               fileNamesRotated.push(fileName);
-              fileNamesNotRotated = fileNamesNotRotated.filter(function (
-                value
-              ) {
+              fileNamesNotRotated = fileNamesNotRotated.filter(function (value) {
                 return value !== fileName;
               });
               fileNameIndex += 1;
@@ -223,13 +202,7 @@ export class GridFSBucketAdapter extends FilesAdapter {
   }

   getFileLocation(config, filename) {
-    return (
-      config.mount +
-      '/files/' +
-      config.applicationId +
-      '/' +
-      encodeURIComponent(filename)
-    );
+    return config.mount + '/files/' + config.applicationId + '/' + encodeURIComponent(filename);
   }

   async getMetadata(filename) {
diff --git a/src/Adapters/Storage/Mongo/MongoStorageAdapter.js b/src/Adapters/Storage/Mongo/MongoStorageAdapter.js
index 60483480ed..26b3f2feb8 100644
--- a/src/Adapters/Storage/Mongo/MongoStorageAdapter.js
+++ b/src/Adapters/Storage/Mongo/MongoStorageAdapter.js
@@ -2,16 +2,8 @@
 import MongoCollection from './MongoCollection';
 import MongoSchemaCollection from './MongoSchemaCollection';
 import { StorageAdapter } from '../StorageAdapter';
-import type {
-  SchemaType,
-  QueryType,
-  StorageClass,
-  QueryOptions,
-} from '../StorageAdapter';
-import {
-  parse as parseUrl,
-  format as formatUrl,
-} from '../../../vendor/mongodbUrl';
+import type { SchemaType, QueryType, StorageClass, QueryOptions } from '../StorageAdapter';
+import { parse as parseUrl, format as formatUrl } from '../../../vendor/mongodbUrl'; import { parseObjectToMongoObjectForCreate, mongoObjectToParseObject, @@ -45,9 +37,7 @@ const storageAdapterAllCollections = mongoAdapter => { } // TODO: If you have one app with a collection prefix that happens to be a prefix of another // apps prefix, this will go very very badly. We should fix that somehow. - return ( - collection.collectionName.indexOf(mongoAdapter._collectionPrefix) == 0 - ); + return collection.collectionName.indexOf(mongoAdapter._collectionPrefix) == 0; }); }); }; @@ -85,16 +75,13 @@ const mongoSchemaFromFieldsAndClassNameAndCLP = ( for (const fieldName in fields) { const { type, targetClass, ...fieldOptions } = fields[fieldName]; - mongoObject[ - fieldName - ] = MongoSchemaCollection.parseFieldTypeToMongoFieldType({ + mongoObject[fieldName] = MongoSchemaCollection.parseFieldTypeToMongoFieldType({ type, targetClass, }); if (fieldOptions && Object.keys(fieldOptions).length > 0) { mongoObject._metadata = mongoObject._metadata || {}; - mongoObject._metadata.fields_options = - mongoObject._metadata.fields_options || {}; + mongoObject._metadata.fields_options = mongoObject._metadata.fields_options || {}; mongoObject._metadata.fields_options[fieldName] = fieldOptions; } } @@ -108,11 +95,7 @@ const mongoSchemaFromFieldsAndClassNameAndCLP = ( } } - if ( - indexes && - typeof indexes === 'object' && - Object.keys(indexes).length > 0 - ) { + if (indexes && typeof indexes === 'object' && Object.keys(indexes).length > 0) { mongoObject._metadata = mongoObject._metadata || {}; mongoObject._metadata.indexes = indexes; } @@ -130,6 +113,8 @@ export class MongoStorageAdapter implements StorageAdapter { _uri: string; _collectionPrefix: string; _mongoOptions: Object; + _stream: any; + _onchange: any; // Public connectionPromise: ?Promise; database: any; @@ -137,16 +122,13 @@ export class MongoStorageAdapter implements StorageAdapter { _maxTimeMS: ?number; canSortOnJoinTables: boolean; - constructor({ - uri = defaults.DefaultMongoURI, - collectionPrefix = '', - mongoOptions = {}, - }: any) { + constructor({ uri = defaults.DefaultMongoURI, collectionPrefix = '', mongoOptions = {} }: any) { this._uri = uri; this._collectionPrefix = collectionPrefix; this._mongoOptions = mongoOptions; this._mongoOptions.useNewUrlParser = true; this._mongoOptions.useUnifiedTopology = true; + this._onchange = () => {}; // MaxTimeMS is not a global MongoDB client option, it is applied per operation. 
this._maxTimeMS = mongoOptions.maxTimeMS; @@ -154,6 +136,10 @@ export class MongoStorageAdapter implements StorageAdapter { delete mongoOptions.maxTimeMS; } + watch(callback: () => void): void { + this._onchange = callback; + } + connect() { if (this.connectionPromise) { return this.connectionPromise; @@ -219,15 +205,19 @@ export class MongoStorageAdapter implements StorageAdapter { _schemaCollection(): Promise { return this.connect() .then(() => this._adaptiveCollection(MongoSchemaCollectionName)) - .then(collection => new MongoSchemaCollection(collection)); + .then(collection => { + if (!this._stream) { + this._stream = collection._mongoCollection.watch(); + this._stream.on('change', this._onchange); + } + return new MongoSchemaCollection(collection); + }); } classExists(name: string) { return this.connect() .then(() => { - return this.database - .listCollections({ name: this._collectionPrefix + name }) - .toArray(); + return this.database.listCollections({ name: this._collectionPrefix + name }).toArray(); }) .then(collections => { return collections.length > 0; @@ -262,10 +252,7 @@ export class MongoStorageAdapter implements StorageAdapter { Object.keys(submittedIndexes).forEach(name => { const field = submittedIndexes[name]; if (existingIndexes[name] && field.__op !== 'Delete') { - throw new Parse.Error( - Parse.Error.INVALID_QUERY, - `Index ${name} exists, cannot update.` - ); + throw new Parse.Error(Parse.Error.INVALID_QUERY, `Index ${name} exists, cannot update.`); } if (!existingIndexes[name] && field.__op === 'Delete') { throw new Parse.Error( @@ -349,26 +336,15 @@ export class MongoStorageAdapter implements StorageAdapter { schema.indexes ); mongoObject._id = className; - return this.setIndexesWithSchemaFormat( - className, - schema.indexes, - {}, - schema.fields - ) + return this.setIndexesWithSchemaFormat(className, schema.indexes, {}, schema.fields) .then(() => this._schemaCollection()) .then(schemaCollection => schemaCollection.insertSchema(mongoObject)) .catch(err => this.handleError(err)); } - addFieldIfNotExists( - className: string, - fieldName: string, - type: any - ): Promise { + addFieldIfNotExists(className: string, fieldName: string, type: any): Promise { return this._schemaCollection() - .then(schemaCollection => - schemaCollection.addFieldIfNotExists(className, fieldName, type) - ) + .then(schemaCollection => schemaCollection.addFieldIfNotExists(className, fieldName, type)) .then(() => this.createIndexesIfNeeded(className, fieldName, type)) .catch(err => this.handleError(err)); } @@ -388,9 +364,7 @@ export class MongoStorageAdapter implements StorageAdapter { }) // We've dropped the collection, now remove the _SCHEMA document .then(() => this._schemaCollection()) - .then(schemaCollection => - schemaCollection.findAndDeleteSchema(className) - ) + .then(schemaCollection => schemaCollection.findAndDeleteSchema(className)) .catch(err => this.handleError(err)) ); } @@ -398,9 +372,7 @@ export class MongoStorageAdapter implements StorageAdapter { deleteAllClasses(fast: boolean) { return storageAdapterAllCollections(this).then(collections => Promise.all( - collections.map(collection => - fast ? collection.deleteMany({}) : collection.drop() - ) + collections.map(collection => (fast ? 
collection.deleteMany({}) : collection.drop())) ) ); } @@ -450,13 +422,9 @@ export class MongoStorageAdapter implements StorageAdapter { }); return this._adaptiveCollection(className) - .then(collection => - collection.updateMany(collectionFilter, collectionUpdate) - ) + .then(collection => collection.updateMany(collectionFilter, collectionUpdate)) .then(() => this._schemaCollection()) - .then(schemaCollection => - schemaCollection.updateSchema(className, schemaUpdate) - ) + .then(schemaCollection => schemaCollection.updateSchema(className, schemaUpdate)) .catch(err => this.handleError(err)); } @@ -465,9 +433,7 @@ export class MongoStorageAdapter implements StorageAdapter { // rejection reason are TBD. getAllClasses(): Promise { return this._schemaCollection() - .then(schemasCollection => - schemasCollection._fetchAllSchemasFrom_SCHEMA() - ) + .then(schemasCollection => schemasCollection._fetchAllSchemasFrom_SCHEMA()) .catch(err => this.handleError(err)); } @@ -476,31 +442,18 @@ export class MongoStorageAdapter implements StorageAdapter { // undefined as the reason. getClass(className: string): Promise { return this._schemaCollection() - .then(schemasCollection => - schemasCollection._fetchOneSchemaFrom_SCHEMA(className) - ) + .then(schemasCollection => schemasCollection._fetchOneSchemaFrom_SCHEMA(className)) .catch(err => this.handleError(err)); } // TODO: As yet not particularly well specified. Creates an object. Maybe shouldn't even need the schema, // and should infer from the type. Or maybe does need the schema for validations. Or maybe needs // the schema only for the legacy mongo format. We'll figure that out later. - createObject( - className: string, - schema: SchemaType, - object: any, - transactionalSession: ?any - ) { + createObject(className: string, schema: SchemaType, object: any, transactionalSession: ?any) { schema = convertParseSchemaToMongoSchema(schema); - const mongoObject = parseObjectToMongoObjectForCreate( - className, - object, - schema - ); + const mongoObject = parseObjectToMongoObjectForCreate(className, object, schema); return this._adaptiveCollection(className) - .then(collection => - collection.insertOne(mongoObject, transactionalSession) - ) + .then(collection => collection.insertOne(mongoObject, transactionalSession)) .catch(error => { if (error.code === 11000) { // Duplicate value @@ -510,9 +463,7 @@ export class MongoStorageAdapter implements StorageAdapter { ); err.underlyingError = error; if (error.message) { - const matches = error.message.match( - /index:[\sa-zA-Z0-9_\-\.]+\$?([a-zA-Z_-]+)_1/ - ); + const matches = error.message.match(/index:[\sa-zA-Z0-9_\-\.]+\$?([a-zA-Z_-]+)_1/); if (matches && Array.isArray(matches)) { err.userInfo = { duplicated_field: matches[1] }; } @@ -543,18 +494,12 @@ export class MongoStorageAdapter implements StorageAdapter { .then( ({ result }) => { if (result.n === 0) { - throw new Parse.Error( - Parse.Error.OBJECT_NOT_FOUND, - 'Object not found.' 
- ); + throw new Parse.Error(Parse.Error.OBJECT_NOT_FOUND, 'Object not found.'); } return Promise.resolve(); }, () => { - throw new Parse.Error( - Parse.Error.INTERNAL_SERVER_ERROR, - 'Database adapter error' - ); + throw new Parse.Error(Parse.Error.INTERNAL_SERVER_ERROR, 'Database adapter error'); } ); } @@ -571,9 +516,7 @@ export class MongoStorageAdapter implements StorageAdapter { const mongoUpdate = transformUpdate(className, update, schema); const mongoWhere = transformWhere(className, query, schema); return this._adaptiveCollection(className) - .then(collection => - collection.updateMany(mongoWhere, mongoUpdate, transactionalSession) - ) + .then(collection => collection.updateMany(mongoWhere, mongoUpdate, transactionalSession)) .catch(err => this.handleError(err)); } @@ -621,9 +564,7 @@ export class MongoStorageAdapter implements StorageAdapter { const mongoUpdate = transformUpdate(className, update, schema); const mongoWhere = transformWhere(className, query, schema); return this._adaptiveCollection(className) - .then(collection => - collection.upsertOne(mongoWhere, mongoUpdate, transactionalSession) - ) + .then(collection => collection.upsertOne(mongoWhere, mongoUpdate, transactionalSession)) .catch(err => this.handleError(err)); } @@ -632,16 +573,7 @@ export class MongoStorageAdapter implements StorageAdapter { className: string, schema: SchemaType, query: QueryType, - { - skip, - limit, - sort, - keys, - readPreference, - hint, - caseInsensitive, - explain, - }: QueryOptions + { skip, limit, sort, keys, readPreference, hint, caseInsensitive, explain }: QueryOptions ): Promise { schema = convertParseSchemaToMongoSchema(schema); const mongoWhere = transformWhere(className, query, schema); @@ -689,9 +621,7 @@ export class MongoStorageAdapter implements StorageAdapter { if (explain) { return objects; } - return objects.map(object => - mongoObjectToParseObject(className, object, schema) - ); + return objects.map(object => mongoObjectToParseObject(className, object, schema)); }) .catch(err => this.handleError(err)); } @@ -706,18 +636,14 @@ export class MongoStorageAdapter implements StorageAdapter { ): Promise { schema = convertParseSchemaToMongoSchema(schema); const indexCreationRequest = {}; - const mongoFieldNames = fieldNames.map(fieldName => - transformKey(className, fieldName, schema) - ); + const mongoFieldNames = fieldNames.map(fieldName => transformKey(className, fieldName, schema)); mongoFieldNames.forEach(fieldName => { - indexCreationRequest[fieldName] = - options.indexType !== undefined ? options.indexType : 1; + indexCreationRequest[fieldName] = options.indexType !== undefined ? options.indexType : 1; }); const defaultOptions: Object = { background: true, sparse: true }; const indexNameOptions: Object = indexName ? { name: indexName } : {}; - const ttlOptions: Object = - options.ttl !== undefined ? { expireAfterSeconds: options.ttl } : {}; + const ttlOptions: Object = options.ttl !== undefined ? { expireAfterSeconds: options.ttl } : {}; const caseInsensitiveOptions: Object = caseInsensitive ? { collation: MongoCollection.caseInsensitiveCollation() } : {}; @@ -732,10 +658,8 @@ export class MongoStorageAdapter implements StorageAdapter { .then( collection => new Promise((resolve, reject) => - collection._mongoCollection.createIndex( - indexCreationRequest, - indexOptions, - error => (error ? reject(error) : resolve()) + collection._mongoCollection.createIndex(indexCreationRequest, indexOptions, error => + error ? 
reject(error) : resolve() ) ) ) @@ -747,23 +671,15 @@ export class MongoStorageAdapter implements StorageAdapter { // As such, we shouldn't expose this function to users of parse until we have an out-of-band // Way of determining if a field is nullable. Undefined doesn't count against uniqueness, // which is why we use sparse indexes. - ensureUniqueness( - className: string, - schema: SchemaType, - fieldNames: string[] - ) { + ensureUniqueness(className: string, schema: SchemaType, fieldNames: string[]) { schema = convertParseSchemaToMongoSchema(schema); const indexCreationRequest = {}; - const mongoFieldNames = fieldNames.map(fieldName => - transformKey(className, fieldName, schema) - ); + const mongoFieldNames = fieldNames.map(fieldName => transformKey(className, fieldName, schema)); mongoFieldNames.forEach(fieldName => { indexCreationRequest[fieldName] = 1; }); return this._adaptiveCollection(className) - .then(collection => - collection._ensureSparseUniqueIndexInBackground(indexCreationRequest) - ) + .then(collection => collection._ensureSparseUniqueIndexInBackground(indexCreationRequest)) .catch(error => { if (error.code === 11000) { throw new Parse.Error( @@ -808,23 +724,14 @@ export class MongoStorageAdapter implements StorageAdapter { .catch(err => this.handleError(err)); } - distinct( - className: string, - schema: SchemaType, - query: QueryType, - fieldName: string - ) { + distinct(className: string, schema: SchemaType, query: QueryType, fieldName: string) { schema = convertParseSchemaToMongoSchema(schema); - const isPointerField = - schema.fields[fieldName] && schema.fields[fieldName].type === 'Pointer'; + const isPointerField = schema.fields[fieldName] && schema.fields[fieldName].type === 'Pointer'; const transformField = transformKey(className, fieldName, schema); return this._adaptiveCollection(className) .then(collection => - collection.distinct( - transformField, - transformWhere(className, query, schema) - ) + collection.distinct(transformField, transformWhere(className, query, schema)) ) .then(objects => { objects = objects.filter(obj => obj != null); @@ -862,16 +769,10 @@ export class MongoStorageAdapter implements StorageAdapter { stage.$match = this._parseAggregateArgs(schema, stage.$match); } if (stage.$project) { - stage.$project = this._parseAggregateProjectArgs( - schema, - stage.$project - ); + stage.$project = this._parseAggregateProjectArgs(schema, stage.$project); } if (stage.$geoNear && stage.$geoNear.query) { - stage.$geoNear.query = this._parseAggregateArgs( - schema, - stage.$geoNear.query - ); + stage.$geoNear.query = this._parseAggregateArgs(schema, stage.$geoNear.query); } return stage; }); @@ -894,8 +795,7 @@ export class MongoStorageAdapter implements StorageAdapter { if ( result._id == null || result._id == undefined || - (['object', 'string'].includes(typeof result._id) && - _.isEmpty(result._id)) + (['object', 'string'].includes(typeof result._id) && _.isEmpty(result._id)) ) { result._id = null; } @@ -905,11 +805,7 @@ export class MongoStorageAdapter implements StorageAdapter { }); return results; }) - .then(objects => - objects.map(object => - mongoObjectToParseObject(className, object, schema) - ) - ) + .then(objects => objects.map(object => mongoObjectToParseObject(className, object, schema))) .catch(err => this.handleError(err)); } @@ -945,20 +841,12 @@ export class MongoStorageAdapter implements StorageAdapter { // Pass objects down to MongoDB...this is more than likely an $exists operator. 
returnValue[`_p_${field}`] = pipeline[field]; } else { - returnValue[ - `_p_${field}` - ] = `${schema.fields[field].targetClass}$${pipeline[field]}`; + returnValue[`_p_${field}`] = `${schema.fields[field].targetClass}$${pipeline[field]}`; } - } else if ( - schema.fields[field] && - schema.fields[field].type === 'Date' - ) { + } else if (schema.fields[field] && schema.fields[field].type === 'Date') { returnValue[field] = this._convertToDate(pipeline[field]); } else { - returnValue[field] = this._parseAggregateArgs( - schema, - pipeline[field] - ); + returnValue[field] = this._parseAggregateArgs(schema, pipeline[field]); } if (field === 'objectId') { @@ -1011,16 +899,11 @@ export class MongoStorageAdapter implements StorageAdapter { // updatedAt or objectId and change it accordingly. _parseAggregateGroupArgs(schema: any, pipeline: any): any { if (Array.isArray(pipeline)) { - return pipeline.map(value => - this._parseAggregateGroupArgs(schema, value) - ); + return pipeline.map(value => this._parseAggregateGroupArgs(schema, value)); } else if (typeof pipeline === 'object') { const returnValue = {}; for (const field in pipeline) { - returnValue[field] = this._parseAggregateGroupArgs( - schema, - pipeline[field] - ); + returnValue[field] = this._parseAggregateGroupArgs(schema, pipeline[field]); } return returnValue; } else if (typeof pipeline === 'string') { @@ -1077,10 +960,7 @@ export class MongoStorageAdapter implements StorageAdapter { case '': break; default: - throw new Parse.Error( - Parse.Error.INVALID_QUERY, - 'Not supported read preference.' - ); + throw new Parse.Error(Parse.Error.INVALID_QUERY, 'Not supported read preference.'); } return readPreference; } @@ -1111,11 +991,7 @@ export class MongoStorageAdapter implements StorageAdapter { return Promise.resolve(); } - createTextIndexesIfNeeded( - className: string, - query: QueryType, - schema: any - ): Promise { + createTextIndexesIfNeeded(className: string, query: QueryType, schema: any): Promise { for (const fieldName in query) { if (!query[fieldName] || !query[fieldName].$text) { continue; diff --git a/src/Adapters/Storage/Postgres/PostgresStorageAdapter.js b/src/Adapters/Storage/Postgres/PostgresStorageAdapter.js index aa2ddf3f40..085b925049 100644 --- a/src/Adapters/Storage/Postgres/PostgresStorageAdapter.js +++ b/src/Adapters/Storage/Postgres/PostgresStorageAdapter.js @@ -253,12 +253,7 @@ interface WhereClause { sorts: Array; } -const buildWhereClause = ({ - schema, - query, - index, - caseInsensitive, -}): WhereClause => { +const buildWhereClause = ({ schema, query, index, caseInsensitive }): WhereClause => { const patterns = []; let values = []; const sorts = []; @@ -266,9 +261,7 @@ const buildWhereClause = ({ schema = toPostgresSchema(schema); for (const fieldName in query) { const isArrayField = - schema.fields && - schema.fields[fieldName] && - schema.fields[fieldName].type === 'Array'; + schema.fields && schema.fields[fieldName] && schema.fields[fieldName].type === 'Array'; const initialPatternsLength = patterns.length; const fieldValue = query[fieldName]; @@ -284,10 +277,7 @@ const buildWhereClause = ({ if (authDataMatch) { // TODO: Handle querying by _auth_data_provider, authData is stored in authData field continue; - } else if ( - caseInsensitive && - (fieldName === 'username' || fieldName === 'email') - ) { + } else if (caseInsensitive && (fieldName === 'username' || fieldName === 'email')) { patterns.push(`LOWER($${index}:name) = LOWER($${index + 1})`); values.push(fieldName, fieldValue); index += 2; @@ -324,10 
+314,7 @@ const buildWhereClause = ({ } else if (typeof fieldValue === 'boolean') { patterns.push(`$${index}:name = $${index + 1}`); // Can't cast boolean to double precision - if ( - schema.fields[fieldName] && - schema.fields[fieldName].type === 'Number' - ) { + if (schema.fields[fieldName] && schema.fields[fieldName].type === 'Number') { // Should always return zero results const MAX_INT_PLUS_ONE = 9223372036854775808; values.push(fieldName, MAX_INT_PLUS_ONE); @@ -377,9 +364,7 @@ const buildWhereClause = ({ // if not null, we need to manually exclude null if (fieldValue.$ne.__type === 'GeoPoint') { patterns.push( - `($${index}:name <> POINT($${index + 1}, $${ - index + 2 - }) OR $${index}:name IS NULL)` + `($${index}:name <> POINT($${index + 1}, $${index + 2}) OR $${index}:name IS NULL)` ); } else { if (fieldName.indexOf('.') >= 0) { @@ -388,9 +373,7 @@ const buildWhereClause = ({ `(${constraintFieldName} <> $${index} OR ${constraintFieldName} IS NULL)` ); } else { - patterns.push( - `($${index}:name <> $${index + 1} OR $${index}:name IS NULL)` - ); + patterns.push(`($${index}:name <> $${index + 1} OR $${index}:name IS NULL)`); } } } @@ -421,8 +404,7 @@ const buildWhereClause = ({ } } } - const isInOrNin = - Array.isArray(fieldValue.$in) || Array.isArray(fieldValue.$nin); + const isInOrNin = Array.isArray(fieldValue.$in) || Array.isArray(fieldValue.$nin); if ( Array.isArray(fieldValue.$in) && isArrayField && @@ -441,9 +423,7 @@ const buildWhereClause = ({ } }); if (allowNull) { - patterns.push( - `($${index}:name IS NULL OR $${index}:name && ARRAY[${inPatterns.join()}])` - ); + patterns.push(`($${index}:name IS NULL OR $${index}:name && ARRAY[${inPatterns.join()}])`); } else { patterns.push(`$${index}:name && ARRAY[${inPatterns.join()}]`); } @@ -453,9 +433,7 @@ const buildWhereClause = ({ const not = notIn ? 
' NOT ' : ''; if (baseArray.length > 0) { if (isArrayField) { - patterns.push( - `${not} array_contains($${index}:name, $${index + 1})` - ); + patterns.push(`${not} array_contains($${index}:name, $${index + 1})`); values.push(fieldName, JSON.stringify(baseArray)); index += 2; } else { @@ -518,13 +496,9 @@ const buildWhereClause = ({ const value = processRegexPattern(fieldValue.$all[i].$regex); fieldValue.$all[i] = value.substring(1) + '%'; } - patterns.push( - `array_contains_all_regex($${index}:name, $${index + 1}::jsonb)` - ); + patterns.push(`array_contains_all_regex($${index}:name, $${index + 1}::jsonb)`); } else { - patterns.push( - `array_contains_all($${index}:name, $${index + 1}::jsonb)` - ); + patterns.push(`array_contains_all($${index}:name, $${index + 1}::jsonb)`); } values.push(fieldName, JSON.stringify(fieldValue.$all)); index += 2; @@ -549,10 +523,7 @@ const buildWhereClause = ({ if (fieldValue.$containedBy) { const arr = fieldValue.$containedBy; if (!(arr instanceof Array)) { - throw new Parse.Error( - Parse.Error.INVALID_JSON, - `bad $containedBy: should be an array` - ); + throw new Parse.Error(Parse.Error.INVALID_JSON, `bad $containedBy: should be an array`); } patterns.push(`$${index}:name <@ $${index + 1}::jsonb`); @@ -564,22 +535,13 @@ const buildWhereClause = ({ const search = fieldValue.$text.$search; let language = 'english'; if (typeof search !== 'object') { - throw new Parse.Error( - Parse.Error.INVALID_JSON, - `bad $text: $search, should be object` - ); + throw new Parse.Error(Parse.Error.INVALID_JSON, `bad $text: $search, should be object`); } if (!search.$term || typeof search.$term !== 'string') { - throw new Parse.Error( - Parse.Error.INVALID_JSON, - `bad $text: $term, should be string` - ); + throw new Parse.Error(Parse.Error.INVALID_JSON, `bad $text: $term, should be string`); } if (search.$language && typeof search.$language !== 'string') { - throw new Parse.Error( - Parse.Error.INVALID_JSON, - `bad $text: $language, should be string` - ); + throw new Parse.Error(Parse.Error.INVALID_JSON, `bad $text: $language, should be string`); } else if (search.$language) { language = search.$language; } @@ -594,10 +556,7 @@ const buildWhereClause = ({ `bad $text: $caseSensitive not supported, please use $regex or create a separate lower case column.` ); } - if ( - search.$diacriticSensitive && - typeof search.$diacriticSensitive !== 'boolean' - ) { + if (search.$diacriticSensitive && typeof search.$diacriticSensitive !== 'boolean') { throw new Parse.Error( Parse.Error.INVALID_JSON, `bad $text: $diacriticSensitive, should be boolean` @@ -609,9 +568,7 @@ const buildWhereClause = ({ ); } patterns.push( - `to_tsvector($${index}, $${index + 1}:name) @@ to_tsquery($${ - index + 2 - }, $${index + 3})` + `to_tsvector($${index}, $${index + 1}:name) @@ to_tsquery($${index + 2}, $${index + 3})` ); values.push(language, fieldName, language, search.$term); index += 4; @@ -716,10 +673,7 @@ const buildWhereClause = ({ return `(${point[0]}, ${point[1]})`; } if (typeof point !== 'object' || point.__type !== 'GeoPoint') { - throw new Parse.Error( - Parse.Error.INVALID_JSON, - 'bad $geoWithin value' - ); + throw new Parse.Error(Parse.Error.INVALID_JSON, 'bad $geoWithin value'); } else { Parse.GeoPoint._validate(point.latitude, point.longitude); } @@ -830,9 +784,7 @@ const buildWhereClause = ({ if (initialPatternsLength === patterns.length) { throw new Parse.Error( Parse.Error.OPERATION_FORBIDDEN, - `Postgres doesn't support this query type yet ${JSON.stringify( - fieldValue - )}` + 
`Postgres doesn't support this query type yet ${JSON.stringify(fieldValue)}` ); } } @@ -856,6 +808,10 @@ export class PostgresStorageAdapter implements StorageAdapter { this.canSortOnJoinTables = false; } + watch(/* callback: () => void */): void { + // this._onchange = callback; + } + //Note that analyze=true will run the query, executing INSERTS, DELETES, etc. createExplainableQuery(query: string, analyze: boolean = false) { if (analyze) { @@ -903,12 +859,7 @@ export class PostgresStorageAdapter implements StorageAdapter { const self = this; await this._client.task('set-class-level-permissions', async t => { await self._ensureSchemaCollectionExists(t); - const values = [ - className, - 'schema', - 'classLevelPermissions', - JSON.stringify(CLPs), - ]; + const values = [className, 'schema', 'classLevelPermissions', JSON.stringify(CLPs)]; await t.none( `UPDATE "_SCHEMA" SET $2:name = json_object_set_key($2:name, $3::text, $4::jsonb) WHERE "className" = $1`, values @@ -936,10 +887,7 @@ export class PostgresStorageAdapter implements StorageAdapter { Object.keys(submittedIndexes).forEach(name => { const field = submittedIndexes[name]; if (existingIndexes[name] && field.__op !== 'Delete') { - throw new Parse.Error( - Parse.Error.INVALID_QUERY, - `Index ${name} exists, cannot update.` - ); + throw new Parse.Error(Parse.Error.INVALID_QUERY, `Index ${name} exists, cannot update.`); } if (!existingIndexes[name] && field.__op === 'Delete') { throw new Parse.Error( @@ -990,24 +938,12 @@ export class PostgresStorageAdapter implements StorageAdapter { 'INSERT INTO "_SCHEMA" ("className", "schema", "isParseClass") VALUES ($, $, true)', { className, schema } ); - await this.setIndexesWithSchemaFormat( - className, - schema.indexes, - {}, - schema.fields, - t - ); + await this.setIndexesWithSchemaFormat(className, schema.indexes, {}, schema.fields, t); return toParseSchema(schema); }) .catch(err => { - if ( - err.code === PostgresUniqueIndexViolationError && - err.detail.includes(className) - ) { - throw new Parse.Error( - Parse.Error.DUPLICATE_VALUE, - `Class ${className} already exists.` - ); + if (err.code === PostgresUniqueIndexViolationError && err.detail.includes(className)) { + throw new Parse.Error(Parse.Error.DUPLICATE_VALUE, `Class ${className} already exists.`); } throw err; }); @@ -1093,24 +1029,14 @@ export class PostgresStorageAdapter implements StorageAdapter { const newColumns = Object.keys(schema.fields) .filter(item => columns.indexOf(item) === -1) .map(fieldName => - self.addFieldIfNotExists( - className, - fieldName, - schema.fields[fieldName], - t - ) + self.addFieldIfNotExists(className, fieldName, schema.fields[fieldName], t) ); await t.batch(newColumns); }); } - async addFieldIfNotExists( - className: string, - fieldName: string, - type: any, - conn: any - ) { + async addFieldIfNotExists(className: string, fieldName: string, type: any, conn: any) { // TODO: Must be revised for invalid logic... debug('addFieldIfNotExists', { className, fieldName, type }); conn = conn || this._client; @@ -1128,11 +1054,7 @@ export class PostgresStorageAdapter implements StorageAdapter { ); } catch (error) { if (error.code === PostgresRelationDoesNotExistError) { - return self.createClass( - className, - { fields: { [fieldName]: type } }, - t - ); + return self.createClass(className, { fields: { [fieldName]: type } }, t); } if (error.code !== PostgresDuplicateColumnError) { throw error; @@ -1234,11 +1156,7 @@ export class PostgresStorageAdapter implements StorageAdapter { // may do so. 
// Returns a Promise. - async deleteFields( - className: string, - schema: SchemaType, - fieldNames: string[] - ): Promise { + async deleteFields(className: string, schema: SchemaType, fieldNames: string[]): Promise { debug('deleteFields', className, fieldNames); fieldNames = fieldNames.reduce((list: Array, fieldName: string) => { const field = schema.fields[fieldName]; @@ -1257,15 +1175,12 @@ export class PostgresStorageAdapter implements StorageAdapter { .join(', DROP COLUMN'); await this._client.tx('delete-fields', async t => { - await t.none( - 'UPDATE "_SCHEMA" SET "schema" = $ WHERE "className" = $', - { schema, className } - ); + await t.none('UPDATE "_SCHEMA" SET "schema" = $ WHERE "className" = $', { + schema, + className, + }); if (values.length > 1) { - await t.none( - `ALTER TABLE $1:name DROP COLUMN IF EXISTS ${columns}`, - values - ); + await t.none(`ALTER TABLE $1:name DROP COLUMN IF EXISTS ${columns}`, values); } }); } @@ -1412,10 +1327,7 @@ export class PostgresStorageAdapter implements StorageAdapter { const fieldName = columnsArray[index]; if (['_rperm', '_wperm'].indexOf(fieldName) >= 0) { termination = '::text[]'; - } else if ( - schema.fields[fieldName] && - schema.fields[fieldName].type === 'Array' - ) { + } else if (schema.fields[fieldName] && schema.fields[fieldName].type === 'Array') { termination = '::jsonb'; } return `$${index + 2 + columnsArray.length}${termination}`; @@ -1427,18 +1339,13 @@ export class PostgresStorageAdapter implements StorageAdapter { return `POINT($${l}, $${l + 1})`; }); - const columnsPattern = columnsArray - .map((col, index) => `$${index + 2}:name`) - .join(); + const columnsPattern = columnsArray.map((col, index) => `$${index + 2}:name`).join(); const valuesPattern = initialValues.concat(geoPointsInjects).join(); const qs = `INSERT INTO $1:name (${columnsPattern}) VALUES (${valuesPattern})`; const values = [className, ...columnsArray, ...valuesArray]; debug(qs, values); - const promise = (transactionalSession - ? transactionalSession.t - : this._client - ) + const promise = (transactionalSession ? transactionalSession.t : this._client) .none(qs, values) .then(() => ({ ops: [object] })) .catch(error => { @@ -1488,17 +1395,11 @@ export class PostgresStorageAdapter implements StorageAdapter { } const qs = `WITH deleted AS (DELETE FROM $1:name WHERE ${where.pattern} RETURNING *) SELECT count(*) FROM deleted`; debug(qs, values); - const promise = (transactionalSession - ? transactionalSession.t - : this._client - ) + const promise = (transactionalSession ? transactionalSession.t : this._client) .one(qs, values, a => +a.count) .then(count => { if (count === 0) { - throw new Parse.Error( - Parse.Error.OBJECT_NOT_FOUND, - 'Object not found.' - ); + throw new Parse.Error(Parse.Error.OBJECT_NOT_FOUND, 'Object not found.'); } else { return count; } @@ -1523,13 +1424,9 @@ export class PostgresStorageAdapter implements StorageAdapter { transactionalSession: ?any ): Promise { debug('findOneAndUpdate', className, query, update); - return this.updateObjectsByQuery( - className, - schema, - query, - update, - transactionalSession - ).then(val => val[0]); + return this.updateObjectsByQuery(className, schema, query, update, transactionalSession).then( + val => val[0] + ); } // Apply the update to all objects that match the given Parse Query. 
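
The watch() stub added to PostgresStorageAdapter a few hunks above is the adapter half of the new schema-change notification hook: SchemaController (further down in this diff) calls adapter.watch(callback) so its in-memory schema cache can be invalidated when the adapter observes a schema change. For Postgres the hook is a no-op for now; the commented-out body hints at the intended shape. A minimal sketch of what a live implementation might look like, assuming a private _onchange field and an internal _notifySchemaChange() trigger (both names are illustrative, not part of this diff):

    // Sketch only: store the callback and fire it whenever the adapter
    // performs an operation that alters the schema.
    watch(callback: () => void): void {
      this._onchange = callback;
    }

    _notifySchemaChange() {
      if (this._onchange) {
        this._onchange(); // e.g. invoked at the end of createClass/deleteFields
      }
    }
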
@@ -1592,39 +1489,28 @@ export class PostgresStorageAdapter implements StorageAdapter { const fieldNameIndex = index; index += 1; values.push(fieldName); - const update = Object.keys(fieldValue).reduce( - (lastKey: string, key: string) => { - const str = generate( - lastKey, - `$${index}::text`, - `$${index + 1}::jsonb` - ); - index += 2; - let value = fieldValue[key]; - if (value) { - if (value.__op === 'Delete') { - value = null; - } else { - value = JSON.stringify(value); - } + const update = Object.keys(fieldValue).reduce((lastKey: string, key: string) => { + const str = generate(lastKey, `$${index}::text`, `$${index + 1}::jsonb`); + index += 2; + let value = fieldValue[key]; + if (value) { + if (value.__op === 'Delete') { + value = null; + } else { + value = JSON.stringify(value); } - values.push(key, value); - return str; - }, - lastKey - ); + } + values.push(key, value); + return str; + }, lastKey); updatePatterns.push(`$${fieldNameIndex}:name = ${update}`); } else if (fieldValue.__op === 'Increment') { - updatePatterns.push( - `$${index}:name = COALESCE($${index}:name, 0) + $${index + 1}` - ); + updatePatterns.push(`$${index}:name = COALESCE($${index}:name, 0) + $${index + 1}`); values.push(fieldName, fieldValue.amount); index += 2; } else if (fieldValue.__op === 'Add') { updatePatterns.push( - `$${index}:name = array_add(COALESCE($${index}:name, '[]'::jsonb), $${ - index + 1 - }::jsonb)` + `$${index}:name = array_add(COALESCE($${index}:name, '[]'::jsonb), $${index + 1}::jsonb)` ); values.push(fieldName, JSON.stringify(fieldValue.objects)); index += 2; @@ -1678,9 +1564,7 @@ export class PostgresStorageAdapter implements StorageAdapter { values.push(fieldName, toPostgresValue(fieldValue)); index += 2; } else if (fieldValue.__type === 'GeoPoint') { - updatePatterns.push( - `$${index}:name = POINT($${index + 1}, $${index + 2})` - ); + updatePatterns.push(`$${index}:name = POINT($${index + 1}, $${index + 2})`); values.push(fieldName, fieldValue.longitude, fieldValue.latitude); index += 3; } else if (fieldValue.__type === 'Polygon') { @@ -1745,12 +1629,9 @@ export class PostgresStorageAdapter implements StorageAdapter { }) .map(k => k.split('.')[1]); - const deletePatterns = keysToDelete.reduce( - (p: string, c: string, i: number) => { - return p + ` - '$${index + 1 + i}:value'`; - }, - '' - ); + const deletePatterns = keysToDelete.reduce((p: string, c: string, i: number) => { + return p + ` - '$${index + 1 + i}:value'`; + }, ''); // Override Object let updateObject = "'{}'::jsonb"; @@ -1799,14 +1680,10 @@ export class PostgresStorageAdapter implements StorageAdapter { }); values.push(...where.values); - const whereClause = - where.pattern.length > 0 ? `WHERE ${where.pattern}` : ''; + const whereClause = where.pattern.length > 0 ? `WHERE ${where.pattern}` : ''; const qs = `UPDATE $1:name SET ${updatePatterns.join()} ${whereClause} RETURNING *`; debug('update: ', qs, values); - const promise = (transactionalSession - ? transactionalSession.t - : this._client - ).any(qs, values); + const promise = (transactionalSession ? 
transactionalSession.t : this._client).any(qs, values); if (transactionalSession) { transactionalSession.batch.push(promise); } @@ -1823,23 +1700,12 @@ export class PostgresStorageAdapter implements StorageAdapter { ) { debug('upsertOneObject', { className, query, update }); const createValue = Object.assign({}, query, update); - return this.createObject( - className, - schema, - createValue, - transactionalSession - ).catch(error => { + return this.createObject(className, schema, createValue, transactionalSession).catch(error => { // ignore duplicate value errors as it's upsert if (error.code !== Parse.Error.DUPLICATE_VALUE) { throw error; } - return this.findOneAndUpdate( - className, - schema, - query, - update, - transactionalSession - ); + return this.findOneAndUpdate(className, schema, query, update, transactionalSession); }); } @@ -1868,8 +1734,7 @@ export class PostgresStorageAdapter implements StorageAdapter { }); values.push(...where.values); - const wherePattern = - where.pattern.length > 0 ? `WHERE ${where.pattern}` : ''; + const wherePattern = where.pattern.length > 0 ? `WHERE ${where.pattern}` : ''; const limitPattern = hasLimit ? `LIMIT $${values.length + 1}` : ''; if (hasLimit) { values.push(limit); @@ -1892,10 +1757,7 @@ export class PostgresStorageAdapter implements StorageAdapter { return `${transformKey} DESC`; }) .join(); - sortPattern = - sort !== undefined && Object.keys(sort).length > 0 - ? `ORDER BY ${sorting}` - : ''; + sortPattern = sort !== undefined && Object.keys(sort).length > 0 ? `ORDER BY ${sorting}` : ''; } if (where.sorts && Object.keys((where.sorts: any)).length > 0) { sortPattern = `ORDER BY ${where.sorts.join()}`; @@ -1926,9 +1788,7 @@ export class PostgresStorageAdapter implements StorageAdapter { } const originalQuery = `SELECT ${columns} FROM $1:name ${wherePattern} ${sortPattern} ${limitPattern} ${skipPattern}`; - const qs = explain - ? this.createExplainableQuery(originalQuery) - : originalQuery; + const qs = explain ? this.createExplainableQuery(originalQuery) : originalQuery; debug(qs, values); return this._client .any(qs, values) @@ -1943,9 +1803,7 @@ export class PostgresStorageAdapter implements StorageAdapter { if (explain) { return results; } - return results.map(object => - this.postgresObjectToParseObject(className, object, schema) - ); + return results.map(object => this.postgresObjectToParseObject(className, object, schema)); }); } @@ -1977,10 +1835,7 @@ export class PostgresStorageAdapter implements StorageAdapter { let coords = object[fieldName]; coords = coords.substr(2, coords.length - 4).split('),('); coords = coords.map(point => { - return [ - parseFloat(point.split(',')[1]), - parseFloat(point.split(',')[0]), - ]; + return [parseFloat(point.split(',')[1]), parseFloat(point.split(',')[0])]; }); object[fieldName] = { __type: 'Polygon', @@ -2052,37 +1907,26 @@ export class PostgresStorageAdapter implements StorageAdapter { // As such, we shouldn't expose this function to users of parse until we have an out-of-band // Way of determining if a field is nullable. Undefined doesn't count against uniqueness, // which is why we use sparse indexes. 
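
The ensureUniqueness hunk below only reflows the query construction; the statement sent to Postgres is unchanged. For reference, pg-promise's :name filter expands each placeholder to a double-quoted identifier, so a sketch of one concrete expansion (the class and field names here are illustrative):

    // Worked example for ensureUniqueness('_User', schema, ['username']):
    const constraintName = '_User_unique_username'; // `${className}_unique_${fieldNames.sort().join('_')}`
    const qs = 'CREATE UNIQUE INDEX IF NOT EXISTS $2:name ON $1:name($3:name)';
    // With values ['_User', constraintName, 'username'], pg-promise emits:
    //   CREATE UNIQUE INDEX IF NOT EXISTS "_User_unique_username" ON "_User"("username")
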
- async ensureUniqueness( - className: string, - schema: SchemaType, - fieldNames: string[] - ) { + async ensureUniqueness(className: string, schema: SchemaType, fieldNames: string[]) { const constraintName = `${className}_unique_${fieldNames.sort().join('_')}`; - const constraintPatterns = fieldNames.map( - (fieldName, index) => `$${index + 3}:name` - ); + const constraintPatterns = fieldNames.map((fieldName, index) => `$${index + 3}:name`); const qs = `CREATE UNIQUE INDEX IF NOT EXISTS $2:name ON $1:name(${constraintPatterns.join()})`; - return this._client - .none(qs, [className, constraintName, ...fieldNames]) - .catch(error => { - if ( - error.code === PostgresDuplicateRelationError && - error.message.includes(constraintName) - ) { - // Index already exists. Ignore error. - } else if ( - error.code === PostgresUniqueIndexViolationError && - error.message.includes(constraintName) - ) { - // Cast the error into the proper parse error - throw new Parse.Error( - Parse.Error.DUPLICATE_VALUE, - 'A duplicate value for a field with unique values was provided' - ); - } else { - throw error; - } - }); + return this._client.none(qs, [className, constraintName, ...fieldNames]).catch(error => { + if (error.code === PostgresDuplicateRelationError && error.message.includes(constraintName)) { + // Index already exists. Ignore error. + } else if ( + error.code === PostgresUniqueIndexViolationError && + error.message.includes(constraintName) + ) { + // Cast the error into the proper parse error + throw new Parse.Error( + Parse.Error.DUPLICATE_VALUE, + 'A duplicate value for a field with unique values was provided' + ); + } else { + throw error; + } + }); } // Executes a count. @@ -2103,15 +1947,13 @@ export class PostgresStorageAdapter implements StorageAdapter { }); values.push(...where.values); - const wherePattern = - where.pattern.length > 0 ? `WHERE ${where.pattern}` : ''; + const wherePattern = where.pattern.length > 0 ? `WHERE ${where.pattern}` : ''; let qs = ''; if (where.pattern.length > 0 || !estimate) { qs = `SELECT count(*) FROM $1:name ${wherePattern}`; } else { - qs = - 'SELECT reltuples AS approximate_row_count FROM pg_class WHERE relname = $1'; + qs = 'SELECT reltuples AS approximate_row_count FROM pg_class WHERE relname = $1'; } return this._client @@ -2130,12 +1972,7 @@ export class PostgresStorageAdapter implements StorageAdapter { }); } - async distinct( - className: string, - schema: SchemaType, - query: QueryType, - fieldName: string - ) { + async distinct(className: string, schema: SchemaType, query: QueryType, fieldName: string) { debug('distinct', className, query); let field = fieldName; let column = fieldName; @@ -2145,13 +1982,9 @@ export class PostgresStorageAdapter implements StorageAdapter { column = fieldName.split('.')[0]; } const isArrayField = - schema.fields && - schema.fields[fieldName] && - schema.fields[fieldName].type === 'Array'; + schema.fields && schema.fields[fieldName] && schema.fields[fieldName].type === 'Array'; const isPointerField = - schema.fields && - schema.fields[fieldName] && - schema.fields[fieldName].type === 'Pointer'; + schema.fields && schema.fields[fieldName] && schema.fields[fieldName].type === 'Pointer'; const values = [field, column, className]; const where = buildWhereClause({ schema, @@ -2161,8 +1994,7 @@ export class PostgresStorageAdapter implements StorageAdapter { }); values.push(...where.values); - const wherePattern = - where.pattern.length > 0 ? `WHERE ${where.pattern}` : ''; + const wherePattern = where.pattern.length > 0 ? 
`WHERE ${where.pattern}` : ''; const transformer = isArrayField ? 'jsonb_array_elements' : 'ON'; let qs = `SELECT DISTINCT ${transformer}($1:name) $2:name FROM $3:name ${wherePattern}`; if (isNested) { @@ -2195,9 +2027,7 @@ export class PostgresStorageAdapter implements StorageAdapter { return results.map(object => object[column][child]); }) .then(results => - results.map(object => - this.postgresObjectToParseObject(className, object, schema) - ) + results.map(object => this.postgresObjectToParseObject(className, object, schema)) ); } @@ -2235,11 +2065,7 @@ export class PostgresStorageAdapter implements StorageAdapter { index += 1; continue; } - if ( - field === '_id' && - typeof value === 'object' && - Object.keys(value).length !== 0 - ) { + if (field === '_id' && typeof value === 'object' && Object.keys(value).length !== 0) { groupValues = value; const groupByFields = []; for (const alias in value) { @@ -2261,9 +2087,7 @@ export class PostgresStorageAdapter implements StorageAdapter { columns.push( `EXTRACT(${ mongoAggregateToPostgres[operation] - } FROM $${index}:name AT TIME ZONE 'UTC') AS $${ - index + 1 - }:name` + } FROM $${index}:name AT TIME ZONE 'UTC') AS $${index + 1}:name` ); values.push(source, alias); index += 2; @@ -2323,10 +2147,7 @@ export class PostgresStorageAdapter implements StorageAdapter { } if (stage.$match) { const patterns = []; - const orOrAnd = Object.prototype.hasOwnProperty.call( - stage.$match, - '$or' - ) + const orOrAnd = Object.prototype.hasOwnProperty.call(stage.$match, '$or') ? ' OR ' : ' AND '; @@ -2345,9 +2166,7 @@ export class PostgresStorageAdapter implements StorageAdapter { Object.keys(ParseToPosgresComparator).forEach(cmp => { if (value[cmp]) { const pgComparator = ParseToPosgresComparator[cmp]; - matchPatterns.push( - `$${index}:name ${pgComparator} $${index + 1}` - ); + matchPatterns.push(`$${index}:name ${pgComparator} $${index + 1}`); values.push(field, toPostgresValue(value[cmp])); index += 2; } @@ -2355,18 +2174,13 @@ export class PostgresStorageAdapter implements StorageAdapter { if (matchPatterns.length > 0) { patterns.push(`(${matchPatterns.join(' AND ')})`); } - if ( - schema.fields[field] && - schema.fields[field].type && - matchPatterns.length === 0 - ) { + if (schema.fields[field] && schema.fields[field].type && matchPatterns.length === 0) { patterns.push(`$${index}:name = $${index + 1}`); values.push(field, value); index += 2; } } - wherePattern = - patterns.length > 0 ? `WHERE ${patterns.join(` ${orOrAnd} `)}` : ''; + wherePattern = patterns.length > 0 ? `WHERE ${patterns.join(` ${orOrAnd} `)}` : ''; } if (stage.$limit) { limitPattern = `LIMIT $${index}`; @@ -2390,8 +2204,7 @@ export class PostgresStorageAdapter implements StorageAdapter { }) .join(); values.push(...keys); - sortPattern = - sort !== undefined && sorting.length > 0 ? `ORDER BY ${sorting}` : ''; + sortPattern = sort !== undefined && sorting.length > 0 ? `ORDER BY ${sorting}` : ''; } } @@ -2406,17 +2219,13 @@ export class PostgresStorageAdapter implements StorageAdapter { const originalQuery = `SELECT ${columns .filter(Boolean) .join()} FROM $1:name ${wherePattern} ${skipPattern} ${groupPattern} ${sortPattern} ${limitPattern}`; - const qs = explain - ? this.createExplainableQuery(originalQuery) - : originalQuery; + const qs = explain ? 
this.createExplainableQuery(originalQuery) : originalQuery; debug(qs, values); return this._client.any(qs, values).then(a => { if (explain) { return a; } - const results = a.map(object => - this.postgresObjectToParseObject(className, object, schema) - ); + const results = a.map(object => this.postgresObjectToParseObject(className, object, schema)); results.forEach(result => { if (!Object.prototype.hasOwnProperty.call(result, 'objectId')) { result.objectId = null; @@ -2474,11 +2283,7 @@ export class PostgresStorageAdapter implements StorageAdapter { }); } - async createIndexes( - className: string, - indexes: any, - conn: ?any - ): Promise { + async createIndexes(className: string, indexes: any, conn: ?any): Promise { return (conn || this._client).tx(t => t.batch( indexes.map(i => { @@ -2498,9 +2303,7 @@ export class PostgresStorageAdapter implements StorageAdapter { type: any, conn: ?any ): Promise { - await ( - conn || this._client - ).none('CREATE INDEX IF NOT EXISTS $1:name ON $2:name ($3:name)', [ + await (conn || this._client).none('CREATE INDEX IF NOT EXISTS $1:name ON $2:name ($3:name)', [ fieldName, className, type, @@ -2512,9 +2315,7 @@ export class PostgresStorageAdapter implements StorageAdapter { query: 'DROP INDEX $1:name', values: i, })); - await (conn || this._client).tx(t => - t.none(this._pgp.helpers.concat(queries)) - ); + await (conn || this._client).tx(t => t.none(this._pgp.helpers.concat(queries))); } async getIndexes(className: string) { @@ -2547,18 +2348,14 @@ export class PostgresStorageAdapter implements StorageAdapter { } commitTransactionalSession(transactionalSession: any): Promise { - transactionalSession.resolve( - transactionalSession.t.batch(transactionalSession.batch) - ); + transactionalSession.resolve(transactionalSession.t.batch(transactionalSession.batch)); return transactionalSession.result; } abortTransactionalSession(transactionalSession: any): Promise { const result = transactionalSession.result.catch(); transactionalSession.batch.push(Promise.reject()); - transactionalSession.resolve( - transactionalSession.t.batch(transactionalSession.batch) - ); + transactionalSession.resolve(transactionalSession.t.batch(transactionalSession.batch)); return result; } @@ -2575,41 +2372,34 @@ export class PostgresStorageAdapter implements StorageAdapter { const indexNameOptions: Object = indexName != null ? { name: indexName } : { name: defaultIndexName }; const constraintPatterns = caseInsensitive - ? fieldNames.map( - (fieldName, index) => `lower($${index + 3}:name) varchar_pattern_ops` - ) + ? fieldNames.map((fieldName, index) => `lower($${index + 3}:name) varchar_pattern_ops`) : fieldNames.map((fieldName, index) => `$${index + 3}:name`); const qs = `CREATE INDEX IF NOT EXISTS $1:name ON $2:name (${constraintPatterns.join()})`; - await conn - .none(qs, [indexNameOptions.name, className, ...fieldNames]) - .catch(error => { - if ( - error.code === PostgresDuplicateRelationError && - error.message.includes(indexNameOptions.name) - ) { - // Index already exists. Ignore error. 
- } else if ( - error.code === PostgresUniqueIndexViolationError && - error.message.includes(indexNameOptions.name) - ) { - // Cast the error into the proper parse error - throw new Parse.Error( - Parse.Error.DUPLICATE_VALUE, - 'A duplicate value for a field with unique values was provided' - ); - } else { - throw error; - } - }); + await conn.none(qs, [indexNameOptions.name, className, ...fieldNames]).catch(error => { + if ( + error.code === PostgresDuplicateRelationError && + error.message.includes(indexNameOptions.name) + ) { + // Index already exists. Ignore error. + } else if ( + error.code === PostgresUniqueIndexViolationError && + error.message.includes(indexNameOptions.name) + ) { + // Cast the error into the proper parse error + throw new Parse.Error( + Parse.Error.DUPLICATE_VALUE, + 'A duplicate value for a field with unique values was provided' + ); + } else { + throw error; + } + }); } } function convertPolygonToSQL(polygon) { if (polygon.length < 3) { - throw new Parse.Error( - Parse.Error.INVALID_JSON, - `Polygon must have at least 3 values` - ); + throw new Parse.Error(Parse.Error.INVALID_JSON, `Polygon must have at least 3 values`); } if ( polygon[0][0] !== polygon[polygon.length - 1][0] || @@ -2757,9 +2547,7 @@ function literalizeRegexPart(s: string) { var GeoPointCoder = { isValidJSON(value) { - return ( - typeof value === 'object' && value !== null && value.__type === 'GeoPoint' - ); + return typeof value === 'object' && value !== null && value.__type === 'GeoPoint'; }, }; diff --git a/src/Adapters/Storage/StorageAdapter.js b/src/Adapters/Storage/StorageAdapter.js index 5139cc3248..d46265f64f 100644 --- a/src/Adapters/Storage/StorageAdapter.js +++ b/src/Adapters/Storage/StorageAdapter.js @@ -34,18 +34,10 @@ export interface StorageAdapter { classExists(className: string): Promise; setClassLevelPermissions(className: string, clps: any): Promise; createClass(className: string, schema: SchemaType): Promise; - addFieldIfNotExists( - className: string, - fieldName: string, - type: any - ): Promise; + addFieldIfNotExists(className: string, fieldName: string, type: any): Promise; deleteClass(className: string): Promise; deleteAllClasses(fast: boolean): Promise; - deleteFields( - className: string, - schema: SchemaType, - fieldNames: Array - ): Promise; + deleteFields(className: string, schema: SchemaType, fieldNames: Array): Promise; getAllClasses(): Promise; getClass(className: string): Promise; createObject( @@ -95,11 +87,7 @@ export interface StorageAdapter { caseSensitive?: boolean, options?: Object ): Promise; - ensureUniqueness( - className: string, - schema: SchemaType, - fieldNames: Array - ): Promise; + ensureUniqueness(className: string, schema: SchemaType, fieldNames: Array): Promise; count( className: string, schema: SchemaType, @@ -123,6 +111,7 @@ export interface StorageAdapter { explain?: boolean ): Promise; performInitialization(options: ?any): Promise; + watch(callback: () => void): void; // Indexing createIndexes(className: string, indexes: any, conn: ?any): Promise; diff --git a/src/Config.js b/src/Config.js index 87081af43e..0000d876cf 100644 --- a/src/Config.js +++ b/src/Config.js @@ -3,8 +3,6 @@ // mount is the URL for the root of the API; includes http, domain, etc. 
import AppCache from './cache'; -import SchemaCache from './Controllers/SchemaCache'; -import DatabaseController from './Controllers/DatabaseController'; import net from 'net'; import { IdempotencyOptions } from './Options/Definitions'; @@ -28,12 +26,7 @@ export class Config { config.applicationId = applicationId; Object.keys(cacheInfo).forEach(key => { if (key == 'databaseController') { - const schemaCache = new SchemaCache( - cacheInfo.cacheController, - cacheInfo.schemaCacheTTL, - cacheInfo.enableSingleSchemaCache - ); - config.database = new DatabaseController(cacheInfo.databaseController.adapter, schemaCache); + config.database = cacheInfo.databaseController; } else { config[key] = cacheInfo[key]; } diff --git a/src/Controllers/DatabaseController.js b/src/Controllers/DatabaseController.js index 5b6bfc083a..53660ab4ce 100644 --- a/src/Controllers/DatabaseController.js +++ b/src/Controllers/DatabaseController.js @@ -394,13 +394,11 @@ const relationSchema = { class DatabaseController { adapter: StorageAdapter; - schemaCache: any; schemaPromise: ?Promise; _transactionalSession: ?any; - constructor(adapter: StorageAdapter, schemaCache: any) { + constructor(adapter: StorageAdapter) { this.adapter = adapter; - this.schemaCache = schemaCache; // We don't want a mutable this.schema, because then you could have // one request that uses different schemas for different parts of // it. Instead, use loadSchema to get a schema. @@ -434,7 +432,7 @@ class DatabaseController { if (this.schemaPromise != null) { return this.schemaPromise; } - this.schemaPromise = SchemaController.load(this.adapter, this.schemaCache, options); + this.schemaPromise = SchemaController.load(this.adapter, options); this.schemaPromise.then( () => delete this.schemaPromise, () => delete this.schemaPromise @@ -916,7 +914,7 @@ class DatabaseController { */ deleteEverything(fast: boolean = false): Promise { this.schemaPromise = null; - return Promise.all([this.adapter.deleteAllClasses(fast), this.schemaCache.clear()]); + return this.adapter.deleteAllClasses(fast); } // Returns a promise for a list of related ids given an owning id. 
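
The next two file diffs carry the substantive change of this patch: src/Controllers/SchemaCache.js, the per-request CacheController-backed schema cache, is deleted outright, and SchemaController switches to a single module-level object shared by every request and invalidated through the new adapter.watch() hook. A condensed sketch of the resulting caching model, with the shapes taken from the SchemaController hunks below:

    // One shared cache object for the whole process (module scope):
    const singleSchemaCache = {}; // later holds { allClasses: Array<Schema> }

    // Each SchemaController instance reads and writes
    // singleSchemaCache.allClasses directly, and subscribes to
    // adapter-driven invalidation in its constructor:
    this._dbAdapter.watch(() => {
      this.reloadData({ clearCache: true }); // drops allClasses and refetches
    });
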
diff --git a/src/Controllers/SchemaCache.js b/src/Controllers/SchemaCache.js deleted file mode 100644 index 9fe79daa93..0000000000 --- a/src/Controllers/SchemaCache.js +++ /dev/null @@ -1,55 +0,0 @@ -const MAIN_SCHEMA = '__MAIN_SCHEMA'; -const SCHEMA_CACHE_PREFIX = '__SCHEMA'; - -import { randomString } from '../cryptoUtils'; -import defaults from '../defaults'; - -export default class SchemaCache { - cache: Object; - - constructor(cacheController, ttl = defaults.schemaCacheTTL, singleCache = false) { - this.ttl = ttl; - if (typeof ttl == 'string') { - this.ttl = parseInt(ttl); - } - this.cache = cacheController; - this.prefix = SCHEMA_CACHE_PREFIX; - if (!singleCache) { - this.prefix += randomString(20); - } - } - - getAllClasses() { - if (!this.ttl) { - return Promise.resolve(null); - } - return this.cache.get(this.prefix + MAIN_SCHEMA); - } - - setAllClasses(schema) { - if (!this.ttl) { - return Promise.resolve(null); - } - return this.cache.put(this.prefix + MAIN_SCHEMA, schema); - } - - getOneSchema(className) { - if (!this.ttl) { - return Promise.resolve(null); - } - return this.cache.get(this.prefix + MAIN_SCHEMA).then(cachedSchemas => { - cachedSchemas = cachedSchemas || []; - const schema = cachedSchemas.find(cachedSchema => { - return cachedSchema.className === className; - }); - if (schema) { - return Promise.resolve(schema); - } - return Promise.resolve(null); - }); - } - - clear() { - return this.cache.del(this.prefix + MAIN_SCHEMA); - } -} diff --git a/src/Controllers/SchemaController.js b/src/Controllers/SchemaController.js index a35126f38a..10a6b5f359 100644 --- a/src/Controllers/SchemaController.js +++ b/src/Controllers/SchemaController.js @@ -673,15 +673,15 @@ const typeToString = (type: SchemaField | string): string => { // the mongo format and the Parse format. Soon, this will all be Parse format. export default class SchemaController { _dbAdapter: StorageAdapter; - schemaData: { [string]: Schema }; _cache: any; + schemaData: { [string]: Schema }; reloadDataPromise: ?Promise; protectedFields: any; userIdRegEx: RegExp; - constructor(databaseAdapter: StorageAdapter, schemaCache: any) { + constructor(databaseAdapter: StorageAdapter, singleSchemaCache: Object) { this._dbAdapter = databaseAdapter; - this._cache = schemaCache; + this._cache = singleSchemaCache; this.schemaData = new SchemaData(); this.protectedFields = Config.get(Parse.applicationId).protectedFields; @@ -691,6 +691,10 @@ export default class SchemaController { const autoIdRegEx = /^[a-zA-Z0-9]{1,}$/; this.userIdRegEx = customIds ? 
customIdRegEx : autoIdRegEx; + + this._dbAdapter.watch(() => { + this.reloadData({ clearCache: true }); + }); } reloadData(options: LoadSchemaOptions = { clearCache: false }): Promise { @@ -717,12 +721,10 @@ export default class SchemaController { if (options.clearCache) { return this.setAllClasses(); } - return this._cache.getAllClasses().then(allClasses => { - if (allClasses && allClasses.length) { - return Promise.resolve(allClasses); - } - return this.setAllClasses(); - }); + if (this._cache.allClasses && this._cache.allClasses.length) { + return Promise.resolve(this._cache.allClasses); + } + return this.setAllClasses(); } setAllClasses(): Promise> { @@ -730,11 +732,7 @@ export default class SchemaController { .getAllClasses() .then(allSchemas => allSchemas.map(injectDefaultSchema)) .then(allSchemas => { - /* eslint-disable no-console */ - this._cache - .setAllClasses(allSchemas) - .catch(error => console.error('Error saving schema to cache:', error)); - /* eslint-enable no-console */ + this._cache.allClasses = allSchemas; return allSchemas; }); } @@ -744,32 +742,28 @@ export default class SchemaController { allowVolatileClasses: boolean = false, options: LoadSchemaOptions = { clearCache: false } ): Promise { - let promise = Promise.resolve(); if (options.clearCache) { - promise = this._cache.clear(); + this._cache.allClasses = undefined; } - return promise.then(() => { - if (allowVolatileClasses && volatileClasses.indexOf(className) > -1) { - const data = this.schemaData[className]; - return Promise.resolve({ - className, - fields: data.fields, - classLevelPermissions: data.classLevelPermissions, - indexes: data.indexes, - }); - } - return this._cache.getOneSchema(className).then(cached => { - if (cached && !options.clearCache) { - return Promise.resolve(cached); - } - return this.setAllClasses().then(allSchemas => { - const oneSchema = allSchemas.find(schema => schema.className === className); - if (!oneSchema) { - return Promise.reject(undefined); - } - return oneSchema; - }); + if (allowVolatileClasses && volatileClasses.indexOf(className) > -1) { + const data = this.schemaData[className]; + return Promise.resolve({ + className, + fields: data.fields, + classLevelPermissions: data.classLevelPermissions, + indexes: data.indexes, }); + } + const oneSchema = (this._cache.allClasses || []).find(schema => schema.className === className); + if (oneSchema && !options.clearCache) { + return Promise.resolve(oneSchema); + } + return this.setAllClasses().then(allSchemas => { + const oneSchema = allSchemas.find(schema => schema.className === className); + if (!oneSchema) { + return Promise.reject(undefined); + } + return oneSchema; }); } @@ -1195,7 +1189,10 @@ export default class SchemaController { ); }); }) - .then(() => this._cache.clear()); + .then(() => { + this._cache.allClasses = undefined; + return Promise.resolve(); + }); } // Validates an object provided in REST format. @@ -1404,13 +1401,11 @@ export default class SchemaController { } } +const singleSchemaCache = {}; + // Returns a promise for a new Schema. 
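
With the cache now module-scoped, load() drops its schemaCache parameter (rewritten just below) and every controller it creates shares singleSchemaCache. Call sites shrink accordingly, e.g. the one in DatabaseController.loadSchema earlier in this diff:

    // Before: SchemaController.load(this.adapter, this.schemaCache, options)
    // After:
    this.schemaPromise = SchemaController.load(this.adapter, options);
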
-const load = ( - dbAdapter: StorageAdapter, - schemaCache: any, - options: any -): Promise => { - const schema = new SchemaController(dbAdapter, schemaCache); +const load = (dbAdapter: StorageAdapter, options: any): Promise => { + const schema = new SchemaController(dbAdapter, singleSchemaCache); return schema.reloadData(options).then(() => schema); }; diff --git a/src/Controllers/index.js b/src/Controllers/index.js index 1e4765b666..5f911b75da 100644 --- a/src/Controllers/index.js +++ b/src/Controllers/index.js @@ -15,7 +15,6 @@ import { PushController } from './PushController'; import { PushQueue } from '../Push/PushQueue'; import { PushWorker } from '../Push/PushWorker'; import DatabaseController from './DatabaseController'; -import SchemaCache from './SchemaCache'; // Adapters import { GridFSBucketAdapter } from '../Adapters/Files/GridFSBucketAdapter'; @@ -41,7 +40,7 @@ export function getControllers(options: ParseServerOptions) { const cacheController = getCacheController(options); const analyticsController = getAnalyticsController(options); const liveQueryController = getLiveQueryController(options); - const databaseController = getDatabaseController(options, cacheController); + const databaseController = getDatabaseController(options); const hooksController = getHooksController(options, databaseController); const authDataManager = getAuthDataManager(options); const parseGraphQLController = getParseGraphQLController(options, { @@ -141,17 +140,8 @@ export function getLiveQueryController(options: ParseServerOptions): LiveQueryCo return new LiveQueryController(options.liveQuery); } -export function getDatabaseController( - options: ParseServerOptions, - cacheController: CacheController -): DatabaseController { - const { - databaseURI, - databaseOptions, - collectionPrefix, - schemaCacheTTL, - enableSingleSchemaCache, - } = options; +export function getDatabaseController(options: ParseServerOptions): DatabaseController { + const { databaseURI, databaseOptions, collectionPrefix } = options; let { databaseAdapter } = options; if ( (databaseOptions || @@ -165,10 +155,7 @@ export function getDatabaseController( } else { databaseAdapter = loadAdapter(databaseAdapter); } - return new DatabaseController( - databaseAdapter, - new SchemaCache(cacheController, schemaCacheTTL, enableSingleSchemaCache) - ); + return new DatabaseController(databaseAdapter); } export function getHooksController( diff --git a/src/GraphQL/helpers/objectsMutations.js b/src/GraphQL/helpers/objectsMutations.js index e4f32dbb14..72fb84bc86 100644 --- a/src/GraphQL/helpers/objectsMutations.js +++ b/src/GraphQL/helpers/objectsMutations.js @@ -5,40 +5,17 @@ const createObject = async (className, fields, config, auth, info) => { fields = {}; } - return ( - await rest.create( - config, - auth, - className, - fields, - info.clientSDK, - info.context - ) - ).response; + return (await rest.create(config, auth, className, fields, info.clientSDK, info.context)) + .response; }; -const updateObject = async ( - className, - objectId, - fields, - config, - auth, - info -) => { +const updateObject = async (className, objectId, fields, config, auth, info) => { if (!fields) { fields = {}; } return ( - await rest.update( - config, - auth, - className, - { objectId }, - fields, - info.clientSDK, - info.context - ) + await rest.update(config, auth, className, { objectId }, fields, info.clientSDK, info.context) ).response; }; diff --git a/src/GraphQL/helpers/objectsQueries.js b/src/GraphQL/helpers/objectsQueries.js index 5d18ea295c..6706ccf4fe 
100644 --- a/src/GraphQL/helpers/objectsQueries.js +++ b/src/GraphQL/helpers/objectsQueries.js @@ -12,8 +12,7 @@ const needToGetAllKeys = (fields, keys, parseClasses) => if (fields[key[0]]) { if (fields[key[0]].type === 'Pointer') { const subClass = parseClasses.find( - ({ className: parseClassName }) => - fields[key[0]].targetClass === parseClassName + ({ className: parseClassName }) => fields[key[0]].targetClass === parseClassName ); if (subClass && subClass.fields[key[1]]) { // Current sub key is not custom @@ -50,9 +49,7 @@ const getObject = async ( try { if ( !needToGetAllKeys( - parseClasses.find( - ({ className: parseClassName }) => className === parseClassName - ).fields, + parseClasses.find(({ className: parseClassName }) => className === parseClassName).fields, keys, parseClasses ) @@ -141,15 +138,7 @@ const findObjects = async ( preCountOptions.subqueryReadPreference = subqueryReadPreference; } preCount = ( - await rest.find( - config, - auth, - className, - where, - preCountOptions, - info.clientSDK, - info.context - ) + await rest.find(config, auth, className, where, preCountOptions, info.clientSDK, info.context) ).count; if ((skip || 0) + limit < preCount) { skip = preCount - limit; @@ -158,11 +147,7 @@ const findObjects = async ( const options = {}; - if ( - selectedFields.find( - field => field.startsWith('edges.') || field.startsWith('pageInfo.') - ) - ) { + if (selectedFields.find(field => field.startsWith('edges.') || field.startsWith('pageInfo.'))) { if (limit || limit === 0) { options.limit = limit; } else { @@ -181,9 +166,7 @@ const findObjects = async ( } if ( !needToGetAllKeys( - parseClasses.find( - ({ className: parseClassName }) => className === parseClassName - ).fields, + parseClasses.find(({ className: parseClassName }) => className === parseClassName).fields, keys, parseClasses ) @@ -245,9 +228,7 @@ const findObjects = async ( pageInfo = { hasPreviousPage: - ((preCount && preCount > 0) || (count && count > 0)) && - skip !== undefined && - skip > 0, + ((preCount && preCount > 0) || (count && count > 0)) && skip !== undefined && skip > 0, startCursor: offsetToCursor(skip || 0), endCursor: offsetToCursor((skip || 0) + (results.length || 1) - 1), hasNextPage: (preCount || count) > (skip || 0) + results.length, @@ -261,14 +242,7 @@ const findObjects = async ( }; }; -const calculateSkipAndLimit = ( - skipInput, - first, - after, - last, - before, - maxLimit -) => { +const calculateSkipAndLimit = (skipInput, first, after, last, before, maxLimit) => { let skip = undefined; let limit = undefined; let needToPreCount = false; @@ -276,10 +250,7 @@ const calculateSkipAndLimit = ( // Validates the skip input if (skipInput || skipInput === 0) { if (skipInput < 0) { - throw new Parse.Error( - Parse.Error.INVALID_QUERY, - 'Skip should be a positive number' - ); + throw new Parse.Error(Parse.Error.INVALID_QUERY, 'Skip should be a positive number'); } skip = skipInput; } @@ -288,10 +259,7 @@ const calculateSkipAndLimit = ( if (after) { after = cursorToOffset(after); if ((!after && after !== 0) || after < 0) { - throw new Parse.Error( - Parse.Error.INVALID_QUERY, - 'After is not a valid cursor' - ); + throw new Parse.Error(Parse.Error.INVALID_QUERY, 'After is not a valid cursor'); } // If skip and after are passed, a new skip is calculated by adding them @@ -301,10 +269,7 @@ const calculateSkipAndLimit = ( // Validates the first param if (first || first === 0) { if (first < 0) { - throw new Parse.Error( - Parse.Error.INVALID_QUERY, - 'First should be a positive number' - ); + 
throw new Parse.Error(Parse.Error.INVALID_QUERY, 'First should be a positive number'); } // The first param is translated to the limit param of the Parse legacy API @@ -316,10 +281,7 @@ const calculateSkipAndLimit = ( // This method converts the cursor to the index of the object before = cursorToOffset(before); if ((!before && before !== 0) || before < 0) { - throw new Parse.Error( - Parse.Error.INVALID_QUERY, - 'Before is not a valid cursor' - ); + throw new Parse.Error(Parse.Error.INVALID_QUERY, 'Before is not a valid cursor'); } if ((skip || 0) >= before) { @@ -334,10 +296,7 @@ const calculateSkipAndLimit = ( // Validates the last param if (last || last === 0) { if (last < 0) { - throw new Parse.Error( - Parse.Error.INVALID_QUERY, - 'Last should be a positive number' - ); + throw new Parse.Error(Parse.Error.INVALID_QUERY, 'Last should be a positive number'); } if (last > maxLimit) { diff --git a/src/GraphQL/loaders/defaultGraphQLTypes.js b/src/GraphQL/loaders/defaultGraphQLTypes.js index ec333aa272..d1d092ef6f 100644 --- a/src/GraphQL/loaders/defaultGraphQLTypes.js +++ b/src/GraphQL/loaders/defaultGraphQLTypes.js @@ -23,7 +23,7 @@ class TypeValidationError extends Error { } } -const parseStringValue = (value) => { +const parseStringValue = value => { if (typeof value === 'string') { return value; } @@ -31,7 +31,7 @@ const parseStringValue = (value) => { throw new TypeValidationError(value, 'String'); }; -const parseIntValue = (value) => { +const parseIntValue = value => { if (typeof value === 'string') { const int = Number(value); if (Number.isInteger(int)) { @@ -42,7 +42,7 @@ const parseIntValue = (value) => { throw new TypeValidationError(value, 'Int'); }; -const parseFloatValue = (value) => { +const parseFloatValue = value => { if (typeof value === 'string') { const float = Number(value); if (!isNaN(float)) { @@ -53,7 +53,7 @@ const parseFloatValue = (value) => { throw new TypeValidationError(value, 'Float'); }; -const parseBooleanValue = (value) => { +const parseBooleanValue = value => { if (typeof value === 'boolean') { return value; } @@ -61,7 +61,7 @@ const parseBooleanValue = (value) => { throw new TypeValidationError(value, 'Boolean'); }; -const parseValue = (value) => { +const parseValue = value => { switch (value.kind) { case Kind.STRING: return parseStringValue(value.value); @@ -86,15 +86,15 @@ const parseValue = (value) => { } }; -const parseListValues = (values) => { +const parseListValues = values => { if (Array.isArray(values)) { - return values.map((value) => parseValue(value)); + return values.map(value => parseValue(value)); } throw new TypeValidationError(values, 'List'); }; -const parseObjectFields = (fields) => { +const parseObjectFields = fields => { if (Array.isArray(fields)) { return fields.reduce( (object, field) => ({ @@ -112,15 +112,14 @@ const ANY = new GraphQLScalarType({ name: 'Any', description: 'The Any scalar type is used in operations and types that involve any type of value.', - parseValue: (value) => value, - serialize: (value) => value, - parseLiteral: (ast) => parseValue(ast), + parseValue: value => value, + serialize: value => value, + parseLiteral: ast => parseValue(ast), }); const OBJECT = new GraphQLScalarType({ name: 'Object', - description: - 'The Object scalar type is used in operations and types that involve objects.', + description: 'The Object scalar type is used in operations and types that involve objects.', parseValue(value) { if (typeof value === 'object') { return value; @@ -144,7 +143,7 @@ const OBJECT = new GraphQLScalarType({ }, 
}); -const parseDateIsoValue = (value) => { +const parseDateIsoValue = value => { if (typeof value === 'string') { const date = new Date(value); if (!isNaN(date)) { @@ -157,7 +156,7 @@ const parseDateIsoValue = (value) => { throw new TypeValidationError(value, 'Date'); }; -const serializeDateIso = (value) => { +const serializeDateIso = value => { if (typeof value === 'string') { return value; } @@ -168,7 +167,7 @@ const serializeDateIso = (value) => { throw new TypeValidationError(value, 'Date'); }; -const parseDateIsoLiteral = (ast) => { +const parseDateIsoLiteral = ast => { if (ast.kind === Kind.STRING) { return parseDateIsoValue(ast.value); } @@ -178,19 +177,14 @@ const parseDateIsoLiteral = (ast) => { const DATE = new GraphQLScalarType({ name: 'Date', - description: - 'The Date scalar type is used in operations and types that involve dates.', + description: 'The Date scalar type is used in operations and types that involve dates.', parseValue(value) { if (typeof value === 'string' || value instanceof Date) { return { __type: 'Date', iso: parseDateIsoValue(value), }; - } else if ( - typeof value === 'object' && - value.__type === 'Date' && - value.iso - ) { + } else if (typeof value === 'object' && value.__type === 'Date' && value.iso) { return { __type: value.__type, iso: parseDateIsoValue(value.iso), @@ -202,11 +196,7 @@ const DATE = new GraphQLScalarType({ serialize(value) { if (typeof value === 'string' || value instanceof Date) { return serializeDateIso(value); - } else if ( - typeof value === 'object' && - value.__type === 'Date' && - value.iso - ) { + } else if (typeof value === 'object' && value.__type === 'Date' && value.iso) { return serializeDateIso(value.iso); } @@ -219,8 +209,8 @@ const DATE = new GraphQLScalarType({ iso: parseDateIsoLiteral(ast), }; } else if (ast.kind === Kind.OBJECT) { - const __type = ast.fields.find((field) => field.name.value === '__type'); - const iso = ast.fields.find((field) => field.name.value === 'iso'); + const __type = ast.fields.find(field => field.name.value === '__type'); + const iso = ast.fields.find(field => field.name.value === 'iso'); if (__type && __type.value && __type.value.value === 'Date' && iso) { return { __type: __type.value.value, @@ -273,8 +263,8 @@ const BYTES = new GraphQLScalarType({ base64: ast.value, }; } else if (ast.kind === Kind.OBJECT) { - const __type = ast.fields.find((field) => field.name.value === '__type'); - const base64 = ast.fields.find((field) => field.name.value === 'base64'); + const __type = ast.fields.find(field => field.name.value === '__type'); + const base64 = ast.fields.find(field => field.name.value === 'base64'); if ( __type && __type.value && @@ -294,7 +284,7 @@ const BYTES = new GraphQLScalarType({ }, }); -const parseFileValue = (value) => { +const parseFileValue = value => { if (typeof value === 'string') { return { __type: 'File', @@ -314,10 +304,9 @@ const parseFileValue = (value) => { const FILE = new GraphQLScalarType({ name: 'File', - description: - 'The File scalar type is used in operations and types that involve files.', + description: 'The File scalar type is used in operations and types that involve files.', parseValue: parseFileValue, - serialize: (value) => { + serialize: value => { if (typeof value === 'string') { return value; } else if ( @@ -335,9 +324,9 @@ const FILE = new GraphQLScalarType({ if (ast.kind === Kind.STRING) { return parseFileValue(ast.value); } else if (ast.kind === Kind.OBJECT) { - const __type = ast.fields.find((field) => field.name.value === '__type'); - const 
name = ast.fields.find((field) => field.name.value === 'name'); - const url = ast.fields.find((field) => field.name.value === 'url'); + const __type = ast.fields.find(field => field.name.value === '__type'); + const name = ast.fields.find(field => field.name.value === 'name'); + const url = ast.fields.find(field => field.name.value === 'url'); if (__type && __type.value && name && name.value) { return parseFileValue({ __type: __type.value.value, @@ -353,8 +342,7 @@ const FILE = new GraphQLScalarType({ const FILE_INFO = new GraphQLObjectType({ name: 'FileInfo', - description: - 'The FileInfo object type is used to return the information about files.', + description: 'The FileInfo object type is used to return the information about files.', fields: { name: { description: 'This is the file name.', @@ -407,8 +395,7 @@ const GEO_POINT_INPUT = new GraphQLInputObjectType({ const GEO_POINT = new GraphQLObjectType({ name: 'GeoPoint', - description: - 'The GeoPoint object type is used to return the information about geo point fields.', + description: 'The GeoPoint object type is used to return the information about geo point fields.', fields: GEO_POINT_FIELDS, }); @@ -444,13 +431,11 @@ const ROLE_ACL_INPUT = new GraphQLInputObjectType({ type: new GraphQLNonNull(GraphQLString), }, read: { - description: - 'Allow users who are members of the role to read the current object.', + description: 'Allow users who are members of the role to read the current object.', type: new GraphQLNonNull(GraphQLBoolean), }, write: { - description: - 'Allow users who are members of the role to write on the current object.', + description: 'Allow users who are members of the role to write on the current object.', type: new GraphQLNonNull(GraphQLBoolean), }, }, @@ -521,13 +506,11 @@ const ROLE_ACL = new GraphQLObjectType({ type: new GraphQLNonNull(GraphQLID), }, read: { - description: - 'Allow users who are members of the role to read the current object.', + description: 'Allow users who are members of the role to read the current object.', type: new GraphQLNonNull(GraphQLBoolean), }, write: { - description: - 'Allow users who are members of the role to write on the current object.', + description: 'Allow users who are members of the role to write on the current object.', type: new GraphQLNonNull(GraphQLBoolean), }, }, @@ -557,7 +540,7 @@ const ACL = new GraphQLObjectType({ type: new GraphQLList(new GraphQLNonNull(USER_ACL)), resolve(p) { const users = []; - Object.keys(p).forEach((rule) => { + Object.keys(p).forEach(rule => { if (rule !== '*' && rule.indexOf('role:') !== 0) { users.push({ userId: toGlobalId('_User', rule), @@ -574,7 +557,7 @@ const ACL = new GraphQLObjectType({ type: new GraphQLList(new GraphQLNonNull(ROLE_ACL)), resolve(p) { const roles = []; - Object.keys(p).forEach((rule) => { + Object.keys(p).forEach(rule => { if (rule.indexOf('role:') === 0) { roles.push({ roleName: rule.replace('role:', ''), @@ -610,8 +593,7 @@ const CLASS_NAME_ATT = { }; const GLOBAL_OR_OBJECT_ID_ATT = { - description: - 'This is the object id. You can use either the global or the object id.', + description: 'This is the object id. 
You can use either the global or the object id.', type: OBJECT_ID, }; @@ -686,8 +668,7 @@ const READ_PREFERENCE_ATT = { }; const INCLUDE_READ_PREFERENCE_ATT = { - description: - 'The read preference for the queries to be executed to include fields.', + description: 'The read preference for the queries to be executed to include fields.', type: READ_PREFERENCE, }; @@ -713,8 +694,7 @@ const READ_OPTIONS_ATT = { }; const WHERE_ATT = { - description: - 'These are the conditions that the objects need to match in order to be found', + description: 'These are the conditions that the objects need to match in order to be found', type: OBJECT, }; @@ -736,8 +716,7 @@ const COUNT_ATT = { const SEARCH_INPUT = new GraphQLInputObjectType({ name: 'SearchInput', - description: - 'The SearchInput type is used to specifiy a search operation on a full text search.', + description: 'The SearchInput type is used to specifiy a search operation on a full text search.', fields: { term: { description: 'This is the term to be searched.', @@ -749,13 +728,11 @@ const SEARCH_INPUT = new GraphQLInputObjectType({ type: GraphQLString, }, caseSensitive: { - description: - 'This is the flag to enable or disable case sensitive search.', + description: 'This is the flag to enable or disable case sensitive search.', type: GraphQLBoolean, }, diacriticSensitive: { - description: - 'This is the flag to enable or disable diacritic sensitive search.', + description: 'This is the flag to enable or disable diacritic sensitive search.', type: GraphQLBoolean, }, }, @@ -763,8 +740,7 @@ const SEARCH_INPUT = new GraphQLInputObjectType({ const TEXT_INPUT = new GraphQLInputObjectType({ name: 'TextInput', - description: - 'The TextInput type is used to specify a text operation on a constraint.', + description: 'The TextInput type is used to specify a text operation on a constraint.', fields: { search: { description: 'This is the search to be executed.', @@ -775,8 +751,7 @@ const TEXT_INPUT = new GraphQLInputObjectType({ const BOX_INPUT = new GraphQLInputObjectType({ name: 'BoxInput', - description: - 'The BoxInput type is used to specifiy a box operation on a within geo query.', + description: 'The BoxInput type is used to specifiy a box operation on a within geo query.', fields: { bottomLeft: { description: 'This is the bottom left coordinates of the box.', @@ -791,8 +766,7 @@ const BOX_INPUT = new GraphQLInputObjectType({ const WITHIN_INPUT = new GraphQLInputObjectType({ name: 'WithinInput', - description: - 'The WithinInput type is used to specify a within operation on a constraint.', + description: 'The WithinInput type is used to specify a within operation on a constraint.', fields: { box: { description: 'This is the box to be specified.', @@ -819,8 +793,7 @@ const CENTER_SPHERE_INPUT = new GraphQLInputObjectType({ const GEO_WITHIN_INPUT = new GraphQLInputObjectType({ name: 'GeoWithinInput', - description: - 'The GeoWithinInput type is used to specify a geoWithin operation on a constraint.', + description: 'The GeoWithinInput type is used to specify a geoWithin operation on a constraint.', fields: { polygon: { description: 'This is the polygon to be specified.', @@ -845,49 +818,49 @@ const GEO_INTERSECTS_INPUT = new GraphQLInputObjectType({ }, }); -const equalTo = (type) => ({ +const equalTo = type => ({ description: 'This is the equalTo operator to specify a constraint to select the objects where the value of a field equals to a specified value.', type, }); -const notEqualTo = (type) => ({ +const notEqualTo = type => ({ description: 
'This is the notEqualTo operator to specify a constraint to select the objects where the value of a field do not equal to a specified value.', type, }); -const lessThan = (type) => ({ +const lessThan = type => ({ description: 'This is the lessThan operator to specify a constraint to select the objects where the value of a field is less than a specified value.', type, }); -const lessThanOrEqualTo = (type) => ({ +const lessThanOrEqualTo = type => ({ description: 'This is the lessThanOrEqualTo operator to specify a constraint to select the objects where the value of a field is less than or equal to a specified value.', type, }); -const greaterThan = (type) => ({ +const greaterThan = type => ({ description: 'This is the greaterThan operator to specify a constraint to select the objects where the value of a field is greater than a specified value.', type, }); -const greaterThanOrEqualTo = (type) => ({ +const greaterThanOrEqualTo = type => ({ description: 'This is the greaterThanOrEqualTo operator to specify a constraint to select the objects where the value of a field is greater than or equal to a specified value.', type, }); -const inOp = (type) => ({ +const inOp = type => ({ description: 'This is the in operator to specify a constraint to select the objects where the value of a field equals any value in the specified array.', type: new GraphQLList(type), }); -const notIn = (type) => ({ +const notIn = type => ({ description: 'This is the notIn operator to specify a constraint to select the objects where the value of a field do not equal any value in the specified array.', type: new GraphQLList(type), @@ -913,8 +886,7 @@ const options = { const SUBQUERY_INPUT = new GraphQLInputObjectType({ name: 'SubqueryInput', - description: - 'The SubqueryInput type is used to specify a sub query to another class.', + description: 'The SubqueryInput type is used to specify a sub query to another class.', fields: { className: CLASS_NAME_ATT, where: Object.assign({}, WHERE_ATT, { @@ -988,8 +960,7 @@ const STRING_WHERE_INPUT = new GraphQLInputObjectType({ matchesRegex, options, text: { - description: - 'This is the $text operator to specify a full text search constraint.', + description: 'This is the $text operator to specify a full text search constraint.', type: TEXT_INPUT, }, inQueryKey, @@ -1225,27 +1196,21 @@ let ARRAY_RESULT; const loadArrayResult = (parseGraphQLSchema, parseClasses) => { const classTypes = parseClasses - .filter((parseClass) => - parseGraphQLSchema.parseClassTypes[parseClass.className] - .classGraphQLOutputType - ? true - : false + .filter(parseClass => + parseGraphQLSchema.parseClassTypes[parseClass.className].classGraphQLOutputType ? 
true : false ) .map( - (parseClass) => - parseGraphQLSchema.parseClassTypes[parseClass.className] - .classGraphQLOutputType + parseClass => parseGraphQLSchema.parseClassTypes[parseClass.className].classGraphQLOutputType ); ARRAY_RESULT = new GraphQLUnionType({ name: 'ArrayResult', description: 'Use Inline Fragment on Array to get results: https://graphql.org/learn/queries/#inline-fragments', types: () => [ELEMENT, ...classTypes], - resolveType: (value) => { + resolveType: value => { if (value.__type === 'Object' && value.className && value.objectId) { if (parseGraphQLSchema.parseClassTypes[value.className]) { - return parseGraphQLSchema.parseClassTypes[value.className] - .classGraphQLOutputType; + return parseGraphQLSchema.parseClassTypes[value.className].classGraphQLOutputType; } else { return ELEMENT; } @@ -1257,7 +1222,7 @@ const loadArrayResult = (parseGraphQLSchema, parseClasses) => { parseGraphQLSchema.graphQLTypes.push(ARRAY_RESULT); }; -const load = (parseGraphQLSchema) => { +const load = parseGraphQLSchema => { parseGraphQLSchema.addGraphQLType(GraphQLUpload, true); parseGraphQLSchema.addGraphQLType(ANY, true); parseGraphQLSchema.addGraphQLType(OBJECT, true); diff --git a/src/GraphQL/loaders/filesMutations.js b/src/GraphQL/loaders/filesMutations.js index 0bb031f31d..a732c277ef 100644 --- a/src/GraphQL/loaders/filesMutations.js +++ b/src/GraphQL/loaders/filesMutations.js @@ -14,7 +14,7 @@ const handleUpload = async (upload, config) => { const chunks = []; stream .on('error', reject) - .on('data', (chunk) => chunks.push(chunk)) + .on('data', chunk => chunks.push(chunk)) .on('end', () => resolve(Buffer.concat(chunks))); }); } @@ -28,35 +28,23 @@ const handleUpload = async (upload, config) => { } if (!filename.match(/^[_a-zA-Z0-9][a-zA-Z0-9@\.\ ~_-]*$/)) { - throw new Parse.Error( - Parse.Error.INVALID_FILE_NAME, - 'Filename contains invalid characters.' 
-    );
+    throw new Parse.Error(Parse.Error.INVALID_FILE_NAME, 'Filename contains invalid characters.');
   }
 
   try {
     return {
-      fileInfo: await config.filesController.createFile(
-        config,
-        filename,
-        data,
-        mimetype
-      ),
+      fileInfo: await config.filesController.createFile(config, filename, data, mimetype),
     };
   } catch (e) {
     logger.error('Error creating a file: ', e);
-    throw new Parse.Error(
-      Parse.Error.FILE_SAVE_ERROR,
-      `Could not store file: ${filename}.`
-    );
+    throw new Parse.Error(Parse.Error.FILE_SAVE_ERROR, `Could not store file: ${filename}.`);
   }
 };
 
-const load = (parseGraphQLSchema) => {
+const load = parseGraphQLSchema => {
   const createMutation = mutationWithClientMutationId({
     name: 'CreateFile',
-    description:
-      'The createFile mutation can be used to create and upload a new file.',
+    description: 'The createFile mutation can be used to create and upload a new file.',
     inputFields: {
       upload: {
         description: 'This is the new file to be created and uploaded.',
@@ -80,18 +68,9 @@ const load = (parseGraphQLSchema) => {
     },
   });
 
-  parseGraphQLSchema.addGraphQLType(
-    createMutation.args.input.type.ofType,
-    true,
-    true
-  );
+  parseGraphQLSchema.addGraphQLType(createMutation.args.input.type.ofType, true, true);
   parseGraphQLSchema.addGraphQLType(createMutation.type, true, true);
-  parseGraphQLSchema.addGraphQLMutation(
-    'createFile',
-    createMutation,
-    true,
-    true
-  );
+  parseGraphQLSchema.addGraphQLMutation('createFile', createMutation, true, true);
 };
 
 export { load, handleUpload };
diff --git a/src/GraphQL/loaders/usersMutations.js b/src/GraphQL/loaders/usersMutations.js
index 34f58205fc..314f229a58 100644
--- a/src/GraphQL/loaders/usersMutations.js
+++ b/src/GraphQL/loaders/usersMutations.js
@@ -1,9 +1,4 @@
-import {
-  GraphQLNonNull,
-  GraphQLString,
-  GraphQLBoolean,
-  GraphQLInputObjectType,
-} from 'graphql';
+import { GraphQLNonNull, GraphQLString, GraphQLBoolean, GraphQLInputObjectType } from 'graphql';
 import { mutationWithClientMutationId } from 'graphql-relay';
 import UsersRouter from '../../Routers/UsersRouter';
 import * as objectsMutations from '../helpers/objectsMutations';
@@ -20,20 +15,16 @@ const load = parseGraphQLSchema => {
 
   const signUpMutation = mutationWithClientMutationId({
     name: 'SignUp',
-    description:
-      'The signUp mutation can be used to create and sign up a new user.',
+    description: 'The signUp mutation can be used to create and sign up a new user.',
     inputFields: {
       fields: {
-        descriptions:
-          'These are the fields of the new user to be created and signed up.',
-        type:
-          parseGraphQLSchema.parseClassTypes['_User'].classGraphQLCreateType,
+        descriptions: 'These are the fields of the new user to be created and signed up.',
+        type: parseGraphQLSchema.parseClassTypes['_User'].classGraphQLCreateType,
       },
     },
     outputFields: {
       viewer: {
-        description:
-          'This is the new user that was created, signed up and returned as a viewer.',
+        description: 'This is the new user that was created, signed up and returned as a viewer.',
         type: new GraphQLNonNull(parseGraphQLSchema.viewerType),
       },
     },
@@ -59,12 +50,7 @@ const load = parseGraphQLSchema => {
       context.info.sessionToken = sessionToken;
 
       return {
-        viewer: await getUserFromSessionToken(
-          context,
-          mutationInfo,
-          'viewer.user.',
-          objectId
-        ),
+        viewer: await getUserFromSessionToken(context, mutationInfo, 'viewer.user.', objectId),
       };
     } catch (e) {
       parseGraphQLSchema.handleError(e);
@@ -72,11 +58,7 @@ const load = parseGraphQLSchema => {
     },
   });
 
-  parseGraphQLSchema.addGraphQLType(
-    signUpMutation.args.input.type.ofType,
-    true,
-    true
-  );
+  parseGraphQLSchema.addGraphQLType(signUpMutation.args.input.type.ofType, true, true);
   parseGraphQLSchema.addGraphQLType(signUpMutation.type, true, true);
   parseGraphQLSchema.addGraphQLMutation('signUp', signUpMutation, true, true);
   const logInWithMutation = mutationWithClientMutationId({
@@ -89,35 +71,30 @@ const load = parseGraphQLSchema => {
         type: new GraphQLNonNull(OBJECT),
       },
       fields: {
-        descriptions:
-          'These are the fields of the user to be created/updated and logged in.',
+        descriptions: 'These are the fields of the user to be created/updated and logged in.',
         type: new GraphQLInputObjectType({
           name: 'UserLoginWithInput',
           fields: () => {
             const classGraphQLCreateFields = parseGraphQLSchema.parseClassTypes[
               '_User'
             ].classGraphQLCreateType.getFields();
-            return Object.keys(classGraphQLCreateFields).reduce(
-              (fields, fieldName) => {
-                if (
-                  fieldName !== 'password' &&
-                  fieldName !== 'username' &&
-                  fieldName !== 'authData'
-                ) {
-                  fields[fieldName] = classGraphQLCreateFields[fieldName];
-                }
-                return fields;
-              },
-              {}
-            );
+            return Object.keys(classGraphQLCreateFields).reduce((fields, fieldName) => {
+              if (
+                fieldName !== 'password' &&
+                fieldName !== 'username' &&
+                fieldName !== 'authData'
+              ) {
+                fields[fieldName] = classGraphQLCreateFields[fieldName];
+              }
+              return fields;
+            }, {});
           },
         }),
       },
     },
     outputFields: {
       viewer: {
-        description:
-          'This is the new user that was created, signed up and returned as a viewer.',
+        description: 'This is the new user that was created, signed up and returned as a viewer.',
         type: new GraphQLNonNull(parseGraphQLSchema.viewerType),
       },
     },
@@ -143,12 +120,7 @@ const load = parseGraphQLSchema => {
       context.info.sessionToken = sessionToken;
 
       return {
-        viewer: await getUserFromSessionToken(
-          context,
-          mutationInfo,
-          'viewer.user.',
-          objectId
-        ),
+        viewer: await getUserFromSessionToken(context, mutationInfo, 'viewer.user.', objectId),
       };
     } catch (e) {
       parseGraphQLSchema.handleError(e);
@@ -156,18 +128,9 @@ const load = parseGraphQLSchema => {
     },
   });
 
-  parseGraphQLSchema.addGraphQLType(
-    logInWithMutation.args.input.type.ofType,
-    true,
-    true
-  );
+  parseGraphQLSchema.addGraphQLType(logInWithMutation.args.input.type.ofType, true, true);
   parseGraphQLSchema.addGraphQLType(logInWithMutation.type, true, true);
-  parseGraphQLSchema.addGraphQLMutation(
-    'logInWith',
-    logInWithMutation,
-    true,
-    true
-  );
+  parseGraphQLSchema.addGraphQLMutation('logInWith', logInWithMutation, true, true);
 
   const logInMutation = mutationWithClientMutationId({
     name: 'LogIn',
@@ -184,8 +147,7 @@ const load = parseGraphQLSchema => {
     },
     outputFields: {
       viewer: {
-        description:
-          'This is the existing user that was logged in and returned as a viewer.',
+        description: 'This is the existing user that was logged in and returned as a viewer.',
         type: new GraphQLNonNull(parseGraphQLSchema.viewerType),
       },
     },
@@ -210,12 +172,7 @@ const load = parseGraphQLSchema => {
      context.info.sessionToken = sessionToken;
 
       return {
-        viewer: await getUserFromSessionToken(
-          context,
-          mutationInfo,
-          'viewer.user.',
-          objectId
-        ),
+        viewer: await getUserFromSessionToken(context, mutationInfo, 'viewer.user.', objectId),
       };
     } catch (e) {
       parseGraphQLSchema.handleError(e);
@@ -223,11 +180,7 @@ const load = parseGraphQLSchema => {
     },
   });
 
-  parseGraphQLSchema.addGraphQLType(
-    logInMutation.args.input.type.ofType,
-    true,
-    true
-  );
+  parseGraphQLSchema.addGraphQLType(logInMutation.args.input.type.ofType, true, true);
   parseGraphQLSchema.addGraphQLType(logInMutation.type, true, true);
   parseGraphQLSchema.addGraphQLMutation('logIn', logInMutation, true, true);
 
@@ -236,8 +189,7 @@ const load = parseGraphQLSchema => {
     description: 'The logOut mutation can be used to log out an existing user.',
     outputFields: {
       viewer: {
-        description:
-          'This is the existing user that was logged out and returned as a viewer.',
+        description: 'This is the existing user that was logged out and returned as a viewer.',
         type: new GraphQLNonNull(parseGraphQLSchema.viewerType),
       },
     },
@@ -265,11 +217,7 @@ const load = parseGraphQLSchema => {
     },
   });
 
-  parseGraphQLSchema.addGraphQLType(
-    logOutMutation.args.input.type.ofType,
-    true,
-    true
-  );
+  parseGraphQLSchema.addGraphQLType(logOutMutation.args.input.type.ofType, true, true);
   parseGraphQLSchema.addGraphQLType(logOutMutation.type, true, true);
   parseGraphQLSchema.addGraphQLMutation('logOut', logOutMutation, true, true);
 
@@ -305,18 +253,9 @@ const load = parseGraphQLSchema => {
     },
   });
 
-  parseGraphQLSchema.addGraphQLType(
-    resetPasswordMutation.args.input.type.ofType,
-    true,
-    true
-  );
+  parseGraphQLSchema.addGraphQLType(resetPasswordMutation.args.input.type.ofType, true, true);
   parseGraphQLSchema.addGraphQLType(resetPasswordMutation.type, true, true);
-  parseGraphQLSchema.addGraphQLMutation(
-    'resetPassword',
-    resetPasswordMutation,
-    true,
-    true
-  );
+  parseGraphQLSchema.addGraphQLMutation('resetPassword', resetPasswordMutation, true, true);
 
   const sendVerificationEmailMutation = mutationWithClientMutationId({
     name: 'SendVerificationEmail',
     description:
       'The sendVerificationEmail mutation can be used to send the verification email again.',
     inputFields: {
       email: {
-        descriptions:
-          'Email of the user that should receive the verification email',
+        descriptions: 'Email of the user that should receive the verification email',
         type: new GraphQLNonNull(GraphQLString),
       },
     },
@@ -360,11 +298,7 @@ const load = parseGraphQLSchema => {
     true,
     true
   );
-  parseGraphQLSchema.addGraphQLType(
-    sendVerificationEmailMutation.type,
-    true,
-    true
-  );
+  parseGraphQLSchema.addGraphQLType(sendVerificationEmailMutation.type, true, true);
   parseGraphQLSchema.addGraphQLMutation(
     'sendVerificationEmail',
     sendVerificationEmailMutation,
diff --git a/src/Options/Definitions.js b/src/Options/Definitions.js
index c3c1271786..3072b08bfe 100644
--- a/src/Options/Definitions.js
+++ b/src/Options/Definitions.js
@@ -142,13 +142,6 @@ module.exports.ParseServerOptions = {
     action: parsers.booleanParser,
     default: false,
   },
-  enableSingleSchemaCache: {
-    env: 'PARSE_SERVER_ENABLE_SINGLE_SCHEMA_CACHE',
-    help:
-      'Use a single schema cache shared across requests. Reduces number of queries made to _SCHEMA, defaults to false, i.e. unique schema cache per request.',
-    action: parsers.booleanParser,
-    default: false,
-  },
   encryptionKey: {
     env: 'PARSE_SERVER_ENCRYPTION_KEY',
     help: 'Key for encrypting your files',
diff --git a/src/Options/docs.js b/src/Options/docs.js
index 9d8553d9f6..3e4573c1f1 100644
--- a/src/Options/docs.js
+++ b/src/Options/docs.js
@@ -26,7 +26,6 @@
  * @property {Number} emailVerifyTokenValidityDuration Email verification token validity duration, in seconds
  * @property {Boolean} enableAnonymousUsers Enable (or disable) anonymous users, defaults to true
  * @property {Boolean} enableExpressErrorHandler Enables the default express error handler for all errors
- * @property {Boolean} enableSingleSchemaCache Use a single schema cache shared across requests. Reduces number of queries made to _SCHEMA, defaults to false, i.e. unique schema cache per request.
  * @property {String} encryptionKey Key for encrypting your files
  * @property {Boolean} expireInactiveSessions Sets wether we should expire the inactive sessions, defaults to true
  * @property {String} fileKey Key for your files
diff --git a/src/Options/index.js b/src/Options/index.js
index 84ec9c7b99..0509c9a27b 100644
--- a/src/Options/index.js
+++ b/src/Options/index.js
@@ -164,9 +164,6 @@ export interface ParseServerOptions {
   :ENV: PARSE_SERVER_ENABLE_EXPERIMENTAL_DIRECT_ACCESS
   :DEFAULT: false */
   directAccess: ?boolean;
-  /* Use a single schema cache shared across requests. Reduces number of queries made to _SCHEMA, defaults to false, i.e. unique schema cache per request.
-  :DEFAULT: false */
-  enableSingleSchemaCache: ?boolean;
   /* Enables the default express error handler for all errors
   :DEFAULT: false */
   enableExpressErrorHandler: ?boolean;
diff --git a/src/PromiseRouter.js b/src/PromiseRouter.js
index e1ec4eff9f..aa4d7e97a4 100644
--- a/src/PromiseRouter.js
+++ b/src/PromiseRouter.js
@@ -150,7 +150,6 @@ function makeExpressHandler(appId, promiseHandler) {
       promiseHandler(req)
         .then(
           result => {
-            clearSchemaCache(req);
             if (!result.response && !result.location && !result.text) {
               log.error('the handler did not include a "response" or a "location" field');
               throw 'control should not get here';
@@ -183,17 +182,14 @@ function makeExpressHandler(appId, promiseHandler) {
             res.json(result.response);
           },
           error => {
-            clearSchemaCache(req);
             next(error);
           }
         )
         .catch(e => {
-          clearSchemaCache(req);
           log.error(`Error generating response. ${inspect(e)}`, { error: e });
           next(e);
         });
     } catch (e) {
-      clearSchemaCache(req);
       log.error(`Error handling request: ${inspect(e)}`, { error: e });
       next(e);
     }
@@ -211,9 +207,3 @@ function maskSensitiveUrl(req) {
   }
   return maskUrl;
 }
-
-function clearSchemaCache(req) {
-  if (req.config && !req.config.enableSingleSchemaCache) {
-    req.config.database.schemaCache.clear();
-  }
-}