diff --git a/.github/ISSUE_TEMPLATE/---1-report-an-issue.md b/.github/ISSUE_TEMPLATE/---1-report-an-issue.md
index dbfa97106a..44c2cc7f55 100644
--- a/.github/ISSUE_TEMPLATE/---1-report-an-issue.md
+++ b/.github/ISSUE_TEMPLATE/---1-report-an-issue.md
@@ -8,7 +8,11 @@ assignees: ''
---
### New Issue Checklist
-
+
- [ ] I am not disclosing a [vulnerability](https://github.com/parse-community/parse-server/blob/master/SECURITY.md).
- [ ] I am not just asking a [question](https://github.com/parse-community/.github/blob/master/SUPPORT.md).
@@ -27,6 +31,16 @@ assignees: ''
### Expected Outcome
+### Failing Test Case / Pull Request
+
+
+- [ ] 🤩 I submitted a PR with a fix and a test case.
+- [ ] 🧐 I submitted a PR with a failing test case.
+
### Environment
diff --git a/.github/ISSUE_TEMPLATE/---2-feature-request.md b/.github/ISSUE_TEMPLATE/---2-feature-request.md
index c2756fb952..282e9e7a12 100644
--- a/.github/ISSUE_TEMPLATE/---2-feature-request.md
+++ b/.github/ISSUE_TEMPLATE/---2-feature-request.md
@@ -7,14 +7,28 @@ assignees: ''
---
-**Is your feature request related to a problem? Please describe.**
-A clear and concise description of what the problem is. Ex. I'm always frustrated when [...]
+### New Feature / Enhancement Checklist
+
-**Describe the solution you'd like**
-A clear and concise description of what you want to happen.
+- [ ] I am not disclosing a [vulnerability](https://github.com/parse-community/parse-server/blob/master/SECURITY.md).
+- [ ] I am not just asking a [question](https://github.com/parse-community/.github/blob/master/SUPPORT.md).
+- [ ] I have searched through [existing issues](https://github.com/parse-community/parse-server/issues?q=is%3Aissue).
-**Describe alternatives you've considered**
-A clear and concise description of any alternative solutions or features you've considered.
+### Current Limitation
+
-**Additional context**
-Add any other context or screenshots about the feature request here.
+### Feature / Enhancement Description
+
+
+### Example Use Case
+
+
+### Alternatives / Workarounds
+
+
+### 3rd Party References
+
\ No newline at end of file
diff --git a/.github/pull_request_template.md b/.github/pull_request_template.md
new file mode 100644
index 0000000000..f7b9b13f1a
--- /dev/null
+++ b/.github/pull_request_template.md
@@ -0,0 +1,30 @@
+### New Pull Request Checklist
+
+
+- [ ] I am not disclosing a [vulnerability](https://github.com/parse-community/parse-server/blob/master/SECURITY.md).
+- [ ] I am creating this PR in reference to an [issue](https://github.com/parse-community/parse-server/issues?q=is%3Aissue).
+
+### Issue Description
+
+
+Related issue: FILL_THIS_OUT
+
+### Approach
+
+
+### TODOs before merging
+
+
+- [ ] Add test cases
+- [ ] Add entry to changelog
+- [ ] Add changes to documentation (guides, repository pages, in-code descriptions)
+- [ ] Add [security check](https://github.com/parse-community/parse-server/blob/master/CONTRIBUTING.md#security-checks)
+- [ ] Add new Parse Error codes to Parse JS SDK
+- [ ] ...
\ No newline at end of file
diff --git a/.github/workflows/release.yml b/.github/workflows/release.yml
new file mode 100644
index 0000000000..334f78eea2
--- /dev/null
+++ b/.github/workflows/release.yml
@@ -0,0 +1,63 @@
+name: release
+on:
+ release:
+ types: [published]
+jobs:
+ publish-npm:
+ runs-on: ubuntu-18.04
+ steps:
+ - uses: actions/checkout@v2
+ - uses: actions/setup-node@v1
+ with:
+ node-version: '10.14'
+ registry-url: https://registry.npmjs.org/
+ - name: Cache Node.js modules
+ uses: actions/cache@v2
+ with:
+ path: ~/.npm
+ key: ${{ runner.os }}-node-${{ hashFiles('**/package-lock.json') }}
+ restore-keys: |
+ ${{ runner.os }}-node-
+ - run: npm ci
+ - run: npm publish
+ env:
+        NODE_AUTH_TOKEN: ${{ secrets.NPM_TOKEN }}
+ publish-docs:
+ runs-on: ubuntu-18.04
+ timeout-minutes: 30
+ steps:
+ - uses: actions/checkout@v2
+ - name: Use Node.js
+ uses: actions/setup-node@v1
+ with:
+ node-version: '10.14'
+ - name: Cache Node.js modules
+ uses: actions/cache@v2
+ with:
+ path: ~/.npm
+ key: ${{ runner.os }}-node-${{ hashFiles('**/package-lock.json') }}
+ restore-keys: |
+ ${{ runner.os }}-node-
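+      # Resolve the tag that triggered this release; the script returns an empty
+      # string for non-tag refs so the docs fall back to the "master" destination.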
+ - name: Get Tag
+ uses: actions/github-script@v3
+ id: tag
+ with:
+        github-token: ${{ secrets.GITHUB_TOKEN }}
+ result-encoding: string
+ script: |
+ const ref = process.env.GITHUB_REF
+          if (!ref.startsWith('refs/tags/'))
+ return ''
+ return ref.replace(/^refs\/tags\//, '')
+ - name: Generate Docs
+ run: |
+ echo $SOURCE_TAG
+ npm ci
+ ./release_docs.sh
+ env:
+ SOURCE_TAG: ${{ steps.tag.outputs.result }}
+ - name: Deploy
+ uses: peaceiris/actions-gh-pages@v3.7.3
+ with:
+ github_token: ${{ secrets.GITHUB_TOKEN }}
+ publish_dir: ./docs
diff --git a/CHANGELOG.md b/CHANGELOG.md
index 9882230ba1..71df536115 100644
--- a/CHANGELOG.md
+++ b/CHANGELOG.md
@@ -1,7 +1,35 @@
## Parse Server Changelog
### master
-[Full Changelog](https://github.com/parse-community/parse-server/compare/4.4.0...master)
+[Full Changelog](https://github.com/parse-community/parse-server/compare/4.5.0...master)
+
+__BREAKING CHANGES:__
+- NEW: Added file upload restriction. For improved security, file upload is now allowed only for authenticated users by default. To also allow file upload for anonymous users or the public, set the `fileUpload` parameter in the [Parse Server Options](https://parseplatform.org/parse-server/api/master/ParseServerOptions.html). [#7071](https://github.com/parse-community/parse-server/pull/7071). Thanks to [dblythy](https://github.com/dblythy).
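+
+  A minimal sketch of restoring the previous behavior (the option names below are those exercised by the new file upload tests in this PR):
+
+  ```js
+  const server = new ParseServer({
+    // ...your other Parse Server options
+    fileUpload: {
+      enableForPublic: true,
+      enableForAnonymousUser: true,
+      enableForAuthenticatedUser: true,
+    },
+  });
+  ```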
+___
+- IMPROVE: Optimize queries on classes with pointer permissions. [#7061](https://github.com/parse-community/parse-server/pull/7061). Thanks to [Pedro Diaz](https://github.com/pdiaz).
+- FIX: Make `request.context` available in afterFind triggers. [#7078](https://github.com/parse-community/parse-server/pull/7078). Thanks to [dblythy](https://github.com/dblythy).
+
+### 4.5.0
+[Full Changelog](https://github.com/parse-community/parse-server/compare/4.4.0...4.5.0)
+
+__BREAKING CHANGES:__
+- FIX: Consistent casing for `afterLiveQueryEvent`. The `afterLiveQueryEvent` was introduced in 4.4.0 with inconsistent casing for the event names, which was fixed in 4.5.0. [#7023](https://github.com/parse-community/parse-server/pull/7023). Thanks to [dblythy](https://github.com/dblythy).
+___
+- FIX: Properly handle serverURL and publicServerUrl in Batch requests. [#7049](https://github.com/parse-community/parse-server/pull/7049). Thanks to [Zach Goldberg](https://github.com/ZachGoldberg).
+- IMPROVE: Prevent invalid column names (`className` and `length`). [#7053](https://github.com/parse-community/parse-server/pull/7053). Thanks to [Diamond Lewis](https://github.com/dplewis).
+- IMPROVE: GraphQL: Remove viewer from logout mutation. [#7029](https://github.com/parse-community/parse-server/pull/7029). Thanks to [Antoine Cormouls](https://github.com/Moumouls).
+- IMPROVE: GraphQL: Optimize on Relation. [#7044](https://github.com/parse-community/parse-server/pull/7044). Thanks to [Antoine Cormouls](https://github.com/Moumouls).
+- NEW: Include sessionToken in onLiveQueryEvent. [#7043](https://github.com/parse-community/parse-server/pull/7043). Thanks to [dblythy](https://github.com/dblythy).
+- FIX: Definitions for accountLockout and passwordPolicy. [#7040](https://github.com/parse-community/parse-server/pull/7040). Thanks to [dblythy](https://github.com/dblythy).
+- FIX: Fix typo in server definitions for emailVerifyTokenReuseIfValid. [#7037](https://github.com/parse-community/parse-server/pull/7037). Thanks to [dblythy](https://github.com/dblythy).
+- SECURITY FIX: LDAP auth stores password in plain text. See [GHSA-4w46-w44m-3jq3](https://github.com/parse-community/parse-server/security/advisories/GHSA-4w46-w44m-3jq3) for more details about the vulnerability and [da905a3](https://github.com/parse-community/parse-server/commit/da905a357d062ab4fea727a21eac231acc2ed92a) for the fix. Thanks to [Fabian Strachanski](https://github.com/fastrde).
+- NEW: Reuse tokens if they haven't expired. [#7017](https://github.com/parse-community/parse-server/pull/7017). Thanks to [dblythy](https://github.com/dblythy).
+- NEW: Add LDAPS-support to LDAP-Authcontroller. [#7014](https://github.com/parse-community/parse-server/pull/7014). Thanks to [Fabian Strachanski](https://github.com/fastrde).
+- FIX: (beforeSave/afterSave): Return value instead of Parse.Op for nested fields. [#7005](https://github.com/parse-community/parse-server/pull/7005). Thanks to [Diamond Lewis](https://github.com/dplewis).
+- FIX: (beforeSave): Skip Sanitizing Database results. [#7003](https://github.com/parse-community/parse-server/pull/7003). Thanks to [Diamond Lewis](https://github.com/dplewis).
+- FIX: Fix includeAll for querying a Pointer and Pointer array. [#7002](https://github.com/parse-community/parse-server/pull/7002). Thanks to [Corey Baker](https://github.com/cbaker6).
+- FIX: Add encryptionKey to src/options/index.js. [#6999](https://github.com/parse-community/parse-server/pull/6999). Thanks to [dblythy](https://github.com/dblythy).
+- IMPROVE: Update PostgresStorageAdapter.js. [#6989](https://github.com/parse-community/parse-server/pull/6989). Thanks to [Vitaly Tomilov](https://github.com/vitaly-t).
### 4.4.0
[Full Changelog](https://github.com/parse-community/parse-server/compare/4.3.0...4.4.0)
diff --git a/CONTRIBUTING.md b/CONTRIBUTING.md
index bc7279af93..6c2ccd3bd8 100644
--- a/CONTRIBUTING.md
+++ b/CONTRIBUTING.md
@@ -100,7 +100,18 @@ If you want to make changes to [Parse Server Configuration][config] add the desi
To view docs run `npm run docs` and check the `/out` directory.
-### Code of Conduct
+## Feature Considerations
+### Security Checks
+
+The security checks feature warns developers about weak security settings in their Parse Server deployment.
+
+A security check needs to be added for every new feature or enhancement that allows the developer to configure it in a way that weakens security mechanisms or exposes functionality that creates a weak spot for malicious attacks. If you are not sure whether your feature or enhancement requires a security check, feel free to ask.
+
+For example, allowing public read and write to a class may be useful to simplify development but should be disallowed in a production environment.
+
+Security checks are added in [SecurityChecks.js](https://github.com/parse-community/parse-server/blob/master/src/SecurityChecks.js).
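+
+For illustration (a schematic example of a weak setting, not the actual check implementation), a class-level permission that grants public read and write looks like this and would be a candidate for such a warning:
+
+```js
+// Schematic only: a CLP that allows anyone to read and write the class.
+const weakCLP = {
+  find: { '*': true },
+  get: { '*': true },
+  create: { '*': true },
+  update: { '*': true },
+  delete: { '*': true },
+};
+```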
+
+## Code of Conduct
This project adheres to the [Contributor Covenant Code of Conduct](https://github.com/parse-community/parse-server/blob/master/CODE_OF_CONDUCT.md). By participating, you are expected to honor this code.
diff --git a/package-lock.json b/package-lock.json
index b5f6313db7..35bf246ff8 100644
--- a/package-lock.json
+++ b/package-lock.json
@@ -1,6 +1,6 @@
{
"name": "parse-server",
- "version": "4.4.0",
+ "version": "4.5.0",
"lockfileVersion": 1,
"requires": true,
"dependencies": {
@@ -2734,9 +2734,9 @@
}
},
"@babel/runtime-corejs3": {
- "version": "7.12.1",
- "resolved": "https://registry.npmjs.org/@babel/runtime-corejs3/-/runtime-corejs3-7.12.1.tgz",
- "integrity": "sha512-umhPIcMrlBZ2aTWlWjUseW9LjQKxi1dpFlQS8DzsxB//5K+u6GLTC/JliPKHsd5kJVPIU6X/Hy0YvWOYPcMxBw==",
+ "version": "7.12.5",
+ "resolved": "https://registry.npmjs.org/@babel/runtime-corejs3/-/runtime-corejs3-7.12.5.tgz",
+ "integrity": "sha512-roGr54CsTmNPPzZoCP1AmDXuBoNao7tnSA83TXTwt+UK5QVyh1DIJnrgYRPWKCF2flqZQXwa7Yr8v7VmLzF0YQ==",
"requires": {
"core-js-pure": "^3.0.0",
"regenerator-runtime": "^0.13.4"
@@ -8530,9 +8530,9 @@
}
},
"ldapjs": {
- "version": "2.2.2",
- "resolved": "https://registry.npmjs.org/ldapjs/-/ldapjs-2.2.2.tgz",
- "integrity": "sha512-PgMVYKWUjkkK6v81QQc+kKsr4TcKRggYuvjwo1h4ZEhaXGIkhLvcK9Y60nZqR5RRvyYbt8Ott1VyN7S5EiYArA==",
+ "version": "2.2.3",
+ "resolved": "https://registry.npmjs.org/ldapjs/-/ldapjs-2.2.3.tgz",
+ "integrity": "sha512-143MayI+cSo1PEngge0HMVj3Fb0TneX4Qp9yl9bKs45qND3G64B75GMSxtZCfNuVsvg833aOp1UWG8peFu1LrQ==",
"requires": {
"abstract-logging": "^2.0.0",
"asn1": "^0.2.4",
@@ -10356,23 +10356,23 @@
}
},
"parse": {
- "version": "2.18.0",
- "resolved": "https://registry.npmjs.org/parse/-/parse-2.18.0.tgz",
- "integrity": "sha512-RaSLhcpTZIaITanJY3YzVASyLaCTBh0v4Bq0M3nrKGQXWa9/iscWtidPhV6xpSBzat53aqO4eL3L5J1zwZzsUg==",
+ "version": "2.19.0",
+ "resolved": "https://registry.npmjs.org/parse/-/parse-2.19.0.tgz",
+ "integrity": "sha512-twxq/Kzyd0c9exxK0jMEPISwDpFzukmexSa+VAFL4a6K+lqGeJ9TuysYhfR9Bkcd0mHGcMFM5gn4uycu1xykvA==",
"requires": {
- "@babel/runtime": "7.12.1",
- "@babel/runtime-corejs3": "7.12.1",
+ "@babel/runtime": "7.12.5",
+ "@babel/runtime-corejs3": "7.12.5",
"crypto-js": "4.0.0",
"react-native-crypto-js": "1.0.0",
"uuid": "3.4.0",
- "ws": "7.3.1",
+ "ws": "7.4.0",
"xmlhttprequest": "1.8.0"
},
"dependencies": {
"@babel/runtime": {
- "version": "7.12.1",
- "resolved": "https://registry.npmjs.org/@babel/runtime/-/runtime-7.12.1.tgz",
- "integrity": "sha512-J5AIf3vPj3UwXaAzb5j1xM4WAQDX3EMgemF8rjCP3SoW09LfRKAXQKt6CoVYl230P6iWdRcBbnLDDdnqWxZSCA==",
+ "version": "7.12.5",
+ "resolved": "https://registry.npmjs.org/@babel/runtime/-/runtime-7.12.5.tgz",
+ "integrity": "sha512-plcc+hbExy3McchJCEQG3knOsuh3HH+Prx1P6cLIkET/0dLuQDEnrT+s27Axgc9bqfsmNUNHfscgMUdBpC9xfg==",
"requires": {
"regenerator-runtime": "^0.13.4"
}
@@ -10383,9 +10383,9 @@
"integrity": "sha512-HjSDRw6gZE5JMggctHBcjVak08+KEVhSIiDzFnT9S9aegmp85S/bReBVTb4QTFaRNptJ9kuYaNhnbNEOkbKb/A=="
},
"ws": {
- "version": "7.3.1",
- "resolved": "https://registry.npmjs.org/ws/-/ws-7.3.1.tgz",
- "integrity": "sha512-D3RuNkynyHmEJIpD2qrgVkc9DQ23OrN/moAwZX4L8DfvszsJxpjQuUq3LMx6HoYji9fbIOBY18XWBsAux1ZZUA=="
+ "version": "7.4.0",
+ "resolved": "https://registry.npmjs.org/ws/-/ws-7.4.0.tgz",
+ "integrity": "sha512-kyFwXuV/5ymf+IXhS6f0+eAFvydbaBW3zjpT6hUdAh/hbVjTIB5EHBGi0bPoCLSK2wcuz3BrEkB9LrYv1Nm4NQ=="
}
}
},
@@ -11208,9 +11208,27 @@
}
},
"semver": {
- "version": "7.3.2",
- "resolved": "https://registry.npmjs.org/semver/-/semver-7.3.2.tgz",
- "integrity": "sha512-OrOb32TeeambH6UrhtShmF7CRDqhL6/5XpPNp2DuRH6+9QLw/orhp72j87v8Qa1ScDkvrrBNpZcDejAirJmfXQ=="
+ "version": "7.3.4",
+ "resolved": "https://registry.npmjs.org/semver/-/semver-7.3.4.tgz",
+ "integrity": "sha512-tCfb2WLjqFAtXn4KEdxIhalnRtoKFN7nAwj0B3ZXCbQloV2tq5eDbcTmT68JJD3nRJq24/XgxtQKFIpQdtvmVw==",
+ "requires": {
+ "lru-cache": "^6.0.0"
+ },
+ "dependencies": {
+ "lru-cache": {
+ "version": "6.0.0",
+ "resolved": "https://registry.npmjs.org/lru-cache/-/lru-cache-6.0.0.tgz",
+ "integrity": "sha512-Jo6dJ04CmSjuznwJSS3pUeWmd/H0ffTlkXXgwZi+eq1UCmqQwCh+eLsYOYCwY991i2Fah4h1BEMCx4qThGbsiA==",
+ "requires": {
+ "yallist": "^4.0.0"
+ }
+ },
+ "yallist": {
+ "version": "4.0.0",
+ "resolved": "https://registry.npmjs.org/yallist/-/yallist-4.0.0.tgz",
+ "integrity": "sha512-3wdGidZyq5PB084XLES5TpOSRA3wjXAlIWMhum2kRcv/41Sn2emQ0dycQW4uZXLejwKvg6EsvbdlVL+FYEct7A=="
+ }
+ }
},
"semver-compare": {
"version": "1.0.0",
@@ -12526,9 +12544,9 @@
"integrity": "sha1-n5VxD1CiZ5R7LMwSR0HBAoQn5xM="
},
"uuid": {
- "version": "8.3.1",
- "resolved": "https://registry.npmjs.org/uuid/-/uuid-8.3.1.tgz",
- "integrity": "sha512-FOmRr+FmWEIG8uhZv6C2bTgEVXsHk08kE7mPlrBbEe+c3r9pjceVPgupIfNIhc4yx55H69OXANrUaSuu9eInKg=="
+ "version": "8.3.2",
+ "resolved": "https://registry.npmjs.org/uuid/-/uuid-8.3.2.tgz",
+ "integrity": "sha512-+NYs2QeMWy+GWFOEm9xnn6HCDp0l7QBD7ml8zLUmJ+93Q5NF0NocErnwkTkXVFNiX3/fpC6afS8Dhb/gz7R7eg=="
},
"v8-compile-cache": {
"version": "2.1.0",
@@ -12792,9 +12810,9 @@
}
},
"ws": {
- "version": "7.4.0",
- "resolved": "https://registry.npmjs.org/ws/-/ws-7.4.0.tgz",
- "integrity": "sha512-kyFwXuV/5ymf+IXhS6f0+eAFvydbaBW3zjpT6hUdAh/hbVjTIB5EHBGi0bPoCLSK2wcuz3BrEkB9LrYv1Nm4NQ=="
+ "version": "7.4.1",
+ "resolved": "https://registry.npmjs.org/ws/-/ws-7.4.1.tgz",
+ "integrity": "sha512-pTsP8UAfhy3sk1lSk/O/s4tjD0CRwvMnzvwr4OKGX7ZvqZtUyx4KIJB5JWbkykPoc55tixMGgTNoh3k4FkNGFQ=="
},
"xml2js": {
"version": "0.4.19",
diff --git a/package.json b/package.json
index 9a9ffc7f0b..9ead1887b6 100644
--- a/package.json
+++ b/package.json
@@ -1,6 +1,6 @@
{
"name": "parse-server",
- "version": "4.4.0",
+ "version": "4.5.0",
"description": "An express module providing a Parse-compatible API server",
"main": "lib/index.js",
"repository": {
@@ -42,22 +42,22 @@
"intersect": "1.0.1",
"jsonwebtoken": "8.5.1",
"jwks-rsa": "1.11.0",
- "ldapjs": "2.2.2",
+ "ldapjs": "2.2.3",
"lodash": "4.17.20",
"lru-cache": "5.1.1",
"mime": "2.4.6",
"mongodb": "3.6.3",
- "parse": "2.18.0",
+ "parse": "2.19.0",
"pg-promise": "10.8.1",
"pluralize": "8.0.0",
"redis": "3.0.2",
- "semver": "7.3.2",
+ "semver": "7.3.4",
"subscriptions-transport-ws": "0.9.18",
"tv4": "1.3.0",
- "uuid": "8.3.1",
+ "uuid": "8.3.2",
"winston": "3.3.3",
"winston-daily-rotate-file": "4.5.0",
- "ws": "7.4.0"
+ "ws": "7.4.1"
},
"devDependencies": {
"@babel/cli": "7.10.0",
@@ -105,7 +105,7 @@
"posttest": "cross-env MONGODB_VERSION=${MONGODB_VERSION:=4.0.4} MONGODB_TOPOLOGY=${MONGODB_TOPOLOGY:=standalone} MONGODB_STORAGE_ENGINE=${MONGODB_STORAGE_ENGINE:=mmapv1} mongodb-runner stop",
"coverage": "cross-env MONGODB_VERSION=${MONGODB_VERSION:=4.0.4} MONGODB_TOPOLOGY=${MONGODB_TOPOLOGY:=standalone} MONGODB_STORAGE_ENGINE=${MONGODB_STORAGE_ENGINE:=mmapv1} TESTING=1 nyc jasmine",
"start": "node ./bin/parse-server",
- "prettier": "prettier --write {src,spec}/{**/*,*}.js",
+ "prettier": "prettier --write '{src,spec}/{**/*,*}.js'",
"prepare": "npm run build",
"postinstall": "node -p 'require(\"./postinstall.js\")()'"
},
diff --git a/release_docs.sh b/release_docs.sh
index bb12bac50f..a7bb26324c 100755
--- a/release_docs.sh
+++ b/release_docs.sh
@@ -1,11 +1,11 @@
#!/bin/sh -e
set -x
-if [ "${TRAVIS_REPO_SLUG}" = "" ];
+if [ "${GITHUB_ACTIONS}" = "" ];
then
- echo "Cannot release docs without TRAVIS_REPO_SLUG set"
+ echo "Cannot release docs without GITHUB_ACTIONS set"
exit 0;
fi
-REPO="https://github.com/${TRAVIS_REPO_SLUG}"
+REPO="https://github.com/parse-community/parse-server"
rm -rf docs
git clone -b gh-pages --single-branch $REPO ./docs
@@ -15,9 +15,9 @@ cd ..
DEST="master"
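+# Publish docs under the tag name for release builds; otherwise under "master".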
-if [ "${TRAVIS_TAG}" != "" ];
+if [ "${SOURCE_TAG}" != "" ];
then
- DEST="${TRAVIS_TAG}"
+ DEST="${SOURCE_TAG}"
# change the default page to the latest
echo "" > "docs/api/index.html"
fi
diff --git a/resources/buildConfigDefinitions.js b/resources/buildConfigDefinitions.js
index 99b57b1379..aee5403613 100644
--- a/resources/buildConfigDefinitions.js
+++ b/resources/buildConfigDefinitions.js
@@ -47,7 +47,8 @@ function getENVPrefix(iface) {
'LiveQueryOptions' : 'PARSE_SERVER_LIVEQUERY_',
'IdempotencyOptions' : 'PARSE_SERVER_EXPERIMENTAL_IDEMPOTENCY_',
'AccountLockoutOptions' : 'PARSE_SERVER_ACCOUNT_LOCKOUT_',
- 'PasswordPolicyOptions' : 'PARSE_SERVER_PASSWORD_POLICY_'
+ 'PasswordPolicyOptions' : 'PARSE_SERVER_PASSWORD_POLICY_',
+ 'FileUploadOptions' : 'PARSE_SERVER_FILE_UPLOAD_'
}
if (options[iface.id.name]) {
return options[iface.id.name]
@@ -163,14 +164,8 @@ function parseDefaultValue(elt, value, t) {
if (type == 'NumberOrBoolean') {
literalValue = t.numericLiteral(parsers.numberOrBoolParser('')(value));
}
- if (type == 'CustomPagesOptions') {
- const object = parsers.objectParser(value);
- const props = Object.keys(object).map((key) => {
- return t.objectProperty(key, object[value]);
- });
- literalValue = t.objectExpression(props);
- }
- if (type == 'IdempotencyOptions') {
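+  // These option types take an object value; their env string is parsed into
+  // key/value pairs rather than a primitive.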
+  const literalTypes = ['IdempotencyOptions', 'FileUploadOptions', 'CustomPagesOptions'];
+ if (literalTypes.includes(type)) {
const object = parsers.objectParser(value);
const props = Object.keys(object).map((key) => {
-      return t.objectProperty(key, object[value]);
+      return t.objectProperty(key, object[key]);
diff --git a/spec/CloudCode.spec.js b/spec/CloudCode.spec.js
index 7bc39a43bb..9b95bbf8b0 100644
--- a/spec/CloudCode.spec.js
+++ b/spec/CloudCode.spec.js
@@ -3157,4 +3157,14 @@ describe('afterLogin hook', () => {
await Parse.Cloud.run('contextTest', {}, { context: { a: 'a' } });
});
+
+ it('afterFind should have access to context', async () => {
+ Parse.Cloud.afterFind('TestObject', req => {
+ expect(req.context.a).toEqual('a');
+ });
+ const obj = new TestObject();
+ await obj.save();
+ const query = new Parse.Query(TestObject);
+ await query.find({ context: { a: 'a' } });
+ });
});
diff --git a/spec/DatabaseController.spec.js b/spec/DatabaseController.spec.js
index 988248c891..98103ce6e4 100644
--- a/spec/DatabaseController.spec.js
+++ b/spec/DatabaseController.spec.js
@@ -236,6 +236,57 @@ describe('DatabaseController', function () {
done();
});
+ it('should not return a $or operation if the query involves one of the two fields also used as array/pointer permissions', done => {
+ const clp = buildCLP(['users', 'user']);
+ const query = { a: 'b', user: createUserPointer(USER_ID) };
+ schemaController.testPermissionsForClassName
+ .withArgs(CLASS_NAME, ACL_GROUP, OPERATION)
+ .and.returnValue(false);
+ schemaController.getClassLevelPermissions.withArgs(CLASS_NAME).and.returnValue(clp);
+ schemaController.getExpectedType
+ .withArgs(CLASS_NAME, 'user')
+ .and.returnValue({ type: 'Pointer' });
+ schemaController.getExpectedType
+ .withArgs(CLASS_NAME, 'users')
+ .and.returnValue({ type: 'Array' });
+ const output = databaseController.addPointerPermissions(
+ schemaController,
+ CLASS_NAME,
+ OPERATION,
+ query,
+ ACL_GROUP
+ );
+ expect(output).toEqual({ ...query, user: createUserPointer(USER_ID) });
+ done();
+ });
+
+ it('should not return a $or operation if the query involves one of the fields also used as array/pointer permissions', done => {
+ const clp = buildCLP(['user', 'users', 'userObject']);
+ const query = { a: 'b', user: createUserPointer(USER_ID) };
+ schemaController.testPermissionsForClassName
+ .withArgs(CLASS_NAME, ACL_GROUP, OPERATION)
+ .and.returnValue(false);
+ schemaController.getClassLevelPermissions.withArgs(CLASS_NAME).and.returnValue(clp);
+ schemaController.getExpectedType
+ .withArgs(CLASS_NAME, 'user')
+ .and.returnValue({ type: 'Pointer' });
+ schemaController.getExpectedType
+ .withArgs(CLASS_NAME, 'users')
+ .and.returnValue({ type: 'Array' });
+ schemaController.getExpectedType
+ .withArgs(CLASS_NAME, 'userObject')
+ .and.returnValue({ type: 'Object' });
+ const output = databaseController.addPointerPermissions(
+ schemaController,
+ CLASS_NAME,
+ OPERATION,
+ query,
+ ACL_GROUP
+ );
+ expect(output).toEqual({ ...query, user: createUserPointer(USER_ID) });
+ done();
+ });
+
it('should throw an error if for some unexpected reason the property specified in the CLP is neither a pointer nor an array', done => {
const clp = buildCLP(['user']);
const query = { a: 'b' };
@@ -265,6 +316,51 @@ describe('DatabaseController', function () {
done();
});
});
+
+ describe('reduceOperations', function () {
+ const databaseController = new DatabaseController();
+
+ it('objectToEntriesStrings', done => {
+ const output = databaseController.objectToEntriesStrings({ a: 1, b: 2, c: 3 });
+ expect(output).toEqual(['"a":1', '"b":2', '"c":3']);
+ done();
+ });
+
+ it('reduceOrOperation', done => {
+ expect(databaseController.reduceOrOperation({ a: 1 })).toEqual({ a: 1 });
+ expect(databaseController.reduceOrOperation({ $or: [{ a: 1 }, { b: 2 }] })).toEqual({
+ $or: [{ a: 1 }, { b: 2 }],
+ });
+ expect(databaseController.reduceOrOperation({ $or: [{ a: 1 }, { a: 2 }] })).toEqual({
+ $or: [{ a: 1 }, { a: 2 }],
+ });
+ expect(databaseController.reduceOrOperation({ $or: [{ a: 1 }, { a: 1 }] })).toEqual({ a: 1 });
+ expect(
+ databaseController.reduceOrOperation({ $or: [{ a: 1, b: 2, c: 3 }, { a: 1 }] })
+ ).toEqual({ a: 1 });
+ expect(
+ databaseController.reduceOrOperation({ $or: [{ b: 2 }, { a: 1, b: 2, c: 3 }] })
+ ).toEqual({ b: 2 });
+ done();
+ });
+
+ it('reduceAndOperation', done => {
+ expect(databaseController.reduceAndOperation({ a: 1 })).toEqual({ a: 1 });
+ expect(databaseController.reduceAndOperation({ $and: [{ a: 1 }, { b: 2 }] })).toEqual({
+ $and: [{ a: 1 }, { b: 2 }],
+ });
+ expect(databaseController.reduceAndOperation({ $and: [{ a: 1 }, { a: 2 }] })).toEqual({
+ $and: [{ a: 1 }, { a: 2 }],
+ });
+ expect(databaseController.reduceAndOperation({ $and: [{ a: 1 }, { a: 1 }] })).toEqual({
+ a: 1,
+ });
+ expect(
+ databaseController.reduceAndOperation({ $and: [{ a: 1, b: 2, c: 3 }, { b: 2 }] })
+ ).toEqual({ a: 1, b: 2, c: 3 });
+ done();
+ });
+ });
});
function buildCLP(pointerNames) {
diff --git a/spec/EnableExpressErrorHandler.spec.js b/spec/EnableExpressErrorHandler.spec.js
index fa73d21fb3..26483ec6a1 100644
--- a/spec/EnableExpressErrorHandler.spec.js
+++ b/spec/EnableExpressErrorHandler.spec.js
@@ -2,6 +2,7 @@ const request = require('../lib/request');
describe('Enable express error handler', () => {
it('should call the default handler in case of error, like updating a non existing object', async done => {
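+    // Silence the error output that the default Express error handler produces in this test.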
+ spyOn(console, 'error');
const parseServer = await reconfigureServer(
Object.assign({}, defaultConfiguration, {
enableExpressErrorHandler: true,
diff --git a/spec/LdapAuth.spec.js b/spec/LdapAuth.spec.js
index 75fa3396e8..56e583e60b 100644
--- a/spec/LdapAuth.spec.js
+++ b/spec/LdapAuth.spec.js
@@ -216,7 +216,7 @@ it('Should delete the password from authData after validation', done => {
const options = {
suffix: 'o=example',
url: `ldap://localhost:${port}`,
- dn: 'uid={{id}}, o=example'
+ dn: 'uid={{id}}, o=example',
};
const authData = { id: 'testuser', password: 'secret' };
@@ -237,22 +237,23 @@ it('Should not save the password in the user record after authentication', done
const options = {
suffix: 'o=example',
url: `ldap://localhost:${port}`,
- dn: 'uid={{id}}, o=example'
+ dn: 'uid={{id}}, o=example',
};
reconfigureServer({ auth: { ldap: options } }).then(() => {
const authData = { authData: { id: 'testuser', password: 'secret' } };
- Parse.User.logInWith('ldap', authData).then((returnedUser) => {
- const query = new Parse.Query("User");
+ Parse.User.logInWith('ldap', authData).then(returnedUser => {
+ const query = new Parse.Query('User');
query
- .equalTo('objectId', returnedUser.id).first({ useMasterKey: true })
- .then((user) => {
- expect(user.get('authData')).toEqual({ ldap:{ id: 'testuser' }});
+ .equalTo('objectId', returnedUser.id)
+ .first({ useMasterKey: true })
+ .then(user => {
+ expect(user.get('authData')).toEqual({ ldap: { id: 'testuser' } });
expect(user.get('authData').ldap.password).toBeUndefined();
done();
})
.catch(done.fail)
- .finally(() => server.close())
- })
+ .finally(() => server.close());
+ });
});
});
});
diff --git a/spec/MongoStorageAdapter.spec.js b/spec/MongoStorageAdapter.spec.js
index b63da31623..9dd91c0b65 100644
--- a/spec/MongoStorageAdapter.spec.js
+++ b/spec/MongoStorageAdapter.spec.js
@@ -308,18 +308,18 @@ describe_only_db('mongo')('MongoStorageAdapter', () => {
});
it('should use index for caseInsensitive query', async () => {
+ const database = Config.get(Parse.applicationId).database;
+
const user = new Parse.User();
user.set('username', 'Bugs');
user.set('password', 'Bunny');
await user.signUp();
- const database = Config.get(Parse.applicationId).database;
const preIndexPlan = await database.find(
'_User',
{ username: 'bugs' },
{ caseInsensitive: true, explain: true }
);
-
const schema = await new Parse.Schema('_User').get();
await database.adapter.ensureIndex(
@@ -335,7 +335,7 @@ describe_only_db('mongo')('MongoStorageAdapter', () => {
{ username: 'bugs' },
{ caseInsensitive: true, explain: true }
);
- expect(preIndexPlan.executionStats.executionStages.stage).toBe('COLLSCAN');
+ expect(preIndexPlan.executionStats.executionStages.stage).toBe('FETCH');
expect(postIndexPlan.executionStats.executionStages.stage).toBe('FETCH');
});
@@ -549,5 +549,32 @@ describe_only_db('mongo')('MongoStorageAdapter', () => {
});
});
});
+
+ describe('watch _SCHEMA', () => {
+ it('should change', async done => {
+ const adapter = new MongoStorageAdapter({ uri: databaseURI });
+ await reconfigureServer({
+ replicaSet: true,
+ databaseAdapter: adapter,
+ });
+ expect(adapter.replicaSet).toBe(true);
+ spyOn(adapter, '_onchange');
+ const schema = {
+ fields: {
+ array: { type: 'Array' },
+ object: { type: 'Object' },
+ date: { type: 'Date' },
+ },
+ };
+
+ await adapter.createClass('Stuff', schema);
+ const myClassSchema = await adapter.getClass('Stuff');
+ expect(myClassSchema).toBeDefined();
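+      // Allow time for the _SCHEMA change stream to emit before checking the spy.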
+ setTimeout(() => {
+ expect(adapter._onchange).toHaveBeenCalledTimes(1);
+ done();
+ }, 5000);
+ });
+ });
}
});
diff --git a/spec/ParseFile.spec.js b/spec/ParseFile.spec.js
index 410d15c81b..0bd90426c6 100644
--- a/spec/ParseFile.spec.js
+++ b/spec/ParseFile.spec.js
@@ -4,6 +4,7 @@
'use strict';
const request = require('../lib/request');
+const Definitions = require('../src/Options/Definitions');
const str = 'Hello World!';
const data = [];
@@ -860,4 +861,196 @@ describe('Parse.File testing', () => {
});
});
});
+
+ describe('file upload configuration', () => {
+ it('allows file upload only for authenticated user by default', async () => {
+ await reconfigureServer({
+ fileUpload: {
+ enableForPublic: Definitions.FileUploadOptions.enableForPublic.default,
+ enableForAnonymousUser: Definitions.FileUploadOptions.enableForAnonymousUser.default,
+        enableForAuthenticatedUser:
+          Definitions.FileUploadOptions.enableForAuthenticatedUser.default,
+      },
+ });
+ let file = new Parse.File('hello.txt', data, 'text/plain');
+ await expectAsync(file.save()).toBeRejectedWith(
+ new Parse.Error(Parse.Error.FILE_SAVE_ERROR, 'File upload by public is disabled.')
+ );
+ file = new Parse.File('hello.txt', data, 'text/plain');
+ const anonUser = await Parse.AnonymousUtils.logIn();
+ await expectAsync(file.save({ sessionToken: anonUser.getSessionToken() })).toBeRejectedWith(
+ new Parse.Error(Parse.Error.FILE_SAVE_ERROR, 'File upload by anonymous user is disabled.')
+ );
+ file = new Parse.File('hello.txt', data, 'text/plain');
+ const authUser = await Parse.User.signUp('user', 'password');
+ await expectAsync(file.save({ sessionToken: authUser.getSessionToken() })).toBeResolved();
+ });
+
+ it('allows file upload with master key', async () => {
+ await reconfigureServer({
+ fileUpload: {
+ enableForPublic: false,
+ enableForAnonymousUser: false,
+ enableForAuthenticatedUser: false,
+ },
+ });
+ const file = new Parse.File('hello.txt', data, 'text/plain');
+ await expectAsync(file.save({ useMasterKey: true })).toBeResolved();
+ });
+
+ it('rejects all file uploads', async () => {
+ await reconfigureServer({
+ fileUpload: {
+ enableForPublic: false,
+ enableForAnonymousUser: false,
+ enableForAuthenticatedUser: false,
+ },
+ });
+ let file = new Parse.File('hello.txt', data, 'text/plain');
+ await expectAsync(file.save()).toBeRejectedWith(
+ new Parse.Error(Parse.Error.FILE_SAVE_ERROR, 'File upload by public is disabled.')
+ );
+ file = new Parse.File('hello.txt', data, 'text/plain');
+ const anonUser = await Parse.AnonymousUtils.logIn();
+ await expectAsync(file.save({ sessionToken: anonUser.getSessionToken() })).toBeRejectedWith(
+ new Parse.Error(Parse.Error.FILE_SAVE_ERROR, 'File upload by anonymous user is disabled.')
+ );
+ file = new Parse.File('hello.txt', data, 'text/plain');
+ const authUser = await Parse.User.signUp('user', 'password');
+ await expectAsync(file.save({ sessionToken: authUser.getSessionToken() })).toBeRejectedWith(
+ new Parse.Error(Parse.Error.FILE_SAVE_ERROR, 'File upload by authenticated user is disabled.')
+ );
+ });
+
+ it('allows all file uploads', async () => {
+ await reconfigureServer({
+ fileUpload: {
+ enableForPublic: true,
+ enableForAnonymousUser: true,
+ enableForAuthenticatedUser: true,
+ },
+ });
+ let file = new Parse.File('hello.txt', data, 'text/plain');
+ await expectAsync(file.save()).toBeResolved();
+ file = new Parse.File('hello.txt', data, 'text/plain');
+ const anonUser = await Parse.AnonymousUtils.logIn();
+ await expectAsync(file.save({ sessionToken: anonUser.getSessionToken() })).toBeResolved();
+ file = new Parse.File('hello.txt', data, 'text/plain');
+ const authUser = await Parse.User.signUp('user', 'password');
+ await expectAsync(file.save({ sessionToken: authUser.getSessionToken() })).toBeResolved();
+ });
+
+ it('allows file upload only for public', async () => {
+ await reconfigureServer({
+ fileUpload: {
+ enableForPublic: true,
+ enableForAnonymousUser: false,
+ enableForAuthenticatedUser: false,
+ },
+ });
+ let file = new Parse.File('hello.txt', data, 'text/plain');
+ await expectAsync(file.save()).toBeResolved();
+ file = new Parse.File('hello.txt', data, 'text/plain');
+ const anonUser = await Parse.AnonymousUtils.logIn();
+ await expectAsync(file.save({ sessionToken: anonUser.getSessionToken() })).toBeRejectedWith(
+ new Parse.Error(Parse.Error.FILE_SAVE_ERROR, 'File upload by anonymous user is disabled.')
+ );
+ file = new Parse.File('hello.txt', data, 'text/plain');
+ const authUser = await Parse.User.signUp('user', 'password');
+ await expectAsync(file.save({ sessionToken: authUser.getSessionToken() })).toBeRejectedWith(
+ new Parse.Error(Parse.Error.FILE_SAVE_ERROR, 'File upload by authenticated user is disabled.')
+ );
+ });
+
+ it('allows file upload only for anonymous user', async () => {
+ await reconfigureServer({
+ fileUpload: {
+ enableForPublic: false,
+ enableForAnonymousUser: true,
+ enableForAuthenticatedUser: false,
+ },
+ });
+ let file = new Parse.File('hello.txt', data, 'text/plain');
+ await expectAsync(file.save()).toBeRejectedWith(
+ new Parse.Error(Parse.Error.FILE_SAVE_ERROR, 'File upload by public is disabled.')
+ );
+ file = new Parse.File('hello.txt', data, 'text/plain');
+ const anonUser = await Parse.AnonymousUtils.logIn();
+ await expectAsync(file.save({ sessionToken: anonUser.getSessionToken() })).toBeResolved();
+ file = new Parse.File('hello.txt', data, 'text/plain');
+ const authUser = await Parse.User.signUp('user', 'password');
+ await expectAsync(file.save({ sessionToken: authUser.getSessionToken() })).toBeRejectedWith(
+ new Parse.Error(Parse.Error.FILE_SAVE_ERROR, 'File upload by authenticated user is disabled.')
+ );
+ });
+
+ it('allows file upload only for authenticated user', async () => {
+ await reconfigureServer({
+ fileUpload: {
+ enableForPublic: false,
+ enableForAnonymousUser: false,
+ enableForAuthenticatedUser: true,
+ },
+ });
+ let file = new Parse.File('hello.txt', data, 'text/plain');
+ await expectAsync(file.save()).toBeRejectedWith(
+ new Parse.Error(Parse.Error.FILE_SAVE_ERROR, 'File upload by public is disabled.')
+ );
+ file = new Parse.File('hello.txt', data, 'text/plain');
+ const anonUser = await Parse.AnonymousUtils.logIn();
+ await expectAsync(file.save({ sessionToken: anonUser.getSessionToken() })).toBeRejectedWith(
+ new Parse.Error(Parse.Error.FILE_SAVE_ERROR, 'File upload by anonymous user is disabled.')
+ );
+ file = new Parse.File('hello.txt', data, 'text/plain');
+ const authUser = await Parse.User.signUp('user', 'password');
+ await expectAsync(file.save({ sessionToken: authUser.getSessionToken() })).toBeResolved();
+ });
+
+ it('rejects invalid fileUpload configuration', async () => {
+      const invalidConfigs = [
+        { fileUpload: [] },
+        { fileUpload: 1 },
+        { fileUpload: 'string' },
+      ];
+      const validConfigs = [
+        { fileUpload: {} },
+        { fileUpload: null },
+        { fileUpload: undefined },
+      ];
+      const keys = [
+        'enableForPublic',
+        'enableForAnonymousUser',
+        'enableForAuthenticatedUser',
+      ];
+      const invalidValues = [
+        [],
+        {},
+        1,
+        'string',
+        null,
+      ];
+      const validValues = [
+        undefined,
+        true,
+        false,
+      ];
+ for (const config of invalidConfigs) {
+ await expectAsync(reconfigureServer(config)).toBeRejectedWith(
+ 'fileUpload must be an object value.'
+ );
+ }
+ for (const config of validConfigs) {
+ await expectAsync(reconfigureServer(config)).toBeResolved();
+ }
+      for (const key of keys) {
+        for (const value of invalidValues) {
+          await expectAsync(reconfigureServer({ fileUpload: { [key]: value } })).toBeRejectedWith(
+            `fileUpload.${key} must be a boolean value.`
+          );
+        }
+        for (const value of validValues) {
+          await expectAsync(reconfigureServer({ fileUpload: { [key]: value } })).toBeResolved();
+        }
+      }
+ });
+ });
});
diff --git a/spec/ParseGraphQLServer.spec.js b/spec/ParseGraphQLServer.spec.js
index dad9bda3df..b704ed1059 100644
--- a/spec/ParseGraphQLServer.spec.js
+++ b/spec/ParseGraphQLServer.spec.js
@@ -4493,6 +4493,7 @@ describe('ParseGraphQLServer', () => {
const databaseAdapter = parseServer.config.databaseController.adapter;
spyOn(databaseAdapter.database.serverConfig, 'cursor').and.callThrough();
+ await new Promise(resolve => setTimeout(resolve, 300));
await apolloClient.query({
query: gql`
@@ -4537,6 +4538,7 @@ describe('ParseGraphQLServer', () => {
const databaseAdapter = parseServer.config.databaseController.adapter;
spyOn(databaseAdapter.database.serverConfig, 'cursor').and.callThrough();
+ await new Promise(resolve => setTimeout(resolve, 300));
await apolloClient.query({
query: gql`
@@ -5474,6 +5476,7 @@ describe('ParseGraphQLServer', () => {
const databaseAdapter = parseServer.config.databaseController.adapter;
spyOn(databaseAdapter.database.serverConfig, 'cursor').and.callThrough();
+ await new Promise(resolve => setTimeout(resolve, 300));
await apolloClient.query({
query: gql`
@@ -5519,6 +5522,7 @@ describe('ParseGraphQLServer', () => {
const databaseAdapter = parseServer.config.databaseController.adapter;
spyOn(databaseAdapter.database.serverConfig, 'cursor').and.callThrough();
+ await new Promise(resolve => setTimeout(resolve, 300));
await apolloClient.query({
query: gql`
@@ -9036,7 +9040,7 @@ describe('ParseGraphQLServer', () => {
it('should support object values', async () => {
try {
- const someFieldValue = {
+ const someObjectFieldValue = {
foo: { bar: 'baz' },
number: 10,
};
@@ -9051,7 +9055,7 @@ describe('ParseGraphQLServer', () => {
`,
variables: {
schemaFields: {
- addObjects: [{ name: 'someField' }],
+ addObjects: [{ name: 'someObjectField' }],
},
},
context: {
@@ -9060,11 +9064,10 @@ describe('ParseGraphQLServer', () => {
},
},
});
-
await parseGraphQLServer.parseGraphQLSchema.databaseController.schemaCache.clear();
const schema = await new Parse.Schema('SomeClass').get();
- expect(schema.fields.someField.type).toEqual('Object');
+ expect(schema.fields.someObjectField.type).toEqual('Object');
const createResult = await apolloClient.mutate({
mutation: gql`
@@ -9078,13 +9081,13 @@ describe('ParseGraphQLServer', () => {
`,
variables: {
fields: {
- someField: someFieldValue,
+ someObjectField: someObjectFieldValue,
},
},
});
const where = {
- someField: {
+ someObjectField: {
equalTo: { key: 'foo.bar', value: 'baz' },
notEqualTo: { key: 'foo.bar', value: 'bat' },
greaterThan: { key: 'number', value: 9 },
@@ -9096,13 +9099,13 @@ describe('ParseGraphQLServer', () => {
query GetSomeObject($id: ID!, $where: SomeClassWhereInput) {
someClass(id: $id) {
id
- someField
+ someObjectField
}
someClasses(where: $where) {
edges {
node {
id
- someField
+ someObjectField
}
}
}
@@ -9116,13 +9119,13 @@ describe('ParseGraphQLServer', () => {
const { someClass: getResult, someClasses } = queryResult.data;
- const { someField } = getResult;
- expect(typeof someField).toEqual('object');
- expect(someField).toEqual(someFieldValue);
+ const { someObjectField } = getResult;
+ expect(typeof someObjectField).toEqual('object');
+ expect(someObjectField).toEqual(someObjectFieldValue);
// Checks class query results
expect(someClasses.edges.length).toEqual(1);
- expect(someClasses.edges[0].node.someField).toEqual(someFieldValue);
+ expect(someClasses.edges[0].node.someObjectField).toEqual(someObjectFieldValue);
} catch (e) {
handleError(e);
}
@@ -9130,11 +9133,11 @@ describe('ParseGraphQLServer', () => {
it('should support object composed queries', async () => {
try {
- const someFieldValue = {
+ const someObjectFieldValue1 = {
lorem: 'ipsum',
number: 10,
};
- const someFieldValue2 = {
+ const someObjectFieldValue2 = {
foo: {
test: 'bar',
},
@@ -9147,7 +9150,7 @@ describe('ParseGraphQLServer', () => {
createClass(
input: {
name: "SomeClass"
- schemaFields: { addObjects: [{ name: "someField" }] }
+ schemaFields: { addObjects: [{ name: "someObjectField" }] }
}
) {
clientMutationId
@@ -9183,10 +9186,10 @@ describe('ParseGraphQLServer', () => {
`,
variables: {
fields1: {
- someField: someFieldValue,
+ someObjectField: someObjectFieldValue1,
},
fields2: {
- someField: someFieldValue2,
+ someObjectField: someObjectFieldValue2,
},
},
});
@@ -9194,24 +9197,24 @@ describe('ParseGraphQLServer', () => {
const where = {
AND: [
{
- someField: {
+ someObjectField: {
greaterThan: { key: 'number', value: 9 },
},
},
{
- someField: {
+ someObjectField: {
lessThan: { key: 'number', value: 11 },
},
},
{
OR: [
{
- someField: {
+ someObjectField: {
equalTo: { key: 'lorem', value: 'ipsum' },
},
},
{
- someField: {
+ someObjectField: {
equalTo: { key: 'foo.test', value: 'bar' },
},
},
@@ -9226,7 +9229,7 @@ describe('ParseGraphQLServer', () => {
edges {
node {
id
- someField
+ someObjectField
}
}
}
@@ -9244,11 +9247,11 @@ describe('ParseGraphQLServer', () => {
const { edges } = someClasses;
expect(edges.length).toEqual(2);
expect(
- edges.find(result => result.node.id === create1.someClass.id).node.someField
- ).toEqual(someFieldValue);
+ edges.find(result => result.node.id === create1.someClass.id).node.someObjectField
+ ).toEqual(someObjectFieldValue1);
expect(
- edges.find(result => result.node.id === create2.someClass.id).node.someField
- ).toEqual(someFieldValue2);
+ edges.find(result => result.node.id === create2.someClass.id).node.someObjectField
+ ).toEqual(someObjectFieldValue2);
} catch (e) {
handleError(e);
}
@@ -9256,7 +9259,7 @@ describe('ParseGraphQLServer', () => {
it('should support array values', async () => {
try {
- const someFieldValue = [1, 'foo', ['bar'], { lorem: 'ipsum' }, true];
+ const someArrayFieldValue = [1, 'foo', ['bar'], { lorem: 'ipsum' }, true];
await apolloClient.mutate({
mutation: gql`
@@ -9268,7 +9271,7 @@ describe('ParseGraphQLServer', () => {
`,
variables: {
schemaFields: {
- addArrays: [{ name: 'someField' }],
+ addArrays: [{ name: 'someArrayField' }],
},
},
context: {
@@ -9281,7 +9284,7 @@ describe('ParseGraphQLServer', () => {
await parseGraphQLServer.parseGraphQLSchema.databaseController.schemaCache.clear();
const schema = await new Parse.Schema('SomeClass').get();
- expect(schema.fields.someField.type).toEqual('Array');
+ expect(schema.fields.someArrayField.type).toEqual('Array');
const createResult = await apolloClient.mutate({
mutation: gql`
@@ -9295,7 +9298,7 @@ describe('ParseGraphQLServer', () => {
`,
variables: {
fields: {
- someField: someFieldValue,
+ someArrayField: someArrayFieldValue,
},
},
});
@@ -9304,17 +9307,17 @@ describe('ParseGraphQLServer', () => {
query: gql`
query GetSomeObject($id: ID!) {
someClass(id: $id) {
- someField {
+ someArrayField {
... on Element {
value
}
}
}
- someClasses(where: { someField: { exists: true } }) {
+ someClasses(where: { someArrayField: { exists: true } }) {
edges {
node {
id
- someField {
+ someArrayField {
... on Element {
value
}
@@ -9329,9 +9332,9 @@ describe('ParseGraphQLServer', () => {
},
});
- const { someField } = getResult.data.someClass;
- expect(Array.isArray(someField)).toBeTruthy();
- expect(someField.map(element => element.value)).toEqual(someFieldValue);
+ const { someArrayField } = getResult.data.someClass;
+ expect(Array.isArray(someArrayField)).toBeTruthy();
+ expect(someArrayField.map(element => element.value)).toEqual(someArrayFieldValue);
expect(getResult.data.someClasses.edges.length).toEqual(1);
} catch (e) {
handleError(e);
diff --git a/spec/ParseQuery.hint.spec.js b/spec/ParseQuery.hint.spec.js
index 156c732587..5e2871c7c9 100644
--- a/spec/ParseQuery.hint.spec.js
+++ b/spec/ParseQuery.hint.spec.js
@@ -24,7 +24,6 @@ describe_only_db('mongo')('Parse.Query hint', () => {
});
afterEach(async () => {
- await config.database.schemaCache.clear();
await TestUtils.destroyAllDataPermanently(false);
});
diff --git a/spec/PostgresStorageAdapter.spec.js b/spec/PostgresStorageAdapter.spec.js
index e51f2bb730..72bf075968 100644
--- a/spec/PostgresStorageAdapter.spec.js
+++ b/spec/PostgresStorageAdapter.spec.js
@@ -235,12 +235,13 @@ describe_only_db('postgres')('PostgresStorageAdapter', () => {
});
it('should use index for caseInsensitive query', async () => {
+ const database = Config.get(Parse.applicationId).database;
+ await database.loadSchema({ clearCache: true });
const tableName = '_User';
const user = new Parse.User();
user.set('username', 'Bugs');
user.set('password', 'Bunny');
await user.signUp();
- const database = Config.get(Parse.applicationId).database;
//Postgres won't take advantage of the index until it has a lot of records because sequential is faster for small db's
const client = adapter._client;
@@ -289,12 +290,14 @@ describe_only_db('postgres')('PostgresStorageAdapter', () => {
});
it('should use index for caseInsensitive query using default indexname', async () => {
+ const database = Config.get(Parse.applicationId).database;
+ await database.loadSchema({ clearCache: true });
const tableName = '_User';
const user = new Parse.User();
user.set('username', 'Bugs');
user.set('password', 'Bunny');
await user.signUp();
- const database = Config.get(Parse.applicationId).database;
+
const fieldToSearch = 'username';
//Create index before data is inserted
const schema = await new Parse.Schema('_User').get();
@@ -377,6 +380,21 @@ describe_only_db('postgres')('PostgresStorageAdapter', () => {
});
});
});
+
+ it('should watch _SCHEMA changes', async () => {
+ const { database } = Config.get(Parse.applicationId);
+ const { adapter } = database;
+
+ spyOn(adapter, 'watch');
+ spyOn(adapter, '_onchange');
+ const schema = await database.loadSchema();
+ // Create a valid class
+ await schema.validateObject('Stuff', { foo: 'bar' });
+ await new Promise(resolve => setTimeout(resolve, 500));
+
+ expect(adapter.watch).toHaveBeenCalledTimes(1);
+ expect(adapter._onchange).toHaveBeenCalledTimes(1);
+ });
});
describe_only_db('postgres')('PostgresStorageAdapter shutdown', () => {
diff --git a/spec/PushWorker.spec.js b/spec/PushWorker.spec.js
index 6f9852091b..422cdf592a 100644
--- a/spec/PushWorker.spec.js
+++ b/spec/PushWorker.spec.js
@@ -314,6 +314,7 @@ describe('PushWorker', () => {
amount: 1,
},
count: { __op: 'Increment', amount: -1 },
+ status: 'running',
});
const query = new Parse.Query('_PushStatus');
return query.get(handler.objectId, { useMasterKey: true });
@@ -409,6 +410,7 @@ describe('PushWorker', () => {
amount: 1,
},
count: { __op: 'Increment', amount: -1 },
+ status: 'running',
});
done();
});
diff --git a/spec/ReadPreferenceOption.spec.js b/spec/ReadPreferenceOption.spec.js
index d78aa92de9..f2bc328d99 100644
--- a/spec/ReadPreferenceOption.spec.js
+++ b/spec/ReadPreferenceOption.spec.js
@@ -7,7 +7,7 @@ const Config = require('../lib/Config');
function waitForReplication() {
return new Promise(function (resolve) {
- setTimeout(resolve, 300);
+ setTimeout(resolve, 1000);
});
}
diff --git a/spec/RedisCacheAdapter.spec.js b/spec/RedisCacheAdapter.spec.js
index 4991d2b937..545e45c10b 100644
--- a/spec/RedisCacheAdapter.spec.js
+++ b/spec/RedisCacheAdapter.spec.js
@@ -1,5 +1,4 @@
const RedisCacheAdapter = require('../lib/Adapters/Cache/RedisCacheAdapter').default;
-const Config = require('../lib/Config');
/*
To run this test part of the complete suite
@@ -173,356 +172,3 @@ describe_only(() => {
.then(done);
});
});
-
-describe_only(() => {
- return process.env.PARSE_SERVER_TEST_CACHE === 'redis';
-})('Redis Performance', function () {
- let cacheAdapter;
- let getSpy;
- let putSpy;
- let delSpy;
-
- beforeEach(async () => {
- cacheAdapter = new RedisCacheAdapter();
- await reconfigureServer({
- cacheAdapter,
- });
- await cacheAdapter.clear();
-
- getSpy = spyOn(cacheAdapter, 'get').and.callThrough();
- putSpy = spyOn(cacheAdapter, 'put').and.callThrough();
- delSpy = spyOn(cacheAdapter, 'del').and.callThrough();
- });
-
- it('test new object', async () => {
- const object = new TestObject();
- object.set('foo', 'bar');
- await object.save();
- expect(getSpy.calls.count()).toBe(3);
- expect(putSpy.calls.count()).toBe(3);
- expect(delSpy.calls.count()).toBe(1);
-
- const keys = await cacheAdapter.getAllKeys();
- expect(keys.length).toBe(0);
- });
-
- it('test new object multiple fields', async () => {
- const container = new Container({
- dateField: new Date(),
- arrayField: [],
- numberField: 1,
- stringField: 'hello',
- booleanField: true,
- });
- await container.save();
- expect(getSpy.calls.count()).toBe(3);
- expect(putSpy.calls.count()).toBe(3);
- expect(delSpy.calls.count()).toBe(1);
-
- const keys = await cacheAdapter.getAllKeys();
- expect(keys.length).toBe(0);
- });
-
- it('test update existing fields', async () => {
- const object = new TestObject();
- object.set('foo', 'bar');
- await object.save();
-
- getSpy.calls.reset();
- putSpy.calls.reset();
-
- object.set('foo', 'barz');
- await object.save();
- expect(getSpy.calls.count()).toBe(3);
- expect(putSpy.calls.count()).toBe(1);
- expect(delSpy.calls.count()).toBe(2);
-
- const keys = await cacheAdapter.getAllKeys();
- expect(keys.length).toBe(0);
- });
-
- it('test saveAll / destroyAll', async () => {
- const object = new TestObject();
- await object.save();
-
- getSpy.calls.reset();
- putSpy.calls.reset();
-
- const objects = [];
- for (let i = 0; i < 10; i++) {
- const object = new TestObject();
- object.set('number', i);
- objects.push(object);
- }
- await Parse.Object.saveAll(objects);
- expect(getSpy.calls.count()).toBe(21);
- expect(putSpy.calls.count()).toBe(11);
-
- getSpy.calls.reset();
- putSpy.calls.reset();
-
- await Parse.Object.destroyAll(objects);
- expect(getSpy.calls.count()).toBe(11);
- expect(putSpy.calls.count()).toBe(1);
- expect(delSpy.calls.count()).toBe(3);
-
- const keys = await cacheAdapter.getAllKeys();
- expect(keys.length).toBe(0);
- });
-
- it('test saveAll / destroyAll batch', async () => {
- const object = new TestObject();
- await object.save();
-
- getSpy.calls.reset();
- putSpy.calls.reset();
-
- const objects = [];
- for (let i = 0; i < 10; i++) {
- const object = new TestObject();
- object.set('number', i);
- objects.push(object);
- }
- await Parse.Object.saveAll(objects, { batchSize: 5 });
- expect(getSpy.calls.count()).toBe(22);
- expect(putSpy.calls.count()).toBe(7);
-
- getSpy.calls.reset();
- putSpy.calls.reset();
-
- await Parse.Object.destroyAll(objects, { batchSize: 5 });
- expect(getSpy.calls.count()).toBe(12);
- expect(putSpy.calls.count()).toBe(2);
- expect(delSpy.calls.count()).toBe(5);
-
- const keys = await cacheAdapter.getAllKeys();
- expect(keys.length).toBe(0);
- });
-
- it('test add new field to existing object', async () => {
- const object = new TestObject();
- object.set('foo', 'bar');
- await object.save();
-
- getSpy.calls.reset();
- putSpy.calls.reset();
-
- object.set('new', 'barz');
- await object.save();
- expect(getSpy.calls.count()).toBe(3);
- expect(putSpy.calls.count()).toBe(2);
- expect(delSpy.calls.count()).toBe(2);
-
- const keys = await cacheAdapter.getAllKeys();
- expect(keys.length).toBe(0);
- });
-
- it('test add multiple fields to existing object', async () => {
- const object = new TestObject();
- object.set('foo', 'bar');
- await object.save();
-
- getSpy.calls.reset();
- putSpy.calls.reset();
-
- object.set({
- dateField: new Date(),
- arrayField: [],
- numberField: 1,
- stringField: 'hello',
- booleanField: true,
- });
- await object.save();
- expect(getSpy.calls.count()).toBe(3);
- expect(putSpy.calls.count()).toBe(2);
- expect(delSpy.calls.count()).toBe(2);
-
- const keys = await cacheAdapter.getAllKeys();
- expect(keys.length).toBe(0);
- });
-
- it('test user', async () => {
- const user = new Parse.User();
- user.setUsername('testing');
- user.setPassword('testing');
- await user.signUp();
-
- expect(getSpy.calls.count()).toBe(8);
- expect(putSpy.calls.count()).toBe(2);
- expect(delSpy.calls.count()).toBe(1);
-
- const keys = await cacheAdapter.getAllKeys();
- expect(keys.length).toBe(0);
- });
-
- it('test allowClientCreation false', async () => {
- const object = new TestObject();
- await object.save();
- await reconfigureServer({
- cacheAdapter,
- allowClientClassCreation: false,
- });
- await cacheAdapter.clear();
-
- getSpy.calls.reset();
- putSpy.calls.reset();
- delSpy.calls.reset();
-
- object.set('foo', 'bar');
- await object.save();
- expect(getSpy.calls.count()).toBe(4);
- expect(putSpy.calls.count()).toBe(2);
-
- getSpy.calls.reset();
- putSpy.calls.reset();
-
- const query = new Parse.Query(TestObject);
- await query.get(object.id);
- expect(getSpy.calls.count()).toBe(3);
- expect(putSpy.calls.count()).toBe(1);
- expect(delSpy.calls.count()).toBe(2);
-
- const keys = await cacheAdapter.getAllKeys();
- expect(keys.length).toBe(0);
- });
-
- it('test query', async () => {
- const object = new TestObject();
- object.set('foo', 'bar');
- await object.save();
-
- getSpy.calls.reset();
- putSpy.calls.reset();
- delSpy.calls.reset();
-
- const query = new Parse.Query(TestObject);
- await query.get(object.id);
- expect(getSpy.calls.count()).toBe(2);
- expect(putSpy.calls.count()).toBe(1);
- expect(delSpy.calls.count()).toBe(1);
-
- const keys = await cacheAdapter.getAllKeys();
- expect(keys.length).toBe(0);
- });
-
- it('test query include', async () => {
- const child = new TestObject();
- await child.save();
-
- const object = new TestObject();
- object.set('child', child);
- await object.save();
-
- getSpy.calls.reset();
- putSpy.calls.reset();
-
- const query = new Parse.Query(TestObject);
- query.include('child');
- await query.get(object.id);
-
- expect(getSpy.calls.count()).toBe(4);
- expect(putSpy.calls.count()).toBe(1);
- expect(delSpy.calls.count()).toBe(3);
-
- const keys = await cacheAdapter.getAllKeys();
- expect(keys.length).toBe(0);
- });
-
- it('query relation without schema', async () => {
- const child = new Parse.Object('ChildObject');
- await child.save();
-
- const parent = new Parse.Object('ParentObject');
- const relation = parent.relation('child');
- relation.add(child);
- await parent.save();
-
- getSpy.calls.reset();
- putSpy.calls.reset();
-
- const objects = await relation.query().find();
- expect(objects.length).toBe(1);
- expect(objects[0].id).toBe(child.id);
-
- expect(getSpy.calls.count()).toBe(2);
- expect(putSpy.calls.count()).toBe(1);
- expect(delSpy.calls.count()).toBe(3);
-
- const keys = await cacheAdapter.getAllKeys();
- expect(keys.length).toBe(0);
- });
-
- it('test delete object', async () => {
- const object = new TestObject();
- object.set('foo', 'bar');
- await object.save();
-
- getSpy.calls.reset();
- putSpy.calls.reset();
- delSpy.calls.reset();
-
- await object.destroy();
- expect(getSpy.calls.count()).toBe(2);
- expect(putSpy.calls.count()).toBe(1);
- expect(delSpy.calls.count()).toBe(1);
-
- const keys = await cacheAdapter.getAllKeys();
- expect(keys.length).toBe(0);
- });
-
- it('test schema update class', async () => {
- const container = new Container();
- await container.save();
-
- getSpy.calls.reset();
- putSpy.calls.reset();
- delSpy.calls.reset();
-
- const config = Config.get('test');
- const schema = await config.database.loadSchema();
- await schema.reloadData();
-
- const levelPermissions = {
- find: { '*': true },
- get: { '*': true },
- create: { '*': true },
- update: { '*': true },
- delete: { '*': true },
- addField: { '*': true },
- protectedFields: { '*': [] },
- };
-
- await schema.updateClass(
- 'Container',
- {
- fooOne: { type: 'Number' },
- fooTwo: { type: 'Array' },
- fooThree: { type: 'Date' },
- fooFour: { type: 'Object' },
- fooFive: { type: 'Relation', targetClass: '_User' },
- fooSix: { type: 'String' },
- fooSeven: { type: 'Object' },
- fooEight: { type: 'String' },
- fooNine: { type: 'String' },
- fooTeen: { type: 'Number' },
- fooEleven: { type: 'String' },
- fooTwelve: { type: 'String' },
- fooThirteen: { type: 'String' },
- fooFourteen: { type: 'String' },
- fooFifteen: { type: 'String' },
- fooSixteen: { type: 'String' },
- fooEighteen: { type: 'String' },
- fooNineteen: { type: 'String' },
- },
- levelPermissions,
- {},
- config.database
- );
- expect(getSpy.calls.count()).toBe(3);
- expect(putSpy.calls.count()).toBe(3);
- expect(delSpy.calls.count()).toBe(0);
-
- const keys = await cacheAdapter.getAllKeys();
- expect(keys.length).toBe(1);
- });
-});
diff --git a/spec/Schema.spec.js b/spec/Schema.spec.js
index 932eec16d9..9ed39ea32e 100644
--- a/spec/Schema.spec.js
+++ b/spec/Schema.spec.js
@@ -3,7 +3,6 @@
const Config = require('../lib/Config');
const SchemaController = require('../lib/Controllers/SchemaController');
const dd = require('deep-diff');
-const TestUtils = require('../lib/TestUtils');
let config;
@@ -25,11 +24,6 @@ describe('SchemaController', () => {
config = Config.get('test');
});
- afterEach(async () => {
- await config.database.schemaCache.clear();
- await TestUtils.destroyAllDataPermanently(false);
- });
-
it('can validate one object', done => {
config.database
.loadSchema()
@@ -1349,17 +1343,6 @@ describe('SchemaController', () => {
.catch(done.fail);
});
- it('setAllClasses return classes if cache fails', async () => {
- const schema = await config.database.loadSchema();
-
- spyOn(schema._cache, 'setAllClasses').and.callFake(() => Promise.reject('Oops!'));
- const errorSpy = spyOn(console, 'error').and.callFake(() => {});
- const allSchema = await schema.setAllClasses();
-
- expect(allSchema).toBeDefined();
- expect(errorSpy).toHaveBeenCalledWith('Error saving schema to cache:', 'Oops!');
- });
-
it('should not throw on null field types', async () => {
const schema = await config.database.loadSchema();
const result = await schema.enforceFieldExists('NewClass', 'fieldName', null);
diff --git a/spec/SchemaCache.spec.js b/spec/SchemaCache.spec.js
deleted file mode 100644
index 5a4a517395..0000000000
--- a/spec/SchemaCache.spec.js
+++ /dev/null
@@ -1,75 +0,0 @@
-const CacheController = require('../lib/Controllers/CacheController.js').default;
-const InMemoryCacheAdapter = require('../lib/Adapters/Cache/InMemoryCacheAdapter').default;
-const SchemaCache = require('../lib/Controllers/SchemaCache').default;
-
-describe('SchemaCache', () => {
- let cacheController;
-
- beforeEach(() => {
- const cacheAdapter = new InMemoryCacheAdapter({});
- cacheController = new CacheController(cacheAdapter, 'appId');
- });
-
- it('can retrieve a single schema after all schemas stored', done => {
- const schemaCache = new SchemaCache(cacheController);
- const allSchemas = [
- {
- className: 'Class1',
- },
- {
- className: 'Class2',
- },
- ];
- schemaCache
- .setAllClasses(allSchemas)
- .then(() => {
- return schemaCache.getOneSchema('Class2');
- })
- .then(schema => {
- expect(schema).not.toBeNull();
- done();
- });
- });
-
- it("doesn't persist cached data by default", done => {
- const schemaCache = new SchemaCache(cacheController);
- const schema = {
- className: 'Class1',
- };
- schemaCache.setAllClasses([schema]).then(() => {
- const anotherSchemaCache = new SchemaCache(cacheController);
- return anotherSchemaCache.getOneSchema(schema.className).then(schema => {
- expect(schema).toBeNull();
- done();
- });
- });
- });
-
- it('can persist cached data', done => {
- const schemaCache = new SchemaCache(cacheController, 5000, true);
- const schema = {
- className: 'Class1',
- };
- schemaCache.setAllClasses([schema]).then(() => {
- const anotherSchemaCache = new SchemaCache(cacheController, 5000, true);
- return anotherSchemaCache.getOneSchema(schema.className).then(schema => {
- expect(schema).not.toBeNull();
- done();
- });
- });
- });
-
- it('should not store if ttl is null', async () => {
- const ttl = null;
- const schemaCache = new SchemaCache(cacheController, ttl);
- expect(await schemaCache.getAllClasses()).toBeNull();
- expect(await schemaCache.setAllClasses()).toBeNull();
- expect(await schemaCache.getOneSchema()).toBeNull();
- });
-
- it('should convert string ttl to number', async () => {
- const ttl = '5000';
- const schemaCache = new SchemaCache(cacheController, ttl);
- expect(schemaCache.ttl).toBe(5000);
- });
-});
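The deleted suite above covered the standalone TTL-based SchemaCache controller. Elsewhere in this diff the cache is reached as `config.database.schemaCache` with a synchronous `clear()`. A minimal sketch of an in-memory cache with that surface — an illustrative assumption, not the PR's actual implementation:

```js
// Hypothetical single-instance schema cache exposing the calls seen in
// this diff (setAllClasses, getOneSchema, clear); not the PR's real code.
class InMemorySchemaCache {
  constructor() {
    this.allClasses = null;
  }
  setAllClasses(schemas) {
    this.allClasses = schemas;
  }
  getOneSchema(className) {
    return (this.allClasses || []).find(s => s.className === className) || null;
  }
  clear() {
    this.allClasses = null;
  }
}
```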
diff --git a/spec/SchemaPerformance.spec.js b/spec/SchemaPerformance.spec.js
new file mode 100644
index 0000000000..f0305b9bf3
--- /dev/null
+++ b/spec/SchemaPerformance.spec.js
@@ -0,0 +1,212 @@
+const Config = require('../lib/Config');
+const MongoStorageAdapter = require('../lib/Adapters/Storage/Mongo/MongoStorageAdapter').default;
+const mongoURI = 'mongodb://localhost:27017/parseServerMongoAdapterTestDatabase';
+
+describe_only_db('mongo')('Schema Performance', function () {
+ let getAllSpy;
+ let config;
+
+ beforeEach(async () => {
+ config = Config.get('test');
+ config.database.schemaCache.clear();
+ const databaseAdapter = new MongoStorageAdapter({ uri: mongoURI });
+ await reconfigureServer({
+ replicaSet: false,
+ databaseAdapter,
+ });
+ getAllSpy = spyOn(databaseAdapter, 'getAllClasses').and.callThrough();
+ });
+
+ it('test new object', async () => {
+ const object = new TestObject();
+ object.set('foo', 'bar');
+ await object.save();
+ expect(getAllSpy.calls.count()).toBe(0);
+ });
+
+ it('test new object multiple fields', async () => {
+ const container = new Container({
+ dateField: new Date(),
+ arrayField: [],
+ numberField: 1,
+ stringField: 'hello',
+ booleanField: true,
+ });
+ await container.save();
+ expect(getAllSpy.calls.count()).toBe(0);
+ });
+
+ it('test update existing fields', async () => {
+ const object = new TestObject();
+ object.set('foo', 'bar');
+ await object.save();
+
+ getAllSpy.calls.reset();
+
+ object.set('foo', 'barz');
+ await object.save();
+ expect(getAllSpy.calls.count()).toBe(0);
+ });
+
+ xit('test saveAll / destroyAll', async () => {
+    // This test can be flaky due to the nature of /batch requests,
+    // so it is skipped by default and kept as a performance reference
+ const object = new TestObject();
+ await object.save();
+
+ getAllSpy.calls.reset();
+
+ const objects = [];
+ for (let i = 0; i < 10; i++) {
+ const object = new TestObject();
+ object.set('number', i);
+ objects.push(object);
+ }
+ await Parse.Object.saveAll(objects);
+ expect(getAllSpy.calls.count()).toBe(0);
+
+ getAllSpy.calls.reset();
+
+ const query = new Parse.Query(TestObject);
+ await query.find();
+ expect(getAllSpy.calls.count()).toBe(0);
+
+ getAllSpy.calls.reset();
+
+ await Parse.Object.destroyAll(objects);
+ expect(getAllSpy.calls.count()).toBe(0);
+ });
+
+ it('test add new field to existing object', async () => {
+ const object = new TestObject();
+ object.set('foo', 'bar');
+ await object.save();
+
+ getAllSpy.calls.reset();
+
+ object.set('new', 'barz');
+ await object.save();
+ expect(getAllSpy.calls.count()).toBe(0);
+ });
+
+ it('test add multiple fields to existing object', async () => {
+ const object = new TestObject();
+ object.set('foo', 'bar');
+ await object.save();
+
+ getAllSpy.calls.reset();
+
+ object.set({
+ dateField: new Date(),
+ arrayField: [],
+ numberField: 1,
+ stringField: 'hello',
+ booleanField: true,
+ });
+ await object.save();
+ expect(getAllSpy.calls.count()).toBe(0);
+ });
+
+ it('test user', async () => {
+ const user = new Parse.User();
+ user.setUsername('testing');
+ user.setPassword('testing');
+ await user.signUp();
+
+ expect(getAllSpy.calls.count()).toBe(0);
+ });
+
+ it('test query include', async () => {
+ const child = new TestObject();
+ await child.save();
+
+ const object = new TestObject();
+ object.set('child', child);
+ await object.save();
+
+ getAllSpy.calls.reset();
+
+ const query = new Parse.Query(TestObject);
+ query.include('child');
+ await query.get(object.id);
+
+ expect(getAllSpy.calls.count()).toBe(0);
+ });
+
+ it('query relation without schema', async () => {
+ const child = new Parse.Object('ChildObject');
+ await child.save();
+
+ const parent = new Parse.Object('ParentObject');
+ const relation = parent.relation('child');
+ relation.add(child);
+ await parent.save();
+
+ getAllSpy.calls.reset();
+
+ const objects = await relation.query().find();
+ expect(objects.length).toBe(1);
+ expect(objects[0].id).toBe(child.id);
+
+ expect(getAllSpy.calls.count()).toBe(0);
+ });
+
+ it('test delete object', async () => {
+ const object = new TestObject();
+ object.set('foo', 'bar');
+ await object.save();
+
+ getAllSpy.calls.reset();
+
+ await object.destroy();
+ expect(getAllSpy.calls.count()).toBe(0);
+ });
+
+ it('test schema update class', async () => {
+ const container = new Container();
+ await container.save();
+
+ getAllSpy.calls.reset();
+
+ const schema = await config.database.loadSchema();
+ await schema.reloadData();
+
+ const levelPermissions = {
+ find: { '*': true },
+ get: { '*': true },
+ create: { '*': true },
+ update: { '*': true },
+ delete: { '*': true },
+ addField: { '*': true },
+ protectedFields: { '*': [] },
+ };
+
+ await schema.updateClass(
+ 'Container',
+ {
+ fooOne: { type: 'Number' },
+ fooTwo: { type: 'Array' },
+ fooThree: { type: 'Date' },
+ fooFour: { type: 'Object' },
+ fooFive: { type: 'Relation', targetClass: '_User' },
+ fooSix: { type: 'String' },
+ fooSeven: { type: 'Object' },
+ fooEight: { type: 'String' },
+ fooNine: { type: 'String' },
+ fooTeen: { type: 'Number' },
+ fooEleven: { type: 'String' },
+ fooTwelve: { type: 'String' },
+ fooThirteen: { type: 'String' },
+ fooFourteen: { type: 'String' },
+ fooFifteen: { type: 'String' },
+ fooSixteen: { type: 'String' },
+ fooEighteen: { type: 'String' },
+ fooNineteen: { type: 'String' },
+ },
+ levelPermissions,
+ {},
+ config.database
+ );
+ expect(getAllSpy.calls.count()).toBe(0);
+ });
+});
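The performance suite above leans on one Jasmine idiom: spy on the storage adapter, reset the spy once setup is done, then assert that the operation under test never reached the database for schema reads. A minimal sketch of the idiom, with a hypothetical adapter standing in for MongoStorageAdapter:

```js
// Jasmine spy idiom used throughout the suite above; `adapter` is a
// hypothetical stand-in for the real storage adapter.
describe('spy idiom', () => {
  it('only counts calls made after the reset', async () => {
    const adapter = { getAllClasses: () => Promise.resolve([]) };
    const spy = spyOn(adapter, 'getAllClasses').and.callThrough();

    await adapter.getAllClasses(); // setup work, e.g. the first save
    spy.calls.reset();             // discard calls made during setup

    // The operation under test should be served from the schema cache.
    expect(spy.calls.count()).toBe(0);
  });
});
```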
diff --git a/spec/dev.js b/spec/dev.js
index c58879a533..9b1559464c 100644
--- a/spec/dev.js
+++ b/spec/dev.js
@@ -4,12 +4,9 @@ const Parse = require('parse/node');
const className = 'AnObject';
const defaultRoleName = 'tester';
-let schemaCache;
-
module.exports = {
/* AnObject */
className,
- schemaCache,
/**
* Creates and returns new user.
diff --git a/spec/helper.js b/spec/helper.js
index a7f6cf2280..7046065713 100644
--- a/spec/helper.js
+++ b/spec/helper.js
@@ -88,6 +88,12 @@ const defaultConfiguration = {
fileKey: 'test',
silent,
logLevel,
+ replicaSet: false,
+ fileUpload: {
+ enableForPublic: true,
+ enableForAnonymousUser: true,
+ enableForAuthenticatedUser: true,
+ },
push: {
android: {
senderId: 'yolo',
diff --git a/spec/index.spec.js b/spec/index.spec.js
index 1b542926c1..a26c015f54 100644
--- a/spec/index.spec.js
+++ b/spec/index.spec.js
@@ -70,6 +70,8 @@ describe('server', () => {
},
}),
}).catch(() => {
+ const config = Config.get('test');
+ config.database.schemaCache.clear();
//Need to use rest api because saving via JS SDK results in fail() not getting called
request({
method: 'POST',
diff --git a/spec/schemas.spec.js b/spec/schemas.spec.js
index 6cdb610e9d..72ed6fea2f 100644
--- a/spec/schemas.spec.js
+++ b/spec/schemas.spec.js
@@ -4,7 +4,6 @@ const Parse = require('parse/node').Parse;
const dd = require('deep-diff');
const Config = require('../lib/Config');
const request = require('../lib/request');
-const TestUtils = require('../lib/TestUtils');
let config;
@@ -144,11 +143,6 @@ describe('schemas', () => {
config = Config.get('test');
});
- afterEach(async () => {
- await config.database.schemaCache.clear();
- await TestUtils.destroyAllDataPermanently(false);
- });
-
it('requires the master key to get all schemas', done => {
request({
url: 'http://localhost:8378/1/schemas',
diff --git a/src/Adapters/Auth/OAuth1Client.js b/src/Adapters/Auth/OAuth1Client.js
index 4e7f9267d3..f622852e9a 100644
--- a/src/Adapters/Auth/OAuth1Client.js
+++ b/src/Adapters/Auth/OAuth1Client.js
@@ -2,12 +2,9 @@ var https = require('https'),
crypto = require('crypto');
var Parse = require('parse/node').Parse;
-var OAuth = function(options) {
+var OAuth = function (options) {
if (!options) {
- throw new Parse.Error(
- Parse.Error.INTERNAL_SERVER_ERROR,
- 'No options passed to OAuth'
- );
+ throw new Parse.Error(Parse.Error.INTERNAL_SERVER_ERROR, 'No options passed to OAuth');
}
this.consumer_key = options.consumer_key;
this.consumer_secret = options.consumer_secret;
@@ -17,22 +14,22 @@ var OAuth = function(options) {
this.oauth_params = options.oauth_params || {};
};
-OAuth.prototype.send = function(method, path, params, body) {
+OAuth.prototype.send = function (method, path, params, body) {
var request = this.buildRequest(method, path, params, body);
  // Encode the body properly; the current Parse implementation doesn't do it properly
- return new Promise(function(resolve, reject) {
+ return new Promise(function (resolve, reject) {
var httpRequest = https
- .request(request, function(res) {
+ .request(request, function (res) {
var data = '';
- res.on('data', function(chunk) {
+ res.on('data', function (chunk) {
data += chunk;
});
- res.on('end', function() {
+ res.on('end', function () {
data = JSON.parse(data);
resolve(data);
});
})
- .on('error', function() {
+ .on('error', function () {
reject('Failed to make an OAuth request');
});
if (request.body) {
@@ -42,7 +39,7 @@ OAuth.prototype.send = function(method, path, params, body) {
});
};
-OAuth.prototype.buildRequest = function(method, path, params, body) {
+OAuth.prototype.buildRequest = function (method, path, params, body) {
if (path.indexOf('/') != 0) {
path = '/' + path;
}
@@ -62,12 +59,7 @@ OAuth.prototype.buildRequest = function(method, path, params, body) {
oauth_params['oauth_token'] = this.auth_token;
}
- request = OAuth.signRequest(
- request,
- oauth_params,
- this.consumer_secret,
- this.auth_token_secret
- );
+ request = OAuth.signRequest(request, oauth_params, this.consumer_secret, this.auth_token_secret);
if (body && Object.keys(body).length > 0) {
request.body = OAuth.buildParameterString(body);
@@ -75,18 +67,18 @@ OAuth.prototype.buildRequest = function(method, path, params, body) {
return request;
};
-OAuth.prototype.get = function(path, params) {
+OAuth.prototype.get = function (path, params) {
return this.send('GET', path, params);
};
-OAuth.prototype.post = function(path, params, body) {
+OAuth.prototype.post = function (path, params, body) {
return this.send('POST', path, params, body);
};
/*
Proper string %escape encoding
*/
-OAuth.encode = function(str) {
+OAuth.encode = function (str) {
// discuss at: http://phpjs.org/functions/rawurlencode/
// original by: Brett Zamir (http://brett-zamir.me)
// input by: travc
@@ -126,25 +118,23 @@ OAuth.version = '1.0';
/*
Generate a nonce
*/
-OAuth.nonce = function() {
+OAuth.nonce = function () {
var text = '';
- var possible =
- 'ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789';
+ var possible = 'ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789';
- for (var i = 0; i < 30; i++)
- text += possible.charAt(Math.floor(Math.random() * possible.length));
+ for (var i = 0; i < 30; i++) text += possible.charAt(Math.floor(Math.random() * possible.length));
return text;
};
-OAuth.buildParameterString = function(obj) {
+OAuth.buildParameterString = function (obj) {
// Sort keys and encode values
if (obj) {
var keys = Object.keys(obj).sort();
// Map key=value, join them by &
return keys
- .map(function(key) {
+ .map(function (key) {
return key + '=' + OAuth.encode(obj[key]);
})
.join('&');
@@ -157,33 +147,19 @@ OAuth.buildParameterString = function(obj) {
Build the signature string from the object
*/
-OAuth.buildSignatureString = function(method, url, parameters) {
- return [
- method.toUpperCase(),
- OAuth.encode(url),
- OAuth.encode(parameters),
- ].join('&');
+OAuth.buildSignatureString = function (method, url, parameters) {
+ return [method.toUpperCase(), OAuth.encode(url), OAuth.encode(parameters)].join('&');
};
/*
 Returns encoded HMAC-SHA1 from key and text
*/
-OAuth.signature = function(text, key) {
+OAuth.signature = function (text, key) {
crypto = require('crypto');
- return OAuth.encode(
- crypto
- .createHmac('sha1', key)
- .update(text)
- .digest('base64')
- );
+ return OAuth.encode(crypto.createHmac('sha1', key).update(text).digest('base64'));
};
-OAuth.signRequest = function(
- request,
- oauth_parameters,
- consumer_secret,
- auth_token_secret
-) {
+OAuth.signRequest = function (request, oauth_parameters, consumer_secret, auth_token_secret) {
oauth_parameters = oauth_parameters || {};
// Set default values
@@ -224,16 +200,9 @@ OAuth.signRequest = function(
// Build the signature string
var url = 'https://' + request.host + '' + request.path;
- var signatureString = OAuth.buildSignatureString(
- request.method,
- url,
- parameterString
- );
+ var signatureString = OAuth.buildSignatureString(request.method, url, parameterString);
// Hash the signature string
- var signatureKey = [
- OAuth.encode(consumer_secret),
- OAuth.encode(auth_token_secret),
- ].join('&');
+ var signatureKey = [OAuth.encode(consumer_secret), OAuth.encode(auth_token_secret)].join('&');
var signature = OAuth.signature(signatureString, signatureKey);
@@ -246,7 +215,7 @@ OAuth.signRequest = function(
// Set the authorization header
var authHeader = Object.keys(oauth_parameters)
.sort()
- .map(function(key) {
+ .map(function (key) {
var value = oauth_parameters[key];
return key + '="' + value + '"';
})
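For context, the reformatted OAuth 1.0a client above is what the Twitter adapter instantiates. A hedged usage sketch, assuming the module's default export, with placeholder credentials and an illustrative endpoint:

```js
// All credential values are placeholders; the path is illustrative.
const OAuth = require('./OAuth1Client');

const client = new OAuth({
  host: 'api.twitter.com',
  consumer_key: 'CONSUMER_KEY',
  consumer_secret: 'CONSUMER_SECRET',
  auth_token: 'AUTH_TOKEN',
  auth_token_secret: 'AUTH_TOKEN_SECRET',
});

// Each request is nonce'd, signed with HMAC-SHA1, and sent over https.
client
  .get('/1.1/account/verify_credentials.json')
  .then(data => console.log(data.id_str))
  .catch(console.error);
```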
diff --git a/src/Adapters/Auth/apple.js b/src/Adapters/Auth/apple.js
index 2731183b7f..18989a4529 100644
--- a/src/Adapters/Auth/apple.js
+++ b/src/Adapters/Auth/apple.js
@@ -33,24 +33,15 @@ const getAppleKeyByKeyId = async (keyId, cacheMaxEntries, cacheMaxAge) => {
const getHeaderFromToken = token => {
const decodedToken = jwt.decode(token, { complete: true });
if (!decodedToken) {
- throw new Parse.Error(
- Parse.Error.OBJECT_NOT_FOUND,
- `provided token does not decode as JWT`
- );
+ throw new Parse.Error(Parse.Error.OBJECT_NOT_FOUND, `provided token does not decode as JWT`);
}
return decodedToken.header;
};
-const verifyIdToken = async (
- { token, id },
- { clientId, cacheMaxEntries, cacheMaxAge }
-) => {
+const verifyIdToken = async ({ token, id }, { clientId, cacheMaxEntries, cacheMaxAge }) => {
if (!token) {
- throw new Parse.Error(
- Parse.Error.OBJECT_NOT_FOUND,
- `id token is invalid for this user.`
- );
+ throw new Parse.Error(Parse.Error.OBJECT_NOT_FOUND, `id token is invalid for this user.`);
}
const { kid: keyId, alg: algorithm } = getHeaderFromToken(token);
@@ -60,11 +51,7 @@ const verifyIdToken = async (
cacheMaxAge = cacheMaxAge || ONE_HOUR_IN_MS;
cacheMaxEntries = cacheMaxEntries || 5;
- const appleKey = await getAppleKeyByKeyId(
- keyId,
- cacheMaxEntries,
- cacheMaxAge
- );
+ const appleKey = await getAppleKeyByKeyId(keyId, cacheMaxEntries, cacheMaxAge);
const signingKey = appleKey.publicKey || appleKey.rsaPublicKey;
try {
@@ -87,10 +74,7 @@ const verifyIdToken = async (
}
if (jwtClaims.sub !== id) {
- throw new Parse.Error(
- Parse.Error.OBJECT_NOT_FOUND,
- `auth data is invalid for this user.`
- );
+ throw new Parse.Error(Parse.Error.OBJECT_NOT_FOUND, `auth data is invalid for this user.`);
}
return jwtClaims;
};
diff --git a/src/Adapters/Auth/facebook.js b/src/Adapters/Auth/facebook.js
index 1ee0147aa2..3e3d79b3c3 100644
--- a/src/Adapters/Auth/facebook.js
+++ b/src/Adapters/Auth/facebook.js
@@ -19,20 +19,12 @@ function getAppSecretPath(authData, options = {}) {
// Returns a promise that fulfills iff this user id is valid.
function validateAuthData(authData, options) {
return graphRequest(
- 'me?fields=id&access_token=' +
- authData.access_token +
- getAppSecretPath(authData, options)
+ 'me?fields=id&access_token=' + authData.access_token + getAppSecretPath(authData, options)
).then(data => {
- if (
- (data && data.id == authData.id) ||
- (process.env.TESTING && authData.id === 'test')
- ) {
+ if ((data && data.id == authData.id) || (process.env.TESTING && authData.id === 'test')) {
return;
}
- throw new Parse.Error(
- Parse.Error.OBJECT_NOT_FOUND,
- 'Facebook auth is invalid for this user.'
- );
+ throw new Parse.Error(Parse.Error.OBJECT_NOT_FOUND, 'Facebook auth is invalid for this user.');
});
}
@@ -43,10 +35,7 @@ function validateAppId(appIds, authData, options) {
return Promise.resolve();
}
if (!appIds.length) {
- throw new Parse.Error(
- Parse.Error.OBJECT_NOT_FOUND,
- 'Facebook auth is not configured.'
- );
+ throw new Parse.Error(Parse.Error.OBJECT_NOT_FOUND, 'Facebook auth is not configured.');
}
return graphRequest(
'app?access_token=' + access_token + getAppSecretPath(authData, options)
@@ -54,10 +43,7 @@ function validateAppId(appIds, authData, options) {
if (data && appIds.indexOf(data.id) != -1) {
return;
}
- throw new Parse.Error(
- Parse.Error.OBJECT_NOT_FOUND,
- 'Facebook auth is invalid for this user.'
- );
+ throw new Parse.Error(Parse.Error.OBJECT_NOT_FOUND, 'Facebook auth is invalid for this user.');
});
}
diff --git a/src/Adapters/Auth/gcenter.js b/src/Adapters/Auth/gcenter.js
index 20c453db9f..090b9fab02 100644
--- a/src/Adapters/Auth/gcenter.js
+++ b/src/Adapters/Auth/gcenter.js
@@ -96,20 +96,14 @@ function verifySignature(publicKey, authData) {
verifier.update(authData.salt, 'base64');
if (!verifier.verify(publicKey, authData.signature, 'base64')) {
- throw new Parse.Error(
- Parse.Error.OBJECT_NOT_FOUND,
- 'Apple Game Center - invalid signature'
- );
+ throw new Parse.Error(Parse.Error.OBJECT_NOT_FOUND, 'Apple Game Center - invalid signature');
}
}
// Returns a promise that fulfills if this user id is valid.
async function validateAuthData(authData) {
if (!authData.id) {
- throw new Parse.Error(
- Parse.Error.OBJECT_NOT_FOUND,
- 'Apple Game Center - authData id missing'
- );
+ throw new Parse.Error(Parse.Error.OBJECT_NOT_FOUND, 'Apple Game Center - authData id missing');
}
authData.playerId = authData.id;
const publicKey = await getAppleCertificate(authData.publicKeyUrl);
diff --git a/src/Adapters/Auth/github.js b/src/Adapters/Auth/github.js
index c3a167fdaa..75233d53fd 100644
--- a/src/Adapters/Auth/github.js
+++ b/src/Adapters/Auth/github.js
@@ -8,10 +8,7 @@ function validateAuthData(authData) {
if (data && data.id == authData.id) {
return;
}
- throw new Parse.Error(
- Parse.Error.OBJECT_NOT_FOUND,
- 'Github auth is invalid for this user.'
- );
+ throw new Parse.Error(Parse.Error.OBJECT_NOT_FOUND, 'Github auth is invalid for this user.');
});
}
diff --git a/src/Adapters/Auth/google.js b/src/Adapters/Auth/google.js
index 75671e2c6b..8691cf9cae 100644
--- a/src/Adapters/Auth/google.js
+++ b/src/Adapters/Auth/google.js
@@ -39,9 +39,7 @@ function getGoogleKeyByKeyId(keyId) {
if (expire) {
cache = Object.assign({}, pems, {
- expiresAt: new Date(
- new Date().getTime() + Number(expire[1]) * 1000
- ),
+ expiresAt: new Date(new Date().getTime() + Number(expire[1]) * 1000),
});
}
}
@@ -57,10 +55,7 @@ function getHeaderFromToken(token) {
const decodedToken = jwt.decode(token, { complete: true });
if (!decodedToken) {
- throw new Parse.Error(
- Parse.Error.OBJECT_NOT_FOUND,
- `provided token does not decode as JWT`
- );
+ throw new Parse.Error(Parse.Error.OBJECT_NOT_FOUND, `provided token does not decode as JWT`);
}
return decodedToken.header;
@@ -68,10 +63,7 @@ function getHeaderFromToken(token) {
async function verifyIdToken({ id_token: token, id }, { clientId }) {
if (!token) {
- throw new Parse.Error(
- Parse.Error.OBJECT_NOT_FOUND,
- `id token is invalid for this user.`
- );
+ throw new Parse.Error(Parse.Error.OBJECT_NOT_FOUND, `id token is invalid for this user.`);
}
const { kid: keyId, alg: algorithm } = getHeaderFromToken(token);
@@ -96,10 +88,7 @@ async function verifyIdToken({ id_token: token, id }, { clientId }) {
}
if (jwtClaims.sub !== id) {
- throw new Parse.Error(
- Parse.Error.OBJECT_NOT_FOUND,
- `auth data is invalid for this user.`
- );
+ throw new Parse.Error(Parse.Error.OBJECT_NOT_FOUND, `auth data is invalid for this user.`);
}
if (clientId && jwtClaims.aud !== clientId) {
@@ -140,9 +129,7 @@ function rsaPublicKeyToPEM(modulusB64, exponentB64) {
const encodedExplen = encodeLengthHex(explen);
const encodedPubkey =
'30' +
- encodeLengthHex(
- modlen + explen + encodedModlen.length / 2 + encodedExplen.length / 2 + 2
- ) +
+ encodeLengthHex(modlen + explen + encodedModlen.length / 2 + encodedExplen.length / 2 + 2) +
'02' +
encodedModlen +
modulusHex +
diff --git a/src/Adapters/Auth/httpsRequest.js b/src/Adapters/Auth/httpsRequest.js
index 0233904048..a198fbd318 100644
--- a/src/Adapters/Auth/httpsRequest.js
+++ b/src/Adapters/Auth/httpsRequest.js
@@ -1,7 +1,7 @@
const https = require('https');
function makeCallback(resolve, reject, noJSON) {
- return function(res) {
+ return function (res) {
let data = '';
res.on('data', chunk => {
data += chunk;
@@ -23,9 +23,7 @@ function makeCallback(resolve, reject, noJSON) {
function get(options, noJSON = false) {
return new Promise((resolve, reject) => {
- https
- .get(options, makeCallback(resolve, reject, noJSON))
- .on('error', reject);
+ https.get(options, makeCallback(resolve, reject, noJSON)).on('error', reject);
});
}
diff --git a/src/Adapters/Auth/index.js b/src/Adapters/Auth/index.js
index d0da98ab3c..00637d1131 100755
--- a/src/Adapters/Auth/index.js
+++ b/src/Adapters/Auth/index.js
@@ -92,11 +92,7 @@ function loadAuthAdapter(provider, authOptions) {
// Try the configuration methods
if (providerOptions) {
- const optionalAdapter = loadAdapter(
- providerOptions,
- undefined,
- providerOptions
- );
+ const optionalAdapter = loadAdapter(providerOptions, undefined, providerOptions);
if (optionalAdapter) {
['validateAuthData', 'validateAppId'].forEach(key => {
if (optionalAdapter[key]) {
@@ -128,10 +124,7 @@ module.exports = function (authOptions = {}, enableAnonymousUsers = true) {
return;
}
- const { adapter, appIds, providerOptions } = loadAuthAdapter(
- provider,
- authOptions
- );
+ const { adapter, appIds, providerOptions } = loadAuthAdapter(provider, authOptions);
return authDataValidator(adapter, appIds, providerOptions);
};
diff --git a/src/Adapters/Auth/instagram.js b/src/Adapters/Auth/instagram.js
index 0c1379d4ca..6d61413bf0 100644
--- a/src/Adapters/Auth/instagram.js
+++ b/src/Adapters/Auth/instagram.js
@@ -11,10 +11,7 @@ function validateAuthData(authData) {
if (response && response.data && response.data.id == authData.id) {
return;
}
- throw new Parse.Error(
- Parse.Error.OBJECT_NOT_FOUND,
- 'Instagram auth is invalid for this user.'
- );
+ throw new Parse.Error(Parse.Error.OBJECT_NOT_FOUND, 'Instagram auth is invalid for this user.');
});
}
diff --git a/src/Adapters/Auth/janraincapture.js b/src/Adapters/Auth/janraincapture.js
index fbff3c2421..01670e84aa 100644
--- a/src/Adapters/Auth/janraincapture.js
+++ b/src/Adapters/Auth/janraincapture.js
@@ -5,19 +5,17 @@ const httpsRequest = require('./httpsRequest');
// Returns a promise that fulfills iff this user id is valid.
function validateAuthData(authData, options) {
- return request(options.janrain_capture_host, authData.access_token).then(
- data => {
- //successful response will have a "stat" (status) of 'ok' and a result node that stores the uuid, because that's all we asked for
- //see: https://docs.janrain.com/api/registration/entity/#entity
- if (data && data.stat == 'ok' && data.result == authData.id) {
- return;
- }
- throw new Parse.Error(
- Parse.Error.OBJECT_NOT_FOUND,
- 'Janrain capture auth is invalid for this user.'
- );
+ return request(options.janrain_capture_host, authData.access_token).then(data => {
+ //successful response will have a "stat" (status) of 'ok' and a result node that stores the uuid, because that's all we asked for
+ //see: https://docs.janrain.com/api/registration/entity/#entity
+ if (data && data.stat == 'ok' && data.result == authData.id) {
+ return;
}
- );
+ throw new Parse.Error(
+ Parse.Error.OBJECT_NOT_FOUND,
+ 'Janrain capture auth is invalid for this user.'
+ );
+ });
}
// Returns a promise that fulfills iff this app id is valid.
diff --git a/src/Adapters/Auth/keycloak.js b/src/Adapters/Auth/keycloak.js
index 1223eac36b..037542f7af 100644
--- a/src/Adapters/Auth/keycloak.js
+++ b/src/Adapters/Auth/keycloak.js
@@ -37,12 +37,7 @@ const { Parse } = require('parse/node');
const httpsRequest = require('./httpsRequest');
const arraysEqual = (_arr1, _arr2) => {
- if (
- !Array.isArray(_arr1) ||
- !Array.isArray(_arr2) ||
- _arr1.length !== _arr2.length
- )
- return false;
+ if (!Array.isArray(_arr1) || !Array.isArray(_arr2) || _arr1.length !== _arr2.length) return false;
var arr1 = _arr1.concat().sort();
var arr2 = _arr2.concat().sort();
@@ -54,21 +49,12 @@ const arraysEqual = (_arr1, _arr2) => {
return true;
};
-const handleAuth = async (
- { access_token, id, roles, groups } = {},
- { config } = {}
-) => {
+const handleAuth = async ({ access_token, id, roles, groups } = {}, { config } = {}) => {
if (!(access_token && id)) {
- throw new Parse.Error(
- Parse.Error.OBJECT_NOT_FOUND,
- 'Missing access token and/or User id'
- );
+ throw new Parse.Error(Parse.Error.OBJECT_NOT_FOUND, 'Missing access token and/or User id');
}
if (!config || !(config['auth-server-url'] && config['realm'])) {
- throw new Parse.Error(
- Parse.Error.OBJECT_NOT_FOUND,
- 'Missing keycloak configuration'
- );
+ throw new Parse.Error(Parse.Error.OBJECT_NOT_FOUND, 'Missing keycloak configuration');
}
try {
const response = await httpsRequest.get({
@@ -87,10 +73,7 @@ const handleAuth = async (
) {
return;
}
- throw new Parse.Error(
- Parse.Error.OBJECT_NOT_FOUND,
- 'Invalid authentication'
- );
+ throw new Parse.Error(Parse.Error.OBJECT_NOT_FOUND, 'Invalid authentication');
} catch (e) {
if (e instanceof Parse.Error) {
throw e;
diff --git a/src/Adapters/Auth/ldap.js b/src/Adapters/Auth/ldap.js
index a0fa637ddd..38a64927ff 100644
--- a/src/Adapters/Auth/ldap.js
+++ b/src/Adapters/Auth/ldap.js
@@ -4,16 +4,12 @@ const Parse = require('parse/node').Parse;
function validateAuthData(authData, options) {
if (!optionsAreValid(options)) {
return new Promise((_, reject) => {
- reject(
- new Parse.Error(
- Parse.Error.INTERNAL_SERVER_ERROR,
- 'LDAP auth configuration missing'
- )
- );
+ reject(new Parse.Error(Parse.Error.INTERNAL_SERVER_ERROR, 'LDAP auth configuration missing'));
});
}
- const clientOptions = (options.url.startsWith("ldaps://")) ?
- { url: options.url, tlsOptions: options.tlsOptions } : { url: options.url };
+ const clientOptions = options.url.startsWith('ldaps://')
+ ? { url: options.url, tlsOptions: options.tlsOptions }
+ : { url: options.url };
const client = ldapjs.createClient(clientOptions);
const userCn =
@@ -23,28 +19,31 @@ function validateAuthData(authData, options) {
return new Promise((resolve, reject) => {
client.bind(userCn, authData.password, ldapError => {
- delete(authData.password);
+ delete authData.password;
if (ldapError) {
let error;
switch (ldapError.code) {
case 49:
- error = new Parse.Error(Parse.Error.OBJECT_NOT_FOUND, 'LDAP: Wrong username or password');
+ error = new Parse.Error(
+ Parse.Error.OBJECT_NOT_FOUND,
+ 'LDAP: Wrong username or password'
+ );
break;
- case "DEPTH_ZERO_SELF_SIGNED_CERT":
+ case 'DEPTH_ZERO_SELF_SIGNED_CERT':
error = new Parse.Error(Parse.Error.OBJECT_NOT_FOUND, 'LDAPS: Certificate mismatch');
break;
default:
- error = new Parse.Error(Parse.Error.OBJECT_NOT_FOUND, 'LDAP: Somthing went wrong (' + ldapError.code + ')');
+ error = new Parse.Error(
+ Parse.Error.OBJECT_NOT_FOUND,
+              'LDAP: Something went wrong (' + ldapError.code + ')'
+ );
}
reject(error);
client.destroy(ldapError);
return;
}
- if (
- typeof options.groupCn === 'string' &&
- typeof options.groupFilter === 'string'
- ) {
+ if (typeof options.groupCn === 'string' && typeof options.groupFilter === 'string') {
searchForGroup(client, options, authData.id, resolve, reject);
} else {
client.unbind();
@@ -61,7 +60,7 @@ function optionsAreValid(options) {
typeof options.suffix === 'string' &&
typeof options.url === 'string' &&
(options.url.startsWith('ldap://') ||
- options.url.startsWith('ldaps://') && typeof options.tlsOptions === 'object')
+ (options.url.startsWith('ldaps://') && typeof options.tlsOptions === 'object'))
);
}
@@ -76,12 +75,7 @@ function searchForGroup(client, options, id, resolve, reject) {
if (searchError) {
client.unbind();
client.destroy();
- return reject(
- new Parse.Error(
- Parse.Error.INTERNAL_SERVER_ERROR,
- 'LDAP group search failed'
- )
- );
+ return reject(new Parse.Error(Parse.Error.INTERNAL_SERVER_ERROR, 'LDAP group search failed'));
}
res.on('searchEntry', entry => {
if (entry.object.cn === options.groupCn) {
@@ -96,20 +90,12 @@ function searchForGroup(client, options, id, resolve, reject) {
client.unbind();
client.destroy();
return reject(
- new Parse.Error(
- Parse.Error.INTERNAL_SERVER_ERROR,
- 'LDAP: User not in group'
- )
+ new Parse.Error(Parse.Error.INTERNAL_SERVER_ERROR, 'LDAP: User not in group')
);
}
});
res.on('error', () => {
- return reject(
- new Parse.Error(
- Parse.Error.INTERNAL_SERVER_ERROR,
- 'LDAP group search failed'
- )
- );
+ return reject(new Parse.Error(Parse.Error.INTERNAL_SERVER_ERROR, 'LDAP group search failed'));
});
});
}
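The rewritten `clientOptions` above only forwards `tlsOptions` when the URL uses the `ldaps://` scheme, which `optionsAreValid` also requires. A hypothetical options object for this adapter (hosts and DNs are placeholders):

```js
// Placeholder LDAP configuration; rejectUnauthorized: false is one way to
// sidestep the DEPTH_ZERO_SELF_SIGNED_CERT case handled above (test setups only).
const ldapOptions = {
  url: 'ldaps://ldap.example.com',
  suffix: 'ou=users,dc=example,dc=com',
  dn: 'uid={{id}}, ou=users, dc=example, dc=com',
  tlsOptions: { rejectUnauthorized: false },
};
```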
diff --git a/src/Adapters/Auth/line.js b/src/Adapters/Auth/line.js
index e1a5584933..d773323f70 100644
--- a/src/Adapters/Auth/line.js
+++ b/src/Adapters/Auth/line.js
@@ -8,10 +8,7 @@ function validateAuthData(authData) {
if (response && response.userId && response.userId === authData.id) {
return;
}
- throw new Parse.Error(
- Parse.Error.OBJECT_NOT_FOUND,
- 'Line auth is invalid for this user.'
- );
+ throw new Parse.Error(Parse.Error.OBJECT_NOT_FOUND, 'Line auth is invalid for this user.');
});
}
diff --git a/src/Adapters/Auth/linkedin.js b/src/Adapters/Auth/linkedin.js
index ede1c5df5f..4faa2eb2a9 100644
--- a/src/Adapters/Auth/linkedin.js
+++ b/src/Adapters/Auth/linkedin.js
@@ -4,17 +4,12 @@ const httpsRequest = require('./httpsRequest');
// Returns a promise that fulfills iff this user id is valid.
function validateAuthData(authData) {
- return request('me', authData.access_token, authData.is_mobile_sdk).then(
- data => {
- if (data && data.id == authData.id) {
- return;
- }
- throw new Parse.Error(
- Parse.Error.OBJECT_NOT_FOUND,
- 'Linkedin auth is invalid for this user.'
- );
+ return request('me', authData.access_token, authData.is_mobile_sdk).then(data => {
+ if (data && data.id == authData.id) {
+ return;
}
- );
+ throw new Parse.Error(Parse.Error.OBJECT_NOT_FOUND, 'Linkedin auth is invalid for this user.');
+ });
}
// Returns a promise that fulfills iff this app id is valid.
diff --git a/src/Adapters/Auth/meetup.js b/src/Adapters/Auth/meetup.js
index d949a65e4a..93dc1d48ad 100644
--- a/src/Adapters/Auth/meetup.js
+++ b/src/Adapters/Auth/meetup.js
@@ -8,10 +8,7 @@ function validateAuthData(authData) {
if (data && data.id == authData.id) {
return;
}
- throw new Parse.Error(
- Parse.Error.OBJECT_NOT_FOUND,
- 'Meetup auth is invalid for this user.'
- );
+ throw new Parse.Error(Parse.Error.OBJECT_NOT_FOUND, 'Meetup auth is invalid for this user.');
});
}
diff --git a/src/Adapters/Auth/microsoft.js b/src/Adapters/Auth/microsoft.js
index 1574045528..9f4f5c4ea4 100644
--- a/src/Adapters/Auth/microsoft.js
+++ b/src/Adapters/Auth/microsoft.js
@@ -4,17 +4,15 @@ const httpsRequest = require('./httpsRequest');
// Returns a promise that fulfills if this user mail is valid.
function validateAuthData(authData) {
- return request('me', authData.access_token).then(
- response => {
- if (response && response.id && response.id == authData.id) {
- return;
- }
- throw new Parse.Error(
- Parse.Error.OBJECT_NOT_FOUND,
- 'Microsoft Graph auth is invalid for this user.'
- );
+ return request('me', authData.access_token).then(response => {
+ if (response && response.id && response.id == authData.id) {
+ return;
}
- );
+ throw new Parse.Error(
+ Parse.Error.OBJECT_NOT_FOUND,
+ 'Microsoft Graph auth is invalid for this user.'
+ );
+ });
}
// Returns a promise that fulfills if this app id is valid.
diff --git a/src/Adapters/Auth/oauth2.js b/src/Adapters/Auth/oauth2.js
index 80564d5b32..cefe7bdff2 100644
--- a/src/Adapters/Auth/oauth2.js
+++ b/src/Adapters/Auth/oauth2.js
@@ -63,8 +63,7 @@ const INVALID_ACCESS_APPID =
"OAuth2: the access_token's appID is empty or is not in the list of permitted appIDs in the auth configuration.";
const MISSING_APPIDS =
'OAuth2 configuration is missing the client app IDs ("appIds" config parameter).';
-const MISSING_URL =
- 'OAuth2 token introspection endpoint URL is missing from configuration!';
+const MISSING_URL = 'OAuth2 token introspection endpoint URL is missing from configuration!';
// Returns a promise that fulfills if this user id is valid.
function validateAuthData(authData, options) {
diff --git a/src/Adapters/Auth/phantauth.js b/src/Adapters/Auth/phantauth.js
index 1fca7e9794..a7fba68dc5 100644
--- a/src/Adapters/Auth/phantauth.js
+++ b/src/Adapters/Auth/phantauth.js
@@ -14,10 +14,7 @@ function validateAuthData(authData) {
if (data && data.sub == authData.id) {
return;
}
- throw new Parse.Error(
- Parse.Error.OBJECT_NOT_FOUND,
- 'PhantAuth auth is invalid for this user.'
- );
+ throw new Parse.Error(Parse.Error.OBJECT_NOT_FOUND, 'PhantAuth auth is invalid for this user.');
});
}
diff --git a/src/Adapters/Auth/qq.js b/src/Adapters/Auth/qq.js
index 45e776665e..dddc7cc7a3 100644
--- a/src/Adapters/Auth/qq.js
+++ b/src/Adapters/Auth/qq.js
@@ -4,16 +4,11 @@ var Parse = require('parse/node').Parse;
// Returns a promise that fulfills iff this user id is valid.
function validateAuthData(authData) {
- return graphRequest('me?access_token=' + authData.access_token).then(function(
- data
- ) {
+ return graphRequest('me?access_token=' + authData.access_token).then(function (data) {
if (data && data.openid == authData.id) {
return;
}
- throw new Parse.Error(
- Parse.Error.OBJECT_NOT_FOUND,
- 'qq auth is invalid for this user.'
- );
+ throw new Parse.Error(Parse.Error.OBJECT_NOT_FOUND, 'qq auth is invalid for this user.');
});
}
@@ -24,21 +19,16 @@ function validateAppId() {
// A promisey wrapper for qq graph requests.
function graphRequest(path) {
- return httpsRequest
- .get('https://graph.qq.com/oauth2.0/' + path, true)
- .then(data => {
- return parseResponseData(data);
- });
+ return httpsRequest.get('https://graph.qq.com/oauth2.0/' + path, true).then(data => {
+ return parseResponseData(data);
+ });
}
function parseResponseData(data) {
const starPos = data.indexOf('(');
const endPos = data.indexOf(')');
if (starPos == -1 || endPos == -1) {
- throw new Parse.Error(
- Parse.Error.OBJECT_NOT_FOUND,
- 'qq auth is invalid for this user.'
- );
+ throw new Parse.Error(Parse.Error.OBJECT_NOT_FOUND, 'qq auth is invalid for this user.');
}
data = data.substring(starPos + 1, endPos - 1);
return JSON.parse(data);
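The substring arithmetic above strips QQ's JSONP-style wrapper before parsing. A worked example with an illustrative response body:

```js
// QQ answers openid lookups with a JSONP wrapper, e.g.:
//   callback( {"client_id":"APPID","openid":"OPENID"} );
const data = 'callback( {"client_id":"APPID","openid":"OPENID"} );';
const starPos = data.indexOf('(');   // names mirror the code above
const endPos = data.indexOf(')');
// Keeps the JSON between the parentheses; JSON.parse ignores the
// surrounding whitespace that remains.
const json = data.substring(starPos + 1, endPos - 1);
console.log(JSON.parse(json).openid); // 'OPENID'
```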
diff --git a/src/Adapters/Auth/spotify.js b/src/Adapters/Auth/spotify.js
index 1bafc44ba3..69e6cbae2d 100644
--- a/src/Adapters/Auth/spotify.js
+++ b/src/Adapters/Auth/spotify.js
@@ -8,10 +8,7 @@ function validateAuthData(authData) {
if (data && data.id == authData.id) {
return;
}
- throw new Parse.Error(
- Parse.Error.OBJECT_NOT_FOUND,
- 'Spotify auth is invalid for this user.'
- );
+ throw new Parse.Error(Parse.Error.OBJECT_NOT_FOUND, 'Spotify auth is invalid for this user.');
});
}
@@ -19,19 +16,13 @@ function validateAuthData(authData) {
function validateAppId(appIds, authData) {
var access_token = authData.access_token;
if (!appIds.length) {
- throw new Parse.Error(
- Parse.Error.OBJECT_NOT_FOUND,
- 'Spotify auth is not configured.'
- );
+ throw new Parse.Error(Parse.Error.OBJECT_NOT_FOUND, 'Spotify auth is not configured.');
}
return request('me', access_token).then(data => {
if (data && appIds.indexOf(data.id) != -1) {
return;
}
- throw new Parse.Error(
- Parse.Error.OBJECT_NOT_FOUND,
- 'Spotify auth is invalid for this user.'
- );
+ throw new Parse.Error(Parse.Error.OBJECT_NOT_FOUND, 'Spotify auth is invalid for this user.');
});
}
diff --git a/src/Adapters/Auth/twitter.js b/src/Adapters/Auth/twitter.js
index b6dd49a3fb..eac83cbed4 100644
--- a/src/Adapters/Auth/twitter.js
+++ b/src/Adapters/Auth/twitter.js
@@ -5,10 +5,7 @@ var Parse = require('parse/node').Parse;
// Returns a promise that fulfills iff this user id is valid.
function validateAuthData(authData, options) {
if (!options) {
- throw new Parse.Error(
- Parse.Error.INTERNAL_SERVER_ERROR,
- 'Twitter auth configuration missing'
- );
+ throw new Parse.Error(Parse.Error.INTERNAL_SERVER_ERROR, 'Twitter auth configuration missing');
}
options = handleMultipleConfigurations(authData, options);
var client = new OAuth(options);
@@ -20,10 +17,7 @@ function validateAuthData(authData, options) {
if (data && data.id_str == '' + authData.id) {
return;
}
- throw new Parse.Error(
- Parse.Error.OBJECT_NOT_FOUND,
- 'Twitter auth is invalid for this user.'
- );
+ throw new Parse.Error(Parse.Error.OBJECT_NOT_FOUND, 'Twitter auth is invalid for this user.');
});
}
@@ -36,20 +30,14 @@ function handleMultipleConfigurations(authData, options) {
if (Array.isArray(options)) {
const consumer_key = authData.consumer_key;
if (!consumer_key) {
- throw new Parse.Error(
- Parse.Error.OBJECT_NOT_FOUND,
- 'Twitter auth is invalid for this user.'
- );
+ throw new Parse.Error(Parse.Error.OBJECT_NOT_FOUND, 'Twitter auth is invalid for this user.');
}
options = options.filter(option => {
return option.consumer_key == consumer_key;
});
if (options.length == 0) {
- throw new Parse.Error(
- Parse.Error.OBJECT_NOT_FOUND,
- 'Twitter auth is invalid for this user.'
- );
+ throw new Parse.Error(Parse.Error.OBJECT_NOT_FOUND, 'Twitter auth is invalid for this user.');
}
options = options[0];
}
diff --git a/src/Adapters/Auth/vkontakte.js b/src/Adapters/Auth/vkontakte.js
index fe9913ab8c..46fd1248ae 100644
--- a/src/Adapters/Auth/vkontakte.js
+++ b/src/Adapters/Auth/vkontakte.js
@@ -11,10 +11,7 @@ function validateAuthData(authData, params) {
if (response && response.access_token) {
return request(
'api.vk.com',
- 'method/users.get?access_token=' +
- authData.access_token +
- '&v=' +
- params.apiVersion
+ 'method/users.get?access_token=' + authData.access_token + '&v=' + params.apiVersion
).then(function (response) {
if (
response &&
@@ -24,16 +21,10 @@ function validateAuthData(authData, params) {
) {
return;
}
- throw new Parse.Error(
- Parse.Error.OBJECT_NOT_FOUND,
- 'Vk auth is invalid for this user.'
- );
+ throw new Parse.Error(Parse.Error.OBJECT_NOT_FOUND, 'Vk auth is invalid for this user.');
});
}
- throw new Parse.Error(
- Parse.Error.OBJECT_NOT_FOUND,
- 'Vk appIds or appSecret is incorrect.'
- );
+ throw new Parse.Error(Parse.Error.OBJECT_NOT_FOUND, 'Vk appIds or appSecret is incorrect.');
});
}
diff --git a/src/Adapters/Auth/wechat.js b/src/Adapters/Auth/wechat.js
index 56c2293b52..82ddb851ef 100644
--- a/src/Adapters/Auth/wechat.js
+++ b/src/Adapters/Auth/wechat.js
@@ -4,17 +4,14 @@ var Parse = require('parse/node').Parse;
// Returns a promise that fulfills iff this user id is valid.
function validateAuthData(authData) {
- return graphRequest(
- 'auth?access_token=' + authData.access_token + '&openid=' + authData.id
- ).then(function(data) {
- if (data.errcode == 0) {
- return;
+ return graphRequest('auth?access_token=' + authData.access_token + '&openid=' + authData.id).then(
+ function (data) {
+ if (data.errcode == 0) {
+ return;
+ }
+ throw new Parse.Error(Parse.Error.OBJECT_NOT_FOUND, 'wechat auth is invalid for this user.');
}
- throw new Parse.Error(
- Parse.Error.OBJECT_NOT_FOUND,
- 'wechat auth is invalid for this user.'
- );
- });
+ );
}
// Returns a promise that fulfills if this app id is valid.
diff --git a/src/Adapters/Auth/weibo.js b/src/Adapters/Auth/weibo.js
index bcdaf11a36..a29c3872df 100644
--- a/src/Adapters/Auth/weibo.js
+++ b/src/Adapters/Auth/weibo.js
@@ -5,14 +5,11 @@ var querystring = require('querystring');
// Returns a promise that fulfills iff this user id is valid.
function validateAuthData(authData) {
- return graphRequest(authData.access_token).then(function(data) {
+ return graphRequest(authData.access_token).then(function (data) {
if (data && data.uid == authData.id) {
return;
}
- throw new Parse.Error(
- Parse.Error.OBJECT_NOT_FOUND,
- 'weibo auth is invalid for this user.'
- );
+ throw new Parse.Error(Parse.Error.OBJECT_NOT_FOUND, 'weibo auth is invalid for this user.');
});
}
diff --git a/src/Adapters/Cache/RedisCacheAdapter/index.js b/src/Adapters/Cache/RedisCacheAdapter/index.js
index 0619d1ebcb..17662628af 100644
--- a/src/Adapters/Cache/RedisCacheAdapter/index.js
+++ b/src/Adapters/Cache/RedisCacheAdapter/index.js
@@ -9,7 +9,7 @@ function debug() {
logger.debug.apply(logger, ['RedisCacheAdapter', ...arguments]);
}
-const isValidTTL = (ttl) => typeof ttl === 'number' && ttl > 0;
+const isValidTTL = ttl => typeof ttl === 'number' && ttl > 0;
export class RedisCacheAdapter {
constructor(redisCtx, ttl = DEFAULT_REDIS_TTL) {
@@ -22,8 +22,8 @@ export class RedisCacheAdapter {
if (!this.client) {
return Promise.resolve();
}
- return new Promise((resolve) => {
- this.client.quit((err) => {
+ return new Promise(resolve => {
+ this.client.quit(err => {
if (err) {
logger.error('RedisCacheAdapter error on shutdown', { error: err });
}
@@ -37,7 +37,7 @@ export class RedisCacheAdapter {
return this.queue.enqueue(
key,
() =>
- new Promise((resolve) => {
+ new Promise(resolve => {
this.client.get(key, function (err, res) {
debug('-> get', key, res);
if (!res) {
@@ -62,7 +62,7 @@ export class RedisCacheAdapter {
return this.queue.enqueue(
key,
() =>
- new Promise((resolve) => {
+ new Promise(resolve => {
this.client.set(key, value, function () {
resolve();
});
@@ -77,7 +77,7 @@ export class RedisCacheAdapter {
return this.queue.enqueue(
key,
() =>
- new Promise((resolve) => {
+ new Promise(resolve => {
this.client.psetex(key, ttl, value, function () {
resolve();
});
@@ -90,7 +90,7 @@ export class RedisCacheAdapter {
return this.queue.enqueue(
key,
() =>
- new Promise((resolve) => {
+ new Promise(resolve => {
this.client.del(key, function () {
resolve();
});
@@ -103,7 +103,7 @@ export class RedisCacheAdapter {
return this.queue.enqueue(
FLUSH_DB_KEY,
() =>
- new Promise((resolve) => {
+ new Promise(resolve => {
this.client.flushdb(function () {
resolve();
});
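Every operation above shares the same shape: enqueue per key, wrap the node_redis callback in a promise, and treat failures as misses. A minimal sketch of that wrapper in isolation, assuming a callback-style `client.get(key, (err, res) => ...)`:

```js
// Sketch of the callback-to-promise wrapping used by the adapter above.
function promisifiedGet(client, key) {
  return new Promise(resolve => {
    client.get(key, (err, res) => {
      // Misses and errors resolve to null instead of rejecting, keeping
      // the cache best-effort for its callers.
      if (err || !res) {
        return resolve(null);
      }
      resolve(JSON.parse(res));
    });
  });
}
```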
diff --git a/src/Adapters/Files/FilesAdapter.js b/src/Adapters/Files/FilesAdapter.js
index 5aac18d1ab..68666b919a 100644
--- a/src/Adapters/Files/FilesAdapter.js
+++ b/src/Adapters/Files/FilesAdapter.js
@@ -38,12 +38,7 @@ export class FilesAdapter {
*
* @return {Promise} a promise that should fail if the storage didn't succeed
*/
- createFile(
- filename: string,
- data,
- contentType: string,
- options: Object
- ): Promise {}
+ createFile(filename: string, data, contentType: string, options: Object): Promise {}
/** Responsible for deleting the specified file
*
@@ -111,10 +106,7 @@ export function validateFilename(filename): ?Parse.Error {
const regx = /^[_a-zA-Z0-9][a-zA-Z0-9@. ~_-]*$/;
if (!filename.match(regx)) {
- return new Parse.Error(
- Parse.Error.INVALID_FILE_NAME,
- 'Filename contains invalid characters.'
- );
+ return new Parse.Error(Parse.Error.INVALID_FILE_NAME, 'Filename contains invalid characters.');
}
return null;
}
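`validateFilename` above gates filenames with a single regex. A quick sketch of what it accepts and rejects (regex copied verbatim; the sample names are made up):

```js
const regx = /^[_a-zA-Z0-9][a-zA-Z0-9@. ~_-]*$/;
console.log(regx.test('profile-pic_1@2x.png')); // true
console.log(regx.test('../etc/passwd'));        // false: leading '.' and '/' are not allowed
```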
diff --git a/src/Adapters/Files/GridFSBucketAdapter.js b/src/Adapters/Files/GridFSBucketAdapter.js
index bf9f119f4d..84876fad6f 100644
--- a/src/Adapters/Files/GridFSBucketAdapter.js
+++ b/src/Adapters/Files/GridFSBucketAdapter.js
@@ -28,11 +28,7 @@ export class GridFSBucketAdapter extends FilesAdapter {
this._algorithm = 'aes-256-gcm';
this._encryptionKey =
encryptionKey !== undefined
- ? crypto
- .createHash('sha256')
- .update(String(encryptionKey))
- .digest('base64')
- .substr(0, 32)
+ ? crypto.createHash('sha256').update(String(encryptionKey)).digest('base64').substr(0, 32)
: null;
const defaultMongoOptions = {
useNewUrlParser: true,
@@ -43,13 +39,12 @@ export class GridFSBucketAdapter extends FilesAdapter {
_connect() {
if (!this._connectionPromise) {
- this._connectionPromise = MongoClient.connect(
- this._databaseURI,
- this._mongoOptions
- ).then(client => {
- this._client = client;
- return client.db(client.s.options.dbName);
- });
+ this._connectionPromise = MongoClient.connect(this._databaseURI, this._mongoOptions).then(
+ client => {
+ this._client = client;
+ return client.db(client.s.options.dbName);
+ }
+ );
}
return this._connectionPromise;
}
@@ -68,11 +63,7 @@ export class GridFSBucketAdapter extends FilesAdapter {
if (this._encryptionKey !== null) {
try {
const iv = crypto.randomBytes(16);
- const cipher = crypto.createCipheriv(
- this._algorithm,
- this._encryptionKey,
- iv
- );
+ const cipher = crypto.createCipheriv(this._algorithm, this._encryptionKey, iv);
const encryptedResult = Buffer.concat([
cipher.update(data),
cipher.final(),
@@ -126,16 +117,9 @@ export class GridFSBucketAdapter extends FilesAdapter {
const authTag = data.slice(authTagLocation);
const iv = data.slice(ivLocation, authTagLocation);
const encrypted = data.slice(0, ivLocation);
- const decipher = crypto.createDecipheriv(
- this._algorithm,
- this._encryptionKey,
- iv
- );
+ const decipher = crypto.createDecipheriv(this._algorithm, this._encryptionKey, iv);
decipher.setAuthTag(authTag);
- const decrypted = Buffer.concat([
- decipher.update(encrypted),
- decipher.final(),
- ]);
+ const decrypted = Buffer.concat([decipher.update(encrypted), decipher.final()]);
return resolve(decrypted);
} catch (err) {
return reject(err);
@@ -160,10 +144,7 @@ export class GridFSBucketAdapter extends FilesAdapter {
options.oldKey
);
} else {
- oldKeyFileAdapter = new GridFSBucketAdapter(
- this._databaseURI,
- this._mongoOptions
- );
+ oldKeyFileAdapter = new GridFSBucketAdapter(this._databaseURI, this._mongoOptions);
}
if (options.fileNames !== undefined) {
fileNames = options.fileNames;
@@ -186,9 +167,7 @@ export class GridFSBucketAdapter extends FilesAdapter {
this.createFile(fileName, plainTextData)
.then(() => {
fileNamesRotated.push(fileName);
- fileNamesNotRotated = fileNamesNotRotated.filter(function (
- value
- ) {
+ fileNamesNotRotated = fileNamesNotRotated.filter(function (value) {
return value !== fileName;
});
fileNameIndex += 1;
@@ -223,13 +202,7 @@ export class GridFSBucketAdapter extends FilesAdapter {
}
getFileLocation(config, filename) {
- return (
- config.mount +
- '/files/' +
- config.applicationId +
- '/' +
- encodeURIComponent(filename)
- );
+ return config.mount + '/files/' + config.applicationId + '/' + encodeURIComponent(filename);
}
async getMetadata(filename) {
diff --git a/src/Adapters/Files/GridStoreAdapter.js b/src/Adapters/Files/GridStoreAdapter.js
index 3d6bba1072..ca3166af98 100644
--- a/src/Adapters/Files/GridStoreAdapter.js
+++ b/src/Adapters/Files/GridStoreAdapter.js
@@ -30,13 +30,12 @@ export class GridStoreAdapter extends FilesAdapter {
_connect() {
if (!this._connectionPromise) {
- this._connectionPromise = MongoClient.connect(
- this._databaseURI,
- this._mongoOptions
- ).then(client => {
- this._client = client;
- return client.db(client.s.options.dbName);
- });
+ this._connectionPromise = MongoClient.connect(this._databaseURI, this._mongoOptions).then(
+ client => {
+ this._client = client;
+ return client.db(client.s.options.dbName);
+ }
+ );
}
return this._connectionPromise;
}
@@ -85,13 +84,7 @@ export class GridStoreAdapter extends FilesAdapter {
}
getFileLocation(config, filename) {
- return (
- config.mount +
- '/files/' +
- config.applicationId +
- '/' +
- encodeURIComponent(filename)
- );
+ return config.mount + '/files/' + config.applicationId + '/' + encodeURIComponent(filename);
}
async handleFileStream(filename: string, req, res, contentType) {
@@ -152,14 +145,14 @@ function handleRangeRequest(stream, req, res, contentType) {
'Content-Type': contentType,
});
- stream.seek(start, function() {
+ stream.seek(start, function () {
// Get gridFile stream
const gridFileStream = stream.stream(true);
let bufferAvail = 0;
let remainingBytesToWrite = contentLength;
let totalBytesWritten = 0;
// Write to response
- gridFileStream.on('data', function(data) {
+ gridFileStream.on('data', function (data) {
bufferAvail += data.length;
if (bufferAvail > 0) {
// slice returns the same buffer if overflowing
diff --git a/src/Adapters/Logger/WinstonLogger.js b/src/Adapters/Logger/WinstonLogger.js
index 04e0283258..98a86610c0 100644
--- a/src/Adapters/Logger/WinstonLogger.js
+++ b/src/Adapters/Logger/WinstonLogger.js
@@ -20,11 +20,7 @@ function configureTransports(options) {
{
filename: 'parse-server.info',
json: true,
- format: format.combine(
- format.timestamp(),
- format.splat(),
- format.json()
- ),
+ format: format.combine(format.timestamp(), format.splat(), format.json()),
},
options
)
@@ -37,11 +33,7 @@ function configureTransports(options) {
{
filename: 'parse-server.err',
json: true,
- format: format.combine(
- format.timestamp(),
- format.splat(),
- format.json()
- ),
+ format: format.combine(format.timestamp(), format.splat(), format.json()),
},
options,
{ level: 'error' }
@@ -120,9 +112,7 @@ export function addTransport(transport) {
export function removeTransport(transport) {
const matchingTransport = logger.transports.find(t1 => {
- return typeof transport === 'string'
- ? t1.name === transport
- : t1 === transport;
+ return typeof transport === 'string' ? t1.name === transport : t1 === transport;
});
if (matchingTransport) {
diff --git a/src/Adapters/Logger/WinstonLoggerAdapter.js b/src/Adapters/Logger/WinstonLoggerAdapter.js
index 0a662eb28d..ab866ee107 100644
--- a/src/Adapters/Logger/WinstonLoggerAdapter.js
+++ b/src/Adapters/Logger/WinstonLoggerAdapter.js
@@ -28,8 +28,7 @@ export class WinstonLoggerAdapter extends LoggerAdapter {
options = {};
}
// defaults to 7 days prior
- const from =
- options.from || new Date(Date.now() - 7 * MILLISECONDS_IN_A_DAY);
+ const from = options.from || new Date(Date.now() - 7 * MILLISECONDS_IN_A_DAY);
const until = options.until || new Date();
const limit = options.size || 10;
const order = options.order || 'desc';
diff --git a/src/Adapters/PubSub/EventEmitterPubSub.js b/src/Adapters/PubSub/EventEmitterPubSub.js
index 1ecc006e0c..277118a082 100644
--- a/src/Adapters/PubSub/EventEmitterPubSub.js
+++ b/src/Adapters/PubSub/EventEmitterPubSub.js
@@ -46,6 +46,10 @@ function createPublisher(): any {
}
function createSubscriber(): any {
+  // createSubscriber is called each time a live query server starts.
+  // To avoid a max listeners warning, remove any listeners a previous
+  // subscriber left on the shared event emitter before creating a new one.
+  emitter.removeAllListeners();
return new Subscriber(emitter);
}
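The `removeAllListeners()` added above guards against Node's MaxListenersExceededWarning when subscribers are recreated against the module's shared emitter. A self-contained demo of the warning it avoids:

```js
// Node's EventEmitter warns once more than 10 listeners (the default cap)
// accumulate for one event on a single emitter.
const { EventEmitter } = require('events');
const emitter = new EventEmitter();

for (let i = 0; i < 11; i++) {
  emitter.on('message', () => {}); // the 11th listener triggers the warning
}
emitter.removeAllListeners('message'); // the cleanup the change performs
console.log(emitter.listenerCount('message')); // 0
```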
diff --git a/src/Adapters/Storage/Mongo/MongoCollection.js b/src/Adapters/Storage/Mongo/MongoCollection.js
index 8ab24fc29b..f3f089f0f1 100644
--- a/src/Adapters/Storage/Mongo/MongoCollection.js
+++ b/src/Adapters/Storage/Mongo/MongoCollection.js
@@ -15,17 +15,7 @@ export default class MongoCollection {
// idea. Or even if this behavior is a good idea.
find(
query,
- {
- skip,
- limit,
- sort,
- keys,
- maxTimeMS,
- readPreference,
- hint,
- caseInsensitive,
- explain,
- } = {}
+ { skip, limit, sort, keys, maxTimeMS, readPreference, hint, caseInsensitive, explain } = {}
) {
// Support for Full Text Search - $text
if (keys && keys.$score) {
@@ -44,10 +34,7 @@ export default class MongoCollection {
explain,
}).catch(error => {
// Check for "no geoindex" error
- if (
- error.code != 17007 &&
- !error.message.match(/unable to find index for .geoNear/)
- ) {
+ if (error.code != 17007 && !error.message.match(/unable to find index for .geoNear/)) {
throw error;
}
// Figure out what key needs an index
@@ -88,17 +75,7 @@ export default class MongoCollection {
_rawFind(
query,
- {
- skip,
- limit,
- sort,
- keys,
- maxTimeMS,
- readPreference,
- hint,
- caseInsensitive,
- explain,
- } = {}
+ { skip, limit, sort, keys, maxTimeMS, readPreference, hint, caseInsensitive, explain } = {}
) {
let findOperation = this._mongoCollection.find(query, {
skip,
@@ -113,9 +90,7 @@ export default class MongoCollection {
}
if (caseInsensitive) {
- findOperation = findOperation.collation(
- MongoCollection.caseInsensitiveCollation()
- );
+ findOperation = findOperation.collation(MongoCollection.caseInsensitiveCollation());
}
if (maxTimeMS) {
diff --git a/src/Adapters/Storage/Mongo/MongoSchemaCollection.js b/src/Adapters/Storage/Mongo/MongoSchemaCollection.js
index d824787c36..d2f89d75d4 100644
--- a/src/Adapters/Storage/Mongo/MongoSchemaCollection.js
+++ b/src/Adapters/Storage/Mongo/MongoSchemaCollection.js
@@ -41,9 +41,7 @@ function mongoFieldToParseSchemaField(type) {
const nonFieldSchemaKeys = ['_id', '_metadata', '_client_permissions'];
function mongoSchemaFieldsToParseSchemaFields(schema) {
- var fieldNames = Object.keys(schema).filter(
- (key) => nonFieldSchemaKeys.indexOf(key) === -1
- );
+ var fieldNames = Object.keys(schema).filter(key => nonFieldSchemaKeys.indexOf(key) === -1);
var response = fieldNames.reduce((obj, fieldName) => {
obj[fieldName] = mongoFieldToParseSchemaField(schema[fieldName]);
if (
@@ -110,7 +108,7 @@ function mongoSchemaToParseSchema(mongoSchema) {
function _mongoSchemaQueryFromNameQuery(name: string, query) {
const object = { _id: name };
if (query) {
- Object.keys(query).forEach((key) => {
+ Object.keys(query).forEach(key => {
object[key] = query[key];
});
}
@@ -156,15 +154,13 @@ class MongoSchemaCollection {
}
_fetchAllSchemasFrom_SCHEMA() {
- return this._collection
- ._rawFind({})
- .then((schemas) => schemas.map(mongoSchemaToParseSchema));
+ return this._collection._rawFind({}).then(schemas => schemas.map(mongoSchemaToParseSchema));
}
_fetchOneSchemaFrom_SCHEMA(name: string) {
return this._collection
._rawFind(_mongoSchemaQueryFromNameQuery(name), { limit: 1 })
- .then((results) => {
+ .then(results => {
if (results.length === 1) {
return mongoSchemaToParseSchema(results[0]);
} else {
@@ -175,22 +171,17 @@ class MongoSchemaCollection {
// Atomically find and delete an object based on query.
findAndDeleteSchema(name: string) {
- return this._collection._mongoCollection.findOneAndDelete(
- _mongoSchemaQueryFromNameQuery(name)
- );
+ return this._collection._mongoCollection.findOneAndDelete(_mongoSchemaQueryFromNameQuery(name));
}
insertSchema(schema: any) {
return this._collection
.insertOne(schema)
- .then((result) => mongoSchemaToParseSchema(result.ops[0]))
- .catch((error) => {
+ .then(result => mongoSchemaToParseSchema(result.ops[0]))
+ .catch(error => {
if (error.code === 11000) {
// Mongo's duplicate key error
- throw new Parse.Error(
- Parse.Error.DUPLICATE_VALUE,
- 'Class already exists.'
- );
+ throw new Parse.Error(Parse.Error.DUPLICATE_VALUE, 'Class already exists.');
} else {
throw error;
}
@@ -198,17 +189,11 @@ class MongoSchemaCollection {
}
updateSchema(name: string, update) {
- return this._collection.updateOne(
- _mongoSchemaQueryFromNameQuery(name),
- update
- );
+ return this._collection.updateOne(_mongoSchemaQueryFromNameQuery(name), update);
}
upsertSchema(name: string, query: string, update) {
- return this._collection.upsertOne(
- _mongoSchemaQueryFromNameQuery(name, query),
- update
- );
+ return this._collection.upsertOne(_mongoSchemaQueryFromNameQuery(name, query), update);
}
// Add a field to the schema. If database does not support the field
@@ -225,7 +210,7 @@ class MongoSchemaCollection {
addFieldIfNotExists(className: string, fieldName: string, fieldType: string) {
return this._fetchOneSchemaFrom_SCHEMA(className)
.then(
- (schema) => {
+ schema => {
// If a field with this name already exists, it will be handled elsewhere.
if (schema.fields[fieldName] != undefined) {
return;
@@ -235,8 +220,7 @@ class MongoSchemaCollection {
// Make sure there are not other geopoint fields
if (
Object.keys(schema.fields).some(
- (existingField) =>
- schema.fields[existingField].type === 'GeoPoint'
+ existingField => schema.fields[existingField].type === 'GeoPoint'
)
) {
throw new Parse.Error(
@@ -247,7 +231,7 @@ class MongoSchemaCollection {
}
return;
},
- (error) => {
+ error => {
// If error is undefined, the schema doesn't exist, and we can create the schema with the field.
// If some other error, reject with it.
if (error === undefined) {
diff --git a/src/Adapters/Storage/Mongo/MongoStorageAdapter.js b/src/Adapters/Storage/Mongo/MongoStorageAdapter.js
index 60483480ed..a3f1a3223e 100644
--- a/src/Adapters/Storage/Mongo/MongoStorageAdapter.js
+++ b/src/Adapters/Storage/Mongo/MongoStorageAdapter.js
@@ -2,16 +2,8 @@
import MongoCollection from './MongoCollection';
import MongoSchemaCollection from './MongoSchemaCollection';
import { StorageAdapter } from '../StorageAdapter';
-import type {
- SchemaType,
- QueryType,
- StorageClass,
- QueryOptions,
-} from '../StorageAdapter';
-import {
- parse as parseUrl,
- format as formatUrl,
-} from '../../../vendor/mongodbUrl';
+import type { SchemaType, QueryType, StorageClass, QueryOptions } from '../StorageAdapter';
+import { parse as parseUrl, format as formatUrl } from '../../../vendor/mongodbUrl';
import {
parseObjectToMongoObjectForCreate,
mongoObjectToParseObject,
@@ -45,9 +37,7 @@ const storageAdapterAllCollections = mongoAdapter => {
}
// TODO: If you have one app with a collection prefix that happens to be a prefix of another
// app's prefix, this will go very very badly. We should fix that somehow.
- return (
- collection.collectionName.indexOf(mongoAdapter._collectionPrefix) == 0
- );
+ return collection.collectionName.indexOf(mongoAdapter._collectionPrefix) == 0;
});
});
};
@@ -85,16 +75,13 @@ const mongoSchemaFromFieldsAndClassNameAndCLP = (
for (const fieldName in fields) {
const { type, targetClass, ...fieldOptions } = fields[fieldName];
- mongoObject[
- fieldName
- ] = MongoSchemaCollection.parseFieldTypeToMongoFieldType({
+ mongoObject[fieldName] = MongoSchemaCollection.parseFieldTypeToMongoFieldType({
type,
targetClass,
});
if (fieldOptions && Object.keys(fieldOptions).length > 0) {
mongoObject._metadata = mongoObject._metadata || {};
- mongoObject._metadata.fields_options =
- mongoObject._metadata.fields_options || {};
+ mongoObject._metadata.fields_options = mongoObject._metadata.fields_options || {};
mongoObject._metadata.fields_options[fieldName] = fieldOptions;
}
}
@@ -108,11 +95,7 @@ const mongoSchemaFromFieldsAndClassNameAndCLP = (
}
}
- if (
- indexes &&
- typeof indexes === 'object' &&
- Object.keys(indexes).length > 0
- ) {
+ if (indexes && typeof indexes === 'object' && Object.keys(indexes).length > 0) {
mongoObject._metadata = mongoObject._metadata || {};
mongoObject._metadata.indexes = indexes;
}
@@ -130,30 +113,36 @@ export class MongoStorageAdapter implements StorageAdapter {
_uri: string;
_collectionPrefix: string;
_mongoOptions: Object;
+ _onchange: any;
+ _stream: any;
// Public
connectionPromise: ?Promise;
database: any;
client: MongoClient;
_maxTimeMS: ?number;
canSortOnJoinTables: boolean;
+ replicaSet: boolean;
- constructor({
- uri = defaults.DefaultMongoURI,
- collectionPrefix = '',
- mongoOptions = {},
- }: any) {
+ constructor({ uri = defaults.DefaultMongoURI, collectionPrefix = '', mongoOptions = {} }: any) {
this._uri = uri;
this._collectionPrefix = collectionPrefix;
this._mongoOptions = mongoOptions;
this._mongoOptions.useNewUrlParser = true;
this._mongoOptions.useUnifiedTopology = true;
+ this._onchange = () => {};
// MaxTimeMS is not a global MongoDB client option; it is applied per operation.
this._maxTimeMS = mongoOptions.maxTimeMS;
this.canSortOnJoinTables = true;
+ this.replicaSet = !!mongoOptions.replicaSet;
+ delete mongoOptions.replicaSet;
delete mongoOptions.maxTimeMS;
}
+ watch(callback: () => void): void {
+ this._onchange = callback;
+ }
+
connect() {
if (this.connectionPromise) {
return this.connectionPromise;
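
The constructor consumes two of the incoming `mongoOptions` itself instead of forwarding them to the MongoDB driver: `replicaSet` is coerced to a boolean that gates the `_SCHEMA` change stream below, and `maxTimeMS` is applied per operation. A minimal construction sketch; the URI and prefix are placeholders:

```js
import { MongoStorageAdapter } from './Adapters/Storage/Mongo/MongoStorageAdapter';

// Placeholder values throughout; only the option handling is the point here.
const adapter = new MongoStorageAdapter({
  uri: 'mongodb://localhost:27017/parse',
  collectionPrefix: 'myApp_',
  mongoOptions: {
    replicaSet: true, // stripped by the constructor; enables watching _SCHEMA
    maxTimeMS: 2000, // stripped by the constructor; applied per operation
  },
});
```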
@@ -219,15 +208,19 @@ export class MongoStorageAdapter implements StorageAdapter {
_schemaCollection(): Promise {
return this.connect()
.then(() => this._adaptiveCollection(MongoSchemaCollectionName))
- .then(collection => new MongoSchemaCollection(collection));
+ .then(collection => {
+ if (!this._stream && this.replicaSet) {
+ this._stream = collection._mongoCollection.watch();
+ this._stream.on('change', () => this._onchange());
+ }
+ return new MongoSchemaCollection(collection);
+ });
}
classExists(name: string) {
return this.connect()
.then(() => {
- return this.database
- .listCollections({ name: this._collectionPrefix + name })
- .toArray();
+ return this.database.listCollections({ name: this._collectionPrefix + name }).toArray();
})
.then(collections => {
return collections.length > 0;
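
Taken together with `watch()`, the block above means: on the first `_SCHEMA` access, and only when `replicaSet` was set (MongoDB change streams require a replica set or sharded cluster), the adapter opens a change stream and forwards every change event to the registered callback. A hedged sketch of a consumer; `reloadSchema` is a hypothetical stand-in for whatever owns the schema cache:

```js
// Hypothetical consumer: invalidate a local schema cache whenever another
// server instance mutates the _SCHEMA collection.
const reloadSchema = () => {
  // e.g. clear an in-memory schema cache here (identifier not real)
  console.log('_SCHEMA changed elsewhere; dropping cached schema');
};
adapter.watch(reloadSchema);
```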
@@ -262,10 +255,7 @@ export class MongoStorageAdapter implements StorageAdapter {
Object.keys(submittedIndexes).forEach(name => {
const field = submittedIndexes[name];
if (existingIndexes[name] && field.__op !== 'Delete') {
- throw new Parse.Error(
- Parse.Error.INVALID_QUERY,
- `Index ${name} exists, cannot update.`
- );
+ throw new Parse.Error(Parse.Error.INVALID_QUERY, `Index ${name} exists, cannot update.`);
}
if (!existingIndexes[name] && field.__op === 'Delete') {
throw new Parse.Error(
@@ -349,26 +339,15 @@ export class MongoStorageAdapter implements StorageAdapter {
schema.indexes
);
mongoObject._id = className;
- return this.setIndexesWithSchemaFormat(
- className,
- schema.indexes,
- {},
- schema.fields
- )
+ return this.setIndexesWithSchemaFormat(className, schema.indexes, {}, schema.fields)
.then(() => this._schemaCollection())
.then(schemaCollection => schemaCollection.insertSchema(mongoObject))
.catch(err => this.handleError(err));
}
- addFieldIfNotExists(
- className: string,
- fieldName: string,
- type: any
- ): Promise {
+ addFieldIfNotExists(className: string, fieldName: string, type: any): Promise {
return this._schemaCollection()
- .then(schemaCollection =>
- schemaCollection.addFieldIfNotExists(className, fieldName, type)
- )
+ .then(schemaCollection => schemaCollection.addFieldIfNotExists(className, fieldName, type))
.then(() => this.createIndexesIfNeeded(className, fieldName, type))
.catch(err => this.handleError(err));
}
@@ -388,9 +367,7 @@ export class MongoStorageAdapter implements StorageAdapter {
})
// We've dropped the collection; now remove the _SCHEMA document
.then(() => this._schemaCollection())
- .then(schemaCollection =>
- schemaCollection.findAndDeleteSchema(className)
- )
+ .then(schemaCollection => schemaCollection.findAndDeleteSchema(className))
.catch(err => this.handleError(err))
);
}
@@ -398,9 +375,7 @@ export class MongoStorageAdapter implements StorageAdapter {
deleteAllClasses(fast: boolean) {
return storageAdapterAllCollections(this).then(collections =>
Promise.all(
- collections.map(collection =>
- fast ? collection.deleteMany({}) : collection.drop()
- )
+ collections.map(collection => (fast ? collection.deleteMany({}) : collection.drop()))
)
);
}
@@ -450,13 +425,9 @@ export class MongoStorageAdapter implements StorageAdapter {
});
return this._adaptiveCollection(className)
- .then(collection =>
- collection.updateMany(collectionFilter, collectionUpdate)
- )
+ .then(collection => collection.updateMany(collectionFilter, collectionUpdate))
.then(() => this._schemaCollection())
- .then(schemaCollection =>
- schemaCollection.updateSchema(className, schemaUpdate)
- )
+ .then(schemaCollection => schemaCollection.updateSchema(className, schemaUpdate))
.catch(err => this.handleError(err));
}
@@ -465,9 +436,7 @@ export class MongoStorageAdapter implements StorageAdapter {
// rejection reason are TBD.
getAllClasses(): Promise {
return this._schemaCollection()
- .then(schemasCollection =>
- schemasCollection._fetchAllSchemasFrom_SCHEMA()
- )
+ .then(schemasCollection => schemasCollection._fetchAllSchemasFrom_SCHEMA())
.catch(err => this.handleError(err));
}
@@ -476,31 +445,18 @@ export class MongoStorageAdapter implements StorageAdapter {
// undefined as the reason.
getClass(className: string): Promise {
return this._schemaCollection()
- .then(schemasCollection =>
- schemasCollection._fetchOneSchemaFrom_SCHEMA(className)
- )
+ .then(schemasCollection => schemasCollection._fetchOneSchemaFrom_SCHEMA(className))
.catch(err => this.handleError(err));
}
// TODO: As yet not particularly well specified. Creates an object. Maybe shouldn't even need the schema,
// and should infer from the type. Or maybe does need the schema for validations. Or maybe needs
// the schema only for the legacy mongo format. We'll figure that out later.
- createObject(
- className: string,
- schema: SchemaType,
- object: any,
- transactionalSession: ?any
- ) {
+ createObject(className: string, schema: SchemaType, object: any, transactionalSession: ?any) {
schema = convertParseSchemaToMongoSchema(schema);
- const mongoObject = parseObjectToMongoObjectForCreate(
- className,
- object,
- schema
- );
+ const mongoObject = parseObjectToMongoObjectForCreate(className, object, schema);
return this._adaptiveCollection(className)
- .then(collection =>
- collection.insertOne(mongoObject, transactionalSession)
- )
+ .then(collection => collection.insertOne(mongoObject, transactionalSession))
.catch(error => {
if (error.code === 11000) {
// Duplicate value
@@ -510,9 +466,7 @@ export class MongoStorageAdapter implements StorageAdapter {
);
err.underlyingError = error;
if (error.message) {
- const matches = error.message.match(
- /index:[\sa-zA-Z0-9_\-\.]+\$?([a-zA-Z_-]+)_1/
- );
+ const matches = error.message.match(/index:[\sa-zA-Z0-9_\-\.]+\$?([a-zA-Z_-]+)_1/);
if (matches && Array.isArray(matches)) {
err.userInfo = { duplicated_field: matches[1] };
}
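
The regex above targets the older E11000 message format, where the index name follows a `namespace.$` prefix (hence the optional `\$?`). A worked example; the message text is illustrative and the exact format varies across MongoDB versions:

```js
// Illustrative E11000 message in the older `namespace.$index` form:
const msg =
  'E11000 duplicate key error index: myapp.users.$username_1 dup key: { : "alice" }';
const matches = msg.match(/index:[\sa-zA-Z0-9_\-\.]+\$?([a-zA-Z_-]+)_1/);
// matches[1] === 'username', surfaced to clients as userInfo.duplicated_field
```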
@@ -543,18 +497,12 @@ export class MongoStorageAdapter implements StorageAdapter {
.then(
({ result }) => {
if (result.n === 0) {
- throw new Parse.Error(
- Parse.Error.OBJECT_NOT_FOUND,
- 'Object not found.'
- );
+ throw new Parse.Error(Parse.Error.OBJECT_NOT_FOUND, 'Object not found.');
}
return Promise.resolve();
},
() => {
- throw new Parse.Error(
- Parse.Error.INTERNAL_SERVER_ERROR,
- 'Database adapter error'
- );
+ throw new Parse.Error(Parse.Error.INTERNAL_SERVER_ERROR, 'Database adapter error');
}
);
}
@@ -571,9 +519,7 @@ export class MongoStorageAdapter implements StorageAdapter {
const mongoUpdate = transformUpdate(className, update, schema);
const mongoWhere = transformWhere(className, query, schema);
return this._adaptiveCollection(className)
- .then(collection =>
- collection.updateMany(mongoWhere, mongoUpdate, transactionalSession)
- )
+ .then(collection => collection.updateMany(mongoWhere, mongoUpdate, transactionalSession))
.catch(err => this.handleError(err));
}
@@ -621,9 +567,7 @@ export class MongoStorageAdapter implements StorageAdapter {
const mongoUpdate = transformUpdate(className, update, schema);
const mongoWhere = transformWhere(className, query, schema);
return this._adaptiveCollection(className)
- .then(collection =>
- collection.upsertOne(mongoWhere, mongoUpdate, transactionalSession)
- )
+ .then(collection => collection.upsertOne(mongoWhere, mongoUpdate, transactionalSession))
.catch(err => this.handleError(err));
}
@@ -632,16 +576,7 @@ export class MongoStorageAdapter implements StorageAdapter {
className: string,
schema: SchemaType,
query: QueryType,
- {
- skip,
- limit,
- sort,
- keys,
- readPreference,
- hint,
- caseInsensitive,
- explain,
- }: QueryOptions
+ { skip, limit, sort, keys, readPreference, hint, caseInsensitive, explain }: QueryOptions
): Promise {
schema = convertParseSchemaToMongoSchema(schema);
const mongoWhere = transformWhere(className, query, schema);
@@ -689,9 +624,7 @@ export class MongoStorageAdapter implements StorageAdapter {
if (explain) {
return objects;
}
- return objects.map(object =>
- mongoObjectToParseObject(className, object, schema)
- );
+ return objects.map(object => mongoObjectToParseObject(className, object, schema));
})
.catch(err => this.handleError(err));
}
@@ -706,18 +639,14 @@ export class MongoStorageAdapter implements StorageAdapter {
): Promise {
schema = convertParseSchemaToMongoSchema(schema);
const indexCreationRequest = {};
- const mongoFieldNames = fieldNames.map(fieldName =>
- transformKey(className, fieldName, schema)
- );
+ const mongoFieldNames = fieldNames.map(fieldName => transformKey(className, fieldName, schema));
mongoFieldNames.forEach(fieldName => {
- indexCreationRequest[fieldName] =
- options.indexType !== undefined ? options.indexType : 1;
+ indexCreationRequest[fieldName] = options.indexType !== undefined ? options.indexType : 1;
});
const defaultOptions: Object = { background: true, sparse: true };
const indexNameOptions: Object = indexName ? { name: indexName } : {};
- const ttlOptions: Object =
- options.ttl !== undefined ? { expireAfterSeconds: options.ttl } : {};
+ const ttlOptions: Object = options.ttl !== undefined ? { expireAfterSeconds: options.ttl } : {};
const caseInsensitiveOptions: Object = caseInsensitive
? { collation: MongoCollection.caseInsensitiveCollation() }
: {};
@@ -732,10 +661,8 @@ export class MongoStorageAdapter implements StorageAdapter {
.then(
collection =>
new Promise((resolve, reject) =>
- collection._mongoCollection.createIndex(
- indexCreationRequest,
- indexOptions,
- error => (error ? reject(error) : resolve())
+ collection._mongoCollection.createIndex(indexCreationRequest, indexOptions, error =>
+ error ? reject(error) : resolve()
)
)
)
@@ -747,23 +674,15 @@ export class MongoStorageAdapter implements StorageAdapter {
// As such, we shouldn't expose this function to users of parse until we have an out-of-band
// way of determining if a field is nullable. Undefined doesn't count against uniqueness,
// which is why we use sparse indexes.
- ensureUniqueness(
- className: string,
- schema: SchemaType,
- fieldNames: string[]
- ) {
+ ensureUniqueness(className: string, schema: SchemaType, fieldNames: string[]) {
schema = convertParseSchemaToMongoSchema(schema);
const indexCreationRequest = {};
- const mongoFieldNames = fieldNames.map(fieldName =>
- transformKey(className, fieldName, schema)
- );
+ const mongoFieldNames = fieldNames.map(fieldName => transformKey(className, fieldName, schema));
mongoFieldNames.forEach(fieldName => {
indexCreationRequest[fieldName] = 1;
});
return this._adaptiveCollection(className)
- .then(collection =>
- collection._ensureSparseUniqueIndexInBackground(indexCreationRequest)
- )
+ .then(collection => collection._ensureSparseUniqueIndexInBackground(indexCreationRequest))
.catch(error => {
if (error.code === 11000) {
throw new Parse.Error(
@@ -808,23 +727,14 @@ export class MongoStorageAdapter implements StorageAdapter {
.catch(err => this.handleError(err));
}
- distinct(
- className: string,
- schema: SchemaType,
- query: QueryType,
- fieldName: string
- ) {
+ distinct(className: string, schema: SchemaType, query: QueryType, fieldName: string) {
schema = convertParseSchemaToMongoSchema(schema);
- const isPointerField =
- schema.fields[fieldName] && schema.fields[fieldName].type === 'Pointer';
+ const isPointerField = schema.fields[fieldName] && schema.fields[fieldName].type === 'Pointer';
const transformField = transformKey(className, fieldName, schema);
return this._adaptiveCollection(className)
.then(collection =>
- collection.distinct(
- transformField,
- transformWhere(className, query, schema)
- )
+ collection.distinct(transformField, transformWhere(className, query, schema))
)
.then(objects => {
objects = objects.filter(obj => obj != null);
@@ -862,16 +772,10 @@ export class MongoStorageAdapter implements StorageAdapter {
stage.$match = this._parseAggregateArgs(schema, stage.$match);
}
if (stage.$project) {
- stage.$project = this._parseAggregateProjectArgs(
- schema,
- stage.$project
- );
+ stage.$project = this._parseAggregateProjectArgs(schema, stage.$project);
}
if (stage.$geoNear && stage.$geoNear.query) {
- stage.$geoNear.query = this._parseAggregateArgs(
- schema,
- stage.$geoNear.query
- );
+ stage.$geoNear.query = this._parseAggregateArgs(schema, stage.$geoNear.query);
}
return stage;
});
@@ -894,8 +798,7 @@ export class MongoStorageAdapter implements StorageAdapter {
if (
result._id == null ||
result._id == undefined ||
- (['object', 'string'].includes(typeof result._id) &&
- _.isEmpty(result._id))
+ (['object', 'string'].includes(typeof result._id) && _.isEmpty(result._id))
) {
result._id = null;
}
@@ -905,11 +808,7 @@ export class MongoStorageAdapter implements StorageAdapter {
});
return results;
})
- .then(objects =>
- objects.map(object =>
- mongoObjectToParseObject(className, object, schema)
- )
- )
+ .then(objects => objects.map(object => mongoObjectToParseObject(className, object, schema)))
.catch(err => this.handleError(err));
}
@@ -945,20 +844,12 @@ export class MongoStorageAdapter implements StorageAdapter {
// Pass objects down to MongoDB... this is more than likely an $exists operator.
returnValue[`_p_${field}`] = pipeline[field];
} else {
- returnValue[
- `_p_${field}`
- ] = `${schema.fields[field].targetClass}$${pipeline[field]}`;
+ returnValue[`_p_${field}`] = `${schema.fields[field].targetClass}$${pipeline[field]}`;
}
- } else if (
- schema.fields[field] &&
- schema.fields[field].type === 'Date'
- ) {
+ } else if (schema.fields[field] && schema.fields[field].type === 'Date') {
returnValue[field] = this._convertToDate(pipeline[field]);
} else {
- returnValue[field] = this._parseAggregateArgs(
- schema,
- pipeline[field]
- );
+ returnValue[field] = this._parseAggregateArgs(schema, pipeline[field]);
}
if (field === 'objectId') {
@@ -1011,16 +902,11 @@ export class MongoStorageAdapter implements StorageAdapter {
// updatedAt or objectId and change it accordingly.
_parseAggregateGroupArgs(schema: any, pipeline: any): any {
if (Array.isArray(pipeline)) {
- return pipeline.map(value =>
- this._parseAggregateGroupArgs(schema, value)
- );
+ return pipeline.map(value => this._parseAggregateGroupArgs(schema, value));
} else if (typeof pipeline === 'object') {
const returnValue = {};
for (const field in pipeline) {
- returnValue[field] = this._parseAggregateGroupArgs(
- schema,
- pipeline[field]
- );
+ returnValue[field] = this._parseAggregateGroupArgs(schema, pipeline[field]);
}
return returnValue;
} else if (typeof pipeline === 'string') {
@@ -1077,10 +963,7 @@ export class MongoStorageAdapter implements StorageAdapter {
case '':
break;
default:
- throw new Parse.Error(
- Parse.Error.INVALID_QUERY,
- 'Not supported read preference.'
- );
+ throw new Parse.Error(Parse.Error.INVALID_QUERY, 'Not supported read preference.');
}
return readPreference;
}
@@ -1111,11 +994,7 @@ export class MongoStorageAdapter implements StorageAdapter {
return Promise.resolve();
}
- createTextIndexesIfNeeded(
- className: string,
- query: QueryType,
- schema: any
- ): Promise {
+ createTextIndexesIfNeeded(className: string, query: QueryType, schema: any): Promise {
for (const fieldName in query) {
if (!query[fieldName] || !query[fieldName].$text) {
continue;
diff --git a/src/Adapters/Storage/Mongo/MongoTransform.js b/src/Adapters/Storage/Mongo/MongoTransform.js
index ff025cfd09..c591522479 100644
--- a/src/Adapters/Storage/Mongo/MongoTransform.js
+++ b/src/Adapters/Storage/Mongo/MongoTransform.js
@@ -20,27 +20,16 @@ const transformKey = (className, fieldName, schema) => {
return 'times_used';
}
- if (
- schema.fields[fieldName] &&
- schema.fields[fieldName].__type == 'Pointer'
- ) {
+ if (schema.fields[fieldName] && schema.fields[fieldName].__type == 'Pointer') {
fieldName = '_p_' + fieldName;
- } else if (
- schema.fields[fieldName] &&
- schema.fields[fieldName].type == 'Pointer'
- ) {
+ } else if (schema.fields[fieldName] && schema.fields[fieldName].type == 'Pointer') {
fieldName = '_p_' + fieldName;
}
return fieldName;
};
-const transformKeyValueForUpdate = (
- className,
- restKey,
- restValue,
- parseFormatSchema
-) => {
+const transformKeyValueForUpdate = (className, restKey, restValue, parseFormatSchema) => {
// Check if the schema is known since it's a built-in field.
var key = restKey;
var timeField = false;
@@ -109,11 +98,8 @@ const transformKeyValueForUpdate = (
}
if (
- (parseFormatSchema.fields[key] &&
- parseFormatSchema.fields[key].type === 'Pointer') ||
- (!parseFormatSchema.fields[key] &&
- restValue &&
- restValue.__type == 'Pointer')
+ (parseFormatSchema.fields[key] && parseFormatSchema.fields[key].type === 'Pointer') ||
+ (!parseFormatSchema.fields[key] && restValue && restValue.__type == 'Pointer')
) {
key = '_p_' + key;
}
@@ -179,7 +165,7 @@ const isAllValuesRegexOrNone = values => {
};
const isAnyValueRegex = values => {
- return values.some(function(value) {
+ return values.some(function (value) {
return isRegex(value);
});
};
@@ -292,9 +278,7 @@ function transformQueryKeyValue(className, key, value, schema, count = false) {
case '$nor':
return {
key: key,
- value: value.map(subQuery =>
- transformWhere(className, subQuery, schema, count)
- ),
+ value: value.map(subQuery => transformWhere(className, subQuery, schema, count)),
};
case 'lastUsed':
if (valueAsDate(value)) {
@@ -315,17 +299,13 @@ function transformQueryKeyValue(className, key, value, schema, count = false) {
}
}
- const expectedTypeIsArray =
- schema && schema.fields[key] && schema.fields[key].type === 'Array';
+ const expectedTypeIsArray = schema && schema.fields[key] && schema.fields[key].type === 'Array';
const expectedTypeIsPointer =
schema && schema.fields[key] && schema.fields[key].type === 'Pointer';
const field = schema && schema.fields[key];
- if (
- expectedTypeIsPointer ||
- (!schema && value && value.__type === 'Pointer')
- ) {
+ if (expectedTypeIsPointer || (!schema && value && value.__type === 'Pointer')) {
key = '_p_' + key;
}
@@ -362,23 +342,13 @@ function transformQueryKeyValue(className, key, value, schema, count = false) {
function transformWhere(className, restWhere, schema, count = false) {
const mongoWhere = {};
for (const restKey in restWhere) {
- const out = transformQueryKeyValue(
- className,
- restKey,
- restWhere[restKey],
- schema,
- count
- );
+ const out = transformQueryKeyValue(className, restKey, restWhere[restKey], schema, count);
mongoWhere[out.key] = out.value;
}
return mongoWhere;
}
-const parseObjectKeyValueToMongoObjectKeyValue = (
- restKey,
- restValue,
- schema
-) => {
+const parseObjectKeyValueToMongoObjectKeyValue = (restKey, restValue, schema) => {
// Check if the schema is known since it's a built-in field.
let transformedValue;
let coercedToDate;
@@ -388,37 +358,27 @@ const parseObjectKeyValueToMongoObjectKeyValue = (
case 'expiresAt':
transformedValue = transformTopLevelAtom(restValue);
coercedToDate =
- typeof transformedValue === 'string'
- ? new Date(transformedValue)
- : transformedValue;
+ typeof transformedValue === 'string' ? new Date(transformedValue) : transformedValue;
return { key: 'expiresAt', value: coercedToDate };
case '_email_verify_token_expires_at':
transformedValue = transformTopLevelAtom(restValue);
coercedToDate =
- typeof transformedValue === 'string'
- ? new Date(transformedValue)
- : transformedValue;
+ typeof transformedValue === 'string' ? new Date(transformedValue) : transformedValue;
return { key: '_email_verify_token_expires_at', value: coercedToDate };
case '_account_lockout_expires_at':
transformedValue = transformTopLevelAtom(restValue);
coercedToDate =
- typeof transformedValue === 'string'
- ? new Date(transformedValue)
- : transformedValue;
+ typeof transformedValue === 'string' ? new Date(transformedValue) : transformedValue;
return { key: '_account_lockout_expires_at', value: coercedToDate };
case '_perishable_token_expires_at':
transformedValue = transformTopLevelAtom(restValue);
coercedToDate =
- typeof transformedValue === 'string'
- ? new Date(transformedValue)
- : transformedValue;
+ typeof transformedValue === 'string' ? new Date(transformedValue) : transformedValue;
return { key: '_perishable_token_expires_at', value: coercedToDate };
case '_password_changed_at':
transformedValue = transformTopLevelAtom(restValue);
coercedToDate =
- typeof transformedValue === 'string'
- ? new Date(transformedValue)
- : transformedValue;
+ typeof transformedValue === 'string' ? new Date(transformedValue) : transformedValue;
return { key: '_password_changed_at', value: coercedToDate };
case '_failed_login_count':
case '_rperm':
@@ -432,10 +392,7 @@ const parseObjectKeyValueToMongoObjectKeyValue = (
default:
// Auth data should have been transformed already
if (restKey.match(/^authData\.([a-zA-Z0-9_]+)\.id$/)) {
- throw new Parse.Error(
- Parse.Error.INVALID_KEY_NAME,
- 'can only query on ' + restKey
- );
+ throw new Parse.Error(Parse.Error.INVALID_KEY_NAME, 'can only query on ' + restKey);
}
// Trust that the auth data has been transformed and save it directly
if (restKey.match(/^_auth_data_[a-zA-Z0-9_]+$/)) {
@@ -473,9 +430,7 @@ const parseObjectKeyValueToMongoObjectKeyValue = (
}
// Handle normal objects by recursing
- if (
- Object.keys(restValue).some(key => key.includes('$') || key.includes('.'))
- ) {
+ if (Object.keys(restValue).some(key => key.includes('$') || key.includes('.'))) {
throw new Parse.Error(
Parse.Error.INVALID_NESTED_KEY,
"Nested keys should not contain the '$' or '.' characters"
@@ -504,15 +459,11 @@ const parseObjectToMongoObjectForCreate = (className, restCreate, schema) => {
// Use the legacy mongo format for createdAt and updatedAt
if (mongoCreate.createdAt) {
- mongoCreate._created_at = new Date(
- mongoCreate.createdAt.iso || mongoCreate.createdAt
- );
+ mongoCreate._created_at = new Date(mongoCreate.createdAt.iso || mongoCreate.createdAt);
delete mongoCreate.createdAt;
}
if (mongoCreate.updatedAt) {
- mongoCreate._updated_at = new Date(
- mongoCreate.updatedAt.iso || mongoCreate.updatedAt
- );
+ mongoCreate._updated_at = new Date(mongoCreate.updatedAt.iso || mongoCreate.updatedAt);
delete mongoCreate.updatedAt;
}
@@ -593,22 +544,14 @@ function CannotTransform() {}
const transformInteriorAtom = atom => {
// TODO: check validity harder for the __type-defined types
- if (
- typeof atom === 'object' &&
- atom &&
- !(atom instanceof Date) &&
- atom.__type === 'Pointer'
- ) {
+ if (typeof atom === 'object' && atom && !(atom instanceof Date) && atom.__type === 'Pointer') {
return {
__type: 'Pointer',
className: atom.className,
objectId: atom.objectId,
};
} else if (typeof atom === 'function' || typeof atom === 'symbol') {
- throw new Parse.Error(
- Parse.Error.INVALID_JSON,
- `cannot transform value: ${atom}`
- );
+ throw new Parse.Error(Parse.Error.INVALID_JSON, `cannot transform value: ${atom}`);
} else if (DateCoder.isValidJSON(atom)) {
return DateCoder.JSONToDatabase(atom);
} else if (BytesCoder.isValidJSON(atom)) {
@@ -640,10 +583,7 @@ function transformTopLevelAtom(atom, field) {
return atom;
case 'symbol':
case 'function':
- throw new Parse.Error(
- Parse.Error.INVALID_JSON,
- `cannot transform value: ${atom}`
- );
+ throw new Parse.Error(Parse.Error.INVALID_JSON, `cannot transform value: ${atom}`);
case 'object':
if (atom instanceof Date) {
// Technically dates are not rest format, but, it seems pretty
@@ -822,16 +762,11 @@ function transformConstraint(constraint, field, count = false) {
if (typeof constraint !== 'object' || !constraint) {
return CannotTransform;
}
- const transformFunction = inArray
- ? transformInteriorAtom
- : transformTopLevelAtom;
+ const transformFunction = inArray ? transformInteriorAtom : transformTopLevelAtom;
const transformer = atom => {
const result = transformFunction(atom, field);
if (result === CannotTransform) {
- throw new Parse.Error(
- Parse.Error.INVALID_JSON,
- `bad atom: ${JSON.stringify(atom)}`
- );
+ throw new Parse.Error(Parse.Error.INVALID_JSON, `bad atom: ${JSON.stringify(atom)}`);
}
return result;
};
@@ -839,9 +774,7 @@ function transformConstraint(constraint, field, count = false) {
// This is a hack so that:
// $regex is handled before $options
// $nearSphere is handled before $maxDistance
- var keys = Object.keys(constraint)
- .sort()
- .reverse();
+ var keys = Object.keys(constraint).sort().reverse();
var answer = {};
for (var key of keys) {
switch (key) {
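
The one-liner relies on reverse lexicographic order to handle an operator before the operator that modifies it, exactly as the comment states. A quick check:

```js
// Reverse lexicographic order puts the primary operator first:
Object.keys({ $options: 'i', $regex: 'coffee' }).sort().reverse();
// => ['$regex', '$options']

Object.keys({ $maxDistance: 0.1, $nearSphere: { latitude: 0, longitude: 0 } })
  .sort()
  .reverse();
// => ['$nearSphere', '$maxDistance']
```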
@@ -892,10 +825,7 @@ function transformConstraint(constraint, field, count = false) {
case '$nin': {
const arr = constraint[key];
if (!(arr instanceof Array)) {
- throw new Parse.Error(
- Parse.Error.INVALID_JSON,
- 'bad ' + key + ' value'
- );
+ throw new Parse.Error(Parse.Error.INVALID_JSON, 'bad ' + key + ' value');
}
answer[key] = _.flatMap(arr, value => {
return (atom => {
@@ -911,10 +841,7 @@ function transformConstraint(constraint, field, count = false) {
case '$all': {
const arr = constraint[key];
if (!(arr instanceof Array)) {
- throw new Parse.Error(
- Parse.Error.INVALID_JSON,
- 'bad ' + key + ' value'
- );
+ throw new Parse.Error(Parse.Error.INVALID_JSON, 'bad ' + key + ' value');
}
answer[key] = arr.map(transformInteriorAtom);
@@ -939,10 +866,7 @@ function transformConstraint(constraint, field, count = false) {
case '$containedBy': {
const arr = constraint[key];
if (!(arr instanceof Array)) {
- throw new Parse.Error(
- Parse.Error.INVALID_JSON,
- `bad $containedBy: should be an array`
- );
+ throw new Parse.Error(Parse.Error.INVALID_JSON, `bad $containedBy: should be an array`);
}
answer.$elemMatch = {
$nin: arr.map(transformer),
@@ -956,33 +880,21 @@ function transformConstraint(constraint, field, count = false) {
case '$text': {
const search = constraint[key].$search;
if (typeof search !== 'object') {
- throw new Parse.Error(
- Parse.Error.INVALID_JSON,
- `bad $text: $search, should be object`
- );
+ throw new Parse.Error(Parse.Error.INVALID_JSON, `bad $text: $search, should be object`);
}
if (!search.$term || typeof search.$term !== 'string') {
- throw new Parse.Error(
- Parse.Error.INVALID_JSON,
- `bad $text: $term, should be string`
- );
+ throw new Parse.Error(Parse.Error.INVALID_JSON, `bad $text: $term, should be string`);
} else {
answer[key] = {
$search: search.$term,
};
}
if (search.$language && typeof search.$language !== 'string') {
- throw new Parse.Error(
- Parse.Error.INVALID_JSON,
- `bad $text: $language, should be string`
- );
+ throw new Parse.Error(Parse.Error.INVALID_JSON, `bad $text: $language, should be string`);
} else if (search.$language) {
answer[key].$language = search.$language;
}
- if (
- search.$caseSensitive &&
- typeof search.$caseSensitive !== 'boolean'
- ) {
+ if (search.$caseSensitive && typeof search.$caseSensitive !== 'boolean') {
throw new Parse.Error(
Parse.Error.INVALID_JSON,
`bad $text: $caseSensitive, should be boolean`
@@ -990,10 +902,7 @@ function transformConstraint(constraint, field, count = false) {
} else if (search.$caseSensitive) {
answer[key].$caseSensitive = search.$caseSensitive;
}
- if (
- search.$diacriticSensitive &&
- typeof search.$diacriticSensitive !== 'boolean'
- ) {
+ if (search.$diacriticSensitive && typeof search.$diacriticSensitive !== 'boolean') {
throw new Parse.Error(
Parse.Error.INVALID_JSON,
`bad $text: $diacriticSensitive, should be boolean`
@@ -1007,10 +916,7 @@ function transformConstraint(constraint, field, count = false) {
const point = constraint[key];
if (count) {
answer.$geoWithin = {
- $centerSphere: [
- [point.longitude, point.latitude],
- constraint.$maxDistance,
- ],
+ $centerSphere: [[point.longitude, point.latitude], constraint.$maxDistance],
};
} else {
answer[key] = [point.longitude, point.latitude];
@@ -1046,10 +952,7 @@ function transformConstraint(constraint, field, count = false) {
case '$within':
var box = constraint[key]['$box'];
if (!box || box.length != 2) {
- throw new Parse.Error(
- Parse.Error.INVALID_JSON,
- 'malformatted $within arg'
- );
+ throw new Parse.Error(Parse.Error.INVALID_JSON, 'malformatted $within arg');
}
answer[key] = {
$box: [
@@ -1092,10 +995,7 @@ function transformConstraint(constraint, field, count = false) {
return point;
}
if (!GeoPointCoder.isValidJSON(point)) {
- throw new Parse.Error(
- Parse.Error.INVALID_JSON,
- 'bad $geoWithin value'
- );
+ throw new Parse.Error(Parse.Error.INVALID_JSON, 'bad $geoWithin value');
} else {
Parse.GeoPoint._validate(point.latitude, point.longitude);
}
@@ -1156,10 +1056,7 @@ function transformConstraint(constraint, field, count = false) {
}
default:
if (key.match(/^\$+/)) {
- throw new Parse.Error(
- Parse.Error.INVALID_JSON,
- 'bad constraint: ' + key
- );
+ throw new Parse.Error(Parse.Error.INVALID_JSON, 'bad constraint: ' + key);
}
return CannotTransform;
}
@@ -1188,10 +1085,7 @@ function transformUpdateOperator({ __op, amount, objects }, flatten) {
case 'Increment':
if (typeof amount !== 'number') {
- throw new Parse.Error(
- Parse.Error.INVALID_JSON,
- 'incrementing must provide a number'
- );
+ throw new Parse.Error(Parse.Error.INVALID_JSON, 'incrementing must provide a number');
}
if (flatten) {
return amount;
@@ -1202,10 +1096,7 @@ function transformUpdateOperator({ __op, amount, objects }, flatten) {
case 'Add':
case 'AddUnique':
if (!(objects instanceof Array)) {
- throw new Parse.Error(
- Parse.Error.INVALID_JSON,
- 'objects to add must be an array'
- );
+ throw new Parse.Error(Parse.Error.INVALID_JSON, 'objects to add must be an array');
}
var toAdd = objects.map(transformInteriorAtom);
if (flatten) {
@@ -1220,10 +1111,7 @@ function transformUpdateOperator({ __op, amount, objects }, flatten) {
case 'Remove':
if (!(objects instanceof Array)) {
- throw new Parse.Error(
- Parse.Error.INVALID_JSON,
- 'objects to remove must be an array'
- );
+ throw new Parse.Error(Parse.Error.INVALID_JSON, 'objects to remove must be an array');
}
var toRemove = objects.map(transformInteriorAtom);
if (flatten) {
@@ -1379,15 +1267,11 @@ const mongoObjectToParseObject = (className, mongoObject, schema) => {
break;
case 'updatedAt':
case '_updated_at':
- restObject['updatedAt'] = Parse._encode(
- new Date(mongoObject[key])
- ).iso;
+ restObject['updatedAt'] = Parse._encode(new Date(mongoObject[key])).iso;
break;
case 'createdAt':
case '_created_at':
- restObject['createdAt'] = Parse._encode(
- new Date(mongoObject[key])
- ).iso;
+ restObject['createdAt'] = Parse._encode(new Date(mongoObject[key])).iso;
break;
case 'expiresAt':
case '_expiresAt':
@@ -1395,9 +1279,7 @@ const mongoObjectToParseObject = (className, mongoObject, schema) => {
break;
case 'lastUsed':
case '_last_used':
- restObject['lastUsed'] = Parse._encode(
- new Date(mongoObject[key])
- ).iso;
+ restObject['lastUsed'] = Parse._encode(new Date(mongoObject[key])).iso;
break;
case 'timesUsed':
case 'times_used':
@@ -1445,11 +1327,7 @@ const mongoObjectToParseObject = (className, mongoObject, schema) => {
if (mongoObject[key] === null) {
break;
}
- restObject[newKey] = transformPointerString(
- schema,
- newKey,
- mongoObject[key]
- );
+ restObject[newKey] = transformPointerString(schema, newKey, mongoObject[key]);
break;
} else if (key[0] == '_' && key != '__type') {
throw 'bad key in untransform: ' + key;
@@ -1488,9 +1366,7 @@ const mongoObjectToParseObject = (className, mongoObject, schema) => {
break;
}
}
- restObject[key] = nestedMongoObjectToNestedParseObject(
- mongoObject[key]
- );
+ restObject[key] = nestedMongoObjectToNestedParseObject(mongoObject[key]);
}
}
@@ -1518,16 +1394,12 @@ var DateCoder = {
},
isValidJSON(value) {
- return (
- typeof value === 'object' && value !== null && value.__type === 'Date'
- );
+ return typeof value === 'object' && value !== null && value.__type === 'Date';
},
};
var BytesCoder = {
- base64Pattern: new RegExp(
- '^(?:[A-Za-z0-9+/]{4})*(?:[A-Za-z0-9+/]{2}==|[A-Za-z0-9+/]{3}=)?$'
- ),
+ base64Pattern: new RegExp('^(?:[A-Za-z0-9+/]{4})*(?:[A-Za-z0-9+/]{2}==|[A-Za-z0-9+/]{3}=)?$'),
isBase64Value(object) {
if (typeof object !== 'string') {
return false;
@@ -1557,9 +1429,7 @@ var BytesCoder = {
},
isValidJSON(value) {
- return (
- typeof value === 'object' && value !== null && value.__type === 'Bytes'
- );
+ return typeof value === 'object' && value !== null && value.__type === 'Bytes';
},
};
@@ -1581,9 +1451,7 @@ var GeoPointCoder = {
},
isValidJSON(value) {
- return (
- typeof value === 'object' && value !== null && value.__type === 'GeoPoint'
- );
+ return typeof value === 'object' && value !== null && value.__type === 'GeoPoint';
},
};
@@ -1648,9 +1516,7 @@ var PolygonCoder = {
},
isValidJSON(value) {
- return (
- typeof value === 'object' && value !== null && value.__type === 'Polygon'
- );
+ return typeof value === 'object' && value !== null && value.__type === 'Polygon';
},
};
@@ -1671,9 +1537,7 @@ var FileCoder = {
},
isValidJSON(value) {
- return (
- typeof value === 'object' && value !== null && value.__type === 'File'
- );
+ return typeof value === 'object' && value !== null && value.__type === 'File';
},
};
diff --git a/src/Adapters/Storage/Postgres/PostgresConfigParser.js b/src/Adapters/Storage/Postgres/PostgresConfigParser.js
index da17142777..170e76282a 100644
--- a/src/Adapters/Storage/Postgres/PostgresConfigParser.js
+++ b/src/Adapters/Storage/Postgres/PostgresConfigParser.js
@@ -9,9 +9,7 @@ function getDatabaseOptionsFromURI(uri) {
databaseOptions.host = parsedURI.hostname || 'localhost';
databaseOptions.port = parsedURI.port ? parseInt(parsedURI.port) : 5432;
- databaseOptions.database = parsedURI.pathname
- ? parsedURI.pathname.substr(1)
- : undefined;
+ databaseOptions.database = parsedURI.pathname ? parsedURI.pathname.substr(1) : undefined;
databaseOptions.user = authParts.length > 0 ? authParts[0] : '';
databaseOptions.password = authParts.length > 1 ? authParts[1] : '';
@@ -55,14 +53,11 @@ function getDatabaseOptionsFromURI(uri) {
}
databaseOptions.binary =
- queryParams.binary && queryParams.binary.toLowerCase() === 'true'
- ? true
- : false;
+ queryParams.binary && queryParams.binary.toLowerCase() === 'true' ? true : false;
databaseOptions.client_encoding = queryParams.client_encoding;
databaseOptions.application_name = queryParams.application_name;
- databaseOptions.fallback_application_name =
- queryParams.fallback_application_name;
+ databaseOptions.fallback_application_name = queryParams.fallback_application_name;
if (queryParams.poolSize) {
databaseOptions.poolSize = parseInt(queryParams.poolSize) || 10;
@@ -77,8 +72,7 @@ function getDatabaseOptionsFromURI(uri) {
databaseOptions.idleTimeoutMillis = parseInt(queryParams.idleTimeoutMillis);
}
if (queryParams.keepAlive) {
- databaseOptions.keepAlive =
- queryParams.keepAlive.toLowerCase() === 'true' ? true : false;
+ databaseOptions.keepAlive = queryParams.keepAlive.toLowerCase() === 'true' ? true : false;
}
return databaseOptions;
diff --git a/src/Adapters/Storage/Postgres/PostgresStorageAdapter.js b/src/Adapters/Storage/Postgres/PostgresStorageAdapter.js
index aa2ddf3f40..9fbee1fe38 100644
--- a/src/Adapters/Storage/Postgres/PostgresStorageAdapter.js
+++ b/src/Adapters/Storage/Postgres/PostgresStorageAdapter.js
@@ -253,12 +253,7 @@ interface WhereClause {
sorts: Array;
}
-const buildWhereClause = ({
- schema,
- query,
- index,
- caseInsensitive,
-}): WhereClause => {
+const buildWhereClause = ({ schema, query, index, caseInsensitive }): WhereClause => {
const patterns = [];
let values = [];
const sorts = [];
@@ -266,9 +261,7 @@ const buildWhereClause = ({
schema = toPostgresSchema(schema);
for (const fieldName in query) {
const isArrayField =
- schema.fields &&
- schema.fields[fieldName] &&
- schema.fields[fieldName].type === 'Array';
+ schema.fields && schema.fields[fieldName] && schema.fields[fieldName].type === 'Array';
const initialPatternsLength = patterns.length;
const fieldValue = query[fieldName];
@@ -284,10 +277,7 @@ const buildWhereClause = ({
if (authDataMatch) {
// TODO: Handle querying by _auth_data_provider, authData is stored in authData field
continue;
- } else if (
- caseInsensitive &&
- (fieldName === 'username' || fieldName === 'email')
- ) {
+ } else if (caseInsensitive && (fieldName === 'username' || fieldName === 'email')) {
patterns.push(`LOWER($${index}:name) = LOWER($${index + 1})`);
values.push(fieldName, fieldValue);
index += 2;
@@ -324,10 +314,7 @@ const buildWhereClause = ({
} else if (typeof fieldValue === 'boolean') {
patterns.push(`$${index}:name = $${index + 1}`);
// Can't cast boolean to double precision
- if (
- schema.fields[fieldName] &&
- schema.fields[fieldName].type === 'Number'
- ) {
+ if (schema.fields[fieldName] && schema.fields[fieldName].type === 'Number') {
// Should always return zero results
const MAX_INT_PLUS_ONE = 9223372036854775808;
values.push(fieldName, MAX_INT_PLUS_ONE);
@@ -377,9 +364,7 @@ const buildWhereClause = ({
// if not null, we need to manually exclude null
if (fieldValue.$ne.__type === 'GeoPoint') {
patterns.push(
- `($${index}:name <> POINT($${index + 1}, $${
- index + 2
- }) OR $${index}:name IS NULL)`
+ `($${index}:name <> POINT($${index + 1}, $${index + 2}) OR $${index}:name IS NULL)`
);
} else {
if (fieldName.indexOf('.') >= 0) {
@@ -388,9 +373,7 @@ const buildWhereClause = ({
`(${constraintFieldName} <> $${index} OR ${constraintFieldName} IS NULL)`
);
} else {
- patterns.push(
- `($${index}:name <> $${index + 1} OR $${index}:name IS NULL)`
- );
+ patterns.push(`($${index}:name <> $${index + 1} OR $${index}:name IS NULL)`);
}
}
}
@@ -421,8 +404,7 @@ const buildWhereClause = ({
}
}
}
- const isInOrNin =
- Array.isArray(fieldValue.$in) || Array.isArray(fieldValue.$nin);
+ const isInOrNin = Array.isArray(fieldValue.$in) || Array.isArray(fieldValue.$nin);
if (
Array.isArray(fieldValue.$in) &&
isArrayField &&
@@ -441,9 +423,7 @@ const buildWhereClause = ({
}
});
if (allowNull) {
- patterns.push(
- `($${index}:name IS NULL OR $${index}:name && ARRAY[${inPatterns.join()}])`
- );
+ patterns.push(`($${index}:name IS NULL OR $${index}:name && ARRAY[${inPatterns.join()}])`);
} else {
patterns.push(`$${index}:name && ARRAY[${inPatterns.join()}]`);
}
@@ -453,9 +433,7 @@ const buildWhereClause = ({
const not = notIn ? ' NOT ' : '';
if (baseArray.length > 0) {
if (isArrayField) {
- patterns.push(
- `${not} array_contains($${index}:name, $${index + 1})`
- );
+ patterns.push(`${not} array_contains($${index}:name, $${index + 1})`);
values.push(fieldName, JSON.stringify(baseArray));
index += 2;
} else {
@@ -518,13 +496,9 @@ const buildWhereClause = ({
const value = processRegexPattern(fieldValue.$all[i].$regex);
fieldValue.$all[i] = value.substring(1) + '%';
}
- patterns.push(
- `array_contains_all_regex($${index}:name, $${index + 1}::jsonb)`
- );
+ patterns.push(`array_contains_all_regex($${index}:name, $${index + 1}::jsonb)`);
} else {
- patterns.push(
- `array_contains_all($${index}:name, $${index + 1}::jsonb)`
- );
+ patterns.push(`array_contains_all($${index}:name, $${index + 1}::jsonb)`);
}
values.push(fieldName, JSON.stringify(fieldValue.$all));
index += 2;
@@ -549,10 +523,7 @@ const buildWhereClause = ({
if (fieldValue.$containedBy) {
const arr = fieldValue.$containedBy;
if (!(arr instanceof Array)) {
- throw new Parse.Error(
- Parse.Error.INVALID_JSON,
- `bad $containedBy: should be an array`
- );
+ throw new Parse.Error(Parse.Error.INVALID_JSON, `bad $containedBy: should be an array`);
}
patterns.push(`$${index}:name <@ $${index + 1}::jsonb`);
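
For reference, jsonb `<@` is Postgres's "is contained by" operator, which matches `$containedBy` semantics: every element of the stored array must appear in the supplied array. With the placeholders interpolated:

```js
// Parse REST constraint:
//   { numbers: { $containedBy: [1, 2, 3] } }
// resulting SQL predicate:
//   "numbers" <@ '[1,2,3]'::jsonb
```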
@@ -564,22 +535,13 @@ const buildWhereClause = ({
const search = fieldValue.$text.$search;
let language = 'english';
if (typeof search !== 'object') {
- throw new Parse.Error(
- Parse.Error.INVALID_JSON,
- `bad $text: $search, should be object`
- );
+ throw new Parse.Error(Parse.Error.INVALID_JSON, `bad $text: $search, should be object`);
}
if (!search.$term || typeof search.$term !== 'string') {
- throw new Parse.Error(
- Parse.Error.INVALID_JSON,
- `bad $text: $term, should be string`
- );
+ throw new Parse.Error(Parse.Error.INVALID_JSON, `bad $text: $term, should be string`);
}
if (search.$language && typeof search.$language !== 'string') {
- throw new Parse.Error(
- Parse.Error.INVALID_JSON,
- `bad $text: $language, should be string`
- );
+ throw new Parse.Error(Parse.Error.INVALID_JSON, `bad $text: $language, should be string`);
} else if (search.$language) {
language = search.$language;
}
@@ -594,10 +556,7 @@ const buildWhereClause = ({
`bad $text: $caseSensitive not supported, please use $regex or create a separate lower case column.`
);
}
- if (
- search.$diacriticSensitive &&
- typeof search.$diacriticSensitive !== 'boolean'
- ) {
+ if (search.$diacriticSensitive && typeof search.$diacriticSensitive !== 'boolean') {
throw new Parse.Error(
Parse.Error.INVALID_JSON,
`bad $text: $diacriticSensitive, should be boolean`
@@ -609,9 +568,7 @@ const buildWhereClause = ({
);
}
patterns.push(
- `to_tsvector($${index}, $${index + 1}:name) @@ to_tsquery($${
- index + 2
- }, $${index + 3})`
+ `to_tsvector($${index}, $${index + 1}:name) @@ to_tsquery($${index + 2}, $${index + 3})`
);
values.push(language, fieldName, language, search.$term);
index += 4;
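
Interpolated, the pattern above is a standard Postgres full-text predicate. A worked example with the default language:

```js
// Parse REST constraint:
//   { description: { $text: { $search: { $term: 'coffee' } } } }
// resulting SQL predicate (language defaults to 'english'):
//   to_tsvector('english', "description") @@ to_tsquery('english', 'coffee')
```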
@@ -716,10 +673,7 @@ const buildWhereClause = ({
return `(${point[0]}, ${point[1]})`;
}
if (typeof point !== 'object' || point.__type !== 'GeoPoint') {
- throw new Parse.Error(
- Parse.Error.INVALID_JSON,
- 'bad $geoWithin value'
- );
+ throw new Parse.Error(Parse.Error.INVALID_JSON, 'bad $geoWithin value');
} else {
Parse.GeoPoint._validate(point.latitude, point.longitude);
}
@@ -830,9 +784,7 @@ const buildWhereClause = ({
if (initialPatternsLength === patterns.length) {
throw new Parse.Error(
Parse.Error.OPERATION_FORBIDDEN,
- `Postgres doesn't support this query type yet ${JSON.stringify(
- fieldValue
- )}`
+ `Postgres doesn't support this query type yet ${JSON.stringify(fieldValue)}`
);
}
}
@@ -846,16 +798,23 @@ export class PostgresStorageAdapter implements StorageAdapter {
// Private
_collectionPrefix: string;
_client: any;
+ _onchange: any;
_pgp: any;
+ _stream: any;
constructor({ uri, collectionPrefix = '', databaseOptions }: any) {
this._collectionPrefix = collectionPrefix;
const { client, pgp } = createClient(uri, databaseOptions);
this._client = client;
+ this._onchange = () => {};
this._pgp = pgp;
this.canSortOnJoinTables = false;
}
+ watch(callback: () => void): void {
+ this._onchange = callback;
+ }
+
// Note that analyze=true will run the query, executing INSERTS, DELETES, etc.
createExplainableQuery(query: string, analyze: boolean = false) {
if (analyze) {
@@ -872,6 +831,14 @@ export class PostgresStorageAdapter implements StorageAdapter {
this._client.$pool.end();
}
+ _notifySchemaChange() {
+ if (this._stream) {
+ this._stream.none('NOTIFY $1~, $2', ['schema.change', '']).catch(error => {
+ console.log('Failed to Notify:', error); // unlikely to ever happen
+ });
+ }
+ }
+
async _ensureSchemaCollectionExists(conn: any) {
conn = conn || this._client;
await conn
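
`NOTIFY` covers only the sending side; `_stream` is assumed to be a dedicated pg-promise connection whose setup falls outside this hunk. A hedged sketch of the matching `LISTEN` side, following pg-promise's documented direct-connection pattern:

```js
// Hedged sketch, not the adapter's actual wiring: subscribe to the
// 'schema.change' channel and forward notifications to the watch() callback.
const connection = await client.connect({ direct: true });
connection.client.on('notification', message => {
  if (message.channel === 'schema.change') {
    onchange(); // the callback registered through watch()
  }
});
await connection.none('LISTEN $1~', 'schema.change');
```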
@@ -903,17 +870,13 @@ export class PostgresStorageAdapter implements StorageAdapter {
const self = this;
await this._client.task('set-class-level-permissions', async t => {
await self._ensureSchemaCollectionExists(t);
- const values = [
- className,
- 'schema',
- 'classLevelPermissions',
- JSON.stringify(CLPs),
- ];
+ const values = [className, 'schema', 'classLevelPermissions', JSON.stringify(CLPs)];
await t.none(
`UPDATE "_SCHEMA" SET $2:name = json_object_set_key($2:name, $3::text, $4::jsonb) WHERE "className" = $1`,
values
);
});
+ this._notifySchemaChange();
}
async setIndexesWithSchemaFormat(
@@ -936,10 +899,7 @@ export class PostgresStorageAdapter implements StorageAdapter {
Object.keys(submittedIndexes).forEach(name => {
const field = submittedIndexes[name];
if (existingIndexes[name] && field.__op !== 'Delete') {
- throw new Parse.Error(
- Parse.Error.INVALID_QUERY,
- `Index ${name} exists, cannot update.`
- );
+ throw new Parse.Error(Parse.Error.INVALID_QUERY, `Index ${name} exists, cannot update.`);
}
if (!existingIndexes[name] && field.__op === 'Delete') {
throw new Parse.Error(
@@ -978,6 +938,7 @@ export class PostgresStorageAdapter implements StorageAdapter {
'UPDATE "_SCHEMA" SET $2:name = json_object_set_key($2:name, $3::text, $4::jsonb) WHERE "className" = $1',
[className, 'schema', 'indexes', JSON.stringify(existingIndexes)]
);
+ this._notifySchemaChange();
});
}
@@ -990,24 +951,12 @@ export class PostgresStorageAdapter implements StorageAdapter {
'INSERT INTO "_SCHEMA" ("className", "schema", "isParseClass") VALUES ($, $, true)',
{ className, schema }
);
- await this.setIndexesWithSchemaFormat(
- className,
- schema.indexes,
- {},
- schema.fields,
- t
- );
+ await this.setIndexesWithSchemaFormat(className, schema.indexes, {}, schema.fields, t);
return toParseSchema(schema);
})
.catch(err => {
- if (
- err.code === PostgresUniqueIndexViolationError &&
- err.detail.includes(className)
- ) {
- throw new Parse.Error(
- Parse.Error.DUPLICATE_VALUE,
- `Class ${className} already exists.`
- );
+ if (err.code === PostgresUniqueIndexViolationError && err.detail.includes(className)) {
+ throw new Parse.Error(Parse.Error.DUPLICATE_VALUE, `Class ${className} already exists.`);
}
throw err;
});
@@ -1093,24 +1042,14 @@ export class PostgresStorageAdapter implements StorageAdapter {
const newColumns = Object.keys(schema.fields)
.filter(item => columns.indexOf(item) === -1)
.map(fieldName =>
- self.addFieldIfNotExists(
- className,
- fieldName,
- schema.fields[fieldName],
- t
- )
+ self.addFieldIfNotExists(className, fieldName, schema.fields[fieldName], t)
);
await t.batch(newColumns);
});
}
- async addFieldIfNotExists(
- className: string,
- fieldName: string,
- type: any,
- conn: any
- ) {
+ async addFieldIfNotExists(className: string, fieldName: string, type: any, conn: any) {
// TODO: Must be revised for invalid logic...
debug('addFieldIfNotExists', { className, fieldName, type });
conn = conn || this._client;
@@ -1128,11 +1067,7 @@ export class PostgresStorageAdapter implements StorageAdapter {
);
} catch (error) {
if (error.code === PostgresRelationDoesNotExistError) {
- return self.createClass(
- className,
- { fields: { [fieldName]: type } },
- t
- );
+ return self.createClass(className, { fields: { [fieldName]: type } }, t);
}
if (error.code !== PostgresDuplicateColumnError) {
throw error;
@@ -1159,6 +1094,7 @@ export class PostgresStorageAdapter implements StorageAdapter {
'UPDATE "_SCHEMA" SET "schema"=jsonb_set("schema", $, $) WHERE "className"=$',
{ path, type, className }
);
+ this._notifySchemaChange();
}
});
}
@@ -1174,7 +1110,10 @@ export class PostgresStorageAdapter implements StorageAdapter {
},
];
return this._client
- .tx(t => t.none(this._pgp.helpers.concat(operations)))
+ .tx('delete-class', async t => {
+ await t.none(this._pgp.helpers.concat(operations));
+ this._notifySchemaChange();
+ })
.then(() => className.indexOf('_Join:') != 0); // resolves with false when _Join table
}
@@ -1234,11 +1173,7 @@ export class PostgresStorageAdapter implements StorageAdapter {
// may do so.
// Returns a Promise.
- async deleteFields(
- className: string,
- schema: SchemaType,
- fieldNames: string[]
- ): Promise {
+ async deleteFields(className: string, schema: SchemaType, fieldNames: string[]): Promise {
debug('deleteFields', className, fieldNames);
fieldNames = fieldNames.reduce((list: Array, fieldName: string) => {
const field = schema.fields[fieldName];
@@ -1257,16 +1192,14 @@ export class PostgresStorageAdapter implements StorageAdapter {
.join(', DROP COLUMN');
await this._client.tx('delete-fields', async t => {
- await t.none(
- 'UPDATE "_SCHEMA" SET "schema" = $ WHERE "className" = $',
- { schema, className }
- );
+ await t.none('UPDATE "_SCHEMA" SET "schema" = $ WHERE "className" = $', {
+ schema,
+ className,
+ });
if (values.length > 1) {
- await t.none(
- `ALTER TABLE $1:name DROP COLUMN IF EXISTS ${columns}`,
- values
- );
+ await t.none(`ALTER TABLE $1:name DROP COLUMN IF EXISTS ${columns}`, values);
}
+ this._notifySchemaChange();
});
}
@@ -1412,10 +1345,7 @@ export class PostgresStorageAdapter implements StorageAdapter {
const fieldName = columnsArray[index];
if (['_rperm', '_wperm'].indexOf(fieldName) >= 0) {
termination = '::text[]';
- } else if (
- schema.fields[fieldName] &&
- schema.fields[fieldName].type === 'Array'
- ) {
+ } else if (schema.fields[fieldName] && schema.fields[fieldName].type === 'Array') {
termination = '::jsonb';
}
return `$${index + 2 + columnsArray.length}${termination}`;
@@ -1427,18 +1357,13 @@ export class PostgresStorageAdapter implements StorageAdapter {
return `POINT($${l}, $${l + 1})`;
});
- const columnsPattern = columnsArray
- .map((col, index) => `$${index + 2}:name`)
- .join();
+ const columnsPattern = columnsArray.map((col, index) => `$${index + 2}:name`).join();
const valuesPattern = initialValues.concat(geoPointsInjects).join();
const qs = `INSERT INTO $1:name (${columnsPattern}) VALUES (${valuesPattern})`;
const values = [className, ...columnsArray, ...valuesArray];
debug(qs, values);
- const promise = (transactionalSession
- ? transactionalSession.t
- : this._client
- )
+ const promise = (transactionalSession ? transactionalSession.t : this._client)
.none(qs, values)
.then(() => ({ ops: [object] }))
.catch(error => {
@@ -1488,17 +1413,11 @@ export class PostgresStorageAdapter implements StorageAdapter {
}
const qs = `WITH deleted AS (DELETE FROM $1:name WHERE ${where.pattern} RETURNING *) SELECT count(*) FROM deleted`;
debug(qs, values);
- const promise = (transactionalSession
- ? transactionalSession.t
- : this._client
- )
+ const promise = (transactionalSession ? transactionalSession.t : this._client)
.one(qs, values, a => +a.count)
.then(count => {
if (count === 0) {
- throw new Parse.Error(
- Parse.Error.OBJECT_NOT_FOUND,
- 'Object not found.'
- );
+ throw new Parse.Error(Parse.Error.OBJECT_NOT_FOUND, 'Object not found.');
} else {
return count;
}
@@ -1523,13 +1442,9 @@ export class PostgresStorageAdapter implements StorageAdapter {
transactionalSession: ?any
): Promise {
debug('findOneAndUpdate', className, query, update);
- return this.updateObjectsByQuery(
- className,
- schema,
- query,
- update,
- transactionalSession
- ).then(val => val[0]);
+ return this.updateObjectsByQuery(className, schema, query, update, transactionalSession).then(
+ val => val[0]
+ );
}
// Apply the update to all objects that match the given Parse Query.
@@ -1592,39 +1507,28 @@ export class PostgresStorageAdapter implements StorageAdapter {
const fieldNameIndex = index;
index += 1;
values.push(fieldName);
- const update = Object.keys(fieldValue).reduce(
- (lastKey: string, key: string) => {
- const str = generate(
- lastKey,
- `$${index}::text`,
- `$${index + 1}::jsonb`
- );
- index += 2;
- let value = fieldValue[key];
- if (value) {
- if (value.__op === 'Delete') {
- value = null;
- } else {
- value = JSON.stringify(value);
- }
+ const update = Object.keys(fieldValue).reduce((lastKey: string, key: string) => {
+ const str = generate(lastKey, `$${index}::text`, `$${index + 1}::jsonb`);
+ index += 2;
+ let value = fieldValue[key];
+ if (value) {
+ if (value.__op === 'Delete') {
+ value = null;
+ } else {
+ value = JSON.stringify(value);
}
- values.push(key, value);
- return str;
- },
- lastKey
- );
+ }
+ values.push(key, value);
+ return str;
+ }, lastKey);
updatePatterns.push(`$${fieldNameIndex}:name = ${update}`);
} else if (fieldValue.__op === 'Increment') {
- updatePatterns.push(
- `$${index}:name = COALESCE($${index}:name, 0) + $${index + 1}`
- );
+ updatePatterns.push(`$${index}:name = COALESCE($${index}:name, 0) + $${index + 1}`);
values.push(fieldName, fieldValue.amount);
index += 2;
} else if (fieldValue.__op === 'Add') {
updatePatterns.push(
- `$${index}:name = array_add(COALESCE($${index}:name, '[]'::jsonb), $${
- index + 1
- }::jsonb)`
+ `$${index}:name = array_add(COALESCE($${index}:name, '[]'::jsonb), $${index + 1}::jsonb)`
);
values.push(fieldName, JSON.stringify(fieldValue.objects));
index += 2;
@@ -1678,9 +1582,7 @@ export class PostgresStorageAdapter implements StorageAdapter {
values.push(fieldName, toPostgresValue(fieldValue));
index += 2;
} else if (fieldValue.__type === 'GeoPoint') {
- updatePatterns.push(
- `$${index}:name = POINT($${index + 1}, $${index + 2})`
- );
+ updatePatterns.push(`$${index}:name = POINT($${index + 1}, $${index + 2})`);
values.push(fieldName, fieldValue.longitude, fieldValue.latitude);
index += 3;
} else if (fieldValue.__type === 'Polygon') {
@@ -1745,12 +1647,9 @@ export class PostgresStorageAdapter implements StorageAdapter {
})
.map(k => k.split('.')[1]);
- const deletePatterns = keysToDelete.reduce(
- (p: string, c: string, i: number) => {
- return p + ` - '$${index + 1 + i}:value'`;
- },
- ''
- );
+ const deletePatterns = keysToDelete.reduce((p: string, c: string, i: number) => {
+ return p + ` - '$${index + 1 + i}:value'`;
+ }, '');
// Override Object
let updateObject = "'{}'::jsonb";
@@ -1799,14 +1698,10 @@ export class PostgresStorageAdapter implements StorageAdapter {
});
values.push(...where.values);
- const whereClause =
- where.pattern.length > 0 ? `WHERE ${where.pattern}` : '';
+ const whereClause = where.pattern.length > 0 ? `WHERE ${where.pattern}` : '';
const qs = `UPDATE $1:name SET ${updatePatterns.join()} ${whereClause} RETURNING *`;
debug('update: ', qs, values);
- const promise = (transactionalSession
- ? transactionalSession.t
- : this._client
- ).any(qs, values);
+ const promise = (transactionalSession ? transactionalSession.t : this._client).any(qs, values);
if (transactionalSession) {
transactionalSession.batch.push(promise);
}
@@ -1823,23 +1718,12 @@ export class PostgresStorageAdapter implements StorageAdapter {
) {
debug('upsertOneObject', { className, query, update });
const createValue = Object.assign({}, query, update);
- return this.createObject(
- className,
- schema,
- createValue,
- transactionalSession
- ).catch(error => {
+ return this.createObject(className, schema, createValue, transactionalSession).catch(error => {
      // Ignore duplicate value errors, since this is an upsert
if (error.code !== Parse.Error.DUPLICATE_VALUE) {
throw error;
}
- return this.findOneAndUpdate(
- className,
- schema,
- query,
- update,
- transactionalSession
- );
+ return this.findOneAndUpdate(className, schema, query, update, transactionalSession);
});
}
@@ -1868,8 +1752,7 @@ export class PostgresStorageAdapter implements StorageAdapter {
});
values.push(...where.values);
- const wherePattern =
- where.pattern.length > 0 ? `WHERE ${where.pattern}` : '';
+ const wherePattern = where.pattern.length > 0 ? `WHERE ${where.pattern}` : '';
const limitPattern = hasLimit ? `LIMIT $${values.length + 1}` : '';
if (hasLimit) {
values.push(limit);
@@ -1892,10 +1775,7 @@ export class PostgresStorageAdapter implements StorageAdapter {
return `${transformKey} DESC`;
})
.join();
- sortPattern =
- sort !== undefined && Object.keys(sort).length > 0
- ? `ORDER BY ${sorting}`
- : '';
+ sortPattern = sort !== undefined && Object.keys(sort).length > 0 ? `ORDER BY ${sorting}` : '';
}
if (where.sorts && Object.keys((where.sorts: any)).length > 0) {
sortPattern = `ORDER BY ${where.sorts.join()}`;
@@ -1926,9 +1806,7 @@ export class PostgresStorageAdapter implements StorageAdapter {
}
const originalQuery = `SELECT ${columns} FROM $1:name ${wherePattern} ${sortPattern} ${limitPattern} ${skipPattern}`;
- const qs = explain
- ? this.createExplainableQuery(originalQuery)
- : originalQuery;
+ const qs = explain ? this.createExplainableQuery(originalQuery) : originalQuery;
debug(qs, values);
return this._client
.any(qs, values)
@@ -1943,9 +1821,7 @@ export class PostgresStorageAdapter implements StorageAdapter {
if (explain) {
return results;
}
- return results.map(object =>
- this.postgresObjectToParseObject(className, object, schema)
- );
+ return results.map(object => this.postgresObjectToParseObject(className, object, schema));
});
}
@@ -1977,10 +1853,7 @@ export class PostgresStorageAdapter implements StorageAdapter {
let coords = object[fieldName];
coords = coords.substr(2, coords.length - 4).split('),(');
coords = coords.map(point => {
- return [
- parseFloat(point.split(',')[1]),
- parseFloat(point.split(',')[0]),
- ];
+ return [parseFloat(point.split(',')[1]), parseFloat(point.split(',')[0])];
});
object[fieldName] = {
__type: 'Polygon',
@@ -2052,37 +1925,26 @@ export class PostgresStorageAdapter implements StorageAdapter {
  // As such, we shouldn't expose this function to users of Parse until we have an out-of-band
  // way of determining if a field is nullable. Undefined doesn't count against uniqueness,
// which is why we use sparse indexes.
- async ensureUniqueness(
- className: string,
- schema: SchemaType,
- fieldNames: string[]
- ) {
+ async ensureUniqueness(className: string, schema: SchemaType, fieldNames: string[]) {
const constraintName = `${className}_unique_${fieldNames.sort().join('_')}`;
- const constraintPatterns = fieldNames.map(
- (fieldName, index) => `$${index + 3}:name`
- );
+ const constraintPatterns = fieldNames.map((fieldName, index) => `$${index + 3}:name`);
const qs = `CREATE UNIQUE INDEX IF NOT EXISTS $2:name ON $1:name(${constraintPatterns.join()})`;
- return this._client
- .none(qs, [className, constraintName, ...fieldNames])
- .catch(error => {
- if (
- error.code === PostgresDuplicateRelationError &&
- error.message.includes(constraintName)
- ) {
- // Index already exists. Ignore error.
- } else if (
- error.code === PostgresUniqueIndexViolationError &&
- error.message.includes(constraintName)
- ) {
- // Cast the error into the proper parse error
- throw new Parse.Error(
- Parse.Error.DUPLICATE_VALUE,
- 'A duplicate value for a field with unique values was provided'
- );
- } else {
- throw error;
- }
- });
+ return this._client.none(qs, [className, constraintName, ...fieldNames]).catch(error => {
+ if (error.code === PostgresDuplicateRelationError && error.message.includes(constraintName)) {
+ // Index already exists. Ignore error.
+ } else if (
+ error.code === PostgresUniqueIndexViolationError &&
+ error.message.includes(constraintName)
+ ) {
+      // Cast the error into the proper Parse error
+ throw new Parse.Error(
+ Parse.Error.DUPLICATE_VALUE,
+ 'A duplicate value for a field with unique values was provided'
+ );
+ } else {
+ throw error;
+ }
+ });
}
// Executes a count.
@@ -2103,15 +1965,13 @@ export class PostgresStorageAdapter implements StorageAdapter {
});
values.push(...where.values);
- const wherePattern =
- where.pattern.length > 0 ? `WHERE ${where.pattern}` : '';
+ const wherePattern = where.pattern.length > 0 ? `WHERE ${where.pattern}` : '';
let qs = '';
if (where.pattern.length > 0 || !estimate) {
qs = `SELECT count(*) FROM $1:name ${wherePattern}`;
} else {
- qs =
- 'SELECT reltuples AS approximate_row_count FROM pg_class WHERE relname = $1';
+ qs = 'SELECT reltuples AS approximate_row_count FROM pg_class WHERE relname = $1';
}
return this._client
@@ -2130,12 +1990,7 @@ export class PostgresStorageAdapter implements StorageAdapter {
});
}
- async distinct(
- className: string,
- schema: SchemaType,
- query: QueryType,
- fieldName: string
- ) {
+ async distinct(className: string, schema: SchemaType, query: QueryType, fieldName: string) {
debug('distinct', className, query);
let field = fieldName;
let column = fieldName;
@@ -2145,13 +2000,9 @@ export class PostgresStorageAdapter implements StorageAdapter {
column = fieldName.split('.')[0];
}
const isArrayField =
- schema.fields &&
- schema.fields[fieldName] &&
- schema.fields[fieldName].type === 'Array';
+ schema.fields && schema.fields[fieldName] && schema.fields[fieldName].type === 'Array';
const isPointerField =
- schema.fields &&
- schema.fields[fieldName] &&
- schema.fields[fieldName].type === 'Pointer';
+ schema.fields && schema.fields[fieldName] && schema.fields[fieldName].type === 'Pointer';
const values = [field, column, className];
const where = buildWhereClause({
schema,
@@ -2161,8 +2012,7 @@ export class PostgresStorageAdapter implements StorageAdapter {
});
values.push(...where.values);
- const wherePattern =
- where.pattern.length > 0 ? `WHERE ${where.pattern}` : '';
+ const wherePattern = where.pattern.length > 0 ? `WHERE ${where.pattern}` : '';
const transformer = isArrayField ? 'jsonb_array_elements' : 'ON';
let qs = `SELECT DISTINCT ${transformer}($1:name) $2:name FROM $3:name ${wherePattern}`;
if (isNested) {
@@ -2195,9 +2045,7 @@ export class PostgresStorageAdapter implements StorageAdapter {
return results.map(object => object[column][child]);
})
.then(results =>
- results.map(object =>
- this.postgresObjectToParseObject(className, object, schema)
- )
+ results.map(object => this.postgresObjectToParseObject(className, object, schema))
);
}
@@ -2235,11 +2083,7 @@ export class PostgresStorageAdapter implements StorageAdapter {
index += 1;
continue;
}
- if (
- field === '_id' &&
- typeof value === 'object' &&
- Object.keys(value).length !== 0
- ) {
+ if (field === '_id' && typeof value === 'object' && Object.keys(value).length !== 0) {
groupValues = value;
const groupByFields = [];
for (const alias in value) {
@@ -2261,9 +2105,7 @@ export class PostgresStorageAdapter implements StorageAdapter {
columns.push(
`EXTRACT(${
mongoAggregateToPostgres[operation]
- } FROM $${index}:name AT TIME ZONE 'UTC') AS $${
- index + 1
- }:name`
+ } FROM $${index}:name AT TIME ZONE 'UTC') AS $${index + 1}:name`
);
values.push(source, alias);
index += 2;
@@ -2323,10 +2165,7 @@ export class PostgresStorageAdapter implements StorageAdapter {
}
if (stage.$match) {
const patterns = [];
- const orOrAnd = Object.prototype.hasOwnProperty.call(
- stage.$match,
- '$or'
- )
+ const orOrAnd = Object.prototype.hasOwnProperty.call(stage.$match, '$or')
? ' OR '
: ' AND ';
@@ -2345,9 +2184,7 @@ export class PostgresStorageAdapter implements StorageAdapter {
Object.keys(ParseToPosgresComparator).forEach(cmp => {
if (value[cmp]) {
const pgComparator = ParseToPosgresComparator[cmp];
- matchPatterns.push(
- `$${index}:name ${pgComparator} $${index + 1}`
- );
+ matchPatterns.push(`$${index}:name ${pgComparator} $${index + 1}`);
values.push(field, toPostgresValue(value[cmp]));
index += 2;
}
@@ -2355,18 +2192,13 @@ export class PostgresStorageAdapter implements StorageAdapter {
if (matchPatterns.length > 0) {
patterns.push(`(${matchPatterns.join(' AND ')})`);
}
- if (
- schema.fields[field] &&
- schema.fields[field].type &&
- matchPatterns.length === 0
- ) {
+ if (schema.fields[field] && schema.fields[field].type && matchPatterns.length === 0) {
patterns.push(`$${index}:name = $${index + 1}`);
values.push(field, value);
index += 2;
}
}
- wherePattern =
- patterns.length > 0 ? `WHERE ${patterns.join(` ${orOrAnd} `)}` : '';
+ wherePattern = patterns.length > 0 ? `WHERE ${patterns.join(` ${orOrAnd} `)}` : '';
}
if (stage.$limit) {
limitPattern = `LIMIT $${index}`;
@@ -2390,8 +2222,7 @@ export class PostgresStorageAdapter implements StorageAdapter {
})
.join();
values.push(...keys);
- sortPattern =
- sort !== undefined && sorting.length > 0 ? `ORDER BY ${sorting}` : '';
+ sortPattern = sort !== undefined && sorting.length > 0 ? `ORDER BY ${sorting}` : '';
}
}
@@ -2406,17 +2237,13 @@ export class PostgresStorageAdapter implements StorageAdapter {
const originalQuery = `SELECT ${columns
.filter(Boolean)
.join()} FROM $1:name ${wherePattern} ${skipPattern} ${groupPattern} ${sortPattern} ${limitPattern}`;
- const qs = explain
- ? this.createExplainableQuery(originalQuery)
- : originalQuery;
+ const qs = explain ? this.createExplainableQuery(originalQuery) : originalQuery;
debug(qs, values);
return this._client.any(qs, values).then(a => {
if (explain) {
return a;
}
- const results = a.map(object =>
- this.postgresObjectToParseObject(className, object, schema)
- );
+ const results = a.map(object => this.postgresObjectToParseObject(className, object, schema));
results.forEach(result => {
if (!Object.prototype.hasOwnProperty.call(result, 'objectId')) {
result.objectId = null;
@@ -2437,6 +2264,11 @@ export class PostgresStorageAdapter implements StorageAdapter {
}
async performInitialization({ VolatileClassesSchemas }: any) {
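+    // Open one dedicated (direct) connection and LISTEN on the 'schema.change'
+    // channel. Presumably _onchange() (not shown in this diff) forwards the
+    // notification to the callback registered via watch(), so schema changes
+    // made elsewhere invalidate this instance's cached schema.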
+ if (!this._stream) {
+ this._stream = await this._client.connect({ direct: true });
+ this._stream.client.on('notification', () => this._onchange());
+ await this._stream.none('LISTEN $1~', 'schema.change');
+ }
// TODO: This method needs to be rewritten to make proper use of connections (@vitaly-t)
debug('performInitialization');
const promises = VolatileClassesSchemas.map(schema => {
@@ -2474,11 +2306,7 @@ export class PostgresStorageAdapter implements StorageAdapter {
});
}
- async createIndexes(
- className: string,
- indexes: any,
- conn: ?any
- ): Promise {
+ async createIndexes(className: string, indexes: any, conn: ?any): Promise {
return (conn || this._client).tx(t =>
t.batch(
indexes.map(i => {
@@ -2498,9 +2326,7 @@ export class PostgresStorageAdapter implements StorageAdapter {
type: any,
conn: ?any
): Promise {
- await (
- conn || this._client
- ).none('CREATE INDEX IF NOT EXISTS $1:name ON $2:name ($3:name)', [
+ await (conn || this._client).none('CREATE INDEX IF NOT EXISTS $1:name ON $2:name ($3:name)', [
fieldName,
className,
type,
@@ -2512,9 +2338,7 @@ export class PostgresStorageAdapter implements StorageAdapter {
query: 'DROP INDEX $1:name',
values: i,
}));
- await (conn || this._client).tx(t =>
- t.none(this._pgp.helpers.concat(queries))
- );
+ await (conn || this._client).tx(t => t.none(this._pgp.helpers.concat(queries)));
}
async getIndexes(className: string) {
@@ -2547,18 +2371,14 @@ export class PostgresStorageAdapter implements StorageAdapter {
}
commitTransactionalSession(transactionalSession: any): Promise {
- transactionalSession.resolve(
- transactionalSession.t.batch(transactionalSession.batch)
- );
+ transactionalSession.resolve(transactionalSession.t.batch(transactionalSession.batch));
return transactionalSession.result;
}
abortTransactionalSession(transactionalSession: any): Promise {
const result = transactionalSession.result.catch();
transactionalSession.batch.push(Promise.reject());
- transactionalSession.resolve(
- transactionalSession.t.batch(transactionalSession.batch)
- );
+ transactionalSession.resolve(transactionalSession.t.batch(transactionalSession.batch));
return result;
}
@@ -2575,41 +2395,34 @@ export class PostgresStorageAdapter implements StorageAdapter {
const indexNameOptions: Object =
indexName != null ? { name: indexName } : { name: defaultIndexName };
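+    // Indexing lower(field) with varchar_pattern_ops makes the index usable
+    // for case-insensitive lookups, including LIKE 'prefix%' pattern matching.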
const constraintPatterns = caseInsensitive
- ? fieldNames.map(
- (fieldName, index) => `lower($${index + 3}:name) varchar_pattern_ops`
- )
+ ? fieldNames.map((fieldName, index) => `lower($${index + 3}:name) varchar_pattern_ops`)
: fieldNames.map((fieldName, index) => `$${index + 3}:name`);
const qs = `CREATE INDEX IF NOT EXISTS $1:name ON $2:name (${constraintPatterns.join()})`;
- await conn
- .none(qs, [indexNameOptions.name, className, ...fieldNames])
- .catch(error => {
- if (
- error.code === PostgresDuplicateRelationError &&
- error.message.includes(indexNameOptions.name)
- ) {
- // Index already exists. Ignore error.
- } else if (
- error.code === PostgresUniqueIndexViolationError &&
- error.message.includes(indexNameOptions.name)
- ) {
- // Cast the error into the proper parse error
- throw new Parse.Error(
- Parse.Error.DUPLICATE_VALUE,
- 'A duplicate value for a field with unique values was provided'
- );
- } else {
- throw error;
- }
- });
+ await conn.none(qs, [indexNameOptions.name, className, ...fieldNames]).catch(error => {
+ if (
+ error.code === PostgresDuplicateRelationError &&
+ error.message.includes(indexNameOptions.name)
+ ) {
+ // Index already exists. Ignore error.
+ } else if (
+ error.code === PostgresUniqueIndexViolationError &&
+ error.message.includes(indexNameOptions.name)
+ ) {
+      // Cast the error into the proper Parse error
+ throw new Parse.Error(
+ Parse.Error.DUPLICATE_VALUE,
+ 'A duplicate value for a field with unique values was provided'
+ );
+ } else {
+ throw error;
+ }
+ });
}
}
function convertPolygonToSQL(polygon) {
if (polygon.length < 3) {
- throw new Parse.Error(
- Parse.Error.INVALID_JSON,
- `Polygon must have at least 3 values`
- );
+ throw new Parse.Error(Parse.Error.INVALID_JSON, `Polygon must have at least 3 values`);
}
if (
polygon[0][0] !== polygon[polygon.length - 1][0] ||
@@ -2757,9 +2570,7 @@ function literalizeRegexPart(s: string) {
var GeoPointCoder = {
isValidJSON(value) {
- return (
- typeof value === 'object' && value !== null && value.__type === 'GeoPoint'
- );
+ return typeof value === 'object' && value !== null && value.__type === 'GeoPoint';
},
};
diff --git a/src/Adapters/Storage/StorageAdapter.js b/src/Adapters/Storage/StorageAdapter.js
index 5139cc3248..d46265f64f 100644
--- a/src/Adapters/Storage/StorageAdapter.js
+++ b/src/Adapters/Storage/StorageAdapter.js
@@ -34,18 +34,10 @@ export interface StorageAdapter {
classExists(className: string): Promise;
setClassLevelPermissions(className: string, clps: any): Promise;
createClass(className: string, schema: SchemaType): Promise;
- addFieldIfNotExists(
- className: string,
- fieldName: string,
- type: any
- ): Promise;
+ addFieldIfNotExists(className: string, fieldName: string, type: any): Promise;
deleteClass(className: string): Promise;
deleteAllClasses(fast: boolean): Promise;
- deleteFields(
- className: string,
- schema: SchemaType,
- fieldNames: Array
- ): Promise;
+ deleteFields(className: string, schema: SchemaType, fieldNames: Array): Promise;
getAllClasses(): Promise;
getClass(className: string): Promise;
createObject(
@@ -95,11 +87,7 @@ export interface StorageAdapter {
caseSensitive?: boolean,
options?: Object
): Promise;
- ensureUniqueness(
- className: string,
- schema: SchemaType,
- fieldNames: Array
- ): Promise;
+ ensureUniqueness(className: string, schema: SchemaType, fieldNames: Array): Promise;
count(
className: string,
schema: SchemaType,
@@ -123,6 +111,7 @@ export interface StorageAdapter {
explain?: boolean
): Promise;
performInitialization(options: ?any): Promise;
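+  // Registers a callback that the adapter invokes when it detects an
+  // out-of-band schema change (see the Postgres LISTEN/NOTIFY setup above).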
+ watch(callback: () => void): void;
// Indexing
createIndexes(className: string, indexes: any, conn: ?any): Promise;
diff --git a/src/Config.js b/src/Config.js
index 5c64df180a..680c1c60e9 100644
--- a/src/Config.js
+++ b/src/Config.js
@@ -3,10 +3,12 @@
// mount is the URL for the root of the API; includes http, domain, etc.
import AppCache from './cache';
-import SchemaCache from './Controllers/SchemaCache';
import DatabaseController from './Controllers/DatabaseController';
import net from 'net';
-import { IdempotencyOptions } from './Options/Definitions';
+import {
+ IdempotencyOptions,
+ FileUploadOptions,
+} from './Options/Definitions';
function removeTrailingSlash(str) {
if (!str) {
@@ -28,12 +30,7 @@ export class Config {
config.applicationId = applicationId;
Object.keys(cacheInfo).forEach(key => {
if (key == 'databaseController') {
- const schemaCache = new SchemaCache(
- cacheInfo.cacheController,
- cacheInfo.schemaCacheTTL,
- cacheInfo.enableSingleSchemaCache
- );
- config.database = new DatabaseController(cacheInfo.databaseController.adapter, schemaCache);
+ config.database = new DatabaseController(cacheInfo.databaseController.adapter);
} else {
config[key] = cacheInfo[key];
}
@@ -71,6 +68,7 @@ export class Config {
allowHeaders,
idempotencyOptions,
emailVerifyTokenReuseIfValid,
+ fileUpload,
}) {
if (masterKey === readOnlyMasterKey) {
throw new Error('masterKey and readOnlyMasterKey should be different');
@@ -88,8 +86,8 @@ export class Config {
}
this.validateAccountLockoutPolicy(accountLockout);
-
this.validatePasswordPolicy(passwordPolicy);
+ this.validateFileUploadOptions(fileUpload);
if (typeof revokeSessionOnPasswordReset !== 'boolean') {
throw 'revokeSessionOnPasswordReset must be a boolean value';
@@ -245,6 +243,30 @@ export class Config {
}
}
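+  // Normalizes the fileUpload options block: flags left undefined fall back to
+  // the defaults in FileUploadOptions; any non-boolean value throws.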
+ static validateFileUploadOptions(fileUpload) {
+ if (!fileUpload) {
+ fileUpload = {};
+ }
+ if (typeof fileUpload !== 'object' || fileUpload instanceof Array) {
+ throw 'fileUpload must be an object value.';
+ }
+ if (fileUpload.enableForAnonymousUser === undefined) {
+ fileUpload.enableForAnonymousUser = FileUploadOptions.enableForAnonymousUser.default;
+ } else if (typeof fileUpload.enableForAnonymousUser !== 'boolean') {
+ throw 'fileUpload.enableForAnonymousUser must be a boolean value.';
+ }
+ if (fileUpload.enableForPublic === undefined) {
+ fileUpload.enableForPublic = FileUploadOptions.enableForPublic.default;
+ } else if (typeof fileUpload.enableForPublic !== 'boolean') {
+ throw 'fileUpload.enableForPublic must be a boolean value.';
+ }
+ if (fileUpload.enableForAuthenticatedUser === undefined) {
+ fileUpload.enableForAuthenticatedUser = FileUploadOptions.enableForAuthenticatedUser.default;
+ } else if (typeof fileUpload.enableForAuthenticatedUser !== 'boolean') {
+ throw 'fileUpload.enableForAuthenticatedUser must be a boolean value.';
+ }
+ }
+
static validateMasterKeyIps(masterKeyIps) {
for (const ip of masterKeyIps) {
if (!net.isIP(ip)) {
diff --git a/src/Controllers/DatabaseController.js b/src/Controllers/DatabaseController.js
index 21ba2e9477..e974096139 100644
--- a/src/Controllers/DatabaseController.js
+++ b/src/Controllers/DatabaseController.js
@@ -398,14 +398,18 @@ class DatabaseController {
schemaPromise: ?Promise;
_transactionalSession: ?any;
- constructor(adapter: StorageAdapter, schemaCache: any) {
+ constructor(adapter: StorageAdapter) {
this.adapter = adapter;
- this.schemaCache = schemaCache;
// We don't want a mutable this.schema, because then you could have
// one request that uses different schemas for different parts of
// it. Instead, use loadSchema to get a schema.
this.schemaPromise = null;
this._transactionalSession = null;
+ // Used for Testing only
+ this.schemaCache = {
+ clear: () => SchemaController.clearSingleSchemaCache(),
+ get: () => SchemaController.getSingleSchemaCache(),
+ };
}
collectionExists(className: string): Promise {
@@ -434,7 +438,7 @@ class DatabaseController {
if (this.schemaPromise != null) {
return this.schemaPromise;
}
- this.schemaPromise = SchemaController.load(this.adapter, this.schemaCache, options);
+ this.schemaPromise = SchemaController.load(this.adapter, options);
this.schemaPromise.then(
() => delete this.schemaPromise,
() => delete this.schemaPromise
@@ -916,7 +920,8 @@ class DatabaseController {
*/
deleteEverything(fast: boolean = false): Promise {
this.schemaPromise = null;
- return Promise.all([this.adapter.deleteAllClasses(fast), this.schemaCache.clear()]);
+ this.schemaCache.clear();
+ return this.adapter.deleteAllClasses(fast);
}
// Returns a promise for a list of related ids given an owning id.
@@ -1325,8 +1330,12 @@ class DatabaseController {
}
deleteSchema(className: string): Promise {
+ let schemaController;
return this.loadSchema({ clearCache: true })
- .then(schemaController => schemaController.getOneSchema(className, true))
+ .then(s => {
+ schemaController = s;
+ return schemaController.getOneSchema(className, true);
+ })
.catch(error => {
if (error === undefined) {
return { fields: {} };
@@ -1356,7 +1365,10 @@ class DatabaseController {
this.adapter.deleteClass(joinTableName(className, name))
)
).then(() => {
- return;
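+          // Evict the deleted class from the shared schema cache, then reload
+          // so this controller's schemaData no longer lists it.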
+ schemaController._cache.allClasses = (
+ schemaController._cache.allClasses || []
+ ).filter(cached => cached.className !== className);
+ return schemaController.reloadData();
});
} else {
return Promise.resolve();
@@ -1365,6 +1377,83 @@ class DatabaseController {
});
}
+  // This helps to create intermediate objects for simpler comparison of
+  // key-value pairs used in query objects. Each key-value pair is represented
+  // as a JSON-like "key":value string.
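+  // e.g. objectToEntriesStrings({ a: 1, b: { c: 2 } }) returns ['"a":1', '"b":{"c":2}']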
+ objectToEntriesStrings(query: any): Array {
+ return Object.entries(query).map(a => a.map(s => JSON.stringify(s)).join(':'));
+ }
+
+ // Naive logic reducer for OR operations meant to be used only for pointer permissions.
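+  // e.g. { $or: [{ a: 1 }, { a: 1, b: 2 }] } reduces to { a: 1 }, since the
+  // second query can only match objects the first one already matches.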
+ reduceOrOperation(query: { $or: Array }): any {
+ if (!query.$or) {
+ return query;
+ }
+ const queries = query.$or.map(q => this.objectToEntriesStrings(q));
+ let repeat = false;
+ do {
+ repeat = false;
+ for (let i = 0; i < queries.length - 1; i++) {
+ for (let j = i + 1; j < queries.length; j++) {
+ const [shorter, longer] = queries[i].length > queries[j].length ? [j, i] : [i, j];
+ const foundEntries = queries[shorter].reduce(
+ (acc, entry) => acc + (queries[longer].includes(entry) ? 1 : 0),
+ 0
+ );
+ const shorterEntries = queries[shorter].length;
+ if (foundEntries === shorterEntries) {
+ // If the shorter query is completely contained in the longer one, we can strike
+ // out the longer query.
+ query.$or.splice(longer, 1);
+ queries.splice(longer, 1);
+ repeat = true;
+ break;
+ }
+ }
+ }
+ } while (repeat);
+ if (query.$or.length === 1) {
+ query = { ...query, ...query.$or[0] };
+ delete query.$or;
+ }
+ return query;
+ }
+
+ // Naive logic reducer for AND operations meant to be used only for pointer permissions.
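+  // e.g. { $and: [{ a: 1 }, { a: 1, b: 2 }] } reduces to { a: 1, b: 2 }, since
+  // the stricter second query already implies the first.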
+ reduceAndOperation(query: { $and: Array }): any {
+ if (!query.$and) {
+ return query;
+ }
+ const queries = query.$and.map(q => this.objectToEntriesStrings(q));
+ let repeat = false;
+ do {
+ repeat = false;
+ for (let i = 0; i < queries.length - 1; i++) {
+ for (let j = i + 1; j < queries.length; j++) {
+ const [shorter, longer] = queries[i].length > queries[j].length ? [j, i] : [i, j];
+ const foundEntries = queries[shorter].reduce(
+ (acc, entry) => acc + (queries[longer].includes(entry) ? 1 : 0),
+ 0
+ );
+ const shorterEntries = queries[shorter].length;
+ if (foundEntries === shorterEntries) {
+ // If the shorter query is completely contained in the longer one, we can strike
+ // out the shorter query.
+ query.$and.splice(shorter, 1);
+ queries.splice(shorter, 1);
+ repeat = true;
+ break;
+ }
+ }
+ }
+ } while (repeat);
+ if (query.$and.length === 1) {
+ query = { ...query, ...query.$and[0] };
+ delete query.$and;
+ }
+ return query;
+ }
+
// Constraints query using CLP's pointer permissions (PP) if any.
  // 1. Extract the user id from the caller's ACL group;
  // 2. Extract a list of field names that are PP for target collection and operation;
@@ -1448,13 +1537,13 @@ class DatabaseController {
}
// if we already have a constraint on the key, use the $and
if (Object.prototype.hasOwnProperty.call(query, key)) {
- return { $and: [queryClause, query] };
+ return this.reduceAndOperation({ $and: [queryClause, query] });
}
      // otherwise just add the constraint
return Object.assign({}, query, queryClause);
});
- return queries.length === 1 ? queries[0] : { $or: queries };
+ return queries.length === 1 ? queries[0] : this.reduceOrOperation({ $or: queries });
} else {
return query;
}
diff --git a/src/Controllers/SchemaCache.js b/src/Controllers/SchemaCache.js
deleted file mode 100644
index 9fe79daa93..0000000000
--- a/src/Controllers/SchemaCache.js
+++ /dev/null
@@ -1,55 +0,0 @@
-const MAIN_SCHEMA = '__MAIN_SCHEMA';
-const SCHEMA_CACHE_PREFIX = '__SCHEMA';
-
-import { randomString } from '../cryptoUtils';
-import defaults from '../defaults';
-
-export default class SchemaCache {
- cache: Object;
-
- constructor(cacheController, ttl = defaults.schemaCacheTTL, singleCache = false) {
- this.ttl = ttl;
- if (typeof ttl == 'string') {
- this.ttl = parseInt(ttl);
- }
- this.cache = cacheController;
- this.prefix = SCHEMA_CACHE_PREFIX;
- if (!singleCache) {
- this.prefix += randomString(20);
- }
- }
-
- getAllClasses() {
- if (!this.ttl) {
- return Promise.resolve(null);
- }
- return this.cache.get(this.prefix + MAIN_SCHEMA);
- }
-
- setAllClasses(schema) {
- if (!this.ttl) {
- return Promise.resolve(null);
- }
- return this.cache.put(this.prefix + MAIN_SCHEMA, schema);
- }
-
- getOneSchema(className) {
- if (!this.ttl) {
- return Promise.resolve(null);
- }
- return this.cache.get(this.prefix + MAIN_SCHEMA).then(cachedSchemas => {
- cachedSchemas = cachedSchemas || [];
- const schema = cachedSchemas.find(cachedSchema => {
- return cachedSchema.className === className;
- });
- if (schema) {
- return Promise.resolve(schema);
- }
- return Promise.resolve(null);
- });
- }
-
- clear() {
- return this.cache.del(this.prefix + MAIN_SCHEMA);
- }
-}
diff --git a/src/Controllers/SchemaController.js b/src/Controllers/SchemaController.js
index a5e7d2838a..3f7b19ca42 100644
--- a/src/Controllers/SchemaController.js
+++ b/src/Controllers/SchemaController.js
@@ -687,10 +687,10 @@ export default class SchemaController {
protectedFields: any;
userIdRegEx: RegExp;
- constructor(databaseAdapter: StorageAdapter, schemaCache: any) {
+ constructor(databaseAdapter: StorageAdapter, singleSchemaCache: Object) {
this._dbAdapter = databaseAdapter;
- this._cache = schemaCache;
- this.schemaData = new SchemaData();
+ this._cache = singleSchemaCache;
+ this.schemaData = new SchemaData(this._cache.allClasses || [], this.protectedFields);
this.protectedFields = Config.get(Parse.applicationId).protectedFields;
const customIds = Config.get(Parse.applicationId).allowCustomObjectId;
@@ -699,6 +699,10 @@ export default class SchemaController {
const autoIdRegEx = /^[a-zA-Z0-9]{1,}$/;
this.userIdRegEx = customIds ? customIdRegEx : autoIdRegEx;
+
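+    // Reload (and clear) the shared schema cache whenever the storage adapter
+    // reports an out-of-band schema change.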
+ this._dbAdapter.watch(() => {
+ this.reloadData({ clearCache: true });
+ });
}
reloadData(options: LoadSchemaOptions = { clearCache: false }): Promise {
@@ -725,12 +729,10 @@ export default class SchemaController {
if (options.clearCache) {
return this.setAllClasses();
}
- return this._cache.getAllClasses().then(allClasses => {
- if (allClasses && allClasses.length) {
- return Promise.resolve(allClasses);
- }
- return this.setAllClasses();
- });
+ if (this._cache.allClasses && this._cache.allClasses.length) {
+ return Promise.resolve(this._cache.allClasses);
+ }
+ return this.setAllClasses();
}
  setAllClasses(): Promise {
@@ -738,11 +740,7 @@ export default class SchemaController {
.getAllClasses()
.then(allSchemas => allSchemas.map(injectDefaultSchema))
.then(allSchemas => {
- /* eslint-disable no-console */
- this._cache
- .setAllClasses(allSchemas)
- .catch(error => console.error('Error saving schema to cache:', error));
- /* eslint-enable no-console */
+ this._cache.allClasses = allSchemas;
return allSchemas;
});
}
@@ -752,32 +750,28 @@ export default class SchemaController {
allowVolatileClasses: boolean = false,
options: LoadSchemaOptions = { clearCache: false }
): Promise {
- let promise = Promise.resolve();
if (options.clearCache) {
- promise = this._cache.clear();
+ delete this._cache.allClasses;
}
- return promise.then(() => {
- if (allowVolatileClasses && volatileClasses.indexOf(className) > -1) {
- const data = this.schemaData[className];
- return Promise.resolve({
- className,
- fields: data.fields,
- classLevelPermissions: data.classLevelPermissions,
- indexes: data.indexes,
- });
- }
- return this._cache.getOneSchema(className).then(cached => {
- if (cached && !options.clearCache) {
- return Promise.resolve(cached);
- }
- return this.setAllClasses().then(allSchemas => {
- const oneSchema = allSchemas.find(schema => schema.className === className);
- if (!oneSchema) {
- return Promise.reject(undefined);
- }
- return oneSchema;
- });
+ if (allowVolatileClasses && volatileClasses.indexOf(className) > -1) {
+ const data = this.schemaData[className];
+ return Promise.resolve({
+ className,
+ fields: data.fields,
+ classLevelPermissions: data.classLevelPermissions,
+ indexes: data.indexes,
});
+ }
+ const cached = (this._cache.allClasses || []).find(schema => schema.className === className);
+ if (cached && !options.clearCache) {
+ return Promise.resolve(cached);
+ }
+ return this.setAllClasses().then(allSchemas => {
+ const oneSchema = allSchemas.find(schema => schema.className === className);
+ if (!oneSchema) {
+ return Promise.reject(undefined);
+ }
+ return oneSchema;
});
}
@@ -814,7 +808,19 @@ export default class SchemaController {
className,
})
)
- .then(convertAdapterSchemaToParseSchema)
+ .then(adapterSchema => {
+ const parseSchema = convertAdapterSchemaToParseSchema(adapterSchema);
+ this._cache.allClasses = this._cache.allClasses || [];
+ const index = this._cache.allClasses.findIndex(
+ cached => cached.className === parseSchema.className
+ );
+ if (index >= 0) {
+ this._cache.allClasses[index] = parseSchema;
+ } else {
+ this._cache.allClasses.push(parseSchema);
+ }
+ return parseSchema;
+ })
.catch(error => {
if (error && error.code === Parse.Error.DUPLICATE_VALUE) {
throw new Parse.Error(
@@ -940,7 +946,7 @@ export default class SchemaController {
return (
this.addClassIfNotExists(className)
// The schema update succeeded. Reload the schema
- .then(() => this.reloadData({ clearCache: true }))
+ .then(() => this.reloadData())
.catch(() => {
// The schema update failed. This can be okay - it might
// have failed because there's a race condition and a different
@@ -1050,12 +1056,16 @@ export default class SchemaController {
}
// Sets the Class-level permissions for a given className, which must exist.
- setPermissions(className: string, perms: any, newSchema: SchemaFields) {
+ async setPermissions(className: string, perms: any, newSchema: SchemaFields) {
if (typeof perms === 'undefined') {
return Promise.resolve();
}
validateCLP(perms, newSchema, this.userIdRegEx);
- return this._dbAdapter.setClassLevelPermissions(className, perms);
+ await this._dbAdapter.setClassLevelPermissions(className, perms);
+ const cached = (this._cache.allClasses || []).find(schema => schema.className === className);
+ if (cached) {
+ cached.classLevelPermissions = perms;
+ }
}
// Returns a promise that resolves successfully to the new schema
@@ -1122,6 +1132,12 @@ export default class SchemaController {
return Promise.resolve();
})
.then(() => {
+ const cached = (this._cache.allClasses || []).find(
+ schema => schema.className === className
+ );
+ if (cached && !cached.fields[fieldName]) {
+ cached.fields[fieldName] = type;
+ }
return {
className,
fieldName,
@@ -1203,7 +1219,9 @@ export default class SchemaController {
);
});
})
- .then(() => this._cache.clear());
+ .then(() => {
+ delete this._cache.allClasses;
+ });
}
// Validates an object provided in REST format.
@@ -1212,7 +1230,7 @@ export default class SchemaController {
async validateObject(className: string, object: any, query: any) {
let geocount = 0;
const schema = await this.enforceClassExists(className);
- const promises = [];
+ const results = [];
for (const fieldName in object) {
if (object[fieldName] === undefined) {
@@ -1239,13 +1257,12 @@ export default class SchemaController {
// Every object has ACL implicitly.
continue;
}
- promises.push(schema.enforceFieldExists(className, fieldName, expected));
+ results.push(await schema.enforceFieldExists(className, fieldName, expected));
}
- const results = await Promise.all(promises);
const enforceFields = results.filter(result => !!result);
if (enforceFields.length !== 0) {
- await this.reloadData({ clearCache: true });
+ await this.reloadData();
}
this.ensureFields(enforceFields);
@@ -1412,16 +1429,20 @@ export default class SchemaController {
}
}
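+// Process-wide, in-memory schema cache shared by every SchemaController
+// created through load() below; cleared via clearSingleSchemaCache().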
+const singleSchemaCache = {};
+
// Returns a promise for a new Schema.
-const load = (
- dbAdapter: StorageAdapter,
- schemaCache: any,
- options: any
-): Promise => {
- const schema = new SchemaController(dbAdapter, schemaCache);
+const load = (dbAdapter: StorageAdapter, options: any): Promise => {
+ const schema = new SchemaController(dbAdapter, singleSchemaCache);
return schema.reloadData(options).then(() => schema);
};
+const clearSingleSchemaCache = () => {
+ delete singleSchemaCache.allClasses;
+};
+
+const getSingleSchemaCache = () => singleSchemaCache.allClasses;
+
// Builds a new schema (in schema API response format) out of an
// existing mongo schema + a schemas API put request. This response
// does not include the default fields, as it is intended to be passed
@@ -1581,6 +1602,8 @@ function getObjectType(obj): ?(SchemaField | string) {
export {
load,
+ clearSingleSchemaCache,
+ getSingleSchemaCache,
classNameIsValid,
fieldNameIsValid,
invalidClassNameMessage,
diff --git a/src/Controllers/index.js b/src/Controllers/index.js
index 1e4765b666..e02269ad04 100644
--- a/src/Controllers/index.js
+++ b/src/Controllers/index.js
@@ -15,7 +15,6 @@ import { PushController } from './PushController';
import { PushQueue } from '../Push/PushQueue';
import { PushWorker } from '../Push/PushWorker';
import DatabaseController from './DatabaseController';
-import SchemaCache from './SchemaCache';
// Adapters
import { GridFSBucketAdapter } from '../Adapters/Files/GridFSBucketAdapter';
@@ -41,7 +40,7 @@ export function getControllers(options: ParseServerOptions) {
const cacheController = getCacheController(options);
const analyticsController = getAnalyticsController(options);
const liveQueryController = getLiveQueryController(options);
- const databaseController = getDatabaseController(options, cacheController);
+ const databaseController = getDatabaseController(options);
const hooksController = getHooksController(options, databaseController);
const authDataManager = getAuthDataManager(options);
const parseGraphQLController = getParseGraphQLController(options, {
@@ -141,18 +140,9 @@ export function getLiveQueryController(options: ParseServerOptions): LiveQueryCo
return new LiveQueryController(options.liveQuery);
}
-export function getDatabaseController(
- options: ParseServerOptions,
- cacheController: CacheController
-): DatabaseController {
- const {
- databaseURI,
- databaseOptions,
- collectionPrefix,
- schemaCacheTTL,
- enableSingleSchemaCache,
- } = options;
- let { databaseAdapter } = options;
+export function getDatabaseController(options: ParseServerOptions): DatabaseController {
+ const { databaseURI, collectionPrefix, replicaSet } = options;
+ let { databaseAdapter, databaseOptions } = options;
if (
(databaseOptions ||
(databaseURI && databaseURI !== defaults.databaseURI) ||
@@ -161,14 +151,14 @@ export function getDatabaseController(
) {
throw 'You cannot specify both a databaseAdapter and a databaseURI/databaseOptions/collectionPrefix.';
} else if (!databaseAdapter) {
+ databaseOptions = databaseOptions || {};
+ databaseOptions.replicaSet = replicaSet;
databaseAdapter = getDatabaseAdapter(databaseURI, collectionPrefix, databaseOptions);
} else {
databaseAdapter = loadAdapter(databaseAdapter);
+ databaseAdapter.replicaSet = !!replicaSet;
}
- return new DatabaseController(
- databaseAdapter,
- new SchemaCache(cacheController, schemaCacheTTL, enableSingleSchemaCache)
- );
+ return new DatabaseController(databaseAdapter);
}
export function getHooksController(
diff --git a/src/GraphQL/helpers/objectsMutations.js b/src/GraphQL/helpers/objectsMutations.js
index e4f32dbb14..72fb84bc86 100644
--- a/src/GraphQL/helpers/objectsMutations.js
+++ b/src/GraphQL/helpers/objectsMutations.js
@@ -5,40 +5,17 @@ const createObject = async (className, fields, config, auth, info) => {
fields = {};
}
- return (
- await rest.create(
- config,
- auth,
- className,
- fields,
- info.clientSDK,
- info.context
- )
- ).response;
+ return (await rest.create(config, auth, className, fields, info.clientSDK, info.context))
+ .response;
};
-const updateObject = async (
- className,
- objectId,
- fields,
- config,
- auth,
- info
-) => {
+const updateObject = async (className, objectId, fields, config, auth, info) => {
if (!fields) {
fields = {};
}
return (
- await rest.update(
- config,
- auth,
- className,
- { objectId },
- fields,
- info.clientSDK,
- info.context
- )
+ await rest.update(config, auth, className, { objectId }, fields, info.clientSDK, info.context)
).response;
};
diff --git a/src/GraphQL/loaders/defaultGraphQLQueries.js b/src/GraphQL/loaders/defaultGraphQLQueries.js
index 8e8616ca5f..535cf62430 100644
--- a/src/GraphQL/loaders/defaultGraphQLQueries.js
+++ b/src/GraphQL/loaders/defaultGraphQLQueries.js
@@ -6,8 +6,7 @@ const load = parseGraphQLSchema => {
parseGraphQLSchema.addGraphQLQuery(
'health',
{
- description:
- 'The health query can be used to check if the server is up and running.',
+ description: 'The health query can be used to check if the server is up and running.',
type: new GraphQLNonNull(GraphQLBoolean),
resolve: () => true,
},
diff --git a/src/GraphQL/loaders/defaultGraphQLTypes.js b/src/GraphQL/loaders/defaultGraphQLTypes.js
index ec333aa272..d1d092ef6f 100644
--- a/src/GraphQL/loaders/defaultGraphQLTypes.js
+++ b/src/GraphQL/loaders/defaultGraphQLTypes.js
@@ -23,7 +23,7 @@ class TypeValidationError extends Error {
}
}
-const parseStringValue = (value) => {
+const parseStringValue = value => {
if (typeof value === 'string') {
return value;
}
@@ -31,7 +31,7 @@ const parseStringValue = (value) => {
throw new TypeValidationError(value, 'String');
};
-const parseIntValue = (value) => {
+const parseIntValue = value => {
if (typeof value === 'string') {
const int = Number(value);
if (Number.isInteger(int)) {
@@ -42,7 +42,7 @@ const parseIntValue = (value) => {
throw new TypeValidationError(value, 'Int');
};
-const parseFloatValue = (value) => {
+const parseFloatValue = value => {
if (typeof value === 'string') {
const float = Number(value);
if (!isNaN(float)) {
@@ -53,7 +53,7 @@ const parseFloatValue = (value) => {
throw new TypeValidationError(value, 'Float');
};
-const parseBooleanValue = (value) => {
+const parseBooleanValue = value => {
if (typeof value === 'boolean') {
return value;
}
@@ -61,7 +61,7 @@ const parseBooleanValue = (value) => {
throw new TypeValidationError(value, 'Boolean');
};
-const parseValue = (value) => {
+const parseValue = value => {
switch (value.kind) {
case Kind.STRING:
return parseStringValue(value.value);
@@ -86,15 +86,15 @@ const parseValue = (value) => {
}
};
-const parseListValues = (values) => {
+const parseListValues = values => {
if (Array.isArray(values)) {
- return values.map((value) => parseValue(value));
+ return values.map(value => parseValue(value));
}
throw new TypeValidationError(values, 'List');
};
-const parseObjectFields = (fields) => {
+const parseObjectFields = fields => {
if (Array.isArray(fields)) {
return fields.reduce(
(object, field) => ({
@@ -112,15 +112,14 @@ const ANY = new GraphQLScalarType({
name: 'Any',
description:
'The Any scalar type is used in operations and types that involve any type of value.',
- parseValue: (value) => value,
- serialize: (value) => value,
- parseLiteral: (ast) => parseValue(ast),
+ parseValue: value => value,
+ serialize: value => value,
+ parseLiteral: ast => parseValue(ast),
});
const OBJECT = new GraphQLScalarType({
name: 'Object',
- description:
- 'The Object scalar type is used in operations and types that involve objects.',
+ description: 'The Object scalar type is used in operations and types that involve objects.',
parseValue(value) {
if (typeof value === 'object') {
return value;
@@ -144,7 +143,7 @@ const OBJECT = new GraphQLScalarType({
},
});
-const parseDateIsoValue = (value) => {
+const parseDateIsoValue = value => {
if (typeof value === 'string') {
const date = new Date(value);
if (!isNaN(date)) {
@@ -157,7 +156,7 @@ const parseDateIsoValue = (value) => {
throw new TypeValidationError(value, 'Date');
};
-const serializeDateIso = (value) => {
+const serializeDateIso = value => {
if (typeof value === 'string') {
return value;
}
@@ -168,7 +167,7 @@ const serializeDateIso = (value) => {
throw new TypeValidationError(value, 'Date');
};
-const parseDateIsoLiteral = (ast) => {
+const parseDateIsoLiteral = ast => {
if (ast.kind === Kind.STRING) {
return parseDateIsoValue(ast.value);
}
@@ -178,19 +177,14 @@ const parseDateIsoLiteral = (ast) => {
const DATE = new GraphQLScalarType({
name: 'Date',
- description:
- 'The Date scalar type is used in operations and types that involve dates.',
+ description: 'The Date scalar type is used in operations and types that involve dates.',
parseValue(value) {
if (typeof value === 'string' || value instanceof Date) {
return {
__type: 'Date',
iso: parseDateIsoValue(value),
};
- } else if (
- typeof value === 'object' &&
- value.__type === 'Date' &&
- value.iso
- ) {
+ } else if (typeof value === 'object' && value.__type === 'Date' && value.iso) {
return {
__type: value.__type,
iso: parseDateIsoValue(value.iso),
@@ -202,11 +196,7 @@ const DATE = new GraphQLScalarType({
serialize(value) {
if (typeof value === 'string' || value instanceof Date) {
return serializeDateIso(value);
- } else if (
- typeof value === 'object' &&
- value.__type === 'Date' &&
- value.iso
- ) {
+ } else if (typeof value === 'object' && value.__type === 'Date' && value.iso) {
return serializeDateIso(value.iso);
}
@@ -219,8 +209,8 @@ const DATE = new GraphQLScalarType({
iso: parseDateIsoLiteral(ast),
};
} else if (ast.kind === Kind.OBJECT) {
- const __type = ast.fields.find((field) => field.name.value === '__type');
- const iso = ast.fields.find((field) => field.name.value === 'iso');
+ const __type = ast.fields.find(field => field.name.value === '__type');
+ const iso = ast.fields.find(field => field.name.value === 'iso');
if (__type && __type.value && __type.value.value === 'Date' && iso) {
return {
__type: __type.value.value,
@@ -273,8 +263,8 @@ const BYTES = new GraphQLScalarType({
base64: ast.value,
};
} else if (ast.kind === Kind.OBJECT) {
- const __type = ast.fields.find((field) => field.name.value === '__type');
- const base64 = ast.fields.find((field) => field.name.value === 'base64');
+ const __type = ast.fields.find(field => field.name.value === '__type');
+ const base64 = ast.fields.find(field => field.name.value === 'base64');
if (
__type &&
__type.value &&
@@ -294,7 +284,7 @@ const BYTES = new GraphQLScalarType({
},
});
-const parseFileValue = (value) => {
+const parseFileValue = value => {
if (typeof value === 'string') {
return {
__type: 'File',
@@ -314,10 +304,9 @@ const parseFileValue = (value) => {
const FILE = new GraphQLScalarType({
name: 'File',
- description:
- 'The File scalar type is used in operations and types that involve files.',
+ description: 'The File scalar type is used in operations and types that involve files.',
parseValue: parseFileValue,
- serialize: (value) => {
+ serialize: value => {
if (typeof value === 'string') {
return value;
} else if (
@@ -335,9 +324,9 @@ const FILE = new GraphQLScalarType({
if (ast.kind === Kind.STRING) {
return parseFileValue(ast.value);
} else if (ast.kind === Kind.OBJECT) {
- const __type = ast.fields.find((field) => field.name.value === '__type');
- const name = ast.fields.find((field) => field.name.value === 'name');
- const url = ast.fields.find((field) => field.name.value === 'url');
+ const __type = ast.fields.find(field => field.name.value === '__type');
+ const name = ast.fields.find(field => field.name.value === 'name');
+ const url = ast.fields.find(field => field.name.value === 'url');
if (__type && __type.value && name && name.value) {
return parseFileValue({
__type: __type.value.value,
@@ -353,8 +342,7 @@ const FILE = new GraphQLScalarType({
const FILE_INFO = new GraphQLObjectType({
name: 'FileInfo',
- description:
- 'The FileInfo object type is used to return the information about files.',
+ description: 'The FileInfo object type is used to return the information about files.',
fields: {
name: {
description: 'This is the file name.',
@@ -407,8 +395,7 @@ const GEO_POINT_INPUT = new GraphQLInputObjectType({
const GEO_POINT = new GraphQLObjectType({
name: 'GeoPoint',
- description:
- 'The GeoPoint object type is used to return the information about geo point fields.',
+ description: 'The GeoPoint object type is used to return the information about geo point fields.',
fields: GEO_POINT_FIELDS,
});
@@ -444,13 +431,11 @@ const ROLE_ACL_INPUT = new GraphQLInputObjectType({
type: new GraphQLNonNull(GraphQLString),
},
read: {
- description:
- 'Allow users who are members of the role to read the current object.',
+ description: 'Allow users who are members of the role to read the current object.',
type: new GraphQLNonNull(GraphQLBoolean),
},
write: {
- description:
- 'Allow users who are members of the role to write on the current object.',
+ description: 'Allow users who are members of the role to write on the current object.',
type: new GraphQLNonNull(GraphQLBoolean),
},
},
@@ -521,13 +506,11 @@ const ROLE_ACL = new GraphQLObjectType({
type: new GraphQLNonNull(GraphQLID),
},
read: {
- description:
- 'Allow users who are members of the role to read the current object.',
+ description: 'Allow users who are members of the role to read the current object.',
type: new GraphQLNonNull(GraphQLBoolean),
},
write: {
- description:
- 'Allow users who are members of the role to write on the current object.',
+ description: 'Allow users who are members of the role to write on the current object.',
type: new GraphQLNonNull(GraphQLBoolean),
},
},
@@ -557,7 +540,7 @@ const ACL = new GraphQLObjectType({
type: new GraphQLList(new GraphQLNonNull(USER_ACL)),
resolve(p) {
const users = [];
- Object.keys(p).forEach((rule) => {
+ Object.keys(p).forEach(rule => {
if (rule !== '*' && rule.indexOf('role:') !== 0) {
users.push({
userId: toGlobalId('_User', rule),
@@ -574,7 +557,7 @@ const ACL = new GraphQLObjectType({
type: new GraphQLList(new GraphQLNonNull(ROLE_ACL)),
resolve(p) {
const roles = [];
- Object.keys(p).forEach((rule) => {
+ Object.keys(p).forEach(rule => {
if (rule.indexOf('role:') === 0) {
roles.push({
roleName: rule.replace('role:', ''),
@@ -610,8 +593,7 @@ const CLASS_NAME_ATT = {
};
const GLOBAL_OR_OBJECT_ID_ATT = {
- description:
- 'This is the object id. You can use either the global or the object id.',
+ description: 'This is the object id. You can use either the global or the object id.',
type: OBJECT_ID,
};
@@ -686,8 +668,7 @@ const READ_PREFERENCE_ATT = {
};
const INCLUDE_READ_PREFERENCE_ATT = {
- description:
- 'The read preference for the queries to be executed to include fields.',
+ description: 'The read preference for the queries to be executed to include fields.',
type: READ_PREFERENCE,
};
@@ -713,8 +694,7 @@ const READ_OPTIONS_ATT = {
};
const WHERE_ATT = {
- description:
- 'These are the conditions that the objects need to match in order to be found',
+ description: 'These are the conditions that the objects need to match in order to be found',
type: OBJECT,
};
@@ -736,8 +716,7 @@ const COUNT_ATT = {
const SEARCH_INPUT = new GraphQLInputObjectType({
name: 'SearchInput',
- description:
- 'The SearchInput type is used to specifiy a search operation on a full text search.',
+  description: 'The SearchInput type is used to specify a search operation on a full text search.',
fields: {
term: {
description: 'This is the term to be searched.',
@@ -749,13 +728,11 @@ const SEARCH_INPUT = new GraphQLInputObjectType({
type: GraphQLString,
},
caseSensitive: {
- description:
- 'This is the flag to enable or disable case sensitive search.',
+ description: 'This is the flag to enable or disable case sensitive search.',
type: GraphQLBoolean,
},
diacriticSensitive: {
- description:
- 'This is the flag to enable or disable diacritic sensitive search.',
+ description: 'This is the flag to enable or disable diacritic sensitive search.',
type: GraphQLBoolean,
},
},
@@ -763,8 +740,7 @@ const SEARCH_INPUT = new GraphQLInputObjectType({
const TEXT_INPUT = new GraphQLInputObjectType({
name: 'TextInput',
- description:
- 'The TextInput type is used to specify a text operation on a constraint.',
+ description: 'The TextInput type is used to specify a text operation on a constraint.',
fields: {
search: {
description: 'This is the search to be executed.',
@@ -775,8 +751,7 @@ const TEXT_INPUT = new GraphQLInputObjectType({
const BOX_INPUT = new GraphQLInputObjectType({
name: 'BoxInput',
- description:
- 'The BoxInput type is used to specifiy a box operation on a within geo query.',
+  description: 'The BoxInput type is used to specify a box operation on a within geo query.',
fields: {
bottomLeft: {
description: 'This is the bottom left coordinates of the box.',
@@ -791,8 +766,7 @@ const BOX_INPUT = new GraphQLInputObjectType({
const WITHIN_INPUT = new GraphQLInputObjectType({
name: 'WithinInput',
- description:
- 'The WithinInput type is used to specify a within operation on a constraint.',
+ description: 'The WithinInput type is used to specify a within operation on a constraint.',
fields: {
box: {
description: 'This is the box to be specified.',
@@ -819,8 +793,7 @@ const CENTER_SPHERE_INPUT = new GraphQLInputObjectType({
const GEO_WITHIN_INPUT = new GraphQLInputObjectType({
name: 'GeoWithinInput',
- description:
- 'The GeoWithinInput type is used to specify a geoWithin operation on a constraint.',
+ description: 'The GeoWithinInput type is used to specify a geoWithin operation on a constraint.',
fields: {
polygon: {
description: 'This is the polygon to be specified.',
@@ -845,49 +818,49 @@ const GEO_INTERSECTS_INPUT = new GraphQLInputObjectType({
},
});
-const equalTo = (type) => ({
+const equalTo = type => ({
description:
    'This is the equalTo operator to specify a constraint to select the objects where the value of a field equals a specified value.',
type,
});
-const notEqualTo = (type) => ({
+const notEqualTo = type => ({
description:
    'This is the notEqualTo operator to specify a constraint to select the objects where the value of a field does not equal a specified value.',
type,
});
-const lessThan = (type) => ({
+const lessThan = type => ({
description:
'This is the lessThan operator to specify a constraint to select the objects where the value of a field is less than a specified value.',
type,
});
-const lessThanOrEqualTo = (type) => ({
+const lessThanOrEqualTo = type => ({
description:
'This is the lessThanOrEqualTo operator to specify a constraint to select the objects where the value of a field is less than or equal to a specified value.',
type,
});
-const greaterThan = (type) => ({
+const greaterThan = type => ({
description:
'This is the greaterThan operator to specify a constraint to select the objects where the value of a field is greater than a specified value.',
type,
});
-const greaterThanOrEqualTo = (type) => ({
+const greaterThanOrEqualTo = type => ({
description:
'This is the greaterThanOrEqualTo operator to specify a constraint to select the objects where the value of a field is greater than or equal to a specified value.',
type,
});
-const inOp = (type) => ({
+const inOp = type => ({
description:
'This is the in operator to specify a constraint to select the objects where the value of a field equals any value in the specified array.',
type: new GraphQLList(type),
});
-const notIn = (type) => ({
+const notIn = type => ({
description:
    'This is the notIn operator to specify a constraint to select the objects where the value of a field does not equal any value in the specified array.',
type: new GraphQLList(type),
@@ -913,8 +886,7 @@ const options = {
const SUBQUERY_INPUT = new GraphQLInputObjectType({
name: 'SubqueryInput',
- description:
- 'The SubqueryInput type is used to specify a sub query to another class.',
+ description: 'The SubqueryInput type is used to specify a sub query to another class.',
fields: {
className: CLASS_NAME_ATT,
where: Object.assign({}, WHERE_ATT, {
@@ -988,8 +960,7 @@ const STRING_WHERE_INPUT = new GraphQLInputObjectType({
matchesRegex,
options,
text: {
- description:
- 'This is the $text operator to specify a full text search constraint.',
+ description: 'This is the $text operator to specify a full text search constraint.',
type: TEXT_INPUT,
},
inQueryKey,
@@ -1225,27 +1196,21 @@ let ARRAY_RESULT;
const loadArrayResult = (parseGraphQLSchema, parseClasses) => {
const classTypes = parseClasses
- .filter((parseClass) =>
- parseGraphQLSchema.parseClassTypes[parseClass.className]
- .classGraphQLOutputType
- ? true
- : false
+ .filter(parseClass =>
+ parseGraphQLSchema.parseClassTypes[parseClass.className].classGraphQLOutputType ? true : false
)
.map(
- (parseClass) =>
- parseGraphQLSchema.parseClassTypes[parseClass.className]
- .classGraphQLOutputType
+ parseClass => parseGraphQLSchema.parseClassTypes[parseClass.className].classGraphQLOutputType
);
ARRAY_RESULT = new GraphQLUnionType({
name: 'ArrayResult',
description:
'Use Inline Fragment on Array to get results: https://graphql.org/learn/queries/#inline-fragments',
types: () => [ELEMENT, ...classTypes],
- resolveType: (value) => {
+ resolveType: value => {
if (value.__type === 'Object' && value.className && value.objectId) {
if (parseGraphQLSchema.parseClassTypes[value.className]) {
- return parseGraphQLSchema.parseClassTypes[value.className]
- .classGraphQLOutputType;
+ return parseGraphQLSchema.parseClassTypes[value.className].classGraphQLOutputType;
} else {
return ELEMENT;
}
@@ -1257,7 +1222,7 @@ const loadArrayResult = (parseGraphQLSchema, parseClasses) => {
parseGraphQLSchema.graphQLTypes.push(ARRAY_RESULT);
};
-const load = (parseGraphQLSchema) => {
+const load = parseGraphQLSchema => {
parseGraphQLSchema.addGraphQLType(GraphQLUpload, true);
parseGraphQLSchema.addGraphQLType(ANY, true);
parseGraphQLSchema.addGraphQLType(OBJECT, true);
diff --git a/src/GraphQL/loaders/defaultRelaySchema.js b/src/GraphQL/loaders/defaultRelaySchema.js
index b7af16a327..87f19955ba 100644
--- a/src/GraphQL/loaders/defaultRelaySchema.js
+++ b/src/GraphQL/loaders/defaultRelaySchema.js
@@ -39,8 +39,7 @@ const load = parseGraphQLSchema => {
}
},
obj => {
- return parseGraphQLSchema.parseClassTypes[obj.className]
- .classGraphQLOutputType;
+ return parseGraphQLSchema.parseClassTypes[obj.className].classGraphQLOutputType;
}
);
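Editor's note: the one-line change above keeps the Relay node behavior intact: the stored className is mapped back to that class's generated output type. A sketch of the resulting refetch pattern, with GameScore as an assumed class name:

```js
// Refetch any object by its Relay global id; the inline fragment is
// dispatched via parseClassTypes[obj.className].classGraphQLOutputType.
const nodeQuery = `
  query Node($id: ID!) {
    node(id: $id) {
      id
      ... on GameScore { score }
    }
  }
`;
```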
diff --git a/src/GraphQL/loaders/filesMutations.js b/src/GraphQL/loaders/filesMutations.js
index 0bb031f31d..a732c277ef 100644
--- a/src/GraphQL/loaders/filesMutations.js
+++ b/src/GraphQL/loaders/filesMutations.js
@@ -14,7 +14,7 @@ const handleUpload = async (upload, config) => {
const chunks = [];
stream
.on('error', reject)
- .on('data', (chunk) => chunks.push(chunk))
+ .on('data', chunk => chunks.push(chunk))
.on('end', () => resolve(Buffer.concat(chunks)));
});
}
@@ -28,35 +28,23 @@ const handleUpload = async (upload, config) => {
}
if (!filename.match(/^[_a-zA-Z0-9][a-zA-Z0-9@\.\ ~_-]*$/)) {
- throw new Parse.Error(
- Parse.Error.INVALID_FILE_NAME,
- 'Filename contains invalid characters.'
- );
+ throw new Parse.Error(Parse.Error.INVALID_FILE_NAME, 'Filename contains invalid characters.');
}
try {
return {
- fileInfo: await config.filesController.createFile(
- config,
- filename,
- data,
- mimetype
- ),
+ fileInfo: await config.filesController.createFile(config, filename, data, mimetype),
};
} catch (e) {
logger.error('Error creating a file: ', e);
- throw new Parse.Error(
- Parse.Error.FILE_SAVE_ERROR,
- `Could not store file: ${filename}.`
- );
+ throw new Parse.Error(Parse.Error.FILE_SAVE_ERROR, `Could not store file: ${filename}.`);
}
};
-const load = (parseGraphQLSchema) => {
+const load = parseGraphQLSchema => {
const createMutation = mutationWithClientMutationId({
name: 'CreateFile',
- description:
- 'The createFile mutation can be used to create and upload a new file.',
+ description: 'The createFile mutation can be used to create and upload a new file.',
inputFields: {
upload: {
description: 'This is the new file to be created and uploaded.',
@@ -80,18 +68,9 @@ const load = (parseGraphQLSchema) => {
},
});
- parseGraphQLSchema.addGraphQLType(
- createMutation.args.input.type.ofType,
- true,
- true
- );
+ parseGraphQLSchema.addGraphQLType(createMutation.args.input.type.ofType, true, true);
parseGraphQLSchema.addGraphQLType(createMutation.type, true, true);
- parseGraphQLSchema.addGraphQLMutation(
- 'createFile',
- createMutation,
- true,
- true
- );
+ parseGraphQLSchema.addGraphQLMutation('createFile', createMutation, true, true);
};
export { load, handleUpload };
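Editor's note: handleUpload buffers the incoming stream, validates the filename against the character whitelist above, and hands the bytes to filesController.createFile. A browser-side sketch of the createFile mutation; the Upload scalar travels per the GraphQL multipart request spec used by graphql-upload, and the endpoint path and header name below are conventional Parse assumptions, not part of this diff:

```js
// Upload "hello.txt" (a name that passes the filename regex above)
// through the createFile mutation.
const operations = JSON.stringify({
  query: `mutation CreateFile($upload: Upload!) {
    createFile(input: { upload: $upload }) { fileInfo { name url } }
  }`,
  variables: { upload: null },
});
const body = new FormData();
body.append('operations', operations);
body.append('map', JSON.stringify({ 0: ['variables.upload'] }));
body.append('0', new Blob(['hello'], { type: 'text/plain' }), 'hello.txt');
fetch('/graphql', {
  method: 'POST',
  headers: { 'X-Parse-Application-Id': 'myAppId' },
  body,
});
```

A filename starting with a disallowed character (e.g. a leading dot) would instead throw the INVALID_FILE_NAME error shown above.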
diff --git a/src/GraphQL/loaders/functionsMutations.js b/src/GraphQL/loaders/functionsMutations.js
index 418791583e..0722ca9378 100644
--- a/src/GraphQL/loaders/functionsMutations.js
+++ b/src/GraphQL/loaders/functionsMutations.js
@@ -24,8 +24,7 @@ const load = parseGraphQLSchema => {
const callCloudCodeMutation = mutationWithClientMutationId({
name: 'CallCloudCode',
- description:
- 'The callCloudCode mutation can be used to invoke a cloud code function.',
+ description: 'The callCloudCode mutation can be used to invoke a cloud code function.',
inputFields: {
functionName: {
description: 'This is the function to be called.',
@@ -38,8 +37,7 @@ const load = parseGraphQLSchema => {
},
outputFields: {
result: {
- description:
- 'This is the result value of the cloud code function execution.',
+ description: 'This is the result value of the cloud code function execution.',
type: defaultGraphQLTypes.ANY,
},
},
@@ -49,15 +47,17 @@ const load = parseGraphQLSchema => {
const { config, auth, info } = context;
return {
- result: (await FunctionsRouter.handleCloudFunction({
- params: {
- functionName,
- },
- config,
- auth,
- info,
- body: params,
- })).response.result,
+ result: (
+ await FunctionsRouter.handleCloudFunction({
+ params: {
+ functionName,
+ },
+ config,
+ auth,
+ info,
+ body: params,
+ })
+ ).response.result,
};
} catch (e) {
parseGraphQLSchema.handleError(e);
@@ -65,18 +65,9 @@ const load = parseGraphQLSchema => {
},
});
- parseGraphQLSchema.addGraphQLType(
- callCloudCodeMutation.args.input.type.ofType,
- true,
- true
- );
+ parseGraphQLSchema.addGraphQLType(callCloudCodeMutation.args.input.type.ofType, true, true);
parseGraphQLSchema.addGraphQLType(callCloudCodeMutation.type, true, true);
- parseGraphQLSchema.addGraphQLMutation(
- 'callCloudCode',
- callCloudCodeMutation,
- true,
- true
- );
+ parseGraphQLSchema.addGraphQLMutation('callCloudCode', callCloudCodeMutation, true, true);
}
};
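Editor's note: the reflowed resolver above still just forwards to FunctionsRouter.handleCloudFunction and unwraps response.result. A sketch of the corresponding client call, assuming a cloud function named "hello" is defined (function name and params are illustrative):

```js
// Invoke a cloud function through the generic mutation; `result`
// carries whatever the function returns.
const callHello = `
  mutation {
    callCloudCode(input: { functionName: "hello", params: { who: "world" } }) {
      result
    }
  }
`;
```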
diff --git a/src/GraphQL/loaders/parseClassMutations.js b/src/GraphQL/loaders/parseClassMutations.js
index 1a6e01df1b..2ef41eccbd 100644
--- a/src/GraphQL/loaders/parseClassMutations.js
+++ b/src/GraphQL/loaders/parseClassMutations.js
@@ -2,10 +2,7 @@ import { GraphQLNonNull } from 'graphql';
import { fromGlobalId, mutationWithClientMutationId } from 'graphql-relay';
import getFieldNames from 'graphql-list-fields';
import * as defaultGraphQLTypes from './defaultGraphQLTypes';
-import {
- extractKeysAndInclude,
- getParseClassMutationConfig,
-} from '../parseGraphQLUtils';
+import { extractKeysAndInclude, getParseClassMutationConfig } from '../parseGraphQLUtils';
import * as objectsMutations from '../helpers/objectsMutations';
import * as objectsQueries from '../helpers/objectsQueries';
import { ParseGraphQLClassConfig } from '../../Controllers/ParseGraphQLController';
@@ -18,17 +15,10 @@ const getOnlyRequiredFields = (
includedFieldsString,
nativeObjectFields
) => {
- const includedFields = includedFieldsString
- ? includedFieldsString.split(',')
- : [];
- const selectedFields = selectedFieldsString
- ? selectedFieldsString.split(',')
- : [];
+ const includedFields = includedFieldsString ? includedFieldsString.split(',') : [];
+ const selectedFields = selectedFieldsString ? selectedFieldsString.split(',') : [];
const missingFields = selectedFields
- .filter(
- field =>
- !nativeObjectFields.includes(field) || includedFields.includes(field)
- )
+ .filter(field => !nativeObjectFields.includes(field) || includedFields.includes(field))
.join(',');
if (!missingFields.length) {
return { needGet: false, keys: '' };
@@ -37,15 +27,10 @@ const getOnlyRequiredFields = (
}
};
-const load = function(
- parseGraphQLSchema,
- parseClass,
- parseClassConfig: ?ParseGraphQLClassConfig
-) {
+const load = function (parseGraphQLSchema, parseClass, parseClassConfig: ?ParseGraphQLClassConfig) {
const className = parseClass.className;
const graphQLClassName = transformClassNameToGraphQL(className);
- const getGraphQLQueryName =
- graphQLClassName.charAt(0).toLowerCase() + graphQLClassName.slice(1);
+ const getGraphQLQueryName = graphQLClassName.charAt(0).toLowerCase() + graphQLClassName.slice(1);
const {
create: isCreateEnabled = true,
@@ -63,24 +48,20 @@ const load = function(
} = parseGraphQLSchema.parseClassTypes[className];
if (isCreateEnabled) {
- const createGraphQLMutationName =
- createAlias || `create${graphQLClassName}`;
+ const createGraphQLMutationName = createAlias || `create${graphQLClassName}`;
const createGraphQLMutation = mutationWithClientMutationId({
name: `Create${graphQLClassName}`,
description: `The ${createGraphQLMutationName} mutation can be used to create a new object of the ${graphQLClassName} class.`,
inputFields: {
fields: {
- description:
- 'These are the fields that will be used to create the new object.',
+ description: 'These are the fields that will be used to create the new object.',
type: classGraphQLCreateType || defaultGraphQLTypes.OBJECT,
},
},
outputFields: {
[getGraphQLQueryName]: {
description: 'This is the created object.',
- type: new GraphQLNonNull(
- classGraphQLOutputType || defaultGraphQLTypes.OBJECT
- ),
+ type: new GraphQLNonNull(classGraphQLOutputType || defaultGraphQLTypes.OBJECT),
},
},
mutateAndGetPayload: async (args, context, mutationInfo) => {
@@ -106,12 +87,12 @@ const load = function(
.filter(field => field.startsWith(`${getGraphQLQueryName}.`))
.map(field => field.replace(`${getGraphQLQueryName}.`, ''));
const { keys, include } = extractKeysAndInclude(selectedFields);
- const { keys: requiredKeys, needGet } = getOnlyRequiredFields(
- fields,
- keys,
- include,
- ['id', 'objectId', 'createdAt', 'updatedAt']
- );
+ const { keys: requiredKeys, needGet } = getOnlyRequiredFields(fields, keys, include, [
+ 'id',
+ 'objectId',
+ 'createdAt',
+ 'updatedAt',
+ ]);
const needToGetAllKeys = objectsQueries.needToGetAllKeys(
parseClass.fields,
keys,
@@ -160,38 +141,29 @@ const load = function(
});
if (
- parseGraphQLSchema.addGraphQLType(
- createGraphQLMutation.args.input.type.ofType
- ) &&
+ parseGraphQLSchema.addGraphQLType(createGraphQLMutation.args.input.type.ofType) &&
parseGraphQLSchema.addGraphQLType(createGraphQLMutation.type)
) {
- parseGraphQLSchema.addGraphQLMutation(
- createGraphQLMutationName,
- createGraphQLMutation
- );
+ parseGraphQLSchema.addGraphQLMutation(createGraphQLMutationName, createGraphQLMutation);
}
}
if (isUpdateEnabled) {
- const updateGraphQLMutationName =
- updateAlias || `update${graphQLClassName}`;
+ const updateGraphQLMutationName = updateAlias || `update${graphQLClassName}`;
const updateGraphQLMutation = mutationWithClientMutationId({
name: `Update${graphQLClassName}`,
description: `The ${updateGraphQLMutationName} mutation can be used to update an object of the ${graphQLClassName} class.`,
inputFields: {
id: defaultGraphQLTypes.GLOBAL_OR_OBJECT_ID_ATT,
fields: {
- description:
- 'These are the fields that will be used to update the object.',
+ description: 'These are the fields that will be used to update the object.',
type: classGraphQLUpdateType || defaultGraphQLTypes.OBJECT,
},
},
outputFields: {
[getGraphQLQueryName]: {
description: 'This is the updated object.',
- type: new GraphQLNonNull(
- classGraphQLOutputType || defaultGraphQLTypes.OBJECT
- ),
+ type: new GraphQLNonNull(classGraphQLOutputType || defaultGraphQLTypes.OBJECT),
},
},
mutateAndGetPayload: async (args, context, mutationInfo) => {
@@ -225,12 +197,11 @@ const load = function(
.filter(field => field.startsWith(`${getGraphQLQueryName}.`))
.map(field => field.replace(`${getGraphQLQueryName}.`, ''));
const { keys, include } = extractKeysAndInclude(selectedFields);
- const { keys: requiredKeys, needGet } = getOnlyRequiredFields(
- fields,
- keys,
- include,
- ['id', 'objectId', 'updatedAt']
- );
+ const { keys: requiredKeys, needGet } = getOnlyRequiredFields(fields, keys, include, [
+ 'id',
+ 'objectId',
+ 'updatedAt',
+ ]);
const needToGetAllKeys = objectsQueries.needToGetAllKeys(
parseClass.fields,
keys,
@@ -279,21 +250,15 @@ const load = function(
});
if (
- parseGraphQLSchema.addGraphQLType(
- updateGraphQLMutation.args.input.type.ofType
- ) &&
+ parseGraphQLSchema.addGraphQLType(updateGraphQLMutation.args.input.type.ofType) &&
parseGraphQLSchema.addGraphQLType(updateGraphQLMutation.type)
) {
- parseGraphQLSchema.addGraphQLMutation(
- updateGraphQLMutationName,
- updateGraphQLMutation
- );
+ parseGraphQLSchema.addGraphQLMutation(updateGraphQLMutationName, updateGraphQLMutation);
}
}
if (isDestroyEnabled) {
- const deleteGraphQLMutationName =
- destroyAlias || `delete${graphQLClassName}`;
+ const deleteGraphQLMutationName = destroyAlias || `delete${graphQLClassName}`;
const deleteGraphQLMutation = mutationWithClientMutationId({
name: `Delete${graphQLClassName}`,
description: `The ${deleteGraphQLMutationName} mutation can be used to delete an object of the ${graphQLClassName} class.`,
@@ -303,9 +268,7 @@ const load = function(
outputFields: {
[getGraphQLQueryName]: {
description: 'This is the deleted object.',
- type: new GraphQLNonNull(
- classGraphQLOutputType || defaultGraphQLTypes.OBJECT
- ),
+ type: new GraphQLNonNull(classGraphQLOutputType || defaultGraphQLTypes.OBJECT),
},
},
mutateAndGetPayload: async (args, context, mutationInfo) => {
@@ -324,11 +287,7 @@ const load = function(
.map(field => field.replace(`${getGraphQLQueryName}.`, ''));
const { keys, include } = extractKeysAndInclude(selectedFields);
let optimizedObject = {};
- if (
- keys &&
- keys.split(',').filter(key => !['id', 'objectId'].includes(key))
- .length > 0
- ) {
+ if (keys && keys.split(',').filter(key => !['id', 'objectId'].includes(key)).length > 0) {
optimizedObject = await objectsQueries.getObject(
className,
id,
@@ -342,13 +301,7 @@ const load = function(
parseGraphQLSchema.parseClasses
);
}
- await objectsMutations.deleteObject(
- className,
- id,
- config,
- auth,
- info
- );
+ await objectsMutations.deleteObject(className, id, config, auth, info);
return {
[getGraphQLQueryName]: {
objectId: id,
@@ -362,15 +315,10 @@ const load = function(
});
if (
- parseGraphQLSchema.addGraphQLType(
- deleteGraphQLMutation.args.input.type.ofType
- ) &&
+ parseGraphQLSchema.addGraphQLType(deleteGraphQLMutation.args.input.type.ofType) &&
parseGraphQLSchema.addGraphQLType(deleteGraphQLMutation.type)
) {
- parseGraphQLSchema.addGraphQLMutation(
- deleteGraphQLMutationName,
- deleteGraphQLMutation
- );
+ parseGraphQLSchema.addGraphQLMutation(deleteGraphQLMutationName, deleteGraphQLMutation);
}
}
};
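Editor's note: the create/update/delete mutations above are generated per class (create${graphQLClassName} and friends), and getOnlyRequiredFields decides whether a follow-up getObject round trip is needed. A sketch, assuming a class named GameScore with a Number field score (illustrative):

```js
// createGameScore is the generated create mutation for "GameScore".
const create = `
  mutation Create($fields: CreateGameScoreFieldsInput) {
    createGameScore(input: { fields: $fields }) {
      gameScore { id objectId createdAt updatedAt }
    }
  }
`;
const variables = { fields: { score: 1337 } };
```

Because only the native fields listed in the optimization above (id, objectId, createdAt, updatedAt) are selected here, getOnlyRequiredFields reports needGet: false and the resolver can skip the extra fetch; adding score to the selection set would trigger it.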
diff --git a/src/GraphQL/loaders/parseClassQueries.js b/src/GraphQL/loaders/parseClassQueries.js
index 185be16d58..4fc3fa83d5 100644
--- a/src/GraphQL/loaders/parseClassQueries.js
+++ b/src/GraphQL/loaders/parseClassQueries.js
@@ -8,20 +8,11 @@ import { ParseGraphQLClassConfig } from '../../Controllers/ParseGraphQLControlle
import { transformClassNameToGraphQL } from '../transformers/className';
import { extractKeysAndInclude } from '../parseGraphQLUtils';
-const getParseClassQueryConfig = function (
- parseClassConfig: ?ParseGraphQLClassConfig
-) {
+const getParseClassQueryConfig = function (parseClassConfig: ?ParseGraphQLClassConfig) {
return (parseClassConfig && parseClassConfig.query) || {};
};
-const getQuery = async (
- parseClass,
- _source,
- args,
- context,
- queryInfo,
- parseClasses
-) => {
+const getQuery = async (parseClass, _source, args, context, queryInfo, parseClasses) => {
let { id } = args;
const { options } = args;
const { readPreference, includeReadPreference } = options || {};
@@ -50,11 +41,7 @@ const getQuery = async (
);
};
-const load = function (
- parseGraphQLSchema,
- parseClass,
- parseClassConfig: ?ParseGraphQLClassConfig
-) {
+const load = function (parseGraphQLSchema, parseClass, parseClassConfig: ?ParseGraphQLClassConfig) {
const className = parseClass.className;
const graphQLClassName = transformClassNameToGraphQL(className);
const {
@@ -71,8 +58,7 @@ const load = function (
} = parseGraphQLSchema.parseClassTypes[className];
if (isGetEnabled) {
- const lowerCaseClassName =
- graphQLClassName.charAt(0).toLowerCase() + graphQLClassName.slice(1);
+ const lowerCaseClassName = graphQLClassName.charAt(0).toLowerCase() + graphQLClassName.slice(1);
const getGraphQLQueryName = getAlias || lowerCaseClassName;
@@ -82,9 +68,7 @@ const load = function (
id: defaultGraphQLTypes.GLOBAL_OR_OBJECT_ID_ATT,
options: defaultGraphQLTypes.READ_OPTIONS_ATT,
},
- type: new GraphQLNonNull(
- classGraphQLOutputType || defaultGraphQLTypes.OBJECT
- ),
+ type: new GraphQLNonNull(classGraphQLOutputType || defaultGraphQLTypes.OBJECT),
async resolve(_source, args, context, queryInfo) {
try {
return await getQuery(
@@ -103,41 +87,25 @@ const load = function (
}
if (isFindEnabled) {
- const lowerCaseClassName =
- graphQLClassName.charAt(0).toLowerCase() + graphQLClassName.slice(1);
+ const lowerCaseClassName = graphQLClassName.charAt(0).toLowerCase() + graphQLClassName.slice(1);
const findGraphQLQueryName = findAlias || pluralize(lowerCaseClassName);
parseGraphQLSchema.addGraphQLQuery(findGraphQLQueryName, {
description: `The ${findGraphQLQueryName} query can be used to find objects of the ${graphQLClassName} class.`,
args: classGraphQLFindArgs,
- type: new GraphQLNonNull(
- classGraphQLFindResultType || defaultGraphQLTypes.OBJECT
- ),
+ type: new GraphQLNonNull(classGraphQLFindResultType || defaultGraphQLTypes.OBJECT),
async resolve(_source, args, context, queryInfo) {
try {
- const {
- where,
- order,
- skip,
- first,
- after,
- last,
- before,
- options,
- } = args;
- const {
- readPreference,
- includeReadPreference,
- subqueryReadPreference,
- } = options || {};
+ const { where, order, skip, first, after, last, before, options } = args;
+ const { readPreference, includeReadPreference, subqueryReadPreference } = options || {};
const { config, auth, info } = context;
const selectedFields = getFieldNames(queryInfo);
const { keys, include } = extractKeysAndInclude(
selectedFields
- .filter((field) => field.startsWith('edges.node.'))
- .map((field) => field.replace('edges.node.', ''))
+ .filter(field => field.startsWith('edges.node.'))
+ .map(field => field.replace('edges.node.', ''))
);
const parseOrder = order && order.join(',');
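Editor's note: the find query above is named by pluralizing the lower-cased class name (or by findAlias), and parseOrder joins the order list into Parse's comma-separated form. A sketch for an assumed GameScore class; the field_ASC / field_DESC enum value pattern follows the generated order type and is illustrative here:

```js
// Cursor pagination over the generated "gameScores" find query.
const find = `
  query Find($first: Int, $after: String) {
    gameScores(order: [score_DESC], first: $first, after: $after) {
      edges { cursor node { objectId score } }
      pageInfo { hasNextPage endCursor }
    }
  }
`;
```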
diff --git a/src/GraphQL/loaders/parseClassTypes.js b/src/GraphQL/loaders/parseClassTypes.js
index 49981a72ac..22d90b52b7 100644
--- a/src/GraphQL/loaders/parseClassTypes.js
+++ b/src/GraphQL/loaders/parseClassTypes.js
@@ -8,11 +8,7 @@ import {
GraphQLBoolean,
GraphQLEnumType,
} from 'graphql';
-import {
- globalIdField,
- connectionArgs,
- connectionDefinitions,
-} from 'graphql-relay';
+import { globalIdField, connectionArgs, connectionDefinitions } from 'graphql-relay';
import getFieldNames from 'graphql-list-fields';
import * as defaultGraphQLTypes from './defaultGraphQLTypes';
import * as objectsQueries from '../helpers/objectsQueries';
@@ -21,14 +17,9 @@ import { transformClassNameToGraphQL } from '../transformers/className';
import { transformInputTypeToGraphQL } from '../transformers/inputType';
import { transformOutputTypeToGraphQL } from '../transformers/outputType';
import { transformConstraintTypeToGraphQL } from '../transformers/constraintType';
-import {
- extractKeysAndInclude,
- getParseClassMutationConfig,
-} from '../parseGraphQLUtils';
+import { extractKeysAndInclude, getParseClassMutationConfig } from '../parseGraphQLUtils';
-const getParseClassTypeConfig = function (
- parseClassConfig: ?ParseGraphQLClassConfig
-) {
+const getParseClassTypeConfig = function (parseClassConfig: ?ParseGraphQLClassConfig) {
return (parseClassConfig && parseClassConfig.type) || {};
};
@@ -51,22 +42,19 @@ const getInputFieldsAndConstraints = function (
let classSortFields;
// All allowed customs fields
- const classCustomFields = classFields.filter((field) => {
- return (
- !Object.keys(defaultGraphQLTypes.PARSE_OBJECT_FIELDS).includes(field) &&
- field !== 'id'
- );
+ const classCustomFields = classFields.filter(field => {
+ return !Object.keys(defaultGraphQLTypes.PARSE_OBJECT_FIELDS).includes(field) && field !== 'id';
});
if (allowedInputFields && allowedInputFields.create) {
- classCreateFields = classCustomFields.filter((field) => {
+ classCreateFields = classCustomFields.filter(field => {
return allowedInputFields.create.includes(field);
});
} else {
classCreateFields = classCustomFields;
}
if (allowedInputFields && allowedInputFields.update) {
- classUpdateFields = classCustomFields.filter((field) => {
+ classUpdateFields = classCustomFields.filter(field => {
return allowedInputFields.update.includes(field);
});
} else {
@@ -74,7 +62,7 @@ const getInputFieldsAndConstraints = function (
}
if (allowedOutputFields) {
- classOutputFields = classCustomFields.filter((field) => {
+ classOutputFields = classCustomFields.filter(field => {
return allowedOutputFields.includes(field);
});
} else {
@@ -82,13 +70,11 @@ const getInputFieldsAndConstraints = function (
}
// Filters the "password" field from class _User
if (parseClass.className === '_User') {
- classOutputFields = classOutputFields.filter(
- (outputField) => outputField !== 'password'
- );
+ classOutputFields = classOutputFields.filter(outputField => outputField !== 'password');
}
if (allowedConstraintFields) {
- classConstraintFields = classCustomFields.filter((field) => {
+ classConstraintFields = classCustomFields.filter(field => {
return allowedConstraintFields.includes(field);
});
} else {
@@ -107,7 +93,7 @@ const getInputFieldsAndConstraints = function (
});
}
} else {
- classSortFields = classFields.map((field) => {
+ classSortFields = classFields.map(field => {
return { field, asc: true, desc: true };
});
}
@@ -121,11 +107,7 @@ const getInputFieldsAndConstraints = function (
};
};
-const load = (
- parseGraphQLSchema,
- parseClass,
- parseClassConfig: ?ParseGraphQLClassConfig
-) => {
+const load = (parseGraphQLSchema, parseClass, parseClassConfig: ?ParseGraphQLClassConfig) => {
const className = parseClass.className;
const graphQLClassName = transformClassNameToGraphQL(className);
const {
@@ -159,8 +141,7 @@ const load = (
[field]: {
description: `This is the object ${field}.`,
type:
- (className === '_User' &&
- (field === 'username' || field === 'password')) ||
+ (className === '_User' && (field === 'username' || field === 'password')) ||
parseClass.fields[field].required
? new GraphQLNonNull(type)
: type,
@@ -175,9 +156,7 @@ const load = (
}
),
});
- classGraphQLCreateType = parseGraphQLSchema.addGraphQLType(
- classGraphQLCreateType
- );
+ classGraphQLCreateType = parseGraphQLSchema.addGraphQLType(classGraphQLCreateType);
const classGraphQLUpdateTypeName = `Update${graphQLClassName}FieldsInput`;
let classGraphQLUpdateType = new GraphQLInputObjectType({
@@ -208,9 +187,7 @@ const load = (
}
),
});
- classGraphQLUpdateType = parseGraphQLSchema.addGraphQLType(
- classGraphQLUpdateType
- );
+ classGraphQLUpdateType = parseGraphQLSchema.addGraphQLType(classGraphQLUpdateType);
const classGraphQLPointerTypeName = `${graphQLClassName}PointerInput`;
let classGraphQLPointerType = new GraphQLInputObjectType({
@@ -233,8 +210,7 @@ const load = (
},
});
classGraphQLPointerType =
- parseGraphQLSchema.addGraphQLType(classGraphQLPointerType) ||
- defaultGraphQLTypes.OBJECT;
+ parseGraphQLSchema.addGraphQLType(classGraphQLPointerType) || defaultGraphQLTypes.OBJECT;
const classGraphQLRelationTypeName = `${graphQLClassName}RelationInput`;
let classGraphQLRelationType = new GraphQLInputObjectType({
@@ -261,8 +237,7 @@ const load = (
},
});
classGraphQLRelationType =
- parseGraphQLSchema.addGraphQLType(classGraphQLRelationType) ||
- defaultGraphQLTypes.OBJECT;
+ parseGraphQLSchema.addGraphQLType(classGraphQLRelationType) || defaultGraphQLTypes.OBJECT;
const classGraphQLConstraintsTypeName = `${graphQLClassName}WhereInput`;
let classGraphQLConstraintsType = new GraphQLInputObjectType({
@@ -310,8 +285,7 @@ const load = (
}),
});
classGraphQLConstraintsType =
- parseGraphQLSchema.addGraphQLType(classGraphQLConstraintsType) ||
- defaultGraphQLTypes.OBJECT;
+ parseGraphQLSchema.addGraphQLType(classGraphQLConstraintsType) || defaultGraphQLTypes.OBJECT;
const classGraphQLRelationConstraintsTypeName = `${graphQLClassName}RelationWhereInput`;
let classGraphQLRelationConstraintsType = new GraphQLInputObjectType({
@@ -319,8 +293,7 @@ const load = (
description: `The ${classGraphQLRelationConstraintsTypeName} input type is used in operations that involve filtering objects of the ${graphQLClassName} class.`,
fields: () => ({
have: {
- description:
- 'Run a relational/pointer query where at least one child object can match.',
+ description: 'Run a relational/pointer query where at least one child object can match.',
type: classGraphQLConstraintsType,
},
haveNot: {
@@ -357,14 +330,11 @@ const load = (
return updatedSortFields;
}, {}),
});
- classGraphQLOrderType = parseGraphQLSchema.addGraphQLType(
- classGraphQLOrderType
- );
+ classGraphQLOrderType = parseGraphQLSchema.addGraphQLType(classGraphQLOrderType);
const classGraphQLFindArgs = {
where: {
- description:
- 'These are the conditions that the objects need to match in order to be found.',
+ description: 'These are the conditions that the objects need to match in order to be found.',
type: classGraphQLConstraintsType,
},
order: {
@@ -378,12 +348,9 @@ const load = (
options: defaultGraphQLTypes.READ_OPTIONS_ATT,
};
const classGraphQLOutputTypeName = `${graphQLClassName}`;
- const interfaces = [
- defaultGraphQLTypes.PARSE_OBJECT,
- parseGraphQLSchema.relayNodeInterface,
- ];
+ const interfaces = [defaultGraphQLTypes.PARSE_OBJECT, parseGraphQLSchema.relayNodeInterface];
const parseObjectFields = {
- id: globalIdField(className, (obj) => obj.objectId),
+ id: globalIdField(className, obj => obj.objectId),
...defaultGraphQLTypes.PARSE_OBJECT_FIELDS,
};
const outputFields = () => {
@@ -395,44 +362,26 @@ const load = (
);
if (parseClass.fields[field].type === 'Relation') {
const targetParseClassTypes =
- parseGraphQLSchema.parseClassTypes[
- parseClass.fields[field].targetClass
- ];
- const args = targetParseClassTypes
- ? targetParseClassTypes.classGraphQLFindArgs
- : undefined;
+ parseGraphQLSchema.parseClassTypes[parseClass.fields[field].targetClass];
+ const args = targetParseClassTypes ? targetParseClassTypes.classGraphQLFindArgs : undefined;
return {
...fields,
[field]: {
description: `This is the object ${field}.`,
args,
- type: parseClass.fields[field].required
- ? new GraphQLNonNull(type)
- : type,
+ type: parseClass.fields[field].required ? new GraphQLNonNull(type) : type,
async resolve(source, args, context, queryInfo) {
try {
- const {
- where,
- order,
- skip,
- first,
- after,
- last,
- before,
- options,
- } = args;
- const {
- readPreference,
- includeReadPreference,
- subqueryReadPreference,
- } = options || {};
+ const { where, order, skip, first, after, last, before, options } = args;
+ const { readPreference, includeReadPreference, subqueryReadPreference } =
+ options || {};
const { config, auth, info } = context;
const selectedFields = getFieldNames(queryInfo);
const { keys, include } = extractKeysAndInclude(
selectedFields
- .filter((field) => field.startsWith('edges.node.'))
- .map((field) => field.replace('edges.node.', ''))
+ .filter(field => field.startsWith('edges.node.'))
+ .map(field => field.replace('edges.node.', ''))
);
const parseOrder = order && order.join(',');
@@ -478,12 +427,10 @@ const load = (
...fields,
[field]: {
description: `This is the object ${field}.`,
- type: parseClass.fields[field].required
- ? new GraphQLNonNull(type)
- : type,
+ type: parseClass.fields[field].required ? new GraphQLNonNull(type) : type,
async resolve(source) {
if (source[field] && source[field].coordinates) {
- return source[field].coordinates.map((coordinate) => ({
+ return source[field].coordinates.map(coordinate => ({
latitude: coordinate[0],
longitude: coordinate[1],
}));
@@ -498,17 +445,11 @@ const load = (
...fields,
[field]: {
description: `Use Inline Fragment on Array to get results: https://graphql.org/learn/queries/#inline-fragments`,
- type: parseClass.fields[field].required
- ? new GraphQLNonNull(type)
- : type,
+ type: parseClass.fields[field].required ? new GraphQLNonNull(type) : type,
async resolve(source) {
if (!source[field]) return null;
- return source[field].map(async (elem) => {
- if (
- elem.className &&
- elem.objectId &&
- elem.__type === 'Object'
- ) {
+ return source[field].map(async elem => {
+ if (elem.className && elem.objectId && elem.__type === 'Object') {
return elem;
} else {
return { value: elem };
@@ -522,9 +463,7 @@ const load = (
...fields,
[field]: {
description: `This is the object ${field}.`,
- type: parseClass.fields[field].required
- ? new GraphQLNonNull(type)
- : type,
+ type: parseClass.fields[field].required ? new GraphQLNonNull(type) : type,
},
};
} else {
@@ -538,9 +477,7 @@ const load = (
interfaces,
fields: outputFields,
});
- classGraphQLOutputType = parseGraphQLSchema.addGraphQLType(
- classGraphQLOutputType
- );
+ classGraphQLOutputType = parseGraphQLSchema.addGraphQLType(classGraphQLOutputType);
const { connectionType, edgeType } = connectionDefinitions({
name: graphQLClassName,
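Editor's note: among the types compacted in this file, the ${graphQLClassName}RelationWhereInput has the least obvious semantics: have matches parents where at least one related child satisfies the nested where, and haveNot inverts that. A sketch, assuming a Team class with a Relation field members targeting _User (schema is an assumption):

```js
// Find teams that have at least one member named "admin".
const teamsWithAdmin = `
  query {
    teams(where: { members: { have: { username: { equalTo: "admin" } } } }) {
      edges { node { objectId name } }
    }
  }
`;
```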
diff --git a/src/GraphQL/loaders/schemaDirectives.js b/src/GraphQL/loaders/schemaDirectives.js
index f0366778f0..43a8fe273b 100644
--- a/src/GraphQL/loaders/schemaDirectives.js
+++ b/src/GraphQL/loaders/schemaDirectives.js
@@ -21,15 +21,17 @@ const load = parseGraphQLSchema => {
functionName = this.args.to;
}
- return (await FunctionsRouter.handleCloudFunction({
- params: {
- functionName,
- },
- config,
- auth,
- info,
- body: args,
- })).response.result;
+ return (
+ await FunctionsRouter.handleCloudFunction({
+ params: {
+ functionName,
+ },
+ config,
+ auth,
+ info,
+ body: args,
+ })
+ ).response.result;
} catch (e) {
parseGraphQLSchema.handleError(e);
}
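Editor's note: this directive resolver forwards a field to a cloud code function, using the field's own name unless the to argument overrides it (the args.to fallback is visible just above the reformatted block). A sketch of how such a directive is typically attached in a user-provided schema; the @resolve directive name is registered elsewhere in the codebase and is assumed here:

```js
// User-supplied type extension wired to cloud functions.
const customSchema = `
  extend type Query {
    hello: String! @resolve
    welcome: String! @resolve(to: "welcomeFunction")
  }
`;
```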
diff --git a/src/GraphQL/loaders/schemaMutations.js b/src/GraphQL/loaders/schemaMutations.js
index 12a828e9d7..89798f9784 100644
--- a/src/GraphQL/loaders/schemaMutations.js
+++ b/src/GraphQL/loaders/schemaMutations.js
@@ -2,10 +2,7 @@ import Parse from 'parse/node';
import { GraphQLNonNull } from 'graphql';
import { mutationWithClientMutationId } from 'graphql-relay';
import * as schemaTypes from './schemaTypes';
-import {
- transformToParse,
- transformToGraphQL,
-} from '../transformers/schemaFields';
+import { transformToParse, transformToGraphQL } from '../transformers/schemaFields';
import { enforceMasterKeyAccess } from '../parseGraphQLUtils';
import { getClass } from './schemaQueries';
@@ -42,10 +39,7 @@ const load = parseGraphQLSchema => {
}
const schema = await config.database.loadSchema({ clearCache: true });
- const parseClass = await schema.addClassIfNotExists(
- name,
- transformToParse(schemaFields)
- );
+ const parseClass = await schema.addClassIfNotExists(name, transformToParse(schemaFields));
return {
class: {
name: parseClass.className,
@@ -58,18 +52,9 @@ const load = parseGraphQLSchema => {
},
});
- parseGraphQLSchema.addGraphQLType(
- createClassMutation.args.input.type.ofType,
- true,
- true
- );
+ parseGraphQLSchema.addGraphQLType(createClassMutation.args.input.type.ofType, true, true);
parseGraphQLSchema.addGraphQLType(createClassMutation.type, true, true);
- parseGraphQLSchema.addGraphQLMutation(
- 'createClass',
- createClassMutation,
- true,
- true
- );
+ parseGraphQLSchema.addGraphQLMutation('createClass', createClassMutation, true, true);
const updateClassMutation = mutationWithClientMutationId({
name: 'UpdateClass',
@@ -123,23 +108,13 @@ const load = parseGraphQLSchema => {
},
});
- parseGraphQLSchema.addGraphQLType(
- updateClassMutation.args.input.type.ofType,
- true,
- true
- );
+ parseGraphQLSchema.addGraphQLType(updateClassMutation.args.input.type.ofType, true, true);
parseGraphQLSchema.addGraphQLType(updateClassMutation.type, true, true);
- parseGraphQLSchema.addGraphQLMutation(
- 'updateClass',
- updateClassMutation,
- true,
- true
- );
+ parseGraphQLSchema.addGraphQLMutation('updateClass', updateClassMutation, true, true);
const deleteClassMutation = mutationWithClientMutationId({
name: 'DeleteClass',
- description:
- 'The deleteClass mutation can be used to delete an existing object class.',
+ description: 'The deleteClass mutation can be used to delete an existing object class.',
inputFields: {
name: schemaTypes.CLASS_NAME_ATT,
},
@@ -178,18 +153,9 @@ const load = parseGraphQLSchema => {
},
});
- parseGraphQLSchema.addGraphQLType(
- deleteClassMutation.args.input.type.ofType,
- true,
- true
- );
+ parseGraphQLSchema.addGraphQLType(deleteClassMutation.args.input.type.ofType, true, true);
parseGraphQLSchema.addGraphQLType(deleteClassMutation.type, true, true);
- parseGraphQLSchema.addGraphQLMutation(
- 'deleteClass',
- deleteClassMutation,
- true,
- true
- );
+ parseGraphQLSchema.addGraphQLMutation('deleteClass', deleteClassMutation, true, true);
};
export { load };
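Editor's note: the schema mutations above are master-key-only (enforceMasterKeyAccess is imported at the top of the file). A sketch of the createClass mutation, with class and field names illustrative:

```js
// Create a class through the schema API; requires the master key.
const createClass = `
  mutation {
    createClass(input: {
      name: "GameScore"
      schemaFields: { addNumbers: [{ name: "score" }] }
    }) {
      class { name schemaFields { name __typename } }
    }
  }
`;
```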
diff --git a/src/GraphQL/loaders/schemaQueries.js b/src/GraphQL/loaders/schemaQueries.js
index f5a166433a..cd049ce017 100644
--- a/src/GraphQL/loaders/schemaQueries.js
+++ b/src/GraphQL/loaders/schemaQueries.js
@@ -9,15 +9,9 @@ const getClass = async (name, schema) => {
return await schema.getOneSchema(name, true);
} catch (e) {
if (e === undefined) {
- throw new Parse.Error(
- Parse.Error.INVALID_CLASS_NAME,
- `Class ${name} does not exist.`
- );
+ throw new Parse.Error(Parse.Error.INVALID_CLASS_NAME, `Class ${name} does not exist.`);
} else {
- throw new Parse.Error(
- Parse.Error.INTERNAL_SERVER_ERROR,
- 'Database adapter error.'
- );
+ throw new Parse.Error(Parse.Error.INTERNAL_SERVER_ERROR, 'Database adapter error.');
}
}
};
@@ -26,8 +20,7 @@ const load = parseGraphQLSchema => {
parseGraphQLSchema.addGraphQLQuery(
'class',
{
- description:
- 'The class query can be used to retrieve an existing object class.',
+ description: 'The class query can be used to retrieve an existing object class.',
args: {
name: schemaTypes.CLASS_NAME_ATT,
},
@@ -57,11 +50,8 @@ const load = parseGraphQLSchema => {
parseGraphQLSchema.addGraphQLQuery(
'classes',
{
- description:
- 'The classes query can be used to retrieve the existing object classes.',
- type: new GraphQLNonNull(
- new GraphQLList(new GraphQLNonNull(schemaTypes.CLASS))
- ),
+ description: 'The classes query can be used to retrieve the existing object classes.',
+ type: new GraphQLNonNull(new GraphQLList(new GraphQLNonNull(schemaTypes.CLASS))),
resolve: async (_source, _args, context) => {
try {
const { config, auth } = context;
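Editor's note: the error handling compacted in getClass above has two distinct outcomes: an undefined rejection from getOneSchema surfaces as INVALID_CLASS_NAME, and anything else as INTERNAL_SERVER_ERROR. A sketch of the single-class query that exercises it (class name illustrative):

```js
// Master-key-only introspection of one class; a missing class yields
// Parse.Error.INVALID_CLASS_NAME ("Class GameScore does not exist.").
const getClassQuery = `
  query {
    class(name: "GameScore") {
      name
      schemaFields { name }
    }
  }
`;
```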
diff --git a/src/GraphQL/loaders/schemaTypes.js b/src/GraphQL/loaders/schemaTypes.js
index 6572969787..bd057f0217 100644
--- a/src/GraphQL/loaders/schemaTypes.js
+++ b/src/GraphQL/loaders/schemaTypes.js
@@ -14,8 +14,7 @@ const SCHEMA_FIELD_NAME_ATT = {
const SCHEMA_FIELD_INPUT = new GraphQLInputObjectType({
name: 'SchemaFieldInput',
- description:
- 'The SchemaFieldInput is used to specify a field of an object class schema.',
+ description: 'The SchemaFieldInput is used to specify a field of an object class schema.',
fields: {
name: SCHEMA_FIELD_NAME_ATT,
},
@@ -57,8 +56,7 @@ const SCHEMA_STRING_FIELD_INPUT = new GraphQLInputObjectType({
const SCHEMA_STRING_FIELD = new GraphQLObjectType({
name: 'SchemaStringField',
- description:
- 'The SchemaStringField is used to return information of a String field.',
+ description: 'The SchemaStringField is used to return information of a String field.',
interfaces: [SCHEMA_FIELD],
fields: {
name: SCHEMA_FIELD_NAME_ATT,
@@ -76,8 +74,7 @@ const SCHEMA_NUMBER_FIELD_INPUT = new GraphQLInputObjectType({
const SCHEMA_NUMBER_FIELD = new GraphQLObjectType({
name: 'SchemaNumberField',
- description:
- 'The SchemaNumberField is used to return information of a Number field.',
+ description: 'The SchemaNumberField is used to return information of a Number field.',
interfaces: [SCHEMA_FIELD],
fields: {
name: SCHEMA_FIELD_NAME_ATT,
@@ -95,8 +92,7 @@ const SCHEMA_BOOLEAN_FIELD_INPUT = new GraphQLInputObjectType({
const SCHEMA_BOOLEAN_FIELD = new GraphQLObjectType({
name: 'SchemaBooleanField',
- description:
- 'The SchemaBooleanField is used to return information of a Boolean field.',
+ description: 'The SchemaBooleanField is used to return information of a Boolean field.',
interfaces: [SCHEMA_FIELD],
fields: {
name: SCHEMA_FIELD_NAME_ATT,
@@ -114,8 +110,7 @@ const SCHEMA_ARRAY_FIELD_INPUT = new GraphQLInputObjectType({
const SCHEMA_ARRAY_FIELD = new GraphQLObjectType({
name: 'SchemaArrayField',
- description:
- 'The SchemaArrayField is used to return information of an Array field.',
+ description: 'The SchemaArrayField is used to return information of an Array field.',
interfaces: [SCHEMA_FIELD],
fields: {
name: SCHEMA_FIELD_NAME_ATT,
@@ -133,8 +128,7 @@ const SCHEMA_OBJECT_FIELD_INPUT = new GraphQLInputObjectType({
const SCHEMA_OBJECT_FIELD = new GraphQLObjectType({
name: 'SchemaObjectField',
- description:
- 'The SchemaObjectField is used to return information of an Object field.',
+ description: 'The SchemaObjectField is used to return information of an Object field.',
interfaces: [SCHEMA_FIELD],
fields: {
name: SCHEMA_FIELD_NAME_ATT,
@@ -152,8 +146,7 @@ const SCHEMA_DATE_FIELD_INPUT = new GraphQLInputObjectType({
const SCHEMA_DATE_FIELD = new GraphQLObjectType({
name: 'SchemaDateField',
- description:
- 'The SchemaDateField is used to return information of a Date field.',
+ description: 'The SchemaDateField is used to return information of a Date field.',
interfaces: [SCHEMA_FIELD],
fields: {
name: SCHEMA_FIELD_NAME_ATT,
@@ -171,8 +164,7 @@ const SCHEMA_FILE_FIELD_INPUT = new GraphQLInputObjectType({
const SCHEMA_FILE_FIELD = new GraphQLObjectType({
name: 'SchemaFileField',
- description:
- 'The SchemaFileField is used to return information of a File field.',
+ description: 'The SchemaFileField is used to return information of a File field.',
interfaces: [SCHEMA_FIELD],
fields: {
name: SCHEMA_FIELD_NAME_ATT,
@@ -190,8 +182,7 @@ const SCHEMA_GEO_POINT_FIELD_INPUT = new GraphQLInputObjectType({
const SCHEMA_GEO_POINT_FIELD = new GraphQLObjectType({
name: 'SchemaGeoPointField',
- description:
- 'The SchemaGeoPointField is used to return information of a Geo Point field.',
+ description: 'The SchemaGeoPointField is used to return information of a Geo Point field.',
interfaces: [SCHEMA_FIELD],
fields: {
name: SCHEMA_FIELD_NAME_ATT,
@@ -209,8 +200,7 @@ const SCHEMA_POLYGON_FIELD_INPUT = new GraphQLInputObjectType({
const SCHEMA_POLYGON_FIELD = new GraphQLObjectType({
name: 'SchemaPolygonField',
- description:
- 'The SchemaPolygonField is used to return information of a Polygon field.',
+ description: 'The SchemaPolygonField is used to return information of a Polygon field.',
interfaces: [SCHEMA_FIELD],
fields: {
name: SCHEMA_FIELD_NAME_ATT,
@@ -228,8 +218,7 @@ const SCHEMA_BYTES_FIELD_INPUT = new GraphQLInputObjectType({
const SCHEMA_BYTES_FIELD = new GraphQLObjectType({
name: 'SchemaBytesField',
- description:
- 'The SchemaBytesField is used to return information of a Bytes field.',
+ description: 'The SchemaBytesField is used to return information of a Bytes field.',
interfaces: [SCHEMA_FIELD],
fields: {
name: SCHEMA_FIELD_NAME_ATT,
@@ -253,8 +242,7 @@ const SCHEMA_POINTER_FIELD_INPUT = new GraphQLInputObjectType({
const SCHEMA_POINTER_FIELD = new GraphQLObjectType({
name: 'SchemaPointerField',
- description:
- 'The SchemaPointerField is used to return information of a Pointer field.',
+ description: 'The SchemaPointerField is used to return information of a Pointer field.',
interfaces: [SCHEMA_FIELD],
fields: {
name: SCHEMA_FIELD_NAME_ATT,
@@ -274,8 +262,7 @@ const SCHEMA_RELATION_FIELD_INPUT = new GraphQLInputObjectType({
const SCHEMA_RELATION_FIELD = new GraphQLObjectType({
name: 'SchemaRelationField',
- description:
- 'The SchemaRelationField is used to return information of a Relation field.',
+ description: 'The SchemaRelationField is used to return information of a Relation field.',
interfaces: [SCHEMA_FIELD],
fields: {
name: SCHEMA_FIELD_NAME_ATT,
@@ -285,8 +272,7 @@ const SCHEMA_RELATION_FIELD = new GraphQLObjectType({
const SCHEMA_ACL_FIELD = new GraphQLObjectType({
name: 'SchemaACLField',
- description:
- 'The SchemaACLField is used to return information of an ACL field.',
+ description: 'The SchemaACLField is used to return information of an ACL field.',
interfaces: [SCHEMA_FIELD],
fields: {
name: SCHEMA_FIELD_NAME_ATT,
@@ -298,28 +284,23 @@ const SCHEMA_FIELDS_INPUT = new GraphQLInputObjectType({
description: `The CreateClassSchemaInput type is used to specify the schema for a new object class to be created.`,
fields: {
addStrings: {
- description:
- 'These are the String fields to be added to the class schema.',
+ description: 'These are the String fields to be added to the class schema.',
type: new GraphQLList(new GraphQLNonNull(SCHEMA_STRING_FIELD_INPUT)),
},
addNumbers: {
- description:
- 'These are the Number fields to be added to the class schema.',
+ description: 'These are the Number fields to be added to the class schema.',
type: new GraphQLList(new GraphQLNonNull(SCHEMA_NUMBER_FIELD_INPUT)),
},
addBooleans: {
- description:
- 'These are the Boolean fields to be added to the class schema.',
+ description: 'These are the Boolean fields to be added to the class schema.',
type: new GraphQLList(new GraphQLNonNull(SCHEMA_BOOLEAN_FIELD_INPUT)),
},
addArrays: {
- description:
- 'These are the Array fields to be added to the class schema.',
+ description: 'These are the Array fields to be added to the class schema.',
type: new GraphQLList(new GraphQLNonNull(SCHEMA_ARRAY_FIELD_INPUT)),
},
addObjects: {
- description:
- 'These are the Object fields to be added to the class schema.',
+ description: 'These are the Object fields to be added to the class schema.',
type: new GraphQLList(new GraphQLNonNull(SCHEMA_OBJECT_FIELD_INPUT)),
},
addDates: {
@@ -336,23 +317,19 @@ const SCHEMA_FIELDS_INPUT = new GraphQLInputObjectType({
type: SCHEMA_GEO_POINT_FIELD_INPUT,
},
addPolygons: {
- description:
- 'These are the Polygon fields to be added to the class schema.',
+ description: 'These are the Polygon fields to be added to the class schema.',
type: new GraphQLList(new GraphQLNonNull(SCHEMA_POLYGON_FIELD_INPUT)),
},
addBytes: {
- description:
- 'These are the Bytes fields to be added to the class schema.',
+ description: 'These are the Bytes fields to be added to the class schema.',
type: new GraphQLList(new GraphQLNonNull(SCHEMA_BYTES_FIELD_INPUT)),
},
addPointers: {
- description:
- 'These are the Pointer fields to be added to the class schema.',
+ description: 'These are the Pointer fields to be added to the class schema.',
type: new GraphQLList(new GraphQLNonNull(SCHEMA_POINTER_FIELD_INPUT)),
},
addRelations: {
- description:
- 'These are the Relation fields to be added to the class schema.',
+ description: 'These are the Relation fields to be added to the class schema.',
type: new GraphQLList(new GraphQLNonNull(SCHEMA_RELATION_FIELD_INPUT)),
},
remove: {
@@ -374,9 +351,7 @@ const CLASS = new GraphQLObjectType({
name: CLASS_NAME_ATT,
schemaFields: {
description: "These are the schema's fields of the object class.",
- type: new GraphQLNonNull(
- new GraphQLList(new GraphQLNonNull(SCHEMA_FIELD))
- ),
+ type: new GraphQLNonNull(new GraphQLList(new GraphQLNonNull(SCHEMA_FIELD))),
},
},
});
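Editor's note: because schemaFields is typed as the SchemaField interface (see the CLASS type just above), type-specific details are only reachable through inline fragments. A sketch; selecting targetClassName on the pointer/relation field types is an assumption inferred from their input counterparts:

```js
// Distinguish field kinds via __typename and inline fragments.
const classFields = `
  query {
    class(name: "GameScore") {
      schemaFields {
        name
        __typename
        ... on SchemaPointerField { targetClassName }
        ... on SchemaRelationField { targetClassName }
      }
    }
  }
`;
```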
diff --git a/src/GraphQL/loaders/usersQueries.js b/src/GraphQL/loaders/usersQueries.js
index 976d0b3b02..c64ce6b90d 100644
--- a/src/GraphQL/loaders/usersQueries.js
+++ b/src/GraphQL/loaders/usersQueries.js
@@ -5,18 +5,10 @@ import rest from '../../rest';
import { extractKeysAndInclude } from './parseClassTypes';
import { Auth } from '../../Auth';
-const getUserFromSessionToken = async (
- context,
- queryInfo,
- keysPrefix,
- userId
-) => {
+const getUserFromSessionToken = async (context, queryInfo, keysPrefix, userId) => {
const { info, config } = context;
if (!info || !info.sessionToken) {
- throw new Parse.Error(
- Parse.Error.INVALID_SESSION_TOKEN,
- 'Invalid session token'
- );
+ throw new Parse.Error(Parse.Error.INVALID_SESSION_TOKEN, 'Invalid session token');
}
const sessionToken = info.sessionToken;
const selectedFields = getFieldNames(queryInfo)
@@ -70,10 +62,7 @@ const getUserFromSessionToken = async (
info.context
);
if (!response.results || response.results.length == 0) {
- throw new Parse.Error(
- Parse.Error.INVALID_SESSION_TOKEN,
- 'Invalid session token'
- );
+ throw new Parse.Error(Parse.Error.INVALID_SESSION_TOKEN, 'Invalid session token');
} else {
const user = response.results[0];
return {
@@ -91,17 +80,11 @@ const load = parseGraphQLSchema => {
parseGraphQLSchema.addGraphQLQuery(
'viewer',
{
- description:
- 'The viewer query can be used to return the current user data.',
+ description: 'The viewer query can be used to return the current user data.',
type: new GraphQLNonNull(parseGraphQLSchema.viewerType),
async resolve(_source, _args, context, queryInfo) {
try {
- return await getUserFromSessionToken(
- context,
- queryInfo,
- 'user.',
- false
- );
+ return await getUserFromSessionToken(context, queryInfo, 'user.', false);
} catch (e) {
parseGraphQLSchema.handleError(e);
}
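Editor's note: getUserFromSessionToken throws INVALID_SESSION_TOKEN both when no token is supplied and when the token matches no session, as the two compacted throws above show. A sketch of the viewer query; sending the token in the X-Parse-Session-Token header is the conventional Parse transport and assumed here:

```js
// Fetch the current user for the request's session token.
const viewer = `
  query {
    viewer {
      sessionToken
      user { id username }
    }
  }
`;
```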
diff --git a/src/GraphQL/transformers/constraintType.js b/src/GraphQL/transformers/constraintType.js
index 61a6413a8f..6da986af30 100644
--- a/src/GraphQL/transformers/constraintType.js
+++ b/src/GraphQL/transformers/constraintType.js
@@ -1,11 +1,6 @@
import * as defaultGraphQLTypes from '../loaders/defaultGraphQLTypes';
-const transformConstraintTypeToGraphQL = (
- parseType,
- targetClass,
- parseClassTypes,
- fieldName
-) => {
+const transformConstraintTypeToGraphQL = (parseType, targetClass, parseClassTypes, fieldName) => {
if (fieldName === 'id' || fieldName === 'objectId') {
return defaultGraphQLTypes.ID_WHERE_INPUT;
}
diff --git a/src/GraphQL/transformers/inputType.js b/src/GraphQL/transformers/inputType.js
index 29c91a65ea..bba838bcd3 100644
--- a/src/GraphQL/transformers/inputType.js
+++ b/src/GraphQL/transformers/inputType.js
@@ -1,16 +1,7 @@
-import {
- GraphQLString,
- GraphQLFloat,
- GraphQLBoolean,
- GraphQLList,
-} from 'graphql';
+import { GraphQLString, GraphQLFloat, GraphQLBoolean, GraphQLList } from 'graphql';
import * as defaultGraphQLTypes from '../loaders/defaultGraphQLTypes';
-const transformInputTypeToGraphQL = (
- parseType,
- targetClass,
- parseClassTypes
-) => {
+const transformInputTypeToGraphQL = (parseType, targetClass, parseClassTypes) => {
switch (parseType) {
case 'String':
return GraphQLString;
diff --git a/src/GraphQL/transformers/mutation.js b/src/GraphQL/transformers/mutation.js
index 06cc7deee0..583d330620 100644
--- a/src/GraphQL/transformers/mutation.js
+++ b/src/GraphQL/transformers/mutation.js
@@ -14,19 +14,13 @@ const transformTypes = async (
classGraphQLUpdateType,
config: { isCreateEnabled, isUpdateEnabled },
} = parseGraphQLSchema.parseClassTypes[className];
- const parseClass = parseGraphQLSchema.parseClasses.find(
- (clazz) => clazz.className === className
- );
+ const parseClass = parseGraphQLSchema.parseClasses.find(clazz => clazz.className === className);
if (fields) {
const classGraphQLCreateTypeFields =
- isCreateEnabled && classGraphQLCreateType
- ? classGraphQLCreateType.getFields()
- : null;
+ isCreateEnabled && classGraphQLCreateType ? classGraphQLCreateType.getFields() : null;
const classGraphQLUpdateTypeFields =
- isUpdateEnabled && classGraphQLUpdateType
- ? classGraphQLUpdateType.getFields()
- : null;
- const promises = Object.keys(fields).map(async (field) => {
+ isUpdateEnabled && classGraphQLUpdateType ? classGraphQLUpdateType.getFields() : null;
+ const promises = Object.keys(fields).map(async field => {
let inputTypeField;
if (inputType === 'create' && classGraphQLCreateTypeFields) {
inputTypeField = classGraphQLCreateTypeFields[field];
@@ -84,18 +78,15 @@ const transformers = {
}
throw new Parse.Error(Parse.Error.FILE_SAVE_ERROR, 'Invalid file upload.');
},
- polygon: (value) => ({
+ polygon: value => ({
__type: 'Polygon',
- coordinates: value.map((geoPoint) => [
- geoPoint.latitude,
- geoPoint.longitude,
- ]),
+ coordinates: value.map(geoPoint => [geoPoint.latitude, geoPoint.longitude]),
}),
- geoPoint: (value) => ({
+ geoPoint: value => ({
...value,
__type: 'GeoPoint',
}),
- ACL: (value) => {
+ ACL: value => {
const parseACL = {};
if (value.public) {
parseACL['*'] = {
@@ -104,7 +95,7 @@ const transformers = {
};
}
if (value.users) {
- value.users.forEach((rule) => {
+ value.users.forEach(rule => {
const globalIdObject = fromGlobalId(rule.userId);
if (globalIdObject.type === '_User') {
rule.userId = globalIdObject.id;
@@ -116,7 +107,7 @@ const transformers = {
});
}
if (value.roles) {
- value.roles.forEach((rule) => {
+ value.roles.forEach(rule => {
parseACL[`role:${rule.roleName}`] = {
read: rule.read,
write: rule.write,
@@ -125,13 +116,7 @@ const transformers = {
}
return parseACL;
},
- relation: async (
- targetClass,
- field,
- value,
- parseGraphQLSchema,
- { config, auth, info }
- ) => {
+ relation: async (targetClass, field, value, parseGraphQLSchema, { config, auth, info }) => {
if (Object.keys(value).length === 0)
throw new Parse.Error(
Parse.Error.INVALID_POINTER,
@@ -147,22 +132,16 @@ const transformers = {
if (value.createAndAdd) {
nestedObjectsToAdd = (
await Promise.all(
- value.createAndAdd.map(async (input) => {
+ value.createAndAdd.map(async input => {
const parseFields = await transformTypes('create', input, {
className: targetClass,
parseGraphQLSchema,
req: { config, auth, info },
});
- return objectsMutations.createObject(
- targetClass,
- parseFields,
- config,
- auth,
- info
- );
+ return objectsMutations.createObject(targetClass, parseFields, config, auth, info);
})
)
- ).map((object) => ({
+ ).map(object => ({
__type: 'Pointer',
className: targetClass,
objectId: object.objectId,
@@ -171,7 +150,7 @@ const transformers = {
if (value.add || nestedObjectsToAdd.length > 0) {
if (!value.add) value.add = [];
- value.add = value.add.map((input) => {
+ value.add = value.add.map(input => {
const globalIdObject = fromGlobalId(input);
if (globalIdObject.type === targetClass) {
input = globalIdObject.id;
@@ -191,7 +170,7 @@ const transformers = {
if (value.remove) {
op.ops.push({
__op: 'RemoveRelation',
- objects: value.remove.map((input) => {
+ objects: value.remove.map(input => {
const globalIdObject = fromGlobalId(input);
if (globalIdObject.type === targetClass) {
input = globalIdObject.id;
@@ -206,13 +185,7 @@ const transformers = {
}
return op;
},
- pointer: async (
- targetClass,
- field,
- value,
- parseGraphQLSchema,
- { config, auth, info }
- ) => {
+ pointer: async (targetClass, field, value, parseGraphQLSchema, { config, auth, info }) => {
if (Object.keys(value).length > 1 || Object.keys(value).length === 0)
throw new Parse.Error(
Parse.Error.INVALID_POINTER,
diff --git a/src/GraphQL/transformers/outputType.js b/src/GraphQL/transformers/outputType.js
index e56f233832..81afd421d1 100644
--- a/src/GraphQL/transformers/outputType.js
+++ b/src/GraphQL/transformers/outputType.js
@@ -1,17 +1,7 @@
import * as defaultGraphQLTypes from '../loaders/defaultGraphQLTypes';
-import {
- GraphQLString,
- GraphQLFloat,
- GraphQLBoolean,
- GraphQLList,
- GraphQLNonNull,
-} from 'graphql';
+import { GraphQLString, GraphQLFloat, GraphQLBoolean, GraphQLList, GraphQLNonNull } from 'graphql';
-const transformOutputTypeToGraphQL = (
- parseType,
- targetClass,
- parseClassTypes
-) => {
+const transformOutputTypeToGraphQL = (parseType, targetClass, parseClassTypes) => {
switch (parseType) {
case 'String':
return GraphQLString;
@@ -41,9 +31,7 @@ const transformOutputTypeToGraphQL = (
parseClassTypes[targetClass] &&
parseClassTypes[targetClass].classGraphQLFindResultType
) {
- return new GraphQLNonNull(
- parseClassTypes[targetClass].classGraphQLFindResultType
- );
+ return new GraphQLNonNull(parseClassTypes[targetClass].classGraphQLFindResultType);
} else {
return new GraphQLNonNull(defaultGraphQLTypes.OBJECT);
}
diff --git a/src/GraphQL/transformers/query.js b/src/GraphQL/transformers/query.js
index 91e5b3b3b7..92ea7be5fe 100644
--- a/src/GraphQL/transformers/query.js
+++ b/src/GraphQL/transformers/query.js
@@ -51,9 +51,7 @@ const transformQueryConstraintInputToParse = (
parentConstraints,
parseClasses
) => {
- const fields = parseClasses.find(
- parseClass => parseClass.className === className
- ).fields;
+ const fields = parseClasses.find(parseClass => parseClass.className === className).fields;
if (parentFieldName === 'id' && className) {
Object.keys(constraints).forEach(constraintName => {
const constraintValue = constraints[constraintName];
@@ -110,12 +108,7 @@ const transformQueryConstraintInputToParse = (
* }
* }
*/
- if (
- fieldValue.key &&
- fieldValue.value &&
- parentConstraints &&
- parentFieldName
- ) {
+ if (fieldValue.key && fieldValue.value && parentConstraints && parentFieldName) {
delete parentConstraints[parentFieldName];
parentConstraints[`${parentFieldName}.${fieldValue.key}`] = {
...parentConstraints[`${parentFieldName}.${fieldValue.key}`],
@@ -123,8 +116,7 @@ const transformQueryConstraintInputToParse = (
};
} else if (
fields[parentFieldName] &&
- (fields[parentFieldName].type === 'Pointer' ||
- fields[parentFieldName].type === 'Relation')
+ (fields[parentFieldName].type === 'Pointer' || fields[parentFieldName].type === 'Relation')
) {
const { targetClass } = fields[parentFieldName];
if (fieldName === 'exists') {
@@ -193,11 +185,7 @@ const transformQueryConstraintInputToParse = (
}
break;
case 'box':
- if (
- typeof fieldValue === 'object' &&
- fieldValue.bottomLeft &&
- fieldValue.upperRight
- ) {
+ if (typeof fieldValue === 'object' && fieldValue.bottomLeft && fieldValue.upperRight) {
fieldValue = [
{
__type: 'GeoPoint',
@@ -221,11 +209,7 @@ const transformQueryConstraintInputToParse = (
}
break;
case 'centerSphere':
- if (
- typeof fieldValue === 'object' &&
- fieldValue.center &&
- fieldValue.distance
- ) {
+ if (typeof fieldValue === 'object' && fieldValue.center && fieldValue.distance) {
fieldValue = [
{
__type: 'GeoPoint',
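Editor's note: the box and centerSphere branches above normalize object-shaped inputs into the GeoPoint arrays Parse expects. A sketch of the box case; the within/box nesting follows the geo where-input types and is assumed here, coordinates illustrative:

```js
// GraphQL-side constraint...
const where = {
  location: {
    within: {
      box: {
        bottomLeft: { latitude: 37.0, longitude: -122.5 },
        upperRight: { latitude: 38.0, longitude: -121.5 },
      },
    },
  },
};
// ...is rewritten by the `box` branch above into a two-element array of
// { __type: 'GeoPoint', latitude, longitude } values for Parse's $box.
```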
diff --git a/src/GraphQL/transformers/schemaFields.js b/src/GraphQL/transformers/schemaFields.js
index 9d94e6f80e..4e3898737e 100644
--- a/src/GraphQL/transformers/schemaFields.js
+++ b/src/GraphQL/transformers/schemaFields.js
@@ -22,20 +22,12 @@ const transformToParse = (graphQLSchemaFields, existingFields) => {
}
if (
graphQLSchemaFields.remove &&
- graphQLSchemaFields.remove.find(
- removeField => removeField.name === field.name
- )
+ graphQLSchemaFields.remove.find(removeField => removeField.name === field.name)
) {
return parseSchemaFields;
}
- if (
- parseSchemaFields[field.name] ||
- (existingFields && existingFields[field.name])
- ) {
- throw new Parse.Error(
- Parse.Error.INVALID_KEY_NAME,
- `Duplicated field name: ${field.name}`
- );
+ if (parseSchemaFields[field.name] || (existingFields && existingFields[field.name])) {
+ throw new Parse.Error(Parse.Error.INVALID_KEY_NAME, `Duplicated field name: ${field.name}`);
}
if (type === 'Relation' || type === 'Pointer') {
return {
diff --git a/src/Options/Definitions.js b/src/Options/Definitions.js
index a615db60f8..cac1382a8e 100644
--- a/src/Options/Definitions.js
+++ b/src/Options/Definitions.js
@@ -3,7 +3,7 @@
This code has been generated by resources/buildConfigDefinitions.js
Do not edit manually, but update Options/index.js
*/
-var parsers = require('./parsers');
+var parsers = require("./parsers");
module.exports.ParseServerOptions = {
accountLockout: {
@@ -127,7 +127,8 @@ module.exports.ParseServerOptions = {
},
emailVerifyTokenReuseIfValid: {
env: 'PARSE_SERVER_EMAIL_VERIFY_TOKEN_REUSE_IF_VALID',
- help: 'an existing email verify token should be reused when resend verification email is requested',
+ help:
+ 'an existing email verify token should be reused when resend verification email is requested',
action: parsers.booleanParser,
default: false,
},
@@ -148,13 +149,6 @@ module.exports.ParseServerOptions = {
action: parsers.booleanParser,
default: false,
},
- enableSingleSchemaCache: {
- env: 'PARSE_SERVER_ENABLE_SINGLE_SCHEMA_CACHE',
- help:
- 'Use a single schema cache shared across requests. Reduces number of queries made to _SCHEMA, defaults to false, i.e. unique schema cache per request.',
- action: parsers.booleanParser,
- default: false,
- },
encryptionKey: {
env: 'PARSE_SERVER_ENCRYPTION_KEY',
help: 'Key for encrypting your files',
@@ -335,6 +329,13 @@ module.exports.ParseServerOptions = {
env: 'PARSE_SERVER_READ_ONLY_MASTER_KEY',
help: 'Read-only key, which has the same capabilities as MasterKey without writes',
},
+ replicaSet: {
+ env: 'PARSE_SERVER_REPLICA_SET',
+ help:
'If you are using MongoDB, specify that you are using a replica set. This will allow Parse Server to perform optimizations.',
+ action: parsers.booleanParser,
+ default: false,
+ },
restAPIKey: {
env: 'PARSE_SERVER_REST_API_KEY',
help: 'Key for REST calls',
@@ -352,13 +353,6 @@ module.exports.ParseServerOptions = {
action: parsers.booleanParser,
default: false,
},
- schemaCacheTTL: {
- env: 'PARSE_SERVER_SCHEMA_CACHE_TTL',
- help:
- 'The TTL for caching the schema for optimizing read/write operations. You should put a long TTL when your DB is in production. default to 5000; set 0 to disable.',
- action: parsers.numberParser('schemaCacheTTL'),
- default: 5000,
- },
serverCloseComplete: {
env: 'PARSE_SERVER_SERVER_CLOSE_COMPLETE',
help: 'Callback when server has closed',
@@ -411,191 +405,201 @@ module.exports.ParseServerOptions = {
},
};
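Editor's note: the regenerated definitions above drop the schema cache knobs (enableSingleSchemaCache, schemaCacheTTL) and add replicaSet. A sketch of opting in, either via the option or its PARSE_SERVER_REPLICA_SET env var (connection string and keys illustrative):

```js
// Minimal server config enabling the new replicaSet optimization flag.
const { ParseServer } = require('parse-server');
const api = new ParseServer({
  appId: 'myAppId',
  masterKey: 'myMasterKey',
  serverURL: 'http://localhost:1337/parse',
  databaseURI: 'mongodb://host1,host2/parse?replicaSet=rs0',
  replicaSet: true,
});
```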
module.exports.CustomPagesOptions = {
- choosePassword: {
- env: 'PARSE_SERVER_CUSTOM_PAGES_CHOOSE_PASSWORD',
- help: 'choose password page path',
- },
- invalidLink: {
- env: 'PARSE_SERVER_CUSTOM_PAGES_INVALID_LINK',
- help: 'invalid link page path',
- },
- invalidVerificationLink: {
- env: 'PARSE_SERVER_CUSTOM_PAGES_INVALID_VERIFICATION_LINK',
- help: 'invalid verification link page path',
- },
- linkSendFail: {
- env: 'PARSE_SERVER_CUSTOM_PAGES_LINK_SEND_FAIL',
- help: 'verification link send fail page path',
- },
- linkSendSuccess: {
- env: 'PARSE_SERVER_CUSTOM_PAGES_LINK_SEND_SUCCESS',
- help: 'verification link send success page path',
- },
- parseFrameURL: {
- env: 'PARSE_SERVER_CUSTOM_PAGES_PARSE_FRAME_URL',
- help: 'for masking user-facing pages',
- },
- passwordResetSuccess: {
- env: 'PARSE_SERVER_CUSTOM_PAGES_PASSWORD_RESET_SUCCESS',
- help: 'password reset success page path',
- },
- verifyEmailSuccess: {
- env: 'PARSE_SERVER_CUSTOM_PAGES_VERIFY_EMAIL_SUCCESS',
- help: 'verify email success page path',
- },
+ "choosePassword": {
+ "env": "PARSE_SERVER_CUSTOM_PAGES_CHOOSE_PASSWORD",
+ "help": "choose password page path"
+ },
+ "invalidLink": {
+ "env": "PARSE_SERVER_CUSTOM_PAGES_INVALID_LINK",
+ "help": "invalid link page path"
+ },
+ "invalidVerificationLink": {
+ "env": "PARSE_SERVER_CUSTOM_PAGES_INVALID_VERIFICATION_LINK",
+ "help": "invalid verification link page path"
+ },
+ "linkSendFail": {
+ "env": "PARSE_SERVER_CUSTOM_PAGES_LINK_SEND_FAIL",
+ "help": "verification link send fail page path"
+ },
+ "linkSendSuccess": {
+ "env": "PARSE_SERVER_CUSTOM_PAGES_LINK_SEND_SUCCESS",
+ "help": "verification link send success page path"
+ },
+ "parseFrameURL": {
+ "env": "PARSE_SERVER_CUSTOM_PAGES_PARSE_FRAME_URL",
+ "help": "for masking user-facing pages"
+ },
+ "passwordResetSuccess": {
+ "env": "PARSE_SERVER_CUSTOM_PAGES_PASSWORD_RESET_SUCCESS",
+ "help": "password reset success page path"
+ },
+ "verifyEmailSuccess": {
+ "env": "PARSE_SERVER_CUSTOM_PAGES_VERIFY_EMAIL_SUCCESS",
+ "help": "verify email success page path"
+ }
};
module.exports.LiveQueryOptions = {
- classNames: {
- env: 'PARSE_SERVER_LIVEQUERY_CLASSNAMES',
- help: "parse-server's LiveQuery classNames",
- action: parsers.arrayParser,
- },
- pubSubAdapter: {
- env: 'PARSE_SERVER_LIVEQUERY_PUB_SUB_ADAPTER',
- help: 'LiveQuery pubsub adapter',
- action: parsers.moduleOrObjectParser,
- },
- redisOptions: {
- env: 'PARSE_SERVER_LIVEQUERY_REDIS_OPTIONS',
- help: "parse-server's LiveQuery redisOptions",
- action: parsers.objectParser,
- },
- redisURL: {
- env: 'PARSE_SERVER_LIVEQUERY_REDIS_URL',
- help: "parse-server's LiveQuery redisURL",
- },
- wssAdapter: {
- env: 'PARSE_SERVER_LIVEQUERY_WSS_ADAPTER',
- help: 'Adapter module for the WebSocketServer',
- action: parsers.moduleOrObjectParser,
- },
+ "classNames": {
+ "env": "PARSE_SERVER_LIVEQUERY_CLASSNAMES",
+ "help": "parse-server's LiveQuery classNames",
+ "action": parsers.arrayParser
+ },
+ "pubSubAdapter": {
+ "env": "PARSE_SERVER_LIVEQUERY_PUB_SUB_ADAPTER",
+ "help": "LiveQuery pubsub adapter",
+ "action": parsers.moduleOrObjectParser
+ },
+ "redisOptions": {
+ "env": "PARSE_SERVER_LIVEQUERY_REDIS_OPTIONS",
+ "help": "parse-server's LiveQuery redisOptions",
+ "action": parsers.objectParser
+ },
+ "redisURL": {
+ "env": "PARSE_SERVER_LIVEQUERY_REDIS_URL",
+ "help": "parse-server's LiveQuery redisURL"
+ },
+ "wssAdapter": {
+ "env": "PARSE_SERVER_LIVEQUERY_WSS_ADAPTER",
+ "help": "Adapter module for the WebSocketServer",
+ "action": parsers.moduleOrObjectParser
+ }
};
module.exports.LiveQueryServerOptions = {
- appId: {
- env: 'PARSE_LIVE_QUERY_SERVER_APP_ID',
- help:
- 'This string should match the appId in use by your Parse Server. If you deploy the LiveQuery server alongside Parse Server, the LiveQuery server will try to use the same appId.',
- },
- cacheTimeout: {
- env: 'PARSE_LIVE_QUERY_SERVER_CACHE_TIMEOUT',
- help:
- "Number in milliseconds. When clients provide the sessionToken to the LiveQuery server, the LiveQuery server will try to fetch its ParseUser's objectId from parse server and store it in the cache. The value defines the duration of the cache. Check the following Security section and our protocol specification for details, defaults to 5 * 1000 ms (5 seconds).",
- action: parsers.numberParser('cacheTimeout'),
- },
- keyPairs: {
- env: 'PARSE_LIVE_QUERY_SERVER_KEY_PAIRS',
- help:
- 'A JSON object that serves as a whitelist of keys. It is used for validating clients when they try to connect to the LiveQuery server. Check the following Security section and our protocol specification for details.',
- action: parsers.objectParser,
- },
- logLevel: {
- env: 'PARSE_LIVE_QUERY_SERVER_LOG_LEVEL',
- help:
- 'This string defines the log level of the LiveQuery server. We support VERBOSE, INFO, ERROR, NONE, defaults to INFO.',
- },
- masterKey: {
- env: 'PARSE_LIVE_QUERY_SERVER_MASTER_KEY',
- help:
- 'This string should match the masterKey in use by your Parse Server. If you deploy the LiveQuery server alongside Parse Server, the LiveQuery server will try to use the same masterKey.',
- },
- port: {
- env: 'PARSE_LIVE_QUERY_SERVER_PORT',
- help: 'The port to run the LiveQuery server, defaults to 1337.',
- action: parsers.numberParser('port'),
- default: 1337,
- },
- pubSubAdapter: {
- env: 'PARSE_LIVE_QUERY_SERVER_PUB_SUB_ADAPTER',
- help: 'LiveQuery pubsub adapter',
- action: parsers.moduleOrObjectParser,
- },
- redisOptions: {
- env: 'PARSE_LIVE_QUERY_SERVER_REDIS_OPTIONS',
- help: "parse-server's LiveQuery redisOptions",
- action: parsers.objectParser,
- },
- redisURL: {
- env: 'PARSE_LIVE_QUERY_SERVER_REDIS_URL',
- help: "parse-server's LiveQuery redisURL",
- },
- serverURL: {
- env: 'PARSE_LIVE_QUERY_SERVER_SERVER_URL',
- help:
- 'This string should match the serverURL in use by your Parse Server. If you deploy the LiveQuery server alongside Parse Server, the LiveQuery server will try to use the same serverURL.',
- },
- websocketTimeout: {
- env: 'PARSE_LIVE_QUERY_SERVER_WEBSOCKET_TIMEOUT',
- help:
- 'Number of milliseconds between ping/pong frames. The WebSocket server sends ping/pong frames to the clients to keep the WebSocket alive. This value defines the interval of the ping/pong frame from the server to clients, defaults to 10 * 1000 ms (10 s).',
- action: parsers.numberParser('websocketTimeout'),
- },
- wssAdapter: {
- env: 'PARSE_LIVE_QUERY_SERVER_WSS_ADAPTER',
- help: 'Adapter module for the WebSocketServer',
- action: parsers.moduleOrObjectParser,
- },
+ "appId": {
+ "env": "PARSE_LIVE_QUERY_SERVER_APP_ID",
+ "help": "This string should match the appId in use by your Parse Server. If you deploy the LiveQuery server alongside Parse Server, the LiveQuery server will try to use the same appId."
+ },
+ "cacheTimeout": {
+ "env": "PARSE_LIVE_QUERY_SERVER_CACHE_TIMEOUT",
+ "help": "Number in milliseconds. When clients provide the sessionToken to the LiveQuery server, the LiveQuery server will try to fetch its ParseUser's objectId from parse server and store it in the cache. The value defines the duration of the cache. Check the following Security section and our protocol specification for details, defaults to 5 * 1000 ms (5 seconds).",
+ "action": parsers.numberParser("cacheTimeout")
+ },
+ "keyPairs": {
+ "env": "PARSE_LIVE_QUERY_SERVER_KEY_PAIRS",
+ "help": "A JSON object that serves as a whitelist of keys. It is used for validating clients when they try to connect to the LiveQuery server. Check the following Security section and our protocol specification for details.",
+ "action": parsers.objectParser
+ },
+ "logLevel": {
+ "env": "PARSE_LIVE_QUERY_SERVER_LOG_LEVEL",
+ "help": "This string defines the log level of the LiveQuery server. We support VERBOSE, INFO, ERROR, NONE, defaults to INFO."
+ },
+ "masterKey": {
+ "env": "PARSE_LIVE_QUERY_SERVER_MASTER_KEY",
+ "help": "This string should match the masterKey in use by your Parse Server. If you deploy the LiveQuery server alongside Parse Server, the LiveQuery server will try to use the same masterKey."
+ },
+ "port": {
+ "env": "PARSE_LIVE_QUERY_SERVER_PORT",
+ "help": "The port to run the LiveQuery server, defaults to 1337.",
+ "action": parsers.numberParser("port"),
+ "default": 1337
+ },
+ "pubSubAdapter": {
+ "env": "PARSE_LIVE_QUERY_SERVER_PUB_SUB_ADAPTER",
+ "help": "LiveQuery pubsub adapter",
+ "action": parsers.moduleOrObjectParser
+ },
+ "redisOptions": {
+ "env": "PARSE_LIVE_QUERY_SERVER_REDIS_OPTIONS",
+ "help": "parse-server's LiveQuery redisOptions",
+ "action": parsers.objectParser
+ },
+ "redisURL": {
+ "env": "PARSE_LIVE_QUERY_SERVER_REDIS_URL",
+ "help": "parse-server's LiveQuery redisURL"
+ },
+ "serverURL": {
+ "env": "PARSE_LIVE_QUERY_SERVER_SERVER_URL",
+ "help": "This string should match the serverURL in use by your Parse Server. If you deploy the LiveQuery server alongside Parse Server, the LiveQuery server will try to use the same serverURL."
+ },
+ "websocketTimeout": {
+ "env": "PARSE_LIVE_QUERY_SERVER_WEBSOCKET_TIMEOUT",
+ "help": "Number of milliseconds between ping/pong frames. The WebSocket server sends ping/pong frames to the clients to keep the WebSocket alive. This value defines the interval of the ping/pong frame from the server to clients, defaults to 10 * 1000 ms (10 s).",
+ "action": parsers.numberParser("websocketTimeout")
+ },
+ "wssAdapter": {
+ "env": "PARSE_LIVE_QUERY_SERVER_WSS_ADAPTER",
+ "help": "Adapter module for the WebSocketServer",
+ "action": parsers.moduleOrObjectParser
+ }
};
module.exports.IdempotencyOptions = {
- paths: {
- env: 'PARSE_SERVER_EXPERIMENTAL_IDEMPOTENCY_PATHS',
- help:
- 'An array of paths for which the feature should be enabled. The mount path must not be included, for example instead of `/parse/functions/myFunction` specifiy `functions/myFunction`. The entries are interpreted as regular expression, for example `functions/.*` matches all functions, `jobs/.*` matches all jobs, `classes/.*` matches all classes, `.*` matches all paths.',
- action: parsers.arrayParser,
- default: [],
- },
- ttl: {
- env: 'PARSE_SERVER_EXPERIMENTAL_IDEMPOTENCY_TTL',
- help:
- 'The duration in seconds after which a request record is discarded from the database, defaults to 300s.',
- action: parsers.numberParser('ttl'),
- default: 300,
- },
+ "paths": {
+ "env": "PARSE_SERVER_EXPERIMENTAL_IDEMPOTENCY_PATHS",
+ "help": "An array of paths for which the feature should be enabled. The mount path must not be included, for example instead of `/parse/functions/myFunction` specifiy `functions/myFunction`. The entries are interpreted as regular expression, for example `functions/.*` matches all functions, `jobs/.*` matches all jobs, `classes/.*` matches all classes, `.*` matches all paths.",
+ "action": parsers.arrayParser,
+ "default": []
+ },
+ "ttl": {
+ "env": "PARSE_SERVER_EXPERIMENTAL_IDEMPOTENCY_TTL",
+ "help": "The duration in seconds after which a request record is discarded from the database, defaults to 300s.",
+ "action": parsers.numberParser("ttl"),
+ "default": 300
+ }
};
module.exports.AccountLockoutOptions = {
- duration: {
- env: 'PARSE_SERVER_ACCOUNT_LOCKOUT_DURATION',
- help:
- 'number of minutes that a locked-out account remains locked out before automatically becoming unlocked.',
- action: parsers.numberParser('duration'),
- },
- threshold: {
- env: 'PARSE_SERVER_ACCOUNT_LOCKOUT_THRESHOLD',
- help: 'number of failed sign-in attempts that will cause a user account to be locked',
- action: parsers.numberParser('threshold'),
- },
+ "duration": {
+ "env": "PARSE_SERVER_ACCOUNT_LOCKOUT_DURATION",
+ "help": "number of minutes that a locked-out account remains locked out before automatically becoming unlocked.",
+ "action": parsers.numberParser("duration")
+ },
+ "threshold": {
+ "env": "PARSE_SERVER_ACCOUNT_LOCKOUT_THRESHOLD",
+ "help": "number of failed sign-in attempts that will cause a user account to be locked",
+ "action": parsers.numberParser("threshold")
+ }
};
module.exports.PasswordPolicyOptions = {
- doNotAllowUsername: {
- env: 'PARSE_SERVER_PASSWORD_POLICY_DO_NOT_ALLOW_USERNAME',
- help: 'disallow username in passwords',
- action: parsers.booleanParser,
- },
- maxPasswordAge: {
- env: 'PARSE_SERVER_PASSWORD_POLICY_MAX_PASSWORD_AGE',
- help: 'days for password expiry',
- action: parsers.numberParser('maxPasswordAge'),
- },
- maxPasswordHistory: {
- env: 'PARSE_SERVER_PASSWORD_POLICY_MAX_PASSWORD_HISTORY',
- help: 'setting to prevent reuse of previous n passwords',
- action: parsers.numberParser('maxPasswordHistory'),
- },
- resetTokenReuseIfValid: {
- env: 'PARSE_SERVER_PASSWORD_POLICY_RESET_TOKEN_REUSE_IF_VALID',
- help: "resend token if it's still valid",
- action: parsers.booleanParser,
- },
- resetTokenValidityDuration: {
- env: 'PARSE_SERVER_PASSWORD_POLICY_RESET_TOKEN_VALIDITY_DURATION',
- help: 'time for token to expire',
- action: parsers.numberParser('resetTokenValidityDuration'),
- },
- validatorCallback: {
- env: 'PARSE_SERVER_PASSWORD_POLICY_VALIDATOR_CALLBACK',
- help: 'a callback function to be invoked to validate the password',
- },
- validatorPattern: {
- env: 'PARSE_SERVER_PASSWORD_POLICY_VALIDATOR_PATTERN',
- help: 'a RegExp object or a regex string representing the pattern to enforce',
- },
+ "doNotAllowUsername": {
+ "env": "PARSE_SERVER_PASSWORD_POLICY_DO_NOT_ALLOW_USERNAME",
+ "help": "disallow username in passwords",
+ "action": parsers.booleanParser
+ },
+ "maxPasswordAge": {
+ "env": "PARSE_SERVER_PASSWORD_POLICY_MAX_PASSWORD_AGE",
+ "help": "days for password expiry",
+ "action": parsers.numberParser("maxPasswordAge")
+ },
+ "maxPasswordHistory": {
+ "env": "PARSE_SERVER_PASSWORD_POLICY_MAX_PASSWORD_HISTORY",
+ "help": "setting to prevent reuse of previous n passwords",
+ "action": parsers.numberParser("maxPasswordHistory")
+ },
+ "resetTokenReuseIfValid": {
+ "env": "PARSE_SERVER_PASSWORD_POLICY_RESET_TOKEN_REUSE_IF_VALID",
+ "help": "resend token if it's still valid",
+ "action": parsers.booleanParser
+ },
+ "resetTokenValidityDuration": {
+ "env": "PARSE_SERVER_PASSWORD_POLICY_RESET_TOKEN_VALIDITY_DURATION",
+ "help": "time for token to expire",
+ "action": parsers.numberParser("resetTokenValidityDuration")
+ },
+ "validatorCallback": {
+ "env": "PARSE_SERVER_PASSWORD_POLICY_VALIDATOR_CALLBACK",
+ "help": "a callback function to be invoked to validate the password"
+ },
+ "validatorPattern": {
+ "env": "PARSE_SERVER_PASSWORD_POLICY_VALIDATOR_PATTERN",
+ "help": "a RegExp object or a regex string representing the pattern to enforce"
+ }
+};
+module.exports.FileUploadOptions = {
+ "enableForAnonymousUser": {
+ "env": "PARSE_SERVER_FILE_UPLOAD_ENABLE_FOR_ANONYMOUS_USER",
+ "help": "Is true if file upload should be allowed for anonymous users.",
+ "action": parsers.booleanParser,
+ "default": false
+ },
+ "enableForAuthenticatedUser": {
+ "env": "PARSE_SERVER_FILE_UPLOAD_ENABLE_FOR_AUTHENTICATED_USER",
+ "help": "Is true if file upload should be allowed for authenticated users.",
+ "action": parsers.booleanParser,
+ "default": true
+ },
+ "enableForPublic": {
+ "env": "PARSE_SERVER_FILE_UPLOAD_ENABLE_FOR_PUBLIC",
+ "help": "Is true if file upload should be allowed for anyone, regardless of user authentication.",
+ "action": parsers.booleanParser,
+ "default": false
+ }
};
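Taken together, the new `FileUploadOptions` group and the `replicaSet` flag are the configuration-level additions in this diff, alongside the removal of the schema cache options. Below is a minimal sketch of how these options might be passed when constructing a server; the app keys, database URI, and port are placeholder values, and the `fileUpload` values simply restate the defaults from the generated `Definitions.js` above.

```js
// Sketch only: appId/masterKey/databaseURI/serverURL are placeholders.
const express = require('express');
const { ParseServer } = require('parse-server');

const api = new ParseServer({
  appId: 'myAppId',
  masterKey: 'myMasterKey',
  databaseURI: 'mongodb://localhost:27017/dev?replicaSet=rs0',
  serverURL: 'http://localhost:1337/parse',
  // New in this diff: declare that MongoDB runs as a replica set so
  // Parse Server can apply optimizations (defaults to false).
  replicaSet: true,
  // New in this diff: per-audience file upload permissions; the
  // values below mirror the documented defaults.
  fileUpload: {
    enableForPublic: false,           // requests without a user
    enableForAnonymousUser: false,    // users linked anonymously
    enableForAuthenticatedUser: true, // all other logged-in users
  },
});

const app = express();
app.use('/parse', api);
app.listen(1337);
```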
diff --git a/src/Options/docs.js b/src/Options/docs.js
index 576ff60a14..51c0a4ebe1 100644
--- a/src/Options/docs.js
+++ b/src/Options/docs.js
@@ -27,11 +27,11 @@
* @property {Number} emailVerifyTokenValidityDuration Email verification token validity duration, in seconds
* @property {Boolean} enableAnonymousUsers Enable (or disable) anonymous users, defaults to true
* @property {Boolean} enableExpressErrorHandler Enables the default express error handler for all errors
- * @property {Boolean} enableSingleSchemaCache Use a single schema cache shared across requests. Reduces number of queries made to _SCHEMA, defaults to false, i.e. unique schema cache per request.
* @property {String} encryptionKey Key for encrypting your files
 * @property {Boolean} expireInactiveSessions Sets whether we should expire the inactive sessions, defaults to true
* @property {String} fileKey Key for your files
* @property {Adapter} filesAdapter Adapter module for the files sub-system
+ * @property {FileUploadOptions} fileUpload Options for file uploads
* @property {String} graphQLPath Mount path for the GraphQL endpoint, defaults to /graphql
* @property {String} graphQLSchema Full path to your GraphQL custom schema.graphql file
* @property {String} host The host to serve ParseServer on, defaults to 0.0.0.0
@@ -62,10 +62,10 @@
* @property {String} publicServerURL Public URL to your parse server with http:// or https://.
* @property {Any} push Configuration for push, as stringified JSON. See http://docs.parseplatform.org/parse-server/guide/#push-notifications
* @property {String} readOnlyMasterKey Read-only key, which has the same capabilities as MasterKey without writes
+ * @property {Boolean} replicaSet If you are using MongoDB, specify that you are using a replica set. This will allow Parse Server to perform optimizations.
* @property {String} restAPIKey Key for REST calls
* @property {Boolean} revokeSessionOnPasswordReset When a user changes their password, either through the reset password email or while logged in, all sessions are revoked if this is true. Set to false if you don't want to revoke sessions.
* @property {Boolean} scheduledPush Configuration for push scheduling, defaults to false.
- * @property {Number} schemaCacheTTL The TTL for caching the schema for optimizing read/write operations. You should put a long TTL when your DB is in production. default to 5000; set 0 to disable.
* @property {Function} serverCloseComplete Callback when server has closed
* @property {Function} serverStartComplete Callback when server has started
* @property {String} serverURL URL to your parse server with http:// or https://.
@@ -137,3 +137,10 @@
* @property {Function} validatorCallback a callback function to be invoked to validate the password
* @property {String} validatorPattern a RegExp object or a regex string representing the pattern to enforce
*/
+
+/**
+ * @interface FileUploadOptions
+ * @property {Boolean} enableForAnonymousUser Is true if file upload should be allowed for anonymous users.
+ * @property {Boolean} enableForAuthenticatedUser Is true if file upload should be allowed for authenticated users.
+ * @property {Boolean} enableForPublic Is true if file upload should be allowed for anyone, regardless of user authentication.
+ */
diff --git a/src/Options/index.js b/src/Options/index.js
index d2237e08a8..e30e6e0b09 100644
--- a/src/Options/index.js
+++ b/src/Options/index.js
@@ -154,9 +154,6 @@ export interface ParseServerOptions {
/* When a user changes their password, either through the reset password email or while logged in, all sessions are revoked if this is true. Set to false if you don't want to revoke sessions.
:DEFAULT: true */
revokeSessionOnPasswordReset: ?boolean;
- /* The TTL for caching the schema for optimizing read/write operations. You should put a long TTL when your DB is in production. default to 5000; set 0 to disable.
- :DEFAULT: 5000 */
- schemaCacheTTL: ?number;
/* Sets the TTL for the in memory cache (in ms), defaults to 5000 (5 seconds)
:DEFAULT: 5000 */
cacheTTL: ?number;
@@ -167,9 +164,6 @@ export interface ParseServerOptions {
:ENV: PARSE_SERVER_ENABLE_EXPERIMENTAL_DIRECT_ACCESS
:DEFAULT: false */
directAccess: ?boolean;
- /* Use a single schema cache shared across requests. Reduces number of queries made to _SCHEMA, defaults to false, i.e. unique schema cache per request.
- :DEFAULT: false */
- enableSingleSchemaCache: ?boolean;
/* Enables the default express error handler for all errors
:DEFAULT: false */
enableExpressErrorHandler: ?boolean;
@@ -198,6 +192,9 @@ export interface ParseServerOptions {
:ENV: PARSE_SERVER_EXPERIMENTAL_IDEMPOTENCY_OPTIONS
:DEFAULT: false */
idempotencyOptions: ?IdempotencyOptions;
+ /* Options for file uploads
+ :ENV: PARSE_SERVER_FILE_UPLOAD_OPTIONS */
+ fileUpload: ?FileUploadOptions;
/* Full path to your GraphQL custom schema.graphql file */
graphQLSchema: ?string;
/* Mounts the GraphQL endpoint
@@ -216,6 +213,10 @@ export interface ParseServerOptions {
:ENV: PARSE_SERVER_PLAYGROUND_PATH
:DEFAULT: /playground */
playgroundPath: ?string;
+  /* If you are using MongoDB, specify that you are using a replica set. This will allow Parse Server to perform optimizations.
+ :ENV: PARSE_SERVER_REPLICA_SET
+ :DEFAULT: false */
+ replicaSet: ?boolean;
/* Callback when server has started */
serverStartComplete: ?(error: ?Error) => void;
/* Callback when server has closed */
@@ -315,3 +316,15 @@ export interface PasswordPolicyOptions {
/* resend token if it's still valid */
resetTokenReuseIfValid: ?boolean;
}
+
+export interface FileUploadOptions {
+ /* Is true if file upload should be allowed for anonymous users.
+ :DEFAULT: false */
+ enableForAnonymousUser: ?boolean;
+ /* Is true if file upload should be allowed for authenticated users.
+ :DEFAULT: true */
+ enableForAuthenticatedUser: ?boolean;
+ /* Is true if file upload should be allowed for anyone, regardless of user authentication.
+ :DEFAULT: false */
+ enableForPublic: ?boolean;
+}
diff --git a/src/PromiseRouter.js b/src/PromiseRouter.js
index e1ec4eff9f..aa4d7e97a4 100644
--- a/src/PromiseRouter.js
+++ b/src/PromiseRouter.js
@@ -150,7 +150,6 @@ function makeExpressHandler(appId, promiseHandler) {
promiseHandler(req)
.then(
result => {
- clearSchemaCache(req);
if (!result.response && !result.location && !result.text) {
log.error('the handler did not include a "response" or a "location" field');
throw 'control should not get here';
@@ -183,17 +182,14 @@ function makeExpressHandler(appId, promiseHandler) {
res.json(result.response);
},
error => {
- clearSchemaCache(req);
next(error);
}
)
.catch(e => {
- clearSchemaCache(req);
log.error(`Error generating response. ${inspect(e)}`, { error: e });
next(e);
});
} catch (e) {
- clearSchemaCache(req);
log.error(`Error handling request: ${inspect(e)}`, { error: e });
next(e);
}
@@ -211,9 +207,3 @@ function maskSensitiveUrl(req) {
}
return maskUrl;
}
-
-function clearSchemaCache(req) {
- if (req.config && !req.config.enableSingleSchemaCache) {
- req.config.database.schemaCache.clear();
- }
-}
diff --git a/src/RestQuery.js b/src/RestQuery.js
index 78fd022bc1..ef3846daec 100644
--- a/src/RestQuery.js
+++ b/src/RestQuery.js
@@ -25,7 +25,8 @@ function RestQuery(
restWhere = {},
restOptions = {},
clientSDK,
- runAfterFind = true
+ runAfterFind = true,
+ context
) {
this.config = config;
this.auth = auth;
@@ -36,6 +37,7 @@ function RestQuery(
this.runAfterFind = runAfterFind;
this.response = null;
this.findOptions = {};
+ this.context = context || {};
if (!this.auth.isMaster) {
if (this.className == '_Session') {
@@ -222,7 +224,16 @@ RestQuery.prototype.each = function (callback) {
return !finished;
},
async () => {
- const query = new RestQuery(config, auth, className, restWhere, restOptions, clientSDK);
+ const query = new RestQuery(
+ config,
+ auth,
+ className,
+ restWhere,
+ restOptions,
+ clientSDK,
+ this.runAfterFind,
+ this.context
+ );
const { results } = await query.execute();
results.forEach(callback);
finished = results.length < restOptions.limit;
@@ -772,7 +783,8 @@ RestQuery.prototype.runAfterFindTrigger = function () {
this.className,
this.response.results,
this.config,
- parseQuery
+ parseQuery,
+ this.context
)
.then(results => {
// Ensure we properly set the className back
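With `context` now threaded through the `RestQuery` constructor and into `runAfterFindTrigger`, an `afterFind` trigger can read the same `request.context` that save and delete triggers already received (see the `triggers.js` hunk below). A hedged Cloud Code sketch; the class name, context key, and field are invented for illustration:

```js
// Hypothetical class name and context key, for illustration only.
Parse.Cloud.afterFind('Review', async request => {
  // request.context now carries the context object handed to the
  // originating RestQuery; previously only save/delete trigger
  // types received a copy.
  if (request.context.audience === 'admin') {
    return request.objects; // admins see unredacted results
  }
  // Strip a sensitive field from the results for everyone else.
  request.objects.forEach(obj => obj.unset('internalNotes'));
  return request.objects;
});
```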
diff --git a/src/Routers/FilesRouter.js b/src/Routers/FilesRouter.js
index 2b0140fe7d..1a8b2ca50b 100644
--- a/src/Routers/FilesRouter.js
+++ b/src/Routers/FilesRouter.js
@@ -94,6 +94,27 @@ export class FilesRouter {
async createHandler(req, res, next) {
const config = req.config;
+ const user = req.auth.user;
+ const isMaster = req.auth.isMaster;
+ const isLinked = user && Parse.AnonymousUtils.isLinked(user);
+ if (!isMaster && !config.fileUpload.enableForAnonymousUser && isLinked) {
+ next(new Parse.Error(
+ Parse.Error.FILE_SAVE_ERROR,
+ 'File upload by anonymous user is disabled.'
+ ));
+ return;
+ }
+ if (!isMaster && !config.fileUpload.enableForAuthenticatedUser && !isLinked && user) {
+ next(new Parse.Error(
+ Parse.Error.FILE_SAVE_ERROR,
+ 'File upload by authenticated user is disabled.'
+ ));
+ return;
+ }
+ if (!isMaster && !config.fileUpload.enableForPublic && !user) {
+ next(new Parse.Error(Parse.Error.FILE_SAVE_ERROR, 'File upload by public is disabled.'));
+ return;
+ }
const filesController = config.filesController;
const { filename } = req.params;
const contentType = req.get('Content-type');
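From a client's perspective, these new checks surface as a `Parse.Error.FILE_SAVE_ERROR` before the file ever reaches the files adapter. A sketch of the public-upload case, assuming a JS SDK client pointed at a server that keeps the defaults shown earlier (`enableForPublic: false`); the file name and content are placeholders:

```js
// Sketch: assumes the Parse JS SDK is already initialized.
async function tryPublicUpload() {
  const file = new Parse.File('note.txt', { base64: 'aGVsbG8=' });
  try {
    await file.save(); // no session token, so the request counts as public
  } catch (e) {
    // Expected: Parse.Error.FILE_SAVE_ERROR with the message
    // 'File upload by public is disabled.'
    console.log(e.code, e.message);
  }
}
```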
diff --git a/src/StatusHandler.js b/src/StatusHandler.js
index 45010f3847..32784436a0 100644
--- a/src/StatusHandler.js
+++ b/src/StatusHandler.js
@@ -298,6 +298,7 @@ export function pushStatusHandler(config, existingObjectId) {
// indicate this batch is complete
incrementOp(update, 'count', -1);
+ update.status = 'running';
return handler.update({ objectId }, update).then(res => {
if (res && res.count === 0) {
diff --git a/src/cli/definitions/parse-live-query-server.js b/src/cli/definitions/parse-live-query-server.js
index 3b4ef432dd..0fd2fca6c9 100644
--- a/src/cli/definitions/parse-live-query-server.js
+++ b/src/cli/definitions/parse-live-query-server.js
@@ -1,3 +1,2 @@
-const LiveQueryServerOptions = require('../../Options/Definitions')
- .LiveQueryServerOptions;
+const LiveQueryServerOptions = require('../../Options/Definitions').LiveQueryServerOptions;
export default LiveQueryServerOptions;
diff --git a/src/cli/definitions/parse-server.js b/src/cli/definitions/parse-server.js
index 33ddc62c17..d19dcc5d8a 100644
--- a/src/cli/definitions/parse-server.js
+++ b/src/cli/definitions/parse-server.js
@@ -1,3 +1,2 @@
-const ParseServerDefinitions = require('../../Options/Definitions')
- .ParseServerOptions;
+const ParseServerDefinitions = require('../../Options/Definitions').ParseServerOptions;
export default ParseServerDefinitions;
diff --git a/src/cli/utils/commander.js b/src/cli/utils/commander.js
index a4e9683074..d5a8208253 100644
--- a/src/cli/utils/commander.js
+++ b/src/cli/utils/commander.js
@@ -5,7 +5,7 @@ let _definitions;
let _reverseDefinitions;
let _defaults;
-Command.prototype.loadDefinitions = function(definitions) {
+Command.prototype.loadDefinitions = function (definitions) {
_definitions = definitions;
Object.keys(definitions).reduce((program, opt) => {
@@ -47,7 +47,7 @@ Command.prototype.loadDefinitions = function(definitions) {
}, {});
/* istanbul ignore next */
- this.on('--help', function() {
+ this.on('--help', function () {
console.log(' Configure From Environment:');
console.log('');
Object.keys(_reverseDefinitions).forEach(key => {
@@ -100,7 +100,7 @@ function parseConfigFile(program) {
return options;
}
-Command.prototype.setValuesIfNeeded = function(options) {
+Command.prototype.setValuesIfNeeded = function (options) {
Object.keys(options).forEach(key => {
if (!Object.prototype.hasOwnProperty.call(this, key)) {
this[key] = options[key];
@@ -110,7 +110,7 @@ Command.prototype.setValuesIfNeeded = function(options) {
Command.prototype._parse = Command.prototype.parse;
-Command.prototype.parse = function(args, env) {
+Command.prototype.parse = function (args, env) {
this._parse(args);
// Parse the environment first
const envOptions = parseEnvironment(env);
@@ -123,7 +123,7 @@ Command.prototype.parse = function(args, env) {
this.setValuesIfNeeded(_defaults);
};
-Command.prototype.getOptions = function() {
+Command.prototype.getOptions = function () {
return Object.keys(_definitions).reduce((options, key) => {
if (typeof this[key] !== 'undefined') {
options[key] = this[key];
diff --git a/src/cli/utils/runner.js b/src/cli/utils/runner.js
index cdc8e13a8b..d74a7a5928 100644
--- a/src/cli/utils/runner.js
+++ b/src/cli/utils/runner.js
@@ -24,7 +24,7 @@ function logStartupOptions(options) {
}
}
-export default function({ definitions, help, usage, start }) {
+export default function ({ definitions, help, usage, start }) {
program.loadDefinitions(definitions);
if (usage) {
program.usage(usage);
@@ -35,7 +35,7 @@ export default function({ definitions, help, usage, start }) {
program.parse(process.argv, process.env);
const options = program.getOptions();
- start(program, options, function() {
+ start(program, options, function () {
logStartupOptions(options);
});
}
diff --git a/src/rest.js b/src/rest.js
index f443d59480..fca3497a5d 100644
--- a/src/rest.js
+++ b/src/rest.js
@@ -39,7 +39,16 @@ function find(config, auth, className, restWhere, restOptions, clientSDK, contex
.then(result => {
restWhere = result.restWhere || restWhere;
restOptions = result.restOptions || restOptions;
- const query = new RestQuery(config, auth, className, restWhere, restOptions, clientSDK);
+ const query = new RestQuery(
+ config,
+ auth,
+ className,
+ restWhere,
+ restOptions,
+ clientSDK,
+ true,
+ context
+ );
return query.execute();
});
}
@@ -62,7 +71,16 @@ const get = (config, auth, className, objectId, restOptions, clientSDK, context)
.then(result => {
restWhere = result.restWhere || restWhere;
restOptions = result.restOptions || restOptions;
- const query = new RestQuery(config, auth, className, restWhere, restOptions, clientSDK);
+ const query = new RestQuery(
+ config,
+ auth,
+ className,
+ restWhere,
+ restOptions,
+ clientSDK,
+ true,
+ context
+ );
return query.execute();
});
};
@@ -187,7 +205,8 @@ function update(config, auth, className, restWhere, restObject, clientSDK, conte
restWhere,
undefined,
undefined,
- false
+ false,
+ context
).execute({
op: 'update',
});
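Internally, `rest.find`, `rest.get`, and `rest.update` now hand their `context` argument to the `RestQuery` they construct, with an explicit `runAfterFind` flag preceding the new trailing parameter. A hedged sketch of that internal call path; `config` and `auth` come from the request pipeline, and the class name and context payload are placeholders:

```js
// Internal API sketch, matching the find() signature in this hunk.
const rest = require('./rest');

async function findWithContext(config, auth) {
  const { results } = await rest.find(
    config,                // Parse Server Config instance
    auth,                  // Auth object for the requesting user
    'Review',              // hypothetical class name
    { stars: 5 },          // restWhere
    { limit: 10 },         // restOptions
    undefined,             // clientSDK
    { audience: 'admin' }  // context, now forwarded to afterFind
  );
  return results;
}
```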
diff --git a/src/triggers.js b/src/triggers.js
index eeb86c8369..47331675b0 100644
--- a/src/triggers.js
+++ b/src/triggers.js
@@ -237,12 +237,12 @@ export function getRequestObject(
if (originalParseObject) {
request.original = originalParseObject;
}
-
if (
triggerType === Types.beforeSave ||
triggerType === Types.afterSave ||
triggerType === Types.beforeDelete ||
- triggerType === Types.afterDelete
+ triggerType === Types.afterDelete ||
+ triggerType === Types.afterFind
) {
// Set a copy of the context on the request object.
request.context = Object.assign({}, context);
@@ -388,13 +388,21 @@ function logTriggerErrorBeforeHook(triggerType, className, input, auth, error) {
);
}
-export function maybeRunAfterFindTrigger(triggerType, auth, className, objects, config, query) {
+export function maybeRunAfterFindTrigger(
+ triggerType,
+ auth,
+ className,
+ objects,
+ config,
+ query,
+ context
+) {
return new Promise((resolve, reject) => {
const trigger = getTrigger(className, triggerType, config.applicationId);
if (!trigger) {
return resolve();
}
- const request = getRequestObject(triggerType, auth, null, null, config);
+ const request = getRequestObject(triggerType, auth, null, null, config, context);
if (query) {
request.query = query;
}