From b59517fd68a56885c9ab73525526e42ff4003333 Mon Sep 17 00:00:00 2001
From: Manuel <5673677+mtrezza@users.noreply.github.com>
Date: Sun, 7 Feb 2021 23:16:46 +0100
Subject: [PATCH] Add tests against multiple MongoDB versions (#7161)
* added test environment with mongodb 4.4.3
* added CI test for mongodb 4.4.3
* added CI tests for MongoDB versions 4.0, 4.2
* improved flaky test (seems to max out the limit of simultaneous connections)
* added spec helpers to run tests only for specific MongoDB version
* added npm scripts to run tests against relevant mongodb versions
* added spec helper function to exclude specific mongodb version
* added test for changed aggregate query planner results
* fixed regex test with incorrect regex syntax
* fixed test where query selects no keys (empty array)
* added changelog entry and ordered list
* fixed test that tried to simultaneously delete and build index on same collection
* added MongoDB compatibility table to readme
* updated default local tests to use MongoDB 4.4.3
* added MongoDB badges for new versions to README
* fixed typo in readme
* added new test helper filter to contribution guide
* fixed incorrect storage engine for mongodb 4.4
* changed CI to test MongoDB 3.6 with mmapv1 storage engine and standalone topology
* improved CI test description
* added CI self check for new MongoDB versions
* fixed CI
* removed CI
* added CI
* added throwing an error if any of the checks fail
* added github action connector
* improved error message
* improved error messages
* improved error message
* updated CI environment to MongoDB 3.6.22
* improved error messages
* update CI env name
* updated CI env name
* improved error message
* removed patch versions from CI env description
* improved status message
* removed version range from core lib
* added explicit mongodb version to redis test and node 12 test
* bumped Node 12 test to 12.20.1 (version currently recommended by AWS Elastic Beanstalk)
---
.github/workflows/ci.yml | 51 +++++++-
CHANGELOG.md | 9 +-
CONTRIBUTING.md | 5 +
README.md | 26 +++-
package-lock.json | 10 +-
package.json | 25 ++--
resources/checkMongodbVersions.js | 168 +++++++++++++++++++++++++
spec/.eslintrc.json | 4 +
spec/Auth.spec.js | 5 +-
spec/ParseQuery.hint.spec.js | 88 ++++++++++++-
spec/ParseQuery.spec.js | 7 +-
spec/helper.js | 38 ++++++
spec/schemas.spec.js | 198 ++++++++++++++++++------------
src/RestQuery.js | 2 +-
14 files changed, 522 insertions(+), 114 deletions(-)
create mode 100644 resources/checkMongodbVersions.js
diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml
index 6a1873af7d..bf5fd29334 100644
--- a/.github/workflows/ci.yml
+++ b/.github/workflows/ci.yml
@@ -11,23 +11,62 @@ env:
NODE_VERSION: 10
PARSE_SERVER_TEST_TIMEOUT: 20000
jobs:
+ check-ci:
+ name: CI Self-Check
+ timeout-minutes: 30
+ runs-on: ubuntu-18.04
+ steps:
+ - uses: actions/checkout@v2
+ - name: Use Node.js ${{ matrix.NODE_VERSION }}
+ uses: actions/setup-node@v1
+ with:
+ node-version: ${{ matrix.node-version }}
+ - name: Cache Node.js modules
+ uses: actions/cache@v2
+ with:
+ path: ~/.npm
+ key: ${{ runner.os }}-node-${{ matrix.NODE_VERSION }}-${{ hashFiles('**/package-lock.json') }}
+ restore-keys: |
+ ${{ runner.os }}-node-${{ matrix.NODE_VERSION }}-
+ - name: Install dependencies
+ run: npm ci
+ - name: CI Self-Check
+ run: npm run ci:check
check-mongo:
strategy:
matrix:
include:
- - name: Mongo 4.0.4, ReplicaSet, WiredTiger
- MONGODB_VERSION: 4.0.4
+ - name: Mongo 4.4, ReplicaSet, WiredTiger
+ MONGODB_VERSION: 4.4.3
+ MONGODB_TOPOLOGY: replicaset
+ MONGODB_STORAGE_ENGINE: wiredTiger
+ NODE_VERSION: 10
+ - name: Mongo 4.2, ReplicaSet, WiredTiger
+ MONGODB_VERSION: 4.2.12
+ MONGODB_TOPOLOGY: replicaset
+ MONGODB_STORAGE_ENGINE: wiredTiger
+ NODE_VERSION: 10
+ - name: Mongo 4.0, ReplicaSet, WiredTiger
+ MONGODB_VERSION: 4.0.22
MONGODB_TOPOLOGY: replicaset
MONGODB_STORAGE_ENGINE: wiredTiger
NODE_VERSION: 10
- - name: Mongo 3.6.21
- MONGODB_VERSION: 3.6.21
+ - name: Mongo 3.6, Standalone, MMAPv1
+ MONGODB_VERSION: 3.6.22
+ MONGODB_TOPOLOGY: standalone
+ MONGODB_STORAGE_ENGINE: mmapv1
NODE_VERSION: 10
- name: Redis Cache
PARSE_SERVER_TEST_CACHE: redis
+ MONGODB_VERSION: 4.4.3
+ MONGODB_TOPOLOGY: standalone
+ MONGODB_STORAGE_ENGINE: wiredTiger
NODE_VERSION: 10
- - name: Node 12.12.0
- NODE_VERSION: 12.12.0
+ - name: Node 12.20
+ MONGODB_VERSION: 4.4.3
+ MONGODB_TOPOLOGY: standalone
+ MONGODB_STORAGE_ENGINE: wiredTiger
+ NODE_VERSION: 12.20.1
name: ${{ matrix.name }}
timeout-minutes: 30
runs-on: ubuntu-18.04
diff --git a/CHANGELOG.md b/CHANGELOG.md
index 411142f523..35cb601d66 100644
--- a/CHANGELOG.md
+++ b/CHANGELOG.md
@@ -4,15 +4,16 @@
[Full Changelog](https://github.com/parse-community/parse-server/compare/4.5.0...master)
__BREAKING CHANGES:__
-- NEW: Added file upload restriction. File upload is now only allowed for authenticated users by default for improved security. To allow file upload also for Anonymous Users or Public, set the `fileUpload` parameter in the [Parse Server Options](https://parseplatform.org/parse-server/api/master/ParseServerOptions.html). [#7071](https://github.com/parse-community/parse-server/pull/7071). Thanks to [dblythy](https://github.com/dblythy).
+- NEW: Added file upload restriction. File upload is now only allowed for authenticated users by default for improved security. To allow file upload also for Anonymous Users or Public, set the `fileUpload` parameter in the [Parse Server Options](https://parseplatform.org/parse-server/api/master/ParseServerOptions.html). [#7071](https://github.com/parse-community/parse-server/pull/7071). Thanks to [dblythy](https://github.com/dblythy), [Manuel Trezza](https://github.com/mtrezza).
___
+- NEW: Added convenience method Parse.Cloud.sendEmail(...) to send email via email adapter in Cloud Code. [#7089](https://github.com/parse-community/parse-server/pull/7089). Thanks to [dblythy](https://github.com/dblythy)
+- NEW: LiveQuery support for $and, $nor, $containedBy, $geoWithin, $geoIntersects queries [#7113](https://github.com/parse-community/parse-server/pull/7113). Thanks to [dplewis](https://github.com/dplewis)
+- NEW: Supporting patterns in LiveQuery server's config parameter `classNames` [#7131](https://github.com/parse-community/parse-server/pull/7131). Thanks to [Nes-si](https://github.com/Nes-si)
- IMPROVE: Added new account lockout policy option `accountLockout.unlockOnPasswordReset` to automatically unlock account on password reset. [#7146](https://github.com/parse-community/parse-server/pull/7146). Thanks to [Manuel Trezza](https://github.com/mtrezza).
+- IMPROVE: Parse Server is now continuously tested against all relevant MongoDB versions (minor versions). Added a MongoDB compatibility table to the Parse Server docs. [#7161](https://github.com/parse-community/parse-server/pull/7161). Thanks to [Manuel Trezza](https://github.com/mtrezza).
- IMPROVE: Optimize queries on classes with pointer permissions. [#7061](https://github.com/parse-community/parse-server/pull/7061). Thanks to [Pedro Diaz](https://github.com/pdiaz)
- FIX: request.context for afterFind triggers. [#7078](https://github.com/parse-community/parse-server/pull/7078). Thanks to [dblythy](https://github.com/dblythy)
-- NEW: Added convenience method Parse.Cloud.sendEmail(...) to send email via email adapter in Cloud Code. [#7089](https://github.com/parse-community/parse-server/pull/7089). Thanks to [dblythy](https://github.com/dblythy)
- FIX: Winston Logger interpolating stdout to console [#7114](https://github.com/parse-community/parse-server/pull/7114). Thanks to [dplewis](https://github.com/dplewis)
-- NEW: LiveQuery support for $and, $nor, $containedBy, $geoWithin, $geoIntersects queries [#7113](https://github.com/parse-community/parse-server/pull/7113). Thanks to [dplewis](https://github.com/dplewis)
-- NEW: Supporting patterns in LiveQuery server's config parameter `classNames` [#7131](https://github.com/parse-community/parse-server/pull/7131). Thanks to [Nes-si](https://github.com/Nes-si)
### 4.5.0
[Full Changelog](https://github.com/parse-community/parse-server/compare/4.4.0...4.5.0)
diff --git a/CONTRIBUTING.md b/CONTRIBUTING.md
index 6c2ccd3bd8..f4bac560a3 100644
--- a/CONTRIBUTING.md
+++ b/CONTRIBUTING.md
@@ -71,6 +71,11 @@ If your pull request introduces a change that may affect the storage or retrieva
- `it_only_db('postgres')` // will make a test that only runs on postgres
- `it_exclude_dbs(['mongo'])` // will make a test that runs against all DB's but mongo
+* If your feature is intended to work with MongoDB and PostgreSQL, you can include or exclude tests more granularly with:
+
+ - `it_only_mongodb_version('>=4.4')` // will test with any version of Postgres but only with version >=4.4 of MongoDB; accepts semver notation to specify a version range
+ - `it_exclude_mongodb_version('<4.4')` // will test with any version of Postgres and MongoDB, excluding version <4.4 of MongoDB; accepts semver notation to specify a version range
+
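A minimal sketch (not part of the patch) of how these helpers might be used in a spec file; the class name, test names, and assertions are illustrative only:

```js
// Runs only when testing against MongoDB >= 4.4; on PostgreSQL, where no
// MONGODB_VERSION is set, the helper falls back to a regular `it`.
it_only_mongodb_version('>=4.4')('checks output introduced in MongoDB 4.4', async () => {
  const object = new Parse.Object('TestObject');
  object.set('foo', 'bar');
  await object.save();
  // ... assertions that rely on MongoDB >= 4.4 specific behavior
});

// Skipped when testing against MongoDB < 4.4; runs on all other MongoDB
// versions and on PostgreSQL.
it_exclude_mongodb_version('<4.4')('checks behavior that changed in MongoDB 4.4', async () => {
  // ...
});
```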
#### Run Postgres setup for Parse with Docker
[PostGIS images (select one with v2.2 or higher) on docker dashboard](https://hub.docker.com/r/postgis/postgis) is based off of the official [postgres](https://registry.hub.docker.com/_/postgres/) image and will work out-of-the-box (as long as you create a user with the necessary extensions for each of your Parse databases; see below). To launch the compatible Postgres instance, copy and paste the following line into your shell:
diff --git a/README.md b/README.md
index 70053e8438..dbb2f999c8 100644
--- a/README.md
+++ b/README.md
@@ -22,6 +22,8 @@
+
+
Our Sponsors
@@ -45,6 +47,8 @@ The full documentation for Parse Server is available in the [wiki](https://githu
- [Getting Started](#getting-started)
- [Running Parse Server](#running-parse-server)
+ - [Compatibility](#compatibility)
+ - [MongoDB Support](#mongodb-support)
- [Locally](#locally)
- [Docker](#inside-a-docker-container)
- [Saving an Object](#saving-your-first-object)
@@ -84,6 +88,18 @@ Before you start make sure you have installed:
- [MongoDB](https://www.mongodb.com/) or [PostgreSQL](https://www.postgresql.org/)(with [PostGIS](https://postgis.net) 2.2.0 or higher)
- Optionally [Docker](https://www.docker.com/)
+### Compatibility
+
+#### MongoDB Support
+Parse Server is continuously tested with the most recent releases of MongoDB to ensure compatibility. The tests run against the latest patch version of each MongoDB release. We follow the [MongoDB support schedule](https://www.mongodb.com/support-policy) and only test against versions that are officially supported by MongoDB and have not yet reached their end-of-life date.
+
+ | Version | Latest Patch Version | End-of-Life Date | Compatibility |
+ |-------------|----------------------|------------------|--------------------|
+ | MongoDB 3.6 | 3.6.22 | April 2021 | ✅ Fully compatible |
+ | MongoDB 4.0 | 4.0.22 | January 2022 | ✅ Fully compatible |
+ | MongoDB 4.2 | 4.2.12 | TBD | ✅ Fully compatible |
+ | MongoDB 4.4 | 4.4.3 | TBD | ✅ Fully compatible |
+
### Locally
```bash
$ npm install -g parse-server mongodb-runner
@@ -424,11 +440,11 @@ let api = new ParseServer({
```
#### Parameters
-| Parameter | Optional | Type | Default value | Example values | Environment variable | Description |
-|-----------|----------|--------|---------------|-----------|-----------|-------------|
-| `idempotencyOptions` | yes | `Object` | `undefined` | | PARSE_SERVER_EXPERIMENTAL_IDEMPOTENCY_OPTIONS | Setting this enables idempotency enforcement for the specified paths. |
-| `idempotencyOptions.paths`| yes | `Array` | `[]` | `.*` (all paths, includes the examples below), `functions/.*` (all functions), `jobs/.*` (all jobs), `classes/.*` (all classes), `functions/.*` (all functions), `users` (user creation / update), `installations` (installation creation / update) | PARSE_SERVER_EXPERIMENTAL_IDEMPOTENCY_PATHS | An array of path patterns that have to match the request path for request deduplication to be enabled. The mount path must not be included, for example to match the request path `/parse/functions/myFunction` specify the path pattern `functions/myFunction`. A trailing slash of the request path is ignored, for example the path pattern `functions/myFunction` matches both `/parse/functions/myFunction` and `/parse/functions/myFunction/`. |
-| `idempotencyOptions.ttl` | yes | `Integer` | `300` | `60` (60 seconds) | PARSE_SERVER_EXPERIMENTAL_IDEMPOTENCY_TTL | The duration in seconds after which a request record is discarded from the database. Duplicate requests due to network issues can be expected to arrive within milliseconds up to several seconds. This value must be greater than `0`. |
+| Parameter | Optional | Type | Default value | Example values | Environment variable | Description |
+|----------------------------|----------|-----------------|---------------|-----------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------|-----------------------------------------------|-------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------|
+| `idempotencyOptions` | yes | `Object` | `undefined` | | PARSE_SERVER_EXPERIMENTAL_IDEMPOTENCY_OPTIONS | Setting this enables idempotency enforcement for the specified paths. |
+| `idempotencyOptions.paths` | yes | `Array` | `[]` | `.*` (all paths, includes the examples below), `functions/.*` (all functions), `jobs/.*` (all jobs), `classes/.*` (all classes), `functions/.*` (all functions), `users` (user creation / update), `installations` (installation creation / update) | PARSE_SERVER_EXPERIMENTAL_IDEMPOTENCY_PATHS | An array of path patterns that have to match the request path for request deduplication to be enabled. The mount path must not be included, for example to match the request path `/parse/functions/myFunction` specify the path pattern `functions/myFunction`. A trailing slash of the request path is ignored, for example the path pattern `functions/myFunction` matches both `/parse/functions/myFunction` and `/parse/functions/myFunction/`. |
+| `idempotencyOptions.ttl` | yes | `Integer` | `300` | `60` (60 seconds) | PARSE_SERVER_EXPERIMENTAL_IDEMPOTENCY_TTL | The duration in seconds after which a request record is discarded from the database. Duplicate requests due to network issues can be expected to arrive within milliseconds up to several seconds. This value must be greater than `0`. |
#### Notes
diff --git a/package-lock.json b/package-lock.json
index 0829ec82f9..02e716276d 100644
--- a/package-lock.json
+++ b/package-lock.json
@@ -4,6 +4,12 @@
"lockfileVersion": 1,
"requires": true,
"dependencies": {
+ "@actions/core": {
+ "version": "1.2.6",
+ "resolved": "https://registry.npmjs.org/@actions/core/-/core-1.2.6.tgz",
+ "integrity": "sha512-ZQYitnqiyBc3D+k7LsgSBmMDVkOVidaagDG7j3fOym77jNunWRuYx7VSHa9GNfFZh+zh61xsCjRj4JxMZlDqTA==",
+ "dev": true
+ },
"@apollo/client": {
"version": "3.2.5",
"resolved": "https://registry.npmjs.org/@apollo/client/-/client-3.2.5.tgz",
@@ -9660,8 +9666,8 @@
}
},
"mongodb-runner": {
- "version": "github:mongodb-js/runner#dfb9a520147de6e2537f7c1c21a5e7005d1905f8",
- "from": "github:mongodb-js/runner",
+ "version": "4.8.1",
+ "resolved": "github:mongodb-js/runner#f4e920d0ae8a2c1de2148343e062d3744b434fb8",
"dev": true,
"requires": {
"async": "^3.1.0",
diff --git a/package.json b/package.json
index a0c9f588ed..31cf4753a8 100644
--- a/package.json
+++ b/package.json
@@ -60,6 +60,7 @@
"ws": "7.4.2"
},
"devDependencies": {
+ "@actions/core": "1.2.6",
"@babel/cli": "7.10.0",
"@babel/core": "7.10.0",
"@babel/plugin-proposal-object-rest-spread": "7.10.0",
@@ -87,23 +88,33 @@
"jsdoc": "3.6.3",
"jsdoc-babel": "0.5.0",
"lint-staged": "10.2.3",
- "mongodb-runner": "mongodb-js/runner",
+ "mongodb-runner": "4.8.1",
+ "mongodb-version-list": "1.0.0",
"node-fetch": "2.6.1",
"nyc": "15.1.0",
- "prettier": "2.0.5"
+ "prettier": "2.0.5",
+ "yaml": "1.10.0"
},
"scripts": {
+ "ci:check": "node ./resources/checkMongodbVersions.js",
"definitions": "node ./resources/buildConfigDefinitions.js && prettier --write 'src/Options/*.js'",
"docs": "jsdoc -c ./jsdoc-conf.json",
"lint": "flow && eslint --cache ./",
"lint-fix": "eslint --fix --cache ./",
"build": "babel src/ -d lib/ --copy-files",
"watch": "babel --watch src/ -d lib/ --copy-files",
- "pretest": "cross-env MONGODB_VERSION=${MONGODB_VERSION:=4.0.4} MONGODB_TOPOLOGY=${MONGODB_TOPOLOGY:=standalone} MONGODB_STORAGE_ENGINE=${MONGODB_STORAGE_ENGINE:=mmapv1} mongodb-runner start",
- "testonly": "cross-env MONGODB_VERSION=${MONGODB_VERSION:=4.0.4} MONGODB_TOPOLOGY=${MONGODB_TOPOLOGY:=standalone} MONGODB_STORAGE_ENGINE=${MONGODB_STORAGE_ENGINE:=mmapv1} TESTING=1 jasmine",
+ "test:mongodb:runnerstart": "cross-env MONGODB_VERSION=${MONGODB_VERSION:=$npm_config_dbversion} MONGODB_TOPOLOGY=${MONGODB_TOPOLOGY:=standalone} MONGODB_STORAGE_ENGINE=${MONGODB_STORAGE_ENGINE:=wiredTiger} mongodb-runner start",
+ "test:mongodb:testonly": "cross-env MONGODB_VERSION=${MONGODB_VERSION:=$npm_config_dbversion} MONGODB_TOPOLOGY=${MONGODB_TOPOLOGY:=standalone} MONGODB_STORAGE_ENGINE=${MONGODB_STORAGE_ENGINE:=wiredTiger} TESTING=1 jasmine",
+ "test:mongodb": "npm run test:mongodb:runnerstart --dbversion=$npm_config_dbversion && npm run test:mongodb:testonly --dbversion=$npm_config_dbversion",
+ "test:mongodb:4.0.22": "npm run test:mongodb --dbversion=4.0.22",
+ "test:mongodb:4.2.12": "npm run test:mongodb --dbversion=4.2.12",
+ "test:mongodb:4.4.3": "npm run test:mongodb --dbversion=4.4.3",
+ "posttest:mongodb": "mongodb-runner stop",
+ "pretest": "cross-env MONGODB_VERSION=${MONGODB_VERSION:=4.4.3} MONGODB_TOPOLOGY=${MONGODB_TOPOLOGY:=standalone} MONGODB_STORAGE_ENGINE=${MONGODB_STORAGE_ENGINE:=wiredTiger} mongodb-runner start",
+ "testonly": "cross-env MONGODB_VERSION=${MONGODB_VERSION:=4.4.3} MONGODB_TOPOLOGY=${MONGODB_TOPOLOGY:=standalone} MONGODB_STORAGE_ENGINE=${MONGODB_STORAGE_ENGINE:=wiredTiger} TESTING=1 jasmine",
"test": "npm run testonly",
- "posttest": "cross-env MONGODB_VERSION=${MONGODB_VERSION:=4.0.4} MONGODB_TOPOLOGY=${MONGODB_TOPOLOGY:=standalone} MONGODB_STORAGE_ENGINE=${MONGODB_STORAGE_ENGINE:=mmapv1} mongodb-runner stop",
- "coverage": "cross-env MONGODB_VERSION=${MONGODB_VERSION:=4.0.4} MONGODB_TOPOLOGY=${MONGODB_TOPOLOGY:=standalone} MONGODB_STORAGE_ENGINE=${MONGODB_STORAGE_ENGINE:=mmapv1} TESTING=1 nyc jasmine",
+ "posttest": "cross-env MONGODB_VERSION=${MONGODB_VERSION:=4.4.3} MONGODB_TOPOLOGY=${MONGODB_TOPOLOGY:=standalone} MONGODB_STORAGE_ENGINE=${MONGODB_STORAGE_ENGINE:=wiredTiger} mongodb-runner stop",
+ "coverage": "cross-env MONGODB_VERSION=${MONGODB_VERSION:=4.4.3} MONGODB_TOPOLOGY=${MONGODB_TOPOLOGY:=standalone} MONGODB_STORAGE_ENGINE=${MONGODB_STORAGE_ENGINE:=wiredTiger} TESTING=1 nyc jasmine",
"start": "node ./bin/parse-server",
"prettier": "prettier --write '{src,spec}/{**/*,*}.js'",
"prepare": "npm run build",
@@ -139,4 +150,4 @@
"git add"
]
}
-}
\ No newline at end of file
+}
diff --git a/resources/checkMongodbVersions.js b/resources/checkMongodbVersions.js
new file mode 100644
index 0000000000..c835888f61
--- /dev/null
+++ b/resources/checkMongodbVersions.js
@@ -0,0 +1,168 @@
+'use strict'
+
+const mongoVersionList = require('mongodb-version-list');
+const core = require('@actions/core');
+const semver = require('semver');
+const yaml = require('yaml');
+const fs = require('fs').promises;
+
+/*******************************************************************
+ * Settings
+********************************************************************/
+// The path to the GitHub workflow YAML file that contains the tests.
+const pathToCiYml = './.github/workflows/ci.yml';
+// The key path in the CI YAML file to the environment specifications.
+const ciKeyEnvironments = 'jobs.check-mongo.strategy.matrix.include';
+// The key in the CI YAML file to determine the MongoDB version.
+const ciKeyVersion = 'MONGODB_VERSION';
+// The versions to ignore when checking whether the CI tests against
+// the newest versions. This can be used in case there is a MongoDB
+// release for which Parse Server compatibility is not required.
+const ignoreReleasedVersions = [
+ '4.7.0' // This is a development release according to MongoDB support
+];
+/*******************************************************************/
+
+/**
+ * Returns the released versions of MongoDB by MongoDB; this also
+ * includes pre-releases.
+ * @return {Array} The released versions.
+ */
+async function getReleasedVersions () {
+ return new Promise((resolve, reject) => {
+ mongoVersionList(function(error, versions) {
+ if (error) {
+ reject(error);
+ }
+ resolve(versions);
+ });
+ });
+}
+
+/**
+ * Returns the test environments in the Github CI as specified in the
+ * GitHub workflow YAML file.
+ */
+async function getTests() {
+ try {
+ // Get CI workflow
+ const ciYaml = await fs.readFile(pathToCiYml, 'utf-8');
+ const ci = yaml.parse(ciYaml);
+
+ // Extract MongoDB versions
+ let versions = ciKeyEnvironments.split('.').reduce((o,k) => o !== undefined ? o[k] : undefined, ci);
+ versions = Object.entries(versions)
+ .map(entry => entry[1])
+ .filter(entry => entry[ciKeyVersion]);
+
+ return versions;
+ } catch (e) {
+ throw 'Failed to determine MongoDB versions from CI YAML file with error: ' + e;
+ }
+}
+
+/**
+ * Returns all minor and major MongoDB versions against which Parse Server
+ * is not tested in the CI.
+ * @param {Array} releasedVersions The released versions.
+ * @param {Array} testedVersions The tested versions.
+ * @returns {Array} The untested versions.
+ */
+function getUntestedMinorsAndMajors(releasedVersions, testedVersions) {
+ // Get highest tested version
+ const highestTested = semver.maxSatisfying(testedVersions, '*');
+
+ // Get all higher released versions (minor & major)
+ const higherReleased = releasedVersions.reduce((m, v) => {
+ // If the version is a pre-release, skip it
+ if ((semver.prerelease(v) || []).length > 0) {
+ return m;
+ }
+ // If the version is not greater than the highest tested version, skip it
+ if (!semver.gt(v, highestTested)) {
+ return m;
+ }
+ // If the same or a newer version has already been added, skip it
+ if (semver.maxSatisfying(m, `^${v}`) == v) {
+ return m;
+ }
+ // If there is a higher minor released version, skip it
+ if (semver.maxSatisfying(releasedVersions, `^${v}`) != v) {
+ return m;
+ }
+ // If version should be ignored, skip it
+ if (semver.satisfies(v, ignoreReleasedVersions.join(' || '))) {
+ return m;
+ }
+ // Add version
+ m.push(v);
+ return m;
+ }, []);
+
+ return higherReleased;
+}
+
+/**
+ * Returns the newest patch version for a given version.
+ * @param {Array} versions The versions in which to search.
+ * @param {String} version The version for which a newer patch
+ * version should be searched.
+ * @returns {String|undefined} The newer patch version.
+ */
+function getNewerPatch(versions, version) {
+ const latest = semver.maxSatisfying(versions, `~${version}`);
+ return semver.gt(latest, version) ? latest : undefined;
+}
+
+/**
+ * Runs the check.
+ */
+async function check() {
+ try {
+ // Get released MongoDB versions
+ const releasedVersions = await getReleasedVersions();
+
+ // Get tested MongoDB versions from CI
+ const tests = await getTests();
+
+ // Is true if any of the checks failed
+ let failed = false;
+
+ // Check whether each tested version is the latest patch
+ for (const test of tests) {
+ const version = test[ciKeyVersion];
+ const newer = getNewerPatch(releasedVersions, version);
+ if (newer) {
+ console.log(`❌ CI environment '${test.name}' uses an old MongoDB patch version ${version} instead of ${newer}.`);
+ failed = true;
+ } else {
+ console.log(`✅ CI environment '${test.name}' uses the newest MongoDB patch version ${version}.`);
+ }
+ }
+
+ // Check whether there is a newer minor version available that is not tested
+ const testedVersions = tests.map(test => test[ciKeyVersion]);
+ const untested = getUntestedMinorsAndMajors(releasedVersions, testedVersions);
+ if (untested.length > 0) {
+ console.log(`❌ CI does not have environments using the following versions of MongoDB: ${untested.join(', ')}.`);
+ failed = true;
+ } else {
+ console.log(`✅ CI environments use all recent major and minor releases of MongoDB.`);
+ }
+
+ if (failed) {
+ core.setFailed(
+ 'CI environments are not up-to-date with newest MongoDB versions.' +
+ '\n\nCheck the error messages above and update the MongoDB versions in the CI YAML ' +
+ 'file. There may be versions of MongoDB that have reached their official MongoDB end-of-life ' +
+ 'support date and should be removed from the CI; see https://www.mongodb.com/support-policy.'
+ );
+ }
+
+ } catch (e) {
+ core.setFailed('Failed to check MongoDB versions with error: ' + e);
+ throw 'Failed to check MongoDB versions with error: ' + e;
+ }
+}
+
+check();
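To make the semver logic of the check above concrete, here is a small, self-contained sketch (not part of the patch) using a hypothetical release list; only `semver` calls that the script itself relies on are shown:

```js
const semver = require('semver');

// Hypothetical inputs, not real MongoDB release data.
const releasedVersions = ['3.6.22', '4.0.22', '4.2.12', '4.4.3', '4.4.4', '4.7.0', '4.9.0-alpha1'];
const testedVersions = ['3.6.22', '4.0.22', '4.2.12', '4.4.3'];

// Highest tested version, as computed by getUntestedMinorsAndMajors:
console.log(semver.maxSatisfying(testedVersions, '*')); // '4.4.3'

// getNewerPatch: the newest patch of the 4.4 line in this list is 4.4.4,
// so a CI environment pinned to 4.4.3 would be reported as outdated.
console.log(semver.maxSatisfying(releasedVersions, '~4.4.3')); // '4.4.4'

// getUntestedMinorsAndMajors: 4.9.0-alpha1 is skipped as a pre-release and
// 4.7.0 is on the ignore list, so with these inputs no untested minor or
// major version would be reported.
console.log(semver.prerelease('4.9.0-alpha1')); // ['alpha1']
console.log(semver.prerelease('4.4.4')); // null
```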
diff --git a/spec/.eslintrc.json b/spec/.eslintrc.json
index 7031e96d6f..e76afb3a66 100644
--- a/spec/.eslintrc.json
+++ b/spec/.eslintrc.json
@@ -16,6 +16,10 @@
"expectAsync": true,
"notEqual": true,
"it_only_db": true,
+ "it_only_mongodb_version": true,
+ "fit_only_mongodb_version": true,
+ "it_exclude_mongodb_version": true,
+ "fit_exclude_mongodb_version": true,
"it_exclude_dbs": true,
"describe_only_db": true,
"describe_only": true,
diff --git a/spec/Auth.spec.js b/spec/Auth.spec.js
index 38302aa24e..1ec2971d9d 100644
--- a/spec/Auth.spec.js
+++ b/spec/Auth.spec.js
@@ -1,3 +1,5 @@
+'use strict';
+
describe('Auth', () => {
const { Auth, getAuthForSessionToken } = require('../lib/Auth.js');
const Config = require('../lib/Config');
@@ -151,7 +153,7 @@ describe('Auth', () => {
});
describe('getRolesForUser', () => {
- const rolesNumber = 300;
+ const rolesNumber = 100;
it('should load all roles without config', async () => {
const user = new Parse.User();
@@ -201,7 +203,6 @@ describe('Auth', () => {
});
it('should load all roles for different users with config', async () => {
- const rolesNumber = 100;
const user = new Parse.User();
await user.signUp({
username: 'hello',
diff --git a/spec/ParseQuery.hint.spec.js b/spec/ParseQuery.hint.spec.js
index 156c732587..164fff7880 100644
--- a/spec/ParseQuery.hint.spec.js
+++ b/spec/ParseQuery.hint.spec.js
@@ -54,7 +54,7 @@ describe_only_db('mongo')('Parse.Query hint', () => {
});
});
- it('query aggregate with hint string', async () => {
+ it_only_mongodb_version('<4.4')('query aggregate with hint string', async () => {
const object = new TestObject({ foo: 'bar' });
await object.save();
@@ -74,7 +74,31 @@ describe_only_db('mongo')('Parse.Query hint', () => {
expect(queryPlanner.winningPlan.inputStage.indexName).toBe('_id_');
});
- it('query aggregate with hint object', async () => {
+ it_only_mongodb_version('>=4.4')('query aggregate with hint string', async () => {
+ const object = new TestObject({ foo: 'bar' });
+ await object.save();
+
+ const collection = await config.database.adapter._adaptiveCollection('TestObject');
+ let result = await collection.aggregate([{ $group: { _id: '$foo' } }], {
+ explain: true,
+ });
+ let { queryPlanner } = result[0].stages[0].$cursor;
+ expect(queryPlanner.winningPlan.stage).toBe('PROJECTION_SIMPLE');
+ expect(queryPlanner.winningPlan.inputStage.stage).toBe('COLLSCAN');
+ expect(queryPlanner.winningPlan.inputStage.inputStage).toBeUndefined();
+
+ result = await collection.aggregate([{ $group: { _id: '$foo' } }], {
+ hint: '_id_',
+ explain: true,
+ });
+ queryPlanner = result[0].stages[0].$cursor.queryPlanner;
+ expect(queryPlanner.winningPlan.stage).toBe('PROJECTION_SIMPLE');
+ expect(queryPlanner.winningPlan.inputStage.stage).toBe('FETCH');
+ expect(queryPlanner.winningPlan.inputStage.inputStage.stage).toBe('IXSCAN');
+ expect(queryPlanner.winningPlan.inputStage.inputStage.indexName).toBe('_id_');
+ });
+
+ it_only_mongodb_version('<4.4')('query aggregate with hint object', async () => {
const object = new TestObject({ foo: 'bar' });
await object.save();
@@ -94,6 +118,31 @@ describe_only_db('mongo')('Parse.Query hint', () => {
expect(queryPlanner.winningPlan.inputStage.keyPattern).toEqual({ _id: 1 });
});
+ it_only_mongodb_version('>=4.4')('query aggregate with hint object', async () => {
+ const object = new TestObject({ foo: 'bar' });
+ await object.save();
+
+ const collection = await config.database.adapter._adaptiveCollection('TestObject');
+ let result = await collection.aggregate([{ $group: { _id: '$foo' } }], {
+ explain: true,
+ });
+ let { queryPlanner } = result[0].stages[0].$cursor;
+ expect(queryPlanner.winningPlan.stage).toBe('PROJECTION_SIMPLE');
+ expect(queryPlanner.winningPlan.inputStage.stage).toBe('COLLSCAN');
+ expect(queryPlanner.winningPlan.inputStage.inputStage).toBeUndefined();
+
+ result = await collection.aggregate([{ $group: { _id: '$foo' } }], {
+ hint: { _id: 1 },
+ explain: true,
+ });
+ queryPlanner = result[0].stages[0].$cursor.queryPlanner;
+ expect(queryPlanner.winningPlan.stage).toBe('PROJECTION_SIMPLE');
+ expect(queryPlanner.winningPlan.inputStage.stage).toBe('FETCH');
+ expect(queryPlanner.winningPlan.inputStage.inputStage.stage).toBe('IXSCAN');
+ expect(queryPlanner.winningPlan.inputStage.inputStage.indexName).toBe('_id_');
+ expect(queryPlanner.winningPlan.inputStage.inputStage.keyPattern).toEqual({ _id: 1 });
+ });
+
it('query find with hint (rest)', async () => {
const object = new TestObject();
await object.save();
@@ -119,7 +168,7 @@ describe_only_db('mongo')('Parse.Query hint', () => {
expect(explain.queryPlanner.winningPlan.inputStage.inputStage.indexName).toBe('_id_');
});
- it('query aggregate with hint (rest)', async () => {
+ it_only_mongodb_version('<4.4')('query aggregate with hint (rest)', async () => {
const object = new TestObject({ foo: 'bar' });
await object.save();
let options = Object.assign({}, masterKeyOptions, {
@@ -145,4 +194,37 @@ describe_only_db('mongo')('Parse.Query hint', () => {
queryPlanner = response.data.results[0].stages[0].$cursor.queryPlanner;
expect(queryPlanner.winningPlan.inputStage.keyPattern).toEqual({ _id: 1 });
});
+
+ it_only_mongodb_version('>=4.4')('query aggregate with hint (rest)', async () => {
+ const object = new TestObject({ foo: 'bar' });
+ await object.save();
+ let options = Object.assign({}, masterKeyOptions, {
+ url: Parse.serverURL + '/aggregate/TestObject',
+ qs: {
+ explain: true,
+ group: JSON.stringify({ objectId: '$foo' }),
+ },
+ });
+ let response = await request(options);
+ let { queryPlanner } = response.data.results[0].stages[0].$cursor;
+ expect(queryPlanner.winningPlan.stage).toBe('PROJECTION_SIMPLE');
+ expect(queryPlanner.winningPlan.inputStage.stage).toBe('COLLSCAN');
+ expect(queryPlanner.winningPlan.inputStage.inputStage).toBeUndefined();
+
+ options = Object.assign({}, masterKeyOptions, {
+ url: Parse.serverURL + '/aggregate/TestObject',
+ qs: {
+ explain: true,
+ hint: '_id_',
+ group: JSON.stringify({ objectId: '$foo' }),
+ },
+ });
+ response = await request(options);
+ queryPlanner = response.data.results[0].stages[0].$cursor.queryPlanner;
+ expect(queryPlanner.winningPlan.stage).toBe('PROJECTION_SIMPLE');
+ expect(queryPlanner.winningPlan.inputStage.stage).toBe('FETCH');
+ expect(queryPlanner.winningPlan.inputStage.inputStage.stage).toBe('IXSCAN');
+ expect(queryPlanner.winningPlan.inputStage.inputStage.indexName).toBe('_id_');
+ expect(queryPlanner.winningPlan.inputStage.inputStage.keyPattern).toEqual({ _id: 1 });
+ });
});
diff --git a/spec/ParseQuery.spec.js b/spec/ParseQuery.spec.js
index 92daad50a6..c2069241f0 100644
--- a/spec/ParseQuery.spec.js
+++ b/spec/ParseQuery.spec.js
@@ -2048,9 +2048,9 @@ describe('Parse.Query testing', () => {
const query = new Parse.Query(TestObject);
query.matches(
'myString',
- "parse # First fragment. We'll write this in one case but match " +
- 'insensitively\n.com # Second fragment. This can be separated by any ' +
- 'character, including newline',
+ 'parse # First fragment. We\'ll write this in one case but match insensitively\n' +
+ '.com # Second fragment. This can be separated by any character, including newline;' +
+ 'however, this comment must end with a newline to recognize it as a comment\n',
'mixs'
);
query.find().then(
@@ -3209,6 +3209,7 @@ describe('Parse.Query testing', () => {
}
);
});
+
it('exclude keys', async () => {
const obj = new TestObject({ foo: 'baz', hello: 'world' });
await obj.save();
diff --git a/spec/helper.js b/spec/helper.js
index dbea84edd2..a06f3da708 100644
--- a/spec/helper.js
+++ b/spec/helper.js
@@ -1,4 +1,6 @@
'use strict';
+const semver = require('semver');
+
// Sets up a Parse API server for testing.
jasmine.DEFAULT_TIMEOUT_INTERVAL = process.env.PARSE_SERVER_TEST_TIMEOUT || 5000;
@@ -417,6 +419,42 @@ global.it_only_db = db => {
}
};
+global.it_only_mongodb_version = version => {
+ const envVersion = process.env.MONGODB_VERSION;
+ if (!envVersion || semver.satisfies(envVersion, version)) {
+ return it;
+ } else {
+ return xit;
+ }
+};
+
+global.fit_only_mongodb_version = version => {
+ const envVersion = process.env.MONGODB_VERSION;
+ if (!envVersion || semver.satisfies(envVersion, version)) {
+ return fit;
+ } else {
+ return xit;
+ }
+};
+
+global.it_exclude_mongodb_version = version => {
+ const envVersion = process.env.MONGODB_VERSION;
+ if (!envVersion || !semver.satisfies(envVersion, version)) {
+ return it;
+ } else {
+ return xit;
+ }
+};
+
+global.fit_exclude_mongodb_version = version => {
+ const envVersion = process.env.MONGODB_VERSION;
+ if (!envVersion || !semver.satisfies(envVersion, version)) {
+ return fit;
+ } else {
+ return xit;
+ }
+};
+
global.fit_exclude_dbs = excluded => {
if (excluded.indexOf(process.env.PARSE_SERVER_TEST_DB) >= 0) {
return xit;
diff --git a/spec/schemas.spec.js b/spec/schemas.spec.js
index 6cdb610e9d..8442a5796a 100644
--- a/spec/schemas.spec.js
+++ b/spec/schemas.spec.js
@@ -3340,94 +3340,130 @@ describe('schemas', () => {
});
});
- it('lets you add and delete indexes', done => {
- request({
+ it('lets you add and delete indexes', async () => {
+    // MongoDB builds indexes in a background process that locks the collection,
+    // so wait briefly before each follow-up index change
+    const waitForIndexBuild = () => new Promise(r => setTimeout(r, 500));
+
+ await request({
url: 'http://localhost:8378/1/schemas/NewClass',
method: 'POST',
headers: masterKeyHeaders,
json: true,
body: {},
- }).then(() => {
- request({
- url: 'http://localhost:8378/1/schemas/NewClass',
- method: 'PUT',
- headers: masterKeyHeaders,
- json: true,
- body: {
- fields: {
- aString: { type: 'String' },
- bString: { type: 'String' },
- cString: { type: 'String' },
- dString: { type: 'String' },
- },
- indexes: {
- name1: { aString: 1 },
- name2: { bString: 1 },
- name3: { cString: 1 },
- },
+ });
+
+ let response = await request({
+ url: 'http://localhost:8378/1/schemas/NewClass',
+ method: 'PUT',
+ headers: masterKeyHeaders,
+ json: true,
+ body: {
+ fields: {
+ aString: { type: 'String' },
+ bString: { type: 'String' },
+ cString: { type: 'String' },
+ dString: { type: 'String' },
},
- }).then(response => {
- expect(
- dd(response.data, {
- className: 'NewClass',
- fields: {
- ACL: { type: 'ACL' },
- createdAt: { type: 'Date' },
- updatedAt: { type: 'Date' },
- objectId: { type: 'String' },
- aString: { type: 'String' },
- bString: { type: 'String' },
- cString: { type: 'String' },
- dString: { type: 'String' },
- },
- classLevelPermissions: defaultClassLevelPermissions,
- indexes: {
- _id_: { _id: 1 },
- name1: { aString: 1 },
- name2: { bString: 1 },
- name3: { cString: 1 },
- },
- })
- ).toEqual(undefined);
- request({
- url: 'http://localhost:8378/1/schemas/NewClass',
- method: 'PUT',
- headers: masterKeyHeaders,
- json: true,
- body: {
- indexes: {
- name1: { __op: 'Delete' },
- name2: { __op: 'Delete' },
- name4: { dString: 1 },
- },
- },
- }).then(response => {
- expect(response.data).toEqual({
- className: 'NewClass',
- fields: {
- ACL: { type: 'ACL' },
- createdAt: { type: 'Date' },
- updatedAt: { type: 'Date' },
- objectId: { type: 'String' },
- aString: { type: 'String' },
- bString: { type: 'String' },
- cString: { type: 'String' },
- dString: { type: 'String' },
- },
- classLevelPermissions: defaultClassLevelPermissions,
- indexes: {
- _id_: { _id: 1 },
- name3: { cString: 1 },
- name4: { dString: 1 },
- },
- });
- config.database.adapter.getIndexes('NewClass').then(indexes => {
- expect(indexes.length).toEqual(3);
- done();
- });
- });
- });
+ indexes: {
+ name1: { aString: 1 },
+ name2: { bString: 1 },
+ name3: { cString: 1 },
+ },
+ },
+ });
+
+ expect(
+ dd(response.data, {
+ className: 'NewClass',
+ fields: {
+ ACL: { type: 'ACL' },
+ createdAt: { type: 'Date' },
+ updatedAt: { type: 'Date' },
+ objectId: { type: 'String' },
+ aString: { type: 'String' },
+ bString: { type: 'String' },
+ cString: { type: 'String' },
+ dString: { type: 'String' },
+ },
+ classLevelPermissions: defaultClassLevelPermissions,
+ indexes: {
+ _id_: { _id: 1 },
+ name1: { aString: 1 },
+ name2: { bString: 1 },
+ name3: { cString: 1 },
+ },
+ })
+ ).toEqual(undefined);
+
+    await waitForIndexBuild();
+ response = await request({
+ url: 'http://localhost:8378/1/schemas/NewClass',
+ method: 'PUT',
+ headers: masterKeyHeaders,
+ json: true,
+ body: {
+ indexes: {
+ name1: { __op: 'Delete' },
+ name2: { __op: 'Delete' },
+ },
+ },
+ });
+
+ expect(response.data).toEqual({
+ className: 'NewClass',
+ fields: {
+ ACL: { type: 'ACL' },
+ createdAt: { type: 'Date' },
+ updatedAt: { type: 'Date' },
+ objectId: { type: 'String' },
+ aString: { type: 'String' },
+ bString: { type: 'String' },
+ cString: { type: 'String' },
+ dString: { type: 'String' },
+ },
+ classLevelPermissions: defaultClassLevelPermissions,
+ indexes: {
+ _id_: { _id: 1 },
+ name3: { cString: 1 },
+ },
+ });
+
+    await waitForIndexBuild();
+ response = await request({
+ url: 'http://localhost:8378/1/schemas/NewClass',
+ method: 'PUT',
+ headers: masterKeyHeaders,
+ json: true,
+ body: {
+ indexes: {
+ name4: { dString: 1 },
+ },
+ },
});
+
+ expect(response.data).toEqual({
+ className: 'NewClass',
+ fields: {
+ ACL: { type: 'ACL' },
+ createdAt: { type: 'Date' },
+ updatedAt: { type: 'Date' },
+ objectId: { type: 'String' },
+ aString: { type: 'String' },
+ bString: { type: 'String' },
+ cString: { type: 'String' },
+ dString: { type: 'String' },
+ },
+ classLevelPermissions: defaultClassLevelPermissions,
+ indexes: {
+ _id_: { _id: 1 },
+ name3: { cString: 1 },
+ name4: { dString: 1 },
+ },
+ });
+
+    await waitForIndexBuild();
+ const indexes = await config.database.adapter.getIndexes('NewClass');
+ expect(indexes.length).toEqual(3);
});
it('cannot delete index that does not exist', done => {
diff --git a/src/RestQuery.js b/src/RestQuery.js
index ef3846daec..48dc9bcdb1 100644
--- a/src/RestQuery.js
+++ b/src/RestQuery.js
@@ -100,7 +100,7 @@ function RestQuery(
for (var option in restOptions) {
switch (option) {
case 'keys': {
- const keys = restOptions.keys.split(',').concat(AlwaysSelectedKeys);
+ const keys = restOptions.keys.split(',').filter(key => key.length > 0).concat(AlwaysSelectedKeys);
this.keys = Array.from(new Set(keys));
break;
}
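For context, the added `filter` handles the edge case where a request sends an empty `keys` parameter: splitting an empty string yields an array containing one empty string, which would otherwise end up in the selected keys. A quick illustration of the underlying JavaScript behavior:

```js
// Splitting an empty `keys` parameter produces an unwanted empty-string key:
''.split(','); // ['']

// The added filter drops it before the always-selected keys are appended:
''.split(',').filter(key => key.length > 0); // []
```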