52 Commits

Author SHA1 Message Date
e69caaa536 fix for loop for apple 2026-02-06 11:48:08 +00:00
a2fba8502a fix apple auth and update version to 2.8.5 2026-02-03 23:14:29 +00:00
Joe Bain
ac73dab494 Added apple gamekit auth
Untested rn
2026-01-26 15:36:05 +00:00
ccd90508dd Move nintendo and steam auth config to options file 2025-11-17 17:07:13 +00:00
51c33a89e0 Nintendo auth is working 2025-11-17 14:54:18 +00:00
bb070be37d first draft of nintendo auth 2025-11-12 22:28:56 +00:00
79cf7c9e9f steam auth working with web ticket api 2025-11-12 12:24:08 +00:00
4ce607a1e1 Added steam auth using encrypted application tickets
Not tested working though yet
2025-11-07 16:53:17 +00:00
Florent Vilmart
18b9641360 Release 2.8.4 2018-08-12 17:49:09 -04:00
Florent Vilmart
0e9725b231 Release 2.8.3 2018-08-12 17:49:09 -04:00
GabrielLomba
5a0d09e90c Fix typo in FilesController.spec (#4917)
* Add the addFileNameHash option that allows users to remove the hash from file names

* Change option name to preserveFileName

* Revert changes to package-lock.json

* Fix typo in FileController.spec
2018-08-12 17:49:09 -04:00
Florent Vilmart
b92e76d61b Pins uws due to drame (#4916) 2018-08-12 17:49:09 -04:00
GabrielLomba
37e6459608 Add the addFileNameHash option that allows users to remove the hash f… (#4915)
* Add the addFileNameHash option that allows users to remove the hash from file names

* Change option name to preserveFileName

* Revert changes to package-lock.json
2018-08-12 17:49:09 -04:00
greenkeeper[bot]
d9cf3ce476 Update ws to the latest version 🚀 (#4909)
* fix(package): update ws to version 6.0.0

* chore(package): update lockfile

https://npm.im/greenkeeper-lockfile
2018-08-12 17:49:09 -04:00
Florent Vilmart
83cf68a513 Ensures the test suites are properly formed for vscode jasmine plugin (#4901)
* Ensures the test suites are properly formed for vscode jasmine plugin

* nit
2018-08-12 17:49:09 -04:00
Florent Vilmart
5a32eb33e6 Ensure legacy users with authData are not locked out (#4898)
* Adds fix for issue, ensuring legacy users with no ACL are properly handled

* Runs tests only on mongo
2018-08-12 17:49:09 -04:00
Saulo Tauil
2c316ceaad Enable express error handler (#4697)
* Propagate error to express handler in all situations

* Call the default error handler if `enableExpressErrorHandler` is truthy

* Updating options interface and definitions

* Testing express error handler

* Test spec fixes

* Fix test
2018-08-12 17:49:09 -04:00
Ross Bayer
faa04f7209 Support incrementing push badge value by more than 1 (#4889)
* Support 'IncrementByN' badge value for higher push badge increments

* Fix test

* Rely on object for badge incrementation (i.e. {increment: 3}) rather than string (IncrementBy3)

* For badge incrementation, utilize format similar to other operation notation
2018-08-12 17:49:09 -04:00
greenkeeper[bot]
c99cbbf530 Update flow-bin to the latest version 🚀 (#4877)
* chore(package): update flow-bin to version 0.76.0

* chore(package): update lockfile

https://npm.im/greenkeeper-lockfile
2018-08-12 17:49:09 -04:00
greenkeeper[bot]
41b0bf09bb Update bcrypt to the latest version 🚀 (#4875)
* chore(package): update bcrypt to version 3.0.0

* chore(package): update lockfile

https://npm.im/greenkeeper-lockfile
2018-08-12 17:49:09 -04:00
Florent Vilmart
31d8947c1f Greenkeeper/nodemon 1.18.1 (#4891)
* chore(package): update nodemon to version 1.18.1

Closes #4885

* chore(package): update lockfile

https://npm.im/greenkeeper-lockfile
2018-08-12 17:49:09 -04:00
greenkeeper[bot]
27d8f861e7 Update ws to the latest version 🚀 (#4888)
* fix(package): update ws to version 5.2.2

* chore(package): update lockfile

https://npm.im/greenkeeper-lockfile
2018-08-12 17:49:09 -04:00
greenkeeper[bot]
3e7b1b3fe1 Update mongodb to the latest version 🚀 (#4874)
* fix(package): update mongodb to version 3.1.1

* chore(package): update lockfile

https://npm.im/greenkeeper-lockfile
2018-08-12 17:49:09 -04:00
Florent Vilmart
3c89eb3e16 Greenkeeper/flow bin 0.75.0 (#4872)
* chore(package): update flow-bin to version 0.75.0

Closes #4816

* chore(package): update lockfile

https://npm.im/greenkeeper-lockfile
2018-08-12 17:49:09 -04:00
greenkeeper[bot]
fb7d9b5a7c Update mongodb-runner to the latest version 🚀 (#4869)
* chore(package): update mongodb-runner to version 4.0.0

* chore(package): update lockfile

https://npm.im/greenkeeper-lockfile
2018-08-12 17:49:09 -04:00
Florent Vilmart
461ca5d3fe Even faster tests (#4868)
* Various improvements in test name / de-duplications

* Reverts to class by class deletion, introduced fast mode that just delete data for mongo

- Speeds up are incredible Executed 1695 of 1713 specs INCOMPLETE (18 PENDING) in 4 mins 19 secs.

* Adds documentation about the deleteEverything
2018-08-12 17:49:09 -04:00
Florent Vilmart
305e4ba445 Removes need to use babel-register (#4865)
* Removes need to use babel-register

- Adds watch to watch changes when running the test to regenerate
- Tests are now pure node 8

* Adds timing to helper.js

* Update contribution guide

* Adds inline sourcemaps generation to restore coverage

* nits
2018-08-12 17:49:09 -04:00
Florent Vilmart
f23eb4c419 Update issue templates (#4867) 2018-08-12 17:49:09 -04:00
greenkeeper[bot]
566ac26789 Update pg-promise to the latest version 🚀 (#4866)
* fix(package): update pg-promise to version 8.4.5

* chore(package): update lockfile

https://npm.im/greenkeeper-lockfile
2018-08-12 17:49:09 -04:00
Florent Vilmart
96bc74433b Testing improvements (#4863)
* Adds supports-color so spec reporter dont output colors when not supported

* Adds babelrc so breakpoints dont jump around in specs

* Rename test to avoid duplicated name

* Removes unused methods

* Better naming for it_exclude
2018-08-12 17:49:09 -04:00
Florent Vilmart
7319aabf7a improves test performance on mongodb (#4862)
* improves test performance on mongodb

* Removes unused methdos
2018-08-12 17:49:09 -04:00
greenkeeper[bot]
ec27bc7e8e Update commander to the latest version 🚀 (#4861)
* fix(package): update commander to version 2.16.0

* chore(package): update lockfile

https://npm.im/greenkeeper-lockfile
2018-08-12 17:49:09 -04:00
Florent Vilmart
6b36ce1bb5 Ensure User ACL's are more flexible and secure #3588 (#4860)
* Fixes an issue that would let the beforeDelete be called when user has no access to the object

* Ensure we properly lock user

- Improves find method so we can attempt to read for a write poking the right ACL instead of using masterKey
- This ensure we do not run beforeDelete/beforeFind/beforeSave in the wrong scenarios

* nits

* Caps insufficient
2018-08-12 17:49:09 -04:00
Florent Vilmart
82fec72ec4 Removes unused package multer, fixes audit (#4859) 2018-08-12 17:49:09 -04:00
greenkeeper[bot]
938564acbb Update cross-env to the latest version 🚀 (#4830)
* chore(package): update cross-env to version 5.2.0

* chore(package): update lockfile

https://npm.im/greenkeeper-lockfile
2018-08-12 17:49:09 -04:00
greenkeeper[bot]
f969c7bb2a Update multer to the latest version 🚀 (#4858)
* fix(package): update multer to version 1.3.1

* chore(package): update lockfile

https://npm.im/greenkeeper-lockfile
2018-08-12 17:49:09 -04:00
greenkeeper[bot]
320eba1081 Update mongodb to the latest version 🚀 (#4855)
* fix(package): update mongodb to version 3.1.0

* chore(package): update lockfile

https://npm.im/greenkeeper-lockfile

* starting mongo 3.1.0, read preferences are passed again

* Adds test confirming #4831 is properly functional now
2018-08-12 17:49:09 -04:00
Chris
43be9fed9d #4678: Converting strings to Date when schema.type is Date within agg… (#4743)
* #4678: Converting strings to Date when schema.type is Date within aggregate function

* Added test cases to test new date match aggregate query

* Added function to parse match aggregate arguments and convert necessary values to Date objects

* Added missing return value

* Improved code quality based on suggestions and figured out why tests were failing

* Added tests from @dplewis

* Supporting project aggregation as well as exists operator

* Excluding exists match for postgres

* Handling the $group operator similar to $match and $project

* Added more tests for better code coverage

* Excluding certain tests from being run on postgres

* Excluding one more test  from postgres

* clean up
2018-08-12 17:49:09 -04:00
greenkeeper[bot]
f1ebdef28b Update eslint to the latest version 🚀 (#4848)
* chore(package): update eslint to version 5.0.0

* chore(package): update lockfile

https://npm.im/greenkeeper-lockfile
2018-08-12 17:49:09 -04:00
Florent Vilmart
b660ca5ed8 Removes npm-git.sh scripts (#4853) 2018-08-12 17:49:09 -04:00
greenkeeper[bot]
dccd7d7a1d Update ws to the latest version 🚀 (#4849)
* fix(package): update ws to version 5.2.1

* chore(package): update lockfile

https://npm.im/greenkeeper-lockfile
2018-08-12 17:49:09 -04:00
Diamond Lewis
df01846361 Aggregate allow multiple of same stage (#4835)
* Aggregate Allow Multiple Stages

* remove testing files

* nit

* spread them
2018-08-12 17:49:09 -04:00
Diamond Lewis
197190fc29 nit 2018-08-12 17:49:09 -04:00
Diamond Lewis
48ddcdb303 add includeAll option 2018-08-12 17:49:09 -04:00
Johnny
c8b303a9d2 Added verify password to users router and tests. (#4747)
* Added verify password to users router and tests.

* Added more tests to support more coverage.

* Added additional tests to spec. Removed condition from verifyPassword function where authData null keys condition wasn't necessary.

* Removed POST handling from verifyPassword.

* Refactored handleLogin and handleVerifyPassword to use shared helper function to validate the password provided in the request.

* Refactored verifyPassword and login to not use try/catch. Parent promise returns the error. Moved login specific functions to login handler.

* Added account lockout policy to verify password function. Added test spec for account lockout in verify password.

* no message

* Merged new changes from master. Made changes as requested from comments.

* We cannot remove hidden properties from the helper before returning to the login function. The password expiration check in the login function is dependent on some hidden properties, otherwise three password policy tests fail.
2018-08-12 17:49:09 -04:00
Florent Vilmart
4d81f8fc30 Ensure read preference is never overriden, so DB config prevails (#4833) 2018-08-12 17:49:09 -04:00
Manuel
cddb924703 add support for geoWithin.centerSphere queries via withJSON (#4825)
* add support for geoWithin.centerSphere queries via withJSON

* added test for passing array of lat, lng instead of Parse.GeoPoint

* added postgres support

* added more tests

* improved tests and validation

* added more tests
2018-08-12 17:49:09 -04:00
Arthur Cinader
f2f92858f1 Change name of function and variable from baseFieldName to rootFieldName (#4817) 2018-08-12 17:49:09 -04:00
Arthur Cinader
1427eeac5a Don't error when attempting to sort on an object field (#4806)
* add failing test to demonstrate that you can't sort on a
field in an object.

* Only validate the base of the field name.

* fix test name

* Only test sort for mongo.

* pg order by nested object

* level 2 test

* Factor out operation to get a field's base name.  Add comment.

* tweak comment wording so it wont make my grammar teacher angry.
2018-08-12 17:49:09 -04:00
Jaeger
348489784a Don't merge JSON fields after save() when using Postgres to keep same behaviour as MongoDB (#4808) (#4815) 2018-08-12 17:49:09 -04:00
Florent Vilmart
108c7e95e2 Release 2.8.3 2018-08-07 11:21:17 -04:00
Florent Vilmart
d15a3ce8f5 Adds exposed headers to avoid issue in JS SDK (#4934)
* Adds exposed headers to avoid issue in JS SDK

* Adds test for headers
2018-08-07 11:19:49 -04:00
126 changed files with 42711 additions and 7759 deletions

View File

@@ -9,5 +9,6 @@
"node": "8"
}
}]
]
],
"sourceMaps": "inline"
}

View File

@@ -0,0 +1,17 @@
---
name: "\U0001F4A1 Feature request"
about: Suggest an idea for this project
---
**Is your feature request related to a problem? Please describe.**
A clear and concise description of what the problem is. Ex. I'm always frustrated when [...]
**Describe the solution you'd like**
A clear and concise description of what you want to happen.
**Describe alternatives you've considered**
A clear and concise description of any alternative solutions or features you've considered.
**Additional context**
Add any other context or screenshots about the feature request here.

View File

@@ -0,0 +1,7 @@
---
name: "\U0001F4F2 Push Notifications"
about: Issues with setting up or delivering push notifications
---

View File

@@ -0,0 +1,53 @@
---
name: "\U0001F41B Report an issue"
about: Report an issue on parse-server
---
<!---
We use GitHub Issues for bugs.
If you have a non-bug question, ask on Stack Overflow or Server Fault:
- https://stackoverflow.com/questions/tagged/parse.com
- https://serverfault.com/tags/parse
If you have a vulnerability disclosure, please follow our policy available here https://github.com/parse-community/parse-server/blob/master/SECURITY.md
You may also search through existing issues before opening a new one: https://github.com/parse-community/parse-server/issues?utf8=%E2%9C%93&q=is%3Aissue
--- Please use this template. If you don't use this template, your issue may be closed without comment. ---
--->
### Issue Description
<!--- Describe your issue in as much detail as possible. -->
### Steps to reproduce
<!--- Please include a detailed list of steps that reproduce the issue. Include curl commands when applicable. --->
### Expected Results
<!--- What you expected to happen. --->
### Actual Outcome
<!--- What is happening instead. --->
### Environment Setup
- **Server**
- parse-server version (Be specific! Don't say 'latest'.) : [FILL THIS OUT]
- Operating System: [FILL THIS OUT]
- Hardware: [FILL THIS OUT]
- Localhost or remote server? (AWS, Heroku, Azure, Digital Ocean, etc): [FILL THIS OUT]
- **Database**
- MongoDB version: [FILL THIS OUT]
- Storage engine: [FILL THIS OUT]
- Hardware: [FILL THIS OUT]
- Localhost or remote server? (AWS, mLab, ObjectRocket, Digital Ocean, etc): [FILL THIS OUT]
### Logs/Trace
<!--- Include all relevant logs. You can turn on additional logging by configuring VERBOSE=1 in your environment. --->

2
.nycrc
View File

@@ -5,7 +5,7 @@
],
"exclude": [
"**/spec/**",
"lib/"
"src/"
]
}

View File

@@ -55,7 +55,7 @@ after_script:
jobs:
include:
# release on github latest branch
# release on npm on tags
- stage: release
node_js: '8.10'
env:
@@ -63,13 +63,6 @@ jobs:
after_script: skip
script: skip
deploy:
- provider: script
skip_cleanup: true
script: ./resources/npm-git.sh && npm install -g https://github.com/parse-community/parse-server#latest
on:
branch:
- master
- 3.x
- provider: npm
skip_cleanup: true
email:

View File

@@ -1,10 +1,50 @@
## Parse Server Changelog
### master
[Full Changelog](https://github.com/parse-community/parse-server/compare/2.8.2...master)
[Full Changelog](https://github.com/parse-community/parse-server/compare/2.8.4...master)
### 2.8.4
[Full Changelog](https://github.com/parse-community/parse-server/compare/2.8.3...2.8.4)
#### Improvements
* Adds ability to forward errors to express handler (#4697)
* Adds ability to increment the push badge with an arbitrary value (#4889)
* Adds ability to preserve the file names when uploading (#4915)
* `_User` now follow regular ACL policy. Letting administrator lock user out. (#4860) and (#4898)
* Ensure dates are properly handled in aggregates (#4743)
* Aggregates: Improved support for stages sharing the same name
* Add includeAll option
* Added verify password to users router and tests. (#4747)
* Ensure read preference is never overriden, so DB config prevails (#4833)
* add support for geoWithin.centerSphere queries via withJSON (#4825)
* Allow sorting an object field (#4806)
* Postgres: Don't merge JSON fields after save() to keep same behaviour as MongoDB (#4808) (#4815)
#### Dependency updates
* [commander@2.16.0](https://www.npmjs.com/package/commander)
* [mongodb@3.1.1](https://www.npmjs.com/package/mongodb)
* [pg-promise@8.4.5](https://www.npmjs.com/package/pg-promise)
* [ws@6.0.0](https://www.npmjs.com/package/ws)
* [bcrypt@3.0.0](https://www.npmjs.com/package/bcrypt)
* [uws@10.148.1](https://www.npmjs.com/package/uws)
##### Development Dependencies Updates:
* [cross-env@5.2.0](https://www.npmjs.com/package/cross-env)
* [eslint@5.0.0](https://www.npmjs.com/package/eslint)
* [flow-bin@0.76.0](https://www.npmjs.com/package/flow-bin)
* [mongodb-runner@4.0.0](https://www.npmjs.com/package/mongodb-runner)
* [nodemon@1.18.1](https://www.npmjs.com/package/nodemon)
* [nyc@12.0.2](https://www.npmjs.com/package/nyc)
* [request-promise@4.2.2](https://www.npmjs.com/package/request-promise)
* [supports-color@5.4.0](https://www.npmjs.com/package/supports-color)
### 2.8.3
[Full Changelog](https://github.com/parse-community/parse-server/compare/2.8.2...2.8.3)
* Adds support for JS SDK 2.0 job status header
### 2.8.2
[Full Changelog](https://github.com/parse-community/parse-server/compare/2.8.2...2.8.1)
[Full Changelog](https://github.com/parse-community/parse-server/compare/2.8.1...2.8.2)
##### Bug Fixes:
* Ensure legacy users without ACL's are not locked out, thanks to [Florent Vilmart](https://github.com/flovilmart)

View File

@@ -1,10 +1,49 @@
### Contributing to Parse Server
# Contributing to Parse Server
#### Pull Requests Welcome!
We really want Parse to be yours, to see it grow and thrive in the open source community.
We really want Parse to be yours, to see it grow and thrive in the open source community.
If you are not familiar with Pull Requests and want to know more about them, you can visit the [Creating a pull request](https://help.github.com/articles/creating-a-pull-request/) article. It contains detailed information about the process.
##### Please Do's
## Setting up the project for debugging and contributing:
### Recommended setup:
* [vscode](https://code.visualstudio.com), the popular IDE.
* [Jasmine Test Explorer](https://marketplace.visualstudio.com/items?itemName=hbenl.vscode-test-explorer), a very practical test exploration plugin which let you run, debug and see the test results inline.
### Setting up your local machine:
* [Fork](https://github.com/parse-community/parse-server) this project and clone the fork on your local machine:
```sh
$ git clone https://github.com/parse-community/parse-server
$ cd parse-server # go into the clone directory
$ npm install # install all the node dependencies
$ code . # launch vscode
$ npm run watch # run babel watching for local file changes
```
Once you have babel running in watch mode, you can start making changes to parse-server.
### Good to know:
* The lib/ folder is not committed, so never make changes in there.
* Always make changes to files in the `src/` folder.
* All the tests should point to sources in the `lib/` folder.
### Troubleshooting:
*Question*: I modify the code in the src folder but it doesn't seem to have any effect.<br/>
*Answer*: Check that `npm run watch` is running
*Question*: How do I use breakpoints and debug step by step?<br/>
*Answer*: The easiest way is to install [Jasmine Test Explorer](https://marketplace.visualstudio.com/items?itemName=hbenl.vscode-test-explorer), it will let you run selectively tests and debug them.
*Question*: How do I deploy my forked version on my servers?<br/>
*Answer*: In your `package.json`, update the `parse-server` dependency to `https://github.com/MY_USERNAME/parse-server#MY_FEATURE`. Run `npm install`, commit the changes and deploy to your servers.
### Please Do's
* Begin by reading the [Development Guide](http://docs.parseplatform.org/parse-server/guide/#development-guide) to learn how to get started running the parse-server.
* Take testing seriously! Aim to increase the test coverage with every pull request. To obtain the test coverage of the project, run:
@@ -17,7 +56,7 @@ We really want Parse to be yours, to see it grow and thrive in the open source c
* Lint your code by running `npm run lint` to make sure the code is not going to be rejected by the CI.
* **Do not** publish the *lib* folder.
##### Run your tests against Postgres (optional)
### Run your tests against Postgres (optional)
If your pull request introduces a change that may affect the storage or retrieval of objects, you may want to make sure it plays nice with Postgres.
@@ -28,6 +67,6 @@ If your pull request introduces a change that may affect the storage or retrieva
- `it_only_db('mongo')` // will make a test that only runs on mongo
- `it_exclude_dbs(['postgres'])` // will make a test that runs against all DB's but postgres
##### Code of Conduct
### Code of Conduct
This project adheres to the [Contributor Covenant Code of Conduct](https://github.com/parse-community/parse-server/blob/master/CODE_OF_CONDUCT.md). By participating, you are expected to honor this code.

View File

@@ -232,6 +232,7 @@ The client keys used with Parse are no longer necessary with Parse Server. If yo
#### Advanced options
* `fileKey` - For migrated apps, this is necessary to provide access to files already hosted on Parse.
* `preserveFileName` - Set to true to remove the unique hash added to the file names. Defaults to false.
* `allowClientClassCreation` - Set to false to disable client class creation. Defaults to true.
* `enableAnonymousUsers` - Set to false to disable anonymous users. Defaults to true.
* `auth` - Used to configure support for [3rd party authentication](http://docs.parseplatform.org/parse-server/guide/#oauth-and-3rd-party-authentication).
@@ -393,16 +394,24 @@ If you believe you've found an issue with Parse Server, make sure these boxes ar
# Want to ride the bleeding edge?
The `latest` branch in this repository is automatically maintained to be the last
commit to `master` to pass all tests, in the same form found on npm. It is
recommend to use builds deployed npm for many reasons, but if you want to use
It is recommend to use builds deployed npm for many reasons, but if you want to use
the latest not-yet-released version of parse-server, you can do so by depending
directly on this branch:
```
npm install parseplatform/parse-server.git#latest
npm install parse-community/parse-server.git#master
```
## Experimenting
You can also use your own forks, and work in progress branches by specifying them:
```
npm install github:myUsername/parse-server#my-awesome-feature
```
And don't forget, if you plan to deploy it remotely, you should run `npm install` with the `--save` option.
# Contributing
We really want Parse to be yours, to see it grow and thrive in the open source community. Please see the [Contributing to Parse Server guide](CONTRIBUTING.md).

45799
package-lock.json generated

File diff suppressed because it is too large Load Diff

View File

@@ -1,6 +1,6 @@
{
"name": "parse-server",
"version": "2.8.2",
"version": "2.8.6",
"description": "An express module providing a Parse-compatible API server",
"main": "lib/index.js",
"repository": {
@@ -23,27 +23,32 @@
"@parse/push-adapter": "3.0.0-alpha2",
"@parse/s3-files-adapter": "1.2.1",
"@parse/simple-mailgun-adapter": "1.0.2",
"bcrypt": "3.0.0",
"bcryptjs": "2.4.3",
"body-parser": "1.18.3",
"commander": "2.15.1",
"commander": "2.16.0",
"deepcopy": "1.0.0",
"express": "4.16.2",
"gamecenter-identity-verifier": "^0.1.1",
"intersect": "1.0.1",
"jsonwebtoken": "^8.5.1",
"jwks-rsa": "^1.12.3",
"lodash": "4.17.5",
"lru-cache": "4.1.2",
"mime": "2.3.1",
"mongodb": "3.0.7",
"multer": "1.3.0",
"mongodb": "3.1.1",
"parse": "1.11.1",
"pg-promise": "8.4.0",
"pg-promise": "8.4.5",
"redis": "2.8.0",
"request": "2.85.0",
"semver": "5.5.0",
"steam-appticket": "1.0.1",
"tv4": "1.3.0",
"uuid": "^3.1.0",
"uws": "10.148.1",
"winston": "2.4.1",
"winston-daily-rotate-file": "1.7.2",
"ws": "5.2.0"
"ws": "6.0.0"
},
"devDependencies": {
"babel-cli": "6.26.0",
@@ -53,23 +58,25 @@
"babel-plugin-transform-object-rest-spread": "^6.26.0",
"babel-preset-env": "1.6.1",
"bcrypt-nodejs": "0.0.3",
"cross-env": "5.1.6",
"cross-env": "5.2.0",
"deep-diff": "1.0.1",
"eslint": "^4.9.0",
"eslint": "^5.0.0",
"eslint-plugin-flowtype": "^2.39.1",
"flow-bin": "^0.73.0",
"flow-bin": "^0.76.0",
"gaze": "1.1.3",
"jasmine": "3.1.0",
"jasmine-spec-reporter": "^4.1.0",
"mongodb-runner": "3.6.1",
"nodemon": "1.17.5",
"nyc": "^11.0.2",
"request-promise": "4.2.2"
"mongodb-runner": "4.0.0",
"nodemon": "1.18.1",
"nyc": "^12.0.2",
"request-promise": "4.2.2",
"supports-color": "^5.4.0"
},
"scripts": {
"dev": "npm run build && node bin/dev",
"lint": "flow && eslint --cache ./",
"build": "babel src/ -d lib/ --copy-files",
"watch": "babel --watch src/ -d lib/ --copy-files",
"pretest": "npm run lint",
"test": "cross-env MONGODB_VERSION=${MONGODB_VERSION:=3.2.6} MONGODB_STORAGE_ENGINE=mmapv1 TESTING=1 jasmine",
"coverage": "cross-env MONGODB_VERSION=${MONGODB_VERSION:=3.2.6} MONGODB_STORAGE_ENGINE=mmapv1 TESTING=1 nyc jasmine",
@@ -84,8 +91,8 @@
"parse-server": "./bin/parse-server"
},
"optionalDependencies": {
"bcrypt": "2.0.1",
"uws": "^10.148.0"
"bcrypt": "3.0.0",
"uws": "10.148.1"
},
"collective": {
"type": "opencollective",

View File

@@ -1,40 +0,0 @@
#!/bin/sh -e
# This script maintains a git branch which mirrors master but in a form that
# what will eventually be deployed to npm, allowing npm dependencies to use:
#
# "parse-server": "parseplatform/parse-server#latest"
#
# From: https://github.com/graphql/graphql-js/blob/master/resources/npm-git.sh
BUILD_DIR=latest
BRANCH="${TRAVIS_BRANCH}"
TARGET="latest"
if [ "$BRANCH" != "master" ];
then
TARGET="$BRANCH-preview"
fi
npm run build
mkdir -p $BUILD_DIR
cp package.json $BUILD_DIR/
cp README.md $BUILD_DIR/
cp LICENSE $BUILD_DIR/
cp PATENTS $BUILD_DIR/
cp CHANGELOG.md $BUILD_DIR/
cp postinstall.js $BUILD_DIR/
cp -R lib $BUILD_DIR
cp -R bin $BUILD_DIR
cp -R public_html $BUILD_DIR
cp -R views $BUILD_DIR
cd $BUILD_DIR
git init
git config user.name "Travis CI"
git config user.email "github@fb.com"
git add .
git commit -m "Deploy $BRANCH to $TARGET branch"
git push --force --quiet "https://${GH_TOKEN}@github.com/parse-community/parse-server.git" master:$TARGET

14
spec/.babelrc Normal file
View File

@@ -0,0 +1,14 @@
{
"plugins": [
"transform-object-rest-spread"
],
"presets": [
["env", {
"targets": {
"node": "8"
}
}]
],
"sourceMaps": "inline",
"retainLines": true
}

View File

@@ -1,6 +1,6 @@
"use strict";
const Config = require("../src/Config");
const Config = require("../lib/Config");
const loginWithWrongCredentialsShouldFail = function(username, password) {
return new Promise((resolve, reject) => {

View File

@@ -1,7 +1,7 @@
const AdaptableController = require("../src/Controllers/AdaptableController").AdaptableController;
const FilesAdapter = require("../src/Adapters/Files/FilesAdapter").default;
const FilesController = require("../src/Controllers/FilesController").FilesController;
const AdaptableController = require("../lib/Controllers/AdaptableController").AdaptableController;
const FilesAdapter = require("../lib/Adapters/Files/FilesAdapter").default;
const FilesController = require("../lib/Controllers/FilesController").FilesController;
const MockController = function(options) {
AdaptableController.call(this, options);
@@ -70,7 +70,7 @@ describe("AdaptableController", ()=>{
done();
});
it("should accept an object adapter", (done) => {
it("should accept an prototype based object adapter", (done) => {
function AGoodAdapter() {}
AGoodAdapter.prototype.createFile = function() { };
AGoodAdapter.prototype.deleteFile = function() { };

View File

@@ -1,9 +1,9 @@
const loadAdapter = require("../src/Adapters/AdapterLoader").loadAdapter;
const loadAdapter = require("../lib/Adapters/AdapterLoader").loadAdapter;
const FilesAdapter = require("@parse/fs-files-adapter").default;
const S3Adapter = require("@parse/s3-files-adapter").default;
const ParsePushAdapter = require("@parse/push-adapter").default;
const Config = require('../src/Config');
const Config = require('../lib/Config');
describe("AdapterLoader", ()=>{
@@ -33,7 +33,7 @@ describe("AdapterLoader", ()=>{
});
it("should instantiate an adapter from string that is module", (done) => {
const adapterPath = require('path').resolve("./src/Adapters/Files/FilesAdapter");
const adapterPath = require('path').resolve("./lib/Adapters/Files/FilesAdapter");
const adapter = loadAdapter({
adapter: adapterPath
});

View File

@@ -1,7 +1,7 @@
const auth = require('../src/Auth');
const Config = require('../src/Config');
const rest = require('../src/rest');
const AudiencesRouter = require('../src/Routers/AudiencesRouter').AudiencesRouter;
const auth = require('../lib/Auth');
const Config = require('../lib/Config');
const rest = require('../lib/rest');
const AudiencesRouter = require('../lib/Routers/AudiencesRouter').AudiencesRouter;
describe('AudiencesRouter', () => {
it('uses find condition from request.body', (done) => {

View File

@@ -1,5 +1,5 @@
describe('Auth', () => {
const Auth = require('../src/Auth.js').Auth;
const Auth = require('../lib/Auth.js').Auth;
describe('getUserRoles', () => {
let auth;

View File

@@ -1,13 +1,13 @@
const request = require('request');
const Config = require("../src/Config");
const defaultColumns = require('../src/Controllers/SchemaController').defaultColumns;
const authenticationLoader = require('../src/Adapters/Auth');
const Config = require("../lib/Config");
const defaultColumns = require('../lib/Controllers/SchemaController').defaultColumns;
const authenticationLoader = require('../lib/Adapters/Auth');
const path = require('path');
describe('AuthenticationProviders', function() {
["facebook", "facebookaccountkit", "github", "instagram", "google", "linkedin", "meetup", "twitter", "janrainengage", "janraincapture", "vkontakte"].map(function(providerName){
it("Should validate structure of " + providerName, (done) => {
const provider = require("../src/Adapters/Auth/" + providerName);
const provider = require("../lib/Adapters/Auth/" + providerName);
jequal(typeof provider.validateAuthData, "function");
jequal(typeof provider.validateAppId, "function");
const authDataPromise = provider.validateAuthData({}, {});
@@ -301,7 +301,7 @@ describe('AuthenticationProviders', function() {
})
});
it('properly loads custom adapter module object', (done) => {
it('properly loads custom adapter module object (again)', (done) => {
const authenticationHandler = authenticationLoader({
customAuthentication: { module: path.resolve('./spec/support/CustomAuthFunction.js'), options: { token: 'valid-token' }}
});

View File

@@ -1,7 +1,7 @@
'use strict';
import commander from '../src/cli/utils/commander';
import definitions from '../src/cli/definitions/parse-server';
import liveQueryDefinitions from '../src/cli/definitions/parse-live-query-server';
const commander = require('../lib/cli/utils/commander').default;
const definitions = require('../lib/cli/definitions/parse-server').default;
const liveQueryDefinitions = require('../lib/cli/definitions/parse-live-query-server').default;
const testDefinitions = {
'arg0': 'PROGRAM_ARG_0',
@@ -173,7 +173,7 @@ describe('LiveQuery definitions', () => {
if (typeof definition.env !== 'undefined') {
expect(typeof definition.env).toBe('string');
}
expect(typeof definition.help).toBe('string');
expect(typeof definition.help).toBe('string', `help for ${key} should be a string`);
if (typeof definition.required !== 'undefined') {
expect(typeof definition.required).toBe('boolean');
}

View File

@@ -1,4 +1,4 @@
const CacheController = require('../src/Controllers/CacheController.js').default;
const CacheController = require('../lib/Controllers/CacheController.js').default;
describe('CacheController', function() {
let FakeCacheAdapter;

View File

@@ -1,5 +1,5 @@
const Client = require('../src/LiveQuery/Client').Client;
const ParseWebSocket = require('../src/LiveQuery/ParseWebSocketServer').ParseWebSocket;
const Client = require('../lib/LiveQuery/Client').Client;
const ParseWebSocket = require('../lib/LiveQuery/ParseWebSocketServer').ParseWebSocket;
describe('Client', function() {
it('can be initialized', function() {

View File

@@ -1,4 +1,4 @@
const ClientSDK = require('../src/ClientSDK');
const ClientSDK = require('../lib/ClientSDK');
describe('ClientSDK', () => {
it('should properly parse the SDK versions', () => {

View File

@@ -1,7 +1,7 @@
"use strict"
const Parse = require("parse/node");
const rp = require('request-promise');
const InMemoryCacheAdapter = require('../src/Adapters/Cache/InMemoryCacheAdapter').InMemoryCacheAdapter;
const InMemoryCacheAdapter = require('../lib/Adapters/Cache/InMemoryCacheAdapter').InMemoryCacheAdapter;
describe('Cloud Code', () => {
it('can load absolute cloud code file', done => {
@@ -983,7 +983,7 @@ describe('Cloud Code', () => {
TODO: fix for Postgres
trying to delete a field that doesn't exists doesn't play nice
*/
it_exclude_dbs(['postgres'])('should fully delete objects when using `unset` with beforeSave (regression test for #1840)', done => {
it_exclude_dbs(['postgres'])('should fully delete objects when using `unset` and `set` with beforeSave (regression test for #1840)', done => {
const TestObject = Parse.Object.extend('TestObject');
const BeforeSaveObject = Parse.Object.extend('BeforeSaveChanged');

View File

@@ -1,5 +1,5 @@
const LoggerController = require('../src/Controllers/LoggerController').LoggerController;
const WinstonLoggerAdapter = require('../src/Adapters/Logger/WinstonLoggerAdapter').WinstonLoggerAdapter;
const LoggerController = require('../lib/Controllers/LoggerController').LoggerController;
const WinstonLoggerAdapter = require('../lib/Adapters/Logger/WinstonLoggerAdapter').WinstonLoggerAdapter;
const fs = require('fs');
const loremFile = __dirname + '/support/lorem.txt';

View File

@@ -1,4 +1,4 @@
const DatabaseController = require('../src/Controllers/DatabaseController.js');
const DatabaseController = require('../lib/Controllers/DatabaseController.js');
const validateQuery = DatabaseController._validateQuery;
describe('DatabaseController', function() {

View File

@@ -2,7 +2,7 @@
const request = require('request');
const requestp = require('request-promise');
const Config = require('../src/Config');
const Config = require('../lib/Config');
describe("Email Verification Token Expiration: ", () => {

View File

@@ -0,0 +1,66 @@
const ParseServer = require("../lib/index");
const express = require('express');
const rp = require('request-promise');
// Verifies that when `enableExpressErrorHandler` is set, parse-server forwards
// API errors to the host app's Express error middleware instead of responding
// itself, and that the error the middleware sees matches the error the client
// receives.
describe('Enable express error handler', () => {
it('should call the default handler in case of error, like updating a non existing object', done => {
const serverUrl = "http://localhost:12667/parse"
const appId = "anOtherTestApp";
const masterKey = "anOtherTestMasterKey";
// Populated asynchronously: `server` once listen() succeeds, `lastError`
// when the custom error middleware fires.
let server;
let lastError;
// `defaultConfiguration` is a global provided by the spec helpers; we only
// override the identity/URL fields and enable the error-handler passthrough.
const parseServer = ParseServer.ParseServer(Object.assign({},
defaultConfiguration, {
appId: appId,
masterKey: masterKey,
serverURL: serverUrl,
enableExpressErrorHandler: true,
// Test-only hook: wait for index builds before mounting and querying.
__indexBuildCompletionCallbackForTests: promise => {
promise.then(() => {
expect(Parse.applicationId).toEqual("anOtherTestApp");
const app = express();
app.use('/parse', parseServer);
server = app.listen(12667);
// Error middleware must declare 4 args for Express to treat it as an
// error handler (arity-based detection).
app.use(function (err, req, res, next) {
// NOTE(review): `next;` is a no-op expression — presumably present only
// to keep the 4-arg signature lint-clean; confirm `next(err)` was not
// intended, since this handler never responds itself.
next;
lastError = err;
})
// PUT to a non-existing objectId to force an "Object not found" error.
rp({
method: 'PUT',
uri: serverUrl + '/classes/AnyClass/nonExistingId',
headers: {
'X-Parse-Application-Id': appId,
'X-Parse-Master-Key': masterKey
},
body: { someField: "blablabla" },
json: true
}).then(() => {
fail('Should throw error');
}).catch(e => {
expect(e).toBeDefined();
// request-promise wraps the response body in `e.error`.
const reqError = e.error;
expect(reqError).toBeDefined();
expect(lastError).toBeDefined();
// 101 is Parse.Error.OBJECT_NOT_FOUND.
expect(lastError.code).toEqual(101)
expect(lastError.message).toEqual('Object not found.')
// The middleware and the client must have observed the same error.
expect(lastError.code).toEqual(reqError.code);
expect(lastError.message).toEqual(reqError.error);
}).then(() => {
// Always shut the listener down so later specs can reuse the port.
server.close(done);
});
})
}
}
));
});
});

View File

@@ -1,6 +1,6 @@
const auth = require('../src/Auth');
const Config = require('../src/Config');
const rest = require('../src/rest');
const auth = require('../lib/Auth');
const Config = require('../lib/Config');
const rest = require('../lib/rest');
describe('Enable single schema cache', () => {
beforeEach((done) => {

View File

@@ -1,4 +1,4 @@
const EventEmitterPubSub = require('../src/Adapters/PubSub/EventEmitterPubSub').EventEmitterPubSub;
const EventEmitterPubSub = require('../lib/Adapters/PubSub/EventEmitterPubSub').EventEmitterPubSub;
describe('EventEmitterPubSub', function() {
it('can publish and subscribe', function() {

View File

@@ -1,8 +1,8 @@
const LoggerController = require('../src/Controllers/LoggerController').LoggerController;
const WinstonLoggerAdapter = require('../src/Adapters/Logger/WinstonLoggerAdapter').WinstonLoggerAdapter;
const GridStoreAdapter = require("../src/Adapters/Files/GridStoreAdapter").GridStoreAdapter;
const Config = require("../src/Config");
const FilesController = require('../src/Controllers/FilesController').default;
const LoggerController = require('../lib/Controllers/LoggerController').LoggerController;
const WinstonLoggerAdapter = require('../lib/Adapters/Logger/WinstonLoggerAdapter').WinstonLoggerAdapter;
const GridStoreAdapter = require("../lib/Adapters/Files/GridStoreAdapter").GridStoreAdapter;
const Config = require("../lib/Config");
const FilesController = require('../lib/Controllers/FilesController').default;
const mockAdapter = {
createFile: () => {
@@ -14,13 +14,13 @@ const mockAdapter = {
}
// Small additional tests to improve overall coverage
describe("FilesController",() =>{
describe("FilesController", () => {
it("should properly expand objects", (done) => {
const config = Config.get(Parse.applicationId);
const gridStoreAdapter = new GridStoreAdapter('mongodb://localhost:27017/parse');
const filesController = new FilesController(gridStoreAdapter)
const result = filesController.expandFilesInObject(config, function(){});
const result = filesController.expandFilesInObject(config, function () { });
expect(result).toBeUndefined();
@@ -43,7 +43,7 @@ describe("FilesController",() =>{
reconfigureServer({ filesAdapter: mockAdapter })
.then(() => new Promise(resolve => setTimeout(resolve, 1000)))
.then(() => new Parse.File("yolo.txt", [1,2,3], "text/plain").save())
.then(() => new Parse.File("yolo.txt", [1, 2, 3], "text/plain").save())
.then(
() => done.fail('should not succeed'),
() => setImmediate(() => Parse.Promise.as('done'))
@@ -62,4 +62,40 @@ describe("FilesController",() =>{
done();
});
});
// The FilesController prefixes stored file names with a 32-char hex hash
// unless `preserveFileName: true` is configured. Both specs stub the adapter
// so no real GridStore/Mongo connection is used; the spy is invoked
// synchronously by createFile, so asserting right after the call is safe.
it("should add a unique hash to the file name when the preserveFileName option is false", (done) => {
const config = Config.get(Parse.applicationId)
const gridStoreAdapter = new GridStoreAdapter('mongodb://localhost:27017/parse')
spyOn(gridStoreAdapter, 'createFile')
gridStoreAdapter.createFile.and.returnValue(Promise.resolve())
const fileName = 'randomFileName.pdf'
// Escape the literal dots so the name can be embedded in a regex.
const regexEscapedFileName = fileName.replace(/\./g, "\\$&")
const filesController = new FilesController(gridStoreAdapter, null, { preserveFileName: false })
filesController.createFile(config, fileName)
expect(gridStoreAdapter.createFile).toHaveBeenCalledTimes(1)
// Expect "<32 hash chars>_randomFileName.pdf".
expect(gridStoreAdapter.createFile.calls.mostRecent().args[0]).toMatch(`^.{32}_${regexEscapedFileName}$`)
done();
});
it("should not add a unique hash to the file name when the preserveFileName option is true", (done) => {
const config = Config.get(Parse.applicationId)
const gridStoreAdapter = new GridStoreAdapter('mongodb://localhost:27017/parse')
spyOn(gridStoreAdapter, 'createFile')
gridStoreAdapter.createFile.and.returnValue(Promise.resolve())
const fileName = 'randomFileName.pdf'
const filesController = new FilesController(gridStoreAdapter, null, { preserveFileName: true })
filesController.createFile(config, fileName)
expect(gridStoreAdapter.createFile).toHaveBeenCalledTimes(1)
// With preserveFileName the adapter receives the name unchanged.
expect(gridStoreAdapter.createFile.calls.mostRecent().args[0]).toEqual(fileName)
done();
});
});

View File

@@ -1,9 +1,9 @@
const MongoClient = require("mongodb").MongoClient;
const GridStore = require("mongodb").GridStore;
const GridStoreAdapter = require("../src/Adapters/Files/GridStoreAdapter").GridStoreAdapter;
const Config = require("../src/Config");
const FilesController = require('../src/Controllers/FilesController').default;
const GridStoreAdapter = require("../lib/Adapters/Files/GridStoreAdapter").GridStoreAdapter;
const Config = require("../lib/Config");
const FilesController = require('../lib/Controllers/FilesController').default;
// Small additional tests to improve overall coverage

View File

@@ -1,42 +1,52 @@
'use strict';
const httpRequest = require("../src/cloud-code/httpRequest"),
HTTPResponse = require('../src/cloud-code/HTTPResponse').default,
const httpRequest = require("../lib/cloud-code/httpRequest"),
HTTPResponse = require('../lib/cloud-code/HTTPResponse').default,
bodyParser = require('body-parser'),
express = require("express");
const port = 13371;
const httpRequestServer = "http://localhost:" + port;
const app = express();
app.use(bodyParser.json({ 'type': '*/*' }));
app.get("/hello", function(req, res){
res.json({response: "OK"});
});
function startServer(done) {
const app = express();
app.use(bodyParser.json({ 'type': '*/*' }));
app.get("/hello", function(req, res){
res.json({response: "OK"});
});
app.get("/404", function(req, res){
res.status(404);
res.send("NO");
});
app.get("/404", function(req, res){
res.status(404);
res.send("NO");
});
app.get("/301", function(req, res){
res.status(301);
res.location("/hello");
res.send();
});
app.get("/301", function(req, res){
res.status(301);
res.location("/hello");
res.send();
});
app.post('/echo', function(req, res){
res.json(req.body);
});
app.post('/echo', function(req, res){
res.json(req.body);
});
app.get('/qs', function(req, res){
res.json(req.query);
});
app.listen(13371);
app.get('/qs', function(req, res){
res.json(req.query);
});
return app.listen(13371, undefined, done);
}
describe("httpRequest", () => {
let server;
beforeAll((done) => {
server = startServer(done);
});
afterAll((done) => {
server.close(done);
});
it("should do /hello", (done) => {
httpRequest({
url: httpRequestServer + "/hello"
@@ -122,21 +132,6 @@ describe("httpRequest", () => {
});
})
it("should fail on 404", (done) => {
httpRequest({
url: httpRequestServer + "/404",
}).then(function(){
fail("should not succeed");
done();
}, function(httpResponse){
expect(httpResponse.status).toBe(404);
expect(httpResponse.buffer).toEqual(new Buffer('NO'));
expect(httpResponse.text).toEqual('NO');
expect(httpResponse.data).toBe(undefined);
done();
})
})
it("should post on echo", (done) => {
let calls = 0;
httpRequest({

View File

@@ -1,4 +1,4 @@
const InMemoryCache = require('../src/Adapters/Cache/InMemoryCache').default;
const InMemoryCache = require('../lib/Adapters/Cache/InMemoryCache').default;
describe('InMemoryCache', function() {

View File

@@ -1,4 +1,4 @@
const InMemoryCacheAdapter = require('../src/Adapters/Cache/InMemoryCacheAdapter').default;
const InMemoryCacheAdapter = require('../lib/Adapters/Cache/InMemoryCacheAdapter').default;
describe('InMemoryCacheAdapter', function() {
const KEY = 'hello';

View File

@@ -1,7 +1,7 @@
const auth = require('../src/Auth');
const Config = require('../src/Config');
const rest = require('../src/rest');
const InstallationsRouter = require('../src/Routers/InstallationsRouter').InstallationsRouter;
const auth = require('../lib/Auth');
const Config = require('../lib/Config');
const rest = require('../lib/rest');
const InstallationsRouter = require('../lib/Routers/InstallationsRouter').InstallationsRouter;
describe('InstallationsRouter', () => {
it('uses find condition from request.body', (done) => {

View File

@@ -50,11 +50,11 @@ describe('JobSchedule', () => {
rp.put(Parse.serverURL + '/cloud_code/jobs/jobId', defaultOptions).then(done.fail, () => done());
});
it('should reject access when not using masterKey (PUT /jobs/id)', (done) => {
it('should reject access when not using masterKey (DELETE /jobs/id)', (done) => {
rp.del(Parse.serverURL + '/cloud_code/jobs/jobId', defaultOptions).then(done.fail, () => done());
});
it('should allow access when using masterKey (/jobs)', (done) => {
it('should allow access when using masterKey (GET /jobs)', (done) => {
rp.get(Parse.serverURL + '/cloud_code/jobs', masterKeyOptions).then(done, done.fail);
});

View File

@@ -1,4 +1,4 @@
const logging = require('../src/Adapters/Logger/WinstonLogger');
const logging = require('../lib/Adapters/Logger/WinstonLogger');
const winston = require('winston');
class TestTransport extends winston.Transport {

View File

@@ -1,8 +1,8 @@
const LoggerController = require('../src/Controllers/LoggerController').LoggerController;
const WinstonLoggerAdapter = require('../src/Adapters/Logger/WinstonLoggerAdapter').WinstonLoggerAdapter;
const LoggerController = require('../lib/Controllers/LoggerController').LoggerController;
const WinstonLoggerAdapter = require('../lib/Adapters/Logger/WinstonLoggerAdapter').WinstonLoggerAdapter;
describe('LoggerController', () => {
it('can check process a query without throwing', (done) => {
it('can process an empty query without throwing', (done) => {
// Make mock request
const query = {};
@@ -37,7 +37,7 @@ describe('LoggerController', () => {
done();
});
it('can process a query without throwing', (done) => {
it('can process an ascending query without throwing', (done) => {
// Make mock request
const query = {
from: "2016-01-01Z00:00:00",
@@ -58,7 +58,7 @@ describe('LoggerController', () => {
done();
});
it('can check process a query without throwing', (done) => {
it('can process a descending query without throwing', (done) => {
// Make mock request
const query = {
from: "2016-01-01",

View File

@@ -1,9 +1,9 @@
'use strict';
const request = require('request');
const LogsRouter = require('../src/Routers/LogsRouter').LogsRouter;
const LoggerController = require('../src/Controllers/LoggerController').LoggerController;
const WinstonLoggerAdapter = require('../src/Adapters/Logger/WinstonLoggerAdapter').WinstonLoggerAdapter;
const LogsRouter = require('../lib/Routers/LogsRouter').LogsRouter;
const LoggerController = require('../lib/Controllers/LoggerController').LoggerController;
const WinstonLoggerAdapter = require('../lib/Adapters/Logger/WinstonLoggerAdapter').WinstonLoggerAdapter;
const loggerController = new LoggerController(new WinstonLoggerAdapter());

View File

@@ -1,5 +1,5 @@
const middlewares = require('../src/middlewares');
const AppCache = require('../src/cache').AppCache;
const middlewares = require('../lib/middlewares');
const AppCache = require('../lib/cache').AppCache;
describe('middlewares', () => {
@@ -290,4 +290,16 @@ describe('middlewares', () => {
middlewares.handleParseHeaders(fakeReq, fakeRes);
expect(fakeRes.status).toHaveBeenCalledWith(403);
});
// allowCrossDomain should set exactly four CORS headers on the response,
// including the Expose-Headers entry that lets browser clients read the
// job/push status IDs parse-server returns.
it('should properly expose the headers', () => {
// Capture every header the middleware sets via a minimal res stub.
const headers = {};
const res = {
header: (key, value) => {
headers[key] = value
}
};
// req is unused by the middleware, so an empty object suffices; next is a no-op.
middlewares.allowCrossDomain({}, res, () => {});
expect(Object.keys(headers).length).toBe(4);
expect(headers['Access-Control-Expose-Headers']).toBe('X-Parse-Job-Status-Id, X-Parse-Push-Status-Id');
});
});

View File

@@ -1,6 +1,6 @@
'use strict';
const MongoSchemaCollection = require('../src/Adapters/Storage/Mongo/MongoSchemaCollection').default;
const MongoSchemaCollection = require('../lib/Adapters/Storage/Mongo/MongoSchemaCollection').default;
describe('MongoSchemaCollection', () => {
it('can transform legacy _client_permissions keys to parse format', done => {

View File

@@ -1,6 +1,6 @@
'use strict';
import MongoStorageAdapter from '../src/Adapters/Storage/Mongo/MongoStorageAdapter';
const MongoStorageAdapter = require('../lib/Adapters/Storage/Mongo/MongoStorageAdapter').default;
const { MongoClient } = require('mongodb');
const databaseURI = 'mongodb://localhost:27017/parseServerMongoAdapterTestDatabase';

View File

@@ -1,7 +1,7 @@
// These tests are unit tests designed to only test transform.js.
"use strict";
const transform = require('../src/Adapters/Storage/Mongo/MongoTransform');
const transform = require('../lib/Adapters/Storage/Mongo/MongoTransform');
const dd = require('deep-diff');
const mongodb = require('mongodb');
@@ -24,7 +24,7 @@ describe('parseObjectToMongoObjectForCreate', () => {
done();
});
it('built-in timestamps', (done) => {
it('built-in timestamps with date', (done) => {
const input = {
createdAt: "2015-10-06T21:24:50.332Z",
updatedAt: "2015-10-06T21:24:50.332Z"

View File

@@ -1,4 +1,4 @@
const NullCacheAdapter = require('../src/Adapters/Cache/NullCacheAdapter').default;
const NullCacheAdapter = require('../lib/Adapters/Cache/NullCacheAdapter').default;
describe('NullCacheAdapter', function() {
const KEY = 'hello';

View File

@@ -1,4 +1,4 @@
const OAuth = require("../src/Adapters/Auth/OAuth1Client");
const OAuth = require("../lib/Adapters/Auth/OAuth1Client");
describe('OAuth', function() {
it("Nonce should have right length", (done) => {

View File

@@ -1,8 +1,8 @@
// This is a port of the test suite:
// hungry/js/test/parse_acl_test.js
const rest = require('../src/rest');
const Config = require('../src/Config');
const auth = require('../src/Auth');
const rest = require('../lib/rest');
const Config = require('../lib/Config');
const auth = require('../lib/Auth');
describe('Parse.ACL', () => {
it("acl must be valid", (done) => {

View File

@@ -5,9 +5,9 @@
const request = require('request');
const rp = require('request-promise');
const Parse = require("parse/node");
const Config = require('../src/Config');
const SchemaController = require('../src/Controllers/SchemaController');
const TestUtils = require('../src/TestUtils');
const Config = require('../lib/Config');
const SchemaController = require('../lib/Controllers/SchemaController');
const TestUtils = require('../lib/TestUtils');
const userSchema = SchemaController.convertSchemaToAdapterSchema({ className: '_User', fields: Object.assign({}, SchemaController.defaultColumns._Default, SchemaController.defaultColumns._User) });
@@ -975,6 +975,25 @@ describe('miscellaneous', function() {
});
});
// Regression check: destroying an object whose ACL denies all access must fail
// with OBJECT_NOT_FOUND *before* the beforeDelete trigger runs — the trigger
// must never fire for an object the caller cannot see.
it('test beforeDelete with locked down ACL', async () => {
let called = false;
Parse.Cloud.beforeDelete('GameScore', (req, res) => {
called = true;
res.success();
});
const object = new Parse.Object('GameScore');
// An empty ACL grants no one read or write access.
object.setACL(new Parse.ACL());
await object.save();
// The locked-down object must be invisible to an unauthenticated query.
const objects = await new Parse.Query('GameScore').find();
expect(objects.length).toBe(0);
try {
await object.destroy();
} catch(e) {
expect(e.code).toBe(Parse.Error.OBJECT_NOT_FOUND);
}
// The trigger must not have been invoked.
expect(called).toBe(false);
});
it('test cloud function query parameters', (done) => {
Parse.Cloud.define('echoParams', (req, res) => {
res.success(req.params);

View File

@@ -1,4 +1,4 @@
const ParseCloudCodePublisher = require('../src/LiveQuery/ParseCloudCodePublisher').ParseCloudCodePublisher;
const ParseCloudCodePublisher = require('../lib/LiveQuery/ParseCloudCodePublisher').ParseCloudCodePublisher;
const Parse = require('parse/node');
describe('ParseCloudCodePublisher', function() {
@@ -14,7 +14,7 @@ describe('ParseCloudCodePublisher', function() {
on: jasmine.createSpy('on')
})
};
jasmine.mockLibrary('../src/LiveQuery/ParsePubSub', 'ParsePubSub', mockParsePubSub);
jasmine.mockLibrary('../lib/LiveQuery/ParsePubSub', 'ParsePubSub', mockParsePubSub);
done();
});
@@ -22,7 +22,7 @@ describe('ParseCloudCodePublisher', function() {
const config = {}
new ParseCloudCodePublisher(config);
const ParsePubSub = require('../src/LiveQuery/ParsePubSub').ParsePubSub;
const ParsePubSub = require('../lib/LiveQuery/ParsePubSub').ParsePubSub;
expect(ParsePubSub.createPublisher).toHaveBeenCalledWith(config);
});
@@ -64,6 +64,6 @@ describe('ParseCloudCodePublisher', function() {
});
afterEach(function(){
jasmine.restoreLibrary('../src/LiveQuery/ParsePubSub', 'ParsePubSub');
jasmine.restoreLibrary('../lib/LiveQuery/ParsePubSub', 'ParsePubSub');
});
});

View File

@@ -632,24 +632,6 @@ describe('Parse.File testing', () => {
});
});
it('fails to upload without a file name', done => {
const headers = {
'Content-Type': 'application/octet-stream',
'X-Parse-Application-Id': 'test',
'X-Parse-REST-API-Key': 'rest'
};
request.post({
headers: headers,
url: 'http://localhost:8378/1/files/',
body: 'yolo',
}, (error, response, body) => {
expect(error).toBe(null);
expect(response.statusCode).toBe(400);
expect(body).toEqual('{"code":122,"error":"Filename not provided."}');
done();
});
});
it('fails to delete an unkown file', done => {
const headers = {
'Content-Type': 'application/octet-stream',

View File

@@ -1,7 +1,7 @@
'use strict';
const request = require('request');
const Config = require('../src/Config');
const Config = require('../lib/Config');
describe('a GlobalConfig', () => {
beforeEach(done => {

View File

@@ -1,20 +1,28 @@
"use strict";
/* global describe, it, expect, fail, Parse */
const request = require('request');
const triggers = require('../src/triggers');
const HooksController = require('../src/Controllers/HooksController').default;
const triggers = require('../lib/triggers');
const HooksController = require('../lib/Controllers/HooksController').default;
const express = require("express");
const bodyParser = require('body-parser');
const port = 12345;
const hookServerURL = "http://localhost:" + port;
const AppCache = require('../src/cache').AppCache;
const app = express();
app.use(bodyParser.json({ 'type': '*/*' }))
app.listen(12345);
const AppCache = require('../lib/cache').AppCache;
describe('Hooks', () => {
let server;
let app;
beforeAll((done) => {
app = express();
app.use(bodyParser.json({ 'type': '*/*' }))
server = app.listen(12345, undefined, done);
});
afterAll((done) => {
server.close(done);
});
it("should have no hooks registered", (done) => {
Parse.Hooks.getFunctions().then((res) => {
expect(res.constructor).toBe(Array.prototype.constructor);
@@ -328,7 +336,7 @@ describe('Hooks', () => {
});
});
it("should run the function on the test server", (done) => {
it("should run the function on the test server (error handling)", (done) => {
app.post("/SomeFunctionError", function(req, res) {
res.json({error: {code: 1337, error: "hacking that one!"}});

View File

@@ -2,15 +2,15 @@
// These tests check the Installations functionality of the REST API.
// Ported from installation_collection_test.go
const auth = require('../src/Auth');
const Config = require('../src/Config');
const auth = require('../lib/Auth');
const Config = require('../lib/Config');
const Parse = require('parse/node').Parse;
const rest = require('../src/rest');
const rest = require('../lib/rest');
const request = require("request");
let config;
let database;
const defaultColumns = require('../src/Controllers/SchemaController').defaultColumns;
const defaultColumns = require('../lib/Controllers/SchemaController').defaultColumns;
const delay = function delay(delay) {
return new Promise(resolve => setTimeout(resolve, delay));

View File

@@ -1,6 +1,6 @@
const Parse = require('parse/node');
const ParseLiveQueryServer = require('../src/LiveQuery/ParseLiveQueryServer').ParseLiveQueryServer;
const ParseServer = require('../src/ParseServer').default;
const ParseLiveQueryServer = require('../lib/LiveQuery/ParseLiveQueryServer').ParseLiveQueryServer;
const ParseServer = require('../lib/ParseServer').default;
// Global mock info
const queryHashValue = 'hash';
@@ -11,7 +11,7 @@ describe('ParseLiveQueryServer', function() {
beforeEach(function(done) {
// Mock ParseWebSocketServer
const mockParseWebSocketServer = jasmine.createSpy('ParseWebSocketServer');
jasmine.mockLibrary('../src/LiveQuery/ParseWebSocketServer', 'ParseWebSocketServer', mockParseWebSocketServer);
jasmine.mockLibrary('../lib/LiveQuery/ParseWebSocketServer', 'ParseWebSocketServer', mockParseWebSocketServer);
// Mock Client
const mockClient = function(id, socket, hasMasterKey) {
this.pushConnect = jasmine.createSpy('pushConnect');
@@ -28,19 +28,19 @@ describe('ParseLiveQueryServer', function() {
this.hasMasterKey = hasMasterKey;
}
mockClient.pushError = jasmine.createSpy('pushError');
jasmine.mockLibrary('../src/LiveQuery/Client', 'Client', mockClient);
jasmine.mockLibrary('../lib/LiveQuery/Client', 'Client', mockClient);
// Mock Subscription
const mockSubscriotion = function() {
this.addClientSubscription = jasmine.createSpy('addClientSubscription');
this.deleteClientSubscription = jasmine.createSpy('deleteClientSubscription');
}
jasmine.mockLibrary('../src/LiveQuery/Subscription', 'Subscription', mockSubscriotion);
jasmine.mockLibrary('../lib/LiveQuery/Subscription', 'Subscription', mockSubscriotion);
// Mock queryHash
const mockQueryHash = jasmine.createSpy('matchesQuery').and.returnValue(queryHashValue);
jasmine.mockLibrary('../src/LiveQuery/QueryTools', 'queryHash', mockQueryHash);
jasmine.mockLibrary('../lib/LiveQuery/QueryTools', 'queryHash', mockQueryHash);
// Mock matchesQuery
const mockMatchesQuery = jasmine.createSpy('matchesQuery').and.returnValue(true);
jasmine.mockLibrary('../src/LiveQuery/QueryTools', 'matchesQuery', mockMatchesQuery);
jasmine.mockLibrary('../lib/LiveQuery/QueryTools', 'matchesQuery', mockMatchesQuery);
// Mock ParsePubSub
const mockParsePubSub = {
createPublisher: function() {
@@ -56,7 +56,7 @@ describe('ParseLiveQueryServer', function() {
}
}
};
jasmine.mockLibrary('../src/LiveQuery/ParsePubSub', 'ParsePubSub', mockParsePubSub);
jasmine.mockLibrary('../lib/LiveQuery/ParsePubSub', 'ParsePubSub', mockParsePubSub);
// Make mock SessionTokenCache
const mockSessionTokenCache = function(){
this.getUserId = function(sessionToken){
@@ -69,7 +69,7 @@ describe('ParseLiveQueryServer', function() {
return Parse.Promise.as(testUserId);
};
};
jasmine.mockLibrary('../src/LiveQuery/SessionTokenCache', 'SessionTokenCache', mockSessionTokenCache);
jasmine.mockLibrary('../lib/LiveQuery/SessionTokenCache', 'SessionTokenCache', mockSessionTokenCache);
done();
});
@@ -167,7 +167,7 @@ describe('ParseLiveQueryServer', function() {
};
parseLiveQueryServer._handleSubscribe(incompleteParseConn, {});
const Client = require('../src/LiveQuery/Client').Client;
const Client = require('../lib/LiveQuery/Client').Client;
expect(Client.pushError).toHaveBeenCalled();
});
@@ -273,7 +273,7 @@ describe('ParseLiveQueryServer', function() {
};
parseLiveQueryServer._handleUnsubscribe(incompleteParseConn, {});
const Client = require('../src/LiveQuery/Client').Client;
const Client = require('../lib/LiveQuery/Client').Client;
expect(Client.pushError).toHaveBeenCalled();
});
@@ -284,7 +284,7 @@ describe('ParseLiveQueryServer', function() {
};
parseLiveQueryServer._handleUnsubscribe(parseWebSocket, {});
const Client = require('../src/LiveQuery/Client').Client;
const Client = require('../lib/LiveQuery/Client').Client;
expect(Client.pushError).toHaveBeenCalled();
});
@@ -299,7 +299,7 @@ describe('ParseLiveQueryServer', function() {
};
parseLiveQueryServer._handleUnsubscribe(parseWebSocket, {});
const Client = require('../src/LiveQuery/Client').Client;
const Client = require('../lib/LiveQuery/Client').Client;
expect(Client.pushError).toHaveBeenCalled();
});
@@ -445,7 +445,7 @@ describe('ParseLiveQueryServer', function() {
const invalidRequest = '{}';
// Trigger message event
parseWebSocket.emit('message', invalidRequest);
const Client = require('../src/LiveQuery/Client').Client;
const Client = require('../lib/LiveQuery/Client').Client;
expect(Client.pushError).toHaveBeenCalled();
});
@@ -461,7 +461,7 @@ describe('ParseLiveQueryServer', function() {
const unknownRequest = '{"op":"unknown"}';
// Trigger message event
parseWebSocket.emit('message', unknownRequest);
const Client = require('../src/LiveQuery/Client').Client;
const Client = require('../lib/LiveQuery/Client').Client;
expect(Client.pushError).toHaveBeenCalled();
});
@@ -788,7 +788,7 @@ describe('ParseLiveQueryServer', function() {
const parseObject = {};
expect(parseLiveQueryServer._matchesSubscription(parseObject, subscription)).toBe(true);
// Make sure matchesQuery is called
const matchesQuery = require('../src/LiveQuery/QueryTools').matchesQuery;
const matchesQuery = require('../lib/LiveQuery/QueryTools').matchesQuery;
expect(matchesQuery).toHaveBeenCalledWith(parseObject, subscription.query);
});
@@ -1209,18 +1209,18 @@ describe('ParseLiveQueryServer', function() {
});
afterEach(function(){
jasmine.restoreLibrary('../src/LiveQuery/ParseWebSocketServer', 'ParseWebSocketServer');
jasmine.restoreLibrary('../src/LiveQuery/Client', 'Client');
jasmine.restoreLibrary('../src/LiveQuery/Subscription', 'Subscription');
jasmine.restoreLibrary('../src/LiveQuery/QueryTools', 'queryHash');
jasmine.restoreLibrary('../src/LiveQuery/QueryTools', 'matchesQuery');
jasmine.restoreLibrary('../src/LiveQuery/ParsePubSub', 'ParsePubSub');
jasmine.restoreLibrary('../src/LiveQuery/SessionTokenCache', 'SessionTokenCache');
jasmine.restoreLibrary('../lib/LiveQuery/ParseWebSocketServer', 'ParseWebSocketServer');
jasmine.restoreLibrary('../lib/LiveQuery/Client', 'Client');
jasmine.restoreLibrary('../lib/LiveQuery/Subscription', 'Subscription');
jasmine.restoreLibrary('../lib/LiveQuery/QueryTools', 'queryHash');
jasmine.restoreLibrary('../lib/LiveQuery/QueryTools', 'matchesQuery');
jasmine.restoreLibrary('../lib/LiveQuery/ParsePubSub', 'ParsePubSub');
jasmine.restoreLibrary('../lib/LiveQuery/SessionTokenCache', 'SessionTokenCache');
});
// Helper functions to add mock client and subscription to a liveQueryServer
function addMockClient(parseLiveQueryServer, clientId) {
const Client = require('../src/LiveQuery/Client').Client;
const Client = require('../lib/LiveQuery/Client').Client;
const client = new Client(clientId, {});
parseLiveQueryServer.clients.set(clientId, client);
return client;

View File

@@ -1980,4 +1980,31 @@ describe('Parse.Object testing', () => {
done();
})
});
// Verifies that repeated set/unset cycles on a JSON field round-trip exactly:
// the value read back (locally and after a server fetch) is byte-for-byte the
// last object that was sent, with no stale keys merged in from earlier saves.
it('Update object field should store exactly same sent object', async (done) => {
  let object = new TestObject();
  // Set initial data
  object.set("jsonData", { a: "b" });
  object = await object.save();
  equal(object.get('jsonData'), { a: "b" });
  // Replacing with an empty object must clear the previous keys.
  object.set("jsonData", {});
  object = await object.save();
  equal(object.get('jsonData'), {});
  // unset followed by set in the same save must behave like a plain replace.
  object.unset('jsonData');
  object.set("jsonData", { c: "d" });
  object = await object.save();
  equal(object.get('jsonData'), { c: "d" });
  // Re-fetch from the server to confirm the persisted value, not just the
  // local cache, matches the last write.
  object = await object.fetch();
  equal(object.get('jsonData'), { c: "d" });
  done();
});
});

View File

@@ -1,5 +1,5 @@
const TestObject = Parse.Object.extend('TestObject');
import MongoStorageAdapter from '../src/Adapters/Storage/Mongo/MongoStorageAdapter';
const MongoStorageAdapter = require('../lib/Adapters/Storage/Mongo/MongoStorageAdapter').default;
const mongoURI = 'mongodb://localhost:27017/parseServerMongoAdapterTestDatabase';
const rp = require('request-promise');
const defaultHeaders = {
@@ -8,6 +8,9 @@ const defaultHeaders = {
}
describe('Parse.Polygon testing', () => {
beforeAll(() => require('../lib/TestUtils').destroyAllDataPermanently());
it('polygon save open path', (done) => {
const coords = [[0,0],[0,1],[1,1],[1,0]];
const closed = [[0,0],[0,1],[1,1],[1,0],[0,0]];
@@ -128,144 +131,150 @@ describe('Parse.Polygon testing', () => {
}, done.fail);
});
it('polygonContain query', (done) => {
const points1 = [[0,0],[0,1],[1,1],[1,0]];
const points2 = [[0,0],[0,2],[2,2],[2,0]];
const points3 = [[10,10],[10,15],[15,15],[15,10],[10,10]];
const polygon1 = new Parse.Polygon(points1);
const polygon2 = new Parse.Polygon(points2);
const polygon3 = new Parse.Polygon(points3);
const obj1 = new TestObject({location: polygon1});
const obj2 = new TestObject({location: polygon2});
const obj3 = new TestObject({location: polygon3});
Parse.Object.saveAll([obj1, obj2, obj3]).then(() => {
const where = {
location: {
$geoIntersects: {
$point: { __type: 'GeoPoint', latitude: 0.5, longitude: 0.5 }
}
}
};
return rp.post({
url: Parse.serverURL + '/classes/TestObject',
json: { where, '_method': 'GET' },
headers: {
'X-Parse-Application-Id': Parse.applicationId,
'X-Parse-Javascript-Key': Parse.javaScriptKey
}
});
}).then((resp) => {
expect(resp.results.length).toBe(2);
done();
}, done.fail);
});
describe('with location', () => {
beforeAll(() => require('../lib/TestUtils').destroyAllDataPermanently());
it('polygonContain query no reverse input (Regression test for #4608)', (done) => {
const points1 = [[.25,0],[.25,1.25],[.75,1.25],[.75,0]];
const points2 = [[0,0],[0,2],[2,2],[2,0]];
const points3 = [[10,10],[10,15],[15,15],[15,10],[10,10]];
const polygon1 = new Parse.Polygon(points1);
const polygon2 = new Parse.Polygon(points2);
const polygon3 = new Parse.Polygon(points3);
const obj1 = new TestObject({location: polygon1});
const obj2 = new TestObject({location: polygon2});
const obj3 = new TestObject({location: polygon3});
Parse.Object.saveAll([obj1, obj2, obj3]).then(() => {
const where = {
location: {
$geoIntersects: {
$point: { __type: 'GeoPoint', latitude: 0.5, longitude:1.0 }
it('polygonContain query', (done) => {
const points1 = [[0,0],[0,1],[1,1],[1,0]];
const points2 = [[0,0],[0,2],[2,2],[2,0]];
const points3 = [[10,10],[10,15],[15,15],[15,10],[10,10]];
const polygon1 = new Parse.Polygon(points1);
const polygon2 = new Parse.Polygon(points2);
const polygon3 = new Parse.Polygon(points3);
const obj1 = new TestObject({location: polygon1});
const obj2 = new TestObject({location: polygon2});
const obj3 = new TestObject({location: polygon3});
Parse.Object.saveAll([obj1, obj2, obj3]).then(() => {
const where = {
location: {
$geoIntersects: {
$point: { __type: 'GeoPoint', latitude: 0.5, longitude: 0.5 }
}
}
}
};
return rp.post({
url: Parse.serverURL + '/classes/TestObject',
json: { where, '_method': 'GET' },
headers: {
'X-Parse-Application-Id': Parse.applicationId,
'X-Parse-Javascript-Key': Parse.javaScriptKey
}
});
}).then((resp) => {
expect(resp.results.length).toBe(2);
done();
}, done.fail);
});
};
return rp.post({
url: Parse.serverURL + '/classes/TestObject',
json: { where, '_method': 'GET' },
headers: {
'X-Parse-Application-Id': Parse.applicationId,
'X-Parse-Javascript-Key': Parse.javaScriptKey
}
});
}).then((resp) => {
expect(resp.results.length).toBe(2);
done();
}, done.fail);
});
it('polygonContain query real data (Regression test for #4608)', (done) => {
const detroit = [[42.631655189280224,-83.78406753121705],[42.633047793854814,-83.75333640366955],[42.61625254348911,-83.75149921669944],[42.61526926650296,-83.78161794858735],[42.631655189280224,-83.78406753121705]];
const polygon = new Parse.Polygon(detroit);
const obj = new TestObject({location: polygon});
obj.save().then(() => {
const where = {
location: {
$geoIntersects: {
$point: { __type: 'GeoPoint', latitude: 42.624599, longitude:-83.770162 }
it('polygonContain query no reverse input (Regression test for #4608)', (done) => {
const points1 = [[.25,0],[.25,1.25],[.75,1.25],[.75,0]];
const points2 = [[0,0],[0,2],[2,2],[2,0]];
const points3 = [[10,10],[10,15],[15,15],[15,10],[10,10]];
const polygon1 = new Parse.Polygon(points1);
const polygon2 = new Parse.Polygon(points2);
const polygon3 = new Parse.Polygon(points3);
const obj1 = new TestObject({location: polygon1});
const obj2 = new TestObject({location: polygon2});
const obj3 = new TestObject({location: polygon3});
Parse.Object.saveAll([obj1, obj2, obj3]).then(() => {
const where = {
location: {
$geoIntersects: {
$point: { __type: 'GeoPoint', latitude: 0.5, longitude:1.0 }
}
}
}
};
return rp.post({
url: Parse.serverURL + '/classes/TestObject',
json: { where, '_method': 'GET' },
headers: {
'X-Parse-Application-Id': Parse.applicationId,
'X-Parse-Javascript-Key': Parse.javaScriptKey
}
});
}).then((resp) => {
expect(resp.results.length).toBe(1);
done();
}, done.fail);
});
};
return rp.post({
url: Parse.serverURL + '/classes/TestObject',
json: { where, '_method': 'GET' },
headers: {
'X-Parse-Application-Id': Parse.applicationId,
'X-Parse-Javascript-Key': Parse.javaScriptKey
}
});
}).then((resp) => {
expect(resp.results.length).toBe(2);
done();
}, done.fail);
});
it('polygonContain invalid input', (done) => {
const points = [[0,0],[0,1],[1,1],[1,0]];
const polygon = new Parse.Polygon(points);
const obj = new TestObject({location: polygon});
obj.save().then(() => {
const where = {
location: {
$geoIntersects: {
$point: { __type: 'GeoPoint', latitude: 181, longitude: 181 }
it('polygonContain query real data (Regression test for #4608)', (done) => {
const detroit = [[42.631655189280224,-83.78406753121705],[42.633047793854814,-83.75333640366955],[42.61625254348911,-83.75149921669944],[42.61526926650296,-83.78161794858735],[42.631655189280224,-83.78406753121705]];
const polygon = new Parse.Polygon(detroit);
const obj = new TestObject({location: polygon});
obj.save().then(() => {
const where = {
location: {
$geoIntersects: {
$point: { __type: 'GeoPoint', latitude: 42.624599, longitude:-83.770162 }
}
}
}
};
return rp.post({
url: Parse.serverURL + '/classes/TestObject',
json: { where, '_method': 'GET' },
headers: {
'X-Parse-Application-Id': Parse.applicationId,
'X-Parse-Javascript-Key': Parse.javaScriptKey
}
});
}).then(done.fail, () => done());
});
};
return rp.post({
url: Parse.serverURL + '/classes/TestObject',
json: { where, '_method': 'GET' },
headers: {
'X-Parse-Application-Id': Parse.applicationId,
'X-Parse-Javascript-Key': Parse.javaScriptKey
}
});
}).then((resp) => {
expect(resp.results.length).toBe(1);
done();
}, done.fail);
});
it('polygonContain invalid geoPoint', (done) => {
const points = [[0,0],[0,1],[1,1],[1,0]];
const polygon = new Parse.Polygon(points);
const obj = new TestObject({location: polygon});
obj.save().then(() => {
const where = {
location: {
$geoIntersects: {
$point: []
it('polygonContain invalid input', (done) => {
const points = [[0,0],[0,1],[1,1],[1,0]];
const polygon = new Parse.Polygon(points);
const obj = new TestObject({location: polygon});
obj.save().then(() => {
const where = {
location: {
$geoIntersects: {
$point: { __type: 'GeoPoint', latitude: 181, longitude: 181 }
}
}
}
};
return rp.post({
url: Parse.serverURL + '/classes/TestObject',
json: { where, '_method': 'GET' },
headers: {
'X-Parse-Application-Id': Parse.applicationId,
'X-Parse-Javascript-Key': Parse.javaScriptKey
}
});
}).then(done.fail, () => done());
};
return rp.post({
url: Parse.serverURL + '/classes/TestObject',
json: { where, '_method': 'GET' },
headers: {
'X-Parse-Application-Id': Parse.applicationId,
'X-Parse-Javascript-Key': Parse.javaScriptKey
}
});
}).then(done.fail, () => done());
});
// A malformed $point (empty array instead of a GeoPoint object) must be
// rejected by the server, so the request is expected to fail.
it('polygonContain invalid geoPoint', (done) => {
const points = [[0,0],[0,1],[1,1],[1,0]];
const polygon = new Parse.Polygon(points);
const obj = new TestObject({location: polygon});
obj.save().then(() => {
const where = {
location: {
$geoIntersects: {
$point: []
}
}
};
return rp.post({
url: Parse.serverURL + '/classes/TestObject',
json: { where, '_method': 'GET' },
headers: {
'X-Parse-Application-Id': Parse.applicationId,
'X-Parse-Javascript-Key': Parse.javaScriptKey
}
});
// Success would be a bug: resolve -> fail the test, reject -> pass.
}).then(done.fail, () => done());
});
});
});
describe_only_db('mongo')('Parse.Polygon testing', () => {
beforeEach(() => require('../lib/TestUtils').destroyAllDataPermanently());
it('support 2d and 2dsphere', (done) => {
const coords = [[0,0],[0,1],[1,1],[1,0],[0,0]];
// testings against REST API, use raw formats
@@ -314,7 +323,7 @@ describe_only_db('mongo')('Parse.Polygon testing', () => {
});
it('polygon coordinates reverse input', (done) => {
const Config = require('../src/Config');
const Config = require('../lib/Config');
const config = Config.get('test');
// When stored the first point should be the last point

View File

@@ -1,4 +1,4 @@
const ParsePubSub = require('../src/LiveQuery/ParsePubSub').ParsePubSub;
const ParsePubSub = require('../lib/LiveQuery/ParsePubSub').ParsePubSub;
describe('ParsePubSub', function() {
@@ -8,13 +8,13 @@ describe('ParsePubSub', function() {
createPublisher: jasmine.createSpy('createPublisherRedis'),
createSubscriber: jasmine.createSpy('createSubscriberRedis')
};
jasmine.mockLibrary('../src/Adapters/PubSub/RedisPubSub', 'RedisPubSub', mockRedisPubSub);
jasmine.mockLibrary('../lib/Adapters/PubSub/RedisPubSub', 'RedisPubSub', mockRedisPubSub);
// Mock EventEmitterPubSub
const mockEventEmitterPubSub = {
createPublisher: jasmine.createSpy('createPublisherEventEmitter'),
createSubscriber: jasmine.createSpy('createSubscriberEventEmitter')
};
jasmine.mockLibrary('../src/Adapters/PubSub/EventEmitterPubSub', 'EventEmitterPubSub', mockEventEmitterPubSub);
jasmine.mockLibrary('../lib/Adapters/PubSub/EventEmitterPubSub', 'EventEmitterPubSub', mockEventEmitterPubSub);
done();
});
@@ -23,8 +23,8 @@ describe('ParsePubSub', function() {
redisURL: 'redisURL'
});
const RedisPubSub = require('../src/Adapters/PubSub/RedisPubSub').RedisPubSub;
const EventEmitterPubSub = require('../src/Adapters/PubSub/EventEmitterPubSub').EventEmitterPubSub;
const RedisPubSub = require('../lib/Adapters/PubSub/RedisPubSub').RedisPubSub;
const EventEmitterPubSub = require('../lib/Adapters/PubSub/EventEmitterPubSub').EventEmitterPubSub;
expect(RedisPubSub.createPublisher).toHaveBeenCalledWith({redisURL: 'redisURL'});
expect(EventEmitterPubSub.createPublisher).not.toHaveBeenCalled();
});
@@ -32,8 +32,8 @@ describe('ParsePubSub', function() {
it('can create event emitter publisher', function() {
ParsePubSub.createPublisher({});
const RedisPubSub = require('../src/Adapters/PubSub/RedisPubSub').RedisPubSub;
const EventEmitterPubSub = require('../src/Adapters/PubSub/EventEmitterPubSub').EventEmitterPubSub;
const RedisPubSub = require('../lib/Adapters/PubSub/RedisPubSub').RedisPubSub;
const EventEmitterPubSub = require('../lib/Adapters/PubSub/EventEmitterPubSub').EventEmitterPubSub;
expect(RedisPubSub.createPublisher).not.toHaveBeenCalled();
expect(EventEmitterPubSub.createPublisher).toHaveBeenCalled();
});
@@ -43,8 +43,8 @@ describe('ParsePubSub', function() {
redisURL: 'redisURL'
});
const RedisPubSub = require('../src/Adapters/PubSub/RedisPubSub').RedisPubSub;
const EventEmitterPubSub = require('../src/Adapters/PubSub/EventEmitterPubSub').EventEmitterPubSub;
const RedisPubSub = require('../lib/Adapters/PubSub/RedisPubSub').RedisPubSub;
const EventEmitterPubSub = require('../lib/Adapters/PubSub/EventEmitterPubSub').EventEmitterPubSub;
expect(RedisPubSub.createSubscriber).toHaveBeenCalledWith({redisURL: 'redisURL'});
expect(EventEmitterPubSub.createSubscriber).not.toHaveBeenCalled();
});
@@ -52,8 +52,8 @@ describe('ParsePubSub', function() {
it('can create event emitter subscriber', function() {
ParsePubSub.createSubscriber({});
const RedisPubSub = require('../src/Adapters/PubSub/RedisPubSub').RedisPubSub;
const EventEmitterPubSub = require('../src/Adapters/PubSub/EventEmitterPubSub').EventEmitterPubSub;
const RedisPubSub = require('../lib/Adapters/PubSub/RedisPubSub').RedisPubSub;
const EventEmitterPubSub = require('../lib/Adapters/PubSub/EventEmitterPubSub').EventEmitterPubSub;
expect(RedisPubSub.createSubscriber).not.toHaveBeenCalled();
expect(EventEmitterPubSub.createSubscriber).toHaveBeenCalled();
});
@@ -73,8 +73,8 @@ describe('ParsePubSub', function() {
});
expect(adapter.createSubscriber).toHaveBeenCalled();
const RedisPubSub = require('../src/Adapters/PubSub/RedisPubSub').RedisPubSub;
const EventEmitterPubSub = require('../src/Adapters/PubSub/EventEmitterPubSub').EventEmitterPubSub;
const RedisPubSub = require('../lib/Adapters/PubSub/RedisPubSub').RedisPubSub;
const EventEmitterPubSub = require('../lib/Adapters/PubSub/EventEmitterPubSub').EventEmitterPubSub;
expect(RedisPubSub.createSubscriber).not.toHaveBeenCalled();
expect(EventEmitterPubSub.createSubscriber).not.toHaveBeenCalled();
expect(RedisPubSub.createPublisher).not.toHaveBeenCalled();
@@ -100,8 +100,8 @@ describe('ParsePubSub', function() {
});
expect(adapter.createSubscriber).toHaveBeenCalled();
const RedisPubSub = require('../src/Adapters/PubSub/RedisPubSub').RedisPubSub;
const EventEmitterPubSub = require('../src/Adapters/PubSub/EventEmitterPubSub').EventEmitterPubSub;
const RedisPubSub = require('../lib/Adapters/PubSub/RedisPubSub').RedisPubSub;
const EventEmitterPubSub = require('../lib/Adapters/PubSub/EventEmitterPubSub').EventEmitterPubSub;
expect(RedisPubSub.createSubscriber).not.toHaveBeenCalled();
expect(EventEmitterPubSub.createSubscriber).not.toHaveBeenCalled();
expect(RedisPubSub.createPublisher).not.toHaveBeenCalled();
@@ -109,7 +109,7 @@ describe('ParsePubSub', function() {
});
afterEach(function(){
jasmine.restoreLibrary('../src/Adapters/PubSub/RedisPubSub', 'RedisPubSub');
jasmine.restoreLibrary('../src/Adapters/PubSub/EventEmitterPubSub', 'EventEmitterPubSub');
jasmine.restoreLibrary('../lib/Adapters/PubSub/RedisPubSub', 'RedisPubSub');
jasmine.restoreLibrary('../lib/Adapters/PubSub/EventEmitterPubSub', 'EventEmitterPubSub');
});
});

View File

@@ -164,6 +164,124 @@ describe('Parse.Query Aggregate testing', () => {
});
});
// Groups all TestObjects by (day, month, year) of their timestamps and checks
// that the $dayOfMonth/$month/$year aggregate operators round-trip correctly.
it('group by date object transform', (done) => {
const obj1 = new TestObject();
const obj2 = new TestObject();
const obj3 = new TestObject();
const pipeline = [{
group: {
objectId: { day: { $dayOfMonth: "$updatedAt" }, month: { $month: "$createdAt" }, year: { $year: "$createdAt" } },
count: { $sum: 1 }
}
}];
Parse.Object.saveAll([obj1, obj2, obj3]).then(() => {
const query = new Parse.Query(TestObject);
return query.aggregate(pipeline);
}).then((results) => {
const createdAt = new Date(obj1.createdAt);
expect(results[0].objectId.day).toEqual(createdAt.getUTCDate());
// Fix: use the UTC month to match the UTC day/year accessors above — the
// aggregate date operators work on the stored (UTC) timestamp, so the
// local-time getMonth() could be off by one near a month boundary.
expect(results[0].objectId.month).toEqual(createdAt.getUTCMonth() + 1);
expect(results[0].objectId.year).toEqual(createdAt.getUTCFullYear());
done();
// Surface aggregation errors instead of letting the test time out.
}).catch(done.fail);
});
// Collapses every row into one group whose `total` sums quantity * price
// per object: 2*10 + 5*5 = 45.
it_exclude_dbs(['postgres'])('group and multiply transform', (done) => {
const itemA = new TestObject({ name: 'item a', quantity: 2, price: 10 });
const itemB = new TestObject({ name: 'item b', quantity: 5, price: 5 });
const stages = [{
group: {
objectId: null,
total: { $sum: { $multiply: [ '$quantity', '$price' ] } }
}
}];
Parse.Object.saveAll([itemA, itemB])
.then(() => new Parse.Query(TestObject).aggregate(stages))
.then((results) => {
expect(results.length).toEqual(1);
expect(results[0].total).toEqual(45);
done();
});
});
// Projects a computed field (quantity * price) per row; order of results is
// not guaranteed, hence the branch on which item comes back first.
it_exclude_dbs(['postgres'])('project and multiply transform', (done) => {
const obj1 = new TestObject({ name: 'item a', quantity: 2, price: 10 });
const obj2 = new TestObject({ name: 'item b', quantity: 5, price: 5 });
const pipeline = [
{
match: { quantity: { $exists: true } }
},
{
project: {
name: 1,
total: { $multiply: [ '$quantity', '$price' ] }
}
}
];
Parse.Object.saveAll([obj1, obj2]).then(() => {
const query = new Parse.Query(TestObject);
return query.aggregate(pipeline);
}).then((results) => {
expect(results.length).toEqual(2);
// item a: 2*10 = 20; item b: 5*5 = 25 — accept either ordering.
if (results[0].name === 'item a') {
expect(results[0].total).toEqual(20);
expect(results[1].total).toEqual(25);
}
else {
expect(results[0].total).toEqual(25);
expect(results[1].total).toEqual(20);
}
done();
});
});
// `objectId: 0` in a projection must exclude the id from the results; a sort
// stage makes the expected order deterministic (20 before 25).
it_exclude_dbs(['postgres'])('project without objectId transform', (done) => {
const obj1 = new TestObject({ name: 'item a', quantity: 2, price: 10 });
const obj2 = new TestObject({ name: 'item b', quantity: 5, price: 5 });
const pipeline = [
{
match: { quantity: { $exists: true } }
},
{
project: {
objectId: 0,
total: { $multiply: [ '$quantity', '$price' ] }
}
},
{
sort: { total: 1 }
}
];
Parse.Object.saveAll([obj1, obj2]).then(() => {
const query = new Parse.Query(TestObject);
return query.aggregate(pipeline);
}).then((results) => {
expect(results.length).toEqual(2);
expect(results[0].total).toEqual(20);
expect(results[0].objectId).toEqual(undefined);
expect(results[1].total).toEqual(25);
expect(results[1].objectId).toEqual(undefined);
done();
});
});
// Projecting only updatedAt (with objectId excluded) must yield rows that
// contain updatedAt and nothing else id-related.
// NOTE(review): assumes the suite seeds exactly four TestObjects — verify
// against the describe-level setup (outside this excerpt).
it_exclude_dbs(['postgres'])('project updatedAt only transform', (done) => {
const pipeline = [{
project: { objectId: 0, updatedAt: 1 }
}];
const query = new Parse.Query(TestObject);
query.aggregate(pipeline).then((results) => {
expect(results.length).toEqual(4);
for (let i = 0; i < results.length; i++) {
const item = results[i];
expect(item.hasOwnProperty('updatedAt')).toEqual(true);
expect(item.hasOwnProperty('objectId')).toEqual(false);
}
done();
});
});
it_exclude_dbs(['postgres'])('cannot group by date field (excluding createdAt and updatedAt)', (done) => {
const obj1 = new TestObject({ dateField: new Date(1990, 11, 1) });
const obj2 = new TestObject({ dateField: new Date(1990, 5, 1) });
@@ -339,6 +457,27 @@ describe('Parse.Query Aggregate testing', () => {
}).catch(done.fail);
});
// $lt against a Date in a match stage: only yesterday's and today's objects
// fall strictly before `tomorrow`.
it('match comparison date query', (done) => {
const today = new Date();
const yesterday = new Date();
const tomorrow = new Date();
// setDate rolls over month/year boundaries automatically.
yesterday.setDate(today.getDate() - 1);
tomorrow.setDate(today.getDate() + 1);
const obj1 = new TestObject({ dateField: yesterday });
const obj2 = new TestObject({ dateField: today });
const obj3 = new TestObject({ dateField: tomorrow });
const pipeline = [
{ match: { dateField: { $lt: tomorrow } } }
];
Parse.Object.saveAll([obj1, obj2, obj3]).then(() => {
const query = new Parse.Query(TestObject);
return query.aggregate(pipeline);
}).then((results) => {
expect(results.length).toBe(2);
done();
});
});
it('match comparison query', (done) => {
const options = Object.assign({}, masterKeyOptions, {
body: {
@@ -474,6 +613,96 @@ describe('Parse.Query Aggregate testing', () => {
});
});
// A $exists match on `score` should keep every seeded object (the suite
// expects four of them).
it_exclude_dbs(['postgres'])('match exists query', (done) => {
const stages = [{ match: { score: { $exists: true } } }];
new Parse.Query(TestObject)
.aggregate(stages)
.then((results) => {
expect(results.length).toEqual(4);
done();
});
});
// createdAt can be used directly in a match stage with a Date comparison.
it('match date query - createdAt', (done) => {
const obj1 = new TestObject();
const obj2 = new TestObject();
Parse.Object.saveAll([obj1, obj2]).then(() => {
const now = new Date();
// Local midnight of today — everything created during this test run is >= it.
const today = new Date(now.getFullYear(), now.getMonth(), now.getDate());
const pipeline = [
{ match: { 'createdAt': { $gte: today } } }
];
const query = new Parse.Query(TestObject);
return query.aggregate(pipeline);
}).then((results) => {
// Four objects were created initially, we added two more.
expect(results.length).toEqual(6);
done();
});
});
// Same as the createdAt variant, but matching on updatedAt.
it('match date query - updatedAt', (done) => {
const obj1 = new TestObject();
const obj2 = new TestObject();
Parse.Object.saveAll([obj1, obj2]).then(() => {
const now = new Date();
// Local midnight of today.
const today = new Date(now.getFullYear(), now.getMonth(), now.getDate());
const pipeline = [
{ match: { 'updatedAt': { $gte: today } } }
];
const query = new Parse.Query(TestObject);
return query.aggregate(pipeline);
}).then((results) => {
// Four objects were added initially, we added two more.
expect(results.length).toEqual(6);
done();
});
});
// Matching createdAt against an exact Date one month in the future must
// return nothing.
it('match date query - empty', (done) => {
const obj1 = new TestObject();
const obj2 = new TestObject();
Parse.Object.saveAll([obj1, obj2]).then(() => {
const now = new Date();
// Note: Date months are 0-indexed; +1 rolls forward one calendar month.
const future = new Date(now.getFullYear(), now.getMonth() + 1, now.getDate());
const pipeline = [
{ match: { 'createdAt': future } }
];
const query = new Parse.Query(TestObject);
return query.aggregate(pipeline);
}).then((results) => {
expect(results.length).toEqual(0);
done();
});
});
// Pointer fields must support operator queries ($exists) in a match stage,
// and the projected pointer must still expose its objectId.
it_exclude_dbs(['postgres'])('match pointer with operator query', (done) => {
const pointer = new PointerObject();
const obj1 = new TestObject({ pointer });
const obj2 = new TestObject({ pointer });
const obj3 = new TestObject();
Parse.Object.saveAll([pointer, obj1, obj2, obj3]).then(() => {
const pipeline = [
{ match: { pointer: { $exists: true } } }
];
const query = new Parse.Query(TestObject);
return query.aggregate(pipeline);
}).then((results) => {
// Only obj1 and obj2 carry the pointer; obj3 must be filtered out.
expect(results.length).toEqual(2);
expect(results[0].pointer.objectId).toEqual(pointer.id);
expect(results[1].pointer.objectId).toEqual(pointer.id);
// Result order is not guaranteed, so check membership rather than position.
expect(results.some(result => result.objectId === obj1.id)).toEqual(true);
expect(results.some(result => result.objectId === obj2.id)).toEqual(true);
done();
});
});
it('project query', (done) => {
const options = Object.assign({}, masterKeyOptions, {
body: {
@@ -512,6 +741,26 @@ describe('Parse.Query Aggregate testing', () => {
}).catch(done.fail);
});
// Projection must be able to include a pointer field alongside plain fields
// and the createdAt timestamp.
it('project pointer query', (done) => {
const pointer = new PointerObject();
const obj = new TestObject({ pointer, name: 'hello' });
obj.save().then(() => {
const pipeline = [
{ match: { objectId: obj.id } },
{ project: { pointer: 1, name: 1, createdAt: 1 } }
];
const query = new Parse.Query(TestObject);
return query.aggregate(pipeline);
}).then((results) => {
expect(results.length).toEqual(1);
expect(results[0].name).toEqual('hello');
expect(results[0].createdAt).not.toBe(undefined);
expect(results[0].pointer.objectId).toEqual(pointer.id);
done();
});
});
it('project with group query', (done) => {
const options = Object.assign({}, masterKeyOptions, {
body: {
@@ -744,4 +993,47 @@ describe('Parse.Query Aggregate testing', () => {
fail(err);
});
});
// Regression-style test: a pipeline may repeat a stage type (two `match`
// stages here). The pipeline manually dereferences a Parse pointer via
// $substr + $lookup + $unwind against the raw mongo collection.
it_exclude_dbs(['postgres'])('aggregate allow multiple of same stage', (done) => {
const pointer1 = new TestObject({ value: 1});
const pointer2 = new TestObject({ value: 2});
const pointer3 = new TestObject({ value: 3});
const obj1 = new TestObject({ pointer: pointer1, name: 'Hello' });
const obj2 = new TestObject({ pointer: pointer2, name: 'Hello' });
const obj3 = new TestObject({ pointer: pointer3, name: 'World' });
const options = Object.assign({}, masterKeyOptions, {
body: [{
match: { name: "Hello" },
}, {
// Transform className$objectId to objectId and store in new field tempPointer
project: {
tempPointer: { $substr: [ "$_p_pointer", 11, -1 ] }, // Remove TestObject$
},
}, {
// Left Join, replace objectId stored in tempPointer with an actual object
lookup: {
from: "test_TestObject",
localField: "tempPointer",
foreignField: "_id",
as: "tempPointer"
},
}, {
// lookup returns an array, Deconstructs an array field to objects
unwind: {
path: "$tempPointer",
},
}, {
// Second match stage: keep only the row whose joined pointer has value 2.
match : { "tempPointer.value" : 2 },
}]
});
Parse.Object.saveAll([pointer1, pointer2, pointer3, obj1, obj2, obj3]).then(() => {
return rp.get(Parse.serverURL + '/aggregate/TestObject', options);
}).then((resp) => {
expect(resp.results.length).toEqual(1);
expect(resp.results[0].tempPointer.value).toEqual(2);
done();
});
});
});

View File

@@ -1,8 +1,8 @@
'use strict';
import MongoStorageAdapter from '../src/Adapters/Storage/Mongo/MongoStorageAdapter';
const MongoStorageAdapter = require('../lib/Adapters/Storage/Mongo/MongoStorageAdapter').default;
const mongoURI = 'mongodb://localhost:27017/parseServerMongoAdapterTestDatabase';
import PostgresStorageAdapter from '../src/Adapters/Storage/Postgres/PostgresStorageAdapter';
const PostgresStorageAdapter = require('../lib/Adapters/Storage/Postgres/PostgresStorageAdapter').default;
const postgresURI = 'postgres://localhost:5432/parse_server_postgres_adapter_test_database';
const Parse = require('parse/node');
const rp = require('request-promise');
@@ -280,7 +280,7 @@ describe('Parse.Query Full Text Search testing', () => {
});
});
describe_only_db('mongo')('Parse.Query Full Text Search testing', () => {
describe_only_db('mongo')('[mongodb] Parse.Query Full Text Search testing', () => {
it('fullTextSearch: does not create text index if compound index exist', (done) => {
fullTextHelper().then(() => {
return databaseAdapter.dropAllIndexes('TestObject');
@@ -451,7 +451,7 @@ describe_only_db('mongo')('Parse.Query Full Text Search testing', () => {
});
});
describe_only_db('postgres')('Parse.Query Full Text Search testing', () => {
describe_only_db('postgres')('[postgres] Parse.Query Full Text Search testing', () => {
it('fullTextSearch: $diacriticSensitive - false', (done) => {
fullTextHelper().then(() => {
const where = {

View File

@@ -1538,6 +1538,90 @@ describe('Parse.Query testing', () => {
});
});
// Sorting must work on a nested object key ('sortField.value') in both
// directions: "Z" first when descending, "A" first when ascending.
it('can order on an object string field', function (done) {
const rows = ["Z", "A", "M"].map(
(value) => new Parse.Object('Test', { sortField: { value } })
);
Parse.Object.saveAll(rows)
.then(() => new Parse.Query('Test').addDescending('sortField.value').first())
.then((highest) => {
expect(highest.get('sortField').value).toBe("Z");
return new Parse.Query('Test').addAscending('sortField.value').first()
})
.then((lowest) => {
expect(lowest.get('sortField').value).toBe("A");
done();
})
.catch(done.fail);
});
// Same as the level-1 test, but the sort key is nested two levels deep.
it('can order on an object string field (level 2)', function (done) {
const testSet = [
{ sortField: { value: { field: "Z" } } },
{ sortField: { value: { field: "A" } } },
{ sortField: { value: { field: "M" } } },
];
const objects = testSet.map(e => new Parse.Object('Test', e));
Parse.Object.saveAll(objects)
.then(() => new Parse.Query('Test').addDescending('sortField.value.field').first())
.then((result) => {
expect(result.get('sortField').value.field).toBe("Z");
return new Parse.Query('Test').addAscending('sortField.value.field').first()
})
.then((result) => {
expect(result.get('sortField').value.field).toBe("A");
done();
})
.catch(done.fail);
});
// Numeric variant of the nested-key sort test: 10 first descending, 1 first
// ascending.
it('can order on an object number field', function (done) {
const testSet = [
{ sortField: { value: 10 } },
{ sortField: { value: 1 } },
{ sortField: { value: 5 } },
];
const objects = testSet.map(e => new Parse.Object('Test', e));
Parse.Object.saveAll(objects)
.then(() => new Parse.Query('Test').addDescending('sortField.value').first())
.then((result) => {
expect(result.get('sortField').value).toBe(10);
return new Parse.Query('Test').addAscending('sortField.value').first()
})
.then((result) => {
expect(result.get('sortField').value).toBe(1);
done();
})
.catch(done.fail);
});
// Numeric sort on a key nested two levels deep.
it('can order on an object number field (level 2)', function (done) {
const testSet = [
{ sortField: { value: { field: 10 } } },
{ sortField: { value: { field: 1 } } },
{ sortField: { value: { field: 5 } } },
];
const objects = testSet.map(e => new Parse.Object('Test', e));
Parse.Object.saveAll(objects)
.then(() => new Parse.Query('Test').addDescending('sortField.value.field').first())
.then((result) => {
expect(result.get('sortField').value.field).toBe(10);
return new Parse.Query('Test').addAscending('sortField.value.field').first()
})
.then((result) => {
expect(result.get('sortField').value.field).toBe(1);
done();
})
.catch(done.fail);
});
it("order by ascending number then descending string", function(done) {
const strings = ["a", "b", "c", "d"];
const makeBoxedNumber = function(num, i) {
@@ -3600,6 +3684,75 @@ describe('Parse.Query testing', () => {
});
});
// REST `includeAll: true` must inline every pointer field of the fetched
// Container without listing them individually.
it('includeAll', (done) => {
const child1 = new TestObject({ foo: 'bar', name: 'ac' });
const child2 = new TestObject({ foo: 'baz', name: 'flo' });
const child3 = new TestObject({ foo: 'bad', name: 'mo' });
const parent = new Container({ child1, child2, child3 });
// saveAll persists the children and the parent (with pointers) together.
Parse.Object.saveAll([parent, child1, child2, child3]).then(() => {
const options = Object.assign({}, masterKeyOptions, {
body: {
where: { objectId: parent.id },
includeAll: true,
}
});
return rp.get(Parse.serverURL + "/classes/Container", options);
}).then((resp) => {
const result = resp.results[0];
// All three children must come back fully populated, not as bare pointers.
equal(result.child1.foo, 'bar');
equal(result.child2.foo, 'baz');
equal(result.child3.foo, 'bad');
equal(result.child1.name, 'ac');
equal(result.child2.name, 'flo');
equal(result.child3.name, 'mo');
done();
});
});
// Combining `keys` (field selection, including dotted nested keys) with
// `includeAll`: selected nested keys are populated, unselected sibling keys
// are stripped, while non-keyed included objects (Tang) come back whole.
it('select nested keys 2 level includeAll', (done) => {
const Foobar = new Parse.Object('Foobar');
const BarBaz = new Parse.Object('Barbaz');
const Bazoo = new Parse.Object('Bazoo');
const Tang = new Parse.Object('Tang');
Bazoo.set('some', 'thing');
Bazoo.set('otherSome', 'value');
Bazoo.save().then(() => {
BarBaz.set('key', 'value');
BarBaz.set('otherKey', 'value');
BarBaz.set('bazoo', Bazoo);
return BarBaz.save();
}).then(() => {
Tang.set('clan', 'wu');
return Tang.save();
}).then(() => {
Foobar.set('foo', 'bar');
Foobar.set('fizz', 'buzz');
Foobar.set('barBaz', BarBaz);
Foobar.set('group', Tang);
return Foobar.save();
}).then((savedFoobar) => {
const options = Object.assign({}, masterKeyOptions, {
body: {
where: { objectId: savedFoobar.id },
includeAll: true,
keys: 'fizz,barBaz.key,barBaz.bazoo.some',
}
});
return rp.get(Parse.serverURL + "/classes/Foobar", options);
}).then((resp) => {
const result = resp.results[0];
// `group` was not in `keys` but includeAll still inlines it fully.
equal(result.group.clan, 'wu');
// `foo` was not selected; `fizz` was.
equal(result.foo, undefined);
equal(result.fizz, 'buzz');
// Nested selections keep only the named sub-keys at each level.
equal(result.barBaz.key, 'value');
equal(result.barBaz.otherKey, undefined);
equal(result.barBaz.bazoo.some, 'thing');
equal(result.barBaz.bazoo.otherSome, undefined);
done();
})
});
it('select nested keys 2 level without include (issue #3185)', function(done) {
const Foobar = new Parse.Object('Foobar');
const BarBaz = new Parse.Object('Barbaz');
@@ -3901,4 +4054,106 @@ describe('Parse.Query testing', () => {
})
});
// $geoWithin/$centerSphere injected via withJSON must accept the center as
// either a Parse.GeoPoint or a raw [lng, lat]-style array, returning the two
// points within the sphere's radius.
it('withJSON supports geoWithin.centerSphere', (done) => {
const inbound = new Parse.GeoPoint(1.5, 1.5);
const onbound = new Parse.GeoPoint(10, 10);
const outbound = new Parse.GeoPoint(20, 20);
const obj1 = new Parse.Object('TestObject', {location: inbound});
const obj2 = new Parse.Object('TestObject', {location: onbound});
const obj3 = new Parse.Object('TestObject', {location: outbound});
const center = new Parse.GeoPoint(0, 0);
const distanceInKilometers = 1569 + 1; // 1569km is the approximate distance between {0, 0} and {10, 10}.
Parse.Object.saveAll([obj1, obj2, obj3]).then(() => {
const q = new Parse.Query(TestObject);
const jsonQ = q.toJSON();
// First pass: center supplied as a GeoPoint instance.
jsonQ.where.location = {
'$geoWithin': {
'$centerSphere': [
center,
// Radius is expressed in radians: km / Earth radius (6371 km).
distanceInKilometers / 6371.0
]
}
};
q.withJSON(jsonQ);
return q.find();
}).then(results => {
equal(results.length, 2);
// Second pass: center supplied as a plain coordinate array.
const q = new Parse.Query(TestObject);
const jsonQ = q.toJSON();
jsonQ.where.location = {
'$geoWithin': {
'$centerSphere': [
[0, 0],
distanceInKilometers / 6371.0
]
}
};
q.withJSON(jsonQ);
return q.find();
}).then(results => {
equal(results.length, 2);
done();
}).catch(error => {
fail(error);
done();
});
});
// An empty $centerSphere array (no center, no radius) must be rejected with
// INVALID_JSON.
it('withJSON with geoWithin.centerSphere fails without parameters', (done) => {
const q = new Parse.Query(TestObject);
const jsonQ = q.toJSON();
jsonQ.where.location = {
'$geoWithin': {
'$centerSphere': [
]
}
};
q.withJSON(jsonQ);
q.find(expectError(Parse.Error.INVALID_JSON, done));
});
// A non-numeric radius must be rejected with INVALID_JSON.
it('withJSON with geoWithin.centerSphere fails with invalid distance', (done) => {
const q = new Parse.Query(TestObject);
const jsonQ = q.toJSON();
jsonQ.where.location = {
'$geoWithin': {
'$centerSphere': [
[0, 0],
'invalid_distance'
]
}
};
q.withJSON(jsonQ);
q.find(expectError(Parse.Error.INVALID_JSON, done));
});
// Out-of-range coordinates (lat/lng beyond ±90/±180) must make the query
// fail; the exact error code is not pinned here (expectError(undefined, …)).
it('withJSON with geoWithin.centerSphere fails with invalid coordinate', (done) => {
const q = new Parse.Query(TestObject);
const jsonQ = q.toJSON();
jsonQ.where.location = {
'$geoWithin': {
'$centerSphere': [
[-190,-190],
1
]
}
};
q.withJSON(jsonQ);
q.find(expectError(undefined, done));
});
// A malformed center object (missing 'latitude') must make the query fail;
// the exact error code is not asserted.
it('withJSON with geoWithin.centerSphere fails with invalid geo point', (done) => {
const q = new Parse.Query(TestObject);
const jsonQ = q.toJSON();
jsonQ.where.location = {
'$geoWithin': {
'$centerSphere': [
{'longitude': 0, 'dummytude': 0},
1
]
}
};
q.withJSON(jsonQ);
q.find(expectError(undefined, done));
});
});

View File

@@ -2,9 +2,9 @@
// Roles are not accessible without the master key, so they are not intended
// for use by clients. We can manually test them using the master key.
const RestQuery = require("../src/RestQuery");
const Auth = require("../src/Auth").Auth;
const Config = require("../src/Config");
const RestQuery = require("../lib/RestQuery");
const Auth = require("../lib/Auth").Auth;
const Config = require("../lib/Config");
describe('Parse Role testing', () => {
it('Do a bunch of basic role testing', done => {

View File

@@ -1,19 +1,26 @@
'use strict';
/* Tests for ParseServer.js */
const express = require('express');
import MongoStorageAdapter from '../src/Adapters/Storage/Mongo/MongoStorageAdapter';
import PostgresStorageAdapter from '../src/Adapters/Storage/Postgres/PostgresStorageAdapter';
import ParseServer from '../src/ParseServer';
const MongoStorageAdapter = require('../lib/Adapters/Storage/Mongo/MongoStorageAdapter').default;
const PostgresStorageAdapter = require('../lib/Adapters/Storage/Postgres/PostgresStorageAdapter').default;
const ParseServer = require('../lib/ParseServer').default;
describe('Server Url Checks', () => {
const app = express();
app.get('/health', function(req, res){
res.json({
status: 'ok'
let server;
beforeAll((done) => {
const app = express();
app.get('/health', function(req, res){
res.json({
status: 'ok'
});
});
server = app.listen(13376, undefined, done);
});
afterAll((done) => {
server.close(done);
});
app.listen(13376);
it('validate good server url', (done) => {
Parse.serverURL = 'http://localhost:13376';

View File

@@ -1,5 +1,5 @@
const ParseServerRESTController = require('../src/ParseServerRESTController').ParseServerRESTController;
const ParseServer = require('../src/ParseServer').default;
const ParseServerRESTController = require('../lib/ParseServerRESTController').ParseServerRESTController;
const ParseServer = require('../lib/ParseServer').default;
const Parse = require('parse/node').Parse;
let RESTController;

View File

@@ -7,10 +7,10 @@
"use strict";
import MongoStorageAdapter from '../src/Adapters/Storage/Mongo/MongoStorageAdapter';
const MongoStorageAdapter = require('../lib/Adapters/Storage/Mongo/MongoStorageAdapter').default;
const request = require('request');
const passwordCrypto = require('../src/password');
const Config = require('../src/Config');
const passwordCrypto = require('../lib/password');
const Config = require('../lib/Config');
const rp = require('request-promise');
function verifyACL(user) {
@@ -101,7 +101,7 @@ describe('Parse.User testing', () => {
});
});
it('user login with non-string username with REST API', (done) => {
it('user login with non-string username with REST API (again)', (done) => {
Parse.User.signUp('asdf', 'zxcv', null, {
success: () => {
return rp.post({
@@ -525,6 +525,71 @@ describe('Parse.User testing', () => {
});
});
// Regression test: a user saving an empty ACL onto himself must never lose
// read/write access to his own record, and role/public/other-user access
// must still follow the ACL rules.
it('never locks himself up', async () => {
const user = new Parse.User();
await user.signUp({
username: 'username',
password: 'password'
});
// Save an EMPTY ACL — the server is expected to re-grant the owner access.
user.setACL(new Parse.ACL());
await user.save();
await user.fetch();
expect(user.getACL().getReadAccess(user)).toBe(true);
expect(user.getACL().getWriteAccess(user)).toBe(true);
const publicReadACL = new Parse.ACL();
publicReadACL.setPublicReadAccess(true);
// Create an administrator role with a single admin user
const role = new Parse.Role('admin', publicReadACL);
const admin = new Parse.User();
await admin.signUp({
username: 'admin',
password: 'admin',
});
role.getUsers().add(admin);
await role.save(null, { useMasterKey: true });
// Grant the admins write rights on the user
const acl = user.getACL();
acl.setRoleWriteAccess(role, true);
acl.setRoleReadAccess(role, true);
// Update with the masterKey just to be sure
await user.save({ ACL: acl }, { useMasterKey: true });
// Try to update from admin... should all work fine
await user.save({ key: 'fromAdmin'}, { sessionToken: admin.getSessionToken() });
await user.fetch();
expect(user.toJSON().key).toEqual('fromAdmin');
// Try to save when logged out (public)
let failed = false;
try {
// Ensure no session token is sent
await Parse.User.logOut();
await user.save({ key: 'fromPublic'});
} catch(e) {
failed = true;
expect(e.code).toBe(Parse.Error.SESSION_MISSING);
}
// Wrapped in an object so a non-thrown path fails with a readable diff.
expect({ failed }).toEqual({ failed: true });
// Try to save with a random user, should fail
failed = false;
const anyUser = new Parse.User();
await anyUser.signUp({
username: 'randomUser',
password: 'password'
});
try {
await user.save({ key: 'fromAnyUser'});
} catch(e) {
failed = true;
expect(e.code).toBe(Parse.Error.SESSION_MISSING);
}
expect({ failed }).toEqual({ failed: true });
});
it("current user", (done) => {
const user = new Parse.User();
user.set("password", "asdf");
@@ -1823,7 +1888,7 @@ describe('Parse.User testing', () => {
});
});
it('should fail linking with existing', (done) => {
it('should fail linking with existing through REST', (done) => {
const provider = getMockFacebookProvider();
Parse.User._registerAuthenticationProvider(provider);
Parse.User._logInWith("facebook", {
@@ -2379,7 +2444,7 @@ describe('Parse.User testing', () => {
}, (error, response, body) => {
expect(error).toBe(null);
const b = JSON.parse(body);
expect(b.error).toBe('invalid session token');
expect(b.error).toBe('Invalid session token');
request.put({
headers: {
'X-Parse-Application-Id': 'test',
@@ -2471,7 +2536,7 @@ describe('Parse.User testing', () => {
expect(error).toBe(null);
const b = JSON.parse(body);
expect(b.code).toEqual(209);
expect(b.error).toBe('invalid session token');
expect(b.error).toBe('Invalid session token');
done();
});
});
@@ -2513,7 +2578,7 @@ describe('Parse.User testing', () => {
}, (error,response,body) => {
const b = JSON.parse(body);
expect(b.code).toEqual(209);
expect(b.error).toBe('invalid session token');
expect(b.error).toBe('Invalid session token');
done();
});
});
@@ -2550,7 +2615,7 @@ describe('Parse.User testing', () => {
done();
}, function(err) {
expect(err.code).toBe(Parse.Error.INVALID_SESSION_TOKEN);
expect(err.message).toBe('invalid session token');
expect(err.message).toBe('Invalid session token');
done();
});
});
@@ -2626,7 +2691,7 @@ describe('Parse.User testing', () => {
});
});
it("invalid session tokens are rejected", (done) => {
it("Invalid session tokens are rejected", (done) => {
Parse.User.signUp("asdf", "zxcv", null, {
success: function() {
request.get({
@@ -2639,7 +2704,7 @@ describe('Parse.User testing', () => {
},
}, (error, response, body) => {
expect(body.code).toBe(209);
expect(body.error).toBe('invalid session token');
expect(body.error).toBe('Invalid session token');
done();
})
}
@@ -3641,4 +3706,36 @@ describe('Parse.User testing', () => {
expect(results.length).toBe(1);
}).then(done, done.fail);
});
describe('issue #4897', () => {
// Mongo-only: the record is inserted through the raw adapter collection,
// bypassing Parse, to simulate a pre-existing legacy user document.
it_only_db('mongo')("should be able to login with a legacy user (no ACL)", async () => {
// This issue is a side effect of the locked users and legacy users which don't have ACL's
// In this scenario, a legacy user wasn't able to login as there's no ACL on it
const database = Config.get(Parse.applicationId).database;
const collection = await database.adapter._adaptiveCollection('_User');
// Legacy document: has facebook authData but NO _acl / _rperm / _wperm.
await collection.insertOne({
"_id": "ABCDEF1234",
"name": "<some_name>",
"email": "<some_email>",
"username": "<some_username>",
"_hashed_password": "<some_password>",
"_auth_data_facebook": {
"id": "8675309",
"access_token": "jenny"
},
"sessionToken": "<some_session_token>",
});
const provider = getMockFacebookProvider();
Parse.User._registerAuthenticationProvider(provider);
// Logging in via facebook must match the legacy row, not create a new user.
const model = await Parse.User._logInWith("facebook", {});
expect(model.id).toBe('ABCDEF1234');
ok(model instanceof Parse.User, "Model should be a Parse.User");
strictEqual(Parse.User.current(), model);
ok(model.extended(), "Should have used subclass.");
strictEqual(provider.authData.id, provider.synchronizedUserId);
strictEqual(provider.authData.access_token, provider.synchronizedAuthToken);
strictEqual(provider.authData.expiration_date, provider.synchronizedExpiration);
ok(model._isLinked("facebook"), "User should be linked to facebook");
});
});
});

View File

@@ -1,4 +1,4 @@
const ParseWebSocket = require('../src/LiveQuery/ParseWebSocketServer').ParseWebSocket;
const ParseWebSocket = require('../lib/LiveQuery/ParseWebSocketServer').ParseWebSocket;
describe('ParseWebSocket', function() {

View File

@@ -1,4 +1,4 @@
const ParseWebSocketServer = require('../src/LiveQuery/ParseWebSocketServer').ParseWebSocketServer;
const ParseWebSocketServer = require('../lib/LiveQuery/ParseWebSocketServer').ParseWebSocketServer;
describe('ParseWebSocketServer', function() {

View File

@@ -1,5 +1,5 @@
'use strict';
const Config = require('../src/Config');
const Config = require('../lib/Config');
describe('Pointer Permissions', () => {

View File

@@ -1,4 +1,4 @@
const parser = require('../src/Adapters/Storage/Postgres/PostgresConfigParser');
const parser = require('../lib/Adapters/Storage/Postgres/PostgresConfigParser');
const queryParamTests = {
'a=1&b=2': { a: '1', b: '2' },

View File

@@ -1,7 +1,7 @@
const Parse = require('parse/node').Parse;
import PostgresStorageAdapter from '../src/Adapters/Storage/Postgres/PostgresStorageAdapter';
const PostgresStorageAdapter = require('../lib/Adapters/Storage/Postgres/PostgresStorageAdapter').default;
const postgresURI = 'postgres://localhost:5432/parse_server_postgres_adapter_test_database';
const ParseServer = require("../src/index");
const ParseServer = require("../lib/index");
const express = require('express');
//public schema
const databaseOptions1 = {

View File

@@ -1,4 +1,4 @@
import PostgresStorageAdapter from '../src/Adapters/Storage/Postgres/PostgresStorageAdapter';
const PostgresStorageAdapter = require('../lib/Adapters/Storage/Postgres/PostgresStorageAdapter').default;
const databaseURI = 'postgres://localhost:5432/parse_server_postgres_adapter_test_database';
const getColumns = (client, className) => {

View File

@@ -1,4 +1,4 @@
const PromiseRouter = require("../src/PromiseRouter").default;
const PromiseRouter = require("../lib/PromiseRouter").default;
describe("PromiseRouter", () => {
it("should properly handle rejects", (done) => {

View File

@@ -1,8 +1,8 @@
"use strict";
const PushController = require('../src/Controllers/PushController').PushController;
const StatusHandler = require('../src/StatusHandler');
const Config = require('../src/Config');
const validatePushType = require('../src/Push/utils').validatePushType;
const PushController = require('../lib/Controllers/PushController').PushController;
const StatusHandler = require('../lib/StatusHandler');
const Config = require('../lib/Config');
const validatePushType = require('../lib/Push/utils').validatePushType;
const successfulTransmissions = function(body, installations) {
@@ -245,6 +245,84 @@ describe('PushController', () => {
});
});
// Verifies that a push payload with { badge: { __op: 'Increment', amount: 3 } }
// increments every targeted installation's badge by 3, and that the
// _PushStatus row reports all 15 installations as sent.
it('properly increment badges by more than 1', (done) => {
const pushAdapter = {
send: function(body, installations) {
const badge = body.data.badge;
installations.forEach((installation) => {
// Each installation must receive its own incremented badge value.
expect(installation.badge).toEqual(badge);
expect(installation.originalBadge + 3).toEqual(installation.badge);
})
return successfulTransmissions(body, installations);
},
getValidPushTypes: function() {
return ["ios", "android"];
}
}
const payload = {data:{
alert: "Hello World!",
badge: { __op: 'Increment', amount: 3 },
}}
// 10 iOS installations followed by 5 Android ones, badge == index.
const installations = [];
while(installations.length != 10) {
const installation = new Parse.Object("_Installation");
installation.set("installationId", "installation_" + installations.length);
installation.set("deviceToken","device_token_" + installations.length)
installation.set("badge", installations.length);
installation.set("originalBadge", installations.length);
installation.set("deviceType", "ios");
installations.push(installation);
}
while(installations.length != 15) {
const installation = new Parse.Object("_Installation");
installation.set("installationId", "installation_" + installations.length);
installation.set("deviceToken","device_token_" + installations.length);
installation.set("badge", installations.length);
installation.set("originalBadge", installations.length);
installation.set("deviceType", "android");
installations.push(installation);
}
const config = Config.get(Parse.applicationId);
const auth = {
isMaster: true
}
const pushController = new PushController();
reconfigureServer({
push: { adapter: pushAdapter }
}).then(() => {
return Parse.Object.saveAll(installations)
}).then(() => {
return pushController.sendPush(payload, {}, config, auth);
}).then(() => {
// Wait so the push is completed.
return new Promise((resolve) => { setTimeout(() => { resolve(); }, 1000); });
}).then(() => {
// Check we actually sent 15 pushes.
const query = new Parse.Query('_PushStatus');
return query.find({ useMasterKey: true })
}).then((results) => {
expect(results.length).toBe(1);
const pushStatus = results[0];
expect(pushStatus.get('numSent')).toBe(15);
}).then(() => {
// Check that the installations were actually updated.
const query = new Parse.Query('_Installation');
return query.find({ useMasterKey: true })
}).then((results) => {
expect(results.length).toBe(15);
for (let i = 0; i < 15; i++) {
const installation = results[i];
expect(installation.get('badge')).toBe(parseInt(installation.get('originalBadge')) + 3);
}
done()
}).catch((err) => {
jfail(err);
done();
});
});
it('properly set badges to 1', (done) => {
const pushAdapter = {

View File

@@ -1,5 +1,5 @@
import Config from "../src/Config";
import {PushQueue} from "../src/Push/PushQueue";
const Config = require("../lib/Config");
const {PushQueue} = require("../lib/Push/PushQueue");
describe('PushQueue', () => {
describe('With a defined channel', () => {

View File

@@ -1,4 +1,4 @@
const PushRouter = require('../src/Routers/PushRouter').PushRouter;
const PushRouter = require('../lib/Routers/PushRouter').PushRouter;
const request = require('request');
describe('PushRouter', () => {

View File

@@ -1,8 +1,8 @@
const PushWorker = require('../src').PushWorker;
const PushUtils = require('../src/Push/utils');
const Config = require('../src/Config');
const { pushStatusHandler } = require('../src/StatusHandler');
const rest = require('../src/rest');
const PushWorker = require('../lib').PushWorker;
const PushUtils = require('../lib/Push/utils');
const Config = require('../lib/Config');
const { pushStatusHandler } = require('../lib/StatusHandler');
const rest = require('../lib/rest');
describe('PushWorker', () => {
it('should run with small batch', (done) => {
@@ -90,6 +90,10 @@ describe('PushWorker', () => {
expect(locales).toEqual(['fr']);
});
it('should handle empty body data', () => {
expect(PushUtils.getLocalesFromPush({})).toEqual([]);
});
it('transforms body appropriately', () => {
const cleanBody = PushUtils.transformPushBodyForLocale({
data: {

View File

@@ -1,7 +1,7 @@
const Parse = require('parse/node');
const Id = require('../src/LiveQuery/Id');
const QueryTools = require('../src/LiveQuery/QueryTools');
const Id = require('../lib/LiveQuery/Id');
const QueryTools = require('../lib/LiveQuery/QueryTools');
const queryHash = QueryTools.queryHash;
const matchesQuery = QueryTools.matchesQuery;

View File

@@ -3,7 +3,7 @@
const Parse = require('parse/node');
const ReadPreference = require('mongodb').ReadPreference;
const rp = require('request-promise');
const Config = require("../src/Config");
const Config = require("../lib/Config");
describe_only_db('mongo')('Read preference option', () => {
it('should find in primary by default', (done) => {
@@ -27,17 +27,56 @@ describe_only_db('mongo')('Read preference option', () => {
let myObjectReadPreference = null;
databaseAdapter.database.serverConfig.cursor.calls.all().forEach((call) => {
if (call.args[0].indexOf('MyObject') >= 0) {
myObjectReadPreference = call.args[2].readPreference.preference;
myObjectReadPreference = true;
expect(call.args[2].readPreference.preference).toBe(ReadPreference.PRIMARY);
}
});
expect(myObjectReadPreference).toEqual(ReadPreference.PRIMARY);
expect(myObjectReadPreference).toBe(true);
done();
});
});
});
// Regression for #4831: a readPreference configured on the Mongo adapter's
// mongoOptions must survive into the actual driver cursor calls instead of
// being overwritten with the default (PRIMARY).
it('should preserve the read preference set (#4831)', async () => {
const { MongoStorageAdapter } = require('../lib/Adapters/Storage/Mongo/MongoStorageAdapter');
const adapterOptions = {
uri: 'mongodb://localhost:27017/parseServerMongoAdapterTestDatabase',
mongoOptions: {
readPreference: ReadPreference.NEAREST,
}
};
await reconfigureServer({ databaseAdapter: new MongoStorageAdapter(adapterOptions) });
const databaseAdapter = (Config.get(Parse.applicationId)).database.adapter;
const obj0 = new Parse.Object('MyObject');
obj0.set('boolKey', false);
const obj1 = new Parse.Object('MyObject');
obj1.set('boolKey', true);
await Parse.Object.saveAll([obj0, obj1])
// Spy on the raw driver cursor to inspect the readPreference it is given.
spyOn(databaseAdapter.database.serverConfig, 'cursor').and.callThrough();
const query = new Parse.Query('MyObject');
query.equalTo('boolKey', false);
const results = await query.find();
expect(results.length).toBe(1);
expect(results[0].get('boolKey')).toBe(false);
// Flag proves the MyObject cursor call was actually observed.
let myObjectReadPreference = null;
databaseAdapter.database.serverConfig.cursor.calls.all().forEach((call) => {
if (call.args[0].indexOf('MyObject') >= 0) {
myObjectReadPreference = true;
expect(call.args[2].readPreference.preference).toBe(ReadPreference.NEAREST);
}
});
expect(myObjectReadPreference).toBe(true);
});
it('should change read preference in the beforeFind trigger', (done) => {
const databaseAdapter = (Config.get(Parse.applicationId)).database.adapter;
@@ -442,18 +481,20 @@ describe_only_db('mongo')('Read preference option', () => {
let myObjectReadPreference2 = null;
databaseAdapter.database.serverConfig.cursor.calls.all().forEach((call) => {
if (call.args[0].indexOf('MyObject0') >= 0) {
myObjectReadPreference0 = call.args[2].readPreference.preference;
myObjectReadPreference0 = true;
expect(call.args[2].readPreference.preference).toBe(ReadPreference.PRIMARY);
}
if (call.args[0].indexOf('MyObject1') >= 0) {
myObjectReadPreference1 = call.args[2].readPreference.preference;
myObjectReadPreference1 = true;
expect(call.args[2].readPreference.preference).toBe(ReadPreference.PRIMARY);
}
if (call.args[0].indexOf('MyObject2') >= 0) {
myObjectReadPreference2 = call.args[2].readPreference.preference;
}
});
expect(myObjectReadPreference0).toEqual(ReadPreference.PRIMARY);
expect(myObjectReadPreference1).toEqual(ReadPreference.PRIMARY);
expect(myObjectReadPreference0).toBe(true);
expect(myObjectReadPreference1).toBe(true);
expect(myObjectReadPreference2).toEqual(ReadPreference.SECONDARY);
done();
@@ -555,18 +596,20 @@ describe_only_db('mongo')('Read preference option', () => {
let myObjectReadPreference2 = null;
databaseAdapter.database.serverConfig.cursor.calls.all().forEach((call) => {
if (call.args[0].indexOf('MyObject0') >= 0) {
myObjectReadPreference0 = call.args[2].readPreference.preference;
myObjectReadPreference0 = true;
expect(call.args[2].readPreference.preference).toBe(ReadPreference.PRIMARY);
}
if (call.args[0].indexOf('MyObject1') >= 0) {
myObjectReadPreference1 = call.args[2].readPreference.preference;
myObjectReadPreference1 = true;
expect(call.args[2].readPreference.preference).toBe(ReadPreference.PRIMARY);
}
if (call.args[0].indexOf('MyObject2') >= 0) {
myObjectReadPreference2 = call.args[2].readPreference.preference;
}
});
expect(myObjectReadPreference0).toEqual(ReadPreference.PRIMARY);
expect(myObjectReadPreference1).toEqual(ReadPreference.PRIMARY);
expect(myObjectReadPreference0).toBe(true);
expect(myObjectReadPreference1).toBe(true);
expect(myObjectReadPreference2).toEqual(ReadPreference.SECONDARY);
done();

View File

@@ -1,4 +1,4 @@
const RedisCacheAdapter = require('../src/Adapters/Cache/RedisCacheAdapter').default;
const RedisCacheAdapter = require('../lib/Adapters/Cache/RedisCacheAdapter').default;
/*
To run this test part of the complete suite
set PARSE_SERVER_TEST_CACHE='redis'

View File

@@ -1,4 +1,4 @@
const RedisPubSub = require('../src/Adapters/PubSub/RedisPubSub').RedisPubSub;
const RedisPubSub = require('../lib/Adapters/PubSub/RedisPubSub').RedisPubSub;
describe('RedisPubSub', function() {

View File

@@ -1,8 +1,8 @@
'use strict'
// These tests check the "find" functionality of the REST API.
const auth = require('../src/Auth');
const Config = require('../src/Config');
const rest = require('../src/rest');
const auth = require('../lib/Auth');
const Config = require('../lib/Config');
const rest = require('../lib/rest');
const querystring = require('querystring');
const rp = require('request-promise');

View File

@@ -1,4 +1,4 @@
const Config = require('../src/Config');
const Config = require('../lib/Config');
const sessionToken = 'legacySessionToken';
const rp = require('request-promise');
const Parse = require('parse/node');

View File

@@ -1,7 +1,7 @@
'use strict';
const Config = require('../src/Config');
const SchemaController = require('../src/Controllers/SchemaController');
const Config = require('../lib/Config');
const SchemaController = require('../lib/Controllers/SchemaController');
const dd = require('deep-diff');
let config;

View File

@@ -1,6 +1,6 @@
const CacheController = require('../src/Controllers/CacheController.js').default;
const InMemoryCacheAdapter = require('../src/Adapters/Cache/InMemoryCacheAdapter').default;
const SchemaCache = require('../src/Controllers/SchemaCache').default;
const CacheController = require('../lib/Controllers/CacheController.js').default;
const InMemoryCacheAdapter = require('../lib/Adapters/Cache/InMemoryCacheAdapter').default;
const SchemaCache = require('../lib/Controllers/SchemaCache').default;
describe('SchemaCache', () => {
let cacheController;

View File

@@ -1,4 +1,4 @@
const SessionTokenCache = require('../src/LiveQuery/SessionTokenCache').SessionTokenCache;
const SessionTokenCache = require('../lib/LiveQuery/SessionTokenCache').SessionTokenCache;
describe('SessionTokenCache', function() {

View File

@@ -1,9 +1,9 @@
const Subscription = require('../src/LiveQuery/Subscription').Subscription;
const Subscription = require('../lib/LiveQuery/Subscription').Subscription;
let logger;
describe('Subscription', function() {
beforeEach(function() {
logger = require('../src/logger').logger;
logger = require('../lib/logger').logger;
spyOn(logger, 'error').and.callThrough();
});

View File

@@ -1,4 +1,4 @@
const twitter = require('../src/Adapters/Auth/twitter');
const twitter = require('../lib/Adapters/Auth/twitter');
describe('Twitter Auth', () => {
it('should use the proper configuration', () => {

View File

@@ -1,7 +1,7 @@
'use strict';
const Parse = require("parse/node");
const Config = require('../src/Config');
const Config = require('../lib/Config');
describe('Uniqueness', function() {
it('fail when create duplicate value in unique field', done => {

View File

@@ -1,6 +1,6 @@
const UserController = require('../src/Controllers/UserController').UserController;
const UserController = require('../lib/Controllers/UserController').UserController;
const emailAdapter = require('./MockEmailAdapter')
const AppCache = require('../src/cache').AppCache;
const AppCache = require('../lib/cache').AppCache;
describe('UserController', () => {
const user = {

View File

@@ -3,7 +3,7 @@
const Parse = require('parse/node');
const request = require('request-promise');
// const Config = require('../src/Config');
// const Config = require('../lib/Config');
const EMAIL = 'foo@bar.com';
const ZIP = '10001';

View File

@@ -2,7 +2,7 @@
const MockEmailAdapterWithOptions = require('./MockEmailAdapterWithOptions');
const request = require('request');
const Config = require("../src/Config");
const Config = require("../lib/Config");
describe("Custom Pages, Email Verification, Password Reset", () => {
it("should set the custom pages", (done) => {

View File

@@ -0,0 +1,494 @@
"use strict";
const rp = require('request-promise');
const MockEmailAdapterWithOptions = require('./MockEmailAdapterWithOptions');
// Calls the /verifyPassword REST endpoint with a JSON body holding either a
// username (default) or an email credential.
// Resolves with the parsed response on success and — deliberately — resolves
// with the request error on failure, so callers can assert on err.statusCode
// without wrapping every call in try/catch.
const verifyPassword = function (login, password, isEmail = false) {
  // Positive condition instead of the original `(!isEmail) ? username : email`.
  const body = isEmail ? { email: login, password } : { username: login, password };
  return rp.get({
    url: Parse.serverURL + '/verifyPassword',
    headers: {
      'X-Parse-Application-Id': Parse.applicationId,
      'X-Parse-REST-API-Key': 'rest'
    },
    body,
    json: true
  }).catch((err) => err); // dropped the redundant identity `.then((res) => res)`
};
// Resolves iff a login attempt — made after waiting `waitTime` ms — fails
// with the account-lockout message for `duration` minute(s); rejects when the
// login unexpectedly succeeds or fails with any other error.
const isAccountLockoutError = function (username, password, duration, waitTime) {
  const lockoutMessage =
    'Your account is locked due to multiple failed login attempts. Please try again after ' +
    duration + ' minute(s)';
  return new Promise((resolve, reject) => {
    setTimeout(() => {
      // Two-argument then(): the rejection handler only sees logIn failures,
      // exactly like the original then/catch chain.
      Parse.User.logIn(username, password).then(
        () => reject('login should have failed'),
        (err) => {
          if (err.message === lockoutMessage) {
            resolve();
          } else {
            reject(err);
          }
        }
      );
    }, waitTime);
  });
};
describe("Verify User Password", () => {
// A user whose ACL was emptied via the masterKey is invisible to the
// unauthenticated /verifyPassword endpoint, so verification must 404.
it('fails to verify password when masterKey has locked out user', (done) => {
const user = new Parse.User();
const ACL = new Parse.ACL();
ACL.setPublicReadAccess(false);
ACL.setPublicWriteAccess(false);
user.setUsername('testuser');
user.setPassword('mypass');
user.setACL(ACL);
user.signUp().then(() => {
return Parse.User.logIn('testuser', 'mypass');
}).then((user) => {
equal(user.get('username'), 'testuser');
// Lock the user down
const ACL = new Parse.ACL();
user.setACL(ACL);
return user.save(null, { useMasterKey: true });
}).then(() => {
expect(user.getACL().getPublicReadAccess()).toBe(false);
return rp.get({
url: Parse.serverURL + '/verifyPassword',
headers: {
'X-Parse-Application-Id': Parse.applicationId,
'X-Parse-REST-API-Key': 'rest'
},
qs: {
username: 'testuser',
password: 'mypass',
}
});
}).then((res) => {
// Reaching here means the endpoint answered — that is a failure.
fail(res);
done();
}).catch((err) => {
expect(err.statusCode).toBe(404);
expect(err.error).toMatch('{"code":101,"error":"Invalid username/password."}');
done();
});
});
// /verifyPassword with an empty username in the query string → 400 / code 200.
it('fails to verify password when username is not provided in query string REST API', (done) => {
const user = new Parse.User();
user.save({
username: 'testuser',
password: 'mypass',
email: 'my@user.com'
}).then(() => {
return rp.get({
url: Parse.serverURL + '/verifyPassword',
headers: {
'X-Parse-Application-Id': Parse.applicationId,
'X-Parse-REST-API-Key': 'rest'
},
qs: {
username: '',
password: 'mypass',
}
});
}).then((res) => {
fail(res);
done();
}).catch((err) => {
expect(err.statusCode).toBe(400);
expect(err.error).toMatch('{"code":200,"error":"username/email is required."}');
done();
});
});
// /verifyPassword with an empty email in the query string → 400 / code 200.
it('fails to verify password when email is not provided in query string REST API', (done) => {
const user = new Parse.User();
user.save({
username: 'testuser',
password: 'mypass',
email: 'my@user.com'
}).then(() => {
return rp.get({
url: Parse.serverURL + '/verifyPassword',
headers: {
'X-Parse-Application-Id': Parse.applicationId,
'X-Parse-REST-API-Key': 'rest'
},
qs: {
email: '',
password: 'mypass',
}
});
}).then((res) => {
fail(res);
done();
}).catch((err) => {
expect(err.statusCode).toBe(400);
expect(err.error).toMatch('{"code":200,"error":"username/email is required."}');
done();
});
});
// Same check through a JSON body (verifyPassword helper) — empty username.
// Note: the helper resolves with the error, hence the assertions in .then().
it('fails to verify password when username is not provided with json payload REST API', (done) => {
const user = new Parse.User();
user.save({
username: 'testuser',
password: 'mypass',
email: 'my@user.com'
}).then(() => {
return verifyPassword('', 'mypass');
}).then((res) => {
expect(res.statusCode).toBe(400);
expect(JSON.stringify(res.error)).toMatch('{"code":200,"error":"username/email is required."}');
done();
}).catch((err) => {
fail(err);
done();
});
});
// JSON body with an empty email → 400 / code 200.
it('fails to verify password when email is not provided with json payload REST API', (done) => {
const user = new Parse.User();
user.save({
username: 'testuser',
password: 'mypass',
email: 'my@user.com'
}).then(() => {
return verifyPassword('', 'mypass', true);
}).then((res) => {
expect(res.statusCode).toBe(400);
expect(JSON.stringify(res.error)).toMatch('{"code":200,"error":"username/email is required."}');
done();
}).catch((err) => {
fail(err);
done();
});
});
// JSON body with an empty password → 400 / code 201 (password required).
it('fails to verify password when password is not provided with json payload REST API', (done) => {
const user = new Parse.User();
user.save({
username: 'testuser',
password: 'mypass',
email: 'my@user.com'
}).then(() => {
return verifyPassword('testuser', '');
}).then((res) => {
expect(res.statusCode).toBe(400);
expect(JSON.stringify(res.error)).toMatch('{"code":201,"error":"password is required."}');
done();
}).catch((err) => {
fail(err);
done();
});
});
// Wrong password for an existing username → 404 / code 101, never revealing
// whether the account exists.
it('fails to verify password when username matches but password does not match hash with json payload REST API', (done) => {
const user = new Parse.User();
user.save({
username: 'testuser',
password: 'mypass',
email: 'my@user.com'
}).then(() => {
return verifyPassword('testuser', 'wrong password');
}).then((res) => {
expect(res.statusCode).toBe(404);
expect(JSON.stringify(res.error)).toMatch('{"code":101,"error":"Invalid username/password."}');
done();
}).catch((err) => {
fail(err);
done();
});
});
// Wrong password for an existing email → same 404 / code 101.
it('fails to verify password when email matches but password does not match hash with json payload REST API', (done) => {
const user = new Parse.User();
user.save({
username: 'testuser',
password: 'mypass',
email: 'my@user.com'
}).then(() => {
return verifyPassword('my@user.com', 'wrong password', true);
}).then((res) => {
expect(res.statusCode).toBe(404);
expect(JSON.stringify(res.error)).toMatch('{"code":101,"error":"Invalid username/password."}');
done();
}).catch((err) => {
fail(err);
done();
});
});
// Non-string username (number 123) must be rejected like a bad credential.
it('fails to verify password when typeof username does not equal string REST API', (done) => {
const user = new Parse.User();
user.save({
username: 'testuser',
password: 'mypass',
email: 'my@user.com'
}).then(() => {
return verifyPassword(123, 'mypass');
}).then((res) => {
expect(res.statusCode).toBe(404);
expect(JSON.stringify(res.error)).toMatch('{"code":101,"error":"Invalid username/password."}');
done();
}).catch((err) => {
fail(err);
done();
});
});
// Non-string email must be rejected the same way.
it('fails to verify password when typeof email does not equal string REST API', (done) => {
const user = new Parse.User();
user.save({
username: 'testuser',
password: 'mypass',
email: 'my@user.com'
}).then(() => {
return verifyPassword(123, 'mypass', true);
}).then((res) => {
expect(res.statusCode).toBe(404);
expect(JSON.stringify(res.error)).toMatch('{"code":101,"error":"Invalid username/password."}');
done();
}).catch((err) => {
fail(err);
done();
});
});
// Non-string password must be rejected the same way.
it('fails to verify password when typeof password does not equal string REST API', (done) => {
const user = new Parse.User();
user.save({
username: 'testuser',
password: 'mypass',
email: 'my@user.com'
}).then(() => {
return verifyPassword('my@user.com', 123, true);
}).then((res) => {
expect(res.statusCode).toBe(404);
expect(JSON.stringify(res.error)).toMatch('{"code":101,"error":"Invalid username/password."}');
done();
}).catch((err) => {
fail(err);
done();
});
});
// Unknown username → 404 / code 101 (same opaque error as a wrong password).
it('fails to verify password when username cannot be found REST API', (done) => {
verifyPassword('mytestuser', 'mypass')
.then((res) => {
expect(res.statusCode).toBe(404);
expect(JSON.stringify(res.error)).toMatch('{"code":101,"error":"Invalid username/password."}');
done();
}).catch((err) => {
fail(err);
done();
});
});
// Unknown email → same 404 / code 101.
it('fails to verify password when email cannot be found REST API', (done) => {
verifyPassword('my@user.com', 'mypass', true)
.then((res) => {
expect(res.statusCode).toBe(404);
expect(JSON.stringify(res.error)).toMatch('{"code":101,"error":"Invalid username/password."}');
done();
}).catch((err) => {
fail(err);
done();
});
});
// With verifyUserEmails + preventLoginWithUnverifiedEmail enabled, password
// verification for an unverified address must fail with 400 / code 205.
it('fails to verify password when preventLoginWithUnverifiedEmail is set to true REST API', (done) => {
reconfigureServer({
publicServerURL: "http://localhost:8378/",
appName: 'emailVerify',
verifyUserEmails: true,
preventLoginWithUnverifiedEmail: true,
emailAdapter: MockEmailAdapterWithOptions({
fromAddress: 'parse@example.com',
apiKey: 'k',
domain: 'd',
}),
}).then(() => {
const user = new Parse.User();
return user.save({
username: 'unverified-user',
password: 'mypass',
email: 'unverified-email@user.com'
});
}).then(() => {
return verifyPassword('unverified-email@user.com', 'mypass', true);
}).then((res) => {
expect(res.statusCode).toBe(400);
expect(JSON.stringify(res.error)).toMatch('{"code":205,"error":"User email is not verified."}');
done();
}).catch((err) => {
fail(err);
done();
});
});
// Failed /verifyPassword attempts must count toward accountLockout just like
// failed logins: after exceeding the threshold (2) the account is locked for
// the configured duration (1 minute).
it('verify password lock account if failed verify password attempts are above threshold', done => {
reconfigureServer({
appName: 'lockout threshold',
accountLockout: {
duration: 1,
threshold: 2
},
publicServerURL: "http://localhost:8378/"
})
.then(() => {
const user = new Parse.User();
return user.save({
username: 'testuser',
password: 'mypass',
email: 'my@user.com'
})
})
.then(() => {
return verifyPassword('testuser', 'wrong password');
})
.then(() => {
return verifyPassword('testuser', 'wrong password');
})
.then(() => {
return verifyPassword('testuser', 'wrong password');
})
.then(() => {
// A subsequent login must now fail with the lockout message.
return isAccountLockoutError('testuser', 'wrong password', 1, 1);
})
.then(() => {
done();
})
.catch(err => {
fail('lock account after failed login attempts test failed: ' + JSON.stringify(err));
done();
});
});
it('succeed in verifying password when username and email are provided and password matches hash with json payload REST API', (done) => {
  // POST-style JSON body containing username, email and password; a match
  // returns the user record without sessionToken or password fields.
  new Parse.User().save({
    username: 'testuser',
    password: 'mypass',
    email: 'my@user.com'
  }).then(() => {
    const requestOptions = {
      url: Parse.serverURL + '/verifyPassword',
      headers: {
        'X-Parse-Application-Id': Parse.applicationId,
        'X-Parse-REST-API-Key': 'rest'
      },
      body: {
        username: 'testuser',
        email: 'my@user.com',
        password: 'mypass'
      },
      json: true
    };
    // Normalize a rejection into a value so the assertions below run either way.
    return rp.get(requestOptions).catch((err) => err);
  }).then((res) => {
    expect(typeof res).toBe('object');
    expect(typeof res['objectId']).toEqual('string');
    expect(res.hasOwnProperty('sessionToken')).toEqual(false);
    expect(res.hasOwnProperty('password')).toEqual(false);
    done();
  }).catch((err) => {
    fail(err);
    done();
  });
});
it('succeed in verifying password when username and password matches hash with json payload REST API', (done) => {
  // A correct username/password pair returns the user object without
  // leaking the session token or the password hash.
  const user = new Parse.User();
  user.save({
    username: 'testuser',
    password: 'mypass',
    email: 'my@user.com'
  }).then(() => {
    return verifyPassword('testuser', 'mypass');
  }).then((res) => {
    expect(typeof res).toBe('object');
    expect(typeof res['objectId']).toEqual('string');
    expect(res.hasOwnProperty('sessionToken')).toEqual(false);
    expect(res.hasOwnProperty('password')).toEqual(false);
    done();
  }).catch((err) => {
    // Without this handler a failed save/verify would hang until the
    // jasmine timeout instead of failing immediately (matches siblings).
    fail(err);
    done();
  });
});
it('succeed in verifying password when email and password matches hash with json payload REST API', (done) => {
  // Same as the username variant, but looking the user up by email
  // (third argument of verifyPassword selects email lookup).
  const user = new Parse.User();
  user.save({
    username: 'testuser',
    password: 'mypass',
    email: 'my@user.com'
  }).then(() => {
    return verifyPassword('my@user.com', 'mypass', true);
  }).then((res) => {
    expect(typeof res).toBe('object');
    expect(typeof res['objectId']).toEqual('string');
    expect(res.hasOwnProperty('sessionToken')).toEqual(false);
    expect(res.hasOwnProperty('password')).toEqual(false);
    done();
  }).catch((err) => {
    // Fail fast on rejection instead of hanging until the jasmine timeout.
    fail(err);
    done();
  });
});
it('succeed to verify password when username and password provided in query string REST API', (done) => {
  // Credentials supplied via query string; the endpoint responds with a
  // JSON string body (no `json: true` here) that must omit sessionToken
  // and password.
  const user = new Parse.User();
  user.save({
    username: 'testuser',
    password: 'mypass',
    email: 'my@user.com'
  }).then(() => {
    return rp.get({
      url: Parse.serverURL + '/verifyPassword',
      headers: {
        'X-Parse-Application-Id': Parse.applicationId,
        'X-Parse-REST-API-Key': 'rest'
      },
      qs: {
        username: 'testuser',
        password: 'mypass',
      }
    });
  }).then((res) => {
    expect(typeof res).toBe('string');
    const body = JSON.parse(res);
    expect(typeof body['objectId']).toEqual('string');
    expect(body.hasOwnProperty('sessionToken')).toEqual(false);
    expect(body.hasOwnProperty('password')).toEqual(false);
    done();
  }).catch((err) => {
    // Fail fast on rejection instead of hanging until the jasmine timeout.
    fail(err);
    done();
  });
});
it('succeed to verify password when email and password provided in query string REST API', (done) => {
  // Same as the username query-string variant, but identifying the user
  // by email in the query string.
  const user = new Parse.User();
  user.save({
    username: 'testuser',
    password: 'mypass',
    email: 'my@user.com'
  }).then(() => {
    return rp.get({
      url: Parse.serverURL + '/verifyPassword',
      headers: {
        'X-Parse-Application-Id': Parse.applicationId,
        'X-Parse-REST-API-Key': 'rest'
      },
      qs: {
        email: 'my@user.com',
        password: 'mypass',
      }
    });
  }).then((res) => {
    expect(typeof res).toBe('string');
    const body = JSON.parse(res);
    expect(typeof body['objectId']).toEqual('string');
    expect(body.hasOwnProperty('sessionToken')).toEqual(false);
    expect(body.hasOwnProperty('password')).toEqual(false);
    done();
  }).catch((err) => {
    // Fail fast on rejection instead of hanging until the jasmine timeout.
    fail(err);
    done();
  });
});
it('succeed to verify password with username when user1 has username === user2 email REST API', (done) => {
  // When one user's username equals another user's email, a username
  // lookup must resolve to user1 and verify against user1's password.
  const user1 = new Parse.User();
  user1.save({
    username: 'email@user.com',
    password: 'mypass1',
    email: '1@user.com'
  }).then(() => {
    const user2 = new Parse.User();
    return user2.save({
      username: 'user2',
      password: 'mypass2',
      email: 'email@user.com'
    });
  }).then(() => {
    return verifyPassword('email@user.com', 'mypass1');
  }).then((res) => {
    expect(typeof res).toBe('object');
    expect(typeof res['objectId']).toEqual('string');
    expect(res.hasOwnProperty('sessionToken')).toEqual(false);
    expect(res.hasOwnProperty('password')).toEqual(false);
    done();
  }).catch((err) => {
    // Fail fast on rejection instead of hanging until the jasmine timeout.
    fail(err);
    done();
  });
});
})

View File

@@ -1,6 +1,6 @@
'use strict';
const WinstonLoggerAdapter = require('../src/Adapters/Logger/WinstonLoggerAdapter').WinstonLoggerAdapter;
const WinstonLoggerAdapter = require('../lib/Adapters/Logger/WinstonLoggerAdapter').WinstonLoggerAdapter;
const request = require('request');
describe('info logs', () => {

View File

@@ -1,4 +1,4 @@
const batch = require('../src/batch');
const batch = require('../lib/batch');
const originalURL = '/parse/batch';
const serverURL = 'http://localhost:1234/parse';

View File

@@ -1,4 +1,4 @@
const cryptoUtils = require('../src/cryptoUtils');
const cryptoUtils = require('../lib/cryptoUtils');
function givesUniqueResults(fn, iterations) {
const results = {};

View File

@@ -1,11 +1,11 @@
"use strict"
// Sets up a Parse API server for testing.
const SpecReporter = require('jasmine-spec-reporter').SpecReporter;
const supportsColor = require('supports-color');
jasmine.DEFAULT_TIMEOUT_INTERVAL = process.env.PARSE_SERVER_TEST_TIMEOUT || 5000;
jasmine.getEnv().clearReporters();
jasmine.getEnv().addReporter(new SpecReporter());
jasmine.getEnv().addReporter(new SpecReporter({ colors: { enabled: supportsColor.stdout }, spec: { displayDuration: true }}));
global.on_db = (db, callback, elseCallback) => {
if (process.env.PARSE_SERVER_TEST_DB == db) {
@@ -23,15 +23,15 @@ if (global._babelPolyfill) {
process.exit(1);
}
const cache = require('../src/cache').default;
const ParseServer = require('../src/index').ParseServer;
const cache = require('../lib/cache').default;
const ParseServer = require('../lib/index').ParseServer;
const path = require('path');
const TestUtils = require('../src/TestUtils');
const GridStoreAdapter = require('../src/Adapters/Files/GridStoreAdapter').GridStoreAdapter;
const TestUtils = require('../lib/TestUtils');
const GridStoreAdapter = require('../lib/Adapters/Files/GridStoreAdapter').GridStoreAdapter;
const FSAdapter = require('@parse/fs-files-adapter');
import PostgresStorageAdapter from '../src/Adapters/Storage/Postgres/PostgresStorageAdapter';
import MongoStorageAdapter from '../src/Adapters/Storage/Mongo/MongoStorageAdapter';
const RedisCacheAdapter = require('../src/Adapters/Cache/RedisCacheAdapter').default;
const PostgresStorageAdapter = require('../lib/Adapters/Storage/Postgres/PostgresStorageAdapter').default;
const MongoStorageAdapter = require('../lib/Adapters/Storage/Mongo/MongoStorageAdapter').default;
const RedisCacheAdapter = require('../lib/Adapters/Cache/RedisCacheAdapter').default;
const mongoURI = 'mongodb://localhost:27017/parseServerMongoAdapterTestDatabase';
const postgresURI = 'postgres://localhost:5432/parse_server_postgres_adapter_test_database';
@@ -114,7 +114,6 @@ if (process.env.PARSE_SERVER_TEST_CACHE === 'redis') {
}
const openConnections = {};
// Set up a default API server for testing with default configuration.
let server;
@@ -173,7 +172,7 @@ beforeEach(done => {
throw error;
}
}
TestUtils.destroyAllDataPermanently()
TestUtils.destroyAllDataPermanently(true)
.catch(error => {
// For tests that connect to their own mongo, there won't be any data to delete.
if (error.message === 'ns not found' || error.message.startsWith('connect ECONNREFUSED')) {
@@ -197,7 +196,7 @@ afterEach(function(done) {
fail('There were open connections to the server left after the test finished');
}
on_db('postgres', () => {
TestUtils.destroyAllDataPermanently().then(done, done);
TestUtils.destroyAllDataPermanently(true).then(done, done);
}, done);
};
Parse.Cloud._removeAllHooks();
@@ -294,11 +293,13 @@ function expectError(errorCode, callback) {
error: function(obj, e) {
// Some methods provide 2 parameters.
e = e || obj;
if (!e) {
fail('expected a specific error but got a blank error');
return;
if (errorCode !== undefined) {
if (!e) {
fail('expected a specific error but got a blank error');
return;
}
expect(e.code).toEqual(errorCode, e.message);
}
expect(e.code).toEqual(errorCode, e.message);
if (callback) {
callback(e);
}
@@ -417,7 +418,7 @@ global.it_exclude_dbs = excluded => {
}
global.it_only_db = db => {
if (process.env.PARSE_SERVER_TEST_DB === db) {
if (process.env.PARSE_SERVER_TEST_DB === db || !process.env.PARSE_SERVER_TEST_DB && db == 'mongo') {
return it;
} else {
return xit;
@@ -438,7 +439,7 @@ global.describe_only_db = db => {
} else if (!process.env.PARSE_SERVER_TEST_DB && db == 'mongo') {
return describe;
} else {
return () => {};
return xdescribe;
}
}

View File

@@ -2,11 +2,11 @@
const request = require('request');
const parseServerPackage = require('../package.json');
const MockEmailAdapterWithOptions = require('./MockEmailAdapterWithOptions');
const ParseServer = require("../src/index");
const Config = require('../src/Config');
const ParseServer = require("../lib/index");
const Config = require('../lib/Config');
const express = require('express');
import MongoStorageAdapter from '../src/Adapters/Storage/Mongo/MongoStorageAdapter';
const MongoStorageAdapter = require('../lib/Adapters/Storage/Mongo/MongoStorageAdapter').default;
describe('server', () => {
it('requires a master key and app id', done => {

View File

@@ -1,4 +1,4 @@
import {
const {
numberParser,
numberOrBoolParser,
booleanParser,
@@ -6,7 +6,7 @@ import {
arrayParser,
moduleOrObjectParser,
nullParser,
} from '../src/Options/parsers';
} = require('../lib/Options/parsers');
describe('parsers', () => {
it('parses correctly with numberParser', () => {

View File

@@ -1,10 +1,10 @@
"use strict";
// These tests check the "create" / "update" functionality of the REST API.
const auth = require('../src/Auth');
const Config = require('../src/Config');
const auth = require('../lib/Auth');
const Config = require('../lib/Config');
const Parse = require('parse/node').Parse;
const rest = require('../src/rest');
const RestWrite = require('../src/RestWrite');
const rest = require('../lib/rest');
const RestWrite = require('../lib/RestWrite');
const request = require('request');
const rp = require('request-promise');

File diff suppressed because it is too large Load Diff

View File

@@ -4,7 +4,6 @@
"*spec.js"
],
"helpers": [
"../node_modules/babel-core/register.js",
"helper.js"
],
"random": false

Some files were not shown because too many files have changed in this diff Show More