Skip to content
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension


Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
21 changes: 21 additions & 0 deletions changelogs/CHANGELOG_alpha.md
Original file line number Diff line number Diff line change
@@ -1,3 +1,24 @@
## [9.4.1-alpha.3](https://github.com/parse-community/parse-server/compare/9.4.1-alpha.2...9.4.1-alpha.3) (2026-03-04)


### Bug Fixes

* Cloud Hooks and Cloud Jobs bypass `readOnlyMasterKey` write restriction ([GHSA-vc89-5g3r-cmhh](https://github.com/parse-community/parse-server/security/advisories/GHSA-vc89-5g3r-cmhh)) ([#10088](https://github.com/parse-community/parse-server/issues/10088)) ([9a3dd4d](https://github.com/parse-community/parse-server/commit/9a3dd4d2d55ad506348062b43a7fe42e22a57fe9))

## [9.4.1-alpha.2](https://github.com/parse-community/parse-server/compare/9.4.1-alpha.1...9.4.1-alpha.2) (2026-03-03)


### Performance Improvements

* Upgrade to mongodb 7.1.0 ([#10087](https://github.com/parse-community/parse-server/issues/10087)) ([bebf2fd](https://github.com/parse-community/parse-server/commit/bebf2fd62b51cfc35c271ad4c76b8f552f886ce8))

## [9.4.1-alpha.1](https://github.com/parse-community/parse-server/compare/9.4.0...9.4.1-alpha.1) (2026-03-03)


### Bug Fixes

* MongoDB default batch size changed from 1000 to 100 without announcement ([#10085](https://github.com/parse-community/parse-server/issues/10085)) ([8f17397](https://github.com/parse-community/parse-server/commit/8f1739788d434c91109f049a438c32bdd4fc26a5))

# [9.4.0-alpha.2](https://github.com/parse-community/parse-server/compare/9.4.0-alpha.1...9.4.0-alpha.2) (2026-02-27)


Expand Down
23 changes: 12 additions & 11 deletions package-lock.json

Some generated files are not rendered by default. Learn more about how customized files appear on GitHub.

4 changes: 2 additions & 2 deletions package.json
Original file line number Diff line number Diff line change
@@ -1,6 +1,6 @@
{
"name": "parse-server",
"version": "9.4.0",
"version": "9.4.1-alpha.3",
"description": "An express module providing a Parse-compatible API server",
"main": "lib/index.js",
"repository": {
Expand Down Expand Up @@ -45,7 +45,7 @@
"lodash": "4.17.23",
"lru-cache": "10.4.0",
"mime": "4.0.7",
"mongodb": "7.0.0",
"mongodb": "7.1.0",
"mustache": "4.2.0",
"otpauth": "9.4.0",
"parse": "8.3.0",
Expand Down
8 changes: 8 additions & 0 deletions spec/GridFSBucketStorageAdapter.spec.js
Original file line number Diff line number Diff line change
Expand Up @@ -29,6 +29,7 @@ describe_only_db('mongo')('GridFSBucket', () => {
enableSchemaHooks: true,
schemaCacheTtl: 5000,
maxTimeMS: 30000,
batchSize: 500,
disableIndexFieldValidation: true,
logClientEvents: [{ name: 'commandStarted' }],
createIndexUserUsername: true,
Expand All @@ -46,6 +47,13 @@ describe_only_db('mongo')('GridFSBucket', () => {
expect(db.options?.retryWrites).toEqual(true);
});

it('should store batchSize and filter it from MongoClient options', async () => {
  // batchSize is a Parse Server-specific option: the adapter keeps it on the
  // instance but must not forward it to the underlying MongoClient.
  const adapter = new GridFSBucketAdapter(databaseURI, { batchSize: 500 });
  expect(adapter._batchSize).toEqual(500);
  expect(adapter._mongoOptions.batchSize).toBeUndefined();
});

it('should save an encrypted file that can only be decrypted by a GridFS adapter with the encryptionKey', async () => {
const unencryptedAdapter = new GridFSBucketAdapter(databaseURI);
const encryptedAdapter = new GridFSBucketAdapter(
Expand Down
52 changes: 52 additions & 0 deletions spec/MongoStorageAdapter.spec.js
Original file line number Diff line number Diff line change
Expand Up @@ -108,6 +108,58 @@ describe_only_db('mongo')('MongoStorageAdapter', () => {
);
});

it('passes batchSize to the MongoDB driver find() call', async () => {
  const batchSize = 50;
  const adapter = new MongoStorageAdapter({
    uri: databaseURI,
    mongoOptions: { batchSize },
  });
  await adapter.createObject('BatchTest', { fields: {} }, { objectId: 'obj1' });

  // Wrap the driver's Collection.prototype.find so we can inspect the
  // options object Parse Server forwards to MongoDB.
  const realFind = Collection.prototype.find;
  let forwardedOptions;
  spyOn(Collection.prototype, 'find').and.callFake(function (query, options) {
    forwardedOptions = options;
    return realFind.call(this, query, options);
  });

  await adapter.find('BatchTest', { fields: {} }, {}, {});
  expect(forwardedOptions).toBeDefined();
  expect(forwardedOptions.batchSize).toEqual(batchSize);
});

it('passes batchSize to the MongoDB driver aggregate() call', async () => {
  const batchSize = 50;
  const adapter = new MongoStorageAdapter({
    uri: databaseURI,
    mongoOptions: { batchSize },
  });
  await adapter.createObject(
    'AggBatchTest',
    { fields: { count: { type: 'Number' } } },
    { objectId: 'obj1', count: 1 }
  );

  // Wrap the driver's Collection.prototype.aggregate so we can inspect the
  // options object Parse Server forwards to MongoDB.
  const realAggregate = Collection.prototype.aggregate;
  let forwardedOptions;
  spyOn(Collection.prototype, 'aggregate').and.callFake(function (pipeline, options) {
    forwardedOptions = options;
    return realAggregate.call(this, pipeline, options);
  });

  await adapter.aggregate('AggBatchTest', { fields: { count: { type: 'Number' } } }, [{ $match: {} }]);
  expect(forwardedOptions).toBeDefined();
  expect(forwardedOptions.batchSize).toEqual(batchSize);
});

it('defaults batchSize to 1000', async () => {
  // Reconfigure with no explicit adapter so the server constructs its own
  // MongoStorageAdapter using default options.
  await reconfigureServer({
    databaseURI: databaseURI,
    collectionPrefix: 'test_',
    databaseAdapter: undefined,
  });
  const { adapter } = Config.get(Parse.applicationId).database;
  expect(adapter._batchSize).toEqual(1000);
});

it('stores pointers with a _p_ prefix', done => {
const obj = {
objectId: 'bar',
Expand Down
135 changes: 135 additions & 0 deletions spec/rest.spec.js
Original file line number Diff line number Diff line change
Expand Up @@ -1172,6 +1172,141 @@ describe('read-only masterKey', () => {
done();
});
});

// Registering a Cloud Hook function is a write operation, so the read-only
// master key must be rejected with OPERATION_FORBIDDEN.
it('should throw when trying to create a hook function', async () => {
  loggerErrorSpy.calls.reset();
  try {
    await request({
      url: `${Parse.serverURL}/hooks/functions`,
      method: 'POST',
      headers: {
        'X-Parse-Application-Id': Parse.applicationId,
        // 'read-only-test' is the readOnlyMasterKey configured for this suite.
        'X-Parse-Master-Key': 'read-only-test',
        'Content-Type': 'application/json',
      },
      body: { functionName: 'readOnlyTest', url: 'https://example.com/hook' },
    });
    fail('should have thrown');
  } catch (res) {
    // The request helper rejects with the HTTP error response; assert on its body.
    expect(res.data.code).toBe(Parse.Error.OPERATION_FORBIDDEN);
    expect(res.data.error).toBe('Permission denied');
  }
});

// Registering a Cloud Hook trigger is a write operation, so the read-only
// master key must be rejected with OPERATION_FORBIDDEN.
it('should throw when trying to create a hook trigger', async () => {
  loggerErrorSpy.calls.reset();
  try {
    await request({
      url: `${Parse.serverURL}/hooks/triggers`,
      method: 'POST',
      headers: {
        'X-Parse-Application-Id': Parse.applicationId,
        // 'read-only-test' is the readOnlyMasterKey configured for this suite.
        'X-Parse-Master-Key': 'read-only-test',
        'Content-Type': 'application/json',
      },
      body: { className: 'MyClass', triggerName: 'beforeSave', url: 'https://example.com/hook' },
    });
    fail('should have thrown');
  } catch (res) {
    // The request helper rejects with the HTTP error response; assert on its body.
    expect(res.data.code).toBe(Parse.Error.OPERATION_FORBIDDEN);
    expect(res.data.error).toBe('Permission denied');
  }
});

// Updating an existing Cloud Hook is a write operation, so the read-only
// master key must be rejected even though the hook itself already exists.
it('should throw when trying to update a hook function', async () => {
  // First create the hook with the real master key
  await request({
    url: `${Parse.serverURL}/hooks/functions`,
    method: 'POST',
    headers: {
      'X-Parse-Application-Id': Parse.applicationId,
      'X-Parse-Master-Key': Parse.masterKey,
      'Content-Type': 'application/json',
    },
    body: { functionName: 'readOnlyUpdateTest', url: 'https://example.com/hook' },
  });
  loggerErrorSpy.calls.reset();
  try {
    // Now attempt the update with the read-only master key; it must fail.
    await request({
      url: `${Parse.serverURL}/hooks/functions/readOnlyUpdateTest`,
      method: 'PUT',
      headers: {
        'X-Parse-Application-Id': Parse.applicationId,
        'X-Parse-Master-Key': 'read-only-test',
        'Content-Type': 'application/json',
      },
      body: { url: 'https://example.com/hacked' },
    });
    fail('should have thrown');
  } catch (res) {
    // The request helper rejects with the HTTP error response; assert on its body.
    expect(res.data.code).toBe(Parse.Error.OPERATION_FORBIDDEN);
    expect(res.data.error).toBe('Permission denied');
  }
});

// Deleting a Cloud Hook is a write operation, so the read-only master key
// must be rejected even though the hook itself already exists.
it('should throw when trying to delete a hook function', async () => {
  // First create the hook with the real master key
  await request({
    url: `${Parse.serverURL}/hooks/functions`,
    method: 'POST',
    headers: {
      'X-Parse-Application-Id': Parse.applicationId,
      'X-Parse-Master-Key': Parse.masterKey,
      'Content-Type': 'application/json',
    },
    body: { functionName: 'readOnlyDeleteTest', url: 'https://example.com/hook' },
  });
  loggerErrorSpy.calls.reset();
  try {
    // NOTE(review): hook deletion is expressed as a PUT with { __op: 'Delete' },
    // presumably matching the Parse hooks REST API — confirm against the router.
    await request({
      url: `${Parse.serverURL}/hooks/functions/readOnlyDeleteTest`,
      method: 'PUT',
      headers: {
        'X-Parse-Application-Id': Parse.applicationId,
        'X-Parse-Master-Key': 'read-only-test',
        'Content-Type': 'application/json',
      },
      body: { __op: 'Delete' },
    });
    fail('should have thrown');
  } catch (res) {
    // The request helper rejects with the HTTP error response; assert on its body.
    expect(res.data.code).toBe(Parse.Error.OPERATION_FORBIDDEN);
    expect(res.data.error).toBe('Permission denied');
  }
});

// Triggering a Cloud Job mutates state (it runs arbitrary job code), so the
// read-only master key must be rejected with OPERATION_FORBIDDEN.
it('should throw when trying to run a job with readOnlyMasterKey', async () => {
  Parse.Cloud.job('readOnlyTestJob', () => {});
  loggerErrorSpy.calls.reset();
  try {
    await request({
      url: `${Parse.serverURL}/jobs/readOnlyTestJob`,
      method: 'POST',
      headers: {
        'X-Parse-Application-Id': Parse.applicationId,
        // 'read-only-test' is the readOnlyMasterKey configured for this suite.
        'X-Parse-Master-Key': 'read-only-test',
        'Content-Type': 'application/json',
      },
      body: {},
    });
    fail('should have thrown');
  } catch (res) {
    // The request helper rejects with the HTTP error response; assert on its body.
    expect(res.data.code).toBe(Parse.Error.OPERATION_FORBIDDEN);
    expect(res.data.error).toBe('Permission denied');
  }
});

it('should allow reading hooks with readOnlyMasterKey', async () => {
  // Read access must keep working — only write operations are restricted
  // for the read-only master key.
  const response = await request({
    url: `${Parse.serverURL}/hooks/functions`,
    method: 'GET',
    headers: {
      'X-Parse-Application-Id': Parse.applicationId,
      'X-Parse-Master-Key': 'read-only-test',
    },
  });
  expect(Array.isArray(response.data)).toBe(true);
});
});

describe('rest context', () => {
Expand Down
9 changes: 5 additions & 4 deletions src/Adapters/Files/GridFSBucketAdapter.js
Original file line number Diff line number Diff line change
Expand Up @@ -38,6 +38,7 @@ export class GridFSBucketAdapter extends FilesAdapter {
const defaultMongoOptions = {};
const _mongoOptions = Object.assign(defaultMongoOptions, mongoOptions);
this._clientMetadata = mongoOptions.clientMetadata;
this._batchSize = mongoOptions.batchSize;
// Remove Parse Server-specific options that should not be passed to MongoDB client
for (const key of ParseServerDatabaseOptions) {
delete _mongoOptions[key];
Expand Down Expand Up @@ -135,7 +136,7 @@ export class GridFSBucketAdapter extends FilesAdapter {

async deleteFile(filename: string) {
const bucket = await this._getBucket();
const documents = await bucket.find({ filename }).toArray();
const documents = await bucket.find({ filename }, { batchSize: this._batchSize }).toArray();
if (documents.length === 0) {
throw new Error('FileNotFound');
}
Expand Down Expand Up @@ -196,7 +197,7 @@ export class GridFSBucketAdapter extends FilesAdapter {
if (options.fileNames !== undefined) {
fileNames = options.fileNames;
} else {
const fileNamesIterator = await bucket.find().toArray();
const fileNamesIterator = await bucket.find({}, { batchSize: this._batchSize }).toArray();
fileNamesIterator.forEach(file => {
fileNames.push(file.filename);
});
Expand Down Expand Up @@ -226,7 +227,7 @@ export class GridFSBucketAdapter extends FilesAdapter {

async getMetadata(filename) {
const bucket = await this._getBucket();
const files = await bucket.find({ filename }).toArray();
const files = await bucket.find({ filename }, { batchSize: this._batchSize }).toArray();
if (files.length === 0) {
return {};
}
Expand All @@ -236,7 +237,7 @@ export class GridFSBucketAdapter extends FilesAdapter {

async handleFileStream(filename: string, req, res, contentType) {
const bucket = await this._getBucket();
const files = await bucket.find({ filename }).toArray();
const files = await bucket.find({ filename }, { batchSize: this._batchSize }).toArray();
if (files.length === 0) {
throw new Error('FileNotFound');
}
Expand Down
Loading
Loading