From 11b7293ae86e30153eb30c8893c542c8d579471c Mon Sep 17 00:00:00 2001 From: Daniel Date: Mon, 16 Mar 2026 21:16:16 +1100 Subject: [PATCH 01/14] docs: add Cloud Code Adapter design specification Defines the architecture for a next-generation Parse.Cloud system that replaces triggers.js with a CloudCodeManager, supports BYO adapters, and enables multi-language cloud code via three built-in adapter types (Legacy, InProcess, ExternalProcess). --- .../2026-03-16-cloud-code-adapter-design.md | 336 ++++++++++++++++++ 1 file changed, 336 insertions(+) create mode 100644 docs/superpowers/specs/2026-03-16-cloud-code-adapter-design.md diff --git a/docs/superpowers/specs/2026-03-16-cloud-code-adapter-design.md b/docs/superpowers/specs/2026-03-16-cloud-code-adapter-design.md new file mode 100644 index 0000000000..300e07515c --- /dev/null +++ b/docs/superpowers/specs/2026-03-16-cloud-code-adapter-design.md @@ -0,0 +1,336 @@ +# Cloud Code Adapter — Design Specification + +**Status:** Approved +**Target:** Parse Server 10.x +**Date:** 2026-03-16 +**Related:** [ParseCloud/1.0 Protocol](../../../parse-lite-sdks/docs/cloud-code-protocol.md), [Adapter Proposal](../../../parse-lite-sdks/docs/cloud-code-adapter-proposal.md) + +--- + +## 1. Problem Statement + +Parse Server's cloud code system (`Parse.Cloud.define`, `Parse.Cloud.beforeSave`, etc.) has fundamental limitations: + +1. **JavaScript only** — No support for cloud code in Swift, C#, Go, or other languages. +2. **Global singleton** — All cloud code shares `Parse.Cloud` namespace. No composition, difficult testing. +3. **In-process only** — No supported mechanism for cloud code as a separate process or service. +4. **No adapter pattern** — Hard-wired implementation with no pluggable interface. +5. **Manual webhook registration** — External webhooks require manual REST API calls. + +## 2. 
Design Decisions + +| Decision | Choice | Rationale | +|----------|--------|-----------| +| Adapter composition | Multiple adapters coexist | Users can run legacy JS + external Swift + custom adapters simultaneously | +| Hook conflicts | Error on conflict at startup | Fail fast, no ambiguity about which adapter handles a hook | +| Hot reload | Startup-only for v1 | Simpler implementation; can be added later | +| Registry API | Adapters only (no public registry) | Clean boundary, single integration point | +| Implementation location | In parse-server directly | Core server functionality | +| Webhook key | Explicitly configured (required) | No auto-generation, no persistence question | +| Language | TypeScript | Type safety throughout | +| Architecture | Replace triggers.js entirely | CloudCodeManager becomes single source of truth | + +## 3. Architecture + +### 3.1 CloudCodeManager — The New Core + +`CloudCodeManager` replaces `triggers.js` as the single source of truth for all hook registration, lookup, and execution. 
+ +```typescript +class CloudCodeManager { + private adapters: Map; + private store: HookStore; + + // Lifecycle + async initialize(adapterConfigs: AdapterConfig[], serverConfig: ParseServerConfig): Promise; + async shutdown(): Promise; + async healthCheck(): Promise>; + + // Registration (called by adapters via CloudCodeRegistry) + defineFunction(source: string, name: string, handler: CloudFunctionHandler, validator?: ValidatorHandler): void; + defineTrigger(source: string, className: string, triggerName: TriggerName, handler: CloudTriggerHandler, validator?: ValidatorHandler): void; + defineJob(source: string, name: string, handler: CloudJobHandler): void; + unregisterAll(source: string): void; + + // Lookup (consumed by routers, rest of Parse Server) + getFunction(name: string, applicationId: string): CloudFunctionHandler | undefined; + getTrigger(className: string, triggerType: string, applicationId: string): CloudTriggerHandler | undefined; + getJob(name: string, applicationId: string): CloudJobHandler | undefined; + getFunctionNames(applicationId: string): string[]; + getValidator(functionName: string, applicationId: string): ValidatorHandler | undefined; + + // Execution (replaces maybeRunTrigger, maybeRunValidator) + async runTrigger(triggerType: string, auth: Auth, parseObject: ParseObject, ...): Promise; + async runValidator(request: any, functionName: string, auth: Auth): Promise; +} +``` + +### 3.2 HookStore + +Typed internal structure replacing `Object.create(null)` pattern: + +```typescript +interface HookStore { + functions: Map; + triggers: Map; + // key format: `${triggerType}.${className}` + jobs: Map; + liveQueryHandlers: Array<{ handler: LiveQueryHandler; source: string }>; +} +``` + +### 3.3 CloudCodeAdapter Interface + +```typescript +interface CloudCodeAdapter { + /** Unique identifier for this adapter instance */ + readonly name: string; + + /** Register all hooks with the registry. Called once at startup. 
*/
  initialize(registry: CloudCodeRegistry, config: ParseServerConfig): Promise<void>;

  /** Return true if adapter is healthy and ready. */
  isHealthy(): Promise<boolean>;

  /** Clean up resources. Called during Parse Server shutdown. */
  shutdown(): Promise<void>;
}
**Duck-typed interface:**

```typescript
interface InProcessCloudCode {
  getRouter(): {
    getManifest(): CloudManifest;
    dispatchFunction(name: string, body: Record<string, unknown>): Promise<WebhookResponse>;
    dispatchTrigger(className: string, triggerName: string, body: Record<string, unknown>): Promise<WebhookResponse>;
    dispatchJob(name: string, body: Record<string, unknown>): Promise<WebhookResponse>;
  };
}

interface CloudManifest {
  protocol: string;
  hooks: {
    functions: Array<{ name: string }>;
    triggers: Array<{ className: string; triggerName: string }>;
    jobs: Array<{ name: string }>;
  };
}

type WebhookResponse =
  | { success: unknown }
  | { error: { code: number; message: string } };
```

### 4.3 ExternalProcessAdapter

Wraps `cloudCodeCommand: 'swift run CloudCode'`.

- `initialize()` spawns child process with environment variables, waits for `PARSE_CLOUD_READY:<port>` on stdout, fetches manifest via `GET http://localhost:<port>/`, registers bridge handlers.
- `isHealthy()` calls `GET http://localhost:<port>/health`.
Configuration + +### 5.1 ParseServerOptions Extension + +```typescript +interface ParseServerOptions { + // Existing (unchanged, routes through LegacyAdapter): + cloud?: string | ((parse: any) => void) | InProcessCloudCode; + + // New — external process: + cloudCodeCommand?: string; + webhookKey?: string; // Required when cloudCodeCommand is set + cloudCodeOptions?: { + startupTimeout?: number; // default 30000ms + healthCheckInterval?: number; // default 30000ms + shutdownTimeout?: number; // default 5000ms + maxRestartDelay?: number; // default 30000ms + }; + + // New — explicit BYO adapter(s): + cloudCodeAdapters?: CloudCodeAdapter[]; +} +``` + +### 5.2 Resolution Order + +All sources compose. Any hook collision throws at startup. + +```typescript +function resolveAdapters(options: ParseServerOptions): CloudCodeAdapter[] { + const adapters: CloudCodeAdapter[] = []; + + if (options.cloudCodeAdapters) { + adapters.push(...options.cloudCodeAdapters); + } + + if (options.cloud) { + if (typeof options.cloud === 'object' && typeof options.cloud.getRouter === 'function') { + adapters.push(new InProcessAdapter(options.cloud)); + } else { + adapters.push(new LegacyAdapter(options.cloud)); + } + } + + if (options.cloudCodeCommand) { + if (!options.webhookKey) { + throw new Error('webhookKey is required when using cloudCodeCommand'); + } + adapters.push(new ExternalProcessAdapter( + options.cloudCodeCommand, + options.webhookKey, + options.cloudCodeOptions + )); + } + + return adapters; +} +``` + +### 5.3 Startup Sequence + +1. `ParseServer` constructor +2. `resolveAdapters(options)` → `CloudCodeAdapter[]` +3. `CloudCodeManager.initialize(adapters, config)` + - For each adapter: create scoped `CloudCodeRegistry`, call `adapter.initialize(registry, config)` + - Registry calls flow into `HookStore` with conflict checks +4. If any conflict → throw, server does not start +5. 
All routers use `CloudCodeManager` for lookups + +### 5.4 Conflict Error Format + +``` +"Cloud code conflict: beforeSave on 'Todo' registered by both 'legacy' and 'external-process'" +``` + +## 6. Migration Strategy — Replacing triggers.js + +### 6.1 Current Consumers + +| Consumer | triggers.js Usage | Migration | +|----------|-------------------|-----------| +| `Parse.Cloud.js` | `addFunction`, `addTrigger`, `addJob`, `addConnectTrigger`, `addLiveQueryEventHandler` | LegacyAdapter delegates to `CloudCodeRegistry` | +| `FunctionsRouter.js` | `getFunction`, `getJob`, `getFunctionNames`, `maybeRunValidator` | Import from `CloudCodeManager` | +| `CloudCodeRouter.js` | `getJob` (scheduled jobs) | Import from `CloudCodeManager` | +| `RestWrite.js` | `getTrigger`, `maybeRunTrigger`, `getRequestObject` | Import from `CloudCodeManager` | +| `RestQuery.js` | `getTrigger`, `maybeRunTrigger` | Import from `CloudCodeManager` | +| `UsersRouter.js` | `getTrigger` (login/logout) | Import from `CloudCodeManager` | +| `FilesRouter.js` | `getTrigger` (file triggers) | Import from `CloudCodeManager` | +| `LiveQuery/` | `getTrigger`, `maybeRunTrigger`, connect/subscribe | Import from `CloudCodeManager` | +| `Config.js` | Validates cloud config | Updated for new options | + +### 6.2 Migration Approach + +1. **`triggers.ts` becomes a thin re-export facade** — all exports delegate to `CloudCodeManager` on the current app's `Config`. Existing import sites work without immediate changes. +2. **Incremental consumer migration** — update consumers one file at a time from `triggers.*` to `config.cloud.*` (the `CloudCodeManager` instance on `Config`). +3. **Facade removal** — once all consumers are migrated, delete `triggers.ts`. 
+ +### 6.3 Parse.Cloud.js Transformation + +`LegacyAdapter` temporarily patches `Parse.Cloud.*` during `initialize()`: + +```typescript +class LegacyAdapter implements CloudCodeAdapter { + readonly name = 'legacy'; + + async initialize(registry: CloudCodeRegistry, config: ParseServerConfig): Promise { + const originalDefine = Parse.Cloud.define; + Parse.Cloud.define = (name, handler, validator) => { + registry.defineFunction(name, handler, validator); + }; + // ... same for beforeSave, afterSave, etc. + + if (typeof this.cloud === 'string') { + require(this.cloud); + } else if (typeof this.cloud === 'function') { + this.cloud(Parse); + } + + Parse.Cloud.define = originalDefine; + // ... + } +} +``` + +### 6.4 Utility Functions + +Pure data transformation helpers from `triggers.js` (`getRequestObject()`, `getResponseObject()`, `resolveError()`, `toJSONwithObjects()`) move to `src/cloud-code/request-utils.ts`. They have no dependency on the hook store. + +## 7. Request/Response Bridge + +For `InProcessAdapter` and `ExternalProcessAdapter`, a bridge converts between Parse Server's internal request objects and the webhook body format. + +### Parse Request → Webhook Body + +Converts `Parse.Object` instances to JSON, maps all trigger-specific fields (object, original, query, file, context, etc.). + +### Webhook Response → Parse Result + +- `{ success: }` → return value +- `{ error: { code, message } }` → throw `Parse.Error` + +### beforeSave Special Case + +- Empty object `{}` → accept original (no changes) +- Object with fields → apply field changes to `request.object` +- Error → reject save + +## 8. 
Non-Goals (v1) + +- **Hot reload** — hooks registered once at startup +- **Public CloudCodeRegistry API** — all registration through adapters +- **Multi-process orchestration** — one external process per adapter +- **Auto-generated webhook key** — must be explicitly configured From 550ccada0ec9e23d272f6cfc19e6b0c3a51b0bb1 Mon Sep 17 00:00:00 2001 From: Daniel Date: Mon, 16 Mar 2026 21:19:26 +1100 Subject: [PATCH 02/14] docs: fix blocking issues in Cloud Code Adapter spec - Add beforePasswordResetRequest to TriggerName - Replace file/config trigger names with virtual className pattern (@File, @Config, @Connect) matching triggers.js internals - Add applicationId scoping (one CloudCodeManager per app on Config) - Add GlobalConfigRouter.js to consumer migration table - Add missing API methods (triggerExists, getJobs, runQueryTrigger, runFileTrigger, runGlobalConfigTrigger, runLiveQueryEventHandlers) - Clarify validators/rate-limiting only for LegacyAdapter - Document crash recovery ownership (manager calls unregisterAll) --- .../2026-03-16-cloud-code-adapter-design.md | 62 ++++++++++++++----- 1 file changed, 48 insertions(+), 14 deletions(-) diff --git a/docs/superpowers/specs/2026-03-16-cloud-code-adapter-design.md b/docs/superpowers/specs/2026-03-16-cloud-code-adapter-design.md index 300e07515c..24e30df610 100644 --- a/docs/superpowers/specs/2026-03-16-cloud-code-adapter-design.md +++ b/docs/superpowers/specs/2026-03-16-cloud-code-adapter-design.md @@ -26,15 +26,16 @@ Parse Server's cloud code system (`Parse.Cloud.define`, `Parse.Cloud.beforeSave` | Hot reload | Startup-only for v1 | Simpler implementation; can be added later | | Registry API | Adapters only (no public registry) | Clean boundary, single integration point | | Implementation location | In parse-server directly | Core server functionality | -| Webhook key | Explicitly configured (required) | No auto-generation, no persistence question | +| Webhook key | Explicitly configured (required) | No 
auto-generation, no persistence question. Diverges from proposal which offered auto-generation. | | Language | TypeScript | Type safety throughout | | Architecture | Replace triggers.js entirely | CloudCodeManager becomes single source of truth | +| applicationId scoping | One CloudCodeManager per app | Stored on `Config`, mirrors existing `_triggerStore[applicationId]` pattern | ## 3. Architecture ### 3.1 CloudCodeManager — The New Core -`CloudCodeManager` replaces `triggers.js` as the single source of truth for all hook registration, lookup, and execution. +`CloudCodeManager` replaces `triggers.js` as the single source of truth for all hook registration, lookup, and execution. One instance exists per `applicationId`, stored on the app's `Config` object. ```typescript class CloudCodeManager { @@ -50,21 +51,30 @@ class CloudCodeManager { defineFunction(source: string, name: string, handler: CloudFunctionHandler, validator?: ValidatorHandler): void; defineTrigger(source: string, className: string, triggerName: TriggerName, handler: CloudTriggerHandler, validator?: ValidatorHandler): void; defineJob(source: string, name: string, handler: CloudJobHandler): void; + defineLiveQueryHandler(source: string, handler: LiveQueryHandler): void; unregisterAll(source: string): void; // Lookup (consumed by routers, rest of Parse Server) - getFunction(name: string, applicationId: string): CloudFunctionHandler | undefined; - getTrigger(className: string, triggerType: string, applicationId: string): CloudTriggerHandler | undefined; - getJob(name: string, applicationId: string): CloudJobHandler | undefined; - getFunctionNames(applicationId: string): string[]; - getValidator(functionName: string, applicationId: string): ValidatorHandler | undefined; - - // Execution (replaces maybeRunTrigger, maybeRunValidator) + getFunction(name: string): CloudFunctionHandler | undefined; + getTrigger(className: string, triggerType: string): CloudTriggerHandler | undefined; + triggerExists(className: 
string, triggerType: string): boolean; + getJob(name: string): CloudJobHandler | undefined; + getJobs(): Map; + getFunctionNames(): string[]; + getValidator(functionName: string): ValidatorHandler | undefined; + + // Execution (replaces maybeRunTrigger, maybeRunValidator, and specialized variants) async runTrigger(triggerType: string, auth: Auth, parseObject: ParseObject, ...): Promise; + async runQueryTrigger(triggerType: string, className: string, query: any, ...): Promise; + async runFileTrigger(triggerType: string, file: any, ...): Promise; + async runGlobalConfigTrigger(triggerType: string, config: any, ...): Promise; async runValidator(request: any, functionName: string, auth: Auth): Promise; + async runLiveQueryEventHandlers(data: any): void; } ``` +Since the manager is scoped per-app, lookup methods no longer need an `applicationId` parameter. + ### 3.2 HookStore Typed internal structure replacing `Object.create(null)` pattern: @@ -106,6 +116,7 @@ interface CloudCodeRegistry { defineFunction(name: string, handler: CloudFunctionHandler, validator?: ValidatorHandler): void; defineTrigger(className: string, triggerName: TriggerName, handler: CloudTriggerHandler, validator?: ValidatorHandler): void; defineJob(name: string, handler: CloudJobHandler): void; + defineLiveQueryHandler(handler: LiveQueryHandler): void; } type TriggerName = @@ -113,11 +124,23 @@ type TriggerName = | 'beforeDelete' | 'afterDelete' | 'beforeFind' | 'afterFind' | 'beforeLogin' | 'afterLogin' | 'afterLogout' - | 'beforeConnect' | 'beforeSubscribe' | 'afterEvent' - | 'beforeSaveFile' | 'afterSaveFile' - | 'beforeDeleteFile' | 'afterDeleteFile'; + | 'beforePasswordResetRequest' + | 'beforeConnect' | 'beforeSubscribe' | 'afterEvent'; ``` +**Virtual classNames for special trigger targets:** + +File and Config triggers use standard trigger names (`beforeSave`, `afterSave`, etc.) 
with virtual classNames: + +| Target | Virtual className | Example registration | +|--------|-------------------|---------------------| +| `Parse.File` | `@File` | `defineTrigger('@File', 'beforeSave', handler)` | +| `Parse.Config` | `@Config` | `defineTrigger('@Config', 'beforeSave', handler)` | +| `beforeConnect` | `@Connect` | `defineTrigger('@Connect', 'beforeConnect', handler)` | +| `beforeSubscribe` | class name | `defineTrigger('Todo', 'beforeSubscribe', handler)` | + +This matches the existing internal storage pattern in `triggers.js` where `getClassName(Parse.File)` returns `'@File'`. The LegacyAdapter maps `Parse.Cloud.beforeSaveFile(handler)` to `defineTrigger('@File', 'beforeSave', handler)`, and `Parse.Cloud.beforeConnect(handler)` to `defineTrigger('@Connect', 'beforeConnect', handler)`. + ## 4. Built-in Adapter Implementations ### 4.1 LegacyAdapter @@ -169,7 +192,7 @@ Wraps `cloudCodeCommand: 'swift run CloudCode'`. - `initialize()` spawns child process with environment variables, waits for `PARSE_CLOUD_READY:` on stdout, fetches manifest via `GET http://localhost:/`, registers bridge handlers. - `isHealthy()` calls `GET http://localhost:/health`. - `shutdown()` sends `SIGTERM`, waits `shutdownTimeout`, then `SIGKILL`. -- Crash recovery: unregisters hooks, restarts with exponential backoff (1s, 2s, 4s, 8s, capped at `maxRestartDelay`). +- Crash recovery: the `CloudCodeManager` calls `unregisterAll(adapter.name)` internally, then the adapter restarts with exponential backoff (1s, 2s, 4s, 8s, capped at `maxRestartDelay`). 
**Environment variables passed to child process:** @@ -269,6 +292,7 @@ function resolveAdapters(options: ParseServerOptions): CloudCodeAdapter[] { | `RestQuery.js` | `getTrigger`, `maybeRunTrigger` | Import from `CloudCodeManager` | | `UsersRouter.js` | `getTrigger` (login/logout) | Import from `CloudCodeManager` | | `FilesRouter.js` | `getTrigger` (file triggers) | Import from `CloudCodeManager` | +| `GlobalConfigRouter.js` | `maybeRunGlobalConfigTrigger` | Import from `CloudCodeManager` | | `LiveQuery/` | `getTrigger`, `maybeRunTrigger`, connect/subscribe | Import from `CloudCodeManager` | | `Config.js` | Validates cloud config | Updated for new options | @@ -307,7 +331,17 @@ class LegacyAdapter implements CloudCodeAdapter { ### 6.4 Utility Functions -Pure data transformation helpers from `triggers.js` (`getRequestObject()`, `getResponseObject()`, `resolveError()`, `toJSONwithObjects()`) move to `src/cloud-code/request-utils.ts`. They have no dependency on the hook store. +Pure data transformation helpers from `triggers.js` move to `src/cloud-code/request-utils.ts`. They have no dependency on the hook store: + +- `getRequestObject()`, `getResponseObject()` — build request/response objects for trigger handlers +- `getRequestQueryObject()` — build request for query triggers +- `resolveError()` — normalize error responses +- `toJSONwithObjects()` — serialize with Parse object preservation +- `inflate()` — inflate REST data into Parse Objects (used by `RestWrite.js`) + +### 6.5 Validators and Rate Limiting + +Validators (including `requireUser`, `requireMaster`, `fields`, `rateLimit`) are supported only through the `LegacyAdapter`. Non-legacy adapters (InProcess, External) handle validation within their own process — Parse Server does not apply server-side validators for hooks registered by these adapters. Rate limiting middleware integration (`addRateLimit`) is handled by the `LegacyAdapter` during `initialize()`, preserving existing behavior. ## 7. 
Request/Response Bridge From 8e6029b8ba55032908d60c096eba7a151e0f829b Mon Sep 17 00:00:00 2001 From: Daniel Date: Mon, 16 Mar 2026 21:21:23 +1100 Subject: [PATCH 03/14] docs: address minor review feedback in Cloud Code Adapter spec - Rename getValidator param to key (covers both functions and triggers) - Note runTrigger subsumes maybeRunAfterFindTrigger - Make runLiveQueryEventHandlers synchronous to match existing behavior - Add defineTrigger validation rules (enforced for all adapters) - Add getRequestFileObject to utility function list --- .../specs/2026-03-16-cloud-code-adapter-design.md | 13 +++++++++++-- 1 file changed, 11 insertions(+), 2 deletions(-) diff --git a/docs/superpowers/specs/2026-03-16-cloud-code-adapter-design.md b/docs/superpowers/specs/2026-03-16-cloud-code-adapter-design.md index 24e30df610..7e546e065f 100644 --- a/docs/superpowers/specs/2026-03-16-cloud-code-adapter-design.md +++ b/docs/superpowers/specs/2026-03-16-cloud-code-adapter-design.md @@ -61,18 +61,26 @@ class CloudCodeManager { getJob(name: string): CloudJobHandler | undefined; getJobs(): Map; getFunctionNames(): string[]; - getValidator(functionName: string): ValidatorHandler | undefined; + getValidator(key: string): ValidatorHandler | undefined; + // key is a function name or `${triggerType}.${className}` for trigger validators // Execution (replaces maybeRunTrigger, maybeRunValidator, and specialized variants) + // runTrigger also subsumes maybeRunAfterFindTrigger (className + triggerType lookup) async runTrigger(triggerType: string, auth: Auth, parseObject: ParseObject, ...): Promise; async runQueryTrigger(triggerType: string, className: string, query: any, ...): Promise; async runFileTrigger(triggerType: string, file: any, ...): Promise; async runGlobalConfigTrigger(triggerType: string, config: any, ...): Promise; async runValidator(request: any, functionName: string, auth: Auth): Promise; - async runLiveQueryEventHandlers(data: any): void; + runLiveQueryEventHandlers(data: 
any): void; // synchronous, matches existing behavior } ``` +**Registration validation:** `defineTrigger()` enforces className/triggerType rules for all adapters (not just Legacy): +- No `beforeSave` on `_PushStatus` +- `beforeLogin`/`afterLogin`/`beforePasswordResetRequest` only on `_User` +- `afterLogout` only on `_Session` +``` + Since the manager is scoped per-app, lookup methods no longer need an `applicationId` parameter. ### 3.2 HookStore @@ -335,6 +343,7 @@ Pure data transformation helpers from `triggers.js` move to `src/cloud-code/requ - `getRequestObject()`, `getResponseObject()` — build request/response objects for trigger handlers - `getRequestQueryObject()` — build request for query triggers +- `getRequestFileObject()` — build request for file triggers - `resolveError()` — normalize error responses - `toJSONwithObjects()` — serialize with Parse object preservation - `inflate()` — inflate REST data into Parse Objects (used by `RestWrite.js`) From 3843f005e468e58c8f8de58e922e3cb7dfc3f20f Mon Sep 17 00:00:00 2001 From: Daniel Date: Mon, 16 Mar 2026 21:41:13 +1100 Subject: [PATCH 04/14] docs: add Cloud Code Adapter implementation plan Comprehensive implementation plan addressing all review feedback: - LegacyAdapter loads files only (no Parse.Cloud patching) - triggers.js facade delegates reads AND writes with correct signatures - AppCache persistence survives Config.put() overwrites - _unregisterAll uses synchronous clearAll() for test cleanup - getJobsObject() for facade backwards compatibility - @parse-lite/cloud usage example in plan header --- .../plans/2026-03-16-cloud-code-adapter.md | 2020 +++++++++++++++++ 1 file changed, 2020 insertions(+) create mode 100644 docs/superpowers/plans/2026-03-16-cloud-code-adapter.md diff --git a/docs/superpowers/plans/2026-03-16-cloud-code-adapter.md b/docs/superpowers/plans/2026-03-16-cloud-code-adapter.md new file mode 100644 index 0000000000..baac476cb1 --- /dev/null +++ 
b/docs/superpowers/plans/2026-03-16-cloud-code-adapter.md @@ -0,0 +1,2020 @@ +# Cloud Code Adapter Implementation Plan + +> **For agentic workers:** REQUIRED: Use superpowers:subagent-driven-development (if subagents available) or superpowers:executing-plans to implement this plan. Steps use checkbox (`- [ ]`) syntax for tracking. + +**Goal:** Replace `triggers.js` with a `CloudCodeManager` that supports composable adapters (Legacy, InProcess, ExternalProcess, BYO), enabling multi-language cloud code without breaking existing Parse Server users. + +**Architecture:** One `CloudCodeManager` per `applicationId`, stored on `this.config` (which flows into `AppCache` via `Config.put()`). The manager owns hook registration (with conflict detection), lookup, and execution. Three built-in adapters cover existing use cases. `triggers.js` becomes a facade that delegates both reads AND writes to `CloudCodeManager` when one exists, preserving all existing `Parse.Cloud.js` behavior (validators, rate limiting, auth trigger argument parsing) without reimplementing it. 
+ +**Tech Stack:** TypeScript, Babel (transpiles `.ts` via `@babel/preset-typescript`), Jasmine tests, Parse JS SDK + +**Spec:** `docs/superpowers/specs/2026-03-16-cloud-code-adapter-design.md` + +**Target User Experience (`@parse-lite/cloud`):** + +```typescript +import ParseServer from 'parse-server'; +import { ParseCloud } from '@parse-lite/cloud'; + +const cloud = new ParseCloud(); +cloud.class('Todo') + .requireUser() + .beforeSave(async ({ object, isNew, user }) => { + if (isNew) object.authorId = user!.objectId; + return object; + }); +cloud.function('getStats').requireMaster().handle(async () => ({ total: 42 })); + +new ParseServer({ + databaseURI: 'mongodb://localhost:27017/myapp', + appId: 'myapp', + masterKey: 'secret', + cloud: cloud, // ← ParseCloud has getRouter(), detected as InProcessAdapter +}); +``` + +--- + +## File Structure + +### New Files + +| File | Responsibility | +|------|---------------| +| `src/cloud-code/types.ts` | TypeScript interfaces: `CloudCodeAdapter`, `CloudCodeRegistry`, `HookStore`, `TriggerName`, handler types, config types | +| `src/cloud-code/CloudCodeManager.ts` | Core manager: adapter lifecycle, hook store, registration with conflict detection, lookup, execution | +| `src/cloud-code/adapters/LegacyAdapter.ts` | Loads user cloud code file (delegates to existing `Parse.Cloud.js` → `triggers.js` facade → manager) | +| `src/cloud-code/adapters/InProcessAdapter.ts` | Duck-typed `getRouter()` integration, manifest-based registration, webhook body bridge | +| `src/cloud-code/adapters/ExternalProcessAdapter.ts` | Child process lifecycle, ParseCloud/1.0 protocol, health monitoring | +| `src/cloud-code/adapters/webhook-bridge.ts` | `requestToWebhookBody()` and `webhookResponseToResult()` shared by InProcess and External adapters | +| `spec/CloudCodeManager.spec.js` | Unit tests for CloudCodeManager | +| `spec/InProcessAdapter.spec.js` | Tests for InProcessAdapter with mock router | +| `spec/ExternalProcessAdapter.spec.js` | Tests 
for ExternalProcessAdapter lifecycle | +| `spec/CloudCodeAdapter.integration.spec.js` | Integration tests for composable adapters | + +### Modified Files + +| File | Change | +|------|--------| +| `src/triggers.js` | Becomes facade: all `add*`/`get*`/`remove*` functions delegate to `CloudCodeManager` via `AppCache` when one exists | +| `src/ParseServer.ts` | Initialize `CloudCodeManager` with resolved adapters during `start()`, store on `AppCache` | +| `src/Options/index.js` | Add `cloudCodeCommand`, `webhookKey`, `cloudCodeOptions`, `cloudCodeAdapters` types | +| `src/Options/Definitions.js` | Add option definitions for new config fields | + +### Key Architectural Decision: LegacyAdapter Does NOT Patch Parse.Cloud + +The LegacyAdapter simply loads the user's cloud code file. `Parse.Cloud.define()`, `Parse.Cloud.beforeSave()`, etc. continue to call `triggers.addFunction()`, `triggers.addTrigger()` exactly as they do today. The facade in `triggers.js` intercepts these writes and delegates to `CloudCodeManager.defineFunction()` / `CloudCodeManager.defineTrigger()`. 
+ +This approach: +- Preserves `validateValidator()` and `addRateLimit()` calls in `Parse.Cloud.js` without reimplementation +- Preserves complex auth trigger argument parsing (`beforeLogin` can be called with or without className) +- Preserves `getClassName()` / `getRoute()` / `isParseObjectConstructor()` logic +- Eliminates a whole class of patching/restore bugs + +--- + +## Chunk 1: Types and CloudCodeManager Core + +### Task 1: Define TypeScript Interfaces + +**Files:** +- Create: `src/cloud-code/types.ts` + +- [ ] **Step 1: Create the types file** + +```typescript +// src/cloud-code/types.ts + +// --- Trigger Types (mirrors existing Types object in triggers.js) --- + +export const TriggerTypes = Object.freeze({ + beforeLogin: 'beforeLogin', + afterLogin: 'afterLogin', + afterLogout: 'afterLogout', + beforePasswordResetRequest: 'beforePasswordResetRequest', + beforeSave: 'beforeSave', + afterSave: 'afterSave', + beforeDelete: 'beforeDelete', + afterDelete: 'afterDelete', + beforeFind: 'beforeFind', + afterFind: 'afterFind', + beforeConnect: 'beforeConnect', + beforeSubscribe: 'beforeSubscribe', + afterEvent: 'afterEvent', +}); + +export type TriggerName = keyof typeof TriggerTypes; + +// --- Handler Types --- + +export type CloudFunctionHandler = (request: any) => any; +export type CloudTriggerHandler = (request: any) => any; +export type CloudJobHandler = (request: any) => any; +export type LiveQueryHandler = (data: any) => void; +export type ValidatorHandler = Record | ((request: any) => any); + +// --- Hook Store --- + +export interface FunctionEntry { + handler: CloudFunctionHandler; + source: string; + validator?: ValidatorHandler; +} + +export interface TriggerEntry { + handler: CloudTriggerHandler; + source: string; + validator?: ValidatorHandler; +} + +export interface JobEntry { + handler: CloudJobHandler; + source: string; +} + +export interface LiveQueryEntry { + handler: LiveQueryHandler; + source: string; +} + +export interface HookStore { + 
+  functions: Map<string, FunctionEntry>;
+  triggers: Map<string, TriggerEntry>;
+  jobs: Map<string, JobEntry>;
+  liveQueryHandlers: LiveQueryEntry[];
+}
+
+// --- Server Config ---
+
+export interface ParseServerConfig {
+  appId: string;
+  masterKey: string;
+  serverURL: string;
+}
+
+// --- Registry (scoped per-adapter) ---
+
+export interface CloudCodeRegistry {
+  defineFunction(name: string, handler: CloudFunctionHandler, validator?: ValidatorHandler): void;
+  defineTrigger(className: string, triggerName: TriggerName, handler: CloudTriggerHandler, validator?: ValidatorHandler): void;
+  defineJob(name: string, handler: CloudJobHandler): void;
+  defineLiveQueryHandler(handler: LiveQueryHandler): void;
+}
+
+// --- Adapter Interface ---
+
+export interface CloudCodeAdapter {
+  readonly name: string;
+  initialize(registry: CloudCodeRegistry, config: ParseServerConfig): Promise<void>;
+  isHealthy(): Promise<boolean>;
+  shutdown(): Promise<void>;
+}
+
+// --- InProcess duck-typed interface ---
+
+export interface CloudManifest {
+  protocol: string;
+  hooks: {
+    functions: Array<{ name: string }>;
+    triggers: Array<{ className: string; triggerName: string }>;
+    jobs: Array<{ name: string }>;
+  };
+}
+
+export type WebhookResponse =
+  | { success: unknown }
+  | { error: { code: number; message: string } };
+
+export interface CloudRouter {
+  getManifest(): CloudManifest;
+  dispatchFunction(name: string, body: Record<string, unknown>): Promise<WebhookResponse>;
+  dispatchTrigger(className: string, triggerName: string, body: Record<string, unknown>): Promise<WebhookResponse>;
+  dispatchJob(name: string, body: Record<string, unknown>): Promise<WebhookResponse>;
+}
system" +``` + +--- + +### Task 2: CloudCodeManager — Registration, Conflict Detection, Lookup + +**Files:** +- Create: `src/cloud-code/CloudCodeManager.ts` +- Test: `spec/CloudCodeManager.spec.js` + +- [ ] **Step 1: Write failing tests** + +```javascript +// spec/CloudCodeManager.spec.js +const { CloudCodeManager } = require('../lib/cloud-code/CloudCodeManager'); + +describe('CloudCodeManager', () => { + let manager; + + beforeEach(() => { + manager = new CloudCodeManager(); + }); + + describe('createRegistry', () => { + it('creates a scoped registry for an adapter', () => { + const registry = manager.createRegistry('test-adapter'); + expect(registry.defineFunction).toBeDefined(); + expect(registry.defineTrigger).toBeDefined(); + expect(registry.defineJob).toBeDefined(); + expect(registry.defineLiveQueryHandler).toBeDefined(); + }); + }); + + describe('defineFunction', () => { + it('registers a cloud function', () => { + const handler = () => {}; + manager.defineFunction('test', 'hello', handler); + expect(manager.getFunction('hello')).toBe(handler); + }); + + it('throws on duplicate function from different source', () => { + manager.defineFunction('adapter-a', 'hello', () => {}); + expect(() => { + manager.defineFunction('adapter-b', 'hello', () => {}); + }).toThrowError(/Cloud code conflict.*hello.*adapter-a.*adapter-b/); + }); + + it('allows re-registration from same source (overwrite)', () => { + const handler1 = () => 'first'; + const handler2 = () => 'second'; + manager.defineFunction('test', 'hello', handler1); + manager.defineFunction('test', 'hello', handler2); + expect(manager.getFunction('hello')).toBe(handler2); + }); + + it('supports namespaced function names', () => { + const handler = () => {}; + manager.defineFunction('test', 'ns.sub.func', handler); + expect(manager.getFunction('ns.sub.func')).toBe(handler); + }); + }); + + describe('defineTrigger', () => { + it('registers a trigger', () => { + const handler = () => {}; + 
manager.defineTrigger('test', 'Todo', 'beforeSave', handler); + expect(manager.getTrigger('Todo', 'beforeSave')).toBe(handler); + }); + + it('throws on duplicate trigger from different source', () => { + manager.defineTrigger('adapter-a', 'Todo', 'beforeSave', () => {}); + expect(() => { + manager.defineTrigger('adapter-b', 'Todo', 'beforeSave', () => {}); + }).toThrowError(/Cloud code conflict.*beforeSave.*Todo.*adapter-a.*adapter-b/); + }); + + it('rejects beforeSave on _PushStatus', () => { + expect(() => { + manager.defineTrigger('test', '_PushStatus', 'beforeSave', () => {}); + }).toThrowError(); + }); + + it('allows afterSave on _PushStatus', () => { + expect(() => { + manager.defineTrigger('test', '_PushStatus', 'afterSave', () => {}); + }).not.toThrow(); + }); + + it('allows beforeLogin only on _User', () => { + expect(() => { + manager.defineTrigger('test', 'Todo', 'beforeLogin', () => {}); + }).toThrowError(); + expect(() => { + manager.defineTrigger('test', '_User', 'beforeLogin', () => {}); + }).not.toThrow(); + }); + + it('rejects all triggers on _Session except afterLogout', () => { + expect(() => { + manager.defineTrigger('test', '_Session', 'beforeSave', () => {}); + }).toThrowError(/Only the afterLogout trigger/); + expect(() => { + manager.defineTrigger('test', '_Session', 'afterLogout', () => {}); + }).not.toThrow(); + }); + + it('uses virtual className @File for file triggers', () => { + const handler = () => {}; + manager.defineTrigger('test', '@File', 'beforeSave', handler); + expect(manager.getTrigger('@File', 'beforeSave')).toBe(handler); + }); + + it('uses virtual className @Connect for connect triggers', () => { + const handler = () => {}; + manager.defineTrigger('test', '@Connect', 'beforeConnect', handler); + expect(manager.getTrigger('@Connect', 'beforeConnect')).toBe(handler); + }); + }); + + describe('defineJob', () => { + it('registers a job', () => { + const handler = () => {}; + manager.defineJob('test', 'myJob', handler); + 
expect(manager.getJob('myJob')).toBe(handler); + }); + + it('throws on duplicate job from different source', () => { + manager.defineJob('adapter-a', 'myJob', () => {}); + expect(() => { + manager.defineJob('adapter-b', 'myJob', () => {}); + }).toThrowError(/Cloud code conflict/); + }); + }); + + describe('lookup methods', () => { + it('getFunctionNames returns all registered names', () => { + manager.defineFunction('test', 'a', () => {}); + manager.defineFunction('test', 'b', () => {}); + manager.defineFunction('test', 'c', () => {}); + expect(manager.getFunctionNames().sort()).toEqual(['a', 'b', 'c']); + }); + + it('getJobs returns all jobs as a Map', () => { + const h1 = () => {}; + const h2 = () => {}; + manager.defineJob('test', 'job1', h1); + manager.defineJob('test', 'job2', h2); + const jobs = manager.getJobs(); + expect(jobs.get('job1')).toBe(h1); + expect(jobs.get('job2')).toBe(h2); + }); + + it('getJobsObject returns plain object (for facade compat)', () => { + const h1 = () => {}; + manager.defineJob('test', 'job1', h1); + const jobs = manager.getJobsObject(); + expect(jobs['job1']).toBe(h1); + }); + + it('triggerExists returns boolean', () => { + manager.defineTrigger('test', 'Todo', 'beforeSave', () => {}); + expect(manager.triggerExists('Todo', 'beforeSave')).toBe(true); + expect(manager.triggerExists('Todo', 'afterSave')).toBe(false); + }); + + it('getFunction returns undefined for unregistered', () => { + expect(manager.getFunction('nonexistent')).toBeUndefined(); + }); + + it('getTrigger returns undefined for unregistered', () => { + expect(manager.getTrigger('Todo', 'beforeSave')).toBeUndefined(); + }); + }); + + describe('validators', () => { + it('getValidator returns validator for function', () => { + const validator = { requireUser: true }; + manager.defineFunction('test', 'secured', () => {}, validator); + expect(manager.getValidator('secured')).toEqual(validator); + }); + + it('getValidator returns validator for trigger (key format: 
triggerType.className)', () => { + const validator = { requireMaster: true }; + manager.defineTrigger('test', 'Todo', 'beforeSave', () => {}, validator); + expect(manager.getValidator('beforeSave.Todo')).toEqual(validator); + }); + + it('getValidator returns undefined when no validator set', () => { + manager.defineFunction('test', 'noValidator', () => {}); + expect(manager.getValidator('noValidator')).toBeUndefined(); + }); + }); + + describe('unregisterAll', () => { + it('removes all hooks from a source', () => { + manager.defineFunction('adapter-a', 'fn1', () => {}); + manager.defineFunction('adapter-b', 'fn2', () => {}); + manager.defineTrigger('adapter-a', 'Todo', 'beforeSave', () => {}); + manager.defineJob('adapter-a', 'job1', () => {}); + + manager.unregisterAll('adapter-a'); + + expect(manager.getFunction('fn1')).toBeUndefined(); + expect(manager.getFunction('fn2')).toBeDefined(); + expect(manager.getTrigger('Todo', 'beforeSave')).toBeUndefined(); + expect(manager.getJob('job1')).toBeUndefined(); + }); + + it('removes live query handlers from a source', () => { + let callCount = 0; + manager.defineLiveQueryHandler('keep', () => { callCount++; }); + manager.defineLiveQueryHandler('remove', () => { callCount += 100; }); + + manager.unregisterAll('remove'); + manager.runLiveQueryEventHandlers({}); + + expect(callCount).toBe(1); + }); + }); + + describe('defineLiveQueryHandler', () => { + it('registers and executes handlers synchronously', () => { + const calls = []; + manager.defineLiveQueryHandler('a', (data) => calls.push(['a', data])); + manager.defineLiveQueryHandler('b', (data) => calls.push(['b', data])); + + manager.runLiveQueryEventHandlers({ event: 'test' }); + + expect(calls).toEqual([['a', { event: 'test' }], ['b', { event: 'test' }]]); + }); + }); + + describe('registry scoping', () => { + it('registry calls use the adapter name as source', () => { + const registry = manager.createRegistry('my-adapter'); + registry.defineFunction('hello', () => 
{}); + + expect(() => { + manager.defineFunction('other-adapter', 'hello', () => {}); + }).toThrowError(/my-adapter.*other-adapter/); + }); + }); + + describe('lifecycle', () => { + it('initialize calls each adapter in order', async () => { + const order = []; + const makeAdapter = (name) => ({ + name, + async initialize(registry) { order.push(name); }, + async isHealthy() { return true; }, + async shutdown() {}, + }); + + await manager.initialize( + [makeAdapter('first'), makeAdapter('second')], + { appId: 'test', masterKey: 'mk', serverURL: 'http://localhost' } + ); + + expect(order).toEqual(['first', 'second']); + }); + + it('rejects duplicate adapter names', async () => { + const adapter = { + name: 'dupe', + async initialize() {}, + async isHealthy() { return true; }, + async shutdown() {}, + }; + + await expectAsync( + manager.initialize([adapter, adapter], { appId: 'test', masterKey: 'mk', serverURL: 'http://localhost' }) + ).toBeRejectedWithError(/Duplicate adapter name/); + }); + + it('shutdown calls each adapter and clears store', async () => { + let shutdownCalled = false; + const adapter = { + name: 'test', + async initialize(registry) { + registry.defineFunction('fn', () => {}); + }, + async isHealthy() { return true; }, + async shutdown() { shutdownCalled = true; }, + }; + + await manager.initialize([adapter], { appId: 'test', masterKey: 'mk', serverURL: 'http://localhost' }); + expect(manager.getFunction('fn')).toBeDefined(); + + await manager.shutdown(); + expect(shutdownCalled).toBe(true); + expect(manager.getFunction('fn')).toBeUndefined(); + }); + + it('healthCheck returns status per adapter', async () => { + const healthy = { name: 'ok', async initialize() {}, async isHealthy() { return true; }, async shutdown() {} }; + const unhealthy = { name: 'bad', async initialize() {}, async isHealthy() { return false; }, async shutdown() {} }; + + await manager.initialize([healthy, unhealthy], { appId: 'test', masterKey: 'mk', serverURL: 
'http://localhost' }); + const results = await manager.healthCheck(); + + expect(results.get('ok')).toBe(true); + expect(results.get('bad')).toBe(false); + }); + }); + + describe('removeFunction / removeTrigger', () => { + it('removeFunction removes a registered function', () => { + manager.defineFunction('test', 'fn', () => {}); + expect(manager.getFunction('fn')).toBeDefined(); + manager.removeFunction('fn'); + expect(manager.getFunction('fn')).toBeUndefined(); + }); + + it('removeTrigger removes a registered trigger', () => { + manager.defineTrigger('test', 'Todo', 'beforeSave', () => {}); + expect(manager.getTrigger('Todo', 'beforeSave')).toBeDefined(); + manager.removeTrigger('beforeSave', 'Todo'); + expect(manager.getTrigger('Todo', 'beforeSave')).toBeUndefined(); + }); + }); +}); +``` + +- [ ] **Step 2: Run tests to verify they fail** + +Run: `npm run build && TESTING=1 npx jasmine spec/CloudCodeManager.spec.js` +Expected: FAIL — `CloudCodeManager` module not found + +- [ ] **Step 3: Implement CloudCodeManager** + +```typescript +// src/cloud-code/CloudCodeManager.ts +import type { + CloudCodeAdapter, + CloudCodeRegistry, + CloudFunctionHandler, + CloudTriggerHandler, + CloudJobHandler, + LiveQueryHandler, + ValidatorHandler, + HookStore, + ParseServerConfig, +} from './types'; + +const USER_ONLY_TRIGGERS = ['beforeLogin', 'afterLogin', 'beforePasswordResetRequest']; +const SESSION_ONLY_TRIGGERS = ['afterLogout']; + +function validateClassNameForTrigger(className: string, triggerName: string): void { + // Only afterSave is allowed on _PushStatus + if (className === '_PushStatus' && triggerName === 'beforeSave') { + throw new Error(`Triggers are not allowed for class _PushStatus: ${triggerName}. 
Only afterSave is allowed.`); + } + // Only afterLogout is allowed on _Session + if (className === '_Session' && triggerName !== 'afterLogout') { + throw new Error('Only the afterLogout trigger is allowed for the _Session class.'); + } + // Auth triggers only on _User + if (USER_ONLY_TRIGGERS.includes(triggerName) && className !== '_User') { + throw new Error(`${triggerName} trigger is only allowed on _User class`); + } + if (SESSION_ONLY_TRIGGERS.includes(triggerName) && className !== '_Session') { + throw new Error(`${triggerName} trigger is only allowed on _Session class`); + } +} + +function triggerKey(className: string, triggerName: string): string { + return `${triggerName}.${className}`; +} + +export class CloudCodeManager { + private adapters: Map = new Map(); + private store: HookStore = { + functions: new Map(), + triggers: new Map(), + jobs: new Map(), + liveQueryHandlers: [], + }; + + // --- Lifecycle --- + + async initialize(adapters: CloudCodeAdapter[], serverConfig: ParseServerConfig): Promise { + for (const adapter of adapters) { + if (this.adapters.has(adapter.name)) { + throw new Error(`Duplicate adapter name: '${adapter.name}'`); + } + this.adapters.set(adapter.name, adapter); + const registry = this.createRegistry(adapter.name); + await adapter.initialize(registry, serverConfig); + } + } + + async shutdown(): Promise { + for (const adapter of this.adapters.values()) { + await adapter.shutdown(); + } + this.adapters.clear(); + this.store.functions.clear(); + this.store.triggers.clear(); + this.store.jobs.clear(); + this.store.liveQueryHandlers.length = 0; + } + + async healthCheck(): Promise> { + const results = new Map(); + for (const [name, adapter] of this.adapters) { + try { + results.set(name, await adapter.isHealthy()); + } catch { + results.set(name, false); + } + } + return results; + } + + // --- Registry Factory --- + + createRegistry(source: string): CloudCodeRegistry { + return { + defineFunction: (name, handler, validator?) 
=> { + this.defineFunction(source, name, handler, validator); + }, + defineTrigger: (className, triggerName, handler, validator?) => { + this.defineTrigger(source, className, triggerName, handler, validator); + }, + defineJob: (name, handler) => { + this.defineJob(source, name, handler); + }, + defineLiveQueryHandler: (handler) => { + this.defineLiveQueryHandler(source, handler); + }, + }; + } + + // --- Registration --- + + defineFunction(source: string, name: string, handler: CloudFunctionHandler, validator?: ValidatorHandler): void { + const existing = this.store.functions.get(name); + if (existing && existing.source !== source) { + throw new Error( + `Cloud code conflict: function '${name}' registered by both '${existing.source}' and '${source}'` + ); + } + this.store.functions.set(name, { handler, source, validator }); + } + + defineTrigger(source: string, className: string, triggerName: string, handler: CloudTriggerHandler, validator?: ValidatorHandler): void { + validateClassNameForTrigger(className, triggerName); + const key = triggerKey(className, triggerName); + const existing = this.store.triggers.get(key); + if (existing && existing.source !== source) { + throw new Error( + `Cloud code conflict: ${triggerName} on '${className}' registered by both '${existing.source}' and '${source}'` + ); + } + this.store.triggers.set(key, { handler, source, validator }); + } + + defineJob(source: string, name: string, handler: CloudJobHandler): void { + const existing = this.store.jobs.get(name); + if (existing && existing.source !== source) { + throw new Error( + `Cloud code conflict: job '${name}' registered by both '${existing.source}' and '${source}'` + ); + } + this.store.jobs.set(name, { handler, source }); + } + + defineLiveQueryHandler(source: string, handler: LiveQueryHandler): void { + this.store.liveQueryHandlers.push({ handler, source }); + } + + // --- Removal (for HooksController REST API and facade compatibility) --- + + removeFunction(name: string): 
void { + this.store.functions.delete(name); + } + + removeTrigger(triggerType: string, className: string): void { + this.store.triggers.delete(triggerKey(className, triggerType)); + } + + unregisterAll(source: string): void { + for (const [key, entry] of this.store.functions) { + if (entry.source === source) this.store.functions.delete(key); + } + for (const [key, entry] of this.store.triggers) { + if (entry.source === source) this.store.triggers.delete(key); + } + for (const [key, entry] of this.store.jobs) { + if (entry.source === source) this.store.jobs.delete(key); + } + this.store.liveQueryHandlers = this.store.liveQueryHandlers.filter(e => e.source !== source); + } + + // --- Lookup --- + + getFunction(name: string): CloudFunctionHandler | undefined { + return this.store.functions.get(name)?.handler; + } + + getTrigger(className: string, triggerType: string): CloudTriggerHandler | undefined { + return this.store.triggers.get(triggerKey(className, triggerType))?.handler; + } + + triggerExists(className: string, triggerType: string): boolean { + return this.store.triggers.has(triggerKey(className, triggerType)); + } + + getJob(name: string): CloudJobHandler | undefined { + return this.store.jobs.get(name)?.handler; + } + + getJobs(): Map { + const result = new Map(); + for (const [name, entry] of this.store.jobs) { + result.set(name, entry.handler); + } + return result; + } + + /** Returns jobs as a plain object (for triggers.js facade backwards compatibility) */ + getJobsObject(): Record { + const result: Record = {}; + for (const [name, entry] of this.store.jobs) { + result[name] = entry.handler; + } + return result; + } + + getFunctionNames(): string[] { + return Array.from(this.store.functions.keys()); + } + + getValidator(key: string): ValidatorHandler | undefined { + const fn = this.store.functions.get(key); + if (fn) return fn.validator; + const trigger = this.store.triggers.get(key); + if (trigger) return trigger.validator; + return undefined; + } + + 
/** Synchronously clear the entire store. Used by test cleanup (_unregisterAll). */ + clearAll(): void { + this.store.functions.clear(); + this.store.triggers.clear(); + this.store.jobs.clear(); + this.store.liveQueryHandlers.length = 0; + } + + // --- Execution --- + + runLiveQueryEventHandlers(data: any): void { + for (const entry of this.store.liveQueryHandlers) { + entry.handler(data); + } + } +} +``` + +- [ ] **Step 4: Build and run tests** + +Run: `npm run build && TESTING=1 npx jasmine spec/CloudCodeManager.spec.js` +Expected: All tests PASS + +- [ ] **Step 5: Commit** + +```bash +git add -f src/cloud-code/CloudCodeManager.ts src/cloud-code/types.ts spec/CloudCodeManager.spec.js +git commit -m "feat: add CloudCodeManager with registration, conflict detection, and lookup" +``` + +--- + +## Chunk 2: LegacyAdapter and Triggers Facade + +### Task 3: Implement LegacyAdapter + +The LegacyAdapter is intentionally simple — it just loads the cloud code file. All the registration magic happens through the existing `Parse.Cloud.js` → `triggers.js` → `CloudCodeManager` delegation chain (set up in Task 4). + +**Files:** +- Create: `src/cloud-code/adapters/LegacyAdapter.ts` + +- [ ] **Step 1: Implement LegacyAdapter** + +```typescript +// src/cloud-code/adapters/LegacyAdapter.ts +import type { CloudCodeAdapter, CloudCodeRegistry, ParseServerConfig } from '../types'; + +export class LegacyAdapter implements CloudCodeAdapter { + readonly name = 'legacy'; + private cloud: string | ((parse: any) => void); + + constructor(cloud: string | ((parse: any) => void)) { + this.cloud = cloud; + } + + async initialize(registry: CloudCodeRegistry, _config: ParseServerConfig): Promise { + // The registry is not used directly by LegacyAdapter. + // Instead, the cloud code file calls Parse.Cloud.define() etc., + // which calls triggers.addFunction() etc., + // which the facade delegates to CloudCodeManager. + // + // The LegacyAdapter's sole job is to load the cloud code file. 
+ const Parse = require('parse/node').Parse; + + if (typeof this.cloud === 'function') { + await Promise.resolve(this.cloud(Parse)); + } else if (typeof this.cloud === 'string') { + const path = require('path'); + const resolved = path.resolve(process.cwd(), this.cloud); + // Support both CommonJS and ES modules + try { + const pkg = require(path.resolve(process.cwd(), 'package.json')); + if (process.env.npm_package_type === 'module' || pkg?.type === 'module') { + await import(resolved); + } else { + require(resolved); + } + } catch { + require(resolved); + } + } + } + + async isHealthy(): Promise { + return true; + } + + async shutdown(): Promise { + // No-op for in-process code + } +} +``` + +- [ ] **Step 2: Commit** + +```bash +git add -f src/cloud-code/adapters/LegacyAdapter.ts +git commit -m "feat: add LegacyAdapter for loading cloud code files" +``` + +### Task 4: Create triggers.js Facade + +This is the critical migration step. `triggers.js` keeps all its exports, but its internal `add*`, `get*`, and `remove*` functions delegate to `CloudCodeManager` when one is present on `AppCache`. + +**Files:** +- Modify: `src/triggers.js` + +- [ ] **Step 1: Read the full triggers.js to understand the current structure** + +Read: `src/triggers.js` completely. 
Pay attention to: +- The `_triggerStore` global (line 90) +- The `add()`, `get()`, `remove()` internal functions (lines 123-147) +- All exported functions that call these internals +- The `_unregisterAll()` function (line 183) + +- [ ] **Step 2: Add AppCache import and manager helper at top of triggers.js** + +Add after existing imports (around line 3): + +```javascript +import AppCache from './cache'; + +function getManager(applicationId) { + const cached = AppCache.get(applicationId || Parse.applicationId); + return cached && cached.cloudCodeManager; +} +``` + +- [ ] **Step 3: Update registration functions to delegate writes** + +Update `addFunction`, `addJob`, `addTrigger`, `addConnectTrigger`, `addLiveQueryEventHandler` to delegate to manager when present. **CRITICAL:** The fallback paths must use the original `add(category, name, handler, applicationId)` signature exactly as they are today. + +```javascript +export function addFunction(functionName, handler, validationHandler, applicationId) { + const manager = getManager(applicationId); + if (manager) { + manager.defineFunction('legacy', functionName, handler, validationHandler); + return; + } + // Original code — do not change these signatures + add(Category.Functions, functionName, handler, applicationId); + add(Category.Validators, functionName, validationHandler, applicationId); +} + +export function addJob(jobName, handler, applicationId) { + const manager = getManager(applicationId); + if (manager) { + manager.defineJob('legacy', jobName, handler); + return; + } + add(Category.Jobs, jobName, handler, applicationId); +} + +export function addTrigger(type, className, handler, applicationId, validationHandler) { + const manager = getManager(applicationId); + if (manager) { + manager.defineTrigger('legacy', className, type, handler, validationHandler); + return; + } + // Original code — preserve exact signatures + validateClassNameForTriggers(className, type); + add(Category.Triggers, 
`${type}.${className}`, handler, applicationId); + add(Category.Validators, `${type}.${className}`, validationHandler, applicationId); +} + +export function addConnectTrigger(type, handler, applicationId, validationHandler) { + const manager = getManager(applicationId); + if (manager) { + manager.defineTrigger('legacy', ConnectClassName, type, handler, validationHandler); + return; + } + add(Category.Triggers, `${type}.${ConnectClassName}`, handler, applicationId); + add(Category.Validators, `${type}.${ConnectClassName}`, validationHandler, applicationId); +} + +export function addLiveQueryEventHandler(handler, applicationId) { + const manager = getManager(applicationId); + if (manager) { + manager.defineLiveQueryHandler('legacy', handler); + return; + } + // Original code + applicationId = applicationId || Parse.applicationId; + _triggerStore[applicationId] = _triggerStore[applicationId] || baseStore(); + _triggerStore[applicationId].LiveQuery.push(handler); +} +``` + +- [ ] **Step 4: Update lookup functions to delegate reads** + +**CRITICAL:** Fallback paths must use the original `get(category, name, applicationId)` signature exactly. 
+ +```javascript +export function getTrigger(className, triggerType, applicationId) { + const manager = getManager(applicationId); + if (manager) { + return manager.getTrigger(className, triggerType); + } + return get(Category.Triggers, `${triggerType}.${className}`, applicationId); +} + +export function triggerExists(className, type, applicationId) { + const manager = getManager(applicationId); + if (manager) { + return manager.triggerExists(className, type); + } + return !!get(Category.Triggers, `${type}.${className}`, applicationId); +} + +export function getFunction(functionName, applicationId) { + const manager = getManager(applicationId); + if (manager) { + return manager.getFunction(functionName); + } + return get(Category.Functions, functionName, applicationId); +} + +export function getFunctionNames(applicationId) { + const manager = getManager(applicationId); + if (manager) { + return manager.getFunctionNames(); + } + // ... keep existing implementation as fallback (recursive namespace traversal) +} + +export function getJob(jobName, applicationId) { + const manager = getManager(applicationId); + if (manager) { + return manager.getJob(jobName); + } + return get(Category.Jobs, jobName, applicationId); +} + +export function getJobs(applicationId) { + const manager = getManager(applicationId); + if (manager) { + // Returns plain object for backwards compatibility (consumers use Object.keys()) + return manager.getJobsObject(); + } + // ... 
keep existing implementation as fallback +} + +export function getValidator(functionName, applicationId) { + const manager = getManager(applicationId); + if (manager) { + return manager.getValidator(functionName); + } + return get(Category.Validators, functionName, applicationId); +} +``` + +- [ ] **Step 5: Update removal functions** + +```javascript +export function removeFunction(functionName, applicationId) { + const manager = getManager(applicationId); + if (manager) { + manager.removeFunction(functionName); + return; + } + remove(Category.Functions, functionName, applicationId); +} + +export function removeTrigger(type, className, applicationId) { + const manager = getManager(applicationId); + if (manager) { + manager.removeTrigger(type, className); + return; + } + remove(Category.Triggers, `${type}.${className}`, applicationId); +} +``` + +- [ ] **Step 6: Update `_unregisterAll` to clear ALL sources in the manager** + +The existing `_unregisterAll()` wipes everything for all appIds. When a manager exists, we must clear ALL sources (not just 'legacy') to match this behavior — this is used by test cleanup. 
+ +```javascript +export function _unregisterAll() { + Object.keys(_triggerStore).forEach(appId => { + const manager = getManager(appId); + if (manager) { + // clearAll() synchronously wipes the entire store (all sources) + // This matches existing behavior of wiping everything for test cleanup + manager.clearAll(); + } + delete _triggerStore[appId]; + }); +} +``` + +- [ ] **Step 7: Update `runLiveQueryEventHandlers` to delegate** + +```javascript +export function runLiveQueryEventHandlers(data, applicationId = Parse.applicationId) { + const manager = getManager(applicationId); + if (manager) { + manager.runLiveQueryEventHandlers(data); + return; + } + _triggerStore[applicationId] = _triggerStore[applicationId] || baseStore(); + _triggerStore[applicationId].LiveQuery.forEach(handler => handler(data)); +} +``` + +- [ ] **Step 8: Build and run existing CloudCode tests** + +Run: `npm run build && TESTING=1 npx jasmine --filter="CloudCode"` +Expected: All existing tests PASS (facade falls back to legacy store when no manager present) + +- [ ] **Step 9: Commit** + +```bash +git add src/triggers.js +git commit -m "feat: add CloudCodeManager delegation layer to triggers.js facade" +``` + +--- + +## Chunk 3: ParseServer Integration + +### Task 5: Add Config Options + +**Files:** +- Modify: `src/Options/index.js` +- Modify: `src/Options/Definitions.js` + +- [ ] **Step 1: Add types to `src/Options/index.js`** + +After the existing `cloud: ?string` line, add: + +```javascript +cloudCodeCommand: ?string, +webhookKey: ?string, +cloudCodeOptions: ?{ + startupTimeout: ?number, + healthCheckInterval: ?number, + shutdownTimeout: ?number, + maxRestartDelay: ?number, +}, +cloudCodeAdapters: ?Array, +``` + +- [ ] **Step 2: Add definitions to `src/Options/Definitions.js`** + +Add in alphabetical order: + +```javascript +cloudCodeAdapters: { + help: 'Array of CloudCodeAdapter instances for BYO cloud code integration', +}, +cloudCodeCommand: { + env: 'PARSE_SERVER_CLOUD_CODE_COMMAND', + 
help: 'Shell command to spawn an external cloud code process (ParseCloud/1.0 protocol)', +}, +cloudCodeOptions: { + help: 'Options for the external cloud code process adapter', +}, +webhookKey: { + env: 'PARSE_SERVER_WEBHOOK_KEY', + help: 'Webhook key for authenticating external cloud code process requests. Required when cloudCodeCommand is set.', +}, +``` + +- [ ] **Step 3: Commit** + +```bash +git add src/Options/index.js src/Options/Definitions.js +git commit -m "feat: add config options for cloud code adapters" +``` + +### Task 6: Integrate CloudCodeManager into ParseServer Startup + +**Files:** +- Modify: `src/ParseServer.ts` + +- [ ] **Step 1: Read the current ParseServer.ts startup flow** + +Read: `src/ParseServer.ts` — focus on the `start()` method (lines ~150-210) and the cloud code loading block (lines ~187-202). + +- [ ] **Step 2: Add imports at top of ParseServer.ts** + +```typescript +import { CloudCodeManager } from './cloud-code/CloudCodeManager'; +import { LegacyAdapter } from './cloud-code/adapters/LegacyAdapter'; +import { InProcessAdapter } from './cloud-code/adapters/InProcessAdapter'; +import { ExternalProcessAdapter } from './cloud-code/adapters/ExternalProcessAdapter'; +``` + +- [ ] **Step 3: Add resolveAdapters function** + +Add before the `ParseServer` class or as a module-level function: + +```typescript +function resolveAdapters(options: any): any[] { + const adapters: any[] = []; + + if (options.cloudCodeAdapters) { + adapters.push(...options.cloudCodeAdapters); + } + + if (options.cloud) { + if (typeof options.cloud === 'object' && typeof options.cloud.getRouter === 'function') { + adapters.push(new InProcessAdapter(options.cloud)); + } else { + adapters.push(new LegacyAdapter(options.cloud)); + } + } + + if (options.cloudCodeCommand) { + if (!options.webhookKey) { + throw new Error('webhookKey is required when using cloudCodeCommand'); + } + adapters.push(new ExternalProcessAdapter( + options.cloudCodeCommand, + options.webhookKey, + 
options.cloudCodeOptions + )); + } + + return adapters; +} +``` + +- [ ] **Step 4: Replace cloud code loading block in start() method** + +Replace the existing cloud code loading block (lines ~187-202) with: + +```typescript +const adapters = resolveAdapters({ + cloud, + cloudCodeCommand: this.config.cloudCodeCommand, + webhookKey: this.config.webhookKey, + cloudCodeOptions: this.config.cloudCodeOptions, + cloudCodeAdapters: this.config.cloudCodeAdapters, +}); + +if (adapters.length > 0) { + addParseCloud(); + const cloudManager = new CloudCodeManager(); + + // CRITICAL: Store on this.config BEFORE adapter initialization. + // this.config flows into AppCache via Config.put() later in start(). + // We must also store it on AppCache NOW so the facade can find it + // during LegacyAdapter.initialize() → Parse.Cloud.define() → triggers.addFunction(). + this.config.cloudCodeManager = cloudManager; + const appId = this.config.appId; + const cached = AppCache.get(appId); + if (cached) { + cached.cloudCodeManager = cloudManager; + } + + await cloudManager.initialize(adapters, { + appId, + masterKey: this.config.masterKey, + serverURL: this.config.serverURL || `http://localhost:${this.config.port}${this.config.mountPath || '/parse'}`, + }); +} +``` + +**Critical ordering notes:** +1. `cloudManager` must be on both `this.config` AND `AppCache` BEFORE `initialize()` — because LegacyAdapter loads cloud code synchronously during `initialize()`, and those `Parse.Cloud.define()` calls flow through the facade which reads from `AppCache`. +2. Storing on `this.config` ensures the reference survives the `Config.put(this.config)` call at the end of `start()`, which overwrites the AppCache entry with `this.config`. + +- [ ] **Step 5: Add AppCache import if not already present** + +Verify `AppCache` is imported. It may already be imported in ParseServer.ts as `import cache from './cache'` — if so, use `cache` instead of `AppCache`. 

- [ ] **Step 6: Build and run tests**

Run: `npm run build && TESTING=1 npx jasmine --filter="CloudCode"`
Expected: All existing tests PASS

- [ ] **Step 7: Commit**

```bash
git add src/ParseServer.ts
git commit -m "feat: integrate CloudCodeManager initialization into ParseServer startup"
```

---

## Chunk 4: InProcessAdapter and Webhook Bridge

### Task 7: Implement Webhook Bridge

**Files:**
- Create: `src/cloud-code/adapters/webhook-bridge.ts`

- [ ] **Step 1: Create the webhook bridge**

```typescript
// src/cloud-code/adapters/webhook-bridge.ts
import { Parse } from 'parse/node';
import type { WebhookResponse } from '../types';

export function requestToWebhookBody(request: any): Record<string, unknown> {
  const body: Record<string, unknown> = {
    master: request.master ?? false,
    ip: request.ip ?? '',
    headers: request.headers ?? {},
    installationId: request.installationId,
  };

  if (request.user) {
    body.user = typeof request.user.toJSON === 'function' ? request.user.toJSON() : request.user;
  }
  if (request.params !== undefined) body.params = request.params;
  if (request.jobId !== undefined) body.jobId = request.jobId;
  if (request.object) {
    body.object = typeof request.object.toJSON === 'function' ? request.object.toJSON() : request.object;
  }
  if (request.original) {
    body.original = typeof request.original.toJSON === 'function' ?
request.original.toJSON() : request.original; + } + if (request.context !== undefined) body.context = request.context; + if (request.query) { + body.query = { + className: request.query.className, + where: request.query._where, + limit: request.query._limit, + skip: request.query._skip, + include: request.query._include?.join(','), + keys: request.query._keys?.join(','), + order: request.query._order, + }; + } + if (request.count !== undefined) body.count = request.count; + if (request.isGet !== undefined) body.isGet = request.isGet; + if (request.file) body.file = request.file; + if (request.fileSize !== undefined) body.fileSize = request.fileSize; + if (request.event) body.event = request.event; + if (request.requestId !== undefined) body.requestId = request.requestId; + if (request.clients !== undefined) body.clients = request.clients; + if (request.subscriptions !== undefined) body.subscriptions = request.subscriptions; + + return body; +} + +export function webhookResponseToResult(response: WebhookResponse): unknown { + if ('error' in response) { + throw new Parse.Error(response.error.code, response.error.message); + } + return response.success; +} + +export function applyBeforeSaveResponse(request: any, response: WebhookResponse): void { + if ('error' in response) { + throw new Parse.Error(response.error.code, response.error.message); + } + const result = response.success; + if (typeof result === 'object' && result !== null && Object.keys(result).length === 0) { + return; + } + if (typeof result === 'object' && result !== null) { + const skipFields = ['objectId', 'createdAt', 'updatedAt', 'className']; + for (const [key, value] of Object.entries(result)) { + if (!skipFields.includes(key)) { + request.object.set(key, value); + } + } + } +} +``` + +- [ ] **Step 2: Commit** + +```bash +git add -f src/cloud-code/adapters/webhook-bridge.ts +git commit -m "feat: add webhook bridge for request/response conversion" +``` + +### Task 8: Implement InProcessAdapter + 
+**Files:** +- Create: `src/cloud-code/adapters/InProcessAdapter.ts` +- Test: `spec/InProcessAdapter.spec.js` + +- [ ] **Step 1: Write failing tests** + +```javascript +// spec/InProcessAdapter.spec.js +const { InProcessAdapter } = require('../lib/cloud-code/adapters/InProcessAdapter'); +const { CloudCodeManager } = require('../lib/cloud-code/CloudCodeManager'); + +function createMockCloudCode(manifest, handlers = {}) { + return { + getRouter() { + return { + getManifest() { return manifest; }, + async dispatchFunction(name, body) { + if (handlers[`function:${name}`]) return handlers[`function:${name}`](body); + return { success: null }; + }, + async dispatchTrigger(className, triggerName, body) { + if (handlers[`trigger:${triggerName}.${className}`]) return handlers[`trigger:${triggerName}.${className}`](body); + return { success: {} }; + }, + async dispatchJob(name, body) { + if (handlers[`job:${name}`]) return handlers[`job:${name}`](body); + return { success: null }; + }, + }; + }, + }; +} + +describe('InProcessAdapter', () => { + let manager; + + beforeEach(() => { + manager = new CloudCodeManager(); + }); + + it('has name "in-process"', () => { + const cloud = createMockCloudCode({ protocol: 'ParseCloud/1.0', hooks: { functions: [], triggers: [], jobs: [] } }); + const adapter = new InProcessAdapter(cloud); + expect(adapter.name).toBe('in-process'); + }); + + it('registers functions from manifest', async () => { + const cloud = createMockCloudCode({ + protocol: 'ParseCloud/1.0', + hooks: { + functions: [{ name: 'hello' }, { name: 'greet' }], + triggers: [], + jobs: [], + }, + }); + const adapter = new InProcessAdapter(cloud); + const registry = manager.createRegistry(adapter.name); + await adapter.initialize(registry, { appId: 'test', masterKey: 'mk', serverURL: 'http://localhost' }); + + expect(manager.getFunction('hello')).toBeDefined(); + expect(manager.getFunction('greet')).toBeDefined(); + }); + + it('registers triggers from manifest', async () => { + 
const cloud = createMockCloudCode({ + protocol: 'ParseCloud/1.0', + hooks: { + functions: [], + triggers: [{ className: 'Todo', triggerName: 'beforeSave' }], + jobs: [], + }, + }); + const adapter = new InProcessAdapter(cloud); + const registry = manager.createRegistry(adapter.name); + await adapter.initialize(registry, { appId: 'test', masterKey: 'mk', serverURL: 'http://localhost' }); + + expect(manager.getTrigger('Todo', 'beforeSave')).toBeDefined(); + }); + + it('registers jobs from manifest', async () => { + const cloud = createMockCloudCode({ + protocol: 'ParseCloud/1.0', + hooks: { + functions: [], + triggers: [], + jobs: [{ name: 'cleanup' }], + }, + }); + const adapter = new InProcessAdapter(cloud); + const registry = manager.createRegistry(adapter.name); + await adapter.initialize(registry, { appId: 'test', masterKey: 'mk', serverURL: 'http://localhost' }); + + expect(manager.getJob('cleanup')).toBeDefined(); + }); + + it('bridge handler dispatches function and returns result', async () => { + const cloud = createMockCloudCode( + { protocol: 'ParseCloud/1.0', hooks: { functions: [{ name: 'add' }], triggers: [], jobs: [] } }, + { 'function:add': (body) => ({ success: body.params.a + body.params.b }) } + ); + const adapter = new InProcessAdapter(cloud); + const registry = manager.createRegistry(adapter.name); + await adapter.initialize(registry, { appId: 'test', masterKey: 'mk', serverURL: 'http://localhost' }); + + const handler = manager.getFunction('add'); + const result = await handler({ params: { a: 2, b: 3 }, master: false, ip: '127.0.0.1', headers: {} }); + expect(result).toBe(5); + }); + + it('bridge handler throws Parse.Error on error response', async () => { + const cloud = createMockCloudCode( + { protocol: 'ParseCloud/1.0', hooks: { functions: [{ name: 'fail' }], triggers: [], jobs: [] } }, + { 'function:fail': () => ({ error: { code: 141, message: 'boom' } }) } + ); + const adapter = new InProcessAdapter(cloud); + const registry = 
manager.createRegistry(adapter.name); + await adapter.initialize(registry, { appId: 'test', masterKey: 'mk', serverURL: 'http://localhost' }); + + const handler = manager.getFunction('fail'); + await expectAsync(handler({ params: {}, master: false, ip: '', headers: {} })) + .toBeRejectedWithError(/boom/); + }); + + it('isHealthy returns true', async () => { + const cloud = createMockCloudCode({ protocol: 'ParseCloud/1.0', hooks: { functions: [], triggers: [], jobs: [] } }); + const adapter = new InProcessAdapter(cloud); + expect(await adapter.isHealthy()).toBe(true); + }); +}); +``` + +- [ ] **Step 2: Implement InProcessAdapter** + +```typescript +// src/cloud-code/adapters/InProcessAdapter.ts +import type { + CloudCodeAdapter, + CloudCodeRegistry, + ParseServerConfig, + InProcessCloudCode, +} from '../types'; +import { requestToWebhookBody, webhookResponseToResult, applyBeforeSaveResponse } from './webhook-bridge'; + +export class InProcessAdapter implements CloudCodeAdapter { + readonly name = 'in-process'; + private cloudCode: InProcessCloudCode; + + constructor(cloudCode: InProcessCloudCode) { + this.cloudCode = cloudCode; + } + + async initialize(registry: CloudCodeRegistry, _config: ParseServerConfig): Promise { + const router = this.cloudCode.getRouter(); + const manifest = router.getManifest(); + + for (const fn of manifest.hooks.functions) { + registry.defineFunction(fn.name, async (request) => { + const body = requestToWebhookBody(request); + const response = await router.dispatchFunction(fn.name, body); + return webhookResponseToResult(response); + }); + } + + for (const trigger of manifest.hooks.triggers) { + const { className, triggerName } = trigger; + registry.defineTrigger(className, triggerName as any, async (request) => { + const body = requestToWebhookBody(request); + const response = await router.dispatchTrigger(className, triggerName, body); + if (triggerName === 'beforeSave') { + applyBeforeSaveResponse(request, response); + return; + } + 
        return webhookResponseToResult(response);
      });
    }

    for (const job of manifest.hooks.jobs) {
      registry.defineJob(job.name, async (request) => {
        const body = requestToWebhookBody(request);
        const response = await router.dispatchJob(job.name, body);
        return webhookResponseToResult(response);
      });
    }
  }

  async isHealthy(): Promise<boolean> {
    return true;
  }

  async shutdown(): Promise<void> {}
}
```

- [ ] **Step 3: Build and run tests**

Run: `npm run build && TESTING=1 npx jasmine spec/InProcessAdapter.spec.js`
Expected: All tests PASS

- [ ] **Step 4: Commit**

```bash
git add -f src/cloud-code/adapters/InProcessAdapter.ts spec/InProcessAdapter.spec.js
git commit -m "feat: add InProcessAdapter with webhook bridge for manifest-based cloud code"
```

---

## Chunk 5: ExternalProcessAdapter

### Task 9: Implement ExternalProcessAdapter

**Files:**
- Create: `src/cloud-code/adapters/ExternalProcessAdapter.ts`
- Test: `spec/ExternalProcessAdapter.spec.js`

- [ ] **Step 1: Write failing tests**

```javascript
// spec/ExternalProcessAdapter.spec.js
const { ExternalProcessAdapter } = require('../lib/cloud-code/adapters/ExternalProcessAdapter');
const { CloudCodeManager } = require('../lib/cloud-code/CloudCodeManager');
const http = require('http');

function createMockCloudServer(manifest, port) {
  return new Promise((resolve) => {
    const server = http.createServer((req, res) => {
      if (req.url === '/' && req.method === 'GET') {
        res.writeHead(200, { 'Content-Type': 'application/json' });
        res.end(JSON.stringify(manifest));
      } else if (req.url === '/health' && req.method === 'GET') {
        res.writeHead(200);
        res.end('OK');
      } else if (req.url.startsWith('/functions/') && req.method === 'POST') {
        let body = '';
        req.on('data', d => body += d);
        req.on('end', () => {
          res.writeHead(200, { 'Content-Type': 'application/json' });
          res.end(JSON.stringify({ success: 'external-result' }));
        });
      } else {
        res.writeHead(404);
        res.end();
      }
    });
    server.listen(port, () => resolve(server));
  });
}

describe('ExternalProcessAdapter', () => {
  it('has name "external-process"', () => {
    const adapter = new ExternalProcessAdapter('echo test', 'secret-key');
    expect(adapter.name).toBe('external-process');
  });

  it('requires webhookKey', () => {
    expect(() => new ExternalProcessAdapter('echo test', '')).toThrowError(/webhookKey/);
  });

  it('shutdown resolves cleanly when no process started', async () => {
    const adapter = new ExternalProcessAdapter('echo test', 'key');
    await expectAsync(adapter.shutdown()).toBeResolved();
  });

  it('spawns process and reads manifest', async () => {
    const manager = new CloudCodeManager();
    const port = 19876;
    const server = await createMockCloudServer(
      { protocol: 'ParseCloud/1.0', hooks: { functions: [{ name: 'ext-fn' }], triggers: [], jobs: [] } },
      port
    );

    try {
      const cmd = `node -e "process.stdout.write('PARSE_CLOUD_READY:${port}\\n'); setTimeout(() => {}, 60000)"`;
      const adapter = new ExternalProcessAdapter(cmd, 'test-key', {
        startupTimeout: 5000,
        healthCheckInterval: 0,
      });
      const registry = manager.createRegistry(adapter.name);
      await adapter.initialize(registry, { appId: 'test', masterKey: 'mk', serverURL: 'http://localhost' });

      expect(manager.getFunction('ext-fn')).toBeDefined();

      await adapter.shutdown();
    } finally {
      server.close();
    }
  }, 10000);
});
```

- [ ] **Step 2: Implement ExternalProcessAdapter**

```typescript
// src/cloud-code/adapters/ExternalProcessAdapter.ts
import { spawn, ChildProcess } from 'child_process';
import http from 'http';
import type {
  CloudCodeAdapter,
  CloudCodeRegistry,
  ParseServerConfig,
  CloudManifest,
  CloudCodeOptions,
  WebhookResponse,
} from '../types';
import { requestToWebhookBody, webhookResponseToResult, applyBeforeSaveResponse } from './webhook-bridge';

const DEFAULT_OPTIONS: Required<CloudCodeOptions> = {
  startupTimeout: 30000,
  healthCheckInterval: 30000,
  shutdownTimeout: 5000,
  maxRestartDelay: 30000,
};

function httpGet(url: string): Promise<string> {
  return new Promise((resolve, reject) => {
    http.get(url, (res) => {
      let data = '';
      res.on('data', (chunk) => data += chunk);
      res.on('end', () => resolve(data));
    }).on('error', reject);
  });
}

function httpPost(url: string, body: Record<string, unknown>, webhookKey: string): Promise<WebhookResponse> {
  return new Promise((resolve, reject) => {
    const payload = JSON.stringify(body);
    const urlObj = new URL(url);
    const req = http.request({
      hostname: urlObj.hostname,
      port: urlObj.port,
      path: urlObj.pathname,
      method: 'POST',
      headers: {
        'Content-Type': 'application/json',
        'Content-Length': Buffer.byteLength(payload),
        'X-Parse-Webhook-Key': webhookKey,
      },
    }, (res) => {
      let data = '';
      res.on('data', (chunk) => data += chunk);
      res.on('end', () => {
        try {
          resolve(JSON.parse(data));
        } catch (e) {
          reject(new Error(`Invalid JSON from cloud code process: ${data}`));
        }
      });
    });
    req.on('error', reject);
    req.write(payload);
    req.end();
  });
}

export class ExternalProcessAdapter implements CloudCodeAdapter {
  readonly name = 'external-process';
  private command: string;
  private webhookKey: string;
  private options: Required<CloudCodeOptions>;
  private process: ChildProcess | null = null;
  private port: number = 0;
  private healthInterval: ReturnType<typeof setInterval> | null = null;

  constructor(command: string, webhookKey: string, options?: CloudCodeOptions) {
    if (!webhookKey) {
      throw new Error('webhookKey is required for ExternalProcessAdapter');
    }
    this.command = command;
    this.webhookKey = webhookKey;
    this.options = { ...DEFAULT_OPTIONS, ...options };
  }

  async initialize(registry: CloudCodeRegistry, config: ParseServerConfig): Promise<void> {
    this.port = await this.spawnAndWaitForReady(config);
    const manifest = await this.fetchManifest();
    this.registerFromManifest(registry, manifest);

    if (this.options.healthCheckInterval > 0) {
      this.healthInterval = setInterval(() => this.checkHealth(), this.options.healthCheckInterval);
    }
  }

  async isHealthy(): Promise<boolean> {
    try {
      const response = await httpGet(`http://localhost:${this.port}/health`);
      return response === 'OK' || response.includes('ok');
    } catch {
      return false;
    }
  }

  async shutdown(): Promise<void> {
    if (this.healthInterval) {
      clearInterval(this.healthInterval);
      this.healthInterval = null;
    }
    if (this.process && !this.process.killed) {
      this.process.kill('SIGTERM');
      await Promise.race([
        new Promise<void>((resolve) => this.process!.once('exit', () => resolve())),
        new Promise<void>((resolve) => setTimeout(() => {
          if (this.process && !this.process.killed) {
            this.process.kill('SIGKILL');
          }
          resolve();
        }, this.options.shutdownTimeout)),
      ]);
    }
    this.process = null;
  }

  private spawnAndWaitForReady(config: ParseServerConfig): Promise<number> {
    return new Promise((resolve, reject) => {
      const child = spawn(this.command, {
        shell: true,
        env: {
          ...process.env,
          PARSE_SERVER_URL: config.serverURL,
          PARSE_APPLICATION_ID: config.appId,
          PARSE_MASTER_KEY: config.masterKey,
          PARSE_WEBHOOK_KEY: this.webhookKey,
          PARSE_CLOUD_PORT: '0',
        },
        stdio: ['ignore', 'pipe', 'pipe'],
      });

      this.process = child;

      const timeout = setTimeout(() => {
        child.kill('SIGKILL');
        reject(new Error(`Cloud code process did not emit PARSE_CLOUD_READY within ${this.options.startupTimeout}ms`));
      }, this.options.startupTimeout);

      let stdout = '';
      child.stdout!.on('data', (data) => {
        stdout += data.toString();
        const match = stdout.match(/PARSE_CLOUD_READY:(\d+)/);
        if (match) {
          clearTimeout(timeout);
          resolve(parseInt(match[1], 10));
        }
      });

      child.stderr!.on('data', (data) => {
        process.stderr.write(`[cloud-code] ${data}`);
      });

      child.on('error', (err) => {
        clearTimeout(timeout);
        reject(new Error(`Failed to spawn cloud code process: ${err.message}`));
      });

      child.on('exit', (code) => {
        clearTimeout(timeout);
        if (!this.port) {
          reject(new Error(`Cloud code process exited with code ${code} before becoming ready`));
        }
      });
    });
  }

  private async fetchManifest(): Promise<CloudManifest> {
    const data = await httpGet(`http://localhost:${this.port}/`);
    return JSON.parse(data);
  }

  private registerFromManifest(registry: CloudCodeRegistry, manifest: CloudManifest): void {
    for (const fn of manifest.hooks.functions) {
      registry.defineFunction(fn.name, async (request) => {
        const body = requestToWebhookBody(request);
        const response = await httpPost(`http://localhost:${this.port}/functions/${fn.name}`, body, this.webhookKey);
        return webhookResponseToResult(response);
      });
    }

    for (const trigger of manifest.hooks.triggers) {
      const { className, triggerName } = trigger;
      registry.defineTrigger(className, triggerName as any, async (request) => {
        const body = requestToWebhookBody(request);
        const response = await httpPost(
          `http://localhost:${this.port}/triggers/${className}/${triggerName}`,
          body,
          this.webhookKey
        );
        if (triggerName === 'beforeSave') {
          applyBeforeSaveResponse(request, response);
          return;
        }
        return webhookResponseToResult(response);
      });
    }

    for (const job of manifest.hooks.jobs) {
      registry.defineJob(job.name, async (request) => {
        const body = requestToWebhookBody(request);
        const response = await httpPost(`http://localhost:${this.port}/jobs/${job.name}`, body, this.webhookKey);
        return webhookResponseToResult(response);
      });
    }
  }

  private async checkHealth(): Promise<void> {
    const healthy = await this.isHealthy();
    if (!healthy) {
      console.warn('[cloud-code] External process health check failed');
    }
  }
}
```

- [ ] **Step 3: Build and run tests**

Run: `npm run build && TESTING=1 npx jasmine spec/ExternalProcessAdapter.spec.js`
Expected: All tests PASS

- [ ] **Step 4: Commit**

```bash
git add -f src/cloud-code/adapters/ExternalProcessAdapter.ts
spec/ExternalProcessAdapter.spec.js +git commit -m "feat: add ExternalProcessAdapter with child process lifecycle management" +``` + +--- + +## Chunk 6: Integration Tests and Full Verification + +### Task 10: Integration Tests + +**Files:** +- Create: `spec/CloudCodeAdapter.integration.spec.js` + +- [ ] **Step 1: Write integration tests** + +```javascript +// spec/CloudCodeAdapter.integration.spec.js +const { CloudCodeManager } = require('../lib/cloud-code/CloudCodeManager'); +const { LegacyAdapter } = require('../lib/cloud-code/adapters/LegacyAdapter'); +const { InProcessAdapter } = require('../lib/cloud-code/adapters/InProcessAdapter'); + +describe('Cloud Code Adapter Integration', () => { + describe('composable adapters', () => { + it('supports LegacyAdapter + InProcessAdapter registering different hooks', async () => { + const manager = new CloudCodeManager(); + + const legacyCloud = (Parse) => { + Parse.Cloud.define('legacyFn', () => 'from-legacy'); + }; + + const inProcessCloud = { + getRouter() { + return { + getManifest() { + return { + protocol: 'ParseCloud/1.0', + hooks: { + functions: [{ name: 'inProcessFn' }], + triggers: [], + jobs: [], + }, + }; + }, + async dispatchFunction() { return { success: 'from-in-process' }; }, + async dispatchTrigger() { return { success: {} }; }, + async dispatchJob() { return { success: null }; }, + }; + }, + }; + + // InProcessAdapter first (registers directly), then LegacyAdapter + // Note: LegacyAdapter needs the manager on AppCache for the facade to work. + // In real usage, ParseServer.start() handles this. For testing, we use + // the InProcess adapter (which registers directly) to verify composition. 
+ const inProcessAdapter = new InProcessAdapter(inProcessCloud); + const inProcessRegistry = manager.createRegistry(inProcessAdapter.name); + await inProcessAdapter.initialize(inProcessRegistry, { appId: 'test', masterKey: 'mk', serverURL: 'http://localhost' }); + + // Simulate legacy registration via manager directly + manager.defineFunction('legacy', 'legacyFn', () => 'from-legacy'); + + expect(manager.getFunction('legacyFn')).toBeDefined(); + expect(manager.getFunction('inProcessFn')).toBeDefined(); + expect(manager.getFunctionNames().sort()).toEqual(['inProcessFn', 'legacyFn']); + }); + + it('throws on conflict between adapters', async () => { + const manager = new CloudCodeManager(); + + // Register a function from "legacy" source + manager.defineFunction('legacy', 'shared', () => 'from-legacy'); + + // InProcess adapter tries to register same function + const inProcessCloud = { + getRouter() { + return { + getManifest() { + return { + protocol: 'ParseCloud/1.0', + hooks: { functions: [{ name: 'shared' }], triggers: [], jobs: [] }, + }; + }, + async dispatchFunction() { return { success: 'from-in-process' }; }, + async dispatchTrigger() { return { success: {} }; }, + async dispatchJob() { return { success: null }; }, + }; + }, + }; + + const adapter = new InProcessAdapter(inProcessCloud); + const registry = manager.createRegistry(adapter.name); + + await expectAsync( + adapter.initialize(registry, { appId: 'test', masterKey: 'mk', serverURL: 'http://localhost' }) + ).toBeRejectedWithError(/Cloud code conflict.*shared.*legacy.*in-process/); + }); + }); + + describe('shutdown', () => { + it('shuts down all adapters', async () => { + const manager = new CloudCodeManager(); + let shutdownCalled = false; + + const adapter = { + name: 'test', + async initialize() {}, + async isHealthy() { return true; }, + async shutdown() { shutdownCalled = true; }, + }; + + await manager.initialize([adapter], { appId: 'test', masterKey: 'mk', serverURL: 'http://localhost' }); + 
await manager.shutdown(); + + expect(shutdownCalled).toBe(true); + }); + }); + + describe('unregisterAll', () => { + it('allows re-registration after unregisterAll', () => { + const manager = new CloudCodeManager(); + manager.defineFunction('adapter-a', 'fn', () => 'first'); + manager.unregisterAll('adapter-a'); + + // Now a different adapter can register the same name + expect(() => { + manager.defineFunction('adapter-b', 'fn', () => 'second'); + }).not.toThrow(); + expect(manager.getFunction('fn')).toBeDefined(); + }); + }); +}); +``` + +- [ ] **Step 2: Build and run integration tests** + +Run: `npm run build && TESTING=1 npx jasmine spec/CloudCodeAdapter.integration.spec.js` +Expected: All tests PASS + +- [ ] **Step 3: Commit** + +```bash +git add -f spec/CloudCodeAdapter.integration.spec.js +git commit -m "test: add integration tests for composable cloud code adapters" +``` + +### Task 11: Full Test Suite Verification + +- [ ] **Step 1: Build everything** + +Run: `npm run build` + +- [ ] **Step 2: Run complete test suite** + +Run: `npm run testonly` + +- [ ] **Step 3: Fix any failures** + +If tests fail, analyze each failure: +- **Import issues:** Verify triggers.js facade imports `AppCache` correctly +- **Manager not found:** Verify facade falls back to legacy store when no manager exists (most tests won't have a manager) +- **Type errors:** Verify Babel compiles all `.ts` files correctly +- **Behavioral changes:** Verify facade delegation matches original behavior exactly + +- [ ] **Step 4: Commit fixes if any** + +```bash +git add -A +git commit -m "fix: resolve test regressions from cloud code adapter integration" +``` From aa555c3f2d1be35209207d8040cf095ee39ce7bf Mon Sep 17 00:00:00 2001 From: Daniel Date: Mon, 16 Mar 2026 22:25:58 +1100 Subject: [PATCH 05/14] feature: add cloud adapter --- .../plans/2026-03-16-cloud-code-adapter.md | 2020 ----------------- .../2026-03-16-cloud-code-adapter-design.md | 379 ---- spec/CloudCodeAdapter.integration.spec.js | 
107 + spec/CloudCodeManager.spec.js | 575 +++++ spec/ExternalProcessAdapter.spec.js | 70 + spec/InProcessAdapter.spec.js | 121 + src/Options/Definitions.js | 12 +- src/Options/index.js | 15 +- src/ParseServer.ts | 45 +- src/cloud-code/CloudCodeManager.ts | 285 +++ src/cloud-code/README.md | 395 ++++ .../adapters/ExternalProcessAdapter.ts | 215 ++ src/cloud-code/adapters/InProcessAdapter.ts | 56 + src/cloud-code/adapters/LegacyAdapter.ts | 43 + src/cloud-code/adapters/webhook-bridge.ts | 70 + src/cloud-code/resolveAdapters.ts | 33 + src/cloud-code/types.ts | 105 + src/triggers.js | 92 +- 18 files changed, 2219 insertions(+), 2419 deletions(-) delete mode 100644 docs/superpowers/plans/2026-03-16-cloud-code-adapter.md delete mode 100644 docs/superpowers/specs/2026-03-16-cloud-code-adapter-design.md create mode 100644 spec/CloudCodeAdapter.integration.spec.js create mode 100644 spec/CloudCodeManager.spec.js create mode 100644 spec/ExternalProcessAdapter.spec.js create mode 100644 spec/InProcessAdapter.spec.js create mode 100644 src/cloud-code/CloudCodeManager.ts create mode 100644 src/cloud-code/README.md create mode 100644 src/cloud-code/adapters/ExternalProcessAdapter.ts create mode 100644 src/cloud-code/adapters/InProcessAdapter.ts create mode 100644 src/cloud-code/adapters/LegacyAdapter.ts create mode 100644 src/cloud-code/adapters/webhook-bridge.ts create mode 100644 src/cloud-code/resolveAdapters.ts create mode 100644 src/cloud-code/types.ts diff --git a/docs/superpowers/plans/2026-03-16-cloud-code-adapter.md b/docs/superpowers/plans/2026-03-16-cloud-code-adapter.md deleted file mode 100644 index baac476cb1..0000000000 --- a/docs/superpowers/plans/2026-03-16-cloud-code-adapter.md +++ /dev/null @@ -1,2020 +0,0 @@ -# Cloud Code Adapter Implementation Plan - -> **For agentic workers:** REQUIRED: Use superpowers:subagent-driven-development (if subagents available) or superpowers:executing-plans to implement this plan. Steps use checkbox (`- [ ]`) syntax for tracking. 
- -**Goal:** Replace `triggers.js` with a `CloudCodeManager` that supports composable adapters (Legacy, InProcess, ExternalProcess, BYO), enabling multi-language cloud code without breaking existing Parse Server users. - -**Architecture:** One `CloudCodeManager` per `applicationId`, stored on `this.config` (which flows into `AppCache` via `Config.put()`). The manager owns hook registration (with conflict detection), lookup, and execution. Three built-in adapters cover existing use cases. `triggers.js` becomes a facade that delegates both reads AND writes to `CloudCodeManager` when one exists, preserving all existing `Parse.Cloud.js` behavior (validators, rate limiting, auth trigger argument parsing) without reimplementing it. - -**Tech Stack:** TypeScript, Babel (transpiles `.ts` via `@babel/preset-typescript`), Jasmine tests, Parse JS SDK - -**Spec:** `docs/superpowers/specs/2026-03-16-cloud-code-adapter-design.md` - -**Target User Experience (`@parse-lite/cloud`):** - -```typescript -import ParseServer from 'parse-server'; -import { ParseCloud } from '@parse-lite/cloud'; - -const cloud = new ParseCloud(); -cloud.class('Todo') - .requireUser() - .beforeSave(async ({ object, isNew, user }) => { - if (isNew) object.authorId = user!.objectId; - return object; - }); -cloud.function('getStats').requireMaster().handle(async () => ({ total: 42 })); - -new ParseServer({ - databaseURI: 'mongodb://localhost:27017/myapp', - appId: 'myapp', - masterKey: 'secret', - cloud: cloud, // ← ParseCloud has getRouter(), detected as InProcessAdapter -}); -``` - ---- - -## File Structure - -### New Files - -| File | Responsibility | -|------|---------------| -| `src/cloud-code/types.ts` | TypeScript interfaces: `CloudCodeAdapter`, `CloudCodeRegistry`, `HookStore`, `TriggerName`, handler types, config types | -| `src/cloud-code/CloudCodeManager.ts` | Core manager: adapter lifecycle, hook store, registration with conflict detection, lookup, execution | -| 
`src/cloud-code/adapters/LegacyAdapter.ts` | Loads user cloud code file (delegates to existing `Parse.Cloud.js` → `triggers.js` facade → manager) | -| `src/cloud-code/adapters/InProcessAdapter.ts` | Duck-typed `getRouter()` integration, manifest-based registration, webhook body bridge | -| `src/cloud-code/adapters/ExternalProcessAdapter.ts` | Child process lifecycle, ParseCloud/1.0 protocol, health monitoring | -| `src/cloud-code/adapters/webhook-bridge.ts` | `requestToWebhookBody()` and `webhookResponseToResult()` shared by InProcess and External adapters | -| `spec/CloudCodeManager.spec.js` | Unit tests for CloudCodeManager | -| `spec/InProcessAdapter.spec.js` | Tests for InProcessAdapter with mock router | -| `spec/ExternalProcessAdapter.spec.js` | Tests for ExternalProcessAdapter lifecycle | -| `spec/CloudCodeAdapter.integration.spec.js` | Integration tests for composable adapters | - -### Modified Files - -| File | Change | -|------|--------| -| `src/triggers.js` | Becomes facade: all `add*`/`get*`/`remove*` functions delegate to `CloudCodeManager` via `AppCache` when one exists | -| `src/ParseServer.ts` | Initialize `CloudCodeManager` with resolved adapters during `start()`, store on `AppCache` | -| `src/Options/index.js` | Add `cloudCodeCommand`, `webhookKey`, `cloudCodeOptions`, `cloudCodeAdapters` types | -| `src/Options/Definitions.js` | Add option definitions for new config fields | - -### Key Architectural Decision: LegacyAdapter Does NOT Patch Parse.Cloud - -The LegacyAdapter simply loads the user's cloud code file. `Parse.Cloud.define()`, `Parse.Cloud.beforeSave()`, etc. continue to call `triggers.addFunction()`, `triggers.addTrigger()` exactly as they do today. The facade in `triggers.js` intercepts these writes and delegates to `CloudCodeManager.defineFunction()` / `CloudCodeManager.defineTrigger()`. 
- -This approach: -- Preserves `validateValidator()` and `addRateLimit()` calls in `Parse.Cloud.js` without reimplementation -- Preserves complex auth trigger argument parsing (`beforeLogin` can be called with or without className) -- Preserves `getClassName()` / `getRoute()` / `isParseObjectConstructor()` logic -- Eliminates a whole class of patching/restore bugs - ---- - -## Chunk 1: Types and CloudCodeManager Core - -### Task 1: Define TypeScript Interfaces - -**Files:** -- Create: `src/cloud-code/types.ts` - -- [ ] **Step 1: Create the types file** - -```typescript -// src/cloud-code/types.ts - -// --- Trigger Types (mirrors existing Types object in triggers.js) --- - -export const TriggerTypes = Object.freeze({ - beforeLogin: 'beforeLogin', - afterLogin: 'afterLogin', - afterLogout: 'afterLogout', - beforePasswordResetRequest: 'beforePasswordResetRequest', - beforeSave: 'beforeSave', - afterSave: 'afterSave', - beforeDelete: 'beforeDelete', - afterDelete: 'afterDelete', - beforeFind: 'beforeFind', - afterFind: 'afterFind', - beforeConnect: 'beforeConnect', - beforeSubscribe: 'beforeSubscribe', - afterEvent: 'afterEvent', -}); - -export type TriggerName = keyof typeof TriggerTypes; - -// --- Handler Types --- - -export type CloudFunctionHandler = (request: any) => any; -export type CloudTriggerHandler = (request: any) => any; -export type CloudJobHandler = (request: any) => any; -export type LiveQueryHandler = (data: any) => void; -export type ValidatorHandler = Record | ((request: any) => any); - -// --- Hook Store --- - -export interface FunctionEntry { - handler: CloudFunctionHandler; - source: string; - validator?: ValidatorHandler; -} - -export interface TriggerEntry { - handler: CloudTriggerHandler; - source: string; - validator?: ValidatorHandler; -} - -export interface JobEntry { - handler: CloudJobHandler; - source: string; -} - -export interface LiveQueryEntry { - handler: LiveQueryHandler; - source: string; -} - -export interface HookStore { - 
functions: Map<string, FunctionEntry>;
-  triggers: Map<string, TriggerEntry>;
-  jobs: Map<string, JobEntry>;
-  liveQueryHandlers: LiveQueryEntry[];
-}
-
-// --- Server Config ---
-
-export interface ParseServerConfig {
-  appId: string;
-  masterKey: string;
-  serverURL: string;
-}
-
-// --- Registry (scoped per-adapter) ---
-
-export interface CloudCodeRegistry {
-  defineFunction(name: string, handler: CloudFunctionHandler, validator?: ValidatorHandler): void;
-  defineTrigger(className: string, triggerName: TriggerName, handler: CloudTriggerHandler, validator?: ValidatorHandler): void;
-  defineJob(name: string, handler: CloudJobHandler): void;
-  defineLiveQueryHandler(handler: LiveQueryHandler): void;
-}
-
-// --- Adapter Interface ---
-
-export interface CloudCodeAdapter {
-  readonly name: string;
-  initialize(registry: CloudCodeRegistry, config: ParseServerConfig): Promise<void>;
-  isHealthy(): Promise<boolean>;
-  shutdown(): Promise<void>;
-}
-
-// --- InProcess duck-typed interface ---
-
-export interface CloudManifest {
-  protocol: string;
-  hooks: {
-    functions: Array<{ name: string }>;
-    triggers: Array<{ className: string; triggerName: string }>;
-    jobs: Array<{ name: string }>;
-  };
-}
-
-export type WebhookResponse =
-  | { success: unknown }
-  | { error: { code: number; message: string } };
-
-export interface CloudRouter {
-  getManifest(): CloudManifest;
-  dispatchFunction(name: string, body: Record<string, unknown>): Promise<WebhookResponse>;
-  dispatchTrigger(className: string, triggerName: string, body: Record<string, unknown>): Promise<WebhookResponse>;
-  dispatchJob(name: string, body: Record<string, unknown>): Promise<WebhookResponse>;
-}
-
-export interface InProcessCloudCode {
-  getRouter(): CloudRouter;
-}
-
-// --- External Process Options ---
-
-export interface CloudCodeOptions {
-  startupTimeout?: number;
-  healthCheckInterval?: number;
-  shutdownTimeout?: number;
-  maxRestartDelay?: number;
-}
-```
-
-- [ ] **Step 2: Verify it compiles**
-
-Run: `npm run build`
-Expected: No errors
-
-- [ ] **Step 3: Commit**
-
-```bash
-git add -f src/cloud-code/types.ts
-git commit -m "feat: add TypeScript types for Cloud Code Adapter
system" -``` - ---- - -### Task 2: CloudCodeManager — Registration, Conflict Detection, Lookup - -**Files:** -- Create: `src/cloud-code/CloudCodeManager.ts` -- Test: `spec/CloudCodeManager.spec.js` - -- [ ] **Step 1: Write failing tests** - -```javascript -// spec/CloudCodeManager.spec.js -const { CloudCodeManager } = require('../lib/cloud-code/CloudCodeManager'); - -describe('CloudCodeManager', () => { - let manager; - - beforeEach(() => { - manager = new CloudCodeManager(); - }); - - describe('createRegistry', () => { - it('creates a scoped registry for an adapter', () => { - const registry = manager.createRegistry('test-adapter'); - expect(registry.defineFunction).toBeDefined(); - expect(registry.defineTrigger).toBeDefined(); - expect(registry.defineJob).toBeDefined(); - expect(registry.defineLiveQueryHandler).toBeDefined(); - }); - }); - - describe('defineFunction', () => { - it('registers a cloud function', () => { - const handler = () => {}; - manager.defineFunction('test', 'hello', handler); - expect(manager.getFunction('hello')).toBe(handler); - }); - - it('throws on duplicate function from different source', () => { - manager.defineFunction('adapter-a', 'hello', () => {}); - expect(() => { - manager.defineFunction('adapter-b', 'hello', () => {}); - }).toThrowError(/Cloud code conflict.*hello.*adapter-a.*adapter-b/); - }); - - it('allows re-registration from same source (overwrite)', () => { - const handler1 = () => 'first'; - const handler2 = () => 'second'; - manager.defineFunction('test', 'hello', handler1); - manager.defineFunction('test', 'hello', handler2); - expect(manager.getFunction('hello')).toBe(handler2); - }); - - it('supports namespaced function names', () => { - const handler = () => {}; - manager.defineFunction('test', 'ns.sub.func', handler); - expect(manager.getFunction('ns.sub.func')).toBe(handler); - }); - }); - - describe('defineTrigger', () => { - it('registers a trigger', () => { - const handler = () => {}; - 
manager.defineTrigger('test', 'Todo', 'beforeSave', handler); - expect(manager.getTrigger('Todo', 'beforeSave')).toBe(handler); - }); - - it('throws on duplicate trigger from different source', () => { - manager.defineTrigger('adapter-a', 'Todo', 'beforeSave', () => {}); - expect(() => { - manager.defineTrigger('adapter-b', 'Todo', 'beforeSave', () => {}); - }).toThrowError(/Cloud code conflict.*beforeSave.*Todo.*adapter-a.*adapter-b/); - }); - - it('rejects beforeSave on _PushStatus', () => { - expect(() => { - manager.defineTrigger('test', '_PushStatus', 'beforeSave', () => {}); - }).toThrowError(); - }); - - it('allows afterSave on _PushStatus', () => { - expect(() => { - manager.defineTrigger('test', '_PushStatus', 'afterSave', () => {}); - }).not.toThrow(); - }); - - it('allows beforeLogin only on _User', () => { - expect(() => { - manager.defineTrigger('test', 'Todo', 'beforeLogin', () => {}); - }).toThrowError(); - expect(() => { - manager.defineTrigger('test', '_User', 'beforeLogin', () => {}); - }).not.toThrow(); - }); - - it('rejects all triggers on _Session except afterLogout', () => { - expect(() => { - manager.defineTrigger('test', '_Session', 'beforeSave', () => {}); - }).toThrowError(/Only the afterLogout trigger/); - expect(() => { - manager.defineTrigger('test', '_Session', 'afterLogout', () => {}); - }).not.toThrow(); - }); - - it('uses virtual className @File for file triggers', () => { - const handler = () => {}; - manager.defineTrigger('test', '@File', 'beforeSave', handler); - expect(manager.getTrigger('@File', 'beforeSave')).toBe(handler); - }); - - it('uses virtual className @Connect for connect triggers', () => { - const handler = () => {}; - manager.defineTrigger('test', '@Connect', 'beforeConnect', handler); - expect(manager.getTrigger('@Connect', 'beforeConnect')).toBe(handler); - }); - }); - - describe('defineJob', () => { - it('registers a job', () => { - const handler = () => {}; - manager.defineJob('test', 'myJob', handler); - 
expect(manager.getJob('myJob')).toBe(handler); - }); - - it('throws on duplicate job from different source', () => { - manager.defineJob('adapter-a', 'myJob', () => {}); - expect(() => { - manager.defineJob('adapter-b', 'myJob', () => {}); - }).toThrowError(/Cloud code conflict/); - }); - }); - - describe('lookup methods', () => { - it('getFunctionNames returns all registered names', () => { - manager.defineFunction('test', 'a', () => {}); - manager.defineFunction('test', 'b', () => {}); - manager.defineFunction('test', 'c', () => {}); - expect(manager.getFunctionNames().sort()).toEqual(['a', 'b', 'c']); - }); - - it('getJobs returns all jobs as a Map', () => { - const h1 = () => {}; - const h2 = () => {}; - manager.defineJob('test', 'job1', h1); - manager.defineJob('test', 'job2', h2); - const jobs = manager.getJobs(); - expect(jobs.get('job1')).toBe(h1); - expect(jobs.get('job2')).toBe(h2); - }); - - it('getJobsObject returns plain object (for facade compat)', () => { - const h1 = () => {}; - manager.defineJob('test', 'job1', h1); - const jobs = manager.getJobsObject(); - expect(jobs['job1']).toBe(h1); - }); - - it('triggerExists returns boolean', () => { - manager.defineTrigger('test', 'Todo', 'beforeSave', () => {}); - expect(manager.triggerExists('Todo', 'beforeSave')).toBe(true); - expect(manager.triggerExists('Todo', 'afterSave')).toBe(false); - }); - - it('getFunction returns undefined for unregistered', () => { - expect(manager.getFunction('nonexistent')).toBeUndefined(); - }); - - it('getTrigger returns undefined for unregistered', () => { - expect(manager.getTrigger('Todo', 'beforeSave')).toBeUndefined(); - }); - }); - - describe('validators', () => { - it('getValidator returns validator for function', () => { - const validator = { requireUser: true }; - manager.defineFunction('test', 'secured', () => {}, validator); - expect(manager.getValidator('secured')).toEqual(validator); - }); - - it('getValidator returns validator for trigger (key format: 
triggerType.className)', () => { - const validator = { requireMaster: true }; - manager.defineTrigger('test', 'Todo', 'beforeSave', () => {}, validator); - expect(manager.getValidator('beforeSave.Todo')).toEqual(validator); - }); - - it('getValidator returns undefined when no validator set', () => { - manager.defineFunction('test', 'noValidator', () => {}); - expect(manager.getValidator('noValidator')).toBeUndefined(); - }); - }); - - describe('unregisterAll', () => { - it('removes all hooks from a source', () => { - manager.defineFunction('adapter-a', 'fn1', () => {}); - manager.defineFunction('adapter-b', 'fn2', () => {}); - manager.defineTrigger('adapter-a', 'Todo', 'beforeSave', () => {}); - manager.defineJob('adapter-a', 'job1', () => {}); - - manager.unregisterAll('adapter-a'); - - expect(manager.getFunction('fn1')).toBeUndefined(); - expect(manager.getFunction('fn2')).toBeDefined(); - expect(manager.getTrigger('Todo', 'beforeSave')).toBeUndefined(); - expect(manager.getJob('job1')).toBeUndefined(); - }); - - it('removes live query handlers from a source', () => { - let callCount = 0; - manager.defineLiveQueryHandler('keep', () => { callCount++; }); - manager.defineLiveQueryHandler('remove', () => { callCount += 100; }); - - manager.unregisterAll('remove'); - manager.runLiveQueryEventHandlers({}); - - expect(callCount).toBe(1); - }); - }); - - describe('defineLiveQueryHandler', () => { - it('registers and executes handlers synchronously', () => { - const calls = []; - manager.defineLiveQueryHandler('a', (data) => calls.push(['a', data])); - manager.defineLiveQueryHandler('b', (data) => calls.push(['b', data])); - - manager.runLiveQueryEventHandlers({ event: 'test' }); - - expect(calls).toEqual([['a', { event: 'test' }], ['b', { event: 'test' }]]); - }); - }); - - describe('registry scoping', () => { - it('registry calls use the adapter name as source', () => { - const registry = manager.createRegistry('my-adapter'); - registry.defineFunction('hello', () => 
{}); - - expect(() => { - manager.defineFunction('other-adapter', 'hello', () => {}); - }).toThrowError(/my-adapter.*other-adapter/); - }); - }); - - describe('lifecycle', () => { - it('initialize calls each adapter in order', async () => { - const order = []; - const makeAdapter = (name) => ({ - name, - async initialize(registry) { order.push(name); }, - async isHealthy() { return true; }, - async shutdown() {}, - }); - - await manager.initialize( - [makeAdapter('first'), makeAdapter('second')], - { appId: 'test', masterKey: 'mk', serverURL: 'http://localhost' } - ); - - expect(order).toEqual(['first', 'second']); - }); - - it('rejects duplicate adapter names', async () => { - const adapter = { - name: 'dupe', - async initialize() {}, - async isHealthy() { return true; }, - async shutdown() {}, - }; - - await expectAsync( - manager.initialize([adapter, adapter], { appId: 'test', masterKey: 'mk', serverURL: 'http://localhost' }) - ).toBeRejectedWithError(/Duplicate adapter name/); - }); - - it('shutdown calls each adapter and clears store', async () => { - let shutdownCalled = false; - const adapter = { - name: 'test', - async initialize(registry) { - registry.defineFunction('fn', () => {}); - }, - async isHealthy() { return true; }, - async shutdown() { shutdownCalled = true; }, - }; - - await manager.initialize([adapter], { appId: 'test', masterKey: 'mk', serverURL: 'http://localhost' }); - expect(manager.getFunction('fn')).toBeDefined(); - - await manager.shutdown(); - expect(shutdownCalled).toBe(true); - expect(manager.getFunction('fn')).toBeUndefined(); - }); - - it('healthCheck returns status per adapter', async () => { - const healthy = { name: 'ok', async initialize() {}, async isHealthy() { return true; }, async shutdown() {} }; - const unhealthy = { name: 'bad', async initialize() {}, async isHealthy() { return false; }, async shutdown() {} }; - - await manager.initialize([healthy, unhealthy], { appId: 'test', masterKey: 'mk', serverURL: 
'http://localhost' }); - const results = await manager.healthCheck(); - - expect(results.get('ok')).toBe(true); - expect(results.get('bad')).toBe(false); - }); - }); - - describe('removeFunction / removeTrigger', () => { - it('removeFunction removes a registered function', () => { - manager.defineFunction('test', 'fn', () => {}); - expect(manager.getFunction('fn')).toBeDefined(); - manager.removeFunction('fn'); - expect(manager.getFunction('fn')).toBeUndefined(); - }); - - it('removeTrigger removes a registered trigger', () => { - manager.defineTrigger('test', 'Todo', 'beforeSave', () => {}); - expect(manager.getTrigger('Todo', 'beforeSave')).toBeDefined(); - manager.removeTrigger('beforeSave', 'Todo'); - expect(manager.getTrigger('Todo', 'beforeSave')).toBeUndefined(); - }); - }); -}); -``` - -- [ ] **Step 2: Run tests to verify they fail** - -Run: `npm run build && TESTING=1 npx jasmine spec/CloudCodeManager.spec.js` -Expected: FAIL — `CloudCodeManager` module not found - -- [ ] **Step 3: Implement CloudCodeManager** - -```typescript -// src/cloud-code/CloudCodeManager.ts -import type { - CloudCodeAdapter, - CloudCodeRegistry, - CloudFunctionHandler, - CloudTriggerHandler, - CloudJobHandler, - LiveQueryHandler, - ValidatorHandler, - HookStore, - ParseServerConfig, -} from './types'; - -const USER_ONLY_TRIGGERS = ['beforeLogin', 'afterLogin', 'beforePasswordResetRequest']; -const SESSION_ONLY_TRIGGERS = ['afterLogout']; - -function validateClassNameForTrigger(className: string, triggerName: string): void { - // Only afterSave is allowed on _PushStatus - if (className === '_PushStatus' && triggerName === 'beforeSave') { - throw new Error(`Triggers are not allowed for class _PushStatus: ${triggerName}. 
Only afterSave is allowed.`);
-  }
-  // Only afterLogout is allowed on _Session
-  if (className === '_Session' && triggerName !== 'afterLogout') {
-    throw new Error('Only the afterLogout trigger is allowed for the _Session class.');
-  }
-  // Auth triggers only on _User
-  if (USER_ONLY_TRIGGERS.includes(triggerName) && className !== '_User') {
-    throw new Error(`${triggerName} trigger is only allowed on _User class`);
-  }
-  if (SESSION_ONLY_TRIGGERS.includes(triggerName) && className !== '_Session') {
-    throw new Error(`${triggerName} trigger is only allowed on _Session class`);
-  }
-}
-
-function triggerKey(className: string, triggerName: string): string {
-  return `${triggerName}.${className}`;
-}
-
-export class CloudCodeManager {
-  private adapters: Map<string, CloudCodeAdapter> = new Map();
-  private store: HookStore = {
-    functions: new Map(),
-    triggers: new Map(),
-    jobs: new Map(),
-    liveQueryHandlers: [],
-  };
-
-  // --- Lifecycle ---
-
-  async initialize(adapters: CloudCodeAdapter[], serverConfig: ParseServerConfig): Promise<void> {
-    for (const adapter of adapters) {
-      if (this.adapters.has(adapter.name)) {
-        throw new Error(`Duplicate adapter name: '${adapter.name}'`);
-      }
-      this.adapters.set(adapter.name, adapter);
-      const registry = this.createRegistry(adapter.name);
-      await adapter.initialize(registry, serverConfig);
-    }
-  }
-
-  async shutdown(): Promise<void> {
-    for (const adapter of this.adapters.values()) {
-      await adapter.shutdown();
-    }
-    this.adapters.clear();
-    this.store.functions.clear();
-    this.store.triggers.clear();
-    this.store.jobs.clear();
-    this.store.liveQueryHandlers.length = 0;
-  }
-
-  async healthCheck(): Promise<Map<string, boolean>> {
-    const results = new Map<string, boolean>();
-    for (const [name, adapter] of this.adapters) {
-      try {
-        results.set(name, await adapter.isHealthy());
-      } catch {
-        results.set(name, false);
-      }
-    }
-    return results;
-  }
-
-  // --- Registry Factory ---
-
-  createRegistry(source: string): CloudCodeRegistry {
-    return {
-      defineFunction: (name, handler, validator?)
=> { - this.defineFunction(source, name, handler, validator); - }, - defineTrigger: (className, triggerName, handler, validator?) => { - this.defineTrigger(source, className, triggerName, handler, validator); - }, - defineJob: (name, handler) => { - this.defineJob(source, name, handler); - }, - defineLiveQueryHandler: (handler) => { - this.defineLiveQueryHandler(source, handler); - }, - }; - } - - // --- Registration --- - - defineFunction(source: string, name: string, handler: CloudFunctionHandler, validator?: ValidatorHandler): void { - const existing = this.store.functions.get(name); - if (existing && existing.source !== source) { - throw new Error( - `Cloud code conflict: function '${name}' registered by both '${existing.source}' and '${source}'` - ); - } - this.store.functions.set(name, { handler, source, validator }); - } - - defineTrigger(source: string, className: string, triggerName: string, handler: CloudTriggerHandler, validator?: ValidatorHandler): void { - validateClassNameForTrigger(className, triggerName); - const key = triggerKey(className, triggerName); - const existing = this.store.triggers.get(key); - if (existing && existing.source !== source) { - throw new Error( - `Cloud code conflict: ${triggerName} on '${className}' registered by both '${existing.source}' and '${source}'` - ); - } - this.store.triggers.set(key, { handler, source, validator }); - } - - defineJob(source: string, name: string, handler: CloudJobHandler): void { - const existing = this.store.jobs.get(name); - if (existing && existing.source !== source) { - throw new Error( - `Cloud code conflict: job '${name}' registered by both '${existing.source}' and '${source}'` - ); - } - this.store.jobs.set(name, { handler, source }); - } - - defineLiveQueryHandler(source: string, handler: LiveQueryHandler): void { - this.store.liveQueryHandlers.push({ handler, source }); - } - - // --- Removal (for HooksController REST API and facade compatibility) --- - - removeFunction(name: string): 
void {
-    this.store.functions.delete(name);
-  }
-
-  removeTrigger(triggerType: string, className: string): void {
-    this.store.triggers.delete(triggerKey(className, triggerType));
-  }
-
-  unregisterAll(source: string): void {
-    for (const [key, entry] of this.store.functions) {
-      if (entry.source === source) this.store.functions.delete(key);
-    }
-    for (const [key, entry] of this.store.triggers) {
-      if (entry.source === source) this.store.triggers.delete(key);
-    }
-    for (const [key, entry] of this.store.jobs) {
-      if (entry.source === source) this.store.jobs.delete(key);
-    }
-    this.store.liveQueryHandlers = this.store.liveQueryHandlers.filter(e => e.source !== source);
-  }
-
-  // --- Lookup ---
-
-  getFunction(name: string): CloudFunctionHandler | undefined {
-    return this.store.functions.get(name)?.handler;
-  }
-
-  getTrigger(className: string, triggerType: string): CloudTriggerHandler | undefined {
-    return this.store.triggers.get(triggerKey(className, triggerType))?.handler;
-  }
-
-  triggerExists(className: string, triggerType: string): boolean {
-    return this.store.triggers.has(triggerKey(className, triggerType));
-  }
-
-  getJob(name: string): CloudJobHandler | undefined {
-    return this.store.jobs.get(name)?.handler;
-  }
-
-  getJobs(): Map<string, CloudJobHandler> {
-    const result = new Map<string, CloudJobHandler>();
-    for (const [name, entry] of this.store.jobs) {
-      result.set(name, entry.handler);
-    }
-    return result;
-  }
-
-  /** Returns jobs as a plain object (for triggers.js facade backwards compatibility) */
-  getJobsObject(): Record<string, CloudJobHandler> {
-    const result: Record<string, CloudJobHandler> = {};
-    for (const [name, entry] of this.store.jobs) {
-      result[name] = entry.handler;
-    }
-    return result;
-  }
-
-  getFunctionNames(): string[] {
-    return Array.from(this.store.functions.keys());
-  }
-
-  getValidator(key: string): ValidatorHandler | undefined {
-    const fn = this.store.functions.get(key);
-    if (fn) return fn.validator;
-    const trigger = this.store.triggers.get(key);
-    if (trigger) return trigger.validator;
-    return undefined;
-  }
-
-
/** Synchronously clear the entire store. Used by test cleanup (_unregisterAll). */
-  clearAll(): void {
-    this.store.functions.clear();
-    this.store.triggers.clear();
-    this.store.jobs.clear();
-    this.store.liveQueryHandlers.length = 0;
-  }
-
-  // --- Execution ---
-
-  runLiveQueryEventHandlers(data: any): void {
-    for (const entry of this.store.liveQueryHandlers) {
-      entry.handler(data);
-    }
-  }
-}
-```
-
-- [ ] **Step 4: Build and run tests**
-
-Run: `npm run build && TESTING=1 npx jasmine spec/CloudCodeManager.spec.js`
-Expected: All tests PASS
-
-- [ ] **Step 5: Commit**
-
-```bash
-git add -f src/cloud-code/CloudCodeManager.ts src/cloud-code/types.ts spec/CloudCodeManager.spec.js
-git commit -m "feat: add CloudCodeManager with registration, conflict detection, and lookup"
-```
-
----
-
-## Chunk 2: LegacyAdapter and Triggers Facade
-
-### Task 3: Implement LegacyAdapter
-
-The LegacyAdapter is intentionally simple — it just loads the cloud code file. All the registration magic happens through the existing `Parse.Cloud.js` → `triggers.js` → `CloudCodeManager` delegation chain (set up in Task 4).
-
-**Files:**
-- Create: `src/cloud-code/adapters/LegacyAdapter.ts`
-
-- [ ] **Step 1: Implement LegacyAdapter**
-
-```typescript
-// src/cloud-code/adapters/LegacyAdapter.ts
-import type { CloudCodeAdapter, CloudCodeRegistry, ParseServerConfig } from '../types';
-
-export class LegacyAdapter implements CloudCodeAdapter {
-  readonly name = 'legacy';
-  private cloud: string | ((parse: any) => void);
-
-  constructor(cloud: string | ((parse: any) => void)) {
-    this.cloud = cloud;
-  }
-
-  async initialize(registry: CloudCodeRegistry, _config: ParseServerConfig): Promise<void> {
-    // The registry is not used directly by LegacyAdapter.
-    // Instead, the cloud code file calls Parse.Cloud.define() etc.,
-    // which calls triggers.addFunction() etc.,
-    // which the facade delegates to CloudCodeManager.
-    //
-    // The LegacyAdapter's sole job is to load the cloud code file.
-    const Parse = require('parse/node').Parse;
-
-    if (typeof this.cloud === 'function') {
-      await Promise.resolve(this.cloud(Parse));
-    } else if (typeof this.cloud === 'string') {
-      const path = require('path');
-      const resolved = path.resolve(process.cwd(), this.cloud);
-      // Support both CommonJS and ES modules
-      try {
-        const pkg = require(path.resolve(process.cwd(), 'package.json'));
-        if (process.env.npm_package_type === 'module' || pkg?.type === 'module') {
-          await import(resolved);
-        } else {
-          require(resolved);
-        }
-      } catch {
-        require(resolved);
-      }
-    }
-  }
-
-  async isHealthy(): Promise<boolean> {
-    return true;
-  }
-
-  async shutdown(): Promise<void> {
-    // No-op for in-process code
-  }
-}
-```
-
-- [ ] **Step 2: Commit**
-
-```bash
-git add -f src/cloud-code/adapters/LegacyAdapter.ts
-git commit -m "feat: add LegacyAdapter for loading cloud code files"
-```
-
-### Task 4: Create triggers.js Facade
-
-This is the critical migration step. `triggers.js` keeps all its exports, but its internal `add*`, `get*`, and `remove*` functions delegate to `CloudCodeManager` when one is present on `AppCache`.
-
-**Files:**
-- Modify: `src/triggers.js`
-
-- [ ] **Step 1: Read the full triggers.js to understand the current structure**
-
-Read: `src/triggers.js` completely.
Pay attention to: -- The `_triggerStore` global (line 90) -- The `add()`, `get()`, `remove()` internal functions (lines 123-147) -- All exported functions that call these internals -- The `_unregisterAll()` function (line 183) - -- [ ] **Step 2: Add AppCache import and manager helper at top of triggers.js** - -Add after existing imports (around line 3): - -```javascript -import AppCache from './cache'; - -function getManager(applicationId) { - const cached = AppCache.get(applicationId || Parse.applicationId); - return cached && cached.cloudCodeManager; -} -``` - -- [ ] **Step 3: Update registration functions to delegate writes** - -Update `addFunction`, `addJob`, `addTrigger`, `addConnectTrigger`, `addLiveQueryEventHandler` to delegate to manager when present. **CRITICAL:** The fallback paths must use the original `add(category, name, handler, applicationId)` signature exactly as they are today. - -```javascript -export function addFunction(functionName, handler, validationHandler, applicationId) { - const manager = getManager(applicationId); - if (manager) { - manager.defineFunction('legacy', functionName, handler, validationHandler); - return; - } - // Original code — do not change these signatures - add(Category.Functions, functionName, handler, applicationId); - add(Category.Validators, functionName, validationHandler, applicationId); -} - -export function addJob(jobName, handler, applicationId) { - const manager = getManager(applicationId); - if (manager) { - manager.defineJob('legacy', jobName, handler); - return; - } - add(Category.Jobs, jobName, handler, applicationId); -} - -export function addTrigger(type, className, handler, applicationId, validationHandler) { - const manager = getManager(applicationId); - if (manager) { - manager.defineTrigger('legacy', className, type, handler, validationHandler); - return; - } - // Original code — preserve exact signatures - validateClassNameForTriggers(className, type); - add(Category.Triggers, 
`${type}.${className}`, handler, applicationId); - add(Category.Validators, `${type}.${className}`, validationHandler, applicationId); -} - -export function addConnectTrigger(type, handler, applicationId, validationHandler) { - const manager = getManager(applicationId); - if (manager) { - manager.defineTrigger('legacy', ConnectClassName, type, handler, validationHandler); - return; - } - add(Category.Triggers, `${type}.${ConnectClassName}`, handler, applicationId); - add(Category.Validators, `${type}.${ConnectClassName}`, validationHandler, applicationId); -} - -export function addLiveQueryEventHandler(handler, applicationId) { - const manager = getManager(applicationId); - if (manager) { - manager.defineLiveQueryHandler('legacy', handler); - return; - } - // Original code - applicationId = applicationId || Parse.applicationId; - _triggerStore[applicationId] = _triggerStore[applicationId] || baseStore(); - _triggerStore[applicationId].LiveQuery.push(handler); -} -``` - -- [ ] **Step 4: Update lookup functions to delegate reads** - -**CRITICAL:** Fallback paths must use the original `get(category, name, applicationId)` signature exactly. 
- -```javascript -export function getTrigger(className, triggerType, applicationId) { - const manager = getManager(applicationId); - if (manager) { - return manager.getTrigger(className, triggerType); - } - return get(Category.Triggers, `${triggerType}.${className}`, applicationId); -} - -export function triggerExists(className, type, applicationId) { - const manager = getManager(applicationId); - if (manager) { - return manager.triggerExists(className, type); - } - return !!get(Category.Triggers, `${type}.${className}`, applicationId); -} - -export function getFunction(functionName, applicationId) { - const manager = getManager(applicationId); - if (manager) { - return manager.getFunction(functionName); - } - return get(Category.Functions, functionName, applicationId); -} - -export function getFunctionNames(applicationId) { - const manager = getManager(applicationId); - if (manager) { - return manager.getFunctionNames(); - } - // ... keep existing implementation as fallback (recursive namespace traversal) -} - -export function getJob(jobName, applicationId) { - const manager = getManager(applicationId); - if (manager) { - return manager.getJob(jobName); - } - return get(Category.Jobs, jobName, applicationId); -} - -export function getJobs(applicationId) { - const manager = getManager(applicationId); - if (manager) { - // Returns plain object for backwards compatibility (consumers use Object.keys()) - return manager.getJobsObject(); - } - // ... 
keep existing implementation as fallback -} - -export function getValidator(functionName, applicationId) { - const manager = getManager(applicationId); - if (manager) { - return manager.getValidator(functionName); - } - return get(Category.Validators, functionName, applicationId); -} -``` - -- [ ] **Step 5: Update removal functions** - -```javascript -export function removeFunction(functionName, applicationId) { - const manager = getManager(applicationId); - if (manager) { - manager.removeFunction(functionName); - return; - } - remove(Category.Functions, functionName, applicationId); -} - -export function removeTrigger(type, className, applicationId) { - const manager = getManager(applicationId); - if (manager) { - manager.removeTrigger(type, className); - return; - } - remove(Category.Triggers, `${type}.${className}`, applicationId); -} -``` - -- [ ] **Step 6: Update `_unregisterAll` to clear ALL sources in the manager** - -The existing `_unregisterAll()` wipes everything for all appIds. When a manager exists, we must clear ALL sources (not just 'legacy') to match this behavior — this is used by test cleanup. 
- -```javascript -export function _unregisterAll() { - Object.keys(_triggerStore).forEach(appId => { - const manager = getManager(appId); - if (manager) { - // clearAll() synchronously wipes the entire store (all sources) - // This matches existing behavior of wiping everything for test cleanup - manager.clearAll(); - } - delete _triggerStore[appId]; - }); -} -``` - -- [ ] **Step 7: Update `runLiveQueryEventHandlers` to delegate** - -```javascript -export function runLiveQueryEventHandlers(data, applicationId = Parse.applicationId) { - const manager = getManager(applicationId); - if (manager) { - manager.runLiveQueryEventHandlers(data); - return; - } - _triggerStore[applicationId] = _triggerStore[applicationId] || baseStore(); - _triggerStore[applicationId].LiveQuery.forEach(handler => handler(data)); -} -``` - -- [ ] **Step 8: Build and run existing CloudCode tests** - -Run: `npm run build && TESTING=1 npx jasmine --filter="CloudCode"` -Expected: All existing tests PASS (facade falls back to legacy store when no manager present) - -- [ ] **Step 9: Commit** - -```bash -git add src/triggers.js -git commit -m "feat: add CloudCodeManager delegation layer to triggers.js facade" -``` - ---- - -## Chunk 3: ParseServer Integration - -### Task 5: Add Config Options - -**Files:** -- Modify: `src/Options/index.js` -- Modify: `src/Options/Definitions.js` - -- [ ] **Step 1: Add types to `src/Options/index.js`** - -After the existing `cloud: ?string` line, add: - -```javascript -cloudCodeCommand: ?string, -webhookKey: ?string, -cloudCodeOptions: ?{ - startupTimeout: ?number, - healthCheckInterval: ?number, - shutdownTimeout: ?number, - maxRestartDelay: ?number, -}, -cloudCodeAdapters: ?Array, -``` - -- [ ] **Step 2: Add definitions to `src/Options/Definitions.js`** - -Add in alphabetical order: - -```javascript -cloudCodeAdapters: { - help: 'Array of CloudCodeAdapter instances for BYO cloud code integration', -}, -cloudCodeCommand: { - env: 'PARSE_SERVER_CLOUD_CODE_COMMAND', - 
help: 'Shell command to spawn an external cloud code process (ParseCloud/1.0 protocol)', -}, -cloudCodeOptions: { - help: 'Options for the external cloud code process adapter', -}, -webhookKey: { - env: 'PARSE_SERVER_WEBHOOK_KEY', - help: 'Webhook key for authenticating external cloud code process requests. Required when cloudCodeCommand is set.', -}, -``` - -- [ ] **Step 3: Commit** - -```bash -git add src/Options/index.js src/Options/Definitions.js -git commit -m "feat: add config options for cloud code adapters" -``` - -### Task 6: Integrate CloudCodeManager into ParseServer Startup - -**Files:** -- Modify: `src/ParseServer.ts` - -- [ ] **Step 1: Read the current ParseServer.ts startup flow** - -Read: `src/ParseServer.ts` — focus on the `start()` method (lines ~150-210) and the cloud code loading block (lines ~187-202). - -- [ ] **Step 2: Add imports at top of ParseServer.ts** - -```typescript -import { CloudCodeManager } from './cloud-code/CloudCodeManager'; -import { LegacyAdapter } from './cloud-code/adapters/LegacyAdapter'; -import { InProcessAdapter } from './cloud-code/adapters/InProcessAdapter'; -import { ExternalProcessAdapter } from './cloud-code/adapters/ExternalProcessAdapter'; -``` - -- [ ] **Step 3: Add resolveAdapters function** - -Add before the `ParseServer` class or as a module-level function: - -```typescript -function resolveAdapters(options: any): any[] { - const adapters: any[] = []; - - if (options.cloudCodeAdapters) { - adapters.push(...options.cloudCodeAdapters); - } - - if (options.cloud) { - if (typeof options.cloud === 'object' && typeof options.cloud.getRouter === 'function') { - adapters.push(new InProcessAdapter(options.cloud)); - } else { - adapters.push(new LegacyAdapter(options.cloud)); - } - } - - if (options.cloudCodeCommand) { - if (!options.webhookKey) { - throw new Error('webhookKey is required when using cloudCodeCommand'); - } - adapters.push(new ExternalProcessAdapter( - options.cloudCodeCommand, - options.webhookKey, - 
options.cloudCodeOptions - )); - } - - return adapters; -} -``` - -- [ ] **Step 4: Replace cloud code loading block in start() method** - -Replace the existing cloud code loading block (lines ~187-202) with: - -```typescript -const adapters = resolveAdapters({ - cloud, - cloudCodeCommand: this.config.cloudCodeCommand, - webhookKey: this.config.webhookKey, - cloudCodeOptions: this.config.cloudCodeOptions, - cloudCodeAdapters: this.config.cloudCodeAdapters, -}); - -if (adapters.length > 0) { - addParseCloud(); - const cloudManager = new CloudCodeManager(); - - // CRITICAL: Store on this.config BEFORE adapter initialization. - // this.config flows into AppCache via Config.put() later in start(). - // We must also store it on AppCache NOW so the facade can find it - // during LegacyAdapter.initialize() → Parse.Cloud.define() → triggers.addFunction(). - this.config.cloudCodeManager = cloudManager; - const appId = this.config.appId; - const cached = AppCache.get(appId); - if (cached) { - cached.cloudCodeManager = cloudManager; - } - - await cloudManager.initialize(adapters, { - appId, - masterKey: this.config.masterKey, - serverURL: this.config.serverURL || `http://localhost:${this.config.port}${this.config.mountPath || '/parse'}`, - }); -} -``` - -**Critical ordering notes:** -1. `cloudManager` must be on both `this.config` AND `AppCache` BEFORE `initialize()` — because LegacyAdapter loads cloud code synchronously during `initialize()`, and those `Parse.Cloud.define()` calls flow through the facade which reads from `AppCache`. -2. Storing on `this.config` ensures the reference survives the `Config.put(this.config)` call at the end of `start()`, which overwrites the AppCache entry with `this.config`. - -- [ ] **Step 5: Add AppCache import if not already present** - -Verify `AppCache` is imported. It may already be imported in ParseServer.ts as `import cache from './cache'` — if so, use `cache` instead of `AppCache`. 
- -- [ ] **Step 6: Build and run tests** - -Run: `npm run build && TESTING=1 npx jasmine --filter="CloudCode"` -Expected: All existing tests PASS - -- [ ] **Step 7: Commit** - -```bash -git add src/ParseServer.ts -git commit -m "feat: integrate CloudCodeManager initialization into ParseServer startup" -``` - ---- - -## Chunk 4: InProcessAdapter and Webhook Bridge - -### Task 7: Implement Webhook Bridge - -**Files:** -- Create: `src/cloud-code/adapters/webhook-bridge.ts` - -- [ ] **Step 1: Create the webhook bridge** - -```typescript -// src/cloud-code/adapters/webhook-bridge.ts -import { Parse } from 'parse/node'; -import type { WebhookResponse } from '../types'; - -export function requestToWebhookBody(request: any): Record { - const body: Record = { - master: request.master ?? false, - ip: request.ip ?? '', - headers: request.headers ?? {}, - installationId: request.installationId, - }; - - if (request.user) { - body.user = typeof request.user.toJSON === 'function' ? request.user.toJSON() : request.user; - } - if (request.params !== undefined) body.params = request.params; - if (request.jobId !== undefined) body.jobId = request.jobId; - if (request.object) { - body.object = typeof request.object.toJSON === 'function' ? request.object.toJSON() : request.object; - } - if (request.original) { - body.original = typeof request.original.toJSON === 'function' ? 
request.original.toJSON() : request.original; - } - if (request.context !== undefined) body.context = request.context; - if (request.query) { - body.query = { - className: request.query.className, - where: request.query._where, - limit: request.query._limit, - skip: request.query._skip, - include: request.query._include?.join(','), - keys: request.query._keys?.join(','), - order: request.query._order, - }; - } - if (request.count !== undefined) body.count = request.count; - if (request.isGet !== undefined) body.isGet = request.isGet; - if (request.file) body.file = request.file; - if (request.fileSize !== undefined) body.fileSize = request.fileSize; - if (request.event) body.event = request.event; - if (request.requestId !== undefined) body.requestId = request.requestId; - if (request.clients !== undefined) body.clients = request.clients; - if (request.subscriptions !== undefined) body.subscriptions = request.subscriptions; - - return body; -} - -export function webhookResponseToResult(response: WebhookResponse): unknown { - if ('error' in response) { - throw new Parse.Error(response.error.code, response.error.message); - } - return response.success; -} - -export function applyBeforeSaveResponse(request: any, response: WebhookResponse): void { - if ('error' in response) { - throw new Parse.Error(response.error.code, response.error.message); - } - const result = response.success; - if (typeof result === 'object' && result !== null && Object.keys(result).length === 0) { - return; - } - if (typeof result === 'object' && result !== null) { - const skipFields = ['objectId', 'createdAt', 'updatedAt', 'className']; - for (const [key, value] of Object.entries(result)) { - if (!skipFields.includes(key)) { - request.object.set(key, value); - } - } - } -} -``` - -- [ ] **Step 2: Commit** - -```bash -git add -f src/cloud-code/adapters/webhook-bridge.ts -git commit -m "feat: add webhook bridge for request/response conversion" -``` - -### Task 8: Implement InProcessAdapter - 
-**Files:** -- Create: `src/cloud-code/adapters/InProcessAdapter.ts` -- Test: `spec/InProcessAdapter.spec.js` - -- [ ] **Step 1: Write failing tests** - -```javascript -// spec/InProcessAdapter.spec.js -const { InProcessAdapter } = require('../lib/cloud-code/adapters/InProcessAdapter'); -const { CloudCodeManager } = require('../lib/cloud-code/CloudCodeManager'); - -function createMockCloudCode(manifest, handlers = {}) { - return { - getRouter() { - return { - getManifest() { return manifest; }, - async dispatchFunction(name, body) { - if (handlers[`function:${name}`]) return handlers[`function:${name}`](body); - return { success: null }; - }, - async dispatchTrigger(className, triggerName, body) { - if (handlers[`trigger:${triggerName}.${className}`]) return handlers[`trigger:${triggerName}.${className}`](body); - return { success: {} }; - }, - async dispatchJob(name, body) { - if (handlers[`job:${name}`]) return handlers[`job:${name}`](body); - return { success: null }; - }, - }; - }, - }; -} - -describe('InProcessAdapter', () => { - let manager; - - beforeEach(() => { - manager = new CloudCodeManager(); - }); - - it('has name "in-process"', () => { - const cloud = createMockCloudCode({ protocol: 'ParseCloud/1.0', hooks: { functions: [], triggers: [], jobs: [] } }); - const adapter = new InProcessAdapter(cloud); - expect(adapter.name).toBe('in-process'); - }); - - it('registers functions from manifest', async () => { - const cloud = createMockCloudCode({ - protocol: 'ParseCloud/1.0', - hooks: { - functions: [{ name: 'hello' }, { name: 'greet' }], - triggers: [], - jobs: [], - }, - }); - const adapter = new InProcessAdapter(cloud); - const registry = manager.createRegistry(adapter.name); - await adapter.initialize(registry, { appId: 'test', masterKey: 'mk', serverURL: 'http://localhost' }); - - expect(manager.getFunction('hello')).toBeDefined(); - expect(manager.getFunction('greet')).toBeDefined(); - }); - - it('registers triggers from manifest', async () => { - 
const cloud = createMockCloudCode({ - protocol: 'ParseCloud/1.0', - hooks: { - functions: [], - triggers: [{ className: 'Todo', triggerName: 'beforeSave' }], - jobs: [], - }, - }); - const adapter = new InProcessAdapter(cloud); - const registry = manager.createRegistry(adapter.name); - await adapter.initialize(registry, { appId: 'test', masterKey: 'mk', serverURL: 'http://localhost' }); - - expect(manager.getTrigger('Todo', 'beforeSave')).toBeDefined(); - }); - - it('registers jobs from manifest', async () => { - const cloud = createMockCloudCode({ - protocol: 'ParseCloud/1.0', - hooks: { - functions: [], - triggers: [], - jobs: [{ name: 'cleanup' }], - }, - }); - const adapter = new InProcessAdapter(cloud); - const registry = manager.createRegistry(adapter.name); - await adapter.initialize(registry, { appId: 'test', masterKey: 'mk', serverURL: 'http://localhost' }); - - expect(manager.getJob('cleanup')).toBeDefined(); - }); - - it('bridge handler dispatches function and returns result', async () => { - const cloud = createMockCloudCode( - { protocol: 'ParseCloud/1.0', hooks: { functions: [{ name: 'add' }], triggers: [], jobs: [] } }, - { 'function:add': (body) => ({ success: body.params.a + body.params.b }) } - ); - const adapter = new InProcessAdapter(cloud); - const registry = manager.createRegistry(adapter.name); - await adapter.initialize(registry, { appId: 'test', masterKey: 'mk', serverURL: 'http://localhost' }); - - const handler = manager.getFunction('add'); - const result = await handler({ params: { a: 2, b: 3 }, master: false, ip: '127.0.0.1', headers: {} }); - expect(result).toBe(5); - }); - - it('bridge handler throws Parse.Error on error response', async () => { - const cloud = createMockCloudCode( - { protocol: 'ParseCloud/1.0', hooks: { functions: [{ name: 'fail' }], triggers: [], jobs: [] } }, - { 'function:fail': () => ({ error: { code: 141, message: 'boom' } }) } - ); - const adapter = new InProcessAdapter(cloud); - const registry = 
manager.createRegistry(adapter.name); - await adapter.initialize(registry, { appId: 'test', masterKey: 'mk', serverURL: 'http://localhost' }); - - const handler = manager.getFunction('fail'); - await expectAsync(handler({ params: {}, master: false, ip: '', headers: {} })) - .toBeRejectedWithError(/boom/); - }); - - it('isHealthy returns true', async () => { - const cloud = createMockCloudCode({ protocol: 'ParseCloud/1.0', hooks: { functions: [], triggers: [], jobs: [] } }); - const adapter = new InProcessAdapter(cloud); - expect(await adapter.isHealthy()).toBe(true); - }); -}); -``` - -- [ ] **Step 2: Implement InProcessAdapter** - -```typescript -// src/cloud-code/adapters/InProcessAdapter.ts -import type { - CloudCodeAdapter, - CloudCodeRegistry, - ParseServerConfig, - InProcessCloudCode, -} from '../types'; -import { requestToWebhookBody, webhookResponseToResult, applyBeforeSaveResponse } from './webhook-bridge'; - -export class InProcessAdapter implements CloudCodeAdapter { - readonly name = 'in-process'; - private cloudCode: InProcessCloudCode; - - constructor(cloudCode: InProcessCloudCode) { - this.cloudCode = cloudCode; - } - - async initialize(registry: CloudCodeRegistry, _config: ParseServerConfig): Promise { - const router = this.cloudCode.getRouter(); - const manifest = router.getManifest(); - - for (const fn of manifest.hooks.functions) { - registry.defineFunction(fn.name, async (request) => { - const body = requestToWebhookBody(request); - const response = await router.dispatchFunction(fn.name, body); - return webhookResponseToResult(response); - }); - } - - for (const trigger of manifest.hooks.triggers) { - const { className, triggerName } = trigger; - registry.defineTrigger(className, triggerName as any, async (request) => { - const body = requestToWebhookBody(request); - const response = await router.dispatchTrigger(className, triggerName, body); - if (triggerName === 'beforeSave') { - applyBeforeSaveResponse(request, response); - return; - } - 
return webhookResponseToResult(response); - }); - } - - for (const job of manifest.hooks.jobs) { - registry.defineJob(job.name, async (request) => { - const body = requestToWebhookBody(request); - const response = await router.dispatchJob(job.name, body); - return webhookResponseToResult(response); - }); - } - } - - async isHealthy(): Promise { - return true; - } - - async shutdown(): Promise {} -} -``` - -- [ ] **Step 3: Build and run tests** - -Run: `npm run build && TESTING=1 npx jasmine spec/InProcessAdapter.spec.js` -Expected: All tests PASS - -- [ ] **Step 4: Commit** - -```bash -git add -f src/cloud-code/adapters/InProcessAdapter.ts spec/InProcessAdapter.spec.js -git commit -m "feat: add InProcessAdapter with webhook bridge for manifest-based cloud code" -``` - ---- - -## Chunk 5: ExternalProcessAdapter - -### Task 9: Implement ExternalProcessAdapter - -**Files:** -- Create: `src/cloud-code/adapters/ExternalProcessAdapter.ts` -- Test: `spec/ExternalProcessAdapter.spec.js` - -- [ ] **Step 1: Write failing tests** - -```javascript -// spec/ExternalProcessAdapter.spec.js -const { ExternalProcessAdapter } = require('../lib/cloud-code/adapters/ExternalProcessAdapter'); -const { CloudCodeManager } = require('../lib/cloud-code/CloudCodeManager'); -const http = require('http'); - -function createMockCloudServer(manifest, port) { - return new Promise((resolve) => { - const server = http.createServer((req, res) => { - if (req.url === '/' && req.method === 'GET') { - res.writeHead(200, { 'Content-Type': 'application/json' }); - res.end(JSON.stringify(manifest)); - } else if (req.url === '/health' && req.method === 'GET') { - res.writeHead(200); - res.end('OK'); - } else if (req.url.startsWith('/functions/') && req.method === 'POST') { - let body = ''; - req.on('data', d => body += d); - req.on('end', () => { - res.writeHead(200, { 'Content-Type': 'application/json' }); - res.end(JSON.stringify({ success: 'external-result' })); - }); - } else { - res.writeHead(404); - 
res.end(); - } - }); - server.listen(port, () => resolve(server)); - }); -} - -describe('ExternalProcessAdapter', () => { - it('has name "external-process"', () => { - const adapter = new ExternalProcessAdapter('echo test', 'secret-key'); - expect(adapter.name).toBe('external-process'); - }); - - it('requires webhookKey', () => { - expect(() => new ExternalProcessAdapter('echo test', '')).toThrowError(/webhookKey/); - }); - - it('shutdown resolves cleanly when no process started', async () => { - const adapter = new ExternalProcessAdapter('echo test', 'key'); - await expectAsync(adapter.shutdown()).toBeResolved(); - }); - - it('spawns process and reads manifest', async () => { - const manager = new CloudCodeManager(); - const port = 19876; - const server = await createMockCloudServer( - { protocol: 'ParseCloud/1.0', hooks: { functions: [{ name: 'ext-fn' }], triggers: [], jobs: [] } }, - port - ); - - try { - const cmd = `node -e "process.stdout.write('PARSE_CLOUD_READY:${port}\\n'); setTimeout(() => {}, 60000)"`; - const adapter = new ExternalProcessAdapter(cmd, 'test-key', { - startupTimeout: 5000, - healthCheckInterval: 0, - }); - const registry = manager.createRegistry(adapter.name); - await adapter.initialize(registry, { appId: 'test', masterKey: 'mk', serverURL: 'http://localhost' }); - - expect(manager.getFunction('ext-fn')).toBeDefined(); - - await adapter.shutdown(); - } finally { - server.close(); - } - }, 10000); -}); -``` - -- [ ] **Step 2: Implement ExternalProcessAdapter** - -```typescript -// src/cloud-code/adapters/ExternalProcessAdapter.ts -import { spawn, ChildProcess } from 'child_process'; -import http from 'http'; -import type { - CloudCodeAdapter, - CloudCodeRegistry, - ParseServerConfig, - CloudManifest, - CloudCodeOptions, - WebhookResponse, -} from '../types'; -import { requestToWebhookBody, webhookResponseToResult, applyBeforeSaveResponse } from './webhook-bridge'; - -const DEFAULT_OPTIONS: Required = { - startupTimeout: 30000, - 
healthCheckInterval: 30000, - shutdownTimeout: 5000, - maxRestartDelay: 30000, -}; - -function httpGet(url: string): Promise { - return new Promise((resolve, reject) => { - http.get(url, (res) => { - let data = ''; - res.on('data', (chunk) => data += chunk); - res.on('end', () => resolve(data)); - }).on('error', reject); - }); -} - -function httpPost(url: string, body: Record, webhookKey: string): Promise { - return new Promise((resolve, reject) => { - const payload = JSON.stringify(body); - const urlObj = new URL(url); - const req = http.request({ - hostname: urlObj.hostname, - port: urlObj.port, - path: urlObj.pathname, - method: 'POST', - headers: { - 'Content-Type': 'application/json', - 'Content-Length': Buffer.byteLength(payload), - 'X-Parse-Webhook-Key': webhookKey, - }, - }, (res) => { - let data = ''; - res.on('data', (chunk) => data += chunk); - res.on('end', () => { - try { - resolve(JSON.parse(data)); - } catch (e) { - reject(new Error(`Invalid JSON from cloud code process: ${data}`)); - } - }); - }); - req.on('error', reject); - req.write(payload); - req.end(); - }); -} - -export class ExternalProcessAdapter implements CloudCodeAdapter { - readonly name = 'external-process'; - private command: string; - private webhookKey: string; - private options: Required; - private process: ChildProcess | null = null; - private port: number = 0; - private healthInterval: ReturnType | null = null; - - constructor(command: string, webhookKey: string, options?: CloudCodeOptions) { - if (!webhookKey) { - throw new Error('webhookKey is required for ExternalProcessAdapter'); - } - this.command = command; - this.webhookKey = webhookKey; - this.options = { ...DEFAULT_OPTIONS, ...options }; - } - - async initialize(registry: CloudCodeRegistry, config: ParseServerConfig): Promise { - this.port = await this.spawnAndWaitForReady(config); - const manifest = await this.fetchManifest(); - this.registerFromManifest(registry, manifest); - - if (this.options.healthCheckInterval > 0) 
{ - this.healthInterval = setInterval(() => this.checkHealth(), this.options.healthCheckInterval); - } - } - - async isHealthy(): Promise { - try { - const response = await httpGet(`http://localhost:${this.port}/health`); - return response === 'OK' || response.includes('ok'); - } catch { - return false; - } - } - - async shutdown(): Promise { - if (this.healthInterval) { - clearInterval(this.healthInterval); - this.healthInterval = null; - } - if (this.process && !this.process.killed) { - this.process.kill('SIGTERM'); - await Promise.race([ - new Promise((resolve) => this.process!.once('exit', () => resolve())), - new Promise((resolve) => setTimeout(() => { - if (this.process && !this.process.killed) { - this.process.kill('SIGKILL'); - } - resolve(); - }, this.options.shutdownTimeout)), - ]); - } - this.process = null; - } - - private spawnAndWaitForReady(config: ParseServerConfig): Promise { - return new Promise((resolve, reject) => { - const child = spawn(this.command, { - shell: true, - env: { - ...process.env, - PARSE_SERVER_URL: config.serverURL, - PARSE_APPLICATION_ID: config.appId, - PARSE_MASTER_KEY: config.masterKey, - PARSE_WEBHOOK_KEY: this.webhookKey, - PARSE_CLOUD_PORT: '0', - }, - stdio: ['ignore', 'pipe', 'pipe'], - }); - - this.process = child; - - const timeout = setTimeout(() => { - child.kill('SIGKILL'); - reject(new Error(`Cloud code process did not emit PARSE_CLOUD_READY within ${this.options.startupTimeout}ms`)); - }, this.options.startupTimeout); - - let stdout = ''; - child.stdout!.on('data', (data) => { - stdout += data.toString(); - const match = stdout.match(/PARSE_CLOUD_READY:(\d+)/); - if (match) { - clearTimeout(timeout); - resolve(parseInt(match[1], 10)); - } - }); - - child.stderr!.on('data', (data) => { - process.stderr.write(`[cloud-code] ${data}`); - }); - - child.on('error', (err) => { - clearTimeout(timeout); - reject(new Error(`Failed to spawn cloud code process: ${err.message}`)); - }); - - child.on('exit', (code) => { - 
clearTimeout(timeout); - if (!this.port) { - reject(new Error(`Cloud code process exited with code ${code} before becoming ready`)); - } - }); - }); - } - - private async fetchManifest(): Promise { - const data = await httpGet(`http://localhost:${this.port}/`); - return JSON.parse(data); - } - - private registerFromManifest(registry: CloudCodeRegistry, manifest: CloudManifest): void { - for (const fn of manifest.hooks.functions) { - registry.defineFunction(fn.name, async (request) => { - const body = requestToWebhookBody(request); - const response = await httpPost(`http://localhost:${this.port}/functions/${fn.name}`, body, this.webhookKey); - return webhookResponseToResult(response); - }); - } - - for (const trigger of manifest.hooks.triggers) { - const { className, triggerName } = trigger; - registry.defineTrigger(className, triggerName as any, async (request) => { - const body = requestToWebhookBody(request); - const response = await httpPost( - `http://localhost:${this.port}/triggers/${className}/${triggerName}`, - body, - this.webhookKey - ); - if (triggerName === 'beforeSave') { - applyBeforeSaveResponse(request, response); - return; - } - return webhookResponseToResult(response); - }); - } - - for (const job of manifest.hooks.jobs) { - registry.defineJob(job.name, async (request) => { - const body = requestToWebhookBody(request); - const response = await httpPost(`http://localhost:${this.port}/jobs/${job.name}`, body, this.webhookKey); - return webhookResponseToResult(response); - }); - } - } - - private async checkHealth(): Promise { - const healthy = await this.isHealthy(); - if (!healthy) { - console.warn('[cloud-code] External process health check failed'); - } - } -} -``` - -- [ ] **Step 3: Build and run tests** - -Run: `npm run build && TESTING=1 npx jasmine spec/ExternalProcessAdapter.spec.js` -Expected: All tests PASS - -- [ ] **Step 4: Commit** - -```bash -git add -f src/cloud-code/adapters/ExternalProcessAdapter.ts 
spec/ExternalProcessAdapter.spec.js -git commit -m "feat: add ExternalProcessAdapter with child process lifecycle management" -``` - ---- - -## Chunk 6: Integration Tests and Full Verification - -### Task 10: Integration Tests - -**Files:** -- Create: `spec/CloudCodeAdapter.integration.spec.js` - -- [ ] **Step 1: Write integration tests** - -```javascript -// spec/CloudCodeAdapter.integration.spec.js -const { CloudCodeManager } = require('../lib/cloud-code/CloudCodeManager'); -const { LegacyAdapter } = require('../lib/cloud-code/adapters/LegacyAdapter'); -const { InProcessAdapter } = require('../lib/cloud-code/adapters/InProcessAdapter'); - -describe('Cloud Code Adapter Integration', () => { - describe('composable adapters', () => { - it('supports LegacyAdapter + InProcessAdapter registering different hooks', async () => { - const manager = new CloudCodeManager(); - - const legacyCloud = (Parse) => { - Parse.Cloud.define('legacyFn', () => 'from-legacy'); - }; - - const inProcessCloud = { - getRouter() { - return { - getManifest() { - return { - protocol: 'ParseCloud/1.0', - hooks: { - functions: [{ name: 'inProcessFn' }], - triggers: [], - jobs: [], - }, - }; - }, - async dispatchFunction() { return { success: 'from-in-process' }; }, - async dispatchTrigger() { return { success: {} }; }, - async dispatchJob() { return { success: null }; }, - }; - }, - }; - - // InProcessAdapter first (registers directly), then LegacyAdapter - // Note: LegacyAdapter needs the manager on AppCache for the facade to work. - // In real usage, ParseServer.start() handles this. For testing, we use - // the InProcess adapter (which registers directly) to verify composition. 
- const inProcessAdapter = new InProcessAdapter(inProcessCloud); - const inProcessRegistry = manager.createRegistry(inProcessAdapter.name); - await inProcessAdapter.initialize(inProcessRegistry, { appId: 'test', masterKey: 'mk', serverURL: 'http://localhost' }); - - // Simulate legacy registration via manager directly - manager.defineFunction('legacy', 'legacyFn', () => 'from-legacy'); - - expect(manager.getFunction('legacyFn')).toBeDefined(); - expect(manager.getFunction('inProcessFn')).toBeDefined(); - expect(manager.getFunctionNames().sort()).toEqual(['inProcessFn', 'legacyFn']); - }); - - it('throws on conflict between adapters', async () => { - const manager = new CloudCodeManager(); - - // Register a function from "legacy" source - manager.defineFunction('legacy', 'shared', () => 'from-legacy'); - - // InProcess adapter tries to register same function - const inProcessCloud = { - getRouter() { - return { - getManifest() { - return { - protocol: 'ParseCloud/1.0', - hooks: { functions: [{ name: 'shared' }], triggers: [], jobs: [] }, - }; - }, - async dispatchFunction() { return { success: 'from-in-process' }; }, - async dispatchTrigger() { return { success: {} }; }, - async dispatchJob() { return { success: null }; }, - }; - }, - }; - - const adapter = new InProcessAdapter(inProcessCloud); - const registry = manager.createRegistry(adapter.name); - - await expectAsync( - adapter.initialize(registry, { appId: 'test', masterKey: 'mk', serverURL: 'http://localhost' }) - ).toBeRejectedWithError(/Cloud code conflict.*shared.*legacy.*in-process/); - }); - }); - - describe('shutdown', () => { - it('shuts down all adapters', async () => { - const manager = new CloudCodeManager(); - let shutdownCalled = false; - - const adapter = { - name: 'test', - async initialize() {}, - async isHealthy() { return true; }, - async shutdown() { shutdownCalled = true; }, - }; - - await manager.initialize([adapter], { appId: 'test', masterKey: 'mk', serverURL: 'http://localhost' }); - 
await manager.shutdown(); - - expect(shutdownCalled).toBe(true); - }); - }); - - describe('unregisterAll', () => { - it('allows re-registration after unregisterAll', () => { - const manager = new CloudCodeManager(); - manager.defineFunction('adapter-a', 'fn', () => 'first'); - manager.unregisterAll('adapter-a'); - - // Now a different adapter can register the same name - expect(() => { - manager.defineFunction('adapter-b', 'fn', () => 'second'); - }).not.toThrow(); - expect(manager.getFunction('fn')).toBeDefined(); - }); - }); -}); -``` - -- [ ] **Step 2: Build and run integration tests** - -Run: `npm run build && TESTING=1 npx jasmine spec/CloudCodeAdapter.integration.spec.js` -Expected: All tests PASS - -- [ ] **Step 3: Commit** - -```bash -git add -f spec/CloudCodeAdapter.integration.spec.js -git commit -m "test: add integration tests for composable cloud code adapters" -``` - -### Task 11: Full Test Suite Verification - -- [ ] **Step 1: Build everything** - -Run: `npm run build` - -- [ ] **Step 2: Run complete test suite** - -Run: `npm run testonly` - -- [ ] **Step 3: Fix any failures** - -If tests fail, analyze each failure: -- **Import issues:** Verify triggers.js facade imports `AppCache` correctly -- **Manager not found:** Verify facade falls back to legacy store when no manager exists (most tests won't have a manager) -- **Type errors:** Verify Babel compiles all `.ts` files correctly -- **Behavioral changes:** Verify facade delegation matches original behavior exactly - -- [ ] **Step 4: Commit fixes if any** - -```bash -git add -A -git commit -m "fix: resolve test regressions from cloud code adapter integration" -``` diff --git a/docs/superpowers/specs/2026-03-16-cloud-code-adapter-design.md b/docs/superpowers/specs/2026-03-16-cloud-code-adapter-design.md deleted file mode 100644 index 7e546e065f..0000000000 --- a/docs/superpowers/specs/2026-03-16-cloud-code-adapter-design.md +++ /dev/null @@ -1,379 +0,0 @@ -# Cloud Code Adapter — Design Specification - 
-**Status:** Approved -**Target:** Parse Server 10.x -**Date:** 2026-03-16 -**Related:** [ParseCloud/1.0 Protocol](../../../parse-lite-sdks/docs/cloud-code-protocol.md), [Adapter Proposal](../../../parse-lite-sdks/docs/cloud-code-adapter-proposal.md) - ---- - -## 1. Problem Statement - -Parse Server's cloud code system (`Parse.Cloud.define`, `Parse.Cloud.beforeSave`, etc.) has fundamental limitations: - -1. **JavaScript only** — No support for cloud code in Swift, C#, Go, or other languages. -2. **Global singleton** — All cloud code shares `Parse.Cloud` namespace. No composition, difficult testing. -3. **In-process only** — No supported mechanism for cloud code as a separate process or service. -4. **No adapter pattern** — Hard-wired implementation with no pluggable interface. -5. **Manual webhook registration** — External webhooks require manual REST API calls. - -## 2. Design Decisions - -| Decision | Choice | Rationale | -|----------|--------|-----------| -| Adapter composition | Multiple adapters coexist | Users can run legacy JS + external Swift + custom adapters simultaneously | -| Hook conflicts | Error on conflict at startup | Fail fast, no ambiguity about which adapter handles a hook | -| Hot reload | Startup-only for v1 | Simpler implementation; can be added later | -| Registry API | Adapters only (no public registry) | Clean boundary, single integration point | -| Implementation location | In parse-server directly | Core server functionality | -| Webhook key | Explicitly configured (required) | No auto-generation, no persistence question. Diverges from proposal which offered auto-generation. | -| Language | TypeScript | Type safety throughout | -| Architecture | Replace triggers.js entirely | CloudCodeManager becomes single source of truth | -| applicationId scoping | One CloudCodeManager per app | Stored on `Config`, mirrors existing `_triggerStore[applicationId]` pattern | - -## 3. 
Architecture - -### 3.1 CloudCodeManager — The New Core - -`CloudCodeManager` replaces `triggers.js` as the single source of truth for all hook registration, lookup, and execution. One instance exists per `applicationId`, stored on the app's `Config` object. - -```typescript -class CloudCodeManager { - private adapters: Map; - private store: HookStore; - - // Lifecycle - async initialize(adapterConfigs: AdapterConfig[], serverConfig: ParseServerConfig): Promise; - async shutdown(): Promise; - async healthCheck(): Promise>; - - // Registration (called by adapters via CloudCodeRegistry) - defineFunction(source: string, name: string, handler: CloudFunctionHandler, validator?: ValidatorHandler): void; - defineTrigger(source: string, className: string, triggerName: TriggerName, handler: CloudTriggerHandler, validator?: ValidatorHandler): void; - defineJob(source: string, name: string, handler: CloudJobHandler): void; - defineLiveQueryHandler(source: string, handler: LiveQueryHandler): void; - unregisterAll(source: string): void; - - // Lookup (consumed by routers, rest of Parse Server) - getFunction(name: string): CloudFunctionHandler | undefined; - getTrigger(className: string, triggerType: string): CloudTriggerHandler | undefined; - triggerExists(className: string, triggerType: string): boolean; - getJob(name: string): CloudJobHandler | undefined; - getJobs(): Map; - getFunctionNames(): string[]; - getValidator(key: string): ValidatorHandler | undefined; - // key is a function name or `${triggerType}.${className}` for trigger validators - - // Execution (replaces maybeRunTrigger, maybeRunValidator, and specialized variants) - // runTrigger also subsumes maybeRunAfterFindTrigger (className + triggerType lookup) - async runTrigger(triggerType: string, auth: Auth, parseObject: ParseObject, ...): Promise; - async runQueryTrigger(triggerType: string, className: string, query: any, ...): Promise; - async runFileTrigger(triggerType: string, file: any, ...): Promise; - async 
runGlobalConfigTrigger(triggerType: string, config: any, ...): Promise; - async runValidator(request: any, functionName: string, auth: Auth): Promise; - runLiveQueryEventHandlers(data: any): void; // synchronous, matches existing behavior -} -``` - -**Registration validation:** `defineTrigger()` enforces className/triggerType rules for all adapters (not just Legacy): -- No `beforeSave` on `_PushStatus` -- `beforeLogin`/`afterLogin`/`beforePasswordResetRequest` only on `_User` -- `afterLogout` only on `_Session` - -Since the manager is scoped per-app, lookup methods no longer need an `applicationId` parameter. - -### 3.2 HookStore - -Typed internal structure replacing `Object.create(null)` pattern: - -```typescript -interface HookStore { - functions: Map<string, { handler: CloudFunctionHandler; validator?: ValidatorHandler; source: string }>; - triggers: Map<string, { handler: CloudTriggerHandler; validator?: ValidatorHandler; source: string }>; - // key format: `${triggerType}.${className}` - jobs: Map<string, { handler: CloudJobHandler; source: string }>; - liveQueryHandlers: Array<{ handler: LiveQueryHandler; source: string }>; -} -``` - -### 3.3 CloudCodeAdapter Interface - -```typescript -interface CloudCodeAdapter { - /** Unique identifier for this adapter instance */ - readonly name: string; - - /** Register all hooks with the registry. Called once at startup. */ - initialize(registry: CloudCodeRegistry, config: ParseServerConfig): Promise<void>; - - /** Return true if adapter is healthy and ready. */ - isHealthy(): Promise<boolean>; - - /** Clean up resources. Called during Parse Server shutdown. */ - shutdown(): Promise<void>; -} -``` - -### 3.4 CloudCodeRegistry - -Scoped per-adapter. 
Created by `CloudCodeManager` with the adapter's `name` bound as `source`: - -```typescript -interface CloudCodeRegistry { - defineFunction(name: string, handler: CloudFunctionHandler, validator?: ValidatorHandler): void; - defineTrigger(className: string, triggerName: TriggerName, handler: CloudTriggerHandler, validator?: ValidatorHandler): void; - defineJob(name: string, handler: CloudJobHandler): void; - defineLiveQueryHandler(handler: LiveQueryHandler): void; -} - -type TriggerName = - | 'beforeSave' | 'afterSave' - | 'beforeDelete' | 'afterDelete' - | 'beforeFind' | 'afterFind' - | 'beforeLogin' | 'afterLogin' | 'afterLogout' - | 'beforePasswordResetRequest' - | 'beforeConnect' | 'beforeSubscribe' | 'afterEvent'; -``` - -**Virtual classNames for special trigger targets:** - -File and Config triggers use standard trigger names (`beforeSave`, `afterSave`, etc.) with virtual classNames: - -| Target | Virtual className | Example registration | -|--------|-------------------|---------------------| -| `Parse.File` | `@File` | `defineTrigger('@File', 'beforeSave', handler)` | -| `Parse.Config` | `@Config` | `defineTrigger('@Config', 'beforeSave', handler)` | -| `beforeConnect` | `@Connect` | `defineTrigger('@Connect', 'beforeConnect', handler)` | -| `beforeSubscribe` | class name | `defineTrigger('Todo', 'beforeSubscribe', handler)` | - -This matches the existing internal storage pattern in `triggers.js` where `getClassName(Parse.File)` returns `'@File'`. The LegacyAdapter maps `Parse.Cloud.beforeSaveFile(handler)` to `defineTrigger('@File', 'beforeSave', handler)`, and `Parse.Cloud.beforeConnect(handler)` to `defineTrigger('@Connect', 'beforeConnect', handler)`. - -## 4. Built-in Adapter Implementations - -### 4.1 LegacyAdapter - -Wraps `cloud: './main.js'` or `cloud: (parse) => {}`. Zero breaking changes. - -- `initialize()` temporarily patches `Parse.Cloud.*` methods to route through the registry, loads the user's cloud code file, then restores originals. 
-- `isHealthy()` always returns `true` (in-process). -- `shutdown()` is a no-op. - -### 4.2 InProcessAdapter - -Wraps `cloud: cloudInstance` where `cloudInstance` has a `getRouter()` method (duck-typed). - -- `initialize()` calls `getRouter().getManifest()`, creates bridge handlers for each hook that convert Parse Server requests to webhook body format and call `dispatchFunction`/`dispatchTrigger`/`dispatchJob`. -- `isHealthy()` always returns `true` (in-process). -- `shutdown()` is a no-op. - -**Duck-typed interface:** - -```typescript -interface InProcessCloudCode { - getRouter(): { - getManifest(): CloudManifest; - dispatchFunction(name: string, body: Record<string, unknown>): Promise<WebhookResponse>; - dispatchTrigger(className: string, triggerName: string, body: Record<string, unknown>): Promise<WebhookResponse>; - dispatchJob(name: string, body: Record<string, unknown>): Promise<WebhookResponse>; - }; -} - -interface CloudManifest { - protocol: string; - hooks: { - functions: Array<{ name: string }>; - triggers: Array<{ className: string; triggerName: string }>; - jobs: Array<{ name: string }>; - }; -} - -type WebhookResponse = - | { success: unknown } - | { error: { code: number; message: string } }; -``` - -### 4.3 ExternalProcessAdapter - -Wraps `cloudCodeCommand: 'swift run CloudCode'`. - -- `initialize()` spawns child process with environment variables, waits for `PARSE_CLOUD_READY:<port>` on stdout, fetches manifest via `GET http://localhost:<port>/`, registers bridge handlers. -- `isHealthy()` calls `GET http://localhost:<port>/health`. -- `shutdown()` sends `SIGTERM`, waits `shutdownTimeout`, then `SIGKILL`. -- Crash recovery: the `CloudCodeManager` calls `unregisterAll(adapter.name)` internally, then the adapter restarts with exponential backoff (1s, 2s, 4s, 8s, capped at `maxRestartDelay`). 
- -**Environment variables passed to child process:** - -| Variable | Source | -|----------|--------| -| `PARSE_SERVER_URL` | Parse Server's own URL | -| `PARSE_APPLICATION_ID` | `appId` from config | -| `PARSE_MASTER_KEY` | `masterKey` from config | -| `PARSE_WEBHOOK_KEY` | `webhookKey` from config (required) | -| `PARSE_CLOUD_PORT` | `0` (OS-assigned) | - -## 5. Configuration - -### 5.1 ParseServerOptions Extension - -```typescript -interface ParseServerOptions { - // Existing (unchanged, routes through LegacyAdapter): - cloud?: string | ((parse: any) => void) | InProcessCloudCode; - - // New — external process: - cloudCodeCommand?: string; - webhookKey?: string; // Required when cloudCodeCommand is set - cloudCodeOptions?: { - startupTimeout?: number; // default 30000ms - healthCheckInterval?: number; // default 30000ms - shutdownTimeout?: number; // default 5000ms - maxRestartDelay?: number; // default 30000ms - }; - - // New — explicit BYO adapter(s): - cloudCodeAdapters?: CloudCodeAdapter[]; -} -``` - -### 5.2 Resolution Order - -All sources compose. Any hook collision throws at startup. - -```typescript -function resolveAdapters(options: ParseServerOptions): CloudCodeAdapter[] { - const adapters: CloudCodeAdapter[] = []; - - if (options.cloudCodeAdapters) { - adapters.push(...options.cloudCodeAdapters); - } - - if (options.cloud) { - if (typeof options.cloud === 'object' && typeof options.cloud.getRouter === 'function') { - adapters.push(new InProcessAdapter(options.cloud)); - } else { - adapters.push(new LegacyAdapter(options.cloud)); - } - } - - if (options.cloudCodeCommand) { - if (!options.webhookKey) { - throw new Error('webhookKey is required when using cloudCodeCommand'); - } - adapters.push(new ExternalProcessAdapter( - options.cloudCodeCommand, - options.webhookKey, - options.cloudCodeOptions - )); - } - - return adapters; -} -``` - -### 5.3 Startup Sequence - -1. `ParseServer` constructor -2. `resolveAdapters(options)` → `CloudCodeAdapter[]` -3. 
`CloudCodeManager.initialize(adapters, config)` - - For each adapter: create scoped `CloudCodeRegistry`, call `adapter.initialize(registry, config)` - - Registry calls flow into `HookStore` with conflict checks -4. If any conflict → throw, server does not start -5. All routers use `CloudCodeManager` for lookups - -### 5.4 Conflict Error Format - -``` -"Cloud code conflict: beforeSave on 'Todo' registered by both 'legacy' and 'external-process'" -``` - -## 6. Migration Strategy — Replacing triggers.js - -### 6.1 Current Consumers - -| Consumer | triggers.js Usage | Migration | -|----------|-------------------|-----------| -| `Parse.Cloud.js` | `addFunction`, `addTrigger`, `addJob`, `addConnectTrigger`, `addLiveQueryEventHandler` | LegacyAdapter delegates to `CloudCodeRegistry` | -| `FunctionsRouter.js` | `getFunction`, `getJob`, `getFunctionNames`, `maybeRunValidator` | Import from `CloudCodeManager` | -| `CloudCodeRouter.js` | `getJob` (scheduled jobs) | Import from `CloudCodeManager` | -| `RestWrite.js` | `getTrigger`, `maybeRunTrigger`, `getRequestObject` | Import from `CloudCodeManager` | -| `RestQuery.js` | `getTrigger`, `maybeRunTrigger` | Import from `CloudCodeManager` | -| `UsersRouter.js` | `getTrigger` (login/logout) | Import from `CloudCodeManager` | -| `FilesRouter.js` | `getTrigger` (file triggers) | Import from `CloudCodeManager` | -| `GlobalConfigRouter.js` | `maybeRunGlobalConfigTrigger` | Import from `CloudCodeManager` | -| `LiveQuery/` | `getTrigger`, `maybeRunTrigger`, connect/subscribe | Import from `CloudCodeManager` | -| `Config.js` | Validates cloud config | Updated for new options | - -### 6.2 Migration Approach - -1. **`triggers.ts` becomes a thin re-export facade** — all exports delegate to `CloudCodeManager` on the current app's `Config`. Existing import sites work without immediate changes. -2. 
**Incremental consumer migration** — update consumers one file at a time from `triggers.*` to `config.cloud.*` (the `CloudCodeManager` instance on `Config`). -3. **Facade removal** — once all consumers are migrated, delete `triggers.ts`. - -### 6.3 Parse.Cloud.js Transformation - -`LegacyAdapter` temporarily patches `Parse.Cloud.*` during `initialize()`: - -```typescript -class LegacyAdapter implements CloudCodeAdapter { - readonly name = 'legacy'; - - async initialize(registry: CloudCodeRegistry, config: ParseServerConfig): Promise<void> { - const originalDefine = Parse.Cloud.define; - Parse.Cloud.define = (name, handler, validator) => { - registry.defineFunction(name, handler, validator); - }; - // ... same for beforeSave, afterSave, etc. - - if (typeof this.cloud === 'string') { - require(this.cloud); - } else if (typeof this.cloud === 'function') { - this.cloud(Parse); - } - - Parse.Cloud.define = originalDefine; - // ... - } -} -``` - -### 6.4 Utility Functions - -Pure data transformation helpers from `triggers.js` move to `src/cloud-code/request-utils.ts`. They have no dependency on the hook store: - -- `getRequestObject()`, `getResponseObject()` — build request/response objects for trigger handlers -- `getRequestQueryObject()` — build request for query triggers -- `getRequestFileObject()` — build request for file triggers -- `resolveError()` — normalize error responses -- `toJSONwithObjects()` — serialize with Parse object preservation -- `inflate()` — inflate REST data into Parse Objects (used by `RestWrite.js`) - -### 6.5 Validators and Rate Limiting - -Validators (including `requireUser`, `requireMaster`, `fields`, `rateLimit`) are supported only through the `LegacyAdapter`. Non-legacy adapters (InProcess, External) handle validation within their own process — Parse Server does not apply server-side validators for hooks registered by these adapters. 
Rate limiting middleware integration (`addRateLimit`) is handled by the `LegacyAdapter` during `initialize()`, preserving existing behavior. - -## 7. Request/Response Bridge - -For `InProcessAdapter` and `ExternalProcessAdapter`, a bridge converts between Parse Server's internal request objects and the webhook body format. - -### Parse Request → Webhook Body - -Converts `Parse.Object` instances to JSON, maps all trigger-specific fields (object, original, query, file, context, etc.). - -### Webhook Response → Parse Result - -- `{ success: }` → return value -- `{ error: { code, message } }` → throw `Parse.Error` - -### beforeSave Special Case - -- Empty object `{}` → accept original (no changes) -- Object with fields → apply field changes to `request.object` -- Error → reject save - -## 8. Non-Goals (v1) - -- **Hot reload** — hooks registered once at startup -- **Public CloudCodeRegistry API** — all registration through adapters -- **Multi-process orchestration** — one external process per adapter -- **Auto-generated webhook key** — must be explicitly configured diff --git a/spec/CloudCodeAdapter.integration.spec.js b/spec/CloudCodeAdapter.integration.spec.js new file mode 100644 index 0000000000..71c6f499cf --- /dev/null +++ b/spec/CloudCodeAdapter.integration.spec.js @@ -0,0 +1,107 @@ +'use strict'; + +const { CloudCodeManager } = require('../lib/cloud-code/CloudCodeManager'); +const { InProcessAdapter } = require('../lib/cloud-code/adapters/InProcessAdapter'); + +describe('Cloud Code Adapter Integration', () => { + describe('composable adapters', () => { + it('supports multiple adapters registering different hooks', async () => { + const manager = new CloudCodeManager(); + + const inProcessCloud = { + getRouter() { + return { + getManifest() { + return { + protocol: 'ParseCloud/1.0', + hooks: { + functions: [{ name: 'inProcessFn' }], + triggers: [], + jobs: [], + }, + }; + }, + async dispatchFunction() { return { success: 'from-in-process' }; }, + async 
dispatchTrigger() { return { success: {} }; }, + async dispatchJob() { return { success: null }; }, + }; + }, + }; + + const inProcessAdapter = new InProcessAdapter(inProcessCloud); + const inProcessRegistry = manager.createRegistry(inProcessAdapter.name); + await inProcessAdapter.initialize(inProcessRegistry, { appId: 'test', masterKey: 'mk', serverURL: 'http://localhost' }); + + // Simulate legacy registration via manager directly + manager.defineFunction('legacyFn', () => 'from-legacy', 'legacy'); + + const legacyEntry = manager.getFunction('legacyFn'); + expect(legacyEntry).toBeDefined(); + expect(manager.getFunction('inProcessFn')).toBeDefined(); + expect(manager.getFunctionNames().sort()).toEqual(['inProcessFn', 'legacyFn']); + }); + + it('throws on conflict between adapters', async () => { + const manager = new CloudCodeManager(); + + // Register a function from "legacy" source + manager.defineFunction('shared', () => 'from-legacy', 'legacy'); + + // InProcess adapter tries to register same function + const inProcessCloud = { + getRouter() { + return { + getManifest() { + return { + protocol: 'ParseCloud/1.0', + hooks: { functions: [{ name: 'shared' }], triggers: [], jobs: [] }, + }; + }, + async dispatchFunction() { return { success: 'from-in-process' }; }, + async dispatchTrigger() { return { success: {} }; }, + async dispatchJob() { return { success: null }; }, + }; + }, + }; + + const adapter = new InProcessAdapter(inProcessCloud); + const registry = manager.createRegistry(adapter.name); + + await expectAsync( + adapter.initialize(registry, { appId: 'test', masterKey: 'mk', serverURL: 'http://localhost' }) + ).toBeRejectedWithError(/already registered/); + }); + }); + + describe('shutdown', () => { + it('shuts down all adapters', async () => { + const manager = new CloudCodeManager(); + let shutdownCalled = false; + + const adapter = { + name: 'test', + async initialize() {}, + async isHealthy() { return true; }, + async shutdown() { shutdownCalled = 
true; }, + }; + + await manager.initialize([adapter], { appId: 'test', masterKey: 'mk', serverURL: 'http://localhost' }); + await manager.shutdown(); + + expect(shutdownCalled).toBe(true); + }); + }); + + describe('unregisterAll', () => { + it('allows re-registration after unregisterAll', () => { + const manager = new CloudCodeManager(); + manager.defineFunction('fn', () => 'first', 'adapter-a'); + manager.unregisterAll('adapter-a'); + + expect(() => { + manager.defineFunction('fn', () => 'second', 'adapter-b'); + }).not.toThrow(); + expect(manager.getFunction('fn')).toBeDefined(); + }); + }); +}); diff --git a/spec/CloudCodeManager.spec.js b/spec/CloudCodeManager.spec.js new file mode 100644 index 0000000000..cdf71d22cc --- /dev/null +++ b/spec/CloudCodeManager.spec.js @@ -0,0 +1,575 @@ +'use strict'; + +const { CloudCodeManager } = require('../lib/cloud-code/CloudCodeManager'); + +describe('CloudCodeManager', () => { + let manager; + + beforeEach(() => { + manager = new CloudCodeManager(); + }); + + // ─── Function Registration ─────────────────────────────────────────────────── + + describe('defineFunction', () => { + it('registers a function', () => { + const handler = () => 'result'; + manager.defineFunction('myFunc', handler, 'source-a'); + const entry = manager.getFunction('myFunc'); + expect(entry).not.toBeNull(); + expect(entry.handler).toBe(handler); + expect(entry.source).toBe('source-a'); + }); + + it('allows overwriting a function from the same source', () => { + const handler1 = () => 'v1'; + const handler2 = () => 'v2'; + manager.defineFunction('myFunc', handler1, 'source-a'); + manager.defineFunction('myFunc', handler2, 'source-a'); + expect(manager.getFunction('myFunc').handler).toBe(handler2); + }); + + it('throws when a different source tries to register the same function', () => { + manager.defineFunction('myFunc', () => {}, 'source-a'); + expect(() => { + manager.defineFunction('myFunc', () => {}, 'source-b'); + }).toThrowError(/already 
registered/i); + }); + + it('stores a validator with the function', () => { + const handler = () => {}; + const validator = req => {}; + manager.defineFunction('myFunc', handler, 'source-a', validator); + expect(manager.getFunction('myFunc').validator).toBe(validator); + }); + + it('returns null for an unknown function', () => { + expect(manager.getFunction('unknown')).toBeNull(); + }); + }); + + // ─── Trigger Registration ──────────────────────────────────────────────────── + + describe('defineTrigger', () => { + it('registers a trigger', () => { + const handler = () => {}; + manager.defineTrigger('MyClass', 'beforeSave', handler, 'source-a'); + const entry = manager.getTrigger('MyClass', 'beforeSave'); + expect(entry).not.toBeNull(); + expect(entry.handler).toBe(handler); + expect(entry.source).toBe('source-a'); + }); + + it('allows overwriting a trigger from the same source', () => { + const handler1 = () => {}; + const handler2 = () => {}; + manager.defineTrigger('MyClass', 'beforeSave', handler1, 'source-a'); + manager.defineTrigger('MyClass', 'beforeSave', handler2, 'source-a'); + expect(manager.getTrigger('MyClass', 'beforeSave').handler).toBe(handler2); + }); + + it('throws when a different source tries to register the same trigger', () => { + manager.defineTrigger('MyClass', 'beforeSave', () => {}, 'source-a'); + expect(() => { + manager.defineTrigger('MyClass', 'beforeSave', () => {}, 'source-b'); + }).toThrowError(/already registered/i); + }); + + it('stores a validator with the trigger', () => { + const handler = () => {}; + const validator = { requireUser: true }; + manager.defineTrigger('MyClass', 'beforeSave', handler, 'source-a', validator); + expect(manager.getTrigger('MyClass', 'beforeSave').validator).toBe(validator); + }); + + it('returns null for an unknown trigger', () => { + expect(manager.getTrigger('MyClass', 'beforeSave')).toBeNull(); + }); + + it('triggerExists returns true for a registered trigger', () => { + 
manager.defineTrigger('MyClass', 'afterSave', () => {}, 'source-a'); + expect(manager.triggerExists('MyClass', 'afterSave')).toBe(true); + }); + + it('triggerExists returns false for an unregistered trigger', () => { + expect(manager.triggerExists('MyClass', 'afterSave')).toBe(false); + }); + }); + + // ─── Validation Rules ──────────────────────────────────────────────────────── + + describe('validation rules', () => { + it('rejects beforeSave on _PushStatus', () => { + expect(() => { + manager.defineTrigger('_PushStatus', 'beforeSave', () => {}, 'source-a'); + }).toThrowError(/_PushStatus/); + }); + + it('allows afterSave on _PushStatus', () => { + expect(() => { + manager.defineTrigger('_PushStatus', 'afterSave', () => {}, 'source-a'); + }).not.toThrow(); + }); + + it('rejects beforeDelete on _PushStatus', () => { + expect(() => { + manager.defineTrigger('_PushStatus', 'beforeDelete', () => {}, 'source-a'); + }).toThrowError(/_PushStatus/); + }); + + it('rejects beforeFind on _PushStatus', () => { + expect(() => { + manager.defineTrigger('_PushStatus', 'beforeFind', () => {}, 'source-a'); + }).toThrowError(/_PushStatus/); + }); + + it('rejects beforeSave on _Session', () => { + expect(() => { + manager.defineTrigger('_Session', 'beforeSave', () => {}, 'source-a'); + }).toThrowError(/_Session/); + }); + + it('rejects afterSave on _Session', () => { + expect(() => { + manager.defineTrigger('_Session', 'afterSave', () => {}, 'source-a'); + }).toThrowError(/_Session/); + }); + + it('allows afterLogout on _Session', () => { + expect(() => { + manager.defineTrigger('_Session', 'afterLogout', () => {}, 'source-a'); + }).not.toThrow(); + }); + + it('rejects beforeLogin on a non-_User class', () => { + expect(() => { + manager.defineTrigger('SomeClass', 'beforeLogin', () => {}, 'source-a'); + }).toThrowError(/_User/); + }); + + it('allows beforeLogin on _User', () => { + expect(() => { + manager.defineTrigger('_User', 'beforeLogin', () => {}, 'source-a'); + 
}).not.toThrow(); + }); + + it('rejects afterLogin on a non-_User class', () => { + expect(() => { + manager.defineTrigger('SomeClass', 'afterLogin', () => {}, 'source-a'); + }).toThrowError(/_User/); + }); + + it('allows afterLogin on _User', () => { + expect(() => { + manager.defineTrigger('_User', 'afterLogin', () => {}, 'source-a'); + }).not.toThrow(); + }); + + it('rejects beforePasswordResetRequest on a non-_User class', () => { + expect(() => { + manager.defineTrigger('SomeClass', 'beforePasswordResetRequest', () => {}, 'source-a'); + }).toThrowError(/_User/); + }); + + it('allows beforePasswordResetRequest on _User', () => { + expect(() => { + manager.defineTrigger('_User', 'beforePasswordResetRequest', () => {}, 'source-a'); + }).not.toThrow(); + }); + + it('rejects afterLogout on a non-_Session class', () => { + expect(() => { + manager.defineTrigger('_User', 'afterLogout', () => {}, 'source-a'); + }).toThrowError(/_Session/); + }); + + it('allows beforeSave on @File virtual className', () => { + expect(() => { + manager.defineTrigger('@File', 'beforeSave', () => {}, 'source-a'); + }).not.toThrow(); + }); + + it('allows beforeConnect on @Connect virtual className', () => { + expect(() => { + manager.defineTrigger('@Connect', 'beforeConnect', () => {}, 'source-a'); + }).not.toThrow(); + }); + }); + + // ─── Job Registration ───────────────────────────────────────────────────────── + + describe('defineJob', () => { + it('registers a job', () => { + const handler = () => {}; + manager.defineJob('myJob', handler, 'source-a'); + const entry = manager.getJob('myJob'); + expect(entry).not.toBeNull(); + expect(entry.handler).toBe(handler); + expect(entry.source).toBe('source-a'); + }); + + it('allows overwriting a job from the same source', () => { + const handler1 = () => {}; + const handler2 = () => {}; + manager.defineJob('myJob', handler1, 'source-a'); + manager.defineJob('myJob', handler2, 'source-a'); + 
expect(manager.getJob('myJob').handler).toBe(handler2); + }); + + it('throws when a different source tries to register the same job', () => { + manager.defineJob('myJob', () => {}, 'source-a'); + expect(() => { + manager.defineJob('myJob', () => {}, 'source-b'); + }).toThrowError(/already registered/i); + }); + + it('returns null for an unknown job', () => { + expect(manager.getJob('unknown')).toBeNull(); + }); + + it('getJobs returns all jobs as a Map', () => { + manager.defineJob('job1', () => {}, 'source-a'); + manager.defineJob('job2', () => {}, 'source-a'); + const jobs = manager.getJobs(); + expect(jobs instanceof Map).toBe(true); + expect(jobs.size).toBe(2); + expect(jobs.has('job1')).toBe(true); + expect(jobs.has('job2')).toBe(true); + }); + + it('getJobsObject returns all jobs as a plain object', () => { + manager.defineJob('job1', () => {}, 'source-a'); + manager.defineJob('job2', () => {}, 'source-a'); + const jobs = manager.getJobsObject(); + expect(typeof jobs).toBe('object'); + expect(jobs['job1']).toBeDefined(); + expect(jobs['job2']).toBeDefined(); + }); + }); + + // ─── Live Query Handlers ───────────────────────────────────────────────────── + + describe('defineLiveQueryHandler', () => { + it('registers a live query handler', () => { + const handler = data => {}; + manager.defineLiveQueryHandler(handler, 'source-a'); + // runLiveQueryEventHandlers should call the handler + let called = false; + const h = data => { called = true; }; + manager.defineLiveQueryHandler(h, 'source-a'); + manager.runLiveQueryEventHandlers({ event: 'test' }); + expect(called).toBe(true); + }); + + it('runs all live query handlers when runLiveQueryEventHandlers is called', () => { + const calls = []; + manager.defineLiveQueryHandler(data => calls.push('h1'), 'source-a'); + manager.defineLiveQueryHandler(data => calls.push('h2'), 'source-b'); + manager.runLiveQueryEventHandlers({ event: 'test' }); + expect(calls).toEqual(['h1', 'h2']); + }); + + it('passes data to each live 
query handler', () => { + let received; + manager.defineLiveQueryHandler(data => { received = data; }, 'source-a'); + manager.runLiveQueryEventHandlers({ event: 'create', objectId: '123' }); + expect(received).toEqual({ event: 'create', objectId: '123' }); + }); + }); + + // ─── Lookup Methods ────────────────────────────────────────────────────────── + + describe('getFunctionNames', () => { + it('returns an empty array when no functions are registered', () => { + expect(manager.getFunctionNames()).toEqual([]); + }); + + it('returns names of all registered functions', () => { + manager.defineFunction('funcA', () => {}, 'source-a'); + manager.defineFunction('funcB', () => {}, 'source-a'); + const names = manager.getFunctionNames(); + expect(names.sort()).toEqual(['funcA', 'funcB']); + }); + }); + + describe('getValidator', () => { + it('returns the validator for a function', () => { + const validator = req => {}; + manager.defineFunction('myFunc', () => {}, 'source-a', validator); + expect(manager.getValidator('myFunc')).toBe(validator); + }); + + it('returns null when function has no validator', () => { + manager.defineFunction('myFunc', () => {}, 'source-a'); + expect(manager.getValidator('myFunc')).toBeNull(); + }); + + it('returns null when function does not exist', () => { + expect(manager.getValidator('unknown')).toBeNull(); + }); + }); + + // ─── Removal ───────────────────────────────────────────────────────────────── + + describe('removeFunction', () => { + it('removes a registered function', () => { + manager.defineFunction('myFunc', () => {}, 'source-a'); + manager.removeFunction('myFunc'); + expect(manager.getFunction('myFunc')).toBeNull(); + }); + + it('does not throw when removing an unknown function', () => { + expect(() => manager.removeFunction('unknown')).not.toThrow(); + }); + }); + + describe('removeTrigger', () => { + it('removes a registered trigger', () => { + manager.defineTrigger('MyClass', 'beforeSave', () => {}, 'source-a'); + 
manager.removeTrigger('MyClass', 'beforeSave'); + expect(manager.getTrigger('MyClass', 'beforeSave')).toBeNull(); + }); + + it('does not throw when removing an unknown trigger', () => { + expect(() => manager.removeTrigger('MyClass', 'beforeSave')).not.toThrow(); + }); + }); + + describe('unregisterAll', () => { + it('removes all hooks registered by a given source', () => { + manager.defineFunction('funcA', () => {}, 'source-a'); + manager.defineFunction('funcB', () => {}, 'source-b'); + manager.defineTrigger('MyClass', 'beforeSave', () => {}, 'source-a'); + manager.defineJob('jobA', () => {}, 'source-a'); + + manager.unregisterAll('source-a'); + + expect(manager.getFunction('funcA')).toBeNull(); + expect(manager.getFunction('funcB')).not.toBeNull(); + expect(manager.getTrigger('MyClass', 'beforeSave')).toBeNull(); + expect(manager.getJob('jobA')).toBeNull(); + }); + + it('removes live query handlers registered by a given source', () => { + const calls = []; + manager.defineLiveQueryHandler(() => calls.push('a'), 'source-a'); + manager.defineLiveQueryHandler(() => calls.push('b'), 'source-b'); + + manager.unregisterAll('source-a'); + manager.runLiveQueryEventHandlers({}); + + expect(calls).toEqual(['b']); + }); + }); + + describe('clearAll', () => { + it('removes all registered hooks regardless of source', () => { + manager.defineFunction('funcA', () => {}, 'source-a'); + manager.defineFunction('funcB', () => {}, 'source-b'); + manager.defineTrigger('MyClass', 'beforeSave', () => {}, 'source-a'); + manager.defineJob('jobA', () => {}, 'source-a'); + + manager.clearAll(); + + expect(manager.getFunction('funcA')).toBeNull(); + expect(manager.getFunction('funcB')).toBeNull(); + expect(manager.getTrigger('MyClass', 'beforeSave')).toBeNull(); + expect(manager.getJob('jobA')).toBeNull(); + expect(manager.getFunctionNames()).toEqual([]); + }); + + it('clears live query handlers', () => { + const calls = []; + manager.defineLiveQueryHandler(() => calls.push('a'), 
'source-a'); + manager.clearAll(); + manager.runLiveQueryEventHandlers({}); + expect(calls).toEqual([]); + }); + }); + + // ─── Registry Scoping ──────────────────────────────────────────────────────── + + describe('createRegistry', () => { + it('returns a registry scoped to the given source', () => { + const registry = manager.createRegistry('source-a'); + const handler = () => {}; + registry.defineFunction('myFunc', handler); + expect(manager.getFunction('myFunc').source).toBe('source-a'); + }); + + it('scoped registry defineFunction stores the handler correctly', () => { + const registry = manager.createRegistry('source-a'); + const handler = () => 'result'; + registry.defineFunction('myFunc', handler); + expect(manager.getFunction('myFunc').handler).toBe(handler); + }); + + it('scoped registry defineTrigger stores the trigger correctly', () => { + const registry = manager.createRegistry('source-a'); + const handler = () => {}; + registry.defineTrigger('MyClass', 'beforeSave', handler); + expect(manager.getTrigger('MyClass', 'beforeSave').source).toBe('source-a'); + }); + + it('scoped registry defineJob stores the job correctly', () => { + const registry = manager.createRegistry('source-a'); + const handler = () => {}; + registry.defineJob('myJob', handler); + expect(manager.getJob('myJob').source).toBe('source-a'); + }); + + it('scoped registry defineLiveQueryHandler registers the handler', () => { + const registry = manager.createRegistry('source-a'); + const calls = []; + registry.defineLiveQueryHandler(data => calls.push(data)); + manager.runLiveQueryEventHandlers({ event: 'test' }); + expect(calls.length).toBe(1); + }); + + it('scoped registry conflict detection uses the scoped source', () => { + const registryA = manager.createRegistry('source-a'); + const registryB = manager.createRegistry('source-b'); + registryA.defineFunction('myFunc', () => {}); + expect(() => { + registryB.defineFunction('myFunc', () => {}); + }).toThrowError(/already registered/i); 
+ }); + }); + + // ─── Lifecycle ─────────────────────────────────────────────────────────────── + + describe('initialize', () => { + it('calls initialize on each adapter', async () => { + const calls = []; + const adapterA = { + name: 'adapter-a', + initialize: async (registry, config) => { calls.push('a'); }, + isHealthy: async () => true, + shutdown: async () => {}, + }; + const adapterB = { + name: 'adapter-b', + initialize: async (registry, config) => { calls.push('b'); }, + isHealthy: async () => true, + shutdown: async () => {}, + }; + const config = { appId: 'testApp', masterKey: 'key', serverURL: 'http://localhost:1337/parse' }; + await manager.initialize([adapterA, adapterB], config); + expect(calls).toEqual(['a', 'b']); + }); + + it('passes a scoped registry to each adapter', async () => { + let capturedRegistry; + const adapter = { + name: 'test-adapter', + initialize: async (registry, config) => { + capturedRegistry = registry; + registry.defineFunction('adapterFunc', () => {}); + }, + isHealthy: async () => true, + shutdown: async () => {}, + }; + const config = { appId: 'testApp', masterKey: 'key', serverURL: 'http://localhost:1337/parse' }; + await manager.initialize([adapter], config); + expect(manager.getFunction('adapterFunc')).not.toBeNull(); + expect(manager.getFunction('adapterFunc').source).toBe('test-adapter'); + }); + + it('throws when two adapters have the same name', async () => { + const adapterA = { + name: 'duplicate', + initialize: async () => {}, + isHealthy: async () => true, + shutdown: async () => {}, + }; + const adapterB = { + name: 'duplicate', + initialize: async () => {}, + isHealthy: async () => true, + shutdown: async () => {}, + }; + const config = { appId: 'testApp', masterKey: 'key', serverURL: 'http://localhost:1337/parse' }; + await expectAsync(manager.initialize([adapterA, adapterB], config)).toBeRejectedWithError(/duplicate/i); + }); + }); + + describe('shutdown', () => { + it('calls shutdown on each initialized 
adapter', async () => { + const calls = []; + const adapter = { + name: 'adapter-a', + initialize: async () => {}, + isHealthy: async () => true, + shutdown: async () => { calls.push('shutdown'); }, + }; + const config = { appId: 'testApp', masterKey: 'key', serverURL: 'http://localhost:1337/parse' }; + await manager.initialize([adapter], config); + await manager.shutdown(); + expect(calls).toEqual(['shutdown']); + }); + }); + + describe('healthCheck', () => { + it('returns true when all adapters are healthy', async () => { + const adapter = { + name: 'adapter-a', + initialize: async () => {}, + isHealthy: async () => true, + shutdown: async () => {}, + }; + const config = { appId: 'testApp', masterKey: 'key', serverURL: 'http://localhost:1337/parse' }; + await manager.initialize([adapter], config); + const healthy = await manager.healthCheck(); + expect(healthy).toBe(true); + }); + + it('returns false when any adapter is unhealthy', async () => { + const adapterA = { + name: 'adapter-a', + initialize: async () => {}, + isHealthy: async () => true, + shutdown: async () => {}, + }; + const adapterB = { + name: 'adapter-b', + initialize: async () => {}, + isHealthy: async () => false, + shutdown: async () => {}, + }; + const config = { appId: 'testApp', masterKey: 'key', serverURL: 'http://localhost:1337/parse' }; + await manager.initialize([adapterA, adapterB], config); + const healthy = await manager.healthCheck(); + expect(healthy).toBe(false); + }); + + it('returns true when no adapters are registered', async () => { + const healthy = await manager.healthCheck(); + expect(healthy).toBe(true); + }); + }); + + // ─── Validators for triggers ───────────────────────────────────────────────── + + describe('trigger validators', () => { + it('stores a function validator on a trigger', () => { + const handler = () => {}; + const validator = req => {}; + manager.defineTrigger('MyClass', 'beforeSave', handler, 'source-a', validator); + expect(manager.getTrigger('MyClass', 
'beforeSave').validator).toBe(validator); + }); + + it('stores an object validator on a trigger', () => { + const handler = () => {}; + const validator = { requireUser: true }; + manager.defineTrigger('MyClass', 'beforeSave', handler, 'source-a', validator); + expect(manager.getTrigger('MyClass', 'beforeSave').validator).toBe(validator); + }); + + it('stores undefined validator when no validator provided', () => { + const handler = () => {}; + manager.defineTrigger('MyClass', 'beforeSave', handler, 'source-a'); + const entry = manager.getTrigger('MyClass', 'beforeSave'); + expect(entry.validator).toBeUndefined(); + }); + }); +}); diff --git a/spec/ExternalProcessAdapter.spec.js b/spec/ExternalProcessAdapter.spec.js new file mode 100644 index 0000000000..6a328b92d8 --- /dev/null +++ b/spec/ExternalProcessAdapter.spec.js @@ -0,0 +1,70 @@ +// spec/ExternalProcessAdapter.spec.js +const { ExternalProcessAdapter } = require('../lib/cloud-code/adapters/ExternalProcessAdapter'); +const { CloudCodeManager } = require('../lib/cloud-code/CloudCodeManager'); +const http = require('http'); + +function createMockCloudServer(manifest, port) { + return new Promise((resolve) => { + const server = http.createServer((req, res) => { + if (req.url === '/' && req.method === 'GET') { + res.writeHead(200, { 'Content-Type': 'application/json' }); + res.end(JSON.stringify(manifest)); + } else if (req.url === '/health' && req.method === 'GET') { + res.writeHead(200); + res.end('OK'); + } else if (req.url.startsWith('/functions/') && req.method === 'POST') { + let body = ''; + req.on('data', d => body += d); + req.on('end', () => { + res.writeHead(200, { 'Content-Type': 'application/json' }); + res.end(JSON.stringify({ success: 'external-result' })); + }); + } else { + res.writeHead(404); + res.end(); + } + }); + server.listen(port, () => resolve(server)); + }); +} + +describe('ExternalProcessAdapter', () => { + it('has name "external-process"', () => { + const adapter = new 
ExternalProcessAdapter('echo test', 'secret-key'); + expect(adapter.name).toBe('external-process'); + }); + + it('requires webhookKey', () => { + expect(() => new ExternalProcessAdapter('echo test', '')).toThrowError(/webhookKey/); + }); + + it('shutdown resolves cleanly when no process started', async () => { + const adapter = new ExternalProcessAdapter('echo test', 'key'); + await expectAsync(adapter.shutdown()).toBeResolved(); + }); + + it('spawns process and reads manifest', async () => { + const manager = new CloudCodeManager(); + const port = 19876; + const server = await createMockCloudServer( + { protocol: 'ParseCloud/1.0', hooks: { functions: [{ name: 'ext-fn' }], triggers: [], jobs: [] } }, + port + ); + + try { + const cmd = `node -e "process.stdout.write('PARSE_CLOUD_READY:${port}\\n'); setTimeout(() => {}, 60000)"`; + const adapter = new ExternalProcessAdapter(cmd, 'test-key', { + startupTimeout: 5000, + healthCheckInterval: 0, + }); + const registry = manager.createRegistry(adapter.name); + await adapter.initialize(registry, { appId: 'test', masterKey: 'mk', serverURL: 'http://localhost' }); + + expect(manager.getFunction('ext-fn')).toBeDefined(); + + await adapter.shutdown(); + } finally { + server.close(); + } + }, 10000); +}); diff --git a/spec/InProcessAdapter.spec.js b/spec/InProcessAdapter.spec.js new file mode 100644 index 0000000000..b14c139375 --- /dev/null +++ b/spec/InProcessAdapter.spec.js @@ -0,0 +1,121 @@ +const { InProcessAdapter } = require('../lib/cloud-code/adapters/InProcessAdapter'); +const { CloudCodeManager } = require('../lib/cloud-code/CloudCodeManager'); + +function createMockCloudCode(manifest, handlers = {}) { + return { + getRouter() { + return { + getManifest() { return manifest; }, + async dispatchFunction(name, body) { + if (handlers[`function:${name}`]) return handlers[`function:${name}`](body); + return { success: null }; + }, + async dispatchTrigger(className, triggerName, body) { + if 
(handlers[`trigger:${triggerName}.${className}`]) return handlers[`trigger:${triggerName}.${className}`](body); + return { success: {} }; + }, + async dispatchJob(name, body) { + if (handlers[`job:${name}`]) return handlers[`job:${name}`](body); + return { success: null }; + }, + }; + }, + }; +} + +describe('InProcessAdapter', () => { + let manager; + + beforeEach(() => { + manager = new CloudCodeManager(); + }); + + it('has name "in-process"', () => { + const cloud = createMockCloudCode({ protocol: 'ParseCloud/1.0', hooks: { functions: [], triggers: [], jobs: [] } }); + const adapter = new InProcessAdapter(cloud); + expect(adapter.name).toBe('in-process'); + }); + + it('registers functions from manifest', async () => { + const cloud = createMockCloudCode({ + protocol: 'ParseCloud/1.0', + hooks: { + functions: [{ name: 'hello' }, { name: 'greet' }], + triggers: [], + jobs: [], + }, + }); + const adapter = new InProcessAdapter(cloud); + const registry = manager.createRegistry(adapter.name); + await adapter.initialize(registry, { appId: 'test', masterKey: 'mk', serverURL: 'http://localhost' }); + + expect(manager.getFunction('hello')).toBeDefined(); + expect(manager.getFunction('greet')).toBeDefined(); + }); + + it('registers triggers from manifest', async () => { + const cloud = createMockCloudCode({ + protocol: 'ParseCloud/1.0', + hooks: { + functions: [], + triggers: [{ className: 'Todo', triggerName: 'beforeSave' }], + jobs: [], + }, + }); + const adapter = new InProcessAdapter(cloud); + const registry = manager.createRegistry(adapter.name); + await adapter.initialize(registry, { appId: 'test', masterKey: 'mk', serverURL: 'http://localhost' }); + + expect(manager.getTrigger('Todo', 'beforeSave')).toBeDefined(); + }); + + it('registers jobs from manifest', async () => { + const cloud = createMockCloudCode({ + protocol: 'ParseCloud/1.0', + hooks: { + functions: [], + triggers: [], + jobs: [{ name: 'cleanup' }], + }, + }); + const adapter = new 
InProcessAdapter(cloud); + const registry = manager.createRegistry(adapter.name); + await adapter.initialize(registry, { appId: 'test', masterKey: 'mk', serverURL: 'http://localhost' }); + + expect(manager.getJob('cleanup')).toBeDefined(); + }); + + it('bridge handler dispatches function and returns result', async () => { + const cloud = createMockCloudCode( + { protocol: 'ParseCloud/1.0', hooks: { functions: [{ name: 'add' }], triggers: [], jobs: [] } }, + { 'function:add': (body) => ({ success: body.params.a + body.params.b }) } + ); + const adapter = new InProcessAdapter(cloud); + const registry = manager.createRegistry(adapter.name); + await adapter.initialize(registry, { appId: 'test', masterKey: 'mk', serverURL: 'http://localhost' }); + + const entry = manager.getFunction('add'); + const result = await entry.handler({ params: { a: 2, b: 3 }, master: false, ip: '127.0.0.1', headers: {} }); + expect(result).toBe(5); + }); + + it('bridge handler throws Parse.Error on error response', async () => { + const cloud = createMockCloudCode( + { protocol: 'ParseCloud/1.0', hooks: { functions: [{ name: 'fail' }], triggers: [], jobs: [] } }, + { 'function:fail': () => ({ error: { code: 141, message: 'boom' } }) } + ); + const adapter = new InProcessAdapter(cloud); + const registry = manager.createRegistry(adapter.name); + await adapter.initialize(registry, { appId: 'test', masterKey: 'mk', serverURL: 'http://localhost' }); + + const entry = manager.getFunction('fail'); + await expectAsync(entry.handler({ params: {}, master: false, ip: '', headers: {} })) + .toBeRejectedWithError(/boom/); + }); + + it('isHealthy returns true', async () => { + const cloud = createMockCloudCode({ protocol: 'ParseCloud/1.0', hooks: { functions: [], triggers: [], jobs: [] } }); + const adapter = new InProcessAdapter(cloud); + expect(await adapter.isHealthy()).toBe(true); + }); +}); diff --git a/src/Options/Definitions.js b/src/Options/Definitions.js index e502ba6db5..0956f12baf 100644 --- 
a/src/Options/Definitions.js +++ b/src/Options/Definitions.js @@ -130,6 +130,16 @@ module.exports.ParseServerOptions = { env: 'PARSE_SERVER_CLOUD', help: 'Full path to your cloud code main.js', }, + cloudCodeAdapters: { + help: 'Array of CloudCodeAdapter instances for BYO cloud code integration', + }, + cloudCodeCommand: { + env: 'PARSE_SERVER_CLOUD_CODE_COMMAND', + help: 'Shell command to spawn an external cloud code process (ParseCloud/1.0 protocol)', + }, + cloudCodeOptions: { + help: 'Options for the external cloud code process adapter', + }, cluster: { env: 'PARSE_SERVER_CLUSTER', help: 'Run with cluster, optionally set the number of processes default to os.cpus().length', @@ -616,7 +626,7 @@ module.exports.ParseServerOptions = { }, webhookKey: { env: 'PARSE_SERVER_WEBHOOK_KEY', - help: 'Key sent with outgoing webhook calls', + help: 'Webhook key for authenticating external cloud code process requests. Required when cloudCodeCommand is set.', }, }; module.exports.RateLimitOptions = { diff --git a/src/Options/index.js b/src/Options/index.js index 43cc58b287..59ed994898 100644 --- a/src/Options/index.js +++ b/src/Options/index.js @@ -141,6 +141,19 @@ export interface ParseServerOptions { convertUsernameToLowercase: ?boolean; /* Full path to your cloud code main.js */ cloud: ?string; + /* Shell command to spawn an external cloud code process (ParseCloud/1.0 protocol) */ + cloudCodeCommand: ?string; + /* Key for authenticating external cloud code process requests. Required when cloudCodeCommand is set. 
*/ + webhookKey: ?string; + /* Options for the external cloud code process adapter */ + cloudCodeOptions: ?{ + startupTimeout: ?number, + healthCheckInterval: ?number, + shutdownTimeout: ?number, + maxRestartDelay: ?number, + }; + /* Array of CloudCodeAdapter instances for BYO cloud code integration */ + cloudCodeAdapters: ?Array; /* A collection prefix for the classes :DEFAULT: '' */ collectionPrefix: ?string; @@ -158,8 +171,6 @@ export interface ParseServerOptions { restAPIKey: ?string; /* Read-only key, which has the same capabilities as MasterKey without writes */ readOnlyMasterKey: ?string; - /* Key sent with outgoing webhook calls */ - webhookKey: ?string; /* Key for your files */ fileKey: ?string; /* Enable (or disable) the addition of a unique hash to the file names diff --git a/src/ParseServer.ts b/src/ParseServer.ts index 2681fe0d84..6f2b0b9209 100644 --- a/src/ParseServer.ts +++ b/src/ParseServer.ts @@ -46,6 +46,9 @@ import Deprecator from './Deprecator/Deprecator'; import { DefinedSchemas } from './SchemaMigrations/DefinedSchemas'; import OptionsDefinitions from './Options/Definitions'; import { resolvingPromise, Connections } from './TestUtils'; +import { CloudCodeManager } from './cloud-code/CloudCodeManager'; +import { resolveAdapters } from './cloud-code/resolveAdapters'; +import { AppCache } from './cache'; // Mutate the Parse object to add the Cloud Code handlers addParseCloud(); @@ -184,24 +187,34 @@ class ParseServer { } startupPromises.push(liveQueryController.connect()); await Promise.all(startupPromises); - if (cloud) { + const adapters = resolveAdapters({ + cloud, + cloudCodeCommand: this.config.cloudCodeCommand, + webhookKey: this.config.webhookKey, + cloudCodeOptions: this.config.cloudCodeOptions, + cloudCodeAdapters: this.config.cloudCodeAdapters, + }); + + if (adapters.length > 0) { addParseCloud(); - if (typeof cloud === 'function') { - await Promise.resolve(cloud(Parse)); - } else if (typeof cloud === 'string') { - let json; - if 
(process.env.npm_package_json) { - json = require(process.env.npm_package_json); - } - if (process.env.npm_package_type === 'module' || json?.type === 'module') { - await import(path.resolve(process.cwd(), cloud)); - } else { - require(path.resolve(process.cwd(), cloud)); - } - } else { - throw "argument 'cloud' must either be a string or a function"; + const cloudManager = new CloudCodeManager(); + + // CRITICAL: Store on this.config BEFORE adapter initialization. + // this.config flows into AppCache via Config.put() later in start(). + // We must also store it on AppCache NOW so the facade can find it + // during LegacyAdapter.initialize() → Parse.Cloud.define() → triggers.addFunction(). + this.config.cloudCodeManager = cloudManager; + const appId = this.config.appId; + const cached = AppCache.get(appId); + if (cached) { + cached.cloudCodeManager = cloudManager; } - await new Promise(resolve => setTimeout(resolve, 10)); + + await cloudManager.initialize(adapters, { + appId, + masterKey: this.config.masterKey, + serverURL: this.config.serverURL || `http://localhost:${this.config.port}${this.config.mountPath || '/parse'}`, + }); } if (security && security.enableCheck && security.enableCheckLog) { new CheckRunner(security).run(); diff --git a/src/cloud-code/CloudCodeManager.ts b/src/cloud-code/CloudCodeManager.ts new file mode 100644 index 0000000000..b80d682a19 --- /dev/null +++ b/src/cloud-code/CloudCodeManager.ts @@ -0,0 +1,285 @@ +// src/cloud-code/CloudCodeManager.ts + +import type { + CloudCodeAdapter, + CloudCodeRegistry, + CloudFunctionHandler, + CloudJobHandler, + CloudTriggerHandler, + FunctionEntry, + JobEntry, + LiveQueryEntry, + LiveQueryHandler, + ParseServerConfig, + TriggerEntry, + TriggerName, + ValidatorHandler, +} from './types'; + +// Triggers that are restricted to _User class only +const USER_ONLY_TRIGGERS = new Set([ + 'beforeLogin', + 'afterLogin', + 'beforePasswordResetRequest', +]); + +// Triggers blocked on _PushStatus (all except 
afterSave) +const PUSH_STATUS_ALLOWED_TRIGGERS = new Set(['afterSave']); + +function makeTriggerKey(className: string, triggerName: string): string { + return `${className}:${triggerName}`; +} + +function validateTriggerConstraints(className: string, triggerName: TriggerName): void { + if (className === '_PushStatus' && !PUSH_STATUS_ALLOWED_TRIGGERS.has(triggerName)) { + throw new Error( + `Trigger "${triggerName}" is not allowed on _PushStatus. Only afterSave is permitted.` + ); + } + + if (className === '_Session' && triggerName !== 'afterLogout') { + throw new Error( + `Trigger "${triggerName}" is not allowed on _Session. Only afterLogout is permitted.` + ); + } + + if (USER_ONLY_TRIGGERS.has(triggerName) && className !== '_User') { + throw new Error( + `Trigger "${triggerName}" is only allowed on _User class, not "${className}".` + ); + } + + if (triggerName === 'afterLogout' && className !== '_Session') { + throw new Error( + `Trigger "afterLogout" is only allowed on _Session class, not "${className}".` + ); + } +} + +export class CloudCodeManager { + private readonly functions: Map = new Map(); + private readonly triggers: Map = new Map(); + private readonly jobs: Map = new Map(); + private readonly liveQueryHandlers: LiveQueryEntry[] = []; + private readonly adapters: CloudCodeAdapter[] = []; + + // ─── Function Registration ───────────────────────────────────────────────── + + defineFunction( + name: string, + handler: CloudFunctionHandler, + source: string, + validator?: ValidatorHandler + ): void { + const existing = this.functions.get(name); + if (existing && existing.source !== source) { + throw new Error( + `Cloud function "${name}" is already registered by source "${existing.source}". Cannot register from "${source}".` + ); + } + this.functions.set(name, { handler, source, validator }); + } + + getFunction(name: string): FunctionEntry | null { + return this.functions.get(name) ?? 
null; + } + + getFunctionNames(): string[] { + return Array.from(this.functions.keys()); + } + + getValidator(key: string): ValidatorHandler | null { + // Check functions first + const fnEntry = this.functions.get(key); + if (fnEntry) { + return fnEntry.validator ?? null; + } + // Check triggers — key format from facade is "triggerType.className" + // Convert to our internal key format "className:triggerName" + const dotIdx = key.indexOf('.'); + if (dotIdx !== -1) { + const triggerName = key.substring(0, dotIdx); + const className = key.substring(dotIdx + 1); + const triggerEntry = this.triggers.get(makeTriggerKey(className, triggerName)); + if (triggerEntry) { + return triggerEntry.validator ?? null; + } + } + return null; + } + + removeFunction(name: string): void { + this.functions.delete(name); + } + + // ─── Trigger Registration ────────────────────────────────────────────────── + + defineTrigger( + className: string, + triggerName: TriggerName, + handler: CloudTriggerHandler, + source: string, + validator?: ValidatorHandler + ): void { + validateTriggerConstraints(className, triggerName); + + const key = makeTriggerKey(className, triggerName); + const existing = this.triggers.get(key); + if (existing && existing.source !== source) { + throw new Error( + `Trigger "${triggerName}" on "${className}" is already registered by source "${existing.source}". Cannot register from "${source}".` + ); + } + this.triggers.set(key, { handler, source, validator }); + } + + getTrigger(className: string, triggerName: string): TriggerEntry | null { + return this.triggers.get(makeTriggerKey(className, triggerName)) ?? 
null; + } + + triggerExists(className: string, triggerName: string): boolean { + return this.triggers.has(makeTriggerKey(className, triggerName)); + } + + removeTrigger(className: string, triggerName: string): void { + this.triggers.delete(makeTriggerKey(className, triggerName)); + } + + // ─── Job Registration ────────────────────────────────────────────────────── + + defineJob(name: string, handler: CloudJobHandler, source: string): void { + const existing = this.jobs.get(name); + if (existing && existing.source !== source) { + throw new Error( + `Cloud job "${name}" is already registered by source "${existing.source}". Cannot register from "${source}".` + ); + } + this.jobs.set(name, { handler, source }); + } + + getJob(name: string): JobEntry | null { + return this.jobs.get(name) ?? null; + } + + getJobs(): Map { + return this.jobs; + } + + getJobsObject(): Record { + const result: Record = {}; + for (const [name, entry] of this.jobs) { + result[name] = entry.handler; + } + return result; + } + + // ─── Live Query Handlers ─────────────────────────────────────────────────── + + defineLiveQueryHandler(handler: LiveQueryHandler, source: string): void { + this.liveQueryHandlers.push({ handler, source }); + } + + runLiveQueryEventHandlers(data: unknown): void { + for (const entry of this.liveQueryHandlers) { + entry.handler(data); + } + } + + // ─── Removal ────────────────────────────────────────────────────────────── + + unregisterAll(source: string): void { + for (const [name, entry] of this.functions) { + if (entry.source === source) { + this.functions.delete(name); + } + } + + for (const [key, entry] of this.triggers) { + if (entry.source === source) { + this.triggers.delete(key); + } + } + + for (const [name, entry] of this.jobs) { + if (entry.source === source) { + this.jobs.delete(name); + } + } + + const handlersToRemove = this.liveQueryHandlers.filter(e => e.source === source); + for (const entry of handlersToRemove) { + const idx = 
this.liveQueryHandlers.indexOf(entry); + if (idx !== -1) { + this.liveQueryHandlers.splice(idx, 1); + } + } + } + + clearAll(): void { + this.functions.clear(); + this.triggers.clear(); + this.jobs.clear(); + this.liveQueryHandlers.length = 0; + } + + // ─── Registry Factory ────────────────────────────────────────────────────── + + createRegistry(source: string): CloudCodeRegistry { + const manager = this; + return { + defineFunction(name: string, handler: CloudFunctionHandler, validator?: ValidatorHandler): void { + manager.defineFunction(name, handler, source, validator); + }, + defineTrigger( + className: string, + triggerName: TriggerName, + handler: CloudTriggerHandler, + validator?: ValidatorHandler + ): void { + manager.defineTrigger(className, triggerName, handler, source, validator); + }, + defineJob(name: string, handler: CloudJobHandler): void { + manager.defineJob(name, handler, source); + }, + defineLiveQueryHandler(handler: LiveQueryHandler): void { + manager.defineLiveQueryHandler(handler, source); + }, + }; + } + + // ─── Lifecycle ───────────────────────────────────────────────────────────── + + async initialize(adapters: CloudCodeAdapter[], config: ParseServerConfig): Promise { + const seen = new Set(); + for (const adapter of adapters) { + if (seen.has(adapter.name)) { + throw new Error( + `Duplicate adapter name "${adapter.name}". 
Each adapter must have a unique name.` + ); + } + seen.add(adapter.name); + } + + for (const adapter of adapters) { + const registry = this.createRegistry(adapter.name); + await adapter.initialize(registry, config); + this.adapters.push(adapter); + } + } + + async shutdown(): Promise { + for (const adapter of this.adapters) { + await adapter.shutdown(); + } + } + + async healthCheck(): Promise { + for (const adapter of this.adapters) { + const healthy = await adapter.isHealthy(); + if (!healthy) { + return false; + } + } + return true; + } +} diff --git a/src/cloud-code/README.md b/src/cloud-code/README.md new file mode 100644 index 0000000000..d38906e17e --- /dev/null +++ b/src/cloud-code/README.md @@ -0,0 +1,395 @@ +# Cloud Code Adapters + +Parse Server supports pluggable cloud code adapters. You can write cloud code in any language, use any SDK, or bring your own adapter implementation. + +## Quick Start + +### Option A: In-Process (TypeScript/JavaScript) + +Pass any object with a `getRouter()` method as the `cloud` option. Any SDK that implements the router interface works — see [Building a Custom In-Process SDK](#building-a-custom-in-process-sdk) below. + +```typescript +import ParseServer from 'parse-server'; + +const cloud = createMyCloudCode(); // any object with getRouter() + +new ParseServer({ + databaseURI: 'mongodb://localhost:27017/myapp', + appId: 'myapp', + masterKey: 'secret', + cloud: cloud, // detected via getRouter() +}); +``` + +### Option B: External Process (Any Language) + +Spawn a separate process that speaks the ParseCloud/1.0 HTTP protocol: + +```javascript +new ParseServer({ + databaseURI: 'mongodb://localhost:27017/myapp', + appId: 'myapp', + masterKey: 'secret', + cloudCodeCommand: 'swift run CloudCode', + webhookKey: 'your-secret-key', +}); +``` + +The process receives config via environment variables and communicates via HTTP webhooks. 
+
+### Option C: Custom Adapter
+
+Pass any object implementing the `CloudCodeAdapter` interface:
+
+```typescript
+new ParseServer({
+  appId: 'myapp',
+  masterKey: 'secret',
+  cloudCodeAdapters: [myCustomAdapter],
+});
+```
+
+All three options compose — you can use them simultaneously. Hook conflicts (same function/trigger registered by multiple adapters) throw at startup.
+
+---
+
+## Building a Custom In-Process SDK
+
+To build a JavaScript/TypeScript cloud code SDK that integrates with Parse Server in-process, your library needs to expose a router with three methods.
+
+### Required Interface
+
+```typescript
+interface InProcessCloudCode {
+  getRouter(): {
+    /** Return all registered hooks */
+    getManifest(): {
+      protocol: string; // e.g. "ParseCloud/1.0"
+      hooks: {
+        functions: Array<{ name: string }>;
+        triggers: Array<{ className: string; triggerName: string }>;
+        jobs: Array<{ name: string }>;
+      };
+    };
+
+    /** Dispatch a cloud function call */
+    dispatchFunction(
+      name: string,
+      body: Record<string, unknown>
+    ): Promise<{ success: unknown } | { error: { code: number; message: string } }>;
+
+    /** Dispatch a trigger */
+    dispatchTrigger(
+      className: string,
+      triggerName: string,
+      body: Record<string, unknown>
+    ): Promise<{ success: unknown } | { error: { code: number; message: string } }>;
+
+    /** Dispatch a job */
+    dispatchJob(
+      name: string,
+      body: Record<string, unknown>
+    ): Promise<{ success: unknown } | { error: { code: number; message: string } }>;
+  };
+}
+```
+
+Parse Server detects your object via duck typing: if `cloud` has a `getRouter()` method, it's treated as an `InProcessCloudCode` instance.
+
+### How It Works
+
+1. Parse Server calls `cloud.getRouter().getManifest()` at startup
+2. For each hook in the manifest, a bridge handler is registered
+3. When a request comes in, Parse Server serializes it to a webhook body and calls `dispatchFunction`/`dispatchTrigger`/`dispatchJob`
+4. 
Your SDK processes the request and returns `{ success: result }` or `{ error: { code, message } }` + +### Webhook Body Format + +The body passed to `dispatch*` methods contains: + +```typescript +{ + master: boolean, // Was master key used? + ip: string, // Client IP + headers: object, // HTTP headers + installationId: string, // Client installation ID + user?: object, // Authenticated user (JSON) + params?: object, // Function/job parameters + + // Trigger-specific: + object?: object, // The object being saved/deleted (JSON) + original?: object, // Original object before changes (JSON) + context?: object, // Custom context passed between triggers + + // Query triggers: + query?: { + className: string, + where: object, + limit: number, + skip: number, + include: string, + keys: string, + order: string, + }, + + // File triggers: + file?: object, + fileSize?: number, + + // Job-specific: + jobId?: string, +} +``` + +### Response Format + +All dispatch methods return one of: + +```typescript +// Success +{ success: } + +// Error — thrown as Parse.Error on the server side +{ error: { code: number, message: string } } +``` + +For `beforeSave` triggers specifically: +- `{ success: {} }` (empty object) means "accept the original, no changes" +- `{ success: { field: value, ... 
} }` means "apply these field changes" + +### Minimal Example + +A bare-bones SDK in ~40 lines: + +```typescript +class MyCloudSDK { + private functions = new Map Promise>(); + + define(name: string, handler: (body: any) => Promise) { + this.functions.set(name, handler); + return this; + } + + getRouter() { + const functions = this.functions; + return { + getManifest() { + return { + protocol: 'MySDK/1.0', + hooks: { + functions: Array.from(functions.keys()).map(name => ({ name })), + triggers: [], + jobs: [], + }, + }; + }, + async dispatchFunction(name: string, body: Record) { + const handler = functions.get(name); + if (!handler) return { error: { code: 141, message: `Unknown function: ${name}` } }; + try { + const result = await handler(body); + return { success: result }; + } catch (e: any) { + return { error: { code: e.code || 141, message: e.message } }; + } + }, + async dispatchTrigger() { return { success: {} }; }, + async dispatchJob() { return { success: null }; }, + }; + } +} + +// Usage: +const cloud = new MyCloudSDK(); +cloud.define('hello', async (body) => `Hello, ${body.params.name}!`); + +new ParseServer({ cloud: cloud, ... }); +``` + +--- + +## Building an External Process SDK (Any Language) + +To build a cloud code SDK in Swift, C#, Go, Python, or any language, your process needs to: + +1. Start an HTTP server +2. Print `PARSE_CLOUD_READY:` to stdout +3. Serve a manifest at `GET /` +4. Handle webhook requests at `POST /functions/:name`, `POST /triggers/:className/:triggerName`, `POST /jobs/:name` +5. Respond to health checks at `GET /health` + +### Environment Variables + +Parse Server passes these to your process: + +| Variable | Description | +|----------|-------------| +| `PARSE_SERVER_URL` | Parse Server URL (e.g. 
`http://localhost:1337/parse`) |
+| `PARSE_APPLICATION_ID` | App ID |
+| `PARSE_MASTER_KEY` | Master key |
+| `PARSE_WEBHOOK_KEY` | Key for authenticating requests (check `X-Parse-Webhook-Key` header) |
+| `PARSE_CLOUD_PORT` | Suggested port (`0` = OS-assigned) |
+
+### Protocol
+
+**Startup:** Print `PARSE_CLOUD_READY:<port>` to stdout once your HTTP server is listening.
+
+**Manifest** (`GET /`):
+
+```json
+{
+  "protocol": "ParseCloud/1.0",
+  "hooks": {
+    "functions": [{ "name": "hello" }],
+    "triggers": [{ "className": "Todo", "triggerName": "beforeSave" }],
+    "jobs": [{ "name": "cleanup" }]
+  }
+}
+```
+
+**Webhook requests** (`POST /functions/:name`, etc.):
+
+- Request body: same webhook body format described above
+- Request header: `X-Parse-Webhook-Key` must match your `PARSE_WEBHOOK_KEY`
+- Response: `{ "success": <result> }` or `{ "error": { "code": 142, "message": "..." } }`
+
+**Health check** (`GET /health`): Return `200 OK`.
+
+**Shutdown:** Parse Server sends `SIGTERM`. Clean up and exit. After the configured timeout (default 5s), `SIGKILL` is sent.
+
+### Trigger Names
+
+| Trigger | className |
+|---------|-----------|
+| `beforeSave`, `afterSave`, `beforeDelete`, `afterDelete`, `beforeFind`, `afterFind` | Any class name (e.g. 
`Todo`, `_User`) |
+| `beforeSave`, `afterSave`, `beforeDelete`, `afterDelete` on files | `@File` |
+| `beforeSave`, `afterSave` on config | `@Config` |
+| `beforeLogin`, `afterLogin`, `beforePasswordResetRequest` | `_User` |
+| `afterLogout` | `_Session` |
+| `beforeConnect` | `@Connect` |
+| `beforeSubscribe`, `afterEvent` | Any class name |
+
+### Example: Go
+
+```go
+package main
+
+import (
+  "encoding/json"
+  "fmt"
+  "net"
+  "net/http"
+  "os"
+)
+
+func main() {
+  mux := http.NewServeMux()
+
+  mux.HandleFunc("GET /", func(w http.ResponseWriter, r *http.Request) {
+    json.NewEncoder(w).Encode(map[string]any{
+      "protocol": "ParseCloud/1.0",
+      "hooks": map[string]any{
+        "functions": []map[string]string{{"name": "hello"}},
+        "triggers": []any{},
+        "jobs": []any{},
+      },
+    })
+  })
+
+  mux.HandleFunc("POST /functions/hello", func(w http.ResponseWriter, r *http.Request) {
+    if r.Header.Get("X-Parse-Webhook-Key") != os.Getenv("PARSE_WEBHOOK_KEY") {
+      http.Error(w, "Unauthorized", 401)
+      return
+    }
+    var body map[string]any
+    json.NewDecoder(r.Body).Decode(&body)
+    params, _ := body["params"].(map[string]any)
+    name, _ := params["name"].(string)
+    json.NewEncoder(w).Encode(map[string]any{
+      "success": fmt.Sprintf("Hello, %s!", name),
+    })
+  })
+
+  mux.HandleFunc("GET /health", func(w http.ResponseWriter, r *http.Request) {
+    w.Write([]byte("OK"))
+  })
+
+  listener, _ := net.Listen("tcp", ":0")
+  port := listener.Addr().(*net.TCPAddr).Port
+  fmt.Printf("PARSE_CLOUD_READY:%d\n", port)
+  http.Serve(listener, mux)
+}
+```
+
+```javascript
+// parse-server config
+new ParseServer({
+  cloudCodeCommand: 'go run ./cloud-code',
+  webhookKey: 'my-secret-key',
+  // ... 
+}); +``` + +--- + +## Building a Fully Custom Adapter + +For complete control, implement the `CloudCodeAdapter` interface directly: + +```typescript +import type { CloudCodeAdapter, CloudCodeRegistry, ParseServerConfig } from 'parse-server/cloud-code/types'; + +class MyAdapter implements CloudCodeAdapter { + readonly name = 'my-adapter'; + + async initialize(registry: CloudCodeRegistry, config: ParseServerConfig): Promise { + // Register hooks using the registry + registry.defineFunction('myFunction', async (request) => { + return { result: 'hello' }; + }); + + registry.defineTrigger('Todo', 'beforeSave', async (request) => { + // request.object, request.user, etc. are Parse Server internal objects + // Return value or throw to reject + }); + + registry.defineJob('myJob', async (request) => { + // Long-running work + }); + + registry.defineLiveQueryHandler((data) => { + // Handle live query events + }); + } + + async isHealthy(): Promise { + return true; + } + + async shutdown(): Promise { + // Clean up resources + } +} + +// Usage: +new ParseServer({ + cloudCodeAdapters: [new MyAdapter()], + // ... +}); +``` + +### Registry API + +| Method | Description | +|--------|-------------| +| `defineFunction(name, handler, validator?)` | Register a cloud function | +| `defineTrigger(className, triggerName, handler, validator?)` | Register a trigger | +| `defineJob(name, handler)` | Register a background job | +| `defineLiveQueryHandler(handler)` | Register a live query event handler | + +### Notes + +- The `handler` receives Parse Server's internal request object (with `Parse.Object` instances, etc.) 
+- This is a lower-level API than the InProcess router interface — you work with Parse Server internals directly +- Validators (optional) support `{ requireUser: true, requireMaster: true, fields: {...}, rateLimit: {...} }` — same as `Parse.Cloud.define` validators +- Multiple custom adapters can coexist — each gets a unique source name from `adapter.name` +- Hook conflicts between adapters throw at startup diff --git a/src/cloud-code/adapters/ExternalProcessAdapter.ts b/src/cloud-code/adapters/ExternalProcessAdapter.ts new file mode 100644 index 0000000000..27e279fea6 --- /dev/null +++ b/src/cloud-code/adapters/ExternalProcessAdapter.ts @@ -0,0 +1,215 @@ +// src/cloud-code/adapters/ExternalProcessAdapter.ts +import { spawn, ChildProcess } from 'child_process'; +import http from 'http'; +import type { + CloudCodeAdapter, + CloudCodeRegistry, + ParseServerConfig, + CloudManifest, + CloudCodeOptions, + WebhookResponse, +} from '../types'; +import { requestToWebhookBody, webhookResponseToResult, applyBeforeSaveResponse } from './webhook-bridge'; + +const DEFAULT_OPTIONS: Required = { + startupTimeout: 30000, + healthCheckInterval: 30000, + shutdownTimeout: 5000, + maxRestartDelay: 30000, +}; + +function httpGet(url: string): Promise { + return new Promise((resolve, reject) => { + http.get(url, (res) => { + let data = ''; + res.on('data', (chunk) => data += chunk); + res.on('end', () => resolve(data)); + }).on('error', reject); + }); +} + +function httpPost(url: string, body: Record, webhookKey: string): Promise { + return new Promise((resolve, reject) => { + const payload = JSON.stringify(body); + const urlObj = new URL(url); + const req = http.request({ + hostname: urlObj.hostname, + port: urlObj.port, + path: urlObj.pathname, + method: 'POST', + headers: { + 'Content-Type': 'application/json', + 'Content-Length': Buffer.byteLength(payload), + 'X-Parse-Webhook-Key': webhookKey, + }, + }, (res) => { + let data = ''; + res.on('data', (chunk) => data += chunk); + 
res.on('end', () => { + try { + resolve(JSON.parse(data)); + } catch (e) { + reject(new Error(`Invalid JSON from cloud code process: ${data}`)); + } + }); + }); + req.on('error', reject); + req.write(payload); + req.end(); + }); +} + +export class ExternalProcessAdapter implements CloudCodeAdapter { + readonly name = 'external-process'; + private command: string; + private webhookKey: string; + private options: Required; + private process: ChildProcess | null = null; + private port: number = 0; + private healthInterval: ReturnType | null = null; + + constructor(command: string, webhookKey: string, options?: CloudCodeOptions) { + if (!webhookKey) { + throw new Error('webhookKey is required for ExternalProcessAdapter'); + } + this.command = command; + this.webhookKey = webhookKey; + this.options = { ...DEFAULT_OPTIONS, ...options }; + } + + async initialize(registry: CloudCodeRegistry, config: ParseServerConfig): Promise { + this.port = await this.spawnAndWaitForReady(config); + const manifest = await this.fetchManifest(); + this.registerFromManifest(registry, manifest); + + if (this.options.healthCheckInterval > 0) { + this.healthInterval = setInterval(() => this.checkHealth(), this.options.healthCheckInterval); + } + } + + async isHealthy(): Promise { + try { + const response = await httpGet(`http://localhost:${this.port}/health`); + return response === 'OK' || response.includes('ok'); + } catch { + return false; + } + } + + async shutdown(): Promise { + if (this.healthInterval) { + clearInterval(this.healthInterval); + this.healthInterval = null; + } + if (this.process && !this.process.killed) { + this.process.kill('SIGTERM'); + await Promise.race([ + new Promise((resolve) => this.process!.once('exit', () => resolve())), + new Promise((resolve) => setTimeout(() => { + if (this.process && !this.process.killed) { + this.process.kill('SIGKILL'); + } + resolve(); + }, this.options.shutdownTimeout)), + ]); + } + this.process = null; + } + + private 
spawnAndWaitForReady(config: ParseServerConfig): Promise { + return new Promise((resolve, reject) => { + const child = spawn(this.command, { + shell: true, + env: { + ...process.env, + PARSE_SERVER_URL: config.serverURL, + PARSE_APPLICATION_ID: config.appId, + PARSE_MASTER_KEY: config.masterKey, + PARSE_WEBHOOK_KEY: this.webhookKey, + PARSE_CLOUD_PORT: '0', + }, + stdio: ['ignore', 'pipe', 'pipe'], + }); + + this.process = child; + + const timeout = setTimeout(() => { + child.kill('SIGKILL'); + reject(new Error(`Cloud code process did not emit PARSE_CLOUD_READY within ${this.options.startupTimeout}ms`)); + }, this.options.startupTimeout); + + let stdout = ''; + child.stdout!.on('data', (data) => { + stdout += data.toString(); + const match = stdout.match(/PARSE_CLOUD_READY:(\d+)/); + if (match) { + clearTimeout(timeout); + resolve(parseInt(match[1], 10)); + } + }); + + child.stderr!.on('data', (data) => { + process.stderr.write(`[cloud-code] ${data}`); + }); + + child.on('error', (err) => { + clearTimeout(timeout); + reject(new Error(`Failed to spawn cloud code process: ${err.message}`)); + }); + + child.on('exit', (code) => { + clearTimeout(timeout); + if (!this.port) { + reject(new Error(`Cloud code process exited with code ${code} before becoming ready`)); + } + }); + }); + } + + private async fetchManifest(): Promise { + const data = await httpGet(`http://localhost:${this.port}/`); + return JSON.parse(data); + } + + private registerFromManifest(registry: CloudCodeRegistry, manifest: CloudManifest): void { + for (const fn of manifest.hooks.functions) { + registry.defineFunction(fn.name, async (request) => { + const body = requestToWebhookBody(request); + const response = await httpPost(`http://localhost:${this.port}/functions/${fn.name}`, body, this.webhookKey); + return webhookResponseToResult(response); + }); + } + + for (const trigger of manifest.hooks.triggers) { + const { className, triggerName } = trigger; + registry.defineTrigger(className, triggerName as 
any, async (request) => { + const body = requestToWebhookBody(request); + const response = await httpPost( + `http://localhost:${this.port}/triggers/${className}/${triggerName}`, + body, + this.webhookKey + ); + if (triggerName === 'beforeSave') { + applyBeforeSaveResponse(request, response); + return; + } + return webhookResponseToResult(response); + }); + } + + for (const job of manifest.hooks.jobs) { + registry.defineJob(job.name, async (request) => { + const body = requestToWebhookBody(request); + const response = await httpPost(`http://localhost:${this.port}/jobs/${job.name}`, body, this.webhookKey); + return webhookResponseToResult(response); + }); + } + } + + private async checkHealth(): Promise { + const healthy = await this.isHealthy(); + if (!healthy) { + console.warn('[cloud-code] External process health check failed'); + } + } +} diff --git a/src/cloud-code/adapters/InProcessAdapter.ts b/src/cloud-code/adapters/InProcessAdapter.ts new file mode 100644 index 0000000000..db92a05290 --- /dev/null +++ b/src/cloud-code/adapters/InProcessAdapter.ts @@ -0,0 +1,56 @@ +import type { + CloudCodeAdapter, + CloudCodeRegistry, + ParseServerConfig, + InProcessCloudCode, +} from '../types'; +import { requestToWebhookBody, webhookResponseToResult, applyBeforeSaveResponse } from './webhook-bridge'; + +export class InProcessAdapter implements CloudCodeAdapter { + readonly name = 'in-process'; + private cloudCode: InProcessCloudCode; + + constructor(cloudCode: InProcessCloudCode) { + this.cloudCode = cloudCode; + } + + async initialize(registry: CloudCodeRegistry, _config: ParseServerConfig): Promise { + const router = this.cloudCode.getRouter(); + const manifest = router.getManifest(); + + for (const fn of manifest.hooks.functions) { + registry.defineFunction(fn.name, async (request) => { + const body = requestToWebhookBody(request); + const response = await router.dispatchFunction(fn.name, body); + return webhookResponseToResult(response); + }); + } + + for (const 
trigger of manifest.hooks.triggers) { + const { className, triggerName } = trigger; + registry.defineTrigger(className, triggerName as any, async (request) => { + const body = requestToWebhookBody(request); + const response = await router.dispatchTrigger(className, triggerName, body); + if (triggerName === 'beforeSave') { + applyBeforeSaveResponse(request, response); + return; + } + return webhookResponseToResult(response); + }); + } + + for (const job of manifest.hooks.jobs) { + registry.defineJob(job.name, async (request) => { + const body = requestToWebhookBody(request); + const response = await router.dispatchJob(job.name, body); + return webhookResponseToResult(response); + }); + } + } + + async isHealthy(): Promise { + return true; + } + + async shutdown(): Promise {} +} diff --git a/src/cloud-code/adapters/LegacyAdapter.ts b/src/cloud-code/adapters/LegacyAdapter.ts new file mode 100644 index 0000000000..4f73e2ed49 --- /dev/null +++ b/src/cloud-code/adapters/LegacyAdapter.ts @@ -0,0 +1,43 @@ +// src/cloud-code/adapters/LegacyAdapter.ts + +import type { CloudCodeAdapter, CloudCodeRegistry, ParseServerConfig } from '../types'; + +export class LegacyAdapter implements CloudCodeAdapter { + readonly name = 'legacy'; + private cloud: string | ((parse: any) => void); + + constructor(cloud: string | ((parse: any) => void)) { + this.cloud = cloud; + } + + async initialize(_registry: CloudCodeRegistry, _config: ParseServerConfig): Promise { + // The registry is not used directly by LegacyAdapter. + // Instead, the cloud code file calls Parse.Cloud.define() etc., + // which calls triggers.addFunction() etc., + // which the facade delegates to CloudCodeManager. 
+ const Parse = require('parse/node').Parse; + + if (typeof this.cloud === 'function') { + await Promise.resolve(this.cloud(Parse)); + } else if (typeof this.cloud === 'string') { + const path = require('path'); + const resolved = path.resolve(process.cwd(), this.cloud); + try { + const pkg = require(path.resolve(process.cwd(), 'package.json')); + if (process.env.npm_package_type === 'module' || pkg?.type === 'module') { + await import(resolved); + } else { + require(resolved); + } + } catch { + require(resolved); + } + } + } + + async isHealthy(): Promise { + return true; + } + + async shutdown(): Promise {} +} diff --git a/src/cloud-code/adapters/webhook-bridge.ts b/src/cloud-code/adapters/webhook-bridge.ts new file mode 100644 index 0000000000..1c98e4880b --- /dev/null +++ b/src/cloud-code/adapters/webhook-bridge.ts @@ -0,0 +1,70 @@ +import { Parse } from 'parse/node'; +import type { WebhookResponse } from '../types'; + +export function requestToWebhookBody(request: any): Record { + const body: Record = { + master: request.master ?? false, + ip: request.ip ?? '', + headers: request.headers ?? {}, + installationId: request.installationId, + }; + + if (request.user) { + body.user = typeof request.user.toJSON === 'function' ? request.user.toJSON() : request.user; + } + if (request.params !== undefined) body.params = request.params; + if (request.jobId !== undefined) body.jobId = request.jobId; + if (request.object) { + body.object = typeof request.object.toJSON === 'function' ? request.object.toJSON() : request.object; + } + if (request.original) { + body.original = typeof request.original.toJSON === 'function' ? 
request.original.toJSON() : request.original; + } + if (request.context !== undefined) body.context = request.context; + if (request.query) { + body.query = { + className: request.query.className, + where: request.query._where, + limit: request.query._limit, + skip: request.query._skip, + include: request.query._include?.join(','), + keys: request.query._keys?.join(','), + order: request.query._order, + }; + } + if (request.count !== undefined) body.count = request.count; + if (request.isGet !== undefined) body.isGet = request.isGet; + if (request.file) body.file = request.file; + if (request.fileSize !== undefined) body.fileSize = request.fileSize; + if (request.event) body.event = request.event; + if (request.requestId !== undefined) body.requestId = request.requestId; + if (request.clients !== undefined) body.clients = request.clients; + if (request.subscriptions !== undefined) body.subscriptions = request.subscriptions; + + return body; +} + +export function webhookResponseToResult(response: WebhookResponse): unknown { + if ('error' in response) { + throw new Parse.Error(response.error.code, response.error.message); + } + return response.success; +} + +export function applyBeforeSaveResponse(request: any, response: WebhookResponse): void { + if ('error' in response) { + throw new Parse.Error(response.error.code, response.error.message); + } + const result = response.success; + if (typeof result === 'object' && result !== null && Object.keys(result).length === 0) { + return; + } + if (typeof result === 'object' && result !== null) { + const skipFields = ['objectId', 'createdAt', 'updatedAt', 'className']; + for (const [key, value] of Object.entries(result)) { + if (!skipFields.includes(key)) { + request.object.set(key, value); + } + } + } +} diff --git a/src/cloud-code/resolveAdapters.ts b/src/cloud-code/resolveAdapters.ts new file mode 100644 index 0000000000..c67c188fd8 --- /dev/null +++ b/src/cloud-code/resolveAdapters.ts @@ -0,0 +1,33 @@ +import { 
LegacyAdapter } from './adapters/LegacyAdapter'; +import { InProcessAdapter } from './adapters/InProcessAdapter'; +import { ExternalProcessAdapter } from './adapters/ExternalProcessAdapter'; +import type { CloudCodeAdapter } from './types'; + +export function resolveAdapters(options: any): CloudCodeAdapter[] { + const adapters: CloudCodeAdapter[] = []; + + if (options.cloudCodeAdapters) { + adapters.push(...options.cloudCodeAdapters); + } + + if (options.cloud) { + if (typeof options.cloud === 'object' && typeof options.cloud.getRouter === 'function') { + adapters.push(new InProcessAdapter(options.cloud)); + } else { + adapters.push(new LegacyAdapter(options.cloud)); + } + } + + if (options.cloudCodeCommand) { + if (!options.webhookKey) { + throw new Error('webhookKey is required when using cloudCodeCommand'); + } + adapters.push(new ExternalProcessAdapter( + options.cloudCodeCommand, + options.webhookKey, + options.cloudCodeOptions + )); + } + + return adapters; +} diff --git a/src/cloud-code/types.ts b/src/cloud-code/types.ts new file mode 100644 index 0000000000..01adc84008 --- /dev/null +++ b/src/cloud-code/types.ts @@ -0,0 +1,105 @@ +// src/cloud-code/types.ts + +export const TriggerTypes = Object.freeze({ + beforeLogin: 'beforeLogin', + afterLogin: 'afterLogin', + afterLogout: 'afterLogout', + beforePasswordResetRequest: 'beforePasswordResetRequest', + beforeSave: 'beforeSave', + afterSave: 'afterSave', + beforeDelete: 'beforeDelete', + afterDelete: 'afterDelete', + beforeFind: 'beforeFind', + afterFind: 'afterFind', + beforeConnect: 'beforeConnect', + beforeSubscribe: 'beforeSubscribe', + afterEvent: 'afterEvent', +}); + +export type TriggerName = keyof typeof TriggerTypes; + +export type CloudFunctionHandler = (request: any) => any; +export type CloudTriggerHandler = (request: any) => any; +export type CloudJobHandler = (request: any) => any; +export type LiveQueryHandler = (data: any) => void; +export type ValidatorHandler = Record | ((request: any) => 
any); + +export interface FunctionEntry { + handler: CloudFunctionHandler; + source: string; + validator?: ValidatorHandler; +} + +export interface TriggerEntry { + handler: CloudTriggerHandler; + source: string; + validator?: ValidatorHandler; +} + +export interface JobEntry { + handler: CloudJobHandler; + source: string; +} + +export interface LiveQueryEntry { + handler: LiveQueryHandler; + source: string; +} + +export interface HookStore { + functions: Map; + triggers: Map; + jobs: Map; + liveQueryHandlers: LiveQueryEntry[]; +} + +export interface ParseServerConfig { + appId: string; + masterKey: string; + serverURL: string; +} + +export interface CloudCodeRegistry { + defineFunction(name: string, handler: CloudFunctionHandler, validator?: ValidatorHandler): void; + defineTrigger(className: string, triggerName: TriggerName, handler: CloudTriggerHandler, validator?: ValidatorHandler): void; + defineJob(name: string, handler: CloudJobHandler): void; + defineLiveQueryHandler(handler: LiveQueryHandler): void; +} + +export interface CloudCodeAdapter { + readonly name: string; + initialize(registry: CloudCodeRegistry, config: ParseServerConfig): Promise; + isHealthy(): Promise; + shutdown(): Promise; +} + +export interface CloudManifest { + protocol: string; + hooks: { + functions: Array<{ name: string }>; + triggers: Array<{ className: string; triggerName: string }>; + jobs: Array<{ name: string }>; + }; +} + +export type WebhookResponse = + | { success: unknown } + | { error: { code: number; message: string } }; + +export interface CloudRouter { + getManifest(): CloudManifest; + dispatchFunction(name: string, body: Record): Promise; + dispatchTrigger(className: string, triggerName: string, body: Record): Promise; + dispatchJob(name: string, body: Record): Promise; +} + +export interface InProcessCloudCode { + getRouter(): CloudRouter; +} + +export interface CloudCodeOptions { + startupTimeout?: number; + healthCheckInterval?: number; + shutdownTimeout?: number; + 
maxRestartDelay?: number; +} diff --git a/src/triggers.js b/src/triggers.js index 7558aa6c2d..583da7baeb 100644 --- a/src/triggers.js +++ b/src/triggers.js @@ -1,6 +1,12 @@ // triggers.js import Parse from 'parse/node'; import { logger } from './logger'; +import AppCache from './cache'; + +function getManager(applicationId) { + const cached = AppCache.get(applicationId || Parse.applicationId); + return cached && cached.cloudCodeManager; +} export const Types = { beforeLogin: 'beforeLogin', @@ -147,41 +153,89 @@ function get(category, name, applicationId) { } export function addFunction(functionName, handler, validationHandler, applicationId) { + const manager = getManager(applicationId); + if (manager) { + manager.defineFunction(functionName, handler, 'legacy', validationHandler); + return; + } add(Category.Functions, functionName, handler, applicationId); add(Category.Validators, functionName, validationHandler, applicationId); } export function addJob(jobName, handler, applicationId) { + const manager = getManager(applicationId); + if (manager) { + manager.defineJob(jobName, handler, 'legacy'); + return; + } add(Category.Jobs, jobName, handler, applicationId); } export function addTrigger(type, className, handler, applicationId, validationHandler) { + const manager = getManager(applicationId); + if (manager) { + manager.defineTrigger(className, type, handler, 'legacy', validationHandler); + return; + } validateClassNameForTriggers(className, type); add(Category.Triggers, `${type}.${className}`, handler, applicationId); add(Category.Validators, `${type}.${className}`, validationHandler, applicationId); } export function addConnectTrigger(type, handler, applicationId, validationHandler) { + const manager = getManager(applicationId); + if (manager) { + manager.defineTrigger(ConnectClassName, type, handler, 'legacy', validationHandler); + return; + } add(Category.Triggers, `${type}.${ConnectClassName}`, handler, applicationId); add(Category.Validators, 
`${type}.${ConnectClassName}`, validationHandler, applicationId); } export function addLiveQueryEventHandler(handler, applicationId) { + const manager = getManager(applicationId); + if (manager) { + manager.defineLiveQueryHandler(handler, 'legacy'); + return; + } applicationId = applicationId || Parse.applicationId; _triggerStore[applicationId] = _triggerStore[applicationId] || baseStore(); _triggerStore[applicationId].LiveQuery.push(handler); } export function removeFunction(functionName, applicationId) { + const manager = getManager(applicationId); + if (manager) { + manager.removeFunction(functionName); + return; + } remove(Category.Functions, functionName, applicationId); } export function removeTrigger(type, className, applicationId) { + const manager = getManager(applicationId); + if (manager) { + manager.removeTrigger(className, type); + return; + } remove(Category.Triggers, `${type}.${className}`, applicationId); } export function _unregisterAll() { - Object.keys(_triggerStore).forEach(appId => delete _triggerStore[appId]); + // Clear managers from AppCache entries + const appCacheStore = AppCache.cache; + if (appCacheStore) { + Object.keys(appCacheStore).forEach(appId => { + const manager = getManager(appId); + if (manager) { + manager.clearAll(); + } + }); + } + // Clear legacy trigger store + Object.keys(_triggerStore).forEach(appId => { + delete _triggerStore[appId]; + }); } export function toJSONwithObjects(object, className) { @@ -212,6 +266,11 @@ export function getTrigger(className, triggerType, applicationId) { if (!applicationId) { throw 'Missing ApplicationID'; } + const manager = getManager(applicationId); + if (manager) { + const entry = manager.getTrigger(className, triggerType); + return entry ? 
entry.handler : undefined; + } return get(Category.Triggers, `${triggerType}.${className}`, applicationId); } @@ -227,14 +286,27 @@ export async function runTrigger(trigger, name, request, auth) { } export function triggerExists(className: string, type: string, applicationId: string): boolean { + const manager = getManager(applicationId); + if (manager) { + return manager.triggerExists(className, type); + } return getTrigger(className, type, applicationId) != undefined; } export function getFunction(functionName, applicationId) { + const manager = getManager(applicationId); + if (manager) { + const entry = manager.getFunction(functionName); + return entry ? entry.handler : undefined; + } return get(Category.Functions, functionName, applicationId); } export function getFunctionNames(applicationId) { + const manager = getManager(applicationId); + if (manager) { + return manager.getFunctionNames(); + } const store = (_triggerStore[applicationId] && _triggerStore[applicationId][Category.Functions]) || {}; const functionNames = []; @@ -256,10 +328,19 @@ export function getFunctionNames(applicationId) { } export function getJob(jobName, applicationId) { + const manager = getManager(applicationId); + if (manager) { + const entry = manager.getJob(jobName); + return entry ? 
entry.handler : undefined; + } return get(Category.Jobs, jobName, applicationId); } export function getJobs(applicationId) { + const mgr = getManager(applicationId); + if (mgr) { + return mgr.getJobsObject(); + } var manager = _triggerStore[applicationId]; if (manager && manager.Jobs) { return manager.Jobs; @@ -268,6 +349,10 @@ export function getJobs(applicationId) { } export function getValidator(functionName, applicationId) { + const manager = getManager(applicationId); + if (manager) { + return manager.getValidator(functionName); + } return get(Category.Validators, functionName, applicationId); } @@ -1032,6 +1117,11 @@ export function inflate(data, restObject) { } export function runLiveQueryEventHandlers(data, applicationId = Parse.applicationId) { + const manager = getManager(applicationId); + if (manager) { + manager.runLiveQueryEventHandlers(data); + return; + } if (!_triggerStore || !_triggerStore[applicationId] || !_triggerStore[applicationId].LiveQuery) { return; } From 5b9f644771623d336db860b10490511a8180fa64 Mon Sep 17 00:00:00 2001 From: Daniel Date: Tue, 17 Mar 2026 20:45:14 +1100 Subject: [PATCH 06/14] fix build --- spec/InProcessAdapter.spec.js | 12 +- src/Options/Definitions.js | 2874 ++++++++--------- src/Options/docs.js | 6 +- src/Options/index.js | 9 +- .../adapters/ExternalProcessAdapter.ts | 2 +- src/cloud-code/adapters/webhook-bridge.ts | 2 +- 6 files changed, 1454 insertions(+), 1451 deletions(-) diff --git a/spec/InProcessAdapter.spec.js b/spec/InProcessAdapter.spec.js index b14c139375..3821492d8f 100644 --- a/spec/InProcessAdapter.spec.js +++ b/spec/InProcessAdapter.spec.js @@ -7,15 +7,21 @@ function createMockCloudCode(manifest, handlers = {}) { return { getManifest() { return manifest; }, async dispatchFunction(name, body) { - if (handlers[`function:${name}`]) return handlers[`function:${name}`](body); + if (handlers[`function:${name}`]) { + return handlers[`function:${name}`](body); + } return { success: null }; }, async 
dispatchTrigger(className, triggerName, body) { - if (handlers[`trigger:${triggerName}.${className}`]) return handlers[`trigger:${triggerName}.${className}`](body); + if (handlers[`trigger:${triggerName}.${className}`]) { + return handlers[`trigger:${triggerName}.${className}`](body); + } return { success: {} }; }, async dispatchJob(name, body) { - if (handlers[`job:${name}`]) return handlers[`job:${name}`](body); + if (handlers[`job:${name}`]) { + return handlers[`job:${name}`](body); + } return { success: null }; }, }; diff --git a/src/Options/Definitions.js b/src/Options/Definitions.js index 0956f12baf..79a81abf5e 100644 --- a/src/Options/Definitions.js +++ b/src/Options/Definitions.js @@ -3,1484 +3,1482 @@ This code has been generated by resources/buildConfigDefinitions.js Do not edit manually, but update Options/index.js */ -var parsers = require('./parsers'); +var parsers = require("./parsers"); module.exports.SchemaOptions = { - afterMigration: { - env: 'PARSE_SERVER_SCHEMA_AFTER_MIGRATION', - help: 'Execute a callback after running schema migrations.', - }, - beforeMigration: { - env: 'PARSE_SERVER_SCHEMA_BEFORE_MIGRATION', - help: 'Execute a callback before running schema migrations.', - }, - definitions: { - env: 'PARSE_SERVER_SCHEMA_DEFINITIONS', - help: 'Rest representation on Parse.Schema https://docs.parseplatform.org/rest/guide/#adding-a-schema', - required: true, - action: parsers.objectParser, - default: [], - }, - deleteExtraFields: { - env: 'PARSE_SERVER_SCHEMA_DELETE_EXTRA_FIELDS', - help: 'Is true if Parse Server should delete any fields not defined in a schema definition. This should only be used during development.', - action: parsers.booleanParser, - default: false, - }, - keepUnknownIndexes: { - env: 'PARSE_SERVER_SCHEMA_KEEP_UNKNOWN_INDEXES', - help: "(Optional) Keep indexes that are present in the database but not defined in the schema. 
Set this to `true` if you are adding indexes manually, so that they won't be removed when running schema migration. Default is `false`.", - action: parsers.booleanParser, - default: false, - }, - lockSchemas: { - env: 'PARSE_SERVER_SCHEMA_LOCK_SCHEMAS', - help: 'Is true if Parse Server will reject any attempts to modify the schema while the server is running.', - action: parsers.booleanParser, - default: false, - }, - recreateModifiedFields: { - env: 'PARSE_SERVER_SCHEMA_RECREATE_MODIFIED_FIELDS', - help: 'Is true if Parse Server should recreate any fields that are different between the current database schema and theschema definition. This should only be used during development.', - action: parsers.booleanParser, - default: false, - }, - strict: { - env: 'PARSE_SERVER_SCHEMA_STRICT', - help: 'Is true if Parse Server should exit if schema update fail.', - action: parsers.booleanParser, - default: false, - }, + "afterMigration": { + "env": "PARSE_SERVER_SCHEMA_AFTER_MIGRATION", + "help": "Execute a callback after running schema migrations." + }, + "beforeMigration": { + "env": "PARSE_SERVER_SCHEMA_BEFORE_MIGRATION", + "help": "Execute a callback before running schema migrations." + }, + "definitions": { + "env": "PARSE_SERVER_SCHEMA_DEFINITIONS", + "help": "Rest representation on Parse.Schema https://docs.parseplatform.org/rest/guide/#adding-a-schema", + "required": true, + "action": parsers.objectParser, + "default": [] + }, + "deleteExtraFields": { + "env": "PARSE_SERVER_SCHEMA_DELETE_EXTRA_FIELDS", + "help": "Is true if Parse Server should delete any fields not defined in a schema definition. This should only be used during development.", + "action": parsers.booleanParser, + "default": false + }, + "keepUnknownIndexes": { + "env": "PARSE_SERVER_SCHEMA_KEEP_UNKNOWN_INDEXES", + "help": "(Optional) Keep indexes that are present in the database but not defined in the schema. 
Set this to `true` if you are adding indexes manually, so that they won't be removed when running schema migration. Default is `false`.", + "action": parsers.booleanParser, + "default": false + }, + "lockSchemas": { + "env": "PARSE_SERVER_SCHEMA_LOCK_SCHEMAS", + "help": "Is true if Parse Server will reject any attempts to modify the schema while the server is running.", + "action": parsers.booleanParser, + "default": false + }, + "recreateModifiedFields": { + "env": "PARSE_SERVER_SCHEMA_RECREATE_MODIFIED_FIELDS", + "help": "Is true if Parse Server should recreate any fields that are different between the current database schema and theschema definition. This should only be used during development.", + "action": parsers.booleanParser, + "default": false + }, + "strict": { + "env": "PARSE_SERVER_SCHEMA_STRICT", + "help": "Is true if Parse Server should exit if schema update fail.", + "action": parsers.booleanParser, + "default": false + } }; module.exports.ParseServerOptions = { - accountLockout: { - env: 'PARSE_SERVER_ACCOUNT_LOCKOUT', - help: "The account lockout policy for failed login attempts.

Note: Setting a user's ACL to an empty object `{}` via master key is a separate mechanism that only prevents new logins; it does not invalidate existing session tokens. To immediately revoke a user's access, destroy their sessions via master key in addition to setting the ACL.", - action: parsers.objectParser, - type: 'AccountLockoutOptions', - }, - allowClientClassCreation: { - env: 'PARSE_SERVER_ALLOW_CLIENT_CLASS_CREATION', - help: 'Enable (or disable) client class creation, defaults to false', - action: parsers.booleanParser, - default: false, - }, - allowCustomObjectId: { - env: 'PARSE_SERVER_ALLOW_CUSTOM_OBJECT_ID', - help: 'Enable (or disable) custom objectId', - action: parsers.booleanParser, - default: false, - }, - allowExpiredAuthDataToken: { - env: 'PARSE_SERVER_ALLOW_EXPIRED_AUTH_DATA_TOKEN', - help: 'Allow a user to log in even if the 3rd party authentication token that was used to sign in to their account has expired. If this is set to `false`, then the token will be validated every time the user signs in to their account. This refers to the token that is stored in the `_User.authData` field. Defaults to `false`.', - action: parsers.booleanParser, - default: false, - }, - allowHeaders: { - env: 'PARSE_SERVER_ALLOW_HEADERS', - help: 'Add headers to Access-Control-Allow-Headers', - action: parsers.arrayParser, - }, - allowOrigin: { - env: 'PARSE_SERVER_ALLOW_ORIGIN', - help: 'Sets origins for Access-Control-Allow-Origin. 
This can be a string for a single origin or an array of strings for multiple origins.', - action: parsers.arrayParser, - }, - analyticsAdapter: { - env: 'PARSE_SERVER_ANALYTICS_ADAPTER', - help: 'Adapter module for the analytics', - action: parsers.moduleOrObjectParser, - }, - appId: { - env: 'PARSE_SERVER_APPLICATION_ID', - help: 'Your Parse Application ID', - required: true, - }, - appName: { - env: 'PARSE_SERVER_APP_NAME', - help: 'Sets the app name', - }, - auth: { - env: 'PARSE_SERVER_AUTH_PROVIDERS', - help: "Configuration for your authentication providers, as stringified JSON. See http://docs.parseplatform.org/parse-server/guide/#oauth-and-3rd-party-authentication

Provider names must start with a letter and contain only letters, digits, and underscores (`/^[A-Za-z][A-Za-z0-9_]*$/`). This is because each provider name is used to construct a database field (`_auth_data_`), which must comply with Parse Server's field naming rules.", - action: parsers.objectParser, - }, - cacheAdapter: { - env: 'PARSE_SERVER_CACHE_ADAPTER', - help: 'Adapter module for the cache', - action: parsers.moduleOrObjectParser, - }, - cacheMaxSize: { - env: 'PARSE_SERVER_CACHE_MAX_SIZE', - help: 'Sets the maximum size for the in memory cache, defaults to 10000', - action: parsers.numberParser('cacheMaxSize'), - default: 10000, - }, - cacheTTL: { - env: 'PARSE_SERVER_CACHE_TTL', - help: 'Sets the TTL for the in memory cache (in ms), defaults to 5000 (5 seconds)', - action: parsers.numberParser('cacheTTL'), - default: 5000, - }, - clientKey: { - env: 'PARSE_SERVER_CLIENT_KEY', - help: 'Key for iOS, MacOS, tvOS clients', - }, - cloud: { - env: 'PARSE_SERVER_CLOUD', - help: 'Full path to your cloud code main.js', - }, - cloudCodeAdapters: { - help: 'Array of CloudCodeAdapter instances for BYO cloud code integration', - }, - cloudCodeCommand: { - env: 'PARSE_SERVER_CLOUD_CODE_COMMAND', - help: 'Shell command to spawn an external cloud code process (ParseCloud/1.0 protocol)', - }, - cloudCodeOptions: { - help: 'Options for the external cloud code process adapter', - }, - cluster: { - env: 'PARSE_SERVER_CLUSTER', - help: 'Run with cluster, optionally set the number of processes default to os.cpus().length', - action: parsers.numberOrBooleanParser, - }, - collectionPrefix: { - env: 'PARSE_SERVER_COLLECTION_PREFIX', - help: 'A collection prefix for the classes', - default: '', - }, - convertEmailToLowercase: { - env: 'PARSE_SERVER_CONVERT_EMAIL_TO_LOWERCASE', - help: 'Optional. If set to `true`, the `email` property of a user is automatically converted to lowercase before being stored in the database. 
Consequently, queries must match the case as stored in the database, which would be lowercase in this scenario. If `false`, the `email` property is stored as set, without any case modifications. Default is `false`.', - action: parsers.booleanParser, - default: false, - }, - convertUsernameToLowercase: { - env: 'PARSE_SERVER_CONVERT_USERNAME_TO_LOWERCASE', - help: 'Optional. If set to `true`, the `username` property of a user is automatically converted to lowercase before being stored in the database. Consequently, queries must match the case as stored in the database, which would be lowercase in this scenario. If `false`, the `username` property is stored as set, without any case modifications. Default is `false`.', - action: parsers.booleanParser, - default: false, - }, - customPages: { - env: 'PARSE_SERVER_CUSTOM_PAGES', - help: 'custom pages for password validation and reset', - action: parsers.objectParser, - type: 'CustomPagesOptions', - default: {}, - }, - databaseAdapter: { - env: 'PARSE_SERVER_DATABASE_ADAPTER', - help: 'Adapter module for the database; any options that are not explicitly described here are passed directly to the database client.', - action: parsers.moduleOrObjectParser, - }, - databaseOptions: { - env: 'PARSE_SERVER_DATABASE_OPTIONS', - help: 'Options to pass to the database client', - action: parsers.objectParser, - type: 'DatabaseOptions', - }, - databaseURI: { - env: 'PARSE_SERVER_DATABASE_URI', - help: 'The full URI to your database. 
Supported databases are mongodb or postgres.', - required: true, - default: 'mongodb://localhost:27017/parse', - }, - defaultLimit: { - env: 'PARSE_SERVER_DEFAULT_LIMIT', - help: 'Default value for limit option on queries, defaults to `100`.', - action: parsers.numberParser('defaultLimit'), - default: 100, - }, - directAccess: { - env: 'PARSE_SERVER_DIRECT_ACCESS', - help: 'Set to `true` if Parse requests within the same Node.js environment as Parse Server should be routed to Parse Server directly instead of via the HTTP interface. Default is `false`.

If set to `false` then Parse requests within the same Node.js environment as Parse Server are executed as HTTP requests sent to Parse Server via the `serverURL`. For example, a `Parse.Query` in Cloud Code is calling Parse Server via a HTTP request. The server is essentially making a HTTP request to itself, unnecessarily using network resources such as network ports.

\u26A0\uFE0F In environments where multiple Parse Server instances run behind a load balancer and Parse requests within the current Node.js environment should be routed via the load balancer and distributed as HTTP requests among all instances via the `serverURL`, this should be set to `false`.', - action: parsers.booleanParser, - default: true, - }, - dotNetKey: { - env: 'PARSE_SERVER_DOT_NET_KEY', - help: 'Key for Unity and .Net SDK', - }, - emailAdapter: { - env: 'PARSE_SERVER_EMAIL_ADAPTER', - help: 'Adapter module for email sending', - action: parsers.moduleOrObjectParser, - }, - emailVerifySuccessOnInvalidEmail: { - env: 'PARSE_SERVER_EMAIL_VERIFY_SUCCESS_ON_INVALID_EMAIL', - help: 'Set to `true` if a request to verify the email should return a success response even if the provided email address does not belong to a verifiable account, for example because it is unknown or already verified, or `false` if the request should return an error response in those cases.

Default is `true`.
Requires option `verifyUserEmails: true`.', - action: parsers.booleanParser, - default: true, - }, - emailVerifyTokenReuseIfValid: { - env: 'PARSE_SERVER_EMAIL_VERIFY_TOKEN_REUSE_IF_VALID', - help: 'Set to `true` if a email verification token should be reused in case another token is requested but there is a token that is still valid, i.e. has not expired. This avoids the often observed issue that a user requests multiple emails and does not know which link contains a valid token because each newly generated token would invalidate the previous token.

Default is `false`.
Requires option `verifyUserEmails: true`.', - action: parsers.booleanParser, - default: false, - }, - emailVerifyTokenValidityDuration: { - env: 'PARSE_SERVER_EMAIL_VERIFY_TOKEN_VALIDITY_DURATION', - help: 'Set the validity duration of the email verification token in seconds after which the token expires. The token is used in the link that is set in the email. After the token expires, the link becomes invalid and a new link has to be sent. If the option is not set or set to `undefined`, then the token never expires.

For example, to expire the token after 2 hours, set a value of 7200 seconds (= 60 seconds * 60 minutes * 2 hours).

Default is `undefined`.
Requires option `verifyUserEmails: true`.', - action: parsers.numberParser('emailVerifyTokenValidityDuration'), - }, - enableAnonymousUsers: { - env: 'PARSE_SERVER_ENABLE_ANON_USERS', - help: 'Enable (or disable) anonymous users, defaults to true', - action: parsers.booleanParser, - default: true, - }, - enableCollationCaseComparison: { - env: 'PARSE_SERVER_ENABLE_COLLATION_CASE_COMPARISON', - help: 'Optional. If set to `true`, the collation rule of case comparison for queries and indexes is enabled. Enable this option to run Parse Server with MongoDB Atlas Serverless or AWS Amazon DocumentDB. If `false`, the collation rule of case comparison is disabled. Default is `false`.', - action: parsers.booleanParser, - default: false, - }, - enableExpressErrorHandler: { - env: 'PARSE_SERVER_ENABLE_EXPRESS_ERROR_HANDLER', - help: 'Enables the default express error handler for all errors', - action: parsers.booleanParser, - default: false, - }, - enableInsecureAuthAdapters: { - env: 'PARSE_SERVER_ENABLE_INSECURE_AUTH_ADAPTERS', - help: 'Optional. Enables insecure authentication adapters. Insecure auth adapters are deprecated and will be removed in a future version. Defaults to `false`.', - action: parsers.booleanParser, - default: false, - }, - enableSanitizedErrorResponse: { - env: 'PARSE_SERVER_ENABLE_SANITIZED_ERROR_RESPONSE', - help: 'If set to `true`, error details are removed from error messages in responses to client requests, and instead a generic error message is sent. 
Default is `true`.', - action: parsers.booleanParser, - default: true, - }, - encryptionKey: { - env: 'PARSE_SERVER_ENCRYPTION_KEY', - help: 'Key for encrypting your files', - }, - enforcePrivateUsers: { - env: 'PARSE_SERVER_ENFORCE_PRIVATE_USERS', - help: 'Set to true if new users should be created without public read and write access.', - action: parsers.booleanParser, - default: true, - }, - expireInactiveSessions: { - env: 'PARSE_SERVER_EXPIRE_INACTIVE_SESSIONS', - help: 'Sets whether we should expire the inactive sessions, defaults to true. If false, all new sessions are created with no expiration date.', - action: parsers.booleanParser, - default: true, - }, - extendSessionOnUse: { - env: 'PARSE_SERVER_EXTEND_SESSION_ON_USE', - help: "Whether Parse Server should automatically extend a valid session by the sessionLength. In order to reduce the number of session updates in the database, a session will only be extended when a request is received after at least half of the current session's lifetime has passed.", - action: parsers.booleanParser, - default: false, - }, - fileKey: { - env: 'PARSE_SERVER_FILE_KEY', - help: 'Key for your files', - }, - filesAdapter: { - env: 'PARSE_SERVER_FILES_ADAPTER', - help: 'Adapter module for the files sub-system', - action: parsers.moduleOrObjectParser, - }, - fileUpload: { - env: 'PARSE_SERVER_FILE_UPLOAD_OPTIONS', - help: 'Options for file uploads', - action: parsers.objectParser, - type: 'FileUploadOptions', - default: {}, - }, - graphQLPath: { - env: 'PARSE_SERVER_GRAPHQL_PATH', - help: 'The mount path for the GraphQL endpoint

\u26A0\uFE0F File upload inside the GraphQL mutation system requires Parse Server to be able to call itself by making requests to the URL set in `serverURL`.

Defaults is `/graphql`.', - default: '/graphql', - }, - graphQLPublicIntrospection: { - env: 'PARSE_SERVER_GRAPHQL_PUBLIC_INTROSPECTION', - help: 'Enable public introspection for the GraphQL endpoint, defaults to false', - action: parsers.booleanParser, - default: false, - }, - graphQLSchema: { - env: 'PARSE_SERVER_GRAPH_QLSCHEMA', - help: 'Full path to your GraphQL custom schema.graphql file', - }, - host: { - env: 'PARSE_SERVER_HOST', - help: 'The host to serve ParseServer on, defaults to 0.0.0.0', - default: '0.0.0.0', - }, - idempotencyOptions: { - env: 'PARSE_SERVER_EXPERIMENTAL_IDEMPOTENCY_OPTIONS', - help: 'Options for request idempotency to deduplicate identical requests that may be caused by network issues. Caution, this is an experimental feature that may not be appropriate for production.', - action: parsers.objectParser, - type: 'IdempotencyOptions', - default: {}, - }, - javascriptKey: { - env: 'PARSE_SERVER_JAVASCRIPT_KEY', - help: 'Key for the Javascript SDK', - }, - jsonLogs: { - env: 'JSON_LOGS', - help: 'Log as structured JSON objects', - action: parsers.booleanParser, - }, - liveQuery: { - env: 'PARSE_SERVER_LIVE_QUERY', - help: "parse-server's LiveQuery configuration object", - action: parsers.objectParser, - type: 'LiveQueryOptions', - }, - liveQueryServerOptions: { - env: 'PARSE_SERVER_LIVE_QUERY_SERVER_OPTIONS', - help: 'Live query server configuration options (will start the liveQuery server)', - action: parsers.objectParser, - type: 'LiveQueryServerOptions', - }, - loggerAdapter: { - env: 'PARSE_SERVER_LOGGER_ADAPTER', - help: 'Adapter module for the logging sub-system', - action: parsers.moduleOrObjectParser, - }, - logLevel: { - env: 'PARSE_SERVER_LOG_LEVEL', - help: 'Sets the level for logs', - }, - logLevels: { - env: 'PARSE_SERVER_LOG_LEVELS', - help: '(Optional) Overrides the log levels used internally by Parse Server to log events.', - action: parsers.objectParser, - type: 'LogLevels', - default: {}, - }, - logsFolder: { - env: 
'PARSE_SERVER_LOGS_FOLDER', - help: "Folder for the logs (defaults to './logs'); set to null to disable file based logging", - default: './logs', - }, - maintenanceKey: { - env: 'PARSE_SERVER_MAINTENANCE_KEY', - help: '(Optional) The maintenance key is used for modifying internal and read-only fields of Parse Server.

\u26A0\uFE0F This key is not intended to be used as part of a regular operation of Parse Server. This key is intended to conduct out-of-band changes such as one-time migrations or data correction tasks. Internal fields are not officially documented and may change at any time without publication in release changelogs. We strongly advice not to rely on internal fields as part of your regular operation and to investigate the implications of any planned changes *directly in the source code* of your current version of Parse Server.', - required: true, - }, - maintenanceKeyIps: { - env: 'PARSE_SERVER_MAINTENANCE_KEY_IPS', - help: "(Optional) Restricts the use of maintenance key permissions to a list of IP addresses or ranges.

This option accepts a list of single IP addresses, for example `['10.0.0.1', '10.0.0.2']`. You can also use CIDR notation to specify an IP address range, for example `['10.0.1.0/24']`.

Special scenarios:
- Setting an empty array `[]` means that the maintenance key cannot be used even in Parse Server Cloud Code. This value cannot be set via an environment variable as there is no way to pass an empty array to Parse Server via an environment variable.
- Setting `['0.0.0.0/0', '::0']` means to allow any IPv4 and IPv6 address to use the maintenance key and effectively disables the IP filter.

Considerations:
- IPv4 and IPv6 addresses are not compared against each other. Each IP version (IPv4 and IPv6) needs to be considered separately. For example, `['0.0.0.0/0']` allows any IPv4 address and blocks every IPv6 address. Conversely, `['::0']` allows any IPv6 address and blocks every IPv4 address.
- Keep in mind that the IP version in use depends on the network stack of the environment in which Parse Server runs. A local environment may use a different IP version than a remote environment. For example, it's possible that locally the value `['0.0.0.0/0']` allows the request IP because the environment is using IPv4, but when Parse Server is deployed remotely the request IP is blocked because the remote environment is using IPv6.
- When setting the option via an environment variable the notation is a comma-separated string, for example `\"0.0.0.0/0,::0\"`.
- IPv6 zone indices (`%` suffix) are not supported, for example `fe80::1%eth0`, `fe80::1%1` or `::1%lo`.

Defaults to `['127.0.0.1', '::1']` which means that only `localhost`, the server instance on which Parse Server runs, is allowed to use the maintenance key.", - action: parsers.arrayParser, - default: ['127.0.0.1', '::1'], - }, - masterKey: { - env: 'PARSE_SERVER_MASTER_KEY', - help: 'Your Parse Master Key', - required: true, - }, - masterKeyIps: { - env: 'PARSE_SERVER_MASTER_KEY_IPS', - help: "(Optional) Restricts the use of master key permissions to a list of IP addresses or ranges.

This option accepts a list of single IP addresses, for example `['10.0.0.1', '10.0.0.2']`. You can also use CIDR notation to specify an IP address range, for example `['10.0.1.0/24']`.

Special scenarios:
- Setting an empty array `[]` means that the master key cannot be used even in Parse Server Cloud Code. This value cannot be set via an environment variable as there is no way to pass an empty array to Parse Server via an environment variable.
- Setting `['0.0.0.0/0', '::0']` means to allow any IPv4 and IPv6 address to use the master key and effectively disables the IP filter.

Considerations:
- IPv4 and IPv6 addresses are not compared against each other. Each IP version (IPv4 and IPv6) needs to be considered separately. For example, `['0.0.0.0/0']` allows any IPv4 address and blocks every IPv6 address. Conversely, `['::0']` allows any IPv6 address and blocks every IPv4 address.
- Keep in mind that the IP version in use depends on the network stack of the environment in which Parse Server runs. A local environment may use a different IP version than a remote environment. For example, it's possible that locally the value `['0.0.0.0/0']` allows the request IP because the environment is using IPv4, but when Parse Server is deployed remotely the request IP is blocked because the remote environment is using IPv6.
- When setting the option via an environment variable the notation is a comma-separated string, for example `\"0.0.0.0/0,::0\"`.
- IPv6 zone indices (`%` suffix) are not supported, for example `fe80::1%eth0`, `fe80::1%1` or `::1%lo`.

Defaults to `['127.0.0.1', '::1']` which means that only `localhost`, the server instance on which Parse Server runs, is allowed to use the master key.", - action: parsers.arrayParser, - default: ['127.0.0.1', '::1'], - }, - masterKeyTtl: { - env: 'PARSE_SERVER_MASTER_KEY_TTL', - help: '(Optional) The duration in seconds for which the current `masterKey` is being used before it is requested again if `masterKey` is set to a function. If `masterKey` is not set to a function, this option has no effect. Default is `0`, which means the master key is requested by invoking the `masterKey` function every time the master key is used internally by Parse Server.', - action: parsers.numberParser('masterKeyTtl'), - }, - maxLimit: { - env: 'PARSE_SERVER_MAX_LIMIT', - help: 'Max value for limit option on queries, defaults to unlimited', - action: parsers.numberParser('maxLimit'), - }, - maxLogFiles: { - env: 'PARSE_SERVER_MAX_LOG_FILES', - help: "Maximum number of logs to keep. If not set, no logs will be removed. This can be a number of files or number of days. If using days, add 'd' as the suffix. (default: null)", - action: parsers.numberOrStringParser('maxLogFiles'), - }, - maxUploadSize: { - env: 'PARSE_SERVER_MAX_UPLOAD_SIZE', - help: 'Max file size for uploads, defaults to 20mb', - default: '20mb', - }, - middleware: { - env: 'PARSE_SERVER_MIDDLEWARE', - help: 'middleware for express server, can be string or function', - }, - mountGraphQL: { - env: 'PARSE_SERVER_MOUNT_GRAPHQL', - help: 'Mounts the GraphQL endpoint', - action: parsers.booleanParser, - default: false, - }, - mountPath: { - env: 'PARSE_SERVER_MOUNT_PATH', - help: 'Mount path for the server, defaults to /parse', - default: '/parse', - }, - mountPlayground: { - env: 'PARSE_SERVER_MOUNT_PLAYGROUND', - help: 'Deprecated. Mounts the GraphQL Playground which is deprecated and will be removed in a future version. The playground exposes the master key in the browser. 
Use Parse Dashboard as GraphQL IDE or configure a third-party GraphQL client with custom request headers.', - action: parsers.booleanParser, - default: false, - }, - objectIdSize: { - env: 'PARSE_SERVER_OBJECT_ID_SIZE', - help: "Sets the number of characters in generated object id's, default 10", - action: parsers.numberParser('objectIdSize'), - default: 10, - }, - pages: { - env: 'PARSE_SERVER_PAGES', - help: 'The options for pages such as password reset and email verification.', - action: parsers.objectParser, - type: 'PagesOptions', - default: {}, - }, - passwordPolicy: { - env: 'PARSE_SERVER_PASSWORD_POLICY', - help: 'The password policy for enforcing password related rules.', - action: parsers.objectParser, - type: 'PasswordPolicyOptions', - }, - playgroundPath: { - env: 'PARSE_SERVER_PLAYGROUND_PATH', - help: 'Deprecated. Mount path for the GraphQL Playground. The playground is deprecated and will be removed in a future version.', - default: '/playground', - }, - port: { - env: 'PORT', - help: 'The port to run the ParseServer, defaults to 1337.', - action: parsers.numberParser('port'), - default: 1337, - }, - preserveFileName: { - env: 'PARSE_SERVER_PRESERVE_FILE_NAME', - help: 'Enable (or disable) the addition of a unique hash to the file names', - action: parsers.booleanParser, - default: false, - }, - preventLoginWithUnverifiedEmail: { - env: 'PARSE_SERVER_PREVENT_LOGIN_WITH_UNVERIFIED_EMAIL', - help: "Set to `true` to prevent a user from logging in if the email has not yet been verified and email verification is required. Supports a function with a return value of `true` or `false` for conditional prevention. The function receives a request object that includes `createdWith` to indicate whether the invocation is for `signup` or `login` and the used auth provider.

The `createdWith` values per scenario:
  • Password signup: `{ action: 'signup', authProvider: 'password' }`
  • Auth provider signup: `{ action: 'signup', authProvider: '' }`
  • Password login: `{ action: 'login', authProvider: 'password' }`
  • Auth provider login: function not invoked; auth provider login bypasses email verification
Default is `false`.
Requires option `verifyUserEmails: true`.", - action: parsers.booleanOrFunctionParser, - default: false, - }, - preventSignupWithUnverifiedEmail: { - env: 'PARSE_SERVER_PREVENT_SIGNUP_WITH_UNVERIFIED_EMAIL', - help: "If set to `true` it prevents a user from signing up if the email has not yet been verified and email verification is required. In that case the server responds to the sign-up with HTTP status 400 and a Parse Error 205 `EMAIL_NOT_FOUND`. If set to `false` the server responds with HTTP status 200, and client SDKs return an unauthenticated Parse User without session token. In that case subsequent requests fail until the user's email address is verified.

Default is `false`.
Requires option `verifyUserEmails: true`.", - action: parsers.booleanParser, - default: false, - }, - protectedFields: { - env: 'PARSE_SERVER_PROTECTED_FIELDS', - help: 'Protected fields that should be treated with extra security when fetching details.', - action: parsers.objectParser, - default: { - _User: { - '*': ['email'], - }, - }, - }, - publicServerURL: { - env: 'PARSE_PUBLIC_SERVER_URL', - help: 'Optional. The public URL to Parse Server. This URL will be used to reach Parse Server publicly for features like password reset and email verification links. The option can be set to a string or a function that can be asynchronously resolved. The returned URL string must start with `http://` or `https://`.', - }, - push: { - env: 'PARSE_SERVER_PUSH', - help: 'Configuration for push, as stringified JSON. See http://docs.parseplatform.org/parse-server/guide/#push-notifications', - action: parsers.objectParser, - }, - rateLimit: { - env: 'PARSE_SERVER_RATE_LIMIT', - help: "Options to limit repeated requests to Parse Server APIs. This can be used to protect sensitive endpoints such as `/requestPasswordReset` from brute-force attacks or Parse Server as a whole from denial-of-service (DoS) attacks.

\u2139\uFE0F Mind the following limitations:
- rate limits applied per IP address; this limits protection against distributed denial-of-service (DDoS) attacks where many requests are coming from various IP addresses
- if multiple Parse Server instances are behind a load balancer or ran in a cluster, each instance will calculate it's own request rates, independent from other instances; this limits the applicability of this feature when using a load balancer and another rate limiting solution that takes requests across all instances into account may be more suitable
- this feature provides basic protection against denial-of-service attacks, but a more sophisticated solution works earlier in the request flow and prevents a malicious requests to even reach a server instance; it's therefore recommended to implement a solution according to architecture and user case.", - action: parsers.arrayParser, - type: 'RateLimitOptions[]', - default: [], - }, - readOnlyMasterKey: { - env: 'PARSE_SERVER_READ_ONLY_MASTER_KEY', - help: 'Read-only key, which has the same capabilities as MasterKey without writes', - }, - readOnlyMasterKeyIps: { - env: 'PARSE_SERVER_READ_ONLY_MASTER_KEY_IPS', - help: "(Optional) Restricts the use of read-only master key permissions to a list of IP addresses or ranges.

This option accepts a list of single IP addresses, for example `['10.0.0.1', '10.0.0.2']`. You can also use CIDR notation to specify an IP address range, for example `['10.0.1.0/24']`.

Special scenarios:
- Setting an empty array `[]` means that the read-only master key cannot be used even in Parse Server Cloud Code. This value cannot be set via an environment variable as there is no way to pass an empty array to Parse Server via an environment variable.
- Setting `['0.0.0.0/0', '::0']` means to allow any IPv4 and IPv6 address to use the read-only master key and effectively disables the IP filter.

Considerations:
- IPv4 and IPv6 addresses are not compared against each other. Each IP version (IPv4 and IPv6) needs to be considered separately. For example, `['0.0.0.0/0']` allows any IPv4 address and blocks every IPv6 address. Conversely, `['::0']` allows any IPv6 address and blocks every IPv4 address.
- Keep in mind that the IP version in use depends on the network stack of the environment in which Parse Server runs. A local environment may use a different IP version than a remote environment. For example, it's possible that locally the value `['0.0.0.0/0']` allows the request IP because the environment is using IPv4, but when Parse Server is deployed remotely the request IP is blocked because the remote environment is using IPv6.
- When setting the option via an environment variable the notation is a comma-separated string, for example `\"0.0.0.0/0,::0\"`.
- IPv6 zone indices (`%` suffix) are not supported, for example `fe80::1%eth0`, `fe80::1%1` or `::1%lo`.

Defaults to `['0.0.0.0/0', '::0']` which means that any IP address is allowed to use the read-only master key. It is recommended to set this option to `['127.0.0.1', '::1']` to restrict access to `localhost`.", - action: parsers.arrayParser, - default: ['0.0.0.0/0', '::0'], - }, - requestComplexity: { - env: 'PARSE_SERVER_REQUEST_COMPLEXITY', - help: 'Options to limit the complexity of requests to prevent denial-of-service attacks. Limits are enforced for all requests except those using the master or maintenance key. Each property can be set to `-1` to disable that specific limit.', - action: parsers.objectParser, - type: 'RequestComplexityOptions', - default: {}, - }, - requestContextMiddleware: { - env: 'PARSE_SERVER_REQUEST_CONTEXT_MIDDLEWARE', - help: 'Options to customize the request context using inversion of control/dependency injection.', - }, - requestKeywordDenylist: { - env: 'PARSE_SERVER_REQUEST_KEYWORD_DENYLIST', - help: 'An array of keys and values that are prohibited in database read and write requests to prevent potential security vulnerabilities. It is possible to specify only a key (`{"key":"..."}`), only a value (`{"value":"..."}`) or a key-value pair (`{"key":"...","value":"..."}`). The specification can use the following types: `boolean`, `numeric` or `string`, where `string` will be interpreted as a regex notation. Request data is deep-scanned for matching definitions to detect also any nested occurrences. Defaults are patterns that are likely to be used in malicious requests. 
Setting this option will override the default patterns.', - action: parsers.arrayParser, - default: [ - { - key: '_bsontype', - value: 'Code', - }, - { - key: 'constructor', - }, - { - key: '__proto__', - }, - ], - }, - restAPIKey: { - env: 'PARSE_SERVER_REST_API_KEY', - help: 'Key for REST calls', - }, - revokeSessionOnPasswordReset: { - env: 'PARSE_SERVER_REVOKE_SESSION_ON_PASSWORD_RESET', - help: "When a user changes their password, either through the reset password email or while logged in, all sessions are revoked if this is true. Set to false if you don't want to revoke sessions.", - action: parsers.booleanParser, - default: true, - }, - scheduledPush: { - env: 'PARSE_SERVER_SCHEDULED_PUSH', - help: 'Configuration for push scheduling, defaults to false.', - action: parsers.booleanParser, - default: false, - }, - schema: { - env: 'PARSE_SERVER_SCHEMA', - help: 'Defined schema', - action: parsers.objectParser, - type: 'SchemaOptions', - }, - security: { - env: 'PARSE_SERVER_SECURITY', - help: 'The security options to identify and report weak security settings.', - action: parsers.objectParser, - type: 'SecurityOptions', - default: {}, - }, - sendUserEmailVerification: { - env: 'PARSE_SERVER_SEND_USER_EMAIL_VERIFICATION', - help: 'Set to `false` to prevent sending of verification email. Supports a function with a return value of `true` or `false` for conditional email sending.

Default is `true`.
', - action: parsers.booleanOrFunctionParser, - default: true, - }, - serverCloseComplete: { - env: 'PARSE_SERVER_SERVER_CLOSE_COMPLETE', - help: 'Callback when server has closed', - }, - serverURL: { - env: 'PARSE_SERVER_URL', - help: 'The URL to Parse Server.

\u26A0\uFE0F Certain server features or adapters may require Parse Server to be able to call itself by making requests to the URL set in `serverURL`. If a feature requires this, it is mentioned in the documentation. In that case ensure that the URL is accessible from the server itself.', - required: true, - }, - sessionLength: { - env: 'PARSE_SERVER_SESSION_LENGTH', - help: 'Session duration, in seconds, defaults to 1 year', - action: parsers.numberParser('sessionLength'), - default: 31536000, - }, - silent: { - env: 'SILENT', - help: 'Disables console output', - action: parsers.booleanParser, - }, - startLiveQueryServer: { - env: 'PARSE_SERVER_START_LIVE_QUERY_SERVER', - help: 'Starts the liveQuery server', - action: parsers.booleanParser, - }, - trustProxy: { - env: 'PARSE_SERVER_TRUST_PROXY', - help: 'The trust proxy settings. It is important to understand the exact setup of the reverse proxy, since this setting will trust values provided in the Parse Server API request. See the express trust proxy settings documentation. Defaults to `false`.', - action: parsers.objectParser, - default: [], - }, - userSensitiveFields: { - env: 'PARSE_SERVER_USER_SENSITIVE_FIELDS', - help: 'Personally identifiable information fields in the user table the should be removed for non-authorized users. Deprecated @see protectedFields', - action: parsers.arrayParser, - }, - verbose: { - env: 'VERBOSE', - help: 'Set the logging to verbose', - action: parsers.booleanParser, - }, - verifyServerUrl: { - env: 'PARSE_SERVER_VERIFY_SERVER_URL', - help: 'Parse Server makes a HTTP request to the URL set in `serverURL` at the end of its launch routine to verify that the launch succeeded. If this option is set to `false`, the verification will be skipped. This can be useful in environments where the server URL is not accessible from the server itself, such as when running behind a firewall or in certain containerized environments.

\u26A0\uFE0F Server URL verification requires Parse Server to be able to call itself by making requests to the URL set in `serverURL`.

Default is `true`.', - action: parsers.booleanParser, - default: true, - }, - verifyUserEmails: { - env: 'PARSE_SERVER_VERIFY_USER_EMAILS', - help: "Set to `true` to require users to verify their email address to complete the sign-up process. Supports a function with a return value of `true` or `false` for conditional verification. The function receives a request object that includes `createdWith` to indicate whether the invocation is for `signup` or `login` and the used auth provider.

The `createdWith` values per scenario:
  • Password signup: `{ action: 'signup', authProvider: 'password' }`
  • Auth provider signup: `{ action: 'signup', authProvider: '' }`
  • Password login: `{ action: 'login', authProvider: 'password' }`
  • Auth provider login: function not invoked; auth provider login bypasses email verification
  • Resend verification email: `createdWith` is `undefined`; use the `resendRequest` property to identify those
Default is `false`.", - action: parsers.booleanOrFunctionParser, - default: false, - }, - webhookKey: { - env: 'PARSE_SERVER_WEBHOOK_KEY', - help: 'Webhook key for authenticating external cloud code process requests. Required when cloudCodeCommand is set.', - }, + "accountLockout": { + "env": "PARSE_SERVER_ACCOUNT_LOCKOUT", + "help": "The account lockout policy for failed login attempts.

Note: Setting a user's ACL to an empty object `{}` via master key is a separate mechanism that only prevents new logins; it does not invalidate existing session tokens. To immediately revoke a user's access, destroy their sessions via master key in addition to setting the ACL.", + "action": parsers.objectParser, + "type": "AccountLockoutOptions" + }, + "allowClientClassCreation": { + "env": "PARSE_SERVER_ALLOW_CLIENT_CLASS_CREATION", + "help": "Enable (or disable) client class creation, defaults to false", + "action": parsers.booleanParser, + "default": false + }, + "allowCustomObjectId": { + "env": "PARSE_SERVER_ALLOW_CUSTOM_OBJECT_ID", + "help": "Enable (or disable) custom objectId", + "action": parsers.booleanParser, + "default": false + }, + "allowExpiredAuthDataToken": { + "env": "PARSE_SERVER_ALLOW_EXPIRED_AUTH_DATA_TOKEN", + "help": "Allow a user to log in even if the 3rd party authentication token that was used to sign in to their account has expired. If this is set to `false`, then the token will be validated every time the user signs in to their account. This refers to the token that is stored in the `_User.authData` field. Defaults to `false`.", + "action": parsers.booleanParser, + "default": false + }, + "allowHeaders": { + "env": "PARSE_SERVER_ALLOW_HEADERS", + "help": "Add headers to Access-Control-Allow-Headers", + "action": parsers.arrayParser + }, + "allowOrigin": { + "env": "PARSE_SERVER_ALLOW_ORIGIN", + "help": "Sets origins for Access-Control-Allow-Origin. 
This can be a string for a single origin or an array of strings for multiple origins.", + "action": parsers.arrayParser + }, + "analyticsAdapter": { + "env": "PARSE_SERVER_ANALYTICS_ADAPTER", + "help": "Adapter module for the analytics", + "action": parsers.moduleOrObjectParser + }, + "appId": { + "env": "PARSE_SERVER_APPLICATION_ID", + "help": "Your Parse Application ID", + "required": true + }, + "appName": { + "env": "PARSE_SERVER_APP_NAME", + "help": "Sets the app name" + }, + "auth": { + "env": "PARSE_SERVER_AUTH_PROVIDERS", + "help": "Configuration for your authentication providers, as stringified JSON. See http://docs.parseplatform.org/parse-server/guide/#oauth-and-3rd-party-authentication

Provider names must start with a letter and contain only letters, digits, and underscores (`/^[A-Za-z][A-Za-z0-9_]*$/`). This is because each provider name is used to construct a database field (`_auth_data_`), which must comply with Parse Server's field naming rules.", + "action": parsers.objectParser + }, + "cacheAdapter": { + "env": "PARSE_SERVER_CACHE_ADAPTER", + "help": "Adapter module for the cache", + "action": parsers.moduleOrObjectParser + }, + "cacheMaxSize": { + "env": "PARSE_SERVER_CACHE_MAX_SIZE", + "help": "Sets the maximum size for the in memory cache, defaults to 10000", + "action": parsers.numberParser("cacheMaxSize"), + "default": 10000 + }, + "cacheTTL": { + "env": "PARSE_SERVER_CACHE_TTL", + "help": "Sets the TTL for the in memory cache (in ms), defaults to 5000 (5 seconds)", + "action": parsers.numberParser("cacheTTL"), + "default": 5000 + }, + "clientKey": { + "env": "PARSE_SERVER_CLIENT_KEY", + "help": "Key for iOS, MacOS, tvOS clients" + }, + "cloud": { + "env": "PARSE_SERVER_CLOUD", + "help": "Full path to your cloud code main.js" + }, + "cloudCodeAdapters": { + "env": "PARSE_SERVER_CLOUD_CODE_ADAPTERS", + "help": "Array of CloudCodeAdapter instances for BYO cloud code integration", + "action": parsers.objectParser + }, + "cloudCodeCommand": { + "env": "PARSE_SERVER_CLOUD_CODE_COMMAND", + "help": "Shell command to spawn an external cloud code process (ParseCloud/1.0 protocol)" + }, + "cloudCodeOptions": { + "env": "PARSE_SERVER_CLOUD_CODE_OPTIONS", + "help": "Options for the external cloud code process adapter", + "action": parsers.objectParser + }, + "cluster": { + "env": "PARSE_SERVER_CLUSTER", + "help": "Run with cluster, optionally set the number of processes default to os.cpus().length", + "action": parsers.numberOrBooleanParser + }, + "collectionPrefix": { + "env": "PARSE_SERVER_COLLECTION_PREFIX", + "help": "A collection prefix for the classes", + "default": "" + }, + "convertEmailToLowercase": { + "env": 
"PARSE_SERVER_CONVERT_EMAIL_TO_LOWERCASE", + "help": "Optional. If set to `true`, the `email` property of a user is automatically converted to lowercase before being stored in the database. Consequently, queries must match the case as stored in the database, which would be lowercase in this scenario. If `false`, the `email` property is stored as set, without any case modifications. Default is `false`.", + "action": parsers.booleanParser, + "default": false + }, + "convertUsernameToLowercase": { + "env": "PARSE_SERVER_CONVERT_USERNAME_TO_LOWERCASE", + "help": "Optional. If set to `true`, the `username` property of a user is automatically converted to lowercase before being stored in the database. Consequently, queries must match the case as stored in the database, which would be lowercase in this scenario. If `false`, the `username` property is stored as set, without any case modifications. Default is `false`.", + "action": parsers.booleanParser, + "default": false + }, + "customPages": { + "env": "PARSE_SERVER_CUSTOM_PAGES", + "help": "custom pages for password validation and reset", + "action": parsers.objectParser, + "type": "CustomPagesOptions", + "default": {} + }, + "databaseAdapter": { + "env": "PARSE_SERVER_DATABASE_ADAPTER", + "help": "Adapter module for the database; any options that are not explicitly described here are passed directly to the database client.", + "action": parsers.moduleOrObjectParser + }, + "databaseOptions": { + "env": "PARSE_SERVER_DATABASE_OPTIONS", + "help": "Options to pass to the database client", + "action": parsers.objectParser, + "type": "DatabaseOptions" + }, + "databaseURI": { + "env": "PARSE_SERVER_DATABASE_URI", + "help": "The full URI to your database. 
Supported databases are mongodb or postgres.", + "required": true, + "default": "mongodb://localhost:27017/parse" + }, + "defaultLimit": { + "env": "PARSE_SERVER_DEFAULT_LIMIT", + "help": "Default value for limit option on queries, defaults to `100`.", + "action": parsers.numberParser("defaultLimit"), + "default": 100 + }, + "directAccess": { + "env": "PARSE_SERVER_DIRECT_ACCESS", + "help": "Set to `true` if Parse requests within the same Node.js environment as Parse Server should be routed to Parse Server directly instead of via the HTTP interface. Default is `true`.

If set to `false` then Parse requests within the same Node.js environment as Parse Server are executed as HTTP requests sent to Parse Server via the `serverURL`. For example, a `Parse.Query` in Cloud Code is calling Parse Server via a HTTP request. The server is essentially making a HTTP request to itself, unnecessarily using network resources such as network ports.

\u26A0\uFE0F In environments where multiple Parse Server instances run behind a load balancer and Parse requests within the current Node.js environment should be routed via the load balancer and distributed as HTTP requests among all instances via the `serverURL`, this should be set to `false`.", + "action": parsers.booleanParser, + "default": true + }, + "dotNetKey": { + "env": "PARSE_SERVER_DOT_NET_KEY", + "help": "Key for Unity and .Net SDK" + }, + "emailAdapter": { + "env": "PARSE_SERVER_EMAIL_ADAPTER", + "help": "Adapter module for email sending", + "action": parsers.moduleOrObjectParser + }, + "emailVerifySuccessOnInvalidEmail": { + "env": "PARSE_SERVER_EMAIL_VERIFY_SUCCESS_ON_INVALID_EMAIL", + "help": "Set to `true` if a request to verify the email should return a success response even if the provided email address does not belong to a verifiable account, for example because it is unknown or already verified, or `false` if the request should return an error response in those cases.

Default is `true`.
Requires option `verifyUserEmails: true`.", + "action": parsers.booleanParser, + "default": true + }, + "emailVerifyTokenReuseIfValid": { + "env": "PARSE_SERVER_EMAIL_VERIFY_TOKEN_REUSE_IF_VALID", + "help": "Set to `true` if an email verification token should be reused in case another token is requested but there is a token that is still valid, i.e. has not expired. This avoids the often observed issue that a user requests multiple emails and does not know which link contains a valid token because each newly generated token would invalidate the previous token.

Default is `false`.
Requires option `verifyUserEmails: true`.", + "action": parsers.booleanParser, + "default": false + }, + "emailVerifyTokenValidityDuration": { + "env": "PARSE_SERVER_EMAIL_VERIFY_TOKEN_VALIDITY_DURATION", + "help": "Set the validity duration of the email verification token in seconds after which the token expires. The token is used in the link that is set in the email. After the token expires, the link becomes invalid and a new link has to be sent. If the option is not set or set to `undefined`, then the token never expires.

For example, to expire the token after 2 hours, set a value of 7200 seconds (= 60 seconds * 60 minutes * 2 hours).

Default is `undefined`.
Requires option `verifyUserEmails: true`.", + "action": parsers.numberParser("emailVerifyTokenValidityDuration") + }, + "enableAnonymousUsers": { + "env": "PARSE_SERVER_ENABLE_ANON_USERS", + "help": "Enable (or disable) anonymous users, defaults to true", + "action": parsers.booleanParser, + "default": true + }, + "enableCollationCaseComparison": { + "env": "PARSE_SERVER_ENABLE_COLLATION_CASE_COMPARISON", + "help": "Optional. If set to `true`, the collation rule of case comparison for queries and indexes is enabled. Enable this option to run Parse Server with MongoDB Atlas Serverless or AWS Amazon DocumentDB. If `false`, the collation rule of case comparison is disabled. Default is `false`.", + "action": parsers.booleanParser, + "default": false + }, + "enableExpressErrorHandler": { + "env": "PARSE_SERVER_ENABLE_EXPRESS_ERROR_HANDLER", + "help": "Enables the default express error handler for all errors", + "action": parsers.booleanParser, + "default": false + }, + "enableInsecureAuthAdapters": { + "env": "PARSE_SERVER_ENABLE_INSECURE_AUTH_ADAPTERS", + "help": "Optional. Enables insecure authentication adapters. Insecure auth adapters are deprecated and will be removed in a future version. Defaults to `false`.", + "action": parsers.booleanParser, + "default": false + }, + "enableSanitizedErrorResponse": { + "env": "PARSE_SERVER_ENABLE_SANITIZED_ERROR_RESPONSE", + "help": "If set to `true`, error details are removed from error messages in responses to client requests, and instead a generic error message is sent. 
Default is `true`.", + "action": parsers.booleanParser, + "default": true + }, + "encryptionKey": { + "env": "PARSE_SERVER_ENCRYPTION_KEY", + "help": "Key for encrypting your files" + }, + "enforcePrivateUsers": { + "env": "PARSE_SERVER_ENFORCE_PRIVATE_USERS", + "help": "Set to true if new users should be created without public read and write access.", + "action": parsers.booleanParser, + "default": true + }, + "expireInactiveSessions": { + "env": "PARSE_SERVER_EXPIRE_INACTIVE_SESSIONS", + "help": "Sets whether we should expire the inactive sessions, defaults to true. If false, all new sessions are created with no expiration date.", + "action": parsers.booleanParser, + "default": true + }, + "extendSessionOnUse": { + "env": "PARSE_SERVER_EXTEND_SESSION_ON_USE", + "help": "Whether Parse Server should automatically extend a valid session by the sessionLength. In order to reduce the number of session updates in the database, a session will only be extended when a request is received after at least half of the current session's lifetime has passed.", + "action": parsers.booleanParser, + "default": false + }, + "fileKey": { + "env": "PARSE_SERVER_FILE_KEY", + "help": "Key for your files" + }, + "filesAdapter": { + "env": "PARSE_SERVER_FILES_ADAPTER", + "help": "Adapter module for the files sub-system", + "action": parsers.moduleOrObjectParser + }, + "fileUpload": { + "env": "PARSE_SERVER_FILE_UPLOAD_OPTIONS", + "help": "Options for file uploads", + "action": parsers.objectParser, + "type": "FileUploadOptions", + "default": {} + }, + "graphQLPath": { + "env": "PARSE_SERVER_GRAPHQL_PATH", + "help": "The mount path for the GraphQL endpoint

\u26A0\uFE0F File upload inside the GraphQL mutation system requires Parse Server to be able to call itself by making requests to the URL set in `serverURL`.

Default is `/graphql`.", + "default": "/graphql" + }, + "graphQLPublicIntrospection": { + "env": "PARSE_SERVER_GRAPHQL_PUBLIC_INTROSPECTION", + "help": "Enable public introspection for the GraphQL endpoint, defaults to false", + "action": parsers.booleanParser, + "default": false + }, + "graphQLSchema": { + "env": "PARSE_SERVER_GRAPH_QLSCHEMA", + "help": "Full path to your GraphQL custom schema.graphql file" + }, + "host": { + "env": "PARSE_SERVER_HOST", + "help": "The host to serve ParseServer on, defaults to 0.0.0.0", + "default": "0.0.0.0" + }, + "idempotencyOptions": { + "env": "PARSE_SERVER_EXPERIMENTAL_IDEMPOTENCY_OPTIONS", + "help": "Options for request idempotency to deduplicate identical requests that may be caused by network issues. Caution, this is an experimental feature that may not be appropriate for production.", + "action": parsers.objectParser, + "type": "IdempotencyOptions", + "default": {} + }, + "javascriptKey": { + "env": "PARSE_SERVER_JAVASCRIPT_KEY", + "help": "Key for the Javascript SDK" + }, + "jsonLogs": { + "env": "JSON_LOGS", + "help": "Log as structured JSON objects", + "action": parsers.booleanParser + }, + "liveQuery": { + "env": "PARSE_SERVER_LIVE_QUERY", + "help": "parse-server's LiveQuery configuration object", + "action": parsers.objectParser, + "type": "LiveQueryOptions" + }, + "liveQueryServerOptions": { + "env": "PARSE_SERVER_LIVE_QUERY_SERVER_OPTIONS", + "help": "Live query server configuration options (will start the liveQuery server)", + "action": parsers.objectParser, + "type": "LiveQueryServerOptions" + }, + "loggerAdapter": { + "env": "PARSE_SERVER_LOGGER_ADAPTER", + "help": "Adapter module for the logging sub-system", + "action": parsers.moduleOrObjectParser + }, + "logLevel": { + "env": "PARSE_SERVER_LOG_LEVEL", + "help": "Sets the level for logs" + }, + "logLevels": { + "env": "PARSE_SERVER_LOG_LEVELS", + "help": "(Optional) Overrides the log levels used internally by Parse Server to log events.", + "action": 
parsers.objectParser, + "type": "LogLevels", + "default": {} + }, + "logsFolder": { + "env": "PARSE_SERVER_LOGS_FOLDER", + "help": "Folder for the logs (defaults to './logs'); set to null to disable file based logging", + "default": "./logs" + }, + "maintenanceKey": { + "env": "PARSE_SERVER_MAINTENANCE_KEY", + "help": "(Optional) The maintenance key is used for modifying internal and read-only fields of Parse Server.

\u26A0\uFE0F This key is not intended to be used as part of a regular operation of Parse Server. This key is intended to conduct out-of-band changes such as one-time migrations or data correction tasks. Internal fields are not officially documented and may change at any time without publication in release changelogs. We strongly advise not to rely on internal fields as part of your regular operation and to investigate the implications of any planned changes *directly in the source code* of your current version of Parse Server.", + "required": true + }, + "maintenanceKeyIps": { + "env": "PARSE_SERVER_MAINTENANCE_KEY_IPS", + "help": "(Optional) Restricts the use of maintenance key permissions to a list of IP addresses or ranges.

This option accepts a list of single IP addresses, for example `['10.0.0.1', '10.0.0.2']`. You can also use CIDR notation to specify an IP address range, for example `['10.0.1.0/24']`.

Special scenarios:
- Setting an empty array `[]` means that the maintenance key cannot be used even in Parse Server Cloud Code. This value cannot be set via an environment variable as there is no way to pass an empty array to Parse Server via an environment variable.
- Setting `['0.0.0.0/0', '::0']` means to allow any IPv4 and IPv6 address to use the maintenance key and effectively disables the IP filter.

Considerations:
- IPv4 and IPv6 addresses are not compared against each other. Each IP version (IPv4 and IPv6) needs to be considered separately. For example, `['0.0.0.0/0']` allows any IPv4 address and blocks every IPv6 address. Conversely, `['::0']` allows any IPv6 address and blocks every IPv4 address.
- Keep in mind that the IP version in use depends on the network stack of the environment in which Parse Server runs. A local environment may use a different IP version than a remote environment. For example, it's possible that locally the value `['0.0.0.0/0']` allows the request IP because the environment is using IPv4, but when Parse Server is deployed remotely the request IP is blocked because the remote environment is using IPv6.
- When setting the option via an environment variable the notation is a comma-separated string, for example `\"0.0.0.0/0,::0\"`.
- IPv6 zone indices (`%` suffix) are not supported, for example `fe80::1%eth0`, `fe80::1%1` or `::1%lo`.

Defaults to `['127.0.0.1', '::1']` which means that only `localhost`, the server instance on which Parse Server runs, is allowed to use the maintenance key.", + "action": parsers.arrayParser, + "default": ["127.0.0.1", "::1"] + }, + "masterKey": { + "env": "PARSE_SERVER_MASTER_KEY", + "help": "Your Parse Master Key", + "required": true + }, + "masterKeyIps": { + "env": "PARSE_SERVER_MASTER_KEY_IPS", + "help": "(Optional) Restricts the use of master key permissions to a list of IP addresses or ranges.

This option accepts a list of single IP addresses, for example `['10.0.0.1', '10.0.0.2']`. You can also use CIDR notation to specify an IP address range, for example `['10.0.1.0/24']`.

Special scenarios:
- Setting an empty array `[]` means that the master key cannot be used even in Parse Server Cloud Code. This value cannot be set via an environment variable as there is no way to pass an empty array to Parse Server via an environment variable.
- Setting `['0.0.0.0/0', '::0']` means to allow any IPv4 and IPv6 address to use the master key and effectively disables the IP filter.

Considerations:
- IPv4 and IPv6 addresses are not compared against each other. Each IP version (IPv4 and IPv6) needs to be considered separately. For example, `['0.0.0.0/0']` allows any IPv4 address and blocks every IPv6 address. Conversely, `['::0']` allows any IPv6 address and blocks every IPv4 address.
- Keep in mind that the IP version in use depends on the network stack of the environment in which Parse Server runs. A local environment may use a different IP version than a remote environment. For example, it's possible that locally the value `['0.0.0.0/0']` allows the request IP because the environment is using IPv4, but when Parse Server is deployed remotely the request IP is blocked because the remote environment is using IPv6.
- When setting the option via an environment variable the notation is a comma-separated string, for example `\"0.0.0.0/0,::0\"`.
- IPv6 zone indices (`%` suffix) are not supported, for example `fe80::1%eth0`, `fe80::1%1` or `::1%lo`.

Defaults to `['127.0.0.1', '::1']` which means that only `localhost`, the server instance on which Parse Server runs, is allowed to use the master key.", + "action": parsers.arrayParser, + "default": ["127.0.0.1", "::1"] + }, + "masterKeyTtl": { + "env": "PARSE_SERVER_MASTER_KEY_TTL", + "help": "(Optional) The duration in seconds for which the current `masterKey` is being used before it is requested again if `masterKey` is set to a function. If `masterKey` is not set to a function, this option has no effect. Default is `0`, which means the master key is requested by invoking the `masterKey` function every time the master key is used internally by Parse Server.", + "action": parsers.numberParser("masterKeyTtl") + }, + "maxLimit": { + "env": "PARSE_SERVER_MAX_LIMIT", + "help": "Max value for limit option on queries, defaults to unlimited", + "action": parsers.numberParser("maxLimit") + }, + "maxLogFiles": { + "env": "PARSE_SERVER_MAX_LOG_FILES", + "help": "Maximum number of logs to keep. If not set, no logs will be removed. This can be a number of files or number of days. If using days, add 'd' as the suffix. (default: null)", + "action": parsers.numberOrStringParser("maxLogFiles") + }, + "maxUploadSize": { + "env": "PARSE_SERVER_MAX_UPLOAD_SIZE", + "help": "Max file size for uploads, defaults to 20mb", + "default": "20mb" + }, + "middleware": { + "env": "PARSE_SERVER_MIDDLEWARE", + "help": "middleware for express server, can be string or function" + }, + "mountGraphQL": { + "env": "PARSE_SERVER_MOUNT_GRAPHQL", + "help": "Mounts the GraphQL endpoint", + "action": parsers.booleanParser, + "default": false + }, + "mountPath": { + "env": "PARSE_SERVER_MOUNT_PATH", + "help": "Mount path for the server, defaults to /parse", + "default": "/parse" + }, + "mountPlayground": { + "env": "PARSE_SERVER_MOUNT_PLAYGROUND", + "help": "Deprecated. Mounts the GraphQL Playground which is deprecated and will be removed in a future version. 
The playground exposes the master key in the browser. Use Parse Dashboard as GraphQL IDE or configure a third-party GraphQL client with custom request headers.", + "action": parsers.booleanParser, + "default": false + }, + "objectIdSize": { + "env": "PARSE_SERVER_OBJECT_ID_SIZE", + "help": "Sets the number of characters in generated object id's, default 10", + "action": parsers.numberParser("objectIdSize"), + "default": 10 + }, + "pages": { + "env": "PARSE_SERVER_PAGES", + "help": "The options for pages such as password reset and email verification.", + "action": parsers.objectParser, + "type": "PagesOptions", + "default": {} + }, + "passwordPolicy": { + "env": "PARSE_SERVER_PASSWORD_POLICY", + "help": "The password policy for enforcing password related rules.", + "action": parsers.objectParser, + "type": "PasswordPolicyOptions" + }, + "playgroundPath": { + "env": "PARSE_SERVER_PLAYGROUND_PATH", + "help": "Deprecated. Mount path for the GraphQL Playground. The playground is deprecated and will be removed in a future version.", + "default": "/playground" + }, + "port": { + "env": "PORT", + "help": "The port to run the ParseServer, defaults to 1337.", + "action": parsers.numberParser("port"), + "default": 1337 + }, + "preserveFileName": { + "env": "PARSE_SERVER_PRESERVE_FILE_NAME", + "help": "Enable (or disable) the addition of a unique hash to the file names", + "action": parsers.booleanParser, + "default": false + }, + "preventLoginWithUnverifiedEmail": { + "env": "PARSE_SERVER_PREVENT_LOGIN_WITH_UNVERIFIED_EMAIL", + "help": "Set to `true` to prevent a user from logging in if the email has not yet been verified and email verification is required. Supports a function with a return value of `true` or `false` for conditional prevention. The function receives a request object that includes `createdWith` to indicate whether the invocation is for `signup` or `login` and the used auth provider.

The `createdWith` values per scenario:
  • Password signup: `{ action: 'signup', authProvider: 'password' }`
  • Auth provider signup: `{ action: 'signup', authProvider: '' }`
  • Password login: `{ action: 'login', authProvider: 'password' }`
  • Auth provider login: function not invoked; auth provider login bypasses email verification
Default is `false`.
Requires option `verifyUserEmails: true`.", + "action": parsers.booleanOrFunctionParser, + "default": false + }, + "preventSignupWithUnverifiedEmail": { + "env": "PARSE_SERVER_PREVENT_SIGNUP_WITH_UNVERIFIED_EMAIL", + "help": "If set to `true` it prevents a user from signing up if the email has not yet been verified and email verification is required. In that case the server responds to the sign-up with HTTP status 400 and a Parse Error 205 `EMAIL_NOT_FOUND`. If set to `false` the server responds with HTTP status 200, and client SDKs return an unauthenticated Parse User without session token. In that case subsequent requests fail until the user's email address is verified.

Default is `false`.
Requires option `verifyUserEmails: true`.", + "action": parsers.booleanParser, + "default": false + }, + "protectedFields": { + "env": "PARSE_SERVER_PROTECTED_FIELDS", + "help": "Protected fields that should be treated with extra security when fetching details.", + "action": parsers.objectParser, + "default": { + "_User": { + "*": ["email"] + } + } + }, + "publicServerURL": { + "env": "PARSE_PUBLIC_SERVER_URL", + "help": "Optional. The public URL to Parse Server. This URL will be used to reach Parse Server publicly for features like password reset and email verification links. The option can be set to a string or a function that can be asynchronously resolved. The returned URL string must start with `http://` or `https://`." + }, + "push": { + "env": "PARSE_SERVER_PUSH", + "help": "Configuration for push, as stringified JSON. See http://docs.parseplatform.org/parse-server/guide/#push-notifications", + "action": parsers.objectParser + }, + "rateLimit": { + "env": "PARSE_SERVER_RATE_LIMIT", + "help": "Options to limit repeated requests to Parse Server APIs. This can be used to protect sensitive endpoints such as `/requestPasswordReset` from brute-force attacks or Parse Server as a whole from denial-of-service (DoS) attacks.

\u2139\uFE0F Mind the following limitations:
- rate limits are applied per IP address; this limits protection against distributed denial-of-service (DDoS) attacks where many requests are coming from various IP addresses
- if multiple Parse Server instances are behind a load balancer or run in a cluster, each instance will calculate its own request rates, independent from other instances; this limits the applicability of this feature when using a load balancer and another rate limiting solution that takes requests across all instances into account may be more suitable
- this feature provides basic protection against denial-of-service attacks, but a more sophisticated solution works earlier in the request flow and prevents malicious requests from even reaching a server instance; it's therefore recommended to implement a solution according to architecture and use case.", + "action": parsers.arrayParser, + "type": "RateLimitOptions[]", + "default": [] + }, + "readOnlyMasterKey": { + "env": "PARSE_SERVER_READ_ONLY_MASTER_KEY", + "help": "Read-only key, which has the same capabilities as MasterKey without writes" + }, + "readOnlyMasterKeyIps": { + "env": "PARSE_SERVER_READ_ONLY_MASTER_KEY_IPS", + "help": "(Optional) Restricts the use of read-only master key permissions to a list of IP addresses or ranges.

This option accepts a list of single IP addresses, for example `['10.0.0.1', '10.0.0.2']`. You can also use CIDR notation to specify an IP address range, for example `['10.0.1.0/24']`.

Special scenarios:
- Setting an empty array `[]` means that the read-only master key cannot be used even in Parse Server Cloud Code. This value cannot be set via an environment variable as there is no way to pass an empty array to Parse Server via an environment variable.
- Setting `['0.0.0.0/0', '::0']` means to allow any IPv4 and IPv6 address to use the read-only master key and effectively disables the IP filter.

Considerations:
- IPv4 and IPv6 addresses are not compared against each other. Each IP version (IPv4 and IPv6) needs to be considered separately. For example, `['0.0.0.0/0']` allows any IPv4 address and blocks every IPv6 address. Conversely, `['::0']` allows any IPv6 address and blocks every IPv4 address.
- Keep in mind that the IP version in use depends on the network stack of the environment in which Parse Server runs. A local environment may use a different IP version than a remote environment. For example, it's possible that locally the value `['0.0.0.0/0']` allows the request IP because the environment is using IPv4, but when Parse Server is deployed remotely the request IP is blocked because the remote environment is using IPv6.
- When setting the option via an environment variable the notation is a comma-separated string, for example `\"0.0.0.0/0,::0\"`.
- IPv6 zone indices (`%` suffix) are not supported, for example `fe80::1%eth0`, `fe80::1%1` or `::1%lo`.

Defaults to `['0.0.0.0/0', '::0']` which means that any IP address is allowed to use the read-only master key. It is recommended to set this option to `['127.0.0.1', '::1']` to restrict access to `localhost`.", + "action": parsers.arrayParser, + "default": ["0.0.0.0/0", "::0"] + }, + "requestComplexity": { + "env": "PARSE_SERVER_REQUEST_COMPLEXITY", + "help": "Options to limit the complexity of requests to prevent denial-of-service attacks. Limits are enforced for all requests except those using the master or maintenance key. Each property can be set to `-1` to disable that specific limit.", + "action": parsers.objectParser, + "type": "RequestComplexityOptions", + "default": {} + }, + "requestContextMiddleware": { + "env": "PARSE_SERVER_REQUEST_CONTEXT_MIDDLEWARE", + "help": "Options to customize the request context using inversion of control/dependency injection." + }, + "requestKeywordDenylist": { + "env": "PARSE_SERVER_REQUEST_KEYWORD_DENYLIST", + "help": "An array of keys and values that are prohibited in database read and write requests to prevent potential security vulnerabilities. It is possible to specify only a key (`{\"key\":\"...\"}`), only a value (`{\"value\":\"...\"}`) or a key-value pair (`{\"key\":\"...\",\"value\":\"...\"}`). The specification can use the following types: `boolean`, `numeric` or `string`, where `string` will be interpreted as a regex notation. Request data is deep-scanned for matching definitions to detect also any nested occurrences. Defaults are patterns that are likely to be used in malicious requests. 
Setting this option will override the default patterns.", + "action": parsers.arrayParser, + "default": [{ + key: "_bsontype", + value: "Code" + }, { + key: "constructor" + }, { + key: "__proto__" + }] + }, + "restAPIKey": { + "env": "PARSE_SERVER_REST_API_KEY", + "help": "Key for REST calls" + }, + "revokeSessionOnPasswordReset": { + "env": "PARSE_SERVER_REVOKE_SESSION_ON_PASSWORD_RESET", + "help": "When a user changes their password, either through the reset password email or while logged in, all sessions are revoked if this is true. Set to false if you don't want to revoke sessions.", + "action": parsers.booleanParser, + "default": true + }, + "scheduledPush": { + "env": "PARSE_SERVER_SCHEDULED_PUSH", + "help": "Configuration for push scheduling, defaults to false.", + "action": parsers.booleanParser, + "default": false + }, + "schema": { + "env": "PARSE_SERVER_SCHEMA", + "help": "Defined schema", + "action": parsers.objectParser, + "type": "SchemaOptions" + }, + "security": { + "env": "PARSE_SERVER_SECURITY", + "help": "The security options to identify and report weak security settings.", + "action": parsers.objectParser, + "type": "SecurityOptions", + "default": {} + }, + "sendUserEmailVerification": { + "env": "PARSE_SERVER_SEND_USER_EMAIL_VERIFICATION", + "help": "Set to `false` to prevent sending of verification email. Supports a function with a return value of `true` or `false` for conditional email sending.

Default is `true`.
", + "action": parsers.booleanOrFunctionParser, + "default": true + }, + "serverCloseComplete": { + "env": "PARSE_SERVER_SERVER_CLOSE_COMPLETE", + "help": "Callback when server has closed" + }, + "serverURL": { + "env": "PARSE_SERVER_URL", + "help": "The URL to Parse Server.

\u26A0\uFE0F Certain server features or adapters may require Parse Server to be able to call itself by making requests to the URL set in `serverURL`. If a feature requires this, it is mentioned in the documentation. In that case ensure that the URL is accessible from the server itself.", + "required": true + }, + "sessionLength": { + "env": "PARSE_SERVER_SESSION_LENGTH", + "help": "Session duration, in seconds, defaults to 1 year", + "action": parsers.numberParser("sessionLength"), + "default": 31536000 + }, + "silent": { + "env": "SILENT", + "help": "Disables console output", + "action": parsers.booleanParser + }, + "startLiveQueryServer": { + "env": "PARSE_SERVER_START_LIVE_QUERY_SERVER", + "help": "Starts the liveQuery server", + "action": parsers.booleanParser + }, + "trustProxy": { + "env": "PARSE_SERVER_TRUST_PROXY", + "help": "The trust proxy settings. It is important to understand the exact setup of the reverse proxy, since this setting will trust values provided in the Parse Server API request. See the express trust proxy settings documentation. Defaults to `false`.", + "action": parsers.objectParser, + "default": [] + }, + "userSensitiveFields": { + "env": "PARSE_SERVER_USER_SENSITIVE_FIELDS", + "help": "Personally identifiable information fields in the user table that should be removed for non-authorized users. Deprecated @see protectedFields", + "action": parsers.arrayParser + }, + "verbose": { + "env": "VERBOSE", + "help": "Set the logging to verbose", + "action": parsers.booleanParser + }, + "verifyServerUrl": { + "env": "PARSE_SERVER_VERIFY_SERVER_URL", + "help": "Parse Server makes an HTTP request to the URL set in `serverURL` at the end of its launch routine to verify that the launch succeeded. If this option is set to `false`, the verification will be skipped. This can be useful in environments where the server URL is not accessible from the server itself, such as when running behind a firewall or in certain containerized environments.

\u26A0\uFE0F Server URL verification requires Parse Server to be able to call itself by making requests to the URL set in `serverURL`.

Default is `true`.", + "action": parsers.booleanParser, + "default": true + }, + "verifyUserEmails": { + "env": "PARSE_SERVER_VERIFY_USER_EMAILS", + "help": "Set to `true` to require users to verify their email address to complete the sign-up process. Supports a function with a return value of `true` or `false` for conditional verification. The function receives a request object that includes `createdWith` to indicate whether the invocation is for `signup` or `login` and the used auth provider.

The `createdWith` values per scenario:
  • Password signup: `{ action: 'signup', authProvider: 'password' }`
  • Auth provider signup: `{ action: 'signup', authProvider: '' }`
  • Password login: `{ action: 'login', authProvider: 'password' }`
  • Auth provider login: function not invoked; auth provider login bypasses email verification
  • Resend verification email: `createdWith` is `undefined`; use the `resendRequest` property to identify those
Default is `false`.", + "action": parsers.booleanOrFunctionParser, + "default": false + }, + "webhookKey": { + "env": "PARSE_SERVER_WEBHOOK_KEY", + "help": "Key for authenticating external cloud code process requests. Required when cloudCodeCommand is set." + } }; module.exports.RateLimitOptions = { - errorResponseMessage: { - env: 'PARSE_SERVER_RATE_LIMIT_ERROR_RESPONSE_MESSAGE', - help: 'The error message that should be returned in the body of the HTTP 429 response when the rate limit is hit. Default is `Too many requests.`.', - default: 'Too many requests.', - }, - includeInternalRequests: { - env: 'PARSE_SERVER_RATE_LIMIT_INCLUDE_INTERNAL_REQUESTS', - help: 'Optional, if `true` the rate limit will also apply to requests that are made in by Cloud Code, default is `false`. Note that a public Cloud Code function that triggers internal requests may circumvent rate limiting and be vulnerable to attacks.', - action: parsers.booleanParser, - default: false, - }, - includeMasterKey: { - env: 'PARSE_SERVER_RATE_LIMIT_INCLUDE_MASTER_KEY', - help: 'Optional, if `true` the rate limit will also apply to requests using the `masterKey`, default is `false`. Note that a public Cloud Code function that triggers internal requests using the `masterKey` may circumvent rate limiting and be vulnerable to attacks.', - action: parsers.booleanParser, - default: false, - }, - redisUrl: { - env: 'PARSE_SERVER_RATE_LIMIT_REDIS_URL', - help: 'Optional, the URL of the Redis server to store rate limit data. This allows to rate limit requests for multiple servers by calculating the sum of all requests across all servers. This is useful if multiple servers are processing requests behind a load balancer. 
For example, the limit of 10 requests is reached if each of 2 servers processed 5 requests.', - }, - requestCount: { - env: 'PARSE_SERVER_RATE_LIMIT_REQUEST_COUNT', - help: 'The number of requests that can be made per IP address within the time window set in `requestTimeWindow` before the rate limit is applied. For batch requests, this also limits the number of sub-requests in a single batch that target this path; however, requests already consumed in the current time window are not counted against the batch, so the effective limit may be higher when combining individual and batch requests. Note that this is a basic server-level rate limit; for comprehensive protection, use a reverse proxy or WAF for rate limiting.', - action: parsers.numberParser('requestCount'), - }, - requestMethods: { - env: 'PARSE_SERVER_RATE_LIMIT_REQUEST_METHODS', - help: 'Optional, the HTTP request methods to which the rate limit should be applied, default is all methods.', - action: parsers.arrayParser, - }, - requestPath: { - env: 'PARSE_SERVER_RATE_LIMIT_REQUEST_PATH', - help: 'The path of the API route to be rate limited. Route paths, in combination with a request method, define the endpoints at which requests can be made. Route paths can be strings or string patterns following path-to-regexp v8 syntax.', - required: true, - }, - requestTimeWindow: { - env: 'PARSE_SERVER_RATE_LIMIT_REQUEST_TIME_WINDOW', - help: 'The window of time in milliseconds within which the number of requests set in `requestCount` can be made before the rate limit is applied.', - action: parsers.numberParser('requestTimeWindow'), - }, - zone: { - env: 'PARSE_SERVER_RATE_LIMIT_ZONE', - help: 'The type of rate limit to apply. The following types are supported:
  • `global`: rate limit based on the number of requests made by all users
  • `ip`: rate limit based on the IP address of the request
  • `user`: rate limit based on the user ID of the request
  • `session`: rate limit based on the session token of the request
Default is `ip`.', - default: 'ip', - }, + "errorResponseMessage": { + "env": "PARSE_SERVER_RATE_LIMIT_ERROR_RESPONSE_MESSAGE", + "help": "The error message that should be returned in the body of the HTTP 429 response when the rate limit is hit. Default is `Too many requests.`.", + "default": "Too many requests." + }, + "includeInternalRequests": { + "env": "PARSE_SERVER_RATE_LIMIT_INCLUDE_INTERNAL_REQUESTS", + "help": "Optional, if `true` the rate limit will also apply to requests that are made by Cloud Code, default is `false`. Note that a public Cloud Code function that triggers internal requests may circumvent rate limiting and be vulnerable to attacks.", + "action": parsers.booleanParser, + "default": false + }, + "includeMasterKey": { + "env": "PARSE_SERVER_RATE_LIMIT_INCLUDE_MASTER_KEY", + "help": "Optional, if `true` the rate limit will also apply to requests using the `masterKey`, default is `false`. Note that a public Cloud Code function that triggers internal requests using the `masterKey` may circumvent rate limiting and be vulnerable to attacks.", + "action": parsers.booleanParser, + "default": false + }, + "redisUrl": { + "env": "PARSE_SERVER_RATE_LIMIT_REDIS_URL", + "help": "Optional, the URL of the Redis server to store rate limit data. This allows to rate limit requests for multiple servers by calculating the sum of all requests across all servers. This is useful if multiple servers are processing requests behind a load balancer. For example, the limit of 10 requests is reached if each of 2 servers processed 5 requests." + }, + "requestCount": { + "env": "PARSE_SERVER_RATE_LIMIT_REQUEST_COUNT", + "help": "The number of requests that can be made per IP address within the time window set in `requestTimeWindow` before the rate limit is applied. 
For batch requests, this also limits the number of sub-requests in a single batch that target this path; however, requests already consumed in the current time window are not counted against the batch, so the effective limit may be higher when combining individual and batch requests. Note that this is a basic server-level rate limit; for comprehensive protection, use a reverse proxy or WAF for rate limiting.", + "action": parsers.numberParser("requestCount") + }, + "requestMethods": { + "env": "PARSE_SERVER_RATE_LIMIT_REQUEST_METHODS", + "help": "Optional, the HTTP request methods to which the rate limit should be applied, default is all methods.", + "action": parsers.arrayParser + }, + "requestPath": { + "env": "PARSE_SERVER_RATE_LIMIT_REQUEST_PATH", + "help": "The path of the API route to be rate limited. Route paths, in combination with a request method, define the endpoints at which requests can be made. Route paths can be strings or string patterns following path-to-regexp v8 syntax.", + "required": true + }, + "requestTimeWindow": { + "env": "PARSE_SERVER_RATE_LIMIT_REQUEST_TIME_WINDOW", + "help": "The window of time in milliseconds within which the number of requests set in `requestCount` can be made before the rate limit is applied.", + "action": parsers.numberParser("requestTimeWindow") + }, + "zone": { + "env": "PARSE_SERVER_RATE_LIMIT_ZONE", + "help": "The type of rate limit to apply. The following types are supported:
  • `global`: rate limit based on the number of requests made by all users
  • `ip`: rate limit based on the IP address of the request
  • `user`: rate limit based on the user ID of the request
  • `session`: rate limit based on the session token of the request
Default is `ip`.", + "default": "ip" + } }; module.exports.RequestComplexityOptions = { - graphQLDepth: { - env: 'PARSE_SERVER_REQUEST_COMPLEXITY_GRAPHQL_DEPTH', - help: 'Maximum depth of GraphQL field selections. Set to `-1` to disable. Default is `-1`.', - action: parsers.numberParser('graphQLDepth'), - default: -1, - }, - graphQLFields: { - env: 'PARSE_SERVER_REQUEST_COMPLEXITY_GRAPHQL_FIELDS', - help: 'Maximum number of field selections in a GraphQL query. Set to `-1` to disable. Default is `-1`.', - action: parsers.numberParser('graphQLFields'), - default: -1, - }, - includeCount: { - env: 'PARSE_SERVER_REQUEST_COMPLEXITY_INCLUDE_COUNT', - help: 'Maximum number of include paths in a single query. Set to `-1` to disable. Default is `-1`.', - action: parsers.numberParser('includeCount'), - default: -1, - }, - includeDepth: { - env: 'PARSE_SERVER_REQUEST_COMPLEXITY_INCLUDE_DEPTH', - help: 'Maximum depth of include pointer chains (e.g. `a.b.c` = depth 3). Set to `-1` to disable. Default is `-1`.', - action: parsers.numberParser('includeDepth'), - default: -1, - }, - queryDepth: { - env: 'PARSE_SERVER_REQUEST_COMPLEXITY_QUERY_DEPTH', - help: 'Maximum nesting depth of `$or`, `$and`, `$nor` query operators. Set to `-1` to disable. Default is `-1`.', - action: parsers.numberParser('queryDepth'), - default: -1, - }, - subqueryDepth: { - env: 'PARSE_SERVER_REQUEST_COMPLEXITY_SUBQUERY_DEPTH', - help: 'Maximum nesting depth of `$inQuery`, `$notInQuery`, `$select`, `$dontSelect` subqueries. Set to `-1` to disable. Default is `-1`.', - action: parsers.numberParser('subqueryDepth'), - default: -1, - }, + "graphQLDepth": { + "env": "PARSE_SERVER_REQUEST_COMPLEXITY_GRAPHQL_DEPTH", + "help": "Maximum depth of GraphQL field selections. Set to `-1` to disable. 
Default is `-1`.", + "action": parsers.numberParser("graphQLDepth"), + "default": -1 + }, + "graphQLFields": { + "env": "PARSE_SERVER_REQUEST_COMPLEXITY_GRAPHQL_FIELDS", + "help": "Maximum number of field selections in a GraphQL query. Set to `-1` to disable. Default is `-1`.", + "action": parsers.numberParser("graphQLFields"), + "default": -1 + }, + "includeCount": { + "env": "PARSE_SERVER_REQUEST_COMPLEXITY_INCLUDE_COUNT", + "help": "Maximum number of include paths in a single query. Set to `-1` to disable. Default is `-1`.", + "action": parsers.numberParser("includeCount"), + "default": -1 + }, + "includeDepth": { + "env": "PARSE_SERVER_REQUEST_COMPLEXITY_INCLUDE_DEPTH", + "help": "Maximum depth of include pointer chains (e.g. `a.b.c` = depth 3). Set to `-1` to disable. Default is `-1`.", + "action": parsers.numberParser("includeDepth"), + "default": -1 + }, + "queryDepth": { + "env": "PARSE_SERVER_REQUEST_COMPLEXITY_QUERY_DEPTH", + "help": "Maximum nesting depth of `$or`, `$and`, `$nor` query operators. Set to `-1` to disable. Default is `-1`.", + "action": parsers.numberParser("queryDepth"), + "default": -1 + }, + "subqueryDepth": { + "env": "PARSE_SERVER_REQUEST_COMPLEXITY_SUBQUERY_DEPTH", + "help": "Maximum nesting depth of `$inQuery`, `$notInQuery`, `$select`, `$dontSelect` subqueries. Set to `-1` to disable. Default is `-1`.", + "action": parsers.numberParser("subqueryDepth"), + "default": -1 + } }; module.exports.SecurityOptions = { - checkGroups: { - env: 'PARSE_SERVER_SECURITY_CHECK_GROUPS', - help: 'The security check groups to run. This allows to add custom security checks or override existing ones. 
Default are the groups defined in `CheckGroups.js`.', - action: parsers.arrayParser, - }, - enableCheck: { - env: 'PARSE_SERVER_SECURITY_ENABLE_CHECK', - help: 'Is true if Parse Server should check for weak security settings.', - action: parsers.booleanParser, - default: false, - }, - enableCheckLog: { - env: 'PARSE_SERVER_SECURITY_ENABLE_CHECK_LOG', - help: 'Is true if the security check report should be written to logs. This should only be enabled temporarily to not expose weak security settings in logs.', - action: parsers.booleanParser, - default: false, - }, + "checkGroups": { + "env": "PARSE_SERVER_SECURITY_CHECK_GROUPS", + "help": "The security check groups to run. This allows to add custom security checks or override existing ones. Default are the groups defined in `CheckGroups.js`.", + "action": parsers.arrayParser + }, + "enableCheck": { + "env": "PARSE_SERVER_SECURITY_ENABLE_CHECK", + "help": "Is true if Parse Server should check for weak security settings.", + "action": parsers.booleanParser, + "default": false + }, + "enableCheckLog": { + "env": "PARSE_SERVER_SECURITY_ENABLE_CHECK_LOG", + "help": "Is true if the security check report should be written to logs. 
This should only be enabled temporarily to not expose weak security settings in logs.", + "action": parsers.booleanParser, + "default": false + } }; module.exports.PagesOptions = { - customRoutes: { - env: 'PARSE_SERVER_PAGES_CUSTOM_ROUTES', - help: 'The custom routes.', - action: parsers.arrayParser, - type: 'PagesRoute[]', - default: [], - }, - customUrls: { - env: 'PARSE_SERVER_PAGES_CUSTOM_URLS', - help: 'The URLs to the custom pages.', - action: parsers.objectParser, - type: 'PagesCustomUrlsOptions', - default: {}, - }, - enableLocalization: { - env: 'PARSE_SERVER_PAGES_ENABLE_LOCALIZATION', - help: 'Is true if pages should be localized; this has no effect on custom page redirects.', - action: parsers.booleanParser, - default: false, - }, - encodePageParamHeaders: { - env: 'PARSE_SERVER_PAGES_ENCODE_PAGE_PARAM_HEADERS', - help: 'Is `true` if the page parameter headers should be URI-encoded. This is required if any page parameter value contains non-ASCII characters, such as the app name.', - action: parsers.booleanParser, - default: false, - }, - forceRedirect: { - env: 'PARSE_SERVER_PAGES_FORCE_REDIRECT', - help: 'Is true if responses should always be redirects and never content, false if the response type should depend on the request type (GET request -> content response; POST request -> redirect response).', - action: parsers.booleanParser, - default: false, - }, - localizationFallbackLocale: { - env: 'PARSE_SERVER_PAGES_LOCALIZATION_FALLBACK_LOCALE', - help: 'The fallback locale for localization if no matching translation is provided for the given locale. 
This is only relevant when providing translation resources via JSON file.', - default: 'en', - }, - localizationJsonPath: { - env: 'PARSE_SERVER_PAGES_LOCALIZATION_JSON_PATH', - help: 'The path to the JSON file for localization; the translations will be used to fill template placeholders according to the locale.', - }, - pagesEndpoint: { - env: 'PARSE_SERVER_PAGES_PAGES_ENDPOINT', - help: "The API endpoint for the pages. Default is 'apps'.", - default: 'apps', - }, - pagesPath: { - env: 'PARSE_SERVER_PAGES_PAGES_PATH', - help: "The path to the pages directory; this also defines where the static endpoint '/apps' points to. Default is the './public/' directory of the parse-server module.", - }, - placeholders: { - env: 'PARSE_SERVER_PAGES_PLACEHOLDERS', - help: 'The placeholder keys and values which will be filled in pages; this can be a simple object or a callback function.', - action: parsers.objectParser, - default: {}, - }, + "customRoutes": { + "env": "PARSE_SERVER_PAGES_CUSTOM_ROUTES", + "help": "The custom routes.", + "action": parsers.arrayParser, + "type": "PagesRoute[]", + "default": [] + }, + "customUrls": { + "env": "PARSE_SERVER_PAGES_CUSTOM_URLS", + "help": "The URLs to the custom pages.", + "action": parsers.objectParser, + "type": "PagesCustomUrlsOptions", + "default": {} + }, + "enableLocalization": { + "env": "PARSE_SERVER_PAGES_ENABLE_LOCALIZATION", + "help": "Is true if pages should be localized; this has no effect on custom page redirects.", + "action": parsers.booleanParser, + "default": false + }, + "encodePageParamHeaders": { + "env": "PARSE_SERVER_PAGES_ENCODE_PAGE_PARAM_HEADERS", + "help": "Is `true` if the page parameter headers should be URI-encoded. 
This is required if any page parameter value contains non-ASCII characters, such as the app name.", + "action": parsers.booleanParser, + "default": false + }, + "forceRedirect": { + "env": "PARSE_SERVER_PAGES_FORCE_REDIRECT", + "help": "Is true if responses should always be redirects and never content, false if the response type should depend on the request type (GET request -> content response; POST request -> redirect response).", + "action": parsers.booleanParser, + "default": false + }, + "localizationFallbackLocale": { + "env": "PARSE_SERVER_PAGES_LOCALIZATION_FALLBACK_LOCALE", + "help": "The fallback locale for localization if no matching translation is provided for the given locale. This is only relevant when providing translation resources via JSON file.", + "default": "en" + }, + "localizationJsonPath": { + "env": "PARSE_SERVER_PAGES_LOCALIZATION_JSON_PATH", + "help": "The path to the JSON file for localization; the translations will be used to fill template placeholders according to the locale." + }, + "pagesEndpoint": { + "env": "PARSE_SERVER_PAGES_PAGES_ENDPOINT", + "help": "The API endpoint for the pages. Default is 'apps'.", + "default": "apps" + }, + "pagesPath": { + "env": "PARSE_SERVER_PAGES_PAGES_PATH", + "help": "The path to the pages directory; this also defines where the static endpoint '/apps' points to. Default is the './public/' directory of the parse-server module." + }, + "placeholders": { + "env": "PARSE_SERVER_PAGES_PLACEHOLDERS", + "help": "The placeholder keys and values which will be filled in pages; this can be a simple object or a callback function.", + "action": parsers.objectParser, + "default": {} + } }; module.exports.PagesRoute = { - handler: { - env: 'PARSE_SERVER_PAGES_ROUTE_HANDLER', - help: 'The route handler that is an async function.', - required: true, - }, - method: { - env: 'PARSE_SERVER_PAGES_ROUTE_METHOD', - help: "The route method, e.g. 
'GET' or 'POST'.", - required: true, - }, - path: { - env: 'PARSE_SERVER_PAGES_ROUTE_PATH', - help: 'The route path.', - required: true, - }, + "handler": { + "env": "PARSE_SERVER_PAGES_ROUTE_HANDLER", + "help": "The route handler that is an async function.", + "required": true + }, + "method": { + "env": "PARSE_SERVER_PAGES_ROUTE_METHOD", + "help": "The route method, e.g. 'GET' or 'POST'.", + "required": true + }, + "path": { + "env": "PARSE_SERVER_PAGES_ROUTE_PATH", + "help": "The route path.", + "required": true + } }; module.exports.PagesCustomUrlsOptions = { - emailVerificationLinkExpired: { - env: 'PARSE_SERVER_PAGES_CUSTOM_URL_EMAIL_VERIFICATION_LINK_EXPIRED', - help: 'The URL to the custom page for email verification -> link expired.', - }, - emailVerificationLinkInvalid: { - env: 'PARSE_SERVER_PAGES_CUSTOM_URL_EMAIL_VERIFICATION_LINK_INVALID', - help: 'The URL to the custom page for email verification -> link invalid.', - }, - emailVerificationSendFail: { - env: 'PARSE_SERVER_PAGES_CUSTOM_URL_EMAIL_VERIFICATION_SEND_FAIL', - help: 'The URL to the custom page for email verification -> link send fail.', - }, - emailVerificationSendSuccess: { - env: 'PARSE_SERVER_PAGES_CUSTOM_URL_EMAIL_VERIFICATION_SEND_SUCCESS', - help: 'The URL to the custom page for email verification -> resend link -> success.', - }, - emailVerificationSuccess: { - env: 'PARSE_SERVER_PAGES_CUSTOM_URL_EMAIL_VERIFICATION_SUCCESS', - help: 'The URL to the custom page for email verification -> success.', - }, - passwordReset: { - env: 'PARSE_SERVER_PAGES_CUSTOM_URL_PASSWORD_RESET', - help: 'The URL to the custom page for password reset.', - }, - passwordResetLinkInvalid: { - env: 'PARSE_SERVER_PAGES_CUSTOM_URL_PASSWORD_RESET_LINK_INVALID', - help: 'The URL to the custom page for password reset -> link invalid.', - }, - passwordResetSuccess: { - env: 'PARSE_SERVER_PAGES_CUSTOM_URL_PASSWORD_RESET_SUCCESS', - help: 'The URL to the custom page for password reset -> success.', - }, + 
"emailVerificationLinkExpired": { + "env": "PARSE_SERVER_PAGES_CUSTOM_URL_EMAIL_VERIFICATION_LINK_EXPIRED", + "help": "The URL to the custom page for email verification -> link expired." + }, + "emailVerificationLinkInvalid": { + "env": "PARSE_SERVER_PAGES_CUSTOM_URL_EMAIL_VERIFICATION_LINK_INVALID", + "help": "The URL to the custom page for email verification -> link invalid." + }, + "emailVerificationSendFail": { + "env": "PARSE_SERVER_PAGES_CUSTOM_URL_EMAIL_VERIFICATION_SEND_FAIL", + "help": "The URL to the custom page for email verification -> link send fail." + }, + "emailVerificationSendSuccess": { + "env": "PARSE_SERVER_PAGES_CUSTOM_URL_EMAIL_VERIFICATION_SEND_SUCCESS", + "help": "The URL to the custom page for email verification -> resend link -> success." + }, + "emailVerificationSuccess": { + "env": "PARSE_SERVER_PAGES_CUSTOM_URL_EMAIL_VERIFICATION_SUCCESS", + "help": "The URL to the custom page for email verification -> success." + }, + "passwordReset": { + "env": "PARSE_SERVER_PAGES_CUSTOM_URL_PASSWORD_RESET", + "help": "The URL to the custom page for password reset." + }, + "passwordResetLinkInvalid": { + "env": "PARSE_SERVER_PAGES_CUSTOM_URL_PASSWORD_RESET_LINK_INVALID", + "help": "The URL to the custom page for password reset -> link invalid." + }, + "passwordResetSuccess": { + "env": "PARSE_SERVER_PAGES_CUSTOM_URL_PASSWORD_RESET_SUCCESS", + "help": "The URL to the custom page for password reset -> success." 
+ } }; module.exports.CustomPagesOptions = { - choosePassword: { - env: 'PARSE_SERVER_CUSTOM_PAGES_CHOOSE_PASSWORD', - help: 'choose password page path', - }, - expiredVerificationLink: { - env: 'PARSE_SERVER_CUSTOM_PAGES_EXPIRED_VERIFICATION_LINK', - help: 'expired verification link page path', - }, - invalidLink: { - env: 'PARSE_SERVER_CUSTOM_PAGES_INVALID_LINK', - help: 'invalid link page path', - }, - invalidPasswordResetLink: { - env: 'PARSE_SERVER_CUSTOM_PAGES_INVALID_PASSWORD_RESET_LINK', - help: 'invalid password reset link page path', - }, - invalidVerificationLink: { - env: 'PARSE_SERVER_CUSTOM_PAGES_INVALID_VERIFICATION_LINK', - help: 'invalid verification link page path', - }, - linkSendFail: { - env: 'PARSE_SERVER_CUSTOM_PAGES_LINK_SEND_FAIL', - help: 'verification link send fail page path', - }, - linkSendSuccess: { - env: 'PARSE_SERVER_CUSTOM_PAGES_LINK_SEND_SUCCESS', - help: 'verification link send success page path', - }, - parseFrameURL: { - env: 'PARSE_SERVER_CUSTOM_PAGES_PARSE_FRAME_URL', - help: 'for masking user-facing pages', - }, - passwordResetSuccess: { - env: 'PARSE_SERVER_CUSTOM_PAGES_PASSWORD_RESET_SUCCESS', - help: 'password reset success page path', - }, - verifyEmailSuccess: { - env: 'PARSE_SERVER_CUSTOM_PAGES_VERIFY_EMAIL_SUCCESS', - help: 'verify email success page path', - }, + "choosePassword": { + "env": "PARSE_SERVER_CUSTOM_PAGES_CHOOSE_PASSWORD", + "help": "choose password page path" + }, + "expiredVerificationLink": { + "env": "PARSE_SERVER_CUSTOM_PAGES_EXPIRED_VERIFICATION_LINK", + "help": "expired verification link page path" + }, + "invalidLink": { + "env": "PARSE_SERVER_CUSTOM_PAGES_INVALID_LINK", + "help": "invalid link page path" + }, + "invalidPasswordResetLink": { + "env": "PARSE_SERVER_CUSTOM_PAGES_INVALID_PASSWORD_RESET_LINK", + "help": "invalid password reset link page path" + }, + "invalidVerificationLink": { + "env": "PARSE_SERVER_CUSTOM_PAGES_INVALID_VERIFICATION_LINK", + "help": "invalid verification link page 
path" + }, + "linkSendFail": { + "env": "PARSE_SERVER_CUSTOM_PAGES_LINK_SEND_FAIL", + "help": "verification link send fail page path" + }, + "linkSendSuccess": { + "env": "PARSE_SERVER_CUSTOM_PAGES_LINK_SEND_SUCCESS", + "help": "verification link send success page path" + }, + "parseFrameURL": { + "env": "PARSE_SERVER_CUSTOM_PAGES_PARSE_FRAME_URL", + "help": "for masking user-facing pages" + }, + "passwordResetSuccess": { + "env": "PARSE_SERVER_CUSTOM_PAGES_PASSWORD_RESET_SUCCESS", + "help": "password reset success page path" + }, + "verifyEmailSuccess": { + "env": "PARSE_SERVER_CUSTOM_PAGES_VERIFY_EMAIL_SUCCESS", + "help": "verify email success page path" + } }; module.exports.LiveQueryOptions = { - classNames: { - env: 'PARSE_SERVER_LIVEQUERY_CLASSNAMES', - help: "parse-server's LiveQuery classNames", - action: parsers.arrayParser, - }, - pubSubAdapter: { - env: 'PARSE_SERVER_LIVEQUERY_PUB_SUB_ADAPTER', - help: 'LiveQuery pubsub adapter', - action: parsers.moduleOrObjectParser, - }, - redisOptions: { - env: 'PARSE_SERVER_LIVEQUERY_REDIS_OPTIONS', - help: "parse-server's LiveQuery redisOptions", - action: parsers.objectParser, - }, - redisURL: { - env: 'PARSE_SERVER_LIVEQUERY_REDIS_URL', - help: "parse-server's LiveQuery redisURL", - }, - regexTimeout: { - env: 'PARSE_SERVER_LIVEQUERY_REGEX_TIMEOUT', - help: 'Sets the maximum execution time in milliseconds for regular expression pattern matching in LiveQuery. This protects against Regular Expression Denial of Service (ReDoS) attacks where a malicious regex pattern could block the event loop. A regex that exceeds the timeout will be treated as non-matching.

The protection runs each regex evaluation in an isolated VM context with a timeout. This adds approximately 50 microseconds of overhead per regex evaluation. For most applications this is negligible, but it can add up if you have a very large number of LiveQuery subscriptions that use `$regex` on the same class. For example, 10,000 concurrent regex subscriptions would add approximately 500ms of processing time per object save event on that class.

Set to `0` to disable the timeout and use native regex evaluation without protection. Defaults to `100`.', - action: parsers.numberParser('regexTimeout'), - default: 100, - }, - wssAdapter: { - env: 'PARSE_SERVER_LIVEQUERY_WSS_ADAPTER', - help: 'Adapter module for the WebSocketServer', - action: parsers.moduleOrObjectParser, - }, + "classNames": { + "env": "PARSE_SERVER_LIVEQUERY_CLASSNAMES", + "help": "parse-server's LiveQuery classNames", + "action": parsers.arrayParser + }, + "pubSubAdapter": { + "env": "PARSE_SERVER_LIVEQUERY_PUB_SUB_ADAPTER", + "help": "LiveQuery pubsub adapter", + "action": parsers.moduleOrObjectParser + }, + "redisOptions": { + "env": "PARSE_SERVER_LIVEQUERY_REDIS_OPTIONS", + "help": "parse-server's LiveQuery redisOptions", + "action": parsers.objectParser + }, + "redisURL": { + "env": "PARSE_SERVER_LIVEQUERY_REDIS_URL", + "help": "parse-server's LiveQuery redisURL" + }, + "regexTimeout": { + "env": "PARSE_SERVER_LIVEQUERY_REGEX_TIMEOUT", + "help": "Sets the maximum execution time in milliseconds for regular expression pattern matching in LiveQuery. This protects against Regular Expression Denial of Service (ReDoS) attacks where a malicious regex pattern could block the event loop. A regex that exceeds the timeout will be treated as non-matching.

The protection runs each regex evaluation in an isolated VM context with a timeout. This adds approximately 50 microseconds of overhead per regex evaluation. For most applications this is negligible, but it can add up if you have a very large number of LiveQuery subscriptions that use `$regex` on the same class. For example, 10,000 concurrent regex subscriptions would add approximately 500ms of processing time per object save event on that class.

Set to `0` to disable the timeout and use native regex evaluation without protection. Defaults to `100`.", + "action": parsers.numberParser("regexTimeout"), + "default": 100 + }, + "wssAdapter": { + "env": "PARSE_SERVER_LIVEQUERY_WSS_ADAPTER", + "help": "Adapter module for the WebSocketServer", + "action": parsers.moduleOrObjectParser + } }; module.exports.LiveQueryServerOptions = { - appId: { - env: 'PARSE_LIVE_QUERY_SERVER_APP_ID', - help: 'This string should match the appId in use by your Parse Server. If you deploy the LiveQuery server alongside Parse Server, the LiveQuery server will try to use the same appId.', - }, - cacheTimeout: { - env: 'PARSE_LIVE_QUERY_SERVER_CACHE_TIMEOUT', - help: "Number in milliseconds. When clients provide the sessionToken to the LiveQuery server, the LiveQuery server will try to fetch its ParseUser's objectId from parse server and store it in the cache. The value defines the duration of the cache. Check the following Security section and our protocol specification for details, defaults to 5 * 1000 ms (5 seconds).", - action: parsers.numberParser('cacheTimeout'), - }, - keyPairs: { - env: 'PARSE_LIVE_QUERY_SERVER_KEY_PAIRS', - help: 'A JSON object that serves as a whitelist of keys. It is used for validating clients when they try to connect to the LiveQuery server. Check the following Security section and our protocol specification for details.', - action: parsers.objectParser, - }, - logLevel: { - env: 'PARSE_LIVE_QUERY_SERVER_LOG_LEVEL', - help: 'This string defines the log level of the LiveQuery server. We support VERBOSE, INFO, ERROR, NONE, defaults to INFO.', - }, - masterKey: { - env: 'PARSE_LIVE_QUERY_SERVER_MASTER_KEY', - help: 'This string should match the masterKey in use by your Parse Server. 
If you deploy the LiveQuery server alongside Parse Server, the LiveQuery server will try to use the same masterKey.', - }, - port: { - env: 'PARSE_LIVE_QUERY_SERVER_PORT', - help: 'The port to run the LiveQuery server, defaults to 1337.', - action: parsers.numberParser('port'), - default: 1337, - }, - pubSubAdapter: { - env: 'PARSE_LIVE_QUERY_SERVER_PUB_SUB_ADAPTER', - help: 'LiveQuery pubsub adapter', - action: parsers.moduleOrObjectParser, - }, - redisOptions: { - env: 'PARSE_LIVE_QUERY_SERVER_REDIS_OPTIONS', - help: "parse-server's LiveQuery redisOptions", - action: parsers.objectParser, - }, - redisURL: { - env: 'PARSE_LIVE_QUERY_SERVER_REDIS_URL', - help: "parse-server's LiveQuery redisURL", - }, - serverURL: { - env: 'PARSE_LIVE_QUERY_SERVER_SERVER_URL', - help: 'This string should match the serverURL in use by your Parse Server. If you deploy the LiveQuery server alongside Parse Server, the LiveQuery server will try to use the same serverURL.', - }, - websocketTimeout: { - env: 'PARSE_LIVE_QUERY_SERVER_WEBSOCKET_TIMEOUT', - help: 'Number of milliseconds between ping/pong frames. The WebSocket server sends ping/pong frames to the clients to keep the WebSocket alive. This value defines the interval of the ping/pong frame from the server to clients, defaults to 10 * 1000 ms (10 s).', - action: parsers.numberParser('websocketTimeout'), - }, - wssAdapter: { - env: 'PARSE_LIVE_QUERY_SERVER_WSS_ADAPTER', - help: 'Adapter module for the WebSocketServer', - action: parsers.moduleOrObjectParser, - }, + "appId": { + "env": "PARSE_LIVE_QUERY_SERVER_APP_ID", + "help": "This string should match the appId in use by your Parse Server. If you deploy the LiveQuery server alongside Parse Server, the LiveQuery server will try to use the same appId." + }, + "cacheTimeout": { + "env": "PARSE_LIVE_QUERY_SERVER_CACHE_TIMEOUT", + "help": "Number in milliseconds. 
When clients provide the sessionToken to the LiveQuery server, the LiveQuery server will try to fetch its ParseUser's objectId from parse server and store it in the cache. The value defines the duration of the cache. Check the following Security section and our protocol specification for details, defaults to 5 * 1000 ms (5 seconds).", + "action": parsers.numberParser("cacheTimeout") + }, + "keyPairs": { + "env": "PARSE_LIVE_QUERY_SERVER_KEY_PAIRS", + "help": "A JSON object that serves as a whitelist of keys. It is used for validating clients when they try to connect to the LiveQuery server. Check the following Security section and our protocol specification for details.", + "action": parsers.objectParser + }, + "logLevel": { + "env": "PARSE_LIVE_QUERY_SERVER_LOG_LEVEL", + "help": "This string defines the log level of the LiveQuery server. We support VERBOSE, INFO, ERROR, NONE, defaults to INFO." + }, + "masterKey": { + "env": "PARSE_LIVE_QUERY_SERVER_MASTER_KEY", + "help": "This string should match the masterKey in use by your Parse Server. If you deploy the LiveQuery server alongside Parse Server, the LiveQuery server will try to use the same masterKey." + }, + "port": { + "env": "PARSE_LIVE_QUERY_SERVER_PORT", + "help": "The port to run the LiveQuery server, defaults to 1337.", + "action": parsers.numberParser("port"), + "default": 1337 + }, + "pubSubAdapter": { + "env": "PARSE_LIVE_QUERY_SERVER_PUB_SUB_ADAPTER", + "help": "LiveQuery pubsub adapter", + "action": parsers.moduleOrObjectParser + }, + "redisOptions": { + "env": "PARSE_LIVE_QUERY_SERVER_REDIS_OPTIONS", + "help": "parse-server's LiveQuery redisOptions", + "action": parsers.objectParser + }, + "redisURL": { + "env": "PARSE_LIVE_QUERY_SERVER_REDIS_URL", + "help": "parse-server's LiveQuery redisURL" + }, + "serverURL": { + "env": "PARSE_LIVE_QUERY_SERVER_SERVER_URL", + "help": "This string should match the serverURL in use by your Parse Server. 
If you deploy the LiveQuery server alongside Parse Server, the LiveQuery server will try to use the same serverURL." + }, + "websocketTimeout": { + "env": "PARSE_LIVE_QUERY_SERVER_WEBSOCKET_TIMEOUT", + "help": "Number of milliseconds between ping/pong frames. The WebSocket server sends ping/pong frames to the clients to keep the WebSocket alive. This value defines the interval of the ping/pong frame from the server to clients, defaults to 10 * 1000 ms (10 s).", + "action": parsers.numberParser("websocketTimeout") + }, + "wssAdapter": { + "env": "PARSE_LIVE_QUERY_SERVER_WSS_ADAPTER", + "help": "Adapter module for the WebSocketServer", + "action": parsers.moduleOrObjectParser + } }; module.exports.IdempotencyOptions = { - paths: { - env: 'PARSE_SERVER_EXPERIMENTAL_IDEMPOTENCY_PATHS', - help: 'An array of paths for which the feature should be enabled. The mount path must not be included, for example instead of `/parse/functions/myFunction` specifiy `functions/myFunction`. The entries are interpreted as regular expression, for example `functions/.*` matches all functions, `jobs/.*` matches all jobs, `classes/.*` matches all classes, `.*` matches all paths.', - action: parsers.arrayParser, - default: [], - }, - ttl: { - env: 'PARSE_SERVER_EXPERIMENTAL_IDEMPOTENCY_TTL', - help: 'The duration in seconds after which a request record is discarded from the database, defaults to 300s.', - action: parsers.numberParser('ttl'), - default: 300, - }, + "paths": { + "env": "PARSE_SERVER_EXPERIMENTAL_IDEMPOTENCY_PATHS", + "help": "An array of paths for which the feature should be enabled. The mount path must not be included, for example instead of `/parse/functions/myFunction` specifiy `functions/myFunction`. 
The entries are interpreted as regular expression, for example `functions/.*` matches all functions, `jobs/.*` matches all jobs, `classes/.*` matches all classes, `.*` matches all paths.", + "action": parsers.arrayParser, + "default": [] + }, + "ttl": { + "env": "PARSE_SERVER_EXPERIMENTAL_IDEMPOTENCY_TTL", + "help": "The duration in seconds after which a request record is discarded from the database, defaults to 300s.", + "action": parsers.numberParser("ttl"), + "default": 300 + } }; module.exports.AccountLockoutOptions = { - duration: { - env: 'PARSE_SERVER_ACCOUNT_LOCKOUT_DURATION', - help: 'Set the duration in minutes that a locked-out account remains locked out before automatically becoming unlocked.

Valid values are greater than `0` and less than `100000`.', - action: parsers.numberParser('duration'), - }, - threshold: { - env: 'PARSE_SERVER_ACCOUNT_LOCKOUT_THRESHOLD', - help: 'Set the number of failed sign-in attempts that will cause a user account to be locked. If the account is locked. The account will unlock after the duration set in the `duration` option has passed and no further login attempts have been made.

Valid values are greater than `0` and less than `1000`.', - action: parsers.numberParser('threshold'), - }, - unlockOnPasswordReset: { - env: 'PARSE_SERVER_ACCOUNT_LOCKOUT_UNLOCK_ON_PASSWORD_RESET', - help: 'Set to `true` if the account should be unlocked after a successful password reset.

Default is `false`.
Requires options `duration` and `threshold` to be set.', - action: parsers.booleanParser, - default: false, - }, + "duration": { + "env": "PARSE_SERVER_ACCOUNT_LOCKOUT_DURATION", + "help": "Set the duration in minutes that a locked-out account remains locked out before automatically becoming unlocked.

Valid values are greater than `0` and less than `100000`.", + "action": parsers.numberParser("duration") + }, + "threshold": { + "env": "PARSE_SERVER_ACCOUNT_LOCKOUT_THRESHOLD", + "help": "Set the number of failed sign-in attempts that will cause a user account to be locked. If the account is locked. The account will unlock after the duration set in the `duration` option has passed and no further login attempts have been made.

Valid values are greater than `0` and less than `1000`.", + "action": parsers.numberParser("threshold") + }, + "unlockOnPasswordReset": { + "env": "PARSE_SERVER_ACCOUNT_LOCKOUT_UNLOCK_ON_PASSWORD_RESET", + "help": "Set to `true` if the account should be unlocked after a successful password reset.

Default is `false`.
Requires options `duration` and `threshold` to be set.", + "action": parsers.booleanParser, + "default": false + } }; module.exports.PasswordPolicyOptions = { - doNotAllowUsername: { - env: 'PARSE_SERVER_PASSWORD_POLICY_DO_NOT_ALLOW_USERNAME', - help: 'Set to `true` to disallow the username as part of the password.

Default is `false`.', - action: parsers.booleanParser, - default: false, - }, - maxPasswordAge: { - env: 'PARSE_SERVER_PASSWORD_POLICY_MAX_PASSWORD_AGE', - help: 'Set the number of days after which a password expires. Login attempts fail if the user does not reset the password before expiration.', - action: parsers.numberParser('maxPasswordAge'), - }, - maxPasswordHistory: { - env: 'PARSE_SERVER_PASSWORD_POLICY_MAX_PASSWORD_HISTORY', - help: 'Set the number of previous password that will not be allowed to be set as new password. If the option is not set or set to `0`, no previous passwords will be considered.

Valid values are >= `0` and <= `20`.
Default is `0`.', - action: parsers.numberParser('maxPasswordHistory'), - }, - resetPasswordSuccessOnInvalidEmail: { - env: 'PARSE_SERVER_PASSWORD_POLICY_RESET_PASSWORD_SUCCESS_ON_INVALID_EMAIL', - help: 'Set to `true` if a request to reset the password should return a success response even if the provided email address is invalid, or `false` if the request should return an error response if the email address is invalid.

Default is `true`.', - action: parsers.booleanParser, - default: true, - }, - resetTokenReuseIfValid: { - env: 'PARSE_SERVER_PASSWORD_POLICY_RESET_TOKEN_REUSE_IF_VALID', - help: 'Set to `true` if a password reset token should be reused in case another token is requested but there is a token that is still valid, i.e. has not expired. This avoids the often observed issue that a user requests multiple emails and does not know which link contains a valid token because each newly generated token would invalidate the previous token.

Default is `false`.', - action: parsers.booleanParser, - default: false, - }, - resetTokenValidityDuration: { - env: 'PARSE_SERVER_PASSWORD_POLICY_RESET_TOKEN_VALIDITY_DURATION', - help: 'Set the validity duration of the password reset token in seconds after which the token expires. The token is used in the link that is set in the email. After the token expires, the link becomes invalid and a new link has to be sent. If the option is not set or set to `undefined`, then the token never expires.

For example, to expire the token after 2 hours, set a value of 7200 seconds (= 60 seconds * 60 minutes * 2 hours).

Default is `undefined`.', - action: parsers.numberParser('resetTokenValidityDuration'), - }, - validationError: { - env: 'PARSE_SERVER_PASSWORD_POLICY_VALIDATION_ERROR', - help: 'Set the error message to be sent.

Default is `Password does not meet the Password Policy requirements.`', - }, - validatorCallback: { - env: 'PARSE_SERVER_PASSWORD_POLICY_VALIDATOR_CALLBACK', - help: 'Set a callback function to validate a password to be accepted.

If used in combination with `validatorPattern`, the password must pass both to be accepted.', - }, - validatorPattern: { - env: 'PARSE_SERVER_PASSWORD_POLICY_VALIDATOR_PATTERN', - help: 'Set the regular expression validation pattern a password must match to be accepted.

If used in combination with `validatorCallback`, the password must pass both to be accepted.', - }, + "doNotAllowUsername": { + "env": "PARSE_SERVER_PASSWORD_POLICY_DO_NOT_ALLOW_USERNAME", + "help": "Set to `true` to disallow the username as part of the password.

Default is `false`.", + "action": parsers.booleanParser, + "default": false + }, + "maxPasswordAge": { + "env": "PARSE_SERVER_PASSWORD_POLICY_MAX_PASSWORD_AGE", + "help": "Set the number of days after which a password expires. Login attempts fail if the user does not reset the password before expiration.", + "action": parsers.numberParser("maxPasswordAge") + }, + "maxPasswordHistory": { + "env": "PARSE_SERVER_PASSWORD_POLICY_MAX_PASSWORD_HISTORY", + "help": "Set the number of previous password that will not be allowed to be set as new password. If the option is not set or set to `0`, no previous passwords will be considered.

Valid values are >= `0` and <= `20`.
Default is `0`.", + "action": parsers.numberParser("maxPasswordHistory") + }, + "resetPasswordSuccessOnInvalidEmail": { + "env": "PARSE_SERVER_PASSWORD_POLICY_RESET_PASSWORD_SUCCESS_ON_INVALID_EMAIL", + "help": "Set to `true` if a request to reset the password should return a success response even if the provided email address is invalid, or `false` if the request should return an error response if the email address is invalid.

Default is `true`.", + "action": parsers.booleanParser, + "default": true + }, + "resetTokenReuseIfValid": { + "env": "PARSE_SERVER_PASSWORD_POLICY_RESET_TOKEN_REUSE_IF_VALID", + "help": "Set to `true` if a password reset token should be reused in case another token is requested but there is a token that is still valid, i.e. has not expired. This avoids the often observed issue that a user requests multiple emails and does not know which link contains a valid token because each newly generated token would invalidate the previous token.

Default is `false`.", + "action": parsers.booleanParser, + "default": false + }, + "resetTokenValidityDuration": { + "env": "PARSE_SERVER_PASSWORD_POLICY_RESET_TOKEN_VALIDITY_DURATION", + "help": "Set the validity duration of the password reset token in seconds after which the token expires. The token is used in the link that is set in the email. After the token expires, the link becomes invalid and a new link has to be sent. If the option is not set or set to `undefined`, then the token never expires.

For example, to expire the token after 2 hours, set a value of 7200 seconds (= 60 seconds * 60 minutes * 2 hours).

Default is `undefined`.", + "action": parsers.numberParser("resetTokenValidityDuration") + }, + "validationError": { + "env": "PARSE_SERVER_PASSWORD_POLICY_VALIDATION_ERROR", + "help": "Set the error message to be sent.

Default is `Password does not meet the Password Policy requirements.`" + }, + "validatorCallback": { + "env": "PARSE_SERVER_PASSWORD_POLICY_VALIDATOR_CALLBACK", + "help": "Set a callback function to validate a password to be accepted.

If used in combination with `validatorPattern`, the password must pass both to be accepted." + }, + "validatorPattern": { + "env": "PARSE_SERVER_PASSWORD_POLICY_VALIDATOR_PATTERN", + "help": "Set the regular expression validation pattern a password must match to be accepted.

If used in combination with `validatorCallback`, the password must pass both to be accepted." + } }; module.exports.FileUploadOptions = { - allowedFileUrlDomains: { - env: 'PARSE_SERVER_FILE_UPLOAD_ALLOWED_FILE_URL_DOMAINS', - help: "Sets the allowed hostnames for file URLs referenced in Parse objects. When a File object includes a URL, its hostname must match one of these entries to be accepted. Supports exact hostnames (e.g., `'cdn.example.com'`) and wildcard subdomains (e.g., `'*.example.com'`). Use `['*']` to allow any domain. Use `[]` to block all file URLs (only name-based files allowed).", - action: parsers.arrayParser, - default: ['*'], - }, - enableForAnonymousUser: { - env: 'PARSE_SERVER_FILE_UPLOAD_ENABLE_FOR_ANONYMOUS_USER', - help: 'Is true if file upload should be allowed for anonymous users.', - action: parsers.booleanParser, - default: false, - }, - enableForAuthenticatedUser: { - env: 'PARSE_SERVER_FILE_UPLOAD_ENABLE_FOR_AUTHENTICATED_USER', - help: 'Is true if file upload should be allowed for authenticated users.', - action: parsers.booleanParser, - default: true, - }, - enableForPublic: { - env: 'PARSE_SERVER_FILE_UPLOAD_ENABLE_FOR_PUBLIC', - help: 'Is true if file upload should be allowed for anyone, regardless of user authentication.', - action: parsers.booleanParser, - default: false, - }, - fileExtensions: { - env: 'PARSE_SERVER_FILE_UPLOAD_FILE_EXTENSIONS', - help: 'Sets the allowed file extensions for uploading files. The extension is defined as an array of file extensions, or a regex pattern.

It is recommended to only allow the file extensions that your app actually needs, rather than relying on blocking dangerous extensions. This allowlist approach is more secure because new dangerous file extensions may emerge that are not covered by the default blocklist.

The default blocks the most common file extensions that are known to be rendered as active content by web browsers, such as HTML, SVG, and XML files, which may be used by an attacker to compromise the session token of another user via accessing the browser\'s local storage. The blocked extensions are: `html`, `htm`, `shtml`, `xhtml`, `xhtml+xml`, `xht`, `svg`, `svgz`, `svg+xml`, `xml`, `xsl`, `xslt`, `xslt+xml`, `xsd`, `rng`, `rdf`, `rdf+xml`, `owl`, `mathml`, `mathml+xml`.

Defaults to `["^(?!([xXsS]?[hH][tT][mM][lL]?(\\\\+[xX][mM][lL])?|[xX][hH][tT]|[sS][vV][gG]([zZ]|\\\\+[xX][mM][lL])?|[xX][mM][lL]|[xX][sS][lL][tT]?(\\\\+[xX][mM][lL])?|[xX][sS][dD]|[rR][nN][gG]|[rR][dD][fF](\\\\+[xX][mM][lL])?|[oO][wW][lL]|[mM][aA][tT][hH][mM][lL](\\\\+[xX][mM][lL])?)$)"]`.', - action: parsers.arrayParser, - default: [ - '^(?!([xXsS]?[hH][tT][mM][lL]?(\\+[xX][mM][lL])?|[xX][hH][tT]|[sS][vV][gG]([zZ]|\\+[xX][mM][lL])?|[xX][mM][lL]|[xX][sS][lL][tT]?(\\+[xX][mM][lL])?|[xX][sS][dD]|[rR][nN][gG]|[rR][dD][fF](\\+[xX][mM][lL])?|[oO][wW][lL]|[mM][aA][tT][hH][mM][lL](\\+[xX][mM][lL])?)$)', - ], - }, + "allowedFileUrlDomains": { + "env": "PARSE_SERVER_FILE_UPLOAD_ALLOWED_FILE_URL_DOMAINS", + "help": "Sets the allowed hostnames for file URLs referenced in Parse objects. When a File object includes a URL, its hostname must match one of these entries to be accepted. Supports exact hostnames (e.g., `'cdn.example.com'`) and wildcard subdomains (e.g., `'*.example.com'`). Use `['*']` to allow any domain. Use `[]` to block all file URLs (only name-based files allowed).", + "action": parsers.arrayParser, + "default": ["*"] + }, + "enableForAnonymousUser": { + "env": "PARSE_SERVER_FILE_UPLOAD_ENABLE_FOR_ANONYMOUS_USER", + "help": "Is true if file upload should be allowed for anonymous users.", + "action": parsers.booleanParser, + "default": false + }, + "enableForAuthenticatedUser": { + "env": "PARSE_SERVER_FILE_UPLOAD_ENABLE_FOR_AUTHENTICATED_USER", + "help": "Is true if file upload should be allowed for authenticated users.", + "action": parsers.booleanParser, + "default": true + }, + "enableForPublic": { + "env": "PARSE_SERVER_FILE_UPLOAD_ENABLE_FOR_PUBLIC", + "help": "Is true if file upload should be allowed for anyone, regardless of user authentication.", + "action": parsers.booleanParser, + "default": false + }, + "fileExtensions": { + "env": "PARSE_SERVER_FILE_UPLOAD_FILE_EXTENSIONS", + "help": "Sets the allowed file extensions for uploading files. 
The extension is defined as an array of file extensions, or a regex pattern.

It is recommended to only allow the file extensions that your app actually needs, rather than relying on blocking dangerous extensions. This allowlist approach is more secure because new dangerous file extensions may emerge that are not covered by the default blocklist.

The default blocks the most common file extensions that are known to be rendered as active content by web browsers, such as HTML, SVG, and XML files, which may be used by an attacker to compromise the session token of another user via accessing the browser's local storage. The blocked extensions are: `html`, `htm`, `shtml`, `xhtml`, `xhtml+xml`, `xht`, `svg`, `svgz`, `svg+xml`, `xml`, `xsl`, `xslt`, `xslt+xml`, `xsd`, `rng`, `rdf`, `rdf+xml`, `owl`, `mathml`, `mathml+xml`.

Defaults to `[\"^(?!([xXsS]?[hH][tT][mM][lL]?(\\\\+[xX][mM][lL])?|[xX][hH][tT]|[sS][vV][gG]([zZ]|\\\\+[xX][mM][lL])?|[xX][mM][lL]|[xX][sS][lL][tT]?(\\\\+[xX][mM][lL])?|[xX][sS][dD]|[rR][nN][gG]|[rR][dD][fF](\\\\+[xX][mM][lL])?|[oO][wW][lL]|[mM][aA][tT][hH][mM][lL](\\\\+[xX][mM][lL])?)$)\"]`.", + "action": parsers.arrayParser, + "default": ["^(?!([xXsS]?[hH][tT][mM][lL]?(\\+[xX][mM][lL])?|[xX][hH][tT]|[sS][vV][gG]([zZ]|\\+[xX][mM][lL])?|[xX][mM][lL]|[xX][sS][lL][tT]?(\\+[xX][mM][lL])?|[xX][sS][dD]|[rR][nN][gG]|[rR][dD][fF](\\+[xX][mM][lL])?|[oO][wW][lL]|[mM][aA][tT][hH][mM][lL](\\+[xX][mM][lL])?)$)"] + } }; /* The available log levels for Parse Server logging. Valid values are:
- `'error'` - Error level (highest priority)
- `'warn'` - Warning level
- `'info'` - Info level (default)
- `'verbose'` - Verbose level
- `'debug'` - Debug level
- `'silly'` - Silly level (lowest priority) */ module.exports.LogLevel = { - debug: { - env: 'PARSE_SERVER_LOG_LEVEL_DEBUG', - help: 'Debug level', - required: true, - }, - error: { - env: 'PARSE_SERVER_LOG_LEVEL_ERROR', - help: 'Error level - highest priority', - required: true, - }, - info: { - env: 'PARSE_SERVER_LOG_LEVEL_INFO', - help: 'Info level - default', - required: true, - }, - silly: { - env: 'PARSE_SERVER_LOG_LEVEL_SILLY', - help: 'Silly level - lowest priority', - required: true, - }, - verbose: { - env: 'PARSE_SERVER_LOG_LEVEL_VERBOSE', - help: 'Verbose level', - required: true, - }, - warn: { - env: 'PARSE_SERVER_LOG_LEVEL_WARN', - help: 'Warning level', - required: true, - }, + "debug": { + "env": "PARSE_SERVER_LOG_LEVEL_DEBUG", + "help": "Debug level", + "required": true + }, + "error": { + "env": "PARSE_SERVER_LOG_LEVEL_ERROR", + "help": "Error level - highest priority", + "required": true + }, + "info": { + "env": "PARSE_SERVER_LOG_LEVEL_INFO", + "help": "Info level - default", + "required": true + }, + "silly": { + "env": "PARSE_SERVER_LOG_LEVEL_SILLY", + "help": "Silly level - lowest priority", + "required": true + }, + "verbose": { + "env": "PARSE_SERVER_LOG_LEVEL_VERBOSE", + "help": "Verbose level", + "required": true + }, + "warn": { + "env": "PARSE_SERVER_LOG_LEVEL_WARN", + "help": "Warning level", + "required": true + } }; module.exports.LogClientEvent = { - keys: { - env: 'PARSE_SERVER_DATABASE_LOG_CLIENT_EVENTS_KEYS', - help: 'Optional array of dot-notation paths to extract specific data from the event object. If not provided or empty, the entire event object will be logged.', - action: parsers.arrayParser, - }, - logLevel: { - env: 'PARSE_SERVER_DATABASE_LOG_CLIENT_EVENTS_LOG_LEVEL', - help: "The log level to use for this event. See [LogLevel](LogLevel.html) for available values. 
Defaults to `'info'`.", - default: 'info', - }, - name: { - env: 'PARSE_SERVER_DATABASE_LOG_CLIENT_EVENTS_NAME', - help: 'The MongoDB driver event name to listen for. See the [MongoDB driver events documentation](https://www.mongodb.com/docs/drivers/node/current/fundamentals/monitoring/) for available events.', - required: true, - }, + "keys": { + "env": "PARSE_SERVER_DATABASE_LOG_CLIENT_EVENTS_KEYS", + "help": "Optional array of dot-notation paths to extract specific data from the event object. If not provided or empty, the entire event object will be logged.", + "action": parsers.arrayParser + }, + "logLevel": { + "env": "PARSE_SERVER_DATABASE_LOG_CLIENT_EVENTS_LOG_LEVEL", + "help": "The log level to use for this event. See [LogLevel](LogLevel.html) for available values. Defaults to `'info'`.", + "default": "info" + }, + "name": { + "env": "PARSE_SERVER_DATABASE_LOG_CLIENT_EVENTS_NAME", + "help": "The MongoDB driver event name to listen for. See the [MongoDB driver events documentation](https://www.mongodb.com/docs/drivers/node/current/fundamentals/monitoring/) for available events.", + "required": true + } }; module.exports.DatabaseOptions = { - allowPublicExplain: { - env: 'PARSE_SERVER_DATABASE_ALLOW_PUBLIC_EXPLAIN', - help: 'Set to `true` to allow `Parse.Query.explain` without master key.

\u26A0\uFE0F Enabling this option may expose sensitive query performance data to unauthorized users and could potentially be exploited for malicious purposes.', - action: parsers.booleanParser, - default: false, - }, - appName: { - env: 'PARSE_SERVER_DATABASE_APP_NAME', - help: 'The MongoDB driver option to specify the name of the application that created this MongoClient instance.', - }, - authMechanism: { - env: 'PARSE_SERVER_DATABASE_AUTH_MECHANISM', - help: 'The MongoDB driver option to specify the authentication mechanism that MongoDB will use to authenticate the connection.', - }, - authMechanismProperties: { - env: 'PARSE_SERVER_DATABASE_AUTH_MECHANISM_PROPERTIES', - help: 'The MongoDB driver option to specify properties for the specified authMechanism as a comma-separated list of colon-separated key-value pairs.', - action: parsers.objectParser, - }, - authSource: { - env: 'PARSE_SERVER_DATABASE_AUTH_SOURCE', - help: "The MongoDB driver option to specify the database name associated with the user's credentials.", - }, - autoSelectFamily: { - env: 'PARSE_SERVER_DATABASE_AUTO_SELECT_FAMILY', - help: 'The MongoDB driver option to set whether the socket attempts to connect to IPv6 and IPv4 addresses until a connection is established. If available, the driver will select the first IPv6 address.', - action: parsers.booleanParser, - }, - autoSelectFamilyAttemptTimeout: { - env: 'PARSE_SERVER_DATABASE_AUTO_SELECT_FAMILY_ATTEMPT_TIMEOUT', - help: 'The MongoDB driver option to specify the amount of time in milliseconds to wait for a connection attempt to finish before trying the next address when using the autoSelectFamily option. If set to a positive integer less than 10, the value 10 is used instead.', - action: parsers.numberParser('autoSelectFamilyAttemptTimeout'), - }, - batchSize: { - env: 'PARSE_SERVER_DATABASE_BATCH_SIZE', - help: 'The number of documents per batch for MongoDB cursor `getMore` operations. 
A lower value reduces memory usage per batch; a higher value reduces the number of network round-trips.', - action: parsers.numberParser('batchSize'), - default: 1000, - }, - clientMetadata: { - env: 'PARSE_SERVER_DATABASE_CLIENT_METADATA', - help: "Custom metadata to append to database client connections for identifying Parse Server instances in database logs. If set, this metadata will be visible in database logs during connection handshakes. This can help with debugging and monitoring in deployments with multiple database clients. Set `name` to identify your application (e.g., 'MyApp') and `version` to your application's version. Leave undefined (default) to disable this feature and avoid the additional data transfer overhead.", - action: parsers.objectParser, - type: 'DatabaseOptionsClientMetadata', - }, - compressors: { - env: 'PARSE_SERVER_DATABASE_COMPRESSORS', - help: 'The MongoDB driver option to specify an array or comma-delimited string of compressors to enable network compression for communication between this client and a mongod/mongos instance.', - }, - connectTimeoutMS: { - env: 'PARSE_SERVER_DATABASE_CONNECT_TIMEOUT_MS', - help: 'The MongoDB driver option to specify the amount of time, in milliseconds, to wait to establish a single TCP socket connection to the server before raising an error. Specifying 0 disables the connection timeout.', - action: parsers.numberParser('connectTimeoutMS'), - }, - createIndexAuthDataUniqueness: { - env: 'PARSE_SERVER_DATABASE_CREATE_INDEX_AUTH_DATA_UNIQUENESS', - help: 'Set to `true` to automatically create unique indexes on the authData fields of the _User collection for each configured auth provider on server start, including `anonymous` when anonymous users are enabled. These indexes prevent race conditions during concurrent signups with the same authData. Set to `false` to skip index creation. Default is `true`.

\u26A0\uFE0F When setting this option to `false` to manually create the indexes, keep in mind that the otherwise automatically created indexes may change in the future to be optimized for the internal usage by Parse Server.', - action: parsers.booleanParser, - default: true, - }, - createIndexRoleName: { - env: 'PARSE_SERVER_DATABASE_CREATE_INDEX_ROLE_NAME', - help: 'Set to `true` to automatically create a unique index on the name field of the _Role collection on server start. Set to `false` to skip index creation. Default is `true`.

\u26A0\uFE0F When setting this option to `false` to manually create the index, keep in mind that the otherwise automatically created index may change in the future to be optimized for the internal usage by Parse Server.', - action: parsers.booleanParser, - default: true, - }, - createIndexUserEmail: { - env: 'PARSE_SERVER_DATABASE_CREATE_INDEX_USER_EMAIL', - help: 'Set to `true` to automatically create indexes on the email field of the _User collection on server start. Set to `false` to skip index creation. Default is `true`.

\u26A0\uFE0F When setting this option to `false` to manually create the index, keep in mind that the otherwise automatically created index may change in the future to be optimized for the internal usage by Parse Server.', - action: parsers.booleanParser, - default: true, - }, - createIndexUserEmailCaseInsensitive: { - env: 'PARSE_SERVER_DATABASE_CREATE_INDEX_USER_EMAIL_CASE_INSENSITIVE', - help: 'Set to `true` to automatically create a case-insensitive index on the email field of the _User collection on server start. Set to `false` to skip index creation. Default is `true`.

\u26A0\uFE0F When setting this option to `false` to manually create the index, keep in mind that the otherwise automatically created index may change in the future to be optimized for the internal usage by Parse Server.', - action: parsers.booleanParser, - default: true, - }, - createIndexUserEmailVerifyToken: { - env: 'PARSE_SERVER_DATABASE_CREATE_INDEX_USER_EMAIL_VERIFY_TOKEN', - help: 'Set to `true` to automatically create an index on the _email_verify_token field of the _User collection on server start. Set to `false` to skip index creation. Default is `true`.

\u26A0\uFE0F When setting this option to `false` to manually create the index, keep in mind that the otherwise automatically created index may change in the future to be optimized for the internal usage by Parse Server.', - action: parsers.booleanParser, - default: true, - }, - createIndexUserPasswordResetToken: { - env: 'PARSE_SERVER_DATABASE_CREATE_INDEX_USER_PASSWORD_RESET_TOKEN', - help: 'Set to `true` to automatically create an index on the _perishable_token field of the _User collection on server start. Set to `false` to skip index creation. Default is `true`.

\u26A0\uFE0F When setting this option to `false` to manually create the index, keep in mind that the otherwise automatically created index may change in the future to be optimized for the internal usage by Parse Server.', - action: parsers.booleanParser, - default: true, - }, - createIndexUserUsername: { - env: 'PARSE_SERVER_DATABASE_CREATE_INDEX_USER_USERNAME', - help: 'Set to `true` to automatically create indexes on the username field of the _User collection on server start. Set to `false` to skip index creation. Default is `true`.

\u26A0\uFE0F When setting this option to `false` to manually create the index, keep in mind that the otherwise automatically created index may change in the future to be optimized for the internal usage by Parse Server.', - action: parsers.booleanParser, - default: true, - }, - createIndexUserUsernameCaseInsensitive: { - env: 'PARSE_SERVER_DATABASE_CREATE_INDEX_USER_USERNAME_CASE_INSENSITIVE', - help: 'Set to `true` to automatically create a case-insensitive index on the username field of the _User collection on server start. Set to `false` to skip index creation. Default is `true`.

\u26A0\uFE0F When setting this option to `false` to manually create the index, keep in mind that the otherwise automatically created index may change in the future to be optimized for the internal usage by Parse Server.', - action: parsers.booleanParser, - default: true, - }, - directConnection: { - env: 'PARSE_SERVER_DATABASE_DIRECT_CONNECTION', - help: 'The MongoDB driver option to force a Single topology type with a connection string containing one host.', - action: parsers.booleanParser, - }, - disableIndexFieldValidation: { - env: 'PARSE_SERVER_DATABASE_DISABLE_INDEX_FIELD_VALIDATION', - help: 'Set to `true` to disable validation of index fields. When disabled, indexes can be created even if the fields do not exist in the schema. This can be useful when creating indexes on fields that will be added later.', - action: parsers.booleanParser, - }, - enableSchemaHooks: { - env: 'PARSE_SERVER_DATABASE_ENABLE_SCHEMA_HOOKS', - help: 'Enables database real-time hooks to update single schema cache. Set to `true` if using multiple Parse Servers instances connected to the same database. Failing to do so will cause a schema change to not propagate to all instances and re-syncing will only happen when the instances restart. 
To use this feature with MongoDB, a replica set cluster with [change stream](https://docs.mongodb.com/manual/changeStreams/#availability) support is required.', - action: parsers.booleanParser, - default: false, - }, - forceServerObjectId: { - env: 'PARSE_SERVER_DATABASE_FORCE_SERVER_OBJECT_ID', - help: 'The MongoDB driver option to force server to assign _id values instead of driver.', - action: parsers.booleanParser, - }, - heartbeatFrequencyMS: { - env: 'PARSE_SERVER_DATABASE_HEARTBEAT_FREQUENCY_MS', - help: 'The MongoDB driver option to specify the frequency in milliseconds at which the driver checks the state of the MongoDB deployment.', - action: parsers.numberParser('heartbeatFrequencyMS'), - }, - loadBalanced: { - env: 'PARSE_SERVER_DATABASE_LOAD_BALANCED', - help: 'The MongoDB driver option to instruct the driver it is connecting to a load balancer fronting a mongos like service.', - action: parsers.booleanParser, - }, - localThresholdMS: { - env: 'PARSE_SERVER_DATABASE_LOCAL_THRESHOLD_MS', - help: 'The MongoDB driver option to specify the size (in milliseconds) of the latency window for selecting among multiple suitable MongoDB instances.', - action: parsers.numberParser('localThresholdMS'), - }, - logClientEvents: { - env: 'PARSE_SERVER_DATABASE_LOG_CLIENT_EVENTS', - help: 'An array of MongoDB client event configurations to enable logging of specific events.', - action: parsers.arrayParser, - type: 'LogClientEvent[]', - }, - maxConnecting: { - env: 'PARSE_SERVER_DATABASE_MAX_CONNECTING', - help: 'The MongoDB driver option to specify the maximum number of connections that may be in the process of being established concurrently by the connection pool.', - action: parsers.numberParser('maxConnecting'), - }, - maxIdleTimeMS: { - env: 'PARSE_SERVER_DATABASE_MAX_IDLE_TIME_MS', - help: 'The MongoDB driver option to specify the amount of time in milliseconds that a connection can remain idle in the connection pool before being removed and closed.', - action: 
parsers.numberParser('maxIdleTimeMS'), - }, - maxPoolSize: { - env: 'PARSE_SERVER_DATABASE_MAX_POOL_SIZE', - help: 'The MongoDB driver option to set the maximum number of opened, cached, ready-to-use database connections maintained by the driver.', - action: parsers.numberParser('maxPoolSize'), - }, - maxStalenessSeconds: { - env: 'PARSE_SERVER_DATABASE_MAX_STALENESS_SECONDS', - help: 'The MongoDB driver option to set the maximum replication lag for reads from secondary nodes.', - action: parsers.numberParser('maxStalenessSeconds'), - }, - maxTimeMS: { - env: 'PARSE_SERVER_DATABASE_MAX_TIME_MS', - help: 'The MongoDB driver option to set a cumulative time limit in milliseconds for processing operations on a cursor.', - action: parsers.numberParser('maxTimeMS'), - }, - minPoolSize: { - env: 'PARSE_SERVER_DATABASE_MIN_POOL_SIZE', - help: 'The MongoDB driver option to set the minimum number of opened, cached, ready-to-use database connections maintained by the driver.', - action: parsers.numberParser('minPoolSize'), - }, - proxyHost: { - env: 'PARSE_SERVER_DATABASE_PROXY_HOST', - help: 'The MongoDB driver option to configure a Socks5 proxy host used for creating TCP connections.', - }, - proxyPassword: { - env: 'PARSE_SERVER_DATABASE_PROXY_PASSWORD', - help: 'The MongoDB driver option to configure a Socks5 proxy password when the proxy requires username/password authentication.', - }, - proxyPort: { - env: 'PARSE_SERVER_DATABASE_PROXY_PORT', - help: 'The MongoDB driver option to configure a Socks5 proxy port used for creating TCP connections.', - action: parsers.numberParser('proxyPort'), - }, - proxyUsername: { - env: 'PARSE_SERVER_DATABASE_PROXY_USERNAME', - help: 'The MongoDB driver option to configure a Socks5 proxy username when the proxy requires username/password authentication.', - }, - readConcernLevel: { - env: 'PARSE_SERVER_DATABASE_READ_CONCERN_LEVEL', - help: 'The MongoDB driver option to specify the level of isolation.', - }, - readPreference: { - env: 
'PARSE_SERVER_DATABASE_READ_PREFERENCE', - help: 'The MongoDB driver option to specify the read preferences for this connection.', - }, - readPreferenceTags: { - env: 'PARSE_SERVER_DATABASE_READ_PREFERENCE_TAGS', - help: 'The MongoDB driver option to specify the tags document as a comma-separated list of colon-separated key-value pairs.', - action: parsers.arrayParser, - }, - replicaSet: { - env: 'PARSE_SERVER_DATABASE_REPLICA_SET', - help: 'The MongoDB driver option to specify the name of the replica set, if the mongod is a member of a replica set.', - }, - retryReads: { - env: 'PARSE_SERVER_DATABASE_RETRY_READS', - help: 'The MongoDB driver option to enable retryable reads.', - action: parsers.booleanParser, - }, - retryWrites: { - env: 'PARSE_SERVER_DATABASE_RETRY_WRITES', - help: 'The MongoDB driver option to set whether to retry failed writes.', - action: parsers.booleanParser, - }, - schemaCacheTtl: { - env: 'PARSE_SERVER_DATABASE_SCHEMA_CACHE_TTL', - help: 'The duration in seconds after which the schema cache expires and will be refetched from the database. Use this option if using multiple Parse Servers instances connected to the same database. A low duration will cause the schema cache to be updated too often, causing unnecessary database reads. A high duration will cause the schema to be updated too rarely, increasing the time required until schema changes propagate to all server instances. This feature can be used as an alternative or in conjunction with the option `enableSchemaHooks`. 
Default is infinite which means the schema cache never expires.', - action: parsers.numberParser('schemaCacheTtl'), - }, - serverMonitoringMode: { - env: 'PARSE_SERVER_DATABASE_SERVER_MONITORING_MODE', - help: 'The MongoDB driver option to instruct the driver monitors to use a specific monitoring mode.', - }, - serverSelectionTimeoutMS: { - env: 'PARSE_SERVER_DATABASE_SERVER_SELECTION_TIMEOUT_MS', - help: 'The MongoDB driver option to specify the amount of time in milliseconds for a server to be considered suitable for selection.', - action: parsers.numberParser('serverSelectionTimeoutMS'), - }, - socketTimeoutMS: { - env: 'PARSE_SERVER_DATABASE_SOCKET_TIMEOUT_MS', - help: 'The MongoDB driver option to specify the amount of time, in milliseconds, spent attempting to send or receive on a socket before timing out. Specifying 0 means no timeout.', - action: parsers.numberParser('socketTimeoutMS'), - }, - srvMaxHosts: { - env: 'PARSE_SERVER_DATABASE_SRV_MAX_HOSTS', - help: 'The MongoDB driver option to specify the maximum number of hosts to connect to when using an srv connection string, a setting of 0 means unlimited hosts.', - action: parsers.numberParser('srvMaxHosts'), - }, - srvServiceName: { - env: 'PARSE_SERVER_DATABASE_SRV_SERVICE_NAME', - help: 'The MongoDB driver option to modify the srv URI service name.', - }, - ssl: { - env: 'PARSE_SERVER_DATABASE_SSL', - help: 'The MongoDB driver option to enable or disable TLS/SSL for the connection (equivalent to tls option).', - action: parsers.booleanParser, - }, - tls: { - env: 'PARSE_SERVER_DATABASE_TLS', - help: 'The MongoDB driver option to enable or disable TLS/SSL for the connection.', - action: parsers.booleanParser, - }, - tlsAllowInvalidCertificates: { - env: 'PARSE_SERVER_DATABASE_TLS_ALLOW_INVALID_CERTIFICATES', - help: 'The MongoDB driver option to bypass validation of the certificates presented by the mongod/mongos instance.', - action: parsers.booleanParser, - }, - tlsAllowInvalidHostnames: { - env: 
'PARSE_SERVER_DATABASE_TLS_ALLOW_INVALID_HOSTNAMES', - help: 'The MongoDB driver option to disable hostname validation of the certificate presented by the mongod/mongos instance.', - action: parsers.booleanParser, - }, - tlsCAFile: { - env: 'PARSE_SERVER_DATABASE_TLS_CAFILE', - help: 'The MongoDB driver option to specify the location of a local .pem file that contains the root certificate chain from the Certificate Authority.', - }, - tlsCertificateKeyFile: { - env: 'PARSE_SERVER_DATABASE_TLS_CERTIFICATE_KEY_FILE', - help: "The MongoDB driver option to specify the location of a local .pem file that contains the client's TLS/SSL certificate and key.", - }, - tlsCertificateKeyFilePassword: { - env: 'PARSE_SERVER_DATABASE_TLS_CERTIFICATE_KEY_FILE_PASSWORD', - help: 'The MongoDB driver option to specify the password to decrypt the tlsCertificateKeyFile.', - }, - tlsInsecure: { - env: 'PARSE_SERVER_DATABASE_TLS_INSECURE', - help: 'The MongoDB driver option to disable various certificate validations.', - action: parsers.booleanParser, - }, - waitQueueTimeoutMS: { - env: 'PARSE_SERVER_DATABASE_WAIT_QUEUE_TIMEOUT_MS', - help: 'The MongoDB driver option to specify the maximum time in milliseconds that a thread can wait for a connection to become available.', - action: parsers.numberParser('waitQueueTimeoutMS'), - }, - zlibCompressionLevel: { - env: 'PARSE_SERVER_DATABASE_ZLIB_COMPRESSION_LEVEL', - help: 'The MongoDB driver option to specify the compression level if using zlib for network compression (0-9).', - action: parsers.numberParser('zlibCompressionLevel'), - }, + "allowPublicExplain": { + "env": "PARSE_SERVER_DATABASE_ALLOW_PUBLIC_EXPLAIN", + "help": "Set to `true` to allow `Parse.Query.explain` without master key.

\u26A0\uFE0F Enabling this option may expose sensitive query performance data to unauthorized users and could potentially be exploited for malicious purposes.", + "action": parsers.booleanParser, + "default": false + }, + "appName": { + "env": "PARSE_SERVER_DATABASE_APP_NAME", + "help": "The MongoDB driver option to specify the name of the application that created this MongoClient instance." + }, + "authMechanism": { + "env": "PARSE_SERVER_DATABASE_AUTH_MECHANISM", + "help": "The MongoDB driver option to specify the authentication mechanism that MongoDB will use to authenticate the connection." + }, + "authMechanismProperties": { + "env": "PARSE_SERVER_DATABASE_AUTH_MECHANISM_PROPERTIES", + "help": "The MongoDB driver option to specify properties for the specified authMechanism as a comma-separated list of colon-separated key-value pairs.", + "action": parsers.objectParser + }, + "authSource": { + "env": "PARSE_SERVER_DATABASE_AUTH_SOURCE", + "help": "The MongoDB driver option to specify the database name associated with the user's credentials." + }, + "autoSelectFamily": { + "env": "PARSE_SERVER_DATABASE_AUTO_SELECT_FAMILY", + "help": "The MongoDB driver option to set whether the socket attempts to connect to IPv6 and IPv4 addresses until a connection is established. If available, the driver will select the first IPv6 address.", + "action": parsers.booleanParser + }, + "autoSelectFamilyAttemptTimeout": { + "env": "PARSE_SERVER_DATABASE_AUTO_SELECT_FAMILY_ATTEMPT_TIMEOUT", + "help": "The MongoDB driver option to specify the amount of time in milliseconds to wait for a connection attempt to finish before trying the next address when using the autoSelectFamily option. If set to a positive integer less than 10, the value 10 is used instead.", + "action": parsers.numberParser("autoSelectFamilyAttemptTimeout") + }, + "batchSize": { + "env": "PARSE_SERVER_DATABASE_BATCH_SIZE", + "help": "The number of documents per batch for MongoDB cursor `getMore` operations. 
A lower value reduces memory usage per batch; a higher value reduces the number of network round-trips.", + "action": parsers.numberParser("batchSize"), + "default": 1000 + }, + "clientMetadata": { + "env": "PARSE_SERVER_DATABASE_CLIENT_METADATA", + "help": "Custom metadata to append to database client connections for identifying Parse Server instances in database logs. If set, this metadata will be visible in database logs during connection handshakes. This can help with debugging and monitoring in deployments with multiple database clients. Set `name` to identify your application (e.g., 'MyApp') and `version` to your application's version. Leave undefined (default) to disable this feature and avoid the additional data transfer overhead.", + "action": parsers.objectParser, + "type": "DatabaseOptionsClientMetadata" + }, + "compressors": { + "env": "PARSE_SERVER_DATABASE_COMPRESSORS", + "help": "The MongoDB driver option to specify an array or comma-delimited string of compressors to enable network compression for communication between this client and a mongod/mongos instance." + }, + "connectTimeoutMS": { + "env": "PARSE_SERVER_DATABASE_CONNECT_TIMEOUT_MS", + "help": "The MongoDB driver option to specify the amount of time, in milliseconds, to wait to establish a single TCP socket connection to the server before raising an error. Specifying 0 disables the connection timeout.", + "action": parsers.numberParser("connectTimeoutMS") + }, + "createIndexAuthDataUniqueness": { + "env": "PARSE_SERVER_DATABASE_CREATE_INDEX_AUTH_DATA_UNIQUENESS", + "help": "Set to `true` to automatically create unique indexes on the authData fields of the _User collection for each configured auth provider on server start, including `anonymous` when anonymous users are enabled. These indexes prevent race conditions during concurrent signups with the same authData. Set to `false` to skip index creation. Default is `true`.

\u26A0\uFE0F When setting this option to `false` to manually create the indexes, keep in mind that the otherwise automatically created indexes may change in the future to be optimized for the internal usage by Parse Server.", + "action": parsers.booleanParser, + "default": true + }, + "createIndexRoleName": { + "env": "PARSE_SERVER_DATABASE_CREATE_INDEX_ROLE_NAME", + "help": "Set to `true` to automatically create a unique index on the name field of the _Role collection on server start. Set to `false` to skip index creation. Default is `true`.

\u26A0\uFE0F When setting this option to `false` to manually create the index, keep in mind that the otherwise automatically created index may change in the future to be optimized for the internal usage by Parse Server.", + "action": parsers.booleanParser, + "default": true + }, + "createIndexUserEmail": { + "env": "PARSE_SERVER_DATABASE_CREATE_INDEX_USER_EMAIL", + "help": "Set to `true` to automatically create indexes on the email field of the _User collection on server start. Set to `false` to skip index creation. Default is `true`.

\u26A0\uFE0F When setting this option to `false` to manually create the index, keep in mind that the otherwise automatically created index may change in the future to be optimized for the internal usage by Parse Server.", + "action": parsers.booleanParser, + "default": true + }, + "createIndexUserEmailCaseInsensitive": { + "env": "PARSE_SERVER_DATABASE_CREATE_INDEX_USER_EMAIL_CASE_INSENSITIVE", + "help": "Set to `true` to automatically create a case-insensitive index on the email field of the _User collection on server start. Set to `false` to skip index creation. Default is `true`.

\u26A0\uFE0F When setting this option to `false` to manually create the index, keep in mind that the otherwise automatically created index may change in the future to be optimized for the internal usage by Parse Server.", + "action": parsers.booleanParser, + "default": true + }, + "createIndexUserEmailVerifyToken": { + "env": "PARSE_SERVER_DATABASE_CREATE_INDEX_USER_EMAIL_VERIFY_TOKEN", + "help": "Set to `true` to automatically create an index on the _email_verify_token field of the _User collection on server start. Set to `false` to skip index creation. Default is `true`.

\u26A0\uFE0F When setting this option to `false` to manually create the index, keep in mind that the otherwise automatically created index may change in the future to be optimized for the internal usage by Parse Server.", + "action": parsers.booleanParser, + "default": true + }, + "createIndexUserPasswordResetToken": { + "env": "PARSE_SERVER_DATABASE_CREATE_INDEX_USER_PASSWORD_RESET_TOKEN", + "help": "Set to `true` to automatically create an index on the _perishable_token field of the _User collection on server start. Set to `false` to skip index creation. Default is `true`.

\u26A0\uFE0F When setting this option to `false` to manually create the index, keep in mind that the otherwise automatically created index may change in the future to be optimized for the internal usage by Parse Server.", + "action": parsers.booleanParser, + "default": true + }, + "createIndexUserUsername": { + "env": "PARSE_SERVER_DATABASE_CREATE_INDEX_USER_USERNAME", + "help": "Set to `true` to automatically create indexes on the username field of the _User collection on server start. Set to `false` to skip index creation. Default is `true`.

\u26A0\uFE0F When setting this option to `false` to manually create the index, keep in mind that the otherwise automatically created index may change in the future to be optimized for the internal usage by Parse Server.", + "action": parsers.booleanParser, + "default": true + }, + "createIndexUserUsernameCaseInsensitive": { + "env": "PARSE_SERVER_DATABASE_CREATE_INDEX_USER_USERNAME_CASE_INSENSITIVE", + "help": "Set to `true` to automatically create a case-insensitive index on the username field of the _User collection on server start. Set to `false` to skip index creation. Default is `true`.

\u26A0\uFE0F When setting this option to `false` to manually create the index, keep in mind that the otherwise automatically created index may change in the future to be optimized for the internal usage by Parse Server.", + "action": parsers.booleanParser, + "default": true + }, + "directConnection": { + "env": "PARSE_SERVER_DATABASE_DIRECT_CONNECTION", + "help": "The MongoDB driver option to force a Single topology type with a connection string containing one host.", + "action": parsers.booleanParser + }, + "disableIndexFieldValidation": { + "env": "PARSE_SERVER_DATABASE_DISABLE_INDEX_FIELD_VALIDATION", + "help": "Set to `true` to disable validation of index fields. When disabled, indexes can be created even if the fields do not exist in the schema. This can be useful when creating indexes on fields that will be added later.", + "action": parsers.booleanParser + }, + "enableSchemaHooks": { + "env": "PARSE_SERVER_DATABASE_ENABLE_SCHEMA_HOOKS", + "help": "Enables database real-time hooks to update single schema cache. Set to `true` if using multiple Parse Servers instances connected to the same database. Failing to do so will cause a schema change to not propagate to all instances and re-syncing will only happen when the instances restart. 
To use this feature with MongoDB, a replica set cluster with [change stream](https://docs.mongodb.com/manual/changeStreams/#availability) support is required.", + "action": parsers.booleanParser, + "default": false + }, + "forceServerObjectId": { + "env": "PARSE_SERVER_DATABASE_FORCE_SERVER_OBJECT_ID", + "help": "The MongoDB driver option to force server to assign _id values instead of driver.", + "action": parsers.booleanParser + }, + "heartbeatFrequencyMS": { + "env": "PARSE_SERVER_DATABASE_HEARTBEAT_FREQUENCY_MS", + "help": "The MongoDB driver option to specify the frequency in milliseconds at which the driver checks the state of the MongoDB deployment.", + "action": parsers.numberParser("heartbeatFrequencyMS") + }, + "loadBalanced": { + "env": "PARSE_SERVER_DATABASE_LOAD_BALANCED", + "help": "The MongoDB driver option to instruct the driver it is connecting to a load balancer fronting a mongos like service.", + "action": parsers.booleanParser + }, + "localThresholdMS": { + "env": "PARSE_SERVER_DATABASE_LOCAL_THRESHOLD_MS", + "help": "The MongoDB driver option to specify the size (in milliseconds) of the latency window for selecting among multiple suitable MongoDB instances.", + "action": parsers.numberParser("localThresholdMS") + }, + "logClientEvents": { + "env": "PARSE_SERVER_DATABASE_LOG_CLIENT_EVENTS", + "help": "An array of MongoDB client event configurations to enable logging of specific events.", + "action": parsers.arrayParser, + "type": "LogClientEvent[]" + }, + "maxConnecting": { + "env": "PARSE_SERVER_DATABASE_MAX_CONNECTING", + "help": "The MongoDB driver option to specify the maximum number of connections that may be in the process of being established concurrently by the connection pool.", + "action": parsers.numberParser("maxConnecting") + }, + "maxIdleTimeMS": { + "env": "PARSE_SERVER_DATABASE_MAX_IDLE_TIME_MS", + "help": "The MongoDB driver option to specify the amount of time in milliseconds that a connection can remain idle in the connection 
pool before being removed and closed.", + "action": parsers.numberParser("maxIdleTimeMS") + }, + "maxPoolSize": { + "env": "PARSE_SERVER_DATABASE_MAX_POOL_SIZE", + "help": "The MongoDB driver option to set the maximum number of opened, cached, ready-to-use database connections maintained by the driver.", + "action": parsers.numberParser("maxPoolSize") + }, + "maxStalenessSeconds": { + "env": "PARSE_SERVER_DATABASE_MAX_STALENESS_SECONDS", + "help": "The MongoDB driver option to set the maximum replication lag for reads from secondary nodes.", + "action": parsers.numberParser("maxStalenessSeconds") + }, + "maxTimeMS": { + "env": "PARSE_SERVER_DATABASE_MAX_TIME_MS", + "help": "The MongoDB driver option to set a cumulative time limit in milliseconds for processing operations on a cursor.", + "action": parsers.numberParser("maxTimeMS") + }, + "minPoolSize": { + "env": "PARSE_SERVER_DATABASE_MIN_POOL_SIZE", + "help": "The MongoDB driver option to set the minimum number of opened, cached, ready-to-use database connections maintained by the driver.", + "action": parsers.numberParser("minPoolSize") + }, + "proxyHost": { + "env": "PARSE_SERVER_DATABASE_PROXY_HOST", + "help": "The MongoDB driver option to configure a Socks5 proxy host used for creating TCP connections." + }, + "proxyPassword": { + "env": "PARSE_SERVER_DATABASE_PROXY_PASSWORD", + "help": "The MongoDB driver option to configure a Socks5 proxy password when the proxy requires username/password authentication." + }, + "proxyPort": { + "env": "PARSE_SERVER_DATABASE_PROXY_PORT", + "help": "The MongoDB driver option to configure a Socks5 proxy port used for creating TCP connections.", + "action": parsers.numberParser("proxyPort") + }, + "proxyUsername": { + "env": "PARSE_SERVER_DATABASE_PROXY_USERNAME", + "help": "The MongoDB driver option to configure a Socks5 proxy username when the proxy requires username/password authentication." 
+ }, + "readConcernLevel": { + "env": "PARSE_SERVER_DATABASE_READ_CONCERN_LEVEL", + "help": "The MongoDB driver option to specify the level of isolation." + }, + "readPreference": { + "env": "PARSE_SERVER_DATABASE_READ_PREFERENCE", + "help": "The MongoDB driver option to specify the read preferences for this connection." + }, + "readPreferenceTags": { + "env": "PARSE_SERVER_DATABASE_READ_PREFERENCE_TAGS", + "help": "The MongoDB driver option to specify the tags document as a comma-separated list of colon-separated key-value pairs.", + "action": parsers.arrayParser + }, + "replicaSet": { + "env": "PARSE_SERVER_DATABASE_REPLICA_SET", + "help": "The MongoDB driver option to specify the name of the replica set, if the mongod is a member of a replica set." + }, + "retryReads": { + "env": "PARSE_SERVER_DATABASE_RETRY_READS", + "help": "The MongoDB driver option to enable retryable reads.", + "action": parsers.booleanParser + }, + "retryWrites": { + "env": "PARSE_SERVER_DATABASE_RETRY_WRITES", + "help": "The MongoDB driver option to set whether to retry failed writes.", + "action": parsers.booleanParser + }, + "schemaCacheTtl": { + "env": "PARSE_SERVER_DATABASE_SCHEMA_CACHE_TTL", + "help": "The duration in seconds after which the schema cache expires and will be refetched from the database. Use this option if using multiple Parse Servers instances connected to the same database. A low duration will cause the schema cache to be updated too often, causing unnecessary database reads. A high duration will cause the schema to be updated too rarely, increasing the time required until schema changes propagate to all server instances. This feature can be used as an alternative or in conjunction with the option `enableSchemaHooks`. 
Default is infinite which means the schema cache never expires.", + "action": parsers.numberParser("schemaCacheTtl") + }, + "serverMonitoringMode": { + "env": "PARSE_SERVER_DATABASE_SERVER_MONITORING_MODE", + "help": "The MongoDB driver option to instruct the driver monitors to use a specific monitoring mode." + }, + "serverSelectionTimeoutMS": { + "env": "PARSE_SERVER_DATABASE_SERVER_SELECTION_TIMEOUT_MS", + "help": "The MongoDB driver option to specify the amount of time in milliseconds for a server to be considered suitable for selection.", + "action": parsers.numberParser("serverSelectionTimeoutMS") + }, + "socketTimeoutMS": { + "env": "PARSE_SERVER_DATABASE_SOCKET_TIMEOUT_MS", + "help": "The MongoDB driver option to specify the amount of time, in milliseconds, spent attempting to send or receive on a socket before timing out. Specifying 0 means no timeout.", + "action": parsers.numberParser("socketTimeoutMS") + }, + "srvMaxHosts": { + "env": "PARSE_SERVER_DATABASE_SRV_MAX_HOSTS", + "help": "The MongoDB driver option to specify the maximum number of hosts to connect to when using an srv connection string, a setting of 0 means unlimited hosts.", + "action": parsers.numberParser("srvMaxHosts") + }, + "srvServiceName": { + "env": "PARSE_SERVER_DATABASE_SRV_SERVICE_NAME", + "help": "The MongoDB driver option to modify the srv URI service name." 
+ }, + "ssl": { + "env": "PARSE_SERVER_DATABASE_SSL", + "help": "The MongoDB driver option to enable or disable TLS/SSL for the connection (equivalent to tls option).", + "action": parsers.booleanParser + }, + "tls": { + "env": "PARSE_SERVER_DATABASE_TLS", + "help": "The MongoDB driver option to enable or disable TLS/SSL for the connection.", + "action": parsers.booleanParser + }, + "tlsAllowInvalidCertificates": { + "env": "PARSE_SERVER_DATABASE_TLS_ALLOW_INVALID_CERTIFICATES", + "help": "The MongoDB driver option to bypass validation of the certificates presented by the mongod/mongos instance.", + "action": parsers.booleanParser + }, + "tlsAllowInvalidHostnames": { + "env": "PARSE_SERVER_DATABASE_TLS_ALLOW_INVALID_HOSTNAMES", + "help": "The MongoDB driver option to disable hostname validation of the certificate presented by the mongod/mongos instance.", + "action": parsers.booleanParser + }, + "tlsCAFile": { + "env": "PARSE_SERVER_DATABASE_TLS_CAFILE", + "help": "The MongoDB driver option to specify the location of a local .pem file that contains the root certificate chain from the Certificate Authority." + }, + "tlsCertificateKeyFile": { + "env": "PARSE_SERVER_DATABASE_TLS_CERTIFICATE_KEY_FILE", + "help": "The MongoDB driver option to specify the location of a local .pem file that contains the client's TLS/SSL certificate and key." + }, + "tlsCertificateKeyFilePassword": { + "env": "PARSE_SERVER_DATABASE_TLS_CERTIFICATE_KEY_FILE_PASSWORD", + "help": "The MongoDB driver option to specify the password to decrypt the tlsCertificateKeyFile." 
+ }, + "tlsInsecure": { + "env": "PARSE_SERVER_DATABASE_TLS_INSECURE", + "help": "The MongoDB driver option to disable various certificate validations.", + "action": parsers.booleanParser + }, + "waitQueueTimeoutMS": { + "env": "PARSE_SERVER_DATABASE_WAIT_QUEUE_TIMEOUT_MS", + "help": "The MongoDB driver option to specify the maximum time in milliseconds that a thread can wait for a connection to become available.", + "action": parsers.numberParser("waitQueueTimeoutMS") + }, + "zlibCompressionLevel": { + "env": "PARSE_SERVER_DATABASE_ZLIB_COMPRESSION_LEVEL", + "help": "The MongoDB driver option to specify the compression level if using zlib for network compression (0-9).", + "action": parsers.numberParser("zlibCompressionLevel") + } }; module.exports.DatabaseOptionsClientMetadata = { - name: { - env: 'PARSE_SERVER_DATABASE_CLIENT_METADATA_NAME', - help: "The name to identify your application in database logs (e.g., 'MyApp').", - required: true, - }, - version: { - env: 'PARSE_SERVER_DATABASE_CLIENT_METADATA_VERSION', - help: "The version of your application (e.g., '1.0.0').", - required: true, - }, + "name": { + "env": "PARSE_SERVER_DATABASE_CLIENT_METADATA_NAME", + "help": "The name to identify your application in database logs (e.g., 'MyApp').", + "required": true + }, + "version": { + "env": "PARSE_SERVER_DATABASE_CLIENT_METADATA_VERSION", + "help": "The version of your application (e.g., '1.0.0').", + "required": true + } }; module.exports.AuthAdapter = { - enabled: { - help: 'Is `true` if the auth adapter is enabled, `false` otherwise.', - action: parsers.booleanParser, - default: false, - }, + "enabled": { + "help": "Is `true` if the auth adapter is enabled, `false` otherwise.", + "action": parsers.booleanParser, + "default": false + } }; module.exports.LogLevels = { - cloudFunctionError: { - env: 'PARSE_SERVER_LOG_LEVELS_CLOUD_FUNCTION_ERROR', - help: 'Log level used by the Cloud Code Functions on error. Default is `error`. 
See [LogLevel](LogLevel.html) for available values.', - default: 'error', - }, - cloudFunctionSuccess: { - env: 'PARSE_SERVER_LOG_LEVELS_CLOUD_FUNCTION_SUCCESS', - help: 'Log level used by the Cloud Code Functions on success. Default is `info`. See [LogLevel](LogLevel.html) for available values.', - default: 'info', - }, - signupUsernameTaken: { - env: 'PARSE_SERVER_LOG_LEVELS_SIGNUP_USERNAME_TAKEN', - help: 'Log level used when a sign-up fails because the username already exists. Default is `info`. See [LogLevel](LogLevel.html) for available values.', - default: 'info', - }, - triggerAfter: { - env: 'PARSE_SERVER_LOG_LEVELS_TRIGGER_AFTER', - help: 'Log level used by the Cloud Code Triggers `afterSave`, `afterDelete`, `afterFind`, `afterLogout`. Default is `info`. See [LogLevel](LogLevel.html) for available values.', - default: 'info', - }, - triggerBeforeError: { - env: 'PARSE_SERVER_LOG_LEVELS_TRIGGER_BEFORE_ERROR', - help: 'Log level used by the Cloud Code Triggers `beforeSave`, `beforeDelete`, `beforeFind`, `beforeLogin` on error. Default is `error`. See [LogLevel](LogLevel.html) for available values.', - default: 'error', - }, - triggerBeforeSuccess: { - env: 'PARSE_SERVER_LOG_LEVELS_TRIGGER_BEFORE_SUCCESS', - help: 'Log level used by the Cloud Code Triggers `beforeSave`, `beforeDelete`, `beforeFind`, `beforeLogin` on success. Default is `info`. See [LogLevel](LogLevel.html) for available values.', - default: 'info', - }, + "cloudFunctionError": { + "env": "PARSE_SERVER_LOG_LEVELS_CLOUD_FUNCTION_ERROR", + "help": "Log level used by the Cloud Code Functions on error. Default is `error`. See [LogLevel](LogLevel.html) for available values.", + "default": "error" + }, + "cloudFunctionSuccess": { + "env": "PARSE_SERVER_LOG_LEVELS_CLOUD_FUNCTION_SUCCESS", + "help": "Log level used by the Cloud Code Functions on success. Default is `info`. 
See [LogLevel](LogLevel.html) for available values.", + "default": "info" + }, + "signupUsernameTaken": { + "env": "PARSE_SERVER_LOG_LEVELS_SIGNUP_USERNAME_TAKEN", + "help": "Log level used when a sign-up fails because the username already exists. Default is `info`. See [LogLevel](LogLevel.html) for available values.", + "default": "info" + }, + "triggerAfter": { + "env": "PARSE_SERVER_LOG_LEVELS_TRIGGER_AFTER", + "help": "Log level used by the Cloud Code Triggers `afterSave`, `afterDelete`, `afterFind`, `afterLogout`. Default is `info`. See [LogLevel](LogLevel.html) for available values.", + "default": "info" + }, + "triggerBeforeError": { + "env": "PARSE_SERVER_LOG_LEVELS_TRIGGER_BEFORE_ERROR", + "help": "Log level used by the Cloud Code Triggers `beforeSave`, `beforeDelete`, `beforeFind`, `beforeLogin` on error. Default is `error`. See [LogLevel](LogLevel.html) for available values.", + "default": "error" + }, + "triggerBeforeSuccess": { + "env": "PARSE_SERVER_LOG_LEVELS_TRIGGER_BEFORE_SUCCESS", + "help": "Log level used by the Cloud Code Triggers `beforeSave`, `beforeDelete`, `beforeFind`, `beforeLogin` on success. Default is `info`. 
See [LogLevel](LogLevel.html) for available values.", + "default": "info" + } }; diff --git a/src/Options/docs.js b/src/Options/docs.js index 1e19bfa703..caaa6d686f 100644 --- a/src/Options/docs.js +++ b/src/Options/docs.js @@ -27,6 +27,9 @@ * @property {Number} cacheTTL Sets the TTL for the in memory cache (in ms), defaults to 5000 (5 seconds) * @property {String} clientKey Key for iOS, MacOS, tvOS clients * @property {String} cloud Full path to your cloud code main.js + * @property {Object} cloudCodeAdapters Array of CloudCodeAdapter instances for BYO cloud code integration + * @property {String} cloudCodeCommand Shell command to spawn an external cloud code process (ParseCloud/1.0 protocol) + * @property {Object} cloudCodeOptions Options for the external cloud code process adapter * @property {Number|Boolean} cluster Run with cluster, optionally set the number of processes default to os.cpus().length * @property {String} collectionPrefix A collection prefix for the classes * @property {Boolean} convertEmailToLowercase Optional. If set to `true`, the `email` property of a user is automatically converted to lowercase before being stored in the database. Consequently, queries must match the case as stored in the database, which would be lowercase in this scenario. If `false`, the `email` property is stored as set, without any case modifications. Default is `false`. @@ -112,7 +115,7 @@ * @property {Boolean} verbose Set the logging to verbose * @property {Boolean} verifyServerUrl Parse Server makes a HTTP request to the URL set in `serverURL` at the end of its launch routine to verify that the launch succeeded. If this option is set to `false`, the verification will be skipped. This can be useful in environments where the server URL is not accessible from the server itself, such as when running behind a firewall or in certain containerized environments.

⚠️ Server URL verification requires Parse Server to be able to call itself by making requests to the URL set in `serverURL`.

Default is `true`. * @property {Boolean} verifyUserEmails Set to `true` to require users to verify their email address to complete the sign-up process. Supports a function with a return value of `true` or `false` for conditional verification. The function receives a request object that includes `createdWith` to indicate whether the invocation is for `signup` or `login` and the used auth provider.

The `createdWith` values per scenario:
  • Password signup: `{ action: 'signup', authProvider: 'password' }`
  • Auth provider signup: `{ action: 'signup', authProvider: '' }`
  • Password login: `{ action: 'login', authProvider: 'password' }`
  • Auth provider login: function not invoked; auth provider login bypasses email verification
  • Resend verification email: `createdWith` is `undefined`; use the `resendRequest` property to identify those
Default is `false`. - * @property {String} webhookKey Key sent with outgoing webhook calls + * @property {String} webhookKey Key for authenticating external cloud code process requests. Required when cloudCodeCommand is set. */ /** @@ -353,3 +356,4 @@ * @property {String} triggerBeforeError Log level used by the Cloud Code Triggers `beforeSave`, `beforeDelete`, `beforeFind`, `beforeLogin` on error. Default is `error`. See [LogLevel](LogLevel.html) for available values. * @property {String} triggerBeforeSuccess Log level used by the Cloud Code Triggers `beforeSave`, `beforeDelete`, `beforeFind`, `beforeLogin` on success. Default is `info`. See [LogLevel](LogLevel.html) for available values. */ + diff --git a/src/Options/index.js b/src/Options/index.js index 59ed994898..6003e32699 100644 --- a/src/Options/index.js +++ b/src/Options/index.js @@ -146,14 +146,9 @@ export interface ParseServerOptions { /* Key for authenticating external cloud code process requests. Required when cloudCodeCommand is set. 
*/ webhookKey: ?string; /* Options for the external cloud code process adapter */ - cloudCodeOptions: ?{ - startupTimeout: ?number, - healthCheckInterval: ?number, - shutdownTimeout: ?number, - maxRestartDelay: ?number, - }; + cloudCodeOptions: ?Object; /* Array of CloudCodeAdapter instances for BYO cloud code integration */ - cloudCodeAdapters: ?Array; + cloudCodeAdapters: ?Object; /* A collection prefix for the classes :DEFAULT: '' */ collectionPrefix: ?string; diff --git a/src/cloud-code/adapters/ExternalProcessAdapter.ts b/src/cloud-code/adapters/ExternalProcessAdapter.ts index 27e279fea6..0b8e64c7de 100644 --- a/src/cloud-code/adapters/ExternalProcessAdapter.ts +++ b/src/cloud-code/adapters/ExternalProcessAdapter.ts @@ -1,6 +1,6 @@ // src/cloud-code/adapters/ExternalProcessAdapter.ts import { spawn, ChildProcess } from 'child_process'; -import http from 'http'; +import * as http from 'http'; import type { CloudCodeAdapter, CloudCodeRegistry, diff --git a/src/cloud-code/adapters/webhook-bridge.ts b/src/cloud-code/adapters/webhook-bridge.ts index 1c98e4880b..20b819a21b 100644 --- a/src/cloud-code/adapters/webhook-bridge.ts +++ b/src/cloud-code/adapters/webhook-bridge.ts @@ -1,4 +1,4 @@ -import { Parse } from 'parse/node'; +import Parse from 'parse/node'; import type { WebhookResponse } from '../types'; export function requestToWebhookBody(request: any): Record { From ad7f3218ee1c8621e4f460669b8b63562a57afb0 Mon Sep 17 00:00:00 2001 From: Daniel Date: Tue, 17 Mar 2026 21:13:01 +1100 Subject: [PATCH 07/14] fix: resolve remaining CI failures - Regenerate Definitions.js and docs.js via npm run definitions - Fix lint: replace instanceof Map with toBeInstanceOf (no-restricted-syntax) - Fix cloud type validation: reject non-string/function/object cloud values to preserve existing "cloud code must be valid type" test - Docker build failure is QEMU/arm64 infra flake (not code-related) --- spec/CloudCodeManager.spec.js | 2 +- src/Options/Definitions.js | 2890 
+++++++++++++++-------------- src/Options/docs.js | 1 - src/cloud-code/resolveAdapters.ts | 4 +- 4 files changed, 1452 insertions(+), 1445 deletions(-) diff --git a/spec/CloudCodeManager.spec.js b/spec/CloudCodeManager.spec.js index cdf71d22cc..07023f337c 100644 --- a/spec/CloudCodeManager.spec.js +++ b/spec/CloudCodeManager.spec.js @@ -231,7 +231,7 @@ describe('CloudCodeManager', () => { manager.defineJob('job1', () => {}, 'source-a'); manager.defineJob('job2', () => {}, 'source-a'); const jobs = manager.getJobs(); - expect(jobs instanceof Map).toBe(true); + expect(jobs).toBeInstanceOf(Map); expect(jobs.size).toBe(2); expect(jobs.has('job1')).toBe(true); expect(jobs.has('job2')).toBe(true); diff --git a/src/Options/Definitions.js b/src/Options/Definitions.js index a91bb6a1ae..1b0e3931e1 100644 --- a/src/Options/Definitions.js +++ b/src/Options/Definitions.js @@ -3,1488 +3,1494 @@ This code has been generated by resources/buildConfigDefinitions.js Do not edit manually, but update Options/index.js */ -var parsers = require("./parsers"); +var parsers = require('./parsers'); module.exports.SchemaOptions = { - "afterMigration": { - "env": "PARSE_SERVER_SCHEMA_AFTER_MIGRATION", - "help": "Execute a callback after running schema migrations." - }, - "beforeMigration": { - "env": "PARSE_SERVER_SCHEMA_BEFORE_MIGRATION", - "help": "Execute a callback before running schema migrations." - }, - "definitions": { - "env": "PARSE_SERVER_SCHEMA_DEFINITIONS", - "help": "Rest representation on Parse.Schema https://docs.parseplatform.org/rest/guide/#adding-a-schema", - "required": true, - "action": parsers.objectParser, - "default": [] - }, - "deleteExtraFields": { - "env": "PARSE_SERVER_SCHEMA_DELETE_EXTRA_FIELDS", - "help": "Is true if Parse Server should delete any fields not defined in a schema definition. 
This should only be used during development.", - "action": parsers.booleanParser, - "default": false - }, - "keepUnknownIndexes": { - "env": "PARSE_SERVER_SCHEMA_KEEP_UNKNOWN_INDEXES", - "help": "(Optional) Keep indexes that are present in the database but not defined in the schema. Set this to `true` if you are adding indexes manually, so that they won't be removed when running schema migration. Default is `false`.", - "action": parsers.booleanParser, - "default": false - }, - "lockSchemas": { - "env": "PARSE_SERVER_SCHEMA_LOCK_SCHEMAS", - "help": "Is true if Parse Server will reject any attempts to modify the schema while the server is running.", - "action": parsers.booleanParser, - "default": false - }, - "recreateModifiedFields": { - "env": "PARSE_SERVER_SCHEMA_RECREATE_MODIFIED_FIELDS", - "help": "Is true if Parse Server should recreate any fields that are different between the current database schema and theschema definition. This should only be used during development.", - "action": parsers.booleanParser, - "default": false - }, - "strict": { - "env": "PARSE_SERVER_SCHEMA_STRICT", - "help": "Is true if Parse Server should exit if schema update fail.", - "action": parsers.booleanParser, - "default": false - } + afterMigration: { + env: 'PARSE_SERVER_SCHEMA_AFTER_MIGRATION', + help: 'Execute a callback after running schema migrations.', + }, + beforeMigration: { + env: 'PARSE_SERVER_SCHEMA_BEFORE_MIGRATION', + help: 'Execute a callback before running schema migrations.', + }, + definitions: { + env: 'PARSE_SERVER_SCHEMA_DEFINITIONS', + help: 'Rest representation on Parse.Schema https://docs.parseplatform.org/rest/guide/#adding-a-schema', + required: true, + action: parsers.objectParser, + default: [], + }, + deleteExtraFields: { + env: 'PARSE_SERVER_SCHEMA_DELETE_EXTRA_FIELDS', + help: 'Is true if Parse Server should delete any fields not defined in a schema definition. 
This should only be used during development.', + action: parsers.booleanParser, + default: false, + }, + keepUnknownIndexes: { + env: 'PARSE_SERVER_SCHEMA_KEEP_UNKNOWN_INDEXES', + help: "(Optional) Keep indexes that are present in the database but not defined in the schema. Set this to `true` if you are adding indexes manually, so that they won't be removed when running schema migration. Default is `false`.", + action: parsers.booleanParser, + default: false, + }, + lockSchemas: { + env: 'PARSE_SERVER_SCHEMA_LOCK_SCHEMAS', + help: 'Is true if Parse Server will reject any attempts to modify the schema while the server is running.', + action: parsers.booleanParser, + default: false, + }, + recreateModifiedFields: { + env: 'PARSE_SERVER_SCHEMA_RECREATE_MODIFIED_FIELDS', + help: 'Is true if Parse Server should recreate any fields that are different between the current database schema and theschema definition. This should only be used during development.', + action: parsers.booleanParser, + default: false, + }, + strict: { + env: 'PARSE_SERVER_SCHEMA_STRICT', + help: 'Is true if Parse Server should exit if schema update fail.', + action: parsers.booleanParser, + default: false, + }, }; module.exports.ParseServerOptions = { - "accountLockout": { - "env": "PARSE_SERVER_ACCOUNT_LOCKOUT", - "help": "The account lockout policy for failed login attempts.

Note: Setting a user's ACL to an empty object `{}` via master key is a separate mechanism that only prevents new logins; it does not invalidate existing session tokens. To immediately revoke a user's access, destroy their sessions via master key in addition to setting the ACL.", - "action": parsers.objectParser, - "type": "AccountLockoutOptions" - }, - "allowClientClassCreation": { - "env": "PARSE_SERVER_ALLOW_CLIENT_CLASS_CREATION", - "help": "Enable (or disable) client class creation, defaults to false", - "action": parsers.booleanParser, - "default": false - }, - "allowCustomObjectId": { - "env": "PARSE_SERVER_ALLOW_CUSTOM_OBJECT_ID", - "help": "Enable (or disable) custom objectId", - "action": parsers.booleanParser, - "default": false - }, - "allowExpiredAuthDataToken": { - "env": "PARSE_SERVER_ALLOW_EXPIRED_AUTH_DATA_TOKEN", - "help": "Allow a user to log in even if the 3rd party authentication token that was used to sign in to their account has expired. If this is set to `false`, then the token will be validated every time the user signs in to their account. This refers to the token that is stored in the `_User.authData` field. Defaults to `false`.", - "action": parsers.booleanParser, - "default": false - }, - "allowHeaders": { - "env": "PARSE_SERVER_ALLOW_HEADERS", - "help": "Add headers to Access-Control-Allow-Headers", - "action": parsers.arrayParser - }, - "allowOrigin": { - "env": "PARSE_SERVER_ALLOW_ORIGIN", - "help": "Sets origins for Access-Control-Allow-Origin. 
This can be a string for a single origin or an array of strings for multiple origins.", - "action": parsers.arrayParser - }, - "analyticsAdapter": { - "env": "PARSE_SERVER_ANALYTICS_ADAPTER", - "help": "Adapter module for the analytics", - "action": parsers.moduleOrObjectParser - }, - "appId": { - "env": "PARSE_SERVER_APPLICATION_ID", - "help": "Your Parse Application ID", - "required": true - }, - "appName": { - "env": "PARSE_SERVER_APP_NAME", - "help": "Sets the app name" - }, - "auth": { - "env": "PARSE_SERVER_AUTH_PROVIDERS", - "help": "Configuration for your authentication providers, as stringified JSON. See http://docs.parseplatform.org/parse-server/guide/#oauth-and-3rd-party-authentication

Provider names must start with a letter and contain only letters, digits, and underscores (`/^[A-Za-z][A-Za-z0-9_]*$/`). This is because each provider name is used to construct a database field (`_auth_data_`), which must comply with Parse Server's field naming rules.", - "action": parsers.objectParser - }, - "cacheAdapter": { - "env": "PARSE_SERVER_CACHE_ADAPTER", - "help": "Adapter module for the cache", - "action": parsers.moduleOrObjectParser - }, - "cacheMaxSize": { - "env": "PARSE_SERVER_CACHE_MAX_SIZE", - "help": "Sets the maximum size for the in memory cache, defaults to 10000", - "action": parsers.numberParser("cacheMaxSize"), - "default": 10000 - }, - "cacheTTL": { - "env": "PARSE_SERVER_CACHE_TTL", - "help": "Sets the TTL for the in memory cache (in ms), defaults to 5000 (5 seconds)", - "action": parsers.numberParser("cacheTTL"), - "default": 5000 - }, - "clientKey": { - "env": "PARSE_SERVER_CLIENT_KEY", - "help": "Key for iOS, MacOS, tvOS clients" - }, - "cloud": { - "env": "PARSE_SERVER_CLOUD", - "help": "Full path to your cloud code main.js" - }, - "cloudCodeAdapters": { - "env": "PARSE_SERVER_CLOUD_CODE_ADAPTERS", - "help": "Array of CloudCodeAdapter instances for BYO cloud code integration", - "action": parsers.objectParser - }, - "cloudCodeCommand": { - "env": "PARSE_SERVER_CLOUD_CODE_COMMAND", - "help": "Shell command to spawn an external cloud code process (ParseCloud/1.0 protocol)" - }, - "cloudCodeOptions": { - "env": "PARSE_SERVER_CLOUD_CODE_OPTIONS", - "help": "Options for the external cloud code process adapter", - "action": parsers.objectParser - }, - "cluster": { - "env": "PARSE_SERVER_CLUSTER", - "help": "Run with cluster, optionally set the number of processes default to os.cpus().length", - "action": parsers.numberOrBooleanParser - }, - "collectionPrefix": { - "env": "PARSE_SERVER_COLLECTION_PREFIX", - "help": "A collection prefix for the classes", - "default": "" - }, - "convertEmailToLowercase": { - "env": 
"PARSE_SERVER_CONVERT_EMAIL_TO_LOWERCASE", - "help": "Optional. If set to `true`, the `email` property of a user is automatically converted to lowercase before being stored in the database. Consequently, queries must match the case as stored in the database, which would be lowercase in this scenario. If `false`, the `email` property is stored as set, without any case modifications. Default is `false`.", - "action": parsers.booleanParser, - "default": false - }, - "convertUsernameToLowercase": { - "env": "PARSE_SERVER_CONVERT_USERNAME_TO_LOWERCASE", - "help": "Optional. If set to `true`, the `username` property of a user is automatically converted to lowercase before being stored in the database. Consequently, queries must match the case as stored in the database, which would be lowercase in this scenario. If `false`, the `username` property is stored as set, without any case modifications. Default is `false`.", - "action": parsers.booleanParser, - "default": false - }, - "customPages": { - "env": "PARSE_SERVER_CUSTOM_PAGES", - "help": "custom pages for password validation and reset", - "action": parsers.objectParser, - "type": "CustomPagesOptions", - "default": {} - }, - "databaseAdapter": { - "env": "PARSE_SERVER_DATABASE_ADAPTER", - "help": "Adapter module for the database; any options that are not explicitly described here are passed directly to the database client.", - "action": parsers.moduleOrObjectParser - }, - "databaseOptions": { - "env": "PARSE_SERVER_DATABASE_OPTIONS", - "help": "Options to pass to the database client", - "action": parsers.objectParser, - "type": "DatabaseOptions" - }, - "databaseURI": { - "env": "PARSE_SERVER_DATABASE_URI", - "help": "The full URI to your database. 
Supported databases are mongodb or postgres.", - "required": true, - "default": "mongodb://localhost:27017/parse" - }, - "defaultLimit": { - "env": "PARSE_SERVER_DEFAULT_LIMIT", - "help": "Default value for limit option on queries, defaults to `100`.", - "action": parsers.numberParser("defaultLimit"), - "default": 100 - }, - "directAccess": { - "env": "PARSE_SERVER_DIRECT_ACCESS", - "help": "Set to `true` if Parse requests within the same Node.js environment as Parse Server should be routed to Parse Server directly instead of via the HTTP interface. Default is `false`.

If set to `false` then Parse requests within the same Node.js environment as Parse Server are executed as HTTP requests sent to Parse Server via the `serverURL`. For example, a `Parse.Query` in Cloud Code is calling Parse Server via a HTTP request. The server is essentially making a HTTP request to itself, unnecessarily using network resources such as network ports.

\u26A0\uFE0F In environments where multiple Parse Server instances run behind a load balancer and Parse requests within the current Node.js environment should be routed via the load balancer and distributed as HTTP requests among all instances via the `serverURL`, this should be set to `false`.", - "action": parsers.booleanParser, - "default": true - }, - "dotNetKey": { - "env": "PARSE_SERVER_DOT_NET_KEY", - "help": "Key for Unity and .Net SDK" - }, - "emailAdapter": { - "env": "PARSE_SERVER_EMAIL_ADAPTER", - "help": "Adapter module for email sending", - "action": parsers.moduleOrObjectParser - }, - "emailVerifySuccessOnInvalidEmail": { - "env": "PARSE_SERVER_EMAIL_VERIFY_SUCCESS_ON_INVALID_EMAIL", - "help": "Set to `true` if a request to verify the email should return a success response even if the provided email address does not belong to a verifiable account, for example because it is unknown or already verified, or `false` if the request should return an error response in those cases.

Default is `true`.
Requires option `verifyUserEmails: true`.", - "action": parsers.booleanParser, - "default": true - }, - "emailVerifyTokenReuseIfValid": { - "env": "PARSE_SERVER_EMAIL_VERIFY_TOKEN_REUSE_IF_VALID", - "help": "Set to `true` if a email verification token should be reused in case another token is requested but there is a token that is still valid, i.e. has not expired. This avoids the often observed issue that a user requests multiple emails and does not know which link contains a valid token because each newly generated token would invalidate the previous token.

Default is `false`.
Requires option `verifyUserEmails: true`.", - "action": parsers.booleanParser, - "default": false - }, - "emailVerifyTokenValidityDuration": { - "env": "PARSE_SERVER_EMAIL_VERIFY_TOKEN_VALIDITY_DURATION", - "help": "Set the validity duration of the email verification token in seconds after which the token expires. The token is used in the link that is set in the email. After the token expires, the link becomes invalid and a new link has to be sent. If the option is not set or set to `undefined`, then the token never expires.

For example, to expire the token after 2 hours, set a value of 7200 seconds (= 60 seconds * 60 minutes * 2 hours).

Default is `undefined`.
Requires option `verifyUserEmails: true`.", - "action": parsers.numberParser("emailVerifyTokenValidityDuration") - }, - "enableAnonymousUsers": { - "env": "PARSE_SERVER_ENABLE_ANON_USERS", - "help": "Enable (or disable) anonymous users, defaults to true", - "action": parsers.booleanParser, - "default": true - }, - "enableCollationCaseComparison": { - "env": "PARSE_SERVER_ENABLE_COLLATION_CASE_COMPARISON", - "help": "Optional. If set to `true`, the collation rule of case comparison for queries and indexes is enabled. Enable this option to run Parse Server with MongoDB Atlas Serverless or AWS Amazon DocumentDB. If `false`, the collation rule of case comparison is disabled. Default is `false`.", - "action": parsers.booleanParser, - "default": false - }, - "enableExpressErrorHandler": { - "env": "PARSE_SERVER_ENABLE_EXPRESS_ERROR_HANDLER", - "help": "Enables the default express error handler for all errors", - "action": parsers.booleanParser, - "default": false - }, - "enableInsecureAuthAdapters": { - "env": "PARSE_SERVER_ENABLE_INSECURE_AUTH_ADAPTERS", - "help": "Optional. Enables insecure authentication adapters. Insecure auth adapters are deprecated and will be removed in a future version. Defaults to `false`.", - "action": parsers.booleanParser, - "default": false - }, - "enableProductPurchaseLegacyApi": { - "env": "PARSE_SERVER_ENABLE_PRODUCT_PURCHASE_LEGACY_API", - "help": "Deprecated. Enables the legacy product purchase API including the `_Product` class and the `/validate_purchase` endpoint. This is an undocumented, unmaintained legacy feature inherited from the original Parse platform that may not function as expected. We strongly advise against using it. 
It will be removed in a future major version.", - "action": parsers.booleanParser, - "default": true - }, - "enableSanitizedErrorResponse": { - "env": "PARSE_SERVER_ENABLE_SANITIZED_ERROR_RESPONSE", - "help": "If set to `true`, error details are removed from error messages in responses to client requests, and instead a generic error message is sent. Default is `true`.", - "action": parsers.booleanParser, - "default": true - }, - "encryptionKey": { - "env": "PARSE_SERVER_ENCRYPTION_KEY", - "help": "Key for encrypting your files" - }, - "enforcePrivateUsers": { - "env": "PARSE_SERVER_ENFORCE_PRIVATE_USERS", - "help": "Set to true if new users should be created without public read and write access.", - "action": parsers.booleanParser, - "default": true - }, - "expireInactiveSessions": { - "env": "PARSE_SERVER_EXPIRE_INACTIVE_SESSIONS", - "help": "Sets whether we should expire the inactive sessions, defaults to true. If false, all new sessions are created with no expiration date.", - "action": parsers.booleanParser, - "default": true - }, - "extendSessionOnUse": { - "env": "PARSE_SERVER_EXTEND_SESSION_ON_USE", - "help": "Whether Parse Server should automatically extend a valid session by the sessionLength. 
In order to reduce the number of session updates in the database, a session will only be extended when a request is received after at least half of the current session's lifetime has passed.", - "action": parsers.booleanParser, - "default": false - }, - "fileKey": { - "env": "PARSE_SERVER_FILE_KEY", - "help": "Key for your files" - }, - "filesAdapter": { - "env": "PARSE_SERVER_FILES_ADAPTER", - "help": "Adapter module for the files sub-system", - "action": parsers.moduleOrObjectParser - }, - "fileUpload": { - "env": "PARSE_SERVER_FILE_UPLOAD_OPTIONS", - "help": "Options for file uploads", - "action": parsers.objectParser, - "type": "FileUploadOptions", - "default": {} - }, - "graphQLPath": { - "env": "PARSE_SERVER_GRAPHQL_PATH", - "help": "The mount path for the GraphQL endpoint

\u26A0\uFE0F File upload inside the GraphQL mutation system requires Parse Server to be able to call itself by making requests to the URL set in `serverURL`.

Defaults is `/graphql`.", - "default": "/graphql" - }, - "graphQLPublicIntrospection": { - "env": "PARSE_SERVER_GRAPHQL_PUBLIC_INTROSPECTION", - "help": "Enable public introspection for the GraphQL endpoint, defaults to false", - "action": parsers.booleanParser, - "default": false - }, - "graphQLSchema": { - "env": "PARSE_SERVER_GRAPH_QLSCHEMA", - "help": "Full path to your GraphQL custom schema.graphql file" - }, - "host": { - "env": "PARSE_SERVER_HOST", - "help": "The host to serve ParseServer on, defaults to 0.0.0.0", - "default": "0.0.0.0" - }, - "idempotencyOptions": { - "env": "PARSE_SERVER_EXPERIMENTAL_IDEMPOTENCY_OPTIONS", - "help": "Options for request idempotency to deduplicate identical requests that may be caused by network issues. Caution, this is an experimental feature that may not be appropriate for production.", - "action": parsers.objectParser, - "type": "IdempotencyOptions", - "default": {} - }, - "javascriptKey": { - "env": "PARSE_SERVER_JAVASCRIPT_KEY", - "help": "Key for the Javascript SDK" - }, - "jsonLogs": { - "env": "JSON_LOGS", - "help": "Log as structured JSON objects", - "action": parsers.booleanParser - }, - "liveQuery": { - "env": "PARSE_SERVER_LIVE_QUERY", - "help": "parse-server's LiveQuery configuration object", - "action": parsers.objectParser, - "type": "LiveQueryOptions" - }, - "liveQueryServerOptions": { - "env": "PARSE_SERVER_LIVE_QUERY_SERVER_OPTIONS", - "help": "Live query server configuration options (will start the liveQuery server)", - "action": parsers.objectParser, - "type": "LiveQueryServerOptions" - }, - "loggerAdapter": { - "env": "PARSE_SERVER_LOGGER_ADAPTER", - "help": "Adapter module for the logging sub-system", - "action": parsers.moduleOrObjectParser - }, - "logLevel": { - "env": "PARSE_SERVER_LOG_LEVEL", - "help": "Sets the level for logs" - }, - "logLevels": { - "env": "PARSE_SERVER_LOG_LEVELS", - "help": "(Optional) Overrides the log levels used internally by Parse Server to log events.", - "action": 
parsers.objectParser, - "type": "LogLevels", - "default": {} - }, - "logsFolder": { - "env": "PARSE_SERVER_LOGS_FOLDER", - "help": "Folder for the logs (defaults to './logs'); set to null to disable file based logging", - "default": "./logs" - }, - "maintenanceKey": { - "env": "PARSE_SERVER_MAINTENANCE_KEY", - "help": "(Optional) The maintenance key is used for modifying internal and read-only fields of Parse Server.

\u26A0\uFE0F This key is not intended to be used as part of a regular operation of Parse Server. This key is intended to conduct out-of-band changes such as one-time migrations or data correction tasks. Internal fields are not officially documented and may change at any time without publication in release changelogs. We strongly advice not to rely on internal fields as part of your regular operation and to investigate the implications of any planned changes *directly in the source code* of your current version of Parse Server.", - "required": true - }, - "maintenanceKeyIps": { - "env": "PARSE_SERVER_MAINTENANCE_KEY_IPS", - "help": "(Optional) Restricts the use of maintenance key permissions to a list of IP addresses or ranges.

This option accepts a list of single IP addresses, for example `['10.0.0.1', '10.0.0.2']`. You can also use CIDR notation to specify an IP address range, for example `['10.0.1.0/24']`.

Special scenarios:
- Setting an empty array `[]` means that the maintenance key cannot be used even in Parse Server Cloud Code. This value cannot be set via an environment variable as there is no way to pass an empty array to Parse Server via an environment variable.
- Setting `['0.0.0.0/0', '::0']` means to allow any IPv4 and IPv6 address to use the maintenance key and effectively disables the IP filter.

Considerations:
- IPv4 and IPv6 addresses are not compared against each other. Each IP version (IPv4 and IPv6) needs to be considered separately. For example, `['0.0.0.0/0']` allows any IPv4 address and blocks every IPv6 address. Conversely, `['::0']` allows any IPv6 address and blocks every IPv4 address.
- Keep in mind that the IP version in use depends on the network stack of the environment in which Parse Server runs. A local environment may use a different IP version than a remote environment. For example, it's possible that locally the value `['0.0.0.0/0']` allows the request IP because the environment is using IPv4, but when Parse Server is deployed remotely the request IP is blocked because the remote environment is using IPv6.
- When setting the option via an environment variable the notation is a comma-separated string, for example `\"0.0.0.0/0,::0\"`.
- IPv6 zone indices (`%` suffix) are not supported, for example `fe80::1%eth0`, `fe80::1%1` or `::1%lo`.

Defaults to `['127.0.0.1', '::1']` which means that only `localhost`, the server instance on which Parse Server runs, is allowed to use the maintenance key.", - "action": parsers.arrayParser, - "default": ["127.0.0.1", "::1"] - }, - "masterKey": { - "env": "PARSE_SERVER_MASTER_KEY", - "help": "Your Parse Master Key", - "required": true - }, - "masterKeyIps": { - "env": "PARSE_SERVER_MASTER_KEY_IPS", - "help": "(Optional) Restricts the use of master key permissions to a list of IP addresses or ranges.

This option accepts a list of single IP addresses, for example `['10.0.0.1', '10.0.0.2']`. You can also use CIDR notation to specify an IP address range, for example `['10.0.1.0/24']`.

Special scenarios:
- Setting an empty array `[]` means that the master key cannot be used even in Parse Server Cloud Code. This value cannot be set via an environment variable as there is no way to pass an empty array to Parse Server via an environment variable.
- Setting `['0.0.0.0/0', '::0']` means to allow any IPv4 and IPv6 address to use the master key and effectively disables the IP filter.

Considerations:
- IPv4 and IPv6 addresses are not compared against each other. Each IP version (IPv4 and IPv6) needs to be considered separately. For example, `['0.0.0.0/0']` allows any IPv4 address and blocks every IPv6 address. Conversely, `['::0']` allows any IPv6 address and blocks every IPv4 address.
- Keep in mind that the IP version in use depends on the network stack of the environment in which Parse Server runs. A local environment may use a different IP version than a remote environment. For example, it's possible that locally the value `['0.0.0.0/0']` allows the request IP because the environment is using IPv4, but when Parse Server is deployed remotely the request IP is blocked because the remote environment is using IPv6.
- When setting the option via an environment variable the notation is a comma-separated string, for example `\"0.0.0.0/0,::0\"`.
- IPv6 zone indices (`%` suffix) are not supported, for example `fe80::1%eth0`, `fe80::1%1` or `::1%lo`.

Defaults to `['127.0.0.1', '::1']` which means that only `localhost`, the server instance on which Parse Server runs, is allowed to use the master key.", - "action": parsers.arrayParser, - "default": ["127.0.0.1", "::1"] - }, - "masterKeyTtl": { - "env": "PARSE_SERVER_MASTER_KEY_TTL", - "help": "(Optional) The duration in seconds for which the current `masterKey` is being used before it is requested again if `masterKey` is set to a function. If `masterKey` is not set to a function, this option has no effect. Default is `0`, which means the master key is requested by invoking the `masterKey` function every time the master key is used internally by Parse Server.", - "action": parsers.numberParser("masterKeyTtl") - }, - "maxLimit": { - "env": "PARSE_SERVER_MAX_LIMIT", - "help": "Max value for limit option on queries, defaults to unlimited", - "action": parsers.numberParser("maxLimit") - }, - "maxLogFiles": { - "env": "PARSE_SERVER_MAX_LOG_FILES", - "help": "Maximum number of logs to keep. If not set, no logs will be removed. This can be a number of files or number of days. If using days, add 'd' as the suffix. (default: null)", - "action": parsers.numberOrStringParser("maxLogFiles") - }, - "maxUploadSize": { - "env": "PARSE_SERVER_MAX_UPLOAD_SIZE", - "help": "Max file size for uploads, defaults to 20mb", - "default": "20mb" - }, - "middleware": { - "env": "PARSE_SERVER_MIDDLEWARE", - "help": "middleware for express server, can be string or function" - }, - "mountGraphQL": { - "env": "PARSE_SERVER_MOUNT_GRAPHQL", - "help": "Mounts the GraphQL endpoint", - "action": parsers.booleanParser, - "default": false - }, - "mountPath": { - "env": "PARSE_SERVER_MOUNT_PATH", - "help": "Mount path for the server, defaults to /parse", - "default": "/parse" - }, - "mountPlayground": { - "env": "PARSE_SERVER_MOUNT_PLAYGROUND", - "help": "Deprecated. Mounts the GraphQL Playground which is deprecated and will be removed in a future version. 
The playground exposes the master key in the browser. Use Parse Dashboard as GraphQL IDE or configure a third-party GraphQL client with custom request headers.", - "action": parsers.booleanParser, - "default": false - }, - "objectIdSize": { - "env": "PARSE_SERVER_OBJECT_ID_SIZE", - "help": "Sets the number of characters in generated object id's, default 10", - "action": parsers.numberParser("objectIdSize"), - "default": 10 - }, - "pages": { - "env": "PARSE_SERVER_PAGES", - "help": "The options for pages such as password reset and email verification.", - "action": parsers.objectParser, - "type": "PagesOptions", - "default": {} - }, - "passwordPolicy": { - "env": "PARSE_SERVER_PASSWORD_POLICY", - "help": "The password policy for enforcing password related rules.", - "action": parsers.objectParser, - "type": "PasswordPolicyOptions" - }, - "playgroundPath": { - "env": "PARSE_SERVER_PLAYGROUND_PATH", - "help": "Deprecated. Mount path for the GraphQL Playground. The playground is deprecated and will be removed in a future version.", - "default": "/playground" - }, - "port": { - "env": "PORT", - "help": "The port to run the ParseServer, defaults to 1337.", - "action": parsers.numberParser("port"), - "default": 1337 - }, - "preserveFileName": { - "env": "PARSE_SERVER_PRESERVE_FILE_NAME", - "help": "Enable (or disable) the addition of a unique hash to the file names", - "action": parsers.booleanParser, - "default": false - }, - "preventLoginWithUnverifiedEmail": { - "env": "PARSE_SERVER_PREVENT_LOGIN_WITH_UNVERIFIED_EMAIL", - "help": "Set to `true` to prevent a user from logging in if the email has not yet been verified and email verification is required. Supports a function with a return value of `true` or `false` for conditional prevention. The function receives a request object that includes `createdWith` to indicate whether the invocation is for `signup` or `login` and the used auth provider.

The `createdWith` values per scenario:
  • Password signup: `{ action: 'signup', authProvider: 'password' }`
  • Auth provider signup: `{ action: 'signup', authProvider: '' }`
  • Password login: `{ action: 'login', authProvider: 'password' }`
  • Auth provider login: function not invoked; auth provider login bypasses email verification
Default is `false`.
Requires option `verifyUserEmails: true`.", - "action": parsers.booleanOrFunctionParser, - "default": false - }, - "preventSignupWithUnverifiedEmail": { - "env": "PARSE_SERVER_PREVENT_SIGNUP_WITH_UNVERIFIED_EMAIL", - "help": "If set to `true` it prevents a user from signing up if the email has not yet been verified and email verification is required. In that case the server responds to the sign-up with HTTP status 400 and a Parse Error 205 `EMAIL_NOT_FOUND`. If set to `false` the server responds with HTTP status 200, and client SDKs return an unauthenticated Parse User without session token. In that case subsequent requests fail until the user's email address is verified.

Default is `false`.
Requires option `verifyUserEmails: true`.", - "action": parsers.booleanParser, - "default": false - }, - "protectedFields": { - "env": "PARSE_SERVER_PROTECTED_FIELDS", - "help": "Protected fields that should be treated with extra security when fetching details.", - "action": parsers.objectParser, - "default": { - "_User": { - "*": ["email"] - } - } - }, - "publicServerURL": { - "env": "PARSE_PUBLIC_SERVER_URL", - "help": "Optional. The public URL to Parse Server. This URL will be used to reach Parse Server publicly for features like password reset and email verification links. The option can be set to a string or a function that can be asynchronously resolved. The returned URL string must start with `http://` or `https://`." - }, - "push": { - "env": "PARSE_SERVER_PUSH", - "help": "Configuration for push, as stringified JSON. See http://docs.parseplatform.org/parse-server/guide/#push-notifications", - "action": parsers.objectParser - }, - "rateLimit": { - "env": "PARSE_SERVER_RATE_LIMIT", - "help": "Options to limit repeated requests to Parse Server APIs. This can be used to protect sensitive endpoints such as `/requestPasswordReset` from brute-force attacks or Parse Server as a whole from denial-of-service (DoS) attacks.

\u2139\uFE0F Mind the following limitations:
- rate limits applied per IP address; this limits protection against distributed denial-of-service (DDoS) attacks where many requests are coming from various IP addresses
- if multiple Parse Server instances are behind a load balancer or ran in a cluster, each instance will calculate it's own request rates, independent from other instances; this limits the applicability of this feature when using a load balancer and another rate limiting solution that takes requests across all instances into account may be more suitable
- this feature provides basic protection against denial-of-service attacks, but a more sophisticated solution works earlier in the request flow and prevents a malicious requests to even reach a server instance; it's therefore recommended to implement a solution according to architecture and user case.", - "action": parsers.arrayParser, - "type": "RateLimitOptions[]", - "default": [] - }, - "readOnlyMasterKey": { - "env": "PARSE_SERVER_READ_ONLY_MASTER_KEY", - "help": "Read-only key, which has the same capabilities as MasterKey without writes" - }, - "readOnlyMasterKeyIps": { - "env": "PARSE_SERVER_READ_ONLY_MASTER_KEY_IPS", - "help": "(Optional) Restricts the use of read-only master key permissions to a list of IP addresses or ranges.

This option accepts a list of single IP addresses, for example `['10.0.0.1', '10.0.0.2']`. You can also use CIDR notation to specify an IP address range, for example `['10.0.1.0/24']`.

Special scenarios:
- Setting an empty array `[]` means that the read-only master key cannot be used even in Parse Server Cloud Code. This value cannot be set via an environment variable as there is no way to pass an empty array to Parse Server via an environment variable.
- Setting `['0.0.0.0/0', '::0']` means to allow any IPv4 and IPv6 address to use the read-only master key and effectively disables the IP filter.

Considerations:
- IPv4 and IPv6 addresses are not compared against each other. Each IP version (IPv4 and IPv6) needs to be considered separately. For example, `['0.0.0.0/0']` allows any IPv4 address and blocks every IPv6 address. Conversely, `['::0']` allows any IPv6 address and blocks every IPv4 address.
- Keep in mind that the IP version in use depends on the network stack of the environment in which Parse Server runs. A local environment may use a different IP version than a remote environment. For example, it's possible that locally the value `['0.0.0.0/0']` allows the request IP because the environment is using IPv4, but when Parse Server is deployed remotely the request IP is blocked because the remote environment is using IPv6.
- When setting the option via an environment variable the notation is a comma-separated string, for example `\"0.0.0.0/0,::0\"`.
- IPv6 zone indices (`%` suffix) are not supported, for example `fe80::1%eth0`, `fe80::1%1` or `::1%lo`.

Defaults to `['0.0.0.0/0', '::0']` which means that any IP address is allowed to use the read-only master key. It is recommended to set this option to `['127.0.0.1', '::1']` to restrict access to `localhost`.", - "action": parsers.arrayParser, - "default": ["0.0.0.0/0", "::0"] - }, - "requestComplexity": { - "env": "PARSE_SERVER_REQUEST_COMPLEXITY", - "help": "Options to limit the complexity of requests to prevent denial-of-service attacks. Limits are enforced for all requests except those using the master or maintenance key. Each property can be set to `-1` to disable that specific limit.", - "action": parsers.objectParser, - "type": "RequestComplexityOptions", - "default": {} - }, - "requestContextMiddleware": { - "env": "PARSE_SERVER_REQUEST_CONTEXT_MIDDLEWARE", - "help": "Options to customize the request context using inversion of control/dependency injection." - }, - "requestKeywordDenylist": { - "env": "PARSE_SERVER_REQUEST_KEYWORD_DENYLIST", - "help": "An array of keys and values that are prohibited in database read and write requests to prevent potential security vulnerabilities. It is possible to specify only a key (`{\"key\":\"...\"}`), only a value (`{\"value\":\"...\"}`) or a key-value pair (`{\"key\":\"...\",\"value\":\"...\"}`). The specification can use the following types: `boolean`, `numeric` or `string`, where `string` will be interpreted as a regex notation. Request data is deep-scanned for matching definitions to detect also any nested occurrences. Defaults are patterns that are likely to be used in malicious requests. 
Setting this option will override the default patterns.", - "action": parsers.arrayParser, - "default": [{ - key: "_bsontype", - value: "Code" - }, { - key: "constructor" - }, { - key: "__proto__" - }] - }, - "restAPIKey": { - "env": "PARSE_SERVER_REST_API_KEY", - "help": "Key for REST calls" - }, - "revokeSessionOnPasswordReset": { - "env": "PARSE_SERVER_REVOKE_SESSION_ON_PASSWORD_RESET", - "help": "When a user changes their password, either through the reset password email or while logged in, all sessions are revoked if this is true. Set to false if you don't want to revoke sessions.", - "action": parsers.booleanParser, - "default": true - }, - "scheduledPush": { - "env": "PARSE_SERVER_SCHEDULED_PUSH", - "help": "Configuration for push scheduling, defaults to false.", - "action": parsers.booleanParser, - "default": false - }, - "schema": { - "env": "PARSE_SERVER_SCHEMA", - "help": "Defined schema", - "action": parsers.objectParser, - "type": "SchemaOptions" - }, - "security": { - "env": "PARSE_SERVER_SECURITY", - "help": "The security options to identify and report weak security settings.", - "action": parsers.objectParser, - "type": "SecurityOptions", - "default": {} - }, - "sendUserEmailVerification": { - "env": "PARSE_SERVER_SEND_USER_EMAIL_VERIFICATION", - "help": "Set to `false` to prevent sending of verification email. Supports a function with a return value of `true` or `false` for conditional email sending.

Default is `true`.
", - "action": parsers.booleanOrFunctionParser, - "default": true - }, - "serverCloseComplete": { - "env": "PARSE_SERVER_SERVER_CLOSE_COMPLETE", - "help": "Callback when server has closed" - }, - "serverURL": { - "env": "PARSE_SERVER_URL", - "help": "The URL to Parse Server.

\u26A0\uFE0F Certain server features or adapters may require Parse Server to be able to call itself by making requests to the URL set in `serverURL`. If a feature requires this, it is mentioned in the documentation. In that case ensure that the URL is accessible from the server itself.", - "required": true - }, - "sessionLength": { - "env": "PARSE_SERVER_SESSION_LENGTH", - "help": "Session duration, in seconds, defaults to 1 year", - "action": parsers.numberParser("sessionLength"), - "default": 31536000 - }, - "silent": { - "env": "SILENT", - "help": "Disables console output", - "action": parsers.booleanParser - }, - "startLiveQueryServer": { - "env": "PARSE_SERVER_START_LIVE_QUERY_SERVER", - "help": "Starts the liveQuery server", - "action": parsers.booleanParser - }, - "trustProxy": { - "env": "PARSE_SERVER_TRUST_PROXY", - "help": "The trust proxy settings. It is important to understand the exact setup of the reverse proxy, since this setting will trust values provided in the Parse Server API request. See the express trust proxy settings documentation. Defaults to `false`.", - "action": parsers.objectParser, - "default": [] - }, - "userSensitiveFields": { - "env": "PARSE_SERVER_USER_SENSITIVE_FIELDS", - "help": "Personally identifiable information fields in the user table the should be removed for non-authorized users. Deprecated @see protectedFields", - "action": parsers.arrayParser - }, - "verbose": { - "env": "VERBOSE", - "help": "Set the logging to verbose", - "action": parsers.booleanParser - }, - "verifyServerUrl": { - "env": "PARSE_SERVER_VERIFY_SERVER_URL", - "help": "Parse Server makes a HTTP request to the URL set in `serverURL` at the end of its launch routine to verify that the launch succeeded. If this option is set to `false`, the verification will be skipped. This can be useful in environments where the server URL is not accessible from the server itself, such as when running behind a firewall or in certain containerized environments.

\u26A0\uFE0F Server URL verification requires Parse Server to be able to call itself by making requests to the URL set in `serverURL`.

Default is `true`.", - "action": parsers.booleanParser, - "default": true - }, - "verifyUserEmails": { - "env": "PARSE_SERVER_VERIFY_USER_EMAILS", - "help": "Set to `true` to require users to verify their email address to complete the sign-up process. Supports a function with a return value of `true` or `false` for conditional verification. The function receives a request object that includes `createdWith` to indicate whether the invocation is for `signup` or `login` and the used auth provider.

The `createdWith` values per scenario:
  • Password signup: `{ action: 'signup', authProvider: 'password' }`
  • Auth provider signup: `{ action: 'signup', authProvider: '' }`
  • Password login: `{ action: 'login', authProvider: 'password' }`
  • Auth provider login: function not invoked; auth provider login bypasses email verification
  • Resend verification email: `createdWith` is `undefined`; use the `resendRequest` property to identify those
Default is `false`.", - "action": parsers.booleanOrFunctionParser, - "default": false - }, - "webhookKey": { - "env": "PARSE_SERVER_WEBHOOK_KEY", - "help": "Key for authenticating external cloud code process requests. Required when cloudCodeCommand is set." - } + accountLockout: { + env: 'PARSE_SERVER_ACCOUNT_LOCKOUT', + help: "The account lockout policy for failed login attempts.

Note: Setting a user's ACL to an empty object `{}` via master key is a separate mechanism that only prevents new logins; it does not invalidate existing session tokens. To immediately revoke a user's access, destroy their sessions via master key in addition to setting the ACL.", + action: parsers.objectParser, + type: 'AccountLockoutOptions', + }, + allowClientClassCreation: { + env: 'PARSE_SERVER_ALLOW_CLIENT_CLASS_CREATION', + help: 'Enable (or disable) client class creation, defaults to false', + action: parsers.booleanParser, + default: false, + }, + allowCustomObjectId: { + env: 'PARSE_SERVER_ALLOW_CUSTOM_OBJECT_ID', + help: 'Enable (or disable) custom objectId', + action: parsers.booleanParser, + default: false, + }, + allowExpiredAuthDataToken: { + env: 'PARSE_SERVER_ALLOW_EXPIRED_AUTH_DATA_TOKEN', + help: 'Allow a user to log in even if the 3rd party authentication token that was used to sign in to their account has expired. If this is set to `false`, then the token will be validated every time the user signs in to their account. This refers to the token that is stored in the `_User.authData` field. Defaults to `false`.', + action: parsers.booleanParser, + default: false, + }, + allowHeaders: { + env: 'PARSE_SERVER_ALLOW_HEADERS', + help: 'Add headers to Access-Control-Allow-Headers', + action: parsers.arrayParser, + }, + allowOrigin: { + env: 'PARSE_SERVER_ALLOW_ORIGIN', + help: 'Sets origins for Access-Control-Allow-Origin. 
This can be a string for a single origin or an array of strings for multiple origins.', + action: parsers.arrayParser, + }, + analyticsAdapter: { + env: 'PARSE_SERVER_ANALYTICS_ADAPTER', + help: 'Adapter module for the analytics', + action: parsers.moduleOrObjectParser, + }, + appId: { + env: 'PARSE_SERVER_APPLICATION_ID', + help: 'Your Parse Application ID', + required: true, + }, + appName: { + env: 'PARSE_SERVER_APP_NAME', + help: 'Sets the app name', + }, + auth: { + env: 'PARSE_SERVER_AUTH_PROVIDERS', + help: "Configuration for your authentication providers, as stringified JSON. See http://docs.parseplatform.org/parse-server/guide/#oauth-and-3rd-party-authentication

Provider names must start with a letter and contain only letters, digits, and underscores (`/^[A-Za-z][A-Za-z0-9_]*$/`). This is because each provider name is used to construct a database field (`_auth_data_`), which must comply with Parse Server's field naming rules.", + action: parsers.objectParser, + }, + cacheAdapter: { + env: 'PARSE_SERVER_CACHE_ADAPTER', + help: 'Adapter module for the cache', + action: parsers.moduleOrObjectParser, + }, + cacheMaxSize: { + env: 'PARSE_SERVER_CACHE_MAX_SIZE', + help: 'Sets the maximum size for the in memory cache, defaults to 10000', + action: parsers.numberParser('cacheMaxSize'), + default: 10000, + }, + cacheTTL: { + env: 'PARSE_SERVER_CACHE_TTL', + help: 'Sets the TTL for the in memory cache (in ms), defaults to 5000 (5 seconds)', + action: parsers.numberParser('cacheTTL'), + default: 5000, + }, + clientKey: { + env: 'PARSE_SERVER_CLIENT_KEY', + help: 'Key for iOS, MacOS, tvOS clients', + }, + cloud: { + env: 'PARSE_SERVER_CLOUD', + help: 'Full path to your cloud code main.js', + }, + cloudCodeAdapters: { + env: 'PARSE_SERVER_CLOUD_CODE_ADAPTERS', + help: 'Array of CloudCodeAdapter instances for BYO cloud code integration', + action: parsers.objectParser, + }, + cloudCodeCommand: { + env: 'PARSE_SERVER_CLOUD_CODE_COMMAND', + help: 'Shell command to spawn an external cloud code process (ParseCloud/1.0 protocol)', + }, + cloudCodeOptions: { + env: 'PARSE_SERVER_CLOUD_CODE_OPTIONS', + help: 'Options for the external cloud code process adapter', + action: parsers.objectParser, + }, + cluster: { + env: 'PARSE_SERVER_CLUSTER', + help: 'Run with cluster, optionally set the number of processes default to os.cpus().length', + action: parsers.numberOrBooleanParser, + }, + collectionPrefix: { + env: 'PARSE_SERVER_COLLECTION_PREFIX', + help: 'A collection prefix for the classes', + default: '', + }, + convertEmailToLowercase: { + env: 'PARSE_SERVER_CONVERT_EMAIL_TO_LOWERCASE', + help: 'Optional. 
If set to `true`, the `email` property of a user is automatically converted to lowercase before being stored in the database. Consequently, queries must match the case as stored in the database, which would be lowercase in this scenario. If `false`, the `email` property is stored as set, without any case modifications. Default is `false`.', + action: parsers.booleanParser, + default: false, + }, + convertUsernameToLowercase: { + env: 'PARSE_SERVER_CONVERT_USERNAME_TO_LOWERCASE', + help: 'Optional. If set to `true`, the `username` property of a user is automatically converted to lowercase before being stored in the database. Consequently, queries must match the case as stored in the database, which would be lowercase in this scenario. If `false`, the `username` property is stored as set, without any case modifications. Default is `false`.', + action: parsers.booleanParser, + default: false, + }, + customPages: { + env: 'PARSE_SERVER_CUSTOM_PAGES', + help: 'custom pages for password validation and reset', + action: parsers.objectParser, + type: 'CustomPagesOptions', + default: {}, + }, + databaseAdapter: { + env: 'PARSE_SERVER_DATABASE_ADAPTER', + help: 'Adapter module for the database; any options that are not explicitly described here are passed directly to the database client.', + action: parsers.moduleOrObjectParser, + }, + databaseOptions: { + env: 'PARSE_SERVER_DATABASE_OPTIONS', + help: 'Options to pass to the database client', + action: parsers.objectParser, + type: 'DatabaseOptions', + }, + databaseURI: { + env: 'PARSE_SERVER_DATABASE_URI', + help: 'The full URI to your database. 
Supported databases are mongodb or postgres.', + required: true, + default: 'mongodb://localhost:27017/parse', + }, + defaultLimit: { + env: 'PARSE_SERVER_DEFAULT_LIMIT', + help: 'Default value for limit option on queries, defaults to `100`.', + action: parsers.numberParser('defaultLimit'), + default: 100, + }, + directAccess: { + env: 'PARSE_SERVER_DIRECT_ACCESS', + help: 'Set to `true` if Parse requests within the same Node.js environment as Parse Server should be routed to Parse Server directly instead of via the HTTP interface. Default is `true`.

If set to `false` then Parse requests within the same Node.js environment as Parse Server are executed as HTTP requests sent to Parse Server via the `serverURL`. For example, a `Parse.Query` in Cloud Code is calling Parse Server via a HTTP request. The server is essentially making a HTTP request to itself, unnecessarily using network resources such as network ports.

\u26A0\uFE0F In environments where multiple Parse Server instances run behind a load balancer and Parse requests within the current Node.js environment should be routed via the load balancer and distributed as HTTP requests among all instances via the `serverURL`, this should be set to `false`.', + action: parsers.booleanParser, + default: true, + }, + dotNetKey: { + env: 'PARSE_SERVER_DOT_NET_KEY', + help: 'Key for Unity and .Net SDK', + }, + emailAdapter: { + env: 'PARSE_SERVER_EMAIL_ADAPTER', + help: 'Adapter module for email sending', + action: parsers.moduleOrObjectParser, + }, + emailVerifySuccessOnInvalidEmail: { + env: 'PARSE_SERVER_EMAIL_VERIFY_SUCCESS_ON_INVALID_EMAIL', + help: 'Set to `true` if a request to verify the email should return a success response even if the provided email address does not belong to a verifiable account, for example because it is unknown or already verified, or `false` if the request should return an error response in those cases.

Default is `true`.
Requires option `verifyUserEmails: true`.', + action: parsers.booleanParser, + default: true, + }, + emailVerifyTokenReuseIfValid: { + env: 'PARSE_SERVER_EMAIL_VERIFY_TOKEN_REUSE_IF_VALID', + help: 'Set to `true` if an email verification token should be reused in case another token is requested but there is a token that is still valid, i.e. has not expired. This avoids the often observed issue that a user requests multiple emails and does not know which link contains a valid token because each newly generated token would invalidate the previous token.

Default is `false`.
Requires option `verifyUserEmails: true`.', + action: parsers.booleanParser, + default: false, + }, + emailVerifyTokenValidityDuration: { + env: 'PARSE_SERVER_EMAIL_VERIFY_TOKEN_VALIDITY_DURATION', + help: 'Set the validity duration of the email verification token in seconds after which the token expires. The token is used in the link that is set in the email. After the token expires, the link becomes invalid and a new link has to be sent. If the option is not set or set to `undefined`, then the token never expires.

For example, to expire the token after 2 hours, set a value of 7200 seconds (= 60 seconds * 60 minutes * 2 hours).

Default is `undefined`.
Requires option `verifyUserEmails: true`.', + action: parsers.numberParser('emailVerifyTokenValidityDuration'), + }, + enableAnonymousUsers: { + env: 'PARSE_SERVER_ENABLE_ANON_USERS', + help: 'Enable (or disable) anonymous users, defaults to true', + action: parsers.booleanParser, + default: true, + }, + enableCollationCaseComparison: { + env: 'PARSE_SERVER_ENABLE_COLLATION_CASE_COMPARISON', + help: 'Optional. If set to `true`, the collation rule of case comparison for queries and indexes is enabled. Enable this option to run Parse Server with MongoDB Atlas Serverless or AWS Amazon DocumentDB. If `false`, the collation rule of case comparison is disabled. Default is `false`.', + action: parsers.booleanParser, + default: false, + }, + enableExpressErrorHandler: { + env: 'PARSE_SERVER_ENABLE_EXPRESS_ERROR_HANDLER', + help: 'Enables the default express error handler for all errors', + action: parsers.booleanParser, + default: false, + }, + enableInsecureAuthAdapters: { + env: 'PARSE_SERVER_ENABLE_INSECURE_AUTH_ADAPTERS', + help: 'Optional. Enables insecure authentication adapters. Insecure auth adapters are deprecated and will be removed in a future version. Defaults to `false`.', + action: parsers.booleanParser, + default: false, + }, + enableProductPurchaseLegacyApi: { + env: 'PARSE_SERVER_ENABLE_PRODUCT_PURCHASE_LEGACY_API', + help: 'Deprecated. Enables the legacy product purchase API including the `_Product` class and the `/validate_purchase` endpoint. This is an undocumented, unmaintained legacy feature inherited from the original Parse platform that may not function as expected. We strongly advise against using it. It will be removed in a future major version.', + action: parsers.booleanParser, + default: true, + }, + enableSanitizedErrorResponse: { + env: 'PARSE_SERVER_ENABLE_SANITIZED_ERROR_RESPONSE', + help: 'If set to `true`, error details are removed from error messages in responses to client requests, and instead a generic error message is sent. 
Default is `true`.', + action: parsers.booleanParser, + default: true, + }, + encryptionKey: { + env: 'PARSE_SERVER_ENCRYPTION_KEY', + help: 'Key for encrypting your files', + }, + enforcePrivateUsers: { + env: 'PARSE_SERVER_ENFORCE_PRIVATE_USERS', + help: 'Set to true if new users should be created without public read and write access.', + action: parsers.booleanParser, + default: true, + }, + expireInactiveSessions: { + env: 'PARSE_SERVER_EXPIRE_INACTIVE_SESSIONS', + help: 'Sets whether we should expire the inactive sessions, defaults to true. If false, all new sessions are created with no expiration date.', + action: parsers.booleanParser, + default: true, + }, + extendSessionOnUse: { + env: 'PARSE_SERVER_EXTEND_SESSION_ON_USE', + help: "Whether Parse Server should automatically extend a valid session by the sessionLength. In order to reduce the number of session updates in the database, a session will only be extended when a request is received after at least half of the current session's lifetime has passed.", + action: parsers.booleanParser, + default: false, + }, + fileKey: { + env: 'PARSE_SERVER_FILE_KEY', + help: 'Key for your files', + }, + filesAdapter: { + env: 'PARSE_SERVER_FILES_ADAPTER', + help: 'Adapter module for the files sub-system', + action: parsers.moduleOrObjectParser, + }, + fileUpload: { + env: 'PARSE_SERVER_FILE_UPLOAD_OPTIONS', + help: 'Options for file uploads', + action: parsers.objectParser, + type: 'FileUploadOptions', + default: {}, + }, + graphQLPath: { + env: 'PARSE_SERVER_GRAPHQL_PATH', + help: 'The mount path for the GraphQL endpoint

\u26A0\uFE0F File upload inside the GraphQL mutation system requires Parse Server to be able to call itself by making requests to the URL set in `serverURL`.

Default is `/graphql`.', + default: '/graphql', + }, + graphQLPublicIntrospection: { + env: 'PARSE_SERVER_GRAPHQL_PUBLIC_INTROSPECTION', + help: 'Enable public introspection for the GraphQL endpoint, defaults to false', + action: parsers.booleanParser, + default: false, + }, + graphQLSchema: { + env: 'PARSE_SERVER_GRAPH_QLSCHEMA', + help: 'Full path to your GraphQL custom schema.graphql file', + }, + host: { + env: 'PARSE_SERVER_HOST', + help: 'The host to serve ParseServer on, defaults to 0.0.0.0', + default: '0.0.0.0', + }, + idempotencyOptions: { + env: 'PARSE_SERVER_EXPERIMENTAL_IDEMPOTENCY_OPTIONS', + help: 'Options for request idempotency to deduplicate identical requests that may be caused by network issues. Caution, this is an experimental feature that may not be appropriate for production.', + action: parsers.objectParser, + type: 'IdempotencyOptions', + default: {}, + }, + javascriptKey: { + env: 'PARSE_SERVER_JAVASCRIPT_KEY', + help: 'Key for the Javascript SDK', + }, + jsonLogs: { + env: 'JSON_LOGS', + help: 'Log as structured JSON objects', + action: parsers.booleanParser, + }, + liveQuery: { + env: 'PARSE_SERVER_LIVE_QUERY', + help: "parse-server's LiveQuery configuration object", + action: parsers.objectParser, + type: 'LiveQueryOptions', + }, + liveQueryServerOptions: { + env: 'PARSE_SERVER_LIVE_QUERY_SERVER_OPTIONS', + help: 'Live query server configuration options (will start the liveQuery server)', + action: parsers.objectParser, + type: 'LiveQueryServerOptions', + }, + loggerAdapter: { + env: 'PARSE_SERVER_LOGGER_ADAPTER', + help: 'Adapter module for the logging sub-system', + action: parsers.moduleOrObjectParser, + }, + logLevel: { + env: 'PARSE_SERVER_LOG_LEVEL', + help: 'Sets the level for logs', + }, + logLevels: { + env: 'PARSE_SERVER_LOG_LEVELS', + help: '(Optional) Overrides the log levels used internally by Parse Server to log events.', + action: parsers.objectParser, + type: 'LogLevels', + default: {}, + }, + logsFolder: { + env: 
'PARSE_SERVER_LOGS_FOLDER', + help: "Folder for the logs (defaults to './logs'); set to null to disable file based logging", + default: './logs', + }, + maintenanceKey: { + env: 'PARSE_SERVER_MAINTENANCE_KEY', + help: '(Optional) The maintenance key is used for modifying internal and read-only fields of Parse Server.

\u26A0\uFE0F This key is not intended to be used as part of a regular operation of Parse Server. This key is intended to conduct out-of-band changes such as one-time migrations or data correction tasks. Internal fields are not officially documented and may change at any time without publication in release changelogs. We strongly advise not to rely on internal fields as part of your regular operation and to investigate the implications of any planned changes *directly in the source code* of your current version of Parse Server.', + required: false, + }, + maintenanceKeyIps: { + env: 'PARSE_SERVER_MAINTENANCE_KEY_IPS', + help: "(Optional) Restricts the use of maintenance key permissions to a list of IP addresses or ranges.

This option accepts a list of single IP addresses, for example `['10.0.0.1', '10.0.0.2']`. You can also use CIDR notation to specify an IP address range, for example `['10.0.1.0/24']`.

Special scenarios:
- Setting an empty array `[]` means that the maintenance key cannot be used even in Parse Server Cloud Code. This value cannot be set via an environment variable as there is no way to pass an empty array to Parse Server via an environment variable.
- Setting `['0.0.0.0/0', '::0']` means to allow any IPv4 and IPv6 address to use the maintenance key and effectively disables the IP filter.

Considerations:
- IPv4 and IPv6 addresses are not compared against each other. Each IP version (IPv4 and IPv6) needs to be considered separately. For example, `['0.0.0.0/0']` allows any IPv4 address and blocks every IPv6 address. Conversely, `['::0']` allows any IPv6 address and blocks every IPv4 address.
- Keep in mind that the IP version in use depends on the network stack of the environment in which Parse Server runs. A local environment may use a different IP version than a remote environment. For example, it's possible that locally the value `['0.0.0.0/0']` allows the request IP because the environment is using IPv4, but when Parse Server is deployed remotely the request IP is blocked because the remote environment is using IPv6.
- When setting the option via an environment variable the notation is a comma-separated string, for example `\"0.0.0.0/0,::0\"`.
- IPv6 zone indices (`%` suffix) are not supported, for example `fe80::1%eth0`, `fe80::1%1` or `::1%lo`.

Defaults to `['127.0.0.1', '::1']` which means that only `localhost`, the server instance on which Parse Server runs, is allowed to use the maintenance key.", + action: parsers.arrayParser, + default: ['127.0.0.1', '::1'], + }, + masterKey: { + env: 'PARSE_SERVER_MASTER_KEY', + help: 'Your Parse Master Key', + required: true, + }, + masterKeyIps: { + env: 'PARSE_SERVER_MASTER_KEY_IPS', + help: "(Optional) Restricts the use of master key permissions to a list of IP addresses or ranges.

This option accepts a list of single IP addresses, for example `['10.0.0.1', '10.0.0.2']`. You can also use CIDR notation to specify an IP address range, for example `['10.0.1.0/24']`.

Special scenarios:
- Setting an empty array `[]` means that the master key cannot be used even in Parse Server Cloud Code. This value cannot be set via an environment variable as there is no way to pass an empty array to Parse Server via an environment variable.
- Setting `['0.0.0.0/0', '::0']` means to allow any IPv4 and IPv6 address to use the master key and effectively disables the IP filter.

Considerations:
- IPv4 and IPv6 addresses are not compared against each other. Each IP version (IPv4 and IPv6) needs to be considered separately. For example, `['0.0.0.0/0']` allows any IPv4 address and blocks every IPv6 address. Conversely, `['::0']` allows any IPv6 address and blocks every IPv4 address.
- Keep in mind that the IP version in use depends on the network stack of the environment in which Parse Server runs. A local environment may use a different IP version than a remote environment. For example, it's possible that locally the value `['0.0.0.0/0']` allows the request IP because the environment is using IPv4, but when Parse Server is deployed remotely the request IP is blocked because the remote environment is using IPv6.
- When setting the option via an environment variable the notation is a comma-separated string, for example `\"0.0.0.0/0,::0\"`.
- IPv6 zone indices (`%` suffix) are not supported, for example `fe80::1%eth0`, `fe80::1%1` or `::1%lo`.

Defaults to `['127.0.0.1', '::1']` which means that only `localhost`, the server instance on which Parse Server runs, is allowed to use the master key.", + action: parsers.arrayParser, + default: ['127.0.0.1', '::1'], + }, + masterKeyTtl: { + env: 'PARSE_SERVER_MASTER_KEY_TTL', + help: '(Optional) The duration in seconds for which the current `masterKey` is being used before it is requested again if `masterKey` is set to a function. If `masterKey` is not set to a function, this option has no effect. Default is `0`, which means the master key is requested by invoking the `masterKey` function every time the master key is used internally by Parse Server.', + action: parsers.numberParser('masterKeyTtl'), + }, + maxLimit: { + env: 'PARSE_SERVER_MAX_LIMIT', + help: 'Max value for limit option on queries, defaults to unlimited', + action: parsers.numberParser('maxLimit'), + }, + maxLogFiles: { + env: 'PARSE_SERVER_MAX_LOG_FILES', + help: "Maximum number of logs to keep. If not set, no logs will be removed. This can be a number of files or number of days. If using days, add 'd' as the suffix. (default: null)", + action: parsers.numberOrStringParser('maxLogFiles'), + }, + maxUploadSize: { + env: 'PARSE_SERVER_MAX_UPLOAD_SIZE', + help: 'Max file size for uploads, defaults to 20mb', + default: '20mb', + }, + middleware: { + env: 'PARSE_SERVER_MIDDLEWARE', + help: 'middleware for express server, can be string or function', + }, + mountGraphQL: { + env: 'PARSE_SERVER_MOUNT_GRAPHQL', + help: 'Mounts the GraphQL endpoint', + action: parsers.booleanParser, + default: false, + }, + mountPath: { + env: 'PARSE_SERVER_MOUNT_PATH', + help: 'Mount path for the server, defaults to /parse', + default: '/parse', + }, + mountPlayground: { + env: 'PARSE_SERVER_MOUNT_PLAYGROUND', + help: 'Deprecated. Mounts the GraphQL Playground which is deprecated and will be removed in a future version. The playground exposes the master key in the browser. 
Use Parse Dashboard as GraphQL IDE or configure a third-party GraphQL client with custom request headers.', + action: parsers.booleanParser, + default: false, + }, + objectIdSize: { + env: 'PARSE_SERVER_OBJECT_ID_SIZE', + help: "Sets the number of characters in generated object id's, default 10", + action: parsers.numberParser('objectIdSize'), + default: 10, + }, + pages: { + env: 'PARSE_SERVER_PAGES', + help: 'The options for pages such as password reset and email verification.', + action: parsers.objectParser, + type: 'PagesOptions', + default: {}, + }, + passwordPolicy: { + env: 'PARSE_SERVER_PASSWORD_POLICY', + help: 'The password policy for enforcing password related rules.', + action: parsers.objectParser, + type: 'PasswordPolicyOptions', + }, + playgroundPath: { + env: 'PARSE_SERVER_PLAYGROUND_PATH', + help: 'Deprecated. Mount path for the GraphQL Playground. The playground is deprecated and will be removed in a future version.', + default: '/playground', + }, + port: { + env: 'PORT', + help: 'The port to run the ParseServer, defaults to 1337.', + action: parsers.numberParser('port'), + default: 1337, + }, + preserveFileName: { + env: 'PARSE_SERVER_PRESERVE_FILE_NAME', + help: 'Enable (or disable) the addition of a unique hash to the file names', + action: parsers.booleanParser, + default: false, + }, + preventLoginWithUnverifiedEmail: { + env: 'PARSE_SERVER_PREVENT_LOGIN_WITH_UNVERIFIED_EMAIL', + help: "Set to `true` to prevent a user from logging in if the email has not yet been verified and email verification is required. Supports a function with a return value of `true` or `false` for conditional prevention. The function receives a request object that includes `createdWith` to indicate whether the invocation is for `signup` or `login` and the used auth provider.

The `createdWith` values per scenario:
  • Password signup: `{ action: 'signup', authProvider: 'password' }`
  • Auth provider signup: `{ action: 'signup', authProvider: '<authProvider>' }`
  • Password login: `{ action: 'login', authProvider: 'password' }`
  • Auth provider login: function not invoked; auth provider login bypasses email verification
Default is `false`.
Requires option `verifyUserEmails: true`.", + action: parsers.booleanOrFunctionParser, + default: false, + }, + preventSignupWithUnverifiedEmail: { + env: 'PARSE_SERVER_PREVENT_SIGNUP_WITH_UNVERIFIED_EMAIL', + help: "If set to `true` it prevents a user from signing up if the email has not yet been verified and email verification is required. In that case the server responds to the sign-up with HTTP status 400 and a Parse Error 205 `EMAIL_NOT_FOUND`. If set to `false` the server responds with HTTP status 200, and client SDKs return an unauthenticated Parse User without session token. In that case subsequent requests fail until the user's email address is verified.

Default is `false`.
Requires option `verifyUserEmails: true`.", + action: parsers.booleanParser, + default: false, + }, + protectedFields: { + env: 'PARSE_SERVER_PROTECTED_FIELDS', + help: 'Protected fields that should be treated with extra security when fetching details.', + action: parsers.objectParser, + default: { + _User: { + '*': ['email'], + }, + }, + }, + publicServerURL: { + env: 'PARSE_PUBLIC_SERVER_URL', + help: 'Optional. The public URL to Parse Server. This URL will be used to reach Parse Server publicly for features like password reset and email verification links. The option can be set to a string or a function that can be asynchronously resolved. The returned URL string must start with `http://` or `https://`.', + }, + push: { + env: 'PARSE_SERVER_PUSH', + help: 'Configuration for push, as stringified JSON. See http://docs.parseplatform.org/parse-server/guide/#push-notifications', + action: parsers.objectParser, + }, + rateLimit: { + env: 'PARSE_SERVER_RATE_LIMIT', + help: "Options to limit repeated requests to Parse Server APIs. This can be used to protect sensitive endpoints such as `/requestPasswordReset` from brute-force attacks or Parse Server as a whole from denial-of-service (DoS) attacks.

\u2139\uFE0F Mind the following limitations:
- rate limits applied per IP address; this limits protection against distributed denial-of-service (DDoS) attacks where many requests are coming from various IP addresses
- if multiple Parse Server instances are behind a load balancer or run in a cluster, each instance will calculate its own request rates, independently of other instances; this limits the applicability of this feature when using a load balancer and another rate limiting solution that takes requests across all instances into account may be more suitable
- this feature provides basic protection against denial-of-service attacks, but a more sophisticated solution works earlier in the request flow and prevents malicious requests from even reaching a server instance; it's therefore recommended to implement a solution according to architecture and use case.", + action: parsers.arrayParser, + type: 'RateLimitOptions[]', + default: [], + }, + readOnlyMasterKey: { + env: 'PARSE_SERVER_READ_ONLY_MASTER_KEY', + help: 'Read-only key, which has the same capabilities as MasterKey without writes', + }, + readOnlyMasterKeyIps: { + env: 'PARSE_SERVER_READ_ONLY_MASTER_KEY_IPS', + help: "(Optional) Restricts the use of read-only master key permissions to a list of IP addresses or ranges.

This option accepts a list of single IP addresses, for example `['10.0.0.1', '10.0.0.2']`. You can also use CIDR notation to specify an IP address range, for example `['10.0.1.0/24']`.

Special scenarios:
- Setting an empty array `[]` means that the read-only master key cannot be used even in Parse Server Cloud Code. This value cannot be set via an environment variable as there is no way to pass an empty array to Parse Server via an environment variable.
- Setting `['0.0.0.0/0', '::0']` means to allow any IPv4 and IPv6 address to use the read-only master key and effectively disables the IP filter.

Considerations:
- IPv4 and IPv6 addresses are not compared against each other. Each IP version (IPv4 and IPv6) needs to be considered separately. For example, `['0.0.0.0/0']` allows any IPv4 address and blocks every IPv6 address. Conversely, `['::0']` allows any IPv6 address and blocks every IPv4 address.
- Keep in mind that the IP version in use depends on the network stack of the environment in which Parse Server runs. A local environment may use a different IP version than a remote environment. For example, it's possible that locally the value `['0.0.0.0/0']` allows the request IP because the environment is using IPv4, but when Parse Server is deployed remotely the request IP is blocked because the remote environment is using IPv6.
- When setting the option via an environment variable the notation is a comma-separated string, for example `\"0.0.0.0/0,::0\"`.
- IPv6 zone indices (`%` suffix) are not supported, for example `fe80::1%eth0`, `fe80::1%1` or `::1%lo`.

Defaults to `['0.0.0.0/0', '::0']` which means that any IP address is allowed to use the read-only master key. It is recommended to set this option to `['127.0.0.1', '::1']` to restrict access to `localhost`.", + action: parsers.arrayParser, + default: ['0.0.0.0/0', '::0'], + }, + requestComplexity: { + env: 'PARSE_SERVER_REQUEST_COMPLEXITY', + help: 'Options to limit the complexity of requests to prevent denial-of-service attacks. Limits are enforced for all requests except those using the master or maintenance key. Each property can be set to `-1` to disable that specific limit.', + action: parsers.objectParser, + type: 'RequestComplexityOptions', + default: {}, + }, + requestContextMiddleware: { + env: 'PARSE_SERVER_REQUEST_CONTEXT_MIDDLEWARE', + help: 'Options to customize the request context using inversion of control/dependency injection.', + }, + requestKeywordDenylist: { + env: 'PARSE_SERVER_REQUEST_KEYWORD_DENYLIST', + help: 'An array of keys and values that are prohibited in database read and write requests to prevent potential security vulnerabilities. It is possible to specify only a key (`{"key":"..."}`), only a value (`{"value":"..."}`) or a key-value pair (`{"key":"...","value":"..."}`). The specification can use the following types: `boolean`, `numeric` or `string`, where `string` will be interpreted as a regex notation. Request data is deep-scanned for matching definitions to detect also any nested occurrences. Defaults are patterns that are likely to be used in malicious requests. 
Setting this option will override the default patterns.', + action: parsers.arrayParser, + default: [ + { + key: '_bsontype', + value: 'Code', + }, + { + key: 'constructor', + }, + { + key: '__proto__', + }, + ], + }, + restAPIKey: { + env: 'PARSE_SERVER_REST_API_KEY', + help: 'Key for REST calls', + }, + revokeSessionOnPasswordReset: { + env: 'PARSE_SERVER_REVOKE_SESSION_ON_PASSWORD_RESET', + help: "When a user changes their password, either through the reset password email or while logged in, all sessions are revoked if this is true. Set to false if you don't want to revoke sessions.", + action: parsers.booleanParser, + default: true, + }, + scheduledPush: { + env: 'PARSE_SERVER_SCHEDULED_PUSH', + help: 'Configuration for push scheduling, defaults to false.', + action: parsers.booleanParser, + default: false, + }, + schema: { + env: 'PARSE_SERVER_SCHEMA', + help: 'Defined schema', + action: parsers.objectParser, + type: 'SchemaOptions', + }, + security: { + env: 'PARSE_SERVER_SECURITY', + help: 'The security options to identify and report weak security settings.', + action: parsers.objectParser, + type: 'SecurityOptions', + default: {}, + }, + sendUserEmailVerification: { + env: 'PARSE_SERVER_SEND_USER_EMAIL_VERIFICATION', + help: 'Set to `false` to prevent sending of verification email. Supports a function with a return value of `true` or `false` for conditional email sending.

Default is `true`.
', + action: parsers.booleanOrFunctionParser, + default: true, + }, + serverCloseComplete: { + env: 'PARSE_SERVER_SERVER_CLOSE_COMPLETE', + help: 'Callback when server has closed', + }, + serverURL: { + env: 'PARSE_SERVER_URL', + help: 'The URL to Parse Server.

\u26A0\uFE0F Certain server features or adapters may require Parse Server to be able to call itself by making requests to the URL set in `serverURL`. If a feature requires this, it is mentioned in the documentation. In that case ensure that the URL is accessible from the server itself.', + required: true, + }, + sessionLength: { + env: 'PARSE_SERVER_SESSION_LENGTH', + help: 'Session duration, in seconds, defaults to 1 year', + action: parsers.numberParser('sessionLength'), + default: 31536000, + }, + silent: { + env: 'SILENT', + help: 'Disables console output', + action: parsers.booleanParser, + }, + startLiveQueryServer: { + env: 'PARSE_SERVER_START_LIVE_QUERY_SERVER', + help: 'Starts the liveQuery server', + action: parsers.booleanParser, + }, + trustProxy: { + env: 'PARSE_SERVER_TRUST_PROXY', + help: 'The trust proxy settings. It is important to understand the exact setup of the reverse proxy, since this setting will trust values provided in the Parse Server API request. See the express trust proxy settings documentation. Defaults to `false`.', + action: parsers.objectParser, + default: [], + }, + userSensitiveFields: { + env: 'PARSE_SERVER_USER_SENSITIVE_FIELDS', + help: 'Personally identifiable information fields in the user table the should be removed for non-authorized users. Deprecated @see protectedFields', + action: parsers.arrayParser, + }, + verbose: { + env: 'VERBOSE', + help: 'Set the logging to verbose', + action: parsers.booleanParser, + }, + verifyServerUrl: { + env: 'PARSE_SERVER_VERIFY_SERVER_URL', + help: 'Parse Server makes a HTTP request to the URL set in `serverURL` at the end of its launch routine to verify that the launch succeeded. If this option is set to `false`, the verification will be skipped. This can be useful in environments where the server URL is not accessible from the server itself, such as when running behind a firewall or in certain containerized environments.

\u26A0\uFE0F Server URL verification requires Parse Server to be able to call itself by making requests to the URL set in `serverURL`.

Default is `true`.', + action: parsers.booleanParser, + default: true, + }, + verifyUserEmails: { + env: 'PARSE_SERVER_VERIFY_USER_EMAILS', + help: "Set to `true` to require users to verify their email address to complete the sign-up process. Supports a function with a return value of `true` or `false` for conditional verification. The function receives a request object that includes `createdWith` to indicate whether the invocation is for `signup` or `login` and the used auth provider.

The `createdWith` values per scenario:
  • Password signup: `{ action: 'signup', authProvider: 'password' }`
  • Auth provider signup: `{ action: 'signup', authProvider: '<authProvider>' }`
  • Password login: `{ action: 'login', authProvider: 'password' }`
  • Auth provider login: function not invoked; auth provider login bypasses email verification
  • Resend verification email: `createdWith` is `undefined`; use the `resendRequest` property to identify those
Default is `false`.", + action: parsers.booleanOrFunctionParser, + default: false, + }, + webhookKey: { + env: 'PARSE_SERVER_WEBHOOK_KEY', + help: 'Key for authenticating external cloud code process requests. Required when cloudCodeCommand is set.', + }, }; module.exports.RateLimitOptions = { - "errorResponseMessage": { - "env": "PARSE_SERVER_RATE_LIMIT_ERROR_RESPONSE_MESSAGE", - "help": "The error message that should be returned in the body of the HTTP 429 response when the rate limit is hit. Default is `Too many requests.`.", - "default": "Too many requests." - }, - "includeInternalRequests": { - "env": "PARSE_SERVER_RATE_LIMIT_INCLUDE_INTERNAL_REQUESTS", - "help": "Optional, if `true` the rate limit will also apply to requests that are made in by Cloud Code, default is `false`. Note that a public Cloud Code function that triggers internal requests may circumvent rate limiting and be vulnerable to attacks.", - "action": parsers.booleanParser, - "default": false - }, - "includeMasterKey": { - "env": "PARSE_SERVER_RATE_LIMIT_INCLUDE_MASTER_KEY", - "help": "Optional, if `true` the rate limit will also apply to requests using the `masterKey`, default is `false`. Note that a public Cloud Code function that triggers internal requests using the `masterKey` may circumvent rate limiting and be vulnerable to attacks.", - "action": parsers.booleanParser, - "default": false - }, - "redisUrl": { - "env": "PARSE_SERVER_RATE_LIMIT_REDIS_URL", - "help": "Optional, the URL of the Redis server to store rate limit data. This allows to rate limit requests for multiple servers by calculating the sum of all requests across all servers. This is useful if multiple servers are processing requests behind a load balancer. For example, the limit of 10 requests is reached if each of 2 servers processed 5 requests." 
- }, - "requestCount": { - "env": "PARSE_SERVER_RATE_LIMIT_REQUEST_COUNT", - "help": "The number of requests that can be made per IP address within the time window set in `requestTimeWindow` before the rate limit is applied. For batch requests, this also limits the number of sub-requests in a single batch that target this path; however, requests already consumed in the current time window are not counted against the batch, so the effective limit may be higher when combining individual and batch requests. Note that this is a basic server-level rate limit; for comprehensive protection, use a reverse proxy or WAF for rate limiting.", - "action": parsers.numberParser("requestCount") - }, - "requestMethods": { - "env": "PARSE_SERVER_RATE_LIMIT_REQUEST_METHODS", - "help": "Optional, the HTTP request methods to which the rate limit should be applied, default is all methods.", - "action": parsers.arrayParser - }, - "requestPath": { - "env": "PARSE_SERVER_RATE_LIMIT_REQUEST_PATH", - "help": "The path of the API route to be rate limited. Route paths, in combination with a request method, define the endpoints at which requests can be made. Route paths can be strings or string patterns following path-to-regexp v8 syntax.", - "required": true - }, - "requestTimeWindow": { - "env": "PARSE_SERVER_RATE_LIMIT_REQUEST_TIME_WINDOW", - "help": "The window of time in milliseconds within which the number of requests set in `requestCount` can be made before the rate limit is applied.", - "action": parsers.numberParser("requestTimeWindow") - }, - "zone": { - "env": "PARSE_SERVER_RATE_LIMIT_ZONE", - "help": "The type of rate limit to apply. The following types are supported:
  • `global`: rate limit based on the number of requests made by all users
  • `ip`: rate limit based on the IP address of the request
  • `user`: rate limit based on the user ID of the request
  • `session`: rate limit based on the session token of the request
Default is `ip`.", - "default": "ip" - } + errorResponseMessage: { + env: 'PARSE_SERVER_RATE_LIMIT_ERROR_RESPONSE_MESSAGE', + help: 'The error message that should be returned in the body of the HTTP 429 response when the rate limit is hit. Default is `Too many requests.`.', + default: 'Too many requests.', + }, + includeInternalRequests: { + env: 'PARSE_SERVER_RATE_LIMIT_INCLUDE_INTERNAL_REQUESTS', + help: 'Optional, if `true` the rate limit will also apply to requests that are made in by Cloud Code, default is `false`. Note that a public Cloud Code function that triggers internal requests may circumvent rate limiting and be vulnerable to attacks.', + action: parsers.booleanParser, + default: false, + }, + includeMasterKey: { + env: 'PARSE_SERVER_RATE_LIMIT_INCLUDE_MASTER_KEY', + help: 'Optional, if `true` the rate limit will also apply to requests using the `masterKey`, default is `false`. Note that a public Cloud Code function that triggers internal requests using the `masterKey` may circumvent rate limiting and be vulnerable to attacks.', + action: parsers.booleanParser, + default: false, + }, + redisUrl: { + env: 'PARSE_SERVER_RATE_LIMIT_REDIS_URL', + help: 'Optional, the URL of the Redis server to store rate limit data. This allows to rate limit requests for multiple servers by calculating the sum of all requests across all servers. This is useful if multiple servers are processing requests behind a load balancer. For example, the limit of 10 requests is reached if each of 2 servers processed 5 requests.', + }, + requestCount: { + env: 'PARSE_SERVER_RATE_LIMIT_REQUEST_COUNT', + help: 'The number of requests that can be made per IP address within the time window set in `requestTimeWindow` before the rate limit is applied. 
For batch requests, this also limits the number of sub-requests in a single batch that target this path; however, requests already consumed in the current time window are not counted against the batch, so the effective limit may be higher when combining individual and batch requests. Note that this is a basic server-level rate limit; for comprehensive protection, use a reverse proxy or WAF for rate limiting.', + action: parsers.numberParser('requestCount'), + }, + requestMethods: { + env: 'PARSE_SERVER_RATE_LIMIT_REQUEST_METHODS', + help: 'Optional, the HTTP request methods to which the rate limit should be applied, default is all methods.', + action: parsers.arrayParser, + }, + requestPath: { + env: 'PARSE_SERVER_RATE_LIMIT_REQUEST_PATH', + help: 'The path of the API route to be rate limited. Route paths, in combination with a request method, define the endpoints at which requests can be made. Route paths can be strings or string patterns following path-to-regexp v8 syntax.', + required: true, + }, + requestTimeWindow: { + env: 'PARSE_SERVER_RATE_LIMIT_REQUEST_TIME_WINDOW', + help: 'The window of time in milliseconds within which the number of requests set in `requestCount` can be made before the rate limit is applied.', + action: parsers.numberParser('requestTimeWindow'), + }, + zone: { + env: 'PARSE_SERVER_RATE_LIMIT_ZONE', + help: 'The type of rate limit to apply. The following types are supported:
  • `global`: rate limit based on the number of requests made by all users
  • `ip`: rate limit based on the IP address of the request
  • `user`: rate limit based on the user ID of the request
  • `session`: rate limit based on the session token of the request
Default is `ip`.', + default: 'ip', + }, }; module.exports.RequestComplexityOptions = { - "graphQLDepth": { - "env": "PARSE_SERVER_REQUEST_COMPLEXITY_GRAPHQL_DEPTH", - "help": "Maximum depth of GraphQL field selections. Set to `-1` to disable. Default is `-1`.", - "action": parsers.numberParser("graphQLDepth"), - "default": -1 - }, - "graphQLFields": { - "env": "PARSE_SERVER_REQUEST_COMPLEXITY_GRAPHQL_FIELDS", - "help": "Maximum number of field selections in a GraphQL query. Set to `-1` to disable. Default is `-1`.", - "action": parsers.numberParser("graphQLFields"), - "default": -1 - }, - "includeCount": { - "env": "PARSE_SERVER_REQUEST_COMPLEXITY_INCLUDE_COUNT", - "help": "Maximum number of include paths in a single query. Set to `-1` to disable. Default is `-1`.", - "action": parsers.numberParser("includeCount"), - "default": -1 - }, - "includeDepth": { - "env": "PARSE_SERVER_REQUEST_COMPLEXITY_INCLUDE_DEPTH", - "help": "Maximum depth of include pointer chains (e.g. `a.b.c` = depth 3). Set to `-1` to disable. Default is `-1`.", - "action": parsers.numberParser("includeDepth"), - "default": -1 - }, - "queryDepth": { - "env": "PARSE_SERVER_REQUEST_COMPLEXITY_QUERY_DEPTH", - "help": "Maximum nesting depth of `$or`, `$and`, `$nor` query operators. Set to `-1` to disable. Default is `-1`.", - "action": parsers.numberParser("queryDepth"), - "default": -1 - }, - "subqueryDepth": { - "env": "PARSE_SERVER_REQUEST_COMPLEXITY_SUBQUERY_DEPTH", - "help": "Maximum nesting depth of `$inQuery`, `$notInQuery`, `$select`, `$dontSelect` subqueries. Set to `-1` to disable. Default is `-1`.", - "action": parsers.numberParser("subqueryDepth"), - "default": -1 - } + graphQLDepth: { + env: 'PARSE_SERVER_REQUEST_COMPLEXITY_GRAPHQL_DEPTH', + help: 'Maximum depth of GraphQL field selections. Set to `-1` to disable. 
Default is `-1`.', + action: parsers.numberParser('graphQLDepth'), + default: -1, + }, + graphQLFields: { + env: 'PARSE_SERVER_REQUEST_COMPLEXITY_GRAPHQL_FIELDS', + help: 'Maximum number of field selections in a GraphQL query. Set to `-1` to disable. Default is `-1`.', + action: parsers.numberParser('graphQLFields'), + default: -1, + }, + includeCount: { + env: 'PARSE_SERVER_REQUEST_COMPLEXITY_INCLUDE_COUNT', + help: 'Maximum number of include paths in a single query. Set to `-1` to disable. Default is `-1`.', + action: parsers.numberParser('includeCount'), + default: -1, + }, + includeDepth: { + env: 'PARSE_SERVER_REQUEST_COMPLEXITY_INCLUDE_DEPTH', + help: 'Maximum depth of include pointer chains (e.g. `a.b.c` = depth 3). Set to `-1` to disable. Default is `-1`.', + action: parsers.numberParser('includeDepth'), + default: -1, + }, + queryDepth: { + env: 'PARSE_SERVER_REQUEST_COMPLEXITY_QUERY_DEPTH', + help: 'Maximum nesting depth of `$or`, `$and`, `$nor` query operators. Set to `-1` to disable. Default is `-1`.', + action: parsers.numberParser('queryDepth'), + default: -1, + }, + subqueryDepth: { + env: 'PARSE_SERVER_REQUEST_COMPLEXITY_SUBQUERY_DEPTH', + help: 'Maximum nesting depth of `$inQuery`, `$notInQuery`, `$select`, `$dontSelect` subqueries. Set to `-1` to disable. Default is `-1`.', + action: parsers.numberParser('subqueryDepth'), + default: -1, + }, }; module.exports.SecurityOptions = { - "checkGroups": { - "env": "PARSE_SERVER_SECURITY_CHECK_GROUPS", - "help": "The security check groups to run. This allows to add custom security checks or override existing ones. 
Default are the groups defined in `CheckGroups.js`.", - "action": parsers.arrayParser - }, - "enableCheck": { - "env": "PARSE_SERVER_SECURITY_ENABLE_CHECK", - "help": "Is true if Parse Server should check for weak security settings.", - "action": parsers.booleanParser, - "default": false - }, - "enableCheckLog": { - "env": "PARSE_SERVER_SECURITY_ENABLE_CHECK_LOG", - "help": "Is true if the security check report should be written to logs. This should only be enabled temporarily to not expose weak security settings in logs.", - "action": parsers.booleanParser, - "default": false - } + checkGroups: { + env: 'PARSE_SERVER_SECURITY_CHECK_GROUPS', + help: 'The security check groups to run. This allows to add custom security checks or override existing ones. Default are the groups defined in `CheckGroups.js`.', + action: parsers.arrayParser, + }, + enableCheck: { + env: 'PARSE_SERVER_SECURITY_ENABLE_CHECK', + help: 'Is true if Parse Server should check for weak security settings.', + action: parsers.booleanParser, + default: false, + }, + enableCheckLog: { + env: 'PARSE_SERVER_SECURITY_ENABLE_CHECK_LOG', + help: 'Is true if the security check report should be written to logs. 
This should only be enabled temporarily to not expose weak security settings in logs.', + action: parsers.booleanParser, + default: false, + }, }; module.exports.PagesOptions = { - "customRoutes": { - "env": "PARSE_SERVER_PAGES_CUSTOM_ROUTES", - "help": "The custom routes.", - "action": parsers.arrayParser, - "type": "PagesRoute[]", - "default": [] - }, - "customUrls": { - "env": "PARSE_SERVER_PAGES_CUSTOM_URLS", - "help": "The URLs to the custom pages.", - "action": parsers.objectParser, - "type": "PagesCustomUrlsOptions", - "default": {} - }, - "enableLocalization": { - "env": "PARSE_SERVER_PAGES_ENABLE_LOCALIZATION", - "help": "Is true if pages should be localized; this has no effect on custom page redirects.", - "action": parsers.booleanParser, - "default": false - }, - "encodePageParamHeaders": { - "env": "PARSE_SERVER_PAGES_ENCODE_PAGE_PARAM_HEADERS", - "help": "Is `true` if the page parameter headers should be URI-encoded. This is required if any page parameter value contains non-ASCII characters, such as the app name.", - "action": parsers.booleanParser, - "default": false - }, - "forceRedirect": { - "env": "PARSE_SERVER_PAGES_FORCE_REDIRECT", - "help": "Is true if responses should always be redirects and never content, false if the response type should depend on the request type (GET request -> content response; POST request -> redirect response).", - "action": parsers.booleanParser, - "default": false - }, - "localizationFallbackLocale": { - "env": "PARSE_SERVER_PAGES_LOCALIZATION_FALLBACK_LOCALE", - "help": "The fallback locale for localization if no matching translation is provided for the given locale. This is only relevant when providing translation resources via JSON file.", - "default": "en" - }, - "localizationJsonPath": { - "env": "PARSE_SERVER_PAGES_LOCALIZATION_JSON_PATH", - "help": "The path to the JSON file for localization; the translations will be used to fill template placeholders according to the locale." 
- }, - "pagesEndpoint": { - "env": "PARSE_SERVER_PAGES_PAGES_ENDPOINT", - "help": "The API endpoint for the pages. Default is 'apps'.", - "default": "apps" - }, - "pagesPath": { - "env": "PARSE_SERVER_PAGES_PAGES_PATH", - "help": "The path to the pages directory; this also defines where the static endpoint '/apps' points to. Default is the './public/' directory of the parse-server module." - }, - "placeholders": { - "env": "PARSE_SERVER_PAGES_PLACEHOLDERS", - "help": "The placeholder keys and values which will be filled in pages; this can be a simple object or a callback function.", - "action": parsers.objectParser, - "default": {} - } + customRoutes: { + env: 'PARSE_SERVER_PAGES_CUSTOM_ROUTES', + help: 'The custom routes.', + action: parsers.arrayParser, + type: 'PagesRoute[]', + default: [], + }, + customUrls: { + env: 'PARSE_SERVER_PAGES_CUSTOM_URLS', + help: 'The URLs to the custom pages.', + action: parsers.objectParser, + type: 'PagesCustomUrlsOptions', + default: {}, + }, + enableLocalization: { + env: 'PARSE_SERVER_PAGES_ENABLE_LOCALIZATION', + help: 'Is true if pages should be localized; this has no effect on custom page redirects.', + action: parsers.booleanParser, + default: false, + }, + encodePageParamHeaders: { + env: 'PARSE_SERVER_PAGES_ENCODE_PAGE_PARAM_HEADERS', + help: 'Is `true` if the page parameter headers should be URI-encoded. 
This is required if any page parameter value contains non-ASCII characters, such as the app name.', + action: parsers.booleanParser, + default: false, + }, + forceRedirect: { + env: 'PARSE_SERVER_PAGES_FORCE_REDIRECT', + help: 'Is true if responses should always be redirects and never content, false if the response type should depend on the request type (GET request -> content response; POST request -> redirect response).', + action: parsers.booleanParser, + default: false, + }, + localizationFallbackLocale: { + env: 'PARSE_SERVER_PAGES_LOCALIZATION_FALLBACK_LOCALE', + help: 'The fallback locale for localization if no matching translation is provided for the given locale. This is only relevant when providing translation resources via JSON file.', + default: 'en', + }, + localizationJsonPath: { + env: 'PARSE_SERVER_PAGES_LOCALIZATION_JSON_PATH', + help: 'The path to the JSON file for localization; the translations will be used to fill template placeholders according to the locale.', + }, + pagesEndpoint: { + env: 'PARSE_SERVER_PAGES_PAGES_ENDPOINT', + help: "The API endpoint for the pages. Default is 'apps'.", + default: 'apps', + }, + pagesPath: { + env: 'PARSE_SERVER_PAGES_PAGES_PATH', + help: "The path to the pages directory; this also defines where the static endpoint '/apps' points to. Default is the './public/' directory of the parse-server module.", + }, + placeholders: { + env: 'PARSE_SERVER_PAGES_PLACEHOLDERS', + help: 'The placeholder keys and values which will be filled in pages; this can be a simple object or a callback function.', + action: parsers.objectParser, + default: {}, + }, }; module.exports.PagesRoute = { - "handler": { - "env": "PARSE_SERVER_PAGES_ROUTE_HANDLER", - "help": "The route handler that is an async function.", - "required": true - }, - "method": { - "env": "PARSE_SERVER_PAGES_ROUTE_METHOD", - "help": "The route method, e.g. 
'GET' or 'POST'.", - "required": true - }, - "path": { - "env": "PARSE_SERVER_PAGES_ROUTE_PATH", - "help": "The route path.", - "required": true - } + handler: { + env: 'PARSE_SERVER_PAGES_ROUTE_HANDLER', + help: 'The route handler that is an async function.', + required: true, + }, + method: { + env: 'PARSE_SERVER_PAGES_ROUTE_METHOD', + help: "The route method, e.g. 'GET' or 'POST'.", + required: true, + }, + path: { + env: 'PARSE_SERVER_PAGES_ROUTE_PATH', + help: 'The route path.', + required: true, + }, }; module.exports.PagesCustomUrlsOptions = { - "emailVerificationLinkExpired": { - "env": "PARSE_SERVER_PAGES_CUSTOM_URL_EMAIL_VERIFICATION_LINK_EXPIRED", - "help": "The URL to the custom page for email verification -> link expired." - }, - "emailVerificationLinkInvalid": { - "env": "PARSE_SERVER_PAGES_CUSTOM_URL_EMAIL_VERIFICATION_LINK_INVALID", - "help": "The URL to the custom page for email verification -> link invalid." - }, - "emailVerificationSendFail": { - "env": "PARSE_SERVER_PAGES_CUSTOM_URL_EMAIL_VERIFICATION_SEND_FAIL", - "help": "The URL to the custom page for email verification -> link send fail." - }, - "emailVerificationSendSuccess": { - "env": "PARSE_SERVER_PAGES_CUSTOM_URL_EMAIL_VERIFICATION_SEND_SUCCESS", - "help": "The URL to the custom page for email verification -> resend link -> success." - }, - "emailVerificationSuccess": { - "env": "PARSE_SERVER_PAGES_CUSTOM_URL_EMAIL_VERIFICATION_SUCCESS", - "help": "The URL to the custom page for email verification -> success." - }, - "passwordReset": { - "env": "PARSE_SERVER_PAGES_CUSTOM_URL_PASSWORD_RESET", - "help": "The URL to the custom page for password reset." - }, - "passwordResetLinkInvalid": { - "env": "PARSE_SERVER_PAGES_CUSTOM_URL_PASSWORD_RESET_LINK_INVALID", - "help": "The URL to the custom page for password reset -> link invalid." 
- }, - "passwordResetSuccess": { - "env": "PARSE_SERVER_PAGES_CUSTOM_URL_PASSWORD_RESET_SUCCESS", - "help": "The URL to the custom page for password reset -> success." - } + emailVerificationLinkExpired: { + env: 'PARSE_SERVER_PAGES_CUSTOM_URL_EMAIL_VERIFICATION_LINK_EXPIRED', + help: 'The URL to the custom page for email verification -> link expired.', + }, + emailVerificationLinkInvalid: { + env: 'PARSE_SERVER_PAGES_CUSTOM_URL_EMAIL_VERIFICATION_LINK_INVALID', + help: 'The URL to the custom page for email verification -> link invalid.', + }, + emailVerificationSendFail: { + env: 'PARSE_SERVER_PAGES_CUSTOM_URL_EMAIL_VERIFICATION_SEND_FAIL', + help: 'The URL to the custom page for email verification -> link send fail.', + }, + emailVerificationSendSuccess: { + env: 'PARSE_SERVER_PAGES_CUSTOM_URL_EMAIL_VERIFICATION_SEND_SUCCESS', + help: 'The URL to the custom page for email verification -> resend link -> success.', + }, + emailVerificationSuccess: { + env: 'PARSE_SERVER_PAGES_CUSTOM_URL_EMAIL_VERIFICATION_SUCCESS', + help: 'The URL to the custom page for email verification -> success.', + }, + passwordReset: { + env: 'PARSE_SERVER_PAGES_CUSTOM_URL_PASSWORD_RESET', + help: 'The URL to the custom page for password reset.', + }, + passwordResetLinkInvalid: { + env: 'PARSE_SERVER_PAGES_CUSTOM_URL_PASSWORD_RESET_LINK_INVALID', + help: 'The URL to the custom page for password reset -> link invalid.', + }, + passwordResetSuccess: { + env: 'PARSE_SERVER_PAGES_CUSTOM_URL_PASSWORD_RESET_SUCCESS', + help: 'The URL to the custom page for password reset -> success.', + }, }; module.exports.CustomPagesOptions = { - "choosePassword": { - "env": "PARSE_SERVER_CUSTOM_PAGES_CHOOSE_PASSWORD", - "help": "choose password page path" - }, - "expiredVerificationLink": { - "env": "PARSE_SERVER_CUSTOM_PAGES_EXPIRED_VERIFICATION_LINK", - "help": "expired verification link page path" - }, - "invalidLink": { - "env": "PARSE_SERVER_CUSTOM_PAGES_INVALID_LINK", - "help": "invalid link page path" 
- }, - "invalidPasswordResetLink": { - "env": "PARSE_SERVER_CUSTOM_PAGES_INVALID_PASSWORD_RESET_LINK", - "help": "invalid password reset link page path" - }, - "invalidVerificationLink": { - "env": "PARSE_SERVER_CUSTOM_PAGES_INVALID_VERIFICATION_LINK", - "help": "invalid verification link page path" - }, - "linkSendFail": { - "env": "PARSE_SERVER_CUSTOM_PAGES_LINK_SEND_FAIL", - "help": "verification link send fail page path" - }, - "linkSendSuccess": { - "env": "PARSE_SERVER_CUSTOM_PAGES_LINK_SEND_SUCCESS", - "help": "verification link send success page path" - }, - "parseFrameURL": { - "env": "PARSE_SERVER_CUSTOM_PAGES_PARSE_FRAME_URL", - "help": "for masking user-facing pages" - }, - "passwordResetSuccess": { - "env": "PARSE_SERVER_CUSTOM_PAGES_PASSWORD_RESET_SUCCESS", - "help": "password reset success page path" - }, - "verifyEmailSuccess": { - "env": "PARSE_SERVER_CUSTOM_PAGES_VERIFY_EMAIL_SUCCESS", - "help": "verify email success page path" - } + choosePassword: { + env: 'PARSE_SERVER_CUSTOM_PAGES_CHOOSE_PASSWORD', + help: 'choose password page path', + }, + expiredVerificationLink: { + env: 'PARSE_SERVER_CUSTOM_PAGES_EXPIRED_VERIFICATION_LINK', + help: 'expired verification link page path', + }, + invalidLink: { + env: 'PARSE_SERVER_CUSTOM_PAGES_INVALID_LINK', + help: 'invalid link page path', + }, + invalidPasswordResetLink: { + env: 'PARSE_SERVER_CUSTOM_PAGES_INVALID_PASSWORD_RESET_LINK', + help: 'invalid password reset link page path', + }, + invalidVerificationLink: { + env: 'PARSE_SERVER_CUSTOM_PAGES_INVALID_VERIFICATION_LINK', + help: 'invalid verification link page path', + }, + linkSendFail: { + env: 'PARSE_SERVER_CUSTOM_PAGES_LINK_SEND_FAIL', + help: 'verification link send fail page path', + }, + linkSendSuccess: { + env: 'PARSE_SERVER_CUSTOM_PAGES_LINK_SEND_SUCCESS', + help: 'verification link send success page path', + }, + parseFrameURL: { + env: 'PARSE_SERVER_CUSTOM_PAGES_PARSE_FRAME_URL', + help: 'for masking user-facing pages', + }, + 
passwordResetSuccess: { + env: 'PARSE_SERVER_CUSTOM_PAGES_PASSWORD_RESET_SUCCESS', + help: 'password reset success page path', + }, + verifyEmailSuccess: { + env: 'PARSE_SERVER_CUSTOM_PAGES_VERIFY_EMAIL_SUCCESS', + help: 'verify email success page path', + }, }; module.exports.LiveQueryOptions = { - "classNames": { - "env": "PARSE_SERVER_LIVEQUERY_CLASSNAMES", - "help": "parse-server's LiveQuery classNames", - "action": parsers.arrayParser - }, - "pubSubAdapter": { - "env": "PARSE_SERVER_LIVEQUERY_PUB_SUB_ADAPTER", - "help": "LiveQuery pubsub adapter", - "action": parsers.moduleOrObjectParser - }, - "redisOptions": { - "env": "PARSE_SERVER_LIVEQUERY_REDIS_OPTIONS", - "help": "parse-server's LiveQuery redisOptions", - "action": parsers.objectParser - }, - "redisURL": { - "env": "PARSE_SERVER_LIVEQUERY_REDIS_URL", - "help": "parse-server's LiveQuery redisURL" - }, - "regexTimeout": { - "env": "PARSE_SERVER_LIVEQUERY_REGEX_TIMEOUT", - "help": "Sets the maximum execution time in milliseconds for regular expression pattern matching in LiveQuery. This protects against Regular Expression Denial of Service (ReDoS) attacks where a malicious regex pattern could block the event loop. A regex that exceeds the timeout will be treated as non-matching.

The protection runs each regex evaluation in an isolated VM context with a timeout. This adds approximately 50 microseconds of overhead per regex evaluation. For most applications this is negligible, but it can add up if you have a very large number of LiveQuery subscriptions that use `$regex` on the same class. For example, 10,000 concurrent regex subscriptions would add approximately 500ms of processing time per object save event on that class.

Set to `0` to disable the timeout and use native regex evaluation without protection. Defaults to `100`.", - "action": parsers.numberParser("regexTimeout"), - "default": 100 - }, - "wssAdapter": { - "env": "PARSE_SERVER_LIVEQUERY_WSS_ADAPTER", - "help": "Adapter module for the WebSocketServer", - "action": parsers.moduleOrObjectParser - } + classNames: { + env: 'PARSE_SERVER_LIVEQUERY_CLASSNAMES', + help: "parse-server's LiveQuery classNames", + action: parsers.arrayParser, + }, + pubSubAdapter: { + env: 'PARSE_SERVER_LIVEQUERY_PUB_SUB_ADAPTER', + help: 'LiveQuery pubsub adapter', + action: parsers.moduleOrObjectParser, + }, + redisOptions: { + env: 'PARSE_SERVER_LIVEQUERY_REDIS_OPTIONS', + help: "parse-server's LiveQuery redisOptions", + action: parsers.objectParser, + }, + redisURL: { + env: 'PARSE_SERVER_LIVEQUERY_REDIS_URL', + help: "parse-server's LiveQuery redisURL", + }, + regexTimeout: { + env: 'PARSE_SERVER_LIVEQUERY_REGEX_TIMEOUT', + help: 'Sets the maximum execution time in milliseconds for regular expression pattern matching in LiveQuery. This protects against Regular Expression Denial of Service (ReDoS) attacks where a malicious regex pattern could block the event loop. A regex that exceeds the timeout will be treated as non-matching.

The protection runs each regex evaluation in an isolated VM context with a timeout. This adds approximately 50 microseconds of overhead per regex evaluation. For most applications this is negligible, but it can add up if you have a very large number of LiveQuery subscriptions that use `$regex` on the same class. For example, 10,000 concurrent regex subscriptions would add approximately 500ms of processing time per object save event on that class.

Set to `0` to disable the timeout and use native regex evaluation without protection. Defaults to `100`.', + action: parsers.numberParser('regexTimeout'), + default: 100, + }, + wssAdapter: { + env: 'PARSE_SERVER_LIVEQUERY_WSS_ADAPTER', + help: 'Adapter module for the WebSocketServer', + action: parsers.moduleOrObjectParser, + }, }; module.exports.LiveQueryServerOptions = { - "appId": { - "env": "PARSE_LIVE_QUERY_SERVER_APP_ID", - "help": "This string should match the appId in use by your Parse Server. If you deploy the LiveQuery server alongside Parse Server, the LiveQuery server will try to use the same appId." - }, - "cacheTimeout": { - "env": "PARSE_LIVE_QUERY_SERVER_CACHE_TIMEOUT", - "help": "Number in milliseconds. When clients provide the sessionToken to the LiveQuery server, the LiveQuery server will try to fetch its ParseUser's objectId from parse server and store it in the cache. The value defines the duration of the cache. Check the following Security section and our protocol specification for details, defaults to 5 * 1000 ms (5 seconds).", - "action": parsers.numberParser("cacheTimeout") - }, - "keyPairs": { - "env": "PARSE_LIVE_QUERY_SERVER_KEY_PAIRS", - "help": "A JSON object that serves as a whitelist of keys. It is used for validating clients when they try to connect to the LiveQuery server. Check the following Security section and our protocol specification for details.", - "action": parsers.objectParser - }, - "logLevel": { - "env": "PARSE_LIVE_QUERY_SERVER_LOG_LEVEL", - "help": "This string defines the log level of the LiveQuery server. We support VERBOSE, INFO, ERROR, NONE, defaults to INFO." - }, - "masterKey": { - "env": "PARSE_LIVE_QUERY_SERVER_MASTER_KEY", - "help": "This string should match the masterKey in use by your Parse Server. If you deploy the LiveQuery server alongside Parse Server, the LiveQuery server will try to use the same masterKey." 
- }, - "port": { - "env": "PARSE_LIVE_QUERY_SERVER_PORT", - "help": "The port to run the LiveQuery server, defaults to 1337.", - "action": parsers.numberParser("port"), - "default": 1337 - }, - "pubSubAdapter": { - "env": "PARSE_LIVE_QUERY_SERVER_PUB_SUB_ADAPTER", - "help": "LiveQuery pubsub adapter", - "action": parsers.moduleOrObjectParser - }, - "redisOptions": { - "env": "PARSE_LIVE_QUERY_SERVER_REDIS_OPTIONS", - "help": "parse-server's LiveQuery redisOptions", - "action": parsers.objectParser - }, - "redisURL": { - "env": "PARSE_LIVE_QUERY_SERVER_REDIS_URL", - "help": "parse-server's LiveQuery redisURL" - }, - "serverURL": { - "env": "PARSE_LIVE_QUERY_SERVER_SERVER_URL", - "help": "This string should match the serverURL in use by your Parse Server. If you deploy the LiveQuery server alongside Parse Server, the LiveQuery server will try to use the same serverURL." - }, - "websocketTimeout": { - "env": "PARSE_LIVE_QUERY_SERVER_WEBSOCKET_TIMEOUT", - "help": "Number of milliseconds between ping/pong frames. The WebSocket server sends ping/pong frames to the clients to keep the WebSocket alive. This value defines the interval of the ping/pong frame from the server to clients, defaults to 10 * 1000 ms (10 s).", - "action": parsers.numberParser("websocketTimeout") - }, - "wssAdapter": { - "env": "PARSE_LIVE_QUERY_SERVER_WSS_ADAPTER", - "help": "Adapter module for the WebSocketServer", - "action": parsers.moduleOrObjectParser - } + appId: { + env: 'PARSE_LIVE_QUERY_SERVER_APP_ID', + help: 'This string should match the appId in use by your Parse Server. If you deploy the LiveQuery server alongside Parse Server, the LiveQuery server will try to use the same appId.', + }, + cacheTimeout: { + env: 'PARSE_LIVE_QUERY_SERVER_CACHE_TIMEOUT', + help: "Number in milliseconds. When clients provide the sessionToken to the LiveQuery server, the LiveQuery server will try to fetch its ParseUser's objectId from parse server and store it in the cache. 
The value defines the duration of the cache. Check the following Security section and our protocol specification for details, defaults to 5 * 1000 ms (5 seconds).", + action: parsers.numberParser('cacheTimeout'), + }, + keyPairs: { + env: 'PARSE_LIVE_QUERY_SERVER_KEY_PAIRS', + help: 'A JSON object that serves as a whitelist of keys. It is used for validating clients when they try to connect to the LiveQuery server. Check the following Security section and our protocol specification for details.', + action: parsers.objectParser, + }, + logLevel: { + env: 'PARSE_LIVE_QUERY_SERVER_LOG_LEVEL', + help: 'This string defines the log level of the LiveQuery server. We support VERBOSE, INFO, ERROR, NONE, defaults to INFO.', + }, + masterKey: { + env: 'PARSE_LIVE_QUERY_SERVER_MASTER_KEY', + help: 'This string should match the masterKey in use by your Parse Server. If you deploy the LiveQuery server alongside Parse Server, the LiveQuery server will try to use the same masterKey.', + }, + port: { + env: 'PARSE_LIVE_QUERY_SERVER_PORT', + help: 'The port to run the LiveQuery server, defaults to 1337.', + action: parsers.numberParser('port'), + default: 1337, + }, + pubSubAdapter: { + env: 'PARSE_LIVE_QUERY_SERVER_PUB_SUB_ADAPTER', + help: 'LiveQuery pubsub adapter', + action: parsers.moduleOrObjectParser, + }, + redisOptions: { + env: 'PARSE_LIVE_QUERY_SERVER_REDIS_OPTIONS', + help: "parse-server's LiveQuery redisOptions", + action: parsers.objectParser, + }, + redisURL: { + env: 'PARSE_LIVE_QUERY_SERVER_REDIS_URL', + help: "parse-server's LiveQuery redisURL", + }, + serverURL: { + env: 'PARSE_LIVE_QUERY_SERVER_SERVER_URL', + help: 'This string should match the serverURL in use by your Parse Server. If you deploy the LiveQuery server alongside Parse Server, the LiveQuery server will try to use the same serverURL.', + }, + websocketTimeout: { + env: 'PARSE_LIVE_QUERY_SERVER_WEBSOCKET_TIMEOUT', + help: 'Number of milliseconds between ping/pong frames. 
The WebSocket server sends ping/pong frames to the clients to keep the WebSocket alive. This value defines the interval of the ping/pong frame from the server to clients, defaults to 10 * 1000 ms (10 s).', + action: parsers.numberParser('websocketTimeout'), + }, + wssAdapter: { + env: 'PARSE_LIVE_QUERY_SERVER_WSS_ADAPTER', + help: 'Adapter module for the WebSocketServer', + action: parsers.moduleOrObjectParser, + }, }; module.exports.IdempotencyOptions = { - "paths": { - "env": "PARSE_SERVER_EXPERIMENTAL_IDEMPOTENCY_PATHS", - "help": "An array of paths for which the feature should be enabled. The mount path must not be included, for example instead of `/parse/functions/myFunction` specifiy `functions/myFunction`. The entries are interpreted as regular expression, for example `functions/.*` matches all functions, `jobs/.*` matches all jobs, `classes/.*` matches all classes, `.*` matches all paths.", - "action": parsers.arrayParser, - "default": [] - }, - "ttl": { - "env": "PARSE_SERVER_EXPERIMENTAL_IDEMPOTENCY_TTL", - "help": "The duration in seconds after which a request record is discarded from the database, defaults to 300s.", - "action": parsers.numberParser("ttl"), - "default": 300 - } + paths: { + env: 'PARSE_SERVER_EXPERIMENTAL_IDEMPOTENCY_PATHS', + help: 'An array of paths for which the feature should be enabled. The mount path must not be included, for example instead of `/parse/functions/myFunction` specify `functions/myFunction`. 
The entries are interpreted as regular expression, for example `functions/.*` matches all functions, `jobs/.*` matches all jobs, `classes/.*` matches all classes, `.*` matches all paths.', + action: parsers.arrayParser, + default: [], + }, + ttl: { + env: 'PARSE_SERVER_EXPERIMENTAL_IDEMPOTENCY_TTL', + help: 'The duration in seconds after which a request record is discarded from the database, defaults to 300s.', + action: parsers.numberParser('ttl'), + default: 300, + }, }; module.exports.AccountLockoutOptions = { - "duration": { - "env": "PARSE_SERVER_ACCOUNT_LOCKOUT_DURATION", - "help": "Set the duration in minutes that a locked-out account remains locked out before automatically becoming unlocked.

Valid values are greater than `0` and less than `100000`.", - "action": parsers.numberParser("duration") - }, - "threshold": { - "env": "PARSE_SERVER_ACCOUNT_LOCKOUT_THRESHOLD", - "help": "Set the number of failed sign-in attempts that will cause a user account to be locked. If the account is locked. The account will unlock after the duration set in the `duration` option has passed and no further login attempts have been made.

Valid values are greater than `0` and less than `1000`.", - "action": parsers.numberParser("threshold") - }, - "unlockOnPasswordReset": { - "env": "PARSE_SERVER_ACCOUNT_LOCKOUT_UNLOCK_ON_PASSWORD_RESET", - "help": "Set to `true` if the account should be unlocked after a successful password reset.

Default is `false`.
Requires options `duration` and `threshold` to be set.", - "action": parsers.booleanParser, - "default": false - } + duration: { + env: 'PARSE_SERVER_ACCOUNT_LOCKOUT_DURATION', + help: 'Set the duration in minutes that a locked-out account remains locked out before automatically becoming unlocked.

Valid values are greater than `0` and less than `100000`.', + action: parsers.numberParser('duration'), + }, + threshold: { + env: 'PARSE_SERVER_ACCOUNT_LOCKOUT_THRESHOLD', + help: 'Set the number of failed sign-in attempts that will cause a user account to be locked. If the account is locked, it will unlock after the duration set in the `duration` option has passed and no further login attempts have been made.

Valid values are greater than `0` and less than `1000`.', + action: parsers.numberParser('threshold'), + }, + unlockOnPasswordReset: { + env: 'PARSE_SERVER_ACCOUNT_LOCKOUT_UNLOCK_ON_PASSWORD_RESET', + help: 'Set to `true` if the account should be unlocked after a successful password reset.

Default is `false`.
Requires options `duration` and `threshold` to be set.', + action: parsers.booleanParser, + default: false, + }, }; module.exports.PasswordPolicyOptions = { - "doNotAllowUsername": { - "env": "PARSE_SERVER_PASSWORD_POLICY_DO_NOT_ALLOW_USERNAME", - "help": "Set to `true` to disallow the username as part of the password.

Default is `false`.", - "action": parsers.booleanParser, - "default": false - }, - "maxPasswordAge": { - "env": "PARSE_SERVER_PASSWORD_POLICY_MAX_PASSWORD_AGE", - "help": "Set the number of days after which a password expires. Login attempts fail if the user does not reset the password before expiration.", - "action": parsers.numberParser("maxPasswordAge") - }, - "maxPasswordHistory": { - "env": "PARSE_SERVER_PASSWORD_POLICY_MAX_PASSWORD_HISTORY", - "help": "Set the number of previous password that will not be allowed to be set as new password. If the option is not set or set to `0`, no previous passwords will be considered.

Valid values are >= `0` and <= `20`.
Default is `0`.", - "action": parsers.numberParser("maxPasswordHistory") - }, - "resetPasswordSuccessOnInvalidEmail": { - "env": "PARSE_SERVER_PASSWORD_POLICY_RESET_PASSWORD_SUCCESS_ON_INVALID_EMAIL", - "help": "Set to `true` if a request to reset the password should return a success response even if the provided email address is invalid, or `false` if the request should return an error response if the email address is invalid.

Default is `true`.", - "action": parsers.booleanParser, - "default": true - }, - "resetTokenReuseIfValid": { - "env": "PARSE_SERVER_PASSWORD_POLICY_RESET_TOKEN_REUSE_IF_VALID", - "help": "Set to `true` if a password reset token should be reused in case another token is requested but there is a token that is still valid, i.e. has not expired. This avoids the often observed issue that a user requests multiple emails and does not know which link contains a valid token because each newly generated token would invalidate the previous token.

Default is `false`.", - "action": parsers.booleanParser, - "default": false - }, - "resetTokenValidityDuration": { - "env": "PARSE_SERVER_PASSWORD_POLICY_RESET_TOKEN_VALIDITY_DURATION", - "help": "Set the validity duration of the password reset token in seconds after which the token expires. The token is used in the link that is set in the email. After the token expires, the link becomes invalid and a new link has to be sent. If the option is not set or set to `undefined`, then the token never expires.

For example, to expire the token after 2 hours, set a value of 7200 seconds (= 60 seconds * 60 minutes * 2 hours).

Default is `undefined`.", - "action": parsers.numberParser("resetTokenValidityDuration") - }, - "validationError": { - "env": "PARSE_SERVER_PASSWORD_POLICY_VALIDATION_ERROR", - "help": "Set the error message to be sent.

Default is `Password does not meet the Password Policy requirements.`" - }, - "validatorCallback": { - "env": "PARSE_SERVER_PASSWORD_POLICY_VALIDATOR_CALLBACK", - "help": "Set a callback function to validate a password to be accepted.

If used in combination with `validatorPattern`, the password must pass both to be accepted." - }, - "validatorPattern": { - "env": "PARSE_SERVER_PASSWORD_POLICY_VALIDATOR_PATTERN", - "help": "Set the regular expression validation pattern a password must match to be accepted.

If used in combination with `validatorCallback`, the password must pass both to be accepted." - } + doNotAllowUsername: { + env: 'PARSE_SERVER_PASSWORD_POLICY_DO_NOT_ALLOW_USERNAME', + help: 'Set to `true` to disallow the username as part of the password.

Default is `false`.', + action: parsers.booleanParser, + default: false, + }, + maxPasswordAge: { + env: 'PARSE_SERVER_PASSWORD_POLICY_MAX_PASSWORD_AGE', + help: 'Set the number of days after which a password expires. Login attempts fail if the user does not reset the password before expiration.', + action: parsers.numberParser('maxPasswordAge'), + }, + maxPasswordHistory: { + env: 'PARSE_SERVER_PASSWORD_POLICY_MAX_PASSWORD_HISTORY', + help: 'Set the number of previous passwords that will not be allowed to be set as a new password. If the option is not set or set to `0`, no previous passwords will be considered.

Valid values are >= `0` and <= `20`.
Default is `0`.', + action: parsers.numberParser('maxPasswordHistory'), + }, + resetPasswordSuccessOnInvalidEmail: { + env: 'PARSE_SERVER_PASSWORD_POLICY_RESET_PASSWORD_SUCCESS_ON_INVALID_EMAIL', + help: 'Set to `true` if a request to reset the password should return a success response even if the provided email address is invalid, or `false` if the request should return an error response if the email address is invalid.

Default is `true`.', + action: parsers.booleanParser, + default: true, + }, + resetTokenReuseIfValid: { + env: 'PARSE_SERVER_PASSWORD_POLICY_RESET_TOKEN_REUSE_IF_VALID', + help: 'Set to `true` if a password reset token should be reused in case another token is requested but there is a token that is still valid, i.e. has not expired. This avoids the often observed issue that a user requests multiple emails and does not know which link contains a valid token because each newly generated token would invalidate the previous token.

Default is `false`.', + action: parsers.booleanParser, + default: false, + }, + resetTokenValidityDuration: { + env: 'PARSE_SERVER_PASSWORD_POLICY_RESET_TOKEN_VALIDITY_DURATION', + help: 'Set the validity duration of the password reset token in seconds after which the token expires. The token is used in the link that is set in the email. After the token expires, the link becomes invalid and a new link has to be sent. If the option is not set or set to `undefined`, then the token never expires.

For example, to expire the token after 2 hours, set a value of 7200 seconds (= 60 seconds * 60 minutes * 2 hours).

Default is `undefined`.', + action: parsers.numberParser('resetTokenValidityDuration'), + }, + validationError: { + env: 'PARSE_SERVER_PASSWORD_POLICY_VALIDATION_ERROR', + help: 'Set the error message to be sent.

Default is `Password does not meet the Password Policy requirements.`', + }, + validatorCallback: { + env: 'PARSE_SERVER_PASSWORD_POLICY_VALIDATOR_CALLBACK', + help: 'Set a callback function to validate a password to be accepted.

If used in combination with `validatorPattern`, the password must pass both to be accepted.', + }, + validatorPattern: { + env: 'PARSE_SERVER_PASSWORD_POLICY_VALIDATOR_PATTERN', + help: 'Set the regular expression validation pattern a password must match to be accepted.

If used in combination with `validatorCallback`, the password must pass both to be accepted.', + }, }; module.exports.FileUploadOptions = { - "allowedFileUrlDomains": { - "env": "PARSE_SERVER_FILE_UPLOAD_ALLOWED_FILE_URL_DOMAINS", - "help": "Sets the allowed hostnames for file URLs referenced in Parse objects. When a File object includes a URL, its hostname must match one of these entries to be accepted. Supports exact hostnames (e.g., `'cdn.example.com'`) and wildcard subdomains (e.g., `'*.example.com'`). Use `['*']` to allow any domain. Use `[]` to block all file URLs (only name-based files allowed).", - "action": parsers.arrayParser, - "default": ["*"] - }, - "enableForAnonymousUser": { - "env": "PARSE_SERVER_FILE_UPLOAD_ENABLE_FOR_ANONYMOUS_USER", - "help": "Is true if file upload should be allowed for anonymous users.", - "action": parsers.booleanParser, - "default": false - }, - "enableForAuthenticatedUser": { - "env": "PARSE_SERVER_FILE_UPLOAD_ENABLE_FOR_AUTHENTICATED_USER", - "help": "Is true if file upload should be allowed for authenticated users.", - "action": parsers.booleanParser, - "default": true - }, - "enableForPublic": { - "env": "PARSE_SERVER_FILE_UPLOAD_ENABLE_FOR_PUBLIC", - "help": "Is true if file upload should be allowed for anyone, regardless of user authentication.", - "action": parsers.booleanParser, - "default": false - }, - "fileExtensions": { - "env": "PARSE_SERVER_FILE_UPLOAD_FILE_EXTENSIONS", - "help": "Sets the allowed file extensions for uploading files. The extension is defined as an array of file extensions, or a regex pattern.

It is recommended to only allow the file extensions that your app actually needs, rather than relying on blocking dangerous extensions. This allowlist approach is more secure because new dangerous file extensions may emerge that are not covered by the default blocklist.

The default blocks the most common file extensions that are known to be rendered as active content by web browsers, such as HTML, SVG, and XML files, which may be used by an attacker to compromise the session token of another user via accessing the browser's local storage. The blocked extensions are: `html`, `htm`, `shtml`, `xhtml`, `xhtml+xml`, `xht`, `svg`, `svgz`, `svg+xml`, `xml`, `xsl`, `xslt`, `xslt+xml`, `xsd`, `rng`, `rdf`, `rdf+xml`, `owl`, `mathml`, `mathml+xml`.

Defaults to `[\"^(?!([xXsS]?[hH][tT][mM][lL]?(\\\\+[xX][mM][lL])?|[xX][hH][tT]|[sS][vV][gG]([zZ]|\\\\+[xX][mM][lL])?|[xX][mM][lL]|[xX][sS][lL][tT]?(\\\\+[xX][mM][lL])?|[xX][sS][dD]|[rR][nN][gG]|[rR][dD][fF](\\\\+[xX][mM][lL])?|[oO][wW][lL]|[mM][aA][tT][hH][mM][lL](\\\\+[xX][mM][lL])?)$)\"]`.", - "action": parsers.arrayParser, - "default": ["^(?!([xXsS]?[hH][tT][mM][lL]?(\\+[xX][mM][lL])?|[xX][hH][tT]|[sS][vV][gG]([zZ]|\\+[xX][mM][lL])?|[xX][mM][lL]|[xX][sS][lL][tT]?(\\+[xX][mM][lL])?|[xX][sS][dD]|[rR][nN][gG]|[rR][dD][fF](\\+[xX][mM][lL])?|[oO][wW][lL]|[mM][aA][tT][hH][mM][lL](\\+[xX][mM][lL])?)$)"] - } + allowedFileUrlDomains: { + env: 'PARSE_SERVER_FILE_UPLOAD_ALLOWED_FILE_URL_DOMAINS', + help: "Sets the allowed hostnames for file URLs referenced in Parse objects. When a File object includes a URL, its hostname must match one of these entries to be accepted. Supports exact hostnames (e.g., `'cdn.example.com'`) and wildcard subdomains (e.g., `'*.example.com'`). Use `['*']` to allow any domain. Use `[]` to block all file URLs (only name-based files allowed).", + action: parsers.arrayParser, + default: ['*'], + }, + enableForAnonymousUser: { + env: 'PARSE_SERVER_FILE_UPLOAD_ENABLE_FOR_ANONYMOUS_USER', + help: 'Is true if file upload should be allowed for anonymous users.', + action: parsers.booleanParser, + default: false, + }, + enableForAuthenticatedUser: { + env: 'PARSE_SERVER_FILE_UPLOAD_ENABLE_FOR_AUTHENTICATED_USER', + help: 'Is true if file upload should be allowed for authenticated users.', + action: parsers.booleanParser, + default: true, + }, + enableForPublic: { + env: 'PARSE_SERVER_FILE_UPLOAD_ENABLE_FOR_PUBLIC', + help: 'Is true if file upload should be allowed for anyone, regardless of user authentication.', + action: parsers.booleanParser, + default: false, + }, + fileExtensions: { + env: 'PARSE_SERVER_FILE_UPLOAD_FILE_EXTENSIONS', + help: 'Sets the allowed file extensions for uploading files. 
The extension is defined as an array of file extensions, or a regex pattern.

It is recommended to only allow the file extensions that your app actually needs, rather than relying on blocking dangerous extensions. This allowlist approach is more secure because new dangerous file extensions may emerge that are not covered by the default blocklist.

The default blocks the most common file extensions that are known to be rendered as active content by web browsers, such as HTML, SVG, and XML files, which may be used by an attacker to compromise the session token of another user via accessing the browser\'s local storage. The blocked extensions are: `html`, `htm`, `shtml`, `xhtml`, `xhtml+xml`, `xht`, `svg`, `svgz`, `svg+xml`, `xml`, `xsl`, `xslt`, `xslt+xml`, `xsd`, `rng`, `rdf`, `rdf+xml`, `owl`, `mathml`, `mathml+xml`.

Defaults to `["^(?!([xXsS]?[hH][tT][mM][lL]?(\\\\+[xX][mM][lL])?|[xX][hH][tT]|[sS][vV][gG]([zZ]|\\\\+[xX][mM][lL])?|[xX][mM][lL]|[xX][sS][lL][tT]?(\\\\+[xX][mM][lL])?|[xX][sS][dD]|[rR][nN][gG]|[rR][dD][fF](\\\\+[xX][mM][lL])?|[oO][wW][lL]|[mM][aA][tT][hH][mM][lL](\\\\+[xX][mM][lL])?)$)"]`.', + action: parsers.arrayParser, + default: [ + '^(?!([xXsS]?[hH][tT][mM][lL]?(\\+[xX][mM][lL])?|[xX][hH][tT]|[sS][vV][gG]([zZ]|\\+[xX][mM][lL])?|[xX][mM][lL]|[xX][sS][lL][tT]?(\\+[xX][mM][lL])?|[xX][sS][dD]|[rR][nN][gG]|[rR][dD][fF](\\+[xX][mM][lL])?|[oO][wW][lL]|[mM][aA][tT][hH][mM][lL](\\+[xX][mM][lL])?)$)', + ], + }, }; /* The available log levels for Parse Server logging. Valid values are:
- `'error'` - Error level (highest priority)
- `'warn'` - Warning level
- `'info'` - Info level (default)
- `'verbose'` - Verbose level
- `'debug'` - Debug level
- `'silly'` - Silly level (lowest priority) */ module.exports.LogLevel = { - "debug": { - "env": "PARSE_SERVER_LOG_LEVEL_DEBUG", - "help": "Debug level", - "required": true - }, - "error": { - "env": "PARSE_SERVER_LOG_LEVEL_ERROR", - "help": "Error level - highest priority", - "required": true - }, - "info": { - "env": "PARSE_SERVER_LOG_LEVEL_INFO", - "help": "Info level - default", - "required": true - }, - "silly": { - "env": "PARSE_SERVER_LOG_LEVEL_SILLY", - "help": "Silly level - lowest priority", - "required": true - }, - "verbose": { - "env": "PARSE_SERVER_LOG_LEVEL_VERBOSE", - "help": "Verbose level", - "required": true - }, - "warn": { - "env": "PARSE_SERVER_LOG_LEVEL_WARN", - "help": "Warning level", - "required": true - } + debug: { + env: 'PARSE_SERVER_LOG_LEVEL_DEBUG', + help: 'Debug level', + required: true, + }, + error: { + env: 'PARSE_SERVER_LOG_LEVEL_ERROR', + help: 'Error level - highest priority', + required: true, + }, + info: { + env: 'PARSE_SERVER_LOG_LEVEL_INFO', + help: 'Info level - default', + required: true, + }, + silly: { + env: 'PARSE_SERVER_LOG_LEVEL_SILLY', + help: 'Silly level - lowest priority', + required: true, + }, + verbose: { + env: 'PARSE_SERVER_LOG_LEVEL_VERBOSE', + help: 'Verbose level', + required: true, + }, + warn: { + env: 'PARSE_SERVER_LOG_LEVEL_WARN', + help: 'Warning level', + required: true, + }, }; module.exports.LogClientEvent = { - "keys": { - "env": "PARSE_SERVER_DATABASE_LOG_CLIENT_EVENTS_KEYS", - "help": "Optional array of dot-notation paths to extract specific data from the event object. If not provided or empty, the entire event object will be logged.", - "action": parsers.arrayParser - }, - "logLevel": { - "env": "PARSE_SERVER_DATABASE_LOG_CLIENT_EVENTS_LOG_LEVEL", - "help": "The log level to use for this event. See [LogLevel](LogLevel.html) for available values. 
Defaults to `'info'`.", - "default": "info" - }, - "name": { - "env": "PARSE_SERVER_DATABASE_LOG_CLIENT_EVENTS_NAME", - "help": "The MongoDB driver event name to listen for. See the [MongoDB driver events documentation](https://www.mongodb.com/docs/drivers/node/current/fundamentals/monitoring/) for available events.", - "required": true - } + keys: { + env: 'PARSE_SERVER_DATABASE_LOG_CLIENT_EVENTS_KEYS', + help: 'Optional array of dot-notation paths to extract specific data from the event object. If not provided or empty, the entire event object will be logged.', + action: parsers.arrayParser, + }, + logLevel: { + env: 'PARSE_SERVER_DATABASE_LOG_CLIENT_EVENTS_LOG_LEVEL', + help: "The log level to use for this event. See [LogLevel](LogLevel.html) for available values. Defaults to `'info'`.", + default: 'info', + }, + name: { + env: 'PARSE_SERVER_DATABASE_LOG_CLIENT_EVENTS_NAME', + help: 'The MongoDB driver event name to listen for. See the [MongoDB driver events documentation](https://www.mongodb.com/docs/drivers/node/current/fundamentals/monitoring/) for available events.', + required: true, + }, }; module.exports.DatabaseOptions = { - "allowPublicExplain": { - "env": "PARSE_SERVER_DATABASE_ALLOW_PUBLIC_EXPLAIN", - "help": "Set to `true` to allow `Parse.Query.explain` without master key.

\u26A0\uFE0F Enabling this option may expose sensitive query performance data to unauthorized users and could potentially be exploited for malicious purposes.", - "action": parsers.booleanParser, - "default": false - }, - "appName": { - "env": "PARSE_SERVER_DATABASE_APP_NAME", - "help": "The MongoDB driver option to specify the name of the application that created this MongoClient instance." - }, - "authMechanism": { - "env": "PARSE_SERVER_DATABASE_AUTH_MECHANISM", - "help": "The MongoDB driver option to specify the authentication mechanism that MongoDB will use to authenticate the connection." - }, - "authMechanismProperties": { - "env": "PARSE_SERVER_DATABASE_AUTH_MECHANISM_PROPERTIES", - "help": "The MongoDB driver option to specify properties for the specified authMechanism as a comma-separated list of colon-separated key-value pairs.", - "action": parsers.objectParser - }, - "authSource": { - "env": "PARSE_SERVER_DATABASE_AUTH_SOURCE", - "help": "The MongoDB driver option to specify the database name associated with the user's credentials." - }, - "autoSelectFamily": { - "env": "PARSE_SERVER_DATABASE_AUTO_SELECT_FAMILY", - "help": "The MongoDB driver option to set whether the socket attempts to connect to IPv6 and IPv4 addresses until a connection is established. If available, the driver will select the first IPv6 address.", - "action": parsers.booleanParser - }, - "autoSelectFamilyAttemptTimeout": { - "env": "PARSE_SERVER_DATABASE_AUTO_SELECT_FAMILY_ATTEMPT_TIMEOUT", - "help": "The MongoDB driver option to specify the amount of time in milliseconds to wait for a connection attempt to finish before trying the next address when using the autoSelectFamily option. If set to a positive integer less than 10, the value 10 is used instead.", - "action": parsers.numberParser("autoSelectFamilyAttemptTimeout") - }, - "batchSize": { - "env": "PARSE_SERVER_DATABASE_BATCH_SIZE", - "help": "The number of documents per batch for MongoDB cursor `getMore` operations. 
A lower value reduces memory usage per batch; a higher value reduces the number of network round-trips.", - "action": parsers.numberParser("batchSize"), - "default": 1000 - }, - "clientMetadata": { - "env": "PARSE_SERVER_DATABASE_CLIENT_METADATA", - "help": "Custom metadata to append to database client connections for identifying Parse Server instances in database logs. If set, this metadata will be visible in database logs during connection handshakes. This can help with debugging and monitoring in deployments with multiple database clients. Set `name` to identify your application (e.g., 'MyApp') and `version` to your application's version. Leave undefined (default) to disable this feature and avoid the additional data transfer overhead.", - "action": parsers.objectParser, - "type": "DatabaseOptionsClientMetadata" - }, - "compressors": { - "env": "PARSE_SERVER_DATABASE_COMPRESSORS", - "help": "The MongoDB driver option to specify an array or comma-delimited string of compressors to enable network compression for communication between this client and a mongod/mongos instance." - }, - "connectTimeoutMS": { - "env": "PARSE_SERVER_DATABASE_CONNECT_TIMEOUT_MS", - "help": "The MongoDB driver option to specify the amount of time, in milliseconds, to wait to establish a single TCP socket connection to the server before raising an error. Specifying 0 disables the connection timeout.", - "action": parsers.numberParser("connectTimeoutMS") - }, - "createIndexAuthDataUniqueness": { - "env": "PARSE_SERVER_DATABASE_CREATE_INDEX_AUTH_DATA_UNIQUENESS", - "help": "Set to `true` to automatically create unique indexes on the authData fields of the _User collection for each configured auth provider on server start, including `anonymous` when anonymous users are enabled. These indexes prevent race conditions during concurrent signups with the same authData. Set to `false` to skip index creation. Default is `true`.

\u26A0\uFE0F When setting this option to `false` to manually create the indexes, keep in mind that the otherwise automatically created indexes may change in the future to be optimized for the internal usage by Parse Server.", - "action": parsers.booleanParser, - "default": true - }, - "createIndexRoleName": { - "env": "PARSE_SERVER_DATABASE_CREATE_INDEX_ROLE_NAME", - "help": "Set to `true` to automatically create a unique index on the name field of the _Role collection on server start. Set to `false` to skip index creation. Default is `true`.

\u26A0\uFE0F When setting this option to `false` to manually create the index, keep in mind that the otherwise automatically created index may change in the future to be optimized for the internal usage by Parse Server.", - "action": parsers.booleanParser, - "default": true - }, - "createIndexUserEmail": { - "env": "PARSE_SERVER_DATABASE_CREATE_INDEX_USER_EMAIL", - "help": "Set to `true` to automatically create indexes on the email field of the _User collection on server start. Set to `false` to skip index creation. Default is `true`.

\u26A0\uFE0F When setting this option to `false` to manually create the index, keep in mind that the otherwise automatically created index may change in the future to be optimized for the internal usage by Parse Server.", - "action": parsers.booleanParser, - "default": true - }, - "createIndexUserEmailCaseInsensitive": { - "env": "PARSE_SERVER_DATABASE_CREATE_INDEX_USER_EMAIL_CASE_INSENSITIVE", - "help": "Set to `true` to automatically create a case-insensitive index on the email field of the _User collection on server start. Set to `false` to skip index creation. Default is `true`.

\u26A0\uFE0F When setting this option to `false` to manually create the index, keep in mind that the otherwise automatically created index may change in the future to be optimized for the internal usage by Parse Server.", - "action": parsers.booleanParser, - "default": true - }, - "createIndexUserEmailVerifyToken": { - "env": "PARSE_SERVER_DATABASE_CREATE_INDEX_USER_EMAIL_VERIFY_TOKEN", - "help": "Set to `true` to automatically create an index on the _email_verify_token field of the _User collection on server start. Set to `false` to skip index creation. Default is `true`.

\u26A0\uFE0F When setting this option to `false` to manually create the index, keep in mind that the otherwise automatically created index may change in the future to be optimized for the internal usage by Parse Server.", - "action": parsers.booleanParser, - "default": true - }, - "createIndexUserPasswordResetToken": { - "env": "PARSE_SERVER_DATABASE_CREATE_INDEX_USER_PASSWORD_RESET_TOKEN", - "help": "Set to `true` to automatically create an index on the _perishable_token field of the _User collection on server start. Set to `false` to skip index creation. Default is `true`.

\u26A0\uFE0F When setting this option to `false` to manually create the index, keep in mind that the otherwise automatically created index may change in the future to be optimized for the internal usage by Parse Server.", - "action": parsers.booleanParser, - "default": true - }, - "createIndexUserUsername": { - "env": "PARSE_SERVER_DATABASE_CREATE_INDEX_USER_USERNAME", - "help": "Set to `true` to automatically create indexes on the username field of the _User collection on server start. Set to `false` to skip index creation. Default is `true`.

\u26A0\uFE0F When setting this option to `false` to manually create the index, keep in mind that the otherwise automatically created index may change in the future to be optimized for the internal usage by Parse Server.", - "action": parsers.booleanParser, - "default": true - }, - "createIndexUserUsernameCaseInsensitive": { - "env": "PARSE_SERVER_DATABASE_CREATE_INDEX_USER_USERNAME_CASE_INSENSITIVE", - "help": "Set to `true` to automatically create a case-insensitive index on the username field of the _User collection on server start. Set to `false` to skip index creation. Default is `true`.

\u26A0\uFE0F When setting this option to `false` to manually create the index, keep in mind that the otherwise automatically created index may change in the future to be optimized for the internal usage by Parse Server.", - "action": parsers.booleanParser, - "default": true - }, - "directConnection": { - "env": "PARSE_SERVER_DATABASE_DIRECT_CONNECTION", - "help": "The MongoDB driver option to force a Single topology type with a connection string containing one host.", - "action": parsers.booleanParser - }, - "disableIndexFieldValidation": { - "env": "PARSE_SERVER_DATABASE_DISABLE_INDEX_FIELD_VALIDATION", - "help": "Set to `true` to disable validation of index fields. When disabled, indexes can be created even if the fields do not exist in the schema. This can be useful when creating indexes on fields that will be added later.", - "action": parsers.booleanParser - }, - "enableSchemaHooks": { - "env": "PARSE_SERVER_DATABASE_ENABLE_SCHEMA_HOOKS", - "help": "Enables database real-time hooks to update single schema cache. Set to `true` if using multiple Parse Servers instances connected to the same database. Failing to do so will cause a schema change to not propagate to all instances and re-syncing will only happen when the instances restart. 
To use this feature with MongoDB, a replica set cluster with [change stream](https://docs.mongodb.com/manual/changeStreams/#availability) support is required.", - "action": parsers.booleanParser, - "default": false - }, - "forceServerObjectId": { - "env": "PARSE_SERVER_DATABASE_FORCE_SERVER_OBJECT_ID", - "help": "The MongoDB driver option to force server to assign _id values instead of driver.", - "action": parsers.booleanParser - }, - "heartbeatFrequencyMS": { - "env": "PARSE_SERVER_DATABASE_HEARTBEAT_FREQUENCY_MS", - "help": "The MongoDB driver option to specify the frequency in milliseconds at which the driver checks the state of the MongoDB deployment.", - "action": parsers.numberParser("heartbeatFrequencyMS") - }, - "loadBalanced": { - "env": "PARSE_SERVER_DATABASE_LOAD_BALANCED", - "help": "The MongoDB driver option to instruct the driver it is connecting to a load balancer fronting a mongos like service.", - "action": parsers.booleanParser - }, - "localThresholdMS": { - "env": "PARSE_SERVER_DATABASE_LOCAL_THRESHOLD_MS", - "help": "The MongoDB driver option to specify the size (in milliseconds) of the latency window for selecting among multiple suitable MongoDB instances.", - "action": parsers.numberParser("localThresholdMS") - }, - "logClientEvents": { - "env": "PARSE_SERVER_DATABASE_LOG_CLIENT_EVENTS", - "help": "An array of MongoDB client event configurations to enable logging of specific events.", - "action": parsers.arrayParser, - "type": "LogClientEvent[]" - }, - "maxConnecting": { - "env": "PARSE_SERVER_DATABASE_MAX_CONNECTING", - "help": "The MongoDB driver option to specify the maximum number of connections that may be in the process of being established concurrently by the connection pool.", - "action": parsers.numberParser("maxConnecting") - }, - "maxIdleTimeMS": { - "env": "PARSE_SERVER_DATABASE_MAX_IDLE_TIME_MS", - "help": "The MongoDB driver option to specify the amount of time in milliseconds that a connection can remain idle in the connection 
pool before being removed and closed.", - "action": parsers.numberParser("maxIdleTimeMS") - }, - "maxPoolSize": { - "env": "PARSE_SERVER_DATABASE_MAX_POOL_SIZE", - "help": "The MongoDB driver option to set the maximum number of opened, cached, ready-to-use database connections maintained by the driver.", - "action": parsers.numberParser("maxPoolSize") - }, - "maxStalenessSeconds": { - "env": "PARSE_SERVER_DATABASE_MAX_STALENESS_SECONDS", - "help": "The MongoDB driver option to set the maximum replication lag for reads from secondary nodes.", - "action": parsers.numberParser("maxStalenessSeconds") - }, - "maxTimeMS": { - "env": "PARSE_SERVER_DATABASE_MAX_TIME_MS", - "help": "The MongoDB driver option to set a cumulative time limit in milliseconds for processing operations on a cursor.", - "action": parsers.numberParser("maxTimeMS") - }, - "minPoolSize": { - "env": "PARSE_SERVER_DATABASE_MIN_POOL_SIZE", - "help": "The MongoDB driver option to set the minimum number of opened, cached, ready-to-use database connections maintained by the driver.", - "action": parsers.numberParser("minPoolSize") - }, - "proxyHost": { - "env": "PARSE_SERVER_DATABASE_PROXY_HOST", - "help": "The MongoDB driver option to configure a Socks5 proxy host used for creating TCP connections." - }, - "proxyPassword": { - "env": "PARSE_SERVER_DATABASE_PROXY_PASSWORD", - "help": "The MongoDB driver option to configure a Socks5 proxy password when the proxy requires username/password authentication." - }, - "proxyPort": { - "env": "PARSE_SERVER_DATABASE_PROXY_PORT", - "help": "The MongoDB driver option to configure a Socks5 proxy port used for creating TCP connections.", - "action": parsers.numberParser("proxyPort") - }, - "proxyUsername": { - "env": "PARSE_SERVER_DATABASE_PROXY_USERNAME", - "help": "The MongoDB driver option to configure a Socks5 proxy username when the proxy requires username/password authentication." 
- }, - "readConcernLevel": { - "env": "PARSE_SERVER_DATABASE_READ_CONCERN_LEVEL", - "help": "The MongoDB driver option to specify the level of isolation." - }, - "readPreference": { - "env": "PARSE_SERVER_DATABASE_READ_PREFERENCE", - "help": "The MongoDB driver option to specify the read preferences for this connection." - }, - "readPreferenceTags": { - "env": "PARSE_SERVER_DATABASE_READ_PREFERENCE_TAGS", - "help": "The MongoDB driver option to specify the tags document as a comma-separated list of colon-separated key-value pairs.", - "action": parsers.arrayParser - }, - "replicaSet": { - "env": "PARSE_SERVER_DATABASE_REPLICA_SET", - "help": "The MongoDB driver option to specify the name of the replica set, if the mongod is a member of a replica set." - }, - "retryReads": { - "env": "PARSE_SERVER_DATABASE_RETRY_READS", - "help": "The MongoDB driver option to enable retryable reads.", - "action": parsers.booleanParser - }, - "retryWrites": { - "env": "PARSE_SERVER_DATABASE_RETRY_WRITES", - "help": "The MongoDB driver option to set whether to retry failed writes.", - "action": parsers.booleanParser - }, - "schemaCacheTtl": { - "env": "PARSE_SERVER_DATABASE_SCHEMA_CACHE_TTL", - "help": "The duration in seconds after which the schema cache expires and will be refetched from the database. Use this option if using multiple Parse Servers instances connected to the same database. A low duration will cause the schema cache to be updated too often, causing unnecessary database reads. A high duration will cause the schema to be updated too rarely, increasing the time required until schema changes propagate to all server instances. This feature can be used as an alternative or in conjunction with the option `enableSchemaHooks`. 
Default is infinite which means the schema cache never expires.", - "action": parsers.numberParser("schemaCacheTtl") - }, - "serverMonitoringMode": { - "env": "PARSE_SERVER_DATABASE_SERVER_MONITORING_MODE", - "help": "The MongoDB driver option to instruct the driver monitors to use a specific monitoring mode." - }, - "serverSelectionTimeoutMS": { - "env": "PARSE_SERVER_DATABASE_SERVER_SELECTION_TIMEOUT_MS", - "help": "The MongoDB driver option to specify the amount of time in milliseconds for a server to be considered suitable for selection.", - "action": parsers.numberParser("serverSelectionTimeoutMS") - }, - "socketTimeoutMS": { - "env": "PARSE_SERVER_DATABASE_SOCKET_TIMEOUT_MS", - "help": "The MongoDB driver option to specify the amount of time, in milliseconds, spent attempting to send or receive on a socket before timing out. Specifying 0 means no timeout.", - "action": parsers.numberParser("socketTimeoutMS") - }, - "srvMaxHosts": { - "env": "PARSE_SERVER_DATABASE_SRV_MAX_HOSTS", - "help": "The MongoDB driver option to specify the maximum number of hosts to connect to when using an srv connection string, a setting of 0 means unlimited hosts.", - "action": parsers.numberParser("srvMaxHosts") - }, - "srvServiceName": { - "env": "PARSE_SERVER_DATABASE_SRV_SERVICE_NAME", - "help": "The MongoDB driver option to modify the srv URI service name." 
- }, - "ssl": { - "env": "PARSE_SERVER_DATABASE_SSL", - "help": "The MongoDB driver option to enable or disable TLS/SSL for the connection (equivalent to tls option).", - "action": parsers.booleanParser - }, - "tls": { - "env": "PARSE_SERVER_DATABASE_TLS", - "help": "The MongoDB driver option to enable or disable TLS/SSL for the connection.", - "action": parsers.booleanParser - }, - "tlsAllowInvalidCertificates": { - "env": "PARSE_SERVER_DATABASE_TLS_ALLOW_INVALID_CERTIFICATES", - "help": "The MongoDB driver option to bypass validation of the certificates presented by the mongod/mongos instance.", - "action": parsers.booleanParser - }, - "tlsAllowInvalidHostnames": { - "env": "PARSE_SERVER_DATABASE_TLS_ALLOW_INVALID_HOSTNAMES", - "help": "The MongoDB driver option to disable hostname validation of the certificate presented by the mongod/mongos instance.", - "action": parsers.booleanParser - }, - "tlsCAFile": { - "env": "PARSE_SERVER_DATABASE_TLS_CAFILE", - "help": "The MongoDB driver option to specify the location of a local .pem file that contains the root certificate chain from the Certificate Authority." - }, - "tlsCertificateKeyFile": { - "env": "PARSE_SERVER_DATABASE_TLS_CERTIFICATE_KEY_FILE", - "help": "The MongoDB driver option to specify the location of a local .pem file that contains the client's TLS/SSL certificate and key." - }, - "tlsCertificateKeyFilePassword": { - "env": "PARSE_SERVER_DATABASE_TLS_CERTIFICATE_KEY_FILE_PASSWORD", - "help": "The MongoDB driver option to specify the password to decrypt the tlsCertificateKeyFile." 
- }, - "tlsInsecure": { - "env": "PARSE_SERVER_DATABASE_TLS_INSECURE", - "help": "The MongoDB driver option to disable various certificate validations.", - "action": parsers.booleanParser - }, - "waitQueueTimeoutMS": { - "env": "PARSE_SERVER_DATABASE_WAIT_QUEUE_TIMEOUT_MS", - "help": "The MongoDB driver option to specify the maximum time in milliseconds that a thread can wait for a connection to become available.", - "action": parsers.numberParser("waitQueueTimeoutMS") - }, - "zlibCompressionLevel": { - "env": "PARSE_SERVER_DATABASE_ZLIB_COMPRESSION_LEVEL", - "help": "The MongoDB driver option to specify the compression level if using zlib for network compression (0-9).", - "action": parsers.numberParser("zlibCompressionLevel") - } + allowPublicExplain: { + env: 'PARSE_SERVER_DATABASE_ALLOW_PUBLIC_EXPLAIN', + help: 'Set to `true` to allow `Parse.Query.explain` without master key.

\u26A0\uFE0F Enabling this option may expose sensitive query performance data to unauthorized users and could potentially be exploited for malicious purposes.', + action: parsers.booleanParser, + default: false, + }, + appName: { + env: 'PARSE_SERVER_DATABASE_APP_NAME', + help: 'The MongoDB driver option to specify the name of the application that created this MongoClient instance.', + }, + authMechanism: { + env: 'PARSE_SERVER_DATABASE_AUTH_MECHANISM', + help: 'The MongoDB driver option to specify the authentication mechanism that MongoDB will use to authenticate the connection.', + }, + authMechanismProperties: { + env: 'PARSE_SERVER_DATABASE_AUTH_MECHANISM_PROPERTIES', + help: 'The MongoDB driver option to specify properties for the specified authMechanism as a comma-separated list of colon-separated key-value pairs.', + action: parsers.objectParser, + }, + authSource: { + env: 'PARSE_SERVER_DATABASE_AUTH_SOURCE', + help: "The MongoDB driver option to specify the database name associated with the user's credentials.", + }, + autoSelectFamily: { + env: 'PARSE_SERVER_DATABASE_AUTO_SELECT_FAMILY', + help: 'The MongoDB driver option to set whether the socket attempts to connect to IPv6 and IPv4 addresses until a connection is established. If available, the driver will select the first IPv6 address.', + action: parsers.booleanParser, + }, + autoSelectFamilyAttemptTimeout: { + env: 'PARSE_SERVER_DATABASE_AUTO_SELECT_FAMILY_ATTEMPT_TIMEOUT', + help: 'The MongoDB driver option to specify the amount of time in milliseconds to wait for a connection attempt to finish before trying the next address when using the autoSelectFamily option. If set to a positive integer less than 10, the value 10 is used instead.', + action: parsers.numberParser('autoSelectFamilyAttemptTimeout'), + }, + batchSize: { + env: 'PARSE_SERVER_DATABASE_BATCH_SIZE', + help: 'The number of documents per batch for MongoDB cursor `getMore` operations. 
A lower value reduces memory usage per batch; a higher value reduces the number of network round-trips.', + action: parsers.numberParser('batchSize'), + default: 1000, + }, + clientMetadata: { + env: 'PARSE_SERVER_DATABASE_CLIENT_METADATA', + help: "Custom metadata to append to database client connections for identifying Parse Server instances in database logs. If set, this metadata will be visible in database logs during connection handshakes. This can help with debugging and monitoring in deployments with multiple database clients. Set `name` to identify your application (e.g., 'MyApp') and `version` to your application's version. Leave undefined (default) to disable this feature and avoid the additional data transfer overhead.", + action: parsers.objectParser, + type: 'DatabaseOptionsClientMetadata', + }, + compressors: { + env: 'PARSE_SERVER_DATABASE_COMPRESSORS', + help: 'The MongoDB driver option to specify an array or comma-delimited string of compressors to enable network compression for communication between this client and a mongod/mongos instance.', + }, + connectTimeoutMS: { + env: 'PARSE_SERVER_DATABASE_CONNECT_TIMEOUT_MS', + help: 'The MongoDB driver option to specify the amount of time, in milliseconds, to wait to establish a single TCP socket connection to the server before raising an error. Specifying 0 disables the connection timeout.', + action: parsers.numberParser('connectTimeoutMS'), + }, + createIndexAuthDataUniqueness: { + env: 'PARSE_SERVER_DATABASE_CREATE_INDEX_AUTH_DATA_UNIQUENESS', + help: 'Set to `true` to automatically create unique indexes on the authData fields of the _User collection for each configured auth provider on server start, including `anonymous` when anonymous users are enabled. These indexes prevent race conditions during concurrent signups with the same authData. Set to `false` to skip index creation. Default is `true`.

\u26A0\uFE0F When setting this option to `false` to manually create the indexes, keep in mind that the otherwise automatically created indexes may change in the future to be optimized for the internal usage by Parse Server.', + action: parsers.booleanParser, + default: true, + }, + createIndexRoleName: { + env: 'PARSE_SERVER_DATABASE_CREATE_INDEX_ROLE_NAME', + help: 'Set to `true` to automatically create a unique index on the name field of the _Role collection on server start. Set to `false` to skip index creation. Default is `true`.

\u26A0\uFE0F When setting this option to `false` to manually create the index, keep in mind that the otherwise automatically created index may change in the future to be optimized for the internal usage by Parse Server.', + action: parsers.booleanParser, + default: true, + }, + createIndexUserEmail: { + env: 'PARSE_SERVER_DATABASE_CREATE_INDEX_USER_EMAIL', + help: 'Set to `true` to automatically create indexes on the email field of the _User collection on server start. Set to `false` to skip index creation. Default is `true`.

\u26A0\uFE0F When setting this option to `false` to manually create the index, keep in mind that the otherwise automatically created index may change in the future to be optimized for the internal usage by Parse Server.', + action: parsers.booleanParser, + default: true, + }, + createIndexUserEmailCaseInsensitive: { + env: 'PARSE_SERVER_DATABASE_CREATE_INDEX_USER_EMAIL_CASE_INSENSITIVE', + help: 'Set to `true` to automatically create a case-insensitive index on the email field of the _User collection on server start. Set to `false` to skip index creation. Default is `true`.

\u26A0\uFE0F When setting this option to `false` to manually create the index, keep in mind that the otherwise automatically created index may change in the future to be optimized for the internal usage by Parse Server.', + action: parsers.booleanParser, + default: true, + }, + createIndexUserEmailVerifyToken: { + env: 'PARSE_SERVER_DATABASE_CREATE_INDEX_USER_EMAIL_VERIFY_TOKEN', + help: 'Set to `true` to automatically create an index on the _email_verify_token field of the _User collection on server start. Set to `false` to skip index creation. Default is `true`.

\u26A0\uFE0F When setting this option to `false` to manually create the index, keep in mind that the otherwise automatically created index may change in the future to be optimized for the internal usage by Parse Server.', + action: parsers.booleanParser, + default: true, + }, + createIndexUserPasswordResetToken: { + env: 'PARSE_SERVER_DATABASE_CREATE_INDEX_USER_PASSWORD_RESET_TOKEN', + help: 'Set to `true` to automatically create an index on the _perishable_token field of the _User collection on server start. Set to `false` to skip index creation. Default is `true`.

\u26A0\uFE0F When setting this option to `false` to manually create the index, keep in mind that the otherwise automatically created index may change in the future to be optimized for the internal usage by Parse Server.', + action: parsers.booleanParser, + default: true, + }, + createIndexUserUsername: { + env: 'PARSE_SERVER_DATABASE_CREATE_INDEX_USER_USERNAME', + help: 'Set to `true` to automatically create indexes on the username field of the _User collection on server start. Set to `false` to skip index creation. Default is `true`.

\u26A0\uFE0F When setting this option to `false` to manually create the index, keep in mind that the otherwise automatically created index may change in the future to be optimized for the internal usage by Parse Server.', + action: parsers.booleanParser, + default: true, + }, + createIndexUserUsernameCaseInsensitive: { + env: 'PARSE_SERVER_DATABASE_CREATE_INDEX_USER_USERNAME_CASE_INSENSITIVE', + help: 'Set to `true` to automatically create a case-insensitive index on the username field of the _User collection on server start. Set to `false` to skip index creation. Default is `true`.

\u26A0\uFE0F When setting this option to `false` to manually create the index, keep in mind that the otherwise automatically created index may change in the future to be optimized for the internal usage by Parse Server.', + action: parsers.booleanParser, + default: true, + }, + directConnection: { + env: 'PARSE_SERVER_DATABASE_DIRECT_CONNECTION', + help: 'The MongoDB driver option to force a Single topology type with a connection string containing one host.', + action: parsers.booleanParser, + }, + disableIndexFieldValidation: { + env: 'PARSE_SERVER_DATABASE_DISABLE_INDEX_FIELD_VALIDATION', + help: 'Set to `true` to disable validation of index fields. When disabled, indexes can be created even if the fields do not exist in the schema. This can be useful when creating indexes on fields that will be added later.', + action: parsers.booleanParser, + }, + enableSchemaHooks: { + env: 'PARSE_SERVER_DATABASE_ENABLE_SCHEMA_HOOKS', + help: 'Enables database real-time hooks to update single schema cache. Set to `true` if using multiple Parse Servers instances connected to the same database. Failing to do so will cause a schema change to not propagate to all instances and re-syncing will only happen when the instances restart. 
To use this feature with MongoDB, a replica set cluster with [change stream](https://docs.mongodb.com/manual/changeStreams/#availability) support is required.', + action: parsers.booleanParser, + default: false, + }, + forceServerObjectId: { + env: 'PARSE_SERVER_DATABASE_FORCE_SERVER_OBJECT_ID', + help: 'The MongoDB driver option to force server to assign _id values instead of driver.', + action: parsers.booleanParser, + }, + heartbeatFrequencyMS: { + env: 'PARSE_SERVER_DATABASE_HEARTBEAT_FREQUENCY_MS', + help: 'The MongoDB driver option to specify the frequency in milliseconds at which the driver checks the state of the MongoDB deployment.', + action: parsers.numberParser('heartbeatFrequencyMS'), + }, + loadBalanced: { + env: 'PARSE_SERVER_DATABASE_LOAD_BALANCED', + help: 'The MongoDB driver option to instruct the driver it is connecting to a load balancer fronting a mongos like service.', + action: parsers.booleanParser, + }, + localThresholdMS: { + env: 'PARSE_SERVER_DATABASE_LOCAL_THRESHOLD_MS', + help: 'The MongoDB driver option to specify the size (in milliseconds) of the latency window for selecting among multiple suitable MongoDB instances.', + action: parsers.numberParser('localThresholdMS'), + }, + logClientEvents: { + env: 'PARSE_SERVER_DATABASE_LOG_CLIENT_EVENTS', + help: 'An array of MongoDB client event configurations to enable logging of specific events.', + action: parsers.arrayParser, + type: 'LogClientEvent[]', + }, + maxConnecting: { + env: 'PARSE_SERVER_DATABASE_MAX_CONNECTING', + help: 'The MongoDB driver option to specify the maximum number of connections that may be in the process of being established concurrently by the connection pool.', + action: parsers.numberParser('maxConnecting'), + }, + maxIdleTimeMS: { + env: 'PARSE_SERVER_DATABASE_MAX_IDLE_TIME_MS', + help: 'The MongoDB driver option to specify the amount of time in milliseconds that a connection can remain idle in the connection pool before being removed and closed.', + action: 
parsers.numberParser('maxIdleTimeMS'), + }, + maxPoolSize: { + env: 'PARSE_SERVER_DATABASE_MAX_POOL_SIZE', + help: 'The MongoDB driver option to set the maximum number of opened, cached, ready-to-use database connections maintained by the driver.', + action: parsers.numberParser('maxPoolSize'), + }, + maxStalenessSeconds: { + env: 'PARSE_SERVER_DATABASE_MAX_STALENESS_SECONDS', + help: 'The MongoDB driver option to set the maximum replication lag for reads from secondary nodes.', + action: parsers.numberParser('maxStalenessSeconds'), + }, + maxTimeMS: { + env: 'PARSE_SERVER_DATABASE_MAX_TIME_MS', + help: 'The MongoDB driver option to set a cumulative time limit in milliseconds for processing operations on a cursor.', + action: parsers.numberParser('maxTimeMS'), + }, + minPoolSize: { + env: 'PARSE_SERVER_DATABASE_MIN_POOL_SIZE', + help: 'The MongoDB driver option to set the minimum number of opened, cached, ready-to-use database connections maintained by the driver.', + action: parsers.numberParser('minPoolSize'), + }, + proxyHost: { + env: 'PARSE_SERVER_DATABASE_PROXY_HOST', + help: 'The MongoDB driver option to configure a Socks5 proxy host used for creating TCP connections.', + }, + proxyPassword: { + env: 'PARSE_SERVER_DATABASE_PROXY_PASSWORD', + help: 'The MongoDB driver option to configure a Socks5 proxy password when the proxy requires username/password authentication.', + }, + proxyPort: { + env: 'PARSE_SERVER_DATABASE_PROXY_PORT', + help: 'The MongoDB driver option to configure a Socks5 proxy port used for creating TCP connections.', + action: parsers.numberParser('proxyPort'), + }, + proxyUsername: { + env: 'PARSE_SERVER_DATABASE_PROXY_USERNAME', + help: 'The MongoDB driver option to configure a Socks5 proxy username when the proxy requires username/password authentication.', + }, + readConcernLevel: { + env: 'PARSE_SERVER_DATABASE_READ_CONCERN_LEVEL', + help: 'The MongoDB driver option to specify the level of isolation.', + }, + readPreference: { + env: 
'PARSE_SERVER_DATABASE_READ_PREFERENCE', + help: 'The MongoDB driver option to specify the read preferences for this connection.', + }, + readPreferenceTags: { + env: 'PARSE_SERVER_DATABASE_READ_PREFERENCE_TAGS', + help: 'The MongoDB driver option to specify the tags document as a comma-separated list of colon-separated key-value pairs.', + action: parsers.arrayParser, + }, + replicaSet: { + env: 'PARSE_SERVER_DATABASE_REPLICA_SET', + help: 'The MongoDB driver option to specify the name of the replica set, if the mongod is a member of a replica set.', + }, + retryReads: { + env: 'PARSE_SERVER_DATABASE_RETRY_READS', + help: 'The MongoDB driver option to enable retryable reads.', + action: parsers.booleanParser, + }, + retryWrites: { + env: 'PARSE_SERVER_DATABASE_RETRY_WRITES', + help: 'The MongoDB driver option to set whether to retry failed writes.', + action: parsers.booleanParser, + }, + schemaCacheTtl: { + env: 'PARSE_SERVER_DATABASE_SCHEMA_CACHE_TTL', + help: 'The duration in seconds after which the schema cache expires and will be refetched from the database. Use this option if using multiple Parse Servers instances connected to the same database. A low duration will cause the schema cache to be updated too often, causing unnecessary database reads. A high duration will cause the schema to be updated too rarely, increasing the time required until schema changes propagate to all server instances. This feature can be used as an alternative or in conjunction with the option `enableSchemaHooks`. 
Default is infinite which means the schema cache never expires.', + action: parsers.numberParser('schemaCacheTtl'), + }, + serverMonitoringMode: { + env: 'PARSE_SERVER_DATABASE_SERVER_MONITORING_MODE', + help: 'The MongoDB driver option to instruct the driver monitors to use a specific monitoring mode.', + }, + serverSelectionTimeoutMS: { + env: 'PARSE_SERVER_DATABASE_SERVER_SELECTION_TIMEOUT_MS', + help: 'The MongoDB driver option to specify the amount of time in milliseconds for a server to be considered suitable for selection.', + action: parsers.numberParser('serverSelectionTimeoutMS'), + }, + socketTimeoutMS: { + env: 'PARSE_SERVER_DATABASE_SOCKET_TIMEOUT_MS', + help: 'The MongoDB driver option to specify the amount of time, in milliseconds, spent attempting to send or receive on a socket before timing out. Specifying 0 means no timeout.', + action: parsers.numberParser('socketTimeoutMS'), + }, + srvMaxHosts: { + env: 'PARSE_SERVER_DATABASE_SRV_MAX_HOSTS', + help: 'The MongoDB driver option to specify the maximum number of hosts to connect to when using an srv connection string, a setting of 0 means unlimited hosts.', + action: parsers.numberParser('srvMaxHosts'), + }, + srvServiceName: { + env: 'PARSE_SERVER_DATABASE_SRV_SERVICE_NAME', + help: 'The MongoDB driver option to modify the srv URI service name.', + }, + ssl: { + env: 'PARSE_SERVER_DATABASE_SSL', + help: 'The MongoDB driver option to enable or disable TLS/SSL for the connection (equivalent to tls option).', + action: parsers.booleanParser, + }, + tls: { + env: 'PARSE_SERVER_DATABASE_TLS', + help: 'The MongoDB driver option to enable or disable TLS/SSL for the connection.', + action: parsers.booleanParser, + }, + tlsAllowInvalidCertificates: { + env: 'PARSE_SERVER_DATABASE_TLS_ALLOW_INVALID_CERTIFICATES', + help: 'The MongoDB driver option to bypass validation of the certificates presented by the mongod/mongos instance.', + action: parsers.booleanParser, + }, + tlsAllowInvalidHostnames: { + env: 
'PARSE_SERVER_DATABASE_TLS_ALLOW_INVALID_HOSTNAMES', + help: 'The MongoDB driver option to disable hostname validation of the certificate presented by the mongod/mongos instance.', + action: parsers.booleanParser, + }, + tlsCAFile: { + env: 'PARSE_SERVER_DATABASE_TLS_CAFILE', + help: 'The MongoDB driver option to specify the location of a local .pem file that contains the root certificate chain from the Certificate Authority.', + }, + tlsCertificateKeyFile: { + env: 'PARSE_SERVER_DATABASE_TLS_CERTIFICATE_KEY_FILE', + help: "The MongoDB driver option to specify the location of a local .pem file that contains the client's TLS/SSL certificate and key.", + }, + tlsCertificateKeyFilePassword: { + env: 'PARSE_SERVER_DATABASE_TLS_CERTIFICATE_KEY_FILE_PASSWORD', + help: 'The MongoDB driver option to specify the password to decrypt the tlsCertificateKeyFile.', + }, + tlsInsecure: { + env: 'PARSE_SERVER_DATABASE_TLS_INSECURE', + help: 'The MongoDB driver option to disable various certificate validations.', + action: parsers.booleanParser, + }, + waitQueueTimeoutMS: { + env: 'PARSE_SERVER_DATABASE_WAIT_QUEUE_TIMEOUT_MS', + help: 'The MongoDB driver option to specify the maximum time in milliseconds that a thread can wait for a connection to become available.', + action: parsers.numberParser('waitQueueTimeoutMS'), + }, + zlibCompressionLevel: { + env: 'PARSE_SERVER_DATABASE_ZLIB_COMPRESSION_LEVEL', + help: 'The MongoDB driver option to specify the compression level if using zlib for network compression (0-9).', + action: parsers.numberParser('zlibCompressionLevel'), + }, }; module.exports.DatabaseOptionsClientMetadata = { - "name": { - "env": "PARSE_SERVER_DATABASE_CLIENT_METADATA_NAME", - "help": "The name to identify your application in database logs (e.g., 'MyApp').", - "required": true - }, - "version": { - "env": "PARSE_SERVER_DATABASE_CLIENT_METADATA_VERSION", - "help": "The version of your application (e.g., '1.0.0').", - "required": true - } + name: { + env: 
'PARSE_SERVER_DATABASE_CLIENT_METADATA_NAME', + help: "The name to identify your application in database logs (e.g., 'MyApp').", + required: true, + }, + version: { + env: 'PARSE_SERVER_DATABASE_CLIENT_METADATA_VERSION', + help: "The version of your application (e.g., '1.0.0').", + required: true, + }, }; module.exports.AuthAdapter = { - "enabled": { - "help": "Is `true` if the auth adapter is enabled, `false` otherwise.", - "action": parsers.booleanParser, - "default": false - } + enabled: { + help: 'Is `true` if the auth adapter is enabled, `false` otherwise.', + action: parsers.booleanParser, + default: false, + }, }; module.exports.LogLevels = { - "cloudFunctionError": { - "env": "PARSE_SERVER_LOG_LEVELS_CLOUD_FUNCTION_ERROR", - "help": "Log level used by the Cloud Code Functions on error. Default is `error`. See [LogLevel](LogLevel.html) for available values.", - "default": "error" - }, - "cloudFunctionSuccess": { - "env": "PARSE_SERVER_LOG_LEVELS_CLOUD_FUNCTION_SUCCESS", - "help": "Log level used by the Cloud Code Functions on success. Default is `info`. See [LogLevel](LogLevel.html) for available values.", - "default": "info" - }, - "signupUsernameTaken": { - "env": "PARSE_SERVER_LOG_LEVELS_SIGNUP_USERNAME_TAKEN", - "help": "Log level used when a sign-up fails because the username already exists. Default is `info`. See [LogLevel](LogLevel.html) for available values.", - "default": "info" - }, - "triggerAfter": { - "env": "PARSE_SERVER_LOG_LEVELS_TRIGGER_AFTER", - "help": "Log level used by the Cloud Code Triggers `afterSave`, `afterDelete`, `afterFind`, `afterLogout`. Default is `info`. See [LogLevel](LogLevel.html) for available values.", - "default": "info" - }, - "triggerBeforeError": { - "env": "PARSE_SERVER_LOG_LEVELS_TRIGGER_BEFORE_ERROR", - "help": "Log level used by the Cloud Code Triggers `beforeSave`, `beforeDelete`, `beforeFind`, `beforeLogin` on error. Default is `error`. 
See [LogLevel](LogLevel.html) for available values.", - "default": "error" - }, - "triggerBeforeSuccess": { - "env": "PARSE_SERVER_LOG_LEVELS_TRIGGER_BEFORE_SUCCESS", - "help": "Log level used by the Cloud Code Triggers `beforeSave`, `beforeDelete`, `beforeFind`, `beforeLogin` on success. Default is `info`. See [LogLevel](LogLevel.html) for available values.", - "default": "info" - } + cloudFunctionError: { + env: 'PARSE_SERVER_LOG_LEVELS_CLOUD_FUNCTION_ERROR', + help: 'Log level used by the Cloud Code Functions on error. Default is `error`. See [LogLevel](LogLevel.html) for available values.', + default: 'error', + }, + cloudFunctionSuccess: { + env: 'PARSE_SERVER_LOG_LEVELS_CLOUD_FUNCTION_SUCCESS', + help: 'Log level used by the Cloud Code Functions on success. Default is `info`. See [LogLevel](LogLevel.html) for available values.', + default: 'info', + }, + signupUsernameTaken: { + env: 'PARSE_SERVER_LOG_LEVELS_SIGNUP_USERNAME_TAKEN', + help: 'Log level used when a sign-up fails because the username already exists. Default is `info`. See [LogLevel](LogLevel.html) for available values.', + default: 'info', + }, + triggerAfter: { + env: 'PARSE_SERVER_LOG_LEVELS_TRIGGER_AFTER', + help: 'Log level used by the Cloud Code Triggers `afterSave`, `afterDelete`, `afterFind`, `afterLogout`. Default is `info`. See [LogLevel](LogLevel.html) for available values.', + default: 'info', + }, + triggerBeforeError: { + env: 'PARSE_SERVER_LOG_LEVELS_TRIGGER_BEFORE_ERROR', + help: 'Log level used by the Cloud Code Triggers `beforeSave`, `beforeDelete`, `beforeFind`, `beforeLogin` on error. Default is `error`. See [LogLevel](LogLevel.html) for available values.', + default: 'error', + }, + triggerBeforeSuccess: { + env: 'PARSE_SERVER_LOG_LEVELS_TRIGGER_BEFORE_SUCCESS', + help: 'Log level used by the Cloud Code Triggers `beforeSave`, `beforeDelete`, `beforeFind`, `beforeLogin` on success. Default is `info`. 
See [LogLevel](LogLevel.html) for available values.', + default: 'info', + }, }; diff --git a/src/Options/docs.js b/src/Options/docs.js index 34e05f3d2e..fb51142e01 100644 --- a/src/Options/docs.js +++ b/src/Options/docs.js @@ -357,4 +357,3 @@ * @property {String} triggerBeforeError Log level used by the Cloud Code Triggers `beforeSave`, `beforeDelete`, `beforeFind`, `beforeLogin` on error. Default is `error`. See [LogLevel](LogLevel.html) for available values. * @property {String} triggerBeforeSuccess Log level used by the Cloud Code Triggers `beforeSave`, `beforeDelete`, `beforeFind`, `beforeLogin` on success. Default is `info`. See [LogLevel](LogLevel.html) for available values. */ - diff --git a/src/cloud-code/resolveAdapters.ts b/src/cloud-code/resolveAdapters.ts index c67c188fd8..1c11152d98 100644 --- a/src/cloud-code/resolveAdapters.ts +++ b/src/cloud-code/resolveAdapters.ts @@ -13,8 +13,10 @@ export function resolveAdapters(options: any): CloudCodeAdapter[] { if (options.cloud) { if (typeof options.cloud === 'object' && typeof options.cloud.getRouter === 'function') { adapters.push(new InProcessAdapter(options.cloud)); - } else { + } else if (typeof options.cloud === 'string' || typeof options.cloud === 'function') { adapters.push(new LegacyAdapter(options.cloud)); + } else { + throw "argument 'cloud' must either be a string or a function"; } } From 7cdf253929f66b64d397cae4644ac0a2fab96431 Mon Sep 17 00:00:00 2001 From: Daniel Date: Tue, 17 Mar 2026 21:46:57 +1100 Subject: [PATCH 08/14] fix: address code review findings across cloud adapter system - Use ephemeral ports and proper teardown in ExternalProcessAdapter specs - Add HTTP timeout/status validation and file trigger handling in ExternalProcessAdapter - Guard request.object for file hooks in InProcessAdapter beforeSave - Fix ESM/CJS loading order in LegacyAdapter (require-first, catch ERR_REQUIRE_ESM) - Best-effort shutdown and initialize rollback in CloudCodeManager - Add missing "net" import in 
README Go example - Update Options types for cloud, cloudCodeAdapters, cloudCodeOptions - Fix validator applicationId lookup in triggers.js and ParseLiveQueryServer - Throw Error objects instead of strings in resolveAdapters - Update JSDoc types in docs.js - Clarify intentional addParseCloud re-invocation in ParseServer --- spec/ExternalProcessAdapter.spec.js | 21 +++++----- src/LiveQuery/ParseLiveQueryServer.ts | 16 ++++---- src/Options/Definitions.js | 6 +-- src/Options/docs.js | 6 +-- src/Options/index.js | 22 ++++++++--- src/ParseServer.ts | 18 ++++++--- src/cloud-code/CloudCodeManager.ts | 26 ++++++++++++- src/cloud-code/README.md | 1 + .../adapters/ExternalProcessAdapter.ts | 38 ++++++++++++++++--- src/cloud-code/adapters/InProcessAdapter.ts | 7 +++- src/cloud-code/adapters/LegacyAdapter.ts | 12 +++--- src/cloud-code/resolveAdapters.ts | 2 +- src/triggers.js | 9 +++-- 13 files changed, 130 insertions(+), 54 deletions(-) diff --git a/spec/ExternalProcessAdapter.spec.js b/spec/ExternalProcessAdapter.spec.js index 6a328b92d8..5b3ecc4cd0 100644 --- a/spec/ExternalProcessAdapter.spec.js +++ b/spec/ExternalProcessAdapter.spec.js @@ -3,8 +3,8 @@ const { ExternalProcessAdapter } = require('../lib/cloud-code/adapters/ExternalP const { CloudCodeManager } = require('../lib/cloud-code/CloudCodeManager'); const http = require('http'); -function createMockCloudServer(manifest, port) { - return new Promise((resolve) => { +function createMockCloudServer(manifest) { + return new Promise((resolve, reject) => { const server = http.createServer((req, res) => { if (req.url === '/' && req.method === 'GET') { res.writeHead(200, { 'Content-Type': 'application/json' }); @@ -24,7 +24,8 @@ function createMockCloudServer(manifest, port) { res.end(); } }); - server.listen(port, () => resolve(server)); + server.on('error', (err) => reject(err)); + server.listen(0, () => resolve({ server, port: server.address().port })); }); } @@ -45,15 +46,14 @@ describe('ExternalProcessAdapter', () => { 
it('spawns process and reads manifest', async () => { const manager = new CloudCodeManager(); - const port = 19876; - const server = await createMockCloudServer( - { protocol: 'ParseCloud/1.0', hooks: { functions: [{ name: 'ext-fn' }], triggers: [], jobs: [] } }, - port + const { server, port } = await createMockCloudServer( + { protocol: 'ParseCloud/1.0', hooks: { functions: [{ name: 'ext-fn' }], triggers: [], jobs: [] } } ); + let adapter; try { const cmd = `node -e "process.stdout.write('PARSE_CLOUD_READY:${port}\\n'); setTimeout(() => {}, 60000)"`; - const adapter = new ExternalProcessAdapter(cmd, 'test-key', { + adapter = new ExternalProcessAdapter(cmd, 'test-key', { startupTimeout: 5000, healthCheckInterval: 0, }); @@ -61,9 +61,10 @@ describe('ExternalProcessAdapter', () => { await adapter.initialize(registry, { appId: 'test', masterKey: 'mk', serverURL: 'http://localhost' }); expect(manager.getFunction('ext-fn')).toBeDefined(); - - await adapter.shutdown(); } finally { + if (adapter) { + await adapter.shutdown(); + } server.close(); } }, 10000); diff --git a/src/LiveQuery/ParseLiveQueryServer.ts b/src/LiveQuery/ParseLiveQueryServer.ts index a83b8e1a62..ef019d50a3 100644 --- a/src/LiveQuery/ParseLiveQueryServer.ts +++ b/src/LiveQuery/ParseLiveQueryServer.ts @@ -232,7 +232,7 @@ class ParseLiveQueryServer { installationId: client.installationId, sendEvent: true, }; - const trigger = getTrigger(className, 'afterEvent', Parse.applicationId); + const trigger = getTrigger(className, 'afterEvent', this.config.appId); if (trigger) { const auth = await this.getAuthFromClient(client, requestId); if (auth && auth.user) { @@ -241,7 +241,7 @@ class ParseLiveQueryServer { if (res.object) { res.object = Parse.Object.fromJSON(res.object); } - await runTrigger(trigger, `afterEvent.${className}`, res, auth); + await runTrigger(trigger, `afterEvent.${className}`, res, auth, this.config.appId); } if (!res.sendEvent) { return; @@ -388,7 +388,7 @@ class ParseLiveQueryServer { 
installationId: client.installationId, sendEvent: true, }; - const trigger = getTrigger(className, 'afterEvent', Parse.applicationId); + const trigger = getTrigger(className, 'afterEvent', this.config.appId); if (trigger) { if (res.object) { res.object = Parse.Object.fromJSON(res.object); @@ -400,7 +400,7 @@ class ParseLiveQueryServer { if (auth && auth.user) { res.user = auth.user; } - await runTrigger(trigger, `afterEvent.${className}`, res, auth); + await runTrigger(trigger, `afterEvent.${className}`, res, auth, this.config.appId); } if (!res.sendEvent) { return; @@ -845,13 +845,13 @@ class ParseLiveQueryServer { installationId: request.installationId, user: undefined, }; - const trigger = getTrigger('@Connect', 'beforeConnect', Parse.applicationId); + const trigger = getTrigger('@Connect', 'beforeConnect', this.config.appId); if (trigger) { const auth = await this.getAuthFromClient(client, request.requestId, req.sessionToken); if (auth && auth.user) { req.user = auth.user; } - await runTrigger(trigger, `beforeConnect.@Connect`, req, auth); + await runTrigger(trigger, `beforeConnect.@Connect`, req, auth, this.config.appId); } parseWebsocket.clientId = clientId; this.clients.set(parseWebsocket.clientId, client); @@ -908,7 +908,7 @@ class ParseLiveQueryServer { const className = request.query.className; let authCalled = false; try { - const trigger = getTrigger(className, 'beforeSubscribe', Parse.applicationId); + const trigger = getTrigger(className, 'beforeSubscribe', this.config.appId); if (trigger) { const auth = await this.getAuthFromClient(client, request.requestId, request.sessionToken); authCalled = true; @@ -919,7 +919,7 @@ class ParseLiveQueryServer { const parseQuery = new Parse.Query(className); parseQuery.withJSON(request.query); request.query = parseQuery; - await runTrigger(trigger, `beforeSubscribe.${className}`, request, auth); + await runTrigger(trigger, `beforeSubscribe.${className}`, request, auth, this.config.appId); const query = 
request.query.toJSON(); request.query = query; diff --git a/src/Options/Definitions.js b/src/Options/Definitions.js index 1b0e3931e1..d52f1b52dc 100644 --- a/src/Options/Definitions.js +++ b/src/Options/Definitions.js @@ -128,12 +128,12 @@ module.exports.ParseServerOptions = { }, cloud: { env: 'PARSE_SERVER_CLOUD', - help: 'Full path to your cloud code main.js', + help: 'Full path to your cloud code main.js, a cloud code function, or an object implementing getRouter() for in-process cloud code', }, cloudCodeAdapters: { env: 'PARSE_SERVER_CLOUD_CODE_ADAPTERS', help: 'Array of CloudCodeAdapter instances for BYO cloud code integration', - action: parsers.objectParser, + action: parsers.arrayParser, }, cloudCodeCommand: { env: 'PARSE_SERVER_CLOUD_CODE_COMMAND', @@ -141,7 +141,7 @@ module.exports.ParseServerOptions = { }, cloudCodeOptions: { env: 'PARSE_SERVER_CLOUD_CODE_OPTIONS', - help: 'Options for the external cloud code process adapter', + help: 'Options for the external cloud code process adapter: startupTimeout, healthCheckInterval, shutdownTimeout, maxRestartDelay', action: parsers.objectParser, }, cluster: { diff --git a/src/Options/docs.js b/src/Options/docs.js index fb51142e01..7ad923309a 100644 --- a/src/Options/docs.js +++ b/src/Options/docs.js @@ -26,10 +26,10 @@ * @property {Number} cacheMaxSize Sets the maximum size for the in memory cache, defaults to 10000 * @property {Number} cacheTTL Sets the TTL for the in memory cache (in ms), defaults to 5000 (5 seconds) * @property {String} clientKey Key for iOS, MacOS, tvOS clients - * @property {String} cloud Full path to your cloud code main.js - * @property {Object} cloudCodeAdapters Array of CloudCodeAdapter instances for BYO cloud code integration + * @property {Union} cloud Full path to your cloud code main.js, a cloud code function, or an object implementing getRouter() for in-process cloud code + * @property {CloudCodeAdapter[]} cloudCodeAdapters Array of CloudCodeAdapter instances for BYO cloud code 
integration * @property {String} cloudCodeCommand Shell command to spawn an external cloud code process (ParseCloud/1.0 protocol) - * @property {Object} cloudCodeOptions Options for the external cloud code process adapter + * @property {CloudCodeOptions} cloudCodeOptions Options for the external cloud code process adapter: startupTimeout, healthCheckInterval, shutdownTimeout, maxRestartDelay * @property {Number|Boolean} cluster Run with cluster, optionally set the number of processes default to os.cpus().length * @property {String} collectionPrefix A collection prefix for the classes * @property {Boolean} convertEmailToLowercase Optional. If set to `true`, the `email` property of a user is automatically converted to lowercase before being stored in the database. Consequently, queries must match the case as stored in the database, which would be lowercase in this scenario. If `false`, the `email` property is stored as set, without any case modifications. Default is `false`. diff --git a/src/Options/index.js b/src/Options/index.js index 948afbec27..6ea815a456 100644 --- a/src/Options/index.js +++ b/src/Options/index.js @@ -59,6 +59,18 @@ type SendEmailVerificationRequest = { user: any, master?: boolean, }; +type CloudCodeOptions = { + startupTimeout?: number, + healthCheckInterval?: number, + shutdownTimeout?: number, + maxRestartDelay?: number, +}; +type CloudCodeAdapter = { + name: string, + initialize(registry: any, config: any): Promise, + isHealthy(): Promise, + shutdown(): Promise, +}; export interface ParseServerOptions { /* Your Parse Application ID @@ -139,16 +151,16 @@ export interface ParseServerOptions { /* Optional. If set to `true`, the `username` property of a user is automatically converted to lowercase before being stored in the database. Consequently, queries must match the case as stored in the database, which would be lowercase in this scenario. If `false`, the `username` property is stored as set, without any case modifications. 
Default is `false`. :DEFAULT: false */ convertUsernameToLowercase: ?boolean; - /* Full path to your cloud code main.js */ - cloud: ?string; + /* Full path to your cloud code main.js, a cloud code function, or an object implementing getRouter() for in-process cloud code */ + cloud: ?(string | Object); /* Shell command to spawn an external cloud code process (ParseCloud/1.0 protocol) */ cloudCodeCommand: ?string; /* Key for authenticating external cloud code process requests. Required when cloudCodeCommand is set. */ webhookKey: ?string; - /* Options for the external cloud code process adapter */ - cloudCodeOptions: ?Object; + /* Options for the external cloud code process adapter: startupTimeout, healthCheckInterval, shutdownTimeout, maxRestartDelay */ + cloudCodeOptions: ?CloudCodeOptions; /* Array of CloudCodeAdapter instances for BYO cloud code integration */ - cloudCodeAdapters: ?Object; + cloudCodeAdapters: ?(CloudCodeAdapter[]); /* A collection prefix for the classes :DEFAULT: '' */ collectionPrefix: ?string; diff --git a/src/ParseServer.ts b/src/ParseServer.ts index 60731fb738..43f42a40f8 100644 --- a/src/ParseServer.ts +++ b/src/ParseServer.ts @@ -196,7 +196,12 @@ class ParseServer { }); if (adapters.length > 0) { - addParseCloud(); + // Re-invoke addParseCloud with the concrete appId so that Parse.Cloud + // methods (define, beforeSave, etc.) bind the correct appId in their + // closure. The module-level call (no appId) uses a lazy fallback to + // Parse.applicationId; this call upgrades it to a fixed binding before + // cloud code adapters run. + addParseCloud(this.config.appId); const cloudManager = new CloudCodeManager(); // CRITICAL: Store on this.config BEFORE adapter initialization. 
@@ -590,16 +595,19 @@ class ParseServer { } } -function addParseCloud() { - const ParseCloud = require('./cloud-code/Parse.Cloud'); +function addParseCloud(appId?: string) { + const { createParseCloud } = require('./cloud-code/Parse.Cloud'); const ParseServer = require('./cloud-code/Parse.Server'); + const ParseCloud = createParseCloud(appId); Object.defineProperty(Parse, 'Server', { get() { - const conf = Config.get(Parse.applicationId); + const resolvedAppId = appId || Parse.applicationId; + const conf = Config.get(resolvedAppId); return { ...conf, ...ParseServer }; }, set(newVal) { - newVal.appId = Parse.applicationId; + const resolvedAppId = appId || Parse.applicationId; + newVal.appId = resolvedAppId; Config.put(newVal); }, configurable: true, diff --git a/src/cloud-code/CloudCodeManager.ts b/src/cloud-code/CloudCodeManager.ts index b80d682a19..0757d2bcd6 100644 --- a/src/cloud-code/CloudCodeManager.ts +++ b/src/cloud-code/CloudCodeManager.ts @@ -262,15 +262,37 @@ export class CloudCodeManager { for (const adapter of adapters) { const registry = this.createRegistry(adapter.name); - await adapter.initialize(registry, config); + try { + await adapter.initialize(registry, config); + } catch (error) { + // Roll back any partial registrations from this adapter + this.unregisterAll(adapter.name); + // Attempt graceful shutdown of the failed adapter + try { + await adapter.shutdown(); + } catch { + // Ignore shutdown errors during initialization rollback + } + throw error; + } this.adapters.push(adapter); } } async shutdown(): Promise { + const errors: Array<{ name: string; error: unknown }> = []; for (const adapter of this.adapters) { - await adapter.shutdown(); + try { + await adapter.shutdown(); + } catch (error) { + // eslint-disable-next-line no-console + console.error(`Error shutting down adapter "${adapter.name}":`, error); + errors.push({ name: adapter.name, error }); + } } + // Clear all manager state regardless of individual shutdown failures + 
this.adapters.length = 0; + this.clearAll(); } async healthCheck(): Promise { diff --git a/src/cloud-code/README.md b/src/cloud-code/README.md index d38906e17e..36ff4572ba 100644 --- a/src/cloud-code/README.md +++ b/src/cloud-code/README.md @@ -277,6 +277,7 @@ package main import ( "encoding/json" "fmt" + "net" "net/http" "os" ) diff --git a/src/cloud-code/adapters/ExternalProcessAdapter.ts b/src/cloud-code/adapters/ExternalProcessAdapter.ts index 0b8e64c7de..df110a0e5f 100644 --- a/src/cloud-code/adapters/ExternalProcessAdapter.ts +++ b/src/cloud-code/adapters/ExternalProcessAdapter.ts @@ -18,13 +18,24 @@ const DEFAULT_OPTIONS: Required = { maxRestartDelay: 30000, }; +const HTTP_TIMEOUT = 10000; + function httpGet(url: string): Promise { return new Promise((resolve, reject) => { - http.get(url, (res) => { + const req = http.get(url, (res) => { + if (res.statusCode === undefined || res.statusCode < 200 || res.statusCode >= 300) { + res.resume(); + reject(new Error(`HTTP GET ${url} returned status ${res.statusCode}`)); + return; + } let data = ''; res.on('data', (chunk) => data += chunk); res.on('end', () => resolve(data)); - }).on('error', reject); + }); + req.setTimeout(HTTP_TIMEOUT, () => { + req.destroy(new Error(`HTTP GET ${url} timed out after ${HTTP_TIMEOUT}ms`)); + }); + req.on('error', reject); }); } @@ -43,6 +54,11 @@ function httpPost(url: string, body: Record, webhookKey: string 'X-Parse-Webhook-Key': webhookKey, }, }, (res) => { + if (res.statusCode === undefined || res.statusCode < 200 || res.statusCode >= 300) { + res.resume(); + reject(new Error(`HTTP POST ${url} returned status ${res.statusCode}`)); + return; + } let data = ''; res.on('data', (chunk) => data += chunk); res.on('end', () => { @@ -53,6 +69,9 @@ function httpPost(url: string, body: Record, webhookKey: string } }); }); + req.setTimeout(HTTP_TIMEOUT, () => { + req.destroy(new Error(`HTTP POST ${url} timed out after ${HTTP_TIMEOUT}ms`)); + }); req.on('error', reject); req.write(payload); 
req.end(); @@ -79,8 +98,13 @@ export class ExternalProcessAdapter implements CloudCodeAdapter { async initialize(registry: CloudCodeRegistry, config: ParseServerConfig): Promise { this.port = await this.spawnAndWaitForReady(config); - const manifest = await this.fetchManifest(); - this.registerFromManifest(registry, manifest); + try { + const manifest = await this.fetchManifest(); + this.registerFromManifest(registry, manifest); + } catch (err) { + await this.shutdown(); + throw err; + } if (this.options.healthCheckInterval > 0) { this.healthInterval = setInterval(() => this.checkHealth(), this.options.healthCheckInterval); @@ -90,7 +114,8 @@ export class ExternalProcessAdapter implements CloudCodeAdapter { async isHealthy(): Promise { try { const response = await httpGet(`http://localhost:${this.port}/health`); - return response === 'OK' || response.includes('ok'); + const trimmed = response.trim(); + return trimmed === 'OK' || trimmed === 'ok'; } catch { return false; } @@ -190,6 +215,9 @@ export class ExternalProcessAdapter implements CloudCodeAdapter { this.webhookKey ); if (triggerName === 'beforeSave') { + if (request.file || className === 'File') { + return webhookResponseToResult(response); + } applyBeforeSaveResponse(request, response); return; } diff --git a/src/cloud-code/adapters/InProcessAdapter.ts b/src/cloud-code/adapters/InProcessAdapter.ts index db92a05290..910a3064bd 100644 --- a/src/cloud-code/adapters/InProcessAdapter.ts +++ b/src/cloud-code/adapters/InProcessAdapter.ts @@ -32,8 +32,11 @@ export class InProcessAdapter implements CloudCodeAdapter { const body = requestToWebhookBody(request); const response = await router.dispatchTrigger(className, triggerName, body); if (triggerName === 'beforeSave') { - applyBeforeSaveResponse(request, response); - return; + if (request.object) { + applyBeforeSaveResponse(request, response); + return; + } + return webhookResponseToResult(response); } return webhookResponseToResult(response); }); diff --git 
a/src/cloud-code/adapters/LegacyAdapter.ts b/src/cloud-code/adapters/LegacyAdapter.ts index 4f73e2ed49..b5de23f39c 100644 --- a/src/cloud-code/adapters/LegacyAdapter.ts +++ b/src/cloud-code/adapters/LegacyAdapter.ts @@ -21,16 +21,16 @@ export class LegacyAdapter implements CloudCodeAdapter { await Promise.resolve(this.cloud(Parse)); } else if (typeof this.cloud === 'string') { const path = require('path'); + const url = require('url'); const resolved = path.resolve(process.cwd(), this.cloud); try { - const pkg = require(path.resolve(process.cwd(), 'package.json')); - if (process.env.npm_package_type === 'module' || pkg?.type === 'module') { - await import(resolved); + require(resolved); + } catch (err: any) { + if (err?.code === 'ERR_REQUIRE_ESM') { + await import(url.pathToFileURL(resolved).href); } else { - require(resolved); + throw err; } - } catch { - require(resolved); } } } diff --git a/src/cloud-code/resolveAdapters.ts b/src/cloud-code/resolveAdapters.ts index 1c11152d98..34dc18ea7e 100644 --- a/src/cloud-code/resolveAdapters.ts +++ b/src/cloud-code/resolveAdapters.ts @@ -16,7 +16,7 @@ export function resolveAdapters(options: any): CloudCodeAdapter[] { } else if (typeof options.cloud === 'string' || typeof options.cloud === 'function') { adapters.push(new LegacyAdapter(options.cloud)); } else { - throw "argument 'cloud' must either be a string or a function"; + throw new Error("argument 'cloud' must either be a string or a function"); } } diff --git a/src/triggers.js b/src/triggers.js index 69443eff73..e8cc9b43d9 100644 --- a/src/triggers.js +++ b/src/triggers.js @@ -275,11 +275,11 @@ export function getTrigger(className, triggerType, applicationId) { return get(Category.Triggers, `${triggerType}.${className}`, applicationId); } -export async function runTrigger(trigger, name, request, auth) { +export async function runTrigger(trigger, name, request, auth, applicationId) { if (!trigger) { return; } - await maybeRunValidator(request, name, auth); + await 
maybeRunValidator(request, name, auth, applicationId); if (request.skipWithMasterKey) { return; } @@ -803,8 +803,9 @@ export function resolveError(message, defaultOpts) { } return error; } -export function maybeRunValidator(request, functionName, auth) { - const theValidator = getValidator(functionName, Parse.applicationId); +export function maybeRunValidator(request, functionName, auth, applicationId) { + applicationId = applicationId || (request.config && request.config.applicationId) || Parse.applicationId; + const theValidator = getValidator(functionName, applicationId); if (!theValidator) { return; } From a1c916239a9ac3cc5cf169f3802974f11ff00864 Mon Sep 17 00:00:00 2001 From: Daniel Date: Tue, 17 Mar 2026 21:46:57 +1100 Subject: [PATCH 09/14] fix: address code review findings across cloud adapter system - Use ephemeral ports and proper teardown in ExternalProcessAdapter specs - Add HTTP timeout/status validation and file trigger handling in ExternalProcessAdapter - Guard request.object for file hooks in InProcessAdapter beforeSave - Fix ESM/CJS loading order in LegacyAdapter (require-first, catch ERR_REQUIRE_ESM) - Best-effort shutdown and initialize rollback in CloudCodeManager - Add missing "net" import in README Go example - Update Options types for cloud, cloudCodeAdapters, cloudCodeOptions - Fix validator applicationId lookup in triggers.js and ParseLiveQueryServer - Throw Error objects instead of strings in resolveAdapters - Update JSDoc types in docs.js - Clarify intentional addParseCloud re-invocation in ParseServer --- spec/ExternalProcessAdapter.spec.js | 21 +++++----- src/LiveQuery/ParseLiveQueryServer.ts | 16 ++++---- src/Options/Definitions.js | 6 +-- src/Options/docs.js | 6 +-- src/Options/index.js | 22 ++++++++--- src/ParseServer.ts | 3 ++ src/cloud-code/CloudCodeManager.ts | 26 ++++++++++++- src/cloud-code/README.md | 1 + .../adapters/ExternalProcessAdapter.ts | 38 ++++++++++++++++--- src/cloud-code/adapters/InProcessAdapter.ts | 7 +++- 
src/cloud-code/adapters/LegacyAdapter.ts | 12 +++--- src/cloud-code/resolveAdapters.ts | 2 +- src/triggers.js | 9 +++-- 13 files changed, 120 insertions(+), 49 deletions(-) diff --git a/spec/ExternalProcessAdapter.spec.js b/spec/ExternalProcessAdapter.spec.js index 6a328b92d8..5b3ecc4cd0 100644 --- a/spec/ExternalProcessAdapter.spec.js +++ b/spec/ExternalProcessAdapter.spec.js @@ -3,8 +3,8 @@ const { ExternalProcessAdapter } = require('../lib/cloud-code/adapters/ExternalP const { CloudCodeManager } = require('../lib/cloud-code/CloudCodeManager'); const http = require('http'); -function createMockCloudServer(manifest, port) { - return new Promise((resolve) => { +function createMockCloudServer(manifest) { + return new Promise((resolve, reject) => { const server = http.createServer((req, res) => { if (req.url === '/' && req.method === 'GET') { res.writeHead(200, { 'Content-Type': 'application/json' }); @@ -24,7 +24,8 @@ function createMockCloudServer(manifest, port) { res.end(); } }); - server.listen(port, () => resolve(server)); + server.on('error', (err) => reject(err)); + server.listen(0, () => resolve({ server, port: server.address().port })); }); } @@ -45,15 +46,14 @@ describe('ExternalProcessAdapter', () => { it('spawns process and reads manifest', async () => { const manager = new CloudCodeManager(); - const port = 19876; - const server = await createMockCloudServer( - { protocol: 'ParseCloud/1.0', hooks: { functions: [{ name: 'ext-fn' }], triggers: [], jobs: [] } }, - port + const { server, port } = await createMockCloudServer( + { protocol: 'ParseCloud/1.0', hooks: { functions: [{ name: 'ext-fn' }], triggers: [], jobs: [] } } ); + let adapter; try { const cmd = `node -e "process.stdout.write('PARSE_CLOUD_READY:${port}\\n'); setTimeout(() => {}, 60000)"`; - const adapter = new ExternalProcessAdapter(cmd, 'test-key', { + adapter = new ExternalProcessAdapter(cmd, 'test-key', { startupTimeout: 5000, healthCheckInterval: 0, }); @@ -61,9 +61,10 @@ 
describe('ExternalProcessAdapter', () => { await adapter.initialize(registry, { appId: 'test', masterKey: 'mk', serverURL: 'http://localhost' }); expect(manager.getFunction('ext-fn')).toBeDefined(); - - await adapter.shutdown(); } finally { + if (adapter) { + await adapter.shutdown(); + } server.close(); } }, 10000); diff --git a/src/LiveQuery/ParseLiveQueryServer.ts b/src/LiveQuery/ParseLiveQueryServer.ts index a83b8e1a62..ef019d50a3 100644 --- a/src/LiveQuery/ParseLiveQueryServer.ts +++ b/src/LiveQuery/ParseLiveQueryServer.ts @@ -232,7 +232,7 @@ class ParseLiveQueryServer { installationId: client.installationId, sendEvent: true, }; - const trigger = getTrigger(className, 'afterEvent', Parse.applicationId); + const trigger = getTrigger(className, 'afterEvent', this.config.appId); if (trigger) { const auth = await this.getAuthFromClient(client, requestId); if (auth && auth.user) { @@ -241,7 +241,7 @@ class ParseLiveQueryServer { if (res.object) { res.object = Parse.Object.fromJSON(res.object); } - await runTrigger(trigger, `afterEvent.${className}`, res, auth); + await runTrigger(trigger, `afterEvent.${className}`, res, auth, this.config.appId); } if (!res.sendEvent) { return; @@ -388,7 +388,7 @@ class ParseLiveQueryServer { installationId: client.installationId, sendEvent: true, }; - const trigger = getTrigger(className, 'afterEvent', Parse.applicationId); + const trigger = getTrigger(className, 'afterEvent', this.config.appId); if (trigger) { if (res.object) { res.object = Parse.Object.fromJSON(res.object); @@ -400,7 +400,7 @@ class ParseLiveQueryServer { if (auth && auth.user) { res.user = auth.user; } - await runTrigger(trigger, `afterEvent.${className}`, res, auth); + await runTrigger(trigger, `afterEvent.${className}`, res, auth, this.config.appId); } if (!res.sendEvent) { return; @@ -845,13 +845,13 @@ class ParseLiveQueryServer { installationId: request.installationId, user: undefined, }; - const trigger = getTrigger('@Connect', 'beforeConnect', 
Parse.applicationId); + const trigger = getTrigger('@Connect', 'beforeConnect', this.config.appId); if (trigger) { const auth = await this.getAuthFromClient(client, request.requestId, req.sessionToken); if (auth && auth.user) { req.user = auth.user; } - await runTrigger(trigger, `beforeConnect.@Connect`, req, auth); + await runTrigger(trigger, `beforeConnect.@Connect`, req, auth, this.config.appId); } parseWebsocket.clientId = clientId; this.clients.set(parseWebsocket.clientId, client); @@ -908,7 +908,7 @@ class ParseLiveQueryServer { const className = request.query.className; let authCalled = false; try { - const trigger = getTrigger(className, 'beforeSubscribe', Parse.applicationId); + const trigger = getTrigger(className, 'beforeSubscribe', this.config.appId); if (trigger) { const auth = await this.getAuthFromClient(client, request.requestId, request.sessionToken); authCalled = true; @@ -919,7 +919,7 @@ class ParseLiveQueryServer { const parseQuery = new Parse.Query(className); parseQuery.withJSON(request.query); request.query = parseQuery; - await runTrigger(trigger, `beforeSubscribe.${className}`, request, auth); + await runTrigger(trigger, `beforeSubscribe.${className}`, request, auth, this.config.appId); const query = request.query.toJSON(); request.query = query; diff --git a/src/Options/Definitions.js b/src/Options/Definitions.js index 1b0e3931e1..d52f1b52dc 100644 --- a/src/Options/Definitions.js +++ b/src/Options/Definitions.js @@ -128,12 +128,12 @@ module.exports.ParseServerOptions = { }, cloud: { env: 'PARSE_SERVER_CLOUD', - help: 'Full path to your cloud code main.js', + help: 'Full path to your cloud code main.js, a cloud code function, or an object implementing getRouter() for in-process cloud code', }, cloudCodeAdapters: { env: 'PARSE_SERVER_CLOUD_CODE_ADAPTERS', help: 'Array of CloudCodeAdapter instances for BYO cloud code integration', - action: parsers.objectParser, + action: parsers.arrayParser, }, cloudCodeCommand: { env: 
'PARSE_SERVER_CLOUD_CODE_COMMAND', @@ -141,7 +141,7 @@ module.exports.ParseServerOptions = { }, cloudCodeOptions: { env: 'PARSE_SERVER_CLOUD_CODE_OPTIONS', - help: 'Options for the external cloud code process adapter', + help: 'Options for the external cloud code process adapter: startupTimeout, healthCheckInterval, shutdownTimeout, maxRestartDelay', action: parsers.objectParser, }, cluster: { diff --git a/src/Options/docs.js b/src/Options/docs.js index fb51142e01..7ad923309a 100644 --- a/src/Options/docs.js +++ b/src/Options/docs.js @@ -26,10 +26,10 @@ * @property {Number} cacheMaxSize Sets the maximum size for the in memory cache, defaults to 10000 * @property {Number} cacheTTL Sets the TTL for the in memory cache (in ms), defaults to 5000 (5 seconds) * @property {String} clientKey Key for iOS, MacOS, tvOS clients - * @property {String} cloud Full path to your cloud code main.js - * @property {Object} cloudCodeAdapters Array of CloudCodeAdapter instances for BYO cloud code integration + * @property {Union} cloud Full path to your cloud code main.js, a cloud code function, or an object implementing getRouter() for in-process cloud code + * @property {CloudCodeAdapter[]} cloudCodeAdapters Array of CloudCodeAdapter instances for BYO cloud code integration * @property {String} cloudCodeCommand Shell command to spawn an external cloud code process (ParseCloud/1.0 protocol) - * @property {Object} cloudCodeOptions Options for the external cloud code process adapter + * @property {CloudCodeOptions} cloudCodeOptions Options for the external cloud code process adapter: startupTimeout, healthCheckInterval, shutdownTimeout, maxRestartDelay * @property {Number|Boolean} cluster Run with cluster, optionally set the number of processes default to os.cpus().length * @property {String} collectionPrefix A collection prefix for the classes * @property {Boolean} convertEmailToLowercase Optional. 
If set to `true`, the `email` property of a user is automatically converted to lowercase before being stored in the database. Consequently, queries must match the case as stored in the database, which would be lowercase in this scenario. If `false`, the `email` property is stored as set, without any case modifications. Default is `false`. diff --git a/src/Options/index.js b/src/Options/index.js index 948afbec27..6ea815a456 100644 --- a/src/Options/index.js +++ b/src/Options/index.js @@ -59,6 +59,18 @@ type SendEmailVerificationRequest = { user: any, master?: boolean, }; +type CloudCodeOptions = { + startupTimeout?: number, + healthCheckInterval?: number, + shutdownTimeout?: number, + maxRestartDelay?: number, +}; +type CloudCodeAdapter = { + name: string, + initialize(registry: any, config: any): Promise, + isHealthy(): Promise, + shutdown(): Promise, +}; export interface ParseServerOptions { /* Your Parse Application ID @@ -139,16 +151,16 @@ export interface ParseServerOptions { /* Optional. If set to `true`, the `username` property of a user is automatically converted to lowercase before being stored in the database. Consequently, queries must match the case as stored in the database, which would be lowercase in this scenario. If `false`, the `username` property is stored as set, without any case modifications. Default is `false`. :DEFAULT: false */ convertUsernameToLowercase: ?boolean; - /* Full path to your cloud code main.js */ - cloud: ?string; + /* Full path to your cloud code main.js, a cloud code function, or an object implementing getRouter() for in-process cloud code */ + cloud: ?(string | Object); /* Shell command to spawn an external cloud code process (ParseCloud/1.0 protocol) */ cloudCodeCommand: ?string; /* Key for authenticating external cloud code process requests. Required when cloudCodeCommand is set. 
*/ webhookKey: ?string; - /* Options for the external cloud code process adapter */ - cloudCodeOptions: ?Object; + /* Options for the external cloud code process adapter: startupTimeout, healthCheckInterval, shutdownTimeout, maxRestartDelay */ + cloudCodeOptions: ?CloudCodeOptions; /* Array of CloudCodeAdapter instances for BYO cloud code integration */ - cloudCodeAdapters: ?Object; + cloudCodeAdapters: ?(CloudCodeAdapter[]); /* A collection prefix for the classes :DEFAULT: '' */ collectionPrefix: ?string; diff --git a/src/ParseServer.ts b/src/ParseServer.ts index 60731fb738..affcfff096 100644 --- a/src/ParseServer.ts +++ b/src/ParseServer.ts @@ -196,6 +196,9 @@ class ParseServer { }); if (adapters.length > 0) { + // Re-invoke addParseCloud() so Parse.Cloud methods are available + // before cloud code adapters initialize (the module-level call may + // run before Parse.applicationId is set). addParseCloud(); const cloudManager = new CloudCodeManager(); diff --git a/src/cloud-code/CloudCodeManager.ts b/src/cloud-code/CloudCodeManager.ts index b80d682a19..0757d2bcd6 100644 --- a/src/cloud-code/CloudCodeManager.ts +++ b/src/cloud-code/CloudCodeManager.ts @@ -262,15 +262,37 @@ export class CloudCodeManager { for (const adapter of adapters) { const registry = this.createRegistry(adapter.name); - await adapter.initialize(registry, config); + try { + await adapter.initialize(registry, config); + } catch (error) { + // Roll back any partial registrations from this adapter + this.unregisterAll(adapter.name); + // Attempt graceful shutdown of the failed adapter + try { + await adapter.shutdown(); + } catch { + // Ignore shutdown errors during initialization rollback + } + throw error; + } this.adapters.push(adapter); } } async shutdown(): Promise { + const errors: Array<{ name: string; error: unknown }> = []; for (const adapter of this.adapters) { - await adapter.shutdown(); + try { + await adapter.shutdown(); + } catch (error) { + // eslint-disable-next-line no-console + 
console.error(`Error shutting down adapter "${adapter.name}":`, error); + errors.push({ name: adapter.name, error }); + } } + // Clear all manager state regardless of individual shutdown failures + this.adapters.length = 0; + this.clearAll(); } async healthCheck(): Promise { diff --git a/src/cloud-code/README.md b/src/cloud-code/README.md index d38906e17e..36ff4572ba 100644 --- a/src/cloud-code/README.md +++ b/src/cloud-code/README.md @@ -277,6 +277,7 @@ package main import ( "encoding/json" "fmt" + "net" "net/http" "os" ) diff --git a/src/cloud-code/adapters/ExternalProcessAdapter.ts b/src/cloud-code/adapters/ExternalProcessAdapter.ts index 0b8e64c7de..df110a0e5f 100644 --- a/src/cloud-code/adapters/ExternalProcessAdapter.ts +++ b/src/cloud-code/adapters/ExternalProcessAdapter.ts @@ -18,13 +18,24 @@ const DEFAULT_OPTIONS: Required = { maxRestartDelay: 30000, }; +const HTTP_TIMEOUT = 10000; + function httpGet(url: string): Promise { return new Promise((resolve, reject) => { - http.get(url, (res) => { + const req = http.get(url, (res) => { + if (res.statusCode === undefined || res.statusCode < 200 || res.statusCode >= 300) { + res.resume(); + reject(new Error(`HTTP GET ${url} returned status ${res.statusCode}`)); + return; + } let data = ''; res.on('data', (chunk) => data += chunk); res.on('end', () => resolve(data)); - }).on('error', reject); + }); + req.setTimeout(HTTP_TIMEOUT, () => { + req.destroy(new Error(`HTTP GET ${url} timed out after ${HTTP_TIMEOUT}ms`)); + }); + req.on('error', reject); }); } @@ -43,6 +54,11 @@ function httpPost(url: string, body: Record, webhookKey: string 'X-Parse-Webhook-Key': webhookKey, }, }, (res) => { + if (res.statusCode === undefined || res.statusCode < 200 || res.statusCode >= 300) { + res.resume(); + reject(new Error(`HTTP POST ${url} returned status ${res.statusCode}`)); + return; + } let data = ''; res.on('data', (chunk) => data += chunk); res.on('end', () => { @@ -53,6 +69,9 @@ function httpPost(url: string, body: Record, 
webhookKey: string } }); }); + req.setTimeout(HTTP_TIMEOUT, () => { + req.destroy(new Error(`HTTP POST ${url} timed out after ${HTTP_TIMEOUT}ms`)); + }); req.on('error', reject); req.write(payload); req.end(); @@ -79,8 +98,13 @@ export class ExternalProcessAdapter implements CloudCodeAdapter { async initialize(registry: CloudCodeRegistry, config: ParseServerConfig): Promise { this.port = await this.spawnAndWaitForReady(config); - const manifest = await this.fetchManifest(); - this.registerFromManifest(registry, manifest); + try { + const manifest = await this.fetchManifest(); + this.registerFromManifest(registry, manifest); + } catch (err) { + await this.shutdown(); + throw err; + } if (this.options.healthCheckInterval > 0) { this.healthInterval = setInterval(() => this.checkHealth(), this.options.healthCheckInterval); @@ -90,7 +114,8 @@ export class ExternalProcessAdapter implements CloudCodeAdapter { async isHealthy(): Promise { try { const response = await httpGet(`http://localhost:${this.port}/health`); - return response === 'OK' || response.includes('ok'); + const trimmed = response.trim(); + return trimmed === 'OK' || trimmed === 'ok'; } catch { return false; } @@ -190,6 +215,9 @@ export class ExternalProcessAdapter implements CloudCodeAdapter { this.webhookKey ); if (triggerName === 'beforeSave') { + if (request.file || className === 'File') { + return webhookResponseToResult(response); + } applyBeforeSaveResponse(request, response); return; } diff --git a/src/cloud-code/adapters/InProcessAdapter.ts b/src/cloud-code/adapters/InProcessAdapter.ts index db92a05290..910a3064bd 100644 --- a/src/cloud-code/adapters/InProcessAdapter.ts +++ b/src/cloud-code/adapters/InProcessAdapter.ts @@ -32,8 +32,11 @@ export class InProcessAdapter implements CloudCodeAdapter { const body = requestToWebhookBody(request); const response = await router.dispatchTrigger(className, triggerName, body); if (triggerName === 'beforeSave') { - applyBeforeSaveResponse(request, response); - 
return; + if (request.object) { + applyBeforeSaveResponse(request, response); + return; + } + return webhookResponseToResult(response); } return webhookResponseToResult(response); }); diff --git a/src/cloud-code/adapters/LegacyAdapter.ts b/src/cloud-code/adapters/LegacyAdapter.ts index 4f73e2ed49..b5de23f39c 100644 --- a/src/cloud-code/adapters/LegacyAdapter.ts +++ b/src/cloud-code/adapters/LegacyAdapter.ts @@ -21,16 +21,16 @@ export class LegacyAdapter implements CloudCodeAdapter { await Promise.resolve(this.cloud(Parse)); } else if (typeof this.cloud === 'string') { const path = require('path'); + const url = require('url'); const resolved = path.resolve(process.cwd(), this.cloud); try { - const pkg = require(path.resolve(process.cwd(), 'package.json')); - if (process.env.npm_package_type === 'module' || pkg?.type === 'module') { - await import(resolved); + require(resolved); + } catch (err: any) { + if (err?.code === 'ERR_REQUIRE_ESM') { + await import(url.pathToFileURL(resolved).href); } else { - require(resolved); + throw err; } - } catch { - require(resolved); } } } diff --git a/src/cloud-code/resolveAdapters.ts b/src/cloud-code/resolveAdapters.ts index 1c11152d98..34dc18ea7e 100644 --- a/src/cloud-code/resolveAdapters.ts +++ b/src/cloud-code/resolveAdapters.ts @@ -16,7 +16,7 @@ export function resolveAdapters(options: any): CloudCodeAdapter[] { } else if (typeof options.cloud === 'string' || typeof options.cloud === 'function') { adapters.push(new LegacyAdapter(options.cloud)); } else { - throw "argument 'cloud' must either be a string or a function"; + throw new Error("argument 'cloud' must either be a string or a function"); } } diff --git a/src/triggers.js b/src/triggers.js index 69443eff73..e8cc9b43d9 100644 --- a/src/triggers.js +++ b/src/triggers.js @@ -275,11 +275,11 @@ export function getTrigger(className, triggerType, applicationId) { return get(Category.Triggers, `${triggerType}.${className}`, applicationId); } -export async function 
runTrigger(trigger, name, request, auth) { +export async function runTrigger(trigger, name, request, auth, applicationId) { if (!trigger) { return; } - await maybeRunValidator(request, name, auth); + await maybeRunValidator(request, name, auth, applicationId); if (request.skipWithMasterKey) { return; } @@ -803,8 +803,9 @@ export function resolveError(message, defaultOpts) { } return error; } -export function maybeRunValidator(request, functionName, auth) { - const theValidator = getValidator(functionName, Parse.applicationId); +export function maybeRunValidator(request, functionName, auth, applicationId) { + applicationId = applicationId || (request.config && request.config.applicationId) || Parse.applicationId; + const theValidator = getValidator(functionName, applicationId); if (!theValidator) { return; } From db7daf0bdc745709e6468c12b482c05a3c7ec401 Mon Sep 17 00:00:00 2001 From: Daniel Date: Tue, 17 Mar 2026 22:14:29 +1100 Subject: [PATCH 10/14] coderabbit comments --- spec/ExternalProcessAdapter.spec.js | 4 +++- src/cloud-code/CloudCodeManager.ts | 11 ++++------- 2 files changed, 7 insertions(+), 8 deletions(-) diff --git a/spec/ExternalProcessAdapter.spec.js b/spec/ExternalProcessAdapter.spec.js index 990ac617a2..8fc06eb898 100644 --- a/spec/ExternalProcessAdapter.spec.js +++ b/spec/ExternalProcessAdapter.spec.js @@ -70,7 +70,9 @@ describe('ExternalProcessAdapter', () => { if (adapter) { await adapter.shutdown(); } - server.close(); + await new Promise((resolve, reject) => { + server.close(err => (err ? 
reject(err) : resolve())); + }); } }, 10000); }); diff --git a/src/cloud-code/CloudCodeManager.ts b/src/cloud-code/CloudCodeManager.ts index b900952fa1..cc6b558a8e 100644 --- a/src/cloud-code/CloudCodeManager.ts +++ b/src/cloud-code/CloudCodeManager.ts @@ -306,12 +306,9 @@ export class CloudCodeManager { } async healthCheck(): Promise { - for (const adapter of this.adapters) { - const healthy = await adapter.isHealthy(); - if (!healthy) { - return false; - } - } - return true; + const results = await Promise.allSettled( + this.adapters.map(adapter => adapter.isHealthy()) + ); + return results.every(r => r.status === 'fulfilled' && r.value === true); } } From 91199f89af446e5267e88b832b76bc7442669814 Mon Sep 17 00:00:00 2001 From: Daniel Date: Tue, 17 Mar 2026 22:24:45 +1100 Subject: [PATCH 11/14] Update CloudCodeManager.ts --- src/cloud-code/CloudCodeManager.ts | 9 +++++++-- 1 file changed, 7 insertions(+), 2 deletions(-) diff --git a/src/cloud-code/CloudCodeManager.ts b/src/cloud-code/CloudCodeManager.ts index cc6b558a8e..d24f47a926 100644 --- a/src/cloud-code/CloudCodeManager.ts +++ b/src/cloud-code/CloudCodeManager.ts @@ -161,7 +161,7 @@ export class CloudCodeManager { return this.jobs.get(name) ?? 
null; } - getJobs(): Map { + getJobs(): ReadonlyMap { return this.jobs; } @@ -181,7 +181,12 @@ export class CloudCodeManager { runLiveQueryEventHandlers(data: unknown): void { for (const entry of this.liveQueryHandlers) { - entry.handler(data); + try { + entry.handler(data); + } catch (error) { + // eslint-disable-next-line no-console + console.error(`LiveQuery handler from "${entry.source}" threw:`, error); + } } } From dbd7d093dbfe1084f0a055e821605fa1b459e6c7 Mon Sep 17 00:00:00 2001 From: Daniel Date: Tue, 17 Mar 2026 22:29:51 +1100 Subject: [PATCH 12/14] Update CloudCode.spec.js --- spec/CloudCode.spec.js | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/spec/CloudCode.spec.js b/spec/CloudCode.spec.js index 45f3461bc4..55a54c0bce 100644 --- a/spec/CloudCode.spec.js +++ b/spec/CloudCode.spec.js @@ -51,7 +51,7 @@ describe('Cloud Code', () => { it('cloud code must be valid type', async () => { spyOn(console, 'error').and.callFake(() => { }); - await expectAsync(reconfigureServer({ cloud: true })).toBeRejectedWith( + await expectAsync(reconfigureServer({ cloud: true })).toBeRejectedWithError( "argument 'cloud' must either be a string or a function" ); }); From 951ec8d5cae1fe4ff24439acf718db40164d08f3 Mon Sep 17 00:00:00 2001 From: Daniel Date: Tue, 17 Mar 2026 23:12:22 +1100 Subject: [PATCH 13/14] increase coverage --- spec/CloudCodeManager.spec.js | 82 ++++++++ spec/ExternalProcessAdapter.spec.js | 174 ++++++++++++++++ spec/InProcessAdapter.spec.js | 95 +++++++++ spec/LegacyAdapter.spec.js | 66 ++++++ spec/resolveAdapters.spec.js | 81 ++++++++ spec/webhook-bridge.spec.js | 309 ++++++++++++++++++++++++++++ 6 files changed, 807 insertions(+) create mode 100644 spec/LegacyAdapter.spec.js create mode 100644 spec/resolveAdapters.spec.js create mode 100644 spec/webhook-bridge.spec.js diff --git a/spec/CloudCodeManager.spec.js b/spec/CloudCodeManager.spec.js index 07023f337c..63bac49787 100644 --- a/spec/CloudCodeManager.spec.js +++ 
b/spec/CloudCodeManager.spec.js @@ -475,6 +475,47 @@ describe('CloudCodeManager', () => { expect(manager.getFunction('adapterFunc').source).toBe('test-adapter'); }); + it('rolls back failed adapter and previously-initialized adapters', async () => { + const shutdownCalls = []; + const adapterA = { + name: 'adapter-a', + initialize: async (registry) => { + registry.defineFunction('funcFromA', () => {}); + }, + isHealthy: async () => true, + shutdown: async () => { shutdownCalls.push('a'); }, + }; + const adapterB = { + name: 'adapter-b', + initialize: async () => { throw new Error('adapter-b failed'); }, + isHealthy: async () => true, + shutdown: async () => { shutdownCalls.push('b'); }, + }; + const config = { appId: 'testApp', masterKey: 'key', serverURL: 'http://localhost:1337/parse' }; + await expectAsync(manager.initialize([adapterA, adapterB], config)).toBeRejectedWithError('adapter-b failed'); + expect(manager.getFunction('funcFromA')).toBeNull(); + expect(shutdownCalls).toContain('a'); + expect(shutdownCalls).toContain('b'); + // manager should have no adapters left — healthCheck with no adapters returns true + const healthy = await manager.healthCheck(); + expect(healthy).toBe(true); + }); + + it('rolls back partial registrations from the failing adapter', async () => { + const adapter = { + name: 'partial-adapter', + initialize: async (registry) => { + registry.defineFunction('partialFunc', () => {}); + throw new Error('partial failure'); + }, + isHealthy: async () => true, + shutdown: async () => {}, + }; + const config = { appId: 'testApp', masterKey: 'key', serverURL: 'http://localhost:1337/parse' }; + await expectAsync(manager.initialize([adapter], config)).toBeRejectedWithError('partial failure'); + expect(manager.getFunction('partialFunc')).toBeNull(); + }); + it('throws when two adapters have the same name', async () => { const adapterA = { name: 'duplicate', @@ -507,6 +548,47 @@ describe('CloudCodeManager', () => { await manager.shutdown(); 
expect(calls).toEqual(['shutdown']); }); + + it('continues shutting down other adapters when one fails', async () => { + const shutdownCalls = []; + const adapterA = { + name: 'adapter-a', + initialize: async () => {}, + isHealthy: async () => true, + shutdown: async () => { + shutdownCalls.push('a'); + throw new Error('shutdown-a failed'); + }, + }; + const adapterB = { + name: 'adapter-b', + initialize: async () => {}, + isHealthy: async () => true, + shutdown: async () => { shutdownCalls.push('b'); }, + }; + const config = { appId: 'testApp', masterKey: 'key', serverURL: 'http://localhost:1337/parse' }; + await manager.initialize([adapterA, adapterB], config); + await manager.shutdown(); + expect(shutdownCalls).toContain('a'); + expect(shutdownCalls).toContain('b'); + }); + + it('clears all registrations after shutdown', async () => { + const adapter = { + name: 'adapter-a', + initialize: async (registry) => { + registry.defineFunction('myFunc', () => {}); + }, + isHealthy: async () => true, + shutdown: async () => {}, + }; + const config = { appId: 'testApp', masterKey: 'key', serverURL: 'http://localhost:1337/parse' }; + await manager.initialize([adapter], config); + expect(manager.getFunction('myFunc')).not.toBeNull(); + await manager.shutdown(); + expect(manager.getFunction('myFunc')).toBeNull(); + expect(manager.getFunctionNames()).toEqual([]); + }); }); describe('healthCheck', () => { diff --git a/spec/ExternalProcessAdapter.spec.js b/spec/ExternalProcessAdapter.spec.js index 8fc06eb898..c10dd0951e 100644 --- a/spec/ExternalProcessAdapter.spec.js +++ b/spec/ExternalProcessAdapter.spec.js @@ -49,6 +49,180 @@ describe('ExternalProcessAdapter', () => { await expectAsync(adapter.shutdown()).toBeResolved(); }); + it('isHealthy returns true for running server', async () => { + const manager = new CloudCodeManager(); + const { server, port } = await createMockCloudServer( + { protocol: 'ParseCloud/1.0', hooks: { functions: [], triggers: [], jobs: [] } } + ); + + 
let adapter; + try { + const cmd = `node -e "process.stdout.write('PARSE_CLOUD_READY:${port}\\n'); setTimeout(() => {}, 60000)"`; + adapter = new ExternalProcessAdapter(cmd, 'test-key', { + startupTimeout: 5000, + healthCheckInterval: 0, + }); + const registry = manager.createRegistry(adapter.name); + await adapter.initialize(registry, { appId: 'test', masterKey: 'mk', serverURL: 'http://localhost' }); + + const healthy = await adapter.isHealthy(); + expect(healthy).toBe(true); + } finally { + if (adapter) { + await adapter.shutdown(); + } + await new Promise((resolve, reject) => { + server.close(err => (err ? reject(err) : resolve())); + }); + } + }, 10000); + + it('isHealthy returns false when server is down', async () => { + const adapter = new ExternalProcessAdapter('echo test', 'test-key'); + // Port is 0 (default) since we never initialized — any HTTP request will fail + const healthy = await adapter.isHealthy(); + expect(healthy).toBe(false); + }); + + it('cleans up process on manifest fetch failure', async () => { + // Create a server that returns 500 for the manifest endpoint + const server = await new Promise((resolve, reject) => { + const srv = http.createServer((req, res) => { + if (req.url === '/' && req.method === 'GET') { + res.writeHead(500); + res.end('Internal Server Error'); + } else { + res.writeHead(404); + res.end(); + } + }); + srv.on('error', reject); + srv.listen(0, () => resolve(srv)); + }); + const port = server.address().port; + + let adapter; + try { + const cmd = `node -e "process.stdout.write('PARSE_CLOUD_READY:${port}\\n'); setTimeout(() => {}, 60000)"`; + adapter = new ExternalProcessAdapter(cmd, 'test-key', { + startupTimeout: 5000, + healthCheckInterval: 0, + }); + const manager = new CloudCodeManager(); + const registry = manager.createRegistry(adapter.name); + + await expectAsync( + adapter.initialize(registry, { appId: 'test', masterKey: 'mk', serverURL: 'http://localhost' }) + ).toBeRejectedWithError(/500/); + + // After 
failure, the process should have been cleaned up by shutdown() + // Calling shutdown again should resolve cleanly (process already null) + await expectAsync(adapter.shutdown()).toBeResolved(); + } finally { + await new Promise((resolve, reject) => { + server.close(err => (err ? reject(err) : resolve())); + }); + } + }, 10000); + + it('registers triggers including beforeSave', async () => { + const manager = new CloudCodeManager(); + const { server, port } = await createMockCloudServer({ + protocol: 'ParseCloud/1.0', + hooks: { + functions: [], + triggers: [ + { className: 'GameScore', triggerName: 'beforeSave' }, + { className: 'GameScore', triggerName: 'afterSave' }, + ], + jobs: [], + }, + }); + + let adapter; + try { + const cmd = `node -e "process.stdout.write('PARSE_CLOUD_READY:${port}\\n'); setTimeout(() => {}, 60000)"`; + adapter = new ExternalProcessAdapter(cmd, 'test-key', { + startupTimeout: 5000, + healthCheckInterval: 0, + }); + const registry = manager.createRegistry(adapter.name); + await adapter.initialize(registry, { appId: 'test', masterKey: 'mk', serverURL: 'http://localhost' }); + + expect(manager.getTrigger('GameScore', 'beforeSave')).toBeDefined(); + expect(manager.getTrigger('GameScore', 'afterSave')).toBeDefined(); + } finally { + if (adapter) { + await adapter.shutdown(); + } + await new Promise((resolve, reject) => { + server.close(err => (err ? 
reject(err) : resolve())); + }); + } + }, 10000); + + it('registers jobs from manifest', async () => { + const manager = new CloudCodeManager(); + const { server, port } = await createMockCloudServer({ + protocol: 'ParseCloud/1.0', + hooks: { + functions: [], + triggers: [], + jobs: [{ name: 'cleanupJob' }], + }, + }); + + let adapter; + try { + const cmd = `node -e "process.stdout.write('PARSE_CLOUD_READY:${port}\\n'); setTimeout(() => {}, 60000)"`; + adapter = new ExternalProcessAdapter(cmd, 'test-key', { + startupTimeout: 5000, + healthCheckInterval: 0, + }); + const registry = manager.createRegistry(adapter.name); + await adapter.initialize(registry, { appId: 'test', masterKey: 'mk', serverURL: 'http://localhost' }); + + expect(manager.getJob('cleanupJob')).toBeDefined(); + } finally { + if (adapter) { + await adapter.shutdown(); + } + await new Promise((resolve, reject) => { + server.close(err => (err ? reject(err) : resolve())); + }); + } + }, 10000); + + it('shutdown terminates a running process', async () => { + const manager = new CloudCodeManager(); + const { server, port } = await createMockCloudServer( + { protocol: 'ParseCloud/1.0', hooks: { functions: [], triggers: [], jobs: [] } } + ); + + let adapter; + try { + const cmd = `node -e "process.stdout.write('PARSE_CLOUD_READY:${port}\\n'); setTimeout(() => {}, 60000)"`; + adapter = new ExternalProcessAdapter(cmd, 'test-key', { + startupTimeout: 5000, + healthCheckInterval: 0, + shutdownTimeout: 2000, + }); + const registry = manager.createRegistry(adapter.name); + await adapter.initialize(registry, { appId: 'test', masterKey: 'mk', serverURL: 'http://localhost' }); + + // Shutdown should terminate the spawned process + await expectAsync(adapter.shutdown()).toBeResolved(); + + // After shutdown, isHealthy should return false (port no longer served by our process) + // and a second shutdown should be a no-op + await expectAsync(adapter.shutdown()).toBeResolved(); + } finally { + await new 
Promise((resolve, reject) => { + server.close(err => (err ? reject(err) : resolve())); + }); + } + }, 10000); + it('spawns process and reads manifest', async () => { const manager = new CloudCodeManager(); const { server, port } = await createMockCloudServer( diff --git a/spec/InProcessAdapter.spec.js b/spec/InProcessAdapter.spec.js index 3821492d8f..1822313493 100644 --- a/spec/InProcessAdapter.spec.js +++ b/spec/InProcessAdapter.spec.js @@ -124,4 +124,99 @@ describe('InProcessAdapter', () => { const adapter = new InProcessAdapter(cloud); expect(await adapter.isHealthy()).toBe(true); }); + + it('beforeSave trigger with request.object calls applyBeforeSaveResponse', async () => { + const cloud = createMockCloudCode( + { protocol: 'ParseCloud/1.0', hooks: { functions: [], triggers: [{ className: 'Todo', triggerName: 'beforeSave' }], jobs: [] } }, + { 'trigger:beforeSave.Todo': () => ({ success: { field1: 'value1' } }) } + ); + const adapter = new InProcessAdapter(cloud); + const registry = manager.createRegistry(adapter.name); + await adapter.initialize(registry, { appId: 'test', masterKey: 'mk', serverURL: 'http://localhost' }); + + const entry = manager.getTrigger('Todo', 'beforeSave'); + const request = { + object: { set: jasmine.createSpy('set'), toJSON: () => ({}) }, + master: false, + ip: '', + headers: {}, + }; + const result = await entry.handler(request); + expect(result).toBeUndefined(); + expect(request.object.set).toHaveBeenCalledWith('field1', 'value1'); + }); + + it('beforeSave trigger without request.object returns webhookResponseToResult', async () => { + const cloud = createMockCloudCode( + { protocol: 'ParseCloud/1.0', hooks: { functions: [], triggers: [{ className: '@File', triggerName: 'beforeSave' }], jobs: [] } }, + { 'trigger:beforeSave.@File': () => ({ success: { name: 'test.txt' } }) } + ); + const adapter = new InProcessAdapter(cloud); + const registry = manager.createRegistry(adapter.name); + await adapter.initialize(registry, { appId: 
'test', masterKey: 'mk', serverURL: 'http://localhost' }); + + const entry = manager.getTrigger('@File', 'beforeSave'); + const request = { + file: { name: 'test.txt' }, + master: false, + ip: '', + headers: {}, + }; + const result = await entry.handler(request); + expect(result).toEqual({ name: 'test.txt' }); + }); + + it('beforeSave trigger without request.object returning empty object returns undefined', async () => { + const cloud = createMockCloudCode( + { protocol: 'ParseCloud/1.0', hooks: { functions: [], triggers: [{ className: '@File', triggerName: 'beforeSave' }], jobs: [] } }, + { 'trigger:beforeSave.@File': () => ({ success: {} }) } + ); + const adapter = new InProcessAdapter(cloud); + const registry = manager.createRegistry(adapter.name); + await adapter.initialize(registry, { appId: 'test', masterKey: 'mk', serverURL: 'http://localhost' }); + + const entry = manager.getTrigger('@File', 'beforeSave'); + const request = { + file: { name: 'test.txt' }, + master: false, + ip: '', + headers: {}, + }; + const result = await entry.handler(request); + expect(result).toBeUndefined(); + }); + + it('non-beforeSave trigger returns webhookResponseToResult', async () => { + const cloud = createMockCloudCode( + { protocol: 'ParseCloud/1.0', hooks: { functions: [], triggers: [{ className: 'Todo', triggerName: 'afterSave' }], jobs: [] } }, + { 'trigger:afterSave.Todo': () => ({ success: { saved: true } }) } + ); + const adapter = new InProcessAdapter(cloud); + const registry = manager.createRegistry(adapter.name); + await adapter.initialize(registry, { appId: 'test', masterKey: 'mk', serverURL: 'http://localhost' }); + + const entry = manager.getTrigger('Todo', 'afterSave'); + const result = await entry.handler({ object: { toJSON: () => ({}) }, master: false, ip: '', headers: {} }); + expect(result).toEqual({ saved: true }); + }); + + it('bridge handler dispatches job and returns result', async () => { + const cloud = createMockCloudCode( + { protocol: 
'ParseCloud/1.0', hooks: { functions: [], triggers: [], jobs: [{ name: 'cleanup' }] } }, + { 'job:cleanup': () => ({ success: 'done' }) } + ); + const adapter = new InProcessAdapter(cloud); + const registry = manager.createRegistry(adapter.name); + await adapter.initialize(registry, { appId: 'test', masterKey: 'mk', serverURL: 'http://localhost' }); + + const entry = manager.getJob('cleanup'); + const result = await entry.handler({ params: {}, master: true, ip: '', headers: {} }); + expect(result).toBe('done'); + }); + + it('shutdown resolves cleanly', async () => { + const cloud = createMockCloudCode({ protocol: 'ParseCloud/1.0', hooks: { functions: [], triggers: [], jobs: [] } }); + const adapter = new InProcessAdapter(cloud); + await expectAsync(adapter.shutdown()).toBeResolved(); + }); }); diff --git a/spec/LegacyAdapter.spec.js b/spec/LegacyAdapter.spec.js new file mode 100644 index 0000000000..04125b59e9 --- /dev/null +++ b/spec/LegacyAdapter.spec.js @@ -0,0 +1,66 @@ +const { LegacyAdapter } = require('../lib/cloud-code/adapters/LegacyAdapter'); +const path = require('path'); + +const mockRegistry = { + defineFunction: () => {}, + defineTrigger: () => {}, + defineJob: () => {}, + defineLiveQueryHandler: () => {}, +}; +const mockConfig = { + appId: 'test', + masterKey: 'mk', + serverURL: 'http://localhost', +}; + +describe('LegacyAdapter', () => { + it('has name "legacy"', () => { + const adapter = new LegacyAdapter(() => {}); + expect(adapter.name).toBe('legacy'); + }); + + it('initialize calls a function with Parse', async () => { + const cloudFn = jasmine.createSpy('cloudFn'); + const adapter = new LegacyAdapter(cloudFn); + await adapter.initialize(mockRegistry, mockConfig); + expect(cloudFn).toHaveBeenCalledTimes(1); + const Parse = require('parse/node').Parse; + expect(cloudFn).toHaveBeenCalledWith(Parse); + }); + + it('initialize awaits a function that returns a Promise', async () => { + let resolved = false; + const cloudFn = () => + new 
Promise((resolve) => { + setTimeout(() => { + resolved = true; + resolve(); + }, 10); + }); + const adapter = new LegacyAdapter(cloudFn); + await adapter.initialize(mockRegistry, mockConfig); + expect(resolved).toBe(true); + }); + + it('initialize with a valid cloud code file path loads the file', async () => { + const filePath = path.resolve(__dirname, 'cloud/cloudCodeRelativeFile.js'); + const adapter = new LegacyAdapter(filePath); + await expectAsync(adapter.initialize(mockRegistry, mockConfig)).toBeResolved(); + }); + + it('initialize with a non-existent path throws', async () => { + const adapter = new LegacyAdapter('/non/existent/path/cloud.js'); + await expectAsync(adapter.initialize(mockRegistry, mockConfig)).toBeRejected(); + }); + + it('isHealthy returns true', async () => { + const adapter = new LegacyAdapter(() => {}); + const result = await adapter.isHealthy(); + expect(result).toBe(true); + }); + + it('shutdown resolves cleanly', async () => { + const adapter = new LegacyAdapter(() => {}); + await expectAsync(adapter.shutdown()).toBeResolved(); + }); +}); diff --git a/spec/resolveAdapters.spec.js b/spec/resolveAdapters.spec.js new file mode 100644 index 0000000000..021dd30f28 --- /dev/null +++ b/spec/resolveAdapters.spec.js @@ -0,0 +1,81 @@ +const { resolveAdapters } = require('../lib/cloud-code/resolveAdapters'); +const { LegacyAdapter } = require('../lib/cloud-code/adapters/LegacyAdapter'); +const { InProcessAdapter } = require('../lib/cloud-code/adapters/InProcessAdapter'); +const { ExternalProcessAdapter } = require('../lib/cloud-code/adapters/ExternalProcessAdapter'); + +describe('resolveAdapters', () => { + it('should return an empty array when no relevant options are provided', () => { + const result = resolveAdapters({}); + expect(result).toEqual([]); + }); + + it('should spread cloudCodeAdapters into the result', () => { + const adapter1 = { name: 'adapter1' }; + const adapter2 = { name: 'adapter2' }; + const result = resolveAdapters({ 
cloudCodeAdapters: [adapter1, adapter2] }); + expect(result.length).toBe(2); + expect(result[0]).toBe(adapter1); + expect(result[1]).toBe(adapter2); + }); + + it('should create a LegacyAdapter when cloud is a string', () => { + const result = resolveAdapters({ cloud: './cloud/main.js' }); + expect(result.length).toBe(1); + expect(result[0]).toBeInstanceOf(LegacyAdapter); + }); + + it('should create a LegacyAdapter when cloud is a function', () => { + const cloudFunction = () => {}; + const result = resolveAdapters({ cloud: cloudFunction }); + expect(result.length).toBe(1); + expect(result[0]).toBeInstanceOf(LegacyAdapter); + }); + + it('should create an InProcessAdapter when cloud is an object with getRouter', () => { + const cloudObject = { getRouter: () => {} }; + const result = resolveAdapters({ cloud: cloudObject }); + expect(result.length).toBe(1); + expect(result[0]).toBeInstanceOf(InProcessAdapter); + }); + + it('should throw when cloud is an invalid type (boolean)', () => { + expect(() => resolveAdapters({ cloud: true })).toThrowError( + "argument 'cloud' must either be a string or a function" + ); + }); + + it('should throw when cloud is an invalid type (number)', () => { + expect(() => resolveAdapters({ cloud: 42 })).toThrowError( + "argument 'cloud' must either be a string or a function" + ); + }); + + it('should throw when cloudCodeCommand is provided without webhookKey', () => { + expect(() => resolveAdapters({ cloudCodeCommand: 'node cloud.js' })).toThrowError( + 'webhookKey is required when using cloudCodeCommand' + ); + }); + + it('should create an ExternalProcessAdapter when cloudCodeCommand and webhookKey are provided', () => { + const result = resolveAdapters({ + cloudCodeCommand: 'node cloud.js', + webhookKey: 'secret-key', + }); + expect(result.length).toBe(1); + expect(result[0]).toBeInstanceOf(ExternalProcessAdapter); + }); + + it('should combine multiple options into a single result array', () => { + const customAdapter = { name: 'custom' }; 
+ const result = resolveAdapters({ + cloudCodeAdapters: [customAdapter], + cloud: './cloud/main.js', + cloudCodeCommand: 'node cloud.js', + webhookKey: 'secret-key', + }); + expect(result.length).toBe(3); + expect(result[0]).toBe(customAdapter); + expect(result[1]).toBeInstanceOf(LegacyAdapter); + expect(result[2]).toBeInstanceOf(ExternalProcessAdapter); + }); +}); diff --git a/spec/webhook-bridge.spec.js b/spec/webhook-bridge.spec.js new file mode 100644 index 0000000000..ef26639686 --- /dev/null +++ b/spec/webhook-bridge.spec.js @@ -0,0 +1,309 @@ +const { + requestToWebhookBody, + webhookResponseToResult, + applyBeforeSaveResponse, +} = require('../lib/cloud-code/adapters/webhook-bridge'); +const Parse = require('parse/node').Parse; + +describe('webhook-bridge', () => { + // ── requestToWebhookBody ────────────────────────────────────────────── + + describe('requestToWebhookBody', () => { + it('should return defaults for a minimal request', () => { + const body = requestToWebhookBody({}); + expect(body).toEqual({ + master: false, + ip: '', + headers: {}, + installationId: undefined, + }); + }); + + it('should pass through master, ip, headers, installationId', () => { + const body = requestToWebhookBody({ + master: true, + ip: '127.0.0.1', + headers: { 'x-custom': 'value' }, + installationId: 'abc-123', + }); + expect(body.master).toBe(true); + expect(body.ip).toBe('127.0.0.1'); + expect(body.headers).toEqual({ 'x-custom': 'value' }); + expect(body.installationId).toBe('abc-123'); + }); + + it('should serialise user via toJSON when available', () => { + const user = { toJSON: () => ({ objectId: 'u1', username: 'alice' }) }; + const body = requestToWebhookBody({ user }); + expect(body.user).toEqual({ objectId: 'u1', username: 'alice' }); + }); + + it('should use plain user object when toJSON is absent', () => { + const user = { objectId: 'u2', username: 'bob' }; + const body = requestToWebhookBody({ user }); + expect(body.user).toEqual({ objectId: 'u2', username: 
'bob' }); + }); + + it('should not include user when it is undefined', () => { + const body = requestToWebhookBody({}); + expect(Object.hasOwn(body, 'user')).toBe(false); + }); + + it('should include params when defined', () => { + const body = requestToWebhookBody({ params: { key: 'val' } }); + expect(body.params).toEqual({ key: 'val' }); + }); + + it('should not include params when undefined', () => { + const body = requestToWebhookBody({}); + expect(Object.hasOwn(body, 'params')).toBe(false); + }); + + it('should include jobId when defined', () => { + const body = requestToWebhookBody({ jobId: 'job-42' }); + expect(body.jobId).toBe('job-42'); + }); + + it('should serialise object via toJSON when available', () => { + const obj = { toJSON: () => ({ className: 'Item', objectId: 'o1' }) }; + const body = requestToWebhookBody({ object: obj }); + expect(body.object).toEqual({ className: 'Item', objectId: 'o1' }); + }); + + it('should use plain object when toJSON is absent', () => { + const obj = { className: 'Item', objectId: 'o1' }; + const body = requestToWebhookBody({ object: obj }); + expect(body.object).toEqual({ className: 'Item', objectId: 'o1' }); + }); + + it('should not include object when it is falsy', () => { + const body = requestToWebhookBody({ object: null }); + expect(Object.hasOwn(body, 'object')).toBe(false); + }); + + it('should serialise original via toJSON when available', () => { + const original = { toJSON: () => ({ className: 'Item', objectId: 'o0' }) }; + const body = requestToWebhookBody({ original }); + expect(body.original).toEqual({ className: 'Item', objectId: 'o0' }); + }); + + it('should use plain original when toJSON is absent', () => { + const original = { className: 'Item', objectId: 'o0' }; + const body = requestToWebhookBody({ original }); + expect(body.original).toEqual({ className: 'Item', objectId: 'o0' }); + }); + + it('should include context when defined', () => { + const body = requestToWebhookBody({ context: { source: 
'test' } }); + expect(body.context).toEqual({ source: 'test' }); + }); + + it('should not include context when undefined', () => { + const body = requestToWebhookBody({}); + expect(Object.hasOwn(body, 'context')).toBe(false); + }); + + it('should map query fields correctly', () => { + const query = { + className: 'Item', + _where: { score: { $gt: 10 } }, + _limit: 25, + _skip: 5, + _include: ['author', 'comments'], + _keys: ['title', 'score'], + _order: 'score,-createdAt', + }; + const body = requestToWebhookBody({ query }); + expect(body.query).toEqual({ + className: 'Item', + where: { score: { $gt: 10 } }, + limit: 25, + skip: 5, + include: 'author,comments', + keys: 'title,score', + order: 'score,-createdAt', + }); + }); + + it('should handle query with undefined _include and _keys', () => { + const query = { + className: 'Item', + _where: {}, + _limit: 10, + _skip: 0, + }; + const body = requestToWebhookBody({ query }); + expect(body.query.include).toBeUndefined(); + expect(body.query.keys).toBeUndefined(); + }); + + it('should not include query when undefined', () => { + const body = requestToWebhookBody({}); + expect(Object.hasOwn(body, 'query')).toBe(false); + }); + + it('should include count when defined', () => { + const body = requestToWebhookBody({ count: true }); + expect(body.count).toBe(true); + }); + + it('should include isGet when defined', () => { + const body = requestToWebhookBody({ isGet: true }); + expect(body.isGet).toBe(true); + }); + + it('should include file when defined', () => { + const file = { name: 'photo.png', data: 'base64...' 
}; + const body = requestToWebhookBody({ file }); + expect(body.file).toEqual(file); + }); + + it('should not include file when undefined', () => { + const body = requestToWebhookBody({}); + expect(Object.hasOwn(body, 'file')).toBe(false); + }); + + it('should include fileSize when defined', () => { + const body = requestToWebhookBody({ fileSize: 1024 }); + expect(body.fileSize).toBe(1024); + }); + + it('should include event when defined', () => { + const body = requestToWebhookBody({ event: 'create' }); + expect(body.event).toBe('create'); + }); + + it('should not include event when undefined', () => { + const body = requestToWebhookBody({}); + expect(Object.hasOwn(body, 'event')).toBe(false); + }); + + it('should include requestId when defined', () => { + const body = requestToWebhookBody({ requestId: 'req-99' }); + expect(body.requestId).toBe('req-99'); + }); + + it('should include clients when defined', () => { + const body = requestToWebhookBody({ clients: 5 }); + expect(body.clients).toBe(5); + }); + + it('should include subscriptions when defined', () => { + const body = requestToWebhookBody({ subscriptions: 12 }); + expect(body.subscriptions).toBe(12); + }); + }); + + // ── webhookResponseToResult ─────────────────────────────────────────── + + describe('webhookResponseToResult', () => { + it('should return success value when response is successful', () => { + const result = webhookResponseToResult({ success: { name: 'test' } }); + expect(result).toEqual({ name: 'test' }); + }); + + it('should return success value when it is a primitive', () => { + expect(webhookResponseToResult({ success: 42 })).toBe(42); + }); + + it('should return undefined when success is undefined', () => { + expect(webhookResponseToResult({ success: undefined })).toBeUndefined(); + }); + + it('should throw Parse.Error when response contains error', () => { + const response = { error: { code: 141, message: 'Cloud function failed' } }; + expect(() => 
webhookResponseToResult(response)).toThrowError(Parse.Error); + try { + webhookResponseToResult(response); + } catch (e) { + expect(e.code).toBe(141); + expect(e.message).toBe('Cloud function failed'); + } + }); + }); + + // ── applyBeforeSaveResponse ─────────────────────────────────────────── + + describe('applyBeforeSaveResponse', () => { + let request; + + beforeEach(() => { + request = { + object: { + set: jasmine.createSpy('set'), + }, + }; + }); + + it('should throw Parse.Error when response contains error', () => { + const response = { error: { code: 101, message: 'Object not found' } }; + expect(() => applyBeforeSaveResponse(request, response)).toThrowError(Parse.Error); + try { + applyBeforeSaveResponse(request, response); + } catch (e) { + expect(e.code).toBe(101); + expect(e.message).toBe('Object not found'); + } + }); + + it('should be a no-op when success is an empty object', () => { + applyBeforeSaveResponse(request, { success: {} }); + expect(request.object.set).not.toHaveBeenCalled(); + }); + + it('should set fields from the success object on request.object', () => { + applyBeforeSaveResponse(request, { success: { title: 'Hello', score: 10 } }); + expect(request.object.set).toHaveBeenCalledWith('title', 'Hello'); + expect(request.object.set).toHaveBeenCalledWith('score', 10); + expect(request.object.set).toHaveBeenCalledTimes(2); + }); + + it('should skip objectId field', () => { + applyBeforeSaveResponse(request, { success: { objectId: 'skip-me', title: 'keep' } }); + expect(request.object.set).not.toHaveBeenCalledWith('objectId', jasmine.anything()); + expect(request.object.set).toHaveBeenCalledWith('title', 'keep'); + }); + + it('should skip createdAt field', () => { + applyBeforeSaveResponse(request, { success: { createdAt: '2025-01-01', name: 'ok' } }); + expect(request.object.set).not.toHaveBeenCalledWith('createdAt', jasmine.anything()); + expect(request.object.set).toHaveBeenCalledWith('name', 'ok'); + }); + + it('should skip updatedAt 
field', () => { + applyBeforeSaveResponse(request, { success: { updatedAt: '2025-01-02', name: 'ok' } }); + expect(request.object.set).not.toHaveBeenCalledWith('updatedAt', jasmine.anything()); + expect(request.object.set).toHaveBeenCalledWith('name', 'ok'); + }); + + it('should skip className field', () => { + applyBeforeSaveResponse(request, { success: { className: 'Item', name: 'ok' } }); + expect(request.object.set).not.toHaveBeenCalledWith('className', jasmine.anything()); + expect(request.object.set).toHaveBeenCalledWith('name', 'ok'); + }); + + it('should skip all skip fields at once', () => { + applyBeforeSaveResponse(request, { + success: { + objectId: 'x', + createdAt: 'a', + updatedAt: 'b', + className: 'C', + realField: 'yes', + }, + }); + expect(request.object.set).toHaveBeenCalledTimes(1); + expect(request.object.set).toHaveBeenCalledWith('realField', 'yes'); + }); + + it('should not call set when success is null', () => { + applyBeforeSaveResponse(request, { success: null }); + expect(request.object.set).not.toHaveBeenCalled(); + }); + + it('should not call set when success is a primitive', () => { + applyBeforeSaveResponse(request, { success: 'string-value' }); + expect(request.object.set).not.toHaveBeenCalled(); + }); + }); +}); From 01b1f8d1851efd852e3dbf16e9ec0112dfa8eed8 Mon Sep 17 00:00:00 2001 From: Daniel Date: Tue, 17 Mar 2026 23:27:24 +1100 Subject: [PATCH 14/14] fix tests --- spec/CloudCodeManager.spec.js | 1 + spec/LegacyAdapter.spec.js | 14 +++++++++++--- 2 files changed, 12 insertions(+), 3 deletions(-) diff --git a/spec/CloudCodeManager.spec.js b/spec/CloudCodeManager.spec.js index 63bac49787..7e1f1bfcd8 100644 --- a/spec/CloudCodeManager.spec.js +++ b/spec/CloudCodeManager.spec.js @@ -550,6 +550,7 @@ describe('CloudCodeManager', () => { }); it('continues shutting down other adapters when one fails', async () => { + spyOn(console, 'error').and.callFake(() => {}); const shutdownCalls = []; const adapterA = { name: 'adapter-a', diff 
--git a/spec/LegacyAdapter.spec.js b/spec/LegacyAdapter.spec.js index 04125b59e9..b67d707a7d 100644 --- a/spec/LegacyAdapter.spec.js +++ b/spec/LegacyAdapter.spec.js @@ -43,9 +43,17 @@ describe('LegacyAdapter', () => { }); it('initialize with a valid cloud code file path loads the file', async () => { - const filePath = path.resolve(__dirname, 'cloud/cloudCodeRelativeFile.js'); - const adapter = new LegacyAdapter(filePath); - await expectAsync(adapter.initialize(mockRegistry, mockConfig)).toBeResolved(); + // Use a minimal temp file that doesn't register global cloud functions + const fs = require('fs'); + const tmpFile = path.resolve(__dirname, '_legacyAdapterTestTemp.js'); + fs.writeFileSync(tmpFile, 'module.exports = {};'); + try { + const adapter = new LegacyAdapter(tmpFile); + await expectAsync(adapter.initialize(mockRegistry, mockConfig)).toBeResolved(); + } finally { + fs.unlinkSync(tmpFile); + delete require.cache[require.resolve(tmpFile)]; + } }); it('initialize with a non-existent path throws', async () => {