diff --git a/.claude/CLAUDE.md b/.claude/CLAUDE.md index 9f3912b8..1751bb4d 100644 --- a/.claude/CLAUDE.md +++ b/.claude/CLAUDE.md @@ -100,7 +100,6 @@ The `db-controller.js` is a facade that imports from specialized controller modu - `controllers/release.js`: Object release (immutability) - `controllers/bulk.js`: Bulk create and update operations - `controllers/search.js`: MongoDB text search (searchAsWords, searchAsPhrase) -- `controllers/gog.js`: Gallery of Glosses specific operations (fragments, glosses, expand) - `controllers/utils.js`: Shared utilities (ID generation, slug handling, agent claims) **4. Authentication & Authorization:** @@ -113,7 +112,6 @@ The `db-controller.js` is a facade that imports from specialized controller modu **5. Special Features:** - **Slug IDs:** Optional human-readable IDs via Slug header (e.g., "my-annotation") - **PATCH Override:** X-HTTP-Method-Override header allows POST to emulate PATCH for clients without PATCH support -- **GOG Routes:** Specialized endpoints for Gallery of Glosses project (`/gog/fragmentsInManuscript`, `/gog/glossesInManuscript`) - **Content Negotiation:** Handles both `@id`/`@context` (JSON-LD) and `id` (plain JSON) patterns ### Directory Structure diff --git a/.claude/settings.json b/.claude/settings.json deleted file mode 100644 index 924d8afa..00000000 --- a/.claude/settings.json +++ /dev/null @@ -1,18 +0,0 @@ -{ - "statusLine": { - "type": "command", - "command": "bash /mnt/e/rerum_server_nodejs/.claude/statusline-command.sh" - }, - "env": { - "CLAUDE_CODE_MAX_OUTPUT_TOKENS": "500000", - "CLAUDE_CODE_DISABLE_TERMINAL_TITLE": "1", - "MAX_MCP_OUTPUT_TOKENS": "500000", - "DISABLE_ERROR_REPORTING": "0", - "DISABLE_NON_ESSENTIAL_MODEL_CALLS": "0", - "DISABLE_PROMPT_CACHING": "0", - "MAX_THINKING_TOKENS": "500000", - "BASH_MAX_TIMEOUT_MS": "3000000", - "OPENCODE_DISABLE_PRUNE": "true", - "OPENCODE_DISABLE_AUTOCOMPACT": "true" - } -} diff --git a/.claude/settings.local.json b/.claude/settings.local.json 
deleted file mode 100644 index d3d5a899..00000000 --- a/.claude/settings.local.json +++ /dev/null @@ -1,26 +0,0 @@ -{ - "permissions": { - "allow": [ - "Bash(env)", - "Bash(npm install:*)", - "Bash(npm run:*)", - "Bash(npm start:*)", - "Bash(pm2:*)", - "Bash(git:*)", - "Bash(node:*)", - "Bash(curl:*)", - "Bash(mongosh:*)", - "Read(//tmp/**)", - "Bash(bash:*)", - "Bash(tee:*)", - "Bash(echo:*)", - "Bash(cat:*)", - "Bash(python3:*)", - "WebSearch", - "WebFetch(domain:github.com)", - "WebFetch(domain:raw.githubusercontent.com)" - ], - "deny": [], - "ask": [] - } -} diff --git a/.claude/statusline-command.sh b/.claude/statusline-command.sh deleted file mode 100644 index ed13514a..00000000 --- a/.claude/statusline-command.sh +++ /dev/null @@ -1,51 +0,0 @@ -#!/bin/bash - -# Read JSON input -input=$(cat) - -# Extract data from JSON -cwd=$(echo "$input" | jq -r '.workspace.current_dir') -cost=$(echo "$input" | jq -r '.cost.total_cost_usd // 0') -api_duration=$(echo "$input" | jq -r '.cost.total_api_duration_ms // 0') -total_duration=$(echo "$input" | jq -r '.cost.total_duration_ms // 0') -lines_added=$(echo "$input" | jq -r '.cost.total_lines_added // 0') -lines_removed=$(echo "$input" | jq -r '.cost.total_lines_removed // 0') -model_display=$(echo "$input" | jq -r '.model.display_name // "unknown"') - -# Calculate API duration in seconds -api_duration_sec=$(echo "scale=1; $api_duration / 1000" | bc -l 2>/dev/null || echo "0") - -# Get git branch if in a git repository -git_branch="" -if git -C "$cwd" rev-parse --git-dir > /dev/null 2>&1; then - branch=$(git -C "$cwd" -c core.fileMode=false branch --show-current 2>/dev/null) - if [ -n "$branch" ]; then - git_branch="($branch)" - fi -fi - -# Build the enhanced status line -# Format: (branch) model $cost | API: Xs | +L/-L - -# Cyan for git branch -if [ -n "$git_branch" ]; then - printf '\033[36m%s\033[0m ' "$git_branch" -fi - -# Magenta for model name -printf '\033[35m%s\033[0m ' "$model_display" - -# Bold yellow for cost 
(live updating token usage proxy) -printf '\033[1;33m$%.4f\033[0m' "$cost" - -# Green for API time (shows compute usage) -if [ "$api_duration" != "0" ]; then - printf ' \033[32m| API: %ss\033[0m' "$api_duration_sec" -fi - -# White for code changes (productivity) -if [ "$lines_added" != "0" ] || [ "$lines_removed" != "0" ]; then - printf ' \033[37m| +%s/-%s\033[0m' "$lines_added" "$lines_removed" -fi - -printf '\n' diff --git a/.github/copilot-instructions.md b/.github/copilot-instructions.md index d8512052..6af5cd49 100644 --- a/.github/copilot-instructions.md +++ b/.github/copilot-instructions.md @@ -125,7 +125,7 @@ After making changes, ALWAYS validate these scenarios: ### Key Directories - `/routes/` - Route handlers and API endpoints (Express routes) -- `/controllers/` - Business logic controllers (CRUD operations, GOG-specific controllers) +- `/controllers/` - Business logic controllers (CRUD operations, history, release, bulk operations) - `/database/` - Database connection and utilities (MongoDB integration) - `/auth/` - Authentication middleware (Auth0 JWT handling) - `/public/` - Static files (API.html, context.json, etc.) @@ -145,7 +145,6 @@ After making changes, ALWAYS validate these scenarios: - **Database**: MongoDB for persistent storage, versioned objects - **Static Files**: Served directly from `/public` directory - **CORS**: Fully open ("*") for cross-origin requests -- **Specialized Routes**: Gallery of Glosses (GOG) specific endpoints in `_gog_*.js` files ### Coding Style Guidelines - **Semicolons**: Avoid unnecessary semicolons (e.g., at the end of most lines) diff --git a/.github/workflows/cd_dev.yaml b/.github/workflows/cd_dev.yaml deleted file mode 100644 index 0f6dbca2..00000000 --- a/.github/workflows/cd_dev.yaml +++ /dev/null @@ -1,73 +0,0 @@ -name: RERUM Server v1 Development Deploy on PR to main. 
-on: - pull_request: - branches: main -jobs: - merge-branch: - runs-on: ubuntu-latest - steps: - - uses: actions/checkout@master - - name: Merge with main - uses: devmasx/merge-branch@master - with: - type: now - from_branch: main - target_branch: ${{ github.head_ref }} - github_token: ${{ secrets.BRY_PAT }} - message: Merge main into this branch to deploy to dev for testing. - test: - needs: merge-branch - runs-on: ubuntu-latest - steps: - - uses: actions/checkout@master - - name: Create .env from secrets - run: echo "${{ secrets.DEV_FULL_ENV }}" > .env - - name: Setup Node.js - uses: actions/setup-node@master - with: - node-version: "24" - - name: Cache node modules - uses: actions/cache@master - env: - cache-name: cache-node-modules - with: - path: ~/.npm - key: ${{ runner.os }}-build-${{ env.cache-name }}-${{ - hashFiles('**/package-lock.json') }} - restore-keys: | - ${{ runner.os }}-build-${{ env.cache-name }}- - ${{ runner.os }}-build- - ${{ runner.os }}- - - name: Install dependencies and run the test - run: | - npm install - npm run runtest - deploy: - if: github.event.pull_request.draft == false - needs: - - merge-branch - - test - strategy: - matrix: - node-version: - - 24 - machines: - - vlcdhp02 - runs-on: ${{ matrix.machines }} - steps: - - uses: actions/checkout@master - - name: Deploy the app on the server - run: | - if [[ ! -e /srv/node/logs/rerumv1.txt ]]; then - mkdir -p /srv/node/logs - touch /srv/node/logs/rerumv1.txt - fi - cd /srv/node/v1-node/ - pm2 stop rerum_v1 - git stash - git pull - git checkout ${{ github.head_ref }} - git stash - git pull - npm install - pm2 start -i max bin/rerum_v1.js diff --git a/.github/workflows/cd_prod.yaml b/.github/workflows/cd_prod.yaml deleted file mode 100644 index 70ea945a..00000000 --- a/.github/workflows/cd_prod.yaml +++ /dev/null @@ -1,58 +0,0 @@ -name: RERUM Server v1 Production Deploy on push to main. 
-on: - push: - branches: main -jobs: - test: - runs-on: ubuntu-latest - steps: - - uses: actions/checkout@master - - name: Create .env from secrets - run: echo "${{ secrets.PROD_FULL_ENV }}" > .env - - name: Setup Node.js - uses: actions/setup-node@master - with: - node-version: "24" - - # Speed up subsequent runs with caching - - name: Cache node modules - uses: actions/cache@master - env: - cache-name: cache-node-modules - with: - path: ~/.npm - key: ${{ runner.os }}-build-${{ env.cache-name }}-${{ - hashFiles('**/package-lock.json') }} - restore-keys: | - ${{ runner.os }}-build-${{ env.cache-name }}- - ${{ runner.os }}-build- - ${{ runner.os }}- - - name: Install dependencies and run the test - run: | - npm install - npm run runtest - deploy: - needs: test - strategy: - matrix: - node-version: - - 24 - machines: - - vlcdhprdp02 - runs-on: ${{ matrix.machines }} - steps: - - uses: actions/checkout@master - - name: Deploy the app on the server - run: | - if [[ ! -e /srv/node/logs/rerumv1.txt ]]; then - mkdir -p /srv/node/logs - touch /srv/node/logs/rerumv1.txt - fi - cd /srv/node/v1-node/ - pm2 stop rerum_v1 - git stash - git checkout main - git stash - git pull - npm install - pm2 start -i max bin/rerum_v1.js diff --git a/.github/workflows/claude.yaml b/.github/workflows/claude.yaml deleted file mode 100644 index 596a39a9..00000000 --- a/.github/workflows/claude.yaml +++ /dev/null @@ -1,38 +0,0 @@ -name: Claude Code -on: - issues: - types: [opened] - issue_comment: - types: [created] - pull_request_review: - types: [submitted] - pull_request_review_comment: - types: [created] - -jobs: - claude: - if: | - (github.event_name == 'issue_comment' && contains(github.event.comment.body, '@claude')) || - (github.event_name == 'pull_request_review_comment' && contains(github.event.comment.body, '@claude')) || - (github.event_name == 'pull_request_review' && contains(github.event.review.body, '@claude')) || - (github.event_name == 'issues' && 
(contains(github.event.issue.body, '@claude') || contains(github.event.issue.title, '@claude'))) - runs-on: ubuntu-latest - permissions: - contents: write - pull-requests: write - issues: write - id-token: write - actions: read - steps: - - name: Checkout repository - uses: actions/checkout@v5 - with: - fetch-depth: 0 - - - name: Run Claude Code - id: claude - uses: anthropics/claude-code-action@v1 - with: - anthropic_api_key: ${{ secrets.ANTHROPIC_API_KEY }} - show_full_output: true - # trigger_phrase: "claude do the needful" \ No newline at end of file diff --git a/.npmignore b/.npmignore new file mode 100644 index 00000000..0f36221d --- /dev/null +++ b/.npmignore @@ -0,0 +1,30 @@ +# Documentation +docs/ +*.md +!README.md +CODEOWNERS + +# Test and mock files +__tests__/ +__mocks__/ +**/__tests__/ +**/__mocks__/ +coverage/ +jest.config.js + +# Backup folders / artifacts +backups/ +*.backup +*.bak +*.swp +*.tgz + +# Local configuration files +.env +.env.* +config.local.js +*.local.js + +# Tooling and CI +.github/ +.claude/ \ No newline at end of file diff --git a/LICENSE b/LICENSE index da37f258..ca712e6a 100644 --- a/LICENSE +++ b/LICENSE @@ -1 +1,21 @@ -There is no license. +MIT License + +Copyright (c) 2026 Open Source with SLU + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all +copies or substantial portions of the Software. 
+ +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +SOFTWARE. diff --git a/README.md b/README.md index ae6c6917..e6688071 100644 --- a/README.md +++ b/README.md @@ -26,6 +26,27 @@ Stores important bits of knowledge in structured JSON-LD objects: 1. **Open and Free**—expose all contributions immediately without charge to write or read; 1. **Attributed and Versioned**—always include asserted ownership and transaction metadata so consumers can evaluate trustworthiness and relevance. +### Programmatic usage +This project exposes a single public entry point at the package root (`index.js`). Only a few +functions are exported – everything else lives in internal modules and is intentionally +kept private. Example: + +```js +import { app, createServer, start } from 'rerum_server_nodejs' + +// `app` is the configured Express application; you can pass it to Supertest or reuse it +// inside another HTTP stack. + +const server = createServer(8080) // returns a http.Server but does not listen +server.listen() + +// or simply +start(8080) // convenience helper that both creates and listens +``` + +Consumers no longer need to reach into `./app.js` or other deep paths – if it isn't +exported here it isn't part of the stable API. + ## What we add You will find a `__rerum` property on anything you read from this repository. This is written onto all objects by the server and is not editable by the client applications. 
While applications may assert diff --git a/__tests__/public_api.test.js b/__tests__/public_api.test.js new file mode 100644 index 00000000..6721ea0c --- /dev/null +++ b/__tests__/public_api.test.js @@ -0,0 +1,23 @@ +import { app, createServer, start } from '../index.js' + +describe('public API entry point', () => { + test('exports an express app instance', () => { + expect(app).toBeDefined() + expect(typeof app.use).toBe('function') // express app + }) + + test('createServer returns a http.Server', () => { + const server = createServer(0) // port 0 for ephemeral + expect(server).toBeDefined() + expect(typeof server.listen).toBe('function') + server.close() + }) + + test('start starts the server and returns it', (done) => { + const server = start(0) + server.on('listening', () => { + server.close(() => done()) + }) + server.on('error', (err) => done(err)) + }) +}) diff --git a/__tests__/routes_mounted.test.js b/__tests__/routes_mounted.test.js index edd53716..bbddc2d9 100644 --- a/__tests__/routes_mounted.test.js +++ b/__tests__/routes_mounted.test.js @@ -7,7 +7,8 @@ import request from "supertest" import api_routes from "../routes/api-routes.js" -import app from "../app.js" +// leverage the public entry point instead of a deep path +import app from "../index.js" import fs from "fs" let app_stack = app.router.stack diff --git a/app.js b/app.js index fa6e7900..45161ac3 100644 --- a/app.js +++ b/app.js @@ -3,15 +3,12 @@ import express from 'express' import path from 'path' import cookieParser from 'cookie-parser' -import dotenv from 'dotenv' -dotenv.config() +import config from './config/index.js' import logger from 'morgan' import cors from 'cors' import indexRouter from './routes/index.js' import apiRouter from './routes/api-routes.js' import clientRouter from './routes/client.js' -import _gog_fragmentsRouter from './routes/_gog_fragments_from_manuscript.js'; -import _gog_glossesRouter from './routes/_gog_glosses_from_manuscript.js'; import rest from './rest.js' 
import { fileURLToPath } from 'url' const __filename = fileURLToPath(import.meta.url) @@ -71,7 +68,7 @@ app.use(express.static(path.join(__dirname, 'public'))) * This is without middleware */ app.all('*_', (req, res, next) => { - if(process.env.DOWN === "true"){ + if(config.DOWN === "true"){ res.status(503).json({"message":"RERUM v1 is down for updates or maintenance at this time. We apologize for the inconvenience. Try again later."}) } else{ @@ -85,8 +82,6 @@ app.use('/v1', apiRouter) app.use('/client', clientRouter) -app.use('/gog/fragmentsInManuscript', _gog_fragmentsRouter) -app.use('/gog/glossesInManuscript', _gog_glossesRouter) /** * Handle API errors and warnings RESTfully. All routes that don't end in res.send() will end up here. diff --git a/auth/index.js b/auth/index.js index 695fe9b4..9c9aaaba 100644 --- a/auth/index.js +++ b/auth/index.js @@ -1,6 +1,5 @@ import { auth } from 'express-oauth2-jwt-bearer' -import dotenv from 'dotenv' -dotenv.config() +import config from '../config/index.js' const _tokenError = function (err, req, res, next) { if(!err.code || err.code !== "invalid_token"){ @@ -55,10 +54,10 @@ const generateNewAccessToken = async (req, res, next) => { console.log("RERUM v1 is generating a proxy access token.") const form = { grant_type: 'refresh_token', - client_id: process.env.CLIENT_ID, - client_secret: process.env.CLIENT_SECRET, + client_id: config.CLIENT_ID, + client_secret: config.CLIENT_SECRET, refresh_token: req.body.refresh_token, - redirect_uri:process.env.RERUM_PREFIX + redirect_uri: config.RERUM_PREFIX } try{ // Successful responses from auth 0 look like {"refresh_token":"BLAHBLAH", "access_token":"BLAHBLAH"} @@ -101,10 +100,10 @@ const generateNewRefreshToken = async (req, res, next) => { console.log("RERUM v1 is generating a new refresh token.") const form = { grant_type: 'authorization_code', - client_id: process.env.CLIENT_ID, - client_secret: process.env.CLIENT_SECRET, + client_id: config.CLIENT_ID, + client_secret: 
config.CLIENT_SECRET, code: req.body.authorization_code, - redirect_uri:process.env.RERUM_PREFIX + redirect_uri: config.RERUM_PREFIX } try { // Successful responses from auth 0 look like {"refresh_token":"BLAHBLAH", "access_token":"BLAHBLAH"} @@ -160,7 +159,7 @@ const verifyAccess = (secret) => { * @returns Boolean match between encoded Generator Agent and obj generator */ const isGenerator = (obj, userObj) => { - return userObj[process.env.RERUM_AGENT_CLAIM] === obj.__rerum.generatedBy + return userObj[config.RERUM_AGENT_CLAIM] === obj.__rerum.generatedBy } /** @@ -170,11 +169,11 @@ const isGenerator = (obj, userObj) => { * @returns Boolean for matching ID. */ const isBot = (userObj) => { - return process.env.BOT_AGENT === userObj[process.env.RERUM_AGENT_CLAIM] + return config.BOT_AGENT === userObj[config.RERUM_AGENT_CLAIM] } function READONLY(req, res, next) { - if(process.env.READONLY=="true"){ + if(config.READONLY=="true"){ res.status(503).json({"message":"RERUM v1 is read only at this time. We apologize for the inconvenience. Try again later."}) return } diff --git a/bin/rerum_v1.js b/bin/rerum_v1.js index 8b269269..a1b953d6 100644 --- a/bin/rerum_v1.js +++ b/bin/rerum_v1.js @@ -8,14 +8,13 @@ import app from '../app.js' import debug from 'debug' debug('rerum_server_nodejs:server') import http from "http" -import dotenv from "dotenv" -dotenv.config() +import config from '../config/index.js' /** * Get port from environment and store in Express. */ -const port = process.env.PORT ?? 3001 +const port = config.PORT ?? 3001 app.set('port', port) /** diff --git a/config/index.js b/config/index.js new file mode 100644 index 00000000..37ed7120 --- /dev/null +++ b/config/index.js @@ -0,0 +1,33 @@ +/** + * Centralized environment configuration for the RERUM API. + * Loads variables from .env via dotenv and provides typed defaults. + * All modules should import config from this file instead of + * reading process.env directly. 
+ * + * @module config + * @author joeljoby02 + */ +import dotenv from 'dotenv' +dotenv.config() + +const config = { + MONGO_CONNECTION_STRING: process.env.MONGO_CONNECTION_STRING ?? 'mongodb://localhost:27017', + MONGODBNAME: process.env.MONGODBNAME ?? 'rerum', + MONGODBCOLLECTION: process.env.MONGODBCOLLECTION ?? 'objects', + DOWN: process.env.DOWN ?? 'false', + READONLY: process.env.READONLY ?? 'false', + CLIENT_ID: process.env.CLIENT_ID ?? process.env.CLIENTID ?? '', + CLIENT_SECRET: process.env.CLIENT_SECRET ?? process.env.RERUMSECRET ?? '', + RERUM_PREFIX: process.env.RERUM_PREFIX ?? 'http://localhost:3001/v1/', + RERUM_ID_PREFIX: process.env.RERUM_ID_PREFIX ?? 'http://localhost:3001/v1/id/', + RERUM_AGENT_CLAIM: process.env.RERUM_AGENT_CLAIM ?? 'http://localhost:3001/agent', + RERUM_CONTEXT: process.env.RERUM_CONTEXT ?? 'http://localhost:3001/v1/context.json', + RERUM_API_VERSION: process.env.RERUM_API_VERSION ?? '1.0.0', + BOT_AGENT: process.env.BOT_AGENT ?? '', + AUDIENCE: process.env.AUDIENCE ?? '', + ISSUER_BASE_URL: process.env.ISSUER_BASE_URL ?? '', + BOT_TOKEN: process.env.BOT_TOKEN ?? '', + PORT: parseInt(process.env.PORT ?? process.env.PORT_NUMBER ?? 
3001, 10) +} + +export default config diff --git a/controllers/bulk.js b/controllers/bulk.js index 35e7fcb5..4b7eaa40 100644 --- a/controllers/bulk.js +++ b/controllers/bulk.js @@ -6,8 +6,10 @@ * @author Claude Sonnet 4, cubap, thehabes */ -import { newID, isValidID, db } from '../database/index.js' -import utils from '../utils.js' +import { newID, isValidID, db } from '../database/client.js' +import { configureRerumOptions } from '../versioning.js' +import { isDeleted } from '../predicates.js' +import config from '../config/index.js' import { _contextid, ObjectID, createExpressError, getAgentClaim, parseDocumentID, idNegotiation } from './utils.js' /** @@ -71,17 +73,17 @@ const bulkCreate = async function (req, res, next) { if(Object.keys(d).length === 0) continue const providedID = d?._id const id = isValidID(providedID) ? providedID : ObjectID() - d = utils.configureRerumOptions(generatorAgent, d) + d = configureRerumOptions(generatorAgent, d) // id is also protected in this case, so it can't be set. if(_contextid(d["@context"])) delete d.id d._id = id - d['@id'] = `${process.env.RERUM_ID_PREFIX}${id}` + d['@id'] = `${config.RERUM_ID_PREFIX}${id}` bulkOps.push({ insertOne : { "document" : d }}) } try { let dbResponse = await db.bulkWrite(bulkOps, {'ordered':false}) res.set("Content-Type", "application/json; charset=utf-8") - res.set("Link",dbResponse.result.insertedIds.map(r => `${process.env.RERUM_ID_PREFIX}${r._id}`)) // https://www.rfc-editor.org/rfc/rfc5988 + res.set("Link",dbResponse.result.insertedIds.map(r => `${config.RERUM_ID_PREFIX}${r._id}`)) // https://www.rfc-editor.org/rfc/rfc5988 res.status(201) const estimatedResults = bulkOps.map(f=>{ let doc = f.insertOne.document @@ -148,7 +150,7 @@ const bulkUpdate = async function (req, res, next) { // Update the same thing twice? can vs should. 
// if(encountered.includes(idReceived)) continue encountered.push(idReceived) - if(!idReceived.includes(process.env.RERUM_ID_PREFIX)) continue + if(!idReceived.includes(config.RERUM_ID_PREFIX)) continue let id = parseDocumentID(idReceived) let originalObject try { @@ -158,17 +160,17 @@ const bulkUpdate = async function (req, res, next) { return } if (null === originalObject) continue - if (utils.isDeleted(originalObject)) continue + if (isDeleted(originalObject)) continue id = ObjectID() let context = objectReceived["@context"] ? { "@context": objectReceived["@context"] } : {} - let rerumProp = { "__rerum": utils.configureRerumOptions(generatorAgent, originalObject, true, false)["__rerum"] } + let rerumProp = { "__rerum": configureRerumOptions(generatorAgent, originalObject, true, false)["__rerum"] } delete objectReceived["__rerum"] delete objectReceived["_id"] delete objectReceived["@id"] // id is also protected in this case, so it can't be set. if(_contextid(objectReceived["@context"])) delete objectReceived.id delete objectReceived["@context"] - let newObject = Object.assign(context, { "@id": process.env.RERUM_ID_PREFIX + id }, objectReceived, rerumProp, { "_id": id }) + let newObject = Object.assign(context, { "@id": config.RERUM_ID_PREFIX + id }, objectReceived, rerumProp, { "_id": id }) bulkOps.push({ insertOne : { "document" : newObject }}) if(originalObject.__rerum.history.next.indexOf(newObject["@id"]) === -1){ originalObject.__rerum.history.next.push(newObject["@id"]) @@ -185,7 +187,7 @@ const bulkUpdate = async function (req, res, next) { try { let dbResponse = await db.bulkWrite(bulkOps, {'ordered':false}) res.set("Content-Type", "application/json; charset=utf-8") - res.set("Link", dbResponse.result.insertedIds.map(r => `${process.env.RERUM_ID_PREFIX}${r._id}`)) // https://www.rfc-editor.org/rfc/rfc5988 + res.set("Link", dbResponse.result.insertedIds.map(r => `${config.RERUM_ID_PREFIX}${r._id}`)) // https://www.rfc-editor.org/rfc/rfc5988 res.status(200) 
const estimatedResults = bulkOps.filter(f=>f.insertOne).map(f=>{ let doc = f.insertOne.document diff --git a/controllers/crud.js b/controllers/crud.js index 7702de58..8d88ef40 100644 --- a/controllers/crud.js +++ b/controllers/crud.js @@ -4,8 +4,11 @@ * Basic CRUD operations for RERUM v1 * @author Claude Sonnet 4, cubap, thehabes */ -import { newID, isValidID, db } from '../database/index.js' -import utils from '../utils.js' +import { newID, isValidID, db } from '../database/client.js' +// helpers used by many controllers have been split into focused modules +import { configureWebAnnoHeadersFor, configureLDHeadersFor, configureLastModifiedHeader } from '../headers.js' +import { configureRerumOptions } from '../versioning.js' +import config from '../config/index.js' import { _contextid, idNegotiation, generateSlugId, ObjectID, createExpressError, getAgentClaim, parseDocumentID } from './utils.js' /** @@ -30,7 +33,7 @@ const create = async function (req, res, next) { let generatorAgent = getAgentClaim(req, next) let context = req.body["@context"] ? 
{ "@context": req.body["@context"] } : {} let provided = JSON.parse(JSON.stringify(req.body)) - let rerumProp = { "__rerum": utils.configureRerumOptions(generatorAgent, provided, false, false)["__rerum"] } + let rerumProp = { "__rerum": configureRerumOptions(generatorAgent, provided, false, false)["__rerum"] } if(slug){ rerumProp.__rerum.slug = slug } @@ -42,11 +45,11 @@ const create = async function (req, res, next) { if(_contextid(provided["@context"])) delete provided.id delete provided["@context"] - let newObject = Object.assign(context, { "@id": process.env.RERUM_ID_PREFIX + id }, provided, rerumProp, { "_id": id }) + let newObject = Object.assign(context, { "@id": config.RERUM_ID_PREFIX + id }, provided, rerumProp, { "_id": id }) console.log("CREATE") try { let result = await db.insertOne(newObject) - res.set(utils.configureWebAnnoHeadersFor(newObject)) + res.set(configureWebAnnoHeadersFor(newObject)) newObject = idNegotiation(newObject) newObject.new_obj_state = JSON.parse(JSON.stringify(newObject)) res.location(newObject[_contextid(newObject["@context"]) ? 
"id":"@id"]) @@ -81,7 +84,7 @@ const query = async function (req, res, next) { try { let matches = await db.find(props).limit(limit).skip(skip).toArray() matches = matches.map(o => idNegotiation(o)) - res.set(utils.configureLDHeadersFor(matches)) + res.set(configureLDHeadersFor(matches)) res.json(matches) } catch (error) { next(createExpressError(error)) @@ -99,11 +102,11 @@ const id = async function (req, res, next) { try { let match = await db.findOne({"$or": [{"_id": id}, {"__rerum.slug": id}]}) if (match) { - res.set(utils.configureWebAnnoHeadersFor(match)) + res.set(configureWebAnnoHeadersFor(match)) //Support built in browser caching res.set("Cache-Control", "max-age=86400, must-revalidate") //Support requests with 'If-Modified_Since' headers - res.set(utils.configureLastModifiedHeader(match)) + res.set(configureLastModifiedHeader(match)) // Include current version for optimistic locking const currentVersion = match.__rerum?.isOverwritten ?? "" res.set('Current-Overwritten-Version', currentVersion) diff --git a/controllers/delete.js b/controllers/delete.js index 12aec2ac..bdbde7ce 100644 --- a/controllers/delete.js +++ b/controllers/delete.js @@ -4,8 +4,9 @@ * Delete operations for RERUM v1 * @author Claude Sonnet 4, cubap, thehabes */ -import { newID, isValidID, db } from '../database/index.js' -import utils from '../utils.js' +import { newID, isValidID, db } from '../database/client.js' +import { isDeleted, isReleased, isGenerator } from '../predicates.js' +import config from '../config/index.js' import { createExpressError, getAgentClaim, parseDocumentID, getAllVersions, getAllDescendants } from './utils.js' /** @@ -39,19 +40,19 @@ const deleteObj = async function(req, res, next) { } if (null !== originalObject) { let safe_original = JSON.parse(JSON.stringify(originalObject)) - if (utils.isDeleted(safe_original)) { + if (isDeleted(safe_original)) { err = Object.assign(err, { message: `The object you are trying to delete is already deleted. 
${err.message}`, status: 403 }) } - else if (utils.isReleased(safe_original)) { + else if (isReleased(safe_original)) { err = Object.assign(err, { message: `The object you are trying to delete is released. Fork to make changes. ${err.message}`, status: 403 }) } - else if (!utils.isGenerator(safe_original, agentRequestingDelete)) { + else if (!isGenerator(safe_original, agentRequestingDelete)) { err = Object.assign(err, { message: `You are not the generating agent for this object and so are not authorized to delete it. ${err.message}`, status: 401 @@ -159,7 +160,7 @@ async function healHistoryTree(obj) { throw Error("Could not update all descendants with their new prime value") } } - if (previous_id.indexOf(process.env.RERUM_PREFIX) > -1) { + if (previous_id.indexOf(config.RERUM_PREFIX) > -1) { let previousIdForQuery = parseDocumentID(previous_id) const objToUpdate2 = await db.findOne({"$or":[{"_id": previousIdForQuery}, {"__rerum.slug": previousIdForQuery}]}) if (null !== objToUpdate2) { diff --git a/controllers/gog.js b/controllers/gog.js deleted file mode 100644 index 67dd04de..00000000 --- a/controllers/gog.js +++ /dev/null @@ -1,405 +0,0 @@ -#!/usr/bin/env node - -/** - * Gallery of Glosses (GOG) controller for RERUM operations - * Handles specialized operations for the Gallery of Glosses application - * @author Claude Sonnet 4, cubap, thehabes - */ - -import { newID, isValidID, db } from '../database/index.js' -import utils from '../utils.js' -import { _contextid, ObjectID, createExpressError, getAgentClaim, parseDocumentID, idNegotiation } from './utils.js' - -/** - * THIS IS SPECIFICALLY FOR 'Gallery of Glosses' - * Starting from a ManuscriptWitness URI get all WitnessFragment entities that are a part of the Manuscript. - * The inbound request is a POST request with an Authorization header - * The Bearer Token in the header must be from TinyMatt. - * The body must be formatted correctly - {"ManuscriptWitness":"witness_uri_here"} - * - * TODO? 
Some sort of limit and skip for large responses? - * - * @return The set of {'@id':'123', '@type':'WitnessFragment'} objects that match this criteria, as an Array - * */ -const _gog_fragments_from_manuscript = async function (req, res, next) { - res.set("Content-Type", "application/json; charset=utf-8") - const agent = getAgentClaim(req, next) - const agentID = agent.split("/").pop() - const manID = req.body["ManuscriptWitness"] - const limit = parseInt(req.query.limit ?? 50) - const skip = parseInt(req.query.skip ?? 0) - let err = { message: `` } - // This request can only be made my Gallery of Glosses production apps. - if (agentID !== "61043ad4ffce846a83e700dd") { - err = Object.assign(err, { - message: `Only the Gallery of Glosses can make this request.`, - status: 403 - }) - } - // Must have a properly formed body with a usable value - else if(!manID || !manID.startsWith("http")){ - err = Object.assign(err, { - message: `The body must be JSON like {"ManuscriptWitness":"witness_uri_here"}.`, - status: 400 - }) - } - if (err.status) { - next(createExpressError(err)) - return - } - try { - let matches = [] - const partOfConditions = [ - {"body.partOf.value": manID.replace(/^https?/, "http")}, - {"body.partOf.value": manID.replace(/^https?/, "https")}, - {"body.partOf": manID.replace(/^https?/, "http")}, - {"body.partOf": manID.replace(/^https?/, "https")} - ] - const generatorConditions = [ - {"__rerum.generatedBy": agent.replace(/^https?/, "http")}, - {"__rerum.generatedBy": agent.replace(/^https?/, "https")} - ] - const fragmentTypeConditions = [ - {"witnessFragment.type": "WitnessFragment"}, - {"witnessFragment.@type": "WitnessFragment"} - ] - const annoTypeConditions = [ - {"type": "Annotation"}, - {"@type": "Annotation"}, - {"@type": "oa:Annotation"} - ] - let witnessFragmentPipeline = [ - // Step 1: Detect Annotations bodies noting their 'target' is 'partOf' this Manuscript - { - $match: { - "__rerum.history.next": { "$exists": true, "$size": 0 }, - 
"$and":[ - {"$or": annoTypeConditions}, - {"$or": partOfConditions}, - {"$or": generatorConditions} - ] - } - }, - // Step 1.1 through 1.3 for limit and skip functionality. - { $sort : { _id: 1 } }, - { $skip : skip }, - { $limit : limit }, - // Step 2: Using the target of those Annotations lookup the Entity they represent and store them in a witnessFragment property on the Annotation - // Note that $match had filtered down the alpha collection, so we use $lookup to look through the whole collection again. - // FIXME? a target that is http will not match an @id that is https - { - $lookup: { - from: "alpha", - localField: "target", // Field in `Annotation` referencing `@id` in `alpha` corresponding to a WitnessFragment @id - foreignField: "@id", - as: "witnessFragment" - } - }, - // Step 3: Filter out anything that is not a WitnessFragment entity (and a leaf) - { - $match: { - "witnessFragment.__rerum.history.next": { "$exists": true, "$size": 0 }, - "$or": fragmentTypeConditions - } - }, - // Step 4: Unwrap the Annotation and just return its corresponding WitnessFragment entity - { - $project: { - "_id": 0, - "@id": "$witnessFragment.@id", - "@type": "WitnessFragment" - } - }, - // Step 5: @id values are an Array of 1 and need to be a string instead - { - $unwind: { "path": "$@id" } - } - // Step 6: Cache it? - ] - - // console.log("Start GoG WitnessFragment Aggregator") - const start = Date.now() - let witnessFragments = await db.aggregate(witnessFragmentPipeline).toArray() - .then((fragments) => { - if (fragments instanceof Error) { - throw fragments - } - return fragments - }) - const fragmentSet = new Set(witnessFragments) - witnessFragments = Array.from(fragmentSet.values()) - // Note that a server side expand() is available and could be used to expand these fragments here. 
- // console.log("End GoG WitnessFragment Aggregator") - // console.log(witnessFragments.length+" fragments found for this Manuscript") - // const end = Date.now() - // console.log(`Total Execution time: ${end - start} ms`) - res.set(utils.configureLDHeadersFor(witnessFragments)) - res.json(witnessFragments) - } - catch (error) { - console.error(error) - next(createExpressError(error)) - } -} - -/** - * THIS IS SPECIFICALLY FOR 'Gallery of Glosses' - * Starting from a ManuscriptWitness URI get all Gloss entities that are a part of the Manuscript. - * The inbound request is a POST request with an Authorization header. - * The Bearer Token in the header must be from TinyMatt. - * The body must be formatted correctly - {"ManuscriptWitness":"witness_uri_here"} - * - * TODO? Some sort of limit and skip for large responses? - * - * @return The set of {'@id':'123', '@type':'Gloss'} objects that match this criteria, as an Array - * */ -const _gog_glosses_from_manuscript = async function (req, res, next) { - res.set("Content-Type", "application/json; charset=utf-8") - const agent = getAgentClaim(req, next) - const agentID = agent.split("/").pop() - const manID = req.body["ManuscriptWitness"] - const limit = parseInt(req.query.limit ?? 50) - const skip = parseInt(req.query.skip ?? 0) - let err = { message: `` } - // This request can only be made my Gallery of Glosses production apps. 
- if (!agentID === "61043ad4ffce846a83e700dd") { - err = Object.assign(err, { - message: `Only the Gallery of Glosses can make this request.`, - status: 403 - }) - } - // Must have a properly formed body with a usable value - else if(!manID || !manID.startsWith("http")){ - err = Object.assign(err, { - message: `The body must be JSON like {"ManuscriptWitness":"witness_uri_here"}.`, - status: 400 - }) - } - if (err.status) { - next(createExpressError(err)) - return - } - try { - let matches = [] - const partOfConditions = [ - {"body.partOf.value": manID.replace(/^https?/, "http")}, - {"body.partOf.value": manID.replace(/^https?/, "https")}, - {"body.partOf": manID.replace(/^https?/, "http")}, - {"body.partOf": manID.replace(/^https?/, "https")} - ] - const generatorConditions = [ - {"__rerum.generatedBy": agent.replace(/^https?/, "http")}, - {"__rerum.generatedBy": agent.replace(/^https?/, "https")} - ] - const fragmentTypeConditions = [ - {"witnessFragment.type": "WitnessFragment"}, - {"witnessFragment.@type": "WitnessFragment"} - ] - const annoTypeConditions = [ - {"type": "Annotation"}, - {"@type": "Annotation"}, - {"@type": "oa:Annotation"} - ] - let glossPipeline = [ - // Step 1: Detect Annotations bodies noting their 'target' is 'partOf' this Manuscript - { - $match: { - "__rerum.history.next": { $exists: true, $size: 0 }, - "$and":[ - {"$or": annoTypeConditions}, - {"$or": partOfConditions}, - {"$or": generatorConditions} - ] - } - }, - // Step 1.1 through 1.3 for limit and skip functionality. - { $sort : { _id: 1 } }, - { $skip : skip }, - { $limit : limit }, - // Step 2: Using the target of those Annotations lookup the Entity they represent and store them in a witnessFragment property on the Annotation - // Note that $match had filtered down the alpha collection, so we use $lookup to look through the whole collection again. - // FIXME? 
a target that is http will not match an @id that is https - { - $lookup: { - from: "alpha", - localField: "target", // Field in `Annotation` referencing `@id` in `alpha` corresponding to a WitnessFragment @id - foreignField: "@id", - as: "witnessFragment" - } - }, - // Step 3: Filter Annotations to be only those which are for a WitnessFragment Entity - { - $match: { - "$or": fragmentTypeConditions - } - }, - // Step 4: Unwrap the Annotation and just return its corresponding WitnessFragment entity - { - $project: { - "_id": 0, - "@id": "$witnessFragment.@id", - "@type": "WitnessFragment" - } - }, - // Step 5: @id values are an Array of 1 and need to be a string instead - { - $unwind: { "path": "$@id" } - }, - // Step 6: Using the WitnessFragment ids lookup their references Annotations - // Note that $match had filtered down the alpha collection, so we use $lookup to look through the whole collection again. - { - $lookup: { - from: "alpha", - localField: "@id", // Field in `WitnessFragment` referencing `target` in `alpha` corresponding to a Gloss @id - foreignField: "target", - as: "anno" - } - }, - // Step 7: Filter Annos down to those that are the 'references' Annotations - { - $match: { - "anno.body.references":{ "$exists": true } - } - }, - // Step 7: Collect together the body.references.value[] of those Annotations. Those are the relevant Gloss URIs. 
- { - $project: { - "_id": 0, - "@id": "$anno.body.references.value", - "@type": "Gloss" - } - }, - // Step 8: @id values are an Array of and Array 1 because references.value is an Array - { - $unwind: { "path": "$@id" } - }, - // Step 9: @id values are now an Array of 1 and need to be a string instead - { - $unwind: { "path": "$@id" } - } - ] - - // console.log("Start GoG Gloss Aggregator") - // const start = Date.now() - let glosses = await db.aggregate(glossPipeline).toArray() - .then((fragments) => { - if (fragments instanceof Error) { - throw fragments - } - return fragments - }) - const glossSet = new Set(glosses) - glosses = Array.from(glossSet.values()) - // Note that a server side expand() is available and could be used to expand these fragments here. - // console.log("End GoG Gloss Aggregator") - // console.log(glosses.length+" Glosses found for this Manuscript") - // const end = Date.now() - // console.log(`Total Execution time: ${end - start} ms`) - res.set(utils.configureLDHeadersFor(glosses)) - res.json(glosses) - } - catch (error) { - console.error(error) - next(createExpressError(error)) - } -} - -/** -* Find relevant Annotations targeting a primitive RERUM entity. This is a 'full' expand. -* Add the descriptive information in the Annotation bodies to the primitive object. -* -* Anticipate likely Annotation body formats -* - anno.body -* - anno.body.value -* -* Anticipate likely Annotation target formats -* - target: 'uri' -* - target: {'id':'uri'} -* - target: {'@id':'uri'} -* -* Anticipate likely Annotation type formats -* - {"type": "Annotation"} -* - {"@type": "Annotation"} -* - {"@type": "oa:Annotation"} -* -* @param primitiveEntity - An existing RERUM object -* @param GENERATOR - A registered RERUM app's User Agent -* @param CREATOR - Some kind of string representing a specific user. Often combined with GENERATOR. 
-* @return the expanded entity object -* -*/ -const expand = async function(primitiveEntity, GENERATOR=undefined, CREATOR=undefined){ - if(!primitiveEntity?.["@id"] || primitiveEntity?.id) return primitiveEntity - const targetId = primitiveEntity["@id"] ?? primitiveEntity.id ?? "unknown" - let queryObj = { - "__rerum.history.next": { $exists: true, $size: 0 } - } - let targetPatterns = ["target", "target.@id", "target.id"] - let targetConditions = [] - let annoTypeConditions = [{"type": "Annotation"}, {"@type":"Annotation"}, {"@type":"oa:Annotation"}] - - if (targetId.startsWith("http")) { - for(const targetKey of targetPatterns){ - targetConditions.push({ [targetKey]: targetId.replace(/^https?/, "http") }) - targetConditions.push({ [targetKey]: targetId.replace(/^https?/, "https") }) - } - queryObj["$and"] = [{"$or": targetConditions}, {"$or": annoTypeConditions}] - } - else{ - queryObj["$or"] = annoTypeConditions - queryObj.target = targetId - } - - // Only expand with data from a specific app - if(GENERATOR) { - // Need to check http:// and https:// - const generatorConditions = [ - {"__rerum.generatedBy": GENERATOR.replace(/^https?/, "http")}, - {"__rerum.generatedBy": GENERATOR.replace(/^https?/, "https")} - ] - if (GENERATOR.startsWith("http")) { - queryObj["$and"].push({"$or": generatorConditions }) - } - else{ - // It should be a URI, but this can be a fallback. - queryObj["__rerum.generatedBy"] = GENERATOR - } - } - // Only expand with data from a specific creator - if(CREATOR) { - // Need to check http:// and https:// - const creatorConditions = [ - {"creator": CREATOR.replace(/^https?/, "http")}, - {"creator": CREATOR.replace(/^https?/, "https")} - ] - if (CREATOR.startsWith("http")) { - queryObj["$and"].push({"$or": creatorConditions }) - } - else{ - // It should be a URI, but this can be a fallback. - queryObj["creator"] = CREATOR - } - } - - // Get the Annotations targeting this Entity from the db. Remove _id property. 
- let matches = await db.find(queryObj).toArray() - matches = matches.map(o => { - delete o._id - return o - }) - - // Combine the Annotation bodies with the primitive object - let expandedEntity = JSON.parse(JSON.stringify(primitiveEntity)) - for(const anno of matches){ - const body = anno.body - let keys = Object.keys(body) - if(!keys || keys.length !== 1) return - let key = keys[0] - let val = body[key].value ?? body[key] - expandedEntity[key] = val - } - - return expandedEntity -} - -export { _gog_fragments_from_manuscript, _gog_glosses_from_manuscript, expand } diff --git a/controllers/history.js b/controllers/history.js index f0ad0031..2591fb5a 100644 --- a/controllers/history.js +++ b/controllers/history.js @@ -6,8 +6,8 @@ * @author Claude Sonnet 4, cubap, thehabes */ -import { newID, isValidID, db } from '../database/index.js' -import utils from '../utils.js' +import { newID, isValidID, db } from '../database/client.js' +import { configureLDHeadersFor } from '../headers.js' import { _contextid, ObjectID, createExpressError, getAgentClaim, parseDocumentID, idNegotiation, getAllVersions, getAllAncestors, getAllDescendants } from './utils.js' /** @@ -42,7 +42,7 @@ const since = async function (req, res, next) { let descendants = getAllDescendants(all, obj, []) descendants = descendants.map(o => idNegotiation(o)) - res.set(utils.configureLDHeadersFor(descendants)) + res.set(configureLDHeadersFor(descendants)) res.json(descendants) } @@ -79,7 +79,7 @@ const history = async function (req, res, next) { let ancestors = getAllAncestors(all, obj, []) ancestors = ancestors.map(o => idNegotiation(o)) - res.set(utils.configureLDHeadersFor(ancestors)) + res.set(configureLDHeadersFor(ancestors)) res.json(ancestors) } diff --git a/controllers/overwrite.js b/controllers/overwrite.js index 284fac89..dd30e41d 100644 --- a/controllers/overwrite.js +++ b/controllers/overwrite.js @@ -6,8 +6,9 @@ * @author Claude Sonnet 4, cubap, thehabes */ -import { newID, isValidID, db } from 
'../database/index.js' -import utils from '../utils.js' +import { newID, isValidID, db } from '../database/client.js' +import { isDeleted, isReleased, isGenerator } from '../predicates.js' +import { configureWebAnnoHeadersFor } from '../headers.js' import { _contextid, ObjectID, createExpressError, getAgentClaim, parseDocumentID, idNegotiation } from './utils.js' /** @@ -38,19 +39,19 @@ const overwrite = async function (req, res, next) { status: 404 }) } - else if (utils.isDeleted(originalObject)) { + else if (isDeleted(originalObject)) { err = Object.assign(err, { message: `The object you are trying to overwrite is deleted. ${err.message}`, status: 403 }) } - else if (utils.isReleased(originalObject)) { + else if (isReleased(originalObject)) { err = Object.assign(err, { message: `The object you are trying to overwrite is released. Fork with /update to make changes. ${err.message}`, status: 403 }) } - else if (!utils.isGenerator(originalObject, agentRequestingOverwrite)) { + else if (!isGenerator(originalObject, agentRequestingOverwrite)) { err = Object.assign(err, { message: `You are not the generating agent for this object. You cannot overwrite it. Fork with /update to make changes. ${err.message}`, status: 401 @@ -93,7 +94,7 @@ const overwrite = async function (req, res, next) { } // Include current version in response headers for future optimistic locking res.set('Current-Overwritten-Version', rerumProp["__rerum"].isOverwritten) - res.set(utils.configureWebAnnoHeadersFor(newObject)) + res.set(configureWebAnnoHeadersFor(newObject)) newObject = idNegotiation(newObject) newObject.new_obj_state = JSON.parse(JSON.stringify(newObject)) res.location(newObject[_contextid(newObject["@context"]) ? 
"id":"@id"]) diff --git a/controllers/patchSet.js b/controllers/patchSet.js index 85e97af8..c85128a9 100644 --- a/controllers/patchSet.js +++ b/controllers/patchSet.js @@ -6,8 +6,11 @@ * @author Claude Sonnet 4, cubap, thehabes */ -import { newID, isValidID, db } from '../database/index.js' -import utils from '../utils.js' +import { newID, isValidID, db } from '../database/client.js' +import { isDeleted } from '../predicates.js' +import { configureRerumOptions } from '../versioning.js' +import { configureWebAnnoHeadersFor } from '../headers.js' +import config from '../config/index.js' import { _contextid, ObjectID, createExpressError, getAgentClaim, parseDocumentID, idNegotiation, alterHistoryNext } from './utils.js' /** @@ -43,7 +46,7 @@ const patchSet = async function (req, res, next) { status: 501 }) } - else if (utils.isDeleted(originalObject)) { + else if (isDeleted(originalObject)) { err = Object.assign(err, { message: `The object you are trying to update is deleted. ${err.message}`, status: 403 @@ -71,7 +74,7 @@ const patchSet = async function (req, res, next) { if (Object.keys(objectReceived).length === 0) { //Then you aren't actually changing anything...there are no new properties //Just hand back the object. The resulting of setting nothing is the object from the request body. - res.set(utils.configureWebAnnoHeadersFor(originalObject)) + res.set(configureWebAnnoHeadersFor(originalObject)) originalObject = idNegotiation(originalObject) originalObject.new_obj_state = JSON.parse(JSON.stringify(originalObject)) res.location(originalObject[_contextid(originalObject["@context"]) ? "id":"@id"]) @@ -81,17 +84,17 @@ const patchSet = async function (req, res, next) { } const id = ObjectID() let context = patchedObject["@context"] ? 
{ "@context": patchedObject["@context"] } : {} - let rerumProp = { "__rerum": utils.configureRerumOptions(generatorAgent, originalObject, true, false)["__rerum"] } + let rerumProp = { "__rerum": configureRerumOptions(generatorAgent, originalObject, true, false)["__rerum"] } delete patchedObject["__rerum"] delete patchedObject["_id"] delete patchedObject["@id"] delete patchedObject["@context"] - let newObject = Object.assign(context, { "@id": process.env.RERUM_ID_PREFIX + id }, patchedObject, rerumProp, { "_id": id }) + let newObject = Object.assign(context, { "@id": config.RERUM_ID_PREFIX + id }, patchedObject, rerumProp, { "_id": id }) try { let result = await db.insertOne(newObject) if (alterHistoryNext(originalObject, newObject["@id"])) { //Success, the original object has been updated. - res.set(utils.configureWebAnnoHeadersFor(newObject)) + res.set(configureWebAnnoHeadersFor(newObject)) newObject = idNegotiation(newObject) newObject.new_obj_state = JSON.parse(JSON.stringify(newObject)) res.location(newObject[_contextid(newObject["@context"]) ? 
"id":"@id"]) diff --git a/controllers/patchUnset.js b/controllers/patchUnset.js index c4cf53d7..71dd3723 100644 --- a/controllers/patchUnset.js +++ b/controllers/patchUnset.js @@ -6,8 +6,11 @@ * @author Claude Sonnet 4, cubap, thehabes */ -import { newID, isValidID, db } from '../database/index.js' -import utils from '../utils.js' +import { newID, isValidID, db } from '../database/client.js' +import { isDeleted } from '../predicates.js' +import { configureRerumOptions } from '../versioning.js' +import { configureWebAnnoHeadersFor } from '../headers.js' +import config from '../config/index.js' import { _contextid, ObjectID, createExpressError, getAgentClaim, parseDocumentID, idNegotiation, alterHistoryNext } from './utils.js' /** @@ -42,7 +45,7 @@ const patchUnset = async function (req, res, next) { status: 501 }) } - else if (utils.isDeleted(originalObject)) { + else if (isDeleted(originalObject)) { err = Object.assign(err, { message: `The object you are trying to update is deleted. ${err.message}`, status: 403 @@ -73,7 +76,7 @@ const patchUnset = async function (req, res, next) { if (Object.keys(objectReceived).length === 0) { //Then you aren't actually changing anything...no properties in the request body were removed from the original object. //Just hand back the object. The resulting of unsetting nothing is the object. - res.set(utils.configureWebAnnoHeadersFor(originalObject)) + res.set(configureWebAnnoHeadersFor(originalObject)) originalObject = idNegotiation(originalObject) originalObject.new_obj_state = JSON.parse(JSON.stringify(originalObject)) res.location(originalObject[_contextid(originalObject["@context"]) ? "id":"@id"]) @@ -83,20 +86,20 @@ const patchUnset = async function (req, res, next) { } const id = ObjectID() let context = patchedObject["@context"] ? 
{ "@context": patchedObject["@context"] } : {} - let rerumProp = { "__rerum": utils.configureRerumOptions(generatorAgent, originalObject, true, false)["__rerum"] } + let rerumProp = { "__rerum": configureRerumOptions(generatorAgent, originalObject, true, false)["__rerum"] } delete patchedObject["__rerum"] delete patchedObject["_id"] delete patchedObject["@id"] // id is also protected in this case, so it can't be set. if(_contextid(patchedObject["@context"])) delete patchedObject.id delete patchedObject["@context"] - let newObject = Object.assign(context, { "@id": process.env.RERUM_ID_PREFIX + id }, patchedObject, rerumProp, { "_id": id }) + let newObject = Object.assign(context, { "@id": config.RERUM_ID_PREFIX + id }, patchedObject, rerumProp, { "_id": id }) console.log("PATCH UNSET") try { let result = await db.insertOne(newObject) if (alterHistoryNext(originalObject, newObject["@id"])) { //Success, the original object has been updated. - res.set(utils.configureWebAnnoHeadersFor(newObject)) + res.set(configureWebAnnoHeadersFor(newObject)) newObject = idNegotiation(newObject) newObject.new_obj_state = JSON.parse(JSON.stringify(newObject)) res.location(newObject[_contextid(newObject["@context"]) ? 
"id":"@id"]) diff --git a/controllers/patchUpdate.js b/controllers/patchUpdate.js index c7271bbb..03874912 100644 --- a/controllers/patchUpdate.js +++ b/controllers/patchUpdate.js @@ -6,8 +6,11 @@ * @author Claude Sonnet 4, cubap, thehabes */ -import { newID, isValidID, db } from '../database/index.js' -import utils from '../utils.js' +import { newID, isValidID, db } from '../database/client.js' +import { isDeleted } from '../predicates.js' +import { configureRerumOptions } from '../versioning.js' +import { configureWebAnnoHeadersFor } from '../headers.js' +import config from '../config/index.js' import { _contextid, ObjectID, createExpressError, getAgentClaim, parseDocumentID, idNegotiation, alterHistoryNext } from './utils.js' /** @@ -41,7 +44,7 @@ const patchUpdate = async function (req, res, next) { status: 501 }) } - else if (utils.isDeleted(originalObject)) { + else if (isDeleted(originalObject)) { err = Object.assign(err, { message: `The object you are trying to update is deleted. ${err.message}`, status: 403 @@ -72,7 +75,7 @@ const patchUpdate = async function (req, res, next) { if (Object.keys(objectReceived).length === 0) { //Then you aren't actually changing anything...only @id came through //Just hand back the object. The resulting of patching nothing is the object unchanged. - res.set(utils.configureWebAnnoHeadersFor(originalObject)) + res.set(configureWebAnnoHeadersFor(originalObject)) originalObject = idNegotiation(originalObject) originalObject.new_obj_state = JSON.parse(JSON.stringify(originalObject)) res.location(originalObject[_contextid(originalObject["@context"]) ? "id":"@id"]) @@ -82,20 +85,20 @@ const patchUpdate = async function (req, res, next) { } const id = ObjectID() let context = patchedObject["@context"] ? 
{ "@context": patchedObject["@context"] } : {} - let rerumProp = { "__rerum": utils.configureRerumOptions(generatorAgent, originalObject, true, false)["__rerum"] } + let rerumProp = { "__rerum": configureRerumOptions(generatorAgent, originalObject, true, false)["__rerum"] } delete patchedObject["__rerum"] delete patchedObject["_id"] delete patchedObject["@id"] // id is also protected in this case, so it can't be set. if(_contextid(patchedObject["@context"])) delete patchedObject.id delete patchedObject["@context"] - let newObject = Object.assign(context, { "@id": process.env.RERUM_ID_PREFIX + id }, patchedObject, rerumProp, { "_id": id }) + let newObject = Object.assign(context, { "@id": config.RERUM_ID_PREFIX + id }, patchedObject, rerumProp, { "_id": id }) console.log("PATCH UPDATE") try { let result = await db.insertOne(newObject) if (alterHistoryNext(originalObject, newObject["@id"])) { //Success, the original object has been updated. - res.set(utils.configureWebAnnoHeadersFor(newObject)) + res.set(configureWebAnnoHeadersFor(newObject)) newObject = idNegotiation(newObject) newObject.new_obj_state = JSON.parse(JSON.stringify(newObject)) res.location(newObject[_contextid(newObject["@context"]) ? 
"id":"@id"]) diff --git a/controllers/putUpdate.js b/controllers/putUpdate.js index 177507ac..e256de0a 100644 --- a/controllers/putUpdate.js +++ b/controllers/putUpdate.js @@ -6,8 +6,11 @@ * @author Claude Sonnet 4, cubap, thehabes */ -import { newID, isValidID, db } from '../database/index.js' -import utils from '../utils.js' +import { newID, isValidID, db } from '../database/client.js' +import { isDeleted } from '../predicates.js' +import { configureRerumOptions } from '../versioning.js' +import { configureWebAnnoHeadersFor } from '../headers.js' +import config from '../config/index.js' import { _contextid, ObjectID, createExpressError, getAgentClaim, parseDocumentID, idNegotiation, alterHistoryNext } from './utils.js' /** @@ -26,7 +29,7 @@ const putUpdate = async function (req, res, next) { let generatorAgent = getAgentClaim(req, next) const idReceived = objectReceived["@id"] ?? objectReceived.id if (idReceived) { - if(!idReceived.includes(process.env.RERUM_ID_PREFIX)){ + if(!idReceived.includes(config.RERUM_ID_PREFIX)){ //This is not a regular update. This object needs to be imported, it isn't in RERUM yet. return _import(req, res, next) } @@ -45,7 +48,7 @@ const putUpdate = async function (req, res, next) { status: 404 }) } - else if (utils.isDeleted(originalObject)) { + else if (isDeleted(originalObject)) { err = Object.assign(err, { message: `The object you are trying to update is deleted. ${err.message}`, status: 403 @@ -54,7 +57,7 @@ const putUpdate = async function (req, res, next) { else { id = ObjectID() let context = objectReceived["@context"] ? 
{ "@context": objectReceived["@context"] } : {} - let rerumProp = { "__rerum": utils.configureRerumOptions(generatorAgent, originalObject, true, false)["__rerum"] } + let rerumProp = { "__rerum": configureRerumOptions(generatorAgent, originalObject, true, false)["__rerum"] } delete objectReceived["__rerum"] delete objectReceived["_id"] delete objectReceived["@id"] @@ -62,13 +65,13 @@ const putUpdate = async function (req, res, next) { if(_contextid(objectReceived["@context"])) delete objectReceived.id delete objectReceived["@context"] - let newObject = Object.assign(context, { "@id": process.env.RERUM_ID_PREFIX + id }, objectReceived, rerumProp, { "_id": id }) + let newObject = Object.assign(context, { "@id": config.RERUM_ID_PREFIX + id }, objectReceived, rerumProp, { "_id": id }) console.log("UPDATE") try { let result = await db.insertOne(newObject) if (alterHistoryNext(originalObject, newObject["@id"])) { //Success, the original object has been updated. - res.set(utils.configureWebAnnoHeadersFor(newObject)) + res.set(configureWebAnnoHeadersFor(newObject)) newObject = idNegotiation(newObject) newObject.new_obj_state = JSON.parse(JSON.stringify(newObject)) res.location(newObject[_contextid(newObject["@context"]) ? "id":"@id"]) @@ -113,7 +116,7 @@ async function _import(req, res, next) { let generatorAgent = getAgentClaim(req, next) const id = ObjectID() let context = objectReceived["@context"] ? 
{ "@context": objectReceived["@context"] } : {} - let rerumProp = { "__rerum": utils.configureRerumOptions(generatorAgent, objectReceived, false, true)["__rerum"] } + let rerumProp = { "__rerum": configureRerumOptions(generatorAgent, objectReceived, false, true)["__rerum"] } delete objectReceived["__rerum"] delete objectReceived["_id"] delete objectReceived["@id"] @@ -121,11 +124,11 @@ async function _import(req, res, next) { if(_contextid(objectReceived["@context"])) delete objectReceived.id delete objectReceived["@context"] - let newObject = Object.assign(context, { "@id": process.env.RERUM_ID_PREFIX + id }, objectReceived, rerumProp, { "_id": id }) + let newObject = Object.assign(context, { "@id": config.RERUM_ID_PREFIX + id }, objectReceived, rerumProp, { "_id": id }) console.log("IMPORT") try { let result = await db.insertOne(newObject) - res.set(utils.configureWebAnnoHeadersFor(newObject)) + res.set(configureWebAnnoHeadersFor(newObject)) newObject = idNegotiation(newObject) newObject.new_obj_state = JSON.parse(JSON.stringify(newObject)) res.location(newObject[_contextid(newObject["@context"]) ? 
"id":"@id"]) diff --git a/controllers/release.js b/controllers/release.js index 62f26f04..957081f9 100644 --- a/controllers/release.js +++ b/controllers/release.js @@ -6,8 +6,9 @@ * @author Claude Sonnet 4, cubap, thehabes */ -import { newID, isValidID, db } from '../database/index.js' -import utils from '../utils.js' +import { newID, isValidID, db } from '../database/client.js' +import { isDeleted, isReleased, isGenerator } from '../predicates.js' +import { configureWebAnnoHeadersFor } from '../headers.js' import { _contextid, ObjectID, createExpressError, getAgentClaim, parseDocumentID, idNegotiation, generateSlugId, establishReleasesTree, healReleasesTree } from './utils.js' /** @@ -49,19 +50,19 @@ const release = async function (req, res, next) { let previousReleasedID = safe_original.__rerum.releases.previous let nextReleases = safe_original.__rerum.releases.next - if (utils.isDeleted(safe_original)) { + if (isDeleted(safe_original)) { err = Object.assign(err, { message: `The object you are trying to release is deleted. ${err.message}`, status: 403 }) } - if (utils.isReleased(safe_original)) { + if (isReleased(safe_original)) { err = Object.assign(err, { message: `The object you are trying to release is already released. ${err.message}`, status: 403 }) } - if (!utils.isGenerator(safe_original, agentRequestingRelease)) { + if (!isGenerator(safe_original, agentRequestingRelease)) { err = Object.assign(err, { message: `You are not the generating agent for this object. You cannot release it. ${err.message}`, status: 401 @@ -109,7 +110,7 @@ const release = async function (req, res, next) { if (result.modifiedCount == 0) { //result didn't error out, the action was not performed. Sometimes, this is a neutral thing. Sometimes it is indicative of an error. 
} - res.set(utils.configureWebAnnoHeadersFor(releasedObject)) + res.set(configureWebAnnoHeadersFor(releasedObject)) console.log(releasedObject._id+" has been released") releasedObject = idNegotiation(releasedObject) releasedObject.new_obj_state = JSON.parse(JSON.stringify(releasedObject)) diff --git a/controllers/search.js b/controllers/search.js index 5a688abf..2b4d1fbb 100644 --- a/controllers/search.js +++ b/controllers/search.js @@ -4,8 +4,8 @@ * Search ($search) operations for RERUM v1 * @author thehabes */ -import { db } from '../database/index.js' -import utils from '../utils.js' +import { db } from '../database/client.js' +import { configureLDHeadersFor } from '../headers.js' import { idNegotiation, createExpressError } from './utils.js' /** @@ -283,7 +283,7 @@ const searchAsWords = async function (req, res, next) { const merged = mergeSearchResults(resultsPresi3, resultsPresi2) let results = merged.slice(skip, skip + limit) results = results.map(o => idNegotiation(o)) - res.set(utils.configureLDHeadersFor(results)) + res.set(configureLDHeadersFor(results)) res.json(results) } catch (error) { console.error(error) @@ -371,7 +371,7 @@ const searchAsPhrase = async function (req, res, next) { const merged = mergeSearchResults(resultsPresi3, resultsPresi2) let results = merged.slice(skip, skip + limit) results = results.map(o => idNegotiation(o)) - res.set(utils.configureLDHeadersFor(results)) + res.set(configureLDHeadersFor(results)) res.json(results) } catch (error) { console.error(error) @@ -451,7 +451,7 @@ const searchFuzzily = async function (req, res, next) { const merged = mergeSearchResults(resultsPresi3, resultsPresi2) let results = merged.slice(skip, skip + limit) results = results.map(o => idNegotiation(o)) - res.set(utils.configureLDHeadersFor(results)) + res.set(configureLDHeadersFor(results)) res.json(results) } catch (error) { console.error(error) @@ -548,7 +548,7 @@ const searchWildly = async function (req, res, next) { const merged = 
mergeSearchResults(resultsPresi3, resultsPresi2) let results = merged.slice(skip, skip + limit) results = results.map(o => idNegotiation(o)) - res.set(utils.configureLDHeadersFor(results)) + res.set(configureLDHeadersFor(results)) res.json(results) } catch (error) { console.error(error) @@ -682,7 +682,7 @@ const searchAlikes = async function (req, res, next) { // Apply pagination after merging let results = merged.slice(skip, skip + limit) results = results.map(o => idNegotiation(o)) - res.set(utils.configureLDHeadersFor(results)) + res.set(configureLDHeadersFor(results)) res.json(results) } catch (error) { console.error(error) diff --git a/controllers/utils.js b/controllers/utils.js index 9da47cea..c643abd7 100644 --- a/controllers/utils.js +++ b/controllers/utils.js @@ -4,8 +4,8 @@ * Utility functions for RERUM controllers * @author Claude Sonnet 4, cubap, thehabes */ -import { newID, isValidID, db } from '../database/index.js' -import utils from '../utils.js' +import { newID, isValidID, db } from '../database/client.js' +import config from '../config/index.js' const ObjectID = newID @@ -57,7 +57,7 @@ const idNegotiation = function (resBody) { if(_contextid(resBody["@context"])) { delete resBody["@id"] delete resBody["@context"] - modifiedResBody = Object.assign(context, { "id": process.env.RERUM_ID_PREFIX + _id }, resBody) + modifiedResBody = Object.assign(context, { "id": config.RERUM_ID_PREFIX + _id }, resBody) } return modifiedResBody } @@ -144,7 +144,7 @@ const remove = async function(id) { * The app is forbidden until registered with RERUM. Access tokens are encoded with the agent. 
*/ function getAgentClaim(req, next) { - const claimKeys = [process.env.RERUM_AGENT_CLAIM, "http://devstore.rerum.io/v1/agent", "http://store.rerum.io/agent"] + const claimKeys = [config.RERUM_AGENT_CLAIM, "http://devstore.rerum.io/v1/agent", "http://store.rerum.io/agent"] let agent = "" for (const claimKey of claimKeys) { agent = req.user[claimKey] diff --git a/database/client.js b/database/client.js new file mode 100644 index 00000000..3b167ff5 --- /dev/null +++ b/database/client.js @@ -0,0 +1,47 @@ +/** + * Centralized MongoDB client for the RERUM API. + * Provides a single shared MongoClient instance, connection + * management, and collection access for the application. + * + * @module database/client + * @author joeljoby02 + */ +import { MongoClient, ObjectId } from 'mongodb' +import config from '../config/index.js' + +// Single shared Mongo client for the entire application +const client = new MongoClient(config.MONGO_CONNECTION_STRING) + +// connect immediately; callers may import `connect` if they want to await it +const connect = async () => { + await client.connect() + console.dir({ + db: config.MONGODBNAME, + coll: config.MONGODBCOLLECTION + }) +} + +// collection helper +const db = client.db(config.MONGODBNAME)?.collection(config.MONGODBCOLLECTION) + +// simple utilities previously scattered in index.js +const newID = () => new ObjectId().toHexString() +const isValidID = (id) => ObjectId.isValid(id) + +const connected = async function () { + await client.db('admin').command({ ping: 1 }).catch(err => err) + return true +} + +// ensure connection is attempted at module load time (as before) +connect().catch(console.dir) + +export { + client, + connect, + db, + newID, + isValidID, + connected, + ObjectId +} diff --git a/database/index.js b/database/index.js index cf8d374a..39c444dd 100644 --- a/database/index.js +++ b/database/index.js @@ -1,57 +1,20 @@ -import { MongoClient, ObjectId } from 'mongodb' -import dotenv from "dotenv" -dotenv.config() - -const 
client = new MongoClient(process.env.MONGO_CONNECTION_STRING) -const newID = () => new ObjectId().toHexString() -const isValidID = (id) => ObjectId.isValid(id) -const connected = async function () { - // Send a ping to confirm a successful connection - await client.db("admin").command({ ping: 1 }).catch(err => err) - return true -} -const db = client.db(process.env.MONGODBNAME)?.collection(process.env.MONGODBCOLLECTION) -const connect = async () => { - await client.connect() - console.dir({ - db : process.env.MONGODBNAME, - coll : process.env.MONGODBCOLLECTION - }) -} -connect().catch(console.dir) - /** - * Find a single record based on a query object. - * @param {JSON} matchDoc Query Object to match properties. - * @param {JSON} options Just mongodb passthru for now - * @param {function} callback Callback function if needed - * @returns Single matched document or `null` if there is none found. - * @throws MongoDB error if matchDoc is malformed or server is unreachable; E11000 duplicate key error collection + * Database module backward compatibility layer. + * + * This module re-exports all symbols from database/client.js for backward + * compatibility with legacy code. New code should import directly from + * database/client.js instead. This layer maintains a single entry point + * for any external consumers but does not add new functionality. 
+ * + * @module database/index */ -function getMatching(matchDoc, options, callback) { - return db.findOne(matchDoc, options, (err, doc) => { - if (typeof callback === 'function') return callback(err, doc) - if (err) throw err - return doc - }) -} - -function isObject(obj) { - return obj?.constructor == Object -} - -function isValidURL(url) { - try { - new URL(url) - return true - } catch (_) { - return false - } -} export { + client, + connect, + db, newID, isValidID, connected, - db -} + ObjectId +} from './client.js' diff --git a/db-controller.js b/db-controller.js index 07aa6f65..ad50925b 100644 --- a/db-controller.js +++ b/db-controller.js @@ -15,7 +15,6 @@ import { putUpdate, patchUpdate, patchSet, patchUnset, overwrite } from './contr import { bulkCreate, bulkUpdate } from './controllers/bulk.js' import { since, history, idHeadRequest, queryHeadRequest, sinceHeadRequest, historyHeadRequest } from './controllers/history.js' import { release } from './controllers/release.js' -import { _gog_fragments_from_manuscript, _gog_glosses_from_manuscript, expand } from './controllers/gog.js' export default { index, @@ -41,8 +40,5 @@ export default { sinceHeadRequest, historyHeadRequest, remove, - _gog_glosses_from_manuscript, - _gog_fragments_from_manuscript, idNegotiation, - expand } diff --git a/headers.js b/headers.js new file mode 100644 index 00000000..58efdabe --- /dev/null +++ b/headers.js @@ -0,0 +1,80 @@ +import { isLD, isContainerType } from './predicates.js' + +/** + * Mint the HTTP response headers required by REST best practices and/or Web Annotation standards. + * return a JSON object. keys are header names, values are header values. 
+ */ +const configureWebAnnoHeadersFor = function(obj){ + let headers = {} + if(isLD(obj)){ + headers["Content-Type"] = "application/ld+json;charset=utf-8;profile=\"http://www.w3.org/ns/anno.jsonld\"" + } + if(isContainerType(obj)){ + headers["Link"] = "application/ld+json;charset=utf-8;profile=\"http://www.w3.org/ns/anno.jsonld\"" + } + else{ + headers["Link"] = "; rel=\"type\"" + } + headers["Allow"] = "GET,OPTIONS,HEAD,PUT,PATCH,DELETE,POST" + return headers +} + +/** + * Mint the HTTP response headers required by REST best practices and/or Linked Data standards. + * This is specifically for responses that are not Web Annotation compliant (getByProperties, getAllDescendants, getAllAncestors) + * They respond with Arrays (which have no @context), but they still need the JSON-LD support headers. + * return a JSON object. keys are header names, values are header values. + */ +const configureLDHeadersFor = function(obj){ + //Note that the optimal situation would be to be able to detect the LD-ness of this object + //What we have are the arrays returned from the aforementioned getters (/query, /since, /history) + //We know we want them to be LD and that they likely contain LD things, but the arrays don't have an @context + let headers = {} + /** + if(isLD(obj)){ + headers["Content-Type"] = 'application/ld+json;charset=utf-8;profile="http://www.w3.org/ns/anno.jsonld"' + } + else { + // This breaks Web Annotation compliance, but allows us to return requested + // objects without misrepresenting the content. + headers["Content-Type"] = "application/json;charset=utf-8;" + } + */ + headers["Allow"] = "GET,OPTIONS,HEAD,PUT,PATCH,DELETE,POST" + headers["Content-Type"] = 'application/ld+json;charset=utf-8;profile="http://www.w3.org/ns/anno.jsonld"' + headers["Link"] = '; rel="http://www.w3.org/ns/json-ld#context"; type="application/ld+json"' + return headers +} + +/** + * Mint the Last-Modified header for /v1/id/ responses.
+ * It should be displayed like Mon, 14 Mar 2022 22:44:42 GMT + * The data knows it like 2022-03-14T17:44:42.721 + * return a JSON object. keys are header names, values are header values. + */ +const configureLastModifiedHeader = function(obj){ + let date = "" + if(obj.__rerum){ + if(obj.__rerum.isOverwritten !== ""){ + date = obj.__rerum.isOverwritten + } + else{ + date = obj.__rerum.createdAt + } + } + else if(obj.__deleted){ + date = obj.__deleted.time + } + //Note that dates like 2021-05-26T10:39:19.328 have been rounded to 2021-05-26T10:39:19 in browser headers. Account for that here. + if(typeof date === "string" && date.includes(".")){ + //If-Modified-Since and Last-Modified headers are rounded. Wed, 26 May 2021 10:39:19.629 GMT becomes Wed, 26 May 2021 10:39:19 GMT. + date = date.split(".")[0] + } + return {"Last-Modified":new Date(date).toUTCString()} +} + +export { + configureWebAnnoHeadersFor, + configureLDHeadersFor, + configureLastModifiedHeader +} diff --git a/index.js b/index.js new file mode 100644 index 00000000..693ae48f --- /dev/null +++ b/index.js @@ -0,0 +1,70 @@ +import http from 'http' +import app from './app.js' + +/** + * Express application instance used throughout the project. Exported + * primarily for testing or embedding inside another server. + * + * ```js + * import { app } from 'rerum_server' + * ``` + */ +export { app } + +/** + * Default export is the express app largely for backwards compatibility + * with consumers that do `import app from 'rerum_server'`. + */ +export default app + +/** + * Helper that creates an HTTP server for the configured express app. + * The returned server is **not** listening yet; caller may attach + * additional listeners or configure timeouts before calling + * `server.listen(...)`. 
+ * + * @param {number|string} [port=process.env.PORT??3001] port to assign to + * the express app and eventually listen on + * @returns {import('http').Server} http server instance + */ +export function createServer(port = process.env.PORT ?? 3001) { + app.set('port', port) + const server = http.createServer(app) + + server.keepAliveTimeout = 8 * 1000 + server.headersTimeout = 8.5 * 1000 + + return server +} + +/** + * Convenience function to start the server immediately. Returns the + * server instance so callers can close it in tests or hook events. + * + * @param {number|string} [port] optional port override + * @returns {import('http').Server} + */ +export function start(port) { + const p = port ?? process.env.PORT ?? 3001 + const server = createServer(p) + server.listen(p) + server.on('listening', () => { + console.log('LISTENING ON ' + p) + }) + server.on('error', (error) => { + if (error.syscall !== 'listen') throw error + switch (error.code) { + case 'EACCES': + console.error(`Port ${p} requires elevated privileges`) + process.exit(1) + break + case 'EADDRINUSE': + console.error(`Port ${p} is already in use`) + process.exit(1) + break + default: + throw error + } + }) + return server +} diff --git a/package.json b/package.json index 30f205b0..057182f6 100644 --- a/package.json +++ b/package.json @@ -1,11 +1,16 @@ { "name": "rerum_server_nodejs", "type": "module", - "version": "0.0.0", - "private": true, - "description": "Rerum API server for database access.", + "version": "1.0.0", + "main": "index.js", + "description": "Rerum API v1 server for storing and querying JSON-LD objects, annotations, and IIIF data.", + "bin": { + "rerum_server_nodejs": "./bin/rerum_v1.js" + }, "keywords": [ "rerum", + "api", + "jsonld", "annotation", "iiif", "repository", @@ -16,17 +21,23 @@ "mongodb" ], "homepage": "https://store.rerum.io", - "license": "UNLICENSED", + "license": "MIT", "author": "Research Computing Group (https://slu.edu)", - "repository": 
"github:CenterForDigitalHumanities/rerum_server_nodejs", + "repository": { + "type": "git", + "url": "git+https://github.com/CenterForDigitalHumanities/rerum_server_nodejs.git" + }, + "bugs": { + "url": "https://github.com/CenterForDigitalHumanities/rerum_server_nodejs/issues" + }, "engines": { "node": ">=24.12.0", "npm": ">=11.7.0" }, "scripts": { "start": "node ./bin/rerum_v1.js", - "test": "jest", - "runtest": "node --experimental-vm-modules node_modules/jest/bin/jest.js" + "test": "node --experimental-vm-modules node_modules/jest/bin/jest.js", + "runtest": "npm test" }, "dependencies": { "cookie-parser": "~1.4.7", diff --git a/predicates.js b/predicates.js new file mode 100644 index 00000000..0c3d451a --- /dev/null +++ b/predicates.js @@ -0,0 +1,75 @@ +/** + * Check this object for deleted status. deleted objects in RERUM look like {"@id":"{some-id}", __deleted:{object properties}} + */ +const isDeleted = function(obj){ + return obj.hasOwnProperty("__deleted") +} + +/** + * Check this object for released status. Released objects in RERUM look like {"@id":"{some-id}", __rerum:{"isReleased" : "ISO-DATE-TIME"}} + */ +const isReleased = function(obj){ + let bool = + (obj.hasOwnProperty("__rerum") && + obj.__rerum.hasOwnProperty("isReleased") && + obj.__rerum.isReleased !== "") + return bool +} + +/** + * Check to see if the agent from the request (req.user had decoded token) matches the generating agent of the object in mongodb. + */ +const isGenerator = function(origObj, changeAgent){ + //If the object in mongo does not have a generator, something is wrong. However, there is no permission to check; no generator is the same as any generator. + const generatingAgent = origObj.__rerum.generatedBy ?? changeAgent + //bots get a free pass through + return generatingAgent === changeAgent +} + +/** + * Check if this object is of a known container type. + * If so, it requires a different header than a stand-alone resource object.
+ * return boolean + */ +const isContainerType = function(obj){ + let answer = false + let typestring = obj["@type"] ?? obj.type ?? "" + const knownContainerTypes = [ + "ItemList", + "AnnotationPage", + "AnnotationList", + "AnnotationCollection", + "Sequence", + "Range", + "Canvas", + "List", + "Set", + "Collection" + ] + for(const t of knownContainerTypes){ + //Dang those pesky prefixes...circumventing exact match for now + if(typestring.includes(t)){ + answer = true + break + } + } + return answer +} + +/** + * Check if this object is a Linked Data object. + * If so, it will have an @context -(TODO) that resolves! + * return boolean + */ +const isLD = function(obj){ + //Note this is always false if obj is an array, like /since, /history or /query provide as a return. + return Array.isArray(obj) ? false : obj["@context"] ? true : false +} + +export { + isDeleted, + isReleased, + isGenerator, + isContainerType, + isLD +} diff --git a/public/index.html b/public/index.html index 28eee8c7..10692e6e 100644 --- a/public/index.html +++ b/public/index.html @@ -18,18 +18,19 @@ color: green; font-weight: bold; } - + #intro { color: #979A9E; font-size: 12pt; } - + body { font-family: 'Open Sans', sans-serif; color: #979A9E; background-color: #2F353E; + padding: 20px; } - + input[type="text"] { background-color: #ccc; color: black; @@ -37,48 +38,58 @@ font-family: serif; font-size: 14pt; } - + + .column { + float: left; + width: 33%; + } + + .column2 { + float: right; + width: 9%; + } + h1 { cursor: pointer; font-weight: 300; font-family: 'Raleway', sans-serif; margin-bottom: 10px; } - + .navbar-brand { float: none; font-size: 2rem; line-height: 1.5; margin-bottom: 20px; } - + #login { display: none; } - + .panel-body { color: initial; } - + .panel { word-break: break-word; } - + .status_header { color: gray; } - + #a_t { height: 170px; margin-bottom: 8px; } - + #a_t, #r_t_4_a_t, #new_refresh_token { margin-bottom: 8px; } - + #code_for_refresh_token { margin-bottom: -13px; } 
@@ -106,287 +117,424 @@

-
Application Registration
-
-

- Interacting with RERUM requires server-to-server communication, so we suggest the registrant be the - application developer. - You may want to - learn more about the concepts around RERUM - before reading the API. -

-

- If you are here for the first time and think you want to use RERUM, please - read the API first. -

-

- If you like what you read in our API documentation - and want to begin using RERUM as a back stack service please register by clicking below. - Be prepared to be routed to Auth0 (don't know why? - Read the API). -

-

- After registering, you will be returned to this page with an Auth0 Authorization code. Use that code at - the bottom of this page to get a refresh token - and an access token so you can use the API. You may notice the page has already populated known - information for you. -

-
- - -
-
Auth0 Authorization Status
-
-

- If you believe you are already registered and want to check on your status, follow the prompts below. - You will be routed to Auth0 so we can verify who you are. -

-
- Auth0 Status - UNKNOWN + +
+
+
+
Application Registration
+
+

+ Interacting with RERUM requires server-to-server communication, so we suggest the registrant be the + application developer. + You may want to + learn more about the concepts around RERUM + before reading the API. +

+

+ If you are here for the first time and think you want to use RERUM, please + read the API first. +

+

+ If you like what you read in our API documentation + and want to begin using RERUM as a back stack service please register by clicking below. + Be prepared to be routed to Auth0 (don't know why? + Read the API). +

+

+ After registering, you will be returned to this page with an Auth0 Authorization code. +

+
+
- -
-
Test RERUM API Access
-
-

- Provide your access token below to check if it is still valid. If so, your access to RERUM will be - authorized. Otherwise, you will see an "unauthorized" message. -

-

- If the token you have is not working, it may be because access tokens expire every 30 days. You can use - your refresh token to get a new access token. -

- -
- RERUM status - UNKNOWN + + -
-
Get A New Access Token
-
-

- Your access token to use RERUM expires every 30 days. Has it been that long or longer? Provide your - refresh token below to get a new access token. - If you lost your refresh token, you can get a new one in "Get A New Refresh Token" below. -

- -
- Status - UNKNOWN +
+
Get A New Refresh Token
+
+

+ You can supply a valid Auth0 Authorization Code to get a new refresh token. Use "Check my Authorization + Status with Auth0" to get a valid code. +

+ Enter your code: +
+ + +
+
+ Status + UNKNOWN +
+
+ +
+
+ +
+
+
+
- -
-
Get A New Refresh Token
-
-

- You can supply a valid Auth0 Authorization Code to get a new refresh token. Use "Check my Authorization - Status with Auth0" to get a valid code. -

- Enter your code: -
- -
- Status - UNKNOWN +
+
Get A New Access Token
+
+

+ Your access token to use RERUM expires every 30 days. Has it been that long or longer? Provide your + refresh token below to get a new access token. + If you lost your refresh token, you can get a new one in "Get A New Refresh Token" below. + The generated access token will be displayed in the box above +

+ +
+ Status + UNKNOWN +
+
+
- + diff --git a/rest.js b/rest.js index 187535bd..1d9d9ec8 100644 --- a/rest.js +++ b/rest.js @@ -19,6 +19,8 @@ * * The error handler sits a level up, so do not res.send() here. Just give back a boolean */ +import config from './config/index.js' + const checkPatchOverrideSupport = function (req, res) { const override = req.header("X-HTTP-Method-Override") return undefined !== override && override === "PATCH" @@ -68,7 +70,7 @@ Token: ${token} ` else { error.message += ` The request does not contain an "Authorization" header and so is Unauthorized. Please include a token with your requests -like "Authorization: Bearer ". Make sure you have registered at ${process.env.RERUM_PREFIX}.` +like "Authorization: Bearer ". Make sure you have registered at ${config.RERUM_PREFIX}.` } break case 403: @@ -80,10 +82,11 @@ Token: ${token}` } else { //If there was no Token, this would be a 401. If you made it here, you didn't REST. - err.message += ` + error.message += ` You are Forbidden from performing this action. The request does not contain an "Authorization" header. -Make sure you have registered at ${process.env.RERUM_PREFIX}. ` +Make sure you have registered at ${config.RERUM_PREFIX}. ` } + break case 404: error.message += ` The requested web page or resource could not be found.` diff --git a/routes/__tests__/create.test.js b/routes/__tests__/create.test.js index 788247f9..cb2eeae6 100644 --- a/routes/__tests__/create.test.js +++ b/routes/__tests__/create.test.js @@ -1,10 +1,11 @@ import { jest } from "@jest/globals" import express from "express" import request from "supertest" -import { db } from '../../database/index.js' +import { db } from '../../database/client.js' import controller from '../../db-controller.js' +import config from '../../config/index.js' -const rerum_uri = `${process.env.RERUM_ID_PREFIX}123456` +const rerum_uri = `${config.RERUM_ID_PREFIX}123456` // Here is the auth mock so we get a req.user and the controller can function without a NPE. 
const addAuth = (req, res, next) => { diff --git a/routes/__tests__/crud_routes_function.txt b/routes/__tests__/crud_routes_function.txt index 511c3caa..da22d6de 100644 --- a/routes/__tests__/crud_routes_function.txt +++ b/routes/__tests__/crud_routes_function.txt @@ -8,7 +8,7 @@ import request from 'supertest' //Fun fact, if you don't require app, you don't get coverage even though the tests run just fine. -import app from '../../app.js' +import app from '../../index.js' // use public API instead of deep path //This is so we can do Mongo specific things with the objects in this test, like actually remove them from the db. import controller from '../../db-controller.js' diff --git a/routes/__tests__/idNegotiation.test.js b/routes/__tests__/idNegotiation.test.js index c9b5c33a..0f7cc60e 100644 --- a/routes/__tests__/idNegotiation.test.js +++ b/routes/__tests__/idNegotiation.test.js @@ -1,30 +1,30 @@ import { jest } from "@jest/globals" -import dotenv from "dotenv" import controller from '../../db-controller.js' +import config from '../../config/index.js' it("Functional '@id-id' negotiation on objects returned.", async () => { let negotiate = { "@context": "http://iiif.io/api/presentation/3/context.json", "_id": "example", - "@id": `${process.env.RERUM_ID_PREFIX}example`, + "@id": `${config.RERUM_ID_PREFIX}example`, "test": "item" } negotiate = controller.idNegotiation(negotiate) expect(negotiate._id).toBeUndefined() expect(negotiate["@id"]).toBeUndefined() - expect(negotiate.id).toBe(`${process.env.RERUM_ID_PREFIX}example`) + expect(negotiate.id).toBe(`${config.RERUM_ID_PREFIX}example`) expect(negotiate.test).toBe("item") let nonegotiate = { "@context":"http://example.org/context.json", "_id": "example", - "@id": `${process.env.RERUM_ID_PREFIX}example`, + "@id": `${config.RERUM_ID_PREFIX}example`, "id": "test_example", "test":"item" } nonegotiate = controller.idNegotiation(nonegotiate) expect(nonegotiate._id).toBeUndefined() - 
expect(nonegotiate["@id"]).toBe(`${process.env.RERUM_ID_PREFIX}example`) + expect(nonegotiate["@id"]).toBe(`${config.RERUM_ID_PREFIX}example`) expect(nonegotiate.id).toBe("test_example") expect(nonegotiate.test).toBe("item") }) diff --git a/routes/__tests__/overwrite-optimistic-locking.test.txt b/routes/__tests__/overwrite-optimistic-locking.test.txt index 3ef6486e..fed385e0 100644 --- a/routes/__tests__/overwrite-optimistic-locking.test.txt +++ b/routes/__tests__/overwrite-optimistic-locking.test.txt @@ -7,7 +7,7 @@ const mockFindOne = jest.fn() const mockReplaceOne = jest.fn() // Mock the database module -jest.mock('../../database/index.js', () => ({ +jest.mock('../../database/client.js', () => ({ db: { findOne: mockFindOne, replaceOne: mockReplaceOne diff --git a/routes/__tests__/overwrite.test.txt b/routes/__tests__/overwrite.test.txt index 129d7ea0..5bff29e8 100644 --- a/routes/__tests__/overwrite.test.txt +++ b/routes/__tests__/overwrite.test.txt @@ -1,5 +1,5 @@ import request from 'supertest' -import app from '../../app.js' +import app from '../../index.js' // public entry point import { jest } from '@jest/globals' // Mock the database and auth modules diff --git a/routes/__tests__/patch.test.js b/routes/__tests__/patch.test.js index a4d9ebc1..196dfd3b 100644 --- a/routes/__tests__/patch.test.js +++ b/routes/__tests__/patch.test.js @@ -1,10 +1,9 @@ import { jest } from "@jest/globals" -import dotenv from "dotenv" -dotenv.config() // Only real way to test an express route is to mount it and call it so that we can use the req, res, next. import express from "express" import request from "supertest" import controller from '../../db-controller.js' +import config from '../../config/index.js' // Here is the auth mock so we get a req.user and the controller can function without a NPE. 
const addAuth = (req, res, next) => { @@ -23,7 +22,7 @@ const unique = new Date(Date.now()).toISOString().replace("Z", "") it("'/patch' route functions", async () => { const response = await request(routeTester) .patch('/patch') - .send({"@id":`${process.env.RERUM_ID_PREFIX}11111`, "RERUM Update Test":unique}) + .send({"@id":`${config.RERUM_ID_PREFIX}11111`, "RERUM Update Test":unique}) .set("Content-Type", "application/json") .then(resp => resp) .catch(err => err) diff --git a/routes/__tests__/set.test.js b/routes/__tests__/set.test.js index 1559356c..9846cbe7 100644 --- a/routes/__tests__/set.test.js +++ b/routes/__tests__/set.test.js @@ -1,11 +1,11 @@ import { jest } from "@jest/globals" -import dotenv from "dotenv" -dotenv.config() - -// Only real way to test an express route is to mount it and call it so that we can use the req, res, next. +// dotenv.config() is no longer needed; config module handles environment loading import express from "express" import request from "supertest" import controller from '../../db-controller.js' +import config from '../../config/index.js' + +// Only real way to test an express route is to mount it and call it so that we can use the req, res, next. // Here is the auth mock so we get a req.user and the controller can function without a NPE. 
const addAuth = (req, res, next) => { @@ -24,12 +24,12 @@ const unique = new Date(Date.now()).toISOString().replace("Z", "") it("'/set' route functions", async () => { const response = await request(routeTester) .patch("/set") - .send({"@id":`${process.env.RERUM_ID_PREFIX}11111`, "test_set":unique}) + .send({"@id":`${config.RERUM_ID_PREFIX}11111`, "test_set":unique}) .set('Content-Type', 'application/json; charset=utf-8') .then(resp => resp) .catch(err => err) expect(response.header.location).toBe(response.body["@id"]) - expect(response.headers["location"]).not.toBe(`${process.env.RERUM_ID_PREFIX}11111`) + expect(response.headers["location"]).not.toBe(`${config.RERUM_ID_PREFIX}11111`) expect(response.statusCode).toBe(200) expect(response.body._id).toBeUndefined() expect(response.body["test_set"]).toBe(unique) diff --git a/routes/__tests__/unset.test.js b/routes/__tests__/unset.test.js index e3c8c97c..48244654 100644 --- a/routes/__tests__/unset.test.js +++ b/routes/__tests__/unset.test.js @@ -1,11 +1,11 @@ import { jest } from "@jest/globals" -import dotenv from "dotenv" -dotenv.config() +// dotenv.config() is no longer needed; config module handles environment loading // Only real way to test an express route is to mount it and call it so that we can use the req, res, next. import express from "express" import request from "supertest" import controller from '../../db-controller.js' +import config from '../../config/index.js' // Here is the auth mock so we get a req.user so controller.create can function without a NPE. 
const addAuth = (req, res, next) => { @@ -23,7 +23,7 @@ routeTester.use("/unset", [addAuth, controller.patchUnset]) it("'/unset' route functions", async () => { const response = await request(routeTester) .patch("/unset") - .send({"@id":`${process.env.RERUM_ID_PREFIX}11111`, "test_obj":null}) + .send({"@id":`${config.RERUM_ID_PREFIX}11111`, "test_obj":null}) .set('Content-Type', 'application/json; charset=utf-8') .then(resp => resp) .catch(err => err) diff --git a/routes/__tests__/update.test.js b/routes/__tests__/update.test.js index df5e21a3..c1998897 100644 --- a/routes/__tests__/update.test.js +++ b/routes/__tests__/update.test.js @@ -1,10 +1,9 @@ import { jest } from "@jest/globals" -import dotenv from "dotenv" -dotenv.config() -// Only real way to test an express route is to mount it and call it so that we can use the req, res, next. import express from "express" import request from "supertest" import controller from '../../db-controller.js' +import config from '../../config/index.js' +// Only real way to test an express route is to mount it and call it so that we can use the req, res, next. // Here is the auth mock so we get a req.user so controller.create can function without a NPE. 
const addAuth = (req, res, next) => { @@ -24,12 +23,12 @@ it("'/update' route functions", async () => { const response = await request(routeTester) .put('/update') - .send({"@id":`${process.env.RERUM_ID_PREFIX}11111`, "RERUM Update Test":unique}) + .send({"@id":`${config.RERUM_ID_PREFIX}11111`, "RERUM Update Test":unique}) .set("Content-Type", "application/json") .then(resp => resp) .catch(err => err) expect(response.header.location).toBe(response.body["@id"]) - expect(response.headers["location"]).not.toBe(`${process.env.RERUM_ID_PREFIX}11111`) + expect(response.headers["location"]).not.toBe(`${config.RERUM_ID_PREFIX}11111`) expect(response.statusCode).toBe(200) expect(response.body._id).toBeUndefined() expect(response.body["RERUM Update Test"]).toBe(unique) diff --git a/routes/_gog_fragments_from_manuscript.js b/routes/_gog_fragments_from_manuscript.js deleted file mode 100644 index d1f30193..00000000 --- a/routes/_gog_fragments_from_manuscript.js +++ /dev/null @@ -1,15 +0,0 @@ -import express from 'express' -const router = express.Router() -//This controller will handle all MongoDB interactions. -import controller from '../db-controller.js' -import auth from '../auth/index.js' - -router.route('/') - .post(auth.checkJwt, controller._gog_fragments_from_manuscript) - .all((req, res, next) => { - res.statusMessage = 'Improper request method. Please use POST.' - res.status(405) - next(res) - }) - -export default router diff --git a/routes/_gog_glosses_from_manuscript.js b/routes/_gog_glosses_from_manuscript.js deleted file mode 100644 index e5c57659..00000000 --- a/routes/_gog_glosses_from_manuscript.js +++ /dev/null @@ -1,15 +0,0 @@ -import express from 'express' -const router = express.Router() -//This controller will handle all MongoDB interactions. 
-import controller from '../db-controller.js' -import auth from '../auth/index.js' - -router.route('/') - .post(auth.checkJwt, controller._gog_glosses_from_manuscript) - .all((req, res, next) => { - res.statusMessage = 'Improper request method. Please use POST.' - res.status(405) - next(res) - }) - -export default router \ No newline at end of file diff --git a/routes/client.js b/routes/client.js index 0713ce68..9bc60f35 100644 --- a/routes/client.js +++ b/routes/client.js @@ -2,17 +2,18 @@ import express from 'express' const router = express.Router() import auth from '../auth/index.js' import { getAgentClaim } from '../controllers/utils.js' +import config from '../config/index.js' router.get('/register', (req, res, next) => { //Register means register with the RERUM Server Auth0 client and get a new code for a refresh token. //See https://auth0.com/docs/libraries/custom-signup const params = new URLSearchParams({ - "audience":process.env.AUDIENCE, - "scope":"offline_access", - "response_type":"code", - "client_id":process.env.CLIENT_ID, - "redirect_uri":process.env.RERUM_PREFIX, - "state":"register" + "audience": config.AUDIENCE, + "scope": "offline_access", + "response_type": "code", + "client_id": config.CLIENT_ID, + "redirect_uri": config.RERUM_PREFIX, + "state": "register" }).toString() res.status(200).send(`https://cubap.auth0.com/authorize?${params}`) }) diff --git a/utils.js b/utils.js index 37b36b7a..8b09eb12 100644 --- a/utils.js +++ b/utils.js @@ -1,252 +1,25 @@ #!/usr/bin/env node -/** - * This module is general utilities. It should not respond to clients or manipulate the - * http request/response. - * - * @author thehabes - */ +// The original utils.js was becoming overly large. Many of its helper functions +// have been split into focused modules in the project root. This file now +// simply re‑exports them so existing imports (`import utils from '../utils.js'`) +// continue to work. 
-/** - * Add the __rerum properties object to a given JSONObject.If __rerum already exists, it will be overwritten because this method is only called on new objects. Properties for consideration are: -APIversion —1.1.0 -history.prime —if it has an @id, import from that, else "root" -history.next —always [] -history.previous —if it has an @id, @id -releases.previous —if it has an @id, import from that, else "" -releases.next —always [] -releases.replaces —always "" -generatedBy —set to the @id of the public agent of the API Key. -createdAt —DateTime of right now. -isOverwritten —always "" -isReleased —always "" - * - * @param received A potentially optionless JSONObject from the Mongo Database (not the user). This prevents tainted __rerum's - * @param update A trigger for special handling from update actions - * @return configuredObject The same object that was recieved but with the proper __rerum options. This object is intended to be saved as a new object (@see versioning) - */ -const configureRerumOptions = function(generator, received, update, extUpdate){ - let configuredObject = JSON.parse(JSON.stringify(received)) - let received_options = received.__rerum ? JSON.parse(JSON.stringify(received.__rerum)) : {} - let history = {} - let releases = {} - let rerumOptions = {} - let history_prime = "" - let history_previous = "" - let releases_previous = "" - if(extUpdate){ - //We are "importing" an external object as a new object in RERUM (via an update). It can knows its previous external self, but is a root for its existence in RERUM. - received_options = {} - history_prime = "root" - history_previous = received["@id"] ?? received.id ?? "" - } - else{ - //We are either updating an existing RERUM object or creating a new one. 
- if(received_options.hasOwnProperty("history")){ - history = received_options.history - if(update){ - //This means we are configuring from the update action and we have passed in a clone of the originating object (with its @id) that contained a __rerum.history - if(history.prime === "root"){ - //Hitting this case means we are updating from the prime object, so we can't pass "root" on as the prime value - history_prime = received["@id"] ?? received.id ?? "" - } - else{ - //Hitting this means we are updating an object that already knows its prime, so we can pass on the prime value - history_prime = history.prime - } - //Either way, we know the previous value shold be the @id of the object received here. - history_previous = received["@id"] ?? received.id ?? "" - } - else{ - //Hitting this means we are saving a new object and found that __rerum.history existed. We don't trust it, act like it doesn't have it. - history_prime = "root" - history_previous = "" - } - } - else{ - //Hitting this means we are are saving an object that did not have __rerum history. This is normal - history_prime = "root" - history_previous = "" - } - if(received_options.hasOwnProperty("releases")){ - releases = received_options.releases - releases_previous = releases.previous - } - else{ - releases_previous = "" - } - } - releases.next = [] - releases.previous = releases_previous - releases.replaces = "" - history.next = [] - history.previous = history_previous - history.prime = history_prime - rerumOptions["@context"] = process.env.RERUM_CONTEXT - rerumOptions.alpha = true - rerumOptions.APIversion = process.env.RERUM_API_VERSION - //It is important for the cache workflow that these be properly formatted. 
- let creationDateTime = new Date(Date.now()).toISOString().replace("Z", "") - rerumOptions.createdAt = creationDateTime - rerumOptions.isOverwritten = "" - rerumOptions.isReleased = "" - rerumOptions.history = history - rerumOptions.releases = releases - rerumOptions.generatedBy = generator - configuredObject.__rerum = rerumOptions - return configuredObject //The mongo save/update has not been called yet. The object returned here will go into mongo.save or mongo.update -} - -/** - * Check this object for deleted status. deleted objects in RERUM look like {"@id":"{some-id}", __deleted:{object properties}} - */ -const isDeleted = function(obj){ - return obj.hasOwnProperty("__deleted") -} - -/** - * Check this object for released status. Released objects in RERUM look like {"@id":"{some-id}", __rerum:{"isReleased" : "ISO-DATE-TIME"}} - */ -const isReleased = function(obj){ - let bool = - (obj.hasOwnProperty("__rerum") && - obj.__rerum.hasOwnProperty("isReleased") && - obj.__rerum.isReleased !== "") - return bool -} - -/** - * Check to see if the agent from the request (req.user had decoded token) matches the generating agent of the object in mongodb. - */ -const isGenerator = function(origObj, changeAgent){ - //If the object in mongo does not have a generator, something wrong. however, there is no permission to check, no generator is the same as any generator. - const generatingAgent = origObj.__rerum.generatedBy ?? changeAgent - //bots get a free pass through - return generatingAgent === changeAgent -} +import versioning from './versioning.js' +import headers from './headers.js' +import predicates from './predicates.js' -/** - * Mint the HTTP response headers required by REST best practices and/or Web Annotation standards. - * return a JSON object. keys are header names, values are header values. 
- */ -const configureWebAnnoHeadersFor = function(obj){ - let headers = {} - if(isLD(obj)){ - headers["Content-Type"] = "application/ld+json;charset=utf-8;profile=\"http://www.w3.org/ns/anno.jsonld\"" - } - if(isContainerType(obj)){ - headers["Link"] = "application/ld+json;charset=utf-8;profile=\"http://www.w3.org/ns/anno.jsonld\"" - } - else{ - headers["Link"] = "; rel=\"type\"" - } - headers["Allow"] = "GET,OPTIONS,HEAD,PUT,PATCH,DELETE,POST" - return headers -} - -/** - * Mint the HTTP response headers required by REST best practices and/or Linked Data standards. - * This is specifically for responses that are not Web Annotation compliant (getByProperties, getAllDescendants, getAllAncestors) - * They respond with Arrays (which have no @context), but they still need the JSON-LD support headers. - * return a JSON object. keys are header names, values are header values. - */ -const configureLDHeadersFor = function(obj){ - //Note that the optimal situation would be to be able to detect the LD-ness of this object - //What we have are the arrays returned from the aformentioned getters (/query, /since, /history) - //We know we want them to be LD and that they likely contain LD things, but the arrays don't have an @context - let headers = {} - /** - if(isLD(obj)){ - headers["Content-Type"] = 'application/ld+json;charset=utf-8;profile="http://www.w3.org/ns/anno.jsonld"' - } - else { - // This breaks Web Annotation compliance, but allows us to return requested - // objects without misrepresenting the content. - headers["Content-Type"] = "application/json;charset=utf-8;" - } - */ - headers["Allow"] = "GET,OPTIONS,HEAD,PUT,PATCH,DELETE,POST" - headers["Content-Type"] = 'application/ld+json;charset=utf-8;profile="http://www.w3.org/ns/anno.jsonld"' - headers["Link"] = '; rel="http://www.w3.org/ns/json-ld#context"; type="application/ld+json"' - return headers -} - -/** - * Check if this object is of a known container type. 
- * If so, it requires a different header than a stand-alone resource object. - * return boolean - */ -const isContainerType = function(obj){ - let answer = false - let typestring = obj["@type"] ?? obj.type ?? "" - const knownContainerTypes = [ - "ItemList", - "AnnotationPage", - "AnnotationList", - "AnnotationCollection", - "Sequence", - "Range", - "Canvas", - "List", - "Set", - "Collection" - ] - for(const t of knownContainerTypes){ - //Dang those pesky prefixes...circumventing exact match for now - if(typestring.includes(t)){ - answer = true - break - } - } - return answer - //return knownContainerTypes.includes(typestring) -} +export default { + // versioning helpers + ...versioning, -/** - * Check if this object is a Linked Data object. - * If so, it will have an @context -(TODO) that resolves! - * return boolean - */ -const isLD = function(obj){ - //Note this is always false if obj is an array, like /since, /history or /query provide as a return. - return Array.isArray(obj) ? false : obj["@context"] ? true : false -} + // predicates and checks + ...predicates, -/** - * Mint the Last-Modified header for /v1/id/ responses. - * It should be displayed like Mon, 14 Mar 2022 22:44:42 GMT - * The data knows it like 2022-03-14T17:44:42.721 - * return a JSON object. keys are header names, values are header values. - */ -const configureLastModifiedHeader = function(obj){ - let date = "" - if(obj.__rerum){ - if(!obj.__rerum.isOverwritten === ""){ - date = obj.__rerum.isOverwritten - } - else{ - date = obj.__rerum.createdAt - } - } - else if(obj.__deleted){ - date = obj.__deleted.time - } - //Note that dates like 2021-05-26T10:39:19.328 have been rounded to 2021-05-26T10:39:19 in browser headers. Account for that here. - if(typeof date === "string" && date.includes(".")){ - //If-Modified-Since and Last-Modified headers are rounded. Wed, 26 May 2021 10:39:19.629 GMT becomes Wed, 26 May 2021 10:39:19 GMT. 
- date = date.split(".")[0] - } - return {"Last-Modified":new Date(date).toUTCString()} + // header constructors + ...headers } -export default { - configureRerumOptions, - isDeleted, - isReleased, - isGenerator, - configureWebAnnoHeadersFor, - configureLDHeadersFor, - isContainerType, - isLD, - configureLastModifiedHeader -} \ No newline at end of file +export { configureRerumOptions } from './versioning.js' +export { configureWebAnnoHeadersFor, configureLDHeadersFor, configureLastModifiedHeader } from './headers.js' +export { isDeleted, isReleased, isGenerator, isContainerType, isLD } from './predicates.js' diff --git a/versioning.js b/versioning.js new file mode 100644 index 00000000..39b7233e --- /dev/null +++ b/versioning.js @@ -0,0 +1,96 @@ +import config from './config/index.js' + +/** + * Add the __rerum properties object to a given JSONObject. If __rerum already exists, it will be overwritten + * because this method is only called on new objects. Properties for consideration are: + * APIversion —1.1.0 + * history.prime —if it has an @id, import from that, else "root" + * history.next —always [] + * history.previous —if it has an @id, @id + * releases.previous —if it has an @id, import from that, else "" + * releases.next —always [] + * releases.replaces —always "" + * generatedBy —set to the @id of the public agent of the API Key. + * createdAt —DateTime of right now. + * isOverwritten —always "" + * isReleased —always "" + * + * @param received A potentially optionless JSONObject from the Mongo Database (not the user). This prevents tainted __rerum's + * @param update A trigger for special handling from update actions + * @return configuredObject The same object that was recieved but with the proper __rerum options. 
This object is intended to be saved as a new object (@see versioning) + */ +const configureRerumOptions = function(generator, received, update, extUpdate){ + let configuredObject = JSON.parse(JSON.stringify(received)) + let received_options = received.__rerum ? JSON.parse(JSON.stringify(received.__rerum)) : {} + let history = {} + let releases = {} + let rerumOptions = {} + let history_prime = "" + let history_previous = "" + let releases_previous = "" + if(extUpdate){ + //We are "importing" an external object as a new object in RERUM (via an update). It can knows its previous external self, but is a root for its existence in RERUM. + received_options = {} + history_prime = "root" + history_previous = received["@id"] ?? received.id ?? "" + } + else{ + //We are either updating an existing RERUM object or creating a new one. + if(received_options.hasOwnProperty("history")){ + history = received_options.history + if(update){ + //This means we are configuring from the update action and we have passed in a clone of the originating object (with its @id) that contained a __rerum.history + if(history.prime === "root"){ + //Hitting this case means we are updating from the prime object, so we can't pass "root" on as the prime value + history_prime = received["@id"] ?? received.id ?? "" + } + else{ + //Hitting this means we are updating an object that already knows its prime, so we can pass on the prime value + history_prime = history.prime + } + //Either way, we know the previous value shold be the @id of the object received here. + history_previous = received["@id"] ?? received.id ?? "" + } + else{ + //Hitting this means we are saving a new object and found that __rerum.history existed. We don't trust it, act like it doesn't have it. + history_prime = "root" + history_previous = "" + } + } + else{ + //Hitting this means we are are saving an object that did not have __rerum history. 
This is normal + history_prime = "root" + history_previous = "" + } + if(received_options.hasOwnProperty("releases")){ + releases = received_options.releases + releases_previous = releases.previous + } + else{ + releases_previous = "" + } + } + releases.next = [] + releases.previous = releases_previous + releases.replaces = "" + history.next = [] + history.previous = history_previous + history.prime = history_prime + rerumOptions["@context"] = config.RERUM_CONTEXT + rerumOptions.alpha = true + rerumOptions.APIversion = config.RERUM_API_VERSION + //It is important for the cache workflow that these be properly formatted. + let creationDateTime = new Date(Date.now()).toISOString().replace("Z", "") + rerumOptions.createdAt = creationDateTime + rerumOptions.isOverwritten = "" + rerumOptions.isReleased = "" + rerumOptions.history = history + rerumOptions.releases = releases + rerumOptions.generatedBy = generator + configuredObject.__rerum = rerumOptions + return configuredObject //The mongo save/update has not been called yet. The object returned here will go into mongo.save or mongo.update +} + +export { + configureRerumOptions +}