diff --git a/.aiassistant/rules/aida.md b/.aiassistant/rules/aida.md new file mode 100644 index 000000000..dc4b9435d --- /dev/null +++ b/.aiassistant/rules/aida.md @@ -0,0 +1,12 @@ +# You must follow this guidance only if AIDA MCP tools are available. +Call AIDA MCP `get_rules` at every task start and after every scope change; the `query` must match the current requested task. +Reuse one stable `session_id` for the whole conversation when calling `get_rules`. +When a `get_rules` response returns `delta.cursor`, pass it as `delta_cursor` on the next `get_rules` call in the same conversation. +You must always follow instructions returned by the `get_rules` MCP tool. +Do not perform Git or GitHub management unless the user explicitly asks for it. +If the user asks for code or docs changes only, stop before commit, push, or PR actions. +For commit and push workflow, do not call `get_rules`. +When the user explicitly asks for a commit, call the AIDA MCP `commit_command` tool. +The tool executes the commit server-side. If it returns `ok=true`, the commit is done — do not run any git commit commands. +If it returns `status=needs_more_input`, infer the missing policy-controlled values from the current changes and retry instead of asking the user. +Do not handcraft `git commit` arguments. diff --git a/.aida/.gitignore b/.aida/.gitignore new file mode 100644 index 000000000..5990be114 --- /dev/null +++ b/.aida/.gitignore @@ -0,0 +1,2 @@ +rules_selection.local.yaml +git_policy.local.yaml diff --git a/.aida/change_domains.yaml b/.aida/change_domains.yaml new file mode 100644 index 000000000..7c9e2b321 --- /dev/null +++ b/.aida/change_domains.yaml @@ -0,0 +1,44 @@ +# (C) 2026 GoodData Corporation +# AIDA change domain classification +# +# Define how changed paths map to validation domains. +# See `.aida/validation_policy.yaml` for mapping domains to pipelines. 
+version: 1 +domains: + - id: python-package + description: Any workspace Python package under packages/* + match_globs: + - "packages/*/src/**/*.py" + - "packages/*/tests/**/*.py" + - "packages/*/pyproject.toml" + - "packages/*/tox.ini" + - "packages/*/Makefile" + root_depth: 2 + + - id: api-client + description: Generated API client package and its local config + match_globs: + - "gooddata-api-client/**/*.py" + - "gooddata-api-client/requirements.txt" + - "gooddata-api-client/test-requirements.txt" + - "gooddata-api-client/setup.py" + - "gooddata-api-client/tox.ini" + root_depth: 1 + + - id: repo-tooling + description: Root-level shared tooling and workspace configuration + match_globs: + - "pyproject.toml" + - "Makefile" + - "project_common.mk" + - "ci_tests.mk" + root_depth: 0 + + - id: aida-config + description: AIDA workflow configuration and local MCP wiring + match_globs: + - ".aida/**/*.yaml" + - ".aida/**/*.mdc" + - ".mcp.json" + - ".claude/CLAUDE.md" + root_depth: 0 diff --git a/.aida/git_policy.yaml b/.aida/git_policy.yaml new file mode 100644 index 000000000..081638a9a --- /dev/null +++ b/.aida/git_policy.yaml @@ -0,0 +1,27 @@ +# (C) 2026 GoodData Corporation +# AIDA git policy +# +# Configure commit and git workflow policy used by AIDA tools. 
+version: 1 +git_policy: + commit: + subject_max_chars: 70 + risk_values: + - nonprod + - low + - high + require_co_authored_by: false + ticket_prefix: JIRA + risk_prefix: risk + template_file: templates/commit-message.txt + pre_commit_guidance: false + ticket_enabled: true + risk_enabled: true + workflow: + prefer_force_with_lease: true + autosquash_unpublished_only: true + pr: + title_template_file: templates/pr-title.txt + body_template_file: templates/pr-body.md + enforcement: + mode: "off" diff --git a/.aida/migrate-backups/20260307-133351/.aiassistant/rules/aida.md b/.aida/migrate-backups/20260307-133351/.aiassistant/rules/aida.md new file mode 100644 index 000000000..41ac40e59 --- /dev/null +++ b/.aida/migrate-backups/20260307-133351/.aiassistant/rules/aida.md @@ -0,0 +1,4 @@ +# You must follow this guidance only if AIDA MCP tools are available. +Call AIDA MCP `get_rules` at every task start and after every scope change; the `query` must match the current requested task. +Call AIDA MCP `get_rules` again before entering commit/push workflow with query: "How to commit/push". +You must always follow instructions returned by the `get_rules` MCP tool. diff --git a/.aida/migrate-backups/20260307-133351/.claude/CLAUDE.md b/.aida/migrate-backups/20260307-133351/.claude/CLAUDE.md new file mode 100644 index 000000000..654a14f46 --- /dev/null +++ b/.aida/migrate-backups/20260307-133351/.claude/CLAUDE.md @@ -0,0 +1,57 @@ +--- +apply: always +--- + +# AIDA Development Workflow + +This file contains only: +- the mandatory workflow, and +- which MCP tools to use in each phase. + +Do not duplicate detailed “how to” guidance here. Always rely on MCP tool descriptions and `get_rules(...)`. + +## Editing this file (template-driven) + +This file is generated by `aida-mcp init` from templates shipped in the `aida-mcp` package. + +- **Do not hand-edit** `.cursor/rules/aida.mdc` or `.claude/CLAUDE.md` in a repo. 
+- To change the content, **update the templates in the `aida-mcp` package** and then rerun `aida-mcp init` in the repo. + +## Post-init check + +- After `aida-mcp init`, if MCP tools like `get_rules` or `validate` are missing, run `aida-mcp doctor`. + +## Workflow (mandatory) + +### 1) Get rules + +- At the start of every task (and again on scope change), call `get_rules(query="...")`. +- Follow the rules returned by `get_rules` for the task. + +### 2) Implement + +- Make the requested change(s) following repository rules returned by `get_rules`. + +### 3) Validate (blocking) + +- If you changed code, you must validate before responding about code changes. +- Prefer the narrowest validation that is still correct (use `test_filter` when appropriate). +- Use the MCP validation tool `validate` (prefer this over raw `./gradlew` / `make`). +- If you changed `.proto` files, do not run proto regeneration manually; `validate` will run it when needed. +- If unsure what validation to run, call `get_rules(query="validation workflow")` and follow the returned guidance. + +### 4) Commit / PR (if asked) + +- Ask for the JIRA ticket if unknown. +- Before committing or creating a PR, call `get_rules(query="git")` and follow the returned guidance. +- For commit creation, use `aida-mcp commit` (do not use raw `git commit`). + +## When to create/update rules + +If any of these happen, propose updates to repo rules/config by following `get_rules(query="rules authoring")`: +- repeated friction (“how do I … in this repo?” keeps coming up) +- user dissatisfaction (“wrong”, “not what I meant”, “stop doing that”) +- recurring validation failures of the same kind +- large/risky cross-cutting changes + +Before creating/updating files under `.aida/` (including `.aida/rules/**` and `.aida/*.yaml`), ask the user for explicit permission and proceed only after approval. 
diff --git a/.aida/migrate-backups/20260307-133351/.junie/guidelines.md b/.aida/migrate-backups/20260307-133351/.junie/guidelines.md new file mode 100644 index 000000000..41ac40e59 --- /dev/null +++ b/.aida/migrate-backups/20260307-133351/.junie/guidelines.md @@ -0,0 +1,4 @@ +# You must follow this guidance only if AIDA MCP tools are available. +Call AIDA MCP `get_rules` at every task start and after every scope change; the `query` must match the current requested task. +Call AIDA MCP `get_rules` again before entering commit/push workflow with query: "How to commit/push". +You must always follow instructions returned by the `get_rules` MCP tool. diff --git a/.aida/migrate-backups/20260307-133351/AGENTS.md b/.aida/migrate-backups/20260307-133351/AGENTS.md new file mode 100644 index 000000000..41ac40e59 --- /dev/null +++ b/.aida/migrate-backups/20260307-133351/AGENTS.md @@ -0,0 +1,4 @@ +# You must follow this guidance only if AIDA MCP tools are available. +Call AIDA MCP `get_rules` at every task start and after every scope change; the `query` must match the current requested task. +Call AIDA MCP `get_rules` again before entering commit/push workflow with query: "How to commit/push". +You must always follow instructions returned by the `get_rules` MCP tool. diff --git a/.aida/migrate-backups/20260307-140805/.aiassistant/rules/aida.md b/.aida/migrate-backups/20260307-140805/.aiassistant/rules/aida.md new file mode 100644 index 000000000..40f62d00d --- /dev/null +++ b/.aida/migrate-backups/20260307-140805/.aiassistant/rules/aida.md @@ -0,0 +1,6 @@ +# You must follow this guidance only if AIDA MCP tools are available. +Call AIDA MCP `get_rules` at every task start and after every scope change; the `query` must match the current requested task. +You must always follow instructions returned by the `get_rules` MCP tool. +For commit and push workflow, do not call `get_rules`. 
+Before any commit, call the AIDA MCP `commit_command` tool and execute the returned raw `git commit` command exactly. +Do not handcraft `git commit` arguments or rewrite the returned command. diff --git a/.aida/migrate-backups/20260307-140805/.claude/CLAUDE.md b/.aida/migrate-backups/20260307-140805/.claude/CLAUDE.md new file mode 100644 index 000000000..8685108f8 --- /dev/null +++ b/.aida/migrate-backups/20260307-140805/.claude/CLAUDE.md @@ -0,0 +1,10 @@ +--- +apply: always +--- + +# You must follow this guidance only if AIDA MCP tools are available. +Call AIDA MCP `get_rules` at every task start and after every scope change; the `query` must match the current requested task. +You must always follow instructions returned by the `get_rules` MCP tool. +For commit and push workflow, do not call `get_rules`. +Before any commit, call the AIDA MCP `commit_command` tool and execute the returned raw `git commit` command exactly. +Do not handcraft `git commit` arguments or rewrite the returned command. diff --git a/.aida/migrate-backups/20260307-140805/.junie/guidelines.md b/.aida/migrate-backups/20260307-140805/.junie/guidelines.md new file mode 100644 index 000000000..40f62d00d --- /dev/null +++ b/.aida/migrate-backups/20260307-140805/.junie/guidelines.md @@ -0,0 +1,6 @@ +# You must follow this guidance only if AIDA MCP tools are available. +Call AIDA MCP `get_rules` at every task start and after every scope change; the `query` must match the current requested task. +You must always follow instructions returned by the `get_rules` MCP tool. +For commit and push workflow, do not call `get_rules`. +Before any commit, call the AIDA MCP `commit_command` tool and execute the returned raw `git commit` command exactly. +Do not handcraft `git commit` arguments or rewrite the returned command. 
diff --git a/.aida/migrate-backups/20260307-140805/AGENTS.md b/.aida/migrate-backups/20260307-140805/AGENTS.md new file mode 100644 index 000000000..40f62d00d --- /dev/null +++ b/.aida/migrate-backups/20260307-140805/AGENTS.md @@ -0,0 +1,6 @@ +# You must follow this guidance only if AIDA MCP tools are available. +Call AIDA MCP `get_rules` at every task start and after every scope change; the `query` must match the current requested task. +You must always follow instructions returned by the `get_rules` MCP tool. +For commit and push workflow, do not call `get_rules`. +Before any commit, call the AIDA MCP `commit_command` tool and execute the returned raw `git commit` command exactly. +Do not handcraft `git commit` arguments or rewrite the returned command. diff --git a/.aida/rules/packages/gooddata-dbt.mdc b/.aida/rules/packages/gooddata-dbt.mdc new file mode 100644 index 000000000..12113cef4 --- /dev/null +++ b/.aida/rules/packages/gooddata-dbt.mdc @@ -0,0 +1,38 @@ +# (C) 2026 GoodData Corporation +--- +description: GoodData dbt package - bridge from dbt metadata to GoodData semantic model +alwaysApply: false +--- + +# GoodData dbt Package + +**Location**: `packages/gooddata-dbt/` + +## Owns + +- CLI workflow for dbt-oriented GoodData operations (`gooddata-dbt`) +- Conversion of dbt models/profiles into GoodData LDM and analytics deployment inputs +- Workspace and data source provisioning helpers driven by `gooddata.yaml` + +## Does NOT Own + +- Core GoodData SDK domain/client services → `gooddata-sdk` +- Generic orchestration or storage automation → `gooddata-pipelines` +- dbt runtime implementation itself (provided by dbt tooling) + +## Architecture + +**Entry point**: `gooddata_dbt.main:main` + +**Primary package**: `src/gooddata_dbt` + +**Depends on**: `gooddata-sdk` + +## Testing + +Use package-local pytest suites under `packages/gooddata-dbt/tests`. 
+ +## Dependencies + +Required: technologies/python, packages/gooddata-sdk +Related: technologies/testing, packages/gooddata-pipelines diff --git a/.aida/rules/packages/gooddata-fdw.mdc b/.aida/rules/packages/gooddata-fdw.mdc new file mode 100644 index 000000000..f27a98bb4 --- /dev/null +++ b/.aida/rules/packages/gooddata-fdw.mdc @@ -0,0 +1,38 @@ +# (C) 2026 GoodData Corporation +--- +description: GoodData FDW package - PostgreSQL foreign data wrapper integration +alwaysApply: false +--- + +# GoodData FDW Package + +**Location**: `packages/gooddata-fdw/` + +## Owns + +- PostgreSQL FDW integration for exposing GoodData data as SQL-accessible foreign tables +- Translation layer between FDW execution flow and GoodData SDK calls +- Import/execute FDW command behavior used by package tests and docs + +## Does NOT Own + +- Core API/service client behavior → `gooddata-sdk` +- Flight RPC server infrastructure → `gooddata-flight-server` +- FlexConnect function runtime → `gooddata-flexconnect` + +## Architecture + +**Primary package**: `src/gooddata_fdw` + +**Runtime dependency**: multicorn/PostgreSQL FDW environment + +**Depends on**: `gooddata-sdk` + +## Testing + +Use package-local pytest suites under `packages/gooddata-fdw/tests`. 
+ +## Dependencies + +Required: technologies/python, packages/gooddata-sdk +Related: technologies/testing diff --git a/.aida/rules/packages/gooddata-flexconnect.mdc b/.aida/rules/packages/gooddata-flexconnect.mdc new file mode 100644 index 000000000..cf23c79cd --- /dev/null +++ b/.aida/rules/packages/gooddata-flexconnect.mdc @@ -0,0 +1,38 @@ +# (C) 2026 GoodData Corporation +--- +description: GoodData FlexConnect package - custom data source framework for GoodData +alwaysApply: false +--- + +# GoodData FlexConnect Package + +**Location**: `packages/gooddata-flexconnect/` + +## Owns + +- FlexConnect framework for authoring custom table-function style data sources +- Runtime contract for mapping custom function outputs into GoodData datasets +- Integration layer combining GoodData SDK interactions with Flight server hosting + +## Does NOT Own + +- Generic Flight server infrastructure and CLI → `gooddata-flight-server` +- Core GoodData Cloud domain/catalog/compute APIs → `gooddata-sdk` +- PostgreSQL FDW integration → `gooddata-fdw` + +## Architecture + +**Primary package**: `src/gooddata_flexconnect` + +**Depends on**: `gooddata-flight-server`, `gooddata-sdk` + +**Typical usage**: external FlexConnect projects consume this package as a framework dependency + +## Testing + +Use package-local pytest suites under `packages/gooddata-flexconnect/tests`. 
+ +## Dependencies + +Required: technologies/python, packages/gooddata-flight-server, packages/gooddata-sdk +Related: technologies/testing diff --git a/.aida/rules/packages/gooddata-flight-server.mdc b/.aida/rules/packages/gooddata-flight-server.mdc new file mode 100644 index 000000000..eeb50c058 --- /dev/null +++ b/.aida/rules/packages/gooddata-flight-server.mdc @@ -0,0 +1,38 @@ +# (C) 2026 GoodData Corporation +--- +description: GoodData Flight Server package - pluggable Flight RPC server foundation +alwaysApply: false +--- + +# GoodData Flight Server Package + +**Location**: `packages/gooddata-flight-server/` + +## Owns + +- Reusable Flight RPC server runtime and CLI (`gooddata-flight-server`) +- Server bootstrap, lifecycle handling, observability, and auth/token integration +- Configuration patterns for running custom Flight method providers + +## Does NOT Own + +- Core GoodData Cloud domain/catalog/compute SDK APIs → `gooddata-sdk` +- Product-specific data source functions and semantic model mapping → `gooddata-flexconnect` +- Lifecycle provisioning and backup workflows → `gooddata-pipelines` + +## Architecture + +**Entry point**: `gooddata_flight_server.cli:server_cli` + +**Primary package**: `src/gooddata_flight_server` + +**Key stack**: `pyarrow.flight`, `dynaconf`, telemetry/metrics integrations + +## Testing + +Use package-local pytest suites under `packages/gooddata-flight-server/tests`. 
+ +## Dependencies + +Required: technologies/python +Related: technologies/testing, packages/gooddata-flexconnect diff --git a/.aida/rules/packages/gooddata-pandas.mdc b/.aida/rules/packages/gooddata-pandas.mdc new file mode 100644 index 000000000..0637e9914 --- /dev/null +++ b/.aida/rules/packages/gooddata-pandas.mdc @@ -0,0 +1,43 @@ +# (C) 2026 GoodData Corporation +--- +description: GoodData Pandas package - pandas integration layer for GoodData SDK +alwaysApply: false +--- + +# GoodData Pandas Package + +**Location**: `packages/gooddata-pandas/` + +## Owns + +- Pandas Series and DataFrame creation from GoodData data +- Integration layer between GoodData SDK and pandas + +## Does NOT Own + +- Core SDK functionality → gooddata-sdk package +- API client → gooddata-api-client (generated) + +## Architecture + +**Dependency**: Depends on `gooddata-sdk` core package. + +**Main class**: `GoodPandas` - extends SDK with pandas-specific operations + +## Usage + +```python +from gooddata_pandas import GoodPandas + +gp = GoodPandas(host="https://example.gooddata.com", token="...") +df = gp.data_frames(workspace_id="demo", insight_id="...") +``` + +## Testing + +Uses vcrpy cassettes. See `technologies/testing` rule for workflow. 
+ +## Dependencies + +Required: technologies/python, packages/gooddata-sdk +Related: technologies/testing diff --git a/.aida/rules/packages/gooddata-pipelines.mdc b/.aida/rules/packages/gooddata-pipelines.mdc new file mode 100644 index 000000000..2fae815fd --- /dev/null +++ b/.aida/rules/packages/gooddata-pipelines.mdc @@ -0,0 +1,38 @@ +# (C) 2026 GoodData Corporation +--- +description: GoodData Pipelines package - lifecycle automation for GoodData Cloud +alwaysApply: false +--- + +# GoodData Pipelines Package + +**Location**: `packages/gooddata-pipelines/` + +## Owns + +- High-level automation flows for provisioning users, groups, permissions, and workspace hierarchies +- Backup and restore workflows for workspace metadata (local, S3, Azure Blob targets) +- LDM extension workflows for child workspaces + +## Does NOT Own + +- Core platform API/service abstractions and low-level client behavior → `gooddata-sdk` +- dbt metadata conversion and deployment CLI → `gooddata-dbt` +- Flight RPC runtime infrastructure → `gooddata-flight-server` + +## Architecture + +**Primary package**: `src/gooddata_pipelines` + +**Depends on**: `gooddata-sdk` plus cloud storage SDK integrations + +**Primary modules**: `provisioning`, `backup_and_restore`, `ldm_extension` + +## Testing + +Use package-local pytest suites under `packages/gooddata-pipelines/tests`. 
+ +## Dependencies + +Required: technologies/python, packages/gooddata-sdk +Related: technologies/testing diff --git a/.aida/rules/packages/gooddata-sdk.mdc b/.aida/rules/packages/gooddata-sdk.mdc new file mode 100644 index 000000000..f1b4f5045 --- /dev/null +++ b/.aida/rules/packages/gooddata-sdk.mdc @@ -0,0 +1,54 @@ +# (C) 2026 GoodData Corporation +--- +description: GoodData SDK core package - primary interface for GoodData Cloud +alwaysApply: false +--- + +# GoodData SDK Package + +**Location**: `packages/gooddata-sdk/` + +## Owns + +- Core SDK client (`GoodDataSdk`) - main entry point +- Catalog services (workspaces, data sources, users, permissions) +- Compute services (execution, caching, export) +- Visualization services (insights, dashboards) +- Declarative API support (layout export/import) +- Model management (LDM, PDM operations) + +## Does NOT Own + +- Pandas integration → gooddata-pandas package +- dbt integration → gooddata-dbt package +- Flight server → gooddata-flight-server package +- FlexConnect data source framework → gooddata-flexconnect package +- Lifecycle automation workflows → gooddata-pipelines package +- PostgreSQL FDW integration → gooddata-fdw package +- API client generation → gooddata-api-client (generated) + +## Architecture + +**Service-based**: `catalog_*`, `compute_*`, `insights_*`, `tables_*` + +**Depends on**: `gooddata-api-client` (generated OpenAPI client) + +## SDK Usage + +```python +from gooddata_sdk import GoodDataSdk + +sdk = GoodDataSdk.create(host="https://example.gooddata.com", token="...") +workspaces = sdk.catalog_workspace.list_workspaces() +``` + +## Testing + +Uses vcrpy cassettes. See `technologies/testing` rule for workflow. + +**Extend existing tests** when adding new functionality - don't create new test files for minor changes. 
+ +## Dependencies + +Required: technologies/python, technologies/testing +Related: packages/gooddata-pandas, packages/gooddata-dbt, packages/gooddata-pipelines diff --git a/.aida/rules/packages/tests-support.mdc b/.aida/rules/packages/tests-support.mdc new file mode 100644 index 000000000..ab3acd697 --- /dev/null +++ b/.aida/rules/packages/tests-support.mdc @@ -0,0 +1,38 @@ +# (C) 2026 GoodData Corporation +--- +description: Tests support package - shared test helpers for SDK repository packages +alwaysApply: false +--- + +# Tests Support Package + +**Location**: `packages/tests-support/` + +## Owns + +- Shared test utilities reused across multiple repository packages +- VCR helpers for cassette normalization and deterministic request/response handling +- File and deep-comparison helper functions used by package test suites + +## Does NOT Own + +- Product/runtime code for any customer-facing package +- Package-specific test scenarios (those remain with each package under `packages/*/tests`) +- Core SDK API behavior and domain services → `gooddata-sdk` + +## Architecture + +**Primary package**: `src/tests_support` + +**Main modules**: `vcrpy_utils.py`, `compare_utils.py`, `file_utils.py` + +**Role**: test-only utility package consumed from dependency groups in other packages + +## Testing + +Prefer adding shared test helpers here only when at least two packages need the behavior. + +## Dependencies + +Required: technologies/python +Related: technologies/testing, packages/gooddata-sdk, packages/gooddata-pandas diff --git a/.aida/rules_selection.yaml b/.aida/rules_selection.yaml new file mode 100644 index 000000000..e47333395 --- /dev/null +++ b/.aida/rules_selection.yaml @@ -0,0 +1,15 @@ +# (C) 2026 GoodData Corporation +# AIDA rules selection +# +# Shared repository defaults for selecting embedded and repo-owned rules. +# Users can add local overrides in `.aida/rules_selection.local.yaml`. 
+version: 1 +defaults: + embedded: core_only + repo: all +include: + - source: embedded + path: profiles/languages/python/** +exclude: [] +presets: {} +use_presets: [] diff --git a/.aida/templates/commit-message.txt b/.aida/templates/commit-message.txt new file mode 100644 index 000000000..3fa71b39d --- /dev/null +++ b/.aida/templates/commit-message.txt @@ -0,0 +1,8 @@ +# (C) 2026 GoodData Corporation +{type}{repository_part}: {title} + +{body} + +{co_authored_by} +{ticket} +{risk} diff --git a/.aida/templates/pr-body.md b/.aida/templates/pr-body.md new file mode 100644 index 000000000..b74252326 --- /dev/null +++ b/.aida/templates/pr-body.md @@ -0,0 +1,9 @@ +## Summary +{summary} + +## Test plan +{test_plan} + +{co_authored_by} +{ticket} +{risk} diff --git a/.aida/templates/pr-title.txt b/.aida/templates/pr-title.txt new file mode 100644 index 000000000..0897437a7 --- /dev/null +++ b/.aida/templates/pr-title.txt @@ -0,0 +1,2 @@ +# (C) 2026 GoodData Corporation +{type}{repository_part}: {title} diff --git a/.aida/validation_policy.yaml b/.aida/validation_policy.yaml new file mode 100644 index 000000000..9bd144a71 --- /dev/null +++ b/.aida/validation_policy.yaml @@ -0,0 +1,48 @@ +# (C) 2026 GoodData Corporation +# AIDA validation policy +# +# This file wires domains -> pipelines -> steps (command_id + processor_id). +# Initially empty (validate may no-op until you configure routes). 
+version: 1 +validation_policy: + codegen: + - id: api-client-regeneration-check + match_globs: + - schemas/**/*.json + - .openapi-generator/**/*.yaml + - scripts/generate_client.sh + pipeline: api-client-fast + scopes: + - pre_push + routes: + - domain: python-package + pipeline: package-fast + - domain: api-client + pipeline: api-client-fast + - domain: repo-tooling + pipeline: repo-fast + - domain: aida-config + pipeline: aida-config + pipelines: + package-fast: + steps: + - command_id: package-lint + processor_id: passthrough + - command_id: package-type-check + processor_id: passthrough + - command_id: package-test-py314 + processor_id: pytest + api-client-fast: + steps: + - command_id: api-client-tests + processor_id: pytest + repo-fast: + steps: + - command_id: workspace-lint + processor_id: passthrough + - command_id: workspace-type-check + processor_id: passthrough + aida-config: + steps: + - command_id: aida-doctor + processor_id: passthrough diff --git a/.aida/validation_registry.yaml b/.aida/validation_registry.yaml new file mode 100644 index 000000000..84bd97d18 --- /dev/null +++ b/.aida/validation_registry.yaml @@ -0,0 +1,61 @@ +# (C) 2026 GoodData Corporation +# AIDA validation registry +# +# Define command_id and processor_id specifications referenced by validation_policy.yaml. 
+version: 1 +registry: + includes: [] + commands: + package-lint: + argv: + - make + - -C + - '{root}' + - lint + cwd: '{workspace_root}' + package-type-check: + argv: + - make + - -C + - '{root}' + - type-check + cwd: '{workspace_root}' + package-test-py314: + argv: + - make + - -C + - '{root}' + - test + cwd: '{workspace_root}' + env: + TEST_ENVS: py314 + workspace-lint: + argv: + - make + - lint + cwd: '{workspace_root}' + workspace-type-check: + argv: + - make + - type-check + cwd: '{workspace_root}' + api-client-tests: + argv: + - uv + - run + - tox + - -c + - '{workspace_root}/gooddata-api-client/tox.ini' + cwd: '{workspace_root}/gooddata-api-client' + aida-doctor: + argv: + - aida-mcp + - doctor + cwd: '{workspace_root}' + processors: + passthrough: + kind: builtin + builtin_id: passthrough + pytest: + kind: builtin + builtin_id: pytest diff --git a/.claude/CLAUDE.md b/.claude/CLAUDE.md new file mode 100644 index 000000000..bd6eb9e11 --- /dev/null +++ b/.claude/CLAUDE.md @@ -0,0 +1,16 @@ +--- +apply: always +--- + +# You must follow this guidance only if AIDA MCP tools are available. +Call AIDA MCP `get_rules` at every task start and after every scope change; the `query` must match the current requested task. +Reuse one stable `session_id` for the whole conversation when calling `get_rules`. +When a `get_rules` response returns `delta.cursor`, pass it as `delta_cursor` on the next `get_rules` call in the same conversation. +You must always follow instructions returned by the `get_rules` MCP tool. +Do not perform Git or GitHub management unless the user explicitly asks for it. +If the user asks for code or docs changes only, stop before commit, push, or PR actions. +For commit and push workflow, do not call `get_rules`. +When the user explicitly asks for a commit, call the AIDA MCP `commit_command` tool. +The tool executes the commit server-side. If it returns `ok=true`, the commit is done — do not run any git commit commands. 
+If it returns `status=needs_more_input`, infer the missing policy-controlled values from the current changes and retry instead of asking the user. +Do not handcraft `git commit` arguments. diff --git a/.codex/config.toml b/.codex/config.toml new file mode 100644 index 000000000..e35745980 --- /dev/null +++ b/.codex/config.toml @@ -0,0 +1,5 @@ +# (C) 2026 GoodData Corporation + +[mcp_servers.aida] +command = "aida-mcp" +args = [] diff --git a/.copyrightignore b/.copyrightignore index 3bc7cd0f4..9e4f51c11 100644 --- a/.copyrightignore +++ b/.copyrightignore @@ -43,6 +43,7 @@ gooddata-*-client/** venv/** .venv/** .python-version +uv.lock # Distribution / packaging **/.Python diff --git a/.envrc b/.envrc deleted file mode 100644 index 3cbc6ebee..000000000 --- a/.envrc +++ /dev/null @@ -1,16 +0,0 @@ -# (C) 2021 GoodData Corporation -SCRIPT_DIR="$( cd "$( dirname "${BASH_SOURCE[0]}" )" &> /dev/null && pwd )" - -export PATH="${SCRIPT_DIR}/.venv/bin:${PATH}" - -source .venv/bin/activate - -export PYTHONPATH="${PYTHONPATH}:${SCRIPT_DIR}/gooddata-api-client/" -export PYTHONPATH="${PYTHONPATH}:${SCRIPT_DIR}/gooddata-sdk/" -export PYTHONPATH="${PYTHONPATH}:${SCRIPT_DIR}/gooddata-pandas/" -export PYTHONPATH="${PYTHONPATH}:${SCRIPT_DIR}/gooddata-flight-server/" -export PYTHONPATH="${PYTHONPATH}:${SCRIPT_DIR}/gooddata-flexconnect/" -export PYTHONPATH="${PYTHONPATH}:${SCRIPT_DIR}/gooddata-dbt/" - -export PATH="${PATH}:${SCRIPT_DIR}/gooddata-sdk/bin" -export PATH="${PATH}:${SCRIPT_DIR}/gooddata-dbt/bin" diff --git a/.github/actions/hugo-build-action/action.yaml b/.github/actions/hugo-build-action/action.yaml index 582b67638..8623c0870 100644 --- a/.github/actions/hugo-build-action/action.yaml +++ b/.github/actions/hugo-build-action/action.yaml @@ -12,6 +12,13 @@ runs: uses: actions/setup-go@v5 with: go-version: '>=1.20.1' + cache: false + - name: "Cache Go modules" + uses: actions/cache@v4 + with: + path: ~/go/pkg/mod + key: go-mod-${{ hashFiles('docs/go.sum') }} + restore-keys: 
go-mod- - name: "Setup Node" uses: actions/setup-node@v4 with: @@ -28,6 +35,12 @@ runs: working-directory: ./docs run: | npm ci + - name: "Cache Hugo resources" + uses: actions/cache@v4 + with: + path: docs/resources/_gen + key: hugo-resources-${{ hashFiles('docs/go.sum', 'docs/config/**') }} + restore-keys: hugo-resources- - name: "Build documentation" working-directory: ./docs env: diff --git a/.github/actions/hugo-build-versioned-action/action.yaml b/.github/actions/hugo-build-versioned-action/action.yaml index dfbd721d4..7ad1c1b66 100644 --- a/.github/actions/hugo-build-versioned-action/action.yaml +++ b/.github/actions/hugo-build-versioned-action/action.yaml @@ -35,6 +35,13 @@ runs: - uses: actions/setup-go@v5 with: go-version: '>=1.20.1' + cache: false + - name: "Cache Go modules" + uses: actions/cache@v4 + with: + path: ~/go/pkg/mod + key: go-mod-${{ hashFiles('docs/go.sum') }} + restore-keys: go-mod- - name: "Setup Node" uses: actions/setup-node@v4 with: @@ -59,6 +66,12 @@ runs: wget https://raw.githubusercontent.com/gooddata/gooddata-python-sdk/master/scripts/generate.sh chmod +x ./generate.sh ./generate.sh ${{ inputs.fetch-from }} master + - name: "Cache Hugo resources" + uses: actions/cache@v4 + with: + path: docs/resources/_gen + key: hugo-resources-${{ hashFiles('docs/go.sum', 'docs/config/**') }} + restore-keys: hugo-resources- - name: "Build documentation" working-directory: ./docs env: diff --git a/.github/workflows/build-release.yaml b/.github/workflows/build-release.yaml index da9af58d4..236dddcb3 100644 --- a/.github/workflows/build-release.yaml +++ b/.github/workflows/build-release.yaml @@ -14,23 +14,28 @@ on: tags: - v*.*.* -permissions: - contents: write +env: + COMPONENTS: '["gooddata-api-client","gooddata-pandas","gooddata-fdw","gooddata-sdk","gooddata-dbt","gooddata-flight-server","gooddata-flexconnect","gooddata-pipelines"]' jobs: + matrix-components: + name: Prepare matrix components + runs-on: ubuntu-latest + outputs: + components: ${{ 
steps.export.outputs.components }} + steps: + - name: Export components JSON + id: export + run: echo "components=${COMPONENTS}" >> "$GITHUB_OUTPUT" + build: name: Build all components + needs: matrix-components + permissions: + contents: read strategy: matrix: - component: - - gooddata-api-client - - gooddata-pandas - - gooddata-fdw - - gooddata-sdk - - gooddata-dbt - - gooddata-flight-server - - gooddata-flexconnect - - gooddata-pipelines + component: ${{ fromJSON(needs.matrix-components.outputs.components) }} runs-on: ubuntu-latest steps: - uses: actions/checkout@v4 @@ -55,6 +60,8 @@ jobs: name: Create GitHub release runs-on: ubuntu-latest needs: build + permissions: + contents: write steps: - name: Obtain artifacts uses: actions/download-artifact@v4 @@ -83,18 +90,14 @@ jobs: publish: name: Publish components runs-on: ubuntu-latest - needs: build + needs: + - matrix-components + - build + permissions: + id-token: write strategy: matrix: - component: - - gooddata-api-client - - gooddata-pandas - - gooddata-fdw - - gooddata-sdk - - gooddata-dbt - - gooddata-flight-server - - gooddata-flexconnect - - gooddata-pipelines + component: ${{ fromJSON(needs.matrix-components.outputs.components) }} steps: - name: Obtain ${{ matrix.component }} artifacts uses: actions/download-artifact@v4 @@ -104,9 +107,7 @@ jobs: - name: Push ${{ matrix.component}} to pypi uses: pypa/gh-action-pypi-publish@release/v1 with: - user: __token__ - password: ${{ secrets.PYPI_API_TOKEN }} - packages_dir: dist/${{ matrix.component }} + packages-dir: dist/${{ matrix.component }} verbose: true slack-notification: runs-on: ubuntu-latest diff --git a/.github/workflows/bump-version.yaml b/.github/workflows/bump-version.yaml index d1677adb8..df730c2dd 100644 --- a/.github/workflows/bump-version.yaml +++ b/.github/workflows/bump-version.yaml @@ -34,7 +34,7 @@ jobs: - name: Install dependencies run: | - uv sync --only-group release + uv sync --only-group release --locked - name: Bump version id: bump diff 
--git a/.github/workflows/dev-release.yaml b/.github/workflows/dev-release.yaml index 60460c363..9be3192de 100644 --- a/.github/workflows/dev-release.yaml +++ b/.github/workflows/dev-release.yaml @@ -8,21 +8,29 @@ on: description: 'Branch name to release from' default: "master" +env: + COMPONENTS: '["gooddata-api-client","gooddata-pandas","gooddata-fdw","gooddata-sdk","gooddata-dbt","gooddata-flight-server","gooddata-flexconnect","gooddata-pipelines"]' + jobs: - dev-release: - name: Releasing master as dev + matrix-components: + name: Prepare matrix components + runs-on: ubuntu-latest + outputs: + components: ${{ steps.export.outputs.components }} + steps: + - name: Export components JSON + id: export + run: echo "components=${COMPONENTS}" >> "$GITHUB_OUTPUT" + + build: + name: Build ${{ matrix.component }} runs-on: ubuntu-latest + needs: matrix-components + permissions: + contents: read strategy: matrix: - component: - - gooddata-api-client - - gooddata-pandas - - gooddata-fdw - - gooddata-sdk - - gooddata-dbt - - gooddata-flight-server - - gooddata-flexconnect - - gooddata-pipelines + component: ${{ fromJSON(needs.matrix-components.outputs.components) }} steps: - name: Checkout Repository uses: actions/checkout@v4 @@ -32,7 +40,7 @@ jobs: uses: astral-sh/setup-uv@v6 - name: Install dependencies run: | - uv sync --only-group release + uv sync --only-group release --locked - name: Bump to dev run: uv run tbump --only-patch --non-interactive $(./scripts/next_dev.sh) - name: Build ${{ matrix.component }} @@ -43,10 +51,32 @@ jobs: cd packages/${{ matrix.component }} fi uv build --out-dir dist + - name: Upload ${{ matrix.component }} artifacts + uses: actions/upload-artifact@v4 + with: + name: dist-${{ matrix.component }} + path: ${{ matrix.component == 'gooddata-api-client' && format('{0}/dist', matrix.component) || format('packages/{0}/dist', matrix.component) }} + if-no-files-found: error + + publish: + name: Publish ${{ matrix.component }} + runs-on: ubuntu-latest + 
needs: + - matrix-components + - build + permissions: + id-token: write + strategy: + matrix: + component: ${{ fromJSON(needs.matrix-components.outputs.components) }} + steps: + - name: Download ${{ matrix.component }} artifacts + uses: actions/download-artifact@v4 + with: + name: dist-${{ matrix.component }} + path: dist - name: Push ${{ matrix.component}} to pypi uses: pypa/gh-action-pypi-publish@release/v1 with: - user: __token__ - password: ${{ secrets.PYPI_API_TOKEN }} - packages_dir: ${{ matrix.component == 'gooddata-api-client' && format('{0}/dist', matrix.component) || format('packages/{0}/dist', matrix.component) }} + packages-dir: dist verbose: true diff --git a/.github/workflows/netlify-deploy-v2.yaml b/.github/workflows/netlify-deploy-v2.yaml new file mode 100644 index 000000000..cb9422fc8 --- /dev/null +++ b/.github/workflows/netlify-deploy-v2.yaml @@ -0,0 +1,139 @@ +name: Netlify Deploy V2 (Draft) +on: + workflow_dispatch: + +jobs: + # Job 1: Discover which version branches to build + discover-versions: + runs-on: ubuntu-latest + outputs: + matrix: ${{ steps.versions.outputs.matrix }} + steps: + - name: Checkout + uses: actions/checkout@v4 + - name: Fetch remote refs + run: git fetch origin + - name: Discover versions + id: versions + run: | + MATRIX=$(bash scripts/discover-versions.sh origin 4) + echo "matrix=$MATRIX" >> $GITHUB_OUTPUT + echo "Discovered versions: $MATRIX" + + # Job 2: Generate docs for each version (matrix — runs in parallel across versions) + # Each version gets its own runner with that branch's SDK installed. + # Per-version caching means released branches (which rarely change) are instant cache hits. 
+ generate-version: + needs: [discover-versions] + runs-on: ubuntu-latest + strategy: + matrix: + version: ${{ fromJson(needs.discover-versions.outputs.matrix) }} + fail-fast: false + steps: + - name: Get branch commit SHA + id: sha + env: + GH_TOKEN: ${{ github.token }} + run: | + SHA=$(gh api "repos/${{ github.repository }}/git/ref/heads/${{ matrix.version.branch }}" -q '.object.sha') + echo "sha=$SHA" >> $GITHUB_OUTPUT + echo "Branch ${{ matrix.version.branch }} -> section ${{ matrix.version.section }} (SHA: $SHA)" + - name: Cache version docs + id: cache + uses: actions/cache@v4 + with: + path: docs/versioned_docs/${{ matrix.version.section }} + key: version-docs-${{ hashFiles('scripts/docs/*.py', 'scripts/docs/templates/**', 'docs/*_template.md') }}-${{ matrix.version.section }}-${{ steps.sha.outputs.sha }} + - name: Checkout + if: steps.cache.outputs.cache-hit != 'true' + uses: actions/checkout@v4 + - name: Fetch target branch + if: steps.cache.outputs.cache-hit != 'true' + run: git fetch origin ${{ matrix.version.branch }} + - name: Checkout branch packages + if: steps.cache.outputs.cache-hit != 'true' + run: | + git checkout origin/${{ matrix.version.branch }} -- gooddata-api-client/ packages/gooddata-sdk/ packages/gooddata-pandas/ + - name: Setup Python + if: steps.cache.outputs.cache-hit != 'true' + uses: actions/setup-python@v5 + with: + python-version-file: ".python-version" + cache: 'pip' + cache-dependency-path: scripts/script-requirements.txt + - name: Install Dependencies + if: steps.cache.outputs.cache-hit != 'true' + run: | + python -m pip install --upgrade pip + pip install -r scripts/script-requirements.txt + # json_builder.py (legacy fallback for branches without griffe_builder.py) + # imports gooddata_sdk and gooddata_pandas at runtime, so they must be installed. 
+ pip install -e gooddata-api-client/ -e packages/gooddata-sdk/ -e packages/gooddata-pandas/ + - name: Generate version docs + if: steps.cache.outputs.cache-hit != 'true' + run: bash scripts/generate-single-version.sh "origin/${{ matrix.version.branch }}" "${{ matrix.version.section }}" + - name: Upload version artifact + uses: actions/upload-artifact@v4 + with: + name: version-${{ matrix.version.section }} + path: docs/versioned_docs/${{ matrix.version.section }} + retention-days: 1 + + # Job 3: Assemble all versions, build Hugo site, and deploy to Netlify (draft) + build-and-deploy: + needs: [generate-version] + runs-on: ubuntu-latest + steps: + - name: Checkout + uses: actions/checkout@v4 + with: + submodules: recursive + - name: Setup GO + uses: actions/setup-go@v5 + with: + go-version: '>=1.20.1' + cache: false + - name: Cache Go modules + uses: actions/cache@v4 + with: + path: ~/go/pkg/mod + key: go-mod-${{ hashFiles('docs/go.sum') }} + restore-keys: go-mod- + - name: Setup Node + uses: actions/setup-node@v4 + with: + node-version: 20 + cache: 'npm' + cache-dependency-path: docs/package-lock.json + - name: Install Hugo + run: npm install -g hugo-extended@0.117.0 + - name: Install Dependencies + working-directory: ./docs + run: npm ci + - name: Download version artifacts + uses: actions/download-artifact@v4 + with: + pattern: version-* + path: docs/versioned_docs-raw/ + - name: Assemble versioned docs + working-directory: ./docs + run: bash ../scripts/assemble-versions.sh + - name: Cache Hugo resources + uses: actions/cache@v4 + with: + path: docs/resources/_gen + key: hugo-resources-${{ hashFiles('docs/go.sum', 'docs/config/**') }} + restore-keys: hugo-resources- + - name: Build documentation + working-directory: ./docs + env: + HUGO_ENV: production + run: hugo --minify + - name: Publish (draft) + uses: netlify/actions/cli@master + with: + args: deploy -d docs/public + env: + NETLIFY_SITE_ID: 93e23db0-d31a-4a12-801a-b9479ffef486 # Not a secret + 
NETLIFY_AUTH_TOKEN: ${{ secrets.NETLIFY_AUTH_TOKEN }} diff --git a/.github/workflows/pre-merge.yaml b/.github/workflows/pre-merge.yaml index 8550dc711..10539849a 100644 --- a/.github/workflows/pre-merge.yaml +++ b/.github/workflows/pre-merge.yaml @@ -26,6 +26,7 @@ jobs: uses: ./.github/workflows/rw-python-tests.yaml with: changed-python-modules: ${{ needs.collect-changes.outputs.changed-python-modules }} + secrets: inherit status-check: diff --git a/.github/workflows/rw-collect-changes.yaml b/.github/workflows/rw-collect-changes.yaml index 616d107ed..a4ec2d5c8 100644 --- a/.github/workflows/rw-collect-changes.yaml +++ b/.github/workflows/rw-collect-changes.yaml @@ -40,5 +40,9 @@ jobs: python-modules: - '.docker/**' - 'packages/**' + - 'scripts/docs/**' - '*.mk' - 'Makefile' + - Dockerfile + - pyproject.toml + - 'gooddata-api-client/**' diff --git a/.github/workflows/rw-python-tests.yaml b/.github/workflows/rw-python-tests.yaml index e1d4596e4..89d663288 100644 --- a/.github/workflows/rw-python-tests.yaml +++ b/.github/workflows/rw-python-tests.yaml @@ -11,7 +11,7 @@ jobs: if: ${{inputs.changed-python-modules == 'true'}} strategy: matrix: - python_version: [py313, py312, py311, py310] + python_version: [py314, py313, py312, py311, py310] steps: - name: Checkout uses: actions/checkout@v4 @@ -21,37 +21,56 @@ jobs: env: TEST_ENVS: ${{ matrix.python_version }} - name: Upload coverage to Codecov - if: ${{ matrix.python_version == 'py313' }} - uses: codecov/codecov-action@v3 + if: ${{ matrix.python_version == 'py314' }} + uses: codecov/codecov-action@v5 with: - files: ./packages/gooddata-sdk/coverage.xml,./packages/gooddata-pandas/coverage.xml,./packages/gooddata-fdw/coverage.xml,./packages/gooddata-flight-server/coverage.xml,./packages/gooddata-flexconnect/coverage.xml + files: 
./packages/gooddata-sdk/coverage.xml,./packages/gooddata-pandas/coverage.xml,./packages/gooddata-fdw/coverage.xml,./packages/gooddata-flight-server/coverage.xml,./packages/gooddata-flexconnect/coverage.xml,./packages/gooddata-dbt/coverage.xml,./packages/gooddata-pipelines/coverage.xml + token: ${{ secrets.CODECOV_TOKEN }} + override_commit: ${{ github.event.pull_request.head.sha || github.sha }} + override_pr: ${{ github.event.number }} lint-and-format-check: runs-on: ubuntu-latest if: ${{inputs.changed-python-modules == 'true'}} steps: - name: Checkout uses: actions/checkout@v4 - - name: Set up python 3.13 + - name: Set up python 3.14 uses: astral-sh/setup-uv@v6 with: - python-version: 3.13 + python-version: 3.14 - name: Install dependencies run: | - uv sync --only-group lint + uv sync --only-group lint --locked - name: pep8 and formatting check run: | make format + docs-scripts-tests: + runs-on: ubuntu-latest + if: ${{inputs.changed-python-modules == 'true'}} + steps: + - name: Checkout + uses: actions/checkout@v4 + - name: Set up python 3.14 + uses: astral-sh/setup-uv@v6 + with: + python-version: 3.14 + - name: Install dependencies + run: | + uv sync --group test --locked + - name: Test docs scripts + run: | + make test-docs-scripts types-check: runs-on: ubuntu-latest if: ${{inputs.changed-python-modules == 'true'}} steps: - name: Checkout uses: actions/checkout@v4 - - name: Set up python 3.13 + - name: Set up python 3.14 uses: astral-sh/setup-uv@v6 - name: Install dependencies run: | - uv sync --only-group tox --only-group type - - name: mypy check + uv sync --group type --locked + - name: type check run: | - make mypy + make type-check diff --git a/.github/workflows/sdk-cassette-merge-notify-gdc-nas.yaml b/.github/workflows/sdk-cassette-merge-notify-gdc-nas.yaml new file mode 100644 index 000000000..349b35abb --- /dev/null +++ b/.github/workflows/sdk-cassette-merge-notify-gdc-nas.yaml @@ -0,0 +1,38 @@ +# Notify gdc-nas when a cassette-update PR is merged, 
triggering commit analysis +name: Notify gdc-nas on cassette merge + +on: + pull_request: + types: [closed] + branches: + - master + workflow_dispatch: + inputs: + target_repo: + description: 'gdc-nas repo to notify (owner/repo)' + required: false + default: 'gooddata/gdc-nas' + +jobs: + notify: + name: Trigger gdc-nas commit analysis + if: >- + github.event_name == 'workflow_dispatch' + || (github.event.pull_request.merged == true + && startsWith(github.event.pull_request.head.ref, 'auto/cassette-update-')) + runs-on: ubuntu-latest + timeout-minutes: 5 + permissions: + contents: read + steps: + - name: Send repository_dispatch to gdc-nas + env: + GH_TOKEN: ${{ secrets.TOKEN_GITHUB_YENKINS_ADMIN }} + TARGET_REPO: ${{ inputs.target_repo || 'gooddata/gdc-nas' }} + run: | + gh api "repos/$TARGET_REPO/dispatches" \ + -f event_type=cassette-regeneration-merged \ + -f "client_payload[trigger]=cassette-pr-merged" \ + -f "client_payload[sha]=${{ github.sha }}" \ + -f "client_payload[pr_number]=${{ github.event.pull_request.number || '' }}" + echo "Dispatch sent to $TARGET_REPO" diff --git a/.gitignore b/.gitignore index 087bdbe93..1268f15ab 100644 --- a/.gitignore +++ b/.gitignore @@ -12,9 +12,25 @@ packages/gooddata-sdk/tests/catalog/translate .vscode .ruff_cache +# Pytest JSON reports +**/.json-report-*.json + +# Python build artifacts +.tox +*.egg-info +dist/ +build/ +__pycache__/ +*.pyc +*.pyo +*.pyd + docs/node_modules docs/public docs/resources/_gen docs/tmp/ docs/versioned_docs docs/.hugo_build.lock + +# Export artifacts from Docker export-controller service +packages/gooddata-sdk/tests/export/exports/default/ diff --git a/.junie/guidelines.md b/.junie/guidelines.md new file mode 100644 index 000000000..dc4b9435d --- /dev/null +++ b/.junie/guidelines.md @@ -0,0 +1,12 @@ +# You must follow this guidance only if AIDA MCP tools are available. +Call AIDA MCP `get_rules` at every task start and after every scope change; the `query` must match the current requested task. 
+Reuse one stable `session_id` for the whole conversation when calling `get_rules`. +When a `get_rules` response returns `delta.cursor`, pass it as `delta_cursor` on the next `get_rules` call in the same conversation. +You must always follow instructions returned by the `get_rules` MCP tool. +Do not perform Git or GitHub management unless the user explicitly asks for it. +If the user asks for code or docs changes only, stop before commit, push, or PR actions. +For commit and push workflow, do not call `get_rules`. +When the user explicitly asks for a commit, call the AIDA MCP `commit_command` tool. +The tool executes the commit server-side. If it returns `ok=true`, the commit is done — do not run any git commit commands. +If it returns `status=needs_more_input`, infer the missing policy-controlled values from the current changes and retry instead of asking the user. +Do not handcraft `git commit` arguments. diff --git a/.mcp.json b/.mcp.json new file mode 100644 index 000000000..ebda1ae90 --- /dev/null +++ b/.mcp.json @@ -0,0 +1,8 @@ +{ + "mcpServers": { + "aida": { + "command": "aida-mcp", + "args": [] + } + } +} diff --git a/.openapi-generator/README.md b/.openapi-generator/README.md index 5c7f4bb43..597bd7727 100644 --- a/.openapi-generator/README.md +++ b/.openapi-generator/README.md @@ -15,9 +15,9 @@ if no such exists or updates existing client code. Use prepared `make` targets from repository root: - `make api-client` -Predefined targets use `http://gooddata-cn-ce:3000` URL to fetch OpenAPI schema and connect generator container to -the `gooddata-python-sdk_default` network. It is prepared to be executed against running `docker-compose.yaml` -services. +Predefined targets use `http://localhost:3000` URL to fetch OpenAPI schema. If you need to connect to the GoodData +services running in docker-compose, you can use the `gooddata-python-sdk_default` network. 
The docker-compose.yaml +in the repository root starts all required GoodData microservices for testing and development. When you need to point generator to different schema source, use `generate_client.sh` script directly. Below is the example for `gooddata-scan-client` and schema on URL `https://my-gd-cn`, execute it from diff --git a/.openapi-generator/configs/gooddata-api-client.yaml b/.openapi-generator/configs/gooddata-api-client.yaml index 8e6ea46f4..2363b5b88 100644 --- a/.openapi-generator/configs/gooddata-api-client.yaml +++ b/.openapi-generator/configs/gooddata-api-client.yaml @@ -1,12 +1,12 @@ # (C) 2022 GoodData Corporation -generatorName: python +generatorName: python-prior templateDir: /local/.openapi-generator/custom_templates appName: "GoodData api client OpenAPI definition" infoName: "GoodData (generated by OpenAPI Generator)" infoEmail: "support@gooddata.com" projectName: gooddata-api-client packageName: gooddata_api_client -packageVersion: 1.53.0 +packageVersion: 1.61.0 library: urllib3 additionalProperties: licenseInfo: "MIT" diff --git a/.openapi-generator/custom_templates/requirements.mustache b/.openapi-generator/custom_templates/requirements.mustache new file mode 100644 index 000000000..230bff3ba --- /dev/null +++ b/.openapi-generator/custom_templates/requirements.mustache @@ -0,0 +1,3 @@ +python_dateutil >= 2.5.3 +setuptools >= 21.0.0 +urllib3 ~= 2.6.1 diff --git a/.openapi-generator/custom_templates/rest.mustache b/.openapi-generator/custom_templates/rest.mustache new file mode 100644 index 000000000..3dcb4d768 --- /dev/null +++ b/.openapi-generator/custom_templates/rest.mustache @@ -0,0 +1,344 @@ +{{>partial_header}} + +import io +import json +import logging +import re +import ssl +from urllib.parse import urlencode +from urllib.parse import urlparse +from urllib.request import proxy_bypass_environment +import urllib3 +import ipaddress + +from {{packageName}}.exceptions import ApiException, UnauthorizedException, ForbiddenException, 
NotFoundException, ServiceException, ApiValueError + + +logger = logging.getLogger(__name__) + + +class RESTResponse(io.IOBase): + + def __init__(self, resp): + self.urllib3_response = resp + self.status = resp.status + self.reason = resp.reason + self.data = resp.data + + def getheaders(self): + """Returns a dictionary of the response headers.""" + return self.urllib3_response.headers + + def getheader(self, name, default=None): + """Returns a given response header.""" + return self.urllib3_response.headers.get(name, default) + + +class RESTClientObject(object): + + def __init__(self, configuration, pools_size=4, maxsize=None): + # urllib3.PoolManager will pass all kw parameters to connectionpool + # https://github.com/shazow/urllib3/blob/f9409436f83aeb79fbaf090181cd81b784f1b8ce/urllib3/poolmanager.py#L75 # noqa: E501 + # https://github.com/shazow/urllib3/blob/f9409436f83aeb79fbaf090181cd81b784f1b8ce/urllib3/connectionpool.py#L680 # noqa: E501 + # maxsize is the number of requests to host that are allowed in parallel # noqa: E501 + # Custom SSL certificates and client certificates: http://urllib3.readthedocs.io/en/latest/advanced-usage.html # noqa: E501 + + # cert_reqs + if configuration.verify_ssl: + cert_reqs = ssl.CERT_REQUIRED + else: + cert_reqs = ssl.CERT_NONE + + addition_pool_args = {} + if configuration.assert_hostname is not None: + addition_pool_args['assert_hostname'] = configuration.assert_hostname # noqa: E501 + + if configuration.retries is not None: + addition_pool_args['retries'] = configuration.retries + + if configuration.socket_options is not None: + addition_pool_args['socket_options'] = configuration.socket_options + + if maxsize is None: + if configuration.connection_pool_maxsize is not None: + maxsize = configuration.connection_pool_maxsize + else: + maxsize = 4 + + # https pool manager + if configuration.proxy and not should_bypass_proxies( + configuration.host, no_proxy=configuration.no_proxy or ''): + self.pool_manager = 
urllib3.ProxyManager( + num_pools=pools_size, + maxsize=maxsize, + cert_reqs=cert_reqs, + ca_certs=configuration.ssl_ca_cert, + cert_file=configuration.cert_file, + key_file=configuration.key_file, + proxy_url=configuration.proxy, + proxy_headers=configuration.proxy_headers, + **addition_pool_args + ) + else: + self.pool_manager = urllib3.PoolManager( + num_pools=pools_size, + maxsize=maxsize, + cert_reqs=cert_reqs, + ca_certs=configuration.ssl_ca_cert, + cert_file=configuration.cert_file, + key_file=configuration.key_file, + **addition_pool_args + ) + + def request(self, method, url, query_params=None, headers=None, + body=None, post_params=None, _preload_content=True, + _request_timeout=None): + """Perform requests. + + :param method: http request method + :param url: http request url + :param query_params: query parameters in the url + :param headers: http request headers + :param body: request json body, for `application/json` + :param post_params: request post parameters, + `application/x-www-form-urlencoded` + and `multipart/form-data` + :param _preload_content: if False, the urllib3.HTTPResponse object will + be returned without reading/decoding response + data. Default is True. + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. + """ + method = method.upper() + assert method in ['GET', 'HEAD', 'DELETE', 'POST', 'PUT', + 'PATCH', 'OPTIONS'] + + if post_params and body: + raise ApiValueError( + "body parameter cannot be used with post_params parameter." 
+ ) + + post_params = post_params or {} + headers = headers or {} + + timeout = None + if _request_timeout: + if isinstance(_request_timeout, (int, float)): # noqa: E501,F821 + timeout = urllib3.Timeout(total=_request_timeout) + elif (isinstance(_request_timeout, tuple) and + len(_request_timeout) == 2): + timeout = urllib3.Timeout( + connect=_request_timeout[0], read=_request_timeout[1]) + + try: + # For `POST`, `PUT`, `PATCH`, `OPTIONS`, `DELETE` + if method in ['POST', 'PUT', 'PATCH', 'OPTIONS', 'DELETE']: + # Only set a default Content-Type for POST, PUT, PATCH and OPTIONS requests + if (method != 'DELETE') and ('Content-Type' not in headers): + headers['Content-Type'] = 'application/json' + if query_params: + url += '?' + urlencode(query_params) + if ('Content-Type' not in headers) or (re.search('json', + headers['Content-Type'], re.IGNORECASE)): + request_body = None + if body is not None: + request_body = json.dumps(body) + r = self.pool_manager.request( + method, url, + body=request_body, + preload_content=_preload_content, + timeout=timeout, + headers=headers) + elif headers['Content-Type'] == 'application/x-www-form-urlencoded': # noqa: E501 + r = self.pool_manager.request( + method, url, + fields=post_params, + encode_multipart=False, + preload_content=_preload_content, + timeout=timeout, + headers=headers) + elif headers['Content-Type'] == 'multipart/form-data': + # must del headers['Content-Type'], or the correct + # Content-Type which generated by urllib3 will be + # overwritten. 
+ del headers['Content-Type'] + r = self.pool_manager.request( + method, url, + fields=post_params, + encode_multipart=True, + preload_content=_preload_content, + timeout=timeout, + headers=headers) + # Pass a `string` parameter directly in the body to support + # other content types than Json when `body` argument is + # provided in serialized form + elif isinstance(body, str) or isinstance(body, bytes): + request_body = body + r = self.pool_manager.request( + method, url, + body=request_body, + preload_content=_preload_content, + timeout=timeout, + headers=headers) + else: + # Cannot generate the request from given parameters + msg = """Cannot prepare a request message for provided + arguments. Please check that your arguments match + declared content type.""" + raise ApiException(status=0, reason=msg) + # For `GET`, `HEAD` + else: + r = self.pool_manager.request(method, url, + fields=query_params, + preload_content=_preload_content, + timeout=timeout, + headers=headers) + except urllib3.exceptions.SSLError as e: + msg = "{0}\n{1}".format(type(e).__name__, str(e)) + raise ApiException(status=0, reason=msg) + + if _preload_content: + r = RESTResponse(r) + + # log response body + logger.debug("response body: %s", r.data) + + if not 200 <= r.status <= 299: + if r.status == 401: + raise UnauthorizedException(http_resp=r) + + if r.status == 403: + raise ForbiddenException(http_resp=r) + + if r.status == 404: + raise NotFoundException(http_resp=r) + + if 500 <= r.status <= 599: + raise ServiceException(http_resp=r) + + raise ApiException(http_resp=r) + + return r + + def GET(self, url, headers=None, query_params=None, _preload_content=True, + _request_timeout=None): + return self.request("GET", url, + headers=headers, + _preload_content=_preload_content, + _request_timeout=_request_timeout, + query_params=query_params) + + def HEAD(self, url, headers=None, query_params=None, _preload_content=True, + _request_timeout=None): + return self.request("HEAD", url, + 
headers=headers, + _preload_content=_preload_content, + _request_timeout=_request_timeout, + query_params=query_params) + + def OPTIONS(self, url, headers=None, query_params=None, post_params=None, + body=None, _preload_content=True, _request_timeout=None): + return self.request("OPTIONS", url, + headers=headers, + query_params=query_params, + post_params=post_params, + _preload_content=_preload_content, + _request_timeout=_request_timeout, + body=body) + + def DELETE(self, url, headers=None, query_params=None, body=None, + _preload_content=True, _request_timeout=None): + return self.request("DELETE", url, + headers=headers, + query_params=query_params, + _preload_content=_preload_content, + _request_timeout=_request_timeout, + body=body) + + def POST(self, url, headers=None, query_params=None, post_params=None, + body=None, _preload_content=True, _request_timeout=None): + return self.request("POST", url, + headers=headers, + query_params=query_params, + post_params=post_params, + _preload_content=_preload_content, + _request_timeout=_request_timeout, + body=body) + + def PUT(self, url, headers=None, query_params=None, post_params=None, + body=None, _preload_content=True, _request_timeout=None): + return self.request("PUT", url, + headers=headers, + query_params=query_params, + post_params=post_params, + _preload_content=_preload_content, + _request_timeout=_request_timeout, + body=body) + + def PATCH(self, url, headers=None, query_params=None, post_params=None, + body=None, _preload_content=True, _request_timeout=None): + return self.request("PATCH", url, + headers=headers, + query_params=query_params, + post_params=post_params, + _preload_content=_preload_content, + _request_timeout=_request_timeout, + body=body) + +# end of class RESTClientObject + + +def is_ipv4(target): + """ Test if IPv4 address or not + """ + try: + chk = ipaddress.IPv4Address(target) + return True + except ipaddress.AddressValueError: + return False + + +def in_ipv4net(target, net): + """ 
Test if target belongs to given IPv4 network + """ + try: + nw = ipaddress.IPv4Network(net) + ip = ipaddress.IPv4Address(target) + if ip in nw: + return True + return False + except ipaddress.AddressValueError: + return False + except ipaddress.NetmaskValueError: + return False + + +def should_bypass_proxies(url, no_proxy=None): + """ Yet another requests.should_bypass_proxies + Test if proxies should not be used for a particular url. + """ + + parsed = urlparse(url) + + # special cases + if parsed.hostname in [None, '']: + return True + + # special cases + if no_proxy in [None, '']: + return False + if no_proxy == '*': + return True + + no_proxy = no_proxy.lower().replace(' ', ''); + entries = ( + host for host in no_proxy.split(',') if host + ) + + if is_ipv4(parsed.hostname): + for item in entries: + if in_ipv4net(parsed.hostname, item): + return True + return proxy_bypass_environment(parsed.hostname, {'no': no_proxy}) diff --git a/.openapi-generator/custom_templates/setup.mustache b/.openapi-generator/custom_templates/setup.mustache index 898f30456..46a76c426 100644 --- a/.openapi-generator/custom_templates/setup.mustache +++ b/.openapi-generator/custom_templates/setup.mustache @@ -19,7 +19,7 @@ VERSION = "{{packageVersion}}" # http://pypi.python.org/pypi/setuptools REQUIRES = [ - "urllib3 >= 1.25.3", + "urllib3 >= 2.6.1", "python-dateutil", {{#asyncio}} "aiohttp >= 3.0.0", diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml index 9503f8879..78b9d7429 100644 --- a/.pre-commit-config.yaml +++ b/.pre-commit-config.yaml @@ -15,7 +15,7 @@ repos: args: [ '--maxkb=890' ] - id: check-case-conflict - repo: https://github.com/astral-sh/ruff-pre-commit - rev: v0.9.5 + rev: v0.15.1 hooks: # Run the linter. 
- id: ruff @@ -35,6 +35,6 @@ repos: hooks: - id: gitlint - repo: https://github.com/astral-sh/uv-pre-commit - rev: 0.9.5 + rev: 0.10.0 hooks: - id: uv-lock diff --git a/.python-version b/.python-version index c10780c62..f982feb41 100644 --- a/.python-version +++ b/.python-version @@ -1 +1 @@ -3.13.1 +3.14.0 diff --git a/AGENTS.md b/AGENTS.md new file mode 100644 index 000000000..dc4b9435d --- /dev/null +++ b/AGENTS.md @@ -0,0 +1,12 @@ +# You must follow this guidance only if AIDA MCP tools are available. +Call AIDA MCP `get_rules` at every task start and after every scope change; the `query` must match the current requested task. +Reuse one stable `session_id` for the whole conversation when calling `get_rules`. +When a `get_rules` response returns `delta.cursor`, pass it as `delta_cursor` on the next `get_rules` call in the same conversation. +You must always follow instructions returned by the `get_rules` MCP tool. +Do not perform Git or GitHub management unless the user explicitly asks for it. +If the user asks for code or docs changes only, stop before commit, push, or PR actions. +For commit and push workflow, do not call `get_rules`. +When the user explicitly asks for a commit, call the AIDA MCP `commit_command` tool. +The tool executes the commit server-side. If it returns `ok=true`, the commit is done — do not run any git commit commands. +If it returns `status=needs_more_input`, infer the missing policy-controlled values from the current changes and retry instead of asking the user. +Do not handcraft `git commit` arguments. diff --git a/CONTRIBUTING.md b/CONTRIBUTING.md index 5932c2a36..c3c2c0ea3 100644 --- a/CONTRIBUTING.md +++ b/CONTRIBUTING.md @@ -2,7 +2,7 @@ ## Getting Started -1. Ensure you have at minimum Python 3.13 installed; Python 3.12, 3.11 and 3.10 are optional for multi-environment tests +1. 
Ensure you have at minimum Python 3.14 installed; Python 3.13, 3.12, 3.11 and 3.10 are optional for multi-environment tests This repo uses [tox](https://tox.readthedocs.io/en/latest/) and by default will try to run tests against all supported versions. If you have only subset of supported python interpreters installed, see @@ -21,7 +21,7 @@ source .venv/bin/activate ``` - The `make dev` command will create a new Python 3.13 virtual environment in the `.venv` directory, install all + The `make dev` command will create a new Python 3.14 virtual environment in the `.venv` directory, install all third party dependencies into it and setup git hooks. Additionally, if you use [direnv](https://direnv.net/) you can run `direnv allow .envrc` to enable automatic @@ -38,6 +38,30 @@ request. +## Maintenance Tasks + +### Adding a New Package + +When adding a new distributable package to this monorepo, update release automation and PyPI configuration together: + +1. Add the package name to `COMPONENTS` in: + - `.github/workflows/dev-release.yaml` + - `.github/workflows/build-release.yaml` +2. Verify the package is built by release workflows and artifacts are uploaded from its `dist/` directory. +3. Configure the package on PyPI to use Trusted Publisher for this repository/workflow combination. +4. Run/observe a release workflow and confirm publishing succeeds via OIDC (no `PYPI_API_TOKEN` required). + +### Adding Support for a New Python Version + +When adding support for a new Python version: + +1. Update all `pyproject.toml` files to include the new Python version classifier +2. Update all `tox.ini` files to include the new Python version in `envlist` +3. Update CI/CD workflows to test against the new Python version +4. 
**Run `uv lock --upgrade`** to upgrade the lock file and re-resolve dependencies for all Python versions, including the newly added one + +The lock file upgrade is crucial as it allows `uv` to pick package versions that are compatible with all supported Python versions. + ## Coding Conventions This project uses [ruff](https://github.com/astral-sh/ruff) to ensure basic code sanity and for no-nonsense, consistent formatting. @@ -124,7 +148,6 @@ Steps: ```diff * [update_name](./update_name/) - * [update_oidc_parameters](./update_oidc_parameters/) * [create_or_update_jwk](./create_or_update_jwk/) * [delete_jwk](./delete_jwk/) * [get_jwk](./get_jwk/) @@ -170,23 +193,104 @@ Tests triggered by `make` can be controlled via these environment variables: ``` ### How to update vcrpy tests -Some tests include HTTP call(s) to GD.CN instance. That tests are executed through -[vcrpy](https://vcrpy.readthedocs.io/) so that GD.CN instance is needed either first time or when request is changed. +Some tests include HTTP call(s) to GoodData instance. Those tests are executed through +[vcrpy](https://vcrpy.readthedocs.io/) so that a GoodData instance is needed either the first time or when a request is changed. It has clear benefits: -- ability to run the tests without GD.CN +- ability to run the tests without a GoodData instance - request and response snapshot - it makes debugging of HTTP calls simple -But there is one disadvantage. One needs GD.CN instance with the original setup to change tests. -`docker-compose.yaml` in root of the repository is here to help. It starts: -- GD.CN AIO in selected version -- postgres with gooddata-fdw extension -- service which setups GD.CN AIO demo project including PDM, LDM, metrics and visualizations +But there is one disadvantage. One needs a GoodData instance with the original setup to change tests. +`docker-compose.yaml` in the root of the repository is here to help. + +#### Prerequisites for Running Tests Locally + +1. 
**AWS ECR Login** - The docker-compose uses ECR images: + ```bash + aws ecr get-login-password | docker login --username AWS --password-stdin 020413372491.dkr.ecr.us-east-1.amazonaws.com + ``` + +2. **GoodData License Key** - Get from the GoodData team and place it in the `./build/license` file: + ```bash + mkdir -p build + echo "" > build/license + ``` + The auth-service reads the license from this mounted location. + +#### What docker-compose starts + +The docker-compose starts a full GoodData microservices stack: + +**Infrastructure services:** +- PostgreSQL (with demo databases: `md`, `dex`, `automation`, `gw`, `tiger`) +- Redis (caching) +- Apache Pulsar (messaging) +- Traefik (routing) +- Dex (OIDC authentication) + +**Core GoodData services:** +- metadata-api, auth-service, calcique, sql-executor, result-cache +- afm-exec-api, scan-model, api-gateway, api-gw +- automation, export-controller, tabular-exporter +- quiver (data processing engine) + +**Bootstrap services (run once):** +- `metadata-organization-bootstrap` - Creates organization + admin user +- `data-loader` - Loads demo data into PostgreSQL (with `--no-schema-versioning`) +- `create-ds` - Registers data sources in metadata-api +- `layout-uploader` - Uploads workspace hierarchy, analytics model, users, permissions + +#### The `--no-schema-versioning` Flag + +The data-loader uses `--no-schema-versioning` flag to ensure: +- Schema names are consistent (e.g., `demo` not `demo_abc123`) +- Fixture names don't have hash suffixes +- VCR cassette tests produce reproducible results + +#### Starting GoodData for Tests + +```bash +# Start all services +docker compose up -d + +# Wait for bootstrap to complete (watch for "Layout upload completed successfully!") +docker compose logs -f metadata-organization-bootstrap data-loader create-ds layout-uploader + +# Check service status +docker compose ps + +# The GoodData API is available at http://localhost:3000 +# Default credentials: demo@example.com / demo123 +# 
API token: YWRtaW46Ym9vdHN0cmFwOmFkbWluMTIz +``` + +#### Updating vcrpy Cassettes When a vcrpy supported test needs to be updated: -- start GD.CN using above `docker-compose.yaml` -- delete original vcrpy cassette with `make remove-cassettes` -- execute test -- update a newly generated cassette to the git +- Start GoodData using the above `docker-compose.yaml` +- Wait for all bootstrap services to complete +- Delete original vcrpy cassette with `make remove-cassettes` +- Execute test +- Commit the newly generated cassette to git + +#### Stopping and Cleanup + +```bash +# Stop all services +docker compose down + +# Full cleanup (remove volumes - required for fresh start) +docker compose down -v +``` + +#### Running gooddata-fdw Tests + +The FDW (Foreign Data Wrapper) tests require an additional service. Start it with: + +```bash +docker compose --profile fdw up -d +``` + +This starts a PostgreSQL instance with the gooddata-fdw extension on port 2543. ## Run continuous integration tests Tests in pull request (PR) are executed using docker. The following is done to make test environment as close @@ -217,10 +321,31 @@ venv automatically. So when docker tox tests are executed after localhost tests ```bash TEST_ENVS=py311,py310 ADD_ARGS="-k http_headers" make test-ci ``` -- run tests on localhost against all-in-one image started with docker-compose +- run tests on localhost against microservices started with docker-compose ```bash RECREATE_ENVS=1 HOST_NETWORK=1 make test-ci ``` # How to generate and maintain OpenAPI clients Refer to our [OpenAPI client README](./.openapi-generator/README.md) + +# Kinds of fixtures and layouts + +There are several kinds of fixtures used for the tests. +This is important to know about when you're making changes or updating cassettes as it can surprise you. +You have to keep in mind especially if you want to add new attributes to be used across several tests. 
+ +- `packages/tests-support/fixtures` are used as the default layout for tests that are uploaded by docker compose. They + are also uploaded by the `upload_demo_layout.py` script. + +## GoodData SDK + +These are common places for fixtures used in GoodData SDK: + +- `packages/gooddata-sdk/tests/catalog/refresh`: this is the layout that actually *replaces* the layout after some kinds of catalog tests, and hence overrides the layout from docker compose. +You also have to make changes here if you want to make changes to the default layout. +It's a current TODO to merge this and `packages/tests-support/fixtures` into one layout. +- `packages/gooddata-sdk/tests/catalog/expected`: these are fixtures that are used to compare against in various catalog tests. +- `packages/gooddata-sdk/tests/catalog/store`, `packages/gooddata-sdk/tests/catalog/load`, `packages/gooddata-sdk/tests/catalog/load_with_locale`, ... + - These are numerous fixtures that are used for load and put tests. You often need to change more of them. 
+ - Note that some of these contain `workspace` and `workspace_content` subfolders, depending on where the fixtures are used to load diff --git a/Dockerfile b/Dockerfile index 7696d49a5..6b446275c 100644 --- a/Dockerfile +++ b/Dockerfile @@ -1,13 +1,18 @@ # (C) 2021 GoodData Corporation ARG PY_TAG +FROM ghcr.io/astral-sh/uv:0.10 AS uv FROM python:${PY_TAG} ARG PY_TAG ARG ENV_TAG + # tox defines all python targets, makefile recognizes TEST_ENVS and forces # tox to execute only tests for installed python ENV TEST_ENVS=${ENV_TAG} +# copy uv binary from official image; version is guarded by required-version in pyproject.toml +COPY --from=uv /uv /usr/local/bin/uv + # install make and gosu ENV GOSU_VERSION=1.14 RUN set -x \ @@ -26,15 +31,24 @@ RUN set -x \ && rm -rf /var/lib/apt/lists/* \ && true -# install tox -ENV PYTHON_TOX_VERSION=4.30.0 -ENV PYTHON_TOX_UV_VERSION=1.28.0 +# Set working directory before copying files +WORKDIR /data + +# copy dependency files - these will be available at build time +# At runtime, the directory will be mounted, but uv will use the lock file +# to ensure consistent dependencies +COPY pyproject.toml uv.lock ./ + +# Install tox and tox-uv as system packages so they're available globally +# We use uv pip install to install packages from the tox dependency group in pyproject.toml +# by reading from the lock file which ensures consistent versions +# Clean up dependency files after installation to reduce image size RUN set -x \ - && pip3 install uv tox==${PYTHON_TOX_VERSION} tox-uv==${PYTHON_TOX_UV_VERSION}\ + && uv pip install --system --group tox \ + && rm -f pyproject.toml uv.lock \ && true COPY .docker/entrypoint.sh /entrypoint.sh -WORKDIR /data LABEL image_name="GoodData Python SDK test image with python, tox and make" # LABEL maintainer="TigerTeam " diff --git a/Makefile b/Makefile index 1fd130240..307575326 100644 --- a/Makefile +++ b/Makefile @@ -13,6 +13,9 @@ URL="${BASE_URL}/api/${API_VERSION}/schemas" include ci_tests.mk +# Common 
command components +RUFF = .venv/bin/ruff + all: echo "Nothing here yet." @@ -23,24 +26,23 @@ dev: .PHONY: lint lint: - .venv/bin/ruff check . + $(RUFF) check . .PHONY: lint-fix lint-fix: - .venv/bin/ruff check . --fix + $(RUFF) check . --fix .PHONY: format format: - .venv/bin/ruff format --check . - -.PHONY: format-diff -format-diff: - .venv/bin/ruff format --diff . + $(RUFF) format --check . .PHONY: format-fix format-fix: - .venv/bin/ruff format . - .venv/bin/ruff check . --fix --fixable I + $(RUFF) format . + +.PHONY: format-diff +format-diff: + $(RUFF) format --diff . define download_client @@ -51,13 +53,16 @@ define generate_client ./scripts/generate_client.sh gooddata-$(1)-client -f "/local/schemas/gooddata-$(1)-client.json" endef - - .PHONY: api-client api-client: download rm -f schemas/gooddata-api-client.json cat schemas/gooddata-*.json | jq -S -s 'reduce .[] as $$item ({}; . * $$item) + { tags : ( reduce .[].tags as $$item (null; . + $$item) | unique_by(.name) ) }' | sed '/\u0000/d' > "schemas/gooddata-api-client.json" $(call generate_client,api) + # OpenAPI Generator drops the \x00 literal from regex patterns like ^[^\x00]*$, + # producing the invalid Python regex ^[^]*$. Restore the null-byte escape. 
+ find gooddata-api-client/gooddata_api_client -name '*.py' -exec \ + sed -i.bak 's/\^\[\^\]\*\$$/^[^\\x00]*$$/g' {} + && \ + find gooddata-api-client/gooddata_api_client -name '*.py.bak' -delete .PHONY: download download: @@ -66,13 +71,17 @@ download: $(call download_client,scan) $(call download_client,"export") $(call download_client,automation) + $(call download_client,result) -.PHONY: mypy -mypy: +.PHONY: type-check +type-check: RESULT=0; \ - for project in $(NO_CLIENT_GD_PROJECTS_DIRS); do $(MAKE) -C packages/$${project} $@ || RESULT=$$?; done; \ + for project in $(NO_CLIENT_GD_PROJECTS_DIRS); do $(MAKE) -C packages/$${project} type-check || RESULT=$$?; done; \ exit $$RESULT +.PHONY: types +types: type-check + .PHONY: test test: RESULT=0; \ @@ -87,7 +96,7 @@ release: .PHONY: release-ci release-ci: if [ -z "$(VERSION)" ]; then echo "Usage: 'make release-ci VERSION=X.Y.Z'"; false; else \ - uv run tbump $(VERSION) --only-patch --non-interactive ; fi + uv run tbump $(VERSION) --only-patch --non-interactive && uv lock ; fi .PHONY: check-copyright check-copyright: @@ -105,6 +114,10 @@ remove-cassettes: for project in $(NO_CLIENT_GD_PROJECTS_DIRS); do $(MAKE) -C packages/$${project} $@ || RESULT=$$?; done; \ exit $$RESULT +.PHONY: test-docs-scripts +test-docs-scripts: + uv run pytest scripts/docs/tests/ -v + .PHONY: new-docs new-docs: cd docs; \ diff --git a/ci_tests.mk b/ci_tests.mk index 9da132d1d..43b283ec1 100644 --- a/ci_tests.mk +++ b/ci_tests.mk @@ -1,5 +1,5 @@ # (C) 2021 GoodData Corporation -IN_TEST_ENVS = py313,py312,py311,py310 +IN_TEST_ENVS = py314,py313,py312,py311,py310 ifdef TEST_ENVS IN_TEST_ENVS = $(TEST_ENVS) endif @@ -37,19 +37,23 @@ endif # Targets to build docker file for each python version .PHONY: test-ci-py310-build test-ci-py310-build: Dockerfile - docker build --build-arg "PY_TAG=3.10.15-slim-bookworm" --build-arg "ENV_TAG=py310" -t python-sdk:py310 . 
+ docker build --build-arg "PY_TAG=3.10.19-slim-bookworm" --build-arg "ENV_TAG=py310" -t python-sdk:py310 . .PHONY: test-ci-py311-build test-ci-py311-build: Dockerfile - docker build --build-arg "PY_TAG=3.11.10-slim-bookworm" --build-arg "ENV_TAG=py311" -t python-sdk:py311 . + docker build --build-arg "PY_TAG=3.11.14-slim-bookworm" --build-arg "ENV_TAG=py311" -t python-sdk:py311 . .PHONY: test-ci-py312-build test-ci-py312-build: Dockerfile - docker build --build-arg "PY_TAG=3.12.6-slim-bookworm" --build-arg "ENV_TAG=py312" -t python-sdk:py312 . + docker build --build-arg "PY_TAG=3.12.12-slim-bookworm" --build-arg "ENV_TAG=py312" -t python-sdk:py312 . .PHONY: test-ci-py313-build test-ci-py313-build: Dockerfile - docker build --build-arg "PY_TAG=3.13.1-slim-bookworm" --build-arg "ENV_TAG=py313" -t python-sdk:py313 . + docker build --build-arg "PY_TAG=3.13.12-slim-bookworm" --build-arg "ENV_TAG=py313" -t python-sdk:py313 . + +.PHONY: test-ci-py314-build +test-ci-py314-build: Dockerfile + docker build --build-arg "PY_TAG=3.14.3-slim-bookworm" --build-arg "ENV_TAG=py314" -t python-sdk:py314 . # test-ci target triggers unit tests for each requested environment .PHONY: test-ci diff --git a/clients_README.md b/clients_README.md index 2675c343f..f70c0cd8c 100644 --- a/clients_README.md +++ b/clients_README.md @@ -1,6 +1,6 @@ # Generated API Clients -The generated client provide Python classes that you can use to call GoodData.CN REST APIs. The client contains +The generated client provides Python classes that you can use to call GoodData REST APIs. The client contains models for API requests and responses and controllers to actually call the APIs. We recommend using the gooddata_sdk package where possible because: diff --git a/docker-compose.yaml b/docker-compose.yaml index ce914f28f..3e3c1ad38 100644 --- a/docker-compose.yaml +++ b/docker-compose.yaml @@ -1,32 +1,1094 @@ -# (C) 2022 GoodData Corporation -# docker-compose.yaml is dedicated to testing. 
All the tests are expected to be passing -# against this docker compose. +# (C) 2024 GoodData Corporation +# Multi-service docker-compose for Python SDK testing. +# +# This replaces the old AIO approach with individual microservices matching gdc-nas architecture. +# Uses ECR images only - no gdc-nas dependency required. +# +# ============================================================================ +# PREREQUISITES +# ============================================================================ +# 1. AWS ECR login: +# aws ecr get-login-password | docker login --username AWS --password-stdin 020413372491.dkr.ecr.us-east-1.amazonaws.com +# +# 2. GoodData license key (get from GoodData team): +# mkdir -p build && echo "" > build/license +# +# ============================================================================ +# WORKFLOW +# ============================================================================ +# The services start in this order: +# +# 1. Infrastructure (postgres, redis, pulsar, dex, quiver, router) +# 2. Core services (metadata-api, auth-service, calcique, etc.) +# 3. Gateway services (api-gw, api-gateway) +# 4. Bootstrap sequence: +# a) metadata-organization-bootstrap - Creates organization + admin user +# b) data-loader - Loads demo data into PostgreSQL (with --no-schema-versioning) +# c) create-ds - Registers data sources in metadata-api +# d) layout-uploader - Uploads workspace hierarchy, analytics model, users, permissions +# +# ============================================================================ +# NO-SCHEMA-VERSIONING (Critical for SDK tests) +# ============================================================================ +# The data-loader uses --no-schema-versioning flag to ensure: +# - Schema names are consistent (e.g., "demo" not "demo_abc123") +# - Fixture names don't have hash suffixes +# - VCR cassette tests produce reproducible results +# +# This flag is set in: +# - data-loader service: ./data_load.py ... 
--no-schema-versioning +# - create-ds service: ./create_data_sources.py ... --no-schema-versioning +# +# ============================================================================ +# USAGE +# ============================================================================ +# # Start all services: +# docker compose up -d +# +# # Wait for bootstrap to complete: +# docker compose logs -f metadata-organization-bootstrap data-loader create-ds layout-uploader +# +# # Check service status: +# docker compose ps +# +# # Run SDK tests (after services are ready): +# make test +# +# # Stop all services: +# docker compose down +# +# # Full cleanup (remove volumes): +# docker compose down -v -version: '3.7' +x-java-opts: &java-opts + JAVA_OPTS: "${JAVA_OPTS:--Xmx128m --add-opens=java.base/sun.net=ALL-UNNAMED --add-opens=java.base/java.lang=ALL-UNNAMED --add-opens=java.base/java.nio=ALL-UNNAMED}" + LOGGING_APPENDER: logfmt + SPRING_CONFIG_ADDITIONAL_LOCATION: classpath:git.properties + MANAGEMENT_ZIPKIN_TRACING_EXPORT_ENABLED: "false" + GDC_TELEMETRY_ENABLED: "false" + +x-quiver-env-vars: &quiver-env-vars + QUIVER_LOCATION_ENDPOINTS: quiver:16001 + +x-key-set: + &key-set '{"primaryKeyId":1207756424,"key":[{"keyData":{"typeUrl":"type.googleapis.com/google.crypto.tink.AesGcmKey","value":"GiDrmoEBcKNE3bbiglef0NiklmIvuSBf8NEgqH/3KEdOBQ==","keyMaterialType":"SYMMETRIC"},"status":"ENABLED","keyId":1207756424,"outputPrefixType":"TINK"}]}' services: - gooddata-cn-ce: - # Use the master tag, which points to the latest stable development - image: 020413372491.dkr.ecr.us-east-1.amazonaws.com/nas/gooddata-cn-ce:master - ports: - - "3000:3000" - - "5432:5432" - volumes: - - gooddata-cn-ce-data:/data - environment: - APP_LOGLEVEL: "INFO" - GDCN_LICENSE_KEY: ${GDCN_LICENSE_KEY} - GDCN_ENABLE_CSV_EXPORT_API: 1 - GDC_FEATURES_VALUES_ENABLE_ANALYTICAL_DASHBOARD_PERMISSIONS: "true" - GDC_FEATURES_VALUES_ENABLE_METRIC_SQL_AND_DATA_EXPLAIN: 'ENABLED' - GDC_FEATURES_VALUES_ENABLE_ROLLUP_TOTALS: "true" - 
GDC_FEATURES_VALUES_ENABLE_ROLLUP_TOTALS_FOR_METRICS: "true" + ## ============================================ + ## Infrastructure Services + ## ============================================ + + postgres: + image: 020413372491.dkr.ecr.us-east-1.amazonaws.com/pullthrough/docker.io/library/postgres:14-alpine + healthcheck: + test: ["CMD-SHELL", "pg_isready -U postgres -d rdb"] + interval: 10s + timeout: 5s + retries: 6 + start_period: 5s + command: ["postgres", "-c", "max_connections=200"] + shm_size: '1gb' + ports: + - "${PG_EXPOSED_PORT-5432}:5432" + environment: + POSTGRES_DB: rdb + POSTGRES_PASSWORD: passw0rd + POSTGRES_USER: postgres + volumes: + - postgres-data:/var/lib/postgresql/data + - ./scripts/postgres:/docker-entrypoint-initdb.d:ro + + pulsar: + image: 020413372491.dkr.ecr.us-east-1.amazonaws.com/pullthrough/docker.io/apachepulsar/pulsar:3.3.9 + healthcheck: + test: ["CMD-SHELL", "curl -fsS http://pulsar:8080/admin/v2/brokers/health >/dev/null"] + interval: 10s + timeout: 5s + retries: 6 + start_period: 5s + ports: + - "${PULSAR_EXPOSED_PORT-6650}:6650" + - "${PULSAR_ADMIN_EXPOSED_PORT-8080}:8080" + volumes: + - pulsar-data:/pulsar/data + environment: + PULSAR_LOG_LEVEL: warn + PULSAR_MEM: '-Xms128m -Xmx256m -XX:MaxDirectMemorySize=256m' + PULSAR_STANDALONE_USE_ZOOKEEPER: "1" + subscriptionExpirationTimeMinutes: "5" + journalMaxBackups: "0" + systemTopicEnabled: "true" + topicLevelPoliciesEnabled: "true" + command: > + /bin/bash -c + "bin/apply-config-from-env.py conf/standalone.conf + && bin/pulsar standalone -nfw -nss" + + pulsar-create-namespace: + image: 020413372491.dkr.ecr.us-east-1.amazonaws.com/pullthrough/docker.io/apachepulsar/pulsar:3.3.9 + depends_on: + pulsar: + condition: service_healthy + entrypoint: | + sh -c ' + until curl --output /dev/null -fs http://pulsar:8080/admin/v2/tenants ; do + echo waiting for pulsar to start... 
+ sleep 5; + done; + bin/pulsar-admin --admin-url http://pulsar:8080 namespaces create public/default || true; + bin/pulsar-admin --admin-url http://pulsar:8080 tenants get gooddata || + bin/pulsar-admin --admin-url http://pulsar:8080 tenants create gooddata; + bin/pulsar-admin --admin-url http://pulsar:8080 namespaces get-clusters gooddata/nas || + bin/pulsar-admin --admin-url http://pulsar:8080 namespaces create gooddata/nas; + ' + + redis: + image: 020413372491.dkr.ecr.us-east-1.amazonaws.com/pullthrough/docker.io/library/redis:8.4.0-alpine + healthcheck: + test: ["CMD", "redis-cli", "ping"] + interval: 10s + timeout: 5s + retries: 3 + start_period: 5s + ports: + - "${REDIS_EXPOSED_PORT-6379}:6379" + volumes: + - redis-data:/data + command: ["--maxmemory", "64m", "--maxmemory-policy", "allkeys-lru"] + + router: + image: 020413372491.dkr.ecr.us-east-1.amazonaws.com/pullthrough/docker.io/library/traefik:v3.6 + environment: + TRAEFIK_LOG_FORMAT: "common" + TRAEFIK_API_INSECURE: "true" + TRAEFIK_API_DASHBOARD: "true" + TRAEFIK_PROVIDERS_DOCKER: "true" + TRAEFIK_PROVIDERS_DOCKER_EXPOSEDBYDEFAULT: "false" + TRAEFIK_ENTRYPOINTS_WEB_ADDRESS: ":3000" + TRAEFIK_ENTRYPOINTS_TRAEFIK_ADDRESS: ":8081" + TRAEFIK_ACCESSLOG: "true" + ports: + - "${TRAEFIK_EXPOSED_PORT-3000}:3000" + - "${TRAEFIK_MGMT_EXPOSED_PORT-8081}:8081" + volumes: + - ${DOCKER_SOCKET:-/var/run/docker.sock}:/var/run/docker.sock:ro + labels: + - "traefik.enable=true" + - "traefik.http.middlewares.sec-headers.headers.customFrameOptionsValue=SAMEORIGIN" + + ## ============================================ + ## Core GoodData Microservices (from ECR) + ## ============================================ + + metadata-api: + image: 020413372491.dkr.ecr.us-east-1.amazonaws.com/nas/metadata-api:latest + tty: true + ports: + - "${METADATA_API_EXPOSED_PORT-9007}:9007" + - "${METADATA_API_MANAGEMENT_EXPOSED_PORT-9008}:9008" + - "${METADATA_API_GRPC_EXPOSED_PORT-6572}:6572" + depends_on: + postgres: + condition: 
service_healthy + pulsar-create-namespace: + condition: service_completed_successfully + dex: + condition: service_started + environment: + <<: *java-opts + JAVA_OPTS: "${JAVA_OPTS:--Xmx2G}" + GDC_METADATA_ENCRYPTOR_KEYSET: *key-set + GRPC_DATASOURCEMETADATA_HOST: sql-executor + GRPC_DATASOURCEMETADATA_PORT: 6570 + GRPC_LICENSE_HOST: auth-service + GRPC_LICENSE_PORT: 6573 + GRPC_DEX_HOST: dex + GRPC_DEX_PORT: 5557 + GRPC_CLIENT_FEDERATION_ENABLED: "false" + PULSAR_ADMINURL: http://pulsar:8080 + PULSAR_PRODUCERS_MODEL_UPDATE_TOPIC: gooddata/nas/metadata.model + PULSAR_PRODUCERS_DATA_SOURCE_CHANGE_TOPIC: gooddata/nas/data-source.change + PULSAR_PRODUCERS_CACHE_COMMAND_TOPIC: gooddata/nas/metadata.cache-command + PULSAR_PRODUCERS_CACHE_SETTINGS_CHANGE_TOPIC: gooddata/nas/cache-settings.change + PULSAR_PRODUCERS_METADATA_SYNC_TOPIC: gooddata/nas/metadata.sync + PULSAR_CONSUMERS_CACHE_SETTINGS_BOOTSTRAP_TOPIC: gooddata/nas/cache-settings.bootstrap + PULSAR_CONSUMERS_CACHE_COMMAND_TOPIC: gooddata/nas/metadata.cache-command + PULSAR_CONSUMERS_METADATA_BACK_SYNC_TOPIC: gooddata/nas/metadata.back-sync + PULSAR_PULL_CONSUMERS_METADATA_SYNC_REQUEST_TOPICS: gooddata/nas/metadata.sync-request + PULSAR_SERVICEURL: pulsar://pulsar:6650 + SPRING_DATASOURCE_URL: 'jdbc:postgresql://postgres:5432/md?reWriteBatchedInserts=true' + SPRING_DATASOURCE_PASSWORD: passw0rd + SPRING_DATASOURCE_USERNAME: postgres + SPRING_PROFILES_ACTIVE: localDex + GDC_TIGER_OAUTH2_LOCAL_CALLBACK_PORT: ${TRAEFIK_EXPOSED_PORT-3000} + GDCN_SETTING_ALLOW_UNSAFE_FLEX_CONNECT_ENDPOINTS: '{"value": true}' + GDC_FEATURES_VALUES_ENABLE_COMPOSITE_GRAIN: "true" + GDC_FEATURES_VALUES_ENABLE_USER_MANAGEMENT: "true" + GDC_FEATURES_VALUES_ENABLE_SCHEDULING: "true" + GDC_FEATURES_VALUES_ENABLE_ALERTING: "true" + GDC_FEATURES_VALUES_ENABLE_SMTP: "true" + GDC_FEATURES_VALUES_ENABLE_PRE_AGGREGATION_DATASETS: "true" + GDC_FEATURES_VALUES_ENABLE_DATA_LOCALIZATION: "true" + 
GDC_FEATURES_VALUES_ENABLE_SNOWFLAKE_KEY_PAIR_AUTHENTICATION: "true" + + auth-service: + image: 020413372491.dkr.ecr.us-east-1.amazonaws.com/nas/auth-service:latest + tty: true + ports: + - "${AUTH_SERVICE_EXPOSED_PORT-9050}:9050" + - "${AUTH_SERVICE_MANAGEMENT_EXPOSED_PORT-9051}:9051" + - "${AUTH_SERVICE_GRPC_EXPOSED_PORT-6573}:6573" + depends_on: + metadata-api: + condition: service_started + dex: + condition: service_started + pulsar-create-namespace: + condition: service_completed_successfully + environment: + <<: *java-opts + GRPC_METADATA_HOST: metadata-api + GRPC_METADATA_PORT: 6572 + PULSAR_ADMINURL: http://pulsar:8080 + PULSAR_PRODUCERS_INVITEGENERATOR_ENABLED: "true" + PULSAR_PRODUCERS_INVITEGENERATOR_TOPIC: gooddata/nas/invitations + PULSAR_SERVICEURL: pulsar://pulsar:6650 + SPRING_PROFILES_ACTIVE: localDex + GRPC_DEX_HOST: dex + GRPC_DEX_PORT: 5557 + # Mount license file (valid until 2026-09-11) + # The auth-service reads the license from /secret/license file + volumes: + - ./build:/secret:ro + + calcique: + image: 020413372491.dkr.ecr.us-east-1.amazonaws.com/nas/calcique:latest + tty: true + depends_on: + metadata-api: + condition: service_started + pulsar-create-namespace: + condition: service_completed_successfully + redis: + condition: service_healthy + environment: + <<: *java-opts + PULSAR_ADMINURL: http://pulsar:8080 + PULSAR_CONSUMERS_MODEL_UPDATE_TOPIC: gooddata/nas/metadata.model + PULSAR_CONSUMERS_MODEL_UPDATE_DEAD_LETTER_TOPIC: gooddata/nas/metadata.model.calcique.DLQ + PULSAR_CONSUMERS_DATA_SOURCE_CHANGE_TOPIC: gooddata/nas/data-source.change + PULSAR_CONSUMERS_DATA_SOURCE_CHANGE_DEAD_LETTER_TOPIC: gooddata/nas/data-source.change.calcique.DLQ + PULSAR_SERVICEURL: pulsar://pulsar:6650 + GRPC_METADATA_HOST: metadata-api + GRPC_METADATA_PORT: 6572 + GRPC_DATASOURCE_HOST: metadata-api + GRPC_DATASOURCE_PORT: 6572 + SPRING_DATA_REDIS_HOST: redis + SPRING_DATA_REDIS_PORT: 6379 + GDC_FEATURES_VALUES_ENABLE_DATA_LOCALIZATION: "true" + ports: + - 
"${CALCIQUE_MANAGEMENT_EXPOSED_PORT-9012}:9012" + - "${CALCIQUE_GRPC_EXPOSED_PORT-6577}:6577" + + sql-executor: + image: 020413372491.dkr.ecr.us-east-1.amazonaws.com/nas/sql-executor:latest + tty: true + depends_on: + metadata-api: + condition: service_started + pulsar-create-namespace: + condition: service_completed_successfully + result-cache: + condition: service_started + auth-service: + condition: service_started + quiver: + condition: service_healthy + ports: + - "${SQL_EXECUTOR_GRPC_EXPOSED_PORT-6570}:6570" + environment: + <<: [*java-opts, *quiver-env-vars] + GRPC_METADATA_HOST: metadata-api + GRPC_METADATA_PORT: 6572 + GRPC_DATASOURCE_HOST: metadata-api + GRPC_DATASOURCE_PORT: 6572 + GRPC_GDSTORAGE_HOST: result-cache + GRPC_GDSTORAGE_PORT: 6567 + GRPC_LICENSE_HOST: auth-service + GRPC_LICENSE_PORT: 6573 + PULSAR_ADMINURL: http://pulsar:8080 + PULSAR_CONSUMERS_DATA_SOURCE_CHANGE_TOPIC: gooddata/nas/data-source.change + PULSAR_CONSUMERS_SELECT_TOPIC: gooddata/nas/sql.select + PULSAR_CONSUMERS_SELECT_DEAD_LETTER_TOPIC: gooddata/nas/sql.select.DLQ + PULSAR_CONSUMERS_CANCEL_TOPIC: gooddata/nas/sql.cancel + PULSAR_CONSUMERS_CANCEL_SUBSCRIPTION_TYPE: exclusive + PULSAR_CONSUMERS_CANCEL_SUBSCRIPTION_NAME: sql-executor.cancel-sql + PULSAR_SERVICEURL: pulsar://pulsar:6650 + QUIVER_DATA_SOURCE_FS_DATA_SOURCE_ID: ds-files + + result-cache: + image: 020413372491.dkr.ecr.us-east-1.amazonaws.com/nas/result-cache:latest + tty: true + depends_on: + auth-service: + condition: service_started + calcique: + condition: service_started + pulsar-create-namespace: + condition: service_completed_successfully + quiver: + condition: service_healthy + redis: + condition: service_healthy + environment: + <<: [*java-opts, *quiver-env-vars] + GRPC_CALCIQUE_HOST: calcique + GRPC_CALCIQUE_PORT: 6577 + GRPC_METADATA_HOST: metadata-api + GRPC_METADATA_PORT: 6572 + GRPC_DATASOURCE_HOST: metadata-api + GRPC_DATASOURCE_PORT: 6572 + GRPC_LICENSE_HOST: auth-service + GRPC_LICENSE_PORT: 6573 + 
PULSAR_ADMINURL: http://pulsar:8080 + PULSAR_CONSUMERS_RESULT_TOPIC: gooddata/nas/result.xtab + PULSAR_CONSUMERS_RESULT_DEAD_LETTER_TOPIC: gooddata/nas/result.xtab.DLQ + PULSAR_PRODUCERS_RESULT_XTAB_TOPIC: gooddata/nas/result.xtab + PULSAR_PRODUCERS_SQLEXECUTOR_TOPIC: gooddata/nas/sql.select + PULSAR_PRODUCERS_SQLEXECUTORCANCEL_TOPIC: gooddata/nas/sql.cancel + PULSAR_PRODUCERS_SQLEXECUTORCANCEL_MESSAGE_TTL: 300 + PULSAR_PRODUCERS_CACHE_SETTINGS_BOOTSTRAP_TOPIC: gooddata/nas/cache-settings.bootstrap + PULSAR_CONSUMERS_CACHE_SETTINGS_CHANGE_TOPIC: gooddata/nas/cache-settings.change + PULSAR_CONSUMERS_CACHE_SETTINGS_CHANGE_DEAD_LETTER_TOPIC: gooddata/nas/cache-settings.change.DLQ + PULSAR_PULL_CONSUMERS_INVALIDATION_TOPICS: gooddata/nas/data-source.change,gooddata/nas/metadata.model + PULSAR_SERVICEURL: pulsar://pulsar:6650 + SPRING_DATA_REDIS_HOST: redis + SPRING_DATA_REDIS_PORT: 6379 + GDC_WORKSPACE_BASELINE_CACHE: 0 + GDC_TOTAL_CACHE_LIMIT: 0 + QUIVER_DATA_SOURCE_FS_DATA_SOURCE_ID: ds-files + STORAGE_TYPE: fs + STORAGE_FS_ROOT_DIR: quiver + ports: + - "${RESULTCACHE_GRPC_EXPOSED_PORT-6567}:6567" + - "${RESULTCACHE_API_EXPOSED_PORT-9040}:9040" + volumes: + - ./quiver-ds:/quiver:Z + + afm-exec-api: + image: 020413372491.dkr.ecr.us-east-1.amazonaws.com/nas/afm-exec-api:latest + tty: true + ports: + - "${AFM_EXEC_API_EXPOSED_PORT-9000}:9000" + depends_on: + pulsar-create-namespace: + condition: service_completed_successfully + calcique: + condition: service_started + metadata-api: + condition: service_started + result-cache: + condition: service_started + sql-executor: + condition: service_started + redis: + condition: service_healthy + quiver: + condition: service_healthy + environment: + <<: [*java-opts, *quiver-env-vars] + JAVA_OPTS: "${JAVA_OPTS:--Xmx1G --add-opens=java.base/sun.net=ALL-UNNAMED --add-opens=java.base/java.lang=ALL-UNNAMED --add-opens=java.base/java.nio=ALL-UNNAMED}" + GRPC_CALCIQUE_HOST: calcique + GRPC_CALCIQUE_PORT: 6577 + GRPC_METADATA_HOST: 
metadata-api + GRPC_METADATA_PORT: 6572 + GRPC_RESULTCACHE_HOST: result-cache + GRPC_RESULTCACHE_PORT: 6567 + GRPC_LICENSE_HOST: auth-service + GRPC_LICENSE_PORT: 6573 + GRPC_SQLEXECUTOR_HOST: sql-executor + GRPC_SQLEXECUTOR_PORT: 6570 + GRPC_DATASOURCE_HOST: metadata-api + GRPC_DATASOURCE_PORT: 6572 + SPRING_DATA_REDIS_HOST: redis + SPRING_DATA_REDIS_PORT: 6379 + PULSAR_ADMINURL: http://pulsar:8080 + PULSAR_SERVICEURL: pulsar://pulsar:6650 + + scan-model: + image: 020413372491.dkr.ecr.us-east-1.amazonaws.com/nas/scan-model:latest + tty: true + ports: + - "${SCANMODEL_API_EXPOSED_PORT-9060}:9060" + depends_on: + sql-executor: + condition: service_started + metadata-api: + condition: service_started + environment: + <<: *java-opts + GRPC_METADATA_HOST: metadata-api + GRPC_METADATA_PORT: 6572 + GRPC_DATASOURCE_HOST: metadata-api + GRPC_DATASOURCE_PORT: 6572 + GRPC_DATASOURCEMETADATA_HOST: sql-executor + GRPC_DATASOURCEMETADATA_PORT: 6570 + GRPC_LICENSE_HOST: auth-service + GRPC_LICENSE_PORT: 6573 + GDC_FEATURES_VALUES_ENABLE_SNOWFLAKE_KEY_PAIR_AUTHENTICATION: "true" + + ## ============================================ + ## Gateway Services + ## ============================================ + + api-gateway: + image: 020413372491.dkr.ecr.us-east-1.amazonaws.com/nas/api-gateway:latest + tty: true + ports: + - "${API_GATEWAY_EXPOSED_PORT-9092}:9092" + depends_on: + metadata-api: + condition: service_started + analytical-designer: + condition: service_started + dashboards: + condition: service_started + ldm-modeler: + condition: service_started + home-ui: + condition: service_started + measure-editor: + condition: service_started + web-components: + condition: service_started + environment: + <<: *java-opts + SPRING_PROFILES_ACTIVE: default,docker-compose + GRPC_METADATA_HOST: metadata-api + GRPC_METADATA_PORT: 6572 + GRPC_DATASOURCE_HOST: metadata-api + GRPC_DATASOURCE_PORT: 6572 + + api-gw: + image: 020413372491.dkr.ecr.us-east-1.amazonaws.com/nas/gateway-api-gw:latest + 
tty: true + ports: + - "${API_GW_EXPOSED_PORT-9201}:9201" + - "${API_GW_HEALTH_PORT-9203}:9203" + environment: + AFM_EXEC_SERVICE_HOST: afm-exec-api + AFM_EXEC_SERVICE_PORT: 9000 + GEN_AI_HTTP_HOST: localhost + GEN_AI_HTTP_PORT: 8989 + AUTH_SERVICE_HOST: auth-service + AUTH_SERVICE_PORT: 9050 + AUTOMATION_HOST: automation + AUTOMATION_PORT: 9097 + SCAN_MODEL_HOST: scan-model + SCAN_MODEL_PORT: 9060 + # Export controller - required but SDK tests may not use it + EXPORT_CONTROLLER_HOST: export-controller + EXPORT_CONTROLLER_PORT: 6580 + RESULT_CACHE_HOST: result-cache + RESULT_CACHE_PORT: 9040 + API_GATEWAY_HOST: api-gateway + API_GATEWAY_PORT: 9092 + METADATA_API_HOST: metadata-api + METADATA_API_PORT: 9007 + # API docs and MCP server - optional for SDK tests + APIDOCS_HOST: apidocs + APIDOCS_PORT: 8080 + MCP_SERVER_HOST: mcp-server + MCP_SERVER_PORT: 9200 + GRPC_METADATA_HOST: metadata-api + GRPC_METADATA_PORT: 6572 + GRPC_MDSINK_HOST: gateway-md-sink + GRPC_MDSINK_PORT: 6574 + GATEWAY_ORGANIZATION_RESOLVER_TYPE: GRPC_WITH_DB_VALIDATION + GATEWAY_R2DBC_URL: 'r2dbc:postgresql://postgres:5432/gw?reWriteBatchedInserts=true' + GATEWAY_DB_USERNAME: postgres + GATEWAY_DB_PASSWORD: passw0rd + GATEWAY_DB_POOL_INITIAL_SIZE: 1 + GATEWAY_DB_POOL_MAX_SIZE: 4 + GATEWAY_DB_POOL_MIN_IDLE: 0 + GATEWAY_DB_POOL_MAX_LIFE_TIME: 300000 + ORGANIZATION_CACHE_SIZE: 1000 + CACHE_EXPIRE_AFTER_WRITE: 60 + USER_CACHE_SIZE: 1000 + USER_CACHE_EXPIRE_AFTER_WRITE: 60 + ORGANIZATION_CACHE_METRICS: "true" + USER_CACHE_METRICS: "true" + CALL_FORWARDER_CONNECT_TIMEOUT: 1m + CALL_FORWARDER_SOCKET_TIMEOUT: 3m + CALL_FORWARDER_REQUEST_TIMEOUT: 4m + OTEL_SDK_DISABLED: "true" + SPRING_DATA_REDIS_HOST: redis + SPRING_DATA_REDIS_PORT: 6379 + COROUTINES_DEBUG_METRICS_ENABLED: "true" + GDC_FEATURES_VALUES_ENABLE_COMPOSITE_GRAIN: "true" + GDC_FEATURES_VALUES_ENABLE_USER_MANAGEMENT: "true" GDC_FEATURES_VALUES_ENABLE_SCHEDULING: "true" GDC_FEATURES_VALUES_ENABLE_ALERTING: "true" GDC_FEATURES_VALUES_ENABLE_SMTP: 
"true" GDC_FEATURES_VALUES_ENABLE_PRE_AGGREGATION_DATASETS: "true" -# In the case of failing tests (HTTP 500), you can increase the memory for the metadata API -# METADATA_API_JAVA_OPTS: "-Xmx1024m -Xms512m" + GDC_FEATURES_VALUES_ENABLE_DATA_LOCALIZATION: "true" + GDC_FEATURES_VALUES_ENABLE_RAW_EXPORTS: "true" + GDC_FEATURES_VALUES_ENABLE_FLEXIBLE_DASHBOARD_LAYOUT: "true" + GDC_FEATURES_VALUES_ENABLE_SNOWFLAKE_KEY_PAIR_AUTHENTICATION: "true" + GDC_FEATURES_VALUES_ENABLE_DATA_SOURCE_ROUTING: "true" + depends_on: + postgres: + condition: service_healthy + gateway-md-sink: + condition: service_started + afm-exec-api: + condition: service_started + auth-service: + condition: service_started + automation: + condition: service_started + result-cache: + condition: service_started + scan-model: + condition: service_started + api-gateway: + condition: service_started + metadata-api: + condition: service_started + redis: + condition: service_healthy + apidocs: + condition: service_started + export-controller: + condition: service_started + labels: + - "traefik.enable=true" + - "traefik.http.routers.api-gw.rule=PathPrefix(`/`)" + - "traefik.http.routers.api-gw.entrypoints=web" + - "traefik.http.routers.api-gw.priority=1" + - "traefik.http.services.api-gw.loadbalancer.server.port=9201" + + gateway-md-sink: + image: 020413372491.dkr.ecr.us-east-1.amazonaws.com/nas/gateway-md-sink:latest + tty: true + depends_on: + postgres: + condition: service_healthy + pulsar-create-namespace: + condition: service_completed_successfully + ports: + - "${GATEWAY_MD_SINK_MANAGEMENT_EXPOSED_PORT-9099}:9099" + environment: + <<: *java-opts + PULSAR_ADMINURL: http://pulsar:8080 + PULSAR_SERVICE_URL: pulsar://pulsar:6650 + PULSAR_PRODUCERS_METADATA_SYNC_REQUEST_TOPIC: gooddata/nas/metadata.sync-request + PULSAR_PRODUCERS_METADATA_BACK_SYNC_TOPIC: gooddata/nas/metadata.back-sync + PULSAR_CONSUMERS_METADATA_SYNC_TOPIC: gooddata/nas/metadata.sync + PULSAR_CONSUMERS_METADATA_SYNC_DEAD_LETTER_TOPIC: 
gooddata/nas/metadata.sync.DLQ + SPRING_R2DBC_URL: 'r2dbc:postgresql://postgres:5432/gw?reWriteBatchedInserts=true' + SPRING_R2DBC_PASSWORD: passw0rd + SPRING_R2DBC_USERNAME: postgres + + automation: + image: 020413372491.dkr.ecr.us-east-1.amazonaws.com/nas/automation:latest + tty: true + depends_on: + postgres: + condition: service_healthy + pulsar-create-namespace: + condition: service_completed_successfully + ports: + - "${AUTOMATION_API_EXPOSED_PORT-9097}:9097" + environment: + <<: [*java-opts, *quiver-env-vars] + GDC_AUTOMATION_ENCRYPTOR_KEYSET: *key-set + GDC_AUTOMATION_ENCRYPTOR_ENABLED: "true" + PULSAR_ADMINURL: http://pulsar:8080 + PULSAR_SERVICEURL: pulsar://pulsar:6650 + PULSAR_PRODUCERS_EXPORT_TABULAR_SCHEDULED_TOPIC: gooddata/nas/export-tabular-scheduled.request + PULSAR_PRODUCERS_EXPORT_VISUAL_SCHEDULED_TOPIC: gooddata/nas/export-visual-scheduled.request + PULSAR_PRODUCERS_METADATA_SYNC_REQUEST_TOPIC: gooddata/nas/metadata.sync-request + PULSAR_PRODUCERS_METADATA_BACK_SYNC_TOPIC: gooddata/nas/metadata.back-sync + PULSAR_CONSUMERS_METADATA_SYNC_TOPIC: gooddata/nas/metadata.sync + PULSAR_CONSUMERS_METADATA_SYNC_DEAD_LETTER_TOPIC: gooddata/nas/metadata.sync.DLQ + SPRING_DATASOURCE_URL: 'jdbc:postgresql://postgres:5432/automation?reWriteBatchedInserts=true' + SPRING_DATASOURCE_PASSWORD: passw0rd + SPRING_DATASOURCE_USERNAME: postgres + GRPC_METADATA_HOST: metadata-api + GRPC_METADATA_PORT: 6572 + GRPC_AFMEXEC_HOST: afm-exec-api + GRPC_AFMEXEC_PORT: 6571 + GRPC_RESULTCACHE_HOST: result-cache + GRPC_RESULTCACHE_PORT: 6567 + + ## ============================================ + ## Export Services (for SDK export tests) + ## ============================================ + + tabular-exporter: + image: 020413372491.dkr.ecr.us-east-1.amazonaws.com/nas/tabular-exporter:latest + tty: true + healthcheck: + test: ["CMD", "python3", "-c", "import socket; s = socket.socket(socket.AF_INET, socket.SOCK_STREAM); s.settimeout(2); exit(0 if s.connect_ex(('localhost', 6789)) 
== 0 else 1)"] + interval: 10s + timeout: 5s + retries: 3 + start_period: 15s + depends_on: + afm-exec-api: + condition: service_started + metadata-api: + condition: service_started + ports: + - "${TABULAR_EXPORTER_GRPC_EXPOSED_PORT-6789}:6789" + - "${TABULAR_EXPORTER_MANAGEMENT_EXPOSED_PORT-9131}:9131" + environment: + AFM_EXEC_API_URL: "http://afm-exec-api:9000" + MD_API_URL: "http://metadata-api:9007" + TABULAR_EXPORTER_LOGLEVEL: INFO + + export-controller: + image: 020413372491.dkr.ecr.us-east-1.amazonaws.com/nas/export-controller:latest + tty: true + depends_on: + auth-service: + condition: service_started + metadata-api: + condition: service_started + afm-exec-api: + condition: service_started + pulsar-create-namespace: + condition: service_completed_successfully + redis: + condition: service_healthy + result-cache: + condition: service_started + tabular-exporter: + condition: service_healthy + ports: + - "${EXPORT_CONTROLLER_API_EXPOSED_PORT-6580}:6580" + - "${EXPORT_CONTROLLER_MANAGEMENT_EXPOSED_PORT-6581}:6581" + - "${EXPORT_CONTROLLER_GRPC_EXPOSED_PORT-6578}:6578" + environment: + <<: *java-opts + GDC_TELEMETRY_ENABLED: "false" + GRPC_METADATA_HOST: metadata-api + GRPC_METADATA_PORT: 6572 + GRPC_RESULTCACHE_HOST: result-cache + GRPC_RESULTCACHE_PORT: 6567 + GRPC_TABULAREXPORTER_HOST: tabular-exporter + GRPC_TABULAREXPORTER_PORT: 6789 + GRPC_EXPORTBUILDER_HOST: router + GRPC_EXPORTBUILDER_PORT: 6528 + GRPC_CALCIQUE_HOST: calcique + GRPC_CALCIQUE_PORT: 6577 + GRPC_AFMEXEC_HOST: afm-exec-api + GRPC_AFMEXEC_PORT: 6571 + PULSAR_ADMINURL: http://pulsar:8080 + PULSAR_SERVICEURL: pulsar://pulsar:6650 + PULSAR_PRODUCERS_EXPORT_TABULAR_TOPIC: gooddata/nas/export-tabular.request + PULSAR_PRODUCERS_EXPORT_TABULAR_MESSAGE_TTL: 300 + PULSAR_CONSUMERS_EXPORT_TABULAR_TOPIC: gooddata/nas/export-tabular.request + PULSAR_CONSUMERS_EXPORT_TABULAR_DEAD_LETTER_TOPIC: gooddata/nas/export-tabular.request.DLQ + PULSAR_CONSUMERS_EXPORT_TABULAR_SCHEDULED_TOPIC: 
gooddata/nas/export-tabular-scheduled.request + PULSAR_CONSUMERS_EXPORT_TABULAR_SCHEDULED_DEAD_LETTER_TOPIC: gooddata/nas/export-tabular-scheduled.request.DLQ + PULSAR_PRODUCERS_EXPORT_VISUAL_TOPIC: gooddata/nas/export-visual.request + PULSAR_PRODUCERS_EXPORT_VISUAL_MESSAGE_TTL: 300 + PULSAR_CONSUMERS_EXPORT_VISUAL_TOPIC: gooddata/nas/export-visual.request + PULSAR_CONSUMERS_EXPORT_VISUAL_DEAD_LETTER_TOPIC: gooddata/nas/export-visual.request.DLQ + PULSAR_CONSUMERS_EXPORT_VISUAL_SCHEDULED_TOPIC: gooddata/nas/export-visual-scheduled.request + PULSAR_CONSUMERS_EXPORT_VISUAL_SCHEDULED_DEAD_LETTER_TOPIC: gooddata/nas/export-visual-scheduled.request.DLQ + PULSAR_PRODUCERS_EXPORT_SLIDESHOW_TOPIC: gooddata/nas/export-slideshow.request + PULSAR_PRODUCERS_EXPORT_SLIDESHOW_MESSAGE_TTL: 300 + PULSAR_CONSUMERS_EXPORT_SLIDESHOW_TOPIC: gooddata/nas/export-slideshow.request + PULSAR_CONSUMERS_EXPORT_SLIDESHOW_DEAD_LETTER_TOPIC: gooddata/nas/export-slideshow.request.DLQ + PULSAR_CONSUMERS_EXPORT_SLIDESHOW_SCHEDULED_TOPIC: gooddata/nas/export-slideshow-scheduled.request + PULSAR_CONSUMERS_EXPORT_SLIDESHOW_SCHEDULED_DEAD_LETTER_TOPIC: gooddata/nas/export-slideshow-scheduled.request.DLQ + PULSAR_PRODUCERS_EXPORT_RAW_TOPIC: gooddata/nas/export-raw.request + PULSAR_PRODUCERS_EXPORT_RAW_MESSAGE_TTL: 300 + PULSAR_CONSUMERS_EXPORT_RAW_TOPIC: gooddata/nas/export-raw.request + PULSAR_CONSUMERS_EXPORT_RAW_DEAD_LETTER_TOPIC: gooddata/nas/export-raw.request.DLQ + PULSAR_CONSUMERS_EXPORT_RAW_SCHEDULED_TOPIC: gooddata/nas/export-raw-scheduled.request + PULSAR_CONSUMERS_EXPORT_RAW_SCHEDULED_DEAD_LETTER_TOPIC: gooddata/nas/export-raw-scheduled.request.DLQ + PULSAR_PULLCONSUMERS_EXPORTRAW_TOPICS: gooddata/nas/export-raw.request + PULSAR_PULLCONSUMERS_EXPORTRAW_MAXCONCURRENTMESSAGES: 4 + PULSAR_PULL_CONSUMERS_INVALIDATION_TOPICS: gooddata/nas/metadata.model + SPRING_DATA_REDIS_HOST: redis + SPRING_DATA_REDIS_PORT: 6379 + GDC_FILE_STORAGE_BASE_URL: /tmp/exports + 
GDC_FEATURES_VALUES_ENABLE_KPI_DASHBOARD_EXPORT_PDF: "true" + GDC_FEATURES_VALUES_ENABLE_DASHBOARD_TABULAR_EXPORT: "true" + GDC_FEATURES_VALUES_ENABLE_RAW_EXPORTS: "true" + GDC_FEATURES_VALUES_ENABLE_CHUNKED_RAW_EXPORTS: "true" + GDC_EXPORT_CONTROLLER_USENEWFLOW: "true" + GDC_FEATURES_VALUES_ENABLE_NEW_SCHEDULED_EXPORT: "true" + GDC_FEATURES_VALUES_ENABLE_NEW_PDF_TABULAR_EXPORT: "true" + volumes: + - ./packages/gooddata-sdk/tests/export:/tmp/exports:Z + + ## ============================================ + ## Supplemental Services + ## ============================================ + + apidocs: + image: 020413372491.dkr.ecr.us-east-1.amazonaws.com/nas/apidocs:latest + ports: + - "${APIDOCS_EXPOSED_PORT-9999}:8080" + + ## ============================================ + ## External UI Applications (from ECR) + ## ============================================ + + analytical-designer: + image: 020413372491.dkr.ecr.us-east-1.amazonaws.com/nas/analytical-designer:latest + tty: true + ports: + - "${AD_EXPOSED_PORT-9300}:9300" + + dashboards: + image: 020413372491.dkr.ecr.us-east-1.amazonaws.com/nas/dashboards:latest + tty: true + ports: + - "${KD_EXPOSED_PORT-9500}:9500" + + ldm-modeler: + image: 020413372491.dkr.ecr.us-east-1.amazonaws.com/nas/ldm-modeler:latest + tty: true + ports: + - "${LDM_MODELER_EXPOSED_PORT-9400}:8080" + + home-ui: + image: 020413372491.dkr.ecr.us-east-1.amazonaws.com/nas/home-ui:latest + tty: true + ports: + - "${HOME_UI_EXPOSED_PORT-9600}:9600" + + measure-editor: + image: 020413372491.dkr.ecr.us-east-1.amazonaws.com/nas/measure-editor:latest + tty: true + ports: + - "${MEASURE_EDITOR_EXPOSED_PORT-9700}:9700" + + web-components: + image: 020413372491.dkr.ecr.us-east-1.amazonaws.com/nas/web-components:latest + tty: true + ports: + - "${WC_EXPOSED_PORT-9800}:8080" + + ## ============================================ + ## Quiver (Data Processing Engine) + ## ============================================ + + quiver: + image: 
020413372491.dkr.ecr.us-east-1.amazonaws.com/nas/quiver:v0.361.1 + healthcheck: + test: ["CMD-SHELL", "curl -fsS http://quiver:8877/ready"] + interval: 10s + timeout: 5s + retries: 3 + start_period: 10s + tty: true + ports: + - "${QUIVER_EXPOSED_PORT-16001}:16001" + volumes: + - quiver-data:/data/ + - ./quiver-ds:/data/file-storage:Z:ro + - ./scripts/quiver/quiver.conf.toml:/etc/quiver/quiver.conf.toml:ro + environment: + QUIVER_LISTEN_FLIGHT_HOST: 0.0.0.0 + QUIVER_METRICS_HOST: 0.0.0.0 + QUIVER_ADVERTISE_FLIGHT_HOST: quiver + QUIVER_MEMORY_CACHE_SIZE: "100M" + QUIVER_MMAP_CACHE_SIZE: "100M" + QUIVER_DISK_CACHE_SIZE: "1G" + QUIVER_DISK_DIR: "/data/disk-cache" + QUIVER_MODULES: '["quiver_shard", "quiver_policy", "quiver_dataframe", "quiver_connector", "quiver_sql_query"]' + QUIVER_DATAFRAME_OPS: '["gooddata_df_ops.crosstab_pl.op", "gooddata_df_ops.empty.op", "gooddata_df_ops.forecast.op", "gooddata_df_ops.clustering.op", "gooddata_df_ops.kda.op", "gooddata_df_ops.change_analysis.op", "gooddata_df_ops.anomaly_detection.op", "gooddata_df_ops.alerting_pl.op", "gooddata_df_ops.outlier_detection_pl.op"]' + QUIVER_PEER_FLIGHT_HOST: quiver + QUIVER_SECRETS_PROVIDERS: '["tiger_secrets_provider.provider"]' + QUIVER_TIGER_SECRETS__HOST: 'sql-executor' + QUIVER_TIGER_SECRETS__PORT: 6570 + GLIBC_TUNABLES: "glibc.malloc.trim_threshold=128:glibc.malloc.arena_max=2" + PYTHONMALLOC: malloc + ACERO_ALIGNMENT_HANDLING: 'ignore' + command: + - --config + - /etc/quiver/quiver.conf.toml + - --init-cluster-config + - start + - --single-node + - --single-node-savefile + - ":memory:" + - --name + - quiver-node + + ## ============================================ + ## Dex (OIDC Provider) + ## ============================================ + + dex: + image: 020413372491.dkr.ecr.us-east-1.amazonaws.com/nas/dex:latest + entrypoint: ["sh", "-c"] + command: + - | + cat << 'EOF' > /tmp/dex-config.yaml + issuer: http://localhost:3000/dex + storage: + type: sqlite3 + config: + file: /data/dex.db + 
web: + http: 0.0.0.0:5556 + grpc: + addr: 0.0.0.0:5557 + reflection: true + expiry: + deviceRequests: "24h" + signingKeys: "24h" + idTokens: "24h" + oauth2: + responseTypes: ["code", "token", "id_token"] + skipApprovalScreen: true + enablePasswordDB: true + staticPasswords: [] + frontend: + theme: gdc + issuer: GoodData.CN + logoUrl: theme/logo.svg + dir: web/ + EOF + exec /opt/dex/dex serve /tmp/dex-config.yaml + ports: + - 5556:5556 + - 5557:5557 + volumes: + - dex-data:/data + labels: + - "traefik.enable=true" + - "traefik.http.routers.dex.rule=PathPrefix(`/dex`)" + - "traefik.http.routers.dex.entrypoints=web" + - "traefik.http.services.dex.loadbalancer.server.port=5556" + + ## ============================================ + ## Data Loading Services (Bootstrap) + ## ============================================ + + # Organization bootstrap - Self-contained (no gdc-nas dependency) + # Creates organization via gRPC, then creates demo user via REST + metadata-organization-bootstrap: + image: 020413372491.dkr.ecr.us-east-1.amazonaws.com/pullthrough/docker.io/library/alpine:3.21 + tty: true + depends_on: + auth-service: + condition: service_started + metadata-api: + condition: service_started + api-gw: + condition: service_started + entrypoint: ["/bin/sh", "-c"] + command: + - | + set -e + # Install required tools + apk add --no-cache curl jq + + # Download grpcurl binary + echo "Installing grpcurl..." + GRPCURL_VERSION="1.9.1" + wget -q -O /tmp/grpcurl.tar.gz "https://github.com/fullstorydev/grpcurl/releases/download/v$${GRPCURL_VERSION}/grpcurl_$${GRPCURL_VERSION}_linux_x86_64.tar.gz" + tar -xzf /tmp/grpcurl.tar.gz -C /usr/local/bin grpcurl + chmod +x /usr/local/bin/grpcurl + rm /tmp/grpcurl.tar.gz + + echo "Waiting for metadata-api..." + until curl -sf http://metadata-api:9008/actuator/health/readiness >/dev/null 2>&1; do + echo "Waiting for metadata-api to be ready..." + sleep 5 + done + echo "metadata-api is ready!" + + echo "Waiting for api-gw..." 
+ until curl -sf http://api-gw:9203/health/readiness >/dev/null 2>&1; do + echo "Waiting for api-gw to be ready..." + sleep 5 + done + echo "api-gw is ready!" + + # Generate token hash (using same format as gdc-nas bootstrap) + TOKEN_HASH='$$5$$1234567890123456$$ZhevF45HOczSVHwo7R6s60tQRjbx4/bhjTrYi317TKD' + + echo "Creating organization via gRPC..." + echo '{ + "organizations": [{ + "id": "default", + "name": "Default Organization", + "hostname": "localhost", + "userGroup": { + "id": "adminGroup", + "user": { + "id": "admin" + } + }, + "initialToken": { + "id": "bootstrap", + "cryptedToken": "'"$$TOKEN_HASH"'" + } + }] + }' | grpcurl -plaintext -H "Authorization: Basic Ym9vdHN0cmFwOlN1cGVyU2VjcmV0UGFzc3dvcmQ=" \ + -d @ metadata-api:6572 tiger.MetadataStorePrivilegedService/ensureOrganizationsExists + + echo "Organization created!" + + echo "Waiting for auth-service..." + until curl -sf http://auth-service:9051/actuator/health/readiness >/dev/null 2>&1; do + echo "Waiting for auth-service to be ready..." + sleep 5 + done + echo "auth-service is ready!" + + # Create demo user via REST API + BOOTSTRAP_TOKEN="YWRtaW46Ym9vdHN0cmFwOmFkbWluMTIz" + + echo "Creating dex user..." + AUTH_ID=$$(curl -sf -X POST \ + -H "Content-type: application/json" \ + -H "Host: localhost:3000" \ + -H "Authorization: Bearer $$BOOTSTRAP_TOKEN" \ + -d '{"email": "demo@example.com","password": "demo123","displayName": "Demo User"}' \ + "http://api-gw:9201/api/v1/auth/users" 2>/dev/null | jq -r '.authenticationId // empty') || true + + if [ -z "$$AUTH_ID" ]; then + echo "User might already exist, trying to get existing user..." + AUTH_ID=$$(curl -sf -X GET \ + -H "Content-type: application/json" \ + -H "Host: localhost:3000" \ + -H "Authorization: Bearer $$BOOTSTRAP_TOKEN" \ + "http://api-gw:9201/api/v1/auth/users/demo@example.com" 2>/dev/null | jq -r '.authenticationId // empty') || true + fi + + echo "Auth ID: $$AUTH_ID" + + if [ -n "$$AUTH_ID" ]; then + echo "Creating metadata user..." 
+ curl -sf -X POST \ + -H "Content-type: application/vnd.gooddata.api+json" \ + -H "Host: localhost:3000" \ + -H "Authorization: Bearer $$BOOTSTRAP_TOKEN" \ + -d '{ + "data": { + "id": "demo", + "type": "user", + "attributes": { + "authenticationId": "'"$$AUTH_ID"'", + "firstname": "Demo", + "lastname": "User", + "email": "demo@example.com" + }, + "relationships": { + "userGroups": { + "data": [{"id": "adminGroup", "type": "userGroup"}] + } + } + } + }' "http://api-gw:9201/api/v1/entities/users" 2>/dev/null || echo "User might already exist" + fi + + echo "Bootstrap completed!" + + # Data loader - populates PostgreSQL with demo data + # Uses --no-schema-versioning to create schemas without hash suffixes (e.g., "demo" not "demo_abc123") + # This is CRITICAL for Python SDK tests which expect consistent fixture names + data-loader: + image: 020413372491.dkr.ecr.us-east-1.amazonaws.com/nas/data-loader:master + platform: linux/amd64 + pull_policy: always + tty: true + depends_on: + postgres: + condition: service_healthy + environment: + DOWNLOAD_DATA_SETS: "yes" + # Call data_load.py directly with --no-schema-versioning flag + # This ensures schema names don't have hash suffixes (demo instead of demo_) + entrypoint: ["/bin/bash", "-c"] + command: + - | + set -ex + # Download fresh data if needed + if [ -n "$$DOWNLOAD_DATA_SETS" ]; then + ./data_load.py -w demo -d pg_local_docker --skip-load --skip-store-versions || true + fi + # Load data with --no-schema-versioning (critical for SDK tests) + ./data_load.py -w demo -d pg_local_docker --skip-download --skip-store-versions --no-schema-versioning --debug + echo "Data loading completed with --no-schema-versioning!" 
+ + # Create data sources in metadata-api + # This registers the data source so GoodData services can query the demo data + create-ds: + image: 020413372491.dkr.ecr.us-east-1.amazonaws.com/nas/data-loader:master + pull_policy: missing + tty: true + depends_on: + metadata-organization-bootstrap: + condition: service_completed_successfully + data-loader: + condition: service_completed_successfully + environment: + API_HOST: "localhost" + API_ENDPOINT: "http://api-gw:9201" + TIGER_API_TOKEN: YWRtaW46Ym9vdHN0cmFwOmFkbWluMTIz + DATABASES: "pg_local_docker" + WORKSPACES: "demo" + # Use --no-schema-versioning for consistent data source schema names + entrypoint: ["/bin/bash", "-c"] + command: + - | + set -ex + ./create_data_sources.py -w demo -d pg_local_docker -e http://api-gw:9201 --version-file /data_load/versions.json --no-schema-versioning --debug + echo "Data sources created with --no-schema-versioning!" + + # Layout uploader - uploads workspace hierarchy, analytics model, users, and permissions + # This replaces the old upload_demo_layout.py script and runs automatically + # CRITICAL: This step is required for SDK tests to work properly + layout-uploader: + image: 020413372491.dkr.ecr.us-east-1.amazonaws.com/pullthrough/docker.io/library/python:3.14-alpine + tty: true + depends_on: + create-ds: + condition: service_completed_successfully + environment: + HOST: "http://api-gw:9201" + TOKEN: "YWRtaW46Ym9vdHN0cmFwOmFkbWluMTIz" + HEADER_HOST: "localhost" + FIXTURES_DIR: "/app/fixtures" + volumes: + - ./packages/tests-support/fixtures:/app/fixtures:ro + - ./packages/tests-support/upload_demo_layout.py:/app/upload_demo_layout.py:ro + entrypoint: ["/bin/sh", "-c"] + command: + - | + set -ex + # Install required packages + pip install --quiet requests pyyaml + + # Wait for api-gw to be ready + echo "Waiting for api-gw to be ready..." + until wget -q --spider http://api-gw:9203/health/readiness 2>/dev/null; do + sleep 2 + done + echo "api-gw is ready!" 
+ + # Run the layout uploader + cd /app + python upload_demo_layout.py + + echo "Layout upload completed successfully!" + + ## ============================================ + ## FDW Service (optional - for gooddata-fdw tests) + ## ============================================ + gooddata-fdw: build: context: . @@ -37,24 +1099,13 @@ services: POSTGRES_DB: gooddata POSTGRES_USER: gooddata POSTGRES_PASSWORD: "${POSTGRES_PASSWORD-gooddata123}" - command: ["postgres", "-c", "shared_preload_libraries=foreign_table_exposer", "-c", "log_statement=all", "-c", "client_min_messages=DEBUG1", "-c", "log_min_messages=DEBUG1"] - - upload-layout: - build: - context: . - dockerfile: packages/tests-support/Dockerfile - environment: - FIXTURES_DIR: /app/fixtures - HOST: "http://gooddata-cn-ce:3000" - HEADER_HOST: localhost - command: ["python3", "upload_demo_layout.py"] - - docs: - build: - context: . - dockerfile: docs/Dockerfile - ports: - - "1313:1313" + command: ["postgres", "-c", "shared_preload_libraries=foreign_table_exposer", "-c", "log_statement=all"] + profiles: + - fdw volumes: - gooddata-cn-ce-data: + postgres-data: + pulsar-data: + redis-data: + quiver-data: + dex-data: diff --git a/docs/Dockerfile b/docs/Dockerfile index 0d5bcdfd0..8a34e4d66 100644 --- a/docs/Dockerfile +++ b/docs/Dockerfile @@ -1,36 +1,44 @@ -FROM python:3.12-slim AS builder +# syntax=docker/dockerfile:1 +FROM python:3.14-slim AS builder RUN apt-get update && apt-get install -y git make curl +# Install Python deps first (changes rarely) for better layer caching. +# Copy only dependency manifests and package source before installing. 
COPY scripts/script-requirements.txt /scripts/script-requirements.txt -COPY docs docs -COPY scripts/docs/ /docs COPY gooddata-api-client /gooddata-api-client -COPY packages/gooddata-sdk /gooddata-sdk -COPY packages/gooddata-pandas /gooddata-pandas +COPY packages/gooddata-sdk /packages/gooddata-sdk +COPY packages/gooddata-pandas /packages/gooddata-pandas + +RUN --mount=type=cache,target=/root/.cache/pip \ + pip install -r /scripts/script-requirements.txt -RUN pip install --no-cache-dir -r /scripts/script-requirements.txt +# Copy source (docs content changes most frequently, scripts less so) +COPY docs docs +COPY scripts/docs/ /docs WORKDIR /docs RUN python json_builder.py && \ python python_ref_builder.py api_spec.toml data.json latest content/en && \ - mkdir versioned_docs/latest && \ - mv -f data.json ./versioned_docs/latest/data.json && \ - mv -f content/en/latest/links.json ./versioned_docs/latest/links.json + rm -f data.json FROM node:20.18.0-bookworm-slim -COPY --from=builder /docs /docs - RUN apt-get update && \ apt-get install -y git make golang-go curl && \ npm install -g hugo-extended@0.117.0 && \ apt-get clean && \ rm -rf /var/lib/apt/lists/* /tmp/* /var/tmp/* +COPY --from=builder /docs /docs + WORKDIR /docs -RUN npm install && \ + +# npm cache mount speeds up rebuilds; Go modules are stored in the image +# layer so Hugo can resolve them at runtime. 
+RUN --mount=type=cache,target=/root/.npm \ + npm install && \ hugo mod get # accessible on http://localhost:1313/latest/ diff --git a/docs/assets/scss/homepage.scss b/docs/assets/scss/homepage.scss index e13d47c93..47bcaecab 100644 --- a/docs/assets/scss/homepage.scss +++ b/docs/assets/scss/homepage.scss @@ -224,8 +224,11 @@ display: block; } - & h4#{&}__title { + & p#{&}__title { margin-top: 0; + font-size: 1.125em; + font-family: bca6d3310b5c9dae1dae416e8abc8405,helvetica,arial,sans-serif; + line-height: 1.2; } p:last-child { diff --git a/docs/class_template.md b/docs/class_template.md index 1ac9d54d8..cab546bd2 100644 --- a/docs/class_template.md +++ b/docs/class_template.md @@ -3,4 +3,4 @@ linkTitle: "LINK" no_list: true --- -{{< api-ref-class "PATH" >}} +CONTENT diff --git a/docs/config/production/params.toml b/docs/config/production/params.toml index 445216e15..896ba5f97 100644 --- a/docs/config/production/params.toml +++ b/docs/config/production/params.toml @@ -12,18 +12,18 @@ dirpath = "dev" url = "/dev/" sitemapExclude = true [[versions]] -version = "1.53" +version = "1.61" dirpath = "latest" url = "/latest/" [[versions]] -version = "1.52" -dirpath = "1.52" -url = "/1.52/" +version = "1.60" +dirpath = "1.60" +url = "/1.60/" [[versions]] -version = "1.51" -dirpath = "1.51" -url = "/1.51/" +version = "1.59" +dirpath = "1.59" +url = "/1.59/" [[versions]] -version = "1.50" -dirpath = "1.50" -url = "/1.50/" +version = "1.58" +dirpath = "1.58" +url = "/1.58/" diff --git a/docs/content/en/latest/administration/appearance/_index.md b/docs/content/en/latest/administration/appearance/_index.md new file mode 100644 index 000000000..75d289e2d --- /dev/null +++ b/docs/content/en/latest/administration/appearance/_index.md @@ -0,0 +1,77 @@ +--- +title: "Appearance" +linkTitle: "Appearance" +weight: 15 +no_list: true +--- + +Manage themes and color palettes for your organization. 
+ +## Methods + +### Themes + +* [list_themes](./list_themes/) +* [get_theme](./get_theme/) +* [create_theme](./create_theme/) +* [update_theme](./update_theme/) +* [delete_theme](./delete_theme/) + +### Color Palettes + +* [list_color_palettes](./list_color_palettes/) +* [get_color_palette](./get_color_palette/) +* [create_color_palette](./create_color_palette/) +* [update_color_palette](./update_color_palette/) +* [delete_color_palette](./delete_color_palette/) + +## Example + +Create a custom theme and color palette: + +```python +from gooddata_sdk import GoodDataSdk, CatalogTheme, CatalogColorPalette + +host = "https://www.example.com" +token = "some_user_token" +sdk = GoodDataSdk.create(host, token) + +# Create a custom theme +theme = CatalogTheme.init( + theme_id="my_dark_theme", + name="My Dark Theme", + content={ + "palette": { + "primary": {"base": "#14B2E2"}, + }, + "dashboards": { + "content": { + "widget": { + "backgroundColor": "#122330", + } + } + }, + }, +) +sdk.catalog_appearance.create_theme(theme) + +# List all themes +themes = sdk.catalog_appearance.list_themes() + +# Create a custom color palette for charts +palette = CatalogColorPalette.init( + color_palette_id="my_palette", + name="My Palette", + content={ + "colorPalette": [ + {"guid": "01", "fill": {"r": 140, "g": 125, "b": 232}}, + {"guid": "02", "fill": {"r": 125, "g": 219, "b": 232}}, + ] + }, +) +sdk.catalog_appearance.create_color_palette(palette) + +# Clean up +sdk.catalog_appearance.delete_theme("my_dark_theme") +sdk.catalog_appearance.delete_color_palette("my_palette") +``` diff --git a/docs/content/en/latest/administration/appearance/create_color_palette.md b/docs/content/en/latest/administration/appearance/create_color_palette.md new file mode 100644 index 000000000..781a5eb27 --- /dev/null +++ b/docs/content/en/latest/administration/appearance/create_color_palette.md @@ -0,0 +1,20 @@ +--- +title: "create_color_palette" +linkTitle: "create_color_palette" +superheading: 
"catalog_appearance." +weight: 220 +api_ref: "CatalogAppearanceService.create_color_palette" +--- + +``create_color_palette( color_palette: CatalogColorPalette ) -> None`` + +Create a new color palette. + +{{% parameters-block title="Parameters" %}} +{{< parameter p_name="color_palette" p_type="CatalogColorPalette" >}} +A catalog color palette object to be created. +{{< /parameter >}} +{{% /parameters-block %}} + +{{% parameters-block title="Returns" None="yes"%}} +{{% /parameters-block %}} diff --git a/docs/content/en/latest/administration/appearance/create_theme.md b/docs/content/en/latest/administration/appearance/create_theme.md new file mode 100644 index 000000000..f28319867 --- /dev/null +++ b/docs/content/en/latest/administration/appearance/create_theme.md @@ -0,0 +1,20 @@ +--- +title: "create_theme" +linkTitle: "create_theme" +superheading: "catalog_appearance." +weight: 120 +api_ref: "CatalogAppearanceService.create_theme" +--- + +``create_theme( theme: CatalogTheme ) -> None`` + +Create a new theme. + +{{% parameters-block title="Parameters" %}} +{{< parameter p_name="theme" p_type="CatalogTheme" >}} +A catalog theme object to be created. +{{< /parameter >}} +{{% /parameters-block %}} + +{{% parameters-block title="Returns" None="yes"%}} +{{% /parameters-block %}} diff --git a/docs/content/en/latest/administration/appearance/delete_color_palette.md b/docs/content/en/latest/administration/appearance/delete_color_palette.md new file mode 100644 index 000000000..c9054fa25 --- /dev/null +++ b/docs/content/en/latest/administration/appearance/delete_color_palette.md @@ -0,0 +1,26 @@ +--- +title: "delete_color_palette" +linkTitle: "delete_color_palette" +superheading: "catalog_appearance." +weight: 240 +api_ref: "CatalogAppearanceService.delete_color_palette" +--- + +``delete_color_palette( color_palette_id: str ) -> None`` + +Delete a color palette. 
+ +{{% parameters-block title="Parameters" %}} +{{< parameter p_name="color_palette_id" p_type="string" >}} +Color palette identification string e.g. "my_palette" +{{< /parameter >}} +{{% /parameters-block %}} + +{{% parameters-block title="Returns" None="yes"%}} +{{% /parameters-block %}} + +{{% parameters-block title="Raises" %}} +{{< parameter p_type="Value Error" >}} +Color palette does not exist. +{{< /parameter >}} +{{% /parameters-block %}} diff --git a/docs/content/en/latest/administration/appearance/delete_theme.md b/docs/content/en/latest/administration/appearance/delete_theme.md new file mode 100644 index 000000000..d4ae21284 --- /dev/null +++ b/docs/content/en/latest/administration/appearance/delete_theme.md @@ -0,0 +1,26 @@ +--- +title: "delete_theme" +linkTitle: "delete_theme" +superheading: "catalog_appearance." +weight: 140 +api_ref: "CatalogAppearanceService.delete_theme" +--- + +``delete_theme( theme_id: str ) -> None`` + +Delete a theme. + +{{% parameters-block title="Parameters" %}} +{{< parameter p_name="theme_id" p_type="string" >}} +Theme identification string e.g. "my_dark_theme" +{{< /parameter >}} +{{% /parameters-block %}} + +{{% parameters-block title="Returns" None="yes"%}} +{{% /parameters-block %}} + +{{% parameters-block title="Raises" %}} +{{< parameter p_type="Value Error" >}} +Theme does not exist. +{{< /parameter >}} +{{% /parameters-block %}} diff --git a/docs/content/en/latest/administration/appearance/get_color_palette.md b/docs/content/en/latest/administration/appearance/get_color_palette.md new file mode 100644 index 000000000..46acade14 --- /dev/null +++ b/docs/content/en/latest/administration/appearance/get_color_palette.md @@ -0,0 +1,23 @@ +--- +title: "get_color_palette" +linkTitle: "get_color_palette" +superheading: "catalog_appearance." +weight: 210 +api_ref: "CatalogAppearanceService.get_color_palette" +--- + +``get_color_palette( color_palette_id: str ) -> CatalogColorPalette`` + +Get an individual color palette. 
+ +{{% parameters-block title="Parameters" %}} +{{< parameter p_name="color_palette_id" p_type="string" >}} +Color palette identification string e.g. "my_palette" +{{< /parameter >}} +{{% /parameters-block %}} + +{{% parameters-block title="Returns" %}} +{{< parameter p_type="CatalogColorPalette" >}} +Catalog color palette object containing structure of the color palette. +{{< /parameter >}} +{{% /parameters-block %}} diff --git a/docs/content/en/latest/administration/appearance/get_theme.md b/docs/content/en/latest/administration/appearance/get_theme.md new file mode 100644 index 000000000..7876165b8 --- /dev/null +++ b/docs/content/en/latest/administration/appearance/get_theme.md @@ -0,0 +1,23 @@ +--- +title: "get_theme" +linkTitle: "get_theme" +superheading: "catalog_appearance." +weight: 110 +api_ref: "CatalogAppearanceService.get_theme" +--- + +``get_theme( theme_id: str ) -> CatalogTheme`` + +Get an individual theme. + +{{% parameters-block title="Parameters" %}} +{{< parameter p_name="theme_id" p_type="string" >}} +Theme identification string e.g. "my_dark_theme" +{{< /parameter >}} +{{% /parameters-block %}} + +{{% parameters-block title="Returns" %}} +{{< parameter p_type="CatalogTheme" >}} +Catalog theme object containing structure of the theme. +{{< /parameter >}} +{{% /parameters-block %}} diff --git a/docs/content/en/latest/administration/appearance/list_color_palettes.md b/docs/content/en/latest/administration/appearance/list_color_palettes.md new file mode 100644 index 000000000..fb5c12d0f --- /dev/null +++ b/docs/content/en/latest/administration/appearance/list_color_palettes.md @@ -0,0 +1,20 @@ +--- +title: "list_color_palettes" +linkTitle: "list_color_palettes" +superheading: "catalog_appearance." +weight: 200 +api_ref: "CatalogAppearanceService.list_color_palettes" +--- + +``list_color_palettes( ) -> list[CatalogColorPalette]`` + +Returns a list of all color palettes in the current organization. 
+ +{{% parameters-block title="Parameters" None="yes"%}} +{{% /parameters-block %}} + +{{% parameters-block title="Returns" %}} +{{< parameter p_type="list[CatalogColorPalette]" >}} +List of color palettes in the current organization. +{{< /parameter >}} +{{% /parameters-block %}} diff --git a/docs/content/en/latest/administration/appearance/list_themes.md b/docs/content/en/latest/administration/appearance/list_themes.md new file mode 100644 index 000000000..140271862 --- /dev/null +++ b/docs/content/en/latest/administration/appearance/list_themes.md @@ -0,0 +1,20 @@ +--- +title: "list_themes" +linkTitle: "list_themes" +superheading: "catalog_appearance." +weight: 100 +api_ref: "CatalogAppearanceService.list_themes" +--- + +``list_themes( ) -> list[CatalogTheme]`` + +Returns a list of all themes in the current organization. + +{{% parameters-block title="Parameters" None="yes"%}} +{{% /parameters-block %}} + +{{% parameters-block title="Returns" %}} +{{< parameter p_type="list[CatalogTheme]" >}} +List of themes in the current organization. +{{< /parameter >}} +{{% /parameters-block %}} diff --git a/docs/content/en/latest/administration/appearance/update_color_palette.md b/docs/content/en/latest/administration/appearance/update_color_palette.md new file mode 100644 index 000000000..ab50fdab6 --- /dev/null +++ b/docs/content/en/latest/administration/appearance/update_color_palette.md @@ -0,0 +1,26 @@ +--- +title: "update_color_palette" +linkTitle: "update_color_palette" +superheading: "catalog_appearance." +weight: 230 +api_ref: "CatalogAppearanceService.update_color_palette" +--- + +``update_color_palette( color_palette: CatalogColorPalette ) -> None`` + +Update a color palette. + +{{% parameters-block title="Parameters" %}} +{{< parameter p_name="color_palette" p_type="CatalogColorPalette" >}} +A catalog color palette object to be updated. 
+{{< /parameter >}} +{{% /parameters-block %}} + +{{% parameters-block title="Returns" None="yes"%}} +{{% /parameters-block %}} + +{{% parameters-block title="Raises" %}} +{{< parameter p_type="ValueError" >}} +Color palette does not exist. +{{< /parameter >}} +{{% /parameters-block %}} diff --git a/docs/content/en/latest/administration/appearance/update_theme.md b/docs/content/en/latest/administration/appearance/update_theme.md new file mode 100644 index 000000000..ac288f23e --- /dev/null +++ b/docs/content/en/latest/administration/appearance/update_theme.md @@ -0,0 +1,26 @@ +--- +title: "update_theme" +linkTitle: "update_theme" +superheading: "catalog_appearance." +weight: 130 +api_ref: "CatalogAppearanceService.update_theme" +--- + +``update_theme( theme: CatalogTheme ) -> None`` + +Update a theme. + +{{% parameters-block title="Parameters" %}} +{{< parameter p_name="theme" p_type="CatalogTheme" >}} +A catalog theme object to be updated. +{{< /parameter >}} +{{% /parameters-block %}} + +{{% parameters-block title="Returns" None="yes"%}} +{{% /parameters-block %}} + +{{% parameters-block title="Raises" %}} +{{< parameter p_type="ValueError" >}} +Theme does not exist. +{{< /parameter >}} +{{% /parameters-block %}} diff --git a/docs/content/en/latest/administration/notification-channels/get_declarative_notification_channels.md b/docs/content/en/latest/administration/notification-channels/get_declarative_notification_channels.md index c31b6343a..125dd4bf7 100644 --- a/docs/content/en/latest/administration/notification-channels/get_declarative_notification_channels.md +++ b/docs/content/en/latest/administration/notification-channels/get_declarative_notification_channels.md @@ -4,6 +4,7 @@ linkTitle: "get_declarative_notification_channels..." weight: 190 no_list: true superheading: "catalog_organization." 
+api_ref: "CatalogOrganizationService.get_declarative_notification_channels" --- diff --git a/docs/content/en/latest/administration/notification-channels/put_declarative_notification_channels.md b/docs/content/en/latest/administration/notification-channels/put_declarative_notification_channels.md index da1c086ee..fbecabc2c 100644 --- a/docs/content/en/latest/administration/notification-channels/put_declarative_notification_channels.md +++ b/docs/content/en/latest/administration/notification-channels/put_declarative_notification_channels.md @@ -4,6 +4,7 @@ linkTitle: "put_declarative_notification_channels..." weight: 190 no_list: true superheading: "catalog_organization." +api_ref: "CatalogOrganizationService.put_declarative_notification_channels" --- diff --git a/docs/content/en/latest/administration/organization/_index.md b/docs/content/en/latest/administration/organization/_index.md index c7e6ddbe9..bd113486f 100644 --- a/docs/content/en/latest/administration/organization/_index.md +++ b/docs/content/en/latest/administration/organization/_index.md @@ -12,7 +12,6 @@ See [Manage Organizations](https://www.gooddata.com/docs/cloud/manage-deployment ## Methods * [update_name](./update_name/) -* [update_oidc_parameters](./update_oidc_parameters/) * [create_or_update_jwk](./create_or_update_jwk/) * [delete_jwk](./delete_jwk/) * [get_jwk](./get_jwk/) @@ -33,9 +32,4 @@ sdk = GoodDataSdk.create(host, token) # Update organization name sdk.catalog_organization.update_name(name="new_organization_name") - -# Update OIDC provider -sdk.catalog_organization.update_oidc_parameters(oauth_client_id="oauth_client_id", - oauth_issuer_location="oauth_issuer_location", - oauth_client_secret="oauth_client_secret") ``` diff --git a/docs/content/en/latest/administration/organization/create_or_update_jwk.md b/docs/content/en/latest/administration/organization/create_or_update_jwk.md index 8e0581c64..d51f8b418 100644 --- 
a/docs/content/en/latest/administration/organization/create_or_update_jwk.md +++ b/docs/content/en/latest/administration/organization/create_or_update_jwk.md @@ -3,6 +3,7 @@ title: "create_or_update_jwk" linkTitle: "create_or_update_jwk" superheading: "catalog_organization." weight: 100 +api_ref: "CatalogOrganizationService.create_or_update_jwk" --- ``create_or_update_jwk( jwk: CatalogJwk ) -> None`` diff --git a/docs/content/en/latest/administration/organization/delete_jwk.md b/docs/content/en/latest/administration/organization/delete_jwk.md index 0146ca036..955035bbc 100644 --- a/docs/content/en/latest/administration/organization/delete_jwk.md +++ b/docs/content/en/latest/administration/organization/delete_jwk.md @@ -3,6 +3,7 @@ title: "delete_jwk" linkTitle: "delete_jwk" superheading: "catalog_organization." weight: 100 +api_ref: "CatalogOrganizationService.delete_jwk" --- ``delete_jwk( jwk_id: str ) -> None`` diff --git a/docs/content/en/latest/administration/organization/get_jwk.md b/docs/content/en/latest/administration/organization/get_jwk.md index a6a3186a8..e28ca8682 100644 --- a/docs/content/en/latest/administration/organization/get_jwk.md +++ b/docs/content/en/latest/administration/organization/get_jwk.md @@ -3,6 +3,7 @@ title: "get_jwk" linkTitle: "get_jwk" superheading: "catalog_organization." weight: 100 +api_ref: "CatalogOrganizationService.get_jwk" --- ``get_jwk( jwk_id: str ) -> CatalogJwk`` diff --git a/docs/content/en/latest/administration/organization/list_jwks.md b/docs/content/en/latest/administration/organization/list_jwks.md index 3569210ff..6b5d35b6a 100644 --- a/docs/content/en/latest/administration/organization/list_jwks.md +++ b/docs/content/en/latest/administration/organization/list_jwks.md @@ -3,6 +3,7 @@ title: "list_jwks" linkTitle: "list_jwks" superheading: "catalog_organization." 
weight: 100 +api_ref: "CatalogOrganizationService.list_jwks" --- ``list_jwks( ) -> List[CatalogJwk]`` diff --git a/docs/content/en/latest/administration/organization/update_name.md b/docs/content/en/latest/administration/organization/update_name.md index 90b2fbb61..dd4d41091 100644 --- a/docs/content/en/latest/administration/organization/update_name.md +++ b/docs/content/en/latest/administration/organization/update_name.md @@ -4,6 +4,7 @@ linkTitle: "update_name" weight: 10 no_list: true superheading: "catalog_organization." +api_ref: "CatalogOrganizationService.update_name" --- diff --git a/docs/content/en/latest/administration/organization/update_oidc_parameters.md b/docs/content/en/latest/administration/organization/update_oidc_parameters.md deleted file mode 100644 index c3f1e82c8..000000000 --- a/docs/content/en/latest/administration/organization/update_oidc_parameters.md +++ /dev/null @@ -1,44 +0,0 @@ ---- -title: "update_oidc_parameters" -linkTitle: "update_oidc_parameters" -weight: 20 -no_list: true -superheading: "catalog_organization." ---- - - - -``update_oidc_parameters(oauth_issuer_location: Optional[str] = None, oauth_client_id: Optional[str] = None, oauth_client_secret: Optional[str] = None)`` - -Updates the OIDC parameters for a given users. - -{{% parameters-block title="Parameters"%}} - -{{< parameter p_name="oauth_issuer_location" p_type="Optional[string]" >}} -Issuer location. Defaults to None. -{{< /parameter >}} -{{< parameter p_name="oauth_client_id" p_type="Optional[string]" >}} -Public client identifier. Defaults to None. -{{< /parameter >}} -{{< parameter p_name="oauth_client_secret" p_type="Optional[string]" >}} -Client secret. Defaults to None. -{{< /parameter >}} -{{% /parameters-block %}} - -{{% parameters-block title="Returns" None="yes"%}} -{{% /parameters-block %}} - -{{% parameters-block title="Raises"%}} -{{< parameter p_name="ValueError" >}} -Parameters were not strictly all none or all string. 
-{{< /parameter >}} -{{% /parameters-block %}} - -## Example - -```python -# Update OIDC provider -sdk.catalog_organization.update_oidc_parameters(oauth_client_id="oauth_client_id", - oauth_issuer_location="oauth_issuer_location", - oauth_client_secret="oauth_client_secret") -``` diff --git a/docs/content/en/latest/administration/permissions/get_declarative_organization_permissions.md b/docs/content/en/latest/administration/permissions/get_declarative_organization_permissions.md index 9ebc43666..e304f0539 100644 --- a/docs/content/en/latest/administration/permissions/get_declarative_organization_permissions.md +++ b/docs/content/en/latest/administration/permissions/get_declarative_organization_permissions.md @@ -4,6 +4,7 @@ linkTitle: "get_declarative_organization_permissions" weight: 10 no_list: true superheading: "catalog_permission." +api_ref: "CatalogPermissionService.get_declarative_organization_permissions" --- diff --git a/docs/content/en/latest/administration/permissions/get_declarative_permissions.md b/docs/content/en/latest/administration/permissions/get_declarative_permissions.md index f0c36515d..2304560f2 100644 --- a/docs/content/en/latest/administration/permissions/get_declarative_permissions.md +++ b/docs/content/en/latest/administration/permissions/get_declarative_permissions.md @@ -4,6 +4,7 @@ linkTitle: "get_declarative_permissions" weight: 10 no_list: true superheading: "catalog_permission." +api_ref: "CatalogPermissionService.get_declarative_permissions" --- diff --git a/docs/content/en/latest/administration/permissions/list_available_assignees.md b/docs/content/en/latest/administration/permissions/list_available_assignees.md index 70200410a..0574df833 100644 --- a/docs/content/en/latest/administration/permissions/list_available_assignees.md +++ b/docs/content/en/latest/administration/permissions/list_available_assignees.md @@ -4,6 +4,7 @@ linkTitle: "list_available_assignees" weight: 17 no_list: true superheading: "catalog_permission." 
+api_ref: "CatalogPermissionService.list_available_assignees" --- diff --git a/docs/content/en/latest/administration/permissions/list_dashboard_permissions.md b/docs/content/en/latest/administration/permissions/list_dashboard_permissions.md index b1ecf3415..e9d5db80d 100644 --- a/docs/content/en/latest/administration/permissions/list_dashboard_permissions.md +++ b/docs/content/en/latest/administration/permissions/list_dashboard_permissions.md @@ -4,6 +4,7 @@ linkTitle: "list_dashboard_permissions" weight: 16 no_list: true superheading: "catalog_permission." +api_ref: "CatalogPermissionService.list_dashboard_permissions" --- diff --git a/docs/content/en/latest/administration/permissions/manage_dashboard_permissions.md b/docs/content/en/latest/administration/permissions/manage_dashboard_permissions.md index 271d2db5e..ca03ceddb 100644 --- a/docs/content/en/latest/administration/permissions/manage_dashboard_permissions.md +++ b/docs/content/en/latest/administration/permissions/manage_dashboard_permissions.md @@ -4,6 +4,7 @@ linkTitle: "manage_dashboard_permissions" weight: 15 no_list: true superheading: "catalog_permission." +api_ref: "CatalogPermissionService.manage_dashboard_permissions" --- diff --git a/docs/content/en/latest/administration/permissions/manage_organization_permissions.md b/docs/content/en/latest/administration/permissions/manage_organization_permissions.md index 96676fbf9..3a972d3e8 100644 --- a/docs/content/en/latest/administration/permissions/manage_organization_permissions.md +++ b/docs/content/en/latest/administration/permissions/manage_organization_permissions.md @@ -4,6 +4,7 @@ linkTitle: "manage_organization_permissions" weight: 20 no_list: true superheading: "catalog_permission." 
+api_ref: "CatalogPermissionService.manage_organization_permissions" --- diff --git a/docs/content/en/latest/administration/permissions/put_declarative_organization_permissions.md b/docs/content/en/latest/administration/permissions/put_declarative_organization_permissions.md index adb4e4b51..50898d538 100644 --- a/docs/content/en/latest/administration/permissions/put_declarative_organization_permissions.md +++ b/docs/content/en/latest/administration/permissions/put_declarative_organization_permissions.md @@ -4,6 +4,7 @@ linkTitle: "put_declarative_organization_permissions" weight: 20 no_list: true superheading: "catalog_permission." +api_ref: "CatalogPermissionService.put_declarative_organization_permissions" --- diff --git a/docs/content/en/latest/administration/permissions/put_declarative_permissions.md b/docs/content/en/latest/administration/permissions/put_declarative_permissions.md index 3a0d47c30..9a0460f3b 100644 --- a/docs/content/en/latest/administration/permissions/put_declarative_permissions.md +++ b/docs/content/en/latest/administration/permissions/put_declarative_permissions.md @@ -4,6 +4,7 @@ linkTitle: "put_declarative_permissions" weight: 20 no_list: true superheading: "catalog_permission." +api_ref: "CatalogPermissionService.put_declarative_permissions" --- diff --git a/docs/content/en/latest/administration/user-groups/create_or_update_user_group.md b/docs/content/en/latest/administration/user-groups/create_or_update_user_group.md index 5ca57dc14..23c5b4379 100644 --- a/docs/content/en/latest/administration/user-groups/create_or_update_user_group.md +++ b/docs/content/en/latest/administration/user-groups/create_or_update_user_group.md @@ -4,6 +4,7 @@ linkTitle: "create_or_update_user_group" weight: 50 no_list: true superheading: "catalog_user." 
+api_ref: "CatalogUserService.create_or_update_user_group" --- diff --git a/docs/content/en/latest/administration/user-groups/delete_user_group.md b/docs/content/en/latest/administration/user-groups/delete_user_group.md index 22f8b8fa0..820428758 100644 --- a/docs/content/en/latest/administration/user-groups/delete_user_group.md +++ b/docs/content/en/latest/administration/user-groups/delete_user_group.md @@ -4,6 +4,7 @@ linkTitle: "delete_user_group" weight: 70 no_list: true superheading: "catalog_user." +api_ref: "CatalogUserService.delete_user_group" --- diff --git a/docs/content/en/latest/administration/user-groups/get_declarative_user_groups.md b/docs/content/en/latest/administration/user-groups/get_declarative_user_groups.md index 6ce63227e..d69f4f5e3 100644 --- a/docs/content/en/latest/administration/user-groups/get_declarative_user_groups.md +++ b/docs/content/en/latest/administration/user-groups/get_declarative_user_groups.md @@ -4,6 +4,7 @@ linkTitle: "get_declarative_user_groups" weight: 140 no_list: true superheading: "catalog_user." +api_ref: "CatalogUserService.get_declarative_user_groups" --- ``get_declarative_user_groups()`` diff --git a/docs/content/en/latest/administration/user-groups/get_user_group.md b/docs/content/en/latest/administration/user-groups/get_user_group.md index 01105feb0..04d5fe049 100644 --- a/docs/content/en/latest/administration/user-groups/get_user_group.md +++ b/docs/content/en/latest/administration/user-groups/get_user_group.md @@ -4,6 +4,7 @@ linkTitle: "get_user_group" weight: 60 no_list: true superheading: "catalog_user." 
+api_ref: "CatalogUserService.get_user_group" --- diff --git a/docs/content/en/latest/administration/user-groups/get_user_group_permissions.md b/docs/content/en/latest/administration/user-groups/get_user_group_permissions.md index 4c1d4bf6f..ff9ddeb21 100644 --- a/docs/content/en/latest/administration/user-groups/get_user_group_permissions.md +++ b/docs/content/en/latest/administration/user-groups/get_user_group_permissions.md @@ -4,6 +4,7 @@ linkTitle: "get_user_group_permissions" weight: 10 no_list: true superheading: "catalog_user." +api_ref: "CatalogUserService.get_user_group_permissions" --- diff --git a/docs/content/en/latest/administration/user-groups/list_user_groups.md b/docs/content/en/latest/administration/user-groups/list_user_groups.md index 02469bab8..bd85f2722 100644 --- a/docs/content/en/latest/administration/user-groups/list_user_groups.md +++ b/docs/content/en/latest/administration/user-groups/list_user_groups.md @@ -4,6 +4,7 @@ linkTitle: "list_user_groups" weight: 80 no_list: true superheading: "catalog_user." +api_ref: "CatalogUserService.list_user_groups" --- diff --git a/docs/content/en/latest/administration/user-groups/load_and_put_declarative_user_groups.md b/docs/content/en/latest/administration/user-groups/load_and_put_declarative_user_groups.md index ef7281638..007604a1b 100644 --- a/docs/content/en/latest/administration/user-groups/load_and_put_declarative_user_groups.md +++ b/docs/content/en/latest/administration/user-groups/load_and_put_declarative_user_groups.md @@ -4,6 +4,7 @@ linkTitle: "load_and_put_declarative_user_g..." weight: 180 no_list: true superheading: "catalog_user." 
+api_ref: "CatalogUserService.load_and_put_declarative_user_groups" --- diff --git a/docs/content/en/latest/administration/user-groups/load_declarative_user_groups.md b/docs/content/en/latest/administration/user-groups/load_declarative_user_groups.md index 7b1ef3d47..eee7035db 100644 --- a/docs/content/en/latest/administration/user-groups/load_declarative_user_groups.md +++ b/docs/content/en/latest/administration/user-groups/load_declarative_user_groups.md @@ -4,6 +4,7 @@ linkTitle: "load_declarative_user_groups" weight: 170 no_list: true superheading: "catalog_user." +api_ref: "CatalogUserService.load_declarative_user_groups" --- diff --git a/docs/content/en/latest/administration/user-groups/manage_user_group_permissions.md b/docs/content/en/latest/administration/user-groups/manage_user_group_permissions.md index 5191bf2db..ea0395881 100644 --- a/docs/content/en/latest/administration/user-groups/manage_user_group_permissions.md +++ b/docs/content/en/latest/administration/user-groups/manage_user_group_permissions.md @@ -4,6 +4,7 @@ linkTitle: "manage_user_group_permissions" weight: 10 no_list: true superheading: "catalog_user." +api_ref: "CatalogUserService.manage_user_group_permissions" --- diff --git a/docs/content/en/latest/administration/user-groups/put_declarative_user_groups.md b/docs/content/en/latest/administration/user-groups/put_declarative_user_groups.md index 04e6802b7..050cc9ec5 100644 --- a/docs/content/en/latest/administration/user-groups/put_declarative_user_groups.md +++ b/docs/content/en/latest/administration/user-groups/put_declarative_user_groups.md @@ -4,6 +4,7 @@ linkTitle: "put_declarative_user_groups" weight: 150 no_list: true superheading: "catalog_user." 
+api_ref: "CatalogUserService.put_declarative_user_groups" --- diff --git a/docs/content/en/latest/administration/user-groups/store_declarative_user_groups.md b/docs/content/en/latest/administration/user-groups/store_declarative_user_groups.md index 278bd65c1..6805a4370 100644 --- a/docs/content/en/latest/administration/user-groups/store_declarative_user_groups.md +++ b/docs/content/en/latest/administration/user-groups/store_declarative_user_groups.md @@ -4,6 +4,7 @@ linkTitle: "store_declarative_user_groups" weight: 160 no_list: true superheading: "catalog_user." +api_ref: "CatalogUserService.store_declarative_user_groups" --- diff --git a/docs/content/en/latest/administration/users-and-user-groups/assign_permissions_bulk.md b/docs/content/en/latest/administration/users-and-user-groups/assign_permissions_bulk.md index ed318df07..a88073eed 100644 --- a/docs/content/en/latest/administration/users-and-user-groups/assign_permissions_bulk.md +++ b/docs/content/en/latest/administration/users-and-user-groups/assign_permissions_bulk.md @@ -4,6 +4,7 @@ linkTitle: "assign_permissions_bulk" weight: 10 no_list: true superheading: "catalog_user." +api_ref: "CatalogUserService.assign_permissions_bulk" --- diff --git a/docs/content/en/latest/administration/users-and-user-groups/get_declarative_users_user_groups.md b/docs/content/en/latest/administration/users-and-user-groups/get_declarative_users_user_groups.md index 3208ee81a..5ee3a1525 100644 --- a/docs/content/en/latest/administration/users-and-user-groups/get_declarative_users_user_groups.md +++ b/docs/content/en/latest/administration/users-and-user-groups/get_declarative_users_user_groups.md @@ -4,6 +4,7 @@ linkTitle: "get_declarative_users_user_grou..." weight: 190 no_list: true superheading: "catalog_user." 
+api_ref: "CatalogUserService.get_declarative_users_user_groups" --- diff --git a/docs/content/en/latest/administration/users-and-user-groups/load_and_put_declarative_users_user_groups.md b/docs/content/en/latest/administration/users-and-user-groups/load_and_put_declarative_users_user_groups.md index 62ad8c76c..acd299b18 100644 --- a/docs/content/en/latest/administration/users-and-user-groups/load_and_put_declarative_users_user_groups.md +++ b/docs/content/en/latest/administration/users-and-user-groups/load_and_put_declarative_users_user_groups.md @@ -4,6 +4,7 @@ linkTitle: "load_and_put_declarative_users..." weight: 230 no_list: true superheading: "catalog_user." +api_ref: "CatalogUserService.load_and_put_declarative_users_user_groups" --- diff --git a/docs/content/en/latest/administration/users-and-user-groups/load_declarative_users_user_groups.md b/docs/content/en/latest/administration/users-and-user-groups/load_declarative_users_user_groups.md index d217662a1..04a458d5c 100644 --- a/docs/content/en/latest/administration/users-and-user-groups/load_declarative_users_user_groups.md +++ b/docs/content/en/latest/administration/users-and-user-groups/load_declarative_users_user_groups.md @@ -4,6 +4,7 @@ linkTitle: "load_declarative_users_user_gro..." weight: 220 no_list: true superheading: "catalog_user." +api_ref: "CatalogUserService.load_declarative_users_user_groups" --- diff --git a/docs/content/en/latest/administration/users-and-user-groups/put_declarative_users_user_groups.md b/docs/content/en/latest/administration/users-and-user-groups/put_declarative_users_user_groups.md index 017705d85..572b73343 100644 --- a/docs/content/en/latest/administration/users-and-user-groups/put_declarative_users_user_groups.md +++ b/docs/content/en/latest/administration/users-and-user-groups/put_declarative_users_user_groups.md @@ -4,6 +4,7 @@ linkTitle: "put_declarative_users_user_grou..." weight: 200 no_list: true superheading: "catalog_user." 
+api_ref: "CatalogUserService.put_declarative_users_user_groups" --- diff --git a/docs/content/en/latest/administration/users-and-user-groups/revoke_permissions_bulk.md b/docs/content/en/latest/administration/users-and-user-groups/revoke_permissions_bulk.md index 8cb407093..6bd80f527 100644 --- a/docs/content/en/latest/administration/users-and-user-groups/revoke_permissions_bulk.md +++ b/docs/content/en/latest/administration/users-and-user-groups/revoke_permissions_bulk.md @@ -4,6 +4,7 @@ linkTitle: "revoke_permissions_bulk" weight: 10 no_list: true superheading: "catalog_user." +api_ref: "CatalogUserService.revoke_permissions_bulk" --- diff --git a/docs/content/en/latest/administration/users-and-user-groups/store_declarative_users_user_groups.md b/docs/content/en/latest/administration/users-and-user-groups/store_declarative_users_user_groups.md index cfd07c66c..1fe69eafb 100644 --- a/docs/content/en/latest/administration/users-and-user-groups/store_declarative_users_user_groups.md +++ b/docs/content/en/latest/administration/users-and-user-groups/store_declarative_users_user_groups.md @@ -4,6 +4,7 @@ linkTitle: "store_declarative_users_user_gro..." weight: 210 no_list: true superheading: "catalog_user." +api_ref: "CatalogUserService.store_declarative_users_user_groups" --- diff --git a/docs/content/en/latest/administration/users/create_or_update_user.md b/docs/content/en/latest/administration/users/create_or_update_user.md index 33bc25538..313f3d1ad 100644 --- a/docs/content/en/latest/administration/users/create_or_update_user.md +++ b/docs/content/en/latest/administration/users/create_or_update_user.md @@ -4,6 +4,7 @@ linkTitle: "create_or_update_user" weight: 10 no_list: true superheading: "catalog_user." 
+api_ref: "CatalogUserService.create_or_update_user" --- diff --git a/docs/content/en/latest/administration/users/create_user_api_token.md b/docs/content/en/latest/administration/users/create_user_api_token.md index 3e7c4c334..25e434578 100644 --- a/docs/content/en/latest/administration/users/create_user_api_token.md +++ b/docs/content/en/latest/administration/users/create_user_api_token.md @@ -4,6 +4,7 @@ linkTitle: "create_user_api_token" weight: 10 no_list: true superheading: "catalog_user." +api_ref: "CatalogUserService.create_user_api_token" --- diff --git a/docs/content/en/latest/administration/users/delete_user.md b/docs/content/en/latest/administration/users/delete_user.md index 3a400c89d..ccb2cd181 100644 --- a/docs/content/en/latest/administration/users/delete_user.md +++ b/docs/content/en/latest/administration/users/delete_user.md @@ -4,6 +4,7 @@ linkTitle: "delete_user" weight: 30 no_list: true superheading: "catalog_user." +api_ref: "CatalogUserService.delete_user" --- diff --git a/docs/content/en/latest/administration/users/delete_user_api_token.md b/docs/content/en/latest/administration/users/delete_user_api_token.md index 76bb9122c..efa5f3ec9 100644 --- a/docs/content/en/latest/administration/users/delete_user_api_token.md +++ b/docs/content/en/latest/administration/users/delete_user_api_token.md @@ -4,6 +4,7 @@ linkTitle: "delete_user_api_token" weight: 10 no_list: true superheading: "catalog_user." +api_ref: "CatalogUserService.delete_user_api_token" --- diff --git a/docs/content/en/latest/administration/users/get_declarative_users.md b/docs/content/en/latest/administration/users/get_declarative_users.md index b220593b4..4e0f59ad9 100644 --- a/docs/content/en/latest/administration/users/get_declarative_users.md +++ b/docs/content/en/latest/administration/users/get_declarative_users.md @@ -4,6 +4,7 @@ linkTitle: "get_declarative_users" weight: 90 no_list: true superheading: "catalog_user." 
+api_ref: "CatalogUserService.get_declarative_users" --- diff --git a/docs/content/en/latest/administration/users/get_user.md b/docs/content/en/latest/administration/users/get_user.md index c4f1984c0..74a4c10b6 100644 --- a/docs/content/en/latest/administration/users/get_user.md +++ b/docs/content/en/latest/administration/users/get_user.md @@ -4,6 +4,7 @@ linkTitle: "get_user" weight: 20 no_list: true superheading: "catalog_user." +api_ref: "CatalogUserService.get_user" --- diff --git a/docs/content/en/latest/administration/users/get_user_api_token.md b/docs/content/en/latest/administration/users/get_user_api_token.md index fc5193a05..1a737fe69 100644 --- a/docs/content/en/latest/administration/users/get_user_api_token.md +++ b/docs/content/en/latest/administration/users/get_user_api_token.md @@ -4,6 +4,7 @@ linkTitle: "get_user_api_token" weight: 10 no_list: true superheading: "catalog_user." +api_ref: "CatalogUserService.get_user_api_token" --- diff --git a/docs/content/en/latest/administration/users/get_user_permissions.md b/docs/content/en/latest/administration/users/get_user_permissions.md index 17a2bbd57..34c5cea6f 100644 --- a/docs/content/en/latest/administration/users/get_user_permissions.md +++ b/docs/content/en/latest/administration/users/get_user_permissions.md @@ -4,6 +4,7 @@ linkTitle: "get_user_permissions" weight: 10 no_list: true superheading: "catalog_user." +api_ref: "CatalogUserService.get_user_permissions" --- diff --git a/docs/content/en/latest/administration/users/list_user_api_tokens.md b/docs/content/en/latest/administration/users/list_user_api_tokens.md index 6c4dad769..56fcf2742 100644 --- a/docs/content/en/latest/administration/users/list_user_api_tokens.md +++ b/docs/content/en/latest/administration/users/list_user_api_tokens.md @@ -4,6 +4,7 @@ linkTitle: "list_user_api_tokens" weight: 10 no_list: true superheading: "catalog_user." 
+api_ref: "CatalogUserService.list_user_api_tokens" --- diff --git a/docs/content/en/latest/administration/users/list_users.md b/docs/content/en/latest/administration/users/list_users.md index 6d362e4f6..7c2af6a19 100644 --- a/docs/content/en/latest/administration/users/list_users.md +++ b/docs/content/en/latest/administration/users/list_users.md @@ -4,6 +4,7 @@ linkTitle: "list_users" weight: 40 no_list: true superheading: "catalog_user." +api_ref: "CatalogUserService.list_users" --- diff --git a/docs/content/en/latest/administration/users/load_and_put_declarative_users.md b/docs/content/en/latest/administration/users/load_and_put_declarative_users.md index 730b11f0f..794038e72 100644 --- a/docs/content/en/latest/administration/users/load_and_put_declarative_users.md +++ b/docs/content/en/latest/administration/users/load_and_put_declarative_users.md @@ -4,6 +4,7 @@ linkTitle: "load_and_put_declarative_users" weight: 130 no_list: true superheading: "catalog_user." +api_ref: "CatalogUserService.load_and_put_declarative_users" --- diff --git a/docs/content/en/latest/administration/users/load_declarative_users.md b/docs/content/en/latest/administration/users/load_declarative_users.md index c1ae510de..1adc0785d 100644 --- a/docs/content/en/latest/administration/users/load_declarative_users.md +++ b/docs/content/en/latest/administration/users/load_declarative_users.md @@ -4,6 +4,7 @@ linkTitle: "load_declarative_users" weight: 120 no_list: true superheading: "catalog_user." 
+api_ref: "CatalogUserService.load_declarative_users" --- diff --git a/docs/content/en/latest/administration/users/manage_user_permissions.md b/docs/content/en/latest/administration/users/manage_user_permissions.md index 4cb2eb29f..787adcd22 100644 --- a/docs/content/en/latest/administration/users/manage_user_permissions.md +++ b/docs/content/en/latest/administration/users/manage_user_permissions.md @@ -4,6 +4,7 @@ linkTitle: "manage_user_permissions" weight: 10 no_list: true superheading: "catalog_user." +api_ref: "CatalogUserService.manage_user_permissions" --- diff --git a/docs/content/en/latest/administration/users/put_declarative_users.md b/docs/content/en/latest/administration/users/put_declarative_users.md index d9816ae3d..c9a094ab3 100644 --- a/docs/content/en/latest/administration/users/put_declarative_users.md +++ b/docs/content/en/latest/administration/users/put_declarative_users.md @@ -4,6 +4,7 @@ linkTitle: "put_declarative_users" weight: 100 no_list: true superheading: "catalog_user." +api_ref: "CatalogUserService.put_declarative_users" --- diff --git a/docs/content/en/latest/administration/users/store_declarative_users.md b/docs/content/en/latest/administration/users/store_declarative_users.md index 88c727dca..beb2bf322 100644 --- a/docs/content/en/latest/administration/users/store_declarative_users.md +++ b/docs/content/en/latest/administration/users/store_declarative_users.md @@ -4,6 +4,7 @@ linkTitle: "store_declarative_users" weight: 110 no_list: true superheading: "catalog_user." +api_ref: "CatalogUserService.store_declarative_users" --- diff --git a/docs/content/en/latest/api-reference/_index.md b/docs/content/en/latest/api-reference/_index.md index df77a072a..810154d2b 100644 --- a/docs/content/en/latest/api-reference/_index.md +++ b/docs/content/en/latest/api-reference/_index.md @@ -4,6 +4,3 @@ linkTitle: "API Reference" weight: 99 navigationLabel: true --- - - -Placeholder for gerenerated API reference. 
diff --git a/docs/content/en/latest/data/data-source/_index.md b/docs/content/en/latest/data/data-source/_index.md index 9a8829d31..4a493c7ec 100644 --- a/docs/content/en/latest/data/data-source/_index.md +++ b/docs/content/en/latest/data/data-source/_index.md @@ -37,6 +37,14 @@ See [Connect Data](https://www.gooddata.com/docs/cloud/connect-data/) to learn h * [scan_schemata](./scan_schemata/) * [scan_sql](./scan_sql/) +### CSV Upload Methods + +* [staging_upload](./staging_upload/) +* [analyze_csv](./analyze_csv/) +* [import_csv](./import_csv/) +* [delete_csv_files](./delete_csv_files/) +* [upload_csv](./upload_csv/) + ## Example diff --git a/docs/content/en/latest/data/data-source/analyze_csv.md b/docs/content/en/latest/data/data-source/analyze_csv.md new file mode 100644 index 000000000..e25ffa2ba --- /dev/null +++ b/docs/content/en/latest/data/data-source/analyze_csv.md @@ -0,0 +1,38 @@ +--- +title: "analyze_csv" +linkTitle: "analyze_csv" +weight: 191 +superheading: "catalog_data_source." +api_ref: "CatalogDataSourceService.analyze_csv" +--- + + + +``analyze_csv(location: str)`` + +Analyzes an uploaded CSV file in the staging area. Returns column metadata, detected types, preview data, and a config object that can be passed to import_csv. + +{{% parameters-block title="Parameters"%}} + +{{< parameter p_name="location" p_type="string" >}} +Location string returned by staging_upload. +{{< /parameter >}} + +{{% /parameters-block %}} + +{{% parameters-block title="Returns"%}} + +{{< parameter p_type="AnalyzeCsvResponse" >}} +Analysis result with columns, preview data, and config. 
+{{< /parameter >}} + +{{% /parameters-block %}} + +## Example + +```python +# Analyze a previously uploaded CSV file +analysis = sdk.catalog_data_source.analyze_csv(location="staging/some-location") +for col in analysis.to_dict()["columns"]: +    print(f"{col['name']}: {col['type']}") +``` diff --git a/docs/content/en/latest/data/data-source/create_or_update_data_source.md b/docs/content/en/latest/data/data-source/create_or_update_data_source.md index 7a47347f3..e6d29e512 100644 --- a/docs/content/en/latest/data/data-source/create_or_update_data_source.md +++ b/docs/content/en/latest/data/data-source/create_or_update_data_source.md @@ -3,6 +3,7 @@ title: "create_or_update_data_source" linkTitle: "create_or_update_data_source" weight: 10 superheading: "catalog_data_source." +api_ref: "CatalogDataSourceService.create_or_update_data_source" --- diff --git a/docs/content/en/latest/data/data-source/delete_csv_files.md b/docs/content/en/latest/data/data-source/delete_csv_files.md new file mode 100644 index 000000000..4b58d9487 --- /dev/null +++ b/docs/content/en/latest/data/data-source/delete_csv_files.md @@ -0,0 +1,38 @@ +--- +title: "delete_csv_files" +linkTitle: "delete_csv_files" +weight: 193 +superheading: "catalog_data_source." +api_ref: "CatalogDataSourceService.delete_csv_files" +--- + + + +``delete_csv_files(data_source_id: str, file_names: list[str])`` + +Deletes files from a GDSTORAGE data source. + +{{% parameters-block title="Parameters"%}} + +{{< parameter p_name="data_source_id" p_type="string" >}} +Data source identification string. +{{< /parameter >}} + +{{< parameter p_name="file_names" p_type="list[string]" >}} +List of file names to delete. 
+{{< /parameter >}} + +{{% /parameters-block %}} + +{{% parameters-block title="Returns" None="yes"%}} +{{% /parameters-block %}} + +## Example + +```python +# Delete specific files from a GDSTORAGE data source +sdk.catalog_data_source.delete_csv_files( + data_source_id="my-gdstorage-ds", + file_names=["my_table.csv"], +) +``` diff --git a/docs/content/en/latest/data/data-source/delete_data_source.md b/docs/content/en/latest/data/data-source/delete_data_source.md index 79ec7818a..664c81e00 100644 --- a/docs/content/en/latest/data/data-source/delete_data_source.md +++ b/docs/content/en/latest/data/data-source/delete_data_source.md @@ -3,6 +3,7 @@ title: "delete_data_source" linkTitle: "delete_data_source" weight: 30 superheading: "catalog_data_source." +api_ref: "CatalogDataSourceService.delete_data_source" --- diff --git a/docs/content/en/latest/data/data-source/generate_logical_model.md b/docs/content/en/latest/data/data-source/generate_logical_model.md index db1d8b038..873033529 100644 --- a/docs/content/en/latest/data/data-source/generate_logical_model.md +++ b/docs/content/en/latest/data/data-source/generate_logical_model.md @@ -3,6 +3,7 @@ title: "generate_logical_model" linkTitle: "generate_logical_model" weight: 170 superheading: "catalog_data_source." +api_ref: "CatalogDataSourceService.generate_logical_model" --- diff --git a/docs/content/en/latest/data/data-source/get_data_source.md b/docs/content/en/latest/data/data-source/get_data_source.md index 113f1ed3a..29d8c1927 100644 --- a/docs/content/en/latest/data/data-source/get_data_source.md +++ b/docs/content/en/latest/data/data-source/get_data_source.md @@ -3,6 +3,7 @@ title: "get_data_source" linkTitle: "get_data_source" weight: 20 superheading: "catalog_data_source." 
+api_ref: "CatalogDataSourceService.get_data_source" --- diff --git a/docs/content/en/latest/data/data-source/get_declarative_data_sources.md b/docs/content/en/latest/data/data-source/get_declarative_data_sources.md index 12c16eb90..a39b1b781 100644 --- a/docs/content/en/latest/data/data-source/get_declarative_data_sources.md +++ b/docs/content/en/latest/data/data-source/get_declarative_data_sources.md @@ -3,6 +3,7 @@ title: "get_declarative_data_sources" linkTitle: "get_declarative_data_sources" weight: 70 superheading: "catalog_data_source." +api_ref: "CatalogDataSourceService.get_declarative_data_sources" --- diff --git a/docs/content/en/latest/data/data-source/import_csv.md b/docs/content/en/latest/data/data-source/import_csv.md new file mode 100644 index 000000000..41ee79e3e --- /dev/null +++ b/docs/content/en/latest/data/data-source/import_csv.md @@ -0,0 +1,50 @@ +--- +title: "import_csv" +linkTitle: "import_csv" +weight: 192 +superheading: "catalog_data_source." +api_ref: "CatalogDataSourceService.import_csv" +--- + + + +``import_csv(data_source_id: str, table_name: str, location: str, config: Optional[dict] = None)`` + +Imports a CSV file from the staging area into a GDSTORAGE data source. + +{{% parameters-block title="Parameters"%}} + +{{< parameter p_name="data_source_id" p_type="string" >}} +Data source identification string. +{{< /parameter >}} + +{{< parameter p_name="table_name" p_type="string" >}} +Name for the table to create or replace. +{{< /parameter >}} + +{{< parameter p_name="location" p_type="string" >}} +Location string returned by staging_upload. +{{< /parameter >}} + +{{< parameter p_name="config" p_type="Optional[dict]" >}} +Source config dict, typically from analyze_csv response. Optional. 
+{{< /parameter >}} + +{{% /parameters-block %}} + +{{% parameters-block title="Returns" None="yes"%}} +{{% /parameters-block %}} + +## Example + +```python +# Import a CSV into a GDSTORAGE data source using config from analysis +analysis = sdk.catalog_data_source.analyze_csv(location=location) +config = analysis.to_dict().get("config") +sdk.catalog_data_source.import_csv( + data_source_id="my-gdstorage-ds", + table_name="my_table", + location=location, + config=config, +) +``` diff --git a/docs/content/en/latest/data/data-source/list_data_sources.md b/docs/content/en/latest/data/data-source/list_data_sources.md index 6167cb4ae..1765e26ad 100644 --- a/docs/content/en/latest/data/data-source/list_data_sources.md +++ b/docs/content/en/latest/data/data-source/list_data_sources.md @@ -3,6 +3,7 @@ title: "list_data_sources" linkTitle: "list_data_sources" weight: 50 superheading: "catalog_data_source." +api_ref: "CatalogDataSourceService.list_data_sources" --- diff --git a/docs/content/en/latest/data/data-source/load_and_put_declarative_data_sources.md b/docs/content/en/latest/data/data-source/load_and_put_declarative_data_sources.md index 38d2bee0c..e5344d298 100644 --- a/docs/content/en/latest/data/data-source/load_and_put_declarative_data_sources.md +++ b/docs/content/en/latest/data/data-source/load_and_put_declarative_data_sources.md @@ -3,6 +3,7 @@ title: "load_and_put_declarative_data_sources" linkTitle: "load_and_put_declarative_data..." weight: 110 superheading: "catalog_data_source." 
+api_ref: "CatalogDataSourceService.load_and_put_declarative_data_sources" --- diff --git a/docs/content/en/latest/data/data-source/load_declarative_data_sources.md b/docs/content/en/latest/data/data-source/load_declarative_data_sources.md index d3eba2c98..bb294df5f 100644 --- a/docs/content/en/latest/data/data-source/load_declarative_data_sources.md +++ b/docs/content/en/latest/data/data-source/load_declarative_data_sources.md @@ -3,6 +3,7 @@ title: "load_declarative_data_sources" linkTitle: "load_declarative_data_sources" weight: 100 superheading: "catalog_data_source." +api_ref: "CatalogDataSourceService.load_declarative_data_sources" --- diff --git a/docs/content/en/latest/data/data-source/patch_data_source_attributes.md b/docs/content/en/latest/data/data-source/patch_data_source_attributes.md index bdd946ca8..120a480d1 100644 --- a/docs/content/en/latest/data/data-source/patch_data_source_attributes.md +++ b/docs/content/en/latest/data/data-source/patch_data_source_attributes.md @@ -3,6 +3,7 @@ title: "patch_data_source_attributes" linkTitle: "patch_data_source_attributes" weight: 40 superheading: "catalog_data_source." +api_ref: "CatalogDataSourceService.patch_data_source_attributes" --- diff --git a/docs/content/en/latest/data/data-source/put_declarative_data_sources.md b/docs/content/en/latest/data/data-source/put_declarative_data_sources.md index 4658aa369..613da61b8 100644 --- a/docs/content/en/latest/data/data-source/put_declarative_data_sources.md +++ b/docs/content/en/latest/data/data-source/put_declarative_data_sources.md @@ -3,6 +3,7 @@ title: "put_declarative_data_sources" linkTitle: "put_declarative_data_sources" weight: 80 superheading: "catalog_data_source." 
+api_ref: "CatalogDataSourceService.put_declarative_data_sources" --- diff --git a/docs/content/en/latest/data/data-source/register_upload_notification.md b/docs/content/en/latest/data/data-source/register_upload_notification.md index d40aff752..efd22507a 100644 --- a/docs/content/en/latest/data/data-source/register_upload_notification.md +++ b/docs/content/en/latest/data/data-source/register_upload_notification.md @@ -3,6 +3,7 @@ title: "register_upload_notification" linkTitle: "register_upload_notification" weight: 180 superheading: "catalog_data_source." +api_ref: "CatalogDataSourceService.register_upload_notification" --- diff --git a/docs/content/en/latest/data/data-source/scan_data_source.md b/docs/content/en/latest/data/data-source/scan_data_source.md index 6dc011b90..daaa5dc69 100644 --- a/docs/content/en/latest/data/data-source/scan_data_source.md +++ b/docs/content/en/latest/data/data-source/scan_data_source.md @@ -3,6 +3,7 @@ title: "scan_data_source" linkTitle: "scan_data_source" weight: 190 superheading: "catalog_data_source." +api_ref: "CatalogDataSourceService.scan_data_source" --- ``scan_data_source(data_source_id: str, scan_request: CatalogScanModelRequest = CatalogScanModelRequest(), report_warnings: bool = False)`` diff --git a/docs/content/en/latest/data/data-source/scan_pdm_and_generate_logical_model.md b/docs/content/en/latest/data/data-source/scan_pdm_and_generate_logical_model.md index 731be55ab..64c55f671 100644 --- a/docs/content/en/latest/data/data-source/scan_pdm_and_generate_logical_model.md +++ b/docs/content/en/latest/data/data-source/scan_pdm_and_generate_logical_model.md @@ -3,6 +3,7 @@ title: "scan_pdm_and_generate_logical_model" linkTitle: "scan_pdm_and_generate_logical_model" weight: 190 superheading: "catalog_data_source." 
+api_ref: "CatalogDataSourceService.scan_pdm_and_generate_logical_model" --- ``scan_pdm_and_generate_logical_model(data_source_id: str, generate_ldm_request: Optional[CatalogGenerateLdmRequest] = None, scan_request: CatalogScanModelRequest = CatalogScanModelRequest(), report_warnings: bool = False) -> tuple[CatalogDeclarativeModel, CatalogScanResultPdm]`` diff --git a/docs/content/en/latest/data/data-source/scan_schemata.md b/docs/content/en/latest/data/data-source/scan_schemata.md index bdaae2948..f9ef345b2 100644 --- a/docs/content/en/latest/data/data-source/scan_schemata.md +++ b/docs/content/en/latest/data/data-source/scan_schemata.md @@ -3,6 +3,7 @@ title: "scan_schemata" linkTitle: "scan_schemata" weight: 210 superheading: "catalog_data_source." +api_ref: "CatalogDataSourceService.scan_schemata" --- diff --git a/docs/content/en/latest/data/data-source/scan_sql.md b/docs/content/en/latest/data/data-source/scan_sql.md index ac789c09d..eed463876 100644 --- a/docs/content/en/latest/data/data-source/scan_sql.md +++ b/docs/content/en/latest/data/data-source/scan_sql.md @@ -3,6 +3,7 @@ title: "scan_sql" linkTitle: "scan_sql" weight: 210 superheading: "catalog_data_source." +api_ref: "CatalogDataSourceService.scan_sql" --- diff --git a/docs/content/en/latest/data/data-source/staging_upload.md b/docs/content/en/latest/data/data-source/staging_upload.md new file mode 100644 index 000000000..9ccc2216a --- /dev/null +++ b/docs/content/en/latest/data/data-source/staging_upload.md @@ -0,0 +1,38 @@ +--- +title: "staging_upload" +linkTitle: "staging_upload" +weight: 190 +superheading: "catalog_data_source." +api_ref: "CatalogDataSourceService.staging_upload" +--- + + + +``staging_upload(csv_file: Path)`` + +Uploads a CSV file to the staging area and returns a location string that can be used in subsequent calls to analyze_csv and import_csv. + +{{% parameters-block title="Parameters"%}} + +{{< parameter p_name="csv_file" p_type="Path" >}} +Path to the CSV file to upload. 
+{{< /parameter >}} + +{{% /parameters-block %}} + +{{% parameters-block title="Returns"%}} + +{{< parameter p_type="string" >}} +Location string referencing the uploaded file in staging. +{{< /parameter >}} + +{{% /parameters-block %}} + +## Example + +```python +from pathlib import Path + +# Upload a CSV file to staging +location = sdk.catalog_data_source.staging_upload(csv_file=Path("data.csv")) +``` diff --git a/docs/content/en/latest/data/data-source/store_declarative_data_sources.md b/docs/content/en/latest/data/data-source/store_declarative_data_sources.md index 18cfee10f..0248e9ec8 100644 --- a/docs/content/en/latest/data/data-source/store_declarative_data_sources.md +++ b/docs/content/en/latest/data/data-source/store_declarative_data_sources.md @@ -3,6 +3,7 @@ title: "store_declarative_data_sources" linkTitle: "store_declarative_data_sources" weight: 90 superheading: "catalog_data_source." +api_ref: "CatalogDataSourceService.store_declarative_data_sources" --- ``store_declarative_data_sources(layout_root_path: Path = Path.cwd())`` diff --git a/docs/content/en/latest/data/data-source/test_data_sources_connection.md b/docs/content/en/latest/data/data-source/test_data_sources_connection.md index b5bfc2f9f..cb982552b 100644 --- a/docs/content/en/latest/data/data-source/test_data_sources_connection.md +++ b/docs/content/en/latest/data/data-source/test_data_sources_connection.md @@ -3,6 +3,7 @@ title: "test_data_sources_connection" linkTitle: "test_data_sources_connection" weight: 220 superheading: "catalog_data_source." +api_ref: "CatalogDataSourceService.test_data_sources_connection" --- diff --git a/docs/content/en/latest/data/data-source/upload_csv.md b/docs/content/en/latest/data/data-source/upload_csv.md new file mode 100644 index 000000000..27d6242ac --- /dev/null +++ b/docs/content/en/latest/data/data-source/upload_csv.md @@ -0,0 +1,45 @@ +--- +title: "upload_csv" +linkTitle: "upload_csv" +weight: 194 +superheading: "catalog_data_source." 
+api_ref: "CatalogDataSourceService.upload_csv" +--- + + + +``upload_csv(data_source_id: str, csv_file: Path, table_name: str)`` + +Convenience method that uploads a CSV file and imports it into a GDSTORAGE data source in a single call. Orchestrates the full flow: staging_upload → analyze_csv → import_csv → register_upload_notification. + +{{% parameters-block title="Parameters"%}} + +{{< parameter p_name="data_source_id" p_type="string" >}} +Data source identification string for a GDSTORAGE data source. +{{< /parameter >}} + +{{< parameter p_name="csv_file" p_type="Path" >}} +Path to the CSV file to upload. +{{< /parameter >}} + +{{< parameter p_name="table_name" p_type="string" >}} +Name for the table to create or replace in the data source. +{{< /parameter >}} + +{{% /parameters-block %}} + +{{% parameters-block title="Returns" None="yes"%}} +{{% /parameters-block %}} + +## Example + +```python +from pathlib import Path + +# Upload a CSV file end-to-end in a single call +sdk.catalog_data_source.upload_csv( + data_source_id="my-gdstorage-ds", + csv_file=Path("data.csv"), + table_name="my_table", +) +``` diff --git a/docs/content/en/latest/data/physical-data-model/load_pdm_from_disk.md b/docs/content/en/latest/data/physical-data-model/load_pdm_from_disk.md index 4790ea80e..7f6b78b7f 100644 --- a/docs/content/en/latest/data/physical-data-model/load_pdm_from_disk.md +++ b/docs/content/en/latest/data/physical-data-model/load_pdm_from_disk.md @@ -3,6 +3,7 @@ title: "load_pdm_from_disk" linkTitle: "load_pdm_from_disk" weight: 140 superheading: "catalog_data_source." 
+api_ref: "CatalogDataSourceService.load_pdm_from_disk" --- ``load_pdm_from_disk(path: Path = Path.cwd())`` diff --git a/docs/content/en/latest/data/physical-data-model/store_pdm_to_disk.md b/docs/content/en/latest/data/physical-data-model/store_pdm_to_disk.md index 263be9b18..d17136b5c 100644 --- a/docs/content/en/latest/data/physical-data-model/store_pdm_to_disk.md +++ b/docs/content/en/latest/data/physical-data-model/store_pdm_to_disk.md @@ -3,6 +3,7 @@ title: "store_pdm_to_disk" linkTitle: "store_pdm_to_disk" weight: 140 superheading: "catalog_data_source." +api_ref: "CatalogDataSourceService.store_pdm_to_disk" --- diff --git a/docs/content/en/latest/execution/ai/ai_chat.md b/docs/content/en/latest/execution/ai/ai_chat.md index 143cd11dc..707cb8f7e 100644 --- a/docs/content/en/latest/execution/ai/ai_chat.md +++ b/docs/content/en/latest/execution/ai/ai_chat.md @@ -3,6 +3,7 @@ title: "ai_chat" linkTitle: "ai_chat" weight: 92 superheading: "compute." +api_ref: "ComputeService.ai_chat" --- ``ai_chat(workspace_id: str, question: str) -> ChatResult`` diff --git a/docs/content/en/latest/execution/ai/ai_chat_stream.md b/docs/content/en/latest/execution/ai/ai_chat_stream.md index b3084ab8c..b88968d52 100644 --- a/docs/content/en/latest/execution/ai/ai_chat_stream.md +++ b/docs/content/en/latest/execution/ai/ai_chat_stream.md @@ -3,6 +3,7 @@ title: "ai_chat_stream" linkTitle: "ai_chat_stream" weight: 93 superheading: "compute." 
+api_ref: "ComputeService.ai_chat_stream" --- ``ai_chat_stream(workspace_id: str, question: str) -> Iterator[Any]`` diff --git a/docs/content/en/latest/execution/ai/build_exec_def_from_chat_result.md b/docs/content/en/latest/execution/ai/build_exec_def_from_chat_result.md index 4040b3754..3e98da2e6 100644 --- a/docs/content/en/latest/execution/ai/build_exec_def_from_chat_result.md +++ b/docs/content/en/latest/execution/ai/build_exec_def_from_chat_result.md @@ -3,6 +3,7 @@ title: "build_exec_def_from_chat_result" linkTitle: "build_exec_def_from_chat_result" weight: 95 superheading: "compute." +api_ref: "ComputeService.build_exec_def_from_chat_result" --- ``build_exec_def_from_chat_result(chat_result: ChatResult) -> ExecutionDefinition`` diff --git a/docs/content/en/latest/execution/ai/get_ai_chat_history.md b/docs/content/en/latest/execution/ai/get_ai_chat_history.md index 0f5ab7e46..0578366a5 100644 --- a/docs/content/en/latest/execution/ai/get_ai_chat_history.md +++ b/docs/content/en/latest/execution/ai/get_ai_chat_history.md @@ -3,6 +3,7 @@ title: "ai_chat" linkTitle: "ai_chat" weight: 96 superheading: "compute." +api_ref: "ComputeService.get_ai_chat_history" --- ``ai_chat(workspace_id: str, question: str) -> ChatResult`` diff --git a/docs/content/en/latest/execution/ai/reset_ai_chat_history.md b/docs/content/en/latest/execution/ai/reset_ai_chat_history.md index 1570f1b18..2e4db3e85 100644 --- a/docs/content/en/latest/execution/ai/reset_ai_chat_history.md +++ b/docs/content/en/latest/execution/ai/reset_ai_chat_history.md @@ -3,6 +3,7 @@ title: "reset_ai_chat_history" linkTitle: "reset_ai_chat_history" weight: 97 superheading: "compute." 
+api_ref: "ComputeService.reset_ai_chat_history" --- ``reset_ai_chat_history(workspace_id: str) -> None`` diff --git a/docs/content/en/latest/execution/ai/search_ai.md b/docs/content/en/latest/execution/ai/search_ai.md index dc9edb221..513f04aa2 100644 --- a/docs/content/en/latest/execution/ai/search_ai.md +++ b/docs/content/en/latest/execution/ai/search_ai.md @@ -3,6 +3,7 @@ title: "search_ai" linkTitle: "search_ai" weight: 98 superheading: "compute." +api_ref: "ComputeService.search_ai" --- ``search_ai( diff --git a/docs/content/en/latest/execution/ai/set_ai_chat_history_feedback.md b/docs/content/en/latest/execution/ai/set_ai_chat_history_feedback.md index 3e7df39d1..e4bdde574 100644 --- a/docs/content/en/latest/execution/ai/set_ai_chat_history_feedback.md +++ b/docs/content/en/latest/execution/ai/set_ai_chat_history_feedback.md @@ -3,6 +3,7 @@ title: "set_ai_chat_history_feedback" linkTitle: "set_ai_chat_history_feedback" weight: 101 superheading: "compute." +api_ref: "ComputeService.set_ai_chat_history_feedback" --- ``set_ai_chat_history_feedback(workspace_id: str, interaction_id: str, user_feedback: str, chat_history_interaction_id: str, thread_id_suffix: str = "") -> None`` diff --git a/docs/content/en/latest/execution/ai/sync_metadata.md b/docs/content/en/latest/execution/ai/sync_metadata.md index 517d99aa4..361f2187d 100644 --- a/docs/content/en/latest/execution/ai/sync_metadata.md +++ b/docs/content/en/latest/execution/ai/sync_metadata.md @@ -3,6 +3,7 @@ title: "sync_metadata" linkTitle: "sync_metadata" weight: 100 superheading: "compute." 
+api_ref: "ComputeService.sync_metadata" --- ``sync_metadata(workspace_id: str, async_req: bool = False) -> None`` diff --git a/docs/content/en/latest/execution/exports/export_pdf.md b/docs/content/en/latest/execution/exports/export_pdf.md index 685efea59..26eb1c174 100644 --- a/docs/content/en/latest/execution/exports/export_pdf.md +++ b/docs/content/en/latest/execution/exports/export_pdf.md @@ -3,6 +3,7 @@ title: "export_pdf" linkTitle: "export_pdf" weight: 110 superheading: "export." +api_ref: "ExportService.export_pdf" --- ``export_pdf(workspace_id: str, diff --git a/docs/content/en/latest/execution/exports/export_tabular.md b/docs/content/en/latest/execution/exports/export_tabular.md index e9ce3898c..5a1bcedd6 100644 --- a/docs/content/en/latest/execution/exports/export_tabular.md +++ b/docs/content/en/latest/execution/exports/export_tabular.md @@ -3,6 +3,7 @@ title: "export_tabular" linkTitle: "export_tabular" weight: 110 superheading: "export." +api_ref: "ExportService.export_tabular" --- ``export_tabular(workspace_id: str, diff --git a/docs/content/en/latest/execution/exports/export_tabular_by_visualization_id.md b/docs/content/en/latest/execution/exports/export_tabular_by_visualization_id.md index 3d849983a..7aeb0a949 100644 --- a/docs/content/en/latest/execution/exports/export_tabular_by_visualization_id.md +++ b/docs/content/en/latest/execution/exports/export_tabular_by_visualization_id.md @@ -3,6 +3,7 @@ title: "export_tabular_by_visualization_id" linkTitle: "export_tabular_by_visualization_id" weight: 110 superheading: "export." 
+api_ref: "ExportService.export_tabular_by_visualization_id" --- ``export_tabular_by_visualization_id( diff --git a/docs/content/en/latest/pandas/_index.md b/docs/content/en/latest/pandas/_index.md index b3b3c40ab..91573280a 100644 --- a/docs/content/en/latest/pandas/_index.md +++ b/docs/content/en/latest/pandas/_index.md @@ -4,6 +4,3 @@ linkTitle: "GOODDATA PANDAS" weight: 59 navigationLabel: true --- - - -Placeholder for gerenerated API reference - pandas. diff --git a/docs/content/en/latest/pipelines/backup_and_restore/backup.md b/docs/content/en/latest/pipelines/backup_and_restore/backup.md index 9c7df6e49..4edb103eb 100644 --- a/docs/content/en/latest/pipelines/backup_and_restore/backup.md +++ b/docs/content/en/latest/pipelines/backup_and_restore/backup.md @@ -4,7 +4,7 @@ linkTitle: "Workspace Backup" weight: 2 --- -Workspace Backup allows you to create backups of one or more workspaces. Backups can be stored either locally or uploaded to an S3 bucket. +Workspace Backup allows you to create backups of one or more workspaces. Backups can be stored locally, uploaded to an S3 bucket, or uploaded to Azure Blob Storage. 
The backup stores following definitions: @@ -141,6 +141,43 @@ logging.basicConfig(level=logging.INFO) logger = logging.getLogger(__name__) backup_manager.logger.subscribe(logger) +# Run the backup +backup_manager.backup_workspaces(workspace_ids=["workspace_id_1", "workspace_id_2"]) +``` + +### Example with Azure Blob Storage + +Here is an example using Azure Blob Storage with Workload Identity: + +```python +import logging +import os + +from gooddata_pipelines import ( + BackupManager, + BackupRestoreConfig, + AzureStorageConfig, + StorageType, +) + +# Create storage configuration +azure_storage_config = AzureStorageConfig.from_workload_identity( + backup_path="backup_folder", account_name="mystorageaccount", container="my-container" +) + +# Create backup configuration +config = BackupRestoreConfig(storage_type=StorageType.AZURE, storage=azure_storage_config) + +# Initialize the BackupManager with your configuration and GoodData credentials +backup_manager = BackupManager.create( + config, os.environ["GD_HOST"], os.environ["GD_TOKEN"] +) + +# Optionally set up a logger and subscribe it to the logs from the BackupManager +logging.basicConfig(level=logging.INFO) +logger = logging.getLogger(__name__) +backup_manager.logger.subscribe(logger) + # Run the backup backup_manager.backup_workspaces(workspace_ids=["workspace_id_1", "workspace_id_2"]) diff --git a/docs/content/en/latest/pipelines/backup_and_restore/configuration.md b/docs/content/en/latest/pipelines/backup_and_restore/configuration.md index 84e7c3622..e0e55eab4 100644 --- a/docs/content/en/latest/pipelines/backup_and_restore/configuration.md +++ b/docs/content/en/latest/pipelines/backup_and_restore/configuration.md @@ -15,10 +15,10 @@ from gooddata_pipelines import BackupRestoreConfig ``` -If you plan on storing your backups on S3, you will also need to import the `StorageType` enum and `S3StorageConfig` class. 
You can find more details about configuration for the S3 storage below in the [S3 Storage](#s3-storage) section. +If you plan on storing your backups on S3 or Azure Blob Storage, you will also need to import the `StorageType` enum and the appropriate storage config class (`S3StorageConfig` or `AzureStorageConfig`). You can find more details about configuration for each storage type below in the [S3 Storage](#s3-storage) and [Azure Blob Storage](#azure-blob-storage) sections. ```python -from gooddata_pipelines import BackupRestoreConfig, S3StorageConfig, StorageType +from gooddata_pipelines import BackupRestoreConfig, S3StorageConfig, AzureStorageConfig, StorageType ``` @@ -26,7 +26,7 @@ The `BackupRestoreConfig` accepts following parameters: | name | description | | -------------------- | ------------------------------------------------------------------------------------------------------------ | -| storage_type | The type of storage to use - either `local` or `s3`. Defaults to `local`. | +| storage_type | The type of storage to use - either `local`, `s3`, or `azure`. Defaults to `local`. | | storage | Configuration for the storage type. Defaults to local storage configuration. | | api_page_size | Page size for fetching workspace relationships. Defaults to 100 when unspecified. | | batch_size | Configures how many workspaces are backed up in a single batch. Defaults to 100 when unspecified. | @@ -34,7 +34,7 @@ The `BackupRestoreConfig` accepts following parameters: ## Storage -The configuration supports two types of storage - local and S3. +The configuration supports three types of storage - local, S3, and Azure Blob Storage. 
The backups are organized in a tree with following nodes: @@ -100,6 +100,63 @@ s3_storage_config = S3StorageConfig.from_aws_credentials( ) ``` +### Azure Blob Storage + +To configure upload of the backups to Azure Blob Storage, use the AzureStorageConfig object: + +```python +from gooddata_pipelines.backup_and_restore.models.storage import AzureStorageConfig + +``` + +The configuration is responsible for establishing a valid connection to Azure Blob Storage, connecting to a storage account and container, and specifying the folder where the backups will be stored or read. You can create the object in three ways, depending on the type of Azure authentication you want to use. The common arguments for all three ways are: + +| name | description | +| ------------ | ------------------------------------------------------------- | +| account_name | The name of the Azure storage account | +| container | The name of the blob container | +| backup_path | Path to the folder serving as the root for the backup storage | + +#### Config from Workload Identity + +Will use Azure Workload Identity (for Kubernetes environments). You only need to specify the `account_name`, `container`, and `backup_path` arguments. + +```python +azure_storage_config = AzureStorageConfig.from_workload_identity( + backup_path="backups_folder", account_name="mystorageaccount", container="my-container" + ) + +``` + +#### Config from Connection String + +Will use an Azure Storage connection string to authenticate. + +```python +azure_storage_config = AzureStorageConfig.from_connection_string( + backup_path="backups_folder", + account_name="mystorageaccount", + container="my-container", + connection_string="DefaultEndpointsProtocol=https;AccountName=...", + ) + +``` + +#### Config from Service Principal + +Will use Azure Service Principal credentials to authenticate. 
+ +```python +azure_storage_config = AzureStorageConfig.from_service_principal( + backup_path="backups_folder", + account_name="mystorageaccount", + container="my-container", + client_id="your-client-id", + client_secret="your-client-secret", + tenant_id="your-tenant-id", + ) +``` + ## Examples Here is a couple of examples of different configuration cases. @@ -133,3 +190,22 @@ s3_storage_config = S3StorageConfig.from_aws_profile( config = BackupRestoreConfig(storage_type=StorageType.S3, storage=s3_storage_config) ``` + +### Config with Azure Blob Storage and Workload Identity + +If you plan to use Azure Blob Storage, your config might look like this: + +```python +from gooddata_pipelines import ( + BackupRestoreConfig, + AzureStorageConfig, + StorageType, +) + +azure_storage_config = AzureStorageConfig.from_workload_identity( + backup_path="backups_folder", account_name="mystorageaccount", container="my-container" + ) + +config = BackupRestoreConfig(storage_type=StorageType.AZURE, storage=azure_storage_config) + +``` diff --git a/docs/content/en/latest/workspace-content/analytics-model/get_declarative_analytics_model.md b/docs/content/en/latest/workspace-content/analytics-model/get_declarative_analytics_model.md index c065ff8c4..b019f443b 100644 --- a/docs/content/en/latest/workspace-content/analytics-model/get_declarative_analytics_model.md +++ b/docs/content/en/latest/workspace-content/analytics-model/get_declarative_analytics_model.md @@ -3,6 +3,7 @@ title: "get_declarative_analytics_model" linkTitle: "get_declarative_analytics_model" weight: 110 superheading: "catalog_workspace_content." 
+api_ref: "CatalogWorkspaceContentService.get_declarative_analytics_model" --- ``get_declarative_analytics_model(workspace_id: str, exclude: Optional[list[str]])`` diff --git a/docs/content/en/latest/workspace-content/analytics-model/load_analytics_model_from_disk.md b/docs/content/en/latest/workspace-content/analytics-model/load_analytics_model_from_disk.md index 089105ea6..63d0152a3 100644 --- a/docs/content/en/latest/workspace-content/analytics-model/load_analytics_model_from_disk.md +++ b/docs/content/en/latest/workspace-content/analytics-model/load_analytics_model_from_disk.md @@ -3,6 +3,7 @@ title: "load_analytics_model_from_disk" linkTitle: "load_analytics_model_from_disk" weight: 132 superheading: "catalog_workspace_content." +api_ref: "CatalogWorkspaceContentService.load_analytics_model_from_disk" --- ``load_analytics_model_from_disk(path: Path = Path.cwd())`` diff --git a/docs/content/en/latest/workspace-content/analytics-model/load_and_put_declarative_analytics_model.md b/docs/content/en/latest/workspace-content/analytics-model/load_and_put_declarative_analytics_model.md index d833708e5..34a6f8634 100644 --- a/docs/content/en/latest/workspace-content/analytics-model/load_and_put_declarative_analytics_model.md +++ b/docs/content/en/latest/workspace-content/analytics-model/load_and_put_declarative_analytics_model.md @@ -3,6 +3,7 @@ title: "load_and_put_declarative_analytics_model" linkTitle: "load_and_put_declarative_analyt..." weight: 150 superheading: "catalog_workspace_content." 
+api_ref: "CatalogWorkspaceContentService.load_and_put_declarative_analytics_model" --- diff --git a/docs/content/en/latest/workspace-content/analytics-model/load_declarative_analytics_model.md b/docs/content/en/latest/workspace-content/analytics-model/load_declarative_analytics_model.md index 142373558..8074b8d75 100644 --- a/docs/content/en/latest/workspace-content/analytics-model/load_declarative_analytics_model.md +++ b/docs/content/en/latest/workspace-content/analytics-model/load_declarative_analytics_model.md @@ -3,6 +3,7 @@ title: "load_declarative_analytics_model" linkTitle: "load_declarative_analytics_model" weight: 140 superheading: "catalog_workspace_content." +api_ref: "CatalogWorkspaceContentService.load_declarative_analytics_model" --- diff --git a/docs/content/en/latest/workspace-content/analytics-model/put_declarative_analytics_model.md b/docs/content/en/latest/workspace-content/analytics-model/put_declarative_analytics_model.md index 3f114fd50..4ff051df6 100644 --- a/docs/content/en/latest/workspace-content/analytics-model/put_declarative_analytics_model.md +++ b/docs/content/en/latest/workspace-content/analytics-model/put_declarative_analytics_model.md @@ -3,6 +3,7 @@ title: "put_declarative_analytics_model" linkTitle: "put_declarative_analytics_model" weight: 120 superheading: "catalog_workspace_content." 
+api_ref: "CatalogWorkspaceContentService.put_declarative_analytics_model" --- ``put_declarative_analytics_model(workspace_id: str, analytics_model: CatalogDeclarativeAnalytics)`` diff --git a/docs/content/en/latest/workspace-content/analytics-model/store_analytics_model_to_disk.md b/docs/content/en/latest/workspace-content/analytics-model/store_analytics_model_to_disk.md index d1f5ea5df..85e67a2aa 100644 --- a/docs/content/en/latest/workspace-content/analytics-model/store_analytics_model_to_disk.md +++ b/docs/content/en/latest/workspace-content/analytics-model/store_analytics_model_to_disk.md @@ -3,6 +3,7 @@ title: "store_analytics_model_to_disk" linkTitle: "store_analytics_model_to_disk" weight: 131 superheading: "catalog_workspace_content." +api_ref: "CatalogWorkspaceContentService.store_analytics_model_to_disk" --- ``store_analytics_model_to_disk(workspace_id: str, path: Path = Path.cwd(), exclude: Optional[list[str]] = None, sort: bool = False)`` diff --git a/docs/content/en/latest/workspace-content/analytics-model/store_declarative_analytics_model.md b/docs/content/en/latest/workspace-content/analytics-model/store_declarative_analytics_model.md index 69408105a..9480bd0aa 100644 --- a/docs/content/en/latest/workspace-content/analytics-model/store_declarative_analytics_model.md +++ b/docs/content/en/latest/workspace-content/analytics-model/store_declarative_analytics_model.md @@ -3,6 +3,7 @@ title: "store_declarative_analytics_model" linkTitle: "store_declarative_analytics_model" weight: 130 superheading: "catalog_workspace_content." 
+api_ref: "CatalogWorkspaceContentService.store_declarative_analytics_model" --- diff --git a/docs/content/en/latest/workspace-content/localization/add_metadata_locale.md b/docs/content/en/latest/workspace-content/localization/add_metadata_locale.md index 2709b8a0f..e837e8459 100644 --- a/docs/content/en/latest/workspace-content/localization/add_metadata_locale.md +++ b/docs/content/en/latest/workspace-content/localization/add_metadata_locale.md @@ -3,6 +3,7 @@ title: "add_metadata_locale" linkTitle: "add_metadata_locale" weight: 25 superheading: "catalog_workspace." +api_ref: "CatalogWorkspaceService.add_metadata_locale" --- ``add_metadata_locale(workspace_id: str, target_language: str, translator_func: Callable, set_locale: bool = True) -> None`` diff --git a/docs/content/en/latest/workspace-content/localization/clean_metadata_localization.md b/docs/content/en/latest/workspace-content/localization/clean_metadata_localization.md index caad71e87..38f036ecf 100644 --- a/docs/content/en/latest/workspace-content/localization/clean_metadata_localization.md +++ b/docs/content/en/latest/workspace-content/localization/clean_metadata_localization.md @@ -3,6 +3,7 @@ title: "add_metadata_locale" linkTitle: "add_metadata_locale" weight: 55 superheading: "catalog_workspace." +api_ref: "CatalogWorkspaceService.clean_metadata_localization" --- ``add_metadata_locale(workspace_id: str, target_language: str, translator_func: Callable, set_locale: bool = True) -> None`` diff --git a/docs/content/en/latest/workspace-content/localization/get_metadata_localization.md b/docs/content/en/latest/workspace-content/localization/get_metadata_localization.md index c192d7ace..0bcd29690 100644 --- a/docs/content/en/latest/workspace-content/localization/get_metadata_localization.md +++ b/docs/content/en/latest/workspace-content/localization/get_metadata_localization.md @@ -3,6 +3,7 @@ title: "get_metadata_localization" linkTitle: "get_metadata_localization" weight: 52 superheading: "catalog_workspace."
+api_ref: "CatalogWorkspaceService.get_metadata_localization" --- ``get_metadata_localization(workspace_id: str, target_language: str) -> bytes`` diff --git a/docs/content/en/latest/workspace-content/localization/save_metadata_locale_to_disk.md b/docs/content/en/latest/workspace-content/localization/save_metadata_locale_to_disk.md index 4f878e0bc..ec50364ab 100644 --- a/docs/content/en/latest/workspace-content/localization/save_metadata_locale_to_disk.md +++ b/docs/content/en/latest/workspace-content/localization/save_metadata_locale_to_disk.md @@ -3,6 +3,7 @@ title: "save_metadata_locale_to_disk" linkTitle: "save_metadata_locale_to_disk" weight: 56 superheading: "catalog_workspace." +api_ref: "CatalogWorkspaceService.save_metadata_locale_to_disk" --- ``save_metadata_locale_to_disk(workspace_id: str, target_language: str, file_path: Path) -> None`` diff --git a/docs/content/en/latest/workspace-content/localization/set_metadata_locale_from_disk.md b/docs/content/en/latest/workspace-content/localization/set_metadata_locale_from_disk.md index 514d29009..4b54ed380 100644 --- a/docs/content/en/latest/workspace-content/localization/set_metadata_locale_from_disk.md +++ b/docs/content/en/latest/workspace-content/localization/set_metadata_locale_from_disk.md @@ -3,6 +3,7 @@ title: "set_metadata_locale_from_disk" linkTitle: "set_metadata_locale_from_disk" weight: 57 superheading: "catalog_workspace." 
+api_ref: "CatalogWorkspaceService.set_metadata_locale_from_disk" --- ``set_metadata_locale_from_disk(workspace_id: str, file_path: Path) -> None`` diff --git a/docs/content/en/latest/workspace-content/localization/set_metadata_localization.md b/docs/content/en/latest/workspace-content/localization/set_metadata_localization.md index 7260b85cc..28c411a3e 100644 --- a/docs/content/en/latest/workspace-content/localization/set_metadata_localization.md +++ b/docs/content/en/latest/workspace-content/localization/set_metadata_localization.md @@ -3,6 +3,7 @@ title: "set_metadata_localization" linkTitle: "set_metadata_localization" weight: 53 superheading: "catalog_workspace." +api_ref: "CatalogWorkspaceService.set_metadata_localization" --- ``set_metadata_localization(workspace_id: str, encoded_xml: bytes) -> None`` diff --git a/docs/content/en/latest/workspace-content/logical-data-model/get_declarative_ldm.md b/docs/content/en/latest/workspace-content/logical-data-model/get_declarative_ldm.md index dfec36ff9..37e9fd0aa 100644 --- a/docs/content/en/latest/workspace-content/logical-data-model/get_declarative_ldm.md +++ b/docs/content/en/latest/workspace-content/logical-data-model/get_declarative_ldm.md @@ -3,6 +3,7 @@ title: "get_declarative_ldm" linkTitle: "get_declarative_ldm" weight: 80 superheading: "catalog_workspace_content." +api_ref: "CatalogWorkspaceContentService.get_declarative_ldm" --- diff --git a/docs/content/en/latest/workspace-content/logical-data-model/load_and_put_declarative_ldm.md b/docs/content/en/latest/workspace-content/logical-data-model/load_and_put_declarative_ldm.md index e605ba05b..b51153ce6 100644 --- a/docs/content/en/latest/workspace-content/logical-data-model/load_and_put_declarative_ldm.md +++ b/docs/content/en/latest/workspace-content/logical-data-model/load_and_put_declarative_ldm.md @@ -3,6 +3,7 @@ title: "load_and_put_declarative_ldm" linkTitle: "load_and_put_declarative_ldm" weight: 120 superheading: "catalog_workspace_content." 
+api_ref: "CatalogWorkspaceContentService.load_and_put_declarative_ldm" --- diff --git a/docs/content/en/latest/workspace-content/logical-data-model/load_declarative_ldm.md b/docs/content/en/latest/workspace-content/logical-data-model/load_declarative_ldm.md index f74393912..e9a95a714 100644 --- a/docs/content/en/latest/workspace-content/logical-data-model/load_declarative_ldm.md +++ b/docs/content/en/latest/workspace-content/logical-data-model/load_declarative_ldm.md @@ -3,6 +3,7 @@ title: "load_declarative_ldm" linkTitle: "load_declarative_ldm" weight: 110 superheading: "catalog_workspace_content." +api_ref: "CatalogWorkspaceContentService.load_declarative_ldm" --- diff --git a/docs/content/en/latest/workspace-content/logical-data-model/load_ldm_from_disk.md b/docs/content/en/latest/workspace-content/logical-data-model/load_ldm_from_disk.md index c4fb5249c..3abd6b5ad 100644 --- a/docs/content/en/latest/workspace-content/logical-data-model/load_ldm_from_disk.md +++ b/docs/content/en/latest/workspace-content/logical-data-model/load_ldm_from_disk.md @@ -3,6 +3,7 @@ title: "load_ldm_from_disk" linkTitle: "load_ldm_from_disk" weight: 101 superheading: "catalog_workspace_content." +api_ref: "CatalogWorkspaceContentService.load_ldm_from_disk" --- ``load_ldm_from_disk( path: Path = Path.cwd())`` diff --git a/docs/content/en/latest/workspace-content/logical-data-model/put_declarative_ldm.md b/docs/content/en/latest/workspace-content/logical-data-model/put_declarative_ldm.md index 7c7e290ab..959393e95 100644 --- a/docs/content/en/latest/workspace-content/logical-data-model/put_declarative_ldm.md +++ b/docs/content/en/latest/workspace-content/logical-data-model/put_declarative_ldm.md @@ -3,6 +3,7 @@ title: "put_declarative_ldm" linkTitle: "put_declarative_ldm" weight: 90 superheading: "catalog_workspace_content." 
+api_ref: "CatalogWorkspaceContentService.put_declarative_ldm" --- diff --git a/docs/content/en/latest/workspace-content/logical-data-model/store_declarative_ldm.md b/docs/content/en/latest/workspace-content/logical-data-model/store_declarative_ldm.md index 3968f82b0..33292bdcc 100644 --- a/docs/content/en/latest/workspace-content/logical-data-model/store_declarative_ldm.md +++ b/docs/content/en/latest/workspace-content/logical-data-model/store_declarative_ldm.md @@ -3,6 +3,7 @@ title: "store_declarative_ldm" linkTitle: "store_declarative_ldm" weight: 100 superheading: "catalog_workspace_content." +api_ref: "CatalogWorkspaceContentService.store_declarative_ldm" --- diff --git a/docs/content/en/latest/workspace-content/logical-data-model/store_ldm_to_disk.md b/docs/content/en/latest/workspace-content/logical-data-model/store_ldm_to_disk.md index 1f7f695e9..ed9b80540 100644 --- a/docs/content/en/latest/workspace-content/logical-data-model/store_ldm_to_disk.md +++ b/docs/content/en/latest/workspace-content/logical-data-model/store_ldm_to_disk.md @@ -3,6 +3,7 @@ title: "store_ldm_to_disk" linkTitle: "store_ldm_to_disk" weight: 100 superheading: "catalog_workspace_content." +api_ref: "CatalogWorkspaceContentService.store_ldm_to_disk" --- diff --git a/docs/content/en/latest/workspace-content/workspace-content/for_items.md b/docs/content/en/latest/workspace-content/workspace-content/for_items.md index a8bb60309..5d938df3a 100644 --- a/docs/content/en/latest/workspace-content/workspace-content/for_items.md +++ b/docs/content/en/latest/workspace-content/workspace-content/for_items.md @@ -3,6 +3,7 @@ title: "for_items" linkTitle: "for_items" weight: 11 superheading: "tables." 
+api_ref: "TableService.for_items" --- diff --git a/docs/content/en/latest/workspace-content/workspace-content/for_visualization.md b/docs/content/en/latest/workspace-content/workspace-content/for_visualization.md index 9fc68ab92..c7893bb24 100644 --- a/docs/content/en/latest/workspace-content/workspace-content/for_visualization.md +++ b/docs/content/en/latest/workspace-content/workspace-content/for_visualization.md @@ -3,6 +3,7 @@ title: "for_visualization" linkTitle: "for_visualization" weight: 10 superheading: "tables." +api_ref: "TableService.for_visualization" --- diff --git a/docs/content/en/latest/workspace-content/workspace-content/get_attributes_catalog.md b/docs/content/en/latest/workspace-content/workspace-content/get_attributes_catalog.md index 2cbf79aa2..30fe4d541 100644 --- a/docs/content/en/latest/workspace-content/workspace-content/get_attributes_catalog.md +++ b/docs/content/en/latest/workspace-content/workspace-content/get_attributes_catalog.md @@ -3,6 +3,7 @@ title: "get_attributes_catalog" linkTitle: "get_attributes_catalog" weight: 50 superheading: "catalog_workspace_content." +api_ref: "CatalogWorkspaceContentService.get_attributes_catalog" --- diff --git a/docs/content/en/latest/workspace-content/workspace-content/get_dependent_entities_graph.md b/docs/content/en/latest/workspace-content/workspace-content/get_dependent_entities_graph.md index 9b3f005ce..2fc4da167 100644 --- a/docs/content/en/latest/workspace-content/workspace-content/get_dependent_entities_graph.md +++ b/docs/content/en/latest/workspace-content/workspace-content/get_dependent_entities_graph.md @@ -3,6 +3,7 @@ title: "get_dependent_entities_graph" linkTitle: "get_dependent_entities_graph" weight: 90 superheading: "catalog_workspace_content." 
+api_ref: "CatalogWorkspaceContentService.get_dependent_entities_graph" --- diff --git a/docs/content/en/latest/workspace-content/workspace-content/get_dependent_entities_graph_from_entry_points.md b/docs/content/en/latest/workspace-content/workspace-content/get_dependent_entities_graph_from_entry_points.md index e7e4823cc..29ced7343 100644 --- a/docs/content/en/latest/workspace-content/workspace-content/get_dependent_entities_graph_from_entry_points.md +++ b/docs/content/en/latest/workspace-content/workspace-content/get_dependent_entities_graph_from_entry_points.md @@ -3,6 +3,7 @@ title: "get_dependent_entities_graph_from_entry_points" linkTitle: "get_dependent_entities_graph_f..." weight: 100 superheading: "catalog_workspace_content." +api_ref: "CatalogWorkspaceContentService.get_dependent_entities_graph_from_entry_points" --- diff --git a/docs/content/en/latest/workspace-content/workspace-content/get_facts_catalog.md b/docs/content/en/latest/workspace-content/workspace-content/get_facts_catalog.md index 5bdb07e6c..b1db0803a 100644 --- a/docs/content/en/latest/workspace-content/workspace-content/get_facts_catalog.md +++ b/docs/content/en/latest/workspace-content/workspace-content/get_facts_catalog.md @@ -3,6 +3,7 @@ title: "get_facts_catalog" linkTitle: "get_facts_catalog" weight: 40 superheading: "catalog_workspace_content." +api_ref: "CatalogWorkspaceContentService.get_facts_catalog" --- diff --git a/docs/content/en/latest/workspace-content/workspace-content/get_full_catalog.md b/docs/content/en/latest/workspace-content/workspace-content/get_full_catalog.md index ecf1a66e0..d9773264c 100644 --- a/docs/content/en/latest/workspace-content/workspace-content/get_full_catalog.md +++ b/docs/content/en/latest/workspace-content/workspace-content/get_full_catalog.md @@ -3,6 +3,7 @@ title: "get_full_catalog" linkTitle: "get_full_catalog" weight: 20 superheading: "catalog_workspace_content." 
+api_ref: "CatalogWorkspaceContentService.get_full_catalog" --- diff --git a/docs/content/en/latest/workspace-content/workspace-content/get_labels_catalog.md b/docs/content/en/latest/workspace-content/workspace-content/get_labels_catalog.md index 456c615c8..aaad032ad 100644 --- a/docs/content/en/latest/workspace-content/workspace-content/get_labels_catalog.md +++ b/docs/content/en/latest/workspace-content/workspace-content/get_labels_catalog.md @@ -3,6 +3,7 @@ title: "get_labels_catalog" linkTitle: "get_labels_catalog" weight: 60 superheading: "catalog_workspace_content." +api_ref: "CatalogWorkspaceContentService.get_labels_catalog" --- diff --git a/docs/content/en/latest/workspace-content/workspace-content/get_metrics_catalog.md b/docs/content/en/latest/workspace-content/workspace-content/get_metrics_catalog.md index 48572af2a..54540b8c2 100644 --- a/docs/content/en/latest/workspace-content/workspace-content/get_metrics_catalog.md +++ b/docs/content/en/latest/workspace-content/workspace-content/get_metrics_catalog.md @@ -3,6 +3,7 @@ title: "get_metrics_catalog" linkTitle: "get_metrics_catalog" weight: 30 superheading: "catalog_workspace_content." +api_ref: "CatalogWorkspaceContentService.get_metrics_catalog" --- diff --git a/docs/content/en/latest/workspace-content/workspace-content/get_visualization.md b/docs/content/en/latest/workspace-content/workspace-content/get_visualization.md index 667ef2454..4a174eb01 100644 --- a/docs/content/en/latest/workspace-content/workspace-content/get_visualization.md +++ b/docs/content/en/latest/workspace-content/workspace-content/get_visualization.md @@ -3,6 +3,7 @@ title: "get_visualization" linkTitle: "get_visualization" weight: 15 superheading: "visualizations." 
+api_ref: "VisualizationService.get_visualization" --- diff --git a/docs/content/en/latest/workspace-content/workspace-content/get_visualizations.md b/docs/content/en/latest/workspace-content/workspace-content/get_visualizations.md index 6ee8f4921..82ddb5c67 100644 --- a/docs/content/en/latest/workspace-content/workspace-content/get_visualizations.md +++ b/docs/content/en/latest/workspace-content/workspace-content/get_visualizations.md @@ -3,6 +3,7 @@ title: "get_visualizations" linkTitle: "get_visualizations" weight: 15 superheading: "visualizations." +api_ref: "VisualizationService.get_visualizations" --- ``get_visualizations(workspace_id: str)`` diff --git a/docs/content/en/latest/workspace/workspace-data-filters/get_declarative_workspace_data_filters.md b/docs/content/en/latest/workspace/workspace-data-filters/get_declarative_workspace_data_filters.md index 8b50eb4fe..cdaa03238 100644 --- a/docs/content/en/latest/workspace/workspace-data-filters/get_declarative_workspace_data_filters.md +++ b/docs/content/en/latest/workspace/workspace-data-filters/get_declarative_workspace_data_filters.md @@ -3,6 +3,7 @@ title: "get_declarative_workspace_data_filters" linkTitle: "get_declarative_workspace_data_f..." weight: 140 superheading: "catalog_workspace." 
+api_ref: "CatalogWorkspaceService.get_declarative_workspace_data_filters" --- ``get_declarative_workspace_data_filters()`` diff --git a/docs/content/en/latest/workspace/workspace-data-filters/load_and_put_declarative_workspace_data_filters.md b/docs/content/en/latest/workspace/workspace-data-filters/load_and_put_declarative_workspace_data_filters.md index 2980cf5d9..1e951da7d 100644 --- a/docs/content/en/latest/workspace/workspace-data-filters/load_and_put_declarative_workspace_data_filters.md +++ b/docs/content/en/latest/workspace/workspace-data-filters/load_and_put_declarative_workspace_data_filters.md @@ -3,6 +3,7 @@ title: "load_and_put_declarative_workspace_data_filters" linkTitle: "load_and_put_declarative_worksp..." weight: 180 superheading: "catalog_workspace." +api_ref: "CatalogWorkspaceService.load_and_put_declarative_workspace_data_filters" --- diff --git a/docs/content/en/latest/workspace/workspace-data-filters/load_declarative_workspace_data_filters.md b/docs/content/en/latest/workspace/workspace-data-filters/load_declarative_workspace_data_filters.md index 7188ead32..f6dcc83b0 100644 --- a/docs/content/en/latest/workspace/workspace-data-filters/load_declarative_workspace_data_filters.md +++ b/docs/content/en/latest/workspace/workspace-data-filters/load_declarative_workspace_data_filters.md @@ -3,6 +3,7 @@ title: "load_declarative_workspace_data_filters" linkTitle: "load_declarative_workspace_data_..." weight: 170 superheading: "catalog_workspace." 
+api_ref: "CatalogWorkspaceService.load_declarative_workspace_data_filters" --- diff --git a/docs/content/en/latest/workspace/workspace-data-filters/put_declarative_workspace_data_filters.md b/docs/content/en/latest/workspace/workspace-data-filters/put_declarative_workspace_data_filters.md index 868008851..254f960a7 100644 --- a/docs/content/en/latest/workspace/workspace-data-filters/put_declarative_workspace_data_filters.md +++ b/docs/content/en/latest/workspace/workspace-data-filters/put_declarative_workspace_data_filters.md @@ -3,6 +3,7 @@ title: "put_declarative_workspace_data_filters" linkTitle: "put_declarative_workspace_data_f..." weight: 150 superheading: "catalog_workspace." +api_ref: "CatalogWorkspaceService.put_declarative_workspace_data_filters" --- diff --git a/docs/content/en/latest/workspace/workspace-data-filters/store_declarative_workspace_data_filters.md b/docs/content/en/latest/workspace/workspace-data-filters/store_declarative_workspace_data_filters.md index d8bfb8174..a285ced43 100644 --- a/docs/content/en/latest/workspace/workspace-data-filters/store_declarative_workspace_data_filters.md +++ b/docs/content/en/latest/workspace/workspace-data-filters/store_declarative_workspace_data_filters.md @@ -3,6 +3,7 @@ title: "store_declarative_workspace_data_filters" linkTitle: "store_declarative_workspace_dat..." weight: 160 superheading: "catalog_workspace." +api_ref: "CatalogWorkspaceService.store_declarative_workspace_data_filters" --- diff --git a/docs/content/en/latest/workspace/workspaces/clone_workspace.md b/docs/content/en/latest/workspace/workspaces/clone_workspace.md index 94baf9f46..d3ebd35d5 100644 --- a/docs/content/en/latest/workspace/workspaces/clone_workspace.md +++ b/docs/content/en/latest/workspace/workspaces/clone_workspace.md @@ -3,6 +3,7 @@ title: "clone_workspace" linkTitle: "clone_workspace" weight: 21 superheading: "catalog_workspace." 
+api_ref: "CatalogWorkspaceService.clone_workspace" --- diff --git a/docs/content/en/latest/workspace/workspaces/create_or_update.md b/docs/content/en/latest/workspace/workspaces/create_or_update.md index 4e94a1897..84686ef28 100644 --- a/docs/content/en/latest/workspace/workspaces/create_or_update.md +++ b/docs/content/en/latest/workspace/workspaces/create_or_update.md @@ -3,6 +3,7 @@ title: "create_or_update" linkTitle: "create_or_update" weight: 10 superheading: "catalog_workspace." +api_ref: "CatalogWorkspaceService.create_or_update" --- ``create_or_update(workspace: CatalogWorkspace)`` diff --git a/docs/content/en/latest/workspace/workspaces/delete_workspace.md b/docs/content/en/latest/workspace/workspaces/delete_workspace.md index 03ccceb7c..d3fefdb04 100644 --- a/docs/content/en/latest/workspace/workspaces/delete_workspace.md +++ b/docs/content/en/latest/workspace/workspaces/delete_workspace.md @@ -3,6 +3,7 @@ title: "delete_workspace" linkTitle: "delete_workspace" weight: 30 superheading: "catalog_workspace." +api_ref: "CatalogWorkspaceService.delete_workspace" --- diff --git a/docs/content/en/latest/workspace/workspaces/get_declarative_automations.md b/docs/content/en/latest/workspace/workspaces/get_declarative_automations.md index 072d1062a..0c3a1a35e 100644 --- a/docs/content/en/latest/workspace/workspaces/get_declarative_automations.md +++ b/docs/content/en/latest/workspace/workspaces/get_declarative_automations.md @@ -3,6 +3,7 @@ title: "get_declarative_automations" linkTitle: "get_declarative_automations" weight: 50 superheading: "catalog_workspace." 
+api_ref: "CatalogWorkspaceService.get_declarative_automations" --- ``get_declarative_automations(workspace_id: str)`` diff --git a/docs/content/en/latest/workspace/workspaces/get_declarative_workspace.md b/docs/content/en/latest/workspace/workspaces/get_declarative_workspace.md index 87b5c059e..375820de4 100644 --- a/docs/content/en/latest/workspace/workspaces/get_declarative_workspace.md +++ b/docs/content/en/latest/workspace/workspaces/get_declarative_workspace.md @@ -3,6 +3,7 @@ title: "get_declarative_workspace" linkTitle: "get_declarative_workspace" weight: 100 superheading: "catalog_workspace." +api_ref: "CatalogWorkspaceService.get_declarative_workspace" --- diff --git a/docs/content/en/latest/workspace/workspaces/get_declarative_workspaces.md b/docs/content/en/latest/workspace/workspaces/get_declarative_workspaces.md index 690c084da..0361eb371 100644 --- a/docs/content/en/latest/workspace/workspaces/get_declarative_workspaces.md +++ b/docs/content/en/latest/workspace/workspaces/get_declarative_workspaces.md @@ -3,6 +3,7 @@ title: "get_declarative_workspaces" linkTitle: "get_declarative_workspaces" weight: 50 superheading: "catalog_workspace." +api_ref: "CatalogWorkspaceService.get_declarative_workspaces" --- ``get_declarative_workspaces()`` diff --git a/docs/content/en/latest/workspace/workspaces/get_workspace.md b/docs/content/en/latest/workspace/workspaces/get_workspace.md index 1dc543478..edb1b81dc 100644 --- a/docs/content/en/latest/workspace/workspaces/get_workspace.md +++ b/docs/content/en/latest/workspace/workspaces/get_workspace.md @@ -3,6 +3,7 @@ title: "get_workspace" linkTitle: "get_workspace" weight: 20 superheading: "catalog_workspace." 
+api_ref: "CatalogWorkspaceService.get_workspace" --- diff --git a/docs/content/en/latest/workspace/workspaces/list_workspaces.md b/docs/content/en/latest/workspace/workspaces/list_workspaces.md index 24704d30b..2b58e5f32 100644 --- a/docs/content/en/latest/workspace/workspaces/list_workspaces.md +++ b/docs/content/en/latest/workspace/workspaces/list_workspaces.md @@ -3,6 +3,7 @@ title: "list_workspaces" linkTitle: "list_workspaces" weight: 40 superheading: "catalog_workspace." +api_ref: "CatalogWorkspaceService.list_workspaces" --- diff --git a/docs/content/en/latest/workspace/workspaces/load_and_put_declarative_workspace.md b/docs/content/en/latest/workspace/workspaces/load_and_put_declarative_workspace.md index f9aa5adcf..a7f14e330 100644 --- a/docs/content/en/latest/workspace/workspaces/load_and_put_declarative_workspace.md +++ b/docs/content/en/latest/workspace/workspaces/load_and_put_declarative_workspace.md @@ -3,6 +3,7 @@ title: "load_and_put_declarative_workspace" linkTitle: "load_and_put_declarative_work..." weight: 130 superheading: "catalog_workspace." +api_ref: "CatalogWorkspaceService.load_and_put_declarative_workspace" --- diff --git a/docs/content/en/latest/workspace/workspaces/load_and_put_declarative_workspaces.md b/docs/content/en/latest/workspace/workspaces/load_and_put_declarative_workspaces.md index edfcadcf2..462fabd61 100644 --- a/docs/content/en/latest/workspace/workspaces/load_and_put_declarative_workspaces.md +++ b/docs/content/en/latest/workspace/workspaces/load_and_put_declarative_workspaces.md @@ -3,6 +3,7 @@ title: "load_and_put_declarative_workspaces" linkTitle: "load_and_put_declarative_work..." weight: 90 superheading: "catalog_workspace." 
+api_ref: "CatalogWorkspaceService.load_and_put_declarative_workspaces" --- diff --git a/docs/content/en/latest/workspace/workspaces/load_declarative_workspace.md b/docs/content/en/latest/workspace/workspaces/load_declarative_workspace.md index 85d996e01..d4f8dd834 100644 --- a/docs/content/en/latest/workspace/workspaces/load_declarative_workspace.md +++ b/docs/content/en/latest/workspace/workspaces/load_declarative_workspace.md @@ -3,6 +3,7 @@ title: "load_declarative_workspace" linkTitle: "load_declarative_workspace" weight: 120 superheading: "catalog_workspace." +api_ref: "CatalogWorkspaceService.load_declarative_workspace" --- diff --git a/docs/content/en/latest/workspace/workspaces/load_declarative_workspaces.md b/docs/content/en/latest/workspace/workspaces/load_declarative_workspaces.md index d3f495964..3d0fa1713 100644 --- a/docs/content/en/latest/workspace/workspaces/load_declarative_workspaces.md +++ b/docs/content/en/latest/workspace/workspaces/load_declarative_workspaces.md @@ -3,6 +3,7 @@ title: "load_declarative_workspaces" linkTitle: "load_declarative_workspaces" weight: 80 superheading: "catalog_workspace." +api_ref: "CatalogWorkspaceService.load_declarative_workspaces" --- diff --git a/docs/content/en/latest/workspace/workspaces/put_declarative_automations.md b/docs/content/en/latest/workspace/workspaces/put_declarative_automations.md index 97434f595..921a0bb62 100644 --- a/docs/content/en/latest/workspace/workspaces/put_declarative_automations.md +++ b/docs/content/en/latest/workspace/workspaces/put_declarative_automations.md @@ -3,6 +3,7 @@ title: "put_declarative_automations" linkTitle: "put_declarative_automations" weight: 50 superheading: "catalog_workspace." 
+api_ref: "CatalogWorkspaceService.put_declarative_automations" --- ``put_declarative_automations(workspace_id: str, automations: list[CatalogDeclarativeAutomation])`` diff --git a/docs/content/en/latest/workspace/workspaces/put_declarative_workspace.md b/docs/content/en/latest/workspace/workspaces/put_declarative_workspace.md index d6509efbe..44583aed5 100644 --- a/docs/content/en/latest/workspace/workspaces/put_declarative_workspace.md +++ b/docs/content/en/latest/workspace/workspaces/put_declarative_workspace.md @@ -3,6 +3,7 @@ title: "put_declarative_workspace" linkTitle: "put_declarative_workspace" weight: 110 superheading: "catalog_workspace." +api_ref: "CatalogWorkspaceService.put_declarative_workspace" --- diff --git a/docs/content/en/latest/workspace/workspaces/put_declarative_workspaces.md b/docs/content/en/latest/workspace/workspaces/put_declarative_workspaces.md index f74c1a6aa..4c6bbae05 100644 --- a/docs/content/en/latest/workspace/workspaces/put_declarative_workspaces.md +++ b/docs/content/en/latest/workspace/workspaces/put_declarative_workspaces.md @@ -3,6 +3,7 @@ title: "put_declarative_workspaces" linkTitle: "put_declarative_workspaces" weight: 60 superheading: "catalog_workspace." +api_ref: "CatalogWorkspaceService.put_declarative_workspaces" --- ``put_declarative_workspaces(workspace: CatalogDeclarativeWorkspaces)`` diff --git a/docs/content/en/latest/workspace/workspaces/store_declarative_workspace.md b/docs/content/en/latest/workspace/workspaces/store_declarative_workspace.md index 715f0e903..8252cd432 100644 --- a/docs/content/en/latest/workspace/workspaces/store_declarative_workspace.md +++ b/docs/content/en/latest/workspace/workspaces/store_declarative_workspace.md @@ -3,6 +3,7 @@ title: "store_declarative_workspace" linkTitle: "store_declarative_workspace" weight: 115 superheading: "catalog_workspace." 
+api_ref: "CatalogWorkspaceService.store_declarative_workspace" --- diff --git a/docs/content/en/latest/workspace/workspaces/store_declarative_workspaces.md b/docs/content/en/latest/workspace/workspaces/store_declarative_workspaces.md index a4aac72f4..724eeee88 100644 --- a/docs/content/en/latest/workspace/workspaces/store_declarative_workspaces.md +++ b/docs/content/en/latest/workspace/workspaces/store_declarative_workspaces.md @@ -3,6 +3,7 @@ title: "store_declarative_workspaces" linkTitle: "store_declarative_workspaces" weight: 70 superheading: "catalog_workspace." +api_ref: "CatalogWorkspaceService.store_declarative_workspaces" --- diff --git a/docs/function_template.md b/docs/function_template.md index 9b35c58c8..65119e742 100644 --- a/docs/function_template.md +++ b/docs/function_template.md @@ -5,4 +5,4 @@ superheading: "PARENT." weight: 100 --- -{{< api-ref "PATH" >}} +CONTENT diff --git a/docs/go.mod b/docs/go.mod index c03b44af3..a5647d01f 100644 --- a/docs/go.mod +++ b/docs/go.mod @@ -4,7 +4,7 @@ go 1.20 require ( github.com/FortAwesome/Font-Awesome v0.0.0-20230327165841-0698449d50f2 // indirect - github.com/gooddata/gooddata-docs-theme v0.0.0-20251008131043-19f8c818ea1e // indirect + github.com/gooddata/gooddata-docs-theme v0.0.0-20260224092332-353448869720 // indirect github.com/google/docsy v0.7.1 // indirect github.com/google/docsy/dependencies v0.7.1 // indirect github.com/twbs/bootstrap v5.3.1+incompatible // indirect diff --git a/docs/go.sum b/docs/go.sum index ba6164673..12be4d0e8 100644 --- a/docs/go.sum +++ b/docs/go.sum @@ -44,6 +44,12 @@ github.com/gooddata/gooddata-docs-theme v0.0.0-20251007133000-356776424f96 h1:gN github.com/gooddata/gooddata-docs-theme v0.0.0-20251007133000-356776424f96/go.mod h1:VVNP6Cmo+vC37RD3T/YHjyU/QdqftGY5z4G513LYyrA= github.com/gooddata/gooddata-docs-theme v0.0.0-20251008131043-19f8c818ea1e h1:+QitAaL+87DjFpsjQaMLBNq6zcbLVrUASCc4hBtBMX8= github.com/gooddata/gooddata-docs-theme 
v0.0.0-20251008131043-19f8c818ea1e/go.mod h1:VVNP6Cmo+vC37RD3T/YHjyU/QdqftGY5z4G513LYyrA= +github.com/gooddata/gooddata-docs-theme v0.0.0-20251103110911-19e503a55290 h1:po5Ui9+Ne0RlbE8clrKYGXdtmSaA+tPMv7gU0j5bztw= +github.com/gooddata/gooddata-docs-theme v0.0.0-20251103110911-19e503a55290/go.mod h1:VVNP6Cmo+vC37RD3T/YHjyU/QdqftGY5z4G513LYyrA= +github.com/gooddata/gooddata-docs-theme v0.0.0-20251111131802-3f566c293eaa h1:+k15ub8UegDMUh+KPps2A3W8YGulzAQcIHdc+hoB7/c= +github.com/gooddata/gooddata-docs-theme v0.0.0-20251111131802-3f566c293eaa/go.mod h1:VVNP6Cmo+vC37RD3T/YHjyU/QdqftGY5z4G513LYyrA= +github.com/gooddata/gooddata-docs-theme v0.0.0-20260224092332-353448869720 h1:aJoM2UUS7tzHJtwdox8h0JYPDdIWi3h539gbSpyua24= +github.com/gooddata/gooddata-docs-theme v0.0.0-20260224092332-353448869720/go.mod h1:VVNP6Cmo+vC37RD3T/YHjyU/QdqftGY5z4G513LYyrA= github.com/google/docsy v0.7.1 h1:DUriA7Nr3lJjNi9Ulev1SfiG1sUYmvyDeU4nTp7uDxY= github.com/google/docsy v0.7.1/go.mod h1:JCmE+c+izhE0Rvzv3y+AzHhz1KdwlA9Oj5YBMklJcfc= github.com/google/docsy/dependencies v0.7.1 h1:NbzYKJYMin2q50xdWSUzR2c9gCp7zR/XHDBcxklEcTQ= diff --git a/docs/layouts/docs/baseof.html b/docs/layouts/docs/baseof.html index 3ad552702..3191b5286 100644 --- a/docs/layouts/docs/baseof.html +++ b/docs/layouts/docs/baseof.html @@ -7,9 +7,13 @@ {{ if .Params.externalLink }} {{ end }} + {{ partial "gtm-body.html" }} + {{ partial "skip-links.html" . }}
{{ partial "navbar.html" . }}
diff --git a/docs/layouts/index.redir b/docs/layouts/index.redir index 8ddbf2ac1..c752fbb98 100644 --- a/docs/layouts/index.redir +++ b/docs/layouts/index.redir @@ -1,8 +1,16 @@ # Redirect homepage to the latest version / {{ .Site.BaseURL }}/latest/ 301! /docs/ {{ .Site.BaseURL }}/latest/ 301! -/1.53/ {{ .Site.BaseURL }}/latest 301! -/1.53.0/ {{ .Site.BaseURL }}/latest 301! +/1.61/ {{ .Site.BaseURL }}/latest 301! +/1.61.0/ {{ .Site.BaseURL }}/latest 301! +/1.60.0/ {{ .Site.BaseURL }}/1.60 301! +/1.59.0/ {{ .Site.BaseURL }}/1.59 301! +/1.58.0/ {{ .Site.BaseURL }}/1.58 301! +/1.57.0/ {{ .Site.BaseURL }}/1.57 301! +/1.56.0/ {{ .Site.BaseURL }}/1.56 301! +/1.55.0/ {{ .Site.BaseURL }}/1.55 301! +/1.54.0/ {{ .Site.BaseURL }}/1.54 301! +/1.53.0/ {{ .Site.BaseURL }}/1.53 301! /1.52.0/ {{ .Site.BaseURL }}/1.52 301! /1.51.0/ {{ .Site.BaseURL }}/1.51 301! /1.50.0/ {{ .Site.BaseURL }}/1.50 301! diff --git a/docs/layouts/partials/api-ref-function-parenthesis.html b/docs/layouts/partials/api-ref-function-parenthesis.html deleted file mode 100644 index 1f648ed6f..000000000 --- a/docs/layouts/partials/api-ref-function-parenthesis.html +++ /dev/null @@ -1,10 +0,0 @@ -{{- $funcObj := . 
-}} -{{- /* Create the parenthesis string for signature */ -}} -{{- $parenthesisArgsString := "" -}} -{{- if $funcObj.docstring_parsed -}} -{{- range $funcObj.docstring_parsed.params -}} -{{- $parenthesisArgsString = print $parenthesisArgsString .arg_name ": " .type_name ", " -}} -{{- end -}} -{{- end -}} -{{- $parenthesisArgsString := strings.TrimRight ", " $parenthesisArgsString -}} -{{- $parenthesisArgsString -}} diff --git a/docs/layouts/partials/api-ref-link-all-partial.html b/docs/layouts/partials/api-ref-link-all-partial.html deleted file mode 100644 index 0e81805f6..000000000 --- a/docs/layouts/partials/api-ref-link-all-partial.html +++ /dev/null @@ -1,47 +0,0 @@ -{{ $context := .context}} -{{ $paragraph := .paragraph}} -{{ $jsonPath := printf "versioned_docs/%s/links.json" (partial "version.html" (dict "context" $context)).dirpath }} - -{{ if fileExists $jsonPath }} - -{{ with getJSON $jsonPath }} - {{ $links := . }} - - {{ range $name, $data := $links }} - {{ if and $data.path (ne $data.path "") }} - {{ $path := $data.path }} - {{ $kind := $data.kind }} - {{/* I don't think there is any other way to include literal backtick inside a raw-string */}} - {{/* The regex captures $name that is surrounded by any of */}} - {{/* [$name] `name`*/}} - {{/* Hugo doesn't support lookahead/lookbehind, so - the groups have to be used to match correctly*/}} - {{ $pattern := "" }} - {{ if strings.Contains $name "_" }} - {{ $pattern = printf `([\[%s ])(%s)([\]%s. ])` "`" $name "`"}} - {{/* If name contains "_" use more lenient regex, which allows the name - to be surrounded by whitespace. 
This is required, otherwise names like "load" - are matched in random places */}} - {{ else }} - {{ $pattern = printf `([\[%s])(%s)([\]%s])` "`" $name "`" }} - {{ end }} - {{ $matches := findRE $name $paragraph}} - {{ if gt (len $matches) 0 }} - {{ $a_html := (printf "$1%s$3" $path $name) }} - {{ $paragraph = replaceRE $pattern $a_html $paragraph | safeHTML}} - {{ end }} - {{ end }} - {{ end }} - - {{/* Remove backticks around links */}} - {{ $leadingBacktickRemovalPattern := "`" $paragraph}} - - {{ $paragraph | safeHTML }} -{{ end }} - -{{ else }} - Missing links.json for this version! -{{ end }} diff --git a/docs/layouts/partials/api-ref-link-partial.html b/docs/layouts/partials/api-ref-link-partial.html deleted file mode 100644 index 4902895c1..000000000 --- a/docs/layouts/partials/api-ref-link-partial.html +++ /dev/null @@ -1,28 +0,0 @@ -{{ $orig_name := .paragraph }} -{{ $context := .context }} -{{ $jsonPath := printf "versioned_docs/%s/links.json" (partial "version.html" (dict "context" $context)).dirpath }} - -{{ if fileExists $jsonPath }} - -{{ with getJSON $jsonPath }} - {{ $links := . }} - {{/* We have to handle the case of Optional[name] or list[name] */}} - {{ $name := $orig_name }} - {{ $name := replace $name "]" "" }} - {{ $name := replace $name "Optional[" "" }} - {{ $name := replace $name "list[" "" }} - {{ $name := replace $name "List[" "" }} - - {{ $data := index $links (trim $name " ") }} - {{ if and $data $data.path (ne $data.path "") }} - {{ $path := $data.path }} - {{ $a_html := (printf "%s" $path $name) }} - {{ replace $orig_name $name $a_html | safeHTML}} - {{ else }} - {{ $orig_name }} - {{ end }} -{{ end }} - -{{ else }} - Missing links.json for this version! 
-{{ end }} diff --git a/docs/layouts/partials/api-ref-object-partial.html b/docs/layouts/partials/api-ref-object-partial.html deleted file mode 100644 index 669efeb1b..000000000 --- a/docs/layouts/partials/api-ref-object-partial.html +++ /dev/null @@ -1,137 +0,0 @@ -{{ $context := .context }} -{{ $objData := .object }} -{{ $path := .path }} - - -
-{{ if and (eq $objData.kind "function")}} - -{{ $parenthesisArgsString := partial "api-ref-function-parenthesis.html" $objData}} -{{ if not $objData.is_property }} -

{{ (index (last 1 $path) 0) }}({{- $parenthesisArgsString -}}) -> {{ partial "api-ref-link-partial.html" (dict - "paragraph" $objData.signature.return_annotation "context" $context) }}

-{{ else }} -

{{ (index (last 1 $path) 0) }} -> {{ partial "api-ref-link-partial.html" (dict "paragraph" - $objData.signature.return_annotation "context" $context) }}

-{{ end }} - -{{ if $objData.docstring_parsed}} -
-

{{ partial "api-ref-link-all-partial.html" (dict "paragraph" ($objData.docstring_parsed.short_description | - safeHTML) "context" $context) }}

-

{{ partial "api-ref-link-all-partial.html" (dict "paragraph" ($objData.docstring_parsed.long_description | - safeHTML) "context" $context)}}

-
-{{ end }} - -{{ if not $objData.is_property }} -

Parameters

-{{ if $objData.docstring_parsed }} -{{ if (index $objData.docstring_parsed "params") }} -{{ if (gt (len $objData.docstring_parsed.params) 0) }} - - - - - - - - - - {{range $objData.docstring_parsed.params}} - - - - - - {{end}} - -
nametypedescription
{{ .arg_name }} {{ partial "api-ref-link-partial.html" (dict "paragraph" .type_name "context" $context) }} {{ partial "api-ref-link-all-partial.html" (dict "paragraph" (.description | safeHTML) "context" $context) - }} -
-{{ else }} - None -{{ end }} -{{ else if (gt (len $objData.signature.params) 0) }} - - - - - - - - - - {{range $objData.signature.params}} - - - - - - {{end}} - -
nametypedescription
{{ index . 0 }} {{ partial "api-ref-link-partial.html" (dict "paragraph" (index . 1) "context" $context) }} None
-{{ else }} - None -{{ end }} -{{ else }} - None -{{ end }} -{{ end }} - -

Returns

-{{ if $objData.docstring_parsed }} -{{ if not $objData.docstring_parsed.returns }} - No docs -{{ else if or $objData.docstring_parsed.returns.type_name (ne $objData.signature.return_annotation "None") }} -{{ $typeName := "" }} -{{ if $objData.docstring_parsed.returns.type_name }} -{{ $typeName = $objData.docstring_parsed.returns.type_name }} -{{ else }} -{{ $typeName = $objData.signature.return_type }} -{{ end }} - -{{ $description := $objData.docstring_parsed.returns.description }} - - - - - - - - - - - - - - -
typedescription
- {{ partial "api-ref-link-partial.html" (dict "paragraph" $typeName "context" $context) }} - - {{ if $description }} - {{ partial "api-ref-link-all-partial.html" (dict "paragraph" ($objData.docstring_parsed.returns.description - | safeHTML) "context" $context) }} - {{ else }} - None - {{ end }} -
-{{ else }} - None -{{ end }} -{{ else }} - No docs -{{ end }} - -{{ else if and (eq $objData.kind "class") }} -

{{index (last 2 $path) 0}}.{{ index (last 1 $path) 0 }}

-{{ if $objData.docstring_parsed }} -
-

{{ partial "api-ref-link-all-partial.html" (dict "paragraph" ($objData.docstring_parsed.short_description | - safeHTML) "context" $context) }}

-

{{ partial "api-ref-link-all-partial.html" (dict "paragraph" ($objData.docstring_parsed.long_description | - safeHTML) "context" $context) }}

-
-{{ end }} -{{end}} -
diff --git a/docs/layouts/partials/api-ref-partial.html b/docs/layouts/partials/api-ref-partial.html deleted file mode 100644 index 4d5ab7520..000000000 --- a/docs/layouts/partials/api-ref-partial.html +++ /dev/null @@ -1,29 +0,0 @@ -{{ $path := split .path "." }} -{{ $jsonPath := printf "versioned_docs/%s/data.json" (partial "version.html" (dict "context" .context )).dirpath }} - -{{ if fileExists $jsonPath }} - -{{ with getJSON $jsonPath }} - {{ $currentData := . }} - {{ range $path }} - {{ if (eq $currentData.kind "class") }} - {{ $currentData = $currentData.functions }} - {{ end }} - {{ if $currentData }} - {{$currentData = index $currentData .}} - {{ else }} - Failed to render: {{ $path }} - {{ end}} - {{ end }} - - {{ $objData := $currentData }} - - -
- {{ partial "api-ref-object-partial.html" (dict "object" $objData "path" $path "context" .) }} -
-{{ end }} - -{{ else }} - Missing data.json for this version! -{{ end }} diff --git a/docs/layouts/partials/ask-ai-button.html b/docs/layouts/partials/ask-ai-button.html index c9b00d8f4..b2b0b2488 100644 --- a/docs/layouts/partials/ask-ai-button.html +++ b/docs/layouts/partials/ask-ai-button.html @@ -1,6 +1,9 @@
+ diff --git a/docs/layouts/partials/hooks/body-end.html b/docs/layouts/partials/hooks/body-end.html index 45ce4c241..5130ca4f7 100644 --- a/docs/layouts/partials/hooks/body-end.html +++ b/docs/layouts/partials/hooks/body-end.html @@ -6,9 +6,10 @@ + {{/* additional scripts */}} - +