From b9718eb0f38418ea9d8c9235b9ae4396e8c1442f Mon Sep 17 00:00:00 2001
From: Roman Lutz
Date: Fri, 6 Feb 2026 04:55:44 -0800
Subject: [PATCH 01/12] Improve Docker setup to build on the devcontainer for
 minimal duplication, add helper scripts, update release instructions, and
 add GitHub workflows for automated validation

---
 .github/workflows/docker_build.yml     | 326 +++++++++++++++++++++++++
 build_scripts/prepare_package.py       | 158 ++++++++++++
 doc/contributing/11_release_process.md |  14 ++
 docker/Dockerfile                      | 115 ++++++---
 docker/QUICKSTART.md                   |  82 +++++++
 docker/build_pyrit_docker.py           | 206 ++++++++++++++++
 docker/docker-compose.yaml             |  52 +++-
 docker/requirements.txt                |   8 -
 docker/run_pyrit_docker.py             | 139 +++++++++++
 docker/start.sh                        |  45 ++--
 frontend/tsconfig.json                 |   1 +
 pyproject.toml                         |   3 +-
 pyrit/backend/main.py                  |  15 +-
 13 files changed, 1088 insertions(+), 76 deletions(-)
 create mode 100644 .github/workflows/docker_build.yml
 create mode 100644 build_scripts/prepare_package.py
 create mode 100644 docker/QUICKSTART.md
 create mode 100644 docker/build_pyrit_docker.py
 delete mode 100644 docker/requirements.txt
 create mode 100644 docker/run_pyrit_docker.py

diff --git a/.github/workflows/docker_build.yml b/.github/workflows/docker_build.yml
new file mode 100644
index 0000000000..fe7695438f
--- /dev/null
+++ b/.github/workflows/docker_build.yml
@@ -0,0 +1,326 @@
# Tests Docker image builds for devcontainer and production

name: docker_build

on:
  push:
    branches:
      - "main"
    paths:
      - '.devcontainer/**'
      - 'docker/**'
      - 'pyproject.toml'
      - 'frontend/**'
      - '.github/workflows/docker_build.yml'
  pull_request:
    branches:
      - "main"
      - "release/**"
    paths:
      - '.devcontainer/**'
      - 'docker/**'
      - 'pyproject.toml'
      - 'frontend/**'
      - '.github/workflows/docker_build.yml'
  workflow_dispatch:

concurrency:
  group: ${{ github.workflow }}-${{ github.head_ref || github.run_id }}
  cancel-in-progress: true

jobs:
  # Stage 1: Build devcontainer base image
  build-devcontainer:
    name: Build Devcontainer
    runs-on: ubuntu-latest
    permissions:
      contents: read
    steps:
      - uses: actions/checkout@v4

      - name: Set up Docker Buildx
        uses: docker/setup-buildx-action@v3

      - name: Build devcontainer image
        uses: docker/build-push-action@v5
        with:
          context: .devcontainer
          file: .devcontainer/Dockerfile
          push: false
          tags: pyrit-devcontainer:latest
          load: true
          cache-from: type=gha
          cache-to: type=gha,mode=max

      - name: Save devcontainer image
        run: docker save pyrit-devcontainer:latest | gzip > devcontainer.tar.gz

      - name: Upload devcontainer artifact
        uses: actions/upload-artifact@v4
        with:
          name: devcontainer-image
          path: devcontainer.tar.gz
          retention-days: 1

  # Stage 2: Build production images (parallel)
  build-production-local:
    name: Build Production (local)
    runs-on: ubuntu-latest
    needs: build-devcontainer
    permissions:
      contents: read
    steps:
      - uses: actions/checkout@v4

      - name: Download devcontainer image
        uses: actions/download-artifact@v4
        with:
          name: devcontainer-image

      - name: Load devcontainer image
        run: gunzip -c devcontainer.tar.gz | docker load

      - name: Set up Docker Buildx
        uses: docker/setup-buildx-action@v3

      - name: Build production image (local)
        uses: docker/build-push-action@v5
        with:
          context: .
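          # The build context is the repo root (not docker/) so the production
          # Dockerfile can COPY pyrit/, frontend/, and build_scripts/ into the image.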
+ file: docker/Dockerfile + push: false + tags: pyrit:local-test + load: true + build-args: | + BASE_IMAGE=pyrit-devcontainer:latest + PYRIT_SOURCE=local + cache-from: type=gha + cache-to: type=gha,mode=max + + - name: Save production image + run: docker save pyrit:local-test | gzip > local.tar.gz + + - name: Upload production artifact + uses: actions/upload-artifact@v4 + with: + name: production-local-image + path: local.tar.gz + retention-days: 1 + + build-production-pypi: + name: Build Production (PyPI) + runs-on: ubuntu-latest + needs: build-devcontainer + if: github.ref == 'refs/heads/main' || github.event_name == 'workflow_dispatch' + permissions: + contents: read + steps: + - uses: actions/checkout@v4 + + - name: Get latest PyRIT version from PyPI + id: pypi-version + run: | + VERSION=$(pip index versions pyrit 2>/dev/null | head -1 | grep -oP '\(\K[^)]+' || echo "0.10.0") + echo "version=$VERSION" >> $GITHUB_OUTPUT + echo "Latest PyRIT version on PyPI: $VERSION" + + - name: Download devcontainer image + uses: actions/download-artifact@v4 + with: + name: devcontainer-image + + - name: Load devcontainer image + run: gunzip -c devcontainer.tar.gz | docker load + + - name: Set up Docker Buildx + uses: docker/setup-buildx-action@v3 + + - name: Build production image (PyPI) + uses: docker/build-push-action@v5 + with: + context: . + file: docker/Dockerfile + push: false + tags: pyrit:pypi-test + load: true + build-args: | + BASE_IMAGE=pyrit-devcontainer:latest + PYRIT_SOURCE=pypi + PYRIT_VERSION=${{ steps.pypi-version.outputs.version }} + cache-from: type=gha + cache-to: type=gha,mode=max + + - name: Save production image + run: docker save pyrit:pypi-test | gzip > pypi.tar.gz + + - name: Upload production artifact + uses: actions/upload-artifact@v4 + with: + name: production-pypi-image + path: pypi.tar.gz + retention-days: 1 + + # Stage 3: Test production images (parallel) + test-local-import: + name: Test Import (local) + runs-on: ubuntu-latest + needs: build-production-local + steps: + - name: Download production image + uses: actions/download-artifact@v4 + with: + name: production-local-image + + - name: Load production image + run: gunzip -c local.tar.gz | docker load + + - name: Test PyRIT import + run: | + docker run --rm --entrypoint /opt/venv/bin/python pyrit:local-test -c "import pyrit; print(f'PyRIT version: {pyrit.__version__}')" + + test-local-gui: + name: Test GUI (local) + runs-on: ubuntu-latest + needs: build-production-local + steps: + - name: Download production image + uses: actions/download-artifact@v4 + with: + name: production-local-image + + - name: Load production image + run: gunzip -c local.tar.gz | docker load + + - name: Test GUI mode + run: | + docker run -d --name pyrit-gui-test -e PYRIT_MODE=gui -p 8000:8000 pyrit:local-test + + echo "Waiting for GUI to start..." + sleep 15 + + if ! docker ps | grep -q pyrit-gui-test; then + echo "Container not running! Logs:" + docker logs pyrit-gui-test + exit 1 + fi + + echo "Testing API health endpoint..." + curl -sf http://localhost:8000/api/health || (echo "Health endpoint failed" && docker logs pyrit-gui-test && exit 1) + + echo "Testing frontend is served..." 
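          # Any HTML in the response body is enough to show that static file serving works.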
          curl -sf http://localhost:8000/ | grep -qi "<html" || (echo "Frontend not served" && exit 1)

          echo "✅ GUI mode tests passed"
          docker stop pyrit-gui-test && docker rm pyrit-gui-test

  test-local-jupyter:
    name: Test Jupyter (local)
    runs-on: ubuntu-latest
    needs: build-production-local
    steps:
      - name: Download production image
        uses: actions/download-artifact@v4
        with:
          name: production-local-image

      - name: Load production image
        run: gunzip -c local.tar.gz | docker load

      - name: Test Jupyter mode
        run: |
          docker run -d --name pyrit-jupyter-test -e PYRIT_MODE=jupyter -p 8888:8888 pyrit:local-test

          echo "Waiting for Jupyter to start..."
          sleep 20

          if ! docker ps | grep -q pyrit-jupyter-test; then
            echo "Container not running! Logs:"
            docker logs pyrit-jupyter-test
            exit 1
          fi

          echo "Testing Jupyter responds..."
          curl -sf http://localhost:8888/api || (echo "Jupyter API failed" && docker logs pyrit-jupyter-test && exit 1)

          echo "✅ Jupyter mode tests passed"
          docker stop pyrit-jupyter-test && docker rm pyrit-jupyter-test

  test-pypi-import:
    name: Test Import (PyPI)
    runs-on: ubuntu-latest
    needs: build-production-pypi
    steps:
      - name: Download production image
        uses: actions/download-artifact@v4
        with:
          name: production-pypi-image

      - name: Load production image
        run: gunzip -c pypi.tar.gz | docker load

      - name: Test PyRIT import
        run: |
          docker run --rm --entrypoint /opt/venv/bin/python pyrit:pypi-test -c "import pyrit; print(f'PyRIT version: {pyrit.__version__}')"

  test-pypi-gui:
    name: Test GUI (PyPI)
    runs-on: ubuntu-latest
    needs: build-production-pypi
    steps:
      - name: Download production image
        uses: actions/download-artifact@v4
        with:
          name: production-pypi-image

      - name: Load production image
        run: gunzip -c pypi.tar.gz | docker load

      - name: Test GUI mode
        run: |
          docker run -d --name pyrit-gui-pypi -e PYRIT_MODE=gui -p 8000:8000 pyrit:pypi-test

          echo "Waiting for GUI to start..."
          sleep 15

          if ! docker ps | grep -q pyrit-gui-pypi; then
            echo "Container not running! Logs:"
            docker logs pyrit-gui-pypi
            exit 1
          fi

          curl -sf http://localhost:8000/api/health || (echo "Health endpoint failed" && docker logs pyrit-gui-pypi && exit 1)
          curl -sf http://localhost:8000/ | grep -qi "<html" || (echo "Frontend not served" && exit 1)

          echo "✅ GUI mode tests passed (PyPI)"
          docker stop pyrit-gui-pypi && docker rm pyrit-gui-pypi

  test-pypi-jupyter:
    name: Test Jupyter (PyPI)
    runs-on: ubuntu-latest
    needs: build-production-pypi
    steps:
      - name: Download production image
        uses: actions/download-artifact@v4
        with:
          name: production-pypi-image

      - name: Load production image
        run: gunzip -c pypi.tar.gz | docker load

      - name: Test Jupyter mode
        run: |
          docker run -d --name pyrit-jupyter-pypi -e PYRIT_MODE=jupyter -p 8888:8888 pyrit:pypi-test

          echo "Waiting for Jupyter to start..."
          sleep 20

          if ! docker ps | grep -q pyrit-jupyter-pypi; then
            echo "Container not running! Logs:"
            docker logs pyrit-jupyter-pypi
            exit 1
          fi

          curl -sf http://localhost:8888/api || (echo "Jupyter API failed" && docker logs pyrit-jupyter-pypi && exit 1)

          echo "✅ Jupyter mode tests passed (PyPI)"
          docker stop pyrit-jupyter-pypi && docker rm pyrit-jupyter-pypi
diff --git a/build_scripts/prepare_package.py b/build_scripts/prepare_package.py
new file mode 100644
index 0000000000..a21ff55f18
--- /dev/null
+++ b/build_scripts/prepare_package.py
@@ -0,0 +1,158 @@
#!/usr/bin/env python3
# Copyright (c) Microsoft Corporation.
# Licensed under the MIT license.

"""
Script to prepare the PyRIT package for distribution.
This builds the TypeScript/React frontend and copies artifacts into the Python package structure.
"""

import shutil
import subprocess
import sys
from pathlib import Path


def build_frontend(frontend_dir: Path) -> bool:
    """
    Build the TypeScript/React frontend using npm.

    Args:
        frontend_dir: Path to the frontend directory

    Returns:
        True if successful, False otherwise
    """
    print("=" * 60)
    print("Building TypeScript/React frontend...")
    print("=" * 60)

    # Check if npm is available
    try:
        result = subprocess.run(["npm", "--version"], capture_output=True, text=True, check=True)
        print(f"Found npm version: {result.stdout.strip()}")
    except (subprocess.CalledProcessError, FileNotFoundError):
        print("ERROR: npm is not installed or not in PATH")
        print("Please install Node.js and npm from https://nodejs.org/")
        return False

    # Check if package.json exists
    package_json = frontend_dir / "package.json"
    if not package_json.exists():
        print(f"ERROR: package.json not found at {package_json}")
        return False

    # Install dependencies
    print("\nInstalling frontend dependencies...")
    try:
        subprocess.run(
            ["npm", "install"],
            cwd=frontend_dir,
            check=True,
            stdout=subprocess.PIPE,
            stderr=subprocess.STDOUT,
            text=True,
        )
        print("✓ Dependencies installed")
    except subprocess.CalledProcessError as e:
        print(f"ERROR: Failed to install dependencies:\n{e.stdout}")
        return False

    # Build the frontend
    print("\nBuilding frontend for production...")
    try:
        subprocess.run(
            ["npm", "run", "build"],
            cwd=frontend_dir,
            check=True,
            stdout=subprocess.PIPE,
            stderr=subprocess.STDOUT,
            text=True,
        )
        print("✓ Frontend built successfully")
        return True
    except subprocess.CalledProcessError as e:
        print(f"ERROR: Failed to build frontend:\n{e.stdout}")
        return False


def copy_frontend_to_package(frontend_dist: Path, backend_frontend: Path) -> bool:
    """
    Copy frontend dist to pyrit/backend/frontend for packaging.
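
    The target directory is deleted and recreated on every run, so stale
    assets from a previous build cannot linger in the package.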

    Args:
        frontend_dist: Path to frontend/dist
        backend_frontend: Path to pyrit/backend/frontend

    Returns:
        True if successful, False otherwise
    """
    print("\n" + "=" * 60)
    print("Copying frontend to Python package...")
    print("=" * 60)

    # Check if frontend dist exists
    if not frontend_dist.exists():
        print(f"ERROR: Frontend dist directory not found at {frontend_dist}")
        return False

    # Remove existing backend/frontend if it exists
    if backend_frontend.exists():
        print(f"Removing existing {backend_frontend}")
        shutil.rmtree(backend_frontend)

    # Copy frontend dist to backend/frontend
    print(f"Copying {frontend_dist} to {backend_frontend}")
    shutil.copytree(frontend_dist, backend_frontend)

    # Verify files were copied
    index_html = backend_frontend / "index.html"
    if index_html.exists():
        print("✓ Frontend successfully copied to package")
        return True
    else:
        print("ERROR: index.html not found after copy")
        return False


def main():
    """Build frontend and prepare package for distribution."""
    # Define paths
    root = Path(__file__).parent.parent
    frontend_dir = root / "frontend"
    frontend_dist = frontend_dir / "dist"
    backend_frontend = root / "pyrit" / "backend" / "frontend"

    print("PyRIT Package Preparation")
    print("=" * 60)
    print(f"Root directory: {root}")
    print(f"Frontend directory: {frontend_dir}")
    print(f"Target directory: {backend_frontend}")
    print()

    # Check if frontend directory exists
    if not frontend_dir.exists():
        print(f"ERROR: Frontend directory not found at {frontend_dir}")
        return 1

    # Build the frontend
    if not build_frontend(frontend_dir):
        print("\n❌ Failed to build frontend")
        return 1

    # Copy to package
    if not copy_frontend_to_package(frontend_dist, backend_frontend):
        print("\n❌ Failed to copy frontend to package")
        return 1

    print("\n" + "=" * 60)
    print("✅ Package preparation complete!")
    print("=" * 60)
    print("\nNext steps:")
    print("  1. Build the Python package: python -m build")
    print("  2. Upload to PyPI: python -m twine upload dist/*")
    return 0


if __name__ == "__main__":
    sys.exit(main())
diff --git a/doc/contributing/11_release_process.md b/doc/contributing/11_release_process.md
index 95d54baec7..71a0984835 100644
--- a/doc/contributing/11_release_process.md
+++ b/doc/contributing/11_release_process.md
@@ -97,7 +97,21 @@ After pushing the branch to remote, check the release branch to make sure it loo
 ## 5. Build Package
 
 You'll need the build package to build the project. If it's not already installed, install it with `pip install build`.
 
+### Build the Frontend
+
+The PyRIT package includes a web-based frontend that must be built before packaging. This requires Node.js and npm to be installed.
+
+Run the prepare script to build the frontend and copy it into the package structure:
+
+```bash
+python build_scripts/prepare_package.py
+```
+
+This will:
+1. Run `npm install` and `npm run build` in the `frontend/` directory
+2. Copy the built assets from `frontend/dist/` to `pyrit/backend/frontend/`. Double-check that the files exist after running the `prepare_package.py` script; at minimum, expect `index.html` and an `assets` folder with `js` and `css` files (see the sketch below).
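+
+As a quick sanity check, a minimal sketch along these lines (run from the repository root; the exact bundle names inside `assets/` are hash-suffixed and build-dependent, so only the folder's presence is asserted):
+
+```python
+from pathlib import Path
+
+frontend = Path("pyrit") / "backend" / "frontend"
+assets = frontend / "assets"
+
+# index.html is the entry point the backend serves at "/".
+assert (frontend / "index.html").is_file(), "index.html missing - frontend not copied"
+
+# The bundler emits hash-suffixed js/css names, so only require a non-empty assets folder.
+assert assets.is_dir() and any(assets.iterdir()), "assets folder missing or empty"
+
+print("Frontend artifacts look good.")
+```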
+ +### Build the Python Package To build the package wheel and archive for PyPI run ```bash diff --git a/docker/Dockerfile b/docker/Dockerfile index 1467a8dca0..6b713a797b 100644 --- a/docker/Dockerfile +++ b/docker/Dockerfile @@ -1,61 +1,98 @@ -# Base image from Microsoft Azure with Python 3.12 -FROM mcr.microsoft.com/azureml/minimal-py312-inference:20250310.v1 -LABEL description="Docker container for PyRIT with Jupyter Notebook integration" +# syntax=docker/dockerfile:1.4 +# ============================================================================ +# PyRIT Production Dockerfile +# +# This Dockerfile builds on top of the devcontainer base image to avoid +# duplication. The devcontainer is built first and used as the base. +# +# Build with: +# python docker/build_pyrit_docker.py --source local +# python docker/build_pyrit_docker.py --source pypi --version 0.10.0 +# ============================================================================ -# Set environment variables +# Use the devcontainer as base (built by build_pyrit_docker.py) +ARG BASE_IMAGE=pyrit-devcontainer +FROM ${BASE_IMAGE} AS production + +LABEL description="Docker container for PyRIT with Jupyter Notebook and GUI support" + +# Build arguments for version tracking +ARG PYRIT_SOURCE=pypi +ARG PYRIT_VERSION="" +ARG GIT_COMMIT="" +ARG GIT_MODIFIED=false + +# Production environment variables ENV PYTHONDONTWRITEBYTECODE=1 ENV PYTHONUNBUFFERED=1 -ENV DEBIAN_FRONTEND=noninteractive +ENV JUPYTER_ENABLE_LAB=yes +ENV CUDA_VISIBLE_DEVICES=-1 +ENV ENABLE_GPU=false +ENV HOME=/home/vscode -# Switch to root user to install packages USER root -# Install system dependencies -RUN apt-get update && apt-get install -y --no-install-recommends \ - git \ - curl \ - wget \ - build-essential \ - ca-certificates \ - unixodbc \ - libgl1-mesa-glx \ - && apt-get clean \ - && rm -rf /var/lib/apt/lists/* +# Ensure we use the venv from the devcontainer base +ENV PATH="/opt/venv/bin:$PATH" +ENV VIRTUAL_ENV="/opt/venv" # Set up working directory WORKDIR /app -# Copy requirements file -COPY requirements.txt /app/ - -# Install torch, torchvision, torchaudio from PyTorch with CUDA 11.8 support -RUN pip install --no-cache-dir --index-url https://download.pytorch.org/whl/cu118 torch==2.6.0+cu118 torchvision==0.21.0+cu118 torchaudio==2.6.0+cu118 -# Install all Python dependencies at once with pinned versions -RUN pip install --no-cache-dir -r requirements.txt +# For local: copy source, install editable, and build frontend +COPY --chown=vscode:vscode pyproject.toml MANIFEST.in README.md LICENSE /app/ +COPY --chown=vscode:vscode pyrit/ /app/pyrit/ +COPY --chown=vscode:vscode frontend/ /app/frontend/ +COPY --chown=vscode:vscode build_scripts/ /app/build_scripts/ +COPY --chown=vscode:vscode doc/ /app/doc/ -# Install PyRIT from PyPI (the official way) -RUN pip install --no-cache-dir pyrit[dev,all] +# Install PyRIT and create build info (combined to ensure dependencies are available) +# Note: We use 'uv pip' because the devcontainer creates venv with uv (no pip by default) +RUN if [ "$PYRIT_SOURCE" = "pypi" ]; then \ + echo "Installing PyRIT from PyPI version: $PYRIT_VERSION"; \ + uv pip install --python /opt/venv/bin/python pyrit[dev,speech,opencv,fairness_bias,fastapi,playwright]==$PYRIT_VERSION; \ + elif [ "$PYRIT_SOURCE" = "local" ]; then \ + echo "Installing PyRIT from local source"; \ + uv pip install --python /opt/venv/bin/python -e .[dev,speech,opencv,fairness_bias,fastapi,playwright]; \ + echo "Building frontend..."; \ + /opt/venv/bin/python 
build_scripts/prepare_package.py; \ + fi && \ + echo "Creating build info..." && \ + /opt/venv/bin/python -c "import json; import pyrit; \ +info = { \ + 'source': '$PYRIT_SOURCE', \ + 'version': pyrit.__version__, \ + 'commit': '$GIT_COMMIT' if '$GIT_COMMIT' else None, \ + 'modified': '$GIT_MODIFIED' == 'true', \ + 'display': '$PYRIT_VERSION' if '$PYRIT_SOURCE' == 'pypi' else ('$GIT_COMMIT' + (' + local changes' if '$GIT_MODIFIED' == 'true' else '') if '$GIT_COMMIT' else pyrit.__version__) \ +}; \ +f = open('/app/build_info.json', 'w'); json.dump(info, f); f.close(); \ +print(f'PyRIT version: {pyrit.__version__}')" - -# Create a directory for notebooks and data +# Create directories for notebooks and data RUN mkdir -p /app/notebooks /app/data /app/assets && \ chmod -R 777 /app/notebooks /app/data /app/assets -# Check PyRIT version -RUN python -c "import pyrit; print(f'PyRIT version: {pyrit.__version__}')" +# Create PyRIT config directory for env files (will be mounted at runtime) +RUN mkdir -p /home/vscode/.pyrit && \ + chown -R vscode:vscode /home/vscode/.pyrit + +# Register the Jupyter kernel for the venv +RUN /opt/venv/bin/python -m ipykernel install --user --name pyrit --display-name "PyRIT" -RUN chown -R dockeruser:dockeruser /app +# Copy doc to notebooks for Jupyter mode +RUN if [ -d "/app/doc" ]; then \ + cp -r /app/doc/* /app/notebooks/ || true; \ + fi + +RUN chown -R vscode:vscode /app # Create and set permissions for the startup script -COPY start.sh /app/start.sh +COPY docker/start.sh /app/start.sh RUN chmod +x /app/start.sh -# Switch to non-root user -USER dockeruser - -# Expose port for JupyterLab -EXPOSE 8888 +# Expose ports for JupyterLab (8888) and GUI (8000) +EXPOSE 8888 8000 -# Set the entrypoint to the startup script and default command to launch JupyterLab +# Set the entrypoint to the startup script (mode determined by PYRIT_MODE env var) ENTRYPOINT ["/app/start.sh"] -CMD ["jupyter", "lab", "--ip=0.0.0.0", "--port=8888", "--no-browser", "--allow-root", "--NotebookApp.token=''", "--NotebookApp.password=''", "--notebook-dir=/app/notebooks"] diff --git a/docker/QUICKSTART.md b/docker/QUICKSTART.md new file mode 100644 index 0000000000..2ba28b778c --- /dev/null +++ b/docker/QUICKSTART.md @@ -0,0 +1,82 @@ +# PyRIT Docker - Quick Start Guide + +Docker container for PyRIT with support for both **Jupyter Notebook** and **GUI** modes. + +## Prerequisites +- Docker installed and running +- `.env` file at `~/.pyrit/.env` with API keys +- Optionally, `~/.pyrit/.env.local` for additional environment variables + +## Quick Start + +### 1. Build the Image + +Build from local source (includes frontend): +```bash +python docker/build_pyrit_docker.py --source local +``` + +Build from PyPI version: +```bash +python docker/build_pyrit_docker.py --source pypi --version 0.10.0 +``` + +Rebuild base image (when devcontainer changes): +```bash +python docker/build_pyrit_docker.py --source local --rebuild-base +``` + +> **Note:** The build script automatically builds the devcontainer base image if needed. +> The base image is cached and reused for faster subsequent builds. + +### 2. 
Run PyRIT

Jupyter mode (port 8888):
```bash
python docker/run_pyrit_docker.py jupyter
```

GUI mode (port 8000):
```bash
python docker/run_pyrit_docker.py gui
```

## Image Tags

Images are tagged with version information:
- PyPI: `pyrit:0.10.0`, `pyrit:latest`
- Local (clean): `pyrit:<commit>`, `pyrit:latest`
- Local (modified): `pyrit:<commit>-modified`, `pyrit:latest`

Run a specific tag:
```bash
python docker/run_pyrit_docker.py gui --tag abc1234def5678
```

## Version Display

The GUI shows the PyRIT version in a tooltip on the logo:
- PyPI builds: `0.10.0`
- Local builds: `abc1234def5678` or `abc1234def5678 + local changes`

## Docker Compose

Use profiles to run specific modes:

```bash
# Jupyter mode
docker-compose --profile jupyter up

# GUI mode
docker-compose --profile gui up
```

## Troubleshooting

**Image not found**: Run `python docker/build_pyrit_docker.py --source local` first.

**.env missing**: Create a `.env` file at `~/.pyrit/.env` with your API keys.

**GUI frontend missing**: Build with `--source local` (PyPI versions published before the GUI shipped in the package won't include it).

For complete documentation, see [docker/README.md](./README.md)
diff --git a/docker/build_pyrit_docker.py b/docker/build_pyrit_docker.py
new file mode 100644
index 0000000000..406647d84d
--- /dev/null
+++ b/docker/build_pyrit_docker.py
@@ -0,0 +1,206 @@
#!/usr/bin/env python3
# Copyright (c) Microsoft Corporation.
# Licensed under the MIT license.

"""
Build PyRIT Docker image with support for both PyPI and local source.

This script first builds the devcontainer base image, then builds the
production image on top of it to avoid duplication.

Usage:
    python build_pyrit_docker.py --source pypi --version 0.10.0
    python build_pyrit_docker.py --source local
"""

import argparse
import subprocess
import sys
from pathlib import Path

DEVCONTAINER_IMAGE = "pyrit-devcontainer"


def get_git_info():
    """Get current git commit hash and check for uncommitted changes."""
    try:
        # Get commit hash
        result = subprocess.run(["git", "rev-parse", "HEAD"], capture_output=True, text=True, check=True)
        commit = result.stdout.strip()

        # Check for uncommitted changes
        result = subprocess.run(["git", "status", "--porcelain"], capture_output=True, text=True, check=True)
        modified = len(result.stdout.strip()) > 0

        return commit, modified
    except subprocess.CalledProcessError as e:
        print(f"ERROR: Failed to get git info: {e}")
        sys.exit(1)


def build_devcontainer(root_dir: Path, force_rebuild: bool = False) -> bool:
    """Build the devcontainer base image if needed."""
    print("🔧 Building devcontainer base image...")
    print(f"   Tag: {DEVCONTAINER_IMAGE}")
    print()

    # Check if image already exists (skip if not forcing rebuild)
    if not force_rebuild:
        result = subprocess.run(["docker", "images", "-q", DEVCONTAINER_IMAGE], capture_output=True, text=True)
        if result.stdout.strip():
            print(f"   ✓ Using existing {DEVCONTAINER_IMAGE} image")
            print("     (use --rebuild-base to force rebuild)")
            print()
            return True

    cmd = [
        "docker",
        "build",
        "-f",
        str(root_dir / ".devcontainer" / "Dockerfile"),
        "-t",
        DEVCONTAINER_IMAGE,
        str(root_dir / ".devcontainer"),
    ]

    print(f"Running: {' '.join(cmd)}")
    print()

    result = subprocess.run(cmd)

    if result.returncode != 0:
        print()
        print("❌ Failed to build devcontainer base image")
        return False

    print()
    print(f"   ✓ Devcontainer base image built: {DEVCONTAINER_IMAGE}")
    print()
    return True


def 
build_image(source, version=None, rebuild_base=False): + """Build the Docker image with appropriate tags.""" + root_dir = Path(__file__).parent.parent + + print("🐳 PyRIT Docker Image Builder") + print("=" * 60) + + # First, build the devcontainer base image + if not build_devcontainer(root_dir, force_rebuild=rebuild_base): + sys.exit(1) + + # Prepare build arguments + build_args = {"PYRIT_SOURCE": source, "BASE_IMAGE": DEVCONTAINER_IMAGE} + + # Determine version and tag + if source == "pypi": + if not version: + print("ERROR: --version is required when --source is pypi") + sys.exit(1) + build_args["PYRIT_VERSION"] = version + image_tag = version + print(f"📦 Building from PyPI version: {version}") + print() + print("⚠️ IMPORTANT WARNINGS:") + print(" 1. GUI mode may not work if this PyPI version doesn't") + print(" include the frontend. Jupyter mode will work.") + print(" 2. Ensure your local branch matches the release version:") + print(f" git checkout releases/v{version}") + print(" This ensures notebooks/docs match the PyRIT version.") + print() + + elif source == "local": + commit, modified = get_git_info() + build_args["GIT_COMMIT"] = commit + build_args["GIT_MODIFIED"] = "true" if modified else "false" + + # Create tag from commit hash + image_tag = f"{commit}" + if modified: + image_tag += "-modified" + + print(f"📦 Building from local source") + print(f" Commit: {commit}") + print(f" Modified: {modified}") + print() + else: + print(f"ERROR: Invalid source '{source}'. Must be 'pypi' or 'local'") + sys.exit(1) + + # Build the Docker image + print("🔨 Building Docker image...") + print(f" Tag: pyrit:{image_tag}") + print(f" Also tagging as: pyrit:latest") + print() + + cmd = [ + "docker", + "build", + "-f", + str(root_dir / "docker" / "Dockerfile"), + "-t", + f"pyrit:{image_tag}", + "-t", + "pyrit:latest", + ] + + # Add build args + for key, value in build_args.items(): + cmd.extend(["--build-arg", f"{key}={value}"]) + + cmd.append(str(root_dir)) + + print(f"Running: {' '.join(cmd)}") + print() + + result = subprocess.run(cmd) + + if result.returncode != 0: + print() + print("❌ Failed to build Docker image") + sys.exit(1) + + print() + print("=" * 60) + print("✅ Docker image built successfully!") + print("=" * 60) + print() + print(f" pyrit:{image_tag}") + print(f" pyrit:latest") + print() + print("Next steps:") + print(f" python docker/run_pyrit_docker.py jupyter") + print(f" python docker/run_pyrit_docker.py gui") + print() + + +def main(): + parser = argparse.ArgumentParser( + description="Build PyRIT Docker image", + formatter_class=argparse.RawDescriptionHelpFormatter, + epilog=""" +Examples: + # Build from PyPI version 0.10.0 + python docker/build_pyrit_docker.py --source pypi --version 0.10.0 + + # Build from local source + python docker/build_pyrit_docker.py --source local + """, + ) + + parser.add_argument( + "--source", required=True, choices=["pypi", "local"], help="Source to build from: 'pypi' or 'local'" + ) + + parser.add_argument("--version", help="PyRIT version to install (required when source=pypi)") + + parser.add_argument("--rebuild-base", action="store_true", help="Force rebuild of the devcontainer base image") + + args = parser.parse_args() + + build_image(args.source, args.version, args.rebuild_base) + + +if __name__ == "__main__": + main() diff --git a/docker/docker-compose.yaml b/docker/docker-compose.yaml index 94c954993a..0342a85e52 100644 --- a/docker/docker-compose.yaml +++ b/docker/docker-compose.yaml @@ -1,8 +1,17 @@ +# NOTE: You must build the devcontainer 
base image first: +# docker build -f .devcontainer/Dockerfile -t pyrit-devcontainer .devcontainer +# Or use: python docker/build_pyrit_docker.py --source local +# +# Environment files: ~/.pyrit/.env and ~/.pyrit/.env.local are mounted to the container. + services: - pyrit: + pyrit-jupyter: build: - context: . - dockerfile: Dockerfile + context: .. + dockerfile: docker/Dockerfile + args: + BASE_IMAGE: pyrit-devcontainer + PYRIT_SOURCE: local image: pyrit:latest container_name: pyrit-jupyter ports: @@ -11,10 +20,10 @@ services: - notebooks:/app/notebooks - data:/app/data - ../assets:/app/assets - env_file: - - ~/.pyrit/.env - - ~/.pyrit/.env.local - - .env.container.settings + - ~/.pyrit/.env:/home/vscode/.pyrit/.env:ro + - ~/.pyrit/.env.local:/home/vscode/.pyrit/.env.local:ro + environment: + - PYRIT_MODE=jupyter restart: unless-stopped healthcheck: test: ["CMD-SHELL", "curl -sf http://localhost:8888 || exit 1"] @@ -22,6 +31,35 @@ services: timeout: 10s retries: 3 start_period: 40s + profiles: + - jupyter + + pyrit-gui: + build: + context: .. + dockerfile: docker/Dockerfile + args: + BASE_IMAGE: pyrit-devcontainer + PYRIT_SOURCE: local + image: pyrit:latest + container_name: pyrit-gui + ports: + - "8000:8000" + volumes: + - ../assets:/app/assets + - ~/.pyrit/.env:/home/vscode/.pyrit/.env:ro + - ~/.pyrit/.env.local:/home/vscode/.pyrit/.env.local:ro + environment: + - PYRIT_MODE=gui + restart: unless-stopped + healthcheck: + test: ["CMD-SHELL", "curl -sf http://localhost:8000/api/health || exit 1"] + interval: 30s + timeout: 10s + retries: 3 + start_period: 40s + profiles: + - gui volumes: notebooks: diff --git a/docker/requirements.txt b/docker/requirements.txt deleted file mode 100644 index edf4e49e51..0000000000 --- a/docker/requirements.txt +++ /dev/null @@ -1,8 +0,0 @@ -# JupyterLab and related packages -jupyterlab>=4.3.4,<4.5 -notebook==7.3.2 -ipywidgets==8.1.5 -matplotlib==3.10.1 -pandas==2.2.3 -seaborn==0.13.2 -ipython==9.0.2 diff --git a/docker/run_pyrit_docker.py b/docker/run_pyrit_docker.py new file mode 100644 index 0000000000..63bdcbd150 --- /dev/null +++ b/docker/run_pyrit_docker.py @@ -0,0 +1,139 @@ +#!/usr/bin/env python3 +# Copyright (c) Microsoft Corporation. +# Licensed under the MIT license. + +""" +Run PyRIT Docker container in Jupyter or GUI mode. 
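
Environment files are mounted read-only from ~/.pyrit/ into the container,
so API keys never have to be baked into the image.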
+ +Usage: + python run_pyrit_docker.py jupyter + python run_pyrit_docker.py gui + python run_pyrit_docker.py gui --tag abc1234def5678 +""" + +import argparse +import subprocess +import sys +from pathlib import Path + + +def check_image_exists(tag): + """Check if the Docker image exists.""" + result = subprocess.run(["docker", "images", "-q", f"pyrit:{tag}"], capture_output=True, text=True) + return len(result.stdout.strip()) > 0 + + +def run_container(mode, tag="latest"): + """Run the PyRIT container in the specified mode.""" + root_dir = Path(__file__).parent.parent + pyrit_config_dir = Path.home() / ".pyrit" + env_file = pyrit_config_dir / ".env" + env_local_file = pyrit_config_dir / ".env.local" + + print("🐳 PyRIT Docker Runner") + print("=" * 60) + + # Check for .env file + if not env_file.exists(): + print("❌ ERROR: .env file not found!") + print(f" Expected location: {env_file}") + print(" Please create a .env file with your API keys.") + print(" See: https://github.com/Azure/PyRIT/blob/main/doc/setup/setup.md") + sys.exit(1) + + # Check if image exists + if not check_image_exists(tag): + print(f"❌ ERROR: Docker image 'pyrit:{tag}' not found!") + print() + print("Please build the image first:") + print(" python docker/build_pyrit_docker.py --source local") + print(" python docker/build_pyrit_docker.py --source pypi --version X.Y.Z") + sys.exit(1) + + # Determine port based on mode + if mode == "jupyter": + port = "8888" + url = "http://localhost:8888" + container_name = "pyrit-jupyter" + elif mode == "gui": + port = "8000" + url = "http://localhost:8000" + container_name = "pyrit-gui" + else: + print(f"ERROR: Invalid mode '{mode}'. Must be 'jupyter' or 'gui'") + sys.exit(1) + + print(f"🚀 Starting PyRIT in {mode.upper()} mode") + print(f" Image: pyrit:{tag}") + print(f" Port: {port}") + print() + + # Build docker run command + # Mount env files to ~/.pyrit/ where PyRIT expects them + cmd = [ + "docker", + "run", + "--rm", + "--name", + container_name, + "-p", + f"{port}:{port}", + "-e", + f"PYRIT_MODE={mode}", + "-v", + f"{env_file}:/home/vscode/.pyrit/.env:ro", + ] + + # Add .env.local if it exists + if env_local_file.exists(): + print(f" Found .env.local - including it") + cmd.extend(["-v", f"{env_local_file}:/home/vscode/.pyrit/.env.local:ro"]) + + cmd.append(f"pyrit:{tag}") + + print() + print("=" * 60) + print("🌐 Open in your browser:") + print() + print(f" {url}") + print() + print("=" * 60) + print() + print("Press Ctrl+C to stop") + print() + + try: + subprocess.run(cmd) + except KeyboardInterrupt: + print("\n\n🛑 Stopping PyRIT...") + print("✅ Stopped") + + +def main(): + parser = argparse.ArgumentParser( + description="Run PyRIT Docker container", + formatter_class=argparse.RawDescriptionHelpFormatter, + epilog=""" +Examples: + # Run in Jupyter mode + python docker/run_pyrit_docker.py jupyter + + # Run in GUI mode + python docker/run_pyrit_docker.py gui + + # Run with specific image tag + python docker/run_pyrit_docker.py gui --tag abc1234def5678 + """, + ) + + parser.add_argument("mode", choices=["jupyter", "gui"], help="Mode to run: 'jupyter' or 'gui'") + + parser.add_argument("--tag", default="latest", help="Docker image tag to use (default: latest)") + + args = parser.parse_args() + + run_container(args.mode, args.tag) + + +if __name__ == "__main__": + main() diff --git a/docker/start.sh b/docker/start.sh index 6bebf6b833..f101ee5c96 100644 --- a/docker/start.sh +++ b/docker/start.sh @@ -1,21 +1,25 @@ #!/bin/bash set -e -# Clone PyRIT repository if not already present -if 
[ ! -d "/app/PyRIT" ]; then - echo "Cloning PyRIT repository..." - git clone https://github.com/Azure/PyRIT -else - echo "PyRIT repository already exists. Updating..." - cd /app/PyRIT - git pull - cd /app +# Activate the Python virtual environment +source /opt/venv/bin/activate + +# Set HOME to vscode user's home so PyRIT finds env files at ~/.pyrit/ +export HOME=/home/vscode + +echo "=== PyRIT Container Startup ===" +echo "PYRIT_MODE: ${PYRIT_MODE:-not set}" +echo "Python version: $(python --version)" +echo "================================" + +# Check if PYRIT_MODE is set +if [ -z "$PYRIT_MODE" ]; then + echo "ERROR: PYRIT_MODE environment variable is not set!" + echo "Please set PYRIT_MODE to either 'jupyter' or 'gui'" + exit 1 fi -# Copy doc folder to notebooks directory -echo "Copying documentation to notebooks directory..." -cp -r /app/PyRIT/doc/* /app/notebooks/ -rm -rf /app/PyRIT +echo "PYRIT_MODE is set to: $PYRIT_MODE" # Default to CPU mode export CUDA_VISIBLE_DEVICES="-1" @@ -30,7 +34,18 @@ else fi # Print PyRIT version +echo "Checking PyRIT installation..." python -c "import pyrit; print(f'Running PyRIT version: {pyrit.__version__}')" -# Execute the command passed to docker run (or the CMD if none provided) -exec "$@" +# Start the appropriate service based on PYRIT_MODE +if [ "$PYRIT_MODE" = "jupyter" ]; then + echo "Starting JupyterLab on port 8888..." + echo "Note: Notebooks are from the local source at build time" + exec jupyter lab --ip=0.0.0.0 --port=8888 --no-browser --allow-root --NotebookApp.token='' --NotebookApp.password='' --notebook-dir=/app/notebooks +elif [ "$PYRIT_MODE" = "gui" ]; then + echo "Starting PyRIT GUI on port 8000..." + exec python -m uvicorn pyrit.backend.main:app --host 0.0.0.0 --port 8000 +else + echo "ERROR: Invalid PYRIT_MODE '$PYRIT_MODE'. 
Must be 'jupyter' or 'gui'" + exit 1 +fi diff --git a/frontend/tsconfig.json b/frontend/tsconfig.json index 1f1eed3157..59b39e17bf 100644 --- a/frontend/tsconfig.json +++ b/frontend/tsconfig.json @@ -28,5 +28,6 @@ } }, "include": ["src"], + "exclude": ["src/**/*.test.ts", "src/**/*.test.tsx", "src/setupTests.ts"], "references": [{ "path": "./tsconfig.node.json" }] } diff --git a/pyproject.toml b/pyproject.toml index a4a34b55f2..d3e676c4f9 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -72,8 +72,10 @@ dev = [ "jupyter>=1.1.1", "jupyter-book==1.0.4", "jupytext>=1.17.1", + "matplotlib>=3.10.0", "mypy>=1.16.0", "mock-alchemy>=0.2.6", + "pandas>=2.2.0", "pre-commit>=4.2.0", "pytest>=8.3.5", "pytest-asyncio>=1.0.0", @@ -129,7 +131,6 @@ speech = [ "azure-cognitiveservices-speech>=1.46.0", ] - # all includes all functional dependencies excluding the ones from the "dev" extra all = [ "accelerate>=1.7.0", diff --git a/pyrit/backend/main.py b/pyrit/backend/main.py index c703f592cc..08502b849e 100644 --- a/pyrit/backend/main.py +++ b/pyrit/backend/main.py @@ -6,7 +6,6 @@ """ import os -import sys from pathlib import Path from fastapi import FastAPI @@ -52,7 +51,7 @@ async def startup_event_async() -> None: def setup_frontend() -> None: - """Set up frontend static file serving (only called when running as main script).""" + """Set up frontend static file serving.""" frontend_path = Path(__file__).parent / "frontend" if DEV_MODE: @@ -63,12 +62,17 @@ def setup_frontend() -> None: print(f"✅ Serving frontend from {frontend_path}") app.mount("/", StaticFiles(directory=str(frontend_path), html=True), name="frontend") else: - # Production mode but no frontend found - this is an error - print("❌ ERROR: Frontend not found!") + # Production mode but no frontend found - warn but don't exit + # This allows API-only usage + print("⚠️ WARNING: Frontend not found!") print(f" Expected location: {frontend_path}") print(" The frontend must be built and included in the package.") print(" Run: python build_scripts/prepare_package.py") - sys.exit(1) + print(" API endpoints will still work but the UI won't be available.") + + +# Set up frontend at module load time (needed when running via uvicorn) +setup_frontend() @app.exception_handler(Exception) @@ -88,5 +92,4 @@ async def global_exception_handler_async(request: object, exc: Exception) -> JSO if __name__ == "__main__": import uvicorn - setup_frontend() uvicorn.run(app, host="0.0.0.0", port=8000, log_level="info") From ecf08653b99511dcf881a24e160d62dc096830ce Mon Sep 17 00:00:00 2001 From: Roman Lutz Date: Fri, 6 Feb 2026 05:43:43 -0800 Subject: [PATCH 02/12] SHA1 fix --- .devcontainer/Dockerfile | 13 +++++++++---- 1 file changed, 9 insertions(+), 4 deletions(-) diff --git a/.devcontainer/Dockerfile b/.devcontainer/Dockerfile index d4f454d1bc..4ba50992c5 100644 --- a/.devcontainer/Dockerfile +++ b/.devcontainer/Dockerfile @@ -7,6 +7,9 @@ SHELL ["/bin/bash", "-c"] USER root +# Remove the Yarn repository (has expired GPG key and we don't use Yarn) +RUN rm -f /etc/apt/sources.list.d/yarn.list 2>/dev/null || true + # Install required system packages + ODBC prerequisites RUN apt-get update && apt-get install -y \ sudo \ @@ -20,15 +23,17 @@ RUN apt-get update && apt-get install -y \ && apt-get clean && rm -rf /var/lib/apt/lists/* # Install the Azure CLI, Microsoft ODBC Driver 18 & SQL tools +# Note: Debian Trixie's sqv rejects SHA1 signatures, so we use gpg directly to import the Microsoft key RUN apt-get update && apt-get install -y \ apt-transport-https \ ca-certificates 
\ gnupg \ lsb-release \ - && curl -sL https://packages.microsoft.com/config/debian/12/packages-microsoft-prod.deb \ - -o packages-microsoft-prod.deb \ - && dpkg -i packages-microsoft-prod.deb \ - && rm packages-microsoft-prod.deb \ + && curl -sL https://packages.microsoft.com/keys/microsoft.asc \ + | gpg --dearmor \ + > /usr/share/keyrings/microsoft-archive-keyring.gpg \ + && echo "deb [arch=amd64 signed-by=/usr/share/keyrings/microsoft-archive-keyring.gpg] https://packages.microsoft.com/debian/12/prod bookworm main" \ + > /etc/apt/sources.list.d/microsoft.list \ && apt-get update \ && ACCEPT_EULA=Y apt-get install -y \ msodbcsql18 \ From 5170807d33edb8db3f252a1c05058124a2958c6e Mon Sep 17 00:00:00 2001 From: Roman Lutz Date: Fri, 6 Feb 2026 06:08:56 -0800 Subject: [PATCH 03/12] use docker driver to create awareness of locally built images --- .github/workflows/docker_build.yml | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/.github/workflows/docker_build.yml b/.github/workflows/docker_build.yml index fe7695438f..de9db8f742 100644 --- a/.github/workflows/docker_build.yml +++ b/.github/workflows/docker_build.yml @@ -82,6 +82,8 @@ jobs: - name: Set up Docker Buildx uses: docker/setup-buildx-action@v3 + with: + driver: docker - name: Build production image (local) uses: docker/build-push-action@v5 @@ -94,8 +96,6 @@ jobs: build-args: | BASE_IMAGE=pyrit-devcontainer:latest PYRIT_SOURCE=local - cache-from: type=gha - cache-to: type=gha,mode=max - name: Save production image run: docker save pyrit:local-test | gzip > local.tar.gz @@ -134,6 +134,8 @@ jobs: - name: Set up Docker Buildx uses: docker/setup-buildx-action@v3 + with: + driver: docker - name: Build production image (PyPI) uses: docker/build-push-action@v5 @@ -147,8 +149,6 @@ jobs: BASE_IMAGE=pyrit-devcontainer:latest PYRIT_SOURCE=pypi PYRIT_VERSION=${{ steps.pypi-version.outputs.version }} - cache-from: type=gha - cache-to: type=gha,mode=max - name: Save production image run: docker save pyrit:pypi-test | gzip > pypi.tar.gz From c2769634be3612652cbe3e99971c54b47b35432a Mon Sep 17 00:00:00 2001 From: Natalie Olivo <23561238+nmolivo@users.noreply.github.com> Date: Fri, 6 Feb 2026 18:12:20 +0100 Subject: [PATCH 04/12] FIX MS Package Signing Key issue #1353 to enable DevContainers after Feb-01-2026 (#1356) --- .devcontainer/devcontainer_setup.sh | 7 +++++++ frontend/package-lock.json | 4 ++-- 2 files changed, 9 insertions(+), 2 deletions(-) diff --git a/.devcontainer/devcontainer_setup.sh b/.devcontainer/devcontainer_setup.sh index 822b850f4a..3657b804ff 100644 --- a/.devcontainer/devcontainer_setup.sh +++ b/.devcontainer/devcontainer_setup.sh @@ -71,6 +71,13 @@ if [ -f "package.json" ]; then # Install Playwright browsers and system dependencies for E2E testing echo "📦 Installing Playwright browsers..." + + # Remove third-party repos with SHA1 signature issues (rejected since 2026-02-01) + # Playwright deps come from Debian main repos, these aren't needed + sudo rm -f /etc/apt/sources.list.d/yarn.list \ + /etc/apt/sources.list.d/nodesource.list \ + /etc/apt/sources.list.d/microsoft.list 2>/dev/null || true + npx playwright install --with-deps chromium echo "✅ Frontend dependencies installed." 
diff --git a/frontend/package-lock.json b/frontend/package-lock.json index 1d7e358624..0846e9f179 100644 --- a/frontend/package-lock.json +++ b/frontend/package-lock.json @@ -1,12 +1,12 @@ { "name": "pyrit-frontend", - "version": "0.10.1-dev.0", + "version": "0.11.1-dev.0", "lockfileVersion": 3, "requires": true, "packages": { "": { "name": "pyrit-frontend", - "version": "0.10.1-dev.0", + "version": "0.11.1-dev.0", "dependencies": { "@fluentui/react-components": "^9.54.0", "@fluentui/react-icons": "^2.0.258", From 08a29043fc7bb2bd555ad4af50677a1845792670 Mon Sep 17 00:00:00 2001 From: Roman Lutz Date: Fri, 6 Feb 2026 09:45:51 -0800 Subject: [PATCH 05/12] MAINT address code scanning alert on url sanitization (#1361) --- pyrit/prompt_target/openai/openai_target.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/pyrit/prompt_target/openai/openai_target.py b/pyrit/prompt_target/openai/openai_target.py index 677ede76d9..1db5661991 100644 --- a/pyrit/prompt_target/openai/openai_target.py +++ b/pyrit/prompt_target/openai/openai_target.py @@ -309,7 +309,7 @@ def _warn_azure_url_path_issues(self, endpoint_url: str) -> None: """ parsed = urlparse(endpoint_url) - if ".openai.azure.com" in endpoint_url: + if parsed.netloc.endswith(".openai.azure.com"): # Check for various issues with Azure OpenAI URLs path = parsed.path.rstrip("/") From db3b5ffebe59add03e09e9e501e87d0f727ea8e7 Mon Sep 17 00:00:00 2001 From: Roman Lutz Date: Fri, 6 Feb 2026 09:46:12 -0800 Subject: [PATCH 06/12] MAINT bump package versions to address dependabot alerts (#1360) --- pyproject.toml | 6 ++-- uv.lock | 75 ++++++++++++++++++++++++++------------------------ 2 files changed, 42 insertions(+), 39 deletions(-) diff --git a/pyproject.toml b/pyproject.toml index d3e676c4f9..aac2f29297 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -29,7 +29,7 @@ dependencies = [ "aiofiles>=24,<25", "appdirs>=1.4.0", "art>=6.5.0", - "azure-core>=1.34.0", + "azure-core>=1.38.0", "azure-identity>=1.19.0", "azure-ai-contentsafety>=1.0.0", "azure-storage-blob>=12.19.0", @@ -49,7 +49,7 @@ dependencies = [ "pydantic>=2.11.5", "pyodbc>=5.1.0", "python-dotenv>=1.0.1", - "pypdf>=5.1.0", + "pypdf>=6.6.2", "reportlab>=4.4.4", "segno>=1.6.6", "scipy>=1.15.3", @@ -176,7 +176,7 @@ disable_error_code = ["empty-body"] exclude = ["doc/code/"] [build-system] -requires = ["setuptools>=64.0.0", "wheel"] +requires = ["setuptools>=64.0.0", "wheel>=0.46.2"] build-backend = "setuptools.build_meta" [tool.setuptools.packages.find] diff --git a/uv.lock b/uv.lock index 6097708942..4c64b1d8ba 100644 --- a/uv.lock +++ b/uv.lock @@ -478,15 +478,15 @@ wheels = [ [[package]] name = "azure-core" -version = "1.37.0" +version = "1.38.0" source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "requests" }, { name = "typing-extensions" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/ef/83/41c9371c8298999c67b007e308a0a3c4d6a59c6908fa9c62101f031f886f/azure_core-1.37.0.tar.gz", hash = "sha256:7064f2c11e4b97f340e8e8c6d923b822978be3016e46b7bc4aa4b337cfb48aee", size = 357620, upload-time = "2025-12-11T20:05:13.518Z" } +sdist = { url = "https://files.pythonhosted.org/packages/dc/1b/e503e08e755ea94e7d3419c9242315f888fc664211c90d032e40479022bf/azure_core-1.38.0.tar.gz", hash = "sha256:8194d2682245a3e4e3151a667c686464c3786fed7918b394d035bdcd61bb5993", size = 363033, upload-time = "2026-01-12T17:03:05.535Z" } wheels = [ - { url = 
"https://files.pythonhosted.org/packages/ee/34/a9914e676971a13d6cc671b1ed172f9804b50a3a80a143ff196e52f4c7ee/azure_core-1.37.0-py3-none-any.whl", hash = "sha256:b3abe2c59e7d6bb18b38c275a5029ff80f98990e7c90a5e646249a56630fcc19", size = 214006, upload-time = "2025-12-11T20:05:14.96Z" }, + { url = "https://files.pythonhosted.org/packages/fc/d8/b8fcba9464f02b121f39de2db2bf57f0b216fe11d014513d666e8634380d/azure_core-1.38.0-py3-none-any.whl", hash = "sha256:ab0c9b2cd71fecb1842d52c965c95285d3cfb38902f6766e4a471f1cd8905335", size = 217825, upload-time = "2026-01-12T17:03:07.291Z" }, ] [[package]] @@ -1839,11 +1839,11 @@ wheels = [ [[package]] name = "filelock" -version = "3.20.2" +version = "3.20.3" source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/c1/e0/a75dbe4bca1e7d41307323dad5ea2efdd95408f74ab2de8bd7dba9b51a1a/filelock-3.20.2.tar.gz", hash = "sha256:a2241ff4ddde2a7cebddf78e39832509cb045d18ec1a09d7248d6bfc6bfbbe64", size = 19510, upload-time = "2026-01-02T15:33:32.582Z" } +sdist = { url = "https://files.pythonhosted.org/packages/1d/65/ce7f1b70157833bf3cb851b556a37d4547ceafc158aa9b34b36782f23696/filelock-3.20.3.tar.gz", hash = "sha256:18c57ee915c7ec61cff0ecf7f0f869936c7c30191bb0cf406f1341778d0834e1", size = 19485, upload-time = "2026-01-09T17:55:05.421Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/9a/30/ab407e2ec752aa541704ed8f93c11e2a5d92c168b8a755d818b74a3c5c2d/filelock-3.20.2-py3-none-any.whl", hash = "sha256:fbba7237d6ea277175a32c54bb71ef814a8546d8601269e1bfc388de333974e8", size = 16697, upload-time = "2026-01-02T15:33:31.133Z" }, + { url = "https://files.pythonhosted.org/packages/b5/36/7fb70f04bf00bc646cd5bb45aa9eddb15e19437a28b8fb2b4a5249fac770/filelock-3.20.3-py3-none-any.whl", hash = "sha256:4b0dda527ee31078689fc205ec4f1c1bf7d56cf88b6dc9426c4f230e46c2dce1", size = 16701, upload-time = "2026-01-09T17:55:04.334Z" }, ] [[package]] @@ -5410,17 +5410,17 @@ wheels = [ [[package]] name = "protobuf" -version = "6.33.2" +version = "6.33.5" source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/34/44/e49ecff446afeec9d1a66d6bbf9adc21e3c7cea7803a920ca3773379d4f6/protobuf-6.33.2.tar.gz", hash = "sha256:56dc370c91fbb8ac85bc13582c9e373569668a290aa2e66a590c2a0d35ddb9e4", size = 444296, upload-time = "2025-12-06T00:17:53.311Z" } +sdist = { url = "https://files.pythonhosted.org/packages/ba/25/7c72c307aafc96fa87062aa6291d9f7c94836e43214d43722e86037aac02/protobuf-6.33.5.tar.gz", hash = "sha256:6ddcac2a081f8b7b9642c09406bc6a4290128fce5f471cddd165960bb9119e5c", size = 444465, upload-time = "2026-01-29T21:51:33.494Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/bc/91/1e3a34881a88697a7354ffd177e8746e97a722e5e8db101544b47e84afb1/protobuf-6.33.2-cp310-abi3-win32.whl", hash = "sha256:87eb388bd2d0f78febd8f4c8779c79247b26a5befad525008e49a6955787ff3d", size = 425603, upload-time = "2025-12-06T00:17:41.114Z" }, - { url = "https://files.pythonhosted.org/packages/64/20/4d50191997e917ae13ad0a235c8b42d8c1ab9c3e6fd455ca16d416944355/protobuf-6.33.2-cp310-abi3-win_amd64.whl", hash = "sha256:fc2a0e8b05b180e5fc0dd1559fe8ebdae21a27e81ac77728fb6c42b12c7419b4", size = 436930, upload-time = "2025-12-06T00:17:43.278Z" }, - { url = "https://files.pythonhosted.org/packages/b2/ca/7e485da88ba45c920fb3f50ae78de29ab925d9e54ef0de678306abfbb497/protobuf-6.33.2-cp39-abi3-macosx_10_9_universal2.whl", hash = "sha256:d9b19771ca75935b3a4422957bc518b0cecb978b31d1dd12037b088f6bcc0e43", size = 427621, 
upload-time = "2025-12-06T00:17:44.445Z" }, - { url = "https://files.pythonhosted.org/packages/7d/4f/f743761e41d3b2b2566748eb76bbff2b43e14d5fcab694f494a16458b05f/protobuf-6.33.2-cp39-abi3-manylinux2014_aarch64.whl", hash = "sha256:b5d3b5625192214066d99b2b605f5783483575656784de223f00a8d00754fc0e", size = 324460, upload-time = "2025-12-06T00:17:45.678Z" }, - { url = "https://files.pythonhosted.org/packages/b1/fa/26468d00a92824020f6f2090d827078c09c9c587e34cbfd2d0c7911221f8/protobuf-6.33.2-cp39-abi3-manylinux2014_s390x.whl", hash = "sha256:8cd7640aee0b7828b6d03ae518b5b4806fdfc1afe8de82f79c3454f8aef29872", size = 339168, upload-time = "2025-12-06T00:17:46.813Z" }, - { url = "https://files.pythonhosted.org/packages/56/13/333b8f421738f149d4fe5e49553bc2a2ab75235486259f689b4b91f96cec/protobuf-6.33.2-cp39-abi3-manylinux2014_x86_64.whl", hash = "sha256:1f8017c48c07ec5859106533b682260ba3d7c5567b1ca1f24297ce03384d1b4f", size = 323270, upload-time = "2025-12-06T00:17:48.253Z" }, - { url = "https://files.pythonhosted.org/packages/0e/15/4f02896cc3df04fc465010a4c6a0cd89810f54617a32a70ef531ed75d61c/protobuf-6.33.2-py3-none-any.whl", hash = "sha256:7636aad9bb01768870266de5dc009de2d1b936771b38a793f73cbbf279c91c5c", size = 170501, upload-time = "2025-12-06T00:17:52.211Z" }, + { url = "https://files.pythonhosted.org/packages/b1/79/af92d0a8369732b027e6d6084251dd8e782c685c72da161bd4a2e00fbabb/protobuf-6.33.5-cp310-abi3-win32.whl", hash = "sha256:d71b040839446bac0f4d162e758bea99c8251161dae9d0983a3b88dee345153b", size = 425769, upload-time = "2026-01-29T21:51:21.751Z" }, + { url = "https://files.pythonhosted.org/packages/55/75/bb9bc917d10e9ee13dee8607eb9ab963b7cf8be607c46e7862c748aa2af7/protobuf-6.33.5-cp310-abi3-win_amd64.whl", hash = "sha256:3093804752167bcab3998bec9f1048baae6e29505adaf1afd14a37bddede533c", size = 437118, upload-time = "2026-01-29T21:51:24.022Z" }, + { url = "https://files.pythonhosted.org/packages/a2/6b/e48dfc1191bc5b52950246275bf4089773e91cb5ba3592621723cdddca62/protobuf-6.33.5-cp39-abi3-macosx_10_9_universal2.whl", hash = "sha256:a5cb85982d95d906df1e2210e58f8e4f1e3cdc088e52c921a041f9c9a0386de5", size = 427766, upload-time = "2026-01-29T21:51:25.413Z" }, + { url = "https://files.pythonhosted.org/packages/4e/b1/c79468184310de09d75095ed1314b839eb2f72df71097db9d1404a1b2717/protobuf-6.33.5-cp39-abi3-manylinux2014_aarch64.whl", hash = "sha256:9b71e0281f36f179d00cbcb119cb19dec4d14a81393e5ea220f64b286173e190", size = 324638, upload-time = "2026-01-29T21:51:26.423Z" }, + { url = "https://files.pythonhosted.org/packages/c5/f5/65d838092fd01c44d16037953fd4c2cc851e783de9b8f02b27ec4ffd906f/protobuf-6.33.5-cp39-abi3-manylinux2014_s390x.whl", hash = "sha256:8afa18e1d6d20af15b417e728e9f60f3aa108ee76f23c3b2c07a2c3b546d3afd", size = 339411, upload-time = "2026-01-29T21:51:27.446Z" }, + { url = "https://files.pythonhosted.org/packages/9b/53/a9443aa3ca9ba8724fdfa02dd1887c1bcd8e89556b715cfbacca6b63dbec/protobuf-6.33.5-cp39-abi3-manylinux2014_x86_64.whl", hash = "sha256:cbf16ba3350fb7b889fca858fb215967792dc125b35c7976ca4818bee3521cf0", size = 323465, upload-time = "2026-01-29T21:51:28.925Z" }, + { url = "https://files.pythonhosted.org/packages/57/bf/2086963c69bdac3d7cff1cc7ff79b8ce5ea0bec6797a017e1be338a46248/protobuf-6.33.5-py3-none-any.whl", hash = "sha256:69915a973dd0f60f31a08b8318b73eab2bd6a392c79184b3612226b0a3f8ec02", size = 170687, upload-time = "2026-01-29T21:51:32.557Z" }, ] [[package]] @@ -5614,11 +5614,11 @@ wheels = [ [[package]] name = "pyasn1" -version = "0.6.1" +version = "0.6.2" source = { registry = 
"https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/ba/e9/01f1a64245b89f039897cb0130016d79f77d52669aae6ee7b159a6c4c018/pyasn1-0.6.1.tar.gz", hash = "sha256:6f580d2bdd84365380830acf45550f2511469f673cb4a5ae3857a3170128b034", size = 145322, upload-time = "2024-09-10T22:41:42.55Z" } +sdist = { url = "https://files.pythonhosted.org/packages/fe/b6/6e630dff89739fcd427e3f72b3d905ce0acb85a45d4ec3e2678718a3487f/pyasn1-0.6.2.tar.gz", hash = "sha256:9b59a2b25ba7e4f8197db7686c09fb33e658b98339fadb826e9512629017833b", size = 146586, upload-time = "2026-01-16T18:04:18.534Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/c8/f1/d6a797abb14f6283c0ddff96bbdd46937f64122b8c925cab503dd37f8214/pyasn1-0.6.1-py3-none-any.whl", hash = "sha256:0d632f46f2ba09143da3a8afe9e33fb6f92fa2320ab7e886e2d0f7672af84629", size = 83135, upload-time = "2024-09-11T16:00:36.122Z" }, + { url = "https://files.pythonhosted.org/packages/44/b5/a96872e5184f354da9c84ae119971a0a4c221fe9b27a4d94bd43f2596727/pyasn1-0.6.2-py3-none-any.whl", hash = "sha256:1eb26d860996a18e9b6ed05e7aae0e9fc21619fcee6af91cca9bad4fbea224bf", size = 83371, upload-time = "2026-01-16T18:04:17.174Z" }, ] [[package]] @@ -6040,14 +6040,14 @@ wheels = [ [[package]] name = "pypdf" -version = "6.5.0" +version = "6.6.2" source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "typing-extensions", marker = "python_full_version < '3.11'" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/4d/9b/db1056a54eda8cd44f9e5128e87e1142cb328295dad92bbec0d39f251641/pypdf-6.5.0.tar.gz", hash = "sha256:9e78950906380ae4f2ce1d9039e9008098ba6366a4d9c7423c4bdbd6e6683404", size = 5277655, upload-time = "2025-12-21T11:07:19.876Z" } +sdist = { url = "https://files.pythonhosted.org/packages/b8/bb/a44bab1ac3c54dbcf653d7b8bcdee93dddb2d3bf025a3912cacb8149a2f2/pypdf-6.6.2.tar.gz", hash = "sha256:0a3ea3b3303982333404e22d8f75d7b3144f9cf4b2970b96856391a516f9f016", size = 5281850, upload-time = "2026-01-26T11:57:55.964Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/de/db/f2e7703791a1f32532618b82789ddddb7173b9e22d97e34cc11950d8e330/pypdf-6.5.0-py3-none-any.whl", hash = "sha256:9cef8002aaedeecf648dfd9ff1ce38f20ae8d88e2534fced6630038906440b25", size = 329560, upload-time = "2025-12-21T11:07:18.173Z" }, + { url = "https://files.pythonhosted.org/packages/7d/be/549aaf1dfa4ab4aed29b09703d2fb02c4366fc1f05e880948c296c5764b9/pypdf-6.6.2-py3-none-any.whl", hash = "sha256:44c0c9811cfb3b83b28f1c3d054531d5b8b81abaedee0d8cb403650d023832ba", size = 329132, upload-time = "2026-01-26T11:57:54.099Z" }, ] [[package]] @@ -6061,7 +6061,7 @@ wheels = [ [[package]] name = "pyrit" -version = "0.10.1.dev0" +version = "0.11.1.dev0" source = { editable = "." 
} dependencies = [ { name = "aiofiles" }, @@ -6204,7 +6204,7 @@ requires-dist = [ { name = "azure-ai-ml", marker = "extra == 'gcg'", specifier = ">=1.27.1" }, { name = "azure-cognitiveservices-speech", marker = "extra == 'all'", specifier = ">=1.44.0" }, { name = "azure-cognitiveservices-speech", marker = "extra == 'speech'", specifier = ">=1.46.0" }, - { name = "azure-core", specifier = ">=1.34.0" }, + { name = "azure-core", specifier = ">=1.38.0" }, { name = "azure-identity", specifier = ">=1.19.0" }, { name = "azure-storage-blob", specifier = ">=12.19.0" }, { name = "azureml-mlflow", marker = "extra == 'all'", specifier = ">=1.60.0" }, @@ -6248,7 +6248,7 @@ requires-dist = [ { name = "pre-commit", marker = "extra == 'dev'", specifier = ">=4.2.0" }, { name = "pydantic", specifier = ">=2.11.5" }, { name = "pyodbc", specifier = ">=5.1.0" }, - { name = "pypdf", specifier = ">=5.1.0" }, + { name = "pypdf", specifier = ">=6.6.2" }, { name = "pytest", marker = "extra == 'dev'", specifier = ">=8.3.5" }, { name = "pytest-asyncio", marker = "extra == 'dev'", specifier = ">=1.0.0" }, { name = "pytest-cov", marker = "extra == 'dev'", specifier = ">=6.1.1" }, @@ -6400,11 +6400,11 @@ wheels = [ [[package]] name = "python-multipart" -version = "0.0.21" +version = "0.0.22" source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/78/96/804520d0850c7db98e5ccb70282e29208723f0964e88ffd9d0da2f52ea09/python_multipart-0.0.21.tar.gz", hash = "sha256:7137ebd4d3bbf70ea1622998f902b97a29434a9e8dc40eb203bbcf7c2a2cba92", size = 37196, upload-time = "2025-12-17T09:24:22.446Z" } +sdist = { url = "https://files.pythonhosted.org/packages/94/01/979e98d542a70714b0cb2b6728ed0b7c46792b695e3eaec3e20711271ca3/python_multipart-0.0.22.tar.gz", hash = "sha256:7340bef99a7e0032613f56dc36027b959fd3b30a787ed62d310e951f7c3a3a58", size = 37612, upload-time = "2026-01-25T10:15:56.219Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/aa/76/03af049af4dcee5d27442f71b6924f01f3efb5d2bd34f23fcd563f2cc5f5/python_multipart-0.0.21-py3-none-any.whl", hash = "sha256:cf7a6713e01c87aa35387f4774e812c4361150938d20d232800f75ffcf266090", size = 24541, upload-time = "2025-12-17T09:24:21.153Z" }, + { url = "https://files.pythonhosted.org/packages/1b/d0/397f9626e711ff749a95d96b7af99b9c566a9bb5129b8e4c10fc4d100304/python_multipart-0.0.22-py3-none-any.whl", hash = "sha256:2b2cd894c83d21bf49d702499531c7bafd057d730c201782048f7945d82de155", size = 24579, upload-time = "2026-01-25T10:15:54.811Z" }, ] [[package]] @@ -8412,11 +8412,11 @@ wheels = [ [[package]] name = "urllib3" -version = "2.6.2" +version = "2.6.3" source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/1e/24/a2a2ed9addd907787d7aa0355ba36a6cadf1768b934c652ea78acbd59dcd/urllib3-2.6.2.tar.gz", hash = "sha256:016f9c98bb7e98085cb2b4b17b87d2c702975664e4f060c6532e64d1c1a5e797", size = 432930, upload-time = "2025-12-11T15:56:40.252Z" } +sdist = { url = "https://files.pythonhosted.org/packages/c7/24/5f1b3bdffd70275f6661c76461e25f024d5a38a46f04aaca912426a2b1d3/urllib3-2.6.3.tar.gz", hash = "sha256:1b62b6884944a57dbe321509ab94fd4d3b307075e0c2eae991ac71ee15ad38ed", size = 435556, upload-time = "2026-01-07T16:24:43.925Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/6d/b9/4095b668ea3678bf6a0af005527f39de12fb026516fb3df17495a733b7f8/urllib3-2.6.2-py3-none-any.whl", hash = "sha256:ec21cddfe7724fc7cb4ba4bea7aa8e2ef36f607a4bab81aa6ce42a13dc3f03dd", size = 131182, upload-time = 
"2025-12-11T15:56:38.584Z" }, + { url = "https://files.pythonhosted.org/packages/39/08/aaaad47bc4e9dc8c725e68f9d04865dbcb2052843ff09c97b08904852d84/urllib3-2.6.3-py3-none-any.whl", hash = "sha256:bf272323e553dfb2e87d9bfd225ca7b0f467b919d7bbd355436d3fd37cb0acd4", size = 131584, upload-time = "2026-01-07T16:24:42.685Z" }, ] [[package]] @@ -8709,23 +8709,26 @@ wheels = [ [[package]] name = "werkzeug" -version = "3.1.4" +version = "3.1.5" source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "markupsafe" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/45/ea/b0f8eeb287f8df9066e56e831c7824ac6bab645dd6c7a8f4b2d767944f9b/werkzeug-3.1.4.tar.gz", hash = "sha256:cd3cd98b1b92dc3b7b3995038826c68097dcb16f9baa63abe35f20eafeb9fe5e", size = 864687, upload-time = "2025-11-29T02:15:22.841Z" } +sdist = { url = "https://files.pythonhosted.org/packages/5a/70/1469ef1d3542ae7c2c7b72bd5e3a4e6ee69d7978fa8a3af05a38eca5becf/werkzeug-3.1.5.tar.gz", hash = "sha256:6a548b0e88955dd07ccb25539d7d0cc97417ee9e179677d22c7041c8f078ce67", size = 864754, upload-time = "2026-01-08T17:49:23.247Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/2f/f9/9e082990c2585c744734f85bec79b5dae5df9c974ffee58fe421652c8e91/werkzeug-3.1.4-py3-none-any.whl", hash = "sha256:2ad50fb9ed09cc3af22c54698351027ace879a0b60a3b5edf5730b2f7d876905", size = 224960, upload-time = "2025-11-29T02:15:21.13Z" }, + { url = "https://files.pythonhosted.org/packages/ad/e4/8d97cca767bcc1be76d16fb76951608305561c6e056811587f36cb1316a8/werkzeug-3.1.5-py3-none-any.whl", hash = "sha256:5111e36e91086ece91f93268bb39b4a35c1e6f1feac762c9c822ded0a4e322dc", size = 225025, upload-time = "2026-01-08T17:49:21.859Z" }, ] [[package]] name = "wheel" -version = "0.45.1" +version = "0.46.3" source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/8a/98/2d9906746cdc6a6ef809ae6338005b3f21bb568bea3165cfc6a243fdc25c/wheel-0.45.1.tar.gz", hash = "sha256:661e1abd9198507b1409a20c02106d9670b2576e916d58f520316666abca6729", size = 107545, upload-time = "2024-11-23T00:18:23.513Z" } +dependencies = [ + { name = "packaging" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/89/24/a2eb353a6edac9a0303977c4cb048134959dd2a51b48a269dfc9dde00c8a/wheel-0.46.3.tar.gz", hash = "sha256:e3e79874b07d776c40bd6033f8ddf76a7dad46a7b8aa1b2787a83083519a1803", size = 60605, upload-time = "2026-01-22T12:39:49.136Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/0b/2c/87f3254fd8ffd29e4c02732eee68a83a1d3c346ae39bc6822dcbcb697f2b/wheel-0.45.1-py3-none-any.whl", hash = "sha256:708e7481cc80179af0e556bbf0cc00b8444c7321e2700b8d8580231d13017248", size = 72494, upload-time = "2024-11-23T00:18:21.207Z" }, + { url = "https://files.pythonhosted.org/packages/87/22/b76d483683216dde3d67cba61fb2444be8d5be289bf628c13fc0fd90e5f9/wheel-0.46.3-py3-none-any.whl", hash = "sha256:4b399d56c9d9338230118d705d9737a2a468ccca63d5e813e2a4fc7815d8bc4d", size = 30557, upload-time = "2026-01-22T12:39:48.099Z" }, ] [[package]] From 25106e4302bfb5324d2bab3e2bf5e18ccb186939 Mon Sep 17 00:00:00 2001 From: Roman Lutz Date: Fri, 6 Feb 2026 09:46:32 -0800 Subject: [PATCH 07/12] MAINT mypy fixes (#1359) --- pyrit/auth/manual_copilot_authenticator.py | 6 +++-- .../attack/component/conversation_manager.py | 5 ++-- .../tokenizer_template_normalizer.py | 2 +- .../hugging_face/hugging_face_chat_target.py | 27 +++++++++++-------- 4 files changed, 24 insertions(+), 16 deletions(-) diff --git 
a/pyrit/auth/manual_copilot_authenticator.py b/pyrit/auth/manual_copilot_authenticator.py index 175db871e1..8dca7293df 100644 --- a/pyrit/auth/manual_copilot_authenticator.py +++ b/pyrit/auth/manual_copilot_authenticator.py @@ -59,7 +59,9 @@ def __init__(self, *, access_token: Optional[str] = None) -> None: self._access_token = resolved_token try: - self._claims = jwt.decode(resolved_token, algorithms=["RS256"], options={"verify_signature": False}) + self._claims: dict[str, Any] = jwt.decode( + resolved_token, algorithms=["RS256"], options={"verify_signature": False} + ) except jwt.exceptions.DecodeError as e: raise ValueError(f"Failed to decode access_token as JWT: {e}") @@ -97,7 +99,7 @@ async def get_claims(self) -> dict[str, Any]: Returns: dict[str, Any]: The JWT claims decoded from the access token. """ - return self._claims # type: ignore + return self._claims async def refresh_token_async(self) -> str: """ diff --git a/pyrit/executor/attack/component/conversation_manager.py b/pyrit/executor/attack/component/conversation_manager.py index 84fcb93a2e..75228c0f2d 100644 --- a/pyrit/executor/attack/component/conversation_manager.py +++ b/pyrit/executor/attack/component/conversation_manager.py @@ -4,12 +4,13 @@ import logging import uuid from dataclasses import dataclass, field -from typing import TYPE_CHECKING, Any, Dict, List, Optional, Sequence +from typing import TYPE_CHECKING, Any, Dict, List, Optional, Sequence, Union from pyrit.common.utils import combine_dict from pyrit.executor.attack.component.prepended_conversation_config import ( PrependedConversationConfig, ) +from pyrit.identifiers import TargetIdentifier from pyrit.memory import CentralMemory from pyrit.message_normalizer import ConversationContextNormalizer from pyrit.models import ChatMessageRole, Message, MessagePiece, Score @@ -54,7 +55,7 @@ def get_adversarial_chat_messages( *, adversarial_chat_conversation_id: str, attack_identifier: Dict[str, str], - adversarial_chat_target_identifier: Dict[str, str], + adversarial_chat_target_identifier: Union[TargetIdentifier, Dict[str, Any]], labels: Optional[Dict[str, str]] = None, ) -> List[Message]: """ diff --git a/pyrit/message_normalizer/tokenizer_template_normalizer.py b/pyrit/message_normalizer/tokenizer_template_normalizer.py index ce8813a232..b62e3b5234 100644 --- a/pyrit/message_normalizer/tokenizer_template_normalizer.py +++ b/pyrit/message_normalizer/tokenizer_template_normalizer.py @@ -126,7 +126,7 @@ def _load_tokenizer(model_name: str, token: Optional[str]) -> "PreTrainedTokeniz return cast( PreTrainedTokenizerBase, - AutoTokenizer.from_pretrained(model_name, token=token or None), # type: ignore[no-untyped-call] + AutoTokenizer.from_pretrained(model_name, token=token or None), # type: ignore[no-untyped-call, unused-ignore] ) @classmethod diff --git a/pyrit/prompt_target/hugging_face/hugging_face_chat_target.py b/pyrit/prompt_target/hugging_face/hugging_face_chat_target.py index 5fe1104a8b..f320248e2c 100644 --- a/pyrit/prompt_target/hugging_face/hugging_face_chat_target.py +++ b/pyrit/prompt_target/hugging_face/hugging_face_chat_target.py @@ -5,11 +5,12 @@ import logging import os from pathlib import Path -from typing import TYPE_CHECKING, Any, Optional +from typing import TYPE_CHECKING, Any, Optional, cast from transformers import ( AutoModelForCausalLM, AutoTokenizer, + BatchEncoding, PretrainedConfig, ) @@ -167,7 +168,7 @@ def _load_from_path(self, path: str, **kwargs: Any) -> None: **kwargs: Additional keyword arguments to pass to the model loader. 
""" logger.info(f"Loading model and tokenizer from path: {path}...") - self.tokenizer = AutoTokenizer.from_pretrained( # type: ignore[no-untyped-call] + self.tokenizer = AutoTokenizer.from_pretrained( # type: ignore[no-untyped-call, unused-ignore] path, trust_remote_code=self.trust_remote_code ) self.model = AutoModelForCausalLM.from_pretrained(path, trust_remote_code=self.trust_remote_code, **kwargs) @@ -245,7 +246,7 @@ async def load_model_and_tokenizer(self) -> None: # Load the tokenizer and model from the specified directory logger.info(f"Loading model {self.model_id} from cache path: {cache_dir}...") - self.tokenizer = AutoTokenizer.from_pretrained( # type: ignore[no-untyped-call] + self.tokenizer = AutoTokenizer.from_pretrained( # type: ignore[no-untyped-call, unused-ignore] self.model_id, cache_dir=cache_dir, trust_remote_code=self.trust_remote_code ) self.model = AutoModelForCausalLM.from_pretrained( @@ -329,8 +330,9 @@ async def send_prompt_async(self, *, message: Message) -> list[Message]: generated_tokens = generated_ids[0][input_length:] # Decode the assistant's response from the generated token IDs - assistant_response = self.tokenizer.decode( - generated_tokens, skip_special_tokens=self.skip_special_tokens + assistant_response = cast( + str, + self.tokenizer.decode(generated_tokens, skip_special_tokens=self.skip_special_tokens), ).strip() if not assistant_response: @@ -369,12 +371,15 @@ def _apply_chat_template(self, messages: list[dict[str, str]]) -> Any: logger.info("Tokenizer has a chat template. Applying it to the input messages.") # Apply the chat template to format and tokenize the messages - tokenized_chat = self.tokenizer.apply_chat_template( - messages, - tokenize=True, - add_generation_prompt=True, - return_tensors=self.tensor_format, - return_dict=True, + tokenized_chat = cast( + BatchEncoding, + self.tokenizer.apply_chat_template( + messages, + tokenize=True, + add_generation_prompt=True, + return_tensors=self.tensor_format, + return_dict=True, + ), ).to(self.device) return tokenized_chat else: From 1463ef801c852b5f57bc385c1e29a21b19604dd5 Mon Sep 17 00:00:00 2001 From: Roman Lutz Date: Fri, 6 Feb 2026 09:46:58 -0800 Subject: [PATCH 08/12] MAINT upgrade vite in response to npm audit (#1358) --- frontend/package-lock.json | 416 +++++++++++++++++++++++++------------ frontend/package.json | 2 +- 2 files changed, 283 insertions(+), 135 deletions(-) diff --git a/frontend/package-lock.json b/frontend/package-lock.json index 0846e9f179..9816474de9 100644 --- a/frontend/package-lock.json +++ b/frontend/package-lock.json @@ -37,7 +37,7 @@ "ts-jest": "^29.2.5", "ts-node": "^10.9.2", "typescript": "^5.6.3", - "vite": "^5.4.11" + "vite": "^7.3.1" } }, "node_modules/@adobe/css-tools": { @@ -644,9 +644,9 @@ "license": "MIT" }, "node_modules/@esbuild/aix-ppc64": { - "version": "0.21.5", - "resolved": "https://registry.npmjs.org/@esbuild/aix-ppc64/-/aix-ppc64-0.21.5.tgz", - "integrity": "sha512-1SDgH6ZSPTlggy1yI6+Dbkiz8xzpHJEVAlF/AM1tHPLsf5STom9rwtjE4hKAF20FfXXNTFqEYXyJNWh1GiZedQ==", + "version": "0.27.3", + "resolved": "https://registry.npmjs.org/@esbuild/aix-ppc64/-/aix-ppc64-0.27.3.tgz", + "integrity": "sha512-9fJMTNFTWZMh5qwrBItuziu834eOCUcEqymSH7pY+zoMVEZg3gcPuBNxH1EvfVYe9h0x/Ptw8KBzv7qxb7l8dg==", "cpu": [ "ppc64" ], @@ -657,13 +657,13 @@ "aix" ], "engines": { - "node": ">=12" + "node": ">=18" } }, "node_modules/@esbuild/android-arm": { - "version": "0.21.5", - "resolved": "https://registry.npmjs.org/@esbuild/android-arm/-/android-arm-0.21.5.tgz", - "integrity": 
"sha512-vCPvzSjpPHEi1siZdlvAlsPxXl7WbOVUBBAowWug4rJHb68Ox8KualB+1ocNvT5fjv6wpkX6o/iEpbDrf68zcg==", + "version": "0.27.3", + "resolved": "https://registry.npmjs.org/@esbuild/android-arm/-/android-arm-0.27.3.tgz", + "integrity": "sha512-i5D1hPY7GIQmXlXhs2w8AWHhenb00+GxjxRncS2ZM7YNVGNfaMxgzSGuO8o8SJzRc/oZwU2bcScvVERk03QhzA==", "cpu": [ "arm" ], @@ -674,13 +674,13 @@ "android" ], "engines": { - "node": ">=12" + "node": ">=18" } }, "node_modules/@esbuild/android-arm64": { - "version": "0.21.5", - "resolved": "https://registry.npmjs.org/@esbuild/android-arm64/-/android-arm64-0.21.5.tgz", - "integrity": "sha512-c0uX9VAUBQ7dTDCjq+wdyGLowMdtR/GoC2U5IYk/7D1H1JYC0qseD7+11iMP2mRLN9RcCMRcjC4YMclCzGwS/A==", + "version": "0.27.3", + "resolved": "https://registry.npmjs.org/@esbuild/android-arm64/-/android-arm64-0.27.3.tgz", + "integrity": "sha512-YdghPYUmj/FX2SYKJ0OZxf+iaKgMsKHVPF1MAq/P8WirnSpCStzKJFjOjzsW0QQ7oIAiccHdcqjbHmJxRb/dmg==", "cpu": [ "arm64" ], @@ -691,13 +691,13 @@ "android" ], "engines": { - "node": ">=12" + "node": ">=18" } }, "node_modules/@esbuild/android-x64": { - "version": "0.21.5", - "resolved": "https://registry.npmjs.org/@esbuild/android-x64/-/android-x64-0.21.5.tgz", - "integrity": "sha512-D7aPRUUNHRBwHxzxRvp856rjUHRFW1SdQATKXH2hqA0kAZb1hKmi02OpYRacl0TxIGz/ZmXWlbZgjwWYaCakTA==", + "version": "0.27.3", + "resolved": "https://registry.npmjs.org/@esbuild/android-x64/-/android-x64-0.27.3.tgz", + "integrity": "sha512-IN/0BNTkHtk8lkOM8JWAYFg4ORxBkZQf9zXiEOfERX/CzxW3Vg1ewAhU7QSWQpVIzTW+b8Xy+lGzdYXV6UZObQ==", "cpu": [ "x64" ], @@ -708,13 +708,13 @@ "android" ], "engines": { - "node": ">=12" + "node": ">=18" } }, "node_modules/@esbuild/darwin-arm64": { - "version": "0.21.5", - "resolved": "https://registry.npmjs.org/@esbuild/darwin-arm64/-/darwin-arm64-0.21.5.tgz", - "integrity": "sha512-DwqXqZyuk5AiWWf3UfLiRDJ5EDd49zg6O9wclZ7kUMv2WRFr4HKjXp/5t8JZ11QbQfUS6/cRCKGwYhtNAY88kQ==", + "version": "0.27.3", + "resolved": "https://registry.npmjs.org/@esbuild/darwin-arm64/-/darwin-arm64-0.27.3.tgz", + "integrity": "sha512-Re491k7ByTVRy0t3EKWajdLIr0gz2kKKfzafkth4Q8A5n1xTHrkqZgLLjFEHVD+AXdUGgQMq+Godfq45mGpCKg==", "cpu": [ "arm64" ], @@ -725,13 +725,13 @@ "darwin" ], "engines": { - "node": ">=12" + "node": ">=18" } }, "node_modules/@esbuild/darwin-x64": { - "version": "0.21.5", - "resolved": "https://registry.npmjs.org/@esbuild/darwin-x64/-/darwin-x64-0.21.5.tgz", - "integrity": "sha512-se/JjF8NlmKVG4kNIuyWMV/22ZaerB+qaSi5MdrXtd6R08kvs2qCN4C09miupktDitvh8jRFflwGFBQcxZRjbw==", + "version": "0.27.3", + "resolved": "https://registry.npmjs.org/@esbuild/darwin-x64/-/darwin-x64-0.27.3.tgz", + "integrity": "sha512-vHk/hA7/1AckjGzRqi6wbo+jaShzRowYip6rt6q7VYEDX4LEy1pZfDpdxCBnGtl+A5zq8iXDcyuxwtv3hNtHFg==", "cpu": [ "x64" ], @@ -742,13 +742,13 @@ "darwin" ], "engines": { - "node": ">=12" + "node": ">=18" } }, "node_modules/@esbuild/freebsd-arm64": { - "version": "0.21.5", - "resolved": "https://registry.npmjs.org/@esbuild/freebsd-arm64/-/freebsd-arm64-0.21.5.tgz", - "integrity": "sha512-5JcRxxRDUJLX8JXp/wcBCy3pENnCgBR9bN6JsY4OmhfUtIHe3ZW0mawA7+RDAcMLrMIZaf03NlQiX9DGyB8h4g==", + "version": "0.27.3", + "resolved": "https://registry.npmjs.org/@esbuild/freebsd-arm64/-/freebsd-arm64-0.27.3.tgz", + "integrity": "sha512-ipTYM2fjt3kQAYOvo6vcxJx3nBYAzPjgTCk7QEgZG8AUO3ydUhvelmhrbOheMnGOlaSFUoHXB6un+A7q4ygY9w==", "cpu": [ "arm64" ], @@ -759,13 +759,13 @@ "freebsd" ], "engines": { - "node": ">=12" + "node": ">=18" } }, "node_modules/@esbuild/freebsd-x64": { - "version": "0.21.5", - "resolved": 
"https://registry.npmjs.org/@esbuild/freebsd-x64/-/freebsd-x64-0.21.5.tgz", - "integrity": "sha512-J95kNBj1zkbMXtHVH29bBriQygMXqoVQOQYA+ISs0/2l3T9/kj42ow2mpqerRBxDJnmkUDCaQT/dfNXWX/ZZCQ==", + "version": "0.27.3", + "resolved": "https://registry.npmjs.org/@esbuild/freebsd-x64/-/freebsd-x64-0.27.3.tgz", + "integrity": "sha512-dDk0X87T7mI6U3K9VjWtHOXqwAMJBNN2r7bejDsc+j03SEjtD9HrOl8gVFByeM0aJksoUuUVU9TBaZa2rgj0oA==", "cpu": [ "x64" ], @@ -776,13 +776,13 @@ "freebsd" ], "engines": { - "node": ">=12" + "node": ">=18" } }, "node_modules/@esbuild/linux-arm": { - "version": "0.21.5", - "resolved": "https://registry.npmjs.org/@esbuild/linux-arm/-/linux-arm-0.21.5.tgz", - "integrity": "sha512-bPb5AHZtbeNGjCKVZ9UGqGwo8EUu4cLq68E95A53KlxAPRmUyYv2D6F0uUI65XisGOL1hBP5mTronbgo+0bFcA==", + "version": "0.27.3", + "resolved": "https://registry.npmjs.org/@esbuild/linux-arm/-/linux-arm-0.27.3.tgz", + "integrity": "sha512-s6nPv2QkSupJwLYyfS+gwdirm0ukyTFNl3KTgZEAiJDd+iHZcbTPPcWCcRYH+WlNbwChgH2QkE9NSlNrMT8Gfw==", "cpu": [ "arm" ], @@ -793,13 +793,13 @@ "linux" ], "engines": { - "node": ">=12" + "node": ">=18" } }, "node_modules/@esbuild/linux-arm64": { - "version": "0.21.5", - "resolved": "https://registry.npmjs.org/@esbuild/linux-arm64/-/linux-arm64-0.21.5.tgz", - "integrity": "sha512-ibKvmyYzKsBeX8d8I7MH/TMfWDXBF3db4qM6sy+7re0YXya+K1cem3on9XgdT2EQGMu4hQyZhan7TeQ8XkGp4Q==", + "version": "0.27.3", + "resolved": "https://registry.npmjs.org/@esbuild/linux-arm64/-/linux-arm64-0.27.3.tgz", + "integrity": "sha512-sZOuFz/xWnZ4KH3YfFrKCf1WyPZHakVzTiqji3WDc0BCl2kBwiJLCXpzLzUBLgmp4veFZdvN5ChW4Eq/8Fc2Fg==", "cpu": [ "arm64" ], @@ -810,13 +810,13 @@ "linux" ], "engines": { - "node": ">=12" + "node": ">=18" } }, "node_modules/@esbuild/linux-ia32": { - "version": "0.21.5", - "resolved": "https://registry.npmjs.org/@esbuild/linux-ia32/-/linux-ia32-0.21.5.tgz", - "integrity": "sha512-YvjXDqLRqPDl2dvRODYmmhz4rPeVKYvppfGYKSNGdyZkA01046pLWyRKKI3ax8fbJoK5QbxblURkwK/MWY18Tg==", + "version": "0.27.3", + "resolved": "https://registry.npmjs.org/@esbuild/linux-ia32/-/linux-ia32-0.27.3.tgz", + "integrity": "sha512-yGlQYjdxtLdh0a3jHjuwOrxQjOZYD/C9PfdbgJJF3TIZWnm/tMd/RcNiLngiu4iwcBAOezdnSLAwQDPqTmtTYg==", "cpu": [ "ia32" ], @@ -827,13 +827,13 @@ "linux" ], "engines": { - "node": ">=12" + "node": ">=18" } }, "node_modules/@esbuild/linux-loong64": { - "version": "0.21.5", - "resolved": "https://registry.npmjs.org/@esbuild/linux-loong64/-/linux-loong64-0.21.5.tgz", - "integrity": "sha512-uHf1BmMG8qEvzdrzAqg2SIG/02+4/DHB6a9Kbya0XDvwDEKCoC8ZRWI5JJvNdUjtciBGFQ5PuBlpEOXQj+JQSg==", + "version": "0.27.3", + "resolved": "https://registry.npmjs.org/@esbuild/linux-loong64/-/linux-loong64-0.27.3.tgz", + "integrity": "sha512-WO60Sn8ly3gtzhyjATDgieJNet/KqsDlX5nRC5Y3oTFcS1l0KWba+SEa9Ja1GfDqSF1z6hif/SkpQJbL63cgOA==", "cpu": [ "loong64" ], @@ -844,13 +844,13 @@ "linux" ], "engines": { - "node": ">=12" + "node": ">=18" } }, "node_modules/@esbuild/linux-mips64el": { - "version": "0.21.5", - "resolved": "https://registry.npmjs.org/@esbuild/linux-mips64el/-/linux-mips64el-0.21.5.tgz", - "integrity": "sha512-IajOmO+KJK23bj52dFSNCMsz1QP1DqM6cwLUv3W1QwyxkyIWecfafnI555fvSGqEKwjMXVLokcV5ygHW5b3Jbg==", + "version": "0.27.3", + "resolved": "https://registry.npmjs.org/@esbuild/linux-mips64el/-/linux-mips64el-0.27.3.tgz", + "integrity": "sha512-APsymYA6sGcZ4pD6k+UxbDjOFSvPWyZhjaiPyl/f79xKxwTnrn5QUnXR5prvetuaSMsb4jgeHewIDCIWljrSxw==", "cpu": [ "mips64el" ], @@ -861,13 +861,13 @@ "linux" ], "engines": { - "node": ">=12" + "node": ">=18" } }, 
"node_modules/@esbuild/linux-ppc64": { - "version": "0.21.5", - "resolved": "https://registry.npmjs.org/@esbuild/linux-ppc64/-/linux-ppc64-0.21.5.tgz", - "integrity": "sha512-1hHV/Z4OEfMwpLO8rp7CvlhBDnjsC3CttJXIhBi+5Aj5r+MBvy4egg7wCbe//hSsT+RvDAG7s81tAvpL2XAE4w==", + "version": "0.27.3", + "resolved": "https://registry.npmjs.org/@esbuild/linux-ppc64/-/linux-ppc64-0.27.3.tgz", + "integrity": "sha512-eizBnTeBefojtDb9nSh4vvVQ3V9Qf9Df01PfawPcRzJH4gFSgrObw+LveUyDoKU3kxi5+9RJTCWlj4FjYXVPEA==", "cpu": [ "ppc64" ], @@ -878,13 +878,13 @@ "linux" ], "engines": { - "node": ">=12" + "node": ">=18" } }, "node_modules/@esbuild/linux-riscv64": { - "version": "0.21.5", - "resolved": "https://registry.npmjs.org/@esbuild/linux-riscv64/-/linux-riscv64-0.21.5.tgz", - "integrity": "sha512-2HdXDMd9GMgTGrPWnJzP2ALSokE/0O5HhTUvWIbD3YdjME8JwvSCnNGBnTThKGEB91OZhzrJ4qIIxk/SBmyDDA==", + "version": "0.27.3", + "resolved": "https://registry.npmjs.org/@esbuild/linux-riscv64/-/linux-riscv64-0.27.3.tgz", + "integrity": "sha512-3Emwh0r5wmfm3ssTWRQSyVhbOHvqegUDRd0WhmXKX2mkHJe1SFCMJhagUleMq+Uci34wLSipf8Lagt4LlpRFWQ==", "cpu": [ "riscv64" ], @@ -895,13 +895,13 @@ "linux" ], "engines": { - "node": ">=12" + "node": ">=18" } }, "node_modules/@esbuild/linux-s390x": { - "version": "0.21.5", - "resolved": "https://registry.npmjs.org/@esbuild/linux-s390x/-/linux-s390x-0.21.5.tgz", - "integrity": "sha512-zus5sxzqBJD3eXxwvjN1yQkRepANgxE9lgOW2qLnmr8ikMTphkjgXu1HR01K4FJg8h1kEEDAqDcZQtbrRnB41A==", + "version": "0.27.3", + "resolved": "https://registry.npmjs.org/@esbuild/linux-s390x/-/linux-s390x-0.27.3.tgz", + "integrity": "sha512-pBHUx9LzXWBc7MFIEEL0yD/ZVtNgLytvx60gES28GcWMqil8ElCYR4kvbV2BDqsHOvVDRrOxGySBM9Fcv744hw==", "cpu": [ "s390x" ], @@ -912,13 +912,13 @@ "linux" ], "engines": { - "node": ">=12" + "node": ">=18" } }, "node_modules/@esbuild/linux-x64": { - "version": "0.21.5", - "resolved": "https://registry.npmjs.org/@esbuild/linux-x64/-/linux-x64-0.21.5.tgz", - "integrity": "sha512-1rYdTpyv03iycF1+BhzrzQJCdOuAOtaqHTWJZCWvijKD2N5Xu0TtVC8/+1faWqcP9iBCWOmjmhoH94dH82BxPQ==", + "version": "0.27.3", + "resolved": "https://registry.npmjs.org/@esbuild/linux-x64/-/linux-x64-0.27.3.tgz", + "integrity": "sha512-Czi8yzXUWIQYAtL/2y6vogER8pvcsOsk5cpwL4Gk5nJqH5UZiVByIY8Eorm5R13gq+DQKYg0+JyQoytLQas4dA==", "cpu": [ "x64" ], @@ -929,13 +929,30 @@ "linux" ], "engines": { - "node": ">=12" + "node": ">=18" + } + }, + "node_modules/@esbuild/netbsd-arm64": { + "version": "0.27.3", + "resolved": "https://registry.npmjs.org/@esbuild/netbsd-arm64/-/netbsd-arm64-0.27.3.tgz", + "integrity": "sha512-sDpk0RgmTCR/5HguIZa9n9u+HVKf40fbEUt+iTzSnCaGvY9kFP0YKBWZtJaraonFnqef5SlJ8/TiPAxzyS+UoA==", + "cpu": [ + "arm64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "netbsd" + ], + "engines": { + "node": ">=18" } }, "node_modules/@esbuild/netbsd-x64": { - "version": "0.21.5", - "resolved": "https://registry.npmjs.org/@esbuild/netbsd-x64/-/netbsd-x64-0.21.5.tgz", - "integrity": "sha512-Woi2MXzXjMULccIwMnLciyZH4nCIMpWQAs049KEeMvOcNADVxo0UBIQPfSmxB3CWKedngg7sWZdLvLczpe0tLg==", + "version": "0.27.3", + "resolved": "https://registry.npmjs.org/@esbuild/netbsd-x64/-/netbsd-x64-0.27.3.tgz", + "integrity": "sha512-P14lFKJl/DdaE00LItAukUdZO5iqNH7+PjoBm+fLQjtxfcfFE20Xf5CrLsmZdq5LFFZzb5JMZ9grUwvtVYzjiA==", "cpu": [ "x64" ], @@ -946,13 +963,30 @@ "netbsd" ], "engines": { - "node": ">=12" + "node": ">=18" + } + }, + "node_modules/@esbuild/openbsd-arm64": { + "version": "0.27.3", + "resolved": 
"https://registry.npmjs.org/@esbuild/openbsd-arm64/-/openbsd-arm64-0.27.3.tgz", + "integrity": "sha512-AIcMP77AvirGbRl/UZFTq5hjXK+2wC7qFRGoHSDrZ5v5b8DK/GYpXW3CPRL53NkvDqb9D+alBiC/dV0Fb7eJcw==", + "cpu": [ + "arm64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "openbsd" + ], + "engines": { + "node": ">=18" } }, "node_modules/@esbuild/openbsd-x64": { - "version": "0.21.5", - "resolved": "https://registry.npmjs.org/@esbuild/openbsd-x64/-/openbsd-x64-0.21.5.tgz", - "integrity": "sha512-HLNNw99xsvx12lFBUwoT8EVCsSvRNDVxNpjZ7bPn947b8gJPzeHWyNVhFsaerc0n3TsbOINvRP2byTZ5LKezow==", + "version": "0.27.3", + "resolved": "https://registry.npmjs.org/@esbuild/openbsd-x64/-/openbsd-x64-0.27.3.tgz", + "integrity": "sha512-DnW2sRrBzA+YnE70LKqnM3P+z8vehfJWHXECbwBmH/CU51z6FiqTQTHFenPlHmo3a8UgpLyH3PT+87OViOh1AQ==", "cpu": [ "x64" ], @@ -963,13 +997,30 @@ "openbsd" ], "engines": { - "node": ">=12" + "node": ">=18" + } + }, + "node_modules/@esbuild/openharmony-arm64": { + "version": "0.27.3", + "resolved": "https://registry.npmjs.org/@esbuild/openharmony-arm64/-/openharmony-arm64-0.27.3.tgz", + "integrity": "sha512-NinAEgr/etERPTsZJ7aEZQvvg/A6IsZG/LgZy+81wON2huV7SrK3e63dU0XhyZP4RKGyTm7aOgmQk0bGp0fy2g==", + "cpu": [ + "arm64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "openharmony" + ], + "engines": { + "node": ">=18" } }, "node_modules/@esbuild/sunos-x64": { - "version": "0.21.5", - "resolved": "https://registry.npmjs.org/@esbuild/sunos-x64/-/sunos-x64-0.21.5.tgz", - "integrity": "sha512-6+gjmFpfy0BHU5Tpptkuh8+uw3mnrvgs+dSPQXQOv3ekbordwnzTVEb4qnIvQcYXq6gzkyTnoZ9dZG+D4garKg==", + "version": "0.27.3", + "resolved": "https://registry.npmjs.org/@esbuild/sunos-x64/-/sunos-x64-0.27.3.tgz", + "integrity": "sha512-PanZ+nEz+eWoBJ8/f8HKxTTD172SKwdXebZ0ndd953gt1HRBbhMsaNqjTyYLGLPdoWHy4zLU7bDVJztF5f3BHA==", "cpu": [ "x64" ], @@ -980,13 +1031,13 @@ "sunos" ], "engines": { - "node": ">=12" + "node": ">=18" } }, "node_modules/@esbuild/win32-arm64": { - "version": "0.21.5", - "resolved": "https://registry.npmjs.org/@esbuild/win32-arm64/-/win32-arm64-0.21.5.tgz", - "integrity": "sha512-Z0gOTd75VvXqyq7nsl93zwahcTROgqvuAcYDUr+vOv8uHhNSKROyU961kgtCD1e95IqPKSQKH7tBTslnS3tA8A==", + "version": "0.27.3", + "resolved": "https://registry.npmjs.org/@esbuild/win32-arm64/-/win32-arm64-0.27.3.tgz", + "integrity": "sha512-B2t59lWWYrbRDw/tjiWOuzSsFh1Y/E95ofKz7rIVYSQkUYBjfSgf6oeYPNWHToFRr2zx52JKApIcAS/D5TUBnA==", "cpu": [ "arm64" ], @@ -997,13 +1048,13 @@ "win32" ], "engines": { - "node": ">=12" + "node": ">=18" } }, "node_modules/@esbuild/win32-ia32": { - "version": "0.21.5", - "resolved": "https://registry.npmjs.org/@esbuild/win32-ia32/-/win32-ia32-0.21.5.tgz", - "integrity": "sha512-SWXFF1CL2RVNMaVs+BBClwtfZSvDgtL//G/smwAc5oVK/UPu2Gu9tIaRgFmYFFKrmg3SyAjSrElf0TiJ1v8fYA==", + "version": "0.27.3", + "resolved": "https://registry.npmjs.org/@esbuild/win32-ia32/-/win32-ia32-0.27.3.tgz", + "integrity": "sha512-QLKSFeXNS8+tHW7tZpMtjlNb7HKau0QDpwm49u0vUp9y1WOF+PEzkU84y9GqYaAVW8aH8f3GcBck26jh54cX4Q==", "cpu": [ "ia32" ], @@ -1014,13 +1065,13 @@ "win32" ], "engines": { - "node": ">=12" + "node": ">=18" } }, "node_modules/@esbuild/win32-x64": { - "version": "0.21.5", - "resolved": "https://registry.npmjs.org/@esbuild/win32-x64/-/win32-x64-0.21.5.tgz", - "integrity": "sha512-tQd/1efJuzPC6rCFwEvLtci/xNFcTZknmXs98FYDfGE4wP9ClFV98nyKrzJKVPMhdDnjzLhdUyMX4PsQAPjwIw==", + "version": "0.27.3", + "resolved": "https://registry.npmjs.org/@esbuild/win32-x64/-/win32-x64-0.27.3.tgz", + "integrity": 
"sha512-4uJGhsxuptu3OcpVAzli+/gWusVGwZZHTlS63hh++ehExkVT8SgiEf7/uC/PclrPPkLhZqGgCTjd0VWLo6xMqA==", "cpu": [ "x64" ], @@ -1031,7 +1082,7 @@ "win32" ], "engines": { - "node": ">=12" + "node": ">=18" } }, "node_modules/@eslint-community/eslint-utils": { @@ -5366,9 +5417,9 @@ } }, "node_modules/esbuild": { - "version": "0.21.5", - "resolved": "https://registry.npmjs.org/esbuild/-/esbuild-0.21.5.tgz", - "integrity": "sha512-mg3OPMV4hXywwpoDxu3Qda5xCKQi+vCTZq8S9J/EpkhB2HzKXq4SNFZE3+NK93JYxc8VMSep+lOUSC/RVKaBqw==", + "version": "0.27.3", + "resolved": "https://registry.npmjs.org/esbuild/-/esbuild-0.27.3.tgz", + "integrity": "sha512-8VwMnyGCONIs6cWue2IdpHxHnAjzxnw2Zr7MkVxB2vjmQ2ivqGFb4LEG3SMnv0Gb2F/G/2yA8zUaiL1gywDCCg==", "dev": true, "hasInstallScript": true, "license": "MIT", @@ -5376,32 +5427,35 @@ "esbuild": "bin/esbuild" }, "engines": { - "node": ">=12" + "node": ">=18" }, "optionalDependencies": { - "@esbuild/aix-ppc64": "0.21.5", - "@esbuild/android-arm": "0.21.5", - "@esbuild/android-arm64": "0.21.5", - "@esbuild/android-x64": "0.21.5", - "@esbuild/darwin-arm64": "0.21.5", - "@esbuild/darwin-x64": "0.21.5", - "@esbuild/freebsd-arm64": "0.21.5", - "@esbuild/freebsd-x64": "0.21.5", - "@esbuild/linux-arm": "0.21.5", - "@esbuild/linux-arm64": "0.21.5", - "@esbuild/linux-ia32": "0.21.5", - "@esbuild/linux-loong64": "0.21.5", - "@esbuild/linux-mips64el": "0.21.5", - "@esbuild/linux-ppc64": "0.21.5", - "@esbuild/linux-riscv64": "0.21.5", - "@esbuild/linux-s390x": "0.21.5", - "@esbuild/linux-x64": "0.21.5", - "@esbuild/netbsd-x64": "0.21.5", - "@esbuild/openbsd-x64": "0.21.5", - "@esbuild/sunos-x64": "0.21.5", - "@esbuild/win32-arm64": "0.21.5", - "@esbuild/win32-ia32": "0.21.5", - "@esbuild/win32-x64": "0.21.5" + "@esbuild/aix-ppc64": "0.27.3", + "@esbuild/android-arm": "0.27.3", + "@esbuild/android-arm64": "0.27.3", + "@esbuild/android-x64": "0.27.3", + "@esbuild/darwin-arm64": "0.27.3", + "@esbuild/darwin-x64": "0.27.3", + "@esbuild/freebsd-arm64": "0.27.3", + "@esbuild/freebsd-x64": "0.27.3", + "@esbuild/linux-arm": "0.27.3", + "@esbuild/linux-arm64": "0.27.3", + "@esbuild/linux-ia32": "0.27.3", + "@esbuild/linux-loong64": "0.27.3", + "@esbuild/linux-mips64el": "0.27.3", + "@esbuild/linux-ppc64": "0.27.3", + "@esbuild/linux-riscv64": "0.27.3", + "@esbuild/linux-s390x": "0.27.3", + "@esbuild/linux-x64": "0.27.3", + "@esbuild/netbsd-arm64": "0.27.3", + "@esbuild/netbsd-x64": "0.27.3", + "@esbuild/openbsd-arm64": "0.27.3", + "@esbuild/openbsd-x64": "0.27.3", + "@esbuild/openharmony-arm64": "0.27.3", + "@esbuild/sunos-x64": "0.27.3", + "@esbuild/win32-arm64": "0.27.3", + "@esbuild/win32-ia32": "0.27.3", + "@esbuild/win32-x64": "0.27.3" } }, "node_modules/escalade": { @@ -8985,6 +9039,54 @@ "node": "*" } }, + "node_modules/tinyglobby": { + "version": "0.2.15", + "resolved": "https://registry.npmjs.org/tinyglobby/-/tinyglobby-0.2.15.tgz", + "integrity": "sha512-j2Zq4NyQYG5XMST4cbs02Ak8iJUdxRM0XI5QyxXuZOzKOINmWurp3smXu3y5wDcJrptwpSjgXHzIQxR0omXljQ==", + "dev": true, + "license": "MIT", + "dependencies": { + "fdir": "^6.5.0", + "picomatch": "^4.0.3" + }, + "engines": { + "node": ">=12.0.0" + }, + "funding": { + "url": "https://github.com/sponsors/SuperchupuDev" + } + }, + "node_modules/tinyglobby/node_modules/fdir": { + "version": "6.5.0", + "resolved": "https://registry.npmjs.org/fdir/-/fdir-6.5.0.tgz", + "integrity": "sha512-tIbYtZbucOs0BRGqPJkshJUYdL+SDH7dVM8gjy+ERp3WAUjLEFJE+02kanyHtwjWOnwrKYBiwAmM0p4kLJAnXg==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=12.0.0" + }, + 
"peerDependencies": { + "picomatch": "^3 || ^4" + }, + "peerDependenciesMeta": { + "picomatch": { + "optional": true + } + } + }, + "node_modules/tinyglobby/node_modules/picomatch": { + "version": "4.0.3", + "resolved": "https://registry.npmjs.org/picomatch/-/picomatch-4.0.3.tgz", + "integrity": "sha512-5gTmgEY/sqK6gFXLIsQNH19lWb4ebPDLA4SdLP7dsWkIXHWlG66oPuVvXSGFPppYZz8ZDZq0dYYrbHfBCVUb1Q==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=12" + }, + "funding": { + "url": "https://github.com/sponsors/jonschlinkert" + } + }, "node_modules/tmpl": { "version": "1.0.5", "resolved": "https://registry.npmjs.org/tmpl/-/tmpl-1.0.5.tgz", @@ -9328,21 +9430,24 @@ } }, "node_modules/vite": { - "version": "5.4.21", - "resolved": "https://registry.npmjs.org/vite/-/vite-5.4.21.tgz", - "integrity": "sha512-o5a9xKjbtuhY6Bi5S3+HvbRERmouabWbyUcpXXUA1u+GNUKoROi9byOJ8M0nHbHYHkYICiMlqxkg1KkYmm25Sw==", + "version": "7.3.1", + "resolved": "https://registry.npmjs.org/vite/-/vite-7.3.1.tgz", + "integrity": "sha512-w+N7Hifpc3gRjZ63vYBXA56dvvRlNWRczTdmCBBa+CotUzAPf5b7YMdMR/8CQoeYE5LX3W4wj6RYTgonm1b9DA==", "dev": true, "license": "MIT", "dependencies": { - "esbuild": "^0.21.3", - "postcss": "^8.4.43", - "rollup": "^4.20.0" + "esbuild": "^0.27.0", + "fdir": "^6.5.0", + "picomatch": "^4.0.3", + "postcss": "^8.5.6", + "rollup": "^4.43.0", + "tinyglobby": "^0.2.15" }, "bin": { "vite": "bin/vite.js" }, "engines": { - "node": "^18.0.0 || >=20.0.0" + "node": "^20.19.0 || >=22.12.0" }, "funding": { "url": "https://github.com/vitejs/vite?sponsor=1" @@ -9351,19 +9456,25 @@ "fsevents": "~2.3.3" }, "peerDependencies": { - "@types/node": "^18.0.0 || >=20.0.0", - "less": "*", + "@types/node": "^20.19.0 || >=22.12.0", + "jiti": ">=1.21.0", + "less": "^4.0.0", "lightningcss": "^1.21.0", - "sass": "*", - "sass-embedded": "*", - "stylus": "*", - "sugarss": "*", - "terser": "^5.4.0" + "sass": "^1.70.0", + "sass-embedded": "^1.70.0", + "stylus": ">=0.54.8", + "sugarss": "^5.0.0", + "terser": "^5.16.0", + "tsx": "^4.8.1", + "yaml": "^2.4.2" }, "peerDependenciesMeta": { "@types/node": { "optional": true }, + "jiti": { + "optional": true + }, "less": { "optional": true }, @@ -9384,9 +9495,46 @@ }, "terser": { "optional": true + }, + "tsx": { + "optional": true + }, + "yaml": { + "optional": true + } + } + }, + "node_modules/vite/node_modules/fdir": { + "version": "6.5.0", + "resolved": "https://registry.npmjs.org/fdir/-/fdir-6.5.0.tgz", + "integrity": "sha512-tIbYtZbucOs0BRGqPJkshJUYdL+SDH7dVM8gjy+ERp3WAUjLEFJE+02kanyHtwjWOnwrKYBiwAmM0p4kLJAnXg==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=12.0.0" + }, + "peerDependencies": { + "picomatch": "^3 || ^4" + }, + "peerDependenciesMeta": { + "picomatch": { + "optional": true } } }, + "node_modules/vite/node_modules/picomatch": { + "version": "4.0.3", + "resolved": "https://registry.npmjs.org/picomatch/-/picomatch-4.0.3.tgz", + "integrity": "sha512-5gTmgEY/sqK6gFXLIsQNH19lWb4ebPDLA4SdLP7dsWkIXHWlG66oPuVvXSGFPppYZz8ZDZq0dYYrbHfBCVUb1Q==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=12" + }, + "funding": { + "url": "https://github.com/sponsors/jonschlinkert" + } + }, "node_modules/w3c-xmlserializer": { "version": "4.0.0", "resolved": "https://registry.npmjs.org/w3c-xmlserializer/-/w3c-xmlserializer-4.0.0.tgz", diff --git a/frontend/package.json b/frontend/package.json index 324453afc3..db87fb36c9 100644 --- a/frontend/package.json +++ b/frontend/package.json @@ -49,6 +49,6 @@ "ts-jest": "^29.2.5", "ts-node": "^10.9.2", "typescript": "^5.6.3", - 
"vite": "^5.4.11" + "vite": "^7.3.1" } } From 8006366cd28beaad1ad1d69199db66d97bed26c6 Mon Sep 17 00:00:00 2001 From: jsong468 Date: Fri, 6 Feb 2026 11:21:40 -0800 Subject: [PATCH 09/12] FEAT: Target Registry and AIRT Targets Initializer (#1320) --- .env_example | 30 +- doc/api.rst | 1 + doc/code/registry/2_instance_registry.ipynb | 76 +++- doc/code/registry/2_instance_registry.py | 43 +- pyrit/identifiers/target_identifier.py | 3 + pyrit/prompt_target/common/prompt_target.py | 4 + pyrit/registry/__init__.py | 2 + .../registry/instance_registries/__init__.py | 4 + .../instance_registries/target_registry.py | 97 ++++ pyrit/setup/initializers/__init__.py | 2 + pyrit/setup/initializers/airt_targets.py | 422 ++++++++++++++++++ .../identifiers/test_target_identifier.py | 57 +++ tests/unit/registry/test_target_registry.py | 277 ++++++++++++ .../setup/test_airt_targets_initializer.py | 221 +++++++++ 14 files changed, 1211 insertions(+), 28 deletions(-) create mode 100644 pyrit/registry/instance_registries/target_registry.py create mode 100644 pyrit/setup/initializers/airt_targets.py create mode 100644 tests/unit/registry/test_target_registry.py create mode 100644 tests/unit/setup/test_airt_targets_initializer.py diff --git a/.env_example b/.env_example index 2d63d66913..281b3db223 100644 --- a/.env_example +++ b/.env_example @@ -19,27 +19,45 @@ PLATFORM_OPENAI_CHAT_GPT4O_MODEL="gpt-4o" AZURE_OPENAI_GPT4O_ENDPOINT="https://xxxx.openai.azure.com/openai/v1" AZURE_OPENAI_GPT4O_KEY="xxxxx" AZURE_OPENAI_GPT4O_MODEL="deployment-name" -# Since deployment name may be custom and differ from the actual underlying model, -# you can specify the underlying model for identifier purposes +# Since Azure deployment name may be custom and differ from the actual underlying model, +# you can specify the underlying model for identifier purposes. If not specified, +# identifiers will default to the value of the standard MODEL environment variable. AZURE_OPENAI_GPT4O_UNDERLYING_MODEL="gpt-4o" AZURE_OPENAI_INTEGRATION_TEST_ENDPOINT="https://xxxxx.openai.azure.com/openai/v1" AZURE_OPENAI_INTEGRATION_TEST_KEY="xxxxx" AZURE_OPENAI_INTEGRATION_TEST_MODEL="deployment-name" +AZURE_OPENAI_INTEGRATION_TEST_UNDERLYING_MODEL="" AZURE_OPENAI_GPT3_5_CHAT_ENDPOINT="https://xxxxx.openai.azure.com/openai/v1" AZURE_OPENAI_GPT3_5_CHAT_KEY="xxxxx" AZURE_OPENAI_GPT3_5_CHAT_MODEL="deployment-name" +AZURE_OPENAI_GPT3_5_CHAT_UNDERLYING_MODEL="" AZURE_OPENAI_GPT4_CHAT_ENDPOINT="https://xxxxx.openai.azure.com/openai/v1" AZURE_OPENAI_GPT4_CHAT_KEY="xxxxx" AZURE_OPENAI_GPT4_CHAT_MODEL="deployment-name" +AZURE_OPENAI_GPT4_CHAT_UNDERLYING_MODEL="" + +# Endpoints that host models with fewer safety mechanisms (e.g. via adversarial fine tuning +# or content filters turned off) can be defined below and used in adversarial attack testing scenarios. 
+AZURE_OPENAI_GPT4O_UNSAFE_CHAT_ENDPOINT="https://xxxxx.openai.azure.com/openai/v1" +AZURE_OPENAI_GPT4O_UNSAFE_CHAT_KEY="xxxxx" +AZURE_OPENAI_GPT4O_UNSAFE_CHAT_MODEL="deployment-name" +AZURE_OPENAI_GPT4O_UNSAFE_CHAT_UNDERLYING_MODEL="" + +AZURE_OPENAI_GPT4O_UNSAFE_CHAT_ENDPOINT2="https://xxxxx.openai.azure.com/openai/v1" +AZURE_OPENAI_GPT4O_UNSAFE_CHAT_KEY2="xxxxx" +AZURE_OPENAI_GPT4O_UNSAFE_CHAT_MODEL2="deployment-name" +AZURE_OPENAI_GPT4O_UNSAFE_CHAT_UNDERLYING_MODEL2="" AZURE_FOUNDRY_DEEPSEEK_ENDPOINT="https://xxxxx.eastus2.models.ai.azure.com" AZURE_FOUNDRY_DEEPSEEK_KEY="xxxxx" +AZURE_FOUNDRY_DEEPSEEK_MODEL="" AZURE_FOUNDRY_PHI4_ENDPOINT="https://xxxxx.models.ai.azure.com" AZURE_CHAT_PHI4_KEY="xxxxx" +AZURE_FOUNDRY_PHI4_MODEL="" AZURE_FOUNDRY_MISTRAL_LARGE_ENDPOINT="https://xxxxx.services.ai.azure.com/openai/v1/" AZURE_FOUNDRY_MISTRAL_LARGE_KEY="xxxxx" @@ -75,6 +93,7 @@ AZURE_OPENAI_GPT5_RESPONSES_ENDPOINT="https://xxxxxxxxx.azure.com/openai/v1" AZURE_OPENAI_GPT5_COMPLETION_ENDPOINT="https://xxxxxxxxx.azure.com/openai/v1" AZURE_OPENAI_GPT5_KEY="xxxxxxx" AZURE_OPENAI_GPT5_MODEL="gpt-5" +AZURE_OPENAI_GPT5_UNDERLYING_MODEL="gpt-5" PLATFORM_OPENAI_RESPONSES_ENDPOINT="https://api.openai.com/v1" PLATFORM_OPENAI_RESPONSES_KEY="sk-xxxxx" @@ -83,6 +102,7 @@ PLATFORM_OPENAI_RESPONSES_MODEL="o4-mini" AZURE_OPENAI_RESPONSES_ENDPOINT="https://xxxxx.openai.azure.com/openai/v1" AZURE_OPENAI_RESPONSES_KEY="xxxxx" AZURE_OPENAI_RESPONSES_MODEL="o4-mini" +AZURE_OPENAI_RESPONSES_UNDERLYING_MODEL="o4-mini" OPENAI_RESPONSES_ENDPOINT=${PLATFORM_OPENAI_RESPONSES_ENDPOINT} OPENAI_RESPONSES_KEY=${PLATFORM_OPENAI_RESPONSES_KEY} @@ -103,6 +123,7 @@ PLATFORM_OPENAI_REALTIME_MODEL="gpt-4o-realtime-preview" AZURE_OPENAI_REALTIME_ENDPOINT = "wss://xxxx.openai.azure.com/openai/v1" AZURE_OPENAI_REALTIME_API_KEY = "xxxxx" AZURE_OPENAI_REALTIME_MODEL = "gpt-4o-realtime-preview" +AZURE_OPENAI_REALTIME_UNDERLYING_MODEL = "gpt-4o-realtime-preview" OPENAI_REALTIME_ENDPOINT = ${PLATFORM_OPENAI_REALTIME_ENDPOINT} OPENAI_REALTIME_API_KEY = ${PLATFORM_OPENAI_REALTIME_API_KEY} @@ -119,10 +140,12 @@ OPENAI_REALTIME_UNDERLYING_MODEL = "" OPENAI_IMAGE_ENDPOINT1 = "https://xxxxx.openai.azure.com/openai/v1" OPENAI_IMAGE_API_KEY1 = "xxxxxx" OPENAI_IMAGE_MODEL1 = "deployment-name" +OPENAI_IMAGE_UNDERLYING_MODEL1 = "dall-e-3" OPENAI_IMAGE_ENDPOINT2 = "https://api.openai.com/v1" OPENAI_IMAGE_API_KEY2 = "sk-xxxxx" OPENAI_IMAGE_MODEL2 = "dall-e-3" +OPENAI_IMAGE_UNDERLYING_MODEL2 = "dall-e-3" OPENAI_IMAGE_ENDPOINT = ${OPENAI_IMAGE_ENDPOINT2} OPENAI_IMAGE_API_KEY = ${OPENAI_IMAGE_API_KEY2} @@ -140,10 +163,12 @@ OPENAI_IMAGE_UNDERLYING_MODEL = "" OPENAI_TTS_ENDPOINT1 = "https://xxxxx.openai.azure.com/openai/v1" OPENAI_TTS_KEY1 = "xxxxxxx" OPENAI_TTS_MODEL1 = "tts" +OPENAI_TTS_UNDERLYING_MODEL1 = "tts" OPENAI_TTS_ENDPOINT2 = "https://api.openai.com/v1" OPENAI_TTS_KEY2 = "xxxxxx" OPENAI_TTS_MODEL2 = "tts-1" +OPENAI_TTS_UNDERLYING_MODEL2 = "tts-1" OPENAI_TTS_ENDPOINT = ${OPENAI_TTS_ENDPOINT2} OPENAI_TTS_KEY = ${OPENAI_TTS_KEY2} @@ -161,6 +186,7 @@ OPENAI_TTS_UNDERLYING_MODEL = "" AZURE_OPENAI_VIDEO_ENDPOINT="https://xxxxx.cognitiveservices.azure.com/openai/v1" AZURE_OPENAI_VIDEO_KEY="xxxxxxx" AZURE_OPENAI_VIDEO_MODEL="sora-2" +AZURE_OPENAI_VIDEO_UNDERLYING_MODEL="sora-2" OPENAI_VIDEO_ENDPOINT = ${AZURE_OPENAI_VIDEO_ENDPOINT} OPENAI_VIDEO_KEY = ${AZURE_OPENAI_VIDEO_KEY} diff --git a/doc/api.rst b/doc/api.rst index 15411b0d4b..780ae04206 100644 --- a/doc/api.rst +++ b/doc/api.rst @@ -704,6 +704,7 @@ API Reference PyRITInitializer AIRTInitializer + 
AIRTTargetInitializer SimpleInitializer LoadDefaultDatasets ScenarioObjectiveListInitializer diff --git a/doc/code/registry/2_instance_registry.ipynb b/doc/code/registry/2_instance_registry.ipynb index 24a8b1bb68..52ce374054 100644 --- a/doc/code/registry/2_instance_registry.ipynb +++ b/doc/code/registry/2_instance_registry.ipynb @@ -35,10 +35,9 @@ "name": "stdout", "output_type": "stream", "text": [ - "Found default environment files: ['C:\\\\Users\\\\rlundeen\\\\.pyrit\\\\.env', 'C:\\\\Users\\\\rlundeen\\\\.pyrit\\\\.env.local']\n", - "Loaded environment file: C:\\Users\\rlundeen\\.pyrit\\.env\n", - "Loaded environment file: C:\\Users\\rlundeen\\.pyrit\\.env.local\n", - "Registered scorers: ['self_ask_refusal_d9007ba2']\n" + "Found default environment files: ['C:\\\\Users\\\\songjustin\\\\.pyrit\\\\.env']\n", + "Loaded environment file: C:\\Users\\songjustin\\.pyrit\\.env\n", + "Registered scorers: ['self_ask_refusal_scorer::94a582f5']\n" ] } ], @@ -83,7 +82,7 @@ "name": "stdout", "output_type": "stream", "text": [ - "Retrieved scorer: \n", + "Retrieved scorer: \n", "Scorer type: SelfAskRefusalScorer\n" ] } @@ -118,7 +117,7 @@ "output_type": "stream", "text": [ "\n", - "self_ask_refusal_d9007ba2:\n", + "self_ask_refusal_scorer::94a582f5:\n", " Class: SelfAskRefusalScorer\n", " Type: true_false\n", " Description: A self-ask scorer that detects refusal in AI responses. This...\n", @@ -126,7 +125,7 @@ "\u001b[1m 📊 Scorer Information\u001b[0m\n", "\u001b[37m ▸ Scorer Identifier\u001b[0m\n", "\u001b[36m • Scorer Type: SelfAskRefusalScorer\u001b[0m\n", - "\u001b[36m • Target Model: gpt-40\u001b[0m\n", + "\u001b[36m • Target Model: gpt-4o\u001b[0m\n", "\u001b[36m • Temperature: None\u001b[0m\n", "\u001b[36m • Score Aggregator: OR_\u001b[0m\n", "\n", @@ -141,12 +140,12 @@ "# Get metadata for all registered scorers\n", "metadata = registry.list_metadata()\n", "for item in metadata:\n", - " print(f\"\\n{item.name}:\")\n", + " print(f\"\\n{item.unique_name}:\")\n", " print(f\" Class: {item.class_name}\")\n", " print(f\" Type: {item.scorer_type}\")\n", - " print(f\" Description: {item.description[:60]}...\")\n", + " print(f\" Description: {item.class_description[:60]}...\")\n", "\n", - " ConsoleScorerPrinter().print_objective_scorer(scorer_identifier=item.scorer_identifier)" + " ConsoleScorerPrinter().print_objective_scorer(scorer_identifier=item)" ] }, { @@ -169,26 +168,69 @@ "name": "stdout", "output_type": "stream", "text": [ - "True/False scorers: ['self_ask_refusal_d9007ba2']\n", - "Refusal scorers: ['self_ask_refusal_d9007ba2']\n", - "True/False refusal scorers: ['self_ask_refusal_d9007ba2']\n" + "True/False scorers: ['self_ask_refusal_scorer::94a582f5']\n", + "Refusal scorers: ['self_ask_refusal_scorer::94a582f5']\n", + "True/False refusal scorers: ['self_ask_refusal_scorer::94a582f5']\n" ] } ], "source": [ "# Filter by scorer_type (based on isinstance check against TrueFalseScorer/FloatScaleScorer)\n", "true_false_scorers = registry.list_metadata(include_filters={\"scorer_type\": \"true_false\"})\n", - "print(f\"True/False scorers: {[m.name for m in true_false_scorers]}\")\n", + "print(f\"True/False scorers: {[m.unique_name for m in true_false_scorers]}\")\n", "\n", "# Filter by class_name\n", "refusal_scorers = registry.list_metadata(include_filters={\"class_name\": \"SelfAskRefusalScorer\"})\n", - "print(f\"Refusal scorers: {[m.name for m in refusal_scorers]}\")\n", + "print(f\"Refusal scorers: {[m.unique_name for m in refusal_scorers]}\")\n", "\n", "# Combine multiple filters (AND 
logic)\n", "specific_scorers = registry.list_metadata(\n", " include_filters={\"scorer_type\": \"true_false\", \"class_name\": \"SelfAskRefusalScorer\"}\n", ")\n", - "print(f\"True/False refusal scorers: {[m.name for m in specific_scorers]}\")" + "print(f\"True/False refusal scorers: {[m.unique_name for m in specific_scorers]}\")" + ] + }, + { + "cell_type": "markdown", + "id": "9", + "metadata": {}, + "source": [ + "## Using Target Initializer\n", + "\n", + "You can optionally use the `AIRTTargetInitializer` to automatically configure and register targets that use commonly used environment variables (from `.env_example`). This initializer does not strictly require any environment variables - it simply registers whatever endpoints are available." + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "10", + "metadata": {}, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "Found default environment files: ['C:\\\\Users\\\\songjustin\\\\.pyrit\\\\.env']\n", + "Loaded environment file: C:\\Users\\songjustin\\.pyrit\\.env\n", + "Registered targets after AIRT initialization: ['azure_content_safety', 'azure_gpt4o_unsafe_chat', 'azure_gpt4o_unsafe_chat2', 'default_openai_frontend', 'openai_chat', 'openai_image', 'openai_realtime', 'openai_responses', 'openai_tts', 'openai_video']\n" + ] + } + ], + "source": [ + "from pyrit.registry import TargetRegistry\n", + "from pyrit.setup import initialize_pyrit_async\n", + "from pyrit.setup.initializers import AIRTTargetInitializer\n", + "\n", + "# Using built-in initializer\n", + "await initialize_pyrit_async( # type: ignore\n", + " memory_db_type=\"InMemory\", initializers=[AIRTTargetInitializer()]\n", + ")\n", + "\n", + "# Get the registry singleton\n", + "registry = TargetRegistry.get_registry_singleton()\n", + "# List registered targets\n", + "target_names = registry.get_names()\n", + "print(f\"Registered targets after AIRT initialization: {target_names}\")" ] } ], @@ -203,7 +245,7 @@ "name": "python", "nbconvert_exporter": "python", "pygments_lexer": "ipython3", - "version": "3.13.5" + "version": "3.11.9" } }, "nbformat": 4, diff --git a/doc/code/registry/2_instance_registry.py b/doc/code/registry/2_instance_registry.py index c20755730c..d645529f25 100644 --- a/doc/code/registry/2_instance_registry.py +++ b/doc/code/registry/2_instance_registry.py @@ -5,11 +5,15 @@ # extension: .py # format_name: percent # format_version: '1.3' -# jupytext_version: 1.18.1 +# jupytext_version: 1.17.2 +# kernelspec: +# display_name: pyrit-dev +# language: python +# name: python3 # --- # %% [markdown] -# ## Why Instance Registries? +# # Why Instance Registries? # # Some components need configuration that can't easily be passed at instantiation time. For example, scorers often need: # - A configured `chat_target` for LLM-based scoring @@ -19,7 +23,7 @@ # Instance registries let initializers register fully-configured instances that are ready to use. # %% [markdown] -# # Listing Available Instances +# ## Listing Available Instances # # Use `get_names()` to see registered instances, or `list_metadata()` for details. 
@@ -67,12 +71,12 @@ # Get metadata for all registered scorers metadata = registry.list_metadata() for item in metadata: - print(f"\n{item.name}:") + print(f"\n{item.unique_name}:") print(f" Class: {item.class_name}") print(f" Type: {item.scorer_type}") - print(f" Description: {item.description[:60]}...") + print(f" Description: {item.class_description[:60]}...") - ConsoleScorerPrinter().print_objective_scorer(scorer_identifier=item.scorer_identifier) + ConsoleScorerPrinter().print_objective_scorer(scorer_identifier=item) # %% [markdown] # ## Filtering @@ -82,14 +86,35 @@ # %% # Filter by scorer_type (based on isinstance check against TrueFalseScorer/FloatScaleScorer) true_false_scorers = registry.list_metadata(include_filters={"scorer_type": "true_false"}) -print(f"True/False scorers: {[m.name for m in true_false_scorers]}") +print(f"True/False scorers: {[m.unique_name for m in true_false_scorers]}") # Filter by class_name refusal_scorers = registry.list_metadata(include_filters={"class_name": "SelfAskRefusalScorer"}) -print(f"Refusal scorers: {[m.name for m in refusal_scorers]}") +print(f"Refusal scorers: {[m.unique_name for m in refusal_scorers]}") # Combine multiple filters (AND logic) specific_scorers = registry.list_metadata( include_filters={"scorer_type": "true_false", "class_name": "SelfAskRefusalScorer"} ) -print(f"True/False refusal scorers: {[m.name for m in specific_scorers]}") +print(f"True/False refusal scorers: {[m.unique_name for m in specific_scorers]}") + +# %% [markdown] +# ## Using Target Initializer +# +# You can optionally use the `AIRTTargetInitializer` to automatically configure and register targets that use commonly used environment variables (from `.env_example`). This initializer does not strictly require any environment variables - it simply registers whatever endpoints are available. 
+ +# %% +from pyrit.registry import TargetRegistry +from pyrit.setup import initialize_pyrit_async +from pyrit.setup.initializers import AIRTTargetInitializer + +# Using built-in initializer +await initialize_pyrit_async( # type: ignore + memory_db_type="InMemory", initializers=[AIRTTargetInitializer()] +) + +# Get the registry singleton +registry = TargetRegistry.get_registry_singleton() +# List registered targets +target_names = registry.get_names() +print(f"Registered targets after AIRT initialization: {target_names}") diff --git a/pyrit/identifiers/target_identifier.py b/pyrit/identifiers/target_identifier.py index f08ad8709d..b8924fb0c0 100644 --- a/pyrit/identifiers/target_identifier.py +++ b/pyrit/identifiers/target_identifier.py @@ -34,6 +34,9 @@ class TargetIdentifier(Identifier): max_requests_per_minute: Optional[int] = None """Maximum number of requests per minute.""" + supports_conversation_history: bool = False + """Whether the target supports explicit setting of conversation history (is a PromptChatTarget).""" + target_specific_params: Optional[Dict[str, Any]] = None """Additional target-specific parameters.""" diff --git a/pyrit/prompt_target/common/prompt_target.py b/pyrit/prompt_target/common/prompt_target.py index 8cd80f47d4..653d008e65 100644 --- a/pyrit/prompt_target/common/prompt_target.py +++ b/pyrit/prompt_target/common/prompt_target.py @@ -122,6 +122,9 @@ def _create_identifier( elif self._model_name: model_name = self._model_name + # Late import to avoid circular dependency + from pyrit.prompt_target.common.prompt_chat_target import PromptChatTarget + return TargetIdentifier( class_name=self.__class__.__name__, class_module=self.__class__.__module__, @@ -132,6 +135,7 @@ def _create_identifier( temperature=temperature, top_p=top_p, max_requests_per_minute=self._max_requests_per_minute, + supports_conversation_history=isinstance(self, PromptChatTarget), target_specific_params=target_specific_params, ) diff --git a/pyrit/registry/__init__.py b/pyrit/registry/__init__.py index 209ec6c146..5f2fe7536f 100644 --- a/pyrit/registry/__init__.py +++ b/pyrit/registry/__init__.py @@ -21,6 +21,7 @@ from pyrit.registry.instance_registries import ( BaseInstanceRegistry, ScorerRegistry, + TargetRegistry, ) __all__ = [ @@ -39,4 +40,5 @@ "ScenarioMetadata", "ScenarioRegistry", "ScorerRegistry", + "TargetRegistry", ] diff --git a/pyrit/registry/instance_registries/__init__.py b/pyrit/registry/instance_registries/__init__.py index eab870f0e1..2cf50693cf 100644 --- a/pyrit/registry/instance_registries/__init__.py +++ b/pyrit/registry/instance_registries/__init__.py @@ -17,10 +17,14 @@ from pyrit.registry.instance_registries.scorer_registry import ( ScorerRegistry, ) +from pyrit.registry.instance_registries.target_registry import ( + TargetRegistry, +) __all__ = [ # Base class "BaseInstanceRegistry", # Concrete registries "ScorerRegistry", + "TargetRegistry", ] diff --git a/pyrit/registry/instance_registries/target_registry.py b/pyrit/registry/instance_registries/target_registry.py new file mode 100644 index 0000000000..3fcdbb3160 --- /dev/null +++ b/pyrit/registry/instance_registries/target_registry.py @@ -0,0 +1,97 @@ +# Copyright (c) Microsoft Corporation. +# Licensed under the MIT license. + +""" +Target registry for discovering and managing PyRIT prompt targets. + +Targets are registered explicitly via initializers as pre-configured instances. 
+""" + +from __future__ import annotations + +import logging +from typing import TYPE_CHECKING, Optional + +from pyrit.identifiers import TargetIdentifier +from pyrit.registry.instance_registries.base_instance_registry import ( + BaseInstanceRegistry, +) + +if TYPE_CHECKING: + from pyrit.prompt_target import PromptTarget + +logger = logging.getLogger(__name__) + + +class TargetRegistry(BaseInstanceRegistry["PromptTarget", TargetIdentifier]): + """ + Registry for managing available prompt target instances. + + This registry stores pre-configured PromptTarget instances (not classes). + Targets are registered explicitly via initializers after being instantiated + with their required parameters (e.g., endpoint, API keys). + + Targets are identified by their snake_case name derived from the class name, + or a custom name provided during registration. + """ + + @classmethod + def get_registry_singleton(cls) -> "TargetRegistry": + """ + Get the singleton instance of the TargetRegistry. + + Returns: + The singleton TargetRegistry instance. + """ + return super().get_registry_singleton() # type: ignore[return-value] + + def register_instance( + self, + target: "PromptTarget", + *, + name: Optional[str] = None, + ) -> None: + """ + Register a target instance. + + Note: Unlike ScenarioRegistry and InitializerRegistry which register classes, + TargetRegistry registers pre-configured instances. + + Args: + target: The pre-configured target instance (not a class). + name: Optional custom registry name. If not provided, + derived from class name with identifier hash appended + (e.g., OpenAIChatTarget -> openai_chat_abc123). + """ + if name is None: + name = target.get_identifier().unique_name + + self.register(target, name=name) + logger.debug(f"Registered target instance: {name} ({target.__class__.__name__})") + + def get_instance_by_name(self, name: str) -> Optional["PromptTarget"]: + """ + Get a registered target instance by name. + + Note: This returns an already-instantiated target, not a class. + + Args: + name: The registry name of the target. + + Returns: + The target instance, or None if not found. + """ + return self.get(name) + + def _build_metadata(self, name: str, instance: "PromptTarget") -> TargetIdentifier: + """ + Build metadata for a target instance. + + Args: + name: The registry name of the target. + instance: The target instance. + + Returns: + TargetIdentifier describing the target. 
+ """ + return instance.get_identifier() diff --git a/pyrit/setup/initializers/__init__.py b/pyrit/setup/initializers/__init__.py index 1c0cbd4683..6b1c63c484 100644 --- a/pyrit/setup/initializers/__init__.py +++ b/pyrit/setup/initializers/__init__.py @@ -4,6 +4,7 @@ """PyRIT initializers package.""" from pyrit.setup.initializers.airt import AIRTInitializer +from pyrit.setup.initializers.airt_targets import AIRTTargetInitializer from pyrit.setup.initializers.pyrit_initializer import PyRITInitializer from pyrit.setup.initializers.scenarios.load_default_datasets import LoadDefaultDatasets from pyrit.setup.initializers.scenarios.objective_list import ScenarioObjectiveListInitializer @@ -13,6 +14,7 @@ __all__ = [ "PyRITInitializer", "AIRTInitializer", + "AIRTTargetInitializer", "SimpleInitializer", "LoadDefaultDatasets", "ScenarioObjectiveListInitializer", diff --git a/pyrit/setup/initializers/airt_targets.py b/pyrit/setup/initializers/airt_targets.py new file mode 100644 index 0000000000..f421c53c6e --- /dev/null +++ b/pyrit/setup/initializers/airt_targets.py @@ -0,0 +1,422 @@ +# Copyright (c) Microsoft Corporation. +# Licensed under the MIT license. + +""" +AIRT Target Initializer for registering pre-configured targets from environment variables. + +This module provides the AIRTTargetInitializer class that registers available +targets into the TargetRegistry based on environment variable configuration. + +Note: This module only includes PRIMARY endpoint configurations from .env_example. + Alias configurations (those using ${...} syntax) are excluded since they + reference other primary configurations. +""" + +import logging +import os +from dataclasses import dataclass +from typing import Any, List, Optional, Type + +from pyrit.prompt_target import ( + AzureMLChatTarget, + OpenAIChatTarget, + OpenAICompletionTarget, + OpenAIImageTarget, + OpenAIResponseTarget, + OpenAITTSTarget, + OpenAIVideoTarget, + PromptShieldTarget, + PromptTarget, + RealtimeTarget, +) +from pyrit.registry import TargetRegistry +from pyrit.setup.initializers.pyrit_initializer import PyRITInitializer + +logger = logging.getLogger(__name__) + + +@dataclass +class TargetConfig: + """Configuration for a target to be registered.""" + + registry_name: str + target_class: Type[PromptTarget] + endpoint_var: str + key_var: str = "" # Empty string means no auth required + model_var: Optional[str] = None + underlying_model_var: Optional[str] = None + + +# Define all supported target configurations. +# Only PRIMARY configurations are included here - alias configurations that use ${...} +# syntax in .env_example are excluded since they reference other primary configurations. 
+TARGET_CONFIGS: List[TargetConfig] = [ + # ============================================ + # OpenAI Chat Targets (OpenAIChatTarget) + # ============================================ + TargetConfig( + registry_name="platform_openai_chat", + target_class=OpenAIChatTarget, + endpoint_var="PLATFORM_OPENAI_CHAT_ENDPOINT", + key_var="PLATFORM_OPENAI_CHAT_API_KEY", + model_var="PLATFORM_OPENAI_CHAT_GPT4O_MODEL", + ), + TargetConfig( + registry_name="azure_openai_gpt4o", + target_class=OpenAIChatTarget, + endpoint_var="AZURE_OPENAI_GPT4O_ENDPOINT", + key_var="AZURE_OPENAI_GPT4O_KEY", + model_var="AZURE_OPENAI_GPT4O_MODEL", + underlying_model_var="AZURE_OPENAI_GPT4O_UNDERLYING_MODEL", + ), + TargetConfig( + registry_name="azure_openai_integration_test", + target_class=OpenAIChatTarget, + endpoint_var="AZURE_OPENAI_INTEGRATION_TEST_ENDPOINT", + key_var="AZURE_OPENAI_INTEGRATION_TEST_KEY", + model_var="AZURE_OPENAI_INTEGRATION_TEST_MODEL", + underlying_model_var="AZURE_OPENAI_INTEGRATION_TEST_UNDERLYING_MODEL", + ), + TargetConfig( + registry_name="azure_openai_gpt35_chat", + target_class=OpenAIChatTarget, + endpoint_var="AZURE_OPENAI_GPT3_5_CHAT_ENDPOINT", + key_var="AZURE_OPENAI_GPT3_5_CHAT_KEY", + model_var="AZURE_OPENAI_GPT3_5_CHAT_MODEL", + underlying_model_var="AZURE_OPENAI_GPT3_5_CHAT_UNDERLYING_MODEL", + ), + TargetConfig( + registry_name="azure_openai_gpt4_chat", + target_class=OpenAIChatTarget, + endpoint_var="AZURE_OPENAI_GPT4_CHAT_ENDPOINT", + key_var="AZURE_OPENAI_GPT4_CHAT_KEY", + model_var="AZURE_OPENAI_GPT4_CHAT_MODEL", + underlying_model_var="AZURE_OPENAI_GPT4_CHAT_UNDERLYING_MODEL", + ), + TargetConfig( + registry_name="azure_gpt4o_unsafe_chat", + target_class=OpenAIChatTarget, + endpoint_var="AZURE_OPENAI_GPT4O_UNSAFE_CHAT_ENDPOINT", + key_var="AZURE_OPENAI_GPT4O_UNSAFE_CHAT_KEY", + model_var="AZURE_OPENAI_GPT4O_UNSAFE_CHAT_MODEL", + underlying_model_var="AZURE_OPENAI_GPT4O_UNSAFE_CHAT_UNDERLYING_MODEL", + ), + TargetConfig( + registry_name="azure_gpt4o_unsafe_chat2", + target_class=OpenAIChatTarget, + endpoint_var="AZURE_OPENAI_GPT4O_UNSAFE_CHAT_ENDPOINT2", + key_var="AZURE_OPENAI_GPT4O_UNSAFE_CHAT_KEY2", + model_var="AZURE_OPENAI_GPT4O_UNSAFE_CHAT_MODEL2", + underlying_model_var="AZURE_OPENAI_GPT4O_UNSAFE_CHAT_UNDERLYING_MODEL2", + ), + TargetConfig( + registry_name="azure_foundry_deepseek", + target_class=OpenAIChatTarget, + endpoint_var="AZURE_FOUNDRY_DEEPSEEK_ENDPOINT", + key_var="AZURE_FOUNDRY_DEEPSEEK_KEY", + model_var="AZURE_FOUNDRY_DEEPSEEK_MODEL", + ), + TargetConfig( + registry_name="azure_foundry_phi4", + target_class=OpenAIChatTarget, + endpoint_var="AZURE_FOUNDRY_PHI4_ENDPOINT", + key_var="AZURE_CHAT_PHI4_KEY", + model_var="AZURE_FOUNDRY_PHI4_MODEL", + ), + TargetConfig( + registry_name="azure_foundry_mistral_large", + target_class=OpenAIChatTarget, + endpoint_var="AZURE_FOUNDRY_MISTRAL_LARGE_ENDPOINT", + key_var="AZURE_FOUNDRY_MISTRAL_LARGE_KEY", + model_var="AZURE_FOUNDRY_MISTRAL_LARGE_MODEL", + ), + TargetConfig( + registry_name="groq", + target_class=OpenAIChatTarget, + endpoint_var="GROQ_ENDPOINT", + key_var="GROQ_KEY", + model_var="GROQ_LLAMA_MODEL", + ), + TargetConfig( + registry_name="open_router", + target_class=OpenAIChatTarget, + endpoint_var="OPEN_ROUTER_ENDPOINT", + key_var="OPEN_ROUTER_KEY", + model_var="OPEN_ROUTER_CLAUDE_MODEL", + ), + TargetConfig( + registry_name="ollama", + target_class=OpenAIChatTarget, + endpoint_var="OLLAMA_CHAT_ENDPOINT", + model_var="OLLAMA_MODEL", + ), + TargetConfig( + registry_name="google_gemini", + 
target_class=OpenAIChatTarget, + endpoint_var="GOOGLE_GEMINI_ENDPOINT", + key_var="GOOGLE_GEMINI_API_KEY", + model_var="GOOGLE_GEMINI_MODEL", + ), + # ============================================ + # OpenAI Responses Targets (OpenAIResponseTarget) + # ============================================ + TargetConfig( + registry_name="azure_openai_gpt5_responses", + target_class=OpenAIResponseTarget, + endpoint_var="AZURE_OPENAI_GPT5_RESPONSES_ENDPOINT", + key_var="AZURE_OPENAI_GPT5_KEY", + model_var="AZURE_OPENAI_GPT5_MODEL", + underlying_model_var="AZURE_OPENAI_GPT5_UNDERLYING_MODEL", + ), + TargetConfig( + registry_name="platform_openai_responses", + target_class=OpenAIResponseTarget, + endpoint_var="PLATFORM_OPENAI_RESPONSES_ENDPOINT", + key_var="PLATFORM_OPENAI_RESPONSES_KEY", + model_var="PLATFORM_OPENAI_RESPONSES_MODEL", + ), + TargetConfig( + registry_name="azure_openai_responses", + target_class=OpenAIResponseTarget, + endpoint_var="AZURE_OPENAI_RESPONSES_ENDPOINT", + key_var="AZURE_OPENAI_RESPONSES_KEY", + model_var="AZURE_OPENAI_RESPONSES_MODEL", + underlying_model_var="AZURE_OPENAI_RESPONSES_UNDERLYING_MODEL", + ), + # ============================================ + # Realtime Targets (RealtimeTarget) + # ============================================ + TargetConfig( + registry_name="platform_openai_realtime", + target_class=RealtimeTarget, + endpoint_var="PLATFORM_OPENAI_REALTIME_ENDPOINT", + key_var="PLATFORM_OPENAI_REALTIME_API_KEY", + model_var="PLATFORM_OPENAI_REALTIME_MODEL", + ), + TargetConfig( + registry_name="azure_openai_realtime", + target_class=RealtimeTarget, + endpoint_var="AZURE_OPENAI_REALTIME_ENDPOINT", + key_var="AZURE_OPENAI_REALTIME_API_KEY", + model_var="AZURE_OPENAI_REALTIME_MODEL", + underlying_model_var="AZURE_OPENAI_REALTIME_UNDERLYING_MODEL", + ), + # ============================================ + # Image Targets (OpenAIImageTarget) + # ============================================ + TargetConfig( + registry_name="openai_image_azure", + target_class=OpenAIImageTarget, + endpoint_var="OPENAI_IMAGE_ENDPOINT1", + key_var="OPENAI_IMAGE_API_KEY1", + model_var="OPENAI_IMAGE_MODEL1", + underlying_model_var="OPENAI_IMAGE_UNDERLYING_MODEL1", + ), + TargetConfig( + registry_name="openai_image_platform", + target_class=OpenAIImageTarget, + endpoint_var="OPENAI_IMAGE_ENDPOINT2", + key_var="OPENAI_IMAGE_API_KEY2", + model_var="OPENAI_IMAGE_MODEL2", + underlying_model_var="OPENAI_IMAGE_UNDERLYING_MODEL2", + ), + # ============================================ + # TTS Targets (OpenAITTSTarget) + # ============================================ + TargetConfig( + registry_name="openai_tts_azure", + target_class=OpenAITTSTarget, + endpoint_var="OPENAI_TTS_ENDPOINT1", + key_var="OPENAI_TTS_KEY1", + model_var="OPENAI_TTS_MODEL1", + underlying_model_var="OPENAI_TTS_UNDERLYING_MODEL1", + ), + TargetConfig( + registry_name="openai_tts_platform", + target_class=OpenAITTSTarget, + endpoint_var="OPENAI_TTS_ENDPOINT2", + key_var="OPENAI_TTS_KEY2", + model_var="OPENAI_TTS_MODEL2", + underlying_model_var="OPENAI_TTS_UNDERLYING_MODEL2", + ), + # ============================================ + # Video Targets (OpenAIVideoTarget) + # ============================================ + TargetConfig( + registry_name="azure_openai_video", + target_class=OpenAIVideoTarget, + endpoint_var="AZURE_OPENAI_VIDEO_ENDPOINT", + key_var="AZURE_OPENAI_VIDEO_KEY", + model_var="AZURE_OPENAI_VIDEO_MODEL", + underlying_model_var="AZURE_OPENAI_VIDEO_UNDERLYING_MODEL", + ), + # ============================================ 
+ # Completion Targets (OpenAICompletionTarget) + # ============================================ + TargetConfig( + registry_name="openai_completion", + target_class=OpenAICompletionTarget, + endpoint_var="OPENAI_COMPLETION_ENDPOINT", + key_var="OPENAI_COMPLETION_API_KEY", + model_var="OPENAI_COMPLETION_MODEL", + ), + # ============================================ + # Azure ML Targets (AzureMLChatTarget) + # ============================================ + TargetConfig( + registry_name="azure_ml_phi", + target_class=AzureMLChatTarget, + endpoint_var="AZURE_ML_PHI_ENDPOINT", + key_var="AZURE_ML_PHI_KEY", + ), + # ============================================ + # Safety Targets (PromptShieldTarget) + # ============================================ + TargetConfig( + registry_name="azure_content_safety", + target_class=PromptShieldTarget, + endpoint_var="AZURE_CONTENT_SAFETY_API_ENDPOINT", + key_var="AZURE_CONTENT_SAFETY_API_KEY", + ), +] + + +class AIRTTargetInitializer(PyRITInitializer): + """ + AIRT Target Initializer for registering pre-configured targets. + + This initializer scans for known endpoint environment variables and registers + the corresponding targets into the TargetRegistry. It only includes PRIMARY + endpoint configurations - alias configurations (those using ${...} syntax in + .env_example) are excluded since they reference other primary configurations. + + Supported Endpoints by Category: + + **OpenAI Chat Targets (OpenAIChatTarget):** + - PLATFORM_OPENAI_CHAT_* - Platform OpenAI Chat API + - AZURE_OPENAI_GPT4O_* - Azure OpenAI GPT-4o + - AZURE_OPENAI_INTEGRATION_TEST_* - Integration test endpoint + - AZURE_OPENAI_GPT3_5_CHAT_* - Azure OpenAI GPT-3.5 + - AZURE_OPENAI_GPT4_CHAT_* - Azure OpenAI GPT-4 + - AZURE_OPENAI_GPT4O_UNSAFE_CHAT_* - Azure OpenAI GPT-4o unsafe + - AZURE_OPENAI_GPT4O_UNSAFE_CHAT_*2 - Azure OpenAI GPT-4o unsafe secondary + - AZURE_FOUNDRY_DEEPSEEK_* - Azure AI Foundry DeepSeek + - AZURE_FOUNDRY_PHI4_* - Azure AI Foundry Phi-4 + - AZURE_FOUNDRY_MISTRAL_LARGE_* - Azure AI Foundry Mistral Large + - GROQ_* - Groq API + - OPEN_ROUTER_* - OpenRouter API + - OLLAMA_* - Ollama local + - GOOGLE_GEMINI_* - Google Gemini (OpenAI-compatible) + + **OpenAI Responses Targets (OpenAIResponseTarget):** + - AZURE_OPENAI_GPT5_RESPONSES_* - Azure OpenAI GPT-5 Responses + - PLATFORM_OPENAI_RESPONSES_* - Platform OpenAI Responses + - AZURE_OPENAI_RESPONSES_* - Azure OpenAI Responses + + **Realtime Targets (RealtimeTarget):** + - PLATFORM_OPENAI_REALTIME_* - Platform OpenAI Realtime + - AZURE_OPENAI_REALTIME_* - Azure OpenAI Realtime + + **Image Targets (OpenAIImageTarget):** + - OPENAI_IMAGE_*1 - Azure OpenAI Image + - OPENAI_IMAGE_*2 - Platform OpenAI Image + + **TTS Targets (OpenAITTSTarget):** + - OPENAI_TTS_*1 - Azure OpenAI TTS + - OPENAI_TTS_*2 - Platform OpenAI TTS + + **Video Targets (OpenAIVideoTarget):** + - AZURE_OPENAI_VIDEO_* - Azure OpenAI Video + + **Completion Targets (OpenAICompletionTarget):** + - OPENAI_COMPLETION_* - OpenAI Completion + + **Azure ML Targets (AzureMLChatTarget):** + - AZURE_ML_PHI_* - Azure ML Phi + + **Safety Targets (PromptShieldTarget):** + - AZURE_CONTENT_SAFETY_* - Azure Content Safety + + Example: + initializer = AIRTTargetInitializer() + await initializer.initialize_async() + """ + + def __init__(self) -> None: + """Initialize the AIRT Target Initializer.""" + super().__init__() + + @property + def name(self) -> str: + """Get the name of this initializer.""" + return "AIRT Target Initializer" + + @property + def description(self) -> str: 
+ """Get the description of this initializer.""" + return ( + "Instantiates a collection of (AI Red Team suggested) targets from " + "available environment variables and adds them to the TargetRegistry" + ) + + @property + def required_env_vars(self) -> List[str]: + """ + Get list of required environment variables. + + Returns empty list since this initializer is optional - it registers + whatever endpoints are available without requiring any. + """ + return [] + + async def initialize_async(self) -> None: + """ + Register available targets based on environment variables. + + Scans for known endpoint environment variables and registers the + corresponding targets into the TargetRegistry. + """ + for config in TARGET_CONFIGS: + self._register_target(config) + + def _register_target(self, config: TargetConfig) -> None: + """ + Register a target if its required environment variables are set. + + Args: + config: The target configuration specifying env vars and target class. + """ + endpoint = os.getenv(config.endpoint_var) + if not endpoint: + return + + # If key_var is empty, use placeholder (for targets like Ollama that don't require auth) + # If key_var is set, look up the env var and skip registration if not found + if config.key_var: + api_key = os.getenv(config.key_var) + if not api_key: + return + else: + api_key = "not-needed" + + model_name = os.getenv(config.model_var) if config.model_var else None + underlying_model = os.getenv(config.underlying_model_var) if config.underlying_model_var else None + + # Build kwargs for the target constructor + kwargs: dict[str, Any] = { + "endpoint": endpoint, + "api_key": api_key, + } + + # Only add model_name if the target supports it (PromptShieldTarget doesn't) + if model_name is not None: + kwargs["model_name"] = model_name + + # Add underlying_model if specified (for Azure deployments where name differs from model) + if underlying_model is not None: + kwargs["underlying_model"] = underlying_model + + target = config.target_class(**kwargs) + registry = TargetRegistry.get_registry_singleton() + registry.register_instance(target, name=config.registry_name) + logger.info(f"Registered target: {config.registry_name}") diff --git a/tests/unit/identifiers/test_target_identifier.py b/tests/unit/identifiers/test_target_identifier.py index 0541b36be5..148c60983d 100644 --- a/tests/unit/identifiers/test_target_identifier.py +++ b/tests/unit/identifiers/test_target_identifier.py @@ -500,6 +500,63 @@ def test_can_use_as_dict_key(self): assert d[identifier] == "value" +class TestTargetIdentifierSupportsConversationHistory: + """Test the supports_conversation_history field in TargetIdentifier.""" + + def test_supports_conversation_history_defaults_to_false(self): + """Test that supports_conversation_history defaults to False.""" + identifier = TargetIdentifier( + class_name="TestTarget", + class_module="pyrit.prompt_target.test_target", + class_description="A test target", + identifier_type="instance", + ) + + assert identifier.supports_conversation_history is False + + def test_supports_conversation_history_included_in_hash(self): + """Test that supports_conversation_history affects the hash.""" + base_args = { + "class_name": "TestTarget", + "class_module": "pyrit.prompt_target.test_target", + "class_description": "A test target", + "identifier_type": "instance", + } + + identifier1 = TargetIdentifier(supports_conversation_history=False, **base_args) + identifier2 = TargetIdentifier(supports_conversation_history=True, **base_args) + + assert 
identifier1.hash != identifier2.hash + + def test_supports_conversation_history_in_to_dict(self): + """Test that supports_conversation_history is included in to_dict.""" + identifier = TargetIdentifier( + class_name="TestChatTarget", + class_module="pyrit.prompt_target.test_chat_target", + class_description="A test chat target", + identifier_type="instance", + supports_conversation_history=True, + ) + + result = identifier.to_dict() + + assert result["supports_conversation_history"] is True + + def test_supports_conversation_history_from_dict(self): + """Test that supports_conversation_history is restored from dict.""" + data = { + "class_name": "TestChatTarget", + "class_module": "pyrit.prompt_target.test_chat_target", + "class_description": "A test chat target", + "identifier_type": "instance", + "supports_conversation_history": True, + } + + identifier = TargetIdentifier.from_dict(data) + + assert identifier.supports_conversation_history is True + + class TestTargetIdentifierNormalize: """Test the normalize class method for TargetIdentifier.""" diff --git a/tests/unit/registry/test_target_registry.py b/tests/unit/registry/test_target_registry.py new file mode 100644 index 0000000000..8e32411b89 --- /dev/null +++ b/tests/unit/registry/test_target_registry.py @@ -0,0 +1,277 @@ +# Copyright (c) Microsoft Corporation. +# Licensed under the MIT license. + + +import pytest + +from pyrit.identifiers import TargetIdentifier +from pyrit.models import Message, MessagePiece +from pyrit.prompt_target import PromptTarget +from pyrit.prompt_target.common.prompt_chat_target import PromptChatTarget +from pyrit.registry.instance_registries.target_registry import TargetRegistry + + +class MockPromptTarget(PromptTarget): + """Mock PromptTarget for testing.""" + + def __init__(self, *, model_name: str = "mock_model") -> None: + super().__init__(model_name=model_name) + + async def send_prompt_async( + self, + *, + message: Message, + ) -> list[Message]: + return [ + MessagePiece( + role="assistant", + original_value="mock response", + ).to_message() + ] + + def _validate_request(self, *, message: Message) -> None: + pass + + +class MockPromptChatTarget(PromptChatTarget): + """Mock PromptChatTarget for testing conversation history support.""" + + def __init__(self, *, model_name: str = "mock_chat_model", endpoint: str = "http://chat-test") -> None: + super().__init__(model_name=model_name, endpoint=endpoint) + + async def send_prompt_async( + self, + *, + message: Message, + ) -> list[Message]: + return [ + MessagePiece( + role="assistant", + original_value="chat response", + ).to_message() + ] + + def _validate_request(self, *, message: Message) -> None: + pass + + def is_json_response_supported(self) -> bool: + return False + + +class TestTargetRegistrySingleton: + """Tests for the singleton pattern in TargetRegistry.""" + + def setup_method(self): + """Reset the singleton before each test.""" + TargetRegistry.reset_instance() + + def teardown_method(self): + """Reset the singleton after each test.""" + TargetRegistry.reset_instance() + + def test_get_registry_singleton_returns_same_instance(self): + """Test that get_registry_singleton returns the same singleton each time.""" + instance1 = TargetRegistry.get_registry_singleton() + instance2 = TargetRegistry.get_registry_singleton() + + assert instance1 is instance2 + + def test_get_registry_singleton_returns_target_registry_type(self): + """Test that get_registry_singleton returns a TargetRegistry instance.""" + instance = 
TargetRegistry.get_registry_singleton() + assert isinstance(instance, TargetRegistry) + + def test_reset_instance_clears_singleton(self): + """Test that reset_instance clears the singleton.""" + instance1 = TargetRegistry.get_registry_singleton() + TargetRegistry.reset_instance() + instance2 = TargetRegistry.get_registry_singleton() + + assert instance1 is not instance2 + + +@pytest.mark.usefixtures("patch_central_database") +class TestTargetRegistryRegisterInstance: + """Tests for register_instance functionality in TargetRegistry.""" + + def setup_method(self): + """Reset and get a fresh registry for each test.""" + TargetRegistry.reset_instance() + self.registry = TargetRegistry.get_registry_singleton() + + def teardown_method(self): + """Reset the singleton after each test.""" + TargetRegistry.reset_instance() + + def test_register_instance_with_custom_name(self): + """Test registering a target with a custom name.""" + target = MockPromptTarget() + self.registry.register_instance(target, name="custom_target") + + assert "custom_target" in self.registry + assert self.registry.get("custom_target") is target + + def test_register_instance_generates_name_from_class(self): + """Test that register_instance generates a name from class name when not provided.""" + target = MockPromptTarget() + self.registry.register_instance(target) + + # Name should be derived from class name with hash suffix + names = self.registry.get_names() + assert len(names) == 1 + assert names[0].startswith("mock_prompt_") + + def test_register_instance_multiple_targets_unique_names(self): + """Test registering multiple targets generates unique names.""" + target1 = MockPromptTarget() + target2 = MockPromptChatTarget() + + self.registry.register_instance(target1) + self.registry.register_instance(target2) + + assert len(self.registry) == 2 + + def test_register_instance_same_target_type_different_config(self): + """Test that same target class with different configs can be registered.""" + target1 = MockPromptTarget(model_name="model_a") + target2 = MockPromptTarget(model_name="model_b") + + # Register with explicit names + self.registry.register_instance(target1, name="target_1") + self.registry.register_instance(target2, name="target_2") + + assert len(self.registry) == 2 + + +@pytest.mark.usefixtures("patch_central_database") +class TestTargetRegistryGetInstanceByName: + """Tests for get_instance_by_name functionality in TargetRegistry.""" + + def setup_method(self): + """Reset and get a fresh registry for each test.""" + TargetRegistry.reset_instance() + self.registry = TargetRegistry.get_registry_singleton() + self.target = MockPromptTarget() + self.registry.register_instance(self.target, name="test_target") + + def teardown_method(self): + """Reset the singleton after each test.""" + TargetRegistry.reset_instance() + + def test_get_instance_by_name_returns_target(self): + """Test getting a registered target by name.""" + result = self.registry.get_instance_by_name("test_target") + assert result is self.target + + def test_get_instance_by_name_nonexistent_returns_none(self): + """Test that getting a non-existent target returns None.""" + result = self.registry.get_instance_by_name("nonexistent") + assert result is None + + +@pytest.mark.usefixtures("patch_central_database") +class TestTargetRegistryBuildMetadata: + """Tests for _build_metadata functionality in TargetRegistry.""" + + def setup_method(self): + """Reset and get a fresh registry for each test.""" + TargetRegistry.reset_instance() + self.registry = 
TargetRegistry.get_registry_singleton() + + def teardown_method(self): + """Reset the singleton after each test.""" + TargetRegistry.reset_instance() + + def test_build_metadata_includes_class_name(self): + """Test that metadata (TargetIdentifier) includes the class name.""" + target = MockPromptTarget() + self.registry.register_instance(target, name="mock_target") + + metadata = self.registry.list_metadata() + assert len(metadata) == 1 + assert isinstance(metadata[0], TargetIdentifier) + assert metadata[0].class_name == "MockPromptTarget" + + def test_build_metadata_includes_model_name(self): + """Test that metadata includes the model_name.""" + target = MockPromptTarget(model_name="test_model") + self.registry.register_instance(target, name="mock_target") + + metadata = self.registry.list_metadata() + assert metadata[0].model_name == "test_model" + + def test_build_metadata_description_from_docstring(self): + """Test that class_description is derived from the target's docstring.""" + target = MockPromptTarget() + self.registry.register_instance(target, name="mock_target") + + metadata = self.registry.list_metadata() + # MockPromptTarget has a docstring + assert "Mock PromptTarget for testing" in metadata[0].class_description + + +@pytest.mark.usefixtures("patch_central_database") +class TestTargetRegistryListMetadata: + """Tests for list_metadata in TargetRegistry.""" + + def setup_method(self): + """Reset and get a fresh registry with multiple targets.""" + TargetRegistry.reset_instance() + self.registry = TargetRegistry.get_registry_singleton() + + self.target1 = MockPromptTarget(model_name="model_a") + self.target2 = MockPromptTarget(model_name="model_b") + self.chat_target = MockPromptChatTarget() + + self.registry.register_instance(self.target1, name="target_1") + self.registry.register_instance(self.target2, name="target_2") + self.registry.register_instance(self.chat_target, name="chat_target") + + def teardown_method(self): + """Reset the singleton after each test.""" + TargetRegistry.reset_instance() + + def test_list_metadata_returns_all_registered(self): + """Test that list_metadata returns metadata for all registered targets.""" + metadata = self.registry.list_metadata() + assert len(metadata) == 3 + + def test_list_metadata_filter_by_class_name(self): + """Test filtering metadata by class_name.""" + mock_metadata = self.registry.list_metadata(include_filters={"class_name": "MockPromptTarget"}) + + assert len(mock_metadata) == 2 + for m in mock_metadata: + assert m.class_name == "MockPromptTarget" + + +@pytest.mark.usefixtures("patch_central_database") +class TestTargetRegistrySupportsConversationHistory: + """Tests for supports_conversation_history field in TargetIdentifier.""" + + def setup_method(self): + """Reset and get a fresh registry for each test.""" + TargetRegistry.reset_instance() + self.registry = TargetRegistry.get_registry_singleton() + + def teardown_method(self): + """Reset the singleton after each test.""" + TargetRegistry.reset_instance() + + def test_registered_chat_target_has_supports_conversation_history_true(self): + """Test that registered chat targets have supports_conversation_history=True in metadata.""" + chat_target = MockPromptChatTarget() + self.registry.register_instance(chat_target, name="chat_target") + + metadata = self.registry.list_metadata() + assert len(metadata) == 1 + assert metadata[0].supports_conversation_history is True + + def test_registered_non_chat_target_has_supports_conversation_history_false(self): + """Test that registered 
non-chat targets have supports_conversation_history=False in metadata.""" + target = MockPromptTarget() + self.registry.register_instance(target, name="prompt_target") + + metadata = self.registry.list_metadata() + assert len(metadata) == 1 + assert metadata[0].supports_conversation_history is False diff --git a/tests/unit/setup/test_airt_targets_initializer.py b/tests/unit/setup/test_airt_targets_initializer.py new file mode 100644 index 0000000000..356a6388d5 --- /dev/null +++ b/tests/unit/setup/test_airt_targets_initializer.py @@ -0,0 +1,221 @@ +# Copyright (c) Microsoft Corporation. +# Licensed under the MIT license. + +import os + +import pytest + +from pyrit.registry import TargetRegistry +from pyrit.setup.initializers import AIRTTargetInitializer +from pyrit.setup.initializers.airt_targets import TARGET_CONFIGS + + +class TestAIRTTargetInitializerBasic: + """Tests for AIRTTargetInitializer class - basic functionality.""" + + def test_can_be_created(self): + """Test that AIRTTargetInitializer can be instantiated.""" + init = AIRTTargetInitializer() + assert init is not None + assert init.name == "AIRT Target Initializer" + assert init.execution_order == 1 + + def test_required_env_vars_is_empty(self): + """Test that no env vars are required (initializer is optional).""" + init = AIRTTargetInitializer() + assert init.required_env_vars == [] + + +@pytest.mark.usefixtures("patch_central_database") +class TestAIRTTargetInitializerInitialize: + """Tests for AIRTTargetInitializer.initialize_async method.""" + + def setup_method(self) -> None: + """Reset registry before each test.""" + TargetRegistry.reset_instance() + # Clear all target-related env vars + self._clear_env_vars() + + def teardown_method(self) -> None: + """Clean up after each test.""" + TargetRegistry.reset_instance() + self._clear_env_vars() + + def _clear_env_vars(self) -> None: + """Clear all environment variables used by TARGET_CONFIGS.""" + for config in TARGET_CONFIGS: + for var in [config.endpoint_var, config.key_var, config.model_var, config.underlying_model_var]: + if var and var in os.environ: + del os.environ[var] + + @pytest.mark.asyncio + async def test_initialize_runs_without_error_no_env_vars(self): + """Test that initialize runs without errors when no env vars are set.""" + init = AIRTTargetInitializer() + await init.initialize_async() + + # No targets should be registered + registry = TargetRegistry.get_registry_singleton() + assert len(registry) == 0 + + @pytest.mark.asyncio + async def test_registers_target_when_env_vars_set(self): + """Test that a target is registered when its env vars are set.""" + os.environ["PLATFORM_OPENAI_CHAT_ENDPOINT"] = "https://api.openai.com/v1" + os.environ["PLATFORM_OPENAI_CHAT_API_KEY"] = "test_key" + os.environ["PLATFORM_OPENAI_CHAT_GPT4O_MODEL"] = "gpt-4o" + + init = AIRTTargetInitializer() + await init.initialize_async() + + registry = TargetRegistry.get_registry_singleton() + assert "platform_openai_chat" in registry + target = registry.get_instance_by_name("platform_openai_chat") + assert target is not None + assert target._model_name == "gpt-4o" + + @pytest.mark.asyncio + async def test_does_not_register_target_without_endpoint(self): + """Test that target is not registered if endpoint is missing.""" + # Only set key, not endpoint + os.environ["PLATFORM_OPENAI_CHAT_API_KEY"] = "test_key" + os.environ["PLATFORM_OPENAI_CHAT_GPT4O_MODEL"] = "gpt-4o" + + init = AIRTTargetInitializer() + await init.initialize_async() + + registry = TargetRegistry.get_registry_singleton() + 
assert "platform_openai_chat" not in registry + + @pytest.mark.asyncio + async def test_does_not_register_target_without_api_key(self): + """Test that target is not registered if api_key env var is missing.""" + # Only set endpoint, not key + os.environ["PLATFORM_OPENAI_CHAT_ENDPOINT"] = "https://api.openai.com/v1" + os.environ["PLATFORM_OPENAI_CHAT_GPT4O_MODEL"] = "gpt-4o" + + init = AIRTTargetInitializer() + await init.initialize_async() + + registry = TargetRegistry.get_registry_singleton() + assert "platform_openai_chat" not in registry + + @pytest.mark.asyncio + async def test_registers_multiple_targets(self): + """Test that multiple targets are registered when their env vars are set.""" + # Set up platform_openai_chat + os.environ["PLATFORM_OPENAI_CHAT_ENDPOINT"] = "https://api.openai.com/v1" + os.environ["PLATFORM_OPENAI_CHAT_API_KEY"] = "test_key" + os.environ["PLATFORM_OPENAI_CHAT_GPT4O_MODEL"] = "gpt-4o" + + # Set up openai_image_platform (uses ENDPOINT2/KEY2/MODEL2) + os.environ["OPENAI_IMAGE_ENDPOINT2"] = "https://api.openai.com/v1" + os.environ["OPENAI_IMAGE_API_KEY2"] = "test_image_key" + os.environ["OPENAI_IMAGE_MODEL2"] = "dall-e-3" + + init = AIRTTargetInitializer() + await init.initialize_async() + + registry = TargetRegistry.get_registry_singleton() + assert len(registry) == 2 + assert "platform_openai_chat" in registry + assert "openai_image_platform" in registry + + @pytest.mark.asyncio + async def test_registers_azure_content_safety_without_model(self): + """Test that PromptShieldTarget is registered without model_name (it doesn't use one).""" + os.environ["AZURE_CONTENT_SAFETY_API_ENDPOINT"] = "https://test.cognitiveservices.azure.com" + os.environ["AZURE_CONTENT_SAFETY_API_KEY"] = "test_safety_key" + + init = AIRTTargetInitializer() + await init.initialize_async() + + registry = TargetRegistry.get_registry_singleton() + assert "azure_content_safety" in registry + + @pytest.mark.asyncio + async def test_underlying_model_passed_when_set(self): + """Test that underlying_model is passed to target when env var is set.""" + os.environ["AZURE_OPENAI_GPT4O_ENDPOINT"] = "https://my-deployment.openai.azure.com" + os.environ["AZURE_OPENAI_GPT4O_KEY"] = "test_key" + os.environ["AZURE_OPENAI_GPT4O_MODEL"] = "my-deployment-name" + os.environ["AZURE_OPENAI_GPT4O_UNDERLYING_MODEL"] = "gpt-4o" + + init = AIRTTargetInitializer() + await init.initialize_async() + + registry = TargetRegistry.get_registry_singleton() + target = registry.get_instance_by_name("azure_openai_gpt4o") + assert target is not None + assert target._model_name == "my-deployment-name" + assert target._underlying_model == "gpt-4o" + + @pytest.mark.asyncio + async def test_registers_ollama_without_api_key(self): + """Test that Ollama target is registered without requiring an API key.""" + os.environ["OLLAMA_CHAT_ENDPOINT"] = "http://127.0.0.1:11434/v1" + os.environ["OLLAMA_MODEL"] = "llama2" + + init = AIRTTargetInitializer() + await init.initialize_async() + + registry = TargetRegistry.get_registry_singleton() + assert "ollama" in registry + target = registry.get_instance_by_name("ollama") + assert target is not None + assert target._model_name == "llama2" + + +@pytest.mark.usefixtures("patch_central_database") +class TestAIRTTargetInitializerTargetConfigs: + """Tests verifying TARGET_CONFIGS covers expected targets.""" + + def test_target_configs_not_empty(self): + """Test that TARGET_CONFIGS has configurations defined.""" + assert len(TARGET_CONFIGS) > 0 + + def test_all_configs_have_required_fields(self): + 
"""Test that all TARGET_CONFIGS have required fields (key_var is optional for some).""" + for config in TARGET_CONFIGS: + assert config.registry_name, f"Config missing registry_name" + assert config.target_class, f"Config {config.registry_name} missing target_class" + assert config.endpoint_var, f"Config {config.registry_name} missing endpoint_var" + # key_var is optional for targets like Ollama that don't require auth + + def test_expected_targets_in_configs(self): + """Test that expected target names are in TARGET_CONFIGS.""" + registry_names = [config.registry_name for config in TARGET_CONFIGS] + + # Verify key targets are configured (using new primary config names) + assert "platform_openai_chat" in registry_names + assert "azure_openai_gpt4o" in registry_names + assert "openai_image_platform" in registry_names + assert "openai_tts_platform" in registry_names + assert "azure_content_safety" in registry_names + assert "ollama" in registry_names + assert "groq" in registry_names + assert "google_gemini" in registry_names + + +class TestAIRTTargetInitializerGetInfo: + """Tests for AIRTTargetInitializer.get_info_async method.""" + + @pytest.mark.asyncio + async def test_get_info_returns_expected_structure(self): + """Test that get_info_async returns expected structure.""" + info = await AIRTTargetInitializer.get_info_async() + + assert isinstance(info, dict) + assert info["name"] == "AIRT Target Initializer" + assert info["class"] == "AIRTTargetInitializer" + assert "description" in info + assert isinstance(info["description"], str) + + @pytest.mark.asyncio + async def test_get_info_required_env_vars_empty_or_not_present(self): + """Test that get_info has empty or no required_env_vars (since none are required).""" + info = await AIRTTargetInitializer.get_info_async() + + # required_env_vars may be omitted or empty since this initializer has no requirements + if "required_env_vars" in info: + assert info["required_env_vars"] == [] From 5c31b8579166cab0fedcf85748fb375a5ac5b7c6 Mon Sep 17 00:00:00 2001 From: Roman Lutz Date: Mon, 9 Feb 2026 11:05:00 -0800 Subject: [PATCH 10/12] potential CI fix with diagnostics --- .github/workflows/docker_build.yml | 9 +++++++-- 1 file changed, 7 insertions(+), 2 deletions(-) diff --git a/.github/workflows/docker_build.yml b/.github/workflows/docker_build.yml index de9db8f742..84d0104586 100644 --- a/.github/workflows/docker_build.yml +++ b/.github/workflows/docker_build.yml @@ -208,7 +208,9 @@ jobs: curl -sf http://localhost:8000/api/health || (echo "Health endpoint failed" && docker logs pyrit-gui-test && exit 1) echo "Testing frontend is served..." 
-          curl -sf http://localhost:8000/ | grep -q "<!DOCTYPE html>" || (echo "Frontend not served" && exit 1)
+          RESPONSE=$(curl -s http://localhost:8000/)
+          echo "$RESPONSE" | head -5
+          echo "$RESPONSE" | grep -iq '<!doctype html>' || (echo "Frontend not served" && docker logs pyrit-gui-test && exit 1)
 
           echo "✅ GUI mode tests passed"
           docker stop pyrit-gui-test && docker rm pyrit-gui-test
@@ -289,7 +291,10 @@
           fi
 
           curl -sf http://localhost:8000/api/health || (echo "Health endpoint failed" && docker logs pyrit-gui-pypi && exit 1)
-          curl -sf http://localhost:8000/ | grep -q "<!DOCTYPE html>" || (echo "Frontend not served" && exit 1)
+
+          RESPONSE=$(curl -s http://localhost:8000/)
+          echo "$RESPONSE" | head -5
+          echo "$RESPONSE" | grep -iq '<!doctype html>' || (echo "Frontend not served" && docker logs pyrit-gui-pypi && exit 1)
 
           echo "✅ GUI mode tests passed (PyPI)"
           docker stop pyrit-gui-pypi && docker rm pyrit-gui-pypi

From 7e62b11ca20d30ccf51276dc1f4e5209a5ee05a2 Mon Sep 17 00:00:00 2001
From: Roman Lutz
Date: Mon, 9 Feb 2026 12:21:23 -0800
Subject: [PATCH 11/12] remove path filter for triggering pipeline

---
 .github/workflows/docker_build.yml | 12 ------------
 1 file changed, 12 deletions(-)

diff --git a/.github/workflows/docker_build.yml b/.github/workflows/docker_build.yml
index 84d0104586..5010859c86 100644
--- a/.github/workflows/docker_build.yml
+++ b/.github/workflows/docker_build.yml
@@ -6,22 +6,10 @@ on:
   push:
     branches:
       - "main"
-    paths:
-      - '.devcontainer/**'
-      - 'docker/**'
-      - 'pyproject.toml'
-      - 'frontend/**'
-      - '.github/workflows/docker_build.yml'
   pull_request:
     branches:
       - "main"
       - "release/**"
-    paths:
-      - '.devcontainer/**'
-      - 'docker/**'
-      - 'pyproject.toml'
-      - 'frontend/**'
-      - '.github/workflows/docker_build.yml'
   workflow_dispatch:
 
 concurrency:

From 82571d32f401b4ab02eb3b12497dbe05050e0c4f Mon Sep 17 00:00:00 2001
From: Roman Lutz
Date: Tue, 10 Feb 2026 01:57:02 -0800
Subject: [PATCH 12/12] Update prepare_package.py

Co-authored-by: Victor Valbuena <50061128+ValbuenaVC@users.noreply.github.com>
---
 build_scripts/prepare_package.py | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/build_scripts/prepare_package.py b/build_scripts/prepare_package.py
index a21ff55f18..1ed307d5c0 100644
--- a/build_scripts/prepare_package.py
+++ b/build_scripts/prepare_package.py
@@ -33,7 +33,7 @@ def build_frontend(frontend_dir: Path) -> bool:
         print(f"Found npm version: {result.stdout.strip()}")
     except (subprocess.CalledProcessError, FileNotFoundError):
         print("ERROR: npm is not installed or not in PATH")
-        print("Please install Node.js and npm from https://nodejs.org/")
+        print("Please install Node.js 20.x and npm from https://nodejs.org/")
         return False
 
     # Check if package.json exists