diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml index a1907b2..c5a7b79 100644 --- a/.github/workflows/ci.yml +++ b/.github/workflows/ci.yml @@ -19,7 +19,7 @@ jobs: runs-on: ${{ github.repository == 'stainless-sdks/isaacus-python' && 'depot-ubuntu-24.04' || 'ubuntu-latest' }} if: github.event_name == 'push' || github.event.pull_request.head.repo.fork steps: - - uses: actions/checkout@v4 + - uses: actions/checkout@v6 - name: Install Rye run: | @@ -44,7 +44,7 @@ jobs: id-token: write runs-on: ${{ github.repository == 'stainless-sdks/isaacus-python' && 'depot-ubuntu-24.04' || 'ubuntu-latest' }} steps: - - uses: actions/checkout@v4 + - uses: actions/checkout@v6 - name: Install Rye run: | @@ -63,7 +63,7 @@ jobs: - name: Get GitHub OIDC Token if: github.repository == 'stainless-sdks/isaacus-python' id: github-oidc - uses: actions/github-script@v6 + uses: actions/github-script@v8 with: script: core.setOutput('github_token', await core.getIDToken()); @@ -81,7 +81,7 @@ jobs: runs-on: ${{ github.repository == 'stainless-sdks/isaacus-python' && 'depot-ubuntu-24.04' || 'ubuntu-latest' }} if: github.event_name == 'push' || github.event.pull_request.head.repo.fork steps: - - uses: actions/checkout@v4 + - uses: actions/checkout@v6 - name: Install Rye run: | diff --git a/.github/workflows/publish-pypi.yml b/.github/workflows/publish-pypi.yml index 04538cd..5c2a7c8 100644 --- a/.github/workflows/publish-pypi.yml +++ b/.github/workflows/publish-pypi.yml @@ -14,7 +14,7 @@ jobs: runs-on: ubuntu-latest steps: - - uses: actions/checkout@v4 + - uses: actions/checkout@v6 - name: Install Rye run: | diff --git a/.github/workflows/release-doctor.yml b/.github/workflows/release-doctor.yml index e1eed33..1d43113 100644 --- a/.github/workflows/release-doctor.yml +++ b/.github/workflows/release-doctor.yml @@ -12,7 +12,7 @@ jobs: if: github.repository == 'isaacus-dev/isaacus-python' && (github.event_name == 'push' || github.event_name == 'workflow_dispatch' || startsWith(github.head_ref, 'release-please') || github.head_ref == 'next') steps: - - uses: actions/checkout@v4 + - uses: actions/checkout@v6 - name: Check release environment run: | diff --git a/.release-please-manifest.json b/.release-please-manifest.json index 0598874..091cfb1 100644 --- a/.release-please-manifest.json +++ b/.release-please-manifest.json @@ -1,3 +1,3 @@ { - ".": "0.9.1" + ".": "0.10.0" } \ No newline at end of file diff --git a/.stats.yml b/.stats.yml index d088835..60a8fa2 100644 --- a/.stats.yml +++ b/.stats.yml @@ -1,4 +1,4 @@ -configured_endpoints: 4 -openapi_spec_url: https://storage.googleapis.com/stainless-sdk-openapi-specs/isaacus%2Fisaacus-6705b8e0baa0e4aad69a1c04e9876b352e40e0e5caf21e87e7b2c355e70c4e66.yml -openapi_spec_hash: 87d3cc80f5ddc5275e8a47d35f1a484e -config_hash: a85580968a69d8d6fadf96e5e2d6870e +configured_endpoints: 5 +openapi_spec_url: https://storage.googleapis.com/stainless-sdk-openapi-specs/isaacus%2Fisaacus-8de0fb6fe23bf24b4e1d3550eabd580589dc1dc42690dde5b4107485560320e1.yml +openapi_spec_hash: 22dc50a6506bfc2659aa5ad10cc8f5ba +config_hash: 1d77b499f5b4f2dc6986fdd5936d18ef diff --git a/CHANGELOG.md b/CHANGELOG.md index 8379560..967272b 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -1,5 +1,44 @@ # Changelog +## 0.10.0 (2026-02-03) + +Full Changelog: [v0.9.1...v0.10.0](https://github.com/isaacus-dev/isaacus-python/compare/v0.9.1...v0.10.0) + +### Features + +* **api:** add min max ([50ece81](https://github.com/isaacus-dev/isaacus-python/commit/50ece81e21c605fd2801e2037f52046afe32a7f4)) +* **api:** 
switch to span objects for enrichment, note closed beta ([326b7dc](https://github.com/isaacus-dev/isaacus-python/commit/326b7dca713b9240bb6ea825f5361de804ca300b)) +* **sdk:** add enrichments ([eb4057b](https://github.com/isaacus-dev/isaacus-python/commit/eb4057b54a26158aded93e0b943e9828c5457b39)) + + +### Bug Fixes + +* **client:** close streams without requiring full consumption ([13eb93f](https://github.com/isaacus-dev/isaacus-python/commit/13eb93fea611507de440a4418b82de3aa8a06500)) +* compat with Python 3.14 ([89ef152](https://github.com/isaacus-dev/isaacus-python/commit/89ef152681673c408e217ad44f51dba229f9d371)) +* **compat:** update signatures of `model_dump` and `model_dump_json` for Pydantic v1 ([4d7b71d](https://github.com/isaacus-dev/isaacus-python/commit/4d7b71de78bdd83f00d16ec409c57cbe4dd01ebb)) +* ensure streams are always closed ([c2ae554](https://github.com/isaacus-dev/isaacus-python/commit/c2ae554fd73a5f0d4c0f7eb9a2fa652669d38f2e)) +* **types:** allow pyright to infer TypedDict types within SequenceNotStr ([b89d4ce](https://github.com/isaacus-dev/isaacus-python/commit/b89d4ce184708ad0737ba7fc77f1afe270a8eb50)) +* use async_to_httpx_files in patch method ([30ffab2](https://github.com/isaacus-dev/isaacus-python/commit/30ffab28e37d4f940e69f2afe199fad830f57ba0)) + + +### Chores + +* add missing docstrings ([0c4e7d4](https://github.com/isaacus-dev/isaacus-python/commit/0c4e7d4039a6b60adccc39be7a98b9efdbbc3df4)) +* add Python 3.14 classifier and testing ([083d4b6](https://github.com/isaacus-dev/isaacus-python/commit/083d4b600ddba22fc81c33da7dee079c97088c81)) +* **deps:** mypy 1.18.1 has a regression, pin to 1.17 ([9612e35](https://github.com/isaacus-dev/isaacus-python/commit/9612e35c83634a774c507349b4cd211d4d8c2c84)) +* **docs:** use environment variables for authentication in code snippets ([ac22598](https://github.com/isaacus-dev/isaacus-python/commit/ac2259872f666a3c0cdb57695623aa980e5bdf36)) +* **internal/tests:** avoid race condition with implicit client cleanup ([871fa54](https://github.com/isaacus-dev/isaacus-python/commit/871fa54adcd54f3f5ffde794e4ae703e7913e6b7)) +* **internal:** add missing files argument to base client ([669f50a](https://github.com/isaacus-dev/isaacus-python/commit/669f50a93bc5868ff80a48e0a29c4a53e362fa99)) +* **internal:** grammar fix (it's -> its) ([84145f2](https://github.com/isaacus-dev/isaacus-python/commit/84145f233adddf8b55a24b0443a2f9c9dc5a4cbd)) +* **package:** drop Python 3.8 support ([556d5d0](https://github.com/isaacus-dev/isaacus-python/commit/556d5d07c941f4e2c4b6e523756723ce5661c4b3)) +* speedup initial import ([2d500ed](https://github.com/isaacus-dev/isaacus-python/commit/2d500ed45248bf120cd21236644d9573d563120b)) +* update lockfile ([b53392a](https://github.com/isaacus-dev/isaacus-python/commit/b53392a51c7b7e8605880af51e53db044ad14d94)) + + +### Documentation + +* **sdk:** specify example params ([f617346](https://github.com/isaacus-dev/isaacus-python/commit/f617346d3262a45594d34357e97fc616f5ddf9e9)) + ## 0.9.1 (2025-10-19) Full Changelog: [v0.9.0...v0.9.1](https://github.com/isaacus-dev/isaacus-python/compare/v0.9.0...v0.9.1) diff --git a/LICENSE b/LICENSE index 45ad5b5..7a2dc45 100644 --- a/LICENSE +++ b/LICENSE @@ -186,7 +186,7 @@ same "printed page" as the copyright notice for easier identification within third-party archives. - Copyright 2025 Isaacus + Copyright 2026 Isaacus Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. 
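The headline change in 0.10.0 is the new enrichments resource (`client.enrichments.create`), wired up in `src/isaacus/resources/enrichments.py` further down this diff. A minimal usage sketch, assuming only the `create()` signature shown there — the fields of `EnrichmentResponse` are not visible in this excerpt, so the result is simply printed:

```python
import os

from isaacus import Isaacus

# ISAACUS_API_KEY is read from the environment by default, matching the README examples.
client = Isaacus(api_key=os.environ.get("ISAACUS_API_KEY"))

enrichment_response = client.enrichments.create(
    model="kanon-2-enricher-preview",
    texts=["Are restraints of trade enforceable under English law?"],
    # Optional: drop tokens past the model's maximum input length instead of raising,
    # which is the documented default behaviour when this argument is omitted.
    overflow_strategy="drop_end",
)
print(enrichment_response)
```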
diff --git a/README.md b/README.md index 818160e..813cfd9 100644 --- a/README.md +++ b/README.md @@ -3,12 +3,21 @@ [![PyPI version](https://img.shields.io/pypi/v/isaacus.svg?label=pypi%20(stable))](https://pypi.org/project/isaacus/) -The Isaacus Python library provides convenient access to the Isaacus REST API from any Python 3.8+ +The Isaacus Python library provides convenient access to the Isaacus REST API from any Python 3.9+ application. The library includes type definitions for all request params and response fields, and offers both synchronous and asynchronous clients powered by [httpx](https://github.com/encode/httpx). It is generated with [Stainless](https://www.stainless.com/). +## MCP Server + +Use the Isaacus MCP Server to enable AI assistants to interact with this API, allowing them to explore endpoints, make test requests, and use documentation to help integrate this SDK into your application. + +[![Add to Cursor](https://cursor.com/deeplink/mcp-install-dark.svg)](https://cursor.com/en-US/install-mcp?name=isaacus-mcp&config=eyJjb21tYW5kIjoibnB4IiwiYXJncyI6WyIteSIsImlzYWFjdXMtbWNwIl0sImVudiI6eyJJU0FBQ1VTX0FQSV9LRVkiOiJNeSBBUEkgS2V5In19) +[![Install in VS Code](https://img.shields.io/badge/_-Add_to_VS_Code-blue?style=for-the-badge&logo=data:image/svg%2bxml;base64,PHN2ZyB4bWxucz0iaHR0cDovL3d3dy53My5vcmcvMjAwMC9zdmciIGZpbGw9Im5vbmUiIHZpZXdCb3g9IjAgMCA0MCA0MCI+PHBhdGggZmlsbD0iI0VFRSIgZmlsbC1ydWxlPSJldmVub2RkIiBkPSJNMzAuMjM1IDM5Ljg4NGEyLjQ5MSAyLjQ5MSAwIDAgMS0xLjc4MS0uNzNMMTIuNyAyNC43OGwtMy40NiAyLjYyNC0zLjQwNiAyLjU4MmExLjY2NSAxLjY2NSAwIDAgMS0xLjA4Mi4zMzggMS42NjQgMS42NjQgMCAwIDEtMS4wNDYtLjQzMWwtMi4yLTJhMS42NjYgMS42NjYgMCAwIDEgMC0yLjQ2M0w3LjQ1OCAyMCA0LjY3IDE3LjQ1MyAxLjUwNyAxNC41N2ExLjY2NSAxLjY2NSAwIDAgMSAwLTIuNDYzbDIuMi0yYTEuNjY1IDEuNjY1IDAgMCAxIDIuMTMtLjA5N2w2Ljg2MyA1LjIwOUwyOC40NTIuODQ0YTIuNDg4IDIuNDg4IDAgMCAxIDEuODQxLS43MjljLjM1MS4wMDkuNjk5LjA5MSAxLjAxOS4yNDVsOC4yMzYgMy45NjFhMi41IDIuNSAwIDAgMSAxLjQxNSAyLjI1M3YuMDk5LS4wNDVWMzMuMzd2LS4wNDUuMDk1YTIuNTAxIDIuNTAxIDAgMCAxLTEuNDE2IDIuMjU3bC04LjIzNSAzLjk2MWEyLjQ5MiAyLjQ5MiAwIDAgMS0xLjA3Ny4yNDZabS43MTYtMjguOTQ3LTExLjk0OCA5LjA2MiAxMS45NTIgOS4wNjUtLjAwNC0xOC4xMjdaIi8+PC9zdmc+)](https://vscode.stainless.com/mcp/%7B%22name%22%3A%22isaacus-mcp%22%2C%22command%22%3A%22npx%22%2C%22args%22%3A%5B%22-y%22%2C%22isaacus-mcp%22%5D%2C%22env%22%3A%7B%22ISAACUS_API_KEY%22%3A%22My%20API%20Key%22%7D%7D) + +> Note: You may need to set environment variables in your MCP client. + ## Documentation The REST API documentation can be found on [docs.isaacus.com](https://docs.isaacus.com). The full API of this library can be found in [api.md](api.md). 
@@ -38,6 +47,7 @@ embedding_response = client.embeddings.create( "Are restraints of trade enforceable under English law?", "What is a non-compete clause?", ], + task="retrieval/query", ) print(embedding_response.embeddings) ``` @@ -68,6 +78,7 @@ async def main() -> None: "Are restraints of trade enforceable under English law?", "What is a non-compete clause?", ], + task="retrieval/query", ) print(embedding_response.embeddings) @@ -91,6 +102,7 @@ pip install isaacus[aiohttp] Then you can enable it by instantiating the client with `http_client=DefaultAioHttpClient()`: ```python +import os import asyncio from isaacus import DefaultAioHttpClient from isaacus import AsyncIsaacus @@ -98,7 +110,7 @@ from isaacus import AsyncIsaacus async def main() -> None: async with AsyncIsaacus( - api_key="My API Key", + api_key=os.environ.get("ISAACUS_API_KEY"), # This is the default and can be omitted http_client=DefaultAioHttpClient(), ) as client: embedding_response = await client.embeddings.create( @@ -107,6 +119,7 @@ async def main() -> None: "Are restraints of trade enforceable under English law?", "What is a non-compete clause?", ], + task="retrieval/query", ) print(embedding_response.embeddings) @@ -167,6 +180,7 @@ try: "Are restraints of trade enforceable under English law?", "What is a non-compete clause?", ], + task="retrieval/query", ) except isaacus.APIConnectionError as e: print("The server could not be reached") @@ -216,6 +230,7 @@ client.with_options(max_retries=5).embeddings.create( "Are restraints of trade enforceable under English law?", "What is a non-compete clause?", ], + task="retrieval/query", ) ``` @@ -245,6 +260,7 @@ client.with_options(timeout=5.0).embeddings.create( "Are restraints of trade enforceable under English law?", "What is a non-compete clause?", ], + task="retrieval/query", ) ``` @@ -289,6 +305,7 @@ client = Isaacus() response = client.embeddings.with_raw_response.create( model="kanon-2-embedder", texts=["Are restraints of trade enforceable under English law?", "What is a non-compete clause?"], + task="retrieval/query", ) print(response.headers.get('X-My-Header')) @@ -313,6 +330,7 @@ with client.embeddings.with_streaming_response.create( "Are restraints of trade enforceable under English law?", "What is a non-compete clause?", ], + task="retrieval/query", ) as response: print(response.headers.get("X-My-Header")) @@ -423,7 +441,7 @@ print(isaacus.__version__) ## Requirements -Python 3.8 or higher. +Python 3.9 or higher. 
## Contributing diff --git a/api.md b/api.md index 5ace2eb..438bdae 100644 --- a/api.md +++ b/api.md @@ -49,3 +49,15 @@ from isaacus.types.extractions import AnswerExtractionResponse Methods: - client.extractions.qa.create(\*\*params) -> AnswerExtractionResponse + +# Enrichments + +Types: + +```python +from isaacus.types import EnrichmentResponse +``` + +Methods: + +- client.enrichments.create(\*\*params) -> EnrichmentResponse diff --git a/pyproject.toml b/pyproject.toml index a3cd1c8..fec1f10 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -1,30 +1,32 @@ [project] name = "isaacus" -version = "0.9.1" +version = "0.10.0" description = "The official Python library for the isaacus API" dynamic = ["readme"] license = "Apache-2.0" authors = [ { name = "Isaacus", email = "support@isaacus.com" }, ] + dependencies = [ - "httpx>=0.23.0, <1", - "pydantic>=1.9.0, <3", - "typing-extensions>=4.10, <5", - "anyio>=3.5.0, <5", - "distro>=1.7.0, <2", - "sniffio", + "httpx>=0.23.0, <1", + "pydantic>=1.9.0, <3", + "typing-extensions>=4.10, <5", + "anyio>=3.5.0, <5", + "distro>=1.7.0, <2", + "sniffio", ] -requires-python = ">= 3.8" + +requires-python = ">= 3.9" classifiers = [ "Typing :: Typed", "Intended Audience :: Developers", - "Programming Language :: Python :: 3.8", "Programming Language :: Python :: 3.9", "Programming Language :: Python :: 3.10", "Programming Language :: Python :: 3.11", "Programming Language :: Python :: 3.12", "Programming Language :: Python :: 3.13", + "Programming Language :: Python :: 3.14", "Operating System :: OS Independent", "Operating System :: POSIX", "Operating System :: MacOS", @@ -46,7 +48,7 @@ managed = true # version pins are in requirements-dev.lock dev-dependencies = [ "pyright==1.1.399", - "mypy", + "mypy==1.17", "respx", "pytest", "pytest-asyncio", @@ -141,7 +143,7 @@ filterwarnings = [ # there are a couple of flags that are still disabled by # default in strict mode as they are experimental and niche. typeCheckingMode = "strict" -pythonVersion = "3.8" +pythonVersion = "3.9" exclude = [ "_dev", diff --git a/requirements-dev.lock b/requirements-dev.lock index b40f56f..5f2848c 100644 --- a/requirements-dev.lock +++ b/requirements-dev.lock @@ -12,40 +12,45 @@ -e file:. 
aiohappyeyeballs==2.6.1 # via aiohttp -aiohttp==3.12.8 +aiohttp==3.13.2 # via httpx-aiohttp # via isaacus -aiosignal==1.3.2 +aiosignal==1.4.0 # via aiohttp -annotated-types==0.6.0 +annotated-types==0.7.0 # via pydantic -anyio==4.4.0 +anyio==4.12.0 # via httpx # via isaacus -argcomplete==3.1.2 +argcomplete==3.6.3 # via nox async-timeout==5.0.1 # via aiohttp -attrs==25.3.0 +attrs==25.4.0 # via aiohttp -certifi==2023.7.22 + # via nox +backports-asyncio-runner==1.2.0 + # via pytest-asyncio +certifi==2025.11.12 # via httpcore # via httpx -colorlog==6.7.0 +colorlog==6.10.1 + # via nox +dependency-groups==1.3.1 # via nox -dirty-equals==0.6.0 -distlib==0.3.7 +dirty-equals==0.11 +distlib==0.4.0 # via virtualenv -distro==1.8.0 +distro==1.9.0 # via isaacus -exceptiongroup==1.2.2 +exceptiongroup==1.3.1 # via anyio # via pytest -execnet==2.1.1 +execnet==2.1.2 # via pytest-xdist -filelock==3.12.4 +filelock==3.19.1 # via virtualenv -frozenlist==1.6.2 +frozenlist==1.8.0 # via aiohttp # via aiosignal h11==0.16.0 @@ -58,80 +63,87 @@ httpx==0.28.1 # via respx httpx-aiohttp==0.1.9 # via isaacus -idna==3.4 +humanize==4.13.0 + # via nox +idna==3.11 # via anyio # via httpx # via yarl -importlib-metadata==7.0.0 -iniconfig==2.0.0 +importlib-metadata==8.7.0 +iniconfig==2.1.0 # via pytest markdown-it-py==3.0.0 # via rich mdurl==0.1.2 # via markdown-it-py -multidict==6.4.4 +multidict==6.7.0 # via aiohttp # via yarl -mypy==1.14.1 -mypy-extensions==1.0.0 +mypy==1.17.0 +mypy-extensions==1.1.0 # via mypy -nodeenv==1.8.0 +nodeenv==1.9.1 # via pyright -nox==2023.4.22 -packaging==23.2 +nox==2025.11.12 +packaging==25.0 + # via dependency-groups # via nox # via pytest -platformdirs==3.11.0 +pathspec==0.12.1 + # via mypy +platformdirs==4.4.0 # via virtualenv -pluggy==1.5.0 +pluggy==1.6.0 # via pytest -propcache==0.3.1 +propcache==0.4.1 # via aiohttp # via yarl -pydantic==2.11.9 +pydantic==2.12.5 # via isaacus -pydantic-core==2.33.2 +pydantic-core==2.41.5 # via pydantic -pygments==2.18.0 +pygments==2.19.2 + # via pytest # via rich pyright==1.1.399 -pytest==8.3.3 +pytest==8.4.2 # via pytest-asyncio # via pytest-xdist -pytest-asyncio==0.24.0 -pytest-xdist==3.7.0 -python-dateutil==2.8.2 +pytest-asyncio==1.2.0 +pytest-xdist==3.8.0 +python-dateutil==2.9.0.post0 # via time-machine -pytz==2023.3.post1 - # via dirty-equals respx==0.22.0 -rich==13.7.1 -ruff==0.9.4 -setuptools==68.2.2 - # via nodeenv -six==1.16.0 +rich==14.2.0 +ruff==0.14.7 +six==1.17.0 # via python-dateutil -sniffio==1.3.0 - # via anyio +sniffio==1.3.1 # via isaacus -time-machine==2.9.0 -tomli==2.0.2 +time-machine==2.19.0 +tomli==2.3.0 + # via dependency-groups # via mypy + # via nox # via pytest -typing-extensions==4.12.2 +typing-extensions==4.15.0 + # via aiosignal # via anyio + # via exceptiongroup # via isaacus # via multidict # via mypy # via pydantic # via pydantic-core # via pyright + # via pytest-asyncio # via typing-inspection -typing-inspection==0.4.1 + # via virtualenv +typing-inspection==0.4.2 # via pydantic -virtualenv==20.24.5 +virtualenv==20.35.4 # via nox -yarl==1.20.0 +yarl==1.22.0 # via aiohttp -zipp==3.17.0 +zipp==3.23.0 # via importlib-metadata diff --git a/requirements.lock b/requirements.lock index c76cec7..60eae12 100644 --- a/requirements.lock +++ b/requirements.lock @@ -12,28 +12,28 @@ -e file:. 
aiohappyeyeballs==2.6.1 # via aiohttp -aiohttp==3.12.8 +aiohttp==3.13.2 # via httpx-aiohttp # via isaacus -aiosignal==1.3.2 +aiosignal==1.4.0 # via aiohttp -annotated-types==0.6.0 +annotated-types==0.7.0 # via pydantic -anyio==4.4.0 +anyio==4.12.0 # via httpx # via isaacus async-timeout==5.0.1 # via aiohttp -attrs==25.3.0 +attrs==25.4.0 # via aiohttp -certifi==2023.7.22 +certifi==2025.11.12 # via httpcore # via httpx -distro==1.8.0 +distro==1.9.0 # via isaacus -exceptiongroup==1.2.2 +exceptiongroup==1.3.1 # via anyio -frozenlist==1.6.2 +frozenlist==1.8.0 # via aiohttp # via aiosignal h11==0.16.0 @@ -45,31 +45,32 @@ httpx==0.28.1 # via isaacus httpx-aiohttp==0.1.9 # via isaacus -idna==3.4 +idna==3.11 # via anyio # via httpx # via yarl -multidict==6.4.4 +multidict==6.7.0 # via aiohttp # via yarl -propcache==0.3.1 +propcache==0.4.1 # via aiohttp # via yarl -pydantic==2.11.9 +pydantic==2.12.5 # via isaacus -pydantic-core==2.33.2 +pydantic-core==2.41.5 # via pydantic -sniffio==1.3.0 - # via anyio +sniffio==1.3.1 # via isaacus -typing-extensions==4.12.2 +typing-extensions==4.15.0 + # via aiosignal # via anyio + # via exceptiongroup # via isaacus # via multidict # via pydantic # via pydantic-core # via typing-inspection -typing-inspection==0.4.1 +typing-inspection==0.4.2 # via pydantic -yarl==1.20.0 +yarl==1.22.0 # via aiohttp diff --git a/scripts/lint b/scripts/lint index 483db95..140b62a 100755 --- a/scripts/lint +++ b/scripts/lint @@ -4,8 +4,13 @@ set -e cd "$(dirname "$0")/.." -echo "==> Running lints" -rye run lint +if [ "$1" = "--fix" ]; then + echo "==> Running lints with --fix" + rye run fix:ruff +else + echo "==> Running lints" + rye run lint +fi echo "==> Making sure it imports" rye run python -c 'import isaacus' diff --git a/src/isaacus/_base_client.py b/src/isaacus/_base_client.py index 56e03dc..967ce99 100644 --- a/src/isaacus/_base_client.py +++ b/src/isaacus/_base_client.py @@ -9,6 +9,7 @@ import inspect import logging import platform +import warnings import email.utils from types import TracebackType from random import random @@ -51,9 +52,11 @@ ResponseT, AnyMapping, PostParser, + BinaryTypes, RequestFiles, HttpxSendArgs, RequestOptions, + AsyncBinaryTypes, HttpxRequestFiles, ModelBuilderProtocol, not_given, @@ -83,6 +86,7 @@ APIConnectionError, APIResponseValidationError, ) +from ._utils._json import openapi_dumps log: logging.Logger = logging.getLogger(__name__) @@ -477,8 +481,19 @@ def _build_request( retries_taken: int = 0, ) -> httpx.Request: if log.isEnabledFor(logging.DEBUG): - log.debug("Request options: %s", model_dump(options, exclude_unset=True)) - + log.debug( + "Request options: %s", + model_dump( + options, + exclude_unset=True, + # Pydantic v1 can't dump every type we support in content, so we exclude it for now. 
+ exclude={ + "content", + } + if PYDANTIC_V1 + else {}, + ), + ) kwargs: dict[str, Any] = {} json_data = options.json_data @@ -532,10 +547,18 @@ def _build_request( is_body_allowed = options.method.lower() != "get" if is_body_allowed: - if isinstance(json_data, bytes): + if options.content is not None and json_data is not None: + raise TypeError("Passing both `content` and `json_data` is not supported") + if options.content is not None and files is not None: + raise TypeError("Passing both `content` and `files` is not supported") + if options.content is not None: + kwargs["content"] = options.content + elif isinstance(json_data, bytes): kwargs["content"] = json_data - else: - kwargs["json"] = json_data if is_given(json_data) else None + elif not files: + # Don't set content when JSON is sent as multipart/form-data, + # since httpx's content param overrides other body arguments + kwargs["content"] = openapi_dumps(json_data) if is_given(json_data) and json_data is not None else None kwargs["files"] = files else: headers.pop("Content-Type", None) @@ -1194,6 +1217,7 @@ def post( *, cast_to: Type[ResponseT], body: Body | None = None, + content: BinaryTypes | None = None, options: RequestOptions = {}, files: RequestFiles | None = None, stream: Literal[False] = False, @@ -1206,6 +1230,7 @@ def post( *, cast_to: Type[ResponseT], body: Body | None = None, + content: BinaryTypes | None = None, options: RequestOptions = {}, files: RequestFiles | None = None, stream: Literal[True], @@ -1219,6 +1244,7 @@ def post( *, cast_to: Type[ResponseT], body: Body | None = None, + content: BinaryTypes | None = None, options: RequestOptions = {}, files: RequestFiles | None = None, stream: bool, @@ -1231,13 +1257,25 @@ def post( *, cast_to: Type[ResponseT], body: Body | None = None, + content: BinaryTypes | None = None, options: RequestOptions = {}, files: RequestFiles | None = None, stream: bool = False, stream_cls: type[_StreamT] | None = None, ) -> ResponseT | _StreamT: + if body is not None and content is not None: + raise TypeError("Passing both `body` and `content` is not supported") + if files is not None and content is not None: + raise TypeError("Passing both `files` and `content` is not supported") + if isinstance(body, bytes): + warnings.warn( + "Passing raw bytes as `body` is deprecated and will be removed in a future version. " + "Please pass raw bytes via the `content` parameter instead.", + DeprecationWarning, + stacklevel=2, + ) opts = FinalRequestOptions.construct( - method="post", url=path, json_data=body, files=to_httpx_files(files), **options + method="post", url=path, json_data=body, content=content, files=to_httpx_files(files), **options ) return cast(ResponseT, self.request(cast_to, opts, stream=stream, stream_cls=stream_cls)) @@ -1247,9 +1285,24 @@ def patch( *, cast_to: Type[ResponseT], body: Body | None = None, + content: BinaryTypes | None = None, + files: RequestFiles | None = None, options: RequestOptions = {}, ) -> ResponseT: - opts = FinalRequestOptions.construct(method="patch", url=path, json_data=body, **options) + if body is not None and content is not None: + raise TypeError("Passing both `body` and `content` is not supported") + if files is not None and content is not None: + raise TypeError("Passing both `files` and `content` is not supported") + if isinstance(body, bytes): + warnings.warn( + "Passing raw bytes as `body` is deprecated and will be removed in a future version. 
" + "Please pass raw bytes via the `content` parameter instead.", + DeprecationWarning, + stacklevel=2, + ) + opts = FinalRequestOptions.construct( + method="patch", url=path, json_data=body, content=content, files=to_httpx_files(files), **options + ) return self.request(cast_to, opts) def put( @@ -1258,11 +1311,23 @@ def put( *, cast_to: Type[ResponseT], body: Body | None = None, + content: BinaryTypes | None = None, files: RequestFiles | None = None, options: RequestOptions = {}, ) -> ResponseT: + if body is not None and content is not None: + raise TypeError("Passing both `body` and `content` is not supported") + if files is not None and content is not None: + raise TypeError("Passing both `files` and `content` is not supported") + if isinstance(body, bytes): + warnings.warn( + "Passing raw bytes as `body` is deprecated and will be removed in a future version. " + "Please pass raw bytes via the `content` parameter instead.", + DeprecationWarning, + stacklevel=2, + ) opts = FinalRequestOptions.construct( - method="put", url=path, json_data=body, files=to_httpx_files(files), **options + method="put", url=path, json_data=body, content=content, files=to_httpx_files(files), **options ) return self.request(cast_to, opts) @@ -1272,9 +1337,19 @@ def delete( *, cast_to: Type[ResponseT], body: Body | None = None, + content: BinaryTypes | None = None, options: RequestOptions = {}, ) -> ResponseT: - opts = FinalRequestOptions.construct(method="delete", url=path, json_data=body, **options) + if body is not None and content is not None: + raise TypeError("Passing both `body` and `content` is not supported") + if isinstance(body, bytes): + warnings.warn( + "Passing raw bytes as `body` is deprecated and will be removed in a future version. " + "Please pass raw bytes via the `content` parameter instead.", + DeprecationWarning, + stacklevel=2, + ) + opts = FinalRequestOptions.construct(method="delete", url=path, json_data=body, content=content, **options) return self.request(cast_to, opts) def get_api_list( @@ -1714,6 +1789,7 @@ async def post( *, cast_to: Type[ResponseT], body: Body | None = None, + content: AsyncBinaryTypes | None = None, files: RequestFiles | None = None, options: RequestOptions = {}, stream: Literal[False] = False, @@ -1726,6 +1802,7 @@ async def post( *, cast_to: Type[ResponseT], body: Body | None = None, + content: AsyncBinaryTypes | None = None, files: RequestFiles | None = None, options: RequestOptions = {}, stream: Literal[True], @@ -1739,6 +1816,7 @@ async def post( *, cast_to: Type[ResponseT], body: Body | None = None, + content: AsyncBinaryTypes | None = None, files: RequestFiles | None = None, options: RequestOptions = {}, stream: bool, @@ -1751,13 +1829,25 @@ async def post( *, cast_to: Type[ResponseT], body: Body | None = None, + content: AsyncBinaryTypes | None = None, files: RequestFiles | None = None, options: RequestOptions = {}, stream: bool = False, stream_cls: type[_AsyncStreamT] | None = None, ) -> ResponseT | _AsyncStreamT: + if body is not None and content is not None: + raise TypeError("Passing both `body` and `content` is not supported") + if files is not None and content is not None: + raise TypeError("Passing both `files` and `content` is not supported") + if isinstance(body, bytes): + warnings.warn( + "Passing raw bytes as `body` is deprecated and will be removed in a future version. 
" + "Please pass raw bytes via the `content` parameter instead.", + DeprecationWarning, + stacklevel=2, + ) opts = FinalRequestOptions.construct( - method="post", url=path, json_data=body, files=await async_to_httpx_files(files), **options + method="post", url=path, json_data=body, content=content, files=await async_to_httpx_files(files), **options ) return await self.request(cast_to, opts, stream=stream, stream_cls=stream_cls) @@ -1767,9 +1857,29 @@ async def patch( *, cast_to: Type[ResponseT], body: Body | None = None, + content: AsyncBinaryTypes | None = None, + files: RequestFiles | None = None, options: RequestOptions = {}, ) -> ResponseT: - opts = FinalRequestOptions.construct(method="patch", url=path, json_data=body, **options) + if body is not None and content is not None: + raise TypeError("Passing both `body` and `content` is not supported") + if files is not None and content is not None: + raise TypeError("Passing both `files` and `content` is not supported") + if isinstance(body, bytes): + warnings.warn( + "Passing raw bytes as `body` is deprecated and will be removed in a future version. " + "Please pass raw bytes via the `content` parameter instead.", + DeprecationWarning, + stacklevel=2, + ) + opts = FinalRequestOptions.construct( + method="patch", + url=path, + json_data=body, + content=content, + files=await async_to_httpx_files(files), + **options, + ) return await self.request(cast_to, opts) async def put( @@ -1778,11 +1888,23 @@ async def put( *, cast_to: Type[ResponseT], body: Body | None = None, + content: AsyncBinaryTypes | None = None, files: RequestFiles | None = None, options: RequestOptions = {}, ) -> ResponseT: + if body is not None and content is not None: + raise TypeError("Passing both `body` and `content` is not supported") + if files is not None and content is not None: + raise TypeError("Passing both `files` and `content` is not supported") + if isinstance(body, bytes): + warnings.warn( + "Passing raw bytes as `body` is deprecated and will be removed in a future version. " + "Please pass raw bytes via the `content` parameter instead.", + DeprecationWarning, + stacklevel=2, + ) opts = FinalRequestOptions.construct( - method="put", url=path, json_data=body, files=await async_to_httpx_files(files), **options + method="put", url=path, json_data=body, content=content, files=await async_to_httpx_files(files), **options ) return await self.request(cast_to, opts) @@ -1792,9 +1914,19 @@ async def delete( *, cast_to: Type[ResponseT], body: Body | None = None, + content: AsyncBinaryTypes | None = None, options: RequestOptions = {}, ) -> ResponseT: - opts = FinalRequestOptions.construct(method="delete", url=path, json_data=body, **options) + if body is not None and content is not None: + raise TypeError("Passing both `body` and `content` is not supported") + if isinstance(body, bytes): + warnings.warn( + "Passing raw bytes as `body` is deprecated and will be removed in a future version. 
" + "Please pass raw bytes via the `content` parameter instead.", + DeprecationWarning, + stacklevel=2, + ) + opts = FinalRequestOptions.construct(method="delete", url=path, json_data=body, content=content, **options) return await self.request(cast_to, opts) def get_api_list( diff --git a/src/isaacus/_client.py b/src/isaacus/_client.py index 990b9e6..ae83c36 100644 --- a/src/isaacus/_client.py +++ b/src/isaacus/_client.py @@ -3,7 +3,7 @@ from __future__ import annotations import os -from typing import Any, Mapping +from typing import TYPE_CHECKING, Any, Mapping from typing_extensions import Self, override import httpx @@ -20,8 +20,8 @@ not_given, ) from ._utils import is_given, get_async_library +from ._compat import cached_property from ._version import __version__ -from .resources import embeddings, rerankings from ._streaming import Stream as Stream, AsyncStream as AsyncStream from ._exceptions import IsaacusError, APIStatusError from ._base_client import ( @@ -29,20 +29,19 @@ SyncAPIClient, AsyncAPIClient, ) -from .resources.extractions import extractions -from .resources.classifications import classifications + +if TYPE_CHECKING: + from .resources import embeddings, rerankings, enrichments, extractions, classifications + from .resources.embeddings import EmbeddingsResource, AsyncEmbeddingsResource + from .resources.rerankings import RerankingsResource, AsyncRerankingsResource + from .resources.enrichments import EnrichmentsResource, AsyncEnrichmentsResource + from .resources.extractions.extractions import ExtractionsResource, AsyncExtractionsResource + from .resources.classifications.classifications import ClassificationsResource, AsyncClassificationsResource __all__ = ["Timeout", "Transport", "ProxiesTypes", "RequestOptions", "Isaacus", "AsyncIsaacus", "Client", "AsyncClient"] class Isaacus(SyncAPIClient): - embeddings: embeddings.EmbeddingsResource - classifications: classifications.ClassificationsResource - rerankings: rerankings.RerankingsResource - extractions: extractions.ExtractionsResource - with_raw_response: IsaacusWithRawResponse - with_streaming_response: IsaacusWithStreamedResponse - # client options api_key: str @@ -97,12 +96,43 @@ def __init__( _strict_response_validation=_strict_response_validation, ) - self.embeddings = embeddings.EmbeddingsResource(self) - self.classifications = classifications.ClassificationsResource(self) - self.rerankings = rerankings.RerankingsResource(self) - self.extractions = extractions.ExtractionsResource(self) - self.with_raw_response = IsaacusWithRawResponse(self) - self.with_streaming_response = IsaacusWithStreamedResponse(self) + @cached_property + def embeddings(self) -> EmbeddingsResource: + from .resources.embeddings import EmbeddingsResource + + return EmbeddingsResource(self) + + @cached_property + def classifications(self) -> ClassificationsResource: + from .resources.classifications import ClassificationsResource + + return ClassificationsResource(self) + + @cached_property + def rerankings(self) -> RerankingsResource: + from .resources.rerankings import RerankingsResource + + return RerankingsResource(self) + + @cached_property + def extractions(self) -> ExtractionsResource: + from .resources.extractions import ExtractionsResource + + return ExtractionsResource(self) + + @cached_property + def enrichments(self) -> EnrichmentsResource: + from .resources.enrichments import EnrichmentsResource + + return EnrichmentsResource(self) + + @cached_property + def with_raw_response(self) -> IsaacusWithRawResponse: + return 
IsaacusWithRawResponse(self) + + @cached_property + def with_streaming_response(self) -> IsaacusWithStreamedResponse: + return IsaacusWithStreamedResponse(self) @property @override @@ -210,13 +240,6 @@ def _make_status_error( class AsyncIsaacus(AsyncAPIClient): - embeddings: embeddings.AsyncEmbeddingsResource - classifications: classifications.AsyncClassificationsResource - rerankings: rerankings.AsyncRerankingsResource - extractions: extractions.AsyncExtractionsResource - with_raw_response: AsyncIsaacusWithRawResponse - with_streaming_response: AsyncIsaacusWithStreamedResponse - # client options api_key: str @@ -271,12 +294,43 @@ def __init__( _strict_response_validation=_strict_response_validation, ) - self.embeddings = embeddings.AsyncEmbeddingsResource(self) - self.classifications = classifications.AsyncClassificationsResource(self) - self.rerankings = rerankings.AsyncRerankingsResource(self) - self.extractions = extractions.AsyncExtractionsResource(self) - self.with_raw_response = AsyncIsaacusWithRawResponse(self) - self.with_streaming_response = AsyncIsaacusWithStreamedResponse(self) + @cached_property + def embeddings(self) -> AsyncEmbeddingsResource: + from .resources.embeddings import AsyncEmbeddingsResource + + return AsyncEmbeddingsResource(self) + + @cached_property + def classifications(self) -> AsyncClassificationsResource: + from .resources.classifications import AsyncClassificationsResource + + return AsyncClassificationsResource(self) + + @cached_property + def rerankings(self) -> AsyncRerankingsResource: + from .resources.rerankings import AsyncRerankingsResource + + return AsyncRerankingsResource(self) + + @cached_property + def extractions(self) -> AsyncExtractionsResource: + from .resources.extractions import AsyncExtractionsResource + + return AsyncExtractionsResource(self) + + @cached_property + def enrichments(self) -> AsyncEnrichmentsResource: + from .resources.enrichments import AsyncEnrichmentsResource + + return AsyncEnrichmentsResource(self) + + @cached_property + def with_raw_response(self) -> AsyncIsaacusWithRawResponse: + return AsyncIsaacusWithRawResponse(self) + + @cached_property + def with_streaming_response(self) -> AsyncIsaacusWithStreamedResponse: + return AsyncIsaacusWithStreamedResponse(self) @property @override @@ -384,35 +438,151 @@ def _make_status_error( class IsaacusWithRawResponse: + _client: Isaacus + def __init__(self, client: Isaacus) -> None: - self.embeddings = embeddings.EmbeddingsResourceWithRawResponse(client.embeddings) - self.classifications = classifications.ClassificationsResourceWithRawResponse(client.classifications) - self.rerankings = rerankings.RerankingsResourceWithRawResponse(client.rerankings) - self.extractions = extractions.ExtractionsResourceWithRawResponse(client.extractions) + self._client = client + + @cached_property + def embeddings(self) -> embeddings.EmbeddingsResourceWithRawResponse: + from .resources.embeddings import EmbeddingsResourceWithRawResponse + + return EmbeddingsResourceWithRawResponse(self._client.embeddings) + + @cached_property + def classifications(self) -> classifications.ClassificationsResourceWithRawResponse: + from .resources.classifications import ClassificationsResourceWithRawResponse + + return ClassificationsResourceWithRawResponse(self._client.classifications) + + @cached_property + def rerankings(self) -> rerankings.RerankingsResourceWithRawResponse: + from .resources.rerankings import RerankingsResourceWithRawResponse + + return RerankingsResourceWithRawResponse(self._client.rerankings) 
+ + @cached_property + def extractions(self) -> extractions.ExtractionsResourceWithRawResponse: + from .resources.extractions import ExtractionsResourceWithRawResponse + + return ExtractionsResourceWithRawResponse(self._client.extractions) + + @cached_property + def enrichments(self) -> enrichments.EnrichmentsResourceWithRawResponse: + from .resources.enrichments import EnrichmentsResourceWithRawResponse + + return EnrichmentsResourceWithRawResponse(self._client.enrichments) class AsyncIsaacusWithRawResponse: + _client: AsyncIsaacus + def __init__(self, client: AsyncIsaacus) -> None: - self.embeddings = embeddings.AsyncEmbeddingsResourceWithRawResponse(client.embeddings) - self.classifications = classifications.AsyncClassificationsResourceWithRawResponse(client.classifications) - self.rerankings = rerankings.AsyncRerankingsResourceWithRawResponse(client.rerankings) - self.extractions = extractions.AsyncExtractionsResourceWithRawResponse(client.extractions) + self._client = client + + @cached_property + def embeddings(self) -> embeddings.AsyncEmbeddingsResourceWithRawResponse: + from .resources.embeddings import AsyncEmbeddingsResourceWithRawResponse + + return AsyncEmbeddingsResourceWithRawResponse(self._client.embeddings) + + @cached_property + def classifications(self) -> classifications.AsyncClassificationsResourceWithRawResponse: + from .resources.classifications import AsyncClassificationsResourceWithRawResponse + + return AsyncClassificationsResourceWithRawResponse(self._client.classifications) + + @cached_property + def rerankings(self) -> rerankings.AsyncRerankingsResourceWithRawResponse: + from .resources.rerankings import AsyncRerankingsResourceWithRawResponse + + return AsyncRerankingsResourceWithRawResponse(self._client.rerankings) + + @cached_property + def extractions(self) -> extractions.AsyncExtractionsResourceWithRawResponse: + from .resources.extractions import AsyncExtractionsResourceWithRawResponse + + return AsyncExtractionsResourceWithRawResponse(self._client.extractions) + + @cached_property + def enrichments(self) -> enrichments.AsyncEnrichmentsResourceWithRawResponse: + from .resources.enrichments import AsyncEnrichmentsResourceWithRawResponse + + return AsyncEnrichmentsResourceWithRawResponse(self._client.enrichments) class IsaacusWithStreamedResponse: + _client: Isaacus + def __init__(self, client: Isaacus) -> None: - self.embeddings = embeddings.EmbeddingsResourceWithStreamingResponse(client.embeddings) - self.classifications = classifications.ClassificationsResourceWithStreamingResponse(client.classifications) - self.rerankings = rerankings.RerankingsResourceWithStreamingResponse(client.rerankings) - self.extractions = extractions.ExtractionsResourceWithStreamingResponse(client.extractions) + self._client = client + + @cached_property + def embeddings(self) -> embeddings.EmbeddingsResourceWithStreamingResponse: + from .resources.embeddings import EmbeddingsResourceWithStreamingResponse + + return EmbeddingsResourceWithStreamingResponse(self._client.embeddings) + + @cached_property + def classifications(self) -> classifications.ClassificationsResourceWithStreamingResponse: + from .resources.classifications import ClassificationsResourceWithStreamingResponse + + return ClassificationsResourceWithStreamingResponse(self._client.classifications) + + @cached_property + def rerankings(self) -> rerankings.RerankingsResourceWithStreamingResponse: + from .resources.rerankings import RerankingsResourceWithStreamingResponse + + return 
RerankingsResourceWithStreamingResponse(self._client.rerankings) + + @cached_property + def extractions(self) -> extractions.ExtractionsResourceWithStreamingResponse: + from .resources.extractions import ExtractionsResourceWithStreamingResponse + + return ExtractionsResourceWithStreamingResponse(self._client.extractions) + + @cached_property + def enrichments(self) -> enrichments.EnrichmentsResourceWithStreamingResponse: + from .resources.enrichments import EnrichmentsResourceWithStreamingResponse + + return EnrichmentsResourceWithStreamingResponse(self._client.enrichments) class AsyncIsaacusWithStreamedResponse: + _client: AsyncIsaacus + def __init__(self, client: AsyncIsaacus) -> None: - self.embeddings = embeddings.AsyncEmbeddingsResourceWithStreamingResponse(client.embeddings) - self.classifications = classifications.AsyncClassificationsResourceWithStreamingResponse(client.classifications) - self.rerankings = rerankings.AsyncRerankingsResourceWithStreamingResponse(client.rerankings) - self.extractions = extractions.AsyncExtractionsResourceWithStreamingResponse(client.extractions) + self._client = client + + @cached_property + def embeddings(self) -> embeddings.AsyncEmbeddingsResourceWithStreamingResponse: + from .resources.embeddings import AsyncEmbeddingsResourceWithStreamingResponse + + return AsyncEmbeddingsResourceWithStreamingResponse(self._client.embeddings) + + @cached_property + def classifications(self) -> classifications.AsyncClassificationsResourceWithStreamingResponse: + from .resources.classifications import AsyncClassificationsResourceWithStreamingResponse + + return AsyncClassificationsResourceWithStreamingResponse(self._client.classifications) + + @cached_property + def rerankings(self) -> rerankings.AsyncRerankingsResourceWithStreamingResponse: + from .resources.rerankings import AsyncRerankingsResourceWithStreamingResponse + + return AsyncRerankingsResourceWithStreamingResponse(self._client.rerankings) + + @cached_property + def extractions(self) -> extractions.AsyncExtractionsResourceWithStreamingResponse: + from .resources.extractions import AsyncExtractionsResourceWithStreamingResponse + + return AsyncExtractionsResourceWithStreamingResponse(self._client.extractions) + + @cached_property + def enrichments(self) -> enrichments.AsyncEnrichmentsResourceWithStreamingResponse: + from .resources.enrichments import AsyncEnrichmentsResourceWithStreamingResponse + + return AsyncEnrichmentsResourceWithStreamingResponse(self._client.enrichments) Client = Isaacus diff --git a/src/isaacus/_compat.py b/src/isaacus/_compat.py index bdef67f..786ff42 100644 --- a/src/isaacus/_compat.py +++ b/src/isaacus/_compat.py @@ -139,6 +139,7 @@ def model_dump( exclude_defaults: bool = False, warnings: bool = True, mode: Literal["json", "python"] = "python", + by_alias: bool | None = None, ) -> dict[str, Any]: if (not PYDANTIC_V1) or hasattr(model, "model_dump"): return model.model_dump( @@ -148,13 +149,12 @@ def model_dump( exclude_defaults=exclude_defaults, # warnings are not supported in Pydantic v1 warnings=True if PYDANTIC_V1 else warnings, + by_alias=by_alias, ) return cast( "dict[str, Any]", model.dict( # pyright: ignore[reportDeprecated, reportUnnecessaryCast] - exclude=exclude, - exclude_unset=exclude_unset, - exclude_defaults=exclude_defaults, + exclude=exclude, exclude_unset=exclude_unset, exclude_defaults=exclude_defaults, by_alias=bool(by_alias) ), ) diff --git a/src/isaacus/_models.py b/src/isaacus/_models.py index 6a3cd1d..29070e0 100644 --- a/src/isaacus/_models.py +++ 
b/src/isaacus/_models.py @@ -2,7 +2,21 @@ import os import inspect -from typing import TYPE_CHECKING, Any, Type, Union, Generic, TypeVar, Callable, Optional, cast +import weakref +from typing import ( + IO, + TYPE_CHECKING, + Any, + Type, + Union, + Generic, + TypeVar, + Callable, + Iterable, + Optional, + AsyncIterable, + cast, +) from datetime import date, datetime from typing_extensions import ( List, @@ -256,15 +270,16 @@ def model_dump( mode: Literal["json", "python"] | str = "python", include: IncEx | None = None, exclude: IncEx | None = None, + context: Any | None = None, by_alias: bool | None = None, exclude_unset: bool = False, exclude_defaults: bool = False, exclude_none: bool = False, + exclude_computed_fields: bool = False, round_trip: bool = False, warnings: bool | Literal["none", "warn", "error"] = True, - context: dict[str, Any] | None = None, - serialize_as_any: bool = False, fallback: Callable[[Any], Any] | None = None, + serialize_as_any: bool = False, ) -> dict[str, Any]: """Usage docs: https://docs.pydantic.dev/2.4/concepts/serialization/#modelmodel_dump @@ -272,16 +287,24 @@ def model_dump( Args: mode: The mode in which `to_python` should run. - If mode is 'json', the dictionary will only contain JSON serializable types. - If mode is 'python', the dictionary may contain any Python objects. - include: A list of fields to include in the output. - exclude: A list of fields to exclude from the output. + If mode is 'json', the output will only contain JSON serializable types. + If mode is 'python', the output may contain non-JSON-serializable Python objects. + include: A set of fields to include in the output. + exclude: A set of fields to exclude from the output. + context: Additional context to pass to the serializer. by_alias: Whether to use the field's alias in the dictionary key if defined. - exclude_unset: Whether to exclude fields that are unset or None from the output. - exclude_defaults: Whether to exclude fields that are set to their default value from the output. - exclude_none: Whether to exclude fields that have a value of `None` from the output. - round_trip: Whether to enable serialization and deserialization round-trip support. - warnings: Whether to log warnings when invalid fields are encountered. + exclude_unset: Whether to exclude fields that have not been explicitly set. + exclude_defaults: Whether to exclude fields that are set to their default value. + exclude_none: Whether to exclude fields that have a value of `None`. + exclude_computed_fields: Whether to exclude computed fields. + While this can be useful for round-tripping, it is usually recommended to use the dedicated + `round_trip` parameter instead. + round_trip: If True, dumped values should be valid as input for non-idempotent types such as Json[T]. + warnings: How to handle serialization errors. False/"none" ignores them, True/"warn" logs errors, + "error" raises a [`PydanticSerializationError`][pydantic_core.PydanticSerializationError]. + fallback: A function to call when an unknown value is encountered. If not provided, + a [`PydanticSerializationError`][pydantic_core.PydanticSerializationError] error is raised. + serialize_as_any: Whether to serialize fields with duck-typing serialization behavior. Returns: A dictionary representation of the model. 
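For reference, a short sketch of how the `model_dump` options documented above are typically used on SDK response models; the embeddings call mirrors the README example elsewhere in this diff, and no particular response fields are assumed:

```python
import os

from isaacus import Isaacus

client = Isaacus(api_key=os.environ.get("ISAACUS_API_KEY"))

embedding_response = client.embeddings.create(
    model="kanon-2-embedder",
    texts=["What is a non-compete clause?"],
    task="retrieval/query",
)

# mode="json" keeps the output JSON-serializable, exclude_unset drops fields that were
# never explicitly set, and by_alias keys the dict by field aliases where defined.
payload = embedding_response.model_dump(mode="json", exclude_unset=True, by_alias=True)
print(payload)
```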
@@ -298,6 +321,8 @@ def model_dump( raise ValueError("serialize_as_any is only supported in Pydantic v2") if fallback is not None: raise ValueError("fallback is only supported in Pydantic v2") + if exclude_computed_fields != False: + raise ValueError("exclude_computed_fields is only supported in Pydantic v2") dumped = super().dict( # pyright: ignore[reportDeprecated] include=include, exclude=exclude, @@ -314,15 +339,17 @@ def model_dump_json( self, *, indent: int | None = None, + ensure_ascii: bool = False, include: IncEx | None = None, exclude: IncEx | None = None, + context: Any | None = None, by_alias: bool | None = None, exclude_unset: bool = False, exclude_defaults: bool = False, exclude_none: bool = False, + exclude_computed_fields: bool = False, round_trip: bool = False, warnings: bool | Literal["none", "warn", "error"] = True, - context: dict[str, Any] | None = None, fallback: Callable[[Any], Any] | None = None, serialize_as_any: bool = False, ) -> str: @@ -354,6 +381,10 @@ def model_dump_json( raise ValueError("serialize_as_any is only supported in Pydantic v2") if fallback is not None: raise ValueError("fallback is only supported in Pydantic v2") + if ensure_ascii != False: + raise ValueError("ensure_ascii is only supported in Pydantic v2") + if exclude_computed_fields != False: + raise ValueError("exclude_computed_fields is only supported in Pydantic v2") return super().json( # type: ignore[reportDeprecated] indent=indent, include=include, @@ -573,6 +604,9 @@ class CachedDiscriminatorType(Protocol): __discriminator__: DiscriminatorDetails +DISCRIMINATOR_CACHE: weakref.WeakKeyDictionary[type, DiscriminatorDetails] = weakref.WeakKeyDictionary() + + class DiscriminatorDetails: field_name: str """The name of the discriminator field in the variant class, e.g. @@ -615,8 +649,9 @@ def __init__( def _build_discriminated_union_meta(*, union: type, meta_annotations: tuple[Any, ...]) -> DiscriminatorDetails | None: - if isinstance(union, CachedDiscriminatorType): - return union.__discriminator__ + cached = DISCRIMINATOR_CACHE.get(union) + if cached is not None: + return cached discriminator_field_name: str | None = None @@ -669,7 +704,7 @@ def _build_discriminated_union_meta(*, union: type, meta_annotations: tuple[Any, discriminator_field=discriminator_field_name, discriminator_alias=discriminator_alias, ) - cast(CachedDiscriminatorType, union).__discriminator__ = details + DISCRIMINATOR_CACHE.setdefault(union, details) return details @@ -765,6 +800,7 @@ class FinalRequestOptionsInput(TypedDict, total=False): timeout: float | Timeout | None files: HttpxRequestFiles | None idempotency_key: str + content: Union[bytes, bytearray, IO[bytes], Iterable[bytes], AsyncIterable[bytes], None] json_data: Body extra_json: AnyMapping follow_redirects: bool @@ -783,6 +819,7 @@ class FinalRequestOptions(pydantic.BaseModel): post_parser: Union[Callable[[Any], Any], NotGiven] = NotGiven() follow_redirects: Union[bool, None] = None + content: Union[bytes, bytearray, IO[bytes], Iterable[bytes], AsyncIterable[bytes], None] = None # It should be noted that we cannot use `json` here as that would override # a BaseModel method in an incompatible fashion. 
json_data: Union[Body, None] = None diff --git a/src/isaacus/_streaming.py b/src/isaacus/_streaming.py index b0d9a98..6e5dfdb 100644 --- a/src/isaacus/_streaming.py +++ b/src/isaacus/_streaming.py @@ -54,12 +54,12 @@ def __stream__(self) -> Iterator[_T]: process_data = self._client._process_response_data iterator = self._iter_events() - for sse in iterator: - yield process_data(data=sse.json(), cast_to=cast_to, response=response) - - # Ensure the entire stream is consumed - for _sse in iterator: - ... + try: + for sse in iterator: + yield process_data(data=sse.json(), cast_to=cast_to, response=response) + finally: + # Ensure the response is closed even if the consumer doesn't read all data + response.close() def __enter__(self) -> Self: return self @@ -118,12 +118,12 @@ async def __stream__(self) -> AsyncIterator[_T]: process_data = self._client._process_response_data iterator = self._iter_events() - async for sse in iterator: - yield process_data(data=sse.json(), cast_to=cast_to, response=response) - - # Ensure the entire stream is consumed - async for _sse in iterator: - ... + try: + async for sse in iterator: + yield process_data(data=sse.json(), cast_to=cast_to, response=response) + finally: + # Ensure the response is closed even if the consumer doesn't read all data + await response.aclose() async def __aenter__(self) -> Self: return self diff --git a/src/isaacus/_types.py b/src/isaacus/_types.py index 905c413..92adbbc 100644 --- a/src/isaacus/_types.py +++ b/src/isaacus/_types.py @@ -13,9 +13,11 @@ Mapping, TypeVar, Callable, + Iterable, Iterator, Optional, Sequence, + AsyncIterable, ) from typing_extensions import ( Set, @@ -56,6 +58,13 @@ else: Base64FileInput = Union[IO[bytes], PathLike] FileContent = Union[IO[bytes], bytes, PathLike] # PathLike is not subscriptable in Python 3.8. + + +# Used for sending raw binary data / streaming data in request bodies +# e.g. for file uploads without multipart encoding +BinaryTypes = Union[bytes, bytearray, IO[bytes], Iterable[bytes]] +AsyncBinaryTypes = Union[bytes, bytearray, IO[bytes], AsyncIterable[bytes]] + FileTypes = Union[ # file (or bytes) FileContent, @@ -243,6 +252,9 @@ class HttpxSendArgs(TypedDict, total=False): if TYPE_CHECKING: # This works because str.__contains__ does not accept object (either in typeshed or at runtime) # https://github.com/hauntsaninja/useful_types/blob/5e9710f3875107d068e7679fd7fec9cfab0eff3b/useful_types/__init__.py#L285 + # + # Note: index() and count() methods are intentionally omitted to allow pyright to properly + # infer TypedDict types when dict literals are used in lists assigned to SequenceNotStr. class SequenceNotStr(Protocol[_T_co]): @overload def __getitem__(self, index: SupportsIndex, /) -> _T_co: ... @@ -251,8 +263,6 @@ def __getitem__(self, index: slice, /) -> Sequence[_T_co]: ... def __contains__(self, value: object, /) -> bool: ... def __len__(self) -> int: ... def __iter__(self) -> Iterator[_T_co]: ... - def index(self, value: Any, start: int = 0, stop: int = ..., /) -> int: ... - def count(self, value: Any, /) -> int: ... def __reversed__(self) -> Iterator[_T_co]: ... 
else: # just point this to a normal `Sequence` at runtime to avoid having to special case diff --git a/src/isaacus/_utils/_json.py b/src/isaacus/_utils/_json.py new file mode 100644 index 0000000..6058421 --- /dev/null +++ b/src/isaacus/_utils/_json.py @@ -0,0 +1,35 @@ +import json +from typing import Any +from datetime import datetime +from typing_extensions import override + +import pydantic + +from .._compat import model_dump + + +def openapi_dumps(obj: Any) -> bytes: + """ + Serialize an object to UTF-8 encoded JSON bytes. + + Extends the standard json.dumps with support for additional types + commonly used in the SDK, such as `datetime`, `pydantic.BaseModel`, etc. + """ + return json.dumps( + obj, + cls=_CustomEncoder, + # Uses the same defaults as httpx's JSON serialization + ensure_ascii=False, + separators=(",", ":"), + allow_nan=False, + ).encode() + + +class _CustomEncoder(json.JSONEncoder): + @override + def default(self, o: Any) -> Any: + if isinstance(o, datetime): + return o.isoformat() + if isinstance(o, pydantic.BaseModel): + return model_dump(o, exclude_unset=True, mode="json", by_alias=True) + return super().default(o) diff --git a/src/isaacus/_utils/_sync.py b/src/isaacus/_utils/_sync.py index ad7ec71..f6027c1 100644 --- a/src/isaacus/_utils/_sync.py +++ b/src/isaacus/_utils/_sync.py @@ -1,10 +1,8 @@ from __future__ import annotations -import sys import asyncio import functools -import contextvars -from typing import Any, TypeVar, Callable, Awaitable +from typing import TypeVar, Callable, Awaitable from typing_extensions import ParamSpec import anyio @@ -15,34 +13,11 @@ T_ParamSpec = ParamSpec("T_ParamSpec") -if sys.version_info >= (3, 9): - _asyncio_to_thread = asyncio.to_thread -else: - # backport of https://docs.python.org/3/library/asyncio-task.html#asyncio.to_thread - # for Python 3.8 support - async def _asyncio_to_thread( - func: Callable[T_ParamSpec, T_Retval], /, *args: T_ParamSpec.args, **kwargs: T_ParamSpec.kwargs - ) -> Any: - """Asynchronously run function *func* in a separate thread. - - Any *args and **kwargs supplied for this function are directly passed - to *func*. Also, the current :class:`contextvars.Context` is propagated, - allowing context variables from the main thread to be accessed in the - separate thread. - - Returns a coroutine that can be awaited to get the eventual result of *func*. - """ - loop = asyncio.events.get_running_loop() - ctx = contextvars.copy_context() - func_call = functools.partial(ctx.run, func, *args, **kwargs) - return await loop.run_in_executor(None, func_call) - - async def to_thread( func: Callable[T_ParamSpec, T_Retval], /, *args: T_ParamSpec.args, **kwargs: T_ParamSpec.kwargs ) -> T_Retval: if sniffio.current_async_library() == "asyncio": - return await _asyncio_to_thread(func, *args, **kwargs) + return await asyncio.to_thread(func, *args, **kwargs) return await anyio.to_thread.run_sync( functools.partial(func, *args, **kwargs), @@ -53,10 +28,7 @@ async def to_thread( def asyncify(function: Callable[T_ParamSpec, T_Retval]) -> Callable[T_ParamSpec, Awaitable[T_Retval]]: """ Take a blocking function and create an async one that receives the same - positional and keyword arguments. For python version 3.9 and above, it uses - asyncio.to_thread to run the function in a separate thread. For python version - 3.8, it uses locally defined copy of the asyncio.to_thread function which was - introduced in python 3.9. + positional and keyword arguments. 
Usage: diff --git a/src/isaacus/_utils/_utils.py b/src/isaacus/_utils/_utils.py index 50d5926..eec7f4a 100644 --- a/src/isaacus/_utils/_utils.py +++ b/src/isaacus/_utils/_utils.py @@ -133,7 +133,7 @@ def is_given(obj: _T | NotGiven | Omit) -> TypeGuard[_T]: # Type safe methods for narrowing types with TypeVars. # The default narrowing for isinstance(obj, dict) is dict[unknown, unknown], # however this cause Pyright to rightfully report errors. As we know we don't -# care about the contained types we can safely use `object` in it's place. +# care about the contained types we can safely use `object` in its place. # # There are two separate functions defined, `is_*` and `is_*_t` for different use cases. # `is_*` is for when you're dealing with an unknown input diff --git a/src/isaacus/_version.py b/src/isaacus/_version.py index 0f561fd..d428a2f 100644 --- a/src/isaacus/_version.py +++ b/src/isaacus/_version.py @@ -1,4 +1,4 @@ # File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details. __title__ = "isaacus" -__version__ = "0.9.1" # x-release-please-version +__version__ = "0.10.0" # x-release-please-version diff --git a/src/isaacus/resources/__init__.py b/src/isaacus/resources/__init__.py index 3c62ff4..c00d631 100644 --- a/src/isaacus/resources/__init__.py +++ b/src/isaacus/resources/__init__.py @@ -16,6 +16,14 @@ RerankingsResourceWithStreamingResponse, AsyncRerankingsResourceWithStreamingResponse, ) +from .enrichments import ( + EnrichmentsResource, + AsyncEnrichmentsResource, + EnrichmentsResourceWithRawResponse, + AsyncEnrichmentsResourceWithRawResponse, + EnrichmentsResourceWithStreamingResponse, + AsyncEnrichmentsResourceWithStreamingResponse, +) from .extractions import ( ExtractionsResource, AsyncExtractionsResource, @@ -58,4 +66,10 @@ "AsyncExtractionsResourceWithRawResponse", "ExtractionsResourceWithStreamingResponse", "AsyncExtractionsResourceWithStreamingResponse", + "EnrichmentsResource", + "AsyncEnrichmentsResource", + "EnrichmentsResourceWithRawResponse", + "AsyncEnrichmentsResourceWithRawResponse", + "EnrichmentsResourceWithStreamingResponse", + "AsyncEnrichmentsResourceWithStreamingResponse", ] diff --git a/src/isaacus/resources/enrichments.py b/src/isaacus/resources/enrichments.py new file mode 100644 index 0000000..4286b16 --- /dev/null +++ b/src/isaacus/resources/enrichments.py @@ -0,0 +1,224 @@ +# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details. + +from __future__ import annotations + +from typing import Union, Optional +from typing_extensions import Literal + +import httpx + +from ..types import enrichment_create_params +from .._types import Body, Omit, Query, Headers, NotGiven, SequenceNotStr, omit, not_given +from .._utils import maybe_transform, async_maybe_transform +from .._compat import cached_property +from .._resource import SyncAPIResource, AsyncAPIResource +from .._response import ( + to_raw_response_wrapper, + to_streamed_response_wrapper, + async_to_raw_response_wrapper, + async_to_streamed_response_wrapper, +) +from .._base_client import make_request_options +from ..types.enrichment_response import EnrichmentResponse + +__all__ = ["EnrichmentsResource", "AsyncEnrichmentsResource"] + + +class EnrichmentsResource(SyncAPIResource): + @cached_property + def with_raw_response(self) -> EnrichmentsResourceWithRawResponse: + """ + This property can be used as a prefix for any HTTP method call to return + the raw response object instead of the parsed content. 
+ + For more information, see https://www.github.com/isaacus-dev/isaacus-python#accessing-raw-response-data-eg-headers + """ + return EnrichmentsResourceWithRawResponse(self) + + @cached_property + def with_streaming_response(self) -> EnrichmentsResourceWithStreamingResponse: + """ + An alternative to `.with_raw_response` that doesn't eagerly read the response body. + + For more information, see https://www.github.com/isaacus-dev/isaacus-python#with_streaming_response + """ + return EnrichmentsResourceWithStreamingResponse(self) + + def create( + self, + *, + model: Literal["kanon-2-enricher-preview"], + texts: Union[SequenceNotStr[str], str], + overflow_strategy: Optional[Literal["auto", "drop_end"]] | Omit = omit, + # Use the following arguments if you need to pass additional parameters to the API that aren't available via kwargs. + # The extra values given here take precedence over values defined on the client or passed to this method. + extra_headers: Headers | None = None, + extra_query: Query | None = None, + extra_body: Body | None = None, + timeout: float | httpx.Timeout | None | NotGiven = not_given, + ) -> EnrichmentResponse: + """ + Enrich documents with an Isaacus enricher model. + + Args: + model: The ID of the [model](https://docs.isaacus.com/models#enrichment) to use for + enrichment. + + texts: A text or array of texts to be enriched, each containing at least one + non-whitespace character. + + No more than 8 texts can be enriched in a single request. + + overflow_strategy: The strategy for handling content exceeding the model's maximum input length. + + `auto` currently behaves the same as `drop_end`, dropping excess tokens from the + end of input. In the future, `auto` may implement more sophisticated strategies + such as chunking and context-aware stitching. + + `drop_end` drops tokens from the end of input exceeding the model's maximum + input length. + + `null`, which is the default setting, raises an error if the input exceeds the + model's maximum input length. + + extra_headers: Send extra headers + + extra_query: Add additional query parameters to the request + + extra_body: Add additional JSON properties to the request + + timeout: Override the client-level default timeout for this request, in seconds + """ + return self._post( + "/enrichments", + body=maybe_transform( + { + "model": model, + "texts": texts, + "overflow_strategy": overflow_strategy, + }, + enrichment_create_params.EnrichmentCreateParams, + ), + options=make_request_options( + extra_headers=extra_headers, extra_query=extra_query, extra_body=extra_body, timeout=timeout + ), + cast_to=EnrichmentResponse, + ) + + +class AsyncEnrichmentsResource(AsyncAPIResource): + @cached_property + def with_raw_response(self) -> AsyncEnrichmentsResourceWithRawResponse: + """ + This property can be used as a prefix for any HTTP method call to return + the raw response object instead of the parsed content. + + For more information, see https://www.github.com/isaacus-dev/isaacus-python#accessing-raw-response-data-eg-headers + """ + return AsyncEnrichmentsResourceWithRawResponse(self) + + @cached_property + def with_streaming_response(self) -> AsyncEnrichmentsResourceWithStreamingResponse: + """ + An alternative to `.with_raw_response` that doesn't eagerly read the response body. 
+ + For more information, see https://www.github.com/isaacus-dev/isaacus-python#with_streaming_response + """ + return AsyncEnrichmentsResourceWithStreamingResponse(self) + + async def create( + self, + *, + model: Literal["kanon-2-enricher-preview"], + texts: Union[SequenceNotStr[str], str], + overflow_strategy: Optional[Literal["auto", "drop_end"]] | Omit = omit, + # Use the following arguments if you need to pass additional parameters to the API that aren't available via kwargs. + # The extra values given here take precedence over values defined on the client or passed to this method. + extra_headers: Headers | None = None, + extra_query: Query | None = None, + extra_body: Body | None = None, + timeout: float | httpx.Timeout | None | NotGiven = not_given, + ) -> EnrichmentResponse: + """ + Enrich documents with an Isaacus enricher model. + + Args: + model: The ID of the [model](https://docs.isaacus.com/models#enrichment) to use for + enrichment. + + texts: A text or array of texts to be enriched, each containing at least one + non-whitespace character. + + No more than 8 texts can be enriched in a single request. + + overflow_strategy: The strategy for handling content exceeding the model's maximum input length. + + `auto` currently behaves the same as `drop_end`, dropping excess tokens from the + end of input. In the future, `auto` may implement more sophisticated strategies + such as chunking and context-aware stitching. + + `drop_end` drops tokens from the end of input exceeding the model's maximum + input length. + + `null`, which is the default setting, raises an error if the input exceeds the + model's maximum input length. + + extra_headers: Send extra headers + + extra_query: Add additional query parameters to the request + + extra_body: Add additional JSON properties to the request + + timeout: Override the client-level default timeout for this request, in seconds + """ + return await self._post( + "/enrichments", + body=await async_maybe_transform( + { + "model": model, + "texts": texts, + "overflow_strategy": overflow_strategy, + }, + enrichment_create_params.EnrichmentCreateParams, + ), + options=make_request_options( + extra_headers=extra_headers, extra_query=extra_query, extra_body=extra_body, timeout=timeout + ), + cast_to=EnrichmentResponse, + ) + + +class EnrichmentsResourceWithRawResponse: + def __init__(self, enrichments: EnrichmentsResource) -> None: + self._enrichments = enrichments + + self.create = to_raw_response_wrapper( + enrichments.create, + ) + + +class AsyncEnrichmentsResourceWithRawResponse: + def __init__(self, enrichments: AsyncEnrichmentsResource) -> None: + self._enrichments = enrichments + + self.create = async_to_raw_response_wrapper( + enrichments.create, + ) + + +class EnrichmentsResourceWithStreamingResponse: + def __init__(self, enrichments: EnrichmentsResource) -> None: + self._enrichments = enrichments + + self.create = to_streamed_response_wrapper( + enrichments.create, + ) + + +class AsyncEnrichmentsResourceWithStreamingResponse: + def __init__(self, enrichments: AsyncEnrichmentsResource) -> None: + self._enrichments = enrichments + + self.create = async_to_streamed_response_wrapper( + enrichments.create, + ) diff --git a/src/isaacus/resources/rerankings.py b/src/isaacus/resources/rerankings.py index e389668..94b7b31 100644 --- a/src/isaacus/resources/rerankings.py +++ b/src/isaacus/resources/rerankings.py @@ -62,7 +62,7 @@ def create( timeout: float | httpx.Timeout | None | NotGiven = not_given, ) -> RerankingResponse: """ - Rerank legal 
documents by their relevance to a query with an Isaacus legal AI + Rank legal documents by their relevance to a query with an Isaacus legal AI reranker. Args: @@ -173,7 +173,7 @@ async def create( timeout: float | httpx.Timeout | None | NotGiven = not_given, ) -> RerankingResponse: """ - Rerank legal documents by their relevance to a query with an Isaacus legal AI + Rank legal documents by their relevance to a query with an Isaacus legal AI reranker. Args: diff --git a/src/isaacus/types/__init__.py b/src/isaacus/types/__init__.py index 7a481fd..f1751e9 100644 --- a/src/isaacus/types/__init__.py +++ b/src/isaacus/types/__init__.py @@ -4,5 +4,7 @@ from .embedding_response import EmbeddingResponse as EmbeddingResponse from .reranking_response import RerankingResponse as RerankingResponse +from .enrichment_response import EnrichmentResponse as EnrichmentResponse from .embedding_create_params import EmbeddingCreateParams as EmbeddingCreateParams from .reranking_create_params import RerankingCreateParams as RerankingCreateParams +from .enrichment_create_params import EnrichmentCreateParams as EnrichmentCreateParams diff --git a/src/isaacus/types/classifications/universal_classification_response.py b/src/isaacus/types/classifications/universal_classification_response.py index 2dd9920..aa4d93e 100644 --- a/src/isaacus/types/classifications/universal_classification_response.py +++ b/src/isaacus/types/classifications/universal_classification_response.py @@ -68,6 +68,8 @@ class Classification(BaseModel): class Usage(BaseModel): + """Statistics about the usage of resources in the process of classifying the text.""" + input_tokens: int """The number of tokens inputted to the model.""" diff --git a/src/isaacus/types/classifications/universal_create_params.py b/src/isaacus/types/classifications/universal_create_params.py index 167737a..c79b494 100644 --- a/src/isaacus/types/classifications/universal_create_params.py +++ b/src/isaacus/types/classifications/universal_create_params.py @@ -59,11 +59,13 @@ class UniversalCreateParams(TypedDict, total=False): class ChunkingOptions(TypedDict, total=False): + """Options for how to split text into smaller chunks.""" + overlap_ratio: Optional[float] """A number greater than or equal to 0 and less than 1.""" overlap_tokens: Optional[int] - """A whole number greater than -1.""" + """A whole number greater than or equal to 0.""" size: Optional[int] """A whole number greater than or equal to 1.""" diff --git a/src/isaacus/types/embedding_response.py b/src/isaacus/types/embedding_response.py index bcd16fe..13d6c89 100644 --- a/src/isaacus/types/embedding_response.py +++ b/src/isaacus/types/embedding_response.py @@ -19,6 +19,8 @@ class Embedding(BaseModel): class Usage(BaseModel): + """Statistics about the usage of resources in the process of embedding the inputs.""" + input_tokens: int """The number of tokens inputted to the model.""" diff --git a/src/isaacus/types/enrichment_create_params.py b/src/isaacus/types/enrichment_create_params.py new file mode 100644 index 0000000..d009b6c --- /dev/null +++ b/src/isaacus/types/enrichment_create_params.py @@ -0,0 +1,40 @@ +# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details. 
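A hedged usage sketch for the new enrichments resource defined above. It assumes the client exposes it as `client.enrichments` (mirroring the existing rerankings and extractions resources) and that the API key is read from the environment, as the SDK's other snippets do; the response model, `EnrichmentResponse`, is defined further down.

from isaacus import Isaacus

client = Isaacus()  # assumes the API key is set in the environment

response = client.enrichments.create(
    model="kanon-2-enricher-preview",
    texts=["This Agreement is made on 3 February 2026 between Acme Pty Ltd and Jane Doe."],
    overflow_strategy="drop_end",  # optional; omit (default null) to error on over-length input
)

for result in response.results:
    document = result.document
    print(result.index, document.type, len(document.segments), "segments")
print("input tokens:", response.usage.input_tokens)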
+ +from __future__ import annotations + +from typing import Union, Optional +from typing_extensions import Literal, Required, TypedDict + +from .._types import SequenceNotStr + +__all__ = ["EnrichmentCreateParams"] + + +class EnrichmentCreateParams(TypedDict, total=False): + model: Required[Literal["kanon-2-enricher-preview"]] + """ + The ID of the [model](https://docs.isaacus.com/models#enrichment) to use for + enrichment. + """ + + texts: Required[Union[SequenceNotStr[str], str]] + """ + A text or array of texts to be enriched, each containing at least one + non-whitespace character. + + No more than 8 texts can be enriched in a single request. + """ + + overflow_strategy: Optional[Literal["auto", "drop_end"]] + """The strategy for handling content exceeding the model's maximum input length. + + `auto` currently behaves the same as `drop_end`, dropping excess tokens from the + end of input. In the future, `auto` may implement more sophisticated strategies + such as chunking and context-aware stitching. + + `drop_end` drops tokens from the end of input exceeding the model's maximum + input length. + + `null`, which is the default setting, raises an error if the input exceeds the + model's maximum input length. + """ diff --git a/src/isaacus/types/enrichment_response.py b/src/isaacus/types/enrichment_response.py new file mode 100644 index 0000000..e9c6e25 --- /dev/null +++ b/src/isaacus/types/enrichment_response.py @@ -0,0 +1,1258 @@ +# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details. + +from typing import List, Optional +from typing_extensions import Literal + +from .._models import BaseModel + +__all__ = [ + "EnrichmentResponse", + "Result", + "ResultDocument", + "ResultDocumentCrossreference", + "ResultDocumentCrossreferenceSpan", + "ResultDocumentDate", + "ResultDocumentDateMention", + "ResultDocumentEmail", + "ResultDocumentEmailMention", + "ResultDocumentExternalDocument", + "ResultDocumentExternalDocumentMention", + "ResultDocumentExternalDocumentName", + "ResultDocumentExternalDocumentPinpoint", + "ResultDocumentHeading", + "ResultDocumentIDNumber", + "ResultDocumentIDNumberMention", + "ResultDocumentJunk", + "ResultDocumentLocation", + "ResultDocumentLocationMention", + "ResultDocumentLocationName", + "ResultDocumentPerson", + "ResultDocumentPersonMention", + "ResultDocumentPersonName", + "ResultDocumentPhoneNumber", + "ResultDocumentPhoneNumberMention", + "ResultDocumentQuote", + "ResultDocumentQuoteSpan", + "ResultDocumentSegment", + "ResultDocumentSegmentCode", + "ResultDocumentSegmentSpan", + "ResultDocumentSegmentTitle", + "ResultDocumentSegmentTypeName", + "ResultDocumentSubtitle", + "ResultDocumentTerm", + "ResultDocumentTermMeaning", + "ResultDocumentTermMention", + "ResultDocumentTermName", + "ResultDocumentTitle", + "ResultDocumentWebsite", + "ResultDocumentWebsiteMention", + "Usage", +] + + +class ResultDocumentCrossreferenceSpan(BaseModel): + """The span of the segment within the document's text.""" + + end: int + + start: int + + +class ResultDocumentCrossreference(BaseModel): + """A cross-reference within the document pointing to one or more segments.""" + + end: str + """ + The unique identifier of the latest segment in the span of segments being + cross-referenced with ties broken in favor of the least-nested (i.e., largest) + segment. If the cross-reference points to a single segment, `start` and `end` + will be identical. 
+ """ + + span: ResultDocumentCrossreferenceSpan + """The span of the segment within the document's text.""" + + start: str + """ + The unique identifier of the earliest segment in the span of segments being + cross-referenced with ties broken in favor of the least-nested (i.e., largest) + segment. If the cross-reference points to a single segment, `start` and `end` + will be identical. + """ + + +class ResultDocumentDateMention(BaseModel): + """A zero-based, half-open span into the Unicode code point space of input text. + + All spans are globally laminar and well-nested similar to XML—it is impossible for any two spans to partially overlap; they can only be disjoint, adjacent, or wholly nested. Spans of the exact same type (e.g., segments) will never be duplicated. + + A span cannot be empty and will never start or end at whitespace. + + Note that, when using programming languages other than Python (which uses zero-based, half-open, Unicode code point-spaced string indexing), indices may need to be translated accordingly (for example, JavaScript slices into UTF-16 code units instead of Unicode code points). + """ + + end: int + + start: int + + +class ResultDocumentDate(BaseModel): + """ + A date identified in a document belonging to one of the following types: `creation`, `signature`, `effective`, `expiry`, `delivery`, `renewal`, `payment`, `birth`, or `death`. + + Only Gregorian dates between the years 1000 and 9999 (inclusive) fitting into one of the supported date types are extractable. + """ + + mentions: List[ResultDocumentDateMention] + """ + An array of one or more spans within the document's text where the date is + mentioned. + """ + + person: Optional[str] = None + """ + A unique identifier for a legal person in the format `per:{index}` where + `{index}` is a non-negative incrementing integer starting from zero. + """ + + type: Literal["creation", "signature", "effective", "expiry", "delivery", "renewal", "payment", "birth", "death"] + """ + The type of the date, being one of `creation`, `signature`, `effective`, + `expiry`, `delivery`, `renewal`, `payment`, `birth`, or `death`. If a date is + mentioned in a document that does not fit into a supported type, it will not be + extracted. + + `creation` denotes the date the document was created. There may only be one + `creation` date per document. + + `signature` denotes the date the document was signed. + + `effective` denotes the date when the document or a part thereof comes into + effect (e.g., commencement or enactment dates). + + `expiry` denotes the date when the document or a part thereof is no longer in + effect. + + `delivery` denotes the date when goods or services are to be delivered under the + document. + + `renewal` denotes the date when one or more of the document's terms are to be + renewed. + + `payment` denotes the date when payment is to be made under the document. + + `birth` denotes the birth date of a natural person or establishment (e.g., + incorporation) date of a non-natural legal person identified in the document. + There can only be one `birth` date linked to a single person and all `birth` + dates must be linked to a person. A person's `birth` date will never be after + their `death` date. + + `death` denotes the death date of a natural person or dissolution date of a + non-natural legal person identified in the document. There can only be one + `death` date linked to a single person and all `death` dates must be linked to a + person. A person's `death` date will never be before their `birth` date. 
+ """ + + value: str + """The date in ISO 8601 format (YYYY-MM-DD).""" + + +class ResultDocumentEmailMention(BaseModel): + """A zero-based, half-open span into the Unicode code point space of input text. + + All spans are globally laminar and well-nested similar to XML—it is impossible for any two spans to partially overlap; they can only be disjoint, adjacent, or wholly nested. Spans of the exact same type (e.g., segments) will never be duplicated. + + A span cannot be empty and will never start or end at whitespace. + + Note that, when using programming languages other than Python (which uses zero-based, half-open, Unicode code point-spaced string indexing), indices may need to be translated accordingly (for example, JavaScript slices into UTF-16 code units instead of Unicode code points). + """ + + end: int + + start: int + + +class ResultDocumentEmail(BaseModel): + """An email address identified in a document belonging to a legal person. + + If an email address was mentioned in the document but is not attributable to a legal person, it will not be extracted. + """ + + address: str + """The normalized email address.""" + + mentions: List[ResultDocumentEmailMention] + """ + An array of one or more spans within the document's text where the email address + is mentioned. + """ + + person: str + """The unique identifier of the person that this email address belongs to.""" + + +class ResultDocumentExternalDocumentMention(BaseModel): + """A zero-based, half-open span into the Unicode code point space of input text. + + All spans are globally laminar and well-nested similar to XML—it is impossible for any two spans to partially overlap; they can only be disjoint, adjacent, or wholly nested. Spans of the exact same type (e.g., segments) will never be duplicated. + + A span cannot be empty and will never start or end at whitespace. + + Note that, when using programming languages other than Python (which uses zero-based, half-open, Unicode code point-spaced string indexing), indices may need to be translated accordingly (for example, JavaScript slices into UTF-16 code units instead of Unicode code points). + """ + + end: int + + start: int + + +class ResultDocumentExternalDocumentName(BaseModel): + """The span of the segment within the document's text.""" + + end: int + + start: int + + +class ResultDocumentExternalDocumentPinpoint(BaseModel): + """A zero-based, half-open span into the Unicode code point space of input text. + + All spans are globally laminar and well-nested similar to XML—it is impossible for any two spans to partially overlap; they can only be disjoint, adjacent, or wholly nested. Spans of the exact same type (e.g., segments) will never be duplicated. + + A span cannot be empty and will never start or end at whitespace. + + Note that, when using programming languages other than Python (which uses zero-based, half-open, Unicode code point-spaced string indexing), indices may need to be translated accordingly (for example, JavaScript slices into UTF-16 code units instead of Unicode code points). + """ + + end: int + + start: int + + +class ResultDocumentExternalDocument(BaseModel): + """A document identified within another document.""" + + id: str + """ + The unique identifier of the external document in the format `exd:{index}` where + `{index}` is a non-negative incrementing integer starting from zero. 
+ """ + + jurisdiction: Optional[str] = None + """ + A jurisdiction code representing a country (via an initial country code) and, + optionally, a subdivision within that country (via a subsequent subdivision code + prefixed by a hyphen). + + All 249 ISO 3166-1 alpha-2 country codes are representable in addition to + special `INT` and `EU` codes for international and European Union law, + respectively. + + All 5,046 ISO 3166-2 codes are also representable in addition to a special `FED` + code for federal law. + """ + + mentions: List[ResultDocumentExternalDocumentMention] + """ + An array of one or more spans within the document's text where the external + document is mentioned by name, for example, 'the US Constitution' in 'the Second + Amendment to the US Constitution protects freedom of speech'. + """ + + name: ResultDocumentExternalDocumentName + """The span of the segment within the document's text.""" + + pinpoints: List[ResultDocumentExternalDocumentPinpoint] + """ + An array of spans within the document's text where specific parts of the + external document are referenced, for example, 'Section 2' in 'as defined in + Section 2 of the US Constitution'. + """ + + reception: Literal["positive", "mixed", "negative", "neutral"] + """ + The sentiment of the document towards the external document, being one of + `positive`, `mixed`, `negative`, or `neutral`. + + `positive` indicates that the document expresses a favorable view of the + external document whether by endorsing or approving it. + + `mixed` indicates that the document expresses both favorable and unfavorable + views of the external document, for example, by affirming parts of it and + disapproving others. + + `negative` indicates that the document expresses an unfavorable view of the + external document whether by criticizing, repealing, overruling, or explicitly + contradicting it. + + `neutral` indicates that the document references the external document without + expressing any particular sentiment towards it. + """ + + type: Literal["statute", "regulation", "decision", "contract", "other"] + """ + The type of the external document, being one of `statute`, `regulation`, + `decision`, `contract`, or `other`. + + `statute` denotes primary legislation such as acts, bills, codes, and + constitutions. + + `regulation` denotes secondary legislation such as rules, statutory instruments, + and ordinances. + + `decision` denotes judicial or quasi-judicial decisions such as court judgments, + judicial opinions, and tribunal rulings. + + `other` is used for all other types of legal documents that do not fit into any + of the predefined types. + """ + + +class ResultDocumentHeading(BaseModel): + """A zero-based, half-open span into the Unicode code point space of input text. + + All spans are globally laminar and well-nested similar to XML—it is impossible for any two spans to partially overlap; they can only be disjoint, adjacent, or wholly nested. Spans of the exact same type (e.g., segments) will never be duplicated. + + A span cannot be empty and will never start or end at whitespace. + + Note that, when using programming languages other than Python (which uses zero-based, half-open, Unicode code point-spaced string indexing), indices may need to be translated accordingly (for example, JavaScript slices into UTF-16 code units instead of Unicode code points). + """ + + end: int + + start: int + + +class ResultDocumentIDNumberMention(BaseModel): + """A zero-based, half-open span into the Unicode code point space of input text. 
+ + All spans are globally laminar and well-nested similar to XML—it is impossible for any two spans to partially overlap; they can only be disjoint, adjacent, or wholly nested. Spans of the exact same type (e.g., segments) will never be duplicated. + + A span cannot be empty and will never start or end at whitespace. + + Note that, when using programming languages other than Python (which uses zero-based, half-open, Unicode code point-spaced string indexing), indices may need to be translated accordingly (for example, JavaScript slices into UTF-16 code units instead of Unicode code points). + """ + + end: int + + start: int + + +class ResultDocumentIDNumber(BaseModel): + """An identification number mentioned in a document belonging to a legal person. + + If an identification number was mentioned in the document but is not attributable to a legal person, it will not be extracted. + """ + + mentions: List[ResultDocumentIDNumberMention] + """ + An array of one or more spans within the document's text where the + identification number is mentioned. + """ + + number: str + """The identification number.""" + + person: str + """The unique identifier of the person that this identification number belongs to.""" + + +class ResultDocumentJunk(BaseModel): + """A zero-based, half-open span into the Unicode code point space of input text. + + All spans are globally laminar and well-nested similar to XML—it is impossible for any two spans to partially overlap; they can only be disjoint, adjacent, or wholly nested. Spans of the exact same type (e.g., segments) will never be duplicated. + + A span cannot be empty and will never start or end at whitespace. + + Note that, when using programming languages other than Python (which uses zero-based, half-open, Unicode code point-spaced string indexing), indices may need to be translated accordingly (for example, JavaScript slices into UTF-16 code units instead of Unicode code points). + """ + + end: int + + start: int + + +class ResultDocumentLocationMention(BaseModel): + """A zero-based, half-open span into the Unicode code point space of input text. + + All spans are globally laminar and well-nested similar to XML—it is impossible for any two spans to partially overlap; they can only be disjoint, adjacent, or wholly nested. Spans of the exact same type (e.g., segments) will never be duplicated. + + A span cannot be empty and will never start or end at whitespace. + + Note that, when using programming languages other than Python (which uses zero-based, half-open, Unicode code point-spaced string indexing), indices may need to be translated accordingly (for example, JavaScript slices into UTF-16 code units instead of Unicode code points). + """ + + end: int + + start: int + + +class ResultDocumentLocationName(BaseModel): + """The span of the segment within the document's text.""" + + end: int + + start: int + + +class ResultDocumentLocation(BaseModel): + """A location identified within a document.""" + + id: str + """ + The unique identifier of the location in the format `loc:{index}` where + `{index}` is a non-negative incrementing integer starting from zero. + """ + + mentions: List[ResultDocumentLocationMention] + """ + An array of one or more spans within the document's text where the location is + mentioned. 
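Since every mention above is one of these zero-based, half-open code point spans, resolving a span back to text is a plain slice of the exact source text that was sent. A tiny illustrative sketch; the text and offsets are made up.

def span_text(source: str, start: int, end: int) -> str:
    # Spans are zero-based and half-open, so a Python slice maps onto them directly.
    # In JavaScript and other UTF-16 languages, translate code point offsets first.
    return source[start:end]


source = "This Agreement commences on 1 March 2026."
print(span_text(source, 28, 40))  # -> "1 March 2026"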
+ """ + + name: ResultDocumentLocationName + """The span of the segment within the document's text.""" + + parent: Optional[str] = None + """ + A unique identifier for a location in the format `loc:{index}` where `{index}` + is a non-negative incrementing integer starting from zero. + """ + + type: Literal["country", "state", "city", "address", "other"] + """ + The type of the location, being one of `country`, `state`, `city`, `address`, or + `other`. + """ + + +class ResultDocumentPersonMention(BaseModel): + """A zero-based, half-open span into the Unicode code point space of input text. + + All spans are globally laminar and well-nested similar to XML—it is impossible for any two spans to partially overlap; they can only be disjoint, adjacent, or wholly nested. Spans of the exact same type (e.g., segments) will never be duplicated. + + A span cannot be empty and will never start or end at whitespace. + + Note that, when using programming languages other than Python (which uses zero-based, half-open, Unicode code point-spaced string indexing), indices may need to be translated accordingly (for example, JavaScript slices into UTF-16 code units instead of Unicode code points). + """ + + end: int + + start: int + + +class ResultDocumentPersonName(BaseModel): + """The span of the segment within the document's text.""" + + end: int + + start: int + + +class ResultDocumentPerson(BaseModel): + """A legal person identified in a document.""" + + id: str + """ + The unique identifier of the person in the format `per:{index}` where `{index}` + is a non-negative incrementing integer starting from zero. + """ + + mentions: List[ResultDocumentPersonMention] + """ + An array of one or more spans within the document's text where the person is + mentioned. + """ + + name: ResultDocumentPersonName + """The span of the segment within the document's text.""" + + parent: Optional[str] = None + """ + A unique identifier for a legal person in the format `per:{index}` where + `{index}` is a non-negative incrementing integer starting from zero. + """ + + residence: Optional[str] = None + """ + A unique identifier for a location in the format `loc:{index}` where `{index}` + is a non-negative incrementing integer starting from zero. + """ + + role: Literal[ + "plaintiff", + "petitioner", + "applicant", + "appellant", + "appellee", + "claimant", + "complainant", + "defendant", + "respondent", + "prior_authority", + "prosecutor", + "defense_counsel", + "amicus", + "intervener", + "borrower", + "lender", + "guarantor", + "lessee", + "lessor", + "employer", + "employee", + "licensor", + "licensee", + "franchisor", + "franchisee", + "buyer", + "seller", + "contractor", + "shareholder", + "joint_venturer", + "investor", + "insurer", + "insured", + "enacting_authority", + "empowered_authority", + "settlor", + "trustee", + "beneficiary", + "debater", + "director", + "governing_jurisdiction", + "clerk", + "witness", + "other", + "non_party", + ] + """The role of the person in relation to the subject of the document. + + The following roles are currently supported: | | | | ------------------------ | + ------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------ + | | `plaintiff` | A party initiating the case that is the subject of the + document. 
| | `petitioner` | A party initiating the petition that is the subject + of the document. | | `applicant` | A party initiating the application that is + the subject of the document. | | `appellant` | A party appealing the decision + that is the subject of the document. | | `appellee` | A party responding to the + appeal that is the subject of the document if they are explicitly referred to as + an 'appellee'. | | `claimant` | A party making a claim in the case that is the + subject of the document. | | `complainant` | A party making a complaint in the + case that is the subject of the document. | | `defendant` | A party defending + against the case that is the subject of the document. | | `respondent` | A party + responding to the petition, appeal, or application that is the subject of the + document. | | `prior_authority` | An authority (e.g., judge, tribunal, court) + that made a prior decision in the case that is the subject of the document. Both + individual judges and courts should be annotated with this role where + applicable. This is not to be used for authorities cited as precedent, only for + those that made prior decisions in the same case. | | `prosecutor` | A lawyer + prosecuting the case that is the subject of the document. | | `defense_counsel` + | A lawyer defending the case that is the subject of the document. | | `amicus` + | A party filing an amicus curiae brief in the case that is the subject of the + document. | | `intervener` | A party attempting to or that has intervened in the + case that is the subject of the document. | | `borrower` | A party borrowing + money or other assets under the agreement that is the subject of the document, + including 'mortgagors' and 'debtors'. | | `lender` | A party lending money or + other assets under the agreement that is the subject of the document, including + 'mortgagees' and 'creditors'. | | `guarantor` | A party guaranteeing obligations + under the agreement that is the subject of the document, including 'sureties'. | + | `lessee` | A party leasing goods or services under the agreement that is the + subject of the document, including 'tenants'. | | `lessor` | A party leasing + goods or services under the agreement that is the subject of the document, + including 'landlords'. | | `employer` | A party employing personnel under the + agreement that is the subject of the document. | | `employee` | A party employed + under the agreement that is the subject of the document. | | `licensor` | A + party licensing intellectual property or other rights under the agreement that + are the subject of the document. | | `licensee` | A party licensed to use + intellectual property or other rights under the agreement that are the subject + of the document. | | `franchisor` | A party granting a franchise under the + agreement that is the subject of the document. | | `franchisee` | A party + granted a franchise under the agreement that is the subject of the document. | | + `buyer` | A party purchasing goods or services under the agreement that is the + subject of the document, including 'purchasers', 'customers', and 'clients'. | | + `seller` | A party selling or providing goods or services under the agreement + that is the subject of the document, including 'Vendors', 'Suppliers', and + 'Service Providers' (where such parties are actually providing goods or services + under the agreement). | | `contractor` | A party contracted to perform work or + services under the agreement that is the subject of the document, including + 'consultants'. 
| | `shareholder` | A party holding shares or equity under the + agreement that is the subject of the document. | | `joint_venturer` | A party + participating in a joint venture under the agreement that is the subject of the + document. | | `investor` | A party investing money or assets under the agreement + that is the subject of the document. | | `insurer` | A party providing insurance + under the agreement that is the subject of the document. | | `insured` | A party + insured under the agreement that is the subject of the document. | | `settlor` | + A party establishing the trust that is the subject of the document. | | + `trustee` | A party managing the trust that is the subject of the document. | | + `beneficiary` | A party benefiting from the trust that is the subject of the + document. | | `enacting_authority` | An authority (e.g., legislature, regulator, + Minister/Secretary, President/Prime Minister, tribunal, court, judge) giving + legal effect to or authorizing the document. All relevant individuals and bodies + should be annotated with this role where applicable. | | `empowered_authority` | + An authority (e.g., government agency, regulator, Minister/Secretary, + President/Prime Minister, tribunal, court) empowered by the document to carry + out functions or duties. | | `debater` | A person participating in the debate + that is the subject of the document. | | `governing_jurisdiction` | The + jurisdiction whose laws govern the document. | | `director` | A director or + other officer of a corporate legal person mentioned in the document. | | `clerk` + | A clerk, notary, or other official certifying, witnessing, filing, recording, + registering, or otherwise administering the document. | | `witness` | A witness + witnessing the signing of the document, or whose testimony is part of the case + that is the subject of the document. | | `other` | A party to the case, + agreement, legislation, or regulation that is the subject of the document that + does not fit into any of the other roles. | | `non_party` | A legal person + mentioned in the document that is not a party to the case, agreement, + legislation, or regulation that is the subject of the document. | + """ + + type: Literal["natural", "corporate", "politic"] + """ + The legal entity type of the person, being one of `natural`, `corporate`, or + `politic`. + + `natural` denotes a human being in their capacity as a natural legal person, + including when representing unincorporated entities such as partnerships and + trusts. + + `corporate` denotes a body corporate such as a company, incorporated + partnership, or statutory corporation. + + `politic` denotes a body politic such as a court, state, government, or + intergovernmental organization. + """ + + +class ResultDocumentPhoneNumberMention(BaseModel): + """A zero-based, half-open span into the Unicode code point space of input text. + + All spans are globally laminar and well-nested similar to XML—it is impossible for any two spans to partially overlap; they can only be disjoint, adjacent, or wholly nested. Spans of the exact same type (e.g., segments) will never be duplicated. + + A span cannot be empty and will never start or end at whitespace. + + Note that, when using programming languages other than Python (which uses zero-based, half-open, Unicode code point-spaced string indexing), indices may need to be translated accordingly (for example, JavaScript slices into UTF-16 code units instead of Unicode code points). 
+ """ + + end: int + + start: int + + +class ResultDocumentPhoneNumber(BaseModel): + """A valid phone number identified in a document belonging to a legal person. + + If a phone number was mentioned in the document but is not valid, possible, or attributable to a legal person, it will not be extracted. + """ + + mentions: List[ResultDocumentPhoneNumberMention] + """ + An array of one or more spans within the document's text where the phone number + is mentioned. + """ + + number: str + """ + The normalized phone number in E.123 international notation conforming with + local conventions on the use of spaces and hyphens as separators. + """ + + person: str + """The unique identifier of the person that this phone number belongs to.""" + + +class ResultDocumentQuoteSpan(BaseModel): + """The span of the segment within the document's text.""" + + end: int + + start: int + + +class ResultDocumentQuote(BaseModel): + """A quotation within a document.""" + + amending: bool + """ + Whether the quote is being used to amend or modify content, typically in other + documents. + """ + + source_document: Optional[str] = None + """ + A unique identifier for an external document in the format `exd:{index}` where + `{index}` is a non-negative incrementing integer starting from zero. + """ + + source_person: Optional[str] = None + """ + A unique identifier for a legal person in the format `per:{index}` where + `{index}` is a non-negative incrementing integer starting from zero. + """ + + source_segment: Optional[str] = None + """ + A unique identifier for a segment in the format `seg:{index}` where `{index}` is + a non-negative incrementing integer starting from zero. + """ + + span: ResultDocumentQuoteSpan + """The span of the segment within the document's text.""" + + +class ResultDocumentSegmentCode(BaseModel): + """A zero-based, half-open span into the Unicode code point space of input text. + + All spans are globally laminar and well-nested similar to XML—it is impossible for any two spans to partially overlap; they can only be disjoint, adjacent, or wholly nested. Spans of the exact same type (e.g., segments) will never be duplicated. + + A span cannot be empty and will never start or end at whitespace. + + Note that, when using programming languages other than Python (which uses zero-based, half-open, Unicode code point-spaced string indexing), indices may need to be translated accordingly (for example, JavaScript slices into UTF-16 code units instead of Unicode code points). + """ + + end: int + + start: int + + +class ResultDocumentSegmentSpan(BaseModel): + """The span of the segment within the document's text.""" + + end: int + + start: int + + +class ResultDocumentSegmentTitle(BaseModel): + """A zero-based, half-open span into the Unicode code point space of input text. + + All spans are globally laminar and well-nested similar to XML—it is impossible for any two spans to partially overlap; they can only be disjoint, adjacent, or wholly nested. Spans of the exact same type (e.g., segments) will never be duplicated. + + A span cannot be empty and will never start or end at whitespace. + + Note that, when using programming languages other than Python (which uses zero-based, half-open, Unicode code point-spaced string indexing), indices may need to be translated accordingly (for example, JavaScript slices into UTF-16 code units instead of Unicode code points). 
+ """ + + end: int + + start: int + + +class ResultDocumentSegmentTypeName(BaseModel): + """A zero-based, half-open span into the Unicode code point space of input text. + + All spans are globally laminar and well-nested similar to XML—it is impossible for any two spans to partially overlap; they can only be disjoint, adjacent, or wholly nested. Spans of the exact same type (e.g., segments) will never be duplicated. + + A span cannot be empty and will never start or end at whitespace. + + Note that, when using programming languages other than Python (which uses zero-based, half-open, Unicode code point-spaced string indexing), indices may need to be translated accordingly (for example, JavaScript slices into UTF-16 code units instead of Unicode code points). + """ + + end: int + + start: int + + +class ResultDocumentSegment(BaseModel): + """ + A segment within the document representing a structurally distinct portion of the document's content. + """ + + id: str + """ + The unique identifier of the segment in the format `seg:{index}` where `{index}` + is a non-negative incrementing integer starting from zero. + """ + + category: Literal["front_matter", "scope", "main", "annotation", "back_matter", "other"] + """ + The functional 'category' of the segment within the document, being one of + `front_matter`, `scope`, `main`, `annotation`, `back_matter`, or `other`. + + `front_matter` denotes non-operative contextualizing content occurring at the + start of a document such as a preamble or recitals. + + `scope` denotes operative content defining the application or interpretation of + a document such as definition sections and governing law clauses. + + `main` denotes operative, non-scopal content. + + `annotation` denotes non-operative annotative content providing explanatory or + referential information such as commentary, footnotes, and endnotes. + + `back_matter` denotes non-operative contextualizing content occurring at the end + of a document such as authority statements. + + `other` denotes content that does not fit into any of the other categories. + """ + + code: Optional[ResultDocumentSegmentCode] = None + """A zero-based, half-open span into the Unicode code point space of input text. + + All spans are globally laminar and well-nested similar to XML—it is impossible + for any two spans to partially overlap; they can only be disjoint, adjacent, or + wholly nested. Spans of the exact same type (e.g., segments) will never be + duplicated. + + A span cannot be empty and will never start or end at whitespace. + + Note that, when using programming languages other than Python (which uses + zero-based, half-open, Unicode code point-spaced string indexing), indices may + need to be translated accordingly (for example, JavaScript slices into UTF-16 + code units instead of Unicode code points). + """ + + kind: Literal["container", "unit", "item", "figure"] + """ + The structural 'kind' of the segment, being one of `container`, `unit`, `item`, + or `figure`. + + A `container` is a structural or semantic grouping of content such as a chapter. + It can contain segments of any kind or none at all. + + A `unit` is a single syntactically independent unit of text such as a paragraph. + It can only contain `item`s and `figure`s. + + An `item` is a syntactically subordinate unit of text such as an item in a + run-in list. It can only contain other `item`s. 
Note that an `item` is + conceptually distinct from a list item—it is perfectly possible to encounter + list items that are syntactically independent of their surrounding items just as + it is possible to encounter dependent clauses that do not appear as part of a + list. + + A `figure` is a visually structured or tabular unit of content such as a + diagram, equation, or table. It cannot contain segments. + """ + + parent: Optional[str] = None + """ + A unique identifier for a segment in the format `seg:{index}` where `{index}` is + a non-negative incrementing integer starting from zero. + """ + + span: ResultDocumentSegmentSpan + """The span of the segment within the document's text.""" + + title: Optional[ResultDocumentSegmentTitle] = None + """A zero-based, half-open span into the Unicode code point space of input text. + + All spans are globally laminar and well-nested similar to XML—it is impossible + for any two spans to partially overlap; they can only be disjoint, adjacent, or + wholly nested. Spans of the exact same type (e.g., segments) will never be + duplicated. + + A span cannot be empty and will never start or end at whitespace. + + Note that, when using programming languages other than Python (which uses + zero-based, half-open, Unicode code point-spaced string indexing), indices may + need to be translated accordingly (for example, JavaScript slices into UTF-16 + code units instead of Unicode code points). + """ + + type: Optional[ + Literal[ + "title", + "book", + "part", + "chapter", + "subchapter", + "division", + "subdivision", + "subpart", + "subtitle", + "table_of_contents", + "article", + "section", + "regulation", + "rule", + "clause", + "paragraph", + "subarticle", + "subsection", + "subregulation", + "subrule", + "subclause", + "subparagraph", + "item", + "subitem", + "point", + "indent", + "schedule", + "annex", + "appendix", + "exhibit", + "recital", + "signature", + "note", + "figure", + "table", + "formula", + ] + ] = None + """ + The addressable 'type' of the segment within the document's referential scheme + and hierarchy, whether defined explicitly (e.g., by headings, such as + 'Section 2. Definitions'), implicitly (e.g., by way of reference, such as 'as + defined in Section 2'), or by convention (e.g., [42] in a judgment often denotes + a `paragraph`, independent provisions in statute are often `section`s, etc.). If + the type is not known or not applicable, it will be set to `null`. + + Note that, although many segment types may coincide with syntactic constructs, + they should be thought of purely as distinct formal citable units. Most + paragraphs (in the syntactic sense) will not have the `paragraph` type, for + example. That type is reserved for segments that would formally be cited as a + 'Paragraph' within the document's referential scheme. + + The following types are currently supported: `title`, `book`, `part`, `chapter`, + `subchapter`, `division`, `subdivision`, `subpart`, `subtitle`, + `table_of_contents`, `article`, `section`, `regulation`, `rule`, `clause`, + `paragraph`, `subarticle`, `subsection`, `subregulation`, `subrule`, + `subclause`, `subparagraph`, `item`, `subitem`, `point`, `indent`, `schedule`, + `annex`, `appendix`, `exhibit`, `recital`, `signature`, `note`, `figure`, + `table`, and `formula`. + + The `title`, `book`, `part`, `chapter`, `subchapter`, `division`, `subdivision`, + `subpart`, `subtitle`, and `table_of_contents` types are exclusive to the + `container` kind. 
+ + The `figure` kind only supports the `figure`, `table`, and `formula` types, all + of which are exclusive to it. + """ + + type_name: Optional[ResultDocumentSegmentTypeName] = None + """A zero-based, half-open span into the Unicode code point space of input text. + + All spans are globally laminar and well-nested similar to XML—it is impossible + for any two spans to partially overlap; they can only be disjoint, adjacent, or + wholly nested. Spans of the exact same type (e.g., segments) will never be + duplicated. + + A span cannot be empty and will never start or end at whitespace. + + Note that, when using programming languages other than Python (which uses + zero-based, half-open, Unicode code point-spaced string indexing), indices may + need to be translated accordingly (for example, JavaScript slices into UTF-16 + code units instead of Unicode code points). + """ + + +class ResultDocumentSubtitle(BaseModel): + """A zero-based, half-open span into the Unicode code point space of input text. + + All spans are globally laminar and well-nested similar to XML—it is impossible for any two spans to partially overlap; they can only be disjoint, adjacent, or wholly nested. Spans of the exact same type (e.g., segments) will never be duplicated. + + A span cannot be empty and will never start or end at whitespace. + + Note that, when using programming languages other than Python (which uses zero-based, half-open, Unicode code point-spaced string indexing), indices may need to be translated accordingly (for example, JavaScript slices into UTF-16 code units instead of Unicode code points). + """ + + end: int + + start: int + + +class ResultDocumentTermMeaning(BaseModel): + """The span of the segment within the document's text.""" + + end: int + + start: int + + +class ResultDocumentTermMention(BaseModel): + """A zero-based, half-open span into the Unicode code point space of input text. + + All spans are globally laminar and well-nested similar to XML—it is impossible for any two spans to partially overlap; they can only be disjoint, adjacent, or wholly nested. Spans of the exact same type (e.g., segments) will never be duplicated. + + A span cannot be empty and will never start or end at whitespace. + + Note that, when using programming languages other than Python (which uses zero-based, half-open, Unicode code point-spaced string indexing), indices may need to be translated accordingly (for example, JavaScript slices into UTF-16 code units instead of Unicode code points). + """ + + end: int + + start: int + + +class ResultDocumentTermName(BaseModel): + """The span of the segment within the document's text.""" + + end: int + + start: int + + +class ResultDocumentTerm(BaseModel): + """A term assigned a definite meaning within a document.""" + + id: str + """ + The unique identifier of the term in the format `term:{index}` where `{index}` + is a non-negative incrementing integer starting from zero. + """ + + meaning: ResultDocumentTermMeaning + """The span of the segment within the document's text.""" + + mentions: List[ResultDocumentTermMention] + """ + An array of spans within the document's text where the term is mentioned outside + of its definition. + + It is possible for the term to have no mentions if, outside of its definition, + it is never referred to in the document. 
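Segments reference their enclosing segment through `parent` (a `seg:{index}` identifier), so the document outline can be rebuilt by grouping segments under their parents. A brief sketch, assuming `document` is a `ResultDocument` taken from a response.

from collections import defaultdict

children = defaultdict(list)
for segment in document.segments:
    if segment.parent is not None:
        children[segment.parent].append(segment)

roots = [s for s in document.segments if s.parent is None]


def outline(segment, depth: int = 0) -> None:
    label = segment.type or segment.kind
    print("  " * depth, label, f"[{segment.span.start}:{segment.span.end}]")
    for child in children[segment.id]:
        outline(child, depth + 1)


for root in roots:
    outline(root)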
+ """ + + name: ResultDocumentTermName + """The span of the segment within the document's text.""" + + +class ResultDocumentTitle(BaseModel): + """A zero-based, half-open span into the Unicode code point space of input text. + + All spans are globally laminar and well-nested similar to XML—it is impossible for any two spans to partially overlap; they can only be disjoint, adjacent, or wholly nested. Spans of the exact same type (e.g., segments) will never be duplicated. + + A span cannot be empty and will never start or end at whitespace. + + Note that, when using programming languages other than Python (which uses zero-based, half-open, Unicode code point-spaced string indexing), indices may need to be translated accordingly (for example, JavaScript slices into UTF-16 code units instead of Unicode code points). + """ + + end: int + + start: int + + +class ResultDocumentWebsiteMention(BaseModel): + """A zero-based, half-open span into the Unicode code point space of input text. + + All spans are globally laminar and well-nested similar to XML—it is impossible for any two spans to partially overlap; they can only be disjoint, adjacent, or wholly nested. Spans of the exact same type (e.g., segments) will never be duplicated. + + A span cannot be empty and will never start or end at whitespace. + + Note that, when using programming languages other than Python (which uses zero-based, half-open, Unicode code point-spaced string indexing), indices may need to be translated accordingly (for example, JavaScript slices into UTF-16 code units instead of Unicode code points). + """ + + end: int + + start: int + + +class ResultDocumentWebsite(BaseModel): + """A website identified in a document belonging to a legal person. + + If a website was mentioned in the document but is not attributable to a legal person, it will not be extracted. + """ + + mentions: List[ResultDocumentWebsiteMention] + """ + An array of one or more spans within the document's text where the website is + mentioned (including paths and slugs which are not part of the website's + normalized URL). + """ + + person: str + """The unique identifier of the person that this website belongs to.""" + + url: str + """The normalized URL of the website in the form `https://{host}/`.""" + + +class ResultDocument(BaseModel): + """The enriched document.""" + + crossreferences: List[ResultDocumentCrossreference] + """ + An array of cross-references within the document pointing to a single segment or + a span of segments. + """ + + dates: List[ResultDocumentDate] + """ + An array of dates identified in the document belonging to one of the following + types: `creation`, `signature`, `effective`, `expiry`, `delivery`, `renewal`, + `payment`, `birth`, or `death`. + + Only Gregorian dates between the years 1000 and 9999 (inclusive) fitting into + one of the supported date types are extractable. + """ + + emails: List[ResultDocumentEmail] + """ + An array of email addresses identified in the document belonging to legal + persons. + + Email addresses mentioned in the document that are not attributable to legal + persons will not be extracted. + """ + + external_documents: List[ResultDocumentExternalDocument] + """An array of documents identified within the document.""" + + headings: List[ResultDocumentHeading] + """An array of spans within the document's text constituting headings.""" + + id_numbers: List[ResultDocumentIDNumber] + """ + An array of identification numbers identified in the document belonging to legal + persons. 
+ + Identification numbers mentioned in the document that are not attributable to + legal persons will not be extracted. + """ + + junk: List[ResultDocumentJunk] + """ + An array of spans within the document's text constituting non-operative, + non-substantive 'junk' content such as headers, footers, page numbers, and OCR + artifacts. + """ + + jurisdiction: Optional[str] = None + """ + A jurisdiction code representing a country (via an initial country code) and, + optionally, a subdivision within that country (via a subsequent subdivision code + prefixed by a hyphen). + + All 249 ISO 3166-1 alpha-2 country codes are representable in addition to + special `INT` and `EU` codes for international and European Union law, + respectively. + + All 5,046 ISO 3166-2 codes are also representable in addition to a special `FED` + code for federal law. + """ + + locations: List[ResultDocumentLocation] + """An array of locations identified in the document.""" + + persons: List[ResultDocumentPerson] + """An array of legal persons identified in the document.""" + + phone_numbers: List[ResultDocumentPhoneNumber] + """ + An array of valid phone numbers identified in the document belonging to legal + persons. + + Phone numbers mentioned in the document that are not valid, possible, or + attributable to legal persons will not be extracted. + """ + + quotes: List[ResultDocumentQuote] + """An array of quotations within the document.""" + + segments: List[ResultDocumentSegment] + """ + An array of segments within the document representing structurally distinct + portions of its content. + """ + + subtitle: Optional[ResultDocumentSubtitle] = None + """A zero-based, half-open span into the Unicode code point space of input text. + + All spans are globally laminar and well-nested similar to XML—it is impossible + for any two spans to partially overlap; they can only be disjoint, adjacent, or + wholly nested. Spans of the exact same type (e.g., segments) will never be + duplicated. + + A span cannot be empty and will never start or end at whitespace. + + Note that, when using programming languages other than Python (which uses + zero-based, half-open, Unicode code point-spaced string indexing), indices may + need to be translated accordingly (for example, JavaScript slices into UTF-16 + code units instead of Unicode code points). + """ + + terms: List[ResultDocumentTerm] + """An array of terms assigned definite meanings within the document.""" + + title: Optional[ResultDocumentTitle] = None + """A zero-based, half-open span into the Unicode code point space of input text. + + All spans are globally laminar and well-nested similar to XML—it is impossible + for any two spans to partially overlap; they can only be disjoint, adjacent, or + wholly nested. Spans of the exact same type (e.g., segments) will never be + duplicated. + + A span cannot be empty and will never start or end at whitespace. + + Note that, when using programming languages other than Python (which uses + zero-based, half-open, Unicode code point-spaced string indexing), indices may + need to be translated accordingly (for example, JavaScript slices into UTF-16 + code units instead of Unicode code points). + """ + + type: Literal["statute", "regulation", "decision", "contract", "other"] + """ + The type of the document, being one of `statute`, `regulation`, `decision`, + `contract`, or `other`. + + `statute` denotes primary legislation such as acts, bills, codes, and + constitutions. 
+
+    `regulation` denotes secondary legislation such as rules, statutory instruments,
+    and ordinances.
+
+    `decision` denotes judicial or quasi-judicial decisions such as court judgments,
+    judicial opinions, and tribunal rulings.
+
+    `other` is used for all other types of legal documents that do not fit into any
+    of the predefined types.
+    """
+
+    version: Literal["ilgs@1"]
+
+    websites: List[ResultDocumentWebsite]
+    """An array of websites identified in the document belonging to legal persons.
+
+    Websites mentioned in the document that are not attributable to legal persons
+    will not be extracted.
+    """
+
+
+class Result(BaseModel):
+    """An enriched document alongside its index in the input array of texts."""
+
+    document: ResultDocument
+    """The enriched document."""
+
+    index: int
+    """
+    The index of this document in the input array of texts, starting at `0` (and,
+    therefore, ending at the number of inputs minus `1`).
+    """
+
+
+class Usage(BaseModel):
+    """Statistics about the usage of resources in the process of enriching the input."""
+
+    input_tokens: int
+    """The total number of tokens inputted to the model."""
+
+
+class EnrichmentResponse(BaseModel):
+    results: List[Result]
+    """
+    The input documents enriched into version 1.0.0 of the Isaacus Legal Graph
+    Schema (ILGS).
+
+    All spans in an enriched document graph are indexed into the Unicode code point
+    space of a source document. Access to source documents is thus required to
+    resolve spans into text.
+
+    The start and end indices of spans are zero-based (i.e., the first Unicode code
+    point in the document is at index 0) and half-open (i.e., the end index is
+    exclusive).
+
+    All spans are globally laminar and well-nested similar to XML—it is impossible
+    for any two spans to partially overlap; they can only be disjoint, adjacent, or
+    wholly nested.
+
+    Spans of the exact same type (e.g., segments) will never be duplicated.
+
+    Spans cannot be empty and will never start or end at whitespace.
+
+    When using programming languages other than Python (which uses zero-based,
+    half-open, Unicode code point-spaced string indexing), indices may need to be
+    translated accordingly (for example, JavaScript slices into UTF-16 code units
+    instead of Unicode code points).
+    """
+
+    usage: Usage
+    """Statistics about the usage of resources in the process of enriching the input."""
diff --git a/src/isaacus/types/extractions/answer_extraction_response.py b/src/isaacus/types/extractions/answer_extraction_response.py
index 1466a62..ba610f6 100644
--- a/src/isaacus/types/extractions/answer_extraction_response.py
+++ b/src/isaacus/types/extractions/answer_extraction_response.py
@@ -8,6 +8,8 @@


 class ExtractionAnswer(BaseModel):
+    """An answer extracted from a text."""
+
     end: int
     """
     The index of the character immediately after the last character of the answer in
@@ -31,6 +33,8 @@ class ExtractionAnswer(BaseModel):


 class Extraction(BaseModel):
+    """The result of extracting answers from a text."""
+
     answers: List[ExtractionAnswer]
     """Answers extracted from the text, ordered from highest to lowest score."""

@@ -53,6 +57,10 @@ class Extraction(BaseModel):


 class Usage(BaseModel):
+    """
+    Statistics about the usage of resources in the process of extracting answers from the texts.
+ """ + input_tokens: int """The number of tokens inputted to the model.""" diff --git a/src/isaacus/types/extractions/qa_create_params.py b/src/isaacus/types/extractions/qa_create_params.py index 867d3d1..8cc1b91 100644 --- a/src/isaacus/types/extractions/qa_create_params.py +++ b/src/isaacus/types/extractions/qa_create_params.py @@ -57,11 +57,13 @@ class QaCreateParams(TypedDict, total=False): class ChunkingOptions(TypedDict, total=False): + """Options for how to split text into smaller chunks.""" + overlap_ratio: Optional[float] """A number greater than or equal to 0 and less than 1.""" overlap_tokens: Optional[int] - """A whole number greater than -1.""" + """A whole number greater than or equal to 0.""" size: Optional[int] """A whole number greater than or equal to 1.""" diff --git a/src/isaacus/types/reranking_create_params.py b/src/isaacus/types/reranking_create_params.py index 7f7f800..416cab9 100644 --- a/src/isaacus/types/reranking_create_params.py +++ b/src/isaacus/types/reranking_create_params.py @@ -67,11 +67,13 @@ class RerankingCreateParams(TypedDict, total=False): class ChunkingOptions(TypedDict, total=False): + """Options for how to split text into smaller chunks.""" + overlap_ratio: Optional[float] """A number greater than or equal to 0 and less than 1.""" overlap_tokens: Optional[int] - """A whole number greater than -1.""" + """A whole number greater than or equal to 0.""" size: Optional[int] """A whole number greater than or equal to 1.""" diff --git a/src/isaacus/types/reranking_response.py b/src/isaacus/types/reranking_response.py index 71cc1ce..a0ba8d8 100644 --- a/src/isaacus/types/reranking_response.py +++ b/src/isaacus/types/reranking_response.py @@ -22,6 +22,8 @@ class Result(BaseModel): class Usage(BaseModel): + """Statistics about the usage of resources in the process of reranking the texts.""" + input_tokens: int """The number of tokens inputted to the model.""" diff --git a/tests/api_resources/test_enrichments.py b/tests/api_resources/test_enrichments.py new file mode 100644 index 0000000..5ef70d1 --- /dev/null +++ b/tests/api_resources/test_enrichments.py @@ -0,0 +1,118 @@ +# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details. 
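+#
+# Note for readers of these tests: spans in an `EnrichmentResponse` are zero-based,
+# half-open indices into the Unicode code point space of the input text, so in
+# Python the covered text can be recovered with a plain slice. A minimal
+# illustrative sketch (not part of the generated API surface; `span` here is
+# assumed to be any response object exposing integer `start` and `end` fields):
+#
+#     def span_text(text: str, span) -> str:
+#         # Python str indexing already operates on Unicode code points.
+#         return text[span.start : span.end]
+#
+# Languages that index strings by UTF-16 code units (such as JavaScript) may need
+# to translate these indices first.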
+ +from __future__ import annotations + +import os +from typing import Any, cast + +import pytest + +from isaacus import Isaacus, AsyncIsaacus +from tests.utils import assert_matches_type +from isaacus.types import EnrichmentResponse + +base_url = os.environ.get("TEST_API_BASE_URL", "http://127.0.0.1:4010") + + +class TestEnrichments: + parametrize = pytest.mark.parametrize("client", [False, True], indirect=True, ids=["loose", "strict"]) + + @pytest.mark.skip(reason="Prism tests are disabled") + @parametrize + def test_method_create(self, client: Isaacus) -> None: + enrichment = client.enrichments.create( + model="kanon-2-enricher-preview", + texts=['1.5 You (the "User") agree to be bound by these Terms.'], + ) + assert_matches_type(EnrichmentResponse, enrichment, path=["response"]) + + @pytest.mark.skip(reason="Prism tests are disabled") + @parametrize + def test_method_create_with_all_params(self, client: Isaacus) -> None: + enrichment = client.enrichments.create( + model="kanon-2-enricher-preview", + texts=['1.5 You (the "User") agree to be bound by these Terms.'], + overflow_strategy=None, + ) + assert_matches_type(EnrichmentResponse, enrichment, path=["response"]) + + @pytest.mark.skip(reason="Prism tests are disabled") + @parametrize + def test_raw_response_create(self, client: Isaacus) -> None: + response = client.enrichments.with_raw_response.create( + model="kanon-2-enricher-preview", + texts=['1.5 You (the "User") agree to be bound by these Terms.'], + ) + + assert response.is_closed is True + assert response.http_request.headers.get("X-Stainless-Lang") == "python" + enrichment = response.parse() + assert_matches_type(EnrichmentResponse, enrichment, path=["response"]) + + @pytest.mark.skip(reason="Prism tests are disabled") + @parametrize + def test_streaming_response_create(self, client: Isaacus) -> None: + with client.enrichments.with_streaming_response.create( + model="kanon-2-enricher-preview", + texts=['1.5 You (the "User") agree to be bound by these Terms.'], + ) as response: + assert not response.is_closed + assert response.http_request.headers.get("X-Stainless-Lang") == "python" + + enrichment = response.parse() + assert_matches_type(EnrichmentResponse, enrichment, path=["response"]) + + assert cast(Any, response.is_closed) is True + + +class TestAsyncEnrichments: + parametrize = pytest.mark.parametrize( + "async_client", [False, True, {"http_client": "aiohttp"}], indirect=True, ids=["loose", "strict", "aiohttp"] + ) + + @pytest.mark.skip(reason="Prism tests are disabled") + @parametrize + async def test_method_create(self, async_client: AsyncIsaacus) -> None: + enrichment = await async_client.enrichments.create( + model="kanon-2-enricher-preview", + texts=['1.5 You (the "User") agree to be bound by these Terms.'], + ) + assert_matches_type(EnrichmentResponse, enrichment, path=["response"]) + + @pytest.mark.skip(reason="Prism tests are disabled") + @parametrize + async def test_method_create_with_all_params(self, async_client: AsyncIsaacus) -> None: + enrichment = await async_client.enrichments.create( + model="kanon-2-enricher-preview", + texts=['1.5 You (the "User") agree to be bound by these Terms.'], + overflow_strategy=None, + ) + assert_matches_type(EnrichmentResponse, enrichment, path=["response"]) + + @pytest.mark.skip(reason="Prism tests are disabled") + @parametrize + async def test_raw_response_create(self, async_client: AsyncIsaacus) -> None: + response = await async_client.enrichments.with_raw_response.create( + model="kanon-2-enricher-preview", + texts=['1.5 
You (the "User") agree to be bound by these Terms.'], + ) + + assert response.is_closed is True + assert response.http_request.headers.get("X-Stainless-Lang") == "python" + enrichment = await response.parse() + assert_matches_type(EnrichmentResponse, enrichment, path=["response"]) + + @pytest.mark.skip(reason="Prism tests are disabled") + @parametrize + async def test_streaming_response_create(self, async_client: AsyncIsaacus) -> None: + async with async_client.enrichments.with_streaming_response.create( + model="kanon-2-enricher-preview", + texts=['1.5 You (the "User") agree to be bound by these Terms.'], + ) as response: + assert not response.is_closed + assert response.http_request.headers.get("X-Stainless-Lang") == "python" + + enrichment = await response.parse() + assert_matches_type(EnrichmentResponse, enrichment, path=["response"]) + + assert cast(Any, response.is_closed) is True diff --git a/tests/test_client.py b/tests/test_client.py index 0478e5f..841655d 100644 --- a/tests/test_client.py +++ b/tests/test_client.py @@ -8,10 +8,11 @@ import json import asyncio import inspect +import dataclasses import tracemalloc -from typing import Any, Union, cast +from typing import Any, Union, TypeVar, Callable, Iterable, Iterator, Optional, Coroutine, cast from unittest import mock -from typing_extensions import Literal +from typing_extensions import Literal, AsyncIterator, override import httpx import pytest @@ -36,6 +37,7 @@ from .utils import update_env +T = TypeVar("T") base_url = os.environ.get("TEST_API_BASE_URL", "http://127.0.0.1:4010") api_key = "My API Key" @@ -50,6 +52,57 @@ def _low_retry_timeout(*_args: Any, **_kwargs: Any) -> float: return 0.1 +def mirror_request_content(request: httpx.Request) -> httpx.Response: + return httpx.Response(200, content=request.content) + + +# note: we can't use the httpx.MockTransport class as it consumes the request +# body itself, which means we can't test that the body is read lazily +class MockTransport(httpx.BaseTransport, httpx.AsyncBaseTransport): + def __init__( + self, + handler: Callable[[httpx.Request], httpx.Response] + | Callable[[httpx.Request], Coroutine[Any, Any, httpx.Response]], + ) -> None: + self.handler = handler + + @override + def handle_request( + self, + request: httpx.Request, + ) -> httpx.Response: + assert not inspect.iscoroutinefunction(self.handler), "handler must not be a coroutine function" + assert inspect.isfunction(self.handler), "handler must be a function" + return self.handler(request) + + @override + async def handle_async_request( + self, + request: httpx.Request, + ) -> httpx.Response: + assert inspect.iscoroutinefunction(self.handler), "handler must be a coroutine function" + return await self.handler(request) + + +@dataclasses.dataclass +class Counter: + value: int = 0 + + +def _make_sync_iterator(iterable: Iterable[T], counter: Optional[Counter] = None) -> Iterator[T]: + for item in iterable: + if counter: + counter.value += 1 + yield item + + +async def _make_async_iterator(iterable: Iterable[T], counter: Optional[Counter] = None) -> AsyncIterator[T]: + for item in iterable: + if counter: + counter.value += 1 + yield item + + def _get_open_connections(client: Isaacus | AsyncIsaacus) -> int: transport = client._client._transport assert isinstance(transport, httpx.HTTPTransport) or isinstance(transport, httpx.AsyncHTTPTransport) @@ -59,51 +112,49 @@ def _get_open_connections(client: Isaacus | AsyncIsaacus) -> int: class TestIsaacus: - client = Isaacus(base_url=base_url, api_key=api_key, 
_strict_response_validation=True) - @pytest.mark.respx(base_url=base_url) - def test_raw_response(self, respx_mock: MockRouter) -> None: + def test_raw_response(self, respx_mock: MockRouter, client: Isaacus) -> None: respx_mock.post("/foo").mock(return_value=httpx.Response(200, json={"foo": "bar"})) - response = self.client.post("/foo", cast_to=httpx.Response) + response = client.post("/foo", cast_to=httpx.Response) assert response.status_code == 200 assert isinstance(response, httpx.Response) assert response.json() == {"foo": "bar"} @pytest.mark.respx(base_url=base_url) - def test_raw_response_for_binary(self, respx_mock: MockRouter) -> None: + def test_raw_response_for_binary(self, respx_mock: MockRouter, client: Isaacus) -> None: respx_mock.post("/foo").mock( return_value=httpx.Response(200, headers={"Content-Type": "application/binary"}, content='{"foo": "bar"}') ) - response = self.client.post("/foo", cast_to=httpx.Response) + response = client.post("/foo", cast_to=httpx.Response) assert response.status_code == 200 assert isinstance(response, httpx.Response) assert response.json() == {"foo": "bar"} - def test_copy(self) -> None: - copied = self.client.copy() - assert id(copied) != id(self.client) + def test_copy(self, client: Isaacus) -> None: + copied = client.copy() + assert id(copied) != id(client) - copied = self.client.copy(api_key="another My API Key") + copied = client.copy(api_key="another My API Key") assert copied.api_key == "another My API Key" - assert self.client.api_key == "My API Key" + assert client.api_key == "My API Key" - def test_copy_default_options(self) -> None: + def test_copy_default_options(self, client: Isaacus) -> None: # options that have a default are overridden correctly - copied = self.client.copy(max_retries=7) + copied = client.copy(max_retries=7) assert copied.max_retries == 7 - assert self.client.max_retries == 2 + assert client.max_retries == 2 copied2 = copied.copy(max_retries=6) assert copied2.max_retries == 6 assert copied.max_retries == 7 # timeout - assert isinstance(self.client.timeout, httpx.Timeout) - copied = self.client.copy(timeout=None) + assert isinstance(client.timeout, httpx.Timeout) + copied = client.copy(timeout=None) assert copied.timeout is None - assert isinstance(self.client.timeout, httpx.Timeout) + assert isinstance(client.timeout, httpx.Timeout) def test_copy_default_headers(self) -> None: client = Isaacus( @@ -138,6 +189,7 @@ def test_copy_default_headers(self) -> None: match="`default_headers` and `set_default_headers` arguments are mutually exclusive", ): client.copy(set_default_headers={}, default_headers={"X-Foo": "Bar"}) + client.close() def test_copy_default_query(self) -> None: client = Isaacus( @@ -175,13 +227,15 @@ def test_copy_default_query(self) -> None: ): client.copy(set_default_query={}, default_query={"foo": "Bar"}) - def test_copy_signature(self) -> None: + client.close() + + def test_copy_signature(self, client: Isaacus) -> None: # ensure the same parameters that can be passed to the client are defined in the `.copy()` method init_signature = inspect.signature( # mypy doesn't like that we access the `__init__` property. 
- self.client.__init__, # type: ignore[misc] + client.__init__, # type: ignore[misc] ) - copy_signature = inspect.signature(self.client.copy) + copy_signature = inspect.signature(client.copy) exclude_params = {"transport", "proxies", "_strict_response_validation"} for name in init_signature.parameters.keys(): @@ -192,12 +246,12 @@ def test_copy_signature(self) -> None: assert copy_param is not None, f"copy() signature is missing the {name} param" @pytest.mark.skipif(sys.version_info >= (3, 10), reason="fails because of a memory leak that started from 3.12") - def test_copy_build_request(self) -> None: + def test_copy_build_request(self, client: Isaacus) -> None: options = FinalRequestOptions(method="get", url="/foo") def build_request(options: FinalRequestOptions) -> None: - client = self.client.copy() - client._build_request(options) + client_copy = client.copy() + client_copy._build_request(options) # ensure that the machinery is warmed up before tracing starts. build_request(options) @@ -254,14 +308,12 @@ def add_leak(leaks: list[tracemalloc.StatisticDiff], diff: tracemalloc.Statistic print(frame) raise AssertionError() - def test_request_timeout(self) -> None: - request = self.client._build_request(FinalRequestOptions(method="get", url="/foo")) + def test_request_timeout(self, client: Isaacus) -> None: + request = client._build_request(FinalRequestOptions(method="get", url="/foo")) timeout = httpx.Timeout(**request.extensions["timeout"]) # type: ignore assert timeout == DEFAULT_TIMEOUT - request = self.client._build_request( - FinalRequestOptions(method="get", url="/foo", timeout=httpx.Timeout(100.0)) - ) + request = client._build_request(FinalRequestOptions(method="get", url="/foo", timeout=httpx.Timeout(100.0))) timeout = httpx.Timeout(**request.extensions["timeout"]) # type: ignore assert timeout == httpx.Timeout(100.0) @@ -272,6 +324,8 @@ def test_client_timeout_option(self) -> None: timeout = httpx.Timeout(**request.extensions["timeout"]) # type: ignore assert timeout == httpx.Timeout(0) + client.close() + def test_http_client_timeout_option(self) -> None: # custom timeout given to the httpx client should be used with httpx.Client(timeout=None) as http_client: @@ -283,6 +337,8 @@ def test_http_client_timeout_option(self) -> None: timeout = httpx.Timeout(**request.extensions["timeout"]) # type: ignore assert timeout == httpx.Timeout(None) + client.close() + # no timeout given to the httpx client should not use the httpx default with httpx.Client() as http_client: client = Isaacus( @@ -293,6 +349,8 @@ def test_http_client_timeout_option(self) -> None: timeout = httpx.Timeout(**request.extensions["timeout"]) # type: ignore assert timeout == DEFAULT_TIMEOUT + client.close() + # explicitly passing the default timeout currently results in it being ignored with httpx.Client(timeout=HTTPX_DEFAULT_TIMEOUT) as http_client: client = Isaacus( @@ -303,6 +361,8 @@ def test_http_client_timeout_option(self) -> None: timeout = httpx.Timeout(**request.extensions["timeout"]) # type: ignore assert timeout == DEFAULT_TIMEOUT # our default + client.close() + async def test_invalid_http_client(self) -> None: with pytest.raises(TypeError, match="Invalid `http_client` arg"): async with httpx.AsyncClient() as http_client: @@ -314,14 +374,14 @@ async def test_invalid_http_client(self) -> None: ) def test_default_headers_option(self) -> None: - client = Isaacus( + test_client = Isaacus( base_url=base_url, api_key=api_key, _strict_response_validation=True, default_headers={"X-Foo": "bar"} ) - request = 
client._build_request(FinalRequestOptions(method="get", url="/foo")) + request = test_client._build_request(FinalRequestOptions(method="get", url="/foo")) assert request.headers.get("x-foo") == "bar" assert request.headers.get("x-stainless-lang") == "python" - client2 = Isaacus( + test_client2 = Isaacus( base_url=base_url, api_key=api_key, _strict_response_validation=True, @@ -330,10 +390,13 @@ def test_default_headers_option(self) -> None: "X-Stainless-Lang": "my-overriding-header", }, ) - request = client2._build_request(FinalRequestOptions(method="get", url="/foo")) + request = test_client2._build_request(FinalRequestOptions(method="get", url="/foo")) assert request.headers.get("x-foo") == "stainless" assert request.headers.get("x-stainless-lang") == "my-overriding-header" + test_client.close() + test_client2.close() + def test_validate_headers(self) -> None: client = Isaacus(base_url=base_url, api_key=api_key, _strict_response_validation=True) request = client._build_request(FinalRequestOptions(method="get", url="/foo")) @@ -362,8 +425,10 @@ def test_default_query_option(self) -> None: url = httpx.URL(request.url) assert dict(url.params) == {"foo": "baz", "query_param": "overridden"} - def test_request_extra_json(self) -> None: - request = self.client._build_request( + client.close() + + def test_request_extra_json(self, client: Isaacus) -> None: + request = client._build_request( FinalRequestOptions( method="post", url="/foo", @@ -374,7 +439,7 @@ def test_request_extra_json(self) -> None: data = json.loads(request.content.decode("utf-8")) assert data == {"foo": "bar", "baz": False} - request = self.client._build_request( + request = client._build_request( FinalRequestOptions( method="post", url="/foo", @@ -385,7 +450,7 @@ def test_request_extra_json(self) -> None: assert data == {"baz": False} # `extra_json` takes priority over `json_data` when keys clash - request = self.client._build_request( + request = client._build_request( FinalRequestOptions( method="post", url="/foo", @@ -396,8 +461,8 @@ def test_request_extra_json(self) -> None: data = json.loads(request.content.decode("utf-8")) assert data == {"foo": "bar", "baz": None} - def test_request_extra_headers(self) -> None: - request = self.client._build_request( + def test_request_extra_headers(self, client: Isaacus) -> None: + request = client._build_request( FinalRequestOptions( method="post", url="/foo", @@ -407,7 +472,7 @@ def test_request_extra_headers(self) -> None: assert request.headers.get("X-Foo") == "Foo" # `extra_headers` takes priority over `default_headers` when keys clash - request = self.client.with_options(default_headers={"X-Bar": "true"})._build_request( + request = client.with_options(default_headers={"X-Bar": "true"})._build_request( FinalRequestOptions( method="post", url="/foo", @@ -418,8 +483,8 @@ def test_request_extra_headers(self) -> None: ) assert request.headers.get("X-Bar") == "false" - def test_request_extra_query(self) -> None: - request = self.client._build_request( + def test_request_extra_query(self, client: Isaacus) -> None: + request = client._build_request( FinalRequestOptions( method="post", url="/foo", @@ -432,7 +497,7 @@ def test_request_extra_query(self) -> None: assert params == {"my_query_param": "Foo"} # if both `query` and `extra_query` are given, they are merged - request = self.client._build_request( + request = client._build_request( FinalRequestOptions( method="post", url="/foo", @@ -446,7 +511,7 @@ def test_request_extra_query(self) -> None: assert params == {"bar": "1", "foo": 
"2"} # `extra_query` takes priority over `query` when keys clash - request = self.client._build_request( + request = client._build_request( FinalRequestOptions( method="post", url="/foo", @@ -489,7 +554,71 @@ def test_multipart_repeating_array(self, client: Isaacus) -> None: ] @pytest.mark.respx(base_url=base_url) - def test_basic_union_response(self, respx_mock: MockRouter) -> None: + def test_binary_content_upload(self, respx_mock: MockRouter, client: Isaacus) -> None: + respx_mock.post("/upload").mock(side_effect=mirror_request_content) + + file_content = b"Hello, this is a test file." + + response = client.post( + "/upload", + content=file_content, + cast_to=httpx.Response, + options={"headers": {"Content-Type": "application/octet-stream"}}, + ) + + assert response.status_code == 200 + assert response.request.headers["Content-Type"] == "application/octet-stream" + assert response.content == file_content + + def test_binary_content_upload_with_iterator(self) -> None: + file_content = b"Hello, this is a test file." + counter = Counter() + iterator = _make_sync_iterator([file_content], counter=counter) + + def mock_handler(request: httpx.Request) -> httpx.Response: + assert counter.value == 0, "the request body should not have been read" + return httpx.Response(200, content=request.read()) + + with Isaacus( + base_url=base_url, + api_key=api_key, + _strict_response_validation=True, + http_client=httpx.Client(transport=MockTransport(handler=mock_handler)), + ) as client: + response = client.post( + "/upload", + content=iterator, + cast_to=httpx.Response, + options={"headers": {"Content-Type": "application/octet-stream"}}, + ) + + assert response.status_code == 200 + assert response.request.headers["Content-Type"] == "application/octet-stream" + assert response.content == file_content + assert counter.value == 1 + + @pytest.mark.respx(base_url=base_url) + def test_binary_content_upload_with_body_is_deprecated(self, respx_mock: MockRouter, client: Isaacus) -> None: + respx_mock.post("/upload").mock(side_effect=mirror_request_content) + + file_content = b"Hello, this is a test file." + + with pytest.deprecated_call( + match="Passing raw bytes as `body` is deprecated and will be removed in a future version. Please pass raw bytes via the `content` parameter instead." 
+ ): + response = client.post( + "/upload", + body=file_content, + cast_to=httpx.Response, + options={"headers": {"Content-Type": "application/octet-stream"}}, + ) + + assert response.status_code == 200 + assert response.request.headers["Content-Type"] == "application/octet-stream" + assert response.content == file_content + + @pytest.mark.respx(base_url=base_url) + def test_basic_union_response(self, respx_mock: MockRouter, client: Isaacus) -> None: class Model1(BaseModel): name: str @@ -498,12 +627,12 @@ class Model2(BaseModel): respx_mock.get("/foo").mock(return_value=httpx.Response(200, json={"foo": "bar"})) - response = self.client.get("/foo", cast_to=cast(Any, Union[Model1, Model2])) + response = client.get("/foo", cast_to=cast(Any, Union[Model1, Model2])) assert isinstance(response, Model2) assert response.foo == "bar" @pytest.mark.respx(base_url=base_url) - def test_union_response_different_types(self, respx_mock: MockRouter) -> None: + def test_union_response_different_types(self, respx_mock: MockRouter, client: Isaacus) -> None: """Union of objects with the same field name using a different type""" class Model1(BaseModel): @@ -514,18 +643,18 @@ class Model2(BaseModel): respx_mock.get("/foo").mock(return_value=httpx.Response(200, json={"foo": "bar"})) - response = self.client.get("/foo", cast_to=cast(Any, Union[Model1, Model2])) + response = client.get("/foo", cast_to=cast(Any, Union[Model1, Model2])) assert isinstance(response, Model2) assert response.foo == "bar" respx_mock.get("/foo").mock(return_value=httpx.Response(200, json={"foo": 1})) - response = self.client.get("/foo", cast_to=cast(Any, Union[Model1, Model2])) + response = client.get("/foo", cast_to=cast(Any, Union[Model1, Model2])) assert isinstance(response, Model1) assert response.foo == 1 @pytest.mark.respx(base_url=base_url) - def test_non_application_json_content_type_for_json_data(self, respx_mock: MockRouter) -> None: + def test_non_application_json_content_type_for_json_data(self, respx_mock: MockRouter, client: Isaacus) -> None: """ Response that sets Content-Type to something other than application/json but returns json data """ @@ -541,7 +670,7 @@ class Model(BaseModel): ) ) - response = self.client.get("/foo", cast_to=Model) + response = client.get("/foo", cast_to=Model) assert isinstance(response, Model) assert response.foo == 2 @@ -553,6 +682,8 @@ def test_base_url_setter(self) -> None: assert client.base_url == "https://example.com/from_setter/" + client.close() + def test_base_url_env(self) -> None: with update_env(ISAACUS_BASE_URL="http://localhost:5000/from/env"): client = Isaacus(api_key=api_key, _strict_response_validation=True) @@ -580,6 +711,7 @@ def test_base_url_trailing_slash(self, client: Isaacus) -> None: ), ) assert request.url == "http://localhost:5000/custom/path/foo" + client.close() @pytest.mark.parametrize( "client", @@ -603,6 +735,7 @@ def test_base_url_no_trailing_slash(self, client: Isaacus) -> None: ), ) assert request.url == "http://localhost:5000/custom/path/foo" + client.close() @pytest.mark.parametrize( "client", @@ -626,35 +759,36 @@ def test_absolute_request_url(self, client: Isaacus) -> None: ), ) assert request.url == "https://myapi.com/foo" + client.close() def test_copied_client_does_not_close_http(self) -> None: - client = Isaacus(base_url=base_url, api_key=api_key, _strict_response_validation=True) - assert not client.is_closed() + test_client = Isaacus(base_url=base_url, api_key=api_key, _strict_response_validation=True) + assert not test_client.is_closed() - copied = 
client.copy() - assert copied is not client + copied = test_client.copy() + assert copied is not test_client del copied - assert not client.is_closed() + assert not test_client.is_closed() def test_client_context_manager(self) -> None: - client = Isaacus(base_url=base_url, api_key=api_key, _strict_response_validation=True) - with client as c2: - assert c2 is client + test_client = Isaacus(base_url=base_url, api_key=api_key, _strict_response_validation=True) + with test_client as c2: + assert c2 is test_client assert not c2.is_closed() - assert not client.is_closed() - assert client.is_closed() + assert not test_client.is_closed() + assert test_client.is_closed() @pytest.mark.respx(base_url=base_url) - def test_client_response_validation_error(self, respx_mock: MockRouter) -> None: + def test_client_response_validation_error(self, respx_mock: MockRouter, client: Isaacus) -> None: class Model(BaseModel): foo: str respx_mock.get("/foo").mock(return_value=httpx.Response(200, json={"foo": {"invalid": True}})) with pytest.raises(APIResponseValidationError) as exc: - self.client.get("/foo", cast_to=Model) + client.get("/foo", cast_to=Model) assert isinstance(exc.value.__cause__, ValidationError) @@ -674,11 +808,14 @@ class Model(BaseModel): with pytest.raises(APIResponseValidationError): strict_client.get("/foo", cast_to=Model) - client = Isaacus(base_url=base_url, api_key=api_key, _strict_response_validation=False) + non_strict_client = Isaacus(base_url=base_url, api_key=api_key, _strict_response_validation=False) - response = client.get("/foo", cast_to=Model) + response = non_strict_client.get("/foo", cast_to=Model) assert isinstance(response, str) # type: ignore[unreachable] + strict_client.close() + non_strict_client.close() + @pytest.mark.parametrize( "remaining_retries,retry_after,timeout", [ @@ -701,9 +838,9 @@ class Model(BaseModel): ], ) @mock.patch("time.time", mock.MagicMock(return_value=1696004797)) - def test_parse_retry_after_header(self, remaining_retries: int, retry_after: str, timeout: float) -> None: - client = Isaacus(base_url=base_url, api_key=api_key, _strict_response_validation=True) - + def test_parse_retry_after_header( + self, remaining_retries: int, retry_after: str, timeout: float, client: Isaacus + ) -> None: headers = httpx.Headers({"retry-after": retry_after}) options = FinalRequestOptions(method="get", url="/foo", max_retries=3) calculated = client._calculate_retry_timeout(remaining_retries, options, headers) @@ -720,7 +857,7 @@ def test_retrying_timeout_errors_doesnt_leak(self, respx_mock: MockRouter, clien texts=["Are restraints of trade enforceable under English law?", "What is a non-compete clause?"], ).__enter__() - assert _get_open_connections(self.client) == 0 + assert _get_open_connections(client) == 0 @mock.patch("isaacus._base_client.BaseClient._calculate_retry_timeout", _low_retry_timeout) @pytest.mark.respx(base_url=base_url) @@ -732,7 +869,7 @@ def test_retrying_status_errors_doesnt_leak(self, respx_mock: MockRouter, client model="kanon-2-embedder", texts=["Are restraints of trade enforceable under English law?", "What is a non-compete clause?"], ).__enter__() - assert _get_open_connections(self.client) == 0 + assert _get_open_connections(client) == 0 @pytest.mark.parametrize("failures_before_success", [0, 2, 4]) @mock.patch("isaacus._base_client.BaseClient._calculate_retry_timeout", _low_retry_timeout) @@ -845,83 +982,77 @@ def test_default_client_creation(self) -> None: ) @pytest.mark.respx(base_url=base_url) - def test_follow_redirects(self, 
respx_mock: MockRouter) -> None: + def test_follow_redirects(self, respx_mock: MockRouter, client: Isaacus) -> None: # Test that the default follow_redirects=True allows following redirects respx_mock.post("/redirect").mock( return_value=httpx.Response(302, headers={"Location": f"{base_url}/redirected"}) ) respx_mock.get("/redirected").mock(return_value=httpx.Response(200, json={"status": "ok"})) - response = self.client.post("/redirect", body={"key": "value"}, cast_to=httpx.Response) + response = client.post("/redirect", body={"key": "value"}, cast_to=httpx.Response) assert response.status_code == 200 assert response.json() == {"status": "ok"} @pytest.mark.respx(base_url=base_url) - def test_follow_redirects_disabled(self, respx_mock: MockRouter) -> None: + def test_follow_redirects_disabled(self, respx_mock: MockRouter, client: Isaacus) -> None: # Test that follow_redirects=False prevents following redirects respx_mock.post("/redirect").mock( return_value=httpx.Response(302, headers={"Location": f"{base_url}/redirected"}) ) with pytest.raises(APIStatusError) as exc_info: - self.client.post( - "/redirect", body={"key": "value"}, options={"follow_redirects": False}, cast_to=httpx.Response - ) + client.post("/redirect", body={"key": "value"}, options={"follow_redirects": False}, cast_to=httpx.Response) assert exc_info.value.response.status_code == 302 assert exc_info.value.response.headers["Location"] == f"{base_url}/redirected" class TestAsyncIsaacus: - client = AsyncIsaacus(base_url=base_url, api_key=api_key, _strict_response_validation=True) - @pytest.mark.respx(base_url=base_url) - @pytest.mark.asyncio - async def test_raw_response(self, respx_mock: MockRouter) -> None: + async def test_raw_response(self, respx_mock: MockRouter, async_client: AsyncIsaacus) -> None: respx_mock.post("/foo").mock(return_value=httpx.Response(200, json={"foo": "bar"})) - response = await self.client.post("/foo", cast_to=httpx.Response) + response = await async_client.post("/foo", cast_to=httpx.Response) assert response.status_code == 200 assert isinstance(response, httpx.Response) assert response.json() == {"foo": "bar"} @pytest.mark.respx(base_url=base_url) - @pytest.mark.asyncio - async def test_raw_response_for_binary(self, respx_mock: MockRouter) -> None: + async def test_raw_response_for_binary(self, respx_mock: MockRouter, async_client: AsyncIsaacus) -> None: respx_mock.post("/foo").mock( return_value=httpx.Response(200, headers={"Content-Type": "application/binary"}, content='{"foo": "bar"}') ) - response = await self.client.post("/foo", cast_to=httpx.Response) + response = await async_client.post("/foo", cast_to=httpx.Response) assert response.status_code == 200 assert isinstance(response, httpx.Response) assert response.json() == {"foo": "bar"} - def test_copy(self) -> None: - copied = self.client.copy() - assert id(copied) != id(self.client) + def test_copy(self, async_client: AsyncIsaacus) -> None: + copied = async_client.copy() + assert id(copied) != id(async_client) - copied = self.client.copy(api_key="another My API Key") + copied = async_client.copy(api_key="another My API Key") assert copied.api_key == "another My API Key" - assert self.client.api_key == "My API Key" + assert async_client.api_key == "My API Key" - def test_copy_default_options(self) -> None: + def test_copy_default_options(self, async_client: AsyncIsaacus) -> None: # options that have a default are overridden correctly - copied = self.client.copy(max_retries=7) + copied = async_client.copy(max_retries=7) assert 
copied.max_retries == 7 - assert self.client.max_retries == 2 + assert async_client.max_retries == 2 copied2 = copied.copy(max_retries=6) assert copied2.max_retries == 6 assert copied.max_retries == 7 # timeout - assert isinstance(self.client.timeout, httpx.Timeout) - copied = self.client.copy(timeout=None) + assert isinstance(async_client.timeout, httpx.Timeout) + copied = async_client.copy(timeout=None) assert copied.timeout is None - assert isinstance(self.client.timeout, httpx.Timeout) + assert isinstance(async_client.timeout, httpx.Timeout) - def test_copy_default_headers(self) -> None: + async def test_copy_default_headers(self) -> None: client = AsyncIsaacus( base_url=base_url, api_key=api_key, _strict_response_validation=True, default_headers={"X-Foo": "bar"} ) @@ -954,8 +1085,9 @@ def test_copy_default_headers(self) -> None: match="`default_headers` and `set_default_headers` arguments are mutually exclusive", ): client.copy(set_default_headers={}, default_headers={"X-Foo": "Bar"}) + await client.close() - def test_copy_default_query(self) -> None: + async def test_copy_default_query(self) -> None: client = AsyncIsaacus( base_url=base_url, api_key=api_key, _strict_response_validation=True, default_query={"foo": "bar"} ) @@ -991,13 +1123,15 @@ def test_copy_default_query(self) -> None: ): client.copy(set_default_query={}, default_query={"foo": "Bar"}) - def test_copy_signature(self) -> None: + await client.close() + + def test_copy_signature(self, async_client: AsyncIsaacus) -> None: # ensure the same parameters that can be passed to the client are defined in the `.copy()` method init_signature = inspect.signature( # mypy doesn't like that we access the `__init__` property. - self.client.__init__, # type: ignore[misc] + async_client.__init__, # type: ignore[misc] ) - copy_signature = inspect.signature(self.client.copy) + copy_signature = inspect.signature(async_client.copy) exclude_params = {"transport", "proxies", "_strict_response_validation"} for name in init_signature.parameters.keys(): @@ -1008,12 +1142,12 @@ def test_copy_signature(self) -> None: assert copy_param is not None, f"copy() signature is missing the {name} param" @pytest.mark.skipif(sys.version_info >= (3, 10), reason="fails because of a memory leak that started from 3.12") - def test_copy_build_request(self) -> None: + def test_copy_build_request(self, async_client: AsyncIsaacus) -> None: options = FinalRequestOptions(method="get", url="/foo") def build_request(options: FinalRequestOptions) -> None: - client = self.client.copy() - client._build_request(options) + client_copy = async_client.copy() + client_copy._build_request(options) # ensure that the machinery is warmed up before tracing starts. 
build_request(options) @@ -1070,12 +1204,12 @@ def add_leak(leaks: list[tracemalloc.StatisticDiff], diff: tracemalloc.Statistic print(frame) raise AssertionError() - async def test_request_timeout(self) -> None: - request = self.client._build_request(FinalRequestOptions(method="get", url="/foo")) + async def test_request_timeout(self, async_client: AsyncIsaacus) -> None: + request = async_client._build_request(FinalRequestOptions(method="get", url="/foo")) timeout = httpx.Timeout(**request.extensions["timeout"]) # type: ignore assert timeout == DEFAULT_TIMEOUT - request = self.client._build_request( + request = async_client._build_request( FinalRequestOptions(method="get", url="/foo", timeout=httpx.Timeout(100.0)) ) timeout = httpx.Timeout(**request.extensions["timeout"]) # type: ignore @@ -1090,6 +1224,8 @@ async def test_client_timeout_option(self) -> None: timeout = httpx.Timeout(**request.extensions["timeout"]) # type: ignore assert timeout == httpx.Timeout(0) + await client.close() + async def test_http_client_timeout_option(self) -> None: # custom timeout given to the httpx client should be used async with httpx.AsyncClient(timeout=None) as http_client: @@ -1101,6 +1237,8 @@ async def test_http_client_timeout_option(self) -> None: timeout = httpx.Timeout(**request.extensions["timeout"]) # type: ignore assert timeout == httpx.Timeout(None) + await client.close() + # no timeout given to the httpx client should not use the httpx default async with httpx.AsyncClient() as http_client: client = AsyncIsaacus( @@ -1111,6 +1249,8 @@ async def test_http_client_timeout_option(self) -> None: timeout = httpx.Timeout(**request.extensions["timeout"]) # type: ignore assert timeout == DEFAULT_TIMEOUT + await client.close() + # explicitly passing the default timeout currently results in it being ignored async with httpx.AsyncClient(timeout=HTTPX_DEFAULT_TIMEOUT) as http_client: client = AsyncIsaacus( @@ -1121,6 +1261,8 @@ async def test_http_client_timeout_option(self) -> None: timeout = httpx.Timeout(**request.extensions["timeout"]) # type: ignore assert timeout == DEFAULT_TIMEOUT # our default + await client.close() + def test_invalid_http_client(self) -> None: with pytest.raises(TypeError, match="Invalid `http_client` arg"): with httpx.Client() as http_client: @@ -1131,15 +1273,15 @@ def test_invalid_http_client(self) -> None: http_client=cast(Any, http_client), ) - def test_default_headers_option(self) -> None: - client = AsyncIsaacus( + async def test_default_headers_option(self) -> None: + test_client = AsyncIsaacus( base_url=base_url, api_key=api_key, _strict_response_validation=True, default_headers={"X-Foo": "bar"} ) - request = client._build_request(FinalRequestOptions(method="get", url="/foo")) + request = test_client._build_request(FinalRequestOptions(method="get", url="/foo")) assert request.headers.get("x-foo") == "bar" assert request.headers.get("x-stainless-lang") == "python" - client2 = AsyncIsaacus( + test_client2 = AsyncIsaacus( base_url=base_url, api_key=api_key, _strict_response_validation=True, @@ -1148,10 +1290,13 @@ def test_default_headers_option(self) -> None: "X-Stainless-Lang": "my-overriding-header", }, ) - request = client2._build_request(FinalRequestOptions(method="get", url="/foo")) + request = test_client2._build_request(FinalRequestOptions(method="get", url="/foo")) assert request.headers.get("x-foo") == "stainless" assert request.headers.get("x-stainless-lang") == "my-overriding-header" + await test_client.close() + await test_client2.close() + def 
test_validate_headers(self) -> None: client = AsyncIsaacus(base_url=base_url, api_key=api_key, _strict_response_validation=True) request = client._build_request(FinalRequestOptions(method="get", url="/foo")) @@ -1162,7 +1307,7 @@ def test_validate_headers(self) -> None: client2 = AsyncIsaacus(base_url=base_url, api_key=None, _strict_response_validation=True) _ = client2 - def test_default_query_option(self) -> None: + async def test_default_query_option(self) -> None: client = AsyncIsaacus( base_url=base_url, api_key=api_key, _strict_response_validation=True, default_query={"query_param": "bar"} ) @@ -1180,8 +1325,10 @@ def test_default_query_option(self) -> None: url = httpx.URL(request.url) assert dict(url.params) == {"foo": "baz", "query_param": "overridden"} - def test_request_extra_json(self) -> None: - request = self.client._build_request( + await client.close() + + def test_request_extra_json(self, client: Isaacus) -> None: + request = client._build_request( FinalRequestOptions( method="post", url="/foo", @@ -1192,7 +1339,7 @@ def test_request_extra_json(self) -> None: data = json.loads(request.content.decode("utf-8")) assert data == {"foo": "bar", "baz": False} - request = self.client._build_request( + request = client._build_request( FinalRequestOptions( method="post", url="/foo", @@ -1203,7 +1350,7 @@ def test_request_extra_json(self) -> None: assert data == {"baz": False} # `extra_json` takes priority over `json_data` when keys clash - request = self.client._build_request( + request = client._build_request( FinalRequestOptions( method="post", url="/foo", @@ -1214,8 +1361,8 @@ def test_request_extra_json(self) -> None: data = json.loads(request.content.decode("utf-8")) assert data == {"foo": "bar", "baz": None} - def test_request_extra_headers(self) -> None: - request = self.client._build_request( + def test_request_extra_headers(self, client: Isaacus) -> None: + request = client._build_request( FinalRequestOptions( method="post", url="/foo", @@ -1225,7 +1372,7 @@ def test_request_extra_headers(self) -> None: assert request.headers.get("X-Foo") == "Foo" # `extra_headers` takes priority over `default_headers` when keys clash - request = self.client.with_options(default_headers={"X-Bar": "true"})._build_request( + request = client.with_options(default_headers={"X-Bar": "true"})._build_request( FinalRequestOptions( method="post", url="/foo", @@ -1236,8 +1383,8 @@ def test_request_extra_headers(self) -> None: ) assert request.headers.get("X-Bar") == "false" - def test_request_extra_query(self) -> None: - request = self.client._build_request( + def test_request_extra_query(self, client: Isaacus) -> None: + request = client._build_request( FinalRequestOptions( method="post", url="/foo", @@ -1250,7 +1397,7 @@ def test_request_extra_query(self) -> None: assert params == {"my_query_param": "Foo"} # if both `query` and `extra_query` are given, they are merged - request = self.client._build_request( + request = client._build_request( FinalRequestOptions( method="post", url="/foo", @@ -1264,7 +1411,7 @@ def test_request_extra_query(self) -> None: assert params == {"bar": "1", "foo": "2"} # `extra_query` takes priority over `query` when keys clash - request = self.client._build_request( + request = client._build_request( FinalRequestOptions( method="post", url="/foo", @@ -1307,7 +1454,73 @@ def test_multipart_repeating_array(self, async_client: AsyncIsaacus) -> None: ] @pytest.mark.respx(base_url=base_url) - async def test_basic_union_response(self, respx_mock: MockRouter) -> None: + async 
def test_binary_content_upload(self, respx_mock: MockRouter, async_client: AsyncIsaacus) -> None: + respx_mock.post("/upload").mock(side_effect=mirror_request_content) + + file_content = b"Hello, this is a test file." + + response = await async_client.post( + "/upload", + content=file_content, + cast_to=httpx.Response, + options={"headers": {"Content-Type": "application/octet-stream"}}, + ) + + assert response.status_code == 200 + assert response.request.headers["Content-Type"] == "application/octet-stream" + assert response.content == file_content + + async def test_binary_content_upload_with_asynciterator(self) -> None: + file_content = b"Hello, this is a test file." + counter = Counter() + iterator = _make_async_iterator([file_content], counter=counter) + + async def mock_handler(request: httpx.Request) -> httpx.Response: + assert counter.value == 0, "the request body should not have been read" + return httpx.Response(200, content=await request.aread()) + + async with AsyncIsaacus( + base_url=base_url, + api_key=api_key, + _strict_response_validation=True, + http_client=httpx.AsyncClient(transport=MockTransport(handler=mock_handler)), + ) as client: + response = await client.post( + "/upload", + content=iterator, + cast_to=httpx.Response, + options={"headers": {"Content-Type": "application/octet-stream"}}, + ) + + assert response.status_code == 200 + assert response.request.headers["Content-Type"] == "application/octet-stream" + assert response.content == file_content + assert counter.value == 1 + + @pytest.mark.respx(base_url=base_url) + async def test_binary_content_upload_with_body_is_deprecated( + self, respx_mock: MockRouter, async_client: AsyncIsaacus + ) -> None: + respx_mock.post("/upload").mock(side_effect=mirror_request_content) + + file_content = b"Hello, this is a test file." + + with pytest.deprecated_call( + match="Passing raw bytes as `body` is deprecated and will be removed in a future version. Please pass raw bytes via the `content` parameter instead." 
+ ): + response = await async_client.post( + "/upload", + body=file_content, + cast_to=httpx.Response, + options={"headers": {"Content-Type": "application/octet-stream"}}, + ) + + assert response.status_code == 200 + assert response.request.headers["Content-Type"] == "application/octet-stream" + assert response.content == file_content + + @pytest.mark.respx(base_url=base_url) + async def test_basic_union_response(self, respx_mock: MockRouter, async_client: AsyncIsaacus) -> None: class Model1(BaseModel): name: str @@ -1316,12 +1529,12 @@ class Model2(BaseModel): respx_mock.get("/foo").mock(return_value=httpx.Response(200, json={"foo": "bar"})) - response = await self.client.get("/foo", cast_to=cast(Any, Union[Model1, Model2])) + response = await async_client.get("/foo", cast_to=cast(Any, Union[Model1, Model2])) assert isinstance(response, Model2) assert response.foo == "bar" @pytest.mark.respx(base_url=base_url) - async def test_union_response_different_types(self, respx_mock: MockRouter) -> None: + async def test_union_response_different_types(self, respx_mock: MockRouter, async_client: AsyncIsaacus) -> None: """Union of objects with the same field name using a different type""" class Model1(BaseModel): @@ -1332,18 +1545,20 @@ class Model2(BaseModel): respx_mock.get("/foo").mock(return_value=httpx.Response(200, json={"foo": "bar"})) - response = await self.client.get("/foo", cast_to=cast(Any, Union[Model1, Model2])) + response = await async_client.get("/foo", cast_to=cast(Any, Union[Model1, Model2])) assert isinstance(response, Model2) assert response.foo == "bar" respx_mock.get("/foo").mock(return_value=httpx.Response(200, json={"foo": 1})) - response = await self.client.get("/foo", cast_to=cast(Any, Union[Model1, Model2])) + response = await async_client.get("/foo", cast_to=cast(Any, Union[Model1, Model2])) assert isinstance(response, Model1) assert response.foo == 1 @pytest.mark.respx(base_url=base_url) - async def test_non_application_json_content_type_for_json_data(self, respx_mock: MockRouter) -> None: + async def test_non_application_json_content_type_for_json_data( + self, respx_mock: MockRouter, async_client: AsyncIsaacus + ) -> None: """ Response that sets Content-Type to something other than application/json but returns json data """ @@ -1359,11 +1574,11 @@ class Model(BaseModel): ) ) - response = await self.client.get("/foo", cast_to=Model) + response = await async_client.get("/foo", cast_to=Model) assert isinstance(response, Model) assert response.foo == 2 - def test_base_url_setter(self) -> None: + async def test_base_url_setter(self) -> None: client = AsyncIsaacus( base_url="https://example.com/from_init", api_key=api_key, _strict_response_validation=True ) @@ -1373,7 +1588,9 @@ def test_base_url_setter(self) -> None: assert client.base_url == "https://example.com/from_setter/" - def test_base_url_env(self) -> None: + await client.close() + + async def test_base_url_env(self) -> None: with update_env(ISAACUS_BASE_URL="http://localhost:5000/from/env"): client = AsyncIsaacus(api_key=api_key, _strict_response_validation=True) assert client.base_url == "http://localhost:5000/from/env/" @@ -1393,7 +1610,7 @@ def test_base_url_env(self) -> None: ], ids=["standard", "custom http client"], ) - def test_base_url_trailing_slash(self, client: AsyncIsaacus) -> None: + async def test_base_url_trailing_slash(self, client: AsyncIsaacus) -> None: request = client._build_request( FinalRequestOptions( method="post", @@ -1402,6 +1619,7 @@ def test_base_url_trailing_slash(self, client: 
AsyncIsaacus) -> None: ), ) assert request.url == "http://localhost:5000/custom/path/foo" + await client.close() @pytest.mark.parametrize( "client", @@ -1418,7 +1636,7 @@ def test_base_url_trailing_slash(self, client: AsyncIsaacus) -> None: ], ids=["standard", "custom http client"], ) - def test_base_url_no_trailing_slash(self, client: AsyncIsaacus) -> None: + async def test_base_url_no_trailing_slash(self, client: AsyncIsaacus) -> None: request = client._build_request( FinalRequestOptions( method="post", @@ -1427,6 +1645,7 @@ def test_base_url_no_trailing_slash(self, client: AsyncIsaacus) -> None: ), ) assert request.url == "http://localhost:5000/custom/path/foo" + await client.close() @pytest.mark.parametrize( "client", @@ -1443,7 +1662,7 @@ def test_base_url_no_trailing_slash(self, client: AsyncIsaacus) -> None: ], ids=["standard", "custom http client"], ) - def test_absolute_request_url(self, client: AsyncIsaacus) -> None: + async def test_absolute_request_url(self, client: AsyncIsaacus) -> None: request = client._build_request( FinalRequestOptions( method="post", @@ -1452,37 +1671,37 @@ def test_absolute_request_url(self, client: AsyncIsaacus) -> None: ), ) assert request.url == "https://myapi.com/foo" + await client.close() async def test_copied_client_does_not_close_http(self) -> None: - client = AsyncIsaacus(base_url=base_url, api_key=api_key, _strict_response_validation=True) - assert not client.is_closed() + test_client = AsyncIsaacus(base_url=base_url, api_key=api_key, _strict_response_validation=True) + assert not test_client.is_closed() - copied = client.copy() - assert copied is not client + copied = test_client.copy() + assert copied is not test_client del copied await asyncio.sleep(0.2) - assert not client.is_closed() + assert not test_client.is_closed() async def test_client_context_manager(self) -> None: - client = AsyncIsaacus(base_url=base_url, api_key=api_key, _strict_response_validation=True) - async with client as c2: - assert c2 is client + test_client = AsyncIsaacus(base_url=base_url, api_key=api_key, _strict_response_validation=True) + async with test_client as c2: + assert c2 is test_client assert not c2.is_closed() - assert not client.is_closed() - assert client.is_closed() + assert not test_client.is_closed() + assert test_client.is_closed() @pytest.mark.respx(base_url=base_url) - @pytest.mark.asyncio - async def test_client_response_validation_error(self, respx_mock: MockRouter) -> None: + async def test_client_response_validation_error(self, respx_mock: MockRouter, async_client: AsyncIsaacus) -> None: class Model(BaseModel): foo: str respx_mock.get("/foo").mock(return_value=httpx.Response(200, json={"foo": {"invalid": True}})) with pytest.raises(APIResponseValidationError) as exc: - await self.client.get("/foo", cast_to=Model) + await async_client.get("/foo", cast_to=Model) assert isinstance(exc.value.__cause__, ValidationError) @@ -1493,7 +1712,6 @@ async def test_client_max_retries_validation(self) -> None: ) @pytest.mark.respx(base_url=base_url) - @pytest.mark.asyncio async def test_received_text_for_expected_json(self, respx_mock: MockRouter) -> None: class Model(BaseModel): name: str @@ -1505,11 +1723,14 @@ class Model(BaseModel): with pytest.raises(APIResponseValidationError): await strict_client.get("/foo", cast_to=Model) - client = AsyncIsaacus(base_url=base_url, api_key=api_key, _strict_response_validation=False) + non_strict_client = AsyncIsaacus(base_url=base_url, api_key=api_key, _strict_response_validation=False) - response = await 
client.get("/foo", cast_to=Model) + response = await non_strict_client.get("/foo", cast_to=Model) assert isinstance(response, str) # type: ignore[unreachable] + await strict_client.close() + await non_strict_client.close() + @pytest.mark.parametrize( "remaining_retries,retry_after,timeout", [ @@ -1532,13 +1753,12 @@ class Model(BaseModel): ], ) @mock.patch("time.time", mock.MagicMock(return_value=1696004797)) - @pytest.mark.asyncio - async def test_parse_retry_after_header(self, remaining_retries: int, retry_after: str, timeout: float) -> None: - client = AsyncIsaacus(base_url=base_url, api_key=api_key, _strict_response_validation=True) - + async def test_parse_retry_after_header( + self, remaining_retries: int, retry_after: str, timeout: float, async_client: AsyncIsaacus + ) -> None: headers = httpx.Headers({"retry-after": retry_after}) options = FinalRequestOptions(method="get", url="/foo", max_retries=3) - calculated = client._calculate_retry_timeout(remaining_retries, options, headers) + calculated = async_client._calculate_retry_timeout(remaining_retries, options, headers) assert calculated == pytest.approx(timeout, 0.5 * 0.875) # pyright: ignore[reportUnknownMemberType] @mock.patch("isaacus._base_client.BaseClient._calculate_retry_timeout", _low_retry_timeout) @@ -1554,7 +1774,7 @@ async def test_retrying_timeout_errors_doesnt_leak( texts=["Are restraints of trade enforceable under English law?", "What is a non-compete clause?"], ).__aenter__() - assert _get_open_connections(self.client) == 0 + assert _get_open_connections(async_client) == 0 @mock.patch("isaacus._base_client.BaseClient._calculate_retry_timeout", _low_retry_timeout) @pytest.mark.respx(base_url=base_url) @@ -1566,12 +1786,11 @@ async def test_retrying_status_errors_doesnt_leak(self, respx_mock: MockRouter, model="kanon-2-embedder", texts=["Are restraints of trade enforceable under English law?", "What is a non-compete clause?"], ).__aenter__() - assert _get_open_connections(self.client) == 0 + assert _get_open_connections(async_client) == 0 @pytest.mark.parametrize("failures_before_success", [0, 2, 4]) @mock.patch("isaacus._base_client.BaseClient._calculate_retry_timeout", _low_retry_timeout) @pytest.mark.respx(base_url=base_url) - @pytest.mark.asyncio @pytest.mark.parametrize("failure_mode", ["status", "exception"]) async def test_retries_taken( self, @@ -1606,7 +1825,6 @@ def retry_handler(_request: httpx.Request) -> httpx.Response: @pytest.mark.parametrize("failures_before_success", [0, 2, 4]) @mock.patch("isaacus._base_client.BaseClient._calculate_retry_timeout", _low_retry_timeout) @pytest.mark.respx(base_url=base_url) - @pytest.mark.asyncio async def test_omit_retry_count_header( self, async_client: AsyncIsaacus, failures_before_success: int, respx_mock: MockRouter ) -> None: @@ -1634,7 +1852,6 @@ def retry_handler(_request: httpx.Request) -> httpx.Response: @pytest.mark.parametrize("failures_before_success", [0, 2, 4]) @mock.patch("isaacus._base_client.BaseClient._calculate_retry_timeout", _low_retry_timeout) @pytest.mark.respx(base_url=base_url) - @pytest.mark.asyncio async def test_overwrite_retry_count_header( self, async_client: AsyncIsaacus, failures_before_success: int, respx_mock: MockRouter ) -> None: @@ -1686,26 +1903,26 @@ async def test_default_client_creation(self) -> None: ) @pytest.mark.respx(base_url=base_url) - async def test_follow_redirects(self, respx_mock: MockRouter) -> None: + async def test_follow_redirects(self, respx_mock: MockRouter, async_client: AsyncIsaacus) -> None: # Test that the 
default follow_redirects=True allows following redirects respx_mock.post("/redirect").mock( return_value=httpx.Response(302, headers={"Location": f"{base_url}/redirected"}) ) respx_mock.get("/redirected").mock(return_value=httpx.Response(200, json={"status": "ok"})) - response = await self.client.post("/redirect", body={"key": "value"}, cast_to=httpx.Response) + response = await async_client.post("/redirect", body={"key": "value"}, cast_to=httpx.Response) assert response.status_code == 200 assert response.json() == {"status": "ok"} @pytest.mark.respx(base_url=base_url) - async def test_follow_redirects_disabled(self, respx_mock: MockRouter) -> None: + async def test_follow_redirects_disabled(self, respx_mock: MockRouter, async_client: AsyncIsaacus) -> None: # Test that follow_redirects=False prevents following redirects respx_mock.post("/redirect").mock( return_value=httpx.Response(302, headers={"Location": f"{base_url}/redirected"}) ) with pytest.raises(APIStatusError) as exc_info: - await self.client.post( + await async_client.post( "/redirect", body={"key": "value"}, options={"follow_redirects": False}, cast_to=httpx.Response ) diff --git a/tests/test_models.py b/tests/test_models.py index 42d881c..292aa85 100644 --- a/tests/test_models.py +++ b/tests/test_models.py @@ -9,7 +9,7 @@ from isaacus._utils import PropertyInfo from isaacus._compat import PYDANTIC_V1, parse_obj, model_dump, model_json -from isaacus._models import BaseModel, construct_type +from isaacus._models import DISCRIMINATOR_CACHE, BaseModel, construct_type class BasicModel(BaseModel): @@ -809,7 +809,7 @@ class B(BaseModel): UnionType = cast(Any, Union[A, B]) - assert not hasattr(UnionType, "__discriminator__") + assert not DISCRIMINATOR_CACHE.get(UnionType) m = construct_type( value={"type": "b", "data": "foo"}, type_=cast(Any, Annotated[UnionType, PropertyInfo(discriminator="type")]) @@ -818,7 +818,7 @@ class B(BaseModel): assert m.type == "b" assert m.data == "foo" # type: ignore[comparison-overlap] - discriminator = UnionType.__discriminator__ + discriminator = DISCRIMINATOR_CACHE.get(UnionType) assert discriminator is not None m = construct_type( @@ -830,7 +830,7 @@ class B(BaseModel): # if the discriminator details object stays the same between invocations then # we hit the cache - assert UnionType.__discriminator__ is discriminator + assert DISCRIMINATOR_CACHE.get(UnionType) is discriminator @pytest.mark.skipif(PYDANTIC_V1, reason="TypeAliasType is not supported in Pydantic v1") diff --git a/tests/test_utils/test_json.py b/tests/test_utils/test_json.py new file mode 100644 index 0000000..fca71a0 --- /dev/null +++ b/tests/test_utils/test_json.py @@ -0,0 +1,126 @@ +from __future__ import annotations + +import datetime +from typing import Union + +import pydantic + +from isaacus import _compat +from isaacus._utils._json import openapi_dumps + + +class TestOpenapiDumps: + def test_basic(self) -> None: + data = {"key": "value", "number": 42} + json_bytes = openapi_dumps(data) + assert json_bytes == b'{"key":"value","number":42}' + + def test_datetime_serialization(self) -> None: + dt = datetime.datetime(2023, 1, 1, 12, 0, 0) + data = {"datetime": dt} + json_bytes = openapi_dumps(data) + assert json_bytes == b'{"datetime":"2023-01-01T12:00:00"}' + + def test_pydantic_model_serialization(self) -> None: + class User(pydantic.BaseModel): + first_name: str + last_name: str + age: int + + model_instance = User(first_name="John", last_name="Kramer", age=83) + data = {"model": model_instance} + json_bytes = openapi_dumps(data) 
+ assert json_bytes == b'{"model":{"first_name":"John","last_name":"Kramer","age":83}}' + + def test_pydantic_model_with_default_values(self) -> None: + class User(pydantic.BaseModel): + name: str + role: str = "user" + active: bool = True + score: int = 0 + + model_instance = User(name="Alice") + data = {"model": model_instance} + json_bytes = openapi_dumps(data) + assert json_bytes == b'{"model":{"name":"Alice"}}' + + def test_pydantic_model_with_default_values_overridden(self) -> None: + class User(pydantic.BaseModel): + name: str + role: str = "user" + active: bool = True + + model_instance = User(name="Bob", role="admin", active=False) + data = {"model": model_instance} + json_bytes = openapi_dumps(data) + assert json_bytes == b'{"model":{"name":"Bob","role":"admin","active":false}}' + + def test_pydantic_model_with_alias(self) -> None: + class User(pydantic.BaseModel): + first_name: str = pydantic.Field(alias="firstName") + last_name: str = pydantic.Field(alias="lastName") + + model_instance = User(firstName="John", lastName="Doe") + data = {"model": model_instance} + json_bytes = openapi_dumps(data) + assert json_bytes == b'{"model":{"firstName":"John","lastName":"Doe"}}' + + def test_pydantic_model_with_alias_and_default(self) -> None: + class User(pydantic.BaseModel): + user_name: str = pydantic.Field(alias="userName") + user_role: str = pydantic.Field(default="member", alias="userRole") + is_active: bool = pydantic.Field(default=True, alias="isActive") + + model_instance = User(userName="charlie") + data = {"model": model_instance} + json_bytes = openapi_dumps(data) + assert json_bytes == b'{"model":{"userName":"charlie"}}' + + model_with_overrides = User(userName="diana", userRole="admin", isActive=False) + data = {"model": model_with_overrides} + json_bytes = openapi_dumps(data) + assert json_bytes == b'{"model":{"userName":"diana","userRole":"admin","isActive":false}}' + + def test_pydantic_model_with_nested_models_and_defaults(self) -> None: + class Address(pydantic.BaseModel): + street: str + city: str = "Unknown" + + class User(pydantic.BaseModel): + name: str + address: Address + verified: bool = False + + if _compat.PYDANTIC_V1: + # to handle forward references in Pydantic v1 + User.update_forward_refs(**locals()) # type: ignore[reportDeprecated] + + address = Address(street="123 Main St") + user = User(name="Diana", address=address) + data = {"user": user} + json_bytes = openapi_dumps(data) + assert json_bytes == b'{"user":{"name":"Diana","address":{"street":"123 Main St"}}}' + + address_with_city = Address(street="456 Oak Ave", city="Boston") + user_verified = User(name="Eve", address=address_with_city, verified=True) + data = {"user": user_verified} + json_bytes = openapi_dumps(data) + assert ( + json_bytes == b'{"user":{"name":"Eve","address":{"street":"456 Oak Ave","city":"Boston"},"verified":true}}' + ) + + def test_pydantic_model_with_optional_fields(self) -> None: + class User(pydantic.BaseModel): + name: str + email: Union[str, None] + phone: Union[str, None] + + model_with_none = User(name="Eve", email=None, phone=None) + data = {"model": model_with_none} + json_bytes = openapi_dumps(data) + assert json_bytes == b'{"model":{"name":"Eve","email":null,"phone":null}}' + + model_with_values = User(name="Frank", email="frank@example.com", phone=None) + data = {"model": model_with_values} + json_bytes = openapi_dumps(data) + assert json_bytes == b'{"model":{"name":"Frank","email":"frank@example.com","phone":null}}'
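
Aside (not part of the patch): a minimal usage sketch of the openapi_dumps helper that the new tests/test_utils/test_json.py suite above covers. It is based only on the behaviour asserted in the diff — compact byte output and omission of unset Pydantic defaults; the User model here is illustrative, not an SDK type.

    import pydantic

    from isaacus._utils._json import openapi_dumps  # helper exercised by the new test suite

    class User(pydantic.BaseModel):
        name: str
        role: str = "user"  # left unset below, so it is dropped from the serialized output

    # Compact separators, bytes output — mirrors TestOpenapiDumps.test_basic
    assert openapi_dumps({"key": "value", "number": 42}) == b'{"key":"value","number":42}'

    # Unset defaults are excluded — mirrors test_pydantic_model_with_default_values
    assert openapi_dumps({"model": User(name="Alice")}) == b'{"model":{"name":"Alice"}}'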