From aef11328834d4b9263e78e812e33a5877c741df6 Mon Sep 17 00:00:00 2001 From: tvaron3 Date: Thu, 16 Apr 2026 15:24:14 -0700 Subject: [PATCH 1/9] Release azure-cosmos-spark 4.47.0 Version bumps and CHANGELOG updates for: - azure-cosmos-spark_3-3_2-12 4.47.0 - azure-cosmos-spark_3-4_2-12 4.47.0 - azure-cosmos-spark_3-5_2-12 4.47.0 - azure-cosmos-spark_3-5_2-13 4.47.0 - azure-cosmos-spark_4-0_2-13 4.47.0 Features Added: - Added support for change feed with startFrom point-in-time on merged partitions (PR #48752) Bugs Fixed: - Fixed readContainerThroughput unnecessary permission requirement (PR #48800) Also updated azure-cosmos CHANGELOG to reclassify the startFrom fix as a feature. Co-authored-by: Copilot <223556219+Copilot@users.noreply.github.com> --- eng/versioning/version_client.txt | 10 +++++----- .../pom.xml | 4 ++-- sdk/cosmos/azure-cosmos-spark_3-3_2-12/CHANGELOG.md | 7 ++----- sdk/cosmos/azure-cosmos-spark_3-3_2-12/README.md | 9 +++++++-- sdk/cosmos/azure-cosmos-spark_3-3_2-12/pom.xml | 2 +- sdk/cosmos/azure-cosmos-spark_3-4_2-12/CHANGELOG.md | 7 ++----- sdk/cosmos/azure-cosmos-spark_3-4_2-12/README.md | 9 +++++++-- sdk/cosmos/azure-cosmos-spark_3-4_2-12/pom.xml | 2 +- sdk/cosmos/azure-cosmos-spark_3-5_2-12/CHANGELOG.md | 7 ++----- sdk/cosmos/azure-cosmos-spark_3-5_2-12/README.md | 9 +++++++-- sdk/cosmos/azure-cosmos-spark_3-5_2-12/pom.xml | 2 +- sdk/cosmos/azure-cosmos-spark_3-5_2-13/CHANGELOG.md | 7 ++----- sdk/cosmos/azure-cosmos-spark_3-5_2-13/README.md | 9 +++++++-- sdk/cosmos/azure-cosmos-spark_3-5_2-13/pom.xml | 2 +- sdk/cosmos/azure-cosmos-spark_3/docs/quick-start.md | 10 +++++----- sdk/cosmos/azure-cosmos-spark_4-0_2-13/CHANGELOG.md | 9 ++------- sdk/cosmos/azure-cosmos-spark_4-0_2-13/README.md | 9 +++++++-- sdk/cosmos/azure-cosmos-spark_4-0_2-13/pom.xml | 2 +- sdk/cosmos/azure-cosmos/CHANGELOG.md | 2 +- sdk/cosmos/fabric-cosmos-spark-auth_3/pom.xml | 4 ++-- 20 files changed, 65 insertions(+), 57 deletions(-) diff --git 
a/eng/versioning/version_client.txt b/eng/versioning/version_client.txt index 5ba08c978a76..effe5145f3cd 100644 --- a/eng/versioning/version_client.txt +++ b/eng/versioning/version_client.txt @@ -113,11 +113,11 @@ com.azure.cosmos.spark:azure-cosmos-spark_3-5;0.0.1-beta.1;0.0.1-beta.1 com.azure:azure-cosmos-encryption;2.28.0;2.29.0-beta.1 com.azure.cosmos.spark:azure-cosmos-spark-account-data-resolver-sample;1.0.0-beta.1;1.0.0-beta.1 com.azure:azure-cosmos-test;1.0.0-beta.18;1.0.0-beta.19 -com.azure.cosmos.spark:azure-cosmos-spark_3-3_2-12;4.46.0;4.47.0-beta.1 -com.azure.cosmos.spark:azure-cosmos-spark_3-4_2-12;4.46.0;4.47.0-beta.1 -com.azure.cosmos.spark:azure-cosmos-spark_3-5_2-12;4.46.0;4.47.0-beta.1 -com.azure.cosmos.spark:azure-cosmos-spark_3-5_2-13;4.46.0;4.47.0-beta.1 -com.azure.cosmos.spark:azure-cosmos-spark_4-0_2-13;4.46.0;4.47.0-beta.1 +com.azure.cosmos.spark:azure-cosmos-spark_3-3_2-12;4.46.0;4.47.0 +com.azure.cosmos.spark:azure-cosmos-spark_3-4_2-12;4.46.0;4.47.0 +com.azure.cosmos.spark:azure-cosmos-spark_3-5_2-12;4.46.0;4.47.0 +com.azure.cosmos.spark:azure-cosmos-spark_3-5_2-13;4.46.0;4.47.0 +com.azure.cosmos.spark:azure-cosmos-spark_4-0_2-13;4.46.0;4.47.0 com.azure.cosmos.spark:fabric-cosmos-spark-auth_3;1.1.0;1.2.0-beta.1 com.azure:azure-cosmos-tests;1.0.0-beta.1;1.0.0-beta.1 com.azure:azure-data-appconfiguration;1.9.1;1.10.0-beta.1 diff --git a/sdk/cosmos/azure-cosmos-spark-account-data-resolver-sample/pom.xml b/sdk/cosmos/azure-cosmos-spark-account-data-resolver-sample/pom.xml index 9c5d27e84c87..0a33752d7b4e 100644 --- a/sdk/cosmos/azure-cosmos-spark-account-data-resolver-sample/pom.xml +++ b/sdk/cosmos/azure-cosmos-spark-account-data-resolver-sample/pom.xml @@ -97,7 +97,7 @@ com.azure.cosmos.spark azure-cosmos-spark_3-5_2-12 - 4.47.0-beta.1 + 4.47.0 provided @@ -290,7 +290,7 @@ com.fasterxml.jackson.core:jackson-databind:[2.18.6] com.fasterxml.jackson.module:jackson-module-scala_2.12:[2.18.6] com.globalmentor:hadoop-bare-naked-local-fs:[0.1.0] - 
com.azure.cosmos.spark:azure-cosmos-spark_3-5_2-12:[4.47.0-beta.1] + com.azure.cosmos.spark:azure-cosmos-spark_3-5_2-12:[4.47.0] diff --git a/sdk/cosmos/azure-cosmos-spark_3-3_2-12/CHANGELOG.md b/sdk/cosmos/azure-cosmos-spark_3-3_2-12/CHANGELOG.md index cbf97c610f9f..d2b627ec8a6d 100644 --- a/sdk/cosmos/azure-cosmos-spark_3-3_2-12/CHANGELOG.md +++ b/sdk/cosmos/azure-cosmos-spark_3-3_2-12/CHANGELOG.md @@ -1,16 +1,13 @@ ## Release History -### 4.47.0-beta.1 (Unreleased) +### 4.47.0 (2026-04-16) #### Features Added - -#### Breaking Changes +* Added support for change feed with `startFrom` point-in-time on merged partitions by enabling the `CHANGE_FEED_WITH_START_TIME_POST_MERGE` SDK capability in the azure-cosmos SDK. - See [PR 48752](https://github.com/Azure/azure-sdk-for-java/pull/48752) #### Bugs Fixed * Fixed an issue where `readContainerThroughput` was always called even when `targetThroughput` is explicitly configured, requiring unnecessary `throughputSettings/read` permission for AAD principals. 
- See [PR 48800](https://github.com/Azure/azure-sdk-for-java/pull/48800) -#### Other Changes - ### 4.46.0 (2026-03-27) #### Bugs Fixed diff --git a/sdk/cosmos/azure-cosmos-spark_3-3_2-12/README.md b/sdk/cosmos/azure-cosmos-spark_3-3_2-12/README.md index 3cac79a6687c..fc2188955682 100644 --- a/sdk/cosmos/azure-cosmos-spark_3-3_2-12/README.md +++ b/sdk/cosmos/azure-cosmos-spark_3-3_2-12/README.md @@ -28,6 +28,7 @@ https://github.com/Azure/azure-sdk-for-java/issues/new #### azure-cosmos-spark_3-3_2-12 | Connector | Supported Spark Versions | Supported JVM Versions | Supported Scala Versions | Supported Databricks Runtimes | |-----------|--------------------------|------------------------|--------------------------|-------------------------------| +| 4.47.0 | 3.3.0 - 3.3.2 | [8, 11] | 2.12 | 11.\*, 12.\* | | 4.46.0 | 3.3.0 - 3.3.2 | [8, 11] | 2.12 | 11.\*, 12.\* | | 4.45.0 | 3.3.0 - 3.3.2 | [8, 11] | 2.12 | 11.\*, 12.\* | | 4.44.2 | 3.3.0 - 3.3.2 | [8, 11] | 2.12 | 11.\*, 12.\* | @@ -84,6 +85,7 @@ https://github.com/Azure/azure-sdk-for-java/issues/new #### azure-cosmos-spark_3-4_2-12 | Connector | Supported Spark Versions | Supported JVM Versions | Supported Scala Versions | Supported Databricks Runtimes | Supported Fabric Runtimes | |-----------|--------------------------|------------------------|--------------------------|-------------------------------|---------------------------| +| 4.47.0 | 3.4.0 - 3.4.1 | [8, 11] | 2.12 | 13.\* | | | 4.46.0 | 3.4.0 - 3.4.1 | [8, 11] | 2.12 | 13.\* | | | 4.45.0 | 3.4.0 - 3.4.1 | [8, 11] | 2.12 | 13.\* | | | 4.44.2 | 3.4.0 - 3.4.1 | [8, 11] | 2.12 | 13.\* | | @@ -131,6 +133,7 @@ https://github.com/Azure/azure-sdk-for-java/issues/new #### azure-cosmos-spark_3-5_2-12 | Connector | Supported Spark Versions | Minimum Java Version | Supported Scala Versions | Supported Databricks Runtimes | Supported Fabric Runtimes | 
|-----------|--------------------------|----------------------|----------------------------|-------------------------------|------------------------------| +| 4.47.0 | 3.5.0 | [8, 11, 17] | 2.12 | 14.\*, 15.\*, 16.4 LTS | 1.3.\* | | 4.46.0 | 3.5.0 | [8, 11, 17] | 2.12 | 14.\*, 15.\*, 16.4 LTS | 1.3.\* | | 4.45.0 | 3.5.0 | [8, 11, 17] | 2.12 | 14.*, 15.\*, 16.4 LTS | 1.3.\* | | 4.44.2 | 3.5.0 | [8, 11, 17] | 2.12 | 14.*, 15.\*, 16.4 LTS | 1.3.\* | @@ -164,6 +167,7 @@ to use the same version of Scala that Spark was compiled for. #### azure-cosmos-spark_3-5_2-13 | Connector | Supported Spark Versions | Minimum Java Version | Supported Scala Versions | Supported Databricks Runtimes | Supported Fabric Runtimes | |-----------|--------------------------|-----------------------|---------------------------|-------------------------------|---------------------------| +| 4.47.0 | 3.5.0 | [17] | 2.13 | 16.4 LTS | TBD | | 4.46.0 | 3.5.0 | [17] | 2.13 | 16.4 LTS | TBD | | 4.45.0 | 3.5.0 | [17] | 2.13 | 16.4 LTS | TBD | | 4.44.2 | 3.5.0 | [17] | 2.13 | 16.4 LTS | TBD | @@ -175,6 +179,7 @@ to use the same version of Scala that Spark was compiled for. #### azure-cosmos-spark_4-0_2-13 | Connector | Supported Spark Versions | Minimum Java Version | Supported Scala Versions | Supported Databricks Runtimes | Supported Fabric Runtimes | |-----------|--------------------------|----------------------|---------------------------|-------------------------------|---------------------------| +| 4.47.0 | 4.0.0 | [17, 21] | 2.13 | 17.\* | TBD | | 4.46.0 | 4.0.0 | [17, 21] | 2.13 | 17.\* | TBD | | 4.45.0 | 4.0.0 | [17, 21] | 2.13 | 17.\* | TBD | | 4.44.2 | 4.0.0 | [17, 21] | 2.13 | 17.\* | TBD | @@ -186,11 +191,11 @@ to use the same version of Scala that Spark was compiled for. 
### Download You can use the maven coordinate of the jar to auto install the Spark Connector to your Databricks Runtime from Maven: -`com.azure.cosmos.spark:azure-cosmos-spark_3-3_2-12:4.46.0` +`com.azure.cosmos.spark:azure-cosmos-spark_3-3_2-12:4.47.0` You can also integrate against Cosmos DB Spark Connector in your SBT project: ```scala -libraryDependencies += "com.azure.cosmos.spark" % "azure-cosmos-spark_3-3_2-12" % "4.46.0" +libraryDependencies += "com.azure.cosmos.spark" % "azure-cosmos-spark_3-3_2-12" % "4.47.0" ``` Cosmos DB Spark Connector is available on [Maven Central Repo](https://central.sonatype.com/search?namespace=com.azure.cosmos.spark). diff --git a/sdk/cosmos/azure-cosmos-spark_3-3_2-12/pom.xml b/sdk/cosmos/azure-cosmos-spark_3-3_2-12/pom.xml index 1095c39f0aa6..8682a60c38c6 100644 --- a/sdk/cosmos/azure-cosmos-spark_3-3_2-12/pom.xml +++ b/sdk/cosmos/azure-cosmos-spark_3-3_2-12/pom.xml @@ -11,7 +11,7 @@ com.azure.cosmos.spark azure-cosmos-spark_3-3_2-12 - 4.47.0-beta.1 + 4.47.0 jar https://github.com/Azure/azure-sdk-for-java/tree/main/sdk/cosmos/azure-cosmos-spark_3-3_2-12 OLTP Spark 3.3 Connector for Azure Cosmos DB SQL API diff --git a/sdk/cosmos/azure-cosmos-spark_3-4_2-12/CHANGELOG.md b/sdk/cosmos/azure-cosmos-spark_3-4_2-12/CHANGELOG.md index c9097e749f03..a737c1233935 100644 --- a/sdk/cosmos/azure-cosmos-spark_3-4_2-12/CHANGELOG.md +++ b/sdk/cosmos/azure-cosmos-spark_3-4_2-12/CHANGELOG.md @@ -1,16 +1,13 @@ ## Release History -### 4.47.0-beta.1 (Unreleased) +### 4.47.0 (2026-04-16) #### Features Added - -#### Breaking Changes +* Added support for change feed with `startFrom` point-in-time on merged partitions by enabling the `CHANGE_FEED_WITH_START_TIME_POST_MERGE` SDK capability in the azure-cosmos SDK. 
- See [PR 48752](https://github.com/Azure/azure-sdk-for-java/pull/48752) #### Bugs Fixed * Fixed an issue where `readContainerThroughput` was always called even when `targetThroughput` is explicitly configured, requiring unnecessary `throughputSettings/read` permission for AAD principals. - See [PR 48800](https://github.com/Azure/azure-sdk-for-java/pull/48800) -#### Other Changes - ### 4.46.0 (2026-03-27) #### Bugs Fixed diff --git a/sdk/cosmos/azure-cosmos-spark_3-4_2-12/README.md b/sdk/cosmos/azure-cosmos-spark_3-4_2-12/README.md index 29d6e2af8930..4fec0e7f0533 100644 --- a/sdk/cosmos/azure-cosmos-spark_3-4_2-12/README.md +++ b/sdk/cosmos/azure-cosmos-spark_3-4_2-12/README.md @@ -28,6 +28,7 @@ https://github.com/Azure/azure-sdk-for-java/issues/new #### azure-cosmos-spark_3-4_2-12 | Connector | Supported Spark Versions | Supported JVM Versions | Supported Scala Versions | Supported Databricks Runtimes | Supported Fabric Runtimes | |-----------|--------------------------|------------------------|--------------------------|-------------------------------|---------------------------| +| 4.47.0 | 3.4.0 - 3.4.1 | [8, 11] | 2.12 | 13.\* | | | 4.46.0 | 3.4.0 - 3.4.1 | [8, 11] | 2.12 | 13.\* | | | 4.45.0 | 3.4.0 - 3.4.1 | [8, 11] | 2.12 | 13.\* | | | 4.44.2 | 3.4.0 - 3.4.1 | [8, 11] | 2.12 | 13.\* | | @@ -75,6 +76,7 @@ https://github.com/Azure/azure-sdk-for-java/issues/new #### azure-cosmos-spark_3-3_2-12 | Connector | Supported Spark Versions | Supported JVM Versions | Supported Scala Versions | Supported Databricks Runtimes | |-----------|--------------------------|------------------------|--------------------------|-------------------------------| +| 4.47.0 | 3.3.0 - 3.3.2 | [8, 11] | 2.12 | 11.\*, 12.\* | | 4.46.0 | 3.3.0 - 3.3.2 | [8, 11] | 2.12 | 11.\*, 12.\* | | 4.45.0 | 3.3.0 - 3.3.2 | [8, 11] | 2.12 | 11.\*, 12.\* | | 4.44.2 | 3.3.0 - 3.3.2 | [8, 11] | 2.12 | 11.\*, 12.\* | @@ -131,6 +133,7 @@ https://github.com/Azure/azure-sdk-for-java/issues/new #### 
azure-cosmos-spark_3-5_2-12 | Connector | Supported Spark Versions | Minimum Java Version | Supported Scala Versions | Supported Databricks Runtimes | Supported Fabric Runtimes | |-----------|--------------------------|----------------------|--------------------------|-------------------------------|---------------------------| +| 4.47.0 | 3.5.0 | [8, 11, 17] | 2.12 | 14.\*, 15.\*, 16.4 LTS | 1.3.\* | | 4.46.0 | 3.5.0 | [8, 11, 17] | 2.12 | 14.\*, 15.\*, 16.4 LTS | 1.3.\* | | 4.45.0 | 3.5.0 | [8, 11, 17] | 2.12 | 14.*, 15.\*, 16.4 LTS | 1.3.\* | | 4.44.2 | 3.5.0 | [8, 11, 17] | 2.12 | 14.*, 15.\*, 16.4 LTS | 1.3.\* | @@ -164,6 +167,7 @@ to use the same version of Scala that Spark was compiled for. #### azure-cosmos-spark_3-5_2-13 | Connector | Supported Spark Versions | Minimum Java Version | Supported Scala Versions | Supported Databricks Runtimes | Supported Fabric Runtimes | |-----------|--------------------------|-----------------------|---------------------------|-------------------------------|---------------------------| +| 4.47.0 | 3.5.0 | [17] | 2.13 | 16.4 LTS | TBD | | 4.46.0 | 3.5.0 | [17] | 2.13 | 16.4 LTS | TBD | | 4.45.0 | 3.5.0 | [17] | 2.13 | 16.4 LTS | TBD | | 4.44.2 | 3.5.0 | [17] | 2.13 | 16.4 LTS | TBD | @@ -175,6 +179,7 @@ to use the same version of Scala that Spark was compiled for. #### azure-cosmos-spark_4-0_2-13 | Connector | Supported Spark Versions | Minimum Java Version | Supported Scala Versions | Supported Databricks Runtimes | Supported Fabric Runtimes | |-----------|--------------------------|----------------------|---------------------------|-------------------------------|---------------------------| +| 4.47.0 | 4.0.0 | [17, 21] | 2.13 | 17.\* | TBD | | 4.46.0 | 4.0.0 | [17, 21] | 2.13 | 17.\* | TBD | | 4.45.0 | 4.0.0 | [17, 21] | 2.13 | 17.\* | TBD | | 4.44.2 | 4.0.0 | [17, 21] | 2.13 | 17.\* | TBD | @@ -186,11 +191,11 @@ to use the same version of Scala that Spark was compiled for. 
### Download You can use the maven coordinate of the jar to auto install the Spark Connector to your Databricks Runtime from Maven: -`com.azure.cosmos.spark:azure-cosmos-spark_3-4_2-12:4.46.0` +`com.azure.cosmos.spark:azure-cosmos-spark_3-4_2-12:4.47.0` You can also integrate against Cosmos DB Spark Connector in your SBT project: ```scala -libraryDependencies += "com.azure.cosmos.spark" % "azure-cosmos-spark_3-4_2-12" % "4.46.0" +libraryDependencies += "com.azure.cosmos.spark" % "azure-cosmos-spark_3-4_2-12" % "4.47.0" ``` Cosmos DB Spark Connector is available on [Maven Central Repo](https://central.sonatype.com/search?namespace=com.azure.cosmos.spark). diff --git a/sdk/cosmos/azure-cosmos-spark_3-4_2-12/pom.xml b/sdk/cosmos/azure-cosmos-spark_3-4_2-12/pom.xml index 634a658caa99..d149c455a1ef 100644 --- a/sdk/cosmos/azure-cosmos-spark_3-4_2-12/pom.xml +++ b/sdk/cosmos/azure-cosmos-spark_3-4_2-12/pom.xml @@ -11,7 +11,7 @@ com.azure.cosmos.spark azure-cosmos-spark_3-4_2-12 - 4.47.0-beta.1 + 4.47.0 jar https://github.com/Azure/azure-sdk-for-java/tree/main/sdk/cosmos/azure-cosmos-spark_3-4_2-12 OLTP Spark 3.4 Connector for Azure Cosmos DB SQL API diff --git a/sdk/cosmos/azure-cosmos-spark_3-5_2-12/CHANGELOG.md b/sdk/cosmos/azure-cosmos-spark_3-5_2-12/CHANGELOG.md index f5eac38bdb71..e70fa628e985 100644 --- a/sdk/cosmos/azure-cosmos-spark_3-5_2-12/CHANGELOG.md +++ b/sdk/cosmos/azure-cosmos-spark_3-5_2-12/CHANGELOG.md @@ -1,16 +1,13 @@ ## Release History -### 4.47.0-beta.1 (Unreleased) +### 4.47.0 (2026-04-16) #### Features Added - -#### Breaking Changes +* Added support for change feed with `startFrom` point-in-time on merged partitions by enabling the `CHANGE_FEED_WITH_START_TIME_POST_MERGE` SDK capability in the azure-cosmos SDK. 
- See [PR 48752](https://github.com/Azure/azure-sdk-for-java/pull/48752) #### Bugs Fixed * Fixed an issue where `readContainerThroughput` was always called even when `targetThroughput` is explicitly configured, requiring unnecessary `throughputSettings/read` permission for AAD principals. - See [PR 48800](https://github.com/Azure/azure-sdk-for-java/pull/48800) -#### Other Changes - ### 4.46.0 (2026-03-27) #### Bugs Fixed diff --git a/sdk/cosmos/azure-cosmos-spark_3-5_2-12/README.md b/sdk/cosmos/azure-cosmos-spark_3-5_2-12/README.md index ad44ec300890..02ba86c28a75 100644 --- a/sdk/cosmos/azure-cosmos-spark_3-5_2-12/README.md +++ b/sdk/cosmos/azure-cosmos-spark_3-5_2-12/README.md @@ -28,6 +28,7 @@ https://github.com/Azure/azure-sdk-for-java/issues/new #### azure-cosmos-spark_3-5_2-12 | Connector | Supported Spark Versions | Minimum Java Version | Supported Scala Versions | Supported Databricks Runtimes | Supported Fabric Runtimes | |-----------|--------------------------|-----------------------|---------------------------|-------------------------------|---------------------------| +| 4.47.0 | 3.5.0 | [8, 11, 17] | 2.12 | 14.\*, 15.\*, 16.4 LTS | 1.3.\* | | 4.46.0 | 3.5.0 | [8, 11, 17] | 2.12 | 14.\*, 15.\*, 16.4 LTS | 1.3.\* | | 4.45.0 | 3.5.0 | [8, 11, 17] | 2.12 | 14.\*, 15.\*, 16.4 LTS | 1.3.\* | | 4.44.2 | 3.5.0 | [8, 11, 17] | 2.12 | 14.\*, 15.\*, 16.4 LTS | 1.3.\* | @@ -61,6 +62,7 @@ to use the same version of Scala that Spark was compiled for. 
#### azure-cosmos-spark_3-3_2-12 | Connector | Supported Spark Versions | Supported JVM Versions | Supported Scala Versions | Supported Databricks Runtimes | |-----------|--------------------------|------------------------|--------------------------|-------------------------------| +| 4.47.0 | 3.3.0 - 3.3.2 | [8, 11] | 2.12 | 11.\*, 12.\* | | 4.46.0 | 3.3.0 - 3.3.2 | [8, 11] | 2.12 | 11.\*, 12.\* | | 4.45.0 | 3.3.0 - 3.3.2 | [8, 11] | 2.12 | 11.\*, 12.\* | | 4.44.2 | 3.3.0 - 3.3.2 | [8, 11] | 2.12 | 11.\*, 12.\* | @@ -117,6 +119,7 @@ to use the same version of Scala that Spark was compiled for. #### azure-cosmos-spark_3-4_2-12 | Connector | Supported Spark Versions | Supported JVM Versions | Supported Scala Versions | Supported Databricks Runtimes | Supported Fabric Runtimes | |-----------|--------------------------|------------------------|--------------------------|-------------------------------|---------------------------| +| 4.47.0 | 3.4.0 - 3.4.1 | [8, 11] | 2.12 | 13.\* | | | 4.46.0 | 3.4.0 - 3.4.1 | [8, 11] | 2.12 | 13.\* | | | 4.45.0 | 3.4.0 - 3.4.1 | [8, 11] | 2.12 | 13.\* | | | 4.44.2 | 3.4.0 - 3.4.1 | [8, 11] | 2.12 | 13.\* | | @@ -164,6 +167,7 @@ to use the same version of Scala that Spark was compiled for. #### azure-cosmos-spark_3-5_2-13 | Connector | Supported Spark Versions | Minimum Java Version | Supported Scala Versions | Supported Databricks Runtimes | Supported Fabric Runtimes | |-----------|--------------------------|-----------------------|---------------------------|-------------------------------|---------------------------| +| 4.47.0 | 3.5.0 | [17] | 2.13 | 16.4 LTS | TBD | | 4.46.0 | 3.5.0 | [17] | 2.13 | 16.4 LTS | TBD | | 4.45.0 | 3.5.0 | [17] | 2.13 | 16.4 LTS | TBD | | 4.44.2 | 3.5.0 | [17] | 2.13 | 16.4 LTS | TBD | @@ -175,6 +179,7 @@ to use the same version of Scala that Spark was compiled for. 
#### azure-cosmos-spark_4-0_2-13 | Connector | Supported Spark Versions | Minimum Java Version | Supported Scala Versions | Supported Databricks Runtimes | Supported Fabric Runtimes | |-----------|--------------------------|----------------------|---------------------------|-------------------------------|---------------------------| +| 4.47.0 | 4.0.0 | [17, 21] | 2.13 | 17.\* | TBD | | 4.46.0 | 4.0.0 | [17, 21] | 2.13 | 17.\* | TBD | | 4.45.0 | 4.0.0 | [17, 21] | 2.13 | 17.\* | TBD | | 4.44.2 | 4.0.0 | [17, 21] | 2.13 | 17.\* | TBD | @@ -186,11 +191,11 @@ to use the same version of Scala that Spark was compiled for. ### Download You can use the maven coordinate of the jar to auto install the Spark Connector to your Databricks Runtime from Maven: -`com.azure.cosmos.spark:azure-cosmos-spark_3-5_2-12:4.46.0` +`com.azure.cosmos.spark:azure-cosmos-spark_3-5_2-12:4.47.0` You can also integrate against Cosmos DB Spark Connector in your SBT project: ```scala -libraryDependencies += "com.azure.cosmos.spark" % "azure-cosmos-spark_3-5_2-12" % "4.46.0" +libraryDependencies += "com.azure.cosmos.spark" % "azure-cosmos-spark_3-5_2-12" % "4.47.0" ``` Cosmos DB Spark Connector is available on [Maven Central Repo](https://central.sonatype.com/search?namespace=com.azure.cosmos.spark). 
diff --git a/sdk/cosmos/azure-cosmos-spark_3-5_2-12/pom.xml b/sdk/cosmos/azure-cosmos-spark_3-5_2-12/pom.xml index eba4e943268f..bc2935f92e2c 100644 --- a/sdk/cosmos/azure-cosmos-spark_3-5_2-12/pom.xml +++ b/sdk/cosmos/azure-cosmos-spark_3-5_2-12/pom.xml @@ -11,7 +11,7 @@ com.azure.cosmos.spark azure-cosmos-spark_3-5_2-12 - 4.47.0-beta.1 + 4.47.0 jar https://github.com/Azure/azure-sdk-for-java/tree/main/sdk/cosmos/azure-cosmos-spark_3-5_2-12 OLTP Spark 3.5 Connector for Azure Cosmos DB SQL API diff --git a/sdk/cosmos/azure-cosmos-spark_3-5_2-13/CHANGELOG.md b/sdk/cosmos/azure-cosmos-spark_3-5_2-13/CHANGELOG.md index 919d7fbfa325..a283803e2a31 100644 --- a/sdk/cosmos/azure-cosmos-spark_3-5_2-13/CHANGELOG.md +++ b/sdk/cosmos/azure-cosmos-spark_3-5_2-13/CHANGELOG.md @@ -1,16 +1,13 @@ ## Release History -### 4.47.0-beta.1 (Unreleased) +### 4.47.0 (2026-04-16) #### Features Added - -#### Breaking Changes +* Added support for change feed with `startFrom` point-in-time on merged partitions by enabling the `CHANGE_FEED_WITH_START_TIME_POST_MERGE` SDK capability in the azure-cosmos SDK. - See [PR 48752](https://github.com/Azure/azure-sdk-for-java/pull/48752) #### Bugs Fixed * Fixed an issue where `readContainerThroughput` was always called even when `targetThroughput` is explicitly configured, requiring unnecessary `throughputSettings/read` permission for AAD principals. 
- See [PR 48800](https://github.com/Azure/azure-sdk-for-java/pull/48800) -#### Other Changes - ### 4.46.0 (2026-03-27) #### Bugs Fixed diff --git a/sdk/cosmos/azure-cosmos-spark_3-5_2-13/README.md b/sdk/cosmos/azure-cosmos-spark_3-5_2-13/README.md index 46ea52d07f38..f9cde9782105 100644 --- a/sdk/cosmos/azure-cosmos-spark_3-5_2-13/README.md +++ b/sdk/cosmos/azure-cosmos-spark_3-5_2-13/README.md @@ -28,6 +28,7 @@ https://github.com/Azure/azure-sdk-for-java/issues/new #### azure-cosmos-spark_3-5_2-13 | Connector | Supported Spark Versions | Minimum Java Version | Supported Scala Versions | Supported Databricks Runtimes | Supported Fabric Runtimes | |-----------|--------------------------|-----------------------|---------------------------|-------------------------------|---------------------------| +| 4.47.0 | 3.5.0 | [17] | 2.13 | 16.4 LTS | TBD | | 4.46.0 | 3.5.0 | [17] | 2.13 | 16.4 LTS | TBD | | 4.45.0 | 3.5.0 | [17] | 2.13 | 16.4 LTS | TBD | | 4.44.2 | 3.5.0 | [17] | 2.13 | 16.4 LTS | TBD | @@ -42,6 +43,7 @@ to use the same version of Scala that Spark was compiled for. #### azure-cosmos-spark_3-3_2-12 | Connector | Supported Spark Versions | Supported JVM Versions | Supported Scala Versions | Supported Databricks Runtimes | |-----------|--------------------------|------------------------|--------------------------|-------------------------------| +| 4.47.0 | 3.3.0 - 3.3.2 | [8, 11] | 2.12 | 11.\*, 12.\* | | 4.46.0 | 3.3.0 - 3.3.2 | [8, 11] | 2.12 | 11.\*, 12.\* | | 4.45.0 | 3.3.0 - 3.3.2 | [8, 11] | 2.12 | 11.\*, 12.\* | | 4.44.2 | 3.3.0 - 3.3.2 | [8, 11] | 2.12 | 11.\*, 12.\* | @@ -98,6 +100,7 @@ to use the same version of Scala that Spark was compiled for. 
#### azure-cosmos-spark_3-4_2-12 | Connector | Supported Spark Versions | Supported JVM Versions | Supported Scala Versions | Supported Databricks Runtimes | Supported Fabric Runtimes | |-----------|--------------------------|------------------------|--------------------------|-------------------------------|---------------------------| +| 4.47.0 | 3.4.0 - 3.4.1 | [8, 11] | 2.12 | 13.\* | | | 4.46.0 | 3.4.0 - 3.4.1 | [8, 11] | 2.12 | 13.\* | | | 4.45.0 | 3.4.0 - 3.4.1 | [8, 11] | 2.12 | 13.\* | | | 4.44.2 | 3.4.0 - 3.4.1 | [8, 11] | 2.12 | 13.\* | | @@ -145,6 +148,7 @@ to use the same version of Scala that Spark was compiled for. #### azure-cosmos-spark_3-5_2-12 | Connector | Supported Spark Versions | Minimum Java Version | Supported Scala Versions | Supported Databricks Runtimes | Supported Fabric Runtimes | |-----------|--------------------------|-----------------------|---------------------------|-------------------------------|---------------------------| +| 4.47.0 | 3.5.0 | [8, 11, 17] | 2.12 | 14.\*, 15.\*, 16.4 LTS | 1.3.\* | | 4.46.0 | 3.5.0 | [8, 11, 17] | 2.12 | 14.\*, 15.\*, 16.4 LTS | 1.3.\* | | 4.45.0 | 3.5.0 | [8, 11, 17] | 2.12 | 14.\*, 15.\*, 16.4 LTS | 1.3.\* | | 4.44.2 | 3.5.0 | [8, 11, 17] | 2.12 | 14.\*, 15.\*, 16.4 LTS | 1.3.\* | @@ -178,6 +182,7 @@ to use the same version of Scala that Spark was compiled for. #### azure-cosmos-spark_4-0_2-13 | Connector | Supported Spark Versions | Minimum Java Version | Supported Scala Versions | Supported Databricks Runtimes | Supported Fabric Runtimes | |-----------|--------------------------|----------------------|---------------------------|-------------------------------|---------------------------| +| 4.47.0 | 4.0.0 | [17, 21] | 2.13 | 17.\* | TBD | | 4.46.0 | 4.0.0 | [17, 21] | 2.13 | 17.\* | TBD | | 4.45.0 | 4.0.0 | [17, 21] | 2.13 | 17.\* | TBD | | 4.44.2 | 4.0.0 | [17, 21] | 2.13 | 17.\* | TBD | @@ -192,11 +197,11 @@ to use Scala 2.13 that Spark 4.0 was compiled for. 
### Download You can use the maven coordinate of the jar to auto install the Spark Connector to your Databricks Runtime from Maven: -`com.azure.cosmos.spark:azure-cosmos-spark_3-5_2-13:4.46.0` +`com.azure.cosmos.spark:azure-cosmos-spark_3-5_2-13:4.47.0` You can also integrate against Cosmos DB Spark Connector in your SBT project: ```scala -libraryDependencies += "com.azure.cosmos.spark" % "azure-cosmos-spark_3-5_2-13" % "4.46.0" +libraryDependencies += "com.azure.cosmos.spark" % "azure-cosmos-spark_3-5_2-13" % "4.47.0" ``` Cosmos DB Spark Connector is available on [Maven Central Repo](https://central.sonatype.com/search?namespace=com.azure.cosmos.spark). diff --git a/sdk/cosmos/azure-cosmos-spark_3-5_2-13/pom.xml b/sdk/cosmos/azure-cosmos-spark_3-5_2-13/pom.xml index cf91a57b165e..d4038b94a990 100644 --- a/sdk/cosmos/azure-cosmos-spark_3-5_2-13/pom.xml +++ b/sdk/cosmos/azure-cosmos-spark_3-5_2-13/pom.xml @@ -11,7 +11,7 @@ com.azure.cosmos.spark azure-cosmos-spark_3-5_2-13 - 4.47.0-beta.1 + 4.47.0 jar https://github.com/Azure/azure-sdk-for-java/tree/main/sdk/cosmos/azure-cosmos-spark_3-5_2-13 OLTP Spark 3.5 Connector for Azure Cosmos DB SQL API diff --git a/sdk/cosmos/azure-cosmos-spark_3/docs/quick-start.md b/sdk/cosmos/azure-cosmos-spark_3/docs/quick-start.md index ab93fe71e849..b13621e9a5ba 100644 --- a/sdk/cosmos/azure-cosmos-spark_3/docs/quick-start.md +++ b/sdk/cosmos/azure-cosmos-spark_3/docs/quick-start.md @@ -29,19 +29,19 @@ You can use any other Spark 3.5 spark offering as well, also you should be able SLF4J is only needed if you plan to use logging, please also download an SLF4J binding which will link the SLF4J API with the logging implementation of your choice. See the [SLF4J user manual](https://www.slf4j.org/manual.html) for more information. 
For Spark 3.3: -- Install Cosmos DB Spark Connector, in your spark Cluster [com.azure.cosmos.spark:azure-cosmos-spark_3-3_2-12:4.46.0](https://search.maven.org/artifact/com.azure.cosmos.spark/azure-cosmos-spark_3-3_2-12/4.46.0/jar) +- Install Cosmos DB Spark Connector, in your spark Cluster [com.azure.cosmos.spark:azure-cosmos-spark_3-3_2-12:4.47.0](https://search.maven.org/artifact/com.azure.cosmos.spark/azure-cosmos-spark_3-3_2-12/4.47.0/jar) For Spark 3.4: -- Install Cosmos DB Spark Connector, in your spark Cluster [com.azure.cosmos.spark:azure-cosmos-spark_3-4_2-12:4.46.0](https://search.maven.org/artifact/com.azure.cosmos.spark/azure-cosmos-spark_3-4_2-12/4.46.0/jar) +- Install Cosmos DB Spark Connector, in your spark Cluster [com.azure.cosmos.spark:azure-cosmos-spark_3-4_2-12:4.47.0](https://search.maven.org/artifact/com.azure.cosmos.spark/azure-cosmos-spark_3-4_2-12/4.47.0/jar) For Spark 3.5 (Scala 2.12): -- Install Cosmos DB Spark Connector, in your spark Cluster [com.azure.cosmos.spark:azure-cosmos-spark_3-5_2-12:4.46.0](https://search.maven.org/artifact/com.azure.cosmos.spark/azure-cosmos-spark_3-5_2-12/4.46.0/jar) +- Install Cosmos DB Spark Connector, in your spark Cluster [com.azure.cosmos.spark:azure-cosmos-spark_3-5_2-12:4.47.0](https://search.maven.org/artifact/com.azure.cosmos.spark/azure-cosmos-spark_3-5_2-12/4.47.0/jar) For Spark 3.5 (Scala 2.13): -- Install Cosmos DB Spark Connector, in your spark Cluster [com.azure.cosmos.spark:azure-cosmos-spark_3-5_2-13:4.46.0](https://search.maven.org/artifact/com.azure.cosmos.spark/azure-cosmos-spark_3-5_2-13/4.46.0/jar) +- Install Cosmos DB Spark Connector, in your spark Cluster [com.azure.cosmos.spark:azure-cosmos-spark_3-5_2-13:4.47.0](https://search.maven.org/artifact/com.azure.cosmos.spark/azure-cosmos-spark_3-5_2-13/4.47.0/jar) For Spark 4.0: -- Install Cosmos DB Spark Connector, in your spark Cluster 
[com.azure.cosmos.spark:azure-cosmos-spark_4-0_2-13:4.46.0](https://search.maven.org/artifact/com.azure.cosmos.spark/azure-cosmos-spark_4-0_2-13/4.46.0/jar) +- Install Cosmos DB Spark Connector, in your spark Cluster [com.azure.cosmos.spark:azure-cosmos-spark_4-0_2-13:4.47.0](https://search.maven.org/artifact/com.azure.cosmos.spark/azure-cosmos-spark_4-0_2-13/4.47.0/jar) The getting started guide is based on PySpark however you can use the equivalent scala version as well, and you can run the following code snippet in an Azure Databricks PySpark notebook. diff --git a/sdk/cosmos/azure-cosmos-spark_4-0_2-13/CHANGELOG.md b/sdk/cosmos/azure-cosmos-spark_4-0_2-13/CHANGELOG.md index 3972ae6aeb98..3a8c44dcd9cd 100644 --- a/sdk/cosmos/azure-cosmos-spark_4-0_2-13/CHANGELOG.md +++ b/sdk/cosmos/azure-cosmos-spark_4-0_2-13/CHANGELOG.md @@ -1,14 +1,9 @@ ## Release History -### 4.47.0-beta.1 (Unreleased) +### 4.47.0 (2026-04-16) #### Features Added - -#### Breaking Changes - -#### Bugs Fixed - -#### Other Changes +* Added support for change feed with `startFrom` point-in-time on merged partitions by enabling the `CHANGE_FEED_WITH_START_TIME_POST_MERGE` SDK capability in the azure-cosmos SDK. 
- See [PR 48752](https://github.com/Azure/azure-sdk-for-java/pull/48752) ### 4.46.0 (2026-03-27) diff --git a/sdk/cosmos/azure-cosmos-spark_4-0_2-13/README.md b/sdk/cosmos/azure-cosmos-spark_4-0_2-13/README.md index 81869fdba085..306a244db656 100644 --- a/sdk/cosmos/azure-cosmos-spark_4-0_2-13/README.md +++ b/sdk/cosmos/azure-cosmos-spark_4-0_2-13/README.md @@ -20,6 +20,7 @@ https://github.com/Azure/azure-sdk-for-java/issues/new #### azure-cosmos-spark_4-0_2-13 | Connector | Supported Spark Versions | Minimum Java Version | Supported Scala Versions | Supported Databricks Runtimes | Supported Fabric Runtimes | |-----------|--------------------------|----------------------|---------------------------|-------------------------------|---------------------------| +| 4.47.0 | 4.0.0 | [17, 21] | 2.13 | 17.\* | TBD | | 4.46.0 | 4.0.0 | [17, 21] | 2.13 | 17.\* | TBD | | 4.45.0 | 4.0.0 | [17, 21] | 2.13 | 17.\* | TBD | | 4.44.2 | 4.0.0 | [17, 21] | 2.13 | 17.\* | TBD | @@ -34,6 +35,7 @@ to use Scala 2.13 that Spark 4.0 was compiled for. #### azure-cosmos-spark_3-3_2-12 | Connector | Supported Spark Versions | Supported JVM Versions | Supported Scala Versions | Supported Databricks Runtimes | |-----------|--------------------------|------------------------|--------------------------|-------------------------------| +| 4.47.0 | 3.3.0 - 3.3.2 | [8, 11] | 2.12 | 11.\*, 12.\* | | 4.46.0 | 3.3.0 - 3.3.2 | [8, 11] | 2.12 | 11.\*, 12.\* | | 4.45.0 | 3.3.0 - 3.3.2 | [8, 11] | 2.12 | 11.\*, 12.\* | | 4.44.2 | 3.3.0 - 3.3.2 | [8, 11] | 2.12 | 11.\*, 12.\* | @@ -90,6 +92,7 @@ to use Scala 2.13 that Spark 4.0 was compiled for. 
#### azure-cosmos-spark_3-4_2-12 | Connector | Supported Spark Versions | Supported JVM Versions | Supported Scala Versions | Supported Databricks Runtimes | Supported Fabric Runtimes | |-----------|--------------------------|------------------------|--------------------------|-------------------------------|---------------------------| +| 4.47.0 | 3.4.0 - 3.4.1 | [8, 11] | 2.12 | 13.\* | | | 4.46.0 | 3.4.0 - 3.4.1 | [8, 11] | 2.12 | 13.\* | | | 4.45.0 | 3.4.0 - 3.4.1 | [8, 11] | 2.12 | 13.\* | | | 4.44.2 | 3.4.0 - 3.4.1 | [8, 11] | 2.12 | 13.\* | | @@ -137,6 +140,7 @@ to use Scala 2.13 that Spark 4.0 was compiled for. #### azure-cosmos-spark_3-5_2-12 | Connector | Supported Spark Versions | Minimum Java Version | Supported Scala Versions | Supported Databricks Runtimes | Supported Fabric Runtimes | |-----------|--------------------------|-----------------------|---------------------------|-------------------------------|---------------------------| +| 4.47.0 | 3.5.0 | [8, 11, 17] | 2.12 | 14.\*, 15.\*, 16.4 LTS | 1.3.\* | | 4.46.0 | 3.5.0 | [8, 11, 17] | 2.12 | 14.\*, 15.\*, 16.4 LTS | 1.3.\* | | 4.45.0 | 3.5.0 | [8, 11, 17] | 2.12 | 14.\*, 15.\*, 16.4 LTS | 1.3.\* | | 4.44.2 | 3.5.0 | [8, 11, 17] | 2.12 | 14.\*, 15.\*, 16.4 LTS | 1.3.\* | @@ -170,6 +174,7 @@ to use the same version of Scala that Spark was compiled for. #### azure-cosmos-spark_3-5_2-13 | Connector | Supported Spark Versions | Minimum Java Version | Supported Scala Versions | Supported Databricks Runtimes | Supported Fabric Runtimes | |-----------|--------------------------|-----------------------|---------------------------|-------------------------------|---------------------------| +| 4.47.0 | 3.5.0 | [17] | 2.13 | 16.4 LTS | TBD | | 4.46.0 | 3.5.0 | [17] | 2.13 | 16.4 LTS | TBD | | 4.45.0 | 3.5.0 | [17] | 2.13 | 16.4 LTS | TBD | | 4.44.2 | 3.5.0 | [17] | 2.13 | 16.4 LTS | TBD | @@ -181,11 +186,11 @@ to use the same version of Scala that Spark was compiled for. 
### Download You can use the maven coordinate of the jar to auto install the Spark Connector to your Databricks Runtime from Maven: -`com.azure.cosmos.spark:azure-cosmos-spark_4-0_2-13:4.46.0` +`com.azure.cosmos.spark:azure-cosmos-spark_4-0_2-13:4.47.0` You can also integrate against Cosmos DB Spark Connector in your SBT project: ```scala -libraryDependencies += "com.azure.cosmos.spark" % "azure-cosmos-spark_4-0_2-13" % "4.46.0" +libraryDependencies += "com.azure.cosmos.spark" % "azure-cosmos-spark_4-0_2-13" % "4.47.0" ``` Cosmos DB Spark Connector is available on [Maven Central Repo](https://central.sonatype.com/search?namespace=com.azure.cosmos.spark). diff --git a/sdk/cosmos/azure-cosmos-spark_4-0_2-13/pom.xml b/sdk/cosmos/azure-cosmos-spark_4-0_2-13/pom.xml index 6e2c1649773c..766b9e60cc32 100644 --- a/sdk/cosmos/azure-cosmos-spark_4-0_2-13/pom.xml +++ b/sdk/cosmos/azure-cosmos-spark_4-0_2-13/pom.xml @@ -11,7 +11,7 @@ com.azure.cosmos.spark azure-cosmos-spark_4-0_2-13 - 4.47.0-beta.1 + 4.47.0 jar https://github.com/Azure/azure-sdk-for-java/tree/main/sdk/cosmos/azure-cosmos-spark_4-0_2-13 OLTP Spark 4.0 Connector for Azure Cosmos DB SQL API diff --git a/sdk/cosmos/azure-cosmos/CHANGELOG.md b/sdk/cosmos/azure-cosmos/CHANGELOG.md index e8ea564fab7b..1726ce967f20 100644 --- a/sdk/cosmos/azure-cosmos/CHANGELOG.md +++ b/sdk/cosmos/azure-cosmos/CHANGELOG.md @@ -3,12 +3,12 @@ ### 4.80.0-beta.1 (Unreleased) #### Features Added +* Added support for change feed with `startFrom` point-in-time on merged partitions by enabling the `CHANGE_FEED_WITH_START_TIME_POST_MERGE` SDK capability. - See [PR 48752](https://github.com/Azure/azure-sdk-for-java/pull/48752) #### Breaking Changes #### Bugs Fixed * Fixed an issue where the throughput control `throughputQueryMono` was always subscribed even when `targetThroughput` is used (not `targetThroughputThreshold`), causing unnecessary `throughputSettings/read` permission requirement for AAD principals. 
- See [PR 48800](https://github.com/Azure/azure-sdk-for-java/pull/48800) -* Fixed an issue where change feed with `startFrom` point-in-time returned `400` on merged partitions by enabling the `CHANGE_FEED_WITH_START_TIME_POST_MERGE` SDK capability. * Fixed JVM `<clinit>` deadlock when multiple threads concurrently trigger Cosmos SDK class loading for the first time. - See [PR 48689](https://github.com/Azure/azure-sdk-for-java/pull/48689) #### Other Changes diff --git a/sdk/cosmos/fabric-cosmos-spark-auth_3/pom.xml b/sdk/cosmos/fabric-cosmos-spark-auth_3/pom.xml index 3567ec487552..630a563f4956 100644 --- a/sdk/cosmos/fabric-cosmos-spark-auth_3/pom.xml +++ b/sdk/cosmos/fabric-cosmos-spark-auth_3/pom.xml @@ -100,7 +100,7 @@ com.azure.cosmos.spark azure-cosmos-spark_3-5_2-12 - 4.47.0-beta.1 + 4.47.0 provided @@ -183,7 +183,7 @@ com.fasterxml.jackson.datatype:jackson-datatype-jsr310:[2.18.6] com.fasterxml.jackson.core:jackson-databind:[2.18.6] com.fasterxml.jackson.module:jackson-module-scala_2.12:[2.18.6] - com.azure.cosmos.spark:azure-cosmos-spark_3-5_2-12:[4.47.0-beta.1] + com.azure.cosmos.spark:azure-cosmos-spark_3-5_2-12:[4.47.0] com.microsoft.azure.synapse:synapseutils_2.12:[1.5.4] From 5c3c1fbfeb186101fd7f12d92d8b0ca90e599d70 Mon Sep 17 00:00:00 2001 From: tvaron3 Date: Thu, 16 Apr 2026 16:36:21 -0700 Subject: [PATCH 2/9] Address PR review: add clinit fix to CHANGELOGs and DBR 17.3 known issue - Added JVM deadlock fix (PR #48689) to all 5 spark connector CHANGELOGs - Added Known Issues section to Spark 4.0 README for Structured Streaming incompatibility with Databricks Runtime 17.3 Co-authored-by: Copilot <223556219+Copilot@users.noreply.github.com> --- sdk/cosmos/azure-cosmos-spark_3-3_2-12/CHANGELOG.md | 1 + sdk/cosmos/azure-cosmos-spark_3-4_2-12/CHANGELOG.md | 1 + sdk/cosmos/azure-cosmos-spark_3-5_2-12/CHANGELOG.md | 1 + sdk/cosmos/azure-cosmos-spark_3-5_2-13/CHANGELOG.md | 1 + sdk/cosmos/azure-cosmos-spark_4-0_2-13/CHANGELOG.md | 3 +++ 
sdk/cosmos/azure-cosmos-spark_4-0_2-13/README.md | 5 +++++ 6 files changed, 12 insertions(+) diff --git a/sdk/cosmos/azure-cosmos-spark_3-3_2-12/CHANGELOG.md b/sdk/cosmos/azure-cosmos-spark_3-3_2-12/CHANGELOG.md index d2b627ec8a6d..c930f917d697 100644 --- a/sdk/cosmos/azure-cosmos-spark_3-3_2-12/CHANGELOG.md +++ b/sdk/cosmos/azure-cosmos-spark_3-3_2-12/CHANGELOG.md @@ -7,6 +7,7 @@ #### Bugs Fixed * Fixed an issue where `readContainerThroughput` was always called even when `targetThroughput` is explicitly configured, requiring unnecessary `throughputSettings/read` permission for AAD principals. - See [PR 48800](https://github.com/Azure/azure-sdk-for-java/pull/48800) +* Fixed JVM `<clinit>` deadlock when multiple threads concurrently trigger Cosmos SDK class loading for the first time. - See [PR 48689](https://github.com/Azure/azure-sdk-for-java/pull/48689) ### 4.46.0 (2026-03-27) diff --git a/sdk/cosmos/azure-cosmos-spark_3-4_2-12/CHANGELOG.md b/sdk/cosmos/azure-cosmos-spark_3-4_2-12/CHANGELOG.md index a737c1233935..8586e234941d 100644 --- a/sdk/cosmos/azure-cosmos-spark_3-4_2-12/CHANGELOG.md +++ b/sdk/cosmos/azure-cosmos-spark_3-4_2-12/CHANGELOG.md @@ -7,6 +7,7 @@ #### Bugs Fixed * Fixed an issue where `readContainerThroughput` was always called even when `targetThroughput` is explicitly configured, requiring unnecessary `throughputSettings/read` permission for AAD principals. - See [PR 48800](https://github.com/Azure/azure-sdk-for-java/pull/48800) +* Fixed JVM `<clinit>` deadlock when multiple threads concurrently trigger Cosmos SDK class loading for the first time. 
- See [PR 48689](https://github.com/Azure/azure-sdk-for-java/pull/48689) ### 4.46.0 (2026-03-27) diff --git a/sdk/cosmos/azure-cosmos-spark_3-5_2-12/CHANGELOG.md b/sdk/cosmos/azure-cosmos-spark_3-5_2-12/CHANGELOG.md index e70fa628e985..77fde5bdebca 100644 --- a/sdk/cosmos/azure-cosmos-spark_3-5_2-12/CHANGELOG.md +++ b/sdk/cosmos/azure-cosmos-spark_3-5_2-12/CHANGELOG.md @@ -7,6 +7,7 @@ #### Bugs Fixed * Fixed an issue where `readContainerThroughput` was always called even when `targetThroughput` is explicitly configured, requiring unnecessary `throughputSettings/read` permission for AAD principals. - See [PR 48800](https://github.com/Azure/azure-sdk-for-java/pull/48800) +* Fixed JVM `<clinit>` deadlock when multiple threads concurrently trigger Cosmos SDK class loading for the first time. - See [PR 48689](https://github.com/Azure/azure-sdk-for-java/pull/48689) ### 4.46.0 (2026-03-27) diff --git a/sdk/cosmos/azure-cosmos-spark_3-5_2-13/CHANGELOG.md b/sdk/cosmos/azure-cosmos-spark_3-5_2-13/CHANGELOG.md index a283803e2a31..0de57b05f7cd 100644 --- a/sdk/cosmos/azure-cosmos-spark_3-5_2-13/CHANGELOG.md +++ b/sdk/cosmos/azure-cosmos-spark_3-5_2-13/CHANGELOG.md @@ -7,6 +7,7 @@ #### Bugs Fixed * Fixed an issue where `readContainerThroughput` was always called even when `targetThroughput` is explicitly configured, requiring unnecessary `throughputSettings/read` permission for AAD principals. - See [PR 48800](https://github.com/Azure/azure-sdk-for-java/pull/48800) +* Fixed JVM `<clinit>` deadlock when multiple threads concurrently trigger Cosmos SDK class loading for the first time. 
- See [PR 48689](https://github.com/Azure/azure-sdk-for-java/pull/48689) ### 4.46.0 (2026-03-27) diff --git a/sdk/cosmos/azure-cosmos-spark_4-0_2-13/CHANGELOG.md b/sdk/cosmos/azure-cosmos-spark_4-0_2-13/CHANGELOG.md index 3a8c44dcd9cd..c5f1c9585316 100644 --- a/sdk/cosmos/azure-cosmos-spark_4-0_2-13/CHANGELOG.md +++ b/sdk/cosmos/azure-cosmos-spark_4-0_2-13/CHANGELOG.md @@ -5,6 +5,9 @@ #### Features Added * Added support for change feed with `startFrom` point-in-time on merged partitions by enabling the `CHANGE_FEED_WITH_START_TIME_POST_MERGE` SDK capability in the azure-cosmos SDK. - See [PR 48752](https://github.com/Azure/azure-sdk-for-java/pull/48752) +#### Bugs Fixed +* Fixed JVM `<clinit>` deadlock when multiple threads concurrently trigger Cosmos SDK class loading for the first time. - See [PR 48689](https://github.com/Azure/azure-sdk-for-java/pull/48689) + ### 4.46.0 (2026-03-27) #### Bugs Fixed diff --git a/sdk/cosmos/azure-cosmos-spark_4-0_2-13/README.md b/sdk/cosmos/azure-cosmos-spark_4-0_2-13/README.md index 306a244db656..2c09aa059ff2 100644 --- a/sdk/cosmos/azure-cosmos-spark_4-0_2-13/README.md +++ b/sdk/cosmos/azure-cosmos-spark_4-0_2-13/README.md @@ -183,6 +183,11 @@ to use the same version of Scala that Spark was compiled for. | 4.43.1 | 3.5.0 | [17] | 2.13 | 16.4 LTS | TBD | | 4.43.0 | 3.5.0 | [17] | 2.13 | 16.4 LTS | TBD | +### Known Issues + +#### Spark Structured Streaming does not work with Databricks Runtime 17.3 +Databricks Runtime 17.3 uses Spark 4.0.0-preview2, which relocated the internal class `org.apache.spark.sql.connector.read.streaming.SparkDataStream` to a different package starting in Spark 4.1. This causes a `NoClassDefFoundError` when using Spark Structured Streaming with the Cosmos DB Spark connector on DBR 17.3. This issue affects all Spark connectors that depend on this internal API. We are tracking the upstream Spark issue and will provide a fix once a public API is available. 
+ ### Download You can use the maven coordinate of the jar to auto install the Spark Connector to your Databricks Runtime from Maven: From 03da04ef2143da3e50f8e2cf12394af8bfcd3ce6 Mon Sep 17 00:00:00 2001 From: tvaron3 Date: Thu, 16 Apr 2026 16:47:05 -0700 Subject: [PATCH 3/9] Reword DBR 17.3 known issue based on IcM 779484786 Updated with accurate details: MetadataVersionUtil$ class removal, DBR 17.3 includes Spark 4.1 changes while reporting 4.0.0, and recommendation to stay on previous LTS until DBR 18 LTS. Co-authored-by: Copilot <223556219+Copilot@users.noreply.github.com> --- sdk/cosmos/azure-cosmos-spark_4-0_2-13/README.md | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/sdk/cosmos/azure-cosmos-spark_4-0_2-13/README.md b/sdk/cosmos/azure-cosmos-spark_4-0_2-13/README.md index 2c09aa059ff2..edda3593b87e 100644 --- a/sdk/cosmos/azure-cosmos-spark_4-0_2-13/README.md +++ b/sdk/cosmos/azure-cosmos-spark_4-0_2-13/README.md @@ -186,7 +186,7 @@ to use the same version of Scala that Spark was compiled for. ### Known Issues #### Spark Structured Streaming does not work with Databricks Runtime 17.3 -Databricks Runtime 17.3 uses Spark 4.0.0-preview2, which relocated the internal class `org.apache.spark.sql.connector.read.streaming.SparkDataStream` to a different package starting in Spark 4.1. This causes a `NoClassDefFoundError` when using Spark Structured Streaming with the Cosmos DB Spark connector on DBR 17.3. This issue affects all Spark connectors that depend on this internal API. We are tracking the upstream Spark issue and will provide a fix once a public API is available. +Databricks Runtime 17.3 LTS includes internal API changes from Spark 4.1 (specifically the removal of `org.apache.spark.sql.execution.streaming.MetadataVersionUtil$`) while still reporting Spark 4.0.0 compatibility. This causes a `java.lang.NoClassDefFoundError` when using Spark Structured Streaming with the Cosmos DB change feed on DBR 17.3. 
We recommend remaining on a previous Databricks LTS version until Databricks 18 LTS releases, at which point the Spark 4.1-compatible connector can be used. ### Download From 158ff917d9903d049a92db6d1d157790d5657660 Mon Sep 17 00:00:00 2001 From: tvaron3 Date: Thu, 16 Apr 2026 17:09:48 -0700 Subject: [PATCH 4/9] Remove DBR 17.3 known issue - will be fixed before release Co-authored-by: Copilot <223556219+Copilot@users.noreply.github.com> --- sdk/cosmos/azure-cosmos-spark_4-0_2-13/README.md | 5 ----- 1 file changed, 5 deletions(-) diff --git a/sdk/cosmos/azure-cosmos-spark_4-0_2-13/README.md b/sdk/cosmos/azure-cosmos-spark_4-0_2-13/README.md index edda3593b87e..306a244db656 100644 --- a/sdk/cosmos/azure-cosmos-spark_4-0_2-13/README.md +++ b/sdk/cosmos/azure-cosmos-spark_4-0_2-13/README.md @@ -183,11 +183,6 @@ to use the same version of Scala that Spark was compiled for. | 4.43.1 | 3.5.0 | [17] | 2.13 | 16.4 LTS | TBD | | 4.43.0 | 3.5.0 | [17] | 2.13 | 16.4 LTS | TBD | -### Known Issues - -#### Spark Structured Streaming does not work with Databricks Runtime 17.3 -Databricks Runtime 17.3 LTS includes internal API changes from Spark 4.1 (specifically the removal of `org.apache.spark.sql.execution.streaming.MetadataVersionUtil$`) while still reporting Spark 4.0.0 compatibility. This causes a `java.lang.NoClassDefFoundError` when using Spark Structured Streaming with the Cosmos DB change feed on DBR 17.3. We recommend remaining on a previous Databricks LTS version until Databricks 18 LTS releases, at which point the Spark 4.1-compatible connector can be used. 
- ### Download You can use the maven coordinate of the jar to auto install the Spark Connector to your Databricks Runtime from Maven: From 3b31fb29276b84dcf2a28206396cd8f4d27e8810 Mon Sep 17 00:00:00 2001 From: tvaron3 Date: Fri, 17 Apr 2026 07:36:41 -0700 Subject: [PATCH 5/9] Update spark release date to 2026-04-17 Co-authored-by: Copilot <223556219+Copilot@users.noreply.github.com> --- sdk/cosmos/azure-cosmos-spark_3-3_2-12/CHANGELOG.md | 2 +- sdk/cosmos/azure-cosmos-spark_3-4_2-12/CHANGELOG.md | 2 +- sdk/cosmos/azure-cosmos-spark_3-5_2-12/CHANGELOG.md | 2 +- sdk/cosmos/azure-cosmos-spark_3-5_2-13/CHANGELOG.md | 2 +- sdk/cosmos/azure-cosmos-spark_4-0_2-13/CHANGELOG.md | 2 +- 5 files changed, 5 insertions(+), 5 deletions(-) diff --git a/sdk/cosmos/azure-cosmos-spark_3-3_2-12/CHANGELOG.md b/sdk/cosmos/azure-cosmos-spark_3-3_2-12/CHANGELOG.md index c930f917d697..1ab82688c647 100644 --- a/sdk/cosmos/azure-cosmos-spark_3-3_2-12/CHANGELOG.md +++ b/sdk/cosmos/azure-cosmos-spark_3-3_2-12/CHANGELOG.md @@ -1,6 +1,6 @@ ## Release History -### 4.47.0 (2026-04-16) +### 4.47.0 (2026-04-17) #### Features Added * Added support for change feed with `startFrom` point-in-time on merged partitions by enabling the `CHANGE_FEED_WITH_START_TIME_POST_MERGE` SDK capability in the azure-cosmos SDK. - See [PR 48752](https://github.com/Azure/azure-sdk-for-java/pull/48752) diff --git a/sdk/cosmos/azure-cosmos-spark_3-4_2-12/CHANGELOG.md b/sdk/cosmos/azure-cosmos-spark_3-4_2-12/CHANGELOG.md index 8586e234941d..d4e4472ba7e4 100644 --- a/sdk/cosmos/azure-cosmos-spark_3-4_2-12/CHANGELOG.md +++ b/sdk/cosmos/azure-cosmos-spark_3-4_2-12/CHANGELOG.md @@ -1,6 +1,6 @@ ## Release History -### 4.47.0 (2026-04-16) +### 4.47.0 (2026-04-17) #### Features Added * Added support for change feed with `startFrom` point-in-time on merged partitions by enabling the `CHANGE_FEED_WITH_START_TIME_POST_MERGE` SDK capability in the azure-cosmos SDK. 
- See [PR 48752](https://github.com/Azure/azure-sdk-for-java/pull/48752) diff --git a/sdk/cosmos/azure-cosmos-spark_3-5_2-12/CHANGELOG.md b/sdk/cosmos/azure-cosmos-spark_3-5_2-12/CHANGELOG.md index 77fde5bdebca..2f13037e9608 100644 --- a/sdk/cosmos/azure-cosmos-spark_3-5_2-12/CHANGELOG.md +++ b/sdk/cosmos/azure-cosmos-spark_3-5_2-12/CHANGELOG.md @@ -1,6 +1,6 @@ ## Release History -### 4.47.0 (2026-04-16) +### 4.47.0 (2026-04-17) #### Features Added * Added support for change feed with `startFrom` point-in-time on merged partitions by enabling the `CHANGE_FEED_WITH_START_TIME_POST_MERGE` SDK capability in the azure-cosmos SDK. - See [PR 48752](https://github.com/Azure/azure-sdk-for-java/pull/48752) diff --git a/sdk/cosmos/azure-cosmos-spark_3-5_2-13/CHANGELOG.md b/sdk/cosmos/azure-cosmos-spark_3-5_2-13/CHANGELOG.md index 0de57b05f7cd..2ba04f6c6f28 100644 --- a/sdk/cosmos/azure-cosmos-spark_3-5_2-13/CHANGELOG.md +++ b/sdk/cosmos/azure-cosmos-spark_3-5_2-13/CHANGELOG.md @@ -1,6 +1,6 @@ ## Release History -### 4.47.0 (2026-04-16) +### 4.47.0 (2026-04-17) #### Features Added * Added support for change feed with `startFrom` point-in-time on merged partitions by enabling the `CHANGE_FEED_WITH_START_TIME_POST_MERGE` SDK capability in the azure-cosmos SDK. - See [PR 48752](https://github.com/Azure/azure-sdk-for-java/pull/48752) diff --git a/sdk/cosmos/azure-cosmos-spark_4-0_2-13/CHANGELOG.md b/sdk/cosmos/azure-cosmos-spark_4-0_2-13/CHANGELOG.md index c5f1c9585316..26d1622151b4 100644 --- a/sdk/cosmos/azure-cosmos-spark_4-0_2-13/CHANGELOG.md +++ b/sdk/cosmos/azure-cosmos-spark_4-0_2-13/CHANGELOG.md @@ -1,6 +1,6 @@ ## Release History -### 4.47.0 (2026-04-16) +### 4.47.0 (2026-04-17) #### Features Added * Added support for change feed with `startFrom` point-in-time on merged partitions by enabling the `CHANGE_FEED_WITH_START_TIME_POST_MERGE` SDK capability in the azure-cosmos SDK. 
- See [PR 48752](https://github.com/Azure/azure-sdk-for-java/pull/48752) From 13629fcede51c88344d600c4aacbed22864476d3 Mon Sep 17 00:00:00 2001 From: tvaron3 Date: Fri, 17 Apr 2026 07:42:12 -0700 Subject: [PATCH 6/9] Add MetadataVersionUtil fix to Spark 4.0 CHANGELOG (PR #48837) Co-authored-by: Copilot <223556219+Copilot@users.noreply.github.com> --- sdk/cosmos/azure-cosmos-spark_4-0_2-13/CHANGELOG.md | 1 + 1 file changed, 1 insertion(+) diff --git a/sdk/cosmos/azure-cosmos-spark_4-0_2-13/CHANGELOG.md b/sdk/cosmos/azure-cosmos-spark_4-0_2-13/CHANGELOG.md index 26d1622151b4..0723d845c5c9 100644 --- a/sdk/cosmos/azure-cosmos-spark_4-0_2-13/CHANGELOG.md +++ b/sdk/cosmos/azure-cosmos-spark_4-0_2-13/CHANGELOG.md @@ -6,6 +6,7 @@ * Added support for change feed with `startFrom` point-in-time on merged partitions by enabling the `CHANGE_FEED_WITH_START_TIME_POST_MERGE` SDK capability in the azure-cosmos SDK. - See [PR 48752](https://github.com/Azure/azure-sdk-for-java/pull/48752) #### Bugs Fixed +* Fixed `NoClassDefFoundError` for `MetadataVersionUtil` when using change feed Spark Structured Streaming on Databricks Runtime 17.3+ by inlining the version validation logic. - See [PR 48837](https://github.com/Azure/azure-sdk-for-java/pull/48837) * Fixed JVM `<clinit>` deadlock when multiple threads concurrently trigger Cosmos SDK class loading for the first time. - See [PR 48689](https://github.com/Azure/azure-sdk-for-java/pull/48689) ### 4.46.0 (2026-03-27) From fc259a65ca570022141e0d7f41503ed7ef6f8f53 Mon Sep 17 00:00:00 2001 From: tvaron3 Date: Mon, 20 Apr 2026 13:24:43 -0700 Subject: [PATCH 7/9] [Cosmos] Migrate Java Cosmos weekly pipelines to TME Point tests.yml, spark.yml, and kafka.yml at the Azure SDK Test Resources - TME tenant/subscription and the new azure-sdk-tests-cosmos-tme service connection. Prefix the long-lived Spark resource group with SSS3PT_ so that local-auth Cosmos keys do not trip S360 alerts (see eng/common/TestResources/New-TestResources.ps1 lines 130/314). 
Per-run resource groups created by New-TestResources.ps1 are prefixed automatically because the TME tenant id is in $wellKnownTMETenants. Co-authored-by: Copilot <223556219+Copilot@users.noreply.github.com> --- sdk/cosmos/kafka.yml | 4 ++-- sdk/cosmos/spark.yml | 36 ++++++++++++++++++------------------ sdk/cosmos/tests.yml | 14 +++++++------- 3 files changed, 27 insertions(+), 27 deletions(-) diff --git a/sdk/cosmos/kafka.yml b/sdk/cosmos/kafka.yml index 76acbfe0cac7..f05c3a78bf11 100644 --- a/sdk/cosmos/kafka.yml +++ b/sdk/cosmos/kafka.yml @@ -8,7 +8,7 @@ extends: parameters: TestName: 'Kafka_TestContainer_Integration' EnvVars: - ACCOUNT_TENANT_ID: '72f988bf-86f1-41af-91ab-2d7cd011db47' + ACCOUNT_TENANT_ID: '70a036f6-8e4d-4615-bad6-149c02e7720d' ACCOUNT_AAD_CLIENT_ID: $(spark-databricks-cosmos-spn-clientId) ACCOUNT_AAD_CLIENT_SECRET: $(spark-databricks-cosmos-spn-clientSecret) COSMOS.CLIENT_TELEMETRY_ENDPOINT: $(cosmos-client-telemetry-endpoint) @@ -16,7 +16,7 @@ extends: COSMOS_ACR_NAME: $(kafka-mcr-name) CloudConfig: Public: - ServiceConnection: azure-sdk-tests-cosmos + ServiceConnection: azure-sdk-tests-cosmos-tme MatrixConfigs: - Name: Kafka_TestContainer_Integration_Test Path: sdk/cosmos/kafka-testcontainer-matrix.json diff --git a/sdk/cosmos/spark.yml b/sdk/cosmos/spark.yml index fe4d3dc79f4e..1bf5c9deec30 100644 --- a/sdk/cosmos/spark.yml +++ b/sdk/cosmos/spark.yml @@ -14,9 +14,9 @@ stages: CosmosEndpoint: $(spark-databricks-cosmos-endpoint) CosmosKey: $(spark-databricks-cosmos-key) DatabricksEndpoint: $(spark-databricks-endpoint-with-msi) - SubscriptionId: '8fba6d4f-7c37-4d13-9063-fd58ad2b86e2' - TenantId: '72f988bf-86f1-41af-91ab-2d7cd011db47' - ResourceGroupName: 'oltp-spark-ci' + SubscriptionId: '4d042dc6-fe17-4698-a23f-ec6a8d1e98f4' + TenantId: '70a036f6-8e4d-4615-bad6-149c02e7720d' + ResourceGroupName: 'SSS3PT_oltp-spark-ci' ClientId: $(spark-databricks-cosmos-spn-clientId) ClientSecret: $(spark-databricks-cosmos-spn-clientSecret) CosmosContainerName: 
'sampleContainer3' @@ -36,9 +36,9 @@ stages: CosmosEndpoint: $(spark-databricks-cosmos-endpoint) CosmosKey: $(spark-databricks-cosmos-key) DatabricksEndpoint: $(spark-databricks-endpoint-with-msi) - SubscriptionId: '8fba6d4f-7c37-4d13-9063-fd58ad2b86e2' - TenantId: '72f988bf-86f1-41af-91ab-2d7cd011db47' - ResourceGroupName: 'oltp-spark-ci' + SubscriptionId: '4d042dc6-fe17-4698-a23f-ec6a8d1e98f4' + TenantId: '70a036f6-8e4d-4615-bad6-149c02e7720d' + ResourceGroupName: 'SSS3PT_oltp-spark-ci' ClientId: $(spark-databricks-cosmos-spn-clientId) ClientSecret: $(spark-databricks-cosmos-spn-clientSecret) CosmosContainerName: 'sampleContainer4' @@ -58,9 +58,9 @@ stages: CosmosEndpoint: $(spark-databricks-cosmos-endpoint) CosmosKey: $(spark-databricks-cosmos-key) DatabricksEndpoint: $(spark-databricks-endpoint-with-msi) - SubscriptionId: '8fba6d4f-7c37-4d13-9063-fd58ad2b86e2' - TenantId: '72f988bf-86f1-41af-91ab-2d7cd011db47' - ResourceGroupName: 'oltp-spark-ci' + SubscriptionId: '4d042dc6-fe17-4698-a23f-ec6a8d1e98f4' + TenantId: '70a036f6-8e4d-4615-bad6-149c02e7720d' + ResourceGroupName: 'SSS3PT_oltp-spark-ci' ClientId: $(spark-databricks-cosmos-spn-clientId) ClientSecret: $(spark-databricks-cosmos-spn-clientSecret) CosmosContainerName: 'sampleContainer5' @@ -81,9 +81,9 @@ stages: CosmosEndpoint: $(spark-databricks-cosmos-endpoint) CosmosKey: $(spark-databricks-cosmos-key) DatabricksEndpoint: $(spark-databricks-endpoint-with-msi) - SubscriptionId: '8fba6d4f-7c37-4d13-9063-fd58ad2b86e2' - TenantId: '72f988bf-86f1-41af-91ab-2d7cd011db47' - ResourceGroupName: 'oltp-spark-ci' + SubscriptionId: '4d042dc6-fe17-4698-a23f-ec6a8d1e98f4' + TenantId: '70a036f6-8e4d-4615-bad6-149c02e7720d' + ResourceGroupName: 'SSS3PT_oltp-spark-ci' ClientId: $(spark-databricks-cosmos-spn-clientId) ClientSecret: $(spark-databricks-cosmos-spn-clientSecret) CosmosContainerName: 'sampleContainer6' @@ -104,9 +104,9 @@ stages: CosmosEndpoint: $(spark-databricks-cosmos-endpoint) CosmosKey: 
$(spark-databricks-cosmos-key) DatabricksEndpoint: $(spark-databricks-endpoint-with-msi) - SubscriptionId: '8fba6d4f-7c37-4d13-9063-fd58ad2b86e2' - TenantId: '72f988bf-86f1-41af-91ab-2d7cd011db47' - ResourceGroupName: 'oltp-spark-ci' + SubscriptionId: '4d042dc6-fe17-4698-a23f-ec6a8d1e98f4' + TenantId: '70a036f6-8e4d-4615-bad6-149c02e7720d' + ResourceGroupName: 'SSS3PT_oltp-spark-ci' ClientId: $(spark-databricks-cosmos-spn-clientId) ClientSecret: $(spark-databricks-cosmos-spn-clientSecret) CosmosContainerName: 'sampleContainer7' @@ -127,9 +127,9 @@ stages: CosmosEndpoint: $(spark-databricks-cosmos-endpoint) CosmosKey: $(spark-databricks-cosmos-key) DatabricksEndpoint: $(spark-databricks-endpoint-with-msi) - SubscriptionId: '8fba6d4f-7c37-4d13-9063-fd58ad2b86e2' - TenantId: '72f988bf-86f1-41af-91ab-2d7cd011db47' - ResourceGroupName: 'oltp-spark-ci' + SubscriptionId: '4d042dc6-fe17-4698-a23f-ec6a8d1e98f4' + TenantId: '70a036f6-8e4d-4615-bad6-149c02e7720d' + ResourceGroupName: 'SSS3PT_oltp-spark-ci' ClientId: $(spark-databricks-cosmos-spn-clientId) ClientSecret: $(spark-databricks-cosmos-spn-clientSecret) CosmosContainerName: 'sampleContainer8' diff --git a/sdk/cosmos/tests.yml b/sdk/cosmos/tests.yml index 92bbc0347acb..df40ab53793f 100644 --- a/sdk/cosmos/tests.yml +++ b/sdk/cosmos/tests.yml @@ -8,7 +8,7 @@ extends: parameters: CloudConfig: Public: - ServiceConnection: azure-sdk-tests-cosmos + ServiceConnection: azure-sdk-tests-cosmos-tme MatrixConfigs: - Name: Cosmos_live_test Path: sdk/cosmos/live-platform-matrix.json @@ -40,7 +40,7 @@ extends: TestName: 'Cosmos_Live_Test_Http2' CloudConfig: Public: - ServiceConnection: azure-sdk-tests-cosmos + ServiceConnection: azure-sdk-tests-cosmos-tme MatrixConfigs: - Name: Cosmos_live_test_http2 Path: sdk/cosmos/live-http2-platform-matrix.json @@ -72,7 +72,7 @@ extends: TestName: 'Cosmos_Live_Test_ThinClient' CloudConfig: Public: - ServiceConnection: azure-sdk-tests-cosmos + ServiceConnection: azure-sdk-tests-cosmos-tme 
MatrixConfigs: - Name: Cosmos_live_test_thinclient Path: sdk/cosmos/live-thinclient-platform-matrix.json @@ -104,7 +104,7 @@ extends: TestName: 'Cosmos_Live_Test_ThinClient_MultiRegion' CloudConfig: Public: - ServiceConnection: azure-sdk-tests-cosmos + ServiceConnection: azure-sdk-tests-cosmos-tme MatrixConfigs: - Name: Cosmos_live_test_thinclient_multiregion Path: sdk/cosmos/live-thinclient-multi-region-platform-matrix.json @@ -136,7 +136,7 @@ extends: TestName: 'Cosmos_Live_Test_ThinClient_MultiMaster' CloudConfig: Public: - ServiceConnection: azure-sdk-tests-cosmos + ServiceConnection: azure-sdk-tests-cosmos-tme MatrixConfigs: - Name: Cosmos_live_test_thinclient_multimaster Path: sdk/cosmos/live-thinclient-multi-master-platform-matrix.json @@ -168,7 +168,7 @@ extends: TestName: 'Spring_Data_Cosmos_Integration' CloudConfig: Public: - ServiceConnection: azure-sdk-tests-cosmos + ServiceConnection: azure-sdk-tests-cosmos-tme MatrixConfigs: - Name: Cosmos_live_test_integration Path: sdk/spring/pipeline/cosmos-integration-matrix.json @@ -193,7 +193,7 @@ extends: TestName: 'Kafka_Cosmos_Integration' CloudConfig: Public: - ServiceConnection: azure-sdk-tests-cosmos + ServiceConnection: azure-sdk-tests-cosmos-tme MatrixConfigs: - Name: Kafka_Cosmos_Integration_Test Path: sdk/cosmos/kafka-cosmos-matrix.json From 0e5042d51b12e42290290c6480473b23bf72c9c0 Mon Sep 17 00:00:00 2001 From: tvaron3 Date: Tue, 21 Apr 2026 09:58:44 -0700 Subject: [PATCH 8/9] [Cosmos] Update Spark SubscriptionId to migrated TME subscription Co-authored-by: Copilot <223556219+Copilot@users.noreply.github.com> --- sdk/cosmos/spark.yml | 12 ++++++------ 1 file changed, 6 insertions(+), 6 deletions(-) diff --git a/sdk/cosmos/spark.yml b/sdk/cosmos/spark.yml index 1bf5c9deec30..6e14f125d79c 100644 --- a/sdk/cosmos/spark.yml +++ b/sdk/cosmos/spark.yml @@ -14,7 +14,7 @@ stages: CosmosEndpoint: $(spark-databricks-cosmos-endpoint) CosmosKey: $(spark-databricks-cosmos-key) DatabricksEndpoint: 
$(spark-databricks-endpoint-with-msi) - SubscriptionId: '4d042dc6-fe17-4698-a23f-ec6a8d1e98f4' + SubscriptionId: '7cc44528-8730-4b11-ae30-5738d333ff2d' TenantId: '70a036f6-8e4d-4615-bad6-149c02e7720d' ResourceGroupName: 'SSS3PT_oltp-spark-ci' ClientId: $(spark-databricks-cosmos-spn-clientId) @@ -36,7 +36,7 @@ stages: CosmosEndpoint: $(spark-databricks-cosmos-endpoint) CosmosKey: $(spark-databricks-cosmos-key) DatabricksEndpoint: $(spark-databricks-endpoint-with-msi) - SubscriptionId: '4d042dc6-fe17-4698-a23f-ec6a8d1e98f4' + SubscriptionId: '7cc44528-8730-4b11-ae30-5738d333ff2d' TenantId: '70a036f6-8e4d-4615-bad6-149c02e7720d' ResourceGroupName: 'SSS3PT_oltp-spark-ci' ClientId: $(spark-databricks-cosmos-spn-clientId) @@ -58,7 +58,7 @@ stages: CosmosEndpoint: $(spark-databricks-cosmos-endpoint) CosmosKey: $(spark-databricks-cosmos-key) DatabricksEndpoint: $(spark-databricks-endpoint-with-msi) - SubscriptionId: '4d042dc6-fe17-4698-a23f-ec6a8d1e98f4' + SubscriptionId: '7cc44528-8730-4b11-ae30-5738d333ff2d' TenantId: '70a036f6-8e4d-4615-bad6-149c02e7720d' ResourceGroupName: 'SSS3PT_oltp-spark-ci' ClientId: $(spark-databricks-cosmos-spn-clientId) @@ -81,7 +81,7 @@ stages: CosmosEndpoint: $(spark-databricks-cosmos-endpoint) CosmosKey: $(spark-databricks-cosmos-key) DatabricksEndpoint: $(spark-databricks-endpoint-with-msi) - SubscriptionId: '4d042dc6-fe17-4698-a23f-ec6a8d1e98f4' + SubscriptionId: '7cc44528-8730-4b11-ae30-5738d333ff2d' TenantId: '70a036f6-8e4d-4615-bad6-149c02e7720d' ResourceGroupName: 'SSS3PT_oltp-spark-ci' ClientId: $(spark-databricks-cosmos-spn-clientId) @@ -104,7 +104,7 @@ stages: CosmosEndpoint: $(spark-databricks-cosmos-endpoint) CosmosKey: $(spark-databricks-cosmos-key) DatabricksEndpoint: $(spark-databricks-endpoint-with-msi) - SubscriptionId: '4d042dc6-fe17-4698-a23f-ec6a8d1e98f4' + SubscriptionId: '7cc44528-8730-4b11-ae30-5738d333ff2d' TenantId: '70a036f6-8e4d-4615-bad6-149c02e7720d' ResourceGroupName: 'SSS3PT_oltp-spark-ci' ClientId: 
$(spark-databricks-cosmos-spn-clientId) @@ -127,7 +127,7 @@ stages: CosmosEndpoint: $(spark-databricks-cosmos-endpoint) CosmosKey: $(spark-databricks-cosmos-key) DatabricksEndpoint: $(spark-databricks-endpoint-with-msi) - SubscriptionId: '4d042dc6-fe17-4698-a23f-ec6a8d1e98f4' + SubscriptionId: '7cc44528-8730-4b11-ae30-5738d333ff2d' TenantId: '70a036f6-8e4d-4615-bad6-149c02e7720d' ResourceGroupName: 'SSS3PT_oltp-spark-ci' ClientId: $(spark-databricks-cosmos-spn-clientId) From eefdfcfdb628f525223b13c8a46442026b7baad6 Mon Sep 17 00:00:00 2001 From: tvaron3 Date: Tue, 21 Apr 2026 16:21:27 -0700 Subject: [PATCH 9/9] Migrate Spring Cosmos live test stages to TME service connection Override CloudConfig.Public.ServiceConnection to azure-sdk-tests-cosmos-tme for the IT_Cosmos and Spring_Data_Cosmos_Integration stages in sdk/spring/tests.yml. Thread a CloudConfig passthrough parameter through tests-supported-spring-versions-template.yml and tests-supported-spring-versions-filter-template.yml so the override reaches archetype-sdk-tests-isolated.yml. Defaults are unchanged so non-cosmos Spring stages (AppConfig, ServiceBus, EventHubs_Storage, KeyVault, AppConfig_IT) continue to use their current service connections. 
Co-authored-by: Copilot <223556219+Copilot@users.noreply.github.com> --- .../tests-supported-spring-versions-filter-template.yml | 6 ++++++ .../pipeline/tests-supported-spring-versions-template.yml | 6 ++++++ sdk/spring/tests.yml | 6 ++++++ 3 files changed, 18 insertions(+) diff --git a/sdk/spring/pipeline/tests-supported-spring-versions-filter-template.yml b/sdk/spring/pipeline/tests-supported-spring-versions-filter-template.yml index 6a615c3f8c46..dc30812ac825 100644 --- a/sdk/spring/pipeline/tests-supported-spring-versions-filter-template.yml +++ b/sdk/spring/pipeline/tests-supported-spring-versions-filter-template.yml @@ -14,6 +14,11 @@ parameters: - name: TestResourceDirectories type: object default: [] + - name: CloudConfig + type: object + default: + Public: + Preview: stages: - template: /sdk/spring/pipeline/tests-supported-spring-versions-template.yml @@ -22,6 +27,7 @@ stages: TimeoutInMinutes: ${{ parameters.TimeoutInMinutes }} TestOptions: ' ${{ parameters.TestOptions }} -Dit.test=${{ parameters.TestClassPatterns }} ' TestResourceDirectories: ${{ parameters.TestResourceDirectories }} + CloudConfig: ${{ parameters.CloudConfig }} Artifacts: - name: spring-cloud-azure-integration-tests groupId: com.azure.spring diff --git a/sdk/spring/pipeline/tests-supported-spring-versions-template.yml b/sdk/spring/pipeline/tests-supported-spring-versions-template.yml index 4578041b21e9..c4bfa733f2c9 100644 --- a/sdk/spring/pipeline/tests-supported-spring-versions-template.yml +++ b/sdk/spring/pipeline/tests-supported-spring-versions-template.yml @@ -35,6 +35,11 @@ parameters: - name: MatrixConfigFileName type: string default: '' + - name: CloudConfig + type: object + default: + Public: + Preview: stages: - template: /eng/pipelines/templates/stages/archetype-sdk-tests-isolated.yml @@ -49,6 +54,7 @@ stages: TestGoals: "clean verify" TestOptions: ${{ parameters.TestOptions }} MatrixConfigs: ${{ parameters.MatrixConfigs }} + CloudConfig: ${{ parameters.CloudConfig }} 
PreGenerationSteps: - script: | python -m pip install termcolor diff --git a/sdk/spring/tests.yml b/sdk/spring/tests.yml index 506ed7c2bef4..164a3a9bfb58 100644 --- a/sdk/spring/tests.yml +++ b/sdk/spring/tests.yml @@ -16,6 +16,9 @@ extends: TestClassPatterns: com.azure.spring.cloud.integration.tests.cosmos.*IT,com.azure.spring.cloud.integration.tests.cosmos.*.*IT TestResourceDirectories: - spring/spring-cloud-azure-integration-tests/test-resources/cosmos + CloudConfig: + Public: + ServiceConnection: azure-sdk-tests-cosmos-tme - template: /sdk/spring/pipeline/tests-supported-spring-versions-filter-template.yml parameters: TestName: IT_ServiceBus @@ -67,3 +70,6 @@ extends: # -U forces Maven to re-check remote repositories for failed artifact downloads, # preventing cached "Connection reset" errors from blocking the From Source build. BuildOptions: '-Denforcer.skip=true -U' + CloudConfig: + Public: + ServiceConnection: azure-sdk-tests-cosmos-tme