From 6fdc07d53bedea9eec16ab4b16b9d441bfbeaeab Mon Sep 17 00:00:00 2001 From: Fabian Meiswinkel Date: Thu, 26 Feb 2026 11:19:09 +0000 Subject: [PATCH 1/7] Allowing audience/scope for management endpoint to be specified in non-public clouds --- .../azure-cosmos-spark_3-4_2-12/pom.xml | 2 +- .../azure-cosmos-spark_3/docs/AAD-Auth.md | 11 ++++---- .../docs/configuration-reference.md | 25 ++++++++++--------- .../cosmos/spark/CosmosClientCache.scala | 4 ++- .../com/azure/cosmos/spark/CosmosConfig.scala | 22 +++++++++++++--- 5 files changed, 42 insertions(+), 22 deletions(-) diff --git a/sdk/cosmos/azure-cosmos-spark_3-4_2-12/pom.xml b/sdk/cosmos/azure-cosmos-spark_3-4_2-12/pom.xml index dcbfb2becff7..e4aaf3816a6c 100644 --- a/sdk/cosmos/azure-cosmos-spark_3-4_2-12/pom.xml +++ b/sdk/cosmos/azure-cosmos-spark_3-4_2-12/pom.xml @@ -11,7 +11,7 @@ com.azure.cosmos.spark azure-cosmos-spark_3-4_2-12 - 4.43.1 + 4.43.2-private.1 jar https://github.com/Azure/azure-sdk-for-java/tree/main/sdk/cosmos/azure-cosmos-spark_3-4_2-12 OLTP Spark 3.4 Connector for Azure Cosmos DB SQL API diff --git a/sdk/cosmos/azure-cosmos-spark_3/docs/AAD-Auth.md b/sdk/cosmos/azure-cosmos-spark_3/docs/AAD-Auth.md index e042cdd1da11..6c17a212e3e5 100644 --- a/sdk/cosmos/azure-cosmos-spark_3/docs/AAD-Auth.md +++ b/sdk/cosmos/azure-cosmos-spark_3/docs/AAD-Auth.md @@ -34,12 +34,13 @@ To enable managed identity support out-of-the-box, the Spark environment needs t | `spark.cosmos.account.resourceGroupName` | None | The simple resource group name (not the full qualified one) of the Azure Cosmos DB account resource specified under `spark.cosmos.accountEndpoint`. This parameter is required for all management operations when using AAD / Microsoft Entra ID authentication. 
| #### Non-public clouds -For non-public clouds the `spark.cosmos.account.azureEnvironment` config value need to be set to `Custom`and the config entries `spark.cosmos.account.azureEnvironment.management` and `spark.cosmos.account.azureEnvironment.aad` have to be specified to the correct values for the non-public cloud. +For non-public clouds the `spark.cosmos.account.azureEnvironment` config value needs to be set to `Custom` and the config entries `spark.cosmos.account.azureEnvironment.management`, `spark.cosmos.account.azureEnvironment.management.scope` and `spark.cosmos.account.azureEnvironment.aad` have to be specified to the correct values for the non-public cloud. -| Config Property Name                                | Default | Description                                                                                                                                                 | -|:---------------------------------------------------|:--------|:----------------------------------------------------------------------------------------------------------------------------------------------------------| -| `spark.cosmos.account.azureEnvironment.management` | None    | The Uri of the ARM (Resource Manager) endpoint in the custom cloud - e.g. the corresponding value to `https://management.azure.com/` in the public cloud.  | -| `spark.cosmos.account.azureEnvironment.aad`        | None    | The Uri of the AAD endpoint in the custom cloud - e.g. the corresponding value to `https://login.microsoftonline.com/` in the public cloud.               | +| Config Property Name                                      | Default | Description                                                                                                                                                                                      | +|:---------------------------------------------------------|:--------|:-------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------| +| `spark.cosmos.account.azureEnvironment.management`        | None    | The Uri of the ARM (Resource Manager) endpoint in the custom cloud - e.g. the corresponding value to `https://management.azure.com/` in the public cloud. 
| +| `spark.cosmos.account.azureEnvironment.management.scope` | None | The scope/audience for authenticating to the ARM (Resource Manager) endpoint in the custom cloud - e.g. the corresponding value to `https://management.core.windows.net/` in the public cloud. | +| `spark.cosmos.account.azureEnvironment.aad` | None | The Uri of the AAD endpoint in the custom cloud - e.g. the corresponding value to `https://login.microsoftonline.com/` in the public cloud. | #### Environment variables or system properties diff --git a/sdk/cosmos/azure-cosmos-spark_3/docs/configuration-reference.md b/sdk/cosmos/azure-cosmos-spark_3/docs/configuration-reference.md index ada88ec487bd..166d0a0c9f75 100644 --- a/sdk/cosmos/azure-cosmos-spark_3/docs/configuration-reference.md +++ b/sdk/cosmos/azure-cosmos-spark_3/docs/configuration-reference.md @@ -2,18 +2,19 @@ ## Generic Configuration -| Config Property Name | Default | Description | -|:---------------------------------------------------|:--------|:----------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------| -| `spark.cosmos.accountEndpoint` | None | Cosmos DB Account Endpoint Uri | -| `spark.cosmos.accountKey` | None | Cosmos DB Account Key | -| `spark.cosmos.database` | None | Cosmos DB database name | -| `spark.cosmos.container` | None | Cosmos DB container name | -| `spark.cosmos.account.subscriptionId` | None | The subscriptionId of the Cosmos DB account. Required for `ServicePrincipal` authentication. | -| `spark.cosmos.account.tenantId` | None | The tenantId of the Cosmos DB account. Required for `ServicePrincipal` authentication. | -| `spark.cosmos.account.resourceGroupName` | None | The resource group of the Cosmos DB account. 
Required for `ServicePrincipal` authentication. | -| `spark.cosmos.account.azureEnvironment` | `Azure` | The azure environment of the Cosmos DB account: `Azure`, `AzureChina`, `AzureUsGovernment`, `AzureGermany` or `Custom` - when using `Custom` (only needed for non-public clouds) the config entries `spark.cosmos.account.azureEnvironment.management` and `spark.cosmos.account.azureEnvironment.aad` have to also be specified. | -| `spark.cosmos.account.azureEnvironment.management` | None | The Uri of the ARM (Resource Manager) endpoint in the custom cloud - e.g. the corresponding value to `https://management.azure.com/` in the public cloud. | -| `spark.cosmos.account.azureEnvironment.aad` | None | The Uri of the AAD endpoint in the custom cloud - e.g. the corresponding value to `https://login.microsoftonline.com/` in the public cloud. | +| Config Property Name | Default | Description | +|:---------------------------------------------------------|:--------|:--------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------| +| `spark.cosmos.accountEndpoint` | None | Cosmos DB Account Endpoint Uri | +| `spark.cosmos.accountKey` | None | Cosmos DB Account Key | +| `spark.cosmos.database` | None | Cosmos DB database name | +| `spark.cosmos.container` | None | Cosmos DB container name | +| `spark.cosmos.account.subscriptionId` | None | The subscriptionId of the Cosmos DB account. Required for `ServicePrincipal` authentication. | +| `spark.cosmos.account.tenantId` | None | The tenantId of the Cosmos DB account. Required for `ServicePrincipal` authentication. | +| `spark.cosmos.account.resourceGroupName` | None | The resource group of the Cosmos DB account. 
Required for `ServicePrincipal` authentication. | +| `spark.cosmos.account.azureEnvironment` | `Azure` | The azure environment of the Cosmos DB account: `Azure`, `AzureChina`, `AzureUsGovernment`, `AzureGermany` or `Custom` - when using `Custom` (only needed for non-public clouds) the config entries `spark.cosmos.account.azureEnvironment.management`, `spark.cosmos.account.azureEnvironment.management.scope` and `spark.cosmos.account.azureEnvironment.aad` have to also be specified. | +| `spark.cosmos.account.azureEnvironment.management` | None | The Uri of the ARM (Resource Manager) endpoint in the custom cloud - e.g. the corresponding value to `https://management.azure.com/` in the public cloud. | +| `spark.cosmos.account.azureEnvironment.management.scope` | None | The scope/audience for authenticating to the ARM (Resource Manager) endpoint in the custom cloud - e.g. the corresponding value to `https://management.core.windows.net/` in the public cloud. | +| `spark.cosmos.account.azureEnvironment.aad` | None | The Uri of the AAD endpoint in the custom cloud - e.g. the corresponding value to `https://login.microsoftonline.com/` in the public cloud. 
| ### AAD Auth Config | Config Property Name | Default | Description | diff --git a/sdk/cosmos/azure-cosmos-spark_3/src/main/scala/com/azure/cosmos/spark/CosmosClientCache.scala b/sdk/cosmos/azure-cosmos-spark_3/src/main/scala/com/azure/cosmos/spark/CosmosClientCache.scala index e61a271aeb8b..3b87ef08c3a0 100644 --- a/sdk/cosmos/azure-cosmos-spark_3/src/main/scala/com/azure/cosmos/spark/CosmosClientCache.scala +++ b/sdk/cosmos/azure-cosmos-spark_3/src/main/scala/com/azure/cosmos/spark/CosmosClientCache.scala @@ -993,9 +993,11 @@ private[spark] object CosmosClientCache extends BasicLoggingTrait { private[this] class CosmosAccessTokenCredential(val tokenProvider: List[String] =>CosmosAccessToken) extends TokenCredential { override def getToken(tokenRequestContext: TokenRequestContext): Mono[AccessToken] = { + val scopes = tokenRequestContext.getScopes.asScala.toList + logDebug(s"CosmosAccessTokenCredential:getToken(${scopes.mkString(", ")} - Callstack = ${Thread.currentThread().getStackTrace.mkString("\n")})") val returnValue: Mono[AccessToken] = Mono.fromCallable(() => { val token = tokenProvider - .apply(tokenRequestContext.getScopes.asScala.toList) + .apply(scopes) new AccessToken(token.token, token.Offset) }) diff --git a/sdk/cosmos/azure-cosmos-spark_3/src/main/scala/com/azure/cosmos/spark/CosmosConfig.scala b/sdk/cosmos/azure-cosmos-spark_3/src/main/scala/com/azure/cosmos/spark/CosmosConfig.scala index eef3f6ae1f8d..951f4735444d 100644 --- a/sdk/cosmos/azure-cosmos-spark_3/src/main/scala/com/azure/cosmos/spark/CosmosConfig.scala +++ b/sdk/cosmos/azure-cosmos-spark_3/src/main/scala/com/azure/cosmos/spark/CosmosConfig.scala @@ -52,6 +52,7 @@ private[spark] object CosmosConfigNames { val AzureEnvironment = "spark.cosmos.account.azureEnvironment" val AzureEnvironmentAAD = "spark.cosmos.account.azureEnvironment.aad" val AzureEnvironmentManagement = "spark.cosmos.account.azureEnvironment.management" + val AzureEnvironmentManagementScope = 
"spark.cosmos.account.azureEnvironment.management.scope" val AuthType = "spark.cosmos.auth.type" val ClientId = "spark.cosmos.auth.aad.clientId" val ResourceId = "spark.cosmos.auth.aad.resourceId" @@ -192,6 +193,7 @@ private[spark] object CosmosConfigNames { AzureEnvironment, AzureEnvironmentAAD, AzureEnvironmentManagement, + AzureEnvironmentManagementScope, Database, Container, PreferredRegionsList, @@ -688,6 +690,12 @@ private object CosmosAccountConfig extends BasicLoggingTrait { parseFromStringFunction = managementUri => managementUri, helpMessage = "The ARM management endpoint to be used when selecting AzureEnvironment `Custom`.") + private val AzureEnvironmentManagementScope = CosmosConfigEntry[String](key = CosmosConfigNames.AzureEnvironmentManagementScope, + defaultValue = None, + mandatory = false, + parseFromStringFunction = scope => scope, + helpMessage = "The audience/scope for the ARM management endpoint to be used when selecting AzureEnvironment `Custom`.") + private val AzureEnvironmentAadUri = CosmosConfigEntry[String](key = CosmosConfigNames.AzureEnvironmentAAD, defaultValue = None, mandatory = false, @@ -767,15 +775,23 @@ private object CosmosAccountConfig extends BasicLoggingTrait { && "Custom".equalsIgnoreCase(kvp._2))) { val endpoints: util.Map[String, String] = new util.HashMap[String, String]() - val mgmtEndpoint = CosmosConfigEntry.parse(cfg, AzureEnvironmentManagementUri) - if (mgmtEndpoint.isDefined) { - endpoints.put("resourceManagerEndpointUrl", mgmtEndpoint.get) + val resMgrEndpoint = CosmosConfigEntry.parse(cfg, AzureEnvironmentManagementUri) + if (resMgrEndpoint.isDefined) { + endpoints.put("resourceManagerEndpointUrl", resMgrEndpoint.get) } else { throw new IllegalArgumentException( s"The configuration '${CosmosConfigNames.AzureEnvironmentManagement}' is required when " + "choosing AzureEnvironment 'Custom'.") } + val mgmtScope = CosmosConfigEntry.parse(cfg, AzureEnvironmentManagementScope) + if (mgmtScope.isDefined) { + 
endpoints.put("managementEndpointUrl", mgmtScope.get) + } else { + logError(s"The configuration '${CosmosConfigNames.AzureEnvironmentManagementScope}' is missing. " + + "This config is required for Spark catalog integration when choosing AzureEnvironment 'Custom'.") + } + val aadEndpoint = CosmosConfigEntry.parse(cfg, AzureEnvironmentAadUri) if (aadEndpoint.isDefined) { endpoints.put("activeDirectoryEndpointUrl", aadEndpoint.get) From 06914b5a6ee4c4ceaf91865f347825ae39c7108f Mon Sep 17 00:00:00 2001 From: Fabian Meiswinkel Date: Thu, 26 Feb 2026 12:20:19 +0000 Subject: [PATCH 2/7] Update pom.xml --- sdk/cosmos/azure-cosmos-spark_3-4_2-12/pom.xml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/sdk/cosmos/azure-cosmos-spark_3-4_2-12/pom.xml b/sdk/cosmos/azure-cosmos-spark_3-4_2-12/pom.xml index e4aaf3816a6c..bc714ebb37de 100644 --- a/sdk/cosmos/azure-cosmos-spark_3-4_2-12/pom.xml +++ b/sdk/cosmos/azure-cosmos-spark_3-4_2-12/pom.xml @@ -11,7 +11,7 @@ com.azure.cosmos.spark azure-cosmos-spark_3-4_2-12 - 4.43.2-private.1 + 4.44.0.beta.1 jar https://github.com/Azure/azure-sdk-for-java/tree/main/sdk/cosmos/azure-cosmos-spark_3-4_2-12 OLTP Spark 3.4 Connector for Azure Cosmos DB SQL API From 642477490dd1c31e30f092b7b21c8c3e562d3794 Mon Sep 17 00:00:00 2001 From: Fabian Meiswinkel Date: Thu, 26 Feb 2026 12:25:36 +0000 Subject: [PATCH 3/7] Changelogs --- sdk/cosmos/azure-cosmos-spark_3-3_2-12/CHANGELOG.md | 1 + sdk/cosmos/azure-cosmos-spark_3-4_2-12/CHANGELOG.md | 1 + sdk/cosmos/azure-cosmos-spark_3-5_2-12/CHANGELOG.md | 1 + sdk/cosmos/azure-cosmos-spark_3-5_2-13/CHANGELOG.md | 1 + sdk/cosmos/azure-cosmos-spark_4-0_2-13/CHANGELOG.md | 1 + 5 files changed, 5 insertions(+) diff --git a/sdk/cosmos/azure-cosmos-spark_3-3_2-12/CHANGELOG.md b/sdk/cosmos/azure-cosmos-spark_3-3_2-12/CHANGELOG.md index 570ebca9aaa9..a0669c8a88c2 100644 --- a/sdk/cosmos/azure-cosmos-spark_3-3_2-12/CHANGELOG.md +++ b/sdk/cosmos/azure-cosmos-spark_3-3_2-12/CHANGELOG.md @@ -3,6 +3,7 @@ 
### 4.44.0-beta.1 (Unreleased) #### Features Added +* Added config entry `spark.cosmos.account.azureEnvironment.management.scope` to allow specifying the Entra ID scope/audience to be used when retrieving tokens to authenticate against the ARM/management endpoint of non-public clouds. - See [PR 48137](https://github.com/Azure/azure-sdk-for-java/pull/48137) #### Breaking Changes diff --git a/sdk/cosmos/azure-cosmos-spark_3-4_2-12/CHANGELOG.md b/sdk/cosmos/azure-cosmos-spark_3-4_2-12/CHANGELOG.md index e06c4d64c10f..4a79e927e2f7 100644 --- a/sdk/cosmos/azure-cosmos-spark_3-4_2-12/CHANGELOG.md +++ b/sdk/cosmos/azure-cosmos-spark_3-4_2-12/CHANGELOG.md @@ -3,6 +3,7 @@ ### 4.44.0-beta.1 (Unreleased) #### Features Added +* Added config entry `spark.cosmos.account.azureEnvironment.management.scope` to allow specifying the Entra ID scope/audience to be used when retrieving tokens to authenticate against the ARM/management endpoint of non-public clouds. - See [PR 48137](https://github.com/Azure/azure-sdk-for-java/pull/48137) #### Breaking Changes diff --git a/sdk/cosmos/azure-cosmos-spark_3-5_2-12/CHANGELOG.md b/sdk/cosmos/azure-cosmos-spark_3-5_2-12/CHANGELOG.md index f818770efd17..953e54ce9c42 100644 --- a/sdk/cosmos/azure-cosmos-spark_3-5_2-12/CHANGELOG.md +++ b/sdk/cosmos/azure-cosmos-spark_3-5_2-12/CHANGELOG.md @@ -3,6 +3,7 @@ ### 4.44.0-beta.1 (Unreleased) #### Features Added +* Added config entry `spark.cosmos.account.azureEnvironment.management.scope` to allow specifying the Entra ID scope/audience to be used when retrieving tokens to authenticate against the ARM/management endpoint of non-public clouds. 
- See [PR 48137](https://github.com/Azure/azure-sdk-for-java/pull/48137) #### Breaking Changes diff --git a/sdk/cosmos/azure-cosmos-spark_3-5_2-13/CHANGELOG.md b/sdk/cosmos/azure-cosmos-spark_3-5_2-13/CHANGELOG.md index 9c689123f5a9..bb089a7289f0 100644 --- a/sdk/cosmos/azure-cosmos-spark_3-5_2-13/CHANGELOG.md +++ b/sdk/cosmos/azure-cosmos-spark_3-5_2-13/CHANGELOG.md @@ -3,6 +3,7 @@ ### 4.44.0-beta.1 (Unreleased) #### Features Added +* Added config entry `spark.cosmos.account.azureEnvironment.management.scope` to allow specifying the Entra ID scope/audience to be used when retrieving tokens to authenticate against the ARM/management endpoint of non-public clouds. - See [PR 48137](https://github.com/Azure/azure-sdk-for-java/pull/48137) #### Breaking Changes diff --git a/sdk/cosmos/azure-cosmos-spark_4-0_2-13/CHANGELOG.md b/sdk/cosmos/azure-cosmos-spark_4-0_2-13/CHANGELOG.md index e3201f066f52..d03d8132d287 100644 --- a/sdk/cosmos/azure-cosmos-spark_4-0_2-13/CHANGELOG.md +++ b/sdk/cosmos/azure-cosmos-spark_4-0_2-13/CHANGELOG.md @@ -3,6 +3,7 @@ ### 4.44.0-beta.1 (Unreleased) #### Features Added +* Added config entry `spark.cosmos.account.azureEnvironment.management.scope` to allow specifying the Entra ID scope/audience to be used when retrieving tokens to authenticate against the ARM/management endpoint of non-public clouds. 
- See [PR 48137](https://github.com/Azure/azure-sdk-for-java/pull/48137) #### Breaking Changes From 68c7df91d787a9b708bf8998dfd9ea2fca7cc71f Mon Sep 17 00:00:00 2001 From: Fabian Meiswinkel Date: Thu, 26 Feb 2026 12:50:27 +0000 Subject: [PATCH 4/7] Update pom.xml --- sdk/cosmos/azure-cosmos-spark_3-4_2-12/pom.xml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/sdk/cosmos/azure-cosmos-spark_3-4_2-12/pom.xml b/sdk/cosmos/azure-cosmos-spark_3-4_2-12/pom.xml index bc714ebb37de..dd8eb6dd43d3 100644 --- a/sdk/cosmos/azure-cosmos-spark_3-4_2-12/pom.xml +++ b/sdk/cosmos/azure-cosmos-spark_3-4_2-12/pom.xml @@ -11,7 +11,7 @@ com.azure.cosmos.spark azure-cosmos-spark_3-4_2-12 - 4.44.0.beta.1 + 4.44.0-beta.1 jar https://github.com/Azure/azure-sdk-for-java/tree/main/sdk/cosmos/azure-cosmos-spark_3-4_2-12 OLTP Spark 3.4 Connector for Azure Cosmos DB SQL API From f05a5d98eef0bb9223c3ce38592afc10f5b3c383 Mon Sep 17 00:00:00 2001 From: Fabian Meiswinkel Date: Fri, 27 Feb 2026 11:36:01 +0000 Subject: [PATCH 5/7] azure-cosmos-spark release 4.44.0 --- eng/versioning/version_client.txt | 10 +++++----- .../pom.xml | 4 ++-- sdk/cosmos/azure-cosmos-spark_3-3_2-12/CHANGELOG.md | 8 +------- sdk/cosmos/azure-cosmos-spark_3-3_2-12/README.md | 9 +++++++-- sdk/cosmos/azure-cosmos-spark_3-3_2-12/pom.xml | 2 +- sdk/cosmos/azure-cosmos-spark_3-4_2-12/CHANGELOG.md | 8 +------- sdk/cosmos/azure-cosmos-spark_3-4_2-12/README.md | 9 +++++++-- sdk/cosmos/azure-cosmos-spark_3-5_2-12/CHANGELOG.md | 8 +------- sdk/cosmos/azure-cosmos-spark_3-5_2-12/README.md | 9 +++++++-- sdk/cosmos/azure-cosmos-spark_3-5_2-12/pom.xml | 2 +- sdk/cosmos/azure-cosmos-spark_3-5_2-13/CHANGELOG.md | 8 +------- sdk/cosmos/azure-cosmos-spark_3-5_2-13/README.md | 9 +++++++-- sdk/cosmos/azure-cosmos-spark_3-5_2-13/pom.xml | 2 +- sdk/cosmos/azure-cosmos-spark_3/docs/quick-start.md | 10 +++++----- sdk/cosmos/azure-cosmos-spark_4-0_2-13/CHANGELOG.md | 8 +------- sdk/cosmos/azure-cosmos-spark_4-0_2-13/README.md | 9 
+++++++-- sdk/cosmos/azure-cosmos-spark_4-0_2-13/pom.xml | 2 +- sdk/cosmos/fabric-cosmos-spark-auth_3/pom.xml | 4 ++-- 18 files changed, 58 insertions(+), 63 deletions(-) diff --git a/eng/versioning/version_client.txt b/eng/versioning/version_client.txt index ee22f35c629d..0ae96d1cd2d4 100644 --- a/eng/versioning/version_client.txt +++ b/eng/versioning/version_client.txt @@ -112,11 +112,11 @@ com.azure.cosmos.spark:azure-cosmos-spark_3-5;0.0.1-beta.1;0.0.1-beta.1 com.azure:azure-cosmos-encryption;2.27.0;2.28.0-beta.1 com.azure.cosmos.spark:azure-cosmos-spark-account-data-resolver-sample;1.0.0-beta.1;1.0.0-beta.1 com.azure:azure-cosmos-test;1.0.0-beta.17;1.0.0-beta.18 -com.azure.cosmos.spark:azure-cosmos-spark_3-3_2-12;4.43.1;4.44.0-beta.1 -com.azure.cosmos.spark:azure-cosmos-spark_3-4_2-12;4.43.1;4.44.0-beta.1 -com.azure.cosmos.spark:azure-cosmos-spark_3-5_2-12;4.43.1;4.44.0-beta.1 -com.azure.cosmos.spark:azure-cosmos-spark_3-5_2-13;4.43.1;4.44.0-beta.1 -com.azure.cosmos.spark:azure-cosmos-spark_4-0_2-13;4.43.1;4.44.0-beta.1 +com.azure.cosmos.spark:azure-cosmos-spark_3-3_2-12;4.43.1;4.44.0 +com.azure.cosmos.spark:azure-cosmos-spark_3-4_2-12;4.43.1;4.44.0 +com.azure.cosmos.spark:azure-cosmos-spark_3-5_2-12;4.43.1;4.44.0 +com.azure.cosmos.spark:azure-cosmos-spark_3-5_2-13;4.43.1;4.44.0 +com.azure.cosmos.spark:azure-cosmos-spark_4-0_2-13;4.43.1;4.44.0 com.azure.cosmos.spark:fabric-cosmos-spark-auth_3;1.1.0;1.2.0-beta.1 com.azure:azure-cosmos-tests;1.0.0-beta.1;1.0.0-beta.1 com.azure:azure-data-appconfiguration;1.9.1;1.10.0-beta.1 diff --git a/sdk/cosmos/azure-cosmos-spark-account-data-resolver-sample/pom.xml b/sdk/cosmos/azure-cosmos-spark-account-data-resolver-sample/pom.xml index e11416c36419..a6b07e7c1af7 100644 --- a/sdk/cosmos/azure-cosmos-spark-account-data-resolver-sample/pom.xml +++ b/sdk/cosmos/azure-cosmos-spark-account-data-resolver-sample/pom.xml @@ -97,7 +97,7 @@ com.azure.cosmos.spark azure-cosmos-spark_3-5_2-12 - 4.44.0-beta.1 + 4.44.0 provided @@ 
-290,7 +290,7 @@ com.fasterxml.jackson.core:jackson-databind:[2.18.4] com.fasterxml.jackson.module:jackson-module-scala_2.12:[2.18.4] com.globalmentor:hadoop-bare-naked-local-fs:[0.1.0] - com.azure.cosmos.spark:azure-cosmos-spark_3-5_2-12:[4.44.0-beta.1] + com.azure.cosmos.spark:azure-cosmos-spark_3-5_2-12:[4.44.0] diff --git a/sdk/cosmos/azure-cosmos-spark_3-3_2-12/CHANGELOG.md b/sdk/cosmos/azure-cosmos-spark_3-3_2-12/CHANGELOG.md index a0669c8a88c2..a7aa548f3d55 100644 --- a/sdk/cosmos/azure-cosmos-spark_3-3_2-12/CHANGELOG.md +++ b/sdk/cosmos/azure-cosmos-spark_3-3_2-12/CHANGELOG.md @@ -1,16 +1,10 @@ ## Release History -### 4.44.0-beta.1 (Unreleased) +### 4.44.0 (2026-02-27) #### Features Added * Added config entry `spark.cosmos.account.azureEnvironment.management.scope` to allow specifying the Entra ID scope/audience to be used when retrieving tokens to authenticate against the ARM/management endpoint of non-public clouds. - See [PR 48137](https://github.com/Azure/azure-sdk-for-java/pull/48137) -#### Breaking Changes - -#### Bugs Fixed - -#### Other Changes - ### 4.43.1 (2026-02-25) #### Bugs Fixed diff --git a/sdk/cosmos/azure-cosmos-spark_3-3_2-12/README.md b/sdk/cosmos/azure-cosmos-spark_3-3_2-12/README.md index 417946d82c2e..868bcb996b4e 100644 --- a/sdk/cosmos/azure-cosmos-spark_3-3_2-12/README.md +++ b/sdk/cosmos/azure-cosmos-spark_3-3_2-12/README.md @@ -28,6 +28,7 @@ https://github.com/Azure/azure-sdk-for-java/issues/new #### azure-cosmos-spark_3-3_2-12 | Connector | Supported Spark Versions | Supported JVM Versions | Supported Scala Versions | Supported Databricks Runtimes | |-----------|--------------------------|------------------------|--------------------------|-------------------------------| +| 4.44.0 | 3.3.0 - 3.3.2 | [8, 11] | 2.12 | 11.\*, 12.\* | | 4.43.1 | 3.3.0 - 3.3.2 | [8, 11] | 2.12 | 11.\*, 12.\* | | 4.43.0 | 3.3.0 - 3.3.2 | [8, 11] | 2.12 | 11.\*, 12.\* | | 4.42.0 | 3.3.0 - 3.3.2 | [8, 11] | 2.12 | 11.\*, 12.\* | @@ -79,6 +80,7 @@ 
https://github.com/Azure/azure-sdk-for-java/issues/new #### azure-cosmos-spark_3-4_2-12 | Connector | Supported Spark Versions | Supported JVM Versions | Supported Scala Versions | Supported Databricks Runtimes | Supported Fabric Runtimes | |-----------|--------------------------|------------------------|--------------------------|-------------------------------|---------------------------| +| 4.44.0 | 3.4.0 - 3.4.1 | [8, 11] | 2.12 | 13.\* | | | 4.43.1 | 3.4.0 - 3.4.1 | [8, 11] | 2.12 | 13.\* | | | 4.43.0 | 3.4.0 - 3.4.1 | [8, 11] | 2.12 | 13.\* | | | 4.42.0 | 3.4.0 - 3.4.1 | [8, 11] | 2.12 | 13.\* | | @@ -121,6 +123,7 @@ https://github.com/Azure/azure-sdk-for-java/issues/new #### azure-cosmos-spark_3-5_2-12 | Connector | Supported Spark Versions | Minimum Java Version | Supported Scala Versions | Supported Databricks Runtimes | Supported Fabric Runtimes | |-----------|--------------------------|----------------------|----------------------------|-------------------------------|------------------------------| +| 4.44.0 | 3.5.0 | [8, 11, 17] | 2.12 | 14.*, 15.\*, 16.4 LTS | 1.3.\* | | 4.43.1 | 3.5.0 | [8, 11, 17] | 2.12 | 14.*, 15.\*, 16.4 LTS | 1.3.\* | | 4.43.0 | 3.5.0 | [8, 11, 17] | 2.12 | 14.*, 15.\*, 16.4 LTS | 1.3.\* | | 4.42.0 | 3.5.0 | [8, 11, 17] | 2.12 | 14.*, 15.\*, 16.4 LTS | 1.3.\* | @@ -149,23 +152,25 @@ to use the same version of Scala that Spark was compiled for. 
#### azure-cosmos-spark_3-5_2-13 | Connector | Supported Spark Versions | Minimum Java Version | Supported Scala Versions | Supported Databricks Runtimes | Supported Fabric Runtimes | |-----------|--------------------------|-----------------------|---------------------------|-------------------------------|---------------------------| +| 4.44.0 | 3.5.0 | [17] | 2.13 | 16.4 LTS | TBD | | 4.43.1 | 3.5.0 | [17] | 2.13 | 16.4 LTS | TBD | | 4.43.0 | 3.5.0 | [17] | 2.13 | 16.4 LTS | TBD | #### azure-cosmos-spark_4-0_2-13 | Connector | Supported Spark Versions | Minimum Java Version | Supported Scala Versions | Supported Databricks Runtimes | Supported Fabric Runtimes | |-----------|--------------------------|----------------------|---------------------------|-------------------------------|---------------------------| +| 4.44.0 | 4.0.0 | [17, 21] | 2.13 | 17.\* | TBD | | 4.43.1 | 4.0.0 | [17, 21] | 2.13 | 17.\* | TBD | | 4.43.0 | 4.0.0 | [17, 21] | 2.13 | 17.\* | TBD | ### Download You can use the maven coordinate of the jar to auto install the Spark Connector to your Databricks Runtime from Maven: -`com.azure.cosmos.spark:azure-cosmos-spark_3-3_2-12:4.43.1` +`com.azure.cosmos.spark:azure-cosmos-spark_3-3_2-12:4.44.0` You can also integrate against Cosmos DB Spark Connector in your SBT project: ```scala -libraryDependencies += "com.azure.cosmos.spark" % "azure-cosmos-spark_3-3_2-12" % "4.43.1" +libraryDependencies += "com.azure.cosmos.spark" % "azure-cosmos-spark_3-3_2-12" % "4.44.0" ``` Cosmos DB Spark Connector is available on [Maven Central Repo](https://central.sonatype.com/search?namespace=com.azure.cosmos.spark). 
diff --git a/sdk/cosmos/azure-cosmos-spark_3-3_2-12/pom.xml b/sdk/cosmos/azure-cosmos-spark_3-3_2-12/pom.xml index 0a4788c7c407..b00d0a11fd0d 100644 --- a/sdk/cosmos/azure-cosmos-spark_3-3_2-12/pom.xml +++ b/sdk/cosmos/azure-cosmos-spark_3-3_2-12/pom.xml @@ -11,7 +11,7 @@ com.azure.cosmos.spark azure-cosmos-spark_3-3_2-12 - 4.44.0-beta.1 + 4.44.0 jar https://github.com/Azure/azure-sdk-for-java/tree/main/sdk/cosmos/azure-cosmos-spark_3-3_2-12 OLTP Spark 3.3 Connector for Azure Cosmos DB SQL API diff --git a/sdk/cosmos/azure-cosmos-spark_3-4_2-12/CHANGELOG.md b/sdk/cosmos/azure-cosmos-spark_3-4_2-12/CHANGELOG.md index 4a79e927e2f7..3aea823137eb 100644 --- a/sdk/cosmos/azure-cosmos-spark_3-4_2-12/CHANGELOG.md +++ b/sdk/cosmos/azure-cosmos-spark_3-4_2-12/CHANGELOG.md @@ -1,16 +1,10 @@ ## Release History -### 4.44.0-beta.1 (Unreleased) +### 4.44.0 (2026-02-27) #### Features Added * Added config entry `spark.cosmos.account.azureEnvironment.management.scope` to allow specifying the Entra ID scope/audience to be used when retrieving tokens to authenticate against the ARM/management endpoint of non-public clouds. 
- See [PR 48137](https://github.com/Azure/azure-sdk-for-java/pull/48137) -#### Breaking Changes - -#### Bugs Fixed - -#### Other Changes - ### 4.43.1 (2026-02-25) #### Bugs Fixed diff --git a/sdk/cosmos/azure-cosmos-spark_3-4_2-12/README.md b/sdk/cosmos/azure-cosmos-spark_3-4_2-12/README.md index 99b29330749d..de97d0aa6472 100644 --- a/sdk/cosmos/azure-cosmos-spark_3-4_2-12/README.md +++ b/sdk/cosmos/azure-cosmos-spark_3-4_2-12/README.md @@ -28,6 +28,7 @@ https://github.com/Azure/azure-sdk-for-java/issues/new #### azure-cosmos-spark_3-4_2-12 | Connector | Supported Spark Versions | Supported JVM Versions | Supported Scala Versions | Supported Databricks Runtimes | Supported Fabric Runtimes | |-----------|--------------------------|------------------------|--------------------------|-------------------------------|---------------------------| +| 4.44.0 | 3.4.0 - 3.4.1 | [8, 11] | 2.12 | 13.\* | | | 4.43.1 | 3.4.0 - 3.4.1 | [8, 11] | 2.12 | 13.\* | | | 4.43.0 | 3.4.0 - 3.4.1 | [8, 11] | 2.12 | 13.\* | | | 4.42.0 | 3.4.0 - 3.4.1 | [8, 11] | 2.12 | 13.\* | | @@ -70,6 +71,7 @@ https://github.com/Azure/azure-sdk-for-java/issues/new #### azure-cosmos-spark_3-3_2-12 | Connector | Supported Spark Versions | Supported JVM Versions | Supported Scala Versions | Supported Databricks Runtimes | |-----------|--------------------------|------------------------|--------------------------|-------------------------------| +| 4.44.0 | 3.3.0 - 3.3.2 | [8, 11] | 2.12 | 11.\*, 12.\* | | 4.43.1 | 3.3.0 - 3.3.2 | [8, 11] | 2.12 | 11.\*, 12.\* | | 4.43.0 | 3.3.0 - 3.3.2 | [8, 11] | 2.12 | 11.\*, 12.\* | | 4.42.0 | 3.3.0 - 3.3.2 | [8, 11] | 2.12 | 11.\*, 12.\* | @@ -121,6 +123,7 @@ https://github.com/Azure/azure-sdk-for-java/issues/new #### azure-cosmos-spark_3-5_2-12 | Connector | Supported Spark Versions | Minimum Java Version | Supported Scala Versions | Supported Databricks Runtimes | Supported Fabric Runtimes | 
|-----------|--------------------------|----------------------|--------------------------|-------------------------------|---------------------------| +| 4.44.0 | 3.5.0 | [8, 11, 17] | 2.12 | 14.*, 15.\*, 16.4 LTS | 1.3.\* | | 4.43.1 | 3.5.0 | [8, 11, 17] | 2.12 | 14.*, 15.\*, 16.4 LTS | 1.3.\* | | 4.43.0 | 3.5.0 | [8, 11, 17] | 2.12 | 14.*, 15.\*, 16.4 LTS | 1.3.\* | | 4.42.0 | 3.5.0 | [8, 11, 17] | 2.12 | 14.*, 15.\*, 16.4 LTS | 1.3.\* | @@ -149,23 +152,25 @@ to use the same version of Scala that Spark was compiled for. #### azure-cosmos-spark_3-5_2-13 | Connector | Supported Spark Versions | Minimum Java Version | Supported Scala Versions | Supported Databricks Runtimes | Supported Fabric Runtimes | |-----------|--------------------------|-----------------------|---------------------------|-------------------------------|---------------------------| +| 4.44.0 | 3.5.0 | [17] | 2.13 | 16.4 LTS | TBD | | 4.43.1 | 3.5.0 | [17] | 2.13 | 16.4 LTS | TBD | | 4.43.0 | 3.5.0 | [17] | 2.13 | 16.4 LTS | TBD | #### azure-cosmos-spark_4-0_2-13 | Connector | Supported Spark Versions | Minimum Java Version | Supported Scala Versions | Supported Databricks Runtimes | Supported Fabric Runtimes | |-----------|--------------------------|----------------------|---------------------------|-------------------------------|---------------------------| +| 4.44.0 | 4.0.0 | [17, 21] | 2.13 | 17.\* | TBD | | 4.43.1 | 4.0.0 | [17, 21] | 2.13 | 17.\* | TBD | | 4.43.0 | 4.0.0 | [17, 21] | 2.13 | 17.\* | TBD | ### Download You can use the maven coordinate of the jar to auto install the Spark Connector to your Databricks Runtime from Maven: -`com.azure.cosmos.spark:azure-cosmos-spark_3-4_2-12:4.43.1` +`com.azure.cosmos.spark:azure-cosmos-spark_3-4_2-12:4.44.0` You can also integrate against Cosmos DB Spark Connector in your SBT project: ```scala -libraryDependencies += "com.azure.cosmos.spark" % "azure-cosmos-spark_3-4_2-12" % "4.43.1" +libraryDependencies += "com.azure.cosmos.spark" % 
"azure-cosmos-spark_3-4_2-12" % "4.44.0" ``` Cosmos DB Spark Connector is available on [Maven Central Repo](https://central.sonatype.com/search?namespace=com.azure.cosmos.spark). diff --git a/sdk/cosmos/azure-cosmos-spark_3-5_2-12/CHANGELOG.md b/sdk/cosmos/azure-cosmos-spark_3-5_2-12/CHANGELOG.md index 953e54ce9c42..d88429605b84 100644 --- a/sdk/cosmos/azure-cosmos-spark_3-5_2-12/CHANGELOG.md +++ b/sdk/cosmos/azure-cosmos-spark_3-5_2-12/CHANGELOG.md @@ -1,16 +1,10 @@ ## Release History -### 4.44.0-beta.1 (Unreleased) +### 4.44.0 (2026-02-27) #### Features Added * Added config entry `spark.cosmos.account.azureEnvironment.management.scope` to allow specifying the Entra ID scope/audience to be used when retrieving tokens to authenticate against the ARM/management endpoint of non-public clouds. - See [PR 48137](https://github.com/Azure/azure-sdk-for-java/pull/48137) -#### Breaking Changes - -#### Bugs Fixed - -#### Other Changes - ### 4.43.1 (2026-02-25) #### Bugs Fixed diff --git a/sdk/cosmos/azure-cosmos-spark_3-5_2-12/README.md b/sdk/cosmos/azure-cosmos-spark_3-5_2-12/README.md index 6a8eb6e06255..c2c173008bd0 100644 --- a/sdk/cosmos/azure-cosmos-spark_3-5_2-12/README.md +++ b/sdk/cosmos/azure-cosmos-spark_3-5_2-12/README.md @@ -28,6 +28,7 @@ https://github.com/Azure/azure-sdk-for-java/issues/new #### azure-cosmos-spark_3-5_2-12 | Connector | Supported Spark Versions | Minimum Java Version | Supported Scala Versions | Supported Databricks Runtimes | Supported Fabric Runtimes | |-----------|--------------------------|-----------------------|---------------------------|-------------------------------|---------------------------| +| 4.44.0 | 3.5.0 | [8, 11, 17] | 2.12 | 14.\*, 15.\*, 16.4 LTS | 1.3.\* | | 4.43.1 | 3.5.0 | [8, 11, 17] | 2.12 | 14.\*, 15.\*, 16.4 LTS | 1.3.\* | | 4.43.0 | 3.5.0 | [8, 11, 17] | 2.12 | 14.\*, 15.\*, 16.4 LTS | 1.3.\* | | 4.42.0 | 3.5.0 | [8, 11, 17] | 2.12 | 14.\*, 15.\*, 16.4 LTS | 1.3.\* | @@ -56,6 +57,7 @@ to use the same version of 
Scala that Spark was compiled for. #### azure-cosmos-spark_3-3_2-12 | Connector | Supported Spark Versions | Supported JVM Versions | Supported Scala Versions | Supported Databricks Runtimes | |-----------|--------------------------|------------------------|--------------------------|-------------------------------| +| 4.44.0 | 3.3.0 - 3.3.2 | [8, 11] | 2.12 | 11.\*, 12.\* | | 4.43.1 | 3.3.0 - 3.3.2 | [8, 11] | 2.12 | 11.\*, 12.\* | | 4.43.0 | 3.3.0 - 3.3.2 | [8, 11] | 2.12 | 11.\*, 12.\* | | 4.42.0 | 3.3.0 - 3.3.2 | [8, 11] | 2.12 | 11.\*, 12.\* | @@ -107,6 +109,7 @@ to use the same version of Scala that Spark was compiled for. #### azure-cosmos-spark_3-4_2-12 | Connector | Supported Spark Versions | Supported JVM Versions | Supported Scala Versions | Supported Databricks Runtimes | Supported Fabric Runtimes | |-----------|--------------------------|------------------------|--------------------------|-------------------------------|---------------------------| +| 4.44.0 | 3.4.0 - 3.4.1 | [8, 11] | 2.12 | 13.\* | | | 4.43.1 | 3.4.0 - 3.4.1 | [8, 11] | 2.12 | 13.\* | | | 4.43.0 | 3.4.0 - 3.4.1 | [8, 11] | 2.12 | 13.\* | | | 4.42.0 | 3.4.0 - 3.4.1 | [8, 11] | 2.12 | 13.\* | | @@ -149,23 +152,25 @@ to use the same version of Scala that Spark was compiled for. 
#### azure-cosmos-spark_3-5_2-13 | Connector | Supported Spark Versions | Minimum Java Version | Supported Scala Versions | Supported Databricks Runtimes | Supported Fabric Runtimes | |-----------|--------------------------|-----------------------|---------------------------|-------------------------------|---------------------------| +| 4.44.0 | 3.5.0 | [17] | 2.13 | 16.4 LTS | TBD | | 4.43.1 | 3.5.0 | [17] | 2.13 | 16.4 LTS | TBD | | 4.43.0 | 3.5.0 | [17] | 2.13 | 16.4 LTS | TBD | #### azure-cosmos-spark_4-0_2-13 | Connector | Supported Spark Versions | Minimum Java Version | Supported Scala Versions | Supported Databricks Runtimes | Supported Fabric Runtimes | |-----------|--------------------------|----------------------|---------------------------|-------------------------------|---------------------------| +| 4.44.0 | 4.0.0 | [17, 21] | 2.13 | 17.\* | TBD | | 4.43.1 | 4.0.0 | [17, 21] | 2.13 | 17.\* | TBD | | 4.43.0 | 4.0.0 | [17, 21] | 2.13 | 17.\* | TBD | ### Download You can use the maven coordinate of the jar to auto install the Spark Connector to your Databricks Runtime from Maven: -`com.azure.cosmos.spark:azure-cosmos-spark_3-5_2-12:4.43.1` +`com.azure.cosmos.spark:azure-cosmos-spark_3-5_2-12:4.44.0` You can also integrate against Cosmos DB Spark Connector in your SBT project: ```scala -libraryDependencies += "com.azure.cosmos.spark" % "azure-cosmos-spark_3-5_2-12" % "4.43.1" +libraryDependencies += "com.azure.cosmos.spark" % "azure-cosmos-spark_3-5_2-12" % "4.44.0" ``` Cosmos DB Spark Connector is available on [Maven Central Repo](https://central.sonatype.com/search?namespace=com.azure.cosmos.spark). 
diff --git a/sdk/cosmos/azure-cosmos-spark_3-5_2-12/pom.xml b/sdk/cosmos/azure-cosmos-spark_3-5_2-12/pom.xml index 4c82e1c8cc94..b77872bdf772 100644 --- a/sdk/cosmos/azure-cosmos-spark_3-5_2-12/pom.xml +++ b/sdk/cosmos/azure-cosmos-spark_3-5_2-12/pom.xml @@ -11,7 +11,7 @@ com.azure.cosmos.spark azure-cosmos-spark_3-5_2-12 - 4.44.0-beta.1 + 4.44.0 jar https://github.com/Azure/azure-sdk-for-java/tree/main/sdk/cosmos/azure-cosmos-spark_3-5_2-12 OLTP Spark 3.5 Connector for Azure Cosmos DB SQL API diff --git a/sdk/cosmos/azure-cosmos-spark_3-5_2-13/CHANGELOG.md b/sdk/cosmos/azure-cosmos-spark_3-5_2-13/CHANGELOG.md index bb089a7289f0..191509adb6c6 100644 --- a/sdk/cosmos/azure-cosmos-spark_3-5_2-13/CHANGELOG.md +++ b/sdk/cosmos/azure-cosmos-spark_3-5_2-13/CHANGELOG.md @@ -1,16 +1,10 @@ ## Release History -### 4.44.0-beta.1 (Unreleased) +### 4.44.0 (2026-02-27) #### Features Added * Added config entry `spark.cosmos.account.azureEnvironment.management.scope` to allow specifying the Entra ID scope/audience to be used when retrieving tokens to authenticate against the ARM/management endpoint of non-public clouds. 
- See [PR 48137](https://github.com/Azure/azure-sdk-for-java/pull/48137) -#### Breaking Changes - -#### Bugs Fixed - -#### Other Changes - ### 4.43.1 (2026-02-25) #### Bugs Fixed diff --git a/sdk/cosmos/azure-cosmos-spark_3-5_2-13/README.md b/sdk/cosmos/azure-cosmos-spark_3-5_2-13/README.md index 085fc45ce274..d3e4f430faeb 100644 --- a/sdk/cosmos/azure-cosmos-spark_3-5_2-13/README.md +++ b/sdk/cosmos/azure-cosmos-spark_3-5_2-13/README.md @@ -28,6 +28,7 @@ https://github.com/Azure/azure-sdk-for-java/issues/new #### azure-cosmos-spark_3-5_2-13 | Connector | Supported Spark Versions | Minimum Java Version | Supported Scala Versions | Supported Databricks Runtimes | Supported Fabric Runtimes | |-----------|--------------------------|-----------------------|---------------------------|-------------------------------|---------------------------| +| 4.44.0 | 3.5.0 | [17] | 2.13 | 16.4 LTS | TBD | | 4.43.1 | 3.5.0 | [17] | 2.13 | 16.4 LTS | TBD | | 4.43.0 | 3.5.0 | [17] | 2.13 | 16.4 LTS | TBD | @@ -37,6 +38,7 @@ to use the same version of Scala that Spark was compiled for. #### azure-cosmos-spark_3-3_2-12 | Connector | Supported Spark Versions | Supported JVM Versions | Supported Scala Versions | Supported Databricks Runtimes | |-----------|--------------------------|------------------------|--------------------------|-------------------------------| +| 4.44.0 | 3.3.0 - 3.3.2 | [8, 11] | 2.12 | 11.\*, 12.\* | | 4.43.1 | 3.3.0 - 3.3.2 | [8, 11] | 2.12 | 11.\*, 12.\* | | 4.43.0 | 3.3.0 - 3.3.2 | [8, 11] | 2.12 | 11.\*, 12.\* | | 4.42.0 | 3.3.0 - 3.3.2 | [8, 11] | 2.12 | 11.\*, 12.\* | @@ -88,6 +90,7 @@ to use the same version of Scala that Spark was compiled for. 
#### azure-cosmos-spark_3-4_2-12 | Connector | Supported Spark Versions | Supported JVM Versions | Supported Scala Versions | Supported Databricks Runtimes | Supported Fabric Runtimes | |-----------|--------------------------|------------------------|--------------------------|-------------------------------|---------------------------| +| 4.44.0 | 3.4.0 - 3.4.1 | [8, 11] | 2.12 | 13.\* | | | 4.43.1 | 3.4.0 - 3.4.1 | [8, 11] | 2.12 | 13.\* | | | 4.43.0 | 3.4.0 - 3.4.1 | [8, 11] | 2.12 | 13.\* | | | 4.42.0 | 3.4.0 - 3.4.1 | [8, 11] | 2.12 | 13.\* | | @@ -130,6 +133,7 @@ to use the same version of Scala that Spark was compiled for. #### azure-cosmos-spark_3-5_2-12 | Connector | Supported Spark Versions | Minimum Java Version | Supported Scala Versions | Supported Databricks Runtimes | Supported Fabric Runtimes | |-----------|--------------------------|-----------------------|---------------------------|-------------------------------|---------------------------| +| 4.44.0 | 3.5.0 | [8, 11, 17] | 2.12 | 14.\*, 15.\*, 16.4 LTS | 1.3.\* | | 4.43.1 | 3.5.0 | [8, 11, 17] | 2.12 | 14.\*, 15.\*, 16.4 LTS | 1.3.\* | | 4.43.0 | 3.5.0 | [8, 11, 17] | 2.12 | 14.\*, 15.\*, 16.4 LTS | 1.3.\* | | 4.42.0 | 3.5.0 | [8, 11, 17] | 2.12 | 14.\*, 15.\*, 16.4 LTS | 1.3.\* | @@ -158,6 +162,7 @@ to use the same version of Scala that Spark was compiled for. #### azure-cosmos-spark_4-0_2-13 | Connector | Supported Spark Versions | Minimum Java Version | Supported Scala Versions | Supported Databricks Runtimes | Supported Fabric Runtimes | |-----------|--------------------------|----------------------|---------------------------|-------------------------------|---------------------------| +| 4.44.0 | 4.0.0 | [17, 21] | 2.13 | 17.\* | TBD | | 4.43.1 | 4.0.0 | [17, 21] | 2.13 | 17.\* | TBD | | 4.43.0 | 4.0.0 | [17, 21] | 2.13 | 17.\* | TBD | @@ -167,11 +172,11 @@ to use Scala 2.13 that Spark 4.0 was compiled for. 
### Download You can use the maven coordinate of the jar to auto install the Spark Connector to your Databricks Runtime from Maven: -`com.azure.cosmos.spark:azure-cosmos-spark_3-5_2-13:4.43.1` +`com.azure.cosmos.spark:azure-cosmos-spark_3-5_2-13:4.44.0` You can also integrate against Cosmos DB Spark Connector in your SBT project: ```scala -libraryDependencies += "com.azure.cosmos.spark" % "azure-cosmos-spark_3-5_2-13" % "4.43.1" +libraryDependencies += "com.azure.cosmos.spark" % "azure-cosmos-spark_3-5_2-13" % "4.44.0" ``` Cosmos DB Spark Connector is available on [Maven Central Repo](https://central.sonatype.com/search?namespace=com.azure.cosmos.spark). diff --git a/sdk/cosmos/azure-cosmos-spark_3-5_2-13/pom.xml b/sdk/cosmos/azure-cosmos-spark_3-5_2-13/pom.xml index f240e8d5bf49..8ffdb5bb4b62 100644 --- a/sdk/cosmos/azure-cosmos-spark_3-5_2-13/pom.xml +++ b/sdk/cosmos/azure-cosmos-spark_3-5_2-13/pom.xml @@ -11,7 +11,7 @@ com.azure.cosmos.spark azure-cosmos-spark_3-5_2-13 - 4.44.0-beta.1 + 4.44.0 jar https://github.com/Azure/azure-sdk-for-java/tree/main/sdk/cosmos/azure-cosmos-spark_3-5_2-13 OLTP Spark 3.5 Connector for Azure Cosmos DB SQL API diff --git a/sdk/cosmos/azure-cosmos-spark_3/docs/quick-start.md b/sdk/cosmos/azure-cosmos-spark_3/docs/quick-start.md index 148b47679ae0..279091386752 100644 --- a/sdk/cosmos/azure-cosmos-spark_3/docs/quick-start.md +++ b/sdk/cosmos/azure-cosmos-spark_3/docs/quick-start.md @@ -29,19 +29,19 @@ You can use any other Spark 3.5 spark offering as well, also you should be able SLF4J is only needed if you plan to use logging, please also download an SLF4J binding which will link the SLF4J API with the logging implementation of your choice. See the [SLF4J user manual](https://www.slf4j.org/manual.html) for more information. 
For Spark 3.3: -- Install Cosmos DB Spark Connector, in your spark Cluster [com.azure.cosmos.spark:azure-cosmos-spark_3-3_2-12:4.43.1](https://search.maven.org/artifact/com.azure.cosmos.spark/azure-cosmos-spark_3-3_2-12/4.43.0/jar) +- Install Cosmos DB Spark Connector, in your spark Cluster [com.azure.cosmos.spark:azure-cosmos-spark_3-3_2-12:4.44.0](https://search.maven.org/artifact/com.azure.cosmos.spark/azure-cosmos-spark_3-3_2-12/4.44.0/jar) For Spark 3.4: -- Install Cosmos DB Spark Connector, in your spark Cluster [com.azure.cosmos.spark:azure-cosmos-spark_3-4_2-12:4.43.1](https://search.maven.org/artifact/com.azure.cosmos.spark/azure-cosmos-spark_3-4_2-12/4.43.0/jar) +- Install Cosmos DB Spark Connector, in your spark Cluster [com.azure.cosmos.spark:azure-cosmos-spark_3-4_2-12:4.44.0](https://search.maven.org/artifact/com.azure.cosmos.spark/azure-cosmos-spark_3-4_2-12/4.44.0/jar) For Spark 3.5 (Scala 2.12): -- Install Cosmos DB Spark Connector, in your spark Cluster [com.azure.cosmos.spark:azure-cosmos-spark_3-5_2-12:4.43.1](https://search.maven.org/artifact/com.azure.cosmos.spark/azure-cosmos-spark_3-5_2-12/4.43.0/jar) +- Install Cosmos DB Spark Connector, in your spark Cluster [com.azure.cosmos.spark:azure-cosmos-spark_3-5_2-12:4.44.0](https://search.maven.org/artifact/com.azure.cosmos.spark/azure-cosmos-spark_3-5_2-12/4.44.0/jar) For Spark 3.5 (Scala 2.13): -- Install Cosmos DB Spark Connector, in your spark Cluster [com.azure.cosmos.spark:azure-cosmos-spark_3-5_2-13:4.43.1](https://search.maven.org/artifact/com.azure.cosmos.spark/azure-cosmos-spark_3-5_2-13/4.43.0/jar) +- Install Cosmos DB Spark Connector, in your spark Cluster [com.azure.cosmos.spark:azure-cosmos-spark_3-5_2-13:4.44.0](https://search.maven.org/artifact/com.azure.cosmos.spark/azure-cosmos-spark_3-5_2-13/4.44.0/jar) For Spark 4.0: -- Install Cosmos DB Spark Connector, in your spark Cluster 
[com.azure.cosmos.spark:azure-cosmos-spark_4-0_2-13:4.43.1](https://search.maven.org/artifact/com.azure.cosmos.spark/azure-cosmos-spark_4-0_2-13/4.43.0/jar) +- Install Cosmos DB Spark Connector, in your spark Cluster [com.azure.cosmos.spark:azure-cosmos-spark_4-0_2-13:4.44.0](https://search.maven.org/artifact/com.azure.cosmos.spark/azure-cosmos-spark_4-0_2-13/4.44.0/jar) The getting started guide is based on PySpark however you can use the equivalent scala version as well, and you can run the following code snippet in an Azure Databricks PySpark notebook. diff --git a/sdk/cosmos/azure-cosmos-spark_4-0_2-13/CHANGELOG.md b/sdk/cosmos/azure-cosmos-spark_4-0_2-13/CHANGELOG.md index d03d8132d287..02f6427f58ae 100644 --- a/sdk/cosmos/azure-cosmos-spark_4-0_2-13/CHANGELOG.md +++ b/sdk/cosmos/azure-cosmos-spark_4-0_2-13/CHANGELOG.md @@ -1,16 +1,10 @@ ## Release History -### 4.44.0-beta.1 (Unreleased) +### 4.44.0 (2026-02-27) #### Features Added * Added config entry `spark.cosmos.account.azureEnvironment.management.scope` to allow specifying the Entra ID scope/audience to be used when retrieving tokens to authenticate against the ARM/management endpoint of non-public clouds. 
- See [PR 48137](https://github.com/Azure/azure-sdk-for-java/pull/48137) -#### Breaking Changes - -#### Bugs Fixed - -#### Other Changes - ### 4.43.1 (2026-02-25) #### Bugs Fixed diff --git a/sdk/cosmos/azure-cosmos-spark_4-0_2-13/README.md b/sdk/cosmos/azure-cosmos-spark_4-0_2-13/README.md index 7952504370fa..5ba7b2c1ac4a 100644 --- a/sdk/cosmos/azure-cosmos-spark_4-0_2-13/README.md +++ b/sdk/cosmos/azure-cosmos-spark_4-0_2-13/README.md @@ -20,6 +20,7 @@ https://github.com/Azure/azure-sdk-for-java/issues/new #### azure-cosmos-spark_4-0_2-13 | Connector | Supported Spark Versions | Minimum Java Version | Supported Scala Versions | Supported Databricks Runtimes | Supported Fabric Runtimes | |-----------|--------------------------|----------------------|---------------------------|-------------------------------|---------------------------| +| 4.44.0 | 4.0.0 | [17, 21] | 2.13 | 17.\* | TBD | | 4.43.1 | 4.0.0 | [17, 21] | 2.13 | 17.\* | TBD | | 4.43.0 | 4.0.0 | [17, 21] | 2.13 | 17.\* | TBD | @@ -29,6 +30,7 @@ to use Scala 2.13 that Spark 4.0 was compiled for. #### azure-cosmos-spark_3-3_2-12 | Connector | Supported Spark Versions | Supported JVM Versions | Supported Scala Versions | Supported Databricks Runtimes | |-----------|--------------------------|------------------------|--------------------------|-------------------------------| +| 4.44.0 | 3.3.0 - 3.3.2 | [8, 11] | 2.12 | 11.\*, 12.\* | | 4.43.1 | 3.3.0 - 3.3.2 | [8, 11] | 2.12 | 11.\*, 12.\* | | 4.43.0 | 3.3.0 - 3.3.2 | [8, 11] | 2.12 | 11.\*, 12.\* | | 4.42.0 | 3.3.0 - 3.3.2 | [8, 11] | 2.12 | 11.\*, 12.\* | @@ -80,6 +82,7 @@ to use Scala 2.13 that Spark 4.0 was compiled for. 
#### azure-cosmos-spark_3-4_2-12 | Connector | Supported Spark Versions | Supported JVM Versions | Supported Scala Versions | Supported Databricks Runtimes | Supported Fabric Runtimes | |-----------|--------------------------|------------------------|--------------------------|-------------------------------|---------------------------| +| 4.44.0 | 3.4.0 - 3.4.1 | [8, 11] | 2.12 | 13.\* | | | 4.43.1 | 3.4.0 - 3.4.1 | [8, 11] | 2.12 | 13.\* | | | 4.43.0 | 3.4.0 - 3.4.1 | [8, 11] | 2.12 | 13.\* | | | 4.42.0 | 3.4.0 - 3.4.1 | [8, 11] | 2.12 | 13.\* | | @@ -122,6 +125,7 @@ to use Scala 2.13 that Spark 4.0 was compiled for. #### azure-cosmos-spark_3-5_2-12 | Connector | Supported Spark Versions | Minimum Java Version | Supported Scala Versions | Supported Databricks Runtimes | Supported Fabric Runtimes | |-----------|--------------------------|-----------------------|---------------------------|-------------------------------|---------------------------| +| 4.44.0 | 3.5.0 | [8, 11, 17] | 2.12 | 14.\*, 15.\*, 16.4 LTS | 1.3.\* | | 4.43.1 | 3.5.0 | [8, 11, 17] | 2.12 | 14.\*, 15.\*, 16.4 LTS | 1.3.\* | | 4.43.0 | 3.5.0 | [8, 11, 17] | 2.12 | 14.\*, 15.\*, 16.4 LTS | 1.3.\* | | 4.42.0 | 3.5.0 | [8, 11, 17] | 2.12 | 14.\*, 15.\*, 16.4 LTS | 1.3.\* | @@ -150,17 +154,18 @@ to use the same version of Scala that Spark was compiled for. 
#### azure-cosmos-spark_3-5_2-13 | Connector | Supported Spark Versions | Minimum Java Version | Supported Scala Versions | Supported Databricks Runtimes | Supported Fabric Runtimes | |-----------|--------------------------|-----------------------|---------------------------|-------------------------------|---------------------------| +| 4.44.0 | 3.5.0 | [17] | 2.13 | 16.4 LTS | TBD | | 4.43.1 | 3.5.0 | [17] | 2.13 | 16.4 LTS | TBD | | 4.43.0 | 3.5.0 | [17] | 2.13 | 16.4 LTS | TBD | ### Download You can use the maven coordinate of the jar to auto install the Spark Connector to your Databricks Runtime from Maven: -`com.azure.cosmos.spark:azure-cosmos-spark_4-0_2-13:4.43.1` +`com.azure.cosmos.spark:azure-cosmos-spark_4-0_2-13:4.44.0` You can also integrate against Cosmos DB Spark Connector in your SBT project: ```scala -libraryDependencies += "com.azure.cosmos.spark" % "azure-cosmos-spark_4-0_2-13" % "4.43.1" +libraryDependencies += "com.azure.cosmos.spark" % "azure-cosmos-spark_4-0_2-13" % "4.44.0" ``` Cosmos DB Spark Connector is available on [Maven Central Repo](https://central.sonatype.com/search?namespace=com.azure.cosmos.spark). 
diff --git a/sdk/cosmos/azure-cosmos-spark_4-0_2-13/pom.xml b/sdk/cosmos/azure-cosmos-spark_4-0_2-13/pom.xml index b48d80d7816d..3b74cc724d40 100644 --- a/sdk/cosmos/azure-cosmos-spark_4-0_2-13/pom.xml +++ b/sdk/cosmos/azure-cosmos-spark_4-0_2-13/pom.xml @@ -11,7 +11,7 @@ com.azure.cosmos.spark azure-cosmos-spark_4-0_2-13 - 4.44.0-beta.1 + 4.44.0 jar https://github.com/Azure/azure-sdk-for-java/tree/main/sdk/cosmos/azure-cosmos-spark_4-0_2-13 OLTP Spark 4.0 Connector for Azure Cosmos DB SQL API diff --git a/sdk/cosmos/fabric-cosmos-spark-auth_3/pom.xml b/sdk/cosmos/fabric-cosmos-spark-auth_3/pom.xml index 975546f7279c..67c4a867a9e3 100644 --- a/sdk/cosmos/fabric-cosmos-spark-auth_3/pom.xml +++ b/sdk/cosmos/fabric-cosmos-spark-auth_3/pom.xml @@ -100,7 +100,7 @@ com.azure.cosmos.spark azure-cosmos-spark_3-5_2-12 - 4.44.0-beta.1 + 4.44.0 provided @@ -183,7 +183,7 @@ com.fasterxml.jackson.datatype:jackson-datatype-jsr310:[2.18.4] com.fasterxml.jackson.core:jackson-databind:[2.18.4] com.fasterxml.jackson.module:jackson-module-scala_2.12:[2.18.4] - com.azure.cosmos.spark:azure-cosmos-spark_3-5_2-12:[4.44.0-beta.1] + com.azure.cosmos.spark:azure-cosmos-spark_3-5_2-12:[4.44.0] com.microsoft.azure.synapse:synapseutils_2.12:[1.5.4] From c91a54359e7997447d8de93005e5dca50d7fa369 Mon Sep 17 00:00:00 2001 From: Fabian Meiswinkel Date: Fri, 27 Feb 2026 14:41:49 +0000 Subject: [PATCH 6/7] Update pom.xml --- sdk/cosmos/azure-cosmos-spark_3-4_2-12/pom.xml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/sdk/cosmos/azure-cosmos-spark_3-4_2-12/pom.xml b/sdk/cosmos/azure-cosmos-spark_3-4_2-12/pom.xml index dd8eb6dd43d3..f6faf3abbead 100644 --- a/sdk/cosmos/azure-cosmos-spark_3-4_2-12/pom.xml +++ b/sdk/cosmos/azure-cosmos-spark_3-4_2-12/pom.xml @@ -11,7 +11,7 @@ com.azure.cosmos.spark azure-cosmos-spark_3-4_2-12 - 4.44.0-beta.1 + 4.44.0 jar https://github.com/Azure/azure-sdk-for-java/tree/main/sdk/cosmos/azure-cosmos-spark_3-4_2-12 OLTP Spark 3.4 Connector for Azure 
Cosmos DB SQL API From 7293ed736c3724faee75e95a78193379562f7edc Mon Sep 17 00:00:00 2001 From: Fabian Meiswinkel Date: Fri, 27 Feb 2026 19:14:17 +0000 Subject: [PATCH 7/7] Update pom.xml --- sdk/cosmos/azure-cosmos-spark_3/pom.xml | 12 ++++++++++++ 1 file changed, 12 insertions(+) diff --git a/sdk/cosmos/azure-cosmos-spark_3/pom.xml b/sdk/cosmos/azure-cosmos-spark_3/pom.xml index 284f3a904216..1d3889106259 100644 --- a/sdk/cosmos/azure-cosmos-spark_3/pom.xml +++ b/sdk/cosmos/azure-cosmos-spark_3/pom.xml @@ -140,6 +140,11 @@ jackson-module-afterburner 2.18.4 + + com.fasterxml.jackson.dataformat + jackson-dataformat-xml + 2.18.4 + io.opentelemetry opentelemetry-api @@ -310,6 +315,7 @@ com.fasterxml.jackson.core:jackson-databind:[2.18.4] com.fasterxml.jackson.datatype:jackson-datatype-jsr310:[2.18.4] com.fasterxml.jackson.module:jackson-module-afterburner:[2.18.4] + com.fasterxml.jackson.dataformat:jackson-dataformat-xml:[2.18.4] com.fasterxml.jackson.module:jackson-module-scala_2.12:[${scala-jackson.version}] com.fasterxml.jackson.module:jackson-module-scala_2.13:[${scala-jackson.version}] io.micrometer:micrometer-registry-azure-monitor:[1.15.1] @@ -590,6 +596,12 @@ ** + + com.fasterxml.jackson.dataformat:jackson-dataformat-xml + + ** + + *:*