Skip to content

Commit 3a342d0

Browse files
committed
Chore: update databricks and snowflake auth in integration tests
1 parent d4a3acb commit 3a342d0

File tree

7 files changed

+57
-58
lines changed

7 files changed

+57
-58
lines changed

.circleci/continue_config.yml

Lines changed: 22 additions & 22 deletions
Original file line numberDiff line numberDiff line change
@@ -282,24 +282,24 @@ workflows:
282282
parameters:
283283
python_version:
284284
- "3.9"
285-
- "3.10"
286-
- "3.11"
287-
- "3.12"
288-
- "3.13"
285+
#- "3.10"
286+
#- "3.11"
287+
#- "3.12"
288+
#- "3.13"
289289
- cicd_tests_windows
290290
- engine_tests_docker:
291291
name: engine_<< matrix.engine >>
292292
matrix:
293293
parameters:
294294
engine:
295295
- duckdb
296-
- postgres
297-
- mysql
298-
- mssql
299-
- trino
300-
- spark
301-
- clickhouse
302-
- risingwave
296+
#- postgres
297+
#- mysql
298+
#- mssql
299+
#- trino
300+
#- spark
301+
#- clickhouse
302+
#- risingwave
303303
- engine_tests_cloud:
304304
name: cloud_engine_<< matrix.engine >>
305305
context:
@@ -309,18 +309,18 @@ workflows:
309309
matrix:
310310
parameters:
311311
engine:
312-
#- snowflake
312+
- snowflake
313313
- databricks
314-
- redshift
315-
- bigquery
316-
- clickhouse-cloud
317-
- athena
318-
- fabric
319-
- gcp-postgres
320-
filters:
321-
branches:
322-
only:
323-
- main
314+
#- redshift
315+
#- bigquery
316+
#- clickhouse-cloud
317+
#- athena
318+
#- fabric
319+
#- gcp-postgres
320+
#filters:
321+
# branches:
322+
# only:
323+
# - main
324324
- ui_style
325325
- ui_test
326326
- vscode_test

.circleci/manage-test-db.sh

Lines changed: 1 addition & 15 deletions
Original file line numberDiff line numberDiff line change
@@ -25,7 +25,7 @@ function_exists() {
2525
# Snowflake
2626
snowflake_init() {
2727
echo "Installing Snowflake CLI"
28-
pip install "snowflake-cli-labs<3.8.0"
28+
pip install "snowflake-cli"
2929
}
3030

3131
snowflake_up() {
@@ -40,20 +40,6 @@ snowflake_down() {
4040
databricks_init() {
4141
echo "Installing Databricks CLI"
4242
curl -fsSL https://raw.githubusercontent.com/databricks/setup-cli/main/install.sh | sudo sh || true
43-
44-
echo "Writing out Databricks CLI config file"
45-
echo -e "[DEFAULT]\nhost = $DATABRICKS_SERVER_HOSTNAME\ntoken = $DATABRICKS_ACCESS_TOKEN" > ~/.databrickscfg
46-
47-
# this takes a path like 'sql/protocolv1/o/2934659247569/0723-005339-foobar' and extracts '0723-005339-foobar' from it
48-
CLUSTER_ID=${DATABRICKS_HTTP_PATH##*/}
49-
50-
echo "Extracted cluster id: $CLUSTER_ID from '$DATABRICKS_HTTP_PATH'"
51-
52-
# Note: the cluster doesn't need to be running to create / drop catalogs, but it does need to be running to run the integration tests
53-
echo "Ensuring cluster is running"
54-
# the || true is to prevent the following error from causing an abort:
55-
# > Error: is in unexpected state Running.
56-
databricks clusters start $CLUSTER_ID || true
5743
}
5844

5945
databricks_up() {

Makefile

Lines changed: 2 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -212,14 +212,14 @@ risingwave-test: engine-risingwave-up
212212
# Cloud Engines #
213213
#################
214214

215-
snowflake-test: guard-SNOWFLAKE_ACCOUNT guard-SNOWFLAKE_WAREHOUSE guard-SNOWFLAKE_DATABASE guard-SNOWFLAKE_USER guard-SNOWFLAKE_PASSWORD engine-snowflake-install
215+
snowflake-test: guard-SNOWFLAKE_ACCOUNT guard-SNOWFLAKE_WAREHOUSE guard-SNOWFLAKE_DATABASE guard-SNOWFLAKE_USER engine-snowflake-install
216216
pytest -n auto -m "snowflake" --reruns 3 --junitxml=test-results/junit-snowflake.xml
217217

218218
bigquery-test: guard-BIGQUERY_KEYFILE engine-bigquery-install
219219
$(PIP) install -e ".[bigframes]"
220220
pytest -n auto -m "bigquery" --reruns 3 --junitxml=test-results/junit-bigquery.xml
221221

222-
databricks-test: guard-DATABRICKS_CATALOG guard-DATABRICKS_SERVER_HOSTNAME guard-DATABRICKS_HTTP_PATH guard-DATABRICKS_ACCESS_TOKEN guard-DATABRICKS_CONNECT_VERSION engine-databricks-install
222+
databricks-test: guard-DATABRICKS_CATALOG guard-DATABRICKS_SERVER_HOSTNAME guard-DATABRICKS_HTTP_PATH guard-DATABRICKS_CONNECT_VERSION engine-databricks-install
223223
$(PIP) install 'databricks-connect==${DATABRICKS_CONNECT_VERSION}'
224224
pytest -n auto -m "databricks" --reruns 3 --junitxml=test-results/junit-databricks.xml
225225

sqlmesh/core/engine_adapter/databricks.py

Lines changed: 16 additions & 16 deletions
Original file line numberDiff line numberDiff line change
@@ -78,21 +78,21 @@ def can_access_databricks_connect(cls, disable_databricks_connect: bool) -> bool
7878
def _use_spark_session(self) -> bool:
7979
if self.can_access_spark_session(bool(self._extra_config.get("disable_spark_session"))):
8080
return True
81-
return (
82-
self.can_access_databricks_connect(
83-
bool(self._extra_config.get("disable_databricks_connect"))
84-
)
85-
and (
86-
{
87-
"databricks_connect_server_hostname",
88-
"databricks_connect_access_token",
89-
}.issubset(self._extra_config)
90-
)
91-
and (
92-
"databricks_connect_cluster_id" in self._extra_config
93-
or "databricks_connect_use_serverless" in self._extra_config
94-
)
95-
)
81+
82+
if self.can_access_databricks_connect(
83+
bool(self._extra_config.get("disable_databricks_connect"))
84+
):
85+
if self._extra_config.get("databricks_connect_use_serverless"):
86+
return True
87+
88+
if {
89+
"databricks_connect_cluster_id",
90+
"databricks_connect_server_hostname",
91+
"databricks_connect_access_token",
92+
}.issubset(self._extra_config):
93+
return True
94+
95+
return False
9696

9797
@property
9898
def is_spark_session_connection(self) -> bool:
@@ -108,7 +108,7 @@ def _set_spark_engine_adapter_if_needed(self) -> None:
108108

109109
connect_kwargs = dict(
110110
host=self._extra_config["databricks_connect_server_hostname"],
111-
token=self._extra_config["databricks_connect_access_token"],
111+
token=self._extra_config.get("databricks_connect_access_token"),
112112
)
113113
if "databricks_connect_use_serverless" in self._extra_config:
114114
connect_kwargs["serverless"] = True

tests/core/engine_adapter/integration/config.yaml

Lines changed: 6 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -128,7 +128,8 @@ gateways:
128128
warehouse: {{ env_var('SNOWFLAKE_WAREHOUSE') }}
129129
database: {{ env_var('SNOWFLAKE_DATABASE') }}
130130
user: {{ env_var('SNOWFLAKE_USER') }}
131-
password: {{ env_var('SNOWFLAKE_PASSWORD') }}
131+
private_key: {{ env_var('SNOWFLAKE_PRIVATE_KEY_RAW') }}
132+
#private_key_path: {{ env_var('SNOWFLAKE_PRIVATE_KEY_FILE') }}
132133
check_import: false
133134
state_connection:
134135
type: duckdb
@@ -139,7 +140,10 @@ gateways:
139140
catalog: {{ env_var('DATABRICKS_CATALOG') }}
140141
server_hostname: {{ env_var('DATABRICKS_SERVER_HOSTNAME') }}
141142
http_path: {{ env_var('DATABRICKS_HTTP_PATH') }}
142-
access_token: {{ env_var('DATABRICKS_ACCESS_TOKEN') }}
143+
auth_type: {{ env_var('DATABRICKS_AUTH_TYPE', 'databricks-oauth') }}
144+
oauth_client_id: {{ env_var('DATABRICKS_CLIENT_ID') }}
145+
oauth_client_secret: {{ env_var('DATABRICKS_CLIENT_SECRET') }}
146+
databricks_connect_use_serverless: true
143147
check_import: false
144148

145149
inttest_redshift:

tests/core/engine_adapter/integration/conftest.py

Lines changed: 0 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -7,7 +7,6 @@
77
import logging
88
from pytest import FixtureRequest
99

10-
1110
from sqlmesh import Config, EngineAdapter
1211
from sqlmesh.core.constants import SQLMESH_PATH
1312
from sqlmesh.core.config.connection import (

tests/core/engine_adapter/integration/test_freshness.py

Lines changed: 10 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -25,6 +25,16 @@
2525
EVALUATION_SPY = None
2626

2727

28+
@pytest.fixture(autouse=True)
29+
def _skip_snowflake(ctx: TestContext):
30+
if ctx.dialect == "snowflake":
31+
# these tests use callbacks that need to run db queries within a time_travel context that changes the system time to be in the future
32+
# this causes invalid JWTs to be generated when the callbacks try to run a db query
33+
pytest.skip(
34+
"snowflake.connector generates an invalid JWT when time_travel changes the system time"
35+
)
36+
37+
2838
# Mock the snapshot evaluator's evaluate function to count the number of times it is called
2939
@pytest.fixture(autouse=True, scope="function")
3040
def _install_evaluation_spy(mocker: MockerFixture):

0 commit comments

Comments
 (0)