Skip to content
Open
Show file tree
Hide file tree
Changes from all commits
Commits
Show all changes
21 commits
Select commit Hold shift + click to select a range
cd9bf62
Initial python changes for removing dbt-fabric dependency in dbt-sqls…
Benjamin-Knight Jan 5, 2026
28b0833
Fix up some pylance warnings.
Benjamin-Knight Jan 5, 2026
19c32a1
Initial work at porting over the macros from fabric
Benjamin-Knight Jan 5, 2026
ace259d
Fix misconfigured credentials authentication type
Benjamin-Knight Jan 5, 2026
1b2cc59
Move all snapshot macros to the same folder naming as fabric
Benjamin-Knight Jan 5, 2026
c77fc48
Fix loss of data type handling on microbatching
Benjamin-Knight Jan 5, 2026
5801ae2
feat: Allow optional thread count parameter for functional tests
Benjamin-Knight Jan 5, 2026
26838e5
Update the functional tests to let the user specify the thread count …
Benjamin-Knight Jan 5, 2026
a55d29c
Add more of the fabric functional tests that were not present in the …
Benjamin-Knight Jan 5, 2026
81f64a7
Add python 3.13 potential support
Benjamin-Knight Jan 5, 2026
25f548e
Add python 3.13 to more required locations.
Benjamin-Knight Jan 5, 2026
c807b9d
[pre-commit.ci] auto fixes from pre-commit.com hooks
pre-commit-ci[bot] Jan 5, 2026
df8ddb8
Update some missing pre-commit checks
Benjamin-Knight Jan 5, 2026
073d0b2
Fix an issue with the snapshot tests using escaping that SQL server d…
Benjamin-Knight Jan 6, 2026
c9f910b
Remove a few more references to fabric and a readme that is no longer…
Benjamin-Knight Jan 7, 2026
cad8e80
Merge branch 'master' into remove_fabric_dependency
Benjamin-Knight Jan 29, 2026
a68d0da
Fix the duplication of some tests because this update moved them comp…
Benjamin-Knight Jan 30, 2026
c3bd28f
Remove materialization no longer required
Benjamin-Knight Feb 2, 2026
e4754c7
Remove old python versions from setup.py
Benjamin-Knight Feb 11, 2026
cb773fe
Add behaviour flag for schema name generation due to deviation from D…
Benjamin-Knight Mar 4, 2026
1d30b38
[pre-commit.ci] auto fixes from pre-commit.com hooks
pre-commit-ci[bot] Mar 4, 2026
File filter

Filter by extension

Filter by extension


Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
3 changes: 2 additions & 1 deletion Makefile
Original file line number Diff line number Diff line change
@@ -1,4 +1,5 @@
.DEFAULT_GOAL:=help
THREADS ?= auto

.PHONY: dev
dev: ## Installs adapter in develop mode along with development dependencies
Expand Down Expand Up @@ -44,7 +45,7 @@ unit: ## Runs unit tests.
.PHONY: functional
functional: ## Runs functional tests.
@\
pytest -n auto -ra -v tests/functional
pytest -n $(THREADS) -ra -v tests/functional

.PHONY: test
test: ## Runs unit tests and code checks against staged changes.
Expand Down
27 changes: 27 additions & 0 deletions README.md
Original file line number Diff line number Diff line change
Expand Up @@ -49,6 +49,33 @@ pip install -U --pre dbt-sqlserver

See [the changelog](CHANGELOG.md)

## Configuration

### Flags

- `dbt_sqlserver_use_default_schema_concat`: *(default: `false`)* Controls schema name generation when a [custom schema](https://docs.getdbt.com/docs/build/custom-schemas) is set on a model.

| Flag value | `custom_schema_name` | Result |
|---|---|---|
| `false` (default, legacy) | *(none)* | `target.schema` |
| `false` (default, legacy) | `"reporting"` | `reporting` |
| `true` (dbt-core standard) | *(none)* | `target.schema` |
| `true` (dbt-core standard) | `"reporting"` | `target.schema_reporting` |

When `false` (the default), the adapter uses its legacy behaviour: `custom_schema_name` is used **as-is** without being prefixed by `target.schema`.
When `true`, the adapter delegates to dbt-core's `default__generate_schema_name`, which concatenates `target.schema` + `_` + `custom_schema_name`.

**Example usage in `dbt_project.yml`:**

```yaml
vars:
dbt_sqlserver_use_default_schema_concat: true # Enable standard schema concatenation
```

> **Note:** If you want to permanently customise schema generation and avoid any future deprecation of this flag, override the `sqlserver__generate_schema_name` macro directly in your project.



## Contributing

[![Unit tests](https://github.com/dbt-msft/dbt-sqlserver/actions/workflows/unit-tests.yml/badge.svg)](https://github.com/dbt-msft/dbt-sqlserver/actions/workflows/unit-tests.yml)
Expand Down
2 changes: 1 addition & 1 deletion dbt/adapters/sqlserver/__init__.py
Original file line number Diff line number Diff line change
Expand Up @@ -11,7 +11,7 @@
adapter=SQLServerAdapter,
credentials=SQLServerCredentials,
include_path=sqlserver.PACKAGE_PATH,
dependencies=["fabric"],
dependencies=[],
)

__all__ = [
Expand Down
253 changes: 238 additions & 15 deletions dbt/adapters/sqlserver/sqlserver_adapter.py
Original file line number Diff line number Diff line change
@@ -1,24 +1,45 @@
from typing import Optional
from typing import List, Optional

import dbt.exceptions
import agate
import dbt_common.exceptions
from dbt.adapters.base.column import Column as BaseColumn
from dbt.adapters.base.impl import ConstraintSupport
from dbt.adapters.fabric import FabricAdapter
from dbt.contracts.graph.nodes import ConstraintType
from dbt.adapters.base.meta import available
from dbt.adapters.base.relation import BaseRelation
from dbt.adapters.capability import Capability, CapabilityDict, CapabilitySupport, Support
from dbt.adapters.events.types import SchemaCreation
from dbt.adapters.reference_keys import _make_ref_key_dict
from dbt.adapters.sql.impl import CREATE_SCHEMA_MACRO_NAME, SQLAdapter
from dbt_common.behavior_flags import BehaviorFlag
from dbt_common.contracts.constraints import (
ColumnLevelConstraint,
ConstraintType,
ModelLevelConstraint,
)
from dbt_common.events.functions import fire_event

from dbt.adapters.sqlserver.sqlserver_column import SQLServerColumn
from dbt.adapters.sqlserver.sqlserver_configs import SQLServerConfigs
from dbt.adapters.sqlserver.sqlserver_connections import SQLServerConnectionManager
from dbt.adapters.sqlserver.sqlserver_relation import SQLServerRelation


class SQLServerAdapter(FabricAdapter):
class SQLServerAdapter(SQLAdapter):
"""
Controls actual implementation of adapter, and ability to override certain methods.
"""

ConnectionManager = SQLServerConnectionManager
Column = SQLServerColumn
AdapterSpecificConfigs = SQLServerConfigs
Relation = SQLServerRelation

_capabilities: CapabilityDict = CapabilityDict(
{
Capability.SchemaMetadataByRelations: CapabilitySupport(support=Support.Full),
Capability.TableLastModifiedMetadata: CapabilitySupport(support=Support.Full),
}
)
CONSTRAINT_SUPPORT = {
ConstraintType.check: ConstraintSupport.ENFORCED,
ConstraintType.not_null: ConstraintSupport.ENFORCED,
Expand All @@ -27,13 +48,196 @@ class SQLServerAdapter(FabricAdapter):
ConstraintType.foreign_key: ConstraintSupport.ENFORCED,
}

@property
def _behavior_flags(self) -> List[BehaviorFlag]:
    """Adapter-specific behaviour flags that users may toggle in ``dbt_project.yml``."""
    empty_flag: BehaviorFlag = {
        "name": "empty",
        "default": False,
        "description": (
            "When enabled, table and view materializations will be created as empty "
            "structures (no data)."
        ),
    }
    schema_concat_flag: BehaviorFlag = {
        "name": "dbt_sqlserver_use_default_schema_concat",
        "default": False,
        "description": (
            "When True, uses dbt-core's standard schema concatenation "
            "(`target.schema` + `_` + `custom_schema_name`). "
            "When False (default), uses legacy adapter behaviour: "
            "`custom_schema_name` is used directly without prefixing `target.schema`. "
            "For a permanent solution, override the `sqlserver__generate_schema_name` "
            "macro in your project instead."
        ),
    }
    return [empty_flag, schema_concat_flag]

@available.parse(lambda *a, **k: [])
def get_column_schema_from_query(self, sql: str) -> List[BaseColumn]:
    """Get a list of the Columns with names and data types from the given sql."""
    _, cursor = self.connections.add_select_query(sql)

    columns: List[BaseColumn] = []
    # Each entry of cursor.description is (name, type_code, ...); see
    # https://peps.python.org/pep-0249/#description
    for description in cursor.description:
        column_name = description[0]
        column_type_code = description[1]
        type_name = self.connections.data_type_code_to_name(column_type_code)
        columns.append(self.Column.create(column_name, type_name))
    return columns

@classmethod
def convert_boolean_type(cls, agate_table, col_idx) -> str:
    """Return the SQL Server column type for a boolean agate column (BIT)."""
    return "bit"

@classmethod
def convert_datetime_type(cls, agate_table, col_idx) -> str:
    """Return the SQL Server column type for a datetime agate column.

    datetime2(6) gives microsecond precision, unlike the legacy DATETIME type.
    """
    return "datetime2(6)"

@classmethod
def convert_number_type(cls, agate_table, col_idx):
    """Return the SQL Server column type for a numeric agate column.

    Columns containing any fractional digits map to ``float``; purely
    integral columns map to ``int``.
    """
    # A non-zero MaxPrecision means at least one value has digits after the
    # decimal point, so an integer type would lose data.
    decimals = agate_table.aggregate(agate.MaxPrecision(col_idx))
    return "float" if decimals else "int"

def create_schema(self, relation: BaseRelation) -> None:
    """Create the schema for *relation*, optionally with an AUTHORIZATION clause.

    When the credentials define ``schema_authorization``, a SQL Server
    specific macro is invoked so the new schema is owned by that principal;
    otherwise the standard dbt ``create_schema`` macro runs.
    """
    relation = relation.without_identifier()
    fire_event(SchemaCreation(relation=_make_ref_key_dict(relation)))
    macro_name = CREATE_SCHEMA_MACRO_NAME
    kwargs = {
        "relation": relation,
    }

    if self.config.credentials.schema_authorization:
        kwargs["schema_authorization"] = self.config.credentials.schema_authorization
        macro_name = "sqlserver__create_schema_with_authorization"

    self.execute_macro(macro_name, kwargs=kwargs)
    self.commit_if_has_connection()

@classmethod
def convert_text_type(cls, agate_table, col_idx):
    """Pick a varchar width wide enough for every value in the column.

    Width is measured in UTF-8 bytes, clamped to a minimum of 16, and
    defaults to 64 when the column contains only nulls.
    """
    # see https://github.com/fishtown-analytics/dbt/pull/2255
    column = agate_table.columns[col_idx]
    byte_lengths = (len(value.encode("utf-8")) for value in column.values_without_nulls())
    length = max(max(byte_lengths, default=64), 16)
    return "varchar({})".format(length)

@classmethod
def convert_time_type(cls, agate_table, col_idx) -> str:
    """Return the SQL Server column type for a time agate column (microsecond precision)."""
    return "time(6)"

@classmethod
def date_function(cls) -> str:
    """Return the T-SQL expression for the current date/time."""
    return "getdate()"

# Methods used in adapter tests
def timestamp_add_sql(self, add_to: str, number: int = 1, interval: str = "hour") -> str:
    """Render a T-SQL expression adding *number* *interval* units to *add_to*.

    T-SQL has no ``+ interval`` syntax (the postgres/redshift idiom), so
    DATEADD is used instead; the defaults exist for backwards compatibility
    with the signature dbt-core expects.
    """
    return "DATEADD({},{},{})".format(interval, number, add_to)

def string_add_sql(
    self,
    add_to: str,
    value: str,
    location="append",
) -> str:
    """Concatenate a string literal onto *add_to* with T-SQL's ``+`` operator.

    ``location`` selects whether the literal goes after ("append") or
    before ("prepend") the existing expression.

    Raises:
        ValueError: if *location* is neither "append" nor "prepend".
    """
    if location not in ("append", "prepend"):
        raise ValueError(f'Got an unexpected location value of "{location}"')
    literal = f"'{value}'"
    if location == "append":
        return f"{add_to} + {literal}"
    return f"{literal} + {add_to}"

def get_rows_different_sql(
    self,
    relation_a: BaseRelation,
    relation_b: BaseRelation,
    column_names: Optional[List[str]] = None,
    except_operator: str = "EXCEPT",
) -> str:
    """Generate SQL for a query that returns a single row with two columns:
    the row-count difference between the two relations and the number of
    mismatched rows.

    note: USING is not supported on Synapse, so COLUMNS_EQUAL_SQL is adjusted.
    This method only really exists for test reasons.
    """
    if column_names is None:
        relation_columns = self.get_columns_in_relation(relation_a)
        names: List[str] = sorted(self.quote(c.name) for c in relation_columns)
    else:
        names = sorted(self.quote(n) for n in column_names)

    # An empty column list means "compare everything".
    columns_csv = ", ".join(names) or "*"

    return COLUMNS_EQUAL_SQL.format(
        columns=columns_csv,
        relation_a=str(relation_a),
        relation_b=str(relation_b),
        except_op=except_operator,
    )

def valid_incremental_strategies(self):
    """The set of standard builtin strategies which this adapter supports out-of-the-box.

    Not used to validate custom strategies defined by end users.
    """
    supported = ["append", "delete+insert", "merge", "microbatch"]
    return supported

# This is for use in the test suite
def run_sql_for_tests(self, sql, fetch, conn):
    """Execute *sql* on *conn* for the functional-test harness.

    ``fetch="one"`` returns a single row, ``fetch="all"`` returns every row,
    and any falsy *fetch* commits the statement and returns None.
    On failure the transaction is rolled back (if the handle is still open)
    and the exception re-raised; the transaction flag is always cleared.
    """
    cursor = conn.handle.cursor()
    try:
        cursor.execute(sql)
        if not fetch:
            conn.handle.commit()
        if fetch == "one":
            return cursor.fetchone()
        elif fetch == "all":
            return cursor.fetchall()
        else:
            return
    except BaseException:
        # "closed" defaults to True so a handle missing that attribute is
        # treated as closed and no rollback is attempted.
        if conn.handle and not getattr(conn.handle, "closed", True):
            conn.handle.rollback()
        raise
    finally:
        conn.transaction_open = False

@available
@classmethod
def render_column_constraint(cls, constraint: ColumnLevelConstraint) -> Optional[str]:
    """Render a column-level constraint as inline T-SQL.

    Only NOT NULL is rendered inline; every other constraint type yields
    an empty string.
    """
    if constraint.type == ConstraintType.not_null:
        return "not null"
    return ""

@classmethod
def render_model_constraint(cls, constraint: ModelLevelConstraint) -> Optional[str]:
constraint_prefix = "add constraint "
column_list = ", ".join(constraint.columns)

if constraint.name is None:
raise dbt.exceptions.DbtDatabaseError(
raise dbt_common.exceptions.DbtDatabaseError(
"Constraint name cannot be empty. Provide constraint name - column "
+ column_list
+ " and run the project again."
Expand All @@ -56,12 +260,31 @@ def render_model_constraint(cls, constraint) -> Optional[str]:
else:
return None

@classmethod
def date_function(cls):
return "getdate()"

def valid_incremental_strategies(self):
"""The set of standard builtin strategies which this adapter supports out-of-the-box.
Not used to validate custom strategies defined by end users.
"""
return ["append", "delete+insert", "merge", "microbatch"]
# Query template used by get_rows_different_sql: returns a single row with the
# row-count difference and the number of mismatched rows between two
# relations. Built on EXCEPT/UNION ALL because USING is not supported on
# Synapse (see get_rows_different_sql).
COLUMNS_EQUAL_SQL = """
with diff_count as (
SELECT
1 as id,
COUNT(*) as num_missing FROM (
(SELECT {columns} FROM {relation_a} {except_op}
SELECT {columns} FROM {relation_b})
UNION ALL
(SELECT {columns} FROM {relation_b} {except_op}
SELECT {columns} FROM {relation_a})
) as a
), table_a as (
SELECT COUNT(*) as num_rows FROM {relation_a}
), table_b as (
SELECT COUNT(*) as num_rows FROM {relation_b}
), row_count_diff as (
select
1 as id,
table_a.num_rows - table_b.num_rows as difference
from table_a, table_b
)
select
row_count_diff.difference as row_count_difference,
diff_count.num_missing as num_mismatched
from row_count_diff
join diff_count on row_count_diff.id = diff_count.id
""".strip()
Loading