Skip to content

Commit 8948b0c

Browse files
committed
test: add unit tests and changelog for type classification fix (#1380)
Address review feedback: add parametrized unit tests covering all five overridden methods (is_string, is_integer, is_float, is_number, is_numeric) with true/false cases and case-insensitive checks, and add a CHANGELOG entry under 1.11.7. https://claude.ai/code/session_017ZAXMwLSnqz4FqvTt5H7D1
1 parent e491e09 commit 8948b0c

2 files changed

Lines changed: 54 additions & 0 deletions

File tree

CHANGELOG.md

Lines changed: 1 addition & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -10,6 +10,7 @@
1010
- Fix `workflow_job` Python model submission method failing with dictionary attribute error ([#1360](https://github.com/databricks/dbt-databricks/issues/1360))
1111
- Fix `TestWorkflowJob` functional test that was unreachable on all profiles due to incorrect skip list, wrong model fixture, and invalid `max_retries` parameter ([#1360](https://github.com/databricks/dbt-databricks/issues/1360))
1212
- Fix column order mismatch in microbatch and replace_where incremental strategies by using INSERT BY NAME syntax ([#1338](https://github.com/databricks/dbt-databricks/issues/1338))
13+
- Fix `is_string()`, `is_number()`, `is_float()`, `is_integer()`, and `is_numeric()` returning `False` for Databricks/Spark column types by overriding them in `DatabricksColumn` with the correct Spark type names ([#1380](https://github.com/databricks/dbt-databricks/issues/1380))
1314
- Fix `dbt run --empty` failing with inline `ref()` / `source()` aliases ([dbt-labs/dbt-adapters#660](https://github.com/dbt-labs/dbt-adapters/issues/660))
1415

1516
### Under the Hood

tests/unit/test_column.py

Lines changed: 53 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -290,3 +290,56 @@ def test_parse_type_from_json_primitive_types(self):
290290
type_info = {"name": type_name}
291291
result = DatabricksColumn._parse_type_from_json(type_info)
292292
assert result == type_name
293+
294+
295+
class TestTypeClassification:
    """Unit tests for the Spark-aware type-classification overrides on
    DatabricksColumn (is_string / is_integer / is_float / is_number /
    is_numeric), covering positive, negative, and case-insensitive cases."""

    # --- is_string -------------------------------------------------------
    @pytest.mark.parametrize(
        "dtype",
        [
            "string",
            "varchar",
            "char",
            "text",
            "character varying",
            "character",
            "nchar",
            "nvarchar",
            # case-insensitivity checks
            "STRING",
            "VARCHAR",
            "CHAR",
        ],
    )
    def test_is_string_true(self, dtype):
        assert DatabricksColumn("col", dtype).is_string() is True

    @pytest.mark.parametrize("dtype", ["int", "bigint", "double", "decimal(10,2)"])
    def test_is_string_false(self, dtype):
        assert DatabricksColumn("col", dtype).is_string() is False

    # --- is_integer ------------------------------------------------------
    @pytest.mark.parametrize(
        "dtype",
        ["tinyint", "smallint", "int", "integer", "bigint", "long", "INT", "BIGINT"],
    )
    def test_is_integer_true(self, dtype):
        assert DatabricksColumn("col", dtype).is_integer() is True

    @pytest.mark.parametrize("dtype", ["float", "double", "string", "decimal(10,2)"])
    def test_is_integer_false(self, dtype):
        assert DatabricksColumn("col", dtype).is_integer() is False

    # --- is_float --------------------------------------------------------
    @pytest.mark.parametrize("dtype", ["float", "double", "real", "FLOAT", "DOUBLE"])
    def test_is_float_true(self, dtype):
        assert DatabricksColumn("col", dtype).is_float() is True

    @pytest.mark.parametrize("dtype", ["int", "bigint", "string", "decimal(10,2)"])
    def test_is_float_false(self, dtype):
        assert DatabricksColumn("col", dtype).is_float() is False

    # --- is_number -------------------------------------------------------
    @pytest.mark.parametrize(
        "dtype",
        [
            "tinyint",
            "smallint",
            "int",
            "integer",
            "bigint",
            "long",
            "float",
            "double",
            "decimal",
            "numeric",
            "real",
            # parameterized decimals, including an upper-case variant
            "decimal(10,2)",
            "decimal(38,0)",
            "DECIMAL(10,2)",
        ],
    )
    def test_is_number_true(self, dtype):
        assert DatabricksColumn("col", dtype).is_number() is True

    @pytest.mark.parametrize("dtype", ["string", "varchar", "boolean", "date", "timestamp"])
    def test_is_number_false(self, dtype):
        assert DatabricksColumn("col", dtype).is_number() is False

    # --- is_numeric ------------------------------------------------------
    def test_is_numeric_delegates_to_is_number(self):
        # is_numeric should agree with is_number for a numeric type
        col = DatabricksColumn("col", "bigint")
        assert col.is_numeric() == col.is_number()

    def test_is_numeric_false_for_string(self):
        assert DatabricksColumn("col", "string").is_numeric() is False

0 commit comments

Comments (0)