Skip to content

Commit ffc5568

Browse files
committed
added support for measure columns
1 parent bcab1df commit ffc5568

File tree

2 files changed

+15
-2
lines changed

2 files changed

+15
-2
lines changed

src/databricks/sql/backend/thrift_backend.py

Lines changed: 4 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -764,14 +764,16 @@ def _col_to_description(col, field=None, session_id_hex=None):
764764
else:
765765
precision, scale = None, None
766766

767-
# Extract variant type from field if available
767+
# Extract variant/measure type from field if available
768768
if field is not None:
769769
try:
770-
# Check for variant type in metadata
770+
# Check for variant/measure type in metadata
771771
if field.metadata and b"Spark:DataType:SqlName" in field.metadata:
772772
sql_type = field.metadata.get(b"Spark:DataType:SqlName")
773773
if sql_type == b"VARIANT":
774774
cleaned_type = "variant"
775+
if sql_type and b"measure" in sql_type:
776+
cleaned_type += " measure"
775777
except Exception as e:
776778
logger.debug(f"Could not extract variant type from field: {e}")
777779

tests/unit/test_thrift_backend.py

Lines changed: 11 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -2402,6 +2402,17 @@ def test_hive_schema_to_description(self):
24022402
],
24032403
[("regular_col", "string"), ("variant_col", "variant")],
24042404
),
2405+
(
2406+
[
2407+
("measure_col", ttypes.TTypeId.DOUBLE_TYPE),
2408+
("int_measure_col", ttypes.TTypeId.INT_TYPE),
2409+
],
2410+
[
2411+
("measure_col", {b"Spark:DataType:SqlName": b"double measure"}),
2412+
("int_measure_col", {b"Spark:DataType:SqlName": b"int measure"}),
2413+
],
2414+
[("measure_col", "double measure"), ("int_measure_col", "int measure")],
2415+
),
24052416
(
24062417
[("regular_col", ttypes.TTypeId.STRING_TYPE)],
24072418
None, # No arrow schema

0 commit comments

Comments (0)