Commit 392e45f

run e2e tests for metadata queries, run transform regardless of None value
Signed-off-by: varun-edachali-dbx <varun.edachali@databricks.com>
1 parent 244e3c8 commit 392e45f

5 files changed: +95 -23 lines changed


src/databricks/sql/backend/sea/backend.py

Lines changed: 22 additions & 1 deletion
@@ -20,7 +20,12 @@
     MetadataCommands,
 )
 from databricks.sql.backend.sea.utils.metadata_mappings import MetadataColumnMappings
+from databricks.sql.backend.sea.utils.metadata_transforms import (
+    create_table_catalog_transform,
+)
 from databricks.sql.backend.sea.utils.normalize import normalize_sea_type_to_thrift
+from databricks.sql.backend.sea.utils.result_column import ResultColumn
+from databricks.sql.backend.sea.utils.conversion import SqlType
 from databricks.sql.thrift_api.TCLIService import ttypes
 
 if TYPE_CHECKING:
@@ -740,7 +745,23 @@ def get_schemas(
         assert isinstance(
             result, SeaResultSet
         ), "Expected SeaResultSet from SEA backend"
-        result.prepare_metadata_columns(MetadataColumnMappings.SCHEMA_COLUMNS)
+
+        # Create dynamic schema columns with catalog name bound to TABLE_CATALOG
+        schema_columns = []
+        for col in MetadataColumnMappings.SCHEMA_COLUMNS:
+            if col.thrift_col_name == "TABLE_CATALOG":
+                # Create a new column with the catalog transform bound
+                dynamic_col = ResultColumn(
+                    col.thrift_col_name,
+                    col.sea_col_name,
+                    col.thrift_col_type,
+                    create_table_catalog_transform(catalog_name),
+                )
+                schema_columns.append(dynamic_col)
+            else:
+                schema_columns.append(col)
+
+        result.prepare_metadata_columns(schema_columns)
         return result
 
     def get_tables(
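Note that `get_schemas` builds a fresh list rather than mutating `MetadataColumnMappings.SCHEMA_COLUMNS`: the class-level list is shared across calls, so binding a transform in place would leak one request's catalog into the next. A minimal sketch of the pattern, using a hypothetical stand-in for `ResultColumn` (the real class lives in `databricks.sql.backend.sea.utils.result_column`):

    from dataclasses import dataclass
    from typing import Callable, Optional

    @dataclass(frozen=True)
    class Column:  # hypothetical stand-in for ResultColumn
        thrift_col_name: str
        sea_col_name: Optional[str]
        thrift_col_type: str
        transform_value: Optional[Callable] = None

    SHARED_COLUMNS = [Column("TABLE_CATALOG", "catalog", "string")]

    def bind_catalog(columns, catalog_name):
        # Rebuild the list so the shared template is never mutated.
        return [
            Column(c.thrift_col_name, c.sea_col_name, c.thrift_col_type,
                   lambda value: catalog_name)
            if c.thrift_col_name == "TABLE_CATALOG"
            else c
            for c in columns
        ]

    bound = bind_catalog(SHARED_COLUMNS, "main")
    assert SHARED_COLUMNS[0].transform_value is None   # template untouched
    assert bound[0].transform_value(None) == "main"    # per-call binding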

src/databricks/sql/backend/sea/result_set.py

Lines changed: 8 additions & 9 deletions
@@ -361,14 +361,13 @@ def _normalise_arrow_metadata_cols(self, table: "pyarrow.Table") -> "pyarrow.Table":
                 column = pyarrow.nulls(table.num_rows)
             else:
                 column = table.column(old_idx)
-                # Apply transform if available
-                if result_column.transform_value:
-                    # Convert to list, apply transform, and convert back
-                    values = column.to_pylist()
-                    transformed_values = [
-                        result_column.transform_value(v) for v in values
-                    ]
-                    column = pyarrow.array(transformed_values)
+
+            # Apply transform if available
+            if result_column.transform_value:
+                # Convert to list, apply transform, and convert back
+                values = column.to_pylist()
+                transformed_values = [result_column.transform_value(v) for v in values]
+                column = pyarrow.array(transformed_values)
 
             new_columns.append(column)
             column_names.append(result_column.thrift_col_name)
@@ -392,7 +391,7 @@ def _normalise_json_metadata_cols(self, rows: List[List[str]]) -> List[List[Any]]:
 
                 value = None if old_idx is None else row[old_idx]
                 # Apply transform if available
-                if value is not None and result_column.transform_value:
+                if result_column.transform_value:
                     value = result_column.transform_value(value)
                 new_row.append(value)
             transformed_rows.append(new_row)
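This change is behavioral, not cosmetic: in both the Arrow and JSON paths, the transform is now applied unconditionally (and, in the Arrow path, also to the all-null column produced when the SEA result lacks the column), so transforms see NULL cells too. That is what lets `transform_remarks` turn a missing comment into an empty string, and lets the TABLE_CATALOG closure overwrite a NULL cell. A self-contained illustration, with the transform inlined rather than imported:

    def transform_remarks(value):
        if value is None:
            return ""
        return value

    cells = [None, "a comment"]

    # Before: the guard skipped None, so NULLs survived untransformed.
    before = [transform_remarks(v) if v is not None else v for v in cells]
    assert before == [None, "a comment"]

    # After: the transform runs regardless of None and normalises it.
    after = [transform_remarks(v) for v in cells]
    assert after == ["", "a comment"]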

src/databricks/sql/backend/sea/utils/metadata_mappings.py

Lines changed: 6 additions & 2 deletions
@@ -26,7 +26,9 @@ class MetadataColumnMappings:
     SCHEMA_COLUMN = ResultColumn("TABLE_SCHEM", "namespace", SqlType.STRING)
     TABLE_NAME_COLUMN = ResultColumn("TABLE_NAME", "tableName", SqlType.STRING)
     TABLE_TYPE_COLUMN = ResultColumn("TABLE_TYPE", "tableType", SqlType.STRING)
-    REMARKS_COLUMN = ResultColumn("REMARKS", "remarks", SqlType.STRING, transform_remarks)
+    REMARKS_COLUMN = ResultColumn(
+        "REMARKS", "remarks", SqlType.STRING, transform_remarks
+    )
     TYPE_CATALOG_COLUMN = ResultColumn("TYPE_CAT", None, SqlType.STRING)
     TYPE_SCHEM_COLUMN = ResultColumn("TYPE_SCHEM", None, SqlType.STRING)
     TYPE_NAME_COLUMN = ResultColumn("TYPE_NAME", None, SqlType.STRING)
@@ -54,7 +56,9 @@ class MetadataColumnMappings:
         transform_ordinal_position,
     )
 
-    NULLABLE_COLUMN = ResultColumn("NULLABLE", "isNullable", SqlType.INT, transform_nullable)
+    NULLABLE_COLUMN = ResultColumn(
+        "NULLABLE", "isNullable", SqlType.INT, transform_nullable
+    )
     COLUMN_DEF_COLUMN = ResultColumn("COLUMN_DEF", None, SqlType.STRING)
     SQL_DATA_TYPE_COLUMN = ResultColumn("SQL_DATA_TYPE", None, SqlType.INT)
     SQL_DATETIME_SUB_COLUMN = ResultColumn("SQL_DATETIME_SUB", None, SqlType.INT)

src/databricks/sql/backend/sea/utils/metadata_transforms.py

Lines changed: 15 additions & 3 deletions
@@ -16,10 +16,12 @@ def transform_is_nullable(value):
         return "NO"
     return value
 
+
 def transform_remarks(value):
-    if value is None:
-        return ""
-    return value
+    if value is None:
+        return ""
+    return value
+
 
 def transform_nullable(value):
     """Transform NULLABLE column: boolean/string to integer."""
@@ -69,3 +71,13 @@ def transform_ordinal_position(value):
     if isinstance(value, int):
         return value - 1
     return value
+
+
+def create_table_catalog_transform(catalog_name):
+    """Factory function to create TABLE_CATALOG transform with bound catalog name."""
+
+    def transform_table_catalog(value):
+        """Transform TABLE_CATALOG: return the catalog name for all rows."""
+        return catalog_name
+
+    return transform_table_catalog
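A quick usage sketch of the new factory (the catalog name here is made up): the returned closure ignores the incoming cell value entirely and always yields the bound name, so every row of TABLE_CATALOG comes back uniform.

    from databricks.sql.backend.sea.utils.metadata_transforms import (
        create_table_catalog_transform,
    )

    transform = create_table_catalog_transform("hive_metastore")

    # NULL cells and mismatched values alike are rewritten to the bound catalog.
    assert transform(None) == "hive_metastore"
    assert transform("spark_catalog") == "hive_metastore"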

tests/e2e/test_driver.py

Lines changed: 44 additions & 8 deletions
@@ -386,8 +386,17 @@ def test_create_table_will_return_empty_result_set(self, extra_params):
             finally:
                 cursor.execute("DROP TABLE IF EXISTS {}".format(table_name))
 
-    def test_get_tables(self):
-        with self.cursor({}) as cursor:
+    @pytest.mark.parametrize(
+        "backend_params",
+        [
+            {},
+            {
+                "use_sea": True,
+            },
+        ],
+    )
+    def test_get_tables(self, backend_params):
+        with self.cursor(backend_params) as cursor:
             table_name = "table_{uuid}".format(uuid=str(uuid4()).replace("-", "_"))
             table_names = [table_name + "_1", table_name + "_2"]
 
@@ -432,8 +441,17 @@ def test_get_tables(self):
                 for table in table_names:
                     cursor.execute("DROP TABLE IF EXISTS {}".format(table))
 
-    def test_get_columns(self):
-        with self.cursor({}) as cursor:
+    @pytest.mark.parametrize(
+        "backend_params",
+        [
+            {},
+            {
+                "use_sea": True,
+            },
+        ],
+    )
+    def test_get_columns(self, backend_params):
+        with self.cursor(backend_params) as cursor:
             table_name = "table_{uuid}".format(uuid=str(uuid4()).replace("-", "_"))
             table_names = [table_name + "_1", table_name + "_2"]
 
@@ -544,8 +562,17 @@ def test_escape_single_quotes(self):
             rows = cursor.fetchall()
             assert rows[0]["col_1"] == "you're"
 
-    def test_get_schemas(self):
-        with self.cursor({}) as cursor:
+    @pytest.mark.parametrize(
+        "backend_params",
+        [
+            {},
+            {
+                "use_sea": True,
+            },
+        ],
+    )
+    def test_get_schemas(self, backend_params):
+        with self.cursor(backend_params) as cursor:
             database_name = "db_{uuid}".format(uuid=str(uuid4()).replace("-", "_"))
             try:
                 cursor.execute("CREATE DATABASE IF NOT EXISTS {}".format(database_name))
@@ -562,8 +589,17 @@ def test_get_schemas(self):
             finally:
                 cursor.execute("DROP DATABASE IF EXISTS {}".format(database_name))
 
-    def test_get_catalogs(self):
-        with self.cursor({}) as cursor:
+    @pytest.mark.parametrize(
+        "backend_params",
+        [
+            {},
+            {
+                "use_sea": True,
+            },
+        ],
+    )
+    def test_get_catalogs(self, backend_params):
+        with self.cursor(backend_params) as cursor:
             cursor.catalogs()
             cursor.fetchall()
             catalogs_desc = cursor.description
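Each `@pytest.mark.parametrize` above fans the test out into two items, one per `backend_params` value, so the metadata queries run against both the default Thrift backend and the SEA (Statement Execution API) backend in a single session. A stripped-down sketch of the mechanism, independent of the e2e harness (the test name and body here are stand-ins):

    import pytest

    @pytest.mark.parametrize(
        "backend_params",
        [
            {},                  # default Thrift backend
            {"use_sea": True},   # SEA backend
        ],
    )
    def test_backend_params_fanout(backend_params):
        # Stand-in body; the real tests open self.cursor(backend_params) and
        # call cursor.tables()/columns()/schemas()/catalogs() against it.
        assert backend_params.get("use_sea", False) in (False, True)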
