77logger = logging .getLogger (__name__ )
88
99
def log_metadata_results(result_type, results, logger):
    """
    Log metadata results generically using Row.asDict() for any structure.

    Args:
        result_type: String describing what type of metadata
            (e.g., "catalogs", "schemas")
        results: List of Row objects from cursor.fetchall()
        logger: Logger instance to use for output

    Returns:
        None. Output is emitted through the supplied logger only.
    """
    if not results:
        logger.info(f"No {result_type} found")
        return

    logger.info(f"Found {len(results)} {result_type}:")

    # Log every row in full detail so the raw structure is visible.
    for i, row in enumerate(results):
        row_dict = row.asDict()
        logger.info(f"  {result_type}[{i}]: {row_dict}")

    # results is guaranteed non-empty here (early return above), so the
    # original's redundant `if results:` re-check has been dropped.
    available_fields = list(results[0].asDict().keys())
    logger.info(f"  Available fields for {result_type}: {available_fields}")
35+
36+
def extract_key_values(results, key_field):
    """
    Extract values for a specific key field from results using Row.asDict().

    Args:
        results: List of Row objects
        key_field: String name of the field to extract

    Returns:
        List of values for the specified field, or an empty list if the
        field doesn't exist in the rows (or results is empty).
    """
    if not results:
        return []

    values = []
    for row in results:
        row_dict = row.asDict()
        if key_field not in row_dict:
            # Surface what IS available so the caller can pick another key.
            # Lazy %-args keep formatting cost off the no-warning path.
            logger.warning(
                "Field '%s' not found. Available fields: %s",
                key_field,
                list(row_dict.keys()),
            )
            # Honor the documented contract: the original `break` returned
            # whatever partial list had been collected so far instead of
            # the promised empty list.
            return []
        values.append(row_dict[key_field])

    return values
63+
64+
1065def test_sea_result_set_json_array_inline ():
1166 """
1267 Test the SEA result set implementation with JSON_ARRAY format and INLINE disposition.
@@ -27,6 +82,9 @@ def test_sea_result_set_json_array_inline():
2782 )
2883 sys .exit (1 )
2984
85+ catalog = "samples"
86+ schema = "tpch"
87+
3088 try :
3189 # Create connection with SEA backend
3290 logger .info ("Creating connection with SEA backend..." )
@@ -88,37 +146,57 @@ def test_sea_result_set_json_array_inline():
88146 except ImportError :
89147 logger .warning ("PyArrow not installed, skipping Arrow tests" )
90148
91- # Test metadata commands
149+ # Test metadata commands with general logging
92150 logger .info ("Testing metadata commands..." )
93151
94152 # Get catalogs
95153 logger .info ("Getting catalogs..." )
96154 cursor .catalogs ()
97155 catalogs = cursor .fetchall ()
98- logger .info (f"Available catalogs: { [c .catalog for c in catalogs ]} " )
156+ log_metadata_results ("catalogs" , catalogs , logger )
157+
158+ # Extract catalog names using the general function
159+ catalog_names = extract_key_values (catalogs , "catalog" )
160+ if catalog_names :
161+ logger .info (f"Catalog names: { catalog_names } " )
99162
100163 # Get schemas
101164 if catalog :
102165 logger .info (f"Getting schemas for catalog '{ catalog } '..." )
103166 cursor .schemas (catalog_name = catalog )
104167 schemas = cursor .fetchall ()
105- logger .info (f"Available schemas in { catalog } : { [s .databaseName for s in schemas ]} " )
168+ log_metadata_results ("schemas" , schemas , logger )
169+
170+ # Extract schema names - try common field names
171+ schema_names = extract_key_values (schemas , "databaseName" ) or extract_key_values (schemas , "schemaName" ) or extract_key_values (schemas , "schema_name" )
172+ if schema_names :
173+ logger .info (f"Schema names: { schema_names } " )
106174
107175 # Get tables for a schema
108- if schemas :
109- schema = schemas [0 ]. databaseName
176+ if schemas and schema_names :
177+ schema = schema_names [0 ]
110178 logger .info (f"Getting tables for schema '{ schema } '..." )
111179 cursor .tables (catalog_name = catalog , schema_name = schema )
112180 tables = cursor .fetchall ()
113- logger .info (f"Available tables in { schema } : { [t .tableName for t in tables ]} " )
181+ log_metadata_results ("tables" , tables , logger )
182+
183+ # Extract table names
184+ table_names = extract_key_values (tables , "tableName" ) or extract_key_values (tables , "table_name" )
185+ if table_names :
186+ logger .info (f"Table names: { table_names } " )
114187
115188 # Get columns for a table
116- if tables :
117- table = tables [0 ]. tableName
189+ if tables and table_names :
190+ table = table_names [0 ]
118191 logger .info (f"Getting columns for table '{ table } '..." )
119192 cursor .columns (catalog_name = catalog , schema_name = schema , table_name = table )
120193 columns = cursor .fetchall ()
121- logger .info (f"Columns in { table } : { [c .column_name for c in columns ]} " )
194+ log_metadata_results ("columns" , columns , logger )
195+
196+ # Extract column names
197+ column_names = extract_key_values (columns , "column_name" ) or extract_key_values (columns , "columnName" ) or extract_key_values (columns , "COLUMN_NAME" )
198+ if column_names :
199+ logger .info (f"Column names: { column_names } " )
122200
123201 # Close cursor and connection
124202 cursor .close ()
0 commit comments