@@ -48,10 +48,11 @@ def __init__(
         command_id: CommandId,
         status: CommandState,
         has_been_closed_server_side: bool = False,
-        has_more_rows: bool = False,
         results_queue=None,
         description=None,
         is_staging_operation: bool = False,
+        lz4_compressed: bool = False,
+        arrow_schema_bytes: bytes = b"",
     ):
         """
         A ResultSet manages the results of a single command.
@@ -79,9 +80,10 @@ def __init__(
         self.command_id = command_id
         self.status = status
         self.has_been_closed_server_side = has_been_closed_server_side
-        self.has_more_rows = has_more_rows
         self.results = results_queue
         self._is_staging_operation = is_staging_operation
+        self.lz4_compressed = lz4_compressed
+        self._arrow_schema_bytes = arrow_schema_bytes

     def __iter__(self):
         while True:
@@ -185,9 +187,24 @@ def __init__(
             has_more_rows: Whether there are more rows to fetch
         """
         # Initialize ThriftResultSet-specific attributes
-        self._arrow_schema_bytes = execute_response.arrow_schema_bytes
         self._use_cloud_fetch = use_cloud_fetch
-        self.lz4_compressed = execute_response.lz4_compressed
+        self.has_more_rows = has_more_rows
+
+        # Build the results queue if t_row_set is provided
+        results_queue = None
+        if t_row_set and execute_response.result_format is not None:
+            from databricks.sql.utils import ThriftResultSetQueueFactory
+
+            # Create the results queue using the provided format
+            results_queue = ThriftResultSetQueueFactory.build_queue(
+                row_set_type=execute_response.result_format,
+                t_row_set=t_row_set,
+                arrow_schema_bytes=execute_response.arrow_schema_bytes or b"",
+                max_download_threads=max_download_threads,
+                lz4_compressed=execute_response.lz4_compressed,
+                description=execute_response.description,
+                ssl_options=ssl_options,
+            )

         # Call parent constructor with common attributes
         super().__init__(
@@ -198,7 +215,6 @@ def __init__(
             command_id=execute_response.command_id,
             status=execute_response.status,
             has_been_closed_server_side=execute_response.has_been_closed_server_side,
-            has_more_rows=has_more_rows,
             results_queue=results_queue,
             description=execute_response.description,
             is_staging_operation=execute_response.is_staging_operation,
@@ -434,7 +450,7 @@ def map_col_type(type_):


 class SeaResultSet(ResultSet):
-    """ResultSet implementation for the SEA backend."""
+    """ResultSet implementation for SEA backend."""

     def __init__(
         self,
@@ -451,11 +467,12 @@ def __init__(

         Args:
             connection: The parent connection
+            execute_response: Response from the execute command
             sea_client: The SeaDatabricksClient instance for direct access
             buffer_size_bytes: Buffer size for fetching results
             arraysize: Default number of rows to fetch
-            execute_response: Response from the execute command (new style)
-            sea_response: Direct SEA response (legacy style)
+            result_data: Result data from SEA response (optional)
+            manifest: Manifest from SEA response (optional)
         """

         if result_data:
@@ -481,6 +498,8 @@ def __init__(
             results_queue=queue,
             description=execute_response.description,
             is_staging_operation=execute_response.is_staging_operation,
+            lz4_compressed=execute_response.lz4_compressed,
+            arrow_schema_bytes=execute_response.arrow_schema_bytes,
         )

     def _convert_to_row_objects(self, rows):
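To illustrate the constructor contract after this change, here is a minimal standalone sketch (simplified stand-ins, not the connector's real classes; the queue is a plain list and all values are invented): lz4_compressed and arrow_schema_bytes now live on the base ResultSet, while the Thrift subclass owns has_more_rows and builds its results queue before delegating to super().__init__.

# Minimal sketch of the new base/subclass split; names mirror the real
# classes but the bodies are simplified placeholders for illustration only.
class ResultSet:
    def __init__(
        self,
        command_id: str,
        results_queue=None,
        lz4_compressed: bool = False,
        arrow_schema_bytes: bytes = b"",
    ):
        # Compression flag and Arrow schema are now base-class state.
        self.command_id = command_id
        self.results = results_queue
        self.lz4_compressed = lz4_compressed
        self._arrow_schema_bytes = arrow_schema_bytes


class ThriftResultSet(ResultSet):
    def __init__(self, command_id: str, t_row_set=None, has_more_rows: bool = False):
        # Subclass-specific state first: the pagination flag and a queue
        # built from the row set (a plain list stands in for the real queue).
        self.has_more_rows = has_more_rows
        results_queue = list(t_row_set) if t_row_set else None

        # Shared state is then handed to the base constructor.
        super().__init__(
            command_id=command_id,
            results_queue=results_queue,
            lz4_compressed=True,
            arrow_schema_bytes=b"",
        )


rs = ThriftResultSet("cmd-1", t_row_set=[(1,), (2,)])
print(rs.lz4_compressed, rs.results)  # True [(1,), (2,)]

Building the queue inside the subclass keeps the base ResultSet agnostic about Thrift row-set formats; it only ever sees a ready-made results_queue.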