Skip to content

Commit a5e9cdf

Browse files
align with old impl
Signed-off-by: varun-edachali-dbx <varun.edachali@databricks.com>
1 parent ed4d7ab commit a5e9cdf

File tree

1 file changed: +30 additions, −29 deletions

src/databricks/sql/utils.py

Lines changed: 30 additions & 29 deletions
Original file line number | Diff line number | Diff line change
@@ -227,11 +227,12 @@ def __init__(
227227
lz4_compressed: Whether the data is LZ4 compressed
228228
description: Column descriptions
229229
"""
230+
231+
self.schema_bytes = schema_bytes
232+
self.max_download_threads = max_download_threads
230233
self.lz4_compressed = lz4_compressed
231234
self.description = description
232-
self.schema_bytes = schema_bytes
233235
self._ssl_options = ssl_options
234-
self.max_download_threads = max_download_threads
235236

236237
# Table state
237238
self.table = None
@@ -240,33 +241,6 @@ def __init__(
240241
# Initialize download manager
241242
self.download_manager: Optional["ResultFileDownloadManager"] = None
242243

243-
def remaining_rows(self) -> "pyarrow.Table":
244-
"""
245-
Get all remaining rows of the cloud fetch Arrow dataframes.
246-
247-
Returns:
248-
pyarrow.Table
249-
"""
250-
if not self.table:
251-
# Return empty pyarrow table to cause retry of fetch
252-
return self._create_empty_table()
253-
254-
results = pyarrow.Table.from_pydict({}) # Empty table
255-
while self.table:
256-
table_slice = self.table.slice(
257-
self.table_row_index, self.table.num_rows - self.table_row_index
258-
)
259-
if results.num_rows > 0:
260-
results = pyarrow.concat_tables([results, table_slice])
261-
else:
262-
results = table_slice
263-
264-
self.table_row_index += table_slice.num_rows
265-
self.table = self._create_next_table()
266-
self.table_row_index = 0
267-
268-
return results
269-
270244
def next_n_rows(self, num_rows: int) -> "pyarrow.Table":
271245
"""Get up to the next n rows of the cloud fetch Arrow dataframes."""
272246
if not self.table:
@@ -320,6 +294,33 @@ def next_n_rows(self, num_rows: int) -> "pyarrow.Table":
320294
logger.info("CloudFetchQueue: Retrieved {} rows".format(results.num_rows))
321295
return results
322296

297+
def remaining_rows(self) -> "pyarrow.Table":
298+
"""
299+
Get all remaining rows of the cloud fetch Arrow dataframes.
300+
301+
Returns:
302+
pyarrow.Table
303+
"""
304+
if not self.table:
305+
# Return empty pyarrow table to cause retry of fetch
306+
return self._create_empty_table()
307+
308+
results = pyarrow.Table.from_pydict({}) # Empty table
309+
while self.table:
310+
table_slice = self.table.slice(
311+
self.table_row_index, self.table.num_rows - self.table_row_index
312+
)
313+
if results.num_rows > 0:
314+
results = pyarrow.concat_tables([results, table_slice])
315+
else:
316+
results = table_slice
317+
318+
self.table_row_index += table_slice.num_rows
319+
self.table = self._create_next_table()
320+
self.table_row_index = 0
321+
322+
return results
323+
323324
def _create_empty_table(self) -> "pyarrow.Table":
324325
"""Create a 0-row table with just the schema bytes."""
325326
if not self.schema_bytes:

0 commit comments

Comments (0)