@@ -43,7 +43,7 @@ def __init__(
         command_id: CommandId,
         status: CommandState,
         has_been_closed_server_side: bool = False,
-        is_direct_results: bool = False,
+        has_more_rows: bool = False,
         results_queue=None,
         description: List[Tuple] = [],
         is_staging_operation: bool = False,
@@ -61,7 +61,7 @@ def __init__(
         :param command_id: The command ID
         :param status: The command status
         :param has_been_closed_server_side: Whether the command has been closed on the server
-        :param is_direct_results: Whether the command has more rows
+        :param has_more_rows: Whether the command has more rows
         :param results_queue: The results queue
         :param description: column description of the results
         :param is_staging_operation: Whether the command is a staging operation
@@ -76,7 +76,7 @@ def __init__(
         self.command_id = command_id
         self.status = status
         self.has_been_closed_server_side = has_been_closed_server_side
-        self.is_direct_results = is_direct_results
+        self.has_more_rows = has_more_rows
         self.results = results_queue
         self._is_staging_operation = is_staging_operation
         self.lz4_compressed = lz4_compressed
@@ -170,7 +170,11 @@ def close(self) -> None:
         been closed on the server for some other reason, issue a request to the server to close it.
         """
         try:
-            self.results.close()
+            if self.results is not None:
+                self.results.close()
+            else:
+                logger.warning("result set close: queue not initialized")
+
             if (
                 self.status != CommandState.CLOSED
                 and not self.has_been_closed_server_side
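The new guard above avoids an AttributeError when the queue was never built (for example, when no t_row_set was supplied and self.results is still None). A minimal sketch of the behaviour, using a hypothetical stand-in class rather than the library's result set:

# Sketch only: FakeResultSet is a hypothetical stand-in, not the library's class.
import logging

logger = logging.getLogger(__name__)


class FakeResultSet:
    def __init__(self, results_queue=None):
        self.results = results_queue  # stays None when no queue was built

    def close(self):
        if self.results is not None:
            self.results.close()
        else:
            # Mirrors the patched behaviour: warn instead of raising.
            logger.warning("result set close: queue not initialized")


FakeResultSet().close()  # previously: AttributeError on NoneType.close()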
@@ -193,14 +197,13 @@ def __init__(
         connection: Connection,
         execute_response: ExecuteResponse,
         thrift_client: ThriftDatabricksClient,
-        session_id_hex: Optional[str],
         buffer_size_bytes: int = 104857600,
         arraysize: int = 10000,
         use_cloud_fetch: bool = True,
         t_row_set=None,
         max_download_threads: int = 10,
         ssl_options=None,
-        is_direct_results: bool = True,
+        has_more_rows: bool = True,
     ):
         """
         Initialize a ThriftResultSet with direct access to the ThriftDatabricksClient.
@@ -215,13 +218,13 @@ def __init__(
         :param t_row_set: The TRowSet containing result data (if available)
         :param max_download_threads: Maximum number of download threads for cloud fetch
         :param ssl_options: SSL options for cloud fetch
-        :param is_direct_results: Whether there are more rows to fetch
+        :param has_more_rows: Whether there are more rows to fetch
         """
-        self.num_downloaded_chunks = 0
+        self.num_chunks = 0

         # Initialize ThriftResultSet-specific attributes
         self._use_cloud_fetch = use_cloud_fetch
-        self.is_direct_results = is_direct_results
+        self.has_more_rows = has_more_rows

         # Build the results queue if t_row_set is provided
         results_queue = None
@@ -237,12 +240,12 @@ def __init__(
                 lz4_compressed=execute_response.lz4_compressed,
                 description=execute_response.description,
                 ssl_options=ssl_options,
-                session_id_hex=session_id_hex,
+                session_id_hex=connection.get_session_id_hex(),
                 statement_id=execute_response.command_id.to_hex_guid(),
-                chunk_id=self.num_downloaded_chunks,
+                chunk_id=self.num_chunks,
             )
             if t_row_set.resultLinks:
-                self.num_downloaded_chunks += len(t_row_set.resultLinks)
+                self.num_chunks += len(t_row_set.resultLinks)

         # Call parent constructor with common attributes
         super().__init__(
@@ -253,7 +256,7 @@ def __init__(
             command_id=execute_response.command_id,
             status=execute_response.status,
             has_been_closed_server_side=execute_response.has_been_closed_server_side,
-            is_direct_results=is_direct_results,
+            has_more_rows=has_more_rows,
             results_queue=results_queue,
             description=execute_response.description,
             is_staging_operation=execute_response.is_staging_operation,
@@ -266,7 +269,7 @@ def __init__(
             self._fill_results_buffer()

     def _fill_results_buffer(self):
-        results, is_direct_results, result_links_count = self.backend.fetch_results(
+        results, has_more_rows, result_links_count = self.backend.fetch_results(
             command_id=self.command_id,
             max_rows=self.arraysize,
             max_bytes=self.buffer_size_bytes,
@@ -275,11 +278,11 @@ def _fill_results_buffer(self):
             arrow_schema_bytes=self._arrow_schema_bytes,
             description=self.description,
             use_cloud_fetch=self._use_cloud_fetch,
-            chunk_id=self.num_downloaded_chunks,
+            chunk_id=self.num_chunks,
         )
         self.results = results
-        self.is_direct_results = is_direct_results
-        self.num_downloaded_chunks += result_links_count
+        self.has_more_rows = has_more_rows
+        self.num_chunks += result_links_count

     def _convert_columnar_table(self, table):
         column_names = [c[0] for c in self.description]
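For context, the renamed flag is what keeps the fetch loops below pulling batches until the server reports no more rows, while num_chunks counts the result links seen so far (the real loops also stop once the command is closed server-side). A rough, self-contained sketch of that loop shape; the fetch_results callable here is a placeholder, not the backend API:

# Rough sketch of the buffering loop the renamed flag drives; not library code.
def drain(fetch_results, arraysize=10000):
    rows, has_more_rows, num_chunks = [], True, 0
    while has_more_rows:
        batch, has_more_rows, link_count = fetch_results(max_rows=arraysize)
        num_chunks += link_count  # counts result links handed out, not completed downloads
        rows.extend(batch)
    return rows, num_chunks


# Tiny usage example with canned batches.
batches = iter([([1, 2], True, 1), ([3], False, 1)])
assert drain(lambda max_rows: next(batches)) == ([1, 2, 3], 2)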
@@ -326,7 +329,7 @@ def fetchmany_arrow(self, size: int) -> "pyarrow.Table":
         while (
             n_remaining_rows > 0
             and not self.has_been_closed_server_side
-            and self.is_direct_results
+            and self.has_more_rows
         ):
             self._fill_results_buffer()
             partial_results = self.results.next_n_rows(n_remaining_rows)
@@ -351,7 +354,7 @@ def fetchmany_columnar(self, size: int):
         while (
             n_remaining_rows > 0
             and not self.has_been_closed_server_side
-            and self.is_direct_results
+            and self.has_more_rows
         ):
             self._fill_results_buffer()
             partial_results = self.results.next_n_rows(n_remaining_rows)
@@ -366,7 +369,7 @@ def fetchall_arrow(self) -> "pyarrow.Table":
         results = self.results.remaining_rows()
         self._next_row_index += results.num_rows
         partial_result_chunks = [results]
-        while not self.has_been_closed_server_side and self.is_direct_results:
+        while not self.has_been_closed_server_side and self.has_more_rows:
            self._fill_results_buffer()
            partial_results = self.results.remaining_rows()
            if isinstance(results, ColumnTable) and isinstance(
@@ -392,7 +395,7 @@ def fetchall_columnar(self):
         results = self.results.remaining_rows()
         self._next_row_index += results.num_rows

-        while not self.has_been_closed_server_side and self.is_direct_results:
+        while not self.has_been_closed_server_side and self.has_more_rows:
             self._fill_results_buffer()
             partial_results = self.results.remaining_rows()
             results = self.merge_columnar(results, partial_results)
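Taken together, the constructor change means callers no longer thread session_id_hex through ThriftResultSet (it is read from connection.get_session_id_hex() when building the queue) and pass has_more_rows instead of is_direct_results. A hedged sketch of the new calling shape, exercised against stubs so it runs without a live backend; StubConnection and StubThriftResultSet are stand-ins, not the library's classes:

# Stubs mirror only the signature change described in the diff above.
from typing import Optional


class StubConnection:
    def get_session_id_hex(self) -> str:
        return "0123abcd"


class StubThriftResultSet:
    # New-style signature: no session_id_hex parameter, flag renamed to has_more_rows.
    def __init__(self, connection, execute_response=None, thrift_client=None,
                 use_cloud_fetch: bool = True, has_more_rows: bool = True):
        self.session_id_hex: Optional[str] = connection.get_session_id_hex()
        self.has_more_rows = has_more_rows


rs = StubThriftResultSet(StubConnection(), has_more_rows=True)
assert rs.session_id_hex == "0123abcd" and rs.has_more_rows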