4040import pyarrow
4141
4242import bigframes .core
43- from bigframes .core import expression
4443import bigframes .core .compile
4544import bigframes .core .guid
4645import bigframes .core .identifiers
@@ -91,7 +90,6 @@ def to_sql(
9190 self ,
9291 array_value : bigframes .core .ArrayValue ,
9392 offset_column : Optional [str ] = None ,
94- col_id_overrides : Mapping [str , str ] = {},
9593 ordered : bool = False ,
9694 enable_cache : bool = True ,
9795 ) -> str :
@@ -105,7 +103,6 @@ def execute(
105103 array_value : bigframes .core .ArrayValue ,
106104 * ,
107105 ordered : bool = True ,
108- col_id_overrides : Mapping [str , str ] = {},
109106 use_explicit_destination : Optional [bool ] = False ,
110107 get_size_bytes : bool = False ,
111108 page_size : Optional [int ] = None ,
@@ -119,7 +116,6 @@ def execute(
119116 def export_gbq (
120117 self ,
121118 array_value : bigframes .core .ArrayValue ,
122- col_id_overrides : Mapping [str , str ],
123119 destination : bigquery .TableReference ,
124120 if_exists : Literal ["fail" , "replace" , "append" ] = "fail" ,
125121 cluster_cols : Sequence [str ] = [],
@@ -132,7 +128,6 @@ def export_gbq(
132128 def export_gcs (
133129 self ,
134130 array_value : bigframes .core .ArrayValue ,
135- col_id_overrides : Mapping [str , str ],
136131 uri : str ,
137132 format : Literal ["json" , "csv" , "parquet" ],
138133 export_options : Mapping [str , Union [bool , str ]],
def to_sql(
    self,
    array_value: bigframes.core.ArrayValue,
    offset_column: Optional[str] = None,
    ordered: bool = False,
    enable_cache: bool = True,
) -> str:
    """Compile an array value into a BigQuery SQL string.

    Args:
        array_value: the logical plan to compile.
        offset_column: when given, a sequential-offset column is promoted
            onto the plan before compilation.
        ordered: whether the emitted SQL should preserve row ordering.
        enable_cache: when True, cached subtrees replace matching parts of
            the plan before compilation.

    Returns:
        The compiled SQL text.
    """
    if offset_column:
        # NOTE(review): the promoted column's internal id is bound but never
        # used, so the output column keeps its generated name rather than
        # `offset_column` — confirm this is intended after the
        # col_id_overrides removal.
        array_value, internal_offset_col = array_value.promote_offsets()
    if enable_cache:
        node = self.replace_cached_subtrees(array_value.node)
    else:
        node = array_value.node
    return self.compiler.compile(node, ordered=ordered)
239229
240230 def execute (
241231 self ,
242232 array_value : bigframes .core .ArrayValue ,
243233 * ,
244234 ordered : bool = True ,
245- col_id_overrides : Mapping [str , str ] = {},
246235 use_explicit_destination : Optional [bool ] = False ,
247236 get_size_bytes : bool = False ,
248237 page_size : Optional [int ] = None ,
@@ -254,15 +243,12 @@ def execute(
254243 if bigframes .options .compute .enable_multi_query_execution :
255244 self ._simplify_with_caching (array_value )
256245
257- sql = self .to_sql (
258- array_value , ordered = ordered , col_id_overrides = col_id_overrides
259- )
260- adjusted_schema = array_value .schema .rename (col_id_overrides )
246+ sql = self .to_sql (array_value , ordered = ordered )
261247 job_config = bigquery .QueryJobConfig ()
262248 # Use explicit destination to avoid 10GB limit of temporary table
263249 if use_explicit_destination :
264250 destination_table = self .storage_manager .create_temp_table (
265- adjusted_schema .to_bigquery (), cluster_cols = []
251+ array_value . schema .to_bigquery (), cluster_cols = []
266252 )
267253 job_config .destination = destination_table
268254 # TODO(swast): plumb through the api_name of the user-facing api that
@@ -293,12 +279,12 @@ def iterator_supplier():
293279 )
294280 # Runs strict validations to ensure internal type predictions and ibis are completely in sync
295281 # Do not execute these validations outside of testing suite.
296- if "PYTEST_CURRENT_TEST" in os .environ and len ( col_id_overrides ) == 0 :
282+ if "PYTEST_CURRENT_TEST" in os .environ :
297283 self ._validate_result_schema (array_value , iterator .schema )
298284
299285 return ExecuteResult (
300286 arrow_batches = iterator_supplier ,
301- schema = adjusted_schema ,
287+ schema = array_value . schema ,
302288 query_job = query_job ,
303289 total_bytes = size_bytes ,
304290 total_rows = iterator .total_rows ,
@@ -307,7 +293,6 @@ def iterator_supplier():
307293 def export_gbq (
308294 self ,
309295 array_value : bigframes .core .ArrayValue ,
310- col_id_overrides : Mapping [str , str ],
311296 destination : bigquery .TableReference ,
312297 if_exists : Literal ["fail" , "replace" , "append" ] = "fail" ,
313298 cluster_cols : Sequence [str ] = [],
@@ -323,7 +308,7 @@ def export_gbq(
323308 "replace" : bigquery .WriteDisposition .WRITE_TRUNCATE ,
324309 "append" : bigquery .WriteDisposition .WRITE_APPEND ,
325310 }
326- sql = self .to_sql (array_value , ordered = False , col_id_overrides = col_id_overrides )
311+ sql = self .to_sql (array_value , ordered = False )
327312 job_config = bigquery .QueryJobConfig (
328313 write_disposition = dispositions [if_exists ],
329314 destination = destination ,
@@ -340,15 +325,13 @@ def export_gbq(
340325 def export_gcs (
341326 self ,
342327 array_value : bigframes .core .ArrayValue ,
343- col_id_overrides : Mapping [str , str ],
344328 uri : str ,
345329 format : Literal ["json" , "csv" , "parquet" ],
346330 export_options : Mapping [str , Union [bool , str ]],
347331 ):
348332 query_job = self .execute (
349333 array_value ,
350334 ordered = False ,
351- col_id_overrides = col_id_overrides ,
352335 use_explicit_destination = True ,
353336 ).query_job
354337 result_table = query_job .destination
@@ -678,18 +661,3 @@ def generate_head_plan(node: nodes.BigFrameNode, n: int):
678661
def generate_row_count_plan(node: nodes.BigFrameNode):
    """Wrap *node* in a plan that yields only its total row count."""
    count_plan = nodes.RowCountNode(node)
    return count_plan
681-
682-
def override_ids(
    node: nodes.BigFrameNode, col_id_overrides: Mapping[str, str]
) -> nodes.SelectionNode:
    """Return a SelectionNode re-aliasing *node*'s output columns.

    Each output column whose schema name appears as a key in
    *col_id_overrides* is aliased to the mapped name; every other column
    keeps its existing name. Column order is preserved.
    """
    aliased_refs = []
    for old_id, name in zip(node.ids, node.schema.names):
        out_name = col_id_overrides.get(name, name)
        aliased_refs.append(
            nodes.AliasedRef(
                expression.DerefOp(old_id),
                bigframes.core.identifiers.ColumnId(out_name),
            )
        )
    return nodes.SelectionNode(node, tuple(aliased_refs))
0 commit comments