@@ -32,7 +32,6 @@ class ExecutionMetrics:
     bytes_processed: int = 0
     execution_secs: float = 0
     query_char_count: int = 0
-    total_rows: int = 0
 
     def count_job_stats(
         self,
@@ -47,13 +46,11 @@ def count_job_stats(
             query_char_count = len(getattr(row_iterator, "query", ""))
             slot_millis = getattr(row_iterator, "slot_millis", 0)
             exec_seconds = 0.0
-            total_rows = getattr(row_iterator, "total_rows", 0) or 0
 
             self.execution_count += 1
             self.query_char_count += query_char_count
             self.bytes_processed += bytes_processed
             self.slot_millis += slot_millis
-            self.total_rows += total_rows
 
         elif query_job.configuration.dry_run:
             query_char_count = len(query_job.query)
@@ -62,43 +59,39 @@ def count_job_stats(
             bytes_processed = 0
             slot_millis = 0
             exec_seconds = 0.0
-            total_rows = 0
 
         elif (stats := get_performance_stats(query_job)) is not None:
-            (
-                query_char_count,
-                bytes_processed,
-                slot_millis,
-                exec_seconds,
-                total_rows,
-            ) = stats
+            query_char_count, bytes_processed, slot_millis, exec_seconds = stats
             self.execution_count += 1
             self.query_char_count += query_char_count
             self.bytes_processed += bytes_processed
             self.slot_millis += slot_millis
             self.execution_secs += exec_seconds
-            self.total_rows += total_rows
+            write_stats_to_disk(
+                query_char_count=query_char_count,
+                bytes_processed=bytes_processed,
+                slot_millis=slot_millis,
+                exec_seconds=exec_seconds,
+            )
 
         else:
             # TODO(tswast): Pass None after making benchmark publishing robust to missing data.
             bytes_processed = 0
             query_char_count = 0
             slot_millis = 0
             exec_seconds = 0
-            total_rows = 0
 
         write_stats_to_disk(
             query_char_count=query_char_count,
             bytes_processed=bytes_processed,
             slot_millis=slot_millis,
             exec_seconds=exec_seconds,
-            total_rows=total_rows,
         )
 
 
 def get_performance_stats(
     query_job: bigquery.QueryJob,
-) -> Optional[Tuple[int, int, int, float, int]]:
+) -> Optional[Tuple[int, int, int, float]]:
     """Parse the query job for performance stats.
 
     Return None if the stats do not reflect real work done in bigquery.
@@ -121,17 +114,13 @@ def get_performance_stats(
     execution_secs = (query_job.ended - query_job.created).total_seconds()
     query_char_count = len(query_job.query)
 
-    # Extract total rows from query job
-    total_rows = getattr(query_job, "total_rows", 0) or 0
-
     return (
         query_char_count,
         # Not every job populates these. For example, slot_millis is missing
         # from queries that came from cached results.
         bytes_processed if bytes_processed else 0,
         slot_millis if slot_millis else 0,
         execution_secs,
-        total_rows,
     )
 
 
@@ -141,7 +130,6 @@ def write_stats_to_disk(
     bytes_processed: int,
     slot_millis: int,
     exec_seconds: float,
-    total_rows: Optional[int] = None,
 ):
     """For pytest runs only, log information about the query job
     to a file in order to create a performance report.
@@ -176,9 +164,3 @@ def write_stats_to_disk(
     bytes_file = os.path.join(current_directory, test_name + ".bytesprocessed")
     with open(bytes_file, "a") as f:
         f.write(str(bytes_processed) + "\n")
-
-    # store total rows
-    if total_rows is not None:
-        total_rows_file = os.path.join(current_directory, test_name + ".totalrows")
-        with open(total_rows_file, "a") as f:
-            f.write(str(total_rows) + "\n")
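
For context on how these per-test stat files get consumed: write_stats_to_disk appends one value per line to a file named after the running test (the ".bytesprocessed" file is visible in the last hunk). Below is a minimal sketch of summing those files into a single total after a pytest run; the stats directory path and the summarize_bytes_processed helper are hypothetical and not part of this change.

import glob
import os


def summarize_bytes_processed(stats_dir: str) -> int:
    # Hypothetical reporting helper, not part of the diff above. Assumes each
    # *.bytesprocessed file holds one integer per line, as appended by
    # write_stats_to_disk.
    total = 0
    for path in glob.glob(os.path.join(stats_dir, "*.bytesprocessed")):
        with open(path) as f:
            for line in f:
                line = line.strip()
                if line:
                    total += int(line)
    return total


# Example usage (directory is illustrative):
# print(summarize_bytes_processed("tests/benchmark"))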