@@ -2245,3 +2245,157 @@ def test_remote_function_ingress_settings_unsupported(session):
         @session.remote_function(reuse=False, cloud_function_ingress_settings="unknown")
         def square(x: int) -> int:
             return x * x
+
+
+@pytest.mark.parametrize(
+    ("session_creator"),
+    [
+        pytest.param(bigframes.Session, id="session-constructor"),
+        pytest.param(bigframes.connect, id="connect-method"),
+    ],
+)
+@pytest.mark.flaky(retries=2, delay=120)
+def test_remote_function_w_context_manager_unnamed(
+    scalars_dfs, dataset_id, bq_cf_connection, session_creator
+):
+    def add_one(x: int) -> int:
+        return x + 1
+
+    scalars_df, scalars_pandas_df = scalars_dfs
+    pd_result = scalars_pandas_df["int64_too"].apply(add_one)
+
+    temporary_bigquery_remote_function = None
+    temporary_cloud_run_function = None
+
+    try:
+        with session_creator() as session:
+            # create a temporary remote function
+            add_one_remote_temp = session.remote_function(
+                dataset=dataset_id,
+                bigquery_connection=bq_cf_connection,
+                reuse=False,
+            )(add_one)
+
+            temporary_bigquery_remote_function = (
+                add_one_remote_temp.bigframes_remote_function
+            )
+            assert temporary_bigquery_remote_function is not None
+            assert (
+                session.bqclient.get_routine(temporary_bigquery_remote_function)
+                is not None
+            )
+
+            temporary_cloud_run_function = add_one_remote_temp.bigframes_cloud_function
+            assert temporary_cloud_run_function is not None
+            assert (
+                session.cloudfunctionsclient.get_function(
+                    name=temporary_cloud_run_function
+                )
+                is not None
+            )
+
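+            # apply the temporary remote function and compare against pandas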
+            bf_result = scalars_df["int64_too"].apply(add_one_remote_temp).to_pandas()
+            pandas.testing.assert_series_equal(bf_result, pd_result, check_dtype=False)
+
+        # outside the with statement context manager the temporary BQ remote
+        # function and the underlying cloud run function should have been
+        # cleaned up
+        assert temporary_bigquery_remote_function is not None
+        with pytest.raises(google.api_core.exceptions.NotFound):
+            session.bqclient.get_routine(temporary_bigquery_remote_function)
+        # the deletion of cloud function happens in a non-blocking way, ensure that
+        # it either exists in a being-deleted state, or is already deleted
+        assert temporary_cloud_run_function is not None
+        try:
+            gcf = session.cloudfunctionsclient.get_function(
+                name=temporary_cloud_run_function
+            )
+            assert gcf.state is functions_v2.Function.State.DELETING
+        except google.cloud.exceptions.NotFound:
+            pass
+    finally:
+        # clean up the gcp assets created for the temporary remote function,
+        # just in case it was not explicitly cleaned up in the try clause due
+        # to assertion failure or exception earlier than that
+        cleanup_remote_function_assets(
+            session.bqclient, session.cloudfunctionsclient, add_one_remote_temp
+        )
+
+
+@pytest.mark.parametrize(
+    ("session_creator"),
+    [
+        pytest.param(bigframes.Session, id="session-constructor"),
+        pytest.param(bigframes.connect, id="connect-method"),
+    ],
+)
+@pytest.mark.flaky(retries=2, delay=120)
+def test_remote_function_w_context_manager_named(
+    scalars_dfs, dataset_id, bq_cf_connection, session_creator
+):
+    def add_one(x: int) -> int:
+        return x + 1
+
+    scalars_df, scalars_pandas_df = scalars_dfs
+    pd_result = scalars_pandas_df["int64_too"].apply(add_one)
+
+    persistent_bigquery_remote_function = None
+    persistent_cloud_run_function = None
+
+    try:
+        with session_creator() as session:
+            # create a persistent remote function
+            name = test_utils.prefixer.Prefixer("bigframes", "").create_prefix()
+            add_one_remote_persist = session.remote_function(
+                dataset=dataset_id,
+                bigquery_connection=bq_cf_connection,
+                reuse=False,
+                name=name,
+            )(add_one)
+
+            persistent_bigquery_remote_function = (
+                add_one_remote_persist.bigframes_remote_function
+            )
+            assert persistent_bigquery_remote_function is not None
+            assert (
+                session.bqclient.get_routine(persistent_bigquery_remote_function)
+                is not None
+            )
+
+            persistent_cloud_run_function = (
+                add_one_remote_persist.bigframes_cloud_function
+            )
+            assert persistent_cloud_run_function is not None
+            assert (
+                session.cloudfunctionsclient.get_function(
+                    name=persistent_cloud_run_function
+                )
+                is not None
+            )
+
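+            # apply the persistent remote function and compare against pandas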
+            bf_result = (
+                scalars_df["int64_too"].apply(add_one_remote_persist).to_pandas()
+            )
+            pandas.testing.assert_series_equal(bf_result, pd_result, check_dtype=False)
+
+        # outside the with statement context manager the persistent BQ remote
+        # function and the underlying cloud run function should still exist
+        assert persistent_bigquery_remote_function is not None
+        assert (
+            session.bqclient.get_routine(persistent_bigquery_remote_function)
+            is not None
+        )
+        assert persistent_cloud_run_function is not None
+        assert (
+            session.cloudfunctionsclient.get_function(
+                name=persistent_cloud_run_function
+            )
+            is not None
+        )
+    finally:
+        # clean up the gcp assets created for the persistent remote function
+        cleanup_remote_function_assets(
+            session.bqclient, session.cloudfunctionsclient, add_one_remote_persist
+        )