diff --git a/samples/snippets/conftest.py b/samples/snippets/conftest.py
index 81595967ec..e19cfbceb4 100644
--- a/samples/snippets/conftest.py
+++ b/samples/snippets/conftest.py
@@ -63,6 +63,17 @@ def gcs_bucket(storage_client: storage.Client) -> Generator[str, None, None]:
         blob.delete()
 
 
+@pytest.fixture(scope="session")
+def gcs_bucket_snippets(storage_client: storage.Client) -> Generator[str, None, None]:
+    bucket_name = "bigframes_blob_test_snippet_with_data_wipeout"
+
+    yield bucket_name
+
+    bucket = storage_client.get_bucket(bucket_name)
+    for blob in bucket.list_blobs():
+        blob.delete()
+
+
 @pytest.fixture(autouse=True)
 def reset_session() -> None:
     """An autouse fixture ensuring each sample runs in a fresh session.
diff --git a/samples/snippets/multimodal_test.py b/samples/snippets/multimodal_test.py
index 1ea6a3f0a6..033fead33e 100644
--- a/samples/snippets/multimodal_test.py
+++ b/samples/snippets/multimodal_test.py
@@ -13,9 +13,9 @@
 # limitations under the License.
 
 
-def test_multimodal_dataframe(gcs_bucket: str) -> None:
+def test_multimodal_dataframe(gcs_bucket_snippets: str) -> None:
     # destination folder must be in a GCS bucket that the BQ connection service account (default or user provided) has write access to.
-    dst_bucket = f"gs://{gcs_bucket}"
+    dst_bucket = f"gs://{gcs_bucket_snippets}"
     # [START bigquery_dataframes_multimodal_dataframe_create]
     import bigframes