
Commit 0bf5119

feat: Add dataset_view parameter to get_dataset method
This commit introduces a new `dataset_view` parameter to the `get_dataset` method in the BigQuery client. This allows you to specify the level of detail (METADATA, ACL, FULL) returned when fetching a dataset. The `DatasetView` enum has been added to `enums.py`.

Unit tests have been added to verify:

- Correct query parameter (`view`) formation for each enum value.
- Correct behavior when `dataset_view` is None.
- AttributeError is raised for invalid `dataset_view` types.
1 parent 12490f2 commit 0bf5119
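For illustration, a minimal usage sketch of the new parameter; the project and dataset identifiers below are placeholders, not part of this commit:

```python
from google.cloud import bigquery
from google.cloud.bigquery.enums import DatasetView

client = bigquery.Client()  # assumes application-default credentials

# Ask the API for the metadata-level representation of the dataset.
# "my-project.my_dataset" is a placeholder dataset reference.
dataset = client.get_dataset(
    "my-project.my_dataset",
    dataset_view=DatasetView.METADATA,
)

# Omitting dataset_view (or passing None) sends no "view" query parameter,
# so the call behaves exactly as it did before this change.
dataset_default = client.get_dataset("my-project.my_dataset")
```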

3 files changed: 79 additions, 1 deletion

google/cloud/bigquery/client.py

Lines changed: 9 additions & 1 deletion
@@ -90,7 +90,7 @@
 from google.cloud.bigquery.dataset import Dataset
 from google.cloud.bigquery.dataset import DatasetListItem
 from google.cloud.bigquery.dataset import DatasetReference
-from google.cloud.bigquery.enums import AutoRowIDs
+from google.cloud.bigquery.enums import AutoRowIDs, DatasetView
 from google.cloud.bigquery.format_options import ParquetOptions
 from google.cloud.bigquery.job import (
     CopyJob,
@@ -849,6 +849,7 @@ def get_dataset(
         dataset_ref: Union[DatasetReference, str],
         retry: retries.Retry = DEFAULT_RETRY,
         timeout: TimeoutType = DEFAULT_TIMEOUT,
+        dataset_view: Optional[DatasetView] = None,
     ) -> Dataset:
         """Fetch the dataset referenced by ``dataset_ref``

@@ -866,6 +867,9 @@ def get_dataset(
             timeout (Optional[float]):
                 The number of seconds to wait for the underlying HTTP transport
                 before using ``retry``.
+            dataset_view (Optional[google.cloud.bigquery.enums.DatasetView]):
+                Specifies the level of detail to return. Defaults to None, which
+                returns the default representation of the dataset.

         Returns:
             google.cloud.bigquery.dataset.Dataset:
@@ -876,6 +880,9 @@ def get_dataset(
             dataset_ref, default_project=self.project
         )
         path = dataset_ref.path
+        params: Dict[str, Any] = {}
+        if dataset_view is not None:
+            params["view"] = dataset_view.value
         span_attributes = {"path": path}
         api_response = self._call_api(
             retry,
@@ -884,6 +891,7 @@
             method="GET",
             path=path,
             timeout=timeout,
+            query_params=params if params else None,
         )
         return Dataset.from_api_repr(api_response)
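Note that the `params` dict is forwarded only when it is non-empty (`query_params=params if params else None`), so calls that omit `dataset_view` issue the same request as before this commit.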

google/cloud/bigquery/enums.py

Lines changed: 7 additions & 0 deletions
@@ -80,6 +80,13 @@ class CreateDisposition(object):
     returned in the job result."""


+class DatasetView(enum.Enum):
+    DATASET_VIEW_UNSPECIFIED = "DATASET_VIEW_UNSPECIFIED"
+    METADATA = "METADATA"
+    ACL = "ACL"
+    FULL = "FULL"
+
+
 class DefaultPandasDTypes(enum.Enum):
     """Default Pandas DataFrem DTypes to convert BigQuery data. These
     Sentinel values are used instead of None to maintain backward compatibility,

tests/unit/test_client.py

Lines changed: 63 additions & 0 deletions
@@ -61,6 +61,7 @@
 from google.cloud.bigquery import job as bqjob
 import google.cloud.bigquery._job_helpers
 from google.cloud.bigquery.dataset import DatasetReference
+from google.cloud.bigquery.enums import DatasetView
 from google.cloud.bigquery import exceptions
 from google.cloud.bigquery import ParquetOptions
 import google.cloud.bigquery.retry
@@ -807,6 +808,68 @@ def test_get_dataset(self):

         self.assertEqual(dataset.dataset_id, self.DS_ID)

+    @pytest.mark.parametrize(
+        "dataset_view_arg,expected_param_value",
+        [
+            (None, None),
+            (DatasetView.DATASET_VIEW_UNSPECIFIED, "DATASET_VIEW_UNSPECIFIED"),
+            (DatasetView.METADATA, "METADATA"),
+            (DatasetView.ACL, "ACL"),
+            (DatasetView.FULL, "FULL"),
+        ],
+    )
+    def test_get_dataset_with_dataset_view(
+        self, dataset_view_arg, expected_param_value
+    ):
+        path = "projects/%s/datasets/%s" % (self.PROJECT, self.DS_ID)
+        creds = _make_credentials()
+        http = object()
+        client = self._make_one(project=self.PROJECT, credentials=creds, _http=http)
+        resource = {
+            "id": "%s:%s" % (self.PROJECT, self.DS_ID),
+            "datasetReference": {"projectId": self.PROJECT, "datasetId": self.DS_ID},
+        }
+        conn = client._connection = make_connection(resource)
+        dataset_ref = DatasetReference(self.PROJECT, self.DS_ID)
+
+        dataset = client.get_dataset(dataset_ref, dataset_view=dataset_view_arg)
+
+        self.assertEqual(dataset.dataset_id, self.DS_ID)
+        expected_query_params = {}
+        if expected_param_value is not None:
+            expected_query_params["view"] = expected_param_value
+
+        conn.api_request.assert_called_once_with(
+            method="GET",
+            path="/%s" % path,
+            timeout=DEFAULT_TIMEOUT,
+            query_params=expected_query_params if expected_query_params else None,
+        )
+
+    @pytest.mark.parametrize(
+        "invalid_view_value",
+        [
+            "INVALID_STRING",
+            123,
+            123.45,
+            object(),
+        ],
+    )
+    def test_get_dataset_with_invalid_dataset_view(self, invalid_view_value):
+        path = "projects/%s/datasets/%s" % (self.PROJECT, self.DS_ID)
+        creds = _make_credentials()
+        http = object()
+        client = self._make_one(project=self.PROJECT, credentials=creds, _http=http)
+        resource = {
+            "id": "%s:%s" % (self.PROJECT, self.DS_ID),
+            "datasetReference": {"projectId": self.PROJECT, "datasetId": self.DS_ID},
+        }
+        conn = client._connection = make_connection(resource)
+        dataset_ref = DatasetReference(self.PROJECT, self.DS_ID)
+
+        with pytest.raises(AttributeError):
+            client.get_dataset(dataset_ref, dataset_view=invalid_view_value)
+
     def test_ensure_bqstorage_client_creating_new_instance(self):
         bigquery_storage = pytest.importorskip("google.cloud.bigquery_storage")
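The invalid-value test cases rely on the client accessing `dataset_view.value`: values that are not `DatasetView` members (plain strings, numbers, arbitrary objects) have no `.value` attribute, so the call fails with `AttributeError` rather than being validated explicitly.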
