
Commit 095c552

fix all lint!

1 parent b8f758a commit 095c552

Showing 15 changed files with 60 additions and 34 deletions.

bigquery_storage/pyarrow/append_rows_with_arrow.py

Lines changed: 9 additions & 3 deletions
@@ -13,8 +13,10 @@
 # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 # See the License for the specific language governing permissions and
 # limitations under the License.
+from concurrent.futures import Future
 import datetime
 import decimal
+from typing import Iterable

 from google.cloud import bigquery
 from google.cloud import bigquery_storage_v1
@@ -158,7 +160,9 @@ def generate_pyarrow_table(num_rows: int = TABLE_LENGTH) -> pa.Table:
     return table


-def generate_write_requests(pyarrow_table):
+def generate_write_requests(
+    pyarrow_table: pa.Table,
+) -> Iterable[gapic_types.AppendRowsRequest]:
     # Determine max_chunksize of the record batches. Because max size of
     # AppendRowsRequest is 10 MB, we need to split the table if it's too big.
     # See: https://cloud.google.com/bigquery/docs/reference/storage/rpc/google.cloud.bigquery.storage.v1#appendrowsrequest
@@ -173,7 +177,9 @@ def generate_write_requests(pyarrow_table):
     yield request


-def verify_result(client, table, futures):
+def verify_result(
+    client: bigquery.Client, table: bigquery.Table, futures: "list[Future]"
+) -> None:
     bq_table = client.get_table(table)

     # Verify table schema.
@@ -190,7 +196,7 @@ def verify_result(client, table, futures):
     assert len(futures) == 2


-def main(project_id, dataset):
+def main(project_id: str, dataset: bigquery.Dataset) -> None:
     # Initialize clients.
     write_client = bqstorage_write_client()
     bq_client = bigquery.Client()
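A note on the generate_write_requests() change: a function whose body contains yield is a generator, and type checkers accept Iterable[T], Iterator[T], or Generator[YieldT, SendT, ReturnT] as its declared return type, so the loose Iterable[gapic_types.AppendRowsRequest] above is valid. The quoted "list[Future]" annotation in verify_result() is a string so the sample still runs on Python versions where list cannot be subscripted at runtime. A minimal self-contained sketch of the generator pattern (the names here are illustrative, not from the sample):

from typing import Iterable, List


def chunked(values: List[int], size: int) -> Iterable[List[int]]:
    # A generator: yields successive slices of `values`. Callers only
    # iterate over the result, so Iterable[...] is a sufficient return type.
    for start in range(0, len(values), size):
        yield values[start : start + size]


print(list(chunked([1, 2, 3, 4, 5], 2)))  # [[1, 2], [3, 4], [5]]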

bigquery_storage/pyarrow/append_rows_with_arrow_test.py

Lines changed: 3 additions & 1 deletion
@@ -12,8 +12,10 @@
 # See the License for the specific language governing permissions and
 # limitations under the License.

+from google.cloud import bigquery
+
 from . import append_rows_with_arrow


-def test_append_rows_with_arrow(project_id, dataset):
+def test_append_rows_with_arrow(project_id: str, dataset: bigquery.Dataset) -> None:
     append_rows_with_arrow.main(project_id, dataset)

bigquery_storage/quickstart/quickstart.py

Lines changed: 1 addition & 1 deletion
@@ -15,7 +15,7 @@
 import argparse


-def main(project_id="your-project-id", snapshot_millis=0):
+def main(project_id: str = "your-project-id", snapshot_millis: int = 0) -> None:
     # [START bigquerystorage_quickstart]
     from google.cloud.bigquery_storage import BigQueryReadClient, types
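Annotating main() as returning None is not just documentation. Assuming the samples are checked with mypy (the lint configuration itself is not part of this commit), an explicit None return type lets the checker reject code that tries to use the result, for example:

def main(project_id: str = "your-project-id", snapshot_millis: int = 0) -> None:
    # The body only performs I/O; there is nothing meaningful to return.
    print(project_id, snapshot_millis)


result = main("my-project")  # flagged by mypy: "main" does not return a value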

bigquery_storage/quickstart/quickstart_test.py

Lines changed: 7 additions & 3 deletions
@@ -14,23 +14,27 @@

 import datetime

+import pytest
+
 from . import quickstart


-def now_millis():
+def now_millis() -> int:
     return int(
         (datetime.datetime.utcnow() - datetime.datetime(1970, 1, 1)).total_seconds()
         * 1000
     )


-def test_quickstart_wo_snapshot(capsys, project_id):
+def test_quickstart_wo_snapshot(capsys: pytest.CaptureFixture, project_id: str) -> None:
     quickstart.main(project_id)
     out, _ = capsys.readouterr()
     assert "unique names in states: WA" in out


-def test_quickstart_with_snapshot(capsys, project_id):
+def test_quickstart_with_snapshot(
+    capsys: pytest.CaptureFixture, project_id: str
+) -> None:
     quickstart.main(project_id, now_millis() - 5000)
     out, _ = capsys.readouterr()
     assert "unique names in states: WA" in out

bigquery_storage/snippets/append_rows_pending.py

Lines changed: 3 additions & 4 deletions
@@ -18,10 +18,9 @@
 using the low-level generated client for Python.
 """

-from google.protobuf import descriptor_pb2
-
 from google.cloud import bigquery_storage_v1
 from google.cloud.bigquery_storage_v1 import types, writer
+from google.protobuf import descriptor_pb2

 # If you update the customer_record.proto protocol buffer definition, run:
 #
@@ -31,14 +30,14 @@
 from . import customer_record_pb2


-def create_row_data(row_num: int, name: str):
+def create_row_data(row_num: int, name: str) -> bytes:
     row = customer_record_pb2.CustomerRecord()
     row.row_num = row_num
     row.customer_name = name
     return row.SerializeToString()


-def append_rows_pending(project_id: str, dataset_id: str, table_id: str):
+def append_rows_pending(project_id: str, dataset_id: str, table_id: str) -> None:
     """Create a write stream, write some sample data, and commit the stream."""
     write_client = bigquery_storage_v1.BigQueryWriteClient()
     parent = write_client.table_path(project_id, dataset_id, table_id)
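The import reshuffling in this and the following files (google.protobuf moved below the google.cloud imports, and import pytest moved below from google.cloud import bigquery in the tests) is consistent with isort sorting straight and from-imports together alphabetically within the third-party section, as its force_sort_within_sections option (used by the "google" profile) does. That reading is an inference; the isort configuration is not shown in this commit. The resulting order, sketched:

# One alphabetical third-party block, mixing "import x" and "from x import y":
from google.cloud import bigquery_storage_v1                # google.cloud...
from google.cloud.bigquery_storage_v1 import types, writer  # google.cloud...
from google.protobuf import descriptor_pb2                  # google.protobuf sorts after google.cloud
import pytest                                               # pytest sorts after google.*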

bigquery_storage/snippets/append_rows_pending_test.py

Lines changed: 2 additions & 3 deletions
@@ -15,9 +15,8 @@
 import pathlib
 import random

-import pytest
-
 from google.cloud import bigquery
+import pytest

 from . import append_rows_pending

@@ -51,7 +50,7 @@ def test_append_rows_pending(
     capsys: pytest.CaptureFixture,
     bigquery_client: bigquery.Client,
     sample_data_table: str,
-):
+) -> None:
     project_id, dataset_id, table_id = sample_data_table.split(".")
     append_rows_pending.append_rows_pending(
         project_id=project_id, dataset_id=dataset_id, table_id=table_id

bigquery_storage/snippets/append_rows_proto2.py

Lines changed: 2 additions & 3 deletions
@@ -20,10 +20,9 @@
 import datetime
 import decimal

-from google.protobuf import descriptor_pb2
-
 from google.cloud import bigquery_storage_v1
 from google.cloud.bigquery_storage_v1 import types, writer
+from google.protobuf import descriptor_pb2

 # If you make updates to the sample_data.proto protocol buffers definition,
 # run:
@@ -34,7 +33,7 @@
 from . import sample_data_pb2


-def append_rows_proto2(project_id: str, dataset_id: str, table_id: str):
+def append_rows_proto2(project_id: str, dataset_id: str, table_id: str) -> None:
     """Create a write stream, write some sample data, and commit the stream."""
     write_client = bigquery_storage_v1.BigQueryWriteClient()
     parent = write_client.table_path(project_id, dataset_id, table_id)
parent = write_client.table_path(project_id, dataset_id, table_id)

bigquery_storage/snippets/append_rows_proto2_test.py

Lines changed: 2 additions & 3 deletions
@@ -17,9 +17,8 @@
 import pathlib
 import random

-import pytest
-
 from google.cloud import bigquery
+import pytest

 from . import append_rows_proto2

@@ -53,7 +52,7 @@ def test_append_rows_proto2(
     capsys: pytest.CaptureFixture,
     bigquery_client: bigquery.Client,
     sample_data_table: str,
-):
+) -> None:
     project_id, dataset_id, table_id = sample_data_table.split(".")
     append_rows_proto2.append_rows_proto2(
         project_id=project_id, dataset_id=dataset_id, table_id=table_id

bigquery_storage/snippets/conftest.py

Lines changed: 12 additions & 7 deletions
@@ -12,16 +12,17 @@
 # See the License for the specific language governing permissions and
 # limitations under the License.

-import pytest
-import test_utils.prefixer
+from typing import Generator

 from google.cloud import bigquery
+import pytest
+import test_utils.prefixer

 prefixer = test_utils.prefixer.Prefixer("python-bigquery-storage", "samples/snippets")


 @pytest.fixture(scope="session", autouse=True)
-def cleanup_datasets(bigquery_client: bigquery.Client):
+def cleanup_datasets(bigquery_client: bigquery.Client) -> None:
     for dataset in bigquery_client.list_datasets():
         if prefixer.should_cleanup(dataset.dataset_id):
             bigquery_client.delete_dataset(
@@ -30,17 +31,19 @@ def cleanup_datasets(bigquery_client: bigquery.Client):


 @pytest.fixture(scope="session")
-def bigquery_client():
+def bigquery_client() -> bigquery.Client:
     return bigquery.Client()


 @pytest.fixture(scope="session")
-def project_id(bigquery_client):
+def project_id(bigquery_client: bigquery.Client) -> str:
     return bigquery_client.project


 @pytest.fixture(scope="session")
-def dataset_id(bigquery_client: bigquery.Client, project_id: str):
+def dataset_id(
+    bigquery_client: bigquery.Client, project_id: str
+) -> Generator[str, None, None]:
     dataset_id = prefixer.create_prefix()
     full_dataset_id = f"{project_id}.{dataset_id}"
     dataset = bigquery.Dataset(full_dataset_id)
@@ -50,7 +53,9 @@ def dataset_id(bigquery_client: bigquery.Client, project_id: str):


 @pytest.fixture(scope="session")
-def dataset_id_non_us(bigquery_client: bigquery.Client, project_id: str):
+def dataset_id_non_us(
+    bigquery_client: bigquery.Client, project_id: str
+) -> Generator[str, None, None]:
     dataset_id = prefixer.create_prefix()
     full_dataset_id = f"{project_id}.{dataset_id}"
     dataset = bigquery.Dataset(full_dataset_id)
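The Generator[str, None, None] annotations above are the standard typing for fixtures that yield once and then run teardown: the first type parameter is what the fixture hands to the test, and the send and return types are None (typing.Iterator[str] would also be accepted). Note that tests consuming such a fixture annotate the parameter as plain str. A minimal sketch, independent of this repo:

from typing import Generator

import pytest


@pytest.fixture
def resource_name() -> Generator[str, None, None]:
    name = "resource-123"          # setup
    yield name                     # value injected into the test
    print(f"cleaning up {name}")   # teardown, runs after the test finishes


def test_resource(resource_name: str) -> None:
    # Inside the test the parameter is the yielded value, hence str.
    assert resource_name.startswith("resource-")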

bigquery_storage/to_dataframe/read_query_results.py

Lines changed: 3 additions & 1 deletion
@@ -12,8 +12,10 @@
 # See the License for the specific language governing permissions and
 # limitations under the License.

+import pandas

-def read_query_results():
+
+def read_query_results() -> pandas.DataFrame:
     # [START bigquerystorage_pandas_tutorial_read_query_results]
     from google.cloud import bigquery

0 commit comments
