Skip to content

Commit f8fa57a

Browse files
committed
fix some lint errors
1 parent 93ee955 commit f8fa57a

File tree

2 files changed

+13
-13
lines changed

2 files changed

+13
-13
lines changed

bigquery_storage/conftest.py

Lines changed: 5 additions & 4 deletions
Original file line numberDiff line numberDiff line change
@@ -15,19 +15,20 @@
1515
import datetime
1616
import os
1717
import random
18+
from typing import Generator
19+
20+
from google.cloud import bigquery
1821

1922
import pytest
2023

2124

2225
@pytest.fixture(scope="session")
23-
def project_id():
26+
def project_id() -> str:
2427
return os.environ["GOOGLE_CLOUD_PROJECT"]
2528

2629

2730
@pytest.fixture(scope="session")
28-
def dataset(project_id):
29-
from google.cloud import bigquery
30-
31+
def dataset(project_id: str) -> Generator[bigquery.Dataset, None, None]:
3132
client = bigquery.Client()
3233

3334
# Add a random suffix to dataset name to avoid conflict, because we run

bigquery_storage/pyarrow/append_rows_with_arrow.py

Lines changed: 8 additions & 9 deletions
Original file line numberDiff line numberDiff line change
@@ -16,13 +16,14 @@
1616
import datetime
1717
import decimal
1818

19+
from google.cloud import bigquery
20+
from google.cloud import bigquery_storage_v1
1921
from google.cloud.bigquery import enums
22+
from google.cloud.bigquery_storage_v1 import types as gapic_types
23+
from google.cloud.bigquery_storage_v1.writer import AppendRowsStream
2024
import pandas as pd
2125
import pyarrow as pa
2226

23-
from google.cloud import bigquery
24-
from google.cloud.bigquery_storage_v1 import types as gapic_types
25-
from google.cloud.bigquery_storage_v1.writer import AppendRowsStream
2627

2728
TABLE_LENGTH = 100_000
2829

@@ -84,13 +85,11 @@
8485
)
8586

8687

87-
def bqstorage_write_client():
88-
from google.cloud import bigquery_storage_v1
89-
88+
def bqstorage_write_client() -> bigquery_storage_v1.BigQueryWriteClient:
9089
return bigquery_storage_v1.BigQueryWriteClient()
9190

9291

93-
def make_table(project_id, dataset_id, bq_client):
92+
def make_table(project_id: str, dataset_id: str, bq_client: bigquery.Client) -> bigquery.Table:
9493
table_id = "append_rows_w_arrow_test"
9594
table_id_full = f"{project_id}.{dataset_id}.{table_id}"
9695
bq_table = bigquery.Table(table_id_full, schema=BQ_SCHEMA)
@@ -99,7 +98,7 @@ def make_table(project_id, dataset_id, bq_client):
9998
return created_table
10099

101100

102-
def create_stream(bqstorage_write_client, table):
101+
def create_stream(bqstorage_write_client: bigquery_storage_v1.BigQueryWriteClient, table: bigquery.Table) -> AppendRowsStream:
103102
stream_name = f"projects/{table.project}/datasets/{table.dataset_id}/tables/{table.table_id}/_default"
104103
request_template = gapic_types.AppendRowsRequest()
105104
request_template.write_stream = stream_name
@@ -116,7 +115,7 @@ def create_stream(bqstorage_write_client, table):
116115
return append_rows_stream
117116

118117

119-
def generate_pyarrow_table(num_rows=TABLE_LENGTH):
118+
def generate_pyarrow_table(num_rows: int = TABLE_LENGTH) -> pa.Table:
120119
date_1 = datetime.date(2020, 10, 1)
121120
date_2 = datetime.date(2021, 10, 1)
122121

0 commit comments

Comments (0)