diff --git a/google-cloud-bigquery-jdbc/pom.xml b/google-cloud-bigquery-jdbc/pom.xml new file mode 100644 index 000000000..a697ba101 --- /dev/null +++ b/google-cloud-bigquery-jdbc/pom.xml @@ -0,0 +1,329 @@ + + + + 4.0.0 + com.google.cloud + google-cloud-bigquery-jdbc + 0.1.0 + jar + BigQuery JDBC + https://github.com/googleapis/java-bigquery-jdbc + JDBC for BigQuery + + + UTF-8 + UTF-8 + github + google-cloud-bigquery-jdbc + + + + + + + src/main/resources + true + + + + + org.apache.maven.plugins + maven-surefire-plugin + 3.5.2 + + ${skipSurefire} + + + + org.jacoco + jacoco-maven-plugin + 0.8.13 + + + + + + com.google.cloud + google-cloud-bigquery-jdbc-parent + 0.1.0 + + + + com.google.cloud + google-cloud-bigquery + + + com.google.cloud + google-cloud-bigquerystorage + + + com.google.api + api-common + + + org.apache.arrow + arrow-vector + + + com.google.guava + guava + + + com.google.cloud + google-cloud-core + + + com.google.api + gax + + + com.google.auth + google-auth-library-oauth2-http + + + com.google.auth + google-auth-library-credentials + + + + + org.apache.arrow + arrow-memory-core + + + org.apache.arrow + arrow-memory-netty + + + com.google.protobuf + protobuf-java + + + com.google.api.grpc + proto-google-cloud-bigquerystorage-v1 + + + com.google.code.gson + gson + + + com.google.code.findbugs + jsr305 + + + org.apache.httpcomponents.core5 + httpcore5 + + + org.apache.httpcomponents.client5 + httpclient5 + + + com.google.http-client + google-http-client + + + com.google.http-client + google-http-client-apache-v5 + + + org.apache.httpcomponents + httpcore + + + org.apache.httpcomponents + httpclient + + + + + com.google.cloud + google-cloud-core-http + + + com.google.api + gax-grpc + + + io.grpc + grpc-api + + + io.grpc + grpc-netty-shaded + + + io.grpc + grpc-core + + + + com.google.truth + truth + 1.1.3 + test + + + junit + junit + 4.13.2 + test + + + org.mockito + mockito-core + 4.11.0 + test + + + + + + java17 + + [17,) + + + !jvm + + + + + + 
org.apache.maven.plugins + maven-surefire-plugin + + --add-opens=java.base/java.nio=org.apache.arrow.memory.core,ALL-UNNAMED + + + + + + + + + release-all-dependencies-shaded + + + + org.apache.maven.plugins + maven-shade-plugin + 3.5.2 + + + + + + com + shaded.bqjdbc.com + + com.google.cloud.bigquery.jdbc.* + + + + org + shaded.bqjdbc.org + + org.conscrypt.* + + + + io + shaded.bqjdbc.io + + + + + + + + + + + + release-all-dependencies + + + + org.apache.maven.plugins + maven-shade-plugin + 3.5.2 + + + package + + shade + + + false + + + java.base/java.nio=ALL-UNNAMED + + + + + + + META-INF/io.netty.versions.properties + + + + + *:* + + META-INF/LICENSE* + META-INF/NOTICE* + META-INF/DEPENDENCIES + META-INF/proguard/*.pro + META-INF/maven/** + META-INF/*.MF + META-INF/*.SF + META-INF/*.DSA + META-INF/*.RSA + arrow-git.properties + + + + + + + + + + + + + + docker + + + env.JDBC_DOCKER_ENV + + + + + /mvn/test-target + + + + \ No newline at end of file diff --git a/google-cloud-bigquery-jdbc/src/main/java/com/google/cloud/bigquery/exception/BigQueryConversionException.java b/google-cloud-bigquery-jdbc/src/main/java/com/google/cloud/bigquery/exception/BigQueryConversionException.java new file mode 100644 index 000000000..90e758b05 --- /dev/null +++ b/google-cloud-bigquery-jdbc/src/main/java/com/google/cloud/bigquery/exception/BigQueryConversionException.java @@ -0,0 +1,29 @@ +/* + * Copyright 2025 Google LLC + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package com.google.cloud.bigquery.exception; + +import java.sql.SQLException; + +/** + * Exception for errors that occur when the driver cannot convert a value from one type to another. + */ +public class BigQueryConversionException extends SQLException { + + public BigQueryConversionException(String message, Throwable cause) { + super(message, cause); + } +} diff --git a/google-cloud-bigquery-jdbc/src/main/java/com/google/cloud/bigquery/exception/BigQueryJdbcCoercionException.java b/google-cloud-bigquery-jdbc/src/main/java/com/google/cloud/bigquery/exception/BigQueryJdbcCoercionException.java new file mode 100644 index 000000000..185ef54bb --- /dev/null +++ b/google-cloud-bigquery-jdbc/src/main/java/com/google/cloud/bigquery/exception/BigQueryJdbcCoercionException.java @@ -0,0 +1,36 @@ +/* + * Copyright 2023 Google LLC + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package com.google.cloud.bigquery.exception; + +import com.google.api.core.InternalApi; + +/** + * Thrown to indicate that the coercion was attempted but couldn't be performed successfully because + * of some error. + */ +@InternalApi +public class BigQueryJdbcCoercionException extends RuntimeException { + + /** + * Construct a new exception with the specified cause. + * + * @param cause the actual cause which was thrown while performing the coercion. 
+ */ + public BigQueryJdbcCoercionException(Exception cause) { + super("Coercion error", cause); + } +} diff --git a/google-cloud-bigquery-jdbc/src/main/java/com/google/cloud/bigquery/exception/BigQueryJdbcCoercionNotFoundException.java b/google-cloud-bigquery-jdbc/src/main/java/com/google/cloud/bigquery/exception/BigQueryJdbcCoercionNotFoundException.java new file mode 100644 index 000000000..b4eafb2ee --- /dev/null +++ b/google-cloud-bigquery-jdbc/src/main/java/com/google/cloud/bigquery/exception/BigQueryJdbcCoercionNotFoundException.java @@ -0,0 +1,40 @@ +/* + * Copyright 2023 Google LLC + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package com.google.cloud.bigquery.exception; + +import com.google.api.core.InternalApi; + +/** + * Thrown to indicate that the current TypeCoercer can not perform the coercion as the Coercion + * implementation is not registered for the mentioned source and target type. + */ +@InternalApi +public class BigQueryJdbcCoercionNotFoundException extends RuntimeException { + + /** + * Construct a new exception. + * + * @param source the source type. + * @param target the target type. 
+ */ + public BigQueryJdbcCoercionNotFoundException(Class source, Class target) { + super( + String.format( + "Coercion not found for [%s -> %s] conversion", + source.getCanonicalName(), target.getCanonicalName())); + } +} diff --git a/google-cloud-bigquery-jdbc/src/main/java/com/google/cloud/bigquery/exception/BigQueryJdbcException.java b/google-cloud-bigquery-jdbc/src/main/java/com/google/cloud/bigquery/exception/BigQueryJdbcException.java new file mode 100644 index 000000000..72a22aba6 --- /dev/null +++ b/google-cloud-bigquery-jdbc/src/main/java/com/google/cloud/bigquery/exception/BigQueryJdbcException.java @@ -0,0 +1,76 @@ +/* + * Copyright 2024 Google LLC + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package com.google.cloud.bigquery.exception; + +import com.google.cloud.bigquery.BigQueryException; +import java.sql.SQLException; + +public class BigQueryJdbcException extends SQLException { + private BigQueryException bigQueryException = null; + + /** + * Constructs a new BigQueryJdbcException with the given message. + * + * @param message The detail message. + */ + public BigQueryJdbcException(String message) { + super(message); + } + + /** + * Constructs a new BigQueryJdbcException from InterruptedException + * + * @param ex The InterruptedException to be thrown. 
+ */ + public BigQueryJdbcException(InterruptedException ex) { + super(ex); + } + + /** + * Constructs a new BigQueryJdbcException from BigQueryException + * + * @param ex The BigQueryException to be thrown. + */ + public BigQueryJdbcException(BigQueryException ex) { + super(ex); + this.bigQueryException = ex; + } + + /** + * Construct a new BigQueryJdbcException with the cause. + * + * @param message Specific message that is being added to the Exception. + * @param cause Throwable that is being converted. + */ + public BigQueryJdbcException(String message, Throwable cause) { + super(message, cause); + } + + /** + * Constructs a new BigQueryJdbcException with the specified cause and a detail message of + * (cause==null ? null : cause.toString()) + * + * @param cause Throwable that is being converted. + */ + public BigQueryJdbcException(Throwable cause) { + super(cause); + } + + public BigQueryException getBigQueryException() { + return bigQueryException; + } +} diff --git a/google-cloud-bigquery-jdbc/src/main/java/com/google/cloud/bigquery/exception/BigQueryJdbcRuntimeException.java b/google-cloud-bigquery-jdbc/src/main/java/com/google/cloud/bigquery/exception/BigQueryJdbcRuntimeException.java new file mode 100644 index 000000000..38e5171be --- /dev/null +++ b/google-cloud-bigquery-jdbc/src/main/java/com/google/cloud/bigquery/exception/BigQueryJdbcRuntimeException.java @@ -0,0 +1,48 @@ +/* + * Copyright 2024 Google LLC + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+ * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package com.google.cloud.bigquery.exception; + +public class BigQueryJdbcRuntimeException extends RuntimeException { + + /** + * Constructs a new BigQueryJdbcRuntimeException with the given message. + * + * @param message The detail message. + */ + public BigQueryJdbcRuntimeException(String message) { + super(message); + } + + /** + * Constructs a new BigQueryJdbcRuntimeException from a Throwable exception. + * + * @param ex Throwable to be thrown. + */ + public BigQueryJdbcRuntimeException(Throwable ex) { + super(ex); + } + + /** + * Constructs a new BigQueryJdbcRuntimeException from a Throwable exception and a message. + * + * @param message The detail message. + * @param ex Throwable to be thrown. + */ + public BigQueryJdbcRuntimeException(String message, InterruptedException ex) { + super(message, ex); + } +} diff --git a/google-cloud-bigquery-jdbc/src/main/java/com/google/cloud/bigquery/exception/BigQueryJdbcSqlFeatureNotSupportedException.java b/google-cloud-bigquery-jdbc/src/main/java/com/google/cloud/bigquery/exception/BigQueryJdbcSqlFeatureNotSupportedException.java new file mode 100644 index 000000000..8c93d8764 --- /dev/null +++ b/google-cloud-bigquery-jdbc/src/main/java/com/google/cloud/bigquery/exception/BigQueryJdbcSqlFeatureNotSupportedException.java @@ -0,0 +1,40 @@ +/* + * Copyright 2024 Google LLC + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+ * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package com.google.cloud.bigquery.exception; + +import com.google.cloud.bigquery.BigQueryException; +import java.sql.SQLFeatureNotSupportedException; + +public class BigQueryJdbcSqlFeatureNotSupportedException extends SQLFeatureNotSupportedException { + /** + * Constructs a new BigQueryJdbcSqlFeatureNotSupportedException with the given message. + * + * @param message The detail message. + */ + public BigQueryJdbcSqlFeatureNotSupportedException(String message) { + super(message); + } + + /** + * Constructs a new BigQueryJdbcSqlFeatureNotSupportedException from BigQueryException + * + * @param ex The BigQueryException to be thrown. + */ + public BigQueryJdbcSqlFeatureNotSupportedException(BigQueryException ex) { + super(ex); + } +} diff --git a/google-cloud-bigquery-jdbc/src/main/java/com/google/cloud/bigquery/exception/BigQueryJdbcSqlSyntaxErrorException.java b/google-cloud-bigquery-jdbc/src/main/java/com/google/cloud/bigquery/exception/BigQueryJdbcSqlSyntaxErrorException.java new file mode 100644 index 000000000..99edcd0c5 --- /dev/null +++ b/google-cloud-bigquery-jdbc/src/main/java/com/google/cloud/bigquery/exception/BigQueryJdbcSqlSyntaxErrorException.java @@ -0,0 +1,36 @@ +/* + * Copyright 2024 Google LLC + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package com.google.cloud.bigquery.exception; + +import com.google.cloud.bigquery.BigQueryException; +import java.sql.SQLSyntaxErrorException; + +/** + * Specific {@link SQLSyntaxErrorException} thrown when the SQLState class value is '42', or under + * vendor-specified conditions. This indicates that the in-progress query has violated SQL syntax + * rules. + */ +public class BigQueryJdbcSqlSyntaxErrorException extends SQLSyntaxErrorException { + /** + * Constructs a new BigQueryJdbcSqlSyntaxErrorException from BigQueryException + * + * @param ex The BigQueryException to be thrown. + */ + public BigQueryJdbcSqlSyntaxErrorException(BigQueryException ex) { + super(ex.getMessage(), "Incorrect SQL syntax."); + } +} diff --git a/google-cloud-bigquery-jdbc/src/main/java/com/google/cloud/bigquery/jdbc/BigQueryArrowArray.java b/google-cloud-bigquery-jdbc/src/main/java/com/google/cloud/bigquery/jdbc/BigQueryArrowArray.java new file mode 100644 index 000000000..49bd565df --- /dev/null +++ b/google-cloud-bigquery-jdbc/src/main/java/com/google/cloud/bigquery/jdbc/BigQueryArrowArray.java @@ -0,0 +1,105 @@ +/* + * Copyright 2023 Google LLC + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package com.google.cloud.bigquery.jdbc; + +import com.google.cloud.Tuple; +import com.google.cloud.bigquery.Field; +import com.google.cloud.bigquery.Schema; +import java.sql.ResultSet; +import java.sql.SQLException; +import org.apache.arrow.vector.util.JsonStringArrayList; +import org.apache.arrow.vector.util.JsonStringHashMap; + +/** + * An implementation of {@link BigQueryBaseArray} used to represent Array values from Arrow data. + */ +class BigQueryArrowArray extends BigQueryBaseArray { + private static final BigQueryJdbcCustomLogger LOG = + new BigQueryJdbcCustomLogger(BigQueryArrowArray.class.getName()); + private static final BigQueryTypeCoercer BIGQUERY_TYPE_COERCER = + BigQueryTypeCoercionUtility.INSTANCE; + private JsonStringArrayList values; + + public BigQueryArrowArray(Field schema, JsonStringArrayList values) { + super(schema); + this.values = values; + } + + @Override + public Object getArray() { + LOG.finest("++enter++"); + ensureValid(); + if (values == null) { + return null; + } + return getArrayInternal(0, values.size()); + } + + @Override + public Object getArray(long index, int count) { + LOG.finest("++enter++"); + ensureValid(); + if (values == null) { + return null; + } + Tuple range = createRange(index, count, this.values.size()); + return getArrayInternal(range.x(), range.y()); + } + + @Override + public ResultSet getResultSet() throws SQLException { + LOG.finest("++enter++"); + ensureValid(); + if (values == null) { + return new BigQueryArrowResultSet(); + } + BigQueryArrowBatchWrapper arrowBatchWrapper = + BigQueryArrowBatchWrapper.getNestedFieldValueListWrapper(values); + return BigQueryArrowResultSet.getNestedResultSet( + Schema.of(singleElementSchema()), arrowBatchWrapper, 0, this.values.size()); + } + + @Override + public ResultSet getResultSet(long index, int count) throws SQLException { + LOG.finest("++enter++"); + ensureValid(); + if (values == null) { + return new BigQueryArrowResultSet(); + } + Tuple range = 
createRange(index, count, this.values.size()); + BigQueryArrowBatchWrapper arrowBatchWrapper = + BigQueryArrowBatchWrapper.getNestedFieldValueListWrapper(values); + return BigQueryArrowResultSet.getNestedResultSet( + Schema.of(singleElementSchema()), arrowBatchWrapper, range.x(), range.y()); + } + + @Override + public void free() { + LOG.finest("++enter++"); + this.values = null; + markInvalid(); + } + + @Override + Object getCoercedValue(int index) { + LOG.finest("++enter++"); + Object value = this.values.get(index); + return this.arrayOfStruct + ? new BigQueryArrowStruct(schema.getSubFields(), (JsonStringHashMap) value) + : BIGQUERY_TYPE_COERCER.coerceTo(getTargetClass(), value); + } +} diff --git a/google-cloud-bigquery-jdbc/src/main/java/com/google/cloud/bigquery/jdbc/BigQueryArrowBatchWrapper.java b/google-cloud-bigquery-jdbc/src/main/java/com/google/cloud/bigquery/jdbc/BigQueryArrowBatchWrapper.java new file mode 100644 index 000000000..4d322ad97 --- /dev/null +++ b/google-cloud-bigquery-jdbc/src/main/java/com/google/cloud/bigquery/jdbc/BigQueryArrowBatchWrapper.java @@ -0,0 +1,68 @@ +/* + * Copyright 2023 Google LLC + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package com.google.cloud.bigquery.jdbc; + +import com.google.cloud.bigquery.storage.v1.ArrowRecordBatch; +import org.apache.arrow.vector.util.JsonStringArrayList; + +/** This class acts as a facade layer and wraps Arrow's VectorSchemaRoot & JsonStringArrayList */ +class BigQueryArrowBatchWrapper { + private static final BigQueryJdbcCustomLogger LOG = + new BigQueryJdbcCustomLogger(BigQueryArrowBatchWrapper.class.getName()); + // Reference to the current arrowBatch + private final ArrowRecordBatch currentArrowBatch; + // Reference to the nested Records, set as null otherwise (Arrays) + private final JsonStringArrayList nestedRecords; + + // Marks the end of the stream for the ResultSet + private final boolean isLast; + + private BigQueryArrowBatchWrapper( + ArrowRecordBatch currentArrowBatch, JsonStringArrayList nestedRecords, boolean isLast) { + this.currentArrowBatch = currentArrowBatch; + this.nestedRecords = nestedRecords; + this.isLast = isLast; + } + + static BigQueryArrowBatchWrapper of(ArrowRecordBatch currentArrowBatch, boolean... isLast) { + LOG.finest("++enter++"); + boolean isLastFlag = isLast != null && isLast.length == 1 && isLast[0]; + return new BigQueryArrowBatchWrapper(currentArrowBatch, null, isLastFlag); + } + + static BigQueryArrowBatchWrapper getNestedFieldValueListWrapper( + JsonStringArrayList nestedRecords, boolean... 
isLast) { + LOG.finest("++enter++"); + boolean isLastFlag = isLast != null && isLast.length == 1 && isLast[0]; + return new BigQueryArrowBatchWrapper(null, nestedRecords, isLastFlag); + } + + ArrowRecordBatch getCurrentArrowBatch() { + LOG.finest("++enter++"); + return this.currentArrowBatch; + } + + JsonStringArrayList getNestedRecords() { + LOG.finest("++enter++"); + return this.nestedRecords; + } + + boolean isLast() { + LOG.finest("++enter++"); + return this.isLast; + } +} diff --git a/google-cloud-bigquery-jdbc/src/main/java/com/google/cloud/bigquery/jdbc/BigQueryArrowResultSet.java b/google-cloud-bigquery-jdbc/src/main/java/com/google/cloud/bigquery/jdbc/BigQueryArrowResultSet.java new file mode 100644 index 000000000..004dfb02b --- /dev/null +++ b/google-cloud-bigquery-jdbc/src/main/java/com/google/cloud/bigquery/jdbc/BigQueryArrowResultSet.java @@ -0,0 +1,492 @@ +/* + * Copyright 2023 Google LLC + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package com.google.cloud.bigquery.jdbc; + +import static com.google.cloud.bigquery.jdbc.BigQueryBaseArray.isArray; +import static com.google.cloud.bigquery.jdbc.BigQueryBaseStruct.isStruct; + +import com.google.cloud.bigquery.BigQuery; +import com.google.cloud.bigquery.Field; +import com.google.cloud.bigquery.Schema; +import com.google.cloud.bigquery.StandardSQLTypeName; +import com.google.cloud.bigquery.exception.BigQueryJdbcException; +import com.google.cloud.bigquery.storage.v1.ArrowRecordBatch; +import com.google.cloud.bigquery.storage.v1.ArrowSchema; +import java.io.IOException; +import java.math.BigDecimal; +import java.sql.Date; +import java.sql.ResultSet; +import java.sql.SQLException; +import java.sql.Timestamp; +import java.time.LocalDateTime; +import java.util.ArrayList; +import java.util.List; +import java.util.concurrent.BlockingQueue; +import org.apache.arrow.memory.BufferAllocator; +import org.apache.arrow.memory.RootAllocator; +import org.apache.arrow.vector.FieldVector; +import org.apache.arrow.vector.VectorLoader; +import org.apache.arrow.vector.VectorSchemaRoot; +import org.apache.arrow.vector.ipc.ReadChannel; +import org.apache.arrow.vector.ipc.message.MessageSerializer; +import org.apache.arrow.vector.util.ByteArrayReadableSeekableByteChannel; +import org.apache.arrow.vector.util.JsonStringArrayList; +import org.apache.arrow.vector.util.JsonStringHashMap; + +/** {@link ResultSet} Implementation for Arrow datasource (Using Storage Read APIs) */ +class BigQueryArrowResultSet extends BigQueryBaseResultSet { + private final long totalRows; + // count of rows read by the current instance of ResultSet + private long rowCount = 0; + // IMP: This is a buffer of Arrow batches, the max size should be kept at min as + // possible to avoid holding too much memory + private final BlockingQueue buffer; + + // TODO(neenu): See if it makes sense to have the nested batch represented by + // 'JsonStringArrayList' directly + // points to the nested batch 
of arrow record + private final BigQueryArrowBatchWrapper currentNestedBatch; + private final int fromIndex; + private final int toIndexExclusive; + + // Acts as a cursor, resets to -1 when the `currentBatch` is processed. points to a + // logical row in the columnar BigQueryBigQueryArrowBatchWrapper currentBatch + private int currentBatchRowIndex = -1; + private boolean hasReachedEnd = false; + + // Tracks the index of the nested element under process + private int nestedRowIndex; + + private boolean afterLast = false; + + private ArrowDeserializer arrowDeserializer; + BufferAllocator allocator = new RootAllocator(Long.MAX_VALUE); + // Decoder object will be reused to avoid re-allocation and too much garbage collection. + private VectorSchemaRoot vectorSchemaRoot; + private VectorLoader vectorLoader; + // producer thread's reference + private final Thread ownedThread; + + private BigQueryArrowResultSet( + Schema schema, + ArrowSchema arrowSchema, + long totalRows, + BigQueryStatement statement, + BlockingQueue buffer, + BigQueryArrowBatchWrapper currentNestedBatch, + boolean isNested, + int fromIndex, + int toIndexExclusive, + Thread ownedThread, + BigQuery bigQuery) + throws SQLException { + super(bigQuery, statement, schema, isNested); + LOG.finest("++enter++"); + this.totalRows = totalRows; + this.buffer = buffer; + this.currentNestedBatch = currentNestedBatch; + this.fromIndex = fromIndex; + this.toIndexExclusive = toIndexExclusive; + this.nestedRowIndex = fromIndex - 1; + this.ownedThread = ownedThread; + if (!isNested && arrowSchema != null) { + try { + this.arrowDeserializer = new ArrowDeserializer(arrowSchema); + } catch (IOException ex) { + throw new BigQueryJdbcException(ex); + } + } + } + + /** + * This method returns an instance of BigQueryArrowResultSet after adding it in the list of + * ArrowResultSetFinalizer + * + * @return BigQueryArrowResultSet + */ + static BigQueryArrowResultSet of( + Schema schema, + ArrowSchema arrowSchema, + long totalRows, 
+ BigQueryStatement statement, + BlockingQueue buffer, + Thread ownedThread, + BigQuery bigQuery) + throws SQLException { + return new BigQueryArrowResultSet( + schema, + arrowSchema, + totalRows, + statement, + buffer, + null, + false, + -1, + -1, + ownedThread, + bigQuery); + } + + BigQueryArrowResultSet() throws SQLException { + super(null, null, null, false); + this.totalRows = 0; + this.buffer = null; + this.currentNestedBatch = null; + this.fromIndex = 0; + this.toIndexExclusive = 0; + this.ownedThread = null; + this.arrowDeserializer = null; + this.vectorSchemaRoot = null; + this.vectorLoader = null; + } + + static BigQueryArrowResultSet getNestedResultSet( + Schema schema, BigQueryArrowBatchWrapper nestedBatch, int fromIndex, int toIndexExclusive) + throws SQLException { + return new BigQueryArrowResultSet( + schema, null, -1, null, null, nestedBatch, true, fromIndex, toIndexExclusive, null, null); + } + + private class ArrowDeserializer implements AutoCloseable { + + /* Decoder object will be reused to avoid re-allocation and too much garbage collection. 
*/ + private ArrowDeserializer(ArrowSchema arrowSchema) throws IOException { + org.apache.arrow.vector.types.pojo.Schema schema = + MessageSerializer.deserializeSchema( + new org.apache.arrow.vector.ipc.ReadChannel( + new ByteArrayReadableSeekableByteChannel( + arrowSchema.getSerializedSchema().toByteArray()))); + List vectors = new ArrayList<>(); + List fields = schema.getFields(); + for (org.apache.arrow.vector.types.pojo.Field field : fields) { + vectors.add(field.createVector(allocator)); + } + vectorSchemaRoot = new VectorSchemaRoot(vectors); + vectorLoader = new VectorLoader(vectorSchemaRoot); + } + + private void deserializeArrowBatch(ArrowRecordBatch batch) throws SQLException { + LOG.finest("++enter++"); + try { + if (vectorSchemaRoot != null) { + // Clear vectorSchemaRoot before populating a new batch + vectorSchemaRoot.clear(); + } + org.apache.arrow.vector.ipc.message.ArrowRecordBatch deserializedBatch = + MessageSerializer.deserializeRecordBatch( + new ReadChannel( + new ByteArrayReadableSeekableByteChannel( + batch.getSerializedRecordBatch().toByteArray())), + allocator); + + vectorLoader.load(deserializedBatch); + // Release buffers from batch (they are still held in the vectors in root). 
+ deserializedBatch.close(); + } catch (RuntimeException | IOException ex) { + throw new BigQueryJdbcException(ex); + } + } + + @Override + public void close() { + LOG.finest("++enter++"); + vectorSchemaRoot.close(); + allocator.close(); + } + } + + @Override + public boolean next() throws SQLException { + checkClosed(); + if (this.isNested) { + if (this.currentNestedBatch == null || this.currentNestedBatch.getNestedRecords() == null) { + throw new IllegalStateException( + "currentNestedBatch/JsonStringArrayList can not be null working with the nested record"); + } + if (this.nestedRowIndex < (this.toIndexExclusive - 1)) { + /* Check if there's a next record in the array which can be read */ + this.nestedRowIndex++; + return true; + } + this.afterLast = true; + return false; + } else { + /* Non nested */ + if (this.hasReachedEnd || this.isLast()) { + this.afterLast = true; + return false; + } + try { + if (this.currentBatchRowIndex == -1 + || this.currentBatchRowIndex == (this.vectorSchemaRoot.getRowCount() - 1)) { + /* Start of iteration or we have exhausted the current batch */ + // Advance the cursor. Potentially blocking operation. + BigQueryArrowBatchWrapper batchWrapper = this.buffer.take(); + if (batchWrapper.isLast()) { + /* Marks the end of the records */ + if (this.vectorSchemaRoot != null) { + // IMP: To avoid memory leak: clear vectorSchemaRoot as it still holds + // the last batch + this.vectorSchemaRoot.clear(); + } + this.hasReachedEnd = true; + this.rowCount++; + return false; + } + // Valid batch, process it + ArrowRecordBatch arrowBatch = batchWrapper.getCurrentArrowBatch(); + // Populates vectorSchemaRoot + this.arrowDeserializer.deserializeArrowBatch(arrowBatch); + // Pointing to the first row in this fresh batch + this.currentBatchRowIndex = 0; + this.rowCount++; + return true; + } + // There are rows left in the current batch. 
+ else if (this.currentBatchRowIndex < this.vectorSchemaRoot.getRowCount()) { + this.currentBatchRowIndex++; + this.rowCount++; + return true; + } + } catch (InterruptedException | SQLException ex) { + throw new BigQueryJdbcException( + "Error occurred while advancing the cursor. This could happen when connection is closed while the next method is being called.", + ex); + } + } + return false; + } + + private Object getObjectInternal(int columnIndex) throws SQLException { + LOG.finest("++enter++"); + checkClosed(); + Object value; + if (this.isNested) { + // BigQuery doesn't support multidimensional arrays, so + // just the default row num column (1) and the actual column (2) is supposed to be read + if (!(columnIndex == 1 || columnIndex == 2)) { + + throw new IllegalArgumentException( + "Column index is required to be 1 or 2 for nested arrays"); + } + if (this.currentNestedBatch.getNestedRecords() == null) { + throw new IllegalStateException("JsonStringArrayList cannot be null for nested records."); + } + // For Arrays the first column is Index, ref: + // https://docs.oracle.com/javase/7/docs/api/java/sql/Array.html#getResultSet() + if (columnIndex == 1) { + return this.nestedRowIndex + 1; + } + // columnIndex = 2, return the data against the current nestedRowIndex + else { + value = this.currentNestedBatch.getNestedRecords().get(this.nestedRowIndex); + } + } else { + // get the current column + // SQL index to Java Index + FieldVector currentColumn = this.vectorSchemaRoot.getVector(columnIndex - 1); + // get the current row + value = currentColumn.getObject(this.currentBatchRowIndex); + } + setWasNull(value); + return value; + } + + @Override + public Object getObject(int columnIndex) throws SQLException { + + // columnIndex is SQL index starting at 1 + LOG.finest("++enter++"); + checkClosed(); + Object value = getObjectInternal(columnIndex); + if (value == null) { + return null; + } + + if (this.isNested && columnIndex == 1) { + return 
this.bigQueryTypeCoercer.coerceTo(Integer.class, value); + } + + if (this.isNested && columnIndex == 2) { + Field arrayField = this.schema.getFields().get(0); + if (isStruct(arrayField)) { + return new BigQueryArrowStruct(arrayField.getSubFields(), (JsonStringHashMap) value); + } + Class targetClass = + BigQueryJdbcTypeMappings.standardSQLToJavaTypeMapping.get( + arrayField.getType().getStandardType()); + return this.bigQueryTypeCoercer.coerceTo(targetClass, value); + } + + int fieldIndex = this.isNested ? 0 : columnIndex - 1; + Field fieldSchema = this.schemaFieldList.get(fieldIndex); + if (isArray(fieldSchema)) { + JsonStringArrayList originalList = (JsonStringArrayList) value; + StandardSQLTypeName elementTypeName = fieldSchema.getType().getStandardType(); + if (elementTypeName == StandardSQLTypeName.NUMERIC + || elementTypeName == StandardSQLTypeName.BIGNUMERIC) { + JsonStringArrayList newList = new JsonStringArrayList<>(); + for (Object item : originalList) { + if (item != null) { + newList.add(((BigDecimal) item).stripTrailingZeros()); + } else { + newList.add(null); + } + } + return new BigQueryArrowArray(fieldSchema, newList); + } else if (elementTypeName == StandardSQLTypeName.RANGE) { + JsonStringArrayList newList = new JsonStringArrayList<>(); + for (Object item : originalList) { + if (item != null) { + JsonStringHashMap rangeMap = (JsonStringHashMap) item; + Object start = rangeMap.get("start"); + Object end = rangeMap.get("end"); + + Object representativeElement = (start != null) ? 
start : end; + StandardSQLTypeName rangeElementType = getElementTypeFromValue(representativeElement); + + String formattedStart = formatRangeElement(start, rangeElementType); + String formattedEnd = formatRangeElement(end, rangeElementType); + + newList.add(String.format("[%s, %s)", formattedStart, formattedEnd)); + } else { + newList.add(null); + } + } + return new BigQueryArrowArray(fieldSchema, newList); + } + return new BigQueryArrowArray(fieldSchema, originalList); + } else if (isStruct(fieldSchema)) { + return new BigQueryArrowStruct(fieldSchema.getSubFields(), (JsonStringHashMap) value); + } else if (fieldSchema.getType().getStandardType() == StandardSQLTypeName.RANGE) { + JsonStringHashMap rangeMap = (JsonStringHashMap) value; + Object start = rangeMap.get("start"); + Object end = rangeMap.get("end"); + + Object representativeElement = (start != null) ? start : end; + StandardSQLTypeName elementType = getElementTypeFromValue(representativeElement); + + String formattedStart = formatRangeElement(start, elementType); + String formattedEnd = formatRangeElement(end, elementType); + + return String.format("[%s, %s)", formattedStart, formattedEnd); + } else { + if ((fieldSchema.getType().getStandardType() == StandardSQLTypeName.NUMERIC + || fieldSchema.getType().getStandardType() == StandardSQLTypeName.BIGNUMERIC) + && value instanceof BigDecimal) { + // The Arrow DecimalVector may return a BigDecimal with a larger scale than necessary. 
+ // Strip trailing zeros to match JSON API and CLI output + return ((BigDecimal) value).stripTrailingZeros(); + } + Class targetClass = + BigQueryJdbcTypeMappings.standardSQLToJavaTypeMapping.get( + fieldSchema.getType().getStandardType()); + return this.bigQueryTypeCoercer.coerceTo(targetClass, value); + } + } + + private StandardSQLTypeName getElementTypeFromValue(Object element) { + if (element == null) { + return StandardSQLTypeName.STRING; + } + if (element instanceof Integer) { + return StandardSQLTypeName.DATE; + } + if (element instanceof Long) { + return StandardSQLTypeName.TIMESTAMP; + } + if (element instanceof LocalDateTime) { + return StandardSQLTypeName.DATETIME; + } + return StandardSQLTypeName.STRING; + } + + private String formatRangeElement(Object element, StandardSQLTypeName elementType) { + if (element == null) { + return "UNBOUNDED"; + } + switch (elementType) { + case DATE: + // Arrow gives DATE as an Integer (days since epoch) + Date date = this.bigQueryTypeCoercer.coerceTo(Date.class, (Integer) element); + return date.toString(); + case DATETIME: + // Arrow gives DATETIME as a LocalDateTime + Timestamp dtTs = + this.bigQueryTypeCoercer.coerceTo(Timestamp.class, (LocalDateTime) element); + return this.bigQueryTypeCoercer.coerceTo(String.class, dtTs); + case TIMESTAMP: + // Arrow gives TIMESTAMP as a Long (microseconds since epoch) + Timestamp ts = this.bigQueryTypeCoercer.coerceTo(Timestamp.class, (Long) element); + return this.bigQueryTypeCoercer.coerceTo(String.class, ts); + default: + // Fallback for any other unexpected type + return element.toString(); + } + } + + @Override + public void close() { + LOG.fine(String.format("Closing BigqueryArrowResultSet %s.", this)); + this.isClosed = true; + if (ownedThread != null && !ownedThread.isInterrupted()) { + // interrupt the producer thread when result set is closed + ownedThread.interrupt(); + } + super.close(); + } + + @Override + public boolean isBeforeFirst() throws SQLException { + 
LOG.finest("++enter++"); + checkClosed(); + if (this.isNested) { + return this.nestedRowIndex < this.fromIndex; + } else { + return this.rowCount == 0; + } + } + + @Override + public boolean isAfterLast() throws SQLException { + LOG.finest("++enter++"); + checkClosed(); + return this.afterLast; + } + + @Override + public boolean isFirst() throws SQLException { + LOG.finest("++enter++"); + checkClosed(); + if (this.isNested) { + return this.nestedRowIndex == this.fromIndex; + } else { + return this.rowCount == 1; + } + } + + @Override + public boolean isLast() throws SQLException { + LOG.finest("++enter++"); + checkClosed(); + if (this.isNested) { + return this.nestedRowIndex == this.toIndexExclusive - 1; + } else { + return this.rowCount == this.totalRows; + } + } +} diff --git a/google-cloud-bigquery-jdbc/src/main/java/com/google/cloud/bigquery/jdbc/BigQueryArrowStruct.java b/google-cloud-bigquery-jdbc/src/main/java/com/google/cloud/bigquery/jdbc/BigQueryArrowStruct.java new file mode 100644 index 000000000..33befe902 --- /dev/null +++ b/google-cloud-bigquery-jdbc/src/main/java/com/google/cloud/bigquery/jdbc/BigQueryArrowStruct.java @@ -0,0 +1,87 @@ +/* + * Copyright 2023 Google LLC + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package com.google.cloud.bigquery.jdbc; + +import static com.google.cloud.bigquery.jdbc.BigQueryBaseArray.isArray; + +import com.google.cloud.bigquery.Field; +import com.google.cloud.bigquery.FieldList; +import java.lang.reflect.Array; +import java.util.ArrayList; +import java.util.List; +import org.apache.arrow.vector.util.JsonStringArrayList; +import org.apache.arrow.vector.util.JsonStringHashMap; + +/** + * An implementation of {@link BigQueryBaseStruct} used to represent Struct values from Arrow data. + */ +class BigQueryArrowStruct extends BigQueryBaseStruct { + private static final BigQueryJdbcCustomLogger LOG = + new BigQueryJdbcCustomLogger(BigQueryArrowStruct.class.getName()); + + private static final BigQueryTypeCoercer BIGQUERY_TYPE_COERCER = + BigQueryTypeCoercionUtility.INSTANCE; + + private final FieldList schema; + + private final JsonStringHashMap values; + + BigQueryArrowStruct(FieldList schema, JsonStringHashMap values) { + this.schema = schema; + this.values = values; + } + + @Override + FieldList getSchema() { + return this.schema; + } + + @Override + public Object[] getAttributes() { + LOG.finest("++enter++"); + int size = this.schema.size(); + Object[] attributes = (Object[]) Array.newInstance(Object.class, size); + + if (this.values == null) { + return attributes; + } + List structValues = new ArrayList<>(this.values.values()); + + for (int index = 0; index < size; index++) { + Field currentSchema = this.schema.get(index); + Object currentValue = structValues.get(index); + Object coercedValue = getValue(currentSchema, currentValue); + Array.set(attributes, index, coercedValue); + } + return attributes; + } + + private Object getValue(Field currentSchema, Object currentValue) { + LOG.finest("++enter++"); + if (isArray(currentSchema)) { + return new BigQueryArrowArray(currentSchema, (JsonStringArrayList) currentValue); + } else if (isStruct(currentSchema)) { + return new BigQueryArrowStruct( + currentSchema.getSubFields(), 
(JsonStringHashMap) currentValue); + } else { + Class targetClass = + BigQueryJdbcTypeMappings.standardSQLToJavaTypeMapping.get( + currentSchema.getType().getStandardType()); + return BIGQUERY_TYPE_COERCER.coerceTo(targetClass, currentValue); + } + } +} diff --git a/google-cloud-bigquery-jdbc/src/main/java/com/google/cloud/bigquery/jdbc/BigQueryBaseArray.java b/google-cloud-bigquery-jdbc/src/main/java/com/google/cloud/bigquery/jdbc/BigQueryBaseArray.java new file mode 100644 index 000000000..5fc2c15bb --- /dev/null +++ b/google-cloud-bigquery-jdbc/src/main/java/com/google/cloud/bigquery/jdbc/BigQueryBaseArray.java @@ -0,0 +1,172 @@ +/* + * Copyright 2023 Google LLC + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package com.google.cloud.bigquery.jdbc; + +import static com.google.cloud.bigquery.Field.Mode.REPEATED; +import static com.google.cloud.bigquery.jdbc.BigQueryBaseStruct.isStruct; +import static com.google.cloud.bigquery.jdbc.BigQueryErrorMessage.CUSTOMER_TYPE_MAPPING_NOT_SUPPORTED; +import static com.google.cloud.bigquery.jdbc.BigQueryErrorMessage.INVALID_ARRAY; + +import com.google.cloud.Tuple; +import com.google.cloud.bigquery.Field; +import com.google.cloud.bigquery.Field.Mode; +import com.google.cloud.bigquery.StandardSQLTypeName; +import com.google.cloud.bigquery.exception.BigQueryJdbcSqlFeatureNotSupportedException; +import java.lang.reflect.Array; +import java.sql.ResultSet; +import java.sql.SQLException; +import java.sql.Struct; +import java.util.Arrays; +import java.util.Base64; +import java.util.Map; +import java.util.stream.Collectors; + +/** + * An abstract implementation of {@link java.sql.Array} used as a base class for {@link + * BigQueryArrowArray} and {@link BigQueryJsonArray}. An Array value is a transaction-duration + * reference to an SQL ARRAY value. 
+ */ +abstract class BigQueryBaseArray implements java.sql.Array { + private static final BigQueryJdbcCustomLogger LOG = + new BigQueryJdbcCustomLogger(BigQueryBaseArray.class.getName()); + + protected final boolean arrayOfStruct; + private boolean valid; + protected Field schema; + + BigQueryBaseArray(Field schema) { + this.schema = schema; + this.arrayOfStruct = isStruct(schema); + this.valid = true; + } + + @Override + public final String getBaseTypeName() { + LOG.finest("++enter++"); + ensureValid(); + return this.schema.getType().getStandardType().name(); + } + + @Override + public final int getBaseType() { + LOG.finest("++enter++"); + ensureValid(); + return BigQueryJdbcTypeMappings.standardSQLToJavaSqlTypesMapping.get( + schema.getType().getStandardType()); + } + + @Override + public final Object getArray(Map> map) throws SQLException { + throw new BigQueryJdbcSqlFeatureNotSupportedException(CUSTOMER_TYPE_MAPPING_NOT_SUPPORTED); + } + + @Override + public final Object getArray(long index, int count, Map> map) + throws SQLException { + throw new BigQueryJdbcSqlFeatureNotSupportedException(CUSTOMER_TYPE_MAPPING_NOT_SUPPORTED); + } + + @Override + public final ResultSet getResultSet(Map> map) throws SQLException { + throw new BigQueryJdbcSqlFeatureNotSupportedException(CUSTOMER_TYPE_MAPPING_NOT_SUPPORTED); + } + + @Override + public final ResultSet getResultSet(long index, int count, Map> map) + throws SQLException { + throw new BigQueryJdbcSqlFeatureNotSupportedException(CUSTOMER_TYPE_MAPPING_NOT_SUPPORTED); + } + + protected Object getArrayInternal(int fromIndex, int toIndexExclusive) { + LOG.finest("++enter++"); + Class targetClass = getTargetClass(); + int size = toIndexExclusive - fromIndex; + Object javaArray = Array.newInstance(targetClass, size); + + for (int index = 0; index < size; index++) { + Array.set(javaArray, index, getCoercedValue(fromIndex + index)); + } + return javaArray; + } + + protected void ensureValid() throws IllegalStateException { + 
LOG.finest("++enter++"); + if (!this.valid) { + throw new IllegalStateException(INVALID_ARRAY); + } + } + + protected void markInvalid() { + LOG.finest("++enter++"); + this.schema = null; + this.valid = false; + } + + protected Field singleElementSchema() { + LOG.finest("++enter++"); + return this.schema.toBuilder().setMode(Mode.REQUIRED).build(); + } + + protected Tuple createRange(long index, int count, int size) + throws IllegalStateException { + LOG.finest("++enter++"); + // jdbc array follows 1 based array indexing + long normalisedFromIndex = index - 1; + if (normalisedFromIndex + count > size) { + throw new IllegalArgumentException( + String.format( + "The array index is out of range: %d, number of elements: %d.", index + count, size)); + } + long toIndex = normalisedFromIndex + count; + return Tuple.of((int) normalisedFromIndex, (int) toIndex); + } + + protected Class getTargetClass() { + LOG.finest("++enter++"); + return this.arrayOfStruct + ? Struct.class + : BigQueryJdbcTypeMappings.standardSQLToJavaTypeMapping.get( + this.schema.getType().getStandardType()); + } + + abstract Object getCoercedValue(int index); + + static boolean isArray(Field currentSchema) { + LOG.finest("++enter++"); + return currentSchema.getMode() == REPEATED; + } + + @Override + public String toString() { + try { + Object[] array = (Object[]) getArray(); + if (array == null) { + return "null"; + } + if (this.schema.getType().getStandardType() == StandardSQLTypeName.BYTES) { + return Arrays.stream(array) + .map( + element -> + element == null ? 
"null" : Base64.getEncoder().encodeToString((byte[]) element)) + .collect(Collectors.joining(", ", "[", "]")); + } + return Arrays.deepToString(array); + } catch (SQLException e) { + return "[Error converting array to string: " + e.getMessage() + "]"; + } + } +} diff --git a/google-cloud-bigquery-jdbc/src/main/java/com/google/cloud/bigquery/jdbc/BigQueryBaseResultSet.java b/google-cloud-bigquery-jdbc/src/main/java/com/google/cloud/bigquery/jdbc/BigQueryBaseResultSet.java new file mode 100644 index 000000000..7367a8153 --- /dev/null +++ b/google-cloud-bigquery-jdbc/src/main/java/com/google/cloud/bigquery/jdbc/BigQueryBaseResultSet.java @@ -0,0 +1,618 @@ +/* + * Copyright 2025 Google LLC + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package com.google.cloud.bigquery.jdbc; + +import com.google.cloud.bigquery.BigQuery; +import com.google.cloud.bigquery.Field; +import com.google.cloud.bigquery.FieldList; +import com.google.cloud.bigquery.Job; +import com.google.cloud.bigquery.JobId; +import com.google.cloud.bigquery.JobStatistics.QueryStatistics; +import com.google.cloud.bigquery.Schema; +import com.google.cloud.bigquery.StandardSQLTypeName; +import com.google.cloud.bigquery.exception.BigQueryConversionException; +import com.google.cloud.bigquery.exception.BigQueryJdbcCoercionException; +import com.google.cloud.bigquery.exception.BigQueryJdbcCoercionNotFoundException; +import java.io.InputStream; +import java.io.Reader; +import java.io.StringReader; +import java.math.BigDecimal; +import java.nio.charset.StandardCharsets; +import java.sql.Array; +import java.sql.Blob; +import java.sql.Clob; +import java.sql.Date; +import java.sql.ResultSet; +import java.sql.ResultSetMetaData; +import java.sql.SQLException; +import java.sql.Statement; +import java.sql.Time; +import java.sql.Timestamp; +import java.util.Calendar; + +public abstract class BigQueryBaseResultSet extends BigQueryNoOpsResultSet + implements BigQueryResultSet { + protected final BigQueryJdbcCustomLogger LOG = new BigQueryJdbcCustomLogger(this.toString()); + private BigQuery bigQuery; + private JobId jobId; + private String queryId; + private QueryStatistics queryStatistics; + protected final BigQueryStatement statement; + protected final Schema schema; + protected final FieldList schemaFieldList; + protected final boolean isNested; + protected boolean isClosed = false; + protected boolean wasNull = false; + protected final BigQueryTypeCoercer bigQueryTypeCoercer = BigQueryTypeCoercionUtility.INSTANCE; + + protected BigQueryBaseResultSet( + BigQuery bigQuery, BigQueryStatement statement, Schema schema, boolean isNested) { + this.bigQuery = bigQuery; + this.statement = statement; + this.schema = schema; + this.schemaFieldList = 
schema != null ? schema.getFields() : null; + this.isNested = isNested; + } + + public QueryStatistics getQueryStatistics() { + if (queryStatistics != null) { + return queryStatistics; + } + if (jobId == null || bigQuery == null) { + return null; + } + Job job = bigQuery.getJob(jobId); + queryStatistics = job != null ? job.getStatistics() : null; + return queryStatistics; + } + + public void setJobId(JobId jobId) { + this.jobId = jobId; + } + + public JobId getJobId() { + return jobId; + } + + public void setQueryId(String queryId) { + this.queryId = queryId; + } + + public String getQueryId() { + return queryId; + } + + @Override + public void close() { + try { + if (statement != null && statement.isCloseOnCompletion() && !statement.hasMoreResults()) { + statement.close(); + } + } catch (SQLException ex) { + LOG.warning( + String.format("Exception during ResultState.close() operation: %s", ex.getMessage())); + } + } + + protected SQLException createCoercionException( + int columnIndex, Class targetClass, Exception cause) throws SQLException { + checkClosed(); + StandardSQLTypeName type; + String typeName; + + if (isNested) { + if (columnIndex == 1) { + return new BigQueryConversionException( + String.format("Cannot convert index column to type %s.", targetClass.getSimpleName()), + cause); + } else if (columnIndex == 2) { + Field arrayField = this.schema.getFields().get(0); + type = arrayField.getType().getStandardType(); + typeName = type.name(); + } else { + throw new SQLException( + "For a nested ResultSet from an Array, columnIndex must be 1 or 2.", cause); + } + } else { + Field field = this.schemaFieldList.get(columnIndex - 1); + type = field.getType().getStandardType(); + typeName = type.name(); + } + return new BigQueryConversionException( + String.format( + "Cannot convert value of type %s to type %s.", typeName, targetClass.getSimpleName()), + cause); + } + + private StandardSQLTypeName getStandardSQLTypeName(int columnIndex) throws SQLException { + 
checkClosed(); + if (isNested) { + if (columnIndex == 1) { + return StandardSQLTypeName.INT64; + } else if (columnIndex == 2) { + if (this.schema == null || this.schema.getFields().isEmpty()) { + throw new SQLException("Schema not available for nested result set."); + } + Field arrayField = this.schema.getFields().get(0); + return arrayField.getType().getStandardType(); + } else { + throw new SQLException("For a nested ResultSet from an Array, columnIndex must be 1 or 2."); + } + } else { + if (this.schemaFieldList == null + || columnIndex > this.schemaFieldList.size() + || columnIndex < 1) { + throw new SQLException("Invalid column index: " + columnIndex); + } + Field field = this.schemaFieldList.get(columnIndex - 1); + return field.getType().getStandardType(); + } + } + + protected void setWasNull(Object val) { + this.wasNull = val == null; + } + + @Override + public boolean wasNull() throws SQLException { + checkClosed(); + return this.wasNull; + } + + @Override + public ResultSetMetaData getMetaData() throws SQLException { + checkClosed(); + if (this.isNested) { + return BigQueryResultSetMetadata.of(this.schemaFieldList, this.statement); + } else { + return BigQueryResultSetMetadata.of(this.schema.getFields(), this.statement); + } + } + + @Override + public int getType() throws SQLException { + checkClosed(); + return ResultSet.TYPE_FORWARD_ONLY; + } + + @Override + public int getConcurrency() throws SQLException { + checkClosed(); + return ResultSet.CONCUR_READ_ONLY; + } + + @Override + public Statement getStatement() throws SQLException { + checkClosed(); + return this.statement; + } + + @Override + public int getHoldability() throws SQLException { + checkClosed(); + return ResultSet.HOLD_CURSORS_OVER_COMMIT; + } + + @Override + public boolean isClosed() { + return this.isClosed; + } + + public abstract Object getObject(int columnIndex) throws SQLException; + + protected int getColumnIndex(String columnLabel) throws SQLException { + LOG.finest("++enter++"); + 
checkClosed(); + if (columnLabel == null) { + throw new SQLException("Column label cannot be null"); + } + // use schema to get the column index, add 1 for SQL index + return this.schemaFieldList.getIndex(columnLabel) + 1; + } + + @Override + public String getString(int columnIndex) throws SQLException { + LOG.finest("++enter++"); + try { + Object value = getObject(columnIndex); + return this.bigQueryTypeCoercer.coerceTo(String.class, value); + } catch (BigQueryJdbcCoercionNotFoundException e) { + throw createCoercionException(columnIndex, String.class, e); + } + } + + @Override + public boolean getBoolean(int columnIndex) throws SQLException { + LOG.finest("++enter++"); + + StandardSQLTypeName type = getStandardSQLTypeName(columnIndex); + if (type == StandardSQLTypeName.GEOGRAPHY + || type == StandardSQLTypeName.RANGE + || type == StandardSQLTypeName.JSON) { + throw createCoercionException(columnIndex, Boolean.class, null); + } + + try { + Object value = getObject(columnIndex); + return this.bigQueryTypeCoercer.coerceTo(Boolean.class, value); + } catch (BigQueryJdbcCoercionNotFoundException e) { + throw createCoercionException(columnIndex, Boolean.class, e); + } + } + + @Override + public byte getByte(int columnIndex) throws SQLException { + LOG.finest("++enter++"); + try { + Object value = getObject(columnIndex); + return this.bigQueryTypeCoercer.coerceTo(Byte.class, value); + } catch (BigQueryJdbcCoercionNotFoundException | BigQueryJdbcCoercionException e) { + throw createCoercionException(columnIndex, Byte.class, e); + } + } + + @Override + public short getShort(int columnIndex) throws SQLException { + LOG.finest("++enter++"); + try { + Object value = getObject(columnIndex); + return this.bigQueryTypeCoercer.coerceTo(Short.class, value); + } catch (BigQueryJdbcCoercionNotFoundException | BigQueryJdbcCoercionException e) { + throw createCoercionException(columnIndex, Short.class, e); + } + } + + @Override + public int getInt(int columnIndex) throws SQLException 
{ + LOG.finest("++enter++"); + try { + Object value = getObject(columnIndex); + return this.bigQueryTypeCoercer.coerceTo(Integer.class, value); + } catch (BigQueryJdbcCoercionNotFoundException | BigQueryJdbcCoercionException e) { + throw createCoercionException(columnIndex, Integer.class, e); + } + } + + @Override + public long getLong(int columnIndex) throws SQLException { + LOG.finest("++enter++"); + try { + Object value = getObject(columnIndex); + return this.bigQueryTypeCoercer.coerceTo(Long.class, value); + } catch (BigQueryJdbcCoercionNotFoundException | BigQueryJdbcCoercionException e) { + throw createCoercionException(columnIndex, Long.class, e); + } + } + + @Override + public float getFloat(int columnIndex) throws SQLException { + LOG.finest("++enter++"); + try { + Object value = getObject(columnIndex); + return this.bigQueryTypeCoercer.coerceTo(Float.class, value); + } catch (BigQueryJdbcCoercionNotFoundException | BigQueryJdbcCoercionException e) { + throw createCoercionException(columnIndex, Float.class, e); + } + } + + @Override + public double getDouble(int columnIndex) throws SQLException { + LOG.finest("++enter++"); + try { + Object value = getObject(columnIndex); + return this.bigQueryTypeCoercer.coerceTo(Double.class, value); + } catch (BigQueryJdbcCoercionNotFoundException | BigQueryJdbcCoercionException e) { + throw createCoercionException(columnIndex, Double.class, e); + } + } + + @Override + public BigDecimal getBigDecimal(int columnIndex, int scale) throws SQLException { + LOG.finest("++enter++"); + try { + Object value = getObject(columnIndex); + return this.bigQueryTypeCoercer.coerceTo(BigDecimal.class, value); + } catch (BigQueryJdbcCoercionNotFoundException | BigQueryJdbcCoercionException e) { + throw createCoercionException(columnIndex, BigDecimal.class, e); + } + } + + @Override + public byte[] getBytes(int columnIndex) throws SQLException { + LOG.finest("++enter++"); + try { + Object value = getObject(columnIndex); + return 
this.bigQueryTypeCoercer.coerceTo(byte[].class, value); + } catch (BigQueryJdbcCoercionNotFoundException e) { + throw createCoercionException(columnIndex, byte[].class, e); + } + } + + @Override + public Date getDate(int columnIndex) throws SQLException { + LOG.finest("++enter++"); + try { + Object value = getObject(columnIndex); + return this.bigQueryTypeCoercer.coerceTo(java.sql.Date.class, value); + } catch (BigQueryJdbcCoercionNotFoundException e) { + throw createCoercionException(columnIndex, java.sql.Date.class, e); + } + } + + @Override + public Time getTime(int columnIndex) throws SQLException { + LOG.finest("++enter++"); + StandardSQLTypeName type = getStandardSQLTypeName(columnIndex); + if (type == StandardSQLTypeName.INT64) { + throw createCoercionException(columnIndex, java.sql.Time.class, null); + } + try { + Object value = getObject(columnIndex); + return this.bigQueryTypeCoercer.coerceTo(java.sql.Time.class, value); + } catch (BigQueryJdbcCoercionNotFoundException e) { + throw createCoercionException(columnIndex, java.sql.Time.class, e); + } + } + + @Override + public Timestamp getTimestamp(int columnIndex) throws SQLException { + LOG.finest("++enter++"); + StandardSQLTypeName type = getStandardSQLTypeName(columnIndex); + if (type == StandardSQLTypeName.INT64) { + throw createCoercionException(columnIndex, java.sql.Timestamp.class, null); + } + try { + Object value = getObject(columnIndex); + return this.bigQueryTypeCoercer.coerceTo(java.sql.Timestamp.class, value); + } catch (BigQueryJdbcCoercionNotFoundException e) { + throw createCoercionException(columnIndex, java.sql.Timestamp.class, e); + } + } + + @Override + public BigDecimal getBigDecimal(int columnIndex) throws SQLException { + LOG.finest("++enter++"); + try { + Object value = getObject(columnIndex); + return this.bigQueryTypeCoercer.coerceTo(BigDecimal.class, value); + } catch (BigQueryJdbcCoercionNotFoundException | BigQueryJdbcCoercionException e) { + throw 
createCoercionException(columnIndex, BigDecimal.class, e); + } + } + + @Override + public Array getArray(int columnIndex) throws SQLException { + LOG.finest("++enter++"); + try { + return (Array) getObject(columnIndex); + } catch (ClassCastException e) { + throw createCoercionException(columnIndex, Array.class, e); + } + } + + @Override + public Blob getBlob(int columnIndex) throws SQLException { + LOG.finest("++enter++"); + byte[] value = getBytes(columnIndex); + return new javax.sql.rowset.serial.SerialBlob(value); + } + + @Override + public Clob getClob(int columnIndex) throws SQLException { + LOG.finest("++enter++"); + String value = getString(columnIndex); + return new javax.sql.rowset.serial.SerialClob(value.toCharArray()); + } + + @Override + public Reader getCharacterStream(int columnIndex) throws SQLException { + LOG.finest("++enter++"); + String value = getString(columnIndex); + return value == null ? null : new StringReader(value); + } + + private InputStream getInputStream(String value, java.nio.charset.Charset charset) { + LOG.finest("++enter++"); + if (value == null) { + return null; + } + return new java.io.ByteArrayInputStream(value.getBytes(charset)); + } + + @Override + public InputStream getAsciiStream(int columnIndex) throws SQLException { + LOG.finest("++enter++"); + return getInputStream(getString(columnIndex), StandardCharsets.US_ASCII); + } + + @Override + public InputStream getUnicodeStream(int columnIndex) throws SQLException { + LOG.finest("++enter++"); + return getInputStream(getString(columnIndex), StandardCharsets.UTF_16LE); + } + + @Override + public InputStream getBinaryStream(int columnIndex) throws SQLException { + LOG.finest("++enter++"); + byte[] bytes = getBytes(columnIndex); + return bytes == null ? 
null : new java.io.ByteArrayInputStream(bytes); + } + + @Override + public Date getDate(int columnIndex, Calendar cal) throws SQLException { + LOG.finest("++enter++"); + Date date = getDate(columnIndex); + if (date == null || cal == null) { + return null; + } + cal.setTimeInMillis(date.getTime()); + return new java.sql.Date(cal.getTimeInMillis()); + } + + @Override + public Time getTime(int columnIndex, Calendar cal) throws SQLException { + LOG.finest("++enter++"); + Time time = getTime(columnIndex); + if (time == null || cal == null) { + return null; + } + cal.setTimeInMillis(time.getTime()); + return new java.sql.Time(cal.getTimeInMillis()); + } + + @Override + public Timestamp getTimestamp(int columnIndex, Calendar cal) throws SQLException { + LOG.finest("++enter++"); + Timestamp timeStamp = getTimestamp(columnIndex); + if (timeStamp == null || cal == null) { + return null; + } + cal.setTimeInMillis(timeStamp.getTime()); + return new java.sql.Timestamp(cal.getTimeInMillis()); + } + + @Override + public int findColumn(String columnLabel) throws SQLException { + LOG.finest("++enter++"); + return getColumnIndex(columnLabel); + } + + @Override + public Object getObject(String columnLabel) throws SQLException { + return getObject(getColumnIndex(columnLabel)); + } + + @Override + public String getString(String columnLabel) throws SQLException { + return getString(getColumnIndex(columnLabel)); + } + + @Override + public boolean getBoolean(String columnLabel) throws SQLException { + return getBoolean(getColumnIndex(columnLabel)); + } + + @Override + public byte getByte(String columnLabel) throws SQLException { + return getByte(getColumnIndex(columnLabel)); + } + + @Override + public short getShort(String columnLabel) throws SQLException { + return getShort(getColumnIndex(columnLabel)); + } + + @Override + public int getInt(String columnLabel) throws SQLException { + return getInt(getColumnIndex(columnLabel)); + } + + @Override + public long getLong(String columnLabel) 
throws SQLException { + return getLong(getColumnIndex(columnLabel)); + } + + @Override + public float getFloat(String columnLabel) throws SQLException { + return getFloat(getColumnIndex(columnLabel)); + } + + @Override + public double getDouble(String columnLabel) throws SQLException { + return getDouble(getColumnIndex(columnLabel)); + } + + @Override + public BigDecimal getBigDecimal(String columnLabel, int scale) throws SQLException { + return getBigDecimal(getColumnIndex(columnLabel), scale); + } + + @Override + public byte[] getBytes(String columnLabel) throws SQLException { + return getBytes(getColumnIndex(columnLabel)); + } + + @Override + public Date getDate(String columnLabel) throws SQLException { + return getDate(getColumnIndex(columnLabel)); + } + + @Override + public Time getTime(String columnLabel) throws SQLException { + return getTime(getColumnIndex(columnLabel)); + } + + @Override + public Timestamp getTimestamp(String columnLabel) throws SQLException { + return getTimestamp(getColumnIndex(columnLabel)); + } + + @Override + public InputStream getAsciiStream(String columnLabel) throws SQLException { + return getAsciiStream(getColumnIndex(columnLabel)); + } + + @Override + public InputStream getUnicodeStream(String columnLabel) throws SQLException { + return getUnicodeStream(getColumnIndex(columnLabel)); + } + + @Override + public InputStream getBinaryStream(String columnLabel) throws SQLException { + return getBinaryStream(getColumnIndex(columnLabel)); + } + + @Override + public BigDecimal getBigDecimal(String columnLabel) throws SQLException { + return getBigDecimal(getColumnIndex(columnLabel)); + } + + @Override + public Blob getBlob(String columnLabel) throws SQLException { + return getBlob(getColumnIndex(columnLabel)); + } + + @Override + public Clob getClob(String columnLabel) throws SQLException { + return getClob(getColumnIndex(columnLabel)); + } + + @Override + public Array getArray(String columnLabel) throws SQLException { + return 
getArray(getColumnIndex(columnLabel)); + } + + @Override + public Reader getCharacterStream(String columnLabel) throws SQLException { + return getCharacterStream(getColumnIndex(columnLabel)); + } + + @Override + public Date getDate(String columnLabel, Calendar cal) throws SQLException { + return getDate(getColumnIndex(columnLabel), cal); + } + + @Override + public Time getTime(String columnLabel, Calendar cal) throws SQLException { + return getTime(getColumnIndex(columnLabel), cal); + } + + @Override + public Timestamp getTimestamp(String columnLabel, Calendar cal) throws SQLException { + return getTimestamp(getColumnIndex(columnLabel), cal); + } +} diff --git a/google-cloud-bigquery-jdbc/src/main/java/com/google/cloud/bigquery/jdbc/BigQueryBaseStruct.java b/google-cloud-bigquery-jdbc/src/main/java/com/google/cloud/bigquery/jdbc/BigQueryBaseStruct.java new file mode 100644 index 000000000..ab9cf61cb --- /dev/null +++ b/google-cloud-bigquery-jdbc/src/main/java/com/google/cloud/bigquery/jdbc/BigQueryBaseStruct.java @@ -0,0 +1,97 @@ +/* + * Copyright 2023 Google LLC + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package com.google.cloud.bigquery.jdbc; + +import static com.google.cloud.bigquery.StandardSQLTypeName.STRUCT; +import static com.google.cloud.bigquery.jdbc.BigQueryErrorMessage.CUSTOMER_TYPE_MAPPING_NOT_SUPPORTED; + +import com.google.cloud.bigquery.Field; +import com.google.cloud.bigquery.FieldList; +import com.google.cloud.bigquery.exception.BigQueryJdbcSqlFeatureNotSupportedException; +import java.sql.Date; +import java.sql.SQLException; +import java.sql.Time; +import java.sql.Timestamp; +import java.util.Base64; +import java.util.Map; + +/** + * An abstract implementation of {@link java.sql.Struct} used as a base class for {@link + * BigQueryArrowStruct} and {@link BigQueryJsonStruct}. A Struct object contains a value for each + * attribute of the SQL structured type that it represents. + */ +abstract class BigQueryBaseStruct implements java.sql.Struct { + private static final BigQueryJdbcCustomLogger LOG = + new BigQueryJdbcCustomLogger(BigQueryBaseStruct.class.getName()); + + abstract FieldList getSchema(); + + @Override + public final String getSQLTypeName() throws SQLException { + throw new BigQueryJdbcSqlFeatureNotSupportedException(CUSTOMER_TYPE_MAPPING_NOT_SUPPORTED); + } + + @Override + public final Object[] getAttributes(Map> map) throws SQLException { + throw new BigQueryJdbcSqlFeatureNotSupportedException(CUSTOMER_TYPE_MAPPING_NOT_SUPPORTED); + } + + static boolean isStruct(Field currentSchema) { + LOG.finest("++enter++"); + return currentSchema.getType().getStandardType() == STRUCT; + } + + @Override + public String toString() { + try { + FieldList schema = getSchema(); + Object[] attributes = getAttributes(); + + if (schema == null || attributes == null || schema.size() != attributes.length) { + return "{}"; + } + + StringBuilder sb = new StringBuilder("{"); + for (int i = 0; i < attributes.length; i++) { + if (i > 0) { + sb.append(","); + } + String fieldName = schema.get(i).getName(); + Object value = attributes[i]; + + 
sb.append("\"").append(fieldName.replace("\"", "\\\"")).append("\":"); + + if (value == null) { + sb.append("null"); + } else if (value instanceof String || value instanceof org.apache.arrow.vector.util.Text) { + String stringValue = value.toString().replace("\"", "\\\""); + sb.append("\"").append(stringValue).append("\""); + } else if (value instanceof Timestamp || value instanceof Date || value instanceof Time) { + sb.append("\"").append(value.toString()).append("\""); + } else if (value instanceof byte[]) { + sb.append("\"").append(Base64.getEncoder().encodeToString((byte[]) value)).append("\""); + } else { + sb.append(value.toString()); + } + } + sb.append("}"); + return sb.toString(); + } catch (SQLException e) { + return "{ \"error\": \"Error converting struct to string: " + e.getMessage() + "\" }"; + } + } +} diff --git a/google-cloud-bigquery-jdbc/src/main/java/com/google/cloud/bigquery/jdbc/BigQueryCallableStatement.java b/google-cloud-bigquery-jdbc/src/main/java/com/google/cloud/bigquery/jdbc/BigQueryCallableStatement.java new file mode 100644 index 000000000..041505c62 --- /dev/null +++ b/google-cloud-bigquery-jdbc/src/main/java/com/google/cloud/bigquery/jdbc/BigQueryCallableStatement.java @@ -0,0 +1,1341 @@ +/* + * Copyright 2025 Google LLC + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package com.google.cloud.bigquery.jdbc; + +import com.google.cloud.bigquery.StandardSQLTypeName; +import com.google.cloud.bigquery.exception.BigQueryJdbcSqlFeatureNotSupportedException; +import com.google.cloud.bigquery.jdbc.BigQueryParameterHandler.BigQueryStatementParameterType; +import com.google.common.annotations.VisibleForTesting; +import java.io.BufferedReader; +import java.io.CharArrayReader; +import java.io.FilterReader; +import java.io.InputStream; +import java.io.InputStreamReader; +import java.io.PipedReader; +import java.io.Reader; +import java.io.StringReader; +import java.math.BigDecimal; +import java.net.URL; +import java.sql.Array; +import java.sql.Blob; +import java.sql.CallableStatement; +import java.sql.Clob; +import java.sql.Date; +import java.sql.NClob; +import java.sql.Ref; +import java.sql.RowId; +import java.sql.SQLException; +import java.sql.SQLXML; +import java.sql.Time; +import java.sql.Timestamp; +import java.sql.Types; +import java.util.Calendar; +import java.util.Map; + +class BigQueryCallableStatement extends BigQueryPreparedStatement implements CallableStatement { + private final BigQueryJdbcCustomLogger LOG = new BigQueryJdbcCustomLogger(this.toString()); + + BigQueryCallableStatement(BigQueryConnection connection, String callableStmtSql) + throws SQLException { + super(connection, callableStmtSql); + } + + @VisibleForTesting + protected String getCallableStatementSql() { + return this.currentQuery; + } + + @VisibleForTesting + protected BigQueryParameterHandler getParameterHandler() { + return this.parameterHandler; + } + + @Override + public Array getArray(int arg0) throws SQLException { + LOG.finest("++enter++"); + Object param = this.parameterHandler.getParameter(arg0); + if (param instanceof Array) { + return (Array) param; + } + if (param.getClass().isAssignableFrom(Array.class)) { + return getObject(arg0, Array.class); + } + return null; + } + + @Override + public Array getArray(String arg0) throws SQLException { + 
LOG.finest("++enter++"); + Object param = this.parameterHandler.getParameter(arg0); + if (param instanceof Array) { + return (Array) param; + } + if (param.getClass().isAssignableFrom(Array.class)) { + return getObject(arg0, Array.class); + } + return null; + } + + @Override + public BigDecimal getBigDecimal(int arg0) throws SQLException { + LOG.finest("++enter++"); + Object param = this.parameterHandler.getParameter(arg0); + if (param instanceof BigDecimal) { + return (BigDecimal) param; + } + if (param.getClass().isAssignableFrom(BigDecimal.class)) { + return getObject(arg0, BigDecimal.class); + } + return null; + } + + @Override + public BigDecimal getBigDecimal(String arg0) throws SQLException { + LOG.finest("++enter++"); + Object param = this.parameterHandler.getParameter(arg0); + if (param instanceof BigDecimal) { + return (BigDecimal) param; + } + if (param.getClass().isAssignableFrom(BigDecimal.class)) { + return getObject(arg0, BigDecimal.class); + } + return null; + } + + @Override + public BigDecimal getBigDecimal(int arg0, int arg1) throws SQLException { + LOG.finest("++enter++"); + return getBigDecimal(arg0); + } + + @Override + public Blob getBlob(int arg0) throws SQLException { + LOG.finest("++enter++"); + Object param = this.parameterHandler.getParameter(arg0); + if (param instanceof Blob) { + return (Blob) param; + } + if (param.getClass().isAssignableFrom(Blob.class)) { + return getObject(arg0, Blob.class); + } + return null; + } + + @Override + public Blob getBlob(String arg0) throws SQLException { + LOG.finest("++enter++"); + Object param = this.parameterHandler.getParameter(arg0); + if (param instanceof Blob) { + return (Blob) param; + } + if (param.getClass().isAssignableFrom(Blob.class)) { + return getObject(arg0, Blob.class); + } + return null; + } + + @Override + public boolean getBoolean(int arg0) throws SQLException { + LOG.finest("++enter++"); + Object param = this.parameterHandler.getParameter(arg0); + if (param instanceof Boolean) { + 
return (Boolean) param; + } + if (param.getClass().isAssignableFrom(Boolean.class)) { + return getObject(arg0, Boolean.class); + } + return false; + } + + @Override + public boolean getBoolean(String arg0) throws SQLException { + LOG.finest("++enter++"); + Object param = this.parameterHandler.getParameter(arg0); + if (param instanceof Boolean) { + return (Boolean) param; + } + if (param.getClass().isAssignableFrom(Boolean.class)) { + return getObject(arg0, Boolean.class); + } + return false; + } + + @Override + public byte getByte(int arg0) throws SQLException { + LOG.finest("++enter++"); + Object param = this.parameterHandler.getParameter(arg0); + if (param instanceof Byte) { + return (Byte) param; + } + if (param.getClass().isAssignableFrom(Byte.class)) { + return getObject(arg0, Byte.class); + } + return -1; + } + + @Override + public byte getByte(String arg0) throws SQLException { + LOG.finest("++enter++"); + Object param = this.parameterHandler.getParameter(arg0); + if (param instanceof Byte) { + return (Byte) param; + } + if (param.getClass().isAssignableFrom(Byte.class)) { + return getObject(arg0, Byte.class); + } + return -1; + } + + @Override + public byte[] getBytes(int arg0) throws SQLException { + LOG.finest("++enter++"); + Object param = this.parameterHandler.getParameter(arg0); + if (param instanceof byte[] || param.getClass().isAssignableFrom(byte[].class)) { + return (byte[]) param; + } + if (param instanceof String) { + return param.toString().getBytes(); + } + return null; + } + + @Override + public byte[] getBytes(String arg0) throws SQLException { + LOG.finest("++enter++"); + Object param = this.parameterHandler.getParameter(arg0); + if (param instanceof byte[] || param.getClass().isAssignableFrom(byte[].class)) { + return (byte[]) param; + } + if (param instanceof String) { + return param.toString().getBytes(); + } + return null; + } + + // FilterReader, InputStreamReader, PipedReader, StringReader + @Override + public Reader 
getCharacterStream(int arg0) throws SQLException { + LOG.finest("++enter++"); + Object param = this.parameterHandler.getParameter(arg0); + if (param instanceof String || param.getClass().isAssignableFrom(String.class)) { + return new StringReader(param.toString()); + } + + if (param instanceof BufferedReader) { + return (BufferedReader) param; + } + if (param.getClass().isAssignableFrom(BufferedReader.class)) { + return getObject(arg0, BufferedReader.class); + } + + if (param instanceof CharArrayReader) { + return (CharArrayReader) param; + } + if (param.getClass().isAssignableFrom(CharArrayReader.class)) { + return getObject(arg0, CharArrayReader.class); + } + + if (param instanceof FilterReader) { + return (FilterReader) param; + } + if (param.getClass().isAssignableFrom(FilterReader.class)) { + return getObject(arg0, FilterReader.class); + } + + if (param instanceof InputStreamReader) { + return (InputStreamReader) param; + } + if (param.getClass().isAssignableFrom(InputStreamReader.class)) { + return getObject(arg0, InputStreamReader.class); + } + + if (param instanceof PipedReader) { + return (PipedReader) param; + } + if (param.getClass().isAssignableFrom(PipedReader.class)) { + return getObject(arg0, PipedReader.class); + } + + if (param instanceof StringReader) { + return (StringReader) param; + } + if (param.getClass().isAssignableFrom(StringReader.class)) { + return getObject(arg0, StringReader.class); + } + return null; + } + + @Override + public Reader getCharacterStream(String arg0) throws SQLException { + LOG.finest("++enter++"); + Object param = this.parameterHandler.getParameter(arg0); + if (param instanceof String || param.getClass().isAssignableFrom(String.class)) { + return new StringReader(param.toString()); + } + + if (param instanceof BufferedReader) { + return (BufferedReader) param; + } + if (param.getClass().isAssignableFrom(BufferedReader.class)) { + return getObject(arg0, BufferedReader.class); + } + + if (param instanceof 
CharArrayReader) { + return (CharArrayReader) param; + } + if (param.getClass().isAssignableFrom(CharArrayReader.class)) { + return getObject(arg0, CharArrayReader.class); + } + + if (param instanceof FilterReader) { + return (FilterReader) param; + } + if (param.getClass().isAssignableFrom(FilterReader.class)) { + return getObject(arg0, FilterReader.class); + } + + if (param instanceof InputStreamReader) { + return (InputStreamReader) param; + } + if (param.getClass().isAssignableFrom(InputStreamReader.class)) { + return getObject(arg0, InputStreamReader.class); + } + + if (param instanceof PipedReader) { + return (PipedReader) param; + } + if (param.getClass().isAssignableFrom(PipedReader.class)) { + return getObject(arg0, PipedReader.class); + } + + if (param instanceof StringReader) { + return (StringReader) param; + } + if (param.getClass().isAssignableFrom(StringReader.class)) { + return getObject(arg0, StringReader.class); + } + return null; + } + + @Override + public Clob getClob(int arg0) throws SQLException { + LOG.finest("++enter++"); + Object param = this.parameterHandler.getParameter(arg0); + if (param instanceof Clob) { + return (Clob) param; + } + if (param.getClass().isAssignableFrom(Clob.class)) { + return getObject(arg0, Clob.class); + } + return null; + } + + @Override + public Clob getClob(String arg0) throws SQLException { + LOG.finest("++enter++"); + Object param = this.parameterHandler.getParameter(arg0); + if (param instanceof Clob) { + return (Clob) param; + } + if (param.getClass().isAssignableFrom(Clob.class)) { + return getObject(arg0, Clob.class); + } + return null; + } + + @Override + public Date getDate(int arg0) throws SQLException { + LOG.finest("++enter++"); + Object param = this.parameterHandler.getParameter(arg0); + if (param instanceof Date) { + return (Date) param; + } + if (param.getClass().isAssignableFrom(Date.class)) { + return getObject(arg0, Date.class); + } + return null; + } + + @Override + public Date getDate(String 
arg0) throws SQLException { + LOG.finest("++enter++"); + Object param = this.parameterHandler.getParameter(arg0); + if (param instanceof Date) { + return (Date) param; + } + if (param.getClass().isAssignableFrom(Date.class)) { + return getObject(arg0, Date.class); + } + return null; + } + + @Override + public Date getDate(int arg0, Calendar arg1) throws SQLException { + LOG.finest("++enter++"); + Object param = this.parameterHandler.getParameter(arg0); + if (param instanceof Date) { + Date dateParam = (Date) param; + if (arg1 != null) { + arg1.setTime(dateParam); + return new Date(arg1.getTimeInMillis()); + } + } + if (param.getClass().isAssignableFrom(Date.class)) { + Date dateObj = getObject(arg0, Date.class); + if (arg1 != null) { + arg1.setTime(dateObj); + return new Date(arg1.getTimeInMillis()); + } + } + return null; + } + + @Override + public Date getDate(String arg0, Calendar arg1) throws SQLException { + LOG.finest("++enter++"); + Object param = this.parameterHandler.getParameter(arg0); + if (param instanceof Date) { + Date dateParam = (Date) param; + if (arg1 != null) { + arg1.setTime(dateParam); + return new Date(arg1.getTimeInMillis()); + } + } + if (param.getClass().isAssignableFrom(Date.class)) { + Date dateObj = getObject(arg0, Date.class); + if (arg1 != null) { + arg1.setTime(dateObj); + return new Date(arg1.getTimeInMillis()); + } + } + return null; + } + + @Override + public double getDouble(int arg0) throws SQLException { + LOG.finest("++enter++"); + Object param = this.parameterHandler.getParameter(arg0); + if (param instanceof Double) { + return (Double) param; + } + if (param.getClass().isAssignableFrom(Double.class)) { + return getObject(arg0, Double.class); + } + return 0; + } + + @Override + public double getDouble(String arg0) throws SQLException { + LOG.finest("++enter++"); + Object param = this.parameterHandler.getParameter(arg0); + if (param instanceof Double) { + return (Double) param; + } + if 
(param.getClass().isAssignableFrom(Double.class)) { + return getObject(arg0, Double.class); + } + return 0; + } + + @Override + public float getFloat(int arg0) throws SQLException { + LOG.finest("++enter++"); + Object param = this.parameterHandler.getParameter(arg0); + if (param instanceof Float) { + return (Float) param; + } + if (param.getClass().isAssignableFrom(Float.class)) { + return getObject(arg0, Float.class); + } + return 0; + } + + @Override + public float getFloat(String arg0) throws SQLException { + LOG.finest("++enter++"); + Object param = this.parameterHandler.getParameter(arg0); + if (param instanceof Float) { + return (Float) param; + } + if (param.getClass().isAssignableFrom(Float.class)) { + return getObject(arg0, Float.class); + } + return 0; + } + + @Override + public int getInt(int arg0) throws SQLException { + LOG.finest("++enter++"); + Object param = this.parameterHandler.getParameter(arg0); + if (param instanceof Integer) { + return (Integer) param; + } + if (param.getClass().isAssignableFrom(Integer.class)) { + return getObject(arg0, Integer.class); + } + return 0; + } + + @Override + public int getInt(String arg0) throws SQLException { + LOG.finest("++enter++"); + Object param = this.parameterHandler.getParameter(arg0); + if (param instanceof Integer) { + return (Integer) param; + } + if (param.getClass().isAssignableFrom(Integer.class)) { + return getObject(arg0, Integer.class); + } + return 0; + } + + @Override + public long getLong(int arg0) throws SQLException { + LOG.finest("++enter++"); + Object param = this.parameterHandler.getParameter(arg0); + if (param instanceof Long) { + return (Long) param; + } + if (param.getClass().isAssignableFrom(Long.class)) { + return getObject(arg0, Long.class); + } + if (param instanceof Integer) { + return (Long) param; + } + if (param.getClass().isAssignableFrom(Integer.class)) { + return getObject(arg0, Integer.class); + } + return 0; + } + + @Override + public long getLong(String arg0) throws 
SQLException { + LOG.finest("++enter++"); + Object param = this.parameterHandler.getParameter(arg0); + if (param instanceof Long) { + return (Long) param; + } + if (param.getClass().isAssignableFrom(Long.class)) { + return getObject(arg0, Long.class); + } + if (param instanceof Integer) { + return (Long) param; + } + if (param.getClass().isAssignableFrom(Integer.class)) { + return getObject(arg0, Integer.class); + } + return 0; + } + + @Override + public Reader getNCharacterStream(int arg0) throws SQLException { + LOG.finest("++enter++"); + return getCharacterStream(arg0); + } + + @Override + public Reader getNCharacterStream(String arg0) throws SQLException { + LOG.finest("++enter++"); + return getCharacterStream(arg0); + } + + @Override + public NClob getNClob(int arg0) throws SQLException { + LOG.finest("++enter++"); + Object param = this.parameterHandler.getParameter(arg0); + if (param instanceof NClob) {} + if (param.getClass().isAssignableFrom(NClob.class)) { + return getObject(arg0, NClob.class); + } + return null; + } + + @Override + public NClob getNClob(String arg0) throws SQLException { + LOG.finest("++enter++"); + Object param = this.parameterHandler.getParameter(arg0); + if (param instanceof NClob) { + return (NClob) param; + } + if (param.getClass().isAssignableFrom(NClob.class)) { + return getObject(arg0, NClob.class); + } + return null; + } + + @Override + public String getNString(int arg0) throws SQLException { + LOG.finest("++enter++"); + Object param = this.parameterHandler.getParameter(arg0); + if (param instanceof String) { + return param.toString(); + } + if (param.getClass().isAssignableFrom(String.class)) { + return getObject(arg0, String.class); + } + return null; + } + + @Override + public String getNString(String arg0) throws SQLException { + LOG.finest("++enter++"); + Object param = this.parameterHandler.getParameter(arg0); + if (param instanceof String) { + return param.toString(); + } + if 
(param.getClass().isAssignableFrom(String.class)) { + return getObject(arg0, String.class); + } + return null; + } + + @Override + public Object getObject(int arg0) throws SQLException { + LOG.finest("++enter++"); + return this.parameterHandler.getParameter(arg0); + } + + @Override + public Object getObject(String arg0) throws SQLException { + LOG.finest("++enter++"); + return this.parameterHandler.getParameter(arg0); + } + + @Override + public Object getObject(int arg0, Map> arg1) throws SQLException { + LOG.finest("++enter++"); + String paramKey = this.parameterHandler.getSqlType(arg0).name(); + if (arg1.containsKey(paramKey)) { + Class argJavaType = arg1.get(paramKey); + Class paramJavaType = this.parameterHandler.getType(arg0); + if (paramJavaType.isAssignableFrom(argJavaType)) { + return this.parameterHandler.getParameter(arg0); + } + } + return null; + } + + @Override + public Object getObject(String arg0, Map> arg1) throws SQLException { + LOG.finest("++enter++"); + String paramKey = this.parameterHandler.getSqlType(arg0).name(); + if (arg1.containsKey(paramKey)) { + Class argJavaType = arg1.get(paramKey); + Class paramJavaType = this.parameterHandler.getType(arg0); + if (paramJavaType.isAssignableFrom(argJavaType)) { + return this.parameterHandler.getParameter(arg0); + } + } + return null; + } + + @Override + public T getObject(int arg0, Class arg1) throws SQLException { + LOG.finest("++enter++"); + Class javaType = this.parameterHandler.getType(arg0); + if (javaType.isAssignableFrom(arg1)) { + return (T) this.parameterHandler.getParameter(arg0); + } + return null; + } + + @Override + public T getObject(String arg0, Class arg1) throws SQLException { + LOG.finest("++enter++"); + Class javaType = this.parameterHandler.getType(arg0); + if (javaType.isAssignableFrom(arg1)) { + return (T) this.parameterHandler.getParameter(arg0); + } + return null; + } + + @Override + public Ref getRef(int arg0) throws SQLException { + // TODO Auto-generated method stub + return 
null; + } + + @Override + public Ref getRef(String arg0) throws SQLException { + // TODO Auto-generated method stub + return null; + } + + @Override + public RowId getRowId(int arg0) throws SQLException { + LOG.finest("++enter++"); + Object param = this.parameterHandler.getParameter(arg0); + if (param instanceof RowId) { + return (RowId) param; + } + if (param.getClass().isAssignableFrom(RowId.class)) { + return getObject(arg0, RowId.class); + } + return null; + } + + @Override + public RowId getRowId(String arg0) throws SQLException { + LOG.finest("++enter++"); + Object param = this.parameterHandler.getParameter(arg0); + if (param instanceof RowId) { + return (RowId) param; + } + if (param.getClass().isAssignableFrom(RowId.class)) { + return getObject(arg0, RowId.class); + } + return null; + } + + @Override + public SQLXML getSQLXML(int arg0) throws SQLException { + // TODO Auto-generated method stub + return null; + } + + @Override + public SQLXML getSQLXML(String arg0) throws SQLException { + // TODO Auto-generated method stub + return null; + } + + @Override + public short getShort(int arg0) throws SQLException { + LOG.finest("++enter++"); + Object param = this.parameterHandler.getParameter(arg0); + if (param instanceof Short) { + return (Short) param; + } + if (param.getClass().isAssignableFrom(Short.class)) { + return getObject(arg0, Short.class); + } + return 0; + } + + @Override + public short getShort(String arg0) throws SQLException { + LOG.finest("++enter++"); + Object param = this.parameterHandler.getParameter(arg0); + if (param instanceof Short) { + return (Short) param; + } + if (param.getClass().isAssignableFrom(Short.class)) { + return getObject(arg0, Short.class); + } + return 0; + } + + @Override + public String getString(int arg0) throws SQLException { + LOG.finest("++enter++"); + Object param = this.parameterHandler.getParameter(arg0); + if (param instanceof String) { + return param.toString(); + } + if 
(param.getClass().isAssignableFrom(String.class)) { + return getObject(arg0, String.class); + } + return null; + } + + @Override + public String getString(String arg0) throws SQLException { + LOG.finest("++enter++"); + Object param = this.parameterHandler.getParameter(arg0); + if (param instanceof String) { + return param.toString(); + } + if (param.getClass().isAssignableFrom(String.class)) { + return getObject(arg0, String.class); + } + return null; + } + + @Override + public Time getTime(int arg0) throws SQLException { + LOG.finest("++enter++"); + Object param = this.parameterHandler.getParameter(arg0); + if (param instanceof Time) { + return (Time) param; + } + if (param.getClass().isAssignableFrom(Time.class)) { + return getObject(arg0, Time.class); + } + return null; + } + + @Override + public Time getTime(String arg0) throws SQLException { + LOG.finest("++enter++"); + Object param = this.parameterHandler.getParameter(arg0); + if (param instanceof Time) { + return (Time) param; + } + if (param.getClass().isAssignableFrom(Time.class)) { + return getObject(arg0, Time.class); + } + return null; + } + + @Override + public Time getTime(int arg0, Calendar arg1) throws SQLException { + LOG.finest("++enter++"); + Object param = this.parameterHandler.getParameter(arg0); + if (param instanceof Time) { + Time timeParam = (Time) param; + if (arg1 != null) { + arg1.setTimeInMillis(timeParam.getTime()); + return new Time(arg1.getTimeInMillis()); + } + } + if (param.getClass().isAssignableFrom(Time.class)) { + Time timeObj = getObject(arg0, Time.class); + if (arg1 != null) { + arg1.setTimeInMillis(timeObj.getTime()); + return new Time(arg1.getTimeInMillis()); + } + } + return null; + } + + @Override + public Time getTime(String arg0, Calendar arg1) throws SQLException { + LOG.finest("++enter++"); + Object param = this.parameterHandler.getParameter(arg0); + if (param instanceof Time) { + Time timeParam = (Time) param; + if (arg1 != null) { + 
arg1.setTimeInMillis(timeParam.getTime()); + return new Time(arg1.getTimeInMillis()); + } + } + if (param.getClass().isAssignableFrom(Time.class)) { + Time timeObj = getObject(arg0, Time.class); + if (arg1 != null) { + arg1.setTimeInMillis(timeObj.getTime()); + return new Time(arg1.getTimeInMillis()); + } + } + return null; + } + + @Override + public Timestamp getTimestamp(int arg0) throws SQLException { + LOG.finest("++enter++"); + Object param = this.parameterHandler.getParameter(arg0); + if (param instanceof Timestamp) { + return (Timestamp) param; + } + if (param.getClass().isAssignableFrom(Timestamp.class)) { + return getObject(arg0, Timestamp.class); + } + return null; + } + + @Override + public Timestamp getTimestamp(String arg0) throws SQLException { + LOG.finest("++enter++"); + Object param = this.parameterHandler.getParameter(arg0); + if (param instanceof Timestamp) { + return (Timestamp) param; + } + if (param.getClass().isAssignableFrom(Timestamp.class)) { + return getObject(arg0, Timestamp.class); + } + return null; + } + + @Override + public Timestamp getTimestamp(int arg0, Calendar arg1) throws SQLException { + LOG.finest("++enter++"); + Object param = this.parameterHandler.getParameter(arg0); + if (param instanceof Timestamp) { + Timestamp timestampParam = (Timestamp) param; + if (arg1 != null) { + arg1.setTimeInMillis(timestampParam.getTime()); + return new Timestamp(arg1.getTimeInMillis()); + } + } + if (param.getClass().isAssignableFrom(Timestamp.class)) { + Timestamp timestampObj = getObject(arg0, Timestamp.class); + if (arg1 != null) { + arg1.setTimeInMillis(timestampObj.getTime()); + return new Timestamp(arg1.getTimeInMillis()); + } + } + return null; + } + + @Override + public Timestamp getTimestamp(String arg0, Calendar arg1) throws SQLException { + LOG.finest("++enter++"); + Object param = this.parameterHandler.getParameter(arg0); + if (param instanceof Timestamp) { + Timestamp timestampParam = (Timestamp) param; + if (arg1 != null) { + 
arg1.setTimeInMillis(timestampParam.getTime()); + return new Timestamp(arg1.getTimeInMillis()); + } + } + if (param.getClass().isAssignableFrom(Timestamp.class)) { + Timestamp timestampObj = getObject(arg0, Timestamp.class); + if (arg1 != null) { + arg1.setTimeInMillis(timestampObj.getTime()); + return new Timestamp(arg1.getTimeInMillis()); + } + } + return null; + } + + @Override + public URL getURL(int arg0) throws SQLException { + LOG.finest("++enter++"); + Object param = this.parameterHandler.getParameter(arg0); + if (param instanceof URL) { + return (URL) param; + } + if (param.getClass().isAssignableFrom(URL.class)) { + return getObject(arg0, URL.class); + } + return null; + } + + @Override + public URL getURL(String arg0) throws SQLException { + LOG.finest("++enter++"); + Object param = this.parameterHandler.getParameter(arg0); + if (param instanceof URL) { + return (URL) param; + } + if (param.getClass().isAssignableFrom(URL.class)) { + return getObject(arg0, URL.class); + } + return null; + } + + @Override + public void registerOutParameter(int paramIndex, int sqlType) throws SQLException { + LOG.finest("++enter++"); + LOG.finest( + String.format("registerOutParameter: paramIndex %s, sqlType %s", paramIndex, sqlType)); + checkClosed(); + try { + this.parameterHandler.setParameter( + paramIndex, + null, + BigQueryJdbcTypeMappings.getJavaType(sqlType), + BigQueryParameterHandler.BigQueryStatementParameterType.OUT, + -1); + } catch (Exception e) { + throw new SQLException(e); + } + } + + @Override + public void registerOutParameter(String paramName, int sqlType) throws SQLException { + LOG.finest("++enter++"); + LOG.finest(String.format("registerOutParameter: paramName %s, sqlType %s", paramName, sqlType)); + checkClosed(); + try { + this.parameterHandler.setParameter( + paramName, + null, + BigQueryJdbcTypeMappings.getJavaType(sqlType), + BigQueryParameterHandler.BigQueryStatementParameterType.OUT, + -1); + } catch (Exception e) { + throw new 
SQLException(e); + } + } + + @Override + public void registerOutParameter(int paramIndex, int sqlType, int scale) throws SQLException { + LOG.finest("++enter++"); + LOG.finest( + String.format( + "registerOutParameter: paramIndex %s, sqlType %s, scale %s", + paramIndex, sqlType, scale)); + checkClosed(); + if (sqlType != Types.NUMERIC && sqlType != Types.DECIMAL) { + throw new IllegalArgumentException( + String.format("registerOutParameter: Invalid sqlType passed in %s", sqlType)); + } + try { + this.parameterHandler.setParameter( + paramIndex, + null, + BigQueryJdbcTypeMappings.getJavaType(sqlType), + BigQueryParameterHandler.BigQueryStatementParameterType.OUT, + scale); + } catch (Exception e) { + throw new SQLException(e); + } + } + + @Override + public void registerOutParameter(int paramIndex, int sqlType, String typeName) + throws SQLException { + LOG.finest("++enter++"); + LOG.finest( + String.format( + "registerOutParameter: paramIndex %s, sqlType %s, typeName %s", + paramIndex, sqlType, typeName)); + // fully qualified sql typeName is not supported by the driver and hence ignored. 
+ registerOutParameter(paramIndex, sqlType); + } + + @Override + public void registerOutParameter(String paramName, int sqlType, int scale) throws SQLException { + LOG.finest("++enter++"); + LOG.finest( + String.format( + "registerOutParameter: paramIndex %s, sqlType %s, scale %s", + paramName, sqlType, scale)); + checkClosed(); + if (sqlType != Types.NUMERIC && sqlType != Types.DECIMAL) { + throw new IllegalArgumentException( + String.format("registerOutParameter: Invalid sqlType passed in %s", sqlType)); + } + try { + this.parameterHandler.setParameter( + paramName, + null, + BigQueryJdbcTypeMappings.getJavaType(sqlType), + BigQueryParameterHandler.BigQueryStatementParameterType.OUT, + scale); + } catch (Exception e) { + throw new SQLException(e); + } + } + + @Override + public void registerOutParameter(String paramName, int sqlType, String typeName) + throws SQLException { + LOG.finest("++enter++"); + LOG.finest( + String.format( + "registerOutParameter: paramIndex %s, sqlType %s, typeName %s", + paramName, sqlType, typeName)); + // fully qualified sql typeName is not supported by the driver and hence ignored. 
+ registerOutParameter(paramName, sqlType); + } + + @Override + public void setAsciiStream(String arg0, InputStream arg1) throws SQLException { + // TODO: NOT IMPLEMENTED (data type not supported) + } + + @Override + public void setAsciiStream(String arg0, InputStream arg1, int arg2) throws SQLException { + // TODO: NOT IMPLEMENTED (data type not supported) + } + + @Override + public void setAsciiStream(String arg0, InputStream arg1, long arg2) throws SQLException { + // TODO: NOT IMPLEMENTED (data type not supported) + } + + @Override + public void setBigDecimal(String arg0, BigDecimal arg1) throws SQLException { + this.parameterHandler.setParameter( + arg0, arg1, arg1.getClass(), BigQueryStatementParameterType.IN, 0); + } + + @Override + public void setBinaryStream(String arg0, InputStream arg1) throws SQLException { + // TODO: NOT IMPLEMENTED (data type not supported) + + } + + @Override + public void setBinaryStream(String arg0, InputStream arg1, int arg2) throws SQLException { + // TODO: NOT IMPLEMENTED (data type not supported) + } + + @Override + public void setBinaryStream(String arg0, InputStream arg1, long arg2) throws SQLException { + // TODO: NOT IMPLEMENTED (data type not supported) + + } + + @Override + public void setBlob(String arg0, Blob arg1) throws SQLException { + // TODO: NOT IMPLEMENTED (data type not supported) + } + + @Override + public void setBlob(String arg0, InputStream arg1) throws SQLException { + // TODO: NOT IMPLEMENTED (data type not supported) + } + + @Override + public void setBlob(String arg0, InputStream arg1, long arg2) throws SQLException { + // TODO: NOT IMPLEMENTED (data type not supported) + } + + @Override + public void setBoolean(String arg0, boolean arg1) throws SQLException { + this.parameterHandler.setParameter( + arg0, arg1, Boolean.class, BigQueryStatementParameterType.IN, 0); + } + + @Override + public void setByte(String arg0, byte arg1) throws SQLException { + this.parameterHandler.setParameter( + arg0, arg1, 
Byte.class, BigQueryStatementParameterType.IN, 0); + } + + @Override + public void setBytes(String arg0, byte[] arg1) throws SQLException { + this.parameterHandler.setParameter( + arg0, arg1, byte[].class, BigQueryStatementParameterType.IN, 0); + } + + @Override + public void setCharacterStream(String arg0, Reader arg1) throws SQLException { + // TODO: NOT IMPLEMENTED (data type not supported) + + } + + @Override + public void setCharacterStream(String arg0, Reader arg1, int arg2) throws SQLException { + // TODO: NOT IMPLEMENTED (data type not supported) + + } + + @Override + public void setCharacterStream(String arg0, Reader arg1, long arg2) throws SQLException { + // TODO: NOT IMPLEMENTED (data type not supported) + + } + + @Override + public void setClob(String arg0, Clob arg1) throws SQLException { + // TODO: NOT IMPLEMENTED (data type not supported) + + } + + @Override + public void setClob(String arg0, Reader arg1) throws SQLException { + // TODO: NOT IMPLEMENTED (data type not supported) + + } + + @Override + public void setClob(String arg0, Reader arg1, long arg2) throws SQLException { + // TODO: NOT IMPLEMENTED (data type not supported) + + } + + @Override + public void setDate(String arg0, Date arg1) throws SQLException { + this.parameterHandler.setParameter( + arg0, arg1, arg1.getClass(), BigQueryStatementParameterType.IN, 0); + } + + @Override + public void setDate(String arg0, Date arg1, Calendar arg2) throws SQLException { + arg2.setTimeInMillis(arg1.getTime()); + this.parameterHandler.setParameter( + arg0, + new Date(arg2.getTimeInMillis()), + arg1.getClass(), + BigQueryStatementParameterType.IN, + 0); + } + + @Override + public void setDouble(String arg0, double arg1) throws SQLException { + this.parameterHandler.setParameter( + arg0, arg1, Double.class, BigQueryStatementParameterType.IN, 0); + } + + @Override + public void setFloat(String arg0, float arg1) throws SQLException { + this.parameterHandler.setParameter( + arg0, arg1, Float.class, 
BigQueryStatementParameterType.IN, 0); + } + + @Override + public void setInt(String arg0, int arg1) throws SQLException { + this.parameterHandler.setParameter( + arg0, arg1, Integer.class, BigQueryStatementParameterType.IN, 0); + } + + @Override + public void setLong(String arg0, long arg1) throws SQLException { + this.parameterHandler.setParameter( + arg0, arg1, Long.class, BigQueryStatementParameterType.IN, 0); + } + + @Override + public void setNCharacterStream(String arg0, Reader arg1) throws SQLException { + // TODO: NOT IMPLEMENTED (data type not supported) + + } + + @Override + public void setNCharacterStream(String arg0, Reader arg1, long arg2) throws SQLException { + // TODO: NOT IMPLEMENTED (data type not supported) + } + + @Override + public void setNClob(String arg0, NClob arg1) throws SQLException { + // TODO: NOT IMPLEMENTED (data type not supported) + + } + + @Override + public void setNClob(String arg0, Reader arg1) throws SQLException { + // TODO: NOT IMPLEMENTED (data type not supported) + + } + + @Override + public void setNClob(String arg0, Reader arg1, long arg2) throws SQLException { + // TODO: NOT IMPLEMENTED (data type not supported) + + } + + @Override + public void setNString(String arg0, String arg1) throws SQLException { + this.parameterHandler.setParameter( + arg0, arg1, arg1.getClass(), BigQueryStatementParameterType.IN, 0); + } + + @Override + public void setNull(String arg0, int arg1) throws SQLException { + // TODO: NOT IMPLEMENTED (data type not supported) + + } + + @Override + public void setNull(String arg0, int arg1, String arg2) throws SQLException { + // TODO: NOT IMPLEMENTED (data type not supported) + + } + + @Override + public void setObject(String arg0, Object arg1) throws SQLException { + this.parameterHandler.setParameter( + arg0, arg1, arg1.getClass(), BigQueryStatementParameterType.IN, 0); + } + + @Override + public void setObject(String arg0, Object arg1, int arg2) throws SQLException { + 
this.parameterHandler.setParameter(
        arg0, arg1, arg1.getClass(), BigQueryStatementParameterType.IN, 0);
    // NOTE(review): the parameter is registered *before* the requested sqlType
    // (arg2) is validated below, so a rejected call still leaves the value
    // bound — confirm whether validation should happen first. Reordering is
    // not done here because getSqlType(arg0) may depend on the setParameter
    // call above.
    StandardSQLTypeName sqlType = this.parameterHandler.getSqlType(arg0);
    if (BigQueryJdbcTypeMappings.standardSQLToJavaSqlTypesMapping.containsKey(sqlType)) {
      int javaSqlType = BigQueryJdbcTypeMappings.standardSQLToJavaSqlTypesMapping.get(sqlType);
      if (javaSqlType != arg2) {
        // The caller's java.sql.Types code must match the type the handler
        // resolved for this parameter.
        throw new BigQueryJdbcSqlFeatureNotSupportedException(
            String.format("Unsupported sql type:%s ", arg2));
      }
    } else {
      throw new BigQueryJdbcSqlFeatureNotSupportedException(
          String.format("parameter sql type not supported: %s", sqlType));
    }
  }

  // Same as setObject(String, Object, int) but also forwards the
  // scale/length argument (arg3) to the parameter handler.
  @Override
  public void setObject(String arg0, Object arg1, int arg2, int arg3) throws SQLException {
    this.parameterHandler.setParameter(
        arg0, arg1, arg1.getClass(), BigQueryStatementParameterType.IN, arg3);
    // NOTE(review): same validate-after-set ordering concern as the 3-arg
    // overload above.
    StandardSQLTypeName sqlType = this.parameterHandler.getSqlType(arg0);
    if (BigQueryJdbcTypeMappings.standardSQLToJavaSqlTypesMapping.containsKey(sqlType)) {
      int javaSqlType = BigQueryJdbcTypeMappings.standardSQLToJavaSqlTypesMapping.get(sqlType);
      if (javaSqlType != arg2) {
        throw new BigQueryJdbcSqlFeatureNotSupportedException(
            String.format("Unsupported sql type:%s ", arg2));
      }
    } else {
      throw new BigQueryJdbcSqlFeatureNotSupportedException(
          String.format("parameter sql type not supported: %s", sqlType));
    }
  }

  @Override
  public void setRowId(String arg0, RowId arg1) throws SQLException {
    // TODO: NOT IMPLEMENTED (data type not supported)
  }

  @Override
  public void setSQLXML(String arg0, SQLXML arg1) throws SQLException {
    // TODO: NOT IMPLEMENTED (data type not supported)
  }

  // Binds a short IN parameter by name.
  @Override
  public void setShort(String arg0, short arg1) throws SQLException {
    this.parameterHandler.setParameter(
        arg0, arg1, Short.class, BigQueryStatementParameterType.IN, 0);
  }

  // Binds a String IN parameter by name.
  @Override
  public void setString(String arg0, String arg1) throws SQLException {
this.parameterHandler.setParameter( + arg0, arg1, arg1.getClass(), BigQueryStatementParameterType.IN, 0); + } + + @Override + public void setTime(String arg0, Time arg1) throws SQLException { + this.parameterHandler.setParameter( + arg0, arg1, arg1.getClass(), BigQueryStatementParameterType.IN, 0); + } + + @Override + public void setTime(String arg0, Time arg1, Calendar arg2) throws SQLException { + arg2.setTimeInMillis(arg1.getTime()); + this.parameterHandler.setParameter( + arg0, + new Time(arg2.getTimeInMillis()), + arg1.getClass(), + BigQueryStatementParameterType.IN, + 0); + } + + @Override + public void setTimestamp(String arg0, Timestamp arg1) throws SQLException { + this.parameterHandler.setParameter( + arg0, arg1, arg1.getClass(), BigQueryStatementParameterType.IN, 0); + } + + @Override + public void setTimestamp(String arg0, Timestamp arg1, Calendar arg2) throws SQLException { + arg2.setTimeInMillis(arg1.getTime()); + this.parameterHandler.setParameter( + arg0, + new Timestamp(arg2.getTimeInMillis()), + arg1.getClass(), + BigQueryStatementParameterType.IN, + 0); + } + + @Override + public void setURL(String arg0, URL arg1) throws SQLException { + // TODO: NOT IMPLEMENTED (data type not supported) + + } + + @Override + public boolean wasNull() throws SQLException { + // TODO: NOT IMPLEMENTED (data type not supported) + return false; + } +} diff --git a/google-cloud-bigquery-jdbc/src/main/java/com/google/cloud/bigquery/jdbc/BigQueryCoercion.java b/google-cloud-bigquery-jdbc/src/main/java/com/google/cloud/bigquery/jdbc/BigQueryCoercion.java new file mode 100644 index 000000000..6265af0de --- /dev/null +++ b/google-cloud-bigquery-jdbc/src/main/java/com/google/cloud/bigquery/jdbc/BigQueryCoercion.java @@ -0,0 +1,44 @@ +/* + * Copyright 2023 Google LLC + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package com.google.cloud.bigquery.jdbc; + +import com.google.api.core.InternalApi; +import java.util.function.Function; + +/** + * A {@link BigQueryCoercion} is responsible for coercing one type to another. An implementation of + * {@link BigQueryCoercion} is used to extend the behaviour of {@link BigQueryTypeCoercer} for the + * coercion of one user defined type to another. + * + * @param represents the source type + * @param represents the target type + */ +@InternalApi +interface BigQueryCoercion extends Function { + /** + * Coerce the provided value to the desired type. + * + * @param value the input value. + * @return the output value after coercion. + */ + OUTPUT coerce(INPUT value); + + @Override + default OUTPUT apply(INPUT input) { + return coerce(input); + } +} diff --git a/google-cloud-bigquery-jdbc/src/main/java/com/google/cloud/bigquery/jdbc/BigQueryConnection.java b/google-cloud-bigquery-jdbc/src/main/java/com/google/cloud/bigquery/jdbc/BigQueryConnection.java new file mode 100644 index 000000000..e93938f25 --- /dev/null +++ b/google-cloud-bigquery-jdbc/src/main/java/com/google/cloud/bigquery/jdbc/BigQueryConnection.java @@ -0,0 +1,1160 @@ +/* + * Copyright 2023 Google LLC + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package com.google.cloud.bigquery.jdbc; + +import com.google.api.gax.core.CredentialsProvider; +import com.google.api.gax.core.FixedCredentialsProvider; +import com.google.api.gax.retrying.RetrySettings; +import com.google.api.gax.rpc.FixedHeaderProvider; +import com.google.api.gax.rpc.HeaderProvider; +import com.google.api.gax.rpc.TransportChannelProvider; +import com.google.auth.Credentials; +import com.google.cloud.bigquery.BigQuery; +import com.google.cloud.bigquery.BigQueryException; +import com.google.cloud.bigquery.BigQueryOptions; +import com.google.cloud.bigquery.ConnectionProperty; +import com.google.cloud.bigquery.DatasetId; +import com.google.cloud.bigquery.Job; +import com.google.cloud.bigquery.JobInfo; +import com.google.cloud.bigquery.QueryJobConfiguration; +import com.google.cloud.bigquery.exception.BigQueryJdbcException; +import com.google.cloud.bigquery.exception.BigQueryJdbcRuntimeException; +import com.google.cloud.bigquery.exception.BigQueryJdbcSqlFeatureNotSupportedException; +import com.google.cloud.bigquery.storage.v1.BigQueryReadClient; +import com.google.cloud.bigquery.storage.v1.BigQueryReadSettings; +import com.google.cloud.bigquery.storage.v1.BigQueryWriteClient; +import com.google.cloud.bigquery.storage.v1.BigQueryWriteSettings; +import com.google.cloud.http.HttpTransportOptions; +import java.io.IOException; +import java.io.InputStream; +import java.sql.CallableStatement; +import java.sql.Connection; +import java.sql.DatabaseMetaData; +import java.sql.PreparedStatement; +import java.sql.ResultSet; +import 
java.sql.SQLException; +import java.sql.SQLWarning; +import java.sql.Statement; +import java.time.Duration; +import java.util.ArrayList; +import java.util.ConcurrentModificationException; +import java.util.List; +import java.util.Map; +import java.util.Properties; +import java.util.Set; +import java.util.concurrent.ConcurrentHashMap; +import java.util.concurrent.Executor; +import java.util.concurrent.TimeUnit; + +/** + * An implementation of {@link java.sql.Connection} for establishing a connection with BigQuery and + * executing SQL statements + * + * @see BigQueryStatement + */ +public class BigQueryConnection extends BigQueryNoOpsConnection { + + private final BigQueryJdbcCustomLogger LOG = new BigQueryJdbcCustomLogger(this.toString()); + String connectionClassName = this.toString(); + private static final String DEFAULT_JDBC_TOKEN_VALUE = "Google-BigQuery-JDBC-Driver"; + private static final String DEFAULT_VERSION = "0.0.0"; + private static HeaderProvider HEADER_PROVIDER; + BigQueryReadClient bigQueryReadClient = null; + BigQueryWriteClient bigQueryWriteClient = null; + BigQuery bigQuery; + String connectionUrl; + Set openStatements; + boolean autoCommit; + int transactionIsolation; + List sqlWarnings; + String catalog; + int holdability; + long retryTimeoutInSeconds; + Duration retryTimeoutDuration; + long retryInitialDelayInSeconds; + Duration retryInitialDelayDuration; + long retryMaxDelayInSeconds; + Duration retryMaxDelayDuration; + // transactionStarted is false by default. + // when autocommit is false transaction starts and session is initialized. 
+ boolean transactionStarted; + ConnectionProperty sessionInfoConnectionProperty; + boolean isClosed; + DatasetId defaultDataset; + String location; + boolean enableHighThroughputAPI; + int highThroughputMinTableSize; + int highThroughputActivationRatio; + boolean enableSession; + boolean unsupportedHTAPIFallback; + boolean useQueryCache; + String queryDialect; + int metadataFetchThreadCount; + boolean allowLargeResults; + String destinationTable; + String destinationDataset; + long destinationDatasetExpirationTime; + String kmsKeyName; + String universeDomain; + List queryProperties; + Map authProperties; + Map overrideProperties; + Credentials credentials; + boolean useStatelessQueryMode; + int numBufferedRows; + HttpTransportOptions httpTransportOptions; + TransportChannelProvider transportChannelProvider; + long maxResults; + long jobTimeoutInSeconds; + boolean enableWriteAPI; + int writeAPIActivationRowCount; + int writeAPIAppendRowCount; + int requestGoogleDriveScope; + List additionalProjects; + boolean filterTablesOnDefaultDataset; + String sslTrustStorePath; + String sslTrustStorePassword; + long maxBytesBilled; + Map labels; + + BigQueryConnection(String url) throws IOException { + this.connectionUrl = url; + this.openStatements = ConcurrentHashMap.newKeySet(); + this.autoCommit = true; + this.sqlWarnings = new ArrayList<>(); + this.transactionStarted = false; + this.isClosed = false; + this.labels = BigQueryJdbcUrlUtility.parseLabels(url, connectionClassName); + this.maxBytesBilled = + BigQueryJdbcUrlUtility.parseMaximumBytesBilled(url, this.connectionClassName); + this.retryTimeoutInSeconds = + BigQueryJdbcUrlUtility.parseRetryTimeoutInSecs(url, this.connectionClassName); + this.retryTimeoutDuration = Duration.ofMillis(retryTimeoutInSeconds * 1000L); + this.retryInitialDelayInSeconds = + BigQueryJdbcUrlUtility.parseRetryInitialDelayInSecs(url, this.connectionClassName); + this.retryInitialDelayDuration = Duration.ofMillis(retryInitialDelayInSeconds * 
1000L); + this.retryMaxDelayInSeconds = + BigQueryJdbcUrlUtility.parseRetryMaxDelayInSecs(url, this.connectionClassName); + this.retryMaxDelayDuration = Duration.ofMillis(retryMaxDelayInSeconds * 1000L); + this.jobTimeoutInSeconds = + BigQueryJdbcUrlUtility.parseJobTimeout(url, this.connectionClassName); + this.authProperties = + BigQueryJdbcOAuthUtility.parseOAuthProperties(url, this.connectionClassName); + this.catalog = + BigQueryJdbcUrlUtility.parseStringProperty( + url, + BigQueryJdbcUrlUtility.PROJECT_ID_PROPERTY_NAME, + BigQueryOptions.getDefaultProjectId(), + this.connectionClassName); + this.universeDomain = + BigQueryJdbcUrlUtility.parseStringProperty( + url, + BigQueryJdbcUrlUtility.UNIVERSE_DOMAIN_OVERRIDE_PROPERTY_NAME, + BigQueryJdbcUrlUtility.DEFAULT_UNIVERSE_DOMAIN_VALUE, + this.connectionClassName); + this.overrideProperties = + BigQueryJdbcUrlUtility.parseOverrideProperties(url, this.connectionClassName); + if (universeDomain != null) { + this.overrideProperties.put( + BigQueryJdbcUrlUtility.UNIVERSE_DOMAIN_OVERRIDE_PROPERTY_NAME, universeDomain); + } + this.credentials = + BigQueryJdbcOAuthUtility.getCredentials( + authProperties, overrideProperties, this.connectionClassName); + String defaultDatasetString = + BigQueryJdbcUrlUtility.parseStringProperty( + url, + BigQueryJdbcUrlUtility.DEFAULT_DATASET_PROPERTY_NAME, + null, + this.connectionClassName); + if (defaultDatasetString == null || defaultDatasetString.trim().isEmpty()) { + this.defaultDataset = null; + } else { + String[] parts = defaultDatasetString.split("\\."); + if (parts.length == 2) { + this.defaultDataset = DatasetId.of(parts[0], parts[1]); + } else if (parts.length == 1) { + this.defaultDataset = DatasetId.of(parts[0]); + } else { + throw new IllegalArgumentException( + "DefaultDataset format is invalid. 
Supported options are datasetId or" + + " projectId.datasetId"); + } + } + this.location = + BigQueryJdbcUrlUtility.parseStringProperty( + url, BigQueryJdbcUrlUtility.LOCATION_PROPERTY_NAME, null, this.connectionClassName); + this.enableHighThroughputAPI = + BigQueryJdbcUrlUtility.parseBooleanProperty( + url, + BigQueryJdbcUrlUtility.ENABLE_HTAPI_PROPERTY_NAME, + BigQueryJdbcUrlUtility.DEFAULT_ENABLE_HTAPI_VALUE, + this.connectionClassName); + this.highThroughputMinTableSize = + BigQueryJdbcUrlUtility.parseIntProperty( + url, + BigQueryJdbcUrlUtility.HTAPI_MIN_TABLE_SIZE_PROPERTY_NAME, + BigQueryJdbcUrlUtility.DEFAULT_HTAPI_MIN_TABLE_SIZE_VALUE, + this.connectionClassName); + this.highThroughputActivationRatio = + BigQueryJdbcUrlUtility.parseIntProperty( + url, + BigQueryJdbcUrlUtility.HTAPI_ACTIVATION_RATIO_PROPERTY_NAME, + BigQueryJdbcUrlUtility.DEFAULT_HTAPI_ACTIVATION_RATIO_VALUE, + this.connectionClassName); + this.useQueryCache = + BigQueryJdbcUrlUtility.parseBooleanProperty( + url, + BigQueryJdbcUrlUtility.USE_QUERY_CACHE_PROPERTY_NAME, + BigQueryJdbcUrlUtility.DEFAULT_USE_QUERY_CACHE, + this.connectionClassName); + this.useStatelessQueryMode = + BigQueryJdbcUrlUtility.parseJobCreationMode(url, this.connectionClassName); + this.queryDialect = + BigQueryJdbcUrlUtility.parseStringProperty( + url, + BigQueryJdbcUrlUtility.QUERY_DIALECT_PROPERTY_NAME, + BigQueryJdbcUrlUtility.DEFAULT_QUERY_DIALECT_VALUE, + this.connectionClassName); + this.allowLargeResults = + BigQueryJdbcUrlUtility.parseBooleanProperty( + url, + BigQueryJdbcUrlUtility.ALLOW_LARGE_RESULTS_PROPERTY_NAME, + BigQueryJdbcUrlUtility.DEFAULT_ALLOW_LARGE_RESULTS, + this.connectionClassName); + this.destinationTable = + BigQueryJdbcUrlUtility.parseStringProperty( + url, + BigQueryJdbcUrlUtility.LARGE_RESULTS_TABLE_PROPERTY_NAME, + null, + this.connectionClassName); + this.destinationDataset = + BigQueryJdbcUrlUtility.parseStringProperty( + url, + 
BigQueryJdbcUrlUtility.LARGE_RESULTS_DATASET_PROPERTY_NAME, + null, + this.connectionClassName); + this.destinationDatasetExpirationTime = + BigQueryJdbcUrlUtility.parseLongProperty( + url, + BigQueryJdbcUrlUtility.DESTINATION_DATASET_EXPIRATION_TIME_PROPERTY_NAME, + BigQueryJdbcUrlUtility.DEFAULT_DESTINATION_DATASET_EXPIRATION_TIME_VALUE, + this.connectionClassName); + this.kmsKeyName = + BigQueryJdbcUrlUtility.parseStringProperty( + url, BigQueryJdbcUrlUtility.KMS_KEY_NAME_PROPERTY_NAME, null, this.connectionClassName); + Map proxyProperties = + BigQueryJdbcProxyUtility.parseProxyProperties(url, this.connectionClassName); + this.sslTrustStorePath = + BigQueryJdbcUrlUtility.parseStringProperty( + url, + BigQueryJdbcUrlUtility.SSL_TRUST_STORE_PROPERTY_NAME, + null, + this.connectionClassName); + this.sslTrustStorePassword = + BigQueryJdbcUrlUtility.parseStringProperty( + url, + BigQueryJdbcUrlUtility.SSL_TRUST_STORE_PWD_PROPERTY_NAME, + null, + this.connectionClassName); + this.httpTransportOptions = + BigQueryJdbcProxyUtility.getHttpTransportOptions( + proxyProperties, + this.sslTrustStorePath, + this.sslTrustStorePassword, + this.connectionClassName); + this.transportChannelProvider = + BigQueryJdbcProxyUtility.getTransportChannelProvider( + proxyProperties, + this.sslTrustStorePath, + this.sslTrustStorePassword, + this.connectionClassName); + this.enableSession = + BigQueryJdbcUrlUtility.parseBooleanProperty( + url, + BigQueryJdbcUrlUtility.ENABLE_SESSION_PROPERTY_NAME, + BigQueryJdbcUrlUtility.DEFAULT_ENABLE_SESSION_VALUE, + this.connectionClassName); + this.unsupportedHTAPIFallback = + BigQueryJdbcUrlUtility.parseBooleanProperty( + url, + BigQueryJdbcUrlUtility.UNSUPPORTED_HTAPI_FALLBACK_PROPERTY_NAME, + BigQueryJdbcUrlUtility.DEFAULT_UNSUPPORTED_HTAPI_FALLBACK_VALUE, + this.connectionClassName); + this.maxResults = + BigQueryJdbcUrlUtility.parseLongProperty( + url, + BigQueryJdbcUrlUtility.MAX_RESULTS_PROPERTY_NAME, + 
BigQueryJdbcUrlUtility.DEFAULT_MAX_RESULTS_VALUE, + this.connectionClassName); + Map queryPropertiesMap = + BigQueryJdbcUrlUtility.parseQueryProperties(url, this.connectionClassName); + this.sessionInfoConnectionProperty = getSessionPropertyFromQueryProperties(queryPropertiesMap); + this.queryProperties = convertMapToConnectionPropertiesList(queryPropertiesMap); + this.enableWriteAPI = + BigQueryJdbcUrlUtility.parseBooleanProperty( + url, + BigQueryJdbcUrlUtility.ENABLE_WRITE_API_PROPERTY_NAME, + BigQueryJdbcUrlUtility.DEFAULT_ENABLE_WRITE_API_VALUE, + this.connectionClassName); + this.writeAPIActivationRowCount = + BigQueryJdbcUrlUtility.parseIntProperty( + url, + BigQueryJdbcUrlUtility.SWA_ACTIVATION_ROW_COUNT_PROPERTY_NAME, + BigQueryJdbcUrlUtility.DEFAULT_SWA_ACTIVATION_ROW_COUNT_VALUE, + this.connectionClassName); + this.writeAPIAppendRowCount = + BigQueryJdbcUrlUtility.parseIntProperty( + url, + BigQueryJdbcUrlUtility.SWA_APPEND_ROW_COUNT_PROPERTY_NAME, + BigQueryJdbcUrlUtility.DEFAULT_SWA_APPEND_ROW_COUNT_VALUE, + this.connectionClassName); + this.additionalProjects = + BigQueryJdbcUrlUtility.parseStringListProperty( + url, + BigQueryJdbcUrlUtility.ADDITIONAL_PROJECTS_PROPERTY_NAME, + this.connectionClassName); + this.filterTablesOnDefaultDataset = + BigQueryJdbcUrlUtility.parseBooleanProperty( + url, + BigQueryJdbcUrlUtility.FILTER_TABLES_ON_DEFAULT_DATASET_PROPERTY_NAME, + BigQueryJdbcUrlUtility.DEFAULT_FILTER_TABLES_ON_DEFAULT_DATASET_VALUE, + this.connectionClassName); + this.requestGoogleDriveScope = + BigQueryJdbcUrlUtility.parseIntProperty( + url, + BigQueryJdbcUrlUtility.REQUEST_GOOGLE_DRIVE_SCOPE_PROPERTY_NAME, + BigQueryJdbcUrlUtility.DEFAULT_REQUEST_GOOGLE_DRIVE_SCOPE_VALUE, + this.connectionClassName); + this.metadataFetchThreadCount = + BigQueryJdbcUrlUtility.parseIntProperty( + url, + BigQueryJdbcUrlUtility.METADATA_FETCH_THREAD_COUNT_PROPERTY_NAME, + BigQueryJdbcUrlUtility.DEFAULT_METADATA_FETCH_THREAD_COUNT_VALUE, + this.connectionClassName); 
+ + HEADER_PROVIDER = createHeaderProvider(); + this.bigQuery = getBigQueryConnection(); + } + + String getLibraryVersion(Class libraryClass) { + LOG.finest("++enter++"); + String version = null; + try (InputStream in = + libraryClass.getResourceAsStream( + "/com/google/cloud/bigquery/jdbc/dependencies.properties")) { + if (in != null) { + Properties props = new Properties(); + props.load(in); + version = props.getProperty("version.jdbc"); + } + } catch (IOException e) { + return DEFAULT_VERSION; + } + + return version != null ? version : DEFAULT_VERSION; + } + + private String buildPartnerToken(String url) { + String partnerTokenString = + BigQueryJdbcUrlUtility.parsePartnerTokenProperty(url, this.connectionClassName); + if (partnerTokenString == null || partnerTokenString.isEmpty()) { + return ""; + } + return partnerTokenString; + } + + HeaderProvider createHeaderProvider() { + String partnerToken = buildPartnerToken(this.connectionUrl); + String headerToken = + DEFAULT_JDBC_TOKEN_VALUE + "/" + getLibraryVersion(this.getClass()) + partnerToken; + return FixedHeaderProvider.create("user-agent", headerToken); + } + + protected void addOpenStatements(Statement statement) { + LOG.finest(String.format("Statement %s added to Connection %s.", statement, this)); + this.openStatements.add(statement); + } + + BigQueryReadClient getBigQueryReadClient() { + try { + if (this.bigQueryReadClient == null) { + this.bigQueryReadClient = getBigQueryReadClientConnection(); + } + } catch (IOException e) { + throw new BigQueryJdbcRuntimeException(e); + } + return this.bigQueryReadClient; + } + + BigQueryWriteClient getBigQueryWriteClient() { + try { + if (this.bigQueryWriteClient == null) { + this.bigQueryWriteClient = getBigQueryWriteClientConnection(); + } + } catch (IOException e) { + throw new BigQueryJdbcRuntimeException(e); + } + return this.bigQueryWriteClient; + } + + BigQuery getBigQuery() { + return this.bigQuery; + } + + String getConnectionUrl() { + return connectionUrl; 
+ } + + /** + * Creates and returns a new {@code Statement} object for executing BigQuery SQL queries + * + * @return a new {@code Statement} object + * @see Connection#createStatement() + */ + @Override + public Statement createStatement() throws SQLException { + checkClosed(); + BigQueryStatement currentStatement = new BigQueryStatement(this); + LOG.fine(String.format("Statement %s created.", currentStatement)); + addOpenStatements(currentStatement); + return currentStatement; + } + + /** + * Creates and returns a new {@code Statement} object for executing BigQuery SQL queries. This + * method is similar to {@link BigQueryConnection#createStatement()}, but it overrides the type + * and concurrency of the generated {@code ResultSet}. + * + * @throws SQLException if a BigQuery connection error occurs, if this method is called on a + * closed connection, or the given parameters are not {@code ResultSet} constants indicating + * type and concurrency. + * @throws BigQueryJdbcSqlFeatureNotSupportedException if this method is not supported for the + * specified result set type and result set concurrency. + * @see Connection#createStatement(int, int) + * @see ResultSet + */ + @Override + public Statement createStatement(int resultSetType, int resultSetConcurrency) + throws SQLException { + checkClosed(); + if (resultSetType != ResultSet.TYPE_FORWARD_ONLY + || resultSetConcurrency != ResultSet.CONCUR_READ_ONLY) { + throw new BigQueryJdbcSqlFeatureNotSupportedException("Unsupported createStatement feature."); + } + return createStatement(); + } + + /** + * Creates and returns a new {@code Statement} object for executing BigQuery SQL queries. This + * method is similar to {@link BigQueryConnection#createStatement()}, but it overrides the type, + * concurrency, and holdability of the generated {@code ResultSet}. 
+ * + * @throws SQLException if a BigQuery connection error occurs, if this method is called on a + * closed connection, or the given parameters are not {@code ResultSet} constants indicating + * type, concurrency, and holdability. + * @throws BigQueryJdbcSqlFeatureNotSupportedException if this method is not supported for the + * specified result set type, result set holdability and result set concurrency. + * @see Connection#createStatement(int, int, int) + * @see ResultSet + */ + @Override + public Statement createStatement( + int resultSetType, int resultSetConcurrency, int resultSetHoldability) throws SQLException { + LOG.finest("++enter++"); + checkClosed(); + if (resultSetType != ResultSet.TYPE_FORWARD_ONLY + || resultSetConcurrency != ResultSet.CONCUR_READ_ONLY + || resultSetHoldability != ResultSet.CLOSE_CURSORS_AT_COMMIT) { + throw new BigQueryJdbcSqlFeatureNotSupportedException("Unsupported createStatement feature"); + } + return createStatement(); + } + + @Override + public PreparedStatement prepareStatement(String sql) throws SQLException { + checkClosed(); + PreparedStatement currentStatement = new BigQueryPreparedStatement(this, sql); + LOG.fine(String.format("Prepared Statement %s created.", currentStatement)); + addOpenStatements(currentStatement); + return currentStatement; + } + + @Override + public PreparedStatement prepareStatement(String sql, int autoGeneratedKeys) throws SQLException { + if (autoGeneratedKeys != Statement.NO_GENERATED_KEYS) { + throw new BigQueryJdbcSqlFeatureNotSupportedException("autoGeneratedKeys is not supported"); + } + return prepareStatement(sql); + } + + @Override + public PreparedStatement prepareStatement(String sql, int[] columnIndexes) throws SQLException { + throw new BigQueryJdbcSqlFeatureNotSupportedException("autoGeneratedKeys is not supported"); + } + + @Override + public PreparedStatement prepareStatement( + String sql, int resultSetType, int resultSetConcurrency, int resultSetHoldability) + throws 
SQLException { + if (resultSetType != ResultSet.TYPE_FORWARD_ONLY + || resultSetConcurrency != ResultSet.CONCUR_READ_ONLY + || resultSetHoldability != ResultSet.CLOSE_CURSORS_AT_COMMIT) { + throw new BigQueryJdbcSqlFeatureNotSupportedException("Unsupported prepareStatement feature"); + } + return prepareStatement(sql); + } + + @Override + public PreparedStatement prepareStatement(String sql, int resultSetType, int resultSetConcurrency) + throws SQLException { + LOG.finest("++enter++"); + if (resultSetType != ResultSet.TYPE_FORWARD_ONLY + || resultSetConcurrency != ResultSet.CONCUR_READ_ONLY) { + throw new BigQueryJdbcSqlFeatureNotSupportedException("Unsupported prepareStatement feature"); + } + return prepareStatement(sql); + } + + public DatasetId getDefaultDataset() { + checkClosed(); + return this.defaultDataset; + } + + String getDestinationDataset() { + return this.destinationDataset; + } + + String getDestinationTable() { + return this.destinationTable; + } + + long getDestinationDatasetExpirationTime() { + return this.destinationDatasetExpirationTime; + } + + String getKmsKeyName() { + return this.kmsKeyName; + } + + List getQueryProperties() { + return this.queryProperties; + } + + public String getLocation() { + checkClosed(); + return this.location; + } + + public Map getAuthProperties() { + checkClosed(); + return this.authProperties; + } + + long getMaxResults() { + return maxResults; + } + + long getRetryTimeoutInSeconds() { + return this.retryTimeoutInSeconds; + } + + Duration getRetryTimeoutDuration() { + return this.retryTimeoutDuration; + } + + long getRetryInitialDelayInSeconds() { + return this.retryInitialDelayInSeconds; + } + + Duration getRetryInitialDelayDuration() { + return this.retryInitialDelayDuration; + } + + long getRetryMaxDelayInSeconds() { + return this.retryMaxDelayInSeconds; + } + + Duration getRetryMaxDelayDuration() { + return this.retryMaxDelayDuration; + } + + long getJobTimeoutInSeconds() { + return this.jobTimeoutInSeconds; 
+ } + + long getMaxBytesBilled() { + return this.maxBytesBilled; + } + + Map getLabels() { + return this.labels; + } + + /** + * Begins a transaction.
+ * The transaction ends when a {@link BigQueryConnection#commit()} or {@link + * BigQueryConnection#rollback()} is made.
+ * For more information about transactions in BigQuery, see Multi-statement transactions. + */ + private void beginTransaction() { + LOG.finest("++enter++"); + QueryJobConfiguration.Builder transactionBeginJobConfig = + QueryJobConfiguration.newBuilder("BEGIN TRANSACTION;"); + try { + if (this.sessionInfoConnectionProperty != null) { + transactionBeginJobConfig.setConnectionProperties(this.queryProperties); + } else { + transactionBeginJobConfig.setCreateSession(true); + } + Job job = this.bigQuery.create(JobInfo.of(transactionBeginJobConfig.build())); + job = job.waitFor(); + Job transactionBeginJob = this.bigQuery.getJob(job.getJobId()); + if (this.sessionInfoConnectionProperty == null) { + this.sessionInfoConnectionProperty = + ConnectionProperty.newBuilder() + .setKey("session_id") + .setValue(transactionBeginJob.getStatistics().getSessionInfo().getSessionId()) + .build(); + this.queryProperties.add(this.sessionInfoConnectionProperty); + } + this.transactionStarted = true; + } catch (InterruptedException ex) { + throw new BigQueryJdbcRuntimeException(ex); + } + } + + public boolean isTransactionStarted() { + return this.transactionStarted; + } + + boolean isSessionEnabled() { + return this.enableSession; + } + + boolean isUnsupportedHTAPIFallback() { + return this.unsupportedHTAPIFallback; + } + + ConnectionProperty getSessionInfoConnectionProperty() { + return this.sessionInfoConnectionProperty; + } + + boolean isEnableHighThroughputAPI() { + return this.enableHighThroughputAPI; + } + + boolean isUseQueryCache() { + return useQueryCache; + } + + boolean getUseStatelessQueryMode() { + return useStatelessQueryMode; + } + + boolean isAllowLargeResults() { + return allowLargeResults; + } + + String getQueryDialect() { + return queryDialect; + } + + Integer getNumBufferedRows() { + return numBufferedRows; + } + + int getHighThroughputMinTableSize() { + return highThroughputMinTableSize; + } + + List getAdditionalProjects() { + return this.additionalProjects; + } + 
  int getHighThroughputActivationRatio() {
    return highThroughputActivationRatio;
  }

  boolean isFilterTablesOnDefaultDataset() {
    return this.filterTablesOnDefaultDataset;
  }

  // NOTE(review): is-prefixed but returns int — presumably a tri-state/mode value rather
  // than a boolean; confirm against the connection-property parsing code.
  int isRequestGoogleDriveScope() {
    return requestGoogleDriveScope;
  }

  int getMetadataFetchThreadCount() {
    return this.metadataFetchThreadCount;
  }

  boolean isEnableWriteAPI() {
    return enableWriteAPI;
  }

  int getWriteAPIActivationRowCount() {
    return writeAPIActivationRowCount;
  }

  int getWriteAPIAppendRowCount() {
    return writeAPIAppendRowCount;
  }

  String getSSLTrustStorePath() {
    return sslTrustStorePath;
  }

  String getSSLTrustStorePassword() {
    return sslTrustStorePassword;
  }

  /**
   * Checks connection liveness by running {@code SELECT 1}. Returns {@code false} on any
   * SQLException (swallowed deliberately, per the JDBC isValid contract) or if closed.
   *
   * @param timeout must be {@code >= 0}; currently not applied to the probe query (see TODO)
   */
  @Override
  public boolean isValid(int timeout) throws SQLException {
    if (timeout < 0) {
      throw new BigQueryJdbcException("timeout must be >= 0");
    }
    if (!isClosed()) {
      try (Statement statement = createStatement();
          ResultSet rs = statement.executeQuery("SELECT 1")) {
        LOG.finest("Running validation query");
        // TODO(obada): set query timeout when it's implemented
        // TODO(obada): use dry run
        if (rs.next()) {
          if (rs.getInt(1) == 1) {
            return true;
          }
        }
      } catch (SQLException ex) {
        // Ignore
      }
    }
    return false;
  }

  /** Aborts by closing the connection; the executor argument is not used. */
  @Override
  public void abort(Executor executor) throws SQLException {
    LOG.finest("++enter++");
    close();
  }

  // TODO: Throw exception translation of BigQueryJdbcSqlClientInfoException when implementing below
  @Override
  public void setClientInfo(String name, String value) {}

  @Override
  public String getClientInfo(String name) {
    return null;
  }

  /** Returns the project id acting as the JDBC catalog. */
  @Override
  public String getCatalog() {
    return this.catalog;
  }

  @Override
  public Properties getClientInfo() {
    return null;
  }

  @Override
  public void setClientInfo(Properties properties) {}

  // Returns the first recorded warning, or null when none.
  // NOTE(review): SQLWarnings are normally chained via setNextWarning; whether the
  // remaining list entries are reachable from the head depends on how sqlWarnings is
  // populated elsewhere — confirm.
  @Override
  public SQLWarning getWarnings() {
    return this.sqlWarnings.isEmpty() ? null : this.sqlWarnings.get(0);
  }

  @Override
  public void clearWarnings() {
    this.sqlWarnings.clear();
  }

  @Override
  public boolean getAutoCommit() {
    checkClosed();
    return this.autoCommit;
  }

  /**
   * Sets this connection's auto-commit mode to the given state.
+ * If this method is called during a transaction and the auto-commit mode is changed, the + * transaction is committed. If setAutoCommit is called and the auto-commit mode is not changed, + * the call is a no-op. + * + * @param autoCommit {@code true} to enable auto-commit mode; {@code false} to disable it + * @see Connection#setAutoCommit(boolean) + */ + @Override + public void setAutoCommit(boolean autoCommit) throws SQLException { + LOG.finest("++enter++"); + checkClosed(); + checkIfEnabledSession("setAutoCommit"); + if (this.autoCommit == autoCommit) { + return; + } + + if (isTransactionStarted()) { + commitTransaction(); + } + + this.autoCommit = autoCommit; + if (!this.autoCommit) { + beginTransaction(); + } + } + + @Override + public void commit() { + LOG.finest("++enter++"); + checkClosed(); + checkIfEnabledSession("commit"); + if (!isTransactionStarted()) { + throw new IllegalStateException( + "Cannot commit without an active transaction. Please set setAutoCommit to false to start" + + " a transaction."); + } + commitTransaction(); + if (!getAutoCommit()) { + beginTransaction(); + } + } + + @Override + public void rollback() throws SQLException { + LOG.finest("++enter++"); + checkClosed(); + checkIfEnabledSession("rollback"); + if (!isTransactionStarted()) { + throw new IllegalStateException( + "Cannot rollback without an active transaction. 
Please set setAutoCommit to false to" + + " start a transaction."); + } + try { + QueryJobConfiguration transactionRollbackJobConfig = + QueryJobConfiguration.newBuilder("ROLLBACK TRANSACTION;") + .setConnectionProperties(this.queryProperties) + .build(); + Job rollbackJob = this.bigQuery.create(JobInfo.of(transactionRollbackJobConfig)); + rollbackJob.waitFor(); + this.transactionStarted = false; + if (!getAutoCommit()) { + beginTransaction(); + } + } catch (InterruptedException | BigQueryException ex) { + throw new BigQueryJdbcException(ex); + } + } + + @Override + public DatabaseMetaData getMetaData() throws SQLException { + return new BigQueryDatabaseMetaData(this); + } + + @Override + public int getTransactionIsolation() { + // only supports Connection.TRANSACTION_SERIALIZABLE + return Connection.TRANSACTION_SERIALIZABLE; + } + + @Override + public void setTransactionIsolation(int level) throws SQLException { + if (level != Connection.TRANSACTION_SERIALIZABLE) { + throw new BigQueryJdbcSqlFeatureNotSupportedException( + "Transaction serializable not supported"); + } + this.transactionIsolation = level; + } + + @Override + public int getHoldability() { + return this.holdability; + } + + @Override + public void setHoldability(int holdability) throws SQLException { + if (holdability != ResultSet.CLOSE_CURSORS_AT_COMMIT) { + throw new BigQueryJdbcSqlFeatureNotSupportedException( + "CLOSE_CURSORS_AT_COMMIT not supported"); + } + this.holdability = holdability; + } + + /** + * Releases this {@code BigQueryConnection} object's BigQuery resources immediately instead of + * waiting for them to be automatically released. 
+ * + * @throws SQLException if a BigQuery access error occurs + * @see Connection#close() + */ + @Override + public void close() throws SQLException { + LOG.fine("Closing Connection " + this); + // TODO(neenu-postMVP): Release all connection state objects + // check for and close all existing transactions + + if (isClosed()) { + return; + } + try { + if (this.bigQueryReadClient != null) { + this.bigQueryReadClient.shutdown(); + this.bigQueryReadClient.awaitTermination(1, TimeUnit.MINUTES); + this.bigQueryReadClient.close(); + } + + if (this.bigQueryWriteClient != null) { + this.bigQueryWriteClient.shutdown(); + this.bigQueryWriteClient.awaitTermination(1, TimeUnit.MINUTES); + this.bigQueryWriteClient.close(); + } + + for (Statement statement : this.openStatements) { + statement.close(); + } + this.openStatements.clear(); + } catch (ConcurrentModificationException ex) { + throw new BigQueryJdbcException(ex); + } catch (InterruptedException e) { + throw new BigQueryJdbcRuntimeException(e); + } + this.isClosed = true; + } + + @Override + public boolean isClosed() { + return this.isClosed; + } + + private void checkClosed() { + if (isClosed()) { + throw new IllegalStateException("This " + getClass().getName() + " has been closed"); + } + } + + private void checkIfEnabledSession(String methodName) { + if (!this.enableSession) { + throw new IllegalStateException( + String.format("Session needs to be enabled to use %s method.", methodName)); + } + } + + private ConnectionProperty getSessionPropertyFromQueryProperties( + Map queryPropertiesMap) { + LOG.finest("++enter++"); + if (queryPropertiesMap != null) { + if (queryPropertiesMap.containsKey("session_id")) { + return ConnectionProperty.newBuilder() + .setKey("session_id") + .setValue(queryPropertiesMap.get("session_id")) + .build(); + } + } + return null; + } + + private List convertMapToConnectionPropertiesList( + Map queryPropertiesMap) { + LOG.finest("++enter++"); + List connectionProperties = new ArrayList(); + if 
(queryPropertiesMap != null) { + for (Map.Entry entry : queryPropertiesMap.entrySet()) { + connectionProperties.add( + ConnectionProperty.newBuilder() + .setKey(entry.getKey()) + .setValue(entry.getValue()) + .build()); + } + } + return connectionProperties; + } + + void removeStatement(Statement statement) { + this.openStatements.remove(statement); + } + + private BigQuery getBigQueryConnection() { + // 404 Not Found - check if the project exists + // 403 Forbidden - execute a dryRun to check if the user has bigquery.jobs.create permissions + BigQueryOptions.Builder bigQueryOptions = BigQueryOptions.newBuilder(); + if (this.retryTimeoutInSeconds > 0L + || (this.retryInitialDelayInSeconds > 0L && this.retryMaxDelayInSeconds > 0L)) { + RetrySettings.Builder retry_settings_builder = RetrySettings.newBuilder(); + if (this.retryTimeoutInSeconds > 0L) { + retry_settings_builder.setTotalTimeoutDuration(this.retryTimeoutDuration); + } + if (this.retryInitialDelayInSeconds > 0L && this.retryMaxDelayInSeconds > 0L) { + retry_settings_builder.setInitialRetryDelayDuration(retryInitialDelayDuration); + retry_settings_builder.setMaxRetryDelayDuration(retryMaxDelayDuration); + } + bigQueryOptions.setRetrySettings(retry_settings_builder.build()); + } + + if (this.catalog != null) { + bigQueryOptions.setProjectId(this.catalog); + } + if (this.credentials != null) { + bigQueryOptions.setCredentials(this.credentials); + } + if (this.location != null) { + bigQueryOptions.setLocation(this.location); + } + if (this.overrideProperties.containsKey( + BigQueryJdbcUrlUtility.BIGQUERY_ENDPOINT_OVERRIDE_PROPERTY_NAME)) { + bigQueryOptions.setHost( + this.overrideProperties.get( + BigQueryJdbcUrlUtility.BIGQUERY_ENDPOINT_OVERRIDE_PROPERTY_NAME)); + } + if (this.universeDomain != null) { + bigQueryOptions.setUniverseDomain(this.universeDomain); + } + if (this.httpTransportOptions != null) { + bigQueryOptions.setTransportOptions(this.httpTransportOptions); + } + + BigQueryOptions options = 
bigQueryOptions.setHeaderProvider(HEADER_PROVIDER).build(); + options.setQueryPreviewEnabled(String.valueOf(this.useStatelessQueryMode)); + return options.getService(); + } + + private BigQueryReadClient getBigQueryReadClientConnection() throws IOException { + BigQueryReadSettings.Builder bigQueryReadSettings = + BigQueryReadSettings.newBuilder().setHeaderProvider(HEADER_PROVIDER); + if (getRetrySettings() != null) { + bigQueryReadSettings.createReadSessionSettings().setRetrySettings(getRetrySettings()); + } + if (this.catalog != null) { + bigQueryReadSettings.setQuotaProjectId(this.catalog); + } + if (this.credentials != null) { + CredentialsProvider fixedProvider = FixedCredentialsProvider.create(credentials); + bigQueryReadSettings.setCredentialsProvider(fixedProvider); + } + if (this.overrideProperties.containsKey( + BigQueryJdbcUrlUtility.HTAPI_ENDPOINT_OVERRIDE_PROPERTY_NAME)) { + bigQueryReadSettings.setEndpoint( + this.overrideProperties.get( + BigQueryJdbcUrlUtility.HTAPI_ENDPOINT_OVERRIDE_PROPERTY_NAME)); + } + if (this.universeDomain != null) { + bigQueryReadSettings.setUniverseDomain(this.universeDomain); + } + if (this.transportChannelProvider != null) { + bigQueryReadSettings.setTransportChannelProvider(this.transportChannelProvider); + } + + return BigQueryReadClient.create(bigQueryReadSettings.build()); + } + + private BigQueryWriteClient getBigQueryWriteClientConnection() throws IOException { + BigQueryWriteSettings.Builder bigQueryWriteSettings = + BigQueryWriteSettings.newBuilder().setHeaderProvider(HEADER_PROVIDER); + if (getRetrySettings() != null) { + bigQueryWriteSettings.createWriteStreamSettings().setRetrySettings(getRetrySettings()); + } + if (this.catalog != null) { + bigQueryWriteSettings.setQuotaProjectId(this.catalog); + } + if (this.credentials != null) { + CredentialsProvider fixedProvider = FixedCredentialsProvider.create(credentials); + bigQueryWriteSettings.setCredentialsProvider(fixedProvider); + } + // Same endpoint as READ API 
+ if (this.overrideProperties.containsKey( + BigQueryJdbcUrlUtility.HTAPI_ENDPOINT_OVERRIDE_PROPERTY_NAME)) { + bigQueryWriteSettings.setEndpoint( + this.overrideProperties.get( + BigQueryJdbcUrlUtility.HTAPI_ENDPOINT_OVERRIDE_PROPERTY_NAME)); + } + if (this.universeDomain != null) { + bigQueryWriteSettings.setUniverseDomain(this.universeDomain); + } + if (this.transportChannelProvider != null) { + bigQueryWriteSettings.setTransportChannelProvider(this.transportChannelProvider); + } + + return BigQueryWriteClient.create(bigQueryWriteSettings.build()); + } + + RetrySettings getRetrySettings() { + RetrySettings.Builder retrySettingsBuilder = null; + + if (this.retryTimeoutInSeconds > 0L + || (this.retryInitialDelayInSeconds > 0L && this.retryMaxDelayInSeconds > 0L)) { + retrySettingsBuilder = RetrySettings.newBuilder(); + if (this.retryTimeoutInSeconds > 0L) { + retrySettingsBuilder.setTotalTimeoutDuration(this.retryTimeoutDuration); + } + if (this.retryInitialDelayInSeconds > 0L && this.retryMaxDelayInSeconds > 0L) { + retrySettingsBuilder.setInitialRetryDelayDuration(retryInitialDelayDuration); + retrySettingsBuilder.setMaxRetryDelayDuration(retryMaxDelayDuration); + } + } + return retrySettingsBuilder == null ? 
null : retrySettingsBuilder.build(); + } + + private void commitTransaction() { + try { + QueryJobConfiguration transactionCommitJobConfig = + QueryJobConfiguration.newBuilder("COMMIT TRANSACTION;") + .setConnectionProperties(this.queryProperties) + .build(); + Job commitJob = this.bigQuery.create(JobInfo.of(transactionCommitJobConfig)); + commitJob.waitFor(); + this.transactionStarted = false; + } catch (InterruptedException ex) { + throw new BigQueryJdbcRuntimeException(ex); + } + } + + @Override + public CallableStatement prepareCall(String sql) throws SQLException { + checkClosed(); + CallableStatement currentStatement = new BigQueryCallableStatement(this, sql); + LOG.fine(String.format("Callable Statement %s created.", currentStatement)); + addOpenStatements(currentStatement); + return currentStatement; + } + + @Override + public CallableStatement prepareCall(String sql, int resultSetType, int resultSetConcurrency) + throws SQLException { + LOG.finest("++enter++"); + checkClosed(); + if (resultSetType != ResultSet.TYPE_FORWARD_ONLY + || resultSetConcurrency != ResultSet.CONCUR_READ_ONLY) { + throw new BigQueryJdbcSqlFeatureNotSupportedException( + "Unsupported CallableStatement feature"); + } + return prepareCall(sql); + } + + @Override + public CallableStatement prepareCall( + String sql, int resultSetType, int resultSetConcurrency, int resultSetHoldability) + throws SQLException { + LOG.finest("++enter++"); + checkClosed(); + if (resultSetType != ResultSet.TYPE_FORWARD_ONLY + || resultSetConcurrency != ResultSet.CONCUR_READ_ONLY + || resultSetHoldability != ResultSet.CLOSE_CURSORS_AT_COMMIT) { + throw new BigQueryJdbcSqlFeatureNotSupportedException( + "Unsupported CallableStatement feature"); + } + return prepareCall(sql); + } +} diff --git a/google-cloud-bigquery-jdbc/src/main/java/com/google/cloud/bigquery/jdbc/BigQueryConnectionProperty.java b/google-cloud-bigquery-jdbc/src/main/java/com/google/cloud/bigquery/jdbc/BigQueryConnectionProperty.java new file 
mode 100644 index 000000000..17be15d86 --- /dev/null +++ b/google-cloud-bigquery-jdbc/src/main/java/com/google/cloud/bigquery/jdbc/BigQueryConnectionProperty.java @@ -0,0 +1,117 @@ +/* + * Copyright 2023 Google LLC + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package com.google.cloud.bigquery.jdbc; + +import java.util.List; + +class BigQueryConnectionProperty { + + private final String name; + private final String description; + private final String defaultValue; + private final List validValues; + + public String getName() { + return name; + } + + public String getDescription() { + return description; + } + + public String getDefaultValue() { + return defaultValue; + } + + public List getValidValues() { + return validValues; + } + + BigQueryConnectionProperty(Builder builder) { + this.name = builder.name; + this.defaultValue = builder.defaultValue; + this.description = builder.description; + this.validValues = builder.validValues; + } + + /** Returns a builder for a BigQueryConnectionProperty object. 
*/ + static BigQueryConnectionProperty.Builder newBuilder() { + return new BigQueryConnectionProperty.Builder(); + } + + BigQueryConnectionProperty.Builder toBuilder() { + return new BigQueryConnectionProperty.Builder(this); + } + + @Override + public String toString() { + return "BigQueryConnectionProperty{" + + "name='" + + name + + '\'' + + ", description='" + + description + + '\'' + + ", defaultValue='" + + defaultValue + + '\'' + + ", validValues=" + + validValues + + '}'; + } + + static final class Builder { + + private String name; + private String description; + private String defaultValue; + private List validValues; + + private Builder(BigQueryConnectionProperty bigQueryConnectionProperty) { + this.name = bigQueryConnectionProperty.name; + this.description = bigQueryConnectionProperty.description; + this.defaultValue = bigQueryConnectionProperty.defaultValue; + this.validValues = bigQueryConnectionProperty.validValues; + } + + private Builder() {} + + Builder setName(String name) { + this.name = name; + return this; + } + + Builder setDescription(String description) { + this.description = description; + return this; + } + + Builder setDefaultValue(String defaultValue) { + this.defaultValue = defaultValue; + return this; + } + + Builder setValidValues(List validValues) { + this.validValues = validValues; + return this; + } + + BigQueryConnectionProperty build() { + return new BigQueryConnectionProperty(this); + } + } +} diff --git a/google-cloud-bigquery-jdbc/src/main/java/com/google/cloud/bigquery/jdbc/BigQueryDaemonPollingTask.java b/google-cloud-bigquery-jdbc/src/main/java/com/google/cloud/bigquery/jdbc/BigQueryDaemonPollingTask.java new file mode 100644 index 000000000..386785660 --- /dev/null +++ b/google-cloud-bigquery-jdbc/src/main/java/com/google/cloud/bigquery/jdbc/BigQueryDaemonPollingTask.java @@ -0,0 +1,122 @@ +/* + * Copyright 2023 Google LLC + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file 
except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * https://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package com.google.cloud.bigquery.jdbc;

import com.google.api.core.InternalApi;
import com.google.cloud.bigquery.exception.BigQueryJdbcRuntimeException;
import java.lang.ref.Reference;
import java.lang.ref.ReferenceQueue;
import java.util.List;

/**
 * Daemon threads that poll the reference queues associated with the two ResultSet types and
 * release their resources once the result sets have been garbage collected.
 */
@InternalApi
class BigQueryDaemonPollingTask extends Thread {

  private static final BigQueryJdbcCustomLogger LOG =
      new BigQueryJdbcCustomLogger(BigQueryDaemonPollingTask.class.getName());

  // Kept as class-level fields for package visibility, matching the original design.
  static ReferenceQueue referenceQueueArrowRs;
  static ReferenceQueue referenceQueueJsonRs;
  static List arrowRsFinalizers;
  static List jsonRsFinalizers;

  private static BigQueryDaemonPollingTask arrowDaemon;
  private static BigQueryDaemonPollingTask jsonDaemon;

  private static final Object LOCK = new Object();

  // Per-instance state: the queue THIS daemon polls and whether it carries Arrow finalizer
  // references. The previous implementation branched on the shared static fields inside
  // run(), so once both daemons were started they both observed the Arrow queue as
  // non-null, both polled it, and the JSON queue was never serviced.
  private final ReferenceQueue queue;
  private final boolean isArrow;

  private BigQueryDaemonPollingTask(ReferenceQueue queue, boolean isArrow) {
    this.queue = queue;
    this.isArrow = isArrow;
    setDaemon(true);
  }

  /**
   * Thread-safe method which creates two instances of the polling task, one for each type of
   * ResultSet.
   *
   * @param referenceQueueArrowRs ReferenceQueue for ArrowResultSet
   * @param referenceQueueJsonRs ReferenceQueue for JsonResultSet
   * @param arrowRsFinalizers Finalizer for ArrowResultSet
   * @param jsonRsFinalizers Finalizer for JsonResultSet
   * @return true if the tasks were created, false if they already existed
   */
  public static boolean startGcDaemonTask(
      ReferenceQueue referenceQueueArrowRs,
      ReferenceQueue referenceQueueJsonRs,
      List arrowRsFinalizers,
      List jsonRsFinalizers) {
    LOG.finest("++enter++");
    synchronized (LOCK) {
      // 2 Background threads will be required to monitor the respective queues
      if (arrowDaemon == null && jsonDaemon == null) {
        BigQueryDaemonPollingTask.referenceQueueArrowRs = referenceQueueArrowRs;
        BigQueryDaemonPollingTask.arrowRsFinalizers = arrowRsFinalizers;
        BigQueryDaemonPollingTask.referenceQueueJsonRs = referenceQueueJsonRs;
        BigQueryDaemonPollingTask.jsonRsFinalizers = jsonRsFinalizers;

        arrowDaemon = new BigQueryDaemonPollingTask(referenceQueueArrowRs, true);
        arrowDaemon.start();

        jsonDaemon = new BigQueryDaemonPollingTask(referenceQueueJsonRs, false);
        jsonDaemon.start();

        return true;
      }
    }
    // Task(s) are already initialised
    return false;
  }

  @Override
  public void run() {
    LOG.finest("++enter++");
    if (this.queue == null) {
      throw new BigQueryJdbcRuntimeException("Null Reference Queue");
    }
    Reference reference;
    try {
      // ReferenceQueue.remove() blocks until a collected result set reference is enqueued.
      while ((reference = this.queue.remove()) != null) {
        LOG.fine(
            "Clearing " + (this.isArrow ? "Arrow" : "Json") + " ResultSet reference " + this.queue);
        if (this.isArrow) {
          ((BigQueryResultSetFinalizers.ArrowResultSetFinalizer) reference).finalizeResources();
        } else {
          ((BigQueryResultSetFinalizers.JsonResultSetFinalizer) reference).finalizeResources();
        }
        reference.clear();
      }
    } catch (InterruptedException ex) {
      // Restore the interrupt status so the JVM can see the daemon was interrupted.
      Thread.currentThread().interrupt();
      throw new BigQueryJdbcRuntimeException(ex);
    }
  }
}
diff --git
a/google-cloud-bigquery-jdbc/src/main/java/com/google/cloud/bigquery/jdbc/BigQueryDatabaseMetaData.java b/google-cloud-bigquery-jdbc/src/main/java/com/google/cloud/bigquery/jdbc/BigQueryDatabaseMetaData.java new file mode 100644 index 000000000..66917ea88 --- /dev/null +++ b/google-cloud-bigquery-jdbc/src/main/java/com/google/cloud/bigquery/jdbc/BigQueryDatabaseMetaData.java @@ -0,0 +1,5346 @@ +/* + * Copyright 2024 Google LLC + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package com.google.cloud.bigquery.jdbc; + +import com.google.api.gax.paging.Page; +import com.google.cloud.Tuple; +import com.google.cloud.bigquery.BigQuery; +import com.google.cloud.bigquery.BigQuery.DatasetListOption; +import com.google.cloud.bigquery.BigQuery.RoutineListOption; +import com.google.cloud.bigquery.BigQuery.TableListOption; +import com.google.cloud.bigquery.BigQueryException; +import com.google.cloud.bigquery.Dataset; +import com.google.cloud.bigquery.DatasetId; +import com.google.cloud.bigquery.Field; +import com.google.cloud.bigquery.Field.Mode; +import com.google.cloud.bigquery.FieldList; +import com.google.cloud.bigquery.FieldValue; +import com.google.cloud.bigquery.FieldValueList; +import com.google.cloud.bigquery.Routine; +import com.google.cloud.bigquery.RoutineArgument; +import com.google.cloud.bigquery.RoutineId; +import com.google.cloud.bigquery.Schema; +import com.google.cloud.bigquery.StandardSQLDataType; +import 
com.google.cloud.bigquery.StandardSQLField; +import com.google.cloud.bigquery.StandardSQLTableType; +import com.google.cloud.bigquery.StandardSQLTypeName; +import com.google.cloud.bigquery.Table; +import com.google.cloud.bigquery.TableDefinition; +import com.google.cloud.bigquery.TableId; +import com.google.cloud.bigquery.exception.BigQueryJdbcException; +import java.io.BufferedReader; +import java.io.IOException; +import java.io.InputStream; +import java.io.InputStreamReader; +import java.sql.Connection; +import java.sql.DatabaseMetaData; +import java.sql.ResultSet; +import java.sql.RowIdLifetime; +import java.sql.SQLException; +import java.sql.Types; +import java.util.ArrayList; +import java.util.Arrays; +import java.util.Collections; +import java.util.Comparator; +import java.util.HashSet; +import java.util.List; +import java.util.Properties; +import java.util.Scanner; +import java.util.Set; +import java.util.concurrent.BlockingQueue; +import java.util.concurrent.Callable; +import java.util.concurrent.CancellationException; +import java.util.concurrent.ExecutionException; +import java.util.concurrent.ExecutorService; +import java.util.concurrent.Executors; +import java.util.concurrent.Future; +import java.util.concurrent.LinkedBlockingQueue; +import java.util.concurrent.TimeUnit; +import java.util.concurrent.atomic.AtomicReference; +import java.util.function.Function; +import java.util.function.Supplier; +import java.util.regex.Pattern; +import javax.annotation.Nullable; + +/** + * An implementation of {@link java.sql.DatabaseMetaData}. This interface is implemented by driver + * vendors to let users know the capabilities of a Database Management System (DBMS) in combination + * with the driver based on JDBCâ„¢ technology ("JDBC driver") that is used with it. + * + * @see BigQueryStatement + */ +// TODO(neenu): test and verify after post MVP implementation. 
+class BigQueryDatabaseMetaData implements DatabaseMetaData { + final BigQueryJdbcCustomLogger LOG = new BigQueryJdbcCustomLogger(this.toString()); + private static final String DATABASE_PRODUCT_NAME = "Google BigQuery"; + private static final String DATABASE_PRODUCT_VERSION = "2.0"; + private static final String DRIVER_NAME = "GoogleJDBCDriverForGoogleBigQuery"; + private static final String DRIVER_DEFAULT_VERSION = "0.0.0"; + private static final String SCHEMA_TERM = "Dataset"; + private static final String CATALOG_TERM = "Project"; + private static final String PROCEDURE_TERM = "Procedure"; + private static final String GET_PRIMARY_KEYS_SQL = "DatabaseMetaData_GetPrimaryKeys.sql"; + private static final String GET_IMPORTED_KEYS_SQL = "DatabaseMetaData_GetImportedKeys.sql"; + private static final String GET_EXPORTED_KEYS_SQL = "DatabaseMetaData_GetExportedKeys.sql"; + private static final String GET_CROSS_REFERENCE_SQL = "DatabaseMetaData_GetCrossReference.sql"; + private static final int API_EXECUTOR_POOL_SIZE = 50; + private static final int DEFAULT_PAGE_SIZE = 500; + private static final int DEFAULT_QUEUE_CAPACITY = 5000; + // Declared package-private for testing. + static final String GOOGLE_SQL_QUOTED_IDENTIFIER = "`"; + // Does not include SQL:2003 Keywords as per JDBC spec. 
+ // https://en.wikipedia.org/wiki/List_of_SQL_reserved_words + static final String GOOGLE_SQL_RESERVED_KEYWORDS = + "ASC,ASSERT_ROWS_MODIFIED,DESC,ENUM,EXCLUDE,FOLLOWING,HASH,IF," + + "IGNORE,LIMIT,LOOKUP,NULLS,PRECEDING,PROTO,QUALIFY,RESPECT,STRUCT,UNBOUNDED"; + static final String GOOGLE_SQL_NUMERIC_FNS = + "ABS,ACOS,ACOSH,ASIN,ASINH,ATAN,ATAN2,ATANH,CBRT,CEIL,CEILING,COS" + + ",COSH,COSINE_DISTANCE,COT,COTH,CSC,CSCH,DIV,EXP,EUCLIDEAN_DISTANCE,FLOOR" + + ",GREATEST,IS_INF,LEAST,LN,LOG,LOG10,MOD,POW,RAND,RANGE_BUCKET,ROUND," + + ",SAFE_ADD,SAFE_DIVIDE,SAFE_MULTIPLY,SAFE_NEGATE,SAFE_SUBTRACT,SEC,SECH," + + "SIGN,SIN,SINH,SQRT,TAN,TANH,TRUNC"; + static final String GOOGLE_SQL_STRING_FNS = + "ASCII,BYTE_LENGTH,CHAR_LENGTH,CHARACTER_LENGTH,CHR,CODE_POINTS_TO_BYTES," + + "CODE_POINTS_TO_STRING,COLLATE,CONCAT,CONTAINS_SUBSTR,EDIT_DISTANCE,ENDS_WITH," + + "FORMAT,FROM_BASE32,FROM_BASE64,FROM_HEX,INITCAP,INSTR,LEFT,LENGTH,LOWER," + + "LPAD,LTRIM,NORMALIZ,NORMALIZE_AND_CASEFOLD,OCTET_LENGTH,REGEXP_CONTAINS," + + "REGEXP_EXTRACT,REGEXP_EXTRACT_ALL,REGEXP_INSTR,REGEXP_REPLACE,REGEXP_SUBSTR," + + "REPEAT,REPLACE,REVERSE,RIGHT,RPAD,RTRIM,SAFE_CONVERT_BYTES_TO_STRING,SOUNDEX," + + "SPLIT,STARTS_WITH,STRPOS,SUBSTR,SUBSTRING,TO_BASE32,TO_BASE64,TO_CODE_POINTS," + + "TO_HEX,TRANSLATE,TRIMunicode,UNICODE,UPPER"; + static final String GOOGLE_SQL_TIME_DATE_FNS = + "DATE,DATE_ADD,DATE_BUCKET,DATE_DIFF,DATE_FROM_UNIX_DATE," + + "DATE_SUB,DATE_TRUNC,DATETIME,DATETIME_ADD.,DATETIME_BUCKET," + + "DATETIME_DIFF,DATETIME_SUB,DATETIME_TRUNC,CURRENT_DATE,CURRENT_DATETIME," + + "CURRENT_TIME,CURRENT_TIMESTAMP,CURRENT_TIME,EXTRACT,FORMAT_TIME,PARSE_TIME," + + "TIME,TIME_ADD,TIME_DIFF,TIME_SUB,TIME_TRUNC,CURRENT_TIMESTAMP,EXTRACT," + + "FORMAT_TIMESTAMP,GENERATE_TIMESTAMP_ARRAY,PARSE_TIMESTAMP,TIMESTAMP," + + "TIMESTAMP_ADD,TIMESTAMP_DIFF,TIMESTAMP_MICROS,TIMESTAMP_MILLIS,TIMESTAMP_SECONDS," + + "TIMESTAMP_SUB,TIMESTAMP_TRUNC,UNIX_MICROS,UNIX_MILLIS,UNIX_SECONDS"; + static final String 
GOOGLE_SQL_ESCAPE = "\\"; + static final String GOOGLE_SQL_CATALOG_SEPARATOR = "."; + static final int GOOGLE_SQL_MAX_COL_NAME_LEN = 300; + static final int GOOGLE_SQL_MAX_COLS_PER_TABLE = 10000; + + String URL; + BigQueryConnection connection; + private final BigQueryStatement statement; + private final BigQuery bigquery; + private final int metadataFetchThreadCount; + private static final AtomicReference parsedDriverVersion = new AtomicReference<>(null); + private static final AtomicReference parsedDriverMajorVersion = + new AtomicReference<>(null); + private static final AtomicReference parsedDriverMinorVersion = + new AtomicReference<>(null); + + BigQueryDatabaseMetaData(BigQueryConnection connection) throws SQLException { + this.URL = connection.getConnectionUrl(); + this.connection = connection; + this.statement = connection.createStatement().unwrap(BigQueryStatement.class); + this.bigquery = connection.getBigQuery(); + this.metadataFetchThreadCount = connection.getMetadataFetchThreadCount(); + loadDriverVersionProperties(); + } + + @Override + public boolean allProceduresAreCallable() { + // Returns false because BigQuery's IAM permissions can allow a user + // to discover a procedure's existence without having rights to execute it. + return false; + } + + @Override + public boolean allTablesAreSelectable() { + // Returns true to ensure maximum compatibility with client applications + // that expect a positive response to discover and list all available tables. 
+ return true; + } + + @Override + public String getURL() { + return this.URL; + } + + @Override + public String getUserName() { + return null; + } + + @Override + public boolean isReadOnly() { + return false; + } + + @Override + public boolean nullsAreSortedHigh() { + return false; + } + + @Override + public boolean nullsAreSortedLow() { + return false; + } + + @Override + public boolean nullsAreSortedAtStart() { + return false; + } + + @Override + public boolean nullsAreSortedAtEnd() { + return false; + } + + @Override + public String getDatabaseProductName() { + return DATABASE_PRODUCT_NAME; + } + + @Override + public String getDatabaseProductVersion() { + return DATABASE_PRODUCT_VERSION; + } + + @Override + public String getDriverName() { + return DRIVER_NAME; + } + + @Override + public String getDriverVersion() { + return parsedDriverVersion.get() != null ? parsedDriverVersion.get() : DRIVER_DEFAULT_VERSION; + } + + @Override + public int getDriverMajorVersion() { + return parsedDriverMajorVersion.get() != null ? parsedDriverMajorVersion.get() : 0; + } + + @Override + public int getDriverMinorVersion() { + return parsedDriverMinorVersion.get() != null ? 
parsedDriverMinorVersion.get() : 0; + } + + @Override + public boolean usesLocalFiles() { + return false; + } + + @Override + public boolean usesLocalFilePerTable() { + return false; + } + + @Override + public boolean supportsMixedCaseIdentifiers() { + return false; + } + + @Override + public boolean storesUpperCaseIdentifiers() { + return false; + } + + @Override + public boolean storesLowerCaseIdentifiers() { + return false; + } + + @Override + public boolean storesMixedCaseIdentifiers() { + return false; + } + + @Override + public boolean supportsMixedCaseQuotedIdentifiers() { + return false; + } + + @Override + public boolean storesUpperCaseQuotedIdentifiers() { + return false; + } + + @Override + public boolean storesLowerCaseQuotedIdentifiers() { + return false; + } + + @Override + public boolean storesMixedCaseQuotedIdentifiers() { + return false; + } + + @Override + public String getIdentifierQuoteString() { + return GOOGLE_SQL_QUOTED_IDENTIFIER; + } + + @Override + public String getSQLKeywords() { + return GOOGLE_SQL_RESERVED_KEYWORDS; + } + + @Override + public String getNumericFunctions() { + return GOOGLE_SQL_NUMERIC_FNS; + } + + @Override + public String getStringFunctions() { + return GOOGLE_SQL_STRING_FNS; + } + + @Override + // GoogleSQL has UDF (user defined functions). + // System functions like DATABASE(), USER() are not supported. 
+ public String getSystemFunctions() { + return null; + } + + @Override + public String getTimeDateFunctions() { + return GOOGLE_SQL_TIME_DATE_FNS; + } + + @Override + public String getSearchStringEscape() { + return GOOGLE_SQL_ESCAPE; + } + + @Override + // No extra characters beyond a-z, A-Z, 0-9 and _ + public String getExtraNameCharacters() { + return null; + } + + @Override + public boolean supportsAlterTableWithAddColumn() { + return true; + } + + @Override + public boolean supportsAlterTableWithDropColumn() { + return true; + } + + @Override + public boolean supportsColumnAliasing() { + return true; + } + + @Override + public boolean nullPlusNonNullIsNull() { + return true; + } + + @Override + public boolean supportsConvert() { + return false; + } + + @Override + public boolean supportsConvert(int fromType, int toType) { + return false; + } + + @Override + public boolean supportsTableCorrelationNames() { + return true; + } + + @Override + public boolean supportsDifferentTableCorrelationNames() { + return false; + } + + @Override + public boolean supportsExpressionsInOrderBy() { + return true; + } + + @Override + public boolean supportsOrderByUnrelated() { + return true; + } + + @Override + public boolean supportsGroupBy() { + return true; + } + + @Override + public boolean supportsGroupByUnrelated() { + return true; + } + + @Override + public boolean supportsGroupByBeyondSelect() { + return true; + } + + @Override + public boolean supportsLikeEscapeClause() { + return false; + } + + @Override + public boolean supportsMultipleResultSets() { + return false; + } + + @Override + public boolean supportsMultipleTransactions() { + return false; + } + + @Override + public boolean supportsNonNullableColumns() { + return false; + } + + @Override + public boolean supportsMinimumSQLGrammar() { + return false; + } + + @Override + public boolean supportsCoreSQLGrammar() { + return false; + } + + @Override + public boolean supportsExtendedSQLGrammar() { + return false; + } 
+ + @Override + public boolean supportsANSI92EntryLevelSQL() { + return false; + } + + @Override + public boolean supportsANSI92IntermediateSQL() { + return false; + } + + @Override + public boolean supportsANSI92FullSQL() { + return false; + } + + @Override + public boolean supportsIntegrityEnhancementFacility() { + return false; + } + + @Override + public boolean supportsOuterJoins() { + return false; + } + + @Override + public boolean supportsFullOuterJoins() { + return false; + } + + @Override + public boolean supportsLimitedOuterJoins() { + return false; + } + + @Override + public String getSchemaTerm() { + return SCHEMA_TERM; + } + + @Override + public String getProcedureTerm() { + return PROCEDURE_TERM; + } + + @Override + public String getCatalogTerm() { + return CATALOG_TERM; + } + + @Override + public boolean isCatalogAtStart() { + return true; + } + + @Override + public String getCatalogSeparator() { + return GOOGLE_SQL_CATALOG_SEPARATOR; + } + + @Override + public boolean supportsSchemasInDataManipulation() { + return false; + } + + @Override + public boolean supportsSchemasInProcedureCalls() { + return false; + } + + @Override + public boolean supportsSchemasInTableDefinitions() { + return false; + } + + @Override + public boolean supportsSchemasInIndexDefinitions() { + return false; + } + + @Override + public boolean supportsSchemasInPrivilegeDefinitions() { + return false; + } + + @Override + public boolean supportsCatalogsInDataManipulation() { + return false; + } + + @Override + public boolean supportsCatalogsInProcedureCalls() { + return false; + } + + @Override + public boolean supportsCatalogsInTableDefinitions() { + return false; + } + + @Override + public boolean supportsCatalogsInIndexDefinitions() { + return false; + } + + @Override + public boolean supportsCatalogsInPrivilegeDefinitions() { + return false; + } + + @Override + public boolean supportsPositionedDelete() { + return false; + } + + @Override + public boolean 
supportsPositionedUpdate() { + return false; + } + + @Override + public boolean supportsSelectForUpdate() { + return false; + } + + @Override + public boolean supportsStoredProcedures() { + return false; + } + + @Override + public boolean supportsSubqueriesInComparisons() { + return false; + } + + @Override + public boolean supportsSubqueriesInExists() { + return false; + } + + @Override + public boolean supportsSubqueriesInIns() { + return false; + } + + @Override + public boolean supportsSubqueriesInQuantifieds() { + return false; + } + + @Override + public boolean supportsCorrelatedSubqueries() { + return false; + } + + @Override + public boolean supportsUnion() { + return true; + } + + @Override + public boolean supportsUnionAll() { + return true; + } + + @Override + public boolean supportsOpenCursorsAcrossCommit() { + return false; + } + + @Override + public boolean supportsOpenCursorsAcrossRollback() { + return false; + } + + @Override + public boolean supportsOpenStatementsAcrossCommit() { + return false; + } + + @Override + public boolean supportsOpenStatementsAcrossRollback() { + return false; + } + + @Override + // No limit + public int getMaxBinaryLiteralLength() { + return 0; + } + + @Override + // No Limit + public int getMaxCharLiteralLength() { + return 0; + } + + @Override + // GoogleSQL documentation says 300. + // https://cloud.google.com/bigquery/quotas#all_tables + public int getMaxColumnNameLength() { + return GOOGLE_SQL_MAX_COL_NAME_LEN; + } + + @Override + // No specific limits for group by. + public int getMaxColumnsInGroupBy() { + return 0; + } + + @Override + // No specific limits for index. + public int getMaxColumnsInIndex() { + return 0; + } + + @Override + // No specific limit for Order By. + public int getMaxColumnsInOrderBy() { + return 0; + } + + @Override + // All columns can be selected. No specific limits. 
+ public int getMaxColumnsInSelect() { + return 0; + } + + @Override + public int getMaxColumnsInTable() { + return GOOGLE_SQL_MAX_COLS_PER_TABLE; + } + + @Override + public int getMaxConnections() { + // Per JDBC spec, returns 0 as there is no connection limit or is unknown. + return 0; + } + + @Override + public int getMaxCursorNameLength() { + // BigQuery does not support named cursors or positioned updates/deletes. + return 0; + } + + @Override + public int getMaxIndexLength() { + // Per the JDBC spec, 0 indicates this feature is not supported. + return 0; + } + + @Override + public int getMaxSchemaNameLength() { + // Dataset IDs can be up to 1024 characters long. + // See: https://cloud.google.com/bigquery/docs/datasets#dataset-naming + return 1024; + } + + @Override + public int getMaxProcedureNameLength() { + // Routine IDs can be up to 256 characters long. + // See: + // https://cloud.google.com/bigquery/docs/reference/rest/v2/routines#RoutineReference.FIELDS.routine_id + return 256; + } + + @Override + public int getMaxCatalogNameLength() { + // Corresponds to the BigQuery Project ID, which can be a maximum of 30 characters. + // See: + // https://cloud.google.com/resource-manager/docs/creating-managing-projects#before_you_begin + return 30; + } + + @Override + public int getMaxRowSize() { + // Per JDBC spec, returns 0 as there is no fixed limit or is unknown. + return 0; + } + + @Override + public boolean doesMaxRowSizeIncludeBlobs() { + return false; + } + + @Override + public int getMaxStatementLength() { + // Per JDBC spec, returns 0 as there is no fixed limit or is unknown. + // See: https://cloud.google.com/bigquery/quotas#query_jobs + return 0; + } + + @Override + public int getMaxStatements() { + // Per JDBC spec, returns 0 as there is no fixed limit or is unknown. + return 0; + } + + @Override + public int getMaxTableNameLength() { + // Table IDs can be up to 1024 characters long. 
+ // See: https://cloud.google.com/bigquery/docs/tables#table-naming + return 1024; + } + + @Override + public int getMaxTablesInSelect() { + // BigQuery allows up to 1,000 tables to be referenced per query. + // See: https://cloud.google.com/bigquery/quotas#query_jobs + return 1000; + } + + @Override + public int getMaxUserNameLength() { + return 0; + } + + @Override + public int getDefaultTransactionIsolation() { + return Connection.TRANSACTION_SERIALIZABLE; + } + + @Override + public boolean supportsTransactions() { + return true; + } + + @Override + public boolean supportsTransactionIsolationLevel(int level) { + return level == Connection.TRANSACTION_SERIALIZABLE; + } + + @Override + public boolean supportsDataDefinitionAndDataManipulationTransactions() { + return false; + } + + @Override + public boolean supportsDataManipulationTransactionsOnly() { + return false; + } + + @Override + public boolean dataDefinitionCausesTransactionCommit() { + return false; + } + + @Override + public boolean dataDefinitionIgnoredInTransactions() { + return false; + } + + @Override + public ResultSet getProcedures( + String catalog, String schemaPattern, String procedureNamePattern) { + if ((catalog == null || catalog.isEmpty()) + || (schemaPattern != null && schemaPattern.isEmpty()) + || (procedureNamePattern != null && procedureNamePattern.isEmpty())) { + LOG.warning("Returning empty ResultSet as catalog is null/empty or a pattern is empty."); + return new BigQueryJsonResultSet(); + } + + LOG.info( + String.format( + "getProcedures called for catalog: %s, schemaPattern: %s, procedureNamePattern: %s", + catalog, schemaPattern, procedureNamePattern)); + + final Pattern schemaRegex = compileSqlLikePattern(schemaPattern); + final Pattern procedureNameRegex = compileSqlLikePattern(procedureNamePattern); + final Schema resultSchema = defineGetProceduresSchema(); + final FieldList resultSchemaFields = resultSchema.getFields(); + final BlockingQueue queue = + new 
LinkedBlockingQueue<>(DEFAULT_QUEUE_CAPACITY); + final List collectedResults = Collections.synchronizedList(new ArrayList<>()); + final List> processingTaskFutures = new ArrayList<>(); + final String catalogParam = catalog; + + Runnable procedureFetcher = + () -> { + ExecutorService apiExecutor = null; + ExecutorService routineProcessorExecutor = null; + final FieldList localResultSchemaFields = resultSchemaFields; + final List>> apiFutures = new ArrayList<>(); + + try { + List datasetsToScan = + findMatchingBigQueryObjects( + "Dataset", + () -> + bigquery.listDatasets( + catalogParam, DatasetListOption.pageSize(DEFAULT_PAGE_SIZE)), + (name) -> bigquery.getDataset(DatasetId.of(catalogParam, name)), + (ds) -> ds.getDatasetId().getDataset(), + schemaPattern, + schemaRegex, + LOG); + + if (datasetsToScan.isEmpty()) { + LOG.info("Fetcher thread found no matching datasets. Finishing."); + return; + } + + apiExecutor = Executors.newFixedThreadPool(API_EXECUTOR_POOL_SIZE); + routineProcessorExecutor = Executors.newFixedThreadPool(this.metadataFetchThreadCount); + + LOG.fine("Submitting parallel findMatchingRoutines tasks..."); + for (Dataset dataset : datasetsToScan) { + if (Thread.currentThread().isInterrupted()) { + LOG.warning("Fetcher interrupted during dataset iteration submission."); + break; + } + + final DatasetId currentDatasetId = dataset.getDatasetId(); + Callable> apiCallable = + () -> + findMatchingBigQueryObjects( + "Routine", + () -> + bigquery.listRoutines( + currentDatasetId, RoutineListOption.pageSize(DEFAULT_PAGE_SIZE)), + (name) -> + bigquery.getRoutine( + RoutineId.of( + currentDatasetId.getProject(), + currentDatasetId.getDataset(), + name)), + (rt) -> rt.getRoutineId().getRoutine(), + procedureNamePattern, + procedureNameRegex, + LOG); + Future> apiFuture = apiExecutor.submit(apiCallable); + apiFutures.add(apiFuture); + } + LOG.fine("Finished submitting " + apiFutures.size() + " findMatchingRoutines tasks."); + apiExecutor.shutdown(); + + 
LOG.fine("Processing results from findMatchingRoutines tasks..."); + for (Future> apiFuture : apiFutures) { + if (Thread.currentThread().isInterrupted()) { + LOG.warning("Fetcher interrupted while processing API futures."); + break; + } + try { + List routinesResult = apiFuture.get(); + if (routinesResult != null) { + for (Routine routine : routinesResult) { + if (Thread.currentThread().isInterrupted()) break; + + if ("PROCEDURE".equalsIgnoreCase(routine.getRoutineType())) { + LOG.fine( + "Submitting processing task for procedure: " + routine.getRoutineId()); + final Routine finalRoutine = routine; + Future processFuture = + routineProcessorExecutor.submit( + () -> + processProcedureInfo( + finalRoutine, collectedResults, localResultSchemaFields)); + processingTaskFutures.add(processFuture); + } else { + LOG.finer("Skipping non-procedure routine: " + routine.getRoutineId()); + } + } + } + } catch (InterruptedException e) { + Thread.currentThread().interrupt(); + LOG.warning("Fetcher thread interrupted while waiting for API future result."); + break; + } catch (ExecutionException e) { + LOG.warning( + "Error executing findMatchingRoutines task: " + + e.getMessage() + + ". 
Cause: " + + e.getCause()); + } catch (CancellationException e) { + LOG.warning("A findMatchingRoutines task was cancelled."); + } + } + + LOG.fine( + "Finished submitting " + + processingTaskFutures.size() + + " processProcedureInfo tasks."); + + if (Thread.currentThread().isInterrupted()) { + LOG.warning( + "Fetcher interrupted before waiting for processing tasks; cancelling remaining."); + processingTaskFutures.forEach(f -> f.cancel(true)); + } else { + LOG.fine("Waiting for processProcedureInfo tasks to complete..."); + waitForTasksCompletion(processingTaskFutures); + LOG.fine("All processProcedureInfo tasks completed or handled."); + } + + if (!Thread.currentThread().isInterrupted()) { + Comparator comparator = + defineGetProceduresComparator(localResultSchemaFields); + sortResults(collectedResults, comparator, "getProcedures", LOG); + } + + if (!Thread.currentThread().isInterrupted()) { + populateQueue(collectedResults, queue, localResultSchemaFields); + } + + } catch (Throwable t) { + LOG.severe("Unexpected error in procedure fetcher runnable: " + t.getMessage()); + apiFutures.forEach(f -> f.cancel(true)); + processingTaskFutures.forEach(f -> f.cancel(true)); + } finally { + signalEndOfData(queue, localResultSchemaFields); + shutdownExecutor(apiExecutor); + shutdownExecutor(routineProcessorExecutor); + LOG.info("Procedure fetcher thread finished."); + } + }; + + Thread fetcherThread = new Thread(procedureFetcher, "getProcedures-fetcher-" + catalog); + BigQueryJsonResultSet resultSet = + BigQueryJsonResultSet.of( + resultSchema, -1, queue, this.statement, new Thread[] {fetcherThread}); + + fetcherThread.start(); + LOG.info("Started background thread for getProcedures"); + return resultSet; + } + + Schema defineGetProceduresSchema() { + List fields = new ArrayList<>(9); + fields.add( + Field.newBuilder("PROCEDURE_CAT", StandardSQLTypeName.STRING) + .setMode(Field.Mode.NULLABLE) + .build()); + fields.add( + Field.newBuilder("PROCEDURE_SCHEM", 
StandardSQLTypeName.STRING) + .setMode(Field.Mode.NULLABLE) + .build()); + fields.add( + Field.newBuilder("PROCEDURE_NAME", StandardSQLTypeName.STRING) + .setMode(Field.Mode.REQUIRED) + .build()); + fields.add( + Field.newBuilder("reserved1", StandardSQLTypeName.INT64) + .setMode(Field.Mode.NULLABLE) + .build()); + fields.add( + Field.newBuilder("reserved2", StandardSQLTypeName.INT64) + .setMode(Field.Mode.NULLABLE) + .build()); + fields.add( + Field.newBuilder("reserved3", StandardSQLTypeName.INT64) + .setMode(Field.Mode.NULLABLE) + .build()); + fields.add( + Field.newBuilder("REMARKS", StandardSQLTypeName.STRING) + .setMode(Field.Mode.NULLABLE) + .build()); + fields.add( + Field.newBuilder("PROCEDURE_TYPE", StandardSQLTypeName.INT64) + .setMode(Field.Mode.REQUIRED) + .build()); + fields.add( + Field.newBuilder("SPECIFIC_NAME", StandardSQLTypeName.STRING) + .setMode(Field.Mode.REQUIRED) + .build()); + return Schema.of(fields); + } + + void processProcedureInfo( + Routine routine, List collectedResults, FieldList resultSchemaFields) { + + RoutineId routineId = routine.getRoutineId(); + LOG.fine("Processing procedure info for: " + routineId); + + try { + if (!"PROCEDURE".equalsIgnoreCase(routine.getRoutineType())) { + LOG.warning( + "processProcedureInfo called with non-procedure type: " + + routine.getRoutineType() + + " for " + + routineId); + return; + } + + String catalogName = routineId.getProject(); + String schemaName = routineId.getDataset(); + String procedureName = routineId.getRoutine(); + String remarks = routine.getDescription(); + + List values = new ArrayList<>(resultSchemaFields.size()); + + values.add(createStringFieldValue(catalogName)); // 1. PROCEDURE_CAT + values.add(createStringFieldValue(schemaName)); // 2. PROCEDURE_SCHEM + values.add(createStringFieldValue(procedureName)); // 3. PROCEDURE_NAME + values.add(createNullFieldValue()); // 4. reserved1 + values.add(createNullFieldValue()); // 5. 
reserved2 + values.add(createNullFieldValue()); // 6. reserved3 + values.add(createStringFieldValue(remarks)); // 7. REMARKS + values.add( + createLongFieldValue( + (long) DatabaseMetaData.procedureResultUnknown)); // 8. PROCEDURE_TYPE + values.add(createStringFieldValue(procedureName)); // 9. SPECIFIC_NAME + + FieldValueList rowFvl = FieldValueList.of(values, resultSchemaFields); + collectedResults.add(rowFvl); + + LOG.fine("Processed and added procedure info row for: " + routineId); + + } catch (Exception e) { + LOG.warning( + String.format( + "Error processing procedure info for %s: %s. Skipping this procedure.", + routineId, e.getMessage())); + } + } + + Comparator defineGetProceduresComparator(FieldList resultSchemaFields) { + final int PROC_CAT_IDX = resultSchemaFields.getIndex("PROCEDURE_CAT"); + final int PROC_SCHEM_IDX = resultSchemaFields.getIndex("PROCEDURE_SCHEM"); + final int PROC_NAME_IDX = resultSchemaFields.getIndex("PROCEDURE_NAME"); + final int SPEC_NAME_IDX = resultSchemaFields.getIndex("SPECIFIC_NAME"); + return Comparator.comparing( + (FieldValueList fvl) -> getStringValueOrNull(fvl, PROC_CAT_IDX), + Comparator.nullsFirst(String::compareTo)) + .thenComparing( + (FieldValueList fvl) -> getStringValueOrNull(fvl, PROC_SCHEM_IDX), + Comparator.nullsFirst(String::compareTo)) + .thenComparing( + (FieldValueList fvl) -> getStringValueOrNull(fvl, PROC_NAME_IDX), + Comparator.nullsFirst(String::compareTo)) + .thenComparing( + (FieldValueList fvl) -> getStringValueOrNull(fvl, SPEC_NAME_IDX), + Comparator.nullsFirst(String::compareTo)); + } + + @Override + public ResultSet getProcedureColumns( + String catalog, String schemaPattern, String procedureNamePattern, String columnNamePattern) { + + if (catalog == null || catalog.isEmpty()) { + LOG.warning("Returning empty ResultSet because catalog (project) is null or empty."); + return new BigQueryJsonResultSet(); + } + if ((schemaPattern != null && schemaPattern.isEmpty()) + || (procedureNamePattern != null 
&& procedureNamePattern.isEmpty()) + || (columnNamePattern != null && columnNamePattern.isEmpty())) { + LOG.warning("Returning empty ResultSet because an explicit empty pattern was provided."); + return new BigQueryJsonResultSet(); + } + + LOG.info( + String.format( + "getProcedureColumns called for catalog: %s, schemaPattern: %s, procedureNamePattern:" + + " %s, columnNamePattern: %s", + catalog, schemaPattern, procedureNamePattern, columnNamePattern)); + + final Pattern schemaRegex = compileSqlLikePattern(schemaPattern); + final Pattern procedureNameRegex = compileSqlLikePattern(procedureNamePattern); + final Pattern columnNameRegex = compileSqlLikePattern(columnNamePattern); + + final Schema resultSchema = defineGetProcedureColumnsSchema(); + final BlockingQueue queue = + new LinkedBlockingQueue<>(DEFAULT_QUEUE_CAPACITY); + final List collectedResults = Collections.synchronizedList(new ArrayList<>()); + final List> processingTaskFutures = new ArrayList<>(); + final String catalogParam = catalog; + + Runnable procedureColumnFetcher = + () -> { + ExecutorService listRoutinesExecutor = null; + ExecutorService getRoutineDetailsExecutor = null; + ExecutorService processArgsExecutor = null; + + final String fetcherThreadNameSuffix = + "-" + catalogParam.substring(0, Math.min(10, catalogParam.length())); + + try { + List datasetsToScan = + fetchMatchingDatasetsForProcedureColumns(catalogParam, schemaPattern, schemaRegex); + if (datasetsToScan.isEmpty() || Thread.currentThread().isInterrupted()) { + LOG.info( + "Fetcher: No matching datasets or interrupted early. 
Catalog: " + catalogParam); + return; + } + + listRoutinesExecutor = + Executors.newFixedThreadPool( + API_EXECUTOR_POOL_SIZE, + runnable -> new Thread(runnable, "pcol-list-rout" + fetcherThreadNameSuffix)); + List procedureIdsToGet = + listMatchingProcedureIdsFromDatasets( + datasetsToScan, + procedureNamePattern, + procedureNameRegex, + listRoutinesExecutor, + catalogParam, + LOG); + shutdownExecutor(listRoutinesExecutor); + listRoutinesExecutor = null; + + if (procedureIdsToGet.isEmpty() || Thread.currentThread().isInterrupted()) { + LOG.info("Fetcher: No procedure IDs found or interrupted. Catalog: " + catalogParam); + return; + } + + getRoutineDetailsExecutor = + Executors.newFixedThreadPool( + 100, + runnable -> new Thread(runnable, "pcol-get-details" + fetcherThreadNameSuffix)); + List fullRoutines = + fetchFullRoutineDetailsForIds(procedureIdsToGet, getRoutineDetailsExecutor, LOG); + shutdownExecutor(getRoutineDetailsExecutor); + getRoutineDetailsExecutor = null; + + if (fullRoutines.isEmpty() || Thread.currentThread().isInterrupted()) { + LOG.info( + "Fetcher: No full routines fetched or interrupted. Catalog: " + catalogParam); + return; + } + + processArgsExecutor = + Executors.newFixedThreadPool( + this.metadataFetchThreadCount, + runnable -> new Thread(runnable, "pcol-proc-args" + fetcherThreadNameSuffix)); + submitProcedureArgumentProcessingJobs( + fullRoutines, + columnNameRegex, + collectedResults, + resultSchema.getFields(), + processArgsExecutor, + processingTaskFutures, + LOG); + + if (Thread.currentThread().isInterrupted()) { + LOG.warning( + "Fetcher: Interrupted before waiting for argument processing. Catalog: " + + catalogParam); + processingTaskFutures.forEach(f -> f.cancel(true)); + } else { + LOG.fine( + "Fetcher: Waiting for " + + processingTaskFutures.size() + + " argument processing tasks. 
Catalog: " + + catalogParam); + waitForTasksCompletion(processingTaskFutures); + LOG.fine( + "Fetcher: All argument processing tasks completed or handled. Catalog: " + + catalogParam); + } + + if (!Thread.currentThread().isInterrupted()) { + Comparator comparator = + defineGetProcedureColumnsComparator(resultSchema.getFields()); + sortResults(collectedResults, comparator, "getProcedureColumns", LOG); + populateQueue(collectedResults, queue, resultSchema.getFields()); + } + + } catch (InterruptedException e) { + Thread.currentThread().interrupt(); + LOG.warning( + "Fetcher: Interrupted in main try block for catalog " + + catalogParam + + ". Error: " + + e.getMessage()); + processingTaskFutures.forEach(f -> f.cancel(true)); + } catch (Throwable t) { + LOG.severe( + "Fetcher: Unexpected error in main try block for catalog " + + catalogParam + + ". Error: " + + t.getMessage()); + processingTaskFutures.forEach(f -> f.cancel(true)); + } finally { + signalEndOfData(queue, resultSchema.getFields()); + if (listRoutinesExecutor != null) shutdownExecutor(listRoutinesExecutor); + if (getRoutineDetailsExecutor != null) shutdownExecutor(getRoutineDetailsExecutor); + if (processArgsExecutor != null) shutdownExecutor(processArgsExecutor); + LOG.info("Procedure column fetcher thread finished for catalog: " + catalogParam); + } + }; + + Thread fetcherThread = + new Thread(procedureColumnFetcher, "getProcedureColumns-fetcher-" + catalog); + BigQueryJsonResultSet resultSet = + BigQueryJsonResultSet.of( + resultSchema, -1, queue, this.statement, new Thread[] {fetcherThread}); + + fetcherThread.start(); + LOG.info("Started background thread for getProcedureColumns for catalog: " + catalog); + return resultSet; + } + + private List fetchMatchingDatasetsForProcedureColumns( + String catalogParam, String schemaPattern, Pattern schemaRegex) throws InterruptedException { + LOG.fine( + String.format( + "Fetching matching datasets for catalog '%s', schemaPattern '%s'", + catalogParam, 
schemaPattern)); + List datasetsToScan = + findMatchingBigQueryObjects( + "Dataset", + () -> + bigquery.listDatasets(catalogParam, DatasetListOption.pageSize(DEFAULT_PAGE_SIZE)), + (name) -> bigquery.getDataset(DatasetId.of(catalogParam, name)), + (ds) -> ds.getDatasetId().getDataset(), + schemaPattern, + schemaRegex, + LOG); + LOG.info( + String.format( + "Found %d datasets to scan for procedures in catalog '%s'.", + datasetsToScan.size(), catalogParam)); + return datasetsToScan; + } + + List listMatchingProcedureIdsFromDatasets( + List datasetsToScan, + String procedureNamePattern, + Pattern procedureNameRegex, + ExecutorService listRoutinesExecutor, + String catalogParam, + BigQueryJdbcCustomLogger logger) + throws InterruptedException { + + logger.fine( + String.format( + "Listing matching procedure IDs from %d datasets for catalog '%s'.", + datasetsToScan.size(), catalogParam)); + final List>> listRoutineFutures = new ArrayList<>(); + final List procedureIdsToGet = Collections.synchronizedList(new ArrayList<>()); + + for (Dataset dataset : datasetsToScan) { + if (Thread.currentThread().isInterrupted()) { + logger.warning( + "Interrupted during submission of routine listing tasks for catalog: " + catalogParam); + throw new InterruptedException("Interrupted while listing routines"); + } + final DatasetId currentDatasetId = dataset.getDatasetId(); + Callable> listCallable = + () -> + findMatchingBigQueryObjects( + "Routine", + () -> + bigquery.listRoutines( + currentDatasetId, RoutineListOption.pageSize(DEFAULT_PAGE_SIZE)), + (name) -> + bigquery.getRoutine( + RoutineId.of( + currentDatasetId.getProject(), currentDatasetId.getDataset(), name)), + (rt) -> rt.getRoutineId().getRoutine(), + procedureNamePattern, + procedureNameRegex, + logger); + listRoutineFutures.add(listRoutinesExecutor.submit(listCallable)); + } + logger.fine( + "Submitted " + + listRoutineFutures.size() + + " routine list tasks for catalog: " + + catalogParam); + + for (Future> listFuture : 
// (Tail of the routine-ID collection method, continued from above: iterates the
// list-routine futures, keeps IDs of PROCEDURE-type routines, and returns them.)
        listRoutineFutures) {
      if (Thread.currentThread().isInterrupted()) {
        logger.warning(
            "Interrupted while collecting routine list results for catalog: " + catalogParam);
        listRoutineFutures.forEach(f -> f.cancel(true));
        throw new InterruptedException("Interrupted while collecting routine lists");
      }
      try {
        List listedRoutines = listFuture.get();
        if (listedRoutines != null) {
          for (Routine listedRoutine : listedRoutines) {
            // Only PROCEDURE-type routines are relevant for getProcedureColumns.
            if (listedRoutine != null
                && "PROCEDURE".equalsIgnoreCase(listedRoutine.getRoutineType())) {
              if (listedRoutine.getRoutineId() != null) {
                procedureIdsToGet.add(listedRoutine.getRoutineId());
              } else {
                logger.warning(
                    "Found a procedure type routine with a null ID during listing phase for"
                        + " catalog: "
                        + catalogParam);
              }
            }
          }
        }
      } catch (ExecutionException e) {
        // A failed list task is logged and skipped; remaining futures are still drained.
        logger.warning(
            "Error getting routine list result for catalog " + catalogParam + ": " + e.getCause());
      } catch (CancellationException e) {
        logger.warning("Routine list task cancelled for catalog: " + catalogParam);
      }
    }
    logger.info(
        String.format(
            "Found %d procedure IDs to fetch details for in catalog '%s'.",
            procedureIdsToGet.size(), catalogParam));
    return procedureIdsToGet;
  }

  /**
   * Fetches the full {@code Routine} object for each given procedure ID in parallel on
   * {@code getRoutineDetailsExecutor}.
   *
   * <p>Per-routine fetch failures are logged and the routine is dropped (the callable returns
   * {@code null}); an interrupt cancels outstanding futures and rethrows as
   * {@link InterruptedException}.
   */
  List fetchFullRoutineDetailsForIds(
      List procedureIdsToGet,
      ExecutorService getRoutineDetailsExecutor,
      BigQueryJdbcCustomLogger logger)
      throws InterruptedException {
    logger.fine(
        String.format("Fetching full details for %d procedure IDs.", procedureIdsToGet.size()));
    final List> getRoutineFutures = new ArrayList<>();
    // Synchronized list: results are appended from this thread only, but kept consistent
    // with the other collectors in this class.
    final List fullRoutines = Collections.synchronizedList(new ArrayList<>());

    for (RoutineId procId : procedureIdsToGet) {
      if (Thread.currentThread().isInterrupted()) {
        logger.warning("Interrupted during submission of getRoutine detail tasks.");
        throw new InterruptedException("Interrupted while submitting getRoutine tasks");
      }
      final RoutineId currentProcId = procId;
      Callable getCallable =
          () -> {
            try {
              return bigquery.getRoutine(currentProcId);
            } catch (Exception e) {
              // Best-effort: a routine whose details cannot be fetched is skipped, not fatal.
              logger.warning(
                  "Failed to get full details for routine "
                      + currentProcId
                      + ": "
                      + e.getMessage());
              return null;
            }
          };
      getRoutineFutures.add(getRoutineDetailsExecutor.submit(getCallable));
    }
    logger.fine("Submitted " + getRoutineFutures.size() + " getRoutine detail tasks.");

    for (Future getFuture : getRoutineFutures) {
      if (Thread.currentThread().isInterrupted()) {
        logger.warning("Interrupted while collecting getRoutine detail results.");
        getRoutineFutures.forEach(f -> f.cancel(true)); // Cancel remaining
        throw new InterruptedException("Interrupted while collecting Routine details");
      }
      try {
        Routine fullRoutine = getFuture.get();
        if (fullRoutine != null) {
          fullRoutines.add(fullRoutine);
        }
      } catch (ExecutionException e) {
        logger.warning("Error processing getRoutine future result: " + e.getCause());
      } catch (CancellationException e) {
        logger.warning("getRoutine detail task cancelled.");
      }
    }
    logger.info(
        String.format("Successfully fetched full details for %d routines.", fullRoutines.size()));
    return fullRoutines;
  }

  /**
   * Submits one argument-processing task per PROCEDURE-type routine to
   * {@code processArgsExecutor}; the resulting futures are appended to
   * {@code outArgumentProcessingFutures} (an output parameter).
   *
   * <p>Routines whose type is not PROCEDURE are logged and skipped. Throws
   * {@link InterruptedException} if interrupted during submission.
   */
  void submitProcedureArgumentProcessingJobs(
      List fullRoutines,
      Pattern columnNameRegex,
      List collectedResults,
      FieldList resultSchemaFields,
      ExecutorService processArgsExecutor,
      List> outArgumentProcessingFutures,
      BigQueryJdbcCustomLogger logger)
      throws InterruptedException {
    logger.fine(
        String.format("Submitting argument processing jobs for %d routines.", fullRoutines.size()));

    for (Routine fullRoutine : fullRoutines) {
      if (Thread.currentThread().isInterrupted()) {
        logger.warning("Interrupted during submission of argument processing tasks.");
        throw new InterruptedException("Interrupted while submitting argument processing jobs");
      }
      if (fullRoutine != null) {
        if ("PROCEDURE".equalsIgnoreCase(fullRoutine.getRoutineType())) {
          final Routine finalFullRoutine = fullRoutine;
          Future processFuture =
              processArgsExecutor.submit(
                  () ->
                      processProcedureArguments(
                          finalFullRoutine, columnNameRegex, collectedResults, resultSchemaFields));
          outArgumentProcessingFutures.add(processFuture);
        } else {
          // Defensive: getRoutine can return a routine of a different type than listed.
          logger.warning(
              "Routine "
                  + (fullRoutine.getRoutineId() != null
                      ? fullRoutine.getRoutineId().toString()
                      : "UNKNOWN_ID")
                  + " fetched via getRoutine was not of type PROCEDURE (Type: "
                  + fullRoutine.getRoutineType()
                  + "). Skipping argument processing.");
        }
      }
    }
    logger.fine(
        "Finished submitting "
            + outArgumentProcessingFutures.size()
            + " processProcedureArguments tasks.");
  }

  /**
   * Builds the 20-column result schema for {@link java.sql.DatabaseMetaData#getProcedureColumns},
   * in the column order mandated by the JDBC specification.
   */
  Schema defineGetProcedureColumnsSchema() {
    List fields = new ArrayList<>(20);
    fields.add(
        Field.newBuilder("PROCEDURE_CAT", StandardSQLTypeName.STRING)
            .setMode(Mode.NULLABLE)
            .build());
    fields.add(
        Field.newBuilder("PROCEDURE_SCHEM", StandardSQLTypeName.STRING)
            .setMode(Mode.NULLABLE)
            .build());
    fields.add(
        Field.newBuilder("PROCEDURE_NAME", StandardSQLTypeName.STRING)
            .setMode(Mode.REQUIRED)
            .build());
    fields.add(
        Field.newBuilder("COLUMN_NAME", StandardSQLTypeName.STRING).setMode(Mode.REQUIRED).build());
    fields.add(
        Field.newBuilder("COLUMN_TYPE", StandardSQLTypeName.INT64).setMode(Mode.REQUIRED).build());
    fields.add(
        Field.newBuilder("DATA_TYPE", StandardSQLTypeName.INT64).setMode(Mode.REQUIRED).build());
    fields.add(
        Field.newBuilder("TYPE_NAME", StandardSQLTypeName.STRING).setMode(Mode.REQUIRED).build());
    fields.add(
        Field.newBuilder("PRECISION", StandardSQLTypeName.INT64).setMode(Mode.NULLABLE).build());
    fields.add(
        Field.newBuilder("LENGTH", StandardSQLTypeName.INT64).setMode(Mode.NULLABLE).build());
    fields.add(Field.newBuilder("SCALE", StandardSQLTypeName.INT64).setMode(Mode.NULLABLE).build());
    fields.add(Field.newBuilder("RADIX", StandardSQLTypeName.INT64).setMode(Mode.NULLABLE).build());
    fields.add(
        Field.newBuilder("NULLABLE",
            StandardSQLTypeName.INT64).setMode(Mode.REQUIRED).build());
    fields.add(
        Field.newBuilder("REMARKS", StandardSQLTypeName.STRING).setMode(Mode.NULLABLE).build());
    fields.add(
        Field.newBuilder("COLUMN_DEF", StandardSQLTypeName.STRING).setMode(Mode.NULLABLE).build());
    fields.add(
        Field.newBuilder("SQL_DATA_TYPE", StandardSQLTypeName.INT64)
            .setMode(Mode.NULLABLE)
            .build());
    fields.add(
        Field.newBuilder("SQL_DATETIME_SUB", StandardSQLTypeName.INT64)
            .setMode(Mode.NULLABLE)
            .build());
    fields.add(
        Field.newBuilder("CHAR_OCTET_LENGTH", StandardSQLTypeName.INT64)
            .setMode(Mode.NULLABLE)
            .build());
    fields.add(
        Field.newBuilder("ORDINAL_POSITION", StandardSQLTypeName.INT64)
            .setMode(Mode.REQUIRED)
            .build());
    fields.add(
        Field.newBuilder("IS_NULLABLE", StandardSQLTypeName.STRING).setMode(Mode.REQUIRED).build());
    fields.add(
        Field.newBuilder("SPECIFIC_NAME", StandardSQLTypeName.STRING)
            .setMode(Mode.REQUIRED)
            .build());
    return Schema.of(fields);
  }

  /**
   * Converts each argument of {@code routine} into a getProcedureColumns row, applying the
   * column-name pattern filter, and appends the rows to {@code collectedResults}.
   *
   * <p>Runs on a worker thread; checks the interrupt flag between arguments. Failures to read
   * an individual argument produce a fallback row named {@code arg_retrieval_err_<ordinal>}
   * instead of aborting the whole procedure.
   */
  void processProcedureArguments(
      Routine routine,
      Pattern columnNameRegex,
      List collectedResults,
      FieldList resultSchemaFields) {

    RoutineId routineId = routine.getRoutineId();
    List arguments;
    try {
      arguments = routine.getArguments();
    } catch (Exception e) {
      LOG.warning(
          String.format(
              "Could not retrieve arguments list for procedure %s: %s. No arguments will be"
                  + " processed.",
              routineId, e.getMessage()));
      return;
    }

    if (arguments == null || arguments.isEmpty()) {
      LOG.fine("Procedure " + routineId + " has no arguments.");
      return;
    }

    String catalogName = routineId.getProject();
    String schemaName = routineId.getDataset();
    String procedureName = routineId.getRoutine();
    // BigQuery routines have no overloads visible here, so SPECIFIC_NAME == PROCEDURE_NAME.
    String specificName = procedureName;

    for (int i = 0; i < arguments.size(); i++) {
      if (Thread.currentThread().isInterrupted()) {
        LOG.warning("Argument processing task interrupted for " + routineId);
        break;
      }

      int ordinalPosition = i + 1;
      RoutineArgument arg;
      String argName;

      try {
        arg = arguments.get(i);
        argName = arg.getName();
      } catch (Exception listAccessException) {
        LOG.warning(
            String.format(
                "Exception during arguments.get(%d) for Proc: %s. Ordinal: %d. Message: %s."
                    + " Generating fallback row.",
                i, routineId, ordinalPosition, listAccessException.getMessage()));
        argName = "arg_retrieval_err_" + ordinalPosition;
        arg = null;
      }

      // Filter by columnNamePattern, but not by generated fallback name
      // NOTE(review): this exempts ANY argument whose real name starts with "arg_" from the
      // pattern filter, not just the generated fallback names — confirm this is intended.
      if (columnNameRegex != null) {
        assert argName != null;
        if (!argName.startsWith("arg_")) {
          if (!columnNameRegex.matcher(argName).matches()) {
            continue;
          }
        }
      }

      List values =
          createProcedureColumnRow(
              catalogName, schemaName, procedureName, specificName, arg, ordinalPosition, argName);

      FieldValueList rowFvl = FieldValueList.of(values, resultSchemaFields);
      collectedResults.add(rowFvl);
    }
  }

  /**
   * Builds one 20-value getProcedureColumns row for a single procedure argument.
   *
   * <p>{@code argument} may be null (retrieval failure upstream), in which case the type
   * defaults to VARCHAR and the mode is unknown. Value order must match
   * {@code defineGetProcedureColumnsSchema()}.
   */
  List createProcedureColumnRow(
      String catalog,
      String schemaName,
      String procedureName,
      String specificName,
      @Nullable RoutineArgument argument,
      int ordinalPosition,
      String columnName) {

    List values = new ArrayList<>(20);
    ColumnTypeInfo typeInfo;

    if (argument == null) {
      LOG.warning(
          String.format(
              "Proc: %s, Arg: %s (Pos %d) - RoutineArgument object is null. Defaulting type to"
                  + " VARCHAR.",
              procedureName, columnName, ordinalPosition));
      typeInfo = new ColumnTypeInfo(Types.VARCHAR, "VARCHAR", null, null, null);
    } else {
      try {
        StandardSQLDataType argumentDataType = argument.getDataType();
        if (argumentDataType == null) {
          LOG.warning(
              String.format(
                  "Proc: %s, Arg: %s (Pos %d) - argument.getDataType() returned null. Defaulting"
                      + " type to VARCHAR.",
                  procedureName, columnName, ordinalPosition));
          typeInfo = new ColumnTypeInfo(Types.VARCHAR, "VARCHAR", null, null, null);
        } else {
          typeInfo =
              determineTypeInfoFromDataType(
                  argumentDataType, procedureName, columnName, ordinalPosition);
        }
      } catch (Exception e) {
        LOG.warning(
            String.format(
                "Proc: %s, Arg: %s (Pos %d) - Unexpected Exception during type processing."
                    + " Defaulting type to VARCHAR. Error: %s",
                procedureName, columnName, ordinalPosition, e.getMessage()));
        typeInfo = new ColumnTypeInfo(Types.VARCHAR, "VARCHAR", null, null, null);
      }
    }

    String argumentModeStr = null;
    if (argument != null) {
      try {
        argumentModeStr = argument.getMode();
      } catch (Exception e) {
        LOG.warning(
            String.format(
                "Proc: %s, Arg: %s (Pos %d) - Could not get argument mode. Error: %s",
                procedureName, columnName, ordinalPosition, e.getMessage()));
      }
    }

    values.add(createStringFieldValue(catalog)); // 1. PROCEDURE_CAT
    values.add(createStringFieldValue(schemaName)); // 2. PROCEDURE_SCHEM
    values.add(createStringFieldValue(procedureName)); // 3. PROCEDURE_NAME
    values.add(createStringFieldValue(columnName)); // 4. COLUMN_NAME
    // Map BigQuery argument mode (IN/OUT/INOUT) to the JDBC procedureColumn* constants;
    // anything else (including null mode) is reported as unknown.
    long columnTypeJdbc = DatabaseMetaData.procedureColumnUnknown;
    if ("IN".equalsIgnoreCase(argumentModeStr)) {
      columnTypeJdbc = DatabaseMetaData.procedureColumnIn;
    } else if ("OUT".equalsIgnoreCase(argumentModeStr)) {
      columnTypeJdbc = DatabaseMetaData.procedureColumnOut;
    } else if ("INOUT".equalsIgnoreCase(argumentModeStr)) {
      columnTypeJdbc = DatabaseMetaData.procedureColumnInOut;
    }
    values.add(createLongFieldValue(columnTypeJdbc)); // 5. COLUMN_TYPE
    values.add(createLongFieldValue((long) typeInfo.jdbcType)); // 6. DATA_TYPE (java.sql.Types)
    values.add(createStringFieldValue(typeInfo.typeName)); // 7. TYPE_NAME (DB type name)
    values.add(
        createLongFieldValue(
            typeInfo.columnSize == null ? null : typeInfo.columnSize.longValue())); // 8. PRECISION
    values.add(
        createNullFieldValue()); // 9. LENGTH (length in bytes - often null for procedure params)
    values.add(
        createLongFieldValue(
            typeInfo.decimalDigits == null
                ? null
                : typeInfo.decimalDigits.longValue())); // 10. SCALE
    values.add(
        createLongFieldValue(
            typeInfo.numPrecRadix == null ? null : typeInfo.numPrecRadix.longValue())); // 11. RADIX
    values.add(createLongFieldValue((long) DatabaseMetaData.procedureNullable)); // 12. NULLABLE
    values.add(
        createNullFieldValue()); // 13. REMARKS (Can be argument.getDescription() if available and
    // needed)
    values.add(createNullFieldValue()); // 14. COLUMN_DEF (Default value - typically null)
    values.add(createNullFieldValue()); // 15. SQL_DATA_TYPE (reserved)
    values.add(createNullFieldValue()); // 16. SQL_DATETIME_SUB (reserved)
    values.add(createNullFieldValue()); // 17. CHAR_OCTET_LENGTH (null for non-char/binary)
    values.add(createLongFieldValue((long) ordinalPosition)); // 18. ORDINAL_POSITION
    values.add(createStringFieldValue("YES")); // 19. IS_NULLABLE (Default to "YES")
    values.add(createStringFieldValue(specificName)); // 20.
    // SPECIFIC_NAME (column 20, appended above)

    return values;
  }

  /**
   * Resolves a BigQuery {@code StandardSQLDataType} to a {@code ColumnTypeInfo} (JDBC type code
   * plus type name). ARRAY is special-cased; other kinds are resolved via
   * {@code StandardSQLTypeName.valueOf}. Falls back to VARCHAR on any failure.
   */
  ColumnTypeInfo determineTypeInfoFromDataType(
      StandardSQLDataType argumentDataType,
      String procedureName,
      String columnName,
      int ordinalPosition) {

    ColumnTypeInfo defaultVarcharTypeInfo =
        new ColumnTypeInfo(Types.VARCHAR, "VARCHAR", null, null, null);
    try {
      String typeKind = argumentDataType.getTypeKind();
      if (typeKind != null && !typeKind.isEmpty()) {
        if ("ARRAY".equalsIgnoreCase(typeKind)) {
          return new ColumnTypeInfo(Types.ARRAY, "ARRAY", null, null, null);
        }
        // NOTE(review): toUpperCase() uses the default locale; consider Locale.ROOT to avoid
        // locale-sensitive casing (e.g. Turkish 'i') — confirm against project conventions.
        StandardSQLTypeName determinedType = StandardSQLTypeName.valueOf(typeKind.toUpperCase());
        return getColumnTypeInfoForSqlType(determinedType);
      }
    } catch (Exception e) {
      // Includes IllegalArgumentException from valueOf for unrecognized type kinds.
      LOG.warning(
          String.format(
              "Proc: %s, Arg: %s (Pos %d) - Caught an unexpected Exception during type"
                  + " determination. Defaulting type to VARCHAR. Error: %s",
              procedureName, columnName, ordinalPosition, e.getMessage()));
    }
    return defaultVarcharTypeInfo;
  }

  /**
   * Builds the JDBC-mandated sort order for getProcedureColumns rows:
   * PROCEDURE_CAT, PROCEDURE_SCHEM, PROCEDURE_NAME, SPECIFIC_NAME, then COLUMN_NAME
   * (case-insensitive, nulls first). Returns null if any required column index is missing.
   */
  Comparator defineGetProcedureColumnsComparator(FieldList resultSchemaFields) {
    final int PROC_CAT_IDX = resultSchemaFields.getIndex("PROCEDURE_CAT");
    final int PROC_SCHEM_IDX = resultSchemaFields.getIndex("PROCEDURE_SCHEM");
    final int PROC_NAME_IDX = resultSchemaFields.getIndex("PROCEDURE_NAME");
    final int SPEC_NAME_IDX = resultSchemaFields.getIndex("SPECIFIC_NAME");
    final int COL_NAME_IDX = resultSchemaFields.getIndex("COLUMN_NAME");

    if (PROC_CAT_IDX < 0
        || PROC_SCHEM_IDX < 0
        || PROC_NAME_IDX < 0
        || SPEC_NAME_IDX < 0
        || COL_NAME_IDX < 0) {
      LOG.severe(
          "Could not find required columns (PROCEDURE_CAT, SCHEM, NAME, SPECIFIC_NAME, COLUMN_NAME)"
              + " in getProcedureColumns schema for sorting. Returning null comparator.");
      return null;
    }

    return Comparator.comparing(
            (FieldValueList fvl) -> getStringValueOrNull(fvl, PROC_CAT_IDX),
            Comparator.nullsFirst(String::compareToIgnoreCase))
        .thenComparing(
            (FieldValueList fvl) -> getStringValueOrNull(fvl, PROC_SCHEM_IDX),
            Comparator.nullsFirst(String::compareToIgnoreCase))
        .thenComparing(
            (FieldValueList fvl) -> getStringValueOrNull(fvl, PROC_NAME_IDX),
            Comparator.nullsFirst(String::compareToIgnoreCase))
        .thenComparing(
            (FieldValueList fvl) -> getStringValueOrNull(fvl, SPEC_NAME_IDX),
            Comparator.nullsFirst(String::compareToIgnoreCase))
        .thenComparing(
            (FieldValueList fvl) -> getStringValueOrNull(fvl, COL_NAME_IDX),
            Comparator.nullsFirst(String::compareToIgnoreCase));
  }

  /**
   * {@link java.sql.DatabaseMetaData#getTables} implementation: streams matching tables into a
   * {@code BigQueryJsonResultSet} backed by a queue that a background fetcher thread fills.
   *
   * <p>Empty (non-null) patterns or a null/empty effective catalog short-circuit to an empty
   * result set, per driver convention.
   */
  @Override
  public ResultSet getTables(
      String catalog, String schemaPattern, String tableNamePattern, String[] types) {

    Tuple effectiveIdentifiers =
        determineEffectiveCatalogAndSchema(catalog, schemaPattern);
    String effectiveCatalog = effectiveIdentifiers.x();
    String effectiveSchemaPattern = effectiveIdentifiers.y();

    if ((effectiveCatalog == null || effectiveCatalog.isEmpty())
        || (effectiveSchemaPattern != null && effectiveSchemaPattern.isEmpty())
        || (tableNamePattern != null && tableNamePattern.isEmpty())) {
      LOG.warning(
          "Returning empty ResultSet as one or more patterns are empty or catalog is null.");
      return new BigQueryJsonResultSet();
    }

    LOG.info(
        String.format(
            "getTables called for catalog: %s, schemaPattern: %s, tableNamePattern: %s, types: %s",
            effectiveCatalog, effectiveSchemaPattern, tableNamePattern, Arrays.toString(types)));

    // Convert SQL LIKE patterns (%, _) to regexes; null pattern means "match everything".
    final Pattern schemaRegex = compileSqlLikePattern(effectiveSchemaPattern);
    final Pattern tableNameRegex = compileSqlLikePattern(tableNamePattern);
    // null requestedTypes means "all types" per the JDBC contract.
    final Set requestedTypes =
        (types == null || types.length == 0) ?
        null : new HashSet<>(Arrays.asList(types));

    final Schema resultSchema = defineGetTablesSchema();
    final FieldList resultSchemaFields = resultSchema.getFields();

    // Bounded queue decouples the background fetcher from the consuming ResultSet.
    final BlockingQueue queue =
        new LinkedBlockingQueue<>(DEFAULT_QUEUE_CAPACITY);
    final List collectedResults = Collections.synchronizedList(new ArrayList<>());
    final String catalogParam = effectiveCatalog;
    final String schemaParam = effectiveSchemaPattern;

    // Background fetcher: list datasets -> list/filter tables per dataset (apiExecutor) ->
    // convert each table to a row (tableProcessorExecutor) -> sort -> publish to queue.
    Runnable tableFetcher =
        () -> {
          ExecutorService apiExecutor = null;
          ExecutorService tableProcessorExecutor = null;
          final FieldList localResultSchemaFields = resultSchemaFields;
          final List>> apiFutures = new ArrayList<>();
          final List> processingFutures = new ArrayList<>();

          try {
            List datasetsToScan =
                findMatchingBigQueryObjects(
                    "Dataset",
                    () ->
                        bigquery.listDatasets(
                            catalogParam, DatasetListOption.pageSize(DEFAULT_PAGE_SIZE)),
                    (name) -> bigquery.getDataset(DatasetId.of(catalogParam, name)),
                    (ds) -> ds.getDatasetId().getDataset(),
                    schemaParam,
                    schemaRegex,
                    LOG);

            if (datasetsToScan.isEmpty()) {
              // finally-block still signals end-of-data so the ResultSet terminates cleanly.
              LOG.info("Fetcher thread found no matching datasets. Returning empty resultset.");
              return;
            }

            apiExecutor = Executors.newFixedThreadPool(API_EXECUTOR_POOL_SIZE);
            tableProcessorExecutor = Executors.newFixedThreadPool(this.metadataFetchThreadCount);

            LOG.fine("Submitting parallel findMatchingTables tasks...");
            for (Dataset dataset : datasetsToScan) {
              if (Thread.currentThread().isInterrupted()) {
                LOG.warning("Table fetcher interrupted during dataset iteration.");
                break;
              }

              final DatasetId currentDatasetId = dataset.getDatasetId();
              Callable> apiCallable =
                  () ->
                      findMatchingBigQueryObjects(
                          "Table",
                          () ->
                              bigquery.listTables(
                                  currentDatasetId, TableListOption.pageSize(DEFAULT_PAGE_SIZE)),
                          (name) ->
                              bigquery.getTable(
                                  TableId.of(
                                      currentDatasetId.getProject(),
                                      currentDatasetId.getDataset(),
                                      name)),
                          (tbl) -> tbl.getTableId().getTable(),
                          tableNamePattern,
                          tableNameRegex,
                          LOG);
              Future> apiFuture = apiExecutor.submit(apiCallable);
              apiFutures.add(apiFuture);
            }
            LOG.fine("Finished submitting " + apiFutures.size() + " findMatchingTables tasks.");
            // No further submissions to apiExecutor; allow it to drain.
            apiExecutor.shutdown();

            LOG.fine("Processing results from findMatchingTables tasks...");
            for (Future> apiFuture : apiFutures) {
              if (Thread.currentThread().isInterrupted()) {
                LOG.warning("Table fetcher interrupted while processing API futures.");
                break;
              }
              try {
                List tablesResult = apiFuture.get();
                if (tablesResult != null) {
                  for (Table table : tablesResult) {
                    if (Thread.currentThread().isInterrupted()) break;

                    final Table currentTable = table;
                    Future processFuture =
                        tableProcessorExecutor.submit(
                            () ->
                                processTableInfo(
                                    currentTable,
                                    requestedTypes,
                                    collectedResults,
                                    localResultSchemaFields));
                    processingFutures.add(processFuture);
                  }
                }
              } catch (InterruptedException e) {
                // Restore the interrupt flag so later isInterrupted() checks see it.
                Thread.currentThread().interrupt();
                LOG.warning("Fetcher thread interrupted while waiting for API future result.");
                break;
              } catch (ExecutionException e) {
                LOG.warning(
                    "Error executing findMatchingTables task: "
                        + e.getMessage()
                        + ". Cause: "
                        + e.getCause());
              } catch (CancellationException e) {
                LOG.warning("A findMatchingTables task was cancelled.");
              }
            }

            LOG.fine(
                "Finished submitting " + processingFutures.size() + " processTableInfo tasks.");

            if (Thread.currentThread().isInterrupted()) {
              LOG.warning(
                  "Fetcher interrupted before waiting for processing tasks; cancelling remaining.");
              processingFutures.forEach(f -> f.cancel(true));
            } else {
              LOG.fine("Waiting for processTableInfo tasks to complete...");
              waitForTasksCompletion(processingFutures);
              LOG.fine("All processTableInfo tasks completed.");
            }

            if (!Thread.currentThread().isInterrupted()) {
              Comparator comparator =
                  defineGetTablesComparator(localResultSchemaFields);
              sortResults(collectedResults, comparator, "getTables", LOG);
            }

            if (!Thread.currentThread().isInterrupted()) {
              populateQueue(collectedResults, queue, localResultSchemaFields);
            }

          } catch (Throwable t) {
            LOG.severe("Unexpected error in table fetcher runnable: " + t.getMessage());
            apiFutures.forEach(f -> f.cancel(true));
            processingFutures.forEach(f -> f.cancel(true));
          } finally {
            // Always signal end-of-data so the consuming ResultSet does not block forever.
            signalEndOfData(queue, localResultSchemaFields);
            shutdownExecutor(apiExecutor);
            shutdownExecutor(tableProcessorExecutor);
            LOG.info("Table fetcher thread finished.");
          }
        };

    Thread fetcherThread = new Thread(tableFetcher, "getTables-fetcher-" + effectiveCatalog);
    BigQueryJsonResultSet resultSet =
        BigQueryJsonResultSet.of(
            resultSchema, -1, queue, this.statement, new Thread[] {fetcherThread});

    fetcherThread.start();
    LOG.info("Started background thread for getTables");
    return resultSet;
  }

  /**
   * Builds the 10-column result schema for {@link java.sql.DatabaseMetaData#getTables},
   * in the column order mandated by the JDBC specification.
   */
  Schema defineGetTablesSchema() {
    List fields = new ArrayList<>(10);
    fields.add(
        Field.newBuilder("TABLE_CAT", StandardSQLTypeName.STRING)
            .setMode(Field.Mode.NULLABLE)
            .build());
    fields.add(
        Field.newBuilder("TABLE_SCHEM", StandardSQLTypeName.STRING)
            .setMode(Field.Mode.NULLABLE)
            .build());
    fields.add(
        Field.newBuilder("TABLE_NAME", StandardSQLTypeName.STRING)
            .setMode(Field.Mode.REQUIRED)
            .build());
    fields.add(
        Field.newBuilder("TABLE_TYPE", StandardSQLTypeName.STRING)
            .setMode(Field.Mode.REQUIRED)
            .build());
    fields.add(
        Field.newBuilder("REMARKS", StandardSQLTypeName.STRING)
            .setMode(Field.Mode.NULLABLE)
            .build());
    fields.add(
        Field.newBuilder("TYPE_CAT", StandardSQLTypeName.STRING)
            .setMode(Field.Mode.NULLABLE)
            .build());
    fields.add(
        Field.newBuilder("TYPE_SCHEM", StandardSQLTypeName.STRING)
            .setMode(Field.Mode.NULLABLE)
            .build());
    fields.add(
        Field.newBuilder("TYPE_NAME", StandardSQLTypeName.STRING)
            .setMode(Field.Mode.NULLABLE)
            .build());
    fields.add(
        Field.newBuilder("SELF_REFERENCING_COL_NAME", StandardSQLTypeName.STRING)
            .setMode(Field.Mode.NULLABLE)
            .build());
    fields.add(
        Field.newBuilder("REF_GENERATION", StandardSQLTypeName.STRING)
            .setMode(Field.Mode.NULLABLE)
            .build());
    return Schema.of(fields);
  }

  /**
   * Converts one BigQuery {@code Table} into a getTables row and appends it to
   * {@code collectedResults}, unless its type is excluded by {@code requestedTypes}
   * (null means "all types"). Errors are logged and the table is skipped.
   */
  void processTableInfo(
      Table table,
      Set requestedTypes,
      List collectedResults,
      FieldList resultSchemaFields) {

    TableId tableId = table.getTableId();
    LOG.fine("Processing table info for: " + tableId);

    try {
      String catalogName = tableId.getProject();
      String schemaName = tableId.getDataset();
      String tableName = tableId.getTable();
      TableDefinition definition = table.getDefinition();
      String bqTableType = definition.getType().toString();
      String remarks = table.getDescription();

      if (requestedTypes != null && !requestedTypes.contains(bqTableType)) {
        LOG.finer(
            String.format(
                "Skipping table %s as its type '%s' is not in the requested types %s",
                tableId, bqTableType, requestedTypes));
        return;
      }

      List values = new ArrayList<>(resultSchemaFields.size());
      values.add(createStringFieldValue(catalogName)); // 1. TABLE_CAT
      values.add(createStringFieldValue(schemaName)); // 2. TABLE_SCHEM
      values.add(createStringFieldValue(tableName)); // 3. TABLE_NAME
      values.add(createStringFieldValue(bqTableType)); // 4. TABLE_TYPE
      values.add(createStringFieldValue(remarks)); // 5. REMARKS
      values.add(createNullFieldValue()); // 6. TYPE_CAT (always null)
      values.add(createNullFieldValue()); // 7. TYPE_SCHEM (always null)
      values.add(createNullFieldValue()); // 8. TYPE_NAME (always null)
      values.add(createNullFieldValue()); // 9. SELF_REFERENCING_COL_NAME (always null)
      values.add(createNullFieldValue()); // 10. REF_GENERATION (always null)

      FieldValueList rowFvl = FieldValueList.of(values, resultSchemaFields);
      collectedResults.add(rowFvl);

      LOG.fine("Processed and added table info row for: " + tableId);
    } catch (Exception e) {
      LOG.warning(
          String.format(
              "Error processing table info for %s: %s. Skipping this table.",
              tableId, e.getMessage()));
    }
  }

  /**
   * Builds the JDBC-mandated sort order for getTables rows:
   * TABLE_TYPE, TABLE_CAT, TABLE_SCHEM, then TABLE_NAME (case-sensitive, nulls first).
   */
  Comparator defineGetTablesComparator(FieldList resultSchemaFields) {
    final int TABLE_TYPE_IDX = resultSchemaFields.getIndex("TABLE_TYPE");
    final int TABLE_CAT_IDX = resultSchemaFields.getIndex("TABLE_CAT");
    final int TABLE_SCHEM_IDX = resultSchemaFields.getIndex("TABLE_SCHEM");
    final int TABLE_NAME_IDX = resultSchemaFields.getIndex("TABLE_NAME");
    return Comparator.comparing(
            (FieldValueList fvl) -> getStringValueOrNull(fvl, TABLE_TYPE_IDX),
            Comparator.nullsFirst(String::compareTo))
        .thenComparing(
            (FieldValueList fvl) -> getStringValueOrNull(fvl, TABLE_CAT_IDX),
            Comparator.nullsFirst(String::compareTo))
        .thenComparing(
            (FieldValueList fvl) -> getStringValueOrNull(fvl, TABLE_SCHEM_IDX),
            Comparator.nullsFirst(String::compareTo))
        .thenComparing(
            (FieldValueList fvl) -> getStringValueOrNull(fvl, TABLE_NAME_IDX),
            Comparator.nullsFirst(String::compareTo));
  }

  /** No-arg getSchemas: delegates to getSchemas(null, null) (all catalogs/schemas). */
  @Override
  public ResultSet getSchemas() {
    LOG.info("getSchemas() called");

    return getSchemas(null, null);
  }

  /**
   * {@link java.sql.DatabaseMetaData#getCatalogs}: returns one TABLE_CAT row per accessible
   * BigQuery project. Fully materialized up-front — no background fetcher thread.
   */
  @Override
  public ResultSet getCatalogs() {
    LOG.info("getCatalogs() called");

    final List accessibleCatalogs = getAccessibleCatalogNames();
    final Schema catalogsSchema = defineGetCatalogsSchema();
    final FieldList schemaFields = catalogsSchema.getFields();
    final List catalogRows =
        prepareGetCatalogsRows(schemaFields, accessibleCatalogs);

    // +1 leaves room for the end-of-data sentinel; minimum capacity 1 when there are no rows.
    final BlockingQueue queue =
        new LinkedBlockingQueue<>(catalogRows.isEmpty() ? 1 : catalogRows.size() + 1);

    populateQueue(catalogRows, queue, schemaFields);
    signalEndOfData(queue, schemaFields);

    return BigQueryJsonResultSet.of(
        catalogsSchema, catalogRows.size(), queue, this.statement, new Thread[0]);
  }

  /** Single-column (TABLE_CAT) schema for getCatalogs. */
  Schema defineGetCatalogsSchema() {
    return Schema.of(
        Field.newBuilder("TABLE_CAT", StandardSQLTypeName.STRING).setMode(Mode.REQUIRED).build());
  }

  /** Wraps each accessible catalog (project) name into a one-column row. */
  List prepareGetCatalogsRows(
      FieldList schemaFields, List accessibleCatalogs) {
    List catalogRows = new ArrayList<>();
    for (String catalogName : accessibleCatalogs) {
      FieldValue fieldValue = FieldValue.of(FieldValue.Attribute.PRIMITIVE, catalogName);
      catalogRows.add(FieldValueList.of(Collections.singletonList(fieldValue), schemaFields));
    }
    return catalogRows;
  }

  /**
   * {@link java.sql.DatabaseMetaData#getTableTypes}: returns the fixed set of table types
   * this driver reports. Fully materialized — no background fetcher thread.
   */
  @Override
  public ResultSet getTableTypes() {
    LOG.info("getTableTypes() called");

    final Schema tableTypesSchema = defineGetTableTypesSchema();
    final List tableTypeRows = prepareGetTableTypesRows(tableTypesSchema);

    BlockingQueue queue =
        new LinkedBlockingQueue<>(tableTypeRows.size() + 1);

    populateQueue(tableTypeRows, queue, tableTypesSchema.getFields());
    signalEndOfData(queue, tableTypesSchema.getFields());

    return BigQueryJsonResultSet.of(
        tableTypesSchema, tableTypeRows.size(), queue, this.statement, new Thread[0]);
  }

  /** Single-column (TABLE_TYPE) schema for getTableTypes. */
  static Schema defineGetTableTypesSchema() {
    return Schema.of(
        Field.newBuilder("TABLE_TYPE", StandardSQLTypeName.STRING)
            .setMode(Field.Mode.REQUIRED)
            .build());
  }

  /** Builds one row per supported table type, in alphabetical order per the JDBC contract. */
  static List prepareGetTableTypesRows(Schema schema) {
    final String[] tableTypes =
        {"EXTERNAL", "MATERIALIZED VIEW", "SNAPSHOT", "TABLE", "VIEW"};
    List rows = new ArrayList<>(tableTypes.length);
    FieldList schemaFields = schema.getFields();

    for (String typeName : tableTypes) {
      FieldValue fieldValue = FieldValue.of(FieldValue.Attribute.PRIMITIVE, typeName);
      rows.add(FieldValueList.of(Collections.singletonList(fieldValue), schemaFields));
    }
    return rows;
  }

  /**
   * {@link java.sql.DatabaseMetaData#getColumns} implementation: streams matching columns into a
   * {@code BigQueryJsonResultSet} backed by a queue filled by a background fetcher thread.
   *
   * <p>Empty (non-null) patterns or a null/empty effective catalog short-circuit to an empty
   * result set, mirroring getTables.
   */
  @Override
  public ResultSet getColumns(
      String catalog, String schemaPattern, String tableNamePattern, String columnNamePattern) {

    Tuple effectiveIdentifiers =
        determineEffectiveCatalogAndSchema(catalog, schemaPattern);
    String effectiveCatalog = effectiveIdentifiers.x();
    String effectiveSchemaPattern = effectiveIdentifiers.y();

    if ((effectiveCatalog == null || effectiveCatalog.isEmpty())
        || (effectiveSchemaPattern != null && effectiveSchemaPattern.isEmpty())
        || (tableNamePattern != null && tableNamePattern.isEmpty())
        || (columnNamePattern != null && columnNamePattern.isEmpty())) {
      LOG.warning(
          "Returning empty ResultSet as one or more patterns are empty or catalog is null.");
      return new BigQueryJsonResultSet();
    }

    LOG.info(
        String.format(
            "getColumns called for catalog: %s, schemaPattern: %s, tableNamePattern: %s,"
                + " columnNamePattern: %s",
            effectiveCatalog, effectiveSchemaPattern, tableNamePattern, columnNamePattern));

    Pattern schemaRegex = compileSqlLikePattern(effectiveSchemaPattern);
    Pattern tableNameRegex = compileSqlLikePattern(tableNamePattern);
    Pattern columnNameRegex = compileSqlLikePattern(columnNamePattern);

    final Schema resultSchema = defineGetColumnsSchema();
    final FieldList resultSchemaFields = resultSchema.getFields();
    final BlockingQueue queue =
        new LinkedBlockingQueue<>(DEFAULT_QUEUE_CAPACITY);
    final List collectedResults = Collections.synchronizedList(new ArrayList<>());
    final String catalogParam = effectiveCatalog;
    final String schemaParam = effectiveSchemaPattern;

    // Background fetcher: datasets are scanned sequentially; per-table column extraction is
    // parallelized on columnExecutor (contrast with getTables, which also parallelizes listing).
    Runnable columnFetcher =
        () -> {
          ExecutorService columnExecutor = null;
          final List> taskFutures = new ArrayList<>();
          final FieldList localResultSchemaFields = resultSchemaFields;

          try {
            List datasetsToScan =
                findMatchingBigQueryObjects(
                    "Dataset",
                    () ->
                        bigquery.listDatasets(
                            catalogParam, DatasetListOption.pageSize(DEFAULT_PAGE_SIZE)),
                    (name) -> bigquery.getDataset(DatasetId.of(catalogParam, name)),
                    (ds) -> ds.getDatasetId().getDataset(),
                    schemaParam,
                    schemaRegex,
                    LOG);

            if (datasetsToScan.isEmpty()) {
              LOG.info("Fetcher thread found no matching datasets. Returning empty resultset.");
              return;
            }

            columnExecutor = Executors.newFixedThreadPool(this.metadataFetchThreadCount);

            for (Dataset dataset : datasetsToScan) {
              if (Thread.currentThread().isInterrupted()) {
                LOG.warning("Fetcher interrupted during dataset iteration.");
                break;
              }

              DatasetId datasetId = dataset.getDatasetId();
              LOG.info("Processing dataset: " + datasetId.getDataset());

              List tablesToScan =
                  findMatchingBigQueryObjects(
                      "Table",
                      () ->
                          bigquery.listTables(
                              datasetId, TableListOption.pageSize(DEFAULT_PAGE_SIZE)),
                      (name) ->
                          bigquery.getTable(
                              TableId.of(datasetId.getProject(), datasetId.getDataset(), name)),
                      (tbl) -> tbl.getTableId().getTable(),
                      tableNamePattern,
                      tableNameRegex,
                      LOG);

              for (Table table : tablesToScan) {
                if (Thread.currentThread().isInterrupted()) {
                  LOG.warning(
                      "Fetcher interrupted during table iteration for dataset "
                          + datasetId.getDataset());
                  break;
                }

                TableId tableId = table.getTableId();
                LOG.fine("Submitting task for table: " + tableId);
                final Table finalTable = table;
                Future future =
                    columnExecutor.submit(
                        () ->
                            processTableColumns(
                                finalTable,
                                columnNameRegex,
                                collectedResults,
                                localResultSchemaFields));
                taskFutures.add(future);
              }
              if (Thread.currentThread().isInterrupted()) break;
            }

            waitForTasksCompletion(taskFutures);

            if (!Thread.currentThread().isInterrupted()) {
              Comparator comparator =
                  defineGetColumnsComparator(localResultSchemaFields);
              sortResults(collectedResults, comparator, "getColumns", LOG);
            }

            if (!Thread.currentThread().isInterrupted()) {
              populateQueue(collectedResults, queue, localResultSchemaFields);
            }

          } catch (Throwable t) {
            LOG.severe("Unexpected error in column fetcher runnable: " + t.getMessage());
            taskFutures.forEach(f -> f.cancel(true));
          } finally {
            // Always signal end-of-data so the consuming ResultSet does not block forever.
            signalEndOfData(queue, localResultSchemaFields);
            shutdownExecutor(columnExecutor);
            LOG.info("Column fetcher thread finished.");
          }
        };

    Thread fetcherThread = new Thread(columnFetcher, "getColumns-fetcher-" + effectiveCatalog);
    // NOTE(review): statement argument is null here, whereas getTables/getCatalogs pass
    // this.statement — confirm whether this asymmetry is intentional.
    BigQueryJsonResultSet resultSet =
        BigQueryJsonResultSet.of(resultSchema, -1, queue, null, new Thread[] {fetcherThread});

    fetcherThread.start();
    LOG.info("Started background thread for getColumns");
    return resultSet;
  }

  /**
   * Converts every schema field of {@code table} into getColumns rows, applying the
   * column-name pattern filter; runs on a worker thread and checks the interrupt flag
   * between columns. Fetches the full table when the listed object lacks a schema.
   */
  private void processTableColumns(
      Table table,
      Pattern columnNameRegex,
      List
          collectedResults,
      FieldList resultSchemaFields) {
    TableId tableId = table.getTableId();
    LOG.fine("Processing columns for table: " + tableId);
    TableDefinition definition = table.getDefinition();
    Schema tableSchema = (definition != null) ? definition.getSchema() : null;

    try {
      if (tableSchema == null) {
        // Listed table objects may omit the schema; re-fetch the full table to get it.
        LOG.fine(
            "Schema not included in table object for "
                + tableId
                + ", fetching full table details...");
        Table fullTable = bigquery.getTable(tableId);
        if (fullTable != null) {
          definition = fullTable.getDefinition();
          tableSchema = (definition != null) ? definition.getSchema() : null;
        } else {
          LOG.warning(
              "Table " + tableId + " not found when fetching full details for columns. Skipping.");
          return;
        }
      }

      if (tableSchema == null
          || tableSchema.getFields() == null
          || tableSchema.getFields().isEmpty()) {
        LOG.warning(
            String.format(
                "Schema not found or fields are null for table %s (Type: %s). Skipping columns.",
                tableId, definition.getType()));
        return;
      }

      FieldList fields = tableSchema.getFields();
      String catalogName = tableId.getProject();
      String schemaName = tableId.getDataset();
      String tableName = tableId.getTable();

      for (int i = 0; i < fields.size(); i++) {
        if (Thread.currentThread().isInterrupted()) {
          LOG.warning("Task for table " + tableId + " interrupted during column iteration.");
          break;
        }
        Field field = fields.get(i);
        String currentColumnName = field.getName();
        // null regex means no columnNamePattern was supplied -> include every column.
        if (columnNameRegex != null && !columnNameRegex.matcher(currentColumnName).matches())
          continue;
        List values = createColumnRow(catalogName, schemaName, tableName, field, i + 1);
        FieldValueList rowFvl = FieldValueList.of(values, resultSchemaFields);
        collectedResults.add(rowFvl);
      }
      LOG.fine("Finished processing columns for table: " + tableId);
    } catch (BigQueryException e) {
      LOG.warning(
          String.format(
              "BigQueryException processing table %s: %s (Code: %d)",
              tableId, e.getMessage(), e.getCode()));
    } catch (Exception e) {
      LOG.severe(
          String.format("Unexpected error processing table %s: %s", tableId, e.getMessage()));
    }
  }

  /**
   * Builds the 24-column result schema for {@link java.sql.DatabaseMetaData#getColumns},
   * in the column order mandated by the JDBC specification.
   */
  private Schema defineGetColumnsSchema() {
    List fields = new ArrayList<>(24);
    fields.add(
        Field.newBuilder("TABLE_CAT", StandardSQLTypeName.STRING)
            .setMode(Field.Mode.NULLABLE)
            .build()); // 1
    fields.add(
        Field.newBuilder("TABLE_SCHEM", StandardSQLTypeName.STRING)
            .setMode(Field.Mode.NULLABLE)
            .build()); // 2
    fields.add(
        Field.newBuilder("TABLE_NAME", StandardSQLTypeName.STRING)
            .setMode(Field.Mode.REQUIRED)
            .build()); // 3
    fields.add(
        Field.newBuilder("COLUMN_NAME", StandardSQLTypeName.STRING)
            .setMode(Field.Mode.REQUIRED)
            .build()); // 4
    fields.add(
        Field.newBuilder("DATA_TYPE", StandardSQLTypeName.INT64)
            .setMode(Field.Mode.REQUIRED)
            .build()); // 5
    fields.add(
        Field.newBuilder("TYPE_NAME", StandardSQLTypeName.STRING)
            .setMode(Field.Mode.REQUIRED)
            .build()); // 6
    fields.add(
        Field.newBuilder("COLUMN_SIZE", StandardSQLTypeName.INT64)
            .setMode(Field.Mode.NULLABLE)
            .build()); // 7
    fields.add(
        Field.newBuilder("BUFFER_LENGTH", StandardSQLTypeName.INT64)
            .setMode(Field.Mode.NULLABLE)
            .build()); // 8
    fields.add(
        Field.newBuilder("DECIMAL_DIGITS", StandardSQLTypeName.INT64)
            .setMode(Field.Mode.NULLABLE)
            .build()); // 9
    fields.add(
        Field.newBuilder("NUM_PREC_RADIX", StandardSQLTypeName.INT64)
            .setMode(Field.Mode.NULLABLE)
            .build()); // 10
    fields.add(
        Field.newBuilder("NULLABLE", StandardSQLTypeName.INT64)
            .setMode(Field.Mode.REQUIRED)
            .build()); // 11
    fields.add(
        Field.newBuilder("REMARKS", StandardSQLTypeName.STRING)
            .setMode(Field.Mode.NULLABLE)
            .build()); // 12
    fields.add(
        Field.newBuilder("COLUMN_DEF", StandardSQLTypeName.STRING)
            .setMode(Field.Mode.NULLABLE)
            .build()); // 13
    fields.add(
        Field.newBuilder("SQL_DATA_TYPE", StandardSQLTypeName.INT64)
            .setMode(Field.Mode.NULLABLE)
            .build()); // 14
    fields.add(
        Field.newBuilder("SQL_DATETIME_SUB", StandardSQLTypeName.INT64)
            .setMode(Field.Mode.NULLABLE)
            .build()); // 15
    fields.add(
        Field.newBuilder("CHAR_OCTET_LENGTH", StandardSQLTypeName.INT64)
            .setMode(Field.Mode.NULLABLE)
            .build()); // 16
    fields.add(
        Field.newBuilder("ORDINAL_POSITION", StandardSQLTypeName.INT64)
            .setMode(Field.Mode.REQUIRED)
            .build()); // 17
    fields.add(
        Field.newBuilder("IS_NULLABLE", StandardSQLTypeName.STRING)
            .setMode(Field.Mode.REQUIRED)
            .build()); // 18
    fields.add(
        Field.newBuilder("SCOPE_CATALOG", StandardSQLTypeName.STRING)
            .setMode(Field.Mode.NULLABLE)
            .build()); // 19
    fields.add(
        Field.newBuilder("SCOPE_SCHEMA", StandardSQLTypeName.STRING)
            .setMode(Field.Mode.NULLABLE)
            .build()); // 20
    fields.add(
        Field.newBuilder("SCOPE_TABLE", StandardSQLTypeName.STRING)
            .setMode(Field.Mode.NULLABLE)
            .build()); // 21
    fields.add(
        Field.newBuilder("SOURCE_DATA_TYPE", StandardSQLTypeName.INT64)
            .setMode(Field.Mode.NULLABLE)
            .build()); // 22
    fields.add(
        Field.newBuilder("IS_AUTOINCREMENT", StandardSQLTypeName.STRING)
            .setMode(Field.Mode.REQUIRED)
            .build()); // 23
    fields.add(
        Field.newBuilder("IS_GENERATEDCOLUMN", StandardSQLTypeName.STRING)
            .setMode(Field.Mode.REQUIRED)
            .build()); // 24
    return Schema.of(fields);
  }

  /**
   * Builds one 24-value getColumns row for a single table field; value order must match
   * {@code defineGetColumnsSchema()}. A null field mode is treated as NULLABLE.
   * (Method continues beyond this view.)
   */
  List createColumnRow(
      String catalog, String schemaName, String tableName, Field field, int ordinalPosition) {
    List values = new ArrayList<>(24);
    Field.Mode mode = (field.getMode() == null) ? Field.Mode.NULLABLE : field.getMode();
    ColumnTypeInfo typeInfo = mapBigQueryTypeToJdbc(field);

    values.add(createStringFieldValue(catalog)); // 1. TABLE_CAT
    values.add(createStringFieldValue(schemaName)); // 2. TABLE_SCHEM
    values.add(createStringFieldValue(tableName)); // 3. TABLE_NAME
    values.add(createStringFieldValue(field.getName())); // 4. COLUMN_NAME
    values.add(createLongFieldValue((long) typeInfo.jdbcType)); // 5. DATA_TYPE
    values.add(createStringFieldValue(typeInfo.typeName)); // 6. TYPE_NAME
    values.add(
        createLongFieldValue(
            typeInfo.columnSize == null
                ? null
                : typeInfo.columnSize.longValue())); // 7. COLUMN_SIZE
    values.add(createNullFieldValue()); // 8. BUFFER_LENGTH
    values.add(
        createLongFieldValue(
            typeInfo.decimalDigits == null
                ? null
                : typeInfo.decimalDigits.longValue())); // 9. DECIMAL_DIGITS
    values.add(
        createLongFieldValue(
            typeInfo.numPrecRadix == null
                ? null
                : typeInfo.numPrecRadix.longValue())); // 10. NUM_PREC_RADIX
    int nullable =
        (mode == Field.Mode.REQUIRED)
            ? DatabaseMetaData.columnNoNulls
            : DatabaseMetaData.columnNullable;
    values.add(createLongFieldValue((long) nullable)); // 11. NULLABLE
    values.add(createStringFieldValue(field.getDescription())); // 12. REMARKS
    values.add(createNullFieldValue()); // 13. COLUMN_DEF
    values.add(createNullFieldValue()); // 14. SQL_DATA_TYPE
    values.add(createNullFieldValue()); // 15. SQL_DATETIME_SUB
    values.add(createNullFieldValue()); // 16. CHAR_OCTET_LENGTH
    values.add(createLongFieldValue((long) ordinalPosition)); // 17. ORDINAL_POSITION
    String isNullable = "";
    switch (mode) {
      case REQUIRED:
        isNullable = "NO";
        break;
      case NULLABLE:
      case REPEATED:
        isNullable = "YES";
        break;
    }
    values.add(createStringFieldValue(isNullable)); // 18. IS_NULLABLE
    values.add(createNullFieldValue()); // 19. SCOPE_CATALOG
    values.add(createNullFieldValue()); // 20. SCOPE_SCHEMA
    values.add(createNullFieldValue()); // 21. SCOPE_TABLE
    values.add(createNullFieldValue()); // 22. SOURCE_DATA_TYPE
    values.add(createStringFieldValue("NO")); // 23. IS_AUTOINCREMENT
    values.add(createStringFieldValue("NO")); // 24.
IS_GENERATEDCOLUMN + + return values; + } + + static class ColumnTypeInfo { + final int jdbcType; + final String typeName; + final Integer columnSize; + final Integer decimalDigits; + final Integer numPrecRadix; + + ColumnTypeInfo( + int jdbcType, + String typeName, + Integer columnSize, + Integer decimalDigits, + Integer numPrecRadix) { + this.jdbcType = jdbcType; + this.typeName = typeName; + this.columnSize = columnSize; + this.decimalDigits = decimalDigits; + this.numPrecRadix = numPrecRadix; + } + } + + ColumnTypeInfo mapBigQueryTypeToJdbc(Field field) { + Mode mode = (field.getMode() == null) ? Mode.NULLABLE : field.getMode(); + if (mode == Mode.REPEATED) { + return new ColumnTypeInfo(Types.ARRAY, "ARRAY", null, null, null); + } + + StandardSQLTypeName bqType = null; + if (field.getType() != null && field.getType().getStandardType() != null) { + bqType = field.getType().getStandardType(); + } + return getColumnTypeInfoForSqlType(bqType); + } + + private Comparator defineGetColumnsComparator(FieldList resultSchemaFields) { + final int TABLE_CAT_IDX = resultSchemaFields.getIndex("TABLE_CAT"); + final int TABLE_SCHEM_IDX = resultSchemaFields.getIndex("TABLE_SCHEM"); + final int TABLE_NAME_IDX = resultSchemaFields.getIndex("TABLE_NAME"); + final int ORDINAL_POS_IDX = resultSchemaFields.getIndex("ORDINAL_POSITION"); + return Comparator.comparing( + (FieldValueList fvl) -> getStringValueOrNull(fvl, TABLE_CAT_IDX), + Comparator.nullsFirst(String::compareTo)) + .thenComparing( + (FieldValueList fvl) -> getStringValueOrNull(fvl, TABLE_SCHEM_IDX), + Comparator.nullsFirst(String::compareTo)) + .thenComparing( + (FieldValueList fvl) -> getStringValueOrNull(fvl, TABLE_NAME_IDX), + Comparator.nullsFirst(String::compareTo)) + .thenComparing( + (FieldValueList fvl) -> getLongValueOrNull(fvl, ORDINAL_POS_IDX), + Comparator.nullsFirst(Long::compareTo)); + } + + @Override + public ResultSet getColumnPrivileges( + String catalog, String schema, String table, String 
columnNamePattern) { + LOG.info( + String.format( + "getColumnPrivileges called for catalog: %s, schema: %s, table: %s, columnNamePattern:" + + " %s. BigQuery IAM model differs from SQL privileges; returning empty ResultSet.", + catalog, schema, table, columnNamePattern)); + + final Schema resultSchema = defineGetColumnPrivilegesSchema(); + final FieldList resultSchemaFields = resultSchema.getFields(); + final BlockingQueue queue = new LinkedBlockingQueue<>(1); + signalEndOfData(queue, resultSchemaFields); + + return BigQueryJsonResultSet.of(resultSchema, 0, queue, this.statement, null); + } + + Schema defineGetColumnPrivilegesSchema() { + List fields = defineBasePrivilegeFields(); + + Field columnNameField = + Field.newBuilder("COLUMN_NAME", StandardSQLTypeName.STRING) + .setMode(Field.Mode.REQUIRED) + .build(); + fields.add(3, columnNameField); + + return Schema.of(fields); + } + + @Override + public ResultSet getTablePrivileges( + String catalog, String schemaPattern, String tableNamePattern) { + LOG.info( + String.format( + "getTablePrivileges called for catalog: %s, schemaPattern: %s, tableNamePattern: %s. " + + "BigQuery IAM model differs from SQL privileges; returning empty ResultSet.", + catalog, schemaPattern, tableNamePattern)); + + final Schema resultSchema = defineGetTablePrivilegesSchema(); + final FieldList resultSchemaFields = resultSchema.getFields(); + final BlockingQueue queue = new LinkedBlockingQueue<>(1); + signalEndOfData(queue, resultSchemaFields); + + return BigQueryJsonResultSet.of(resultSchema, 0, queue, this.statement, null); + } + + Schema defineGetTablePrivilegesSchema() { + List fields = defineBasePrivilegeFields(); + return Schema.of(fields); + } + + @Override + public ResultSet getBestRowIdentifier( + String catalog, String schema, String table, int scope, boolean nullable) { + LOG.info( + String.format( + "getBestRowIdentifier called for catalog: %s, schema: %s, table: %s, scope: %d," + + " nullable: %s. 
BigQuery does not support best row identifiers; returning empty" + + " ResultSet.", + catalog, schema, table, scope, nullable)); + + final Schema resultSchema = defineGetBestRowIdentifierSchema(); + final FieldList resultSchemaFields = resultSchema.getFields(); + final BlockingQueue queue = new LinkedBlockingQueue<>(1); + signalEndOfData(queue, resultSchemaFields); + + return BigQueryJsonResultSet.of(resultSchema, 0, queue, this.statement, null); + } + + Schema defineGetBestRowIdentifierSchema() { + List fields = new ArrayList<>(8); + fields.add( + Field.newBuilder("SCOPE", StandardSQLTypeName.INT64).setMode(Field.Mode.REQUIRED).build()); + fields.add( + Field.newBuilder("COLUMN_NAME", StandardSQLTypeName.STRING) + .setMode(Field.Mode.REQUIRED) + .build()); + fields.add( + Field.newBuilder("DATA_TYPE", StandardSQLTypeName.INT64) + .setMode(Field.Mode.REQUIRED) + .build()); + fields.add( + Field.newBuilder("TYPE_NAME", StandardSQLTypeName.STRING) + .setMode(Field.Mode.REQUIRED) + .build()); + fields.add( + Field.newBuilder("COLUMN_SIZE", StandardSQLTypeName.INT64) + .setMode(Field.Mode.NULLABLE) + .build()); + fields.add( + Field.newBuilder("BUFFER_LENGTH", StandardSQLTypeName.INT64) + .setMode(Field.Mode.NULLABLE) + .build()); + fields.add( + Field.newBuilder("DECIMAL_DIGITS", StandardSQLTypeName.INT64) + .setMode(Field.Mode.NULLABLE) + .build()); + fields.add( + Field.newBuilder("PSEUDO_COLUMN", StandardSQLTypeName.INT64) + .setMode(Field.Mode.REQUIRED) + .build()); + return Schema.of(fields); + } + + @Override + public ResultSet getVersionColumns(String catalog, String schema, String table) { + LOG.info( + String.format( + "getVersionColumns called for catalog: %s, schema: %s, table: %s. 
" + + "Automatic version columns not supported by BigQuery; returning empty ResultSet.", + catalog, schema, table)); + + final Schema resultSchema = defineGetVersionColumnsSchema(); + final FieldList resultSchemaFields = resultSchema.getFields(); + final BlockingQueue queue = new LinkedBlockingQueue<>(1); + signalEndOfData(queue, resultSchemaFields); + + return BigQueryJsonResultSet.of(resultSchema, 0, queue, this.statement, null); + } + + Schema defineGetVersionColumnsSchema() { + List fields = new ArrayList<>(8); + fields.add( + Field.newBuilder("SCOPE", StandardSQLTypeName.INT64).setMode(Field.Mode.NULLABLE).build()); + fields.add( + Field.newBuilder("COLUMN_NAME", StandardSQLTypeName.STRING) + .setMode(Field.Mode.REQUIRED) + .build()); + fields.add( + Field.newBuilder("DATA_TYPE", StandardSQLTypeName.INT64) + .setMode(Field.Mode.REQUIRED) + .build()); + fields.add( + Field.newBuilder("TYPE_NAME", StandardSQLTypeName.STRING) + .setMode(Field.Mode.REQUIRED) + .build()); + fields.add( + Field.newBuilder("COLUMN_SIZE", StandardSQLTypeName.INT64) + .setMode(Field.Mode.NULLABLE) + .build()); + fields.add( + Field.newBuilder("BUFFER_LENGTH", StandardSQLTypeName.INT64) + .setMode(Field.Mode.REQUIRED) + .build()); + fields.add( + Field.newBuilder("DECIMAL_DIGITS", StandardSQLTypeName.INT64) + .setMode(Field.Mode.NULLABLE) + .build()); + fields.add( + Field.newBuilder("PSEUDO_COLUMN", StandardSQLTypeName.INT64) + .setMode(Field.Mode.REQUIRED) + .build()); + return Schema.of(fields); + } + + @Override + public ResultSet getPrimaryKeys(String catalog, String schema, String table) throws SQLException { + String sql = readSqlFromFile(GET_PRIMARY_KEYS_SQL); + try { + String formattedSql = replaceSqlParameters(sql, catalog, schema, table); + return this.statement.executeQuery(formattedSql); + } catch (SQLException e) { + throw new BigQueryJdbcException(e); + } + } + + @Override + public ResultSet getImportedKeys(String catalog, String schema, String table) + throws 
SQLException { + String sql = readSqlFromFile(GET_IMPORTED_KEYS_SQL); + try { + String formattedSql = replaceSqlParameters(sql, catalog, schema, table); + return this.statement.executeQuery(formattedSql); + } catch (SQLException e) { + throw new BigQueryJdbcException(e); + } + } + + @Override + public ResultSet getExportedKeys(String catalog, String schema, String table) + throws SQLException { + String sql = readSqlFromFile(GET_EXPORTED_KEYS_SQL); + try { + String formattedSql = replaceSqlParameters(sql, catalog, schema, table); + return this.statement.executeQuery(formattedSql); + } catch (SQLException e) { + throw new BigQueryJdbcException(e); + } + } + + @Override + public ResultSet getCrossReference( + String parentCatalog, + String parentSchema, + String parentTable, + String foreignCatalog, + String foreignSchema, + String foreignTable) + throws SQLException { + String sql = readSqlFromFile(GET_CROSS_REFERENCE_SQL); + try { + String formattedSql = + replaceSqlParameters( + sql, + parentCatalog, + parentSchema, + parentTable, + foreignCatalog, + foreignSchema, + foreignTable); + return this.statement.executeQuery(formattedSql); + } catch (SQLException e) { + throw new BigQueryJdbcException(e); + } + } + + @Override + public ResultSet getTypeInfo() { + LOG.info("getTypeInfo() called"); + + final Schema typeInfoSchema = defineGetTypeInfoSchema(); + final FieldList schemaFields = typeInfoSchema.getFields(); + final List typeInfoRows = prepareGetTypeInfoRows(schemaFields); + + final Comparator comparator = defineGetTypeInfoComparator(schemaFields); + sortResults(typeInfoRows, comparator, "getTypeInfo", LOG); + final BlockingQueue queue = + new LinkedBlockingQueue<>(typeInfoRows.size() + 1); + + populateQueue(typeInfoRows, queue, schemaFields); + signalEndOfData(queue, schemaFields); + return BigQueryJsonResultSet.of( + typeInfoSchema, typeInfoRows.size(), queue, this.statement, new Thread[0]); + } + + Schema defineGetTypeInfoSchema() { + List fields = new 
ArrayList<>(18); + fields.add( + Field.newBuilder("TYPE_NAME", StandardSQLTypeName.STRING) + .setMode(Mode.REQUIRED) + .build()); // 1 + fields.add( + Field.newBuilder("DATA_TYPE", StandardSQLTypeName.INT64) + .setMode(Mode.REQUIRED) + .build()); // 2 + fields.add( + Field.newBuilder("PRECISION", StandardSQLTypeName.INT64) + .setMode(Mode.NULLABLE) + .build()); // 3 + fields.add( + Field.newBuilder("LITERAL_PREFIX", StandardSQLTypeName.STRING) + .setMode(Mode.NULLABLE) + .build()); // 4 + fields.add( + Field.newBuilder("LITERAL_SUFFIX", StandardSQLTypeName.STRING) + .setMode(Mode.NULLABLE) + .build()); // 5 + fields.add( + Field.newBuilder("CREATE_PARAMS", StandardSQLTypeName.STRING) + .setMode(Mode.NULLABLE) + .build()); // 6 + fields.add( + Field.newBuilder("NULLABLE", StandardSQLTypeName.INT64) + .setMode(Mode.REQUIRED) + .build()); // 7 + fields.add( + Field.newBuilder("CASE_SENSITIVE", StandardSQLTypeName.BOOL) + .setMode(Mode.REQUIRED) + .build()); // 8 + fields.add( + Field.newBuilder("SEARCHABLE", StandardSQLTypeName.INT64) + .setMode(Mode.REQUIRED) + .build()); // 9 + fields.add( + Field.newBuilder("UNSIGNED_ATTRIBUTE", StandardSQLTypeName.BOOL) + .setMode(Mode.NULLABLE) + .build()); // 10 + fields.add( + Field.newBuilder("FIXED_PREC_SCALE", StandardSQLTypeName.BOOL) + .setMode(Mode.REQUIRED) + .build()); // 11 + fields.add( + Field.newBuilder("AUTO_INCREMENT", StandardSQLTypeName.BOOL) + .setMode(Mode.REQUIRED) + .build()); // 12 + fields.add( + Field.newBuilder("LOCAL_TYPE_NAME", StandardSQLTypeName.STRING) + .setMode(Mode.NULLABLE) + .build()); // 13 + fields.add( + Field.newBuilder("MINIMUM_SCALE", StandardSQLTypeName.INT64) + .setMode(Mode.NULLABLE) + .build()); // 14 + fields.add( + Field.newBuilder("MAXIMUM_SCALE", StandardSQLTypeName.INT64) + .setMode(Mode.NULLABLE) + .build()); // 15 + fields.add( + Field.newBuilder("SQL_DATA_TYPE", StandardSQLTypeName.INT64) + .setMode(Mode.NULLABLE) + .build()); // 16 + fields.add( + 
Field.newBuilder("SQL_DATETIME_SUB", StandardSQLTypeName.INT64) + .setMode(Mode.NULLABLE) + .build()); // 17 + fields.add( + Field.newBuilder("NUM_PREC_RADIX", StandardSQLTypeName.INT64) + .setMode(Mode.NULLABLE) + .build()); // 18 + return Schema.of(fields); + } + + List prepareGetTypeInfoRows(FieldList schemaFields) { + List rows = new ArrayList<>(); + + Function createRow = + (data) -> { + List values = new ArrayList<>(18); + values.add(createStringFieldValue(data.typeName)); // 1. TYPE_NAME + values.add(createLongFieldValue((long) data.jdbcType)); // 2. DATA_TYPE + values.add(createLongFieldValue(data.precision)); // 3. PRECISION + values.add(createStringFieldValue(data.literalPrefix)); // 4. LITERAL_PREFIX + values.add(createStringFieldValue(data.literalSuffix)); // 5. LITERAL_SUFFIX + values.add(createStringFieldValue(data.createParams)); // 6. CREATE_PARAMS + values.add(createLongFieldValue((long) data.nullable)); // 7. NULLABLE + values.add(createBooleanFieldValue(data.caseSensitive)); // 8. CASE_SENSITIVE + values.add(createLongFieldValue((long) data.searchable)); // 9. SEARCHABLE + values.add(createBooleanFieldValue(data.unsignedAttribute)); // 10. UNSIGNED_ATTRIBUTE + values.add(createBooleanFieldValue(data.fixedPrecScale)); // 11. FIXED_PREC_SCALE + values.add(createBooleanFieldValue(data.autoIncrement)); // 12. AUTO_INCREMENT + values.add(createStringFieldValue(data.localTypeName)); // 13. LOCAL_TYPE_NAME + values.add(createLongFieldValue(data.minimumScale)); // 14. MINIMUM_SCALE + values.add(createLongFieldValue(data.maximumScale)); // 15. MAXIMUM_SCALE + values.add(createNullFieldValue()); // 16. SQL_DATA_TYPE + values.add(createNullFieldValue()); // 17. SQL_DATETIME_SUB + values.add(createLongFieldValue(data.numPrecRadix)); // 18. 
NUM_PREC_RADIX + return FieldValueList.of(values, schemaFields); + }; + + rows.add( + createRow.apply( + new TypeInfoRowData( + "INT64", + Types.BIGINT, + 19L, + null, + null, + null, + DatabaseMetaData.typeNullable, + false, + DatabaseMetaData.typeSearchable, + false, + false, + false, + "INT64", + 0L, + 0L, + 10L))); + rows.add( + createRow.apply( + new TypeInfoRowData( + "BOOL", + Types.BOOLEAN, + 1L, + null, + null, + null, + DatabaseMetaData.typeNullable, + false, + DatabaseMetaData.typePredBasic, + false, + false, + false, + "BOOL", + 0L, + 0L, + null))); + rows.add( + createRow.apply( + new TypeInfoRowData( + "FLOAT64", + Types.DOUBLE, + 15L, + null, + null, + null, + DatabaseMetaData.typeNullable, + false, + DatabaseMetaData.typeSearchable, + false, + false, + false, + "FLOAT64", + null, + null, + 2L))); + rows.add( + createRow.apply( + new TypeInfoRowData( + "NUMERIC", + Types.NUMERIC, + 38L, + null, + null, + "PRECISION,SCALE", + DatabaseMetaData.typeNullable, + false, + DatabaseMetaData.typeSearchable, + false, + true, + false, + "NUMERIC", + 9L, + 9L, + 10L))); + rows.add( + createRow.apply( + new TypeInfoRowData( + "BIGNUMERIC", + Types.NUMERIC, + 77L, + null, + null, + "PRECISION,SCALE", + DatabaseMetaData.typeNullable, + false, + DatabaseMetaData.typeSearchable, + false, + true, + false, + "BIGNUMERIC", + 38L, + 38L, + 10L))); + rows.add( + createRow.apply( + new TypeInfoRowData( + "STRING", + Types.NVARCHAR, + null, + "'", + "'", + "LENGTH", + DatabaseMetaData.typeNullable, + true, + DatabaseMetaData.typeSearchable, + false, + false, + false, + "STRING", + null, + null, + null))); + rows.add( + createRow.apply( + new TypeInfoRowData( + "TIMESTAMP", + Types.TIMESTAMP, + 29L, + "'", + "'", + null, + DatabaseMetaData.typeNullable, + false, + DatabaseMetaData.typeSearchable, + false, + false, + false, + "TIMESTAMP", + null, + null, + null))); + rows.add( + createRow.apply( + new TypeInfoRowData( + "DATE", + Types.DATE, + 10L, + "'", + "'", + null, + 
DatabaseMetaData.typeNullable, + false, + DatabaseMetaData.typeSearchable, + false, + false, + false, + "DATE", + null, + null, + null))); + rows.add( + createRow.apply( + new TypeInfoRowData( + "TIME", + Types.TIME, + 15L, + "'", + "'", + null, + DatabaseMetaData.typeNullable, + false, + DatabaseMetaData.typeSearchable, + false, + false, + false, + "TIME", + null, + null, + null))); + rows.add( + createRow.apply( + new TypeInfoRowData( + "DATETIME", + Types.TIMESTAMP, + 29L, + "'", + "'", + null, + DatabaseMetaData.typeNullable, + false, + DatabaseMetaData.typeSearchable, + false, + false, + false, + "DATETIME", + null, + null, + null))); + rows.add( + createRow.apply( + new TypeInfoRowData( + "GEOGRAPHY", + Types.OTHER, + null, + "'", + "'", + null, + DatabaseMetaData.typeNullable, + false, + DatabaseMetaData.typeSearchable, + false, + false, + false, + "GEOGRAPHY", + null, + null, + null))); + rows.add( + createRow.apply( + new TypeInfoRowData( + "JSON", + Types.OTHER, + null, + "'", + "'", + null, + DatabaseMetaData.typeNullable, + false, + DatabaseMetaData.typeSearchable, + false, + false, + false, + "JSON", + null, + null, + null))); + rows.add( + createRow.apply( + new TypeInfoRowData( + "INTERVAL", + Types.OTHER, + null, + "'", + "'", + null, + DatabaseMetaData.typeNullable, + false, + DatabaseMetaData.typeSearchable, + false, + false, + false, + "INTERVAL", + null, + null, + null))); + rows.add( + createRow.apply( + new TypeInfoRowData( + "BYTES", + Types.VARBINARY, + null, + "0x", + null, + "LENGTH", + DatabaseMetaData.typeNullable, + false, + DatabaseMetaData.typeSearchable, + false, + false, + false, + "BYTES", + null, + null, + null))); + rows.add( + createRow.apply( + new TypeInfoRowData( + "STRUCT", + Types.STRUCT, + null, + null, + null, + null, + DatabaseMetaData.typeNullable, + false, + DatabaseMetaData.typePredNone, + false, + false, + false, + "STRUCT", + null, + null, + null))); + rows.add( + createRow.apply( + new TypeInfoRowData( + "ARRAY", + 
Types.ARRAY, + null, + null, + null, + null, + DatabaseMetaData.typeNullable, + false, + DatabaseMetaData.typePredNone, + false, + false, + false, + "ARRAY", + null, + null, + null))); + rows.add( + createRow.apply( + new TypeInfoRowData( + "RANGE", + Types.OTHER, + null, + null, + null, + null, + DatabaseMetaData.typeNullable, + false, + DatabaseMetaData.typeSearchable, + false, + false, + false, + "RANGE", + null, + null, + null))); + + return rows; + } + + Comparator defineGetTypeInfoComparator(FieldList schemaFields) { + final int DATA_TYPE_IDX = schemaFields.getIndex("DATA_TYPE"); + if (DATA_TYPE_IDX < 0) { + LOG.severe( + "Could not find DATA_TYPE column in getTypeInfo schema for sorting. Returning null" + + " comparator."); + return null; + } + Comparator comparator = + Comparator.comparing( + (FieldValueList fvl) -> getLongValueOrNull(fvl, DATA_TYPE_IDX), + Comparator.nullsFirst(Long::compareTo)); + return comparator; + } + + @Override + public ResultSet getIndexInfo( + String catalog, String schema, String table, boolean unique, boolean approximate) { + LOG.info( + String.format( + "getIndexInfo called for catalog: %s, schema: %s, table: %s, unique: %s, approximate:" + + " %s. 
Traditional indexes not supported by BigQuery; returning empty ResultSet.", + catalog, schema, table, unique, approximate)); + + final Schema resultSchema = defineGetIndexInfoSchema(); + final FieldList resultSchemaFields = resultSchema.getFields(); + final BlockingQueue queue = new LinkedBlockingQueue<>(1); + signalEndOfData(queue, resultSchemaFields); + + return BigQueryJsonResultSet.of(resultSchema, 0, queue, this.statement, null); + } + + Schema defineGetIndexInfoSchema() { + List fields = new ArrayList<>(13); + fields.add( + Field.newBuilder("TABLE_CAT", StandardSQLTypeName.STRING) + .setMode(Field.Mode.NULLABLE) + .build()); + fields.add( + Field.newBuilder("TABLE_SCHEM", StandardSQLTypeName.STRING) + .setMode(Field.Mode.NULLABLE) + .build()); + fields.add( + Field.newBuilder("TABLE_NAME", StandardSQLTypeName.STRING) + .setMode(Field.Mode.REQUIRED) + .build()); + fields.add( + Field.newBuilder("NON_UNIQUE", StandardSQLTypeName.BOOL) + .setMode(Field.Mode.REQUIRED) + .build()); + fields.add( + Field.newBuilder("INDEX_QUALIFIER", StandardSQLTypeName.STRING) + .setMode(Field.Mode.NULLABLE) + .build()); + fields.add( + Field.newBuilder("INDEX_NAME", StandardSQLTypeName.STRING) + .setMode(Field.Mode.NULLABLE) + .build()); + fields.add( + Field.newBuilder("TYPE", StandardSQLTypeName.INT64).setMode(Field.Mode.REQUIRED).build()); + fields.add( + Field.newBuilder("ORDINAL_POSITION", StandardSQLTypeName.INT64) + .setMode(Field.Mode.REQUIRED) + .build()); + fields.add( + Field.newBuilder("COLUMN_NAME", StandardSQLTypeName.STRING) + .setMode(Field.Mode.NULLABLE) + .build()); + fields.add( + Field.newBuilder("ASC_OR_DESC", StandardSQLTypeName.STRING) + .setMode(Field.Mode.NULLABLE) + .build()); + fields.add( + Field.newBuilder("CARDINALITY", StandardSQLTypeName.INT64) + .setMode(Field.Mode.NULLABLE) + .build()); + fields.add( + Field.newBuilder("PAGES", StandardSQLTypeName.INT64).setMode(Field.Mode.NULLABLE).build()); + fields.add( + Field.newBuilder("FILTER_CONDITION", 
StandardSQLTypeName.STRING) + .setMode(Field.Mode.NULLABLE) + .build()); + return Schema.of(fields); + } + + @Override + public boolean supportsResultSetType(int type) { + // BigQuery primarily supports forward-only result sets. + return type == ResultSet.TYPE_FORWARD_ONLY; + } + + @Override + public boolean supportsResultSetConcurrency(int type, int concurrency) { + // BigQuery primarily supports forward-only, read-only result sets. + return type == ResultSet.TYPE_FORWARD_ONLY && concurrency == ResultSet.CONCUR_READ_ONLY; + } + + @Override + public boolean ownUpdatesAreVisible(int type) { + return false; + } + + @Override + public boolean ownDeletesAreVisible(int type) { + return false; + } + + @Override + public boolean ownInsertsAreVisible(int type) { + return false; + } + + @Override + public boolean othersUpdatesAreVisible(int type) { + return false; + } + + @Override + public boolean othersDeletesAreVisible(int type) { + return false; + } + + @Override + public boolean othersInsertsAreVisible(int type) { + return false; + } + + @Override + public boolean updatesAreDetected(int type) { + return false; + } + + @Override + public boolean deletesAreDetected(int type) { + return false; + } + + @Override + public boolean insertsAreDetected(int type) { + return false; + } + + @Override + public boolean supportsBatchUpdates() { + return false; + } + + @Override + public ResultSet getUDTs( + String catalog, String schemaPattern, String typeNamePattern, int[] types) { + LOG.info( + String.format( + "getUDTs called for catalog: %s, schemaPattern: %s, typeNamePattern: %s, types: %s. " + + "Feature not supported by BigQuery; returning empty ResultSet.", + catalog, + schemaPattern, + typeNamePattern, + (types == null ? 
"null" : Arrays.toString(types)))); + + final Schema resultSchema = defineGetUDTsSchema(); + final FieldList resultSchemaFields = resultSchema.getFields(); + final BlockingQueue queue = new LinkedBlockingQueue<>(1); + signalEndOfData(queue, resultSchemaFields); + + return BigQueryJsonResultSet.of(resultSchema, 0, queue, this.statement, null); + } + + Schema defineGetUDTsSchema() { + List fields = new ArrayList<>(7); + fields.add( + Field.newBuilder("TYPE_CAT", StandardSQLTypeName.STRING) + .setMode(Field.Mode.NULLABLE) + .build()); + fields.add( + Field.newBuilder("TYPE_SCHEM", StandardSQLTypeName.STRING) + .setMode(Field.Mode.NULLABLE) + .build()); + fields.add( + Field.newBuilder("TYPE_NAME", StandardSQLTypeName.STRING) + .setMode(Field.Mode.REQUIRED) + .build()); + fields.add( + Field.newBuilder("CLASS_NAME", StandardSQLTypeName.STRING) + .setMode(Field.Mode.REQUIRED) + .build()); + fields.add( + Field.newBuilder("DATA_TYPE", StandardSQLTypeName.INT64) + .setMode(Field.Mode.REQUIRED) + .build()); + fields.add( + Field.newBuilder("REMARKS", StandardSQLTypeName.STRING) + .setMode(Field.Mode.NULLABLE) + .build()); + fields.add( + Field.newBuilder("BASE_TYPE", StandardSQLTypeName.INT64) + .setMode(Field.Mode.NULLABLE) + .build()); + return Schema.of(fields); + } + + @Override + public Connection getConnection() { + return connection; + } + + @Override + public boolean supportsSavepoints() { + return false; + } + + @Override + public boolean supportsNamedParameters() { + return false; + } + + @Override + public boolean supportsMultipleOpenResults() { + return false; + } + + @Override + public boolean supportsGetGeneratedKeys() { + return false; + } + + @Override + public ResultSet getSuperTables(String catalog, String schemaPattern, String tableNamePattern) { + LOG.info( + String.format( + "getSuperTables called for catalog: %s, schemaPattern: %s, tableNamePattern: %s. 
" + + "BigQuery does not support super tables; returning empty ResultSet.", + catalog, schemaPattern, tableNamePattern)); + + final Schema resultSchema = defineGetSuperTablesSchema(); + final FieldList resultSchemaFields = resultSchema.getFields(); + final BlockingQueue queue = new LinkedBlockingQueue<>(1); + + signalEndOfData(queue, resultSchemaFields); + + return BigQueryJsonResultSet.of(resultSchema, 0, queue, this.statement, null); + } + + Schema defineGetSuperTablesSchema() { + List fields = new ArrayList<>(4); + fields.add( + Field.newBuilder("TABLE_CAT", StandardSQLTypeName.STRING) + .setMode(Field.Mode.NULLABLE) + .build()); // 1. TABLE_CAT + fields.add( + Field.newBuilder("TABLE_SCHEM", StandardSQLTypeName.STRING) + .setMode(Field.Mode.NULLABLE) + .build()); // 2. TABLE_SCHEM + fields.add( + Field.newBuilder("TABLE_NAME", StandardSQLTypeName.STRING) + .setMode(Field.Mode.REQUIRED) + .build()); // 3. TABLE_NAME + fields.add( + Field.newBuilder("SUPERTABLE_NAME", StandardSQLTypeName.STRING) + .setMode(Field.Mode.REQUIRED) + .build()); // 4. SUPERTABLE_NAME + return Schema.of(fields); + } + + @Override + public ResultSet getSuperTypes(String catalog, String schemaPattern, String typeNamePattern) { + LOG.info( + String.format( + "getSuperTypes called for catalog: %s, schemaPattern: %s, typeNamePattern: %s. BigQuery" + + " does not support user-defined type hierarchies; returning empty ResultSet.", + catalog, schemaPattern, typeNamePattern)); + + final Schema resultSchema = defineGetSuperTypesSchema(); + final FieldList resultSchemaFields = resultSchema.getFields(); + final BlockingQueue queue = new LinkedBlockingQueue<>(1); + + signalEndOfData(queue, resultSchemaFields); + + return BigQueryJsonResultSet.of(resultSchema, 0, queue, this.statement, null); + } + + Schema defineGetSuperTypesSchema() { + List fields = new ArrayList<>(6); + fields.add( + Field.newBuilder("TYPE_CAT", StandardSQLTypeName.STRING) + .setMode(Field.Mode.NULLABLE) + .build()); // 1. 
TYPE_CAT + fields.add( + Field.newBuilder("TYPE_SCHEM", StandardSQLTypeName.STRING) + .setMode(Field.Mode.NULLABLE) + .build()); // 2. TYPE_SCHEM + fields.add( + Field.newBuilder("TYPE_NAME", StandardSQLTypeName.STRING) + .setMode(Field.Mode.REQUIRED) + .build()); // 3. TYPE_NAME + fields.add( + Field.newBuilder("SUPERTYPE_CAT", StandardSQLTypeName.STRING) + .setMode(Field.Mode.NULLABLE) + .build()); // 4. SUPERTYPE_CAT + fields.add( + Field.newBuilder("SUPERTYPE_SCHEM", StandardSQLTypeName.STRING) + .setMode(Field.Mode.NULLABLE) + .build()); // 5. SUPERTYPE_SCHEM + fields.add( + Field.newBuilder("SUPERTYPE_NAME", StandardSQLTypeName.STRING) + .setMode(Field.Mode.REQUIRED) + .build()); // 6. SUPERTYPE_NAME + return Schema.of(fields); + } + + @Override + public ResultSet getAttributes( + String catalog, String schemaPattern, String typeNamePattern, String attributeNamePattern) { + LOG.info( + String.format( + "getAttributes called for catalog: %s, schemaPattern: %s, typeNamePattern: %s," + + " attributeNamePattern: %s. 
Feature not supported by BigQuery; returning empty" + + " ResultSet.", + catalog, schemaPattern, typeNamePattern, attributeNamePattern)); + + final Schema resultSchema = defineGetAttributesSchema(); + final FieldList resultSchemaFields = resultSchema.getFields(); + final BlockingQueue queue = new LinkedBlockingQueue<>(1); + signalEndOfData(queue, resultSchemaFields); + + return BigQueryJsonResultSet.of(resultSchema, 0, queue, this.statement, null); + } + + Schema defineGetAttributesSchema() { + List fields = new ArrayList<>(21); + fields.add( + Field.newBuilder("TYPE_CAT", StandardSQLTypeName.STRING) + .setMode(Field.Mode.NULLABLE) + .build()); + fields.add( + Field.newBuilder("TYPE_SCHEM", StandardSQLTypeName.STRING) + .setMode(Field.Mode.NULLABLE) + .build()); + fields.add( + Field.newBuilder("TYPE_NAME", StandardSQLTypeName.STRING) + .setMode(Field.Mode.REQUIRED) + .build()); + fields.add( + Field.newBuilder("ATTR_NAME", StandardSQLTypeName.STRING) + .setMode(Field.Mode.REQUIRED) + .build()); + fields.add( + Field.newBuilder("DATA_TYPE", StandardSQLTypeName.INT64) + .setMode(Field.Mode.REQUIRED) + .build()); + fields.add( + Field.newBuilder("ATTR_TYPE_NAME", StandardSQLTypeName.STRING) + .setMode(Field.Mode.REQUIRED) + .build()); + fields.add( + Field.newBuilder("ATTR_SIZE", StandardSQLTypeName.INT64) + .setMode(Field.Mode.NULLABLE) + .build()); + fields.add( + Field.newBuilder("DECIMAL_DIGITS", StandardSQLTypeName.INT64) + .setMode(Field.Mode.NULLABLE) + .build()); + fields.add( + Field.newBuilder("NUM_PREC_RADIX", StandardSQLTypeName.INT64) + .setMode(Field.Mode.NULLABLE) + .build()); + fields.add( + Field.newBuilder("NULLABLE", StandardSQLTypeName.INT64) + .setMode(Field.Mode.REQUIRED) + .build()); + fields.add( + Field.newBuilder("REMARKS", StandardSQLTypeName.STRING) + .setMode(Field.Mode.NULLABLE) + .build()); + fields.add( + Field.newBuilder("ATTR_DEF", StandardSQLTypeName.STRING) + .setMode(Field.Mode.NULLABLE) + .build()); + fields.add( + 
Field.newBuilder("SQL_DATA_TYPE", StandardSQLTypeName.INT64) + .setMode(Field.Mode.NULLABLE) + .build()); + fields.add( + Field.newBuilder("SQL_DATETIME_SUB", StandardSQLTypeName.INT64) + .setMode(Field.Mode.NULLABLE) + .build()); + fields.add( + Field.newBuilder("CHAR_OCTET_LENGTH", StandardSQLTypeName.INT64) + .setMode(Field.Mode.NULLABLE) + .build()); + fields.add( + Field.newBuilder("ORDINAL_POSITION", StandardSQLTypeName.INT64) + .setMode(Field.Mode.REQUIRED) + .build()); + fields.add( + Field.newBuilder("IS_NULLABLE", StandardSQLTypeName.STRING) + .setMode(Field.Mode.REQUIRED) + .build()); + fields.add( + Field.newBuilder("SCOPE_CATALOG", StandardSQLTypeName.STRING) + .setMode(Field.Mode.NULLABLE) + .build()); + fields.add( + Field.newBuilder("SCOPE_SCHEMA", StandardSQLTypeName.STRING) + .setMode(Field.Mode.NULLABLE) + .build()); + fields.add( + Field.newBuilder("SCOPE_TABLE", StandardSQLTypeName.STRING) + .setMode(Field.Mode.NULLABLE) + .build()); + fields.add( + Field.newBuilder("SOURCE_DATA_TYPE", StandardSQLTypeName.INT64) + .setMode(Field.Mode.NULLABLE) + .build()); + return Schema.of(fields); + } + + @Override + public boolean supportsResultSetHoldability(int holdability) { + if (holdability == ResultSet.CLOSE_CURSORS_AT_COMMIT) { + return true; + } + return false; + } + + @Override + public int getResultSetHoldability() { + return ResultSet.CLOSE_CURSORS_AT_COMMIT; + } + + @Override + // Obtained from java libraries pom + // https://github.com/googleapis/java-bigquery/blob/main/pom.xml + public int getDatabaseMajorVersion() { + return 2; + } + + @Override + public int getDatabaseMinorVersion() { + return 0; + } + + @Override + public int getJDBCMajorVersion() { + return 4; + } + + @Override + public int getJDBCMinorVersion() { + return 2; + } + + @Override + public int getSQLStateType() { + return DatabaseMetaData.sqlStateSQL; + } + + @Override + public boolean locatorsUpdateCopy() { + return false; + } + + @Override + public boolean 
supportsStatementPooling() { + return false; + } + + @Override + public RowIdLifetime getRowIdLifetime() { + return null; + } + + @Override + public ResultSet getSchemas(String catalog, String schemaPattern) { + if ((catalog != null && catalog.isEmpty()) + || (schemaPattern != null && schemaPattern.isEmpty())) { + LOG.warning("Returning empty ResultSet as catalog or schemaPattern is an empty string."); + return new BigQueryJsonResultSet(); + } + + LOG.info( + String.format( + "getSchemas called for catalog: %s, schemaPattern: %s", catalog, schemaPattern)); + + final Pattern schemaRegex = compileSqlLikePattern(schemaPattern); + final Schema resultSchema = defineGetSchemasSchema(); + final FieldList resultSchemaFields = resultSchema.getFields(); + + final BlockingQueue queue = + new LinkedBlockingQueue<>(DEFAULT_QUEUE_CAPACITY); + final List collectedResults = Collections.synchronizedList(new ArrayList<>()); + final String catalogParam = catalog; + + Runnable schemaFetcher = + () -> { + final FieldList localResultSchemaFields = resultSchemaFields; + List projectsToScanList = new ArrayList<>(); + + if (catalogParam != null) { + projectsToScanList.add(catalogParam); + } else { + projectsToScanList.addAll(getAccessibleCatalogNames()); + } + + if (projectsToScanList.isEmpty()) { + LOG.info( + "No valid projects to scan (primary, specified, or additional). 
Returning empty" + + " resultset."); + return; + } + + try { + for (String currentProjectToScan : projectsToScanList) { + if (Thread.currentThread().isInterrupted()) { + LOG.warning( + "Schema fetcher interrupted during project iteration for project: " + + currentProjectToScan); + break; + } + LOG.info("Fetching schemas for project: " + currentProjectToScan); + List datasetsInProject = + findMatchingBigQueryObjects( + "Dataset", + () -> + bigquery.listDatasets( + currentProjectToScan, + BigQuery.DatasetListOption.pageSize(DEFAULT_PAGE_SIZE)), + (name) -> bigquery.getDataset(DatasetId.of(currentProjectToScan, name)), + (ds) -> ds.getDatasetId().getDataset(), + schemaPattern, + schemaRegex, + LOG); + + if (datasetsInProject.isEmpty() || Thread.currentThread().isInterrupted()) { + LOG.info( + "Fetcher thread found no matching datasets in project: " + + currentProjectToScan); + continue; + } + + LOG.fine("Processing found datasets for project: " + currentProjectToScan); + for (Dataset dataset : datasetsInProject) { + if (Thread.currentThread().isInterrupted()) { + LOG.warning( + "Schema fetcher interrupted during dataset iteration for project: " + + currentProjectToScan); + break; + } + processSchemaInfo(dataset, collectedResults, localResultSchemaFields); + } + } + + if (!Thread.currentThread().isInterrupted()) { + Comparator comparator = + defineGetSchemasComparator(localResultSchemaFields); + sortResults(collectedResults, comparator, "getSchemas", LOG); + } + + if (!Thread.currentThread().isInterrupted()) { + populateQueue(collectedResults, queue, localResultSchemaFields); + } + + } catch (Throwable t) { + LOG.severe("Unexpected error in schema fetcher runnable: " + t.getMessage()); + } finally { + signalEndOfData(queue, localResultSchemaFields); + LOG.info("Schema fetcher thread finished."); + } + }; + + Thread fetcherThread = new Thread(schemaFetcher, "getSchemas-fetcher-" + catalog); + BigQueryJsonResultSet resultSet = + BigQueryJsonResultSet.of( + resultSchema, 
-1, queue, this.statement, new Thread[] {fetcherThread}); + + fetcherThread.start(); + LOG.info("Started background thread for getSchemas"); + return resultSet; + } + + Schema defineGetSchemasSchema() { + List fields = new ArrayList<>(2); + fields.add( + Field.newBuilder("TABLE_SCHEM", StandardSQLTypeName.STRING) + .setMode(Field.Mode.REQUIRED) + .build()); + fields.add( + Field.newBuilder("TABLE_CATALOG", StandardSQLTypeName.STRING) + .setMode(Field.Mode.NULLABLE) + .build()); + return Schema.of(fields); + } + + void processSchemaInfo( + Dataset dataset, List collectedResults, FieldList resultSchemaFields) { + DatasetId datasetId = dataset.getDatasetId(); + LOG.finer("Processing schema info for dataset: " + datasetId); + try { + String schemaName = datasetId.getDataset(); + String catalogName = datasetId.getProject(); + List values = new ArrayList<>(resultSchemaFields.size()); + values.add(createStringFieldValue(schemaName)); + values.add(createStringFieldValue(catalogName)); + FieldValueList rowFvl = FieldValueList.of(values, resultSchemaFields); + collectedResults.add(rowFvl); + LOG.finer("Processed and added schema info row for: " + datasetId); + } catch (Exception e) { + LOG.warning( + String.format( + "Error processing schema info for dataset %s: %s. 
Skipping this schema.", + datasetId, e.getMessage())); + } + } + + Comparator defineGetSchemasComparator(FieldList resultSchemaFields) { + final int TABLE_CATALOG_IDX = resultSchemaFields.getIndex("TABLE_CATALOG"); + final int TABLE_SCHEM_IDX = resultSchemaFields.getIndex("TABLE_SCHEM"); + return Comparator.comparing( + (FieldValueList fvl) -> getStringValueOrNull(fvl, TABLE_CATALOG_IDX), + Comparator.nullsFirst(String::compareTo)) + .thenComparing( + (FieldValueList fvl) -> getStringValueOrNull(fvl, TABLE_SCHEM_IDX), + Comparator.nullsFirst(String::compareTo)); + } + + @Override + public boolean supportsStoredFunctionsUsingCallSyntax() { + return false; + } + + @Override + public boolean autoCommitFailureClosesAllResultSets() { + return false; + } + + @Override + public ResultSet getClientInfoProperties() { + LOG.info("getClientInfoProperties() called."); + + final Schema resultSchema = defineGetClientInfoPropertiesSchema(); + final FieldList resultSchemaFields = resultSchema.getFields(); + final BlockingQueue queue = new LinkedBlockingQueue<>(4); + final List collectedResults = new ArrayList<>(3); + + try { + collectedResults.add( + FieldValueList.of( + Arrays.asList( + createStringFieldValue("ApplicationName"), + createLongFieldValue(25L), + createNullFieldValue(), + createStringFieldValue( + "The name of the application currently utilizing the connection.")), + resultSchemaFields)); + + collectedResults.add( + FieldValueList.of( + Arrays.asList( + createStringFieldValue("ClientHostname"), + createLongFieldValue(25L), + createNullFieldValue(), + createStringFieldValue( + "The hostname of the computer the application using the connection is running" + + " on.")), + resultSchemaFields)); + + collectedResults.add( + FieldValueList.of( + Arrays.asList( + createStringFieldValue("ClientUser"), + createLongFieldValue(25L), + createNullFieldValue(), + createStringFieldValue( + "The name of the user that the application using the connection is performing" + + " work 
for.")), + resultSchemaFields)); + + Comparator comparator = + Comparator.comparing( + (FieldValueList fvl) -> + getStringValueOrNull(fvl, resultSchemaFields.getIndex("NAME")), + Comparator.nullsFirst(String::compareToIgnoreCase)); + sortResults(collectedResults, comparator, "getClientInfoProperties", LOG); + populateQueue(collectedResults, queue, resultSchemaFields); + + } catch (Exception e) { + LOG.warning("Unexpected error processing client info properties: " + e.getMessage()); + collectedResults.clear(); + queue.clear(); + } finally { + signalEndOfData(queue, resultSchemaFields); + } + return BigQueryJsonResultSet.of( + resultSchema, collectedResults.size(), queue, this.statement, new Thread[0]); + } + + Schema defineGetClientInfoPropertiesSchema() { + List fields = new ArrayList<>(4); + fields.add( + Field.newBuilder("NAME", StandardSQLTypeName.STRING) + .setMode(Field.Mode.REQUIRED) + .build()); // 1 + fields.add( + Field.newBuilder("MAX_LEN", StandardSQLTypeName.INT64) + .setMode(Field.Mode.NULLABLE) + .build()); // 2 + fields.add( + Field.newBuilder("DEFAULT_VALUE", StandardSQLTypeName.STRING) + .setMode(Field.Mode.NULLABLE) + .build()); // 3 + fields.add( + Field.newBuilder("DESCRIPTION", StandardSQLTypeName.STRING) + .setMode(Field.Mode.NULLABLE) + .build()); // 4 + return Schema.of(fields); + } + + @Override + public ResultSet getFunctions(String catalog, String schemaPattern, String functionNamePattern) { + if ((catalog == null || catalog.isEmpty()) + || (schemaPattern != null && schemaPattern.isEmpty()) + || (functionNamePattern != null && functionNamePattern.isEmpty())) { + LOG.warning( + "Returning empty ResultSet as catalog is null/empty or a pattern is empty for" + + " getFunctions."); + return new BigQueryJsonResultSet(); + } + + LOG.info( + String.format( + "getFunctions called for catalog: %s, schemaPattern: %s, functionNamePattern: %s", + catalog, schemaPattern, functionNamePattern)); + + final Pattern schemaRegex = 
compileSqlLikePattern(schemaPattern); + final Pattern functionNameRegex = compileSqlLikePattern(functionNamePattern); + final Schema resultSchema = defineGetFunctionsSchema(); + final FieldList resultSchemaFields = resultSchema.getFields(); + final BlockingQueue queue = + new LinkedBlockingQueue<>(DEFAULT_QUEUE_CAPACITY); + final List collectedResults = Collections.synchronizedList(new ArrayList<>()); + final List> processingTaskFutures = new ArrayList<>(); + final String catalogParam = catalog; + + Runnable functionFetcher = + () -> { + ExecutorService apiExecutor = null; + ExecutorService routineProcessorExecutor = null; + final FieldList localResultSchemaFields = resultSchemaFields; + final List>> apiFutures = new ArrayList<>(); + + try { + List datasetsToScan = + findMatchingBigQueryObjects( + "Dataset", + () -> + bigquery.listDatasets( + catalogParam, DatasetListOption.pageSize(DEFAULT_PAGE_SIZE)), + (name) -> bigquery.getDataset(DatasetId.of(catalogParam, name)), + (ds) -> ds.getDatasetId().getDataset(), + schemaPattern, + schemaRegex, + LOG); + + if (datasetsToScan.isEmpty()) { + LOG.info("Fetcher thread found no matching datasets. 
Returning empty resultset."); + return; + } + + apiExecutor = Executors.newFixedThreadPool(API_EXECUTOR_POOL_SIZE); + routineProcessorExecutor = Executors.newFixedThreadPool(this.metadataFetchThreadCount); + + for (Dataset dataset : datasetsToScan) { + if (Thread.currentThread().isInterrupted()) { + LOG.warning("Function fetcher interrupted during dataset iteration submission."); + break; + } + + final DatasetId currentDatasetId = dataset.getDatasetId(); + + Callable> apiCallable = + () -> { + LOG.fine( + String.format( + "Fetching all routines for dataset: %s, pattern: %s", + currentDatasetId.getDataset(), functionNamePattern)); + return findMatchingBigQueryObjects( + "Routine", + () -> + bigquery.listRoutines( + currentDatasetId, RoutineListOption.pageSize(DEFAULT_PAGE_SIZE)), + (name) -> + bigquery.getRoutine( + RoutineId.of( + currentDatasetId.getProject(), + currentDatasetId.getDataset(), + name)), + (rt) -> rt.getRoutineId().getRoutine(), + functionNamePattern, + functionNameRegex, + LOG); + }; + Future> apiFuture = apiExecutor.submit(apiCallable); + apiFutures.add(apiFuture); + } + LOG.fine( + "Finished submitting " + + apiFutures.size() + + " findMatchingRoutines (for functions) tasks."); + apiExecutor.shutdown(); + + for (Future> apiFuture : apiFutures) { + if (Thread.currentThread().isInterrupted()) { + LOG.warning("Function fetcher interrupted while processing API futures."); + break; + } + try { + List routinesResult = apiFuture.get(); + if (routinesResult != null) { + for (Routine routine : routinesResult) { + if (Thread.currentThread().isInterrupted()) { + break; + } + String routineType = routine.getRoutineType(); + if ("SCALAR_FUNCTION".equalsIgnoreCase(routineType) + || "TABLE_FUNCTION".equalsIgnoreCase(routineType)) { + LOG.fine( + "Submitting processing task for function: " + + routine.getRoutineId() + + " of type " + + routineType); + final Routine finalRoutine = routine; + Future processFuture = + routineProcessorExecutor.submit( + () -> + 
processFunctionInfo( + finalRoutine, collectedResults, localResultSchemaFields)); + processingTaskFutures.add(processFuture); + } + } + } + } catch (InterruptedException e) { + Thread.currentThread().interrupt(); + LOG.warning( + "Function fetcher thread interrupted while waiting for API future result."); + break; + } catch (ExecutionException | CancellationException e) { + LOG.warning( + "Error or cancellation in findMatchingRoutines (for functions) task: " + + e.getMessage()); + } + } + waitForTasksCompletion(processingTaskFutures); + Comparator comparator = + defineGetFunctionsComparator(localResultSchemaFields); + sortResults(collectedResults, comparator, "getFunctions", LOG); + populateQueue(collectedResults, queue, localResultSchemaFields); + } catch (Throwable t) { + LOG.severe("Unexpected error in function fetcher runnable: " + t.getMessage()); + apiFutures.forEach(f -> f.cancel(true)); + processingTaskFutures.forEach(f -> f.cancel(true)); + } finally { + signalEndOfData(queue, localResultSchemaFields); + shutdownExecutor(apiExecutor); + shutdownExecutor(routineProcessorExecutor); + LOG.info("Function fetcher thread finished."); + } + }; + + Thread fetcherThread = new Thread(functionFetcher, "getFunctions-fetcher-" + catalog); + BigQueryJsonResultSet resultSet = + BigQueryJsonResultSet.of( + resultSchema, -1, queue, this.statement, new Thread[] {fetcherThread}); + + fetcherThread.start(); + LOG.info("Started background thread for getFunctions"); + return resultSet; + } + + Schema defineGetFunctionsSchema() { + List fields = new ArrayList<>(6); + fields.add( + Field.newBuilder("FUNCTION_CAT", StandardSQLTypeName.STRING) + .setMode(Mode.NULLABLE) + .build()); + fields.add( + Field.newBuilder("FUNCTION_SCHEM", StandardSQLTypeName.STRING) + .setMode(Mode.NULLABLE) + .build()); + fields.add( + Field.newBuilder("FUNCTION_NAME", StandardSQLTypeName.STRING) + .setMode(Mode.REQUIRED) + .build()); + fields.add( + Field.newBuilder("REMARKS", 
StandardSQLTypeName.STRING).setMode(Mode.NULLABLE).build()); + fields.add( + Field.newBuilder("FUNCTION_TYPE", StandardSQLTypeName.INT64) + .setMode(Mode.REQUIRED) + .build()); + fields.add( + Field.newBuilder("SPECIFIC_NAME", StandardSQLTypeName.STRING) + .setMode(Mode.REQUIRED) + .build()); + return Schema.of(fields); + } + + void processFunctionInfo( + Routine routine, List collectedResults, FieldList resultSchemaFields) { + RoutineId routineId = routine.getRoutineId(); + LOG.fine("Processing function info for: " + routineId); + + try { + String catalogName = routineId.getProject(); + String schemaName = routineId.getDataset(); + String functionName = routineId.getRoutine(); + String remarks = routine.getDescription(); + String specificName = functionName; + + short functionType; + String bqRoutineType = routine.getRoutineType(); + if ("SCALAR_FUNCTION".equalsIgnoreCase(bqRoutineType)) { + functionType = DatabaseMetaData.functionResultUnknown; + } else if ("TABLE_FUNCTION".equalsIgnoreCase(bqRoutineType)) { + functionType = DatabaseMetaData.functionReturnsTable; + } else { + functionType = DatabaseMetaData.functionResultUnknown; + } + + List values = new ArrayList<>(resultSchemaFields.size()); + values.add(createStringFieldValue(catalogName)); // 1. FUNCTION_CAT + values.add(createStringFieldValue(schemaName)); // 2. FUNCTION_SCHEM + values.add(createStringFieldValue(functionName)); // 3. FUNCTION_NAME + values.add(createStringFieldValue(remarks)); // 4. REMARKS + values.add(createLongFieldValue((long) functionType)); // 5. FUNCTION_TYPE + values.add(createStringFieldValue(specificName)); // 6. SPECIFIC_NAME + + FieldValueList rowFvl = FieldValueList.of(values, resultSchemaFields); + collectedResults.add(rowFvl); + LOG.fine("Processed and added function info row for: " + routineId); + + } catch (Exception e) { + LOG.warning( + String.format( + "Error processing function info for %s: %s. 
Skipping this function.", + routineId, e.getMessage())); + } + } + + Comparator defineGetFunctionsComparator(FieldList resultSchemaFields) { + final int FUNC_CAT_IDX = resultSchemaFields.getIndex("FUNCTION_CAT"); + final int FUNC_SCHEM_IDX = resultSchemaFields.getIndex("FUNCTION_SCHEM"); + final int FUNC_NAME_IDX = resultSchemaFields.getIndex("FUNCTION_NAME"); + final int SPEC_NAME_IDX = resultSchemaFields.getIndex("SPECIFIC_NAME"); + + return Comparator.comparing( + (FieldValueList fvl) -> getStringValueOrNull(fvl, FUNC_CAT_IDX), + Comparator.nullsFirst(String::compareTo)) + .thenComparing( + (FieldValueList fvl) -> getStringValueOrNull(fvl, FUNC_SCHEM_IDX), + Comparator.nullsFirst(String::compareTo)) + .thenComparing( + (FieldValueList fvl) -> getStringValueOrNull(fvl, FUNC_NAME_IDX), + Comparator.nullsFirst(String::compareTo)) + .thenComparing( + (FieldValueList fvl) -> getStringValueOrNull(fvl, SPEC_NAME_IDX), + Comparator.nullsFirst(String::compareTo)); + } + + @Override + public ResultSet getFunctionColumns( + String catalog, String schemaPattern, String functionNamePattern, String columnNamePattern) { + if (catalog == null || catalog.isEmpty()) { + LOG.warning("Returning empty ResultSet catalog (project) is null or empty."); + return new BigQueryJsonResultSet(); + } + if ((schemaPattern != null && schemaPattern.isEmpty()) + || (functionNamePattern != null && functionNamePattern.isEmpty()) + || (columnNamePattern != null && columnNamePattern.isEmpty())) { + LOG.warning("Returning empty ResultSet because an explicit empty pattern was provided."); + return new BigQueryJsonResultSet(); + } + + LOG.info( + String.format( + "getFunctionColumns called for catalog: %s, schemaPattern: %s, functionNamePattern: %s," + + " columnNamePattern: %s", + catalog, schemaPattern, functionNamePattern, columnNamePattern)); + + final Pattern schemaRegex = compileSqlLikePattern(schemaPattern); + final Pattern functionNameRegex = compileSqlLikePattern(functionNamePattern); + final 
Pattern columnNameRegex = compileSqlLikePattern(columnNamePattern); + + final Schema resultSchema = defineGetFunctionColumnsSchema(); + final FieldList resultSchemaFields = resultSchema.getFields(); + final BlockingQueue queue = + new LinkedBlockingQueue<>(DEFAULT_QUEUE_CAPACITY); + final List collectedResults = Collections.synchronizedList(new ArrayList<>()); + final List> processingTaskFutures = new ArrayList<>(); + final String catalogParam = catalog; + + Runnable functionColumnFetcher = + () -> { + ExecutorService listRoutinesExecutor = null; + ExecutorService getRoutineDetailsExecutor = null; + ExecutorService processParamsExecutor = null; + final String fetcherThreadNameSuffix = + "-" + catalogParam.substring(0, Math.min(10, catalogParam.length())); + + try { + List datasetsToScan = + findMatchingBigQueryObjects( + "Dataset", + () -> + bigquery.listDatasets( + catalogParam, DatasetListOption.pageSize(DEFAULT_PAGE_SIZE)), + (name) -> bigquery.getDataset(DatasetId.of(catalogParam, name)), + (ds) -> ds.getDatasetId().getDataset(), + schemaPattern, + schemaRegex, + LOG); + + if (datasetsToScan.isEmpty() || Thread.currentThread().isInterrupted()) { + LOG.info( + "Fetcher: No matching datasets or interrupted early. Catalog: " + catalogParam); + return; + } + + listRoutinesExecutor = + Executors.newFixedThreadPool( + API_EXECUTOR_POOL_SIZE, + runnable -> new Thread(runnable, "funcol-list-rout" + fetcherThreadNameSuffix)); + List functionIdsToGet = + listMatchingFunctionIdsFromDatasets( + datasetsToScan, + functionNamePattern, + functionNameRegex, + listRoutinesExecutor, + catalogParam, + LOG); + shutdownExecutor(listRoutinesExecutor); + listRoutinesExecutor = null; + + if (functionIdsToGet.isEmpty() || Thread.currentThread().isInterrupted()) { + LOG.info("Fetcher: No function IDs found or interrupted. 
Catalog: " + catalogParam); + return; + } + + getRoutineDetailsExecutor = + Executors.newFixedThreadPool( + this.metadataFetchThreadCount, + runnable -> + new Thread(runnable, "funcol-get-details" + fetcherThreadNameSuffix)); + List fullFunctions = + fetchFullRoutineDetailsForIds(functionIdsToGet, getRoutineDetailsExecutor, LOG); + shutdownExecutor(getRoutineDetailsExecutor); + getRoutineDetailsExecutor = null; + + if (fullFunctions.isEmpty() || Thread.currentThread().isInterrupted()) { + LOG.info( + "Fetcher: No full functions fetched or interrupted. Catalog: " + catalogParam); + return; + } + + processParamsExecutor = + Executors.newFixedThreadPool( + this.metadataFetchThreadCount, + runnable -> + new Thread(runnable, "funcol-proc-params" + fetcherThreadNameSuffix)); + submitFunctionParameterProcessingJobs( + fullFunctions, + columnNameRegex, + collectedResults, + resultSchemaFields, + processParamsExecutor, + processingTaskFutures, + LOG); + + if (Thread.currentThread().isInterrupted()) { + LOG.warning( + "Fetcher: Interrupted before waiting for parameter processing. Catalog: " + + catalogParam); + processingTaskFutures.forEach(f -> f.cancel(true)); + } else { + LOG.fine( + "Fetcher: Waiting for " + + processingTaskFutures.size() + + " parameter processing tasks. Catalog: " + + catalogParam); + waitForTasksCompletion(processingTaskFutures); + LOG.fine( + "Fetcher: All parameter processing tasks completed or handled. Catalog: " + + catalogParam); + } + + if (!Thread.currentThread().isInterrupted()) { + Comparator comparator = + defineGetFunctionColumnsComparator(resultSchemaFields); + sortResults(collectedResults, comparator, "getFunctionColumns", LOG); + populateQueue(collectedResults, queue, resultSchemaFields); + } + + } catch (InterruptedException e) { + Thread.currentThread().interrupt(); + LOG.warning( + "Fetcher: Interrupted in main try block for catalog " + + catalogParam + + ". 
Error: " + + e.getMessage()); + processingTaskFutures.forEach(f -> f.cancel(true)); + } catch (Throwable t) { + LOG.severe( + "Fetcher: Unexpected error in main try block for catalog " + + catalogParam + + ". Error: " + + t.getMessage()); + processingTaskFutures.forEach(f -> f.cancel(true)); + } finally { + signalEndOfData(queue, resultSchemaFields); + if (listRoutinesExecutor != null) shutdownExecutor(listRoutinesExecutor); + if (getRoutineDetailsExecutor != null) shutdownExecutor(getRoutineDetailsExecutor); + if (processParamsExecutor != null) shutdownExecutor(processParamsExecutor); + LOG.info("Function column fetcher thread finished for catalog: " + catalogParam); + } + }; + + Thread fetcherThread = + new Thread(functionColumnFetcher, "getFunctionColumns-fetcher-" + catalog); + BigQueryJsonResultSet resultSet = + BigQueryJsonResultSet.of( + resultSchema, -1, queue, this.statement, new Thread[] {fetcherThread}); + + fetcherThread.start(); + LOG.info("Started background thread for getFunctionColumns for catalog: " + catalog); + return resultSet; + } + + Schema defineGetFunctionColumnsSchema() { + List fields = new ArrayList<>(17); + fields.add( + Field.newBuilder("FUNCTION_CAT", StandardSQLTypeName.STRING) + .setMode(Mode.NULLABLE) + .build()); // 1 + fields.add( + Field.newBuilder("FUNCTION_SCHEM", StandardSQLTypeName.STRING) + .setMode(Mode.NULLABLE) + .build()); // 2 + fields.add( + Field.newBuilder("FUNCTION_NAME", StandardSQLTypeName.STRING) + .setMode(Mode.REQUIRED) + .build()); // 3 + fields.add( + Field.newBuilder("COLUMN_NAME", StandardSQLTypeName.STRING) + .setMode(Mode.REQUIRED) + .build()); // 4 + fields.add( + Field.newBuilder("COLUMN_TYPE", StandardSQLTypeName.INT64) + .setMode(Mode.REQUIRED) + .build()); // 5 + fields.add( + Field.newBuilder("DATA_TYPE", StandardSQLTypeName.INT64) + .setMode(Mode.REQUIRED) + .build()); // 6 + fields.add( + Field.newBuilder("TYPE_NAME", StandardSQLTypeName.STRING) + .setMode(Mode.REQUIRED) + .build()); // 7 + 
fields.add( + Field.newBuilder("PRECISION", StandardSQLTypeName.INT64) + .setMode(Mode.NULLABLE) + .build()); // 8 + fields.add( + Field.newBuilder("LENGTH", StandardSQLTypeName.INT64).setMode(Mode.NULLABLE).build()); // 9 + fields.add( + Field.newBuilder("SCALE", StandardSQLTypeName.INT64).setMode(Mode.NULLABLE).build()); // 10 + fields.add( + Field.newBuilder("RADIX", StandardSQLTypeName.INT64).setMode(Mode.NULLABLE).build()); // 11 + fields.add( + Field.newBuilder("NULLABLE", StandardSQLTypeName.INT64) + .setMode(Mode.REQUIRED) + .build()); // 12 + fields.add( + Field.newBuilder("REMARKS", StandardSQLTypeName.STRING) + .setMode(Mode.NULLABLE) + .build()); // 13 + fields.add( + Field.newBuilder("CHAR_OCTET_LENGTH", StandardSQLTypeName.INT64) + .setMode(Mode.NULLABLE) + .build()); // 14 + fields.add( + Field.newBuilder("ORDINAL_POSITION", StandardSQLTypeName.INT64) + .setMode(Mode.REQUIRED) + .build()); // 15 + fields.add( + Field.newBuilder("IS_NULLABLE", StandardSQLTypeName.STRING) + .setMode(Mode.REQUIRED) + .build()); // 16 + fields.add( + Field.newBuilder("SPECIFIC_NAME", StandardSQLTypeName.STRING) + .setMode(Mode.REQUIRED) + .build()); // 17 + return Schema.of(fields); + } + + List listMatchingFunctionIdsFromDatasets( + List datasetsToScan, + String functionNamePattern, + Pattern functionNameRegex, + ExecutorService listRoutinesExecutor, + String catalogParam, + BigQueryJdbcCustomLogger logger) + throws InterruptedException { + + logger.fine( + String.format( + "Listing matching function IDs from %d datasets for catalog '%s'.", + datasetsToScan.size(), catalogParam)); + final List>> listRoutineFutures = new ArrayList<>(); + final List functionIdsToGet = Collections.synchronizedList(new ArrayList<>()); + + for (Dataset dataset : datasetsToScan) { + if (Thread.currentThread().isInterrupted()) { + logger.warning( + "Interrupted during submission of routine (function) listing tasks for catalog: " + + catalogParam); + throw new InterruptedException("Interrupted 
while listing functions"); + } + final DatasetId currentDatasetId = dataset.getDatasetId(); + Callable> listCallable = + () -> + findMatchingBigQueryObjects( + "Routine", + () -> + bigquery.listRoutines( + currentDatasetId, RoutineListOption.pageSize(DEFAULT_PAGE_SIZE)), + (name) -> + bigquery.getRoutine( + RoutineId.of( + currentDatasetId.getProject(), currentDatasetId.getDataset(), name)), + (rt) -> rt.getRoutineId().getRoutine(), + functionNamePattern, + functionNameRegex, + logger); + listRoutineFutures.add(listRoutinesExecutor.submit(listCallable)); + } + logger.fine( + "Submitted " + + listRoutineFutures.size() + + " routine (function) list tasks for catalog: " + + catalogParam); + + for (Future> listFuture : listRoutineFutures) { + if (Thread.currentThread().isInterrupted()) { + logger.warning( + "Interrupted while collecting routine (function) list results for catalog: " + + catalogParam); + listRoutineFutures.forEach(f -> f.cancel(true)); + throw new InterruptedException("Interrupted while collecting function lists"); + } + try { + List listedRoutines = listFuture.get(); + if (listedRoutines != null) { + for (Routine listedRoutine : listedRoutines) { + if (listedRoutine != null + && ("SCALAR_FUNCTION".equalsIgnoreCase(listedRoutine.getRoutineType()) + || "TABLE_FUNCTION".equalsIgnoreCase(listedRoutine.getRoutineType()))) { + if (listedRoutine.getRoutineId() != null) { + functionIdsToGet.add(listedRoutine.getRoutineId()); + } else { + logger.warning( + "Found a function type routine with a null ID during listing phase for catalog:" + + " " + + catalogParam); + } + } + } + } + } catch (ExecutionException e) { + logger.warning( + "Error getting routine (function) list result for catalog " + + catalogParam + + ": " + + e.getCause()); + } catch (CancellationException e) { + logger.warning("Routine (function) list task cancelled for catalog: " + catalogParam); + } + } + logger.info( + String.format( + "Found %d function IDs to fetch details for in catalog 
'%s'.", + functionIdsToGet.size(), catalogParam)); + return functionIdsToGet; + } + + void submitFunctionParameterProcessingJobs( + List fullFunctions, + Pattern columnNameRegex, + List collectedResults, + FieldList resultSchemaFields, + ExecutorService processParamsExecutor, + List> outParameterProcessingFutures, + BigQueryJdbcCustomLogger logger) + throws InterruptedException { + logger.fine( + String.format( + "Submitting parameter processing jobs for %d functions.", fullFunctions.size())); + + for (Routine fullFunction : fullFunctions) { + if (Thread.currentThread().isInterrupted()) { + logger.warning("Interrupted during submission of function parameter processing tasks."); + throw new InterruptedException( + "Interrupted while submitting function parameter processing jobs"); + } + if (fullFunction != null) { + String routineType = fullFunction.getRoutineType(); + if ("SCALAR_FUNCTION".equalsIgnoreCase(routineType) + || "TABLE_FUNCTION".equalsIgnoreCase(routineType)) { + final Routine finalFullFunction = fullFunction; + Future processFuture = + processParamsExecutor.submit( + () -> + processFunctionParametersAndReturnValue( + finalFullFunction, + columnNameRegex, + collectedResults, + resultSchemaFields)); + outParameterProcessingFutures.add(processFuture); + } else { + logger.warning( + "Routine " + + (fullFunction.getRoutineId() != null + ? fullFunction.getRoutineId().toString() + : "UNKNOWN_ID") + + " fetched for getFunctionColumns was not of a function type (Type: " + + routineType + + "). Skipping parameter processing."); + } + } + } + logger.fine( + "Finished submitting " + + outParameterProcessingFutures.size() + + " processFunctionParametersAndReturnValue tasks."); + } + + void processFunctionParametersAndReturnValue( + Routine routine, + Pattern columnNameRegex, + List collectedResults, + FieldList resultSchemaFields) { + RoutineId routineId = routine.getRoutineId(); + if (routineId == null) { + LOG.warning("Processing a routine with a null ID. 
Skipping."); + return; + } + LOG.finer("Processing function parameters and return value for: " + routineId); + + String functionCatalog = routineId.getProject(); + String functionSchema = routineId.getDataset(); + String functionName = routineId.getRoutine(); + String specificName = functionName; + + if (routine.getReturnTableType() != null) { + StandardSQLTableType returnTableType = routine.getReturnTableType(); + if (returnTableType != null && returnTableType.getColumns() != null) { + List tableColumns = returnTableType.getColumns(); + for (int i = 0; i < tableColumns.size(); i++) { + StandardSQLField tableColumn = tableColumns.get(i); + String columnName = tableColumn.getName(); + if (columnNameRegex != null + && (columnName == null || !columnNameRegex.matcher(columnName).matches())) { + continue; + } + List rowValues = + createFunctionColumnRow( + functionCatalog, + functionSchema, + functionName, + specificName, + columnName, + DatabaseMetaData.functionColumnResult, + tableColumn.getDataType(), + i + 1); + collectedResults.add(FieldValueList.of(rowValues, resultSchemaFields)); + } + } + } + + List arguments = routine.getArguments(); + if (arguments != null) { + for (int i = 0; i < arguments.size(); i++) { + RoutineArgument arg = arguments.get(i); + String argName = arg.getName(); + + if (columnNameRegex != null + && (argName == null || !columnNameRegex.matcher(argName).matches())) { + continue; + } + + short columnType; + String originalMode = arg.getMode(); + + if ("IN".equalsIgnoreCase(originalMode)) { + columnType = DatabaseMetaData.functionColumnIn; + } else if ("OUT".equalsIgnoreCase(originalMode)) { + columnType = DatabaseMetaData.functionColumnOut; + } else if ("INOUT".equalsIgnoreCase(originalMode)) { + columnType = DatabaseMetaData.functionColumnInOut; + } else { + columnType = DatabaseMetaData.functionColumnUnknown; + } + + List rowValues = + createFunctionColumnRow( + functionCatalog, + functionSchema, + functionName, + specificName, + argName, + 
columnType, + arg.getDataType(), + i + 1); + collectedResults.add(FieldValueList.of(rowValues, resultSchemaFields)); + } + } + } + + List createFunctionColumnRow( + String functionCatalog, + String functionSchema, + String functionName, + String specificName, + String columnName, + int columnType, + StandardSQLDataType dataType, + int ordinalPosition) { + + List values = new ArrayList<>(17); + ColumnTypeInfo typeInfo = + determineTypeInfoFromDataType(dataType, functionName, columnName, ordinalPosition); + + values.add(createStringFieldValue(functionCatalog)); // 1. FUNCTION_CAT + values.add(createStringFieldValue(functionSchema)); // 2. FUNCTION_SCHEM + values.add(createStringFieldValue(functionName)); // 3. FUNCTION_NAME + values.add(createStringFieldValue(columnName)); // 4. COLUMN_NAME + values.add(createLongFieldValue((long) columnType)); // 5. COLUMN_TYPE + + values.add(createLongFieldValue((long) typeInfo.jdbcType)); // 6. DATA_TYPE + values.add(createStringFieldValue(typeInfo.typeName)); // 7. TYPE_NAME + values.add( + createLongFieldValue( + typeInfo.columnSize == null ? null : typeInfo.columnSize.longValue())); // 8. PRECISION + if (typeInfo.typeName != null + && (typeInfo.typeName.equalsIgnoreCase("STRING") + || typeInfo.typeName.equalsIgnoreCase("NVARCHAR") + || typeInfo.typeName.equalsIgnoreCase("BYTES") + || typeInfo.typeName.equalsIgnoreCase("VARBINARY"))) { + values.add( + createLongFieldValue( + typeInfo.columnSize == null ? null : typeInfo.columnSize.longValue())); // 9. LENGTH + values.add( + createLongFieldValue( + typeInfo.decimalDigits == null + ? null + : typeInfo.decimalDigits.longValue())); // 10. SCALE + values.add( + createLongFieldValue( + typeInfo.numPrecRadix == null + ? null + : typeInfo.numPrecRadix.longValue())); // 11. RADIX + values.add( + createLongFieldValue((long) DatabaseMetaData.functionNullableUnknown)); // 12. NULLABLE + values.add(createStringFieldValue(null)); // 13. 
REMARKS + values.add( + createLongFieldValue( + typeInfo.columnSize == null + ? null + : typeInfo.columnSize.longValue())); // 14. CHAR_OCTET_LENGTH + } else { + values.add(createNullFieldValue()); // 9. LENGTH + values.add( + createLongFieldValue( + typeInfo.decimalDigits == null + ? null + : typeInfo.decimalDigits.longValue())); // 10. SCALE + values.add( + createLongFieldValue( + typeInfo.numPrecRadix == null + ? null + : typeInfo.numPrecRadix.longValue())); // 11. RADIX + values.add( + createLongFieldValue((long) DatabaseMetaData.functionNullableUnknown)); // 12. NULLABLE + values.add(createStringFieldValue(null)); // 13. REMARKS + values.add(createNullFieldValue()); // 14. CHAR_OCTET_LENGTH + } + + values.add(createLongFieldValue((long) ordinalPosition)); // 15. ORDINAL_POSITION + values.add(createStringFieldValue("")); // 16. IS_NULLABLE + values.add(createStringFieldValue(specificName)); // 17. SPECIFIC_NAME + + return values; + } + + Comparator defineGetFunctionColumnsComparator(FieldList resultSchemaFields) { + final int FUNC_CAT_IDX = resultSchemaFields.getIndex("FUNCTION_CAT"); + final int FUNC_SCHEM_IDX = resultSchemaFields.getIndex("FUNCTION_SCHEM"); + final int FUNC_NAME_IDX = resultSchemaFields.getIndex("FUNCTION_NAME"); + final int SPEC_NAME_IDX = resultSchemaFields.getIndex("SPECIFIC_NAME"); + final int ORDINAL_POS_IDX = resultSchemaFields.getIndex("ORDINAL_POSITION"); + + return Comparator.comparing( + (FieldValueList fvl) -> getStringValueOrNull(fvl, FUNC_CAT_IDX), + Comparator.nullsFirst(String::compareToIgnoreCase)) + .thenComparing( + (FieldValueList fvl) -> getStringValueOrNull(fvl, FUNC_SCHEM_IDX), + Comparator.nullsFirst(String::compareToIgnoreCase)) + .thenComparing( + (FieldValueList fvl) -> getStringValueOrNull(fvl, FUNC_NAME_IDX), + Comparator.nullsFirst(String::compareToIgnoreCase)) + .thenComparing( + (FieldValueList fvl) -> getStringValueOrNull(fvl, SPEC_NAME_IDX), + Comparator.nullsFirst(String::compareToIgnoreCase)) + 
.thenComparing( + (FieldValueList fvl) -> getLongValueOrNull(fvl, ORDINAL_POS_IDX), + Comparator.nullsFirst(Long::compareTo)); + } + + @Override + public ResultSet getPseudoColumns( + String catalog, String schemaPattern, String tableNamePattern, String columnNamePattern) { + LOG.info( + String.format( + "getPseudoColumns called for catalog: %s, schemaPattern: %s, tableNamePattern: %s," + + " columnNamePattern: %s. Pseudo columns not supported by BigQuery; returning" + + " empty ResultSet.", + catalog, schemaPattern, tableNamePattern, columnNamePattern)); + + final Schema resultSchema = defineGetPseudoColumnsSchema(); + final FieldList resultSchemaFields = resultSchema.getFields(); + final BlockingQueue queue = new LinkedBlockingQueue<>(1); + signalEndOfData(queue, resultSchemaFields); + + return BigQueryJsonResultSet.of(resultSchema, 0, queue, this.statement, null); + } + + Schema defineGetPseudoColumnsSchema() { + List fields = new ArrayList<>(12); + fields.add( + Field.newBuilder("TABLE_CAT", StandardSQLTypeName.STRING) + .setMode(Field.Mode.NULLABLE) + .build()); + fields.add( + Field.newBuilder("TABLE_SCHEM", StandardSQLTypeName.STRING) + .setMode(Field.Mode.NULLABLE) + .build()); + fields.add( + Field.newBuilder("TABLE_NAME", StandardSQLTypeName.STRING) + .setMode(Field.Mode.REQUIRED) + .build()); + fields.add( + Field.newBuilder("COLUMN_NAME", StandardSQLTypeName.STRING) + .setMode(Field.Mode.REQUIRED) + .build()); + fields.add( + Field.newBuilder("DATA_TYPE", StandardSQLTypeName.INT64) + .setMode(Field.Mode.REQUIRED) + .build()); + fields.add( + Field.newBuilder("COLUMN_SIZE", StandardSQLTypeName.INT64) + .setMode(Field.Mode.REQUIRED) + .build()); + fields.add( + Field.newBuilder("DECIMAL_DIGITS", StandardSQLTypeName.INT64) + .setMode(Field.Mode.NULLABLE) + .build()); + fields.add( + Field.newBuilder("NUM_PREC_RADIX", StandardSQLTypeName.INT64) + .setMode(Field.Mode.NULLABLE) + .build()); + fields.add( + Field.newBuilder("COLUMN_USAGE", 
StandardSQLTypeName.STRING) + .setMode(Field.Mode.REQUIRED) + .build()); + fields.add( + Field.newBuilder("REMARKS", StandardSQLTypeName.STRING) + .setMode(Field.Mode.NULLABLE) + .build()); + fields.add( + Field.newBuilder("CHAR_OCTET_LENGTH", StandardSQLTypeName.INT64) + .setMode(Field.Mode.NULLABLE) + .build()); + fields.add( + Field.newBuilder("IS_NULLABLE", StandardSQLTypeName.STRING) + .setMode(Field.Mode.REQUIRED) + .build()); + return Schema.of(fields); + } + + @Override + public boolean generatedKeyAlwaysReturned() { + return false; + } + + @Override + public T unwrap(Class iface) { + return null; + } + + @Override + public boolean isWrapperFor(Class iface) { + return false; + } + + // --- Helper Methods --- + + /** + * Determines the effective catalog and schema pattern to use for metadata retrieval. + * + *

This method applies the logic for the {@code FilterTablesOnDefaultDataset} connection + * property. If this property is enabled and the provided {@code catalog} or {@code schemaPattern} + * are null, empty, or wildcard ('%'), they may be overridden by the default catalog (project) and + * default dataset (schema) configured in the {@link BigQueryConnection}. + * + * @param catalog The catalog name provided by the user; may be {@code null}. + * @param schemaPattern The schema name pattern provided by the user; may be {@code null}. + * @return A {@link Tuple} where {@code Tuple.x()} is the effective catalog string and {@code + * Tuple.y()} is the effective schema pattern string. These are the values that should be used + * for querying BigQuery's metadata. + * @see BigQueryConnection#isFilterTablesOnDefaultDataset() + */ + private Tuple determineEffectiveCatalogAndSchema( + String catalog, String schemaPattern) { + String effectiveCatalog = catalog; + String effectiveSchemaPattern = schemaPattern; + + if (this.connection.isFilterTablesOnDefaultDataset() + && this.connection.getDefaultDataset() != null + && this.connection.getDefaultDataset().getDataset() != null + && !this.connection.getDefaultDataset().getDataset().isEmpty()) { + + String defaultProjectFromConnection = this.connection.getCatalog(); + // We only use the dataset part of the DefaultDataset for schema filtering + String defaultSchemaFromConnection = this.connection.getDefaultDataset().getDataset(); + + boolean catalogIsNullOrEmptyOrWildcard = + (catalog == null || catalog.isEmpty() || catalog.equals("%")); + boolean schemaPatternIsNullOrEmptyOrWildcard = + (schemaPattern == null || schemaPattern.isEmpty() || schemaPattern.equals("%")); + + final String logPrefix = "FilterTablesOnDefaultDatasetTrue: "; + if (catalogIsNullOrEmptyOrWildcard && schemaPatternIsNullOrEmptyOrWildcard) { + effectiveCatalog = defaultProjectFromConnection; + effectiveSchemaPattern = defaultSchemaFromConnection; + LOG.info( + 
String.format( + logPrefix + "Using default catalog '%s' and default dataset '%s'.", + effectiveCatalog, + effectiveSchemaPattern)); + } else if (catalogIsNullOrEmptyOrWildcard) { + effectiveCatalog = defaultProjectFromConnection; + LOG.info( + String.format( + logPrefix + + "Using default catalog '%s' with user dataset '%s'. Default dataset '%s' ignored.", + effectiveCatalog, + effectiveSchemaPattern, + defaultSchemaFromConnection)); + } else if (schemaPatternIsNullOrEmptyOrWildcard) { + effectiveSchemaPattern = defaultSchemaFromConnection; + LOG.info( + String.format( + logPrefix + "Using user catalog '%s' and default dataset '%s'.", + effectiveCatalog, + effectiveSchemaPattern)); + } else { + LOG.info( + String.format( + logPrefix + + "Using user catalog '%s' and schema '%s'. Default dataset '%s' ignored.", + effectiveCatalog, + effectiveSchemaPattern, + defaultSchemaFromConnection)); + } + } + return Tuple.of(effectiveCatalog, effectiveSchemaPattern); + } + + private ColumnTypeInfo getColumnTypeInfoForSqlType(StandardSQLTypeName bqType) { + if (bqType == null) { + LOG.warning("Null BigQuery type encountered: " + bqType.name() + ". 
Mapping to VARCHAR."); + return new ColumnTypeInfo(Types.VARCHAR, bqType.name(), null, null, null); + } + + switch (bqType) { + case INT64: + return new ColumnTypeInfo(Types.BIGINT, "BIGINT", 19, 0, 10); + case BOOL: + return new ColumnTypeInfo(Types.BOOLEAN, "BOOLEAN", 1, null, null); + case FLOAT64: + return new ColumnTypeInfo(Types.DOUBLE, "DOUBLE", 15, null, 10); + case NUMERIC: + return new ColumnTypeInfo(Types.NUMERIC, "NUMERIC", 38, 9, 10); + case BIGNUMERIC: + return new ColumnTypeInfo(Types.NUMERIC, "NUMERIC", 77, 38, 10); + case STRING: + return new ColumnTypeInfo(Types.NVARCHAR, "NVARCHAR", null, null, null); + case TIMESTAMP: + case DATETIME: + return new ColumnTypeInfo(Types.TIMESTAMP, "TIMESTAMP", 29, null, null); + case DATE: + return new ColumnTypeInfo(Types.DATE, "DATE", 10, null, null); + case TIME: + return new ColumnTypeInfo(Types.TIME, "TIME", 15, null, null); + case GEOGRAPHY: + case JSON: + case INTERVAL: + return new ColumnTypeInfo(Types.VARCHAR, "VARCHAR", null, null, null); + case BYTES: + return new ColumnTypeInfo(Types.VARBINARY, "VARBINARY", null, null, null); + case STRUCT: + return new ColumnTypeInfo(Types.STRUCT, "STRUCT", null, null, null); + default: + LOG.warning( + "Unknown BigQuery type encountered: " + bqType.name() + ". Mapping to VARCHAR."); + return new ColumnTypeInfo(Types.VARCHAR, bqType.name(), null, null, null); + } + } + + List findMatchingBigQueryObjects( + String objectTypeName, + Supplier> listAllOperation, + Function getSpecificOperation, + Function nameExtractor, + String pattern, + Pattern regex, + BigQueryJdbcCustomLogger logger) { + + boolean needsList = needsListing(pattern); + List resultList = new ArrayList<>(); + + try { + Iterable objects; + if (needsList) { + logger.info( + String.format( + "Listing all %ss (pattern: %s)...", + objectTypeName, pattern == null ? 
"" : pattern)); + Page firstPage = listAllOperation.get(); + objects = firstPage.iterateAll(); + logger.fine( + String.format( + "Retrieved initial %s list, iterating & filtering if needed...", objectTypeName)); + + } else { + logger.info(String.format("Getting specific %s: '%s'", objectTypeName, pattern)); + T specificObject = getSpecificOperation.apply(pattern); + objects = + (specificObject == null) + ? Collections.emptyList() + : Collections.singletonList(specificObject); + if (specificObject == null) { + logger.info(String.format("Specific %s not found: '%s'", objectTypeName, pattern)); + } + } + + boolean wasListing = needsList; + for (T obj : objects) { + if (Thread.currentThread().isInterrupted()) { + logger.warning("Thread interrupted during " + objectTypeName + " processing loop."); + throw new InterruptedException( + "Interrupted during " + objectTypeName + " processing loop"); + } + if (obj != null) { + if (wasListing && regex != null) { + String name = nameExtractor.apply(obj); + if (name != null && regex.matcher(name).matches()) { + resultList.add(obj); + } + } else { + resultList.add(obj); + } + } + } + + } catch (BigQueryException e) { + if (!needsList && e.getCode() == 404) { + logger.info(String.format("%s '%s' not found (API error 404).", objectTypeName, pattern)); + } else { + logger.warning( + String.format( + "BigQueryException finding %ss for pattern '%s': %s (Code: %d)", + objectTypeName, pattern, e.getMessage(), e.getCode())); + } + } catch (InterruptedException e) { + Thread.currentThread().interrupt(); + logger.warning("Interrupted while finding " + objectTypeName + "s."); + } catch (Exception e) { + logger.severe( + String.format( + "Unexpected exception finding %ss for pattern '%s': %s", + objectTypeName, pattern, e.getMessage())); + } + return resultList; + } + + private static class TypeInfoRowData { + String typeName; + int jdbcType; + Long precision; + String literalPrefix; + String literalSuffix; + String createParams; + int 
nullable; + boolean caseSensitive; + int searchable; + boolean unsignedAttribute; + boolean fixedPrecScale; + boolean autoIncrement; + String localTypeName; + Long minimumScale; + Long maximumScale; + Long numPrecRadix; + + TypeInfoRowData( + String typeName, + int jdbcType, + Long precision, + String literalPrefix, + String literalSuffix, + String createParams, + int nullable, + boolean caseSensitive, + int searchable, + boolean unsignedAttribute, + boolean fixedPrecScale, + boolean autoIncrement, + String localTypeName, + Long minimumScale, + Long maximumScale, + Long numPrecRadix) { + this.typeName = typeName; + this.jdbcType = jdbcType; + this.precision = precision; + this.literalPrefix = literalPrefix; + this.literalSuffix = literalSuffix; + this.createParams = createParams; + this.nullable = nullable; + this.caseSensitive = caseSensitive; + this.searchable = searchable; + this.unsignedAttribute = unsignedAttribute; + this.fixedPrecScale = fixedPrecScale; + this.autoIncrement = autoIncrement; + this.localTypeName = localTypeName; + this.minimumScale = minimumScale; + this.maximumScale = maximumScale; + this.numPrecRadix = numPrecRadix; + } + } + + void sortResults( + List collectedResults, + Comparator comparator, + String operationName, + BigQueryJdbcCustomLogger logger) { + + if (collectedResults == null || collectedResults.isEmpty()) { + logger.info(String.format("No results collected for %s, skipping sort.", operationName)); + return; + } + if (comparator == null) { + logger.info(String.format("No comparator provided for %s, skipping sort.", operationName)); + return; + } + + logger.info( + String.format( + "Sorting %d collected %s results...", collectedResults.size(), operationName)); + try { + collectedResults.sort(comparator); + logger.info(String.format("%s result sorting completed.", operationName)); + } catch (Exception e) { + logger.severe( + String.format("Error during sorting %s results: %s", operationName, e.getMessage())); + } + } + + private 
List defineBasePrivilegeFields() { + List fields = new ArrayList<>(7); + fields.add( + Field.newBuilder("TABLE_CAT", StandardSQLTypeName.STRING) + .setMode(Field.Mode.NULLABLE) + .build()); + fields.add( + Field.newBuilder("TABLE_SCHEM", StandardSQLTypeName.STRING) + .setMode(Field.Mode.NULLABLE) + .build()); + fields.add( + Field.newBuilder("TABLE_NAME", StandardSQLTypeName.STRING) + .setMode(Field.Mode.REQUIRED) + .build()); + fields.add( + Field.newBuilder("GRANTOR", StandardSQLTypeName.STRING) + .setMode(Field.Mode.NULLABLE) + .build()); + fields.add( + Field.newBuilder("GRANTEE", StandardSQLTypeName.STRING) + .setMode(Field.Mode.REQUIRED) + .build()); + fields.add( + Field.newBuilder("PRIVILEGE", StandardSQLTypeName.STRING) + .setMode(Field.Mode.REQUIRED) + .build()); + fields.add( + Field.newBuilder("IS_GRANTABLE", StandardSQLTypeName.STRING) + .setMode(Field.Mode.NULLABLE) + .build()); + return fields; + } + + Pattern compileSqlLikePattern(String sqlLikePattern) { + if (sqlLikePattern == null) { + return null; + } + if (sqlLikePattern.isEmpty()) { + return Pattern.compile("(?!)"); + } + StringBuilder regex = new StringBuilder(sqlLikePattern.length() * 2); + regex.append('^'); + for (int i = 0; i < sqlLikePattern.length(); i++) { + char c = sqlLikePattern.charAt(i); + switch (c) { + case '%': + regex.append(".*"); + break; + case '_': + regex.append('.'); + break; + case '\\': + case '.': + case '[': + case ']': + case '(': + case ')': + case '{': + case '}': + case '*': + case '+': + case '?': + case '^': + case '$': + case '|': + regex.append('\\').append(c); + break; + default: + regex.append(c); + break; + } + } + regex.append('$'); + return Pattern.compile(regex.toString(), Pattern.CASE_INSENSITIVE); + } + + boolean needsListing(String pattern) { + return pattern == null || pattern.contains("%") || pattern.contains("_"); + } + + FieldValue createStringFieldValue(String value) { + return FieldValue.of(FieldValue.Attribute.PRIMITIVE, value); + } + + 
FieldValue createLongFieldValue(Long value) { + return (value == null) + ? FieldValue.of(FieldValue.Attribute.PRIMITIVE, null) + : FieldValue.of(FieldValue.Attribute.PRIMITIVE, String.valueOf(value)); + } + + FieldValue createNullFieldValue() { + return FieldValue.of(FieldValue.Attribute.PRIMITIVE, null); + } + + FieldValue createBooleanFieldValue(Boolean value) { + return (value == null) + ? FieldValue.of(FieldValue.Attribute.PRIMITIVE, null) + : FieldValue.of(FieldValue.Attribute.PRIMITIVE, value ? "1" : "0"); + } + + private String getStringValueOrNull(FieldValueList fvl, int index) { + if (fvl == null || index < 0 || index >= fvl.size()) return null; + FieldValue fv = fvl.get(index); + return (fv == null || fv.isNull()) ? null : fv.getStringValue(); + } + + private Long getLongValueOrNull(FieldValueList fvl, int index) { + if (fvl == null || index < 0 || index >= fvl.size()) return null; + FieldValue fv = fvl.get(index); + try { + return (fv == null || fv.isNull()) ? null : fv.getLongValue(); + } catch (NumberFormatException e) { + LOG.warning("Could not parse Long value for index " + index); + return null; + } + } + + private void waitForTasksCompletion(List> taskFutures) { + LOG.info(String.format("Waiting for %d submitted tasks to complete...", taskFutures.size())); + for (Future future : taskFutures) { + try { + if (!future.isCancelled()) { + future.get(); + } + } catch (CancellationException e) { + LOG.warning("A table processing task was cancelled."); + } catch (ExecutionException e) { + LOG.severe( + String.format( + "Error executing table processing task: %s", + (e.getCause() != null ? e.getCause().getMessage() : e.getMessage()))); + } catch (InterruptedException e) { + Thread.currentThread().interrupt(); + LOG.warning( + "Fetcher thread interrupted while waiting for tasks. 
Attempting to cancel remaining" + + " tasks."); + taskFutures.forEach(f -> f.cancel(true)); + break; + } + } + LOG.info("Finished waiting for tasks."); + } + + private void populateQueue( + List collectedResults, + BlockingQueue queue, + FieldList resultSchemaFields) { + LOG.info(String.format("Populating queue with %d results...", collectedResults.size())); + try { + for (FieldValueList sortedRow : collectedResults) { + if (Thread.currentThread().isInterrupted()) { + LOG.warning("Interrupted during queue population."); + break; + } + queue.put(BigQueryFieldValueListWrapper.of(resultSchemaFields, sortedRow)); + } + LOG.info("Finished populating queue."); + } catch (InterruptedException e) { + Thread.currentThread().interrupt(); + LOG.warning("Interrupted while putting row onto queue."); + } catch (Exception e) { + LOG.severe("Unexpected error populating queue: " + e.getMessage()); + } + } + + private void signalEndOfData( + BlockingQueue queue, FieldList resultSchemaFields) { + try { + LOG.info("Adding end signal to queue."); + queue.put(BigQueryFieldValueListWrapper.of(resultSchemaFields, null, true)); + } catch (InterruptedException e) { + Thread.currentThread().interrupt(); + LOG.warning("Interrupted while sending end signal to queue."); + } catch (Exception e) { + LOG.severe("Exception while sending end signal to queue: " + e.getMessage()); + } + } + + private void shutdownExecutor(ExecutorService executor) { + if (executor == null || executor.isShutdown()) { + return; + } + LOG.info("Shutting down column executor service..."); + executor.shutdown(); + try { + if (!executor.awaitTermination(10, TimeUnit.SECONDS)) { + LOG.warning("Executor did not terminate gracefully after 10s, forcing shutdownNow()."); + List droppedTasks = executor.shutdownNow(); + LOG.warning( + "Executor shutdownNow() initiated. 
Dropped tasks count: " + droppedTasks.size()); + if (!executor.awaitTermination(10, TimeUnit.SECONDS)) { + LOG.severe("Executor did not terminate even after shutdownNow()."); + } + } + LOG.info("Executor shutdown complete."); + } catch (InterruptedException ie) { + LOG.warning( + "Interrupted while waiting for executor termination. Forcing shutdownNow() again."); + executor.shutdownNow(); + Thread.currentThread().interrupt(); + } + } + + private String getCurrentCatalogName() { + return this.connection.getCatalog(); + } + + private List getAccessibleCatalogNames() { + Set accessibleCatalogs = new HashSet<>(); + String primaryCatalog = getCurrentCatalogName(); + if (primaryCatalog != null && !primaryCatalog.isEmpty()) { + accessibleCatalogs.add(primaryCatalog); + } + + List additionalProjects = this.connection.getAdditionalProjects(); + if (additionalProjects != null) { + for (String project : additionalProjects) { + if (project != null && !project.isEmpty()) { + accessibleCatalogs.add(project); + } + } + } + + List sortedCatalogs = new ArrayList<>(accessibleCatalogs); + Collections.sort(sortedCatalogs); + return sortedCatalogs; + } + + static String readSqlFromFile(String filename) { + InputStream in; + in = BigQueryDatabaseMetaData.class.getResourceAsStream(filename); + BufferedReader reader = new BufferedReader(new InputStreamReader(in)); + StringBuilder builder = new StringBuilder(); + try (Scanner scanner = new Scanner(reader)) { + while (scanner.hasNextLine()) { + String line = scanner.nextLine(); + builder.append(line).append("\n"); + } + } + return builder.toString(); + } + + String replaceSqlParameters(String sql, String... 
params) throws SQLException { + return String.format(sql, (Object[]) params); + } + + private void loadDriverVersionProperties() { + if (parsedDriverVersion.get() != null) { + return; + } + Properties props = new Properties(); + try (InputStream input = + getClass().getResourceAsStream("/com/google/cloud/bigquery/jdbc/dependencies.properties")) { + if (input == null) { + String errorMessage = + "Could not find dependencies.properties. Driver version information is unavailable."; + LOG.severe(errorMessage); + throw new IllegalStateException(errorMessage); + } + props.load(input); + String versionString = props.getProperty("version.jdbc"); + if (versionString == null || versionString.trim().isEmpty()) { + String errorMessage = + "The property version.jdbc not found or empty in dependencies.properties."; + LOG.severe(errorMessage); + throw new IllegalStateException(errorMessage); + } + parsedDriverVersion.compareAndSet(null, versionString.trim()); + String[] parts = versionString.split("\\."); + if (parts.length < 2) { + return; + } + parsedDriverMajorVersion.compareAndSet(null, Integer.parseInt(parts[0])); + String minorPart = parts[1]; + String numericMinor = minorPart.replaceAll("[^0-9].*", ""); + if (!numericMinor.isEmpty()) { + parsedDriverMinorVersion.compareAndSet(null, Integer.parseInt(numericMinor)); + } + } catch (IOException | NumberFormatException e) { + String errorMessage = + "Error reading dependencies.properties. Driver version information is" + + " unavailable. 
Error: " + + e.getMessage(); + LOG.severe(errorMessage); + throw new IllegalStateException(errorMessage, e); + } + } +} diff --git a/google-cloud-bigquery-jdbc/src/main/java/com/google/cloud/bigquery/jdbc/BigQueryDefaultCoercions.java b/google-cloud-bigquery-jdbc/src/main/java/com/google/cloud/bigquery/jdbc/BigQueryDefaultCoercions.java new file mode 100644 index 000000000..324888982 --- /dev/null +++ b/google-cloud-bigquery-jdbc/src/main/java/com/google/cloud/bigquery/jdbc/BigQueryDefaultCoercions.java @@ -0,0 +1,102 @@ +/* + * Copyright 2023 Google LLC + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package com.google.cloud.bigquery.jdbc; + +import com.google.api.core.InternalApi; +import java.math.BigDecimal; +import java.math.BigInteger; +import java.math.RoundingMode; + +/** A registry of all the inbuilt {@link BigQueryCoercion}s that framework offers by default. */ +@InternalApi +class BigQueryDefaultCoercions { + + /** + * Creates a {@link BigQueryTypeCoercerBuilder} with all the inbuilt {@link BigQueryCoercion}s. + *

  • {@link BigQueryTypeCoercer#INSTANCE} uses this builder to populate itself with all the + * default {@link BigQueryCoercion}s. + *
  • A {@link BigQueryTypeCoercerBuilder} created through {@link BigQueryTypeCoercer#builder()} + * method also contains all these default {@link BigQueryCoercion}s + */ + static BigQueryTypeCoercerBuilder builder() { + BigQueryTypeCoercerBuilder builder = new BigQueryTypeCoercerBuilder(); + + // TODO: can we figure out the class parameters from coercion itself? + builder.registerTypeCoercion( + (String s) -> s != null && ("1".equals(s.trim()) || Boolean.parseBoolean(s)), + String.class, + Boolean.class); + builder.registerTypeCoercion(Integer::parseInt, String.class, Integer.class); + builder.registerTypeCoercion(BigInteger::new, String.class, BigInteger.class); + builder.registerTypeCoercion(Long::valueOf, String.class, Long.class); + builder.registerTypeCoercion(Double::valueOf, String.class, Double.class); + builder.registerTypeCoercion(BigDecimal::new, String.class, BigDecimal.class); + + builder.registerTypeCoercion((b) -> b ? 1 : 0, Boolean.class, Integer.class); + + builder.registerTypeCoercion(Integer::intValue, Integer.class, Integer.class); + builder.registerTypeCoercion(Integer::shortValue, Integer.class, Short.class); + builder.registerTypeCoercion(Integer::byteValue, Integer.class, Byte.class); + builder.registerTypeCoercion(Integer::doubleValue, Integer.class, Double.class); + builder.registerTypeCoercion(Integer::floatValue, Integer.class, Float.class); + + builder.registerTypeCoercion(Long::intValue, Long.class, Integer.class); + builder.registerTypeCoercion(Long::shortValue, Long.class, Short.class); + builder.registerTypeCoercion(Long::byteValue, Long.class, Byte.class); + builder.registerTypeCoercion(Long::doubleValue, Long.class, Double.class); + builder.registerTypeCoercion(Long::floatValue, Long.class, Float.class); + + builder.registerTypeCoercion(Double::floatValue, Double.class, Float.class); + builder.registerTypeCoercion(Double::longValue, Double.class, Long.class); + builder.registerTypeCoercion(Double::intValue, Double.class, 
Integer.class); + builder.registerTypeCoercion(Double::shortValue, Double.class, Short.class); + builder.registerTypeCoercion(Double::byteValue, Double.class, Byte.class); + builder.registerTypeCoercion(BigDecimal::valueOf, Double.class, BigDecimal.class); + + builder.registerTypeCoercion(Float::intValue, Float.class, Integer.class); + builder.registerTypeCoercion(Float::byteValue, Float.class, Byte.class); + builder.registerTypeCoercion(Float::shortValue, Float.class, Short.class); + builder.registerTypeCoercion(Float::doubleValue, Float.class, Double.class); + + builder.registerTypeCoercion(BigInteger::longValue, BigInteger.class, Long.class); + builder.registerTypeCoercion(BigDecimal::new, BigInteger.class, BigDecimal.class); + + builder.registerTypeCoercion(BigDecimal::doubleValue, BigDecimal.class, Double.class); + builder.registerTypeCoercion(BigDecimal::toBigInteger, BigDecimal.class, BigInteger.class); + builder.registerTypeCoercion( + bigDecimal -> bigDecimal.setScale(0, RoundingMode.DOWN).intValueExact(), + BigDecimal.class, + Integer.class); + builder.registerTypeCoercion( + bigDecimal -> bigDecimal.setScale(0, RoundingMode.DOWN).longValueExact(), + BigDecimal.class, + Long.class); + builder.registerTypeCoercion( + bigDecimal -> bigDecimal.setScale(0, RoundingMode.DOWN).shortValueExact(), + BigDecimal.class, + Short.class); + builder.registerTypeCoercion( + bigDecimal -> bigDecimal.setScale(0, RoundingMode.DOWN).byteValueExact(), + BigDecimal.class, + Byte.class); + builder.registerTypeCoercion(BigDecimal::floatValue, BigDecimal.class, Float.class); + + builder.registerTypeCoercion(unused -> false, Void.class, Boolean.class); + + return builder; + } +} diff --git a/google-cloud-bigquery-jdbc/src/main/java/com/google/cloud/bigquery/jdbc/BigQueryDriver.java b/google-cloud-bigquery-jdbc/src/main/java/com/google/cloud/bigquery/jdbc/BigQueryDriver.java new file mode 100644 index 000000000..2c7eba16c --- /dev/null +++ 
b/google-cloud-bigquery-jdbc/src/main/java/com/google/cloud/bigquery/jdbc/BigQueryDriver.java @@ -0,0 +1,249 @@ +/* + * Copyright 2023 Google LLC + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package com.google.cloud.bigquery.jdbc; + +import com.google.cloud.bigquery.exception.BigQueryJdbcException; +import io.grpc.LoadBalancerRegistry; +import io.grpc.internal.PickFirstLoadBalancerProvider; +import java.io.IOException; +import java.sql.Connection; +import java.sql.Driver; +import java.sql.DriverManager; +import java.sql.DriverPropertyInfo; +import java.sql.SQLException; +import java.util.ArrayList; +import java.util.List; +import java.util.Map; +import java.util.Properties; +import java.util.logging.Level; +import java.util.logging.Logger; + +/** + * JDBC {@link Driver} implementation for BigQuery. + * + *

    Usage: + * + *

    + *  String CONNECTION_URL = "jdbc:bigquery://https://www.googleapis.com/bigquery/v2:443"
    + *            + "ProjectId=test;OAuthType=3""
    + *  Connection connection = DriverManager.getConnection(CONNECTION_URL);
    + * 
    + * + *

    Valid URLs take the form: + * + *

    + *  jdbc:bigquery://{host}:{port};ProjectId={projectId};OAuthType={oAuthType};
    + *  {property1}={value1};{property2}={value2};...
    + * 
    + */ +public class BigQueryDriver implements Driver { + + private static final BigQueryJdbcCustomLogger LOG = + new BigQueryJdbcCustomLogger(BigQueryDriver.class.getName()); + // TODO: update this when JDBC goes GA + private static final int JDBC_MAJOR_VERSION = 0; + private static final int JDBC_MINOR_VERSION = 1; + static BigQueryDriver registeredBigqueryJdbcDriver; + + static { + try { + register(); + } catch (SQLException e) { + throw new ExceptionInInitializerError("Registering driver failed: " + e.getMessage()); + } + LoadBalancerRegistry.getDefaultRegistry().register(new PickFirstLoadBalancerProvider()); + } + + public BigQueryDriver() {} + + static void register() throws SQLException { + LOG.finest("++enter++"); + if (isRegistered()) { + LOG.warning("Driver is already registered. It can only be registered once."); + } + + DriverManager.registerDriver(LazyHolder.INSTANCE); + BigQueryDriver.registeredBigqueryJdbcDriver = LazyHolder.INSTANCE; + } + + static boolean isRegistered() { + LOG.finest("++enter++"); + return registeredBigqueryJdbcDriver != null; + } + + /** + * @return the registered JDBC driver for BigQuery. + * @throws IllegalStateException if the driver has not been registered. + */ + public static BigQueryDriver getRegisteredDriver() throws IllegalStateException { + LOG.finest("++enter++"); + if (isRegistered()) { + return registeredBigqueryJdbcDriver; + } + throw new IllegalStateException( + "Driver is not registered (or it has not been registered using Driver.register() method)"); + } + + /** + * Attempts to establish a BigQuery connection to the given URL, using the provided connection + * Properties. + * + *

    Valid URLs take the form: + * + *

    +   *  jdbc:bigquery://{host}:{port};ProjectId={projectId};OAuthType={oAuthType};
    +   *  {property1}={value1};{property2}={value2};...
    +   * 
    + * + * @param url the BigQuery URL to connect to + * @param info a list of arbitrary string tag/value pairs as connection arguments. + * @return A connection to the URL if it was established successfully, otherwise {@code null} + * @throws SQLException if driver fails to connect to clients. + * @see Driver#connect(String, Properties) + */ + @Override + public Connection connect(String url, Properties info) throws SQLException { + LOG.finest("++enter++"); + try { + if (acceptsURL(url)) { + // strip 'jdbc:' from the URL, add any extra properties + String connectionUri = + BigQueryJdbcUrlUtility.appendPropertiesToURL(url.substring(5), this.toString(), info); + + // LogLevel + String logLevelStr = + BigQueryJdbcUrlUtility.parseUriProperty( + connectionUri, BigQueryJdbcUrlUtility.LOG_LEVEL_PROPERTY_NAME); + if (logLevelStr == null) { + logLevelStr = System.getenv(BigQueryJdbcUrlUtility.LOG_LEVEL_ENV_VAR); + } + Level logLevel = BigQueryJdbcUrlUtility.parseLogLevel(logLevelStr); + + // LogPath + String logPath = + BigQueryJdbcUrlUtility.parseUriProperty( + connectionUri, BigQueryJdbcUrlUtility.LOG_PATH_PROPERTY_NAME); + if (logPath == null) { + logPath = System.getenv(BigQueryJdbcUrlUtility.LOG_PATH_ENV_VAR); + } + if (logPath == null) { + logPath = BigQueryJdbcUrlUtility.DEFAULT_LOG_PATH; + } + + BigQueryJdbcRootLogger.setLevel(logLevel, logPath); + // Logging starts from here. 
+ BigQueryConnection connection = new BigQueryConnection(connectionUri); + LOG.info( + String.format( + "Driver info : { {Database Product Name : %s}, " + + "{Database Product Version : %s}, " + + "{Driver Name : %s}, " + + "{Driver Version : %s}, " + + "{LogLevel : %s}, " + + "{LogPath : %s}, " + + "{Driver Instance : %s} }", + connection.getMetaData().getDatabaseProductName(), + connection.getMetaData().getDatabaseProductVersion(), + connection.getMetaData().getDriverName(), + connection.getMetaData().getDriverVersion(), + logLevel, + logPath, + this.toString())); + return connection; + } else { + throw new IllegalArgumentException( + "Invalid URL provided, must start with \"jdbc:bigquery:\""); + } + } catch (IOException e) { + LOG.warning("Getting a warning: " + e.getMessage()); + } + return null; + } + + /** + * @param url a JDBC connection URL + * @return True if the URL is non-empty and starts with "jdbc:bigquery" + * @see Driver#acceptsURL(String) + */ + @Override + public boolean acceptsURL(String url) throws SQLException { + LOG.finest("++enter++"); + if (url == null || url.isEmpty()) { + throw new BigQueryJdbcException("Connection URL is null."); + } + return url.startsWith("jdbc:bigquery:"); + } + + /** + * Gets information about the possible BigQuery JDBC Connection Properties. + * + * @param url the BigQuery connection URL + * @param info a proposed list BigQuery connection properties + * @return an array of {@code DriverPropertyInfo} objects describing possible properties. 
+ * @see Driver#getPropertyInfo(String, Properties) + */ + @Override + public DriverPropertyInfo[] getPropertyInfo(String url, Properties info) { + LOG.finest("++enter++"); + String connectionUri = + BigQueryJdbcUrlUtility.appendPropertiesToURL(url.substring(5), this.toString(), info); + List propertyInfoList = new ArrayList<>(); + + for (BigQueryConnectionProperty prop : BigQueryJdbcUrlUtility.VALID_PROPERTIES) { + DriverPropertyInfo driverProperty = + new DriverPropertyInfo( + prop.getName(), + BigQueryJdbcUrlUtility.parseUriProperty(connectionUri, prop.getName())); + driverProperty.description = prop.getDescription(); + propertyInfoList.add(driverProperty); + } + Map oAuthProperties = + BigQueryJdbcOAuthUtility.parseOAuthProperties(url, this.toString()); + for (Map.Entry authProperty : oAuthProperties.entrySet()) { + propertyInfoList.add(new DriverPropertyInfo(authProperty.getKey(), authProperty.getValue())); + } + return propertyInfoList.toArray(new DriverPropertyInfo[0]); + } + + @Override + public int getMajorVersion() { + LOG.finest("++enter++"); + return JDBC_MAJOR_VERSION; + } + + @Override + public int getMinorVersion() { + LOG.finest("++enter++"); + return JDBC_MINOR_VERSION; + } + + @Override + public boolean jdbcCompliant() { + LOG.finest("++enter++"); + return false; + } + + @Override + public Logger getParentLogger() { + LOG.finest("++enter++"); + return BigQueryJdbcRootLogger.getRootLogger(); + } + + private static class LazyHolder { + static final BigQueryDriver INSTANCE = new BigQueryDriver(); + } +} diff --git a/google-cloud-bigquery-jdbc/src/main/java/com/google/cloud/bigquery/jdbc/BigQueryErrorMessage.java b/google-cloud-bigquery-jdbc/src/main/java/com/google/cloud/bigquery/jdbc/BigQueryErrorMessage.java new file mode 100644 index 000000000..9afa07452 --- /dev/null +++ b/google-cloud-bigquery-jdbc/src/main/java/com/google/cloud/bigquery/jdbc/BigQueryErrorMessage.java @@ -0,0 +1,29 @@ +/* + * Copyright 2023 Google LLC + * + * Licensed under the 
Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package com.google.cloud.bigquery.jdbc; + +import com.google.api.core.InternalApi; + +@InternalApi +class BigQueryErrorMessage { + + static final String CUSTOMER_TYPE_MAPPING_NOT_SUPPORTED = + "Customized Types are not supported yet."; + static final String INVALID_ARRAY = "Invalid java.sql.Array instance."; + static final String METHOD_NOT_IMPLEMENTED = "This method is not implemented."; + static final String OAUTH_TYPE_ERROR_MESSAGE = "Invalid Auth type specified"; +} diff --git a/google-cloud-bigquery-jdbc/src/main/java/com/google/cloud/bigquery/jdbc/BigQueryFieldValueListWrapper.java b/google-cloud-bigquery-jdbc/src/main/java/com/google/cloud/bigquery/jdbc/BigQueryFieldValueListWrapper.java new file mode 100644 index 000000000..9d8b1b2f7 --- /dev/null +++ b/google-cloud-bigquery-jdbc/src/main/java/com/google/cloud/bigquery/jdbc/BigQueryFieldValueListWrapper.java @@ -0,0 +1,81 @@ +/* + * Copyright 2023 Google LLC + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+ * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package com.google.cloud.bigquery.jdbc; + +import com.google.cloud.bigquery.FieldList; +import com.google.cloud.bigquery.FieldValue; +import com.google.cloud.bigquery.FieldValueList; +import java.util.List; + +/** + * Package-private, This class acts as a facade layer and wraps the FieldList(schema) and + * FieldValueList + */ +class BigQueryFieldValueListWrapper { + + // This is a reference schema to the schema + private final FieldList fieldList; + + // POJO representation of the JSON response + private final FieldValueList fieldValueList; + + // This is very similar to the FieldValueList above, except we get the + // reference as a List in case of an Array + private final List arrayFieldValueList; + + // This flag marks the end of the stream for the ResultSet + private boolean isLast = false; + + static BigQueryFieldValueListWrapper of( + FieldList fieldList, FieldValueList fieldValueList, boolean... isLast) { + boolean isLastFlag = isLast != null && isLast.length == 1 && isLast[0]; + return new BigQueryFieldValueListWrapper(fieldList, fieldValueList, null, isLastFlag); + } + + static BigQueryFieldValueListWrapper getNestedFieldValueListWrapper( + FieldList fieldList, List arrayFieldValueList, boolean... 
isLast) { + boolean isLastFlag = isLast != null && isLast.length == 1 && isLast[0]; + return new BigQueryFieldValueListWrapper(fieldList, null, arrayFieldValueList, isLastFlag); + } + + private BigQueryFieldValueListWrapper( + FieldList fieldList, + FieldValueList fieldValueList, + List arrayFieldValueList, + boolean isLast) { + this.fieldList = fieldList; + this.fieldValueList = fieldValueList; + this.arrayFieldValueList = arrayFieldValueList; + this.isLast = isLast; + } + + public FieldList getFieldList() { + return this.fieldList; + } + + public FieldValueList getFieldValueList() { + return this.fieldValueList; + } + + public List getArrayFieldValueList() { + return this.arrayFieldValueList; + } + + public boolean isLast() { + return this.isLast; + } +} diff --git a/google-cloud-bigquery-jdbc/src/main/java/com/google/cloud/bigquery/jdbc/BigQueryJdbcBulkInsertWriter.java b/google-cloud-bigquery-jdbc/src/main/java/com/google/cloud/bigquery/jdbc/BigQueryJdbcBulkInsertWriter.java new file mode 100644 index 000000000..d4e970262 --- /dev/null +++ b/google-cloud-bigquery-jdbc/src/main/java/com/google/cloud/bigquery/jdbc/BigQueryJdbcBulkInsertWriter.java @@ -0,0 +1,130 @@ +/* + * Copyright 2025 Google LLC + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package com.google.cloud.bigquery.jdbc; + +import com.google.api.core.ApiFuture; +import com.google.api.core.ApiFutureCallback; +import com.google.api.core.ApiFutures; +import com.google.api.gax.retrying.RetrySettings; +import com.google.cloud.bigquery.storage.v1.AppendRowsResponse; +import com.google.cloud.bigquery.storage.v1.BigQueryWriteClient; +import com.google.cloud.bigquery.storage.v1.CreateWriteStreamRequest; +import com.google.cloud.bigquery.storage.v1.Exceptions; +import com.google.cloud.bigquery.storage.v1.Exceptions.StorageException; +import com.google.cloud.bigquery.storage.v1.FinalizeWriteStreamResponse; +import com.google.cloud.bigquery.storage.v1.JsonStreamWriter; +import com.google.cloud.bigquery.storage.v1.TableName; +import com.google.cloud.bigquery.storage.v1.WriteStream; +import com.google.common.util.concurrent.MoreExecutors; +import com.google.gson.JsonArray; +import com.google.protobuf.Descriptors.DescriptorValidationException; +import java.io.IOException; +import java.util.concurrent.Phaser; +import javax.annotation.concurrent.GuardedBy; + +class BigQueryJdbcBulkInsertWriter { + private final BigQueryJdbcCustomLogger LOG = new BigQueryJdbcCustomLogger(this.toString()); + private JsonStreamWriter jsonStreamWriter; + private final Phaser openRequestCount = new Phaser(1); + private final Object streamLock = new Object(); + + @GuardedBy("lock") + private RuntimeException error = null; + + void initialize(TableName parentTable, BigQueryWriteClient client, RetrySettings retrySettings) + throws IOException, DescriptorValidationException, InterruptedException { + WriteStream stream = WriteStream.newBuilder().setType(WriteStream.Type.PENDING).build(); + + CreateWriteStreamRequest createWriteStreamRequest = + CreateWriteStreamRequest.newBuilder() + .setParent(parentTable.toString()) + .setWriteStream(stream) + .build(); + WriteStream writeStream = client.createWriteStream(createWriteStreamRequest); + + JsonStreamWriter.Builder 
jsonStreamWriterBuilder = + JsonStreamWriter.newBuilder(writeStream.getName(), writeStream.getTableSchema()); + + if (retrySettings != null) { + jsonStreamWriterBuilder.setRetrySettings(retrySettings); + } + + this.jsonStreamWriter = jsonStreamWriterBuilder.build(); + } + + void append(JsonArray data, long offset) throws DescriptorValidationException, IOException { + synchronized (this.streamLock) { + if (this.error != null) { + throw this.error; + } + } + + ApiFuture future = jsonStreamWriter.append(data, offset); + ApiFutures.addCallback( + future, new AppendCompleteCallback(this), MoreExecutors.directExecutor()); + openRequestCount.register(); + } + + long cleanup(BigQueryWriteClient client) { + openRequestCount.arriveAndAwaitAdvance(); + jsonStreamWriter.close(); + + synchronized (this.streamLock) { + if (this.error != null) { + throw this.error; + } + } + + // Finalize the stream. + FinalizeWriteStreamResponse finalizeResponse = + client.finalizeWriteStream(jsonStreamWriter.getStreamName()); + LOG.finest("Rows written: " + finalizeResponse.getRowCount()); + return finalizeResponse.getRowCount(); + } + + String getStreamName() { + return jsonStreamWriter.getStreamName(); + } + + static class AppendCompleteCallback implements ApiFutureCallback { + + private final BigQueryJdbcBulkInsertWriter parent; + + AppendCompleteCallback(BigQueryJdbcBulkInsertWriter parent) { + this.parent = parent; + } + + public void onSuccess(AppendRowsResponse response) { + done(); + } + + public void onFailure(Throwable throwable) { + synchronized (this.parent.streamLock) { + if (this.parent.error == null) { + StorageException storageException = Exceptions.toStorageException(throwable); + this.parent.error = + (storageException != null) ? 
storageException : new RuntimeException(throwable); + } + } + done(); + } + + private void done() { + this.parent.openRequestCount.arriveAndDeregister(); + } + } +} diff --git a/google-cloud-bigquery-jdbc/src/main/java/com/google/cloud/bigquery/jdbc/BigQueryJdbcCustomLogger.java b/google-cloud-bigquery-jdbc/src/main/java/com/google/cloud/bigquery/jdbc/BigQueryJdbcCustomLogger.java new file mode 100644 index 000000000..611b200ff --- /dev/null +++ b/google-cloud-bigquery-jdbc/src/main/java/com/google/cloud/bigquery/jdbc/BigQueryJdbcCustomLogger.java @@ -0,0 +1,32 @@ +/* + * Copyright 2023 Google LLC + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package com.google.cloud.bigquery.jdbc; + +import java.util.logging.Logger; + +class BigQueryJdbcCustomLogger extends Logger { + + protected BigQueryJdbcCustomLogger(String name, String resourceBundleName) { + super(name, resourceBundleName); + this.setParent(BigQueryJdbcRootLogger.getRootLogger()); + } + + BigQueryJdbcCustomLogger(String name) { + this(name, null); + this.setParent(BigQueryJdbcRootLogger.getRootLogger()); + } +} diff --git a/google-cloud-bigquery-jdbc/src/main/java/com/google/cloud/bigquery/jdbc/BigQueryJdbcOAuthUtility.java b/google-cloud-bigquery-jdbc/src/main/java/com/google/cloud/bigquery/jdbc/BigQueryJdbcOAuthUtility.java new file mode 100644 index 000000000..5f486f1e5 --- /dev/null +++ b/google-cloud-bigquery-jdbc/src/main/java/com/google/cloud/bigquery/jdbc/BigQueryJdbcOAuthUtility.java @@ -0,0 +1,759 @@ +/* + * Copyright 2023 Google LLC + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package com.google.cloud.bigquery.jdbc; + +import static com.google.api.client.util.PemReader.readFirstSectionAndClose; +import static com.google.cloud.bigquery.jdbc.BigQueryErrorMessage.OAUTH_TYPE_ERROR_MESSAGE; + +import com.google.api.client.util.PemReader; +import com.google.api.client.util.SecurityUtils; +import com.google.auth.oauth2.AccessToken; +import com.google.auth.oauth2.ClientId; +import com.google.auth.oauth2.ExternalAccountCredentials; +import com.google.auth.oauth2.GoogleCredentials; +import com.google.auth.oauth2.ImpersonatedCredentials; +import com.google.auth.oauth2.ServiceAccountCredentials; +import com.google.auth.oauth2.UserAuthorizer; +import com.google.auth.oauth2.UserCredentials; +import com.google.cloud.bigquery.exception.BigQueryJdbcRuntimeException; +import com.google.gson.JsonElement; +import com.google.gson.JsonObject; +import com.google.gson.JsonParser; +import com.google.gson.Strictness; +import com.google.gson.stream.JsonReader; +import java.awt.Desktop; +import java.io.BufferedReader; +import java.io.ByteArrayInputStream; +import java.io.IOException; +import java.io.InputStream; +import java.io.InputStreamReader; +import java.io.OutputStream; +import java.io.PrintWriter; +import java.io.Reader; +import java.io.StringReader; +import java.net.ServerSocket; +import java.net.Socket; +import java.net.URI; +import java.net.URISyntaxException; +import java.net.URL; +import java.nio.file.Files; +import java.nio.file.Paths; +import java.security.GeneralSecurityException; +import java.security.KeyFactory; +import java.security.NoSuchAlgorithmException; +import java.security.PrivateKey; +import java.security.spec.InvalidKeySpecException; +import java.security.spec.PKCS8EncodedKeySpec; +import java.util.ArrayList; +import java.util.Arrays; +import java.util.HashMap; +import java.util.List; +import java.util.Map; +import java.util.Objects; +import java.util.regex.Matcher; +import java.util.regex.Pattern; + +final class 
BigQueryJdbcOAuthUtility { + + private static final String USER_AUTH_SUCCESS_HTTP_RESPONSE = + "HTTP/1.1 200 OK\n" + + "Content-Length: 300\n" + + "Connection: close\n" + + "Content-Type: text/html; charset=utf-8\n" + + "\n" + + "\n" + + "Thank you for using JDBC Driver for Google BigQuery!\n" + + "You may now close the window."; + + private static final int USER_AUTH_TIMEOUT_MS = 120000; + private static final BigQueryJdbcCustomLogger LOG = + new BigQueryJdbcCustomLogger(BigQueryJdbcOAuthUtility.class.getName()); + + private static final Map BYOID_NAME_MAP = + new HashMap() { + { + put(BigQueryJdbcUrlUtility.BYOID_AUDIENCE_URI_PROPERTY_NAME, "audience"); + put(BigQueryJdbcUrlUtility.BYOID_CREDENTIAL_SOURCE_PROPERTY_NAME, "credential_source"); + put(BigQueryJdbcUrlUtility.BYOID_SUBJECT_TOKEN_TYPE_PROPERTY_NAME, "subject_token_type"); + put(BigQueryJdbcUrlUtility.BYOID_TOKEN_URI_PROPERTY_NAME, "token_url"); + put( + BigQueryJdbcUrlUtility.BYOID_POOL_USER_PROJECT_PROPERTY_NAME, + "workforce_pool_user_project"); + put( + BigQueryJdbcUrlUtility.BYOID_SA_IMPERSONATION_URI_PROPERTY_NAME, + "service_account_impersonation_url"); + } + }; + + /** + * Parses the OAuth properties from the given URL. + * + * @param url The URL to parse. + * @return A map of OAuth properties. 
+ */ + static Map parseOAuthProperties(String url, String callerClassName) { + LOG.finest("++enter++\t" + callerClassName); + Map oauthProperties = new HashMap<>(); + + AuthType authType; + try { + authType = + AuthType.fromValue( + BigQueryJdbcUrlUtility.parseIntProperty( + url, + BigQueryJdbcUrlUtility.OAUTH_TYPE_PROPERTY_NAME, + BigQueryJdbcUrlUtility.DEFAULT_OAUTH_TYPE_VALUE, + callerClassName)); + } catch (NumberFormatException exception) { + throw new IllegalArgumentException(OAUTH_TYPE_ERROR_MESSAGE); + } + oauthProperties.put(BigQueryJdbcUrlUtility.OAUTH_TYPE_PROPERTY_NAME, String.valueOf(authType)); + switch (authType) { + case GOOGLE_SERVICE_ACCOUNT: + // For using a Google Service Account (OAuth Type 0) + // need: project id, OAuthServiceAcctEmail and OAuthPvtKey or OAuthPvtKeyPath that can be + // .p12 or json. + // TODO: validation if .p12 or json file can be in getPropertyInfo can be handy for user + String serviceAccountEmail = + BigQueryJdbcUrlUtility.parseUriProperty( + url, BigQueryJdbcUrlUtility.OAUTH_SA_EMAIL_PROPERTY_NAME); + String serviceAccountPK = + BigQueryJdbcUrlUtility.parseUriProperty( + url, BigQueryJdbcUrlUtility.OAUTH_PVT_KEY_PROPERTY_NAME); + String serviceAccountPrivateKeyPath = + BigQueryJdbcUrlUtility.parseUriProperty( + url, BigQueryJdbcUrlUtility.OAUTH_PVT_KEY_PATH_PROPERTY_NAME); + String p12Password = + BigQueryJdbcUrlUtility.parseStringProperty( + url, + BigQueryJdbcUrlUtility.OAUTH_P12_PASSWORD_PROPERTY_NAME, + BigQueryJdbcUrlUtility.DEFAULT_OAUTH_P12_PASSWORD_VALUE, + callerClassName); + + oauthProperties.put( + BigQueryJdbcUrlUtility.OAUTH_SA_EMAIL_PROPERTY_NAME, serviceAccountEmail); + oauthProperties.put(BigQueryJdbcUrlUtility.OAUTH_P12_PASSWORD_PROPERTY_NAME, p12Password); + if (serviceAccountEmail != null && serviceAccountPK != null) { + oauthProperties.put(BigQueryJdbcUrlUtility.OAUTH_PVT_KEY_PROPERTY_NAME, serviceAccountPK); + } else { + oauthProperties.put( + BigQueryJdbcUrlUtility.OAUTH_PVT_KEY_PATH_PROPERTY_NAME, 
+ serviceAccountPrivateKeyPath); + } + break; + case GOOGLE_USER_ACCOUNT: + oauthProperties.put( + BigQueryJdbcUrlUtility.OAUTH_CLIENT_ID_PROPERTY_NAME, + BigQueryJdbcUrlUtility.parseUriProperty( + url, BigQueryJdbcUrlUtility.OAUTH_CLIENT_ID_PROPERTY_NAME)); + oauthProperties.put( + BigQueryJdbcUrlUtility.OAUTH_CLIENT_SECRET_PROPERTY_NAME, + BigQueryJdbcUrlUtility.parseUriProperty( + url, BigQueryJdbcUrlUtility.OAUTH_CLIENT_SECRET_PROPERTY_NAME)); + int reqGoogleDriveScope = + BigQueryJdbcUrlUtility.parseIntProperty( + url, + BigQueryJdbcUrlUtility.REQUEST_GOOGLE_DRIVE_SCOPE_PROPERTY_NAME, + BigQueryJdbcUrlUtility.DEFAULT_REQUEST_GOOGLE_DRIVE_SCOPE_VALUE, + callerClassName); + oauthProperties.put( + BigQueryJdbcUrlUtility.REQUEST_GOOGLE_DRIVE_SCOPE_PROPERTY_NAME, + String.valueOf(reqGoogleDriveScope)); + LOG.fine("RequestGoogleDriveScope parsed."); + break; + case PRE_GENERATED_TOKEN: + String refreshToken = + BigQueryJdbcUrlUtility.parseUriProperty( + url, BigQueryJdbcUrlUtility.OAUTH_REFRESH_TOKEN_PROPERTY_NAME); + if (refreshToken != null) { + oauthProperties.put( + BigQueryJdbcUrlUtility.OAUTH_REFRESH_TOKEN_PROPERTY_NAME, refreshToken); + LOG.fine("OAuthRefreshToken provided."); + oauthProperties.put( + BigQueryJdbcUrlUtility.OAUTH_CLIENT_ID_PROPERTY_NAME, + BigQueryJdbcUrlUtility.parseUriProperty( + url, BigQueryJdbcUrlUtility.OAUTH_CLIENT_ID_PROPERTY_NAME)); + LOG.fine("OAuthClientId provided."); + oauthProperties.put( + BigQueryJdbcUrlUtility.OAUTH_CLIENT_SECRET_PROPERTY_NAME, + BigQueryJdbcUrlUtility.parseUriProperty( + url, BigQueryJdbcUrlUtility.OAUTH_CLIENT_SECRET_PROPERTY_NAME)); + LOG.fine("OAuthClientSecret provided."); + break; + } + oauthProperties.put( + BigQueryJdbcUrlUtility.OAUTH_ACCESS_TOKEN_PROPERTY_NAME, + BigQueryJdbcUrlUtility.parseUriProperty( + url, BigQueryJdbcUrlUtility.OAUTH_ACCESS_TOKEN_PROPERTY_NAME)); + LOG.fine("OAuthAccessToken provided."); + break; + case APPLICATION_DEFAULT_CREDENTIALS: + // For Application Default Credentials 
(OAuth Type 3) + // need: project id + break; + case EXTERNAL_ACCOUNT_AUTH: + // For External account authentication (OAuth Type 4) + // need: project id, OAuthPvtKey or OAuthPvtKeyPath or BYOID_PROPERTIES + String pvtKey = + BigQueryJdbcUrlUtility.parseUriProperty( + url, BigQueryJdbcUrlUtility.OAUTH_PVT_KEY_PROPERTY_NAME); + String pvtKeyPath = + BigQueryJdbcUrlUtility.parseUriProperty( + url, BigQueryJdbcUrlUtility.OAUTH_PVT_KEY_PATH_PROPERTY_NAME); + if (pvtKey != null) { + oauthProperties.put( + BigQueryJdbcUrlUtility.OAUTH_PVT_KEY_PROPERTY_NAME, + BigQueryJdbcUrlUtility.parseUriProperty( + url, BigQueryJdbcUrlUtility.OAUTH_PVT_KEY_PROPERTY_NAME)); + LOG.fine("OAuthPvtKey provided."); + } else if (pvtKeyPath != null) { + oauthProperties.put( + BigQueryJdbcUrlUtility.OAUTH_PVT_KEY_PATH_PROPERTY_NAME, + BigQueryJdbcUrlUtility.parseUriProperty( + url, BigQueryJdbcUrlUtility.OAUTH_PVT_KEY_PATH_PROPERTY_NAME)); + LOG.fine("OAuthPvtKeyPath provided."); + } else { + for (String property : BigQueryJdbcUrlUtility.BYOID_PROPERTIES) { + String value = + BigQueryJdbcUrlUtility.parseBYOIDProperty(url, property, callerClassName); + if (value != null) { + oauthProperties.put(property, value); + LOG.fine(property + " provided."); + } + } + String universeDomainProp = BigQueryJdbcUrlUtility.UNIVERSE_DOMAIN_OVERRIDE_PROPERTY_NAME; + String universeDomain = BigQueryJdbcUrlUtility.parseUriProperty(url, universeDomainProp); + if (universeDomain != null) { + oauthProperties.put(universeDomainProp, universeDomain); + LOG.fine(universeDomainProp + " provided. 
Caller : " + callerClassName); + } + } + break; + } + + if (authType == AuthType.GOOGLE_SERVICE_ACCOUNT + || authType == AuthType.GOOGLE_USER_ACCOUNT + || authType == AuthType.PRE_GENERATED_TOKEN) { + oauthProperties.put( + BigQueryJdbcUrlUtility.OAUTH_SA_IMPERSONATION_EMAIL_PROPERTY_NAME, + BigQueryJdbcUrlUtility.parseStringProperty( + url, + BigQueryJdbcUrlUtility.OAUTH_SA_IMPERSONATION_EMAIL_PROPERTY_NAME, + BigQueryJdbcUrlUtility.DEFAULT_OAUTH_SA_IMPERSONATION_EMAIL_VALUE, + callerClassName)); + oauthProperties.put( + BigQueryJdbcUrlUtility.OAUTH_SA_IMPERSONATION_CHAIN_PROPERTY_NAME, + BigQueryJdbcUrlUtility.parseStringProperty( + url, + BigQueryJdbcUrlUtility.OAUTH_SA_IMPERSONATION_CHAIN_PROPERTY_NAME, + BigQueryJdbcUrlUtility.DEFAULT_OAUTH_SA_IMPERSONATION_CHAIN_VALUE, + callerClassName)); + oauthProperties.put( + BigQueryJdbcUrlUtility.OAUTH_SA_IMPERSONATION_SCOPES_PROPERTY_NAME, + BigQueryJdbcUrlUtility.parseStringProperty( + url, + BigQueryJdbcUrlUtility.OAUTH_SA_IMPERSONATION_SCOPES_PROPERTY_NAME, + BigQueryJdbcUrlUtility.DEFAULT_OAUTH_SA_IMPERSONATION_SCOPES_VALUE, + callerClassName)); + oauthProperties.put( + BigQueryJdbcUrlUtility.OAUTH_SA_IMPERSONATION_TOKEN_LIFETIME_PROPERTY_NAME, + BigQueryJdbcUrlUtility.parseStringProperty( + url, + BigQueryJdbcUrlUtility.OAUTH_SA_IMPERSONATION_TOKEN_LIFETIME_PROPERTY_NAME, + BigQueryJdbcUrlUtility.DEFAULT_OAUTH_SA_IMPERSONATION_TOKEN_LIFETIME_VALUE, + callerClassName)); + } + return oauthProperties; + } + + /** + * Gets the credentials for the given Auth properties. + * + * @param authProperties A map of Auth properties. + * @return A GoogleCredentials object. 
+ */ + static GoogleCredentials getCredentials( + Map authProperties, + Map overrideProperties, + String callerClassName) { + LOG.finest("++enter++\t" + callerClassName); + + AuthType authType = + AuthType.valueOf(authProperties.get(BigQueryJdbcUrlUtility.OAUTH_TYPE_PROPERTY_NAME)); + + GoogleCredentials credentials; + switch (authType) { + case GOOGLE_SERVICE_ACCOUNT: + credentials = + getGoogleServiceAccountCredentials(authProperties, overrideProperties, callerClassName); + break; + case GOOGLE_USER_ACCOUNT: + credentials = + getGoogleUserAccountCredentials(authProperties, overrideProperties, callerClassName); + break; + case PRE_GENERATED_TOKEN: + credentials = + getPreGeneratedTokensCredentials(authProperties, overrideProperties, callerClassName); + break; + case APPLICATION_DEFAULT_CREDENTIALS: + // This auth method doesn't support service account impersonation + return getApplicationDefaultCredentials(callerClassName); + case EXTERNAL_ACCOUNT_AUTH: + // This auth method doesn't support service account impersonation + return getExternalAccountAuthCredentials(authProperties, callerClassName); + default: + throw new IllegalStateException(OAUTH_TYPE_ERROR_MESSAGE); + } + + return getServiceAccountImpersonatedCredentials(credentials, authProperties); + } + + private static boolean isFileExists(String filename) { + try { + return filename != null && !filename.isEmpty() && Files.exists(Paths.get(filename)); + } catch (Exception e) { + // Filename is invalid + return false; + } + } + + private static boolean isJson(String value) { + try { + // This is done this way to ensure strict Json parsing + // https://github.com/google/gson/issues/1208#issuecomment-2120764686 + InputStream stream = new ByteArrayInputStream(value.getBytes()); + InputStreamReader reader = new InputStreamReader(stream); + JsonReader jsonReader = new JsonReader(reader); + jsonReader.setStrictness(Strictness.STRICT); + JsonElement json = JsonParser.parseReader(jsonReader); + return json != null; + } 
catch (Exception e) { + // Unable to parse json string + return false; + } + } + + private static GoogleCredentials getGoogleServiceAccountCredentials( + Map authProperties, + Map overrideProperties, + String callerClassName) { + LOG.finest("++enter++\t" + callerClassName); + + ServiceAccountCredentials.Builder builder; + try { + final String pvtKeyPath = + authProperties.get(BigQueryJdbcUrlUtility.OAUTH_PVT_KEY_PATH_PROPERTY_NAME); + final String pvtKey = authProperties.get(BigQueryJdbcUrlUtility.OAUTH_PVT_KEY_PROPERTY_NAME); + final String pvtEmail = + authProperties.get(BigQueryJdbcUrlUtility.OAUTH_SA_EMAIL_PROPERTY_NAME); + final String p12Password = + authProperties.get(BigQueryJdbcUrlUtility.OAUTH_P12_PASSWORD_PROPERTY_NAME); + + final String keyPath = pvtKeyPath != null ? pvtKeyPath : pvtKey; + PrivateKey key = null; + InputStream stream = null; + + if (isFileExists(keyPath)) { + key = privateKeyFromP12File(keyPath, p12Password); + if (key == null) { + stream = Files.newInputStream(Paths.get(keyPath)); + } + } else if (isJson(pvtKey)) { + stream = new ByteArrayInputStream(pvtKey.getBytes()); + } else if (pvtKey != null) { + key = privateKeyFromPkcs8(pvtKey); + } + + if (stream != null) { + builder = ServiceAccountCredentials.fromStream(stream).toBuilder(); + } else if (pvtEmail != null && key != null) { + builder = + ServiceAccountCredentials.newBuilder().setClientEmail(pvtEmail).setPrivateKey(key); + } else { + LOG.severe("No valid Service Account credentials provided."); + throw new BigQueryJdbcRuntimeException("No valid credentials provided."); + } + + if (overrideProperties.containsKey(BigQueryJdbcUrlUtility.OAUTH2_TOKEN_URI_PROPERTY_NAME)) { + builder.setTokenServerUri( + new URI(overrideProperties.get(BigQueryJdbcUrlUtility.OAUTH2_TOKEN_URI_PROPERTY_NAME))); + } + if (overrideProperties.containsKey( + BigQueryJdbcUrlUtility.UNIVERSE_DOMAIN_OVERRIDE_PROPERTY_NAME)) { + builder.setUniverseDomain( + 
overrideProperties.get(BigQueryJdbcUrlUtility.UNIVERSE_DOMAIN_OVERRIDE_PROPERTY_NAME)); + } + } catch (URISyntaxException | IOException e) { + LOG.severe("Validation failure for Service Account credentials."); + throw new BigQueryJdbcRuntimeException(e); + } + LOG.info("GoogleCredentials instantiated. Auth Method: Service Account."); + return builder.build(); + } + + static UserAuthorizer getUserAuthorizer( + Map authProperties, + Map overrideProperties, + int port, + String callerClassName) + throws URISyntaxException { + LOG.finest("++enter++\t" + callerClassName); + List scopes = new ArrayList<>(); + scopes.add("https://www.googleapis.com/auth/bigquery"); + + // Add Google Drive scope conditionally + if (authProperties.containsKey( + BigQueryJdbcUrlUtility.REQUEST_GOOGLE_DRIVE_SCOPE_PROPERTY_NAME)) { + try { + int driveScopeValue = + Integer.parseInt( + authProperties.get( + BigQueryJdbcUrlUtility.REQUEST_GOOGLE_DRIVE_SCOPE_PROPERTY_NAME)); + if (driveScopeValue == 1) { + scopes.add("https://www.googleapis.com/auth/drive.readonly"); + LOG.fine("Added Google Drive read-only scope. Caller: " + callerClassName); + } + } catch (NumberFormatException e) { + LOG.severe( + "Invalid value for RequestGoogleDriveScope, defaulting to not request Drive scope. 
Caller: " + + callerClassName); + } + } + + List responseTypes = new ArrayList<>(); + responseTypes.add("code"); + + ClientId clientId = + ClientId.of( + authProperties.get(BigQueryJdbcUrlUtility.OAUTH_CLIENT_ID_PROPERTY_NAME), + authProperties.get(BigQueryJdbcUrlUtility.OAUTH_CLIENT_SECRET_PROPERTY_NAME)); + UserAuthorizer.Builder userAuthorizerBuilder = + UserAuthorizer.newBuilder() + .setClientId(clientId) + .setScopes(scopes) + .setCallbackUri(URI.create("http://localhost:" + port)); + + if (overrideProperties.containsKey(BigQueryJdbcUrlUtility.OAUTH2_TOKEN_URI_PROPERTY_NAME)) { + userAuthorizerBuilder.setTokenServerUri( + new URI(overrideProperties.get(BigQueryJdbcUrlUtility.OAUTH2_TOKEN_URI_PROPERTY_NAME))); + } + + return userAuthorizerBuilder.build(); + } + + static UserCredentials getCredentialsFromCode( + UserAuthorizer userAuthorizer, String code, String callerClassName) throws IOException { + LOG.finest("++enter++\t" + callerClassName); + return userAuthorizer.getCredentialsFromCode(code, URI.create("")); + } + + private static GoogleCredentials getGoogleUserAccountCredentials( + Map authProperties, + Map overrideProperties, + String callerClassName) { + LOG.finest("++enter++\t" + callerClassName); + try { + ServerSocket serverSocket = new ServerSocket(0); + serverSocket.setSoTimeout(USER_AUTH_TIMEOUT_MS); + int port = serverSocket.getLocalPort(); + UserAuthorizer userAuthorizer = + getUserAuthorizer(authProperties, overrideProperties, port, callerClassName); + + URL authURL = userAuthorizer.getAuthorizationUrl("user", "", URI.create("")); + String code; + + if (Desktop.isDesktopSupported()) { + Desktop.getDesktop().browse(authURL.toURI()); + + Socket socket = serverSocket.accept(); + + OutputStream outputStream = socket.getOutputStream(); + PrintWriter printWriter = new PrintWriter(outputStream); + + BufferedReader bufferedReader = + new BufferedReader(new InputStreamReader(socket.getInputStream())); + String response = bufferedReader.readLine(); + + 
Pattern p = Pattern.compile("(?<=code=).*?(?=&|$)"); + Matcher m = p.matcher(response); + + if (!m.find()) { + throw new BigQueryJdbcRuntimeException("Could not retrieve the code for user auth"); + } + code = m.group(); + + printWriter.println(USER_AUTH_SUCCESS_HTTP_RESPONSE); + printWriter.flush(); + socket.close(); + serverSocket.close(); + } else { + throw new BigQueryJdbcRuntimeException("User auth only supported in desktop environments"); + } + + return getCredentialsFromCode(userAuthorizer, code, callerClassName); + } catch (IOException | URISyntaxException ex) { + LOG.severe( + String.format( + "Failed to establish connection using User Account authentication: %s", + ex.getMessage())); + throw new BigQueryJdbcRuntimeException(ex); + } + } + + private static GoogleCredentials getPreGeneratedAccessTokenCredentials( + Map authProperties, + Map overrideProperties, + String callerClassName) { + LOG.finest("++enter++\t" + callerClassName); + GoogleCredentials.Builder builder = GoogleCredentials.newBuilder(); + if (overrideProperties.containsKey( + BigQueryJdbcUrlUtility.UNIVERSE_DOMAIN_OVERRIDE_PROPERTY_NAME)) { + builder.setUniverseDomain( + overrideProperties.get(BigQueryJdbcUrlUtility.UNIVERSE_DOMAIN_OVERRIDE_PROPERTY_NAME)); + } + LOG.info("Connection established. 
Auth Method: Pre-generated Access Token."); + return builder + .setAccessToken( + AccessToken.newBuilder() + .setTokenValue( + authProperties.get(BigQueryJdbcUrlUtility.OAUTH_ACCESS_TOKEN_PROPERTY_NAME)) + .build()) + .build(); + } + + static GoogleCredentials getPreGeneratedTokensCredentials( + Map authProperties, + Map overrideProperties, + String callerClassName) { + LOG.finest("++enter++\t" + callerClassName); + if (authProperties.containsKey(BigQueryJdbcUrlUtility.OAUTH_REFRESH_TOKEN_PROPERTY_NAME)) { + try { + return getPreGeneratedRefreshTokenCredentials( + authProperties, overrideProperties, callerClassName); + } catch (URISyntaxException ex) { + throw new BigQueryJdbcRuntimeException(ex); + } + } else { + return getPreGeneratedAccessTokenCredentials( + authProperties, overrideProperties, callerClassName); + } + } + + static UserCredentials getPreGeneratedRefreshTokenCredentials( + Map authProperties, + Map overrideProperties, + String callerClassName) + throws URISyntaxException { + LOG.finest("++enter++\t" + callerClassName); + + UserCredentials.Builder userCredentialsBuilder = + UserCredentials.newBuilder() + .setRefreshToken( + authProperties.get(BigQueryJdbcUrlUtility.OAUTH_REFRESH_TOKEN_PROPERTY_NAME)) + .setClientId(authProperties.get(BigQueryJdbcUrlUtility.OAUTH_CLIENT_ID_PROPERTY_NAME)) + .setClientSecret( + authProperties.get(BigQueryJdbcUrlUtility.OAUTH_CLIENT_SECRET_PROPERTY_NAME)); + + if (overrideProperties.containsKey(BigQueryJdbcUrlUtility.OAUTH2_TOKEN_URI_PROPERTY_NAME)) { + userCredentialsBuilder.setTokenServerUri( + new URI(overrideProperties.get(BigQueryJdbcUrlUtility.OAUTH2_TOKEN_URI_PROPERTY_NAME))); + } + if (overrideProperties.containsKey( + BigQueryJdbcUrlUtility.UNIVERSE_DOMAIN_OVERRIDE_PROPERTY_NAME)) { + userCredentialsBuilder.setUniverseDomain( + overrideProperties.get(BigQueryJdbcUrlUtility.UNIVERSE_DOMAIN_OVERRIDE_PROPERTY_NAME)); + } + LOG.info("Connection established. 
Auth Method: Pre-generated Refresh Token."); + return userCredentialsBuilder.build(); + } + + private static GoogleCredentials getApplicationDefaultCredentials(String callerClassName) { + LOG.finest("++enter++\t" + callerClassName); + try { + GoogleCredentials credentials = GoogleCredentials.getApplicationDefault(); + String principal = "unknown"; + if (credentials instanceof ServiceAccountCredentials) { + principal = ((ServiceAccountCredentials) credentials).getClientEmail(); + } else if (credentials instanceof UserCredentials) { + principal = "user credentials"; + } else if (credentials instanceof ExternalAccountCredentials) { + principal = "external account"; + } + LOG.info( + String.format( + "Connection established. Auth Method: Application Default Credentials, Principal: %s.", + principal)); + return credentials; + } catch (IOException exception) { + // TODO throw exception + throw new BigQueryJdbcRuntimeException("Application default credentials not found."); + } + } + + private static GoogleCredentials getExternalAccountAuthCredentials( + Map authProperties, String callerClassName) { + LOG.finest("++enter++\t" + callerClassName); + try { + JsonObject jsonObject = null; + String credentialsPath = null; + if (authProperties.containsKey(BigQueryJdbcUrlUtility.OAUTH_PVT_KEY_PROPERTY_NAME)) { + String pvtKeyPath = + authProperties.get(BigQueryJdbcUrlUtility.OAUTH_PVT_KEY_PROPERTY_NAME).trim(); + if (pvtKeyPath.startsWith("{")) { + jsonObject = JsonParser.parseString(pvtKeyPath).getAsJsonObject(); + } else { + credentialsPath = pvtKeyPath; + } + } else if (authProperties.containsKey( + BigQueryJdbcUrlUtility.OAUTH_PVT_KEY_PATH_PROPERTY_NAME)) { + credentialsPath = + authProperties.get(BigQueryJdbcUrlUtility.OAUTH_PVT_KEY_PATH_PROPERTY_NAME); + } else { + jsonObject = new JsonObject(); + for (String property : BigQueryJdbcUrlUtility.BYOID_PROPERTIES) { + if (Objects.equals( + property, BigQueryJdbcUrlUtility.BYOID_CREDENTIAL_SOURCE_PROPERTY_NAME)) { + 
jsonObject.add( + BYOID_NAME_MAP.get(property), + JsonParser.parseString(authProperties.get(property)).getAsJsonObject()); + } else if (authProperties.containsKey(property)) { + jsonObject.addProperty(BYOID_NAME_MAP.get(property), authProperties.get(property)); + } + } + if (authProperties.containsKey( + BigQueryJdbcUrlUtility.UNIVERSE_DOMAIN_OVERRIDE_PROPERTY_NAME)) { + jsonObject.addProperty( + "universe_domain", + authProperties.get(BigQueryJdbcUrlUtility.UNIVERSE_DOMAIN_OVERRIDE_PROPERTY_NAME)); + } + } + + if (credentialsPath != null) { + return ExternalAccountCredentials.fromStream( + Files.newInputStream(Paths.get(credentialsPath))); + } else if (jsonObject != null) { + return ExternalAccountCredentials.fromStream( + new ByteArrayInputStream(jsonObject.toString().getBytes())); + } else { + throw new IllegalArgumentException( + "Insufficient info provided for external authentication"); + } + } catch (IOException e) { + throw new BigQueryJdbcRuntimeException(e); + } + } + + // This function checks if connection string contains configuration for + // credentials impersonation. If not, it returns regular credentials object. + // If impersonated service account is provided, returns Credentials object + // accomodating this information. 
+ private static GoogleCredentials getServiceAccountImpersonatedCredentials( + GoogleCredentials credentials, Map authProperties) { + + String impersonationEmail = + authProperties.get(BigQueryJdbcUrlUtility.OAUTH_SA_IMPERSONATION_EMAIL_PROPERTY_NAME); + if (impersonationEmail == null || impersonationEmail.isEmpty()) { + return credentials; + } + + String impersonationChainString = + authProperties.get(BigQueryJdbcUrlUtility.OAUTH_SA_IMPERSONATION_CHAIN_PROPERTY_NAME); + List impersonationChain = null; + if (impersonationChainString != null && !impersonationChainString.isEmpty()) { + impersonationChain = Arrays.asList(impersonationChainString.split(",")); + } + + // Scopes has a default value, so it should never be null + List impersonationScopes = + Arrays.asList( + authProperties + .get(BigQueryJdbcUrlUtility.OAUTH_SA_IMPERSONATION_SCOPES_PROPERTY_NAME) + .split(",")); + + // Token lifetime has a default value, so it should never be null + String impersonationLifetime = + authProperties.get( + BigQueryJdbcUrlUtility.OAUTH_SA_IMPERSONATION_TOKEN_LIFETIME_PROPERTY_NAME); + int impersonationLifetimeInt = 0; + try { + impersonationLifetimeInt = Integer.parseInt(impersonationLifetime); + } catch (NumberFormatException e) { + LOG.severe("Invalid value for ServiceAccountImpersonationTokenLifetime."); + throw new IllegalArgumentException( + "Invalid value for ServiceAccountImpersonationTokenLifetime: must be a positive integer.", + e); + } + + return ImpersonatedCredentials.create( + credentials, + impersonationEmail, + impersonationChain, + impersonationScopes, + impersonationLifetimeInt); + } + + static PrivateKey privateKeyFromP12File(String privateKeyFile, String password) { + try { + InputStream stream = Files.newInputStream(Paths.get(privateKeyFile)); + return SecurityUtils.loadPrivateKeyFromKeyStore( + SecurityUtils.getPkcs12KeyStore(), stream, "notasecret", "privatekey", password); + } catch (IOException | GeneralSecurityException e) { + LOG.warning("Unable to 
parse p12 file: " + e.getMessage()); + return null; + } + } + + static PrivateKey privateKeyFromPkcs8(String privateKeyPkcs8) { + try { + Reader reader = new StringReader(privateKeyPkcs8); + PemReader.Section section = readFirstSectionAndClose(reader, "PRIVATE KEY"); + if (section == null) { + throw new IOException("Invalid PKCS#8 data."); + } + byte[] bytes = section.getBase64DecodedBytes(); + PKCS8EncodedKeySpec keySpec = new PKCS8EncodedKeySpec(bytes); + KeyFactory keyFactory = SecurityUtils.getRsaKeyFactory(); + return keyFactory.generatePrivate(keySpec); + } catch (NoSuchAlgorithmException | InvalidKeySpecException | IOException e) { + LOG.warning("Unable to parse pkcs8 secret: " + e.getMessage()); + return null; + } + } + + enum AuthType { + GOOGLE_SERVICE_ACCOUNT(0), + GOOGLE_USER_ACCOUNT(1), + PRE_GENERATED_TOKEN(2), + APPLICATION_DEFAULT_CREDENTIALS(3), + EXTERNAL_ACCOUNT_AUTH(4); + + private final int value; + + AuthType(int value) { + this.value = value; + } + + static AuthType fromValue(int value) { + for (AuthType authType : values()) { + if (authType.value == value) { + return authType; + } + } + throw new IllegalStateException(OAUTH_TYPE_ERROR_MESSAGE + ": " + value); + } + } +} diff --git a/google-cloud-bigquery-jdbc/src/main/java/com/google/cloud/bigquery/jdbc/BigQueryJdbcParameter.java b/google-cloud-bigquery-jdbc/src/main/java/com/google/cloud/bigquery/jdbc/BigQueryJdbcParameter.java new file mode 100644 index 000000000..cb11d14e4 --- /dev/null +++ b/google-cloud-bigquery-jdbc/src/main/java/com/google/cloud/bigquery/jdbc/BigQueryJdbcParameter.java @@ -0,0 +1,117 @@ +/* + * Copyright 2025 Google LLC + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package com.google.cloud.bigquery.jdbc; + +import com.google.cloud.bigquery.StandardSQLTypeName; +import com.google.cloud.bigquery.jdbc.BigQueryParameterHandler.BigQueryStatementParameterType; + +class BigQueryJdbcParameter { + private int index; + private Object value; + private Class type; + private StandardSQLTypeName sqlType; + // Additional parameters needed for CallableStatement. + private String paramName; + private BigQueryStatementParameterType paramType; + private int scale; + + BigQueryJdbcParameter() {} + + BigQueryJdbcParameter(BigQueryJdbcParameter parameter) { + this.index = parameter.index; + this.value = parameter.value; + this.type = parameter.type; + this.sqlType = parameter.sqlType; + } + + int getIndex() { + return index; + } + + void setIndex(int index) { + this.index = index; + } + + Object getValue() { + return value; + } + + void setValue(Object value) { + this.value = value; + } + + Class getType() { + return type; + } + + public void setType(Class type) { + this.type = type; + } + + StandardSQLTypeName getSqlType() { + return sqlType; + } + + void setSqlType(StandardSQLTypeName sqlType) { + this.sqlType = sqlType; + } + + String getParamName() { + return paramName; + } + + void setParamName(String paramName) { + this.paramName = paramName; + } + + BigQueryStatementParameterType getParamType() { + return paramType; + } + + void setParamType(BigQueryStatementParameterType paramType) { + this.paramType = paramType; + } + + int getScale() { + return scale; + } + + void setScale(int scale) { + this.scale = scale; + } 
+ + @Override + public String toString() { + return "BigQueryJdbcParameter{" + + "index=" + + index + + ", value=" + + value + + ", type=" + + type + + ", sqlType=" + + sqlType + + ", paramName='" + + paramName + + '\'' + + ", paramType=" + + paramType.name() + + ", scale=" + + scale + + '}'; + } +} diff --git a/google-cloud-bigquery-jdbc/src/main/java/com/google/cloud/bigquery/jdbc/BigQueryJdbcProxyUtility.java b/google-cloud-bigquery-jdbc/src/main/java/com/google/cloud/bigquery/jdbc/BigQueryJdbcProxyUtility.java new file mode 100644 index 000000000..ebc5450b6 --- /dev/null +++ b/google-cloud-bigquery-jdbc/src/main/java/com/google/cloud/bigquery/jdbc/BigQueryJdbcProxyUtility.java @@ -0,0 +1,291 @@ +/* + * Copyright 2024 Google LLC + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package com.google.cloud.bigquery.jdbc; + +import static com.google.cloud.bigquery.storage.v1.stub.BigQueryReadStubSettings.defaultGrpcTransportProviderBuilder; + +import com.google.api.client.http.HttpTransport; +import com.google.api.client.http.apache.v5.Apache5HttpTransport; +import com.google.api.gax.rpc.TransportChannelProvider; +import com.google.auth.http.HttpTransportFactory; +import com.google.cloud.bigquery.exception.BigQueryJdbcRuntimeException; +import com.google.cloud.http.HttpTransportOptions; +import io.grpc.HttpConnectProxiedSocketAddress; +import io.grpc.ProxiedSocketAddress; +import io.grpc.ProxyDetector; +import io.grpc.netty.shaded.io.grpc.netty.GrpcSslContexts; +import io.grpc.netty.shaded.io.netty.handler.ssl.SslContext; +import java.io.FileInputStream; +import java.io.IOException; +import java.net.InetSocketAddress; +import java.net.SocketAddress; +import java.security.GeneralSecurityException; +import java.security.KeyStore; +import java.util.HashMap; +import java.util.Map; +import java.util.regex.Pattern; +import javax.net.ssl.SSLContext; +import javax.net.ssl.TrustManagerFactory; +import org.apache.hc.client5.http.auth.AuthScope; +import org.apache.hc.client5.http.auth.UsernamePasswordCredentials; +import org.apache.hc.client5.http.impl.DefaultAuthenticationStrategy; +import org.apache.hc.client5.http.impl.auth.BasicCredentialsProvider; +import org.apache.hc.client5.http.impl.classic.CloseableHttpClient; +import org.apache.hc.client5.http.impl.classic.HttpClientBuilder; +import org.apache.hc.client5.http.impl.classic.HttpClients; +import org.apache.hc.client5.http.impl.io.PoolingHttpClientConnectionManagerBuilder; +import org.apache.hc.client5.http.impl.routing.DefaultProxyRoutePlanner; +import org.apache.hc.client5.http.routing.HttpRoutePlanner; +import org.apache.hc.client5.http.ssl.SSLConnectionSocketFactory; +import org.apache.hc.core5.http.HttpHost; + +final class BigQueryJdbcProxyUtility { + private static final 
BigQueryJdbcCustomLogger LOG = + new BigQueryJdbcCustomLogger(BigQueryJdbcProxyUtility.class.getName()); + static final String validPortRegex = + "^([1-9][0-9]{0,3}|[1-5][0-9]{4}|6[0-4][0-9]{3}|65[0-4][0-9]{2}|655[0-2][0-9]|6553[0-5])$"; + + private BigQueryJdbcProxyUtility() {} + + static Map parseProxyProperties(String URL, String callerClassName) { + LOG.finest("++enter++\t" + callerClassName); + Map proxyProperties = new HashMap<>(); + String proxyHost = + BigQueryJdbcUrlUtility.parseUriProperty( + URL, BigQueryJdbcUrlUtility.PROXY_HOST_PROPERTY_NAME); + if (proxyHost != null) { + proxyProperties.put(BigQueryJdbcUrlUtility.PROXY_HOST_PROPERTY_NAME, proxyHost); + } + String proxyPort = + BigQueryJdbcUrlUtility.parseUriProperty( + URL, BigQueryJdbcUrlUtility.PROXY_PORT_PROPERTY_NAME); + if (proxyPort != null) { + if (!Pattern.compile(validPortRegex).matcher(proxyPort).find()) { + throw new IllegalArgumentException( + "Illegal port number provided %s. Please provide a valid port number."); + } + proxyProperties.put(BigQueryJdbcUrlUtility.PROXY_PORT_PROPERTY_NAME, proxyPort); + } + String proxyUid = + BigQueryJdbcUrlUtility.parseUriProperty( + URL, BigQueryJdbcUrlUtility.PROXY_USER_ID_PROPERTY_NAME); + if (proxyUid != null) { + proxyProperties.put(BigQueryJdbcUrlUtility.PROXY_USER_ID_PROPERTY_NAME, proxyUid); + } + String proxyPwd = + BigQueryJdbcUrlUtility.parseUriProperty( + URL, BigQueryJdbcUrlUtility.PROXY_PASSWORD_PROPERTY_NAME); + if (proxyPwd != null) { + proxyProperties.put(BigQueryJdbcUrlUtility.PROXY_PASSWORD_PROPERTY_NAME, proxyPwd); + } + + boolean isMissingProxyHostOrPortWhenProxySet = + (proxyHost == null && proxyPort != null) || (proxyHost != null && proxyPort == null); + if (isMissingProxyHostOrPortWhenProxySet) { + throw new IllegalArgumentException( + "Both ProxyHost and ProxyPort parameters need to be specified. 
No defaulting behavior occurs."); + } + boolean isMissingProxyUidOrPwdWhenAuthSet = + (proxyUid == null && proxyPwd != null) || (proxyUid != null && proxyPwd == null); + if (isMissingProxyUidOrPwdWhenAuthSet) { + throw new IllegalArgumentException( + "Both ProxyUid and ProxyPwd parameters need to be specified for authentication."); + } + boolean isProxyAuthSetWithoutProxySettings = proxyUid != null && proxyHost == null; + if (isProxyAuthSetWithoutProxySettings) { + throw new IllegalArgumentException( + "Proxy authentication provided via connection string with no proxy host or port set."); + } + return proxyProperties; + } + + static HttpTransportOptions getHttpTransportOptions( + Map proxyProperties, + String sslTrustStorePath, + String sslTrustStorePassword, + String callerClassName) { + LOG.finest("++enter++\t" + callerClassName); + + if (!proxyProperties.containsKey(BigQueryJdbcUrlUtility.PROXY_HOST_PROPERTY_NAME) + && sslTrustStorePath == null) { + return null; + } + return HttpTransportOptions.newBuilder() + .setHttpTransportFactory( + getHttpTransportFactory( + proxyProperties, sslTrustStorePath, sslTrustStorePassword, callerClassName)) + .build(); + } + + private static HttpTransportFactory getHttpTransportFactory( + Map proxyProperties, + String sslTrustStorePath, + String sslTrustStorePassword, + String callerClassName) { + LOG.finest("++enter++\t" + callerClassName); + HttpClientBuilder httpClientBuilder = HttpClients.custom(); + boolean explicitProxySet = + proxyProperties.containsKey(BigQueryJdbcUrlUtility.PROXY_HOST_PROPERTY_NAME); + + if (explicitProxySet) { + HttpHost proxyHostDetails = + new HttpHost( + proxyProperties.get(BigQueryJdbcUrlUtility.PROXY_HOST_PROPERTY_NAME), + Integer.parseInt( + proxyProperties.get(BigQueryJdbcUrlUtility.PROXY_PORT_PROPERTY_NAME))); + HttpRoutePlanner httpRoutePlanner = new DefaultProxyRoutePlanner(proxyHostDetails); + httpClientBuilder.setRoutePlanner(httpRoutePlanner); + addAuthToProxyIfPresent(proxyProperties, 
httpClientBuilder, callerClassName); + } else { + httpClientBuilder.useSystemProperties(); + } + + if (sslTrustStorePath != null) { + try (FileInputStream trustStoreStream = new FileInputStream(sslTrustStorePath)) { + KeyStore trustStore = KeyStore.getInstance(KeyStore.getDefaultType()); + char[] trustStorePasswordChars = + sslTrustStorePassword != null ? sslTrustStorePassword.toCharArray() : null; + trustStore.load(trustStoreStream, trustStorePasswordChars); + + TrustManagerFactory trustManagerFactory = + TrustManagerFactory.getInstance(TrustManagerFactory.getDefaultAlgorithm()); + trustManagerFactory.init(trustStore); + + SSLContext sslContext = SSLContext.getInstance("TLS"); + sslContext.init(null, trustManagerFactory.getTrustManagers(), null); + + SSLConnectionSocketFactory sslSocketFactory = new SSLConnectionSocketFactory(sslContext); + httpClientBuilder.setConnectionManager( + PoolingHttpClientConnectionManagerBuilder.create() + .setSSLSocketFactory(sslSocketFactory) + .build()); + } catch (IOException | GeneralSecurityException e) { + throw new BigQueryJdbcRuntimeException(e); + } + } + addAuthToProxyIfPresent(proxyProperties, httpClientBuilder, callerClassName); + + CloseableHttpClient httpClient = httpClientBuilder.build(); + final HttpTransport httpTransport = new Apache5HttpTransport(httpClient); + return () -> httpTransport; + } + + private static void addAuthToProxyIfPresent( + Map proxyProperties, + HttpClientBuilder closeableHttpClientBuilder, + String callerClassName) { + LOG.finest("++enter++\t" + callerClassName); + if (proxyProperties.containsKey(BigQueryJdbcUrlUtility.PROXY_USER_ID_PROPERTY_NAME) + && proxyProperties.containsKey(BigQueryJdbcUrlUtility.PROXY_PASSWORD_PROPERTY_NAME)) { + + AuthScope authScope = + new AuthScope( + proxyProperties.get(BigQueryJdbcUrlUtility.PROXY_HOST_PROPERTY_NAME), + Integer.parseInt( + proxyProperties.get(BigQueryJdbcUrlUtility.PROXY_PORT_PROPERTY_NAME))); + UsernamePasswordCredentials usernamePasswordCredentials 
= + new UsernamePasswordCredentials( + proxyProperties.get(BigQueryJdbcUrlUtility.PROXY_USER_ID_PROPERTY_NAME), + proxyProperties + .get(BigQueryJdbcUrlUtility.PROXY_PASSWORD_PROPERTY_NAME) + .toCharArray()); + + BasicCredentialsProvider proxyCredentialsProvider = new BasicCredentialsProvider(); + proxyCredentialsProvider.setCredentials(authScope, usernamePasswordCredentials); + closeableHttpClientBuilder.setDefaultCredentialsProvider(proxyCredentialsProvider); + closeableHttpClientBuilder.setProxyAuthenticationStrategy( + DefaultAuthenticationStrategy.INSTANCE); // order of challenge? so it will show up + } + } + + static TransportChannelProvider getTransportChannelProvider( + Map proxyProperties, + String sslTrustStorePath, + String sslTrustStorePassword, + String callerClassName) { + LOG.finest("++enter++\t" + callerClassName); + boolean hasProxy = proxyProperties.containsKey(BigQueryJdbcUrlUtility.PROXY_HOST_PROPERTY_NAME); + boolean hasSsl = sslTrustStorePath != null; + + if (!hasProxy && !hasSsl) { + return null; + } + + TransportChannelProvider transportChannelProvider = + defaultGrpcTransportProviderBuilder() + .setChannelConfigurator( + managedChannelBuilder -> { + if (hasProxy) { + managedChannelBuilder.proxyDetector( + new ProxyDetector() { + @Override + public ProxiedSocketAddress proxyFor(SocketAddress socketAddress) { + return getHttpConnectProxiedSocketAddress( + (InetSocketAddress) socketAddress, proxyProperties); + } + }); + } + if (hasSsl + && managedChannelBuilder + instanceof io.grpc.netty.shaded.io.grpc.netty.NettyChannelBuilder) { + try (FileInputStream trustStoreStream = + new FileInputStream(sslTrustStorePath)) { + KeyStore trustStore = KeyStore.getInstance(KeyStore.getDefaultType()); + char[] trustStorePasswordChars = + sslTrustStorePassword != null + ? 
sslTrustStorePassword.toCharArray() + : null; + trustStore.load(trustStoreStream, trustStorePasswordChars); + + TrustManagerFactory trustManagerFactory = + TrustManagerFactory.getInstance( + TrustManagerFactory.getDefaultAlgorithm()); + trustManagerFactory.init(trustStore); + + SslContext grpcSslContext = + GrpcSslContexts.forClient().trustManager(trustManagerFactory).build(); + ((io.grpc.netty.shaded.io.grpc.netty.NettyChannelBuilder) + managedChannelBuilder) + .sslContext(grpcSslContext); + + } catch (IOException | GeneralSecurityException e) { + throw new BigQueryJdbcRuntimeException(e); + } + } + return managedChannelBuilder; + }) + .build(); + return transportChannelProvider; + } + + private static HttpConnectProxiedSocketAddress getHttpConnectProxiedSocketAddress( + InetSocketAddress socketAddress, Map proxyProperties) { + String proxyHost = proxyProperties.get(BigQueryJdbcUrlUtility.PROXY_HOST_PROPERTY_NAME); + int proxyPort = + Integer.parseInt(proxyProperties.get(BigQueryJdbcUrlUtility.PROXY_PORT_PROPERTY_NAME)); + HttpConnectProxiedSocketAddress.Builder builder = + HttpConnectProxiedSocketAddress.newBuilder() + .setProxyAddress(new InetSocketAddress(proxyHost, proxyPort)) + .setTargetAddress(socketAddress); + if (proxyProperties.containsKey(BigQueryJdbcUrlUtility.PROXY_USER_ID_PROPERTY_NAME) + && proxyProperties.containsKey(BigQueryJdbcUrlUtility.PROXY_PASSWORD_PROPERTY_NAME)) { + builder.setUsername(proxyProperties.get(BigQueryJdbcUrlUtility.PROXY_USER_ID_PROPERTY_NAME)); + builder.setPassword(proxyProperties.get(BigQueryJdbcUrlUtility.PROXY_PASSWORD_PROPERTY_NAME)); + } + return builder.build(); + } +} diff --git a/google-cloud-bigquery-jdbc/src/main/java/com/google/cloud/bigquery/jdbc/BigQueryJdbcRootLogger.java b/google-cloud-bigquery-jdbc/src/main/java/com/google/cloud/bigquery/jdbc/BigQueryJdbcRootLogger.java new file mode 100644 index 000000000..ef963d87d --- /dev/null +++ 
b/google-cloud-bigquery-jdbc/src/main/java/com/google/cloud/bigquery/jdbc/BigQueryJdbcRootLogger.java @@ -0,0 +1,193 @@ +/* + * Copyright 2023 Google LLC + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package com.google.cloud.bigquery.jdbc; + +import java.io.IOException; +import java.lang.management.ManagementFactory; +import java.nio.file.Files; +import java.nio.file.Path; +import java.nio.file.Paths; +import java.nio.file.StandardCopyOption; +import java.text.SimpleDateFormat; +import java.util.Date; +import java.util.Optional; +import java.util.logging.ConsoleHandler; +import java.util.logging.FileHandler; +import java.util.logging.Formatter; +import java.util.logging.Handler; +import java.util.logging.Level; +import java.util.logging.LogRecord; +import java.util.logging.Logger; + +/** This class is used to log messages from the BigQuery JDBC Driver. */ +class BigQueryJdbcRootLogger { + + /** + * Note: Each connection will have its own file handler with the level and logPath specified in + * the connection properties. But the logs will be driver logs and not connection specific. 
+ */ + private static final Logger logger = Logger.getLogger("com.google.cloud.bigquery"); + + private static final Logger storageLogger = Logger.getLogger("com.google.cloud.bigquery.storage"); + + private static Handler fileHandler = null; + private static Path currentLogPath = null; + private static int fileCounter = 0; + + static { + logger.setUseParentHandlers(false); + storageLogger.setUseParentHandlers(true); + } + + public static Formatter getFormatter() { + return new Formatter() { + private static final String PATTERN = "yyyy-MM-dd HH:mm:ss.SSS"; + private static final String FORMAT = + "%1$s %2$5s %3$d --- [%4$-7.15s] %5$-50s %6$-20s: %7$s%8$s"; + private static final int MAX_THREAD_NAME_LENGTH = 15; + + /** + * Returns the thread for the given thread id. + * + * @param threadId ID for the thread being logged. + * @return returns the thread + */ + Optional getThread(long threadId) { + return Thread.getAllStackTraces().keySet().stream() + .filter(thread -> thread.getId() == threadId) + .findFirst(); + } + + @Override + public String format(LogRecord record) { + String date = new SimpleDateFormat(PATTERN).format(new Date(record.getMillis())); + String threadName = + getThread(record.getThreadID()) + .map(Thread::getName) + .map( + name -> + name.length() > MAX_THREAD_NAME_LENGTH + ? name.substring(name.length() - MAX_THREAD_NAME_LENGTH) + : name) + .orElse(""); + long processId = + Long.parseLong(ManagementFactory.getRuntimeMXBean().getName().split("@")[0]); + String sourceClassName = record.getLoggerName(); + String sourceMethodName = record.getSourceMethodName(); + return String.format( + FORMAT, + date, + record.getLevel().getName(), + processId, + threadName, + sourceClassName, + sourceMethodName, + record.getMessage(), + System.lineSeparator()); + } + }; + } + + public static Logger getRootLogger() { + return logger; + } + + private static void setHandler() throws IOException { + // If Console handler exists, remove it. 
+ // If File handler exists, use it. Else create new one. + for (Handler h : logger.getHandlers()) { + if (h instanceof ConsoleHandler) { + h.close(); + logger.removeHandler(h); + break; + } + if (h instanceof FileHandler) { + fileHandler = h; + break; + } + } + + if (fileHandler == null) { + String fileName = String.format("BigQueryJdbc%d", fileCounter); + fileCounter++; + + currentLogPath = Files.createTempFile(fileName, ".log"); + currentLogPath.toFile().deleteOnExit(); + + fileHandler = new FileHandler(currentLogPath.toString(), 0, 1, true); + logger.addHandler(fileHandler); + } + } + + public static void setLevel(Level level, String logPath) throws IOException { + if (level != Level.OFF) { + setPath(logPath); + if (logger.getHandlers().length == 0) { + setHandler(); + fileHandler.setFormatter(getFormatter()); + logger.setUseParentHandlers(false); + } + fileHandler.setLevel(level); + logger.setLevel(level); + } else { + for (Handler h : logger.getHandlers()) { + h.close(); + logger.removeHandler(h); + } + fileHandler = null; + currentLogPath = null; + } + } + + static void setPath(String logPath) { + try { + if (!logPath.isEmpty() && !logPath.endsWith("/")) { + logPath = logPath + "/"; + } + Path dir = Paths.get(logPath); + if (!Files.exists(dir)) { + Files.createDirectory(dir); + } + + String fileName = String.format("BigQueryJdbc%d.log", fileCounter); + fileCounter++; + Path destination = Paths.get(logPath + fileName).toAbsolutePath(); + + if (currentLogPath != null && !currentLogPath.equals(destination)) { + Path source = Paths.get(currentLogPath.toUri()); + Files.move(source, destination, StandardCopyOption.REPLACE_EXISTING); + } + + currentLogPath = destination; + fileHandler = new FileHandler(currentLogPath.toString(), 0, 1, true); + fileHandler.setFormatter(getFormatter()); + + for (Handler h : logger.getHandlers()) { + if (h instanceof FileHandler) { + h.close(); + logger.removeHandler(h); + break; + } + } + + logger.addHandler(fileHandler); + + } catch 
(IOException ex) { + logger.warning("Log File warning : " + ex); + } + } +} diff --git a/google-cloud-bigquery-jdbc/src/main/java/com/google/cloud/bigquery/jdbc/BigQueryJdbcTypeMappings.java b/google-cloud-bigquery-jdbc/src/main/java/com/google/cloud/bigquery/jdbc/BigQueryJdbcTypeMappings.java new file mode 100644 index 000000000..b95ac0230 --- /dev/null +++ b/google-cloud-bigquery-jdbc/src/main/java/com/google/cloud/bigquery/jdbc/BigQueryJdbcTypeMappings.java @@ -0,0 +1,159 @@ +/* + * Copyright 2023 Google LLC + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package com.google.cloud.bigquery.jdbc; + +import com.google.api.core.InternalApi; +import com.google.cloud.bigquery.StandardSQLTypeName; +import com.google.cloud.bigquery.exception.BigQueryJdbcSqlFeatureNotSupportedException; +import com.google.common.collect.ImmutableMap; +import com.google.gson.JsonObject; +import java.math.BigDecimal; +import java.sql.Array; +import java.sql.Date; +import java.sql.Struct; +import java.sql.Time; +import java.sql.Timestamp; +import java.sql.Types; +import java.util.AbstractMap.SimpleEntry; +import java.util.Map; + +@InternalApi +class BigQueryJdbcTypeMappings { + + static final Map> standardSQLToJavaTypeMapping = + ImmutableMap.ofEntries( + entry(StandardSQLTypeName.INT64, Long.class), + entry(StandardSQLTypeName.BOOL, Boolean.class), + entry(StandardSQLTypeName.FLOAT64, Double.class), + entry(StandardSQLTypeName.NUMERIC, BigDecimal.class), + entry(StandardSQLTypeName.BIGNUMERIC, BigDecimal.class), + entry(StandardSQLTypeName.STRING, String.class), + entry(StandardSQLTypeName.TIMESTAMP, Timestamp.class), + entry(StandardSQLTypeName.DATE, Date.class), + entry(StandardSQLTypeName.TIME, Time.class), + entry(StandardSQLTypeName.DATETIME, Timestamp.class), + entry(StandardSQLTypeName.GEOGRAPHY, String.class), + entry(StandardSQLTypeName.JSON, String.class), + entry(StandardSQLTypeName.INTERVAL, String.class), + entry(StandardSQLTypeName.RANGE, String.class), + entry(StandardSQLTypeName.BYTES, byte[].class), + entry(StandardSQLTypeName.STRUCT, Struct.class), + entry(StandardSQLTypeName.ARRAY, Array.class)); + + static final Map standardSQLToJavaSqlTypesMapping = + ImmutableMap.ofEntries( + entry(StandardSQLTypeName.INT64, Types.BIGINT), + entry(StandardSQLTypeName.BOOL, Types.BOOLEAN), + entry(StandardSQLTypeName.FLOAT64, Types.DOUBLE), + entry(StandardSQLTypeName.NUMERIC, Types.NUMERIC), + entry(StandardSQLTypeName.BIGNUMERIC, Types.NUMERIC), + entry(StandardSQLTypeName.STRING, Types.NVARCHAR), + 
entry(StandardSQLTypeName.TIMESTAMP, Types.TIMESTAMP), + entry(StandardSQLTypeName.DATE, Types.DATE), + entry(StandardSQLTypeName.TIME, Types.TIME), + entry(StandardSQLTypeName.DATETIME, Types.TIMESTAMP), + entry(StandardSQLTypeName.GEOGRAPHY, Types.OTHER), + entry(StandardSQLTypeName.JSON, Types.OTHER), + entry(StandardSQLTypeName.INTERVAL, Types.OTHER), + entry(StandardSQLTypeName.RANGE, Types.OTHER), + entry(StandardSQLTypeName.BYTES, Types.VARBINARY), + entry(StandardSQLTypeName.STRUCT, Types.STRUCT), + entry(StandardSQLTypeName.ARRAY, Types.ARRAY)); + + static final Map> javaSQLToJavaTypeMapping = + ImmutableMap.ofEntries( + entry(Types.BIGINT, Long.class), + entry(Types.INTEGER, Integer.class), + entry(Types.BOOLEAN, Boolean.class), + entry(Types.DOUBLE, Double.class), + entry(Types.FLOAT, Float.class), + entry(Types.NUMERIC, BigDecimal.class), + entry(Types.VARCHAR, String.class), + entry(Types.NVARCHAR, String.class), + entry(Types.TIMESTAMP, Timestamp.class), + entry(Types.DATE, Date.class), + entry(Types.TIME, Time.class), + entry(Types.OTHER, String.class), + entry(Types.BINARY, byte[].class), + entry(Types.VARBINARY, byte[].class), + entry(Types.STRUCT, Struct.class), + entry(Types.BIT, Boolean.class), + entry(Types.ARRAY, Array.class)); + + static StandardSQLTypeName classToType(Class type) + throws BigQueryJdbcSqlFeatureNotSupportedException { + if (Boolean.class.isAssignableFrom(type)) { + return StandardSQLTypeName.BOOL; + } else if (String.class.isAssignableFrom(type)) { + return StandardSQLTypeName.STRING; + } else if (String.class.isAssignableFrom(type)) { + return StandardSQLTypeName.GEOGRAPHY; + } else if (String.class.isAssignableFrom(type)) { + return StandardSQLTypeName.DATETIME; + } else if (Integer.class.isAssignableFrom(type)) { + return StandardSQLTypeName.INT64; + } else if (Long.class.isAssignableFrom(type)) { + return StandardSQLTypeName.INT64; + } else if (Double.class.isAssignableFrom(type)) { + return StandardSQLTypeName.FLOAT64; + 
} else if (Float.class.isAssignableFrom(type)) { + return StandardSQLTypeName.FLOAT64; + } else if (BigDecimal.class.isAssignableFrom(type)) { + return StandardSQLTypeName.NUMERIC; + } else if (BigDecimal.class.isAssignableFrom(type)) { + return StandardSQLTypeName.BIGNUMERIC; + } else if (Date.class.isAssignableFrom(type)) { + return StandardSQLTypeName.DATE; + } else if (Timestamp.class.isAssignableFrom(type)) { + return StandardSQLTypeName.TIMESTAMP; + } else if (Time.class.isAssignableFrom(type)) { + return StandardSQLTypeName.TIME; + } else if (String.class.isAssignableFrom(type)) { + return StandardSQLTypeName.JSON; + } else if (JsonObject.class.isAssignableFrom(type)) { + return StandardSQLTypeName.JSON; + } else if (Byte.class.isAssignableFrom(type)) { + return StandardSQLTypeName.BYTES; + } else if (Array.class.isAssignableFrom(type)) { + return StandardSQLTypeName.ARRAY; + } else if (Struct.class.isAssignableFrom(type)) { + return StandardSQLTypeName.STRUCT; + } else if (byte[].class.isAssignableFrom(type)) { + return StandardSQLTypeName.BYTES; + } + throw new BigQueryJdbcSqlFeatureNotSupportedException( + "Unsupported object type for QueryParameter: " + type); + } + + static Class getJavaType(int javaSQLType) throws BigQueryJdbcSqlFeatureNotSupportedException { + if (!javaSQLToJavaTypeMapping.containsKey(javaSQLType)) { + throw new BigQueryJdbcSqlFeatureNotSupportedException( + "Unsupported Java type for SQL type: " + javaSQLType); + } + Class javaType = javaSQLToJavaTypeMapping.get(javaSQLType); + if (javaType == null) { + // This should never happen unless the map was initialized with null values. 
+ throw new BigQueryJdbcSqlFeatureNotSupportedException( + "Unsupported Java type for SQL type: " + javaSQLType); + } + return javaType; + } + + private static SimpleEntry entry(K key, V value) { + return new SimpleEntry<>(key, value); + } +} diff --git a/google-cloud-bigquery-jdbc/src/main/java/com/google/cloud/bigquery/jdbc/BigQueryJdbcUrlUtility.java b/google-cloud-bigquery-jdbc/src/main/java/com/google/cloud/bigquery/jdbc/BigQueryJdbcUrlUtility.java new file mode 100644 index 000000000..3b26f7be5 --- /dev/null +++ b/google-cloud-bigquery-jdbc/src/main/java/com/google/cloud/bigquery/jdbc/BigQueryJdbcUrlUtility.java @@ -0,0 +1,920 @@ +/* + * Copyright 2023 Google LLC + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package com.google.cloud.bigquery.jdbc; + +import com.google.api.client.util.escape.CharEscapers; +import com.google.cloud.bigquery.BigQueryOptions; +import com.google.cloud.bigquery.exception.BigQueryJdbcRuntimeException; +import java.util.Arrays; +import java.util.Collections; +import java.util.HashMap; +import java.util.HashSet; +import java.util.List; +import java.util.Map; +import java.util.Map.Entry; +import java.util.Properties; +import java.util.Set; +import java.util.logging.Level; +import java.util.regex.Matcher; +import java.util.regex.Pattern; +import java.util.stream.Collectors; + +/** + * This class implements all the methods that parse Connection property values from the Connection + * String. 
+ */ +final class BigQueryJdbcUrlUtility { + + // TODO: Add all Connection options + static final String ALLOW_LARGE_RESULTS_PROPERTY_NAME = "AllowLargeResults"; + static final String LARGE_RESULTS_TABLE_PROPERTY_NAME = "LargeResultTable"; + static final String LARGE_RESULTS_DATASET_PROPERTY_NAME = "LargeResultDataset"; + static final String UNSUPPORTED_HTAPI_FALLBACK_PROPERTY_NAME = "UnsupportedHTAPIFallback"; + static final boolean DEFAULT_UNSUPPORTED_HTAPI_FALLBACK_VALUE = true; + static final String DESTINATION_DATASET_EXPIRATION_TIME_PROPERTY_NAME = + "LargeResultsDatasetExpirationTime"; + static final long DEFAULT_DESTINATION_DATASET_EXPIRATION_TIME_VALUE = 3600000L; + static final boolean DEFAULT_ALLOW_LARGE_RESULTS = true; + static final String QUERY_DIALECT_PROPERTY_NAME = "QueryDialect"; + static final String DEFAULT_QUERY_DIALECT_VALUE = "SQL"; + static final String UNIVERSE_DOMAIN_OVERRIDE_PROPERTY_NAME = "universeDomain"; + static final String DEFAULT_UNIVERSE_DOMAIN_VALUE = "googleapis.com"; + static final String PROJECT_ID_PROPERTY_NAME = "ProjectId"; + static final String DEFAULT_DATASET_PROPERTY_NAME = "DefaultDataset"; + static final String OAUTH_TYPE_PROPERTY_NAME = "OAuthType"; + static final String HTAPI_ACTIVATION_RATIO_PROPERTY_NAME = "HighThroughputActivationRatio"; + static final String KMS_KEY_NAME_PROPERTY_NAME = "KMSKeyName"; + static final String QUERY_PROPERTIES_NAME = "QueryProperties"; + static final int DEFAULT_HTAPI_ACTIVATION_RATIO_VALUE = + 2; // TODO: to adjust this value before private preview based on performance testing. 
+ static final String HTAPI_MIN_TABLE_SIZE_PROPERTY_NAME = "HighThroughputMinTableSize"; + static final int DEFAULT_HTAPI_MIN_TABLE_SIZE_VALUE = 100; + static final int DEFAULT_OAUTH_TYPE_VALUE = -1; + static final String LOCATION_PROPERTY_NAME = "Location"; + static final String ENDPOINT_OVERRIDES_PROPERTY_NAME = "EndpointOverrides"; + static final String PRIVATE_SERVICE_CONNECT_PROPERTY_NAME = "PrivateServiceConnectUris"; + static final String OAUTH_SA_IMPERSONATION_EMAIL_PROPERTY_NAME = + "ServiceAccountImpersonationEmail"; + static final String DEFAULT_OAUTH_SA_IMPERSONATION_EMAIL_VALUE = null; + static final String OAUTH_SA_IMPERSONATION_CHAIN_PROPERTY_NAME = + "ServiceAccountImpersonationChain"; + static final String DEFAULT_OAUTH_SA_IMPERSONATION_CHAIN_VALUE = null; + static final String OAUTH_SA_IMPERSONATION_SCOPES_PROPERTY_NAME = + "ServiceAccountImpersonationScopes"; + static final String DEFAULT_OAUTH_SA_IMPERSONATION_SCOPES_VALUE = + "https://www.googleapis.com/auth/bigquery"; + static final String OAUTH_SA_IMPERSONATION_TOKEN_LIFETIME_PROPERTY_NAME = + "ServiceAccountImpersonationTokenLifetime"; + static final String DEFAULT_OAUTH_SA_IMPERSONATION_TOKEN_LIFETIME_VALUE = "3600"; + static final String OAUTH_SA_EMAIL_PROPERTY_NAME = "OAuthServiceAcctEmail"; + static final String OAUTH_PVT_KEY_PATH_PROPERTY_NAME = "OAuthPvtKeyPath"; + static final String OAUTH_P12_PASSWORD_PROPERTY_NAME = "OAuthP12Password"; + static final String DEFAULT_OAUTH_P12_PASSWORD_VALUE = "notasecret"; + static final String OAUTH_PVT_KEY_PROPERTY_NAME = "OAuthPvtKey"; + static final String OAUTH2_TOKEN_URI_PROPERTY_NAME = "OAUTH2"; + static final String HTAPI_ENDPOINT_OVERRIDE_PROPERTY_NAME = "READ_API"; + static final String BIGQUERY_ENDPOINT_OVERRIDE_PROPERTY_NAME = "BIGQUERY"; + static final String STS_ENDPOINT_OVERRIDE_PROPERTY_NAME = "STS"; + static final String OAUTH_ACCESS_TOKEN_PROPERTY_NAME = "OAuthAccessToken"; + static final String OAUTH_REFRESH_TOKEN_PROPERTY_NAME = 
"OAuthRefreshToken"; + static final String OAUTH_CLIENT_ID_PROPERTY_NAME = "OAuthClientId"; + static final String OAUTH_CLIENT_SECRET_PROPERTY_NAME = "OAuthClientSecret"; + static final String ENABLE_HTAPI_PROPERTY_NAME = "EnableHighThroughputAPI"; + static final String PROXY_HOST_PROPERTY_NAME = "ProxyHost"; + static final String PROXY_PORT_PROPERTY_NAME = "ProxyPort"; + static final String PROXY_USER_ID_PROPERTY_NAME = "ProxyUid"; + static final String PROXY_PASSWORD_PROPERTY_NAME = "ProxyPwd"; + static final boolean DEFAULT_ENABLE_HTAPI_VALUE = false; + static final boolean DEFAULT_ENABLE_SESSION_VALUE = false; + static final int DEFAULT_LOG_LEVEL = 0; + static final String LOG_LEVEL_PROPERTY_NAME = "LogLevel"; + static final String LOG_PATH_PROPERTY_NAME = "LogPath"; + static final String LOG_LEVEL_ENV_VAR = "BIGQUERY_JDBC_LOG_LEVEL"; + static final String LOG_PATH_ENV_VAR = "BIGQUERY_JDBC_LOG_PATH"; + static final String ENABLE_SESSION_PROPERTY_NAME = "EnableSession"; + static final String DEFAULT_LOG_PATH = ""; + static final String USE_QUERY_CACHE_PROPERTY_NAME = "UseQueryCache"; + static final boolean DEFAULT_USE_QUERY_CACHE = true; + static final String JOB_CREATION_MODE_PROPERTY_NAME = "JobCreationMode"; + static final int DEFAULT_JOB_CREATION_MODE = 2; + static final String MAX_RESULTS_PROPERTY_NAME = "MaxResults"; + static final long DEFAULT_MAX_RESULTS_VALUE = 10000; + static final String BYOID_AUDIENCE_URI_PROPERTY_NAME = "BYOID_AudienceUri"; + static final String BYOID_CREDENTIAL_SOURCE_PROPERTY_NAME = "BYOID_CredentialSource"; + static final String BYOID_POOL_USER_PROJECT_PROPERTY_NAME = "BYOID_PoolUserProject"; + static final String BYOID_SA_IMPERSONATION_URI_PROPERTY_NAME = "BYOID_SA_Impersonation_Uri"; + static final String BYOID_SUBJECT_TOKEN_TYPE_PROPERTY_NAME = "BYOID_SubjectTokenType"; + static final String BYOID_TOKEN_URI_PROPERTY_NAME = "BYOID_TokenUri"; + static final String PARTNER_TOKEN_PROPERTY_NAME = "PartnerToken"; + static final 
String METADATA_FETCH_THREAD_COUNT_PROPERTY_NAME = "MetaDataFetchThreadCount"; + static final int DEFAULT_METADATA_FETCH_THREAD_COUNT_VALUE = 32; + static final String RETRY_TIMEOUT_IN_SECS_PROPERTY_NAME = "Timeout"; + static final long DEFAULT_RETRY_TIMEOUT_IN_SECS_VALUE = 0L; + static final String JOB_TIMEOUT_PROPERTY_NAME = "JobTimeout"; + static final long DEFAULT_JOB_TIMEOUT_VALUE = 0L; + static final String RETRY_INITIAL_DELAY_PROPERTY_NAME = "RetryInitialDelay"; + static final long DEFAULT_RETRY_INITIAL_DELAY_VALUE = 0L; + static final String RETRY_MAX_DELAY_PROPERTY_NAME = "RetryMaxDelay"; + static final long DEFAULT_RETRY_MAX_DELAY_VALUE = 0L; + static final String ADDITIONAL_PROJECTS_PROPERTY_NAME = "AdditionalProjects"; + // Applicable only for connection pooling. + static final String CONNECTION_POOL_SIZE_PROPERTY_NAME = "ConnectionPoolSize"; + static final long DEFAULT_CONNECTION_POOL_SIZE_VALUE = 10L; + static final String LISTENER_POOL_SIZE_PROPERTY_NAME = "ListenerPoolSize"; + static final long DEFAULT_LISTENER_POOL_SIZE_VALUE = 10L; + static final String ENABLE_WRITE_API_PROPERTY_NAME = "EnableWriteAPI"; + static final boolean DEFAULT_ENABLE_WRITE_API_VALUE = false; + static final String SWA_APPEND_ROW_COUNT_PROPERTY_NAME = "SWA_AppendRowCount"; + static final int DEFAULT_SWA_APPEND_ROW_COUNT_VALUE = 1000; + static final String SWA_ACTIVATION_ROW_COUNT_PROPERTY_NAME = "SWA_ActivationRowCount"; + static final int DEFAULT_SWA_ACTIVATION_ROW_COUNT_VALUE = 3; + private static final BigQueryJdbcCustomLogger LOG = + new BigQueryJdbcCustomLogger(BigQueryJdbcUrlUtility.class.getName()); + static final String FILTER_TABLES_ON_DEFAULT_DATASET_PROPERTY_NAME = + "FilterTablesOnDefaultDataset"; + static final boolean DEFAULT_FILTER_TABLES_ON_DEFAULT_DATASET_VALUE = false; + static final String REQUEST_GOOGLE_DRIVE_SCOPE_PROPERTY_NAME = "RequestGoogleDriveScope"; + static final String SSL_TRUST_STORE_PROPERTY_NAME = "SSLTrustStore"; + static final String 
SSL_TRUST_STORE_PWD_PROPERTY_NAME = "SSLTrustStorePwd"; + static final int DEFAULT_REQUEST_GOOGLE_DRIVE_SCOPE_VALUE = 0; + static final String MAX_BYTES_BILLED_PROPERTY_NAME = "MaximumBytesBilled"; + static final Long DEFAULT_MAX_BYTES_BILLED_VALUE = 0L; + static final String LABELS_PROPERTY_NAME = "Labels"; + static final List OVERRIDE_PROPERTIES = + Arrays.asList( + BIGQUERY_ENDPOINT_OVERRIDE_PROPERTY_NAME, + OAUTH2_TOKEN_URI_PROPERTY_NAME, + HTAPI_ENDPOINT_OVERRIDE_PROPERTY_NAME, + STS_ENDPOINT_OVERRIDE_PROPERTY_NAME); + static final List BYOID_PROPERTIES = + Arrays.asList( + BYOID_AUDIENCE_URI_PROPERTY_NAME, + BYOID_CREDENTIAL_SOURCE_PROPERTY_NAME, + BYOID_POOL_USER_PROJECT_PROPERTY_NAME, + BYOID_SA_IMPERSONATION_URI_PROPERTY_NAME, + BYOID_SUBJECT_TOKEN_TYPE_PROPERTY_NAME, + BYOID_TOKEN_URI_PROPERTY_NAME); + + static Set PROXY_PROPERTIES = + Collections.unmodifiableSet( + new HashSet<>( + Arrays.asList( + BigQueryConnectionProperty.newBuilder() + .setName(PROXY_HOST_PROPERTY_NAME) + .setDescription("The host name of the proxy server.") + .build(), + BigQueryConnectionProperty.newBuilder() + .setName(PROXY_PORT_PROPERTY_NAME) + .setDescription( + "The port number of the proxy server to connect to. 
No defaulting" + + " behavior happens.") + .build(), + BigQueryConnectionProperty.newBuilder() + .setName(PROXY_USER_ID_PROPERTY_NAME) + .setDescription("The user name for an authenticated proxy server.") + .build(), + BigQueryConnectionProperty.newBuilder() + .setName(PROXY_PASSWORD_PROPERTY_NAME) + .setDescription("The password for an authenticated proxy server.") + .build()))); + + static Set AUTH_PROPERTIES = + Collections.unmodifiableSet( + new HashSet<>( + Arrays.asList( + BigQueryConnectionProperty.newBuilder() + .setName(OAUTH_TYPE_PROPERTY_NAME) + .setDescription( + "This option specifies how the connector obtains or provides the" + + " credentials for OAuth\n" + + "2.0 authentication") + .setDefaultValue(String.valueOf(DEFAULT_OAUTH_TYPE_VALUE)) + .build(), + BigQueryConnectionProperty.newBuilder() + .setName(OAUTH_SA_EMAIL_PROPERTY_NAME) + .setDescription( + "The Service Account email use for Service Account Authentication.") + .build(), + BigQueryConnectionProperty.newBuilder() + .setName(OAUTH_PVT_KEY_PATH_PROPERTY_NAME) + .setDescription( + "The location of the credentials file used for this connection.") + .build(), + BigQueryConnectionProperty.newBuilder() + .setName(OAUTH_PVT_KEY_PROPERTY_NAME) + .setDescription("The OAuth private key used for this connection.") + .build(), + BigQueryConnectionProperty.newBuilder() + .setName(OAUTH_REFRESH_TOKEN_PROPERTY_NAME) + .setDescription( + "The pre-generated refresh token to be used with BigQuery for" + + " authentication.") + .build(), + BigQueryConnectionProperty.newBuilder() + .setName(OAUTH_ACCESS_TOKEN_PROPERTY_NAME) + .setDescription( + "The pre-generated access token to be used with BigQuery for" + + " authentication.") + .build(), + BigQueryConnectionProperty.newBuilder() + .setName(OAUTH_CLIENT_ID_PROPERTY_NAME) + .setDescription( + "The client ID to be used for user authentication or to refresh" + + " pre-generated tokens.") + .build(), + BigQueryConnectionProperty.newBuilder() + 
.setName(OAUTH_CLIENT_SECRET_PROPERTY_NAME) + .setDescription( + "The client secret to be used for user authentication or to refresh" + + " pre-generated tokens.") + .build(), + BigQueryConnectionProperty.newBuilder() + .setName(OAUTH_SA_IMPERSONATION_EMAIL_PROPERTY_NAME) + .setDescription("The service account email to be impersonated.") + .build(), + BigQueryConnectionProperty.newBuilder() + .setName(OAUTH_SA_IMPERSONATION_CHAIN_PROPERTY_NAME) + .setDescription( + "Comma separated list of service account emails in the impersonation chain.") + .build(), + BigQueryConnectionProperty.newBuilder() + .setName(OAUTH_SA_IMPERSONATION_SCOPES_PROPERTY_NAME) + .setDescription( + "Comma separated list of OAuth2 scopes to use with impersonated account.") + .build(), + BigQueryConnectionProperty.newBuilder() + .setName(OAUTH_SA_IMPERSONATION_TOKEN_LIFETIME_PROPERTY_NAME) + .setDescription("Impersonated account token lifetime.") + .build(), + BigQueryConnectionProperty.newBuilder() + .setName(OAUTH_P12_PASSWORD_PROPERTY_NAME) + .setDescription("Password for p12 secret file.") + .build()))); + + static Set VALID_PROPERTIES = + Collections.unmodifiableSet( + new HashSet<>( + Arrays.asList( + BigQueryConnectionProperty.newBuilder() + .setName(MAX_BYTES_BILLED_PROPERTY_NAME) + .setDescription( + " Limits the bytes billed for this query. Queries with bytes billed above" + + " this limit will fail (without incurring a charge). 
If" + + " unspecified, the project default is used.") + .setDefaultValue(String.valueOf(DEFAULT_MAX_BYTES_BILLED_VALUE)) + .build(), + BigQueryConnectionProperty.newBuilder() + .setName(CONNECTION_POOL_SIZE_PROPERTY_NAME) + .setDescription("Connection pool size if connection pooling is enabled.") + .setDefaultValue(String.valueOf(DEFAULT_CONNECTION_POOL_SIZE_VALUE)) + .build(), + BigQueryConnectionProperty.newBuilder() + .setName(LISTENER_POOL_SIZE_PROPERTY_NAME) + .setDescription("Listener pool size if connection pooling is enabled.") + .setDefaultValue(String.valueOf(DEFAULT_LISTENER_POOL_SIZE_VALUE)) + .build(), + BigQueryConnectionProperty.newBuilder() + .setName(RETRY_INITIAL_DELAY_PROPERTY_NAME) + .setDescription("Initial delay, in seconds, before the first retry.") + .setDefaultValue(String.valueOf(DEFAULT_RETRY_INITIAL_DELAY_VALUE)) + .build(), + BigQueryConnectionProperty.newBuilder() + .setName(RETRY_MAX_DELAY_PROPERTY_NAME) + .setDescription("Max limit for the retry delay, in seconds.") + .setDefaultValue(String.valueOf(DEFAULT_RETRY_MAX_DELAY_VALUE)) + .build(), + BigQueryConnectionProperty.newBuilder() + .setName(RETRY_TIMEOUT_IN_SECS_PROPERTY_NAME) + .setDescription( + "The length of time, in seconds, for which the connector retries a failed" + + " API call before timing out.") + .setDefaultValue(String.valueOf(DEFAULT_RETRY_TIMEOUT_IN_SECS_VALUE)) + .build(), + BigQueryConnectionProperty.newBuilder() + .setName(JOB_TIMEOUT_PROPERTY_NAME) + .setDescription( + "Job timeout (in seconds) after which the job is cancelled on the server") + .setDefaultValue(String.valueOf(DEFAULT_JOB_TIMEOUT_VALUE)) + .build(), + BigQueryConnectionProperty.newBuilder() + .setName(UNSUPPORTED_HTAPI_FALLBACK_PROPERTY_NAME) + .setDescription( + "This option determines whether the connector uses the REST API or" + + " returns an error when encountering fetch workflows unsupported by" + + " the High-Throughput API.") + 
.setDefaultValue(String.valueOf(DEFAULT_UNSUPPORTED_HTAPI_FALLBACK_VALUE)) + .build(), + BigQueryConnectionProperty.newBuilder() + .setName(DESTINATION_DATASET_EXPIRATION_TIME_PROPERTY_NAME) + .setDescription( + "The expiration time (in milliseconds) for tables in a user-specified" + + " large result dataset.") + .setDefaultValue( + String.valueOf(DEFAULT_DESTINATION_DATASET_EXPIRATION_TIME_VALUE)) + .build(), + BigQueryConnectionProperty.newBuilder() + .setName(UNIVERSE_DOMAIN_OVERRIDE_PROPERTY_NAME) + .setDescription( + "The name of the partner-operated cloud which is a new instance of Google" + + " production, known as a Trusted Partner Cloud universe.") + .setDefaultValue(DEFAULT_UNIVERSE_DOMAIN_VALUE) + .build(), + BigQueryConnectionProperty.newBuilder() + .setName(PROJECT_ID_PROPERTY_NAME) + .setDescription("A globally unique identifier for your project.") + .setDefaultValue(BigQueryOptions.getDefaultProjectId()) + .build(), + BigQueryConnectionProperty.newBuilder() + .setName(LOG_PATH_PROPERTY_NAME) + .setDescription( + "The directory where the connector saves log files (when logging is" + + " enabled).") + .setDefaultValue(DEFAULT_LOG_PATH) + .build(), + BigQueryConnectionProperty.newBuilder() + .setName(DEFAULT_DATASET_PROPERTY_NAME) + .setDescription( + "This default dataset for query execution. If this option is set, queries" + + " with unqualified \n" + + "table names will run against this dataset.") + .build(), + BigQueryConnectionProperty.newBuilder() + .setName(LOCATION_PROPERTY_NAME) + .setDescription( + "The location where datasets are created/queried. The location will be" + + " determined\n" + + " automatically by BigQuery if not specified.") + .build(), + BigQueryConnectionProperty.newBuilder() + .setName(ENABLE_HTAPI_PROPERTY_NAME) + .setDescription( + "Enables or disables Read API usage in the Driver. 
Disabled by default.") + .setDefaultValue(String.valueOf(DEFAULT_ENABLE_HTAPI_VALUE)) + .build(), + BigQueryConnectionProperty.newBuilder() + .setName(HTAPI_ACTIVATION_RATIO_PROPERTY_NAME) + .setDescription( + "Connector switches to BigQuery Storage API when the number of pages" + + " exceed this value.") + .setDefaultValue(String.valueOf(DEFAULT_HTAPI_ACTIVATION_RATIO_VALUE)) + .build(), + BigQueryConnectionProperty.newBuilder() + .setName(KMS_KEY_NAME_PROPERTY_NAME) + .setDescription( + "The KMS key name tells BigQuery which key to use when encrypting or" + + " decrypting your data.") + .build(), + BigQueryConnectionProperty.newBuilder() + .setName(QUERY_PROPERTIES_NAME) + .setDescription( + "Connection-level properties to customize query behavior.") // TODO: + // Figure out + // a clean way + .build(), + BigQueryConnectionProperty.newBuilder() + .setName(LABELS_PROPERTY_NAME) + .setDescription( + "Labels associated with the query to organize and group query jobs.") + .build(), + BigQueryConnectionProperty.newBuilder() + .setName(HTAPI_MIN_TABLE_SIZE_PROPERTY_NAME) + .setDescription( + "If the number of total rows exceeds this value, the connector switches" + + " to the BigQuery Storage API for faster processing.") + .setDefaultValue(String.valueOf(DEFAULT_HTAPI_MIN_TABLE_SIZE_VALUE)) + .build(), + BigQueryConnectionProperty.newBuilder() + .setName(ENABLE_SESSION_PROPERTY_NAME) + .setDescription( + "Enable to capture your SQL activities or enable multi statement" + + " transactions. Disabled by default.") + .setDefaultValue(String.valueOf(DEFAULT_ENABLE_SESSION_VALUE)) + .build(), + BigQueryConnectionProperty.newBuilder() + .setName(LOG_LEVEL_PROPERTY_NAME) + .setDescription( + "Sets the Log Level for the Driver. Set to Level.OFF by default.") + .setDefaultValue(String.valueOf(DEFAULT_LOG_LEVEL)) + .build(), + BigQueryConnectionProperty.newBuilder() + .setName(USE_QUERY_CACHE_PROPERTY_NAME) + .setDescription("Enables or disables Query caching. 
Set to true by default.") + .setDefaultValue(String.valueOf(DEFAULT_USE_QUERY_CACHE)) + .build(), + BigQueryConnectionProperty.newBuilder() + .setName(QUERY_DIALECT_PROPERTY_NAME) + .setDescription( + "Parameter for selecting if the queries should use standard or legacy SQL" + + " syntax.") + .setDefaultValue(DEFAULT_QUERY_DIALECT_VALUE) + .build(), + BigQueryConnectionProperty.newBuilder() + .setName(ALLOW_LARGE_RESULTS_PROPERTY_NAME) + .setDescription( + "Enabled by default, must be used with legacy SQL. Used for setting" + + " destination table & dataset.") + .setDefaultValue(String.valueOf(DEFAULT_ALLOW_LARGE_RESULTS)) + .build(), + BigQueryConnectionProperty.newBuilder() + .setName(LARGE_RESULTS_TABLE_PROPERTY_NAME) + .setDescription("The destination table where queries are saved.") + .build(), + BigQueryConnectionProperty.newBuilder() + .setName(LARGE_RESULTS_DATASET_PROPERTY_NAME) + .setDescription("The destination dataset where queries are saved.") + .build(), + BigQueryConnectionProperty.newBuilder() + .setName(JOB_CREATION_MODE_PROPERTY_NAME) + .setDescription( + "Enables or disables Stateless Query mode. Set to false by default.") + .setDefaultValue(String.valueOf(DEFAULT_JOB_CREATION_MODE)) + .build(), + BigQueryConnectionProperty.newBuilder() + .setName(MAX_RESULTS_PROPERTY_NAME) + .setDescription("Maximum number of results per page") + .setDefaultValue(String.valueOf(DEFAULT_MAX_RESULTS_VALUE)) + .build(), + BigQueryConnectionProperty.newBuilder() + .setName(BYOID_AUDIENCE_URI_PROPERTY_NAME) + .setDescription( + "Used for External Account Authentication. Corresponds to the audience" + + " property\n" + + " in the external account configuration file.") + .build(), + BigQueryConnectionProperty.newBuilder() + .setName(BYOID_CREDENTIAL_SOURCE_PROPERTY_NAME) + .setDescription( + "Used for External Account Authentication. The file location or the URI" + + " of\n" + + " the subject token. 
Corresponds to the credential_source property" + + " in\n" + + " the external account configuration file.") + .build(), + BigQueryConnectionProperty.newBuilder() + .setName(BYOID_POOL_USER_PROJECT_PROPERTY_NAME) + .setDescription( + "Used for External Account Authentication. The project number associated" + + " with\n" + + " the workforce pool. Corresponds to the" + + " workforce_pool_user_project\n" + + " property in the external account configuration file.") + .build(), + BigQueryConnectionProperty.newBuilder() + .setName(BYOID_SA_IMPERSONATION_URI_PROPERTY_NAME) + .setDescription( + "Used for External Account Authentication. The service account email." + + " Only\n" + + " present when service account impersonation is used. Corresponds" + + " to\n" + + " the service_account_impersonation_url property in the external" + + " account\n" + + " configuration file.") + .build(), + BigQueryConnectionProperty.newBuilder() + .setName(BYOID_SUBJECT_TOKEN_TYPE_PROPERTY_NAME) + .setDescription( + "Used for External Account Authentication. The subject token type." + + " Corresponds\n" + + " to the subject_token_type property in the external account" + + " configuration file.") + .setDefaultValue("urn:ietf:params:oauth:tokentype:id_token") + .build(), + BigQueryConnectionProperty.newBuilder() + .setName(BYOID_TOKEN_URI_PROPERTY_NAME) + .setDescription( + "Used for External Account Authentication. The URI used to generate" + + " authentication\n" + + " tokens. 
Corresponds to the token_url property in the external" + + " account\n" + + " configuration file.") + .setDefaultValue("https://sts.googleapis.com/v1/token") + .build(), + BigQueryConnectionProperty.newBuilder() + .setName(PARTNER_TOKEN_PROPERTY_NAME) + .setDescription("The partner name and environment.") + .build(), + BigQueryConnectionProperty.newBuilder() + .setName(METADATA_FETCH_THREAD_COUNT_PROPERTY_NAME) + .setDescription( + "The number of threads used to call a DatabaseMetaData method.") + .setDefaultValue(String.valueOf(DEFAULT_METADATA_FETCH_THREAD_COUNT_VALUE)) + .build(), + BigQueryConnectionProperty.newBuilder() + .setName(ENABLE_WRITE_API_PROPERTY_NAME) + .setDescription( + "Enables or disables Write API usage for bulk inserts in the Driver." + + " Disabled by default.") + .setDefaultValue(String.valueOf(DEFAULT_ENABLE_WRITE_API_VALUE)) + .build(), + BigQueryConnectionProperty.newBuilder() + .setName(SWA_ACTIVATION_ROW_COUNT_PROPERTY_NAME) + .setDescription( + "Connector switches to BigQuery Storage Write API when the number of rows" + + " for executeBatch insert exceed this value. Do not change unless" + + " necessary.") + .setDefaultValue(String.valueOf(DEFAULT_SWA_ACTIVATION_ROW_COUNT_VALUE)) + .build(), + BigQueryConnectionProperty.newBuilder() + .setName(SWA_APPEND_ROW_COUNT_PROPERTY_NAME) + .setDescription("Size of the write stream. 
Do not change unless necessary.") + .setDefaultValue(String.valueOf(DEFAULT_SWA_APPEND_ROW_COUNT_VALUE)) + .build(), + BigQueryConnectionProperty.newBuilder() + .setName(ADDITIONAL_PROJECTS_PROPERTY_NAME) + .setDescription( + "A comma-separated list of Google Cloud project IDs that can be accessed" + + " for querying, in addition to the primary project specified in the" + + " connection.") + .build(), + BigQueryConnectionProperty.newBuilder() + .setName(FILTER_TABLES_ON_DEFAULT_DATASET_PROPERTY_NAME) + .setDescription( + "If true and DefaultDataset is set, DatabaseMetaData.getTables() and" + + " .getColumns() will filter results based on the DefaultDataset" + + " when catalog/schema patterns are null or wildcards.") + .setDefaultValue( + String.valueOf(DEFAULT_FILTER_TABLES_ON_DEFAULT_DATASET_VALUE)) + .build(), + BigQueryConnectionProperty.newBuilder() + .setName(REQUEST_GOOGLE_DRIVE_SCOPE_PROPERTY_NAME) + .setDescription( + "Enables or disables whether the connector requests access to Google" + + " Drive. Set to false (0) by default.") + .setDefaultValue(String.valueOf(DEFAULT_REQUEST_GOOGLE_DRIVE_SCOPE_VALUE)) + .build(), + BigQueryConnectionProperty.newBuilder() + .setName(SSL_TRUST_STORE_PROPERTY_NAME) + .setDescription( + "The full path of the Java TrustStore containing the server certificate" + + " for one-way SSL authentication.\n" + + "If the trust store requires a password, provide it using the" + + " property SSLTrustStorePwd.") + .build(), + BigQueryConnectionProperty.newBuilder() + .setName(SSL_TRUST_STORE_PWD_PROPERTY_NAME) + .setDescription( + "The password for accessing the Java TrustStore that is specified using" + + " the property SSLTrustStore.") + .build()))); + + private BigQueryJdbcUrlUtility() {} + + /** + * Parses a URI property from the given URI. + * + * @param uri The URI to parse. + * @param property The name of the property to parse. + * @return The String value of the property, or the default value if the property is not found. 
+ */ + static String parseUriProperty(String uri, String property) { + Pattern pattern = Pattern.compile(String.format("(?is)(?:;|\\?)%s=(.*?)(?:;|$)", property)); + Matcher matcher = pattern.matcher(uri); + if (matcher.find() && matcher.groupCount() == 1) { + return CharEscapers.decodeUriPath(matcher.group(1)); + } + return null; + } + + /** + * Appends the given properties to the given URL. + * + * @param url The URL to append the properties to. + * @param properties The properties to append. + * @return The string value of the updated URL. + */ + static String appendPropertiesToURL(String url, String callerClassName, Properties properties) { + LOG.finest("++enter++ " + callerClassName); + StringBuilder urlBuilder = new StringBuilder(url); + for (Entry entry : properties.entrySet()) { + if (entry.getValue() != null && !"".equals(entry.getValue())) { + LOG.info( + String.format("Appending %s with value %s to URL", entry.getKey(), entry.getValue())); + urlBuilder.append(";").append(entry.getKey()).append("=").append(entry.getValue()); + } + } + return urlBuilder.toString(); + } + + static boolean convertIntToBoolean(String value, String propertyName) { + int integerValue; + + try { + if (value.equalsIgnoreCase("true")) { + integerValue = 1; + } else if (value.equalsIgnoreCase("false")) { + integerValue = 0; + } else { + integerValue = Integer.parseInt(value); + } + + } catch (NumberFormatException ex) { + throw new IllegalArgumentException( + String.format( + "Invalid value for %s. For Boolean connection properties, use 0 for false and 1 for" + + " true.", + propertyName), + ex); + } + if (integerValue == 1) { + return true; + } else if (integerValue == 0) { + return false; + } else { + throw new IllegalArgumentException( + String.format( + "Invalid value for %s. 
For Boolean connection properties, use 0 for false and 1 for" + + " true.", + propertyName)); + } + } + + // todo just make it a map + static Map parseQueryProperties(String url, String callerClassName) { + return parsePropertiesMap(url, QUERY_PROPERTIES_NAME, callerClassName); + } + + static Map parseLabels(String url, String callerClassName) { + return parsePropertiesMap(url, LABELS_PROPERTY_NAME, callerClassName); + } + + static String parseStringProperty( + String url, String propertyName, String defaultValue, String callerClassName) { + LOG.finest("++enter++\t" + callerClassName); + String parsedValue = BigQueryJdbcUrlUtility.parseUriProperty(url, propertyName); + if (parsedValue != null) { + return parsedValue; + } + return defaultValue; + } + + static List parseStringListProperty( + String url, String propertyName, String callerClassName) { + LOG.finest("++enter++\t" + callerClassName); + String rawValue = parseStringProperty(url, propertyName, null, callerClassName); + if (rawValue == null || rawValue.trim().isEmpty()) { + return Collections.emptyList(); + } + return Arrays.stream(rawValue.split(",")) + .map(String::trim) + .filter(s -> !s.isEmpty()) + .collect(Collectors.toList()); + } + + public static String parsePartnerTokenProperty(String url, String callerClassName) { + LOG.finest("++enter++\t" + callerClassName); + // This property is expected to be set by partners only. 
For more details on exact format
    // supported, refer b/396086960
    // Expected shape: PartnerToken=(GPN:<partner>[; <environment>]).
    // Group 1 captures the "GPN:..." segment; optional group 2 captures the environment.
    String regex =
        PARTNER_TOKEN_PROPERTY_NAME + "=\\s*\\(\\s*(GPN:[^;]*?)\\s*(?:;\\s*([^)]*?))?\\s*\\)";
    Pattern pattern = Pattern.compile(regex);
    Matcher matcher = pattern.matcher(url);

    if (matcher.find()) {
      String gpnPart = matcher.group(1);
      String environmentPart = matcher.group(2);
      // Re-emit the token in canonical " (GPN:...; env)" form, dropping a blank environment.
      StringBuilder partnerToken = new StringBuilder(" (");
      partnerToken.append(gpnPart);
      if (environmentPart != null && !environmentPart.trim().isEmpty()) {
        partnerToken.append("; ");
        partnerToken.append(environmentPart);
      }
      partnerToken.append(")");
      return partnerToken.toString();
    }
    // Property absent or malformed: callers treat null as "no partner token".
    return null;
  }

  /**
   * Parses an integer connection property from the URL.
   *
   * @param url the JDBC connection URL
   * @param propertyName the property to look up
   * @param defaultValue returned when the property is absent
   * @param callerClassName caller name, used only for trace logging
   * @return the parsed value, or {@code defaultValue} when the property is not present
   * @throws IllegalArgumentException if a value is present but is not a valid integer
   */
  static Integer parseIntProperty(
      String url, String propertyName, Integer defaultValue, String callerClassName) {
    LOG.finest("++enter++\t" + callerClassName);
    String parsedValue = BigQueryJdbcUrlUtility.parseUriProperty(url, propertyName);
    if (parsedValue != null) {
      try {
        return Integer.parseInt(parsedValue);
      } catch (NumberFormatException e) {
        LOG.severe(
            String.format(
                "Invalid integer value '%s' for property '%s'. 
Please provide a valid integer.", + parsedValue, propertyName)); + throw new IllegalArgumentException( + String.format("Invalid integer value for property '%s': %s", propertyName, parsedValue), + e); + } + } + return defaultValue; + } + + static Long parseLongProperty( + String url, String propertyName, Long defaultValue, String callerClassName) { + LOG.finest("++enter++\t" + callerClassName); + String parsedValue = BigQueryJdbcUrlUtility.parseUriProperty(url, propertyName); + if (parsedValue != null) { + return Long.parseLong(parsedValue); + } + return defaultValue; + } + + static Boolean parseBooleanProperty( + String url, String propertyName, Boolean defaultValue, String callerClassName) { + LOG.finest("++enter++\t" + callerClassName); + String parsedValue = BigQueryJdbcUrlUtility.parseUriProperty(url, propertyName); + if (parsedValue != null) { + return convertIntToBoolean(parsedValue, propertyName); + } + return defaultValue; + } + + public static Level parseLogLevel(String logLevelString) { + int logLevel = logLevelString != null ? 
Integer.parseInt(logLevelString) : DEFAULT_LOG_LEVEL;
    // Numeric level from the connection URL maps onto java.util.logging levels,
    // from OFF (0) up to ALL (8).
    switch (logLevel) {
      case 8:
        return Level.ALL;
      case 7:
        return Level.FINEST;
      case 6:
        return Level.FINER;
      case 5:
        return Level.FINE;
      case 4:
        return Level.CONFIG;
      case 3:
        return Level.INFO;
      case 2:
        return Level.WARNING;
      case 1:
        return Level.SEVERE;
      case 0:
      default:
        // 0, or any unrecognized numeric value, disables driver logging.
        // NOTE(review): the message says "not provided" even for out-of-range values — confirm
        // whether a distinct warning is wanted for invalid input.
        LOG.info(
            String.format(
                "%s value not provided, defaulting to %s.", LOG_LEVEL_PROPERTY_NAME, Level.OFF));
        return Level.OFF;
    }
  }

  /**
   * Extracts endpoint-override key/value pairs from the EndpointOverrides or
   * PrivateServiceConnectUris property.
   *
   * @return a map of override property names to values; empty when neither property is set
   */
  static Map<String, String> parseOverrideProperties(String url, String callerClassName) {
    LOG.finest("++enter++\t" + callerClassName);
    Map<String, String> overrideProps = new HashMap<>();
    // Group 1: which container property matched; group 2: its raw value up to the next ';'.
    Pattern pattern =
        Pattern.compile(
            String.format(
                "(?is)(%s|%s)=([^;]+)",
                ENDPOINT_OVERRIDES_PROPERTY_NAME, PRIVATE_SERVICE_CONNECT_PROPERTY_NAME));
    Matcher matcher = pattern.matcher(url);
    String overridePropertiesString;
    if (matcher.find() && matcher.groupCount() >= 1) {
      overridePropertiesString = matcher.group(2);
    } else {
      return overrideProps;
    }
    // Scan the container value for each known override, delimited by ',' or ';'.
    for (String property : OVERRIDE_PROPERTIES) {
      Pattern propertyPattern = Pattern.compile(String.format("(?i)%s=(.*?)(?:[,;]|$)", property));
      Matcher propertyMatcher = propertyPattern.matcher(overridePropertiesString);
      if (propertyMatcher.find() && propertyMatcher.groupCount() >= 1) {
        overrideProps.put(property, propertyMatcher.group(1));
      }
    }
    return overrideProps;
  }

  /**
   * Parses the JobCreationMode property.
   *
   * @return true when job creation is optional (query preview enabled), false when required
   * @throws NumberFormatException if the value is neither 1 nor 2
   */
  public static boolean parseJobCreationMode(String url, String callerClassName) {
    LOG.finest("++enter++\t" + callerClassName);

    String jobCreationMode =
        BigQueryJdbcUrlUtility.parseUriProperty(url, JOB_CREATION_MODE_PROPERTY_NAME);

    if (jobCreationMode == null) {
      LOG.fine(
          String.format(
              "%s value not provided, defaulting to %s. 
Caller: %s", + JOB_CREATION_MODE_PROPERTY_NAME, DEFAULT_JOB_CREATION_MODE, callerClassName)); + // Default Job creation mode is JOB_CREATION_OPTIONAL(2) + // which translates to options.setQueryPreviewEnabled(true) + return true; + } + if (jobCreationMode.equalsIgnoreCase("1")) { + return false; + } else if (jobCreationMode.equalsIgnoreCase("2")) { + return true; + } else { + throw new NumberFormatException( + String.format( + "Invalid value for %s. Use 1 for JOB_CREATION_REQUIRED and 2 for" + + " JOB_CREATION_OPTIONAL.", + JOB_CREATION_MODE_PROPERTY_NAME)); + } + } + + public static String parseBYOIDProperty(String url, String property, String callerClassName) { + LOG.finest("++enter++\t" + callerClassName); + + String value = BigQueryJdbcUrlUtility.parseUriProperty(url, property); + String defaultValue = BigQueryJdbcUrlUtility.getConnectionPropertyDefaultValue(property); + if (value != null) { + return value; + } else if (defaultValue != null) { + return defaultValue; + } + return null; + } + + public static String getConnectionPropertyDefaultValue(String propertyName) { + // TODO: change how we store properties because this method has to go through all of them + for (BigQueryConnectionProperty property : VALID_PROPERTIES) { + if (property.getName().equals(propertyName)) { + return property.getDefaultValue(); + } + } + return null; + } + + public static long parseRetryTimeoutInSecs(String url, String callerClassName) { + return BigQueryJdbcUrlUtility.parseLongProperty( + url, + RETRY_TIMEOUT_IN_SECS_PROPERTY_NAME, + DEFAULT_RETRY_TIMEOUT_IN_SECS_VALUE, + callerClassName); + } + + public static long parseJobTimeout(String url, String callerClassName) { + return parseLongProperty( + url, JOB_TIMEOUT_PROPERTY_NAME, DEFAULT_JOB_TIMEOUT_VALUE, callerClassName); + } + + public static long parseRetryInitialDelayInSecs(String url, String callerClassName) { + return BigQueryJdbcUrlUtility.parseLongProperty( + url, RETRY_INITIAL_DELAY_PROPERTY_NAME, 
DEFAULT_RETRY_INITIAL_DELAY_VALUE, callerClassName); + } + + public static long parseRetryMaxDelayInSecs(String url, String callerClassName) { + return BigQueryJdbcUrlUtility.parseLongProperty( + url, RETRY_MAX_DELAY_PROPERTY_NAME, DEFAULT_RETRY_MAX_DELAY_VALUE, callerClassName); + } + + // Convenience Helper Methods + public static long parseConnectionPoolSize(String url, String callerClassName) { + if (url == null || url.isEmpty()) { + throw new BigQueryJdbcRuntimeException("Connection url is empty"); + } + return parseLongProperty( + url, + CONNECTION_POOL_SIZE_PROPERTY_NAME, + DEFAULT_CONNECTION_POOL_SIZE_VALUE, + callerClassName); + } + + public static long parseListenerPoolSize(String url, String callerClassName) { + if (url == null || url.isEmpty()) { + throw new BigQueryJdbcRuntimeException("Connection url is empty"); + } + return parseLongProperty( + url, LISTENER_POOL_SIZE_PROPERTY_NAME, DEFAULT_LISTENER_POOL_SIZE_VALUE, callerClassName); + } + + public static long parseMaximumBytesBilled(String url, String callerClassName) { + if (url == null || url.isEmpty()) { + throw new BigQueryJdbcRuntimeException("Connection url is empty"); + } + return parseLongProperty( + url, MAX_BYTES_BILLED_PROPERTY_NAME, DEFAULT_MAX_BYTES_BILLED_VALUE, callerClassName); + } + + private static Map parsePropertiesMap( + String url, String propertyName, String callerClassName) { + LOG.finest("++enter++\t" + callerClassName); + String propertiesString = BigQueryJdbcUrlUtility.parseUriProperty(url, propertyName); + if (propertiesString == null || propertiesString.isEmpty()) { + LOG.fine(String.format("Unable to parse property name: %s from url: %s", propertyName, url)); + return null; + } + Map propertiesMap = new HashMap<>(); + String[] keyValuePairs = propertiesString.split(","); + + for (String keyValuePair : keyValuePairs) { + String[] parts = keyValuePair.split("="); + if (parts.length == 2) { + propertiesMap.put(parts[0], parts[1]); + } else { + LOG.warning( + String.format( 
+ "Invalid KeyValue pair: %s found in url: %s for property name: %s", + keyValuePair, url, propertyName)); + } + } + return propertiesMap; + } +} diff --git a/google-cloud-bigquery-jdbc/src/main/java/com/google/cloud/bigquery/jdbc/BigQueryJsonArray.java b/google-cloud-bigquery-jdbc/src/main/java/com/google/cloud/bigquery/jdbc/BigQueryJsonArray.java new file mode 100644 index 000000000..3b557a15a --- /dev/null +++ b/google-cloud-bigquery-jdbc/src/main/java/com/google/cloud/bigquery/jdbc/BigQueryJsonArray.java @@ -0,0 +1,105 @@ +/* + * Copyright 2023 Google LLC + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package com.google.cloud.bigquery.jdbc; + +import static com.google.cloud.bigquery.jdbc.BigQueryFieldValueListWrapper.getNestedFieldValueListWrapper; + +import com.google.api.core.InternalApi; +import com.google.cloud.Tuple; +import com.google.cloud.bigquery.Field; +import com.google.cloud.bigquery.FieldList; +import com.google.cloud.bigquery.FieldValue; +import com.google.cloud.bigquery.Schema; +import java.sql.ResultSet; +import java.util.List; + +/** An implementation of {@link BigQueryBaseArray} used to represent Array values from Json data. 
*/ +@InternalApi +class BigQueryJsonArray extends BigQueryBaseArray { + private static final BigQueryJdbcCustomLogger LOG = + new BigQueryJdbcCustomLogger(BigQueryJsonArray.class.getName()); + private static final BigQueryTypeCoercer BIGQUERY_TYPE_COERCER = + BigQueryTypeCoercionUtility.INSTANCE; + private List values; + + BigQueryJsonArray(Field schema, FieldValue values) { + super(schema); + this.values = (values == null || values.isNull()) ? null : values.getRepeatedValue(); + } + + @Override + public Object getArray() { + ensureValid(); + LOG.finest("++enter++"); + if (this.values == null) { + return null; + } + return getArrayInternal(0, this.values.size()); + } + + @Override + public Object getArray(long index, int count) { + ensureValid(); + LOG.finest("++enter++"); + if (this.values == null) { + return null; + } + Tuple range = createRange(index, count, this.values.size()); + return getArrayInternal(range.x(), range.y()); + } + + @Override + public ResultSet getResultSet() { + ensureValid(); + LOG.finest("++enter++"); + if (this.values == null) { + return new BigQueryJsonResultSet(); + } + BigQueryFieldValueListWrapper bigQueryFieldValueListWrapper = + getNestedFieldValueListWrapper(FieldList.of(singleElementSchema()), this.values); + return BigQueryJsonResultSet.getNestedResultSet( + Schema.of(this.schema), bigQueryFieldValueListWrapper, 0, this.values.size()); + } + + @Override + public ResultSet getResultSet(long index, int count) { + ensureValid(); + LOG.finest("++enter++"); + if (this.values == null) { + return new BigQueryJsonResultSet(); + } + Tuple range = createRange(index, count, this.values.size()); + BigQueryFieldValueListWrapper bigQueryFieldValueListWrapper = + getNestedFieldValueListWrapper(FieldList.of(singleElementSchema()), this.values); + return BigQueryJsonResultSet.getNestedResultSet( + Schema.of(this.schema), bigQueryFieldValueListWrapper, range.x(), range.y()); + } + + @Override + public void free() { + this.values = null; + 
markInvalid(); + } + + @Override + Object getCoercedValue(int index) { + FieldValue fieldValue = this.values.get(index); + return this.arrayOfStruct + ? new BigQueryJsonStruct(this.schema.getSubFields(), fieldValue) + : BIGQUERY_TYPE_COERCER.coerceTo(getTargetClass(), fieldValue); + } +} diff --git a/google-cloud-bigquery-jdbc/src/main/java/com/google/cloud/bigquery/jdbc/BigQueryJsonResultSet.java b/google-cloud-bigquery-jdbc/src/main/java/com/google/cloud/bigquery/jdbc/BigQueryJsonResultSet.java new file mode 100644 index 000000000..f9d7b1153 --- /dev/null +++ b/google-cloud-bigquery-jdbc/src/main/java/com/google/cloud/bigquery/jdbc/BigQueryJsonResultSet.java @@ -0,0 +1,317 @@ +/* + * Copyright 2023 Google LLC + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
 */

package com.google.cloud.bigquery.jdbc;

import static com.google.cloud.bigquery.jdbc.BigQueryBaseArray.isArray;
import static com.google.cloud.bigquery.jdbc.BigQueryBaseStruct.isStruct;

import com.google.cloud.bigquery.BigQuery;
import com.google.cloud.bigquery.Field;
import com.google.cloud.bigquery.FieldValue;
import com.google.cloud.bigquery.FieldValue.Attribute;
import com.google.cloud.bigquery.Schema;
import com.google.cloud.bigquery.exception.BigQueryJdbcRuntimeException;
import java.sql.ResultSet;
import java.sql.SQLException;
import java.util.concurrent.BlockingQueue;

/** {@link ResultSet} Implementation for JSON datasource (Using REST APIs) */
class BigQueryJsonResultSet extends BigQueryBaseResultSet {
  // Total row count of the top-level result; -1 for nested (array) result sets.
  private final long totalRows;
  // Producer/consumer hand-off: background fetch threads fill this queue, next() drains it.
  private final BlockingQueue<BigQueryFieldValueListWrapper> buffer;
  // Set once the sentinel "last" wrapper has been taken from the buffer.
  private boolean hasReachedEnd = false;
  // Points to the current record
  private BigQueryFieldValueListWrapper cursor;
  // Tracks the index of the nested element under process
  private int nestedRowIndex;
  // Number of rows consumed so far (top-level mode only).
  private long rowCnt = 0;
  private boolean afterLast = false;
  // Element window [fromIndex, toIndexExclusive) for nested (array) result sets.
  private final int fromIndex;
  private final int toIndexExclusive;
  // Fetch threads owned by this result set; interrupted on close().
  private final Thread[] ownedThreads;

  private BigQueryJsonResultSet(
      Schema schema,
      long totalRows,
      BlockingQueue<BigQueryFieldValueListWrapper> buffer,
      BigQueryStatement statement,
      boolean isNested,
      BigQueryFieldValueListWrapper cursor,
      int fromIndex,
      int toIndexExclusive,
      Thread[] ownedThreads,
      BigQuery bigQuery) {
    super(bigQuery, statement, schema, isNested);
    this.totalRows = totalRows;
    this.buffer = buffer;
    this.cursor = cursor;
    this.fromIndex = fromIndex;
    this.toIndexExclusive = toIndexExclusive;
    // next() pre-increments, so start one position before the first element.
    this.nestedRowIndex = fromIndex - 1;
    this.ownedThreads = ownedThreads;
  }

  /**
   * This method returns an instance of BigQueryJsonResultSet after adding it in the list of
   * JsonResultSetFinalizer
   *
   * @return BigQueryJsonResultSet
   */
  static BigQueryJsonResultSet of(
      Schema schema,
      long totalRows,
      BlockingQueue<BigQueryFieldValueListWrapper> buffer,
      BigQueryStatement statement,
      Thread[] ownedThreads,
      BigQuery bigQuery) {

    return new BigQueryJsonResultSet(
        schema, totalRows, buffer, statement, false, null, -1, -1, ownedThreads, bigQuery);
  }

  // Overload without a BigQuery client.
  static BigQueryJsonResultSet of(
      Schema schema,
      long totalRows,
      BlockingQueue<BigQueryFieldValueListWrapper> buffer,
      BigQueryStatement statement,
      Thread[] ownedThreads) {

    return new BigQueryJsonResultSet(
        schema, totalRows, buffer, statement, false, null, -1, -1, ownedThreads, null);
  }

  // Empty result set, e.g. returned when a NULL array column asks for a nested result set.
  BigQueryJsonResultSet() {
    super(null, null, null, false);
    totalRows = 0;
    buffer = null;
    fromIndex = 0;
    ownedThreads = new Thread[0];
    toIndexExclusive = 0;
  }

  /**
   * Wrapper method which can be used for initialising the instance of BigQueryJsonResultSet for the
   * nested Records
   *
   * @param schema Table schema
   * @param cursor Points to the current record
   * @param fromIndex starting index under consideration
   * @param toIndexExclusive last index under consideration
   * @return The BigQueryJsonResultSet
   */
  static BigQueryJsonResultSet getNestedResultSet(
      Schema schema, BigQueryFieldValueListWrapper cursor, int fromIndex, int toIndexExclusive) {
    return new BigQueryJsonResultSet(
        schema,
        -1,
        null,
        null, /* statement will be null in case of nested java.sql.Result. */
        true,
        cursor,
        fromIndex,
        toIndexExclusive,
        null,
        null);
  }

  /* Advances the result set to the next row, returning false if no such row exists. Potentially blocking operation */
  public boolean next() throws SQLException {
    checkClosed();
    if (this.isNested) {
      // We are working with the nested record, the cursor would have been
      // populated.
      if (this.cursor == null || this.cursor.getArrayFieldValueList() == null) {
        throw new IllegalStateException(
            "Cursor/ArrayFieldValueList can not be null working with the nested record");
      }
      // Check if there's a next record in the array which can be read
      if (this.nestedRowIndex < (this.toIndexExclusive - 1)) {
        this.nestedRowIndex++;
        return true;
      }
      this.afterLast = true;
      return false;

    } else {
      // If end of stream is reached or we are past the last row i.e
      // rowcnt == totalRows (rowcnt starts at 0)
      // then we can simply return false
      if (this.hasReachedEnd || this.isLast()) {
        this.afterLast = true;
        return false;
      }
      try {
        // Advance the cursor,Potentially blocking operation
        this.cursor = this.buffer.take();
        this.rowCnt++;
        // Check for end of stream
        if (this.cursor.isLast()) {
          this.cursor = null;
          this.hasReachedEnd = true;
          return false;
        }
        // Cursor has been advanced
        return true;

      } catch (InterruptedException ex) {
        // NOTE(review): thread interrupt status is not restored here — confirm callers close
        // the connection on this path.
        throw new BigQueryJdbcRuntimeException(
            "Error occurred while advancing the cursor. This could happen when connection is closed while we call the next method",
            ex);
      }
    }
  }

  @Override
  public Object getObject(int columnIndex) throws SQLException {
    // columnIndex is SQL index starting at 1
    checkClosed();
    LOG.finest("++enter++");
    FieldValue value = getObjectInternal(columnIndex);
    if (value == null || value.isNull()) {
      return null;
    }

    // Nested mode, column 1: the synthetic array index.
    if (this.isNested && columnIndex == 1) {
      return this.bigQueryTypeCoercer.coerceTo(Integer.class, value);
    }

    // Nested mode, column 2: the single array element column.
    if (this.isNested && columnIndex == 2) {
      Field arrayField = this.schema.getFields().get(0);
      if (isStruct(arrayField)) {
        return new BigQueryJsonStruct(arrayField.getSubFields(), value);
      }
      Class<?> targetClass =
          BigQueryJdbcTypeMappings.standardSQLToJavaTypeMapping.get(
              arrayField.getType().getStandardType());
      return this.bigQueryTypeCoercer.coerceTo(targetClass, value);
    }

    // Nested result sets reserve column 1 for the index, shifting data columns by one.
    int extraIndex = this.isNested ? 2 : 1;
    Field fieldSchema = this.schemaFieldList.get(columnIndex - extraIndex);
    if (isArray(fieldSchema)) {
      return new BigQueryJsonArray(fieldSchema, value);
    } else if (isStruct(fieldSchema)) {
      return new BigQueryJsonStruct(fieldSchema.getSubFields(), value);
    } else {
      Class<?> targetClass =
          BigQueryJdbcTypeMappings.standardSQLToJavaTypeMapping.get(
              fieldSchema.getType().getStandardType());
      return this.bigQueryTypeCoercer.coerceTo(targetClass, value);
    }
  }

  /**
   * This method will be called by every other getter of this {@link java.sql.ResultSet}, including
   * {@link #getObject(int)} to get the value in its rawest form i.e. {@link FieldValue} to coerce
   * it further as required.
   *
   * @param columnIndex the first column is 1, the second is 2, ...
   * @return an instance of {@link FieldValue} represents value at columnIndex column.
   */
  private FieldValue getObjectInternal(int columnIndex) throws SQLException {
    checkClosed();
    LOG.finest("++enter++");
    FieldValue value;
    if (this.isNested) {
      boolean validIndexForNestedResultSet = columnIndex == 1 || columnIndex == 2;
      // BigQuery doesn't support multidimensional arrays, so just the default row
      // num column (1) and the actual column (2) is supposed to be read
      if (!validIndexForNestedResultSet) {
        throw new IllegalArgumentException(
            "Column index is required to be 1 or 2 for the nested arrays");
      }
      if (this.cursor.getArrayFieldValueList() == null
          || this.cursor.getArrayFieldValueList().get(this.nestedRowIndex) == null) {
        throw new IllegalStateException("ArrayFieldValueList cannot be null");
      }

      // For Arrays the first column is Index, ref:
      // https://docs.oracle.com/javase/7/docs/api/java/sql/Array.html#getResultSet()
      if (columnIndex == 1) {
        return FieldValue.of(Attribute.PRIMITIVE, Integer.toString(this.nestedRowIndex + 1));
      } else {
        // columnIndex = 2
        // This ignores the columnIndex, as there's just one column, and we have already incremented
        // the nestedRowIndex
        value = this.cursor.getArrayFieldValueList().get(this.nestedRowIndex);
      }
    }
    // non nested, return the value
    else {
      // SQL Index to 0 based index
      value = this.cursor.getFieldValueList().get(columnIndex - 1);
    }
    setWasNull(value.getValue());
    return value;
  }

  @Override
  public void close() {
    LOG.fine(String.format("Closing BigqueryJsonResultSet %s.", this));
    this.isClosed = true;
    // Stop the background fetch threads feeding the buffer.
    if (ownedThreads != null) {
      for (Thread ownedThread : ownedThreads) {
        if (!ownedThread.isInterrupted()) {
          ownedThread.interrupt();
        }
      }
    }
    super.close();
  }

  @Override
  public boolean isBeforeFirst() throws SQLException {
    checkClosed();
    LOG.finest("++enter++");
    if (this.isNested) {
      return this.nestedRowIndex < this.fromIndex;
    } else {
      return this.cursor == null && this.rowCnt == 0;
    }
  }

  @Override
  public boolean isAfterLast() throws SQLException {
    checkClosed();
    LOG.finest("++enter++");
    return this.afterLast;
  }

  @Override
  public boolean isFirst() throws SQLException {
    checkClosed();
    LOG.finest("++enter++");
    if (this.isNested) {
      return this.nestedRowIndex == this.fromIndex;
    } else {
      return this.rowCnt == 1;
    }
  }

  @Override
  public boolean isLast() throws SQLException {
    checkClosed();
    LOG.finest("++enter++");
    if (this.isNested) {
      return this.nestedRowIndex == this.toIndexExclusive - 1;
    } else {
      return this.rowCnt == this.totalRows;
    }
  }
}
diff --git a/google-cloud-bigquery-jdbc/src/main/java/com/google/cloud/bigquery/jdbc/BigQueryJsonStruct.java b/google-cloud-bigquery-jdbc/src/main/java/com/google/cloud/bigquery/jdbc/BigQueryJsonStruct.java
new file mode 100644
index 000000000..35217f8e7
--- /dev/null
+++ b/google-cloud-bigquery-jdbc/src/main/java/com/google/cloud/bigquery/jdbc/BigQueryJsonStruct.java
@@ -0,0 +1,80 @@
/*
 * Copyright 2023 Google LLC
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in 
compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package com.google.cloud.bigquery.jdbc; + +import static com.google.cloud.bigquery.jdbc.BigQueryBaseArray.isArray; + +import com.google.api.core.InternalApi; +import com.google.cloud.bigquery.Field; +import com.google.cloud.bigquery.FieldList; +import com.google.cloud.bigquery.FieldValue; +import java.lang.reflect.Array; +import java.util.List; + +/** + * An implementation of {@link BigQueryBaseStruct} used to represent Struct values from Json data. + */ +@InternalApi +class BigQueryJsonStruct extends BigQueryBaseStruct { + private static final BigQueryJdbcCustomLogger LOG = + new BigQueryJdbcCustomLogger(BigQueryJsonStruct.class.getName()); + + private static final BigQueryTypeCoercer BIGQUERY_TYPE_COERCER = + BigQueryTypeCoercionUtility.INSTANCE; + + private final FieldList schema; + private final List values; + + public BigQueryJsonStruct(FieldList schema, FieldValue values) { + this.schema = schema; + this.values = (values == null || values.isNull()) ? null : values.getRecordValue(); + } + + @Override + FieldList getSchema() { + return this.schema; + } + + @Override + public Object[] getAttributes() { + LOG.finest("++enter++"); + int size = schema.size(); + Object[] attributes = (Object[]) Array.newInstance(Object.class, size); + + for (int index = 0; index < size; index++) { + Field currentSchema = schema.get(index); + FieldValue currentValue = values == null ? 
null : values.get(index); + Object coercedValue = getValue(currentSchema, currentValue); + Array.set(attributes, index, coercedValue); + } + return attributes; + } + + private Object getValue(Field currentSchema, FieldValue currentValue) { + LOG.finest("++enter++"); + if (isArray(currentSchema)) { + return new BigQueryJsonArray(currentSchema, currentValue); + } else if (isStruct(currentSchema)) { + return new BigQueryJsonStruct(currentSchema.getSubFields(), currentValue); + } else { + Class targetClass = + BigQueryJdbcTypeMappings.standardSQLToJavaTypeMapping.get( + currentSchema.getType().getStandardType()); + return BIGQUERY_TYPE_COERCER.coerceTo(targetClass, currentValue); + } + } +} diff --git a/google-cloud-bigquery-jdbc/src/main/java/com/google/cloud/bigquery/jdbc/BigQueryNoOpsConnection.java b/google-cloud-bigquery-jdbc/src/main/java/com/google/cloud/bigquery/jdbc/BigQueryNoOpsConnection.java new file mode 100644 index 000000000..1804cc14c --- /dev/null +++ b/google-cloud-bigquery-jdbc/src/main/java/com/google/cloud/bigquery/jdbc/BigQueryNoOpsConnection.java @@ -0,0 +1,191 @@ +/* + * Copyright 2023 Google LLC + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package com.google.cloud.bigquery.jdbc; + +import static com.google.cloud.bigquery.jdbc.BigQueryErrorMessage.METHOD_NOT_IMPLEMENTED; + +import com.google.cloud.bigquery.exception.BigQueryJdbcSqlFeatureNotSupportedException; +import java.sql.Array; +import java.sql.Blob; +import java.sql.CallableStatement; +import java.sql.Clob; +import java.sql.Connection; +import java.sql.NClob; +import java.sql.PreparedStatement; +import java.sql.SQLException; +import java.sql.SQLXML; +import java.sql.Savepoint; +import java.sql.Struct; +import java.util.Map; +import java.util.Properties; +import java.util.concurrent.Executor; + +/** NoOps Abstract base class for BigQuery JDBC Connection. */ +abstract class BigQueryNoOpsConnection implements Connection { + + @Override + public CallableStatement prepareCall(String sql) throws SQLException { + throw new BigQueryJdbcSqlFeatureNotSupportedException(METHOD_NOT_IMPLEMENTED); + } + + @Override + public String nativeSQL(String sql) throws SQLException { + throw new BigQueryJdbcSqlFeatureNotSupportedException(METHOD_NOT_IMPLEMENTED); + } + + @Override + public T unwrap(Class iface) throws SQLException { + throw new BigQueryJdbcSqlFeatureNotSupportedException(METHOD_NOT_IMPLEMENTED); + } + + @Override + public boolean isWrapperFor(Class iface) { + return false; + } + + @Override + public boolean isReadOnly() { + return false; + } + + @Override + public void setReadOnly(boolean readOnly) {} + + @Override + public void setCatalog(String catalog) {} + + // TODO: post MVP feature + + @Override + public Map> getTypeMap() throws SQLException { + throw new BigQueryJdbcSqlFeatureNotSupportedException(METHOD_NOT_IMPLEMENTED); + } + + @Override + public void setTypeMap(Map> map) {} + + @Override + public Savepoint setSavepoint() throws SQLException { + throw new BigQueryJdbcSqlFeatureNotSupportedException(METHOD_NOT_IMPLEMENTED); + } + + @Override + public Savepoint setSavepoint(String name) throws SQLException { + throw new 
BigQueryJdbcSqlFeatureNotSupportedException(METHOD_NOT_IMPLEMENTED); + } + + @Override + public void rollback(Savepoint savepoint) {} + + @Override + public void releaseSavepoint(Savepoint savepoint) {} + + @Override + public CallableStatement prepareCall(String sql, int resultSetType, int resultSetConcurrency) + throws SQLException { + throw new BigQueryJdbcSqlFeatureNotSupportedException(METHOD_NOT_IMPLEMENTED); + } + + @Override + public CallableStatement prepareCall( + String sql, int resultSetType, int resultSetConcurrency, int resultSetHoldability) + throws SQLException { + throw new BigQueryJdbcSqlFeatureNotSupportedException(METHOD_NOT_IMPLEMENTED); + } + + @Override + public PreparedStatement prepareStatement(String sql, int autoGeneratedKeys) throws SQLException { + throw new BigQueryJdbcSqlFeatureNotSupportedException(METHOD_NOT_IMPLEMENTED); + } + + @Override + public PreparedStatement prepareStatement(String sql, int[] columnIndexes) throws SQLException { + throw new BigQueryJdbcSqlFeatureNotSupportedException(METHOD_NOT_IMPLEMENTED); + } + + @Override + public PreparedStatement prepareStatement(String sql, String[] columnNames) throws SQLException { + throw new BigQueryJdbcSqlFeatureNotSupportedException(METHOD_NOT_IMPLEMENTED); + } + + @Override + public Clob createClob() throws SQLException { + throw new BigQueryJdbcSqlFeatureNotSupportedException(METHOD_NOT_IMPLEMENTED); + } + + @Override + public Blob createBlob() throws SQLException { + throw new BigQueryJdbcSqlFeatureNotSupportedException(METHOD_NOT_IMPLEMENTED); + } + + @Override + public NClob createNClob() throws SQLException { + throw new BigQueryJdbcSqlFeatureNotSupportedException(METHOD_NOT_IMPLEMENTED); + } + + @Override + public SQLXML createSQLXML() throws SQLException { + throw new BigQueryJdbcSqlFeatureNotSupportedException(METHOD_NOT_IMPLEMENTED); + } + + @Override + public boolean isValid(int timeout) throws SQLException { + return false; + } + + @Override + public void 
setClientInfo(String name, String value) {} + + @Override + public String getClientInfo(String name) throws SQLException { + throw new BigQueryJdbcSqlFeatureNotSupportedException(METHOD_NOT_IMPLEMENTED); + } + + @Override + public Properties getClientInfo() throws SQLException { + throw new BigQueryJdbcSqlFeatureNotSupportedException(METHOD_NOT_IMPLEMENTED); + } + + @Override + public void setClientInfo(Properties properties) {} + + @Override + public Array createArrayOf(String typeName, Object[] elements) throws SQLException { + throw new BigQueryJdbcSqlFeatureNotSupportedException(METHOD_NOT_IMPLEMENTED); + } + + @Override + public Struct createStruct(String typeName, Object[] attributes) throws SQLException { + throw new BigQueryJdbcSqlFeatureNotSupportedException(METHOD_NOT_IMPLEMENTED); + } + + @Override + public String getSchema() throws SQLException { + throw new BigQueryJdbcSqlFeatureNotSupportedException(METHOD_NOT_IMPLEMENTED); + } + + @Override + public void setSchema(String schema) {} + + @Override + public void setNetworkTimeout(Executor executor, int milliseconds) {} + + @Override + public int getNetworkTimeout() { + return 0; + } +} diff --git a/google-cloud-bigquery-jdbc/src/main/java/com/google/cloud/bigquery/jdbc/BigQueryNoOpsResultSet.java b/google-cloud-bigquery-jdbc/src/main/java/com/google/cloud/bigquery/jdbc/BigQueryNoOpsResultSet.java new file mode 100644 index 000000000..e4b29f7cd --- /dev/null +++ b/google-cloud-bigquery-jdbc/src/main/java/com/google/cloud/bigquery/jdbc/BigQueryNoOpsResultSet.java @@ -0,0 +1,693 @@ +/* + * Copyright 2023 Google LLC + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package com.google.cloud.bigquery.jdbc; + +import static com.google.cloud.bigquery.jdbc.BigQueryErrorMessage.METHOD_NOT_IMPLEMENTED; + +import com.google.cloud.bigquery.exception.BigQueryJdbcException; +import com.google.cloud.bigquery.exception.BigQueryJdbcSqlFeatureNotSupportedException; +import java.io.InputStream; +import java.io.Reader; +import java.math.BigDecimal; +import java.net.URL; +import java.sql.Array; +import java.sql.Blob; +import java.sql.Clob; +import java.sql.Date; +import java.sql.NClob; +import java.sql.Ref; +import java.sql.ResultSet; +import java.sql.RowId; +import java.sql.SQLException; +import java.sql.SQLWarning; +import java.sql.SQLXML; +import java.sql.Time; +import java.sql.Timestamp; +import java.util.Map; + +/** NoOps Abstract base class for BigQuery JDBC ResultSet(s). 
*/ +abstract class BigQueryNoOpsResultSet implements ResultSet { + + @Override + public int getFetchDirection() throws SQLException { + throw new BigQueryJdbcSqlFeatureNotSupportedException(METHOD_NOT_IMPLEMENTED); + } + + @Override + public void setFetchSize(int rows) throws SQLException { + throw new BigQueryJdbcSqlFeatureNotSupportedException(METHOD_NOT_IMPLEMENTED); + } + + @Override + public int getFetchSize() throws SQLException { + throw new BigQueryJdbcSqlFeatureNotSupportedException(METHOD_NOT_IMPLEMENTED); + } + + @Override + public String getCursorName() throws SQLException { + throw new BigQueryJdbcSqlFeatureNotSupportedException(METHOD_NOT_IMPLEMENTED); + } + + @Override + public boolean absolute(int row) throws SQLException { + throw new BigQueryJdbcSqlFeatureNotSupportedException(METHOD_NOT_IMPLEMENTED); + } + + @Override + public void beforeFirst() throws SQLException { + throw new BigQueryJdbcSqlFeatureNotSupportedException(METHOD_NOT_IMPLEMENTED); + } + + @Override + public void afterLast() throws SQLException { + throw new BigQueryJdbcSqlFeatureNotSupportedException(METHOD_NOT_IMPLEMENTED); + } + + @Override + public boolean first() throws SQLException { + throw new BigQueryJdbcSqlFeatureNotSupportedException(METHOD_NOT_IMPLEMENTED); + } + + @Override + public boolean last() throws SQLException { + throw new BigQueryJdbcSqlFeatureNotSupportedException(METHOD_NOT_IMPLEMENTED); + } + + @Override + public int getRow() throws SQLException { + throw new BigQueryJdbcSqlFeatureNotSupportedException(METHOD_NOT_IMPLEMENTED); + } + + @Override + public boolean relative(int rows) throws SQLException { + throw new BigQueryJdbcSqlFeatureNotSupportedException(METHOD_NOT_IMPLEMENTED); + } + + @Override + public boolean previous() throws SQLException { + throw new BigQueryJdbcSqlFeatureNotSupportedException(METHOD_NOT_IMPLEMENTED); + } + + @Override + public void setFetchDirection(int direction) throws SQLException { + throw new 
BigQueryJdbcSqlFeatureNotSupportedException(METHOD_NOT_IMPLEMENTED); + } + + @Override + public boolean rowUpdated() throws SQLException { + throw new BigQueryJdbcSqlFeatureNotSupportedException(METHOD_NOT_IMPLEMENTED); + } + + @Override + public boolean rowInserted() throws SQLException { + throw new BigQueryJdbcSqlFeatureNotSupportedException(METHOD_NOT_IMPLEMENTED); + } + + @Override + public boolean rowDeleted() throws SQLException { + throw new BigQueryJdbcSqlFeatureNotSupportedException(METHOD_NOT_IMPLEMENTED); + } + + @Override + public void updateNull(int columnIndex) throws SQLException { + throw new BigQueryJdbcSqlFeatureNotSupportedException(METHOD_NOT_IMPLEMENTED); + } + + @Override + public void updateBoolean(int columnIndex, boolean x) throws SQLException { + throw new BigQueryJdbcSqlFeatureNotSupportedException(METHOD_NOT_IMPLEMENTED); + } + + @Override + public void updateByte(int columnIndex, byte x) throws SQLException { + throw new BigQueryJdbcSqlFeatureNotSupportedException(METHOD_NOT_IMPLEMENTED); + } + + @Override + public void updateShort(int columnIndex, short x) throws SQLException { + throw new BigQueryJdbcSqlFeatureNotSupportedException(METHOD_NOT_IMPLEMENTED); + } + + @Override + public void updateInt(int columnIndex, int x) throws SQLException { + throw new BigQueryJdbcSqlFeatureNotSupportedException(METHOD_NOT_IMPLEMENTED); + } + + @Override + public void updateLong(int columnIndex, long x) throws SQLException { + throw new BigQueryJdbcSqlFeatureNotSupportedException(METHOD_NOT_IMPLEMENTED); + } + + @Override + public void updateFloat(int columnIndex, float x) throws SQLException { + throw new BigQueryJdbcSqlFeatureNotSupportedException(METHOD_NOT_IMPLEMENTED); + } + + @Override + public void updateDouble(int columnIndex, double x) throws SQLException { + throw new BigQueryJdbcSqlFeatureNotSupportedException(METHOD_NOT_IMPLEMENTED); + } + + @Override + public void updateBigDecimal(int columnIndex, BigDecimal x) throws SQLException { + 
throw new BigQueryJdbcSqlFeatureNotSupportedException(METHOD_NOT_IMPLEMENTED); + } + + @Override + public void updateString(int columnIndex, String x) throws SQLException { + throw new BigQueryJdbcSqlFeatureNotSupportedException(METHOD_NOT_IMPLEMENTED); + } + + @Override + public void updateBytes(int columnIndex, byte[] x) throws SQLException { + throw new BigQueryJdbcSqlFeatureNotSupportedException(METHOD_NOT_IMPLEMENTED); + } + + @Override + public void updateDate(int columnIndex, Date x) throws SQLException { + throw new BigQueryJdbcSqlFeatureNotSupportedException(METHOD_NOT_IMPLEMENTED); + } + + @Override + public void updateTime(int columnIndex, Time x) throws SQLException { + throw new BigQueryJdbcSqlFeatureNotSupportedException(METHOD_NOT_IMPLEMENTED); + } + + @Override + public void updateTimestamp(int columnIndex, Timestamp x) throws SQLException { + throw new BigQueryJdbcSqlFeatureNotSupportedException(METHOD_NOT_IMPLEMENTED); + } + + @Override + public void updateAsciiStream(int columnIndex, InputStream x, int length) throws SQLException { + throw new BigQueryJdbcSqlFeatureNotSupportedException(METHOD_NOT_IMPLEMENTED); + } + + @Override + public void updateBinaryStream(int columnIndex, InputStream x, int length) throws SQLException { + throw new BigQueryJdbcSqlFeatureNotSupportedException(METHOD_NOT_IMPLEMENTED); + } + + @Override + public void updateCharacterStream(int columnIndex, Reader x, int length) throws SQLException { + throw new BigQueryJdbcSqlFeatureNotSupportedException(METHOD_NOT_IMPLEMENTED); + } + + @Override + public void updateObject(int columnIndex, Object x, int scaleOrLength) throws SQLException { + throw new BigQueryJdbcSqlFeatureNotSupportedException(METHOD_NOT_IMPLEMENTED); + } + + @Override + public void updateObject(int columnIndex, Object x) throws SQLException { + throw new BigQueryJdbcSqlFeatureNotSupportedException(METHOD_NOT_IMPLEMENTED); + } + + @Override + public void updateNull(String columnLabel) throws SQLException { + 
throw new BigQueryJdbcSqlFeatureNotSupportedException(METHOD_NOT_IMPLEMENTED); + } + + @Override + public void updateBoolean(String columnLabel, boolean x) throws SQLException { + throw new BigQueryJdbcSqlFeatureNotSupportedException(METHOD_NOT_IMPLEMENTED); + } + + @Override + public void updateByte(String columnLabel, byte x) throws SQLException { + throw new BigQueryJdbcSqlFeatureNotSupportedException(METHOD_NOT_IMPLEMENTED); + } + + @Override + public void updateShort(String columnLabel, short x) throws SQLException { + throw new BigQueryJdbcSqlFeatureNotSupportedException(METHOD_NOT_IMPLEMENTED); + } + + @Override + public void updateInt(String columnLabel, int x) throws SQLException { + throw new BigQueryJdbcSqlFeatureNotSupportedException(METHOD_NOT_IMPLEMENTED); + } + + @Override + public void updateLong(String columnLabel, long x) throws SQLException { + throw new BigQueryJdbcSqlFeatureNotSupportedException(METHOD_NOT_IMPLEMENTED); + } + + @Override + public void updateFloat(String columnLabel, float x) throws SQLException { + throw new BigQueryJdbcSqlFeatureNotSupportedException(METHOD_NOT_IMPLEMENTED); + } + + @Override + public void updateDouble(String columnLabel, double x) throws SQLException { + throw new BigQueryJdbcSqlFeatureNotSupportedException(METHOD_NOT_IMPLEMENTED); + } + + @Override + public void updateBigDecimal(String columnLabel, BigDecimal x) throws SQLException { + throw new BigQueryJdbcSqlFeatureNotSupportedException(METHOD_NOT_IMPLEMENTED); + } + + @Override + public void updateString(String columnLabel, String x) throws SQLException { + throw new BigQueryJdbcSqlFeatureNotSupportedException(METHOD_NOT_IMPLEMENTED); + } + + @Override + public void updateBytes(String columnLabel, byte[] x) throws SQLException { + throw new BigQueryJdbcSqlFeatureNotSupportedException(METHOD_NOT_IMPLEMENTED); + } + + @Override + public void updateDate(String columnLabel, Date x) throws SQLException { + throw new 
BigQueryJdbcSqlFeatureNotSupportedException(METHOD_NOT_IMPLEMENTED); + } + + @Override + public void updateTime(String columnLabel, Time x) throws SQLException { + throw new BigQueryJdbcSqlFeatureNotSupportedException(METHOD_NOT_IMPLEMENTED); + } + + @Override + public void updateTimestamp(String columnLabel, Timestamp x) throws SQLException { + throw new BigQueryJdbcSqlFeatureNotSupportedException(METHOD_NOT_IMPLEMENTED); + } + + @Override + public void updateAsciiStream(String columnLabel, InputStream x, int length) throws SQLException { + throw new BigQueryJdbcSqlFeatureNotSupportedException(METHOD_NOT_IMPLEMENTED); + } + + @Override + public void updateBinaryStream(String columnLabel, InputStream x, int length) + throws SQLException { + throw new BigQueryJdbcSqlFeatureNotSupportedException(METHOD_NOT_IMPLEMENTED); + } + + @Override + public void updateCharacterStream(String columnLabel, Reader reader, int length) + throws SQLException { + throw new BigQueryJdbcSqlFeatureNotSupportedException(METHOD_NOT_IMPLEMENTED); + } + + @Override + public void updateObject(String columnLabel, Object x, int scaleOrLength) throws SQLException { + throw new BigQueryJdbcSqlFeatureNotSupportedException(METHOD_NOT_IMPLEMENTED); + } + + @Override + public void updateObject(String columnLabel, Object x) throws SQLException { + throw new BigQueryJdbcSqlFeatureNotSupportedException(METHOD_NOT_IMPLEMENTED); + } + + @Override + public void insertRow() throws SQLException { + throw new BigQueryJdbcSqlFeatureNotSupportedException(METHOD_NOT_IMPLEMENTED); + } + + @Override + public void updateRow() throws SQLException { + throw new BigQueryJdbcSqlFeatureNotSupportedException(METHOD_NOT_IMPLEMENTED); + } + + @Override + public void deleteRow() throws SQLException { + throw new BigQueryJdbcSqlFeatureNotSupportedException(METHOD_NOT_IMPLEMENTED); + } + + @Override + public void refreshRow() throws SQLException { + throw new BigQueryJdbcSqlFeatureNotSupportedException(METHOD_NOT_IMPLEMENTED); 
+ } + + @Override + public void cancelRowUpdates() throws SQLException { + throw new BigQueryJdbcSqlFeatureNotSupportedException(METHOD_NOT_IMPLEMENTED); + } + + @Override + public void moveToInsertRow() throws SQLException { + throw new BigQueryJdbcSqlFeatureNotSupportedException(METHOD_NOT_IMPLEMENTED); + } + + @Override + public void moveToCurrentRow() throws SQLException { + throw new BigQueryJdbcSqlFeatureNotSupportedException(METHOD_NOT_IMPLEMENTED); + } + + @Override + public Object getObject(int columnIndex, Map> map) throws SQLException { + throw new BigQueryJdbcSqlFeatureNotSupportedException(METHOD_NOT_IMPLEMENTED); + } + + @Override + public Ref getRef(int columnIndex) throws SQLException { + throw new BigQueryJdbcSqlFeatureNotSupportedException(METHOD_NOT_IMPLEMENTED); + } + + @Override + public Object getObject(String columnLabel, Map> map) throws SQLException { + throw new BigQueryJdbcSqlFeatureNotSupportedException(METHOD_NOT_IMPLEMENTED); + } + + @Override + public Ref getRef(String columnLabel) throws SQLException { + throw new BigQueryJdbcSqlFeatureNotSupportedException(METHOD_NOT_IMPLEMENTED); + } + + @Override + public URL getURL(int columnIndex) throws SQLException { + throw new BigQueryJdbcSqlFeatureNotSupportedException(METHOD_NOT_IMPLEMENTED); + } + + @Override + public URL getURL(String columnLabel) throws SQLException { + throw new BigQueryJdbcSqlFeatureNotSupportedException(METHOD_NOT_IMPLEMENTED); + } + + @Override + public void updateRef(int columnIndex, Ref x) throws SQLException { + throw new BigQueryJdbcSqlFeatureNotSupportedException(METHOD_NOT_IMPLEMENTED); + } + + @Override + public void updateRef(String columnLabel, Ref x) throws SQLException { + throw new BigQueryJdbcSqlFeatureNotSupportedException(METHOD_NOT_IMPLEMENTED); + } + + @Override + public void updateBlob(int columnIndex, Blob x) throws SQLException { + throw new BigQueryJdbcSqlFeatureNotSupportedException(METHOD_NOT_IMPLEMENTED); + } + + @Override + public void 
updateBlob(String columnLabel, Blob x) throws SQLException { + throw new BigQueryJdbcSqlFeatureNotSupportedException(METHOD_NOT_IMPLEMENTED); + } + + @Override + public void updateClob(int columnIndex, Clob x) throws SQLException { + throw new BigQueryJdbcSqlFeatureNotSupportedException(METHOD_NOT_IMPLEMENTED); + } + + @Override + public void updateClob(String columnLabel, Clob x) throws SQLException { + throw new BigQueryJdbcSqlFeatureNotSupportedException(METHOD_NOT_IMPLEMENTED); + } + + @Override + public void updateArray(int columnIndex, Array x) throws SQLException { + throw new BigQueryJdbcSqlFeatureNotSupportedException(METHOD_NOT_IMPLEMENTED); + } + + @Override + public void updateArray(String columnLabel, Array x) throws SQLException { + throw new BigQueryJdbcSqlFeatureNotSupportedException(METHOD_NOT_IMPLEMENTED); + } + + @Override + public RowId getRowId(int columnIndex) throws SQLException { + throw new BigQueryJdbcSqlFeatureNotSupportedException(METHOD_NOT_IMPLEMENTED); + } + + @Override + public RowId getRowId(String columnLabel) throws SQLException { + throw new BigQueryJdbcSqlFeatureNotSupportedException(METHOD_NOT_IMPLEMENTED); + } + + @Override + public void updateRowId(int columnIndex, RowId x) throws SQLException { + throw new BigQueryJdbcSqlFeatureNotSupportedException(METHOD_NOT_IMPLEMENTED); + } + + @Override + public void updateRowId(String columnLabel, RowId x) throws SQLException { + throw new BigQueryJdbcSqlFeatureNotSupportedException(METHOD_NOT_IMPLEMENTED); + } + + @Override + public void updateNString(int columnIndex, String nString) throws SQLException { + throw new BigQueryJdbcSqlFeatureNotSupportedException(METHOD_NOT_IMPLEMENTED); + } + + @Override + public void updateNString(String columnLabel, String nString) throws SQLException { + throw new BigQueryJdbcSqlFeatureNotSupportedException(METHOD_NOT_IMPLEMENTED); + } + + @Override + public void updateNClob(int columnIndex, NClob nClob) throws SQLException { + throw new 
BigQueryJdbcSqlFeatureNotSupportedException(METHOD_NOT_IMPLEMENTED); + } + + @Override + public void updateNClob(String columnLabel, NClob nClob) throws SQLException { + throw new BigQueryJdbcSqlFeatureNotSupportedException(METHOD_NOT_IMPLEMENTED); + } + + @Override + public NClob getNClob(int columnIndex) throws SQLException { + throw new BigQueryJdbcSqlFeatureNotSupportedException(METHOD_NOT_IMPLEMENTED); + } + + @Override + public NClob getNClob(String columnLabel) throws SQLException { + throw new BigQueryJdbcSqlFeatureNotSupportedException(METHOD_NOT_IMPLEMENTED); + } + + @Override + public SQLXML getSQLXML(int columnIndex) throws SQLException { + throw new BigQueryJdbcSqlFeatureNotSupportedException(METHOD_NOT_IMPLEMENTED); + } + + @Override + public SQLXML getSQLXML(String columnLabel) throws SQLException { + throw new BigQueryJdbcSqlFeatureNotSupportedException(METHOD_NOT_IMPLEMENTED); + } + + @Override + public void updateSQLXML(int columnIndex, SQLXML xmlObject) throws SQLException { + throw new BigQueryJdbcSqlFeatureNotSupportedException(METHOD_NOT_IMPLEMENTED); + } + + @Override + public void updateSQLXML(String columnLabel, SQLXML xmlObject) throws SQLException { + throw new BigQueryJdbcSqlFeatureNotSupportedException(METHOD_NOT_IMPLEMENTED); + } + + @Override + public String getNString(int columnIndex) throws SQLException { + throw new BigQueryJdbcSqlFeatureNotSupportedException(METHOD_NOT_IMPLEMENTED); + } + + @Override + public String getNString(String columnLabel) throws SQLException { + throw new BigQueryJdbcSqlFeatureNotSupportedException(METHOD_NOT_IMPLEMENTED); + } + + @Override + public Reader getNCharacterStream(int columnIndex) throws SQLException { + throw new BigQueryJdbcSqlFeatureNotSupportedException(METHOD_NOT_IMPLEMENTED); + } + + @Override + public Reader getNCharacterStream(String columnLabel) throws SQLException { + throw new BigQueryJdbcSqlFeatureNotSupportedException(METHOD_NOT_IMPLEMENTED); + } + + @Override + public void 
updateNCharacterStream(int columnIndex, Reader x, long length) throws SQLException { + throw new BigQueryJdbcSqlFeatureNotSupportedException(METHOD_NOT_IMPLEMENTED); + } + + @Override + public void updateNCharacterStream(String columnLabel, Reader reader, long length) + throws SQLException { + throw new BigQueryJdbcSqlFeatureNotSupportedException(METHOD_NOT_IMPLEMENTED); + } + + @Override + public void updateAsciiStream(int columnIndex, InputStream x, long length) throws SQLException { + throw new BigQueryJdbcSqlFeatureNotSupportedException(METHOD_NOT_IMPLEMENTED); + } + + @Override + public void updateBinaryStream(int columnIndex, InputStream x, long length) throws SQLException { + throw new BigQueryJdbcSqlFeatureNotSupportedException(METHOD_NOT_IMPLEMENTED); + } + + @Override + public void updateCharacterStream(int columnIndex, Reader x, long length) throws SQLException { + throw new BigQueryJdbcSqlFeatureNotSupportedException(METHOD_NOT_IMPLEMENTED); + } + + @Override + public void updateAsciiStream(String columnLabel, InputStream x, long length) + throws SQLException { + throw new BigQueryJdbcSqlFeatureNotSupportedException(METHOD_NOT_IMPLEMENTED); + } + + @Override + public void updateBinaryStream(String columnLabel, InputStream x, long length) + throws SQLException { + throw new BigQueryJdbcSqlFeatureNotSupportedException(METHOD_NOT_IMPLEMENTED); + } + + @Override + public void updateCharacterStream(String columnLabel, Reader reader, long length) + throws SQLException { + throw new BigQueryJdbcSqlFeatureNotSupportedException(METHOD_NOT_IMPLEMENTED); + } + + @Override + public void updateBlob(int columnIndex, InputStream inputStream, long length) + throws SQLException { + throw new BigQueryJdbcSqlFeatureNotSupportedException(METHOD_NOT_IMPLEMENTED); + } + + @Override + public void updateBlob(String columnLabel, InputStream inputStream, long length) + throws SQLException { + throw new BigQueryJdbcSqlFeatureNotSupportedException(METHOD_NOT_IMPLEMENTED); + } + + 
@Override + public void updateClob(int columnIndex, Reader reader, long length) throws SQLException { + throw new BigQueryJdbcSqlFeatureNotSupportedException(METHOD_NOT_IMPLEMENTED); + } + + @Override + public void updateClob(String columnLabel, Reader reader, long length) throws SQLException { + throw new BigQueryJdbcSqlFeatureNotSupportedException(METHOD_NOT_IMPLEMENTED); + } + + @Override + public void updateNClob(int columnIndex, Reader reader, long length) throws SQLException { + throw new BigQueryJdbcSqlFeatureNotSupportedException(METHOD_NOT_IMPLEMENTED); + } + + @Override + public void updateNClob(String columnLabel, Reader reader, long length) throws SQLException { + throw new BigQueryJdbcSqlFeatureNotSupportedException(METHOD_NOT_IMPLEMENTED); + } + + @Override + public void updateNCharacterStream(int columnIndex, Reader x) throws SQLException { + throw new BigQueryJdbcSqlFeatureNotSupportedException(METHOD_NOT_IMPLEMENTED); + } + + @Override + public void updateNCharacterStream(String columnLabel, Reader reader) throws SQLException { + throw new BigQueryJdbcSqlFeatureNotSupportedException(METHOD_NOT_IMPLEMENTED); + } + + @Override + public void updateAsciiStream(int columnIndex, InputStream x) throws SQLException { + throw new BigQueryJdbcSqlFeatureNotSupportedException(METHOD_NOT_IMPLEMENTED); + } + + @Override + public void updateBinaryStream(int columnIndex, InputStream x) throws SQLException { + throw new BigQueryJdbcSqlFeatureNotSupportedException(METHOD_NOT_IMPLEMENTED); + } + + @Override + public void updateCharacterStream(int columnIndex, Reader x) throws SQLException { + throw new BigQueryJdbcSqlFeatureNotSupportedException(METHOD_NOT_IMPLEMENTED); + } + + @Override + public void updateAsciiStream(String columnLabel, InputStream x) throws SQLException { + throw new BigQueryJdbcSqlFeatureNotSupportedException(METHOD_NOT_IMPLEMENTED); + } + + @Override + public void updateBinaryStream(String columnLabel, InputStream x) throws SQLException { + 
throw new BigQueryJdbcSqlFeatureNotSupportedException(METHOD_NOT_IMPLEMENTED); + } + + @Override + public void updateCharacterStream(String columnLabel, Reader reader) throws SQLException { + throw new BigQueryJdbcSqlFeatureNotSupportedException(METHOD_NOT_IMPLEMENTED); + } + + @Override + public void updateBlob(int columnIndex, InputStream inputStream) throws SQLException { + throw new BigQueryJdbcSqlFeatureNotSupportedException(METHOD_NOT_IMPLEMENTED); + } + + @Override + public void updateBlob(String columnLabel, InputStream inputStream) throws SQLException { + throw new BigQueryJdbcSqlFeatureNotSupportedException(METHOD_NOT_IMPLEMENTED); + } + + @Override + public void updateClob(int columnIndex, Reader reader) throws SQLException { + throw new BigQueryJdbcSqlFeatureNotSupportedException(METHOD_NOT_IMPLEMENTED); + } + + @Override + public void updateClob(String columnLabel, Reader reader) throws SQLException { + throw new BigQueryJdbcSqlFeatureNotSupportedException(METHOD_NOT_IMPLEMENTED); + } + + @Override + public void updateNClob(int columnIndex, Reader reader) throws SQLException { + throw new BigQueryJdbcSqlFeatureNotSupportedException(METHOD_NOT_IMPLEMENTED); + } + + @Override + public void updateNClob(String columnLabel, Reader reader) throws SQLException { + throw new BigQueryJdbcSqlFeatureNotSupportedException(METHOD_NOT_IMPLEMENTED); + } + + @Override + public T getObject(int columnIndex, Class type) throws SQLException { + throw new BigQueryJdbcSqlFeatureNotSupportedException(METHOD_NOT_IMPLEMENTED); + } + + @Override + public T getObject(String columnLabel, Class type) throws SQLException { + throw new BigQueryJdbcSqlFeatureNotSupportedException(METHOD_NOT_IMPLEMENTED); + } + + @Override + public T unwrap(Class iface) throws SQLException { + throw new BigQueryJdbcSqlFeatureNotSupportedException(METHOD_NOT_IMPLEMENTED); + } + + @Override + public boolean isWrapperFor(Class iface) throws SQLException { + throw new 
BigQueryJdbcSqlFeatureNotSupportedException(METHOD_NOT_IMPLEMENTED); + } + + @Override + public SQLWarning getWarnings() throws SQLException { + throw new BigQueryJdbcSqlFeatureNotSupportedException(METHOD_NOT_IMPLEMENTED); + } + + @Override + public void clearWarnings() throws SQLException { + throw new BigQueryJdbcSqlFeatureNotSupportedException(METHOD_NOT_IMPLEMENTED); + } + + void checkClosed() throws SQLException { + if (isClosed()) { + throw new BigQueryJdbcException("This " + getClass().getName() + " has been closed"); + } + } +} diff --git a/google-cloud-bigquery-jdbc/src/main/java/com/google/cloud/bigquery/jdbc/BigQueryNoOpsStatement.java b/google-cloud-bigquery-jdbc/src/main/java/com/google/cloud/bigquery/jdbc/BigQueryNoOpsStatement.java new file mode 100644 index 000000000..2e71bfaf7 --- /dev/null +++ b/google-cloud-bigquery-jdbc/src/main/java/com/google/cloud/bigquery/jdbc/BigQueryNoOpsStatement.java @@ -0,0 +1,90 @@ +/* + * Copyright 2023 Google LLC + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package com.google.cloud.bigquery.jdbc; + +import static com.google.cloud.bigquery.jdbc.BigQueryErrorMessage.METHOD_NOT_IMPLEMENTED; + +import com.google.cloud.bigquery.exception.BigQueryJdbcSqlFeatureNotSupportedException; +import java.sql.ResultSet; +import java.sql.SQLException; +import java.sql.Statement; + +abstract class BigQueryNoOpsStatement implements Statement { + + @Override + public void setCursorName(String name) throws SQLException { + // TODO: ResultSet Concurrency is read only(Not updatable) + throw new BigQueryJdbcSqlFeatureNotSupportedException(METHOD_NOT_IMPLEMENTED); + } + + @Override + public T unwrap(Class iface) throws SQLException { + throw new BigQueryJdbcSqlFeatureNotSupportedException(METHOD_NOT_IMPLEMENTED); + } + + @Override + public boolean isWrapperFor(Class iface) throws SQLException { + throw new BigQueryJdbcSqlFeatureNotSupportedException(METHOD_NOT_IMPLEMENTED); + } + + @Override + public ResultSet getGeneratedKeys() throws SQLException { + // TODO: Returns an empty resultset. 
+ // return empty ResultSet + throw new BigQueryJdbcSqlFeatureNotSupportedException(METHOD_NOT_IMPLEMENTED); + } + + @Override + public int executeUpdate(String sql, int autoGeneratedKeys) throws SQLException { + // Implementation detailed in BigQuery JDBC Design - Wiring of executeQuery, executeUpdate and + // execute methods + throw new BigQueryJdbcSqlFeatureNotSupportedException(METHOD_NOT_IMPLEMENTED); + } + + @Override + public int executeUpdate(String sql, int[] columnIndexes) throws SQLException { + // Implementation detailed in BigQuery JDBC Design - Wiring of executeQuery, executeUpdate and + // execute methods + throw new BigQueryJdbcSqlFeatureNotSupportedException(METHOD_NOT_IMPLEMENTED); + } + + @Override + public int executeUpdate(String sql, String[] columnNames) throws SQLException { + // Implementation detailed in BigQuery JDBC Design - Wiring of executeQuery, executeUpdate and + // execute methods + throw new BigQueryJdbcSqlFeatureNotSupportedException(METHOD_NOT_IMPLEMENTED); + } + + @Override + public boolean execute(String sql, int autoGeneratedKeys) throws SQLException { + // Implementation detailed in BigQuery JDBC Design - Wiring of executeQuery, executeUpdate and + // execute methods + throw new BigQueryJdbcSqlFeatureNotSupportedException(METHOD_NOT_IMPLEMENTED); + } + + @Override + public boolean execute(String sql, int[] columnIndexes) throws SQLException { + throw new BigQueryJdbcSqlFeatureNotSupportedException(METHOD_NOT_IMPLEMENTED); + } + + @Override + public boolean execute(String sql, String[] columnNames) throws SQLException { + // Implementation detailed in BigQuery JDBC Design - Wiring of executeQuery, executeUpdate and + // execute methods + throw new BigQueryJdbcSqlFeatureNotSupportedException(METHOD_NOT_IMPLEMENTED); + } +} diff --git a/google-cloud-bigquery-jdbc/src/main/java/com/google/cloud/bigquery/jdbc/BigQueryParameterHandler.java 
b/google-cloud-bigquery-jdbc/src/main/java/com/google/cloud/bigquery/jdbc/BigQueryParameterHandler.java new file mode 100644 index 000000000..9644dd581 --- /dev/null +++ b/google-cloud-bigquery-jdbc/src/main/java/com/google/cloud/bigquery/jdbc/BigQueryParameterHandler.java @@ -0,0 +1,281 @@ +/* + * Copyright 2024 Google LLC + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package com.google.cloud.bigquery.jdbc; + +import com.google.cloud.bigquery.QueryJobConfiguration; +import com.google.cloud.bigquery.QueryParameterValue; +import com.google.cloud.bigquery.StandardSQLTypeName; +import com.google.cloud.bigquery.exception.BigQueryJdbcException; +import com.google.cloud.bigquery.exception.BigQueryJdbcSqlFeatureNotSupportedException; +import java.sql.SQLException; +import java.util.ArrayList; + +class BigQueryParameterHandler { + private final BigQueryJdbcCustomLogger LOG = new BigQueryJdbcCustomLogger(this.toString()); + + public BigQueryParameterHandler(int parameterCount) { + this.parametersArraySize = parameterCount; + } + + BigQueryParameterHandler(int parameterCount, ArrayList parametersList) { + this.parametersArraySize = parameterCount; + this.parametersList = parametersList; + } + + // Indicates whether the parameter is input, output or both + // Default is UNSPECIFIED + // Used by CallableStatement + enum BigQueryStatementParameterType { + UNSPECIFIED, + IN, + OUT, + INOUT + }; + + private int parametersArraySize; + ArrayList parametersList = 
new ArrayList<>(parametersArraySize); + + private long highestIndex = 0; + + QueryJobConfiguration.Builder configureParameters( + QueryJobConfiguration.Builder jobConfigurationBuilder) throws SQLException { + LOG.finest("++enter++"); + try { + for (int i = 1; i <= this.parametersArraySize; i++) { + + Object parameterValue = getParameter(i); + StandardSQLTypeName sqlType = getSqlType(i); + LOG.info( + String.format( + "Parameter %s of type %s at index %s added to QueryJobConfiguration", + parameterValue, sqlType, i)); + jobConfigurationBuilder.addPositionalParameter( + QueryParameterValue.of(parameterValue, sqlType)); + } + } catch (NullPointerException e) { + if (e.getMessage().contains("Null type")) { + throw new BigQueryJdbcException("One or more parameters missing in Prepared statement.", e); + } + } + return jobConfigurationBuilder; + } + + void setParameter(int parameterIndex, Object value, Class type) + throws BigQueryJdbcSqlFeatureNotSupportedException { + LOG.finest("++enter++"); + LOG.finest(String.format("setParameter called by : %s", type.getName())); + checkValidIndex(parameterIndex); + + int arrayIndex = parameterIndex - 1; + if (parameterIndex >= this.highestIndex || this.parametersList.get(arrayIndex) == null) { + parametersList.ensureCapacity(parameterIndex); + while (parametersList.size() < parameterIndex) { + parametersList.add(null); + } + parametersList.set(arrayIndex, new BigQueryJdbcParameter()); + } + this.highestIndex = Math.max(parameterIndex, highestIndex); + BigQueryJdbcParameter parameter = parametersList.get(arrayIndex); + + parameter.setIndex(parameterIndex); + parameter.setValue(value); + parameter.setType(type); + parameter.setSqlType(BigQueryJdbcTypeMappings.classToType(type)); + parameter.setParamName(""); + parameter.setParamType(BigQueryStatementParameterType.UNSPECIFIED); + parameter.setScale(-1); + + LOG.finest(String.format("Parameter set { %s }", parameter.toString())); + } + + private void checkValidIndex(int parameterIndex) 
{ + if (parameterIndex > this.parametersArraySize) { + throw new IndexOutOfBoundsException("All parameters already provided."); + } + } + + Object getParameter(int index) { + // Index is 1-based. Converting to 0 based for java. + int arrayIndex = index - 1; + if (parametersList.size() <= arrayIndex || parametersList.get(arrayIndex) == null) { + return null; + } + return parametersList.get(arrayIndex).getValue(); + } + + Class getType(int index) { + // Index is 1-based. Converting to 0 based for java. + int arrayIndex = index - 1; + if (parametersList.size() <= arrayIndex || parametersList.get(arrayIndex) == null) { + return null; + } + return parametersList.get(arrayIndex).getType(); + } + + StandardSQLTypeName getSqlType(int index) { + // Index is 1-based. Converting to 0 based for java. + int arrayIndex = index - 1; + if (parametersList.size() <= arrayIndex || parametersList.get(arrayIndex) == null) { + return null; + } + return parametersList.get(arrayIndex).getSqlType(); + } + + void clearParameters() { + LOG.finest("++enter++"); + parametersList.clear(); + highestIndex = 0; + } + + // set parameter by name and type + void setParameter( + String paramName, + Object value, + Class type, + BigQueryStatementParameterType paramType, + int scale) + throws BigQueryJdbcSqlFeatureNotSupportedException { + LOG.finest("++enter++"); + LOG.finest(String.format("setParameter called by : %s", type.getName())); + if (paramName == null || paramName.isEmpty()) { + throw new IllegalArgumentException("paramName cannot be null or empty"); + } + BigQueryJdbcParameter parameter = null; + for (BigQueryJdbcParameter p : parametersList) { + if (paramName.equals(p.getParamName())) { + parameter = p; + break; + } + } + if (parameter == null) { + // Add new parameter. 
+ parameter = new BigQueryJdbcParameter(); + parameter.setIndex(-1); + } + parameter.setValue(value); + parameter.setType(type); + parameter.setSqlType(BigQueryJdbcTypeMappings.classToType(type)); + parameter.setParamName(paramName); + parameter.setParamType(paramType); + parameter.setScale(scale); + if (parameter.getIndex() == -1) { + parametersList.add(parameter); + } + LOG.finest(String.format("Parameter set { %s }", parameter.toString())); + } + + // set parameter by index and type + void setParameter( + int parameterIndex, + Object value, + Class type, + BigQueryStatementParameterType paramType, + int scale) + throws BigQueryJdbcSqlFeatureNotSupportedException { + LOG.finest("++enter++"); + LOG.finest(String.format("setParameter called by : %s", type.getName())); + checkValidIndex(parameterIndex); + int arrayIndex = parameterIndex - 1; + if (parameterIndex >= this.highestIndex || this.parametersList.get(arrayIndex) == null) { + parametersList.ensureCapacity(parameterIndex); + while (parametersList.size() < parameterIndex) { + parametersList.add(null); + } + parametersList.set(arrayIndex, new BigQueryJdbcParameter()); + } + this.highestIndex = Math.max(parameterIndex, highestIndex); + BigQueryJdbcParameter parameter = parametersList.get(arrayIndex); + + parameter.setIndex(parameterIndex); + parameter.setValue(value); + parameter.setType(type); + parameter.setSqlType(BigQueryJdbcTypeMappings.classToType(type)); + parameter.setParamName(""); + parameter.setParamType(paramType); + parameter.setScale(scale); + + LOG.finest(String.format("Parameter set { %s }", parameter.toString())); + } + + // Get Parameter by name + Object getParameter(String name) { + for (BigQueryJdbcParameter p : parametersList) { + if (name.equals(p.getParamName())) { + return p.getValue(); + } + } + return null; + } + + // Get parameter type by index + BigQueryStatementParameterType getParameterType(int index) { + // Index is 1-based. Converting to 0 based for java. 
+ int arrayIndex = index - 1; + if (parametersList.size() <= arrayIndex || parametersList.get(arrayIndex) == null) { + return null; + } + return parametersList.get(arrayIndex).getParamType(); + } + + // Get parameter type by name + BigQueryStatementParameterType getParameterType(String name) { + for (BigQueryJdbcParameter p : parametersList) { + if (name.equals(p.getParamName())) { + return p.getParamType(); + } + } + return null; + } + + // Get scale type by index + int getParameterScale(int index) { + // Index is 1-based. Converting to 0 based for java. + int arrayIndex = index - 1; + if (parametersList.size() <= arrayIndex || parametersList.get(arrayIndex) == null) { + return -1; + } + return parametersList.get(arrayIndex).getScale(); + } + + // Get parameter scale by name + int getParameterScale(String name) { + for (BigQueryJdbcParameter p : parametersList) { + if (name.equals(p.getParamName())) { + return p.getScale(); + } + } + return -1; + } + + Class getType(String name) { + for (BigQueryJdbcParameter p : parametersList) { + if (name.equals(p.getParamName())) { + return p.getType(); + } + } + return null; + } + + StandardSQLTypeName getSqlType(String name) { + for (BigQueryJdbcParameter p : parametersList) { + if (name.equals(p.getParamName())) { + return p.getSqlType(); + } + } + return null; + } +} diff --git a/google-cloud-bigquery-jdbc/src/main/java/com/google/cloud/bigquery/jdbc/BigQueryPooledConnection.java b/google-cloud-bigquery-jdbc/src/main/java/com/google/cloud/bigquery/jdbc/BigQueryPooledConnection.java new file mode 100644 index 000000000..ebb07dc11 --- /dev/null +++ b/google-cloud-bigquery-jdbc/src/main/java/com/google/cloud/bigquery/jdbc/BigQueryPooledConnection.java @@ -0,0 +1,497 @@ +/* + * Copyright 2025 Google LLC + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package com.google.cloud.bigquery.jdbc; + +import com.google.common.annotations.VisibleForTesting; +import java.sql.Connection; +import java.sql.SQLException; +import java.util.UUID; +import java.util.concurrent.Executor; +import java.util.concurrent.LinkedBlockingDeque; +import javax.sql.ConnectionEvent; +import javax.sql.ConnectionEventListener; +import javax.sql.PooledConnection; +import javax.sql.StatementEventListener; + +class BigQueryPooledConnection implements PooledConnection { + private final BigQueryJdbcCustomLogger LOG = new BigQueryJdbcCustomLogger(this.toString()); + + private String id; // Mainly for internal use + private Connection bqConnection; + private boolean inUse = false; + private Long listenerPoolSize = 10L; + private LinkedBlockingDeque listeners; + + BigQueryPooledConnection(Connection bqConnection) { + this.bqConnection = bqConnection; + this.id = UUID.randomUUID().toString(); + String connectionUrl = ((BigQueryConnection) bqConnection).getConnectionUrl(); + if (connectionUrl != null && !connectionUrl.isEmpty()) { + this.listenerPoolSize = + BigQueryJdbcUrlUtility.parseListenerPoolSize(connectionUrl, this.toString()); + } + if (getListenerPoolSize() > 0L) { + listeners = new LinkedBlockingDeque<>(getListenerPoolSize().intValue()); + } else { + listeners = new LinkedBlockingDeque<>(); + } + } + + Long getListenerPoolSize() { + return listenerPoolSize; + } + + @VisibleForTesting + boolean inUse() { + return inUse; + } + + @VisibleForTesting + boolean isListenerPooled(ConnectionEventListener l) { + return 
listeners.contains(l); + } + + @Override + public synchronized Connection getConnection() throws SQLException { + LOG.finest("++enter++"); + if (inUse) { + throw new SQLException("PooledConnection is already in use."); + } + inUse = true; + // Return a wrapper around the underlying physical connection. + return new BigQueryPooledConnectionWrapper(bqConnection, this); + } + + @Override + public synchronized void close() throws SQLException { + LOG.finest("++enter++"); + // Notify listeners that the *PooledConnection* is being closed. + ConnectionEvent event = new ConnectionEvent(this); + for (ConnectionEventListener listener : listeners) { + listener.connectionClosed(event); // This is likely not the intended event for this action + } + // Marks the pooled connection to be not in use. + inUse = false; + } + + @Override + public synchronized void addConnectionEventListener(ConnectionEventListener listener) { + LOG.finest("++enter++"); + if (listener == null) { + return; + } + if (this.listeners.contains(listener)) { + return; + } + this.listeners.add(listener); + } + + @Override + public synchronized void removeConnectionEventListener(ConnectionEventListener listener) { + LOG.finest("++enter++"); + if (listener == null) { + return; + } + if (!this.listeners.contains(listener)) { + return; + } + this.listeners.remove(listener); + } + + // Method called by the BigQueryPooledConnectionWrapper when the logical + // Connection is closed. + public synchronized void connectionHandleClosed(BigQueryPooledConnectionWrapper handle) { + LOG.finest("++enter++"); + inUse = false; + ConnectionEvent event = new ConnectionEvent(this); + for (ConnectionEventListener listener : listeners) { + listener.connectionClosed(event); + } + LOG.finest("Connection handle returned to the pool."); + } + + // Method to notify listeners about a connection error. 
This can be called + // by the application if they are using PooledConnection directly or by the + // BigQueryPooledConnectionWrapper when a connection is aborted. + public synchronized void fireConnectionError(SQLException e) { + LOG.finest("++enter++"); + inUse = false; + ConnectionEvent event = new ConnectionEvent(this, e); + for (ConnectionEventListener listener : listeners) { + listener.connectionErrorOccurred(event); + } + LOG.finest( + String.format("Connection handle removed from the pool due to error: %s", e.getMessage())); + // Listners no longer need to listen for this connection since it has been removed from the + // pool. + for (ConnectionEventListener listener : listeners) { + removeConnectionEventListener(listener); + } + } + + @Override + public void addStatementEventListener(StatementEventListener arg0) { + throw new UnsupportedOperationException( + "Method 'addStatementEventListener' is not supported by the BQ Driver"); + } + + @Override + public void removeStatementEventListener(StatementEventListener arg0) { + throw new UnsupportedOperationException( + "Method 'removeStatementEventListener' is not supported by the BQ Driver"); + } + + // Inner class: Connection Wrapper around the actual physical Connection + // This class notifies the listeners or calls the listner notification methods + // provided by the pooled connection. 
+ static class BigQueryPooledConnectionWrapper implements Connection { + private final BigQueryJdbcCustomLogger LOG = new BigQueryJdbcCustomLogger(this.toString()); + private Connection bqConnectionDelegate; + private BigQueryPooledConnection pooledConnection; + private boolean closed = false; + + public BigQueryPooledConnectionWrapper( + Connection bqConnectionDelegate, BigQueryPooledConnection pooledConnection) { + this.bqConnectionDelegate = bqConnectionDelegate; + this.pooledConnection = pooledConnection; + } + + @Override + public void close() throws SQLException { + LOG.finest("++enter++"); + if (!closed) { + // Instead of physically closing, we notify the PooledConnection + // that this handle is no longer in use. + pooledConnection.connectionHandleClosed(this); + closed = true; + LOG.finest("Logical connection closed (returned to pool)."); + } + } + + @Override + public boolean isClosed() throws SQLException { + return closed || bqConnectionDelegate.isClosed(); + } + + @Override + public java.sql.Statement createStatement() throws SQLException { + return bqConnectionDelegate.createStatement(); + } + + @Override + public java.sql.PreparedStatement prepareStatement(String sql) throws SQLException { + return bqConnectionDelegate.prepareStatement(sql); + } + + @Override + public java.sql.CallableStatement prepareCall(String sql) throws SQLException { + return bqConnectionDelegate.prepareCall(sql); + } + + @Override + public String nativeSQL(String sql) throws SQLException { + return bqConnectionDelegate.nativeSQL(sql); + } + + @Override + public void setAutoCommit(boolean autoCommit) throws SQLException { + bqConnectionDelegate.setAutoCommit(autoCommit); + } + + @Override + public boolean getAutoCommit() throws SQLException { + return bqConnectionDelegate.getAutoCommit(); + } + + @Override + public void commit() throws SQLException { + bqConnectionDelegate.commit(); + } + + @Override + public void rollback() throws SQLException { + 
bqConnectionDelegate.rollback(); + } + + @Override + public java.sql.DatabaseMetaData getMetaData() throws SQLException { + return bqConnectionDelegate.getMetaData(); + } + + @Override + public void setReadOnly(boolean readOnly) throws SQLException { + bqConnectionDelegate.setReadOnly(readOnly); + } + + @Override + public boolean isReadOnly() throws SQLException { + return bqConnectionDelegate.isReadOnly(); + } + + @Override + public void setCatalog(String catalog) throws SQLException { + bqConnectionDelegate.setCatalog(catalog); + } + + @Override + public String getCatalog() throws SQLException { + return bqConnectionDelegate.getCatalog(); + } + + @Override + public void setTransactionIsolation(int level) throws SQLException { + bqConnectionDelegate.setTransactionIsolation(level); + } + + @Override + public int getTransactionIsolation() throws SQLException { + return bqConnectionDelegate.getTransactionIsolation(); + } + + @Override + public java.sql.SQLWarning getWarnings() throws SQLException { + return bqConnectionDelegate.getWarnings(); + } + + @Override + public void clearWarnings() throws SQLException { + bqConnectionDelegate.clearWarnings(); + } + + @Override + public java.sql.Statement createStatement(int resultSetType, int resultSetConcurrency) + throws SQLException { + return bqConnectionDelegate.createStatement(resultSetType, resultSetConcurrency); + } + + @Override + public java.sql.PreparedStatement prepareStatement( + String sql, int resultSetType, int resultSetConcurrency) throws SQLException { + return bqConnectionDelegate.prepareStatement(sql, resultSetType, resultSetConcurrency); + } + + @Override + public java.sql.CallableStatement prepareCall( + String sql, int resultSetType, int resultSetConcurrency) throws SQLException { + return bqConnectionDelegate.prepareCall(sql, resultSetType, resultSetConcurrency); + } + + @Override + public java.util.Map> getTypeMap() throws SQLException { + return bqConnectionDelegate.getTypeMap(); + } + + @Override + 
public void setTypeMap(java.util.Map> map) throws SQLException { + bqConnectionDelegate.setTypeMap(map); + } + + @Override + public void setHoldability(int holdability) throws SQLException { + bqConnectionDelegate.setHoldability(holdability); + } + + @Override + public int getHoldability() throws SQLException { + return bqConnectionDelegate.getHoldability(); + } + + @Override + public java.sql.Savepoint setSavepoint() throws SQLException { + return bqConnectionDelegate.setSavepoint(); + } + + @Override + public java.sql.Savepoint setSavepoint(String name) throws SQLException { + return bqConnectionDelegate.setSavepoint(name); + } + + @Override + public void rollback(java.sql.Savepoint savepoint) throws SQLException { + bqConnectionDelegate.rollback(savepoint); + } + + @Override + public void releaseSavepoint(java.sql.Savepoint savepoint) throws SQLException { + bqConnectionDelegate.releaseSavepoint(savepoint); + } + + @Override + public java.sql.Statement createStatement( + int resultSetType, int resultSetConcurrency, int resultSetHoldability) throws SQLException { + return bqConnectionDelegate.createStatement( + resultSetType, resultSetConcurrency, resultSetHoldability); + } + + @Override + public java.sql.PreparedStatement prepareStatement( + String sql, int resultSetType, int resultSetConcurrency, int resultSetHoldability) + throws SQLException { + return bqConnectionDelegate.prepareStatement( + sql, resultSetType, resultSetConcurrency, resultSetHoldability); + } + + @Override + public java.sql.CallableStatement prepareCall( + String sql, int resultSetType, int resultSetConcurrency, int resultSetHoldability) + throws SQLException { + return bqConnectionDelegate.prepareCall( + sql, resultSetType, resultSetConcurrency, resultSetHoldability); + } + + @Override + public java.sql.PreparedStatement prepareStatement(String sql, int autoGeneratedKeys) + throws SQLException { + return bqConnectionDelegate.prepareStatement(sql, autoGeneratedKeys); + } + + @Override + 
public java.sql.PreparedStatement prepareStatement(String sql, int[] columnIndices) + throws SQLException { + return bqConnectionDelegate.prepareStatement(sql, columnIndices); + } + + @Override + public java.sql.PreparedStatement prepareStatement(String sql, String[] columnNames) + throws SQLException { + return bqConnectionDelegate.prepareStatement(sql, columnNames); + } + + @Override + public java.sql.Clob createClob() throws SQLException { + return bqConnectionDelegate.createClob(); + } + + @Override + public java.sql.Blob createBlob() throws SQLException { + return bqConnectionDelegate.createBlob(); + } + + @Override + public java.sql.NClob createNClob() throws SQLException { + return bqConnectionDelegate.createNClob(); + } + + @Override + public java.sql.SQLXML createSQLXML() throws SQLException { + return bqConnectionDelegate.createSQLXML(); + } + + @Override + public boolean isValid(int timeout) throws SQLException { + return bqConnectionDelegate.isValid(timeout); + } + + @Override + public void setClientInfo(String name, String value) throws java.sql.SQLClientInfoException { + bqConnectionDelegate.setClientInfo(name, value); + } + + @Override + public void setClientInfo(java.util.Properties properties) + throws java.sql.SQLClientInfoException { + bqConnectionDelegate.setClientInfo(properties); + } + + @Override + public String getClientInfo(String name) throws SQLException { + return bqConnectionDelegate.getClientInfo(name); + } + + @Override + public java.util.Properties getClientInfo() throws SQLException { + return bqConnectionDelegate.getClientInfo(); + } + + @Override + public java.sql.Array createArrayOf(String typeName, Object[] elements) throws SQLException { + return bqConnectionDelegate.createArrayOf(typeName, elements); + } + + @Override + public java.sql.Struct createStruct(String typeName, Object[] attributes) throws SQLException { + return bqConnectionDelegate.createStruct(typeName, attributes); + } + + @Override + public T unwrap(Class iface) 
throws SQLException { + return bqConnectionDelegate.unwrap(iface); + } + + @Override + public boolean isWrapperFor(Class iface) throws SQLException { + return bqConnectionDelegate.isWrapperFor(iface); + } + + @Override + public void setSchema(String schema) throws SQLException { + bqConnectionDelegate.setSchema(schema); + } + + @Override + public String getSchema() throws SQLException { + return bqConnectionDelegate.getSchema(); + } + + @Override + public void setNetworkTimeout(java.util.concurrent.Executor executor, int milliseconds) + throws SQLException { + bqConnectionDelegate.setNetworkTimeout(executor, milliseconds); + } + + @Override + public int getNetworkTimeout() throws SQLException { + return bqConnectionDelegate.getNetworkTimeout(); + } + + @Override + public void abort(Executor arg0) throws SQLException { + LOG.finest("++enter++"); + if (!closed) { + // We notify the pooled connection that physical connection + // is being aborted. We assume here that abort() is called for + // error cases. + SQLException e = new SQLException("Connection is being terminated and aborted"); + pooledConnection.fireConnectionError(e); + closed = true; + LOG.finest("Logical connection aborted (removed from pool)."); + } + // Call the delate abort to actually close the undelying connection. + bqConnectionDelegate.abort(arg0); + } + } + + @Override + public int hashCode() { + final int prime = 31; + int result = 1; + result = prime * result + ((id == null) ? 
0 : id.hashCode()); + return result; + } + + @Override + public boolean equals(Object obj) { + if (this == obj) return true; + if (obj == null) return false; + if (getClass() != obj.getClass()) return false; + BigQueryPooledConnection other = (BigQueryPooledConnection) obj; + if (id == null) { + if (other.id != null) return false; + } else if (!id.equals(other.id)) return false; + return true; + } +} diff --git a/google-cloud-bigquery-jdbc/src/main/java/com/google/cloud/bigquery/jdbc/BigQueryPreparedStatement.java b/google-cloud-bigquery-jdbc/src/main/java/com/google/cloud/bigquery/jdbc/BigQueryPreparedStatement.java new file mode 100644 index 000000000..2410f6a58 --- /dev/null +++ b/google-cloud-bigquery-jdbc/src/main/java/com/google/cloud/bigquery/jdbc/BigQueryPreparedStatement.java @@ -0,0 +1,611 @@ +/* + * Copyright 2024 Google LLC + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package com.google.cloud.bigquery.jdbc; + +import com.google.api.gax.retrying.RetrySettings; +import com.google.cloud.bigquery.FieldList; +import com.google.cloud.bigquery.JobStatistics.QueryStatistics; +import com.google.cloud.bigquery.JobStatistics.QueryStatistics.StatementType; +import com.google.cloud.bigquery.QueryJobConfiguration; +import com.google.cloud.bigquery.QueryParameterValue; +import com.google.cloud.bigquery.Schema; +import com.google.cloud.bigquery.StandardSQLTypeName; +import com.google.cloud.bigquery.TableId; +import com.google.cloud.bigquery.exception.BigQueryJdbcException; +import com.google.cloud.bigquery.exception.BigQueryJdbcRuntimeException; +import com.google.cloud.bigquery.storage.v1.BatchCommitWriteStreamsRequest; +import com.google.cloud.bigquery.storage.v1.BatchCommitWriteStreamsResponse; +import com.google.cloud.bigquery.storage.v1.BigQueryWriteClient; +import com.google.cloud.bigquery.storage.v1.TableName; +import com.google.gson.Gson; +import com.google.gson.JsonArray; +import com.google.gson.JsonObject; +import com.google.protobuf.Descriptors.DescriptorValidationException; +import java.io.IOException; +import java.io.InputStream; +import java.io.Reader; +import java.math.BigDecimal; +import java.net.URL; +import java.sql.Array; +import java.sql.Blob; +import java.sql.Clob; +import java.sql.Date; +import java.sql.NClob; +import java.sql.ParameterMetaData; +import java.sql.PreparedStatement; +import java.sql.Ref; +import java.sql.ResultSet; +import java.sql.ResultSetMetaData; +import java.sql.RowId; +import java.sql.SQLException; +import java.sql.SQLXML; +import java.sql.Time; +import java.sql.Timestamp; +import java.util.ArrayList; +import java.util.Arrays; +import java.util.Calendar; +import java.util.LinkedList; +import java.util.Queue; + +class BigQueryPreparedStatement extends BigQueryStatement implements PreparedStatement { + private final BigQueryJdbcCustomLogger LOG = new BigQueryJdbcCustomLogger(this.toString()); + 
private static final char POSITIONAL_PARAMETER_CHAR = '?'; + // Making this protected so BigQueryCallableStatement subclass can access the parameters. + protected final BigQueryParameterHandler parameterHandler; + protected int parameterCount = 0; + protected String currentQuery; + private Queue> batchParameters = new LinkedList<>(); + private Schema insertSchema = null; + private TableName insertTableName = null; + + BigQueryPreparedStatement(BigQueryConnection connection, String query) { + super(connection); + setCurrentQuery(query); + this.parameterHandler = new BigQueryParameterHandler(this.parameterCount); + } + + void setCurrentQuery(String currentQuery) { + this.parameterCount = getParameterCount(currentQuery); + this.currentQuery = currentQuery; + } + + private int getParameterCount(String query) { + LOG.finest("++enter++"); + return (int) query.chars().filter(ch -> ch == POSITIONAL_PARAMETER_CHAR).count(); + } + + @Override + public ResultSet executeQuery() throws SQLException { + LOG.finest("++enter++"); + logQueryExecutionStart(this.currentQuery); + try { + QueryJobConfiguration.Builder jobConfiguration = getJobConfig(this.currentQuery); + jobConfiguration.setParameterMode("POSITIONAL"); + jobConfiguration = this.parameterHandler.configureParameters(jobConfiguration); + runQuery(this.currentQuery, jobConfiguration.build()); + } catch (InterruptedException ex) { + throw new BigQueryJdbcRuntimeException(ex); + } + return getCurrentResultSet(); + } + + @Override + public long executeLargeUpdate() throws SQLException { + LOG.finest("++enter++"); + logQueryExecutionStart(this.currentQuery); + try { + QueryJobConfiguration.Builder jobConfiguration = getJobConfig(this.currentQuery); + jobConfiguration.setParameterMode("POSITIONAL"); + jobConfiguration = this.parameterHandler.configureParameters(jobConfiguration); + runQuery(this.currentQuery, jobConfiguration.build()); + } catch (InterruptedException ex) { + throw new BigQueryJdbcRuntimeException(ex); + } + 
return this.currentUpdateCount; + } + + @Override + public int executeUpdate() throws SQLException { + LOG.finest("++enter++"); + return checkUpdateCount(executeLargeUpdate()); + } + + @Override + public boolean execute() throws SQLException { + LOG.finest("++enter++"); + logQueryExecutionStart(this.currentQuery); + try { + QueryJobConfiguration.Builder jobConfiguration = getJobConfig(this.currentQuery); + jobConfiguration.setParameterMode("POSITIONAL"); + jobConfiguration = this.parameterHandler.configureParameters(jobConfiguration); + runQuery(this.currentQuery, jobConfiguration.build()); + } catch (InterruptedException ex) { + throw new BigQueryJdbcRuntimeException(ex); + } + return getCurrentResultSet() != null; + } + + @Override + public void clearParameters() { + LOG.finest("++enter++"); + this.parameterHandler.clearParameters(); + this.parameterCount = 0; + } + + @Override + public void setNull(int parameterIndex, int sqlType) { + // TODO(neenu): implement null case + } + + @Override + public void setBoolean(int parameterIndex, boolean x) throws SQLException { + checkClosed(); + this.parameterHandler.setParameter(parameterIndex, x, Boolean.class); + } + + @Override + public void setByte(int parameterIndex, byte x) throws SQLException { + checkClosed(); + this.parameterHandler.setParameter(parameterIndex, x, Byte.class); + } + + @Override + public void setShort(int parameterIndex, short x) { + // TODO(neenu): implement Bytes conversion. 
+ } + + @Override + public void setInt(int parameterIndex, int x) throws SQLException { + checkClosed(); + this.parameterHandler.setParameter(parameterIndex, x, Integer.class); + } + + @Override + public void setLong(int parameterIndex, long x) throws SQLException { + checkClosed(); + this.parameterHandler.setParameter(parameterIndex, x, Long.class); + } + + @Override + public void setFloat(int parameterIndex, float x) throws SQLException { + checkClosed(); + this.parameterHandler.setParameter(parameterIndex, x, Float.class); + } + + @Override + public void setDouble(int parameterIndex, double x) throws SQLException { + checkClosed(); + this.parameterHandler.setParameter(parameterIndex, x, Double.class); + } + + @Override + public void setBigDecimal(int parameterIndex, BigDecimal x) throws SQLException { + checkClosed(); + this.parameterHandler.setParameter(parameterIndex, x, BigDecimal.class); + } + + @Override + public void setString(int parameterIndex, String x) throws SQLException { + checkClosed(); + this.parameterHandler.setParameter(parameterIndex, x, String.class); + } + + @Override + public void setBytes(int parameterIndex, byte[] x) { + // TODO(neenu): implement Bytes conversion. 
+ } + + @Override + public void setDate(int parameterIndex, Date x) throws SQLException { + checkClosed(); + this.parameterHandler.setParameter(parameterIndex, x.toString(), String.class); + } + + @Override + public void setTime(int parameterIndex, Time x) throws SQLException { + checkClosed(); + this.parameterHandler.setParameter(parameterIndex, x.toString(), String.class); + } + + @Override + public void setTimestamp(int parameterIndex, Timestamp x) throws SQLException { + checkClosed(); + this.parameterHandler.setParameter(parameterIndex, x.toString(), String.class); + } + + @Override + public void setAsciiStream(int parameterIndex, InputStream x, int length) { + // TODO :NOT IMPLEMENTED + } + + @Override + public void setUnicodeStream(int parameterIndex, InputStream x, int length) { + // TODO :NOT IMPLEMENTED + } + + @Override + public void setBinaryStream(int parameterIndex, InputStream x, int length) { + // TODO :NOT IMPLEMENTED + } + + @Override + public void setObject(int parameterIndex, Object x, int targetSqlType) {} + + @Override + public void setObject(int parameterIndex, Object x) { + // TODO :NOT IMPLEMENTED + } + + @Override + public void addBatch() { + LOG.finest("++enter++"); + ArrayList currentParameterList = + deepCopyParameterList(this.parameterHandler.parametersList); + this.batchParameters.add(currentParameterList); + } + + private ArrayList deepCopyParameterList( + ArrayList parametersList) { + ArrayList copiedParameterList = new ArrayList<>(); + for (BigQueryJdbcParameter parameter : parametersList) { + BigQueryJdbcParameter newParameter = new BigQueryJdbcParameter(parameter); + copiedParameterList.add(newParameter); + } + return copiedParameterList; + } + + @Override + public int[] executeBatch() throws SQLException { + LOG.finest("++enter++"); + int[] result = new int[this.batchParameters.size()]; + if (this.batchParameters.isEmpty()) { + return result; + } + if (useWriteAPI()) { + try (BigQueryWriteClient writeClient = 
this.connection.getBigQueryWriteClient()) { + LOG.info("Using Write API for bulk INSERT operation."); + ArrayList currentParameterList = this.batchParameters.peek(); + if (this.insertSchema == null && this.insertTableName == null) { + QueryStatistics insertJobQueryStatistics = + getQueryStatistics(getWriteBatchJobConfiguration(currentParameterList)); + setInsertMetadata(insertJobQueryStatistics); + } + + long rowCount = bulkInsertWithWriteAPI(writeClient); + int[] insertArray = new int[Math.toIntExact(rowCount)]; + Arrays.fill(insertArray, 1); + return insertArray; + + } catch (DescriptorValidationException | IOException | InterruptedException e) { + throw new BigQueryJdbcRuntimeException(e); + } + + } else { + try { + LOG.info("Using individual INSERT query runs."); + int count = this.batchParameters.size(); + StringBuilder combinedQuery = new StringBuilder(); + for (int i = 0; i < count; i++) { + + if (this.currentQuery.trim().endsWith(";")) { + combinedQuery.append(this.currentQuery); + } else { + combinedQuery.append(this.currentQuery).append(";"); + } + } + // executeBatch in PreparedStatement is used for BulkInsert/DML. + // If not correct Type, fails later. 
+ runQuery( + combinedQuery.toString(), getStandardBatchJobConfiguration(combinedQuery.toString())); + int i = 0; + while (getUpdateCount() != -1 && i < count) { + result[i] = getUpdateCount(); + getMoreResults(); + i++; + } + return result; + } catch (InterruptedException ex) { + throw new BigQueryJdbcRuntimeException(ex); + } catch (SQLException e) { + throw new BigQueryJdbcException(e); + } + } + } + + private long bulkInsertWithWriteAPI(BigQueryWriteClient bigQueryWriteClient) + throws DescriptorValidationException, + IOException, + InterruptedException, + BigQueryJdbcException { + LOG.finest("++enter++"); + RetrySettings retrySettings = this.connection.getRetrySettings(); + + BigQueryJdbcBulkInsertWriter bulkInsertWriter = new BigQueryJdbcBulkInsertWriter(); + bulkInsertWriter.initialize(this.insertTableName, bigQueryWriteClient, retrySettings); + + try { + long offset = 0; + JsonArray jsonArray = new JsonArray(); + Gson gson = new Gson(); + int count = this.batchParameters.size(); + for (int i = 0; i < count; i++) { + + ArrayList parameterList = this.batchParameters.poll(); + FieldList fieldLists = this.insertSchema.getFields(); + if (fieldLists.size() == parameterList.size()) { + + JsonObject rowObject = new JsonObject(); + for (int j = 0; j < parameterList.size(); j++) { + BigQueryJdbcParameter parameter = parameterList.get(j); + if (parameter.getSqlType() == StandardSQLTypeName.STRING) { + rowObject.addProperty(fieldLists.get(j).getName(), parameter.getValue().toString()); + } else { + rowObject.addProperty(fieldLists.get(j).getName(), gson.toJson(parameter.getValue())); + } + } + jsonArray.add(rowObject); + + if (jsonArray.size() == this.querySettings.getWriteAPIAppendRowCount() + || this.batchParameters.size() == 0) { + bulkInsertWriter.append(jsonArray, offset); + LOG.finest("Append called "); + offset += jsonArray.size(); + jsonArray = new JsonArray(); + } + } else { + throw new BigQueryJdbcException("Mismatch between field count and parameter 
count."); + } + } + } catch (BigQueryJdbcException e) { + throw new RuntimeException(e); + } + + long rowCount = bulkInsertWriter.cleanup(bigQueryWriteClient); + + BatchCommitWriteStreamsRequest commitRequest = + BatchCommitWriteStreamsRequest.newBuilder() + .setParent(this.insertTableName.toString()) + .addWriteStreams(bulkInsertWriter.getStreamName()) + .build(); + BatchCommitWriteStreamsResponse commitResponse = + bigQueryWriteClient.batchCommitWriteStreams(commitRequest); + if (commitResponse.hasCommitTime() == false) { + throw new BigQueryJdbcException("Error committing the streams"); + } + LOG.finest("Commit called."); + return rowCount; + } + + private void setInsertMetadata(QueryStatistics statistics) throws SQLException { + LOG.finest("++enter++"); + if (!statistics.getStatementType().equals(StatementType.INSERT) + || statistics.getSchema() == null + || statistics.getReferencedTables().stream().distinct().count() > 1) { + throw new BigQueryJdbcException( + "Use java.sql.Statement.executeBatch() for heterogeneous DML batches"); + } + + this.insertSchema = statistics.getSchema(); + TableId tableID = statistics.getReferencedTables().get(0); + this.insertTableName = + TableName.of(tableID.getProject(), tableID.getDataset(), tableID.getTable()); + LOG.finest( + String.format( + "this.insertTableName : %s, this.insertSchema : %s", + this.insertTableName, this.insertSchema.toString())); + } + + QueryJobConfiguration getWriteBatchJobConfiguration( + ArrayList currentParameterList) throws SQLException { + LOG.finest("++enter++"); + BigQueryParameterHandler batchHandler = + new BigQueryParameterHandler(this.parameterCount, currentParameterList); + QueryJobConfiguration.Builder jobConfiguration = getJobConfig(this.currentQuery); + jobConfiguration.setParameterMode("POSITIONAL"); + jobConfiguration = batchHandler.configureParameters(jobConfiguration); + return jobConfiguration.build(); + } + + QueryJobConfiguration getStandardBatchJobConfiguration(String query) throws 
SQLException { + LOG.finest("++enter++"); + QueryJobConfiguration.Builder jobConfiguration = getJobConfig(query); + jobConfiguration.setParameterMode("POSITIONAL"); + jobConfiguration.setPriority(QueryJobConfiguration.Priority.BATCH); + int index = 0; + while (!this.batchParameters.isEmpty()) { + ArrayList parameterList = this.batchParameters.poll(); + + for (BigQueryJdbcParameter parameter : parameterList) { + Object parameterValue = parameter.getValue(); + StandardSQLTypeName sqlType = parameter.getSqlType(); + LOG.finest( + String.format( + "Parameter %s of type %s at index %s added to QueryJobConfiguration", + parameterValue, sqlType, index++)); + jobConfiguration.addPositionalParameter(QueryParameterValue.of(parameterValue, sqlType)); + } + } + return jobConfiguration.build(); + } + + Boolean useWriteAPI() { + LOG.finest("++enter++"); + if (this.querySettings.isUseWriteAPI()) { + if (this.batchParameters.size() >= this.querySettings.getWriteAPIActivationRowCount()) { + return true; + } + } + return false; + } + + @Override + public void setCharacterStream(int parameterIndex, Reader reader, int length) { + // TODO :NOT IMPLEMENTED + } + + @Override + public void setRef(int parameterIndex, Ref x) { + // TODO :NOT IMPLEMENTED + } + + @Override + public void setBlob(int parameterIndex, Blob x) { + // TODO :NOT IMPLEMENTED + } + + @Override + public void setClob(int parameterIndex, Clob x) { + // TODO :NOT IMPLEMENTED + } + + @Override + public void setArray(int parameterIndex, Array x) { + // TODO(neenu) :IMPLEMENT ARRAY + } + + @Override + public ResultSetMetaData getMetaData() { + // TODO(neenu) :IMPLEMENT metadata + return null; + } + + @Override + public void setDate(int parameterIndex, Date x, Calendar cal) { + // TODO :NOT IMPLEMENTED + } + + @Override + public void setTime(int parameterIndex, Time x, Calendar cal) { + // TODO :NOT IMPLEMENTED + } + + @Override + public void setTimestamp(int parameterIndex, Timestamp x, Calendar cal) { + // TODO :NOT 
IMPLEMENTED + } + + @Override + public void setNull(int parameterIndex, int sqlType, String typeName) { + // TODO :NOT IMPLEMENTED + } + + @Override + public void setURL(int parameterIndex, URL x) { + // TODO :NOT IMPLEMENTED + } + + @Override + public ParameterMetaData getParameterMetaData() { + // TODO(neenu) :IMPLEMENT + return null; + } + + @Override + public void setRowId(int parameterIndex, RowId x) { + // TODO :NOT IMPLEMENTED + } + + @Override + public void setNString(int parameterIndex, String value) { + // TODO :NOT IMPLEMENTED + } + + @Override + public void setNCharacterStream(int parameterIndex, Reader value, long length) { + // TODO :NOT IMPLEMENTED + } + + @Override + public void setNClob(int parameterIndex, NClob value) { + // TODO :NOT IMPLEMENTED + } + + @Override + public void setClob(int parameterIndex, Reader reader, long length) { + // TODO :NOT IMPLEMENTED + } + + @Override + public void setBlob(int parameterIndex, InputStream inputStream, long length) { + // TODO :NOT IMPLEMENTED + } + + @Override + public void setNClob(int parameterIndex, Reader reader, long length) { + // TODO :NOT IMPLEMENTED + } + + @Override + public void setSQLXML(int parameterIndex, SQLXML xmlObject) { + // TODO :NOT IMPLEMENTED + } + + @Override + public void setObject(int parameterIndex, Object x, int targetSqlType, int scaleOrLength) { + // TODO(neenu) : IMPLEMENT? 
+ } + + @Override + public void setAsciiStream(int parameterIndex, InputStream x, long length) { + // TODO :NOT IMPLEMENTED + } + + @Override + public void setBinaryStream(int parameterIndex, InputStream x, long length) { + // TODO :NOT IMPLEMENTED + } + + @Override + public void setCharacterStream(int parameterIndex, Reader reader, long length) { + // TODO :NOT IMPLEMENTED + } + + @Override + public void setAsciiStream(int parameterIndex, InputStream x) { + // TODO :NOT IMPLEMENTED + } + + @Override + public void setBinaryStream(int parameterIndex, InputStream x) { + // TODO :NOT IMPLEMENTED + } + + @Override + public void setCharacterStream(int parameterIndex, Reader reader) { + // TODO :NOT IMPLEMENTED + } + + @Override + public void setNCharacterStream(int parameterIndex, Reader value) { + // TODO :NOT IMPLEMENTED + } + + @Override + public void setClob(int parameterIndex, Reader reader) { + // TODO :NOT IMPLEMENTED + } + + @Override + public void setBlob(int parameterIndex, InputStream inputStream) { + // TODO :NOT IMPLEMENTED + } + + @Override + public void setNClob(int parameterIndex, Reader reader) { + // TODO :NOT IMPLEMENTED + } +} diff --git a/google-cloud-bigquery-jdbc/src/main/java/com/google/cloud/bigquery/jdbc/BigQueryResultSet.java b/google-cloud-bigquery-jdbc/src/main/java/com/google/cloud/bigquery/jdbc/BigQueryResultSet.java new file mode 100644 index 000000000..c24e37abd --- /dev/null +++ b/google-cloud-bigquery-jdbc/src/main/java/com/google/cloud/bigquery/jdbc/BigQueryResultSet.java @@ -0,0 +1,46 @@ +/* + * Copyright 2025 Google LLC + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package com.google.cloud.bigquery.jdbc; + +import com.google.cloud.bigquery.JobId; +import com.google.cloud.bigquery.JobStatistics.QueryStatistics; + +public interface BigQueryResultSet { + /* + * This function returns Job Id for the corresponding BQ Job that generated result. + * Note that it is not available for certain queries (low-latency queries) and for metadata results. + * + * @return JobId object or null. + */ + public JobId getJobId(); + + /* + * This function returns Query Id for the corresponding low-latency query produced results. + * It is null for regular (non-low latency) jobs and metadata results. + * + * @return Query Id string or null. + */ + public String getQueryId(); + + /* + * Returns com.google.cloud.bigquery.JobStatistics.QueryStatistics object with statistics for the + * completed Job for non-low latency queries. + * + * @return QueryStatistics object or null. + */ + public QueryStatistics getQueryStatistics(); +} diff --git a/google-cloud-bigquery-jdbc/src/main/java/com/google/cloud/bigquery/jdbc/BigQueryResultSetFinalizers.java b/google-cloud-bigquery-jdbc/src/main/java/com/google/cloud/bigquery/jdbc/BigQueryResultSetFinalizers.java new file mode 100644 index 000000000..15a1cca34 --- /dev/null +++ b/google-cloud-bigquery-jdbc/src/main/java/com/google/cloud/bigquery/jdbc/BigQueryResultSetFinalizers.java @@ -0,0 +1,73 @@ +/* + * Copyright 2023 Google LLC + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package com.google.cloud.bigquery.jdbc; + +import com.google.api.core.InternalApi; +import java.lang.ref.PhantomReference; +import java.lang.ref.ReferenceQueue; + +@InternalApi +class BigQueryResultSetFinalizers { + private static final BigQueryJdbcCustomLogger LOG = + new BigQueryJdbcCustomLogger(BigQueryResultSetFinalizers.class.getName()); + + @InternalApi + static class ArrowResultSetFinalizer extends PhantomReference { + Thread ownedThread; + + public ArrowResultSetFinalizer( + BigQueryArrowResultSet referent, + ReferenceQueue q, + Thread ownedThread) { + super(referent, q); + this.ownedThread = ownedThread; + } + + // Free resources. Remove all the hard refs + public void finalizeResources() { + LOG.finest("++enter++"); + if (ownedThread != null && !ownedThread.isInterrupted()) { + ownedThread.interrupt(); + } + } + } + + @InternalApi + static class JsonResultSetFinalizer extends PhantomReference { + Thread[] ownedThreads; + + public JsonResultSetFinalizer( + BigQueryJsonResultSet referent, + ReferenceQueue q, + Thread[] ownedThreads) { + super(referent, q); + this.ownedThreads = ownedThreads; + } + + // Free resources. 
Remove all the hard refs + public void finalizeResources() { + LOG.finest("++enter++"); + if (ownedThreads != null) { + for (Thread ownedThread : ownedThreads) { + if (!ownedThread.isInterrupted()) { + ownedThread.interrupt(); + } + } + } + } + } +} diff --git a/google-cloud-bigquery-jdbc/src/main/java/com/google/cloud/bigquery/jdbc/BigQueryResultSetMetadata.java b/google-cloud-bigquery-jdbc/src/main/java/com/google/cloud/bigquery/jdbc/BigQueryResultSetMetadata.java new file mode 100644 index 000000000..d18c68933 --- /dev/null +++ b/google-cloud-bigquery-jdbc/src/main/java/com/google/cloud/bigquery/jdbc/BigQueryResultSetMetadata.java @@ -0,0 +1,213 @@ +/* + * Copyright 2023 Google LLC + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package com.google.cloud.bigquery.jdbc; + +import com.google.cloud.bigquery.Field; +import com.google.cloud.bigquery.Field.Mode; +import com.google.cloud.bigquery.FieldList; +import com.google.cloud.bigquery.StandardSQLTypeName; +import com.google.cloud.bigquery.exception.BigQueryJdbcSqlFeatureNotSupportedException; +import java.sql.ResultSetMetaData; +import java.sql.SQLException; +import java.sql.Statement; +import java.sql.Types; + +/** This class returns ResultSetMetadata for the JSON and the Arrow ResultSets */ +class BigQueryResultSetMetadata implements ResultSetMetaData { + private final BigQueryJdbcCustomLogger LOG = new BigQueryJdbcCustomLogger(this.toString()); + private final FieldList schemaFieldList; + private final Statement statement; + private final int columnCount; + + private static final int DEFAULT_DISPLAY_SIZE = 50; + + private BigQueryResultSetMetadata(FieldList schemaFieldList, Statement statement) { + LOG.finest("++enter++"); + this.schemaFieldList = schemaFieldList; + this.columnCount = schemaFieldList.size(); + this.statement = statement; + } + + static BigQueryResultSetMetadata of(FieldList schemaFieldList, Statement statement) { + return new BigQueryResultSetMetadata(schemaFieldList, statement); + } + + private Field getField(int sqlColumn) { + return this.schemaFieldList.get(sqlColumn - 1); + } + + @Override + public int getColumnCount() { + return this.columnCount; + } + + @Override + public boolean isAutoIncrement(int column) { + // BQ doesn't support auto increment + return false; + } + + @Override + public boolean isCaseSensitive(int column) { + int colType = getColumnType(column); + return colType == Types.NVARCHAR; + } + + @Override + public boolean isSearchable(int column) { + int colType = getColumnType(column); + return colType != Types.OTHER; + } + + @Override + public boolean isCurrency(int column) { + return false; + } + + @Override + public int isNullable(int column) { + Mode colMode = getField(column).getMode(); + 
if (colMode == null) { + return ResultSetMetaData.columnNullableUnknown; + } + return colMode == Mode.NULLABLE + ? ResultSetMetaData.columnNullable + : ResultSetMetaData.columnNoNulls; + } + + @Override + public boolean isSigned(int column) { + int colType = getColumnType(column); + return colType == Types.FLOAT + || colType == Types.DOUBLE + || colType == Types.BIGINT + || colType == Types.NUMERIC; + } + + @Override + public int getColumnDisplaySize(int column) { + int colType = getColumnType(column); + switch (colType) { + case Types.BOOLEAN: + return 5; + case Types.DATE: + case Types.BIGINT: + return 10; + case Types.DOUBLE: + case Types.DECIMAL: + case Types.NUMERIC: + return 14; + case Types.TIMESTAMP: + return 16; + default: + return DEFAULT_DISPLAY_SIZE; + } + } + + @Override + public String getColumnLabel(int column) { + return getField(column).getName(); + } + + @Override + public String getColumnName(int column) { + return getField(column).getName(); + } + + @Override + public int getPrecision(int column) { + return (int) (getField(column).getPrecision() != null ? getField(column).getPrecision() : 0); + } + + @Override + public int getScale(int column) { + return (int) (getField(column).getScale() != null ? 
getField(column).getScale() : 0); + } + + @Override + public String getTableName(int column) { + // returning "" as per the specs as there might be multiple tables involved, or we + // might be reading from the temp table + return ""; + } + + @Override + public String getCatalogName(int column) { + return ""; // not applicable + } + + @Override + public String getSchemaName(int column) { + return ""; // not applicable + } + + private StandardSQLTypeName getStandardSQLTypeName(int column) { + Field field = getField(column); + if (field.getMode() == Mode.REPEATED) { + return StandardSQLTypeName.ARRAY; + } + return getField(column).getType().getStandardType(); + } + + @Override + public int getColumnType(int column) { + return BigQueryJdbcTypeMappings.standardSQLToJavaSqlTypesMapping.get( + getStandardSQLTypeName(column)); + } + + @Override + public String getColumnTypeName(int column) { + return getStandardSQLTypeName(column).name(); + } + + @Override + public boolean isReadOnly(int column) { + return false; + } + + @Override + public boolean isWritable(int column) { + return !isReadOnly(column); + } + + @Override + public boolean isDefinitelyWritable(int column) { + return false; + } + + @Override + public String getColumnClassName(int column) { + Field field = getField(column); + if (field.getMode() == Mode.REPEATED) { + return java.sql.Array.class.getName(); + } + return BigQueryJdbcTypeMappings.standardSQLToJavaTypeMapping + .get(field.getType().getStandardType()) + .getName(); + } + + // Unsupported methods: + @Override + public T unwrap(Class iface) throws SQLException { + throw new BigQueryJdbcSqlFeatureNotSupportedException("unwrap is not implemented"); + } + + @Override + public boolean isWrapperFor(Class iface) throws SQLException { + throw new BigQueryJdbcSqlFeatureNotSupportedException("isWrapperFor is not implemented"); + } +} diff --git a/google-cloud-bigquery-jdbc/src/main/java/com/google/cloud/bigquery/jdbc/BigQuerySettings.java 
b/google-cloud-bigquery-jdbc/src/main/java/com/google/cloud/bigquery/jdbc/BigQuerySettings.java new file mode 100644 index 000000000..16f13a778 --- /dev/null +++ b/google-cloud-bigquery-jdbc/src/main/java/com/google/cloud/bigquery/jdbc/BigQuerySettings.java @@ -0,0 +1,857 @@ +/* + * Copyright 2023 Google LLC + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package com.google.cloud.bigquery.jdbc; + +import com.google.cloud.bigquery.Clustering; +import com.google.cloud.bigquery.ConnectionProperty; +import com.google.cloud.bigquery.DatasetId; +import com.google.cloud.bigquery.EncryptionConfiguration; +import com.google.cloud.bigquery.ExternalTableDefinition; +import com.google.cloud.bigquery.JobInfo; +import com.google.cloud.bigquery.QueryJobConfiguration; +import com.google.cloud.bigquery.QueryJobConfiguration.Priority; +import com.google.cloud.bigquery.RangePartitioning; +import com.google.cloud.bigquery.TimePartitioning; +import com.google.cloud.bigquery.UserDefinedFunction; +import java.util.List; +import java.util.Map; + +/** This class is used to pass user defined settings for execution of Queries. */ +// TODO: Expose this class as public once we decide on how to expose the slow +// query path to the end users. 
IMP: revisit the set of params to be exposed via BigQuerySettings +class BigQuerySettings { + + private final boolean useReadAPI; + private final int highThroughputActivationRatio; + private final int highThroughputMinTableSize; + private final boolean unsupportedHTAPIFallback; + + private final boolean enableSession; + + private final ConnectionProperty sessionInfoConnectionProperty; + + private final boolean useQueryCache; + private final String queryDialect; + private final List queryProperties; + private final Boolean allowLargeResults; + private final String kmsKeyName; + private final Clustering clustering; + + private final JobInfo.CreateDisposition createDisposition; + + private final EncryptionConfiguration destinationEncryptionConfiguration; + + private final String destinationTable; + private final String destinationDataset; + private final long destinationDatasetExpirationTime; + + private final long jobTimeoutMs; + + private final int maximumBillingTier; + + private final QueryJobConfiguration.Priority priority; + + private final RangePartitioning rangePartitioning; + + private final List schemaUpdateOptions; + + private final Map tableDefinitions; + + private final TimePartitioning timePartitioning; + + private final List userDefinedFunctions; + + private final JobInfo.WriteDisposition writeDisposition; + + private final int numBufferedRows; + + private final long maxResultPerPage; + + private final DatasetId defaultDataset; + + private final boolean useWriteAPI; + private final int writeAPIActivationRowCount; + private final int writeAPIAppendRowCount; + + private final long maxBytesBilled; + private final Map labels; + + private BigQuerySettings(Builder builder) { + this.useReadAPI = builder.useReadAPI; + this.highThroughputActivationRatio = builder.highThroughputActivationRatio; + this.highThroughputMinTableSize = builder.highThroughputMinTableSize; + this.useQueryCache = builder.useQueryCache; + this.queryDialect = builder.queryDialect; + 
this.queryProperties = builder.queryProperties; + this.allowLargeResults = builder.allowLargeResults; + this.kmsKeyName = builder.kmsKeyName; + this.clustering = builder.clustering; + this.createDisposition = builder.createDisposition; + this.destinationEncryptionConfiguration = builder.destinationEncryptionConfiguration; + this.destinationTable = builder.destinationTable; + this.destinationDataset = builder.destinationDataset; + this.destinationDatasetExpirationTime = builder.destinationDatasetExpirationTime; + this.jobTimeoutMs = builder.jobTimeoutMs; + this.maximumBillingTier = builder.maximumBillingTier; + this.priority = builder.priority; + this.rangePartitioning = builder.rangePartitioning; + this.schemaUpdateOptions = builder.schemaUpdateOptions; + this.tableDefinitions = builder.tableDefinitions; + this.timePartitioning = builder.timePartitioning; + this.userDefinedFunctions = builder.userDefinedFunctions; + this.writeDisposition = builder.writeDisposition; + this.numBufferedRows = builder.numBufferedRows; + this.maxResultPerPage = builder.maxResultPerPage; + this.defaultDataset = builder.defaultDataset; + this.enableSession = builder.enableSession; + this.unsupportedHTAPIFallback = builder.unsupportedHTAPIFallback; + this.sessionInfoConnectionProperty = builder.sessionInfoConnectionProperty; + this.useWriteAPI = builder.useWriteAPI; + this.writeAPIActivationRowCount = builder.writeAPIActivationRowCount; + this.writeAPIAppendRowCount = builder.writeAPIAppendRowCount; + this.maxBytesBilled = builder.maxBytesBilled; + this.labels = builder.labels; + } + + /** Returns a builder for a BigQuerySettings object. */ + static Builder newBuilder() { + return new Builder(); + } + + /** + * Returns useReadAPI flag, enabled by default. 
Read API will be used if the underlying conditions + * are satisfied and this flag is enabled + */ + Boolean getUseReadAPI() { + return useReadAPI; + } + + /** + * Returns integer value for when the connector switches to BigQuery Storage API when the number + * of pages and rows in query results exceed this value and HighThroughPutMinTableSize, + * respectively. + */ + int getHighThroughputActivationRatio() { + return highThroughputActivationRatio; + } + + /** + * Returns integer value for when query results are large, exceeding both row and page limits, the + * connector switches to the BigQuery Storage API for faster processing. + */ + int getHighThroughputMinTableSize() { + return highThroughputMinTableSize; + } + + /** + * Determines if session features are enabled. + * + *

    Enabling session-level features allows for capturing SQL activities or enabling + * multi-statement transactions. Session tracking is disabled by default. + * + * @return true if session is enabled, false otherwise. + */ + boolean isEnableSession() { + return enableSession; + } + + /** + * When the connector uses fetch workflows not supported on the High-Throughput API, this option + * specifies whether the connector falls back to the REST API or returns an error. By default it + * falls back to standard API. + * + * @return true if falls back to standard, false to error. + */ + boolean isUnsupportedHTAPIFallback() { + return unsupportedHTAPIFallback; + } + + /** + * Returns information about the BigQuery session ConnectionProperty associated with this job. + * + *

    BigQuery's sessions provide a way to link multiple jobs and maintain temporary data, such as + * temporary tables, between them. They are needed for using multi-statement transactions that + * span across multiple queries. + * + * @return An instance of {@link ConnectionProperty} containing session details, or {@code null} + * if this job is not part of a session. + */ + ConnectionProperty getSessionInfoConnectionProperty() { + return sessionInfoConnectionProperty; + } + + Boolean getUseQueryCache() { + return useQueryCache; + } + + String getQueryDialect() { + return queryDialect; + } + + List getQueryProperties() { + return this.queryProperties; + } + + /** + * Returns the KMS resource name which is the unique identifier you give to your encryption key in + * Google Cloud's Key Management Service (KMS). Tells BigQuery which key to use when encrypting or + * decrypting your data. + */ + String getKmsKeyName() { + return kmsKeyName; + } + + Boolean getAllowLargeResults() { + return allowLargeResults; + } + + /** Returns the clustering specification for the destination table. */ + Clustering getClustering() { + return clustering; + } + + /** + * Returns whether the job is allowed to create new tables. + * + * @see + * Create Disposition + */ + JobInfo.CreateDisposition getCreateDisposition() { + return createDisposition; + } + + /** Returns the custom encryption configuration (e.g., Cloud KMS keys) */ + EncryptionConfiguration getDestinationEncryptionConfiguration() { + return destinationEncryptionConfiguration; + } + + /** + * Returns the table where you want to store query results. If not provided a default temp table + * is created when needed. + */ + String getDestinationTable() { + return destinationTable; + } + + /** + * Returns the dataset where you want to store query results. If not provided a default dataset is + * created when needed. 
+ */ + String getDestinationDataset() { + return destinationDataset; + } + + long getDestinationDatasetExpirationTime() { + return destinationDatasetExpirationTime; + } + + /** Returns the timeout associated with this job */ + Long getJobTimeoutMs() { + return jobTimeoutMs; + } + + /** Returns the optional billing tier limit for this job. */ + Integer getMaximumBillingTier() { + return maximumBillingTier; + } + + /** Returns the query priority. */ + QueryJobConfiguration.Priority getPriority() { + return priority; + } + + /** Returns the range partitioning specification for the table */ + RangePartitioning getRangePartitioning() { + return rangePartitioning; + } + + /** + * Returns options allowing the schema of the destination table to be updated as a side effect of + * the query job. Schema update options are supported in two cases: when writeDisposition is + * WRITE_APPEND; when writeDisposition is WRITE_TRUNCATE and the destination table is a partition + * of a table, specified by partition decorators. For normal tables, WRITE_TRUNCATE will always + * overwrite the schema. + */ + List getSchemaUpdateOptions() { + return schemaUpdateOptions; + } + + /** + * Returns the external tables definitions. If querying external data sources outside BigQuery, + * this value describes the data format, location and other properties of the data sources. By + * defining these properties, the data sources can be queried as if they were standard BigQuery + * tables. + */ + Map getTableDefinitions() { + return tableDefinitions; + } + + /** Returns the time partitioning specification for the destination table. */ + TimePartitioning getTimePartitioning() { + return timePartitioning; + } + + /** + * Returns user defined function resources that can be used by this query. Function resources can + * either be defined inline ({@link UserDefinedFunction.Type#INLINE}) or loaded from a Google + * Cloud Storage URI ({@link UserDefinedFunction.Type#FROM_URI}. 
+ */ + List getUserDefinedFunctions() { + return userDefinedFunctions; + } + + /** + * Returns the action that should occur if the destination table already exists. + * + * @see + * Write Disposition + */ + JobInfo.WriteDisposition getWriteDisposition() { + return writeDisposition; + } + + /** Returns the number of rows of data to pre-fetch */ + Integer getNumBufferedRows() { + return numBufferedRows; + } + + Long getMaxResultPerPage() { + return maxResultPerPage; + } + + DatasetId getDefaultDataset() { + return defaultDataset; + } + + boolean isUseWriteAPI() { + return useWriteAPI; + } + + int getWriteAPIActivationRowCount() { + return writeAPIActivationRowCount; + } + + int getWriteAPIAppendRowCount() { + return writeAPIAppendRowCount; + } + + long getMaxBytesBilled() { + return maxBytesBilled; + } + + Map getLabels() { + return labels; + } + + @Override + public String toString() { + return "BigQuerySettings{" + + "enableSession=" + + enableSession + + ", " + + "unsupportedHTAPIFallback=" + + unsupportedHTAPIFallback + + ", " + + "sessionInfo=" + + sessionInfoConnectionProperty + + ", " + + "useReadAPI=" + + useReadAPI + + ", " + + "kmsKeyName=" + + kmsKeyName + + ", " + + "highThroughputMinTableSize=" + + highThroughputMinTableSize + + ", " + + "highThroughputActivationRatio=" + + highThroughputActivationRatio + + ", " + + "useQueryCache=" + + useQueryCache + + ", " + + "queryDialect=" + + queryDialect + + ", " + + "queryProperties=" + + queryProperties + + ", " + + "allowLargeResults=" + + allowLargeResults + + ", " + + "clustering=" + + clustering + + ", " + + "createDisposition=" + + createDisposition + + ", " + + "destinationEncryptionConfiguration=" + + destinationEncryptionConfiguration + + ", " + + "destinationTable=" + + destinationTable + + ", " + + "destinationDataset=" + + destinationDataset + + ", " + + "destinationDatasetExpirationTime=" + + destinationDatasetExpirationTime + + ", " + + "jobTimeoutMs=" + + jobTimeoutMs + + ", " + + 
"maximumBillingTier=" + + maximumBillingTier + + ", " + + "priority=" + + priority + + ", " + + "rangePartitioning=" + + rangePartitioning + + ", " + + "schemaUpdateOptions=" + + schemaUpdateOptions + + ", " + + "tableDefinitions=" + + tableDefinitions + + ", " + + "timePartitioning=" + + timePartitioning + + ", " + + "userDefinedFunctions=" + + userDefinedFunctions + + ", " + + "writeDisposition=" + + writeDisposition + + ", " + + "numBufferedRows=" + + numBufferedRows + + ", " + + "maxResultPerPage=" + + maxResultPerPage + + ", " + + "defaultDataset=" + + defaultDataset + + ", " + + "useWriteAPI=" + + useWriteAPI + + ", " + + "writeAPIActivationRowCount=" + + writeAPIActivationRowCount + + ", " + + "writeAPIAppendRowCount=" + + writeAPIAppendRowCount + + ", " + + "maxBytesBilled=" + + maxBytesBilled + + "}"; + } + + /** Returns a builder pre-populated using the current values of this field. */ + Builder toBuilder() { + return new Builder(this); + } + + static final class Builder { + + private boolean useReadAPI; + private int highThroughputMinTableSize; + private int highThroughputActivationRatio; + private boolean enableSession; + private boolean unsupportedHTAPIFallback; + private ConnectionProperty sessionInfoConnectionProperty; + private boolean useQueryCache; + private String queryDialect; + private List queryProperties; + private Boolean allowLargeResults; + private String kmsKeyName; + private Clustering clustering; + private JobInfo.CreateDisposition createDisposition; + private EncryptionConfiguration destinationEncryptionConfiguration; + private String destinationTable; + private String destinationDataset; + private long destinationDatasetExpirationTime; + private long jobTimeoutMs; + private int maximumBillingTier; + private QueryJobConfiguration.Priority priority; + private RangePartitioning rangePartitioning; + private List schemaUpdateOptions; + private Map tableDefinitions; + private TimePartitioning timePartitioning; + private List 
userDefinedFunctions; + private JobInfo.WriteDisposition writeDisposition; + private int numBufferedRows; + private long maxResultPerPage; + private DatasetId defaultDataset; + private boolean useWriteAPI; + private int writeAPIActivationRowCount; + private int writeAPIAppendRowCount; + private long maxBytesBilled; + private Map labels; + + Builder() { + this.withDefaultValues(); + } + + private Builder(BigQuerySettings querySettings) { + this.useReadAPI = querySettings.getUseReadAPI(); + this.highThroughputMinTableSize = querySettings.getHighThroughputMinTableSize(); + this.highThroughputActivationRatio = querySettings.getHighThroughputActivationRatio(); + this.enableSession = querySettings.isEnableSession(); + this.unsupportedHTAPIFallback = querySettings.isUnsupportedHTAPIFallback(); + this.sessionInfoConnectionProperty = querySettings.getSessionInfoConnectionProperty(); + this.useQueryCache = querySettings.getUseQueryCache(); + this.queryDialect = querySettings.getQueryDialect(); + this.queryProperties = querySettings.getQueryProperties(); + this.allowLargeResults = querySettings.getAllowLargeResults(); + this.kmsKeyName = querySettings.getKmsKeyName(); + this.clustering = querySettings.getClustering(); + this.createDisposition = querySettings.getCreateDisposition(); + this.destinationEncryptionConfiguration = + querySettings.getDestinationEncryptionConfiguration(); + this.destinationTable = querySettings.getDestinationTable(); + this.destinationDataset = querySettings.getDestinationDataset(); + this.destinationDatasetExpirationTime = querySettings.destinationDatasetExpirationTime; + this.jobTimeoutMs = querySettings.getJobTimeoutMs(); + this.maximumBillingTier = querySettings.getMaximumBillingTier(); + this.priority = querySettings.getPriority(); + this.rangePartitioning = querySettings.getRangePartitioning(); + this.schemaUpdateOptions = querySettings.getSchemaUpdateOptions(); + this.tableDefinitions = querySettings.getTableDefinitions(); + 
this.timePartitioning = querySettings.getTimePartitioning(); + this.userDefinedFunctions = querySettings.getUserDefinedFunctions(); + this.writeDisposition = querySettings.getWriteDisposition(); + this.numBufferedRows = querySettings.getNumBufferedRows(); + this.maxResultPerPage = querySettings.getMaxResultPerPage(); + this.defaultDataset = querySettings.getDefaultDataset(); + this.useWriteAPI = querySettings.isUseWriteAPI(); + this.writeAPIActivationRowCount = querySettings.getWriteAPIActivationRowCount(); + this.writeAPIAppendRowCount = querySettings.getWriteAPIAppendRowCount(); + this.maxBytesBilled = querySettings.getMaxBytesBilled(); + this.labels = querySettings.getLabels(); + } + + Builder withDefaultValues() { + return setUseReadAPI(false) // Read API is disabled by default; + .setQueryDialect(BigQueryJdbcUrlUtility.DEFAULT_QUERY_DIALECT_VALUE) + .setNumBufferedRows(10000) // 10K records will be kept in the buffer (Blocking Queue); + .setMaxResultPerPage(BigQueryJdbcUrlUtility.DEFAULT_MAX_RESULTS_VALUE); + } + + /** + * Sets useReadAPI flag, enabled by default. Read API will be used if the underlying conditions + * are satisfied and this flag is enabled + * + * @param useReadAPI or {@code true} for none + */ + Builder setUseReadAPI(boolean useReadAPI) { + this.useReadAPI = useReadAPI; + return this; + } + + /** + * Sets the minimum table size for which the BigQuery Storage API will be used. + * + *

    When query results are large, exceeding both the row and page limits, the connector + * automatically switches to the BigQuery Storage API for faster processing. This method allows + * you to configure a threshold for table size, enabling the use of the BigQuery Storage API + * when the limit is exceeded, provided the table size exceeds the specified value. + * + * @param highThroughputMinTableSize the minimum table size to trigger the use of the BigQuery + * Storage API + */ + Builder setHighThroughputMinTableSize(int highThroughputMinTableSize) { + this.highThroughputMinTableSize = highThroughputMinTableSize; + return this; + } + + /** + * Sets the activation ratio for switching to the BigQuery Storage API. + * + *

    The connector switches to the BigQuery Storage API when the number of pages in the query + * results exceeds this value AND the table size is greater than or equal to the value set or + * default value of {@link #setHighThroughputMinTableSize(int)}. + * + * @param highThroughputActivationRatio the activation ratio for switching to BigQuery Storage + * API + */ + Builder setHighThroughputActivationRatio(int highThroughputActivationRatio) { + this.highThroughputActivationRatio = highThroughputActivationRatio; + return this; + } + + /** + * setting true, enables session-level features such as capturing SQL activities or enabling + * multi-statement transactions. Session tracking is disabled by default. + */ + Builder setEnableSession(boolean enableSession) { + this.enableSession = enableSession; + return this; + } + + /** + * When the connector uses fetch workflows not supported on the High-Throughput API, this option + * specifies whether the connector falls back to the REST API or returns an error. By default it + * falls back to standard API. + * + * @param unsupportedHTAPIFallback true if falls back to standard, false to error. + */ + Builder setUnsupportedHTAPIFallback(boolean unsupportedHTAPIFallback) { + this.unsupportedHTAPIFallback = unsupportedHTAPIFallback; + return this; + } + + /** + * Setting session information associated with the job. + * + *

    BigQuery's sessions provide a way to link multiple jobs and maintain temporary data, such + * as temporary tables, between them. They are needed for using multi-statement transactions + * that span across multiple queries. + */ + Builder setSessionInfoConnectionProperty(ConnectionProperty sessionInfoConnectionProperty) { + this.sessionInfoConnectionProperty = sessionInfoConnectionProperty; + return this; + } + + Builder setUseQueryCache(boolean useQueryCache) { + this.useQueryCache = useQueryCache; + return this; + } + + Builder setAllowLargeResults(Boolean allowLargeResults) { + this.allowLargeResults = allowLargeResults; + return this; + } + + /** + * Set the KMS resource key name which is the unique identifier you give to your encryption key + * in Google Cloud's Key Management Service (KMS). Tells BigQuery which key to use when + * encrypting or decrypting your data. + */ + Builder setKmsKeyName(String kmsKeyName) { + this.kmsKeyName = kmsKeyName; + return this; + } + + Builder setQueryDialect(String queryDialect) { + this.queryDialect = queryDialect; + return this; + } + + Builder setQueryProperties(List queryProperties) { + this.queryProperties = queryProperties; + return this; + } + + /** Sets the clustering specification for the destination table. */ + Builder setClustering(Clustering clustering) { + this.clustering = clustering; + return this; + } + + /** + * Sets whether the job is allowed to create tables. + * + * @see + * Create Disposition + */ + Builder setCreateDisposition(JobInfo.CreateDisposition createDisposition) { + this.createDisposition = createDisposition; + return this; + } + + /** + * Sets the custom encryption configuration (e.g., Cloud KMS keys). 
+ * + * @param destinationEncryptionConfiguration destinationEncryptionConfiguration or {@code null} + * for none + */ + Builder setDestinationEncryptionConfiguration( + EncryptionConfiguration destinationEncryptionConfiguration) { + this.destinationEncryptionConfiguration = destinationEncryptionConfiguration; + return this; + } + + Builder setDestinationTable(String destinationTable) { + this.destinationTable = destinationTable; + return this; + } + + Builder setDestinationDataset(String destinationDataset) { + this.destinationDataset = destinationDataset; + return this; + } + + Builder setDestinationDatasetExpirationTime(long destinationDatasetExpirationTime) { + this.destinationDatasetExpirationTime = destinationDatasetExpirationTime; + return this; + } + + /** + * [Optional] Job timeout in milliseconds. If this time limit is exceeded, BigQuery may attempt + * to terminate the job. + * + * @param jobTimeoutMs jobTimeoutMs or {@code null} for none + */ + Builder setJobTimeoutMs(long jobTimeoutMs) { + this.jobTimeoutMs = jobTimeoutMs; + return this; + } + + /** + * Limits the billing tier for this job. Queries that have resource usage beyond this tier will + * fail (without incurring a charge). If unspecified, this will be set to your project default. + * + * @param maximumBillingTier maximum billing tier for this job + */ + Builder setMaximumBillingTier(int maximumBillingTier) { + this.maximumBillingTier = maximumBillingTier; + return this; + } + + /** + * Sets a priority for the query. If not specified the priority is assumed to be {@link + * Priority#INTERACTIVE}. + */ + Builder setPriority(QueryJobConfiguration.Priority priority) { + this.priority = priority; + return this; + } + + /** + * Range partitioning specification for this table. Only one of timePartitioning and + * rangePartitioning should be specified. 
+ * + * @param rangePartitioning rangePartitioning or {@code null} for none + */ + Builder setRangePartitioning(RangePartitioning rangePartitioning) { + this.rangePartitioning = rangePartitioning; + return this; + } + + /** + * Sets options allowing the schema of the destination table to be updated as a side effect of + * the query job. Schema update options are supported in two cases: when writeDisposition is + * WRITE_APPEND; when writeDisposition is WRITE_TRUNCATE and the destination table is a + * partition of a table, specified by partition decorators. For normal tables, WRITE_TRUNCATE + * will always overwrite the schema. + */ + Builder setSchemaUpdateOptions(List schemaUpdateOptions) { + this.schemaUpdateOptions = schemaUpdateOptions; + return this; + } + + /** + * Sets the external tables definitions. If querying external data sources outside BigQuery, + * this value describes the data format, location and other properties of the data sources. By + * defining these properties, the data sources can be queried as if they were standard BigQuery + * tables. + */ + Builder setTableDefinitions(Map tableDefinitions) { + this.tableDefinitions = tableDefinitions; + return this; + } + + /** Sets the time partitioning specification for the destination table. */ + Builder setTimePartitioning(TimePartitioning timePartitioning) { + this.timePartitioning = timePartitioning; + return this; + } + + /** + * Sets user defined function resources that can be used by this query. Function resources can + * either be defined inline ({@link UserDefinedFunction#inline(String)}) or loaded from a Google + * Cloud Storage URI ({@link UserDefinedFunction#fromUri(String)}. + */ + Builder setUserDefinedFunctions(List userDefinedFunctions) { + this.userDefinedFunctions = userDefinedFunctions; + return this; + } + + /** + * Sets the action that should occur if the destination table already exists. 
+ * + * @see + * Write Disposition + */ + Builder setWriteDisposition(JobInfo.WriteDisposition writeDisposition) { + this.writeDisposition = writeDisposition; + return this; + } + + /** + * Sets the number of rows in the buffer (a blocking queue) that query results are consumed + * from. + * + * @param numBufferedRows numBufferedRows or {@code null} for none + */ + Builder setNumBufferedRows(int numBufferedRows) { + this.numBufferedRows = numBufferedRows; + return this; + } + + /** + * Sets the maximum records per page to be used for pagination. This is used as an input for the + * tabledata.list and jobs.getQueryResults RPC calls + * + * @param maxResultPerPage + */ + Builder setMaxResultPerPage(long maxResultPerPage) { + this.maxResultPerPage = maxResultPerPage; + return this; + } + + Builder setDefaultDataset(DatasetId defaultDataset) { + this.defaultDataset = defaultDataset; + return this; + } + + Builder setUseWriteAPI(boolean useWriteAPI) { + this.useWriteAPI = useWriteAPI; + return this; + } + + Builder setWriteAPIActivationRowCount(int writeAPIActivationRowCount) { + this.writeAPIActivationRowCount = writeAPIActivationRowCount; + return this; + } + + Builder setWriteAPIAppendRowCount(int writeAPIAppendRowCount) { + this.writeAPIAppendRowCount = writeAPIAppendRowCount; + return this; + } + + Builder setMaxBytesBilled(long maxBytesBilled) { + this.maxBytesBilled = maxBytesBilled; + return this; + } + + Builder setLabels(Map labels) { + this.labels = labels; + return this; + } + + /** Creates a {@code BigQuerySettings} object. 
*/ + BigQuerySettings build() { + return new BigQuerySettings(this); + } + } +} diff --git a/google-cloud-bigquery-jdbc/src/main/java/com/google/cloud/bigquery/jdbc/BigQuerySqlTypeConverter.java b/google-cloud-bigquery-jdbc/src/main/java/com/google/cloud/bigquery/jdbc/BigQuerySqlTypeConverter.java new file mode 100644 index 000000000..cfdc64a14 --- /dev/null +++ b/google-cloud-bigquery-jdbc/src/main/java/com/google/cloud/bigquery/jdbc/BigQuerySqlTypeConverter.java @@ -0,0 +1,81 @@ +/* + * Copyright 2024 Google LLC + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package com.google.cloud.bigquery.jdbc; + +import com.google.cloud.bigquery.JobStatistics.QueryStatistics.StatementType; +import com.google.cloud.bigquery.jdbc.BigQueryStatement.SqlType; + +class BigQuerySqlTypeConverter { + + static SqlType getSqlTypeFromStatementType(StatementType statementType) { + switch (statementType.toString()) { + case "SELECT": + return SqlType.SELECT; + + case "INSERT": + case "UPDATE": + case "DELETE": + case "MERGE": + return SqlType.DML; + case "CALL": + return SqlType.DML_EXTRA; + + case "CREATE_TABLE": + case "CREATE_TABLE_AS_SELECT": + case "CREATE_VIEW": + case "CREATE_MODEL": + case "CREATE_MATERIALIZED_VIEW": + case "CREATE_FUNCTION": + case "CREATE_TABLE_FUNCTION": + case "CREATE_PROCEDURE": + case "CREATE_ROW_ACCESS_POLICY": + case "CREATE_SCHEMA": + case "CREATE_SNAPSHOT_TABLE": + case "CREATE_SEARCH_INDEX": + case "DROP_TABLE": + case "DROP_EXTERNAL_TABLE": + case "DROP_VIEW": + case "DROP_MODEL": + case "DROP_MATERIALIZED_VIEW": + case "DROP_FUNCTION": + case "DROP_TABLE_FUNCTION": + case "DROP_PROCEDURE": + case "DROP_SEARCH_INDEX": + case "DROP_SCHEMA": + case "DROP_SNAPSHOT_TABLE": + case "DROP_ROW_ACCESS_POLICY": + case "ALTER_TABLE": + case "ALTER_VIEW": + case "ALTER_MATERIALIZED_VIEW": + case "ALTER_SCHEMA": + case "TRUNCATE_TABLE": + case "CREATE_EXTERNAL_TABLE": + return SqlType.DDL; + case "SCRIPT": + return SqlType.SCRIPT; + case "BEGIN_TRANSACTION": + case "COMMIT_TRANSACTION": + case "ROLLBACK_TRANSACTION": + return SqlType.TCL; + case "EXPORT_DATA": + case "EXPORT_MODEL": + case "LOAD_DATA": + default: + return SqlType.OTHER; + } + } +} diff --git a/google-cloud-bigquery-jdbc/src/main/java/com/google/cloud/bigquery/jdbc/BigQueryStatement.java b/google-cloud-bigquery-jdbc/src/main/java/com/google/cloud/bigquery/jdbc/BigQueryStatement.java new file mode 100644 index 000000000..0da086888 --- /dev/null +++ b/google-cloud-bigquery-jdbc/src/main/java/com/google/cloud/bigquery/jdbc/BigQueryStatement.java @@ 
-0,0 +1,1522 @@ +/* + * Copyright 2023 Google LLC + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package com.google.cloud.bigquery.jdbc; + +import com.google.api.core.InternalApi; +import com.google.api.gax.paging.Page; +import com.google.cloud.Tuple; +import com.google.cloud.bigquery.BigQuery; +import com.google.cloud.bigquery.BigQuery.JobListOption; +import com.google.cloud.bigquery.BigQuery.QueryResultsOption; +import com.google.cloud.bigquery.BigQuery.TableDataListOption; +import com.google.cloud.bigquery.BigQueryException; +import com.google.cloud.bigquery.Dataset; +import com.google.cloud.bigquery.DatasetId; +import com.google.cloud.bigquery.DatasetInfo; +import com.google.cloud.bigquery.EncryptionConfiguration; +import com.google.cloud.bigquery.FieldValueList; +import com.google.cloud.bigquery.Job; +import com.google.cloud.bigquery.JobConfiguration; +import com.google.cloud.bigquery.JobId; +import com.google.cloud.bigquery.JobInfo; +import com.google.cloud.bigquery.JobStatistics; +import com.google.cloud.bigquery.JobStatistics.QueryStatistics; +import com.google.cloud.bigquery.JobStatistics.QueryStatistics.StatementType; +import com.google.cloud.bigquery.JobStatistics.ScriptStatistics; +import com.google.cloud.bigquery.QueryJobConfiguration; +import com.google.cloud.bigquery.Schema; +import com.google.cloud.bigquery.TableId; +import com.google.cloud.bigquery.TableResult; +import com.google.cloud.bigquery.exception.BigQueryJdbcException; 
+import com.google.cloud.bigquery.exception.BigQueryJdbcRuntimeException; +import com.google.cloud.bigquery.exception.BigQueryJdbcSqlFeatureNotSupportedException; +import com.google.cloud.bigquery.exception.BigQueryJdbcSqlSyntaxErrorException; +import com.google.cloud.bigquery.storage.v1.ArrowRecordBatch; +import com.google.cloud.bigquery.storage.v1.ArrowSchema; +import com.google.cloud.bigquery.storage.v1.BigQueryReadClient; +import com.google.cloud.bigquery.storage.v1.CreateReadSessionRequest; +import com.google.cloud.bigquery.storage.v1.DataFormat; +import com.google.cloud.bigquery.storage.v1.ReadRowsRequest; +import com.google.cloud.bigquery.storage.v1.ReadRowsResponse; +import com.google.cloud.bigquery.storage.v1.ReadSession; +import com.google.common.annotations.VisibleForTesting; +import com.google.common.collect.ImmutableList; +import com.google.common.collect.Iterators; +import java.lang.ref.ReferenceQueue; +import java.sql.Connection; +import java.sql.ResultSet; +import java.sql.SQLException; +import java.sql.SQLWarning; +import java.time.Instant; +import java.util.ArrayList; +import java.util.Collections; +import java.util.HashMap; +import java.util.Iterator; +import java.util.List; +import java.util.Map; +import java.util.Random; +import java.util.UUID; +import java.util.concurrent.BlockingQueue; +import java.util.concurrent.ExecutorService; +import java.util.concurrent.Executors; +import java.util.concurrent.LinkedBlockingDeque; +import java.util.concurrent.ThreadFactory; +import java.util.logging.Level; + +/** + * An implementation of {@link java.sql.Statement} for executing BigQuery SQL statement and + * returning the results it produces. 
+ * + * @see BigQueryConnection#createStatement + * @see ResultSet + */ +public class BigQueryStatement extends BigQueryNoOpsStatement { + + // TODO (obada): Update this after benchmarking + private static final int MAX_PROCESS_QUERY_THREADS_CNT = 50; + protected static ExecutorService queryTaskExecutor = + Executors.newFixedThreadPool(MAX_PROCESS_QUERY_THREADS_CNT); + private final BigQueryJdbcCustomLogger LOG = new BigQueryJdbcCustomLogger(this.toString()); + private static final String DEFAULT_DATASET_NAME = "_google_jdbc"; + private static final String DEFAULT_TABLE_NAME = "temp_table_"; + private static final String JDBC_JOB_PREFIX = "google-jdbc-"; + protected ResultSet currentResultSet; + protected long currentUpdateCount = -1; + protected List jobIds = new ArrayList<>(); + protected JobIdWrapper parentJobId = null; + protected int currentJobIdIndex = -1; + protected List batchQueries = new ArrayList<>(); + protected BigQueryConnection connection; + protected int maxFieldSize = 0; + protected int maxRows = 0; + protected boolean isClosed = false; + protected boolean closeOnCompletion = false; + protected Object cancelLock = new Object(); + protected boolean isCanceled = false; + protected boolean poolable; + protected int queryTimeout = 0; + protected SQLWarning warning; + private int fetchDirection = ResultSet.FETCH_FORWARD; + private int fetchSize; + private String scriptQuery; + private Map extraLabels = new HashMap<>(); + + private BigQueryReadClient bigQueryReadClient = null; + private final BigQuery bigQuery; + + final BigQuerySettings querySettings; + + private BlockingQueue bigQueryFieldValueListWrapperBlockingQueue; + + private BlockingQueue arrowBatchWrapperBlockingQueue; + + // Variables Required for the ReferenceQueue implementation + static ReferenceQueue referenceQueueArrowRs = new ReferenceQueue<>(); + static ReferenceQueue referenceQueueJsonRs = new ReferenceQueue<>(); + static List arrowResultSetFinalizers = + new ArrayList<>(); + static 
List jsonResultSetFinalizers = + new ArrayList<>(); + + private static final ThreadFactory JDBC_THREAD_FACTORY = + new BigQueryThreadFactory("BigQuery-Thread-"); + + static { + BigQueryDaemonPollingTask.startGcDaemonTask( + referenceQueueArrowRs, + referenceQueueJsonRs, + arrowResultSetFinalizers, + jsonResultSetFinalizers); + } + + @VisibleForTesting + public BigQueryStatement(BigQueryConnection connection) { + this.connection = connection; + this.bigQuery = connection.getBigQuery(); + this.querySettings = generateBigQuerySettings(); + } + + private void resetStatementFields() { + this.isCanceled = false; + this.scriptQuery = null; + this.parentJobId = null; + this.currentJobIdIndex = -1; + this.currentUpdateCount = -1; + } + + private BigQuerySettings generateBigQuerySettings() { + LOG.finest("++enter++"); + + BigQuerySettings.Builder querySettings = BigQuerySettings.newBuilder(); + DatasetId defaultDataset = this.connection.getDefaultDataset(); + if (defaultDataset != null) { + querySettings.setDefaultDataset(this.connection.defaultDataset); + } + Long maxBytesBilled = this.connection.getMaxBytesBilled(); + if (maxBytesBilled > 0) { + querySettings.setMaxBytesBilled(maxBytesBilled); + } + if (this.connection.getLabels() != null && !this.connection.getLabels().isEmpty()) { + querySettings.setLabels(this.connection.getLabels()); + } + querySettings.setMaxResultPerPage(this.connection.getMaxResults()); + querySettings.setUseReadAPI(this.connection.isEnableHighThroughputAPI()); + querySettings.setHighThroughputMinTableSize(this.connection.getHighThroughputMinTableSize()); + querySettings.setHighThroughputActivationRatio( + this.connection.getHighThroughputActivationRatio()); + querySettings.setUnsupportedHTAPIFallback(this.connection.isUnsupportedHTAPIFallback()); + querySettings.setUseQueryCache(this.connection.isUseQueryCache()); + querySettings.setQueryDialect(this.connection.getQueryDialect()); + querySettings.setKmsKeyName(this.connection.getKmsKeyName()); + 
querySettings.setQueryProperties(this.connection.getQueryProperties()); + querySettings.setAllowLargeResults(this.connection.isAllowLargeResults()); + if (this.connection.getJobTimeoutInSeconds() > 0) { + querySettings.setJobTimeoutMs(this.connection.getJobTimeoutInSeconds() * 1000L); + } + if (this.connection.getDestinationTable() != null) { + querySettings.setDestinationTable(this.connection.getDestinationTable()); + } + if (this.connection.getDestinationDataset() != null) { + querySettings.setDestinationDataset(this.connection.getDestinationDataset()); + querySettings.setDestinationDatasetExpirationTime( + this.connection.getDestinationDatasetExpirationTime()); + } + // only create session if enable session and session info is null + if (this.connection.enableSession) { + if (this.connection.sessionInfoConnectionProperty == null) { + querySettings.setEnableSession(this.connection.isSessionEnabled()); + } else { + querySettings.setSessionInfoConnectionProperty( + this.connection.getSessionInfoConnectionProperty()); + } + } + querySettings.setUseWriteAPI(this.connection.isEnableWriteAPI()); + querySettings.setWriteAPIActivationRowCount(this.connection.getWriteAPIActivationRowCount()); + querySettings.setWriteAPIAppendRowCount(this.connection.getWriteAPIAppendRowCount()); + + return querySettings.build(); + } + + /** + * This method executes a BigQuery SQL query, return a single {@code ResultSet} object. + * + *

    Example of running a query: + * + *

    +   *  Connection connection = DriverManager.getConnection(CONNECTION_URL);
    +   *  Statement bigQueryStatement = bigQueryConnection.createStatement();
    +   *  ResultSet result = bigQueryStatement.executeQuery(QUERY);
    +   * 
    + * + * @param sql BigQuery SQL query + * @return {@code ResultSet} containing the output of the query + * @throws SQLException if a BigQuery access error occurs, this method is called on a closed + * {@code Statement}, the given SQL statement produces multiple or no result sets. + * @see java.sql.Statement#executeQuery(String) + */ + @Override + public ResultSet executeQuery(String sql) throws SQLException { + // TODO: write method to return state variables to original state. + LOG.finest("++enter++"); + logQueryExecutionStart(sql); + try { + QueryJobConfiguration jobConfiguration = + setDestinationDatasetAndTableInJobConfig(getJobConfig(sql).build()); + runQuery(sql, jobConfiguration); + } catch (InterruptedException ex) { + throw new BigQueryJdbcException(ex); + } + + if (!isSingularResultSet()) { + throw new BigQueryJdbcException( + "Query returned more than one or didn't return any ResultSet."); + } + // This contains all the other assertions spec required on this method + return getCurrentResultSet(); + } + + @Override + public long executeLargeUpdate(String sql) throws SQLException { + LOG.finest("++enter++"); + logQueryExecutionStart(sql); + try { + QueryJobConfiguration.Builder jobConfiguration = getJobConfig(sql); + runQuery(sql, jobConfiguration.build()); + } catch (InterruptedException ex) { + throw new BigQueryJdbcRuntimeException(ex); + } + if (this.currentUpdateCount == -1) { + throw new BigQueryJdbcException( + "Update query expected to return affected row count. 
Double check query type."); + } + return this.currentUpdateCount; + } + + @Override + public int executeUpdate(String sql) throws SQLException { + LOG.finest("++enter++"); + return checkUpdateCount(executeLargeUpdate(sql)); + } + + int checkUpdateCount(long updateCount) { + LOG.finest("++enter++"); + if (updateCount > Integer.MAX_VALUE) { + LOG.warning("Warning: Table update exceeded maximum limit!"); + // Update count is -2 if update is successful but the update count exceeds Integer.MAX_VALUE + return -2; + } + return (int) updateCount; + } + + @Override + public boolean execute(String sql) throws SQLException { + LOG.finest("++enter++"); + logQueryExecutionStart(sql); + try { + QueryJobConfiguration jobConfiguration = getJobConfig(sql).build(); + // If Large Results are enabled, ensure query type is SELECT + if (isLargeResultsEnabled() && getQueryType(jobConfiguration, null) == SqlType.SELECT) { + jobConfiguration = setDestinationDatasetAndTableInJobConfig(jobConfiguration); + } + runQuery(sql, jobConfiguration); + } catch (InterruptedException ex) { + throw new BigQueryJdbcRuntimeException(ex); + } + return getCurrentResultSet() != null; + } + + StatementType getStatementType(QueryJobConfiguration queryJobConfiguration) throws SQLException { + LOG.finest("++enter++"); + QueryJobConfiguration dryRunJobConfiguration = + queryJobConfiguration.toBuilder().setDryRun(true).build(); + Job job; + try { + job = bigQuery.create(JobInfo.of(dryRunJobConfiguration)); + } catch (BigQueryException ex) { + if (ex.getMessage().contains("Syntax error")) { + throw new BigQueryJdbcSqlSyntaxErrorException(ex); + } + throw new BigQueryJdbcException(ex); + } + QueryStatistics statistics = job.getStatistics(); + return statistics.getStatementType(); + } + + SqlType getQueryType(QueryJobConfiguration jobConfiguration, StatementType statementType) + throws SQLException { + LOG.finest("++enter++"); + if (statementType == null) { + statementType = getStatementType(jobConfiguration); + } + 
+ SqlType sqlType = BigQuerySqlTypeConverter.getSqlTypeFromStatementType(statementType); + LOG.fine( + String.format( + "Query: %s, Statement Type: %s, SQL Type: %s", + jobConfiguration.getQuery(), statementType, sqlType)); + return sqlType; + } + + QueryStatistics getQueryStatistics(QueryJobConfiguration queryJobConfiguration) + throws BigQueryJdbcSqlSyntaxErrorException, BigQueryJdbcException { + LOG.finest("++enter++"); + QueryJobConfiguration dryRunJobConfiguration = + queryJobConfiguration.toBuilder().setDryRun(true).build(); + Job job; + try { + job = this.bigQuery.create(JobInfo.of(dryRunJobConfiguration)); + return job.getStatistics(); + } catch (BigQueryException ex) { + if (ex.getMessage().contains("Syntax error")) { + throw new BigQueryJdbcSqlSyntaxErrorException(ex); + } + throw new BigQueryJdbcException(ex); + } + } + + /** + * Releases this Statement's BigQuery and JDBC resources immediately instead of waiting for this + * to happen when it is automatically closed. These resources include the {@code ResultSet} + * object, batch queries, job IDs, and BigQuery connection
    + * + *

    Calling the method close on a Statement object that is already closed has no effect. + * + * @throws SQLException if a BigQuery access error occurs + */ + @Override + public void close() throws SQLException { + LOG.fine(String.format("Closing Statement %s.", this)); + if (isClosed()) { + return; + } + + boolean cancelSucceeded = false; + try { + cancel(); // This attempts to cancel jobs and calls closeStatementResources() + cancelSucceeded = true; + } catch (SQLException e) { + LOG.warning(String.format("Failed to cancel statement during close().", e)); + } finally { + if (!cancelSucceeded) { + closeStatementResources(); + } + this.connection = null; + this.isClosed = true; + } + } + + @Override + public int getMaxFieldSize() { + return this.maxFieldSize; + } + + @Override + public void setMaxFieldSize(int max) { + this.maxFieldSize = max; + } + + @Override + public int getMaxRows() { + return this.maxRows; + } + + @Override + public void setMaxRows(int max) { + this.maxRows = max; + } + + @Override + public void setEscapeProcessing(boolean enable) { + // TODO: verify how to implement this method + } + + @Override + public int getQueryTimeout() { + return this.queryTimeout; + } + + @Override + public void setQueryTimeout(int seconds) { + if (seconds < 0) { + throw new IllegalArgumentException("Query Timeout should be >= 0."); + } + this.queryTimeout = seconds; + } + + /** + * Cancels this {@code Statement} object, the running threads, and BigQuery jobs. 
+ * + * @throws SQLException if a BigQuery access error occurs or this method is called on a closed + * {@code Statement} + */ + @Override + public void cancel() throws SQLException { + LOG.finest(String.format("Statement %s cancelled", this)); + synchronized (cancelLock) { + this.isCanceled = true; + for (JobId jobId : this.jobIds) { + try { + this.bigQuery.cancel(jobId); + LOG.info("Job " + jobId + "cancelled."); + } catch (BigQueryException e) { + if (e.getMessage() != null + && (e.getMessage().contains("Job is already in state DONE") + || e.getMessage().contains("Error: 3848323"))) { + LOG.warning("Attempted to cancel a job that was already done: " + jobId); + } else { + throw new BigQueryJdbcException(e); + } + } + } + jobIds.clear(); + } + // If a ResultSet exists, then it will be closed as well, closing the + // ownedThreads + closeStatementResources(); + } + + @Override + public SQLWarning getWarnings() { + return this.warning; + } + + @Override + public void clearWarnings() { + this.warning = null; + } + + @Override + public ResultSet getResultSet() { + return this.currentResultSet; + } + + @VisibleForTesting + void setUpdateCount(long count) { + this.currentUpdateCount = count; + } + + @Override + public int getUpdateCount() { + return (int) this.currentUpdateCount; + } + + @Override + public long getLargeUpdateCount() { + return this.currentUpdateCount; + } + + @Override + public boolean getMoreResults() throws SQLException { + return getMoreResults(CLOSE_CURRENT_RESULT); + } + + private void closeStatementResources() throws SQLException { + LOG.finest("++enter++"); + if (this.currentResultSet != null) { + // If Statement has 'CloseOnCompletion' set, resultset might + // call into the same function; In order to avoid stack overflow + // we will cleanup resultset before calling into 'close'. 
+ ResultSet tmp = this.currentResultSet; + this.currentResultSet = null; + tmp.close(); + } + this.batchQueries.clear(); + this.currentUpdateCount = -1; + this.currentJobIdIndex = -1; + if (this.connection != null) { + if (this.connection.isTransactionStarted()) { + this.connection.rollback(); + } + this.connection.removeStatement(this); + } + } + + private boolean isSingularResultSet() { + return this.currentResultSet != null + && (this.parentJobId == null || this.parentJobId.getJobs().size() == 1); + } + + private String generateJobId() { + return JDBC_JOB_PREFIX + UUID.randomUUID().toString(); + } + + private class ExecuteResult { + public final TableResult tableResult; + public final Job job; + + ExecuteResult(TableResult tableResult, Job job) { + this.tableResult = tableResult; + this.job = job; + } + } + + @InternalApi + ExecuteResult executeJob(QueryJobConfiguration jobConfiguration) + throws InterruptedException, BigQueryException, BigQueryJdbcException { + LOG.finest("++enter++"); + Job job = null; + // Location is not properly passed from the connection, + // so we need to explicitly set it; + // Do not set custom JobId here or it will disable jobless queries. + JobId jobId = JobId.newBuilder().setLocation(connection.getLocation()).build(); + if (connection.getUseStatelessQueryMode()) { + Object result = bigQuery.queryWithTimeout(jobConfiguration, jobId, null); + if (result instanceof TableResult) { + TableResult tableResult = (TableResult) result; + if (tableResult.getJobId() != null) { + return new ExecuteResult(tableResult, bigQuery.getJob(tableResult.getJobId())); + } + return new ExecuteResult((TableResult) result, null); + } + + if (result instanceof Job) { + job = (Job) result; + } else { + throw new BigQueryJdbcException("Unexpected result type from queryWithTimeout"); + } + } else { + // Update jobId with custom JobId if jobless query is disabled. 
+ jobId = jobId.toBuilder().setJob(generateJobId()).build(); + JobInfo jobInfo = JobInfo.newBuilder(jobConfiguration).setJobId(jobId).build(); + job = bigQuery.create(jobInfo); + } + + if (job == null) { + throw new BigQueryJdbcException("Failed to create BQ Job."); + } + synchronized (cancelLock) { + if (isCanceled) { + job.cancel(); + throw new BigQueryJdbcException("Query was cancelled."); + } + jobId = job.getJobId(); + jobIds.add(jobId); + } + LOG.info("Query submitted with Job ID: " + job.getJobId().getJob()); + TableResult result = + job.getQueryResults(QueryResultsOption.pageSize(querySettings.getMaxResultPerPage())); + synchronized (cancelLock) { + jobIds.remove(jobId); + } + return new ExecuteResult(result, job); + } + + /** + * Execute the SQL script and sets the reference of the underlying job, passing null querySettings + * will result in the FastQueryPath + */ + @InternalApi + void runQuery(String query, QueryJobConfiguration jobConfiguration) + throws SQLException, InterruptedException { + LOG.finest("++enter++"); + LOG.fine("Run Query started"); + + if (queryTimeout > 0) { + jobConfiguration = + jobConfiguration.toBuilder().setJobTimeoutMs(Long.valueOf(queryTimeout) * 1000).build(); + } + + try { + resetStatementFields(); + ExecuteResult executeResult = executeJob(jobConfiguration); + StatementType statementType = + executeResult.job == null + ? 
getStatementType(jobConfiguration) + : ((QueryStatistics) executeResult.job.getStatistics()).getStatementType(); + SqlType queryType = getQueryType(jobConfiguration, statementType); + handleQueryResult(query, executeResult.tableResult, queryType); + } catch (InterruptedException ex) { + throw new BigQueryJdbcRuntimeException(ex); + } catch (BigQueryException ex) { + if (ex.getMessage().contains("Syntax error")) { + throw new BigQueryJdbcSqlSyntaxErrorException(ex); + } + throw new BigQueryJdbcException(ex); + } + } + + private boolean isLargeResultsEnabled() { + String destinationTable = this.querySettings.getDestinationTable(); + String destinationDataset = this.querySettings.getDestinationDataset(); + return destinationDataset != null || destinationTable != null; + } + + private QueryJobConfiguration setDestinationDatasetAndTableInJobConfig( + QueryJobConfiguration jobConfiguration) { + String destinationTable = this.querySettings.getDestinationTable(); + String destinationDataset = this.querySettings.getDestinationDataset(); + if (destinationDataset != null || destinationTable != null) { + if (destinationDataset != null) { + checkIfDatasetExistElseCreate(destinationDataset); + } + if (jobConfiguration.useLegacySql() && destinationDataset == null) { + checkIfDatasetExistElseCreate(DEFAULT_DATASET_NAME); + destinationDataset = DEFAULT_DATASET_NAME; + } + if (destinationTable == null) { + destinationTable = getDefaultDestinationTable(); + } + return jobConfiguration.toBuilder() + .setAllowLargeResults(this.querySettings.getAllowLargeResults()) + .setDestinationTable(TableId.of(destinationDataset, destinationTable)) + .setCreateDisposition(JobInfo.CreateDisposition.CREATE_IF_NEEDED) + .setWriteDisposition(JobInfo.WriteDisposition.WRITE_TRUNCATE) + .build(); + } + return jobConfiguration; + } + + Job getNextJob() { + while (this.currentJobIdIndex + 1 < this.parentJobId.getJobs().size()) { + this.currentJobIdIndex += 1; + Job currentJob = 
this.parentJobId.getJobs().get(this.currentJobIdIndex); + QueryStatistics queryStatistics = currentJob.getStatistics(); + ScriptStatistics scriptStatistics = queryStatistics.getScriptStatistics(); + // EXPRESSION jobs are not relevant for customer query and can be + // created by BQ depending on various conditions. We will just ignore + // them when presenting results. + if (!"expression".equalsIgnoreCase(scriptStatistics.getEvaluationKind())) { + return currentJob; + } + } + return null; + } + + void handleQueryResult(String query, TableResult results, SqlType queryType) + throws SQLException, InterruptedException { + LOG.finest("++enter++"); + switch (queryType) { + case SELECT: + processQueryResponse(query, results); + break; + case DML: + case DML_EXTRA: + try { + Job completedJob = this.bigQuery.getJob(results.getJobId()).waitFor(); + JobStatistics.QueryStatistics statistics = completedJob.getStatistics(); + updateAffectedRowCount(statistics.getNumDmlAffectedRows()); + } catch (InterruptedException ex) { + throw new BigQueryJdbcRuntimeException(ex); + } catch (NullPointerException ex) { + throw new BigQueryJdbcException(ex); + } + break; + case TCL: + case DDL: + updateAffectedRowCount(results.getTotalRows()); + break; + case SCRIPT: + try { + Page childJobs = + this.bigQuery.listJobs(JobListOption.parentJobId(results.getJobId().getJob())); + + ArrayList childJobList = new ArrayList<>(); + Iterator iterableJobs = childJobs.iterateAll().iterator(); + iterableJobs.forEachRemaining(childJobList::add); + Collections.reverse(childJobList); + + this.scriptQuery = query; + this.parentJobId = new JobIdWrapper(results.getJobId(), results, childJobList); + this.currentJobIdIndex = -1; + + Job currentJob = getNextJob(); + if (currentJob == null) { + return; + } + StatementType statementType = + ((QueryStatistics) (currentJob.getStatistics())).getStatementType(); + SqlType sqlType = getQueryType(currentJob.getConfiguration(), statementType); + handleQueryResult(query, 
currentJob.getQueryResults(), sqlType); + } catch (NullPointerException ex) { + throw new BigQueryJdbcException(ex); + } + break; + case OTHER: + throw new BigQueryJdbcException(String.format("Unexpected value: " + queryType)); + } + } + + private void updateAffectedRowCount(Long count) throws SQLException { + // TODO(neenu): check if this need to be closed vs removed) + if (this.currentResultSet != null) { + try { + this.currentResultSet.close(); + this.currentResultSet = null; + } catch (SQLException ex) { + throw new BigQueryJdbcException(ex); + } + } + this.currentUpdateCount = count; + } + + @InternalApi + BigQueryReadClient getBigQueryReadClient() { + if (this.bigQueryReadClient == null) { + this.bigQueryReadClient = this.connection.getBigQueryReadClient(); + } + return this.bigQueryReadClient; + } + + @InternalApi + ReadSession getReadSession(CreateReadSessionRequest readSessionRequest) { + LOG.finest("++enter++"); + return getBigQueryReadClient().createReadSession(readSessionRequest); + } + + @InternalApi + ArrowSchema getArrowSchema(ReadSession readSession) { + return readSession.getArrowSchema(); + } + + /** Uses Bigquery Storage Read API and returns the stream as ResultSet */ + @InternalApi + ResultSet processArrowResultSet(TableResult results) throws SQLException { + LOG.finest("++enter++"); + + // set the resultset + long totalRows = (getMaxRows() > 0) ? 
getMaxRows() : results.getTotalRows(); + JobId currentJobId = results.getJobId(); + TableId destinationTable = getDestinationTable(currentJobId); + Schema schema = results.getSchema(); + try { + String parent = String.format("projects/%s", destinationTable.getProject()); + String srcTable = + String.format( + "projects/%s/datasets/%s/tables/%s", + destinationTable.getProject(), + destinationTable.getDataset(), + destinationTable.getTable()); + + // Read all the columns if the source table (temp table) and stream the data back in Arrow + // format + ReadSession.Builder sessionBuilder = + ReadSession.newBuilder().setTable(srcTable).setDataFormat(DataFormat.ARROW); + + CreateReadSessionRequest.Builder builder = + CreateReadSessionRequest.newBuilder() + .setParent(parent) + .setReadSession(sessionBuilder) + .setMaxStreamCount(1); + + ReadSession readSession = getReadSession(builder.build()); + this.arrowBatchWrapperBlockingQueue = new LinkedBlockingDeque<>(getBufferSize()); + // deserialize and populate the buffer async, so that the client isn't blocked + Thread populateBufferWorker = + populateArrowBufferedQueue( + readSession, this.arrowBatchWrapperBlockingQueue, this.bigQueryReadClient); + + BigQueryArrowResultSet arrowResultSet = + BigQueryArrowResultSet.of( + schema, + getArrowSchema(readSession), + totalRows, + this, + this.arrowBatchWrapperBlockingQueue, + populateBufferWorker, + this.bigQuery); + arrowResultSetFinalizers.add( + new BigQueryResultSetFinalizers.ArrowResultSetFinalizer( + arrowResultSet, referenceQueueArrowRs, populateBufferWorker)); + arrowResultSet.setJobId(currentJobId); + return arrowResultSet; + + } catch (Exception ex) { + throw new BigQueryJdbcException(ex.getMessage(), ex); + } + } + + /** Asynchronously reads results and populates an arrow record queue */ + @InternalApi + Thread populateArrowBufferedQueue( + ReadSession readSession, + BlockingQueue arrowBatchWrapperBlockingQueue, + BigQueryReadClient bqReadClient) { + 
LOG.finest("++enter++"); + + Runnable arrowStreamProcessor = + () -> { + try { + // Use the first stream to perform reading. + String streamName = readSession.getStreams(0).getName(); + ReadRowsRequest readRowsRequest = + ReadRowsRequest.newBuilder().setReadStream(streamName).build(); + + // Process each block of rows as they arrive and decode using our simple row reader. + com.google.api.gax.rpc.ServerStream stream = + bqReadClient.readRowsCallable().call(readRowsRequest); + for (ReadRowsResponse response : stream) { + if (Thread.currentThread().isInterrupted() + || queryTaskExecutor.isShutdown()) { // do not process and shutdown + break; + } + + ArrowRecordBatch currentBatch = response.getArrowRecordBatch(); + arrowBatchWrapperBlockingQueue.put(BigQueryArrowBatchWrapper.of(currentBatch)); + } + + } catch (RuntimeException | InterruptedException e) { + LOG.log( + Level.WARNING, + "\n" + Thread.currentThread().getName() + " Interrupted @ arrowStreamProcessor", + e); + } finally { // logic needed for graceful shutdown + // marking end of stream + try { + arrowBatchWrapperBlockingQueue.put( + BigQueryArrowBatchWrapper.of(null, true)); // mark the end of the stream + } catch (InterruptedException e) { + LOG.log( + Level.WARNING, + "\n" + Thread.currentThread().getName() + " Interrupted @ markLast", + e); + } + } + }; + + Thread populateBufferWorker = JDBC_THREAD_FACTORY.newThread(arrowStreamProcessor); + populateBufferWorker.start(); + return populateBufferWorker; + } + + /** Executes SQL query using either fast query path or read API */ + void processQueryResponse(String query, TableResult results) throws SQLException { + LOG.finest( + String.format( + "API call completed{Query=%s, Parent Job ID=%s, Total rows=%s} ", + query, results.getJobId(), results.getTotalRows())); + JobId currentJobId = results.getJobId(); + if (currentJobId == null) { + LOG.fine("Standard API with Stateless query used."); + this.currentResultSet = processJsonResultSet(results); + } else if 
(useReadAPI(results)) { + LOG.fine("HighThroughputAPI used."); + LOG.info("HTAPI job ID: " + currentJobId.getJob()); + this.currentResultSet = processArrowResultSet(results); + } else { + // read API cannot be used. + LOG.fine("Standard API used."); + this.currentResultSet = processJsonResultSet(results); + } + this.currentUpdateCount = -1; + } + + // The read Ratio should be met + // AND the User must not have disabled the Read API + @VisibleForTesting + boolean useReadAPI(TableResult results) throws BigQueryJdbcSqlFeatureNotSupportedException { + LOG.finest("++enter++"); + if (!meetsReadRatio(results)) { + return false; + } + LOG.fine("Read API threshold is met."); + return querySettings.getUseReadAPI(); + } + + private boolean meetsReadRatio(TableResult results) { + LOG.finest("++enter++"); + long totalRows = results.getTotalRows(); + + if (totalRows == 0 || totalRows < querySettings.getHighThroughputMinTableSize()) { + return false; + } + + // TODO(BQ Team): TableResult doesnt expose the number of records in the current page, hence the + // below log iterates and counts. This is inefficient and we may eventually want to expose + // PageSize with TableResults + // TODO(Obada): Scope for performance optimization. + int pageSize = Iterators.size(results.getValues().iterator()); + return totalRows / pageSize > querySettings.getHighThroughputActivationRatio(); + } + + BigQueryJsonResultSet processJsonResultSet(TableResult results) { + String jobIdOrQueryId = + results.getJobId() == null ? results.getQueryId() : results.getJobId().getJob(); + LOG.info(String.format("BigQuery Job %s completed. Fetching results.", jobIdOrQueryId)); + List threadList = new ArrayList(); + + Schema schema = results.getSchema(); + long totalRows = (getMaxRows() > 0) ? 
getMaxRows() : results.getTotalRows(); + this.bigQueryFieldValueListWrapperBlockingQueue = new LinkedBlockingDeque<>(getBufferSize()); + BlockingQueue> rpcResponseQueue = + new LinkedBlockingDeque<>(getPageCacheSize(getBufferSize(), schema)); + + JobId jobId = results.getJobId(); + if (jobId != null) { + // Thread to make rpc calls to fetch data from the server + Thread nextPageWorker = + runNextPageTaskAsync(results, results.getNextPageToken(), jobId, rpcResponseQueue); + threadList.add(nextPageWorker); + } else { + try { + populateFirstPage(results, rpcResponseQueue); + rpcResponseQueue.put(Tuple.of(null, false)); + } catch (InterruptedException e) { + LOG.log( + Level.WARNING, + "\n" + + Thread.currentThread().getName() + + " Interrupted @ processJsonQueryResponseResults"); + } + } + + // Thread to parse data received from the server to client library objects + Thread populateBufferWorker = + parseAndPopulateRpcDataAsync( + schema, this.bigQueryFieldValueListWrapperBlockingQueue, rpcResponseQueue); + threadList.add(populateBufferWorker); + + Thread[] jsonWorkers = threadList.toArray(new Thread[0]); + + BigQueryJsonResultSet jsonResultSet = + BigQueryJsonResultSet.of( + schema, + totalRows, + this.bigQueryFieldValueListWrapperBlockingQueue, + this, + jsonWorkers, + this.bigQuery); + jsonResultSet.setJobId(jobId); + jsonResultSet.setQueryId(results.getQueryId()); + jsonResultSetFinalizers.add( + new BigQueryResultSetFinalizers.JsonResultSetFinalizer( + jsonResultSet, referenceQueueJsonRs, jsonWorkers)); + return jsonResultSet; + } + + void populateFirstPage( + TableResult result, BlockingQueue> rpcResponseQueue) { + LOG.finest("++enter++"); + // parse and put the first page in the pageCache before the other pages are parsed from the RPC + // calls + try { + // this is the first page which we have received. 
+ rpcResponseQueue.put(Tuple.of(result, true)); + } catch (InterruptedException e) { + LOG.log( + Level.WARNING, + "\n" + Thread.currentThread().getName() + " Interrupted @ populateFirstPage"); + } + } + + @Override + public void setFetchDirection(int direction) throws SQLException { + if (direction != ResultSet.FETCH_FORWARD) { + throw new BigQueryJdbcSqlFeatureNotSupportedException("Only FETCH_FORWARD is supported."); + } + this.fetchDirection = direction; + } + + @VisibleForTesting + Thread runNextPageTaskAsync( + TableResult result, + String firstPageToken, + JobId jobId, + BlockingQueue> rpcResponseQueue) { + LOG.finest("++enter++"); + // parse and put the first page in the pageCache before the other pages are parsed from the RPC + // calls + populateFirstPage(result, rpcResponseQueue); + + // This thread makes the RPC calls and paginates + Runnable nextPageTask = + () -> { + // results.getPageToken(); + String pageToken = firstPageToken; + TableId destinationTable = null; + if (firstPageToken != null) { + destinationTable = getDestinationTable(jobId); + } + try { + // paginate for non null token + while (pageToken != null) { + // do not process further pages and shutdown + if (Thread.currentThread().isInterrupted() || queryTaskExecutor.isShutdown()) { + LOG.log( + Level.WARNING, + "\n" + + Thread.currentThread().getName() + + " Interrupted @ runNextPageTaskAsync"); + break; + } + long startTime = System.nanoTime(); + TableResult results = + this.bigQuery.listTableData( + destinationTable, + TableDataListOption.pageSize(querySettings.getMaxResultPerPage()), + TableDataListOption.pageToken(pageToken)); + + pageToken = results.getNextPageToken(); + // this will be parsed asynchronously without blocking the current + // thread + rpcResponseQueue.put(Tuple.of(results, true)); + LOG.fine( + String.format( + "Fetched %d results from the server in %d ms.", + querySettings.getMaxResultPerPage(), + (int) ((System.nanoTime() - startTime) / 1000000))); + } + // this will 
stop the parseDataTask as well when the pagination + // completes + rpcResponseQueue.put(Tuple.of(null, false)); + } catch (Exception ex) { + throw new BigQueryJdbcRuntimeException(ex); + } + // We cannot do queryTaskExecutor.shutdownNow() here as populate buffer method may not + // have finished processing the records and even that will be interrupted + }; + + Thread nextPageWorker = JDBC_THREAD_FACTORY.newThread(nextPageTask); + nextPageWorker.start(); + return nextPageWorker; + } + + /** + * Takes TableResult from rpcResponseQueue and populates + * bigQueryFieldValueListWrapperBlockingQueue with FieldValueList + */ + @VisibleForTesting + Thread parseAndPopulateRpcDataAsync( + Schema schema, + BlockingQueue bigQueryFieldValueListWrapperBlockingQueue, + BlockingQueue> rpcResponseQueue) { + LOG.finest("++enter++"); + + Runnable populateBufferRunnable = + () -> { // producer thread populating the buffer + Iterable fieldValueLists; + // as we have to process the first page + boolean hasRows = true; + while (hasRows) { + try { + Tuple nextPageTuple = rpcResponseQueue.take(); + if (nextPageTuple.x() != null) { + fieldValueLists = nextPageTuple.x().getValues(); + } else { + fieldValueLists = null; + } + hasRows = nextPageTuple.y(); + + } catch (InterruptedException e) { + LOG.log(Level.WARNING, "\n" + Thread.currentThread().getName() + " Interrupted", e); + // Thread might get interrupted while calling the Cancel method, which is + // expected, so logging this instead of throwing the exception back + break; + } + + if (Thread.currentThread().isInterrupted() + || queryTaskExecutor.isShutdown() + || fieldValueLists == null) { + // do not process further pages and shutdown (outerloop) + break; + } + + long startTime = System.nanoTime(); + long results = 0; + for (FieldValueList fieldValueList : fieldValueLists) { + try { + if (Thread.currentThread().isInterrupted() || queryTaskExecutor.isShutdown()) { + // do not process further pages and shutdown (inner loop) + break; + } 
+ bigQueryFieldValueListWrapperBlockingQueue.put( + BigQueryFieldValueListWrapper.of(schema.getFields(), fieldValueList)); + results += 1; + } catch (InterruptedException ex) { + throw new BigQueryJdbcRuntimeException(ex); + } + } + LOG.fine( + String.format( + "Processed %d results in %d ms.", + results, (int) ((System.nanoTime() - startTime) / 1000000))); + } + try { + // All the pages has been processed, put this marker + bigQueryFieldValueListWrapperBlockingQueue.put( + BigQueryFieldValueListWrapper.of(null, null, true)); + } catch (InterruptedException e) { + LOG.log( + Level.WARNING, + "\n" + Thread.currentThread().getName() + " Interrupted @ populateBufferAsync", + e); + } + }; + + Thread populateBufferWorker = JDBC_THREAD_FACTORY.newThread(populateBufferRunnable); + populateBufferWorker.start(); + return populateBufferWorker; + } + + /** + * Helper method that determines the optimal number of caches pages to improve read performance + */ + @VisibleForTesting + int getPageCacheSize(Integer numBufferedRows, Schema schema) { + LOG.finest("++enter++"); + // Min number of pages to cache + final int MIN_CACHE_SIZE = 3; + // Min number of pages to cache + final int MAX_CACHE_SIZE = 20; + int numColumns = schema.getFields().size(); + int numCachedPages; + long numCachedRows = numBufferedRows == null ? 
0 : numBufferedRows.longValue(); + + // TODO: Further enhance this logic depending on customer feedback on memory consumption + if (numCachedRows > 10000) { + // the size of numBufferedRows is quite large and as per our tests we should be able to + // do enough even with low + numCachedPages = 2; + } + // too many fields are being read, setting the page size on the lower end + else if (numColumns > 15 && numCachedRows > 5000) { + numCachedPages = 3; + } + // low pagesize with fewer number of columns, we can cache more pages + else if (numCachedRows < 2000 && numColumns < 15) { + numCachedPages = 20; + } + // default - under 10K numCachedRows with any number of columns + else { + numCachedPages = 5; + } + return numCachedPages < MIN_CACHE_SIZE + ? MIN_CACHE_SIZE + : (Math.min(numCachedPages, MAX_CACHE_SIZE)); + } + + @Override + public int getFetchDirection() { + return this.fetchDirection; + } + + // TODO(neenu): Fix this value + // getNumBufferedRows in querySettings is always the same withDefaultValues - 20000 buffer size + // So, getBufferSize is also 20000. + private int getBufferSize() { + return (this.querySettings == null + || this.querySettings.getNumBufferedRows() == null + || this.querySettings.getNumBufferedRows() < 10000 + ? 
20000 + : Math.min(this.querySettings.getNumBufferedRows() * 2, 100000)); + } + + /** Returns the destinationTable from jobId by calling `jobs.get` API */ + TableId getDestinationTable(JobId jobId) { + Job job = this.bigQuery.getJob(jobId); + LOG.finest(String.format("Destination Table retrieved from %s", job.getJobId())); + return ((QueryJobConfiguration) job.getConfiguration()).getDestinationTable(); + } + + QueryJobConfiguration.Builder getJobConfig(String query) { + LOG.finest("++enter++"); + QueryJobConfiguration.Builder queryConfigBuilder = QueryJobConfiguration.newBuilder(query); + if (this.querySettings.getJobTimeoutMs() > 0) { + queryConfigBuilder.setJobTimeoutMs(this.querySettings.getJobTimeoutMs()); + } + if (this.querySettings.getMaxBytesBilled() > 0) { + queryConfigBuilder.setMaximumBytesBilled(this.querySettings.getMaxBytesBilled()); + } + if (this.querySettings.getDefaultDataset() != null) { + queryConfigBuilder.setDefaultDataset(this.querySettings.getDefaultDataset()); + } + Map mergedLabels = new HashMap<>(); + if (this.querySettings.getLabels() != null) { + mergedLabels.putAll(this.querySettings.getLabels()); + } + if (this.extraLabels != null) { + mergedLabels.putAll(this.extraLabels); + } + queryConfigBuilder.setLabels(mergedLabels); + queryConfigBuilder.setUseQueryCache(this.querySettings.getUseQueryCache()); + queryConfigBuilder.setMaxResults(this.querySettings.getMaxResultPerPage()); + if (this.querySettings.getSessionInfoConnectionProperty() != null) { + queryConfigBuilder.setConnectionProperties( + ImmutableList.of(this.querySettings.getSessionInfoConnectionProperty())); + } else { + queryConfigBuilder.setCreateSession(querySettings.isEnableSession()); + } + if (this.querySettings.getKmsKeyName() != null) { + EncryptionConfiguration encryption = + EncryptionConfiguration.newBuilder() + .setKmsKeyName(this.querySettings.getKmsKeyName()) + .build(); + queryConfigBuilder.setDestinationEncryptionConfiguration(encryption); + } + if 
(this.querySettings.getQueryProperties() != null) { + queryConfigBuilder.setConnectionProperties(this.querySettings.getQueryProperties()); + } + boolean useLegacy = + QueryDialectType.BIG_QUERY.equals( + QueryDialectType.valueOf(this.querySettings.getQueryDialect())); + queryConfigBuilder.setUseLegacySql(useLegacy); + + return queryConfigBuilder; + } + + private void checkIfDatasetExistElseCreate(String datasetName) { + Dataset dataset = bigQuery.getDataset(DatasetId.of(datasetName)); + if (dataset == null) { + LOG.info(String.format("Creating a hidden dataset: %s ", datasetName)); + DatasetInfo datasetInfo = + DatasetInfo.newBuilder(datasetName) + .setDefaultTableLifetime(this.querySettings.getDestinationDatasetExpirationTime()) + .build(); + bigQuery.create(datasetInfo); + } + } + + private String getDefaultDestinationTable() { + String timeOfCreation = String.valueOf(Instant.now().toEpochMilli()); + String randomizedId = String.valueOf(new Random().nextInt(9999)); + return DEFAULT_TABLE_NAME + timeOfCreation + randomizedId; + } + + @InternalApi + JobIdWrapper insertJob(JobConfiguration jobConfiguration) throws SQLException { + Job job; + JobInfo jobInfo = JobInfo.of(jobConfiguration); + LOG.finest("++enter++"); + try { + job = this.bigQuery.create(jobInfo); + } catch (BigQueryException ex) { + throw new BigQueryJdbcException(ex); + } + return new JobIdWrapper(job.getJobId(), null, null); + } + + @Override + public void setFetchSize(int rows) { + this.fetchSize = rows; + } + + @Override + public int getFetchSize() { + return this.fetchSize; + } + + /** + * Gets the extra labels for this statement. + * + * @return A map of the extra labels. + */ + public Map getExtraLabels() { + return this.extraLabels; + } + + /** + * Sets the extra labels for this statement. + * + * @param extraLabels A map of the extra labels. 
+ */ + public void setExtraLabels(Map extraLabels) { + this.extraLabels = extraLabels; + } + + @Override + public int getResultSetConcurrency() { + return ResultSet.CONCUR_READ_ONLY; + } + + ResultSet getCurrentResultSet() { + return this.currentResultSet; + } + + @Override + public int getResultSetType() { + return ResultSet.TYPE_FORWARD_ONLY; + } + + /** + * Wraps jobId and the firstPage of QueryResponse, so that we can avoid RPC to fetch the first + * page again + */ + static class JobIdWrapper { + + private JobId jobId; + private TableResult firstPage; + private ArrayList jobs; + + public JobIdWrapper(JobId jobId, TableResult firstPage, ArrayList jobs) { + this.jobId = jobId; + this.firstPage = firstPage; + this.jobs = jobs; + } + + JobId getJobId() { + return this.jobId; + } + + void setJobId(JobId jobId) { + this.jobId = jobId; + } + + TableResult getResults() { + return this.firstPage; + } + + void setResults(TableResult firstPage) { + this.firstPage = firstPage; + } + + ArrayList getJobs() { + return jobs; + } + + void setJobs(ArrayList jobs) { + this.jobs = jobs; + } + } + + @Override + public void addBatch(String sql) throws SQLException { + if (sql == null || sql.isEmpty()) { + return; + } + LOG.finest("++enter++"); + sql = sql.trim(); + if (!sql.endsWith(";")) { + sql += "; "; + } + SqlType sqlType = getQueryType(QueryJobConfiguration.newBuilder(sql).build(), null); + if (!SqlType.DML.equals(sqlType)) { + throw new IllegalArgumentException("addBatch currently supports DML operations."); + } + this.batchQueries.add(sql); + } + + @Override + public void clearBatch() { + this.batchQueries.clear(); + } + + @Override + public int[] executeBatch() throws SQLException { + LOG.finest("++enter++"); + int[] result = new int[this.batchQueries.size()]; + if (this.batchQueries.isEmpty()) { + return result; + } + + try { + String combinedQueries = String.join("", this.batchQueries); + QueryJobConfiguration.Builder jobConfiguration = getJobConfig(combinedQueries); + 
jobConfiguration.setPriority(QueryJobConfiguration.Priority.BATCH); + runQuery(combinedQueries, jobConfiguration.build()); + } catch (InterruptedException ex) { + throw new BigQueryJdbcRuntimeException(ex); + } + + int i = 0; + while (getUpdateCount() != -1 && i < this.batchQueries.size()) { + result[i] = getUpdateCount(); + getMoreResults(); + i++; + } + + clearBatch(); + return result; + } + + @Override + public Connection getConnection() { + return this.connection; + } + + public boolean hasMoreResults() { + if (this.parentJobId == null) { + return false; + } + return this.currentJobIdIndex + 1 < this.parentJobId.getJobs().size(); + } + + @Override + public boolean getMoreResults(int current) throws SQLException { + LOG.finest("++enter++"); + checkClosed(); + if (current != CLOSE_CURRENT_RESULT) { + throw new BigQueryJdbcSqlFeatureNotSupportedException( + "The JDBC driver only supports Statement.CLOSE_CURRENT_RESULT."); + } + + if (this.parentJobId == null) { + return false; + } + + try { + if (this.currentResultSet != null) { + this.currentResultSet.close(); + this.currentResultSet = null; + // Statement can be closed if it was the last result + if (isClosed) { + return false; + } + } + + Job currentJob = getNextJob(); + if (currentJob != null) { + StatementType statementType = + ((QueryStatistics) (currentJob.getStatistics())).getStatementType(); + SqlType sqlType = getQueryType(currentJob.getConfiguration(), statementType); + handleQueryResult(this.scriptQuery, currentJob.getQueryResults(), sqlType); + + return sqlType == SqlType.SELECT; + } else { + resetStatementFields(); + return false; + } + } catch (InterruptedException | SQLException ex) { + throw new BigQueryJdbcRuntimeException(ex); + } + } + + @Override + public boolean isWrapperFor(Class iface) { + return iface.isInstance(this); + } + + @Override + public T unwrap(Class iface) throws SQLException { + if (!isWrapperFor(iface)) { + throw new BigQueryJdbcException( + String.format("Unable to cast 
Statement to %s class.", iface.getName())); + } + return (T) this; + } + + @Override + public int getResultSetHoldability() { + return ResultSet.CLOSE_CURSORS_AT_COMMIT; + } + + @Override + public boolean isClosed() { + return this.isClosed; + } + + @Override + public void setPoolable(boolean poolable) { + this.poolable = poolable; + } + + @Override + public boolean isPoolable() { + return this.poolable; + } + + @Override + public void closeOnCompletion() { + this.closeOnCompletion = true; + } + + @Override + public boolean isCloseOnCompletion() { + return this.closeOnCompletion; + } + + protected void logQueryExecutionStart(String sql) { + if (sql == null) { + return; + } + String sanitizedSql = sql.trim().replaceAll("\\s+", " "); + String truncatedSql = + sanitizedSql.length() > 256 ? sanitizedSql.substring(0, 256) + "..." : sanitizedSql; + LOG.info("Executing query: " + truncatedSql); + LOG.info("Using query settings: " + this.querySettings.toString()); + } + + /** Throws a {@link BigQueryJdbcException} if this object is closed */ + void checkClosed() throws SQLException { + if (isClosed()) { + throw new BigQueryJdbcException("This " + getClass().getName() + " has been closed"); + } + } + + enum SqlType { + SELECT, + DML, + DML_EXTRA, + DDL, + SCRIPT, + TCL, + OTHER + } + + enum QueryDialectType { + SQL, + BIG_QUERY + } +} diff --git a/google-cloud-bigquery-jdbc/src/main/java/com/google/cloud/bigquery/jdbc/BigQueryThreadFactory.java b/google-cloud-bigquery-jdbc/src/main/java/com/google/cloud/bigquery/jdbc/BigQueryThreadFactory.java new file mode 100644 index 000000000..a5aa7a73a --- /dev/null +++ b/google-cloud-bigquery-jdbc/src/main/java/com/google/cloud/bigquery/jdbc/BigQueryThreadFactory.java @@ -0,0 +1,44 @@ +/* + * Copyright 2023 Google LLC + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
/*
 * Copyright 2023 Google LLC
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * https://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package com.google.cloud.bigquery.jdbc;

import com.google.api.core.InternalApi;
import java.util.concurrent.ThreadFactory;
import java.util.concurrent.atomic.AtomicInteger;

/**
 * A {@link ThreadFactory} that produces daemon threads with a configurable name prefix followed by
 * a monotonically increasing serial number (e.g. {@code DEFAULT_POOL_1}, {@code DEFAULT_POOL_2}).
 *
 * <p>Thread-safe: the serial number is an {@link AtomicInteger}, so concurrent calls to {@link
 * #newThread(Runnable)} never produce duplicate names. (The previous plain {@code int} field with
 * {@code ++threadSerialNum} was an unsynchronized read-modify-write and could race.)
 */
@InternalApi
class BigQueryThreadFactory implements ThreadFactory {
  private static final BigQueryJdbcCustomLogger LOG =
      new BigQueryJdbcCustomLogger(BigQueryThreadFactory.class.getName());

  private final String threadPrefix;
  // Atomic so that concurrent newThread() calls produce unique, gap-free serial numbers.
  private final AtomicInteger threadSerialNum = new AtomicInteger(0);

  /**
   * Creates a factory whose threads are named {@code threadPrefix + serialNumber}.
   *
   * @param threadPrefix prefix for every thread name produced by this factory
   */
  public BigQueryThreadFactory(String threadPrefix) {
    this.threadPrefix = threadPrefix;
  }

  /** Creates a factory with the default name prefix {@code "DEFAULT_POOL_"}. */
  public BigQueryThreadFactory() {
    this("DEFAULT_POOL_");
  }

  @Override
  public Thread newThread(Runnable r) {
    Thread t = new Thread(r, threadPrefix + threadSerialNum.incrementAndGet());
    // Daemon threads never prevent JVM shutdown if the pool is not explicitly terminated.
    t.setDaemon(true);
    LOG.finest(String.format("New thread %s created.", t.getName()));
    return t;
  }
}
/*
 * Copyright 2023 Google LLC
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * https://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package com.google.cloud.bigquery.jdbc;

import com.google.api.core.InternalApi;
import com.google.cloud.bigquery.FieldValue;
import com.google.cloud.bigquery.exception.BigQueryJdbcCoercionException;
import com.google.cloud.bigquery.exception.BigQueryJdbcCoercionNotFoundException;
import java.util.Map;

/**
 * Provides a declarative mechanism for coercing an object from one type to another. For example,
 * coercion of {@link String} to {@link Integer} can be achieved like this:
 *
 * <pre>
 *   Integer value = BigQueryTypeCoercer.INSTANCE.coerceTo(Integer.class, "3452148");
 *   System.out.println(value); // 3452148
 * </pre>
 *
 * <p>A {@link BigQueryTypeCoercer} comes preloaded with all the default {@link BigQueryCoercion}s
 * from {@link BigQueryDefaultCoercions} to coerce all the primitive types.
 *
 * <p>It is also possible to extend the behaviour of {@link BigQueryTypeCoercer} to other custom
 * user-defined types by creating an implementation of {@link BigQueryCoercion} and registering it
 * with a {@link BigQueryTypeCoercerBuilder} using its {@link
 * BigQueryTypeCoercerBuilder#registerTypeCoercion(BigQueryCoercion)} method:
 *
 * <pre>
 *   public class TextToStringCoercion implements BigQueryCoercion&lt;Text, String&gt; {
 *     &#64;Override
 *     public String coerce(Text text) {
 *       return text.toString(); // logic to coerce from Text type to String type
 *     }
 *   }
 * </pre>
 *
 * and use it like this:
 *
 * <pre>
 *   BigQueryTypeCoercer typeCoercer =
 *       new BigQueryTypeCoercerBuilder()
 *           .registerTypeCoercion(new TextToStringCoercion()) // registering a custom coercion
 *           .build();
 *   System.out.println(typeCoercer.coerceTo(String.class, text)); // Hello World!
 * </pre>
 */
@InternalApi
class BigQueryTypeCoercer {
  private static final BigQueryJdbcCustomLogger LOG =
      new BigQueryJdbcCustomLogger(BigQueryTypeCoercer.class.getName());

  /**
   * A {@link BigQueryTypeCoercer} instance with all the inbuilt {@link BigQueryCoercion}s.
   *
   * <p>Declared {@code final}: the previous non-final field initialized from a static block was
   * mutable shared state with no safe-publication guarantee.
   */
  static final BigQueryTypeCoercer INSTANCE = BigQueryDefaultCoercions.builder().build();

  // Outer key: source class; inner key: target class; value: the registered coercion.
  private final Map<Class<?>, Map<Class<?>, BigQueryCoercion<?, ?>>> allCoercions;

  BigQueryTypeCoercer(Map<Class<?>, Map<Class<?>, BigQueryCoercion<?, ?>>> allCoercions) {
    this.allCoercions = allCoercions;
  }

  /**
   * Coerces an object to the type specified.
   *
   * @param targetClass the target class for the coercion
   * @param value the object that needs to be coerced; {@code null} is treated as {@code Void}
   * @return the coerced value, or {@code null} for a null-valued input with no explicit coercion
   * @throws BigQueryJdbcCoercionNotFoundException when coercion cannot be performed to the target
   *     type
   * @throws BigQueryJdbcCoercionException when an error is encountered while performing the
   *     coercion
   */
  @SuppressWarnings("unchecked")
  <T> T coerceTo(Class<T> targetClass, Object value) {
    Class<?> sourceClass = value == null ? Void.class : value.getClass();
    // FieldValue object for null-values requires special check.
    if (sourceClass == FieldValue.class && ((FieldValue) value).isNull()) {
      sourceClass = Void.class;
    }
    // No coercion needed when the runtime type already matches the target.
    if (sourceClass.equals(targetClass)) {
      return targetClass.cast(value);
    }
    BigQueryCoercion<Object, T> coercion = findCoercion(sourceClass, targetClass);
    LOG.finest(() -> String.format("%s coercion for %s", coercion, value));
    // Value is null case & no explicit coercion registered for Void.
    if (sourceClass == Void.class && coercion == null) {
      return null;
    }
    if (coercion == null) {
      // Fall back to toString() for String targets so any value can be rendered.
      if (targetClass.equals(String.class)) {
        return (T) value.toString();
      }
      throw new BigQueryJdbcCoercionNotFoundException(sourceClass, targetClass);
    }
    try {
      return coercion.coerce(sourceClass != Void.class ? value : null);
    } catch (Exception ex) {
      throw new BigQueryJdbcCoercionException(ex);
    }
  }

  /**
   * Creates a {@link BigQueryTypeCoercerBuilder} with all the default coercions from {@link
   * BigQueryDefaultCoercions}.
   */
  static BigQueryTypeCoercerBuilder builder() {
    return BigQueryDefaultCoercions.builder();
  }

  /** Looks up the registered coercion for the (source, target) pair, or {@code null} if absent. */
  @SuppressWarnings("unchecked")
  private <T> BigQueryCoercion<Object, T> findCoercion(
      Class<?> sourceClass, Class<T> targetClass) {
    Map<Class<?>, BigQueryCoercion<?, ?>> bySourceMap = this.allCoercions.get(sourceClass);
    // AutoValue generated concrete classes are registered with their abstract classes and not the
    // concrete class. Make sure we can find the registered abstract class for such classes. The
    // abstract class in these cases is the superclass of the generated AutoValue concrete class.
    if (bySourceMap == null) {
      Class<?> registeredAbstractClass = sourceClass.getSuperclass();
      bySourceMap = this.allCoercions.get(registeredAbstractClass);
    }
    // If we still can't find a coercion entry for the source class then just return.
    if (bySourceMap == null) {
      return null;
    }
    return (BigQueryCoercion<Object, T>) bySourceMap.get(targetClass);
  }
}
/*
 * Copyright 2023 Google LLC
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * https://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package com.google.cloud.bigquery.jdbc;

import com.google.api.core.InternalApi;
import java.lang.reflect.ParameterizedType;
import java.lang.reflect.Type;
import java.util.HashMap;
import java.util.Map;
import java.util.function.Function;

/**
 * A builder to create a {@link BigQueryTypeCoercer} that performs the coercion of custom
 * user-defined types.
 */
@InternalApi
class BigQueryTypeCoercerBuilder {

  // Outer key: source class; inner key: target class; value: the registered coercion.
  private final Map<Class<?>, Map<Class<?>, BigQueryCoercion<?, ?>>> allCoercions;

  BigQueryTypeCoercerBuilder() {
    this.allCoercions = new HashMap<>();
  }

  /**
   * Registers a {@link BigQueryCoercion}, deriving the source and target classes from the
   * coercion's generic interface declaration.
   *
   * <p>NOTE(review): this assumes {@code BigQueryCoercion} is the FIRST generic interface the
   * implementation declares and that both type arguments are concrete classes — a lambda or a
   * class implementing another interface first would break this lookup. For such cases use
   * {@link #registerTypeCoercion(Function, Class, Class)} which takes the classes explicitly.
   *
   * @param coercion a {@link BigQueryCoercion} to register with this builder
   */
  BigQueryTypeCoercerBuilder registerTypeCoercion(BigQueryCoercion<?, ?> coercion) {
    Type[] typeArguments =
        ((ParameterizedType) coercion.getClass().getGenericInterfaces()[0])
            .getActualTypeArguments();
    Class<?> sourceClass = (Class<?>) typeArguments[0];
    Class<?> targetClass = (Class<?>) typeArguments[1];
    this.registerInternal(coercion, sourceClass, targetClass);
    return this;
  }

  /**
   * Registers a {@link BigQueryCoercion} backed by an implementation of {@link Function}.
   *
   * @param function a {@link Function} performing the coercion
   * @param sourceClass the source class
   * @param targetClass the target class
   */
  <S, T> BigQueryTypeCoercerBuilder registerTypeCoercion(
      Function<S, T> function, Class<S> sourceClass, Class<T> targetClass) {
    this.registerInternal((BigQueryCoercion<S, T>) function::apply, sourceClass, targetClass);
    return this;
  }

  /** Builds the {@link BigQueryTypeCoercer} with all the registered {@link BigQueryCoercion}s. */
  BigQueryTypeCoercer build() {
    return new BigQueryTypeCoercer(this.allCoercions);
  }

  private void registerInternal(
      BigQueryCoercion<?, ?> coercion, Class<?> sourceClass, Class<?> targetClass) {
    // Single-lookup computeIfAbsent replaces the previous getOrDefault + putIfAbsent pair,
    // which performed two map lookups to achieve the same result.
    this.allCoercions.computeIfAbsent(sourceClass, unused -> new HashMap<>()).put(targetClass, coercion);
  }
}
/*
 * Copyright 2023 Google LLC
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * https://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package com.google.cloud.bigquery.jdbc;

import com.google.api.core.InternalApi;
import com.google.cloud.bigquery.FieldValue;
import com.google.cloud.bigquery.FieldValue.Attribute;
import com.google.cloud.bigquery.Range;
import java.math.BigDecimal;
import java.nio.charset.StandardCharsets;
import java.sql.Date;
import java.sql.Time;
import java.sql.Timestamp;
import java.time.Duration;
import java.time.Instant;
import java.time.LocalDate;
import java.time.LocalDateTime;
import java.time.LocalTime;
import java.time.Period;
import java.time.ZoneId;
import java.time.format.DateTimeFormatter;
import java.time.temporal.ChronoUnit;
import java.util.concurrent.TimeUnit;
import org.apache.arrow.vector.PeriodDuration;
import org.apache.arrow.vector.util.Text;

/**
 * Holds the {@link BigQueryTypeCoercer} used by the driver, preloaded with every coercion needed
 * to map BigQuery REST ({@link FieldValue}) and Storage Read API (Arrow) values onto JDBC types.
 */
@InternalApi
class BigQueryTypeCoercionUtility {

  // final: this is shared read-only state; a non-final static initialized in a static block
  // offered no safe-publication guarantee.
  static final BigQueryTypeCoercer INSTANCE =
      BigQueryTypeCoercer.builder()
          .registerTypeCoercion(new FieldValueToString())
          .registerTypeCoercion(new FieldValueToInteger())
          .registerTypeCoercion(new FieldValueToFloat())
          .registerTypeCoercion(new FieldValueToShort())
          .registerTypeCoercion(new FieldValueToLong())
          .registerTypeCoercion(new FieldValueToDouble())
          .registerTypeCoercion(new FieldValueToBigDecimal())
          .registerTypeCoercion(new FieldValueToBoolean())
          .registerTypeCoercion(new FieldValueToBytesArray())
          .registerTypeCoercion(new FieldValueToTimestamp())
          .registerTypeCoercion(new FieldValueToTime())
          .registerTypeCoercion(new FieldValueToDate())
          .registerTypeCoercion(new FieldValueToObject())
          .registerTypeCoercion(new StringToBytesArray())
          .registerTypeCoercion(new RangeToString())
          .registerTypeCoercion(new IntegerToLong())
          .registerTypeCoercion(new BytesArrayToString())

          // Read API Type coercions
          .registerTypeCoercion(Timestamp::valueOf, LocalDateTime.class, Timestamp.class)
          .registerTypeCoercion(Text::toString, Text.class, String.class)
          .registerTypeCoercion(new TextToInteger())
          .registerTypeCoercion(new LongToTimestamp())
          .registerTypeCoercion(new LongToTime())
          .registerTypeCoercion(new IntegerToDate())
          .registerTypeCoercion(
              (Timestamp ts) -> Date.valueOf(ts.toLocalDateTime().toLocalDate()),
              Timestamp.class,
              Date.class)
          .registerTypeCoercion(
              (Timestamp ts) -> Time.valueOf(ts.toLocalDateTime().toLocalTime()),
              Timestamp.class,
              Time.class)
          .registerTypeCoercion(
              (Time time) -> // Per JDBC spec, the date component should be 1970-01-01
                  Timestamp.valueOf(LocalDateTime.of(LocalDate.ofEpochDay(0), time.toLocalTime())),
              Time.class,
              Timestamp.class)
          .registerTypeCoercion(
              (Date date) -> new Timestamp(date.getTime()), Date.class, Timestamp.class)
          .registerTypeCoercion(new TimestampToString())
          .registerTypeCoercion(new TimeToString())
          .registerTypeCoercion((Long l) -> l != 0L, Long.class, Boolean.class)
          .registerTypeCoercion((Double d) -> d != 0.0d, Double.class, Boolean.class)
          .registerTypeCoercion(
              (BigDecimal bd) -> bd.compareTo(BigDecimal.ZERO) != 0,
              BigDecimal.class,
              Boolean.class)
          .registerTypeCoercion((Integer i) -> i != 0, Integer.class, Boolean.class)
          .registerTypeCoercion((Float f) -> f != 0.0f, Float.class, Boolean.class)
          .registerTypeCoercion((Short s) -> s.shortValue() != 0, Short.class, Boolean.class)
          .registerTypeCoercion((Boolean b) -> b ? 1L : 0L, Boolean.class, Long.class)
          .registerTypeCoercion((Boolean b) -> b ? 1.0d : 0.0d, Boolean.class, Double.class)
          .registerTypeCoercion((Boolean b) -> b ? 1.0f : 0.0f, Boolean.class, Float.class)
          .registerTypeCoercion((Boolean b) -> (short) (b ? 1 : 0), Boolean.class, Short.class)
          .registerTypeCoercion((Boolean b) -> (byte) (b ? 1 : 0), Boolean.class, Byte.class)
          .registerTypeCoercion(
              (Boolean b) -> b ? BigDecimal.ONE : BigDecimal.ZERO,
              Boolean.class,
              BigDecimal.class)
          .registerTypeCoercion(new PeriodDurationToString())
          // SQL NULL maps to the zero value for every primitive wrapper target.
          .registerTypeCoercion(unused -> (byte) 0, Void.class, Byte.class)
          .registerTypeCoercion(unused -> 0, Void.class, Integer.class)
          .registerTypeCoercion(unused -> 0L, Void.class, Long.class)
          .registerTypeCoercion(unused -> 0D, Void.class, Double.class)
          .registerTypeCoercion(unused -> 0f, Void.class, Float.class)
          .registerTypeCoercion(unused -> (short) 0, Void.class, Short.class)
          .build();

  /** Formats a SQL timestamp as {@code yyyy-MM-dd HH:mm:ss.SSSSSS} (microsecond precision). */
  private static class TimestampToString implements BigQueryCoercion<Timestamp, String> {
    private static final DateTimeFormatter FORMATTER =
        DateTimeFormatter.ofPattern("yyyy-MM-dd HH:mm:ss.SSSSSS");

    @Override
    public String coerce(Timestamp value) {
      return FORMATTER.format(value.toLocalDateTime());
    }
  }

  /** Formats a SQL time as {@code HH:mm:ss.SSS} (millisecond precision). */
  private static class TimeToString implements BigQueryCoercion<Time, String> {
    private static final DateTimeFormatter FORMATTER = DateTimeFormatter.ofPattern("HH:mm:ss.SSS");

    @Override
    public String coerce(Time value) {
      return FORMATTER.format(value.toLocalTime());
    }
  }

  /**
   * Renders an Arrow INTERVAL as {@code Y-M D H:M:S.micros}, with a single leading '-' when the
   * duration part is negative.
   */
  private static class PeriodDurationToString
      implements BigQueryCoercion<PeriodDuration, String> {

    @Override
    public String coerce(PeriodDuration value) {
      StringBuilder builder = new StringBuilder();

      // Conversion of Period
      Period period = value.getPeriod().normalized();

      builder
          .append(period.getYears())
          .append("-")
          .append(period.getMonths())
          .append(" ")
          .append(period.getDays())
          .append(" ");

      // Conversion of Duration
      Duration duration = value.getDuration();
      if (duration.isNegative()) {
        builder.append("-");
        duration = duration.negated();
      }
      long hours = duration.toHours();
      duration = duration.minusHours(hours);
      long minutes = duration.toMinutes();
      duration = duration.minusMinutes(minutes);
      long seconds = duration.getSeconds();
      duration = duration.minusSeconds(seconds);
      long microseconds = duration.toNanos() / 1000;

      builder
          .append(hours)
          .append(":")
          .append(minutes)
          .append(":")
          .append(seconds)
          .append(".")
          .append(microseconds);

      String result = builder.toString();
      // Collapse the double sign that appears when the duration sign precedes a negative hour.
      result = result.replaceFirst("--", "-");

      return result;
    }
  }

  private static class IntegerToDate implements BigQueryCoercion<Integer, Date> {

    @Override
    public Date coerce(Integer value) {
      // For example int 18993 represents 2022-01-01.
      // Using LocalDate here to avoid this date getting affected by local time zones.
      LocalDate date = LocalDate.ofEpochDay(Long.valueOf(value));
      return Date.valueOf(date);
    }
  }

  private static class LongToTime implements BigQueryCoercion<Long, Time> {

    @Override
    public Time coerce(Long value) {
      // The input is microseconds since midnight.
      int hours = (int) TimeUnit.MICROSECONDS.toHours(value);
      int minutes = (int) (TimeUnit.MICROSECONDS.toMinutes(value) % 60);
      int seconds = (int) (TimeUnit.MICROSECONDS.toSeconds(value) % 60);

      // Note: BQ Time has a precision of up to six fractional digits (microsecond precision)
      // but java.sql.Time does not, so data after seconds is not returned.
      // Time.valueOf(LocalTime) replaces the deprecated Time(int,int,int) constructor with
      // identical local-timezone semantics.
      return Time.valueOf(LocalTime.of(hours, minutes, seconds));
    }
  }

  private static class LongToTimestamp implements BigQueryCoercion<Long, Timestamp> {

    @Override
    public Timestamp coerce(Long value) {
      // Long value is in microseconds. All further calculations should account for the unit.
      Instant instant = Instant.ofEpochMilli(value / 1000).plusNanos((value % 1000) * 1000);
      // JDBC is defaulting to UTC because BQ UI defaults to UTC.
      LocalDateTime localDateTime = LocalDateTime.ofInstant(instant, ZoneId.of("UTC"));
      return Timestamp.valueOf(localDateTime);
    }
  }

  private static class TextToInteger implements BigQueryCoercion<Text, Integer> {

    @Override
    public Integer coerce(Text value) {
      return Integer.parseInt(value.toString());
    }
  }

  private static class FieldValueToObject implements BigQueryCoercion<FieldValue, Object> {

    @Override
    public Object coerce(FieldValue fieldValue) {
      return fieldValue.getValue();
    }
  }

  private static class FieldValueToDate implements BigQueryCoercion<FieldValue, Date> {

    @Override
    public Date coerce(FieldValue fieldValue) {
      return Date.valueOf(fieldValue.getStringValue());
    }
  }

  private static class FieldValueToTime implements BigQueryCoercion<FieldValue, Time> {

    @Override
    public Time coerce(FieldValue fieldValue) {
      // Time ranges from 00:00:00 to 23:59:59.999999 in BigQuery.
      String strTime = fieldValue.getStringValue();
      try {
        LocalTime localTime = LocalTime.parse(strTime);
        // Convert LocalTime to milliseconds of the day. This correctly preserves millisecond
        // precision and truncates anything smaller.
        long millis = TimeUnit.NANOSECONDS.toMillis(localTime.toNanoOfDay());
        return new Time(millis);
      } catch (java.time.format.DateTimeParseException e) {
        throw new IllegalArgumentException(
            "Cannot parse the value " + strTime + " to java.sql.Time", e);
      }
    }
  }

  private static class FieldValueToTimestamp implements BigQueryCoercion<FieldValue, Timestamp> {

    @Override
    public Timestamp coerce(FieldValue fieldValue) {
      String rawValue = fieldValue.getStringValue();
      // BigQuery DATETIME strings are formatted like "YYYY-MM-DD'T'HH:MM:SS.fffffffff".
      // BigQuery TIMESTAMP strings are numeric epoch seconds.
      if (rawValue.contains("T")) {
        // It's a DATETIME string.
        // Timestamp.valueOf() expects "yyyy-mm-dd hh:mm:ss.fffffffff" format.
        return Timestamp.valueOf(rawValue.replace('T', ' '));
      } else {
        // It's a TIMESTAMP numeric string.
        long microseconds = fieldValue.getTimestampValue();
        Instant instant = Instant.EPOCH.plus(microseconds, ChronoUnit.MICROS);
        // JDBC is defaulting to UTC because BQ UI defaults to UTC.
        LocalDateTime localDateTime = LocalDateTime.ofInstant(instant, ZoneId.of("UTC"));
        return Timestamp.valueOf(localDateTime);
      }
    }
  }

  private static class FieldValueToBytesArray implements BigQueryCoercion<FieldValue, byte[]> {

    @Override
    public byte[] coerce(FieldValue fieldValue) {
      return fieldValue.getBytesValue();
    }
  }

  private static class StringToBytesArray implements BigQueryCoercion<String, byte[]> {

    @Override
    public byte[] coerce(String value) {
      // Explicit charset: the no-arg getBytes() used the platform default charset, so the same
      // string produced different bytes on differently-configured JVMs.
      return value.getBytes(StandardCharsets.UTF_8);
    }
  }

  private static class BytesArrayToString implements BigQueryCoercion<byte[], String> {

    @Override
    public String coerce(byte[] value) {
      // BigQuery represents BYTES values as Base64 text.
      return java.util.Base64.getEncoder().encodeToString(value);
    }
  }

  private static class FieldValueToBoolean implements BigQueryCoercion<FieldValue, Boolean> {

    @Override
    public Boolean coerce(FieldValue fieldValue) {
      // SQL NULL maps to false.
      return !fieldValue.isNull() && fieldValue.getBooleanValue();
    }
  }

  private static class FieldValueToBigDecimal implements BigQueryCoercion<FieldValue, BigDecimal> {

    @Override
    public BigDecimal coerce(FieldValue fieldValue) {
      return fieldValue.getNumericValue();
    }
  }

  private static class FieldValueToDouble implements BigQueryCoercion<FieldValue, Double> {

    @Override
    public Double coerce(FieldValue fieldValue) {
      return fieldValue.getDoubleValue();
    }
  }

  private static class FieldValueToLong implements BigQueryCoercion<FieldValue, Long> {

    @Override
    public Long coerce(FieldValue fieldValue) {
      return fieldValue.getLongValue();
    }
  }

  private static class FieldValueToInteger implements BigQueryCoercion<FieldValue, Integer> {

    @Override
    public Integer coerce(FieldValue fieldValue) {
      // Narrowing cast; values outside int range wrap (pre-existing behavior).
      return (int) fieldValue.getLongValue();
    }
  }

  private static class FieldValueToFloat implements BigQueryCoercion<FieldValue, Float> {

    @Override
    public Float coerce(FieldValue fieldValue) {
      return (float) fieldValue.getDoubleValue();
    }
  }

  private static class FieldValueToShort implements BigQueryCoercion<FieldValue, Short> {

    @Override
    public Short coerce(FieldValue fieldValue) {
      // Narrowing cast; values outside short range wrap (pre-existing behavior).
      return (short) fieldValue.getLongValue();
    }
  }

  private static class FieldValueToString implements BigQueryCoercion<FieldValue, String> {

    @Override
    public String coerce(FieldValue fieldValue) {
      if (Attribute.REPEATED.equals(fieldValue.getAttribute())) { // Case for Arrays
        return fieldValue.getValue().toString();
      }
      if (Attribute.RANGE.equals(fieldValue.getAttribute())) { // Range values
        Range rangeValue = fieldValue.getRangeValue();
        return INSTANCE.coerceTo(String.class, rangeValue);
      }
      if (Attribute.RECORD.equals(fieldValue.getAttribute())) { // Case for Structs
        return fieldValue.getRecordValue().toString();
      }
      return fieldValue.getStringValue();
    }
  }

  private static class IntegerToLong implements BigQueryCoercion<Integer, Long> {

    @Override
    public Long coerce(Integer intValue) {
      // Null maps to 0L rather than NPE.
      if (intValue == null) {
        return 0L;
      }
      return Long.valueOf(intValue);
    }
  }

  private static class RangeToString implements BigQueryCoercion<Range, String> {

    @Override
    public String coerce(Range value) {
      FieldValue startValue = value.getStart();
      FieldValue endValue = value.getEnd();

      String start = startValue.isNull() ? "UNBOUNDED" : startValue.getStringValue();
      String end = endValue.isNull() ? "UNBOUNDED" : endValue.getStringValue();
      // The start of a range is inclusive, and the end is exclusive.
      return String.format("[%s, %s)", start, end);
    }
  }
}

    A factory for connections to the physical data source that this DataSource object represents. + * An alternative to the DriverManager facility, a DataSource object is the preferred means of + * getting a connection. An object that implements the DataSource interface will typically be + * registered with a naming service based on the Javaâ„¢ Naming and Directory (JNDI) API. + */ +public class DataSource implements javax.sql.DataSource { + private final BigQueryJdbcCustomLogger LOG = new BigQueryJdbcCustomLogger(this.toString()); + private String URL; + private String projectId; + private String defaultDataset; + private String location; + private String userAgent; + private Boolean enableHighThroughputAPI; + private Integer highThroughputMinTableSize; + private Integer highThroughputActivationRatio; + private Boolean unsupportedHTAPIFallback; + private String kmsKeyName; + private Map queryProperties; + private String logLevel; + private Boolean enableSession; + private String logPath; + private Integer oAuthType; + private String oAuthServiceAcctEmail; + private String oAuthPvtKeyPath; + private String oAuthPvtKey; + private String oAuthAccessToken; + private String oAuthRefreshToken; + private Boolean useQueryCache; + private String queryDialect; + private Boolean allowLargeResults; + private String destinationTable; + private String destinationDataset; + private Long destinationDatasetExpirationTime; + private String universeDomain; + private String proxyHost; + private String proxyPort; + private String proxyUid; + private String proxyPwd; + private String oAuthClientId; + private String oAuthClientSecret; + private Integer jobCreationMode; + private Long maxResults; + private String partnerToken; + private Boolean enableWriteAPI; + private String additionalProjects; + private Boolean filterTablesOnDefaultDataset; + private Integer requestGoogleDriveScope; + private Integer metadataFetchThreadCount; + private String sslTrustStorePath; + private String 
sslTrustStorePassword; + + // Make sure the JDBC driver class is loaded. + static { + try { + Class.forName("com.google.cloud.bigquery.jdbc.BigQueryDriver"); + } catch (ClassNotFoundException ex) { + throw new IllegalStateException( + "DataSource failed to load com.google.cloud.bigquery.jdbc.BigQueryDriver", ex); + } + } + + /** An implementation of DataSource must include a public no-arg constructor. */ + public DataSource() {} + + @Override + public Connection getConnection() throws SQLException { + if (getURL() == null) { + throw new BigQueryJdbcException( + "Connection URL is null. Please specify a valid Connection URL to get Connection."); + } + if (!BigQueryDriver.getRegisteredDriver().acceptsURL(getURL())) { + throw new BigQueryJdbcException( + "The URL " + getURL() + " is invalid. Please specify a valid Connection URL. "); + } + return DriverManager.getConnection(getURL(), createProperties()); + } + + private Properties createProperties() { + Properties connectionProperties = new Properties(); + if (this.projectId != null) { + connectionProperties.setProperty( + BigQueryJdbcUrlUtility.PROJECT_ID_PROPERTY_NAME, this.projectId); + } + if (this.defaultDataset != null) { + connectionProperties.setProperty( + BigQueryJdbcUrlUtility.DEFAULT_DATASET_PROPERTY_NAME, this.defaultDataset); + } + if (this.location != null) { + connectionProperties.setProperty( + BigQueryJdbcUrlUtility.LOCATION_PROPERTY_NAME, this.location); + } + if (this.enableHighThroughputAPI != null) { + connectionProperties.setProperty( + BigQueryJdbcUrlUtility.ENABLE_HTAPI_PROPERTY_NAME, + String.valueOf(this.enableHighThroughputAPI)); + } + if (this.unsupportedHTAPIFallback != null) { + connectionProperties.setProperty( + BigQueryJdbcUrlUtility.UNSUPPORTED_HTAPI_FALLBACK_PROPERTY_NAME, + String.valueOf(this.unsupportedHTAPIFallback)); + } + if (this.highThroughputMinTableSize != null) { + connectionProperties.setProperty( + BigQueryJdbcUrlUtility.HTAPI_MIN_TABLE_SIZE_PROPERTY_NAME, + 
String.valueOf(this.highThroughputMinTableSize)); + } + if (this.highThroughputActivationRatio != null) { + connectionProperties.setProperty( + BigQueryJdbcUrlUtility.HTAPI_ACTIVATION_RATIO_PROPERTY_NAME, + String.valueOf(this.highThroughputActivationRatio)); + } + if (this.kmsKeyName != null) { + connectionProperties.setProperty( + BigQueryJdbcUrlUtility.KMS_KEY_NAME_PROPERTY_NAME, this.kmsKeyName); + } + if (this.queryProperties != null) { + connectionProperties.setProperty( + BigQueryJdbcUrlUtility.QUERY_PROPERTIES_NAME, this.queryProperties.toString()); + } + if (this.enableSession != null) { + connectionProperties.setProperty( + BigQueryJdbcUrlUtility.ENABLE_SESSION_PROPERTY_NAME, String.valueOf(this.enableSession)); + } + if (this.logLevel != null) { + connectionProperties.setProperty( + BigQueryJdbcUrlUtility.LOG_LEVEL_PROPERTY_NAME, this.logLevel); + } + if (this.logPath != null) { + connectionProperties.setProperty(BigQueryJdbcUrlUtility.LOG_PATH_PROPERTY_NAME, this.logPath); + } + if (this.oAuthType != null) { + connectionProperties.setProperty( + BigQueryJdbcUrlUtility.OAUTH_TYPE_PROPERTY_NAME, String.valueOf(this.oAuthType)); + } + if (this.oAuthServiceAcctEmail != null) { + connectionProperties.setProperty( + BigQueryJdbcUrlUtility.OAUTH_SA_EMAIL_PROPERTY_NAME, this.oAuthServiceAcctEmail); + } + if (this.oAuthPvtKeyPath != null) { + connectionProperties.setProperty( + BigQueryJdbcUrlUtility.OAUTH_PVT_KEY_PATH_PROPERTY_NAME, this.oAuthPvtKeyPath); + } + if (this.oAuthPvtKey != null) { + connectionProperties.setProperty( + BigQueryJdbcUrlUtility.OAUTH_PVT_KEY_PROPERTY_NAME, this.oAuthPvtKey); + } + if (this.oAuthAccessToken != null) { + connectionProperties.setProperty( + BigQueryJdbcUrlUtility.OAUTH_ACCESS_TOKEN_PROPERTY_NAME, this.oAuthAccessToken); + } + if (this.oAuthRefreshToken != null) { + connectionProperties.setProperty( + BigQueryJdbcUrlUtility.OAUTH_REFRESH_TOKEN_PROPERTY_NAME, this.oAuthRefreshToken); + } + if (this.useQueryCache != null) { + 
connectionProperties.setProperty( + BigQueryJdbcUrlUtility.USE_QUERY_CACHE_PROPERTY_NAME, String.valueOf(this.useQueryCache)); + } + if (this.queryDialect != null) { + connectionProperties.setProperty( + BigQueryJdbcUrlUtility.QUERY_DIALECT_PROPERTY_NAME, this.queryDialect); + } + if (this.allowLargeResults != null) { + connectionProperties.setProperty( + BigQueryJdbcUrlUtility.ALLOW_LARGE_RESULTS_PROPERTY_NAME, + String.valueOf(this.allowLargeResults)); + } + if (this.destinationTable != null) { + connectionProperties.setProperty( + BigQueryJdbcUrlUtility.LARGE_RESULTS_TABLE_PROPERTY_NAME, this.destinationTable); + } + if (this.destinationDataset != null) { + connectionProperties.setProperty( + BigQueryJdbcUrlUtility.LARGE_RESULTS_DATASET_PROPERTY_NAME, this.destinationDataset); + } + if (this.destinationDatasetExpirationTime != null) { + connectionProperties.setProperty( + BigQueryJdbcUrlUtility.DESTINATION_DATASET_EXPIRATION_TIME_PROPERTY_NAME, + String.valueOf(this.destinationDatasetExpirationTime)); + } + if (this.universeDomain != null) { + connectionProperties.setProperty( + BigQueryJdbcUrlUtility.UNIVERSE_DOMAIN_OVERRIDE_PROPERTY_NAME, this.universeDomain); + } + if (this.proxyHost != null) { + connectionProperties.setProperty( + BigQueryJdbcUrlUtility.PROXY_HOST_PROPERTY_NAME, this.proxyHost); + } + if (this.proxyPort != null) { + connectionProperties.setProperty( + BigQueryJdbcUrlUtility.PROXY_PORT_PROPERTY_NAME, this.proxyPort); + } + if (this.proxyUid != null) { + connectionProperties.setProperty( + BigQueryJdbcUrlUtility.PROXY_USER_ID_PROPERTY_NAME, this.proxyUid); + } + if (this.proxyPwd != null) { + connectionProperties.setProperty( + BigQueryJdbcUrlUtility.PROXY_PASSWORD_PROPERTY_NAME, this.proxyPwd); + } + if (this.oAuthClientId != null) { + connectionProperties.setProperty( + BigQueryJdbcUrlUtility.OAUTH_CLIENT_ID_PROPERTY_NAME, this.oAuthClientId); + } + if (this.oAuthClientSecret != null) { + connectionProperties.setProperty( + 
BigQueryJdbcUrlUtility.OAUTH_CLIENT_SECRET_PROPERTY_NAME, this.oAuthClientSecret); + } + if (this.jobCreationMode != null) { + connectionProperties.setProperty( + BigQueryJdbcUrlUtility.JOB_CREATION_MODE_PROPERTY_NAME, + String.valueOf(this.jobCreationMode)); + } + if (this.maxResults != null) { + connectionProperties.setProperty( + BigQueryJdbcUrlUtility.MAX_RESULTS_PROPERTY_NAME, String.valueOf(this.maxResults)); + } + if (this.partnerToken != null && !this.partnerToken.isEmpty()) { + connectionProperties.setProperty( + BigQueryJdbcUrlUtility.PARTNER_TOKEN_PROPERTY_NAME, this.partnerToken); + } + if (this.enableWriteAPI != null) { + connectionProperties.setProperty( + BigQueryJdbcUrlUtility.ENABLE_WRITE_API_PROPERTY_NAME, + String.valueOf(this.enableWriteAPI)); + } + if (this.additionalProjects != null) { + connectionProperties.setProperty( + BigQueryJdbcUrlUtility.ADDITIONAL_PROJECTS_PROPERTY_NAME, this.additionalProjects); + } + if (this.filterTablesOnDefaultDataset != null) { + connectionProperties.setProperty( + BigQueryJdbcUrlUtility.FILTER_TABLES_ON_DEFAULT_DATASET_PROPERTY_NAME, + String.valueOf(this.filterTablesOnDefaultDataset)); + } + if (this.requestGoogleDriveScope != null) { + connectionProperties.setProperty( + BigQueryJdbcUrlUtility.REQUEST_GOOGLE_DRIVE_SCOPE_PROPERTY_NAME, + String.valueOf(this.requestGoogleDriveScope)); + } + if (this.metadataFetchThreadCount != null) { + connectionProperties.setProperty( + BigQueryJdbcUrlUtility.METADATA_FETCH_THREAD_COUNT_PROPERTY_NAME, + String.valueOf(this.metadataFetchThreadCount)); + } + if (this.sslTrustStorePath != null) { + connectionProperties.setProperty( + BigQueryJdbcUrlUtility.SSL_TRUST_STORE_PROPERTY_NAME, + String.valueOf(this.sslTrustStorePath)); + } + if (this.sslTrustStorePassword != null) { + connectionProperties.setProperty( + BigQueryJdbcUrlUtility.SSL_TRUST_STORE_PWD_PROPERTY_NAME, + String.valueOf(this.sslTrustStorePassword)); + } + return connectionProperties; + } + + @Override + public 
Connection getConnection(String username, String password) throws SQLException { + LOG.warning( + "Username and Password is not supported in Bigquery JDBC Driver. Values discarded."); + return getConnection(); + } + + public String getURL() { + return URL; + } + + public void setURL(String URL) { + this.URL = URL; + } + + public String getProjectId() { + return projectId; + } + + public void setProjectId(String projectId) { + this.projectId = projectId; + } + + public String getDefaultDataset() { + return defaultDataset; + } + + public void setDefaultDataset(String defaultDataset) { + this.defaultDataset = defaultDataset; + } + + public String getLocation() { + return location; + } + + public void setLocation(String location) { + this.location = location; + } + + public String getUserAgent() { + return userAgent; + } + + public void setUserAgent(String userAgent) { + this.userAgent = userAgent; + } + + public String getPartnerToken() { + return partnerToken; + } + + public void setPartnerToken(String partnerToken) { + // This property is expected to be set by partners only. 
For more details on exact format + // supported, refer b/396086960 + this.partnerToken = partnerToken; + } + + public boolean getEnableHighThroughputAPI() { + return enableHighThroughputAPI; + } + + public void setEnableHighThroughputAPI(Boolean enableHighThroughputAPI) { + this.enableHighThroughputAPI = enableHighThroughputAPI; + } + + public int getHighThroughputMinTableSize() { + return highThroughputMinTableSize; + } + + public int getHighThroughputActivationRatio() { + return highThroughputActivationRatio; + } + + public void setHighThroughputMinTableSize(Integer highThroughputMinTableSize) { + this.highThroughputMinTableSize = highThroughputMinTableSize; + } + + public void setHighThroughputActivationRatio(Integer highThroughputActivationRatio) { + this.highThroughputActivationRatio = highThroughputActivationRatio; + } + + public void setKmsKeyName(String kmsKeyName) { + this.kmsKeyName = kmsKeyName; + } + + public String getKmsKeyName() { + return this.kmsKeyName; + } + + public void setQueryProperties(Map queryProperties) { + this.queryProperties = queryProperties; + } + + public Map getQueryProperties() { + return this.queryProperties; + } + + public void setUnsupportedHTAPIFallback(Boolean unsupportedHTAPIFallback) { + this.unsupportedHTAPIFallback = unsupportedHTAPIFallback; + } + + public boolean getUnsupportedHTAPIFallback() { + return this.unsupportedHTAPIFallback; + } + + public boolean getEnableSession() { + return enableSession; + } + + public void setEnableSession(Boolean enableSession) { + this.enableSession = enableSession; + } + + public String getLogLevel() { + return logLevel; + } + + public void setLogLevel(String logLevel) { + this.logLevel = logLevel; + } + + public String getLogPath() { + return logPath; + } + + public void setLogPath(String logPath) { + this.logPath = logPath; + } + + public String getUniverseDomain() { + return universeDomain; + } + + public void setUniverseDomain(String universeDomain) { + this.universeDomain = 
universeDomain; + } + + public String getProxyHost() { + return proxyHost; + } + + public void setProxyHost(String proxyHost) { + this.proxyHost = proxyHost; + } + + public String getProxyPort() { + return proxyPort; + } + + public void setProxyPort(String proxyPort) { + this.proxyPort = proxyPort; + } + + public String getProxyUid() { + return proxyUid; + } + + public void setProxyUid(String proxyUid) { + this.proxyUid = proxyUid; + } + + public String getProxyPwd() { + return proxyPwd; + } + + public void setProxyPwd(String proxyPwd) { + this.proxyPwd = proxyPwd; + } + + public int getOAuthType() { + return oAuthType; + } + + public void setOAuthType(Integer oAuthType) { + this.oAuthType = oAuthType; + } + + public String getOAuthServiceAcctEmail() { + return oAuthServiceAcctEmail; + } + + public void setOAuthServiceAcctEmail(String oAuthServiceAcctEmail) { + this.oAuthServiceAcctEmail = oAuthServiceAcctEmail; + } + + public String getOAuthPvtKeyPath() { + return oAuthPvtKeyPath; + } + + public String getOAuthPvtKey() { + return oAuthPvtKey; + } + + public void setOAuthPvtKey(String oAuthPvtKey) { + this.oAuthPvtKey = oAuthPvtKey; + } + + public void setOAuthPvtKeyPath(String oAuthPvtKeyPath) { + this.oAuthPvtKeyPath = oAuthPvtKeyPath; + } + + public String getOAuthAccessToken() { + return oAuthAccessToken; + } + + public void setOAuthAccessToken(String oAuthAccessToken) { + this.oAuthAccessToken = oAuthAccessToken; + } + + public String getOAuthRefreshToken() { + return oAuthRefreshToken; + } + + public void setOAuthRefreshToken(String oAuthRefreshToken) { + this.oAuthRefreshToken = oAuthRefreshToken; + } + + public Boolean getUseQueryCache() { + return useQueryCache; + } + + public String getQueryDialect() { + return queryDialect; + } + + public Boolean getAllowLargeResults() { + return allowLargeResults; + } + + public String getDestinationTable() { + return destinationTable; + } + + public String getDestinationDataset() { + return destinationDataset; + } + + 
public Long getDestinationDatasetExpirationTime() { + return destinationDatasetExpirationTime; + } + + public void setUseQueryCache(Boolean useQueryCache) { + this.useQueryCache = useQueryCache; + } + + public void setQueryDialect(String queryDialect) { + this.queryDialect = queryDialect; + } + + public void setAllowLargeResults(Boolean allowLargeResults) { + this.allowLargeResults = allowLargeResults; + } + + public void setDestinationTable(String destinationTable) { + this.destinationTable = destinationTable; + } + + public void setDestinationDataset(String destinationDataset) { + this.destinationDataset = destinationDataset; + } + + public void setDestinationDatasetExpirationTime(long destinationDatasetExpirationTime) { + this.destinationDatasetExpirationTime = destinationDatasetExpirationTime; + } + + public String getOAuthClientId() { + return oAuthClientId; + } + + public void setOAuthClientId(String oAuthClientId) { + this.oAuthClientId = oAuthClientId; + } + + public String getOAuthClientSecret() { + return oAuthClientSecret; + } + + public void setOAuthClientSecret(String oAuthClientSecret) { + this.oAuthClientSecret = oAuthClientSecret; + } + + public Integer getJobCreationMode() { + return jobCreationMode; + } + + public void setJobCreationMode(Integer jobCreationMode) { + this.jobCreationMode = jobCreationMode; + } + + public Boolean getEnableWriteAPI() { + return enableWriteAPI; + } + + public void setEnableWriteAPI(Boolean enableWriteAPI) { + this.enableWriteAPI = enableWriteAPI; + } + + public String getAdditionalProjects() { + return additionalProjects; + } + + public void setAdditionalProjects(String additionalProjects) { + this.additionalProjects = additionalProjects; + } + + public Boolean getFilterTablesOnDefaultDataset() { + return filterTablesOnDefaultDataset; + } + + public void setFilterTablesOnDefaultDataset(Boolean filterTablesOnDefaultDataset) { + this.filterTablesOnDefaultDataset = filterTablesOnDefaultDataset; + } + + public Integer 
getRequestGoogleDriveScope() { + return requestGoogleDriveScope; + } + + public void setRequestGoogleDriveScope(Integer requestGoogleDriveScope) { + this.requestGoogleDriveScope = requestGoogleDriveScope; + } + + public Integer getMetadataFetchThreadCount() { + return metadataFetchThreadCount; + } + + public void setMetadataFetchThreadCount(Integer metadataFetchThreadCount) { + this.metadataFetchThreadCount = metadataFetchThreadCount; + } + + public String getSSLTrustStorePath() { + return sslTrustStorePath; + } + + public void setSSLTrustStorePath(String sslTrustStorePath) { + this.sslTrustStorePath = sslTrustStorePath; + } + + public String getSSLTrustStorePassword() { + return sslTrustStorePassword; + } + + public void setSSLTrustStorePassword(String sslTrustStorePassword) { + this.sslTrustStorePassword = sslTrustStorePassword; + } + + @Override + public PrintWriter getLogWriter() { + return null; + } + + @Override + public void setLogWriter(PrintWriter out) {} + + @Override + public void setLoginTimeout(int seconds) {} + + @Override + public int getLoginTimeout() { + return 0; + } + + @Override + public Logger getParentLogger() { + return BigQueryJdbcRootLogger.getRootLogger(); + } + + @Override + public T unwrap(Class iface) { + return null; + } + + @Override + public boolean isWrapperFor(Class iface) { + return false; + } +} diff --git a/google-cloud-bigquery-jdbc/src/main/java/com/google/cloud/bigquery/jdbc/PooledConnectionDataSource.java b/google-cloud-bigquery-jdbc/src/main/java/com/google/cloud/bigquery/jdbc/PooledConnectionDataSource.java new file mode 100644 index 000000000..412e4ca9e --- /dev/null +++ b/google-cloud-bigquery-jdbc/src/main/java/com/google/cloud/bigquery/jdbc/PooledConnectionDataSource.java @@ -0,0 +1,68 @@ +/* + * Copyright 2025 Google LLC + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package com.google.cloud.bigquery.jdbc; + +import com.google.cloud.bigquery.exception.BigQueryJdbcRuntimeException; +import com.google.common.annotations.VisibleForTesting; +import java.sql.Connection; +import java.sql.SQLException; +import javax.sql.ConnectionPoolDataSource; +import javax.sql.PooledConnection; + +public class PooledConnectionDataSource extends DataSource implements ConnectionPoolDataSource { + private PooledConnectionListener connectionPoolManager = null; + Connection bqConnection = null; + + @Override + public PooledConnection getPooledConnection() throws SQLException { + if (connectionPoolManager != null && !connectionPoolManager.isConnectionPoolEmpty()) { + return connectionPoolManager.getPooledConnection(); + } + // Create the Underlying physical connection + if (bqConnection == null) { + bqConnection = super.getConnection(); + } + if (bqConnection == null) { + throw new BigQueryJdbcRuntimeException( + "Cannot get pooled connection: unable to get underlying physical connection"); + } + String connectionURl = ((BigQueryConnection) bqConnection).getConnectionUrl(); + Long connectionPoolSize = + BigQueryJdbcUrlUtility.parseConnectionPoolSize(connectionURl, this.toString()); + if (connectionPoolManager == null) { + connectionPoolManager = new PooledConnectionListener(connectionPoolSize); + } + BigQueryPooledConnection bqPooledConnection = new BigQueryPooledConnection(bqConnection); + bqPooledConnection.addConnectionEventListener(connectionPoolManager); + return bqPooledConnection; + } + + @VisibleForTesting + void 
setConnection(Connection connection) { + this.bqConnection = connection; + } + + @VisibleForTesting + public PooledConnectionListener getConnectionPoolManager() { + return this.connectionPoolManager; + } + + @Override + public PooledConnection getPooledConnection(String arg0, String arg1) throws SQLException { + throw new UnsupportedOperationException("This operation is not supported by the driver"); + } +} diff --git a/google-cloud-bigquery-jdbc/src/main/java/com/google/cloud/bigquery/jdbc/PooledConnectionListener.java b/google-cloud-bigquery-jdbc/src/main/java/com/google/cloud/bigquery/jdbc/PooledConnectionListener.java new file mode 100644 index 000000000..c0f082043 --- /dev/null +++ b/google-cloud-bigquery-jdbc/src/main/java/com/google/cloud/bigquery/jdbc/PooledConnectionListener.java @@ -0,0 +1,145 @@ +/* + * Copyright 2025 Google LLC + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package com.google.cloud.bigquery.jdbc; + +import java.util.UUID; +import java.util.concurrent.LinkedBlockingDeque; +import javax.sql.ConnectionEvent; +import javax.sql.ConnectionEventListener; +import javax.sql.PooledConnection; + +public class PooledConnectionListener implements ConnectionEventListener { + private final BigQueryJdbcCustomLogger LOG = new BigQueryJdbcCustomLogger(this.toString()); + private String id; // Mainly for internal use + private LinkedBlockingDeque connectionPool; + private Long connectionPoolSize = 0L; + + public PooledConnectionListener(Long connPoolSize) { + id = UUID.randomUUID().toString(); + this.connectionPoolSize = connPoolSize; + if (getConnectionPoolSize() > 0L) { + connectionPool = new LinkedBlockingDeque<>(getConnectionPoolSize().intValue()); + } else { + connectionPool = new LinkedBlockingDeque<>(); + } + } + + public Long getConnectionPoolSize() { + return this.connectionPoolSize; + } + + public int getConnectionPoolCurrentCapacity() { + return this.connectionPool.size(); + } + + public boolean isConnectionPoolEmpty() { + return (connectionPool != null && connectionPool.isEmpty()); + } + + PooledConnection getPooledConnection() { + if (isConnectionPoolEmpty()) { + LOG.warning("Connection pool is empty"); + return null; + } + // Return the first element. 
+ return connectionPool.getFirst(); + } + + void addConnection(PooledConnection connection) { + LOG.finest("++enter++"); + if (connection == null) { + LOG.warning("Connection passed in is null"); + return; + } + if (connectionPool.contains(connection)) { + LOG.warning("Connection already in the pool"); + return; + } + connectionPool.add(connection); + } + + void removeConnection(PooledConnection connection) { + LOG.finest("++enter++"); + if (connection == null) { + LOG.warning("Connection passed in is null"); + return; + } + if (!connectionPool.contains(connection)) { + LOG.warning("Connection already in the pool"); + return; + } + connectionPool.remove(connection); + } + + @Override + public void connectionClosed(ConnectionEvent event) { + LOG.finest("++enter++"); + Object eventSource = event.getSource(); + if (eventSource == null + || !(eventSource instanceof BigQueryPooledConnection) + || !(eventSource.getClass().isAssignableFrom(BigQueryPooledConnection.class))) { + throw new IllegalArgumentException( + "Invalid ConnectionEvent source passed to connectionClosed. Expecting" + + " BigQueryPooledConnection."); + } + BigQueryPooledConnection bqPooledConnection = (BigQueryPooledConnection) eventSource; + addConnection(bqPooledConnection); + LOG.finest("Added pooled connection to connection pool"); + } + + @Override + public void connectionErrorOccurred(ConnectionEvent event) { + LOG.finest("++enter++"); + Object eventSource = event.getSource(); + if (eventSource == null + || !(eventSource instanceof BigQueryPooledConnection) + || !(eventSource.getClass().isAssignableFrom(BigQueryPooledConnection.class))) { + throw new IllegalArgumentException( + "Invalid ConnectionEvent source passed to connectionClosed. Expecting" + + " BigQueryPooledConnection."); + } + BigQueryPooledConnection bqPooledConnection = (BigQueryPooledConnection) eventSource; + removeConnection(bqPooledConnection); + String errorMessage = + (event.getSQLException() != null) + ? 
event.getSQLException().getMessage() + : "Connection error occured"; + LOG.finest( + String.format( + "Removed pooled connection from connection pool due to error: %s", errorMessage)); + } + + @Override + public int hashCode() { + final int prime = 31; + int result = 1; + result = prime * result + ((id == null) ? 0 : id.hashCode()); + return result; + } + + @Override + public boolean equals(Object obj) { + if (this == obj) return true; + if (obj == null) return false; + if (getClass() != obj.getClass()) return false; + PooledConnectionListener other = (PooledConnectionListener) obj; + if (id == null) { + if (other.id != null) return false; + } else if (!id.equals(other.id)) return false; + return true; + } +} diff --git a/google-cloud-bigquery-jdbc/src/main/resources/META-INF/services/java.sql.Driver b/google-cloud-bigquery-jdbc/src/main/resources/META-INF/services/java.sql.Driver new file mode 100644 index 000000000..1ea35896b --- /dev/null +++ b/google-cloud-bigquery-jdbc/src/main/resources/META-INF/services/java.sql.Driver @@ -0,0 +1 @@ +com.google.cloud.bigquery.jdbc.BigQueryDriver diff --git a/google-cloud-bigquery-jdbc/src/main/resources/com/google/cloud/bigquery/jdbc/DatabaseMetaData_GetCrossReference.sql b/google-cloud-bigquery-jdbc/src/main/resources/com/google/cloud/bigquery/jdbc/DatabaseMetaData_GetCrossReference.sql new file mode 100644 index 000000000..da8386270 --- /dev/null +++ b/google-cloud-bigquery-jdbc/src/main/resources/com/google/cloud/bigquery/jdbc/DatabaseMetaData_GetCrossReference.sql @@ -0,0 +1,72 @@ +/* + * Copyright 2024 Google LLC + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +SELECT PKTABLE_CAT, + PKTABLE_SCHEM, + PKTABLE_NAME, + PRIMARY.column_name AS PKCOLUMN_NAME, + FOREIGN.constraint_catalog AS FKTABLE_CAT, + FOREIGN.constraint_schema AS FKTABLE_SCHEM, + FOREIGN.table_name AS FKTABLE_NAME, + FOREIGN.column_name AS FKCOLUMN_NAME, + FOREIGN.ordinal_position AS KEY_SEQ, + NULL AS UPDATE_RULE, + NULL AS DELETE_RULE, + FOREIGN.constraint_name AS FK_NAME, + PRIMARY.constraint_name AS PK_NAME, + NULL AS DEFERRABILITY +FROM (SELECT DISTINCT CCU.table_catalog AS PKTABLE_CAT, + CCU.table_schema AS PKTABLE_SCHEM, + CCU.table_name AS PKTABLE_NAME, + TC.constraint_catalog, + TC.constraint_schema, + TC.constraint_name, + TC.table_catalog, + TC.table_schema, + TC.table_name, + TC.constraint_type, + KCU.column_name, + KCU.ordinal_position, + KCU.position_in_unique_constraint + FROM `%1$s.%2$s.INFORMATION_SCHEMA.TABLE_CONSTRAINTS` TC + INNER JOIN + `%1$s.%2$s.INFORMATION_SCHEMA.KEY_COLUMN_USAGE` KCU + USING + (constraint_catalog, + constraint_schema, + constraint_name, + table_catalog, + table_schema, + table_name) + INNER JOIN + `%1$s.%2$s.INFORMATION_SCHEMA.CONSTRAINT_COLUMN_USAGE` CCU + USING + (constraint_catalog, + constraint_schema, + constraint_name) + WHERE constraint_type = 'FOREIGN KEY' + AND TC.table_name = '%6$s') FOREIGN + INNER JOIN (SELECT * + FROM `%1$s.%2$s.INFORMATION_SCHEMA.KEY_COLUMN_USAGE` + WHERE position_in_unique_constraint IS NULL + AND RTRIM(table_name) = '%3$s') PRIMARY +ON + FOREIGN.PKTABLE_CAT = PRIMARY.table_catalog + AND FOREIGN.PKTABLE_SCHEM = PRIMARY.table_schema + AND FOREIGN.PKTABLE_NAME = 
PRIMARY.table_name + AND FOREIGN.position_in_unique_constraint = + PRIMARY.ordinal_position +ORDER BY FKTABLE_CAT, FKTABLE_SCHEM, FKTABLE_NAME, KEY_SEQ \ No newline at end of file diff --git a/google-cloud-bigquery-jdbc/src/main/resources/com/google/cloud/bigquery/jdbc/DatabaseMetaData_GetExportedKeys.sql b/google-cloud-bigquery-jdbc/src/main/resources/com/google/cloud/bigquery/jdbc/DatabaseMetaData_GetExportedKeys.sql new file mode 100644 index 000000000..4058f6bff --- /dev/null +++ b/google-cloud-bigquery-jdbc/src/main/resources/com/google/cloud/bigquery/jdbc/DatabaseMetaData_GetExportedKeys.sql @@ -0,0 +1,71 @@ +/* + * Copyright 2024 Google LLC + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +SELECT PKTABLE_CAT, + PKTABLE_SCHEM, + PKTABLE_NAME, + PRIMARY.column_name AS PKCOLUMN_NAME, + FOREIGN.constraint_catalog AS FKTABLE_CAT, + FOREIGN.constraint_schema AS FKTABLE_SCHEM, + FOREIGN.table_name AS FKTABLE_NAME, + FOREIGN.column_name AS FKCOLUMN_NAME, + FOREIGN.ordinal_position AS KEY_SEQ, + NULL AS UPDATE_RULE, + NULL AS DELETE_RULE, + FOREIGN.constraint_name AS FK_NAME, + PRIMARY.constraint_name AS PK_NAME, + NULL AS DEFERRABILITY +FROM (SELECT DISTINCT CCU.table_catalog AS PKTABLE_CAT, + CCU.table_schema AS PKTABLE_SCHEM, + CCU.table_name AS PKTABLE_NAME, + TC.constraint_catalog, + TC.constraint_schema, + TC.constraint_name, + TC.table_catalog, + TC.table_schema, + TC.table_name, + TC.constraint_type, + KCU.column_name, + KCU.ordinal_position, + KCU.position_in_unique_constraint + FROM `%1$s.%2$s.INFORMATION_SCHEMA.TABLE_CONSTRAINTS` TC + INNER JOIN + `%1$s.%2$s.INFORMATION_SCHEMA.KEY_COLUMN_USAGE` KCU + USING + (constraint_catalog, + constraint_schema, + constraint_name, + table_catalog, + table_schema, + table_name) + INNER JOIN + `%1$s.%2$s.INFORMATION_SCHEMA.CONSTRAINT_COLUMN_USAGE` CCU + USING + (constraint_catalog, + constraint_schema, + constraint_name) + WHERE constraint_type = 'FOREIGN KEY') FOREIGN + INNER JOIN (SELECT * + FROM `%1$s.%2$s.INFORMATION_SCHEMA.KEY_COLUMN_USAGE` + WHERE position_in_unique_constraint IS NULL + AND RTRIM(table_name) = '%3$s') PRIMARY +ON + FOREIGN.PKTABLE_CAT = PRIMARY.table_catalog + AND FOREIGN.PKTABLE_SCHEM = PRIMARY.table_schema + AND FOREIGN.PKTABLE_NAME = PRIMARY.table_name + AND FOREIGN.position_in_unique_constraint = + PRIMARY.ordinal_position +ORDER BY FKTABLE_CAT, FKTABLE_SCHEM, FKTABLE_NAME, KEY_SEQ \ No newline at end of file diff --git a/google-cloud-bigquery-jdbc/src/main/resources/com/google/cloud/bigquery/jdbc/DatabaseMetaData_GetImportedKeys.sql b/google-cloud-bigquery-jdbc/src/main/resources/com/google/cloud/bigquery/jdbc/DatabaseMetaData_GetImportedKeys.sql new file mode 100644 index 
000000000..3f4142eb0 --- /dev/null +++ b/google-cloud-bigquery-jdbc/src/main/resources/com/google/cloud/bigquery/jdbc/DatabaseMetaData_GetImportedKeys.sql @@ -0,0 +1,71 @@ +/* + * Copyright 2024 Google LLC + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +SELECT PKTABLE_CAT, + PKTABLE_SCHEM, + PKTABLE_NAME, + PRIMARY.column_name AS PKCOLUMN_NAME, + FOREIGN.constraint_catalog AS FKTABLE_CAT, + FOREIGN.constraint_schema AS FKTABLE_SCHEM, + FOREIGN.table_name AS FKTABLE_NAME, + FOREIGN.column_name AS FKCOLUMN_NAME, + FOREIGN.ordinal_position AS KEY_SEQ, + NULL AS UPDATE_RULE, + NULL AS DELETE_RULE, + FOREIGN.constraint_name AS FK_NAME, + PRIMARY.constraint_name AS PK_NAME, + NULL AS DEFERRABILITY +FROM (SELECT DISTINCT CCU.table_catalog AS PKTABLE_CAT, + CCU.table_schema AS PKTABLE_SCHEM, + CCU.table_name AS PKTABLE_NAME, + TC.constraint_catalog, + TC.constraint_schema, + TC.constraint_name, + TC.table_catalog, + TC.table_schema, + TC.table_name, + TC.constraint_type, + KCU.column_name, + KCU.ordinal_position, + KCU.position_in_unique_constraint + FROM `%1$s.%2$s.INFORMATION_SCHEMA.TABLE_CONSTRAINTS` TC + INNER JOIN + `%1$s.%2$s.INFORMATION_SCHEMA.KEY_COLUMN_USAGE` KCU + USING + (constraint_catalog, + constraint_schema, + constraint_name, + table_catalog, + table_schema, + table_name) + INNER JOIN + `%1$s.%2$s.INFORMATION_SCHEMA.CONSTRAINT_COLUMN_USAGE` CCU + USING + (constraint_catalog, + constraint_schema, + constraint_name) + WHERE constraint_type = 
'FOREIGN KEY' + AND TC.table_name = '%3$s') FOREIGN + INNER JOIN (SELECT * + FROM `%1$s.%2$s.INFORMATION_SCHEMA.KEY_COLUMN_USAGE` + WHERE position_in_unique_constraint IS NULL) PRIMARY +ON + FOREIGN.PKTABLE_CAT = PRIMARY.table_catalog + AND FOREIGN.PKTABLE_SCHEM = PRIMARY.table_schema + AND FOREIGN.PKTABLE_NAME = PRIMARY.table_name + AND FOREIGN.position_in_unique_constraint = + PRIMARY.ordinal_position +ORDER BY PKTABLE_CAT, PKTABLE_SCHEM, PKTABLE_NAME, KEY_SEQ \ No newline at end of file diff --git a/google-cloud-bigquery-jdbc/src/main/resources/com/google/cloud/bigquery/jdbc/DatabaseMetaData_GetPrimaryKeys.sql b/google-cloud-bigquery-jdbc/src/main/resources/com/google/cloud/bigquery/jdbc/DatabaseMetaData_GetPrimaryKeys.sql new file mode 100644 index 000000000..282910fb9 --- /dev/null +++ b/google-cloud-bigquery-jdbc/src/main/resources/com/google/cloud/bigquery/jdbc/DatabaseMetaData_GetPrimaryKeys.sql @@ -0,0 +1,30 @@ +/* + * Copyright 2024 Google LLC + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +SELECT table_catalog AS TABLE_CAT, + table_schema AS TABLE_SCHEM, + table_name AS TABLE_NAME, + column_name AS COLUMN_NAME, + ordinal_position AS KEY_SEQ, + constraint_name AS PK_NAME +FROM + %s.%s.INFORMATION_SCHEMA.KEY_COLUMN_USAGE +WHERE + table_name = '%s' + AND CONTAINS_SUBSTR(constraint_name + , 'pk$') +ORDER BY + COLUMN_NAME; diff --git a/google-cloud-bigquery-jdbc/src/main/resources/com/google/cloud/bigquery/jdbc/dependencies.properties b/google-cloud-bigquery-jdbc/src/main/resources/com/google/cloud/bigquery/jdbc/dependencies.properties new file mode 100644 index 000000000..6908cb6cc --- /dev/null +++ b/google-cloud-bigquery-jdbc/src/main/resources/com/google/cloud/bigquery/jdbc/dependencies.properties @@ -0,0 +1,4 @@ +# Versions of oneself +# {x-version-update-start:google-cloud-bigquery-jdbc:current} +version.jdbc=${project.version} +# {x-version-update-end} \ No newline at end of file diff --git a/google-cloud-bigquery-jdbc/src/test/java/com/google/cloud/bigquery/jdbc/ArrowFormatTypeBigQueryCoercionUtilityTest.java b/google-cloud-bigquery-jdbc/src/test/java/com/google/cloud/bigquery/jdbc/ArrowFormatTypeBigQueryCoercionUtilityTest.java new file mode 100644 index 000000000..0524fc87d --- /dev/null +++ b/google-cloud-bigquery-jdbc/src/test/java/com/google/cloud/bigquery/jdbc/ArrowFormatTypeBigQueryCoercionUtilityTest.java @@ -0,0 +1,234 @@ +/* + * Copyright 2023 Google LLC + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package com.google.cloud.bigquery.jdbc; + +import static com.google.cloud.bigquery.jdbc.BigQueryTypeCoercionUtility.INSTANCE; +import static com.google.common.truth.Truth.assertThat; +import static java.time.Month.FEBRUARY; +import static java.time.Month.JANUARY; + +import com.google.cloud.bigquery.FieldElementType; +import com.google.cloud.bigquery.Range; +import com.google.cloud.bigquery.jdbc.rules.TimeZoneRule; +import java.math.BigDecimal; +import java.sql.Date; +import java.sql.Time; +import java.sql.Timestamp; +import java.time.Duration; +import java.time.LocalDate; +import java.time.LocalDateTime; +import java.time.Period; +import org.apache.arrow.vector.PeriodDuration; +import org.apache.arrow.vector.util.JsonStringArrayList; +import org.apache.arrow.vector.util.JsonStringHashMap; +import org.apache.arrow.vector.util.Text; +import org.junit.Rule; +import org.junit.Test; + +public class ArrowFormatTypeBigQueryCoercionUtilityTest { + + @Rule public final TimeZoneRule timeZoneRule = new TimeZoneRule("UTC"); + + private static final Range RANGE_DATE = + Range.newBuilder() + .setType(FieldElementType.newBuilder().setType("DATE").build()) + .setStart("1970-01-02") + .setEnd("1970-03-04") + .build(); + + private static final Range RANGE_DATETIME = + Range.newBuilder() + .setType(FieldElementType.newBuilder().setType("DATETIME").build()) + .setStart("2014-08-19 05:41:35.220000") + .setEnd("2015-09-20 06:41:35.220000") + .build(); + + private static final Range RANGE_TIMESTAMP = + Range.newBuilder() + .setType(FieldElementType.newBuilder().setType("TIMESTAMP").build()) + .setStart("2014-08-19 12:41:35.220000+00:00") + .setEnd("2015-09-20 13:41:35.220000+01:00") + .build(); + + @Test + public void nullToString() { + assertThat(INSTANCE.coerceTo(String.class, null)).isNull(); + } + + @Test + public void JsonStringArrayListToString() { + JsonStringArrayList employeeList = new JsonStringArrayList<>(); + employeeList.add(1); + employeeList.add(2); + 
employeeList.add(3); + + assertThat(INSTANCE.coerceTo(String.class, employeeList)).isEqualTo("[1,2,3]"); + } + + @Test + public void localDateTimeToTimestamp() { + LocalDateTime localDatetime = LocalDateTime.of(1995, FEBRUARY, 23, 20, 15); + + assertThat(INSTANCE.coerceTo(Timestamp.class, localDatetime)) + .isEqualTo(Timestamp.valueOf(localDatetime)); + } + + @Test + public void textToString() { + Text text = new Text("Hello World!"); + + assertThat(INSTANCE.coerceTo(String.class, text)).isEqualTo("Hello World!"); + } + + @Test + public void nullToInteger() { + assertThat(INSTANCE.coerceTo(Integer.class, null)).isEqualTo(0); + } + + @Test + public void textToInteger() { + Text text = new Text("51423"); + + assertThat(INSTANCE.coerceTo(Integer.class, text)).isEqualTo(51423); + } + + @Test + public void longToInteger() { + assertThat(INSTANCE.coerceTo(Integer.class, 56L)).isEqualTo(56); + } + + @Test + public void bigDecimalToInteger() { + assertThat(INSTANCE.coerceTo(Integer.class, new BigDecimal("56"))).isEqualTo(56); + } + + @Test + public void nullToLong() { + assertThat(INSTANCE.coerceTo(Long.class, null)).isEqualTo(0L); + } + + @Test + public void bigDecimalToLong() { + assertThat(INSTANCE.coerceTo(Long.class, new BigDecimal("56"))).isEqualTo(56L); + } + + @Test + public void nullToDouble() { + assertThat(INSTANCE.coerceTo(Double.class, null)).isEqualTo(0D); + } + + @Test + public void bigDecimalToDouble() { + assertThat(INSTANCE.coerceTo(Double.class, new BigDecimal("56"))).isEqualTo(56D); + } + + @Test + public void nullToBoolean() { + assertThat(INSTANCE.coerceTo(Boolean.class, null)).isFalse(); + } + + @Test + public void nullToByteArray() { + assertThat(INSTANCE.coerceTo(byte[].class, null)).isNull(); + } + + @Test + public void nullToTimestamp() { + assertThat(INSTANCE.coerceTo(Timestamp.class, null)).isNull(); + } + + @Test + public void longToTimestamp() { + assertThat(INSTANCE.coerceTo(Timestamp.class, 1408452095220000L)) + .isEqualTo(new 
Timestamp(1408452095220L)); + } + + @Test + public void nullToTime() { + assertThat(INSTANCE.coerceTo(Time.class, null)).isNull(); + } + + @Test + public void longToTime() { + assertThat(INSTANCE.coerceTo(Time.class, 1408452095220000L)) + .isEqualTo(new Time(1408452095000L)); + } + + @Test + public void nullToDate() { + assertThat(INSTANCE.coerceTo(Date.class, null)).isNull(); + } + + @Test + public void integerToDate() { + LocalDate expectedDate = LocalDate.of(2022, JANUARY, 1); + assertThat(INSTANCE.coerceTo(Date.class, 18993).toLocalDate()).isEqualTo(expectedDate); + } + + @Test + public void periodDurationToString() { + Period period = Period.of(1, 3, 24); + Duration duration = Duration.ofHours(3).plusMinutes(45).plusSeconds(23).plusNanos(123456000); + PeriodDuration periodDuration = new PeriodDuration(period, duration); + assertThat(INSTANCE.coerceTo(String.class, periodDuration)).isEqualTo("1-3 24 3:45:23.123456"); + + Period period2 = Period.of(1, 6, -8); + Duration duration2 = Duration.ofHours(9).plusMinutes(43).plusSeconds(23).plusNanos(123456000); + PeriodDuration periodDuration2 = new PeriodDuration(period2, duration2); + assertThat(INSTANCE.coerceTo(String.class, periodDuration2)).isEqualTo("1-6 -8 9:43:23.123456"); + } + + // Range tests + + @Test + public void JsonStringHashMapToString() { + JsonStringHashMap employeeMap = new JsonStringHashMap<>(); + employeeMap.putIfAbsent("name1", "type1"); + employeeMap.putIfAbsent("name2", "type2"); + employeeMap.putIfAbsent("name3", "type3"); + + assertThat(INSTANCE.coerceTo(String.class, employeeMap)) + .isEqualTo("{\"name1\":\"type1\",\"name2\":\"type2\",\"name3\":\"type3\"}"); + } + + @Test + public void rangeDateToString() { + String expectedRangeDate = + String.format( + "[%s, %s)", + RANGE_DATE.getStart().getStringValue(), RANGE_DATE.getEnd().getStringValue()); + assertThat(INSTANCE.coerceTo(String.class, RANGE_DATE)).isEqualTo(expectedRangeDate); + } + + @Test + public void rangeDatetimeToString() { + 
String expectedRangeDate = + String.format( + "[%s, %s)", + RANGE_DATETIME.getStart().getStringValue(), RANGE_DATETIME.getEnd().getStringValue()); + assertThat(INSTANCE.coerceTo(String.class, RANGE_DATETIME)).isEqualTo(expectedRangeDate); + } + + @Test + public void rangeTimestampToString() { + String expectedRangeTimestamp = + String.format( + "[%s, %s)", + RANGE_TIMESTAMP.getStart().getStringValue(), RANGE_TIMESTAMP.getEnd().getStringValue()); + assertThat(INSTANCE.coerceTo(String.class, RANGE_TIMESTAMP)).isEqualTo(expectedRangeTimestamp); + } +} diff --git a/google-cloud-bigquery-jdbc/src/test/java/com/google/cloud/bigquery/jdbc/BigQueryArrowArrayOfPrimitivesTest.java b/google-cloud-bigquery-jdbc/src/test/java/com/google/cloud/bigquery/jdbc/BigQueryArrowArrayOfPrimitivesTest.java new file mode 100644 index 000000000..5b33fda78 --- /dev/null +++ b/google-cloud-bigquery-jdbc/src/test/java/com/google/cloud/bigquery/jdbc/BigQueryArrowArrayOfPrimitivesTest.java @@ -0,0 +1,367 @@ +/* + * Copyright 2023 Google LLC + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package com.google.cloud.bigquery.jdbc; + +import static com.google.cloud.bigquery.StandardSQLTypeName.BIGNUMERIC; +import static com.google.cloud.bigquery.StandardSQLTypeName.BOOL; +import static com.google.cloud.bigquery.StandardSQLTypeName.BYTES; +import static com.google.cloud.bigquery.StandardSQLTypeName.DATE; +import static com.google.cloud.bigquery.StandardSQLTypeName.DATETIME; +import static com.google.cloud.bigquery.StandardSQLTypeName.FLOAT64; +import static com.google.cloud.bigquery.StandardSQLTypeName.GEOGRAPHY; +import static com.google.cloud.bigquery.StandardSQLTypeName.INT64; +import static com.google.cloud.bigquery.StandardSQLTypeName.NUMERIC; +import static com.google.cloud.bigquery.StandardSQLTypeName.STRING; +import static com.google.cloud.bigquery.StandardSQLTypeName.TIME; +import static com.google.cloud.bigquery.StandardSQLTypeName.TIMESTAMP; +import static com.google.cloud.bigquery.jdbc.BigQueryErrorMessage.CUSTOMER_TYPE_MAPPING_NOT_SUPPORTED; +import static com.google.cloud.bigquery.jdbc.BigQueryErrorMessage.INVALID_ARRAY; +import static com.google.cloud.bigquery.jdbc.utils.TestUtilities.arrowArraySchemaAndValue; +import static com.google.cloud.bigquery.jdbc.utils.TestUtilities.nestedResultSetToColumnLists; +import static com.google.common.truth.Truth.assertThat; +import static java.lang.Boolean.FALSE; +import static java.lang.Boolean.TRUE; +import static java.time.Month.MARCH; +import static java.util.Arrays.copyOfRange; +import static java.util.Collections.emptyMap; +import static org.junit.Assert.assertThrows; + +import com.google.cloud.Tuple; +import com.google.cloud.bigquery.Field; +import com.google.cloud.bigquery.StandardSQLTypeName; +import com.google.cloud.bigquery.jdbc.rules.TimeZoneRule; +import java.math.BigDecimal; +import java.sql.Array; +import java.sql.Date; +import java.sql.ResultSet; +import java.sql.SQLException; +import java.sql.SQLFeatureNotSupportedException; +import java.sql.Time; +import java.sql.Timestamp; 
+import java.sql.Types; +import java.time.LocalDate; +import java.time.LocalDateTime; +import java.time.LocalTime; +import java.util.ArrayList; +import java.util.Arrays; +import java.util.Collection; +import java.util.stream.Stream; +import org.apache.arrow.vector.util.JsonStringArrayList; +import org.apache.arrow.vector.util.Text; +import org.junit.Before; +import org.junit.ClassRule; +import org.junit.Test; +import org.junit.function.ThrowingRunnable; +import org.junit.runner.RunWith; +import org.junit.runners.Parameterized; +import org.junit.runners.Parameterized.Parameters; + +@RunWith(Parameterized.class) +public class BigQueryArrowArrayOfPrimitivesTest { + + private final Field schema; + private final JsonStringArrayList arrayValues; + private final Object[] expected; + private final int javaSqlTypeCode; + private Array array; + private final StandardSQLTypeName currentType; + + @ClassRule public static final TimeZoneRule timeZoneRule = new TimeZoneRule("UTC"); + + public BigQueryArrowArrayOfPrimitivesTest( + StandardSQLTypeName currentType, + Tuple> schemaAndValue, + Object[] expected, + int javaSqlTypeCode) { + this.currentType = currentType; + this.schema = schemaAndValue.x(); + this.arrayValues = schemaAndValue.y(); + this.expected = expected; + this.javaSqlTypeCode = javaSqlTypeCode; + } + + @Before + public void setUp() { + array = new BigQueryArrowArray(this.schema, this.arrayValues); + } + + @Parameters(name = "{index}: primitive array of {0}") + public static Collection data() { + timeZoneRule.enforce(); + LocalDateTime aTimeStamp = LocalDateTime.of(2023, MARCH, 30, 11, 14, 19, 820227000); + LocalDate aDate = LocalDate.of(2023, MARCH, 30); + LocalTime aTime = LocalTime.of(11, 14, 19, 820227); + return Arrays.asList( + new Object[][] { + { + INT64, + arrowArraySchemaAndValue(INT64, 10L, 20L, 30L, 40L), + new Long[] {10L, 20L, 30L, 40L}, + Types.BIGINT + }, + { + BOOL, + arrowArraySchemaAndValue(BOOL, TRUE, FALSE, FALSE, TRUE), + new Boolean[] {true, 
false, false, true}, + Types.BOOLEAN + }, + { + FLOAT64, + arrowArraySchemaAndValue( + FLOAT64, + Double.valueOf("11.2"), + Double.valueOf("33.4"), + Double.valueOf("55.6"), + Double.valueOf("77.8")), + new Double[] {11.2, 33.4, 55.6, 77.8}, + Types.DOUBLE + }, + { + NUMERIC, + arrowArraySchemaAndValue( + NUMERIC, + new BigDecimal("11.2657"), + new BigDecimal("33.4657"), + new BigDecimal("55.6657"), + new BigDecimal("77.8657")), + new BigDecimal[] { + new BigDecimal("11.2657"), + new BigDecimal("33.4657"), + new BigDecimal("55.6657"), + new BigDecimal("77.8657") + }, + Types.NUMERIC + }, + { + BIGNUMERIC, + arrowArraySchemaAndValue( + BIGNUMERIC, + new BigDecimal("11.2657"), + new BigDecimal("33.4657"), + new BigDecimal("55.6657"), + new BigDecimal("77.8657")), + new BigDecimal[] { + new BigDecimal("11.2657"), + new BigDecimal("33.4657"), + new BigDecimal("55.6657"), + new BigDecimal("77.8657") + }, + Types.NUMERIC + }, + { + STRING, + arrowArraySchemaAndValue( + STRING, new Text("one"), new Text("two"), new Text("three"), new Text("four")), + new String[] {"one", "two", "three", "four"}, + Types.NVARCHAR + }, + { + TIMESTAMP, + arrowArraySchemaAndValue( + TIMESTAMP, + Long.valueOf("1680174859820227"), + Long.valueOf("1680261259820227"), + Long.valueOf("1680347659820227"), + Long.valueOf("1680434059820227")), + new Timestamp[] { + Timestamp.valueOf(aTimeStamp), // 2023-03-30 16:44:19.82 + Timestamp.valueOf(aTimeStamp.plusDays(1)), + Timestamp.valueOf(aTimeStamp.plusDays(2)), + Timestamp.valueOf(aTimeStamp.plusDays(3)) + }, + Types.TIMESTAMP + }, + { + DATE, + arrowArraySchemaAndValue(DATE, 19446, 19447, 19448, 19449), + new Date[] { + Date.valueOf(aDate), + Date.valueOf(aDate.plusDays(1)), + Date.valueOf(aDate.plusDays(2)), + Date.valueOf(aDate.plusDays(3)) + }, + Types.DATE + }, + { + TIME, + arrowArraySchemaAndValue( + TIME, + Long.valueOf("40459820227"), // 11:14:19.820227 + Long.valueOf("40460820227"), + Long.valueOf("40461820227"), + 
Long.valueOf("40462820227")), + new Time[] { + Time.valueOf(aTime), + Time.valueOf(aTime.plusSeconds(1)), + Time.valueOf(aTime.plusSeconds(2)), + Time.valueOf(aTime.plusSeconds(3)) + }, + Types.TIME + }, + { + DATETIME, + arrowArraySchemaAndValue( + DATETIME, + LocalDateTime.parse("2023-03-30T11:14:19.820227"), + LocalDateTime.parse("2023-03-30T11:15:19.820227"), + LocalDateTime.parse("2023-03-30T11:16:19.820227"), + LocalDateTime.parse("2023-03-30T11:17:19.820227")), + new Timestamp[] { + Timestamp.valueOf(LocalDateTime.parse("2023-03-30T11:14:19.820227")), + Timestamp.valueOf(LocalDateTime.parse("2023-03-30T11:15:19.820227")), + Timestamp.valueOf(LocalDateTime.parse("2023-03-30T11:16:19.820227")), + Timestamp.valueOf(LocalDateTime.parse("2023-03-30T11:17:19.820227")) + }, + Types.TIMESTAMP + }, + { + GEOGRAPHY, + arrowArraySchemaAndValue( + GEOGRAPHY, + new Text("POINT(-122 47)"), + new Text("POINT(-122 48)"), + new Text("POINT(-121 47)"), + new Text("POINT(-123 48)")), + new String[] {"POINT(-122 47)", "POINT(-122 48)", "POINT(-121 47)", "POINT(-123 48)"}, + Types.OTHER + }, + { + BYTES, + arrowArraySchemaAndValue( + BYTES, + Stream.of("one", "two", "three", "four") + .map(String::getBytes) + .toArray(byte[][]::new)), // array of bytes array + new byte[][] { + "one".getBytes(), "two".getBytes(), "three".getBytes(), "four".getBytes() + }, + Types.VARBINARY + } + }); + } + + @Test + public void getArray() throws SQLException { + assertThat(array.getArray()).isEqualTo(this.expected); + } + + @Test + public void getSlicedArray() throws SQLException { + int fromIndex = 1; + int toIndexExclusive = 3; + Object[] expectedSlicedArray = + copyOfRange(this.expected, fromIndex, toIndexExclusive); // copying index(1,2) + + // the first element is at index 1 + assertThat(array.getArray(fromIndex + 1, 2)).isEqualTo(expectedSlicedArray); + } + + @Test + public void getSlicedArrayWhenCountIsGreaterThanOriginalArrayLength() { + IllegalArgumentException illegalArgumentException = 
+ assertThrows(IllegalArgumentException.class, () -> array.getArray(2, 10)); + assertThat(illegalArgumentException.getMessage()) + .isEqualTo("The array index is out of range: 12, number of elements: 4."); + } + + @Test + public void getResultSet() throws SQLException { + ResultSet resultSet = this.array.getResultSet(); + Tuple, ArrayList> indexAndValues = + nestedResultSetToColumnLists(resultSet); + ArrayList indexList = indexAndValues.x(); + ArrayList columnValues = indexAndValues.y(); + + assertThat(indexList.toArray()).isEqualTo(new Object[] {1, 2, 3, 4}); + assertThat(columnValues.toArray()).isEqualTo(this.expected); + } + + @Test + public void getSlicedResultSet() throws SQLException { + int fromIndex = 1; + int toIndexExclusive = 3; + Object[] expectedSlicedArray = + copyOfRange(this.expected, fromIndex, toIndexExclusive); // copying index(1,2) + + // the first element is at index 1 + ResultSet resultSet = array.getResultSet(fromIndex + 1, 2); + + Tuple, ArrayList> indexAndValues = + nestedResultSetToColumnLists(resultSet); + ArrayList indexList = indexAndValues.x(); + ArrayList columnValues = indexAndValues.y(); + + assertThat(indexList.toArray()).isEqualTo(new Object[] {2, 3}); + assertThat(columnValues.toArray()).isEqualTo(expectedSlicedArray); + } + + @Test + public void getSlicedResultSetWhenCountIsGreaterThanOriginalArrayLength() { + IllegalArgumentException illegalArgumentException = + assertThrows(IllegalArgumentException.class, () -> array.getResultSet(2, 10)); + assertThat(illegalArgumentException.getMessage()) + .isEqualTo("The array index is out of range: 12, number of elements: 4."); + } + + @Test + public void getBaseTypeName() throws SQLException { + assertThat(array.getBaseTypeName()).isEqualTo(this.currentType.name()); + } + + @Test + public void getBaseType() throws SQLException { + assertThat(array.getBaseType()).isEqualTo(this.javaSqlTypeCode); + } + + @Test + public void free() throws SQLException { + this.array.free(); + + 
ensureArrayIsInvalid(() -> array.getArray()); + ensureArrayIsInvalid(() -> array.getArray(1, 2)); + ensureArrayIsInvalid(() -> array.getResultSet()); + ensureArrayIsInvalid(() -> array.getResultSet(1, 2)); + ensureArrayIsInvalid(() -> array.getBaseTypeName()); + ensureArrayIsInvalid(() -> array.getBaseType()); + } + + @Test + public void getArrayWithCustomTypeMappingsIsNotSupported() { + Exception exception1 = + assertThrows(SQLFeatureNotSupportedException.class, () -> array.getArray(emptyMap())); + Exception exception2 = + assertThrows(SQLFeatureNotSupportedException.class, () -> array.getArray(1, 2, emptyMap())); + assertThat(exception1.getMessage()).isEqualTo(CUSTOMER_TYPE_MAPPING_NOT_SUPPORTED); + assertThat(exception2.getMessage()).isEqualTo(CUSTOMER_TYPE_MAPPING_NOT_SUPPORTED); + } + + @Test + public void getResultSetWithCustomTypeMappingsIsNotSupported() { + Exception exception1 = + assertThrows(SQLFeatureNotSupportedException.class, () -> array.getResultSet(emptyMap())); + Exception exception2 = + assertThrows( + SQLFeatureNotSupportedException.class, () -> array.getResultSet(1, 2, emptyMap())); + assertThat(exception1.getMessage()).isEqualTo(CUSTOMER_TYPE_MAPPING_NOT_SUPPORTED); + assertThat(exception2.getMessage()).isEqualTo(CUSTOMER_TYPE_MAPPING_NOT_SUPPORTED); + } + + private void ensureArrayIsInvalid(ThrowingRunnable block) { + Exception exception = assertThrows(IllegalStateException.class, block); + assertThat(exception.getMessage()).isEqualTo(INVALID_ARRAY); + } +} diff --git a/google-cloud-bigquery-jdbc/src/test/java/com/google/cloud/bigquery/jdbc/BigQueryArrowArrayOfStructTest.java b/google-cloud-bigquery-jdbc/src/test/java/com/google/cloud/bigquery/jdbc/BigQueryArrowArrayOfStructTest.java new file mode 100644 index 000000000..7cb84e70a --- /dev/null +++ b/google-cloud-bigquery-jdbc/src/test/java/com/google/cloud/bigquery/jdbc/BigQueryArrowArrayOfStructTest.java @@ -0,0 +1,205 @@ +/* + * Copyright 2023 Google LLC + * + * Licensed under the Apache 
License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package com.google.cloud.bigquery.jdbc; + +import static com.google.cloud.bigquery.LegacySQLTypeName.RECORD; +import static com.google.cloud.bigquery.StandardSQLTypeName.BOOL; +import static com.google.cloud.bigquery.StandardSQLTypeName.INT64; +import static com.google.cloud.bigquery.StandardSQLTypeName.STRING; +import static com.google.cloud.bigquery.jdbc.BigQueryErrorMessage.CUSTOMER_TYPE_MAPPING_NOT_SUPPORTED; +import static com.google.cloud.bigquery.jdbc.BigQueryErrorMessage.INVALID_ARRAY; +import static com.google.cloud.bigquery.jdbc.utils.TestUtilities.arrowArrayOf; +import static com.google.cloud.bigquery.jdbc.utils.TestUtilities.arrowStructOf; +import static com.google.cloud.bigquery.jdbc.utils.TestUtilities.nestedResultSetToColumnLists; +import static com.google.common.truth.Truth.assertThat; +import static java.lang.Boolean.FALSE; +import static java.lang.Boolean.TRUE; +import static java.util.Arrays.asList; +import static java.util.Collections.emptyMap; +import static org.junit.Assert.assertThrows; + +import com.google.cloud.Tuple; +import com.google.cloud.bigquery.Field; +import com.google.cloud.bigquery.Field.Mode; +import com.google.cloud.bigquery.FieldList; +import com.google.cloud.bigquery.LegacySQLTypeName; +import com.google.cloud.bigquery.StandardSQLTypeName; +import java.sql.Array; +import java.sql.ResultSet; +import java.sql.SQLException; +import java.sql.SQLFeatureNotSupportedException; +import java.sql.Struct; 
+import java.sql.Types; +import java.util.ArrayList; +import org.apache.arrow.vector.util.JsonStringArrayList; +import org.apache.arrow.vector.util.JsonStringHashMap; +import org.apache.arrow.vector.util.Text; +import org.junit.Before; +import org.junit.Test; +import org.junit.function.ThrowingRunnable; + +public class BigQueryArrowArrayOfStructTest { + + private Array array; + + @Before + public void setUp() { + FieldList profileSchema = + FieldList.of( + Field.newBuilder("name", LegacySQLTypeName.STRING).build(), + Field.newBuilder("age", LegacySQLTypeName.INTEGER).build(), + Field.newBuilder("adult", LegacySQLTypeName.BOOLEAN).build()); + + JsonStringHashMap record1 = + arrowStructOf( + Tuple.of(STRING, new Text("Arya")), Tuple.of(INT64, 15L), Tuple.of(BOOL, FALSE)) + .y(); + JsonStringHashMap record2 = + arrowStructOf( + Tuple.of(STRING, new Text("Khal Drogo")), + Tuple.of(INT64, 35L), + Tuple.of(BOOL, TRUE)) + .y(); + JsonStringHashMap record3 = + arrowStructOf( + Tuple.of(STRING, new Text("Ned Stark")), Tuple.of(INT64, 45L), Tuple.of(BOOL, TRUE)) + .y(); + JsonStringHashMap record4 = + arrowStructOf( + Tuple.of(STRING, new Text("Jon Snow")), Tuple.of(INT64, 25L), Tuple.of(BOOL, TRUE)) + .y(); + + Field arrayOfStructSchema = + Field.newBuilder("profiles", RECORD, profileSchema).setMode(Mode.REPEATED).build(); + + JsonStringArrayList> arrayOfStructValue = + arrowArrayOf(record1, record2, record3, record4); + array = new BigQueryArrowArray(arrayOfStructSchema, arrayOfStructValue); + } + + @Test + public void getArray() throws SQLException { + Struct[] structArray = (Struct[]) array.getArray(); + + assertThat(structArray.length).isEqualTo(4); + assertThat(structArray[0].getAttributes()).isEqualTo(asList("Arya", 15L, false).toArray()); + assertThat(structArray[1].getAttributes()).isEqualTo(asList("Khal Drogo", 35L, true).toArray()); + assertThat(structArray[2].getAttributes()).isEqualTo(asList("Ned Stark", 45L, true).toArray()); + 
assertThat(structArray[3].getAttributes()).isEqualTo(asList("Jon Snow", 25L, true).toArray()); + } + + @Test + public void getSlicedArray() throws SQLException { + Struct[] structArray = (Struct[]) array.getArray(2, 2); + + assertThat(structArray.length).isEqualTo(2); + assertThat(structArray[0].getAttributes()).isEqualTo(asList("Khal Drogo", 35L, true).toArray()); + assertThat(structArray[1].getAttributes()).isEqualTo(asList("Ned Stark", 45L, true).toArray()); + } + + @Test + public void getSlicedArrayWhenCountIsGreaterThanOriginalArrayLength() { + IllegalArgumentException illegalArgumentException = + assertThrows(IllegalArgumentException.class, () -> array.getArray(2, 10)); + assertThat(illegalArgumentException.getMessage()) + .isEqualTo("The array index is out of range: 12, number of elements: 4."); + } + + @Test + public void getResultSet() throws SQLException { + ResultSet resultSet = array.getResultSet(); + Tuple, ArrayList> indexAndValues = + nestedResultSetToColumnLists(resultSet); + + ArrayList indexList = indexAndValues.x(); + ArrayList structs = indexAndValues.y(); + + assertThat(indexList.toArray()).isEqualTo(new Object[] {1, 2, 3, 4}); + assertThat(structs.get(0).getAttributes()).isEqualTo(asList("Arya", 15L, false).toArray()); + assertThat(structs.get(1).getAttributes()).isEqualTo(asList("Khal Drogo", 35L, true).toArray()); + assertThat(structs.get(2).getAttributes()).isEqualTo(asList("Ned Stark", 45L, true).toArray()); + assertThat(structs.get(3).getAttributes()).isEqualTo(asList("Jon Snow", 25L, true).toArray()); + } + + @Test + public void getSlicedResultSet() throws SQLException { + ResultSet resultSet = array.getResultSet(2, 2); + Tuple, ArrayList> indexAndValues = + nestedResultSetToColumnLists(resultSet); + + ArrayList indexList = indexAndValues.x(); + ArrayList structs = indexAndValues.y(); + + assertThat(indexList.toArray()).isEqualTo(new Object[] {2, 3}); + assertThat(structs.get(0).getAttributes()).isEqualTo(asList("Khal Drogo", 35L, 
true).toArray()); + assertThat(structs.get(1).getAttributes()).isEqualTo(asList("Ned Stark", 45L, true).toArray()); + } + + @Test + public void getResultSetWhenCountIsGreaterThanOriginalArrayLength() { + IllegalArgumentException illegalArgumentException = + assertThrows(IllegalArgumentException.class, () -> array.getResultSet(2, 10)); + assertThat(illegalArgumentException.getMessage()) + .isEqualTo("The array index is out of range: 12, number of elements: 4."); + } + + @Test + public void getBaseTypeName() throws SQLException { + assertThat(array.getBaseTypeName()).isEqualTo(StandardSQLTypeName.STRUCT.name()); + } + + @Test + public void getBaseType() throws SQLException { + assertThat(array.getBaseType()).isEqualTo(Types.STRUCT); + } + + @Test + public void free() throws SQLException { + this.array.free(); + + ensureArrayIsInvalid(() -> array.getArray()); + ensureArrayIsInvalid(() -> array.getArray(1, 2)); + ensureArrayIsInvalid(() -> array.getBaseTypeName()); + ensureArrayIsInvalid(() -> array.getBaseType()); + } + + @Test + public void getArrayWithCustomTypeMappingsIsNotSupported() { + Exception exception1 = + assertThrows(SQLFeatureNotSupportedException.class, () -> array.getArray(emptyMap())); + Exception exception2 = + assertThrows(SQLFeatureNotSupportedException.class, () -> array.getArray(1, 2, emptyMap())); + assertThat(exception1.getMessage()).isEqualTo(CUSTOMER_TYPE_MAPPING_NOT_SUPPORTED); + assertThat(exception2.getMessage()).isEqualTo(CUSTOMER_TYPE_MAPPING_NOT_SUPPORTED); + } + + @Test + public void getResultSetWithCustomTypeMappingsIsNotSupported() { + Exception exception1 = + assertThrows(SQLFeatureNotSupportedException.class, () -> array.getResultSet(emptyMap())); + Exception exception2 = + assertThrows( + SQLFeatureNotSupportedException.class, () -> array.getResultSet(1, 2, emptyMap())); + assertThat(exception1.getMessage()).isEqualTo(CUSTOMER_TYPE_MAPPING_NOT_SUPPORTED); + 
assertThat(exception2.getMessage()).isEqualTo(CUSTOMER_TYPE_MAPPING_NOT_SUPPORTED); + } + + private void ensureArrayIsInvalid(ThrowingRunnable block) { + Exception exception = assertThrows(IllegalStateException.class, block); + assertThat(exception.getMessage()).isEqualTo(INVALID_ARRAY); + } +} diff --git a/google-cloud-bigquery-jdbc/src/test/java/com/google/cloud/bigquery/jdbc/BigQueryArrowResultSetTest.java b/google-cloud-bigquery-jdbc/src/test/java/com/google/cloud/bigquery/jdbc/BigQueryArrowResultSetTest.java new file mode 100644 index 000000000..efde49309 --- /dev/null +++ b/google-cloud-bigquery-jdbc/src/test/java/com/google/cloud/bigquery/jdbc/BigQueryArrowResultSetTest.java @@ -0,0 +1,358 @@ +/* + * Copyright 2023 Google LLC + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package com.google.cloud.bigquery.jdbc; + +import static com.google.cloud.bigquery.jdbc.utils.ArrowUtilities.serializeSchema; +import static com.google.cloud.bigquery.jdbc.utils.ArrowUtilities.serializeVectorSchemaRoot; +import static com.google.common.truth.Truth.assertThat; +import static org.apache.arrow.vector.types.Types.MinorType.INT; +import static org.apache.arrow.vector.types.Types.MinorType.VARCHAR; +import static org.mockito.Mockito.mock; + +import com.google.cloud.bigquery.Field; +import com.google.cloud.bigquery.Field.Mode; +import com.google.cloud.bigquery.FieldList; +import com.google.cloud.bigquery.Schema; +import com.google.cloud.bigquery.StandardSQLTypeName; +import com.google.cloud.bigquery.storage.v1.ArrowRecordBatch; +import com.google.cloud.bigquery.storage.v1.ArrowSchema; +import com.google.common.collect.ImmutableList; +import java.io.IOException; +import java.sql.Array; +import java.sql.ResultSet; +import java.sql.SQLException; +import java.sql.Struct; +import java.util.Arrays; +import java.util.List; +import java.util.concurrent.BlockingQueue; +import java.util.concurrent.LinkedBlockingDeque; +import org.apache.arrow.memory.RootAllocator; +import org.apache.arrow.vector.BitVector; +import org.apache.arrow.vector.DateMilliVector; +import org.apache.arrow.vector.FieldVector; +import org.apache.arrow.vector.Float8Vector; +import org.apache.arrow.vector.IntVector; +import org.apache.arrow.vector.TimeMilliVector; +import org.apache.arrow.vector.TimeStampMicroVector; +import org.apache.arrow.vector.VarBinaryVector; +import org.apache.arrow.vector.VarCharVector; +import org.apache.arrow.vector.VectorSchemaRoot; +import org.apache.arrow.vector.complex.ListVector; +import org.apache.arrow.vector.complex.StructVector; +import org.apache.arrow.vector.complex.impl.UnionListWriter; +import org.apache.arrow.vector.types.pojo.FieldType; +import org.apache.arrow.vector.util.JsonStringArrayList; +import org.apache.arrow.vector.util.Text; +import 
org.junit.Before; +import org.junit.Test; + +public class BigQueryArrowResultSetTest { + + private static final FieldList fieldList = + FieldList.of( + Field.of("boolField", StandardSQLTypeName.BOOL), + Field.of("int64Filed", StandardSQLTypeName.INT64), + Field.of("float64Field", StandardSQLTypeName.FLOAT64), + Field.of("stringField", StandardSQLTypeName.STRING), + Field.of("timeStampField", StandardSQLTypeName.TIMESTAMP), + Field.of("bytesField", StandardSQLTypeName.BYTES), + Field.newBuilder("intArrayField", StandardSQLTypeName.INT64) + .setMode(Field.Mode.REPEATED) + .build(), + Field.of( + "structField", + StandardSQLTypeName.STRUCT, + Field.of("name", StandardSQLTypeName.STRING), + Field.of("age", StandardSQLTypeName.INT64)), + Field.of("numericField", StandardSQLTypeName.BIGNUMERIC), + Field.of("timeField", StandardSQLTypeName.TIME), + Field.of("dateField", StandardSQLTypeName.DATE)); + + private BigQueryArrowBatchWrapper arrowBatchWrapper; + private BigQueryArrowBatchWrapper arrowBatchWrapperLast; + + private BigQueryStatement statement; + + private BlockingQueue buffer; + private BlockingQueue bufferWithTwoRows; + private static final Schema QUERY_SCHEMA = Schema.of(fieldList); + + private VectorSchemaRoot vectorSchemaRoot; + private BigQueryArrowResultSet bigQueryArrowResultSet; + private BigQueryArrowResultSet bigQueryArrowResultSetNested; + + private VectorSchemaRoot getTestVectorSchemaRoot() { + RootAllocator allocator = new RootAllocator(); + BitVector boolField = + new BitVector("boolField", allocator); // Mapped with StandardSQLTypeName.BOOL + boolField.allocateNew(2); + boolField.set(0, 0); + boolField.setValueCount(1); + IntVector int64Filed = + new IntVector("int64Filed", allocator); // Mapped with StandardSQLTypeName.INT64 + int64Filed.allocateNew(2); + int64Filed.set(0, 1); + int64Filed.setValueCount(1); + Float8Vector float64Field = + new Float8Vector("float64Field", allocator); // Mapped with StandardSQLTypeName.FLOAT64 + 
float64Field.allocateNew(2); + float64Field.set(0, 1.1f); + float64Field.setValueCount(1); + VarCharVector stringField = + new VarCharVector("stringField", allocator); // Mapped with StandardSQLTypeName.STRING + stringField.allocateNew(2); + stringField.set(0, new Text("text1")); + stringField.setValueCount(1); + TimeStampMicroVector timeStampField = + new TimeStampMicroVector( + "timeStampField", allocator); // Mapped with StandardSQLTypeName.TIMESTAMP + timeStampField.allocateNew(2); + timeStampField.set(0, 10000L); + timeStampField.setValueCount(1); + VarBinaryVector bytesField = + new VarBinaryVector("bytesField", allocator); // Mapped with StandardSQLTypeName.BYTES + bytesField.allocateNew(2); + bytesField.set(0, "text1".getBytes()); + bytesField.setValueCount(1); + + ListVector listVector = ListVector.empty("intArrayField", allocator); + UnionListWriter listWriter = listVector.getWriter(); + listWriter.setPosition(0); + listWriter.startList(); + listWriter.writeBigInt(10L); + listWriter.writeBigInt(20L); + listWriter.setValueCount(2); + listWriter.endList(); + listVector.setValueCount(1); + + StructVector structVector = StructVector.empty("structField", allocator); + VarCharVector nameVector = + structVector.addOrGet( + "name", FieldType.notNullable(VARCHAR.getType()), VarCharVector.class); + IntVector ageVector = + structVector.addOrGet("age", FieldType.notNullable(INT.getType()), IntVector.class); + structVector.allocateNew(); + + nameVector.set(0, new Text("Jon Doe")); + nameVector.setValueCount(1); + + ageVector.set(0, 29); + ageVector.setValueCount(1); + + structVector.setValueCount(1); + structVector.setIndexDefined(0); + + IntVector numericField = + new IntVector("numericField", allocator); // Mapped with StandardSQLTypeName.BIGNUMERIC + numericField.allocateNew(1000); + numericField.set(0, 1); + numericField.setValueCount(1); + TimeMilliVector timeField = + new TimeMilliVector("timeField", allocator); // Mapped with StandardSQLTypeName.TIME + 
timeField.allocateNew(2); + timeField.set(0, 1234); + timeField.setValueCount(1); + DateMilliVector dateField = + new DateMilliVector("dateField", allocator); // Mapped with StandardSQLTypeName.DATE + dateField.allocateNew(2); + dateField.set(0, 5000); + dateField.setValueCount(1); + + List fieldVectors = + ImmutableList.of( + boolField, + int64Filed, + float64Field, + stringField, + timeStampField, + bytesField, + listVector, + structVector, + numericField, + timeField, + dateField); + return new VectorSchemaRoot(fieldVectors); + } + + private JsonStringArrayList getJsonStringArrayList() { + JsonStringArrayList jsonStringArrayList = new JsonStringArrayList<>(); + jsonStringArrayList.addAll(Arrays.asList(10L, 20L)); + return jsonStringArrayList; + } + + @Before + public void setUp() throws SQLException, IOException { + buffer = new LinkedBlockingDeque<>(); + bufferWithTwoRows = new LinkedBlockingDeque<>(); + vectorSchemaRoot = getTestVectorSchemaRoot(); + ArrowRecordBatch batch = + ArrowRecordBatch.newBuilder() + .setSerializedRecordBatch(serializeVectorSchemaRoot(vectorSchemaRoot)) + .build(); + arrowBatchWrapper = BigQueryArrowBatchWrapper.of(batch); + arrowBatchWrapperLast = BigQueryArrowBatchWrapper.of(null, true); // last flag + buffer.add(arrowBatchWrapper); + buffer.add(arrowBatchWrapperLast); + bufferWithTwoRows.add(arrowBatchWrapper); + bufferWithTwoRows.add(arrowBatchWrapperLast); + + statement = mock(BigQueryStatement.class); + ArrowSchema arrowSchema = + ArrowSchema.newBuilder() + .setSerializedSchema(serializeSchema(vectorSchemaRoot.getSchema())) + .build(); + Thread workerThread = new Thread(); + bigQueryArrowResultSet = + BigQueryArrowResultSet.of( + QUERY_SCHEMA, arrowSchema, 1, statement, buffer, workerThread, null); + + // nested result set data setup + JsonStringArrayList jsonStringArrayList = getJsonStringArrayList(); + Schema arraySchema = + Schema.of( + Field.newBuilder("integerArray", StandardSQLTypeName.INT64) + .setMode(Mode.REPEATED) + 
.build()); + bigQueryArrowResultSetNested = + BigQueryArrowResultSet.getNestedResultSet( + arraySchema, + BigQueryArrowBatchWrapper.getNestedFieldValueListWrapper(jsonStringArrayList), + 0, + jsonStringArrayList.size()); + } + + @Test + public void testVectorSchemaRoot() { + assertThat(vectorSchemaRoot).isNotNull(); + assertThat(vectorSchemaRoot.getRowCount()).isEqualTo(1); + } + + @Test + public void testBufferSize() { + assertThat(buffer).isNotNull(); + assertThat(buffer.size()).isEqualTo(2); + } + + @Test + public void testRowCount() throws SQLException, IOException { + ArrowSchema arrowSchema = + ArrowSchema.newBuilder() + .setSerializedSchema(serializeSchema(vectorSchemaRoot.getSchema())) + .build(); + Thread workerThread = new Thread(); + // ResultSet with 1 row buffer and 1 total rows. + BigQueryArrowResultSet bigQueryArrowResultSet2 = + BigQueryArrowResultSet.of( + QUERY_SCHEMA, arrowSchema, 1, statement, buffer, workerThread, null); + + assertThat(resultSetRowCount(bigQueryArrowResultSet2)).isEqualTo(1); + // ResultSet with 2 rows buffer and 1 total rows. 
+ bigQueryArrowResultSet2 = + BigQueryArrowResultSet.of( + QUERY_SCHEMA, arrowSchema, 1, statement, bufferWithTwoRows, workerThread, null); + + assertThat(resultSetRowCount(bigQueryArrowResultSet2)).isEqualTo(1); + } + + @Test + // This method tests iteration and Resultset's type getters + public void testIteration() throws SQLException { + int cnt = 0; + assertThat(bigQueryArrowResultSet.isBeforeFirst()).isTrue(); + while (bigQueryArrowResultSet.next()) { + cnt++; + assertThat(bigQueryArrowResultSet.isLast()).isTrue(); // we have one test row + assertThat(bigQueryArrowResultSet.isFirst()).isTrue(); // we have one test row + + assertThat(bigQueryArrowResultSet.getString(4)).isEqualTo("text1"); + + // array + assertThat(bigQueryArrowResultSet.getArray("intArrayField").getArray()) + .isEqualTo(new Object[] {10L, 20L}); + assertThat(bigQueryArrowResultSet.getArray(7).getArray()).isEqualTo(new Object[] {10L, 20L}); + assertThat(((Array) bigQueryArrowResultSet.getObject("intArrayField")).getArray()) + .isEqualTo(new Object[] {10L, 20L}); + assertThat(((Array) bigQueryArrowResultSet.getObject(7)).getArray()) + .isEqualTo(new Object[] {10L, 20L}); + + // struct + assertThat(((Struct) bigQueryArrowResultSet.getObject("structField")).getAttributes()) + .isEqualTo(new Object[] {"Jon Doe", 29L}); + assertThat(((Struct) bigQueryArrowResultSet.getObject(8)).getAttributes()) + .isEqualTo(new Object[] {"Jon Doe", 29L}); + } + assertThat(cnt).isEqualTo(1); + assertThat(bigQueryArrowResultSet.next()).isFalse(); + assertThat(bigQueryArrowResultSet.isAfterLast()).isTrue(); + } + + @Test + public void testIsClosed() { + assertThat(bigQueryArrowResultSet.isClosed()).isFalse(); + } + + @Test + public void testResultSetHoldability() throws SQLException { + assertThat(bigQueryArrowResultSet.getHoldability()) + .isEqualTo(bigQueryArrowResultSet.HOLD_CURSORS_OVER_COMMIT); + } + + @Test + public void testStatement() throws SQLException { + 
assertThat(bigQueryArrowResultSet.getStatement()).isEqualTo(statement); + assertThat(bigQueryArrowResultSetNested.getStatement()).isNull(); + } + + @Test + public void testConcurrencyTypeColumn() throws SQLException { + assertThat(bigQueryArrowResultSet.getConcurrency()).isEqualTo(ResultSet.CONCUR_READ_ONLY); + assertThat(bigQueryArrowResultSet.getType()).isEqualTo(ResultSet.TYPE_FORWARD_ONLY); + assertThat(bigQueryArrowResultSet.findColumn("boolField")).isEqualTo(1); + } + + @Test + public void testIterationNested() throws SQLException { + int cnt = 0; + assertThat(bigQueryArrowResultSetNested.isBeforeFirst()).isTrue(); + while (bigQueryArrowResultSetNested.next()) { + cnt++; + if (cnt == 1) { + assertThat(bigQueryArrowResultSetNested.isFirst()).isTrue(); + + } else { // 2nd row is the last row + assertThat(bigQueryArrowResultSetNested.isLast()).isTrue(); + } + assertThat(bigQueryArrowResultSetNested.getInt(1)) + .isEqualTo(cnt); // the first column is index 1 + assertThat(bigQueryArrowResultSetNested.getInt(2)) + .isEqualTo(cnt * 10); // second column has values 10 and 20 + } + assertThat(cnt).isEqualTo(2); + assertThat(bigQueryArrowResultSetNested.next()).isFalse(); + assertThat(bigQueryArrowResultSetNested.isAfterLast()).isTrue(); + } + + private int resultSetRowCount(BigQueryArrowResultSet resultSet) throws SQLException { + int rowCount = 0; + while (resultSet.next()) { + rowCount++; + } + return rowCount; + } + + // TODO: Unit Test for iteration and getters +} diff --git a/google-cloud-bigquery-jdbc/src/test/java/com/google/cloud/bigquery/jdbc/BigQueryArrowStructTest.java b/google-cloud-bigquery-jdbc/src/test/java/com/google/cloud/bigquery/jdbc/BigQueryArrowStructTest.java new file mode 100644 index 000000000..2c3bedcc4 --- /dev/null +++ b/google-cloud-bigquery-jdbc/src/test/java/com/google/cloud/bigquery/jdbc/BigQueryArrowStructTest.java @@ -0,0 +1,245 @@ +/* + * Copyright 2023 Google LLC + * + * Licensed under the Apache License, Version 2.0 (the 
"License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * https://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package com.google.cloud.bigquery.jdbc;

import static com.google.cloud.bigquery.StandardSQLTypeName.BIGNUMERIC;
import static com.google.cloud.bigquery.StandardSQLTypeName.BOOL;
import static com.google.cloud.bigquery.StandardSQLTypeName.BYTES;
import static com.google.cloud.bigquery.StandardSQLTypeName.DATE;
import static com.google.cloud.bigquery.StandardSQLTypeName.DATETIME;
import static com.google.cloud.bigquery.StandardSQLTypeName.FLOAT64;
import static com.google.cloud.bigquery.StandardSQLTypeName.GEOGRAPHY;
import static com.google.cloud.bigquery.StandardSQLTypeName.INT64;
import static com.google.cloud.bigquery.StandardSQLTypeName.NUMERIC;
import static com.google.cloud.bigquery.StandardSQLTypeName.STRING;
import static com.google.cloud.bigquery.StandardSQLTypeName.TIME;
import static com.google.cloud.bigquery.StandardSQLTypeName.TIMESTAMP;
import static com.google.cloud.bigquery.jdbc.BigQueryErrorMessage.CUSTOMER_TYPE_MAPPING_NOT_SUPPORTED;
import static com.google.cloud.bigquery.jdbc.utils.TestUtilities.arrowArraySchemaAndValue;
import static com.google.cloud.bigquery.jdbc.utils.TestUtilities.arrowStructOf;
import static com.google.cloud.bigquery.jdbc.utils.TestUtilities.toArrowStruct;
import static com.google.common.truth.Truth.assertThat;
import static java.lang.Boolean.FALSE;
import static java.lang.Boolean.TRUE;
import static java.time.Month.MARCH;
import static java.util.Arrays.asList;
import static java.util.Collections.emptyMap;
import static org.junit.Assert.assertThrows;

import com.google.cloud.Tuple;
import com.google.cloud.bigquery.Field;
import com.google.cloud.bigquery.FieldList;
import com.google.cloud.bigquery.LegacySQLTypeName;
import com.google.cloud.bigquery.jdbc.rules.TimeZoneRule;
import java.math.BigDecimal;
import java.sql.Array;
import java.sql.Date;
import java.sql.SQLException;
import java.sql.SQLFeatureNotSupportedException;
import java.sql.Struct;
import java.sql.Time;
import java.sql.Timestamp;
import java.time.LocalDate;
import java.time.LocalDateTime;
import java.time.LocalTime;
import java.util.Arrays;
import java.util.List;
import java.util.stream.Collectors;
import java.util.stream.Stream;
import org.apache.arrow.vector.util.JsonStringArrayList;
import org.apache.arrow.vector.util.JsonStringHashMap;
import org.apache.arrow.vector.util.Text;
import org.junit.Before;
import org.junit.Rule;
import org.junit.Test;

/**
 * Unit tests for {@link BigQueryArrowStruct}: attribute extraction for primitive,
 * array, and nested struct values, plus the unsupported custom-type-mapping paths.
 *
 * <p>All temporal assertions run under a fixed UTC time zone via {@link TimeZoneRule}.
 */
public class BigQueryArrowStructTest {

  @Rule public final TimeZoneRule timeZoneRule = new TimeZoneRule("UTC");

  private Struct structWithPrimitiveValues;

  @Before
  public void setUp() {
    Tuple<FieldList, JsonStringHashMap<String, Object>> schemaAndValues =
        arrowStructOf(
            Tuple.of(INT64, Long.valueOf("10")),
            Tuple.of(BOOL, TRUE),
            Tuple.of(FLOAT64, Double.valueOf("11.2")),
            Tuple.of(NUMERIC, new BigDecimal("11.2657")),
            Tuple.of(BIGNUMERIC, new BigDecimal("11.2657")),
            Tuple.of(STRING, new Text("one")),
            // Epoch micros for 2023-03-30T11:14:19.820227Z (asserted below under UTC).
            Tuple.of(TIMESTAMP, Long.valueOf("1680174859820227")),
            Tuple.of(DATE, 19446), // 2023-03-30
            Tuple.of(TIME, Long.valueOf("40459820227")), // 11:14:19.820227
            Tuple.of(DATETIME, LocalDateTime.parse("2023-03-30T11:14:19.820227")),
            Tuple.of(GEOGRAPHY, new Text("POINT(-122 47)")),
            Tuple.of(BYTES, "one".getBytes()));

    structWithPrimitiveValues = new BigQueryArrowStruct(schemaAndValues.x(), schemaAndValues.y());
  }

  @Test
  public void structOfPrimitives() throws SQLException {
    assertThat(structWithPrimitiveValues.getAttributes())
        .isEqualTo(
            asList(
                10L,
                true,
                11.2,
                new BigDecimal("11.2657"),
                new BigDecimal("11.2657"),
                "one",
                Timestamp.valueOf(LocalDateTime.of(2023, MARCH, 30, 11, 14, 19, 820227000)),
                Date.valueOf(LocalDate.of(2023, MARCH, 30)),
                Time.valueOf(LocalTime.of(11, 14, 19, 820227)),
                Timestamp.valueOf("2023-03-30 11:14:19.820227"),
                "POINT(-122 47)",
                "one".getBytes())
            .toArray());
  }

  @Test
  public void structOfArrays() throws SQLException {
    LocalDateTime aTimeStamp = LocalDateTime.of(2023, MARCH, 30, 11, 14, 19, 820227000);
    LocalDate aDate = LocalDate.of(2023, MARCH, 30);
    LocalTime aTime = LocalTime.of(11, 14, 19, 820227);
    List<Tuple<Field, List<Object>>> schemaAndValues =
        Arrays.asList(
            arrowArraySchemaAndValue(INT64, 10L, 20L),
            arrowArraySchemaAndValue(BOOL, Boolean.TRUE, FALSE),
            arrowArraySchemaAndValue(FLOAT64, Double.valueOf("11.2"), Double.valueOf("33.4")),
            arrowArraySchemaAndValue(NUMERIC, new BigDecimal("11.2657"), new BigDecimal("33.4657")),
            arrowArraySchemaAndValue(
                BIGNUMERIC, new BigDecimal("11.2657"), new BigDecimal("33.4657")),
            arrowArraySchemaAndValue(STRING, new Text("one"), new Text("two")),
            arrowArraySchemaAndValue(
                TIMESTAMP, Long.valueOf("1680174859820227"), Long.valueOf("1680261259820227")),
            arrowArraySchemaAndValue(DATE, 19446, 19447),
            arrowArraySchemaAndValue(
                TIME, Long.valueOf("40459820227"), Long.valueOf("40460820227")),
            arrowArraySchemaAndValue(
                DATETIME,
                LocalDateTime.parse("2023-03-30T11:14:19.820227"),
                LocalDateTime.parse("2023-03-30T11:15:19.820227")),
            arrowArraySchemaAndValue(
                GEOGRAPHY, new Text("POINT(-122 47)"), new Text("POINT(-122 48)")),
            arrowArraySchemaAndValue(
                BYTES, Stream.of("one", "two").map(String::getBytes).toArray(byte[][]::new)));

    List<Field> orderedSchemas =
        schemaAndValues.stream().map(Tuple::x).collect(Collectors.toList());
    JsonStringHashMap<String, Object> jsonStringHashMap = toArrowStruct(schemaAndValues);

    Struct struct = new BigQueryArrowStruct(FieldList.of(orderedSchemas), jsonStringHashMap);

    Object[] attributes = struct.getAttributes();
    assertThat(((Array) attributes[0]).getArray()).isEqualTo(new Long[] {10L, 20L});
    assertThat(((Array) attributes[1]).getArray()).isEqualTo(new Boolean[] {true, false});
    assertThat(((Array) attributes[2]).getArray()).isEqualTo(new Double[] {11.2, 33.4});
    assertThat(((Array) attributes[3]).getArray())
        .isEqualTo(new BigDecimal[] {new BigDecimal("11.2657"), new BigDecimal("33.4657")});
    assertThat(((Array) attributes[4]).getArray())
        .isEqualTo(new BigDecimal[] {new BigDecimal("11.2657"), new BigDecimal("33.4657")});
    assertThat(((Array) attributes[5]).getArray()).isEqualTo(new String[] {"one", "two"});
    assertThat(((Array) attributes[6]).getArray())
        .isEqualTo(
            new Timestamp[] {
              Timestamp.valueOf(aTimeStamp), // 2023-03-30T11:14:19.820227 UTC
              Timestamp.valueOf(aTimeStamp.plusDays(1))
            });
    assertThat(((Array) attributes[7]).getArray())
        .isEqualTo(new Date[] {Date.valueOf(aDate), Date.valueOf(aDate.plusDays(1))});
    assertThat(((Array) attributes[8]).getArray())
        .isEqualTo(new Time[] {Time.valueOf(aTime), Time.valueOf(aTime.plusSeconds(1))});
    assertThat(((Array) attributes[9]).getArray()) // DATETIME
        .isEqualTo(
            new Timestamp[] {
              Timestamp.valueOf("2023-03-30 11:14:19.820227"),
              Timestamp.valueOf("2023-03-30 11:15:19.820227")
            });
    assertThat(((Array) attributes[10]).getArray())
        .isEqualTo(new String[] {"POINT(-122 47)", "POINT(-122 48)"});
    assertThat(((Array) attributes[11]).getArray())
        .isEqualTo(new byte[][] {"one".getBytes(), "two".getBytes()});
  }

  @Test
  public void structOfStructs() throws SQLException {
    FieldList profileSchema =
        FieldList.of(
            Field.of("name", LegacySQLTypeName.STRING),
            Field.of("age", LegacySQLTypeName.INTEGER),
            Field.of("adult", LegacySQLTypeName.BOOLEAN));
    FieldList addressSchema =
        FieldList.of(
            Field.of("state", LegacySQLTypeName.STRING),
            Field.of("zip", LegacySQLTypeName.INTEGER));
    FieldList rootStructSchema =
        FieldList.of(
            Field.of("profile", LegacySQLTypeName.RECORD, profileSchema),
            Field.of("address", LegacySQLTypeName.RECORD, addressSchema));

    // Populate the maps directly instead of double-brace initialization, which
    // creates anonymous subclasses that retain a reference to the test instance.
    JsonStringHashMap<String, Object> profileValue = new JsonStringHashMap<>();
    profileValue.put("name", new Text("Arya"));
    profileValue.put("age", 15L);
    profileValue.put("adult", FALSE);

    JsonStringHashMap<String, Object> addressValue = new JsonStringHashMap<>();
    addressValue.put("state", new Text("Michigan"));
    addressValue.put("zip", 49086L);

    JsonStringHashMap<String, Object> rootStructValue = new JsonStringHashMap<>();
    rootStructValue.put("profile", profileValue);
    rootStructValue.put("address", addressValue);

    Struct struct = new BigQueryArrowStruct(rootStructSchema, rootStructValue);
    Object[] attributes = struct.getAttributes();
    Struct profileStruct = (Struct) attributes[0];
    Struct addressStruct = (Struct) attributes[1];

    assertThat(profileStruct.getAttributes()).isEqualTo(asList("Arya", 15L, false).toArray());
    assertThat(addressStruct.getAttributes()).isEqualTo(asList("Michigan", 49086L).toArray());
  }

  @Test
  public void getSQLTypeNameIsNotSupported() {
    Exception exception =
        assertThrows(
            SQLFeatureNotSupportedException.class, structWithPrimitiveValues::getSQLTypeName);
    assertThat(exception.getMessage()).isEqualTo(CUSTOMER_TYPE_MAPPING_NOT_SUPPORTED);
  }

  @Test
  public void getAttributesWithCustomTypeMappingsIsNotSupported() {
    Exception exception =
        assertThrows(
            SQLFeatureNotSupportedException.class,
            () -> structWithPrimitiveValues.getAttributes(emptyMap()));
    assertThat(exception.getMessage()).isEqualTo(CUSTOMER_TYPE_MAPPING_NOT_SUPPORTED);
  }
}
diff --git a/google-cloud-bigquery-jdbc/src/test/java/com/google/cloud/bigquery/jdbc/BigQueryBaseResultSetTest.java b/google-cloud-bigquery-jdbc/src/test/java/com/google/cloud/bigquery/jdbc/BigQueryBaseResultSetTest.java
new file mode 100644
index 000000000..90dad9935
--- /dev/null
+++
b/google-cloud-bigquery-jdbc/src/test/java/com/google/cloud/bigquery/jdbc/BigQueryBaseResultSetTest.java
@@ -0,0 +1,104 @@
/*
 * Copyright 2025 Google LLC
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package com.google.cloud.bigquery.jdbc;

import static com.google.common.truth.Truth.assertThat;
import static org.mockito.ArgumentMatchers.any;
import static org.mockito.Mockito.CALLS_REAL_METHODS;
import static org.mockito.Mockito.doReturn;
import static org.mockito.Mockito.mock;

import com.google.cloud.bigquery.BigQuery;
import com.google.cloud.bigquery.Job;
import com.google.cloud.bigquery.JobId;
import com.google.cloud.bigquery.JobStatistics.QueryStatistics;
import java.lang.reflect.Field;
import org.junit.Before;
import org.junit.Test;

/** Unit tests for {@link BigQueryBaseResultSet} job-id, query-id and statistics accessors. */
public class BigQueryBaseResultSetTest {
  private BigQuery bigQuery;
  private BigQueryBaseResultSet resultSet;
  private Job job;
  private QueryStatistics statistics;

  @Before
  public void setUp() {
    // Using mock() for QueryStatistics because Builder() seems to not be available
    // from outside.
    bigQuery = mock(BigQuery.class);
    job = mock(Job.class);
    doReturn(job).when(bigQuery).getJob(any(JobId.class));

    statistics = mock(QueryStatistics.class);
    doReturn(statistics).when(job).getStatistics();

    resultSet = mock(BigQueryBaseResultSet.class, CALLS_REAL_METHODS);
    try {
      // Inject the mocked client; the field has no setter.
      Field field = BigQueryBaseResultSet.class.getDeclaredField("bigQuery");
      field.setAccessible(true);
      field.set(resultSet, bigQuery);
    } catch (ReflectiveOperationException e) {
      // Fail the test loudly with the root cause instead of the original
      // opaque assertFalse(true), which discarded the exception.
      throw new AssertionError("Could not inject the BigQuery mock into the result set", e);
    }
  }

  @Test
  public void testGetQueryId() {
    resultSet.setQueryId("queryId");
    assertThat(resultSet.getQueryId()).isEqualTo("queryId");
  }

  @Test
  public void testGetJobId() {
    resultSet.setJobId(JobId.of("jobId"));
    assertThat(resultSet.getJobId()).isEqualTo(JobId.of("jobId"));
  }

  @Test
  public void testGetQueryStatistics() {
    resultSet.setJobId(JobId.of("jobId"));
    assertThat(resultSet.getQueryStatistics()).isInstanceOf(QueryStatistics.class);
  }

  @Test
  public void testGetQueryStatisticsCaching() {
    resultSet.setJobId(JobId.of("jobId"));
    assertThat(resultSet.getQueryStatistics()).isInstanceOf(QueryStatistics.class);
    // Change return value to null to ensure lazy init saved the state
    doReturn(null).when(job).getStatistics();
    assertThat(resultSet.getQueryStatistics()).isInstanceOf(QueryStatistics.class);
  }

  @Test
  public void testGetQueryStatistics_no_client() {
    resultSet = mock(BigQueryBaseResultSet.class, CALLS_REAL_METHODS);
    assertThat(resultSet.getQueryStatistics()).isNull();
  }

  @Test
  public void testGetQueryStatistics_no_job_id() {
    assertThat(resultSet.getQueryStatistics()).isNull();
  }

  @Test
  public void testGetQueryStatistics_no_job() {
    // The client cannot find the job for the id, so statistics cannot be
    // resolved. (The original stubbed getJob to return the mock job and never
    // set a job id, which only duplicated testGetQueryStatistics_no_job_id.)
    doReturn(null).when(bigQuery).getJob(any(JobId.class));
    resultSet.setJobId(JobId.of("jobId"));
    assertThat(resultSet.getQueryStatistics()).isNull();
  }
}
diff --git a/google-cloud-bigquery-jdbc/src/test/java/com/google/cloud/bigquery/jdbc/BigQueryBigQueryTypeCoercerBuilderTest.java
b/google-cloud-bigquery-jdbc/src/test/java/com/google/cloud/bigquery/jdbc/BigQueryBigQueryTypeCoercerBuilderTest.java
new file mode 100644
index 000000000..4af163245
--- /dev/null
+++ b/google-cloud-bigquery-jdbc/src/test/java/com/google/cloud/bigquery/jdbc/BigQueryBigQueryTypeCoercerBuilderTest.java
@@ -0,0 +1,43 @@
/*
 * Copyright 2023 Google LLC
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * https://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package com.google.cloud.bigquery.jdbc;

import static com.google.common.truth.Truth.assertThat;

import com.google.cloud.bigquery.jdbc.TestType.Text;
import java.nio.charset.StandardCharsets;
import org.junit.Test;

/** Unit tests for registering custom type coercions via the type-coercer builder. */
public class BigQueryBigQueryTypeCoercerBuilderTest {

  @Test
  public void shouldBeAbleToConvertCustomTypes() {
    // "Hello World!" encoded as single-byte ASCII (a subset of UTF-8).
    byte[] bytesArray = {72, 101, 108, 108, 111, 32, 87, 111, 114, 108, 100, 33};
    Text text = new Text(bytesArray);

    BigQueryTypeCoercer bigQueryTypeCoercer =
        new BigQueryTypeCoercerBuilder().registerTypeCoercion(new TextToStringCoercion()).build();

    assertThat(bigQueryTypeCoercer.coerceTo(String.class, text)).isEqualTo("Hello World!");
  }

  /** Coerces the custom {@code Text} test type to {@link String}. */
  private static class TextToStringCoercion implements BigQueryCoercion<Text, String> {
    @Override
    public String coerce(Text value) {
      // Decode with an explicit charset: new String(byte[]) uses the platform
      // default charset and makes the test JVM-configuration dependent.
      return new String(value.getBytes(), StandardCharsets.UTF_8);
    }
  }
}
diff --git a/google-cloud-bigquery-jdbc/src/test/java/com/google/cloud/bigquery/jdbc/BigQueryCallableStatementTest.java b/google-cloud-bigquery-jdbc/src/test/java/com/google/cloud/bigquery/jdbc/BigQueryCallableStatementTest.java
new file mode
100644 index 000000000..f9729bf21 --- /dev/null +++ b/google-cloud-bigquery-jdbc/src/test/java/com/google/cloud/bigquery/jdbc/BigQueryCallableStatementTest.java @@ -0,0 +1,1118 @@ +/* + * Copyright 2025 Google LLC + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package com.google.cloud.bigquery.jdbc; + +import static org.junit.Assert.assertEquals; +import static org.junit.Assert.assertNotNull; +import static org.junit.Assert.assertNull; +import static org.junit.Assert.assertThrows; +import static org.junit.Assert.assertTrue; +import static org.mockito.Mockito.doReturn; +import static org.mockito.Mockito.mock; + +import com.google.cloud.bigquery.StandardSQLTypeName; +import com.google.cloud.bigquery.jdbc.BigQueryParameterHandler.BigQueryStatementParameterType; +import java.io.IOException; +import java.io.Reader; +import java.io.StringReader; +import java.math.BigDecimal; +import java.sql.*; +import java.util.Calendar; +import java.util.HashMap; +import java.util.Map; +import org.junit.Before; +import org.junit.Test; + +public class BigQueryCallableStatementTest { + + private BigQueryConnection bigQueryConnection; + private static final String GET_PARAM_KEY = "ParamKey"; + private static final String PARAM_KEY = GET_PARAM_KEY; + + @Before + public void setUp() throws IOException, SQLException { + bigQueryConnection = mock(BigQueryConnection.class); + } + + @Test + public void testCreateCallableStatement() throws SQLException { + 
BigQueryCallableStatement statement = + new BigQueryCallableStatement(bigQueryConnection, "call testProc"); + assertNotNull(statement); + + assertEquals("call testProc", statement.getCallableStatementSql()); + } + + @Test + public void testRegisterOutParamIndexVarchar() throws SQLException { + registerOutParamIndexHelper(1, Types.VARCHAR, String.class, StandardSQLTypeName.STRING, -1); + } + + @Test + public void testRegisterOutParamIndexNVarchar() throws SQLException { + registerOutParamIndexHelper(1, Types.NVARCHAR, String.class, StandardSQLTypeName.STRING, -1); + } + + @Test + public void testRegisterOutParamIndexBigInt() throws SQLException { + registerOutParamIndexHelper(1, Types.BIGINT, Long.class, StandardSQLTypeName.INT64, -1); + } + + @Test + public void testRegisterOutParamIndexInteger() throws SQLException { + registerOutParamIndexHelper(1, Types.INTEGER, Integer.class, StandardSQLTypeName.INT64, -1); + } + + @Test + public void testRegisterOutParamIndexBoolean() throws SQLException { + registerOutParamIndexHelper(1, Types.BOOLEAN, Boolean.class, StandardSQLTypeName.BOOL, -1); + } + + @Test + public void testRegisterOutParamIndexDouble() throws SQLException { + registerOutParamIndexHelper(1, Types.DOUBLE, Double.class, StandardSQLTypeName.FLOAT64, -1); + } + + @Test + public void testRegisterOutParamIndexFloat() throws SQLException { + registerOutParamIndexHelper(1, Types.FLOAT, Float.class, StandardSQLTypeName.FLOAT64, -1); + } + + @Test + public void testRegisterOutParamIndexNumeric() throws SQLException { + registerOutParamIndexHelper(1, Types.NUMERIC, BigDecimal.class, StandardSQLTypeName.NUMERIC, 2); + } + + @Test + public void testRegisterOutParamIndexTimestamp() throws SQLException { + registerOutParamIndexHelper( + 1, Types.TIMESTAMP, Timestamp.class, StandardSQLTypeName.TIMESTAMP, -1); + } + + @Test + public void testRegisterOutParamIndexDate() throws SQLException { + registerOutParamIndexHelper(1, Types.DATE, Date.class, 
StandardSQLTypeName.DATE, -1); + } + + @Test + public void testRegisterOutParamIndexTime() throws SQLException { + registerOutParamIndexHelper(1, Types.TIME, Time.class, StandardSQLTypeName.TIME, -1); + } + + @Test + public void testRegisterOutParamIndexOther() throws SQLException { + registerOutParamIndexHelper(1, Types.OTHER, String.class, StandardSQLTypeName.STRING, -1); + } + + @Test + public void testRegisterOutParamIndexBinary() throws SQLException { + registerOutParamIndexHelper(1, Types.BINARY, byte[].class, StandardSQLTypeName.BYTES, -1); + } + + @Test + public void testRegisterOutParamIndexVarBinary() throws SQLException { + registerOutParamIndexHelper(1, Types.VARBINARY, byte[].class, StandardSQLTypeName.BYTES, -1); + } + + @Test + public void testRegisterOutParamIndexStruct() throws SQLException { + registerOutParamIndexHelper(1, Types.STRUCT, Struct.class, StandardSQLTypeName.STRUCT, -1); + } + + @Test + public void testRegisterOutParamIndexArray() throws SQLException { + registerOutParamIndexHelper(1, Types.ARRAY, Array.class, StandardSQLTypeName.ARRAY, -1); + } + + @Test + public void testRegisterOutParamIndexBit() throws SQLException { + registerOutParamIndexHelper(1, Types.BIT, Boolean.class, StandardSQLTypeName.BOOL, -1); + } + + @Test + public void testRegisterOutParamNameVarchar() throws SQLException { + registerOutParamNameHelper( + "ParamKey", Types.VARCHAR, String.class, StandardSQLTypeName.STRING, -1); + } + + @Test + public void testRegisterOutParamNameNVarchar() throws SQLException { + registerOutParamNameHelper( + "ParamKey", Types.NVARCHAR, String.class, StandardSQLTypeName.STRING, -1); + } + + @Test + public void testRegisterOutParamNameBigInt() throws SQLException { + registerOutParamNameHelper("ParamKey", Types.BIGINT, Long.class, StandardSQLTypeName.INT64, -1); + } + + @Test + public void testRegisterOutParamNameInteger() throws SQLException { + registerOutParamNameHelper( + "ParamKey", Types.INTEGER, Integer.class, 
StandardSQLTypeName.INT64, -1); + } + + @Test + public void testRegisterOutParamNameBoolean() throws SQLException { + registerOutParamNameHelper( + "ParamKey", Types.BOOLEAN, Boolean.class, StandardSQLTypeName.BOOL, -1); + } + + @Test + public void testRegisterOutParamNameDouble() throws SQLException { + registerOutParamNameHelper( + "ParamKey", Types.DOUBLE, Double.class, StandardSQLTypeName.FLOAT64, -1); + } + + @Test + public void testRegisterOutParamNameFloat() throws SQLException { + registerOutParamNameHelper( + "ParamKey", Types.FLOAT, Float.class, StandardSQLTypeName.FLOAT64, -1); + } + + @Test + public void testRegisterOutParamNameNumeric() throws SQLException { + registerOutParamNameHelper( + "ParamKey", Types.NUMERIC, BigDecimal.class, StandardSQLTypeName.NUMERIC, 2); + } + + @Test + public void testRegisterOutParamNameTimestamp() throws SQLException { + registerOutParamNameHelper( + "ParamKey", Types.TIMESTAMP, Timestamp.class, StandardSQLTypeName.TIMESTAMP, -1); + } + + @Test + public void testRegisterOutParamNameDate() throws SQLException { + registerOutParamNameHelper("ParamKey", Types.DATE, Date.class, StandardSQLTypeName.DATE, -1); + } + + @Test + public void testRegisterOutParamNameTime() throws SQLException { + registerOutParamNameHelper("ParamKey", Types.TIME, Time.class, StandardSQLTypeName.TIME, -1); + } + + @Test + public void testRegisterOutParamNameOther() throws SQLException { + registerOutParamNameHelper( + "ParamKey", Types.OTHER, String.class, StandardSQLTypeName.STRING, -1); + } + + @Test + public void testRegisterOutParamNameBinary() throws SQLException { + registerOutParamNameHelper( + "ParamKey", Types.BINARY, byte[].class, StandardSQLTypeName.BYTES, -1); + } + + @Test + public void testRegisterOutParamNameVarBinary() throws SQLException { + registerOutParamNameHelper( + "ParamKey", Types.VARBINARY, byte[].class, StandardSQLTypeName.BYTES, -1); + } + + @Test + public void testRegisterOutParamNameStruct() throws SQLException { + 
registerOutParamIndexHelper(1, Types.STRUCT, Struct.class, StandardSQLTypeName.STRUCT, -1); + } + + @Test + public void testRegisterOutParamNameArray() throws SQLException { + registerOutParamNameHelper("ParamKey", Types.ARRAY, Array.class, StandardSQLTypeName.ARRAY, -1); + } + + @Test + public void testRegisterOutParamNameBit() throws SQLException { + registerOutParamNameHelper("ParamKey", Types.BIT, Boolean.class, StandardSQLTypeName.BOOL, -1); + } + + @Test + public void testRegisterOutParamIndexScaleFail() throws SQLException { + BigQueryCallableStatement statement = + new BigQueryCallableStatement(bigQueryConnection, "call testProc('?')"); + assertNotNull(statement); + assertThrows( + IllegalArgumentException.class, () -> statement.registerOutParameter(1, Types.VARCHAR, 3)); + } + + @Test + public void testRegisterOutNameIndexScaleFail() throws SQLException { + BigQueryCallableStatement statement = + new BigQueryCallableStatement(bigQueryConnection, "call testProc('?')"); + assertNotNull(statement); + assertThrows( + IllegalArgumentException.class, + () -> statement.registerOutParameter("ParamKey", Types.VARCHAR, 3)); + } + + @Test + public void testGetArrayParamByIndex() throws SQLException { + BigQueryCallableStatement statement = + new BigQueryCallableStatement(bigQueryConnection, "call testProc('?')"); + assertNotNull(statement); + Array expected = mock(Array.class); + + statement.getParameterHandler().setParameter(1, expected, Array.class); + Array actual = statement.getArray(1); + assertEquals(expected, actual); + } + + @Test + public void testGetArrayParamByName() throws SQLException { + BigQueryCallableStatement statement = + new BigQueryCallableStatement(bigQueryConnection, "call testProc('?')"); + assertNotNull(statement); + Array expected = mock(Array.class); + + statement + .getParameterHandler() + .setParameter(GET_PARAM_KEY, expected, Array.class, BigQueryStatementParameterType.IN, 0); + Array actual = statement.getArray(GET_PARAM_KEY); + 
assertEquals(expected, actual); + } + + @Test + public void testGetBigDecimalParamByIndex() throws SQLException { + BigQueryCallableStatement statement = + new BigQueryCallableStatement(bigQueryConnection, "call testProc('?')"); + assertNotNull(statement); + BigDecimal expected = mock(BigDecimal.class); + + statement.getParameterHandler().setParameter(1, expected, BigDecimal.class); + BigDecimal actual = statement.getBigDecimal(1); + assertEquals(expected, actual); + } + + @Test + public void testGetBigDecimalParamByName() throws SQLException { + BigQueryCallableStatement statement = + new BigQueryCallableStatement(bigQueryConnection, "call testProc('?')"); + assertNotNull(statement); + BigDecimal expected = mock(BigDecimal.class); + + statement + .getParameterHandler() + .setParameter( + GET_PARAM_KEY, expected, BigDecimal.class, BigQueryStatementParameterType.IN, 0); + BigDecimal actual = statement.getBigDecimal(GET_PARAM_KEY); + assertEquals(expected, actual); + } + + @Test + public void testGetBooleanParamByIndex() throws SQLException { + BigQueryCallableStatement statement = + new BigQueryCallableStatement(bigQueryConnection, "call testProc('?')"); + assertNotNull(statement); + Boolean expected = true; + + statement.getParameterHandler().setParameter(1, expected, Boolean.class); + Boolean actual = statement.getBoolean(1); + assertEquals(expected, actual); + } + + @Test + public void testGetBooleanParamByName() throws SQLException { + BigQueryCallableStatement statement = + new BigQueryCallableStatement(bigQueryConnection, "call testProc('?')"); + assertNotNull(statement); + Boolean expected = true; + + statement + .getParameterHandler() + .setParameter(GET_PARAM_KEY, expected, Boolean.class, BigQueryStatementParameterType.IN, 0); + Boolean actual = statement.getBoolean(GET_PARAM_KEY); + assertEquals(expected, actual); + } + + @Test + public void testGetByteParamByIndex() throws SQLException { + BigQueryCallableStatement statement = + new 
BigQueryCallableStatement(bigQueryConnection, "call testProc('?')"); + assertNotNull(statement); + Byte expected = "hello".getBytes()[0]; + + statement.getParameterHandler().setParameter(1, expected, Byte.class); + Byte actual = statement.getByte(1); + assertEquals(expected, actual); + } + + @Test + public void testGetByteParamByName() throws SQLException { + BigQueryCallableStatement statement = + new BigQueryCallableStatement(bigQueryConnection, "call testProc('?')"); + assertNotNull(statement); + Byte expected = "hello".getBytes()[0]; + + statement + .getParameterHandler() + .setParameter(GET_PARAM_KEY, expected, Byte.class, BigQueryStatementParameterType.IN, 0); + Byte actual = statement.getByte(GET_PARAM_KEY); + assertEquals(expected, actual); + } + + @Test + public void testGetBytesParamByIndex() throws SQLException { + BigQueryCallableStatement statement = + new BigQueryCallableStatement(bigQueryConnection, "call testProc('?')"); + assertNotNull(statement); + byte[] expected = "hello".getBytes(); + + statement.getParameterHandler().setParameter(1, expected, byte[].class); + byte[] actual = statement.getBytes(1); + assertEquals(expected, actual); + } + + @Test + public void testGetBytesParamByName() throws SQLException { + BigQueryCallableStatement statement = + new BigQueryCallableStatement(bigQueryConnection, "call testProc('?')"); + assertNotNull(statement); + byte[] expected = "hello".getBytes(); + + statement + .getParameterHandler() + .setParameter(GET_PARAM_KEY, expected, byte[].class, BigQueryStatementParameterType.IN, 0); + byte[] actual = statement.getBytes(GET_PARAM_KEY); + assertEquals(expected, actual); + } + + @Test + public void testGetCharacterStreamParamByIndex() throws SQLException { + BigQueryCallableStatement statement = + new BigQueryCallableStatement(bigQueryConnection, "call testProc('?')"); + assertNotNull(statement); + String expected = "hello"; + + statement.getParameterHandler().setParameter(1, expected, String.class); + Reader 
actual = statement.getCharacterStream(1); + assertNotNull(actual); + assertTrue(actual instanceof StringReader); + } + + @Test + public void testGetCharacterStreamParamByName() throws SQLException { + BigQueryCallableStatement statement = + new BigQueryCallableStatement(bigQueryConnection, "call testProc('?')"); + assertNotNull(statement); + String expected = "hello"; + + statement + .getParameterHandler() + .setParameter(GET_PARAM_KEY, expected, String.class, BigQueryStatementParameterType.IN, 0); + Reader actual = statement.getCharacterStream(GET_PARAM_KEY); + assertNotNull(actual); + assertTrue(actual instanceof StringReader); + } + + @Test + public void testGetDateParamByIndex() throws SQLException { + BigQueryCallableStatement statement = + new BigQueryCallableStatement(bigQueryConnection, "call testProc('?')"); + assertNotNull(statement); + Date expected = mock(Date.class); + + statement.getParameterHandler().setParameter(1, expected, Date.class); + Date actual = statement.getDate(1); + assertEquals(expected, actual); + } + + @Test + public void testGetDateParamByName() throws SQLException { + BigQueryCallableStatement statement = + new BigQueryCallableStatement(bigQueryConnection, "call testProc('?')"); + assertNotNull(statement); + Date expected = mock(Date.class); + + statement + .getParameterHandler() + .setParameter(GET_PARAM_KEY, expected, Date.class, BigQueryStatementParameterType.IN, 0); + Date actual = statement.getDate(GET_PARAM_KEY); + assertEquals(expected, actual); + } + + @Test + public void testGetDateParamWithCalByIndex() throws SQLException { + BigQueryCallableStatement statement = + new BigQueryCallableStatement(bigQueryConnection, "call testProc('?')"); + assertNotNull(statement); + Calendar cal = Calendar.getInstance(); + Date expected = new Date(cal.getTimeInMillis()); + + statement.getParameterHandler().setParameter(1, expected, Date.class); + Date actual = statement.getDate(1, cal); + assertEquals(expected, actual); + } + + @Test + 
public void testGetDateParamWithCalByName() throws SQLException { + BigQueryCallableStatement statement = + new BigQueryCallableStatement(bigQueryConnection, "call testProc('?')"); + assertNotNull(statement); + Calendar cal = Calendar.getInstance(); + Date expected = new Date(cal.getTimeInMillis()); + + statement + .getParameterHandler() + .setParameter(GET_PARAM_KEY, expected, Date.class, BigQueryStatementParameterType.IN, 0); + Date actual = statement.getDate(GET_PARAM_KEY, cal); + assertEquals(expected, actual); + } + + @Test + public void testGetDoubleParamByIndex() throws SQLException { + BigQueryCallableStatement statement = + new BigQueryCallableStatement(bigQueryConnection, "call testProc('?')"); + assertNotNull(statement); + Double expected = 10.123; + + statement.getParameterHandler().setParameter(1, expected, Double.class); + Double actual = statement.getDouble(1); + assertEquals(expected, actual); + } + + @Test + public void testGetDoubleParamByName() throws SQLException { + BigQueryCallableStatement statement = + new BigQueryCallableStatement(bigQueryConnection, "call testProc('?')"); + assertNotNull(statement); + Double expected = 10.123; + + statement + .getParameterHandler() + .setParameter(GET_PARAM_KEY, expected, Double.class, BigQueryStatementParameterType.IN, 0); + Double actual = statement.getDouble(GET_PARAM_KEY); + assertEquals(expected, actual); + } + + @Test + public void testGetFloatParamByIndex() throws SQLException { + BigQueryCallableStatement statement = + new BigQueryCallableStatement(bigQueryConnection, "call testProc('?')"); + assertNotNull(statement); + Float expected = 10.123F; + + statement.getParameterHandler().setParameter(1, expected, Float.class); + Float actual = statement.getFloat(1); + assertEquals(expected, actual); + } + + @Test + public void testGetFloatParamByName() throws SQLException { + BigQueryCallableStatement statement = + new BigQueryCallableStatement(bigQueryConnection, "call testProc('?')"); + 
assertNotNull(statement); + Float expected = 10.123F; + + statement + .getParameterHandler() + .setParameter(GET_PARAM_KEY, expected, Float.class, BigQueryStatementParameterType.IN, 0); + Float actual = statement.getFloat(GET_PARAM_KEY); + assertEquals(expected, actual); + } + + @Test + public void testGetIntegerParamByIndex() throws SQLException { + BigQueryCallableStatement statement = + new BigQueryCallableStatement(bigQueryConnection, "call testProc('?')"); + assertNotNull(statement); + Integer expected = 10; + + statement.getParameterHandler().setParameter(1, expected, Integer.class); + Integer actual = statement.getInt(1); + assertEquals(expected, actual); + } + + @Test + public void testGetIntegerParamByName() throws SQLException { + BigQueryCallableStatement statement = + new BigQueryCallableStatement(bigQueryConnection, "call testProc('?')"); + assertNotNull(statement); + Integer expected = 10; + + statement + .getParameterHandler() + .setParameter(GET_PARAM_KEY, expected, Integer.class, BigQueryStatementParameterType.IN, 0); + Integer actual = statement.getInt(GET_PARAM_KEY); + assertEquals(expected, actual); + } + + @Test + public void testGetLongParamByIndex() throws SQLException { + BigQueryCallableStatement statement = + new BigQueryCallableStatement(bigQueryConnection, "call testProc('?')"); + assertNotNull(statement); + Long expected = 10L; + + statement.getParameterHandler().setParameter(1, expected, Long.class); + Long actual = statement.getLong(1); + assertEquals(expected, actual); + } + + @Test + public void testGetLongParamByName() throws SQLException { + BigQueryCallableStatement statement = + new BigQueryCallableStatement(bigQueryConnection, "call testProc('?')"); + assertNotNull(statement); + Long expected = 10L; + + statement + .getParameterHandler() + .setParameter(GET_PARAM_KEY, expected, Long.class, BigQueryStatementParameterType.IN, 0); + Long actual = statement.getLong(GET_PARAM_KEY); + assertEquals(expected, actual); + } + + @Test + 
public void testGetNCharacterStreamParamByIndex() throws SQLException { + BigQueryCallableStatement statement = + new BigQueryCallableStatement(bigQueryConnection, "call testProc('?')"); + assertNotNull(statement); + String expected = "hello"; + + statement.getParameterHandler().setParameter(1, expected, String.class); + Reader actual = statement.getNCharacterStream(1); + assertNotNull(actual); + assertTrue(actual instanceof StringReader); + } + + @Test + public void testGetNCharacterStreamParamByName() throws SQLException { + BigQueryCallableStatement statement = + new BigQueryCallableStatement(bigQueryConnection, "call testProc('?')"); + assertNotNull(statement); + String expected = "hello"; + + statement + .getParameterHandler() + .setParameter(GET_PARAM_KEY, expected, String.class, BigQueryStatementParameterType.IN, 0); + Reader actual = statement.getNCharacterStream(GET_PARAM_KEY); + assertNotNull(actual); + assertTrue(actual instanceof StringReader); + } + + @Test + public void testGetNStringParamByIndex() throws SQLException { + BigQueryCallableStatement statement = + new BigQueryCallableStatement(bigQueryConnection, "call testProc('?')"); + assertNotNull(statement); + String expected = "hello"; + + statement.getParameterHandler().setParameter(1, expected, String.class); + String actual = statement.getNString(1); + assertEquals(expected, actual); + } + + @Test + public void testGetNStringByName() throws SQLException { + BigQueryCallableStatement statement = + new BigQueryCallableStatement(bigQueryConnection, "call testProc('?')"); + assertNotNull(statement); + String expected = "hello"; + + statement + .getParameterHandler() + .setParameter(GET_PARAM_KEY, expected, String.class, BigQueryStatementParameterType.IN, 0); + String actual = statement.getNString(GET_PARAM_KEY); + assertEquals(expected, actual); + } + + @Test + public void testGetObjectParamByIndex() throws SQLException { + BigQueryCallableStatement statement = + new 
BigQueryCallableStatement(bigQueryConnection, "call testProc('?')"); + assertNotNull(statement); + String expected = "hello"; + + statement.getParameterHandler().setParameter(1, expected, String.class); + Object actual = statement.getObject(1); + assertEquals(expected, actual.toString()); + } + + @Test + public void testGetObjectParamByName() throws SQLException { + BigQueryCallableStatement statement = + new BigQueryCallableStatement(bigQueryConnection, "call testProc('?')"); + assertNotNull(statement); + String expected = "hello"; + + statement + .getParameterHandler() + .setParameter(GET_PARAM_KEY, expected, String.class, BigQueryStatementParameterType.IN, 0); + Object actual = statement.getObject(GET_PARAM_KEY); + assertEquals(expected, actual.toString()); + } + + @Test + public void testGetObjectParamWithMapByIndex() throws SQLException { + BigQueryCallableStatement statement = + new BigQueryCallableStatement(bigQueryConnection, "call testProc('?')"); + assertNotNull(statement); + Map<String, Class<?>> map = new HashMap<>(); + map.putIfAbsent(StandardSQLTypeName.STRING.name(), String.class); + String expected = "hello"; + + statement.getParameterHandler().setParameter(1, expected, String.class); + Object actual = statement.getObject(1, map); + assertEquals(expected, actual.toString()); + } + + @Test + public void testGetObjectParamWithMapByName() throws SQLException { + BigQueryCallableStatement statement = + new BigQueryCallableStatement(bigQueryConnection, "call testProc('?')"); + assertNotNull(statement); + Map<String, Class<?>> map = new HashMap<>(); + map.putIfAbsent(StandardSQLTypeName.STRING.name(), String.class); + String expected = "hello"; + + statement + .getParameterHandler() + .setParameter(GET_PARAM_KEY, expected, String.class, BigQueryStatementParameterType.IN, 0); + Object actual = statement.getObject(GET_PARAM_KEY, map); + assertEquals(expected, actual.toString()); + } + + @Test + public void testGetObjectParamWithClassByIndex() throws SQLException { + 
BigQueryCallableStatement statement = + new BigQueryCallableStatement(bigQueryConnection, "call testProc('?')"); + assertNotNull(statement); + String expected = "hello"; + + statement.getParameterHandler().setParameter(1, expected, String.class); + Object actual = statement.getObject(1, String.class); + assertEquals(expected, actual.toString()); + } + + @Test + public void testGetObjectParamWithClassByName() throws SQLException { + BigQueryCallableStatement statement = + new BigQueryCallableStatement(bigQueryConnection, "call testProc('?')"); + assertNotNull(statement); + String expected = "hello"; + + statement + .getParameterHandler() + .setParameter(GET_PARAM_KEY, expected, String.class, BigQueryStatementParameterType.IN, 0); + Object actual = statement.getObject(GET_PARAM_KEY, String.class); + assertEquals(expected, actual.toString()); + } + + @Test + public void testGetStringParamByIndex() throws SQLException { + BigQueryCallableStatement statement = + new BigQueryCallableStatement(bigQueryConnection, "call testProc('?')"); + assertNotNull(statement); + String expected = "test"; + + statement.getParameterHandler().setParameter(1, expected, String.class); + String actual = statement.getString(1); + assertEquals(expected, actual); + } + + @Test + public void testGetStringParamByName() throws SQLException { + BigQueryCallableStatement statement = + new BigQueryCallableStatement(bigQueryConnection, "call testProc('?')"); + assertNotNull(statement); + String expected = "test"; + + statement + .getParameterHandler() + .setParameter(GET_PARAM_KEY, expected, String.class, BigQueryStatementParameterType.IN, 0); + String actual = statement.getString(GET_PARAM_KEY); + assertEquals(expected, actual); + } + + @Test + public void testGetTimeParamByIndex() throws SQLException { + BigQueryCallableStatement statement = + new BigQueryCallableStatement(bigQueryConnection, "call testProc('?')"); + assertNotNull(statement); + Time expected = mock(Time.class); + + 
statement.getParameterHandler().setParameter(1, expected, Time.class); + Time actual = statement.getTime(1); + assertEquals(expected, actual); + } + + @Test + public void testGetTimeParamByName() throws SQLException { + BigQueryCallableStatement statement = + new BigQueryCallableStatement(bigQueryConnection, "call testProc('?')"); + assertNotNull(statement); + Time expected = mock(Time.class); + + statement + .getParameterHandler() + .setParameter(GET_PARAM_KEY, expected, Time.class, BigQueryStatementParameterType.IN, 0); + Time actual = statement.getTime(GET_PARAM_KEY); + assertEquals(expected, actual); + } + + @Test + public void testGetTimeParamWithCalByIndex() throws SQLException { + BigQueryCallableStatement statement = + new BigQueryCallableStatement(bigQueryConnection, "call testProc('?')"); + assertNotNull(statement); + Calendar cal = Calendar.getInstance(); + Time expected = new Time(cal.getTimeInMillis()); + + statement.getParameterHandler().setParameter(1, expected, Time.class); + Time actual = statement.getTime(1, cal); + assertEquals(expected, actual); + } + + @Test + public void testGetTimeParamWithCalByName() throws SQLException { + BigQueryCallableStatement statement = + new BigQueryCallableStatement(bigQueryConnection, "call testProc('?')"); + assertNotNull(statement); + Calendar cal = Calendar.getInstance(); + Time expected = new Time(cal.getTimeInMillis()); + + statement + .getParameterHandler() + .setParameter(GET_PARAM_KEY, expected, Time.class, BigQueryStatementParameterType.IN, 0); + Time actual = statement.getTime(GET_PARAM_KEY, cal); + assertEquals(expected, actual); + } + + @Test + public void testGetTimestampParamByIndex() throws SQLException { + BigQueryCallableStatement statement = + new BigQueryCallableStatement(bigQueryConnection, "call testProc('?')"); + assertNotNull(statement); + Timestamp expected = mock(Timestamp.class); + + statement.getParameterHandler().setParameter(1, expected, Timestamp.class); + Timestamp actual = 
statement.getTimestamp(1); + assertEquals(expected, actual); + } + + @Test + public void testGetTimestampParamByName() throws SQLException { + BigQueryCallableStatement statement = + new BigQueryCallableStatement(bigQueryConnection, "call testProc('?')"); + assertNotNull(statement); + Timestamp expected = mock(Timestamp.class); + + statement + .getParameterHandler() + .setParameter( + GET_PARAM_KEY, expected, Timestamp.class, BigQueryStatementParameterType.IN, 0); + Timestamp actual = statement.getTimestamp(GET_PARAM_KEY); + assertEquals(expected, actual); + } + + @Test + public void testGetTimestampParamWithCalByIndex() throws SQLException { + BigQueryCallableStatement statement = + new BigQueryCallableStatement(bigQueryConnection, "call testProc('?')"); + assertNotNull(statement); + Calendar cal = Calendar.getInstance(); + Timestamp expected = new Timestamp(cal.getTimeInMillis()); + + statement.getParameterHandler().setParameter(1, expected, Timestamp.class); + Timestamp actual = statement.getTimestamp(1, cal); + assertEquals(expected, actual); + } + + @Test + public void testGetTimestampParamWithCalByName() throws SQLException { + BigQueryCallableStatement statement = + new BigQueryCallableStatement(bigQueryConnection, "call testProc('?')"); + assertNotNull(statement); + Calendar cal = Calendar.getInstance(); + Timestamp expected = new Timestamp(cal.getTimeInMillis()); + + statement + .getParameterHandler() + .setParameter( + GET_PARAM_KEY, expected, Timestamp.class, BigQueryStatementParameterType.IN, 0); + Timestamp actual = statement.getTimestamp(GET_PARAM_KEY, cal); + assertEquals(expected, actual); + } + + @Test + public void testSetBigDecimalParamByName() throws SQLException { + BigQueryCallableStatement statement = + new BigQueryCallableStatement(bigQueryConnection, "call testProc('?')"); + assertNotNull(statement); + BigDecimal expected = mock(BigDecimal.class); + + statement.setBigDecimal(PARAM_KEY, expected); + BigDecimal actual = 
statement.getBigDecimal(PARAM_KEY); + assertEquals(expected, actual); + } + + @Test + public void testSetBooleanParamByName() throws SQLException { + BigQueryCallableStatement statement = + new BigQueryCallableStatement(bigQueryConnection, "call testProc('?')"); + assertNotNull(statement); + Boolean expected = true; + + statement.setBoolean(PARAM_KEY, expected); + Boolean actual = statement.getBoolean(PARAM_KEY); + assertEquals(expected, actual); + } + + @Test + public void testSetByteParamByName() throws SQLException { + BigQueryCallableStatement statement = + new BigQueryCallableStatement(bigQueryConnection, "call testProc('?')"); + assertNotNull(statement); + Byte expected = "hello".getBytes()[0]; + + statement.setByte(PARAM_KEY, expected); + Byte actual = statement.getByte(PARAM_KEY); + assertEquals(expected, actual); + } + + @Test + public void testSetByteArrayParamByName() throws SQLException { + BigQueryCallableStatement statement = + new BigQueryCallableStatement(bigQueryConnection, "call testProc('?')"); + assertNotNull(statement); + byte[] expected = "hello".getBytes(); + + statement.setBytes(PARAM_KEY, expected); + byte[] actual = statement.getBytes(PARAM_KEY); + assertEquals(expected, actual); + } + + @Test + public void testSetDateParamByName() throws SQLException { + BigQueryCallableStatement statement = + new BigQueryCallableStatement(bigQueryConnection, "call testProc('?')"); + assertNotNull(statement); + Date expected = mock(Date.class); + statement.setDate(PARAM_KEY, expected); + Date actual = statement.getDate(PARAM_KEY); + assertEquals(expected, actual); + } + + @Test + public void testSetDateCalParamByName() throws SQLException { + BigQueryCallableStatement statement = + new BigQueryCallableStatement(bigQueryConnection, "call testProc('?')"); + assertNotNull(statement); + Date expectedDate = mock(Date.class); + Calendar expectedCal = mock(Calendar.class); + + doReturn(1L).when(expectedDate).getTime(); + doReturn(1L).when(expectedCal).getTime();
+ doReturn(1L).when(expectedCal).getTimeInMillis(); + statement.setDate(PARAM_KEY, expectedDate, expectedCal); + Date actual = statement.getDate(PARAM_KEY); + assertEquals(new Date(1L), actual); + actual = statement.getDate(PARAM_KEY, expectedCal); + assertEquals(new Date(1L), actual); + } + + @Test + public void testSetDoubleParamByName() throws SQLException { + BigQueryCallableStatement statement = + new BigQueryCallableStatement(bigQueryConnection, "call testProc('?')"); + assertNotNull(statement); + Double expected = 123.123; + statement.setDouble(PARAM_KEY, expected); + Double actual = statement.getDouble(PARAM_KEY); + assertEquals(expected, actual); + } + + @Test + public void testSetFloatParamByName() throws SQLException { + BigQueryCallableStatement statement = + new BigQueryCallableStatement(bigQueryConnection, "call testProc('?')"); + assertNotNull(statement); + Float expected = 123.123F; + statement.setFloat(PARAM_KEY, expected); + Float actual = statement.getFloat(PARAM_KEY); + assertEquals(expected, actual); + } + + @Test + public void testSetIntParamByName() throws SQLException { + BigQueryCallableStatement statement = + new BigQueryCallableStatement(bigQueryConnection, "call testProc('?')"); + assertNotNull(statement); + Integer expected = 1; + statement.setInt(PARAM_KEY, expected); + Integer actual = statement.getInt(PARAM_KEY); + assertEquals(expected, actual); + } + + @Test + public void testSetLongParamByName() throws SQLException { + BigQueryCallableStatement statement = + new BigQueryCallableStatement(bigQueryConnection, "call testProc('?')"); + assertNotNull(statement); + Long expected = 1L; + statement.setLong(PARAM_KEY, expected); + Long actual = statement.getLong(PARAM_KEY); + assertEquals(expected, actual); + } + + @Test + public void testSetObjectParamByName() throws SQLException { + BigQueryCallableStatement statement = + new BigQueryCallableStatement(bigQueryConnection, "call testProc('?')"); + assertNotNull(statement); + Long expected 
= 1L; + statement.setObject(PARAM_KEY, expected); + Long actual = statement.getObject(PARAM_KEY, Long.class); + assertEquals(expected, actual); + } + + @Test + public void testSetObjectSqlTypeParamByName() throws SQLException { + BigQueryCallableStatement statement = + new BigQueryCallableStatement(bigQueryConnection, "call testProc('?')"); + assertNotNull(statement); + Long expected = 1L; + statement.setObject(PARAM_KEY, expected, java.sql.Types.BIGINT); + Long actual = statement.getObject(PARAM_KEY, Long.class); + assertEquals(expected, actual); + } + + @Test + public void testSetObjectSqlTypeScaleParamByName() throws SQLException { + BigQueryCallableStatement statement = + new BigQueryCallableStatement(bigQueryConnection, "call testProc('?')"); + assertNotNull(statement); + Long expected = 1L; + statement.setObject(PARAM_KEY, expected, java.sql.Types.BIGINT, 5); + Long actual = statement.getObject(PARAM_KEY, Long.class); + assertEquals(expected, actual); + int scale = statement.getParameterHandler().getParameterScale(PARAM_KEY); + assertEquals(5, scale); + } + + @Test + public void testSetStringParamByName() throws SQLException { + BigQueryCallableStatement statement = + new BigQueryCallableStatement(bigQueryConnection, "call testProc('?')"); + assertNotNull(statement); + String expected = "hello"; + statement.setString(PARAM_KEY, expected); + String actual = statement.getString(PARAM_KEY); + assertEquals(expected, actual); + } + + @Test + public void testSetTimeParamByName() throws SQLException { + BigQueryCallableStatement statement = + new BigQueryCallableStatement(bigQueryConnection, "call testProc('?')"); + assertNotNull(statement); + Time expected = mock(Time.class); + statement.setTime(PARAM_KEY, expected); + Time actual = statement.getTime(PARAM_KEY); + assertEquals(expected, actual); + } + + @Test + public void testSetTimeCalParamByName() throws SQLException { + BigQueryCallableStatement statement = + new BigQueryCallableStatement(bigQueryConnection, 
"call testProc('?')"); + assertNotNull(statement); + Time expectedTime = mock(Time.class); + Calendar expectedCal = mock(Calendar.class); + + doReturn(1L).when(expectedTime).getTime(); + doReturn(1L).when(expectedCal).getTime(); + doReturn(1L).when(expectedCal).getTimeInMillis(); + statement.setTime(PARAM_KEY, expectedTime, expectedCal); + Time actual = statement.getTime(PARAM_KEY); + assertEquals(new Time(1L), actual); + actual = statement.getTime(PARAM_KEY, expectedCal); + assertEquals(new Time(1L), actual); + } + + @Test + public void testSetTimestampParamByName() throws SQLException { + BigQueryCallableStatement statement = + new BigQueryCallableStatement(bigQueryConnection, "call testProc('?')"); + assertNotNull(statement); + Timestamp expected = mock(Timestamp.class); + statement.setTimestamp(PARAM_KEY, expected); + Timestamp actual = statement.getTimestamp(PARAM_KEY); + assertEquals(expected, actual); + } + + @Test + public void testSetTimestampCalParamByName() throws SQLException { + BigQueryCallableStatement statement = + new BigQueryCallableStatement(bigQueryConnection, "call testProc('?')"); + assertNotNull(statement); + Timestamp expectedTimestamp = mock(Timestamp.class); + Calendar expectedCal = mock(Calendar.class); + + doReturn(1L).when(expectedTimestamp).getTime(); + doReturn(1L).when(expectedCal).getTime(); + doReturn(1L).when(expectedCal).getTimeInMillis(); + statement.setTimestamp(PARAM_KEY, expectedTimestamp, expectedCal); + Timestamp actual = statement.getTimestamp(PARAM_KEY); + assertEquals(new Timestamp(1L), actual); + actual = statement.getTimestamp(PARAM_KEY, expectedCal); + assertEquals(new Timestamp(1L), actual); + } + + ////////// Private helper methods //////////////////////// + private void registerOutParamIndexHelper( + int paramIndex, int javaSqlType, Class javaType, StandardSQLTypeName sqlType, int scale) + throws SQLException { + BigQueryCallableStatement statement = + new BigQueryCallableStatement(bigQueryConnection, "call 
testProc('?')"); + assertNotNull(statement); + + if (scale >= 0) { + statement.registerOutParameter(paramIndex, javaSqlType, scale); + } else { + statement.registerOutParameter(paramIndex, javaSqlType); + } + BigQueryParameterHandler paramHandler = statement.getParameterHandler(); + assertNotNull(paramHandler); + + assertNull(paramHandler.getParameter(paramIndex)); + assertEquals(BigQueryStatementParameterType.OUT, paramHandler.getParameterType(paramIndex)); + assertEquals(scale, paramHandler.getParameterScale(paramIndex)); + assertEquals(javaType, paramHandler.getType(paramIndex)); + assertEquals(sqlType, paramHandler.getSqlType(paramIndex)); + } + + private void registerOutParamNameHelper( + String paramName, int javaSqlType, Class javaType, StandardSQLTypeName sqlType, int scale) + throws SQLException { + BigQueryCallableStatement statement = + new BigQueryCallableStatement(bigQueryConnection, "call testProc('?')"); + assertNotNull(statement); + + if (scale >= 0) { + statement.registerOutParameter(paramName, javaSqlType, scale); + } else { + statement.registerOutParameter(paramName, javaSqlType); + } + BigQueryParameterHandler paramHandler = statement.getParameterHandler(); + assertNotNull(paramHandler); + + assertNull(paramHandler.getParameter(paramName)); + assertEquals(BigQueryStatementParameterType.OUT, paramHandler.getParameterType(paramName)); + assertEquals(scale, paramHandler.getParameterScale(paramName)); + assertEquals(javaType, paramHandler.getType(paramName)); + assertEquals(sqlType, paramHandler.getSqlType(paramName)); + } +} diff --git a/google-cloud-bigquery-jdbc/src/test/java/com/google/cloud/bigquery/jdbc/BigQueryConnectionTest.java b/google-cloud-bigquery-jdbc/src/test/java/com/google/cloud/bigquery/jdbc/BigQueryConnectionTest.java new file mode 100644 index 000000000..22f98af07 --- /dev/null +++ b/google-cloud-bigquery-jdbc/src/test/java/com/google/cloud/bigquery/jdbc/BigQueryConnectionTest.java @@ -0,0 +1,343 @@ +/* + * Copyright 2025 Google 
LLC
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * https://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package com.google.cloud.bigquery.jdbc;

import static org.junit.Assert.*;

import com.google.api.gax.rpc.HeaderProvider;
import com.google.cloud.bigquery.exception.BigQueryJdbcException;
import com.google.cloud.bigquery.storage.v1.BigQueryWriteClient;
import java.io.IOException;
import java.io.InputStream;
import java.sql.SQLException;
import java.util.Arrays;
import java.util.Collections;
import java.util.List;
import java.util.Properties;
import org.junit.Before;
import org.junit.Test;

/**
 * Unit tests for {@link BigQueryConnection}: driver-version reporting, user-agent header
 * construction (including partner-token handling), and parsing of Write-API, project and
 * metadata-related JDBC URL properties.
 *
 * <p>NOTE(review): these tests construct real {@code BigQueryConnection} objects from a JDBC URL
 * with redacted tokens — presumably no network call happens at construction time; verify against
 * the BigQueryConnection implementation.
 */
public class BigQueryConnectionTest {

  // Fallback version used when dependencies.properties cannot be read.
  private static final String DEFAULT_VERSION = "0.0.0";
  // Leading token expected at the start of the user-agent header.
  private static final String DEFAULT_JDBC_TOKEN_VALUE = "Google-BigQuery-JDBC-Driver";
  private String expectedVersion;

  @Before
  public void setUp() throws IOException {
    // Read the expected version from the dependencies.properties file once.
    expectedVersion = getExpectedVersion();
  }

  /**
   * Reads {@code version.jdbc} from the bundled dependencies.properties resource; falls back to
   * {@link #DEFAULT_VERSION} when the resource or the property is missing or unreadable.
   */
  private String getExpectedVersion() {
    Properties props = new Properties();
    try (InputStream in =
        getClass().getResourceAsStream("/com/google/cloud/bigquery/jdbc/dependencies.properties")) {
      if (in != null) {
        props.load(in);
        String version = props.getProperty("version.jdbc");
        if (version != null) {
          return version;
        }
      }
    } catch (IOException e) {
      // Best effort: a read failure only means we fall back to the default version.
      System.err.println("Error reading dependencies.properties: " + e.getMessage());
    }
    return DEFAULT_VERSION;
  }

  @Test
  public void testGetLibraryVersion() throws IOException, SQLException {
    String url =
        "jdbc:bigquery://https://www.googleapis.com/bigquery/v2:443;"
            + "OAuthType=2;ProjectId=MyBigQueryProject;"
            + "OAuthAccessToken=redactedToken;OAuthClientId=redactedToken;"
            + "OAuthClientSecret=redactedToken;";
    try (BigQueryConnection connection = new BigQueryConnection(url)) {
      String result = connection.getLibraryVersion(BigQueryConnection.class);
      assertEquals(expectedVersion, result);
    }
  }

  @Test
  public void testHeaderProvider() throws IOException, SQLException {
    String url =
        "jdbc:bigquery://https://www.googleapis.com/bigquery/v2:443;"
            + "OAuthType=2;ProjectId=MyBigQueryProject;"
            + "OAuthAccessToken=redactedToken;OAuthClientId=redactedToken;"
            + "OAuthClientSecret=redactedToken;";
    try (BigQueryConnection connection = new BigQueryConnection(url)) {
      HeaderProvider headerProvider = connection.createHeaderProvider();
      String agent = headerProvider.getHeaders().get("user-agent");
      // Without a PartnerToken property no "(GPN:...)" partner segment may appear.
      assertTrue(agent.startsWith(DEFAULT_JDBC_TOKEN_VALUE + "/" + expectedVersion));
      assertFalse(agent.contains("(GPN:"));
    }
  }

  @Test
  public void testHeaderProviderWithPartnerToken() throws IOException, SQLException {
    String partnerTokenString = "(GPN:MyPartner; staging)";
    String url =
        "jdbc:bigquery://https://www.googleapis.com/bigquery/v2:443;"
            + "OAuthType=2;ProjectId=MyBigQueryProject;"
            + "OAuthAccessToken=redactedToken;OAuthClientId=redactedToken;"
            + "OAuthClientSecret=redactedToken;PartnerToken="
            + partnerTokenString;
    try (BigQueryConnection connection = new BigQueryConnection(url)) {
      HeaderProvider headerProvider = connection.createHeaderProvider();
      String agent = headerProvider.getHeaders().get("user-agent");
      // A well-formed "(GPN:...)" partner token is appended after the driver/version token.
      assertTrue(
          agent.startsWith(
              DEFAULT_JDBC_TOKEN_VALUE + "/" + expectedVersion + " " + partnerTokenString));
      assertTrue(agent.contains("(GPN:"));
      assertTrue(agent.contains("MyPartner;"));
      assertTrue(agent.contains("staging)"));
    }
  }

  @Test
  public void testHeaderProviderWithEmptyPartnerToken() throws IOException, SQLException {
    String url =
        "jdbc:bigquery://https://www.googleapis.com/bigquery/v2:443;"
            + "OAuthType=2;ProjectId=MyBigQueryProject;"
            + "OAuthAccessToken=redactedToken;OAuthClientId=redactedToken;"
            + "OAuthClientSecret=redactedToken;PartnerToken=";
    try (BigQueryConnection connection = new BigQueryConnection(url)) {
      HeaderProvider headerProvider = connection.createHeaderProvider();
      String agent = headerProvider.getHeaders().get("user-agent");
      // An empty PartnerToken value is ignored — no partner segment in the agent string.
      assertTrue(agent.startsWith(DEFAULT_JDBC_TOKEN_VALUE + "/" + expectedVersion));
      assertFalse(agent.contains("(GPN:"));
    }
  }

  @Test
  public void testHeaderProviderWithPartnerTokenNoEnv() throws IOException, SQLException {
    String partnerTokenString = "(GPN:MyPartner)";
    String url =
        "jdbc:bigquery://https://www.googleapis.com/bigquery/v2:443;"
            + "OAuthType=2;ProjectId=MyBigQueryProject;"
            + "OAuthAccessToken=redactedToken;OAuthClientId=redactedToken;"
            + "OAuthClientSecret=redactedToken;PartnerToken="
            + partnerTokenString;
    try (BigQueryConnection connection = new BigQueryConnection(url)) {
      HeaderProvider headerProvider = connection.createHeaderProvider();
      String agent = headerProvider.getHeaders().get("user-agent");
      // Partner token without the optional environment suffix is still accepted.
      assertTrue(
          agent.startsWith(
              DEFAULT_JDBC_TOKEN_VALUE + "/" + expectedVersion + " " + partnerTokenString));
      assertTrue(agent.contains("GPN:"));
      assertTrue(agent.contains("MyPartner"));
    }
  }

  @Test
  public void testHeaderProviderWithInvalidPartner() throws IOException, SQLException {
    // Missing the mandatory "GPN:" prefix, so the token must be rejected.
    String partnerTokenString = "(MyPartner; staging)";
    String url =
        "jdbc:bigquery://https://www.googleapis.com/bigquery/v2:443;"
            + "OAuthType=2;ProjectId=MyBigQueryProject;"
            + "OAuthAccessToken=redactedToken;OAuthClientId=redactedToken;"
            + "OAuthClientSecret=redactedToken;PartnerToken="
            + partnerTokenString;
    try (BigQueryConnection connection = new BigQueryConnection(url)) {
      HeaderProvider headerProvider = connection.createHeaderProvider();
      String agent = headerProvider.getHeaders().get("user-agent");
      assertTrue(agent.startsWith(DEFAULT_JDBC_TOKEN_VALUE + "/" + expectedVersion));
      assertFalse(agent.contains("(MyPartner;"));
      assertFalse(agent.contains("(GPN:"));
    }
  }

  @Test
  public void testWriteAPIConnectionProperties() throws SQLException {
    // Test without connection properties. Defaults to default values.
    String connectionUriDefault =
        "jdbc:bigquery://https://www.googleapis.com/bigquery/v2:443;"
            + "OAuthType=2;ProjectId=MyBigQueryProject;"
            + "OAuthAccessToken=redactedToken;OAuthClientId=redactedToken;"
            + "OAuthClientSecret=redactedToken;";
    try (BigQueryConnection connectionDefault = new BigQueryConnection(connectionUriDefault)) {

      // Defaults: Write API disabled, activation threshold 3, append batch 1000.
      assertFalse(connectionDefault.enableWriteAPI);
      assertEquals(3, connectionDefault.writeAPIActivationRowCount);
      assertEquals(1000, connectionDefault.writeAPIAppendRowCount);
    } catch (IOException | SQLException e) {
      throw new BigQueryJdbcException(e);
    }

    // Test with connection properties
    String connectionUri =
        "jdbc:bigquery://https://www.googleapis.com/bigquery/v2:443;"
            + "OAuthType=2;ProjectId=MyBigQueryProject;"
            + "OAuthAccessToken=redactedToken;OAuthClientId=redactedToken;"
            + "OAuthClientSecret=redactedToken;"
            + "EnableWriteAPI=1;SWA_ActivationRowCount=6;SWA_AppendRowCount=500";
    try (BigQueryConnection connection = new BigQueryConnection(connectionUri)) {
      assertTrue(connection.enableWriteAPI);
      assertEquals(6, connection.writeAPIActivationRowCount);
      assertEquals(500, connection.writeAPIAppendRowCount);
    } catch (IOException | SQLException e) {
      throw new BigQueryJdbcException(e);
    }
  }

  @Test
  public void testGetWriteClient() throws SQLException {
    // Test without connection properties. Defaults to default values.
    String connectionUriDefault =
        "jdbc:bigquery://https://www.googleapis.com/bigquery/v2:443;"
            + "OAuthType=2;ProjectId=MyBigQueryProject;"
            + "OAuthAccessToken=redactedToken;OAuthClientId=redactedToken;"
            + "OAuthClientSecret=redactedToken;";
    try (BigQueryConnection connectionDefault = new BigQueryConnection(connectionUriDefault)) {
      // The write client must not exist before first use...
      assertNull(connectionDefault.bigQueryWriteClient);
      // Lazy initialization
      BigQueryWriteClient writeClient = connectionDefault.getBigQueryWriteClient();
      assertNotNull(writeClient);
      assertFalse(writeClient.isShutdown());
    } catch (SQLException | IOException e) {
      throw new BigQueryJdbcException(e);
    }
  }

  @Test
  public void testAdditionalProjects() throws IOException, BigQueryJdbcException {
    // Multiple comma-separated projects.
    String url1 =
        "jdbc:bigquery://https://www.googleapis.com/bigquery/v2:443;"
            + "OAuthType=2;ProjectId=MyBigQueryProject;"
            + "OAuthAccessToken=redactedToken;OAuthClientId=redactedToken;"
            + "OAuthClientSecret=redactedToken;"
            + "AdditionalProjects=projA,projB";
    try (BigQueryConnection conn1 = new BigQueryConnection(url1)) {
      List additionalProjects1 = conn1.getAdditionalProjects();
      assertNotNull(additionalProjects1);
      assertEquals(Arrays.asList("projA", "projB"), additionalProjects1);
    } catch (SQLException | IOException e) {
      throw new BigQueryJdbcException(e);
    }
    // A single project parses into a one-element list.
    String url2 =
        "jdbc:bigquery://https://www.googleapis.com/bigquery/v2:443;"
            + "OAuthType=2;ProjectId=MyBigQueryProject;"
            + "OAuthAccessToken=redactedToken;OAuthClientId=redactedToken;"
            + "OAuthClientSecret=redactedToken;"
            + "AdditionalProjects=projX";
    try (BigQueryConnection conn2 = new BigQueryConnection(url2)) {
      List additionalProjects2 = conn2.getAdditionalProjects();
      assertNotNull(additionalProjects2);
      assertEquals(Collections.singletonList("projX"), additionalProjects2);
    } catch (SQLException | IOException e) {
      throw new BigQueryJdbcException(e);
    }
  }

  @Test
  public void testFilterTablesOnDefaultDatasetProperty() throws SQLException, IOException {
    // Test default value
    String urlDefault =
        "jdbc:bigquery://https://www.googleapis.com/bigquery/v2:443;"
            + "OAuthType=2;ProjectId=MyBigQueryProject;"
            + "OAuthAccessToken=redactedToken;OAuthClientId=redactedToken;"
            + "OAuthClientSecret=redactedToken;";
    try (BigQueryConnection connectionDefault = new BigQueryConnection(urlDefault)) {
      assertFalse(
          "Default value for FilterTablesOnDefaultDataset should be false",
          connectionDefault.isFilterTablesOnDefaultDataset());
    } catch (SQLException | IOException e) {
      throw new BigQueryJdbcException(e);
    }

    // Test explicitly setting to true
    String urlTrue =
        "jdbc:bigquery://https://www.googleapis.com/bigquery/v2:443;"
            + "OAuthType=2;ProjectId=MyBigQueryProject;"
            + "OAuthAccessToken=redactedToken;OAuthClientId=redactedToken;"
            + "OAuthClientSecret=redactedToken;"
            + "FilterTablesOnDefaultDataset=1;";
    try (BigQueryConnection connectionTrue = new BigQueryConnection(urlTrue)) {
      assertTrue(
          "FilterTablesOnDefaultDataset should be true when set to 1",
          connectionTrue.isFilterTablesOnDefaultDataset());
    } catch (SQLException | IOException e) {
      throw new BigQueryJdbcException(e);
    }
  }

  @Test
  public void testRequestGoogleDriveScopeProperty() throws IOException, SQLException {
    // Test enabled
    String urlEnabled =
        "jdbc:bigquery://https://www.googleapis.com/bigquery/v2:443;"
            + "OAuthType=2;ProjectId=MyBigQueryProject;"
            + "OAuthAccessToken=redactedToken;OAuthClientId=redactedToken;"
            + "OAuthClientSecret=redactedToken;"
            + "RequestGoogleDriveScope=1;";
    try (BigQueryConnection connectionEnabled = new BigQueryConnection(urlEnabled)) {
      assertEquals(
          "RequestGoogleDriveScope should be enabled when set to 1",
          1,
          connectionEnabled.isRequestGoogleDriveScope());
    } catch (SQLException | IOException e) {
      throw new BigQueryJdbcException(e);
    }

    // Test disabled
    String urlDisabled =
        "jdbc:bigquery://https://www.googleapis.com/bigquery/v2:443;"
            + "OAuthType=2;ProjectId=MyBigQueryProject;"
            + "OAuthAccessToken=redactedToken;OAuthClientId=redactedToken;"
            + "OAuthClientSecret=redactedToken;"
            + "RequestGoogleDriveScope=0;";
    try (BigQueryConnection connectionDisabled = new BigQueryConnection(urlDisabled)) {
      assertEquals(
          "RequestGoogleDriveScope should be disabled when set to 0",
          0,
          connectionDisabled.isRequestGoogleDriveScope());
    } catch (SQLException | IOException e) {
      throw new BigQueryJdbcException(e);
    }
  }

  @Test
  public void testMetaDataFetchThreadCountProperty() throws SQLException, IOException {
    // Test Case 1: Should use the default value when the property is not specified.
    String urlDefault =
        "jdbc:bigquery://https://www.googleapis.com/bigquery/v2:443;"
            + "OAuthType=2;ProjectId=MyBigQueryProject;"
            + "OAuthAccessToken=redactedToken;OAuthClientId=redactedToken;"
            + "OAuthClientSecret=redactedToken;";
    try (BigQueryConnection connectionDefault = new BigQueryConnection(urlDefault)) {
      assertEquals(
          "Should use the default value when the property is not set",
          BigQueryJdbcUrlUtility.DEFAULT_METADATA_FETCH_THREAD_COUNT_VALUE,
          connectionDefault.getMetadataFetchThreadCount());
    }

    // Test Case 2: Should use the custom value when a valid integer is provided.
    String urlCustom =
        "jdbc:bigquery://https://www.googleapis.com/bigquery/v2:443;"
            + "OAuthType=2;ProjectId=MyBigQueryProject;"
            + "OAuthAccessToken=redactedToken;OAuthClientId=redactedToken;"
            + "OAuthClientSecret=redactedToken;"
            + "MetaDataFetchThreadCount=16;";
    try (BigQueryConnection connectionCustom = new BigQueryConnection(urlCustom)) {
      assertEquals(
          "Should use the custom value when a valid integer is provided",
          16,
          connectionCustom.getMetadataFetchThreadCount());
    }
  }
}
diff --git a/google-cloud-bigquery-jdbc/src/test/java/com/google/cloud/bigquery/jdbc/BigQueryDaemonPollingTaskTest.java b/google-cloud-bigquery-jdbc/src/test/java/com/google/cloud/bigquery/jdbc/BigQueryDaemonPollingTaskTest.java
new file mode 100644
index 000000000..b99ff4bec
--- /dev/null
+++ b/google-cloud-bigquery-jdbc/src/test/java/com/google/cloud/bigquery/jdbc/BigQueryDaemonPollingTaskTest.java
@@ -0,0 +1,63 @@
/*
 * Copyright 2023 Google LLC
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * https://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package com.google.cloud.bigquery.jdbc;

import static com.google.common.truth.Truth.assertThat;

import com.google.cloud.bigquery.jdbc.BigQueryResultSetFinalizers.ArrowResultSetFinalizer;
import java.lang.ref.ReferenceQueue;
import java.util.ArrayList;
import java.util.List;
import org.junit.Before;
import org.junit.Test;

/**
 * Tests that the GC-polling daemon used to finalize abandoned result sets is started exactly once
 * per JVM: a second {@code startGcDaemonTask} call must report {@code false}.
 */
public class BigQueryDaemonPollingTaskTest {

  // Shared with the daemon under test; recreated before each test.
  static ReferenceQueue referenceQueueArrowRs;
  static ReferenceQueue referenceQueueJsonRs;
  static List arrowResultSetFinalizers;
  static List jsonResultSetFinalizers;

  @Before
  public void setUp() {
    referenceQueueArrowRs = new ReferenceQueue<>();
    referenceQueueJsonRs = new ReferenceQueue<>();
    arrowResultSetFinalizers = new ArrayList<>();
    jsonResultSetFinalizers = new ArrayList<>();
  }

  @Test
  public void testStartGcDaemonTask() {

    // start the Daemon first and then make sure it doesn't get started again
    BigQueryDaemonPollingTask.startGcDaemonTask(
        referenceQueueArrowRs,
        referenceQueueJsonRs,
        arrowResultSetFinalizers,
        jsonResultSetFinalizers); // Daemon thread might have already started by the Junit at
    // BigQueryStatementTest, hence we ignore the response here and
    // check it on the line below

    // The second attempt must be a no-op regardless of which test started the daemon first.
    assertThat(
            BigQueryDaemonPollingTask.startGcDaemonTask(
                referenceQueueArrowRs,
                referenceQueueJsonRs,
                arrowResultSetFinalizers,
                jsonResultSetFinalizers))
        .isFalse();
  }
}
diff --git a/google-cloud-bigquery-jdbc/src/test/java/com/google/cloud/bigquery/jdbc/BigQueryDatabaseMetaDataTest.java b/google-cloud-bigquery-jdbc/src/test/java/com/google/cloud/bigquery/jdbc/BigQueryDatabaseMetaDataTest.java
new file mode 100644
index 000000000..536aae15b
--- /dev/null
+++ b/google-cloud-bigquery-jdbc/src/test/java/com/google/cloud/bigquery/jdbc/BigQueryDatabaseMetaDataTest.java
@@ -0,0 +1,3209 @@
/*
 * Copyright 2024 Google LLC
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in
compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package com.google.cloud.bigquery.jdbc;

import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertFalse;
import static org.junit.Assert.assertNotNull;
import static org.junit.Assert.assertNull;
import static org.junit.Assert.assertSame;
import static org.junit.Assert.assertTrue;
import static org.junit.Assert.fail;
import static org.mockito.ArgumentMatchers.any;
import static org.mockito.ArgumentMatchers.eq;
import static org.mockito.Mockito.*;

import com.google.api.gax.paging.Page;
import com.google.cloud.bigquery.*;
import com.google.cloud.bigquery.BigQuery.RoutineListOption;
import java.io.IOException;
import java.io.InputStream;
import java.sql.DatabaseMetaData;
import java.sql.ResultSet;
import java.sql.ResultSetMetaData;
import java.sql.SQLException;
import java.sql.Statement;
import java.sql.Types;
import java.util.*;
import java.util.concurrent.Callable;
import java.util.concurrent.ExecutionException;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.Future;
import java.util.regex.Pattern;
import org.junit.Before;
import org.junit.Test;

/**
 * Unit tests for {@link BigQueryDatabaseMetaData} built entirely on Mockito mocks of the BigQuery
 * client — no network access. Covers SQL-LIKE pattern compilation, BigQuery-to-JDBC type mapping,
 * and the row/schema builders backing getTables/getColumns.
 */
public class BigQueryDatabaseMetaDataTest {

  private BigQueryConnection bigQueryConnection;
  private BigQueryDatabaseMetaData dbMetadata;
  private BigQuery bigqueryClient;

  @Before
  public void setUp() throws SQLException {
    bigQueryConnection = mock(BigQueryConnection.class);
    bigqueryClient = mock(BigQuery.class);
    Statement mockStatement = mock(Statement.class);

    when(bigQueryConnection.getConnectionUrl()).thenReturn("jdbc:bigquery://test-project");
    when(bigQueryConnection.getBigQuery()).thenReturn(bigqueryClient);
    when(bigQueryConnection.createStatement()).thenReturn(mockStatement);

    dbMetadata = new BigQueryDatabaseMetaData(bigQueryConnection);
  }

  /** Builds a mock {@link Table} with the given id, definition type and description. */
  private Table mockBigQueryTable(
      String project, String dataset, String table, TableDefinition.Type type, String description) {
    Table mockTable = mock(Table.class);
    TableId mockTableId = TableId.of(project, dataset, table);
    TableDefinition mockDefinition = mock(TableDefinition.class);

    when(mockTable.getTableId()).thenReturn(mockTableId);
    when(mockTable.getDefinition()).thenReturn(mockDefinition);
    when(mockDefinition.getType()).thenReturn(type);
    when(mockTable.getDescription()).thenReturn(description);

    return mockTable;
  }

  /** Mock data type whose type-kind string mirrors the given {@link StandardSQLTypeName}. */
  private StandardSQLDataType mockStandardSQLDataType(StandardSQLTypeName typeKind) {
    StandardSQLDataType mockDataType = mock(StandardSQLDataType.class);
    when(mockDataType.getTypeKind()).thenReturn(typeKind.name());
    return mockDataType;
  }

  /** Mock routine argument; {@code mode} is "IN", "OUT", "INOUT", or null. */
  private RoutineArgument mockRoutineArgument(String name, StandardSQLTypeName type, String mode) {
    RoutineArgument mockArg = mock(RoutineArgument.class);
    when(mockArg.getName()).thenReturn(name);
    StandardSQLDataType mockDataType = mockStandardSQLDataType(type);
    when(mockArg.getDataType()).thenReturn(mockDataType);
    when(mockArg.getMode()).thenReturn(mode); // "IN", "OUT", "INOUT", or null
    return mockArg;
  }

  /** Mock {@link Routine}; a null argument list is normalized to an empty list. */
  private Routine mockBigQueryRoutineWithArgs(
      String project,
      String dataset,
      String routineName,
      String routineType,
      String description,
      List arguments) {
    Routine mockRoutine = mock(Routine.class);
    RoutineId mockRoutineId = RoutineId.of(project, dataset, routineName);
    when(mockRoutine.getRoutineId()).thenReturn(mockRoutineId);
    when(mockRoutine.getRoutineType()).thenReturn(routineType);
    when(mockRoutine.getDescription()).thenReturn(description);
    if (arguments != null) {
      when(mockRoutine.getArguments()).thenReturn(arguments);
    } else {
      when(mockRoutine.getArguments()).thenReturn(Collections.emptyList());
    }
    return mockRoutine;
  }

  /** Mock table-valued-function return type with the given columns. */
  private StandardSQLTableType mockStandardSQLTableType(List columns) {
    StandardSQLTableType mockTableType = mock(StandardSQLTableType.class);
    when(mockTableType.getColumns()).thenReturn(columns);
    return mockTableType;
  }

  /** Mock named field of the given standard SQL type. */
  private StandardSQLField mockStandardSQLField(String name, StandardSQLTypeName type) {
    StandardSQLField mockField = mock(StandardSQLField.class);
    StandardSQLDataType mockedDataType = mockStandardSQLDataType(type);
    when(mockField.getName()).thenReturn(name);
    when(mockField.getDataType()).thenReturn(mockedDataType);
    return mockField;
  }

  @Test
  public void testBigqueryDatabaseMetaDataGetters() throws SQLException {
    BigQueryDatabaseMetaData dbMetadata = new BigQueryDatabaseMetaData(bigQueryConnection);
    assertEquals("GoogleJDBCDriverForGoogleBigQuery", dbMetadata.getDriverName());
    assertEquals("Google BigQuery", dbMetadata.getDatabaseProductName());
    assertEquals("2.0", dbMetadata.getDatabaseProductVersion());
    assertEquals("Dataset", dbMetadata.getSchemaTerm());
    assertEquals("Procedure", dbMetadata.getProcedureTerm());
    assertEquals("Project", dbMetadata.getCatalogTerm());
  }

  @Test
  public void testReadSqlFromFile() throws SQLException {
    BigQueryDatabaseMetaData dbMetadata = new BigQueryDatabaseMetaData(bigQueryConnection);

    String primaryKeysQuery =
        BigQueryDatabaseMetaData.readSqlFromFile("DatabaseMetaData_GetPrimaryKeys.sql");
    assertTrue(primaryKeysQuery.contains("pk$"));

    try {
      when(bigQueryConnection.prepareStatement(primaryKeysQuery)).thenCallRealMethod();
      // Placeholder substitution must produce a fully-qualified INFORMATION_SCHEMA reference.
      String sql =
          dbMetadata.replaceSqlParameters(
              primaryKeysQuery, "project_name", "dataset_name", "table_name");
      assertTrue(sql.contains("project_name.dataset_name.INFORMATION_SCHEMA.KEY_COLUMN_USAGE"));
    } catch (SQLException e) {
      throw new RuntimeException(e);
    }
  }

  @Test
  public void testNeedsListing() {
    assertTrue("Null pattern should require listing", dbMetadata.needsListing(null));
    assertTrue("Pattern with % should require listing", dbMetadata.needsListing("abc%def"));
    assertTrue("Pattern with _ should require listing", dbMetadata.needsListing("abc_def"));
    assertTrue("Pattern with both wildcards", dbMetadata.needsListing("a%c_d%f"));
    assertFalse("Empty pattern should not require listing", dbMetadata.needsListing(""));
    assertFalse("Pattern without wildcards", dbMetadata.needsListing("exactName"));
  }

  @Test
  public void testCompileSqlLikePattern() {
    // Null input -> Null pattern
    assertNull(dbMetadata.compileSqlLikePattern(null));

    // Empty input -> Pattern matching nothing ($^)
    Pattern emptyPattern = dbMetadata.compileSqlLikePattern("");
    assertNotNull(emptyPattern);
    assertFalse(emptyPattern.matcher("").matches());
    assertFalse(emptyPattern.matcher("a").matches());
    assertEquals("(?!)", emptyPattern.pattern());

    // Exact match (matching is case-insensitive)
    Pattern exactPattern = dbMetadata.compileSqlLikePattern("tableName");
    assertNotNull(exactPattern);
    assertTrue(exactPattern.matcher("tableName").matches());
    assertTrue(exactPattern.matcher("TABLENAME").matches());
    assertFalse(exactPattern.matcher("tableNameX").matches());
    assertFalse(exactPattern.matcher("XtableName").matches());

    // Percent wildcard (%) -> .*
    Pattern percentPattern = dbMetadata.compileSqlLikePattern("table%");
    assertNotNull(percentPattern);
    assertTrue(percentPattern.matcher("table").matches());
    assertTrue(percentPattern.matcher("tableName").matches());
    assertTrue(percentPattern.matcher("TABLE_123").matches());
    assertFalse(percentPattern.matcher("myTable").matches());

    Pattern percentPattern2 = dbMetadata.compileSqlLikePattern("%Name");
    assertNotNull(percentPattern2);
    assertTrue(percentPattern2.matcher("Name").matches());
    assertTrue(percentPattern2.matcher("tableName").matches());
    assertTrue(percentPattern2.matcher("VIEW_NAME").matches());
    assertFalse(percentPattern2.matcher("NameSuffix").matches());

    Pattern percentPattern3 = dbMetadata.compileSqlLikePattern("ta%le");
    assertNotNull(percentPattern3);
    assertTrue(percentPattern3.matcher("table").matches());
    assertTrue(percentPattern3.matcher("TALLE").matches());
    assertTrue(percentPattern3.matcher("tale").matches());
    assertFalse(percentPattern3.matcher("table123").matches());

    // Underscore wildcard (_) -> .
    Pattern underscorePattern = dbMetadata.compileSqlLikePattern("t_ble");
    assertNotNull(underscorePattern);
    assertTrue(underscorePattern.matcher("table").matches());
    assertTrue(underscorePattern.matcher("tAble").matches());
    assertTrue(underscorePattern.matcher("tXble").matches());
    assertFalse(underscorePattern.matcher("tble").matches());
    assertFalse(underscorePattern.matcher("taable").matches());

    // Mixed wildcards
    Pattern mixedPattern = dbMetadata.compileSqlLikePattern("data_%_set%");
    assertNotNull(mixedPattern);
    assertTrue(mixedPattern.matcher("data_1_set").matches());
    assertTrue(mixedPattern.matcher("data_foo_set_bar").matches());
    assertTrue(mixedPattern.matcher("DATA_X_SET").matches());
    assertFalse(mixedPattern.matcher("dataset").matches());
    assertFalse(mixedPattern.matcher("data_set").matches());

    // Escaping regex metacharacters
    Pattern dotPattern = dbMetadata.compileSqlLikePattern("version_1.0");
    assertNotNull(dotPattern);
    assertTrue(dotPattern.matcher("version_1.0").matches());
    assertFalse(dotPattern.matcher("version_1X0").matches());

    Pattern bracketPattern = dbMetadata.compileSqlLikePattern("array[0]");
    assertNotNull(bracketPattern);
    assertTrue(bracketPattern.matcher("array[0]").matches());
    assertFalse(bracketPattern.matcher("array_0_").matches());
  }

  @Test
  // Verifies each supported scalar BigQuery type maps to the expected JDBC type, type name,
  // column size, decimal digits and radix.
  public void testMapBigQueryTypeToJdbc_ScalarTypes() {
    // INT64 -> BIGINT
    Field fieldInt64 =
        Field.newBuilder("test_int", StandardSQLTypeName.INT64)
            .setMode(Field.Mode.NULLABLE)
            .build();
    BigQueryDatabaseMetaData.ColumnTypeInfo infoInt64 =
        dbMetadata.mapBigQueryTypeToJdbc(fieldInt64);
    assertEquals(Types.BIGINT, infoInt64.jdbcType);
    assertEquals("BIGINT", infoInt64.typeName);
    assertEquals(Integer.valueOf(19), infoInt64.columnSize);
    assertEquals(Integer.valueOf(0), infoInt64.decimalDigits);
    assertEquals(Integer.valueOf(10), infoInt64.numPrecRadix);

    // STRING -> NVARCHAR
    Field fieldString =
        Field.newBuilder("test_string", StandardSQLTypeName.STRING)
            .setMode(Field.Mode.NULLABLE)
            .build();
    BigQueryDatabaseMetaData.ColumnTypeInfo infoString =
        dbMetadata.mapBigQueryTypeToJdbc(fieldString);
    assertEquals(Types.NVARCHAR, infoString.jdbcType);
    assertEquals("NVARCHAR", infoString.typeName);
    assertNull(infoString.columnSize);
    assertNull(infoString.decimalDigits);
    assertNull(infoString.numPrecRadix);

    // BOOL -> BOOLEAN
    Field fieldBool =
        Field.newBuilder("test_bool", StandardSQLTypeName.BOOL)
            .setMode(Field.Mode.NULLABLE)
            .build();
    BigQueryDatabaseMetaData.ColumnTypeInfo infoBool = dbMetadata.mapBigQueryTypeToJdbc(fieldBool);
    assertEquals(Types.BOOLEAN, infoBool.jdbcType);
    assertEquals("BOOLEAN", infoBool.typeName);
    assertEquals(Integer.valueOf(1), infoBool.columnSize);

    // BYTES -> VARBINARY
    Field fieldBytes =
        Field.newBuilder("test_bytes", StandardSQLTypeName.BYTES)
            .setMode(Field.Mode.NULLABLE)
            .build();
    BigQueryDatabaseMetaData.ColumnTypeInfo infoBytes =
        dbMetadata.mapBigQueryTypeToJdbc(fieldBytes);
    assertEquals(Types.VARBINARY, infoBytes.jdbcType);
    assertEquals("VARBINARY", infoBytes.typeName);
    assertNull(infoBytes.columnSize);

    // TIMESTAMP -> TIMESTAMP
    Field fieldTimestamp =
        Field.newBuilder("test_ts", StandardSQLTypeName.TIMESTAMP)
            .setMode(Field.Mode.NULLABLE)
            .build();
    BigQueryDatabaseMetaData.ColumnTypeInfo infoTimestamp =
        dbMetadata.mapBigQueryTypeToJdbc(fieldTimestamp);
    assertEquals(Types.TIMESTAMP, infoTimestamp.jdbcType);
    assertEquals("TIMESTAMP", infoTimestamp.typeName);
    assertEquals(Integer.valueOf(29), infoTimestamp.columnSize);
    assertNull(infoTimestamp.decimalDigits);
    assertNull(infoTimestamp.numPrecRadix);

    // DATETIME -> TIMESTAMP
    Field fieldDateTime =
        Field.newBuilder("test_dt", StandardSQLTypeName.DATETIME)
            .setMode(Field.Mode.NULLABLE)
            .build();
    BigQueryDatabaseMetaData.ColumnTypeInfo infoDateTime =
        dbMetadata.mapBigQueryTypeToJdbc(fieldDateTime);
    assertEquals(Types.TIMESTAMP, infoDateTime.jdbcType);
    assertEquals("TIMESTAMP", infoDateTime.typeName);
    assertEquals(Integer.valueOf(29), infoDateTime.columnSize);
    assertNull(infoDateTime.decimalDigits);
    assertNull(infoDateTime.numPrecRadix);

    // NUMERIC -> NUMERIC
    Field fieldNumeric =
        Field.newBuilder("test_num", StandardSQLTypeName.NUMERIC)
            .setMode(Field.Mode.NULLABLE)
            .build();
    BigQueryDatabaseMetaData.ColumnTypeInfo infoNumeric =
        dbMetadata.mapBigQueryTypeToJdbc(fieldNumeric);
    assertEquals(Types.NUMERIC, infoNumeric.jdbcType);
    assertEquals("NUMERIC", infoNumeric.typeName);
    assertEquals(Integer.valueOf(38), infoNumeric.columnSize);
    assertEquals(Integer.valueOf(9), infoNumeric.decimalDigits);
    assertEquals(Integer.valueOf(10), infoNumeric.numPrecRadix);

    // BIGNUMERIC -> NUMERIC
    Field fieldBigNumeric =
        Field.newBuilder("test_bignum", StandardSQLTypeName.BIGNUMERIC)
            .setMode(Field.Mode.NULLABLE)
            .build();
    BigQueryDatabaseMetaData.ColumnTypeInfo infoBigNumeric =
        dbMetadata.mapBigQueryTypeToJdbc(fieldBigNumeric);
    assertEquals(Types.NUMERIC, infoBigNumeric.jdbcType);
    assertEquals("NUMERIC", infoBigNumeric.typeName);
    assertEquals(Integer.valueOf(77), infoBigNumeric.columnSize);
    assertEquals(Integer.valueOf(38), infoBigNumeric.decimalDigits);
    assertEquals(Integer.valueOf(10), infoBigNumeric.numPrecRadix);

    // GEOGRAPHY -> VARCHAR
    Field fieldGeo =
        Field.newBuilder("test_geo", StandardSQLTypeName.GEOGRAPHY)
            .setMode(Field.Mode.NULLABLE)
            .build();
    BigQueryDatabaseMetaData.ColumnTypeInfo infoGeo = dbMetadata.mapBigQueryTypeToJdbc(fieldGeo);
    assertEquals(Types.VARCHAR, infoGeo.jdbcType);
    assertEquals("VARCHAR", infoGeo.typeName);
    assertNull(infoGeo.columnSize);

    // DATE
    Field fieldDate =
        Field.newBuilder("test_date", StandardSQLTypeName.DATE)
            .setMode(Field.Mode.NULLABLE)
            .build();
    BigQueryDatabaseMetaData.ColumnTypeInfo infoDate = dbMetadata.mapBigQueryTypeToJdbc(fieldDate);
    assertEquals(Types.DATE, infoDate.jdbcType);
    assertEquals("DATE", infoDate.typeName);

    // TIME
    Field fieldTime =
        Field.newBuilder("test_time", StandardSQLTypeName.TIME)
            .setMode(Field.Mode.NULLABLE)
            .build();
    BigQueryDatabaseMetaData.ColumnTypeInfo infoTime = dbMetadata.mapBigQueryTypeToJdbc(fieldTime);
    assertEquals(Types.TIME, infoTime.jdbcType);
    assertEquals("TIME", infoTime.typeName);

    // STRUCT
    Field fieldStruct =
        Field.newBuilder(
                "test_struct",
                StandardSQLTypeName.STRUCT,
                Field.of("sub_field", StandardSQLTypeName.STRING))
            .setMode(Field.Mode.NULLABLE)
            .build();
    BigQueryDatabaseMetaData.ColumnTypeInfo infoStruct =
        dbMetadata.mapBigQueryTypeToJdbc(fieldStruct);
    assertEquals(Types.STRUCT, infoStruct.jdbcType);
    assertEquals("STRUCT", infoStruct.typeName);
  }

  @Test
  public void testMapBigQueryTypeToJdbc_ArrayType() {
    // REPEATED mode wins over the element type: the column maps to ARRAY.
    Field fieldArray =
        Field.newBuilder("test_array", StandardSQLTypeName.STRING)
            .setMode(Field.Mode.REPEATED)
            .build();

    BigQueryDatabaseMetaData.ColumnTypeInfo infoArray =
        dbMetadata.mapBigQueryTypeToJdbc(fieldArray);
    assertEquals(Types.ARRAY, infoArray.jdbcType);
    assertEquals("ARRAY", infoArray.typeName);
    assertNull(infoArray.columnSize);
    assertNull(infoArray.decimalDigits);
    assertNull(infoArray.numPrecRadix);
  }

  @Test
  public void testCreateColumnRow() {
    // Verifies the 24-column getColumns() row layout for a NULLABLE STRING field.
    Field realField =
        Field.newBuilder("user_name", StandardSQLTypeName.STRING)
            .setMode(Field.Mode.NULLABLE)
            .setDescription("Name of the user")
            .build();

    String catalog = "test-project";
    String schema = "test_dataset";
    String table = "users";
    int ordinalPos = 3;

    List row =
        dbMetadata.createColumnRow(catalog, schema, table, realField, ordinalPos);

    assertNotNull(row);
    assertEquals(24, row.size());

    assertEquals(catalog, row.get(0).getStringValue()); // 1. TABLE_CAT
    assertEquals(schema, row.get(1).getStringValue()); // 2. TABLE_SCHEM
    assertEquals(table, row.get(2).getStringValue()); // 3. TABLE_NAME
    assertEquals("user_name", row.get(3).getStringValue()); // 4. COLUMN_NAME
    assertEquals(String.valueOf(Types.NVARCHAR), row.get(4).getStringValue()); // 5. DATA_TYPE
    assertEquals("NVARCHAR", row.get(5).getStringValue()); // 6. TYPE_NAME
    assertTrue(row.get(6).isNull()); // 7. COLUMN_SIZE (was null for STRING)
    assertTrue(row.get(7).isNull()); // 8. BUFFER_LENGTH (always null)
    assertTrue(row.get(8).isNull()); // 9. DECIMAL_DIGITS (null for STRING)
    assertTrue(row.get(9).isNull()); // 10. NUM_PREC_RADIX (null for STRING)
    assertEquals(
        String.valueOf(DatabaseMetaData.columnNullable),
        row.get(10).getStringValue()); // 11. NULLABLE
    assertEquals("Name of the user", row.get(11).getStringValue()); // 12. REMARKS
    assertTrue(row.get(12).isNull()); // 13. COLUMN_DEF (null)
    // 14, 15 unused
    assertTrue(row.get(15).isNull()); // 16. CHAR_OCTET_LENGTH (was null)
    assertEquals(String.valueOf(ordinalPos), row.get(16).getStringValue()); // 17. ORDINAL_POSITION
    assertEquals("YES", row.get(17).getStringValue()); // 18. IS_NULLABLE (YES for NULLABLE mode)
    // 19-22 SCOPE/SOURCE (null)
    assertEquals("NO", row.get(22).getStringValue()); // 23. IS_AUTOINCREMENT (NO)
    assertEquals("NO", row.get(23).getStringValue()); // 24. IS_GENERATEDCOLUMN (NO)
  }

  @Test
  public void testCreateColumnRow_RequiredInt() {
    // REQUIRED INT64 field: precision/radix populated, IS_NULLABLE is "NO".
    Field realField =
        Field.newBuilder("user_id", StandardSQLTypeName.INT64).setMode(Field.Mode.REQUIRED).build();

    String catalog = "p";
    String schema = "d";
    String table = "t";
    int ordinalPos = 1;

    List row =
        dbMetadata.createColumnRow(catalog, schema, table, realField, ordinalPos);

    assertEquals(24, row.size());
    assertEquals("user_id", row.get(3).getStringValue()); // COLUMN_NAME
    assertEquals(String.valueOf(Types.BIGINT), row.get(4).getStringValue()); // DATA_TYPE
    assertEquals("BIGINT", row.get(5).getStringValue()); // TYPE_NAME
    assertEquals("19", row.get(6).getStringValue()); // COLUMN_SIZE
    assertEquals("0", row.get(8).getStringValue()); // DECIMAL_DIGITS
    assertEquals("10", row.get(9).getStringValue()); // NUM_PREC_RADIX
    assertEquals(
        String.valueOf(DatabaseMetaData.columnNoNulls), row.get(10).getStringValue()); // NULLABLE
    assertTrue(row.get(11).isNull()); // REMARKS (null description)
    assertEquals(String.valueOf(ordinalPos), row.get(16).getStringValue()); // ORDINAL_POSITION
    assertEquals("NO", row.get(17).getStringValue()); // IS_NULLABLE (NO for REQUIRED mode)
  }

  @Test
  public void testDefineGetTablesSchema() {
    // The getTables() result schema: 10 columns with JDBC-mandated names and modes.
    Schema schema = dbMetadata.defineGetTablesSchema();
    assertNotNull(schema);
    FieldList fields = schema.getFields();
    assertEquals(10, fields.size());

    Field tableCat = fields.get("TABLE_CAT");
    assertEquals("TABLE_CAT", tableCat.getName());
    assertEquals(StandardSQLTypeName.STRING, tableCat.getType().getStandardType());
    assertEquals(Field.Mode.NULLABLE, tableCat.getMode());

    Field tableName = fields.get("TABLE_NAME");
    assertEquals("TABLE_NAME", tableName.getName());
    assertEquals(StandardSQLTypeName.STRING, tableName.getType().getStandardType());
    assertEquals(Field.Mode.REQUIRED, tableName.getMode());

    Field tableType = fields.get("TABLE_TYPE");
    assertEquals("TABLE_TYPE", tableType.getName());
    assertEquals(StandardSQLTypeName.STRING, tableType.getType().getStandardType());
    assertEquals(Field.Mode.REQUIRED, tableType.getMode());

    Field remarks = fields.get("REMARKS");
    assertEquals("REMARKS", remarks.getName());
    assertEquals(StandardSQLTypeName.STRING, remarks.getType().getStandardType());
    assertEquals(Field.Mode.NULLABLE, remarks.getMode());

    Field refGeneration = fields.get("REF_GENERATION");
    assertEquals("REF_GENERATION", refGeneration.getName());
    assertEquals(StandardSQLTypeName.STRING, refGeneration.getType().getStandardType());
    assertEquals(Field.Mode.NULLABLE, refGeneration.getMode());
  }

  @Test
  public void testProcessTableInfo_Basic() {
    // No type filter: the table is emitted as one 10-column row.
    Schema resultSchema = dbMetadata.defineGetTablesSchema();
    FieldList resultSchemaFields = resultSchema.getFields();
    List collectedResults = Collections.synchronizedList(new ArrayList<>());

    String catalog = "proj";
    String schema = "ds";
    String name = "my_table";
    TableDefinition.Type type = TableDefinition.Type.TABLE;
    String description = "My test table";

    Table table = mockBigQueryTable(catalog, schema, name, type, description);

    dbMetadata.processTableInfo(table, null, collectedResults, resultSchemaFields);

    assertEquals(1, collectedResults.size());
    FieldValueList row = collectedResults.get(0);
    assertNotNull(row);
    assertEquals(10, row.size());
    assertEquals(catalog, row.get("TABLE_CAT").getStringValue());
    assertEquals(schema, row.get("TABLE_SCHEM").getStringValue());
    assertEquals(name, row.get("TABLE_NAME").getStringValue());
    assertEquals(type.toString(), row.get("TABLE_TYPE").getStringValue());
    assertEquals(description, row.get("REMARKS").getStringValue());
    assertTrue(row.get("TYPE_CAT").isNull());
    assertTrue(row.get("TYPE_SCHEM").isNull());
    assertTrue(row.get("TYPE_NAME").isNull());
    assertTrue(row.get("SELF_REFERENCING_COL_NAME").isNull());
    assertTrue(row.get("REF_GENERATION").isNull());
  }

  @Test
  public void testProcessTableInfo_TypeFilterMatch() {
    // Table type contained in the requested-type set -> row emitted.
    Schema resultSchema = dbMetadata.defineGetTablesSchema();
    FieldList resultSchemaFields = resultSchema.getFields();
    List collectedResults = Collections.synchronizedList(new ArrayList<>());
    Set requestedTypes = new HashSet<>(Arrays.asList("VIEW", "TABLE"));

    Table table = mockBigQueryTable("p", "d", "t", TableDefinition.Type.TABLE, "Desc");

    dbMetadata.processTableInfo(table, requestedTypes, collectedResults, resultSchemaFields);

    assertEquals(1, collectedResults.size());
    assertEquals("TABLE", collectedResults.get(0).get("TABLE_TYPE").getStringValue());
  }

  @Test
  public void testProcessTableInfo_TypeFilterMismatch() {
    // Table type absent from the requested-type set -> row filtered out.
    Schema resultSchema = dbMetadata.defineGetTablesSchema();
    FieldList resultSchemaFields = resultSchema.getFields();
    List collectedResults = Collections.synchronizedList(new ArrayList<>());
    Set requestedTypes = new HashSet<>(Collections.singletonList("VIEW"));

    Table table = mockBigQueryTable("p", "d", "t", TableDefinition.Type.TABLE, "Desc");

    dbMetadata.processTableInfo(table, requestedTypes, collectedResults, resultSchemaFields);

    assertEquals(0, collectedResults.size());
  }

  @Test
  public void testProcessTableInfo_NullDescription() {
    // A table without a description yields a null REMARKS value, not an empty string.
    Schema resultSchema = dbMetadata.defineGetTablesSchema();
    FieldList resultSchemaFields = resultSchema.getFields();
    List collectedResults = Collections.synchronizedList(new ArrayList<>());

    Table table = mockBigQueryTable("p", "d", "t_no_desc", TableDefinition.Type.TABLE, null);

    dbMetadata.processTableInfo(table, null, collectedResults, resultSchemaFields);

    assertEquals(1, collectedResults.size());
    FieldValueList row = collectedResults.get(0);
    assertTrue(row.get("REMARKS").isNull());
  }

  // Helper building a getTables()-shaped row; continues beyond this chunk.
  private FieldValueList createTableRow(
      String cat, String schem, String name, String type, FieldList schemaFields) {
    List values = new ArrayList<>();
    values.add(dbMetadata.createStringFieldValue(cat)); // TABLE_CAT
values.add(dbMetadata.createStringFieldValue(schem)); // TABLE_SCHEM + values.add(dbMetadata.createStringFieldValue(name)); // TABLE_NAME + values.add(dbMetadata.createStringFieldValue(type)); // TABLE_TYPE + values.add(dbMetadata.createStringFieldValue("Remark for " + name)); // REMARKS + values.add(dbMetadata.createNullFieldValue()); // TYPE_CAT + values.add(dbMetadata.createNullFieldValue()); // TYPE_SCHEM + values.add(dbMetadata.createNullFieldValue()); // TYPE_NAME + values.add(dbMetadata.createNullFieldValue()); // SELF_REFERENCING_COL_NAME + values.add(dbMetadata.createNullFieldValue()); // REF_GENERATION + return FieldValueList.of(values, schemaFields); + } + + @Test + public void testSortResults_Tables() { + Schema resultSchema = dbMetadata.defineGetTablesSchema(); + FieldList schemaFields = resultSchema.getFields(); + List results = new ArrayList<>(); + + // Add rows in unsorted order (based on TYPE, CAT, SCHEM, NAME) + results.add(createTableRow("cat_a", "sch_z", "table_1", "TABLE", schemaFields)); + results.add(createTableRow("cat_b", "sch_a", "view_1", "VIEW", schemaFields)); + results.add(createTableRow("cat_a", "sch_c", "table_2", "TABLE", schemaFields)); + results.add(createTableRow(null, "sch_b", "table_0", "TABLE", schemaFields)); + results.add(createTableRow("cat_a", "sch_z", "table_0", "TABLE", schemaFields)); + results.add(createTableRow("cat_a", null, "view_0", "VIEW", schemaFields)); + + Comparator comparator = dbMetadata.defineGetTablesComparator(schemaFields); + dbMetadata.sortResults(results, comparator, "getTables", dbMetadata.LOG); + + // Expected order: TABLEs first, then VIEWs. Within type, sort by CAT, SCHEM, NAME + assertEquals(6, results.size()); + + // 1. 
Null cat, sch_b, table_0, TABLE + assertTrue("Row 0 TABLE_CAT should be null", results.get(0).get("TABLE_CAT").isNull()); + assertEquals("sch_b", results.get(0).get("TABLE_SCHEM").getStringValue()); + assertEquals("table_0", results.get(0).get("TABLE_NAME").getStringValue()); + assertEquals("TABLE", results.get(0).get("TABLE_TYPE").getStringValue()); + + // 2. cat_a, sch_c, table_2, TABLE + assertEquals("cat_a", results.get(1).get("TABLE_CAT").getStringValue()); + assertEquals("sch_c", results.get(1).get("TABLE_SCHEM").getStringValue()); + assertEquals("table_2", results.get(1).get("TABLE_NAME").getStringValue()); + assertEquals("TABLE", results.get(1).get("TABLE_TYPE").getStringValue()); + + // 3. cat_a, sch_z, table_0, TABLE + assertEquals("cat_a", results.get(2).get("TABLE_CAT").getStringValue()); + assertEquals("sch_z", results.get(2).get("TABLE_SCHEM").getStringValue()); + assertEquals("table_0", results.get(2).get("TABLE_NAME").getStringValue()); + assertEquals("TABLE", results.get(2).get("TABLE_TYPE").getStringValue()); + + // 4. cat_a, sch_z, table_1, TABLE + assertEquals("cat_a", results.get(3).get("TABLE_CAT").getStringValue()); + assertEquals("sch_z", results.get(3).get("TABLE_SCHEM").getStringValue()); + assertEquals("table_1", results.get(3).get("TABLE_NAME").getStringValue()); + assertEquals("TABLE", results.get(3).get("TABLE_TYPE").getStringValue()); + + // 5. cat_a, null, view_0, VIEW + assertEquals("cat_a", results.get(4).get("TABLE_CAT").getStringValue()); + assertTrue("Row 4 TABLE_SCHEM should be null", results.get(4).get("TABLE_SCHEM").isNull()); + assertEquals("view_0", results.get(4).get("TABLE_NAME").getStringValue()); + assertEquals("VIEW", results.get(4).get("TABLE_TYPE").getStringValue()); + + // 6. 
cat_b, sch_a, view_1, VIEW + assertEquals("cat_b", results.get(5).get("TABLE_CAT").getStringValue()); + assertEquals("sch_a", results.get(5).get("TABLE_SCHEM").getStringValue()); + assertEquals("view_1", results.get(5).get("TABLE_NAME").getStringValue()); + assertEquals("VIEW", results.get(5).get("TABLE_TYPE").getStringValue()); + } + + @Test + public void testSortResults_Tables_EmptyList() { + Schema resultSchema = dbMetadata.defineGetTablesSchema(); + FieldList schemaFields = resultSchema.getFields(); + List results = new ArrayList<>(); + + Comparator comparator = dbMetadata.defineGetTablesComparator(schemaFields); + dbMetadata.sortResults(results, comparator, "getTables", dbMetadata.LOG); + assertTrue(results.isEmpty()); + } + + @Test + public void testDefineGetSchemasSchema() { + Schema schema = dbMetadata.defineGetSchemasSchema(); + assertNotNull(schema); + FieldList fields = schema.getFields(); + assertEquals(2, fields.size()); + + Field tableSchem = fields.get("TABLE_SCHEM"); + assertEquals("TABLE_SCHEM", tableSchem.getName()); + assertEquals(StandardSQLTypeName.STRING, tableSchem.getType().getStandardType()); + assertEquals(Field.Mode.REQUIRED, tableSchem.getMode()); + + Field tableCatalog = fields.get("TABLE_CATALOG"); + assertEquals("TABLE_CATALOG", tableCatalog.getName()); + assertEquals(StandardSQLTypeName.STRING, tableCatalog.getType().getStandardType()); + assertEquals(Field.Mode.NULLABLE, tableCatalog.getMode()); + } + + private Dataset mockBigQueryDataset(String project, String datasetName) { + Dataset mockDataset = mock(Dataset.class); + DatasetId mockDatasetId = DatasetId.of(project, datasetName); + when(mockDataset.getDatasetId()).thenReturn(mockDatasetId); + return mockDataset; + } + + @Test + public void testProcessSchemaInfo() { + Schema resultSchema = dbMetadata.defineGetSchemasSchema(); + FieldList resultSchemaFields = resultSchema.getFields(); + List collectedResults = Collections.synchronizedList(new ArrayList<>()); + + String catalog = 
"project-alpha"; + String schemaName = "dataset_beta"; + Dataset dataset = mockBigQueryDataset(catalog, schemaName); + + dbMetadata.processSchemaInfo(dataset, collectedResults, resultSchemaFields); + + assertEquals(1, collectedResults.size()); + FieldValueList row = collectedResults.get(0); + assertNotNull(row); + assertEquals(2, row.size()); + assertEquals(schemaName, row.get("TABLE_SCHEM").getStringValue()); + assertEquals(catalog, row.get("TABLE_CATALOG").getStringValue()); + } + + private FieldValueList createSchemaRow(String cat, String schem, FieldList schemaFields) { + List values = new ArrayList<>(); + values.add(dbMetadata.createStringFieldValue(schem)); // TABLE_SCHEM + values.add(dbMetadata.createStringFieldValue(cat)); // TABLE_CATALOG + return FieldValueList.of(values, schemaFields); + } + + @Test + public void testSortResults_Schemas() { + Schema resultSchema = dbMetadata.defineGetSchemasSchema(); + FieldList schemaFields = resultSchema.getFields(); + List results = new ArrayList<>(); + + results.add(createSchemaRow("proj_b", "schema_z", schemaFields)); + results.add(createSchemaRow("proj_a", "schema_y", schemaFields)); + results.add(createSchemaRow(null, "schema_x", schemaFields)); + results.add(createSchemaRow("proj_b", "schema_a", schemaFields)); + results.add(createSchemaRow("proj_a", "schema_c", schemaFields)); + + Comparator comparator = dbMetadata.defineGetSchemasComparator(schemaFields); + dbMetadata.sortResults(results, comparator, "getSchemas", dbMetadata.LOG); + + // Expected order: Sort by TABLE_CATALOG (nulls first), then TABLE_SCHEM + assertEquals(5, results.size()); + + // 1. Null catalog, schema_x + assertTrue("Row 0 TABLE_CATALOG should be null", results.get(0).get("TABLE_CATALOG").isNull()); + assertEquals("schema_x", results.get(0).get("TABLE_SCHEM").getStringValue()); + + // 2. 
proj_a, schema_c + assertEquals("proj_a", results.get(1).get("TABLE_CATALOG").getStringValue()); + assertEquals("schema_c", results.get(1).get("TABLE_SCHEM").getStringValue()); + + // 3. proj_a, schema_y + assertEquals("proj_a", results.get(2).get("TABLE_CATALOG").getStringValue()); + assertEquals("schema_y", results.get(2).get("TABLE_SCHEM").getStringValue()); + + // 4. proj_b, schema_a + assertEquals("proj_b", results.get(3).get("TABLE_CATALOG").getStringValue()); + assertEquals("schema_a", results.get(3).get("TABLE_SCHEM").getStringValue()); + + // 5. proj_b, schema_z + assertEquals("proj_b", results.get(4).get("TABLE_CATALOG").getStringValue()); + assertEquals("schema_z", results.get(4).get("TABLE_SCHEM").getStringValue()); + } + + @Test + public void testSortResults_Schemas_EmptyList() { + Schema resultSchema = dbMetadata.defineGetSchemasSchema(); + FieldList schemaFields = resultSchema.getFields(); + List results = new ArrayList<>(); + + Comparator comparator = dbMetadata.defineGetSchemasComparator(schemaFields); + dbMetadata.sortResults(results, comparator, "getSchemas", dbMetadata.LOG); + assertTrue(results.isEmpty()); + } + + private Routine mockBigQueryRoutine( + String project, String dataset, String routineName, String routineType, String description) { + Routine mockRoutine = mock(Routine.class); + RoutineId mockRoutineId = RoutineId.of(project, dataset, routineName); + when(mockRoutine.getRoutineId()).thenReturn(mockRoutineId); + when(mockRoutine.getRoutineType()).thenReturn(routineType); + when(mockRoutine.getDescription()).thenReturn(description); + return mockRoutine; + } + + @Test + public void testDefineGetProceduresSchema() { + Schema schema = dbMetadata.defineGetProceduresSchema(); + assertNotNull(schema); + FieldList fields = schema.getFields(); + assertEquals(9, fields.size()); + + Field procCat = fields.get("PROCEDURE_CAT"); + assertEquals("PROCEDURE_CAT", procCat.getName()); + assertEquals(StandardSQLTypeName.STRING, 
procCat.getType().getStandardType()); + assertEquals(Field.Mode.NULLABLE, procCat.getMode()); + + Field procName = fields.get("PROCEDURE_NAME"); + assertEquals("PROCEDURE_NAME", procName.getName()); + assertEquals(StandardSQLTypeName.STRING, procName.getType().getStandardType()); + assertEquals(Field.Mode.REQUIRED, procName.getMode()); + + Field remarks = fields.get("REMARKS"); + assertEquals("REMARKS", remarks.getName()); + assertEquals(StandardSQLTypeName.STRING, remarks.getType().getStandardType()); + assertEquals(Field.Mode.NULLABLE, remarks.getMode()); + + Field procType = fields.get("PROCEDURE_TYPE"); + assertEquals("PROCEDURE_TYPE", procType.getName()); + assertEquals(StandardSQLTypeName.INT64, procType.getType().getStandardType()); + assertEquals(Field.Mode.REQUIRED, procType.getMode()); + + Field specificName = fields.get("SPECIFIC_NAME"); + assertEquals("SPECIFIC_NAME", specificName.getName()); + assertEquals(StandardSQLTypeName.STRING, specificName.getType().getStandardType()); + assertEquals(Field.Mode.REQUIRED, specificName.getMode()); + } + + @Test + public void testProcessProcedureInfo_BasicProcedure() { + Schema resultSchema = dbMetadata.defineGetProceduresSchema(); + FieldList resultSchemaFields = resultSchema.getFields(); + List collectedResults = Collections.synchronizedList(new ArrayList<>()); + + String catalog = "proj-1"; + String schema = "dataset_a"; + String name = "my_proc"; + String description = "A test procedure"; + + Routine routine = mockBigQueryRoutine(catalog, schema, name, "PROCEDURE", description); + + dbMetadata.processProcedureInfo(routine, collectedResults, resultSchemaFields); + + assertEquals(1, collectedResults.size()); + FieldValueList row = collectedResults.get(0); + assertNotNull(row); + assertEquals(9, row.size()); + assertEquals(catalog, row.get("PROCEDURE_CAT").getStringValue()); + assertEquals(schema, row.get("PROCEDURE_SCHEM").getStringValue()); + assertEquals(name, row.get("PROCEDURE_NAME").getStringValue()); + 
assertTrue(row.get("reserved1").isNull()); + assertTrue(row.get("reserved2").isNull()); + assertTrue(row.get("reserved3").isNull()); + assertEquals(description, row.get("REMARKS").getStringValue()); + assertEquals( + String.valueOf(DatabaseMetaData.procedureResultUnknown), + row.get("PROCEDURE_TYPE").getStringValue()); + assertEquals(name, row.get("SPECIFIC_NAME").getStringValue()); + } + + @Test + public void testProcessProcedureInfo_NullDescription() { + Schema resultSchema = dbMetadata.defineGetProceduresSchema(); + FieldList resultSchemaFields = resultSchema.getFields(); + List collectedResults = Collections.synchronizedList(new ArrayList<>()); + + String catalog = "proj-1"; + String schema = "dataset_a"; + String name = "proc_no_desc"; + + Routine routine = mockBigQueryRoutine(catalog, schema, name, "PROCEDURE", null); + + dbMetadata.processProcedureInfo(routine, collectedResults, resultSchemaFields); + + assertEquals(1, collectedResults.size()); + FieldValueList row = collectedResults.get(0); + assertTrue(row.get("REMARKS").isNull()); + } + + @Test + public void testProcessProcedureInfo_SkipsNonProcedure() { + Schema resultSchema = dbMetadata.defineGetProceduresSchema(); + FieldList resultSchemaFields = resultSchema.getFields(); + List collectedResults = Collections.synchronizedList(new ArrayList<>()); + + Routine routine = mockBigQueryRoutine("p", "d", "my_func", "FUNCTION", "A function"); + + dbMetadata.processProcedureInfo(routine, collectedResults, resultSchemaFields); + + assertTrue(collectedResults.isEmpty()); + } + + private FieldValueList createProcedureRow( + String cat, String schem, String name, String specName, FieldList schemaFields) { + List values = new ArrayList<>(); + values.add(dbMetadata.createStringFieldValue(cat)); // PROCEDURE_CAT + values.add(dbMetadata.createStringFieldValue(schem)); // PROCEDURE_SCHEM + values.add(dbMetadata.createStringFieldValue(name)); // PROCEDURE_NAME + values.add(dbMetadata.createNullFieldValue()); // reserved1 
+ values.add(dbMetadata.createNullFieldValue()); // reserved2 + values.add(dbMetadata.createNullFieldValue()); // reserved3 + values.add(dbMetadata.createStringFieldValue("Remark for " + name)); // REMARKS + values.add( + dbMetadata.createLongFieldValue( + (long) DatabaseMetaData.procedureResultUnknown)); // PROCEDURE_TYPE + values.add(dbMetadata.createStringFieldValue(specName)); // SPECIFIC_NAME + return FieldValueList.of(values, schemaFields); + } + + @Test + public void testSortResults_Procedures() { + Schema resultSchema = dbMetadata.defineGetProceduresSchema(); + FieldList schemaFields = resultSchema.getFields(); + List results = new ArrayList<>(); + + // Add rows in unsorted order (Sort by CAT, SCHEM, NAME, SPECIFIC_NAME) + results.add(createProcedureRow("cat_b", "sch_c", "proc_1", "proc_1_spec", schemaFields)); + results.add( + createProcedureRow("cat_a", "sch_z", "proc_alpha", "proc_alpha_spec", schemaFields)); + results.add(createProcedureRow("cat_a", "sch_z", "proc_beta", "proc_beta_spec", schemaFields)); + results.add(createProcedureRow(null, "sch_y", "proc_gamma", "proc_gamma_spec", schemaFields)); + results.add(createProcedureRow("cat_a", null, "proc_delta", "proc_delta_spec", schemaFields)); + results.add( + createProcedureRow("cat_a", "sch_z", "proc_alpha", "proc_alpha_spec_older", schemaFields)); + + Comparator comparator = dbMetadata.defineGetProceduresComparator(schemaFields); + dbMetadata.sortResults(results, comparator, "getProcedures", dbMetadata.LOG); + + // Expected Order: Null Cat, then Cat A (Null Schem, then sch_z), then Cat B. Within that, Name, + // then Spec Name. + assertEquals(6, results.size()); + + // 1. 
Null cat, sch_y, proc_gamma, proc_gamma_spec + assertTrue("Row 0 PROC_CAT should be null", results.get(0).get("PROCEDURE_CAT").isNull()); + assertEquals("sch_y", results.get(0).get("PROCEDURE_SCHEM").getStringValue()); + assertEquals("proc_gamma", results.get(0).get("PROCEDURE_NAME").getStringValue()); + assertEquals("proc_gamma_spec", results.get(0).get("SPECIFIC_NAME").getStringValue()); + + // 2. cat_a, Null schem, proc_delta, proc_delta_spec + assertEquals("cat_a", results.get(1).get("PROCEDURE_CAT").getStringValue()); + assertTrue("Row 1 PROC_SCHEM should be null", results.get(1).get("PROCEDURE_SCHEM").isNull()); + assertEquals("proc_delta", results.get(1).get("PROCEDURE_NAME").getStringValue()); + assertEquals("proc_delta_spec", results.get(1).get("SPECIFIC_NAME").getStringValue()); + + // 3. cat_a, sch_z, proc_alpha, "proc_alpha_spec" (comes first lexicographically) <<< CORRECTED + // EXPECTATION + assertEquals("cat_a", results.get(2).get("PROCEDURE_CAT").getStringValue()); + assertEquals("sch_z", results.get(2).get("PROCEDURE_SCHEM").getStringValue()); + assertEquals("proc_alpha", results.get(2).get("PROCEDURE_NAME").getStringValue()); + assertEquals( + "proc_alpha_spec", results.get(2).get("SPECIFIC_NAME").getStringValue()); // <<< CORRECTED + + // 4. cat_a, sch_z, proc_alpha, "proc_alpha_spec_older" (comes second lexicographically) <<< + // CORRECTED EXPECTATION + assertEquals("cat_a", results.get(3).get("PROCEDURE_CAT").getStringValue()); + assertEquals("sch_z", results.get(3).get("PROCEDURE_SCHEM").getStringValue()); + assertEquals("proc_alpha", results.get(3).get("PROCEDURE_NAME").getStringValue()); + assertEquals( + "proc_alpha_spec_older", + results.get(3).get("SPECIFIC_NAME").getStringValue()); // <<< CORRECTED + + // 5. 
cat_a, sch_z, proc_beta, proc_beta_spec + assertEquals("cat_a", results.get(4).get("PROCEDURE_CAT").getStringValue()); + assertEquals("sch_z", results.get(4).get("PROCEDURE_SCHEM").getStringValue()); + assertEquals("proc_beta", results.get(4).get("PROCEDURE_NAME").getStringValue()); + assertEquals("proc_beta_spec", results.get(4).get("SPECIFIC_NAME").getStringValue()); + + // 6. cat_b, sch_c, proc_1, proc_1_spec + assertEquals("cat_b", results.get(5).get("PROCEDURE_CAT").getStringValue()); + assertEquals("sch_c", results.get(5).get("PROCEDURE_SCHEM").getStringValue()); + assertEquals("proc_1", results.get(5).get("PROCEDURE_NAME").getStringValue()); + assertEquals("proc_1_spec", results.get(5).get("SPECIFIC_NAME").getStringValue()); + } + + @Test + public void testSortResults_Procedures_EmptyList() { + Schema resultSchema = dbMetadata.defineGetProceduresSchema(); + FieldList schemaFields = resultSchema.getFields(); + List results = new ArrayList<>(); + + Comparator comparator = dbMetadata.defineGetProceduresComparator(schemaFields); + dbMetadata.sortResults(results, comparator, "getProcedures", dbMetadata.LOG); + assertTrue(results.isEmpty()); + } + + @Test + public void testFindMatchingBigQueryObjects_Routines_ListWithPattern() { + String catalog = "p-cat"; + String schema = "d-sch"; + String pattern = "proc_%"; + DatasetId datasetId = DatasetId.of(catalog, schema); + + Routine proc1 = mockBigQueryRoutine(catalog, schema, "proc_abc", "PROCEDURE", "p1"); + Routine proc2 = mockBigQueryRoutine(catalog, schema, "proc_xyz", "PROCEDURE", "p2"); + Routine func1 = mockBigQueryRoutine(catalog, schema, "func_123", "FUNCTION", "f1"); + Routine otherProc = mockBigQueryRoutine(catalog, schema, "another_proc", "PROCEDURE", "p3"); + + Page page = mock(Page.class); + when(page.iterateAll()).thenReturn(Arrays.asList(proc1, func1, proc2, otherProc)); + when(bigqueryClient.listRoutines(eq(datasetId), any(BigQuery.RoutineListOption.class))) + .thenReturn(page); + + Pattern regex = 
dbMetadata.compileSqlLikePattern(pattern); + assertNotNull(regex); + + List results = + dbMetadata.findMatchingBigQueryObjects( + "Routine", + () -> bigqueryClient.listRoutines(datasetId, BigQuery.RoutineListOption.pageSize(500)), + (name) -> + bigqueryClient.getRoutine( + RoutineId.of(datasetId.getProject(), datasetId.getDataset(), name)), + (rt) -> rt.getRoutineId().getRoutine(), + pattern, + regex, + dbMetadata.LOG); + + verify(bigqueryClient, times(1)) + .listRoutines(eq(datasetId), any(BigQuery.RoutineListOption.class)); + verify(bigqueryClient, never()).getRoutine(any(RoutineId.class)); + + assertNotNull(results); + List resultList = new ArrayList<>(results); + + assertEquals("Should contain only matching routines", 2, resultList.size()); + assertTrue("Should contain proc_abc", resultList.contains(proc1)); + assertTrue("Should contain proc_xyz", resultList.contains(proc2)); + assertFalse("Should not contain func_123", resultList.contains(func1)); + assertFalse("Should not contain another_proc", resultList.contains(otherProc)); + } + + @Test + public void testFindMatchingBigQueryObjects_Routines_ListNoPattern() { + String catalog = "p-cat"; + String schema = "d-sch"; + String pattern = null; + DatasetId datasetId = DatasetId.of(catalog, schema); + + Routine proc1 = mockBigQueryRoutine(catalog, schema, "proc_abc", "PROCEDURE", "p1"); + Routine func1 = mockBigQueryRoutine(catalog, schema, "func_123", "FUNCTION", "f1"); + + Page page = mock(Page.class); + when(page.iterateAll()).thenReturn(Arrays.asList(proc1, func1)); + when(bigqueryClient.listRoutines(eq(datasetId), any(BigQuery.RoutineListOption.class))) + .thenReturn(page); + + Pattern regex = dbMetadata.compileSqlLikePattern(pattern); + List results = + dbMetadata.findMatchingBigQueryObjects( + "Routine", + () -> bigqueryClient.listRoutines(datasetId, BigQuery.RoutineListOption.pageSize(500)), + (name) -> + bigqueryClient.getRoutine( + RoutineId.of(datasetId.getProject(), datasetId.getDataset(), name)), + 
(rt) -> rt.getRoutineId().getRoutine(), + pattern, + regex, + dbMetadata.LOG); + + verify(bigqueryClient, times(1)) + .listRoutines(eq(datasetId), any(BigQuery.RoutineListOption.class)); + + assertNotNull(results); + List resultList = new ArrayList<>(results); + + assertEquals("Should contain all routines when pattern is null", 2, resultList.size()); + assertTrue("Should contain proc_abc", resultList.contains(proc1)); + assertTrue("Should contain func_123", resultList.contains(func1)); + } + + @Test + public void testFindMatchingBigQueryObjects_Routines_GetSpecific() { + String catalog = "p-cat"; + String schema = "d-sch"; + String procNameExact = "exactprocname"; + DatasetId datasetId = DatasetId.of(catalog, schema); + RoutineId routineId = RoutineId.of(catalog, schema, procNameExact); + Routine mockRoutine = mockBigQueryRoutine(catalog, schema, procNameExact, "PROCEDURE", "desc"); + + when(bigqueryClient.getRoutine(eq(routineId))).thenReturn(mockRoutine); + Pattern regex = dbMetadata.compileSqlLikePattern(procNameExact); + + List results = + dbMetadata.findMatchingBigQueryObjects( + "Routine", + () -> bigqueryClient.listRoutines(datasetId, BigQuery.RoutineListOption.pageSize(500)), + (name) -> + bigqueryClient.getRoutine( + RoutineId.of(datasetId.getProject(), datasetId.getDataset(), name)), + (rt) -> rt.getRoutineId().getRoutine(), + procNameExact, + regex, + dbMetadata.LOG); + + verify(bigqueryClient, times(1)).getRoutine(eq(routineId)); + verify(bigqueryClient, never()) + .listRoutines(any(DatasetId.class), any(BigQuery.RoutineListOption.class)); + + assertNotNull(results); + List resultList = new ArrayList<>(results); + assertEquals(1, resultList.size()); + assertSame(mockRoutine, resultList.get(0)); + } + + @Test + public void testDefineGetProcedureColumnsSchema() { + Schema schema = dbMetadata.defineGetProcedureColumnsSchema(); + assertNotNull(schema); + FieldList fields = schema.getFields(); + assertEquals(20, fields.size()); + + Field procCat = 
fields.get("PROCEDURE_CAT"); + assertEquals("PROCEDURE_CAT", procCat.getName()); + assertEquals(StandardSQLTypeName.STRING, procCat.getType().getStandardType()); + assertEquals(Field.Mode.NULLABLE, procCat.getMode()); + + Field colName = fields.get("COLUMN_NAME"); + assertEquals("COLUMN_NAME", colName.getName()); + assertEquals(StandardSQLTypeName.STRING, colName.getType().getStandardType()); + assertEquals(Field.Mode.REQUIRED, colName.getMode()); + + Field colType = fields.get("COLUMN_TYPE"); + assertEquals("COLUMN_TYPE", colType.getName()); + assertEquals(StandardSQLTypeName.INT64, colType.getType().getStandardType()); + assertEquals(Field.Mode.REQUIRED, colType.getMode()); + + Field dataType = fields.get("DATA_TYPE"); + assertEquals("DATA_TYPE", dataType.getName()); + assertEquals(StandardSQLTypeName.INT64, dataType.getType().getStandardType()); + assertEquals(Field.Mode.REQUIRED, dataType.getMode()); + + Field typeName = fields.get("TYPE_NAME"); + assertEquals("TYPE_NAME", typeName.getName()); + assertEquals(StandardSQLTypeName.STRING, typeName.getType().getStandardType()); + assertEquals(Field.Mode.REQUIRED, typeName.getMode()); + + Field ordinalPos = fields.get("ORDINAL_POSITION"); + assertEquals("ORDINAL_POSITION", ordinalPos.getName()); + assertEquals(StandardSQLTypeName.INT64, ordinalPos.getType().getStandardType()); + assertEquals(Field.Mode.REQUIRED, ordinalPos.getMode()); + + Field isNullable = fields.get("IS_NULLABLE"); + assertEquals("IS_NULLABLE", isNullable.getName()); + assertEquals(StandardSQLTypeName.STRING, isNullable.getType().getStandardType()); + assertEquals(Field.Mode.REQUIRED, isNullable.getMode()); + + Field specificName = fields.get("SPECIFIC_NAME"); + assertEquals("SPECIFIC_NAME", specificName.getName()); + assertEquals(StandardSQLTypeName.STRING, specificName.getType().getStandardType()); + assertEquals(Field.Mode.REQUIRED, specificName.getMode()); + } + + @Test + public void testDetermineTypeInfoFromDataType() { + // INT64 + 
StandardSQLDataType sqlInt64 = mockStandardSQLDataType(StandardSQLTypeName.INT64); + BigQueryDatabaseMetaData.ColumnTypeInfo infoInt64 = + dbMetadata.determineTypeInfoFromDataType(sqlInt64, "p", "c", 1); + assertEquals(Types.BIGINT, infoInt64.jdbcType); + assertEquals("BIGINT", infoInt64.typeName); + + // STRING + StandardSQLDataType sqlString = mockStandardSQLDataType(StandardSQLTypeName.STRING); + BigQueryDatabaseMetaData.ColumnTypeInfo infoString = + dbMetadata.determineTypeInfoFromDataType(sqlString, "p", "c", 1); + assertEquals(Types.NVARCHAR, infoString.jdbcType); + assertEquals("NVARCHAR", infoString.typeName); + + // BOOL + StandardSQLDataType sqlBool = mockStandardSQLDataType(StandardSQLTypeName.BOOL); + BigQueryDatabaseMetaData.ColumnTypeInfo infoBool = + dbMetadata.determineTypeInfoFromDataType(sqlBool, "p", "c", 1); + assertEquals(Types.BOOLEAN, infoBool.jdbcType); + assertEquals("BOOLEAN", infoBool.typeName); + + // STRUCT + StandardSQLDataType sqlStruct = mockStandardSQLDataType(StandardSQLTypeName.STRUCT); + BigQueryDatabaseMetaData.ColumnTypeInfo infoStruct = + dbMetadata.determineTypeInfoFromDataType(sqlStruct, "p", "c", 1); + assertEquals(Types.STRUCT, infoStruct.jdbcType); + assertEquals("STRUCT", infoStruct.typeName); + + // Case: null typeKind from StandardSQLDataType (should default to VARCHAR) + StandardSQLDataType sqlNullKind = mock(StandardSQLDataType.class); + when(sqlNullKind.getTypeKind()).thenReturn(null); + BigQueryDatabaseMetaData.ColumnTypeInfo infoNullKind = + dbMetadata.determineTypeInfoFromDataType(sqlNullKind, "p", "c", 1); + assertEquals(Types.VARCHAR, infoNullKind.jdbcType); + assertEquals("VARCHAR", infoNullKind.typeName); + + // Case: unknown typeKind from StandardSQLDataType (should default to VARCHAR) + StandardSQLDataType sqlUnknownKind = mock(StandardSQLDataType.class); + when(sqlUnknownKind.getTypeKind()).thenReturn("SUPER_DOOPER_TYPE"); + BigQueryDatabaseMetaData.ColumnTypeInfo infoUnknownKind = + 
dbMetadata.determineTypeInfoFromDataType(sqlUnknownKind, "p", "c", 1); + assertEquals(Types.VARCHAR, infoUnknownKind.jdbcType); + assertEquals("VARCHAR", infoUnknownKind.typeName); + } + + @Test + public void testCreateProcedureColumnRow_BasicInParam() { + String catalog = "proj_x"; + String schema = "data_y"; + String procName = "proc_z"; + String specificName = "proc_z_specific"; + + RoutineArgument arg = mockRoutineArgument("param_in", StandardSQLTypeName.STRING, "IN"); + int ordinalPos = 1; + + List row = + dbMetadata.createProcedureColumnRow( + catalog, schema, procName, specificName, arg, ordinalPos, "param_in"); + + assertNotNull(row); + assertEquals(20, row.size()); + + assertEquals(catalog, row.get(0).getStringValue()); // 1. PROCEDURE_CAT + assertEquals(schema, row.get(1).getStringValue()); // 2. PROCEDURE_SCHEM + assertEquals(procName, row.get(2).getStringValue()); // 3. PROCEDURE_NAME + assertEquals("param_in", row.get(3).getStringValue()); // 4. COLUMN_NAME + assertEquals( + String.valueOf(DatabaseMetaData.procedureColumnIn), + row.get(4).getStringValue()); // 5. COLUMN_TYPE + assertEquals(String.valueOf(Types.NVARCHAR), row.get(5).getStringValue()); // 6. DATA_TYPE + assertEquals("NVARCHAR", row.get(6).getStringValue()); // 7. TYPE_NAME + assertTrue(row.get(7).isNull()); // 8. PRECISION + assertTrue(row.get(8).isNull()); // 9. LENGTH + assertTrue(row.get(9).isNull()); // 10. SCALE + assertTrue(row.get(10).isNull()); // 11. RADIX + assertEquals( + String.valueOf(DatabaseMetaData.procedureNullable), + row.get(11).getStringValue()); // 12. NULLABLE + assertTrue(row.get(12).isNull()); // 13. REMARKS + assertTrue(row.get(13).isNull()); // 14. COLUMN_DEF + assertTrue(row.get(14).isNull()); // 15. SQL_DATA_TYPE + assertTrue(row.get(15).isNull()); // 16. SQL_DATETIME_SUB + assertTrue(row.get(16).isNull()); // 17. CHAR_OCTET_LENGTH + assertEquals(String.valueOf(ordinalPos), row.get(17).getStringValue()); // 18. 
ORDINAL_POSITION + assertEquals("YES", row.get(18).getStringValue()); // 19. IS_NULLABLE + assertEquals(specificName, row.get(19).getStringValue()); // 20. SPECIFIC_NAME + } + + @Test + public void testCreateProcedureColumnRow_NumericOutParam() { + RoutineArgument arg = mockRoutineArgument("param_out_num", StandardSQLTypeName.NUMERIC, "OUT"); + int ordinalPos = 2; + + List row = + dbMetadata.createProcedureColumnRow( + "p", "d", "proc", "proc_spec", arg, ordinalPos, "param_out_num"); + + assertEquals( + String.valueOf(DatabaseMetaData.procedureColumnOut), + row.get(4).getStringValue()); // COLUMN_TYPE + assertEquals(String.valueOf(Types.NUMERIC), row.get(5).getStringValue()); // DATA_TYPE + assertEquals("NUMERIC", row.get(6).getStringValue()); // TYPE_NAME + assertEquals("38", row.get(7).getStringValue()); // PRECISION + assertEquals("9", row.get(9).getStringValue()); // SCALE + assertEquals("10", row.get(10).getStringValue()); // RADIX + } + + @Test + public void testCreateProcedureColumnRow_InOutTimestampParam() { + RoutineArgument arg = + mockRoutineArgument("param_inout_ts", StandardSQLTypeName.TIMESTAMP, "INOUT"); + List row = + dbMetadata.createProcedureColumnRow( + "p", "d", "proc", "proc_spec", arg, 3, "param_inout_ts"); + + assertEquals( + String.valueOf(DatabaseMetaData.procedureColumnInOut), row.get(4).getStringValue()); + assertEquals(String.valueOf(Types.TIMESTAMP), row.get(5).getStringValue()); + assertEquals("TIMESTAMP", row.get(6).getStringValue()); + assertEquals("29", row.get(7).getStringValue()); // PRECISION for TIMESTAMP + } + + @Test + public void testCreateProcedureColumnRow_UnknownModeDefaultsToUnknownType() { + RoutineArgument arg = + mockRoutineArgument("param_unknown_mode", StandardSQLTypeName.BOOL, "UNKNOWN_MODE"); + List row = + dbMetadata.createProcedureColumnRow( + "p", "d", "proc", "proc_spec", arg, 1, "param_unknown_mode"); + assertEquals( + String.valueOf(DatabaseMetaData.procedureColumnUnknown), row.get(4).getStringValue()); + } + 
+ @Test + public void testCreateProcedureColumnRow_NullArgumentObject() { + List row = + dbMetadata.createProcedureColumnRow( + "cat", "schem", "proc", "spec", null, 1, "fallback_arg_name"); + + assertNotNull(row); + assertEquals(20, row.size()); + assertEquals("fallback_arg_name", row.get(3).getStringValue()); // COLUMN_NAME + assertEquals(String.valueOf(Types.VARCHAR), row.get(5).getStringValue()); // DATA_TYPE + assertEquals("VARCHAR", row.get(6).getStringValue()); // TYPE_NAME + assertEquals( + String.valueOf(DatabaseMetaData.procedureColumnUnknown), row.get(4).getStringValue()); + } + + @Test + public void testCreateProcedureColumnRow_NullArgumentDataType() { + RoutineArgument argWithNullDataType = mock(RoutineArgument.class); + when(argWithNullDataType.getName()).thenReturn("arg_null_type"); + when(argWithNullDataType.getDataType()).thenReturn(null); + when(argWithNullDataType.getMode()).thenReturn("IN"); + + List row = + dbMetadata.createProcedureColumnRow( + "cat", "schem", "proc", "spec", argWithNullDataType, 1, "arg_null_type"); + + assertNotNull(row); + assertEquals("arg_null_type", row.get(3).getStringValue()); + assertEquals( + String.valueOf(Types.VARCHAR), row.get(5).getStringValue()); // DATA_TYPE should default + assertEquals("VARCHAR", row.get(6).getStringValue()); // TYPE_NAME should default + } + + @Test + public void testProcessProcedureArguments_NoArguments() { + Schema resultSchema = dbMetadata.defineGetProcedureColumnsSchema(); + FieldList resultFields = resultSchema.getFields(); + List collectedResults = Collections.synchronizedList(new ArrayList<>()); + Pattern noColumnNamePattern = null; // Match all columns + + Routine routine = + mockBigQueryRoutineWithArgs( + "p", "d", "proc_no_args", "PROCEDURE", "desc", Collections.emptyList()); // No arguments + + dbMetadata.processProcedureArguments( + routine, noColumnNamePattern, collectedResults, resultFields); + assertTrue(collectedResults.isEmpty()); + } + + @Test + public void 
testProcessProcedureArguments_WithArgumentsNoFilter() { + Schema resultSchema = dbMetadata.defineGetProcedureColumnsSchema(); + FieldList resultFields = resultSchema.getFields(); + List collectedResults = Collections.synchronizedList(new ArrayList<>()); + Pattern noColumnNamePattern = null; + + RoutineArgument arg1 = mockRoutineArgument("param1", StandardSQLTypeName.INT64, "IN"); + RoutineArgument arg2 = mockRoutineArgument("param2", StandardSQLTypeName.STRING, "OUT"); + Routine routine = + mockBigQueryRoutineWithArgs( + "p", "d", "proc_two_args", "PROCEDURE", "desc", Arrays.asList(arg1, arg2)); + + dbMetadata.processProcedureArguments( + routine, noColumnNamePattern, collectedResults, resultFields); + assertEquals(2, collectedResults.size()); + assertEquals("param1", collectedResults.get(0).get("COLUMN_NAME").getStringValue()); + assertEquals("param2", collectedResults.get(1).get("COLUMN_NAME").getStringValue()); + assertEquals( + String.valueOf(1), collectedResults.get(0).get("ORDINAL_POSITION").getStringValue()); + assertEquals( + String.valueOf(2), collectedResults.get(1).get("ORDINAL_POSITION").getStringValue()); + } + + @Test + public void testProcessProcedureArguments_WithColumnNameFilter() { + Schema resultSchema = dbMetadata.defineGetProcedureColumnsSchema(); + FieldList resultFields = resultSchema.getFields(); + List collectedResults = Collections.synchronizedList(new ArrayList<>()); + Pattern columnNamePattern = Pattern.compile(".*_id"); // Match columns ending with _id + + RoutineArgument arg1 = mockRoutineArgument("user_id", StandardSQLTypeName.INT64, "IN"); + RoutineArgument arg2 = mockRoutineArgument("user_name", StandardSQLTypeName.STRING, "IN"); + RoutineArgument arg3 = mockRoutineArgument("session_id", StandardSQLTypeName.STRING, "INOUT"); + + Routine routine = + mockBigQueryRoutineWithArgs( + "p", "d", "proc_filtered_args", "PROCEDURE", "desc", Arrays.asList(arg1, arg2, arg3)); + + dbMetadata.processProcedureArguments( + routine, 
columnNamePattern, collectedResults, resultFields); + assertEquals(2, collectedResults.size()); + assertEquals("user_id", collectedResults.get(0).get("COLUMN_NAME").getStringValue()); + assertEquals("session_id", collectedResults.get(1).get("COLUMN_NAME").getStringValue()); + } + + @Test + public void testProcessProcedureArguments_HandlesNullArgumentInList() { + Schema resultSchema = dbMetadata.defineGetProcedureColumnsSchema(); + FieldList resultFields = resultSchema.getFields(); + List collectedResults = Collections.synchronizedList(new ArrayList<>()); + Pattern noColumnNamePattern = null; + + RoutineArgument arg1 = mockRoutineArgument("valid_arg", StandardSQLTypeName.INT64, "IN"); + List argsWithNull = new ArrayList<>(); + argsWithNull.add(arg1); + argsWithNull.add(null); // Add a null argument + RoutineArgument arg3 = + mockRoutineArgument("another_valid_arg", StandardSQLTypeName.STRING, "OUT"); + argsWithNull.add(arg3); + + Routine routine = mock(Routine.class); + RoutineId routineId = RoutineId.of("p", "d", "proc_with_null_arg_in_list"); + when(routine.getRoutineId()).thenReturn(routineId); + when(routine.getRoutineType()).thenReturn("PROCEDURE"); + when(routine.getArguments()).thenReturn(argsWithNull); + + dbMetadata.processProcedureArguments( + routine, noColumnNamePattern, collectedResults, resultFields); + + assertEquals(3, collectedResults.size()); + assertEquals("valid_arg", collectedResults.get(0).get("COLUMN_NAME").getStringValue()); + // The fallback name generated by processProcedureArguments when arg is null in list + assertEquals( + "arg_retrieval_err_2", collectedResults.get(1).get("COLUMN_NAME").getStringValue()); + assertEquals( + String.valueOf(Types.VARCHAR), + collectedResults.get(1).get("DATA_TYPE").getStringValue()); // Default type for null arg + assertEquals("another_valid_arg", collectedResults.get(2).get("COLUMN_NAME").getStringValue()); + } + + private FieldValueList createProcedureColumnRowForSortTest( + String cat, + String schem, + 
String procName, + String specName, + String colName, + int ordinal, + FieldList schemaFields) { + List values = new ArrayList<>(20); + values.add(dbMetadata.createStringFieldValue(cat)); // 1. PROC_CAT + values.add(dbMetadata.createStringFieldValue(schem)); // 2. PROC_SCHEM + values.add(dbMetadata.createStringFieldValue(procName)); // 3. PROC_NAME + values.add(dbMetadata.createStringFieldValue(colName)); // 4. COLUMN_NAME + values.add( + dbMetadata.createLongFieldValue( + (long) DatabaseMetaData.procedureColumnIn)); // 5. COLUMN_TYPE + values.add(dbMetadata.createLongFieldValue((long) Types.VARCHAR)); // 6. DATA_TYPE + values.add(dbMetadata.createStringFieldValue("VARCHAR")); // 7. TYPE_NAME + values.add(dbMetadata.createNullFieldValue()); // 8. PRECISION + values.add(dbMetadata.createNullFieldValue()); // 9. LENGTH + values.add(dbMetadata.createNullFieldValue()); // 10. SCALE + values.add(dbMetadata.createNullFieldValue()); // 11. RADIX + values.add( + dbMetadata.createLongFieldValue((long) DatabaseMetaData.procedureNullable)); // 12. NULLABLE + values.add(dbMetadata.createStringFieldValue("Remark for " + colName)); // 13. REMARKS + values.add(dbMetadata.createNullFieldValue()); // 14. COLUMN_DEF + values.add(dbMetadata.createNullFieldValue()); // 15. SQL_DATA_TYPE + values.add(dbMetadata.createNullFieldValue()); // 16. SQL_DATETIME_SUB + values.add(dbMetadata.createNullFieldValue()); // 17. CHAR_OCTET_LENGTH + values.add(dbMetadata.createLongFieldValue((long) ordinal)); // 18. ORDINAL_POSITION + values.add(dbMetadata.createStringFieldValue("YES")); // 19. IS_NULLABLE + values.add(dbMetadata.createStringFieldValue(specName)); // 20. 
SPECIFIC_NAME + return FieldValueList.of(values, schemaFields); + } + + @Test + public void testDefineGetProcedureColumnsComparator() { + Schema resultSchema = dbMetadata.defineGetProcedureColumnsSchema(); + FieldList schemaFields = resultSchema.getFields(); + List results = new ArrayList<>(); + + // Order: PROC_CAT, PROC_SCHEM, PROC_NAME, SPECIFIC_NAME, COLUMN_NAME + results.add( + createProcedureColumnRowForSortTest( + "cat_b", "sch_y", "proc_1", "proc_1_spec", "param_a", 1, schemaFields)); + results.add( + createProcedureColumnRowForSortTest( + "cat_a", "sch_z", "proc_alpha", "proc_alpha_spec", "arg_z", 2, schemaFields)); + results.add( + createProcedureColumnRowForSortTest( + "cat_a", + "sch_z", + "proc_alpha", + "proc_alpha_spec", + "arg_m", + 1, + schemaFields)); // Same proc, different col + results.add( + createProcedureColumnRowForSortTest( + null, "sch_x", "proc_beta", "proc_beta_spec", "col_first", 1, schemaFields)); + results.add( + createProcedureColumnRowForSortTest( + "cat_a", null, "proc_gamma", "proc_gamma_spec", "input1", 1, schemaFields)); + + Comparator comparator = + dbMetadata.defineGetProcedureColumnsComparator(schemaFields); + assertNotNull(comparator); + dbMetadata.sortResults(results, comparator, "getProcedureColumns", dbMetadata.LOG); + + assertEquals(5, results.size()); + + // 1. Null cat, sch_x, proc_beta, proc_beta_spec, col_first + assertTrue(results.get(0).get("PROCEDURE_CAT").isNull()); + assertEquals("sch_x", results.get(0).get("PROCEDURE_SCHEM").getStringValue()); + assertEquals("proc_beta", results.get(0).get("PROCEDURE_NAME").getStringValue()); + assertEquals("proc_beta_spec", results.get(0).get("SPECIFIC_NAME").getStringValue()); + assertEquals("col_first", results.get(0).get("COLUMN_NAME").getStringValue()); + + // 2. 
cat_a, Null schem, proc_gamma, proc_gamma_spec, input1 + assertEquals("cat_a", results.get(1).get("PROCEDURE_CAT").getStringValue()); + assertTrue(results.get(1).get("PROCEDURE_SCHEM").isNull()); + assertEquals("proc_gamma", results.get(1).get("PROCEDURE_NAME").getStringValue()); + assertEquals("proc_gamma_spec", results.get(1).get("SPECIFIC_NAME").getStringValue()); + assertEquals("input1", results.get(1).get("COLUMN_NAME").getStringValue()); + + // 3. cat_a, sch_z, proc_alpha, proc_alpha_spec, arg_m (m before z) + assertEquals("cat_a", results.get(2).get("PROCEDURE_CAT").getStringValue()); + assertEquals("sch_z", results.get(2).get("PROCEDURE_SCHEM").getStringValue()); + assertEquals("proc_alpha", results.get(2).get("PROCEDURE_NAME").getStringValue()); + assertEquals("proc_alpha_spec", results.get(2).get("SPECIFIC_NAME").getStringValue()); + assertEquals("arg_m", results.get(2).get("COLUMN_NAME").getStringValue()); + + // 4. cat_a, sch_z, proc_alpha, proc_alpha_spec, arg_z + assertEquals("cat_a", results.get(3).get("PROCEDURE_CAT").getStringValue()); + assertEquals("sch_z", results.get(3).get("PROCEDURE_SCHEM").getStringValue()); + assertEquals("proc_alpha", results.get(3).get("PROCEDURE_NAME").getStringValue()); + assertEquals("proc_alpha_spec", results.get(3).get("SPECIFIC_NAME").getStringValue()); + assertEquals("arg_z", results.get(3).get("COLUMN_NAME").getStringValue()); + + // 5. 
cat_b, sch_y, proc_1, proc_1_spec, param_a + assertEquals("cat_b", results.get(4).get("PROCEDURE_CAT").getStringValue()); + assertEquals("sch_y", results.get(4).get("PROCEDURE_SCHEM").getStringValue()); + assertEquals("proc_1", results.get(4).get("PROCEDURE_NAME").getStringValue()); + assertEquals("proc_1_spec", results.get(4).get("SPECIFIC_NAME").getStringValue()); + assertEquals("param_a", results.get(4).get("COLUMN_NAME").getStringValue()); + } + + @Test + public void testListMatchingProcedureIdsFromDatasets() throws Exception { + String catalog = "test-proj"; + String schema1Name = "dataset1"; + String schema2Name = "dataset2"; + Dataset dataset1 = mockBigQueryDataset(catalog, schema1Name); + Dataset dataset2 = mockBigQueryDataset(catalog, schema2Name); + List datasetsToScan = Arrays.asList(dataset1, dataset2); + + Routine proc1_ds1 = mockBigQueryRoutine(catalog, schema1Name, "proc_a", "PROCEDURE", "desc a"); + Routine func1_ds1 = mockBigQueryRoutine(catalog, schema1Name, "func_b", "FUNCTION", "desc b"); + Routine proc2_ds2 = mockBigQueryRoutine(catalog, schema2Name, "proc_c", "PROCEDURE", "desc c"); + + Page page1 = mock(Page.class); + when(page1.iterateAll()).thenReturn(Arrays.asList(proc1_ds1, func1_ds1)); + when(bigqueryClient.listRoutines(eq(dataset1.getDatasetId()), any(RoutineListOption.class))) + .thenReturn(page1); + + Page page2 = mock(Page.class); + when(page2.iterateAll()).thenReturn(Collections.singletonList(proc2_ds2)); + when(bigqueryClient.listRoutines(eq(dataset2.getDatasetId()), any(RoutineListOption.class))) + .thenReturn(page2); + + ExecutorService mockExecutor = mock(ExecutorService.class); + doAnswer( + invocation -> { + Callable callable = invocation.getArgument(0); + @SuppressWarnings("unchecked") // Suppress warning for raw Future mock + Future mockedFuture = mock(Future.class); + + try { + Object result = callable.call(); + doReturn(result).when(mockedFuture).get(); + } catch (InterruptedException interruptedException) { + 
doThrow(interruptedException).when(mockedFuture).get(); + } catch (Exception e) { + doThrow(new ExecutionException(e)).when(mockedFuture).get(); + } + return mockedFuture; + }) + .when(mockExecutor) + .submit(any(Callable.class)); + + List resultIds = + dbMetadata.listMatchingProcedureIdsFromDatasets( + datasetsToScan, null, null, mockExecutor, catalog, dbMetadata.LOG); + + assertEquals(2, resultIds.size()); + assertTrue(resultIds.contains(proc1_ds1.getRoutineId())); + assertTrue(resultIds.contains(proc2_ds2.getRoutineId())); + assertFalse(resultIds.contains(func1_ds1.getRoutineId())); // Should not contain functions + + verify(mockExecutor, times(2)).submit(any(Callable.class)); + } + + @Test + public void testSubmitProcedureArgumentProcessingJobs_Basic() throws InterruptedException { + String catalog = "p"; + String schemaName = "d"; + RoutineArgument arg1 = mockRoutineArgument("arg1_name", StandardSQLTypeName.STRING, "IN"); + Routine proc1 = + mockBigQueryRoutineWithArgs( + catalog, schemaName, "proc1", "PROCEDURE", "desc1", Collections.singletonList(arg1)); + Routine func1 = + mockBigQueryRoutineWithArgs( + catalog, + schemaName, + "func1", + "FUNCTION", + "desc_func", + Collections.emptyList()); // Should be skipped + Routine proc2 = + mockBigQueryRoutineWithArgs( + catalog, schemaName, "proc2", "PROCEDURE", "desc2", Collections.emptyList()); + + List fullRoutines = Arrays.asList(proc1, func1, proc2); + Pattern columnNameRegex = null; + List collectedResults = Collections.synchronizedList(new ArrayList<>()); + Schema resultSchema = dbMetadata.defineGetProcedureColumnsSchema(); + FieldList resultSchemaFields = resultSchema.getFields(); + + ExecutorService mockExecutor = mock(ExecutorService.class); + List> processingTaskFutures = new ArrayList<>(); + + // Capture the runnable submitted to the executor + List submittedRunnables = new ArrayList<>(); + doAnswer( + invocation -> { + Runnable runnable = invocation.getArgument(0); + submittedRunnables.add(runnable); 
+ Future future = mock(Future.class); + return future; + }) + .when(mockExecutor) + .submit(any(Runnable.class)); + + dbMetadata.submitProcedureArgumentProcessingJobs( + fullRoutines, + columnNameRegex, + collectedResults, + resultSchemaFields, + mockExecutor, + processingTaskFutures, + dbMetadata.LOG); + + verify(mockExecutor, times(2)).submit(any(Runnable.class)); + assertEquals(2, processingTaskFutures.size()); + } + + @Test + public void testDefineGetTableTypesSchema() { + Schema schema = BigQueryDatabaseMetaData.defineGetTableTypesSchema(); + + assertNotNull("Schema should not be null", schema); + FieldList fields = schema.getFields(); + assertEquals("Should have one column", 1, fields.size()); + + Field tableTypeField = fields.get("TABLE_TYPE"); + assertNotNull("TABLE_TYPE field should exist", tableTypeField); + assertEquals("Field name should be TABLE_TYPE", "TABLE_TYPE", tableTypeField.getName()); + assertEquals( + "Field type should be STRING", + StandardSQLTypeName.STRING, + tableTypeField.getType().getStandardType()); + assertEquals("Field mode should be REQUIRED", Field.Mode.REQUIRED, tableTypeField.getMode()); + } + + @Test + public void testPrepareGetTableTypesRows() { + Schema schema = BigQueryDatabaseMetaData.defineGetTableTypesSchema(); + List rows = BigQueryDatabaseMetaData.prepareGetTableTypesRows(schema); + + assertNotNull("Rows list should not be null", rows); + String[] expectedTableTypes = {"EXTERNAL", "MATERIALIZED VIEW", "SNAPSHOT", "TABLE", "VIEW"}; + assertEquals( + "Should have " + expectedTableTypes.length + " rows", + expectedTableTypes.length, + rows.size()); + + Set foundTypes = new HashSet<>(); + for (int i = 0; i < rows.size(); i++) { + FieldValueList row = rows.get(i); + assertEquals("Row " + i + " should have 1 field value", 1, row.size()); + assertFalse("FieldValue in row " + i + " should not be SQL NULL", row.get(0).isNull()); + + String tableType = row.get(0).getStringValue(); + foundTypes.add(tableType); + } + + assertEquals( 
+ "All expected table types should be present and correctly mapped", + new HashSet<>(Arrays.asList(expectedTableTypes)), + foundTypes); + } + + @Test + public void testGetTableTypes() throws SQLException { + try (ResultSet rs = dbMetadata.getTableTypes()) { + assertNotNull("ResultSet from getTableTypes() should not be null", rs); + + ResultSetMetaData rsmd = rs.getMetaData(); + assertNotNull("ResultSetMetaData should not be null", rsmd); + assertEquals("Should have one column", 1, rsmd.getColumnCount()); + assertEquals("Column name should be TABLE_TYPE", "TABLE_TYPE", rsmd.getColumnName(1)); + assertEquals("Column type should be NVARCHAR", Types.NVARCHAR, rsmd.getColumnType(1)); + + List actualTableTypes = new ArrayList<>(); + while (rs.next()) { + actualTableTypes.add(rs.getString("TABLE_TYPE")); + } + + String[] expectedTableTypes = {"EXTERNAL", "MATERIALIZED VIEW", "SNAPSHOT", "TABLE", "VIEW"}; + assertEquals( + "Number of table types should match", expectedTableTypes.length, actualTableTypes.size()); + + Set expectedSet = new HashSet<>(Arrays.asList(expectedTableTypes)); + Set actualSet = new HashSet<>(actualTableTypes); + assertEquals( + "All expected table types should be present in the ResultSet", expectedSet, actualSet); + } + } + + @Test + public void testDefineGetSuperTablesSchema() { + Schema schema = dbMetadata.defineGetSuperTablesSchema(); + assertNotNull("Schema should not be null", schema); + FieldList fields = schema.getFields(); + assertEquals("Schema should have 4 fields", 4, fields.size()); + + Field tableCat = fields.get("TABLE_CAT"); + assertNotNull(tableCat); + assertEquals("TABLE_CAT", tableCat.getName()); + assertEquals(StandardSQLTypeName.STRING, tableCat.getType().getStandardType()); + assertEquals(Field.Mode.NULLABLE, tableCat.getMode()); + + Field tableSchem = fields.get("TABLE_SCHEM"); + assertNotNull(tableSchem); + assertEquals("TABLE_SCHEM", tableSchem.getName()); + assertEquals(StandardSQLTypeName.STRING, 
tableSchem.getType().getStandardType()); + assertEquals(Field.Mode.NULLABLE, tableSchem.getMode()); + + Field tableName = fields.get("TABLE_NAME"); + assertNotNull(tableName); + assertEquals("TABLE_NAME", tableName.getName()); + assertEquals(StandardSQLTypeName.STRING, tableName.getType().getStandardType()); + assertEquals(Field.Mode.REQUIRED, tableName.getMode()); + + Field superTableName = fields.get("SUPERTABLE_NAME"); + assertNotNull(superTableName); + assertEquals("SUPERTABLE_NAME", superTableName.getName()); + assertEquals(StandardSQLTypeName.STRING, superTableName.getType().getStandardType()); + assertEquals(Field.Mode.REQUIRED, superTableName.getMode()); + } + + @Test + public void testGetSuperTables_ReturnsEmptyResultSetWithCorrectMetadata() throws SQLException { + try (ResultSet rs = + dbMetadata.getSuperTables("testCatalog", "testSchemaPattern", "testTableNamePattern")) { + assertNotNull("ResultSet should not be null", rs); + assertFalse("ResultSet should be empty (next() should return false)", rs.next()); + + ResultSetMetaData metaData = rs.getMetaData(); + assertNotNull("ResultSetMetaData should not be null", metaData); + assertEquals("ResultSetMetaData should have 4 columns", 4, metaData.getColumnCount()); + + // Column 1: TABLE_CAT + assertEquals("TABLE_CAT", metaData.getColumnName(1)); + assertEquals(Types.NVARCHAR, metaData.getColumnType(1)); // Assuming STRING maps to VARCHAR + assertEquals(ResultSetMetaData.columnNullable, metaData.isNullable(1)); + + // Column 2: TABLE_SCHEM + assertEquals("TABLE_SCHEM", metaData.getColumnName(2)); + assertEquals(Types.NVARCHAR, metaData.getColumnType(2)); + assertEquals(ResultSetMetaData.columnNullable, metaData.isNullable(2)); + + // Column 3: TABLE_NAME + assertEquals("TABLE_NAME", metaData.getColumnName(3)); + assertEquals(Types.NVARCHAR, metaData.getColumnType(3)); + assertEquals(ResultSetMetaData.columnNoNulls, metaData.isNullable(3)); // REQUIRED + + // Column 4: SUPERTABLE_NAME + 
assertEquals("SUPERTABLE_NAME", metaData.getColumnName(4)); + assertEquals(Types.NVARCHAR, metaData.getColumnType(4)); + assertEquals(ResultSetMetaData.columnNoNulls, metaData.isNullable(4)); // REQUIRED + } + } + + @Test + public void testDefineGetSuperTypesSchema() { + Schema schema = dbMetadata.defineGetSuperTypesSchema(); + assertNotNull("Schema should not be null", schema); + FieldList fields = schema.getFields(); + assertEquals("Schema should have 6 fields", 6, fields.size()); + + Field typeCat = fields.get("TYPE_CAT"); + assertNotNull(typeCat); + assertEquals("TYPE_CAT", typeCat.getName()); + assertEquals(StandardSQLTypeName.STRING, typeCat.getType().getStandardType()); + assertEquals(Field.Mode.NULLABLE, typeCat.getMode()); + + Field typeSchem = fields.get("TYPE_SCHEM"); + assertNotNull(typeSchem); + assertEquals("TYPE_SCHEM", typeSchem.getName()); + assertEquals(StandardSQLTypeName.STRING, typeSchem.getType().getStandardType()); + assertEquals(Field.Mode.NULLABLE, typeSchem.getMode()); + + Field typeName = fields.get("TYPE_NAME"); + assertNotNull(typeName); + assertEquals("TYPE_NAME", typeName.getName()); + assertEquals(StandardSQLTypeName.STRING, typeName.getType().getStandardType()); + assertEquals(Field.Mode.REQUIRED, typeName.getMode()); + + Field superTypeCat = fields.get("SUPERTYPE_CAT"); + assertNotNull(superTypeCat); + assertEquals("SUPERTYPE_CAT", superTypeCat.getName()); + assertEquals(StandardSQLTypeName.STRING, superTypeCat.getType().getStandardType()); + assertEquals(Field.Mode.NULLABLE, superTypeCat.getMode()); + + Field superTypeSchem = fields.get("SUPERTYPE_SCHEM"); + assertNotNull(superTypeSchem); + assertEquals("SUPERTYPE_SCHEM", superTypeSchem.getName()); + assertEquals(StandardSQLTypeName.STRING, superTypeSchem.getType().getStandardType()); + assertEquals(Field.Mode.NULLABLE, superTypeSchem.getMode()); + + Field superTypeName = fields.get("SUPERTYPE_NAME"); + assertNotNull(superTypeName); + assertEquals("SUPERTYPE_NAME", 
superTypeName.getName()); + assertEquals(StandardSQLTypeName.STRING, superTypeName.getType().getStandardType()); + assertEquals(Field.Mode.REQUIRED, superTypeName.getMode()); + } + + @Test + public void testGetSuperTypes_ReturnsEmptyResultSetWithCorrectMetadata() throws SQLException { + try (ResultSet rs = + dbMetadata.getSuperTypes("testCatalog", "testSchemaPattern", "testTypeNamePattern")) { + assertNotNull("ResultSet should not be null", rs); + assertFalse("ResultSet should be empty (next() should return false)", rs.next()); + + ResultSetMetaData metaData = rs.getMetaData(); + assertNotNull("ResultSetMetaData should not be null", metaData); + assertEquals("ResultSetMetaData should have 6 columns", 6, metaData.getColumnCount()); + + // Column 1: TYPE_CAT + assertEquals("TYPE_CAT", metaData.getColumnName(1)); + assertEquals(Types.NVARCHAR, metaData.getColumnType(1)); + assertEquals(ResultSetMetaData.columnNullable, metaData.isNullable(1)); + + // Column 2: TYPE_SCHEM + assertEquals("TYPE_SCHEM", metaData.getColumnName(2)); + assertEquals(Types.NVARCHAR, metaData.getColumnType(2)); + assertEquals(ResultSetMetaData.columnNullable, metaData.isNullable(2)); + + // Column 3: TYPE_NAME + assertEquals("TYPE_NAME", metaData.getColumnName(3)); + assertEquals(Types.NVARCHAR, metaData.getColumnType(3)); + assertEquals(ResultSetMetaData.columnNoNulls, metaData.isNullable(3)); + + // Column 4: SUPERTYPE_CAT + assertEquals("SUPERTYPE_CAT", metaData.getColumnName(4)); + assertEquals(Types.NVARCHAR, metaData.getColumnType(4)); + assertEquals(ResultSetMetaData.columnNullable, metaData.isNullable(4)); + + // Column 5: SUPERTYPE_SCHEM + assertEquals("SUPERTYPE_SCHEM", metaData.getColumnName(5)); + assertEquals(Types.NVARCHAR, metaData.getColumnType(5)); + assertEquals(ResultSetMetaData.columnNullable, metaData.isNullable(5)); + + // Column 6: SUPERTYPE_NAME + assertEquals("SUPERTYPE_NAME", metaData.getColumnName(6)); + assertEquals(Types.NVARCHAR, metaData.getColumnType(6)); + 
assertEquals(ResultSetMetaData.columnNoNulls, metaData.isNullable(6)); + } + } + + @Test + public void testDefineGetAttributesSchema() { + Schema schema = dbMetadata.defineGetAttributesSchema(); + assertNotNull("Schema should not be null", schema); + FieldList fields = schema.getFields(); + assertEquals("Schema should have 21 fields", 21, fields.size()); + + assertEquals("TYPE_CAT", fields.get(0).getName()); + assertEquals(StandardSQLTypeName.STRING, fields.get(0).getType().getStandardType()); + assertEquals(Field.Mode.NULLABLE, fields.get(0).getMode()); + + assertEquals("ATTR_NAME", fields.get(3).getName()); + assertEquals(StandardSQLTypeName.STRING, fields.get(3).getType().getStandardType()); + assertEquals(Field.Mode.REQUIRED, fields.get(3).getMode()); + + assertEquals("DATA_TYPE", fields.get(4).getName()); + assertEquals(StandardSQLTypeName.INT64, fields.get(4).getType().getStandardType()); + assertEquals(Field.Mode.REQUIRED, fields.get(4).getMode()); + + assertEquals("ORDINAL_POSITION", fields.get(15).getName()); + assertEquals(StandardSQLTypeName.INT64, fields.get(15).getType().getStandardType()); + assertEquals(Field.Mode.REQUIRED, fields.get(15).getMode()); + + assertEquals("IS_NULLABLE", fields.get(16).getName()); + assertEquals(StandardSQLTypeName.STRING, fields.get(16).getType().getStandardType()); + assertEquals(Field.Mode.REQUIRED, fields.get(16).getMode()); + + assertEquals("SOURCE_DATA_TYPE", fields.get(20).getName()); + assertEquals(StandardSQLTypeName.INT64, fields.get(20).getType().getStandardType()); + assertEquals(Field.Mode.NULLABLE, fields.get(20).getMode()); + } + + @Test + public void testGetAttributes_ReturnsEmptyResultSet() throws SQLException { + try (ResultSet rs = + dbMetadata.getAttributes("testCat", "testSchema", "testType", "testAttr%")) { + assertNotNull("ResultSet should not be null", rs); + assertFalse("ResultSet should be empty", rs.next()); + + ResultSetMetaData metaData = rs.getMetaData(); + assertEquals("ResultSetMetaData 
should have 21 columns", 21, metaData.getColumnCount());

      assertEquals("TYPE_CAT", metaData.getColumnName(1));
      assertEquals(Types.NVARCHAR, metaData.getColumnType(1));
      assertEquals(ResultSetMetaData.columnNullable, metaData.isNullable(1));

      assertEquals("ATTR_NAME", metaData.getColumnName(4));
      assertEquals(Types.NVARCHAR, metaData.getColumnType(4));
      assertEquals(ResultSetMetaData.columnNoNulls, metaData.isNullable(4));

      assertEquals("DATA_TYPE", metaData.getColumnName(5));
      assertEquals(Types.BIGINT, metaData.getColumnType(5));
      assertEquals(ResultSetMetaData.columnNoNulls, metaData.isNullable(5));

      assertEquals("ORDINAL_POSITION", metaData.getColumnName(16));
      assertEquals(Types.BIGINT, metaData.getColumnType(16));
      assertEquals(ResultSetMetaData.columnNoNulls, metaData.isNullable(16));

      assertEquals("IS_NULLABLE", metaData.getColumnName(17));
      assertEquals(Types.NVARCHAR, metaData.getColumnType(17));
      assertEquals(ResultSetMetaData.columnNoNulls, metaData.isNullable(17));

      assertEquals("SOURCE_DATA_TYPE", metaData.getColumnName(21));
      assertEquals(Types.BIGINT, metaData.getColumnType(21));
      assertEquals(ResultSetMetaData.columnNullable, metaData.isNullable(21));
    }
  }

  // Verifies the schema built for getBestRowIdentifier(): 8 fields with the
  // JDBC-mandated names, plus the expected BigQuery standard-SQL types and
  // REQUIRED/NULLABLE modes (positions 0-2 and 5-7 are spot-checked).
  @Test
  public void testDefineGetBestRowIdentifierSchema() {
    Schema schema = dbMetadata.defineGetBestRowIdentifierSchema();
    assertNotNull("Schema should not be null", schema);
    FieldList fields = schema.getFields();
    assertEquals("Schema should have 8 fields", 8, fields.size());

    assertEquals("SCOPE", fields.get(0).getName());
    assertEquals(StandardSQLTypeName.INT64, fields.get(0).getType().getStandardType());
    assertEquals(Field.Mode.REQUIRED, fields.get(0).getMode());

    assertEquals("COLUMN_NAME", fields.get(1).getName());
    assertEquals(StandardSQLTypeName.STRING, fields.get(1).getType().getStandardType());
    assertEquals(Field.Mode.REQUIRED, fields.get(1).getMode());

    assertEquals("DATA_TYPE", fields.get(2).getName());
    assertEquals(StandardSQLTypeName.INT64, fields.get(2).getType().getStandardType());
    assertEquals(Field.Mode.REQUIRED, fields.get(2).getMode());

    assertEquals("BUFFER_LENGTH", fields.get(5).getName());
    assertEquals(StandardSQLTypeName.INT64, fields.get(5).getType().getStandardType());
    assertEquals(Field.Mode.NULLABLE, fields.get(5).getMode());

    assertEquals("DECIMAL_DIGITS", fields.get(6).getName());
    assertEquals(StandardSQLTypeName.INT64, fields.get(6).getType().getStandardType());
    assertEquals(Field.Mode.NULLABLE, fields.get(6).getMode());

    assertEquals("PSEUDO_COLUMN", fields.get(7).getName());
    assertEquals(StandardSQLTypeName.INT64, fields.get(7).getType().getStandardType());
    assertEquals(Field.Mode.REQUIRED, fields.get(7).getMode());
  }

  // getBestRowIdentifier() must return an EMPTY result set (rs.next() is false)
  // whose metadata still matches the 8-column JDBC contract — BIGINT/NVARCHAR
  // column types and the expected nullability flags are spot-checked.
  @Test
  public void testGetBestRowIdentifier_ReturnsEmptyResultSetWithCorrectMetadata()
      throws SQLException {
    int testScope = DatabaseMetaData.bestRowSession;
    boolean testNullable = true;

    try (ResultSet rs =
        dbMetadata.getBestRowIdentifier(
            "testCat", "testSchema", "testTable", testScope, testNullable)) {
      assertNotNull("ResultSet should not be null", rs);
      assertFalse("ResultSet should be empty", rs.next());

      ResultSetMetaData metaData = rs.getMetaData();
      assertEquals("ResultSetMetaData should have 8 columns", 8, metaData.getColumnCount());

      assertEquals("SCOPE", metaData.getColumnName(1));
      assertEquals(Types.BIGINT, metaData.getColumnType(1));
      assertEquals(ResultSetMetaData.columnNoNulls, metaData.isNullable(1));

      assertEquals("COLUMN_NAME", metaData.getColumnName(2));
      assertEquals(Types.NVARCHAR, metaData.getColumnType(2));
      assertEquals(ResultSetMetaData.columnNoNulls, metaData.isNullable(2));

      assertEquals("DATA_TYPE", metaData.getColumnName(3));
      assertEquals(Types.BIGINT, metaData.getColumnType(3));
      assertEquals(ResultSetMetaData.columnNoNulls, metaData.isNullable(3));

      assertEquals("BUFFER_LENGTH", metaData.getColumnName(6));
      assertEquals(Types.BIGINT, metaData.getColumnType(6));
      assertEquals(ResultSetMetaData.columnNullable, metaData.isNullable(6));

      assertEquals("PSEUDO_COLUMN", metaData.getColumnName(8));
      assertEquals(Types.BIGINT, metaData.getColumnType(8));
      assertEquals(ResultSetMetaData.columnNoNulls, metaData.isNullable(8));
    }
  }

  // Verifies the 7-field getUDTs() schema; fields are looked up by name here
  // (not position), so only name/type/mode are asserted.
  @Test
  public void testDefineGetUDTsSchema() {
    Schema schema = dbMetadata.defineGetUDTsSchema();
    assertNotNull("Schema should not be null", schema);
    FieldList fields = schema.getFields();
    assertEquals("Schema should have 7 fields", 7, fields.size());

    assertEquals("TYPE_NAME", fields.get("TYPE_NAME").getName());
    assertEquals(StandardSQLTypeName.STRING, fields.get("TYPE_NAME").getType().getStandardType());
    assertEquals(Field.Mode.REQUIRED, fields.get("TYPE_NAME").getMode());

    assertEquals("CLASS_NAME", fields.get("CLASS_NAME").getName());
    assertEquals(StandardSQLTypeName.STRING, fields.get("CLASS_NAME").getType().getStandardType());
    assertEquals(Field.Mode.REQUIRED, fields.get("CLASS_NAME").getMode());

    assertEquals("DATA_TYPE", fields.get("DATA_TYPE").getName());
    assertEquals(StandardSQLTypeName.INT64, fields.get("DATA_TYPE").getType().getStandardType());
    assertEquals(Field.Mode.REQUIRED, fields.get("DATA_TYPE").getMode());

    assertEquals("BASE_TYPE", fields.get("BASE_TYPE").getName());
    assertEquals(StandardSQLTypeName.INT64, fields.get("BASE_TYPE").getType().getStandardType());
    assertEquals(Field.Mode.NULLABLE, fields.get("BASE_TYPE").getMode());
  }

  // getUDTs() must return an empty result set; the 7-column metadata is
  // spot-checked at positions 3, 5 and 7.
  @Test
  public void testGetUDTs_ReturnsEmptyResultSet() throws SQLException {
    int[] types = {Types.STRUCT, Types.DISTINCT};
    try (ResultSet rs = dbMetadata.getUDTs("testCat", "testSchema%", "testType%", types)) {
      assertNotNull("ResultSet should not be null", rs);
      assertFalse("ResultSet should be empty", rs.next());

      ResultSetMetaData metaData = rs.getMetaData();
      assertEquals("ResultSetMetaData should have 7 columns", 7,
metaData.getColumnCount());

      assertEquals("TYPE_NAME", metaData.getColumnName(3));
      assertEquals(Types.NVARCHAR, metaData.getColumnType(3));
      assertEquals(ResultSetMetaData.columnNoNulls, metaData.isNullable(3));

      assertEquals("DATA_TYPE", metaData.getColumnName(5));
      assertEquals(Types.BIGINT, metaData.getColumnType(5));
      assertEquals(ResultSetMetaData.columnNoNulls, metaData.isNullable(5));

      assertEquals("BASE_TYPE", metaData.getColumnName(7));
      assertEquals(Types.BIGINT, metaData.getColumnType(7));
      assertEquals(ResultSetMetaData.columnNullable, metaData.isNullable(7));
    }
  }

  // Verifies the 13-field getIndexInfo() schema; positions 2, 3, 6 and 10 are
  // spot-checked for name, standard-SQL type, and mode.
  @Test
  public void testDefineGetIndexInfoSchema() {
    Schema schema = dbMetadata.defineGetIndexInfoSchema();
    assertNotNull("Schema should not be null", schema);
    FieldList fields = schema.getFields();
    assertEquals("Schema should have 13 fields", 13, fields.size());

    assertEquals("TABLE_NAME", fields.get(2).getName());
    assertEquals(StandardSQLTypeName.STRING, fields.get(2).getType().getStandardType());
    assertEquals(Field.Mode.REQUIRED, fields.get(2).getMode());

    assertEquals("NON_UNIQUE", fields.get(3).getName());
    assertEquals(StandardSQLTypeName.BOOL, fields.get(3).getType().getStandardType());
    assertEquals(Field.Mode.REQUIRED, fields.get(3).getMode());

    assertEquals("TYPE", fields.get(6).getName());
    assertEquals(StandardSQLTypeName.INT64, fields.get(6).getType().getStandardType());
    assertEquals(Field.Mode.REQUIRED, fields.get(6).getMode());

    assertEquals("CARDINALITY", fields.get(10).getName());
    assertEquals(StandardSQLTypeName.INT64, fields.get(10).getType().getStandardType());
    assertEquals(Field.Mode.NULLABLE, fields.get(10).getMode());
  }

  // getIndexInfo() must return an empty result set with the 13-column JDBC
  // metadata intact (NON_UNIQUE maps to BOOLEAN, counters to BIGINT).
  @Test
  public void testGetIndexInfo_ReturnsEmptyResultSetWithCorrectMetadata() throws SQLException {
    try (ResultSet rs = dbMetadata.getIndexInfo("testCat", "testSchema", "testTable", true, true)) {
      assertNotNull("ResultSet should not be null", rs);
      assertFalse("ResultSet should be empty", rs.next());

      ResultSetMetaData metaData = rs.getMetaData();
      assertEquals("ResultSetMetaData should have 13 columns", 13, metaData.getColumnCount());

      assertEquals("TABLE_NAME", metaData.getColumnName(3));
      assertEquals(Types.NVARCHAR, metaData.getColumnType(3));
      assertEquals(ResultSetMetaData.columnNoNulls, metaData.isNullable(3));

      assertEquals("NON_UNIQUE", metaData.getColumnName(4));
      assertEquals(Types.BOOLEAN, metaData.getColumnType(4));
      assertEquals(ResultSetMetaData.columnNoNulls, metaData.isNullable(4));

      assertEquals("TYPE", metaData.getColumnName(7));
      assertEquals(Types.BIGINT, metaData.getColumnType(7));
      assertEquals(ResultSetMetaData.columnNoNulls, metaData.isNullable(7));

      assertEquals("CARDINALITY", metaData.getColumnName(11));
      assertEquals(Types.BIGINT, metaData.getColumnType(11));
      assertEquals(ResultSetMetaData.columnNullable, metaData.isNullable(11));
    }
  }

  // Verifies the 7-field getTablePrivileges() schema (names, types, modes).
  @Test
  public void testDefineGetTablePrivilegesSchema() {
    Schema schema = dbMetadata.defineGetTablePrivilegesSchema();
    assertNotNull("Schema should not be null", schema);
    FieldList fields = schema.getFields();
    assertEquals("Schema should have 7 fields", 7, fields.size());

    assertEquals("TABLE_CAT", fields.get(0).getName());
    assertEquals(StandardSQLTypeName.STRING, fields.get(0).getType().getStandardType());
    assertEquals(Field.Mode.NULLABLE, fields.get(0).getMode());

    assertEquals("TABLE_NAME", fields.get(2).getName());
    assertEquals(StandardSQLTypeName.STRING, fields.get(2).getType().getStandardType());
    assertEquals(Field.Mode.REQUIRED, fields.get(2).getMode());

    assertEquals("GRANTEE", fields.get(4).getName());
    assertEquals(StandardSQLTypeName.STRING, fields.get(4).getType().getStandardType());
    assertEquals(Field.Mode.REQUIRED, fields.get(4).getMode());

    assertEquals("PRIVILEGE", fields.get(5).getName());
    assertEquals(StandardSQLTypeName.STRING, fields.get(5).getType().getStandardType());
    assertEquals(Field.Mode.REQUIRED, fields.get(5).getMode());

    assertEquals("IS_GRANTABLE", fields.get(6).getName());
    assertEquals(StandardSQLTypeName.STRING, fields.get(6).getType().getStandardType());
    assertEquals(Field.Mode.NULLABLE, fields.get(6).getMode());
  }

  // getTablePrivileges() must return an empty result set; all asserted columns
  // surface as NVARCHAR with the JDBC-specified nullability.
  @Test
  public void testGetTablePrivileges_ReturnsEmptyResultSetWithCorrectMetadata()
      throws SQLException {
    try (ResultSet rs = dbMetadata.getTablePrivileges("testCat", "testSchema%", "testTable%")) {
      assertNotNull("ResultSet should not be null", rs);
      assertFalse("ResultSet should be empty", rs.next());

      ResultSetMetaData metaData = rs.getMetaData();
      assertEquals("ResultSetMetaData should have 7 columns", 7, metaData.getColumnCount());

      assertEquals("TABLE_CAT", metaData.getColumnName(1));
      assertEquals(Types.NVARCHAR, metaData.getColumnType(1));
      assertEquals(ResultSetMetaData.columnNullable, metaData.isNullable(1));

      assertEquals("TABLE_NAME", metaData.getColumnName(3));
      assertEquals(Types.NVARCHAR, metaData.getColumnType(3));
      assertEquals(ResultSetMetaData.columnNoNulls, metaData.isNullable(3));

      assertEquals("GRANTEE", metaData.getColumnName(5));
      assertEquals(Types.NVARCHAR, metaData.getColumnType(5));
      assertEquals(ResultSetMetaData.columnNoNulls, metaData.isNullable(5));

      assertEquals("PRIVILEGE", metaData.getColumnName(6));
      assertEquals(Types.NVARCHAR, metaData.getColumnType(6));
      assertEquals(ResultSetMetaData.columnNoNulls, metaData.isNullable(6));

      assertEquals("IS_GRANTABLE", metaData.getColumnName(7));
      assertEquals(Types.NVARCHAR, metaData.getColumnType(7));
      assertEquals(ResultSetMetaData.columnNullable, metaData.isNullable(7));
    }
  }

  // Verifies the 8-field getColumnPrivileges() schema.
  @Test
  public void testDefineGetColumnPrivilegesSchema() {
    Schema schema = dbMetadata.defineGetColumnPrivilegesSchema();
    assertNotNull("Schema should not be null", schema);
    FieldList fields = schema.getFields();
    assertEquals("Schema should have 8 fields", 8, fields.size());

    assertEquals("TABLE_SCHEM", fields.get(1).getName());
    assertEquals(StandardSQLTypeName.STRING, fields.get(1).getType().getStandardType());
    assertEquals(Field.Mode.NULLABLE, fields.get(1).getMode());

    assertEquals("COLUMN_NAME", fields.get(3).getName());
    assertEquals(StandardSQLTypeName.STRING, fields.get(3).getType().getStandardType());
    assertEquals(Field.Mode.REQUIRED, fields.get(3).getMode());

    assertEquals("GRANTOR", fields.get(4).getName());
    assertEquals(StandardSQLTypeName.STRING, fields.get(4).getType().getStandardType());
    assertEquals(Field.Mode.NULLABLE, fields.get(4).getMode());

    assertEquals("PRIVILEGE", fields.get(6).getName());
    assertEquals(StandardSQLTypeName.STRING, fields.get(6).getType().getStandardType());
    assertEquals(Field.Mode.REQUIRED, fields.get(6).getMode());

    assertEquals("IS_GRANTABLE", fields.get(7).getName());
    assertEquals(StandardSQLTypeName.STRING, fields.get(7).getType().getStandardType());
    assertEquals(Field.Mode.NULLABLE, fields.get(7).getMode());
  }

  // getColumnPrivileges() must return an empty result set with correct
  // 8-column metadata.
  @Test
  public void testGetColumnPrivileges_ReturnsEmptyResultSetWithCorrectMetadata()
      throws SQLException {
    try (ResultSet rs =
        dbMetadata.getColumnPrivileges("testCat", "testSchema", "testTable", "testCol%")) {
      assertNotNull("ResultSet should not be null", rs);
      assertFalse("ResultSet should be empty", rs.next());

      ResultSetMetaData metaData = rs.getMetaData();
      assertEquals("ResultSetMetaData should have 8 columns", 8, metaData.getColumnCount());

      assertEquals("TABLE_SCHEM", metaData.getColumnName(2));
      assertEquals(Types.NVARCHAR, metaData.getColumnType(2));
      assertEquals(ResultSetMetaData.columnNullable, metaData.isNullable(2));

      assertEquals("COLUMN_NAME", metaData.getColumnName(4));
      assertEquals(Types.NVARCHAR, metaData.getColumnType(4));
      assertEquals(ResultSetMetaData.columnNoNulls, metaData.isNullable(4));

      assertEquals("GRANTOR", metaData.getColumnName(5));
      assertEquals(Types.NVARCHAR, metaData.getColumnType(5));
      assertEquals(ResultSetMetaData.columnNullable, metaData.isNullable(5));

      assertEquals("PRIVILEGE", metaData.getColumnName(7));
      assertEquals(Types.NVARCHAR, metaData.getColumnType(7));
      assertEquals(ResultSetMetaData.columnNoNulls, metaData.isNullable(7));

      assertEquals("IS_GRANTABLE", metaData.getColumnName(8));
      assertEquals(Types.NVARCHAR, metaData.getColumnType(8));
      assertEquals(ResultSetMetaData.columnNullable, metaData.isNullable(8));
    }
  }

  // Verifies the 8-field getVersionColumns() schema; note SCOPE is NULLABLE
  // here (unlike getBestRowIdentifier, where it is REQUIRED).
  @Test
  public void testDefineGetVersionColumnsSchema() {
    Schema schema = dbMetadata.defineGetVersionColumnsSchema();
    assertNotNull("Schema should not be null", schema);
    FieldList fields = schema.getFields();
    assertEquals("Schema should have 8 fields", 8, fields.size());

    assertEquals("SCOPE", fields.get(0).getName());
    assertEquals(StandardSQLTypeName.INT64, fields.get(0).getType().getStandardType());
    assertEquals(Field.Mode.NULLABLE, fields.get(0).getMode());

    assertEquals("COLUMN_NAME", fields.get(1).getName());
    assertEquals(StandardSQLTypeName.STRING, fields.get(1).getType().getStandardType());
    assertEquals(Field.Mode.REQUIRED, fields.get(1).getMode());

    assertEquals("DATA_TYPE", fields.get(2).getName());
    assertEquals(StandardSQLTypeName.INT64, fields.get(2).getType().getStandardType());
    assertEquals(Field.Mode.REQUIRED, fields.get(2).getMode());

    assertEquals("BUFFER_LENGTH", fields.get(5).getName());
    assertEquals(StandardSQLTypeName.INT64, fields.get(5).getType().getStandardType());
    assertEquals(Field.Mode.REQUIRED, fields.get(5).getMode());

    assertEquals("PSEUDO_COLUMN", fields.get(7).getName());
    assertEquals(StandardSQLTypeName.INT64, fields.get(7).getType().getStandardType());
    assertEquals(Field.Mode.REQUIRED, fields.get(7).getMode());
  }

  // getVersionColumns() must return an empty result set with correct metadata.
  @Test
  public void testGetVersionColumns_ReturnsEmptyResultSetWithCorrectMetadata() throws SQLException {
    try (ResultSet rs = dbMetadata.getVersionColumns("testCat", "testSchema",
"testTable")) {
      assertNotNull("ResultSet should not be null", rs);
      assertFalse("ResultSet should be empty", rs.next());

      ResultSetMetaData metaData = rs.getMetaData();
      assertEquals("ResultSetMetaData should have 8 columns", 8, metaData.getColumnCount());

      assertEquals("SCOPE", metaData.getColumnName(1));
      assertEquals(Types.BIGINT, metaData.getColumnType(1));
      assertEquals(ResultSetMetaData.columnNullable, metaData.isNullable(1));

      assertEquals("COLUMN_NAME", metaData.getColumnName(2));
      assertEquals(Types.NVARCHAR, metaData.getColumnType(2));
      assertEquals(ResultSetMetaData.columnNoNulls, metaData.isNullable(2));

      assertEquals("DATA_TYPE", metaData.getColumnName(3));
      assertEquals(Types.BIGINT, metaData.getColumnType(3));
      assertEquals(ResultSetMetaData.columnNoNulls, metaData.isNullable(3));

      assertEquals("BUFFER_LENGTH", metaData.getColumnName(6));
      assertEquals(Types.BIGINT, metaData.getColumnType(6));
      assertEquals(ResultSetMetaData.columnNoNulls, metaData.isNullable(6));

      assertEquals("PSEUDO_COLUMN", metaData.getColumnName(8));
      assertEquals(Types.BIGINT, metaData.getColumnType(8));
      assertEquals(ResultSetMetaData.columnNoNulls, metaData.isNullable(8));
    }
  }

  // Verifies the 12-field getPseudoColumns() schema; positions 2-5, 8 and 11
  // are spot-checked.
  @Test
  public void testDefineGetPseudoColumnsSchema() {
    Schema schema = dbMetadata.defineGetPseudoColumnsSchema();
    assertNotNull("Schema should not be null", schema);
    FieldList fields = schema.getFields();
    assertEquals("Schema should have 12 fields", 12, fields.size());

    assertEquals("TABLE_NAME", fields.get(2).getName());
    assertEquals(StandardSQLTypeName.STRING, fields.get(2).getType().getStandardType());
    assertEquals(Field.Mode.REQUIRED, fields.get(2).getMode());

    assertEquals("COLUMN_NAME", fields.get(3).getName());
    assertEquals(StandardSQLTypeName.STRING, fields.get(3).getType().getStandardType());
    assertEquals(Field.Mode.REQUIRED, fields.get(3).getMode());

    assertEquals("DATA_TYPE", fields.get(4).getName());
    assertEquals(StandardSQLTypeName.INT64, fields.get(4).getType().getStandardType());
    assertEquals(Field.Mode.REQUIRED, fields.get(4).getMode());

    assertEquals("COLUMN_SIZE", fields.get(5).getName());
    assertEquals(StandardSQLTypeName.INT64, fields.get(5).getType().getStandardType());
    assertEquals(Field.Mode.REQUIRED, fields.get(5).getMode());

    assertEquals("COLUMN_USAGE", fields.get(8).getName());
    assertEquals(StandardSQLTypeName.STRING, fields.get(8).getType().getStandardType());
    assertEquals(Field.Mode.REQUIRED, fields.get(8).getMode());

    assertEquals("IS_NULLABLE", fields.get(11).getName());
    assertEquals(StandardSQLTypeName.STRING, fields.get(11).getType().getStandardType());
    assertEquals(Field.Mode.REQUIRED, fields.get(11).getMode());
  }

  // getPseudoColumns() must return an empty result set with correct 12-column
  // metadata.
  @Test
  public void testGetPseudoColumns_ReturnsEmptyResultSet() throws SQLException {
    try (ResultSet rs = dbMetadata.getPseudoColumns("testCat", "testSchema%", "testTable%", "%")) {
      assertNotNull("ResultSet should not be null", rs);
      assertFalse("ResultSet should be empty", rs.next());

      ResultSetMetaData metaData = rs.getMetaData();
      assertEquals("ResultSetMetaData should have 12 columns", 12, metaData.getColumnCount());

      assertEquals("TABLE_NAME", metaData.getColumnName(3));
      assertEquals(Types.NVARCHAR, metaData.getColumnType(3));
      assertEquals(ResultSetMetaData.columnNoNulls, metaData.isNullable(3));

      assertEquals("COLUMN_NAME", metaData.getColumnName(4));
      assertEquals(Types.NVARCHAR, metaData.getColumnType(4));
      assertEquals(ResultSetMetaData.columnNoNulls, metaData.isNullable(4));

      assertEquals("DATA_TYPE", metaData.getColumnName(5));
      assertEquals(Types.BIGINT, metaData.getColumnType(5));
      assertEquals(ResultSetMetaData.columnNoNulls, metaData.isNullable(5));

      assertEquals("COLUMN_SIZE", metaData.getColumnName(6));
      assertEquals(Types.BIGINT, metaData.getColumnType(6));
      assertEquals(ResultSetMetaData.columnNoNulls, metaData.isNullable(6));

      assertEquals("COLUMN_USAGE", metaData.getColumnName(9));
      assertEquals(Types.NVARCHAR, metaData.getColumnType(9));
      assertEquals(ResultSetMetaData.columnNoNulls, metaData.isNullable(9));

      assertEquals("IS_NULLABLE", metaData.getColumnName(12));
      assertEquals(Types.NVARCHAR, metaData.getColumnType(12));
      assertEquals(ResultSetMetaData.columnNoNulls, metaData.isNullable(12));
    }
  }

  // Verifies all 6 fields of the getFunctions() schema by name.
  @Test
  public void testDefineGetFunctionsSchema() {
    Schema schema = dbMetadata.defineGetFunctionsSchema();
    assertNotNull(schema);
    FieldList fields = schema.getFields();
    assertEquals(6, fields.size());

    Field funcCat = fields.get("FUNCTION_CAT");
    assertEquals("FUNCTION_CAT", funcCat.getName());
    assertEquals(StandardSQLTypeName.STRING, funcCat.getType().getStandardType());
    assertEquals(Field.Mode.NULLABLE, funcCat.getMode());

    Field funcSchem = fields.get("FUNCTION_SCHEM");
    assertEquals("FUNCTION_SCHEM", funcSchem.getName());
    assertEquals(StandardSQLTypeName.STRING, funcSchem.getType().getStandardType());
    assertEquals(Field.Mode.NULLABLE, funcSchem.getMode());

    Field funcName = fields.get("FUNCTION_NAME");
    assertEquals("FUNCTION_NAME", funcName.getName());
    assertEquals(StandardSQLTypeName.STRING, funcName.getType().getStandardType());
    assertEquals(Field.Mode.REQUIRED, funcName.getMode());

    Field remarks = fields.get("REMARKS");
    assertEquals("REMARKS", remarks.getName());
    assertEquals(StandardSQLTypeName.STRING, remarks.getType().getStandardType());
    assertEquals(Field.Mode.NULLABLE, remarks.getMode());

    Field funcType = fields.get("FUNCTION_TYPE");
    assertEquals("FUNCTION_TYPE", funcType.getName());
    assertEquals(StandardSQLTypeName.INT64, funcType.getType().getStandardType());
    assertEquals(Field.Mode.REQUIRED, funcType.getMode());

    Field specificName = fields.get("SPECIFIC_NAME");
    assertEquals("SPECIFIC_NAME", specificName.getName());
    assertEquals(StandardSQLTypeName.STRING, specificName.getType().getStandardType());
    assertEquals(Field.Mode.REQUIRED, specificName.getMode());
  }

  // processFunctionInfo() on a mocked SCALAR_FUNCTION routine: expects one row
  // whose FUNCTION_TYPE is functionResultUnknown and whose SPECIFIC_NAME equals
  // the routine name.
  // NOTE(review): generic type parameters on List appear to have been lost in
  // extraction (likely List<FieldValueList>) — preserved as-is.
  @Test
  public void testProcessFunctionInfo_ScalarFunction() {
    Schema resultSchema = dbMetadata.defineGetFunctionsSchema();
    FieldList resultSchemaFields = resultSchema.getFields();
    List collectedResults = Collections.synchronizedList(new ArrayList<>());

    String catalog = "proj-func";
    String schema = "dataset_func";
    String name = "my_scalar_func";
    String description = "A test scalar function";

    Routine routine = mockBigQueryRoutine(catalog, schema, name, "SCALAR_FUNCTION", description);

    dbMetadata.processFunctionInfo(routine, collectedResults, resultSchemaFields);

    assertEquals(1, collectedResults.size());
    FieldValueList row = collectedResults.get(0);
    assertNotNull(row);
    assertEquals(6, row.size());
    assertEquals(catalog, row.get("FUNCTION_CAT").getStringValue());
    assertEquals(schema, row.get("FUNCTION_SCHEM").getStringValue());
    assertEquals(name, row.get("FUNCTION_NAME").getStringValue());
    assertEquals(description, row.get("REMARKS").getStringValue());
    assertEquals(
        String.valueOf(DatabaseMetaData.functionResultUnknown),
        row.get("FUNCTION_TYPE").getStringValue());
    assertEquals(name, row.get("SPECIFIC_NAME").getStringValue());
  }

  // processFunctionInfo() on a mocked TABLE_FUNCTION routine: expects one row
  // whose FUNCTION_TYPE is functionReturnsTable.
  @Test
  public void testProcessFunctionInfo_TableFunction() {
    Schema resultSchema = dbMetadata.defineGetFunctionsSchema();
    FieldList resultSchemaFields = resultSchema.getFields();
    List collectedResults = Collections.synchronizedList(new ArrayList<>());

    String catalog = "proj-func";
    String schema = "dataset_func";
    String name = "my_table_func";
    String description = "A test Table function";

    Routine routine = mockBigQueryRoutine(catalog, schema, name, "TABLE_FUNCTION", description);

    dbMetadata.processFunctionInfo(routine, collectedResults, resultSchemaFields);

    assertEquals(1, collectedResults.size());
    FieldValueList row = collectedResults.get(0);
    assertNotNull(row);
    assertEquals(6, row.size());
assertEquals(catalog, row.get("FUNCTION_CAT").getStringValue());
    assertEquals(schema, row.get("FUNCTION_SCHEM").getStringValue());
    assertEquals(name, row.get("FUNCTION_NAME").getStringValue());
    assertEquals(description, row.get("REMARKS").getStringValue());
    assertEquals(
        String.valueOf(DatabaseMetaData.functionReturnsTable),
        row.get("FUNCTION_TYPE").getStringValue());
    assertEquals(name, row.get("SPECIFIC_NAME").getStringValue());
  }

  // Test helper: builds a 6-value getFunctions() row (CAT, SCHEM, NAME,
  // REMARKS, TYPE, SPECIFIC_NAME) using the metadata object's field-value
  // factories, bound to the supplied schema fields.
  private FieldValueList createFunctionRow(
      String cat,
      String schem,
      String name,
      String specName,
      int funcType,
      FieldList schemaFields) {
    List values = new ArrayList<>();
    values.add(dbMetadata.createStringFieldValue(cat)); // FUNCTION_CAT
    values.add(dbMetadata.createStringFieldValue(schem)); // FUNCTION_SCHEM
    values.add(dbMetadata.createStringFieldValue(name)); // FUNCTION_NAME
    values.add(dbMetadata.createStringFieldValue("Remark for " + name)); // REMARKS
    values.add(dbMetadata.createLongFieldValue((long) funcType)); // FUNCTION_TYPE
    values.add(dbMetadata.createStringFieldValue(specName)); // SPECIFIC_NAME
    return FieldValueList.of(values, schemaFields);
  }

  // Exercises sortResults() with the getFunctions comparator: rows must be
  // ordered by FUNCTION_CAT, FUNCTION_SCHEM, FUNCTION_NAME, SPECIFIC_NAME,
  // with null catalog/schema sorting first.
  @Test
  public void testSortResults_Functions() {
    Schema resultSchema = dbMetadata.defineGetFunctionsSchema();
    FieldList schemaFields = resultSchema.getFields();
    List results = new ArrayList<>();

    // Add rows in unsorted order
    results.add(
        createFunctionRow(
            "cat_b",
            "sch_c",
            "func_1",
            "func_1_spec",
            DatabaseMetaData.functionResultUnknown,
            schemaFields));
    results.add(
        createFunctionRow(
            "cat_a",
            "sch_z",
            "func_alpha",
            "func_alpha_spec",
            DatabaseMetaData.functionReturnsTable,
            schemaFields));
    results.add(
        createFunctionRow(
            "cat_a",
            "sch_z",
            "func_beta",
            "func_beta_spec",
            DatabaseMetaData.functionResultUnknown,
            schemaFields));
    results.add(
        createFunctionRow(
            null,
            "sch_y",
            "func_gamma",
            "func_gamma_spec",
            DatabaseMetaData.functionReturnsTable,
            schemaFields));
    results.add(
        createFunctionRow(
            "cat_a",
            null,
            "func_delta",
            "func_delta_spec",
            DatabaseMetaData.functionResultUnknown,
            schemaFields));
    results.add(
        createFunctionRow(
            "cat_a",
            "sch_z",
            "func_alpha",
            "func_alpha_spec_older",
            DatabaseMetaData.functionReturnsTable,
            schemaFields));

    Comparator comparator = dbMetadata.defineGetFunctionsComparator(schemaFields);
    dbMetadata.sortResults(results, comparator, "getFunctions", dbMetadata.LOG);

    // Expected Order: Null Cat, then Cat A (Null Schem, then sch_z), then Cat B. Within that, Name,
    // then Spec Name.
    assertEquals(6, results.size());

    // Check order based on the comparator (CAT, SCHEM, NAME, SPECIFIC_NAME)
    assertEquals("func_gamma", results.get(0).get("FUNCTION_NAME").getStringValue()); // null cat
    assertEquals(
        "func_delta", results.get(1).get("FUNCTION_NAME").getStringValue()); // cat_a, null schem
    assertEquals(
        "func_alpha",
        results.get(2).get("FUNCTION_NAME").getStringValue()); // cat_a, sch_z, alpha, spec
    assertEquals(
        "func_alpha",
        results.get(3).get("FUNCTION_NAME").getStringValue()); // cat_a, sch_z, alpha, spec_older
    assertEquals(
        "func_beta", results.get(4).get("FUNCTION_NAME").getStringValue()); // cat_a, sch_z, beta
    assertEquals("func_1", results.get(5).get("FUNCTION_NAME").getStringValue()); // cat_b
  }

  // Verifies the 18-field getTypeInfo() schema (TYPE_NAME, DATA_TYPE,
  // PRECISION, CASE_SENSITIVE, NUM_PREC_RADIX spot-checked by name).
  @Test
  public void testDefineGetTypeInfoSchema() {
    Schema schema = dbMetadata.defineGetTypeInfoSchema();
    assertNotNull("Schema should not be null", schema);
    FieldList fields = schema.getFields();
    assertEquals("Schema should have 18 fields", 18, fields.size());

    Field typeName = fields.get("TYPE_NAME");
    assertNotNull(typeName);
    assertEquals("TYPE_NAME", typeName.getName());
    assertEquals(StandardSQLTypeName.STRING, typeName.getType().getStandardType());
    assertEquals(Field.Mode.REQUIRED, typeName.getMode());

    Field dataType = fields.get("DATA_TYPE");
    assertNotNull(dataType);
    assertEquals("DATA_TYPE", dataType.getName());
    assertEquals(StandardSQLTypeName.INT64, dataType.getType().getStandardType());
    assertEquals(Field.Mode.REQUIRED, dataType.getMode());

    Field precision = fields.get("PRECISION");
    assertNotNull(precision);
    assertEquals("PRECISION", precision.getName());
    assertEquals(StandardSQLTypeName.INT64, precision.getType().getStandardType());
    assertEquals(Field.Mode.NULLABLE, precision.getMode());

    Field caseSensitive = fields.get("CASE_SENSITIVE");
    assertNotNull(caseSensitive);
    assertEquals("CASE_SENSITIVE", caseSensitive.getName());
    assertEquals(StandardSQLTypeName.BOOL, caseSensitive.getType().getStandardType());
    assertEquals(Field.Mode.REQUIRED, caseSensitive.getMode());

    Field numPrecRadix = fields.get("NUM_PREC_RADIX");
    assertNotNull(numPrecRadix);
    assertEquals("NUM_PREC_RADIX", numPrecRadix.getName());
    assertEquals(StandardSQLTypeName.INT64, numPrecRadix.getType().getStandardType());
    assertEquals(Field.Mode.NULLABLE, numPrecRadix.getMode());
  }

  // prepareGetTypeInfoRows() must produce 17 rows (one per supported BigQuery
  // type); INT64->BIGINT, BOOL->BOOLEAN, and STRING->NVARCHAR mappings are
  // verified along with precision/radix/literal-quoting details.
  @Test
  public void testPrepareGetTypeInfoRows() {
    Schema typeInfoSchema = dbMetadata.defineGetTypeInfoSchema();
    FieldList schemaFields = typeInfoSchema.getFields();
    List rows = dbMetadata.prepareGetTypeInfoRows(schemaFields);

    assertNotNull("Rows list should not be null", rows);
    assertEquals("Should have 17 rows for 17 types", 17, rows.size());

    // INT64 (should be BIGINT in JDBC)
    Optional int64RowOpt =
        rows.stream()
            .filter(row -> "INT64".equals(row.get("TYPE_NAME").getStringValue()))
            .findFirst();
    assertTrue("INT64 type info row should exist", int64RowOpt.isPresent());
    FieldValueList int64Row = int64RowOpt.get();
    assertEquals(String.valueOf(Types.BIGINT), int64Row.get("DATA_TYPE").getStringValue());
    assertEquals("19", int64Row.get("PRECISION").getStringValue());
    assertEquals(
        String.valueOf(DatabaseMetaData.typeNullable), int64Row.get("NULLABLE").getStringValue());
    assertEquals("0", int64Row.get("CASE_SENSITIVE").getStringValue());
    assertEquals("10", int64Row.get("NUM_PREC_RADIX").getStringValue());

    // BOOL (should be BOOLEAN in JDBC)
    Optional boolRowOpt =
        rows.stream()
            .filter(row -> "BOOL".equals(row.get("TYPE_NAME").getStringValue()))
            .findFirst();
    assertTrue("BOOL type info row should exist", boolRowOpt.isPresent());
    FieldValueList boolRow = boolRowOpt.get();
    assertEquals(String.valueOf(Types.BOOLEAN), boolRow.get("DATA_TYPE").getStringValue());
    assertEquals("1", boolRow.get("PRECISION").getStringValue());
    assertEquals(
        String.valueOf(DatabaseMetaData.typeNullable), boolRow.get("NULLABLE").getStringValue());
    assertEquals("0", boolRow.get("CASE_SENSITIVE").getStringValue()); // false
    assertTrue(boolRow.get("NUM_PREC_RADIX").isNull());

    // STRING (should be NVARCHAR in JDBC)
    Optional stringRowOpt =
        rows.stream()
            .filter(row -> "STRING".equals(row.get("TYPE_NAME").getStringValue()))
            .findFirst();
    assertTrue("STRING type info row should exist", stringRowOpt.isPresent());
    FieldValueList stringRow = stringRowOpt.get();
    assertEquals(String.valueOf(Types.NVARCHAR), stringRow.get("DATA_TYPE").getStringValue());
    assertTrue(stringRow.get("PRECISION").isNull()); // Precision is null for STRING
    assertEquals("'", stringRow.get("LITERAL_PREFIX").getStringValue());
    assertEquals("'", stringRow.get("LITERAL_SUFFIX").getStringValue());
    assertEquals("LENGTH", stringRow.get("CREATE_PARAMS").getStringValue());
    assertEquals(
        String.valueOf(DatabaseMetaData.typeNullable), stringRow.get("NULLABLE").getStringValue());
    assertEquals("1", stringRow.get("CASE_SENSITIVE").getStringValue()); // true
    assertTrue(stringRow.get("NUM_PREC_RADIX").isNull());
  }

  // End-to-end getTypeInfo(): 18 columns, 17 rows, INT64 row mapped to
  // BIGINT/precision 19, and overall ordering by DATA_TYPE.
  @Test
  public void testGetTypeInfo() throws SQLException {
    try (ResultSet rs = dbMetadata.getTypeInfo()) {
      assertNotNull("ResultSet from getTypeInfo() should not be null", rs);

      ResultSetMetaData rsmd = rs.getMetaData();
assertNotNull("ResultSetMetaData should not be null", rsmd);
      assertEquals("Should have 18 columns", 18, rsmd.getColumnCount());
      assertEquals("TYPE_NAME", rsmd.getColumnName(1));
      assertEquals("DATA_TYPE", rsmd.getColumnName(2));
      assertEquals("PRECISION", rsmd.getColumnName(3));

      List dataTypes = new ArrayList<>();
      int rowCount = 0;
      while (rs.next()) {
        rowCount++;
        dataTypes.add(rs.getInt("DATA_TYPE"));
        if ("INT64".equals(rs.getString("TYPE_NAME"))) {
          assertEquals(Types.BIGINT, rs.getInt("DATA_TYPE"));
          assertEquals(19, rs.getInt("PRECISION"));
        }
      }
      assertEquals("Should have 17 rows for 17 types", 17, rowCount);

      // Verify sorting by DATA_TYPE
      List sortedDataTypes = new ArrayList<>(dataTypes);
      Collections.sort(sortedDataTypes);
      assertEquals("Results should be sorted by DATA_TYPE", sortedDataTypes, dataTypes);
    }
  }

  // Verifies the 17-field getFunctionColumns() schema; positions 0, 3, 4, 14
  // and 16 are spot-checked.
  @Test
  public void testDefineGetFunctionColumnsSchema() {
    Schema schema = dbMetadata.defineGetFunctionColumnsSchema();
    assertNotNull(schema);
    FieldList fields = schema.getFields();
    assertEquals(17, fields.size());

    assertEquals("FUNCTION_CAT", fields.get(0).getName());
    assertEquals(StandardSQLTypeName.STRING, fields.get(0).getType().getStandardType());
    assertEquals(Field.Mode.NULLABLE, fields.get(0).getMode());

    assertEquals("COLUMN_NAME", fields.get(3).getName());
    assertEquals(StandardSQLTypeName.STRING, fields.get(3).getType().getStandardType());
    assertEquals(Field.Mode.REQUIRED, fields.get(3).getMode());

    assertEquals("COLUMN_TYPE", fields.get(4).getName());
    assertEquals(StandardSQLTypeName.INT64, fields.get(4).getType().getStandardType());
    assertEquals(Field.Mode.REQUIRED, fields.get(4).getMode());

    assertEquals("ORDINAL_POSITION", fields.get(14).getName());
    assertEquals(StandardSQLTypeName.INT64, fields.get(14).getType().getStandardType());
    assertEquals(Field.Mode.REQUIRED, fields.get(14).getMode());

    assertEquals("SPECIFIC_NAME", fields.get(16).getName());
    assertEquals(StandardSQLTypeName.STRING, fields.get(16).getType().getStandardType());
    assertEquals(Field.Mode.REQUIRED, fields.get(16).getMode());
  }

  // createFunctionColumnRow() for an IN parameter of STRING type: checks all
  // 17 positional values, including the NVARCHAR JDBC mapping and the
  // null/empty placeholders for precision, length, scale, radix and remarks.
  @Test
  public void testCreateFunctionColumnRow() {
    StandardSQLDataType stringType = mockStandardSQLDataType(StandardSQLTypeName.STRING);
    List row =
        dbMetadata.createFunctionColumnRow(
            "cat",
            "sch",
            "func",
            "func_spec",
            "param_in",
            DatabaseMetaData.functionColumnIn,
            stringType,
            1);

    assertEquals(17, row.size());
    assertEquals("cat", row.get(0).getStringValue());
    assertEquals("sch", row.get(1).getStringValue());
    assertEquals("func", row.get(2).getStringValue());
    assertEquals("param_in", row.get(3).getStringValue());
    assertEquals(String.valueOf(DatabaseMetaData.functionColumnIn), row.get(4).getStringValue());
    assertEquals(String.valueOf(Types.NVARCHAR), row.get(5).getStringValue()); // DATA_TYPE
    assertEquals("NVARCHAR", row.get(6).getStringValue()); // TYPE_NAME
    assertTrue(row.get(7).isNull()); // PRECISION
    assertTrue(row.get(8).isNull()); // LENGTH
    assertTrue(row.get(9).isNull()); // SCALE
    assertTrue(row.get(10).isNull()); // RADIX
    assertEquals(
        String.valueOf(DatabaseMetaData.functionNullableUnknown),
        row.get(11).getStringValue()); // NULLABLE
    assertTrue(row.get(12).isNull()); // REMARKS
    assertTrue(row.get(13).isNull()); // CHAR_OCTET_LENGTH (should be columnSize)
    assertEquals("1", row.get(14).getStringValue()); // ORDINAL_POSITION
    assertEquals("", row.get(15).getStringValue()); // IS_NULLABLE
    assertEquals("func_spec", row.get(16).getStringValue()); // SPECIFIC_NAME
  }

  // Scalar function with two IN arguments and no return table: expects one
  // functionColumnIn row per argument, ordinal positions 1 and 2.
  @Test
  public void testProcessFunctionParametersAndReturnValue_ScalarFunctionWithArgs() {
    Schema resultSchema = dbMetadata.defineGetFunctionColumnsSchema();
    FieldList resultFields = resultSchema.getFields();
    List collectedResults = Collections.synchronizedList(new ArrayList<>());

    RoutineArgument arg1 = mockRoutineArgument("in_str", StandardSQLTypeName.STRING, "IN");
    RoutineArgument arg2 = mockRoutineArgument("in_int", StandardSQLTypeName.INT64, "IN");
    Routine scalarFunc =
        mockBigQueryRoutineWithArgs(
            "cat", "ds", "my_scalar", "SCALAR_FUNCTION", "desc", Arrays.asList(arg1, arg2));
    when(scalarFunc.getReturnTableType()).thenReturn(null); // No return table for scalar

    dbMetadata.processFunctionParametersAndReturnValue(
        scalarFunc, null, collectedResults, resultFields);

    assertEquals(2, collectedResults.size());
    // First argument
    assertEquals("in_str", collectedResults.get(0).get("COLUMN_NAME").getStringValue());
    assertEquals(
        String.valueOf(DatabaseMetaData.functionColumnIn),
        collectedResults.get(0).get("COLUMN_TYPE").getStringValue());
    assertEquals("1", collectedResults.get(0).get("ORDINAL_POSITION").getStringValue());
    // Second argument
    assertEquals("in_int", collectedResults.get(1).get("COLUMN_NAME").getStringValue());
    assertEquals(
        String.valueOf(DatabaseMetaData.functionColumnIn),
        collectedResults.get(1).get("COLUMN_TYPE").getStringValue());
    assertEquals("2", collectedResults.get(1).get("ORDINAL_POSITION").getStringValue());
  }

  // Table function with no arguments and a two-column return table: expects
  // one functionColumnResult row per return column, ordinals 1 and 2.
  @Test
  public void testProcessFunctionParametersAndReturnValue_TableFunctionWithReturnTable() {
    Schema resultSchema = dbMetadata.defineGetFunctionColumnsSchema();
    FieldList resultFields = resultSchema.getFields();
    List collectedResults = Collections.synchronizedList(new ArrayList<>());

    StandardSQLField returnCol1 = mockStandardSQLField("out_id", StandardSQLTypeName.INT64);
    StandardSQLField returnCol2 = mockStandardSQLField("out_val", StandardSQLTypeName.STRING);
    StandardSQLTableType returnTable =
        mockStandardSQLTableType(Arrays.asList(returnCol1, returnCol2));

    Routine tableFunc =
        mockBigQueryRoutineWithArgs(
            "cat", "ds", "my_table_func", "TABLE_FUNCTION", "desc", Collections.emptyList());
    when(tableFunc.getReturnTableType()).thenReturn(returnTable);

    dbMetadata.processFunctionParametersAndReturnValue(
        tableFunc, null, collectedResults, resultFields);

    assertEquals(2, collectedResults.size());
    // First return column
    assertEquals("out_id", collectedResults.get(0).get("COLUMN_NAME").getStringValue());
    assertEquals(
        String.valueOf(DatabaseMetaData.functionColumnResult),
        collectedResults.get(0).get("COLUMN_TYPE").getStringValue());
    assertEquals("1", collectedResults.get(0).get("ORDINAL_POSITION").getStringValue());
    // Second return column
    assertEquals("out_val", collectedResults.get(1).get("COLUMN_NAME").getStringValue());
    assertEquals(
        String.valueOf(DatabaseMetaData.functionColumnResult),
        collectedResults.get(1).get("COLUMN_TYPE").getStringValue());
    assertEquals("2", collectedResults.get(1).get("ORDINAL_POSITION").getStringValue());
  }

  // A column-name regex filter ("id_.*") must keep only matching arguments and
  // return columns; the assertions show return columns are emitted before
  // arguments in the collected results.
  @Test
  public void testProcessFunctionParametersAndReturnValue_ColumnNameFilter() {
    Schema resultSchema = dbMetadata.defineGetFunctionColumnsSchema();
    FieldList resultFields = resultSchema.getFields();
    List collectedResults = Collections.synchronizedList(new ArrayList<>());
    Pattern columnNamePattern = Pattern.compile("id_.*"); // Match columns starting with "id_"

    RoutineArgument arg1 = mockRoutineArgument("id_arg", StandardSQLTypeName.INT64, "IN");
    RoutineArgument arg2 = mockRoutineArgument("name_arg", StandardSQLTypeName.STRING, "IN");
    StandardSQLField returnCol1 = mockStandardSQLField("id_return", StandardSQLTypeName.BOOL);
    StandardSQLField returnCol2 = mockStandardSQLField("value_return", StandardSQLTypeName.FLOAT64);
    StandardSQLTableType returnTable =
        mockStandardSQLTableType(Arrays.asList(returnCol1, returnCol2));

    Routine tableFunc =
        mockBigQueryRoutineWithArgs(
            "cat", "ds", "filter_func", "TABLE_FUNCTION", "desc", Arrays.asList(arg1, arg2));
    when(tableFunc.getReturnTableType()).thenReturn(returnTable);

    dbMetadata.processFunctionParametersAndReturnValue(
        tableFunc, columnNamePattern, collectedResults, resultFields);

    assertEquals(2, collectedResults.size()); // Should match id_arg and id_return
    assertEquals("id_return", collectedResults.get(0).get("COLUMN_NAME").getStringValue());
    assertEquals("id_arg", collectedResults.get(1).get("COLUMN_NAME").getStringValue());
  }

  // Verifies all 4 fields of the getClientInfoProperties() schema by name.
  @Test
  public void testDefineGetClientInfoPropertiesSchema() {
    Schema schema = dbMetadata.defineGetClientInfoPropertiesSchema();
    assertNotNull("Schema should not be null", schema);
    FieldList fields = schema.getFields();
    assertEquals("Schema should have 4 fields", 4, fields.size());

    Field nameField = fields.get("NAME");
    assertNotNull(nameField);
    assertEquals("NAME", nameField.getName());
    assertEquals(StandardSQLTypeName.STRING, nameField.getType().getStandardType());
    assertEquals(Field.Mode.REQUIRED, nameField.getMode());

    Field maxLenField = fields.get("MAX_LEN");
    assertNotNull(maxLenField);
    assertEquals("MAX_LEN", maxLenField.getName());
    assertEquals(StandardSQLTypeName.INT64, maxLenField.getType().getStandardType());
    assertEquals(Field.Mode.NULLABLE, maxLenField.getMode());

    Field defaultValueField = fields.get("DEFAULT_VALUE");
    assertNotNull(defaultValueField);
    assertEquals("DEFAULT_VALUE", defaultValueField.getName());
    assertEquals(StandardSQLTypeName.STRING, defaultValueField.getType().getStandardType());
    assertEquals(Field.Mode.NULLABLE, defaultValueField.getMode());

    Field descriptionField = fields.get("DESCRIPTION");
    assertNotNull(descriptionField);
    assertEquals("DESCRIPTION", descriptionField.getName());
    assertEquals(StandardSQLTypeName.STRING, descriptionField.getType().getStandardType());
    assertEquals(Field.Mode.NULLABLE, descriptionField.getMode());
  }

  // getClientInfoProperties(): result set and its metadata must be non-null
  // (assertions continue past this chunk).
  @Test
  public void testGetClientInfoProperties() throws SQLException {
    try (ResultSet rs = dbMetadata.getClientInfoProperties()) {
      assertNotNull("ResultSet from getClientInfoProperties() should not be null", rs);

      ResultSetMetaData rsmd = rs.getMetaData();
      assertNotNull("ResultSetMetaData should not be null", rsmd);
assertEquals("Should have 4 columns", 4, rsmd.getColumnCount()); + assertEquals("NAME", rsmd.getColumnName(1)); + assertEquals(Types.NVARCHAR, rsmd.getColumnType(1)); + assertEquals("MAX_LEN", rsmd.getColumnName(2)); + assertEquals(Types.BIGINT, rsmd.getColumnType(2)); + assertEquals("DEFAULT_VALUE", rsmd.getColumnName(3)); + assertEquals(Types.NVARCHAR, rsmd.getColumnType(3)); + assertEquals("DESCRIPTION", rsmd.getColumnName(4)); + assertEquals(Types.NVARCHAR, rsmd.getColumnType(4)); + + List> actualRows = new ArrayList<>(); + while (rs.next()) { + Map row = new HashMap<>(); + row.put("NAME", rs.getString("NAME")); + row.put("MAX_LEN", rs.getLong("MAX_LEN")); + row.put("DEFAULT_VALUE", rs.getObject("DEFAULT_VALUE")); + row.put("DESCRIPTION", rs.getString("DESCRIPTION")); + actualRows.add(row); + } + + assertEquals("Should return 3 client info properties", 3, actualRows.size()); + + Map appNameRow = actualRows.get(0); + assertEquals("ApplicationName", appNameRow.get("NAME")); + assertEquals(25L, appNameRow.get("MAX_LEN")); + assertNull(appNameRow.get("DEFAULT_VALUE")); + assertEquals( + "The name of the application currently utilizing the connection.", + appNameRow.get("DESCRIPTION")); + + Map clientHostnameRow = actualRows.get(1); + assertEquals("ClientHostname", clientHostnameRow.get("NAME")); + + Map clientUserRow = actualRows.get(2); + assertEquals("ClientUser", clientUserRow.get("NAME")); + } + } + + @Test + public void testDefineGetCatalogsSchema() { + Schema schema = dbMetadata.defineGetCatalogsSchema(); + assertNotNull("Schema should not be null", schema); + FieldList fields = schema.getFields(); + assertEquals("Should have one column", 1, fields.size()); + + Field tableCatField = fields.get("TABLE_CAT"); + assertNotNull("TABLE_CAT field should exist", tableCatField); + assertEquals("Field name should be TABLE_CAT", "TABLE_CAT", tableCatField.getName()); + assertEquals( + "Field type should be STRING", + StandardSQLTypeName.STRING, + 
tableCatField.getType().getStandardType()); + assertEquals("Field mode should be REQUIRED", Field.Mode.REQUIRED, tableCatField.getMode()); + } + + @Test + public void testPrepareGetCatalogsRows() { + Schema catalogsSchema = dbMetadata.defineGetCatalogsSchema(); + FieldList schemaFields = catalogsSchema.getFields(); + + // Test with a valid catalog name + List testCatalogName = new ArrayList<>(); + testCatalogName.add("test_catalog"); + + List rowsWithCatalog = + dbMetadata.prepareGetCatalogsRows(schemaFields, testCatalogName); + + assertNotNull("Rows list should not be null when catalog name is provided", rowsWithCatalog); + assertEquals("Should have one row when a catalog name is provided", 1, rowsWithCatalog.size()); + FieldValueList row = rowsWithCatalog.get(0); + assertEquals("Row should have 1 field value", 1, row.size()); + assertFalse("FieldValue in row should not be SQL NULL", row.get(0).isNull()); + assertEquals( + "TABLE_CAT should match the provided catalog name", + testCatalogName.get(0), + row.get(0).getStringValue()); + + // Test with empty catalog name list + List testEmptyCatalogList = new ArrayList<>(); + List rowsWithNullCatalog = + dbMetadata.prepareGetCatalogsRows(schemaFields, testEmptyCatalogList); + assertNotNull("Rows list should not be null when catalog name is null", rowsWithNullCatalog); + assertTrue("Should have zero rows when catalog name is null", rowsWithNullCatalog.isEmpty()); + } + + @Test + public void testGetSchemas_NoArgs_DelegatesCorrectly() { + BigQueryDatabaseMetaData spiedDbMetadata = spy(dbMetadata); + ResultSet mockResultSet = mock(ResultSet.class); + doReturn(mockResultSet).when(spiedDbMetadata).getSchemas(null, null); + + ResultSet rs = spiedDbMetadata.getSchemas(); + + assertSame( + "The returned ResultSet should be the one from the two-argument method", mockResultSet, rs); + verify(spiedDbMetadata, times(1)).getSchemas(null, null); + } + + // Non-Resultset DatabaseMetadata tests + + @Test + public void 
testIdentifierQuoteString() { + String actual = dbMetadata.getIdentifierQuoteString(); + assertEquals(BigQueryDatabaseMetaData.GOOGLE_SQL_QUOTED_IDENTIFIER, actual); + } + + @Test + public void testSQLKeyWords() { + String actual = dbMetadata.getSQLKeywords(); + assertEquals(BigQueryDatabaseMetaData.GOOGLE_SQL_RESERVED_KEYWORDS, actual); + } + + @Test + public void testNumericFunctions() { + String actual = dbMetadata.getNumericFunctions(); + assertEquals(BigQueryDatabaseMetaData.GOOGLE_SQL_NUMERIC_FNS, actual); + } + + @Test + public void testStringFunctions() { + String actual = dbMetadata.getStringFunctions(); + assertEquals(BigQueryDatabaseMetaData.GOOGLE_SQL_STRING_FNS, actual); + } + + @Test + public void testTimeAndDateFunctions() { + String actual = dbMetadata.getTimeDateFunctions(); + assertEquals(BigQueryDatabaseMetaData.GOOGLE_SQL_TIME_DATE_FNS, actual); + } + + @Test + public void testSystemFunctions() { + String actual = dbMetadata.getSystemFunctions(); + assertNull(actual); + } + + @Test + public void testSearchStringEscape() { + String actual = dbMetadata.getSearchStringEscape(); + assertEquals(BigQueryDatabaseMetaData.GOOGLE_SQL_ESCAPE, actual); + } + + @Test + public void testExtraNameChars() { + String actual = dbMetadata.getExtraNameCharacters(); + assertNull(actual); + } + + @Test + public void testCatalogSeparator() { + String actual = dbMetadata.getCatalogSeparator(); + assertEquals(BigQueryDatabaseMetaData.GOOGLE_SQL_CATALOG_SEPARATOR, actual); + } + + @Test + public void testMaxCharLiteralLength() { + int actual = dbMetadata.getMaxCharLiteralLength(); + assertEquals(0, actual); + } + + @Test + public void testMaxBinaryLiteralLength() { + int actual = dbMetadata.getMaxBinaryLiteralLength(); + assertEquals(0, actual); + } + + @Test + public void testMaxColumnNameLength() { + int actual = dbMetadata.getMaxColumnNameLength(); + assertEquals(BigQueryDatabaseMetaData.GOOGLE_SQL_MAX_COL_NAME_LEN, actual); + } + + @Test + public void 
testMaxColumnsInTable() { + int actual = dbMetadata.getMaxColumnsInTable(); + assertEquals(BigQueryDatabaseMetaData.GOOGLE_SQL_MAX_COLS_PER_TABLE, actual); + } + + @Test + public void testMaxColumnsInSelect() { + int actual = dbMetadata.getMaxColumnsInSelect(); + assertEquals(0, actual); + } + + @Test + public void testMaxColumnsInGroupBy() { + int actual = dbMetadata.getMaxColumnsInGroupBy(); + assertEquals(0, actual); + } + + @Test + public void testMaxColumnsInOrderBy() { + int actual = dbMetadata.getMaxColumnsInOrderBy(); + assertEquals(0, actual); + } + + @Test + public void testMaxColumnsInIndex() { + int actual = dbMetadata.getMaxColumnsInIndex(); + assertEquals(0, actual); + } + + @Test + public void testSupportsResultSetHoldabilitySupported() { + assertTrue(dbMetadata.supportsResultSetHoldability(ResultSet.CLOSE_CURSORS_AT_COMMIT)); + } + + @Test + public void testSupportsResultSetHoldabilityNotSupported() { + assertFalse(dbMetadata.supportsResultSetHoldability(ResultSet.HOLD_CURSORS_OVER_COMMIT)); + } + + @Test + public void testSupportsResultSetHoldabilityInvalid() { + assertFalse(dbMetadata.supportsResultSetHoldability(-1)); + } + + @Test + public void testResultSetHoldability() { + int actual = dbMetadata.getResultSetHoldability(); + assertEquals(ResultSet.CLOSE_CURSORS_AT_COMMIT, actual); + } + + @Test + public void testDatabaseMajorVersion() { + int actual = dbMetadata.getDatabaseMajorVersion(); + assertEquals(2, actual); + } + + @Test + public void testDatabaseMinorVersion() { + int actual = dbMetadata.getDatabaseMinorVersion(); + assertEquals(0, actual); + } + + @Test + public void testAllProceduresAreCallable() throws SQLException { + assertFalse(dbMetadata.allProceduresAreCallable()); + } + + @Test + public void testAllTablesAreSelectable() throws SQLException { + assertTrue(dbMetadata.allTablesAreSelectable()); + } + + @Test + public void testGetDriverVersionInfoFromProperties() { + Properties props = new Properties(); + String 
expectedVersionString = "0.0.0"; + int expectedMajor = 0; + int expectedMinor = 0; + + try (InputStream input = + getClass().getResourceAsStream("/com/google/cloud/bigquery/jdbc/dependencies.properties")) { + if (input != null) { + props.load(input); + expectedVersionString = props.getProperty("version.jdbc"); + if (expectedVersionString != null) { + String[] parts = expectedVersionString.split("\\."); + expectedMajor = Integer.parseInt(parts[0]); + expectedMinor = Integer.parseInt(parts[1].replaceAll("[^0-9].*", "")); + } + } + } catch (IOException | NumberFormatException e) { + fail( + "Error reading or parsing dependencies.properties for testing driver version: " + + e.getMessage()); + } + assertEquals(expectedVersionString, dbMetadata.getDriverVersion()); + assertEquals(expectedMajor, dbMetadata.getDriverMajorVersion()); + assertEquals(expectedMinor, dbMetadata.getDriverMinorVersion()); + } + + @Test + public void testSupportsUnion() throws SQLException { + assertTrue(dbMetadata.supportsUnion()); + } + + @Test + public void testSupportsUnionAll() throws SQLException { + assertTrue(dbMetadata.supportsUnionAll()); + } + + @Test + public void testGetMaxConnections() throws SQLException { + assertEquals(0, dbMetadata.getMaxConnections()); + } + + @Test + public void testGetMaxCursorNameLength() throws SQLException { + assertEquals(0, dbMetadata.getMaxCursorNameLength()); + } + + @Test + public void testGetMaxIndexLength() throws SQLException { + assertEquals(0, dbMetadata.getMaxIndexLength()); + } + + @Test + public void testGetMaxSchemaNameLength() throws SQLException { + assertEquals(1024, dbMetadata.getMaxSchemaNameLength()); + } + + @Test + public void testGetMaxProcedureNameLength() throws SQLException { + assertEquals(256, dbMetadata.getMaxProcedureNameLength()); + } + + @Test + public void testGetMaxCatalogNameLength() throws SQLException { + assertEquals(30, dbMetadata.getMaxCatalogNameLength()); + } + + @Test + public void testGetMaxRowSize() throws 
SQLException { + assertEquals(0, dbMetadata.getMaxRowSize()); + } + + @Test + public void testDoesMaxRowSizeIncludeBlobs() { + assertFalse(dbMetadata.doesMaxRowSizeIncludeBlobs()); + } + + @Test + public void testGetMaxStatementLength() throws SQLException { + assertEquals(0, dbMetadata.getMaxStatementLength()); + } + + @Test + public void testGetMaxStatements() throws SQLException { + assertEquals(0, dbMetadata.getMaxStatements()); + } + + @Test + public void testGetMaxTableNameLength() throws SQLException { + assertEquals(1024, dbMetadata.getMaxTableNameLength()); + } + + @Test + public void testGetMaxTablesInSelect() throws SQLException { + assertEquals(1000, dbMetadata.getMaxTablesInSelect()); + } + + @Test + public void testGetDefaultTransactionIsolation() throws SQLException { + assertEquals(8, dbMetadata.getDefaultTransactionIsolation()); + } + + @Test + public void testSupportsResultSetType() throws SQLException { + assertTrue(dbMetadata.supportsResultSetType(ResultSet.TYPE_FORWARD_ONLY)); + assertFalse(dbMetadata.supportsResultSetType(ResultSet.TYPE_SCROLL_INSENSITIVE)); + assertFalse(dbMetadata.supportsResultSetType(ResultSet.TYPE_SCROLL_SENSITIVE)); + } + + @Test + public void testSupportsResultSetConcurrency() throws SQLException { + assertTrue( + dbMetadata.supportsResultSetConcurrency( + ResultSet.TYPE_FORWARD_ONLY, ResultSet.CONCUR_READ_ONLY)); + assertFalse( + dbMetadata.supportsResultSetConcurrency( + ResultSet.TYPE_FORWARD_ONLY, ResultSet.CONCUR_UPDATABLE)); + assertFalse( + dbMetadata.supportsResultSetConcurrency( + ResultSet.TYPE_SCROLL_INSENSITIVE, ResultSet.CONCUR_READ_ONLY)); + assertFalse( + dbMetadata.supportsResultSetConcurrency( + ResultSet.TYPE_SCROLL_SENSITIVE, ResultSet.CONCUR_UPDATABLE)); + } + + @Test + public void testGetSQLStateType() throws SQLException { + assertEquals(DatabaseMetaData.sqlStateSQL, dbMetadata.getSQLStateType()); + } +} diff --git 
a/google-cloud-bigquery-jdbc/src/test/java/com/google/cloud/bigquery/jdbc/BigQueryDefaultCoercionsTest.java b/google-cloud-bigquery-jdbc/src/test/java/com/google/cloud/bigquery/jdbc/BigQueryDefaultCoercionsTest.java new file mode 100644 index 000000000..d9cc0efb7 --- /dev/null +++ b/google-cloud-bigquery-jdbc/src/test/java/com/google/cloud/bigquery/jdbc/BigQueryDefaultCoercionsTest.java @@ -0,0 +1,221 @@ +/* + * Copyright 2023 Google LLC + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package com.google.cloud.bigquery.jdbc; + +import static com.google.cloud.bigquery.jdbc.BigQueryTypeCoercer.INSTANCE; +import static com.google.common.truth.Truth.assertThat; + +import java.math.BigDecimal; +import java.math.BigInteger; +import java.util.Arrays; +import java.util.Collection; +import org.junit.Test; +import org.junit.runner.RunWith; +import org.junit.runners.Parameterized; +import org.junit.runners.Parameterized.Parameters; + +@RunWith(Parameterized.class) +public class BigQueryDefaultCoercionsTest { + + private final BigQueryTypeCoercer bigQueryTypeCoercer; + + public BigQueryDefaultCoercionsTest(String label, BigQueryTypeCoercer bigQueryTypeCoercer) { + this.bigQueryTypeCoercer = bigQueryTypeCoercer; + } + + @Parameters(name = "{index}: {0}") + public static Collection data() { + return Arrays.asList( + new Object[][] { + {"default BigQueryTypeCoercer", INSTANCE}, + {"customizable BigQueryTypeCoercer", BigQueryTypeCoercer.builder().build()} + }); + } + + @Test + public void stringToBoolean() { + assertThat(bigQueryTypeCoercer.coerceTo(Boolean.class, "true")).isTrue(); + assertThat(bigQueryTypeCoercer.coerceTo(Boolean.class, "false")).isFalse(); + } + + @Test + public void stringToInteger() { + assertThat(bigQueryTypeCoercer.coerceTo(Integer.class, "3452148")).isEqualTo(3452148); + } + + @Test + public void stringToBigInteger() { + assertThat(bigQueryTypeCoercer.coerceTo(BigInteger.class, "2147483647456")) + .isEqualTo(new BigInteger("2147483647456")); + } + + @Test + public void stringToLong() { + assertThat(bigQueryTypeCoercer.coerceTo(Long.class, "2147483647456")) + .isEqualTo(Long.valueOf("2147483647456")); + } + + @Test + public void stringToDouble() { + assertThat(bigQueryTypeCoercer.coerceTo(Double.class, "2147483647456.56684593495")) + .isEqualTo(Double.valueOf("2147483647456.56684593495")); + } + + @Test + public void stringToBigDecimal() { + assertThat(bigQueryTypeCoercer.coerceTo(BigDecimal.class, "2147483647456.56684593495")) + 
.isEqualTo(new BigDecimal("2147483647456.56684593495")); + } + + @Test + public void booleanToString() { + assertThat(bigQueryTypeCoercer.coerceTo(String.class, true)).isEqualTo("true"); + assertThat(bigQueryTypeCoercer.coerceTo(String.class, false)).isEqualTo("false"); + } + + @Test + public void booleanToInteger() { + assertThat(bigQueryTypeCoercer.coerceTo(Integer.class, true)).isEqualTo(1); + assertThat(bigQueryTypeCoercer.coerceTo(Integer.class, false)).isEqualTo(0); + } + + @Test + public void longToInteger() { + assertThat(bigQueryTypeCoercer.coerceTo(Integer.class, 2147483647L)).isEqualTo(2147483647); + } + + @Test + public void longToShort() { + assertThat(bigQueryTypeCoercer.coerceTo(Short.class, 32000L)).isEqualTo((short) 32000); + } + + @Test + public void longToByte() { + assertThat(bigQueryTypeCoercer.coerceTo(Byte.class, 127L)).isEqualTo((byte) 127); + } + + @Test + public void longToDouble() { + assertThat(bigQueryTypeCoercer.coerceTo(Double.class, 2147483647456L)) + .isEqualTo(Double.valueOf("2147483647456")); + } + + @Test + public void longToString() { + assertThat(bigQueryTypeCoercer.coerceTo(String.class, 2147483647456L)) + .isEqualTo("2147483647456"); + } + + @Test + public void doubleToFloat() { + assertThat(bigQueryTypeCoercer.coerceTo(Float.class, Double.valueOf("4567.213245"))) + .isEqualTo(Float.valueOf("4567.213245")); + } + + @Test + public void doubleToLong() { + assertThat(bigQueryTypeCoercer.coerceTo(Long.class, Double.valueOf("2147483647456.213245"))) + .isEqualTo(2147483647456L); + } + + @Test + public void doubleToInteger() { + assertThat(bigQueryTypeCoercer.coerceTo(Integer.class, Double.valueOf("21474836.213245"))) + .isEqualTo(21474836); + } + + @Test + public void doubleToBigDecimal() { + assertThat(bigQueryTypeCoercer.coerceTo(BigDecimal.class, Double.valueOf("21474836.213245"))) + .isEqualTo(new BigDecimal("21474836.213245")); + } + + @Test + public void doubleToString() { + 
assertThat(bigQueryTypeCoercer.coerceTo(String.class, Double.valueOf("21474836.213245"))) + .isEqualTo("2.1474836213245E7"); + } + + @Test + public void floatToInteger() { + assertThat(bigQueryTypeCoercer.coerceTo(Integer.class, 62356.45f)).isEqualTo(62356); + } + + @Test + public void floatToDouble() { + assertThat(bigQueryTypeCoercer.coerceTo(Double.class, 62356.45f)) + .isEqualTo(Double.valueOf(62356.45f)); + } + + @Test + public void floatToString() { + assertThat(bigQueryTypeCoercer.coerceTo(String.class, 62356.45f)).isEqualTo("62356.45"); + } + + @Test + public void bigIntegerToLong() { + assertThat(bigQueryTypeCoercer.coerceTo(Long.class, new BigInteger("2147483647"))) + .isEqualTo(2147483647L); + } + + @Test + public void bigIntegerToBigDecimal() { + assertThat(bigQueryTypeCoercer.coerceTo(BigDecimal.class, new BigInteger("2147483647"))) + .isEqualTo(new BigDecimal("2147483647")); + } + + @Test + public void bigIntegerToString() { + assertThat(bigQueryTypeCoercer.coerceTo(String.class, new BigInteger("2147483647"))) + .isEqualTo("2147483647"); + } + + @Test + public void bigDecimalToDouble() { + assertThat(bigQueryTypeCoercer.coerceTo(Double.class, new BigDecimal("2147483647.74356"))) + .isEqualTo(2147483647.74356); + } + + @Test + public void bigDecimalToBigInteger() { + assertThat(bigQueryTypeCoercer.coerceTo(BigInteger.class, new BigDecimal("2147483647.74356"))) + .isEqualTo(new BigInteger("2147483647")); + } + + @Test + public void bigDecimalToInteger() { + assertThat(bigQueryTypeCoercer.coerceTo(Integer.class, new BigDecimal("2147483647.74356"))) + .isEqualTo(2147483647); + } + + @Test + public void bigDecimalToLong() { + assertThat(bigQueryTypeCoercer.coerceTo(Long.class, new BigDecimal("2147483647.74356"))) + .isEqualTo(2147483647L); + } + + @Test + public void bigDecimalToString() { + assertThat(bigQueryTypeCoercer.coerceTo(String.class, new BigDecimal("2147483647.74356"))) + .isEqualTo("2147483647.74356"); + } + + @Test + public void 
nullToBoolean() { + assertThat(bigQueryTypeCoercer.coerceTo(Boolean.class, null)).isFalse(); + } +} diff --git a/google-cloud-bigquery-jdbc/src/test/java/com/google/cloud/bigquery/jdbc/BigQueryDriverTest.java b/google-cloud-bigquery-jdbc/src/test/java/com/google/cloud/bigquery/jdbc/BigQueryDriverTest.java new file mode 100644 index 000000000..125de54df --- /dev/null +++ b/google-cloud-bigquery-jdbc/src/test/java/com/google/cloud/bigquery/jdbc/BigQueryDriverTest.java @@ -0,0 +1,97 @@ +/* + * Copyright 2023 Google LLC + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package com.google.cloud.bigquery.jdbc; + +import static com.google.common.truth.Truth.assertThat; +import static org.junit.Assert.assertThrows; + +import java.sql.Connection; +import java.sql.DriverPropertyInfo; +import java.sql.SQLException; +import java.util.Properties; +import java.util.logging.Logger; +import org.junit.Before; +import org.junit.Test; + +public class BigQueryDriverTest { + + static BigQueryDriver bigQueryDriver; + + @Before + public void setUp() { + bigQueryDriver = BigQueryDriver.getRegisteredDriver(); + } + + @Test + public void testInvalidURLDoesNotConnect() { + assertThrows(IllegalArgumentException.class, () -> bigQueryDriver.connect("badURL.com", null)); + } + + @Test + public void testValidURLDoesConnect() throws SQLException { + Connection connection = + bigQueryDriver.connect( + "jdbc:bigquery://https://www.googleapis.com/bigquery/v2:443;" + + "OAuthType=2;ProjectId=MyBigQueryProject;" + + "OAuthAccessToken=redactedToken;OAuthClientId=redactedToken;" + + "OAuthClientSecret=redactedToken;", + new Properties()); + assertThat(connection.isClosed()).isFalse(); + } + + @Test + public void testInvalidURLInAcceptsURLReturnsFalse() throws SQLException { + assertThat(bigQueryDriver.acceptsURL("badURL.com")).isFalse(); + } + + @Test + public void testValidURLInAcceptsURLReturnsTrue() throws SQLException { + assertThat(bigQueryDriver.acceptsURL("jdbc:bigquery:https://google.com:8080;projectId=123456")) + .isTrue(); + } + + @Test + public void testGetPropertyInfoReturnsValidProperties() { + DriverPropertyInfo[] res = + bigQueryDriver.getPropertyInfo( + "jdbc:bigquery:https://google.com:8080;projectId=123456;OAuthType=3", new Properties()); + int i = 0; + for (BigQueryConnectionProperty prop : BigQueryJdbcUrlUtility.VALID_PROPERTIES) { + assertThat(res[i].name).isEqualTo(prop.getName()); + i++; + } + } + + @Test + public void testGetMajorVersionMatchesDriverMajorVersion() { + assertThat(bigQueryDriver.getMajorVersion()).isEqualTo(0); + } + + 
@Test + public void testGetMinorVersionMatchesDriverMinorVersion() { + assertThat(bigQueryDriver.getMinorVersion()).isEqualTo(1); + } + + @Test + public void testGetParentLoggerReturnsLogger() { + assertThat(bigQueryDriver.getParentLogger()).isInstanceOf(Logger.class); + } + + @Test + public void testJDBCCompliantReturnsFalse() { + assertThat(bigQueryDriver.jdbcCompliant()).isFalse(); + } +} diff --git a/google-cloud-bigquery-jdbc/src/test/java/com/google/cloud/bigquery/jdbc/BigQueryJdbcBaseTest.java b/google-cloud-bigquery-jdbc/src/test/java/com/google/cloud/bigquery/jdbc/BigQueryJdbcBaseTest.java new file mode 100644 index 000000000..616f3bab9 --- /dev/null +++ b/google-cloud-bigquery-jdbc/src/test/java/com/google/cloud/bigquery/jdbc/BigQueryJdbcBaseTest.java @@ -0,0 +1,82 @@ +/* + * Copyright 2025 Google LLC + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */
+
+package com.google.cloud.bigquery.jdbc;
+
+import com.google.cloud.bigquery.jdbc.utils.URIBuilder;
+
+/**
+ * Shared base class for the JDBC unit tests. Provides a throwaway PKCS#8 private key and
+ * connection-URI builders covering every supported OAuthType (0-4).
+ */
+public class BigQueryJdbcBaseTest {
+
+  // This is a fake pkcs8 key generated specifically for unittests
+  protected static final String fake_pkcs8_key =
+      "-----BEGIN PRIVATE KEY-----\n"
+          + //
+          "MIIBUwIBADANBgkqhkiG9w0BAQEFAASCAT0wggE5AgEAAkEAnt6w5AMZBvOecsJ9\n"
+          + //
+          "4TeVz+GpAtBnTqkxWfxLJykkvb+V/3IhXr5Zw40y47RdoRly/QDFJz3Ac+nmwCSP\n"
+          + //
+          "8QW3GQIDAQABAkBPmdrd1COFFSnN7F9wKg65QyMQ0uUAR8v/f2cUbwwGuhwdMuGZ\n"
+          + //
+          "DPwgVZySxFKort7TfPru6NzbACL3EFAl9y9RAiEA7XPq5Tu+LOw4/CZFABykguBV\n"
+          + //
+          "8rYC+F72+GqkhvlGhZUCIQCrR2/zGIKqJSTKfQhKOteP7cx5dWrumHYNuC5InOGC\n"
+          + //
+          "dQIgM6bzgcntJHh+LNtmRw/z+UQzbgiJvN1re7426+VtocECIE7ejFxicviqNfDP\n"
+          + //
+          "9ltIES8Dj152hRDtP589qoJhSy5pAiAJot/kBQD8yFYMU1X02oi+6f8QqXxcHwZX\n"
+          + //
+          "2wK1Zawz/A==\n"
+          + //
+          "-----END PRIVATE KEY-----";
+
+  /** Base JDBC URI with no authentication properties appended. */
+  protected static URIBuilder getBaseUri() {
+    return new URIBuilder("jdbc:bigquery://https://www.googleapis.com/bigquery/v2:443;");
+  }
+
+  /** Base URI plus an explicit OAuthType. */
+  protected static URIBuilder getBaseUri(int authType) {
+    return getBaseUri().append("OAuthType", authType);
+  }
+
+  /** Base URI plus OAuthType and PROJECT_ID. */
+  protected static URIBuilder getBaseUri(int authType, String projectId) {
+    return getBaseUri(authType).append("PROJECT_ID", projectId);
+  }
+
+  /** OAuthType=0: service-account email plus inline private key. */
+  protected static URIBuilder getUriOAuthServiceAccount() {
+    return getBaseUri()
+        .append("OAuthType", 0)
+        .append("OAuthServiceAcctEmail", "service@account")
+        .append("OAuthPvtKey", fake_pkcs8_key);
+  }
+
+  /** OAuthType=1: Google user-account (installed app) flow. */
+  protected static URIBuilder getUriOAuthUserAccount() {
+    return getBaseUri()
+        .append("OAuthType", 1)
+        .append("OAuthClientId", "client@id")
+        .append("OAuthClientSecret", "client_secret");
+  }
+
+  /** OAuthType=2: pre-generated access token. */
+  protected static URIBuilder getUriOAuthToken() {
+    return getBaseUri().append("OAuthType", 2).append("OAuthAccessToken", "RedactedToken");
+  }
+
+  /** OAuthType=3: application default credentials. */
+  protected static URIBuilder getUriOAuthApplicationDefault() {
+    return getBaseUri().append("OAuthType", 3);
+  }
+
+  /** OAuthType=4: external (BYOID / workforce identity) credentials. */
+  protected static URIBuilder getUriOAuthExternal() {
+    return getBaseUri().append("OAuthType", 4).append("OAuthPvtKey", fake_pkcs8_key);
+  }
+}
diff --git a/google-cloud-bigquery-jdbc/src/test/java/com/google/cloud/bigquery/jdbc/BigQueryJdbcOAuthUtilityTest.java b/google-cloud-bigquery-jdbc/src/test/java/com/google/cloud/bigquery/jdbc/BigQueryJdbcOAuthUtilityTest.java
new file mode 100644
index 000000000..b7fc03162
--- /dev/null
+++ b/google-cloud-bigquery-jdbc/src/test/java/com/google/cloud/bigquery/jdbc/BigQueryJdbcOAuthUtilityTest.java
@@ -0,0 +1,505 @@
+/*
+ * Copyright 2023 Google LLC
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */ + +package com.google.cloud.bigquery.jdbc; + +import static com.google.common.truth.Truth.assertThat; +import static org.junit.Assert.assertEquals; +import static org.junit.Assert.assertFalse; +import static org.junit.Assert.assertNotNull; +import static org.junit.Assert.assertNull; +import static org.junit.Assert.assertTrue; + +import com.google.auth.oauth2.GoogleCredentials; +import com.google.auth.oauth2.ImpersonatedCredentials; +import com.google.auth.oauth2.UserAuthorizer; +import com.google.auth.oauth2.UserCredentials; +import com.google.cloud.bigquery.exception.BigQueryJdbcRuntimeException; +import java.io.IOException; +import java.net.URI; +import java.net.URISyntaxException; +import java.net.URL; +import java.nio.file.Paths; +import java.security.PrivateKey; +import java.util.Collections; +import java.util.HashMap; +import java.util.Map; +import org.junit.Assert; +import org.junit.Ignore; +import org.junit.Test; + +public class BigQueryJdbcOAuthUtilityTest extends BigQueryJdbcBaseTest { + + private static final int USER_AUTH_PORT = 53737; + private static final String EXPECTED_USER_AUTH_URL = + "https://accounts.google.com/o/oauth2/auth?response_type=code&client_id=client_id&redirect_uri=http://localhost:" + + USER_AUTH_PORT + + "&scope=https://www.googleapis.com/auth/bigquery&state=test_state&access_type=offline&prompt=consent&login_hint=test_user&include_granted_scopes=true"; + + @Test + public void testParseOAuthPropsForAuthType0KeyfileOnly() { + Map result = + BigQueryJdbcOAuthUtility.parseOAuthProperties( + "jdbc:bigquery://https://www.googleapis.com/bigquery/v2:443;" + + "ProjectId=MyBigQueryProject;OAuthType=0;" + + "OAuthPvtKeyPath=C:\\SecureFiles\\ServiceKeyFile.p12;", + null); + + assertThat(result.get("OAuthType")).isEqualTo("GOOGLE_SERVICE_ACCOUNT"); + assertThat(result.get("OAuthPvtKeyPath")).isEqualTo("C:\\SecureFiles\\ServiceKeyFile.p12"); + } + + @Test + public void testParseOAuthPropsForAuthType0ViaEmail() { + Map result = + 
BigQueryJdbcOAuthUtility.parseOAuthProperties( + "jdbc:bigquery://https://www.googleapis.com/bigquery/v2:443;" + + "ProjectId=MyBigQueryProject;OAuthType=0;" + + "OAuthServiceAcctEmail=dummytest@dummytest.iam.gserviceaccount.com;" + + "OAuthPvtKey=RedactedKey;", + null); + + assertThat(result.get("OAuthType")).isEqualTo("GOOGLE_SERVICE_ACCOUNT"); + assertThat(result.get("OAuthServiceAcctEmail")) + .isEqualTo("dummytest@dummytest.iam.gserviceaccount.com"); + assertThat(result.get("OAuthPvtKey")).isEqualTo("RedactedKey"); + } + + @Test + public void testInvalidTokenUriForAuthType0() { + String connectionString = + "jdbc:bigquery://https://www.googleapis.com/bigquery/v2:443;" + + "ProjectId=MyBigQueryProject;OAuthType=0;" + + "OAuthServiceAcctEmail=dummytest@dummytest.iam.gserviceaccount.com;" + + "OAuthPvtKey=" + + fake_pkcs8_key + + ";" + + "EndpointOverrides=OAuth2=brokenuri{};"; + Map oauthProperties = + BigQueryJdbcOAuthUtility.parseOAuthProperties(connectionString, null); + Map overrideProperties = + BigQueryJdbcUrlUtility.parseOverrideProperties(connectionString, null); + + try { + BigQueryJdbcOAuthUtility.getCredentials(oauthProperties, overrideProperties, null); + Assert.fail(); + } catch (BigQueryJdbcRuntimeException e) { + assertThat(e.getMessage()).contains("java.net.URISyntaxException"); + } + } + + @Test + public void testParseOAuthPropsForAuthType2() { + Map result = + BigQueryJdbcOAuthUtility.parseOAuthProperties( + "jdbc:bigquery://https://www.googleapis.com/bigquery/v2:443;" + + "OAuthType=2;ProjectId=MyBigQueryProject;" + + "OAuthAccessToken=RedactedToken;", + null); + + assertThat(result.get("OAuthType")).isEqualTo("PRE_GENERATED_TOKEN"); + assertThat(result.get("OAuthAccessToken")).isEqualTo("RedactedToken"); + } + + @Test + public void testParseOAuthPropsForAuthType3() { + Map result = + BigQueryJdbcOAuthUtility.parseOAuthProperties( + "jdbc:bigquery://https://www.googleapis.com/bigquery/v2:443;" + + "OAuthType=3;ProjectId=MyBigQueryProject;", + 
null); + + assertThat(result.get("OAuthType")).isEqualTo("APPLICATION_DEFAULT_CREDENTIALS"); + } + + @Test + public void testParseOAuthPropsForDefaultAuthType() { + Map result = + BigQueryJdbcOAuthUtility.parseOAuthProperties( + "jdbc:bigquery://https://www.googleapis.com/bigquery/v2:443;" + + "ProjectId=MyBigQueryProject;OAuthType=3", + null); + + assertThat(result.get("OAuthType")).isEqualTo("APPLICATION_DEFAULT_CREDENTIALS"); + } + + @Test + public void testGetCredentialsForPreGeneratedToken() { + Map authProperties = + BigQueryJdbcOAuthUtility.parseOAuthProperties( + "jdbc:bigquery://https://www.googleapis.com/bigquery/v2:443;" + + "OAuthType=2;ProjectId=MyBigQueryProject;" + + "OAuthAccessToken=RedactedToken;", + null); + + GoogleCredentials credentials = + BigQueryJdbcOAuthUtility.getCredentials(authProperties, Collections.EMPTY_MAP, null); + assertThat(credentials).isNotNull(); + } + + @Test + public void testGetCredentialsForPreGeneratedTokenTPC() throws IOException { + Map authProperties = + BigQueryJdbcOAuthUtility.parseOAuthProperties( + "jdbc:bigquery://https://www.googleapis.com/bigquery/v2:443;" + + "OAuthType=2;ProjectId=MyBigQueryProject;" + + "OAuthAccessToken=RedactedToken;" + + "universeDomain=testDomain;", + null); + Map stringStringMap = new HashMap<>(); + stringStringMap.put( + BigQueryJdbcUrlUtility.UNIVERSE_DOMAIN_OVERRIDE_PROPERTY_NAME, "testDomain"); + Map overrideProperties = new HashMap<>(stringStringMap); + + GoogleCredentials credentials = + BigQueryJdbcOAuthUtility.getCredentials(authProperties, overrideProperties, null); + assertThat(credentials.getUniverseDomain()).isEqualTo("testDomain"); + } + + @Test + @Ignore // For running locally only similar to our other JDBC tests. 
+ public void testGetCredentialsForApplicationDefault() { + Map authProperties = + BigQueryJdbcOAuthUtility.parseOAuthProperties( + "jdbc:bigquery://https://www.googleapis.com/bigquery/v2:443;" + + "OAuthType=3;ProjectId=MyBigQueryProject;", + null); + + GoogleCredentials credentials = + BigQueryJdbcOAuthUtility.getCredentials(authProperties, null, null); + assertThat(credentials).isNotNull(); + } + + @Test + public void testParseOAuthPropsForUserAuth() { + Map authProperties = + BigQueryJdbcOAuthUtility.parseOAuthProperties( + "jdbc:bigquery://https://www.googleapis.com/bigquery/v2:443;" + + "OAuthType=1;ProjectId=MyBigQueryProject;" + + "OAuthClientId=client;OAuthClientSecret=secret;", + null); + + assertThat(authProperties.get("OAuthType")).isEqualTo("GOOGLE_USER_ACCOUNT"); + assertThat(authProperties.get("OAuthClientId")).isEqualTo("client"); + assertThat(authProperties.get("OAuthClientSecret")).isEqualTo("secret"); + } + + @Test + public void testGenerateUserAuthURL() { + try { + HashMap authProperties = new HashMap<>(); + authProperties.put(BigQueryJdbcUrlUtility.OAUTH_CLIENT_ID_PROPERTY_NAME, "client_id"); + authProperties.put(BigQueryJdbcUrlUtility.OAUTH_CLIENT_SECRET_PROPERTY_NAME, "client_secret"); + + UserAuthorizer userAuthorizer = + BigQueryJdbcOAuthUtility.getUserAuthorizer( + authProperties, new HashMap(), USER_AUTH_PORT, null); + + String userId = "test_user"; + String state = "test_state"; + URI baseURI = URI.create("http://example.com/foo"); + + URL authURL = userAuthorizer.getAuthorizationUrl(userId, state, baseURI); + + assertThat(authURL.toString()).isEqualTo(EXPECTED_USER_AUTH_URL); + } catch (URISyntaxException e) { + throw new RuntimeException(e); + } + } + + @Test + public void testGenerateUserAuthURLOverrideOauthEndpoint() { + try { + + URI overrideTokenSeverURI = new URI("https://oauth2-gsprivateall.p.googleapis.com/token"); + String connectionString = + "jdbc:bigquery://https://www.googleapis.com/bigquery/v2:443;" + + 
"ProjectId=MyBigQueryProject;OAuthType=1;" + + "OAuthClientId=client;OAuthClientSecret=secret;" + + "EndpointOverrides=OAuth2=" + + overrideTokenSeverURI + + ";"; + Map authProperties = + BigQueryJdbcOAuthUtility.parseOAuthProperties(connectionString, null); + Map overrideProperties = + BigQueryJdbcUrlUtility.parseOverrideProperties(connectionString, null); + + UserAuthorizer userAuthorizer = + BigQueryJdbcOAuthUtility.getUserAuthorizer( + authProperties, overrideProperties, USER_AUTH_PORT, null); + + assertThat(overrideTokenSeverURI).isEqualTo(userAuthorizer.toBuilder().getTokenServerUri()); + } catch (URISyntaxException e) { + throw new RuntimeException(e); + } + } + + @Test + public void testParseOAuthPropsForRefreshToken() { + Map authProperties = + BigQueryJdbcOAuthUtility.parseOAuthProperties( + "jdbc:bigquery://https://www.googleapis.com/bigquery/v2:443;" + + "OAuthType=2;ProjectId=MyBigQueryProject;OAuthRefreshToken=token;" + + "OAuthClientId=client;OAuthClientSecret=secret;", + null); + + assertThat(authProperties.get("OAuthType")).isEqualTo("PRE_GENERATED_TOKEN"); + assertThat(authProperties.get("OAuthRefreshToken")).isEqualTo("token"); + assertThat(authProperties.get("OAuthClientId")).isEqualTo("client"); + assertThat(authProperties.get("OAuthClientSecret")).isEqualTo("secret"); + } + + @Test + public void testParseOverridePropsForRefreshTokenAuth() { + try { + + String connectionString = + "jdbc:bigquery://https://www.googleapis.com/bigquery/v2:443;" + + "ProjectId=MyBigQueryProject;OAuthType=2;OAuthRefreshToken=token;" + + "OAuthClientId=client;OAuthClientSecret=secret;" + + "EndpointOverrides=Oauth2=https://oauth2-private.p.googleapis.com/token;"; + + Map authProperties = + BigQueryJdbcOAuthUtility.parseOAuthProperties(connectionString, null); + Map overrideProperties = + BigQueryJdbcUrlUtility.parseOverrideProperties(connectionString, null); + + UserCredentials userCredentials = + BigQueryJdbcOAuthUtility.getPreGeneratedRefreshTokenCredentials( + 
authProperties, overrideProperties, null); + + assertThat(userCredentials.toBuilder().getTokenServerUri()) + .isEqualTo(URI.create("https://oauth2-private.p.googleapis.com/token")); + + } catch (URISyntaxException e) { + throw new RuntimeException(e); + } + } + + @Test + public void testParseBYOIDProps() { + Map result = + BigQueryJdbcOAuthUtility.parseOAuthProperties( + "jdbc:bigquery://https://www.googleapis.com/bigquery/v2:433;OAuthType=4;" + + "ProjectId=MyBigQueryProject;" + + "BYOID_AudienceUri=//iam.googleapis.com/locations/global/workforcePools/pool-id/providers/provider-id;" + + "BYOID_PoolUserProject=workforceProjectNumber;" + + "BYOID_CredentialSource={\"file\": \"C:\\\\Token.txt\"};" + + "BYOID_SA_Impersonation_Uri=testSA;" + + "BYOID_SubjectTokenType=urn:ietf:params:oauth:tokentype:jwt;" + + "BYOID_TokenUri=https://testuri.com/v1/token", + null); + + assertThat(result.get("BYOID_AudienceUri")) + .isEqualTo( + "//iam.googleapis.com/locations/global/workforcePools/pool-id/providers/provider-id"); + assertThat(result.get("BYOID_PoolUserProject")).isEqualTo("workforceProjectNumber"); + assertThat(result.get("BYOID_CredentialSource")).isEqualTo("{\"file\": \"C:\\\\Token.txt\"}"); + assertThat(result.get("BYOID_SA_Impersonation_Uri")).isEqualTo("testSA"); + assertThat(result.get("BYOID_SubjectTokenType")) + .isEqualTo("urn:ietf:params:oauth:tokentype:jwt"); + assertThat(result.get("BYOID_TokenUri")).isEqualTo("https://testuri.com/v1/token"); + } + + @Test + public void testParseOAuthProperties_UserAccount_RequestDriveScopeEnabled() { + String url = + "jdbc:bigquery://https://www.googleapis.com/bigquery/v2:443;" + + "OAuthType=1;OAuthClientId=redactedClientId;OAuthClientSecret=redactedClientSecret;" + + "RequestGoogleDriveScope=1;"; + Map properties = + BigQueryJdbcOAuthUtility.parseOAuthProperties(url, this.getClass().getName()); + assertEquals( + String.valueOf(BigQueryJdbcOAuthUtility.AuthType.GOOGLE_USER_ACCOUNT), + 
properties.get(BigQueryJdbcUrlUtility.OAUTH_TYPE_PROPERTY_NAME)); + assertEquals( + "redactedClientId", properties.get(BigQueryJdbcUrlUtility.OAUTH_CLIENT_ID_PROPERTY_NAME)); + assertEquals( + "redactedClientSecret", + properties.get(BigQueryJdbcUrlUtility.OAUTH_CLIENT_SECRET_PROPERTY_NAME)); + assertEquals( + "1", properties.get(BigQueryJdbcUrlUtility.REQUEST_GOOGLE_DRIVE_SCOPE_PROPERTY_NAME)); + } + + @Test + public void testParseOAuthProperties_UserAccount_RequestDriveScopeDisabled() { + String url = + "jdbc:bigquery://https://www.googleapis.com/bigquery/v2:443;" + + "OAuthType=1;OAuthClientId=redactedClientId;OAuthClientSecret=redactedClientSecret;" + + "RequestGoogleDriveScope=0;"; + Map properties = + BigQueryJdbcOAuthUtility.parseOAuthProperties(url, this.getClass().getName()); + assertEquals( + "0", properties.get(BigQueryJdbcUrlUtility.REQUEST_GOOGLE_DRIVE_SCOPE_PROPERTY_NAME)); + } + + @Test + public void testParseOAuthProperties_UserAccount_RequestDriveScopeDefault() { + String url = + "jdbc:bigquery://https://www.googleapis.com/bigquery/v2:443;" + + "OAuthType=1;OAuthClientId=redactedClientId;OAuthClientSecret=redactedClientSecret;"; + Map properties = + BigQueryJdbcOAuthUtility.parseOAuthProperties(url, this.getClass().getName()); + assertEquals( + String.valueOf(BigQueryJdbcUrlUtility.DEFAULT_REQUEST_GOOGLE_DRIVE_SCOPE_VALUE), + properties.get(BigQueryJdbcUrlUtility.REQUEST_GOOGLE_DRIVE_SCOPE_PROPERTY_NAME)); + } + + @Test + public void testGetUserAuthorizer_WithDriveScope() throws URISyntaxException { + Map authProperties = new HashMap<>(); + authProperties.put(BigQueryJdbcUrlUtility.OAUTH_CLIENT_ID_PROPERTY_NAME, "redactedClientId"); + authProperties.put( + BigQueryJdbcUrlUtility.OAUTH_CLIENT_SECRET_PROPERTY_NAME, "redactedClientSecret"); + authProperties.put(BigQueryJdbcUrlUtility.REQUEST_GOOGLE_DRIVE_SCOPE_PROPERTY_NAME, "1"); + + UserAuthorizer authorizer = + BigQueryJdbcOAuthUtility.getUserAuthorizer( + authProperties, Collections.emptyMap(), 
12345, this.getClass().getName()); + + assertTrue(authorizer.getScopes().contains("https://www.googleapis.com/auth/bigquery")); + assertTrue(authorizer.getScopes().contains("https://www.googleapis.com/auth/drive.readonly")); + assertEquals(2, authorizer.getScopes().size()); + } + + @Test + public void testGetUserAuthorizer_WithoutDriveScope() throws URISyntaxException { + Map authProperties = new HashMap<>(); + authProperties.put(BigQueryJdbcUrlUtility.OAUTH_CLIENT_ID_PROPERTY_NAME, "redactedClientId"); + authProperties.put( + BigQueryJdbcUrlUtility.OAUTH_CLIENT_SECRET_PROPERTY_NAME, "redactedClientSecret"); + authProperties.put(BigQueryJdbcUrlUtility.REQUEST_GOOGLE_DRIVE_SCOPE_PROPERTY_NAME, "0"); + + UserAuthorizer authorizer = + BigQueryJdbcOAuthUtility.getUserAuthorizer( + authProperties, Collections.emptyMap(), 12345, this.getClass().getName()); + assertTrue(authorizer.getScopes().contains("https://www.googleapis.com/auth/bigquery")); + assertFalse(authorizer.getScopes().contains("https://www.googleapis.com/auth/drive.readonly")); + assertEquals(1, authorizer.getScopes().size()); + } + + @Test + public void testGetUserAuthorizer_InvalidDriveScopeValue() throws URISyntaxException { + Map authProperties = new HashMap<>(); + authProperties.put(BigQueryJdbcUrlUtility.OAUTH_CLIENT_ID_PROPERTY_NAME, "redactedClientId"); + authProperties.put( + BigQueryJdbcUrlUtility.OAUTH_CLIENT_SECRET_PROPERTY_NAME, "redactedClientSecret"); + authProperties.put( + BigQueryJdbcUrlUtility.REQUEST_GOOGLE_DRIVE_SCOPE_PROPERTY_NAME, "invalid_value"); + UserAuthorizer authorizer = + BigQueryJdbcOAuthUtility.getUserAuthorizer( + authProperties, Collections.emptyMap(), 12345, this.getClass().getName()); + assertFalse(authorizer.getScopes().contains("https://www.googleapis.com/auth/drive.readonly")); + } + + @Test + public void testParseUserImpersonationDefault() { + String connectionUri = + getUriOAuthServiceAccount() + .append("ServiceAccountImpersonationEmail", "impersonated") + 
.toString(); + Map result = BigQueryJdbcOAuthUtility.parseOAuthProperties(connectionUri, ""); + assertEquals( + "impersonated", + result.get(BigQueryJdbcUrlUtility.OAUTH_SA_IMPERSONATION_EMAIL_PROPERTY_NAME)); + assertEquals( + BigQueryJdbcUrlUtility.DEFAULT_OAUTH_SA_IMPERSONATION_SCOPES_VALUE, + result.get(BigQueryJdbcUrlUtility.OAUTH_SA_IMPERSONATION_SCOPES_PROPERTY_NAME)); + assertEquals( + BigQueryJdbcUrlUtility.DEFAULT_OAUTH_SA_IMPERSONATION_TOKEN_LIFETIME_VALUE, + result.get(BigQueryJdbcUrlUtility.OAUTH_SA_IMPERSONATION_TOKEN_LIFETIME_PROPERTY_NAME)); + } + + @Test + public void testParseUserImpersonationNonDefault() { + Map result = + BigQueryJdbcOAuthUtility.parseOAuthProperties( + getUriOAuthServiceAccount() + .append("ServiceAccountImpersonationEmail", "impersonated") + .append("ServiceAccountImpersonationScopes", "scopes") + .append("ServiceAccountImpersonationTokenLifetime", 300) + .toString(), + ""); + assertEquals( + "impersonated", + result.get(BigQueryJdbcUrlUtility.OAUTH_SA_IMPERSONATION_EMAIL_PROPERTY_NAME)); + assertEquals( + "scopes", result.get(BigQueryJdbcUrlUtility.OAUTH_SA_IMPERSONATION_SCOPES_PROPERTY_NAME)); + assertEquals( + "300", + result.get(BigQueryJdbcUrlUtility.OAUTH_SA_IMPERSONATION_TOKEN_LIFETIME_PROPERTY_NAME)); + } + + @Test + public void testGetServiceAccountImpersonatedCredentials() { + Map authProperties = + BigQueryJdbcOAuthUtility.parseOAuthProperties( + getUriOAuthServiceAccount() + .append("ServiceAccountImpersonationEmail", "impersonated") + .toString(), + ""); + GoogleCredentials credentials = + BigQueryJdbcOAuthUtility.getCredentials(authProperties, Collections.EMPTY_MAP, null); + assertThat(credentials).isInstanceOf(ImpersonatedCredentials.class); + } + + @Test + public void testPrivateKeyFromPkcs8() { + PrivateKey pk = BigQueryJdbcOAuthUtility.privateKeyFromPkcs8(fake_pkcs8_key); + assertNotNull(pk); + } + + @Test + public void testPrivateKeyFromPkcs8_wrong() { + PrivateKey pk = 
BigQueryJdbcOAuthUtility.privateKeyFromPkcs8(""); + assertNull(pk); + } + + // Command to generate key: + // keytool -genkey -alias privatekey -keyalg RSA -keysize 2048 -storepass notasecret \ + // -keypass notasecret -storetype pkcs12 -keystore ./fake.p12 + @Test + public void testPrivateKeyFromP12File() { + URL resource = BigQueryJdbcOAuthUtilityTest.class.getResource("/fake.p12"); + try { + PrivateKey pk = + BigQueryJdbcOAuthUtility.privateKeyFromP12File( + Paths.get(resource.toURI()).toAbsolutePath().toString(), "notasecret"); + assertNotNull(pk); + } catch (Exception e) { + assertTrue(false); + } + } + + @Test + public void testPrivateKeyFromP12File_missing_file() { + PrivateKey pk = BigQueryJdbcOAuthUtility.privateKeyFromP12File("", ""); + assertNull(pk); + } + + @Test + public void testPrivateKeyFromP12File_wrong_password() { + URL resource = BigQueryJdbcOAuthUtilityTest.class.getResource("/fake.p12"); + try { + PrivateKey pk = + BigQueryJdbcOAuthUtility.privateKeyFromP12File( + Paths.get(resource.toURI()).toAbsolutePath().toString(), "fake"); + assertNull(pk); + } catch (Exception e) { + assertTrue(false); + } + } +} diff --git a/google-cloud-bigquery-jdbc/src/test/java/com/google/cloud/bigquery/jdbc/BigQueryJdbcParameterTest.java b/google-cloud-bigquery-jdbc/src/test/java/com/google/cloud/bigquery/jdbc/BigQueryJdbcParameterTest.java new file mode 100644 index 000000000..815759892 --- /dev/null +++ b/google-cloud-bigquery-jdbc/src/test/java/com/google/cloud/bigquery/jdbc/BigQueryJdbcParameterTest.java @@ -0,0 +1,75 @@ +/* + * Copyright 2025 Google LLC + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package com.google.cloud.bigquery.jdbc; + +import static org.junit.Assert.assertEquals; + +import com.google.cloud.bigquery.StandardSQLTypeName; +import com.google.cloud.bigquery.jdbc.BigQueryParameterHandler.BigQueryStatementParameterType; +import org.junit.Test; + +public class BigQueryJdbcParameterTest { + + @Test + public void testSettersAndGetters() { + int expectedIndex = 3; + Object expectedValue = "String Object"; + Class expectedType = String.class; + StandardSQLTypeName expectedSqlType = StandardSQLTypeName.STRING; + String expectedParamName = "StringParameter"; + BigQueryStatementParameterType expectedParamType = BigQueryStatementParameterType.IN; + int expectedScale = -1; + + BigQueryJdbcParameter parameter = new BigQueryJdbcParameter(); + parameter.setIndex(3); + parameter.setValue("String Object"); + parameter.setType(String.class); + parameter.setSqlType(StandardSQLTypeName.STRING); + parameter.setParamName("StringParameter"); + parameter.setParamType(BigQueryStatementParameterType.IN); + parameter.setScale(-1); + + assertEquals(expectedIndex, parameter.getIndex()); + assertEquals(expectedValue, parameter.getValue()); + assertEquals(expectedType, parameter.getType()); + assertEquals(expectedSqlType, parameter.getSqlType()); + assertEquals(expectedParamName, parameter.getParamName()); + assertEquals(expectedParamType, parameter.getParamType()); + assertEquals(expectedScale, parameter.getScale()); + } + + @Test + public void testCopyConstructor() { + int expectedIndex = 3; + Object expectedValue = "String Object"; + Class 
expectedType = String.class; + StandardSQLTypeName expectedSqlType = StandardSQLTypeName.STRING; + + BigQueryJdbcParameter parameter = new BigQueryJdbcParameter(); + parameter.setIndex(3); + parameter.setValue("String Object"); + parameter.setType(String.class); + parameter.setSqlType(StandardSQLTypeName.STRING); + + BigQueryJdbcParameter copiedParameter = new BigQueryJdbcParameter(parameter); + + assertEquals(expectedIndex, copiedParameter.getIndex()); + assertEquals(expectedValue, copiedParameter.getValue()); + assertEquals(expectedType, copiedParameter.getType()); + assertEquals(expectedSqlType, copiedParameter.getSqlType()); + } +} diff --git a/google-cloud-bigquery-jdbc/src/test/java/com/google/cloud/bigquery/jdbc/BigQueryJdbcProxyUtilityTest.java b/google-cloud-bigquery-jdbc/src/test/java/com/google/cloud/bigquery/jdbc/BigQueryJdbcProxyUtilityTest.java new file mode 100644 index 000000000..203502cda --- /dev/null +++ b/google-cloud-bigquery-jdbc/src/test/java/com/google/cloud/bigquery/jdbc/BigQueryJdbcProxyUtilityTest.java @@ -0,0 +1,293 @@ +/* + * Copyright 2024 Google LLC + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package com.google.cloud.bigquery.jdbc; + +import static com.google.common.truth.Truth.assertThat; +import static org.junit.Assert.assertNotNull; +import static org.junit.Assert.assertNull; +import static org.junit.Assert.assertThrows; + +import com.google.api.gax.rpc.TransportChannelProvider; +import com.google.cloud.bigquery.exception.BigQueryJdbcRuntimeException; +import com.google.cloud.http.HttpTransportOptions; +import java.io.File; +import java.io.FileNotFoundException; +import java.io.IOException; +import java.net.URISyntaxException; +import java.net.URL; +import java.util.Collections; +import java.util.HashMap; +import java.util.Map; +import org.junit.Test; + +public class BigQueryJdbcProxyUtilityTest { + @Test + public void testParsingAllProxyProperties() { + Map result = + BigQueryJdbcProxyUtility.parseProxyProperties( + "jdbc:bigquery://https://www.googleapis.com/bigquery/v2:443;" + + "ProjectId=MyBigQueryProject;OAuthType=3;" + + "ProxyHost=34.94.167.18;" + + "ProxyPort=3128;" + + "ProxyUid=fahmz;" + + "ProxyPwd=pass;", + null); + + assertThat(result.get("ProxyHost")).isEqualTo("34.94.167.18"); + assertThat(result.get("ProxyPort")).isEqualTo("3128"); + assertThat(result.get("ProxyUid")).isEqualTo("fahmz"); + assertThat(result.get("ProxyPwd")).isEqualTo("pass"); + } + + @Test + public void testParsingInvalidPortThrowsIllegalArgument() { + assertThrows( + IllegalArgumentException.class, + () -> + BigQueryJdbcProxyUtility.parseProxyProperties( + "jdbc:bigquery://https://www.googleapis.com/bigquery/v2:443;" + + "ProjectId=MyBigQueryProject;OAuthType=3;" + + "ProxyHost=34.94.167.18;" + + "ProxyPort=portnumber;" + + "ProxyUid=fahmz;" + + "ProxyPwd=pass;", + null)); + } + + @Test + public void testMissingHostThrowsIllegalArgument() { + assertThrows( + IllegalArgumentException.class, + () -> + BigQueryJdbcProxyUtility.parseProxyProperties( + "jdbc:bigquery://https://www.googleapis.com/bigquery/v2:443;" + + "ProjectId=MyBigQueryProject;OAuthType=3;" + + 
"ProxyPort=3128;" + + "ProxyUid=fahmz;" + + "ProxyPwd=pass;", + null)); + } + + @Test + public void testMissingPortThrowsIllegalArgument() { + assertThrows( + IllegalArgumentException.class, + () -> + BigQueryJdbcProxyUtility.parseProxyProperties( + "jdbc:bigquery://https://www.googleapis.com/bigquery/v2:443;" + + "ProjectId=MyBigQueryProject;OAuthType=3;" + + "ProxyHost=34.94.167.18;" + + "ProxyUid=fahmz;" + + "ProxyPwd=pass;", + null)); + } + + @Test + public void testMissingUidWithPwdThrowsIllegalArgument() { + assertThrows( + IllegalArgumentException.class, + () -> + BigQueryJdbcProxyUtility.parseProxyProperties( + "jdbc:bigquery://https://www.googleapis.com/bigquery/v2:443;" + + "ProjectId=MyBigQueryProject;OAuthType=3;" + + "ProxyHost=34.94.167.18;" + + "ProxyPort=3128;" + + "ProxyPwd=pass;", + null)); + } + + @Test + public void testMissingPwdWithUidThrowsIllegalArgument() { + assertThrows( + IllegalArgumentException.class, + () -> + BigQueryJdbcProxyUtility.parseProxyProperties( + "jdbc:bigquery://https://www.googleapis.com/bigquery/v2:443;" + + "ProjectId=MyBigQueryProject;OAuthType=3;" + + "ProxyHost=34.94.167.18;" + + "ProxyPort=3128;" + + "ProxyUid=fahmz;", + null)); + } + + @Test + public void testGetHttpTransportOptionsWithAuthenticatedProxy() { + String connection_uri = + "jdbc:bigquery://https://www.googleapis.com/bigquery/v2:443;" + + "ProjectId=TestProject" + + ";OAuthType=3;" + + "ProxyHost=34.94.167.18;" + + "ProxyPort=3128;" + + "ProxyUid=fahmz;" + + "ProxyPwd=pass;"; + + Map proxyProperties = + BigQueryJdbcProxyUtility.parseProxyProperties(connection_uri, null); + HttpTransportOptions result = + BigQueryJdbcProxyUtility.getHttpTransportOptions(proxyProperties, null, null, null); + assertNotNull(result); + } + + @Test + public void testGetHttpTransportOptionsWithNonAuthenticatedProxy() { + String connection_uri = + "jdbc:bigquery://https://www.googleapis.com/bigquery/v2:443;" + + "ProjectId=TestProject" + + ";OAuthType=3;" + + 
"ProxyHost=34.94.167.18;" + + "ProxyPort=3128;"; + + Map proxyProperties = + BigQueryJdbcProxyUtility.parseProxyProperties(connection_uri, null); + HttpTransportOptions result = + BigQueryJdbcProxyUtility.getHttpTransportOptions(proxyProperties, null, null, null); + assertNotNull(result); + } + + @Test + public void testGetHttpTransportOptionsWithNoProxySettingsReturnsNull() { + String connection_uri = + "jdbc:bigquery://https://www.googleapis.com/bigquery/v2:443;" + + "ProjectId=TestProject" + + ";OAuthType=3;"; + + Map proxyProperties = + BigQueryJdbcProxyUtility.parseProxyProperties(connection_uri, null); + HttpTransportOptions result = + BigQueryJdbcProxyUtility.getHttpTransportOptions(proxyProperties, null, null, null); + assertNull(result); + } + + private String getTestResourcePath(String resourceName) throws URISyntaxException { + URL resourceUrl = getClass().getClassLoader().getResource(resourceName); + if (resourceUrl == null) { + throw new RuntimeException("Test resource not found: " + resourceName); + } + return new File(resourceUrl.toURI()).getAbsolutePath(); + } + + @Test + public void testGetHttpTransportOptions_withSslTrustStore_noPassword() throws Exception { + String trustStorePath = getTestResourcePath("test_truststore_nopass.jks"); + HttpTransportOptions options = + BigQueryJdbcProxyUtility.getHttpTransportOptions( + Collections.emptyMap(), trustStorePath, null, "TestClass"); + assertNotNull(options); + assertNotNull(options.getHttpTransportFactory()); + } + + @Test + public void testGetHttpTransportOptions_withSslTrustStore_withCorrectPassword() throws Exception { + String trustStorePath = getTestResourcePath("test_truststore_withpass.jks"); + HttpTransportOptions options = + BigQueryJdbcProxyUtility.getHttpTransportOptions( + Collections.emptyMap(), trustStorePath, "testpassword", "TestClass"); + assertNotNull(options); + assertNotNull(options.getHttpTransportFactory()); + } + + @Test + public void 
testGetHttpTransportOptions_withSslTrustStore_withIncorrectPassword() + throws Exception { + String trustStorePath = getTestResourcePath("test_truststore_withpass.jks"); + BigQueryJdbcRuntimeException exception = + assertThrows( + BigQueryJdbcRuntimeException.class, + () -> + BigQueryJdbcProxyUtility.getHttpTransportOptions( + Collections.emptyMap(), + trustStorePath, + "wrongpassword", + "TestClass")); + assertThat(exception.getCause()).isInstanceOf(IOException.class); + } + + @Test + public void testGetHttpTransportOptions_withInvalidSslTrustStorePath() { + String invalidPath = "/path/to/nonexistent/truststore.jks"; + BigQueryJdbcRuntimeException exception = + assertThrows( + BigQueryJdbcRuntimeException.class, + () -> + BigQueryJdbcProxyUtility.getHttpTransportOptions( + Collections.emptyMap(), invalidPath, null, "TestClass")); + + assertThat(exception.getCause()).isInstanceOf(FileNotFoundException.class); + } + + @Test + public void testGetHttpTransportOptions_withSslAndProxy() throws Exception { + String trustStorePath = getTestResourcePath("test_truststore_nopass.jks"); + Map proxyProperties = new HashMap<>(); + proxyProperties.put(BigQueryJdbcUrlUtility.PROXY_HOST_PROPERTY_NAME, "proxy.example.com"); + proxyProperties.put(BigQueryJdbcUrlUtility.PROXY_PORT_PROPERTY_NAME, "8080"); + + HttpTransportOptions options = + BigQueryJdbcProxyUtility.getHttpTransportOptions( + proxyProperties, trustStorePath, null, "TestClass"); + assertNotNull(options); + assertNotNull(options.getHttpTransportFactory()); + } + + @Test + public void testGetTransportChannelProvider_withSslTrustStore_noPassword() throws Exception { + String trustStorePath = getTestResourcePath("test_truststore_nopass.jks"); + TransportChannelProvider provider = + BigQueryJdbcProxyUtility.getTransportChannelProvider( + Collections.emptyMap(), trustStorePath, null, "TestClass"); + assertNotNull(provider); + } + + @Test + public void testGetTransportChannelProvider_withSslTrustStore_withCorrectPassword() + 
throws Exception { + String trustStorePath = getTestResourcePath("test_truststore_withpass.jks"); + TransportChannelProvider provider = + BigQueryJdbcProxyUtility.getTransportChannelProvider( + Collections.emptyMap(), trustStorePath, "testpassword", "TestClass"); + assertNotNull(provider); + } + + @Test + public void testGetTransportChannelProvider_withSslAndProxy() throws Exception { + String trustStorePath = getTestResourcePath("test_truststore_nopass.jks"); + Map proxyProperties = new HashMap<>(); + proxyProperties.put(BigQueryJdbcUrlUtility.PROXY_HOST_PROPERTY_NAME, "proxy.example.com"); + proxyProperties.put(BigQueryJdbcUrlUtility.PROXY_PORT_PROPERTY_NAME, "8080"); + + TransportChannelProvider provider = + BigQueryJdbcProxyUtility.getTransportChannelProvider( + proxyProperties, trustStorePath, null, "TestClass"); + assertNotNull(provider); + } + + @Test + public void testGetTransportChannelProvider_noProxyNoSsl_returnsNull() { + TransportChannelProvider provider = + BigQueryJdbcProxyUtility.getTransportChannelProvider( + Collections.emptyMap(), null, null, "TestClass"); + assertNull(provider); + } + + @Test + public void testGetHttpTransportOptions_noProxyNoSsl_returnsNull() { + HttpTransportOptions options = + BigQueryJdbcProxyUtility.getHttpTransportOptions( + Collections.emptyMap(), null, null, "TestClass"); + assertNull(options); + } +} diff --git a/google-cloud-bigquery-jdbc/src/test/java/com/google/cloud/bigquery/jdbc/BigQueryJdbcUrlUtilityTest.java b/google-cloud-bigquery-jdbc/src/test/java/com/google/cloud/bigquery/jdbc/BigQueryJdbcUrlUtilityTest.java new file mode 100644 index 000000000..86f087bf3 --- /dev/null +++ b/google-cloud-bigquery-jdbc/src/test/java/com/google/cloud/bigquery/jdbc/BigQueryJdbcUrlUtilityTest.java @@ -0,0 +1,803 @@ +/* + * Copyright 2023 Google LLC + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package com.google.cloud.bigquery.jdbc; + +import static com.google.common.truth.Truth.assertThat; +import static org.junit.Assert.assertEquals; +import static org.junit.Assert.assertFalse; +import static org.junit.Assert.assertNotNull; +import static org.junit.Assert.assertNull; +import static org.junit.Assert.assertThrows; +import static org.junit.Assert.assertTrue; + +import com.google.cloud.bigquery.exception.BigQueryJdbcRuntimeException; +import com.google.common.collect.Maps; +import java.util.Arrays; +import java.util.Collections; +import java.util.HashMap; +import java.util.List; +import java.util.Map; +import java.util.Properties; +import org.junit.Test; + +public class BigQueryJdbcUrlUtilityTest { + + @Test + public void testParsePropertyWithNoDefault() { + String url = + "jdbc:bigquery://https://www.googleapis.com/bigquery/v2:443;" + + "ProjectId=MyBigQueryProject;" + + "OAuthAccessToken=RedactedToken"; + + String result = BigQueryJdbcUrlUtility.parseUriProperty(url, "OAuthType"); + assertThat(result).isNull(); + } + + @Test + public void testParsePropertyWithDefault() { + String url = + "jdbc:bigquery://https://www.googleapis.com/bigquery/v2:443;" + + "ProjectId=MyBigQueryProject;" + + "OAuthAccessToken=RedactedToken"; + + String result = BigQueryJdbcUrlUtility.parseUriProperty(url, "OAuthType"); + assertThat(result).isEqualTo(null); + } + + @Test + public void testParsePropertyWithValue() { + String url = + "jdbc:bigquery://https://www.googleapis.com/bigquery/v2:443;" + + "ProjectId=MyBigQueryProject;" + + 
"OAuthAccessToken=RedactedToken"; + + String result = BigQueryJdbcUrlUtility.parseUriProperty(url, "ProjectId"); + assertThat(result).isEqualTo("MyBigQueryProject"); + } + + @Test + public void testParsePropertyWithValueCaseInsensitive() { + String url = + "jdbc:bigquery://https://www.googleapis.com/bigquery/v2:443;" + + "PROJECTID=MyBigQueryProject;" + + "OAuthAccessToken=RedactedToken"; + + String result = BigQueryJdbcUrlUtility.parseUriProperty(url, "ProjectId"); + assertThat(result).isEqualTo("MyBigQueryProject"); + } + + @Test + public void testAppendPropertiesToURL() { + String url = + "jdbc:bigquery://https://www.googleapis.com/bigquery/v2:443;" + + "ProjectId=MyBigQueryProject;" + + "OAuthAccessToken=RedactedToken"; + Properties properties = new Properties(); + properties.setProperty("OAuthType", "3"); + + String updatedUrl = BigQueryJdbcUrlUtility.appendPropertiesToURL(url, null, properties); + // Truth's assertThat(boolean) with no chained check asserts nothing; use the + // StringSubject so the test actually fails when the property is not appended. + assertThat(updatedUrl).contains("OAuthType=3"); + } + + @Test + public void testConnectionPropertiesFromURI() { + String connection_uri = + "bigquery://https://www.googleapis.com/bigquery/v2:443;PROJECTID=testProject;OAUTHTYPE=3;DEFAULTDATASET=testDataset;LOCATION=us-central1"; + + assertThat(BigQueryJdbcUrlUtility.parseUriProperty(connection_uri, "OAUTHTYPE")).isEqualTo("3"); + assertThat(BigQueryJdbcUrlUtility.parseUriProperty(connection_uri, "LOCATION")) + .isEqualTo("us-central1"); + } + + @Test + public void testConnectionPropertiesFromURIMultiline() { + String connection_uri = + "bigquery://https://www.googleapis.com/bigquery/v2:443;Multiline=value1\nvalue2\n;"; + + assertThat(BigQueryJdbcUrlUtility.parseUriProperty(connection_uri, "Multiline")) + .isEqualTo("value1\nvalue2\n"); + } + + @Test + public void testConnectionPropertiesFromURIMultilineNoSemicolon() { + String connection_uri = + "bigquery://https://www.googleapis.com/bigquery/v2:443;Multiline=value1\nvalue2"; + + assertThat(BigQueryJdbcUrlUtility.parseUriProperty(connection_uri, "Multiline")) + 
.isEqualTo("value1\nvalue2"); + } + + @Test + public void testOverridePropertiesFromURICompatibility() { + String connection_uri = + "bigquery://https://www.googleapis.com/bigquery/v2:443;PROJECTID=testProject;PrivateServiceConnectUris=" + + "BIGQUERY=https://bigquery-myprivateserver.p.googleapis.com," + + "READ_API=https://bigquerystorage-myprivateserver.p.googleapis.com:443;"; + + Map parsedPSCProperties = + BigQueryJdbcUrlUtility.parseOverrideProperties(connection_uri, null); + + assertThat(parsedPSCProperties.get("BIGQUERY")) + .isEqualTo("https://bigquery-myprivateserver.p.googleapis.com"); + assertThat(parsedPSCProperties.get("READ_API")) + .isEqualTo("https://bigquerystorage-myprivateserver.p.googleapis.com:443"); + } + + @Test + public void testOverridePropertiesDoesNotAffectOriginalParsersAtEnd() { + String connection_uri = + "bigquery://https://www.googleapis.com/bigquery/v2:443;PrivateServiceConnectUris=" + + "BIGQUERY=https://bigquery-myprivateserver.p.googleapis.com," + + "READ_API=https://bigquerystorage-myprivateserver.p.googleapis.com:443," + + "OAUTH2=https://oauth2-myprivateserver.p.googleapis.com;PROJECTID=testProject;"; + + Map parsedPSCProperties = + BigQueryJdbcUrlUtility.parseOverrideProperties(connection_uri, null); + + assertThat(parsedPSCProperties.get("BIGQUERY")) + .isEqualTo("https://bigquery-myprivateserver.p.googleapis.com"); + assertThat(parsedPSCProperties.get("READ_API")) + .isEqualTo("https://bigquerystorage-myprivateserver.p.googleapis.com:443"); + assertThat(parsedPSCProperties.get("OAUTH2")) + .isEqualTo("https://oauth2-myprivateserver.p.googleapis.com"); + } + + @Test + public void testOverridePropertiesDoesNotParseOutsideOfPrivateServiceConnectUris() { + String connection_uri = + "bigquery://https://www.googleapis.com/bigquery/v2:443;PrivateServiceConnectUris=" + + "BIGQUERY=https://bigquery-myprivateserver.p.googleapis.com," + + "READ_API=https://bigquerystorage-myprivateserver.p.googleapis.com:443;" + // Hard to see but 
semicolon ends it here. + + "OAUTH2=https://oauth2-myprivateserver.p.googleapis.com;PROJECTID=testProject;"; + + Map parsedPSCProperties = + BigQueryJdbcUrlUtility.parseOverrideProperties(connection_uri, null); + + assertThat(parsedPSCProperties.get("BIGQUERY")) + .isEqualTo("https://bigquery-myprivateserver.p.googleapis.com"); + assertThat(parsedPSCProperties.get("READ_API")) + .isEqualTo("https://bigquerystorage-myprivateserver.p.googleapis.com:443"); + assertThat(parsedPSCProperties.get("OAUTH2")).isNull(); + } + + @Test + public void testOverridePropertiesDoesNotParserPropertiesInMiddle() { + String connection_uri = + "bigquery://https://www.googleapis.com/bigquery/v2:443;PrivateServiceConnectUris=" + + "BIGQUERY=https://bigquery-myprivateserver.p.googleapis.com,OAUTHTYPE=2," + + "READ_API=https://bigquerystorage-myprivateserver.p.googleapis.com:443," + + "OAUTH2=https://oauth2-myprivateserver.p.googleapis.com;"; + + Map parsedPSCProperties = + BigQueryJdbcUrlUtility.parseOverrideProperties(connection_uri, null); + + assertThat(parsedPSCProperties.get("BIGQUERY")) + .isEqualTo("https://bigquery-myprivateserver.p.googleapis.com"); + assertThat(parsedPSCProperties.get("READ_API")) + .isEqualTo("https://bigquerystorage-myprivateserver.p.googleapis.com:443"); + assertThat(parsedPSCProperties.get("OAUTH2")) + .isEqualTo("https://oauth2-myprivateserver.p.googleapis.com"); + } + + @Test + public void testOverridePropertyBeforeProceedingOverrideParameterDoesNotParse() { + String connection_uri = + "bigquery://https://www.googleapis.com/bigquery/v2:443;BIGQUERY=https://bigquery-myprivateserver.p.googleapis.com;" + + "PrivateServiceConnectUris=" + + "READ_API=https://bigquerystorage-myprivateserver.p.googleapis.com:443," + + "OAUTH2=https://oauth2-myprivateserver.p.googleapis.com;"; + + Map parsedPSCProperties = + BigQueryJdbcUrlUtility.parseOverrideProperties(connection_uri, null); + + assertNull(parsedPSCProperties.get("BIGQUERY")); + 
assertThat(parsedPSCProperties.get("READ_API")) + .isEqualTo("https://bigquerystorage-myprivateserver.p.googleapis.com:443"); + assertThat(parsedPSCProperties.get("OAUTH2")) + .isEqualTo("https://oauth2-myprivateserver.p.googleapis.com"); + } + + @Test + public void testOverridePropertiesFromURIGoogleExperience() { + String connection_uri = + "bigquery://https://www.googleapis.com/bigquery/v2:443;PROJECTID=testProject;EndpointOverrides=" + + "BIGQUERY=https://bigquery-myprivateserver.p.googleapis.com," + + "READ_API=https://bigquerystorage-myprivateserver.p.googleapis.com:443;"; + + Map parsedPSCProperties = + BigQueryJdbcUrlUtility.parseOverrideProperties(connection_uri, null); + + assertThat(parsedPSCProperties.get("BIGQUERY")) + .isEqualTo("https://bigquery-myprivateserver.p.googleapis.com"); + assertThat(parsedPSCProperties.get("READ_API")) + .isEqualTo("https://bigquerystorage-myprivateserver.p.googleapis.com:443"); + } + + @Test + public void testAllOverridePropertiesFromURIGoogleExperience() { + String connection_uri = + "bigquery://https://www.googleapis.com/bigquery/v2:443;PROJECTID=testProject;EndpointOverrides=" + + "BIGQUERY=https://bigquery-myprivateserver.p.googleapis.com," + + "READ_API=https://bigquerystorage-myprivateserver.p.googleapis.com:443," + + "OAUTH2=https://oauth2-myprivateserver.p.googleapis.com," + + "STS=https://sts-myprivateserver.p.googleapis.com;"; + + Map parsedPSCProperties = + BigQueryJdbcUrlUtility.parseOverrideProperties(connection_uri, null); + + assertThat(parsedPSCProperties.get("BIGQUERY")) + .isEqualTo("https://bigquery-myprivateserver.p.googleapis.com"); + assertThat(parsedPSCProperties.get("READ_API")) + .isEqualTo("https://bigquerystorage-myprivateserver.p.googleapis.com:443"); + assertThat(parsedPSCProperties.get("OAUTH2")) + .isEqualTo("https://oauth2-myprivateserver.p.googleapis.com"); + assertThat(parsedPSCProperties.get("STS")) + .isEqualTo("https://sts-myprivateserver.p.googleapis.com"); + } + + @Test + public void 
testCaseSensitivityOverridePropertiesFromURI() { + String connection_uri = + "bigquery://https://www.googleapis.com/bigquery/v2:443;PROJECTID=testProject;endpointOverrides=" + + "bigQuery=https://bigquery-myprivateserver.p.googleapis.com," + + "READ_API=https://bigquerystorage-myprivateserver.p.googleapis.com:443;"; + + Map parsedPSCProperties = + BigQueryJdbcUrlUtility.parseOverrideProperties(connection_uri, null); + + assertThat(parsedPSCProperties.get("BIGQUERY")) + .isEqualTo("https://bigquery-myprivateserver.p.googleapis.com"); + assertThat(parsedPSCProperties.get("READ_API")) + .isEqualTo("https://bigquerystorage-myprivateserver.p.googleapis.com:443"); + } + + @Test + public void testParseJobCreationModeDefault() { + String connection_uri = + "jdbc:bigquery://https://www.googleapis.com/bigquery/v2:443;OAuthType=3;ProjectId=testProject;"; + + boolean jobCreationMode = BigQueryJdbcUrlUtility.parseJobCreationMode(connection_uri, null); + assertTrue(jobCreationMode); + } + + @Test + public void testParseJobCreationMode() { + String connection_uri = + "jdbc:bigquery://https://www.googleapis.com/bigquery/v2:443;OAuthType=3;ProjectId=testProject;" + + "JobCreationMode=1"; + + boolean jobCreationMode = BigQueryJdbcUrlUtility.parseJobCreationMode(connection_uri, null); + assertFalse(jobCreationMode); + + connection_uri = + "jdbc:bigquery://https://www.googleapis.com/bigquery/v2:443;OAuthType=3;ProjectId=testProject;" + + "JobCreationMode=2"; + + jobCreationMode = BigQueryJdbcUrlUtility.parseJobCreationMode(connection_uri, null); + assertTrue(jobCreationMode); + } + + @Test + public void testParseJobCreationModeInvalidInteger() { + String connection_uri = + "jdbc:bigquery://https://www.googleapis.com/bigquery/v2:443;OAuthType=3;ProjectId=testProject;" + + "JobCreationMode=25"; + + assertThrows( + NumberFormatException.class, + () -> BigQueryJdbcUrlUtility.parseJobCreationMode(connection_uri, null)); + } + + @Test + public void testParseJobCreationModeInvalidString() { 
+ String connection_uri = + "jdbc:bigquery://https://www.googleapis.com/bigquery/v2:443;OAuthType=3;ProjectId=testProject;" + + "JobCreationMode=JOB_CREATION_OPTIONAL"; + + assertThrows( + NumberFormatException.class, + () -> BigQueryJdbcUrlUtility.parseJobCreationMode(connection_uri, null)); + } + + @Test + public void testGetConnectionPropertyDefaultValue() { + // JUnit's assertEquals takes (expected, actual); keep the literal first so + // failure messages report the right direction. + assertEquals( + "https://sts.googleapis.com/v1/token", + BigQueryJdbcUrlUtility.getConnectionPropertyDefaultValue("BYOID_TokenUri")); + } + + @Test + public void testParseRetryTimeoutInSecs() { + String connection_uri = + "jdbc:bigquery://https://www.googleapis.com/bigquery/v2:443;OAuthType=3;ProjectId=testProject;" + + "Timeout=10"; + + long retryTimeoutInSeconds = + BigQueryJdbcUrlUtility.parseRetryTimeoutInSecs(connection_uri, null); + assertEquals(10, retryTimeoutInSeconds); + + connection_uri = + "jdbc:bigquery://https://www.googleapis.com/bigquery/v2:443;OAuthType=3;ProjectId=testProject;" + + "Timeout=20"; + + retryTimeoutInSeconds = BigQueryJdbcUrlUtility.parseRetryTimeoutInSecs(connection_uri, null); + assertEquals(20, retryTimeoutInSeconds); + } + + @Test + public void testParseRetryTimeoutInSecsDefault() { + String connection_uri = + "jdbc:bigquery://https://www.googleapis.com/bigquery/v2:443;OAuthType=3;ProjectId=testProject"; + + long retryTimeoutInSeconds = + BigQueryJdbcUrlUtility.parseRetryTimeoutInSecs(connection_uri, null); + assertEquals(0, retryTimeoutInSeconds); + } + + @Test + public void testParseRetryTimeoutSecondsInvalidLong() { + String connection_uri = + "jdbc:bigquery://https://www.googleapis.com/bigquery/v2:443;OAuthType=3;ProjectId=testProject;" + + "Timeout=invalid"; + + assertThrows( + NumberFormatException.class, + () -> BigQueryJdbcUrlUtility.parseRetryTimeoutInSecs(connection_uri, null)); + } + + @Test // was missing: without @Test, JUnit 4 silently skips this method + public void testParseJobTimeout() { + String connection_uri = + "jdbc:bigquery://https://www.googleapis.com/bigquery/v2:443;OAuthType=3;ProjectId=testProject;" + 
"JobTimeout=10"; + + long jobTimeout = BigQueryJdbcUrlUtility.parseJobTimeout(connection_uri, null); + assertEquals(10, jobTimeout); + + connection_uri = + "jdbc:bigquery://https://www.googleapis.com/bigquery/v2:443;OAuthType=3;ProjectId=testProject;" + + "JobTimeout=20"; + + jobTimeout = BigQueryJdbcUrlUtility.parseJobTimeout(connection_uri, null); + assertEquals(20, jobTimeout); + } + + @Test + public void testParseJobTimeoutDefault() { + String connection_uri = + "jdbc:bigquery://https://www.googleapis.com/bigquery/v2:443;OAuthType=3;ProjectId=testProject"; + + long jobTimeout = BigQueryJdbcUrlUtility.parseJobTimeout(connection_uri, null); + assertEquals(0L, jobTimeout); + } + + @Test + public void testParseJobTimeoutInvalid() { + String connection_uri = + "jdbc:bigquery://https://www.googleapis.com/bigquery/v2:443;OAuthType=3;ProjectId=testProject;" + + "JobTimeout=invalid"; + + assertThrows( + NumberFormatException.class, + () -> BigQueryJdbcUrlUtility.parseJobTimeout(connection_uri, null)); + } + + @Test + public void testParsePartnerTokenProperty() { + // Case with partner name and environment + String url = + "jdbc:bigquery://https://www.googleapis.com/bigquery/v2:443;" + + "PartnerToken=(GPN:partner_company; dev);ProjectId=MyBigQueryProject;"; + String expected = " (GPN:partner_company; dev)"; + String result = + BigQueryJdbcUrlUtility.parsePartnerTokenProperty(url, "testParsePartnerTokenProperty"); + assertThat(result).isEqualTo(expected); + + // Case with only partner name + url = + "jdbc:bigquery://https://www.googleapis.com/bigquery/v2:443;" + + "PartnerToken=(GPN:another_partner);ProjectId=MyBigQueryProject;"; + expected = " (GPN:another_partner)"; + result = BigQueryJdbcUrlUtility.parsePartnerTokenProperty(url, "testParsePartnerTokenProperty"); + assertThat(result).isEqualTo(expected); + + // Case when PartnerToken property is not present + url = + "jdbc:bigquery://https://www.googleapis.com/bigquery/v2:443;" + + "ProjectId=MyBigQueryProject;"; + 
result = BigQueryJdbcUrlUtility.parsePartnerTokenProperty(url, "testParsePartnerTokenProperty"); + assertNull(result); + + // Case when PartnerToken property is present but empty + url = "jdbc:bigquery://https://www.googleapis.com/bigquery/v2:443;PartnerToken=();"; + result = BigQueryJdbcUrlUtility.parsePartnerTokenProperty(url, "testParsePartnerTokenProperty"); + assertNull(result); + + // Case when PartnerToken property is present but without partner name + url = "jdbc:bigquery://https://www.googleapis.com/bigquery/v2:443;PartnerToken=(env);"; + result = BigQueryJdbcUrlUtility.parsePartnerTokenProperty(url, "testParsePartnerTokenProperty"); + assertNull(result); + + // Case with extra spaces around the values + url = + "jdbc:bigquery://https://www.googleapis.com/bigquery/v2:443;" + + "PartnerToken= ( GPN: partner_name ; test_env ) ;"; + expected = " (GPN: partner_name; test_env)"; + result = BigQueryJdbcUrlUtility.parsePartnerTokenProperty(url, "testParsePartnerTokenProperty"); + assertThat(result).isEqualTo(expected); + } + + @Test // was missing: without @Test, JUnit 4 silently skips this method + public void testParseRetryInitialDelayInSecs() { + String connection_uri = + "jdbc:bigquery://https://www.googleapis.com/bigquery/v2:443;OAuthType=3;ProjectId=testProject;" + + "RetryInitialDelay=10"; + + long retryInitialDelaySeconds = + BigQueryJdbcUrlUtility.parseRetryInitialDelayInSecs(connection_uri, null); + assertEquals(10, retryInitialDelaySeconds); + + connection_uri = + "jdbc:bigquery://https://www.googleapis.com/bigquery/v2:443;OAuthType=3;ProjectId=testProject;" + + "RetryInitialDelay=20"; + + retryInitialDelaySeconds = + BigQueryJdbcUrlUtility.parseRetryInitialDelayInSecs(connection_uri, null); + assertEquals(20, retryInitialDelaySeconds); + } + + @Test + public void testParseRetryInitialDelayInSecsDefault() { + String connection_uri = + "jdbc:bigquery://https://www.googleapis.com/bigquery/v2:443;OAuthType=3;ProjectId=testProject"; + + long retryInitialDelaySeconds = + 
BigQueryJdbcUrlUtility.parseRetryInitialDelayInSecs(connection_uri, null); + assertEquals(0, retryInitialDelaySeconds); + } + + @Test + public void testParseRetryInitialDelaySecondsInvalidLong() { + String connection_uri = + "jdbc:bigquery://https://www.googleapis.com/bigquery/v2:443;OAuthType=3;ProjectId=testProject;" + + "RetryInitialDelay=invalid"; + + assertThrows( + NumberFormatException.class, + () -> BigQueryJdbcUrlUtility.parseRetryInitialDelayInSecs(connection_uri, null)); + } + + @Test + public void testParseRetryMaxDelayInSecs() { + String connection_uri = + "jdbc:bigquery://https://www.googleapis.com/bigquery/v2:443;OAuthType=3;ProjectId=testProject;" + + "RetryMaxDelay=10"; + + long retryMaxDelaySeconds = + BigQueryJdbcUrlUtility.parseRetryMaxDelayInSecs(connection_uri, null); + assertEquals(10, retryMaxDelaySeconds); + + connection_uri = + "jdbc:bigquery://https://www.googleapis.com/bigquery/v2:443;OAuthType=3;ProjectId=testProject;" + + "RetryMaxDelay=20"; + + retryMaxDelaySeconds = BigQueryJdbcUrlUtility.parseRetryMaxDelayInSecs(connection_uri, null); + assertEquals(20, retryMaxDelaySeconds); + } + + @Test + public void testParseRetryMaxDelayInSecsDefault() { + String connection_uri = + "jdbc:bigquery://https://www.googleapis.com/bigquery/v2:443;OAuthType=3;ProjectId=testProject"; + + long retryMaxDelaySeconds = + BigQueryJdbcUrlUtility.parseRetryMaxDelayInSecs(connection_uri, null); + assertEquals(0, retryMaxDelaySeconds); + } + + @Test + public void testParseRetryMaxDelaySecondsInvalidLong() { + String connection_uri = + "jdbc:bigquery://https://www.googleapis.com/bigquery/v2:443;OAuthType=3;ProjectId=testProject;" + + "RetryMaxDelay=invalid"; + + assertThrows( + NumberFormatException.class, + () -> BigQueryJdbcUrlUtility.parseRetryMaxDelayInSecs(connection_uri, null)); + } + + @Test + public void testParseRequestGoogleDriveScope_Default() { + String url = + 
"jdbc:bigquery://https://www.googleapis.com/bigquery/v2:443;OAuthType=3;ProjectId=testProject;"; + Integer value = + BigQueryJdbcUrlUtility.parseIntProperty( + url, + BigQueryJdbcUrlUtility.REQUEST_GOOGLE_DRIVE_SCOPE_PROPERTY_NAME, + BigQueryJdbcUrlUtility.DEFAULT_REQUEST_GOOGLE_DRIVE_SCOPE_VALUE, + this.getClass().getName()); + assertEquals( + Integer.valueOf(BigQueryJdbcUrlUtility.DEFAULT_REQUEST_GOOGLE_DRIVE_SCOPE_VALUE), value); + } + + // Connection Pool Size + + @Test + public void testParseConnectionPoolSize() { + String connection_uri = + "jdbc:bigquery://https://www.googleapis.com/bigquery/v2:443;OAuthType=3;ProjectId=testProject;" + + "ConnectionPoolSize=10"; + long connectionPoolSize = BigQueryJdbcUrlUtility.parseConnectionPoolSize(connection_uri, null); + assertEquals(10, connectionPoolSize); + + connection_uri = + "jdbc:bigquery://https://www.googleapis.com/bigquery/v2:443;OAuthType=3;ProjectId=testProject;" + + "ConnectionPoolSize=20"; + + connectionPoolSize = BigQueryJdbcUrlUtility.parseConnectionPoolSize(connection_uri, null); + assertEquals(20, connectionPoolSize); + } + + @Test + public void testParseConnectionPoolSizeDefault() { + String connection_uri = + "jdbc:bigquery://https://www.googleapis.com/bigquery/v2:443;OAuthType=3;ProjectId=testProject"; + + long connectionPoolSize = BigQueryJdbcUrlUtility.parseConnectionPoolSize(connection_uri, null); + assertEquals(10, connectionPoolSize); + } + + @Test + public void testParseConnectionPoolSizeDefaultNullConnectionUrl() { + assertThrows( + BigQueryJdbcRuntimeException.class, + () -> BigQueryJdbcUrlUtility.parseConnectionPoolSize(null, null)); + } + + @Test + public void testParseConnectionPoolSizeDefaultEmptyConnectionUrl() { + assertThrows( + BigQueryJdbcRuntimeException.class, + () -> BigQueryJdbcUrlUtility.parseConnectionPoolSize("", null)); + } + + @Test + public void testParseConnectionPoolSizeInvalidLong() { + String connection_uri = + 
"jdbc:bigquery://https://www.googleapis.com/bigquery/v2:443;OAuthType=3;ProjectId=testProject;" + + "ConnectionPoolSize=invalid"; + + assertThrows( + NumberFormatException.class, + () -> BigQueryJdbcUrlUtility.parseConnectionPoolSize(connection_uri, null)); + } + + // Listener Pool Size + + @Test + public void testParseListenerPoolSize() { + String connection_uri = + "jdbc:bigquery://https://www.googleapis.com/bigquery/v2:443;OAuthType=3;ProjectId=testProject;" + + "ListenerPoolSize=10"; + long listenerPoolSize = BigQueryJdbcUrlUtility.parseListenerPoolSize(connection_uri, null); + assertEquals(10, listenerPoolSize); + + connection_uri = + "jdbc:bigquery://https://www.googleapis.com/bigquery/v2:443;OAuthType=3;ProjectId=testProject;" + + "ListenerPoolSize=20"; + + listenerPoolSize = BigQueryJdbcUrlUtility.parseListenerPoolSize(connection_uri, null); + assertEquals(20, listenerPoolSize); + } + + @Test + public void testParseListenerPoolSizeDefault() { + String connection_uri = + "jdbc:bigquery://https://www.googleapis.com/bigquery/v2:443;OAuthType=3;ProjectId=testProject"; + + long listenerPoolSize = BigQueryJdbcUrlUtility.parseListenerPoolSize(connection_uri, null); + assertEquals(10, listenerPoolSize); + } + + @Test + public void testParseListenerPoolSizeDefaultNullConnectionUrl() { + assertThrows( + BigQueryJdbcRuntimeException.class, + () -> BigQueryJdbcUrlUtility.parseListenerPoolSize(null, null)); + } + + @Test + public void testParseListenerPoolSizeDefaultEmptyConnectionUrl() { + assertThrows( + BigQueryJdbcRuntimeException.class, + () -> BigQueryJdbcUrlUtility.parseListenerPoolSize("", null)); + } + + @Test + public void testParseListenerPoolSizeInvalidLong() { + String connection_uri = + "jdbc:bigquery://https://www.googleapis.com/bigquery/v2:443;OAuthType=3;ProjectId=testProject;" + + "ListenerPoolSize=invalid"; + + assertThrows( + NumberFormatException.class, + () -> BigQueryJdbcUrlUtility.parseListenerPoolSize(connection_uri, null)); + } + + @Test + 
public void testParseStringListProperty_NullOrEmpty() { + String url = + "jdbc:bigquery://https://www.googleapis.com/bigquery/v2:443;" + + "OAuthType=2;ProjectId=MyBigQueryProject;SomeProp="; + List result = + BigQueryJdbcUrlUtility.parseStringListProperty(url, "NonExistentProp", "TestClass"); + assertEquals(Collections.emptyList(), result); + + result = BigQueryJdbcUrlUtility.parseStringListProperty(url, "SomeProp", "TestClass"); + assertEquals(Collections.emptyList(), result); + + String urlWithEmptyList = + "jdbc:bigquery://https://www.googleapis.com/bigquery/v2:443;" + + "OAuthType=2;ProjectId=MyBigQueryProject;ListProp=,,"; + result = + BigQueryJdbcUrlUtility.parseStringListProperty(urlWithEmptyList, "ListProp", "TestClass"); + assertEquals(Collections.emptyList(), result); + } + + @Test + public void testParseStringListProperty_SingleValue() { + String url = + "jdbc:bigquery://https://www.googleapis.com/bigquery/v2:443;" + + "OAuthType=2;ProjectId=MyBigQueryProject;ListProp=project1"; + List result = + BigQueryJdbcUrlUtility.parseStringListProperty(url, "ListProp", "TestClass"); + assertEquals(Collections.singletonList("project1"), result); + } + + @Test + public void testParseStringListProperty_MultipleValues() { + String url = + "jdbc:bigquery://https://www.googleapis.com/bigquery/v2:443;" + + "OAuthType=2;ProjectId=MyBigQueryProject;ListProp=project1,project2,project3"; + List result = + BigQueryJdbcUrlUtility.parseStringListProperty(url, "ListProp", "TestClass"); + assertEquals(Arrays.asList("project1", "project2", "project3"), result); + } + + @Test + public void testParseIntProperty_ValidInteger() { + String url = + "jdbc:bigquery://https://www.googleapis.com/bigquery/v2:443;" + + "OAuthType=2;ProjectId=MyBigQueryProject;SomeIntProp=123"; + Integer defaultValue = 0; + Integer result = + BigQueryJdbcUrlUtility.parseIntProperty(url, "SomeIntProp", defaultValue, "TestClass"); + assertEquals(Integer.valueOf(123), result); + } + + @Test + public void 
testParseIntProperty_PropertyNotPresent() { + String url = + "jdbc:bigquery://https://www.googleapis.com/bigquery/v2:443;" + + "OAuthType=2;ProjectId=MyBigQueryProject;SomeIntProp=123"; + Integer defaultValue = 42; + Integer result = + BigQueryJdbcUrlUtility.parseIntProperty(url, "MissingIntProp", defaultValue, "TestClass"); + assertEquals(defaultValue, result); + } + + @Test + public void testParseIntProperty_InvalidIntegerValue() { + String url = + "jdbc:bigquery://https://www.googleapis.com/bigquery/v2:443;" + + "OAuthType=2;ProjectId=MyBigQueryProject;InvalidIntProp=abc"; + Integer defaultValue = 77; + assertThrows( + IllegalArgumentException.class, + () -> + BigQueryJdbcUrlUtility.parseIntProperty( + url, "InvalidIntProp", defaultValue, "TestClass")); + } + + @Test + public void testParseIntProperty_EmptyStringValue() { + String url = + "jdbc:bigquery://https://www.googleapis.com/bigquery/v2:443;" + + "OAuthType=2;ProjectId=MyBigQueryProject;EmptyIntProp="; + Integer defaultValue = 88; + assertThrows( + IllegalArgumentException.class, + () -> + BigQueryJdbcUrlUtility.parseIntProperty( + url, "EmptyIntProp", defaultValue, "TestClass")); + } + + @Test + public void testParseMaxBytesBilled() { + String connection_uri = + "jdbc:bigquery://https://www.googleapis.com/bigquery/v2:443;OAuthType=3;ProjectId=testProject;" + + "MaximumBytesBilled=10000"; + + long maxBytesBilled = BigQueryJdbcUrlUtility.parseMaximumBytesBilled(connection_uri, null); + assertEquals(10000, maxBytesBilled); + } + + @Test + public void testParseMaxBytesBilledDefault() { + String connection_uri = + "jdbc:bigquery://https://www.googleapis.com/bigquery/v2:443;OAuthType=3;ProjectId=testProject"; + + long maxBytesBilled = BigQueryJdbcUrlUtility.parseMaximumBytesBilled(connection_uri, null); + assertEquals(0, maxBytesBilled); + } + + @Test + public void testParseMaxBytesBilledNullUrl() { + assertThrows( + BigQueryJdbcRuntimeException.class, + () -> 
BigQueryJdbcUrlUtility.parseMaximumBytesBilled(null, null)); + } + + @Test + public void testParseMaxBytesBilledEmptyUrl() { + assertThrows( + BigQueryJdbcRuntimeException.class, + () -> BigQueryJdbcUrlUtility.parseMaximumBytesBilled("", null)); + } + + @Test + public void testParseMaxBytesBilledInvalidLong() { + String connection_uri = + "jdbc:bigquery://https://www.googleapis.com/bigquery/v2:443;OAuthType=3;ProjectId=testProject;" + + "MaximumBytesBilled=invalid"; + + assertThrows( + NumberFormatException.class, + () -> BigQueryJdbcUrlUtility.parseMaximumBytesBilled(connection_uri, null)); + } + + @Test + public void testParseLabels() { + String connection_uri = + "jdbc:bigquery://https://www.googleapis.com/bigquery/v2:443;OAuthType=3;ProjectId=testProject;" + + "Labels=k1=v1,k2=v2,k3=v3;"; + + Map labels = BigQueryJdbcUrlUtility.parseLabels(connection_uri, null); + assertNotNull(labels); + assertFalse(labels.isEmpty()); + assertEquals(3, labels.size()); + + Map expected = + new HashMap() { + { + put("k1", "v1"); + put("k2", "v2"); + put("k3", "v3"); + } + }; + + assertTrue(Maps.difference(expected, labels).areEqual()); + } + + @Test + public void testParseLabelsEmpty() { + String connection_uri = + "jdbc:bigquery://https://www.googleapis.com/bigquery/v2:443;OAuthType=3;ProjectId=testProject;"; + + Map labels = BigQueryJdbcUrlUtility.parseLabels(connection_uri, null); + assertNull(labels); + } +} diff --git a/google-cloud-bigquery-jdbc/src/test/java/com/google/cloud/bigquery/jdbc/BigQueryJsonArrayOfPrimitivesTest.java b/google-cloud-bigquery-jdbc/src/test/java/com/google/cloud/bigquery/jdbc/BigQueryJsonArrayOfPrimitivesTest.java new file mode 100644 index 000000000..6f10ae79a --- /dev/null +++ b/google-cloud-bigquery-jdbc/src/test/java/com/google/cloud/bigquery/jdbc/BigQueryJsonArrayOfPrimitivesTest.java @@ -0,0 +1,342 @@ +/* + * Copyright 2023 Google LLC + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except 
in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package com.google.cloud.bigquery.jdbc; + +import static com.google.cloud.bigquery.StandardSQLTypeName.BIGNUMERIC; +import static com.google.cloud.bigquery.StandardSQLTypeName.BOOL; +import static com.google.cloud.bigquery.StandardSQLTypeName.BYTES; +import static com.google.cloud.bigquery.StandardSQLTypeName.DATE; +import static com.google.cloud.bigquery.StandardSQLTypeName.DATETIME; +import static com.google.cloud.bigquery.StandardSQLTypeName.FLOAT64; +import static com.google.cloud.bigquery.StandardSQLTypeName.GEOGRAPHY; +import static com.google.cloud.bigquery.StandardSQLTypeName.INT64; +import static com.google.cloud.bigquery.StandardSQLTypeName.NUMERIC; +import static com.google.cloud.bigquery.StandardSQLTypeName.STRING; +import static com.google.cloud.bigquery.StandardSQLTypeName.TIME; +import static com.google.cloud.bigquery.StandardSQLTypeName.TIMESTAMP; +import static com.google.cloud.bigquery.jdbc.BigQueryErrorMessage.CUSTOMER_TYPE_MAPPING_NOT_SUPPORTED; +import static com.google.cloud.bigquery.jdbc.BigQueryErrorMessage.INVALID_ARRAY; +import static com.google.cloud.bigquery.jdbc.utils.TestUtilities.arraySchemaAndValue; +import static com.google.cloud.bigquery.jdbc.utils.TestUtilities.nestedResultSetToColumnLists; +import static com.google.common.truth.Truth.assertThat; +import static java.time.Month.MARCH; +import static java.util.Arrays.copyOfRange; +import static java.util.Collections.emptyMap; +import static org.junit.Assert.assertThrows; + +import com.google.cloud.Tuple; +import 
com.google.cloud.bigquery.Field; +import com.google.cloud.bigquery.FieldValue; +import com.google.cloud.bigquery.StandardSQLTypeName; +import com.google.cloud.bigquery.jdbc.rules.TimeZoneRule; +import com.google.common.io.BaseEncoding; +import java.math.BigDecimal; +import java.sql.Array; +import java.sql.Date; +import java.sql.ResultSet; +import java.sql.SQLException; +import java.sql.SQLFeatureNotSupportedException; +import java.sql.Time; +import java.sql.Timestamp; +import java.sql.Types; +import java.time.LocalDate; +import java.time.LocalDateTime; +import java.time.LocalTime; +import java.util.ArrayList; +import java.util.Arrays; +import java.util.Collection; +import java.util.concurrent.TimeUnit; +import java.util.stream.Stream; +import org.junit.Before; +import org.junit.ClassRule; +import org.junit.Test; +import org.junit.function.ThrowingRunnable; +import org.junit.runner.RunWith; +import org.junit.runners.Parameterized; +import org.junit.runners.Parameterized.Parameters; + +@RunWith(Parameterized.class) +public class BigQueryJsonArrayOfPrimitivesTest { + + private final Field schema; + private final FieldValue arrayValues; + private final Object[] expected; + private final int javaSqlTypeCode; + private Array array; + private final StandardSQLTypeName currentType; + + @ClassRule public static final TimeZoneRule timeZoneRule = new TimeZoneRule("UTC"); + + public BigQueryJsonArrayOfPrimitivesTest( + StandardSQLTypeName currentType, + Tuple schemaAndValue, + Object[] expected, + int javaSqlTypeCode) { + this.currentType = currentType; + this.schema = schemaAndValue.x(); + this.arrayValues = schemaAndValue.y(); + this.expected = expected; + this.javaSqlTypeCode = javaSqlTypeCode; + } + + @Before + public void setUp() { + array = new BigQueryJsonArray(this.schema, this.arrayValues); + } + + @Parameters(name = "{index}: primitive array of {0}") + public static Collection data() { + timeZoneRule.enforce(); + LocalDateTime aTimeStamp = LocalDateTime.of(2023, 
MARCH, 30, 11, 14, 19, 820227000); + LocalDate aDate = LocalDate.of(2023, MARCH, 30); + LocalTime aTime = LocalTime.of(11, 14, 19, 820227000); + return Arrays.asList( + new Object[][] { + { + INT64, + arraySchemaAndValue(INT64, "10", "20", "30", "40"), + new Long[] {10L, 20L, 30L, 40L}, + Types.BIGINT + }, + { + BOOL, + arraySchemaAndValue(BOOL, "true", "false", "false", "true"), + new Boolean[] {true, false, false, true}, + Types.BOOLEAN + }, + { + FLOAT64, + arraySchemaAndValue(FLOAT64, "11.2", "33.4", "55.6", "77.8"), + new Double[] {11.2, 33.4, 55.6, 77.8}, + Types.DOUBLE + }, + { + NUMERIC, + arraySchemaAndValue(NUMERIC, "11.2657", "33.4657", "55.6657", "77.8657"), + new BigDecimal[] { + new BigDecimal("11.2657"), + new BigDecimal("33.4657"), + new BigDecimal("55.6657"), + new BigDecimal("77.8657") + }, + Types.NUMERIC + }, + { + BIGNUMERIC, + arraySchemaAndValue(BIGNUMERIC, "11.2657", "33.4657", "55.6657", "77.8657"), + new BigDecimal[] { + new BigDecimal("11.2657"), + new BigDecimal("33.4657"), + new BigDecimal("55.6657"), + new BigDecimal("77.8657") + }, + Types.NUMERIC + }, + { + STRING, + arraySchemaAndValue(STRING, "one", "two", "three", "four"), + new String[] {"one", "two", "three", "four"}, + Types.NVARCHAR + }, + { + TIMESTAMP, + arraySchemaAndValue( + TIMESTAMP, + "1680174859.8202269", + "1680261259.8202269", + "1680347659.8202269", + "1680434059.8202269"), + new Timestamp[] { + Timestamp.valueOf(aTimeStamp), // 2023-03-30 16:44:19.82 + Timestamp.valueOf(aTimeStamp.plusDays(1)), + Timestamp.valueOf(aTimeStamp.plusDays(2)), + Timestamp.valueOf(aTimeStamp.plusDays(3)) + }, + Types.TIMESTAMP + }, + { + DATE, + arraySchemaAndValue(DATE, "2023-03-30", "2023-03-31", "2023-04-01", "2023-04-02"), + new Date[] { + Date.valueOf(aDate), + Date.valueOf(aDate.plusDays(1)), + Date.valueOf(aDate.plusDays(2)), + Date.valueOf(aDate.plusDays(3)) + }, + Types.DATE + }, + { + TIME, + arraySchemaAndValue( + TIME, "11:14:19.820227", "11:14:20.820227", "11:14:21.820227", 
"11:14:22.820227"), + new Time[] { + new Time(TimeUnit.NANOSECONDS.toMillis(aTime.toNanoOfDay())), + new Time(TimeUnit.NANOSECONDS.toMillis(aTime.plusSeconds(1).toNanoOfDay())), + new Time(TimeUnit.NANOSECONDS.toMillis(aTime.plusSeconds(2).toNanoOfDay())), + new Time(TimeUnit.NANOSECONDS.toMillis(aTime.plusSeconds(3).toNanoOfDay())) + }, + Types.TIME + }, + { + DATETIME, + arraySchemaAndValue( + DATETIME, + "2023-03-30T11:14:19.820227", + "2023-03-30T11:15:19.820227", + "2023-03-30T11:16:19.820227", + "2023-03-30T11:17:19.820227"), + new Timestamp[] { + Timestamp.valueOf("2023-03-30 11:14:19.820227"), + Timestamp.valueOf("2023-03-30 11:15:19.820227"), + Timestamp.valueOf("2023-03-30 11:16:19.820227"), + Timestamp.valueOf("2023-03-30 11:17:19.820227") + }, + Types.TIMESTAMP + }, + { + GEOGRAPHY, + arraySchemaAndValue( + GEOGRAPHY, "POINT(-122 47)", "POINT(-122 48)", "POINT(-121 47)", "POINT(-123 48)"), + new String[] {"POINT(-122 47)", "POINT(-122 48)", "POINT(-121 47)", "POINT(-123 48)"}, + Types.OTHER + }, + { + BYTES, + arraySchemaAndValue( + BYTES, + Stream.of("one", "two", "three", "four") + .map(s -> BaseEncoding.base64().encode(s.getBytes())) + .toArray(String[]::new)), + new byte[][] { + "one".getBytes(), "two".getBytes(), "three".getBytes(), "four".getBytes() + }, + Types.VARBINARY + } + }); + } + + @Test + public void getArray() throws SQLException { + assertThat(array.getArray()).isEqualTo(this.expected); + } + + @Test + public void getSlicedArray() throws SQLException { + int fromIndex = 1; + int toIndexExclusive = 3; + Object[] expectedSlicedArray = + copyOfRange(this.expected, fromIndex, toIndexExclusive); // copying index(1,2) + + // the first element is at index 1 + assertThat(array.getArray(fromIndex + 1, 2)).isEqualTo(expectedSlicedArray); + } + + @Test + public void getSlicedArrayWhenCountIsGreaterThanOriginalArrayLength() { + IllegalArgumentException illegalArgumentException = + assertThrows(IllegalArgumentException.class, () -> array.getArray(2, 
10)); + assertThat(illegalArgumentException.getMessage()) + .isEqualTo("The array index is out of range: 12, number of elements: 4."); + } + + @Test + public void getResultSet() throws SQLException { + ResultSet resultSet = this.array.getResultSet(); + Tuple, ArrayList> indexAndValues = + nestedResultSetToColumnLists(resultSet); + ArrayList indexList = indexAndValues.x(); + ArrayList columnValues = indexAndValues.y(); + + assertThat(indexList.toArray()).isEqualTo(new Object[] {1, 2, 3, 4}); + assertThat(columnValues.toArray()).isEqualTo(this.expected); + } + + @Test + public void getSlicedResultSet() throws SQLException { + int fromIndex = 1; + int toIndexExclusive = 3; + Object[] expectedSlicedArray = + copyOfRange(this.expected, fromIndex, toIndexExclusive); // copying index(1,2) + + // the first element is at index 1 + ResultSet resultSet = array.getResultSet(fromIndex + 1, 2); + + Tuple, ArrayList> indexAndValues = + nestedResultSetToColumnLists(resultSet); + ArrayList indexList = indexAndValues.x(); + ArrayList columnValues = indexAndValues.y(); + + assertThat(indexList.toArray()).isEqualTo(new Object[] {2, 3}); + assertThat(columnValues.toArray()).isEqualTo(expectedSlicedArray); + } + + @Test + public void getSlicedResultSetWhenCountIsGreaterThanOriginalArrayLength() { + IllegalArgumentException illegalArgumentException = + assertThrows(IllegalArgumentException.class, () -> array.getResultSet(2, 10)); + assertThat(illegalArgumentException.getMessage()) + .isEqualTo("The array index is out of range: 12, number of elements: 4."); + } + + @Test + public void getBaseTypeName() throws SQLException { + assertThat(array.getBaseTypeName()).isEqualTo(this.currentType.name()); + } + + @Test + public void getBaseType() throws SQLException { + assertThat(array.getBaseType()).isEqualTo(this.javaSqlTypeCode); + } + + @Test + public void free() throws SQLException { + this.array.free(); + + ensureArrayIsInvalid(() -> array.getArray()); + ensureArrayIsInvalid(() -> 
array.getArray(1, 2)); + ensureArrayIsInvalid(() -> array.getResultSet()); + ensureArrayIsInvalid(() -> array.getResultSet(1, 2)); + ensureArrayIsInvalid(() -> array.getBaseTypeName()); + ensureArrayIsInvalid(() -> array.getBaseType()); + } + + @Test + public void getArrayWithCustomTypeMappingsIsNotSupported() { + Exception exception1 = + assertThrows(SQLFeatureNotSupportedException.class, () -> array.getArray(emptyMap())); + Exception exception2 = + assertThrows(SQLFeatureNotSupportedException.class, () -> array.getArray(1, 2, emptyMap())); + assertThat(exception1.getMessage()).isEqualTo(CUSTOMER_TYPE_MAPPING_NOT_SUPPORTED); + assertThat(exception2.getMessage()).isEqualTo(CUSTOMER_TYPE_MAPPING_NOT_SUPPORTED); + } + + @Test + public void getResultSetWithCustomTypeMappingsIsNotSupported() { + Exception exception1 = + assertThrows(SQLFeatureNotSupportedException.class, () -> array.getResultSet(emptyMap())); + Exception exception2 = + assertThrows( + SQLFeatureNotSupportedException.class, () -> array.getResultSet(1, 2, emptyMap())); + assertThat(exception1.getMessage()).isEqualTo(CUSTOMER_TYPE_MAPPING_NOT_SUPPORTED); + assertThat(exception2.getMessage()).isEqualTo(CUSTOMER_TYPE_MAPPING_NOT_SUPPORTED); + } + + private void ensureArrayIsInvalid(ThrowingRunnable block) { + Exception exception = assertThrows(IllegalStateException.class, block); + assertThat(exception.getMessage()).isEqualTo(INVALID_ARRAY); + } +} diff --git a/google-cloud-bigquery-jdbc/src/test/java/com/google/cloud/bigquery/jdbc/BigQueryJsonArrayOfStructTest.java b/google-cloud-bigquery-jdbc/src/test/java/com/google/cloud/bigquery/jdbc/BigQueryJsonArrayOfStructTest.java new file mode 100644 index 000000000..b390d642e --- /dev/null +++ b/google-cloud-bigquery-jdbc/src/test/java/com/google/cloud/bigquery/jdbc/BigQueryJsonArrayOfStructTest.java @@ -0,0 +1,204 @@ +/* + * Copyright 2023 Google LLC + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in 
compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package com.google.cloud.bigquery.jdbc; + +import static com.google.cloud.bigquery.FieldValue.Attribute.PRIMITIVE; +import static com.google.cloud.bigquery.LegacySQLTypeName.RECORD; +import static com.google.cloud.bigquery.jdbc.BigQueryErrorMessage.CUSTOMER_TYPE_MAPPING_NOT_SUPPORTED; +import static com.google.cloud.bigquery.jdbc.BigQueryErrorMessage.INVALID_ARRAY; +import static com.google.cloud.bigquery.jdbc.utils.TestUtilities.nestedResultSetToColumnLists; +import static com.google.common.truth.Truth.assertThat; +import static java.util.Arrays.asList; +import static java.util.Collections.emptyMap; +import static org.junit.Assert.assertThrows; + +import com.google.cloud.Tuple; +import com.google.cloud.bigquery.Field; +import com.google.cloud.bigquery.Field.Mode; +import com.google.cloud.bigquery.FieldList; +import com.google.cloud.bigquery.FieldValue; +import com.google.cloud.bigquery.FieldValue.Attribute; +import com.google.cloud.bigquery.FieldValueList; +import com.google.cloud.bigquery.LegacySQLTypeName; +import com.google.cloud.bigquery.StandardSQLTypeName; +import java.sql.Array; +import java.sql.ResultSet; +import java.sql.SQLException; +import java.sql.SQLFeatureNotSupportedException; +import java.sql.Struct; +import java.sql.Types; +import java.util.ArrayList; +import org.junit.Assert; +import org.junit.Before; +import org.junit.Test; +import org.junit.function.ThrowingRunnable; + +public class BigQueryJsonArrayOfStructTest { + + private Array array; + + @Before + public void setUp() { + FieldList 
profileSchema = + FieldList.of( + Field.newBuilder("name", LegacySQLTypeName.STRING).build(), + Field.newBuilder("age", LegacySQLTypeName.INTEGER).build(), + Field.newBuilder("adult", LegacySQLTypeName.BOOLEAN).build()); + + FieldValue record1 = + FieldValue.of( + Attribute.RECORD, + FieldValueList.of( + asList( + FieldValue.of(PRIMITIVE, "Arya"), + FieldValue.of(PRIMITIVE, "15"), + FieldValue.of(PRIMITIVE, "false")))); + FieldValue record2 = + FieldValue.of( + Attribute.RECORD, + FieldValueList.of( + asList( + FieldValue.of(PRIMITIVE, "Khal Drogo"), + FieldValue.of(PRIMITIVE, "35"), + FieldValue.of(PRIMITIVE, "true")))); + FieldValue record3 = + FieldValue.of( + Attribute.RECORD, + FieldValueList.of( + asList( + FieldValue.of(PRIMITIVE, "Ned Stark"), + FieldValue.of(PRIMITIVE, "45"), + FieldValue.of(PRIMITIVE, "true")))); + FieldValue record4 = + FieldValue.of( + Attribute.RECORD, + FieldValueList.of( + asList( + FieldValue.of(PRIMITIVE, "Jon Snow"), + FieldValue.of(PRIMITIVE, "25"), + FieldValue.of(PRIMITIVE, "true")))); + + Field arrayOfStructSchema = + Field.newBuilder("profiles", RECORD, profileSchema).setMode(Mode.REPEATED).build(); + + FieldValue arrayOfStructValue = + FieldValue.of( + Attribute.REPEATED, FieldValueList.of(asList(record1, record2, record3, record4))); + array = new BigQueryJsonArray(arrayOfStructSchema, arrayOfStructValue); + } + + @Test + public void getArray() throws SQLException { + Struct[] structArray = (Struct[]) array.getArray(); + + assertThat(structArray.length).isEqualTo(4); + assertThat(structArray[0].getAttributes()).isEqualTo(asList("Arya", 15L, false).toArray()); + assertThat(structArray[1].getAttributes()).isEqualTo(asList("Khal Drogo", 35L, true).toArray()); + assertThat(structArray[2].getAttributes()).isEqualTo(asList("Ned Stark", 45L, true).toArray()); + assertThat(structArray[3].getAttributes()).isEqualTo(asList("Jon Snow", 25L, true).toArray()); + } + + @Test + public void getSlicedArray() throws SQLException { + Struct[] 
structArray = (Struct[]) array.getArray(2, 2); + + assertThat(structArray.length).isEqualTo(2); + assertThat(structArray[0].getAttributes()).isEqualTo(asList("Khal Drogo", 35L, true).toArray()); + assertThat(structArray[1].getAttributes()).isEqualTo(asList("Ned Stark", 45L, true).toArray()); + } + + @Test + public void getSlicedArrayWhenCountIsGreaterThanOriginalArrayLength() { + IllegalArgumentException illegalArgumentException = + assertThrows(IllegalArgumentException.class, () -> array.getArray(2, 10)); + assertThat(illegalArgumentException.getMessage()) + .isEqualTo("The array index is out of range: 12, number of elements: 4."); + } + + @Test + public void getResultSet() throws SQLException { + ResultSet resultSet = array.getResultSet(); + Tuple, ArrayList> indexAndValues = + nestedResultSetToColumnLists(resultSet); + + ArrayList indexList = indexAndValues.x(); + ArrayList structs = indexAndValues.y(); + + assertThat(indexList.toArray()).isEqualTo(new Object[] {1, 2, 3, 4}); + assertThat(structs.get(0).getAttributes()).isEqualTo(asList("Arya", 15L, false).toArray()); + assertThat(structs.get(1).getAttributes()).isEqualTo(asList("Khal Drogo", 35L, true).toArray()); + assertThat(structs.get(2).getAttributes()).isEqualTo(asList("Ned Stark", 45L, true).toArray()); + assertThat(structs.get(3).getAttributes()).isEqualTo(asList("Jon Snow", 25L, true).toArray()); + } + + @Test + public void getSlicedResultSet() throws SQLException { + ResultSet resultSet = array.getResultSet(2, 2); + Tuple, ArrayList> indexAndValues = + nestedResultSetToColumnLists(resultSet); + + ArrayList indexList = indexAndValues.x(); + ArrayList structs = indexAndValues.y(); + + assertThat(indexList.toArray()).isEqualTo(new Object[] {2, 3}); + assertThat(structs.get(0).getAttributes()).isEqualTo(asList("Khal Drogo", 35L, true).toArray()); + assertThat(structs.get(1).getAttributes()).isEqualTo(asList("Ned Stark", 45L, true).toArray()); + } + + @Test + public void 
getResultSetWhenCountIsGreaterThanOriginalArrayLength() { + IllegalArgumentException illegalArgumentException = + assertThrows(IllegalArgumentException.class, () -> array.getResultSet(2, 10)); + assertThat(illegalArgumentException.getMessage()) + .isEqualTo("The array index is out of range: 12, number of elements: 4."); + } + + @Test + public void getBaseTypeName() throws SQLException { + assertThat(array.getBaseTypeName()).isEqualTo(StandardSQLTypeName.STRUCT.name()); + } + + @Test + public void getBaseType() throws SQLException { + assertThat(array.getBaseType()).isEqualTo(Types.STRUCT); + } + + @Test + public void free() throws SQLException { + this.array.free(); + + ensureArrayIsInvalid(() -> array.getArray()); + ensureArrayIsInvalid(() -> array.getArray(1, 2)); + ensureArrayIsInvalid(() -> array.getBaseTypeName()); + ensureArrayIsInvalid(() -> array.getBaseType()); + } + + @Test + public void getArrayWithCustomTypeMappingsIsNotSupported() { + Exception exception1 = + assertThrows(SQLFeatureNotSupportedException.class, () -> array.getArray(emptyMap())); + Exception exception2 = + assertThrows(SQLFeatureNotSupportedException.class, () -> array.getArray(1, 2, emptyMap())); + assertThat(exception1.getMessage()).isEqualTo(CUSTOMER_TYPE_MAPPING_NOT_SUPPORTED); + assertThat(exception2.getMessage()).isEqualTo(CUSTOMER_TYPE_MAPPING_NOT_SUPPORTED); + } + + private void ensureArrayIsInvalid(ThrowingRunnable block) { + Exception exception = Assert.assertThrows(IllegalStateException.class, block); + assertThat(exception.getMessage()).isEqualTo(INVALID_ARRAY); + } +} diff --git a/google-cloud-bigquery-jdbc/src/test/java/com/google/cloud/bigquery/jdbc/BigQueryJsonResultSetTest.java b/google-cloud-bigquery-jdbc/src/test/java/com/google/cloud/bigquery/jdbc/BigQueryJsonResultSetTest.java new file mode 100644 index 000000000..4c715833f --- /dev/null +++ b/google-cloud-bigquery-jdbc/src/test/java/com/google/cloud/bigquery/jdbc/BigQueryJsonResultSetTest.java @@ -0,0 +1,476 @@ +/* 
+ * Copyright 2023 Google LLC + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package com.google.cloud.bigquery.jdbc; + +import static com.google.common.truth.Truth.assertThat; +import static java.time.Month.MARCH; +import static org.mockito.Mockito.mock; + +import com.google.cloud.bigquery.Field; +import com.google.cloud.bigquery.FieldList; +import com.google.cloud.bigquery.FieldValue; +import com.google.cloud.bigquery.FieldValue.Attribute; +import com.google.cloud.bigquery.FieldValueList; +import com.google.cloud.bigquery.LegacySQLTypeName; +import com.google.cloud.bigquery.Schema; +import com.google.cloud.bigquery.StandardSQLTypeName; +import com.google.cloud.bigquery.jdbc.rules.TimeZoneRule; +import com.google.common.collect.ImmutableList; +import com.google.common.io.BaseEncoding; +import com.google.common.io.CharStreams; +import java.io.BufferedReader; +import java.io.IOException; +import java.io.InputStream; +import java.io.InputStreamReader; +import java.io.Reader; +import java.math.BigDecimal; +import java.nio.charset.StandardCharsets; +import java.sql.Array; +import java.sql.Blob; +import java.sql.Date; +import java.sql.ResultSet; +import java.sql.SQLException; +import java.sql.Struct; +import java.sql.Time; +import java.sql.Timestamp; +import java.time.LocalDateTime; +import java.time.LocalTime; +import java.util.Calendar; +import java.util.TimeZone; +import java.util.concurrent.BlockingQueue; +import 
java.util.concurrent.LinkedBlockingDeque; +import java.util.concurrent.TimeUnit; +import org.junit.Before; +import org.junit.Rule; +import org.junit.Test; + +public class BigQueryJsonResultSetTest { + + @Rule public final TimeZoneRule timeZoneRule = new TimeZoneRule("UTC"); + + private static final FieldList fieldList = + FieldList.of( + Field.of("first", StandardSQLTypeName.BOOL), + Field.of("second", StandardSQLTypeName.INT64), + Field.of("third", StandardSQLTypeName.FLOAT64), + Field.of("fourth", StandardSQLTypeName.STRING), + Field.of("fifth", StandardSQLTypeName.TIMESTAMP), + Field.of("sixth", StandardSQLTypeName.BYTES), + Field.of("seventh", StandardSQLTypeName.STRING), + Field.newBuilder("eight", StandardSQLTypeName.INT64).setMode(Field.Mode.REPEATED).build(), + Field.of( + "ninth", + StandardSQLTypeName.STRUCT, + Field.of("first", StandardSQLTypeName.FLOAT64), + Field.of("second", StandardSQLTypeName.TIMESTAMP)), + Field.of("tenth", StandardSQLTypeName.NUMERIC), + Field.of("eleventh", StandardSQLTypeName.BIGNUMERIC), + Field.of("twelfth", LegacySQLTypeName.TIME), + Field.of("thirteenth", LegacySQLTypeName.INTEGER), + Field.of("fourteenth", LegacySQLTypeName.DATE)); + + LocalDateTime aTimeStamp = LocalDateTime.of(2023, MARCH, 30, 11, 14, 19, 820000000); + LocalTime aTime = LocalTime.of(11, 14, 19, 820000000); + private static final String STRING_VAL = "STRING_VALUE"; + private static final Schema QUERY_SCHEMA = Schema.of(fieldList); + private final FieldValue booleanFv = FieldValue.of(Attribute.PRIMITIVE, "false"); + private final FieldValue integerFv = FieldValue.of(Attribute.PRIMITIVE, "1"); + private final FieldValue floatFv = FieldValue.of(Attribute.PRIMITIVE, "1.5"); + private final FieldValue stringFv = FieldValue.of(Attribute.PRIMITIVE, STRING_VAL); + private final FieldValue timestampFv = + FieldValue.of(Attribute.PRIMITIVE, "1680174859.820000"); // 2023-03-30 16:44:19.82 + + private final FieldValue bytesFv = + FieldValue.of( + Attribute.PRIMITIVE, 
+ BaseEncoding.base64().encode(STRING_VAL.getBytes(StandardCharsets.UTF_8))); + + private final FieldValue nullFv = FieldValue.of(Attribute.PRIMITIVE, null); + private final FieldValue repeatedFv = + FieldValue.of( + Attribute.REPEATED, + FieldValueList.of( + ImmutableList.of( + FieldValue.of(Attribute.PRIMITIVE, "10"), + FieldValue.of(Attribute.PRIMITIVE, "20")))); + private final FieldValue recordFv = + FieldValue.of( + Attribute.RECORD, + FieldValueList.of( + ImmutableList.of(floatFv, timestampFv), fieldList.get("ninth").getSubFields())); + private final FieldValue numericFv = FieldValue.of(Attribute.PRIMITIVE, "12345678"); + private final FieldValue bigNumericFv = FieldValue.of(Attribute.PRIMITIVE, "12345678.99"); + + private final FieldValue timeFv = FieldValue.of(Attribute.PRIMITIVE, "11:14:19.820000"); + + private final FieldValue shortFv = FieldValue.of(Attribute.PRIMITIVE, "10"); + private final FieldValue dateFv = FieldValue.of(Attribute.PRIMITIVE, "2020-01-15"); + + private final FieldValueList fieldValues = + FieldValueList.of( + ImmutableList.of( + booleanFv, // 1 + integerFv, // 2 + floatFv, // 3 + stringFv, // 4 + timestampFv, // 5 + bytesFv, // 6 + nullFv, // 7 + repeatedFv, // 8 + recordFv, // 9 + numericFv, // 10 + bigNumericFv, // 11 + timeFv, // 12 + shortFv, // 13 + dateFv // 14 + ), + fieldList); + + private BigQueryFieldValueListWrapper bigQueryFieldValueListWrapperNested; + + private BigQueryStatement statement; + private BigQueryStatement statementForTwoRows; + + private BigQueryJsonResultSet bigQueryJsonResultSet; + private BigQueryJsonResultSet bigQueryJsonResultSetNested; + + private BlockingQueue buffer; + private BlockingQueue bufferWithTwoRows; + + @Before + public void setUp() { + // Buffer with one row + buffer = new LinkedBlockingDeque<>(2); + statement = mock(BigQueryStatement.class); + buffer.add(BigQueryFieldValueListWrapper.of(fieldList, fieldValues)); + buffer.add(BigQueryFieldValueListWrapper.of(null, null, true)); // last 
marker + Thread[] workerThreads = {new Thread()}; + bigQueryJsonResultSet = + BigQueryJsonResultSet.of(QUERY_SCHEMA, 1L, buffer, statement, workerThreads); + + // Buffer with 2 rows. + bufferWithTwoRows = new LinkedBlockingDeque<>(3); + statementForTwoRows = mock(BigQueryStatement.class); + bufferWithTwoRows.add(BigQueryFieldValueListWrapper.of(fieldList, fieldValues)); + bufferWithTwoRows.add(BigQueryFieldValueListWrapper.of(fieldList, fieldValues)); + bufferWithTwoRows.add(BigQueryFieldValueListWrapper.of(null, null, true)); // last marker + + // values for nested types + Field fieldEight = fieldList.get("eight"); + FieldValue fieldEightValue = fieldValues.get("eight"); + FieldList nestedFieldList = Schema.of(fieldEight).getFields(); + bigQueryFieldValueListWrapperNested = + BigQueryFieldValueListWrapper.getNestedFieldValueListWrapper( + nestedFieldList, fieldEightValue.getRepeatedValue()); + bigQueryJsonResultSetNested = + BigQueryJsonResultSet.getNestedResultSet( + Schema.of(fieldEight), + bigQueryFieldValueListWrapperNested, + 0, + fieldEightValue.getRepeatedValue().size()); + } + + private boolean resetResultSet() + throws SQLException { // re-initialises the resultset and moves the cursor to the first row + Thread[] workerThreads = {new Thread()}; + bigQueryJsonResultSet = + BigQueryJsonResultSet.of(QUERY_SCHEMA, 1L, buffer, statement, workerThreads); + return bigQueryJsonResultSet.next(); // move to the first row + } + + @Test + public void testIsClosed() { + assertThat(bigQueryJsonResultSet.isClosed()).isFalse(); + } + + @Test + public void testClose() { + // TODO(prashant): Add test case after close method is implemented + } + + @Test + public void testRowCount() throws SQLException { + Thread[] workerThreads = {new Thread()}; + // ResultSet with 1 row buffer and 1 total rows. 
+ BigQueryJsonResultSet bigQueryJsonResultSet2 = + BigQueryJsonResultSet.of(QUERY_SCHEMA, 1L, buffer, statement, workerThreads); + assertThat(resultSetRowCount(bigQueryJsonResultSet2)).isEqualTo(1); + // ResultSet with 2 rows buffer and 1 total rows. + bigQueryJsonResultSet2 = + BigQueryJsonResultSet.of( + QUERY_SCHEMA, 1L, bufferWithTwoRows, statementForTwoRows, workerThreads); + assertThat(resultSetRowCount(bigQueryJsonResultSet2)).isEqualTo(1); + } + + @Test + // This method tests iteration and Resultset's type getters + public void testIteration() throws SQLException { + int cnt = 0; + assertThat(bigQueryJsonResultSet.isBeforeFirst()).isTrue(); + while (bigQueryJsonResultSet.next()) { + cnt++; + assertThat(bigQueryJsonResultSet.isLast()).isTrue(); // we have one test row + assertThat(bigQueryJsonResultSet.isFirst()).isTrue(); // we have one test row + assertThat(bigQueryJsonResultSet.getBoolean("first")).isFalse(); + assertThat(bigQueryJsonResultSet.getBoolean(1)).isFalse(); + assertThat(bigQueryJsonResultSet.getInt("second")).isEqualTo(1); + assertThat(bigQueryJsonResultSet.getInt(2)).isEqualTo(1); + assertThat(bigQueryJsonResultSet.getFloat("third")).isEqualTo(1.5f); + assertThat(bigQueryJsonResultSet.getFloat(3)).isEqualTo(1.5f); + assertThat(bigQueryJsonResultSet.getString("fourth")).isEqualTo(STRING_VAL); + assertThat(bigQueryJsonResultSet.getString(4)).isEqualTo(STRING_VAL); + assertThat(bigQueryJsonResultSet.getTimestamp("fifth")) + .isEqualTo(Timestamp.valueOf(aTimeStamp)); + assertThat(bigQueryJsonResultSet.getTimestamp(5)).isEqualTo(Timestamp.valueOf(aTimeStamp)); + assertThat(bigQueryJsonResultSet.wasNull()).isFalse(); + assertThat(bigQueryJsonResultSet.getObject("seventh")).isNull(); // test null + assertThat(bigQueryJsonResultSet.getObject(7)).isNull(); + assertThat(bigQueryJsonResultSet.wasNull()).isTrue(); + assertThat(bigQueryJsonResultSet.getArray("eight").getArray()) + .isEqualTo(new Object[] {10L, 20L}); + 
assertThat(bigQueryJsonResultSet.getArray(8).getArray()).isEqualTo(new Object[] {10L, 20L}); + assertThat(((Array) bigQueryJsonResultSet.getObject("eight")).getArray()) + .isEqualTo(new Object[] {10L, 20L}); + assertThat(((Array) bigQueryJsonResultSet.getObject(8)).getArray()) + .isEqualTo(new Object[] {10L, 20L}); + assertThat(((Struct) bigQueryJsonResultSet.getObject("ninth")).getAttributes()) + .isEqualTo(new Object[] {1.5, Timestamp.valueOf(aTimeStamp)}); + assertThat(((Struct) bigQueryJsonResultSet.getObject(9)).getAttributes()) + .isEqualTo(new Object[] {1.5, Timestamp.valueOf(aTimeStamp)}); + assertThat(bigQueryJsonResultSet.getLong("tenth")).isEqualTo(12345678L); + assertThat(bigQueryJsonResultSet.getLong(10)).isEqualTo(12345678L); + assertThat(bigQueryJsonResultSet.getDouble("eleventh")).isEqualTo(12345678.99D); + assertThat(bigQueryJsonResultSet.getDouble(11)).isEqualTo(12345678.99D); + Time expectedTime = new Time(TimeUnit.NANOSECONDS.toMillis(aTime.toNanoOfDay())); + assertThat(bigQueryJsonResultSet.getTime("twelfth")).isEqualTo(expectedTime); + assertThat(bigQueryJsonResultSet.getTime(12)).isEqualTo(expectedTime); + assertThat(bigQueryJsonResultSet.getShort("thirteenth")).isEqualTo((short) 10); + assertThat(bigQueryJsonResultSet.getShort(13)).isEqualTo((short) 10); + } + assertThat(cnt).isEqualTo(1); + assertThat(bigQueryJsonResultSet.next()).isFalse(); + assertThat(bigQueryJsonResultSet.isAfterLast()).isTrue(); + } + + @Test + public void testGetObjectWithPrimitives() throws SQLException { + bigQueryJsonResultSet.next(); + assertThat(bigQueryJsonResultSet.getObject("first")).isEqualTo(false); + assertThat(bigQueryJsonResultSet.getObject(1)).isEqualTo(false); + assertThat(bigQueryJsonResultSet.getObject("second")).isEqualTo(1); + assertThat(bigQueryJsonResultSet.getObject(2)).isEqualTo(1); + assertThat(bigQueryJsonResultSet.getObject("third")).isEqualTo(1.5); + assertThat(bigQueryJsonResultSet.getObject(3)).isEqualTo(1.5); + 
assertThat(bigQueryJsonResultSet.getObject("fourth")).isEqualTo(STRING_VAL); + assertThat(bigQueryJsonResultSet.getObject(4)).isEqualTo(STRING_VAL); + assertThat(bigQueryJsonResultSet.getObject("fifth")).isEqualTo(Timestamp.valueOf(aTimeStamp)); + assertThat(bigQueryJsonResultSet.getObject(5)).isEqualTo(Timestamp.valueOf(aTimeStamp)); + assertThat(bigQueryJsonResultSet.getObject("sixth")) + .isEqualTo(STRING_VAL.getBytes(StandardCharsets.UTF_8)); + assertThat(bigQueryJsonResultSet.getObject(6)) + .isEqualTo(STRING_VAL.getBytes(StandardCharsets.UTF_8)); + assertThat(bigQueryJsonResultSet.wasNull()).isFalse(); + assertThat(bigQueryJsonResultSet.getObject("seventh")).isNull(); // test null + assertThat(bigQueryJsonResultSet.getObject(7)).isNull(); + assertThat(bigQueryJsonResultSet.wasNull()).isTrue(); + + assertThat(bigQueryJsonResultSet.getObject("tenth")).isEqualTo(new BigDecimal("12345678")); + assertThat(bigQueryJsonResultSet.getObject(10)).isEqualTo(new BigDecimal("12345678")); + assertThat(bigQueryJsonResultSet.getObject("eleventh")) + .isEqualTo(new BigDecimal("12345678.99")); + assertThat(bigQueryJsonResultSet.getObject(11)).isEqualTo(new BigDecimal("12345678.99")); + Time expectedTime = new Time(TimeUnit.NANOSECONDS.toMillis(aTime.toNanoOfDay())); + assertThat(bigQueryJsonResultSet.getObject("twelfth")).isEqualTo(expectedTime); + assertThat(bigQueryJsonResultSet.getObject(12)).isEqualTo(expectedTime); + assertThat(bigQueryJsonResultSet.getObject("thirteenth")).isEqualTo((short) 10); + assertThat(bigQueryJsonResultSet.getObject(13)).isEqualTo((short) 10); + } + + // validate the input streams + @Test + public void testCharacterStream() throws SQLException, IOException { + assertThat(resetResultSet()).isTrue(); + Reader charStream = bigQueryJsonResultSet.getCharacterStream("fourth"); + String expectedVal = CharStreams.toString(charStream); + assertThat(expectedVal).isEqualTo(STRING_VAL); + } + + @Test + public void testBinaryStream() throws SQLException, 
IOException { + assertThat(resetResultSet()).isTrue(); + StringBuilder textBuilder = new StringBuilder(); + InputStream binInputStream = bigQueryJsonResultSet.getBinaryStream(6); + Reader reader = new BufferedReader(new InputStreamReader(binInputStream)); + int c; + while ((c = reader.read()) != -1) { + textBuilder.append((char) c); + } + assertThat(textBuilder.toString()).isEqualTo(STRING_VAL); + reader.close(); + } + + @Test + public void testAsciiStream() throws SQLException, IOException { + assertThat(resetResultSet()).isTrue(); + StringBuilder textBuilder = new StringBuilder(); + InputStream binInputStream = bigQueryJsonResultSet.getAsciiStream(4); + Reader reader = new BufferedReader(new InputStreamReader(binInputStream)); + int c; + while ((c = reader.read()) != -1) { + textBuilder.append((char) c); + } + String expectedAsciiString = + new String(STRING_VAL.getBytes(), 0, STRING_VAL.length(), StandardCharsets.US_ASCII); + assertThat(textBuilder.length()).isEqualTo(expectedAsciiString.length()); + assertThat(textBuilder.toString()).isEqualTo(expectedAsciiString); + reader.close(); + } + + @Test + public void testUnicodeStream() throws SQLException, IOException { + assertThat(resetResultSet()).isTrue(); + InputStream binInputStream = bigQueryJsonResultSet.getUnicodeStream(4); + byte[] cbuf = new byte[100]; + int len = binInputStream.read(cbuf, 0, cbuf.length); + String colFourVal = new String(cbuf, 0, len, StandardCharsets.UTF_16LE); + assertThat(colFourVal).isEqualTo(STRING_VAL); + } + + @Test + public void testClob() throws SQLException, IOException { + assertThat(resetResultSet()).isTrue(); + java.sql.Clob clobVal = bigQueryJsonResultSet.getClob(4); + StringBuilder textBuilder = new StringBuilder(); + Reader charStream = clobVal.getCharacterStream(); + int intValueOfChar; + while ((intValueOfChar = charStream.read()) != -1) { + textBuilder.append((char) intValueOfChar); + } + charStream.close(); + assertThat(textBuilder.toString()).isEqualTo(STRING_VAL); + 
} + + @Test + public void testBlob() throws SQLException, IOException { + assertThat(resetResultSet()).isTrue(); + StringBuilder textBuilder = new StringBuilder(); + Blob blobVal = bigQueryJsonResultSet.getBlob(6); + InputStream binInputStream = blobVal.getBinaryStream(); + Reader reader = new BufferedReader(new InputStreamReader(binInputStream)); + int c; + while ((c = reader.read()) != -1) { + textBuilder.append((char) c); + } + assertThat(textBuilder.toString()).isEqualTo(STRING_VAL); + reader.close(); + } + + @Test + public void testBytes() throws SQLException { + assertThat(resetResultSet()).isTrue(); + assertThat(bigQueryJsonResultSet.getBytes("sixth")) + .isEqualTo(STRING_VAL.getBytes(StandardCharsets.UTF_8)); + assertThat(bigQueryJsonResultSet.getBytes(6)) + .isEqualTo(STRING_VAL.getBytes(StandardCharsets.UTF_8)); + } + + @Test + public void testResultSetHoldability() + throws SQLException { // TODO(prashant): Revisit this after Statement's commit is finalised + assertThat(bigQueryJsonResultSet.getHoldability()) + .isEqualTo(ResultSet.HOLD_CURSORS_OVER_COMMIT); + } + + @Test + public void testStatement() throws SQLException { + assertThat(bigQueryJsonResultSet.getStatement()).isEqualTo(statement); + assertThat(bigQueryJsonResultSetNested.getStatement()).isNull(); + } + + @Test + public void testConcurrency() throws SQLException { + assertThat(bigQueryJsonResultSet.getConcurrency()).isEqualTo(ResultSet.CONCUR_READ_ONLY); + assertThat(bigQueryJsonResultSet.getType()).isEqualTo(ResultSet.TYPE_FORWARD_ONLY); + assertThat(bigQueryJsonResultSet.findColumn("first")).isEqualTo(1); + } + + @Test + public void testIterationNested() throws SQLException { + int cnt = 0; + assertThat(bigQueryJsonResultSetNested.isBeforeFirst()).isTrue(); + while (bigQueryJsonResultSetNested.next()) { + cnt++; + if (cnt == 1) { + assertThat(bigQueryJsonResultSetNested.isFirst()).isTrue(); + + } else { // 2nd row is the last row + assertThat(bigQueryJsonResultSetNested.isLast()).isTrue(); 
+ } + assertThat(bigQueryJsonResultSetNested.getInt(1)) + .isEqualTo(cnt); // the first column is index 1 + assertThat(bigQueryJsonResultSetNested.getInt(2)) + .isEqualTo(cnt * 10); // second column has values 10 and 20 + } + assertThat(cnt).isEqualTo(2); + assertThat(bigQueryJsonResultSetNested.next()).isFalse(); + assertThat(bigQueryJsonResultSetNested.isAfterLast()).isTrue(); + } + + @Test + public void testTime() throws SQLException { + assertThat(resetResultSet()).isTrue(); + Calendar calendar = Calendar.getInstance(TimeZone.getTimeZone("EST")); + Time expectedTime = new Time(TimeUnit.NANOSECONDS.toMillis(aTime.toNanoOfDay())); + assertThat(bigQueryJsonResultSet.getTime(12)) + .isEqualTo(bigQueryJsonResultSet.getTime(12, calendar)); + assertThat(expectedTime).isEqualTo(bigQueryJsonResultSet.getTime(12, calendar)); + assertThat(bigQueryJsonResultSet.getTime("twelfth")) + .isEqualTo(bigQueryJsonResultSet.getTime("twelfth", calendar)); + } + + @Test + public void testTimestamp() throws SQLException { + assertThat(resetResultSet()).isTrue(); + Calendar calendar = Calendar.getInstance(TimeZone.getTimeZone("EST")); + Timestamp time = bigQueryJsonResultSet.getTimestamp(5); + Timestamp timeWithCal = bigQueryJsonResultSet.getTimestamp(5, calendar); + assertThat(time).isEqualTo(timeWithCal); + assertThat(bigQueryJsonResultSet.getTimestamp("fifth")) + .isEqualTo(bigQueryJsonResultSet.getTimestamp("fifth")); + } + + @Test + public void testDate() throws SQLException { + assertThat(resetResultSet()).isTrue(); + Calendar calendar = Calendar.getInstance(TimeZone.getTimeZone("EST")); + // epoc should match + assertThat(bigQueryJsonResultSet.getDate(14).getTime()) + .isEqualTo(bigQueryJsonResultSet.getDate(14, calendar).getTime()); + assertThat(Date.valueOf("2020-01-15").getTime()) + .isEqualTo(bigQueryJsonResultSet.getDate(14, calendar).getTime()); + assertThat(bigQueryJsonResultSet.getDate("fourteenth").getTime()) + .isEqualTo(bigQueryJsonResultSet.getDate("fourteenth", 
calendar).getTime()); + } + + private int resultSetRowCount(BigQueryJsonResultSet resultSet) throws SQLException { + int rowCount = 0; + while (resultSet.next()) { + rowCount++; + } + return rowCount; + } +} diff --git a/google-cloud-bigquery-jdbc/src/test/java/com/google/cloud/bigquery/jdbc/BigQueryJsonStructTest.java b/google-cloud-bigquery-jdbc/src/test/java/com/google/cloud/bigquery/jdbc/BigQueryJsonStructTest.java new file mode 100644 index 000000000..f07d8cad2 --- /dev/null +++ b/google-cloud-bigquery-jdbc/src/test/java/com/google/cloud/bigquery/jdbc/BigQueryJsonStructTest.java @@ -0,0 +1,264 @@ +/* + * Copyright 2023 Google LLC + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package com.google.cloud.bigquery.jdbc; + +import static com.google.cloud.bigquery.FieldValue.Attribute.PRIMITIVE; +import static com.google.cloud.bigquery.FieldValue.Attribute.RECORD; +import static com.google.cloud.bigquery.StandardSQLTypeName.BIGNUMERIC; +import static com.google.cloud.bigquery.StandardSQLTypeName.BOOL; +import static com.google.cloud.bigquery.StandardSQLTypeName.BYTES; +import static com.google.cloud.bigquery.StandardSQLTypeName.DATE; +import static com.google.cloud.bigquery.StandardSQLTypeName.DATETIME; +import static com.google.cloud.bigquery.StandardSQLTypeName.FLOAT64; +import static com.google.cloud.bigquery.StandardSQLTypeName.GEOGRAPHY; +import static com.google.cloud.bigquery.StandardSQLTypeName.INT64; +import static com.google.cloud.bigquery.StandardSQLTypeName.NUMERIC; +import static com.google.cloud.bigquery.StandardSQLTypeName.STRING; +import static com.google.cloud.bigquery.StandardSQLTypeName.TIME; +import static com.google.cloud.bigquery.StandardSQLTypeName.TIMESTAMP; +import static com.google.cloud.bigquery.jdbc.BigQueryErrorMessage.CUSTOMER_TYPE_MAPPING_NOT_SUPPORTED; +import static com.google.cloud.bigquery.jdbc.utils.TestUtilities.arraySchemaAndValue; +import static com.google.cloud.bigquery.jdbc.utils.TestUtilities.primitiveSchemaAndValue; +import static com.google.common.io.BaseEncoding.base64; +import static com.google.common.truth.Truth.assertThat; +import static java.time.Month.MARCH; +import static java.util.Arrays.asList; +import static java.util.Collections.emptyMap; +import static org.junit.Assert.assertThrows; + +import com.google.cloud.Tuple; +import com.google.cloud.bigquery.Field; +import com.google.cloud.bigquery.FieldList; +import com.google.cloud.bigquery.FieldValue; +import com.google.cloud.bigquery.FieldValue.Attribute; +import com.google.cloud.bigquery.FieldValueList; +import com.google.cloud.bigquery.LegacySQLTypeName; +import com.google.cloud.bigquery.jdbc.rules.TimeZoneRule; +import 
com.google.common.io.BaseEncoding; +import java.math.BigDecimal; +import java.sql.Array; +import java.sql.Date; +import java.sql.SQLException; +import java.sql.SQLFeatureNotSupportedException; +import java.sql.Struct; +import java.sql.Time; +import java.sql.Timestamp; +import java.time.LocalDate; +import java.time.LocalDateTime; +import java.time.LocalTime; +import java.util.Arrays; +import java.util.List; +import java.util.concurrent.TimeUnit; +import java.util.stream.Collectors; +import java.util.stream.Stream; +import org.junit.Before; +import org.junit.Rule; +import org.junit.Test; + +public class BigQueryJsonStructTest { + + @Rule public final TimeZoneRule timeZoneRule = new TimeZoneRule("UTC"); + + private Struct structWithPrimitiveValues; + private Struct structWithNullValue; + + @Before + public void setUp() { + List> schemaAndValues = + Arrays.asList( + primitiveSchemaAndValue(INT64, "10"), + primitiveSchemaAndValue(BOOL, "true"), + primitiveSchemaAndValue(FLOAT64, "11.2"), + primitiveSchemaAndValue(NUMERIC, "11.2657"), + primitiveSchemaAndValue(BIGNUMERIC, "11.2657"), + primitiveSchemaAndValue(STRING, "one"), + primitiveSchemaAndValue(TIMESTAMP, "1680174859.8200000"), // 2023-03-30 16:44:19.82 + primitiveSchemaAndValue(DATE, "2023-03-30"), + primitiveSchemaAndValue(TIME, "11:14:19.820000"), + primitiveSchemaAndValue(DATETIME, "2023-03-30T11:14:19.8200000"), + primitiveSchemaAndValue(GEOGRAPHY, "POINT(-122 47)"), + primitiveSchemaAndValue(BYTES, base64().encode("one".getBytes()))); + List orderedSchemas = + schemaAndValues.stream().map(Tuple::x).collect(Collectors.toList()); + List orderedValues = + schemaAndValues.stream().map(Tuple::y).collect(Collectors.toList()); + + structWithPrimitiveValues = + new BigQueryJsonStruct( + FieldList.of(orderedSchemas), FieldValue.of(RECORD, FieldValueList.of(orderedValues))); + structWithNullValue = + new BigQueryJsonStruct(FieldList.of(orderedSchemas), FieldValue.of(PRIMITIVE, null)); + } + + @Test + public void 
structOfPrimitives() throws SQLException { + assertThat(structWithPrimitiveValues.getAttributes()) + .isEqualTo( + Arrays.asList( + 10L, + true, + 11.2, + new BigDecimal("11.2657"), + new BigDecimal("11.2657"), + "one", + Timestamp.valueOf(LocalDateTime.of(2023, MARCH, 30, 11, 14, 19, 820000000)), + Date.valueOf(LocalDate.of(2023, MARCH, 30)), + new Time( + TimeUnit.NANOSECONDS.toMillis( + LocalTime.parse("11:14:19.820").toNanoOfDay())), + Timestamp.valueOf("2023-03-30 11:14:19.8200000"), + "POINT(-122 47)", + "one".getBytes()) + .toArray()); + } + + @Test + public void structOfArrays() throws SQLException { + LocalDateTime aTimeStamp = LocalDateTime.of(2023, MARCH, 30, 11, 14, 19, 820000000); + LocalDate aDate = LocalDate.of(2023, MARCH, 30); + LocalTime aTime = LocalTime.of(11, 14, 19, 820000000); + List> schemaAndValues = + Arrays.asList( + arraySchemaAndValue(INT64, "10", "20"), + arraySchemaAndValue(BOOL, "true", "false"), + arraySchemaAndValue(FLOAT64, "11.2", "33.4"), + arraySchemaAndValue(NUMERIC, "11.2657", "33.4657"), + arraySchemaAndValue(BIGNUMERIC, "11.2657", "33.4657"), + arraySchemaAndValue(STRING, "one", "two"), + arraySchemaAndValue(TIMESTAMP, "1680174859.820000", "1680261259.820000"), + arraySchemaAndValue(DATE, "2023-03-30", "2023-03-31"), + arraySchemaAndValue(TIME, "11:14:19.820000", "11:14:20.820000"), + arraySchemaAndValue( + DATETIME, "2023-03-30T11:14:19.820000", "2023-03-30T11:15:19.820000"), + arraySchemaAndValue(GEOGRAPHY, "POINT(-122 47)", "POINT(-122 48)"), + arraySchemaAndValue( + BYTES, + Stream.of("one", "two") + .map(s -> BaseEncoding.base64().encode(s.getBytes())) + .toArray(String[]::new))); + + List orderedSchemas = + schemaAndValues.stream().map(Tuple::x).collect(Collectors.toList()); + List orderedValues = + schemaAndValues.stream().map(Tuple::y).collect(Collectors.toList()); + + Struct struct = + new BigQueryJsonStruct( + FieldList.of(orderedSchemas), FieldValue.of(RECORD, FieldValueList.of(orderedValues))); + + Object[] 
attributes = struct.getAttributes(); + assertThat(((Array) attributes[0]).getArray()).isEqualTo(new Long[] {10L, 20L}); + assertThat(((Array) attributes[1]).getArray()).isEqualTo(new Boolean[] {true, false}); + assertThat(((Array) attributes[2]).getArray()).isEqualTo(new Double[] {11.2, 33.4}); + assertThat(((Array) attributes[3]).getArray()) + .isEqualTo(new BigDecimal[] {new BigDecimal("11.2657"), new BigDecimal("33.4657")}); + assertThat(((Array) attributes[4]).getArray()) + .isEqualTo(new BigDecimal[] {new BigDecimal("11.2657"), new BigDecimal("33.4657")}); + assertThat(((Array) attributes[5]).getArray()).isEqualTo(new String[] {"one", "two"}); + assertThat(((Array) attributes[6]).getArray()) + .isEqualTo( + new Timestamp[] { + Timestamp.valueOf(aTimeStamp), // 2023-03-30 16:44:19.82 + Timestamp.valueOf(aTimeStamp.plusDays(1)) + }); + assertThat(((Array) attributes[7]).getArray()) + .isEqualTo(new Date[] {Date.valueOf(aDate), Date.valueOf(aDate.plusDays(1))}); + assertThat(((Array) attributes[8]).getArray()) + .isEqualTo( + new Time[] { + new Time(TimeUnit.NANOSECONDS.toMillis(aTime.toNanoOfDay())), + new Time(TimeUnit.NANOSECONDS.toMillis(aTime.plusSeconds(1).toNanoOfDay())) + }); + assertThat(((Array) attributes[9]).getArray()) // DATETIME + .isEqualTo( + new Timestamp[] { + Timestamp.valueOf("2023-03-30 11:14:19.820000"), + Timestamp.valueOf("2023-03-30 11:15:19.820000") + }); + assertThat(((Array) attributes[10]).getArray()) + .isEqualTo(new String[] {"POINT(-122 47)", "POINT(-122 48)"}); + assertThat(((Array) attributes[11]).getArray()) + .isEqualTo(new byte[][] {"one".getBytes(), "two".getBytes()}); + } + + @Test + public void structOfStructs() throws SQLException { + FieldList profileSchema = + FieldList.of( + Field.of("name", LegacySQLTypeName.STRING), + Field.of("age", LegacySQLTypeName.INTEGER), + Field.of("adult", LegacySQLTypeName.BOOLEAN)); + FieldList addressSchema = + FieldList.of( + Field.of("state", LegacySQLTypeName.STRING), + Field.of("zip", 
LegacySQLTypeName.INTEGER)); + FieldList rootStructSchema = + FieldList.of( + Field.of("profile", LegacySQLTypeName.RECORD, profileSchema), + Field.of("address", LegacySQLTypeName.RECORD, addressSchema)); + + FieldValue profileValue = + FieldValue.of( + Attribute.RECORD, + FieldValueList.of( + asList( + FieldValue.of(PRIMITIVE, "Arya"), + FieldValue.of(PRIMITIVE, "15"), + FieldValue.of(PRIMITIVE, "false")))); + FieldValue addressValue = + FieldValue.of( + Attribute.RECORD, + FieldValueList.of( + asList(FieldValue.of(PRIMITIVE, "Michigan"), FieldValue.of(PRIMITIVE, "49086")))); + + FieldValue rootStructValue = + FieldValue.of(RECORD, FieldValueList.of(asList(profileValue, addressValue))); + + Struct struct = new BigQueryJsonStruct(rootStructSchema, rootStructValue); + Object[] attributes = struct.getAttributes(); + Struct profileStruct = (Struct) attributes[0]; + Struct addressStruct = (Struct) attributes[1]; + + assertThat(profileStruct.getAttributes()).isEqualTo(asList("Arya", 15L, false).toArray()); + assertThat(addressStruct.getAttributes()).isEqualTo(asList("Michigan", 49086L).toArray()); + } + + @Test + public void structWithNullValue() throws SQLException { + assertThat(structWithNullValue.getAttributes()) + .isEqualTo( + Arrays.asList(0L, false, 0.0, null, null, null, null, null, null, null, null, null) + .toArray()); + } + + @Test + public void getSQLTypeNameIsNotSupported() { + Exception exception = + assertThrows( + SQLFeatureNotSupportedException.class, structWithPrimitiveValues::getSQLTypeName); + assertThat(exception.getMessage()).isEqualTo(CUSTOMER_TYPE_MAPPING_NOT_SUPPORTED); + } + + @Test + public void getAttributesWithCustomTypeMappingsIsNotSupported() { + Exception exception = + assertThrows( + SQLFeatureNotSupportedException.class, + () -> structWithPrimitiveValues.getAttributes(emptyMap())); + assertThat(exception.getMessage()).isEqualTo(CUSTOMER_TYPE_MAPPING_NOT_SUPPORTED); + } +} diff --git 
a/google-cloud-bigquery-jdbc/src/test/java/com/google/cloud/bigquery/jdbc/BigQueryParameterHandlerTest.java b/google-cloud-bigquery-jdbc/src/test/java/com/google/cloud/bigquery/jdbc/BigQueryParameterHandlerTest.java new file mode 100644 index 000000000..0dc085b60 --- /dev/null +++ b/google-cloud-bigquery-jdbc/src/test/java/com/google/cloud/bigquery/jdbc/BigQueryParameterHandlerTest.java @@ -0,0 +1,142 @@ +/* + * Copyright 2025 Google LLC + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package com.google.cloud.bigquery.jdbc; + +import static org.junit.Assert.assertEquals; +import static org.junit.Assert.assertNotNull; + +import com.google.cloud.bigquery.StandardSQLTypeName; +import com.google.cloud.bigquery.jdbc.BigQueryParameterHandler.BigQueryStatementParameterType; +import org.junit.Test; + +public class BigQueryParameterHandlerTest { + + @Test + public void testGetSetParameterByName() throws Exception { + BigQueryParameterHandler paramHandler = new BigQueryParameterHandler(2); + // Add Param 1 + paramHandler.setParameter( + "ParamKey1", "ParamValue1", String.class, BigQueryStatementParameterType.IN, -1); + String paramValue = (String) paramHandler.getParameter("ParamKey1"); + assertNotNull(paramValue); + assertEquals("ParamValue1", paramValue); + BigQueryStatementParameterType paramType = paramHandler.getParameterType("ParamKey1"); + assertNotNull(paramType); + assertEquals(BigQueryStatementParameterType.IN, paramType); + int scale = paramHandler.getParameterScale("ParamKey1"); + assertEquals(-1, scale); + assertEquals(String.class, paramHandler.getType("ParamKey1")); + assertEquals(StandardSQLTypeName.STRING, paramHandler.getSqlType("ParamKey1")); + + // Add Param 2 + paramHandler.setParameter( + "ParamKey2", "ParamValue2", String.class, BigQueryStatementParameterType.INOUT, 1); + paramValue = (String) paramHandler.getParameter("ParamKey2"); + assertNotNull(paramValue); + assertEquals("ParamValue2", paramValue); + paramType = paramHandler.getParameterType("ParamKey2"); + assertNotNull(paramType); + assertEquals(BigQueryStatementParameterType.INOUT, paramType); + scale = paramHandler.getParameterScale("ParamKey2"); + assertEquals(1, scale); + assertEquals(String.class, paramHandler.getType("ParamKey2")); + assertEquals(StandardSQLTypeName.STRING, paramHandler.getSqlType("ParamKey2")); + + // Update Param 1 + paramHandler.setParameter( + "ParamKey1", "ParamValue1-UPD", String.class, BigQueryStatementParameterType.OUT, 1); + paramValue 
= (String) paramHandler.getParameter("ParamKey1"); + assertNotNull(paramValue); + assertEquals("ParamValue1-UPD", paramValue); + paramType = paramHandler.getParameterType("ParamKey1"); + assertNotNull(paramType); + assertEquals(BigQueryStatementParameterType.OUT, paramType); + scale = paramHandler.getParameterScale("ParamKey1"); + assertEquals(1, scale); + assertEquals(String.class, paramHandler.getType("ParamKey1")); + assertEquals(StandardSQLTypeName.STRING, paramHandler.getSqlType("ParamKey1")); + + // Update Param 2 + paramHandler.setParameter( + "ParamKey2", "ParamValue2-UPD", String.class, BigQueryStatementParameterType.INOUT, 2); + paramValue = (String) paramHandler.getParameter("ParamKey2"); + assertNotNull(paramValue); + assertEquals("ParamValue2-UPD", paramValue); + paramType = paramHandler.getParameterType("ParamKey2"); + assertNotNull(paramType); + assertEquals(BigQueryStatementParameterType.INOUT, paramType); + scale = paramHandler.getParameterScale("ParamKey2"); + assertEquals(2, scale); + assertEquals(String.class, paramHandler.getType("ParamKey2")); + assertEquals(StandardSQLTypeName.STRING, paramHandler.getSqlType("ParamKey2")); + } + + @Test + public void testGetSetParameterByIndex() throws Exception { + BigQueryParameterHandler paramHandler = new BigQueryParameterHandler(2); + + // Add Param 1 + paramHandler.setParameter(1, "ParamValue1", String.class); + String value = (String) paramHandler.getParameter(1); + assertNotNull(value); + assertEquals("ParamValue1", value); + BigQueryStatementParameterType paramType = paramHandler.getParameterType(1); + assertNotNull(paramType); + assertEquals(BigQueryStatementParameterType.UNSPECIFIED, paramType); + assertEquals(String.class, paramHandler.getType(1)); + assertEquals(StandardSQLTypeName.STRING, paramHandler.getSqlType(1)); + + // Add Param 2 + paramHandler.setParameter( + 2, "ParamValue2", String.class, BigQueryStatementParameterType.IN, -1); + value = (String) paramHandler.getParameter(2); + 
assertNotNull(value); + assertEquals("ParamValue2", value); + paramType = paramHandler.getParameterType(2); + assertNotNull(paramType); + assertEquals(BigQueryStatementParameterType.IN, paramType); + int scale = paramHandler.getParameterScale(2); + assertEquals(-1, scale); + assertEquals(String.class, paramHandler.getType(2)); + assertEquals(StandardSQLTypeName.STRING, paramHandler.getSqlType(2)); + + // Update Param 1 + paramHandler.setParameter(1, "ParamValue1-UPD", String.class); + value = (String) paramHandler.getParameter(1); + assertNotNull(value); + assertEquals("ParamValue1-UPD", value); + paramType = paramHandler.getParameterType(1); + assertNotNull(paramType); + assertEquals(BigQueryStatementParameterType.UNSPECIFIED, paramType); + assertEquals(String.class, paramHandler.getType(1)); + assertEquals(StandardSQLTypeName.STRING, paramHandler.getSqlType(1)); + + // Update Param 2 + paramHandler.setParameter( + 2, "ParamValue2-UPD", String.class, BigQueryStatementParameterType.OUT, 2); + value = (String) paramHandler.getParameter(2); + assertNotNull(value); + assertEquals("ParamValue2-UPD", value); + paramType = paramHandler.getParameterType(2); + assertNotNull(paramType); + assertEquals(BigQueryStatementParameterType.OUT, paramType); + scale = paramHandler.getParameterScale(2); + assertEquals(2, scale); + assertEquals(String.class, paramHandler.getType(2)); + assertEquals(StandardSQLTypeName.STRING, paramHandler.getSqlType(2)); + } +} diff --git a/google-cloud-bigquery-jdbc/src/test/java/com/google/cloud/bigquery/jdbc/BigQueryPooledConnectionTest.java b/google-cloud-bigquery-jdbc/src/test/java/com/google/cloud/bigquery/jdbc/BigQueryPooledConnectionTest.java new file mode 100644 index 000000000..ee9d63beb --- /dev/null +++ b/google-cloud-bigquery-jdbc/src/test/java/com/google/cloud/bigquery/jdbc/BigQueryPooledConnectionTest.java @@ -0,0 +1,174 @@ +/* + * Copyright 2025 Google LLC + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you 
may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package com.google.cloud.bigquery.jdbc; + +import static com.google.cloud.bigquery.jdbc.utils.TestUtilities.TestConnectionListener; +import static org.junit.Assert.assertEquals; +import static org.junit.Assert.assertFalse; +import static org.junit.Assert.assertNotNull; +import static org.junit.Assert.assertTrue; +import static org.mockito.Mockito.mock; + +import java.io.IOException; +import java.sql.*; +import org.junit.Before; +import org.junit.Test; + +public class BigQueryPooledConnectionTest { + private BigQueryConnection bigQueryConnection; + private static final Long LISTENER_POOL_SIZE = 10L; + + @Before + public void setUp() throws IOException, SQLException { + bigQueryConnection = mock(BigQueryConnection.class); + } + + @Test + public void testGetPooledConnection() throws SQLException { + BigQueryPooledConnection pooledConnection = new BigQueryPooledConnection(bigQueryConnection); + assertFalse(pooledConnection.inUse()); + assertEquals(LISTENER_POOL_SIZE, pooledConnection.getListenerPoolSize()); + Connection connection = pooledConnection.getConnection(); + assertNotNull(connection); + assertFalse(connection.isClosed()); + assertTrue(pooledConnection.inUse()); + } + + @Test + public void testPooledConnectionClose() throws SQLException { + BigQueryPooledConnection pooledConnection = new BigQueryPooledConnection(bigQueryConnection); + assertFalse(pooledConnection.inUse()); + assertEquals(LISTENER_POOL_SIZE, pooledConnection.getListenerPoolSize()); + Connection connection = 
pooledConnection.getConnection(); + assertNotNull(connection); + assertFalse(connection.isClosed()); + assertTrue(pooledConnection.inUse()); + + connection.close(); + assertFalse(pooledConnection.inUse()); + } + + @Test + public void testReuseConnectionAfterClose() throws SQLException { + BigQueryPooledConnection pooledConnection = new BigQueryPooledConnection(bigQueryConnection); + assertFalse(pooledConnection.inUse()); + assertEquals(LISTENER_POOL_SIZE, pooledConnection.getListenerPoolSize()); + Connection connection = pooledConnection.getConnection(); + assertNotNull(connection); + assertFalse(connection.isClosed()); + assertTrue(pooledConnection.inUse()); + + connection.close(); + assertFalse(pooledConnection.inUse()); + + connection = pooledConnection.getConnection(); + assertTrue(pooledConnection.inUse()); + } + + @Test + public void testAddConnectionListener() throws SQLException { + BigQueryPooledConnection pooledConnection = new BigQueryPooledConnection(bigQueryConnection); + assertEquals(LISTENER_POOL_SIZE, pooledConnection.getListenerPoolSize()); + TestConnectionListener listner = new TestConnectionListener(); + pooledConnection.addConnectionEventListener(listner); + + assertTrue(pooledConnection.isListenerPooled(listner)); + } + + @Test + public void testRemoveConnectionListener() throws SQLException { + BigQueryPooledConnection pooledConnection = new BigQueryPooledConnection(bigQueryConnection); + assertEquals(LISTENER_POOL_SIZE, pooledConnection.getListenerPoolSize()); + TestConnectionListener listner = new TestConnectionListener(); + pooledConnection.addConnectionEventListener(listner); + assertEquals(0, listner.getConnectionClosedCount()); + assertEquals(0, listner.getConnectionErrorCount()); + + assertTrue(pooledConnection.isListenerPooled(listner)); + pooledConnection.removeConnectionEventListener(listner); + assertFalse(pooledConnection.isListenerPooled(listner)); + } + + @Test + public void testConnectionHandleClosedByConnection() throws 
SQLException { + BigQueryPooledConnection pooledConnection = new BigQueryPooledConnection(bigQueryConnection); + assertEquals(LISTENER_POOL_SIZE, pooledConnection.getListenerPoolSize()); + assertFalse(pooledConnection.inUse()); + Connection connection = pooledConnection.getConnection(); + assertNotNull(connection); + assertFalse(connection.isClosed()); + assertTrue(pooledConnection.inUse()); + + TestConnectionListener listner = new TestConnectionListener(); + pooledConnection.addConnectionEventListener(listner); + assertEquals(0, listner.getConnectionClosedCount()); + assertEquals(0, listner.getConnectionErrorCount()); + + connection.close(); + assertFalse(pooledConnection.inUse()); + assertEquals(1, listner.getConnectionClosedCount()); + assertEquals(0, listner.getConnectionErrorCount()); + + assertTrue(pooledConnection.isListenerPooled(listner)); + } + + @Test + public void testConnectionHandleClosedByPooledConnection() throws SQLException { + BigQueryPooledConnection pooledConnection = new BigQueryPooledConnection(bigQueryConnection); + assertEquals(LISTENER_POOL_SIZE, pooledConnection.getListenerPoolSize()); + assertFalse(pooledConnection.inUse()); + Connection connection = pooledConnection.getConnection(); + assertNotNull(connection); + assertFalse(connection.isClosed()); + assertTrue(pooledConnection.inUse()); + + TestConnectionListener listner = new TestConnectionListener(); + pooledConnection.addConnectionEventListener(listner); + assertEquals(0, listner.getConnectionClosedCount()); + assertEquals(0, listner.getConnectionErrorCount()); + + pooledConnection.close(); + assertFalse(pooledConnection.inUse()); + assertEquals(1, listner.getConnectionClosedCount()); + assertEquals(0, listner.getConnectionErrorCount()); + + assertTrue(pooledConnection.isListenerPooled(listner)); + } + + @Test + public void testFireConnectionError() throws SQLException { + BigQueryPooledConnection pooledConnection = new BigQueryPooledConnection(bigQueryConnection); + 
assertEquals(LISTENER_POOL_SIZE, pooledConnection.getListenerPoolSize()); + assertFalse(pooledConnection.inUse()); + Connection connection = pooledConnection.getConnection(); + assertNotNull(connection); + assertFalse(connection.isClosed()); + assertTrue(pooledConnection.inUse()); + + TestConnectionListener listner = new TestConnectionListener(); + pooledConnection.addConnectionEventListener(listner); + assertEquals(0, listner.getConnectionClosedCount()); + assertEquals(0, listner.getConnectionErrorCount()); + + pooledConnection.fireConnectionError(new SQLException("test")); + assertFalse(pooledConnection.inUse()); + assertEquals(0, listner.getConnectionClosedCount()); + assertEquals(1, listner.getConnectionErrorCount()); + + assertFalse(pooledConnection.isListenerPooled(listner)); + } +} diff --git a/google-cloud-bigquery-jdbc/src/test/java/com/google/cloud/bigquery/jdbc/BigQueryResultSetFinalizersTest.java b/google-cloud-bigquery-jdbc/src/test/java/com/google/cloud/bigquery/jdbc/BigQueryResultSetFinalizersTest.java new file mode 100644 index 000000000..7332dce93 --- /dev/null +++ b/google-cloud-bigquery-jdbc/src/test/java/com/google/cloud/bigquery/jdbc/BigQueryResultSetFinalizersTest.java @@ -0,0 +1,67 @@ +/* + * Copyright 2023 Google LLC + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package com.google.cloud.bigquery.jdbc; + +import static com.google.common.truth.Truth.assertThat; + +import org.junit.Before; +import org.junit.Test; + +public class BigQueryResultSetFinalizersTest { + Thread arrowWorker; + Thread[] jsonWorkers; + + @Before + public void setUp() { + // create and start the demon threads + arrowWorker = + new Thread( + () -> { + while (true) { + if (Thread.currentThread().isInterrupted()) { + break; + } + } + }); + arrowWorker.setDaemon(true); + Thread jsonWorker = + new Thread( + () -> { + while (true) { + if (Thread.currentThread().isInterrupted()) { + break; + } + } + }); + jsonWorker.setDaemon(true); + jsonWorkers = new Thread[] {jsonWorker}; + arrowWorker.start(); + jsonWorker.start(); + } + + @Test + public void testFinalizeResources() { + BigQueryResultSetFinalizers.ArrowResultSetFinalizer arrowResultSetFinalizer = + new BigQueryResultSetFinalizers.ArrowResultSetFinalizer(null, null, arrowWorker); + arrowResultSetFinalizer.finalizeResources(); + assertThat(arrowWorker.isInterrupted()).isTrue(); + BigQueryResultSetFinalizers.JsonResultSetFinalizer jsonResultSetFinalizer = + new BigQueryResultSetFinalizers.JsonResultSetFinalizer(null, null, jsonWorkers); + jsonResultSetFinalizer.finalizeResources(); + assertThat(jsonWorkers[0].isInterrupted()).isTrue(); + } +} diff --git a/google-cloud-bigquery-jdbc/src/test/java/com/google/cloud/bigquery/jdbc/BigQueryResultSetMetadataTest.java b/google-cloud-bigquery-jdbc/src/test/java/com/google/cloud/bigquery/jdbc/BigQueryResultSetMetadataTest.java new file mode 100644 index 000000000..b4d14296d --- /dev/null +++ b/google-cloud-bigquery-jdbc/src/test/java/com/google/cloud/bigquery/jdbc/BigQueryResultSetMetadataTest.java @@ -0,0 +1,277 @@ +/* + * Copyright 2023 Google LLC + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
package com.google.cloud.bigquery.jdbc;

import static com.google.common.truth.Truth.assertThat;
import static org.mockito.Mockito.mock;

import com.google.cloud.bigquery.Field;
import com.google.cloud.bigquery.FieldList;
import com.google.cloud.bigquery.LegacySQLTypeName;
import com.google.cloud.bigquery.Schema;
import com.google.cloud.bigquery.StandardSQLTypeName;
import com.google.common.collect.ImmutableList;
import java.sql.Array;
import java.sql.ResultSetMetaData;
import java.sql.SQLException;
import java.sql.Types;
import java.util.List;
import org.junit.Before;
import org.junit.Test;
import org.junit.runner.RunWith;
import org.mockito.junit.MockitoJUnitRunner;

/**
 * Unit tests for the {@link ResultSetMetaData} implementation produced by {@link
 * BigQueryJsonResultSet}, covering both a flat 13-column schema and the nested (array element)
 * metadata path.
 */
@RunWith(MockitoJUnitRunner.class)
public class BigQueryResultSetMetadataTest {

  private BigQueryStatement statement;

  // NUMERIC field with explicit precision/scale so precision- and scale-related
  // metadata can be asserted against known values.
  private static final Field tenthField =
      Field.newBuilder("tenth", LegacySQLTypeName.NUMERIC)
          .setName("tenth")
          .setType(StandardSQLTypeName.NUMERIC)
          .setPrecision(12L)
          .setScale(9L)
          .build();

  // One field per BigQuery type the metadata mapping must handle.
  private static final FieldList fieldList =
      FieldList.of(
          Field.of("first", StandardSQLTypeName.BOOL),
          Field.of("second", StandardSQLTypeName.INT64),
          Field.of("third", StandardSQLTypeName.FLOAT64),
          Field.of("fourth", StandardSQLTypeName.STRING),
          Field.of("fifth", StandardSQLTypeName.TIMESTAMP),
          Field.of("sixth", StandardSQLTypeName.BYTES),
          Field.of("seventh", StandardSQLTypeName.STRING),
          Field.newBuilder("eight", StandardSQLTypeName.STRING)
              .setMode(Field.Mode.REPEATED)
              .build(),
          Field.of(
              "ninth",
              StandardSQLTypeName.STRUCT,
              Field.of("first", StandardSQLTypeName.FLOAT64),
              Field.of("second", StandardSQLTypeName.TIMESTAMP)),
          tenthField,
          Field.of("eleventh", StandardSQLTypeName.BIGNUMERIC),
          Field.of("twelfth", LegacySQLTypeName.TIME),
          Field.of("thirteenth", LegacySQLTypeName.DATE));

  // Expected java.sql.Types constant for each column above, in column order.
  private static final List<Integer> fieldListSqlTypes =
      ImmutableList.of(
          Types.BOOLEAN,
          Types.BIGINT,
          Types.DOUBLE,
          Types.NVARCHAR,
          Types.TIMESTAMP,
          Types.VARBINARY,
          Types.NVARCHAR,
          Types.ARRAY,
          Types.STRUCT,
          Types.NUMERIC,
          Types.NUMERIC,
          Types.TIME,
          Types.DATE);

  // Expected Java class name for each column above, in column order.
  private static final List<String> fieldListClassNames =
      ImmutableList.of(
          "java.lang.Boolean",
          "java.lang.Long",
          "java.lang.Double",
          "java.lang.String",
          "java.sql.Timestamp",
          byte[].class.getName(),
          "java.lang.String",
          Array.class.getName(),
          "java.sql.Struct",
          "java.math.BigDecimal",
          "java.math.BigDecimal",
          "java.sql.Time",
          "java.sql.Date");

  private static final Schema QUERY_SCHEMA = Schema.of(fieldList);

  private ResultSetMetaData resultSetMetaData;

  private ResultSetMetaData resultSetMetaDataNested;

  @Before
  public void setUp() throws SQLException {
    statement = mock(BigQueryStatement.class);
    Thread[] workerThreads = {new Thread()};
    BigQueryJsonResultSet bigQueryJsonResultSet =
        BigQueryJsonResultSet.of(QUERY_SCHEMA, 1L, null, statement, workerThreads);
    resultSetMetaData = bigQueryJsonResultSet.getMetaData();

    // Values for nested types.
    Field fieldEight = fieldList.get("eight");
    // The schema for the nested result set should describe the elements of the array.
    Field elementField = fieldEight.toBuilder().setMode(Field.Mode.NULLABLE).build();
    FieldList nestedFieldList = FieldList.of(elementField);
    BigQueryFieldValueListWrapper bigQueryFieldValueListWrapperNested =
        BigQueryFieldValueListWrapper.getNestedFieldValueListWrapper(nestedFieldList, null);
    BigQueryJsonResultSet bigQueryJsonResultSetNested =
        BigQueryJsonResultSet.getNestedResultSet(
            Schema.of(nestedFieldList), bigQueryFieldValueListWrapperNested, -1, -1);
    resultSetMetaDataNested = bigQueryJsonResultSetNested.getMetaData();
  }

  @Test
  public void testGetColumnType() throws SQLException {
    // Match the mapping for all the types in the test dataset.
    for (int colIndex = 1; colIndex <= 13; colIndex++) {
      assertThat(resultSetMetaData.getColumnType(colIndex))
          .isEqualTo(fieldListSqlTypes.get(colIndex - 1));
    }
  }

  @Test
  public void testGetColumnTypeName() throws SQLException {
    assertThat(resultSetMetaData.getColumnTypeName(1)).isEqualTo("BOOL");
    assertThat(resultSetMetaData.getColumnTypeName(2)).isEqualTo("INT64");
    assertThat(resultSetMetaData.getColumnTypeName(3)).isEqualTo("FLOAT64");
    assertThat(resultSetMetaData.getColumnTypeName(4)).isEqualTo("STRING");
    assertThat(resultSetMetaData.getColumnTypeName(5)).isEqualTo("TIMESTAMP");
    assertThat(resultSetMetaData.getColumnTypeName(6)).isEqualTo("BYTES");
    assertThat(resultSetMetaData.getColumnTypeName(7)).isEqualTo("STRING");
    assertThat(resultSetMetaData.getColumnTypeName(8)).isEqualTo("ARRAY");
    assertThat(resultSetMetaData.getColumnTypeName(9)).isEqualTo("STRUCT");
    assertThat(resultSetMetaData.getColumnTypeName(10)).isEqualTo("NUMERIC");
    assertThat(resultSetMetaData.getColumnTypeName(11)).isEqualTo("BIGNUMERIC");
    assertThat(resultSetMetaData.getColumnTypeName(12)).isEqualTo("TIME");
    assertThat(resultSetMetaData.getColumnTypeName(13)).isEqualTo("DATE");
  }

  @Test
  public void testColumnClassName() throws SQLException {
    // Match the mapping for all the types in the test dataset.
    for (int colIndex = 1; colIndex <= 13; colIndex++) {
      assertThat(resultSetMetaData.getColumnClassName(colIndex))
          .isEqualTo(fieldListClassNames.get(colIndex - 1));
    }
  }

  @Test
  public void testResultSetMetadataProperties() throws SQLException {
    assertThat(resultSetMetaData).isNotNull();
    assertThat(resultSetMetaData.getColumnCount()).isEqualTo(13);
    assertThat(resultSetMetaData.isAutoIncrement(1)).isFalse();
    assertThat(resultSetMetaData.isSearchable(4)).isTrue();
    assertThat(resultSetMetaData.isCurrency(4)).isFalse();
    assertThat(resultSetMetaData.isReadOnly(4)).isFalse();
    assertThat(resultSetMetaData.isDefinitelyWritable(4)).isFalse();
    assertThat(resultSetMetaData.isWritable(4)).isTrue();
    assertThat(resultSetMetaData.isNullable(4)).isEqualTo(ResultSetMetaData.columnNullableUnknown);
  }

  @Test
  public void testPrecision() throws SQLException {
    assertThat(resultSetMetaData.getPrecision(10)).isEqualTo(12L);
    assertThat(resultSetMetaData.getPrecision(1))
        .isEqualTo(0); // schema doesn't have this info, should be defaulted to 0
  }

  @Test
  public void testSigned() throws SQLException {
    assertThat(resultSetMetaData.isSigned(4)).isFalse();
    assertThat(resultSetMetaData.isSigned(2)).isTrue();
  }

  @Test
  public void testCheckNameLabelCatalog() throws SQLException {
    assertThat(resultSetMetaData.getColumnLabel(1)).isEqualTo("first");
    assertThat(resultSetMetaData.getColumnName(10)).isEqualTo("tenth");
    assertThat(resultSetMetaData.getSchemaName(10)).isEqualTo("");
    assertThat(resultSetMetaData.getCatalogName(10)).isEqualTo("");
  }

  @Test
  public void testCheckCaseSensitive() throws SQLException {
    assertThat(resultSetMetaData.isCaseSensitive(2)).isFalse();
    assertThat(resultSetMetaData.isCaseSensitive(4)).isTrue();
  }

  @Test
  public void testScale() throws SQLException {
    assertThat(resultSetMetaData.getScale(10)).isEqualTo(9L);
    assertThat(resultSetMetaData.getScale(4)).isEqualTo(0L);
  }

  @Test
  public void testColumnDisplaySize() throws SQLException {
    assertThat(resultSetMetaData.getColumnDisplaySize(1)).isEqualTo(5);
    assertThat(resultSetMetaData.getColumnDisplaySize(13)).isEqualTo(10);
    assertThat(resultSetMetaData.getColumnDisplaySize(2)).isEqualTo(10);
    assertThat(resultSetMetaData.getColumnDisplaySize(3)).isEqualTo(14);
    assertThat(resultSetMetaData.getColumnDisplaySize(12)).isEqualTo(50);
    assertThat(resultSetMetaData.getColumnDisplaySize(5)).isEqualTo(16);
  }

  // Nested types.

  @Test
  public void testResultSetMetaDataNestedColType() throws SQLException {
    assertThat(resultSetMetaDataNested).isNotNull();
    assertThat(resultSetMetaDataNested.getColumnType(1)).isEqualTo(Types.NVARCHAR);
    assertThat(resultSetMetaDataNested.getColumnClassName(1)).isEqualTo("java.lang.String");
  }

  @Test
  public void testNestedresultSetMetaDataNestedProperties() throws SQLException {
    assertThat(resultSetMetaDataNested.getColumnCount()).isEqualTo(1);
    assertThat(resultSetMetaDataNested.isAutoIncrement(1)).isFalse();
    assertThat(resultSetMetaDataNested.isSearchable(1)).isTrue();
    assertThat(resultSetMetaDataNested.isCurrency(1)).isFalse();
    assertThat(resultSetMetaDataNested.isReadOnly(1)).isFalse();
    assertThat(resultSetMetaDataNested.isDefinitelyWritable(1)).isFalse();
    assertThat(resultSetMetaDataNested.isWritable(1)).isTrue();
    assertThat(resultSetMetaDataNested.isNullable(1)).isEqualTo(ResultSetMetaData.columnNullable);
  }

  @Test
  public void testNestedPrecision() throws SQLException {
    assertThat(resultSetMetaDataNested.getPrecision(1))
        .isEqualTo(0); // schema doesn't have this info, should be defaulted to 0
  }

  @Test
  public void testNestedSigned() throws SQLException {
    assertThat(resultSetMetaDataNested.isSigned(1)).isFalse();
  }

  @Test
  public void testNestedCheckNameLabelCatalog() throws SQLException {
    assertThat(resultSetMetaDataNested.getColumnLabel(1)).isEqualTo("eight");
    assertThat(resultSetMetaDataNested.getColumnName(1)).isEqualTo("eight");
    assertThat(resultSetMetaDataNested.getSchemaName(1)).isEqualTo("");
    assertThat(resultSetMetaDataNested.getCatalogName(1)).isEqualTo("");
  }

  @Test
  public void testNestedCheckCaseSensitive() throws SQLException {
    assertThat(resultSetMetaDataNested.isCaseSensitive(1)).isTrue();
  }

  @Test
  public void testNestedScale() throws SQLException {
    assertThat(resultSetMetaDataNested.getScale(1)).isEqualTo(0L);
  }

  @Test
  public void testNestedColumnDisplaySize() throws SQLException {
    assertThat(resultSetMetaDataNested.getColumnDisplaySize(1)).isEqualTo(50);
  }
}
package com.google.cloud.bigquery.jdbc;

import static com.google.cloud.bigquery.jdbc.utils.ArrowUtilities.serializeSchema;
import static com.google.common.truth.Truth.assertThat;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertTrue;
import static org.mockito.ArgumentMatchers.any;
import static org.mockito.ArgumentMatchers.eq;
import static org.mockito.Mockito.doReturn;
import static org.mockito.Mockito.mock;
import static org.mockito.Mockito.verify;

import com.google.cloud.ServiceOptions;
import com.google.cloud.bigquery.BigQuery;
import com.google.cloud.bigquery.BigQuery.QueryResultsOption;
import com.google.cloud.bigquery.BigQueryOptions;
import com.google.cloud.bigquery.Field;
import com.google.cloud.bigquery.FieldList;
import com.google.cloud.bigquery.Job;
import com.google.cloud.bigquery.JobId;
import com.google.cloud.bigquery.JobInfo;
import com.google.cloud.bigquery.JobStatistics;
import com.google.cloud.bigquery.JobStatistics.QueryStatistics;
import com.google.cloud.bigquery.JobStatistics.QueryStatistics.StatementType;
import com.google.cloud.bigquery.QueryJobConfiguration;
import com.google.cloud.bigquery.QueryJobConfiguration.Priority;
import com.google.cloud.bigquery.Schema;
import com.google.cloud.bigquery.StandardSQLTypeName;
import com.google.cloud.bigquery.TableId;
import com.google.cloud.bigquery.TableResult;
import com.google.cloud.bigquery.jdbc.BigQueryStatement.JobIdWrapper;
import com.google.cloud.bigquery.spi.BigQueryRpcFactory;
import com.google.cloud.bigquery.storage.v1.ArrowSchema;
import com.google.cloud.bigquery.storage.v1.BigQueryReadClient;
import com.google.cloud.bigquery.storage.v1.CreateReadSessionRequest;
import com.google.cloud.bigquery.storage.v1.ReadSession;
import com.google.common.collect.ImmutableList;
import com.google.common.collect.ImmutableMap;
import com.google.common.collect.Maps;
import java.io.IOException;
import java.sql.ResultSet;
import java.sql.SQLException;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.UUID;
import java.util.concurrent.BlockingQueue;
import org.apache.arrow.memory.RootAllocator;
import org.apache.arrow.vector.BitVector;
import org.apache.arrow.vector.FieldVector;
import org.apache.arrow.vector.IntVector;
import org.apache.arrow.vector.VectorSchemaRoot;
import org.junit.Before;
import org.junit.Ignore;
import org.junit.Test;
import org.mockito.ArgumentCaptor;
import org.mockito.Mockito;

/**
 * Unit tests for {@link BigQueryStatement}: query execution paths (fast/slow/jobless), Arrow result
 * sets, job configuration (timeout, bytes billed, labels), and cancellation behavior.
 */
public class BigQueryStatementTest {

  private BigQueryConnection bigQueryConnection;
  private static final String PROJECT = "project";

  private BigQueryRpcFactory rpcFactoryMock;

  private BigQueryReadClient storageReadClient;

  private BigQuery bigquery;

  private BigQueryStatement bigQueryStatement;

  private final String query = "select * from test";

  private final String jobIdVal = UUID.randomUUID().toString();

  private JobId jobId;

  private static final FieldList fieldList =
      FieldList.of(
          Field.of("first", StandardSQLTypeName.BOOL),
          Field.of("second", StandardSQLTypeName.INT64));

  private static final String DEFAULT_TEST_DATASET = "bigquery_test_dataset";

  private static final TableId TABLE_ID = TableId.of(DEFAULT_TEST_DATASET, PROJECT);

  private static ArrowSchema arrowSchema;

  // Connection-level labels stubbed on the mocked connection in setUp().
  private static final Map<String, String> LABELS =
      ImmutableMap.of(
          "key1", "val1",
          "key2", "val2",
          "key3", "val3");

  /** Builds a mocked {@link Job} that yields the given result, configuration, and statement type. */
  private Job getJobMock(
      TableResult result, QueryJobConfiguration configuration, StatementType type)
      throws InterruptedException {
    Job job = mock(Job.class);
    JobStatistics.QueryStatistics statistics = mock(QueryStatistics.class);
    JobId jobId = mock(JobId.class);
    doReturn(result).when(job).getQueryResults(any(QueryResultsOption.class));
    doReturn(jobId).when(job).getJobId();
    doReturn(configuration).when(job).getConfiguration();
    doReturn(statistics).when(job).getStatistics();
    doReturn(type).when(statistics).getStatementType();
    return job;
  }

  @Before
  public void setUp() throws IOException, SQLException {
    bigQueryConnection = mock(BigQueryConnection.class);
    rpcFactoryMock = mock(BigQueryRpcFactory.class);
    bigquery = mock(BigQuery.class);
    bigQueryConnection.bigQuery = bigquery;
    storageReadClient = mock(BigQueryReadClient.class);
    jobId = JobId.newBuilder().setJob(jobIdVal).build();

    doReturn(bigquery).when(bigQueryConnection).getBigQuery();
    doReturn(10L).when(bigQueryConnection).getJobTimeoutInSeconds();
    doReturn(10L).when(bigQueryConnection).getMaxBytesBilled();
    doReturn(LABELS).when(bigQueryConnection).getLabels();
    doReturn(BigQueryJdbcUrlUtility.DEFAULT_QUERY_DIALECT_VALUE)
        .when(bigQueryConnection)
        .getQueryDialect();
    doReturn(1000L).when(bigQueryConnection).getMaxResults();
    bigQueryStatement = new BigQueryStatement(bigQueryConnection);
    VectorSchemaRoot vectorSchemaRoot = getTestVectorSchemaRoot();
    arrowSchema =
        ArrowSchema.newBuilder()
            .setSerializedSchema(serializeSchema(vectorSchemaRoot.getSchema()))
            .build();
  }

  /** Builds a tiny two-column Arrow root used to serialize a schema for Arrow-path tests. */
  private VectorSchemaRoot getTestVectorSchemaRoot() {
    RootAllocator allocator = new RootAllocator();
    BitVector boolField =
        new BitVector("boolField", allocator); // Mapped with StandardSQLTypeName.BOOL
    boolField.allocateNew(2);
    boolField.set(0, 0);
    boolField.setValueCount(1);
    IntVector int64Field =
        new IntVector("int64Field", allocator); // Mapped with StandardSQLTypeName.INT64
    int64Field.allocateNew(2);
    int64Field.set(0, 1);
    int64Field.setValueCount(1);
    List<FieldVector> fieldVectors = ImmutableList.of(boolField, int64Field);
    return new VectorSchemaRoot(fieldVectors);
  }

  private BigQueryOptions createBigQueryOptionsForProject(
      String project, BigQueryRpcFactory rpcFactory) {
    return BigQueryOptions.newBuilder()
        .setProjectId(project)
        .setServiceRpcFactory(rpcFactory)
        .setRetrySettings(ServiceOptions.getNoRetrySettings())
        .build();
  }

  @Test
  public void testStatementNonNull() {
    assertThat(bigQueryStatement).isNotNull();
  }

  // @Ignore alone does not mark a method as a (skipped) test in JUnit 4; @Test is
  // required for the runner to report it as ignored rather than silently drop it.
  @Test
  @Ignore
  public void testExecFastQueryPath() throws SQLException, InterruptedException {
    JobIdWrapper jobIdWrapper = new JobIdWrapper(jobId, null, null);
    BigQueryStatement bigQueryStatementSpy = Mockito.spy(bigQueryStatement);

    TableResult result = Mockito.mock(TableResult.class);
    BigQueryJsonResultSet jsonResultSet = mock(BigQueryJsonResultSet.class);
    QueryJobConfiguration jobConfiguration = QueryJobConfiguration.newBuilder(query).build();

    doReturn(result).when(bigquery).query(jobConfiguration);
    doReturn(jsonResultSet).when(bigQueryStatementSpy).processJsonResultSet(result);

    bigQueryStatementSpy.runQuery(query, jobConfiguration);
    // Verify the statement's state.
    assertThat(bigQueryStatementSpy.jobIds.size()).isEqualTo(1); // job id should be created
    assertThat(bigQueryStatementSpy.jobIds.get(0)).isNotNull();
  }

  @Test
  public void testExecSlowQueryPath() throws SQLException, InterruptedException {
    JobIdWrapper jobIdWrapper = new JobIdWrapper(jobId, null, null);
    BigQueryStatement bigQueryStatementSpy = Mockito.spy(bigQueryStatement);
    TableResult tableResult = mock(TableResult.class);
    QueryJobConfiguration queryJobConfiguration =
        QueryJobConfiguration.newBuilder(query)
            .setPriority(Priority.BATCH) // query settings for slow query path
            .build();
    Job job = getJobMock(tableResult, queryJobConfiguration, StatementType.SELECT);

    doReturn(job).when(bigquery).create(any(JobInfo.class));

    doReturn(jobIdWrapper)
        .when(bigQueryStatementSpy)
        .insertJob(any(com.google.cloud.bigquery.JobConfiguration.class));
    doReturn(false).when(bigQueryStatementSpy).useReadAPI(eq(tableResult));
    doReturn(mock(JobId.class)).when(tableResult).getJobId();

    ResultSet bigQueryJsonResultSet = mock(BigQueryJsonResultSet.class);

    doReturn(bigQueryJsonResultSet)
        .when(bigQueryStatementSpy)
        .processJsonResultSet(any(TableResult.class));

    bigQueryStatementSpy.runQuery(query, queryJobConfiguration);
    // Verify the statement's state.
    // Job id is created during runQuery, but cleaned up after the function completes.
    assertThat(bigQueryStatementSpy.jobIds.size()).isEqualTo(0);
    assertThat(bigQueryStatementSpy.getResultSet()).isEqualTo(bigQueryJsonResultSet);
  }

  @Test
  public void getArrowResultSetTest() throws SQLException {
    BigQueryStatement bigQueryStatementSpy = Mockito.spy(bigQueryStatement);
    BigQueryReadClient bigQueryReadClient = mock(BigQueryReadClient.class);
    Schema schema = Schema.of(fieldList);
    ReadSession readSession = ReadSession.getDefaultInstance();
    doReturn(bigQueryReadClient).when(bigQueryStatementSpy).getBigQueryReadClient();
    doReturn(readSession)
        .when(bigQueryStatementSpy)
        .getReadSession(any(CreateReadSessionRequest.class));
    Thread mockWorker = new Thread();
    doReturn(mockWorker)
        .when(bigQueryStatementSpy)
        .populateArrowBufferedQueue(
            any(ReadSession.class), any(BlockingQueue.class), any(BigQueryReadClient.class));

    doReturn(arrowSchema).when(bigQueryStatementSpy).getArrowSchema(any(ReadSession.class));

    JobId jobId = JobId.of("123");
    TableResult result = Mockito.mock(TableResult.class);
    doReturn(schema).when(result).getSchema();
    doReturn(10L).when(result).getTotalRows();
    doReturn(TABLE_ID).when(bigQueryStatementSpy).getDestinationTable(any());
    doReturn(jobId).when(result).getJobId();
    Job job = mock(Job.class);
    doReturn(mock(QueryStatistics.class)).when(job).getStatistics();
    doReturn(job).when(bigquery).getJob(jobId);

    ResultSet resultSet = bigQueryStatementSpy.processArrowResultSet(result);
    assertThat(resultSet).isNotNull();
    assertThat(resultSet).isInstanceOf(BigQueryArrowResultSet.class);
    assertThat(resultSet.isLast()).isFalse(); // as we have 10 rows
  }

  @Test
  public void getJobTimeoutTest() throws Exception {
    // Connection stubs 10 seconds; job config is expressed in milliseconds.
    QueryJobConfiguration jobConfig = bigQueryStatement.getJobConfig("select 1").build();
    assertEquals(10000L, jobConfig.getJobTimeoutMs().longValue());
  }

  @Test
  public void getMaxBytesBilledTest() throws Exception {
    QueryJobConfiguration jobConfig = bigQueryStatement.getJobConfig("select 1").build();
    assertEquals(10L, jobConfig.getMaximumBytesBilled().longValue());
  }

  @Test
  public void testSetMaxRowsJson() throws SQLException {
    assertEquals(0, bigQueryStatement.getMaxRows());
    bigQueryStatement.setMaxRows(10);
    assertEquals(10, bigQueryStatement.getMaxRows());
  }

  @Test
  public void setQueryTimeoutTest() throws Exception {
    // A statement-level timeout (3s) must override the configuration's 10s timeout.
    bigQueryStatement.setQueryTimeout(3);
    BigQueryStatement bigQueryStatementSpy = Mockito.spy(bigQueryStatement);

    TableResult result = Mockito.mock(TableResult.class);
    BigQueryJsonResultSet jsonResultSet = mock(BigQueryJsonResultSet.class);
    QueryJobConfiguration jobConfiguration =
        QueryJobConfiguration.newBuilder(query).setJobTimeoutMs(10000L).build();

    Job job = getJobMock(result, jobConfiguration, StatementType.SELECT);
    doReturn(job).when(bigquery).create(any(JobInfo.class));

    doReturn(jsonResultSet).when(bigQueryStatementSpy).processJsonResultSet(result);
    ArgumentCaptor<JobInfo> captor = ArgumentCaptor.forClass(JobInfo.class);

    bigQueryStatementSpy.runQuery(query, jobConfiguration);
    verify(bigquery).create(captor.capture());
    QueryJobConfiguration jobConfig = captor.getValue().getConfiguration();
    assertEquals(3000L, jobConfig.getJobTimeoutMs().longValue());
  }

  @Test
  public void getLabelsTest() throws Exception {
    QueryJobConfiguration jobConfig = bigQueryStatement.getJobConfig("select 1").build();
    Map<String, String> expected =
        ImmutableMap.of(
            "key1", "val1",
            "key2", "val2",
            "key3", "val3");

    assertTrue(Maps.difference(expected, jobConfig.getLabels()).areEqual());
  }

  @Test
  public void getUpdateCountTest() {
    bigQueryStatement.setUpdateCount(100L);
    assertEquals(100, bigQueryStatement.getUpdateCount());
    assertEquals(100L, bigQueryStatement.getLargeUpdateCount());
  }

  @Test
  public void testSetExtraLabels() {
    Map<String, String> extraLabels = new HashMap<>();
    extraLabels.put("extraKey1", "extraVal1");
    bigQueryStatement.setExtraLabels(extraLabels);
    assertEquals(extraLabels, bigQueryStatement.getExtraLabels());
  }

  @Test
  public void testGetJobConfigWithExtraLabels() {
    Map<String, String> extraLabels = new HashMap<>();
    extraLabels.put("extraKey1", "extraVal1");
    extraLabels.put("key1", "overrideVal1"); // Override connection label
    bigQueryStatement.setExtraLabels(extraLabels);

    QueryJobConfiguration jobConfig = bigQueryStatement.getJobConfig("select 1").build();
    Map<String, String> expectedLabels = new HashMap<>();
    expectedLabels.put("key1", "overrideVal1");
    expectedLabels.put("key2", "val2");
    expectedLabels.put("key3", "val3");
    expectedLabels.put("extraKey1", "extraVal1");

    assertTrue(Maps.difference(expectedLabels, jobConfig.getLabels()).areEqual());
  }

  @Test
  public void testJoblessQuery() throws SQLException, InterruptedException {
    // 1. Test JobCreationMode=2 (jobless).
    doReturn(true).when(bigQueryConnection).getUseStatelessQueryMode();
    BigQueryStatement joblessStatement = new BigQueryStatement(bigQueryConnection);
    BigQueryStatement joblessStatementSpy = Mockito.spy(joblessStatement);

    TableResult tableResultMock = mock(TableResult.class);
    doReturn("queryId").when(tableResultMock).getQueryId();
    doReturn(null).when(tableResultMock).getJobId();
    doReturn(tableResultMock)
        .when(bigquery)
        .queryWithTimeout(any(QueryJobConfiguration.class), any(), any());
    doReturn(mock(BigQueryJsonResultSet.class))
        .when(joblessStatementSpy)
        .processJsonResultSet(tableResultMock);

    Job dryRunJobMock = getJobMock(null, null, StatementType.SELECT);
    ArgumentCaptor<JobInfo> dryRunCaptor = ArgumentCaptor.forClass(JobInfo.class);
    doReturn(dryRunJobMock).when(bigquery).create(dryRunCaptor.capture());

    joblessStatementSpy.executeQuery("SELECT 1");

    verify(bigquery).queryWithTimeout(any(QueryJobConfiguration.class), any(), any());
    verify(bigquery).create(any(JobInfo.class));
    assertTrue(
        Boolean.TRUE.equals(
            ((QueryJobConfiguration) dryRunCaptor.getValue().getConfiguration()).dryRun()));

    // 2. Test JobCreationMode=1 (jobful).
    Mockito.reset(bigquery);
    doReturn(false).when(bigQueryConnection).getUseStatelessQueryMode();
    BigQueryStatement jobfulStatement = new BigQueryStatement(bigQueryConnection);
    BigQueryStatement jobfulStatementSpy = Mockito.spy(jobfulStatement);

    TableResult tableResultJobfulMock = mock(TableResult.class);
    QueryJobConfiguration jobConf = QueryJobConfiguration.newBuilder("SELECT 1").build();
    Job jobMock = getJobMock(tableResultJobfulMock, jobConf, StatementType.SELECT);
    ArgumentCaptor<JobInfo> jobfulCaptor = ArgumentCaptor.forClass(JobInfo.class);
    doReturn(jobMock).when(bigquery).create(jobfulCaptor.capture());
    doReturn(mock(BigQueryJsonResultSet.class))
        .when(jobfulStatementSpy)
        .processJsonResultSet(tableResultJobfulMock);

    jobfulStatementSpy.executeQuery("SELECT 1");

    verify(bigquery).create(any(JobInfo.class));
    assertTrue(
        jobfulCaptor.getAllValues().stream()
            .noneMatch(
                jobInfo ->
                    Boolean.TRUE.equals(
                        ((QueryJobConfiguration) jobInfo.getConfiguration()).dryRun())));
    verify(bigquery, Mockito.never())
        .queryWithTimeout(any(QueryJobConfiguration.class), any(), any());
  }

  @Test
  public void testCloseCancelsJob() throws SQLException, InterruptedException {
    BigQueryStatement bigQueryStatementSpy = Mockito.spy(bigQueryStatement);
    TableResult tableResult = mock(TableResult.class);
    Schema mockSchema = Schema.of(FieldList.of());
    doReturn(mockSchema).when(tableResult).getSchema();
    QueryJobConfiguration queryJobConfiguration =
        QueryJobConfiguration.newBuilder(query).setPriority(Priority.BATCH).build();
    Job job = getJobMock(tableResult, queryJobConfiguration, StatementType.SELECT);

    doReturn(job).when(bigquery).create(any(JobInfo.class));
    doReturn(false).when(bigQueryStatementSpy).useReadAPI(eq(tableResult));
    doReturn(mock(JobId.class)).when(tableResult).getJobId();
    // Simulate a long-running query so close() fires while the job is in flight.
    Mockito.when(job.getQueryResults(any(QueryResultsOption.class)))
        .thenAnswer(
            invocation -> {
              Thread.sleep(2000);
              return null;
            });
    Thread t =
        new Thread(
            () -> {
              try {
                bigQueryStatementSpy.runQuery(query, queryJobConfiguration);
              } catch (Exception ignored) {
                // close() is expected to abort the in-flight query.
              }
            });

    t.start();
    // Sleep to allow the background thread to call "create".
    // NOTE(review): time-based synchronization can be flaky on slow machines — consider a latch.
    Thread.sleep(500);
    bigQueryStatementSpy.close();
    t.join();
    verify(bigquery, Mockito.times(1)).cancel(any(JobId.class));
  }

  @Test
  public void testCancelWithJoblessQuery() throws SQLException, InterruptedException {
    doReturn(true).when(bigQueryConnection).getUseStatelessQueryMode();
    BigQueryStatement joblessStatement = new BigQueryStatement(bigQueryConnection);
    BigQueryStatement joblessStatementSpy = Mockito.spy(joblessStatement);

    TableResult tableResultMock = mock(TableResult.class);
    doReturn(null).when(tableResultMock).getJobId();

    doReturn(tableResultMock)
        .when(bigquery)
        .queryWithTimeout(any(QueryJobConfiguration.class), any(), any());

    Job dryRunJobMock = getJobMock(null, null, StatementType.SELECT);
    doReturn(dryRunJobMock).when(bigquery).create(any(JobInfo.class));

    BigQueryJsonResultSet resultSetMock = mock(BigQueryJsonResultSet.class);
    doReturn(resultSetMock).when(joblessStatementSpy).processJsonResultSet(tableResultMock);

    joblessStatementSpy.executeQuery("SELECT 1");

    // Pre-check: statement has a result set.
    assertTrue(joblessStatementSpy.currentResultSet != null);

    joblessStatementSpy.cancel();

    // Post-check: the result set's close() method was called.
    verify(resultSetMock).close();

    // And no backend cancellation was attempted.
    verify(bigquery, Mockito.never()).cancel(any(JobId.class));
  }
}
package com.google.cloud.bigquery.jdbc;

import static com.google.common.truth.Truth.assertThat;

import java.util.concurrent.ThreadFactory;
import org.junit.Before;
import org.junit.Test;

/** Unit tests for {@link BigQueryThreadFactory}. */
public class BigQueryThreadFactoryTest {

  private static final String THREAD_NAME_PREFIX = "BigQuery-Thread-";

  // Instance field (was a static mutable field reassigned in @Before, which is
  // misleading and unsafe if tests ever run concurrently).
  private ThreadFactory threadFactory;

  @Before
  public void setUp() {
    threadFactory = new BigQueryThreadFactory(THREAD_NAME_PREFIX);
  }

  @Test
  public void testNewThread() {
    assertThat(threadFactory).isNotNull();
    Thread thread = threadFactory.newThread(() -> {});
    assertThat(thread).isNotNull();
    assertThat(thread.getName()).startsWith(THREAD_NAME_PREFIX);
  }
}
package com.google.cloud.bigquery.jdbc;

import static com.google.common.truth.Truth.assertThat;
import static org.junit.Assert.assertThrows;

import com.google.cloud.bigquery.exception.BigQueryJdbcCoercionException;
import com.google.cloud.bigquery.exception.BigQueryJdbcCoercionNotFoundException;
import com.google.cloud.bigquery.jdbc.TestType.Text;
import java.math.BigDecimal;
import java.math.BigInteger;
import java.util.function.Function;
import org.junit.Test;

/** Unit tests for {@link BigQueryTypeCoercer}: identity, composition, and error paths. */
public class BigQueryTypeCoercerTest {

  @Test
  public void shouldReturnSameValueWhenTargetTypeIsSameAsSourceType() {
    assertThat(BigQueryTypeCoercer.INSTANCE.coerceTo(Integer.class, 56)).isEqualTo(56);
    assertThat(BigQueryTypeCoercer.INSTANCE.coerceTo(Long.class, 56L)).isEqualTo(56L);
  }

  @Test
  public void shouldBeAbleToComposeMultipleCoercions() {
    StringToBigDecimal stringToBigDecimal = new StringToBigDecimal();
    BigDecimalToBigInteger bigDecimalToBigInteger = new BigDecimalToBigInteger();

    // String -> BigDecimal -> BigInteger, composed via Function.andThen.
    // NOTE(review): generic parameters restored after mangling stripped them — confirm
    // they match BigQueryCoercion's declared type arguments.
    Function<String, BigInteger> composedCoercion =
        stringToBigDecimal.andThen(bigDecimalToBigInteger);

    BigQueryTypeCoercer bigQueryTypeCoercer =
        new BigQueryTypeCoercerBuilder()
            .registerTypeCoercion(composedCoercion, String.class, BigInteger.class)
            .build();

    // The fractional part is truncated by BigDecimal.toBigInteger().
    assertThat(bigQueryTypeCoercer.coerceTo(BigInteger.class, "123567.66884"))
        .isEqualTo(BigInteger.valueOf(123567));
  }

  @Test
  public void shouldThrowCoercionNotFoundException() {
    byte[] bytesArray = {72, 101, 108, 108, 111, 32, 87, 111, 114, 108, 100, 33}; // Hello World!
    Text text = new Text(bytesArray);

    BigQueryJdbcCoercionNotFoundException exception =
        assertThrows(
            BigQueryJdbcCoercionNotFoundException.class,
            () -> BigQueryTypeCoercer.INSTANCE.coerceTo(Long.class, text));
    assertThat(exception.getMessage())
        .isEqualTo(
            "Coercion not found for "
                + "[com.google.cloud.bigquery.jdbc.TestType.Text -> java.lang.Long]"
                + " conversion");
  }

  @Test
  public void shouldThrowCoercionException() {
    BigQueryTypeCoercer bigQueryTypeCoercer =
        new BigQueryTypeCoercerBuilder()
            .registerTypeCoercion(Math::toIntExact, Long.class, Integer.class)
            .build();
    // 2147483648 (Integer.MAX_VALUE + 1) overflows int, so Math.toIntExact throws
    // ArithmeticException, which the coercer wraps.
    BigQueryJdbcCoercionException exception =
        assertThrows(
            BigQueryJdbcCoercionException.class,
            () -> bigQueryTypeCoercer.coerceTo(Integer.class, 2147483648L));
    assertThat(exception.getMessage()).isEqualTo("Coercion error");
    assertThat(exception.getCause()).isInstanceOf(ArithmeticException.class);
  }

  /** Coercion from a decimal string to {@link BigDecimal}. */
  private static class StringToBigDecimal implements BigQueryCoercion<String, BigDecimal> {

    @Override
    public BigDecimal coerce(String value) {
      return new BigDecimal(value);
    }
  }

  /** Coercion from {@link BigDecimal} to {@link BigInteger} (truncating). */
  private static class BigDecimalToBigInteger implements BigQueryCoercion<BigDecimal, BigInteger> {

    @Override
    public BigInteger coerce(BigDecimal value) {
      return value.toBigInteger();
    }
  }
}
 * You may obtain a copy of the License at
 *
 * https://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package com.google.cloud.bigquery.jdbc;

import static com.google.cloud.bigquery.FieldValue.Attribute.PRIMITIVE;
import static com.google.cloud.bigquery.FieldValue.Attribute.RANGE;
import static com.google.cloud.bigquery.FieldValue.Attribute.RECORD;
import static com.google.cloud.bigquery.FieldValue.Attribute.REPEATED;
import static com.google.cloud.bigquery.jdbc.BigQueryTypeCoercionUtility.INSTANCE;
import static com.google.common.truth.Truth.assertThat;
import static org.junit.Assert.assertThrows;

import com.google.cloud.bigquery.FieldElementType;
import com.google.cloud.bigquery.FieldValue;
import com.google.cloud.bigquery.FieldValueList;
import com.google.cloud.bigquery.Range;
import com.google.cloud.bigquery.exception.BigQueryJdbcCoercionException;
import com.google.common.collect.ImmutableList;
import java.math.BigDecimal;
import java.sql.Date;
import java.sql.Time;
import java.sql.Timestamp;
import java.time.Instant;
import java.time.LocalDate;
import java.time.LocalDateTime;
import java.time.LocalTime;
import java.time.ZoneId;
import java.time.temporal.ChronoUnit;
import java.util.concurrent.TimeUnit;
import org.junit.Test;

/**
 * Tests coercion of {@link FieldValue} instances (and {@link Range} values) into common
 * Java/JDBC types through {@link BigQueryTypeCoercionUtility#INSTANCE}.
 *
 * <p>Two null scenarios are exercised throughout: a {@code null} {@link FieldValue} reference,
 * and a PRIMITIVE {@link FieldValue} whose inner value is {@code null} ({@code NULL_VALUE}).
 * Primitive-wrapper targets coerce nulls to their zero/false defaults; object targets yield
 * {@code null}.
 */
public class FieldValueTypeBigQueryCoercionUtilityTest {
  // Primitive field values used as coercion inputs; BigQuery carries primitives as strings.
  private static final FieldValue STRING_VALUE = FieldValue.of(PRIMITIVE, "sample-string");
  private static final FieldValue INTEGER_VALUE = FieldValue.of(PRIMITIVE, "345");
  private static final FieldValue FLOAT_VALUE = FieldValue.of(PRIMITIVE, "345.21");
  private static final FieldValue SHORT_VALUE = FieldValue.of(PRIMITIVE, "345");
  private static final FieldValue LONG_VALUE = FieldValue.of(PRIMITIVE, "4374218905");
  private static final FieldValue DOUBLE_VALUE = FieldValue.of(PRIMITIVE, "56453.458393");
  private static final FieldValue BIG_DECIMAL_VALUE = FieldValue.of(PRIMITIVE, "56453.458393");
  private static final FieldValue BASE64_ENCODED_VALUE =
      FieldValue.of(PRIMITIVE, "SGVsbG8gV29ybGQK"); // base64 of "Hello World\n"
  // Epoch seconds with a fractional part (2014-08-19T12:41:35.22Z).
  private static final FieldValue TIMESTAMP_VALUE = FieldValue.of(PRIMITIVE, "1408452095.22");
  private static final FieldValue DATE_VALUE = FieldValue.of(PRIMITIVE, "2023-03-13");
  private static final FieldValue TIME_VALUE = FieldValue.of(PRIMITIVE, "23:59:59");
  private static final FieldValue TIME_WITH_NANOSECOND_VALUE =
      FieldValue.of(PRIMITIVE, "23:59:59.99999");
  private static final FieldValue TRUE_VALUE = FieldValue.of(PRIMITIVE, "true");
  private static final FieldValue FALSE_VALUE = FieldValue.of(PRIMITIVE, "false");
  private static final FieldValue NULL_VALUE = FieldValue.of(PRIMITIVE, null);
  private static final FieldValue INTEGER_ARRAY =
      FieldValue.of(
          REPEATED,
          FieldValueList.of(
              ImmutableList.of(FieldValue.of(PRIMITIVE, 1), FieldValue.of(PRIMITIVE, 2))));
  private static final FieldValue RECORD_VALUE =
      FieldValue.of(
          RECORD, ImmutableList.of(INTEGER_VALUE, STRING_VALUE, TIME_VALUE, INTEGER_ARRAY));

  // RANGE values for each element type supported by BigQuery ranges.
  private static final Range RANGE_DATE =
      Range.newBuilder()
          .setType(FieldElementType.newBuilder().setType("DATE").build())
          .setStart("1970-01-02")
          .setEnd("1970-03-04")
          .build();

  private static final Range RANGE_DATETIME =
      Range.newBuilder()
          .setType(FieldElementType.newBuilder().setType("DATETIME").build())
          .setStart("2014-08-19 05:41:35.220000")
          .setEnd("2015-09-20 06:41:35.220000")
          .build();

  private static final Range RANGE_TIMESTAMP =
      Range.newBuilder()
          .setType(FieldElementType.newBuilder().setType("TIMESTAMP").build())
          .setStart("2014-08-19 12:41:35.220000+00:00")
          .setEnd("2015-09-20 13:41:35.220000+01:00")
          .build();

  private static final FieldValue RANGE_DATE_VALUE = FieldValue.of(RANGE, RANGE_DATE);
  private static final FieldValue RANGE_DATE_TIME_VALUE = FieldValue.of(RANGE, RANGE_DATETIME);
  private static final FieldValue RANGE_TIMESTAMP_VALUE = FieldValue.of(RANGE, RANGE_TIMESTAMP);

  // Ranges render as half-open intervals: "[start, end)".
  @Test
  public void fieldValueToStringRangeDate() {
    String expectedRangeDate =
        String.format(
            "[%s, %s)",
            RANGE_DATE.getStart().getStringValue(), RANGE_DATE.getEnd().getStringValue());
    assertThat(INSTANCE.coerceTo(String.class, RANGE_DATE_VALUE)).isEqualTo(expectedRangeDate);
  }

  @Test
  public void rangeDateToString() {
    String expectedRangeDate =
        String.format(
            "[%s, %s)",
            RANGE_DATE.getStart().getStringValue(), RANGE_DATE.getEnd().getStringValue());
    assertThat(INSTANCE.coerceTo(String.class, RANGE_DATE)).isEqualTo(expectedRangeDate);
  }

  @Test
  public void fieldValueToStringRangeDatetime() {
    String expectedRangeDatetime =
        String.format(
            "[%s, %s)",
            RANGE_DATETIME.getStart().getStringValue(), RANGE_DATETIME.getEnd().getStringValue());
    assertThat(INSTANCE.coerceTo(String.class, RANGE_DATE_TIME_VALUE))
        .isEqualTo(expectedRangeDatetime);
  }

  @Test
  public void rangeDatetimeToString() {
    String expectedRangeDate =
        String.format(
            "[%s, %s)",
            RANGE_DATETIME.getStart().getStringValue(), RANGE_DATETIME.getEnd().getStringValue());
    assertThat(INSTANCE.coerceTo(String.class, RANGE_DATETIME)).isEqualTo(expectedRangeDate);
  }

  @Test
  public void fieldValueToStringRangeTimestamp() {
    String expectedRangeTimestamp =
        String.format(
            "[%s, %s)",
            RANGE_TIMESTAMP.getStart().getStringValue(), RANGE_TIMESTAMP.getEnd().getStringValue());
    assertThat(INSTANCE.coerceTo(String.class, RANGE_TIMESTAMP_VALUE))
        .isEqualTo(expectedRangeTimestamp);
  }

  @Test
  public void rangeTimestampToString() {
    String expectedRangeTimestamp =
        String.format(
            "[%s, %s)",
            RANGE_TIMESTAMP.getStart().getStringValue(), RANGE_TIMESTAMP.getEnd().getStringValue());
    assertThat(INSTANCE.coerceTo(String.class, RANGE_TIMESTAMP)).isEqualTo(expectedRangeTimestamp);
  }

  @Test
  public void fieldValueToString() {
    assertThat(INSTANCE.coerceTo(String.class, STRING_VALUE)).isEqualTo("sample-string");
  }

  @Test
  public void fieldValueToStringWhenNull() {
    assertThat(INSTANCE.coerceTo(String.class, null)).isNull();
  }

  @Test
  public void fieldValueToStringWhenInnerValueIsNull() {
    assertThat(INSTANCE.coerceTo(String.class, NULL_VALUE)).isNull();
  }

  @Test
  public void fieldValueToStringWhenInnerValueIsAnArray() {
    // Arrays stringify via FieldValue#toString, not as a SQL-style array literal.
    assertThat(INSTANCE.coerceTo(String.class, INTEGER_ARRAY))
        .isEqualTo(
            "[FieldValue{attribute=PRIMITIVE, value=1, useInt64Timestamps=false}, FieldValue{attribute=PRIMITIVE, value=2, useInt64Timestamps=false}]");
  }

  @Test
  public void fieldValueToInteger() {
    assertThat(INSTANCE.coerceTo(Integer.class, INTEGER_VALUE)).isEqualTo(345);
  }

  @Test
  public void fieldValueToIntegerWhenNull() {
    // Numeric targets default to zero for null input (JDBC getInt-style semantics).
    assertThat(INSTANCE.coerceTo(Integer.class, null)).isEqualTo(0);
  }

  @Test
  public void fieldValueToIntegerWhenInnerValueIsNull() {
    assertThat(INSTANCE.coerceTo(Integer.class, NULL_VALUE)).isEqualTo(0);
  }

  @Test
  public void fieldValueToFloat() {
    assertThat(INSTANCE.coerceTo(Float.class, FLOAT_VALUE)).isEqualTo(345.21f);
  }

  @Test
  public void fieldValueToFloatWhenNull() {
    assertThat(INSTANCE.coerceTo(Float.class, null)).isEqualTo(0f);
  }

  @Test
  public void fieldValueToFloatWhenInnerValueNull() {
    assertThat(INSTANCE.coerceTo(Float.class, NULL_VALUE)).isEqualTo(0f);
  }

  @Test
  public void fieldValueToShort() {
    assertThat(INSTANCE.coerceTo(Short.class, SHORT_VALUE)).isEqualTo((short) 345);
  }

  @Test
  public void fieldValueToShortWhenNull() {
    assertThat(INSTANCE.coerceTo(Short.class, null)).isEqualTo((short) 0);
  }

  @Test
  public void fieldValueToShortWhenInnerValueNull() {
    assertThat(INSTANCE.coerceTo(Short.class, NULL_VALUE)).isEqualTo((short) 0);
  }

  @Test
  public void fieldValueToLong() {
    assertThat(INSTANCE.coerceTo(Long.class, LONG_VALUE)).isEqualTo(4374218905L);
  }

  @Test
  public void fieldValueToLongWhenNull() {
    assertThat(INSTANCE.coerceTo(Long.class, null)).isEqualTo(0L);
  }

  @Test
  public void fieldValueToLongWhenInnerValueIsNull() {
    assertThat(INSTANCE.coerceTo(Long.class, NULL_VALUE)).isEqualTo(0L);
  }

  @Test
  public void fieldValueToDouble() {
    assertThat(INSTANCE.coerceTo(Double.class, DOUBLE_VALUE)).isEqualTo(56453.458393D);
  }

  @Test
  public void fieldValueToDoubleWhenNull() {
    assertThat(INSTANCE.coerceTo(Double.class, null)).isEqualTo(0D);
  }

  @Test
  public void fieldValueToDoubleWhenInnerValueIsNull() {
    assertThat(INSTANCE.coerceTo(Double.class, NULL_VALUE)).isEqualTo(0D);
  }

  @Test
  public void fieldValueToBigDecimal() {
    assertThat(INSTANCE.coerceTo(BigDecimal.class, BIG_DECIMAL_VALUE))
        .isEqualTo(new BigDecimal("56453.458393"));
  }

  @Test
  public void fieldValueToBigDecimalWhenNull() {
    // Object targets (BigDecimal, byte[], Timestamp, ...) yield null rather than a default.
    assertThat(INSTANCE.coerceTo(BigDecimal.class, null)).isNull();
  }

  @Test
  public void fieldValueToBigDecimalWhenInnerValueIsNull() {
    assertThat(INSTANCE.coerceTo(BigDecimal.class, NULL_VALUE)).isNull();
  }

  @Test
  public void fieldValueToBoolean() {
    assertThat(INSTANCE.coerceTo(Boolean.class, TRUE_VALUE)).isTrue();
    assertThat(INSTANCE.coerceTo(Boolean.class, FALSE_VALUE)).isFalse();
  }

  @Test
  public void fieldValueToBooleanWhenNull() {
    assertThat(INSTANCE.coerceTo(Boolean.class, null)).isFalse();
  }

  @Test
  public void fieldValueToBooleanWhenInnerValueIsNull() {
    assertThat(INSTANCE.coerceTo(Boolean.class, NULL_VALUE)).isFalse();
  }

  @Test
  public void fieldValueToBytesArray() {
    // Base64 "SGVsbG8gV29ybGQK" decodes to "Hello World\n" (trailing byte 10 is the newline).
    assertThat(INSTANCE.coerceTo(byte[].class, BASE64_ENCODED_VALUE))
        .isEqualTo(new byte[] {72, 101, 108, 108, 111, 32, 87, 111, 114, 108, 100, 10});
  }

  @Test
  public void fieldValueToBytesArrayWhenNull() {
    assertThat(INSTANCE.coerceTo(byte[].class, null)).isNull();
  }

  @Test
  public void fieldValueToBytesArrayWhenInnerValueIsNull() {
    assertThat(INSTANCE.coerceTo(byte[].class, NULL_VALUE)).isNull();
  }

  @Test
  public void fieldValueToTimestamp() {
    // Expected value built from the micros-since-epoch reading of TIMESTAMP_VALUE, in UTC.
    Instant instant = Instant.EPOCH.plus(TIMESTAMP_VALUE.getTimestampValue(), ChronoUnit.MICROS);
    LocalDateTime localDateTime = LocalDateTime.ofInstant(instant, ZoneId.of("UTC"));
    assertThat(INSTANCE.coerceTo(Timestamp.class, TIMESTAMP_VALUE))
        .isEqualTo(Timestamp.valueOf(localDateTime));
  }

  @Test
  public void fieldValueToTimestampWhenNull() {
    assertThat(INSTANCE.coerceTo(Timestamp.class, null)).isNull();
  }

  @Test
  public void fieldValueToTimestampWhenInnerValueIsNull() {
    assertThat(INSTANCE.coerceTo(Timestamp.class, NULL_VALUE)).isNull();
  }

  @Test
  public void fieldValueToTime() {
    LocalTime expectedTime = LocalTime.of(23, 59, 59);
    assertThat(INSTANCE.coerceTo(Time.class, TIME_VALUE))
        .isEqualTo(new Time(TimeUnit.NANOSECONDS.toMillis(expectedTime.toNanoOfDay())));
    // Sub-second precision is retained only to milliseconds (java.sql.Time granularity).
    LocalTime expectedTimeWithNanos = LocalTime.parse("23:59:59.99999");
    assertThat(INSTANCE.coerceTo(Time.class, TIME_WITH_NANOSECOND_VALUE))
        .isEqualTo(new Time(TimeUnit.NANOSECONDS.toMillis(expectedTimeWithNanos.toNanoOfDay())));
  }

  @Test
  public void fieldValueToTimeWhenNull() {
    assertThat(INSTANCE.coerceTo(Time.class, null)).isNull();
  }

  @Test
  public void fieldValueToTimeWhenInnerValueIsNull() {
    assertThat(INSTANCE.coerceTo(Time.class, NULL_VALUE)).isNull();
  }

  @Test
  public void fieldValueToTimeWithInvalidValue() {
    // Unparseable times must surface as a coercion failure wrapping the parse error.
    FieldValue invalidTime = FieldValue.of(PRIMITIVE, "99:99:99");

    BigQueryJdbcCoercionException coercionException =
        assertThrows(
            BigQueryJdbcCoercionException.class, () -> INSTANCE.coerceTo(Time.class, invalidTime));
    assertThat(coercionException.getCause()).isInstanceOf(IllegalArgumentException.class);
  }

  @Test
  public void fieldValueToDate() {
    LocalDate expectedDate = LocalDate.of(2023, 3, 13);
    assertThat(INSTANCE.coerceTo(Date.class, DATE_VALUE)).isEqualTo(Date.valueOf(expectedDate));
  }

  @Test
  public void fieldValueToDateWhenNull() {
    assertThat(INSTANCE.coerceTo(Date.class, null)).isNull();
  }

  @Test
  public void fieldValueToDateWhenInnerValueIsNull() {
    assertThat(INSTANCE.coerceTo(Date.class, NULL_VALUE)).isNull();
  }

  @Test
  public void fieldValueToObject() {
    // RECORD values coerce to their underlying list of field values.
    assertThat(INSTANCE.coerceTo(Object.class, RECORD_VALUE))
        .isEqualTo(ImmutableList.of(INTEGER_VALUE, STRING_VALUE, TIME_VALUE, INTEGER_ARRAY));
  }

  @Test
  public void fieldValueToObjectWhenNull() {
    assertThat(INSTANCE.coerceTo(Object.class, null)).isNull();
  }

  @Test
  public void fieldValueToObjectWhenInnerValueIsNull() {
    assertThat(INSTANCE.coerceTo(Object.class, NULL_VALUE)).isNull();
  }
}
diff --git a/google-cloud-bigquery-jdbc/src/test/java/com/google/cloud/bigquery/jdbc/NullHandlingTest.java b/google-cloud-bigquery-jdbc/src/test/java/com/google/cloud/bigquery/jdbc/NullHandlingTest.java
new file mode 100644
index 000000000..ab274c879
--- /dev/null
+++ b/google-cloud-bigquery-jdbc/src/test/java/com/google/cloud/bigquery/jdbc/NullHandlingTest.java
@@ -0,0 +1,44 @@
/*
 * Copyright 2023 Google LLC
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * https://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
+ */ + +package com.google.cloud.bigquery.jdbc; + +import static com.google.common.truth.Truth.assertThat; + +import org.junit.Test; + +public class NullHandlingTest { + + @Test + public void shouldReturnNullForNullByDefault() { + assertThat(BigQueryTypeCoercer.INSTANCE.coerceTo(Integer.class, null)).isNull(); + } + + @Test + public void shouldReturnCustomValueForNull() { + BigQueryTypeCoercer bigQueryTypeCoercer = + new BigQueryTypeCoercerBuilder().registerTypeCoercion(new NullToIntegerCoercion()).build(); + + assertThat(bigQueryTypeCoercer.coerceTo(Integer.class, null)).isEqualTo(0); + } + + private static class NullToIntegerCoercion implements BigQueryCoercion { + @Override + public Integer coerce(Void value) { + return 0; // returning zero as the default value + } + } +} diff --git a/google-cloud-bigquery-jdbc/src/test/java/com/google/cloud/bigquery/jdbc/PooledConnectionDataSourceTest.java b/google-cloud-bigquery-jdbc/src/test/java/com/google/cloud/bigquery/jdbc/PooledConnectionDataSourceTest.java new file mode 100644 index 000000000..bacfdae1d --- /dev/null +++ b/google-cloud-bigquery-jdbc/src/test/java/com/google/cloud/bigquery/jdbc/PooledConnectionDataSourceTest.java @@ -0,0 +1,80 @@ +/* + * Copyright 2025 Google LLC + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package com.google.cloud.bigquery.jdbc; + +import static org.junit.Assert.assertEquals; +import static org.junit.Assert.assertNotNull; +import static org.junit.Assert.assertThrows; +import static org.junit.Assert.assertTrue; +import static org.mockito.Mockito.doReturn; +import static org.mockito.Mockito.mock; + +import com.google.cloud.bigquery.exception.BigQueryJdbcRuntimeException; +import java.sql.SQLException; +import javax.sql.PooledConnection; +import org.junit.Test; + +public class PooledConnectionDataSourceTest { + private static final Long LISTENER_POOL_SIZE = 20L; + private static final Long CONNECTION_POOL_SIZE = 20L; + private static final Long DEFAULT_LISTENER_POOL_SIZE = 10L; + private static final Long DEFAULT_CONNECTION_POOL_SIZE = 10L; + + @Test + public void testGetPooledConnection() throws SQLException { + String connectionUrl = + "jdbc:bigquery://https://www.googleapis.com/bigquery/v2:443;OAuthType=3;ProjectId=testProject;ConnectionPoolSize=20;ListenerPoolSize=20;"; + + BigQueryConnection bqConnection = mock(BigQueryConnection.class); + doReturn(connectionUrl).when(bqConnection).getConnectionUrl(); + + PooledConnectionDataSource pooledDataSource = new PooledConnectionDataSource(); + pooledDataSource.setConnection(bqConnection); + + PooledConnection pooledConnection = pooledDataSource.getPooledConnection(); + assertNotNull(pooledConnection); + assertTrue(pooledConnection instanceof BigQueryPooledConnection); + BigQueryPooledConnection bqPooledConnection = (BigQueryPooledConnection) pooledConnection; + assertEquals(LISTENER_POOL_SIZE, bqPooledConnection.getListenerPoolSize()); + assertNotNull(pooledDataSource.getConnectionPoolManager()); + assertEquals( + CONNECTION_POOL_SIZE, pooledDataSource.getConnectionPoolManager().getConnectionPoolSize()); + } + + @Test + public void testGetPooledConnectionNoConnectionURl() throws SQLException { + BigQueryConnection bqConnection = mock(BigQueryConnection.class); + PooledConnectionDataSource 
pooledDataSource = new PooledConnectionDataSource(); + pooledDataSource.setConnection(bqConnection); + + assertThrows(BigQueryJdbcRuntimeException.class, () -> pooledDataSource.getPooledConnection()); + } + + @Test + public void testGetPooledConnectionFailInvalidConnectionURl() { + String connectionUrl = + "jdbc:bigquery://https://www.googleapis.com/bigquery/v2:443;OAuthType=3;ProjectId=testProject;" + + "ListenerPoolSize=invalid"; + BigQueryConnection bqConnection = mock(BigQueryConnection.class); + doReturn(connectionUrl).when(bqConnection).getConnectionUrl(); + + PooledConnectionDataSource pooledDataSource = new PooledConnectionDataSource(); + pooledDataSource.setConnection(bqConnection); + + assertThrows(NumberFormatException.class, () -> pooledDataSource.getPooledConnection()); + } +} diff --git a/google-cloud-bigquery-jdbc/src/test/java/com/google/cloud/bigquery/jdbc/PooledConnectionListenerTest.java b/google-cloud-bigquery-jdbc/src/test/java/com/google/cloud/bigquery/jdbc/PooledConnectionListenerTest.java new file mode 100644 index 000000000..d200709c4 --- /dev/null +++ b/google-cloud-bigquery-jdbc/src/test/java/com/google/cloud/bigquery/jdbc/PooledConnectionListenerTest.java @@ -0,0 +1,172 @@ +/* + * Copyright 2025 Google LLC + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package com.google.cloud.bigquery.jdbc; + +import static org.junit.Assert.assertEquals; +import static org.junit.Assert.assertFalse; +import static org.junit.Assert.assertNotNull; +import static org.junit.Assert.assertNull; +import static org.junit.Assert.assertTrue; +import static org.mockito.Mockito.mock; + +import java.io.IOException; +import java.sql.Connection; +import java.sql.SQLException; +import org.junit.Before; +import org.junit.Test; + +public class PooledConnectionListenerTest { + private BigQueryConnection bigQueryConnection; + private static final Long LISTENER_POOL_SIZE = 10L; + private static final Long CONNECTION_POOL_SIZE = 10L; + + @Before + public void setUp() throws IOException, SQLException { + bigQueryConnection = mock(BigQueryConnection.class); + } + + @Test + public void testAddConnectionListener() throws SQLException { + BigQueryPooledConnection pooledConnection = new BigQueryPooledConnection(bigQueryConnection); + PooledConnectionListener listener = new PooledConnectionListener(LISTENER_POOL_SIZE); + assertEquals(CONNECTION_POOL_SIZE, listener.getConnectionPoolSize()); + assertEquals(LISTENER_POOL_SIZE, pooledConnection.getListenerPoolSize()); + pooledConnection.addConnectionEventListener(listener); + assertTrue(listener.isConnectionPoolEmpty()); + assertNull(listener.getPooledConnection()); + } + + @Test + public void testRemoveConnectionListener() throws SQLException { + BigQueryPooledConnection pooledConnection = new BigQueryPooledConnection(bigQueryConnection); + PooledConnectionListener listener = new PooledConnectionListener(LISTENER_POOL_SIZE); + assertEquals(CONNECTION_POOL_SIZE, listener.getConnectionPoolSize()); + assertEquals(LISTENER_POOL_SIZE, pooledConnection.getListenerPoolSize()); + pooledConnection.addConnectionEventListener(listener); + assertTrue(listener.isConnectionPoolEmpty()); + + assertTrue(pooledConnection.isListenerPooled(listener)); + pooledConnection.removeConnectionEventListener(listener); + 
assertFalse(pooledConnection.isListenerPooled(listener)); + } + + @Test + public void testConnectionHandleClosedByConnection() throws SQLException { + BigQueryPooledConnection pooledConnection = new BigQueryPooledConnection(bigQueryConnection); + assertFalse(pooledConnection.inUse()); + Connection connection = pooledConnection.getConnection(); + assertNotNull(connection); + assertFalse(connection.isClosed()); + assertTrue(pooledConnection.inUse()); + + PooledConnectionListener listener = new PooledConnectionListener(LISTENER_POOL_SIZE); + assertEquals(CONNECTION_POOL_SIZE, listener.getConnectionPoolSize()); + assertEquals(LISTENER_POOL_SIZE, pooledConnection.getListenerPoolSize()); + pooledConnection.addConnectionEventListener(listener); + assertTrue(listener.isConnectionPoolEmpty()); + + connection.close(); + assertFalse(pooledConnection.inUse()); + assertFalse(listener.isConnectionPoolEmpty()); + assertEquals(1, listener.getConnectionPoolCurrentCapacity()); + assertTrue(pooledConnection.isListenerPooled(listener)); + } + + @Test + public void testConnectionHandleClosedByPooledConnection() throws SQLException { + BigQueryPooledConnection pooledConnection = new BigQueryPooledConnection(bigQueryConnection); + assertFalse(pooledConnection.inUse()); + Connection connection = pooledConnection.getConnection(); + assertNotNull(connection); + assertFalse(connection.isClosed()); + assertTrue(pooledConnection.inUse()); + + PooledConnectionListener listener = new PooledConnectionListener(LISTENER_POOL_SIZE); + assertEquals(CONNECTION_POOL_SIZE, listener.getConnectionPoolSize()); + assertEquals(LISTENER_POOL_SIZE, pooledConnection.getListenerPoolSize()); + pooledConnection.addConnectionEventListener(listener); + assertTrue(listener.isConnectionPoolEmpty()); + + pooledConnection.close(); + assertFalse(pooledConnection.inUse()); + assertFalse(listener.isConnectionPoolEmpty()); + assertEquals(1, listener.getConnectionPoolCurrentCapacity()); + 
assertTrue(pooledConnection.isListenerPooled(listener)); + } + + @Test + public void testFireConnectionError() throws SQLException { + BigQueryPooledConnection pooledConnection = new BigQueryPooledConnection(bigQueryConnection); + assertFalse(pooledConnection.inUse()); + Connection connection = pooledConnection.getConnection(); + assertNotNull(connection); + assertFalse(connection.isClosed()); + assertTrue(pooledConnection.inUse()); + + PooledConnectionListener listener = new PooledConnectionListener(LISTENER_POOL_SIZE); + assertEquals(CONNECTION_POOL_SIZE, listener.getConnectionPoolSize()); + assertEquals(LISTENER_POOL_SIZE, pooledConnection.getListenerPoolSize()); + pooledConnection.addConnectionEventListener(listener); + assertTrue(listener.isConnectionPoolEmpty()); + + pooledConnection.fireConnectionError(new SQLException("test")); + assertFalse(pooledConnection.inUse()); + assertTrue(listener.isConnectionPoolEmpty()); + assertFalse(pooledConnection.isListenerPooled(listener)); + } + + @Test + public void testGetConnectionWhenPoolEmpty() throws SQLException { + BigQueryPooledConnection pooledConnection = new BigQueryPooledConnection(bigQueryConnection); + assertFalse(pooledConnection.inUse()); + Connection connection = pooledConnection.getConnection(); + assertNotNull(connection); + assertFalse(connection.isClosed()); + assertTrue(pooledConnection.inUse()); + + PooledConnectionListener listener = new PooledConnectionListener(LISTENER_POOL_SIZE); + assertEquals(CONNECTION_POOL_SIZE, listener.getConnectionPoolSize()); + assertEquals(LISTENER_POOL_SIZE, pooledConnection.getListenerPoolSize()); + pooledConnection.addConnectionEventListener(listener); + assertTrue(listener.isConnectionPoolEmpty()); + + assertNull(listener.getPooledConnection()); + } + + @Test + public void testGetConnectionWhenPoolNonEmpty() throws SQLException { + BigQueryPooledConnection pooledConnection = new BigQueryPooledConnection(bigQueryConnection); + assertFalse(pooledConnection.inUse()); + 
Connection connection = pooledConnection.getConnection(); + assertNotNull(connection); + assertFalse(connection.isClosed()); + assertTrue(pooledConnection.inUse()); + + PooledConnectionListener listener = new PooledConnectionListener(LISTENER_POOL_SIZE); + assertEquals(CONNECTION_POOL_SIZE, listener.getConnectionPoolSize()); + assertEquals(LISTENER_POOL_SIZE, pooledConnection.getListenerPoolSize()); + pooledConnection.addConnectionEventListener(listener); + assertTrue(listener.isConnectionPoolEmpty()); + + connection.close(); + assertFalse(pooledConnection.inUse()); + assertFalse(listener.isConnectionPoolEmpty()); + assertTrue(pooledConnection.isListenerPooled(listener)); + + assertNotNull(listener.getPooledConnection()); + } +} diff --git a/google-cloud-bigquery-jdbc/src/test/java/com/google/cloud/bigquery/jdbc/TestType.java b/google-cloud-bigquery-jdbc/src/test/java/com/google/cloud/bigquery/jdbc/TestType.java new file mode 100644 index 000000000..3222525c5 --- /dev/null +++ b/google-cloud-bigquery-jdbc/src/test/java/com/google/cloud/bigquery/jdbc/TestType.java @@ -0,0 +1,31 @@ +/* + * Copyright 2023 Google LLC + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
 */

package com.google.cloud.bigquery.jdbc;

/** Container for simple helper types used by the coercion unit tests. */
public class TestType {
  /**
   * Minimal byte-array wrapper; used as a type with no registered coercion so tests can
   * exercise the coercion-not-found path.
   */
  public static class Text {
    // Backing bytes; stored and returned as-is (no defensive copy).
    private final byte[] bytes;

    public Text(byte[] bytes) {
      this.bytes = bytes;
    }

    public byte[] getBytes() {
      return bytes;
    }
  }
}
diff --git a/google-cloud-bigquery-jdbc/src/test/java/com/google/cloud/bigquery/jdbc/it/ITBase.java b/google-cloud-bigquery-jdbc/src/test/java/com/google/cloud/bigquery/jdbc/it/ITBase.java
new file mode 100644
index 000000000..5aa41b297
--- /dev/null
+++ b/google-cloud-bigquery-jdbc/src/test/java/com/google/cloud/bigquery/jdbc/it/ITBase.java
@@ -0,0 +1,21 @@
/*
 * Copyright 2025 Google LLC
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package com.google.cloud.bigquery.jdbc.it;

import com.google.cloud.bigquery.jdbc.BigQueryJdbcBaseTest;

/** Shared base class for the JDBC integration tests in this package. */
public class ITBase extends BigQueryJdbcBaseTest {}
diff --git a/google-cloud-bigquery-jdbc/src/test/java/com/google/cloud/bigquery/jdbc/it/ITBigQueryJDBCTest.java b/google-cloud-bigquery-jdbc/src/test/java/com/google/cloud/bigquery/jdbc/it/ITBigQueryJDBCTest.java
new file mode 100644
index 000000000..411a48871
--- /dev/null
+++ b/google-cloud-bigquery-jdbc/src/test/java/com/google/cloud/bigquery/jdbc/it/ITBigQueryJDBCTest.java
@@ -0,0 +1,4439 @@
/*
 * Copyright 2023 Google LLC
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package com.google.cloud.bigquery.jdbc.it; + +import static com.google.common.truth.Truth.assertThat; +import static org.junit.Assert.assertEquals; +import static org.junit.Assert.assertFalse; +import static org.junit.Assert.assertNotNull; +import static org.junit.Assert.assertNull; +import static org.junit.Assert.assertThrows; +import static org.junit.Assert.assertTrue; + +import com.google.cloud.ServiceOptions; +import com.google.cloud.bigquery.BigQuery; +import com.google.cloud.bigquery.BigQueryError; +import com.google.cloud.bigquery.BigQueryOptions; +import com.google.cloud.bigquery.DatasetId; +import com.google.cloud.bigquery.Job; +import com.google.cloud.bigquery.JobInfo; +import com.google.cloud.bigquery.QueryJobConfiguration; +import com.google.cloud.bigquery.exception.BigQueryJdbcException; +import com.google.cloud.bigquery.exception.BigQueryJdbcRuntimeException; +import com.google.cloud.bigquery.exception.BigQueryJdbcSqlFeatureNotSupportedException; +import com.google.cloud.bigquery.exception.BigQueryJdbcSqlSyntaxErrorException; +import com.google.cloud.bigquery.jdbc.BigQueryConnection; +import com.google.cloud.bigquery.jdbc.BigQueryDriver; +import com.google.cloud.bigquery.jdbc.DataSource; +import com.google.cloud.bigquery.jdbc.PooledConnectionDataSource; +import com.google.cloud.bigquery.jdbc.PooledConnectionListener; +import com.google.cloud.bigquery.jdbc.utils.TestUtilities.TestConnectionListener; +import com.google.common.collect.ImmutableMap; +import com.google.gson.JsonObject; +import com.google.gson.JsonParser; +import 
java.io.File; +import java.io.IOException; +import java.io.InputStream; +import java.io.InputStreamReader; +import java.math.BigDecimal; +import java.nio.file.Files; +import java.nio.file.Paths; +import java.sql.CallableStatement; +import java.sql.Connection; +import java.sql.DatabaseMetaData; +import java.sql.Date; +import java.sql.Driver; +import java.sql.DriverManager; +import java.sql.PreparedStatement; +import java.sql.ResultSet; +import java.sql.ResultSetMetaData; +import java.sql.SQLException; +import java.sql.Statement; +import java.sql.Time; +import java.sql.Timestamp; +import java.sql.Types; +import java.time.Instant; +import java.time.LocalTime; +import java.util.Arrays; +import java.util.Calendar; +import java.util.HashSet; +import java.util.Properties; +import java.util.Random; +import java.util.Set; +import java.util.concurrent.ExecutorService; +import java.util.concurrent.Executors; +import java.util.function.BiFunction; +import javax.sql.PooledConnection; +import org.junit.AfterClass; +import org.junit.Assert; +import org.junit.BeforeClass; +import org.junit.Ignore; +import org.junit.Test; + +public class ITBigQueryJDBCTest extends ITBase { + static final String PROJECT_ID = ServiceOptions.getDefaultProjectId(); + static Connection bigQueryConnection; + static BigQuery bigQuery; + static Statement bigQueryStatement; + static Connection bigQueryConnectionNoReadApi; + static Statement bigQueryStatementNoReadApi; + static final String connection_uri = + "jdbc:bigquery://https://www.googleapis.com/bigquery/v2:443;PROJECTID=" + + PROJECT_ID + + ";OAUTHTYPE=3"; + static final String session_enabled_connection_uri = + "jdbc:bigquery://https://www.googleapis.com/bigquery/v2:443;PROJECTID=" + + PROJECT_ID + + ";OAUTHTYPE=3;EnableSession=1"; + private static final String BASE_QUERY = + "SELECT * FROM bigquery-public-data.new_york_taxi_trips.tlc_yellow_trips_2017 order by" + + " trip_distance asc LIMIT %s"; + private static final Random random = new Random(); 
+ private static final int randomNumber = random.nextInt(9999); + private static final String DATASET = "JDBC_PRESUBMIT_INTEGRATION_DATASET"; + private static final String DATASET2 = "JDBC_PRESUBMIT_INTEGRATION_DATASET_2"; + private static final String CONSTRAINTS_DATASET = "JDBC_CONSTRAINTS_TEST_DATASET"; + private static final String CONSTRAINTS_TABLE_NAME = "JDBC_CONSTRAINTS_TEST_TABLE"; + private static final String CONSTRAINTS_TABLE_NAME2 = "JDBC_CONSTRAINTS_TEST_TABLE2"; + private static final String CONSTRAINTS_TABLE_NAME3 = "JDBC_CONSTRAINTS_TEST_TABLE3"; + private static final String CALLABLE_STMT_PROC_NAME = "IT_CALLABLE_STMT_PROC_TEST"; + private static final String CALLABLE_STMT_TABLE_NAME = "IT_CALLABLE_STMT_PROC_TABLE"; + private static final String CALLABLE_STMT_PARAM_KEY = "CALL_STMT_PARAM_KEY"; + private static final String CALLABLE_STMT_DML_INSERT_PROC_NAME = + "IT_CALLABLE_STMT_PROC_DML_INSERT_TEST"; + private static final String CALLABLE_STMT_DML_UPDATE_PROC_NAME = + "IT_CALLABLE_STMT_PROC_DML_UPDATE_TEST"; + private static final String CALLABLE_STMT_DML_DELETE_PROC_NAME = + "IT_CALLABLE_STMT_PROC_DML_DELETE_TEST"; + private static final String CALLABLE_STMT_DML_TABLE_NAME = "IT_CALLABLE_STMT_PROC_DML_TABLE"; + private static final Long DEFAULT_CONN_POOL_SIZE = 10L; + private static final Long CUSTOM_CONN_POOL_SIZE = 5L; + private static final Object EXCEPTION_REPLACEMENT = "EXCEPTION-WAS-RAISED"; + + private static String requireEnvVar(String varName) { + String value = System.getenv(varName); + assertNotNull( + "Environment variable " + varName + " is required to perform these tests.", + System.getenv(varName)); + return value; + } + + private JsonObject getAuthJson() throws IOException { + final String secret = requireEnvVar("SA_SECRET"); + JsonObject authJson; + // Supporting both formats of SA_SECRET: + // - Local runs can point to a json file + // - Cloud Build has JSON value + try { + InputStream stream = 
Files.newInputStream(Paths.get(secret)); + InputStreamReader reader = new InputStreamReader(stream); + authJson = JsonParser.parseReader(reader).getAsJsonObject(); + } catch (IOException e) { + authJson = JsonParser.parseString(secret).getAsJsonObject(); + } + assertTrue(authJson.has("client_email")); + assertTrue(authJson.has("private_key")); + assertTrue(authJson.has("project_id")); + return authJson; + } + + private void validateConnection(String connection_uri) throws SQLException { + Connection connection = DriverManager.getConnection(connection_uri); + assertNotNull(connection); + assertFalse(connection.isClosed()); + assertEquals( + "GOOGLE_SERVICE_ACCOUNT", + ((BigQueryConnection) connection).getAuthProperties().get("OAuthType")); + String query = + "SELECT DISTINCT repository_name FROM `bigquery-public-data.samples.github_timeline` LIMIT" + + " 850"; + Statement statement = connection.createStatement(); + ResultSet jsonResultSet = statement.executeQuery(query); + assertTrue(jsonResultSet.getClass().getName().contains("BigQueryJsonResultSet")); + connection.close(); + } + + @BeforeClass + public static void beforeClass() throws SQLException { + bigQueryConnection = DriverManager.getConnection(connection_uri, new Properties()); + bigQueryStatement = bigQueryConnection.createStatement(); + + Properties noReadApi = new Properties(); + noReadApi.setProperty("EnableHighThroughputAPI", "0"); + bigQueryConnectionNoReadApi = DriverManager.getConnection(connection_uri, noReadApi); + bigQueryStatementNoReadApi = bigQueryConnectionNoReadApi.createStatement(); + bigQuery = BigQueryOptions.newBuilder().build().getService(); + } + + @AfterClass + public static void afterClass() throws SQLException { + bigQueryStatement.close(); + bigQueryConnection.close(); + bigQueryStatementNoReadApi.close(); + bigQueryConnectionNoReadApi.close(); + } + + @Test + public void testValidServiceAccountAuthentication() throws SQLException, IOException { + final JsonObject authJson = 
getAuthJson(); + File tempFile = File.createTempFile("auth", ".json"); + tempFile.deleteOnExit(); + Files.write(tempFile.toPath(), authJson.toString().getBytes()); + + String connection_uri = + "jdbc:bigquery://https://www.googleapis.com/bigquery/v2:443;" + + "ProjectId=" + + authJson.get("project_id").getAsString() + + ";OAuthType=0;" + + "OAuthPvtKeyPath=" + + tempFile.toPath() + + ";"; + + validateConnection(connection_uri); + } + + @Test + public void testServiceAccountAuthenticationMissingOAuthPvtKeyPath() throws SQLException { + String connection_uri = + "jdbc:bigquery://https://www.googleapis.com/bigquery/v2:443;" + + "ProjectId=" + + PROJECT_ID + + ";OAuthType=0;"; + + try { + DriverManager.getConnection(connection_uri); + Assert.fail(); + } catch (BigQueryJdbcRuntimeException ex) { + assertTrue(ex.getMessage().contains("No valid credentials provided.")); + } + } + + @Test + public void testValidServiceAccountAuthenticationOAuthPvtKeyAsPath() + throws SQLException, IOException { + final JsonObject authJson = getAuthJson(); + File tempFile = File.createTempFile("auth", ".json"); + tempFile.deleteOnExit(); + Files.write(tempFile.toPath(), authJson.toString().getBytes()); + + String connection_uri = + "jdbc:bigquery://https://www.googleapis.com/bigquery/v2:443;" + + "ProjectId=" + + authJson.get("project_id").getAsString() + + ";OAuthType=0;" + + "OAuthServiceAcctEmail=;" + + ";OAuthPvtKey=" + + tempFile.toPath() + + ";"; + validateConnection(connection_uri); + } + + @Test + public void testValidServiceAccountAuthenticationViaEmailAndPkcs8Key() + throws SQLException, IOException { + final JsonObject authJson = getAuthJson(); + + String connection_uri = + "jdbc:bigquery://https://www.googleapis.com/bigquery/v2:443;" + + "ProjectId=" + + authJson.get("project_id").getAsString() + + ";OAuthType=0;" + + "OAuthServiceAcctEmail=" + + authJson.get("client_email").getAsString() + + ";OAuthPvtKey=" + + authJson.get("private_key").getAsString() + + ";"; + 
validateConnection(connection_uri); + } + + @Test + public void testValidServiceAccountAuthenticationOAuthPvtKeyAsJson() + throws SQLException, IOException { + final JsonObject authJson = getAuthJson(); + + String connection_uri = + "jdbc:bigquery://https://www.googleapis.com/bigquery/v2:443;" + + "ProjectId=" + + authJson.get("project_id").getAsString() + + ";OAuthType=0;" + + "OAuthServiceAcctEmail=;" + + ";OAuthPvtKey=" + + authJson.toString() + + ";"; + validateConnection(connection_uri); + } + + // TODO(kirl): Enable this test when pipeline has p12 secret available. + @Test + @Ignore + public void testValidServiceAccountAuthenticationP12() throws SQLException, IOException { + final JsonObject authJson = getAuthJson(); + final String p12_file = requireEnvVar("SA_SECRET_P12"); + + final String connectionUri = + getBaseUri(0, authJson.get("project_id").getAsString()) + .append("OAuthServiceAcctEmail", authJson.get("client_email").getAsString()) + .append("OAuthPvtKeyPath", p12_file) + .toString(); + validateConnection(connectionUri); + } + + @Test + @Ignore + public void testValidGoogleUserAccountAuthentication() throws SQLException { + String connection_uri = + "jdbc:bigquery://https://www.googleapis.com/bigquery/v2:443;PROJECTID=" + + PROJECT_ID + + ";OAuthType=1;OAuthClientId=client_id;OAuthClientSecret=client_secret;"; + + Connection connection = DriverManager.getConnection(connection_uri); + assertNotNull(connection); + assertFalse(connection.isClosed()); + assertEquals( + "GOOGLE_USER_ACCOUNT", + ((BigQueryConnection) connection).getAuthProperties().get("OAuthType")); + + Statement statement = connection.createStatement(); + ResultSet resultSet = + statement.executeQuery( + "SELECT repository_name FROM `bigquery-public-data.samples.github_timeline` LIMIT 50"); + + assertEquals(50, resultSetRowCount(resultSet)); + connection.close(); + } + + @Test + @Ignore + public void testValidExternalAccountAuthentication() throws SQLException { + String connection_uri = 
+ "jdbc:bigquery://https://www.googleapis.com/bigquery/v2:443;PROJECTID=" + + PROJECT_ID + + ";OAUTHTYPE=4;" + + "BYOID_AudienceUri=//iam.googleapis.com/projects//locations//workloadIdentityPools//providers/;" + + "BYOID_SubjectTokenType=;BYOID_CredentialSource={\"file\":\"/path/to/file\"};" + + "BYOID_SA_Impersonation_Uri=;BYOID_TokenUri=;"; + + Connection connection = DriverManager.getConnection(connection_uri); + assertNotNull(connection); + assertFalse(connection.isClosed()); + assertEquals( + "EXTERNAL_ACCOUNT_AUTH", + ((BigQueryConnection) connection).getAuthProperties().get("OAuthType")); + + Statement statement = connection.createStatement(); + ResultSet resultSet = + statement.executeQuery( + "SELECT repository_name FROM `bigquery-public-data.samples.github_timeline` LIMIT 50"); + + assertEquals(50, resultSetRowCount(resultSet)); + connection.close(); + } + + @Test + @Ignore + public void testValidExternalAccountAuthenticationFromFile() throws SQLException { + String connection_uri = + "jdbc:bigquery://https://www.googleapis.com/bigquery/v2:443;PROJECTID=" + + PROJECT_ID + + ";OAUTHTYPE=4;" + + "OAuthPvtKeyPath=/path/to/file;"; + + Connection connection = DriverManager.getConnection(connection_uri); + assertNotNull(connection); + assertFalse(connection.isClosed()); + assertEquals( + "EXTERNAL_ACCOUNT_AUTH", + ((BigQueryConnection) connection).getAuthProperties().get("OAuthType")); + + Statement statement = connection.createStatement(); + ResultSet resultSet = + statement.executeQuery( + "SELECT repository_name FROM `bigquery-public-data.samples.github_timeline` LIMIT 50"); + + assertEquals(50, resultSetRowCount(resultSet)); + connection.close(); + } + + @Test + @Ignore + public void testValidExternalAccountAuthenticationRawJson() throws SQLException { + String connection_uri = + "jdbc:bigquery://https://www.googleapis.com/bigquery/v2:443;PROJECTID=" + + PROJECT_ID + + ";OAUTHTYPE=4;OAuthPvtKey={\n" + + " \"universe_domain\": \"googleapis.com\",\n" + + " 
\"type\": \"external_account\",\n" + + " \"audience\":" + + " \"//iam.googleapis.com/projects//locations//workloadIdentityPools//providers/\",\n" + + " \"subject_token_type\": \"\",\n" + + " \"token_url\": \"\",\n" + + " \"credential_source\": {\n" + + " \"file\": \"/path/to/file\"\n" + + " },\n" + + " \"service_account_impersonation_url\": \"\"\n" + + "};"; + + Connection connection = DriverManager.getConnection(connection_uri); + assertNotNull(connection); + assertFalse(connection.isClosed()); + assertEquals( + "EXTERNAL_ACCOUNT_AUTH", + ((BigQueryConnection) connection).getAuthProperties().get("OAuthType")); + + Statement statement = connection.createStatement(); + ResultSet resultSet = + statement.executeQuery( + "SELECT repository_name FROM `bigquery-public-data.samples.github_timeline` LIMIT 50"); + + assertEquals(50, resultSetRowCount(resultSet)); + connection.close(); + } + + // TODO(farhan): figure out how to programmatically generate an access token and test + @Test + @Ignore + public void testValidPreGeneratedAccessTokenAuthentication() throws SQLException { + String connection_uri = + "jdbc:bigquery://https://www.googleapis.com/bigquery/v2:443;PROJECTID=" + + PROJECT_ID + + ";OAUTHTYPE=2;OAuthAccessToken=access_token;"; + + Connection connection = DriverManager.getConnection(connection_uri); + assertNotNull(connection); + assertFalse(connection.isClosed()); + assertEquals( + "PRE_GENERATED_TOKEN", + ((BigQueryConnection) connection).getAuthProperties().get("OAuthType")); + + Statement statement = connection.createStatement(); + ResultSet resultSet = + statement.executeQuery( + "SELECT repository_name FROM `bigquery-public-data.samples.github_timeline` LIMIT 50"); + + assertEquals(50, resultSetRowCount(resultSet)); + connection.close(); + } + + // TODO(obada): figure out how to programmatically generate a refresh token and test + @Test + @Ignore + public void testValidRefreshTokenAuthentication() throws SQLException { + String connection_uri = + 
"jdbc:bigquery://https://www.googleapis.com/bigquery/v2:443;PROJECTID=" + + PROJECT_ID + + ";OAUTHTYPE=2;OAuthRefreshToken=refresh_token;" + + ";OAuthClientId=client;OAuthClientSecret=secret;"; + + Connection connection = DriverManager.getConnection(connection_uri); + assertNotNull(connection); + assertFalse(connection.isClosed()); + assertEquals( + "PRE_GENERATED_TOKEN", + ((BigQueryConnection) connection).getAuthProperties().get("OAuthType")); + + Statement statement = connection.createStatement(); + ResultSet resultSet = + statement.executeQuery( + "SELECT repository_name FROM `bigquery-public-data.samples.github_timeline` LIMIT 50"); + + assertEquals(50, resultSetRowCount(resultSet)); + connection.close(); + } + + @Test + public void testValidApplicationDefaultCredentialsAuthentication() throws SQLException { + String connection_uri = getBaseUri(3, PROJECT_ID).toString(); + + Connection connection = DriverManager.getConnection(connection_uri); + assertNotNull(connection); + assertFalse(connection.isClosed()); + assertEquals( + "APPLICATION_DEFAULT_CREDENTIALS", + ((BigQueryConnection) connection).getAuthProperties().get("OAuthType")); + connection.close(); + } + + // This test is useing the same client email as a main authorization & impersonation. 
+ // It requires account to have 'tokenCreator' permission, see + // https://cloud.google.com/docs/authentication/use-service-account-impersonation#required-roles + @Test + public void testServiceAccountAuthenticationWithImpersonation() throws IOException, SQLException { + final JsonObject authJson = getAuthJson(); + + String connection_uri = + getBaseUri(0, authJson.get("project_id").getAsString()) + .append("OAuthServiceAcctEmail", authJson.get("client_email").getAsString()) + .append("OAuthPvtKey", authJson.get("private_key").getAsString()) + .append("ServiceAccountImpersonationEmail", authJson.get("client_email").getAsString()) + .toString(); + validateConnection(connection_uri); + } + + // This test uses the same client email for the main authorization and a chain of impersonations. + // It requires the account to have 'tokenCreator' permission on itself. + @Test + public void testServiceAccountAuthenticationWithChainedImpersonation() + throws IOException, SQLException { + final JsonObject authJson = getAuthJson(); + String clientEmail = authJson.get("client_email").getAsString(); + + String connection_uri = + getBaseUri(0, authJson.get("project_id").getAsString()) + .append("OAuthServiceAcctEmail", clientEmail) + .append("OAuthPvtKey", authJson.get("private_key").getAsString()) + .append("ServiceAccountImpersonationEmail", clientEmail) + .append("ServiceAccountImpersonationChain", clientEmail + "," + clientEmail) + .toString(); + validateConnection(connection_uri); + } + + @Test + public void testFastQueryPathSmall() throws SQLException { + String query = + "SELECT DISTINCT repository_name FROM `bigquery-public-data.samples.github_timeline` LIMIT" + + " 850"; + ResultSet jsonResultSet = bigQueryStatement.executeQuery(query); + assertTrue(jsonResultSet.getClass().getName().contains("BigQueryJsonResultSet")); + assertEquals(850, resultSetRowCount(jsonResultSet)); + } + + @Test + public void testSmallSelectAndVerifyResults() throws SQLException { + String query 
= + "SELECT repository_name FROM `bigquery-public-data.samples.github_timeline` WHERE" + + " repository_name LIKE 'X%' LIMIT 10"; + + ResultSet resultSet = bigQueryStatement.executeQuery(query); + int rowCount = 0; + while (resultSet.next()) { + assertTrue(resultSet.getString(1).startsWith("X")); + rowCount++; + } + assertEquals(10, rowCount); + } + + @Test + // reads without using ReadAPI and makes sure that they are in order, which implies threads worked + // correctly + public void testIterateOrderJsonMultiThread_NoReadApi() throws SQLException { + int expectedCnt = 10000; + String query = String.format(BASE_QUERY, expectedCnt); + ResultSet rs = bigQueryStatementNoReadApi.executeQuery(query); + int cnt = 0; + double oldTriDis = 0.0d; + while (rs.next()) { + double tripDis = rs.getDouble("trip_distance"); + ++cnt; + assertTrue(oldTriDis <= tripDis); + oldTriDis = tripDis; + } + assertEquals(expectedCnt, cnt); // all the records were retrieved + } + + @Test + public void testInvalidQuery() throws SQLException { + String query = "SELECT *"; + + try { + bigQueryStatement.executeQuery(query); + Assert.fail(); + } catch (BigQueryJdbcException e) { + assertTrue(e.getMessage().contains("SELECT * must have a FROM clause")); + } + } + + @Test + public void testDriver() throws SQLException { + String connection_uri = + "jdbc:bigquery://https://www.googleapis.com/bigquery/v2:443;PROJECTID=" + + PROJECT_ID + + ";OAUTHTYPE=3"; + + Driver driver = BigQueryDriver.getRegisteredDriver(); + assertTrue(driver.acceptsURL(connection_uri)); + + Connection connection = driver.connect(connection_uri, new Properties()); + assertNotNull(connection); + Statement st = connection.createStatement(); + boolean rs = + st.execute("Select * FROM `bigquery-public-data.samples.github_timeline` LIMIT 180"); + assertTrue(rs); + connection.close(); + } + + @Test + public void testDefaultDataset() throws SQLException { + String connection_uri = + 
"jdbc:bigquery://https://www.googleapis.com/bigquery/v2:443;PROJECTID=" + + PROJECT_ID + + ";OAUTHTYPE=3;DEFAULTDATASET=testDataset"; + + Driver driver = BigQueryDriver.getRegisteredDriver(); + assertTrue(driver.acceptsURL(connection_uri)); + + Connection connection = driver.connect(connection_uri, new Properties()); + assertNotNull(connection); + assertEquals( + DatasetId.of("testDataset"), ((BigQueryConnection) connection).getDefaultDataset()); + + String connection_uri_null_default_dataset = + "jdbc:bigquery://https://www.googleapis.com/bigquery/v2:443;PROJECTID=" + + PROJECT_ID + + ";OAUTHTYPE=3"; + + assertTrue(driver.acceptsURL(connection_uri_null_default_dataset)); + + Connection connection2 = driver.connect(connection_uri_null_default_dataset, new Properties()); + assertNotNull(connection2); + assertNull(((BigQueryConnection) connection2).getDefaultDataset()); + connection.close(); + connection2.close(); + } + + @Test + public void testDefaultDatasetWithProject() throws SQLException { + String connection_uri = + "jdbc:bigquery://https://www.googleapis.com/bigquery/v2:443;PROJECTID=" + + PROJECT_ID + + ";OAUTHTYPE=3;DEFAULTDATASET=" + + PROJECT_ID + + ".testDataset"; + + Driver driver = BigQueryDriver.getRegisteredDriver(); + assertTrue(driver.acceptsURL(connection_uri)); + + Connection connection = driver.connect(connection_uri, new Properties()); + assertNotNull(connection); + assertEquals( + DatasetId.of(PROJECT_ID, "testDataset"), + ((BigQueryConnection) connection).getDefaultDataset()); + connection.close(); + } + + @Test + public void testLocation() throws SQLException { + String connection_uri = + "jdbc:bigquery://https://www.googleapis.com/bigquery/v2:443;PROJECTID=" + + PROJECT_ID + + ";OAUTHTYPE=3;LOCATION=EU"; + + Driver driver = BigQueryDriver.getRegisteredDriver(); + assertTrue(driver.acceptsURL(connection_uri)); + + Connection connection = driver.connect(connection_uri, new Properties()); + assertEquals(((BigQueryConnection) 
connection).getLocation(), "EU"); + + Statement statement = connection.createStatement(); + + // Query a dataset in the EU + String query = + "SELECT name FROM `bigquery-public-data.covid19_italy_eu.data_by_province` LIMIT 100"; + ResultSet resultSet = statement.executeQuery(query); + assertEquals(100, resultSetRowCount(resultSet)); + + String connection_uri_null_location = + "jdbc:bigquery://https://www.googleapis.com/bigquery/v2:443;PROJECTID=" + + PROJECT_ID + + ";OAUTHTYPE=3"; + + assertTrue(driver.acceptsURL(connection_uri_null_location)); + + Connection connection2 = driver.connect(connection_uri_null_location, new Properties()); + assertNotNull(connection2); + assertNull(((BigQueryConnection) connection2).getLocation()); + connection.close(); + connection2.close(); + } + + @Test + public void testIncorrectLocation() throws SQLException { + String connection_uri = + "jdbc:bigquery://https://www.googleapis.com/bigquery/v2:443;PROJECTID=" + + PROJECT_ID + + ";OAUTHTYPE=3;LOCATION=europe-west3"; + + Driver driver = BigQueryDriver.getRegisteredDriver(); + + Connection connection = driver.connect(connection_uri, new Properties()); + assertEquals(((BigQueryConnection) connection).getLocation(), "europe-west3"); + + // Query a dataset in the US + Statement statement = connection.createStatement(); + String query = "SELECT * FROM `bigquery-public-data.samples.github_timeline` LIMIT 180"; + BigQueryJdbcException ex = + assertThrows(BigQueryJdbcException.class, () -> statement.executeQuery(query)); + BigQueryError error = ex.getBigQueryException().getError(); + assertNotNull(error); + assertEquals("accessDenied", error.getReason()); + connection.close(); + } + + @Test + public void testCreateStatementWithResultSetHoldabilityUnsupportedTypeForwardOnly() { + assertThrows( + BigQueryJdbcSqlFeatureNotSupportedException.class, + () -> bigQueryConnection.createStatement(ResultSet.TYPE_FORWARD_ONLY, 1, 1)); + } + + @Test + public void 
testCreateStatementWithResultSetHoldabilityUnsupportedConcurReadOnly() { + assertThrows( + BigQueryJdbcSqlFeatureNotSupportedException.class, + () -> bigQueryConnection.createStatement(1, ResultSet.CONCUR_READ_ONLY, 1)); + } + + @Test + public void testCreateStatementWithResultSetHoldabilityUnsupportedCloseCursorsAtCommit() { + assertThrows( + BigQueryJdbcSqlFeatureNotSupportedException.class, + () -> bigQueryConnection.createStatement(1, 1, ResultSet.CLOSE_CURSORS_AT_COMMIT)); + } + + @Test + public void testCreateStatementWithResultSetConcurrencyUnsupportedTypeForwardOnly() { + assertThrows( + BigQueryJdbcSqlFeatureNotSupportedException.class, + () -> bigQueryConnection.createStatement(ResultSet.TYPE_FORWARD_ONLY, 1)); + } + + @Test + public void testCreateStatementWithResultSetConcurrencyUnsupportedConcurReadOnly() { + assertThrows( + BigQueryJdbcSqlFeatureNotSupportedException.class, + () -> bigQueryConnection.createStatement(1, ResultSet.CONCUR_READ_ONLY)); + } + + @Test + public void testSetTransactionIsolationToNotSerializableThrowsNotSupported() { + assertThrows( + BigQueryJdbcSqlFeatureNotSupportedException.class, + () -> bigQueryConnection.setTransactionIsolation(Connection.TRANSACTION_SERIALIZABLE + 1)); + } + + @Test + public void testSetHoldabilityForNonCloseCursorsThrowsNotSupported() throws SQLException { + Driver driver = BigQueryDriver.getRegisteredDriver(); + Connection connection = driver.connect(connection_uri, new Properties()); + + assertThrows( + BigQueryJdbcSqlFeatureNotSupportedException.class, + () -> connection.setHoldability(ResultSet.CLOSE_CURSORS_AT_COMMIT + 1)); + connection.close(); + } + + @Test + public void testCreateStatementWhenConnectionClosedThrows() throws SQLException { + Driver driver = BigQueryDriver.getRegisteredDriver(); + Connection connection = driver.connect(connection_uri, new Properties()); + + connection.close(); + assertThrows(IllegalStateException.class, connection::createStatement); + } + + @Test + public void 
testCreateStatementWithResultSetHoldabilityWhenConnectionClosedThrows() + throws SQLException { + Driver driver = BigQueryDriver.getRegisteredDriver(); + Connection connection = driver.connect(connection_uri, new Properties()); + + connection.close(); + assertThrows(IllegalStateException.class, () -> connection.createStatement(1, 1, 1)); + } + + @Test + public void testCreateStatementWithResultSetConcurrencyWhenConnectionClosedThrows() + throws SQLException { + Driver driver = BigQueryDriver.getRegisteredDriver(); + Connection connection = driver.connect(connection_uri, new Properties()); + + connection.close(); + assertThrows(IllegalStateException.class, () -> connection.createStatement(1, 1)); + } + + @Test + public void testSetAutoCommitWithClosedConnectionThrowsIllegalState() throws SQLException { + BigQueryConnection connection = + (BigQueryConnection) DriverManager.getConnection(session_enabled_connection_uri); + connection.close(); + assertThrows(IllegalStateException.class, () -> connection.setAutoCommit(true)); + } + + @Test + public void testSetCommitToFalseWithoutSessionEnabledThrowsIllegalState() throws SQLException { + BigQueryConnection connection = + (BigQueryConnection) DriverManager.getConnection(connection_uri); + assertThrows(IllegalStateException.class, () -> connection.setAutoCommit(false)); + connection.close(); + } + + @Test + public void testCommitWithConnectionClosedThrowsIllegalState() throws SQLException { + BigQueryConnection connection = + (BigQueryConnection) DriverManager.getConnection(session_enabled_connection_uri); + connection.close(); + assertThrows(IllegalStateException.class, connection::commit); + } + + @Test + public void testCommitToFalseWithoutSessionEnabledThrowsIllegalState() throws SQLException { + BigQueryConnection connection = + (BigQueryConnection) DriverManager.getConnection(connection_uri); + assertThrows(IllegalStateException.class, connection::commit); + connection.close(); + } + + @Test + public void 
testCommitWithNoTransactionStartedThrowsIllegalState() throws SQLException { + BigQueryConnection connection = + (BigQueryConnection) DriverManager.getConnection(session_enabled_connection_uri); + assertThrows(IllegalStateException.class, connection::commit); + connection.close(); + } + + @Test + public void testRollbackWithConnectionClosedThrowsIllegalState() throws SQLException { + BigQueryConnection connection = + (BigQueryConnection) DriverManager.getConnection(session_enabled_connection_uri); + connection.close(); + assertThrows(IllegalStateException.class, connection::rollback); + } + + @Test + public void testRollbackToFalseWithoutSessionEnabledThrowsIllegalState() throws SQLException { + BigQueryConnection connection = + (BigQueryConnection) DriverManager.getConnection(connection_uri); + assertThrows(IllegalStateException.class, connection::rollback); + connection.close(); + } + + @Test + public void testRollbackWithoutTransactionStartedThrowsIllegalState() throws SQLException { + BigQueryConnection connection = + (BigQueryConnection) DriverManager.getConnection(session_enabled_connection_uri); + assertThrows(IllegalStateException.class, connection::rollback); + connection.close(); + } + + @Test + public void testGetLocationWhenConnectionClosedThrows() throws SQLException { + Driver driver = BigQueryDriver.getRegisteredDriver(); + Connection connection = driver.connect(connection_uri, new Properties()); + + connection.close(); + assertThrows( + IllegalStateException.class, () -> ((BigQueryConnection) connection).getLocation()); + connection.close(); + } + + @Test + public void testGetDefaultDatasetWhenConnectionClosedThrows() throws SQLException { + Driver driver = BigQueryDriver.getRegisteredDriver(); + Connection connection = driver.connect(connection_uri, new Properties()); + + connection.close(); + assertThrows( + IllegalStateException.class, () -> ((BigQueryConnection) connection).getDefaultDataset()); + } + + @Test + public void 
testGetAutocommitWhenConnectionClosedThrows() throws SQLException { + Driver driver = BigQueryDriver.getRegisteredDriver(); + Connection connection = driver.connect(connection_uri, new Properties()); + + connection.close(); + assertThrows(IllegalStateException.class, connection::getAutoCommit); + } + + @Test + public void testSetAutocommitWhenConnectionClosedThrows() throws SQLException { + Driver driver = BigQueryDriver.getRegisteredDriver(); + Connection connection = driver.connect(connection_uri, new Properties()); + + connection.close(); + assertThrows(IllegalStateException.class, () -> connection.setAutoCommit(true)); + } + + @Test + public void testExecuteUpdate() throws SQLException { + String TABLE_NAME = "JDBC_EXECUTE_UPDATE_TABLE_" + randomNumber; + String createQuery = + String.format( + "CREATE OR REPLACE TABLE %s.%s (`StringField` STRING, `IntegerField` INTEGER);", + DATASET, TABLE_NAME); + String insertQuery = + String.format( + "INSERT INTO %s.%s (StringField, IntegerField) " + + "VALUES ('string1',111 ), ('string2',111 ), ('string3',222 ), ('string4',333 );", + DATASET, TABLE_NAME); + String updateQuery = + String.format( + "UPDATE %s.%s SET StringField='Jane Doe' WHERE IntegerField=111", DATASET, TABLE_NAME); + String dropQuery = String.format("DROP TABLE %s.%s", DATASET, TABLE_NAME); + String selectQuery = String.format("SELECT * FROM %s.%s", DATASET, TABLE_NAME); + + int createStatus = bigQueryStatement.executeUpdate(createQuery); + assertEquals(0, createStatus); + + int insertStatus = bigQueryStatement.executeUpdate(insertQuery); + assertEquals(4, insertStatus); + + bigQueryStatement.executeQuery(selectQuery); + int selectStatus = bigQueryStatement.getUpdateCount(); + assertEquals(-1, selectStatus); + + int updateStatus = bigQueryStatement.executeUpdate(updateQuery); + assertEquals(2, updateStatus); + + int dropStatus = bigQueryStatement.executeUpdate(dropQuery); + assertEquals(0, dropStatus); + + bigQueryStatement.execute(String.format("DROP 
TABLE IF EXISTS %S.%s", DATASET, TABLE_NAME)); + } + + @Test + public void testExecuteQueryWithInsert() throws SQLException { + String TABLE_NAME = "JDBC_EXECUTE_UPDATE_TABLE_" + randomNumber; + String createQuery = + String.format( + "CREATE OR REPLACE TABLE %s.%s (`StringField` STRING, `IntegerField` INTEGER);", + DATASET, TABLE_NAME); + String dropQuery = String.format("DROP TABLE %s.%s", DATASET, TABLE_NAME); + + assertEquals(0, bigQueryStatement.executeUpdate(createQuery)); + assertThrows(BigQueryJdbcException.class, () -> bigQueryStatement.executeQuery(dropQuery)); + bigQueryStatement.execute(String.format("DROP TABLE IF EXISTS %S.%s", DATASET, TABLE_NAME)); + } + + @Test + public void testExecuteQueryWithMultipleReturns() throws SQLException { + String query = + String.format("SELECT * FROM bigquery-public-data.samples.github_timeline LIMIT 1;"); + + assertThrows(BigQueryJdbcException.class, () -> bigQueryStatement.executeQuery(query + query)); + } + + @Test + public void testExecuteUpdateWithSelect() throws SQLException { + String selectQuery = + String.format("SELECT * FROM bigquery-public-data.samples.github_timeline LIMIT 1;"); + + assertThrows(BigQueryJdbcException.class, () -> bigQueryStatement.executeUpdate(selectQuery)); + } + + @Test + public void testExecuteMethod() throws SQLException { + + String TABLE_NAME = "JDBC_EXECUTE_TABLE_" + randomNumber; + String createQuery = + String.format( + "CREATE OR REPLACE TABLE %s.%s (`StringField` STRING, `IntegerField` INTEGER);", + DATASET, TABLE_NAME); + String insertQuery = + String.format( + "INSERT INTO %s.%s (StringField, IntegerField) " + + "VALUES ('string1',111 ), ('string2',111 ), ('string3',222 ), ('string4',333 );", + DATASET, TABLE_NAME); + String updateQuery = + String.format( + "UPDATE %s.%s SET StringField='Jane Doe' WHERE IntegerField=111", DATASET, TABLE_NAME); + String dropQuery = String.format("DROP TABLE %s.%s", DATASET, TABLE_NAME); + String selectQuery = String.format("SELECT * FROM 
%s.%s", DATASET, TABLE_NAME); + + boolean createStatus = bigQueryStatement.execute(createQuery); + assertFalse(createStatus); + + boolean insertStatus = bigQueryStatement.execute(insertQuery); + assertFalse(insertStatus); + + boolean selectStatus = bigQueryStatement.execute(selectQuery); + assertTrue(selectStatus); + int selectCount = bigQueryStatement.getUpdateCount(); + assertEquals(-1, selectCount); + ResultSet resultSet = bigQueryStatement.getResultSet(); + assertNotNull(resultSet); + + boolean updateStatus = bigQueryStatement.execute(updateQuery); + assertFalse(updateStatus); + + boolean dropStatus = bigQueryStatement.execute(dropQuery); + assertFalse(dropStatus); + } + + @Test + public void testPreparedExecuteMethod() throws SQLException { + + String TABLE_NAME = "JDBC_PREPARED_EXECUTE_TABLE_" + randomNumber; + String createQuery = + String.format( + "CREATE OR REPLACE TABLE %s.%s (`StringField` STRING, `IntegerField` INTEGER);", + DATASET, TABLE_NAME); + String insertQuery = + String.format( + "INSERT INTO %s.%s (StringField, IntegerField) VALUES (?,?), (?,?), (?,?), (?,?);", + DATASET, TABLE_NAME); + String updateQuery = + String.format("UPDATE %s.%s SET StringField=? WHERE IntegerField=?", DATASET, TABLE_NAME); + String dropQuery = String.format("DROP TABLE %s.%s", DATASET, TABLE_NAME); + String selectQuery = String.format("SELECT ? 
FROM %s.%s", DATASET, TABLE_NAME); + + boolean createStatus = bigQueryStatement.execute(createQuery); + assertFalse(createStatus); + + PreparedStatement insertStmt = bigQueryConnection.prepareStatement(insertQuery); + insertStmt.setString(1, "String1"); + insertStmt.setInt(2, 111); + insertStmt.setString(3, "String2"); + insertStmt.setInt(4, 222); + insertStmt.setString(5, "String3"); + insertStmt.setInt(6, 333); + insertStmt.setString(7, "String4"); + insertStmt.setInt(8, 444); + + boolean insertStatus = insertStmt.execute(); + assertFalse(insertStatus); + + PreparedStatement selectStmt = bigQueryConnection.prepareStatement(selectQuery); + selectStmt.setString(1, "StringField"); + boolean selectStatus = selectStmt.execute(); + assertTrue(selectStatus); + + int selectCount = selectStmt.getUpdateCount(); + assertEquals(-1, selectCount); + ResultSet resultSet = selectStmt.getResultSet(); + assertNotNull(resultSet); + + PreparedStatement updateStmt = bigQueryConnection.prepareStatement(updateQuery); + updateStmt.setString(1, "Jane Doe"); + updateStmt.setInt(2, 222); + boolean updateStatus = updateStmt.execute(); + assertFalse(updateStatus); + + boolean dropStatus = bigQueryStatement.execute(dropQuery); + assertFalse(dropStatus); + } + + @Test + public void testPreparedStatementThrowsSyntaxError() throws SQLException { + String TABLE_NAME = "JDBC_PREPARED_SYNTAX_ERR_TABLE_" + randomNumber; + String createQuery = + String.format("CREATE OR REPLACE TABLE %s.%s (? STRING, ? 
INTEGER);", DATASET, TABLE_NAME); + + PreparedStatement preparedStatement = bigQueryConnection.prepareStatement(createQuery); + preparedStatement.setString(1, "StringField"); + preparedStatement.setString(2, "IntegerField"); + assertThrows(BigQueryJdbcSqlSyntaxErrorException.class, preparedStatement::execute); + + bigQueryStatement.execute(String.format("DROP TABLE IF EXISTS %S.%s", DATASET, TABLE_NAME)); + } + + @Test + public void testPreparedStatementThrowsJdbcException() throws SQLException { + String TABLE_NAME = "JDBC_PREPARED_MISSING_PARAM_TABLE_" + randomNumber; + String createQuery = + String.format( + "CREATE OR REPLACE TABLE %s.%s (StringField STRING, IntegerField INTEGER);", + DATASET, TABLE_NAME); + boolean createStatus = bigQueryStatement.execute(createQuery); + assertFalse(createStatus); + + String insertQuery = + String.format( + "INSERT INTO %s.%s (StringField, IntegerField) " + "VALUES (?,?), (?,?);", + DATASET, TABLE_NAME); + PreparedStatement insertStmt = bigQueryConnection.prepareStatement(insertQuery); + insertStmt.setString(1, "String1"); + insertStmt.setInt(2, 111); + assertThrows(BigQueryJdbcException.class, insertStmt::execute); + + bigQueryStatement.execute(String.format("DROP TABLE IF EXISTS %S.%s", DATASET, TABLE_NAME)); + } + + @Test + public void testSetFetchDirectionFetchReverseThrowsUnsupported() { + assertThrows( + BigQueryJdbcSqlFeatureNotSupportedException.class, + () -> bigQueryStatement.setFetchDirection(ResultSet.FETCH_REVERSE)); + } + + @Test + public void testSetFetchDirectionFetchUnknownThrowsUnsupported() { + assertThrows( + BigQueryJdbcSqlFeatureNotSupportedException.class, + () -> bigQueryStatement.setFetchDirection(ResultSet.FETCH_UNKNOWN)); + } + + @Test + public void testExecuteBatchQueryTypeSelectThrowsUnsupported() throws SQLException { + Driver driver = BigQueryDriver.getRegisteredDriver(); + Connection connection = driver.connect(connection_uri, new Properties()); + String query = + "SELECT repository_name FROM 
`bigquery-public-data.samples.github_timeline` WHERE" + + " repository_name LIKE 'X%' LIMIT 10"; + Statement statement = connection.createStatement(); + + assertThrows(IllegalArgumentException.class, () -> statement.addBatch(query)); + connection.close(); + } + + @Test + public void testValidExecuteBatch() throws SQLException { + // setup + String BATCH_TABLE = "JDBC_EXECUTE_BATCH_TABLE_" + random.nextInt(99); + String createBatchTable = + String.format( + "CREATE OR REPLACE TABLE %s.%s (`id` INTEGER, `name` STRING, `age` INTEGER);", + DATASET, BATCH_TABLE); + bigQueryStatement.execute(createBatchTable); + Driver driver = BigQueryDriver.getRegisteredDriver(); + Connection connection = driver.connect(connection_uri, new Properties()); + Statement statement = connection.createStatement(); + + // act + // batch bypasses the 16 concurrent limit + int[] results; + for (int i = 0; i < 3; i++) { + String insertQuery = + "INSERT INTO " + + DATASET + + "." + + BATCH_TABLE + + " (id, name, age) " + + "VALUES (12, 'Farhan', " + + randomNumber + + i + + "); "; + statement.addBatch(insertQuery); + } + results = statement.executeBatch(); + + // assertions + assertEquals(3, results.length); + for (int updateCount : results) { + assertEquals(1, updateCount); + } + bigQueryStatement.execute(String.format("DROP TABLE IF EXISTS %S.%s", DATASET, BATCH_TABLE)); + connection.close(); + } + + @Test + public void testAddBatchWithoutSemicolon() throws SQLException { + // setup + String BATCH_TABLE = "JDBC_EXECUTE_BATCH_TABLE_MISSING_SEMICOLON_" + random.nextInt(99); + String createBatchTable = + String.format( + "CREATE OR REPLACE TABLE %s.%s (`id` INTEGER, `name` STRING, `age` INTEGER);", + DATASET, BATCH_TABLE); + bigQueryStatement.execute(createBatchTable); + Driver driver = BigQueryDriver.getRegisteredDriver(); + Connection connection = driver.connect(connection_uri, new Properties()); + Statement statement = connection.createStatement(); + + // act + // batch bypasses the 16 
concurrent limit + String insertQuery = + "INSERT INTO " + + DATASET + + "." + + BATCH_TABLE + + " (id, name, age) " + + "VALUES (12, 'Farhan', 4)"; + statement.addBatch(insertQuery); + statement.addBatch(insertQuery); + int[] results = statement.executeBatch(); + + // assertions + assertEquals(2, results.length); + for (int updateCount : results) { + assertEquals(1, updateCount); + } + bigQueryStatement.execute(String.format("DROP TABLE IF EXISTS %S.%s", DATASET, BATCH_TABLE)); + connection.close(); + } + + @Test + public void testEmptySqlToAddBatch() throws SQLException { + // setup + String BATCH_TABLE = "JDBC_EMPTY_EXECUTE_BATCH_TABLE_" + random.nextInt(99); + String createBatchTable = + String.format( + "CREATE OR REPLACE TABLE %s.%s (`id` INTEGER, `name` STRING, `age` INTEGER);", + DATASET, BATCH_TABLE); + bigQueryStatement.execute(createBatchTable); + Driver driver = BigQueryDriver.getRegisteredDriver(); + Connection connection = driver.connect(connection_uri, new Properties()); + Statement statement = connection.createStatement(); + + // act + // batch bypasses the 16 concurrent limit + String emptySql = ""; + statement.addBatch(emptySql); + int[] results = statement.executeBatch(); + + // assertions + assertEquals(0, results.length); + bigQueryStatement.execute(String.format("DROP TABLE IF EXISTS %S.%s", DATASET, BATCH_TABLE)); + connection.close(); + } + + @Test + public void testEmptyExecuteBatch() throws SQLException { + Driver driver = BigQueryDriver.getRegisteredDriver(); + Connection connection = driver.connect(connection_uri, new Properties()); + Statement statement = connection.createStatement(); + int[] result = statement.executeBatch(); + + assertEquals(0, result.length); + connection.close(); + } + + @Test + public void testNonValidStatementTypeForAddBatchThrows() { + String BATCH_TABLE = "JDBC_EXECUTE_BATCH_TABLE_NON_VALID_TYPE_" + random.nextInt(99); + String createBatchTable = + String.format( + "CREATE OR REPLACE TABLE %s.%s (`id` INTEGER, 
`name` STRING, `age` INTEGER);", + DATASET, BATCH_TABLE); + assertThrows( + IllegalArgumentException.class, () -> bigQueryStatement.addBatch(createBatchTable)); + } + + @Test + public void testAllValidStatementTypesForAddBatch() throws SQLException { + // setup + String BATCH_TABLE = "JDBC_EXECUTE_BATCH_TABLE_ALL_VALID_TYPES_" + random.nextInt(99); + String createBatchTable = + String.format( + "CREATE OR REPLACE TABLE %s.%s (`id` INTEGER, `name` STRING, `age` INTEGER);", + DATASET, BATCH_TABLE); + bigQueryStatement.execute(createBatchTable); + String insertQuery = + "INSERT INTO " + + DATASET + + "." + + BATCH_TABLE + + " (id, name, age) " + + "VALUES (12, 'Farhan', " + + randomNumber + + "); "; + String updateQuery = + String.format( + "UPDATE %s.%s SET age = 13 WHERE age = %s;", DATASET, BATCH_TABLE, randomNumber); + String deleteQuery = + String.format("DELETE FROM %s.%s WHERE name='Farhan';", DATASET, BATCH_TABLE); + Driver driver = BigQueryDriver.getRegisteredDriver(); + Connection connection = driver.connect(connection_uri, new Properties()); + Statement statement = connection.createStatement(); + + // act + statement.addBatch(insertQuery); + statement.addBatch(updateQuery); + statement.addBatch(deleteQuery); + int[] results = statement.executeBatch(); + + // assertion + for (int updateCount : results) { + assertEquals(1, updateCount); + } + bigQueryStatement.execute(String.format("DROP TABLE IF EXISTS %S.%s", DATASET, BATCH_TABLE)); + connection.close(); + } + + @Test + public void testUnsupportedHTAPIFallbacksToStandardQueriesWithRange() throws SQLException { + String selectQuery = "select * from `DATATYPERANGETEST.RangeIntervalTestTable` LIMIT 5000;"; + String connection_uri = + "jdbc:bigquery://https://www.googleapis.com/bigquery/v2:443;" + + "OAuthType=3;ProjectId=" + + PROJECT_ID + + ";MaxResults=500;HighThroughputActivationRatio=1;" + + "HighThroughputMinTableSize=100;" + + "EnableHighThroughputAPI=1;UnsupportedHTAPIFallback=1;JobCreationMode=1;"; + + 
// Read data via JDBC + Connection connection = DriverManager.getConnection(connection_uri); + Statement statement = connection.createStatement(); + ResultSet resultSet = statement.executeQuery(selectQuery); + assertNotNull(resultSet); + + ResultSetMetaData resultSetMetaData = resultSet.getMetaData(); + resultSet.next(); + assertEquals(3, resultSetMetaData.getColumnCount()); + connection.close(); + } + + @Test + public void testIntervalDataTypeWithArrowResultSet() throws SQLException { + String selectQuery = + "select * from `DATATYPERANGETEST.RangeIntervalTestTable` order by intColumn limit 5000;"; + String connection_uri = + "jdbc:bigquery://https://www.googleapis.com/bigquery/v2:443;" + + "OAuthType=3;ProjectId=" + + PROJECT_ID + + ";MaxResults=500;HighThroughputActivationRatio=1;" + + "HighThroughputMinTableSize=100;" + + "EnableHighThroughputAPI=1;JobCreationMode=1;"; + + // Read data via JDBC + Connection connection = DriverManager.getConnection(connection_uri); + Statement statement = connection.createStatement(); + + ResultSet resultSet = statement.executeQuery(selectQuery); + assertTrue(resultSet.getClass().getName().contains("BigQueryArrowResultSet")); + resultSet.next(); + assertEquals("0-0 10 -12:30:0.0", resultSet.getString("intervalField")); + + // cleanup + connection.close(); + } + + @Test + public void testIntervalDataTypeWithJsonResultSet() throws SQLException { + String selectQuery = + "select * from `DATATYPERANGETEST.RangeIntervalTestTable` order by intColumn limit 10 ;"; + String connection_uri = + "jdbc:bigquery://https://www.googleapis.com/bigquery/v2:443;" + + "OAuthType=3;ProjectId=" + + PROJECT_ID + + ";MaxResults=500;HighThroughputActivationRatio=1;" + + "HighThroughputMinTableSize=100;" + + "EnableHighThroughputAPI=0;JobCreationMode=1;"; + + // Read data via JDBC + Connection connection = DriverManager.getConnection(connection_uri); + Statement statement = connection.createStatement(); + + ResultSet resultSet = 
statement.executeQuery(selectQuery); + assertTrue(resultSet.getClass().getName().contains("BigQueryJsonResultSet")); + resultSet.next(); + assertEquals("0-0 10 -12:30:0", resultSet.getString("intervalField")); + + // cleanup + connection.close(); + } + + @Test + public void testValidLEPEndpointQuery() throws SQLException { + String DATASET = "JDBC_REGIONAL_DATASET"; + String TABLE_NAME = "REGIONAL_TABLE"; + String selectQuery = "select * from " + DATASET + "." + TABLE_NAME; + String connection_uri = + "jdbc:bigquery://https://googleapis.com/bigquery/v2:443;" + + "OAuthType=3;" + + "ProjectId=" + + PROJECT_ID + + ";" + + "EndpointOverrides=BIGQUERY=https://us-east4-bigquery.googleapis.com;"; + + // Read data via JDBC + Connection connection = DriverManager.getConnection(connection_uri); + Statement statement = connection.createStatement(); + ResultSet resultSet = statement.executeQuery(selectQuery); + assertNotNull(resultSet.getMetaData()); + connection.close(); + } + + @Test + public void testValidEndpointWithInvalidBQPortThrows() throws SQLException { + String TABLE_NAME = "JDBC_REGIONAL_TABLE_" + randomNumber; + String selectQuery = "select * from " + DATASET + "." + TABLE_NAME; + String connection_uri = + "jdbc:bigquery://https://googleapis.com/bigquery/v2:443;" + + "OAuthType=3;" + + "ProjectId=" + + PROJECT_ID + + ";" + + "EndpointOverrides=BIGQUERY=https://us-east4-bigquery.googleapis.com:12312312;"; + + // Read data via JDBC + Connection connection = DriverManager.getConnection(connection_uri); + Statement statement = connection.createStatement(); + assertThrows(BigQueryJdbcException.class, () -> statement.executeQuery(selectQuery)); + connection.close(); + } + + @Test + public void testLEPEndpointDataNotFoundThrows() throws SQLException { + String DATASET = "JDBC_REGIONAL_DATASET"; + String TABLE_NAME = "REGIONAL_TABLE"; + String selectQuery = "select * from " + DATASET + "." 
+ TABLE_NAME; + String connection_uri = + "jdbc:bigquery://https://www.googleapis.com/bigquery/v2:443;" + + "OAuthType=3;" + + "ProjectId=" + + PROJECT_ID + + ";" + + "EndpointOverrides=BIGQUERY=https://us-east5-bigquery.googleapis.com;"; + + // Attempting read data via JDBC + Connection connection = DriverManager.getConnection(connection_uri); + Statement statement = connection.createStatement(); + assertThrows(BigQueryJdbcException.class, () -> statement.executeQuery(selectQuery)); + connection.close(); + } + + @Test + public void testValidREPEndpointQuery() throws SQLException { + String DATASET = "JDBC_REGIONAL_DATASET"; + String TABLE_NAME = "REGIONAL_TABLE"; + String selectQuery = "select * from " + DATASET + "." + TABLE_NAME; + String connection_uri = + "jdbc:bigquery://https://www.googleapis.com/bigquery/v2:443;" + + "OAuthType=3;" + + "ProjectId=" + + PROJECT_ID + + ";" + + "EndpointOverrides=BIGQUERY=https://bigquery.us-east4.rep.googleapis.com;"; + + // Read data via JDBC + Connection connection = DriverManager.getConnection(connection_uri); + Statement statement = connection.createStatement(); + ResultSet resultSet = statement.executeQuery(selectQuery); + assertNotNull(resultSet.getMetaData()); + connection.close(); + } + + @Test + public void testREPEndpointDataNotFoundThrows() throws SQLException { + String DATASET = "JDBC_REGIONAL_DATASET"; + String TABLE_NAME = "REGIONAL_TABLE"; + String selectQuery = "select * from " + DATASET + "." 
+ TABLE_NAME; + String connection_uri = + "jdbc:bigquery://https://www.googleapis.com/bigquery/v2:443;" + + "OAuthType=3;" + + "ProjectId=" + + PROJECT_ID + + ";" + + "EndpointOverrides=BIGQUERY=https://bigquery.us-east7.rep.googleapis.com;"; + + // Attempting read data via JDBC + Connection connection = DriverManager.getConnection(connection_uri); + Statement statement = connection.createStatement(); + assertThrows(BigQueryJdbcException.class, () -> statement.executeQuery(selectQuery)); + connection.close(); + } + + @Test + public void testCloseStatement() throws SQLException { + String query = "SELECT * FROM `bigquery-public-data.samples.github_timeline` LIMIT 10"; + Statement statement = bigQueryConnection.createStatement(); + ResultSet jsonResultSet = statement.executeQuery(query); + assertEquals(10, resultSetRowCount(jsonResultSet)); + statement.close(); + assertTrue(statement.isClosed()); + } + + @Test + public void testCloseableStatementSingleResult() throws SQLException { + String query = "SELECT * FROM `bigquery-public-data.samples.github_timeline` LIMIT 10"; + Statement statement = bigQueryConnection.createStatement(); + statement.closeOnCompletion(); + assertTrue(statement.isCloseOnCompletion()); + ResultSet jsonResultSet = statement.executeQuery(query); + assertFalse(statement.isClosed()); + jsonResultSet.close(); + assertTrue(statement.isClosed()); + } + + @Test + public void testCloseableStatementMultiResult() throws SQLException { + String query = "SELECT * FROM `bigquery-public-data.samples.github_timeline` LIMIT 10;"; + Statement statement = bigQueryConnection.createStatement(); + statement.closeOnCompletion(); + assertTrue(statement.isCloseOnCompletion()); + statement.execute(query + query); + assertNotNull(statement.getResultSet()); + assertFalse(statement.isClosed()); + + assertTrue(statement.getMoreResults()); + assertNotNull(statement.getResultSet()); + assertFalse(statement.isClosed()); + + assertFalse(statement.getMoreResults()); + 
assertTrue(statement.isClosed()); + } + + @Test + public void testCloseableStatementMultiResultExplicitClose() throws SQLException { + String query = "SELECT * FROM `bigquery-public-data.samples.github_timeline` LIMIT 10;"; + Statement statement = bigQueryConnection.createStatement(); + statement.closeOnCompletion(); + assertTrue(statement.isCloseOnCompletion()); + statement.execute(query + query); + ResultSet result = statement.getResultSet(); + result.close(); + assertFalse(statement.isClosed()); + + assertTrue(statement.getMoreResults()); + result = statement.getResultSet(); + result.close(); + assertTrue(statement.isClosed()); + } + + @Test + public void testConnectionIsValid() throws SQLException { + assertTrue(bigQueryConnection.isValid(10)); + assertTrue(bigQueryConnectionNoReadApi.isValid(10)); + } + + @Test + public void testDataSource() throws SQLException { + DataSource ds = new DataSource(); + ds.setURL("jdbc:bigquery://https://www.googleapis.com/bigquery/v2:443;"); + ds.setOAuthType(3); + + try (Connection connection = ds.getConnection()) { + assertFalse(connection.isClosed()); + } + } + + @Test + public void testDataSourceOAuthPvtKeyPath() throws SQLException, IOException { + File tempFile = File.createTempFile("auth", ".json"); + tempFile.deleteOnExit(); + DataSource ds = new DataSource(); + ds.setURL("jdbc:bigquery://https://www.googleapis.com/bigquery/v2:443;"); + ds.setOAuthType(0); + ds.setOAuthPvtKeyPath(tempFile.toPath().toString()); + assertEquals(0, ds.getOAuthType()); + assertEquals(tempFile.toPath().toString(), ds.getOAuthPvtKeyPath()); + } + + @Test + public void testPreparedStatementSmallSelect() throws SQLException { + String query = + "SELECT * FROM `bigquery-public-data.samples.github_timeline` where repository_language=?" 
+ + " LIMIT 1000"; + PreparedStatement preparedStatement = bigQueryConnection.prepareStatement(query); + preparedStatement.setString(1, "Java"); + + ResultSet jsonResultSet = preparedStatement.executeQuery(); + + int rowCount = resultSetRowCount(jsonResultSet); + assertEquals(1000, rowCount); + assertTrue(jsonResultSet.getClass().getName().contains("BigQueryJsonResultSet")); + } + + @Test + public void testPreparedStatementExecuteUpdate() throws SQLException { + Random random = new Random(); + String DATASET = "JDBC_INTEGRATION_DATASET"; + String TABLE_NAME1 = "Inventory" + random.nextInt(9999); + String TABLE_NAME2 = "DetailedInventory" + random.nextInt(9999); + + String createQuery = + String.format( + "CREATE OR REPLACE TABLE %s.%s (`product` STRING, `quantity` INTEGER);", + DATASET, TABLE_NAME1); + + String createQuery2 = + String.format( + "CREATE OR REPLACE TABLE %s.%s (`product` STRING, `quantity` INTEGER," + + " `supply_constrained` BOOLEAN, `comment` STRING);", + DATASET, TABLE_NAME2); + + String insertQuery2 = + String.format( + "INSERT INTO %s.%s (product, quantity, supply_constrained, comment) " + + "VALUES ('countertop microwave', 20, NULL,'[]' )," + + " ('front load washer', 20, false,'[]' ), " + + " ('microwave', 20, false,'[]' ), " + + " ('refrigerator', 10, false,'[]' );", + DATASET, TABLE_NAME2); + + bigQueryStatement.execute(createQuery); + bigQueryStatement.execute(createQuery2); + bigQueryStatement.execute(insertQuery2); + + String insertQuery = + String.format( + "INSERT INTO %s.%s (product, quantity) " + "VALUES (?,? ), (?,? );", + DATASET, TABLE_NAME1); + PreparedStatement insertPs = bigQueryConnection.prepareStatement(insertQuery); + insertPs.setString(1, "dishwasher"); + insertPs.setInt(2, 30); + insertPs.setString(3, "dryer"); + insertPs.setInt(4, 30); + + int insertStatus = insertPs.executeUpdate(); + assertEquals(2, insertStatus); + + String updateQuery = + String.format("UPDATE %s.%s SET quantity=? 
WHERE product=?", DATASET, TABLE_NAME1); + PreparedStatement updatePs = bigQueryConnection.prepareStatement(updateQuery); + updatePs.setString(2, "dryer"); + updatePs.setInt(1, 35); + + int updateStatus = updatePs.executeUpdate(); + assertEquals(1, updateStatus); + + String deleteQuery = String.format("DELETE FROM %s.%s WHERE product=?", DATASET, TABLE_NAME1); + PreparedStatement deletePs = bigQueryConnection.prepareStatement(deleteQuery); + deletePs.setString(1, "dishwasher"); + + int deleteStatus = deletePs.executeUpdate(); + assertEquals(1, deleteStatus); + + String mergeQuery = + String.format( + "MERGE %s.%s T\n" + + "USING %s.%s S\n" + + "ON T.product = S.product\n" + + "WHEN NOT MATCHED AND quantity < ? THEN\n" + + " INSERT(product, quantity, supply_constrained, comment)\n" + + " VALUES(product, quantity, true, ?)\n" + + "WHEN NOT MATCHED THEN\n" + + " INSERT(product, quantity, supply_constrained)\n" + + " VALUES(product, quantity, false)", + DATASET, TABLE_NAME2, DATASET, TABLE_NAME1); + PreparedStatement mergePs = bigQueryConnection.prepareStatement(mergeQuery); + mergePs.setInt(1, 20); + mergePs.setString(2, "comment" + random.nextInt(999)); + + int mergeStatus = mergePs.executeUpdate(); + assertEquals(1, mergeStatus); + + ResultSet rs = + bigQueryStatement.executeQuery( + String.format("SELECT COUNT(*) AS row_count\n" + "FROM %s.%s", DATASET, TABLE_NAME2)); + rs.next(); + assertEquals(5, rs.getInt(1)); + + String dropQuery = String.format("DROP TABLE %s.%s", DATASET, TABLE_NAME1); + int dropStatus = bigQueryStatement.executeUpdate(dropQuery); + assertEquals(0, dropStatus); + bigQueryStatement.execute(String.format("DROP TABLE %s.%s", DATASET, TABLE_NAME2)); + } + + @Test + public void testPreparedStatementDateTimeValues() throws SQLException { + Random random = new Random(); + String DATASET = "JDBC_INTEGRATION_DATASET"; + String TABLE_NAME1 = "DateTimeTestTable" + random.nextInt(9999); + + final String createTableQuery = + "CREATE OR REPLACE TABLE " + + 
" `%s.%s` " + + " (\n" + + "`StringField` STRING,\n" + + "`IntegerField` INTEGER,\n" + + "`TimestampField` TIMESTAMP,\n" + + "`TimeField` TIME,\n" + + "`DateField` DATE\n" + + ");"; + + String insertQuery = + String.format("INSERT INTO %s.%s VALUES (?,?,?,?,? );", DATASET, TABLE_NAME1); + + bigQueryStatement.execute(String.format(createTableQuery, DATASET, TABLE_NAME1)); + + PreparedStatement insertPs = bigQueryConnection.prepareStatement(insertQuery); + insertPs.setString(1, "dishwasher"); + insertPs.setInt(2, 1); + insertPs.setTimestamp(3, Timestamp.from(Instant.now())); + insertPs.setTime(4, Time.valueOf(LocalTime.NOON)); + insertPs.setDate(5, Date.valueOf("2025-12-3")); + + int insertStatus = insertPs.executeUpdate(); + assertEquals(1, insertStatus); + + ResultSet rs = + bigQueryStatement.executeQuery( + String.format("SELECT COUNT(*) AS row_count\n" + "FROM %s.%s", DATASET, TABLE_NAME1)); + rs.next(); + assertEquals(1, rs.getInt(1)); + + String dropQuery = String.format("DROP TABLE %s.%s", DATASET, TABLE_NAME1); + int dropStatus = bigQueryStatement.executeUpdate(dropQuery); + assertEquals(0, dropStatus); + } + + @Test + public void testValidDestinationTableSavesQueriesWithLegacySQL() throws SQLException { + // setup + String connection_uri = + "jdbc:bigquery://https://www.googleapis.com/bigquery/v2:443;" + + "OAuthType=3;" + + "ProjectId=" + + PROJECT_ID + + ";QueryDialect=BIG_QUERY;" + + "AllowLargeResults=1;" + + "LargeResultTable=destination_table_test_legacy;" + + "LargeResultDataset=INTEGRATION_TESTS;"; + String selectLegacyQuery = + "SELECT * FROM [bigquery-public-data.deepmind_alphafold.metadata] LIMIT 200;"; + Driver driver = BigQueryDriver.getRegisteredDriver(); + Connection connection = driver.connect(connection_uri, new Properties()); + Statement statement = connection.createStatement(); + + // act + ResultSet resultSet = statement.executeQuery(selectLegacyQuery); + + // assertion + assertNotNull(resultSet); + String selectQuery = "SELECT * FROM 
`INTEGRATION_TESTS.destination_table_test_legacy`;"; + ResultSet actualResultSet = bigQueryStatement.executeQuery(selectQuery); + assertTrue(0 < resultSetRowCount(actualResultSet)); + + // clean up + String deleteRows = "DELETE FROM `INTEGRATION_TESTS.destination_table_test_legacy` WHERE 1=1;"; + bigQueryStatement.execute(deleteRows); + connection.close(); + } + + @Test + public void testValidDestinationTableSavesQueriesWithStandardSQL() throws SQLException { + // setup + String connection_uri = + "jdbc:bigquery://https://www.googleapis.com/bigquery/v2:443;" + + "OAuthType=3;" + + "ProjectId=" + + PROJECT_ID + + ";QueryDialect=SQL;" + + "LargeResultTable=destination_table_test;" + + "LargeResultDataset=INTEGRATION_TESTS;"; + String selectLegacyQuery = + "SELECT * FROM `bigquery-public-data.deepmind_alphafold.metadata` LIMIT 200;"; + Driver driver = BigQueryDriver.getRegisteredDriver(); + Connection connection = driver.connect(connection_uri, new Properties()); + Statement statement = connection.createStatement(); + + // act + ResultSet resultSet = statement.executeQuery(selectLegacyQuery); + + // assertion + assertNotNull(resultSet); + String selectQuery = "SELECT * FROM INTEGRATION_TESTS.destination_table_test;"; + ResultSet actualResultSet = bigQueryStatement.executeQuery(selectQuery); + assertEquals(200, resultSetRowCount(actualResultSet)); + + // clean up + String deleteRows = "DELETE FROM `INTEGRATION_TESTS.destination_table_test` WHERE 1=1;"; + bigQueryStatement.execute(deleteRows); + connection.close(); + } + + @Test + public void testDestinationTableAndDestinationDatasetThatDoesNotExistsCreates() + throws SQLException { + // setup + String connection_uri = + "jdbc:bigquery://https://www.googleapis.com/bigquery/v2:443;" + + "OAuthType=3;" + + "ProjectId=" + + PROJECT_ID + + ";QueryDialect=BIG_QUERY;" + + "AllowLargeResults=1;" + + "LargeResultTable=FakeTable;" + + "LargeResultDataset=FakeDataset;"; + String selectLegacyQuery = + "SELECT * FROM 
[bigquery-public-data.deepmind_alphafold.metadata] LIMIT 200;"; + Driver driver = BigQueryDriver.getRegisteredDriver(); + Connection connection = driver.connect(connection_uri, new Properties()); + Statement statement = connection.createStatement(); + + // act + ResultSet resultSet = statement.executeQuery(selectLegacyQuery); + + // assertion + assertNotNull(resultSet); + String separateQuery = "SELECT * FROM FakeDataset.FakeTable;"; + boolean result = bigQueryStatement.execute(separateQuery); + assertTrue(result); + + // clean up + bigQueryStatement.execute("DROP SCHEMA FakeDataset CASCADE;"); + connection.close(); + } + + @Test + public void testDestinationTableWithMissingDestinationDatasetDefaults() throws SQLException { + // setup + String connection_uri = + "jdbc:bigquery://https://www.googleapis.com/bigquery/v2:443;" + + "OAuthType=3;" + + "ProjectId=" + + PROJECT_ID + + ";QueryDialect=BIG_QUERY;" + + "AllowLargeResults=1;" + + "LargeResultTable=FakeTable;"; + String selectLegacyQuery = + "SELECT * FROM [bigquery-public-data.deepmind_alphafold.metadata] LIMIT 200;"; + Driver driver = BigQueryDriver.getRegisteredDriver(); + Connection connection = driver.connect(connection_uri, new Properties()); + Statement statement = connection.createStatement(); + + // act + ResultSet resultSet = statement.executeQuery(selectLegacyQuery); + + // assertion + assertNotNull(resultSet); + String separateQuery = "SELECT * FROM _google_jdbc.FakeTable;"; + boolean result = bigQueryStatement.execute(separateQuery); + assertTrue(result); + connection.close(); + } + + @Test + public void testNonSelectForLegacyDestinationTableThrows() throws SQLException { + // setup + String TRANSACTION_TABLE = "JDBC_TRANSACTION_TABLE" + random.nextInt(99); + String createTransactionTable = + String.format( + "CREATE OR REPLACE TABLE %s.%s (`id` INTEGER, `name` STRING, `age` INTEGER);", + DATASET, TRANSACTION_TABLE); + String connection_uri = + 
"jdbc:bigquery://https://www.googleapis.com/bigquery/v2:443;" + + "OAuthType=3;" + + "ProjectId=" + + PROJECT_ID + + ";QueryDialect=BIG_QUERY;" + + "AllowLargeResults=1;" + + "LargeResultTable=destination_table_test;" + + "LargeResultDataset=INTEGRATION_TESTS;"; + Driver driver = BigQueryDriver.getRegisteredDriver(); + Connection connection = driver.connect(connection_uri, new Properties()); + Statement statement = connection.createStatement(); + + // act & assertion + assertThrows(BigQueryJdbcException.class, () -> statement.execute(createTransactionTable)); + connection.close(); + } + + @Test + public void testNonSelectForStandardDestinationTableDoesNotThrow() throws SQLException { + // setup + String TRANSACTION_TABLE = "JDBC_TRANSACTION_TABLE" + random.nextInt(99); + String createTransactionTable = + String.format( + "CREATE OR REPLACE TABLE %s.%s (`id` INTEGER, `name` STRING, `age` INTEGER);", + DATASET, TRANSACTION_TABLE); + String connection_uri = + "jdbc:bigquery://https://www.googleapis.com/bigquery/v2:443;" + + "OAuthType=3;" + + "ProjectId=" + + PROJECT_ID + + ";QueryDialect=SQL;" + + "AllowLargeResults=1;" + + "LargeResultTable=destination_table_test;" + + "LargeResultDataset=INTEGRATION_TESTS;"; + Driver driver = BigQueryDriver.getRegisteredDriver(); + Connection connection = driver.connect(connection_uri, new Properties()); + Statement statement = connection.createStatement(); + + // act & assertion + statement.execute(createTransactionTable); + connection.close(); + } + + @Test + public void testTableConstraints() throws SQLException { + ResultSet primaryKey1 = + bigQueryConnection + .getMetaData() + .getPrimaryKeys(PROJECT_ID, CONSTRAINTS_DATASET, CONSTRAINTS_TABLE_NAME); + primaryKey1.next(); + assertEquals("id", primaryKey1.getString(4)); + assertFalse(primaryKey1.next()); + + ResultSet primaryKey2 = + bigQueryConnection + .getMetaData() + .getPrimaryKeys(PROJECT_ID, CONSTRAINTS_DATASET, CONSTRAINTS_TABLE_NAME2); + primaryKey2.next(); + 
assertEquals("first_name", primaryKey2.getString(4)); + primaryKey2.next(); + assertEquals("last_name", primaryKey2.getString(4)); + assertFalse(primaryKey2.next()); + + ResultSet foreignKeys = + bigQueryConnection + .getMetaData() + .getImportedKeys(PROJECT_ID, CONSTRAINTS_DATASET, CONSTRAINTS_TABLE_NAME); + foreignKeys.next(); + assertEquals(CONSTRAINTS_TABLE_NAME2, foreignKeys.getString(3)); + assertEquals("first_name", foreignKeys.getString(4)); + assertEquals("name", foreignKeys.getString(8)); + foreignKeys.next(); + assertEquals(CONSTRAINTS_TABLE_NAME2, foreignKeys.getString(3)); + assertEquals("last_name", foreignKeys.getString(4)); + assertEquals("second_name", foreignKeys.getString(8)); + foreignKeys.next(); + assertEquals(CONSTRAINTS_TABLE_NAME3, foreignKeys.getString(3)); + assertEquals("address", foreignKeys.getString(4)); + assertEquals("address", foreignKeys.getString(8)); + assertFalse(foreignKeys.next()); + + ResultSet crossReference = + bigQueryConnection + .getMetaData() + .getCrossReference( + PROJECT_ID, + CONSTRAINTS_DATASET, + CONSTRAINTS_TABLE_NAME2, + PROJECT_ID, + CONSTRAINTS_DATASET, + CONSTRAINTS_TABLE_NAME); + crossReference.next(); + assertEquals(CONSTRAINTS_TABLE_NAME2, crossReference.getString(3)); + assertEquals("first_name", crossReference.getString(4)); + assertEquals("name", crossReference.getString(8)); + crossReference.next(); + assertEquals("last_name", crossReference.getString(4)); + assertEquals("second_name", crossReference.getString(8)); + assertFalse(crossReference.next()); + } + + @Test + public void testDatabaseMetadataGetCatalogs() throws SQLException { + DatabaseMetaData databaseMetaData = bigQueryConnection.getMetaData(); + try (ResultSet rs = databaseMetaData.getCatalogs()) { + assertNotNull("ResultSet from getCatalogs() should not be null", rs); + + ResultSetMetaData rsmd = rs.getMetaData(); + assertNotNull("ResultSetMetaData should not be null", rsmd); + assertEquals("Should have one column", 1, 
rsmd.getColumnCount()); + assertEquals("Column name should be TABLE_CAT", "TABLE_CAT", rsmd.getColumnName(1)); + + assertTrue("ResultSet should have one row", rs.next()); + assertEquals("Catalog name should match Project ID", PROJECT_ID, rs.getString("TABLE_CAT")); + assertFalse("ResultSet should have no more rows", rs.next()); + } + } + + @Test + public void testDatabaseMetadataGetProcedures() throws SQLException { + String DATASET = "JDBC_INTEGRATION_DATASET"; + String procedureName = "create_customer"; + DatabaseMetaData databaseMetaData = bigQueryConnection.getMetaData(); + ResultSet resultSet = databaseMetaData.getProcedures(PROJECT_ID, DATASET, procedureName); + while (resultSet.next()) { + assertEquals(PROJECT_ID, resultSet.getString("PROCEDURE_CAT")); + assertEquals(DATASET, resultSet.getString("PROCEDURE_SCHEM")); + assertEquals(procedureName, resultSet.getString("PROCEDURE_NAME")); + assertEquals(procedureName, resultSet.getString("SPECIFIC_NAME")); + assertEquals(DatabaseMetaData.procedureResultUnknown, resultSet.getInt("PROCEDURE_TYPE")); + } + } + + @Test + public void testDatabaseMetadataGetProcedureColumns() throws SQLException { + DatabaseMetaData databaseMetaData = bigQueryConnection.getMetaData(); + + // --- Test Case 1: Specific schema and procedure, null column name pattern --- + String specificSchema = "JDBC_INTEGRATION_DATASET"; + String specificProcedure = "create_customer"; + ResultSet resultSet = + databaseMetaData.getProcedureColumns(PROJECT_ID, specificSchema, specificProcedure, null); + int specificProcRows = 0; + boolean foundNameParam = false; + boolean foundIdParam = false; + while (resultSet.next()) { + specificProcRows++; + assertEquals(PROJECT_ID, resultSet.getString("PROCEDURE_CAT")); + assertEquals(specificSchema, resultSet.getString("PROCEDURE_SCHEM")); + assertEquals(specificProcedure, resultSet.getString("PROCEDURE_NAME")); + assertEquals(specificProcedure, resultSet.getString("SPECIFIC_NAME")); + if 
("name".equals(resultSet.getString("COLUMN_NAME"))) { + foundNameParam = true; + assertEquals(1, resultSet.getInt("ORDINAL_POSITION")); + } + if ("id".equals(resultSet.getString("COLUMN_NAME"))) { + foundIdParam = true; + assertEquals(2, resultSet.getInt("ORDINAL_POSITION")); + } + } + assertEquals("Should find 2 parameters for " + specificProcedure, 2, specificProcRows); + assertTrue("Parameter 'name' should be found", foundNameParam); + assertTrue("Parameter 'id' should be found", foundIdParam); + resultSet.close(); + + // --- Test Case 2: Specific schema, procedure, and column name pattern --- + String specificColumn = "name"; + resultSet = + databaseMetaData.getProcedureColumns( + PROJECT_ID, specificSchema, specificProcedure, specificColumn); + assertTrue("Should find the specific column 'name'", resultSet.next()); + assertEquals(PROJECT_ID, resultSet.getString("PROCEDURE_CAT")); + assertEquals(specificSchema, resultSet.getString("PROCEDURE_SCHEM")); + assertEquals(specificProcedure, resultSet.getString("PROCEDURE_NAME")); + assertEquals(specificColumn, resultSet.getString("COLUMN_NAME")); + assertEquals(1, resultSet.getInt("ORDINAL_POSITION")); + assertEquals( + (short) DatabaseMetaData.procedureColumnUnknown, resultSet.getShort("COLUMN_TYPE")); + assertEquals(java.sql.Types.NVARCHAR, resultSet.getInt("DATA_TYPE")); + assertEquals("NVARCHAR", resultSet.getString("TYPE_NAME")); + assertFalse("Should only find one row for exact column match", resultSet.next()); + resultSet.close(); + + // --- Test Case 3: Non-existent procedure --- + resultSet = + databaseMetaData.getProcedureColumns( + PROJECT_ID, specificSchema, "non_existent_procedure_xyz", null); + assertFalse("Should not find columns for a non-existent procedure", resultSet.next()); + resultSet.close(); + } + + @Test + public void testDatabaseMetadataGetColumns() throws SQLException { + String DATASET = "JDBC_INTEGRATION_DATASET"; + String TABLE_NAME = "JDBC_DATATYPES_INTEGRATION_TEST_TABLE"; + 
    DatabaseMetaData databaseMetaData = bigQueryConnection.getMetaData();

    // --- Test Case 1: Specific Column (StringField) ---
    // NOTE(review): ordinal positions and the row count of 16 below assume the fixed layout of
    // the JDBC_DATATYPES_INTEGRATION_TEST_TABLE fixture — confirm against the dataset setup.
    ResultSet resultSet =
        databaseMetaData.getColumns(PROJECT_ID, DATASET, TABLE_NAME, "StringField");

    assertTrue(resultSet.next());
    assertEquals(PROJECT_ID, resultSet.getString("TABLE_CAT"));
    assertEquals(DATASET, resultSet.getString("TABLE_SCHEM"));
    assertEquals(TABLE_NAME, resultSet.getString("TABLE_NAME"));
    assertEquals("StringField", resultSet.getString("COLUMN_NAME"));
    assertEquals("NVARCHAR", resultSet.getString("TYPE_NAME"));
    // getObject + wasNull: asserts the column reported SQL NULL (no defined size/digits).
    resultSet.getObject("COLUMN_SIZE");
    assertTrue(resultSet.wasNull());
    resultSet.getObject("DECIMAL_DIGITS");
    assertTrue(resultSet.wasNull());
    assertEquals(1, resultSet.getInt("NULLABLE"));
    assertEquals(6, resultSet.getInt("ORDINAL_POSITION"));
    assertFalse(resultSet.next());

    // --- Test Case 2: All Columns ---
    resultSet = databaseMetaData.getColumns(PROJECT_ID, DATASET, TABLE_NAME, null);
    assertTrue(resultSet.next());
    int count = 0;
    do {
      count++;
      assertEquals(PROJECT_ID, resultSet.getString("TABLE_CAT"));
      assertEquals(DATASET, resultSet.getString("TABLE_SCHEM"));
      assertEquals(TABLE_NAME, resultSet.getString("TABLE_NAME"));
      assertNotNull(resultSet.getString("COLUMN_NAME"));
    } while (resultSet.next());
    assertEquals(16, count);

    // --- Test Case 3: Column Name Pattern Matching (%Time%) ---
    resultSet = databaseMetaData.getColumns(PROJECT_ID, DATASET, TABLE_NAME, "%Time%");
    assertTrue(resultSet.next());
    count = 0;
    do {
      count++;
      String columnName = resultSet.getString("COLUMN_NAME");
      assertTrue(columnName.contains("Time"));
    } while (resultSet.next());
    assertEquals(3, count);

    // --- Test Case 4: Column Name Pattern Matching (Integer%) ---
    resultSet = databaseMetaData.getColumns(PROJECT_ID, DATASET, TABLE_NAME, "Integer%");
    assertTrue(resultSet.next());
    assertEquals("IntegerField", resultSet.getString("COLUMN_NAME"));
    assertEquals("BIGINT", resultSet.getString("TYPE_NAME"));
    // BIGINT: 19 decimal digits of precision, scale 0, radix 10.
    assertEquals(19, resultSet.getInt("COLUMN_SIZE"));
    assertEquals(0, resultSet.getInt("DECIMAL_DIGITS"));
    assertEquals(10, resultSet.getInt("NUM_PREC_RADIX"));
    assertEquals(1, resultSet.getInt("NULLABLE"));
    assertEquals(2, resultSet.getInt("ORDINAL_POSITION"));
    assertFalse(resultSet.next());

    // --- Test Case 5: Specific Column (BooleanField) ---
    resultSet = databaseMetaData.getColumns(PROJECT_ID, DATASET, TABLE_NAME, "BooleanField");
    assertTrue(resultSet.next());
    assertEquals("BooleanField", resultSet.getString("COLUMN_NAME"));
    assertEquals("BOOLEAN", resultSet.getString("TYPE_NAME"));
    assertEquals(1, resultSet.getInt("COLUMN_SIZE"));
    resultSet.getObject("DECIMAL_DIGITS");
    assertTrue(resultSet.wasNull());
    resultSet.getObject("NUM_PREC_RADIX");
    assertTrue(resultSet.wasNull());
    assertEquals(1, resultSet.getInt("NULLABLE"));
    assertEquals(1, resultSet.getInt("ORDINAL_POSITION"));
    assertFalse(resultSet.next());

    // --- Test Case 6: Specific Column (NumericField) ---
    // NUMERIC: precision 38, scale 9 — BigQuery's fixed NUMERIC parameters.
    resultSet = databaseMetaData.getColumns(PROJECT_ID, DATASET, TABLE_NAME, "NumericField");
    assertTrue(resultSet.next());
    assertEquals("NumericField", resultSet.getString("COLUMN_NAME"));
    assertEquals("NUMERIC", resultSet.getString("TYPE_NAME"));
    assertEquals(38, resultSet.getInt("COLUMN_SIZE"));
    assertEquals(9, resultSet.getInt("DECIMAL_DIGITS"));
    assertEquals(10, resultSet.getInt("NUM_PREC_RADIX"));
    assertEquals(1, resultSet.getInt("NULLABLE"));
    assertEquals(4, resultSet.getInt("ORDINAL_POSITION"));
    assertFalse(resultSet.next());

    // --- Test Case 7: Specific Column (BytesField) ---
    resultSet = databaseMetaData.getColumns(PROJECT_ID, DATASET, TABLE_NAME, "BytesField");
    assertTrue(resultSet.next());
    assertEquals("BytesField", resultSet.getString("COLUMN_NAME"));
    assertEquals("VARBINARY", resultSet.getString("TYPE_NAME"));
    resultSet.getObject("COLUMN_SIZE");
    assertTrue(resultSet.wasNull());
    resultSet.getObject("DECIMAL_DIGITS");
    assertTrue(resultSet.wasNull());
    resultSet.getObject("NUM_PREC_RADIX");
    assertTrue(resultSet.wasNull());
    assertEquals(1, resultSet.getInt("NULLABLE"));
    assertEquals(7, resultSet.getInt("ORDINAL_POSITION"));
    assertFalse(resultSet.next());

    // --- Test Case 8: Specific Column (ArrayField) ---
    resultSet = databaseMetaData.getColumns(PROJECT_ID, DATASET, TABLE_NAME, "ArrayField");
    assertTrue(resultSet.next());
    assertEquals("ArrayField", resultSet.getString("COLUMN_NAME"));
    assertEquals("ARRAY", resultSet.getString("TYPE_NAME"));
    resultSet.getObject("COLUMN_SIZE");
    assertTrue(resultSet.wasNull());
    resultSet.getObject("DECIMAL_DIGITS");
    assertTrue(resultSet.wasNull());
    resultSet.getObject("NUM_PREC_RADIX");
    assertTrue(resultSet.wasNull());
    assertEquals(1, resultSet.getInt("NULLABLE"));
    assertEquals(9, resultSet.getInt("ORDINAL_POSITION"));
    assertFalse(resultSet.next());

    // --- Test Case 9: Specific Column (TimestampField) ---
    resultSet = databaseMetaData.getColumns(PROJECT_ID, DATASET, TABLE_NAME, "TimestampField");
    assertTrue(resultSet.next());
    assertEquals("TimestampField", resultSet.getString("COLUMN_NAME"));
    assertEquals("TIMESTAMP", resultSet.getString("TYPE_NAME"));
    assertEquals(29, resultSet.getInt("COLUMN_SIZE"));
    resultSet.getObject("DECIMAL_DIGITS");
    assertTrue(resultSet.wasNull());
    resultSet.getObject("NUM_PREC_RADIX");
    assertTrue(resultSet.wasNull());
    assertEquals(1, resultSet.getInt("NULLABLE"));
    assertEquals(10, resultSet.getInt("ORDINAL_POSITION"));
    assertFalse(resultSet.next());

    // --- Test Case 10: Specific Column (DateField) ---
    resultSet = databaseMetaData.getColumns(PROJECT_ID, DATASET, TABLE_NAME, "DateField");
    assertTrue(resultSet.next());
    assertEquals("DateField", resultSet.getString("COLUMN_NAME"));
    assertEquals("DATE", resultSet.getString("TYPE_NAME"));
    assertEquals(10,
        resultSet.getInt("COLUMN_SIZE"));
    resultSet.getObject("DECIMAL_DIGITS");
    assertTrue(resultSet.wasNull());
    resultSet.getObject("NUM_PREC_RADIX");
    assertTrue(resultSet.wasNull());
    assertEquals(1, resultSet.getInt("NULLABLE"));
    assertEquals(11, resultSet.getInt("ORDINAL_POSITION"));
    assertFalse(resultSet.next());

    // --- Test Case 11: Specific Column (TimeField) ---
    resultSet = databaseMetaData.getColumns(PROJECT_ID, DATASET, TABLE_NAME, "TimeField");
    assertTrue(resultSet.next());
    assertEquals("TimeField", resultSet.getString("COLUMN_NAME"));
    assertEquals("TIME", resultSet.getString("TYPE_NAME"));
    assertEquals(15, resultSet.getInt("COLUMN_SIZE"));
    resultSet.getObject("DECIMAL_DIGITS");
    assertTrue(resultSet.wasNull());
    resultSet.getObject("NUM_PREC_RADIX");
    assertTrue(resultSet.wasNull());
    assertEquals(1, resultSet.getInt("NULLABLE"));
    assertEquals(12, resultSet.getInt("ORDINAL_POSITION"));
    assertFalse(resultSet.next());

    // --- Test Case 12: Specific Column (DateTimeField) ---
    // DATETIME is surfaced as SQL TIMESTAMP by the driver.
    resultSet = databaseMetaData.getColumns(PROJECT_ID, DATASET, TABLE_NAME, "DateTimeField");
    assertTrue(resultSet.next());
    assertEquals("DateTimeField", resultSet.getString("COLUMN_NAME"));
    assertEquals("TIMESTAMP", resultSet.getString("TYPE_NAME"));
    assertEquals(29, resultSet.getInt("COLUMN_SIZE"));
    resultSet.getObject("DECIMAL_DIGITS");
    assertTrue(resultSet.wasNull());
    resultSet.getObject("NUM_PREC_RADIX");
    assertTrue(resultSet.wasNull());
    assertEquals(1, resultSet.getInt("NULLABLE"));
    assertEquals(13, resultSet.getInt("ORDINAL_POSITION"));
    assertFalse(resultSet.next());

    // --- Test Case 13: Specific Column (GeographyField) ---
    // GEOGRAPHY is surfaced as VARCHAR with no defined size.
    resultSet = databaseMetaData.getColumns(PROJECT_ID, DATASET, TABLE_NAME, "GeographyField");
    assertTrue(resultSet.next());
    assertEquals("GeographyField", resultSet.getString("COLUMN_NAME"));
    assertEquals("VARCHAR", resultSet.getString("TYPE_NAME"));
    resultSet.getObject("COLUMN_SIZE");
assertTrue(resultSet.wasNull()); + resultSet.getObject("DECIMAL_DIGITS"); + assertTrue(resultSet.wasNull()); + resultSet.getObject("NUM_PREC_RADIX"); + assertTrue(resultSet.wasNull()); + assertEquals(1, resultSet.getInt("NULLABLE")); + assertEquals(14, resultSet.getInt("ORDINAL_POSITION")); + assertFalse(resultSet.next()); + } + + @Test + public void testDatabaseMetadataGetTables() throws SQLException { + DatabaseMetaData databaseMetaData = bigQueryConnection.getMetaData(); + String DATASET = "JDBC_TABLE_TYPES_TEST"; + + // --- Test Case 1: Get all tables (types = null) --- + ResultSet rsAll = databaseMetaData.getTables(PROJECT_ID, DATASET, null, null); + Set allTableNames = new HashSet<>(); + while (rsAll.next()) { + allTableNames.add(rsAll.getString("TABLE_NAME")); + } + assertTrue(allTableNames.contains("base_table")); + assertTrue(allTableNames.contains("my_view")); + assertTrue(allTableNames.contains("external_table")); + assertTrue(allTableNames.contains("my_materialized_view")); + assertTrue(allTableNames.contains("base_table_clone")); + assertTrue(allTableNames.contains("base_table_snapshot")); + assertEquals(6, allTableNames.size()); + + // --- Test Case 2: Get only "TABLE" type --- + ResultSet rsTable = + databaseMetaData.getTables(PROJECT_ID, DATASET, null, new String[] {"TABLE"}); + Set tableNames = new HashSet<>(); + while (rsTable.next()) { + tableNames.add(rsTable.getString("TABLE_NAME")); + } + assertTrue(tableNames.contains("base_table")); + assertTrue(tableNames.contains("base_table_clone")); + assertEquals(2, tableNames.size()); + + // --- Test Case 3: Get "VIEW" type --- + ResultSet rsView = databaseMetaData.getTables(PROJECT_ID, DATASET, null, new String[] {"VIEW"}); + assertTrue(rsView.next()); + assertEquals("my_view", rsView.getString("TABLE_NAME")); + assertEquals("VIEW", rsView.getString("TABLE_TYPE")); + assertFalse(rsView.next()); + + // --- Test Case 4: Get "EXTERNAL TABLE" type --- + ResultSet rsExternal = + 
databaseMetaData.getTables(PROJECT_ID, DATASET, null, new String[] {"EXTERNAL"}); + assertTrue(rsExternal.next()); + assertEquals("external_table", rsExternal.getString("TABLE_NAME")); + assertEquals("EXTERNAL", rsExternal.getString("TABLE_TYPE")); + assertFalse(rsExternal.next()); + + // --- Test Case 5: Get "MATERIALIZED_VIEW" type --- + ResultSet rsMaterialized = + databaseMetaData.getTables(PROJECT_ID, DATASET, null, new String[] {"MATERIALIZED_VIEW"}); + assertTrue(rsMaterialized.next()); + assertEquals("my_materialized_view", rsMaterialized.getString("TABLE_NAME")); + assertEquals("MATERIALIZED_VIEW", rsMaterialized.getString("TABLE_TYPE")); + assertFalse(rsMaterialized.next()); + + // --- Test Case 6: Get "SNAPSHOT" type --- + ResultSet rsSnapshot = + databaseMetaData.getTables(PROJECT_ID, DATASET, null, new String[] {"SNAPSHOT"}); + assertTrue(rsSnapshot.next()); + assertEquals("base_table_snapshot", rsSnapshot.getString("TABLE_NAME")); + assertEquals("SNAPSHOT", rsSnapshot.getString("TABLE_TYPE")); + assertFalse(rsSnapshot.next()); + + // --- Test Case 8: Get multiple types ("TABLE" and "VIEW") --- + ResultSet rsMulti = + databaseMetaData.getTables(PROJECT_ID, DATASET, null, new String[] {"TABLE", "VIEW"}); + Set multiTableNames = new HashSet<>(); + while (rsMulti.next()) { + multiTableNames.add(rsMulti.getString("TABLE_NAME")); + } + assertTrue(multiTableNames.contains("base_table")); + assertTrue(multiTableNames.contains("base_table_clone")); + assertTrue(multiTableNames.contains("my_view")); + assertEquals(3, multiTableNames.size()); + + // --- Test Case 9: tableNamePattern --- + ResultSet rsNamePattern = databaseMetaData.getTables(PROJECT_ID, DATASET, "base%", null); + Set baseTableNames = new HashSet<>(); + while (rsNamePattern.next()) { + baseTableNames.add(rsNamePattern.getString("TABLE_NAME")); + } + assertTrue(baseTableNames.contains("base_table")); + assertTrue(baseTableNames.contains("base_table_clone")); + 
assertTrue(baseTableNames.contains("base_table_snapshot")); + assertEquals(3, baseTableNames.size()); + + // --- Test Case 10: No matching table --- + ResultSet rsNoMatch = + databaseMetaData.getTables(PROJECT_ID, DATASET, "nonexistent_table", null); + assertFalse(rsNoMatch.next()); + + // --- Test Case 11: Null type in array --- + ResultSet rsNullType = + databaseMetaData.getTables(PROJECT_ID, DATASET, null, new String[] {null, "VIEW"}); + assertTrue(rsNullType.next()); + assertEquals("VIEW", rsNullType.getString("TABLE_TYPE")); + assertEquals("my_view", rsNullType.getString("TABLE_NAME")); + assertFalse(rsNullType.next()); + } + + @Test + public void testDatabaseMetadataGetSchemas() throws SQLException { + DatabaseMetaData databaseMetaData = bigQueryConnection.getMetaData(); + + // Test case 1: Get all schemas with catalog and check for the presence of specific schemas + ResultSet rsAll = databaseMetaData.getSchemas(PROJECT_ID, null); + Set actualSchemas = new HashSet<>(); + while (rsAll.next()) { + assertEquals(PROJECT_ID, rsAll.getString("TABLE_CATALOG")); + actualSchemas.add(rsAll.getString("TABLE_SCHEM")); + } + assertTrue(actualSchemas.contains("JDBC_INTEGRATION_DATASET")); + assertTrue(actualSchemas.contains("JDBC_TABLE_TYPES_TEST")); + assertTrue(actualSchemas.contains("ODBC_TEST_DATASET")); + + // Test case 2: Get schemas with catalog and schemaPattern matching "JDBC_NIGHTLY_IT_DATASET" + ResultSet rsPattern = databaseMetaData.getSchemas(PROJECT_ID, "JDBC_NIGHTLY_IT_DATASET"); + Set actualSchemasPattern = new HashSet<>(); + while (rsPattern.next()) { + assertEquals(PROJECT_ID, rsPattern.getString("TABLE_CATALOG")); + actualSchemasPattern.add(rsPattern.getString("TABLE_SCHEM")); + } + assertTrue(actualSchemasPattern.contains("JDBC_NIGHTLY_IT_DATASET")); + assertEquals(1, actualSchemasPattern.size()); + + // Test case 3: Get schemas with catalog and schemaPattern matching "nonexistent" + ResultSet rsNoMatch = databaseMetaData.getSchemas(PROJECT_ID, 
"nonexistent"); + assertFalse(rsNoMatch.next()); + + // Test case 4: Get schemas with non-existent catalog + rsNoMatch = databaseMetaData.getSchemas("invalid-catalog", null); + assertFalse(rsNoMatch.next()); + } + + @Test + public void testDatabaseMetadataGetSchemasNoArgs() throws SQLException { + DatabaseMetaData databaseMetaData = bigQueryConnection.getMetaData(); + String expectedCatalog = bigQueryConnection.getCatalog(); + assertNotNull("Project ID (catalog) from connection should not be null", expectedCatalog); + + // Test case: Get all schemas (datasets) for the current project + try (ResultSet rsAll = databaseMetaData.getSchemas()) { + assertNotNull("ResultSet from getSchemas() should not be null", rsAll); + boolean foundTestDataset = false; + int rowCount = 0; + while (rsAll.next()) { + rowCount++; + assertEquals( + "TABLE_CATALOG should match the connection's project ID", + expectedCatalog, + rsAll.getString("TABLE_CATALOG")); + String schemaName = rsAll.getString("TABLE_SCHEM"); + assertNotNull("TABLE_SCHEM should not be null", schemaName); + if (DATASET.equals(schemaName) + || DATASET2.equals(schemaName) + || CONSTRAINTS_DATASET.equals(schemaName) + || "JDBC_TABLE_TYPES_TEST".equals(schemaName) + || "JDBC_INTEGRATION_DATASET".equals(schemaName)) { + foundTestDataset = true; + } + } + assertTrue("At least one of the known test datasets should be found", foundTestDataset); + assertTrue("Should retrieve at least one schema/dataset", rowCount > 0); + } + } + + @Test + public void testDatabaseMetaDataGetFunctions() throws SQLException { + DatabaseMetaData databaseMetaData = bigQueryConnection.getMetaData(); + String testSchema = "JDBC_TABLE_TYPES_TEST"; + String testCatalog = PROJECT_ID; + + Set expectedFunctionNames = + new HashSet<>( + Arrays.asList( + "complex_scalar_sql_udf", + "persistent_sql_udf_named_params", + "scalar_js_udf", + "scalar_sql_udf")); + + // Test 1: Get all functions from a specific schema + ResultSet rsAll = 
databaseMetaData.getFunctions(testCatalog, testSchema, null); + Set foundFunctionNames = new HashSet<>(); + int countAll = 0; + while (rsAll.next()) { + countAll++; + assertEquals(testCatalog, rsAll.getString("FUNCTION_CAT")); + assertEquals(testSchema, rsAll.getString("FUNCTION_SCHEM")); + String funcName = rsAll.getString("FUNCTION_NAME"); + foundFunctionNames.add(funcName); + assertNull(rsAll.getString("REMARKS")); + assertEquals(DatabaseMetaData.functionResultUnknown, rsAll.getShort("FUNCTION_TYPE")); + assertEquals(funcName, rsAll.getString("SPECIFIC_NAME")); + } + assertEquals( + "Should find all " + expectedFunctionNames.size() + " functions in " + testSchema, + expectedFunctionNames.size(), + countAll); + assertEquals(expectedFunctionNames, foundFunctionNames); + rsAll.close(); + + // Test 2: Get a specific function using functionNamePattern + String specificFunctionName = "scalar_sql_udf"; + ResultSet rsSpecific = + databaseMetaData.getFunctions(testCatalog, testSchema, specificFunctionName); + assertTrue("Should find the specific function " + specificFunctionName, rsSpecific.next()); + assertEquals(testCatalog, rsSpecific.getString("FUNCTION_CAT")); + assertEquals(testSchema, rsSpecific.getString("FUNCTION_SCHEM")); + assertEquals(specificFunctionName, rsSpecific.getString("FUNCTION_NAME")); + assertNull(rsSpecific.getString("REMARKS")); + assertEquals(DatabaseMetaData.functionResultUnknown, rsSpecific.getShort("FUNCTION_TYPE")); + assertEquals(specificFunctionName, rsSpecific.getString("SPECIFIC_NAME")); + assertFalse("Should only find one row for exact function match", rsSpecific.next()); + rsSpecific.close(); + + // Test 3: Get functions using a wildcard functionNamePattern "scalar%" + // Expected order due to sorting: scalar_js_udf, scalar_sql_udf + ResultSet rsWildcard = databaseMetaData.getFunctions(testCatalog, testSchema, "scalar%"); + assertTrue("Should find functions matching 'scalar%'", rsWildcard.next()); + assertEquals("scalar_js_udf", 
rsWildcard.getString("FUNCTION_NAME")); + assertEquals(DatabaseMetaData.functionResultUnknown, rsWildcard.getShort("FUNCTION_TYPE")); + + assertTrue("Should find the second function matching 'scalar%'", rsWildcard.next()); + assertEquals("scalar_sql_udf", rsWildcard.getString("FUNCTION_NAME")); + assertEquals(DatabaseMetaData.functionResultUnknown, rsWildcard.getShort("FUNCTION_TYPE")); + assertFalse("Should be no more functions matching 'scalar%'", rsWildcard.next()); + rsWildcard.close(); + + // Test 4: Schema pattern with wildcard + ResultSet rsSchemaWildcard = + databaseMetaData.getFunctions(testCatalog, "JDBC_TABLE_TYPES_T%", "complex_scalar_sql_udf"); + assertTrue("Should find function with schema wildcard", rsSchemaWildcard.next()); + assertEquals(testSchema, rsSchemaWildcard.getString("FUNCTION_SCHEM")); + assertEquals("complex_scalar_sql_udf", rsSchemaWildcard.getString("FUNCTION_NAME")); + assertFalse( + "Should only find one row for this schema wildcard and specific function", + rsSchemaWildcard.next()); + rsSchemaWildcard.close(); + + // Test 5: Non-existent function + ResultSet rsNonExistentFunc = + databaseMetaData.getFunctions(testCatalog, testSchema, "non_existent_function_xyz123"); + assertFalse("Should not find a non-existent function", rsNonExistentFunc.next()); + rsNonExistentFunc.close(); + + // Test 6: Non-existent schema + ResultSet rsNonExistentSchema = + databaseMetaData.getFunctions(testCatalog, "NON_EXISTENT_SCHEMA_XYZ123", null); + assertFalse("Should not find functions in a non-existent schema", rsNonExistentSchema.next()); + rsNonExistentSchema.close(); + + // Test 7: Empty schema pattern + ResultSet rsEmptySchema = databaseMetaData.getFunctions(testCatalog, "", null); + assertFalse("Empty schema pattern should return no results", rsEmptySchema.next()); + rsEmptySchema.close(); + + // Test 8: Empty function name pattern + ResultSet rsEmptyFunction = databaseMetaData.getFunctions(testCatalog, testSchema, ""); + assertFalse("Empty 
function name pattern should return no results", rsEmptyFunction.next()); + rsEmptyFunction.close(); + + // Test 9: Null catalog + ResultSet rsNullCatalog = databaseMetaData.getFunctions(null, testSchema, null); + assertFalse("Null catalog should return no results", rsNullCatalog.next()); + rsNullCatalog.close(); + } + + @Test + public void testDatabaseMetadataGetFunctionColumns() throws SQLException { + DatabaseMetaData databaseMetaData = bigQueryConnection.getMetaData(); + String testCatalog = PROJECT_ID; + String testSchema = "JDBC_TABLE_TYPES_TEST"; + + // Test Case 1: Specific function 'scalar_sql_udf', specific column 'x' + String specificFunction1 = "scalar_sql_udf"; + String specificColumn1 = "x"; + ResultSet rs = + databaseMetaData.getFunctionColumns( + testCatalog, testSchema, specificFunction1, specificColumn1); + + assertTrue("Should find column 'x' for function 'scalar_sql_udf'", rs.next()); + assertEquals(testCatalog, rs.getString("FUNCTION_CAT")); + assertEquals(testSchema, rs.getString("FUNCTION_SCHEM")); + assertEquals(specificFunction1, rs.getString("FUNCTION_NAME")); + assertEquals(specificColumn1, rs.getString("COLUMN_NAME")); + assertEquals(DatabaseMetaData.functionColumnUnknown, rs.getShort("COLUMN_TYPE")); + assertEquals(Types.BIGINT, rs.getInt("DATA_TYPE")); + assertEquals("BIGINT", rs.getString("TYPE_NAME")); + assertEquals(19, rs.getInt("PRECISION")); + assertEquals(null, rs.getObject("LENGTH")); + assertTrue(rs.wasNull()); + assertEquals(0, rs.getShort("SCALE")); + assertEquals(10, rs.getShort("RADIX")); + assertEquals(DatabaseMetaData.functionNullableUnknown, rs.getShort("NULLABLE")); + assertNull(rs.getString("REMARKS")); + assertEquals(null, rs.getObject("CHAR_OCTET_LENGTH")); + assertTrue(rs.wasNull()); + assertEquals(1, rs.getInt("ORDINAL_POSITION")); + assertEquals("", rs.getString("IS_NULLABLE")); + assertEquals(specificFunction1, rs.getString("SPECIFIC_NAME")); + assertFalse("Should only find one row for exact column match", 
rs.next()); + rs.close(); + + // Test Case 2: Specific function 'complex_scalar_sql_udf', specific column 'arr' + String specificFunction2 = "complex_scalar_sql_udf"; + String specificColumn2 = "arr"; + rs = + databaseMetaData.getFunctionColumns( + testCatalog, testSchema, specificFunction2, specificColumn2); + assertTrue("Should find column 'arr' for function 'complex_scalar_sql_udf'", rs.next()); + assertEquals(testCatalog, rs.getString("FUNCTION_CAT")); + assertEquals(testSchema, rs.getString("FUNCTION_SCHEM")); + assertEquals(specificFunction2, rs.getString("FUNCTION_NAME")); + assertEquals(specificColumn2, rs.getString("COLUMN_NAME")); + assertEquals(DatabaseMetaData.functionColumnUnknown, rs.getShort("COLUMN_TYPE")); + assertEquals(Types.ARRAY, rs.getInt("DATA_TYPE")); + assertEquals("ARRAY", rs.getString("TYPE_NAME")); + assertEquals(null, rs.getObject("PRECISION")); + assertTrue(rs.wasNull()); + assertEquals(null, rs.getObject("LENGTH")); + assertTrue(rs.wasNull()); + assertEquals(null, rs.getObject("SCALE")); + assertTrue(rs.wasNull()); + assertEquals(null, rs.getObject("RADIX")); + assertTrue(rs.wasNull()); + assertEquals(DatabaseMetaData.functionNullableUnknown, rs.getShort("NULLABLE")); + assertNull(rs.getString("REMARKS")); + assertEquals(null, rs.getObject("CHAR_OCTET_LENGTH")); + assertTrue(rs.wasNull()); + assertEquals(1, rs.getInt("ORDINAL_POSITION")); + assertEquals("", rs.getString("IS_NULLABLE")); + assertEquals(specificFunction2, rs.getString("SPECIFIC_NAME")); + assertFalse("Should only find one row for exact column match", rs.next()); + rs.close(); + + // Test Case 3: All columns for 'persistent_sql_udf_named_params' (sorted by ordinal position) + String specificFunction3 = "persistent_sql_udf_named_params"; + rs = databaseMetaData.getFunctionColumns(testCatalog, testSchema, specificFunction3, null); + assertTrue("Should find columns for " + specificFunction3, rs.next()); + assertEquals(specificFunction3, rs.getString("FUNCTION_NAME")); + 
assertEquals("value1", rs.getString("COLUMN_NAME")); // Ordinal Position 1 + assertEquals(DatabaseMetaData.functionColumnUnknown, rs.getShort("COLUMN_TYPE")); + assertEquals(Types.BIGINT, rs.getInt("DATA_TYPE")); + assertEquals("BIGINT", rs.getString("TYPE_NAME")); + assertEquals(1, rs.getInt("ORDINAL_POSITION")); + + assertTrue("Should find second column for " + specificFunction3, rs.next()); + assertEquals(specificFunction3, rs.getString("FUNCTION_NAME")); + assertEquals("value-two", rs.getString("COLUMN_NAME")); // Ordinal Position 2 + assertEquals(DatabaseMetaData.functionColumnUnknown, rs.getShort("COLUMN_TYPE")); + assertEquals(Types.NVARCHAR, rs.getInt("DATA_TYPE")); + assertEquals("NVARCHAR", rs.getString("TYPE_NAME")); + assertEquals(2, rs.getInt("ORDINAL_POSITION")); + assertFalse("Should be no more columns for " + specificFunction3, rs.next()); + rs.close(); + + // Test Case 4: Wildcard for function name "scalar%", specific column name "x" + rs = databaseMetaData.getFunctionColumns(testCatalog, testSchema, "scalar%", "x"); + assertTrue("Should find column 'x' for functions matching 'scalar%'", rs.next()); + assertEquals("scalar_sql_udf", rs.getString("FUNCTION_NAME")); + assertEquals("x", rs.getString("COLUMN_NAME")); + assertEquals(1, rs.getInt("ORDINAL_POSITION")); + assertFalse("Should be no more columns named 'x' for functions matching 'scalar%'", rs.next()); + rs.close(); + + // Test Case 5: Wildcard for column name "%" for 'scalar_js_udf' + String specificFunction4 = "scalar_js_udf"; + rs = databaseMetaData.getFunctionColumns(testCatalog, testSchema, specificFunction4, "%"); + assertTrue("Should find columns for " + specificFunction4 + " with wildcard", rs.next()); + assertEquals(specificFunction4, rs.getString("FUNCTION_NAME")); + assertEquals("name", rs.getString("COLUMN_NAME")); // Ordinal Position 1 + assertEquals(1, rs.getInt("ORDINAL_POSITION")); + + assertTrue("Should find second column for " + specificFunction4 + " with wildcard", 
rs.next()); + assertEquals(specificFunction4, rs.getString("FUNCTION_NAME")); + assertEquals("age", rs.getString("COLUMN_NAME")); // Ordinal Position 2 + assertEquals(2, rs.getInt("ORDINAL_POSITION")); + assertFalse("Should be no more columns for " + specificFunction4 + " with wildcard", rs.next()); + rs.close(); + + // Test Case 6: Non-existent function + rs = + databaseMetaData.getFunctionColumns( + testCatalog, testSchema, "non_existent_function_xyz", null); + assertFalse("Should not find columns for a non-existent function", rs.next()); + rs.close(); + } + + @Test + public void testRangeDataTypeWithJsonResultSet() throws SQLException { + String RANGE_DATA_TABLE = "JDBC_RANGE_DATA_TEST_TABLE_" + random.nextInt(99); + String range_date_literal = "RANGE '[2020-01-01, 2020-01-31)'"; + String range_datetime_literal = "RANGE '[2020-01-01 12:00:00, 2020-01-31 12:00:00)'"; + String range_timestamp_literal = + "RANGE '[2020-01-01 12:00:00+08, 2020-01-31 12:00:00+08)'"; + + String createRangeTable = + String.format( + "CREATE OR REPLACE TABLE %s.%s (`id` INTEGER, `range_date` RANGE," + + " `range_date_time` RANGE, `range_timestamp` RANGE);", + DATASET, RANGE_DATA_TABLE); + String insertQuery = + String.format( + "INSERT INTO %s.%s (id, range_date, range_date_time, range_timestamp) VALUES (1, %s," + + " %s, %s);", + DATASET, + RANGE_DATA_TABLE, + range_date_literal, + range_datetime_literal, + range_timestamp_literal); + String selectQuery = + String.format( + "SELECT id, range_date, range_date_time, range_timestamp FROM %s.%s WHERE id = 1;", + DATASET, RANGE_DATA_TABLE); + + boolean status = bigQueryStatement.execute(createRangeTable); + assertFalse(status); + + status = bigQueryStatement.execute(insertQuery); + assertFalse(status); + + ResultSet resultSet = bigQueryStatement.executeQuery(selectQuery); + + Integer numRows = 0; + String actual_range_date = ""; + String actual_range_datetime = ""; + String actual_range_timestamp = ""; + + while (resultSet.next()) { + 
numRows++; + actual_range_date = resultSet.getString("range_date"); + actual_range_datetime = resultSet.getString("range_date_time"); + actual_range_timestamp = resultSet.getString("range_timestamp"); + } + + String expected_range_date = "[2020-01-01, 2020-01-31)"; + String expected_range_datetime = "[2020-01-01T12:00:00, 2020-01-31T12:00:00)"; + String expected_range_timestamp = "[1577851200.000000, 1580443200.000000)"; + + assertThat(numRows).isEqualTo(1); + assertThat(actual_range_date).isEqualTo(expected_range_date); + assertThat(actual_range_datetime).isEqualTo(expected_range_datetime); + assertThat(actual_range_timestamp).isEqualTo(expected_range_timestamp); + + bigQueryStatement.execute( + String.format("DROP TABLE IF EXISTS %S.%s", DATASET, RANGE_DATA_TABLE)); + } + + @Test + public void testRangeDataTypeWithArrowResultSet() throws SQLException { + String selectQuery = + "select * from `DATATYPERANGETEST.RangeIntervalTestTable` order by intColumn limit 5000;"; + + String connection_uri = + "jdbc:bigquery://https://www.googleapis.com/bigquery/v2:443;" + + "OAuthType=3;ProjectId=" + + PROJECT_ID + + ";MaxResults=500;HighThroughputActivationRatio=1;" + + "HighThroughputMinTableSize=100;" + + "EnableHighThroughputAPI=1;JobCreationMode=1;"; + + // Read data via JDBC + Connection connection = DriverManager.getConnection(connection_uri); + Statement statement = connection.createStatement(); + ResultSet resultSet = statement.executeQuery(selectQuery); + assertTrue(resultSet.getClass().getName().contains("BigQueryArrowResultSet")); + resultSet.next(); + assertEquals("[2024-07-14, 2024-09-23)", resultSet.getString("rangeField")); + connection.close(); + } + + @Test + public void testPrepareCallSql() throws SQLException { + CallableStatement callableStatement = this.bigQueryConnection.prepareCall("call testProc"); + assertNotNull(callableStatement); + callableStatement.close(); + } + + @Test + public void testRegisterOutParamIndex() throws SQLException { + 
CallableStatement callableStatement = this.bigQueryConnection.prepareCall("call testProc('?')"); + assertNotNull(callableStatement); + callableStatement.registerOutParameter(1, Types.VARCHAR); + callableStatement.close(); + } + + @Test + public void testRegisterOutParamName() throws SQLException { + CallableStatement callableStatement = this.bigQueryConnection.prepareCall("call testProc('?')"); + assertNotNull(callableStatement); + callableStatement.registerOutParameter("ParamKey", Types.VARCHAR); + callableStatement.close(); + } + + @Test + public void testRegisterOutParamIndexScale() throws SQLException { + CallableStatement callableStatement = this.bigQueryConnection.prepareCall("call testProc('?')"); + assertNotNull(callableStatement); + callableStatement.registerOutParameter(1, Types.NUMERIC, 2); + callableStatement.close(); + } + + @Test + public void testRegisterOutParamNameScale() throws SQLException { + CallableStatement callableStatement = this.bigQueryConnection.prepareCall("call testProc('?')"); + assertNotNull(callableStatement); + callableStatement.registerOutParameter("ParamKey", Types.NUMERIC, 2); + callableStatement.close(); + } + + @Test + public void testPrepareCallSqlResultSetTypeConcurrency() throws SQLException { + CallableStatement callableStatement = + this.bigQueryConnection.prepareCall( + "call testProc", ResultSet.TYPE_FORWARD_ONLY, ResultSet.CONCUR_READ_ONLY); + assertNotNull(callableStatement); + callableStatement.close(); + } + + @Test + public void testPrepareCallConcurrencyRegisterOutParamIndex() throws SQLException { + CallableStatement callableStatement = + this.bigQueryConnection.prepareCall( + "call testProc('?')", ResultSet.TYPE_FORWARD_ONLY, ResultSet.CONCUR_READ_ONLY); + assertNotNull(callableStatement); + callableStatement.registerOutParameter(1, Types.VARCHAR); + callableStatement.close(); + } + + @Test + public void testPrepareCallConcurrencyRegisterOutParamName() throws SQLException { + CallableStatement callableStatement 
= + this.bigQueryConnection.prepareCall( + "call testProc('?')", ResultSet.TYPE_FORWARD_ONLY, ResultSet.CONCUR_READ_ONLY); + assertNotNull(callableStatement); + callableStatement.registerOutParameter("ParamKey", Types.VARCHAR); + callableStatement.close(); + } + + @Test + public void testPrepareCallConcurrencyRegisterOutParamIndexScale() throws SQLException { + CallableStatement callableStatement = + this.bigQueryConnection.prepareCall( + "call testProc('?')", ResultSet.TYPE_FORWARD_ONLY, ResultSet.CONCUR_READ_ONLY); + assertNotNull(callableStatement); + callableStatement.registerOutParameter(1, Types.NUMERIC, 2); + callableStatement.close(); + } + + @Test + public void testPrepareCallConcurrencyRegisterOutParamNameScale() throws SQLException { + CallableStatement callableStatement = + this.bigQueryConnection.prepareCall( + "call testProc('?')", ResultSet.TYPE_FORWARD_ONLY, ResultSet.CONCUR_READ_ONLY); + assertNotNull(callableStatement); + callableStatement.registerOutParameter("ParamKey", Types.NUMERIC, 2); + callableStatement.close(); + } + + @Test + public void testPrepareCallSqlResultSetTypeConcurrencyHoldability() throws SQLException { + CallableStatement callableStatement = + this.bigQueryConnection.prepareCall( + "call testProc", + ResultSet.TYPE_FORWARD_ONLY, + ResultSet.CONCUR_READ_ONLY, + ResultSet.CLOSE_CURSORS_AT_COMMIT); + assertNotNull(callableStatement); + callableStatement.close(); + } + + @Test + public void testPrepareCallHoldabilityRegisterOutParamIndex() throws SQLException { + CallableStatement callableStatement = + this.bigQueryConnection.prepareCall( + "call testProc('?')", + ResultSet.TYPE_FORWARD_ONLY, + ResultSet.CONCUR_READ_ONLY, + ResultSet.CLOSE_CURSORS_AT_COMMIT); + assertNotNull(callableStatement); + callableStatement.registerOutParameter(1, Types.VARCHAR); + callableStatement.close(); + } + + @Test + public void testPrepareCallHoldabilityRegisterOutParamName() throws SQLException { + CallableStatement callableStatement = + 
this.bigQueryConnection.prepareCall( + "call testProc('?')", + ResultSet.TYPE_FORWARD_ONLY, + ResultSet.CONCUR_READ_ONLY, + ResultSet.CLOSE_CURSORS_AT_COMMIT); + assertNotNull(callableStatement); + callableStatement.registerOutParameter("ParamKey", Types.VARCHAR); + callableStatement.close(); + } + + @Test + public void testPrepareCallHoldabilityRegisterOutParamIndexScale() throws SQLException { + CallableStatement callableStatement = + this.bigQueryConnection.prepareCall( + "call testProc('?')", + ResultSet.TYPE_FORWARD_ONLY, + ResultSet.CONCUR_READ_ONLY, + ResultSet.CLOSE_CURSORS_AT_COMMIT); + assertNotNull(callableStatement); + callableStatement.close(); + } + + @Test + public void testPrepareCallHoldabilityRegisterOutParamNameScale() throws SQLException { + CallableStatement callableStatement = + this.bigQueryConnection.prepareCall( + "call testProc('?')", + ResultSet.TYPE_FORWARD_ONLY, + ResultSet.CONCUR_READ_ONLY, + ResultSet.CLOSE_CURSORS_AT_COMMIT); + assertNotNull(callableStatement); + callableStatement.registerOutParameter("ParamKey", Types.NUMERIC, 2); + callableStatement.close(); + } + + @Test + public void testPrepareCallFailureResultSetType() throws SQLException { + assertThrows( + BigQueryJdbcSqlFeatureNotSupportedException.class, + () -> + this.bigQueryConnection.prepareCall( + "call testProc", ResultSet.TYPE_SCROLL_SENSITIVE, ResultSet.CONCUR_READ_ONLY)); + } + + @Test + public void testPrepareCallFailureResultSetConcurrency() throws SQLException { + assertThrows( + BigQueryJdbcSqlFeatureNotSupportedException.class, + () -> + this.bigQueryConnection.prepareCall( + "call testProc", ResultSet.TYPE_FORWARD_ONLY, ResultSet.CONCUR_UPDATABLE)); + } + + @Test + public void testPrepareCallFailureResultSetHoldability() throws SQLException { + assertThrows( + BigQueryJdbcSqlFeatureNotSupportedException.class, + () -> + this.bigQueryConnection.prepareCall( + "call testProc", + ResultSet.TYPE_FORWARD_ONLY, + ResultSet.CONCUR_READ_ONLY, + 
ResultSet.HOLD_CURSORS_OVER_COMMIT)); + } + + // Integration tests for CallableStatement Setters and Getters + @Test + public void testSetterGetterBigDecimal() throws SQLException { + CallableStatement callableStatement = this.bigQueryConnection.prepareCall("call testProc('?')"); + assertNotNull(callableStatement); + BigDecimal expected = new BigDecimal(12344); + callableStatement.setBigDecimal(CALLABLE_STMT_PARAM_KEY, expected); + BigDecimal actual = callableStatement.getBigDecimal(CALLABLE_STMT_PARAM_KEY); + assertEquals(expected, actual); + } + + @Test + public void testSetterGetterBoolean() throws SQLException { + CallableStatement callableStatement = this.bigQueryConnection.prepareCall("call testProc('?')"); + assertNotNull(callableStatement); + Boolean expected = true; + callableStatement.setBoolean(CALLABLE_STMT_PARAM_KEY, expected); + Boolean actual = callableStatement.getBoolean(CALLABLE_STMT_PARAM_KEY); + assertEquals(expected, actual); + } + + @Test + public void testSetterGetterByte() throws SQLException { + CallableStatement callableStatement = this.bigQueryConnection.prepareCall("call testProc('?')"); + assertNotNull(callableStatement); + Byte expected = "hello".getBytes()[0]; + callableStatement.setByte(CALLABLE_STMT_PARAM_KEY, expected); + Byte actual = callableStatement.getByte(CALLABLE_STMT_PARAM_KEY); + assertEquals(expected, actual); + } + + @Test + public void testSetterGetterBytes() throws SQLException { + CallableStatement callableStatement = this.bigQueryConnection.prepareCall("call testProc('?')"); + assertNotNull(callableStatement); + byte[] expected = "hello".getBytes(); + callableStatement.setBytes(CALLABLE_STMT_PARAM_KEY, expected); + byte[] actual = callableStatement.getBytes(CALLABLE_STMT_PARAM_KEY); + assertEquals(expected, actual); + } + + @Test + public void testSetterGetterDate() throws SQLException { + CallableStatement callableStatement = this.bigQueryConnection.prepareCall("call testProc('?')"); + 
    assertNotNull(callableStatement);
    Date expected = new Date(1234567);
    callableStatement.setDate(CALLABLE_STMT_PARAM_KEY, expected);
    Date actual = callableStatement.getDate(CALLABLE_STMT_PARAM_KEY);
    assertEquals(expected, actual);
  }

  /** setDate/getDate with an explicit Calendar round-trips the same value. */
  @Test
  public void testSetterGetterDateCal() throws SQLException {
    CallableStatement callableStatement = this.bigQueryConnection.prepareCall("call testProc('?')");
    assertNotNull(callableStatement);
    Date expected = new Date(1L);
    Calendar cal = Calendar.getInstance();
    callableStatement.setDate(CALLABLE_STMT_PARAM_KEY, expected, cal);
    Date actual = callableStatement.getDate(CALLABLE_STMT_PARAM_KEY, cal);
    assertEquals(expected, actual);
  }

  @Test
  public void testSetterGetterDouble() throws SQLException {
    CallableStatement callableStatement = this.bigQueryConnection.prepareCall("call testProc('?')");
    assertNotNull(callableStatement);
    Double expected = 123.2345;
    callableStatement.setDouble(CALLABLE_STMT_PARAM_KEY, expected);
    Double actual = callableStatement.getDouble(CALLABLE_STMT_PARAM_KEY);
    assertEquals(expected, actual);
  }

  @Test
  public void testSetterGetterFloat() throws SQLException {
    CallableStatement callableStatement = this.bigQueryConnection.prepareCall("call testProc('?')");
    assertNotNull(callableStatement);
    Float expected = 123.2345F;
    callableStatement.setFloat(CALLABLE_STMT_PARAM_KEY, expected);
    Float actual = callableStatement.getFloat(CALLABLE_STMT_PARAM_KEY);
    assertEquals(expected, actual);
  }

  @Test
  public void testSetterGetterInt() throws SQLException {
    CallableStatement callableStatement = this.bigQueryConnection.prepareCall("call testProc('?')");
    assertNotNull(callableStatement);
    Integer expected = 123;
    callableStatement.setInt(CALLABLE_STMT_PARAM_KEY, expected);
    Integer actual = callableStatement.getInt(CALLABLE_STMT_PARAM_KEY);
    assertEquals(expected, actual);
  }

  @Test
  public void testSetterGetterLong() throws SQLException {
    CallableStatement callableStatement = this.bigQueryConnection.prepareCall("call testProc('?')");
    assertNotNull(callableStatement);
    Long expected = 123L;
    callableStatement.setLong(CALLABLE_STMT_PARAM_KEY, expected);
    Long actual = callableStatement.getLong(CALLABLE_STMT_PARAM_KEY);
    assertEquals(expected, actual);
  }

  @Test
  public void testSetterGetterNString() throws SQLException {
    CallableStatement callableStatement = this.bigQueryConnection.prepareCall("call testProc('?')");
    assertNotNull(callableStatement);
    String expected = "heelo";
    callableStatement.setNString(CALLABLE_STMT_PARAM_KEY, expected);
    String actual = callableStatement.getNString(CALLABLE_STMT_PARAM_KEY);
    assertEquals(expected, actual);
  }

  @Test
  public void testSetterGetterObject() throws SQLException {
    CallableStatement callableStatement = this.bigQueryConnection.prepareCall("call testProc('?')");
    assertNotNull(callableStatement);
    String expected = "heelo";
    callableStatement.setObject(CALLABLE_STMT_PARAM_KEY, expected);
    Object actual = callableStatement.getObject(CALLABLE_STMT_PARAM_KEY);
    assertEquals(expected, actual);
  }

  /** setObject with an explicit java.sql.Types target type. */
  @Test
  public void testSetterGetterObjectWithSQLType() throws SQLException {
    CallableStatement callableStatement = this.bigQueryConnection.prepareCall("call testProc('?')");
    assertNotNull(callableStatement);
    String expected = "heelo";
    callableStatement.setObject(CALLABLE_STMT_PARAM_KEY, expected, Types.NVARCHAR);
    Object actual = callableStatement.getObject(CALLABLE_STMT_PARAM_KEY);
    assertEquals(expected, actual);
  }

  /** setObject with an explicit target type and scale. */
  @Test
  public void testSetterGetterObjectWithSqlTypeAndScale() throws SQLException {
    CallableStatement callableStatement = this.bigQueryConnection.prepareCall("call testProc('?')");
    assertNotNull(callableStatement);
    String expected = "heelo";
    callableStatement.setObject(CALLABLE_STMT_PARAM_KEY, expected, Types.NVARCHAR, 0);
    Object actual =
        callableStatement.getObject(CALLABLE_STMT_PARAM_KEY);
    assertEquals(expected, actual);
  }

  @Test
  public void testSetterGetterString() throws SQLException {
    CallableStatement callableStatement = this.bigQueryConnection.prepareCall("call testProc('?')");
    assertNotNull(callableStatement);
    String expected = "123";
    callableStatement.setString(CALLABLE_STMT_PARAM_KEY, expected);
    String actual = callableStatement.getString(CALLABLE_STMT_PARAM_KEY);
    assertEquals(expected, actual);
  }

  @Test
  public void testSetterGetterTime() throws SQLException {
    CallableStatement callableStatement = this.bigQueryConnection.prepareCall("call testProc('?')");
    assertNotNull(callableStatement);
    Time expected = new Time(1234567);
    callableStatement.setTime(CALLABLE_STMT_PARAM_KEY, expected);
    Time actual = callableStatement.getTime(CALLABLE_STMT_PARAM_KEY);
    assertEquals(expected, actual);
  }

  @Test
  public void testSetterGetterTimeCal() throws SQLException {
    CallableStatement callableStatement = this.bigQueryConnection.prepareCall("call testProc('?')");
    assertNotNull(callableStatement);
    Time expected = new Time(1L);
    Calendar cal = Calendar.getInstance();
    callableStatement.setTime(CALLABLE_STMT_PARAM_KEY, expected, cal);
    Time actual = callableStatement.getTime(CALLABLE_STMT_PARAM_KEY, cal);
    assertEquals(expected, actual);
  }

  @Test
  public void testSetterGetterTimestamp() throws SQLException {
    CallableStatement callableStatement = this.bigQueryConnection.prepareCall("call testProc('?')");
    assertNotNull(callableStatement);
    Timestamp expected = new Timestamp(1234567);
    callableStatement.setTimestamp(CALLABLE_STMT_PARAM_KEY, expected);
    Timestamp actual = callableStatement.getTimestamp(CALLABLE_STMT_PARAM_KEY);
    assertEquals(expected, actual);
  }

  @Test
  public void testSetterGetterTimestampCal() throws SQLException {
    CallableStatement callableStatement = this.bigQueryConnection.prepareCall("call testProc('?')");
    assertNotNull(callableStatement);
    Timestamp expected = new Timestamp(1L);
    Calendar cal = Calendar.getInstance();
    callableStatement.setTimestamp(CALLABLE_STMT_PARAM_KEY, expected, cal);
    Timestamp actual = callableStatement.getTimestamp(CALLABLE_STMT_PARAM_KEY, cal);
    assertEquals(expected, actual);
  }

  /** A pooled connection can be obtained from a fully configured data source URL. */
  @Test
  public void testPooledConnectionDataSourceSuccess() throws SQLException {
    String connectionUrl =
        "jdbc:bigquery://https://www.googleapis.com/bigquery/v2:443;OAuthType=3;ProjectId=testProject;ConnectionPoolSize=20;ListenerPoolSize=20;";

    PooledConnectionDataSource pooledDataSource = new PooledConnectionDataSource();
    pooledDataSource.setURL(connectionUrl);

    PooledConnection pooledConnection = pooledDataSource.getPooledConnection();
    assertNotNull(pooledConnection);
  }

  /** getPooledConnection without a URL configured must fail. */
  @Test
  public void testPooledConnectionDataSourceFailNoConnectionURl() throws SQLException {
    PooledConnectionDataSource pooledDataSource = new PooledConnectionDataSource();

    assertThrows(BigQueryJdbcException.class, () -> pooledDataSource.getPooledConnection());
  }

  /** A non-numeric ListenerPoolSize property surfaces as NumberFormatException. */
  @Test
  public void testPooledConnectionDataSourceFailInvalidConnectionURl() {
    String connectionUrl =
        "jdbc:bigquery://https://www.googleapis.com/bigquery/v2:443;OAuthType=3;ProjectId=testProject;"
            + "ListenerPoolSize=invalid";
    PooledConnectionDataSource pooledDataSource = new PooledConnectionDataSource();
    pooledDataSource.setURL(connectionUrl);

    assertThrows(NumberFormatException.class, () -> pooledDataSource.getPooledConnection());
  }

  /** A freshly added listener has seen no close/error events. */
  @Test
  public void testPooledConnectionAddConnectionListener() throws SQLException {
    String connectionUrl =
        "jdbc:bigquery://https://www.googleapis.com/bigquery/v2:443;OAuthType=3;ProjectId=testProject;ConnectionPoolSize=20;ListenerPoolSize=20;";

    PooledConnectionDataSource pooledDataSource = new PooledConnectionDataSource();
    pooledDataSource.setURL(connectionUrl);

    PooledConnection pooledConnection =
 pooledDataSource.getPooledConnection();
    assertNotNull(pooledConnection);
    TestConnectionListener listener = new TestConnectionListener();
    pooledConnection.addConnectionEventListener(listener);
    assertEquals(0, listener.getConnectionClosedCount());
    assertEquals(0, listener.getConnectionErrorCount());
  }

  /** Removing a never-added listener is a no-op; counters stay at zero. */
  @Test
  public void testPooledConnectionRemoveConnectionListener() throws SQLException {
    String connectionUrl =
        "jdbc:bigquery://https://www.googleapis.com/bigquery/v2:443;OAuthType=3;ProjectId=testProject;ConnectionPoolSize=20;ListenerPoolSize=20;";

    PooledConnectionDataSource pooledDataSource = new PooledConnectionDataSource();
    pooledDataSource.setURL(connectionUrl);

    PooledConnection pooledConnection = pooledDataSource.getPooledConnection();
    assertNotNull(pooledConnection);
    TestConnectionListener listener = new TestConnectionListener();
    pooledConnection.removeConnectionEventListener(listener);
    assertEquals(0, listener.getConnectionClosedCount());
    assertEquals(0, listener.getConnectionErrorCount());
  }

  /** Closing the logical Connection fires exactly one connectionClosed event. */
  @Test
  public void testPooledConnectionConnectionClosed() throws SQLException {
    String connectionUrl =
        "jdbc:bigquery://https://www.googleapis.com/bigquery/v2:443;OAuthType=3;ProjectId=testProject;ConnectionPoolSize=20;ListenerPoolSize=20;";

    PooledConnectionDataSource pooledDataSource = new PooledConnectionDataSource();
    pooledDataSource.setURL(connectionUrl);

    PooledConnection pooledConnection = pooledDataSource.getPooledConnection();
    assertNotNull(pooledConnection);
    TestConnectionListener listener = new TestConnectionListener();
    pooledConnection.addConnectionEventListener(listener);
    assertEquals(0, listener.getConnectionClosedCount());
    assertEquals(0, listener.getConnectionErrorCount());

    Connection connection = pooledConnection.getConnection();
    assertNotNull(connection);
    assertFalse(connection.isClosed());

    connection.close();
    assertEquals(1, listener.getConnectionClosedCount());
    assertEquals(0, listener.getConnectionErrorCount());
  }

  /** Closing the PooledConnection itself also reports a close event to listeners. */
  @Test
  public void testPooledConnectionClose() throws SQLException {
    String connectionUrl =
        "jdbc:bigquery://https://www.googleapis.com/bigquery/v2:443;OAuthType=3;ProjectId=testProject;ConnectionPoolSize=20;ListenerPoolSize=20;";

    PooledConnectionDataSource pooledDataSource = new PooledConnectionDataSource();
    pooledDataSource.setURL(connectionUrl);

    PooledConnection pooledConnection = pooledDataSource.getPooledConnection();
    assertNotNull(pooledConnection);
    TestConnectionListener listener = new TestConnectionListener();
    pooledConnection.addConnectionEventListener(listener);
    assertEquals(0, listener.getConnectionClosedCount());
    assertEquals(0, listener.getConnectionErrorCount());

    pooledConnection.close();
    assertEquals(1, listener.getConnectionClosedCount());
    assertEquals(0, listener.getConnectionErrorCount());
  }

  /** Aborting the Connection fires connectionErrorOccurred rather than connectionClosed. */
  @Test
  public void testPooledConnectionConnectionError() throws SQLException {
    String connectionUrl =
        "jdbc:bigquery://https://www.googleapis.com/bigquery/v2:443;OAuthType=3;ProjectId=testProject;ConnectionPoolSize=20;ListenerPoolSize=20;";

    PooledConnectionDataSource pooledDataSource = new PooledConnectionDataSource();
    pooledDataSource.setURL(connectionUrl);

    PooledConnection pooledConnection = pooledDataSource.getPooledConnection();
    assertNotNull(pooledConnection);
    TestConnectionListener listener = new TestConnectionListener();
    pooledConnection.addConnectionEventListener(listener);
    assertEquals(0, listener.getConnectionClosedCount());
    assertEquals(0, listener.getConnectionErrorCount());

    Connection connection = pooledConnection.getConnection();
    assertNotNull(connection);
    assertFalse(connection.isClosed());

    ExecutorService executor = Executors.newFixedThreadPool(3);
    connection.abort(executor);
    assertEquals(0, listener.getConnectionClosedCount());
    assertEquals(1, listener.getConnectionErrorCount());

    executor.shutdown();
    connection.close();
    pooledConnection.close();
  }

  /** The driver's PooledConnectionListener starts with an empty pool. */
  @Test
  public void testPooledConnectionListenerAddListener() throws SQLException {
    String connectionUrl =
        "jdbc:bigquery://https://www.googleapis.com/bigquery/v2:443;OAuthType=3;ProjectId=testProject;ConnectionPoolSize=20;ListenerPoolSize=20;";

    PooledConnectionDataSource pooledDataSource = new PooledConnectionDataSource();
    pooledDataSource.setURL(connectionUrl);

    PooledConnection pooledConnection = pooledDataSource.getPooledConnection();
    assertNotNull(pooledConnection);
    PooledConnectionListener listener = new PooledConnectionListener(DEFAULT_CONN_POOL_SIZE);
    pooledConnection.addConnectionEventListener(listener);
    assertTrue(listener.isConnectionPoolEmpty());
    pooledConnection.close();
  }

  /** Removing the pool listener leaves the (still empty) pool untouched. */
  @Test
  public void testPooledConnectionListenerRemoveListener() throws SQLException {
    String connectionUrl =
        "jdbc:bigquery://https://www.googleapis.com/bigquery/v2:443;OAuthType=3;ProjectId=testProject;ConnectionPoolSize=20;ListenerPoolSize=20;";

    PooledConnectionDataSource pooledDataSource = new PooledConnectionDataSource();
    pooledDataSource.setURL(connectionUrl);

    PooledConnection pooledConnection = pooledDataSource.getPooledConnection();
    assertNotNull(pooledConnection);
    PooledConnectionListener listener = new PooledConnectionListener(DEFAULT_CONN_POOL_SIZE);
    pooledConnection.addConnectionEventListener(listener);
    assertTrue(listener.isConnectionPoolEmpty());

    pooledConnection.removeConnectionEventListener(listener);
    assertTrue(listener.isConnectionPoolEmpty());
    pooledConnection.close();
  }

  /** Closing a logical connection returns the physical connection to the pool. */
  @Test
  public void testPooledConnectionListenerCloseConnection() throws SQLException {
    String connectionUrl =
        "jdbc:bigquery://https://www.googleapis.com/bigquery/v2:443;OAuthType=3;ProjectId=testProject;ConnectionPoolSize=20;ListenerPoolSize=20;";

    PooledConnectionDataSource pooledDataSource = new PooledConnectionDataSource();
    pooledDataSource.setURL(connectionUrl);

    PooledConnection pooledConnection = pooledDataSource.getPooledConnection();
    assertNotNull(pooledConnection);
    PooledConnectionListener listener = new PooledConnectionListener(DEFAULT_CONN_POOL_SIZE);
    pooledConnection.addConnectionEventListener(listener);
    assertTrue(listener.isConnectionPoolEmpty());

    Connection connection = pooledConnection.getConnection();
    assertNotNull(connection);
    assertFalse(connection.isClosed());

    connection.close();
    assertFalse(listener.isConnectionPoolEmpty());
    pooledConnection.close();
  }

  /** Closing the PooledConnection pools its physical connection as well. */
  @Test
  public void testPooledConnectionListenerClosePooledConnection() throws SQLException {
    String connectionUrl =
        "jdbc:bigquery://https://www.googleapis.com/bigquery/v2:443;OAuthType=3;ProjectId=testProject;ConnectionPoolSize=20;ListenerPoolSize=20;";

    PooledConnectionDataSource pooledDataSource = new PooledConnectionDataSource();
    pooledDataSource.setURL(connectionUrl);

    PooledConnection pooledConnection = pooledDataSource.getPooledConnection();
    assertNotNull(pooledConnection);
    PooledConnectionListener listener = new PooledConnectionListener(DEFAULT_CONN_POOL_SIZE);
    pooledConnection.addConnectionEventListener(listener);
    assertTrue(listener.isConnectionPoolEmpty());

    pooledConnection.close();
    assertFalse(listener.isConnectionPoolEmpty());
  }

  /** An aborted (errored) connection is NOT returned to the pool. */
  @Test
  public void testPooledConnectionListenerConnectionError() throws SQLException {
    String connectionUrl =
        "jdbc:bigquery://https://www.googleapis.com/bigquery/v2:443;OAuthType=3;ProjectId=testProject;ConnectionPoolSize=20;ListenerPoolSize=20;";

    PooledConnectionDataSource pooledDataSource = new PooledConnectionDataSource();
    pooledDataSource.setURL(connectionUrl);

    PooledConnection pooledConnection = pooledDataSource.getPooledConnection();
    assertNotNull(pooledConnection);
    PooledConnectionListener listener = new PooledConnectionListener(DEFAULT_CONN_POOL_SIZE);
    pooledConnection.addConnectionEventListener(listener);
    assertTrue(listener.isConnectionPoolEmpty());

    Connection connection = pooledConnection.getConnection();
    assertNotNull(connection);
    assertFalse(connection.isClosed());

    ExecutorService executor = Executors.newFixedThreadPool(3);
    connection.abort(executor);
    assertTrue(listener.isConnectionPoolEmpty());

    executor.shutdown();
    connection.close();
    pooledConnection.close();
  }

  /** End-to-end pooling with the default pool size. */
  @Test
  public void testExecuteQueryWithConnectionPoolingEnabledDefaultPoolSize() throws SQLException {
    String connectionURL =
        "jdbc:bigquery://https://www.googleapis.com/bigquery/v2:443;"
            + "OAuthType=3;ProjectId="
            + PROJECT_ID
            + ";";
    assertConnectionPoolingResults(connectionURL, DEFAULT_CONN_POOL_SIZE);
  }

  /** End-to-end pooling with an explicit ConnectionPoolSize property. */
  @Test
  public void testExecuteQueryWithConnectionPoolingEnabledCustomPoolSize() throws SQLException {
    String connectionURL =
        "jdbc:bigquery://https://www.googleapis.com/bigquery/v2:443;"
            + "OAuthType=3;ProjectId="
            + PROJECT_ID
            + ";"
            + "ConnectionPoolSize="
            + CUSTOM_CONN_POOL_SIZE
            + ";";
    assertConnectionPoolingResults(connectionURL, CUSTOM_CONN_POOL_SIZE);
  }

  /**
   * Shared assertion helper: obtains a pooled connection, runs a query, returns the connection to
   * the pool, and verifies the pool's capacity/size bookkeeping across a reuse cycle.
   */
  private void assertConnectionPoolingResults(String connectionURL, Long connectionPoolSize)
      throws SQLException {
    // Create Pooled Connection Datasource
    PooledConnectionDataSource pooledDataSource = new PooledConnectionDataSource();
    pooledDataSource.setURL(connectionURL);

    // Get pooled connection and ensure listener was added with default connection pool size.
+ PooledConnection pooledConnection = pooledDataSource.getPooledConnection(); + assertNotNull(pooledConnection); + PooledConnectionListener listener = pooledDataSource.getConnectionPoolManager(); + assertNotNull(listener); + assertTrue(listener.isConnectionPoolEmpty()); + + // Get Underlying physical connection + Connection connection = pooledConnection.getConnection(); + assertNotNull(connection); + assertFalse(connection.isClosed()); + + // Execute query with physical connection + String query = + "SELECT DISTINCT repository_name FROM `bigquery-public-data.samples.github_timeline` LIMIT" + + " 850"; + Statement statement = connection.createStatement(); + ResultSet jsonResultSet = statement.executeQuery(query); + assertTrue(jsonResultSet.getClass().getName().contains("BigQueryJsonResultSet")); + + // Close physical connection + connection.close(); + assertFalse(listener.isConnectionPoolEmpty()); + assertEquals(1, listener.getConnectionPoolCurrentCapacity()); + assertEquals(connectionPoolSize, listener.getConnectionPoolSize()); + + // Reuse same physical connection. + connection = pooledConnection.getConnection(); + assertNotNull(connection); + assertFalse(connection.isClosed()); + assertFalse(listener.isConnectionPoolEmpty()); + assertEquals(1, listener.getConnectionPoolCurrentCapacity()); + assertEquals(connectionPoolSize, listener.getConnectionPoolSize()); + + // Execute query with reusable physical connection + jsonResultSet = statement.executeQuery(query); + assertTrue(jsonResultSet.getClass().getName().contains("BigQueryJsonResultSet")); + + // Return connection back to the pool. 
+ connection.close(); + assertFalse(listener.isConnectionPoolEmpty()); + assertEquals(1, listener.getConnectionPoolCurrentCapacity()); + assertEquals(connectionPoolSize, listener.getConnectionPoolSize()); + pooledConnection.close(); + } + + @Test + public void testAdditionalProjectsInMetadata() throws SQLException { + String additionalProjectsValue = "bigquery-public-data"; + String datasetInAdditionalProject = "baseball"; + + String urlWithAdditionalProjects = + "jdbc:bigquery://https://www.googleapis.com/bigquery/v2:443;ProjectId=" + + PROJECT_ID + + ";OAuthType=3" + + ";AdditionalProjects=" + + additionalProjectsValue; + + try (Connection conn = DriverManager.getConnection(urlWithAdditionalProjects)) { + DatabaseMetaData dbMetaData = conn.getMetaData(); + + // 1. Test getCatalogs() + Set foundCatalogs = new HashSet<>(); + try (ResultSet catalogsRs = dbMetaData.getCatalogs()) { + while (catalogsRs.next()) { + foundCatalogs.add(catalogsRs.getString("TABLE_CAT")); + } + } + assertTrue( + "getCatalogs() should contain the primary project ID", + foundCatalogs.contains(PROJECT_ID)); + assertTrue( + "getCatalogs() should contain the additional project ID", + foundCatalogs.contains(additionalProjectsValue)); + + // 2. 
Test getSchemas() + Set catalogsForSchemasFromAll = new HashSet<>(); + boolean foundAdditionalDataset = false; + try (ResultSet schemasRs = dbMetaData.getSchemas()) { + while (schemasRs.next()) { + String schemaName = schemasRs.getString("TABLE_SCHEM"); + String catalogName = schemasRs.getString("TABLE_CATALOG"); + catalogsForSchemasFromAll.add(catalogName); + if (additionalProjectsValue.equals(catalogName) + && datasetInAdditionalProject.equals(schemaName)) { + foundAdditionalDataset = true; + } + } + } + assertTrue( + "getSchemas() should list datasets from the primary project", + catalogsForSchemasFromAll.contains(PROJECT_ID)); + assertTrue( + "getSchemas() should list datasets from the additional project", + catalogsForSchemasFromAll.contains(additionalProjectsValue)); + assertTrue( + "Known dataset from additional project not found in getSchemas()", + foundAdditionalDataset); + + } catch (SQLException e) { + System.err.println("SQL Error during AdditionalProjects test: " + e.getMessage()); + throw e; + } + } + + @Test + public void testFilterTablesOnDefaultDataset_getTables() throws SQLException { + String defaultDatasetValue = CONSTRAINTS_DATASET; + String table1InDefaultDataset = CONSTRAINTS_TABLE_NAME; + String table2InDefaultDataset = CONSTRAINTS_TABLE_NAME2; + + String specificDatasetValue = "JDBC_TABLE_TYPES_TEST"; + String table1InSpecificDataset = "base_table"; + String table2InSpecificDataset = "external_table"; + + String connectionUrl = + "jdbc:bigquery://https://www.googleapis.com/bigquery/v2:443;ProjectId=" + + PROJECT_ID + + ";OAuthType=3" + + ";DefaultDataset=" + + defaultDatasetValue + + ";FilterTablesOnDefaultDataset=1"; + try (Connection conn = DriverManager.getConnection(connectionUrl)) { + DatabaseMetaData dbMetaData = conn.getMetaData(); + + // Case 1: Catalog and schemaPattern are null/wildcard, should use DefaultDataset + try (ResultSet rs = dbMetaData.getTables(null, null, null, null)) { + Set tableNames = new HashSet<>(); + while 
(rs.next()) { + assertEquals(PROJECT_ID, rs.getString("TABLE_CAT")); + assertEquals(defaultDatasetValue, rs.getString("TABLE_SCHEM")); + tableNames.add(rs.getString("TABLE_NAME")); + } + assertTrue(tableNames.contains(table1InDefaultDataset)); + assertTrue(tableNames.contains(table2InDefaultDataset)); + } + + // Case 2: Explicit schemaPattern overrides DefaultDataset + try (ResultSet rs = dbMetaData.getTables(null, specificDatasetValue, null, null)) { + Set tableNames = new HashSet<>(); + while (rs.next()) { + assertEquals(PROJECT_ID, rs.getString("TABLE_CAT")); + assertEquals(specificDatasetValue, rs.getString("TABLE_SCHEM")); + tableNames.add(rs.getString("TABLE_NAME")); + } + assertTrue(tableNames.contains(table1InSpecificDataset)); + assertTrue(tableNames.contains(table2InSpecificDataset)); + } + + // Case 3: Explicit catalog, schemaPattern is null/wildcard, should use DefaultDataset within + // that catalog + try (ResultSet rs = dbMetaData.getTables(PROJECT_ID, null, null, null)) { + Set tableNames = new HashSet<>(); + while (rs.next()) { + assertEquals(PROJECT_ID, rs.getString("TABLE_CAT")); + assertEquals(defaultDatasetValue, rs.getString("TABLE_SCHEM")); + tableNames.add(rs.getString("TABLE_NAME")); + } + assertTrue(tableNames.contains(table1InDefaultDataset)); + assertTrue(tableNames.contains(table2InDefaultDataset)); + } + + // Case 4: Explicit catalog and schemaPattern override DefaultDataset + try (ResultSet rs = dbMetaData.getTables(PROJECT_ID, specificDatasetValue, null, null)) { + Set tableNames = new HashSet<>(); + while (rs.next()) { + assertEquals(PROJECT_ID, rs.getString("TABLE_CAT")); + assertEquals(specificDatasetValue, rs.getString("TABLE_SCHEM")); + tableNames.add(rs.getString("TABLE_NAME")); + } + assertTrue(tableNames.contains(table1InSpecificDataset)); + assertTrue(tableNames.contains(table2InSpecificDataset)); + } + } + } + + @Test + public void testFilterTablesOnDefaultDataset_getColumns() throws SQLException { + String 
defaultDatasetValue = CONSTRAINTS_DATASET; + String tableInDefaultDataset = CONSTRAINTS_TABLE_NAME; + String[] columnsInDefaultTable = {"id", "name", "second_name", "address"}; + + String specificDatasetValue = "JDBC_TABLE_TYPES_TEST"; + String tableInSpecificDataset = "base_table"; + String[] columnsInSpecificTable = {"id", "name", "created_at"}; + + String connectionUrl = + "jdbc:bigquery://https://www.googleapis.com/bigquery/v2:443;ProjectId=" + + PROJECT_ID + + ";OAuthType=3" + + ";DefaultDataset=" + + defaultDatasetValue + + ";FilterTablesOnDefaultDataset=1"; + + try (Connection conn = DriverManager.getConnection(connectionUrl)) { + DatabaseMetaData dbMetaData = conn.getMetaData(); + + // Case 1: Catalog and schemaPattern are null/wildcard, should use DefaultDataset + try (ResultSet rs = dbMetaData.getColumns(null, null, tableInDefaultDataset, null)) { + Set columnNames = new HashSet<>(); + while (rs.next()) { + assertEquals(PROJECT_ID, rs.getString("TABLE_CAT")); + assertEquals(defaultDatasetValue, rs.getString("TABLE_SCHEM")); + assertEquals(tableInDefaultDataset, rs.getString("TABLE_NAME")); + columnNames.add(rs.getString("COLUMN_NAME")); + } + for (String expectedCol : columnsInDefaultTable) { + assertTrue(columnNames.contains(expectedCol)); + } + assertEquals(columnsInDefaultTable.length, columnNames.size()); + } + + // Case 2: Explicit schemaPattern overrides DefaultDataset + try (ResultSet rs = + dbMetaData.getColumns(null, specificDatasetValue, tableInSpecificDataset, null)) { + Set columnNames = new HashSet<>(); + while (rs.next()) { + assertEquals(PROJECT_ID, rs.getString("TABLE_CAT")); + assertEquals(specificDatasetValue, rs.getString("TABLE_SCHEM")); + assertEquals(tableInSpecificDataset, rs.getString("TABLE_NAME")); + columnNames.add(rs.getString("COLUMN_NAME")); + } + for (String expectedCol : columnsInSpecificTable) { + assertTrue(columnNames.contains(expectedCol)); + } + assertEquals(columnsInSpecificTable.length, columnNames.size()); + } + + 
// Case 3: Explicit catalog, schemaPattern is null/wildcard, should use DefaultDataset within + // that catalog + try (ResultSet rs = dbMetaData.getColumns(PROJECT_ID, null, tableInDefaultDataset, null)) { + Set columnNames = new HashSet<>(); + while (rs.next()) { + assertEquals(PROJECT_ID, rs.getString("TABLE_CAT")); + assertEquals(defaultDatasetValue, rs.getString("TABLE_SCHEM")); + assertEquals(tableInDefaultDataset, rs.getString("TABLE_NAME")); + columnNames.add(rs.getString("COLUMN_NAME")); + } + for (String expectedCol : columnsInDefaultTable) { + assertTrue(columnNames.contains(expectedCol)); + } + assertEquals(columnsInDefaultTable.length, columnNames.size()); + } + + // Case 4: Explicit catalog and schemaPattern override DefaultDataset + try (ResultSet rs = + dbMetaData.getColumns(PROJECT_ID, specificDatasetValue, tableInSpecificDataset, null)) { + Set columnNames = new HashSet<>(); + while (rs.next()) { + assertEquals(PROJECT_ID, rs.getString("TABLE_CAT")); + assertEquals(specificDatasetValue, rs.getString("TABLE_SCHEM")); + assertEquals(tableInSpecificDataset, rs.getString("TABLE_NAME")); + columnNames.add(rs.getString("COLUMN_NAME")); + } + for (String expectedCol : columnsInSpecificTable) { + assertTrue(columnNames.contains(expectedCol)); + } + assertEquals(columnsInSpecificTable.length, columnNames.size()); + } + } + } + + @Test + public void testAlterTable() throws SQLException { + String TABLE_NAME = "JDBC_ALTER_TABLE_" + randomNumber; + String createQuery = + String.format("CREATE OR REPLACE TABLE %s.%s (`StringField` STRING);", DATASET, TABLE_NAME); + String addColumnQuery = + String.format("ALTER TABLE %s.%s ADD COLUMN `IntegerField` INTEGER;", DATASET, TABLE_NAME); + String dropColumnQuery = + String.format( + "UPDATE %s.%s SET StringField='Jane Doe' WHERE IntegerField=111", DATASET, TABLE_NAME); + String dropQuery = String.format("DROP TABLE %s.%s", DATASET, TABLE_NAME); + String selectQuery = String.format("SELECT * FROM %s.%s", DATASET, 
TABLE_NAME); + + int createStatus = bigQueryStatement.executeUpdate(createQuery); + assertEquals(0, createStatus); + + int addColumnStatus = bigQueryStatement.executeUpdate(addColumnQuery); + assertEquals(0, addColumnStatus); + + bigQueryStatement.executeQuery(selectQuery); + int selectStatus = bigQueryStatement.getUpdateCount(); + assertEquals(-1, selectStatus); + + int dropColumnStatus = bigQueryStatement.executeUpdate(dropColumnQuery); + assertEquals(0, dropColumnStatus); + + int dropStatus = bigQueryStatement.executeUpdate(dropQuery); + assertEquals(0, dropStatus); + + bigQueryStatement.execute(String.format("DROP TABLE IF EXISTS %S.%s", DATASET, TABLE_NAME)); + } + + public void testQueryPropertyDataSetProjectIdQueriesToCorrectDataset() throws SQLException { + String connection_uri = + "jdbc:bigquery://https://www.googleapis.com/bigquery/v2:443;" + + "OAuthType=3;" + + "ProjectId=" + + PROJECT_ID + + ";QueryProperties=dataset_project_id=" + + PROJECT_ID + + ";"; + String insertQuery = + String.format( + "INSERT INTO %s.%s (id, name, age) VALUES (15, 'Farhan', 25);", + "INTEGRATION_TESTS", "Test_Table"); + String selectQuery = + "SELECT * FROM `bigquery-devtools-drivers.INTEGRATION_TESTS.Test_Table` WHERE age=25;"; + Driver driver = BigQueryDriver.getRegisteredDriver(); + Connection connection = driver.connect(connection_uri, new Properties()); + Statement statement = connection.createStatement(); + + // act + statement.execute(insertQuery); + + // assertions + boolean result = statement.execute(selectQuery); + assertTrue(result); + + // clean up + String deleteQuery = + String.format("DELETE FROM %s.%s WHERE age=25", "INTEGRATION_TESTS", "Test_Table"); + statement.execute(deleteQuery); + connection.close(); + } + + @Test + public void testQueryPropertyDataSetProjectIdQueriesToIncorrectDatasetThrows() + throws SQLException { + String connection_uri = + "jdbc:bigquery://https://www.googleapis.com/bigquery/v2:443;" + + "OAuthType=3;" + + "ProjectId=" + + 
PROJECT_ID + + ";QueryProperties=dataset_project_id=bigquerytestdefault" + + ";"; + String insertQuery = + String.format( + "INSERT INTO %s.%s (id, name, age) VALUES (15, 'Farhan', 25);", + "INTEGRATION_TESTS", "Test_Table"); + Driver driver = BigQueryDriver.getRegisteredDriver(); + Connection connection = driver.connect(connection_uri, new Properties()); + Statement statement = connection.createStatement(); + + // act & assertion + assertThrows(BigQueryJdbcException.class, () -> statement.execute(insertQuery)); + connection.close(); + } + + @Test + public void testQueryPropertyTimeZoneQueries() throws SQLException { + String connection_uri = + "jdbc:bigquery://https://www.googleapis.com/bigquery/v2:443;" + + "OAuthType=3;" + + "ProjectId=" + + PROJECT_ID + + ";QueryProperties=time_zone=America/New_York;"; + String query = "SELECT * FROM `bigquery-public-data.samples.github_timeline` LIMIT 180"; + Driver driver = BigQueryDriver.getRegisteredDriver(); + Connection connection = driver.connect(connection_uri, new Properties()); + Statement statement = connection.createStatement(); + + // act + ResultSet resultSet = statement.executeQuery(query); + + // assertions + assertNotNull(resultSet); + assertTrue(resultSet.next()); + connection.close(); + } + + @Test + public void testQueryPropertySessionIdSetsStatementSession() + throws SQLException, InterruptedException { + String sessionId = getSessionId(); + String connection_uri = + "jdbc:bigquery://https://www.googleapis.com/bigquery/v2:443;" + + "OAuthType=3;" + + "ProjectId=" + + PROJECT_ID + + ";QueryProperties=session_id=" + + sessionId + + ";"; + String selectQuery = + "INSERT INTO `bigquery-devtools-drivers.JDBC_INTEGRATION_DATASET.No_KMS_Test_table` (id," + + " name, age) VALUES (132, 'Batman', 531);"; + Driver driver = BigQueryDriver.getRegisteredDriver(); + Connection connection = driver.connect(connection_uri, new Properties()); + Statement statement = connection.createStatement(); + + // act + boolean resultSet 
= statement.execute(selectQuery); + + // assertions + assertFalse(resultSet); + + // clean up + String deleteQuery = + String.format("DELETE FROM %s.%s WHERE age=25", "INTEGRATION_TESTS", "Test_Table"); + statement.execute(deleteQuery); + connection.close(); + } + + @Test + public void testEncryptedTableWithKmsQueries() throws SQLException { + // setup + String KMSKeyName = requireEnvVar("KMS_RESOURCE_PATH"); + String connection_uri = + "jdbc:bigquery://https://www.googleapis.com/bigquery/v2:443;" + + "OAuthType=3;" + + "ProjectId=" + + PROJECT_ID + + ";KMSKeyName=" + + KMSKeyName + + ";"; + String selectQuery = "SELECT * FROM `JDBC_INTEGRATION_DATASET.KMS_Test_table`;"; + Driver driver = BigQueryDriver.getRegisteredDriver(); + Connection connection = driver.connect(connection_uri, new Properties()); + Statement statement = connection.createStatement(); + + // act + ResultSet resultSet = statement.executeQuery(selectQuery); + + // assertions for data not encrypted + assertNotNull(resultSet); + assertTrue(resultSet.next()); + assertEquals("Farhan", resultSet.getString("name")); + connection.close(); + } + + @Test + public void testIncorrectKmsThrows() throws SQLException { + String KMSKeyName = requireEnvVar("KMS_RESOURCE_PATH"); + String connection_uri = + "jdbc:bigquery://https://www.googleapis.com/bigquery/v2:443;" + + "OAuthType=3;" + + "ProjectId=" + + PROJECT_ID + + ";KMSKeyName=" + + KMSKeyName + + ";"; + String selectQuery = + "INSERT INTO `bigquery-devtools-drivers.JDBC_INTEGRATION_DATASET.No_KMS_Test_table` (id," + + " name, age) VALUES (132, 'Batman', 531);"; + Driver driver = BigQueryDriver.getRegisteredDriver(); + Connection connection = driver.connect(connection_uri, new Properties()); + Statement statement = connection.createStatement(); + + // act & assertion + assertThrows(BigQueryJdbcException.class, () -> statement.execute(selectQuery)); + connection.close(); + } + + @Test + public void testQueryPropertyServiceAccountFollowsIamPermission() throws 
SQLException { + final String SERVICE_ACCOUNT_EMAIL = requireEnvVar("SA_EMAIL"); + String connection_uri = + "jdbc:bigquery://https://www.googleapis.com/bigquery/v2:443;" + + "OAuthType=3;" + + "ProjectId=" + + PROJECT_ID + + ";QueryProperties=service_account=" + + SERVICE_ACCOUNT_EMAIL + + ";"; + Driver driver = BigQueryDriver.getRegisteredDriver(); + Connection connection = driver.connect(connection_uri, new Properties()); + Statement statement = connection.createStatement(); + + // act + ResultSet resultSet = statement.executeQuery(String.format(BASE_QUERY, 100)); + + // assertions + assertNotNull(resultSet); + assertTrue(resultSet.next()); + connection.close(); + } + + @Test + public void testValidLegacySQLStatement() throws SQLException { + String legacyJoinQuery = + "SELECT\n" + + " repo_name\n" + + "FROM\n" + + " [bigquery-public-data.github_repos.commits],\n" + + " [bigquery-public-data.github_repos.sample_commits] LIMIT 10"; + String connection_uri = + "jdbc:bigquery://https://www.googleapis.com/bigquery/v2:443;" + + "OAuthType=3;ProjectId=" + + PROJECT_ID + + ";QueryDialect=BIG_QUERY;"; + Connection connection = DriverManager.getConnection(connection_uri); + Statement statement = connection.createStatement(); + + boolean result = statement.execute(legacyJoinQuery); + assertTrue(result); + connection.close(); + } + + @Test + public void testMultipleTransactionsThrowsUnsupported() throws SQLException { + BigQueryConnection connection = + (BigQueryConnection) DriverManager.getConnection(session_enabled_connection_uri); + connection.setAutoCommit(false); + Statement statement = connection.createStatement(); + assertThrows(BigQueryJdbcException.class, () -> statement.execute("BEGIN TRANSACTION;")); + connection.close(); + } + + @Test + public void testConnectionWithMultipleTransactionCommits() throws SQLException { + String TRANSACTION_TABLE = "JDBC_MULTI_COMMIT_TABLE" + randomNumber; + String createTransactionTable = + String.format( + "CREATE OR REPLACE 
TABLE %s.%s (`id` INTEGER, `name` STRING, `age` INTEGER);", + DATASET, TRANSACTION_TABLE); + String insertQuery = + String.format( + "INSERT INTO %s.%s (id, name, age) VALUES (12, 'DwightShrute', %s);", + DATASET, TRANSACTION_TABLE, randomNumber); + String updateQuery = + String.format( + "UPDATE %s.%s SET age = 14 WHERE age = %s;", DATASET, TRANSACTION_TABLE, randomNumber); + String selectQuery = + String.format("SELECT id, name, age FROM %s.%s WHERE id = 12;", DATASET, TRANSACTION_TABLE); + + bigQueryStatement.execute(createTransactionTable); + + Connection connection = DriverManager.getConnection(session_enabled_connection_uri); + connection.setAutoCommit(false); + + Statement statement = connection.createStatement(); + statement.execute(insertQuery); + statement.execute(updateQuery); + connection.commit(); // First transaction + + // After commit, a new transaction should have started. + // Executing another query and then rolling it back. + String insertQuery2 = + String.format( + "INSERT INTO %s.%s (id, name, age) VALUES (15, 'MichaelScott', 25);", + DATASET, TRANSACTION_TABLE); + statement.execute(insertQuery2); + connection.rollback(); // Second transaction + + // Verify state with the static bigQueryStatement + ResultSet resultSet = bigQueryStatement.executeQuery(selectQuery); + int count = 0; + while (resultSet.next()) { + count++; + assertEquals(14, resultSet.getInt("age")); + } + assertEquals(1, count); // Only first transaction should be committed. 
+ + // Verify the second insert was rolled back + ResultSet rs2 = + bigQueryStatement.executeQuery( + String.format("SELECT * FROM %s.%s WHERE id=15", DATASET, TRANSACTION_TABLE)); + assertFalse(rs2.next()); + + bigQueryStatement.execute( + String.format("DROP TABLE IF EXISTS %s.%s", DATASET, TRANSACTION_TABLE)); + + statement.close(); + connection.close(); + } + + // Private Helper functions + private String getSessionId() throws InterruptedException { + QueryJobConfiguration stubJobConfig = + QueryJobConfiguration.newBuilder("Select 1;").setCreateSession(true).build(); + Job job = bigQuery.create(JobInfo.of(stubJobConfig)); + job = job.waitFor(); + Job stubJob = bigQuery.getJob(job.getJobId()); + return stubJob.getStatistics().getSessionInfo().getSessionId(); + } + + @Test + public void testCallableStatementScriptExecuteUpdate() throws SQLException { + int randomNum = java.util.UUID.randomUUID().hashCode(); + String insertName = "callable-statement-dml-insert-test"; + String insertResult = String.format("%s-%d", insertName, randomNum); + String updateName = "callable-statement-dml-update-test"; + String updateResult = String.format("%s-%d", updateName, randomNum); + String selectStmtQuery = + String.format("SELECT * FROM %s.%s WHERE id = ?", DATASET, CALLABLE_STMT_DML_TABLE_NAME); + String insertCallStmtQuery = + String.format("CALL %s.%s(?,?,?);", DATASET, CALLABLE_STMT_DML_INSERT_PROC_NAME); + String updateCallStmtQuery = + String.format("CALL %s.%s(?,?,?);", DATASET, CALLABLE_STMT_DML_UPDATE_PROC_NAME); + String deleteCallStmtQuery = + String.format("CALL %s.%s(?);", DATASET, CALLABLE_STMT_DML_DELETE_PROC_NAME); + + // DML INSERT + CallableStatement callableStatement = bigQueryConnection.prepareCall(insertCallStmtQuery); + assertNotNull(callableStatement); + callableStatement.setString(1, insertName); + callableStatement.setInt(2, randomNum); + callableStatement.setString(3, insertResult); + int rowsInserted = callableStatement.executeUpdate(); + 
assertEquals(1, rowsInserted); + + PreparedStatement preparedStatement = bigQueryConnection.prepareStatement(selectStmtQuery); + assertNotNull(preparedStatement); + preparedStatement.setInt(1, randomNum); + ResultSet rs = preparedStatement.executeQuery(); + assertNotNull(rs); + assertTrue(rs.next()); + + assertEquals(insertName, rs.getString(1)); + assertEquals(randomNum, rs.getInt(2)); + assertEquals(insertResult, rs.getString(3)); + + // DML UPDATE + callableStatement = bigQueryConnection.prepareCall(updateCallStmtQuery); + assertNotNull(callableStatement); + callableStatement.setString(1, updateName); + callableStatement.setInt(2, randomNum); + callableStatement.setString(3, updateResult); + int rowsUpdated = callableStatement.executeUpdate(); + assertEquals(1, rowsUpdated); + + preparedStatement = bigQueryConnection.prepareStatement(selectStmtQuery); + assertNotNull(preparedStatement); + preparedStatement.setInt(1, randomNum); + rs = preparedStatement.executeQuery(); + assertNotNull(rs); + assertTrue(rs.next()); + + assertEquals(updateName, rs.getString(1)); + assertEquals(randomNum, rs.getInt(2)); + assertEquals(updateResult, rs.getString(3)); + + // DML DELETE + callableStatement = bigQueryConnection.prepareCall(deleteCallStmtQuery); + assertNotNull(callableStatement); + callableStatement.setInt(1, randomNum); + int rowsDeleted = callableStatement.executeUpdate(); + assertEquals(1, rowsDeleted); + + preparedStatement = bigQueryConnection.prepareStatement(selectStmtQuery); + assertNotNull(preparedStatement); + preparedStatement.setInt(1, randomNum); + rs = preparedStatement.executeQuery(); + assertNotNull(rs); + assertFalse(rs.next()); + + callableStatement.close(); + } + + @Test + public void testCallableStatementScriptExecuteLargeUpdate() throws SQLException { + int randomNum = java.util.UUID.randomUUID().hashCode(); + String insertName = "callable-statement-dml-insert-test"; + String insertResult = String.format("%s-%d", insertName, randomNum); + String 
updateName = "callable-statement-dml-update-test"; + String updateResult = String.format("%s-%d", updateName, randomNum); + String selectStmtQuery = + String.format("SELECT * FROM %s.%s WHERE id = ?", DATASET, CALLABLE_STMT_DML_TABLE_NAME); + String insertCallStmtQuery = + String.format("CALL %s.%s(?,?,?);", DATASET, CALLABLE_STMT_DML_INSERT_PROC_NAME); + String updateCallStmtQuery = + String.format("CALL %s.%s(?,?,?);", DATASET, CALLABLE_STMT_DML_UPDATE_PROC_NAME); + String deleteCallStmtQuery = + String.format("CALL %s.%s(?);", DATASET, CALLABLE_STMT_DML_DELETE_PROC_NAME); + + // DML INSERT + CallableStatement callableStatement = bigQueryConnection.prepareCall(insertCallStmtQuery); + assertNotNull(callableStatement); + callableStatement.setString(1, insertName); + callableStatement.setInt(2, randomNum); + callableStatement.setString(3, insertResult); + long rowsInserted = callableStatement.executeLargeUpdate(); + assertEquals(1L, rowsInserted); + + PreparedStatement preparedStatement = bigQueryConnection.prepareStatement(selectStmtQuery); + assertNotNull(preparedStatement); + preparedStatement.setInt(1, randomNum); + ResultSet rs = preparedStatement.executeQuery(); + assertNotNull(rs); + assertTrue(rs.next()); + + assertEquals(insertName, rs.getString(1)); + assertEquals(randomNum, rs.getInt(2)); + assertEquals(insertResult, rs.getString(3)); + + // DML UPDATE + callableStatement = bigQueryConnection.prepareCall(updateCallStmtQuery); + assertNotNull(callableStatement); + callableStatement.setString(1, updateName); + callableStatement.setInt(2, randomNum); + callableStatement.setString(3, updateResult); + long rowsUpdated = callableStatement.executeLargeUpdate(); + assertEquals(1L, rowsUpdated); + + preparedStatement = bigQueryConnection.prepareStatement(selectStmtQuery); + assertNotNull(preparedStatement); + preparedStatement.setInt(1, randomNum); + rs = preparedStatement.executeQuery(); + assertNotNull(rs); + assertTrue(rs.next()); + + assertEquals(updateName, 
rs.getString(1)); + assertEquals(randomNum, rs.getInt(2)); + assertEquals(updateResult, rs.getString(3)); + + // DML DELETE + callableStatement = bigQueryConnection.prepareCall(deleteCallStmtQuery); + assertNotNull(callableStatement); + callableStatement.setInt(1, randomNum); + long rowsDeleted = callableStatement.executeLargeUpdate(); + assertEquals(1L, rowsDeleted); + + preparedStatement = bigQueryConnection.prepareStatement(selectStmtQuery); + assertNotNull(preparedStatement); + preparedStatement.setInt(1, randomNum); + rs = preparedStatement.executeQuery(); + assertNotNull(rs); + assertFalse(rs.next()); + + callableStatement.close(); + } + + @Test + public void testScript() throws SQLException { + String BASE_QUERY = + "SELECT * FROM bigquery-public-data.new_york_taxi_trips.tlc_yellow_trips_2017 order by" + + " trip_distance asc LIMIT %s;"; + String query1 = String.format(BASE_QUERY, 5000); + String query2 = String.format(BASE_QUERY, 7000); + String query3 = String.format(BASE_QUERY, 9000); + + bigQueryStatement.execute(query1 + query2 + query3); + ResultSet resultSet = bigQueryStatement.getResultSet(); + assertEquals(5000, resultSetRowCount(resultSet)); + + boolean hasMoreResult = bigQueryStatement.getMoreResults(); + assertTrue(hasMoreResult); + resultSet = bigQueryStatement.getResultSet(); + assertEquals(7000, resultSetRowCount(resultSet)); + + hasMoreResult = bigQueryStatement.getMoreResults(); + assertTrue(hasMoreResult); + resultSet = bigQueryStatement.getResultSet(); + assertEquals(9000, resultSetRowCount(resultSet)); + } + + @Test + public void testCallableStatementScriptExecute() throws SQLException { + int randomNum = random.nextInt(99); + String callableStmtQuery = + String.format( + "DECLARE call_result STRING;" + + "CALL %s.%s(?,?,call_result);" + + "SELECT * FROM %s.%s WHERE result = call_result;", + DATASET, CALLABLE_STMT_PROC_NAME, DATASET, CALLABLE_STMT_TABLE_NAME); + CallableStatement callableStatement = 
bigQueryConnection.prepareCall(callableStmtQuery); + callableStatement.setString(1, "callable-stmt-test"); + callableStatement.setInt(2, randomNum); + + assertFalse(callableStatement.execute()); + assertEquals(1, callableStatement.getUpdateCount()); + + // This is an actual SELECT * from the above + assertTrue(callableStatement.getMoreResults()); + ResultSet resultSet = callableStatement.getResultSet(); + ResultSetMetaData rsMetadata = resultSet.getMetaData(); + assertEquals(3, rsMetadata.getColumnCount()); + + assertTrue(resultSet.next()); + + String expected = String.format("callable-stmt-test-%d", randomNum); + String actual = resultSet.getString(3); + + assertEquals(expected, actual); + + // Validate there are no more results + assertFalse(callableStatement.getMoreResults()); + assertEquals(-1, callableStatement.getUpdateCount()); + callableStatement.close(); + } + + @Test + public void testExecuteScriptWithExpession() throws SQLException { + int randomNum = random.nextInt(99); + String query = String.format("DECLARE x INT64; SET x = (SELECT %s); SELECT x;", randomNum); + + assertTrue(bigQueryStatement.execute(query)); + ResultSet rs = bigQueryStatement.getResultSet(); + assertTrue(rs.next()); + assertEquals(randomNum, rs.getInt(1)); + assertFalse(rs.next()); + assertFalse(bigQueryStatement.getMoreResults()); + assertEquals(-1, bigQueryStatement.getUpdateCount()); + } + + @Test + public void testInformationSchemaTables() throws SQLException { + String query = String.format("SELECT * FROM %s.INFORMATION_SCHEMA.TABLES", DATASET); + try (Statement statement = bigQueryConnection.createStatement(); + ResultSet resultSet = statement.executeQuery(query)) { + ResultSetMetaData metaData = resultSet.getMetaData(); + int columnCount = metaData.getColumnCount(); + assertTrue(columnCount > 0); + int rowCount = 0; + while (resultSet.next()) { + rowCount++; + for (int i = 1; i <= columnCount; i++) { + Object obj = resultSet.getObject(i); + if (obj != null) { + 
assertNotNull(obj.toString()); + } + } + } + assertTrue(rowCount > 0); + } + } + + private void validate( + String method, + BiFunction getter, + ImmutableMap expectedResult) + throws Exception { + + try (Connection connection = DriverManager.getConnection(connection_uri); + Connection connectionHTAPI = + DriverManager.getConnection( + connection_uri + + ";HighThroughputMinTableSize=0;HighThroughputActivationRatio=0;EnableHighThroughputAPI=1;"); + Statement statement = connection.createStatement(); + Statement statementHTAPI = connectionHTAPI.createStatement()) { + + String query = + "SELECT * FROM INTEGRATION_TEST_FORMAT.all_bq_types WHERE stringField is not null"; + ResultSet resultSetRegular = statement.executeQuery(query); + ResultSet resultSetArrow = statementHTAPI.executeQuery(query); + resultSetRegular.next(); + resultSetArrow.next(); + + for (int i = 1; i <= resultSetRegular.getMetaData().getColumnCount(); i++) { + String columnName = resultSetRegular.getMetaData().getColumnName(i); + + String regularApiLabel = + String.format("[Method: %s] [Column: %s] [API: Regular]", method, columnName); + String htapiApiLabel = + String.format("[Method: %s] [Column: %s] [API: HTAPI]", method, columnName); + + if (expectedResult.containsKey(columnName)) { + Object expectedValue = expectedResult.get(columnName); + + assertEquals(regularApiLabel, expectedValue, getter.apply(resultSetRegular, i)); + assertEquals(htapiApiLabel, expectedValue, getter.apply(resultSetArrow, i)); + + } else { + String regularMsg = "Expected exception but got a value. " + regularApiLabel; + assertEquals(regularMsg, EXCEPTION_REPLACEMENT, getter.apply(resultSetRegular, i)); + + String htapiMsg = "Expected exception but got a value. 
" + htapiApiLabel; + assertEquals(htapiMsg, EXCEPTION_REPLACEMENT, getter.apply(resultSetArrow, i)); + } + } + } + } + + @Test + public void validateGetString() throws Exception { + final ImmutableMap stringResults = + new ImmutableMap.Builder() + .put("stringField", "StringValue") + .put("bytesField", "Qnl0ZXNWYWx1ZQ==") + .put("intField", "123") + .put("floatField", "10.5") + .put("numericField", "12345.67") + .put("bigNumericField", "98765432109876543210.123456789") + .put("booleanField", "true") + .put("timestampFiled", "2023-07-28 12:30:00.000000") + .put("dateField", "2023-07-28") + .put("timeField", "12:30:00.000") + .put("dateTimeField", "2023-07-28 12:30:00.000000") + .put("geographyField", "POINT(-74.006 40.7128)") + .put( + "recordField", + "{\"name\":\"NameValue\",\"recordNested\":{\"lastName\":\"LastNameValue\"}}") + .put("rangeField", "[2023-01-01, 2023-12-01)") + .put("jsonField", "{\"key\":\"value\"}") + .put("arrayString", "[abc, def, ghi]") + .put("arrayRecord", "[{\"value\":\"rec_val1\"}, {\"value\":\"rec_val2\"}]") + .put("arrayBytes", "[Ynl0ZTE=, Ynl0ZTI=]") + .put("arrayInteger", "[10, 20]") + .put("arrayNumeric", "[10.5, 20.5]") + .put("arrayBignumeric", "[100.1, 200.2]") + .put("arrayBoolean", "[true, false]") + .put("arrayTimestamp", "[2023-01-01 01:00:00.0, 2023-01-01 02:00:00.0]") + .put("arrayDate", "[2023-01-01, 2023-01-02]") + .put("arrayTime", "[01:00:00, 02:00:00]") + .put("arrayDatetime", "[2023-01-01 01:00:00.0, 2023-01-01 02:00:00.0]") + .put("arrayGeography", "[POINT(1 1), POINT(2 2)]") + .put("arrayRange", "[[2023-01-01, 2023-01-03), [2023-01-04, 2023-01-06)]") + .put("arrayJson", "[{\"a\":1}, {\"b\":2}]") + .put("arrayFloat", "[1.1, 2.2]") + .build(); + BiFunction getter = + (s, i) -> { + try { + return s.getString(i); + } catch (Exception e) { + return EXCEPTION_REPLACEMENT; + } + }; + validate("getString", getter, stringResults); + } + + @Test + public void validateGetInt() throws Exception { + final ImmutableMap result = + 
new ImmutableMap.Builder() + .put("intField", 123) + .put("floatField", 10) + .put("numericField", 12345) + .put("booleanField", 1) + .build(); + BiFunction getter = + (s, i) -> { + try { + return s.getInt(i); + } catch (Exception e) { + return EXCEPTION_REPLACEMENT; + } + }; + validate("getInt", getter, result); + } + + @Test + public void validateGetLong() throws Exception { + final ImmutableMap result = + new ImmutableMap.Builder() + .put("intField", 123L) + .put("floatField", 10L) + .put("numericField", 12345L) + .put("booleanField", 1L) + .build(); + BiFunction getter = + (s, i) -> { + try { + return s.getLong(i); + } catch (Exception e) { + return EXCEPTION_REPLACEMENT; + } + }; + validate("getLong", getter, result); + } + + @Test + public void validateGetBool() throws Exception { + final ImmutableMap result = + new ImmutableMap.Builder() + .put("intField", true) + .put("floatField", true) + .put("numericField", true) + .put("booleanField", true) + .put("bigNumericField", true) + .put("stringField", false) + .build(); + BiFunction getter = + (s, i) -> { + try { + return s.getBoolean(i); + } catch (Exception e) { + return EXCEPTION_REPLACEMENT; + } + }; + validate("getBool", getter, result); + } + + @Test + public void validateGetFloat() throws Exception { + final ImmutableMap result = + new ImmutableMap.Builder() + .put("intField", (float) 123.0) + .put("floatField", (float) 10.5) + .put("numericField", (float) 12345.67) + .put("bigNumericField", (float) 98765432109876543210.123456789) + .put("booleanField", (float) 1.0) + .build(); + BiFunction getter = + (s, i) -> { + try { + return s.getFloat(i); + } catch (Exception e) { + return EXCEPTION_REPLACEMENT; + } + }; + validate("getFloat", getter, result); + } + + @Test + public void validateGetDouble() throws Exception { + final ImmutableMap result = + new ImmutableMap.Builder() + .put("intField", (double) 123.0) + .put("floatField", (double) 10.5) + .put("numericField", (double) 12345.67) + 
.put("bigNumericField", (double) 98765432109876543210.123456789) + .put("booleanField", (double) 1.0) + .build(); + BiFunction getter = + (s, i) -> { + try { + return s.getDouble(i); + } catch (Exception e) { + return EXCEPTION_REPLACEMENT; + } + }; + validate("getDouble", getter, result); + } + + @Test + public void validateGetShort() throws Exception { + final ImmutableMap result = + new ImmutableMap.Builder() + .put("intField", (short) 123) + .put("floatField", (short) 10) + .put("numericField", (short) 12345) + .put("booleanField", (short) 1) + .build(); + BiFunction getter = + (s, i) -> { + try { + return s.getShort(i); + } catch (Exception e) { + return EXCEPTION_REPLACEMENT; + } + }; + validate("getShort", getter, result); + } + + @Test + public void validateGetTime() throws Exception { + final ImmutableMap result = + new ImmutableMap.Builder() + .put("timeField", Time.valueOf("12:30:00")) + .put("dateTimeField", Time.valueOf("12:30:00")) + .put("timestampFiled", Time.valueOf("12:30:00")) + .build(); + BiFunction getter = + (s, i) -> { + try { + return s.getTime(i); + } catch (Exception e) { + return EXCEPTION_REPLACEMENT; + } + }; + validate("getTime", getter, result); + } + + @Test + public void validateGetDate() throws Exception { + final ImmutableMap result = + new ImmutableMap.Builder() + .put("dateField", Date.valueOf("2023-07-28")) + .put("dateTimeField", Date.valueOf("2023-07-28")) + .put("timestampFiled", Date.valueOf("2023-07-28")) + .build(); + BiFunction getter = + (s, i) -> { + try { + return s.getDate(i); + } catch (Exception e) { + return EXCEPTION_REPLACEMENT; + } + }; + validate("getDate", getter, result); + } + + @Test + public void validateGetTimestamp() throws Exception { + final ImmutableMap result = + new ImmutableMap.Builder() + .put("timeField", Timestamp.valueOf("1970-01-01 12:30:00")) + .put("dateField", Timestamp.valueOf("2023-07-28 00:00:00")) + .put("dateTimeField", Timestamp.valueOf("2023-07-28 12:30:00")) + 
.put("timestampFiled", Timestamp.valueOf("2023-07-28 12:30:00")) + .build(); + BiFunction getter = + (s, i) -> { + try { + return s.getTimestamp(i); + } catch (Exception e) { + return EXCEPTION_REPLACEMENT; + } + }; + validate("getTimestamp", getter, result); + } + + @Test + public void validateGetByte() throws Exception { + final ImmutableMap result = + new ImmutableMap.Builder() + .put("intField", (byte) 123) + .put("booleanField", (byte) 1) + .put("floatField", (byte) 10) + .build(); + BiFunction getter = + (s, i) -> { + try { + return s.getByte(i); + } catch (Exception e) { + return EXCEPTION_REPLACEMENT; + } + }; + validate("getByte", getter, result); + } + + @Test + public void validateGetObjectNullValues() throws Exception { + try (Connection connection = DriverManager.getConnection(connection_uri); + Connection connectionHTAPI = + DriverManager.getConnection( + connection_uri + + ";HighThroughputMinTableSize=0;HighThroughputActivationRatio=0;EnableHighThroughputAPI=1;"); + Statement statement = connection.createStatement(); + Statement statementHTAPI = connectionHTAPI.createStatement()) { + + String query = + "SELECT * FROM INTEGRATION_TEST_FORMAT.all_bq_types WHERE stringField is null;"; + ResultSet resultSetRegular = statement.executeQuery(query); + ResultSet resultSetArrow = statementHTAPI.executeQuery(query); + resultSetRegular.next(); + resultSetArrow.next(); + + for (int i = 1; i <= resultSetRegular.getMetaData().getColumnCount(); i++) { + String columnName = resultSetRegular.getMetaData().getColumnName(i); + if (!columnName.contains("array")) { + assertNull(resultSetRegular.getObject(i)); + assertNull(resultSetArrow.getObject(i)); + } else { + assertEquals(resultSetRegular.getObject(i).toString(), "[]"); + assertEquals(resultSetArrow.getObject(i).toString(), "[]"); + } + } + } + } + + private int resultSetRowCount(ResultSet resultSet) throws SQLException { + int rowCount = 0; + while (resultSet.next()) { + rowCount++; + } + return rowCount; + } +} 
diff --git a/google-cloud-bigquery-jdbc/src/test/java/com/google/cloud/bigquery/jdbc/it/ITNightlyBigQueryTest.java b/google-cloud-bigquery-jdbc/src/test/java/com/google/cloud/bigquery/jdbc/it/ITNightlyBigQueryTest.java new file mode 100644 index 000000000..30124b4a0 --- /dev/null +++ b/google-cloud-bigquery-jdbc/src/test/java/com/google/cloud/bigquery/jdbc/it/ITNightlyBigQueryTest.java @@ -0,0 +1,1713 @@ +/* + * Copyright 2024 Google LLC + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package com.google.cloud.bigquery.jdbc.it; + +import static com.google.common.truth.Truth.assertThat; +import static java.util.Arrays.asList; +import static org.junit.Assert.assertArrayEquals; +import static org.junit.Assert.assertEquals; +import static org.junit.Assert.assertFalse; +import static org.junit.Assert.assertNotNull; +import static org.junit.Assert.assertThrows; +import static org.junit.Assert.assertTrue; + +import com.google.cloud.ServiceOptions; +import com.google.cloud.bigquery.BigQuery; +import com.google.cloud.bigquery.BigQueryOptions; +import com.google.cloud.bigquery.Job; +import com.google.cloud.bigquery.JobInfo; +import com.google.cloud.bigquery.QueryJobConfiguration; +import com.google.cloud.bigquery.exception.BigQueryJdbcException; +import com.google.cloud.bigquery.exception.BigQueryJdbcSqlSyntaxErrorException; +import com.google.cloud.bigquery.jdbc.BigQueryConnection; +import com.google.cloud.bigquery.jdbc.BigQueryDriver; +import java.nio.charset.StandardCharsets; +import java.sql.Connection; +import java.sql.Date; +import java.sql.Driver; +import java.sql.DriverManager; +import java.sql.PreparedStatement; +import java.sql.ResultSet; +import java.sql.ResultSetMetaData; +import java.sql.SQLException; +import java.sql.Statement; +import java.sql.Struct; +import java.sql.Time; +import java.sql.Timestamp; +import java.util.Arrays; +import java.util.Properties; +import java.util.Random; +import java.util.concurrent.atomic.AtomicBoolean; +import org.junit.AfterClass; +import org.junit.BeforeClass; +import org.junit.Test; + +public class ITNightlyBigQueryTest { + static final String PROJECT_ID = ServiceOptions.getDefaultProjectId(); + static Connection bigQueryConnection; + static Statement bigQueryStatement; + static BigQuery bigQuery; + private static final Random random = new Random(); + private static final int randomNumber = random.nextInt(9999); + private static final String BASE_QUERY = + "SELECT * FROM 
bigquery-public-data.new_york_taxi_trips.tlc_yellow_trips_2017 order by" + + " trip_distance asc LIMIT %s"; + private static final String CONSTRAINTS_DATASET = "JDBC_CONSTRAINTS_TEST_DATASET"; + private static final String CONSTRAINTS_TABLE_NAME = "JDBC_CONSTRAINTS_TEST_TABLE"; + private static final String CONSTRAINTS_TABLE_NAME2 = "JDBC_CONSTRAINTS_TEST_TABLE2"; + private static final String CALLABLE_STMT_PROC_NAME = "IT_CALLABLE_STMT_PROC_TEST"; + private static final String CALLABLE_STMT_TABLE_NAME = "IT_CALLABLE_STMT_PROC_TABLE"; + private static final String CALLABLE_STMT_PARAM_KEY = "CALL_STMT_PARAM_KEY"; + private static final String CALLABLE_STMT_DML_INSERT_PROC_NAME = + "IT_CALLABLE_STMT_PROC_DML_INSERT_TEST"; + private static final String CALLABLE_STMT_DML_UPDATE_PROC_NAME = + "IT_CALLABLE_STMT_PROC_DML_UPDATE_TEST"; + private static final String CALLABLE_STMT_DML_DELETE_PROC_NAME = + "IT_CALLABLE_STMT_PROC_DML_DELETE_TEST"; + private static final String CALLABLE_STMT_DML_TABLE_NAME = "IT_CALLABLE_STMT_PROC_DML_TABLE"; + private static final String DATASET = "JDBC_NIGHTLY_IT_DATASET"; + private static final String DATASET2 = "JDBC_PRESUBMIT_INTEGRATION_DATASET_2"; + static final String session_enabled_connection_uri = + "jdbc:bigquery://https://www.googleapis.com/bigquery/v2:443;PROJECTID=" + + PROJECT_ID + + ";OAUTHTYPE=3;EnableSession=1"; + + static final String connection_uri = + "jdbc:bigquery://https://www.googleapis.com/bigquery/v2:443;PROJECTID=" + + PROJECT_ID + + ";OAUTHTYPE=3"; + + @BeforeClass + public static void beforeClass() throws SQLException { + bigQueryConnection = DriverManager.getConnection(connection_uri, new Properties()); + bigQueryStatement = bigQueryConnection.createStatement(); + bigQuery = BigQueryOptions.newBuilder().build().getService(); + } + + @AfterClass + public static void afterClass() throws SQLException { + bigQueryStatement.close(); + bigQueryConnection.close(); + } + + @Test + public void testMergeInExecuteBatch() 
throws SQLException { + Random random = new Random(); + String DATASET = "JDBC_INTEGRATION_DATASET"; + String TABLE_NAME1 = "Inventory" + random.nextInt(9999); + String TABLE_NAME2 = "DetailedInventory" + random.nextInt(9999); + + String createQuery = + String.format( + "CREATE OR REPLACE TABLE %s.%s (`product` STRING, `quantity` INTEGER);", + DATASET, TABLE_NAME1); + + String createQuery2 = + String.format( + "CREATE OR REPLACE TABLE %s.%s (`product` STRING, `quantity` INTEGER," + + " `supply_constrained` BOOLEAN, `comment` STRING);", + DATASET, TABLE_NAME2); + + String insertQuery2 = + String.format( + "INSERT INTO %s.%s (product, quantity, supply_constrained, comment) " + + "VALUES ('countertop microwave', 20, NULL,'[]' )," + + " ('front load washer', 20, false,'[]' ), " + + " ('microwave', 20, false,'[]' ), " + + " ('refrigerator', 10, false,'[]' );", + DATASET, TABLE_NAME2); + + bigQueryStatement.execute(createQuery); + bigQueryStatement.execute(createQuery2); + bigQueryStatement.execute(insertQuery2); + + String insertQuery = + String.format( + "INSERT INTO %s.%s (product, quantity) " + + "VALUES (?,? ), (?,? ), (?,? ), (?,? ), (?,? ), (?,? );", + DATASET, TABLE_NAME1); + PreparedStatement insertPs = bigQueryConnection.prepareStatement(insertQuery); + insertPs.setString(1, "dishwasher"); + insertPs.setInt(2, 30); + insertPs.setString(3, "dryer"); + insertPs.setInt(4, 30); + insertPs.setString(5, "front load washer"); + insertPs.setInt(6, 20); + insertPs.setString(7, "microwave"); + insertPs.setInt(8, 20); + insertPs.setString(9, "oven"); + insertPs.setInt(10, 5); + insertPs.setString(11, "top load washer"); + insertPs.setInt(12, 10); + + int insertStatus = insertPs.executeUpdate(); + assertEquals(6, insertStatus); + + String updateQuery = + String.format("UPDATE %s.%s SET quantity=? 
WHERE product=?", DATASET, TABLE_NAME1); + PreparedStatement updatePs = bigQueryConnection.prepareStatement(updateQuery); + updatePs.setString(2, "dryer"); + updatePs.setInt(1, 35); + + int updateStatus = updatePs.executeUpdate(); + assertEquals(1, updateStatus); + + String deleteQuery = String.format("DELETE FROM %s.%s WHERE product=?", DATASET, TABLE_NAME1); + PreparedStatement deletePs = bigQueryConnection.prepareStatement(deleteQuery); + deletePs.setString(1, "dishwasher"); + + int deleteStatus = deletePs.executeUpdate(); + assertEquals(1, deleteStatus); + + Statement statement = bigQueryConnection.createStatement(); + String mergeQuery = + String.format( + "MERGE %s.%s T\n" + + "USING %s.%s S\n" + + "ON T.product = S.product\n" + + "WHEN NOT MATCHED AND quantity < 100 THEN\n" + + " INSERT(product, quantity, supply_constrained, comment)\n" + + " VALUES(product, quantity, true, '[]')\n", + DATASET, TABLE_NAME2, DATASET, TABLE_NAME1); + statement.addBatch(mergeQuery); + int[] result = statement.executeBatch(); + + assertEquals(1, result.length); + assertEquals(3, result[0]); + bigQueryStatement.execute(String.format("DROP TABLE %s.%s", DATASET, TABLE_NAME1)); + bigQueryStatement.execute(String.format("DROP TABLE %s.%s", DATASET, TABLE_NAME2)); + } + + @Test + public void testValidLongRunningQuery() throws SQLException { + // setup + String selectQuery = + "SELECT * FROM `bigquery-public-data.deepmind_alphafold.metadata` LIMIT 50000"; + + // Read data via JDBC + ResultSet resultSet = bigQueryStatement.executeQuery(selectQuery); + assertNotNull(resultSet); + + for (int i = 0; i < 50000; i++) { + resultSet.next(); + assertFalse(resultSet.wasNull()); + assertNotNull(resultSet.getString(5)); + } + + // clean up + resultSet.close(); + } + + @Test + public void testQueryInterruptGracefullyStopsExplicitJob() + throws SQLException, InterruptedException { + AtomicBoolean threadException = new AtomicBoolean(true); + Connection bigQueryConnection = + 
DriverManager.getConnection(connection_uri + ";JobCreationMode=1", new Properties()); + Statement bigQueryStatement = bigQueryConnection.createStatement(); + + // This query takes 300 seconds to complete + String query300Seconds = + "DECLARE DELAY_TIME DATETIME; SET DELAY_TIME = DATETIME_ADD(CURRENT_DATETIME, INTERVAL 300" + + " SECOND); WHILE CURRENT_DATETIME < DELAY_TIME DO END WHILE;"; + + // Query will be started in the background thread & we will call cancel from current thread. + Thread t = + new Thread( + () -> { + SQLException e = + assertThrows( + SQLException.class, () -> bigQueryStatement.execute(query300Seconds)); + assertTrue(e.getMessage().contains("User requested cancellation")); + threadException.set(false); + }); + t.start(); + // Allow thread to actually initiate the query + Thread.sleep(3000); + bigQueryStatement.cancel(); + // Wait until background thread is finished + t.join(); + assertFalse(threadException.get()); + // Ensure statement can be used again + assertFalse(bigQueryStatement.isClosed()); + bigQueryStatement.executeQuery("SELECT 1"); + } + + @Test + public void testQueryInterruptGracefullyStopsOptionalJob() + throws SQLException, InterruptedException { + AtomicBoolean threadException = new AtomicBoolean(true); + Connection bigQueryConnection = + DriverManager.getConnection(connection_uri + ";JobCreationMode=2", new Properties()); + Statement bigQueryStatement = bigQueryConnection.createStatement(); + + // This query takes 300 seconds to complete + String query300Seconds = + "DECLARE DELAY_TIME DATETIME; SET DELAY_TIME = DATETIME_ADD(CURRENT_DATETIME, INTERVAL 300" + + " SECOND); WHILE CURRENT_DATETIME < DELAY_TIME DO END WHILE;"; + + // Query will be started in the background thread & we will call cancel from current thread. 
+ Thread t = + new Thread( + () -> { + SQLException e = + assertThrows( + SQLException.class, () -> bigQueryStatement.execute(query300Seconds)); + assertTrue(e.getMessage().contains("Query was cancelled.")); + threadException.set(false); + }); + t.start(); + // Allow thread to actually initiate the query + Thread.sleep(3000); + bigQueryStatement.cancel(); + // Wait until background thread is finished + t.join(); + assertFalse(threadException.get()); + // Ensure statement can be used again + assertFalse(bigQueryStatement.isClosed()); + bigQueryStatement.executeQuery("SELECT 1"); + } + + @Test + public void testWideColumnQueries() throws SQLException { + String selectQuery = + "SELECT * FROM `bigquery-public-data.covid19_open_data_eu.covid19_open_data` LIMIT 50000"; + + // Read data via JDBC + ResultSet resultSet = bigQueryStatement.executeQuery(selectQuery); + assertNotNull(resultSet); + + for (int i = 0; i < 50000; i++) { + resultSet.next(); + assertFalse(resultSet.wasNull()); + } + + // clean up + resultSet.close(); + } + + @Test + public void testExecuteLargeUpdate() throws SQLException { + String tableName = "JDBC_LARGE_UPDATE_TABLE_" + randomNumber; + String createQuery = + String.format( + "CREATE TABLE %s.%s (" + + " gbifid STRING, scientificname STRING, " + + " individualcount INTEGER, isReviewed BOOLEAN)", + DATASET, tableName); + + String insertQuery = + String.format( + "INSERT INTO %s.%s (gbifid, scientificname, individualcount) " + + "SELECT gbifid, scientificname, individualcount FROM " + + "bigquery-public-data.gbif.occurrences;", + DATASET, tableName); + String updateQuery = + String.format( + "UPDATE %s.%s SET isReviewed = false WHERE individualcount >= 0 OR individualcount IS" + + " NULL", + DATASET, tableName); + + String selectQuery = String.format("SELECT * FROM %s.%s LIMIT 10", DATASET, tableName); + + bigQueryStatement.execute(createQuery); + + long insertCount = bigQueryStatement.executeLargeUpdate(insertQuery); + assertTrue(insertCount > 
Integer.MAX_VALUE); + + long updateCount = bigQueryStatement.executeLargeUpdate(updateQuery); + assertTrue(updateCount > Integer.MAX_VALUE); + + ResultSet selectResult = bigQueryStatement.executeQuery(selectQuery); + assertTrue(selectResult.next()); + assertFalse(selectResult.getBoolean("isReviewed")); + bigQueryStatement.execute(String.format("DROP TABLE IF EXISTS %S.%s", DATASET, tableName)); + } + + @Test + public void testHTAPIWithValidDestinationTableSavesQueriesWithStandardSQL() throws SQLException { + // setup + String connection_uri = + "jdbc:bigquery://https://www.googleapis.com/bigquery/v2:443;" + + "OAuthType=3;" + + "ProjectId=" + + PROJECT_ID + + ";QueryDialect=SQL;" + + "LargeResultTable=destination_table_test;" + + "LargeResultDataset=INTEGRATION_TESTS;" + + "EnableHighThroughputAPI=1;"; + String selectLegacyQuery = + "SELECT * FROM `bigquery-public-data.deepmind_alphafold.metadata` LIMIT 200000;"; + Driver driver = BigQueryDriver.getRegisteredDriver(); + Connection connection = driver.connect(connection_uri, new Properties()); + Statement statement = connection.createStatement(); + + // act + ResultSet resultSet = statement.executeQuery(selectLegacyQuery); + + // assertion + assertNotNull(resultSet); + String selectQuery = "SELECT * FROM INTEGRATION_TESTS.destination_table_test;"; + ResultSet actualResultSet = bigQueryStatement.executeQuery(selectQuery); + assertEquals(200000, resultSetRowCount(actualResultSet)); + + // clean up + String deleteRows = "DELETE FROM `INTEGRATION_TESTS.destination_table_test` WHERE 1=1;"; + bigQueryStatement.execute(deleteRows); + } + + @Test + public void testBigQueryConcurrentLimitWithExecuteBatch() throws SQLException { + // setup + String BATCH_TABLE = "JDBC_EXECUTE_BATCH_TABLE_CONCURRENT_LIMIT_" + random.nextInt(99); + String createBatchTable = + String.format( + "CREATE OR REPLACE TABLE %s.%s (`id` INTEGER, `name` STRING, `age` INTEGER);", + DATASET, BATCH_TABLE); + bigQueryStatement.execute(createBatchTable); + 
Driver driver = BigQueryDriver.getRegisteredDriver(); + Connection connection = driver.connect(connection_uri, new Properties()); + Statement statement = connection.createStatement(); + + // act + // batch bypasses the 16 concurrent limit + int[] results; + for (int i = 0; i < 30; i++) { + String insertQuery = + "INSERT INTO " + + DATASET + + "." + + BATCH_TABLE + + " (id, name, age) " + + "VALUES (12, 'Farhan', " + + randomNumber + + i + + "); "; + statement.addBatch(insertQuery); + } + results = statement.executeBatch(); + + for (int updateCount : results) { + assertEquals(1, updateCount); + } + bigQueryStatement.execute(String.format("DROP TABLE IF EXISTS %S.%s", DATASET, BATCH_TABLE)); + } + + @Test + public void testValidExecuteBatch() throws SQLException { + // setup + String BATCH_TABLE = "JDBC_EXECUTE_BATCH_TABLE_" + random.nextInt(99); + String createBatchTable = + String.format( + "CREATE OR REPLACE TABLE %s.%s (`id` INTEGER, `name` STRING, `age` INTEGER);", + DATASET, BATCH_TABLE); + bigQueryStatement.execute(createBatchTable); + Driver driver = BigQueryDriver.getRegisteredDriver(); + Connection connection = driver.connect(connection_uri, new Properties()); + Statement statement = connection.createStatement(); + + // act + // batch bypasses the 16 concurrent limit + int[] results; + for (int i = 0; i < 30; i++) { + String insertQuery = + "INSERT INTO " + + DATASET + + "." 
+ + BATCH_TABLE + + " (id, name, age) " + + "VALUES (12, 'Farhan', " + + randomNumber + + i + + "); "; + statement.addBatch(insertQuery); + } + results = statement.executeBatch(); + + // assertions + assertEquals(30, results.length); + for (int updateCount : results) { + assertEquals(1, updateCount); + } + bigQueryStatement.execute(String.format("DROP TABLE IF EXISTS %S.%s", DATASET, BATCH_TABLE)); + } + + @Test + public void testValidExecuteBatchWithMultipleDatasets() throws SQLException { + // setup + String BATCH_TABLE = "JDBC_EXECUTE_BATCH_TABLE_MULTIPLE_DATASET_" + random.nextInt(99); + String createBatchTable = + String.format( + "CREATE OR REPLACE TABLE %s.%s (`id` INTEGER, `name` STRING, `age` INTEGER);", + DATASET, BATCH_TABLE); + String createBatchTable2 = + String.format( + "CREATE OR REPLACE TABLE %s.%s (`id` INTEGER, `name` STRING, `age` INTEGER);", + DATASET2, BATCH_TABLE); + bigQueryStatement.execute(createBatchTable); + bigQueryStatement.execute(createBatchTable2); + Driver driver = BigQueryDriver.getRegisteredDriver(); + Connection connection = driver.connect(connection_uri, new Properties()); + Statement statement = connection.createStatement(); + + // act + int[] results; + for (int i = 0; i < 15; i++) { + String insertQuery = + "INSERT INTO " + + DATASET + + "." + + BATCH_TABLE + + " (id, name, age) " + + "VALUES (12, 'Farhan', " + + randomNumber + + i + + "); "; + statement.addBatch(insertQuery); + } + for (int i = 0; i < 15; i++) { + String insertQuery = + "INSERT INTO " + + DATASET2 + + "." 
+ + BATCH_TABLE + + " (id, name, age) " + + "VALUES (12, 'Farhan', " + + randomNumber + + i + + "); "; + statement.addBatch(insertQuery); + } + results = statement.executeBatch(); + + // assertions + for (int updateCount : results) { + assertEquals(1, updateCount); + } + + // do a select to validate row count on each + String selectQuery = String.format("SELECT id, name, age FROM %s.%s ;", DATASET, BATCH_TABLE); + ResultSet resultSet = bigQueryStatement.executeQuery(selectQuery); + assertEquals(15, resultSetRowCount(resultSet)); + selectQuery = String.format("SELECT id, name, age FROM %s.%s ;", DATASET2, BATCH_TABLE); + resultSet = bigQueryStatement.executeQuery(selectQuery); + assertEquals(15, resultSetRowCount(resultSet)); + bigQueryStatement.execute(String.format("DROP TABLE IF EXISTS %S.%s", DATASET, BATCH_TABLE)); + } + + @Test + public void testValidExecuteBatchWithMultipleTables() throws SQLException { + // setup + String BATCH_TABLE = "JDBC_EXECUTE_BATCH_TABLE_MULTI_TABLES_" + random.nextInt(99); + String BATCH_TABLE_2 = "JDBC_EXECUTE_BATCH_TABLE_MULTI_TABLES_" + random.nextInt(99); + String createBatchTable = + String.format( + "CREATE OR REPLACE TABLE %s.%s (`id` INTEGER, `name` STRING, `age` INTEGER);", + DATASET, BATCH_TABLE); + String createBatchTable2 = + String.format( + "CREATE OR REPLACE TABLE %s.%s (`id` INTEGER, `name` STRING, `age` INTEGER);", + DATASET, BATCH_TABLE_2); + bigQueryStatement.execute(createBatchTable); + bigQueryStatement.execute(createBatchTable2); + Driver driver = BigQueryDriver.getRegisteredDriver(); + Connection connection = driver.connect(connection_uri, new Properties()); + Statement statement = connection.createStatement(); + + // act + int[] results; + for (int i = 0; i < 5; i++) { + String insertQuery = + "INSERT INTO " + + DATASET + + "." 
+ + BATCH_TABLE + + " (id, name, age) " + + "VALUES (12, 'Farhan', " + + randomNumber + + i + + "); "; + statement.addBatch(insertQuery); + } + for (int i = 0; i < 5; i++) { + String insertQuery = + "INSERT INTO " + + DATASET + + "." + + BATCH_TABLE_2 + + " (id, name, age) " + + "VALUES (12, 'Farhan', " + + randomNumber + + i + + "); "; + statement.addBatch(insertQuery); + } + results = statement.executeBatch(); + + // assertions + for (int updateCount : results) { + assertEquals(1, updateCount); + } + + // do a select to test row count on each + String selectQuery = String.format("SELECT id, name, age FROM %s.%s ;", DATASET, BATCH_TABLE); + ResultSet resultSet = bigQueryStatement.executeQuery(selectQuery); + assertEquals(5, resultSetRowCount(resultSet)); + selectQuery = String.format("SELECT id, name, age FROM %s.%s ;", DATASET, BATCH_TABLE_2); + resultSet = bigQueryStatement.executeQuery(selectQuery); + assertEquals(5, resultSetRowCount(resultSet)); + bigQueryStatement.execute(String.format("DROP TABLE IF EXISTS %S.%s", DATASET, BATCH_TABLE)); + bigQueryStatement.execute(String.format("DROP TABLE IF EXISTS %S.%s", DATASET, BATCH_TABLE_2)); + } + + @Test + public void testPreparedStatementExecuteUpdate() throws SQLException { + Random random = new Random(); + String DATASET = "JDBC_INTEGRATION_DATASET"; + String TABLE_NAME1 = "Inventory" + random.nextInt(9999); + String TABLE_NAME2 = "DetailedInventory" + random.nextInt(9999); + + String createQuery = + String.format( + "CREATE OR REPLACE TABLE %s.%s (`product` STRING, `quantity` INTEGER);", + DATASET, TABLE_NAME1); + + String createQuery2 = + String.format( + "CREATE OR REPLACE TABLE %s.%s (`product` STRING, `quantity` INTEGER," + + " `supply_constrained` BOOLEAN, `comment` STRING);", + DATASET, TABLE_NAME2); + + String insertQuery2 = + String.format( + "INSERT INTO %s.%s (product, quantity, supply_constrained, comment) " + + "VALUES ('countertop microwave', 20, NULL,'[]' )," + + " ('front load washer', 20, 
false,'[]' ), " + + " ('microwave', 20, false,'[]' ), " + + " ('refrigerator', 10, false,'[]' );", + DATASET, TABLE_NAME2); + + bigQueryStatement.execute(createQuery); + bigQueryStatement.execute(createQuery2); + bigQueryStatement.execute(insertQuery2); + + String insertQuery = + String.format( + "INSERT INTO %s.%s (product, quantity) " + + "VALUES (?,? ), (?,? ), (?,? ), (?,? ), (?,? ), (?,? );", + DATASET, TABLE_NAME1); + PreparedStatement insertPs = bigQueryConnection.prepareStatement(insertQuery); + insertPs.setString(1, "dishwasher"); + insertPs.setInt(2, 30); + insertPs.setString(3, "dryer"); + insertPs.setInt(4, 30); + insertPs.setString(5, "front load washer"); + insertPs.setInt(6, 20); + insertPs.setString(7, "microwave"); + insertPs.setInt(8, 20); + insertPs.setString(9, "oven"); + insertPs.setInt(10, 5); + insertPs.setString(11, "top load washer"); + insertPs.setInt(12, 10); + + int insertStatus = insertPs.executeUpdate(); + assertEquals(6, insertStatus); + + String updateQuery = + String.format("UPDATE %s.%s SET quantity=? WHERE product=?", DATASET, TABLE_NAME1); + PreparedStatement updatePs = bigQueryConnection.prepareStatement(updateQuery); + updatePs.setString(2, "dryer"); + updatePs.setInt(1, 35); + + int updateStatus = updatePs.executeUpdate(); + assertEquals(1, updateStatus); + + String deleteQuery = String.format("DELETE FROM %s.%s WHERE product=?", DATASET, TABLE_NAME1); + PreparedStatement deletePs = bigQueryConnection.prepareStatement(deleteQuery); + deletePs.setString(1, "dishwasher"); + + int deleteStatus = deletePs.executeUpdate(); + assertEquals(1, deleteStatus); + + String mergeQuery = + String.format( + "MERGE %s.%s T\n" + + "USING %s.%s S\n" + + "ON T.product = S.product\n" + + "WHEN NOT MATCHED AND quantity < ? 
THEN\n" + + " INSERT(product, quantity, supply_constrained, comment)\n" + + " VALUES(product, quantity, true, ?)\n" + + "WHEN NOT MATCHED THEN\n" + + " INSERT(product, quantity, supply_constrained)\n" + + " VALUES(product, quantity, false)", + DATASET, TABLE_NAME2, DATASET, TABLE_NAME1); + PreparedStatement mergePs = bigQueryConnection.prepareStatement(mergeQuery); + mergePs.setInt(1, 20); + mergePs.setString(2, "comment" + random.nextInt(999)); + + int mergeStatus = mergePs.executeUpdate(); + assertEquals(3, mergeStatus); + + ResultSet rs = + bigQueryStatement.executeQuery( + String.format("SELECT COUNT(*) AS row_count\n" + "FROM %s.%s", DATASET, TABLE_NAME2)); + rs.next(); + assertEquals(7, rs.getInt(1)); + + String dropQuery = String.format("DROP TABLE %s.%s", DATASET, TABLE_NAME1); + int dropStatus = bigQueryStatement.executeUpdate(dropQuery); + assertEquals(0, dropStatus); + bigQueryStatement.execute(String.format("DROP TABLE %s.%s", DATASET, TABLE_NAME2)); + } + + @Test + public void testFailedStatementInTheMiddleOfExecuteBatchStopsExecuting() throws SQLException { + // setup + String BATCH_TABLE = "JDBC_EXECUTE_BATCH_TABLE_ERROR_IN_MIDDLE_" + random.nextInt(99); + String createBatchTable = + String.format( + "CREATE OR REPLACE TABLE %s.%s (`id` INTEGER, `name` STRING, `age` INTEGER);", + DATASET, BATCH_TABLE); + String createBatchTable2 = + String.format( + "CREATE OR REPLACE TABLE %s.%s (`id` INTEGER, `name` STRING, `age` INTEGER);", + DATASET2, BATCH_TABLE); + bigQueryStatement.execute(createBatchTable); + bigQueryStatement.execute(createBatchTable2); + Driver driver = BigQueryDriver.getRegisteredDriver(); + Connection connection = driver.connect(connection_uri, new Properties()); + Statement statement = connection.createStatement(); + String dropQuery = String.format("DROP TABLE %s.%s", DATASET2, BATCH_TABLE); + + // act + for (int i = 0; i < 20; i++) { + if (i == 10) { + statement.addBatch( + "INSERT INTO " + + DATASET2 + + "." 
+ + BATCH_TABLE + + " (id, name, age) " + + "VALUES (12, 'Farhan', " + + randomNumber + + i + + "); "); + } else { + statement.addBatch( + "INSERT INTO " + + DATASET + + "." + + BATCH_TABLE + + " (id, name, age) " + + "VALUES (12, 'Farhan', " + + randomNumber + + i + + "); "); + } + } + bigQueryStatement.execute(dropQuery); + + // assertions + assertThrows(BigQueryJdbcException.class, statement::executeBatch); + String selectQuery = String.format("SELECT id, name, age FROM %s.%s ;", DATASET, BATCH_TABLE); + ResultSet resultSet = bigQueryStatement.executeQuery(selectQuery); + assertEquals(10, resultSetRowCount(resultSet)); + bigQueryStatement.execute(String.format("DROP TABLE %s.%s", DATASET, BATCH_TABLE)); + } + + @Test + public void testHTAPIWithValidDestinationTableSavesQueriesWithLegacy() throws SQLException { + // setup + String connection_uri = + "jdbc:bigquery://https://www.googleapis.com/bigquery/v2:443;" + + "OAuthType=3;" + + "ProjectId=" + + PROJECT_ID + + ";QueryDialect=BIG_QUERY;" + + "LargeResultTable=destination_table_test;" + + "LargeResultDataset=INTEGRATION_TESTS;" + + "EnableHighThroughputAPI=1;"; + String selectLegacyQuery = + "SELECT * FROM [bigquery-public-data.deepmind_alphafold.metadata] LIMIT 200000;"; + Driver driver = BigQueryDriver.getRegisteredDriver(); + Connection connection = driver.connect(connection_uri, new Properties()); + Statement statement = connection.createStatement(); + + // act + ResultSet resultSet = statement.executeQuery(selectLegacyQuery); + + // assertion + assertNotNull(resultSet); + String selectQuery = "SELECT * FROM INTEGRATION_TESTS.destination_table_test;"; + ResultSet actualResultSet = bigQueryStatement.executeQuery(selectQuery); + assertTrue(0 < resultSetRowCount(actualResultSet)); + + // clean up + String deleteRows = "DELETE FROM `INTEGRATION_TESTS.destination_table_test` WHERE 1=1;"; + bigQueryStatement.execute(deleteRows); + } + + @Test + public void testMultiStatementTransactionRollbackByUser() throws 
SQLException { + String TRANSACTION_TABLE = "JDBC_TRANSACTION_TABLE" + random.nextInt(99); + String createTransactionTable = + String.format( + "CREATE OR REPLACE TABLE %s.%s (`id` INTEGER, `name` STRING, `age` INTEGER);", + DATASET, TRANSACTION_TABLE); + String insertQuery = + String.format( + "INSERT INTO %s.%s (id, name, age) VALUES (12, 'Farhan', %s);", + DATASET, TRANSACTION_TABLE, randomNumber); + String updateQuery = + String.format( + "UPDATE %s.%s SET age = 14 WHERE age = %s;", DATASET, TRANSACTION_TABLE, randomNumber); + String selectQuery = + String.format("SELECT id, name, age FROM %s.%s WHERE id = 12;", DATASET, TRANSACTION_TABLE); + + bigQueryStatement.execute(createTransactionTable); + + BigQueryConnection connection = + (BigQueryConnection) DriverManager.getConnection(session_enabled_connection_uri); + connection.setAutoCommit(false); + Statement statement = connection.createStatement(); + assertTrue(connection.isTransactionStarted()); + + boolean status = statement.execute(insertQuery); + assertFalse(status); + int rows = statement.executeUpdate(updateQuery); + assertEquals(1, rows); + status = statement.execute(selectQuery); + assertTrue(status); + connection.rollback(); + assertTrue( + "After rollback() in manual commit mode, a new transaction should be started.", + connection.isTransactionStarted()); + + // Separate query to check if transaction rollback worked + ResultSet resultSet = bigQueryStatement.executeQuery(selectQuery); + assertFalse(resultSet.next()); + + bigQueryStatement.execute( + String.format("DROP TABLE IF EXISTS %S.%s", DATASET, TRANSACTION_TABLE)); + connection.close(); + } + + @Test + public void testMultiStatementTransactionDoesNotCommitWithoutCommit() throws SQLException { + String TRANSACTION_TABLE = "JDBC_TRANSACTION_TABLE" + random.nextInt(99); + String createTransactionTable = + String.format( + "CREATE OR REPLACE TABLE %s.%s (`id` INTEGER, `name` STRING, `age` INTEGER);", + DATASET, TRANSACTION_TABLE); + String 
insertQuery = + String.format( + "INSERT INTO %s.%s (id, name, age) VALUES (12, 'Farhan', %s);", + DATASET, TRANSACTION_TABLE, randomNumber); + String updateQuery = + String.format( + "UPDATE %s.%s SET age = 14 WHERE age = %s;", DATASET, TRANSACTION_TABLE, randomNumber); + String selectQuery = + String.format("SELECT id, name, age FROM %s.%s WHERE id = 12;", DATASET, TRANSACTION_TABLE); + + bigQueryStatement.execute(createTransactionTable); + BigQueryConnection connection = + (BigQueryConnection) DriverManager.getConnection(session_enabled_connection_uri); + connection.setAutoCommit(false); + Statement statement = connection.createStatement(); + assertTrue(connection.isTransactionStarted()); + + boolean status = statement.execute(insertQuery); + assertFalse(status); + int rows = statement.executeUpdate(updateQuery); + assertEquals(1, rows); + status = statement.execute(selectQuery); + assertTrue(status); + + // Separate query to check nothing committed + ResultSet resultSet = bigQueryStatement.executeQuery(selectQuery); + assertFalse(resultSet.next()); + + bigQueryStatement.execute( + String.format("DROP TABLE IF EXISTS %S.%s", DATASET, TRANSACTION_TABLE)); + statement.close(); + connection.close(); + } + + @Test + public void testValidMultiStatementTransactionCommits() throws SQLException { + String TRANSACTION_TABLE = "JDBC_TRANSACTION_TABLE" + random.nextInt(99); + String createTransactionTable = + String.format( + "CREATE OR REPLACE TABLE %s.%s (`id` INTEGER, `name` STRING, `age` INTEGER);", + DATASET, TRANSACTION_TABLE); + String insertQuery = + String.format( + "INSERT INTO %s.%s (id, name, age) VALUES (12, 'Farhan', %s);", + DATASET, TRANSACTION_TABLE, randomNumber); + String updateQuery = + String.format( + "UPDATE %s.%s SET age = 14 WHERE age = %s;", DATASET, TRANSACTION_TABLE, randomNumber); + String selectQuery = + String.format("SELECT id, name, age FROM %s.%s WHERE id = 12;", DATASET, TRANSACTION_TABLE); + + 
bigQueryStatement.execute(createTransactionTable); + BigQueryConnection connection = + (BigQueryConnection) DriverManager.getConnection(session_enabled_connection_uri); + connection.setAutoCommit(false); + Statement statement = connection.createStatement(); + assertTrue(connection.isTransactionStarted()); + + boolean status = statement.execute(insertQuery); + assertFalse(status); + status = statement.execute(updateQuery); + assertFalse(status); + status = statement.execute(selectQuery); + assertTrue(status); + connection.commit(); + + // Separate query to check inserted and updated data committed + ResultSet resultSet = bigQueryStatement.executeQuery(selectQuery); + assertTrue(resultSet.next()); + assertEquals(14, resultSet.getInt(3)); + + bigQueryStatement.execute( + String.format("DROP TABLE IF EXISTS %S.%s", DATASET, TRANSACTION_TABLE)); + statement.close(); + connection.close(); + } + + @Test + public void testConnectionWithMultipleTransactionCommits() throws SQLException { + String TRANSACTION_TABLE = "JDBC_TRANSACTION_TABLE" + random.nextInt(99); + String createTransactionTable = + String.format( + "CREATE OR REPLACE TABLE %s.%s (`id` INTEGER, `name` STRING, `age` INTEGER);", + DATASET, TRANSACTION_TABLE); + String insertQuery = + String.format( + "INSERT INTO %s.%s (id, name, age) VALUES (12, 'Farhan', %s);", + DATASET, TRANSACTION_TABLE, randomNumber); + String updateQuery = + String.format( + "UPDATE %s.%s SET age = 14 WHERE age = %s;", DATASET, TRANSACTION_TABLE, randomNumber); + String selectQuery = + String.format("SELECT id, name, age FROM %s.%s WHERE id = 12;", DATASET, TRANSACTION_TABLE); + + bigQueryStatement.execute(createTransactionTable); + BigQueryConnection connection = + (BigQueryConnection) DriverManager.getConnection(session_enabled_connection_uri); + connection.setAutoCommit(false); + + Statement statement = connection.createStatement(); + assertTrue(connection.isTransactionStarted()); + boolean status = statement.execute(insertQuery); + 
assertFalse(status); + status = statement.execute(updateQuery); + assertFalse(status); + status = statement.execute(selectQuery); + assertTrue(status); + connection.commit(); + + connection.setAutoCommit(false); + assertTrue(connection.isTransactionStarted()); + statement.execute(insertQuery); + connection.rollback(); + + ResultSet resultSet = bigQueryStatement.executeQuery(selectQuery); + int count = 0; + while (resultSet.next()) { + count++; + } + assertEquals(1, count); + + bigQueryStatement.execute( + String.format("DROP TABLE IF EXISTS %S.%s", DATASET, TRANSACTION_TABLE)); + connection.close(); + } + + @Test + public void testTransactionRollbackOnError() throws SQLException { + String TRANSACTION_TABLE = "JDBC_TRANSACTION_TABLE" + random.nextInt(99); + String createTransactionTable = + String.format( + "CREATE OR REPLACE TABLE %s.%s (`id` INTEGER, `name` STRING, `age` INTEGER);", + DATASET, TRANSACTION_TABLE); + String selectQuery = + String.format("SELECT id, name, age FROM %s.%s ;", DATASET, TRANSACTION_TABLE); + + bigQueryStatement.execute(createTransactionTable); + String transactionOnError = + "BEGIN\n" + + "\n" + + " BEGIN TRANSACTION;\n" + + " INSERT INTO " + + DATASET + + "." 
+ + TRANSACTION_TABLE + + "\n" + + " VALUES (39, 'Drake', 123);\n" + + " SELECT 1/0;\n" + + " COMMIT TRANSACTION;\n" + + "\n" + + "EXCEPTION WHEN ERROR THEN\n" + + " SELECT @@error.message;\n" + + " ROLLBACK TRANSACTION;\n" + + "END;"; + BigQueryConnection connection = + (BigQueryConnection) DriverManager.getConnection(session_enabled_connection_uri); + Statement statement = connection.createStatement(); + statement.execute(transactionOnError); + + // do a check to see if no vals inserted + ResultSet resultSet = bigQueryStatement.executeQuery(selectQuery); + assertFalse(resultSet.next()); + + bigQueryStatement.execute( + String.format("DROP TABLE IF EXISTS %S.%s", DATASET, TRANSACTION_TABLE)); + connection.close(); + } + + @Test + public void testClearBatchClears() throws SQLException { + // setup + String BATCH_TABLE = "JDBC_EXECUTE_BATCH_TABLE_CLEAR_BATCH_" + random.nextInt(99); + String createBatchTable = + String.format( + "CREATE OR REPLACE TABLE %s.%s (`id` INTEGER, `name` STRING, `age` INTEGER);", + DATASET, BATCH_TABLE); + bigQueryStatement.execute(createBatchTable); + String updateQuery = + String.format( + "UPDATE %s.%s SET age = 13 WHERE age = %s;", DATASET, BATCH_TABLE, randomNumber); + Driver driver = BigQueryDriver.getRegisteredDriver(); + Connection connection = driver.connect(connection_uri, new Properties()); + Statement statement = connection.createStatement(); + + // act + statement.addBatch(updateQuery); + statement.clearBatch(); + int[] results = statement.executeBatch(); + + // assertion + assertEquals(0, results.length); + bigQueryStatement.execute(String.format("DROP TABLE IF EXISTS %S.%s", DATASET, BATCH_TABLE)); + connection.close(); + } + + @Test + public void testMultipleExecuteBatches() throws SQLException { + // setup + String BATCH_TABLE = "JDBC_EXECUTE_BATCH_TABLE_MULTI_BATCHES_" + random.nextInt(99); + String createBatchTable = + String.format( + "CREATE OR REPLACE TABLE %s.%s (`id` INTEGER, `name` STRING, `age` INTEGER);", + 
DATASET, BATCH_TABLE); + String insertQuery = + "INSERT INTO " + + DATASET + + "." + + BATCH_TABLE + + " (id, name, age) " + + "VALUES (12, 'Farhan', " + + randomNumber + + "); "; + String insertQuery2 = + "INSERT INTO " + + DATASET + + "." + + BATCH_TABLE + + " (id, name, age) " + + "VALUES (12, 'Farhan', 123), " + + " (12, 'Farhan', 123); "; + bigQueryStatement.execute(createBatchTable); + Driver driver = BigQueryDriver.getRegisteredDriver(); + Connection connection = driver.connect(connection_uri, new Properties()); + Statement statement = connection.createStatement(); + + // act + // batch bypasses the 16 concurrent limit + statement.addBatch(insertQuery); + int[] firstResults = statement.executeBatch(); + statement.addBatch(insertQuery2); + int[] secondResults = statement.executeBatch(); + + // assertions + assertEquals(1, firstResults.length); + assertEquals(1, secondResults.length); + assertEquals(1, firstResults[0]); + assertEquals(2, secondResults[0]); + bigQueryStatement.execute(String.format("DROP TABLE IF EXISTS %S.%s", DATASET, BATCH_TABLE)); + connection.close(); + } + + @Test + public void testValidAllDataTypesSerializationFromSelectQuery() throws SQLException { + String DATASET = "JDBC_INTEGRATION_DATASET"; + String TABLE_NAME = "JDBC_DATATYPES_INTEGRATION_TEST_TABLE"; + String selectQuery = "select * from " + DATASET + "." 
+ TABLE_NAME; + + ResultSet resultSet = bigQueryStatement.executeQuery(selectQuery); + assertNotNull(resultSet); + ResultSetMetaData resultSetMetaData = resultSet.getMetaData(); + resultSet.next(); + assertEquals(16, resultSetMetaData.getColumnCount()); + assertTrue(resultSet.getBoolean(1)); + assertEquals(33, resultSet.getInt(2)); + assertEquals(50.05f, resultSet.getFloat(3), 0.0); + assertEquals(123.456, resultSet.getDouble(4), 0.0); + assertEquals(123.456789, resultSet.getDouble(5), 0.0); + assertEquals("testString", resultSet.getString(6)); + assertEquals("Test String", new String(resultSet.getBytes(7), StandardCharsets.UTF_8)); + Struct expectedStruct = (Struct) resultSet.getObject(8); + assertThat(expectedStruct.getAttributes()).isEqualTo(asList("Eric", 10L).toArray()); + assertArrayEquals( + new String[] {"one", "two", "three"}, (String[]) resultSet.getArray(9).getArray()); + + assertEquals(Timestamp.valueOf("2020-04-27 18:07:25.356456"), resultSet.getObject(10)); + assertEquals(Date.valueOf("2019-1-12"), resultSet.getObject(11)); + assertEquals(Time.valueOf("14:00:00"), resultSet.getObject(12)); + assertEquals(Timestamp.valueOf("2019-02-17 11:24:00"), resultSet.getObject(13)); + assertEquals("POINT(1 2)", resultSet.getString(14)); + assertEquals("{\"class\":{\"students\":[{\"name\":\"Jane\"}]}}", resultSet.getString(15)); + assertEquals("123-7 -19 0:24:12.000006", resultSet.getString(16)); + } + + @Test + public void testRepeatedStructFromSelectQuery() throws SQLException { + String DATASET = "JDBC_INTEGRATION_DATASET"; + String TABLE_NAME = "JDBC_REPEATED_STRUCT_INTEGRATION_TEST"; + String selectQuery = "select * from " + DATASET + "." 
+ TABLE_NAME; + + ResultSet resultSet = bigQueryStatement.executeQuery(selectQuery); + assertNotNull(resultSet); + resultSet.next(); + + Struct[] repeatedStruct = (Struct[]) resultSet.getArray(1).getArray(); + assertEquals(3, Arrays.stream(repeatedStruct).count()); + + Object[] alice = repeatedStruct[0].getAttributes(); + Object[] bob = repeatedStruct[1].getAttributes(); + Object[] charlie = repeatedStruct[2].getAttributes(); + assertEquals("Alice", alice[0]); + assertEquals("30", alice[1]); + assertEquals("Bob", bob[0]); + assertEquals("25", bob[1]); + assertEquals("Charlie", charlie[0]); + assertEquals("35", charlie[1]); + } + + @Test + public void testValidAllDataTypesSerializationFromSelectQueryArrowDataset() throws SQLException { + String DATASET = "JDBC_INTEGRATION_DATASET"; + String TABLE_NAME = "JDBC_INTEGRATION_ARROW_TEST_TABLE"; + String selectQuery = "select * from " + DATASET + "." + TABLE_NAME + " LIMIT 5000;"; + String connection_uri = + "jdbc:bigquery://https://www.googleapis.com/bigquery/v2:443;" + + "OAuthType=3;ProjectId=" + + PROJECT_ID + + ";EnableHighThroughputAPI=1;" + + "HighThroughputActivationRatio=2;" + + "HighThroughputMinTableSize=1000;"; + + // Read data via JDBC + Connection connection = DriverManager.getConnection(connection_uri); + Statement statement = connection.createStatement(); + ResultSet resultSet = statement.executeQuery(selectQuery); + assertNotNull(resultSet); + + ResultSetMetaData resultSetMetaData = resultSet.getMetaData(); + resultSet.next(); + assertEquals(15, resultSetMetaData.getColumnCount()); + assertTrue(resultSet.getBoolean(1)); + assertEquals(33, resultSet.getInt(2)); + assertEquals(50.05f, resultSet.getFloat(3), 0.0); + assertEquals(123.456, resultSet.getDouble(4), 0.0); + assertEquals(123.456789, resultSet.getDouble(5), 0.0); + assertEquals("testString", resultSet.getString(6)); + assertEquals("Test String", new String(resultSet.getBytes(7), StandardCharsets.UTF_8)); + Struct expectedStruct = (Struct) 
resultSet.getObject(8); + assertThat(expectedStruct.getAttributes()).isEqualTo(asList("Eric", 10L).toArray()); + assertEquals("{\"name\":\"Eric\",\"age\":10}", expectedStruct.toString()); + assertArrayEquals( + new String[] {"one", "two", "three"}, (String[]) resultSet.getArray(9).getArray()); + assertEquals(Timestamp.valueOf("2020-04-27 18:07:25.356"), resultSet.getObject(10)); + assertEquals(Timestamp.valueOf("2020-04-27 18:07:25.356"), resultSet.getTimestamp(10)); + assertEquals(Date.valueOf("2019-1-12"), resultSet.getObject(11)); + assertEquals(Date.valueOf("2019-1-12"), resultSet.getDate(11)); + assertEquals(Time.valueOf("14:00:00"), resultSet.getObject(12)); + assertEquals(Time.valueOf("14:00:00"), resultSet.getTime(12)); + assertEquals(Timestamp.valueOf("2022-01-22 22:22:12.142265"), resultSet.getObject(13)); + assertEquals("POINT(1 2)", resultSet.getString(14)); + assertEquals("{\"class\":{\"students\":[{\"name\":\"Jane\"}]}}", resultSet.getString(15)); + connection.close(); + } + + /////////////// MARKER + /// + + @Test + public void testBulkInsertOperation() throws SQLException { + String TABLE_NAME = "JDBC_BULK_INSERT_TABLE_" + randomNumber; + String createQuery = + String.format( + "CREATE OR REPLACE TABLE %s.%s (`StringField` STRING,\n" + + " `IntegerField` INTEGER," + + " `FloatField` FLOAT64," + + " `NumericField` NUMERIC," + + " `BigNumericField` BIGNUMERIC," + + " `BooleanField` BOOLEAN" + + " );", + DATASET, TABLE_NAME); + String insertQuery = + String.format("INSERT INTO %s.%s VALUES(?, ?, ?,?, ?, ?);", DATASET, TABLE_NAME); + String dropQuery = String.format("DROP TABLE %s.%s", DATASET, TABLE_NAME); + String selectQuery = String.format("SELECT * FROM %s.%s", DATASET, TABLE_NAME); + + String connection_uri = + "jdbc:bigquery://https://www.googleapis.com/bigquery/v2:443;PROJECTID=" + + PROJECT_ID + + ";OAUTHTYPE=3;" + + "EnableWriteAPI=1;SWA_ActivationRowCount=5;SWA_AppendRowCount=500"; + + try (Connection connection = 
DriverManager.getConnection(connection_uri)) { + bigQueryStatement.execute(createQuery); + PreparedStatement statement = connection.prepareStatement(insertQuery); + for (int i = 0; i < 20; ++i) { + statement.setString(1, i + "StringField"); + statement.setInt(2, i); + statement.setFloat(3, (float) (i + .6)); + statement.setInt(4, random.nextInt()); + statement.setInt(5, random.nextInt()); + statement.setBoolean(6, true); + + statement.addBatch(); + } + int[] result = statement.executeBatch(); + + ResultSet resultSet = bigQueryStatement.executeQuery(selectQuery); + assertEquals(result.length, resultSetRowCount(resultSet)); + + bigQueryStatement.execute(dropQuery); + + } catch (SQLException e) { + throw new BigQueryJdbcException(e); + } + } + + @Test + public void testBulkInsertOperationStandard() throws SQLException { + String TABLE_NAME = "JDBC_BULK_INSERT_STANDARD_TABLE_" + randomNumber; + String createQuery = + String.format( + "CREATE OR REPLACE TABLE %s.%s (`StringField` STRING,\n" + + " `IntegerField` INTEGER," + + " `FloatField` FLOAT64," + + " `NumericField` NUMERIC," + + " `BigNumericField` BIGNUMERIC," + + " `BooleanField` BOOLEAN" + + " );", + DATASET, TABLE_NAME); + String insertQuery = + String.format("INSERT INTO %s.%s VALUES(?, ?, ?,?, ?, ?);", DATASET, TABLE_NAME); + String dropQuery = String.format("DROP TABLE %s.%s", DATASET, TABLE_NAME); + String selectQuery = String.format("SELECT * FROM %s.%s", DATASET, TABLE_NAME); + + String connection_uri = + "jdbc:bigquery://https://www.googleapis.com/bigquery/v2:443;PROJECTID=" + + PROJECT_ID + + ";OAUTHTYPE=3;" + + "EnableWriteAPI=0;SWA_ActivationRowCount=50;SWA_AppendRowCount=500"; + + try (Connection connection = DriverManager.getConnection(connection_uri)) { + bigQueryStatement.execute(createQuery); + PreparedStatement statement = connection.prepareStatement(insertQuery); + for (int i = 0; i < 20; ++i) { + statement.setString(1, i + "StringField"); + statement.setInt(2, i); + statement.setFloat(3, 
(float) (i + .6)); + statement.setInt(4, random.nextInt()); + statement.setInt(5, random.nextInt()); + statement.setBoolean(6, true); + + statement.addBatch(); + } + int[] result = statement.executeBatch(); + + ResultSet resultSet = bigQueryStatement.executeQuery(selectQuery); + assertEquals(result.length, resultSetRowCount(resultSet)); + + bigQueryStatement.execute(dropQuery); + + } catch (SQLException e) { + throw new BigQueryJdbcException(e); + } + } + + @Test + public void testExecuteQueryWithSetMaxRows() throws SQLException { + String TEST_MAX_ROWS_TABLE = "JDBC_TEST_MAX_ROWS_TABLE" + random.nextInt(99); + int id1 = random.nextInt(99); + int id2 = random.nextInt(99); + String createMaxRowsTable = + String.format( + "CREATE OR REPLACE TABLE %s.%s (`id` INTEGER, `name` STRING);", + DATASET, TEST_MAX_ROWS_TABLE); + String insertQuery1 = + String.format( + "INSERT INTO %s.%s (id, name) VALUES (%s, 'max-rows-test-1');", + DATASET, TEST_MAX_ROWS_TABLE, id1); + String insertQuery2 = + String.format( + "INSERT INTO %s.%s (id, name) VALUES (%s, 'max-rows-test-2');", + DATASET, TEST_MAX_ROWS_TABLE, id2); + String selectQuery = String.format("SELECT id, name FROM %s.%s;", DATASET, TEST_MAX_ROWS_TABLE); + + boolean executeResult = bigQueryStatement.execute(createMaxRowsTable); + assertFalse(executeResult); + int rowsInserted = bigQueryStatement.executeUpdate(insertQuery1); + assertEquals(1, rowsInserted); + rowsInserted = bigQueryStatement.executeUpdate(insertQuery2); + assertEquals(1, rowsInserted); + + bigQueryStatement.setMaxRows(1); + ResultSet resultSet = bigQueryStatement.executeQuery(selectQuery); + assertNotNull(resultSet); + assertEquals(1, resultSetRowCount(resultSet)); + + bigQueryStatement.execute( + String.format("DROP TABLE IF EXISTS %S.%s", DATASET, TEST_MAX_ROWS_TABLE)); + } + + @Test + public void testExecuteQueryWithoutSetMaxRows() throws SQLException { + String TEST_MAX_ROWS_TABLE = "JDBC_TEST_MAX_ROWS_TABLE" + random.nextInt(99); + int id1 = 
random.nextInt(99); + int id2 = random.nextInt(99); + String createMaxRowsTable = + String.format( + "CREATE OR REPLACE TABLE %s.%s (`id` INTEGER, `name` STRING);", + DATASET, TEST_MAX_ROWS_TABLE); + String insertQuery1 = + String.format( + "INSERT INTO %s.%s (id, name) VALUES (%s, 'max-rows-test-1');", + DATASET, TEST_MAX_ROWS_TABLE, id1); + String insertQuery2 = + String.format( + "INSERT INTO %s.%s (id, name) VALUES (%s, 'max-rows-test-2');", + DATASET, TEST_MAX_ROWS_TABLE, id2); + String selectQuery = String.format("SELECT id, name FROM %s.%s;", DATASET, TEST_MAX_ROWS_TABLE); + + boolean executeResult = bigQueryStatement.execute(createMaxRowsTable); + assertFalse(executeResult); + int rowsInserted = bigQueryStatement.executeUpdate(insertQuery1); + assertEquals(1, rowsInserted); + rowsInserted = bigQueryStatement.executeUpdate(insertQuery2); + assertEquals(1, rowsInserted); + + ResultSet resultSet = bigQueryStatement.executeQuery(selectQuery); + assertNotNull(resultSet); + assertEquals(2, resultSetRowCount(resultSet)); + + bigQueryStatement.execute( + String.format("DROP TABLE IF EXISTS %S.%s", DATASET, TEST_MAX_ROWS_TABLE)); + } + + @Test + public void testQueryPropertySessionIdIsUsedWithTransaction() + throws SQLException, InterruptedException { + // setup + String sessionId = getSessionId(); + String TRANSACTION_TABLE = "JDBC_TRANSACTION_TABLE" + random.nextInt(99); + String createTransactionTable = + String.format( + "CREATE OR REPLACE TABLE %s.%s (`id` INTEGER, `name` STRING, `age` INTEGER);", + DATASET, TRANSACTION_TABLE); + String beginTransaction = "BEGIN TRANSACTION; "; + String insertQuery = + String.format( + "INSERT INTO %s.%s (id, name, age) VALUES (12, 'Farhan', %s);", + DATASET, TRANSACTION_TABLE, randomNumber); + String updateQuery = + String.format( + "UPDATE %s.%s SET age = 14 WHERE age = %s;", DATASET, TRANSACTION_TABLE, randomNumber); + String selectQuery = + String.format("SELECT id, name, age FROM %s.%s WHERE id = 12;", DATASET, 
TRANSACTION_TABLE); + String commitTransaction = "COMMIT TRANSACTION;"; + + String transactionQuery = + beginTransaction + + insertQuery + + insertQuery + + updateQuery + + selectQuery + + commitTransaction; + + bigQueryStatement.execute(createTransactionTable); + + // Run the transaction + String connection_uri = + "jdbc:bigquery://https://www.googleapis.com/bigquery/v2:443;" + + "OAuthType=3;" + + "ProjectId=" + + PROJECT_ID + + ";QueryProperties=session_id=" + + sessionId + + ";"; + Driver driver = BigQueryDriver.getRegisteredDriver(); + Connection connection = driver.connect(connection_uri, new Properties()); + Statement statement = connection.createStatement(); + statement.execute(transactionQuery); + + // Test each query's result with getMoreResults + int resultsCount = 0; + boolean hasMoreResult = statement.getMoreResults(); + while (hasMoreResult || statement.getUpdateCount() != -1) { + if (statement.getUpdateCount() == -1) { + ResultSet result = statement.getResultSet(); + assertTrue(result.next()); + assertEquals(-1, statement.getUpdateCount()); + } else { + assertTrue(statement.getUpdateCount() > -1); + } + hasMoreResult = statement.getMoreResults(); + resultsCount++; + } + assertEquals(5, resultsCount); + + // Check the transaction was actually committed. 
+ ResultSet resultSet = bigQueryStatement.executeQuery(selectQuery); + int rowCount = 0; + while (resultSet.next()) { + rowCount++; + assertEquals(14, resultSet.getInt(3)); + } + assertEquals(2, rowCount); + + bigQueryStatement.execute( + String.format("DROP TABLE IF EXISTS %S.%s", DATASET, TRANSACTION_TABLE)); + connection.close(); + } + + @Test + public void testRollbackOnConnectionClosed() throws SQLException { + String TRANSACTION_TABLE = "JDBC_TRANSACTION_TABLE" + random.nextInt(99); + String createTransactionTable = + String.format( + "CREATE OR REPLACE TABLE %s.%s (`id` INTEGER, `name` STRING, `age` INTEGER);", + DATASET, TRANSACTION_TABLE); + String insertQuery = + String.format( + "INSERT INTO %s.%s (id, name, age) VALUES (15, 'Farhan', %s);", + DATASET, TRANSACTION_TABLE, randomNumber); + String updateQuery = + String.format( + "UPDATE %s.%s SET age = 12 WHERE age = %s;", DATASET, TRANSACTION_TABLE, randomNumber); + String selectQuery = + String.format("SELECT id, name, age FROM %s.%s WHERE id = 12;", DATASET, TRANSACTION_TABLE); + + bigQueryStatement.execute(createTransactionTable); + BigQueryConnection connection = + (BigQueryConnection) DriverManager.getConnection(session_enabled_connection_uri); + connection.setAutoCommit(false); + Statement statement = connection.createStatement(); + assertTrue(connection.isTransactionStarted()); + + boolean status = statement.execute(insertQuery); + assertFalse(status); + int rows = statement.executeUpdate(updateQuery); + assertEquals(1, rows); + status = statement.execute(selectQuery); + assertTrue(status); + connection.close(); + + // Separate query to check if transaction rollback worked + ResultSet resultSet = bigQueryStatement.executeQuery(selectQuery); + assertFalse(resultSet.next()); + + bigQueryStatement.execute( + String.format("DROP TABLE IF EXISTS %S.%s", DATASET, TRANSACTION_TABLE)); + } + + @Test + public void testSingleStatementTransaction() throws SQLException { + String TRANSACTION_TABLE = 
"JDBC_TRANSACTION_TABLE" + random.nextInt(99); + String createTransactionTable = + String.format( + "CREATE OR REPLACE TABLE %s.%s (`id` INTEGER, `name` STRING, `age` INTEGER);", + DATASET, TRANSACTION_TABLE); + String beginTransaction = "BEGIN TRANSACTION; "; + String insertQuery = + String.format( + "INSERT INTO %s.%s (id, name, age) VALUES (12, 'Farhan', %s);", + DATASET, TRANSACTION_TABLE, randomNumber); + String updateQuery = + String.format( + "UPDATE %s.%s SET age = 14 WHERE age = %s;", DATASET, TRANSACTION_TABLE, randomNumber); + String selectQuery = + String.format("SELECT id, name, age FROM %s.%s WHERE id = 12;", DATASET, TRANSACTION_TABLE); + String commitTransaction = "COMMIT TRANSACTION;"; + + String transactionQuery = + beginTransaction + + insertQuery + + insertQuery + + updateQuery + + selectQuery + + commitTransaction; + + bigQueryStatement.execute(createTransactionTable); + + // Run the transaction + Connection connection = DriverManager.getConnection(session_enabled_connection_uri); + Statement statement = connection.createStatement(); + statement.execute(transactionQuery); + + // Test each query's result with getMoreResults + int resultsCount = 0; + boolean hasMoreResult = statement.getMoreResults(); + while (hasMoreResult || statement.getUpdateCount() != -1) { + if (statement.getUpdateCount() == -1) { + ResultSet result = statement.getResultSet(); + assertTrue(result.next()); + assertEquals(-1, statement.getUpdateCount()); + } else { + assertTrue(statement.getUpdateCount() > -1); + } + hasMoreResult = statement.getMoreResults(); + resultsCount++; + } + assertEquals(5, resultsCount); + + // Check the transaction was actually committed. 
+ ResultSet resultSet = bigQueryStatement.executeQuery(selectQuery); + int rowCount = 0; + while (resultSet.next()) { + rowCount++; + assertEquals(14, resultSet.getInt(3)); + } + assertEquals(2, rowCount); + + bigQueryStatement.execute( + String.format("DROP TABLE IF EXISTS %S.%s", DATASET, TRANSACTION_TABLE)); + connection.close(); + } + + @Test + public void testConnectionClosedRollsBackStartedTransactions() throws SQLException { + String TRANSACTION_TABLE = "JDBC_TRANSACTION_TABLE" + random.nextInt(99); + String createTransactionTable = + String.format( + "CREATE OR REPLACE TABLE %s.%s (`id` INTEGER, `name` STRING, `age` INTEGER);", + DATASET, TRANSACTION_TABLE); + String insertQuery = + String.format( + "INSERT INTO %s.%s (id, name, age) VALUES (12, 'Farhan', %s);", + DATASET, TRANSACTION_TABLE, randomNumber); + String updateQuery = + String.format( + "UPDATE %s.%s SET age = 14 WHERE age = %s;", DATASET, TRANSACTION_TABLE, randomNumber); + String selectQuery = + String.format("SELECT id, name, age FROM %s.%s WHERE id = 12;", DATASET, TRANSACTION_TABLE); + + bigQueryStatement.execute(createTransactionTable); + + BigQueryConnection connection = + (BigQueryConnection) DriverManager.getConnection(session_enabled_connection_uri); + connection.setAutoCommit(false); + Statement statement = connection.createStatement(); + assertTrue(connection.isTransactionStarted()); + + boolean status = statement.execute(insertQuery); + assertFalse(status); + int rows = statement.executeUpdate(updateQuery); + assertEquals(1, rows); + status = statement.execute(selectQuery); + assertTrue(status); + connection.close(); + + // Separate query to check if transaction rollback worked + ResultSet resultSet = bigQueryStatement.executeQuery(selectQuery); + assertFalse(resultSet.next()); + + bigQueryStatement.execute( + String.format("DROP TABLE IF EXISTS %S.%s", DATASET, TRANSACTION_TABLE)); + } + + @Test + public void testStatelessQueryPathSmall() throws SQLException { + Properties 
jobCreationMode = new Properties(); + jobCreationMode.setProperty("JobCreationMode", "2"); + Connection bigQueryConnectionUseStateless = + DriverManager.getConnection(connection_uri, jobCreationMode); + + Statement statement = bigQueryConnectionUseStateless.createStatement(); + + String query = + "SELECT DISTINCT repository_name FROM `bigquery-public-data.samples.github_timeline` LIMIT" + + " 850"; + ResultSet jsonResultSet = statement.executeQuery(query); + assertTrue(jsonResultSet.getClass().getName().contains("BigQueryJsonResultSet")); + assertEquals(850, resultSetRowCount(jsonResultSet)); + + String queryEmpty = + "SELECT DISTINCT repository_name FROM `bigquery-public-data.samples.github_timeline` LIMIT" + + " 0"; + ResultSet jsonResultSetEmpty = statement.executeQuery(queryEmpty); + assertTrue(jsonResultSetEmpty.getClass().getName().contains("BigQueryJsonResultSet")); + assertEquals(0, resultSetRowCount(jsonResultSetEmpty)); + bigQueryConnectionUseStateless.close(); + } + + @Test + public void testFastQueryPathMedium() throws SQLException { + String query = + "SELECT repository_name FROM `bigquery-public-data.samples.github_timeline` LIMIT 9000"; + ResultSet jsonResultSet = bigQueryStatement.executeQuery(query); + assertTrue(jsonResultSet.getClass().getName().contains("BigQueryJsonResultSet")); + assertEquals(9000, resultSetRowCount(jsonResultSet)); + } + + @Test + public void testFastQueryPathLarge() throws SQLException { + String query = + "SELECT repository_name FROM `bigquery-public-data.samples.github_timeline` LIMIT 18000"; + ResultSet jsonResultSet = bigQueryStatement.executeQuery(query); + assertTrue(jsonResultSet.getClass().getName().contains("BigQueryJsonResultSet")); + assertEquals(18000, resultSetRowCount(jsonResultSet)); + } + + @Test + // reads using ReadAPI and makes sure that they are in order, which implies threads worked + // correctly + public void testIterateOrderArrowMultiThread() throws SQLException { + int expectedCnt = 200000; + String 
longQuery = String.format(BASE_QUERY, expectedCnt); + ResultSet rs = bigQueryStatement.executeQuery(longQuery); + int cnt = 0; + double oldTriDis = 0.0d; + while (rs.next()) { + double tripDis = rs.getDouble("trip_distance"); + ++cnt; + assertTrue(oldTriDis <= tripDis); + oldTriDis = tripDis; + } + assertEquals(expectedCnt, cnt); // all the records were retrieved + } + + @Test + public void testNonEnabledUseLegacySQLThrowsSyntaxError() throws SQLException { + // setup + String connection_uri = + "jdbc:bigquery://https://www.googleapis.com/bigquery/v2:443;" + + "OAuthType=3;" + + "ProjectId=" + + PROJECT_ID + + ";"; + String selectLegacyQuery = + "SELECT * FROM [bigquery-public-data.deepmind_alphafold.metadata] LIMIT 20000000;"; + Driver driver = BigQueryDriver.getRegisteredDriver(); + Connection connection = driver.connect(connection_uri, new Properties()); + Statement statement = connection.createStatement(); + + // act & assertion + assertThrows( + BigQueryJdbcSqlSyntaxErrorException.class, () -> statement.execute(selectLegacyQuery)); + connection.close(); + } + + @Test + public void testFastQueryPathEmpty() throws SQLException { + String query = + "SELECT DISTINCT repository_name FROM `bigquery-public-data.samples.github_timeline` LIMIT" + + " 0"; + ResultSet jsonResultSet = bigQueryStatement.executeQuery(query); + assertTrue(jsonResultSet.getClass().getName().contains("BigQueryJsonResultSet")); + assertEquals(0, resultSetRowCount(jsonResultSet)); + } + + @Test + public void testReadAPIPathLarge() throws SQLException { + Properties withReadApi = new Properties(); + withReadApi.setProperty("EnableHighThroughputAPI", "1"); + withReadApi.setProperty("HighThroughputActivationRatio", "2"); + withReadApi.setProperty("HighThroughputMinTableSize", "1000"); + withReadApi.setProperty("MaxResults", "300"); + + Connection connection = DriverManager.getConnection(connection_uri, withReadApi); + Statement statement = connection.createStatement(); + int expectedCnt = 5000; + 
String longQuery = String.format(BASE_QUERY, expectedCnt); + ResultSet arrowResultSet = statement.executeQuery(longQuery); + assertTrue(arrowResultSet.getClass().getName().contains("BigQueryArrowResultSet")); + assertEquals(expectedCnt, resultSetRowCount(arrowResultSet)); + arrowResultSet.close(); + connection.close(); + } + + @Test + public void testReadAPIPathLargeWithThresholdParameters() throws SQLException { + String connectionUri = + "jdbc:bigquery://https://www.googleapis.com/bigquery/v2:443;PROJECTID=" + + PROJECT_ID + + ";OAUTHTYPE=3;MaxResults=300;HighThroughputActivationRatio=2;" + + "HighThroughputMinTableSize=100;EnableHighThroughputAPI=1"; + Connection connection = DriverManager.getConnection(connectionUri); + Statement statement = connection.createStatement(); + int expectedCnt = 1000; + String longQuery = String.format(BASE_QUERY, expectedCnt); + ResultSet arrowResultSet = statement.executeQuery(longQuery); + assertTrue(arrowResultSet.getClass().getName().contains("BigQueryArrowResultSet")); + assertEquals(expectedCnt, resultSetRowCount(arrowResultSet)); + arrowResultSet.close(); + connection.close(); + } + + @Test + public void testReadAPIPathLargeWithThresholdNotMet() throws SQLException { + String connectionUri = + "jdbc:bigquery://https://www.googleapis.com/bigquery/v2:443;PROJECTID=" + + PROJECT_ID + + ";OAUTHTYPE=3;HighThroughputActivationRatio=4;" + + "HighThroughputMinTableSize=100;EnableHighThroughputAPI=1"; + Connection connection = DriverManager.getConnection(connectionUri); + Statement statement = connection.createStatement(); + int expectedCnt = 5000; + String longQuery = String.format(BASE_QUERY, expectedCnt); + ResultSet arrowResultSet = statement.executeQuery(longQuery); + assertTrue(arrowResultSet.getClass().getName().contains("BigQueryJsonResultSet")); + assertEquals(expectedCnt, resultSetRowCount(arrowResultSet)); + arrowResultSet.close(); + connection.close(); + } + + private String getSessionId() throws InterruptedException { + 
QueryJobConfiguration stubJobConfig = + QueryJobConfiguration.newBuilder("Select 1;").setCreateSession(true).build(); + Job job = bigQuery.create(JobInfo.of(stubJobConfig)); + job = job.waitFor(); + Job stubJob = bigQuery.getJob(job.getJobId()); + return stubJob.getStatistics().getSessionInfo().getSessionId(); + } + + private int resultSetRowCount(ResultSet resultSet) throws SQLException { + int rowCount = 0; + while (resultSet.next()) { + rowCount++; + } + return rowCount; + } +} diff --git a/google-cloud-bigquery-jdbc/src/test/java/com/google/cloud/bigquery/jdbc/it/ITPSCBigQueryTest.java b/google-cloud-bigquery-jdbc/src/test/java/com/google/cloud/bigquery/jdbc/it/ITPSCBigQueryTest.java new file mode 100644 index 000000000..1b73f84b6 --- /dev/null +++ b/google-cloud-bigquery-jdbc/src/test/java/com/google/cloud/bigquery/jdbc/it/ITPSCBigQueryTest.java @@ -0,0 +1,300 @@ +/* + * Copyright 2024 Google LLC + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package com.google.cloud.bigquery.jdbc.it; + +import static com.google.common.truth.Truth.assertThat; +import static java.util.Arrays.asList; +import static org.junit.Assert.assertArrayEquals; +import static org.junit.Assert.assertEquals; +import static org.junit.Assert.assertFalse; +import static org.junit.Assert.assertNotNull; +import static org.junit.Assert.assertThrows; +import static org.junit.Assert.assertTrue; + +import com.google.cloud.ServiceOptions; +import com.google.cloud.bigquery.BigQueryException; +import com.google.cloud.bigquery.jdbc.BigQueryConnection; +import java.io.IOException; +import java.nio.charset.StandardCharsets; +import java.sql.Connection; +import java.sql.Date; +import java.sql.DriverManager; +import java.sql.ResultSet; +import java.sql.ResultSetMetaData; +import java.sql.SQLException; +import java.sql.Statement; +import java.sql.Struct; +import java.sql.Time; +import java.sql.Timestamp; +import java.util.Properties; +import org.junit.Test; + +public class ITPSCBigQueryTest { + static final String PROJECT_ID = ServiceOptions.getDefaultProjectId(); + private static final String SERVICE_ACCOUNT_EMAIL = requireEnvVar("SA_EMAIL"); + + private static String requireEnvVar(String varName) { + String value = System.getenv(varName); + assertNotNull( + "Environment variable " + varName + " is required to perform these tests.", + System.getenv(varName)); + return value; + } + + @Test + public void testNoOverrideTimesOut() throws SQLException { + String connection_uri = + "jdbc:bigquery://https://www.googleapis.com/bigquery/v2:443;" + + "ProjectId=" + + PROJECT_ID + + ";OAuthType=3;"; + + Connection connection = DriverManager.getConnection(connection_uri); + assertNotNull(connection); + assertFalse(connection.isClosed()); + assertEquals( + "APPLICATION_DEFAULT_CREDENTIALS", + ((BigQueryConnection) connection).getAuthProperties().get("OAuthType")); + + String query = + "SELECT DISTINCT repository_name FROM 
`bigquery-public-data.samples.github_timeline` LIMIT 850"; + Statement statement = connection.createStatement(); + assertThrows(BigQueryException.class, () -> statement.executeQuery(query)); + } + + @Test + public void testValidADCAuthenticationInPSC() throws SQLException { + String connection_uri = + "jdbc:bigquery://https://www.googleapis.com/bigquery/v2:443;" + + "ProjectId=" + + PROJECT_ID + + ";OAuthType=3;" + + "EndpointOverrides=BIGQUERY=https://bigquery-privateendpoint.p.googleapis.com;"; + + Connection connection = DriverManager.getConnection(connection_uri); + assertNotNull(connection); + assertFalse(connection.isClosed()); + assertEquals( + "APPLICATION_DEFAULT_CREDENTIALS", + ((BigQueryConnection) connection).getAuthProperties().get("OAuthType")); + + String query = + "SELECT DISTINCT repository_name FROM `bigquery-public-data.samples.github_timeline` LIMIT 850"; + Statement statement = connection.createStatement(); + ResultSet jsonResultSet = statement.executeQuery(query); + assertTrue(jsonResultSet.getClass().getName().contains("BigQueryJsonResultSet")); + connection.close(); + } + + @Test + public void testValidOAuthType2AuthenticationInPSC() throws SQLException { + String connection_uri = + "jdbc:bigquery://https://www.googleapis.com/bigquery/v2:443;" + + "ProjectId=" + + PROJECT_ID + + ";OAuthType=2;" + + "OAuthAccessToken=RedactedToken;" // TODO(fahmz): see if there is a way to use SMS + + "EndpointOverrides=BIGQUERY=https://bigquery-privateendpoint.p.googleapis.com;"; + + Connection connection = DriverManager.getConnection(connection_uri); + assertNotNull(connection); + assertFalse(connection.isClosed()); + assertEquals( + "PRE_GENERATED_TOKEN", + ((BigQueryConnection) connection).getAuthProperties().get("OAuthType")); + + String query = + "SELECT DISTINCT repository_name FROM `bigquery-public-data.samples.github_timeline` LIMIT 850"; + Statement statement = connection.createStatement(); + ResultSet jsonResultSet = statement.executeQuery(query); 
+ assertTrue(jsonResultSet.getClass().getName().contains("BigQueryJsonResultSet")); + connection.close(); + } + + @Test + public void testValidServiceAccountAuthenticationKeyFileInPSC() throws SQLException, IOException { + final String SERVICE_ACCOUNT_KEY = requireEnvVar("SA_SECRET"); + String connection_uri = + "jdbc:bigquery://https://www.googleapis.com/bigquery/v2:443;" + + "ProjectId=" + + PROJECT_ID + + ";OAuthType=0;" + + "OAuthPvtKeyPath=" + + SERVICE_ACCOUNT_KEY + + "EndpointOverrides=BIGQUERY=https://bigquery-privateendpoint.p.googleapis.com," + + "OAuth2=https://oauth2-privateendpoint.p.googleapis.com/token;"; + + Connection connection = DriverManager.getConnection(connection_uri); + assertNotNull(connection); + assertFalse(connection.isClosed()); + assertEquals( + "GOOGLE_SERVICE_ACCOUNT", + ((BigQueryConnection) connection).getAuthProperties().get("OAuthType")); + connection.close(); + } + + @Test + public void testValidServiceAccountAuthenticationViaEmailInPSC() throws SQLException { + final String SERVICE_ACCOUNT_KEY = requireEnvVar("SA_SECRET"); + String connection_uri = + "jdbc:bigquery://https://www.googleapis.com/bigquery/v2:443;" + + "ProjectId=" + + PROJECT_ID + + ";OAuthType=0;" + + "OAuthServiceAcctEmail=" + + SERVICE_ACCOUNT_EMAIL + + ";OAuthPvtKey=" + + SERVICE_ACCOUNT_KEY + + ";EndpointOverrides=BIGQUERY=https://bigquery-privateendpoint.p.googleapis.com," + + "OAuth2=https://oauth2-privateendpoint.p.googleapis.com/token;"; + + Connection connection = DriverManager.getConnection(connection_uri); + assertNotNull(connection); + assertFalse(connection.isClosed()); + assertEquals( + "GOOGLE_SERVICE_ACCOUNT", + ((BigQueryConnection) connection).getAuthProperties().get("OAuthType")); + String query = + "SELECT DISTINCT repository_name FROM `bigquery-public-data.samples.github_timeline` LIMIT 850"; + Statement statement = connection.createStatement(); + ResultSet jsonResultSet = statement.executeQuery(query); + 
assertTrue(jsonResultSet.getClass().getName().contains("BigQueryJsonResultSet")); + connection.close(); + } + + @Test + public void testValidAllDataTypesSerializationFromSelectQueryInPSC() throws SQLException { + String connection_uri = + "jdbc:bigquery://https://www.googleapis.com/bigquery/v2:443;" + + "ProjectId=" + + PROJECT_ID + + ";OAuthType=0;" + + "OAuthPvtKeyPath=.\\google-cloud-bigquery-jdbc\\secret.json;" + + "EndpointOverrides=BIGQUERY=https://bigquery-privateendpoint.p.googleapis.com," + + "OAuth2=https://oauth2-privateendpoint.p.googleapis.com/token;"; + Connection bigQueryConnection = DriverManager.getConnection(connection_uri, new Properties()); + Statement bigQueryStatement = bigQueryConnection.createStatement(); + String DATASET = "JDBC_INTEGRATION_DATASET"; + String TABLE_NAME = "JDBC_DATATYPES_INTEGRATION_TEST_TABLE"; + String selectQuery = "select * from " + DATASET + "." + TABLE_NAME; + + ResultSet resultSet = bigQueryStatement.executeQuery(selectQuery); + assertNotNull(resultSet); + ResultSetMetaData resultSetMetaData = resultSet.getMetaData(); + resultSet.next(); + assertEquals(16, resultSetMetaData.getColumnCount()); + assertTrue(resultSet.getBoolean(1)); + assertEquals(33, resultSet.getInt(2)); + assertEquals(50.05f, resultSet.getFloat(3), 0.0); + assertEquals(123.456, resultSet.getDouble(4), 0.0); + assertEquals(123.456789, resultSet.getDouble(5), 0.0); + assertEquals("testString", resultSet.getString(6)); + assertEquals("Test String", new String(resultSet.getBytes(7), StandardCharsets.UTF_8)); + Struct expectedStruct = (Struct) resultSet.getObject(8); + assertThat(expectedStruct.getAttributes()).isEqualTo(asList("Eric", 10).toArray()); + assertArrayEquals( + new String[] {"one", "two", "three"}, (String[]) resultSet.getArray(9).getArray()); + + assertEquals(Timestamp.valueOf("2020-04-27 18:07:25.356456"), resultSet.getObject(10)); + assertEquals(Date.valueOf("2019-1-12"), resultSet.getObject(11)); + assertEquals(Time.valueOf("14:00:00"), 
resultSet.getObject(12)); + assertEquals(Timestamp.valueOf("2019-02-17 11:24:00"), resultSet.getObject(13)); + assertEquals("POINT(1 2)", resultSet.getString(14)); + assertEquals("{\"class\":{\"students\":[{\"name\":\"Jane\"}]}}", resultSet.getString(15)); + assertEquals("123-7 -19 0:24:12.000006", resultSet.getString(16)); + } + + @Test + public void testValidAllDataTypesSerializationFromSelectQueryArrowDatasetInPSC() + throws SQLException { + String DATASET = "JDBC_INTEGRATION_DATASET"; + String TABLE_NAME = "JDBC_INTEGRATION_ARROW_TEST_TABLE"; + String selectQuery = "select * from " + DATASET + "." + TABLE_NAME; + String connection_uri = + "jdbc:bigquery://https://www.googleapis.com/bigquery/v2:443;" + + "ProjectId=" + + PROJECT_ID + + ";OAuthType=0;" + + "OAuthPvtKeyPath=.\\google-cloud-bigquery-jdbc\\secret.json;;" + + "EnableHighThroughputAPI=1;" + + "EndpointOverrides=BIGQUERY=https://bigquery-privateendpoint.p.googleapis.com," + + "READ_API=bigquerystorage-privateendpoint.p.googleapis.com:443," + + "OAuth2=https://oauth2-privateendpoint.p.googleapis.com/token;"; + + // Read data via JDBC + Connection connection = DriverManager.getConnection(connection_uri); + Statement statement = connection.createStatement(); + ResultSet resultSet = statement.executeQuery(selectQuery); + assertNotNull(resultSet); + + ResultSetMetaData resultSetMetaData = resultSet.getMetaData(); + resultSet.next(); + assertEquals(15, resultSetMetaData.getColumnCount()); + assertTrue(resultSet.getBoolean(1)); + assertEquals(33, resultSet.getInt(2)); + assertEquals(50.05f, resultSet.getFloat(3), 0.0); + assertEquals(123.456, resultSet.getDouble(4), 0.0); + assertEquals(123.456789, resultSet.getDouble(5), 0.0); + assertEquals("testString", resultSet.getString(6)); + assertEquals("Test String", new String(resultSet.getBytes(7), StandardCharsets.UTF_8)); + Struct expectedStruct = (Struct) resultSet.getObject(8); + assertThat(expectedStruct.getAttributes()).isEqualTo(asList("Eric", 
10).toArray()); + assertEquals("{\"v\":{\"f\":[{\"v\":\"Eric\"},{\"v\":\"10\"}]}}", expectedStruct.toString()); + assertArrayEquals( + new String[] {"one", "two", "three"}, (String[]) resultSet.getArray(9).getArray()); + assertEquals(Timestamp.valueOf("2020-04-27 18:07:25.356"), resultSet.getObject(10)); + assertEquals(Timestamp.valueOf("2020-04-27 18:07:25.356"), resultSet.getTimestamp(10)); + assertEquals(Date.valueOf("2019-1-12"), resultSet.getObject(11)); + assertEquals(Date.valueOf("2019-1-12"), resultSet.getDate(11)); + assertEquals(Time.valueOf("14:00:00"), resultSet.getObject(12)); + assertEquals(Time.valueOf("14:00:00"), resultSet.getTime(12)); + assertEquals(Timestamp.valueOf("2022-01-22 22:22:12.142265"), resultSet.getObject(13)); + assertEquals("POINT(1 2)", resultSet.getString(14)); + assertEquals("{\"class\":{\"students\":[{\"name\":\"Jane\"}]}}", resultSet.getString(15)); + connection.close(); + } + + @Test + public void testValidExternalAccountAuthenticationInPSC() throws SQLException { + String connection_uri = + "jdbc:bigquery://https://www.googleapis.com/bigquery/v2:443;PROJECTID=" + + PROJECT_ID + + ";OAUTHTYPE=4;" + + "BYOID_AudienceUri=//iam.googleapis.com/projects//locations//workloadIdentityPools//providers/;" + + "BYOID_SubjectTokenType=;" + + "BYOID_CredentialSource={\"file\":\"/path/to/file\"};" + + "BYOID_SA_Impersonation_Uri=;" + + "BYOID_TokenUri=;" + + "EndpointOverrides=BIGQUERY=https://bigquery-privateendpoint.p.googleapis.com," + + "OAuth2=https://oauth2-privateendpoint.p.googleapis.com/token;"; + + Connection connection = DriverManager.getConnection(connection_uri); + assertNotNull(connection); + assertFalse(connection.isClosed()); + assertEquals( + "EXTERNAL_ACCOUNT_AUTH", + ((BigQueryConnection) connection).getAuthProperties().get("OAuthType")); + + Statement statement = connection.createStatement(); + ResultSet resultSet = + statement.executeQuery( + "SELECT repository_name FROM `bigquery-public-data.samples.github_timeline` 
LIMIT 50"); + + assertNotNull(resultSet); + connection.close(); + } +} diff --git a/google-cloud-bigquery-jdbc/src/test/java/com/google/cloud/bigquery/jdbc/it/ITProxyBigQueryTest.java b/google-cloud-bigquery-jdbc/src/test/java/com/google/cloud/bigquery/jdbc/it/ITProxyBigQueryTest.java new file mode 100644 index 000000000..fbde5ecc6 --- /dev/null +++ b/google-cloud-bigquery-jdbc/src/test/java/com/google/cloud/bigquery/jdbc/it/ITProxyBigQueryTest.java @@ -0,0 +1,234 @@ +/* + * Copyright 2024 Google LLC + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package com.google.cloud.bigquery.jdbc.it; + +import static org.junit.Assert.assertFalse; +import static org.junit.Assert.assertNotNull; +import static org.junit.Assert.assertThrows; +import static org.junit.Assert.assertTrue; + +import com.google.cloud.ServiceOptions; +import com.google.cloud.bigquery.exception.BigQueryJdbcException; +import java.sql.Connection; +import java.sql.DriverManager; +import java.sql.SQLException; +import java.sql.Statement; +import java.util.Arrays; +import java.util.List; +import org.junit.Ignore; +import org.junit.Test; +import org.junit.experimental.runners.Enclosed; +import org.junit.runner.RunWith; +import org.junit.runners.Parameterized; + +@RunWith(Enclosed.class) +public class ITProxyBigQueryTest { + static final String PROJECT_ID = ServiceOptions.getDefaultProjectId(); + static final String PROXY_HOST = "34.94.167.18"; + static final String PROXY_PORT = "3128"; + static final String PROXY_UID = "fahmz"; + static final String PROXY_PWD = "fahmz"; + + public static class NonParameterizedProxyTests { + @Test + public void testValidAuthenticatedProxy() throws SQLException { + String connection_uri = + "jdbc:bigquery://https://www.googleapis.com/bigquery/v2:443;" + + "ProjectId=" + + PROJECT_ID + + ";OAuthType=3;" + + "ProxyHost=" + + PROXY_HOST + + ";ProxyPort=" + + PROXY_PORT + + ";ProxyUid=" + + PROXY_UID + + ";ProxyPwd=" + + PROXY_PWD + + ";"; + + Connection connection = DriverManager.getConnection(connection_uri); + assertNotNull(connection); + assertFalse(connection.isClosed()); + Statement statement = connection.createStatement(); + boolean result = + statement.execute( + "Select * FROM `bigquery-public-data.samples.github_timeline` LIMIT 180"); + assertTrue(result); + connection.close(); + } + + @Test + public void testAuthenticatedProxyWithOutAuthDetailsThrows() throws SQLException { + String query = "Select * FROM `bigquery-public-data.samples.github_timeline` LIMIT 180"; + String connection_uri = + 
"jdbc:bigquery://https://www.googleapis.com/bigquery/v2:443;" + + "ProjectId=" + + PROJECT_ID + + ";OAuthType=3;" + + "ProxyHost=" + + PROXY_HOST + + ";ProxyPort=" + + PROXY_PORT + + ";"; + Connection connection = DriverManager.getConnection(connection_uri); + assertNotNull(connection); + assertFalse(connection.isClosed()); + Statement statement = connection.createStatement(); + assertThrows(BigQueryJdbcException.class, () -> statement.execute(query)); + } + + @Test + public void testNonExistingProxyTimesOut() throws SQLException { + String query = "Select * FROM `bigquery-public-data.samples.github_timeline` LIMIT 180"; + String connection_uri = + "jdbc:bigquery://https://www.googleapis.com/bigquery/v2:443;" + + "ProjectId=" + + PROJECT_ID + + ";OAuthType=3;" + + "ProxyHost=111.12.111.11;" // If the test fails you may have this ip address + // assigned + + "ProxyPort=1111;"; + Connection connection = DriverManager.getConnection(connection_uri); + assertNotNull(connection); + assertFalse(connection.isClosed()); + Statement statement = connection.createStatement(); + assertThrows(BigQueryJdbcException.class, () -> statement.execute(query)); + } + + @Test + @Ignore // Run this when Proxy server has no authentication otherwise you'll get a "407 Proxy + // Authentication Required". 
+ public void testNonAuthenticatedProxy() throws SQLException { + String connection_uri = + "jdbc:bigquery://https://www.googleapis.com/bigquery/v2:443;" + + "ProjectId=" + + PROJECT_ID + + ";OAuthType=3;" + + "ProxyHost=" + + PROXY_HOST + + ";ProxyPort=" + + PROXY_PORT + + ";"; + + Connection connection = DriverManager.getConnection(connection_uri); + assertNotNull(connection); + assertFalse(connection.isClosed()); + Statement statement = connection.createStatement(); + boolean result = + statement.execute( + "Select * FROM `bigquery-public-data.samples.github_timeline` LIMIT 180"); + assertTrue(result); + connection.close(); + } + + @Test + public void testValidNonProxyConnectionQueries() throws SQLException { + String connection_uri = + "jdbc:bigquery://https://www.googleapis.com/bigquery/v2:443;" + + "ProjectId=" + + PROJECT_ID + + ";OAuthType=3;"; + + Connection connection = DriverManager.getConnection(connection_uri); + assertNotNull(connection); + assertFalse(connection.isClosed()); + Statement statement = connection.createStatement(); + boolean result = + statement.execute( + "Select * FROM `bigquery-public-data.samples.github_timeline` LIMIT 180"); + assertTrue(result); + connection.close(); + } + + @Test + public void testReadAPIEnabledWithProxySettings() throws SQLException { + String connection_uri = + "jdbc:bigquery://https://www.googleapis.com/bigquery/v2:443;" + + "ProjectId=" + + PROJECT_ID + + ";OAuthType=3;" + + "ProxyHost=" + + PROXY_HOST + + ";ProxyPort=" + + PROXY_PORT + + ";EnableHighThroughputAPI=1;" + + "ProxyUid=" + + PROXY_UID + + ";ProxyPwd=" + + PROXY_PWD + + ";"; + + Connection connection = DriverManager.getConnection(connection_uri); + assertNotNull(connection); + assertFalse(connection.isClosed()); + Statement statement = connection.createStatement(); + boolean result = + statement.execute( + "SELECT * FROM `bigquery-public-data.covid19_open_data_eu.covid19_open_data` LIMIT 200000"); + assertTrue(result); + connection.close(); + } + } 
+ + @RunWith(Parameterized.class) + public static class ParametrizedMissingPropertiesTest { + private final String ProxyHost; + private final String ProxyPort; + private final String ProxyUid; + private final String ProxyPwd; + + public ParametrizedMissingPropertiesTest( + String ProxyHost, String ProxyPort, String ProxyUid, String ProxyPwd) { + this.ProxyHost = ProxyHost; + this.ProxyPort = ProxyPort; + this.ProxyUid = ProxyUid; + this.ProxyPwd = ProxyPwd; + } + + @Parameterized.Parameters + public static List ProxyParameters() { + String proxyHost = "ProxyHost=" + PROXY_HOST + ";"; + String proxyPort = "ProxyPort=" + PROXY_PORT + ";"; + String proxyUid = "ProxyUid=" + PROXY_UID + ";"; + String proxyPwd = "ProxyPwd=" + PROXY_PWD + ";"; + return Arrays.asList( + new String[][] { + {"", proxyPort, proxyUid, proxyPwd}, + {proxyHost, "", proxyUid, proxyPwd}, + {proxyHost, proxyPort, "", proxyPwd}, + {proxyHost, proxyPort, proxyUid, ""}, + {"", "", proxyUid, proxyPwd} + }); + } + + @Test + public void testMissingProxyParameterThrowsIllegalArgument() { + String connection_uri = + "jdbc:bigquery://https://www.googleapis.com/bigquery/v2:443;" + + "ProjectId=" + + PROJECT_ID + + ";OAuthType=3;" + + ProxyHost + + ProxyPort + + ProxyUid + + ProxyPwd; + assertThrows( + IllegalArgumentException.class, () -> DriverManager.getConnection(connection_uri)); + } + } +} diff --git a/google-cloud-bigquery-jdbc/src/test/java/com/google/cloud/bigquery/jdbc/it/ITTPCBigQueryTest.java b/google-cloud-bigquery-jdbc/src/test/java/com/google/cloud/bigquery/jdbc/it/ITTPCBigQueryTest.java new file mode 100644 index 000000000..3fa2d7d7e --- /dev/null +++ b/google-cloud-bigquery-jdbc/src/test/java/com/google/cloud/bigquery/jdbc/it/ITTPCBigQueryTest.java @@ -0,0 +1,220 @@ +/* + * Copyright 2024 Google LLC + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package com.google.cloud.bigquery.jdbc.it; + +import static org.junit.Assert.assertEquals; +import static org.junit.Assert.assertFalse; +import static org.junit.Assert.assertNotNull; +import static org.junit.Assert.assertTrue; + +import com.google.cloud.bigquery.jdbc.BigQueryConnection; +import java.sql.Connection; +import java.sql.DriverManager; +import java.sql.ResultSet; +import java.sql.SQLException; +import java.sql.Statement; +import org.junit.Test; + +public class ITTPCBigQueryTest { + private static final String ENDPOINT_URL = System.getenv("ENDPOINT_URL"); + private static final String UNIVERSE_DOMAIN = System.getenv("UNIVERSE_DOMAIN"); + private static final String TPC_SERVICE_ACCOUNT = System.getenv("SERVICE_ACCOUNT"); + private static final String TPC_PVT_KEY = System.getenv("PRIVATE_KEY"); + private static final String TPC_ACCESS_TOKEN = System.getenv("ACCESS_TOKEN"); + private static final String TPC_PROJECT_ID = System.getenv("PROJECT_ID"); + + private static final String TPC_ENDPOINT = + (ENDPOINT_URL.isEmpty()) + ? "jdbc:bigquery://https://www.googleapis.com/bigquery/v2:443" + : "jdbc:bigquery://" + ENDPOINT_URL; + private static final String TPC_UNIVERSE_DOMAIN = + (UNIVERSE_DOMAIN.isEmpty()) ? "googleapis.com" : UNIVERSE_DOMAIN; + + // See here go/bq-cli-tpc for testing setup. + // Use the default test project. + // For the SA you will have to give it bigquery admin permissions cl/627813300 and will have to + // revert after testing. 
+ // Plug in the values for the connection properties from the guide into the connection string. + @Test + public void testServiceAccountAuthenticationViaEmail() throws SQLException { + validateTPCEnvironment(); + String connection_uri = + TPC_ENDPOINT + + ";" + + "ProjectId=" + + TPC_PROJECT_ID + + ";" + + "OAuthType=0;" + + "universeDomain=" + + TPC_UNIVERSE_DOMAIN + + ";" + + "OAuthServiceAcctEmail=" + + TPC_SERVICE_ACCOUNT + + ";" + + "OAuthPvtKey=" + + TPC_PVT_KEY + + ";"; + + Connection connection = DriverManager.getConnection(connection_uri); + assertNotNull(connection); + assertFalse(connection.isClosed()); + assertEquals( + "GOOGLE_SERVICE_ACCOUNT", + ((BigQueryConnection) connection).getAuthProperties().get("OAuthType")); + String query = "SELECT 1"; + Statement statement = connection.createStatement(); + ResultSet jsonResultSet = statement.executeQuery(query); + assertTrue(jsonResultSet.getClass().getName().contains("BigQueryJsonResultSet")); + connection.close(); + } + + // You will need to change the environment variable for GOOGLE_APPLICATION_CREDENTIALS to point to + // the SA key file. 
+ @Test + public void testValidApplicationDefaultCredentialsAuthentication() throws SQLException { + validateTPCEnvironment(); + String connection_uri = + TPC_ENDPOINT + + ";" + + "OAuthType=3;" + + "ProjectId=" + + TPC_PROJECT_ID + + ";" + + "universeDomain=" + + TPC_UNIVERSE_DOMAIN + + ";"; + + Connection connection = DriverManager.getConnection(connection_uri); + assertNotNull(connection); + assertFalse(connection.isClosed()); + assertEquals( + "APPLICATION_DEFAULT_CREDENTIALS", + ((BigQueryConnection) connection).getAuthProperties().get("OAuthType")); + String query = "SELECT * FROM test.test;"; + Statement statement = connection.createStatement(); + ResultSet jsonResultSet = statement.executeQuery(query); + assertTrue(jsonResultSet.getClass().getName().contains("BigQueryJsonResultSet")); + connection.close(); + } + + @Test + public void testValidPreGeneratedAccessTokenAuthentication() throws SQLException { + validateTPCEnvironment(); + String connection_uri = + TPC_ENDPOINT + + ";" + + "OAuthType=2;" + + "ProjectId=" + + TPC_PROJECT_ID + + ";" + + "OAuthAccessToken=" + + TPC_ACCESS_TOKEN + + ";" + + "universeDomain=" + + TPC_UNIVERSE_DOMAIN + + ";"; + + Connection connection = DriverManager.getConnection(connection_uri); + String query = "SELECT * FROM test.test;"; + Statement statement = connection.createStatement(); + ResultSet jsonResultSet = statement.executeQuery(query); + assertTrue(jsonResultSet.getClass().getName().contains("BigQueryJsonResultSet")); + assertNotNull(connection); + assertFalse(connection.isClosed()); + connection.close(); + } + + @Test + public void testSimpleQueryReturns() throws SQLException { + validateTPCEnvironment(); + String connection_uri = + TPC_ENDPOINT + + ";" + + "ProjectId=" + + TPC_PROJECT_ID + + ";" + + "OAuthType=0;" + + "universeDomain=" + + TPC_UNIVERSE_DOMAIN + + ";" + + "OAuthServiceAcctEmail=" + + TPC_SERVICE_ACCOUNT + + ";" + + "OAuthPvtKey=" + + TPC_PVT_KEY + + ";"; // Plug in this value when testing from the key 
file + + Connection connection = DriverManager.getConnection(connection_uri); + assertNotNull(connection); + assertFalse(connection.isClosed()); + assertEquals( + "GOOGLE_SERVICE_ACCOUNT", + ((BigQueryConnection) connection).getAuthProperties().get("OAuthType")); + String query = "SELECT * FROM test.test;"; + Statement statement = connection.createStatement(); + ResultSet jsonResultSet = statement.executeQuery(query); + assertTrue(jsonResultSet.getClass().getName().contains("BigQueryJsonResultSet")); + connection.close(); + } + + @Test + public void testServiceAccountKeyFileReturns() throws SQLException { + validateTPCEnvironment(); + String connection_uri = + TPC_ENDPOINT + + ";" + + "ProjectId=" + + TPC_PROJECT_ID + + ";" + + "OAuthType=0;" + + "universeDomain=" + + TPC_UNIVERSE_DOMAIN + + ";" + // Point the key path to where you have downloaded it to. + + "OAuthPvtKeyPath=/Users/YourPathToSecretFile/SAKeyFile.json;"; + + Connection connection = DriverManager.getConnection(connection_uri); + assertNotNull(connection); + assertFalse(connection.isClosed()); + assertEquals( + "GOOGLE_SERVICE_ACCOUNT", + ((BigQueryConnection) connection).getAuthProperties().get("OAuthType")); + String query = "SELECT * FROM test.test;"; + Statement statement = connection.createStatement(); + ResultSet jsonResultSet = statement.executeQuery(query); + assertTrue(jsonResultSet.getClass().getName().contains("BigQueryJsonResultSet")); + connection.close(); + } + + private void validateTPCEnvironment() { + if (TPC_PROJECT_ID.isEmpty()) { + throw new IllegalArgumentException("TPC_PROJECT_ID is empty"); + } + if (TPC_SERVICE_ACCOUNT.isEmpty()) { + throw new IllegalArgumentException("TPC_SERVICE_ACCOUNT is empty"); + } + if (TPC_ENDPOINT.isEmpty()) { + throw new IllegalArgumentException("TPC_ENDPOINT is empty"); + } + if (TPC_PVT_KEY.isEmpty()) { + throw new IllegalArgumentException("TPC_PVT_KEY is empty"); + } + if (TPC_UNIVERSE_DOMAIN.isEmpty()) { + throw new 
IllegalArgumentException("TPC_UNIVERSE_DOMAIN is empty"); + } + } +} diff --git a/google-cloud-bigquery-jdbc/src/test/java/com/google/cloud/bigquery/jdbc/rules/TimeZoneRule.java b/google-cloud-bigquery-jdbc/src/test/java/com/google/cloud/bigquery/jdbc/rules/TimeZoneRule.java new file mode 100644 index 000000000..ff5db108e --- /dev/null +++ b/google-cloud-bigquery-jdbc/src/test/java/com/google/cloud/bigquery/jdbc/rules/TimeZoneRule.java @@ -0,0 +1,57 @@ +/* + * Copyright 2023 Google LLC + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package com.google.cloud.bigquery.jdbc.rules; + +import java.util.TimeZone; +import org.junit.rules.TestRule; +import org.junit.runner.Description; +import org.junit.runners.model.Statement; + +public class TimeZoneRule implements TestRule { + + private final String timeZoneId; + private final TimeZone defaultTimeZone; + + public TimeZoneRule(String timeZoneId) { + this.timeZoneId = timeZoneId; + defaultTimeZone = TimeZone.getDefault(); + } + + @Override + public Statement apply(Statement base, Description description) { + return new Statement() { + @Override + public void evaluate() throws Throwable { + try { + TimeZone.setDefault(TimeZone.getTimeZone(timeZoneId)); + base.evaluate(); + } finally { + TimeZone.setDefault(defaultTimeZone); + } + } + }; + } + + /** + * Public method to enforce the rule from places like methods annotated with {@link + * org.junit.runners.Parameterized.Parameters} annotation which gets executed before this rule is + * applied. + */ + public void enforce() { + TimeZone.setDefault(TimeZone.getTimeZone(timeZoneId)); + } +} diff --git a/google-cloud-bigquery-jdbc/src/test/java/com/google/cloud/bigquery/jdbc/utils/ArrowUtilities.java b/google-cloud-bigquery-jdbc/src/test/java/com/google/cloud/bigquery/jdbc/utils/ArrowUtilities.java new file mode 100644 index 000000000..13f300766 --- /dev/null +++ b/google-cloud-bigquery-jdbc/src/test/java/com/google/cloud/bigquery/jdbc/utils/ArrowUtilities.java @@ -0,0 +1,54 @@ +/* + * Copyright 2023 Google LLC + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+ * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package com.google.cloud.bigquery.jdbc.utils; + +import com.google.api.core.InternalApi; +import com.google.protobuf.ByteString; +import java.io.ByteArrayInputStream; +import java.io.ByteArrayOutputStream; +import java.io.IOException; +import java.nio.channels.Channels; +import org.apache.arrow.vector.VectorSchemaRoot; +import org.apache.arrow.vector.VectorUnloader; +import org.apache.arrow.vector.ipc.WriteChannel; +import org.apache.arrow.vector.ipc.message.ArrowRecordBatch; +import org.apache.arrow.vector.ipc.message.MessageSerializer; +import org.apache.arrow.vector.types.pojo.Schema; + +@InternalApi("Used for testing purpose") +public class ArrowUtilities { + + public static ByteString serializeSchema(Schema schema) throws IOException { + ByteArrayOutputStream out = new ByteArrayOutputStream(); + MessageSerializer.serialize(new WriteChannel(Channels.newChannel(out)), schema); + return ByteString.readFrom(new ByteArrayInputStream(out.toByteArray())); + } + + public static ByteString serializeVectorSchemaRoot(VectorSchemaRoot root) throws IOException { + ByteArrayOutputStream out = new ByteArrayOutputStream(); + + ArrowRecordBatch recordBatch = new VectorUnloader(root).getRecordBatch(); + MessageSerializer.serialize(new WriteChannel(Channels.newChannel(out)), recordBatch); + return ByteString.readFrom(new ByteArrayInputStream(out.toByteArray())); + + // ArrowStreamWriter writer = new ArrowStreamWriter(root, null, Channels.newChannel(out)); + // writer.start(); + // writer.writeBatch(); + // writer.end(); + // return ByteString.readFrom(new ByteArrayInputStream(out.toByteArray())); + } +} diff --git a/google-cloud-bigquery-jdbc/src/test/java/com/google/cloud/bigquery/jdbc/utils/TestUtilities.java b/google-cloud-bigquery-jdbc/src/test/java/com/google/cloud/bigquery/jdbc/utils/TestUtilities.java new file mode 100644 index 000000000..419cb9b0b --- 
/dev/null +++ b/google-cloud-bigquery-jdbc/src/test/java/com/google/cloud/bigquery/jdbc/utils/TestUtilities.java @@ -0,0 +1,143 @@ +/* + * Copyright 2023 Google LLC + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package com.google.cloud.bigquery.jdbc.utils; + +import com.google.cloud.Tuple; +import com.google.cloud.bigquery.Field; +import com.google.cloud.bigquery.Field.Mode; +import com.google.cloud.bigquery.FieldList; +import com.google.cloud.bigquery.FieldValue; +import com.google.cloud.bigquery.FieldValue.Attribute; +import com.google.cloud.bigquery.FieldValueList; +import com.google.cloud.bigquery.StandardSQLTypeName; +import java.sql.ResultSet; +import java.sql.SQLException; +import java.util.ArrayList; +import java.util.Arrays; +import java.util.stream.Collectors; +import javax.sql.ConnectionEvent; +import javax.sql.ConnectionEventListener; +import org.apache.arrow.vector.util.JsonStringArrayList; +import org.apache.arrow.vector.util.JsonStringHashMap; + +public class TestUtilities { + + public static Tuple primitiveSchemaAndValue( + StandardSQLTypeName typeName, String value) { + Field schema = + Field.newBuilder(typeName.name() + "_column", typeName).setMode(Mode.NULLABLE).build(); + FieldValue primitiveValue = FieldValue.of(Attribute.PRIMITIVE, value); + + return Tuple.of(schema, primitiveValue); + } + + public static Tuple arraySchemaAndValue( + StandardSQLTypeName typeName, String... 
values) { + Field schema = + Field.newBuilder(typeName.name() + "_arr_column", typeName).setMode(Mode.REPEATED).build(); + + FieldValue arrayValues = + FieldValue.of( + Attribute.REPEATED, + FieldValueList.of( + Arrays.stream(values) + .map(value -> FieldValue.of(Attribute.PRIMITIVE, value)) + .collect(Collectors.toList()))); + + return Tuple.of(schema, arrayValues); + } + + public static Tuple, ArrayList> nestedResultSetToColumnLists( + ResultSet resultSet) throws SQLException { + ArrayList indexes = new ArrayList<>(); + ArrayList columnValues = new ArrayList<>(); + while (resultSet.next()) { + indexes.add(resultSet.getObject(1)); + columnValues.add((T) resultSet.getObject(2)); + } + return Tuple.of(indexes, columnValues); + } + + @SafeVarargs + public static Tuple> arrowStructOf( + Tuple... tuples) { + ArrayList fields = new ArrayList<>(); + JsonStringHashMap values = new JsonStringHashMap<>(); + + for (Tuple tuple : tuples) { + StandardSQLTypeName typeName = tuple.x(); + Object value = tuple.y(); + String name = typeName.name() + "_column"; + Field schema = Field.newBuilder(name, typeName).setMode(Mode.NULLABLE).build(); + fields.add(schema); + values.put(name, value); + } + return Tuple.of(FieldList.of(fields), values); + } + + public static Tuple> arrowArraySchemaAndValue( + StandardSQLTypeName typeName, T... values) { + Field schema = + Field.newBuilder(typeName.name() + "_arr_column", typeName).setMode(Mode.REPEATED).build(); + + JsonStringArrayList arrayValues = new JsonStringArrayList<>(); + arrayValues.addAll(Arrays.asList(values)); + return Tuple.of(schema, arrayValues); + } + + @SafeVarargs + public static JsonStringArrayList arrowArrayOf(T... 
values) { + JsonStringArrayList arrayValues = new JsonStringArrayList<>(); + arrayValues.addAll(Arrays.asList(values)); + return arrayValues; + } + + // struct of arrays + public static JsonStringHashMap toArrowStruct( + Iterable>> schemaAndValues) { + JsonStringHashMap struct = new JsonStringHashMap<>(); + for (Tuple> schemaAndValue : schemaAndValues) { + Field schema = schemaAndValue.x(); + JsonStringArrayList value = schemaAndValue.y(); + struct.put(schema.getName(), value); + } + return struct; + } + + public static class TestConnectionListener implements ConnectionEventListener { + private int connectionClosedCount = 0; + private int connectionErrorCount = 0; + + @Override + public void connectionClosed(ConnectionEvent arg0) { + connectionClosedCount++; + } + + @Override + public void connectionErrorOccurred(ConnectionEvent arg0) { + connectionErrorCount++; + } + + public int getConnectionClosedCount() { + return connectionClosedCount; + } + + public int getConnectionErrorCount() { + return connectionErrorCount; + } + } +} diff --git a/google-cloud-bigquery-jdbc/src/test/java/com/google/cloud/bigquery/jdbc/utils/URIBuilder.java b/google-cloud-bigquery-jdbc/src/test/java/com/google/cloud/bigquery/jdbc/utils/URIBuilder.java new file mode 100644 index 000000000..ce34f42f5 --- /dev/null +++ b/google-cloud-bigquery-jdbc/src/test/java/com/google/cloud/bigquery/jdbc/utils/URIBuilder.java @@ -0,0 +1,34 @@ +/* + * Copyright 2025 Google LLC + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+ * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package com.google.cloud.bigquery.jdbc.utils; + +public class URIBuilder { + StringBuilder builder; + + public URIBuilder(String baseUri) { + builder = new StringBuilder(baseUri); + } + + public URIBuilder append(String key, Object value) { + builder.append(String.format("%s=%s;", key, value.toString())); + return this; + } + + public String toString() { + return builder.toString(); + } +} diff --git a/google-cloud-bigquery-jdbc/src/test/resources/fake.p12 b/google-cloud-bigquery-jdbc/src/test/resources/fake.p12 new file mode 100644 index 000000000..d9e064411 Binary files /dev/null and b/google-cloud-bigquery-jdbc/src/test/resources/fake.p12 differ diff --git a/google-cloud-bigquery-jdbc/src/test/resources/test_truststore_nopass.jks b/google-cloud-bigquery-jdbc/src/test/resources/test_truststore_nopass.jks new file mode 100644 index 000000000..c40846550 Binary files /dev/null and b/google-cloud-bigquery-jdbc/src/test/resources/test_truststore_nopass.jks differ diff --git a/google-cloud-bigquery-jdbc/src/test/resources/test_truststore_withpass.jks b/google-cloud-bigquery-jdbc/src/test/resources/test_truststore_withpass.jks new file mode 100644 index 000000000..824be2d6f Binary files /dev/null and b/google-cloud-bigquery-jdbc/src/test/resources/test_truststore_withpass.jks differ