diff --git a/.github/workflows/scorecard.yml b/.github/workflows/scorecard.yml index 1524dc8d2..69df4a253 100644 --- a/.github/workflows/scorecard.yml +++ b/.github/workflows/scorecard.yml @@ -59,7 +59,7 @@ jobs: # Upload the results as artifacts (optional). Commenting out will disable uploads of run results in SARIF # format to the repository Actions tab. - name: "Upload artifact" - uses: actions/upload-artifact@ea165f8d65b6e75b540449e92b4886f43607fa02 # v4.6.2 + uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6.0.0 with: name: SARIF file path: results.sarif diff --git a/.release-please-manifest.json b/.release-please-manifest.json index 5f1dc856b..ba9811fa8 100644 --- a/.release-please-manifest.json +++ b/.release-please-manifest.json @@ -1,3 +1,3 @@ { - ".": "2.56.0" + ".": "2.57.1" } diff --git a/benchmark/pom.xml b/benchmark/pom.xml index e58805860..41e1a2fcb 100644 --- a/benchmark/pom.xml +++ b/benchmark/pom.xml @@ -6,7 +6,7 @@ google-cloud-bigquery-parent com.google.cloud - 2.56.1-rc1-SNAPSHOT + 2.57.2-SNAPSHOT diff --git a/google-cloud-bigquery-bom/pom.xml b/google-cloud-bigquery-bom/pom.xml index c750a7c6d..f119bc5c6 100644 --- a/google-cloud-bigquery-bom/pom.xml +++ b/google-cloud-bigquery-bom/pom.xml @@ -3,7 +3,7 @@ 4.0.0 com.google.cloud google-cloud-bigquery-bom - 2.56.1-rc1-SNAPSHOT + 2.57.2-SNAPSHOT pom com.google.cloud @@ -54,7 +54,7 @@ com.google.cloud google-cloud-bigquery - 2.56.1-rc1-SNAPSHOT + 2.57.2-SNAPSHOT diff --git a/google-cloud-bigquery/pom.xml b/google-cloud-bigquery/pom.xml index 2d2124ace..5e86e77fd 100644 --- a/google-cloud-bigquery/pom.xml +++ b/google-cloud-bigquery/pom.xml @@ -3,7 +3,7 @@ 4.0.0 com.google.cloud google-cloud-bigquery - 2.56.1-rc1-SNAPSHOT + 2.57.2-SNAPSHOT jar BigQuery https://github.com/googleapis/java-bigquery @@ -11,7 +11,7 @@ com.google.cloud google-cloud-bigquery-parent - 2.56.1-rc1-SNAPSHOT + 2.57.2-SNAPSHOT google-cloud-bigquery diff --git a/google-cloud-bigquery/src/main/java/com/google/cloud/bigquery/BigQueryImpl.java b/google-cloud-bigquery/src/main/java/com/google/cloud/bigquery/BigQueryImpl.java index ac8fce708..c057cdaca 100644 --- a/google-cloud-bigquery/src/main/java/com/google/cloud/bigquery/BigQueryImpl.java +++ b/google-cloud-bigquery/src/main/java/com/google/cloud/bigquery/BigQueryImpl.java @@ -1731,7 +1731,10 @@ public TableDataList call() throws IOException { new PageImpl<>( new TableDataPageFetcher(tableId, schema, serviceOptions, cursor, pageOptionMap), cursor, - transformTableData(result.getRows(), schema, serviceOptions.getUseInt64Timestamps())), + transformTableData( + result.getRows(), + schema, + serviceOptions.getDataFormatOptions().useInt64Timestamp())), result.getTotalRows()); } catch (BigQueryRetryHelperException e) { throw BigQueryException.translateAndThrow(e); @@ -2007,7 +2010,9 @@ public com.google.api.services.bigquery.model.QueryResponse call() new QueryPageFetcher(jobId, schema, getOptions(), cursor, optionMap(options)), cursor, transformTableData( - results.getRows(), schema, getOptions().getUseInt64Timestamps()))) + results.getRows(), + schema, + getOptions().getDataFormatOptions().useInt64Timestamp()))) .setJobId(jobId) .setQueryId(results.getQueryId()) .build(); @@ -2021,7 +2026,9 @@ public com.google.api.services.bigquery.model.QueryResponse call() new TableDataPageFetcher(null, schema, getOptions(), null, optionMap(options)), null, transformTableData( - results.getRows(), schema, getOptions().getUseInt64Timestamps()))) + results.getRows(), + schema, + 
getOptions().getDataFormatOptions().useInt64Timestamp()))) // Return the JobID of the successful job .setJobId( results.getJobReference() != null ? JobId.fromPb(results.getJobReference()) : null) @@ -2066,10 +2073,9 @@ && getOptions().getOpenTelemetryTracer() != null) { } try (Scope queryScope = querySpan != null ? querySpan.makeCurrent() : null) { // If all parameters passed in configuration are supported by the query() method on the - // backend, - // put on fast path + // backend, put on fast path QueryRequestInfo requestInfo = - new QueryRequestInfo(configuration, getOptions().getUseInt64Timestamps()); + new QueryRequestInfo(configuration, getOptions().getDataFormatOptions()); if (requestInfo.isFastQuerySupported(jobId)) { // Be careful when setting the projectID in JobId, if a projectID is specified in the JobId, // the job created by the query method will use that project. This may cause the query to diff --git a/google-cloud-bigquery/src/main/java/com/google/cloud/bigquery/BigQueryOptions.java b/google-cloud-bigquery/src/main/java/com/google/cloud/bigquery/BigQueryOptions.java index 7adb58d3a..10ae77930 100644 --- a/google-cloud-bigquery/src/main/java/com/google/cloud/bigquery/BigQueryOptions.java +++ b/google-cloud-bigquery/src/main/java/com/google/cloud/bigquery/BigQueryOptions.java @@ -17,6 +17,7 @@ package com.google.cloud.bigquery; import com.google.api.core.BetaApi; +import com.google.api.core.ObsoleteApi; import com.google.api.gax.retrying.ResultRetryAlgorithm; import com.google.cloud.ServiceDefaults; import com.google.cloud.ServiceOptions; @@ -26,6 +27,7 @@ import com.google.cloud.bigquery.spi.BigQueryRpcFactory; import com.google.cloud.bigquery.spi.v2.HttpBigQueryRpc; import com.google.cloud.http.HttpTransportOptions; +import com.google.common.base.Preconditions; import com.google.common.collect.ImmutableSet; import io.opentelemetry.api.trace.Tracer; import java.util.Set; @@ -41,6 +43,7 @@ public class BigQueryOptions extends ServiceOptions { // set the option ThrowNotFound when you want to throw the exception when the value not found private boolean setThrowNotFound; private boolean useInt64Timestamps; + private DataFormatOptions dataFormatOptions; private JobCreationMode defaultJobCreationMode = JobCreationMode.JOB_CREATION_MODE_UNSPECIFIED; private boolean enableOpenTelemetryTracing; private Tracer openTelemetryTracer; @@ -70,6 +73,7 @@ public static class Builder extends ServiceOptions.Builder resultRetryAlgorithm; @@ -94,11 +98,32 @@ public Builder setLocation(String location) { return this; } + /** + * This setter is marked as Obsolete. Prefer {@link #setDataFormatOptions(DataFormatOptions)} to + * set the int64timestamp configuration instead. + * + *
<p>If the useInt64Timestamps value is set both here and via DataFormatOptions, the + * DataFormatOptions configuration value is used. + * + *
<p>
{@code DataFormatOptions.newBuilder().setUseInt64Timestamp(...).build()} + */ + @ObsoleteApi("Use setDataFormatOptions(DataFormatOptions) instead") public Builder setUseInt64Timestamps(boolean useInt64Timestamps) { this.useInt64Timestamps = useInt64Timestamps; return this; } + /** + * Set the format options for the BigQuery data types + * + * @param dataFormatOptions Configuration of the formatting options + */ + public Builder setDataFormatOptions(DataFormatOptions dataFormatOptions) { + Preconditions.checkNotNull(dataFormatOptions, "DataFormatOptions cannot be null"); + this.dataFormatOptions = dataFormatOptions; + return this; + } + /** * Enables OpenTelemetry tracing functionality for this BigQuery instance * @@ -143,6 +168,15 @@ private BigQueryOptions(Builder builder) { } else { this.resultRetryAlgorithm = BigQueryBaseService.DEFAULT_BIGQUERY_EXCEPTION_HANDLER; } + + // If dataFormatOptions is not set, then create a new instance and set it with the + // useInt64Timestamps configured in BigQueryOptions + if (builder.dataFormatOptions == null) { + this.dataFormatOptions = + DataFormatOptions.newBuilder().useInt64Timestamp(builder.useInt64Timestamps).build(); + } else { + this.dataFormatOptions = builder.dataFormatOptions; + } } private static class BigQueryDefaults implements ServiceDefaults { @@ -191,8 +225,23 @@ public void setThrowNotFound(boolean setThrowNotFound) { this.setThrowNotFound = setThrowNotFound; } + /** + * This setter is marked as Obsolete. Prefer {@link + * Builder#setDataFormatOptions(DataFormatOptions)} to set the int64timestamp configuration + * instead. + * + *
<p>If useInt64Timestamps is set via DataFormatOptions, then the value in DataFormatOptions will + * be used. Otherwise, this value will be passed to DataFormatOptions. + * + *
<p>Alternative: {@code DataFormatOptions.newBuilder().useInt64Timestamp(...).build()} + */ + @ObsoleteApi("Use Builder#setDataFormatOptions(DataFormatOptions) instead") public void setUseInt64Timestamps(boolean useInt64Timestamps) { this.useInt64Timestamps = useInt64Timestamps; + // Because this setter exists outside the Builder, DataFormatOptions needs to be rebuilt to + // account for this setting. + this.dataFormatOptions = + dataFormatOptions.toBuilder().useInt64Timestamp(useInt64Timestamps).build(); } @Deprecated @@ -206,8 +255,22 @@ public boolean getThrowNotFound() { return setThrowNotFound; } + /** + * This getter is marked as Obsolete. Prefer {@link + * DataFormatOptions.Builder#useInt64Timestamp(boolean)} to set the int64timestamp configuration + * instead. + * + *
<p>
Warning: DataFormatOptions values have precedence. Use {@link + * DataFormatOptions#useInt64Timestamp()} to get `useInt64Timestamp` value used by the BigQuery + * client. + */ + @ObsoleteApi("Use getDataFormatOptions().isUseInt64Timestamp() instead") public boolean getUseInt64Timestamps() { - return useInt64Timestamps; + return dataFormatOptions.useInt64Timestamp(); + } + + public DataFormatOptions getDataFormatOptions() { + return dataFormatOptions; } public JobCreationMode getDefaultJobCreationMode() { diff --git a/google-cloud-bigquery/src/main/java/com/google/cloud/bigquery/DataFormatOptions.java b/google-cloud-bigquery/src/main/java/com/google/cloud/bigquery/DataFormatOptions.java new file mode 100644 index 000000000..beaadf32c --- /dev/null +++ b/google-cloud-bigquery/src/main/java/com/google/cloud/bigquery/DataFormatOptions.java @@ -0,0 +1,73 @@ +/* + * Copyright 2025 Google LLC + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package com.google.cloud.bigquery; + +import com.google.auto.value.AutoValue; +import java.io.Serializable; + +/** + * Google BigQuery DataFormatOptions. Configures the output format for data types returned from + * BigQuery. + */ +@AutoValue +public abstract class DataFormatOptions implements Serializable { + public enum TimestampFormatOptions { + TIMESTAMP_OUTPUT_FORMAT_UNSPECIFIED("TIMESTAMP_OUTPUT_FORMAT_UNSPECIFIED"), + FLOAT64("FLOAT64"), + INT64("INT64"), + ISO8601_STRING("ISO8601_STRING"); + + private final String format; + + TimestampFormatOptions(String format) { + this.format = format; + } + + @Override + public String toString() { + return format; + } + } + + public abstract boolean useInt64Timestamp(); + + public abstract TimestampFormatOptions timestampFormatOptions(); + + public static Builder newBuilder() { + return new AutoValue_DataFormatOptions.Builder() + .useInt64Timestamp(false) + .timestampFormatOptions(TimestampFormatOptions.TIMESTAMP_OUTPUT_FORMAT_UNSPECIFIED); + } + + public abstract Builder toBuilder(); + + @AutoValue.Builder + public abstract static class Builder { + public abstract Builder useInt64Timestamp(boolean useInt64Timestamp); + + public abstract Builder timestampFormatOptions(TimestampFormatOptions timestampFormatOptions); + + public abstract DataFormatOptions build(); + } + + com.google.api.services.bigquery.model.DataFormatOptions toPb() { + com.google.api.services.bigquery.model.DataFormatOptions request = + new com.google.api.services.bigquery.model.DataFormatOptions(); + request.setUseInt64Timestamp(useInt64Timestamp()); + request.setTimestampOutputFormat(timestampFormatOptions().toString()); + return request; + } +} diff --git a/google-cloud-bigquery/src/main/java/com/google/cloud/bigquery/Field.java b/google-cloud-bigquery/src/main/java/com/google/cloud/bigquery/Field.java index 3c959a73f..88e09c5c4 100644 --- a/google-cloud-bigquery/src/main/java/com/google/cloud/bigquery/Field.java +++ b/google-cloud-bigquery/src/main/java/com/google/cloud/bigquery/Field.java @@ -25,6 +25,7 @@ import 
com.google.api.services.bigquery.model.TableFieldSchema; import com.google.common.base.Function; import com.google.common.base.MoreObjects; +import com.google.common.base.Preconditions; import com.google.common.collect.Lists; import java.io.Serializable; import java.util.List; @@ -62,6 +63,7 @@ public TableFieldSchema apply(Field field) { private final Long maxLength; private final Long scale; private final Long precision; + private final Long timestampPrecision; private final String defaultValueExpression; private final String collation; private final FieldElementType rangeElementType; @@ -88,6 +90,7 @@ public static final class Builder { private Long maxLength; private Long scale; private Long precision; + private Long timestampPrecision; private String defaultValueExpression; private String collation; private FieldElementType rangeElementType; @@ -104,6 +107,7 @@ private Builder(Field field) { this.maxLength = field.maxLength; this.scale = field.scale; this.precision = field.precision; + this.timestampPrecision = field.timestampPrecision; this.defaultValueExpression = field.defaultValueExpression; this.collation = field.collation; this.rangeElementType = field.rangeElementType; @@ -254,6 +258,19 @@ public Builder setPrecision(Long precision) { return this; } + /** + * Specifies the precision for TIMESTAMP types. + * + *
<p>
The default value is 6. Possible values are 6 (microsecond) or 12 (picosecond). + */ + public Builder setTimestampPrecision(Long timestampPrecision) { + Preconditions.checkArgument( + timestampPrecision == 6L || timestampPrecision == 12L, + "Timestamp Precision must be 6 (microsecond) or 12 (picosecond)"); + this.timestampPrecision = timestampPrecision; + return this; + } + /** * DefaultValueExpression is used to specify the default value of a field using a SQL * expression. It can only be set for top level fields (columns). @@ -317,6 +334,7 @@ private Field(Builder builder) { this.maxLength = builder.maxLength; this.scale = builder.scale; this.precision = builder.precision; + this.timestampPrecision = builder.timestampPrecision; this.defaultValueExpression = builder.defaultValueExpression; this.collation = builder.collation; this.rangeElementType = builder.rangeElementType; @@ -370,6 +388,11 @@ public Long getPrecision() { return precision; } + /** Returns the precision for TIMESTAMP type. */ + public Long getTimestampPrecision() { + return timestampPrecision; + } + /** Return the default value of the field. */ public String getDefaultValueExpression() { return defaultValueExpression; @@ -408,6 +431,7 @@ public String toString() { .add("maxLength", maxLength) .add("scale", scale) .add("precision", precision) + .add("timestampPrecision", timestampPrecision) .add("defaultValueExpression", defaultValueExpression) .add("collation", collation) .add("rangeElementType", rangeElementType) @@ -416,7 +440,19 @@ public String toString() { @Override public int hashCode() { - return Objects.hash(name, type, mode, description, policyTags, rangeElementType); + return Objects.hash( + name, + type, + mode, + description, + policyTags, + maxLength, + scale, + precision, + timestampPrecision, + defaultValueExpression, + collation, + rangeElementType); } @Override @@ -490,6 +526,9 @@ TableFieldSchema toPb() { if (precision != null) { fieldSchemaPb.setPrecision(precision); } + if (timestampPrecision != null) { + fieldSchemaPb.setTimestampPrecision(timestampPrecision); + } if (defaultValueExpression != null) { fieldSchemaPb.setDefaultValueExpression(defaultValueExpression); } @@ -527,6 +566,9 @@ static Field fromPb(TableFieldSchema fieldSchemaPb) { if (fieldSchemaPb.getPrecision() != null) { fieldBuilder.setPrecision(fieldSchemaPb.getPrecision()); } + if (fieldSchemaPb.getTimestampPrecision() != null) { + fieldBuilder.setTimestampPrecision(fieldSchemaPb.getTimestampPrecision()); + } if (fieldSchemaPb.getDefaultValueExpression() != null) { fieldBuilder.setDefaultValueExpression(fieldSchemaPb.getDefaultValueExpression()); } diff --git a/google-cloud-bigquery/src/main/java/com/google/cloud/bigquery/QueryParameterValue.java b/google-cloud-bigquery/src/main/java/com/google/cloud/bigquery/QueryParameterValue.java index 0487c3f7c..cb4e44861 100644 --- a/google-cloud-bigquery/src/main/java/com/google/cloud/bigquery/QueryParameterValue.java +++ b/google-cloud-bigquery/src/main/java/com/google/cloud/bigquery/QueryParameterValue.java @@ -26,6 +26,7 @@ import com.google.api.services.bigquery.model.RangeValue; import com.google.auto.value.AutoValue; import com.google.cloud.Timestamp; +import com.google.common.annotations.VisibleForTesting; import com.google.common.base.Function; import com.google.common.collect.ImmutableList; import com.google.common.collect.ImmutableMap; @@ -44,6 +45,8 @@ import java.util.HashMap; import java.util.List; import java.util.Map; +import java.util.regex.Matcher; +import 
java.util.regex.Pattern; import javax.annotation.Nullable; import org.threeten.extra.PeriodDuration; @@ -76,7 +79,7 @@ @AutoValue public abstract class QueryParameterValue implements Serializable { - private static final DateTimeFormatter timestampFormatter = + static final DateTimeFormatter TIMESTAMP_FORMATTER = new DateTimeFormatterBuilder() .parseLenient() .append(DateTimeFormatter.ISO_LOCAL_DATE) @@ -94,15 +97,21 @@ public abstract class QueryParameterValue implements Serializable { .optionalEnd() .toFormatter() .withZone(ZoneOffset.UTC); - private static final DateTimeFormatter timestampValidator = + private static final DateTimeFormatter TIMESTAMP_VALIDATOR = new DateTimeFormatterBuilder() .parseLenient() - .append(timestampFormatter) + .append(TIMESTAMP_FORMATTER) .optionalStart() .appendOffsetId() .optionalEnd() .toFormatter() .withZone(ZoneOffset.UTC); + // Regex to identify >9 digits in the fraction part (e.g. `.123456789123`) + // Matches the dot, followed by 10+ digits (fractional part), followed by non-digits (like `+00`) + // or end of string + private static final Pattern ISO8601_TIMESTAMP_HIGH_PRECISION_PATTERN = + Pattern.compile("\\.(\\d{10,})(?:\\D|$)"); + private static final DateTimeFormatter dateFormatter = DateTimeFormatter.ofPattern("yyyy-MM-dd"); private static final DateTimeFormatter timeFormatter = DateTimeFormatter.ofPattern("HH:mm:ss.SSSSSS"); @@ -303,6 +312,9 @@ public static QueryParameterValue bytes(byte[] value) { /** * Creates a {@code QueryParameterValue} object with a type of TIMESTAMP. * + *
<p>
This method only supports microsecond precision for timestamp. To use higher precision, + * prefer {@link #timestamp(String)} with an ISO8601 String + * * @param value Microseconds since epoch, e.g. 1733945416000000 corresponds to 2024-12-11 * 19:30:16.929Z */ @@ -311,8 +323,14 @@ public static QueryParameterValue timestamp(Long value) { } /** - * Creates a {@code QueryParameterValue} object with a type of TIMESTAMP. Must be in the format - * "yyyy-MM-dd HH:mm:ss.SSSSSSZZ", e.g. "2014-08-19 12:41:35.220000+00:00". + * Creates a {@code QueryParameterValue} object with a type of TIMESTAMP. + * + *
<p>This method supports up to picosecond precision (12 digits) for timestamp. Input should + * conform to ISO8601 format. + * + *
<p>
Must be in the format "yyyy-MM-dd HH:mm:ss.SSSSSS{SSSSSSS}ZZ", e.g. "2014-08-19 + * 12:41:35.123456+00:00" for microsecond precision and "2014-08-19 12:41:35.123456789123+00:00" + * for picosecond precision */ public static QueryParameterValue timestamp(String value) { return of(value, StandardSQLTypeName.TIMESTAMP); @@ -481,12 +499,15 @@ private static String valueToStringOrNull(T value, StandardSQLTypeName type) throw new IllegalArgumentException("Cannot convert RANGE to String value"); case TIMESTAMP: if (value instanceof Long) { + // Timestamp passed as a Long only support Microsecond precision Timestamp timestamp = Timestamp.ofTimeMicroseconds((Long) value); - return timestampFormatter.format( + return TIMESTAMP_FORMATTER.format( Instant.ofEpochSecond(timestamp.getSeconds(), timestamp.getNanos())); } else if (value instanceof String) { - // verify that the String is in the right format - checkFormat(value, timestampValidator); + // Timestamp passed as a String can support up picosecond precision, however, + // DateTimeFormatter only supports nanosecond precision. Higher than nanosecond + // requires a custom validator. + validateTimestamp((String) value); return (String) value; } break; @@ -521,9 +542,42 @@ private static String valueToStringOrNull(T value, StandardSQLTypeName type) "Type " + type + " incompatible with " + value.getClass().getCanonicalName()); } + /** + * Internal helper method to check that the timestamp follows the expected String input of ISO8601 + * string. Allows the fractional portion of the timestamp to support up to 12 digits of precision + * (up to picosecond). + * + * @throws IllegalArgumentException if timestamp is invalid or exceeds picosecond precision + */ + @VisibleForTesting + static void validateTimestamp(String timestamp) { + // Check if the string has greater than nanosecond precision (>9 digits in fractional second) + Matcher matcher = ISO8601_TIMESTAMP_HIGH_PRECISION_PATTERN.matcher(timestamp); + if (matcher.find()) { + // Group 1 is the fractional second part of the ISO8601 string + String fraction = matcher.group(1); + // Pos 10-12 of the fractional second are guaranteed to be digits. The regex only + // matches the fraction section as long as they are digits. + if (fraction.length() > 12) { + throw new IllegalArgumentException( + "Fractional second portion of ISO8601 only supports up to picosecond (12 digits) in BigQuery"); + } + + // Replace the entire fractional second portion with just the nanosecond portion. 
+ // The new timestamp will be validated against the JDK's DateTimeFormatter + String truncatedFraction = fraction.substring(0, 9); + timestamp = + new StringBuilder(timestamp) + .replace(matcher.start(1), matcher.end(1), truncatedFraction) + .toString(); + } + + // It is valid as long as DateTimeFormatter doesn't throw an exception + checkFormat(timestamp, TIMESTAMP_VALIDATOR); + } + private static void checkFormat(Object value, DateTimeFormatter formatter) { try { - formatter.parse((String) value); } catch (DateTimeParseException e) { throw new IllegalArgumentException(e.getMessage(), e); diff --git a/google-cloud-bigquery/src/main/java/com/google/cloud/bigquery/QueryRequestInfo.java b/google-cloud-bigquery/src/main/java/com/google/cloud/bigquery/QueryRequestInfo.java index 588b7cae8..c7033817c 100644 --- a/google-cloud-bigquery/src/main/java/com/google/cloud/bigquery/QueryRequestInfo.java +++ b/google-cloud-bigquery/src/main/java/com/google/cloud/bigquery/QueryRequestInfo.java @@ -46,7 +46,8 @@ final class QueryRequestInfo { private final DataFormatOptions formatOptions; private final String reservation; - QueryRequestInfo(QueryJobConfiguration config, Boolean useInt64Timestamps) { + QueryRequestInfo( + QueryJobConfiguration config, com.google.cloud.bigquery.DataFormatOptions dataFormatOptions) { this.config = config; this.connectionProperties = config.getConnectionProperties(); this.defaultDataset = config.getDefaultDataset(); @@ -61,7 +62,7 @@ final class QueryRequestInfo { this.useLegacySql = config.useLegacySql(); this.useQueryCache = config.useQueryCache(); this.jobCreationMode = config.getJobCreationMode(); - this.formatOptions = new DataFormatOptions().setUseInt64Timestamp(useInt64Timestamps); + this.formatOptions = dataFormatOptions.toPb(); this.reservation = config.getReservation(); } diff --git a/google-cloud-bigquery/src/test/java/com/google/cloud/bigquery/BigQueryOptionsTest.java b/google-cloud-bigquery/src/test/java/com/google/cloud/bigquery/BigQueryOptionsTest.java index 4176ec24d..e77d7936a 100644 --- a/google-cloud-bigquery/src/test/java/com/google/cloud/bigquery/BigQueryOptionsTest.java +++ b/google-cloud-bigquery/src/test/java/com/google/cloud/bigquery/BigQueryOptionsTest.java @@ -16,6 +16,11 @@ package com.google.cloud.bigquery; +import static org.junit.Assert.assertEquals; +import static org.junit.Assert.assertFalse; +import static org.junit.Assert.assertNotNull; +import static org.junit.Assert.assertTrue; + import com.google.cloud.TransportOptions; import org.junit.Assert; import org.junit.Test; @@ -35,4 +40,55 @@ public void testInvalidTransport() { Assert.assertNotNull(expected.getMessage()); } } + + @Test + public void dataFormatOptions_createdByDefault() { + BigQueryOptions options = BigQueryOptions.newBuilder().setProjectId("project-id").build(); + + assertNotNull(options.getDataFormatOptions()); + assertFalse(options.getDataFormatOptions().useInt64Timestamp()); + assertEquals( + DataFormatOptions.TimestampFormatOptions.TIMESTAMP_OUTPUT_FORMAT_UNSPECIFIED, + options.getDataFormatOptions().timestampFormatOptions()); + } + + @Test + public void nonBuilderSetUseInt64Timestamp_capturedInDataFormatOptions() { + BigQueryOptions options = + BigQueryOptions.newBuilder() + .setDataFormatOptions(DataFormatOptions.newBuilder().useInt64Timestamp(false).build()) + .setProjectId("project-id") + .build(); + options.setUseInt64Timestamps(true); + + assertTrue(options.getDataFormatOptions().useInt64Timestamp()); + } + + @Test + public void 
nonBuilderSetUseInt64Timestamp_overridesEverything() { + BigQueryOptions options = BigQueryOptions.newBuilder().setProjectId("project-id").build(); + options.setUseInt64Timestamps(true); + + assertTrue(options.getDataFormatOptions().useInt64Timestamp()); + } + + @Test + public void noDataFormatOptions_capturesUseInt64TimestampSetInBuilder() { + BigQueryOptions options = + BigQueryOptions.newBuilder().setUseInt64Timestamps(true).setProjectId("project-id").build(); + + assertTrue(options.getDataFormatOptions().useInt64Timestamp()); + } + + @Test + public void dataFormatOptionsSetterHasPrecedence() { + BigQueryOptions options = + BigQueryOptions.newBuilder() + .setProjectId("project-id") + .setDataFormatOptions(DataFormatOptions.newBuilder().useInt64Timestamp(true).build()) + .setUseInt64Timestamps(false) + .build(); + + assertTrue(options.getDataFormatOptions().useInt64Timestamp()); + } } diff --git a/google-cloud-bigquery/src/test/java/com/google/cloud/bigquery/FieldTest.java b/google-cloud-bigquery/src/test/java/com/google/cloud/bigquery/FieldTest.java index ce431ca29..d7c5e25a2 100644 --- a/google-cloud-bigquery/src/test/java/com/google/cloud/bigquery/FieldTest.java +++ b/google-cloud-bigquery/src/test/java/com/google/cloud/bigquery/FieldTest.java @@ -17,6 +17,7 @@ package com.google.cloud.bigquery; import static org.junit.Assert.assertEquals; +import static org.junit.Assert.assertThrows; import java.io.ByteArrayInputStream; import java.io.ByteArrayOutputStream; @@ -213,6 +214,20 @@ public void testSubFieldWithClonedType() throws Exception { Field.of("field", clonedRecord, Field.of("subfield", LegacySQLTypeName.BOOLEAN)); } + @Test + public void setTimestampPrecisionValues() { + Field.Builder builder = Field.newBuilder(FIELD_NAME1, FIELD_TYPE1); + + // Value values: 6L or 12L + builder.setTimestampPrecision(6L); + builder.setTimestampPrecision(12L); + + assertThrows(IllegalArgumentException.class, () -> builder.setTimestampPrecision(-1L)); + assertThrows(IllegalArgumentException.class, () -> builder.setTimestampPrecision(0L)); + assertThrows(IllegalArgumentException.class, () -> builder.setTimestampPrecision(5L)); + assertThrows(IllegalArgumentException.class, () -> builder.setTimestampPrecision(13L)); + } + private void compareFieldSchemas(Field expected, Field value) { assertEquals(expected, value); assertEquals(expected.getName(), value.getName()); diff --git a/google-cloud-bigquery/src/test/java/com/google/cloud/bigquery/QueryParameterValueTest.java b/google-cloud-bigquery/src/test/java/com/google/cloud/bigquery/QueryParameterValueTest.java index 75060a4f0..25649388e 100644 --- a/google-cloud-bigquery/src/test/java/com/google/cloud/bigquery/QueryParameterValueTest.java +++ b/google-cloud-bigquery/src/test/java/com/google/cloud/bigquery/QueryParameterValueTest.java @@ -16,11 +16,9 @@ package com.google.cloud.bigquery; +import static com.google.cloud.bigquery.QueryParameterValue.TIMESTAMP_FORMATTER; import static com.google.common.truth.Truth.assertThat; -import static java.time.temporal.ChronoField.HOUR_OF_DAY; -import static java.time.temporal.ChronoField.MINUTE_OF_HOUR; -import static java.time.temporal.ChronoField.NANO_OF_SECOND; -import static java.time.temporal.ChronoField.SECOND_OF_MINUTE; +import static org.junit.Assert.assertThrows; import com.google.api.services.bigquery.model.QueryParameterType; import com.google.common.collect.ImmutableMap; @@ -29,9 +27,6 @@ import java.text.ParseException; import java.time.Instant; import java.time.Period; -import java.time.ZoneOffset; 
-import java.time.format.DateTimeFormatter; -import java.time.format.DateTimeFormatterBuilder; import java.util.ArrayList; import java.util.Date; import java.util.HashMap; @@ -43,25 +38,6 @@ public class QueryParameterValueTest { - private static final DateTimeFormatter TIMESTAMPFORMATTER = - new DateTimeFormatterBuilder() - .parseLenient() - .append(DateTimeFormatter.ISO_LOCAL_DATE) - .appendLiteral(' ') - .appendValue(HOUR_OF_DAY, 2) - .appendLiteral(':') - .appendValue(MINUTE_OF_HOUR, 2) - .optionalStart() - .appendLiteral(':') - .appendValue(SECOND_OF_MINUTE, 2) - .optionalStart() - .appendFraction(NANO_OF_SECOND, 6, 9, true) - .optionalStart() - .appendOffset("+HHMM", "+00:00") - .optionalEnd() - .toFormatter() - .withZone(ZoneOffset.UTC); - private static final QueryParameterValue QUERY_PARAMETER_VALUE = QueryParameterValue.newBuilder() .setType(StandardSQLTypeName.STRING) @@ -326,11 +302,9 @@ public void testStringArray() { @Test public void testTimestampFromLong() { - QueryParameterValue value = QueryParameterValue.timestamp(1408452095220000L); - assertThat(value.getValue()).isEqualTo("2014-08-19 12:41:35.220000+00:00"); - assertThat(value.getType()).isEqualTo(StandardSQLTypeName.TIMESTAMP); - assertThat(value.getArrayType()).isNull(); - assertThat(value.getArrayValues()).isNull(); + // Expects output to be ISO8601 string with microsecond precision + assertTimestampValue( + QueryParameterValue.timestamp(1408452095220000L), "2014-08-19 12:41:35.220000+00:00"); } @Test @@ -340,43 +314,77 @@ public void testTimestampWithFormatter() { long secs = Math.floorDiv(timestampInMicroseconds, microseconds); int nano = (int) Math.floorMod(timestampInMicroseconds, microseconds) * 1000; Instant instant = Instant.ofEpochSecond(secs, nano); - String expected = TIMESTAMPFORMATTER.format(instant); - assertThat(expected) - .isEqualTo(QueryParameterValue.timestamp(timestampInMicroseconds).getValue()); + String expected = TIMESTAMP_FORMATTER.format(instant); + assertTimestampValue(QueryParameterValue.timestamp(timestampInMicroseconds), expected); } @Test - public void testTimestamp() { - QueryParameterValue value = QueryParameterValue.timestamp("2014-08-19 12:41:35.220000+00:00"); - assertThat(value.getValue()).isEqualTo("2014-08-19 12:41:35.220000+00:00"); - assertThat(value.getType()).isEqualTo(StandardSQLTypeName.TIMESTAMP); - assertThat(value.getArrayType()).isNull(); - assertThat(value.getArrayValues()).isNull(); + public void testTimestampFromString() { + assertTimestampValue( + QueryParameterValue.timestamp("2014-08-19 12:41:35.220000+00:00"), + "2014-08-19 12:41:35.220000+00:00"); + assertTimestampValue( + QueryParameterValue.timestamp("2025-08-19 12:34:56.123456789+00:00"), + "2025-08-19 12:34:56.123456789+00:00"); + + // The following test cases test more than nanosecond precision + // 10 digits of precision (1 digit more than nanosecond) + assertTimestampValue( + QueryParameterValue.timestamp("2025-12-08 12:34:56.1234567890+00:00"), + "2025-12-08 12:34:56.1234567890+00:00"); + // 12 digits (picosecond precision) + assertTimestampValue( + QueryParameterValue.timestamp("2025-12-08 12:34:56.123456789123+00:00"), + "2025-12-08 12:34:56.123456789123+00:00"); + + // More than picosecond precision + assertThrows( + IllegalArgumentException.class, + () -> QueryParameterValue.timestamp("2025-12-08 12:34:56.1234567891234+00:00")); + assertThrows( + IllegalArgumentException.class, + () -> + QueryParameterValue.timestamp("2025-12-08 12:34:56.123456789123456789123456789+00:00")); } @Test public void 
testTimestampWithDateTimeFormatterBuilder() { - QueryParameterValue value = QueryParameterValue.timestamp("2019-02-14 12:34:45.938993Z"); - assertThat(value.getValue()).isEqualTo("2019-02-14 12:34:45.938993Z"); - assertThat(value.getType()).isEqualTo(StandardSQLTypeName.TIMESTAMP); - assertThat(value.getArrayType()).isNull(); - assertThat(value.getArrayValues()).isNull(); - QueryParameterValue value1 = QueryParameterValue.timestamp("2019-02-14 12:34:45.938993+0000"); - assertThat(value1.getValue()).isEqualTo("2019-02-14 12:34:45.938993+0000"); - assertThat(value1.getType()).isEqualTo(StandardSQLTypeName.TIMESTAMP); - assertThat(value1.getArrayType()).isNull(); - assertThat(value1.getArrayValues()).isNull(); - QueryParameterValue value2 = QueryParameterValue.timestamp("2019-02-14 12:34:45.102+00:00"); - assertThat(value2.getValue()).isEqualTo("2019-02-14 12:34:45.102+00:00"); - assertThat(value2.getType()).isEqualTo(StandardSQLTypeName.TIMESTAMP); - assertThat(value2.getArrayType()).isNull(); - assertThat(value2.getArrayValues()).isNull(); + assertTimestampValue( + QueryParameterValue.timestamp("2019-02-14 12:34:45.938993Z"), + "2019-02-14 12:34:45.938993Z"); + assertTimestampValue( + QueryParameterValue.timestamp("2019-02-14 12:34:45.938993+0000"), + "2019-02-14 12:34:45.938993+0000"); + assertTimestampValue( + QueryParameterValue.timestamp("2019-02-14 12:34:45.102+00:00"), + "2019-02-14 12:34:45.102+00:00"); } - @Test(expected = IllegalArgumentException.class) - public void testInvalidTimestamp() { + @Test + public void testInvalidTimestampStringValues() { + assertThrows(IllegalArgumentException.class, () -> QueryParameterValue.timestamp("abc")); + // missing the time - QueryParameterValue.timestamp("2014-08-19"); + assertThrows(IllegalArgumentException.class, () -> QueryParameterValue.timestamp("2014-08-19")); + + // missing the hour + assertThrows( + IllegalArgumentException.class, () -> QueryParameterValue.timestamp("2014-08-19 12")); + + // can't have the 'T' separator + assertThrows( + IllegalArgumentException.class, () -> QueryParameterValue.timestamp("2014-08-19T12")); + assertThrows( + IllegalArgumentException.class, + () -> QueryParameterValue.timestamp("2014-08-19T12:34:00.123456")); + + // Fractional part has picosecond length, but fractional part is not a valid number + assertThrows( + IllegalArgumentException.class, + () -> QueryParameterValue.timestamp("2014-08-19 12:34:00.123456789abc+00:00")); + assertThrows( + IllegalArgumentException.class, + () -> QueryParameterValue.timestamp("2014-08-19 12:34:00.123456abc789+00:00")); } @Test @@ -683,4 +691,11 @@ private static void testRangeDataEquals(String start, String end, FieldElementTy assertThat(queryParameterValue.getStructValues()).isNull(); assertThat(queryParameterValue.getValue()).isNull(); } + + private void assertTimestampValue(QueryParameterValue value, String expectedStringValue) { + assertThat(value.getValue()).isEqualTo(expectedStringValue); + assertThat(value.getType()).isEqualTo(StandardSQLTypeName.TIMESTAMP); + assertThat(value.getArrayType()).isNull(); + assertThat(value.getArrayValues()).isNull(); + } } diff --git a/google-cloud-bigquery/src/test/java/com/google/cloud/bigquery/QueryRequestInfoTest.java b/google-cloud-bigquery/src/test/java/com/google/cloud/bigquery/QueryRequestInfoTest.java index ed9effe0b..866134677 100644 --- a/google-cloud-bigquery/src/test/java/com/google/cloud/bigquery/QueryRequestInfoTest.java +++ b/google-cloud-bigquery/src/test/java/com/google/cloud/bigquery/QueryRequestInfoTest.java @@ 
-140,7 +140,8 @@ public class QueryRequestInfoTest { .setJobCreationMode(jobCreationModeRequired) .setReservation(RESERVATION) .build(); - QueryRequestInfo REQUEST_INFO = new QueryRequestInfo(QUERY_JOB_CONFIGURATION, false); + QueryRequestInfo REQUEST_INFO = + new QueryRequestInfo(QUERY_JOB_CONFIGURATION, DataFormatOptions.newBuilder().build()); private static final QueryJobConfiguration QUERY_JOB_CONFIGURATION_SUPPORTED = QueryJobConfiguration.newBuilder(QUERY) .setUseQueryCache(USE_QUERY_CACHE) @@ -156,7 +157,8 @@ public class QueryRequestInfoTest { .setReservation(RESERVATION) .build(); QueryRequestInfo REQUEST_INFO_SUPPORTED = - new QueryRequestInfo(QUERY_JOB_CONFIGURATION_SUPPORTED, false); + new QueryRequestInfo( + QUERY_JOB_CONFIGURATION_SUPPORTED, DataFormatOptions.newBuilder().build()); @Test public void testIsFastQuerySupported() { @@ -177,17 +179,25 @@ public void testToPb() { @Test public void equalTo() { compareQueryRequestInfo( - new QueryRequestInfo(QUERY_JOB_CONFIGURATION_SUPPORTED, false), REQUEST_INFO_SUPPORTED); - compareQueryRequestInfo(new QueryRequestInfo(QUERY_JOB_CONFIGURATION, false), REQUEST_INFO); + new QueryRequestInfo( + QUERY_JOB_CONFIGURATION_SUPPORTED, DataFormatOptions.newBuilder().build()), + REQUEST_INFO_SUPPORTED); + compareQueryRequestInfo( + new QueryRequestInfo(QUERY_JOB_CONFIGURATION, DataFormatOptions.newBuilder().build()), + REQUEST_INFO); } @Test public void testInt64Timestamp() { - QueryRequestInfo requestInfo = new QueryRequestInfo(QUERY_JOB_CONFIGURATION, false); + QueryRequestInfo requestInfo = + new QueryRequestInfo(QUERY_JOB_CONFIGURATION, DataFormatOptions.newBuilder().build()); QueryRequest requestPb = requestInfo.toPb(); assertFalse(requestPb.getFormatOptions().getUseInt64Timestamp()); - QueryRequestInfo requestInfoLosslessTs = new QueryRequestInfo(QUERY_JOB_CONFIGURATION, true); + QueryRequestInfo requestInfoLosslessTs = + new QueryRequestInfo( + QUERY_JOB_CONFIGURATION, + DataFormatOptions.newBuilder().useInt64Timestamp(true).build()); QueryRequest requestLosslessTsPb = requestInfoLosslessTs.toPb(); assertTrue(requestLosslessTsPb.getFormatOptions().getUseInt64Timestamp()); } diff --git a/google-cloud-bigquery/src/test/java/com/google/cloud/bigquery/it/ITBigQueryTest.java b/google-cloud-bigquery/src/test/java/com/google/cloud/bigquery/it/ITBigQueryTest.java index ec1f7b5a0..80605884d 100644 --- a/google-cloud-bigquery/src/test/java/com/google/cloud/bigquery/it/ITBigQueryTest.java +++ b/google-cloud-bigquery/src/test/java/com/google/cloud/bigquery/it/ITBigQueryTest.java @@ -26,6 +26,7 @@ import static org.junit.Assert.assertNotEquals; import static org.junit.Assert.assertNotNull; import static org.junit.Assert.assertNull; +import static org.junit.Assert.assertThrows; import static org.junit.Assert.assertTrue; import static org.junit.Assert.fail; @@ -44,7 +45,6 @@ import com.google.cloud.bigquery.Acl.Expr; import com.google.cloud.bigquery.Acl.User; import com.google.cloud.bigquery.BigQuery; -import com.google.cloud.bigquery.BigQuery.DatasetDeleteOption; import com.google.cloud.bigquery.BigQuery.DatasetField; import com.google.cloud.bigquery.BigQuery.DatasetListOption; import com.google.cloud.bigquery.BigQuery.DatasetOption; @@ -71,6 +71,7 @@ import com.google.cloud.bigquery.ConnectionSettings; import com.google.cloud.bigquery.CopyJobConfiguration; import com.google.cloud.bigquery.CsvOptions; +import com.google.cloud.bigquery.DataFormatOptions; import com.google.cloud.bigquery.Dataset; import com.google.cloud.bigquery.DatasetId; import 
com.google.cloud.bigquery.DatasetInfo; @@ -1098,10 +1099,12 @@ public static void beforeClass() throws InterruptedException, IOException { .setContentType("application/json") .build(), JSON_CONTENT_SIMPLE.getBytes(StandardCharsets.UTF_8)); - InputStream stream = - ITBigQueryTest.class.getClassLoader().getResourceAsStream("QueryTestData.csv"); - storage.createFrom( - BlobInfo.newBuilder(BUCKET, LOAD_FILE_LARGE).setContentType("text/plain").build(), stream); + try (InputStream stream = + ITBigQueryTest.class.getClassLoader().getResourceAsStream("QueryTestData.csv")) { + storage.createFrom( + BlobInfo.newBuilder(BUCKET, LOAD_FILE_LARGE).setContentType("text/plain").build(), + stream); + } storage.create( BlobInfo.newBuilder(BUCKET, JSON_LOAD_FILE_BQ_RESULTSET) .setContentType("application/json") @@ -1178,10 +1181,11 @@ public static void beforeClass() throws InterruptedException, IOException { } @AfterClass - public static void afterClass() throws ExecutionException, InterruptedException { + public static void afterClass() throws Exception { if (bigquery != null) { RemoteBigQueryHelper.forceDelete(bigquery, DATASET); RemoteBigQueryHelper.forceDelete(bigquery, UK_DATASET); + RemoteBigQueryHelper.forceDelete(bigquery, OTHER_DATASET); RemoteBigQueryHelper.forceDelete(bigquery, MODEL_DATASET); RemoteBigQueryHelper.forceDelete(bigquery, ROUTINE_DATASET); } @@ -1190,7 +1194,21 @@ public static void afterClass() throws ExecutionException, InterruptedException if (!wasDeleted && LOG.isLoggable(Level.WARNING)) { LOG.log(Level.WARNING, "Deletion of bucket {0} timed out, bucket is not empty", BUCKET); } + storage.close(); + } + + if (otel instanceof OpenTelemetrySdk) { + ((OpenTelemetrySdk) otel).close(); + } + } + + static GoogleCredentials loadCredentials(String credentialFile) { + try (InputStream keyStream = new ByteArrayInputStream(credentialFile.getBytes())) { + return GoogleCredentials.fromStream(keyStream); + } catch (IOException e) { + fail("Couldn't create fake JSON credentials."); } + return null; } @Test @@ -1219,9 +1237,10 @@ public void testListDatasetsWithFilter() { for (Dataset dataset : datasets.getValues()) { assertTrue( "failed to find label key in dataset", dataset.getLabels().containsKey("example-label1")); - assertTrue( + assertEquals( "failed to find label value in dataset", - dataset.getLabels().get("example-label1").equals("example-value1")); + "example-value1", + dataset.getLabels().get("example-label1")); count++; } assertTrue(count > 0); @@ -1425,8 +1444,8 @@ public void testUpdateDatasetWithAccessPolicyVersion() throws IOException { datasetOption, updateModeOption); assertNotNull(updatedDataset); - assertEquals(updatedDataset.getDescription(), "Updated Description"); - assertThat(updatedDataset.getLabels().isEmpty()); + assertEquals("Updated Description", updatedDataset.getDescription()); + assertTrue(updatedDataset.getLabels().isEmpty()); Acl updatedAclWithCond = null; for (Acl updatedAcl : updatedDataset.getAcl()) { @@ -1565,14 +1584,14 @@ public void testJsonType() throws InterruptedException { .setUseLegacySql(false) .addPositionalParameter(badJsonParameter) .build(); - try { - bigquery.query(dmlQueryJobConfiguration2); - fail("Querying with malformed JSON shouldn't work"); - } catch (BigQueryException e) { - BigQueryError error = e.getError(); - assertNotNull(error); - assertEquals("invalidQuery", error.getReason()); - } + BigQueryException exception = + assertThrows( + "Querying with malformed JSON shouldn't work", + BigQueryException.class, + () -> 
bigquery.query(dmlQueryJobConfiguration2)); + BigQueryError error = exception.getError(); + assertNotNull(error); + assertEquals("invalidQuery", error.getReason()); } finally { assertTrue(bigquery.delete(tableId)); } @@ -1866,7 +1885,7 @@ public void testCreateDatasetWithAccessPolicyVersion() throws IOException { DatasetOption datasetOption = DatasetOption.accessPolicyVersion(3); Dataset dataset = bigquery.create(info, datasetOption); assertNotNull(dataset); - assertEquals(dataset.getDescription(), DESCRIPTION); + assertEquals(DESCRIPTION, dataset.getDescription()); Acl remoteAclWithCond = null; for (Acl remoteAcl : dataset.getAcl()) { @@ -1972,7 +1991,7 @@ public void testCreateFieldWithDefaultCollation() { Schema remoteSchema = remoteTable.getDefinition().getSchema(); // Schema should be equal because collation has been added to the fields. assertEquals(schema, remoteSchema); - assertEquals(null, remoteTable.getDefaultCollation()); + assertNull(remoteTable.getDefaultCollation()); FieldList fieldList = remoteSchema.getFields(); for (Field field : fieldList) { if (field.getName().equals("stringFieldWithoutDefaultCollation")) { @@ -2416,7 +2435,7 @@ public void testCreateExternalTable() throws InterruptedException { assertEquals(1408452095220000L, timestampCell.getTimestampValue()); assertEquals("stringValue", stringCell.getStringValue()); assertEquals(integerValue, integerCell.getLongValue()); - assertEquals(false, booleanCell.getBooleanValue()); + assertFalse(booleanCell.getBooleanValue()); integerValue = ~integerValue & 0x1; rowCount++; } @@ -2522,7 +2541,7 @@ public void testCreateViewTable() throws InterruptedException { assertEquals(FieldValue.Attribute.PRIMITIVE, booleanCell.getAttribute()); assertEquals(1408452095220000L, timestampCell.getTimestampValue()); assertEquals("stringValue", stringCell.getStringValue()); - assertEquals(false, booleanCell.getBooleanValue()); + assertFalse(booleanCell.getBooleanValue()); rowCount++; } assertEquals(2, rowCount); @@ -2800,15 +2819,15 @@ public void testUpdateNonExistingTable() { TableInfo.of( TableId.of(DATASET, "test_update_non_existing_table"), StandardTableDefinition.of(SIMPLE_SCHEMA)); - try { - bigquery.update(tableInfo); - fail("BigQueryException was expected"); - } catch (BigQueryException e) { - BigQueryError error = e.getError(); - assertNotNull(error); - assertEquals("notFound", error.getReason()); - assertNotNull(error.getMessage()); - } + BigQueryException exception = + assertThrows( + "BigQueryException was expected", + BigQueryException.class, + () -> bigquery.update(tableInfo)); + BigQueryError error = exception.getError(); + assertNotNull(error); + assertEquals("notFound", error.getReason()); + assertNotNull(error.getMessage()); } @Test @@ -3056,13 +3075,13 @@ public void testListAllTableData() { assertEquals("stringValue", stringCell.getStringValue()); assertEquals(0, integerArrayCell.getRepeatedValue().get(0).getLongValue()); assertEquals(1, integerArrayCell.getRepeatedValue().get(1).getLongValue()); - assertEquals(false, booleanCell.getBooleanValue()); + assertFalse(booleanCell.getBooleanValue()); assertArrayEquals(BYTES, bytesCell.getBytesValue()); assertEquals(-14182916000000L, recordCell.getRecordValue().get(0).getTimestampValue()); assertTrue(recordCell.getRecordValue().get(1).isNull()); assertEquals(1, recordCell.getRecordValue().get(2).getRepeatedValue().get(0).getLongValue()); assertEquals(0, recordCell.getRecordValue().get(2).getRepeatedValue().get(1).getLongValue()); - assertEquals(true, 
recordCell.getRecordValue().get(3).getBooleanValue()); + assertTrue(recordCell.getRecordValue().get(3).getBooleanValue()); assertEquals(3, integerCell.getLongValue()); assertEquals(1.2, floatCell.getDoubleValue(), 0.0001); assertEquals("POINT(-122.35022 47.649154)", geographyCell.getStringValue()); @@ -3123,21 +3142,21 @@ public void testModelLifecycle() throws InterruptedException { ModelId modelId = ModelId.of(MODEL_DATASET, modelName); Model model = bigquery.getModel(modelId); assertNotNull(model); - assertEquals(model.getModelType(), "LINEAR_REGRESSION"); + assertEquals("LINEAR_REGRESSION", model.getModelType()); // Compare the extended model metadata. - assertEquals(model.getFeatureColumns().get(0).getName(), "f1"); - assertEquals(model.getLabelColumns().get(0).getName(), "predicted_label"); + assertEquals("f1", model.getFeatureColumns().get(0).getName()); + assertEquals("predicted_label", model.getLabelColumns().get(0).getName()); assertEquals( - model.getTrainingRuns().get(0).getTrainingOptions().getLearnRateStrategy(), "CONSTANT"); + "CONSTANT", model.getTrainingRuns().get(0).getTrainingOptions().getLearnRateStrategy()); // Mutate metadata. ModelInfo info = model.toBuilder().setDescription("TEST").build(); Model afterUpdate = bigquery.update(info); - assertEquals(afterUpdate.getDescription(), "TEST"); + assertEquals("TEST", afterUpdate.getDescription()); // Ensure model is present in listModels. Page models = bigquery.listModels(MODEL_DATASET); - Boolean found = false; + boolean found = false; for (Model m : models.getValues()) { if (m.getModelId().getModel().equals(modelName)) { found = true; @@ -3158,7 +3177,7 @@ public void testEmptyListModels() { assertEquals(0, Iterables.size(models.getValues())); assertFalse(models.hasNextPage()); assertNull(models.getNextPageToken()); - assertTrue(bigquery.delete(datasetId)); + RemoteBigQueryHelper.forceDelete(bigquery, datasetId); } @Test @@ -3170,7 +3189,7 @@ public void testEmptyListRoutines() { assertEquals(0, Iterables.size(routines.getValues())); assertFalse(routines.hasNextPage()); assertNull(routines.getNextPageToken()); - assertTrue(bigquery.delete(datasetId)); + RemoteBigQueryHelper.forceDelete(bigquery, datasetId); } @Test @@ -3188,7 +3207,7 @@ public void testRoutineLifecycle() throws InterruptedException { RoutineId routineId = RoutineId.of(ROUTINE_DATASET, routineName); Routine routine = bigquery.getRoutine(routineId); assertNotNull(routine); - assertEquals(routine.getRoutineType(), "SCALAR_FUNCTION"); + assertEquals("SCALAR_FUNCTION", routine.getRoutineType()); // Mutate metadata. RoutineInfo newInfo = @@ -3199,11 +3218,11 @@ public void testRoutineLifecycle() throws InterruptedException { .setRoutineType(routine.getRoutineType()) .build(); Routine afterUpdate = bigquery.update(newInfo); - assertEquals(afterUpdate.getBody(), "x * 4"); + assertEquals("x * 4", afterUpdate.getBody()); // Ensure routine is present in listRoutines. 
Page routines = bigquery.listRoutines(ROUTINE_DATASET); - Boolean found = false; + boolean found = false; for (Routine r : routines.getValues()) { if (r.getRoutineId().getRoutine().equals(routineName)) { found = true; @@ -3235,7 +3254,7 @@ public void testRoutineAPICreation() { Routine routine = bigquery.create(routineInfo); assertNotNull(routine); - assertEquals(routine.getRoutineType(), "SCALAR_FUNCTION"); + assertEquals("SCALAR_FUNCTION", routine.getRoutineType()); } @Test @@ -3260,10 +3279,10 @@ public void testRoutineAPICreationJavascriptUDF() { Routine routine = bigquery.create(routineInfo); assertNotNull(routine); - assertEquals(routine.getLanguage(), "JAVASCRIPT"); - assertEquals(routine.getDeterminismLevel(), "DETERMINISTIC"); - assertEquals(routine.getRoutineType(), "SCALAR_FUNCTION"); - assertEquals(routine.getReturnType(), StandardSQLDataType.newBuilder("STRING").build()); + assertEquals("JAVASCRIPT", routine.getLanguage()); + assertEquals("DETERMINISTIC", routine.getDeterminismLevel()); + assertEquals("SCALAR_FUNCTION", routine.getRoutineType()); + assertEquals(StandardSQLDataType.newBuilder("STRING").build(), routine.getReturnType()); } @Test @@ -3290,8 +3309,8 @@ public void testRoutineAPICreationTVF() { .build(); Routine routine = bigquery.create(routineInfo); assertNotNull(routine); - assertEquals(routine.getRoutineType(), "TABLE_VALUED_FUNCTION"); - assertEquals(routine.getReturnTableType(), returnTableType); + assertEquals("TABLE_VALUED_FUNCTION", routine.getRoutineType()); + assertEquals(returnTableType, routine.getReturnTableType()); } @Test @@ -3315,10 +3334,10 @@ public void testRoutineDataGovernanceType() { Routine routine = bigquery.create(routineInfo); assertNotNull(routine); - assertEquals(routine.getLanguage(), "SQL"); - assertEquals(routine.getRoutineType(), "SCALAR_FUNCTION"); - assertEquals(routine.getReturnType(), StandardSQLDataType.newBuilder("INT64").build()); - assertEquals(routine.getDataGovernanceType(), "DATA_MASKING"); + assertEquals("SQL", routine.getLanguage()); + assertEquals("SCALAR_FUNCTION", routine.getRoutineType()); + assertEquals(StandardSQLDataType.newBuilder("INT64").build(), routine.getReturnType()); + assertEquals("DATA_MASKING", routine.getDataGovernanceType()); } @Test @@ -3339,7 +3358,7 @@ public void testAuthorizeRoutine() { .build(); Routine routine = bigquery.create(routineInfo); assertNotNull(routine); - assertEquals(routine.getRoutineType(), "SCALAR_FUNCTION"); + assertEquals("SCALAR_FUNCTION", routine.getRoutineType()); Dataset routineDataset = bigquery.getDataset(ROUTINE_DATASET); List routineAcl = new ArrayList<>(routineDataset.getAcl()); routineAcl.add(Acl.of(new Acl.Routine(routineId))); @@ -3361,7 +3380,7 @@ public void testAuthorizeDataset() { DatasetInfo.newBuilder(datasetId).setAcl(acl).setDescription("shared Dataset").build(); Dataset sharedDataset = bigquery.create(datasetInfo); assertNotNull(sharedDataset); - assertEquals(sharedDataset.getDescription(), "shared Dataset"); + assertEquals("shared Dataset", sharedDataset.getDescription()); // Get the current metadata for the dataset you want to share by calling the datasets.get method List sharedDatasetAcl = new ArrayList<>(sharedDataset.getAcl()); @@ -3375,7 +3394,7 @@ public void testAuthorizeDataset() { Dataset authorizedDataset = bigquery.create(authorizedDatasetInfo); assertNotNull(authorizedDataset); assertEquals( - authorizedDataset.getDescription(), "new Dataset to be authorized by the sharedDataset"); + "new Dataset to be authorized by the sharedDataset", 
authorizedDataset.getDescription()); // Add the new DatasetAccessEntry object to the existing sharedDatasetAcl list DatasetAclEntity datasetEntity = new DatasetAclEntity(authorizedDatasetId, targetTypes); @@ -3386,6 +3405,9 @@ public void testAuthorizeDataset() { // Verify that the authorized dataset has been added assertEquals(sharedDatasetAcl, updatedDataset.getAcl()); + + RemoteBigQueryHelper.forceDelete(bigquery, datasetName); + RemoteBigQueryHelper.forceDelete(bigquery, authorizedDatasetName); } /* TODO(prasmish): replicate the entire test case for executeSelect */ @@ -3393,16 +3415,16 @@ public void testAuthorizeDataset() { public void testSingleStatementsQueryException() throws InterruptedException { String invalidQuery = String.format("INSERT %s.%s VALUES('3', 10);", DATASET, TABLE_ID.getTable()); - try { - bigquery.create(JobInfo.of(QueryJobConfiguration.of(invalidQuery))).waitFor(); - fail("BigQueryException was expected"); - } catch (BigQueryException ex) { - assertEquals("invalidQuery", ex.getReason()); - assertNotNull(ex.getMessage()); - BigQueryError error = ex.getError(); - assertEquals("invalidQuery", error.getReason()); - assertNotNull(error.getMessage()); - } + BigQueryException exception = + assertThrows( + "BigQueryException was expected", + BigQueryException.class, + () -> bigquery.create(JobInfo.of(QueryJobConfiguration.of(invalidQuery))).waitFor()); + assertEquals("invalidQuery", exception.getReason()); + assertNotNull(exception.getMessage()); + BigQueryError error = exception.getError(); + assertEquals("invalidQuery", error.getReason()); + assertNotNull(error.getMessage()); } /* TODO(prasmish): replicate the entire test case for executeSelect */ @@ -3412,16 +3434,16 @@ public void testMultipleStatementsQueryException() throws InterruptedException { String.format( "INSERT %s.%s VALUES('3', 10); DELETE %s.%s where c2=3;", DATASET, TABLE_ID.getTable(), DATASET, TABLE_ID.getTable()); - try { - bigquery.create(JobInfo.of(QueryJobConfiguration.of(invalidQuery))).waitFor(); - fail("BigQueryException was expected"); - } catch (BigQueryException ex) { - assertEquals("invalidQuery", ex.getReason()); - assertNotNull(ex.getMessage()); - BigQueryError error = ex.getError(); - assertEquals("invalidQuery", error.getReason()); - assertNotNull(error.getMessage()); - } + BigQueryException exception = + assertThrows( + "BigQueryException was expected", + BigQueryException.class, + () -> bigquery.create(JobInfo.of(QueryJobConfiguration.of(invalidQuery))).waitFor()); + assertEquals("invalidQuery", exception.getReason()); + assertNotNull(exception.getMessage()); + BigQueryError error = exception.getError(); + assertEquals("invalidQuery", error.getReason()); + assertNotNull(error.getMessage()); } @Test @@ -3462,8 +3484,11 @@ public void testLosslessTimestamp() throws InterruptedException { // Create new BQ object to toggle lossless timestamps without affecting // other tests. 
RemoteBigQueryHelper bigqueryHelper = RemoteBigQueryHelper.create(); - BigQuery bigqueryLossless = bigqueryHelper.getOptions().getService(); - bigqueryLossless.getOptions().setUseInt64Timestamps(true); + DataFormatOptions dataFormatOptions = + DataFormatOptions.newBuilder().useInt64Timestamp(true).build(); + BigQueryOptions options = + bigqueryHelper.getOptions().toBuilder().setDataFormatOptions(dataFormatOptions).build(); + BigQuery bigqueryLossless = options.getService(); TableResult resultLossless = bigqueryLossless.query( @@ -3502,7 +3527,7 @@ public void testQuery() throws InterruptedException { assertEquals(FieldValue.Attribute.PRIMITIVE, booleanCell.getAttribute()); assertEquals(1408452095220000L, timestampCell.getTimestampValue()); assertEquals("stringValue", stringCell.getStringValue()); - assertEquals(false, booleanCell.getBooleanValue()); + assertFalse(booleanCell.getBooleanValue()); rowCount++; } assertEquals(2, rowCount); @@ -3654,12 +3679,7 @@ public void testExecuteSelectWithCredentials() throws SQLException { BigQuery bigQueryBadCredentials = bigQueryOptionsBadCredentials.getService(); Connection connectionBadCredentials = bigQueryBadCredentials.createConnection(connectionSettings); - try { - connectionBadCredentials.executeSelect(query); - fail(); // this line should not be reached - } catch (BigQuerySQLException e) { - assertNotNull(e); - } + assertThrows(BigQuerySQLException.class, () -> connectionBadCredentials.executeSelect(query)); } /* TODO(prasmish): replicate the entire test case for executeSelect */ @@ -3933,14 +3953,14 @@ public void testExecuteSelectSinglePageTableRow() throws SQLException { assertTrue(rs.next()); // first row // checking for the null or 0 column values assertNull(rs.getString("StringField")); - assertTrue(rs.getDouble("BigNumericField") == 0.0d); + assertEquals(0.0d, rs.getDouble("BigNumericField"), 1e-9); assertFalse(rs.getBoolean("BooleanField")); assertNull(rs.getBytes("BytesField")); - assertEquals(rs.getInt("IntegerField"), 0); + assertEquals(0, rs.getInt("IntegerField")); assertNull(rs.getTimestamp("TimestampField")); assertNull(rs.getDate("DateField")); - assertTrue(rs.getDouble("FloatField") == 0.0d); - assertTrue(rs.getDouble("NumericField") == 0.0d); + assertEquals(0.0d, rs.getDouble("FloatField"), 1e-9); + assertEquals(0.0d, rs.getDouble("NumericField"), 1e-9); assertNull(rs.getTime("TimeField")); assertNull(rs.getString("DateTimeField")); assertNull(rs.getString("GeographyField")); @@ -3950,14 +3970,14 @@ public void testExecuteSelectSinglePageTableRow() throws SQLException { assertTrue(rs.next()); // second row // second row is non null, comparing the values assertEquals("StringValue1", rs.getString("StringField")); - assertTrue(rs.getDouble("BigNumericField") == 0.3333333333333333d); + assertEquals(0.3333333333333333d, rs.getDouble("BigNumericField"), 1e-9); assertFalse(rs.getBoolean("BooleanField")); assertNotNull(rs.getBytes("BytesField")); assertEquals(1, rs.getInt("IntegerField")); assertEquals(1534680695123L, rs.getTimestamp("TimestampField").getTime()); assertEquals(java.sql.Date.valueOf("2018-08-19"), rs.getDate("DateField")); - assertTrue(rs.getDouble("FloatField") == 10.1d); - assertTrue(rs.getDouble("NumericField") == 100.0d); + assertEquals(10.1d, rs.getDouble("FloatField"), 1e-9); + assertEquals(100.0d, rs.getDouble("NumericField"), 1e-9); assertEquals(Time.valueOf(LocalTime.of(12, 11, 35, 123456)), rs.getTime("TimeField")); assertEquals("2018-08-19T12:11:35.123456", rs.getString("DateTimeField")); 
assertEquals("POINT(-122.35022 47.649154)", rs.getString("GeographyField")); @@ -4003,14 +4023,14 @@ public void testExecuteSelectSinglePageTableRowWithReadAPI() throws SQLException assertTrue(rs.next()); // first row // checking for the null or 0 column values assertNull(rs.getString("StringField")); - assertTrue(rs.getDouble("BigNumericField") == 0.0d); + assertEquals(0.0d, rs.getDouble("BigNumericField"), 1e-9); assertFalse(rs.getBoolean("BooleanField")); assertNull(rs.getBytes("BytesField")); - assertEquals(rs.getInt("IntegerField"), 0); + assertEquals(0, rs.getInt("IntegerField")); assertNull(rs.getTimestamp("TimestampField")); assertNull(rs.getDate("DateField")); - assertTrue(rs.getDouble("FloatField") == 0.0d); - assertTrue(rs.getDouble("NumericField") == 0.0d); + assertEquals(0.0d, rs.getDouble("FloatField"), 1e-9); + assertEquals(0.0d, rs.getDouble("NumericField"), 1e-9); assertNull(rs.getTime("TimeField")); assertNull(rs.getString("DateTimeField")); assertNull(rs.getString("GeographyField")); @@ -4020,14 +4040,14 @@ public void testExecuteSelectSinglePageTableRowWithReadAPI() throws SQLException assertTrue(rs.next()); // second row // second row is non null, comparing the values assertEquals("StringValue1", rs.getString("StringField")); - assertTrue(rs.getDouble("BigNumericField") == 0.3333333333333333d); + assertEquals(0.3333333333333333d, rs.getDouble("BigNumericField"), 1e-9); assertFalse(rs.getBoolean("BooleanField")); assertNotNull(rs.getBytes("BytesField")); assertEquals(1, rs.getInt("IntegerField")); assertEquals(1534680695123L, rs.getTimestamp("TimestampField").getTime()); assertEquals(java.sql.Date.valueOf("2018-08-19"), rs.getDate("DateField")); - assertTrue(rs.getDouble("FloatField") == 10.1d); - assertTrue(rs.getDouble("NumericField") == 100.0d); + assertEquals(10.1d, rs.getDouble("FloatField"), 1e-9); + assertEquals(100.0d, rs.getDouble("NumericField"), 1e-9); assertEquals( Time.valueOf(LocalTime.of(12, 11, 35, 123456)).toString(), rs.getTime("TimeField").toString()); @@ -4214,12 +4234,7 @@ public void testExecuteSelectAsyncCancel() }); testCloseAsync.start(); - try { - executeSelectFut.get(); - fail(); // this line should not be reached - } catch (CancellationException e) { - assertNotNull(e); - } + assertThrows(CancellationException.class, executeSelectFut::get); } @Test @@ -4352,7 +4367,7 @@ public void testExecuteSelectSinglePageTableRowColInd() throws SQLException { assertEquals(2, bigQueryResult.getTotalRows()); // Expecting 2 rows while (rs.next()) { assertEquals(rs.getString(0), rs.getString("StringField")); - assertTrue(rs.getDouble(1) == rs.getDouble("BigNumericField")); + assertEquals(rs.getDouble(1), rs.getDouble("BigNumericField"), 1e-9); assertEquals(rs.getBoolean(2), rs.getBoolean("BooleanField")); if (rs.getBytes(3) == null) { // both overloads should be null assertEquals(rs.getBytes(3), rs.getBytes("BytesField")); @@ -4364,8 +4379,8 @@ public void testExecuteSelectSinglePageTableRowColInd() throws SQLException { assertEquals(rs.getInt(4), rs.getInt("IntegerField")); assertEquals(rs.getTimestamp(5), rs.getTimestamp("TimestampField")); assertEquals(rs.getDate(9), rs.getDate("DateField")); - assertTrue(rs.getDouble("FloatField") == rs.getDouble(6)); - assertTrue(rs.getDouble("NumericField") == rs.getDouble(7)); + assertEquals(rs.getDouble("FloatField"), rs.getDouble(6), 1e-9); + assertEquals(rs.getDouble("NumericField"), rs.getDouble(7), 1e-9); assertEquals(rs.getTime(8), rs.getTime("TimeField")); assertEquals(rs.getString(10), 
rs.getString("DateTimeField")); assertEquals(rs.getString(11), rs.getString("GeographyField")); @@ -4626,7 +4641,7 @@ public void testFastSQLQuery() throws InterruptedException { assertEquals(FieldValue.Attribute.PRIMITIVE, booleanCell.getAttribute()); assertEquals(1408452095220000L, timestampCell.getTimestampValue()); assertEquals("stringValue", stringCell.getStringValue()); - assertEquals(false, booleanCell.getBooleanValue()); + assertFalse(booleanCell.getBooleanValue()); } } @@ -4796,7 +4811,7 @@ public void testFastDDLQuery() throws InterruptedException { assertEquals(FieldValue.Attribute.PRIMITIVE, booleanCell.getAttribute()); assertEquals(1408452095220000L, timestampCell.getTimestampValue()); assertEquals("stringValue", stringCell.getStringValue()); - assertEquals(false, booleanCell.getBooleanValue()); + assertFalse(booleanCell.getBooleanValue()); } } @@ -4841,14 +4856,14 @@ public void testFastQueryHTTPException() throws InterruptedException { QueryJobConfiguration.newBuilder(queryInvalid) .setDefaultDataset(DatasetId.of(DATASET)) .build(); - try { - bigquery.query(configInvalidQuery); - fail("\"BigQueryException was expected\""); - } catch (BigQueryException e) { - BigQueryError error = e.getError(); - assertNotNull(error.getMessage()); - assertEquals("invalidQuery", error.getReason()); - } + BigQueryException exception = + assertThrows( + "BigQueryException was expected", + BigQueryException.class, + () -> bigquery.query(configInvalidQuery)); + BigQueryError error = exception.getError(); + assertNotNull(error.getMessage()); + assertEquals("invalidQuery", error.getReason()); String queryMissingTable = "SELECT * FROM " + TableId.of(DATASET, "non_existing_table").getTable(); @@ -4856,14 +4871,15 @@ public void testFastQueryHTTPException() throws InterruptedException { QueryJobConfiguration.newBuilder(queryMissingTable) .setDefaultDataset(DatasetId.of(DATASET)) .build(); - try { - bigquery.query(configMissingTable); - fail("\"BigQueryException was expected\""); - } catch (BigQueryException e) { - BigQueryError error = e.getError(); - assertNotNull(error.getMessage()); - assertEquals("notFound", error.getReason()); - } + + BigQueryException exception1 = + assertThrows( + "BigQueryException was expected", + BigQueryException.class, + () -> bigquery.query(configMissingTable)); + BigQueryError error1 = exception1.getError(); + assertNotNull(error1.getMessage()); + assertEquals("notFound", error1.getReason()); } @Test @@ -5237,7 +5253,7 @@ public void testPositionalQueryParameters() throws InterruptedException { (long) Double.parseDouble("1.40845209522E9"), (long) Double.parseDouble(values.get(0).getValue().toString())); assertEquals("stringValue", values.get(1).getValue()); - assertEquals(false, values.get(2).getBooleanValue()); + assertFalse(values.get(2).getBooleanValue()); assertEquals("0.33333333333333333333333333333333333333", values.get(3).getValue()); assertEquals("0.00000000000000000000000000000000000001", values.get(4).getValue()); assertEquals("-100000000000000000000000000000000000000", values.get(5).getValue()); @@ -5545,12 +5561,11 @@ public void testEmptyRepeatedRecordNamedQueryParameters() throws InterruptedExce .setUseLegacySql(false) .addNamedParameter("repeatedRecordField", repeatedRecord) .build(); - try { - bigquery.query(config); - fail("an empty array of struct query parameter shouldn't work with 'IN UNNEST'"); - } catch (BigQueryException e) { - // Nothing to do - } + + assertThrows( + "an empty array of struct query parameter shouldn't work with 'IN UNNEST'", + 
BigQueryException.class, + () -> bigquery.query(config)); } @Test @@ -5567,15 +5582,15 @@ public void testStructQuery() throws InterruptedException { assertEquals(2, Iterables.size(result.getValues())); for (FieldValueList values : result.iterateAll()) { for (FieldValue value : values) { - assertEquals(null, value.getRecordValue().get("StringField").getValue()); - assertEquals(true, value.getRecordValue().get("BooleanField").getBooleanValue()); + assertNull(value.getRecordValue().get("StringField").getValue()); + assertTrue(value.getRecordValue().get("BooleanField").getBooleanValue()); } } } private static void assertsFieldValue(FieldValue record) { assertEquals(FieldValue.Attribute.RECORD, record.getAttribute()); - assertEquals(true, record.getRecordValue().get("booleanField").getBooleanValue()); + assertTrue(record.getRecordValue().get("booleanField").getBooleanValue()); assertEquals(10, record.getRecordValue().get("integerField").getLongValue()); assertEquals("test-stringField", record.getRecordValue().get("stringField").getStringValue()); } @@ -5610,12 +5625,12 @@ public void testNestedStructNamedQueryParameters() throws InterruptedException { for (FieldValueList values : result.iterateAll()) { for (FieldValue value : values) { assertEquals(Attribute.RECORD, value.getAttribute()); - assertEquals(true, value.getRecordValue().get(0).getRecordValue().get(0).getBooleanValue()); + assertTrue(value.getRecordValue().get(0).getRecordValue().get(0).getBooleanValue()); assertEquals(10, value.getRecordValue().get(0).getRecordValue().get(1).getLongValue()); assertEquals( "test-stringField", value.getRecordValue().get(0).getRecordValue().get(2).getStringValue()); - assertEquals(true, value.getRecordValue().get(1).getBooleanValue()); + assertTrue(value.getRecordValue().get(1).getBooleanValue()); assertEquals("test-stringField", value.getRecordValue().get(2).getStringValue()); assertEquals(10, value.getRecordValue().get(3).getLongValue()); } @@ -5662,7 +5677,7 @@ public void testGeographyParameter() throws Exception { int rowCount = 0; for (FieldValueList row : result.getValues()) { rowCount++; - assertEquals(true, row.get(0).getBooleanValue()); + assertTrue(row.get(0).getBooleanValue()); } assertEquals(1, rowCount); } @@ -5890,7 +5905,7 @@ public void testSnapshotTableCopyJob() throws InterruptedException { String sourceTableName = "test_copy_job_base_table"; String ddlTableName = TABLE_ID_DDL.getTable(); // this creates a snapshot table at specified snapshotTime - String snapshotTableName = String.format("test_snapshot_table"); + String snapshotTableName = "test_snapshot_table"; // Create source table with some data in it String ddlQuery = String.format( @@ -5926,7 +5941,6 @@ public void testSnapshotTableCopyJob() throws InterruptedException { assertNotNull(snapshotTable); assertEquals(snapshotTableId.getDataset(), snapshotTable.getTableId().getDataset()); assertEquals(snapshotTableName, snapshotTable.getTableId().getTable()); - System.out.println(snapshotTable.getDefinition()); assertTrue(snapshotTable.getDefinition() instanceof SnapshotTableDefinition); assertEquals(DDL_TABLE_SCHEMA, snapshotTable.getDefinition().getSchema()); assertNotNull(((SnapshotTableDefinition) snapshotTable.getDefinition()).getSnapshotTime()); @@ -6022,7 +6036,7 @@ public void testQueryJob() throws InterruptedException, TimeoutException { assertEquals(FieldValue.Attribute.PRIMITIVE, booleanCell.getAttribute()); assertEquals(1408452095220000L, timestampCell.getTimestampValue()); assertEquals("stringValue", 
stringCell.getStringValue()); - assertEquals(false, booleanCell.getBooleanValue()); + assertFalse(booleanCell.getBooleanValue()); rowCount++; } assertEquals(2, rowCount); @@ -6030,12 +6044,12 @@ public void testQueryJob() throws InterruptedException, TimeoutException { Job queryJob = bigquery.getJob(remoteJob.getJobId()); JobStatistics.QueryStatistics statistics = queryJob.getStatistics(); if (statistics.getBiEngineStats() != null) { - assertEquals(statistics.getBiEngineStats().getBiEngineMode(), "DISABLED"); + assertEquals("DISABLED", statistics.getBiEngineStats().getBiEngineMode()); assertEquals( - statistics.getBiEngineStats().getBiEngineReasons().get(0).getCode(), "OTHER_REASON"); + "OTHER_REASON", statistics.getBiEngineStats().getBiEngineReasons().get(0).getCode()); assertEquals( - statistics.getBiEngineStats().getBiEngineReasons().get(0).getMessage(), - "Only SELECT queries without a destination table can be accelerated."); + "Only SELECT queries without a destination table can be accelerated.", + statistics.getBiEngineStats().getBiEngineReasons().get(0).getMessage()); } assertNotNull(statistics.getQueryPlan()); } @@ -6103,11 +6117,11 @@ public void testQueryJobWithSearchReturnsSearchStatisticsUnused() throws Interru assertNull(remoteJob.getStatus().getError()); JobStatistics.QueryStatistics stats = remoteJob.getStatistics(); assertNotNull(stats.getSearchStats()); - assertEquals(stats.getSearchStats().getIndexUsageMode(), "UNUSED"); + assertEquals("UNUSED", stats.getSearchStats().getIndexUsageMode()); assertNotNull(stats.getSearchStats().getIndexUnusedReasons()); - assertNotNull( - stats.getSearchStats().getIndexUnusedReasons().get(0).getCode(), - "INDEX_CONFIG_NOT_AVAILABLE"); + assertEquals( + "INDEX_CONFIG_NOT_AVAILABLE", + stats.getSearchStats().getIndexUnusedReasons().get(0).getCode()); } finally { bigquery.delete(destinationTable); } @@ -6184,8 +6198,8 @@ public void testLoadJobWithDecimalTargetTypes() throws InterruptedException { Table remoteTable = bigquery.getTable(DATASET, tableName); assertNotNull(remoteTable); assertEquals( - remoteTable.getDefinition().getSchema().getFields().get(0).getType().toString(), - "BIGNUMERIC"); + "BIGNUMERIC", + remoteTable.getDefinition().getSchema().getFields().get(0).getType().toString()); } finally { bigquery.delete(destinationTable); } @@ -6206,8 +6220,8 @@ public void testExternalTableWithDecimalTargetTypes() throws InterruptedExceptio Table remoteTable = bigquery.getTable(DATASET, tableName); assertNotNull(remoteTable); assertEquals( - remoteTable.getDefinition().getSchema().getFields().get(0).getType().toString(), - "BIGNUMERIC"); + "BIGNUMERIC", + remoteTable.getDefinition().getSchema().getFields().get(0).getType().toString()); assertTrue(remoteTable.delete()); } @@ -6361,20 +6375,17 @@ public void testInsertFromFile() throws InterruptedException, IOException, Timeo .setCreateDisposition(JobInfo.CreateDisposition.CREATE_IF_NEEDED) .setSchema(TABLE_SCHEMA) .build(); - TableDataWriteChannel channel = bigquery.writer(configuration); - try { + try (TableDataWriteChannel channel = bigquery.writer(configuration)) { // A zero byte write should not throw an exception. assertEquals(0, channel.write(ByteBuffer.wrap("".getBytes(StandardCharsets.UTF_8)))); - } finally { - // Force the channel to flush by calling `close`. 
- channel.close(); } - channel = bigquery.writer(configuration); + TableDataWriteChannel channel = bigquery.writer(configuration); try { channel.write(ByteBuffer.wrap(JSON_CONTENT.getBytes(StandardCharsets.UTF_8))); } finally { channel.close(); } + // Channel must close before retrieving the job Job job = channel.getJob().waitFor(); LoadStatistics statistics = job.getStatistics(); assertEquals(1L, statistics.getInputFiles().longValue()); @@ -6410,13 +6421,13 @@ public void testInsertFromFile() throws InterruptedException, IOException, Timeo assertEquals("stringValue", stringCell.getStringValue()); assertEquals(0, integerArrayCell.getRepeatedValue().get(0).getLongValue()); assertEquals(1, integerArrayCell.getRepeatedValue().get(1).getLongValue()); - assertEquals(false, booleanCell.getBooleanValue()); + assertFalse(booleanCell.getBooleanValue()); assertArrayEquals(BYTES, bytesCell.getBytesValue()); assertEquals(-14182916000000L, recordCell.getRecordValue().get(0).getTimestampValue()); assertTrue(recordCell.getRecordValue().get(1).isNull()); assertEquals(1, recordCell.getRecordValue().get(2).getRepeatedValue().get(0).getLongValue()); assertEquals(0, recordCell.getRecordValue().get(2).getRepeatedValue().get(1).getLongValue()); - assertEquals(true, recordCell.getRecordValue().get(3).getBooleanValue()); + assertTrue(recordCell.getRecordValue().get(3).getBooleanValue()); assertEquals(3, integerCell.getLongValue()); assertEquals(1.2, floatCell.getDoubleValue(), 0.0001); assertEquals("POINT(-122.35022 47.649154)", geographyCell.getStringValue()); @@ -6439,20 +6450,17 @@ public void testInsertFromFileWithLabels() .setSchema(TABLE_SCHEMA) .setLabels(LABELS) .build(); - TableDataWriteChannel channel = bigquery.writer(configuration); - try { + try (TableDataWriteChannel channel = bigquery.writer(configuration)) { // A zero byte write should not throw an exception. assertEquals(0, channel.write(ByteBuffer.wrap("".getBytes(StandardCharsets.UTF_8)))); - } finally { - // Force the channel to flush by calling `close`. 
- channel.close(); } - channel = bigquery.writer(configuration); + TableDataWriteChannel channel = bigquery.writer(configuration); try { channel.write(ByteBuffer.wrap(JSON_CONTENT.getBytes(StandardCharsets.UTF_8))); } finally { channel.close(); } + // Channel must close before retrieving the job Job job = channel.getJob().waitFor(); LoadJobConfiguration jobConfiguration = job.getConfiguration(); assertEquals(TABLE_SCHEMA, jobConfiguration.getSchema()); @@ -6478,6 +6486,7 @@ public void testInsertWithDecimalTargetTypes() } finally { channel.close(); } + // Channel must close before retrieving the job Job job = channel.getJob().waitFor(); LoadJobConfiguration jobConfiguration = job.getConfiguration(); assertNull(job.getStatus().getError()); @@ -6495,23 +6504,21 @@ public void testLocation() throws Exception { assertThat(location).isNotEqualTo(wrongLocation); Tracer tracer = otel.getTracer("Test Tracer"); - bigquery = + BigQuery otelBigquery = bigquery.getOptions().toBuilder() .setEnableOpenTelemetryTracing(true) .setOpenTelemetryTracer(tracer) .build() .getService(); + String datasetName = "locationset_" + UUID.randomUUID().toString().replace("-", "_"); Dataset dataset = - bigquery.create( - DatasetInfo.newBuilder("locationset_" + UUID.randomUUID().toString().replace("-", "_")) - .setLocation(location) - .build()); + otelBigquery.create(DatasetInfo.newBuilder(datasetName).setLocation(location).build()); try { TableId tableId = TableId.of(dataset.getDatasetId().getDataset(), "sometable"); Schema schema = Schema.of(Field.of("name", LegacySQLTypeName.STRING)); TableDefinition tableDef = StandardTableDefinition.of(schema); - Table table = bigquery.create(TableInfo.newBuilder(tableId, tableDef).build()); + Table table = otelBigquery.create(TableInfo.newBuilder(tableId, tableDef).build()); String query = String.format( @@ -6521,52 +6528,47 @@ public void testLocation() throws Exception { table.getTableId().getTable()); // Test create/get - { - Job job = - bigquery.create( - JobInfo.of( - JobId.newBuilder().setLocation(location).build(), - QueryJobConfiguration.of(query))); - job = job.waitFor(); - assertThat(job.getStatus().getError()).isNull(); - - assertThat(job.getJobId().getLocation()).isEqualTo(location); - - JobId jobId = job.getJobId(); - JobId wrongId = jobId.toBuilder().setLocation(wrongLocation).build(); - - // Getting with location should work. - assertThat(bigquery.getJob(jobId)).isNotNull(); - // Getting with wrong location shouldn't work. - assertThat(bigquery.getJob(wrongId)).isNull(); - - // Cancelling with location should work. (Cancelling already finished job is fine.) - assertThat(bigquery.cancel(jobId)).isTrue(); - // Cancelling with wrong location shouldn't work. - assertThat(bigquery.cancel(wrongId)).isFalse(); - } + Job job = + otelBigquery.create( + JobInfo.of( + JobId.newBuilder().setLocation(location).build(), + QueryJobConfiguration.of(query))); + job = job.waitFor(); + assertThat(job.getStatus().getError()).isNull(); + + assertThat(job.getJobId().getLocation()).isEqualTo(location); + + JobId jobId = job.getJobId(); + JobId wrongId = jobId.toBuilder().setLocation(wrongLocation).build(); + + // Getting with location should work. + assertThat(otelBigquery.getJob(jobId)).isNotNull(); + // Getting with wrong location shouldn't work. + assertThat(otelBigquery.getJob(wrongId)).isNull(); + + // Cancelling with location should work. (Cancelling already finished job is fine.) + assertThat(otelBigquery.cancel(jobId)).isTrue(); + // Cancelling with wrong location shouldn't work. 
+ assertThat(otelBigquery.cancel(wrongId)).isFalse(); // Test query - { - assertThat( - bigquery - .query( - QueryJobConfiguration.of(query), - JobId.newBuilder().setLocation(location).build()) - .iterateAll()) - .isEmpty(); - - try { - bigquery - .query( - QueryJobConfiguration.of(query), - JobId.newBuilder().setLocation(wrongLocation).build()) - .iterateAll(); - fail("querying a table with wrong location shouldn't work"); - } catch (BigQueryException e) { - // Nothing to do - } - } + assertThat( + otelBigquery + .query( + QueryJobConfiguration.of(query), + JobId.newBuilder().setLocation(location).build()) + .iterateAll()) + .isEmpty(); + + assertThrows( + "querying a table with wrong location shouldn't work", + BigQueryException.class, + () -> + otelBigquery + .query( + QueryJobConfiguration.of(query), + JobId.newBuilder().setLocation(wrongLocation).build()) + .iterateAll()); // Test write { @@ -6575,7 +6577,7 @@ public void testLocation() throws Exception { .setFormatOptions(FormatOptions.csv()) .build(); try (TableDataWriteChannel writer = - bigquery.writer( + otelBigquery.writer( JobId.newBuilder().setLocation(location).build(), writeChannelConfiguration)) { writer.write(ByteBuffer.wrap("foo".getBytes())); assertEquals( @@ -6585,22 +6587,18 @@ public void testLocation() throws Exception { location); } - try { - bigquery.writer( - JobId.newBuilder().setLocation(wrongLocation).build(), writeChannelConfiguration); - fail("writing to a table with wrong location shouldn't work"); - } catch (BigQueryException e) { - // Nothing to do - } + assertThrows( + "writing to a table with wrong location shouldn't work", + BigQueryException.class, + () -> { + try (TableDataWriteChannel ignore = + otelBigquery.writer( + JobId.newBuilder().setLocation(wrongLocation).build(), + writeChannelConfiguration)) {} + }); } } finally { - bigquery.delete(dataset.getDatasetId(), DatasetDeleteOption.deleteContents()); - bigquery = - bigquery.getOptions().toBuilder() - .setEnableOpenTelemetryTracing(false) - .setOpenTelemetryTracer(null) - .build() - .getService(); + RemoteBigQueryHelper.forceDelete(bigquery, datasetName); } } @@ -6622,6 +6620,7 @@ public void testWriteChannelPreserveAsciiControlCharacters() } finally { channel.close(); } + // Channel must close before retrieving the job Job job = channel.getJob().waitFor(); assertNull(job.getStatus().getError()); Page rows = bigquery.listTableData(tableId); @@ -6696,17 +6695,16 @@ public void testReferenceFileSchemaUriForAvro() { Job job = bigquery.create(JobInfo.of(loadJobConfiguration)); // Blocks until this load table job completes its execution, either failing or succeeding. 
job = job.waitFor(); - assertEquals(true, job.isDone()); + assertTrue(job.isDone()); LoadJobConfiguration actualLoadJobConfiguration = job.getConfiguration(); Table generatedTable = bigquery.getTable(actualLoadJobConfiguration.getDestinationTable()); assertEquals(expectedSchema, generatedTable.getDefinition().getSchema()); // clean up after test to avoid conflict with other tests - boolean success = bigquery.delete(tableId); - assertEquals(true, success); + assertTrue(bigquery.delete(tableId)); } catch (BigQueryException | InterruptedException e) { - System.out.println("Column not added during load append \n" + e.toString()); + System.out.println("Column not added during load append \n" + e); } } @@ -6756,16 +6754,15 @@ public void testReferenceFileSchemaUriForParquet() { Job job = bigquery.create(JobInfo.of(loadJobConfiguration)); // Blocks until this load table job completes its execution, either failing or succeeding. job = job.waitFor(); - assertEquals(true, job.isDone()); + assertTrue(job.isDone()); LoadJobConfiguration actualLoadJobConfiguration = job.getConfiguration(); Table generatedTable = bigquery.getTable(actualLoadJobConfiguration.getDestinationTable()); assertEquals(expectedSchema, generatedTable.getDefinition().getSchema()); // clean up after test to avoid conflict with other tests - boolean success = bigquery.delete(tableId); - assertEquals(true, success); + assertTrue(bigquery.delete(tableId)); } catch (BigQueryException | InterruptedException e) { - System.out.println("Column not added during load append \n" + e.toString()); + System.out.println("Column not added during load append \n" + e); } } @@ -6805,8 +6802,7 @@ public void testCreateExternalTableWithReferenceFileSchemaAvro() { Table generatedTable = bigquery.getTable(createdTable.getTableId()); assertEquals(expectedSchema, generatedTable.getDefinition().getSchema()); // clean up after test to avoid conflict with other tests - boolean success = bigquery.delete(tableId); - assertEquals(true, success); + assertTrue(bigquery.delete(tableId)); } @Test @@ -6847,15 +6843,14 @@ public void testCreateExternalTableWithReferenceFileSchemaParquet() { Table generatedTable = bigquery.getTable(createdTable.getTableId()); assertEquals(expectedSchema, generatedTable.getDefinition().getSchema()); // clean up after test to avoid conflict with other tests - boolean success = bigquery.delete(tableId); - assertEquals(true, success); + assertTrue(bigquery.delete(tableId)); } @Test public void testCloneTableCopyJob() throws InterruptedException { String sourceTableName = "test_copy_job_base_table"; String ddlTableName = TABLE_ID_DDL.getTable(); - String cloneTableName = String.format("test_clone_table"); + String cloneTableName = "test_clone_table"; // Create source table with some data in it String ddlQuery = String.format( @@ -7204,8 +7199,7 @@ public void testStatelessQueries() throws InterruptedException { private TableResult executeSimpleQuery(BigQuery bigQuery) throws InterruptedException { String query = "SELECT CURRENT_TIMESTAMP() as ts"; QueryJobConfiguration config = QueryJobConfiguration.newBuilder(query).build(); - TableResult result = bigQuery.query(config); - return result; + return bigQuery.query(config); } @Test @@ -7265,11 +7259,9 @@ public void testStatelessQueriesWithLocation() throws Exception { BigQuery bigQuery = bigqueryHelper.getOptions().toBuilder().setLocation(location).build().getService(); + String datasetName = "locationset_" + UUID.randomUUID().toString().replace("-", "_"); Dataset dataset = - bigQuery.create( 
- DatasetInfo.newBuilder("locationset_" + UUID.randomUUID().toString().replace("-", "_")) - .setLocation(location) - .build()); + bigQuery.create(DatasetInfo.newBuilder(datasetName).setLocation(location).build()); try { TableId tableId = TableId.of(dataset.getDatasetId().getDataset(), "sometable"); Schema schema = Schema.of(Field.of("name", LegacySQLTypeName.STRING)); @@ -7289,19 +7281,22 @@ public void testStatelessQueriesWithLocation() throws Exception { assertNull(tb.getJobId()); // Test stateless query when BigQueryOption location does not match dataset location. - try { - BigQuery bigQueryWrongLocation = - bigqueryHelper.getOptions().toBuilder().setLocation(wrongLocation).build().getService(); - bigQueryWrongLocation - .getOptions() - .setDefaultJobCreationMode(JobCreationMode.JOB_CREATION_OPTIONAL); - bigQueryWrongLocation.query(QueryJobConfiguration.of(query)); - fail("querying a table with wrong location shouldn't work"); - } catch (BigQueryException e) { - // Nothing to do - } + assertThrows( + "querying a table with wrong location shouldn't work", + BigQueryException.class, + () -> { + BigQuery bigQueryWrongLocation = + bigqueryHelper.getOptions().toBuilder() + .setLocation(wrongLocation) + .build() + .getService(); + bigQueryWrongLocation + .getOptions() + .setDefaultJobCreationMode(JobCreationMode.JOB_CREATION_OPTIONAL); + bigQueryWrongLocation.query(QueryJobConfiguration.of(query)); + }); } finally { - bigQuery.delete(dataset.getDatasetId(), DatasetDeleteOption.deleteContents()); + RemoteBigQueryHelper.forceDelete(bigQuery, datasetName); } } @@ -7359,18 +7354,17 @@ public void testUniverseDomainWithInvalidUniverseDomain() { .build(); BigQuery bigQuery = bigQueryOptions.getService(); - try { - // Use list dataset to send RPC to invalid domain. - bigQuery.listDatasets("bigquery-public-data"); - fail("RPCs to invalid universe domain should fail"); - } catch (BigQueryException e) { - assertEquals(e.getCode(), HTTP_UNAUTHORIZED); - assertNotNull(e.getMessage()); - assertThat( - (e.getMessage() - .contains("does not match the universe domain found in the credentials"))) - .isTrue(); - } + BigQueryException exception = + assertThrows( + "RPCs to invalid universe domain should fail", + BigQueryException.class, + () -> bigQuery.listDatasets("bigquery-public-data")); + assertEquals(HTTP_UNAUTHORIZED, exception.getCode()); + assertNotNull(exception.getMessage()); + assertTrue( + exception + .getMessage() + .contains("does not match the universe domain found in the credentials")); } @Test @@ -7382,18 +7376,17 @@ public void testInvalidUniverseDomainWithMismatchCredentials() { .build(); BigQuery bigQuery = bigQueryOptions.getService(); - try { - // Use list dataset to send RPC to invalid domain. 
- bigQuery.listDatasets("bigquery-public-data"); - fail("RPCs to invalid universe domain should fail"); - } catch (BigQueryException e) { - assertEquals(e.getCode(), HTTP_UNAUTHORIZED); - assertNotNull(e.getMessage()); - assertThat( - (e.getMessage() - .contains("does not match the universe domain found in the credentials"))) - .isTrue(); - } + BigQueryException exception = + assertThrows( + "RPCs to invalid universe domain should fail", + BigQueryException.class, + () -> bigQuery.listDatasets("bigquery-public-data")); + assertEquals(HTTP_UNAUTHORIZED, exception.getCode()); + assertNotNull(exception.getMessage()); + assertTrue( + exception + .getMessage() + .contains("does not match the universe domain found in the credentials")); } @Test @@ -7475,17 +7468,18 @@ public void testExternalMetadataCacheModeFailForNonBiglake() { .build(); TableInfo tableInfo = TableInfo.of(tableId, externalTableDefinition); - try { - bigquery.create(tableInfo); - fail("BigQueryException was expected"); - } catch (BigQueryException e) { - BigQueryError error = e.getError(); - assertNotNull(error); - assertEquals("invalid", error.getReason()); - assertThat( - e.getMessage().contains("metadataCacheMode provided for non BigLake external table")) - .isTrue(); - } + BigQueryException exception = + assertThrows( + "BigQueryException was expected", + BigQueryException.class, + () -> bigquery.create(tableInfo)); + BigQueryError error = exception.getError(); + assertNotNull(error); + assertEquals("invalid", error.getReason()); + assertTrue( + exception + .getMessage() + .contains("metadataCacheMode provided for non BigLake external table")); } @Test @@ -7531,16 +7525,6 @@ public void testObjectTable() throws InterruptedException { } } - static GoogleCredentials loadCredentials(String credentialFile) { - try { - InputStream keyStream = new ByteArrayInputStream(credentialFile.getBytes()); - return GoogleCredentials.fromStream(keyStream); - } catch (IOException e) { - fail("Couldn't create fake JSON credentials."); - } - return null; - } - @Test public void testQueryExportStatistics() throws InterruptedException { String query = @@ -7681,49 +7665,50 @@ public void testOpenTelemetryTracingDatasets() { .build(); dataset = bigquery.update(updatedInfo, DatasetOption.accessPolicyVersion(2)); - assertEquals(dataset.getDescription(), "Updated Description"); + assertEquals("Updated Description", dataset.getDescription()); assertTrue(bigquery.delete(dataset.getDatasetId())); } finally { parentSpan.end(); Map, Object> createMap = OTEL_ATTRIBUTES.get("com.google.cloud.bigquery.BigQuery.createDataset"); - assertEquals(createMap.get(AttributeKey.stringKey("bq.dataset.location")), "null"); + assertEquals("null", createMap.get(AttributeKey.stringKey("bq.dataset.location"))); assertEquals( + "DatasetService", OTEL_ATTRIBUTES .get("com.google.cloud.bigquery.BigQueryRpc.createDataset") - .get(AttributeKey.stringKey("bq.rpc.service")), - "DatasetService"); + .get(AttributeKey.stringKey("bq.rpc.service"))); Map, Object> getMap = OTEL_ATTRIBUTES.get("com.google.cloud.bigquery.BigQuery.getDataset"); - assertEquals(getMap.get(AttributeKey.stringKey("bq.dataset.id")), billingModelDataset); + assertEquals(billingModelDataset, getMap.get(AttributeKey.stringKey("bq.dataset.id"))); Map, Object> updateMap = OTEL_ATTRIBUTES.get("com.google.cloud.bigquery.BigQuery.updateDataset"); - assertEquals(updateMap.get(AttributeKey.stringKey("bq.option.ACCESS_POLICY_VERSION")), "2"); + assertEquals("2", 
updateMap.get(AttributeKey.stringKey("bq.option.ACCESS_POLICY_VERSION"))); Map, Object> deleteMap = OTEL_ATTRIBUTES.get("com.google.cloud.bigquery.BigQuery.deleteDataset"); - assertEquals(deleteMap.get(AttributeKey.stringKey("bq.dataset.id")), billingModelDataset); + assertEquals(billingModelDataset, deleteMap.get(AttributeKey.stringKey("bq.dataset.id"))); // All should be children spans of parentSpan + String testParentSpanName = "Test Parent Span"; assertEquals( + testParentSpanName, OTEL_SPAN_IDS_TO_NAMES.get( - OTEL_PARENT_SPAN_IDS.get("com.google.cloud.bigquery.BigQuery.getDataset")), - "Test Parent Span"); + OTEL_PARENT_SPAN_IDS.get("com.google.cloud.bigquery.BigQuery.getDataset"))); assertEquals( + testParentSpanName, OTEL_SPAN_IDS_TO_NAMES.get( - OTEL_PARENT_SPAN_IDS.get("com.google.cloud.bigquery.BigQuery.createDataset")), - "Test Parent Span"); + OTEL_PARENT_SPAN_IDS.get("com.google.cloud.bigquery.BigQuery.createDataset"))); assertEquals( + testParentSpanName, OTEL_SPAN_IDS_TO_NAMES.get( - OTEL_PARENT_SPAN_IDS.get("com.google.cloud.bigquery.BigQuery.deleteDataset")), - "Test Parent Span"); + OTEL_PARENT_SPAN_IDS.get("com.google.cloud.bigquery.BigQuery.deleteDataset"))); assertEquals( OTEL_SPAN_IDS_TO_NAMES.get( OTEL_PARENT_SPAN_IDS.get("com.google.cloud.bigquery.BigQueryRpc.createDataset")), "com.google.cloud.bigquery.BigQueryRetryHelper.runWithRetries"); - assertEquals(OTEL_PARENT_SPAN_IDS.get("Test Parent Span"), OTEL_PARENT_SPAN_ID); + assertEquals(OTEL_PARENT_SPAN_ID, OTEL_PARENT_SPAN_IDS.get(testParentSpanName)); RemoteBigQueryHelper.forceDelete(bigquery, billingModelDataset); } } @@ -7745,26 +7730,26 @@ public void testOpenTelemetryTracingTables() { .setDescription("Some Description") .build(); Table createdTable = bigquery.create(tableInfo); - assertThat(createdTable.getDescription()).isEqualTo("Some Description"); + assertEquals("Some Description", createdTable.getDescription()); assertEquals( - OTEL_PARENT_SPAN_IDS.get("com.google.cloud.bigquery.BigQuery.createTable"), - OTEL_PARENT_SPAN_ID); + OTEL_PARENT_SPAN_ID, + OTEL_PARENT_SPAN_IDS.get("com.google.cloud.bigquery.BigQuery.createTable")); assertEquals( + tableName, OTEL_ATTRIBUTES .get("com.google.cloud.bigquery.BigQuery.createTable") - .get(AttributeKey.stringKey("bq.table.id")), - tableName); + .get(AttributeKey.stringKey("bq.table.id"))); assertEquals( + "null", OTEL_ATTRIBUTES .get("com.google.cloud.bigquery.BigQuery.createTable") - .get(AttributeKey.stringKey("bq.table.creation_time")), - "null"); + .get(AttributeKey.stringKey("bq.table.creation_time"))); assertEquals( + "InsertTable", OTEL_ATTRIBUTES .get("com.google.cloud.bigquery.BigQueryRpc.createTable") - .get(AttributeKey.stringKey("bq.rpc.method")), - "InsertTable"); + .get(AttributeKey.stringKey("bq.rpc.method"))); Table updatedTable = bigquery.update(createdTable.toBuilder().setDescription("Updated Description").build()); @@ -7773,8 +7758,8 @@ public void testOpenTelemetryTracingTables() { assertNotNull(OTEL_ATTRIBUTES.get("com.google.cloud.bigquery.BigQuery.updateTable")); assertNotNull(OTEL_ATTRIBUTES.get("com.google.cloud.bigquery.BigQueryRpc.patchTable")); assertEquals( - OTEL_PARENT_SPAN_IDS.get("com.google.cloud.bigquery.BigQuery.updateTable"), - OTEL_PARENT_SPAN_ID); + OTEL_PARENT_SPAN_ID, + OTEL_PARENT_SPAN_IDS.get("com.google.cloud.bigquery.BigQuery.updateTable")); assertTrue(bigquery.delete(updatedTable.getTableId())); } diff --git a/google-cloud-bigquery/src/test/java/com/google/cloud/bigquery/it/ITNightlyBigQueryTest.java 
b/google-cloud-bigquery/src/test/java/com/google/cloud/bigquery/it/ITNightlyBigQueryTest.java
index 588484749..790f35fe5 100644
--- a/google-cloud-bigquery/src/test/java/com/google/cloud/bigquery/it/ITNightlyBigQueryTest.java
+++ b/google-cloud-bigquery/src/test/java/com/google/cloud/bigquery/it/ITNightlyBigQueryTest.java
@@ -63,7 +63,6 @@
import java.util.Map;
import java.util.TimeZone;
import java.util.UUID;
-import java.util.concurrent.ExecutionException;
import java.util.logging.Level;
import java.util.logging.Logger;
import org.apache.arrow.vector.util.JsonStringArrayList;
@@ -183,7 +182,7 @@ public static void beforeClass() throws InterruptedException, IOException {
}
@AfterClass
- public static void afterClass() throws ExecutionException, InterruptedException {
+ public static void afterClass() {
try {
if (bigquery != null) {
deleteTable(DATASET, TABLE);
diff --git a/pom.xml b/pom.xml
index cd870346b..6ef15742d 100644
--- a/pom.xml
+++ b/pom.xml
@@ -4,7 +4,7 @@
com.google.cloud
google-cloud-bigquery-parent
pom
- 2.56.1-rc1-SNAPSHOT
+ 2.57.2-SNAPSHOT
BigQuery Parent
https://github.com/googleapis/java-bigquery
@@ -93,7 +93,7 @@
com.google.cloud
google-cloud-bigquery
- 2.56.1-rc1-SNAPSHOT
+ 2.57.2-SNAPSHOT
diff --git a/renovate.json b/renovate.json
index 38fdb78eb..dc6b01e79 100644
--- a/renovate.json
+++ b/renovate.json
@@ -44,14 +44,6 @@
"/^com.google.guava:/"
]
},
- {
- "semanticCommitType": "deps",
- "semanticCommitScope": null,
- "matchPackageNames": [
- "*",
- "/^com.google.cloud:google-cloud-bigquerystorage/"
- ]
- },
{
"semanticCommitType": "build",
"semanticCommitScope": "deps",
@@ -68,7 +60,7 @@
"semanticCommitType": "chore",
"semanticCommitScope": "deps",
"matchPackageNames": [
- "/^com.google.cloud:google-cloud-bigquery/",
+ "/^com.google.cloud:google-cloud-bigquery$/",
"/^com.google.cloud:google-cloud-bigtable/",
"/^com.google.cloud:libraries-bom/",
"/^com.google.cloud.samples:shared-configuration/"
diff --git a/samples/snapshot/pom.xml b/samples/snapshot/pom.xml
index 53aa95ed6..e10b51dc7 100644
--- a/samples/snapshot/pom.xml
+++ b/samples/snapshot/pom.xml
@@ -56,7 +56,7 @@
com.google.cloud
google-cloud-bigquery
- 2.56.1-rc1-SNAPSHOT
+ 2.57.2-SNAPSHOT
diff --git a/versions.txt b/versions.txt
index ab1733d50..75d369bbf 100644
--- a/versions.txt
+++ b/versions.txt
@@ -1,4 +1,4 @@
# Format:
# module:released-version:current-version
-google-cloud-bigquery:2.56.0:2.56.1-rc1-SNAPSHOT
\ No newline at end of file
+google-cloud-bigquery:2.57.1:2.57.2-SNAPSHOT