From bff393c4463ccccff6a2cd05385fc74150c1864c Mon Sep 17 00:00:00 2001 From: Mend Renovate Date: Wed, 9 Oct 2024 15:27:50 +0200 Subject: [PATCH 01/23] test(deps): update dependency com.google.cloud:google-cloud-storage to v2.43.2 (#3526) --- pom.xml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/pom.xml b/pom.xml index 45acaae34..02529ba0a 100644 --- a/pom.xml +++ b/pom.xml @@ -137,7 +137,7 @@ com.google.cloud google-cloud-storage - 2.43.1 + 2.43.2 test From 776a5541cc94e8ffb1f5e5c6969ae06585571b45 Mon Sep 17 00:00:00 2001 From: Mend Renovate Date: Wed, 9 Oct 2024 15:55:54 +0200 Subject: [PATCH 02/23] deps: update actions/upload-artifact action to v4.4.2 (#3524) --- .github/workflows/scorecard.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/workflows/scorecard.yml b/.github/workflows/scorecard.yml index eea48d1a6..725beb86f 100644 --- a/.github/workflows/scorecard.yml +++ b/.github/workflows/scorecard.yml @@ -59,7 +59,7 @@ jobs: # Upload the results as artifacts (optional). Commenting out will disable uploads of run results in SARIF # format to the repository Actions tab. 
- name: "Upload artifact" - uses: actions/upload-artifact@604373da6381bf24206979c74d06a550515601b9 # v4.4.1 + uses: actions/upload-artifact@84480863f228bb9747b473957fcc9e309aa96097 # v4.4.2 with: name: SARIF file path: results.sarif From d95d0a4267be258378dac944a2472cfc14f3a517 Mon Sep 17 00:00:00 2001 From: Mend Renovate Date: Wed, 9 Oct 2024 15:57:56 +0200 Subject: [PATCH 03/23] chore(deps): update dependency com.google.cloud:google-cloud-bigquery to v2.43.1 (#3528) --- samples/install-without-bom/pom.xml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/samples/install-without-bom/pom.xml b/samples/install-without-bom/pom.xml index 80ebad004..3b733b41d 100644 --- a/samples/install-without-bom/pom.xml +++ b/samples/install-without-bom/pom.xml @@ -45,7 +45,7 @@ com.google.cloud google-cloud-bigquery - 2.43.0 + 2.43.1 From 98694ea4aec57b9c1317da1b42703cb4e175086f Mon Sep 17 00:00:00 2001 From: "release-please[bot]" <55107282+release-please[bot]@users.noreply.github.com> Date: Wed, 9 Oct 2024 12:44:35 -0400 Subject: [PATCH 04/23] chore(main): release 2.43.2-SNAPSHOT (#3527) Co-authored-by: release-please[bot] <55107282+release-please[bot]@users.noreply.github.com> --- benchmark/pom.xml | 2 +- google-cloud-bigquery-bom/pom.xml | 4 ++-- google-cloud-bigquery/pom.xml | 4 ++-- pom.xml | 4 ++-- samples/snapshot/pom.xml | 2 +- versions.txt | 2 +- 6 files changed, 9 insertions(+), 9 deletions(-) diff --git a/benchmark/pom.xml b/benchmark/pom.xml index 080adb3e2..2d1239ead 100644 --- a/benchmark/pom.xml +++ b/benchmark/pom.xml @@ -6,7 +6,7 @@ google-cloud-bigquery-parent com.google.cloud - 2.43.1 + 2.43.2-SNAPSHOT diff --git a/google-cloud-bigquery-bom/pom.xml b/google-cloud-bigquery-bom/pom.xml index 33fef70f2..32d22c8b4 100644 --- a/google-cloud-bigquery-bom/pom.xml +++ b/google-cloud-bigquery-bom/pom.xml @@ -3,7 +3,7 @@ 4.0.0 com.google.cloud google-cloud-bigquery-bom - 2.43.1 + 2.43.2-SNAPSHOT pom com.google.cloud @@ -54,7 +54,7 @@ com.google.cloud 
google-cloud-bigquery - 2.43.1 + 2.43.2-SNAPSHOT diff --git a/google-cloud-bigquery/pom.xml b/google-cloud-bigquery/pom.xml index b39456281..845a34a39 100644 --- a/google-cloud-bigquery/pom.xml +++ b/google-cloud-bigquery/pom.xml @@ -3,7 +3,7 @@ 4.0.0 com.google.cloud google-cloud-bigquery - 2.43.1 + 2.43.2-SNAPSHOT jar BigQuery https://github.com/googleapis/java-bigquery @@ -11,7 +11,7 @@ com.google.cloud google-cloud-bigquery-parent - 2.43.1 + 2.43.2-SNAPSHOT google-cloud-bigquery diff --git a/pom.xml b/pom.xml index 02529ba0a..3823f9458 100644 --- a/pom.xml +++ b/pom.xml @@ -4,7 +4,7 @@ com.google.cloud google-cloud-bigquery-parent pom - 2.43.1 + 2.43.2-SNAPSHOT BigQuery Parent https://github.com/googleapis/java-bigquery @@ -93,7 +93,7 @@ com.google.cloud google-cloud-bigquery - 2.43.1 + 2.43.2-SNAPSHOT diff --git a/samples/snapshot/pom.xml b/samples/snapshot/pom.xml index 2a0bd1db2..00ea151e9 100644 --- a/samples/snapshot/pom.xml +++ b/samples/snapshot/pom.xml @@ -44,7 +44,7 @@ com.google.cloud google-cloud-bigquery - 2.43.1 + 2.43.2-SNAPSHOT diff --git a/versions.txt b/versions.txt index 3a4efcace..c189d4ed9 100644 --- a/versions.txt +++ b/versions.txt @@ -1,4 +1,4 @@ # Format: # module:released-version:current-version -google-cloud-bigquery:2.43.1:2.43.1 \ No newline at end of file +google-cloud-bigquery:2.43.1:2.43.2-SNAPSHOT \ No newline at end of file From 25be311c1477db0993a5825a2b839a295170790f Mon Sep 17 00:00:00 2001 From: Mend Renovate Date: Thu, 10 Oct 2024 17:16:46 +0200 Subject: [PATCH 05/23] deps: update dependency com.google.api.grpc:proto-google-cloud-bigqueryconnection-v1 to v2.54.0 (#3532) --- pom.xml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/pom.xml b/pom.xml index 3823f9458..86570700a 100644 --- a/pom.xml +++ b/pom.xml @@ -149,7 +149,7 @@ com.google.api.grpc proto-google-cloud-bigqueryconnection-v1 - 2.53.0 + 2.54.0 test From 2f87fd9d777175cb5a8e5b0dc55f07546351e504 Mon Sep 17 00:00:00 2001 From: Mend Renovate Date: 
Thu, 10 Oct 2024 17:17:56 +0200 Subject: [PATCH 06/23] deps: update actions/upload-artifact action to v4.4.3 (#3530) --- .github/workflows/scorecard.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/workflows/scorecard.yml b/.github/workflows/scorecard.yml index 725beb86f..8872597ec 100644 --- a/.github/workflows/scorecard.yml +++ b/.github/workflows/scorecard.yml @@ -59,7 +59,7 @@ jobs: # Upload the results as artifacts (optional). Commenting out will disable uploads of run results in SARIF # format to the repository Actions tab. - name: "Upload artifact" - uses: actions/upload-artifact@84480863f228bb9747b473957fcc9e309aa96097 # v4.4.2 + uses: actions/upload-artifact@b4b15b8c7c6ac21ea08fcf65892d2ee8f75cf882 # v4.4.3 with: name: SARIF file path: results.sarif From cad26430f21a37eec2b87ea417f0cf67dcf9c97a Mon Sep 17 00:00:00 2001 From: Mend Renovate Date: Thu, 10 Oct 2024 17:18:42 +0200 Subject: [PATCH 07/23] deps: update dependency com.google.cloud:google-cloud-datacatalog-bom to v1.58.0 (#3533) --- pom.xml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/pom.xml b/pom.xml index 86570700a..33236b670 100644 --- a/pom.xml +++ b/pom.xml @@ -79,7 +79,7 @@ com.google.cloud google-cloud-datacatalog-bom - 1.57.0 + 1.58.0 pom import From c0d2056d1f13998332146ea2fc0fecb2ce3122f0 Mon Sep 17 00:00:00 2001 From: Mend Renovate Date: Fri, 11 Oct 2024 19:03:12 +0200 Subject: [PATCH 08/23] chore(deps): update dependency com.google.cloud:google-cloud-bigqueryconnection to v2.54.0 (#3531) --- pom.xml | 2 +- samples/install-without-bom/pom.xml | 2 +- samples/snapshot/pom.xml | 2 +- samples/snippets/pom.xml | 2 +- 4 files changed, 4 insertions(+), 4 deletions(-) diff --git a/pom.xml b/pom.xml index 33236b670..6cf78489d 100644 --- a/pom.xml +++ b/pom.xml @@ -143,7 +143,7 @@ com.google.cloud google-cloud-bigqueryconnection - 2.53.0 + 2.54.0 test diff --git a/samples/install-without-bom/pom.xml b/samples/install-without-bom/pom.xml index 
3b733b41d..fe9ea02b0 100644 --- a/samples/install-without-bom/pom.xml +++ b/samples/install-without-bom/pom.xml @@ -69,7 +69,7 @@ com.google.cloud google-cloud-bigqueryconnection - 2.53.0 + 2.54.0 test diff --git a/samples/snapshot/pom.xml b/samples/snapshot/pom.xml index 00ea151e9..0ee2b245e 100644 --- a/samples/snapshot/pom.xml +++ b/samples/snapshot/pom.xml @@ -67,7 +67,7 @@ com.google.cloud google-cloud-bigqueryconnection - 2.53.0 + 2.54.0 test diff --git a/samples/snippets/pom.xml b/samples/snippets/pom.xml index 97033ca32..4c4308c57 100644 --- a/samples/snippets/pom.xml +++ b/samples/snippets/pom.xml @@ -85,7 +85,7 @@ com.google.cloud google-cloud-bigqueryconnection - 2.53.0 + 2.54.0 test From 844744f3dea804a31abc806592f557a26cffbab4 Mon Sep 17 00:00:00 2001 From: Mend Renovate Date: Tue, 15 Oct 2024 16:19:03 +0200 Subject: [PATCH 09/23] deps: update github/codeql-action action to v2.26.13 (#3536) --- .github/workflows/scorecard.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/workflows/scorecard.yml b/.github/workflows/scorecard.yml index 8872597ec..7d2892a2f 100644 --- a/.github/workflows/scorecard.yml +++ b/.github/workflows/scorecard.yml @@ -67,6 +67,6 @@ jobs: # Upload the results to GitHub's code scanning dashboard. 
- name: "Upload to code-scanning" - uses: github/codeql-action/upload-sarif@4d85deb8548d03be541760228f3fe9e6a4d5d27d # v2.26.12 + uses: github/codeql-action/upload-sarif@083cd45dc7d463f048a5d0975943f0e19e9c9378 # v2.26.13 with: sarif_file: results.sarif From e78e78cba7769a55b3e5937cf6a5a84f5259d116 Mon Sep 17 00:00:00 2001 From: Mend Renovate Date: Tue, 15 Oct 2024 17:07:31 +0200 Subject: [PATCH 10/23] chore(deps): update dependency com.google.cloud:google-cloud-bigtable to v2.45.1 (#3535) --- samples/install-without-bom/pom.xml | 2 +- samples/snapshot/pom.xml | 2 +- samples/snippets/pom.xml | 2 +- 3 files changed, 3 insertions(+), 3 deletions(-) diff --git a/samples/install-without-bom/pom.xml b/samples/install-without-bom/pom.xml index fe9ea02b0..667606a0a 100644 --- a/samples/install-without-bom/pom.xml +++ b/samples/install-without-bom/pom.xml @@ -63,7 +63,7 @@ com.google.cloud google-cloud-bigtable - 2.45.0 + 2.45.1 test diff --git a/samples/snapshot/pom.xml b/samples/snapshot/pom.xml index 0ee2b245e..7f45a7209 100644 --- a/samples/snapshot/pom.xml +++ b/samples/snapshot/pom.xml @@ -61,7 +61,7 @@ com.google.cloud google-cloud-bigtable - 2.45.0 + 2.45.1 test diff --git a/samples/snippets/pom.xml b/samples/snippets/pom.xml index 4c4308c57..68190cb05 100644 --- a/samples/snippets/pom.xml +++ b/samples/snippets/pom.xml @@ -79,7 +79,7 @@ com.google.cloud google-cloud-bigtable - 2.45.0 + 2.45.1 test From 903a0f7db0926f3d166eebada1710413056fb4a2 Mon Sep 17 00:00:00 2001 From: Phong Chuong <147636638+PhongChuong@users.noreply.github.com> Date: Thu, 24 Oct 2024 12:03:27 -0400 Subject: [PATCH 11/23] docs: Update SimpleApp to explicitly set project id (#3534) * docs: Update SimpleApp to explicitly set project id When running in Google Cloud console, the default google cloud project might not be set leading to 404 errors. This update requires the reader to explicitly set the project value. 
* Removes the explicit use of UUID to set jobId as the library now internally generates the value. * Fix missing braces * Add try/catch for errors * Add missing import * Add more missing import * Add requireEnvVar check * Add missing test import * Add missing test import --- .../java/com/example/bigquery/SimpleApp.java | 90 ++++++++++--------- .../com/example/bigquery/SimpleAppIT.java | 18 +++- 2 files changed, 67 insertions(+), 41 deletions(-) diff --git a/samples/snippets/src/main/java/com/example/bigquery/SimpleApp.java b/samples/snippets/src/main/java/com/example/bigquery/SimpleApp.java index 65ebbc241..49975a9cc 100644 --- a/samples/snippets/src/main/java/com/example/bigquery/SimpleApp.java +++ b/samples/snippets/src/main/java/com/example/bigquery/SimpleApp.java @@ -20,6 +20,7 @@ // [START bigquery_simple_app_deps] import com.google.cloud.bigquery.BigQuery; +import com.google.cloud.bigquery.BigQueryException; import com.google.cloud.bigquery.BigQueryOptions; import com.google.cloud.bigquery.FieldValueList; import com.google.cloud.bigquery.Job; @@ -27,56 +28,65 @@ import com.google.cloud.bigquery.JobInfo; import com.google.cloud.bigquery.QueryJobConfiguration; import com.google.cloud.bigquery.TableResult; -import java.util.UUID; // [END bigquery_simple_app_deps] public class SimpleApp { + public static void main(String... args) throws Exception { - // [START bigquery_simple_app_client] - BigQuery bigquery = BigQueryOptions.getDefaultInstance().getService(); - // [END bigquery_simple_app_client] - // [START bigquery_simple_app_query] - QueryJobConfiguration queryConfig = - QueryJobConfiguration.newBuilder( - "SELECT CONCAT('https://stackoverflow.com/questions/', " - + "CAST(id as STRING)) as url, view_count " - + "FROM `bigquery-public-data.stackoverflow.posts_questions` " - + "WHERE tags like '%google-bigquery%' " - + "ORDER BY view_count DESC " - + "LIMIT 10") - // Use standard SQL syntax for queries. 
- // See: https://cloud.google.com/bigquery/sql-reference/ - .setUseLegacySql(false) - .build(); + // TODO(developer): Replace these variables before running the app. + String projectId = "MY_PROJECT_ID"; + simpleApp(projectId); + } - // Create a job ID so that we can safely retry. - JobId jobId = JobId.of(UUID.randomUUID().toString()); - Job queryJob = bigquery.create(JobInfo.newBuilder(queryConfig).setJobId(jobId).build()); + public static void simpleApp(String projectId) { + try { + // [START bigquery_simple_app_client] + BigQuery bigquery = BigQueryOptions.getDefaultInstance().getService(); + // [END bigquery_simple_app_client] + // [START bigquery_simple_app_query] + QueryJobConfiguration queryConfig = + QueryJobConfiguration.newBuilder( + "SELECT CONCAT('https://stackoverflow.com/questions/', " + + "CAST(id as STRING)) as url, view_count " + + "FROM `bigquery-public-data.stackoverflow.posts_questions` " + + "WHERE tags like '%google-bigquery%' " + + "ORDER BY view_count DESC " + + "LIMIT 10") + // Use standard SQL syntax for queries. + // See: https://cloud.google.com/bigquery/sql-reference/ + .setUseLegacySql(false) + .build(); - // Wait for the query to complete. - queryJob = queryJob.waitFor(); + JobId jobId = JobId.newBuilder().setProject(projectId).build(); + Job queryJob = bigquery.create(JobInfo.newBuilder(queryConfig).setJobId(jobId).build()); - // Check for errors - if (queryJob == null) { - throw new RuntimeException("Job no longer exists"); - } else if (queryJob.getStatus().getError() != null) { - // You can also look at queryJob.getStatus().getExecutionErrors() for all - // errors, not just the latest one. - throw new RuntimeException(queryJob.getStatus().getError().toString()); - } - // [END bigquery_simple_app_query] + // Wait for the query to complete. 
+ queryJob = queryJob.waitFor(); + + // Check for errors + if (queryJob == null) { + throw new RuntimeException("Job no longer exists"); + } else if (queryJob.getStatus().getError() != null) { + // You can also look at queryJob.getStatus().getExecutionErrors() for all + // errors, not just the latest one. + throw new RuntimeException(queryJob.getStatus().getError().toString()); + } + // [END bigquery_simple_app_query] - // [START bigquery_simple_app_print] - // Get the results. - TableResult result = queryJob.getQueryResults(); + // [START bigquery_simple_app_print] + // Get the results. + TableResult result = queryJob.getQueryResults(); - // Print all pages of the results. - for (FieldValueList row : result.iterateAll()) { - // String type - String url = row.get("url").getStringValue(); - String viewCount = row.get("view_count").getStringValue(); - System.out.printf("%s : %s views\n", url, viewCount); + // Print all pages of the results. + for (FieldValueList row : result.iterateAll()) { + // String type + String url = row.get("url").getStringValue(); + String viewCount = row.get("view_count").getStringValue(); + System.out.printf("%s : %s views\n", url, viewCount); + } + } catch (BigQueryException | InterruptedException e) { + System.out.println("Simple App failed due to error: \n" + e.toString()); } // [END bigquery_simple_app_print] } diff --git a/samples/snippets/src/test/java/com/example/bigquery/SimpleAppIT.java b/samples/snippets/src/test/java/com/example/bigquery/SimpleAppIT.java index cfb77f107..4c4030c7d 100644 --- a/samples/snippets/src/test/java/com/example/bigquery/SimpleAppIT.java +++ b/samples/snippets/src/test/java/com/example/bigquery/SimpleAppIT.java @@ -17,6 +17,7 @@ package com.example.bigquery; import static com.google.common.truth.Truth.assertThat; +import static junit.framework.TestCase.assertNotNull; import java.io.ByteArrayOutputStream; import java.io.PrintStream; @@ -24,6 +25,7 @@ import java.util.logging.Logger; import org.junit.After; 
import org.junit.Before; +import org.junit.BeforeClass; import org.junit.Test; import org.junit.runner.RunWith; import org.junit.runners.JUnit4; @@ -37,6 +39,20 @@ public class SimpleAppIT { private ByteArrayOutputStream bout; private PrintStream out; private PrintStream originalPrintStream; + private static final String PROJECT_ID = requireEnvVar("GOOGLE_CLOUD_PROJECT"); + + private static String requireEnvVar(String varName) { + String value = System.getenv(varName); + assertNotNull( + "Environment variable " + varName + " is required to perform these tests.", + System.getenv(varName)); + return value; + } + + @BeforeClass + public static void checkRequirements() { + requireEnvVar("GOOGLE_CLOUD_PROJECT"); + } @Before public void setUp() { @@ -56,7 +72,7 @@ public void tearDown() { @Test public void testQuickstart() throws Exception { - SimpleApp.main(); + SimpleApp.simpleApp(PROJECT_ID); String got = bout.toString(); assertThat(got).contains("https://stackoverflow.com/questions/"); } From 16448eec7c7f00a113c923a0fcde463c8ac91f9b Mon Sep 17 00:00:00 2001 From: Mend Renovate Date: Thu, 24 Oct 2024 18:11:26 +0200 Subject: [PATCH 12/23] deps: update dependency com.google.cloud:sdk-platform-java-config to v3.38.0 (#3542) --- .github/workflows/unmanaged_dependency_check.yaml | 2 +- .kokoro/continuous/graalvm-native-17.cfg | 2 +- .kokoro/continuous/graalvm-native.cfg | 2 +- .kokoro/presubmit/graalvm-native-17.cfg | 2 +- .kokoro/presubmit/graalvm-native.cfg | 2 +- google-cloud-bigquery-bom/pom.xml | 2 +- pom.xml | 2 +- 7 files changed, 7 insertions(+), 7 deletions(-) diff --git a/.github/workflows/unmanaged_dependency_check.yaml b/.github/workflows/unmanaged_dependency_check.yaml index ef7f25f5e..d34d268f9 100644 --- a/.github/workflows/unmanaged_dependency_check.yaml +++ b/.github/workflows/unmanaged_dependency_check.yaml @@ -17,7 +17,7 @@ jobs: # repository .kokoro/build.sh - name: Unmanaged dependency check - uses: 
googleapis/sdk-platform-java/java-shared-dependencies/unmanaged-dependency-check@google-cloud-shared-dependencies/v3.37.0 + uses: googleapis/sdk-platform-java/java-shared-dependencies/unmanaged-dependency-check@google-cloud-shared-dependencies/v3.38.0 with: # java-bigquery does not produce a BOM. Fortunately the root pom.xml # defines google-cloud-bigquery in dependencyManagement section. So diff --git a/.kokoro/continuous/graalvm-native-17.cfg b/.kokoro/continuous/graalvm-native-17.cfg index 0b98d0a81..d2374101e 100644 --- a/.kokoro/continuous/graalvm-native-17.cfg +++ b/.kokoro/continuous/graalvm-native-17.cfg @@ -3,7 +3,7 @@ # Configure the docker image for kokoro-trampoline. env_vars: { key: "TRAMPOLINE_IMAGE" - value: "gcr.io/cloud-devrel-public-resources/graalvm_sdk_platform_b:3.37.0" + value: "gcr.io/cloud-devrel-public-resources/graalvm_sdk_platform_b:3.38.0" } env_vars: { diff --git a/.kokoro/continuous/graalvm-native.cfg b/.kokoro/continuous/graalvm-native.cfg index c468a59f7..1ef28925b 100644 --- a/.kokoro/continuous/graalvm-native.cfg +++ b/.kokoro/continuous/graalvm-native.cfg @@ -3,7 +3,7 @@ # Configure the docker image for kokoro-trampoline. env_vars: { key: "TRAMPOLINE_IMAGE" - value: "gcr.io/cloud-devrel-public-resources/graalvm_sdk_platform_a:3.37.0" + value: "gcr.io/cloud-devrel-public-resources/graalvm_sdk_platform_a:3.38.0" } env_vars: { diff --git a/.kokoro/presubmit/graalvm-native-17.cfg b/.kokoro/presubmit/graalvm-native-17.cfg index 9d44e5304..cef8b0b6c 100644 --- a/.kokoro/presubmit/graalvm-native-17.cfg +++ b/.kokoro/presubmit/graalvm-native-17.cfg @@ -3,7 +3,7 @@ # Configure the docker image for kokoro-trampoline. 
env_vars: { key: "TRAMPOLINE_IMAGE" - value: "gcr.io/cloud-devrel-public-resources/graalvm_sdk_platform_b:3.37.0"" + value: "gcr.io/cloud-devrel-public-resources/graalvm_sdk_platform_b:3.38.0"" } env_vars: { diff --git a/.kokoro/presubmit/graalvm-native.cfg b/.kokoro/presubmit/graalvm-native.cfg index e6553bd6e..e434ee684 100644 --- a/.kokoro/presubmit/graalvm-native.cfg +++ b/.kokoro/presubmit/graalvm-native.cfg @@ -3,7 +3,7 @@ # Configure the docker image for kokoro-trampoline. env_vars: { key: "TRAMPOLINE_IMAGE" - value: "gcr.io/cloud-devrel-public-resources/graalvm_sdk_platform_a:3.37.0" + value: "gcr.io/cloud-devrel-public-resources/graalvm_sdk_platform_a:3.38.0" } env_vars: { diff --git a/google-cloud-bigquery-bom/pom.xml b/google-cloud-bigquery-bom/pom.xml index 32d22c8b4..b949a354f 100644 --- a/google-cloud-bigquery-bom/pom.xml +++ b/google-cloud-bigquery-bom/pom.xml @@ -8,7 +8,7 @@ com.google.cloud sdk-platform-java-config - 3.37.0 + 3.38.0 diff --git a/pom.xml b/pom.xml index 6cf78489d..1cbbf7c7d 100644 --- a/pom.xml +++ b/pom.xml @@ -14,7 +14,7 @@ com.google.cloud sdk-platform-java-config - 3.37.0 + 3.38.0 From 4763f73ad854ca4bfdddbbdc0bb43fe639238665 Mon Sep 17 00:00:00 2001 From: Joe Wang <106995533+JoeWang1127@users.noreply.github.com> Date: Thu, 24 Oct 2024 16:39:54 +0000 Subject: [PATCH 13/23] docs: reformat javadoc (#3545) * docs: reformat javadoc * lint --- .../com/google/cloud/bigquery/BigQuery.java | 843 ++++++++---------- 1 file changed, 366 insertions(+), 477 deletions(-) diff --git a/google-cloud-bigquery/src/main/java/com/google/cloud/bigquery/BigQuery.java b/google-cloud-bigquery/src/main/java/com/google/cloud/bigquery/BigQuery.java index e391c054d..675e6c1e8 100644 --- a/google-cloud-bigquery/src/main/java/com/google/cloud/bigquery/BigQuery.java +++ b/google-cloud-bigquery/src/main/java/com/google/cloud/bigquery/BigQuery.java @@ -699,20 +699,17 @@ public int hashCode() { * *

Example of creating a dataset. * - *

-   * {
-   *   @code
-   *   String datasetName = "my_dataset_name";
-   *   Dataset dataset = null;
-   *   DatasetInfo datasetInfo = DatasetInfo.newBuilder(datasetName).build();
-   *   try {
-   *     // the dataset was created
-   *     dataset = bigquery.create(datasetInfo);
-   *   } catch (BigQueryException e) {
-   *     // the dataset was not created
-   *   }
+   * 
{@code
+   * String datasetName = "my_dataset_name";
+   * Dataset dataset = null;
+   * DatasetInfo datasetInfo = DatasetInfo.newBuilder(datasetName).build();
+   * try {
+   *   // the dataset was created
+   *   dataset = bigquery.create(datasetInfo);
+   * } catch (BigQueryException e) {
+   *   // the dataset was not created
    * }
-   * 
+ * }
* * @throws BigQueryException upon failure */ @@ -723,22 +720,19 @@ public int hashCode() { * *

Example of creating a table. * - *

-   * {
-   *   @code
-   *   String datasetName = "my_dataset_name";
-   *   String tableName = "my_table_name";
-   *   String fieldName = "string_field";
-   *   TableId tableId = TableId.of(datasetName, tableName);
-   *   // Table field definition
-   *   Field field = Field.of(fieldName, LegacySQLTypeName.STRING);
-   *   // Table schema definition
-   *   Schema schema = Schema.of(field);
-   *   TableDefinition tableDefinition = StandardTableDefinition.of(schema);
-   *   TableInfo tableInfo = TableInfo.newBuilder(tableId, tableDefinition).build();
-   *   Table table = bigquery.create(tableInfo);
-   * }
-   * 
+ *
{@code
+   * String datasetName = "my_dataset_name";
+   * String tableName = "my_table_name";
+   * String fieldName = "string_field";
+   * TableId tableId = TableId.of(datasetName, tableName);
+   * // Table field definition
+   * Field field = Field.of(fieldName, LegacySQLTypeName.STRING);
+   * // Table schema definition
+   * Schema schema = Schema.of(field);
+   * TableDefinition tableDefinition = StandardTableDefinition.of(schema);
+   * TableInfo tableInfo = TableInfo.newBuilder(tableId, tableDefinition).build();
+   * Table table = bigquery.create(tableInfo);
+   * }
* * @throws BigQueryException upon failure */ @@ -756,46 +750,41 @@ public int hashCode() { * *

Example of loading a newline-delimited-json file with textual fields from GCS to a table. * - *

-   * {
-   *   @code
-   *   String datasetName = "my_dataset_name";
-   *   String tableName = "my_table_name";
-   *   String sourceUri = "gs://cloud-samples-data/bigquery/us-states/us-states.json";
-   *   TableId tableId = TableId.of(datasetName, tableName);
-   *   // Table field definition
-   *   Field[] fields = new Field[] { Field.of("name", LegacySQLTypeName.STRING),
-   *       Field.of("post_abbr", LegacySQLTypeName.STRING) };
-   *   // Table schema definition
-   *   Schema schema = Schema.of(fields);
-   *   LoadJobConfiguration configuration = LoadJobConfiguration.builder(tableId, sourceUri)
-   *       .setFormatOptions(FormatOptions.json()).setCreateDisposition(CreateDisposition.CREATE_IF_NEEDED)
-   *       .setSchema(schema).build();
-   *   // Load the table
-   *   Job loadJob = bigquery.create(JobInfo.of(configuration));
-   *   loadJob = loadJob.waitFor();
-   *   // Check the table
-   *   System.out.println("State: " + loadJob.getStatus().getState());
-   *   return ((StandardTableDefinition) bigquery.getTable(tableId).getDefinition()).getNumRows();
-   * }
-   * 
+ *
{@code
+   * String datasetName = "my_dataset_name";
+   * String tableName = "my_table_name";
+   * String sourceUri = "gs://cloud-samples-data/bigquery/us-states/us-states.json";
+   * TableId tableId = TableId.of(datasetName, tableName);
+   * // Table field definition
+   * Field[] fields = new Field[] { Field.of("name", LegacySQLTypeName.STRING),
+   * Field.of("post_abbr", LegacySQLTypeName.STRING) };
+   * // Table schema definition
+   * Schema schema = Schema.of(fields);
+   * LoadJobConfiguration configuration = LoadJobConfiguration.builder(tableId, sourceUri)
+   *     .setFormatOptions(FormatOptions.json())
+   *     .setCreateDisposition(CreateDisposition.CREATE_IF_NEEDED)
+   *     .setSchema(schema).build();
+   * // Load the table
+   * Job loadJob = bigquery.create(JobInfo.of(configuration));
+   * loadJob = loadJob.waitFor();
+   * // Check the table
+   * System.out.println("State: " + loadJob.getStatus().getState());
+   * return ((StandardTableDefinition) bigquery.getTable(tableId).getDefinition()).getNumRows();
+   * }
* *

Example of creating a query job. * - *

-   * {
-   *   @code
-   *   String query = "SELECT field FROM my_dataset_name.my_table_name";
-   *   Job job = null;
-   *   JobConfiguration jobConfiguration = QueryJobConfiguration.of(query);
-   *   JobInfo jobInfo = JobInfo.of(jobConfiguration);
-   *   try {
-   *     job = bigquery.create(jobInfo);
-   *   } catch (BigQueryException e) {
-   *     // the job was not created
-   *   }
+   * 
{@code
+   * String query = "SELECT field FROM my_dataset_name.my_table_name";
+   * Job job = null;
+   * JobConfiguration jobConfiguration = QueryJobConfiguration.of(query);
+   * JobInfo jobInfo = JobInfo.of(jobConfiguration);
+   * try {
+   *   job = bigquery.create(jobInfo);
+   * } catch (BigQueryException e) {
+   *   // the job was not created
    * }
-   * 
+ * }
* * @throws BigQueryException upon failure */ @@ -808,18 +797,15 @@ public int hashCode() { * *

Example of creating a query connection. * - *

-   * {
-   *   @code
-   *       ConnectionSettings connectionSettings =
-   *         ConnectionSettings.newBuilder()
-   *             .setRequestTimeout(10L)
-   *             .setMaxResults(100L)
-   *             .setUseQueryCache(true)
-   *             .build();
-   *       Connection connection = bigquery.createConnection(connectionSettings);
-   * }
-   * 
+ *
{@code
+   * ConnectionSettings connectionSettings =
+   *     ConnectionSettings.newBuilder()
+   *         .setRequestTimeout(10L)
+   *         .setMaxResults(100L)
+   *         .setUseQueryCache(true)
+   *         .build();
+   * Connection connection = bigquery.createConnection(connectionSettings);
+   * }
* * @throws BigQueryException upon failure * @param connectionSettings @@ -836,12 +822,9 @@ public int hashCode() { * *

Example of creating a query connection. * - *

-   * {
-   *   @code
-   *       Connection connection = bigquery.createConnection();
-   * }
-   * 
+ *
{@code
+   * Connection connection = bigquery.createConnection();
+   * }
* * @throws BigQueryException upon failure */ @@ -853,13 +836,10 @@ public int hashCode() { * *

Example of getting a dataset. * - *

-   * {
-   *   @code
-   *   String datasetName = "my_dataset";
-   *   Dataset dataset = bigquery.getDataset(datasetName);
-   * }
-   * 
+ *
{@code
+   * String datasetName = "my_dataset";
+   * Dataset dataset = bigquery.getDataset(datasetName);
+   * }
* * @throws BigQueryException upon failure */ @@ -870,15 +850,12 @@ public int hashCode() { * *

Example of getting a dataset. * - *

-   * {
-   *   @code
-   *   String projectId = "my_project_id";
-   *   String datasetName = "my_dataset_name";
-   *   DatasetId datasetId = DatasetId.of(projectId, datasetName);
-   *   Dataset dataset = bigquery.getDataset(datasetId);
-   * }
-   * 
+ *
{@code
+   * String projectId = "my_project_id";
+   * String datasetName = "my_dataset_name";
+   * DatasetId datasetId = DatasetId.of(projectId, datasetName);
+   * Dataset dataset = bigquery.getDataset(datasetId);
+   * }
* * @throws BigQueryException upon failure */ @@ -892,16 +869,13 @@ public int hashCode() { * *

Example of listing datasets, specifying the page size. * - *

-   * {
-   *   @code
-   *   // List datasets in the default project
-   *   Page<Dataset> datasets = bigquery.listDatasets(DatasetListOption.pageSize(100));
-   *   for (Dataset dataset : datasets.iterateAll()) {
-   *     // do something with the dataset
-   *   }
+   * 
{@code
+   * // List datasets in the default project
+   * Page datasets = bigquery.listDatasets(DatasetListOption.pageSize(100));
+   * for (Dataset dataset : datasets.iterateAll()) {
+   *   // do something with the dataset
    * }
-   * 
+ * }
* * @throws BigQueryException upon failure */ @@ -915,17 +889,14 @@ public int hashCode() { * *

Example of listing datasets in a project, specifying the page size. * - *

-   * {
-   *   @code
-   *   String projectId = "my_project_id";
-   *   // List datasets in a specified project
-   *   Page<{@link Dataset}> datasets = bigquery.listDatasets(projectId, DatasetListOption.pageSize(100));
-   *   for (Dataset dataset : datasets.iterateAll()) {
-   *     // do something with the dataset
-   *   }
+   * 
{@code
+   * String projectId = "my_project_id";
+   * // List datasets in a specified project
+   * Page datasets = bigquery.listDatasets(projectId, DatasetListOption.pageSize(100));
+   * for (Dataset dataset : datasets.iterateAll()) {
+   *   // do something with the dataset
    * }
-   * 
+ * }
* * @throws BigQueryException upon failure */ @@ -936,18 +907,15 @@ public int hashCode() { * *

Example of deleting a dataset from its id, even if non-empty. * - *

-   * {
-   *   @code
-   *   String datasetName = "my_dataset_name";
-   *   boolean deleted = bigquery.delete(datasetName, DatasetDeleteOption.deleteContents());
-   *   if (deleted) {
-   *     // the dataset was deleted
-   *   } else {
-   *     // the dataset was not found
-   *   }
+   * 
{@code
+   * String datasetName = "my_dataset_name";
+   * boolean deleted = bigquery.delete(datasetName, DatasetDeleteOption.deleteContents());
+   * if (deleted) {
+   *   // the dataset was deleted
+   * } else {
+   *   // the dataset was not found
    * }
-   * 
+ * }
* * @return {@code true} if dataset was deleted, {@code false} if it was not found * @throws BigQueryException upon failure @@ -959,20 +927,17 @@ public int hashCode() { * *

Example of deleting a dataset, even if non-empty. * - *

-   * {
-   *   @code
-   *   String projectId = "my_project_id";
-   *   String datasetName = "my_dataset_name";
-   *   DatasetId datasetId = DatasetId.of(projectId, datasetName);
-   *   boolean deleted = bigquery.delete(datasetId, DatasetDeleteOption.deleteContents());
-   *   if (deleted) {
-   *     // the dataset was deleted
-   *   } else {
-   *     // the dataset was not found
-   *   }
+   * 
{@code
+   * String projectId = "my_project_id";
+   * String datasetName = "my_dataset_name";
+   * DatasetId datasetId = DatasetId.of(projectId, datasetName);
+   * boolean deleted = bigquery.delete(datasetId, DatasetDeleteOption.deleteContents());
+   * if (deleted) {
+   *   // the dataset was deleted
+   * } else {
+   *   // the dataset was not found
    * }
-   * 
+ * }
* * @return {@code true} if dataset was deleted, {@code false} if it was not found * @throws BigQueryException upon failure @@ -996,21 +961,18 @@ public int hashCode() { * *

Example of deleting a table. * - *

-   * {
-   *   @code
-   *   String projectId = "my_project_id";
-   *   String datasetName = "my_dataset_name";
-   *   String tableName = "my_table_name";
-   *   TableId tableId = TableId.of(projectId, datasetName, tableName);
-   *   boolean deleted = bigquery.delete(tableId);
-   *   if (deleted) {
-   *     // the table was deleted
-   *   } else {
-   *     // the table was not found
-   *   }
+   * 
{@code
+   * String projectId = "my_project_id";
+   * String datasetName = "my_dataset_name";
+   * String tableName = "my_table_name";
+   * TableId tableId = TableId.of(projectId, datasetName, tableName);
+   * boolean deleted = bigquery.delete(tableId);
+   * if (deleted) {
+   *   // the table was deleted
+   * } else {
+   *   // the table was not found
    * }
-   * 
+ * }
* * @return {@code true} if table was deleted, {@code false} if it was not found * @throws BigQueryException upon failure @@ -1022,21 +984,18 @@ public int hashCode() { * *

Example of deleting a model. * - *

-   * {
-   *   @code
-   *   String projectId = "my_project_id";
-   *   String datasetName = "my_dataset_name";
-   *   String tableName = "my_model_name";
-   *   ModelId modelId = ModelId.of(projectId, datasetName, modelName);
-   *   boolean deleted = bigquery.delete(modelId);
-   *   if (deleted) {
-   *     // the model was deleted
-   *   } else {
-   *     // the model was not found
-   *   }
+   * 
{@code
+   * String projectId = "my_project_id";
+   * String datasetName = "my_dataset_name";
+   * String tableName = "my_model_name";
+   * ModelId modelId = ModelId.of(projectId, datasetName, modelName);
+   * boolean deleted = bigquery.delete(modelId);
+   * if (deleted) {
+   *   // the model was deleted
+   * } else {
+   *   // the model was not found
    * }
-   * 
+ * }
* * @return {@code true} if model was deleted, {@code false} if it was not found * @throws BigQueryException upon failure @@ -1081,19 +1040,15 @@ public int hashCode() { * * - *
-   * {
-   *   @code
-   *   // String datasetName = "my_dataset_name";
-   *   // String tableName = "my_table_name";
-   *   // String newDescription = "new_description";
-   *
-   *   Table beforeTable = bigquery.getTable(datasetName, tableName);
-   *   TableInfo tableInfo = beforeTable.toBuilder().setDescription(newDescription).build();
-   *   Table afterTable = bigquery.update(tableInfo);
+   * 
{@code
+   * // String datasetName = "my_dataset_name";
+   * // String tableName = "my_table_name";
+   * // String newDescription = "new_description";
    *
-   * }
-   * 
+ * Table beforeTable = bigquery.getTable(datasetName, tableName); + * TableInfo tableInfo = beforeTable.toBuilder().setDescription(newDescription).build(); + * Table afterTable = bigquery.update(tableInfo); + * }
* * * @@ -1106,33 +1061,27 @@ public int hashCode() { * *

Example of updating a table by changing its description. * - *

-   * {
-   *   @code
-   *   String datasetName = "my_dataset_name";
-   *   String tableName = "my_table_name";
-   *   String newDescription = "new_description";
-   *   Table beforeTable = bigquery.getTable(datasetName, tableName);
-   *   TableInfo tableInfo = beforeTable.toBuilder().setDescription(newDescription).build();
-   *   Table afterTable = bigquery.update(tableInfo);
-   * }
-   * 
+ *
{@code
+   * String datasetName = "my_dataset_name";
+   * String tableName = "my_table_name";
+   * String newDescription = "new_description";
+   * Table beforeTable = bigquery.getTable(datasetName, tableName);
+   * TableInfo tableInfo = beforeTable.toBuilder().setDescription(newDescription).build();
+   * Table afterTable = bigquery.update(tableInfo);
+   * }
* *

Example of updating a table by changing its expiration. * - *

-   * {
-   *   @code
-   *   String datasetName = "my_dataset_name";
-   *   String tableName = "my_table_name";
-   *   Table beforeTable = bigquery.getTable(datasetName, tableName);
-   *
-   *   // Set table to expire 5 days from now.
-   *   long expirationMillis = DateTime.now().plusDays(5).getMillis();
-   *   TableInfo tableInfo = beforeTable.toBuilder().setExpirationTime(expirationMillis).build();
-   *   Table afterTable = bigquery.update(tableInfo);
-   * }
-   * 
+ *
{@code
+   * String datasetName = "my_dataset_name";
+   * String tableName = "my_table_name";
+   * Table beforeTable = bigquery.getTable(datasetName, tableName);
+   *
+   * // Set table to expire 5 days from now.
+   * long expirationMillis = DateTime.now().plusDays(5).getMillis();
+   * TableInfo tableInfo = beforeTable.toBuilder().setExpirationTime(expirationMillis).build();
+   * Table afterTable = bigquery.update(tableInfo);
+   * }
* * @throws BigQueryException upon failure */ @@ -1143,33 +1092,27 @@ public int hashCode() { * *

Example of updating a model by changing its description. * - *

-   * {
-   *   @code
-   *   String datasetName = "my_dataset_name";
-   *   String modelName = "my_model_name";
-   *   String newDescription = "new_description";
-   *   Model beforeModel = bigquery.getModel(datasetName, modelName);
-   *   ModelInfo modelInfo = beforeModel.toBuilder().setDescription(newDescription).build();
-   *   Model afterModel = bigquery.update(modelInfo);
-   * }
-   * 
+ *
{@code
+   * String datasetName = "my_dataset_name";
+   * String modelName = "my_model_name";
+   * String newDescription = "new_description";
+   * Model beforeModel = bigquery.getModel(datasetName, modelName);
+   * ModelInfo modelInfo = beforeModel.toBuilder().setDescription(newDescription).build();
+   * Model afterModel = bigquery.update(modelInfo);
+   * }
* *

Example of updating a model by changing its expiration. * - *

-   * {
-   *   @code
-   *   String datasetName = "my_dataset_name";
-   *   String modelName = "my_model_name";
-   *   Model beforeModel = bigquery.getModel(datasetName, modelName);
-   *
-   *   // Set model to expire 5 days from now.
-   *   long expirationMillis = DateTime.now().plusDays(5).getMillis();
-   *   ModelInfo modelInfo = beforeModel.toBuilder().setExpirationTime(expirationMillis).build();
-   *   Model afterModel = bigquery.update(modelInfo);
-   * }
-   * 
+ *
{@code
+   * String datasetName = "my_dataset_name";
+   * String modelName = "my_model_name";
+   * Model beforeModel = bigquery.getModel(datasetName, modelName);
+   *
+   * // Set model to expire 5 days from now.
+   * long expirationMillis = DateTime.now().plusDays(5).getMillis();
+   * ModelInfo modelInfo = beforeModel.toBuilder().setExpirationTime(expirationMillis).build();
+   * Model afterModel = bigquery.update(modelInfo);
+   * }
* * @throws BigQueryException upon failure */ @@ -1187,14 +1130,11 @@ public int hashCode() { * *

Example of getting a table. * - *

-   * {
-   *   @code
-   *   String datasetName = "my_dataset_name";
-   *   String tableName = "my_table_name";
-   *   Table table = bigquery.getTable(datasetName, tableName);
-   * }
-   * 
+ *
{@code
+   * String datasetName = "my_dataset_name";
+   * String tableName = "my_table_name";
+   * Table table = bigquery.getTable(datasetName, tableName);
+   * }
* * @throws BigQueryException upon failure */ @@ -1205,16 +1145,13 @@ public int hashCode() { * *

Example of getting a table. * - *

-   * {
-   *   @code
-   *   String projectId = "my_project_id";
-   *   String datasetName = "my_dataset_name";
-   *   String tableName = "my_table_name";
-   *   TableId tableId = TableId.of(projectId, datasetName, tableName);
-   *   Table table = bigquery.getTable(tableId);
-   * }
-   * 
+ *
{@code
+   * String projectId = "my_project_id";
+   * String datasetName = "my_dataset_name";
+   * String tableName = "my_table_name";
+   * TableId tableId = TableId.of(projectId, datasetName, tableName);
+   * Table table = bigquery.getTable(tableId);
+   * }
* * @throws BigQueryException upon failure */ @@ -1232,16 +1169,13 @@ public int hashCode() { * *

Example of getting a model. * - *

-   * {
-   *   @code
-   *   String projectId = "my_project_id";
-   *   String datasetName = "my_dataset_name";
-   *   String modelName = "my_model_name";
-   *   ModelId modelId = ModelId.of(projectId, datasetName, tableName);
-   *   Model model = bigquery.getModel(modelId);
-   * }
-   * 
+ *
{@code
+   * String projectId = "my_project_id";
+   * String datasetName = "my_dataset_name";
+   * String modelName = "my_model_name";
+   * ModelId modelId = ModelId.of(projectId, datasetName, tableName);
+   * Model model = bigquery.getModel(modelId);
+   * }
* * @throws BigQueryException upon failure */ @@ -1275,16 +1209,13 @@ public int hashCode() { * *

Example of listing the tables in a dataset, specifying the page size. * - *

-   * {
-   *   @code
-   *   String datasetName = "my_dataset_name";
-   *   Page<Table> tables = bigquery.listTables(datasetName, TableListOption.pageSize(100));
-   *   for (Table table : tables.iterateAll()) {
-   *     // do something with the table
-   *   }
+   * 
{@code
+   * String datasetName = "my_dataset_name";
+   * Page tables = bigquery.listTables(datasetName, TableListOption.pageSize(100));
+   * for (Table table : tables.iterateAll()) {
+   *   // do something with the table
    * }
-   * 
+   * }
    *
    * @throws BigQueryException upon failure
    */
@@ -1298,18 +1229,15 @@ public int hashCode() {
    *
    * 

Example of listing the tables in a dataset. * - *

-   * {
-   *   @code
-   *   String projectId = "my_project_id";
-   *   String datasetName = "my_dataset_name";
-   *   DatasetId datasetId = DatasetId.of(projectId, datasetName);
-   *   Page<Table> tables = bigquery.listTables(datasetId, TableListOption.pageSize(100));
-   *   for (Table table : tables.iterateAll()) {
-   *     // do something with the table
-   *   }
+   * 
{@code
+   * String projectId = "my_project_id";
+   * String datasetName = "my_dataset_name";
+   * DatasetId datasetId = DatasetId.of(projectId, datasetName);
+   * Page
tables = bigquery.listTables(datasetId, TableListOption.pageSize(100)); + * for (Table table : tables.iterateAll()) { + * // do something with the table * } - * + * } * * @throws BigQueryException upon failure */ @@ -1332,33 +1260,30 @@ public int hashCode() { * *

Example of inserting rows into a table without running a load job. * - *

-   * {
-   *   @code
-   *   String datasetName = "my_dataset_name";
-   *   String tableName = "my_table_name";
-   *   TableId tableId = TableId.of(datasetName, tableName);
-   *   // Values of the row to insert
-   *   Map<String, Object> rowContent = new HashMap<>();
-   *   rowContent.put("booleanField", true);
-   *   // Bytes are passed in base64
-   *   rowContent.put("bytesField", "Cg0NDg0="); // 0xA, 0xD, 0xD, 0xE, 0xD in base64
-   *   // Records are passed as a map
-   *   Map<String, Object> recordsContent = new HashMap<>();
-   *   recordsContent.put("stringField", "Hello, World!");
-   *   rowContent.put("recordField", recordsContent);
-   *   InsertAllResponse response = bigquery.insertAll(InsertAllRequest.newBuilder(tableId).addRow("rowId", rowContent)
-   *       // More rows can be added in the same RPC by invoking .addRow() on the
-   *       // builder
-   *       .build());
-   *   if (response.hasErrors()) {
-   *     // If any of the insertions failed, this lets you inspect the errors
-   *     for (Entry<Long, List<BigQueryError>> entry : response.getInsertErrors().entrySet()) {
-   *       // inspect row error
-   *     }
+   * 
{@code
+   * String datasetName = "my_dataset_name";
+   * String tableName = "my_table_name";
+   * TableId tableId = TableId.of(datasetName, tableName);
+   * // Values of the row to insert
+   * Map rowContent = new HashMap<>();
+   * rowContent.put("booleanField", true);
+   * // Bytes are passed in base64
+   * rowContent.put("bytesField", "Cg0NDg0="); // 0xA, 0xD, 0xD, 0xE, 0xD in base64
+   * // Records are passed as a map
+   * Map recordsContent = new HashMap<>();
+   * recordsContent.put("stringField", "Hello, World!");
+   * rowContent.put("recordField", recordsContent);
+   * InsertAllResponse response = bigquery.insertAll(InsertAllRequest.newBuilder(tableId).addRow("rowId", rowContent)
+   *     // More rows can be added in the same RPC by invoking .addRow() on the
+   *     // builder
+   *     .build());
+   * if (response.hasErrors()) {
+   *   // If any of the insertions failed, this lets you inspect the errors
+   *   for (Entry> entry : response.getInsertErrors().entrySet()) {
+   *     // inspect row error
    *   }
    * }
-   * 
+ * }
* * @throws BigQueryException upon failure */ @@ -1369,20 +1294,17 @@ public int hashCode() { * *

Example of listing table rows, specifying the page size. * - *

-   * {
-   *   @code
-   *   String datasetName = "my_dataset_name";
-   *   String tableName = "my_table_name";
-   *   // This example reads the result 100 rows per RPC call. If there's no need
-   *   // to limit the number,
-   *   // simply omit the option.
-   *   TableResult tableData = bigquery.listTableData(datasetName, tableName, TableDataListOption.pageSize(100));
-   *   for (FieldValueList row : tableData.iterateAll()) {
-   *     // do something with the row
-   *   }
+   * 
{@code
+   * String datasetName = "my_dataset_name";
+   * String tableName = "my_table_name";
+   * // This example reads the result 100 rows per RPC call. If there's no need
+   * // to limit the number,
+   * // simply omit the option.
+   * TableResult tableData = bigquery.listTableData(datasetName, tableName, TableDataListOption.pageSize(100));
+   * for (FieldValueList row : tableData.iterateAll()) {
+   *   // do something with the row
    * }
-   * 
+ * }
* * @throws BigQueryException upon failure */ @@ -1393,21 +1315,18 @@ public int hashCode() { * *

Example of listing table rows, specifying the page size. * - *

-   * {
-   *   @code
-   *   String datasetName = "my_dataset_name";
-   *   String tableName = "my_table_name";
-   *   TableId tableIdObject = TableId.of(datasetName, tableName);
-   *   // This example reads the result 100 rows per RPC call. If there's no need
-   *   // to limit the number,
-   *   // simply omit the option.
-   *   TableResult tableData = bigquery.listTableData(tableIdObject, TableDataListOption.pageSize(100));
-   *   for (FieldValueList row : tableData.iterateAll()) {
-   *     // do something with the row
-   *   }
+   * 
{@code
+   * String datasetName = "my_dataset_name";
+   * String tableName = "my_table_name";
+   * TableId tableIdObject = TableId.of(datasetName, tableName);
+   * // This example reads the result 100 rows per RPC call. If there's no need
+   * // to limit the number,
+   * // simply omit the option.
+   * TableResult tableData = bigquery.listTableData(tableIdObject, TableDataListOption.pageSize(100));
+   * for (FieldValueList row : tableData.iterateAll()) {
+   *   // do something with the row
    * }
-   * 
+ * }
* * @throws BigQueryException upon failure */ @@ -1441,18 +1360,15 @@ TableResult listTableData( * *

Example of listing table rows with schema. * - *

-   * {
-   *   @code
-   *   Schema schema = Schema.of(Field.of("word", LegacySQLTypeName.STRING),
-   *       Field.of("word_count", LegacySQLTypeName.STRING), Field.of("corpus", LegacySQLTypeName.STRING),
-   *       Field.of("corpus_date", LegacySQLTypeName.STRING));
-   *   TableResult tableData = bigquery.listTableData(TableId.of("bigquery-public-data", "samples", "shakespeare"),
-   *       schema);
-   *   FieldValueList row = tableData.getValues().iterator().next();
-   *   System.out.println(row.get("word").getStringValue());
-   * }
-   * 
+ *
{@code
+   * Schema schema = Schema.of(Field.of("word", LegacySQLTypeName.STRING),
+   *     Field.of("word_count", LegacySQLTypeName.STRING), Field.of("corpus", LegacySQLTypeName.STRING),
+   *     Field.of("corpus_date", LegacySQLTypeName.STRING));
+   * TableResult tableData = bigquery.listTableData(TableId.of("bigquery-public-data", "samples", "shakespeare"),
+   *     schema);
+   * FieldValueList row = tableData.getValues().iterator().next();
+   * System.out.println(row.get("word").getStringValue());
+   * }
* * @throws BigQueryException upon failure */ @@ -1464,16 +1380,13 @@ TableResult listTableData( * *

Example of getting a job. * - *

-   * {
-   *   @code
-   *   String jobName = "my_job_name";
-   *   Job job = bigquery.getJob(jobName);
-   *   if (job == null) {
-   *     // job was not found
-   *   }
+   * 
{@code
+   * String jobName = "my_job_name";
+   * Job job = bigquery.getJob(jobName);
+   * if (job == null) {
+   *   // job was not found
    * }
-   * 
+ * }
* * @throws BigQueryException upon failure */ @@ -1485,17 +1398,14 @@ TableResult listTableData( * *

Example of getting a job. * - *

-   * {
-   *   @code
-   *   String jobName = "my_job_name";
-   *   JobId jobIdObject = JobId.of(jobName);
-   *   Job job = bigquery.getJob(jobIdObject);
-   *   if (job == null) {
-   *     // job was not found
-   *   }
+   * 
{@code
+   * String jobName = "my_job_name";
+   * JobId jobIdObject = JobId.of(jobName);
+   * Job job = bigquery.getJob(jobIdObject);
+   * if (job == null) {
+   *   // job was not found
    * }
-   * 
+ * }
* * @throws BigQueryException upon failure */ @@ -1506,15 +1416,12 @@ TableResult listTableData( * *

Example of listing jobs, specifying the page size. * - *

-   * {
-   *   @code
-   *   Page<Job> jobs = bigquery.listJobs(JobListOption.pageSize(100));
-   *   for (Job job : jobs.iterateAll()) {
-   *     // do something with the job
-   *   }
+   * 
{@code
+   * Page jobs = bigquery.listJobs(JobListOption.pageSize(100));
+   * for (Job job : jobs.iterateAll()) {
+   *   // do something with the job
    * }
-   * 
+ * }
* * @throws BigQueryException upon failure */ @@ -1529,18 +1436,15 @@ TableResult listTableData( * *

Example of cancelling a job. * - *

-   * {
-   *   @code
-   *   String jobName = "my_job_name";
-   *   boolean success = bigquery.cancel(jobName);
-   *   if (success) {
-   *     // job was cancelled
-   *   } else {
-   *     // job was not found
-   *   }
+   * 
{@code
+   * String jobName = "my_job_name";
+   * boolean success = bigquery.cancel(jobName);
+   * if (success) {
+   *   // job was cancelled
+   * } else {
+   *   // job was not found
    * }
-   * 
+ * }
* * @return {@code true} if cancel was requested successfully, {@code false} if the job was not * found @@ -1558,19 +1462,16 @@ TableResult listTableData( * *

Example of cancelling a job. * - *

-   * {
-   *   @code
-   *   String jobName = "my_job_name";
-   *   JobId jobId = JobId.of(jobName);
-   *   boolean success = bigquery.cancel(jobId);
-   *   if (success) {
-   *     // job was cancelled
-   *   } else {
-   *     // job was not found
-   *   }
+   * 
{@code
+   * String jobName = "my_job_name";
+   * JobId jobId = JobId.of(jobName);
+   * boolean success = bigquery.cancel(jobId);
+   * if (success) {
+   *   // job was cancelled
+   * } else {
+   *   // job was not found
    * }
-   * 
+ * }
* * @return {@code true} if cancel was requested successfully, {@code false} if the job was not * found @@ -1589,22 +1490,19 @@ TableResult listTableData( * *

Example of running a query. * - *

-   * {
-   *   @code
-   *   // BigQuery bigquery = BigQueryOptions.getDefaultInstance().getService();
-   *   String query = "SELECT corpus FROM `bigquery-public-data.samples.shakespeare` GROUP BY corpus;";
-   *   QueryJobConfiguration queryConfig = QueryJobConfiguration.newBuilder(query).build();
-   *
-   *   // Print the results.
-   *   for (FieldValueList row : bigquery.query(queryConfig).iterateAll()) {
-   *     for (FieldValue val : row) {
-   *       System.out.printf("%s,", val.toString());
-   *     }
-   *     System.out.printf("\n");
+   * 
{@code
+   * // BigQuery bigquery = BigQueryOptions.getDefaultInstance().getService();
+   * String query = "SELECT corpus FROM `bigquery-public-data.samples.shakespeare` GROUP BY corpus;";
+   * QueryJobConfiguration queryConfig = QueryJobConfiguration.newBuilder(query).build();
+   *
+   * // Print the results.
+   * for (FieldValueList row : bigquery.query(queryConfig).iterateAll()) {
+   *   for (FieldValue val : row) {
+   *     System.out.printf("%s,", val.toString());
    *   }
+   *   System.out.printf("\n");
    * }
-   * 
+ * }
* * This method supports query-related preview features via environmental variables (enabled by * setting the {@code QUERY_PREVIEW_ENABLED} environment variable to "TRUE"). Specifically, this @@ -1630,7 +1528,7 @@ TableResult query(QueryJobConfiguration configuration, JobOption... options) *

If the location of the job is not "US" or "EU", the {@code jobId} must specify the job * location. * - *

This method cannot be used in conjuction with {@link QueryJobConfiguration#dryRun()} + *

This method cannot be used in conjunction with {@link QueryJobConfiguration#dryRun()} * queries. Since dry-run queries are not actually executed, there's no way to retrieve results. * *

See {@link #query(QueryJobConfiguration, JobOption...)} for examples on populating a {@link @@ -1659,56 +1557,50 @@ TableResult query(QueryJobConfiguration configuration, JobId jobId, JobOption... * *

Example of creating a channel with which to write to a table. * - *

-   * {
-   *   @code
-   *   String datasetName = "my_dataset_name";
-   *   String tableName = "my_table_name";
-   *   String csvData = "StringValue1\nStringValue2\n";
-   *   TableId tableId = TableId.of(datasetName, tableName);
-   *   WriteChannelConfiguration writeChannelConfiguration = WriteChannelConfiguration.newBuilder(tableId)
-   *       .setFormatOptions(FormatOptions.csv()).build();
-   *   TableDataWriteChannel writer = bigquery.writer(writeChannelConfiguration);
-   *   // Write data to writer
-   *   try {
-   *     writer.write(ByteBuffer.wrap(csvData.getBytes(Charsets.UTF_8)));
-   *   } finally {
-   *     writer.close();
-   *   }
-   *   // Get load job
-   *   Job job = writer.getJob();
-   *   job = job.waitFor();
-   *   LoadStatistics stats = job.getStatistics();
-   *   return stats.getOutputRows();
+   * 
{@code
+   * String datasetName = "my_dataset_name";
+   * String tableName = "my_table_name";
+   * String csvData = "StringValue1\nStringValue2\n";
+   * TableId tableId = TableId.of(datasetName, tableName);
+   * WriteChannelConfiguration writeChannelConfiguration = WriteChannelConfiguration.newBuilder(tableId)
+   *     .setFormatOptions(FormatOptions.csv()).build();
+   * TableDataWriteChannel writer = bigquery.writer(writeChannelConfiguration);
+   * // Write data to writer
+   * try {
+   *   writer.write(ByteBuffer.wrap(csvData.getBytes(Charsets.UTF_8)));
+   * } finally {
+   *   writer.close();
    * }
-   * 
+ * // Get load job + * Job job = writer.getJob(); + * job = job.waitFor(); + * LoadStatistics stats = job.getStatistics(); + * return stats.getOutputRows(); + * }
* *

Example of writing a local file to a table. * - *

-   * {
-   *   @code
-   *   String datasetName = "my_dataset_name";
-   *   String tableName = "my_table_name";
-   *   Path csvPath = FileSystems.getDefault().getPath(".", "my-data.csv");
-   *   String location = "us";
-   *   TableId tableId = TableId.of(datasetName, tableName);
-   *   WriteChannelConfiguration writeChannelConfiguration = WriteChannelConfiguration.newBuilder(tableId)
-   *       .setFormatOptions(FormatOptions.csv()).build();
-   *   // The location must be specified; other fields can be auto-detected.
-   *   JobId jobId = JobId.newBuilder().setLocation(location).build();
-   *   TableDataWriteChannel writer = bigquery.writer(jobId, writeChannelConfiguration);
-   *   // Write data to writer
-   *   try (OutputStream stream = Channels.newOutputStream(writer)) {
-   *     Files.copy(csvPath, stream);
-   *   }
-   *   // Get load job
-   *   Job job = writer.getJob();
-   *   job = job.waitFor();
-   *   LoadStatistics stats = job.getStatistics();
-   *   return stats.getOutputRows();
+   * 
{@code
+   * String datasetName = "my_dataset_name";
+   * String tableName = "my_table_name";
+   * Path csvPath = FileSystems.getDefault().getPath(".", "my-data.csv");
+   * String location = "us";
+   * TableId tableId = TableId.of(datasetName, tableName);
+   * WriteChannelConfiguration writeChannelConfiguration = WriteChannelConfiguration.newBuilder(tableId)
+   *     .setFormatOptions(FormatOptions.csv()).build();
+   * // The location must be specified; other fields can be auto-detected.
+   * JobId jobId = JobId.newBuilder().setLocation(location).build();
+   * TableDataWriteChannel writer = bigquery.writer(jobId, writeChannelConfiguration);
+   * // Write data to writer
+   * try (OutputStream stream = Channels.newOutputStream(writer)) {
+   *   Files.copy(csvPath, stream);
    * }
-   * 
+ * // Get load job + * Job job = writer.getJob(); + * job = job.waitFor(); + * LoadStatistics stats = job.getStatistics(); + * return stats.getOutputRows(); + * }
* * @throws BigQueryException upon failure */ @@ -1721,32 +1613,29 @@ TableResult query(QueryJobConfiguration configuration, JobId jobId, JobOption... * *

Example of creating a channel with which to write to a table. * - *

-   * {
-   *   @code
-   *   String datasetName = "my_dataset_name";
-   *   String tableName = "my_table_name";
-   *   String csvData = "StringValue1\nStringValue2\n";
-   *   String location = "us";
-   *   TableId tableId = TableId.of(datasetName, tableName);
-   *   WriteChannelConfiguration writeChannelConfiguration = WriteChannelConfiguration.newBuilder(tableId)
-   *       .setFormatOptions(FormatOptions.csv()).build();
-   *   // The location must be specified; other fields can be auto-detected.
-   *   JobId jobId = JobId.newBuilder().setLocation(location).build();
-   *   TableDataWriteChannel writer = bigquery.writer(jobId, writeChannelConfiguration);
-   *   // Write data to writer
-   *   try {
-   *     writer.write(ByteBuffer.wrap(csvData.getBytes(Charsets.UTF_8)));
-   *   } finally {
-   *     writer.close();
-   *   }
-   *   // Get load job
-   *   Job job = writer.getJob();
-   *   job = job.waitFor();
-   *   LoadStatistics stats = job.getStatistics();
-   *   return stats.getOutputRows();
+   * 
{@code
+   * String datasetName = "my_dataset_name";
+   * String tableName = "my_table_name";
+   * String csvData = "StringValue1\nStringValue2\n";
+   * String location = "us";
+   * TableId tableId = TableId.of(datasetName, tableName);
+   * WriteChannelConfiguration writeChannelConfiguration = WriteChannelConfiguration.newBuilder(tableId)
+   *     .setFormatOptions(FormatOptions.csv()).build();
+   * // The location must be specified; other fields can be auto-detected.
+   * JobId jobId = JobId.newBuilder().setLocation(location).build();
+   * TableDataWriteChannel writer = bigquery.writer(jobId, writeChannelConfiguration);
+   * // Write data to writer
+   * try {
+   *   writer.write(ByteBuffer.wrap(csvData.getBytes(Charsets.UTF_8)));
+   * } finally {
+   *   writer.close();
    * }
-   * 
+ * // Get load job + * Job job = writer.getJob(); + * job = job.waitFor(); + * LoadStatistics stats = job.getStatistics(); + * return stats.getOutputRows(); + * }
*/ TableDataWriteChannel writer(JobId jobId, WriteChannelConfiguration writeChannelConfiguration); From c0d8839a0930cce03713b9a915519367df087387 Mon Sep 17 00:00:00 2001 From: Mend Renovate Date: Thu, 24 Oct 2024 21:04:28 +0200 Subject: [PATCH 14/23] build(deps): update dependency org.apache.maven.plugins:maven-project-info-reports-plugin to v3.8.0 (#3539) --- pom.xml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/pom.xml b/pom.xml index 1cbbf7c7d..482f53a0b 100644 --- a/pom.xml +++ b/pom.xml @@ -180,7 +180,7 @@ org.apache.maven.plugins maven-project-info-reports-plugin - 3.7.0 + 3.8.0 From 1616a0f6057916e21f3b4a6d418d1431d8d1fa16 Mon Sep 17 00:00:00 2001 From: Mend Renovate Date: Thu, 24 Oct 2024 21:05:06 +0200 Subject: [PATCH 15/23] deps: update github/codeql-action action to v2.27.0 (#3540) --- .github/workflows/scorecard.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/workflows/scorecard.yml b/.github/workflows/scorecard.yml index 7d2892a2f..4802d1f8a 100644 --- a/.github/workflows/scorecard.yml +++ b/.github/workflows/scorecard.yml @@ -67,6 +67,6 @@ jobs: # Upload the results to GitHub's code scanning dashboard. 
- name: "Upload to code-scanning" - uses: github/codeql-action/upload-sarif@083cd45dc7d463f048a5d0975943f0e19e9c9378 # v2.26.13 + uses: github/codeql-action/upload-sarif@6a89f57882288b3d2f190cda65000eec9e9ebb7c # v2.27.0 with: sarif_file: results.sarif From c36c123f5cd298b1481c9073ac9f5e634b0e1e68 Mon Sep 17 00:00:00 2001 From: Mend Renovate Date: Thu, 24 Oct 2024 21:05:38 +0200 Subject: [PATCH 16/23] deps: update actions/checkout action to v4.2.2 (#3541) --- .github/workflows/scorecard.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/workflows/scorecard.yml b/.github/workflows/scorecard.yml index 4802d1f8a..1be1c2a32 100644 --- a/.github/workflows/scorecard.yml +++ b/.github/workflows/scorecard.yml @@ -32,7 +32,7 @@ jobs: steps: - name: "Checkout code" - uses: actions/checkout@eef61447b9ff4aafe5dcd4e0bbf5d482be7e7871 # v4.2.1 + uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2 with: persist-credentials: false From fa796cb836fc588e64c4b7260ff9ceba6e703e91 Mon Sep 17 00:00:00 2001 From: Mend Renovate Date: Thu, 24 Oct 2024 21:06:05 +0200 Subject: [PATCH 17/23] test(deps): update dependency com.google.cloud:google-cloud-storage to v2.44.0 (#3543) --- pom.xml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/pom.xml b/pom.xml index 482f53a0b..cc8c6dc59 100644 --- a/pom.xml +++ b/pom.xml @@ -137,7 +137,7 @@ com.google.cloud google-cloud-storage - 2.43.2 + 2.44.0 test From 0c42092e34912d21a4d13f041577056faadf914a Mon Sep 17 00:00:00 2001 From: Mend Renovate Date: Thu, 24 Oct 2024 21:06:27 +0200 Subject: [PATCH 18/23] deps: update dependency com.google.apis:google-api-services-bigquery to v2-rev20241013-2.0.0 (#3544) --- pom.xml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/pom.xml b/pom.xml index cc8c6dc59..8fc92a393 100644 --- a/pom.xml +++ b/pom.xml @@ -54,7 +54,7 @@ UTF-8 github google-cloud-bigquery-parent - v2-rev20240919-2.0.0 + v2-rev20241013-2.0.0 From 
0bd3c862636271c5a851fcd229b4cf6878a8c5d4 Mon Sep 17 00:00:00 2001 From: Mend Renovate Date: Fri, 25 Oct 2024 00:30:43 +0200 Subject: [PATCH 19/23] chore(deps): update dependency com.google.cloud:google-cloud-bigquerystorage-bom to v3.10.0 (#3546) --- pom.xml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/pom.xml b/pom.xml index 8fc92a393..9165396fa 100644 --- a/pom.xml +++ b/pom.xml @@ -71,7 +71,7 @@ com.google.cloud google-cloud-bigquerystorage-bom - 3.9.3 + 3.10.0 pom import From 616b2f611f313994bf0ec2889daea3b569a84baf Mon Sep 17 00:00:00 2001 From: Mend Renovate Date: Sat, 26 Oct 2024 03:53:39 +0200 Subject: [PATCH 20/23] deps: update dependency com.google.cloud:sdk-platform-java-config to v3.39.0 (#3548) --- .github/workflows/unmanaged_dependency_check.yaml | 2 +- .kokoro/continuous/graalvm-native-17.cfg | 2 +- .kokoro/continuous/graalvm-native.cfg | 2 +- .kokoro/presubmit/graalvm-native-17.cfg | 2 +- .kokoro/presubmit/graalvm-native.cfg | 2 +- google-cloud-bigquery-bom/pom.xml | 2 +- pom.xml | 2 +- 7 files changed, 7 insertions(+), 7 deletions(-) diff --git a/.github/workflows/unmanaged_dependency_check.yaml b/.github/workflows/unmanaged_dependency_check.yaml index d34d268f9..4df9466e7 100644 --- a/.github/workflows/unmanaged_dependency_check.yaml +++ b/.github/workflows/unmanaged_dependency_check.yaml @@ -17,7 +17,7 @@ jobs: # repository .kokoro/build.sh - name: Unmanaged dependency check - uses: googleapis/sdk-platform-java/java-shared-dependencies/unmanaged-dependency-check@google-cloud-shared-dependencies/v3.38.0 + uses: googleapis/sdk-platform-java/java-shared-dependencies/unmanaged-dependency-check@google-cloud-shared-dependencies/v3.39.0 with: # java-bigquery does not produce a BOM. Fortunately the root pom.xml # defines google-cloud-bigquery in dependencyManagement section. 
So diff --git a/.kokoro/continuous/graalvm-native-17.cfg b/.kokoro/continuous/graalvm-native-17.cfg index d2374101e..4bc28d0af 100644 --- a/.kokoro/continuous/graalvm-native-17.cfg +++ b/.kokoro/continuous/graalvm-native-17.cfg @@ -3,7 +3,7 @@ # Configure the docker image for kokoro-trampoline. env_vars: { key: "TRAMPOLINE_IMAGE" - value: "gcr.io/cloud-devrel-public-resources/graalvm_sdk_platform_b:3.38.0" + value: "gcr.io/cloud-devrel-public-resources/graalvm_sdk_platform_b:3.39.0" } env_vars: { diff --git a/.kokoro/continuous/graalvm-native.cfg b/.kokoro/continuous/graalvm-native.cfg index 1ef28925b..33a4a9d95 100644 --- a/.kokoro/continuous/graalvm-native.cfg +++ b/.kokoro/continuous/graalvm-native.cfg @@ -3,7 +3,7 @@ # Configure the docker image for kokoro-trampoline. env_vars: { key: "TRAMPOLINE_IMAGE" - value: "gcr.io/cloud-devrel-public-resources/graalvm_sdk_platform_a:3.38.0" + value: "gcr.io/cloud-devrel-public-resources/graalvm_sdk_platform_a:3.39.0" } env_vars: { diff --git a/.kokoro/presubmit/graalvm-native-17.cfg b/.kokoro/presubmit/graalvm-native-17.cfg index cef8b0b6c..ba53eb9ce 100644 --- a/.kokoro/presubmit/graalvm-native-17.cfg +++ b/.kokoro/presubmit/graalvm-native-17.cfg @@ -3,7 +3,7 @@ # Configure the docker image for kokoro-trampoline. env_vars: { key: "TRAMPOLINE_IMAGE" - value: "gcr.io/cloud-devrel-public-resources/graalvm_sdk_platform_b:3.38.0" + value: "gcr.io/cloud-devrel-public-resources/graalvm_sdk_platform_b:3.39.0" } env_vars: { diff --git a/.kokoro/presubmit/graalvm-native.cfg b/.kokoro/presubmit/graalvm-native.cfg index e434ee684..418a399b7 100644 --- a/.kokoro/presubmit/graalvm-native.cfg +++ b/.kokoro/presubmit/graalvm-native.cfg @@ -3,7 +3,7 @@ # Configure the docker image for kokoro-trampoline.
env_vars: { key: "TRAMPOLINE_IMAGE" - value: "gcr.io/cloud-devrel-public-resources/graalvm_sdk_platform_a:3.38.0" + value: "gcr.io/cloud-devrel-public-resources/graalvm_sdk_platform_a:3.39.0" } env_vars: { diff --git a/google-cloud-bigquery-bom/pom.xml b/google-cloud-bigquery-bom/pom.xml index b949a354f..21f719a49 100644 --- a/google-cloud-bigquery-bom/pom.xml +++ b/google-cloud-bigquery-bom/pom.xml @@ -8,7 +8,7 @@ com.google.cloud sdk-platform-java-config - 3.38.0 + 3.39.0 diff --git a/pom.xml b/pom.xml index 9165396fa..da427159b 100644 --- a/pom.xml +++ b/pom.xml @@ -14,7 +14,7 @@ com.google.cloud sdk-platform-java-config - 3.38.0 + 3.39.0 From c03a63a0da4f4915e9761dc1ca7429c46748688c Mon Sep 17 00:00:00 2001 From: Mend Renovate Date: Sun, 27 Oct 2024 02:37:55 +0100 Subject: [PATCH 21/23] chore(deps): update dependency com.google.cloud:google-cloud-bigquerystorage-bom to v3.10.1 (#3550) --- pom.xml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/pom.xml b/pom.xml index da427159b..57a54a70b 100644 --- a/pom.xml +++ b/pom.xml @@ -71,7 +71,7 @@ com.google.cloud google-cloud-bigquerystorage-bom - 3.10.0 + 3.10.1 pom import From fb5cabdb875456a894c98cb7b63d524faedded9d Mon Sep 17 00:00:00 2001 From: Mend Renovate Date: Sun, 27 Oct 2024 02:39:25 +0100 Subject: [PATCH 22/23] test(deps): update dependency com.google.cloud:google-cloud-storage to v2.44.1 (#3549) --- pom.xml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/pom.xml b/pom.xml index 57a54a70b..eab2b84b7 100644 --- a/pom.xml +++ b/pom.xml @@ -137,7 +137,7 @@ com.google.cloud google-cloud-storage - 2.44.0 + 2.44.1 test From 1e33145110a2d273fc9cf49f6f3b2c87e2673eed Mon Sep 17 00:00:00 2001 From: "release-please[bot]" <55107282+release-please[bot]@users.noreply.github.com> Date: Mon, 28 Oct 2024 10:17:11 -0400 Subject: [PATCH 23/23] chore(main): release 2.43.2 (#3529) Co-authored-by: release-please[bot] <55107282+release-please[bot]@users.noreply.github.com> --- CHANGELOG.md | 
24 ++++++++++++++++++++++++ benchmark/pom.xml | 2 +- google-cloud-bigquery-bom/pom.xml | 4 ++-- google-cloud-bigquery/pom.xml | 4 ++-- pom.xml | 4 ++-- samples/snapshot/pom.xml | 2 +- versions.txt | 2 +- 7 files changed, 33 insertions(+), 9 deletions(-) diff --git a/CHANGELOG.md b/CHANGELOG.md index aaf9551fe..36bb39c29 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -1,5 +1,29 @@ # Changelog +## [2.43.2](https://github.com/googleapis/java-bigquery/compare/v2.43.1...v2.43.2) (2024-10-27) + + +### Dependencies + +* Update actions/checkout action to v4.2.2 ([#3541](https://github.com/googleapis/java-bigquery/issues/3541)) ([c36c123](https://github.com/googleapis/java-bigquery/commit/c36c123f5cd298b1481c9073ac9f5e634b0e1e68)) +* Update actions/upload-artifact action to v4.4.2 ([#3524](https://github.com/googleapis/java-bigquery/issues/3524)) ([776a554](https://github.com/googleapis/java-bigquery/commit/776a5541cc94e8ffb1f5e5c6969ae06585571b45)) +* Update actions/upload-artifact action to v4.4.3 ([#3530](https://github.com/googleapis/java-bigquery/issues/3530)) ([2f87fd9](https://github.com/googleapis/java-bigquery/commit/2f87fd9d777175cb5a8e5b0dc55f07546351e504)) +* Update dependency com.google.api.grpc:proto-google-cloud-bigqueryconnection-v1 to v2.54.0 ([#3532](https://github.com/googleapis/java-bigquery/issues/3532)) ([25be311](https://github.com/googleapis/java-bigquery/commit/25be311c1477db0993a5825a2b839a295170790f)) +* Update dependency com.google.apis:google-api-services-bigquery to v2-rev20241013-2.0.0 ([#3544](https://github.com/googleapis/java-bigquery/issues/3544)) ([0c42092](https://github.com/googleapis/java-bigquery/commit/0c42092e34912d21a4d13f041577056faadf914a)) +* Update dependency com.google.cloud:google-cloud-bigquerystorage-bom to v3.10.0 ([0bd3c86](https://github.com/googleapis/java-bigquery/commit/0bd3c862636271c5a851fcd229b4cf6878a8c5d4)) +* Update dependency com.google.cloud:google-cloud-bigquerystorage-bom to v3.10.1 
([c03a63a](https://github.com/googleapis/java-bigquery/commit/c03a63a0da4f4915e9761dc1ca7429c46748688c)) +* Update dependency com.google.cloud:google-cloud-datacatalog-bom to v1.58.0 ([#3533](https://github.com/googleapis/java-bigquery/issues/3533)) ([cad2643](https://github.com/googleapis/java-bigquery/commit/cad26430f21a37eec2b87ea417f0cf67dcf9c97a)) +* Update dependency com.google.cloud:sdk-platform-java-config to v3.38.0 ([#3542](https://github.com/googleapis/java-bigquery/issues/3542)) ([16448ee](https://github.com/googleapis/java-bigquery/commit/16448eec7c7f00a113c923a0fcde463c8ac91f9b)) +* Update dependency com.google.cloud:sdk-platform-java-config to v3.39.0 ([#3548](https://github.com/googleapis/java-bigquery/issues/3548)) ([616b2f6](https://github.com/googleapis/java-bigquery/commit/616b2f611f313994bf0ec2889daea3b569a84baf)) +* Update github/codeql-action action to v2.26.13 ([#3536](https://github.com/googleapis/java-bigquery/issues/3536)) ([844744f](https://github.com/googleapis/java-bigquery/commit/844744f3dea804a31abc806592f557a26cffbab4)) +* Update github/codeql-action action to v2.27.0 ([#3540](https://github.com/googleapis/java-bigquery/issues/3540)) ([1616a0f](https://github.com/googleapis/java-bigquery/commit/1616a0f6057916e21f3b4a6d418d1431d8d1fa16)) + + +### Documentation + +* Reformat javadoc ([#3545](https://github.com/googleapis/java-bigquery/issues/3545)) ([4763f73](https://github.com/googleapis/java-bigquery/commit/4763f73ad854ca4bfdddbbdc0bb43fe639238665)) +* Update SimpleApp to explicitly set project id ([#3534](https://github.com/googleapis/java-bigquery/issues/3534)) ([903a0f7](https://github.com/googleapis/java-bigquery/commit/903a0f7db0926f3d166eebada1710413056fb4a2)) + ## [2.43.1](https://github.com/googleapis/java-bigquery/compare/v2.43.0...v2.43.1) (2024-10-09) diff --git a/benchmark/pom.xml b/benchmark/pom.xml index 2d1239ead..ec1f12a3d 100644 --- a/benchmark/pom.xml +++ b/benchmark/pom.xml @@ -6,7 +6,7 @@ 
google-cloud-bigquery-parent com.google.cloud - 2.43.2-SNAPSHOT + 2.43.2 diff --git a/google-cloud-bigquery-bom/pom.xml b/google-cloud-bigquery-bom/pom.xml index 21f719a49..8035b666c 100644 --- a/google-cloud-bigquery-bom/pom.xml +++ b/google-cloud-bigquery-bom/pom.xml @@ -3,7 +3,7 @@ 4.0.0 com.google.cloud google-cloud-bigquery-bom - 2.43.2-SNAPSHOT + 2.43.2 pom com.google.cloud @@ -54,7 +54,7 @@ com.google.cloud google-cloud-bigquery - 2.43.2-SNAPSHOT + 2.43.2 diff --git a/google-cloud-bigquery/pom.xml b/google-cloud-bigquery/pom.xml index 845a34a39..ca7e31834 100644 --- a/google-cloud-bigquery/pom.xml +++ b/google-cloud-bigquery/pom.xml @@ -3,7 +3,7 @@ 4.0.0 com.google.cloud google-cloud-bigquery - 2.43.2-SNAPSHOT + 2.43.2 jar BigQuery https://github.com/googleapis/java-bigquery @@ -11,7 +11,7 @@ com.google.cloud google-cloud-bigquery-parent - 2.43.2-SNAPSHOT + 2.43.2 google-cloud-bigquery diff --git a/pom.xml b/pom.xml index eab2b84b7..5dacb64d5 100644 --- a/pom.xml +++ b/pom.xml @@ -4,7 +4,7 @@ com.google.cloud google-cloud-bigquery-parent pom - 2.43.2-SNAPSHOT + 2.43.2 BigQuery Parent https://github.com/googleapis/java-bigquery @@ -93,7 +93,7 @@ com.google.cloud google-cloud-bigquery - 2.43.2-SNAPSHOT + 2.43.2 diff --git a/samples/snapshot/pom.xml b/samples/snapshot/pom.xml index 7f45a7209..78ddff3b7 100644 --- a/samples/snapshot/pom.xml +++ b/samples/snapshot/pom.xml @@ -44,7 +44,7 @@ com.google.cloud google-cloud-bigquery - 2.43.2-SNAPSHOT + 2.43.2 diff --git a/versions.txt b/versions.txt index c189d4ed9..40ca7de53 100644 --- a/versions.txt +++ b/versions.txt @@ -1,4 +1,4 @@ # Format: # module:released-version:current-version -google-cloud-bigquery:2.43.1:2.43.2-SNAPSHOT \ No newline at end of file +google-cloud-bigquery:2.43.2:2.43.2 \ No newline at end of file