diff --git a/google-cloud-aiplatform/src/main/java/com/google/cloud/aiplatform/v1beta1/DatasetServiceClient.java b/google-cloud-aiplatform/src/main/java/com/google/cloud/aiplatform/v1beta1/DatasetServiceClient.java index 496de9e90..99e954f4e 100644 --- a/google-cloud-aiplatform/src/main/java/com/google/cloud/aiplatform/v1beta1/DatasetServiceClient.java +++ b/google-cloud-aiplatform/src/main/java/com/google/cloud/aiplatform/v1beta1/DatasetServiceClient.java @@ -92,7 +92,7 @@ *
Please refer to the GitHub repository's samples for more quickstart code snippets.
*/
@BetaApi
-@Generated("by gapic-generator")
+@Generated("by gapic-generator-java")
public class DatasetServiceClient implements BackgroundResource {
private final DatasetServiceSettings settings;
private final DatasetServiceStub stub;
@@ -276,8 +276,7 @@ public final UnaryCallable [FieldMask](https: //tinyurl.com/dev-google-protobuf#google.protobuf.FieldMask).
+ * definition, see [FieldMask](https://tinyurl.com/protobufs/google.protobuf#fieldmask).
* Updatable fields:
* * `display_name` * `description` * `labels`
* @throws com.google.api.gax.rpc.ApiException if the remote call fails
diff --git a/google-cloud-aiplatform/src/main/java/com/google/cloud/aiplatform/v1beta1/DatasetServiceSettings.java b/google-cloud-aiplatform/src/main/java/com/google/cloud/aiplatform/v1beta1/DatasetServiceSettings.java
index dfc367edc..1f03ae479 100644
--- a/google-cloud-aiplatform/src/main/java/com/google/cloud/aiplatform/v1beta1/DatasetServiceSettings.java
+++ b/google-cloud-aiplatform/src/main/java/com/google/cloud/aiplatform/v1beta1/DatasetServiceSettings.java
@@ -71,6 +71,7 @@
* DatasetServiceSettings datasetServiceSettings = datasetServiceSettingsBuilder.build();
* }
*/
+@BetaApi
@Generated("by gapic-generator-java")
public class DatasetServiceSettings extends ClientSettings Please refer to the GitHub repository's samples for more quickstart code snippets.
*/
@BetaApi
-@Generated("by gapic-generator")
+@Generated("by gapic-generator-java")
public class EndpointServiceClient implements BackgroundResource {
private final EndpointServiceSettings settings;
private final EndpointServiceStub stub;
@@ -342,7 +342,8 @@ public final UnaryCallable Please refer to the GitHub repository's samples for more quickstart code snippets.
*/
@BetaApi
-@Generated("by gapic-generator")
+@Generated("by gapic-generator-java")
public class JobServiceClient implements BackgroundResource {
private final JobServiceSettings settings;
private final JobServiceStub stub;
diff --git a/google-cloud-aiplatform/src/main/java/com/google/cloud/aiplatform/v1beta1/JobServiceSettings.java b/google-cloud-aiplatform/src/main/java/com/google/cloud/aiplatform/v1beta1/JobServiceSettings.java
index 3887fa8f6..643c8a2db 100644
--- a/google-cloud-aiplatform/src/main/java/com/google/cloud/aiplatform/v1beta1/JobServiceSettings.java
+++ b/google-cloud-aiplatform/src/main/java/com/google/cloud/aiplatform/v1beta1/JobServiceSettings.java
@@ -71,6 +71,7 @@
* JobServiceSettings jobServiceSettings = jobServiceSettingsBuilder.build();
* }
*/
+@BetaApi
@Generated("by gapic-generator-java")
public class JobServiceSettings extends ClientSettings Please refer to the GitHub repository's samples for more quickstart code snippets.
*/
@BetaApi
-@Generated("by gapic-generator")
+@Generated("by gapic-generator-java")
public class MigrationServiceClient implements BackgroundResource {
private final MigrationServiceSettings settings;
private final MigrationServiceStub stub;
diff --git a/google-cloud-aiplatform/src/main/java/com/google/cloud/aiplatform/v1beta1/MigrationServiceSettings.java b/google-cloud-aiplatform/src/main/java/com/google/cloud/aiplatform/v1beta1/MigrationServiceSettings.java
index 37d770f56..8f084e1e6 100644
--- a/google-cloud-aiplatform/src/main/java/com/google/cloud/aiplatform/v1beta1/MigrationServiceSettings.java
+++ b/google-cloud-aiplatform/src/main/java/com/google/cloud/aiplatform/v1beta1/MigrationServiceSettings.java
@@ -68,6 +68,7 @@
* MigrationServiceSettings migrationServiceSettings = migrationServiceSettingsBuilder.build();
* }
*/
+@BetaApi
@Generated("by gapic-generator-java")
public class MigrationServiceSettings extends ClientSettings Please refer to the GitHub repository's samples for more quickstart code snippets.
*/
@BetaApi
-@Generated("by gapic-generator")
+@Generated("by gapic-generator-java")
public class ModelServiceClient implements BackgroundResource {
private final ModelServiceSettings settings;
private final ModelServiceStub stub;
@@ -338,9 +338,7 @@ public final UnaryCallable [FieldMask](https: //developers.google.com/protocol-buffers //
- * /docs/reference/google.protobuf#fieldmask).
+ * definition, see [FieldMask](https://tinyurl.com/protobufs/google.protobuf#fieldmask).
* @throws com.google.api.gax.rpc.ApiException if the remote call fails
*/
public final Model updateModel(Model model, FieldMask updateMask) {
diff --git a/google-cloud-aiplatform/src/main/java/com/google/cloud/aiplatform/v1beta1/ModelServiceSettings.java b/google-cloud-aiplatform/src/main/java/com/google/cloud/aiplatform/v1beta1/ModelServiceSettings.java
index 333a9ee9b..8c7dbb2c3 100644
--- a/google-cloud-aiplatform/src/main/java/com/google/cloud/aiplatform/v1beta1/ModelServiceSettings.java
+++ b/google-cloud-aiplatform/src/main/java/com/google/cloud/aiplatform/v1beta1/ModelServiceSettings.java
@@ -70,6 +70,7 @@
* ModelServiceSettings modelServiceSettings = modelServiceSettingsBuilder.build();
* }
*/
+@BetaApi
@Generated("by gapic-generator-java")
public class ModelServiceSettings extends ClientSettings Please refer to the GitHub repository's samples for more quickstart code snippets.
*/
@BetaApi
-@Generated("by gapic-generator")
+@Generated("by gapic-generator-java")
public class PipelineServiceClient implements BackgroundResource {
private final PipelineServiceSettings settings;
private final PipelineServiceStub stub;
diff --git a/google-cloud-aiplatform/src/main/java/com/google/cloud/aiplatform/v1beta1/PipelineServiceSettings.java b/google-cloud-aiplatform/src/main/java/com/google/cloud/aiplatform/v1beta1/PipelineServiceSettings.java
index 9672468ae..4f1e9d135 100644
--- a/google-cloud-aiplatform/src/main/java/com/google/cloud/aiplatform/v1beta1/PipelineServiceSettings.java
+++ b/google-cloud-aiplatform/src/main/java/com/google/cloud/aiplatform/v1beta1/PipelineServiceSettings.java
@@ -69,6 +69,7 @@
* PipelineServiceSettings pipelineServiceSettings = pipelineServiceSettingsBuilder.build();
* }
*/
+@BetaApi
@Generated("by gapic-generator-java")
public class PipelineServiceSettings extends ClientSettings Please refer to the GitHub repository's samples for more quickstart code snippets.
*/
@BetaApi
-@Generated("by gapic-generator")
+@Generated("by gapic-generator-java")
public class PredictionServiceClient implements BackgroundResource {
private final PredictionServiceSettings settings;
private final PredictionServiceStub stub;
diff --git a/google-cloud-aiplatform/src/main/java/com/google/cloud/aiplatform/v1beta1/PredictionServiceSettings.java b/google-cloud-aiplatform/src/main/java/com/google/cloud/aiplatform/v1beta1/PredictionServiceSettings.java
index d094cbc0c..403cd46ce 100644
--- a/google-cloud-aiplatform/src/main/java/com/google/cloud/aiplatform/v1beta1/PredictionServiceSettings.java
+++ b/google-cloud-aiplatform/src/main/java/com/google/cloud/aiplatform/v1beta1/PredictionServiceSettings.java
@@ -63,6 +63,7 @@
* PredictionServiceSettings predictionServiceSettings = predictionServiceSettingsBuilder.build();
* }
*/
+@BetaApi
@Generated("by gapic-generator-java")
public class PredictionServiceSettings extends ClientSettings Please refer to the GitHub repository's samples for more quickstart code snippets.
*/
@BetaApi
-@Generated("by gapic-generator")
+@Generated("by gapic-generator-java")
public class SpecialistPoolServiceClient implements BackgroundResource {
private final SpecialistPoolServiceSettings settings;
private final SpecialistPoolServiceStub stub;
diff --git a/google-cloud-aiplatform/src/main/java/com/google/cloud/aiplatform/v1beta1/SpecialistPoolServiceSettings.java b/google-cloud-aiplatform/src/main/java/com/google/cloud/aiplatform/v1beta1/SpecialistPoolServiceSettings.java
index 0f1f1fecc..6ff9a5274 100644
--- a/google-cloud-aiplatform/src/main/java/com/google/cloud/aiplatform/v1beta1/SpecialistPoolServiceSettings.java
+++ b/google-cloud-aiplatform/src/main/java/com/google/cloud/aiplatform/v1beta1/SpecialistPoolServiceSettings.java
@@ -70,6 +70,7 @@
* specialistPoolServiceSettingsBuilder.build();
* }
*/
+@BetaApi
@Generated("by gapic-generator-java")
public class SpecialistPoolServiceSettings extends ClientSettings This class is for advanced usage and reflects the underlying API directly.
*/
-@Generated("by gapic-generator")
+@BetaApi
+@Generated("by gapic-generator-java")
public abstract class DatasetServiceStub implements BackgroundResource {
public OperationsStub getOperationsStub() {
diff --git a/google-cloud-aiplatform/src/main/java/com/google/cloud/aiplatform/v1beta1/stub/EndpointServiceStub.java b/google-cloud-aiplatform/src/main/java/com/google/cloud/aiplatform/v1beta1/stub/EndpointServiceStub.java
index 72d4c8e7e..77a61562b 100644
--- a/google-cloud-aiplatform/src/main/java/com/google/cloud/aiplatform/v1beta1/stub/EndpointServiceStub.java
+++ b/google-cloud-aiplatform/src/main/java/com/google/cloud/aiplatform/v1beta1/stub/EndpointServiceStub.java
@@ -18,6 +18,7 @@
import static com.google.cloud.aiplatform.v1beta1.EndpointServiceClient.ListEndpointsPagedResponse;
+import com.google.api.core.BetaApi;
import com.google.api.gax.core.BackgroundResource;
import com.google.api.gax.rpc.OperationCallable;
import com.google.api.gax.rpc.UnaryCallable;
@@ -47,7 +48,8 @@
*
* This class is for advanced usage and reflects the underlying API directly.
*/
-@Generated("by gapic-generator")
+@BetaApi
+@Generated("by gapic-generator-java")
public abstract class EndpointServiceStub implements BackgroundResource {
public OperationsStub getOperationsStub() {
diff --git a/google-cloud-aiplatform/src/main/java/com/google/cloud/aiplatform/v1beta1/stub/GrpcDatasetServiceCallableFactory.java b/google-cloud-aiplatform/src/main/java/com/google/cloud/aiplatform/v1beta1/stub/GrpcDatasetServiceCallableFactory.java
index 867fbc8b4..131004953 100644
--- a/google-cloud-aiplatform/src/main/java/com/google/cloud/aiplatform/v1beta1/stub/GrpcDatasetServiceCallableFactory.java
+++ b/google-cloud-aiplatform/src/main/java/com/google/cloud/aiplatform/v1beta1/stub/GrpcDatasetServiceCallableFactory.java
@@ -16,6 +16,7 @@
package com.google.cloud.aiplatform.v1beta1.stub;
+import com.google.api.core.BetaApi;
import com.google.api.gax.grpc.GrpcCallSettings;
import com.google.api.gax.grpc.GrpcCallableFactory;
import com.google.api.gax.grpc.GrpcStubCallableFactory;
@@ -41,7 +42,8 @@
*
* This class is for advanced usage.
*/
-@Generated("by gapic-generator")
+@BetaApi
+@Generated("by gapic-generator-java")
public class GrpcDatasetServiceCallableFactory implements GrpcStubCallableFactory {
@Override
diff --git a/google-cloud-aiplatform/src/main/java/com/google/cloud/aiplatform/v1beta1/stub/GrpcDatasetServiceStub.java b/google-cloud-aiplatform/src/main/java/com/google/cloud/aiplatform/v1beta1/stub/GrpcDatasetServiceStub.java
index df5240454..6dcc2963d 100644
--- a/google-cloud-aiplatform/src/main/java/com/google/cloud/aiplatform/v1beta1/stub/GrpcDatasetServiceStub.java
+++ b/google-cloud-aiplatform/src/main/java/com/google/cloud/aiplatform/v1beta1/stub/GrpcDatasetServiceStub.java
@@ -20,6 +20,7 @@
import static com.google.cloud.aiplatform.v1beta1.DatasetServiceClient.ListDataItemsPagedResponse;
import static com.google.cloud.aiplatform.v1beta1.DatasetServiceClient.ListDatasetsPagedResponse;
+import com.google.api.core.BetaApi;
import com.google.api.gax.core.BackgroundResource;
import com.google.api.gax.core.BackgroundResourceAggregation;
import com.google.api.gax.grpc.GrpcCallSettings;
@@ -66,6 +67,7 @@
*
* This class is for advanced usage and reflects the underlying API directly.
*/
+@BetaApi
@Generated("by gapic-generator-java")
public class GrpcDatasetServiceStub extends DatasetServiceStub {
private static final MethodDescriptor This class is for advanced usage.
*/
-@Generated("by gapic-generator")
+@BetaApi
+@Generated("by gapic-generator-java")
public class GrpcEndpointServiceCallableFactory implements GrpcStubCallableFactory {
@Override
diff --git a/google-cloud-aiplatform/src/main/java/com/google/cloud/aiplatform/v1beta1/stub/GrpcEndpointServiceStub.java b/google-cloud-aiplatform/src/main/java/com/google/cloud/aiplatform/v1beta1/stub/GrpcEndpointServiceStub.java
index ba20e502a..3c58ca08b 100644
--- a/google-cloud-aiplatform/src/main/java/com/google/cloud/aiplatform/v1beta1/stub/GrpcEndpointServiceStub.java
+++ b/google-cloud-aiplatform/src/main/java/com/google/cloud/aiplatform/v1beta1/stub/GrpcEndpointServiceStub.java
@@ -18,6 +18,7 @@
import static com.google.cloud.aiplatform.v1beta1.EndpointServiceClient.ListEndpointsPagedResponse;
+import com.google.api.core.BetaApi;
import com.google.api.gax.core.BackgroundResource;
import com.google.api.gax.core.BackgroundResourceAggregation;
import com.google.api.gax.grpc.GrpcCallSettings;
@@ -58,6 +59,7 @@
*
* This class is for advanced usage and reflects the underlying API directly.
*/
+@BetaApi
@Generated("by gapic-generator-java")
public class GrpcEndpointServiceStub extends EndpointServiceStub {
private static final MethodDescriptor This class is for advanced usage.
*/
-@Generated("by gapic-generator")
+@BetaApi
+@Generated("by gapic-generator-java")
public class GrpcJobServiceCallableFactory implements GrpcStubCallableFactory {
@Override
diff --git a/google-cloud-aiplatform/src/main/java/com/google/cloud/aiplatform/v1beta1/stub/GrpcJobServiceStub.java b/google-cloud-aiplatform/src/main/java/com/google/cloud/aiplatform/v1beta1/stub/GrpcJobServiceStub.java
index 472f4ccd2..467748f17 100644
--- a/google-cloud-aiplatform/src/main/java/com/google/cloud/aiplatform/v1beta1/stub/GrpcJobServiceStub.java
+++ b/google-cloud-aiplatform/src/main/java/com/google/cloud/aiplatform/v1beta1/stub/GrpcJobServiceStub.java
@@ -21,6 +21,7 @@
import static com.google.cloud.aiplatform.v1beta1.JobServiceClient.ListDataLabelingJobsPagedResponse;
import static com.google.cloud.aiplatform.v1beta1.JobServiceClient.ListHyperparameterTuningJobsPagedResponse;
+import com.google.api.core.BetaApi;
import com.google.api.gax.core.BackgroundResource;
import com.google.api.gax.core.BackgroundResourceAggregation;
import com.google.api.gax.grpc.GrpcCallSettings;
@@ -75,6 +76,7 @@
*
* This class is for advanced usage and reflects the underlying API directly.
*/
+@BetaApi
@Generated("by gapic-generator-java")
public class GrpcJobServiceStub extends JobServiceStub {
private static final MethodDescriptor This class is for advanced usage.
*/
-@Generated("by gapic-generator")
+@BetaApi
+@Generated("by gapic-generator-java")
public class GrpcMigrationServiceCallableFactory implements GrpcStubCallableFactory {
@Override
diff --git a/google-cloud-aiplatform/src/main/java/com/google/cloud/aiplatform/v1beta1/stub/GrpcMigrationServiceStub.java b/google-cloud-aiplatform/src/main/java/com/google/cloud/aiplatform/v1beta1/stub/GrpcMigrationServiceStub.java
index 4fcb60e3f..839250d8f 100644
--- a/google-cloud-aiplatform/src/main/java/com/google/cloud/aiplatform/v1beta1/stub/GrpcMigrationServiceStub.java
+++ b/google-cloud-aiplatform/src/main/java/com/google/cloud/aiplatform/v1beta1/stub/GrpcMigrationServiceStub.java
@@ -18,6 +18,7 @@
import static com.google.cloud.aiplatform.v1beta1.MigrationServiceClient.SearchMigratableResourcesPagedResponse;
+import com.google.api.core.BetaApi;
import com.google.api.gax.core.BackgroundResource;
import com.google.api.gax.core.BackgroundResourceAggregation;
import com.google.api.gax.grpc.GrpcCallSettings;
@@ -47,6 +48,7 @@
*
* This class is for advanced usage and reflects the underlying API directly.
*/
+@BetaApi
@Generated("by gapic-generator-java")
public class GrpcMigrationServiceStub extends MigrationServiceStub {
private static final MethodDescriptor<
diff --git a/google-cloud-aiplatform/src/main/java/com/google/cloud/aiplatform/v1beta1/stub/GrpcModelServiceCallableFactory.java b/google-cloud-aiplatform/src/main/java/com/google/cloud/aiplatform/v1beta1/stub/GrpcModelServiceCallableFactory.java
index f56cc2f5f..688783f6f 100644
--- a/google-cloud-aiplatform/src/main/java/com/google/cloud/aiplatform/v1beta1/stub/GrpcModelServiceCallableFactory.java
+++ b/google-cloud-aiplatform/src/main/java/com/google/cloud/aiplatform/v1beta1/stub/GrpcModelServiceCallableFactory.java
@@ -16,6 +16,7 @@
package com.google.cloud.aiplatform.v1beta1.stub;
+import com.google.api.core.BetaApi;
import com.google.api.gax.grpc.GrpcCallSettings;
import com.google.api.gax.grpc.GrpcCallableFactory;
import com.google.api.gax.grpc.GrpcStubCallableFactory;
@@ -41,7 +42,8 @@
*
* This class is for advanced usage.
*/
-@Generated("by gapic-generator")
+@BetaApi
+@Generated("by gapic-generator-java")
public class GrpcModelServiceCallableFactory implements GrpcStubCallableFactory {
@Override
diff --git a/google-cloud-aiplatform/src/main/java/com/google/cloud/aiplatform/v1beta1/stub/GrpcModelServiceStub.java b/google-cloud-aiplatform/src/main/java/com/google/cloud/aiplatform/v1beta1/stub/GrpcModelServiceStub.java
index 5684f69a2..e5dcb1a6e 100644
--- a/google-cloud-aiplatform/src/main/java/com/google/cloud/aiplatform/v1beta1/stub/GrpcModelServiceStub.java
+++ b/google-cloud-aiplatform/src/main/java/com/google/cloud/aiplatform/v1beta1/stub/GrpcModelServiceStub.java
@@ -20,6 +20,7 @@
import static com.google.cloud.aiplatform.v1beta1.ModelServiceClient.ListModelEvaluationsPagedResponse;
import static com.google.cloud.aiplatform.v1beta1.ModelServiceClient.ListModelsPagedResponse;
+import com.google.api.core.BetaApi;
import com.google.api.gax.core.BackgroundResource;
import com.google.api.gax.core.BackgroundResourceAggregation;
import com.google.api.gax.grpc.GrpcCallSettings;
@@ -66,6 +67,7 @@
*
* This class is for advanced usage and reflects the underlying API directly.
*/
+@BetaApi
@Generated("by gapic-generator-java")
public class GrpcModelServiceStub extends ModelServiceStub {
private static final MethodDescriptor This class is for advanced usage.
*/
-@Generated("by gapic-generator")
+@BetaApi
+@Generated("by gapic-generator-java")
public class GrpcPipelineServiceCallableFactory implements GrpcStubCallableFactory {
@Override
diff --git a/google-cloud-aiplatform/src/main/java/com/google/cloud/aiplatform/v1beta1/stub/GrpcPipelineServiceStub.java b/google-cloud-aiplatform/src/main/java/com/google/cloud/aiplatform/v1beta1/stub/GrpcPipelineServiceStub.java
index 84aaf3c5c..2974d20ad 100644
--- a/google-cloud-aiplatform/src/main/java/com/google/cloud/aiplatform/v1beta1/stub/GrpcPipelineServiceStub.java
+++ b/google-cloud-aiplatform/src/main/java/com/google/cloud/aiplatform/v1beta1/stub/GrpcPipelineServiceStub.java
@@ -18,6 +18,7 @@
import static com.google.cloud.aiplatform.v1beta1.PipelineServiceClient.ListTrainingPipelinesPagedResponse;
+import com.google.api.core.BetaApi;
import com.google.api.gax.core.BackgroundResource;
import com.google.api.gax.core.BackgroundResourceAggregation;
import com.google.api.gax.grpc.GrpcCallSettings;
@@ -51,6 +52,7 @@
*
* This class is for advanced usage and reflects the underlying API directly.
*/
+@BetaApi
@Generated("by gapic-generator-java")
public class GrpcPipelineServiceStub extends PipelineServiceStub {
private static final MethodDescriptor This class is for advanced usage.
*/
-@Generated("by gapic-generator")
+@BetaApi
+@Generated("by gapic-generator-java")
public class GrpcPredictionServiceCallableFactory implements GrpcStubCallableFactory {
@Override
diff --git a/google-cloud-aiplatform/src/main/java/com/google/cloud/aiplatform/v1beta1/stub/GrpcPredictionServiceStub.java b/google-cloud-aiplatform/src/main/java/com/google/cloud/aiplatform/v1beta1/stub/GrpcPredictionServiceStub.java
index 9835ec3e8..b82d9fd46 100644
--- a/google-cloud-aiplatform/src/main/java/com/google/cloud/aiplatform/v1beta1/stub/GrpcPredictionServiceStub.java
+++ b/google-cloud-aiplatform/src/main/java/com/google/cloud/aiplatform/v1beta1/stub/GrpcPredictionServiceStub.java
@@ -16,6 +16,7 @@
package com.google.cloud.aiplatform.v1beta1.stub;
+import com.google.api.core.BetaApi;
import com.google.api.gax.core.BackgroundResource;
import com.google.api.gax.core.BackgroundResourceAggregation;
import com.google.api.gax.grpc.GrpcCallSettings;
@@ -42,6 +43,7 @@
*
* This class is for advanced usage and reflects the underlying API directly.
*/
+@BetaApi
@Generated("by gapic-generator-java")
public class GrpcPredictionServiceStub extends PredictionServiceStub {
private static final MethodDescriptor This class is for advanced usage.
*/
-@Generated("by gapic-generator")
+@BetaApi
+@Generated("by gapic-generator-java")
public class GrpcSpecialistPoolServiceCallableFactory implements GrpcStubCallableFactory {
@Override
diff --git a/google-cloud-aiplatform/src/main/java/com/google/cloud/aiplatform/v1beta1/stub/GrpcSpecialistPoolServiceStub.java b/google-cloud-aiplatform/src/main/java/com/google/cloud/aiplatform/v1beta1/stub/GrpcSpecialistPoolServiceStub.java
index 3983992be..e406240bb 100644
--- a/google-cloud-aiplatform/src/main/java/com/google/cloud/aiplatform/v1beta1/stub/GrpcSpecialistPoolServiceStub.java
+++ b/google-cloud-aiplatform/src/main/java/com/google/cloud/aiplatform/v1beta1/stub/GrpcSpecialistPoolServiceStub.java
@@ -18,6 +18,7 @@
import static com.google.cloud.aiplatform.v1beta1.SpecialistPoolServiceClient.ListSpecialistPoolsPagedResponse;
+import com.google.api.core.BetaApi;
import com.google.api.gax.core.BackgroundResource;
import com.google.api.gax.core.BackgroundResourceAggregation;
import com.google.api.gax.grpc.GrpcCallSettings;
@@ -53,6 +54,7 @@
*
* This class is for advanced usage and reflects the underlying API directly.
*/
+@BetaApi
@Generated("by gapic-generator-java")
public class GrpcSpecialistPoolServiceStub extends SpecialistPoolServiceStub {
private static final MethodDescriptor This class is for advanced usage and reflects the underlying API directly.
*/
-@Generated("by gapic-generator")
+@BetaApi
+@Generated("by gapic-generator-java")
public abstract class JobServiceStub implements BackgroundResource {
public OperationsStub getOperationsStub() {
diff --git a/google-cloud-aiplatform/src/main/java/com/google/cloud/aiplatform/v1beta1/stub/MigrationServiceStub.java b/google-cloud-aiplatform/src/main/java/com/google/cloud/aiplatform/v1beta1/stub/MigrationServiceStub.java
index 18d58132f..0838a35d4 100644
--- a/google-cloud-aiplatform/src/main/java/com/google/cloud/aiplatform/v1beta1/stub/MigrationServiceStub.java
+++ b/google-cloud-aiplatform/src/main/java/com/google/cloud/aiplatform/v1beta1/stub/MigrationServiceStub.java
@@ -18,6 +18,7 @@
import static com.google.cloud.aiplatform.v1beta1.MigrationServiceClient.SearchMigratableResourcesPagedResponse;
+import com.google.api.core.BetaApi;
import com.google.api.gax.core.BackgroundResource;
import com.google.api.gax.rpc.OperationCallable;
import com.google.api.gax.rpc.UnaryCallable;
@@ -36,7 +37,8 @@
*
* This class is for advanced usage and reflects the underlying API directly.
*/
-@Generated("by gapic-generator")
+@BetaApi
+@Generated("by gapic-generator-java")
public abstract class MigrationServiceStub implements BackgroundResource {
public OperationsStub getOperationsStub() {
diff --git a/google-cloud-aiplatform/src/main/java/com/google/cloud/aiplatform/v1beta1/stub/ModelServiceStub.java b/google-cloud-aiplatform/src/main/java/com/google/cloud/aiplatform/v1beta1/stub/ModelServiceStub.java
index 04ee41e41..a0496db40 100644
--- a/google-cloud-aiplatform/src/main/java/com/google/cloud/aiplatform/v1beta1/stub/ModelServiceStub.java
+++ b/google-cloud-aiplatform/src/main/java/com/google/cloud/aiplatform/v1beta1/stub/ModelServiceStub.java
@@ -20,6 +20,7 @@
import static com.google.cloud.aiplatform.v1beta1.ModelServiceClient.ListModelEvaluationsPagedResponse;
import static com.google.cloud.aiplatform.v1beta1.ModelServiceClient.ListModelsPagedResponse;
+import com.google.api.core.BetaApi;
import com.google.api.gax.core.BackgroundResource;
import com.google.api.gax.rpc.OperationCallable;
import com.google.api.gax.rpc.UnaryCallable;
@@ -55,7 +56,8 @@
*
* This class is for advanced usage and reflects the underlying API directly.
*/
-@Generated("by gapic-generator")
+@BetaApi
+@Generated("by gapic-generator-java")
public abstract class ModelServiceStub implements BackgroundResource {
public OperationsStub getOperationsStub() {
diff --git a/google-cloud-aiplatform/src/main/java/com/google/cloud/aiplatform/v1beta1/stub/PipelineServiceStub.java b/google-cloud-aiplatform/src/main/java/com/google/cloud/aiplatform/v1beta1/stub/PipelineServiceStub.java
index f0520c87a..3cba40f35 100644
--- a/google-cloud-aiplatform/src/main/java/com/google/cloud/aiplatform/v1beta1/stub/PipelineServiceStub.java
+++ b/google-cloud-aiplatform/src/main/java/com/google/cloud/aiplatform/v1beta1/stub/PipelineServiceStub.java
@@ -18,6 +18,7 @@
import static com.google.cloud.aiplatform.v1beta1.PipelineServiceClient.ListTrainingPipelinesPagedResponse;
+import com.google.api.core.BetaApi;
import com.google.api.gax.core.BackgroundResource;
import com.google.api.gax.rpc.OperationCallable;
import com.google.api.gax.rpc.UnaryCallable;
@@ -40,7 +41,8 @@
*
* This class is for advanced usage and reflects the underlying API directly.
*/
-@Generated("by gapic-generator")
+@BetaApi
+@Generated("by gapic-generator-java")
public abstract class PipelineServiceStub implements BackgroundResource {
public OperationsStub getOperationsStub() {
diff --git a/google-cloud-aiplatform/src/main/java/com/google/cloud/aiplatform/v1beta1/stub/PredictionServiceStub.java b/google-cloud-aiplatform/src/main/java/com/google/cloud/aiplatform/v1beta1/stub/PredictionServiceStub.java
index 8839170a4..0dc82229b 100644
--- a/google-cloud-aiplatform/src/main/java/com/google/cloud/aiplatform/v1beta1/stub/PredictionServiceStub.java
+++ b/google-cloud-aiplatform/src/main/java/com/google/cloud/aiplatform/v1beta1/stub/PredictionServiceStub.java
@@ -16,6 +16,7 @@
package com.google.cloud.aiplatform.v1beta1.stub;
+import com.google.api.core.BetaApi;
import com.google.api.gax.core.BackgroundResource;
import com.google.api.gax.rpc.UnaryCallable;
import com.google.cloud.aiplatform.v1beta1.ExplainRequest;
@@ -30,7 +31,8 @@
*
* This class is for advanced usage and reflects the underlying API directly.
*/
-@Generated("by gapic-generator")
+@BetaApi
+@Generated("by gapic-generator-java")
public abstract class PredictionServiceStub implements BackgroundResource {
public UnaryCallable This class is for advanced usage and reflects the underlying API directly.
*/
-@Generated("by gapic-generator")
+@BetaApi
+@Generated("by gapic-generator-java")
public abstract class SpecialistPoolServiceStub implements BackgroundResource {
public OperationsStub getOperationsStub() {
diff --git a/google-cloud-aiplatform/src/test/java/com/google/cloud/aiplatform/v1beta1/DatasetServiceClientTest.java b/google-cloud-aiplatform/src/test/java/com/google/cloud/aiplatform/v1beta1/DatasetServiceClientTest.java
index 3d923f8af..9a2f6acfe 100644
--- a/google-cloud-aiplatform/src/test/java/com/google/cloud/aiplatform/v1beta1/DatasetServiceClientTest.java
+++ b/google-cloud-aiplatform/src/test/java/com/google/cloud/aiplatform/v1beta1/DatasetServiceClientTest.java
@@ -102,6 +102,7 @@ public void createDatasetTest() throws Exception {
.setUpdateTime(Timestamp.newBuilder().build())
.setEtag("etag3123477")
.putAllLabels(new HashMap
- * Paramaters that configure active learning pipeline. Active learning will
+ * Parameters that configure active learning pipeline. Active learning will
* label the data incrementally by several iterations. For every iteration, it
* will select a batch of data based on the sampling strategy.
*
@@ -563,7 +563,7 @@ protected Builder newBuilderForType(com.google.protobuf.GeneratedMessageV3.Build
*
*
*
- * Paramaters that configure active learning pipeline. Active learning will
+ * Parameters that configure active learning pipeline. Active learning will
* label the data incrementally by several iterations. For every iteration, it
* will select a batch of data based on the sampling strategy.
*
diff --git a/proto-google-cloud-aiplatform-v1beta1/src/main/java/com/google/cloud/aiplatform/v1beta1/Attribution.java b/proto-google-cloud-aiplatform-v1beta1/src/main/java/com/google/cloud/aiplatform/v1beta1/Attribution.java
index f09ee1f08..cd3cf1562 100644
--- a/proto-google-cloud-aiplatform-v1beta1/src/main/java/com/google/cloud/aiplatform/v1beta1/Attribution.java
+++ b/proto-google-cloud-aiplatform-v1beta1/src/main/java/com/google/cloud/aiplatform/v1beta1/Attribution.java
@@ -404,8 +404,8 @@ public int getOutputIndex(int index) {
*
*
*
- * Output only. The display name of the output identified by [output_index][google.cloud.aiplatform.v1beta1.Attribution.output_index], e.g. the
- * predicted class name by a multi-classification Model.
+ * Output only. The display name of the output identified by [output_index][google.cloud.aiplatform.v1beta1.Attribution.output_index]. For example,
+ * the predicted class name by a multi-classification Model.
* This field is only populated iff the Model predicts display names as a
* separate field along with the explained output. The predicted display name
* must has the same shape of the explained output, and can be located using
@@ -432,8 +432,8 @@ public java.lang.String getOutputDisplayName() {
*
*
*
- * Output only. The display name of the output identified by [output_index][google.cloud.aiplatform.v1beta1.Attribution.output_index], e.g. the
- * predicted class name by a multi-classification Model.
+ * Output only. The display name of the output identified by [output_index][google.cloud.aiplatform.v1beta1.Attribution.output_index]. For example,
+ * the predicted class name by a multi-classification Model.
* This field is only populated iff the Model predicts display names as a
* separate field along with the explained output. The predicted display name
* must has the same shape of the explained output, and can be located using
@@ -465,20 +465,19 @@ public com.google.protobuf.ByteString getOutputDisplayNameBytes() {
*
* Output only. Error of [feature_attributions][google.cloud.aiplatform.v1beta1.Attribution.feature_attributions] caused by approximation used in the
* explanation method. Lower value means more precise attributions.
- * * For [Sampled Shapley
- * attribution][ExplanationParameters.sampled_shapley_attribution], increasing
- * [path_count][google.cloud.aiplatform.v1beta1.SampledShapleyAttribution.path_count] may reduce the error.
- * * For [Integrated Gradients
- * attribution][ExplanationParameters.integrated_gradients_attribution],
- * increasing [step_count][google.cloud.aiplatform.v1beta1.IntegratedGradientsAttribution.step_count] may
+ * * For Sampled Shapley
+ * [attribution][google.cloud.aiplatform.v1beta1.ExplanationParameters.sampled_shapley_attribution],
+ * increasing [path_count][google.cloud.aiplatform.v1beta1.SampledShapleyAttribution.path_count] might reduce
+ * the error.
+ * * For Integrated Gradients
+ * [attribution][google.cloud.aiplatform.v1beta1.ExplanationParameters.integrated_gradients_attribution],
+ * increasing [step_count][google.cloud.aiplatform.v1beta1.IntegratedGradientsAttribution.step_count] might
* reduce the error.
- * * For [XRAI
- * attribution][ExplanationParameters.xrai_attribution], increasing
- * [step_count][google.cloud.aiplatform.v1beta1.XraiAttribution.step_count] may reduce the error.
- * Refer to AI Explanations Whitepaper for more details:
- * https:
- * //storage.googleapis.com/cloud-ai-whitep
- * // apers/AI%20Explainability%20Whitepaper.pdf
+ * * For [XRAI attribution][google.cloud.aiplatform.v1beta1.ExplanationParameters.xrai_attribution],
+ * increasing
+ * [step_count][google.cloud.aiplatform.v1beta1.XraiAttribution.step_count] might reduce the error.
+ * See [this introduction](/ai-platform-unified/docs/explainable-ai/overview)
+ * for more information.
*
*
* double approximation_error = 6 [(.google.api.field_behavior) = OUTPUT_ONLY];
@@ -1679,8 +1678,8 @@ public Builder clearOutputIndex() {
*
*
*
- * Output only. The display name of the output identified by [output_index][google.cloud.aiplatform.v1beta1.Attribution.output_index], e.g. the
- * predicted class name by a multi-classification Model.
+ * Output only. The display name of the output identified by [output_index][google.cloud.aiplatform.v1beta1.Attribution.output_index]. For example,
+ * the predicted class name by a multi-classification Model.
* This field is only populated iff the Model predicts display names as a
* separate field along with the explained output. The predicted display name
* must has the same shape of the explained output, and can be located using
@@ -1706,8 +1705,8 @@ public java.lang.String getOutputDisplayName() {
*
*
*
- * Output only. The display name of the output identified by [output_index][google.cloud.aiplatform.v1beta1.Attribution.output_index], e.g. the
- * predicted class name by a multi-classification Model.
+ * Output only. The display name of the output identified by [output_index][google.cloud.aiplatform.v1beta1.Attribution.output_index]. For example,
+ * the predicted class name by a multi-classification Model.
* This field is only populated iff the Model predicts display names as a
* separate field along with the explained output. The predicted display name
* must has the same shape of the explained output, and can be located using
@@ -1733,8 +1732,8 @@ public com.google.protobuf.ByteString getOutputDisplayNameBytes() {
*
*
*
- * Output only. The display name of the output identified by [output_index][google.cloud.aiplatform.v1beta1.Attribution.output_index], e.g. the
- * predicted class name by a multi-classification Model.
+ * Output only. The display name of the output identified by [output_index][google.cloud.aiplatform.v1beta1.Attribution.output_index]. For example,
+ * the predicted class name by a multi-classification Model.
* This field is only populated iff the Model predicts display names as a
* separate field along with the explained output. The predicted display name
* must has the same shape of the explained output, and can be located using
@@ -1759,8 +1758,8 @@ public Builder setOutputDisplayName(java.lang.String value) {
*
*
*
- * Output only. The display name of the output identified by [output_index][google.cloud.aiplatform.v1beta1.Attribution.output_index], e.g. the
- * predicted class name by a multi-classification Model.
+ * Output only. The display name of the output identified by [output_index][google.cloud.aiplatform.v1beta1.Attribution.output_index]. For example,
+ * the predicted class name by a multi-classification Model.
* This field is only populated iff the Model predicts display names as a
* separate field along with the explained output. The predicted display name
* must has the same shape of the explained output, and can be located using
@@ -1781,8 +1780,8 @@ public Builder clearOutputDisplayName() {
*
*
*
- * Output only. The display name of the output identified by [output_index][google.cloud.aiplatform.v1beta1.Attribution.output_index], e.g. the
- * predicted class name by a multi-classification Model.
+ * Output only. The display name of the output identified by [output_index][google.cloud.aiplatform.v1beta1.Attribution.output_index]. For example,
+ * the predicted class name by a multi-classification Model.
* This field is only populated iff the Model predicts display names as a
* separate field along with the explained output. The predicted display name
* must has the same shape of the explained output, and can be located using
@@ -1812,20 +1811,19 @@ public Builder setOutputDisplayNameBytes(com.google.protobuf.ByteString value) {
*
* Output only. Error of [feature_attributions][google.cloud.aiplatform.v1beta1.Attribution.feature_attributions] caused by approximation used in the
* explanation method. Lower value means more precise attributions.
- * * For [Sampled Shapley
- * attribution][ExplanationParameters.sampled_shapley_attribution], increasing
- * [path_count][google.cloud.aiplatform.v1beta1.SampledShapleyAttribution.path_count] may reduce the error.
- * * For [Integrated Gradients
- * attribution][ExplanationParameters.integrated_gradients_attribution],
- * increasing [step_count][google.cloud.aiplatform.v1beta1.IntegratedGradientsAttribution.step_count] may
+ * * For Sampled Shapley
+ * [attribution][google.cloud.aiplatform.v1beta1.ExplanationParameters.sampled_shapley_attribution],
+ * increasing [path_count][google.cloud.aiplatform.v1beta1.SampledShapleyAttribution.path_count] might reduce
+ * the error.
+ * * For Integrated Gradients
+ * [attribution][google.cloud.aiplatform.v1beta1.ExplanationParameters.integrated_gradients_attribution],
+ * increasing [step_count][google.cloud.aiplatform.v1beta1.IntegratedGradientsAttribution.step_count] might
* reduce the error.
- * * For [XRAI
- * attribution][ExplanationParameters.xrai_attribution], increasing
- * [step_count][google.cloud.aiplatform.v1beta1.XraiAttribution.step_count] may reduce the error.
- * Refer to AI Explanations Whitepaper for more details:
- * https:
- * //storage.googleapis.com/cloud-ai-whitep
- * // apers/AI%20Explainability%20Whitepaper.pdf
+ * * For [XRAI attribution][google.cloud.aiplatform.v1beta1.ExplanationParameters.xrai_attribution],
+ * increasing
+ * [step_count][google.cloud.aiplatform.v1beta1.XraiAttribution.step_count] might reduce the error.
+ * See [this introduction](/ai-platform-unified/docs/explainable-ai/overview)
+ * for more information.
*
*
* double approximation_error = 6 [(.google.api.field_behavior) = OUTPUT_ONLY];
@@ -1842,20 +1840,19 @@ public double getApproximationError() {
*
* Output only. Error of [feature_attributions][google.cloud.aiplatform.v1beta1.Attribution.feature_attributions] caused by approximation used in the
* explanation method. Lower value means more precise attributions.
- * * For [Sampled Shapley
- * attribution][ExplanationParameters.sampled_shapley_attribution], increasing
- * [path_count][google.cloud.aiplatform.v1beta1.SampledShapleyAttribution.path_count] may reduce the error.
- * * For [Integrated Gradients
- * attribution][ExplanationParameters.integrated_gradients_attribution],
- * increasing [step_count][google.cloud.aiplatform.v1beta1.IntegratedGradientsAttribution.step_count] may
+ * * For Sampled Shapley
+ * [attribution][google.cloud.aiplatform.v1beta1.ExplanationParameters.sampled_shapley_attribution],
+ * increasing [path_count][google.cloud.aiplatform.v1beta1.SampledShapleyAttribution.path_count] might reduce
+ * the error.
+ * * For Integrated Gradients
+ * [attribution][google.cloud.aiplatform.v1beta1.ExplanationParameters.integrated_gradients_attribution],
+ * increasing [step_count][google.cloud.aiplatform.v1beta1.IntegratedGradientsAttribution.step_count] might
* reduce the error.
- * * For [XRAI
- * attribution][ExplanationParameters.xrai_attribution], increasing
- * [step_count][google.cloud.aiplatform.v1beta1.XraiAttribution.step_count] may reduce the error.
- * Refer to AI Explanations Whitepaper for more details:
- * https:
- * //storage.googleapis.com/cloud-ai-whitep
- * // apers/AI%20Explainability%20Whitepaper.pdf
+ * * For [XRAI attribution][google.cloud.aiplatform.v1beta1.ExplanationParameters.xrai_attribution],
+ * increasing
+ * [step_count][google.cloud.aiplatform.v1beta1.XraiAttribution.step_count] might reduce the error.
+ * See [this introduction](/ai-platform-unified/docs/explainable-ai/overview)
+ * for more information.
*
*
* double approximation_error = 6 [(.google.api.field_behavior) = OUTPUT_ONLY];
@@ -1875,20 +1872,19 @@ public Builder setApproximationError(double value) {
*
* Output only. Error of [feature_attributions][google.cloud.aiplatform.v1beta1.Attribution.feature_attributions] caused by approximation used in the
* explanation method. Lower value means more precise attributions.
- * * For [Sampled Shapley
- * attribution][ExplanationParameters.sampled_shapley_attribution], increasing
- * [path_count][google.cloud.aiplatform.v1beta1.SampledShapleyAttribution.path_count] may reduce the error.
- * * For [Integrated Gradients
- * attribution][ExplanationParameters.integrated_gradients_attribution],
- * increasing [step_count][google.cloud.aiplatform.v1beta1.IntegratedGradientsAttribution.step_count] may
+ * * For Sampled Shapley
+ * [attribution][google.cloud.aiplatform.v1beta1.ExplanationParameters.sampled_shapley_attribution],
+ * increasing [path_count][google.cloud.aiplatform.v1beta1.SampledShapleyAttribution.path_count] might reduce
+ * the error.
+ * * For Integrated Gradients
+ * [attribution][google.cloud.aiplatform.v1beta1.ExplanationParameters.integrated_gradients_attribution],
+ * increasing [step_count][google.cloud.aiplatform.v1beta1.IntegratedGradientsAttribution.step_count] might
* reduce the error.
- * * For [XRAI
- * attribution][ExplanationParameters.xrai_attribution], increasing
- * [step_count][google.cloud.aiplatform.v1beta1.XraiAttribution.step_count] may reduce the error.
- * Refer to AI Explanations Whitepaper for more details:
- * https:
- * //storage.googleapis.com/cloud-ai-whitep
- * // apers/AI%20Explainability%20Whitepaper.pdf
+ * * For [XRAI attribution][google.cloud.aiplatform.v1beta1.ExplanationParameters.xrai_attribution],
+ * increasing
+ * [step_count][google.cloud.aiplatform.v1beta1.XraiAttribution.step_count] might reduce the error.
+ * See [this introduction](/ai-platform-unified/docs/explainable-ai/overview)
+ * for more information.
*
*
* double approximation_error = 6 [(.google.api.field_behavior) = OUTPUT_ONLY];
diff --git a/proto-google-cloud-aiplatform-v1beta1/src/main/java/com/google/cloud/aiplatform/v1beta1/AttributionOrBuilder.java b/proto-google-cloud-aiplatform-v1beta1/src/main/java/com/google/cloud/aiplatform/v1beta1/AttributionOrBuilder.java
index 48bc07ced..3dc3f1e63 100644
--- a/proto-google-cloud-aiplatform-v1beta1/src/main/java/com/google/cloud/aiplatform/v1beta1/AttributionOrBuilder.java
+++ b/proto-google-cloud-aiplatform-v1beta1/src/main/java/com/google/cloud/aiplatform/v1beta1/AttributionOrBuilder.java
@@ -214,8 +214,8 @@ public interface AttributionOrBuilder
*
*
*
- * Output only. The display name of the output identified by [output_index][google.cloud.aiplatform.v1beta1.Attribution.output_index], e.g. the
- * predicted class name by a multi-classification Model.
+ * Output only. The display name of the output identified by [output_index][google.cloud.aiplatform.v1beta1.Attribution.output_index]. For example,
+ * the predicted class name by a multi-classification Model.
* This field is only populated iff the Model predicts display names as a
* separate field along with the explained output. The predicted display name
* must has the same shape of the explained output, and can be located using
@@ -231,8 +231,8 @@ public interface AttributionOrBuilder
*
*
*
- * Output only. The display name of the output identified by [output_index][google.cloud.aiplatform.v1beta1.Attribution.output_index], e.g. the
- * predicted class name by a multi-classification Model.
+ * Output only. The display name of the output identified by [output_index][google.cloud.aiplatform.v1beta1.Attribution.output_index]. For example,
+ * the predicted class name by a multi-classification Model.
* This field is only populated iff the Model predicts display names as a
* separate field along with the explained output. The predicted display name
* must has the same shape of the explained output, and can be located using
@@ -251,20 +251,19 @@ public interface AttributionOrBuilder
*
* Output only. Error of [feature_attributions][google.cloud.aiplatform.v1beta1.Attribution.feature_attributions] caused by approximation used in the
* explanation method. Lower value means more precise attributions.
- * * For [Sampled Shapley
- * attribution][ExplanationParameters.sampled_shapley_attribution], increasing
- * [path_count][google.cloud.aiplatform.v1beta1.SampledShapleyAttribution.path_count] may reduce the error.
- * * For [Integrated Gradients
- * attribution][ExplanationParameters.integrated_gradients_attribution],
- * increasing [step_count][google.cloud.aiplatform.v1beta1.IntegratedGradientsAttribution.step_count] may
+ * * For Sampled Shapley
+ * [attribution][google.cloud.aiplatform.v1beta1.ExplanationParameters.sampled_shapley_attribution],
+ * increasing [path_count][google.cloud.aiplatform.v1beta1.SampledShapleyAttribution.path_count] might reduce
+ * the error.
+ * * For Integrated Gradients
+ * [attribution][google.cloud.aiplatform.v1beta1.ExplanationParameters.integrated_gradients_attribution],
+ * increasing [step_count][google.cloud.aiplatform.v1beta1.IntegratedGradientsAttribution.step_count] might
* reduce the error.
- * * For [XRAI
- * attribution][ExplanationParameters.xrai_attribution], increasing
- * [step_count][google.cloud.aiplatform.v1beta1.XraiAttribution.step_count] may reduce the error.
- * Refer to AI Explanations Whitepaper for more details:
- * https:
- * //storage.googleapis.com/cloud-ai-whitep
- * // apers/AI%20Explainability%20Whitepaper.pdf
+ * * For [XRAI attribution][google.cloud.aiplatform.v1beta1.ExplanationParameters.xrai_attribution],
+ * increasing
+ * [step_count][google.cloud.aiplatform.v1beta1.XraiAttribution.step_count] might reduce the error.
+ * See [this introduction](/ai-platform-unified/docs/explainable-ai/overview)
+ * for more information.
*
*
* double approximation_error = 6 [(.google.api.field_behavior) = OUTPUT_ONLY];
diff --git a/proto-google-cloud-aiplatform-v1beta1/src/main/java/com/google/cloud/aiplatform/v1beta1/BatchMigrateResourcesOperationMetadata.java b/proto-google-cloud-aiplatform-v1beta1/src/main/java/com/google/cloud/aiplatform/v1beta1/BatchMigrateResourcesOperationMetadata.java
index e8c8039c3..dbfb12ec4 100644
--- a/proto-google-cloud-aiplatform-v1beta1/src/main/java/com/google/cloud/aiplatform/v1beta1/BatchMigrateResourcesOperationMetadata.java
+++ b/proto-google-cloud-aiplatform-v1beta1/src/main/java/com/google/cloud/aiplatform/v1beta1/BatchMigrateResourcesOperationMetadata.java
@@ -39,7 +39,9 @@ private BatchMigrateResourcesOperationMetadata(
super(builder);
}
- private BatchMigrateResourcesOperationMetadata() {}
+ private BatchMigrateResourcesOperationMetadata() {
+ partialResults_ = java.util.Collections.emptyList();
+ }
@java.lang.Override
@SuppressWarnings({"unused"})
@@ -60,6 +62,7 @@ private BatchMigrateResourcesOperationMetadata(
if (extensionRegistry == null) {
throw new java.lang.NullPointerException();
}
+ int mutable_bitField0_ = 0;
com.google.protobuf.UnknownFieldSet.Builder unknownFields =
com.google.protobuf.UnknownFieldSet.newBuilder();
try {
@@ -88,6 +91,22 @@ private BatchMigrateResourcesOperationMetadata(
break;
}
+ case 18:
+ {
+ if (!((mutable_bitField0_ & 0x00000001) != 0)) {
+ partialResults_ =
+ new java.util.ArrayList<
+ com.google.cloud.aiplatform.v1beta1.BatchMigrateResourcesOperationMetadata
+ .PartialResult>();
+ mutable_bitField0_ |= 0x00000001;
+ }
+ partialResults_.add(
+ input.readMessage(
+ com.google.cloud.aiplatform.v1beta1.BatchMigrateResourcesOperationMetadata
+ .PartialResult.parser(),
+ extensionRegistry));
+ break;
+ }
default:
{
if (!parseUnknownField(input, unknownFields, extensionRegistry, tag)) {
@@ -102,6 +121,9 @@ private BatchMigrateResourcesOperationMetadata(
} catch (java.io.IOException e) {
throw new com.google.protobuf.InvalidProtocolBufferException(e).setUnfinishedMessage(this);
} finally {
+ if (((mutable_bitField0_ & 0x00000001) != 0)) {
+ partialResults_ = java.util.Collections.unmodifiableList(partialResults_);
+ }
this.unknownFields = unknownFields.build();
makeExtensionsImmutable();
}
@@ -123,53 +145,1861 @@ public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
.class);
}
- public static final int GENERIC_METADATA_FIELD_NUMBER = 1;
- private com.google.cloud.aiplatform.v1beta1.GenericOperationMetadata genericMetadata_;
+ public interface PartialResultOrBuilder
+ extends
+ // @@protoc_insertion_point(interface_extends:google.cloud.aiplatform.v1beta1.BatchMigrateResourcesOperationMetadata.PartialResult)
+ com.google.protobuf.MessageOrBuilder {
+
+ /**
+ *
+ *
+ *
+ * The error result of the migration request in case of failure.
+ *
+ *
+ * .google.rpc.Status error = 2;
+ *
+ * @return Whether the error field is set.
+ */
+ boolean hasError();
+ /**
+ *
+ *
+ *
+ * The error result of the migration request in case of failure.
+ *
+ *
+ * .google.rpc.Status error = 2;
+ *
+ * @return The error.
+ */
+ com.google.rpc.Status getError();
+ /**
+ *
+ *
+ *
+ * The error result of the migration request in case of failure.
+ *
+ *
+ * .google.rpc.Status error = 2;
+ */
+ com.google.rpc.StatusOrBuilder getErrorOrBuilder();
+
+ /**
+ *
+ *
+ *
+ * Migrated model resource name.
+ *
+ *
+ * string model = 3 [(.google.api.resource_reference) = { ... }
+ *
+ * @return The model.
+ */
+ java.lang.String getModel();
+ /**
+ *
+ *
+ *
+ * Migrated model resource name.
+ *
+ *
+ * string model = 3 [(.google.api.resource_reference) = { ... }
+ *
+ * @return The bytes for model.
+ */
+ com.google.protobuf.ByteString getModelBytes();
+
+ /**
+ *
+ *
+ *
+ * Migrated dataset resource name.
+ *
+ *
+ * string dataset = 4 [(.google.api.resource_reference) = { ... }
+ *
+ * @return The dataset.
+ */
+ java.lang.String getDataset();
+ /**
+ *
+ *
+ *
+ * Migrated dataset resource name.
+ *
+ *
+ * string dataset = 4 [(.google.api.resource_reference) = { ... }
+ *
+ * @return The bytes for dataset.
+ */
+ com.google.protobuf.ByteString getDatasetBytes();
+
+ /**
+ *
+ *
+ *
+ * It's the same as the value in
+ * [MigrateResourceRequest.migrate_resource_requests][].
+ *
+ *
+ * .google.cloud.aiplatform.v1beta1.MigrateResourceRequest request = 1;
+ *
+ * @return Whether the request field is set.
+ */
+ boolean hasRequest();
+ /**
+ *
+ *
+ *
+ * It's the same as the value in
+ * [MigrateResourceRequest.migrate_resource_requests][].
+ *
+ *
+ * .google.cloud.aiplatform.v1beta1.MigrateResourceRequest request = 1;
+ *
+ * @return The request.
+ */
+ com.google.cloud.aiplatform.v1beta1.MigrateResourceRequest getRequest();
+ /**
+ *
+ *
+ *
+ * It's the same as the value in
+ * [MigrateResourceRequest.migrate_resource_requests][].
+ *
+ *
+ * .google.cloud.aiplatform.v1beta1.MigrateResourceRequest request = 1;
+ */
+ com.google.cloud.aiplatform.v1beta1.MigrateResourceRequestOrBuilder getRequestOrBuilder();
+
+ public com.google.cloud.aiplatform.v1beta1.BatchMigrateResourcesOperationMetadata.PartialResult
+ .ResultCase
+ getResultCase();
+ }
+ /**
+ *
+ *
+ *
+   * Represents a partial result in batch migration operation for one
+ * [MigrateResourceRequest][google.cloud.aiplatform.v1beta1.MigrateResourceRequest].
+ *
+ *
+ * Protobuf type {@code
+ * google.cloud.aiplatform.v1beta1.BatchMigrateResourcesOperationMetadata.PartialResult}
+ */
+ public static final class PartialResult extends com.google.protobuf.GeneratedMessageV3
+ implements
+ // @@protoc_insertion_point(message_implements:google.cloud.aiplatform.v1beta1.BatchMigrateResourcesOperationMetadata.PartialResult)
+ PartialResultOrBuilder {
+ private static final long serialVersionUID = 0L;
+ // Use PartialResult.newBuilder() to construct.
+ private PartialResult(com.google.protobuf.GeneratedMessageV3.Builder> builder) {
+ super(builder);
+ }
+
+ private PartialResult() {}
+
+ @java.lang.Override
+ @SuppressWarnings({"unused"})
+ protected java.lang.Object newInstance(UnusedPrivateParameter unused) {
+ return new PartialResult();
+ }
+
+ @java.lang.Override
+ public final com.google.protobuf.UnknownFieldSet getUnknownFields() {
+ return this.unknownFields;
+ }
+
+ private PartialResult(
+ com.google.protobuf.CodedInputStream input,
+ com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+ throws com.google.protobuf.InvalidProtocolBufferException {
+ this();
+ if (extensionRegistry == null) {
+ throw new java.lang.NullPointerException();
+ }
+ com.google.protobuf.UnknownFieldSet.Builder unknownFields =
+ com.google.protobuf.UnknownFieldSet.newBuilder();
+ try {
+ boolean done = false;
+ while (!done) {
+ int tag = input.readTag();
+ switch (tag) {
+ case 0:
+ done = true;
+ break;
+ case 10:
+ {
+ com.google.cloud.aiplatform.v1beta1.MigrateResourceRequest.Builder subBuilder =
+ null;
+ if (request_ != null) {
+ subBuilder = request_.toBuilder();
+ }
+ request_ =
+ input.readMessage(
+ com.google.cloud.aiplatform.v1beta1.MigrateResourceRequest.parser(),
+ extensionRegistry);
+ if (subBuilder != null) {
+ subBuilder.mergeFrom(request_);
+ request_ = subBuilder.buildPartial();
+ }
+
+ break;
+ }
+ case 18:
+ {
+ com.google.rpc.Status.Builder subBuilder = null;
+ if (resultCase_ == 2) {
+ subBuilder = ((com.google.rpc.Status) result_).toBuilder();
+ }
+ result_ = input.readMessage(com.google.rpc.Status.parser(), extensionRegistry);
+ if (subBuilder != null) {
+ subBuilder.mergeFrom((com.google.rpc.Status) result_);
+ result_ = subBuilder.buildPartial();
+ }
+ resultCase_ = 2;
+ break;
+ }
+ case 26:
+ {
+ java.lang.String s = input.readStringRequireUtf8();
+ resultCase_ = 3;
+ result_ = s;
+ break;
+ }
+ case 34:
+ {
+ java.lang.String s = input.readStringRequireUtf8();
+ resultCase_ = 4;
+ result_ = s;
+ break;
+ }
+ default:
+ {
+ if (!parseUnknownField(input, unknownFields, extensionRegistry, tag)) {
+ done = true;
+ }
+ break;
+ }
+ }
+ }
+ } catch (com.google.protobuf.InvalidProtocolBufferException e) {
+ throw e.setUnfinishedMessage(this);
+ } catch (java.io.IOException e) {
+ throw new com.google.protobuf.InvalidProtocolBufferException(e).setUnfinishedMessage(this);
+ } finally {
+ this.unknownFields = unknownFields.build();
+ makeExtensionsImmutable();
+ }
+ }
+
+ public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
+ return com.google.cloud.aiplatform.v1beta1.MigrationServiceProto
+ .internal_static_google_cloud_aiplatform_v1beta1_BatchMigrateResourcesOperationMetadata_PartialResult_descriptor;
+ }
+
+ @java.lang.Override
+ protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
+ internalGetFieldAccessorTable() {
+ return com.google.cloud.aiplatform.v1beta1.MigrationServiceProto
+ .internal_static_google_cloud_aiplatform_v1beta1_BatchMigrateResourcesOperationMetadata_PartialResult_fieldAccessorTable
+ .ensureFieldAccessorsInitialized(
+ com.google.cloud.aiplatform.v1beta1.BatchMigrateResourcesOperationMetadata
+ .PartialResult.class,
+ com.google.cloud.aiplatform.v1beta1.BatchMigrateResourcesOperationMetadata
+ .PartialResult.Builder.class);
+ }
+
+ private int resultCase_ = 0;
+ private java.lang.Object result_;
+
+ public enum ResultCase
+ implements
+ com.google.protobuf.Internal.EnumLite,
+ com.google.protobuf.AbstractMessage.InternalOneOfEnum {
+ ERROR(2),
+ MODEL(3),
+ DATASET(4),
+ RESULT_NOT_SET(0);
+ private final int value;
+
+ private ResultCase(int value) {
+ this.value = value;
+ }
+ /**
+ * @param value The number of the enum to look for.
+ * @return The enum associated with the given number.
+ * @deprecated Use {@link #forNumber(int)} instead.
+ */
+ @java.lang.Deprecated
+ public static ResultCase valueOf(int value) {
+ return forNumber(value);
+ }
+
+ public static ResultCase forNumber(int value) {
+ switch (value) {
+ case 2:
+ return ERROR;
+ case 3:
+ return MODEL;
+ case 4:
+ return DATASET;
+ case 0:
+ return RESULT_NOT_SET;
+ default:
+ return null;
+ }
+ }
+
+ public int getNumber() {
+ return this.value;
+ }
+ };
+
+ public ResultCase getResultCase() {
+ return ResultCase.forNumber(resultCase_);
+ }
+
+ public static final int ERROR_FIELD_NUMBER = 2;
+ /**
+ *
+ *
+ *
+ * The error result of the migration request in case of failure.
+ *
+ *
+ * .google.rpc.Status error = 2;
+ *
+ * @return Whether the error field is set.
+ */
+ @java.lang.Override
+ public boolean hasError() {
+ return resultCase_ == 2;
+ }
+ /**
+ *
+ *
+ *
+ * The error result of the migration request in case of failure.
+ *
+ *
+ * .google.rpc.Status error = 2;
+ *
+ * @return The error.
+ */
+ @java.lang.Override
+ public com.google.rpc.Status getError() {
+ if (resultCase_ == 2) {
+ return (com.google.rpc.Status) result_;
+ }
+ return com.google.rpc.Status.getDefaultInstance();
+ }
+ /**
+ *
+ *
+ *
+ * The error result of the migration request in case of failure.
+ *
+ *
+ * .google.rpc.Status error = 2;
+ */
+ @java.lang.Override
+ public com.google.rpc.StatusOrBuilder getErrorOrBuilder() {
+ if (resultCase_ == 2) {
+ return (com.google.rpc.Status) result_;
+ }
+ return com.google.rpc.Status.getDefaultInstance();
+ }
+
+ public static final int MODEL_FIELD_NUMBER = 3;
+ /**
+ *
+ *
+ *
+ * Migrated model resource name.
+ *
+ *
+ * string model = 3 [(.google.api.resource_reference) = { ... }
+ *
+ * @return The model.
+ */
+ public java.lang.String getModel() {
+ java.lang.Object ref = "";
+ if (resultCase_ == 3) {
+ ref = result_;
+ }
+ if (ref instanceof java.lang.String) {
+ return (java.lang.String) ref;
+ } else {
+ com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
+ java.lang.String s = bs.toStringUtf8();
+ if (resultCase_ == 3) {
+ result_ = s;
+ }
+ return s;
+ }
+ }
+ /**
+ *
+ *
+ *
+ * Migrated model resource name.
+ *
+ *
+ * string model = 3 [(.google.api.resource_reference) = { ... }
+ *
+ * @return The bytes for model.
+ */
+ public com.google.protobuf.ByteString getModelBytes() {
+ java.lang.Object ref = "";
+ if (resultCase_ == 3) {
+ ref = result_;
+ }
+ if (ref instanceof java.lang.String) {
+ com.google.protobuf.ByteString b =
+ com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
+ if (resultCase_ == 3) {
+ result_ = b;
+ }
+ return b;
+ } else {
+ return (com.google.protobuf.ByteString) ref;
+ }
+ }
+
+ public static final int DATASET_FIELD_NUMBER = 4;
+ /**
+ *
+ *
+ *
+ * Migrated dataset resource name.
+ *
+ *
+ * string dataset = 4 [(.google.api.resource_reference) = { ... }
+ *
+ * @return The dataset.
+ */
+ public java.lang.String getDataset() {
+ java.lang.Object ref = "";
+ if (resultCase_ == 4) {
+ ref = result_;
+ }
+ if (ref instanceof java.lang.String) {
+ return (java.lang.String) ref;
+ } else {
+ com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
+ java.lang.String s = bs.toStringUtf8();
+ if (resultCase_ == 4) {
+ result_ = s;
+ }
+ return s;
+ }
+ }
+ /**
+ *
+ *
+ *
+ * Migrated dataset resource name.
+ *
+ *
+ * string dataset = 4 [(.google.api.resource_reference) = { ... }
+ *
+ * @return The bytes for dataset.
+ */
+ public com.google.protobuf.ByteString getDatasetBytes() {
+ java.lang.Object ref = "";
+ if (resultCase_ == 4) {
+ ref = result_;
+ }
+ if (ref instanceof java.lang.String) {
+ com.google.protobuf.ByteString b =
+ com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
+ if (resultCase_ == 4) {
+ result_ = b;
+ }
+ return b;
+ } else {
+ return (com.google.protobuf.ByteString) ref;
+ }
+ }
+
+ public static final int REQUEST_FIELD_NUMBER = 1;
+ private com.google.cloud.aiplatform.v1beta1.MigrateResourceRequest request_;
+ /**
+ *
+ *
+ *
+ * It's the same as the value in
+ * [MigrateResourceRequest.migrate_resource_requests][].
+ *
+ *
+ * .google.cloud.aiplatform.v1beta1.MigrateResourceRequest request = 1;
+ *
+ * @return Whether the request field is set.
+ */
+ @java.lang.Override
+ public boolean hasRequest() {
+ return request_ != null;
+ }
+ /**
+ *
+ *
+ *
+ * It's the same as the value in
+ * [MigrateResourceRequest.migrate_resource_requests][].
+ *
+ *
+ * .google.cloud.aiplatform.v1beta1.MigrateResourceRequest request = 1;
+ *
+ * @return The request.
+ */
+ @java.lang.Override
+ public com.google.cloud.aiplatform.v1beta1.MigrateResourceRequest getRequest() {
+ return request_ == null
+ ? com.google.cloud.aiplatform.v1beta1.MigrateResourceRequest.getDefaultInstance()
+ : request_;
+ }
+ /**
+ *
+ *
+ *
+ * It's the same as the value in
+ * [MigrateResourceRequest.migrate_resource_requests][].
+ *
+ *
+ * .google.cloud.aiplatform.v1beta1.MigrateResourceRequest request = 1;
+ */
+ @java.lang.Override
+ public com.google.cloud.aiplatform.v1beta1.MigrateResourceRequestOrBuilder
+ getRequestOrBuilder() {
+ return getRequest();
+ }
+
+ private byte memoizedIsInitialized = -1;
+
+ @java.lang.Override
+ public final boolean isInitialized() {
+ byte isInitialized = memoizedIsInitialized;
+ if (isInitialized == 1) return true;
+ if (isInitialized == 0) return false;
+
+ memoizedIsInitialized = 1;
+ return true;
+ }
+
+ @java.lang.Override
+ public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException {
+ if (request_ != null) {
+ output.writeMessage(1, getRequest());
+ }
+ if (resultCase_ == 2) {
+ output.writeMessage(2, (com.google.rpc.Status) result_);
+ }
+ if (resultCase_ == 3) {
+ com.google.protobuf.GeneratedMessageV3.writeString(output, 3, result_);
+ }
+ if (resultCase_ == 4) {
+ com.google.protobuf.GeneratedMessageV3.writeString(output, 4, result_);
+ }
+ unknownFields.writeTo(output);
+ }
+
+ @java.lang.Override
+ public int getSerializedSize() {
+ int size = memoizedSize;
+ if (size != -1) return size;
+
+ size = 0;
+ if (request_ != null) {
+ size += com.google.protobuf.CodedOutputStream.computeMessageSize(1, getRequest());
+ }
+ if (resultCase_ == 2) {
+ size +=
+ com.google.protobuf.CodedOutputStream.computeMessageSize(
+ 2, (com.google.rpc.Status) result_);
+ }
+ if (resultCase_ == 3) {
+ size += com.google.protobuf.GeneratedMessageV3.computeStringSize(3, result_);
+ }
+ if (resultCase_ == 4) {
+ size += com.google.protobuf.GeneratedMessageV3.computeStringSize(4, result_);
+ }
+ size += unknownFields.getSerializedSize();
+ memoizedSize = size;
+ return size;
+ }
+
+ @java.lang.Override
+ public boolean equals(final java.lang.Object obj) {
+ if (obj == this) {
+ return true;
+ }
+ if (!(obj
+ instanceof
+ com.google.cloud.aiplatform.v1beta1.BatchMigrateResourcesOperationMetadata
+ .PartialResult)) {
+ return super.equals(obj);
+ }
+ com.google.cloud.aiplatform.v1beta1.BatchMigrateResourcesOperationMetadata.PartialResult
+ other =
+ (com.google.cloud.aiplatform.v1beta1.BatchMigrateResourcesOperationMetadata
+ .PartialResult)
+ obj;
+
+ if (hasRequest() != other.hasRequest()) return false;
+ if (hasRequest()) {
+ if (!getRequest().equals(other.getRequest())) return false;
+ }
+ if (!getResultCase().equals(other.getResultCase())) return false;
+ switch (resultCase_) {
+ case 2:
+ if (!getError().equals(other.getError())) return false;
+ break;
+ case 3:
+ if (!getModel().equals(other.getModel())) return false;
+ break;
+ case 4:
+ if (!getDataset().equals(other.getDataset())) return false;
+ break;
+ case 0:
+ default:
+ }
+ if (!unknownFields.equals(other.unknownFields)) return false;
+ return true;
+ }
+
+ @java.lang.Override
+ public int hashCode() {
+ if (memoizedHashCode != 0) {
+ return memoizedHashCode;
+ }
+ int hash = 41;
+ hash = (19 * hash) + getDescriptor().hashCode();
+ if (hasRequest()) {
+ hash = (37 * hash) + REQUEST_FIELD_NUMBER;
+ hash = (53 * hash) + getRequest().hashCode();
+ }
+ switch (resultCase_) {
+ case 2:
+ hash = (37 * hash) + ERROR_FIELD_NUMBER;
+ hash = (53 * hash) + getError().hashCode();
+ break;
+ case 3:
+ hash = (37 * hash) + MODEL_FIELD_NUMBER;
+ hash = (53 * hash) + getModel().hashCode();
+ break;
+ case 4:
+ hash = (37 * hash) + DATASET_FIELD_NUMBER;
+ hash = (53 * hash) + getDataset().hashCode();
+ break;
+ case 0:
+ default:
+ }
+ hash = (29 * hash) + unknownFields.hashCode();
+ memoizedHashCode = hash;
+ return hash;
+ }
+
+ public static com.google.cloud.aiplatform.v1beta1.BatchMigrateResourcesOperationMetadata
+ .PartialResult
+ parseFrom(java.nio.ByteBuffer data)
+ throws com.google.protobuf.InvalidProtocolBufferException {
+ return PARSER.parseFrom(data);
+ }
+
+ public static com.google.cloud.aiplatform.v1beta1.BatchMigrateResourcesOperationMetadata
+ .PartialResult
+ parseFrom(
+ java.nio.ByteBuffer data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+ throws com.google.protobuf.InvalidProtocolBufferException {
+ return PARSER.parseFrom(data, extensionRegistry);
+ }
+
+ public static com.google.cloud.aiplatform.v1beta1.BatchMigrateResourcesOperationMetadata
+ .PartialResult
+ parseFrom(com.google.protobuf.ByteString data)
+ throws com.google.protobuf.InvalidProtocolBufferException {
+ return PARSER.parseFrom(data);
+ }
+
+ public static com.google.cloud.aiplatform.v1beta1.BatchMigrateResourcesOperationMetadata
+ .PartialResult
+ parseFrom(
+ com.google.protobuf.ByteString data,
+ com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+ throws com.google.protobuf.InvalidProtocolBufferException {
+ return PARSER.parseFrom(data, extensionRegistry);
+ }
+
+ public static com.google.cloud.aiplatform.v1beta1.BatchMigrateResourcesOperationMetadata
+ .PartialResult
+ parseFrom(byte[] data) throws com.google.protobuf.InvalidProtocolBufferException {
+ return PARSER.parseFrom(data);
+ }
+
+ public static com.google.cloud.aiplatform.v1beta1.BatchMigrateResourcesOperationMetadata
+ .PartialResult
+ parseFrom(byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+ throws com.google.protobuf.InvalidProtocolBufferException {
+ return PARSER.parseFrom(data, extensionRegistry);
+ }
+
+ public static com.google.cloud.aiplatform.v1beta1.BatchMigrateResourcesOperationMetadata
+ .PartialResult
+ parseFrom(java.io.InputStream input) throws java.io.IOException {
+ return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
+ }
+
+ public static com.google.cloud.aiplatform.v1beta1.BatchMigrateResourcesOperationMetadata
+ .PartialResult
+ parseFrom(
+ java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+ throws java.io.IOException {
+ return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
+ PARSER, input, extensionRegistry);
+ }
+
+ public static com.google.cloud.aiplatform.v1beta1.BatchMigrateResourcesOperationMetadata
+ .PartialResult
+ parseDelimitedFrom(java.io.InputStream input) throws java.io.IOException {
+ return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(PARSER, input);
+ }
+
+ public static com.google.cloud.aiplatform.v1beta1.BatchMigrateResourcesOperationMetadata
+ .PartialResult
+ parseDelimitedFrom(
+ java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+ throws java.io.IOException {
+ return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(
+ PARSER, input, extensionRegistry);
+ }
+
+ public static com.google.cloud.aiplatform.v1beta1.BatchMigrateResourcesOperationMetadata
+ .PartialResult
+ parseFrom(com.google.protobuf.CodedInputStream input) throws java.io.IOException {
+ return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
+ }
+
+ public static com.google.cloud.aiplatform.v1beta1.BatchMigrateResourcesOperationMetadata
+ .PartialResult
+ parseFrom(
+ com.google.protobuf.CodedInputStream input,
+ com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+ throws java.io.IOException {
+ return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
+ PARSER, input, extensionRegistry);
+ }
+
+ @java.lang.Override
+ public Builder newBuilderForType() {
+ return newBuilder();
+ }
+
+ public static Builder newBuilder() {
+ return DEFAULT_INSTANCE.toBuilder();
+ }
+
+ public static Builder newBuilder(
+ com.google.cloud.aiplatform.v1beta1.BatchMigrateResourcesOperationMetadata.PartialResult
+ prototype) {
+ return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
+ }
+
+ @java.lang.Override
+ public Builder toBuilder() {
+ return this == DEFAULT_INSTANCE ? new Builder() : new Builder().mergeFrom(this);
+ }
+
+ @java.lang.Override
+ protected Builder newBuilderForType(
+ com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
+ Builder builder = new Builder(parent);
+ return builder;
+ }
+ /**
+ *
+ *
+ *
+     * Represents a partial result in a batch migration operation for one
+ * [MigrateResourceRequest][google.cloud.aiplatform.v1beta1.MigrateResourceRequest].
+ *
+ *
+ * Protobuf type {@code
+ * google.cloud.aiplatform.v1beta1.BatchMigrateResourcesOperationMetadata.PartialResult}
+ */
+ public static final class Builder
+ extends com.google.protobuf.GeneratedMessageV3.Builder
+ * The error result of the migration request in case of failure.
+ *
+ *
+ * .google.rpc.Status error = 2;
+ *
+ * @return Whether the error field is set.
+ */
+ @java.lang.Override
+ public boolean hasError() {
+ return resultCase_ == 2;
+ }
+ /**
+ *
+ *
+ *
+ * The error result of the migration request in case of failure.
+ *
+ *
+ * .google.rpc.Status error = 2;
+ *
+ * @return The error.
+ */
+ @java.lang.Override
+ public com.google.rpc.Status getError() {
+ if (errorBuilder_ == null) {
+ if (resultCase_ == 2) {
+ return (com.google.rpc.Status) result_;
+ }
+ return com.google.rpc.Status.getDefaultInstance();
+ } else {
+ if (resultCase_ == 2) {
+ return errorBuilder_.getMessage();
+ }
+ return com.google.rpc.Status.getDefaultInstance();
+ }
+ }
+ /**
+ *
+ *
+ *
+ * The error result of the migration request in case of failure.
+ *
+ *
+ * .google.rpc.Status error = 2;
+ */
+ public Builder setError(com.google.rpc.Status value) {
+ if (errorBuilder_ == null) {
+ if (value == null) {
+ throw new NullPointerException();
+ }
+ result_ = value;
+ onChanged();
+ } else {
+ errorBuilder_.setMessage(value);
+ }
+ resultCase_ = 2;
+ return this;
+ }
+ /**
+ *
+ *
+ *
+ * The error result of the migration request in case of failure.
+ *
+ *
+ * .google.rpc.Status error = 2;
+ */
+ public Builder setError(com.google.rpc.Status.Builder builderForValue) {
+ if (errorBuilder_ == null) {
+ result_ = builderForValue.build();
+ onChanged();
+ } else {
+ errorBuilder_.setMessage(builderForValue.build());
+ }
+ resultCase_ = 2;
+ return this;
+ }
+ /**
+ *
+ *
+ *
+ * The error result of the migration request in case of failure.
+ *
+ *
+ * .google.rpc.Status error = 2;
+ */
+ public Builder mergeError(com.google.rpc.Status value) {
+ if (errorBuilder_ == null) {
+ if (resultCase_ == 2 && result_ != com.google.rpc.Status.getDefaultInstance()) {
+ result_ =
+ com.google.rpc.Status.newBuilder((com.google.rpc.Status) result_)
+ .mergeFrom(value)
+ .buildPartial();
+ } else {
+ result_ = value;
+ }
+ onChanged();
+ } else {
+ if (resultCase_ == 2) {
+ errorBuilder_.mergeFrom(value);
+ }
+ errorBuilder_.setMessage(value);
+ }
+ resultCase_ = 2;
+ return this;
+ }
+ /**
+ *
+ *
+ *
+ * The error result of the migration request in case of failure.
+ *
+ *
+ * .google.rpc.Status error = 2;
+ */
+ public Builder clearError() {
+ if (errorBuilder_ == null) {
+ if (resultCase_ == 2) {
+ resultCase_ = 0;
+ result_ = null;
+ onChanged();
+ }
+ } else {
+ if (resultCase_ == 2) {
+ resultCase_ = 0;
+ result_ = null;
+ }
+ errorBuilder_.clear();
+ }
+ return this;
+ }
+ /**
+ *
+ *
+ *
+ * The error result of the migration request in case of failure.
+ *
+ *
+ * .google.rpc.Status error = 2;
+ */
+ public com.google.rpc.Status.Builder getErrorBuilder() {
+ return getErrorFieldBuilder().getBuilder();
+ }
+ /**
+ *
+ *
+ *
+ * The error result of the migration request in case of failure.
+ *
+ *
+ * .google.rpc.Status error = 2;
+ */
+ @java.lang.Override
+ public com.google.rpc.StatusOrBuilder getErrorOrBuilder() {
+ if ((resultCase_ == 2) && (errorBuilder_ != null)) {
+ return errorBuilder_.getMessageOrBuilder();
+ } else {
+ if (resultCase_ == 2) {
+ return (com.google.rpc.Status) result_;
+ }
+ return com.google.rpc.Status.getDefaultInstance();
+ }
+ }
+ /**
+ *
+ *
+ *
+ * The error result of the migration request in case of failure.
+ *
+ *
+ * .google.rpc.Status error = 2;
+ */
+ private com.google.protobuf.SingleFieldBuilderV3<
+ com.google.rpc.Status, com.google.rpc.Status.Builder, com.google.rpc.StatusOrBuilder>
+ getErrorFieldBuilder() {
+ if (errorBuilder_ == null) {
+ if (!(resultCase_ == 2)) {
+ result_ = com.google.rpc.Status.getDefaultInstance();
+ }
+ errorBuilder_ =
+ new com.google.protobuf.SingleFieldBuilderV3<
+ com.google.rpc.Status,
+ com.google.rpc.Status.Builder,
+ com.google.rpc.StatusOrBuilder>(
+ (com.google.rpc.Status) result_, getParentForChildren(), isClean());
+ result_ = null;
+ }
+ resultCase_ = 2;
+ onChanged();
+ ;
+ return errorBuilder_;
+ }
+
+ /**
+ *
+ *
+ *
+ * Migrated model resource name.
+ *
+ *
+ * string model = 3 [(.google.api.resource_reference) = { ... }
+ *
+ * @return The model.
+ */
+ @java.lang.Override
+ public java.lang.String getModel() {
+ java.lang.Object ref = "";
+ if (resultCase_ == 3) {
+ ref = result_;
+ }
+ if (!(ref instanceof java.lang.String)) {
+ com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
+ java.lang.String s = bs.toStringUtf8();
+ if (resultCase_ == 3) {
+ result_ = s;
+ }
+ return s;
+ } else {
+ return (java.lang.String) ref;
+ }
+ }
+ /**
+ *
+ *
+ *
+ * Migrated model resource name.
+ *
+ *
+ * string model = 3 [(.google.api.resource_reference) = { ... }
+ *
+ * @return The bytes for model.
+ */
+ @java.lang.Override
+ public com.google.protobuf.ByteString getModelBytes() {
+ java.lang.Object ref = "";
+ if (resultCase_ == 3) {
+ ref = result_;
+ }
+ if (ref instanceof String) {
+ com.google.protobuf.ByteString b =
+ com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
+ if (resultCase_ == 3) {
+ result_ = b;
+ }
+ return b;
+ } else {
+ return (com.google.protobuf.ByteString) ref;
+ }
+ }
+ /**
+ *
+ *
+ *
+ * Migrated model resource name.
+ *
+ *
+ * string model = 3 [(.google.api.resource_reference) = { ... }
+ *
+ * @param value The model to set.
+ * @return This builder for chaining.
+ */
+ public Builder setModel(java.lang.String value) {
+ if (value == null) {
+ throw new NullPointerException();
+ }
+ resultCase_ = 3;
+ result_ = value;
+ onChanged();
+ return this;
+ }
+ /**
+ *
+ *
+ *
+ * Migrated model resource name.
+ *
+ *
+ * string model = 3 [(.google.api.resource_reference) = { ... }
+ *
+ * @return This builder for chaining.
+ */
+ public Builder clearModel() {
+ if (resultCase_ == 3) {
+ resultCase_ = 0;
+ result_ = null;
+ onChanged();
+ }
+ return this;
+ }
+ /**
+ *
+ *
+ *
+ * Migrated model resource name.
+ *
+ *
+ * string model = 3 [(.google.api.resource_reference) = { ... }
+ *
+ * @param value The bytes for model to set.
+ * @return This builder for chaining.
+ */
+ public Builder setModelBytes(com.google.protobuf.ByteString value) {
+ if (value == null) {
+ throw new NullPointerException();
+ }
+ checkByteStringIsUtf8(value);
+ resultCase_ = 3;
+ result_ = value;
+ onChanged();
+ return this;
+ }
+
+ /**
+ *
+ *
+ *
+ * Migrated dataset resource name.
+ *
+ *
+ * string dataset = 4 [(.google.api.resource_reference) = { ... }
+ *
+ * @return The dataset.
+ */
+ @java.lang.Override
+ public java.lang.String getDataset() {
+ java.lang.Object ref = "";
+ if (resultCase_ == 4) {
+ ref = result_;
+ }
+ if (!(ref instanceof java.lang.String)) {
+ com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
+ java.lang.String s = bs.toStringUtf8();
+ if (resultCase_ == 4) {
+ result_ = s;
+ }
+ return s;
+ } else {
+ return (java.lang.String) ref;
+ }
+ }
+ /**
+ *
+ *
+ *
+ * Migrated dataset resource name.
+ *
+ *
+ * string dataset = 4 [(.google.api.resource_reference) = { ... }
+ *
+ * @return The bytes for dataset.
+ */
+ @java.lang.Override
+ public com.google.protobuf.ByteString getDatasetBytes() {
+ java.lang.Object ref = "";
+ if (resultCase_ == 4) {
+ ref = result_;
+ }
+ if (ref instanceof String) {
+ com.google.protobuf.ByteString b =
+ com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
+ if (resultCase_ == 4) {
+ result_ = b;
+ }
+ return b;
+ } else {
+ return (com.google.protobuf.ByteString) ref;
+ }
+ }
+ /**
+ *
+ *
+ *
+ * Migrated dataset resource name.
+ *
+ *
+ * string dataset = 4 [(.google.api.resource_reference) = { ... }
+ *
+ * @param value The dataset to set.
+ * @return This builder for chaining.
+ */
+ public Builder setDataset(java.lang.String value) {
+ if (value == null) {
+ throw new NullPointerException();
+ }
+ resultCase_ = 4;
+ result_ = value;
+ onChanged();
+ return this;
+ }
+ /**
+ *
+ *
+ *
+ * Migrated dataset resource name.
+ *
+ *
+ * string dataset = 4 [(.google.api.resource_reference) = { ... }
+ *
+ * @return This builder for chaining.
+ */
+ public Builder clearDataset() {
+ if (resultCase_ == 4) {
+ resultCase_ = 0;
+ result_ = null;
+ onChanged();
+ }
+ return this;
+ }
+ /**
+ *
+ *
+ *
+ * Migrated dataset resource name.
+ *
+ *
+ * string dataset = 4 [(.google.api.resource_reference) = { ... }
+ *
+ * @param value The bytes for dataset to set.
+ * @return This builder for chaining.
+ */
+ public Builder setDatasetBytes(com.google.protobuf.ByteString value) {
+ if (value == null) {
+ throw new NullPointerException();
+ }
+ checkByteStringIsUtf8(value);
+ resultCase_ = 4;
+ result_ = value;
+ onChanged();
+ return this;
+ }
+
+ private com.google.cloud.aiplatform.v1beta1.MigrateResourceRequest request_;
+ private com.google.protobuf.SingleFieldBuilderV3<
+ com.google.cloud.aiplatform.v1beta1.MigrateResourceRequest,
+ com.google.cloud.aiplatform.v1beta1.MigrateResourceRequest.Builder,
+ com.google.cloud.aiplatform.v1beta1.MigrateResourceRequestOrBuilder>
+ requestBuilder_;
+ /**
+ *
+ *
+ *
+ * It's the same as the value in
+        * [BatchMigrateResourcesRequest.migrate_resource_requests][].
+ *
+ *
+ * .google.cloud.aiplatform.v1beta1.MigrateResourceRequest request = 1;
+ *
+ * @return Whether the request field is set.
+ */
+ public boolean hasRequest() {
+ return requestBuilder_ != null || request_ != null;
+ }
+ /**
+ *
+ *
+ *
+ * It's the same as the value in
+        * [BatchMigrateResourcesRequest.migrate_resource_requests][].
+ *
+ *
+ * .google.cloud.aiplatform.v1beta1.MigrateResourceRequest request = 1;
+ *
+ * @return The request.
+ */
+ public com.google.cloud.aiplatform.v1beta1.MigrateResourceRequest getRequest() {
+ if (requestBuilder_ == null) {
+ return request_ == null
+ ? com.google.cloud.aiplatform.v1beta1.MigrateResourceRequest.getDefaultInstance()
+ : request_;
+ } else {
+ return requestBuilder_.getMessage();
+ }
+ }
+ /**
+ *
+ *
+ *
+ * It's the same as the value in
+        * [BatchMigrateResourcesRequest.migrate_resource_requests][].
+ *
+ *
+ * .google.cloud.aiplatform.v1beta1.MigrateResourceRequest request = 1;
+ */
+ public Builder setRequest(com.google.cloud.aiplatform.v1beta1.MigrateResourceRequest value) {
+ if (requestBuilder_ == null) {
+ if (value == null) {
+ throw new NullPointerException();
+ }
+ request_ = value;
+ onChanged();
+ } else {
+ requestBuilder_.setMessage(value);
+ }
+
+ return this;
+ }
+ /**
+ *
+ *
+ *
+ * It's the same as the value in
+        * [BatchMigrateResourcesRequest.migrate_resource_requests][].
+ *
+ *
+ * .google.cloud.aiplatform.v1beta1.MigrateResourceRequest request = 1;
+ */
+ public Builder setRequest(
+ com.google.cloud.aiplatform.v1beta1.MigrateResourceRequest.Builder builderForValue) {
+ if (requestBuilder_ == null) {
+ request_ = builderForValue.build();
+ onChanged();
+ } else {
+ requestBuilder_.setMessage(builderForValue.build());
+ }
+
+ return this;
+ }
+ /**
+ *
+ *
+ *
+ * It's the same as the value in
+        * [BatchMigrateResourcesRequest.migrate_resource_requests][].
+ *
+ *
+ * .google.cloud.aiplatform.v1beta1.MigrateResourceRequest request = 1;
+ */
+ public Builder mergeRequest(
+ com.google.cloud.aiplatform.v1beta1.MigrateResourceRequest value) {
+ if (requestBuilder_ == null) {
+ if (request_ != null) {
+ request_ =
+ com.google.cloud.aiplatform.v1beta1.MigrateResourceRequest.newBuilder(request_)
+ .mergeFrom(value)
+ .buildPartial();
+ } else {
+ request_ = value;
+ }
+ onChanged();
+ } else {
+ requestBuilder_.mergeFrom(value);
+ }
+
+ return this;
+ }
+ /**
+ *
+ *
+ *
+ * It's the same as the value in
+        * [BatchMigrateResourcesRequest.migrate_resource_requests][].
+ *
+ *
+ * .google.cloud.aiplatform.v1beta1.MigrateResourceRequest request = 1;
+ */
+ public Builder clearRequest() {
+ if (requestBuilder_ == null) {
+ request_ = null;
+ onChanged();
+ } else {
+ request_ = null;
+ requestBuilder_ = null;
+ }
+
+ return this;
+ }
+ /**
+ *
+ *
+ *
+ * It's the same as the value in
+        * [BatchMigrateResourcesRequest.migrate_resource_requests][].
+ *
+ *
+ * .google.cloud.aiplatform.v1beta1.MigrateResourceRequest request = 1;
+ */
+ public com.google.cloud.aiplatform.v1beta1.MigrateResourceRequest.Builder
+ getRequestBuilder() {
+
+ onChanged();
+ return getRequestFieldBuilder().getBuilder();
+ }
+ /**
+ *
+ *
+ *
+ * It's the same as the value in
+        * [BatchMigrateResourcesRequest.migrate_resource_requests][].
+ *
+ *
+ * .google.cloud.aiplatform.v1beta1.MigrateResourceRequest request = 1;
+ */
+ public com.google.cloud.aiplatform.v1beta1.MigrateResourceRequestOrBuilder
+ getRequestOrBuilder() {
+ if (requestBuilder_ != null) {
+ return requestBuilder_.getMessageOrBuilder();
+ } else {
+ return request_ == null
+ ? com.google.cloud.aiplatform.v1beta1.MigrateResourceRequest.getDefaultInstance()
+ : request_;
+ }
+ }
+ /**
+ *
+ *
+ *
+ * It's the same as the value in
+        * [BatchMigrateResourcesRequest.migrate_resource_requests][].
+ *
+ *
+ * .google.cloud.aiplatform.v1beta1.MigrateResourceRequest request = 1;
+ */
+ private com.google.protobuf.SingleFieldBuilderV3<
+ com.google.cloud.aiplatform.v1beta1.MigrateResourceRequest,
+ com.google.cloud.aiplatform.v1beta1.MigrateResourceRequest.Builder,
+ com.google.cloud.aiplatform.v1beta1.MigrateResourceRequestOrBuilder>
+ getRequestFieldBuilder() {
+ if (requestBuilder_ == null) {
+ requestBuilder_ =
+ new com.google.protobuf.SingleFieldBuilderV3<
+ com.google.cloud.aiplatform.v1beta1.MigrateResourceRequest,
+ com.google.cloud.aiplatform.v1beta1.MigrateResourceRequest.Builder,
+ com.google.cloud.aiplatform.v1beta1.MigrateResourceRequestOrBuilder>(
+ getRequest(), getParentForChildren(), isClean());
+ request_ = null;
+ }
+ return requestBuilder_;
+ }
+
+ @java.lang.Override
+ public final Builder setUnknownFields(
+ final com.google.protobuf.UnknownFieldSet unknownFields) {
+ return super.setUnknownFields(unknownFields);
+ }
+
+ @java.lang.Override
+ public final Builder mergeUnknownFields(
+ final com.google.protobuf.UnknownFieldSet unknownFields) {
+ return super.mergeUnknownFields(unknownFields);
+ }
+
+ // @@protoc_insertion_point(builder_scope:google.cloud.aiplatform.v1beta1.BatchMigrateResourcesOperationMetadata.PartialResult)
+ }
+
+ // @@protoc_insertion_point(class_scope:google.cloud.aiplatform.v1beta1.BatchMigrateResourcesOperationMetadata.PartialResult)
+ private static final com.google.cloud.aiplatform.v1beta1.BatchMigrateResourcesOperationMetadata
+ .PartialResult
+ DEFAULT_INSTANCE;
+
+ static {
+ DEFAULT_INSTANCE =
+ new com.google.cloud.aiplatform.v1beta1.BatchMigrateResourcesOperationMetadata
+ .PartialResult();
+ }
+
+ public static com.google.cloud.aiplatform.v1beta1.BatchMigrateResourcesOperationMetadata
+ .PartialResult
+ getDefaultInstance() {
+ return DEFAULT_INSTANCE;
+ }
+
+ private static final com.google.protobuf.Parser
+ * The common part of the operation metadata.
+ *
+ *
+ * .google.cloud.aiplatform.v1beta1.GenericOperationMetadata generic_metadata = 1;
+ *
+ * @return Whether the genericMetadata field is set.
+ */
+ @java.lang.Override
+ public boolean hasGenericMetadata() {
+ return genericMetadata_ != null;
+ }
+ /**
+ *
+ *
+ *
+ * The common part of the operation metadata.
+ *
+ *
+ * .google.cloud.aiplatform.v1beta1.GenericOperationMetadata generic_metadata = 1;
+ *
+ * @return The genericMetadata.
+ */
+ @java.lang.Override
+ public com.google.cloud.aiplatform.v1beta1.GenericOperationMetadata getGenericMetadata() {
+ return genericMetadata_ == null
+ ? com.google.cloud.aiplatform.v1beta1.GenericOperationMetadata.getDefaultInstance()
+ : genericMetadata_;
+ }
+ /**
+ *
+ *
+ *
+ * The common part of the operation metadata.
+ *
+ *
+ * .google.cloud.aiplatform.v1beta1.GenericOperationMetadata generic_metadata = 1;
+ */
+ @java.lang.Override
+ public com.google.cloud.aiplatform.v1beta1.GenericOperationMetadataOrBuilder
+ getGenericMetadataOrBuilder() {
+ return getGenericMetadata();
+ }
+
+ public static final int PARTIAL_RESULTS_FIELD_NUMBER = 2;
+ private java.util.List<
+ com.google.cloud.aiplatform.v1beta1.BatchMigrateResourcesOperationMetadata.PartialResult>
+ partialResults_;
/**
*
*
*
- * The common part of the operation metadata.
+   * Partial results that reflect the latest migration operation progress.
*
*
- * .google.cloud.aiplatform.v1beta1.GenericOperationMetadata generic_metadata = 1;
+ *
+ * repeated .google.cloud.aiplatform.v1beta1.BatchMigrateResourcesOperationMetadata.PartialResult partial_results = 2;
+ *
+ */
+ @java.lang.Override
+ public java.util.List<
+ com.google.cloud.aiplatform.v1beta1.BatchMigrateResourcesOperationMetadata.PartialResult>
+ getPartialResultsList() {
+ return partialResults_;
+ }
+ /**
*
- * @return Whether the genericMetadata field is set.
+ *
+ *
+   * Partial results that reflect the latest migration operation progress.
+ *
+ *
+ *
+ * repeated .google.cloud.aiplatform.v1beta1.BatchMigrateResourcesOperationMetadata.PartialResult partial_results = 2;
+ *
*/
@java.lang.Override
- public boolean hasGenericMetadata() {
- return genericMetadata_ != null;
+ public java.util.List<
+ ? extends
+ com.google.cloud.aiplatform.v1beta1.BatchMigrateResourcesOperationMetadata
+ .PartialResultOrBuilder>
+ getPartialResultsOrBuilderList() {
+ return partialResults_;
}
/**
*
*
*
- * The common part of the operation metadata.
+   * Partial results that reflect the latest migration operation progress.
*
*
- * .google.cloud.aiplatform.v1beta1.GenericOperationMetadata generic_metadata = 1;
+ *
+ * repeated .google.cloud.aiplatform.v1beta1.BatchMigrateResourcesOperationMetadata.PartialResult partial_results = 2;
+ *
+ */
+ @java.lang.Override
+ public int getPartialResultsCount() {
+ return partialResults_.size();
+ }
+ /**
*
- * @return The genericMetadata.
+ *
+ *
+   * Partial results that reflect the latest migration operation progress.
+ *
+ *
+ *
+ * repeated .google.cloud.aiplatform.v1beta1.BatchMigrateResourcesOperationMetadata.PartialResult partial_results = 2;
+ *
*/
@java.lang.Override
- public com.google.cloud.aiplatform.v1beta1.GenericOperationMetadata getGenericMetadata() {
- return genericMetadata_ == null
- ? com.google.cloud.aiplatform.v1beta1.GenericOperationMetadata.getDefaultInstance()
- : genericMetadata_;
+ public com.google.cloud.aiplatform.v1beta1.BatchMigrateResourcesOperationMetadata.PartialResult
+ getPartialResults(int index) {
+ return partialResults_.get(index);
}
/**
*
*
*
- * The common part of the operation metadata.
+   * Partial results that reflect the latest migration operation progress.
*
*
- * .google.cloud.aiplatform.v1beta1.GenericOperationMetadata generic_metadata = 1;
+ *
+ * repeated .google.cloud.aiplatform.v1beta1.BatchMigrateResourcesOperationMetadata.PartialResult partial_results = 2;
+ *
*/
@java.lang.Override
- public com.google.cloud.aiplatform.v1beta1.GenericOperationMetadataOrBuilder
- getGenericMetadataOrBuilder() {
- return getGenericMetadata();
+ public com.google.cloud.aiplatform.v1beta1.BatchMigrateResourcesOperationMetadata
+ .PartialResultOrBuilder
+ getPartialResultsOrBuilder(int index) {
+ return partialResults_.get(index);
}
private byte memoizedIsInitialized = -1;
@@ -189,6 +2019,9 @@ public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io
if (genericMetadata_ != null) {
output.writeMessage(1, getGenericMetadata());
}
+ for (int i = 0; i < partialResults_.size(); i++) {
+ output.writeMessage(2, partialResults_.get(i));
+ }
unknownFields.writeTo(output);
}
@@ -201,6 +2034,9 @@ public int getSerializedSize() {
if (genericMetadata_ != null) {
size += com.google.protobuf.CodedOutputStream.computeMessageSize(1, getGenericMetadata());
}
+ for (int i = 0; i < partialResults_.size(); i++) {
+ size += com.google.protobuf.CodedOutputStream.computeMessageSize(2, partialResults_.get(i));
+ }
size += unknownFields.getSerializedSize();
memoizedSize = size;
return size;
@@ -222,6 +2058,7 @@ public boolean equals(final java.lang.Object obj) {
if (hasGenericMetadata()) {
if (!getGenericMetadata().equals(other.getGenericMetadata())) return false;
}
+ if (!getPartialResultsList().equals(other.getPartialResultsList())) return false;
if (!unknownFields.equals(other.unknownFields)) return false;
return true;
}
@@ -237,6 +2074,10 @@ public int hashCode() {
hash = (37 * hash) + GENERIC_METADATA_FIELD_NUMBER;
hash = (53 * hash) + getGenericMetadata().hashCode();
}
+ if (getPartialResultsCount() > 0) {
+ hash = (37 * hash) + PARTIAL_RESULTS_FIELD_NUMBER;
+ hash = (53 * hash) + getPartialResultsList().hashCode();
+ }
hash = (29 * hash) + unknownFields.hashCode();
memoizedHashCode = hash;
return hash;
@@ -385,7 +2226,9 @@ private Builder(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
}
private void maybeForceBuilderInitialization() {
- if (com.google.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders) {}
+ if (com.google.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders) {
+ getPartialResultsFieldBuilder();
+ }
}
@java.lang.Override
@@ -397,6 +2240,12 @@ public Builder clear() {
genericMetadata_ = null;
genericMetadataBuilder_ = null;
}
+ if (partialResultsBuilder_ == null) {
+ partialResults_ = java.util.Collections.emptyList();
+ bitField0_ = (bitField0_ & ~0x00000001);
+ } else {
+ partialResultsBuilder_.clear();
+ }
return this;
}
@@ -428,11 +2277,21 @@ public com.google.cloud.aiplatform.v1beta1.BatchMigrateResourcesOperationMetadat
buildPartial() {
com.google.cloud.aiplatform.v1beta1.BatchMigrateResourcesOperationMetadata result =
new com.google.cloud.aiplatform.v1beta1.BatchMigrateResourcesOperationMetadata(this);
+ int from_bitField0_ = bitField0_;
if (genericMetadataBuilder_ == null) {
result.genericMetadata_ = genericMetadata_;
} else {
result.genericMetadata_ = genericMetadataBuilder_.build();
}
+ if (partialResultsBuilder_ == null) {
+ if (((bitField0_ & 0x00000001) != 0)) {
+ partialResults_ = java.util.Collections.unmodifiableList(partialResults_);
+ bitField0_ = (bitField0_ & ~0x00000001);
+ }
+ result.partialResults_ = partialResults_;
+ } else {
+ result.partialResults_ = partialResultsBuilder_.build();
+ }
onBuilt();
return result;
}
@@ -490,6 +2349,33 @@ public Builder mergeFrom(
if (other.hasGenericMetadata()) {
mergeGenericMetadata(other.getGenericMetadata());
}
+ if (partialResultsBuilder_ == null) {
+ if (!other.partialResults_.isEmpty()) {
+ if (partialResults_.isEmpty()) {
+ partialResults_ = other.partialResults_;
+ bitField0_ = (bitField0_ & ~0x00000001);
+ } else {
+ ensurePartialResultsIsMutable();
+ partialResults_.addAll(other.partialResults_);
+ }
+ onChanged();
+ }
+ } else {
+ if (!other.partialResults_.isEmpty()) {
+ if (partialResultsBuilder_.isEmpty()) {
+ partialResultsBuilder_.dispose();
+ partialResultsBuilder_ = null;
+ partialResults_ = other.partialResults_;
+ bitField0_ = (bitField0_ & ~0x00000001);
+ partialResultsBuilder_ =
+ com.google.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders
+ ? getPartialResultsFieldBuilder()
+ : null;
+ } else {
+ partialResultsBuilder_.addAllMessages(other.partialResults_);
+ }
+ }
+ }
this.mergeUnknownFields(other.unknownFields);
onChanged();
return this;
@@ -522,6 +2408,8 @@ public Builder mergeFrom(
return this;
}
+ private int bitField0_;
+
private com.google.cloud.aiplatform.v1beta1.GenericOperationMetadata genericMetadata_;
private com.google.protobuf.SingleFieldBuilderV3<
com.google.cloud.aiplatform.v1beta1.GenericOperationMetadata,
@@ -713,6 +2601,451 @@ public Builder clearGenericMetadata() {
return genericMetadataBuilder_;
}
+ private java.util.List<
+ com.google.cloud.aiplatform.v1beta1.BatchMigrateResourcesOperationMetadata
+ .PartialResult>
+ partialResults_ = java.util.Collections.emptyList();
+
+ private void ensurePartialResultsIsMutable() {
+ if (!((bitField0_ & 0x00000001) != 0)) {
+ partialResults_ =
+ new java.util.ArrayList<
+ com.google.cloud.aiplatform.v1beta1.BatchMigrateResourcesOperationMetadata
+ .PartialResult>(partialResults_);
+ bitField0_ |= 0x00000001;
+ }
+ }
+
+ private com.google.protobuf.RepeatedFieldBuilderV3<
+ com.google.cloud.aiplatform.v1beta1.BatchMigrateResourcesOperationMetadata
+ .PartialResult,
+ com.google.cloud.aiplatform.v1beta1.BatchMigrateResourcesOperationMetadata.PartialResult
+ .Builder,
+ com.google.cloud.aiplatform.v1beta1.BatchMigrateResourcesOperationMetadata
+ .PartialResultOrBuilder>
+ partialResultsBuilder_;
+
+ /**
+ *
+ *
+ *
+       * Partial results that reflect the latest migration operation progress.
+ *
+ *
+ *
+ * repeated .google.cloud.aiplatform.v1beta1.BatchMigrateResourcesOperationMetadata.PartialResult partial_results = 2;
+ *
+ */
+ public java.util.List<
+ com.google.cloud.aiplatform.v1beta1.BatchMigrateResourcesOperationMetadata
+ .PartialResult>
+ getPartialResultsList() {
+ if (partialResultsBuilder_ == null) {
+ return java.util.Collections.unmodifiableList(partialResults_);
+ } else {
+ return partialResultsBuilder_.getMessageList();
+ }
+ }
+ /**
+ *
+ *
+ *
+       * Partial results that reflect the latest migration operation progress.
+ *
+ *
+ *
+ * repeated .google.cloud.aiplatform.v1beta1.BatchMigrateResourcesOperationMetadata.PartialResult partial_results = 2;
+ *
+ */
+ public int getPartialResultsCount() {
+ if (partialResultsBuilder_ == null) {
+ return partialResults_.size();
+ } else {
+ return partialResultsBuilder_.getCount();
+ }
+ }
+ /**
+ *
+ *
+ *
+       * Partial results that reflect the latest migration operation progress.
+ *
+ *
+ *
+ * repeated .google.cloud.aiplatform.v1beta1.BatchMigrateResourcesOperationMetadata.PartialResult partial_results = 2;
+ *
+ */
+ public com.google.cloud.aiplatform.v1beta1.BatchMigrateResourcesOperationMetadata.PartialResult
+ getPartialResults(int index) {
+ if (partialResultsBuilder_ == null) {
+ return partialResults_.get(index);
+ } else {
+ return partialResultsBuilder_.getMessage(index);
+ }
+ }
+ /**
+ *
+ *
+ *
+       * Partial results that reflect the latest migration operation progress.
+ *
+ *
+ *
+ * repeated .google.cloud.aiplatform.v1beta1.BatchMigrateResourcesOperationMetadata.PartialResult partial_results = 2;
+ *
+ */
+ public Builder setPartialResults(
+ int index,
+ com.google.cloud.aiplatform.v1beta1.BatchMigrateResourcesOperationMetadata.PartialResult
+ value) {
+ if (partialResultsBuilder_ == null) {
+ if (value == null) {
+ throw new NullPointerException();
+ }
+ ensurePartialResultsIsMutable();
+ partialResults_.set(index, value);
+ onChanged();
+ } else {
+ partialResultsBuilder_.setMessage(index, value);
+ }
+ return this;
+ }
+ /**
+ *
+ *
+ *
+       * Partial results that reflect the latest migration operation progress.
+ *
+ *
+ *
+ * repeated .google.cloud.aiplatform.v1beta1.BatchMigrateResourcesOperationMetadata.PartialResult partial_results = 2;
+ *
+ */
+ public Builder setPartialResults(
+ int index,
+ com.google.cloud.aiplatform.v1beta1.BatchMigrateResourcesOperationMetadata.PartialResult
+ .Builder
+ builderForValue) {
+ if (partialResultsBuilder_ == null) {
+ ensurePartialResultsIsMutable();
+ partialResults_.set(index, builderForValue.build());
+ onChanged();
+ } else {
+ partialResultsBuilder_.setMessage(index, builderForValue.build());
+ }
+ return this;
+ }
+ /**
+ *
+ *
+ *
+       * Partial results that reflect the latest migration operation progress.
+ *
+ *
+ *
+ * repeated .google.cloud.aiplatform.v1beta1.BatchMigrateResourcesOperationMetadata.PartialResult partial_results = 2;
+ *
+ */
+ public Builder addPartialResults(
+ com.google.cloud.aiplatform.v1beta1.BatchMigrateResourcesOperationMetadata.PartialResult
+ value) {
+ if (partialResultsBuilder_ == null) {
+ if (value == null) {
+ throw new NullPointerException();
+ }
+ ensurePartialResultsIsMutable();
+ partialResults_.add(value);
+ onChanged();
+ } else {
+ partialResultsBuilder_.addMessage(value);
+ }
+ return this;
+ }
+ /**
+ *
+ *
+ *
+ * Partial results that reflects the latest migration operation progress.
+ *
+ *
+ *
+ * repeated .google.cloud.aiplatform.v1beta1.BatchMigrateResourcesOperationMetadata.PartialResult partial_results = 2;
+ *
+ */
+ public Builder addPartialResults(
+ int index,
+ com.google.cloud.aiplatform.v1beta1.BatchMigrateResourcesOperationMetadata.PartialResult
+ value) {
+ if (partialResultsBuilder_ == null) {
+ if (value == null) {
+ throw new NullPointerException();
+ }
+ ensurePartialResultsIsMutable();
+ partialResults_.add(index, value);
+ onChanged();
+ } else {
+ partialResultsBuilder_.addMessage(index, value);
+ }
+ return this;
+ }
+ /**
+ *
+ *
+ *
+ * Partial results that reflects the latest migration operation progress.
+ *
+ *
+ *
+ * repeated .google.cloud.aiplatform.v1beta1.BatchMigrateResourcesOperationMetadata.PartialResult partial_results = 2;
+ *
+ */
+ public Builder addPartialResults(
+ com.google.cloud.aiplatform.v1beta1.BatchMigrateResourcesOperationMetadata.PartialResult
+ .Builder
+ builderForValue) {
+ if (partialResultsBuilder_ == null) {
+ ensurePartialResultsIsMutable();
+ partialResults_.add(builderForValue.build());
+ onChanged();
+ } else {
+ partialResultsBuilder_.addMessage(builderForValue.build());
+ }
+ return this;
+ }
+ /**
+ *
+ *
+ *
+ * Partial results that reflects the latest migration operation progress.
+ *
+ *
+ *
+ * repeated .google.cloud.aiplatform.v1beta1.BatchMigrateResourcesOperationMetadata.PartialResult partial_results = 2;
+ *
+ */
+ public Builder addPartialResults(
+ int index,
+ com.google.cloud.aiplatform.v1beta1.BatchMigrateResourcesOperationMetadata.PartialResult
+ .Builder
+ builderForValue) {
+ if (partialResultsBuilder_ == null) {
+ ensurePartialResultsIsMutable();
+ partialResults_.add(index, builderForValue.build());
+ onChanged();
+ } else {
+ partialResultsBuilder_.addMessage(index, builderForValue.build());
+ }
+ return this;
+ }
+ /**
+ *
+ *
+ *
+ * Partial results that reflects the latest migration operation progress.
+ *
+ *
+ *
+ * repeated .google.cloud.aiplatform.v1beta1.BatchMigrateResourcesOperationMetadata.PartialResult partial_results = 2;
+ *
+ */
+ public Builder addAllPartialResults(
+ java.lang.Iterable<
+ ? extends
+ com.google.cloud.aiplatform.v1beta1.BatchMigrateResourcesOperationMetadata
+ .PartialResult>
+ values) {
+ if (partialResultsBuilder_ == null) {
+ ensurePartialResultsIsMutable();
+ com.google.protobuf.AbstractMessageLite.Builder.addAll(values, partialResults_);
+ onChanged();
+ } else {
+ partialResultsBuilder_.addAllMessages(values);
+ }
+ return this;
+ }
+ /**
+ *
+ *
+ *
+ * Partial results that reflects the latest migration operation progress.
+ *
+ *
+ *
+ * repeated .google.cloud.aiplatform.v1beta1.BatchMigrateResourcesOperationMetadata.PartialResult partial_results = 2;
+ *
+ */
+ public Builder clearPartialResults() {
+ if (partialResultsBuilder_ == null) {
+ partialResults_ = java.util.Collections.emptyList();
+ bitField0_ = (bitField0_ & ~0x00000001);
+ onChanged();
+ } else {
+ partialResultsBuilder_.clear();
+ }
+ return this;
+ }
+ /**
+ *
+ *
+ *
+ * Partial results that reflects the latest migration operation progress.
+ *
+ *
+ *
+ * repeated .google.cloud.aiplatform.v1beta1.BatchMigrateResourcesOperationMetadata.PartialResult partial_results = 2;
+ *
+ */
+ public Builder removePartialResults(int index) {
+ if (partialResultsBuilder_ == null) {
+ ensurePartialResultsIsMutable();
+ partialResults_.remove(index);
+ onChanged();
+ } else {
+ partialResultsBuilder_.remove(index);
+ }
+ return this;
+ }
+ /**
+ *
+ *
+ *
+ * Partial results that reflects the latest migration operation progress.
+ *
+ *
+ *
+ * repeated .google.cloud.aiplatform.v1beta1.BatchMigrateResourcesOperationMetadata.PartialResult partial_results = 2;
+ *
+ */
+ public com.google.cloud.aiplatform.v1beta1.BatchMigrateResourcesOperationMetadata.PartialResult
+ .Builder
+ getPartialResultsBuilder(int index) {
+ return getPartialResultsFieldBuilder().getBuilder(index);
+ }
+ /**
+ *
+ *
+ *
+ * Partial results that reflects the latest migration operation progress.
+ *
+ *
+ *
+ * repeated .google.cloud.aiplatform.v1beta1.BatchMigrateResourcesOperationMetadata.PartialResult partial_results = 2;
+ *
+ */
+ public com.google.cloud.aiplatform.v1beta1.BatchMigrateResourcesOperationMetadata
+ .PartialResultOrBuilder
+ getPartialResultsOrBuilder(int index) {
+ if (partialResultsBuilder_ == null) {
+ return partialResults_.get(index);
+ } else {
+ return partialResultsBuilder_.getMessageOrBuilder(index);
+ }
+ }
+ /**
+ *
+ *
+ *
+ * Partial results that reflects the latest migration operation progress.
+ *
+ *
+ *
+ * repeated .google.cloud.aiplatform.v1beta1.BatchMigrateResourcesOperationMetadata.PartialResult partial_results = 2;
+ *
+ */
+ public java.util.List<
+ ? extends
+ com.google.cloud.aiplatform.v1beta1.BatchMigrateResourcesOperationMetadata
+ .PartialResultOrBuilder>
+ getPartialResultsOrBuilderList() {
+ if (partialResultsBuilder_ != null) {
+ return partialResultsBuilder_.getMessageOrBuilderList();
+ } else {
+ return java.util.Collections.unmodifiableList(partialResults_);
+ }
+ }
+ /**
+ *
+ *
+ *
+ * Partial results that reflects the latest migration operation progress.
+ *
+ *
+ *
+ * repeated .google.cloud.aiplatform.v1beta1.BatchMigrateResourcesOperationMetadata.PartialResult partial_results = 2;
+ *
+ */
+ public com.google.cloud.aiplatform.v1beta1.BatchMigrateResourcesOperationMetadata.PartialResult
+ .Builder
+ addPartialResultsBuilder() {
+ return getPartialResultsFieldBuilder()
+ .addBuilder(
+ com.google.cloud.aiplatform.v1beta1.BatchMigrateResourcesOperationMetadata
+ .PartialResult.getDefaultInstance());
+ }
+ /**
+ *
+ *
+ *
+ * Partial results that reflects the latest migration operation progress.
+ *
+ *
+ *
+ * repeated .google.cloud.aiplatform.v1beta1.BatchMigrateResourcesOperationMetadata.PartialResult partial_results = 2;
+ *
+ */
+ public com.google.cloud.aiplatform.v1beta1.BatchMigrateResourcesOperationMetadata.PartialResult
+ .Builder
+ addPartialResultsBuilder(int index) {
+ return getPartialResultsFieldBuilder()
+ .addBuilder(
+ index,
+ com.google.cloud.aiplatform.v1beta1.BatchMigrateResourcesOperationMetadata
+ .PartialResult.getDefaultInstance());
+ }
+ /**
+ *
+ *
+ *
+ * Partial results that reflects the latest migration operation progress.
+ *
+ *
+ *
+ * repeated .google.cloud.aiplatform.v1beta1.BatchMigrateResourcesOperationMetadata.PartialResult partial_results = 2;
+ *
+ */
+ public java.util.List<
+ com.google.cloud.aiplatform.v1beta1.BatchMigrateResourcesOperationMetadata.PartialResult
+ .Builder>
+ getPartialResultsBuilderList() {
+ return getPartialResultsFieldBuilder().getBuilderList();
+ }
+
+ private com.google.protobuf.RepeatedFieldBuilderV3<
+ com.google.cloud.aiplatform.v1beta1.BatchMigrateResourcesOperationMetadata
+ .PartialResult,
+ com.google.cloud.aiplatform.v1beta1.BatchMigrateResourcesOperationMetadata.PartialResult
+ .Builder,
+ com.google.cloud.aiplatform.v1beta1.BatchMigrateResourcesOperationMetadata
+ .PartialResultOrBuilder>
+ getPartialResultsFieldBuilder() {
+ if (partialResultsBuilder_ == null) {
+ partialResultsBuilder_ =
+ new com.google.protobuf.RepeatedFieldBuilderV3<
+ com.google.cloud.aiplatform.v1beta1.BatchMigrateResourcesOperationMetadata
+ .PartialResult,
+ com.google.cloud.aiplatform.v1beta1.BatchMigrateResourcesOperationMetadata
+ .PartialResult.Builder,
+ com.google.cloud.aiplatform.v1beta1.BatchMigrateResourcesOperationMetadata
+ .PartialResultOrBuilder>(
+ partialResults_,
+ ((bitField0_ & 0x00000001) != 0),
+ getParentForChildren(),
+ isClean());
+ partialResults_ = null;
+ }
+ return partialResultsBuilder_;
+ }
+
@java.lang.Override
public final Builder setUnknownFields(final com.google.protobuf.UnknownFieldSet unknownFields) {
return super.setUnknownFields(unknownFields);
diff --git a/proto-google-cloud-aiplatform-v1beta1/src/main/java/com/google/cloud/aiplatform/v1beta1/BatchMigrateResourcesOperationMetadataOrBuilder.java b/proto-google-cloud-aiplatform-v1beta1/src/main/java/com/google/cloud/aiplatform/v1beta1/BatchMigrateResourcesOperationMetadataOrBuilder.java
index 27c36f4e1..b613cd4c8 100644
--- a/proto-google-cloud-aiplatform-v1beta1/src/main/java/com/google/cloud/aiplatform/v1beta1/BatchMigrateResourcesOperationMetadataOrBuilder.java
+++ b/proto-google-cloud-aiplatform-v1beta1/src/main/java/com/google/cloud/aiplatform/v1beta1/BatchMigrateResourcesOperationMetadataOrBuilder.java
@@ -58,4 +58,73 @@ public interface BatchMigrateResourcesOperationMetadataOrBuilder
*/
com.google.cloud.aiplatform.v1beta1.GenericOperationMetadataOrBuilder
getGenericMetadataOrBuilder();
+
+ /**
+ *
+ *
+ *
+ * Partial results that reflects the latest migration operation progress.
+ *
+ *
+ *
+ * repeated .google.cloud.aiplatform.v1beta1.BatchMigrateResourcesOperationMetadata.PartialResult partial_results = 2;
+ *
+ */
+ java.util.List<
+ com.google.cloud.aiplatform.v1beta1.BatchMigrateResourcesOperationMetadata.PartialResult>
+ getPartialResultsList();
+ /**
+ *
+ *
+ *
+ * Partial results that reflects the latest migration operation progress.
+ *
+ *
+ *
+ * repeated .google.cloud.aiplatform.v1beta1.BatchMigrateResourcesOperationMetadata.PartialResult partial_results = 2;
+ *
+ */
+ com.google.cloud.aiplatform.v1beta1.BatchMigrateResourcesOperationMetadata.PartialResult
+ getPartialResults(int index);
+ /**
+ *
+ *
+ *
+ * Partial results that reflects the latest migration operation progress.
+ *
+ *
+ *
+ * repeated .google.cloud.aiplatform.v1beta1.BatchMigrateResourcesOperationMetadata.PartialResult partial_results = 2;
+ *
+ */
+ int getPartialResultsCount();
+ /**
+ *
+ *
+ *
+ * Partial results that reflects the latest migration operation progress.
+ *
+ *
+ *
+ * repeated .google.cloud.aiplatform.v1beta1.BatchMigrateResourcesOperationMetadata.PartialResult partial_results = 2;
+ *
+ */
+ java.util.List<
+ ? extends
+ com.google.cloud.aiplatform.v1beta1.BatchMigrateResourcesOperationMetadata
+ .PartialResultOrBuilder>
+ getPartialResultsOrBuilderList();
+ /**
+ *
+ *
+ *
+ * Partial results that reflects the latest migration operation progress.
+ *
+ *
+ *
+ * repeated .google.cloud.aiplatform.v1beta1.BatchMigrateResourcesOperationMetadata.PartialResult partial_results = 2;
+ *
+ */
+ com.google.cloud.aiplatform.v1beta1.BatchMigrateResourcesOperationMetadata.PartialResultOrBuilder
+ getPartialResultsOrBuilder(int index);
}
diff --git a/proto-google-cloud-aiplatform-v1beta1/src/main/java/com/google/cloud/aiplatform/v1beta1/BatchPredictionJob.java b/proto-google-cloud-aiplatform-v1beta1/src/main/java/com/google/cloud/aiplatform/v1beta1/BatchPredictionJob.java
index 82a4704aa..ce4996373 100644
--- a/proto-google-cloud-aiplatform-v1beta1/src/main/java/com/google/cloud/aiplatform/v1beta1/BatchPredictionJob.java
+++ b/proto-google-cloud-aiplatform-v1beta1/src/main/java/com/google/cloud/aiplatform/v1beta1/BatchPredictionJob.java
@@ -344,6 +344,23 @@ private BatchPredictionJob(
case 184:
{
generateExplanation_ = input.readBool();
+ break;
+ }
+ case 194:
+ {
+ com.google.cloud.aiplatform.v1beta1.EncryptionSpec.Builder subBuilder = null;
+ if (encryptionSpec_ != null) {
+ subBuilder = encryptionSpec_.toBuilder();
+ }
+ encryptionSpec_ =
+ input.readMessage(
+ com.google.cloud.aiplatform.v1beta1.EncryptionSpec.parser(),
+ extensionRegistry);
+ if (subBuilder != null) {
+ subBuilder.mergeFrom(encryptionSpec_);
+ encryptionSpec_ = subBuilder.buildPartial();
+ }
+
break;
}
case 202:
@@ -420,7 +437,7 @@ public interface InputConfigOrBuilder
*
*
*
- * The Google Cloud Storage location for the input instances.
+ * The Cloud Storage location for the input instances.
*
*
* .google.cloud.aiplatform.v1beta1.GcsSource gcs_source = 2;
@@ -432,7 +449,7 @@ public interface InputConfigOrBuilder
*
*
*
- * The Google Cloud Storage location for the input instances.
+ * The Cloud Storage location for the input instances.
*
*
* .google.cloud.aiplatform.v1beta1.GcsSource gcs_source = 2;
@@ -444,7 +461,7 @@ public interface InputConfigOrBuilder
*
*
*
- * The Google Cloud Storage location for the input instances.
+ * The Cloud Storage location for the input instances.
*
*
* .google.cloud.aiplatform.v1beta1.GcsSource gcs_source = 2;
@@ -713,7 +730,7 @@ public SourceCase getSourceCase() {
*
*
*
- * The Google Cloud Storage location for the input instances.
+ * The Cloud Storage location for the input instances.
*
*
* .google.cloud.aiplatform.v1beta1.GcsSource gcs_source = 2;
@@ -728,7 +745,7 @@ public boolean hasGcsSource() {
*
*
*
- * The Google Cloud Storage location for the input instances.
+ * The Cloud Storage location for the input instances.
*
*
* .google.cloud.aiplatform.v1beta1.GcsSource gcs_source = 2;
@@ -746,7 +763,7 @@ public com.google.cloud.aiplatform.v1beta1.GcsSource getGcsSource() {
*
*
*
- * The Google Cloud Storage location for the input instances.
+ * The Cloud Storage location for the input instances.
*
*
* .google.cloud.aiplatform.v1beta1.GcsSource gcs_source = 2;
@@ -1304,7 +1321,7 @@ public Builder clearSource() {
*
*
*
- * The Google Cloud Storage location for the input instances.
+ * The Cloud Storage location for the input instances.
*
*
* .google.cloud.aiplatform.v1beta1.GcsSource gcs_source = 2;
@@ -1319,7 +1336,7 @@ public boolean hasGcsSource() {
*
*
*
- * The Google Cloud Storage location for the input instances.
+ * The Cloud Storage location for the input instances.
*
*
* .google.cloud.aiplatform.v1beta1.GcsSource gcs_source = 2;
@@ -1344,7 +1361,7 @@ public com.google.cloud.aiplatform.v1beta1.GcsSource getGcsSource() {
*
*
*
- * The Google Cloud Storage location for the input instances.
+ * The Cloud Storage location for the input instances.
*
*
* .google.cloud.aiplatform.v1beta1.GcsSource gcs_source = 2;
@@ -1366,7 +1383,7 @@ public Builder setGcsSource(com.google.cloud.aiplatform.v1beta1.GcsSource value)
*
*
*
- * The Google Cloud Storage location for the input instances.
+ * The Cloud Storage location for the input instances.
*
*
* .google.cloud.aiplatform.v1beta1.GcsSource gcs_source = 2;
@@ -1386,7 +1403,7 @@ public Builder setGcsSource(
*
*
*
- * The Google Cloud Storage location for the input instances.
+ * The Cloud Storage location for the input instances.
*
*
* .google.cloud.aiplatform.v1beta1.GcsSource gcs_source = 2;
@@ -1417,7 +1434,7 @@ public Builder mergeGcsSource(com.google.cloud.aiplatform.v1beta1.GcsSource valu
*
*
*
- * The Google Cloud Storage location for the input instances.
+ * The Cloud Storage location for the input instances.
*
*
* .google.cloud.aiplatform.v1beta1.GcsSource gcs_source = 2;
@@ -1442,7 +1459,7 @@ public Builder clearGcsSource() {
*
*
*
- * The Google Cloud Storage location for the input instances.
+ * The Cloud Storage location for the input instances.
*
*
* .google.cloud.aiplatform.v1beta1.GcsSource gcs_source = 2;
@@ -1454,7 +1471,7 @@ public com.google.cloud.aiplatform.v1beta1.GcsSource.Builder getGcsSourceBuilder
*
*
*
- * The Google Cloud Storage location for the input instances.
+ * The Cloud Storage location for the input instances.
*
*
* .google.cloud.aiplatform.v1beta1.GcsSource gcs_source = 2;
@@ -1474,7 +1491,7 @@ public com.google.cloud.aiplatform.v1beta1.GcsSourceOrBuilder getGcsSourceOrBuil
*
*
*
- * The Google Cloud Storage location for the input instances.
+ * The Cloud Storage location for the input instances.
*
*
* .google.cloud.aiplatform.v1beta1.GcsSource gcs_source = 2;
@@ -1931,7 +1948,7 @@ public interface OutputConfigOrBuilder
*
*
*
- * The Google Cloud Storage location of the directory where the output is
+ * The Cloud Storage location of the directory where the output is
* to be written to. In the given directory a new directory is created.
* Its name is `prediction-<model-display-name>-<job-create-time>`,
* where timestamp is in YYYY-MM-DDThh:mm:ss.sssZ ISO-8601 format.
@@ -1963,7 +1980,7 @@ public interface OutputConfigOrBuilder
*
*
*
- * The Google Cloud Storage location of the directory where the output is
+ * The Cloud Storage location of the directory where the output is
* to be written to. In the given directory a new directory is created.
* Its name is `prediction-<model-display-name>-<job-create-time>`,
* where timestamp is in YYYY-MM-DDThh:mm:ss.sssZ ISO-8601 format.
@@ -1995,7 +2012,7 @@ public interface OutputConfigOrBuilder
*
*
*
- * The Google Cloud Storage location of the directory where the output is
+ * The Cloud Storage location of the directory where the output is
* to be written to. In the given directory a new directory is created.
* Its name is `prediction-<model-display-name>-<job-create-time>`,
* where timestamp is in YYYY-MM-DDThh:mm:ss.sssZ ISO-8601 format.
@@ -2328,7 +2345,7 @@ public DestinationCase getDestinationCase() {
*
*
*
- * The Google Cloud Storage location of the directory where the output is
+ * The Cloud Storage location of the directory where the output is
* to be written to. In the given directory a new directory is created.
* Its name is `prediction-<model-display-name>-<job-create-time>`,
* where timestamp is in YYYY-MM-DDThh:mm:ss.sssZ ISO-8601 format.
@@ -2363,7 +2380,7 @@ public boolean hasGcsDestination() {
*
*
*
- * The Google Cloud Storage location of the directory where the output is
+ * The Cloud Storage location of the directory where the output is
* to be written to. In the given directory a new directory is created.
* Its name is `prediction-<model-display-name>-<job-create-time>`,
* where timestamp is in YYYY-MM-DDThh:mm:ss.sssZ ISO-8601 format.
@@ -2401,7 +2418,7 @@ public com.google.cloud.aiplatform.v1beta1.GcsDestination getGcsDestination() {
*
*
*
- * The Google Cloud Storage location of the directory where the output is
+ * The Cloud Storage location of the directory where the output is
* to be written to. In the given directory a new directory is created.
* Its name is `prediction-<model-display-name>-<job-create-time>`,
* where timestamp is in YYYY-MM-DDThh:mm:ss.sssZ ISO-8601 format.
@@ -3020,7 +3037,7 @@ public Builder clearDestination() {
*
*
*
- * The Google Cloud Storage location of the directory where the output is
+ * The Cloud Storage location of the directory where the output is
* to be written to. In the given directory a new directory is created.
* Its name is `prediction-<model-display-name>-<job-create-time>`,
* where timestamp is in YYYY-MM-DDThh:mm:ss.sssZ ISO-8601 format.
@@ -3055,7 +3072,7 @@ public boolean hasGcsDestination() {
*
*
*
- * The Google Cloud Storage location of the directory where the output is
+ * The Cloud Storage location of the directory where the output is
* to be written to. In the given directory a new directory is created.
* Its name is `prediction-<model-display-name>-<job-create-time>`,
* where timestamp is in YYYY-MM-DDThh:mm:ss.sssZ ISO-8601 format.
@@ -3100,7 +3117,7 @@ public com.google.cloud.aiplatform.v1beta1.GcsDestination getGcsDestination() {
*
*
*
- * The Google Cloud Storage location of the directory where the output is
+ * The Cloud Storage location of the directory where the output is
* to be written to. In the given directory a new directory is created.
* Its name is `prediction-<model-display-name>-<job-create-time>`,
* where timestamp is in YYYY-MM-DDThh:mm:ss.sssZ ISO-8601 format.
@@ -3142,7 +3159,7 @@ public Builder setGcsDestination(com.google.cloud.aiplatform.v1beta1.GcsDestinat
*
*
*
- * The Google Cloud Storage location of the directory where the output is
+ * The Cloud Storage location of the directory where the output is
* to be written to. In the given directory a new directory is created.
* Its name is `prediction-<model-display-name>-<job-create-time>`,
* where timestamp is in YYYY-MM-DDThh:mm:ss.sssZ ISO-8601 format.
@@ -3182,7 +3199,7 @@ public Builder setGcsDestination(
*
*
*
- * The Google Cloud Storage location of the directory where the output is
+ * The Cloud Storage location of the directory where the output is
* to be written to. In the given directory a new directory is created.
* Its name is `prediction-<model-display-name>-<job-create-time>`,
* where timestamp is in YYYY-MM-DDThh:mm:ss.sssZ ISO-8601 format.
@@ -3234,7 +3251,7 @@ public Builder mergeGcsDestination(com.google.cloud.aiplatform.v1beta1.GcsDestin
*
*
*
- * The Google Cloud Storage location of the directory where the output is
+ * The Cloud Storage location of the directory where the output is
* to be written to. In the given directory a new directory is created.
* Its name is `prediction-<model-display-name>-<job-create-time>`,
* where timestamp is in YYYY-MM-DDThh:mm:ss.sssZ ISO-8601 format.
@@ -3279,7 +3296,7 @@ public Builder clearGcsDestination() {
*
*
*
- * The Google Cloud Storage location of the directory where the output is
+ * The Cloud Storage location of the directory where the output is
* to be written to. In the given directory a new directory is created.
* Its name is `prediction-<model-display-name>-<job-create-time>`,
* where timestamp is in YYYY-MM-DDThh:mm:ss.sssZ ISO-8601 format.
@@ -3311,7 +3328,7 @@ public com.google.cloud.aiplatform.v1beta1.GcsDestination.Builder getGcsDestinat
*
*
*
- * The Google Cloud Storage location of the directory where the output is
+ * The Cloud Storage location of the directory where the output is
* to be written to. In the given directory a new directory is created.
* Its name is `prediction-<model-display-name>-<job-create-time>`,
* where timestamp is in YYYY-MM-DDThh:mm:ss.sssZ ISO-8601 format.
@@ -3352,7 +3369,7 @@ public com.google.cloud.aiplatform.v1beta1.GcsDestination.Builder getGcsDestinat
*
*
*
- * The Google Cloud Storage location of the directory where the output is
+ * The Cloud Storage location of the directory where the output is
* to be written to. In the given directory a new directory is created.
* Its name is `prediction-<model-display-name>-<job-create-time>`,
* where timestamp is in YYYY-MM-DDThh:mm:ss.sssZ ISO-8601 format.
@@ -3950,7 +3967,7 @@ public interface OutputInfoOrBuilder
*
*
*
- * Output only. The full path of the Google Cloud Storage directory created, into which
+ * Output only. The full path of the Cloud Storage directory created, into which
* the prediction output is written.
*
*
@@ -3963,7 +3980,7 @@ public interface OutputInfoOrBuilder
*
*
*
- * Output only. The full path of the Google Cloud Storage directory created, into which
+ * Output only. The full path of the Cloud Storage directory created, into which
* the prediction output is written.
*
*
@@ -4156,7 +4173,7 @@ public OutputLocationCase getOutputLocationCase() {
*
*
*
- * Output only. The full path of the Google Cloud Storage directory created, into which
+ * Output only. The full path of the Cloud Storage directory created, into which
* the prediction output is written.
*
*
@@ -4184,7 +4201,7 @@ public java.lang.String getGcsOutputDirectory() {
*
*
*
- * Output only. The full path of the Google Cloud Storage directory created, into which
+ * Output only. The full path of the Cloud Storage directory created, into which
* the prediction output is written.
*
*
@@ -4667,7 +4684,7 @@ public Builder clearOutputLocation() {
*
*
*
- * Output only. The full path of the Google Cloud Storage directory created, into which
+ * Output only. The full path of the Cloud Storage directory created, into which
* the prediction output is written.
*
*
@@ -4696,7 +4713,7 @@ public java.lang.String getGcsOutputDirectory() {
*
*
*
- * Output only. The full path of the Google Cloud Storage directory created, into which
+ * Output only. The full path of the Cloud Storage directory created, into which
* the prediction output is written.
*
*
@@ -4725,7 +4742,7 @@ public com.google.protobuf.ByteString getGcsOutputDirectoryBytes() {
*
*
*
- * Output only. The full path of the Google Cloud Storage directory created, into which
+ * Output only. The full path of the Cloud Storage directory created, into which
* the prediction output is written.
*
*
@@ -4747,7 +4764,7 @@ public Builder setGcsOutputDirectory(java.lang.String value) {
*
*
*
- * Output only. The full path of the Google Cloud Storage directory created, into which
+ * Output only. The full path of the Cloud Storage directory created, into which
* the prediction output is written.
*
*
@@ -4767,7 +4784,7 @@ public Builder clearGcsOutputDirectory() {
*
*
*
- * Output only. The full path of the Google Cloud Storage directory created, into which
+ * Output only. The full path of the Cloud Storage directory created, into which
* the prediction output is written.
*
*
@@ -5463,15 +5480,18 @@ public boolean hasManualBatchTuningParameters() {
*
*
*
- * Generate explanation along with the batch prediction results.
- * When it's true, the batch prediction output will change based on the
- * [output format][BatchPredictionJob.output_config.predictions_format]:
- * * `bigquery`: output will include a column named `explanation`. The value
+ * Generate explanation with the batch prediction results.
+ * When set to `true`, the batch prediction output changes based on the
+ * `predictions_format` field of the
+ * [BatchPredictionJob.output_config][google.cloud.aiplatform.v1beta1.BatchPredictionJob.output_config] object:
+ * * `bigquery`: output includes a column named `explanation`. The value
* is a struct that conforms to the [Explanation][google.cloud.aiplatform.v1beta1.Explanation] object.
- * * `jsonl`: The JSON objects on each line will include an additional entry
+ * * `jsonl`: The JSON objects on each line include an additional entry
* keyed `explanation`. The value of the entry is a JSON object that
* conforms to the [Explanation][google.cloud.aiplatform.v1beta1.Explanation] object.
* * `csv`: Generating explanations for CSV format is not supported.
+ * If this field is set to true, the [Model.explanation_spec][google.cloud.aiplatform.v1beta1.Model.explanation_spec] must be
+ * populated.
*
*
* bool generate_explanation = 23;
@@ -5489,15 +5509,12 @@ public boolean getGenerateExplanation() {
*
*
*
- * Explanation configuration for this BatchPredictionJob. Can only be
- * specified if [generate_explanation][google.cloud.aiplatform.v1beta1.BatchPredictionJob.generate_explanation] is set to `true`. It's invalid to
- * specified it with generate_explanation set to false or unset.
+ * Explanation configuration for this BatchPredictionJob. Can be
+ * specified only if [generate_explanation][google.cloud.aiplatform.v1beta1.BatchPredictionJob.generate_explanation] is set to `true`.
* This value overrides the value of [Model.explanation_spec][google.cloud.aiplatform.v1beta1.Model.explanation_spec]. All fields of
- * [explanation_spec][google.cloud.aiplatform.v1beta1.BatchPredictionJob.explanation_spec] are optional in the request. If a field of
- * [explanation_spec][google.cloud.aiplatform.v1beta1.BatchPredictionJob.explanation_spec] is not populated, the value of the same field of
- * [Model.explanation_spec][google.cloud.aiplatform.v1beta1.Model.explanation_spec] is inherited. The corresponding
- * [Model.explanation_spec][google.cloud.aiplatform.v1beta1.Model.explanation_spec] must be populated, otherwise explanation for
- * this Model is not allowed.
+ * [explanation_spec][google.cloud.aiplatform.v1beta1.BatchPredictionJob.explanation_spec] are optional in the request. If a field of the
+ * [explanation_spec][google.cloud.aiplatform.v1beta1.BatchPredictionJob.explanation_spec] object is not populated, the corresponding field of
+ * the [Model.explanation_spec][google.cloud.aiplatform.v1beta1.Model.explanation_spec] object is inherited.
*
*
* .google.cloud.aiplatform.v1beta1.ExplanationSpec explanation_spec = 25;
@@ -5512,15 +5529,12 @@ public boolean hasExplanationSpec() {
*
*
*
- * Explanation configuration for this BatchPredictionJob. Can only be
- * specified if [generate_explanation][google.cloud.aiplatform.v1beta1.BatchPredictionJob.generate_explanation] is set to `true`. It's invalid to
- * specified it with generate_explanation set to false or unset.
+ * Explanation configuration for this BatchPredictionJob. Can be
+ * specified only if [generate_explanation][google.cloud.aiplatform.v1beta1.BatchPredictionJob.generate_explanation] is set to `true`.
* This value overrides the value of [Model.explanation_spec][google.cloud.aiplatform.v1beta1.Model.explanation_spec]. All fields of
- * [explanation_spec][google.cloud.aiplatform.v1beta1.BatchPredictionJob.explanation_spec] are optional in the request. If a field of
- * [explanation_spec][google.cloud.aiplatform.v1beta1.BatchPredictionJob.explanation_spec] is not populated, the value of the same field of
- * [Model.explanation_spec][google.cloud.aiplatform.v1beta1.Model.explanation_spec] is inherited. The corresponding
- * [Model.explanation_spec][google.cloud.aiplatform.v1beta1.Model.explanation_spec] must be populated, otherwise explanation for
- * this Model is not allowed.
+ * [explanation_spec][google.cloud.aiplatform.v1beta1.BatchPredictionJob.explanation_spec] are optional in the request. If a field of the
+ * [explanation_spec][google.cloud.aiplatform.v1beta1.BatchPredictionJob.explanation_spec] object is not populated, the corresponding field of
+ * the [Model.explanation_spec][google.cloud.aiplatform.v1beta1.Model.explanation_spec] object is inherited.
*
*
* .google.cloud.aiplatform.v1beta1.ExplanationSpec explanation_spec = 25;
@@ -5537,15 +5551,12 @@ public com.google.cloud.aiplatform.v1beta1.ExplanationSpec getExplanationSpec()
*
*
*
- * Explanation configuration for this BatchPredictionJob. Can only be
- * specified if [generate_explanation][google.cloud.aiplatform.v1beta1.BatchPredictionJob.generate_explanation] is set to `true`. It's invalid to
- * specified it with generate_explanation set to false or unset.
+ * Explanation configuration for this BatchPredictionJob. Can be
+ * specified only if [generate_explanation][google.cloud.aiplatform.v1beta1.BatchPredictionJob.generate_explanation] is set to `true`.
* This value overrides the value of [Model.explanation_spec][google.cloud.aiplatform.v1beta1.Model.explanation_spec]. All fields of
- * [explanation_spec][google.cloud.aiplatform.v1beta1.BatchPredictionJob.explanation_spec] are optional in the request. If a field of
- * [explanation_spec][google.cloud.aiplatform.v1beta1.BatchPredictionJob.explanation_spec] is not populated, the value of the same field of
- * [Model.explanation_spec][google.cloud.aiplatform.v1beta1.Model.explanation_spec] is inherited. The corresponding
- * [Model.explanation_spec][google.cloud.aiplatform.v1beta1.Model.explanation_spec] must be populated, otherwise explanation for
- * this Model is not allowed.
+ * [explanation_spec][google.cloud.aiplatform.v1beta1.BatchPredictionJob.explanation_spec] are optional in the request. If a field of the
+ * [explanation_spec][google.cloud.aiplatform.v1beta1.BatchPredictionJob.explanation_spec] object is not populated, the corresponding field of
+ * the [Model.explanation_spec][google.cloud.aiplatform.v1beta1.Model.explanation_spec] object is inherited.
*
*
* .google.cloud.aiplatform.v1beta1.ExplanationSpec explanation_spec = 25;
@@ -6232,6 +6243,60 @@ public java.lang.String getLabelsOrThrow(java.lang.String key) {
return map.get(key);
}
+ public static final int ENCRYPTION_SPEC_FIELD_NUMBER = 24;
+ private com.google.cloud.aiplatform.v1beta1.EncryptionSpec encryptionSpec_;
+ /**
+ *
+ *
+ *
+ * Customer-managed encryption key options for a BatchPredictionJob. If this
+ * is set, then all resources created by the BatchPredictionJob will be
+ * encrypted with the provided encryption key.
+ *
+ *
+ * .google.cloud.aiplatform.v1beta1.EncryptionSpec encryption_spec = 24;
+ *
+ * @return Whether the encryptionSpec field is set.
+ */
+ @java.lang.Override
+ public boolean hasEncryptionSpec() {
+ return encryptionSpec_ != null;
+ }
+ /**
+ *
+ *
+ *
+ * Customer-managed encryption key options for a BatchPredictionJob. If this
+ * is set, then all resources created by the BatchPredictionJob will be
+ * encrypted with the provided encryption key.
+ *
+ *
+ * .google.cloud.aiplatform.v1beta1.EncryptionSpec encryption_spec = 24;
+ *
+ * @return The encryptionSpec.
+ */
+ @java.lang.Override
+ public com.google.cloud.aiplatform.v1beta1.EncryptionSpec getEncryptionSpec() {
+ return encryptionSpec_ == null
+ ? com.google.cloud.aiplatform.v1beta1.EncryptionSpec.getDefaultInstance()
+ : encryptionSpec_;
+ }
+ /**
+ *
+ *
+ *
+ * Customer-managed encryption key options for a BatchPredictionJob. If this
+ * is set, then all resources created by the BatchPredictionJob will be
+ * encrypted with the provided encryption key.
+ *
+ *
+ * .google.cloud.aiplatform.v1beta1.EncryptionSpec encryption_spec = 24;
+ */
+ @java.lang.Override
+ public com.google.cloud.aiplatform.v1beta1.EncryptionSpecOrBuilder getEncryptionSpecOrBuilder() {
+ return getEncryptionSpec();
+ }
+
private byte memoizedIsInitialized = -1;
@java.lang.Override
@@ -6305,6 +6370,9 @@ public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io
if (generateExplanation_ != false) {
output.writeBool(23, generateExplanation_);
}
+ if (encryptionSpec_ != null) {
+ output.writeMessage(24, getEncryptionSpec());
+ }
if (explanationSpec_ != null) {
output.writeMessage(25, getExplanationSpec());
}
@@ -6386,6 +6454,9 @@ public int getSerializedSize() {
if (generateExplanation_ != false) {
size += com.google.protobuf.CodedOutputStream.computeBoolSize(23, generateExplanation_);
}
+ if (encryptionSpec_ != null) {
+ size += com.google.protobuf.CodedOutputStream.computeMessageSize(24, getEncryptionSpec());
+ }
if (explanationSpec_ != null) {
size += com.google.protobuf.CodedOutputStream.computeMessageSize(25, getExplanationSpec());
}
@@ -6469,6 +6540,10 @@ public boolean equals(final java.lang.Object obj) {
if (!getUpdateTime().equals(other.getUpdateTime())) return false;
}
if (!internalGetLabels().equals(other.internalGetLabels())) return false;
+ if (hasEncryptionSpec() != other.hasEncryptionSpec()) return false;
+ if (hasEncryptionSpec()) {
+ if (!getEncryptionSpec().equals(other.getEncryptionSpec())) return false;
+ }
if (!unknownFields.equals(other.unknownFields)) return false;
return true;
}
@@ -6554,6 +6629,10 @@ public int hashCode() {
hash = (37 * hash) + LABELS_FIELD_NUMBER;
hash = (53 * hash) + internalGetLabels().hashCode();
}
+ if (hasEncryptionSpec()) {
+ hash = (37 * hash) + ENCRYPTION_SPEC_FIELD_NUMBER;
+ hash = (53 * hash) + getEncryptionSpec().hashCode();
+ }
hash = (29 * hash) + unknownFields.hashCode();
memoizedHashCode = hash;
return hash;
@@ -6826,6 +6905,12 @@ public Builder clear() {
updateTimeBuilder_ = null;
}
internalGetMutableLabels().clear();
+ if (encryptionSpecBuilder_ == null) {
+ encryptionSpec_ = null;
+ } else {
+ encryptionSpec_ = null;
+ encryptionSpecBuilder_ = null;
+ }
return this;
}
@@ -6940,6 +7025,11 @@ public com.google.cloud.aiplatform.v1beta1.BatchPredictionJob buildPartial() {
}
result.labels_ = internalGetLabels();
result.labels_.makeImmutable();
+ if (encryptionSpecBuilder_ == null) {
+ result.encryptionSpec_ = encryptionSpec_;
+ } else {
+ result.encryptionSpec_ = encryptionSpecBuilder_.build();
+ }
onBuilt();
return result;
}
@@ -7078,6 +7168,9 @@ public Builder mergeFrom(com.google.cloud.aiplatform.v1beta1.BatchPredictionJob
mergeUpdateTime(other.getUpdateTime());
}
internalGetMutableLabels().mergeFrom(other.internalGetLabels());
+ if (other.hasEncryptionSpec()) {
+ mergeEncryptionSpec(other.getEncryptionSpec());
+ }
this.mergeUnknownFields(other.unknownFields);
onChanged();
return this;
@@ -8658,15 +8751,18 @@ public Builder clearManualBatchTuningParameters() {
*
*
*
- * Generate explanation along with the batch prediction results.
- * When it's true, the batch prediction output will change based on the
- * [output format][BatchPredictionJob.output_config.predictions_format]:
- * * `bigquery`: output will include a column named `explanation`. The value
+ * Generate explanation with the batch prediction results.
+ * When set to `true`, the batch prediction output changes based on the
+ * `predictions_format` field of the
+ * [BatchPredictionJob.output_config][google.cloud.aiplatform.v1beta1.BatchPredictionJob.output_config] object:
+ * * `bigquery`: output includes a column named `explanation`. The value
* is a struct that conforms to the [Explanation][google.cloud.aiplatform.v1beta1.Explanation] object.
- * * `jsonl`: The JSON objects on each line will include an additional entry
+ * * `jsonl`: The JSON objects on each line include an additional entry
* keyed `explanation`. The value of the entry is a JSON object that
* conforms to the [Explanation][google.cloud.aiplatform.v1beta1.Explanation] object.
* * `csv`: Generating explanations for CSV format is not supported.
+ * If this field is set to true, the [Model.explanation_spec][google.cloud.aiplatform.v1beta1.Model.explanation_spec] must be
+ * populated.
*
*
* bool generate_explanation = 23;
@@ -8681,15 +8777,18 @@ public boolean getGenerateExplanation() {
*
*
*
- * Generate explanation along with the batch prediction results.
- * When it's true, the batch prediction output will change based on the
- * [output format][BatchPredictionJob.output_config.predictions_format]:
- * * `bigquery`: output will include a column named `explanation`. The value
+ * Generate explanation with the batch prediction results.
+ * When set to `true`, the batch prediction output changes based on the
+ * `predictions_format` field of the
+ * [BatchPredictionJob.output_config][google.cloud.aiplatform.v1beta1.BatchPredictionJob.output_config] object:
+ * * `bigquery`: output includes a column named `explanation`. The value
* is a struct that conforms to the [Explanation][google.cloud.aiplatform.v1beta1.Explanation] object.
- * * `jsonl`: The JSON objects on each line will include an additional entry
+ * * `jsonl`: The JSON objects on each line include an additional entry
* keyed `explanation`. The value of the entry is a JSON object that
* conforms to the [Explanation][google.cloud.aiplatform.v1beta1.Explanation] object.
* * `csv`: Generating explanations for CSV format is not supported.
+ * If this field is set to true, the [Model.explanation_spec][google.cloud.aiplatform.v1beta1.Model.explanation_spec] must be
+ * populated.
*
*
* bool generate_explanation = 23;
@@ -8707,15 +8806,18 @@ public Builder setGenerateExplanation(boolean value) {
*
*
*
- * Generate explanation along with the batch prediction results.
- * When it's true, the batch prediction output will change based on the
- * [output format][BatchPredictionJob.output_config.predictions_format]:
- * * `bigquery`: output will include a column named `explanation`. The value
+ * Generate explanation with the batch prediction results.
+ * When set to `true`, the batch prediction output changes based on the
+ * `predictions_format` field of the
+ * [BatchPredictionJob.output_config][google.cloud.aiplatform.v1beta1.BatchPredictionJob.output_config] object:
+ * * `bigquery`: output includes a column named `explanation`. The value
* is a struct that conforms to the [Explanation][google.cloud.aiplatform.v1beta1.Explanation] object.
- * * `jsonl`: The JSON objects on each line will include an additional entry
+ * * `jsonl`: The JSON objects on each line include an additional entry
* keyed `explanation`. The value of the entry is a JSON object that
* conforms to the [Explanation][google.cloud.aiplatform.v1beta1.Explanation] object.
* * `csv`: Generating explanations for CSV format is not supported.
+ * If this field is set to true, the [Model.explanation_spec][google.cloud.aiplatform.v1beta1.Model.explanation_spec] must be
+ * populated.
*
*
* bool generate_explanation = 23;
@@ -8739,15 +8841,12 @@ public Builder clearGenerateExplanation() {
*
*
*
- * Explanation configuration for this BatchPredictionJob. Can only be
- * specified if [generate_explanation][google.cloud.aiplatform.v1beta1.BatchPredictionJob.generate_explanation] is set to `true`. It's invalid to
- * specified it with generate_explanation set to false or unset.
+ * Explanation configuration for this BatchPredictionJob. Can be
+ * specified only if [generate_explanation][google.cloud.aiplatform.v1beta1.BatchPredictionJob.generate_explanation] is set to `true`.
* This value overrides the value of [Model.explanation_spec][google.cloud.aiplatform.v1beta1.Model.explanation_spec]. All fields of
- * [explanation_spec][google.cloud.aiplatform.v1beta1.BatchPredictionJob.explanation_spec] are optional in the request. If a field of
- * [explanation_spec][google.cloud.aiplatform.v1beta1.BatchPredictionJob.explanation_spec] is not populated, the value of the same field of
- * [Model.explanation_spec][google.cloud.aiplatform.v1beta1.Model.explanation_spec] is inherited. The corresponding
- * [Model.explanation_spec][google.cloud.aiplatform.v1beta1.Model.explanation_spec] must be populated, otherwise explanation for
- * this Model is not allowed.
+ * [explanation_spec][google.cloud.aiplatform.v1beta1.BatchPredictionJob.explanation_spec] are optional in the request. If a field of the
+ * [explanation_spec][google.cloud.aiplatform.v1beta1.BatchPredictionJob.explanation_spec] object is not populated, the corresponding field of
+ * the [Model.explanation_spec][google.cloud.aiplatform.v1beta1.Model.explanation_spec] object is inherited.
*
*
* .google.cloud.aiplatform.v1beta1.ExplanationSpec explanation_spec = 25;
@@ -8761,15 +8860,12 @@ public boolean hasExplanationSpec() {
*
*
*
- * Explanation configuration for this BatchPredictionJob. Can only be
- * specified if [generate_explanation][google.cloud.aiplatform.v1beta1.BatchPredictionJob.generate_explanation] is set to `true`. It's invalid to
- * specified it with generate_explanation set to false or unset.
+ * Explanation configuration for this BatchPredictionJob. Can be
+ * specified only if [generate_explanation][google.cloud.aiplatform.v1beta1.BatchPredictionJob.generate_explanation] is set to `true`.
* This value overrides the value of [Model.explanation_spec][google.cloud.aiplatform.v1beta1.Model.explanation_spec]. All fields of
- * [explanation_spec][google.cloud.aiplatform.v1beta1.BatchPredictionJob.explanation_spec] are optional in the request. If a field of
- * [explanation_spec][google.cloud.aiplatform.v1beta1.BatchPredictionJob.explanation_spec] is not populated, the value of the same field of
- * [Model.explanation_spec][google.cloud.aiplatform.v1beta1.Model.explanation_spec] is inherited. The corresponding
- * [Model.explanation_spec][google.cloud.aiplatform.v1beta1.Model.explanation_spec] must be populated, otherwise explanation for
- * this Model is not allowed.
+ * [explanation_spec][google.cloud.aiplatform.v1beta1.BatchPredictionJob.explanation_spec] are optional in the request. If a field of the
+ * [explanation_spec][google.cloud.aiplatform.v1beta1.BatchPredictionJob.explanation_spec] object is not populated, the corresponding field of
+ * the [Model.explanation_spec][google.cloud.aiplatform.v1beta1.Model.explanation_spec] object is inherited.
*
*
* .google.cloud.aiplatform.v1beta1.ExplanationSpec explanation_spec = 25;
@@ -8789,15 +8885,12 @@ public com.google.cloud.aiplatform.v1beta1.ExplanationSpec getExplanationSpec()
*
*
*
- * Explanation configuration for this BatchPredictionJob. Can only be
- * specified if [generate_explanation][google.cloud.aiplatform.v1beta1.BatchPredictionJob.generate_explanation] is set to `true`. It's invalid to
- * specified it with generate_explanation set to false or unset.
+ * Explanation configuration for this BatchPredictionJob. Can be
+ * specified only if [generate_explanation][google.cloud.aiplatform.v1beta1.BatchPredictionJob.generate_explanation] is set to `true`.
* This value overrides the value of [Model.explanation_spec][google.cloud.aiplatform.v1beta1.Model.explanation_spec]. All fields of
- * [explanation_spec][google.cloud.aiplatform.v1beta1.BatchPredictionJob.explanation_spec] are optional in the request. If a field of
- * [explanation_spec][google.cloud.aiplatform.v1beta1.BatchPredictionJob.explanation_spec] is not populated, the value of the same field of
- * [Model.explanation_spec][google.cloud.aiplatform.v1beta1.Model.explanation_spec] is inherited. The corresponding
- * [Model.explanation_spec][google.cloud.aiplatform.v1beta1.Model.explanation_spec] must be populated, otherwise explanation for
- * this Model is not allowed.
+ * [explanation_spec][google.cloud.aiplatform.v1beta1.BatchPredictionJob.explanation_spec] are optional in the request. If a field of the
+ * [explanation_spec][google.cloud.aiplatform.v1beta1.BatchPredictionJob.explanation_spec] object is not populated, the corresponding field of
+ * the [Model.explanation_spec][google.cloud.aiplatform.v1beta1.Model.explanation_spec] object is inherited.
*
*
* .google.cloud.aiplatform.v1beta1.ExplanationSpec explanation_spec = 25;
@@ -8819,15 +8912,12 @@ public Builder setExplanationSpec(com.google.cloud.aiplatform.v1beta1.Explanatio
*
*
*
- * Explanation configuration for this BatchPredictionJob. Can only be
- * specified if [generate_explanation][google.cloud.aiplatform.v1beta1.BatchPredictionJob.generate_explanation] is set to `true`. It's invalid to
- * specified it with generate_explanation set to false or unset.
+ * Explanation configuration for this BatchPredictionJob. Can be
+ * specified only if [generate_explanation][google.cloud.aiplatform.v1beta1.BatchPredictionJob.generate_explanation] is set to `true`.
* This value overrides the value of [Model.explanation_spec][google.cloud.aiplatform.v1beta1.Model.explanation_spec]. All fields of
- * [explanation_spec][google.cloud.aiplatform.v1beta1.BatchPredictionJob.explanation_spec] are optional in the request. If a field of
- * [explanation_spec][google.cloud.aiplatform.v1beta1.BatchPredictionJob.explanation_spec] is not populated, the value of the same field of
- * [Model.explanation_spec][google.cloud.aiplatform.v1beta1.Model.explanation_spec] is inherited. The corresponding
- * [Model.explanation_spec][google.cloud.aiplatform.v1beta1.Model.explanation_spec] must be populated, otherwise explanation for
- * this Model is not allowed.
+ * [explanation_spec][google.cloud.aiplatform.v1beta1.BatchPredictionJob.explanation_spec] are optional in the request. If a field of the
+ * [explanation_spec][google.cloud.aiplatform.v1beta1.BatchPredictionJob.explanation_spec] object is not populated, the corresponding field of
+ * the [Model.explanation_spec][google.cloud.aiplatform.v1beta1.Model.explanation_spec] object is inherited.
*
*
* .google.cloud.aiplatform.v1beta1.ExplanationSpec explanation_spec = 25;
@@ -8847,15 +8937,12 @@ public Builder setExplanationSpec(
*
*
*
- * Explanation configuration for this BatchPredictionJob. Can only be
- * specified if [generate_explanation][google.cloud.aiplatform.v1beta1.BatchPredictionJob.generate_explanation] is set to `true`. It's invalid to
- * specified it with generate_explanation set to false or unset.
+ * Explanation configuration for this BatchPredictionJob. Can be
+ * specified only if [generate_explanation][google.cloud.aiplatform.v1beta1.BatchPredictionJob.generate_explanation] is set to `true`.
* This value overrides the value of [Model.explanation_spec][google.cloud.aiplatform.v1beta1.Model.explanation_spec]. All fields of
- * [explanation_spec][google.cloud.aiplatform.v1beta1.BatchPredictionJob.explanation_spec] are optional in the request. If a field of
- * [explanation_spec][google.cloud.aiplatform.v1beta1.BatchPredictionJob.explanation_spec] is not populated, the value of the same field of
- * [Model.explanation_spec][google.cloud.aiplatform.v1beta1.Model.explanation_spec] is inherited. The corresponding
- * [Model.explanation_spec][google.cloud.aiplatform.v1beta1.Model.explanation_spec] must be populated, otherwise explanation for
- * this Model is not allowed.
+ * [explanation_spec][google.cloud.aiplatform.v1beta1.BatchPredictionJob.explanation_spec] are optional in the request. If a field of the
+ * [explanation_spec][google.cloud.aiplatform.v1beta1.BatchPredictionJob.explanation_spec] object is not populated, the corresponding field of
+ * the [Model.explanation_spec][google.cloud.aiplatform.v1beta1.Model.explanation_spec] object is inherited.
*
*
* .google.cloud.aiplatform.v1beta1.ExplanationSpec explanation_spec = 25;
@@ -8881,15 +8968,12 @@ public Builder mergeExplanationSpec(com.google.cloud.aiplatform.v1beta1.Explanat
*
*
*
- * Explanation configuration for this BatchPredictionJob. Can only be
- * specified if [generate_explanation][google.cloud.aiplatform.v1beta1.BatchPredictionJob.generate_explanation] is set to `true`. It's invalid to
- * specified it with generate_explanation set to false or unset.
+ * Explanation configuration for this BatchPredictionJob. Can be
+ * specified only if [generate_explanation][google.cloud.aiplatform.v1beta1.BatchPredictionJob.generate_explanation] is set to `true`.
* This value overrides the value of [Model.explanation_spec][google.cloud.aiplatform.v1beta1.Model.explanation_spec]. All fields of
- * [explanation_spec][google.cloud.aiplatform.v1beta1.BatchPredictionJob.explanation_spec] are optional in the request. If a field of
- * [explanation_spec][google.cloud.aiplatform.v1beta1.BatchPredictionJob.explanation_spec] is not populated, the value of the same field of
- * [Model.explanation_spec][google.cloud.aiplatform.v1beta1.Model.explanation_spec] is inherited. The corresponding
- * [Model.explanation_spec][google.cloud.aiplatform.v1beta1.Model.explanation_spec] must be populated, otherwise explanation for
- * this Model is not allowed.
+ * [explanation_spec][google.cloud.aiplatform.v1beta1.BatchPredictionJob.explanation_spec] are optional in the request. If a field of the
+ * [explanation_spec][google.cloud.aiplatform.v1beta1.BatchPredictionJob.explanation_spec] object is not populated, the corresponding field of
+ * the [Model.explanation_spec][google.cloud.aiplatform.v1beta1.Model.explanation_spec] object is inherited.
*
*
* .google.cloud.aiplatform.v1beta1.ExplanationSpec explanation_spec = 25;
@@ -8909,15 +8993,12 @@ public Builder clearExplanationSpec() {
*
*
*
- * Explanation configuration for this BatchPredictionJob. Can only be
- * specified if [generate_explanation][google.cloud.aiplatform.v1beta1.BatchPredictionJob.generate_explanation] is set to `true`. It's invalid to
- * specified it with generate_explanation set to false or unset.
+ * Explanation configuration for this BatchPredictionJob. Can be
+ * specified only if [generate_explanation][google.cloud.aiplatform.v1beta1.BatchPredictionJob.generate_explanation] is set to `true`.
* This value overrides the value of [Model.explanation_spec][google.cloud.aiplatform.v1beta1.Model.explanation_spec]. All fields of
- * [explanation_spec][google.cloud.aiplatform.v1beta1.BatchPredictionJob.explanation_spec] are optional in the request. If a field of
- * [explanation_spec][google.cloud.aiplatform.v1beta1.BatchPredictionJob.explanation_spec] is not populated, the value of the same field of
- * [Model.explanation_spec][google.cloud.aiplatform.v1beta1.Model.explanation_spec] is inherited. The corresponding
- * [Model.explanation_spec][google.cloud.aiplatform.v1beta1.Model.explanation_spec] must be populated, otherwise explanation for
- * this Model is not allowed.
+ * [explanation_spec][google.cloud.aiplatform.v1beta1.BatchPredictionJob.explanation_spec] are optional in the request. If a field of the
+ * [explanation_spec][google.cloud.aiplatform.v1beta1.BatchPredictionJob.explanation_spec] object is not populated, the corresponding field of
+ * the [Model.explanation_spec][google.cloud.aiplatform.v1beta1.Model.explanation_spec] object is inherited.
*
*
* .google.cloud.aiplatform.v1beta1.ExplanationSpec explanation_spec = 25;
@@ -8931,15 +9012,12 @@ public com.google.cloud.aiplatform.v1beta1.ExplanationSpec.Builder getExplanatio
*
*
*
- * Explanation configuration for this BatchPredictionJob. Can only be
- * specified if [generate_explanation][google.cloud.aiplatform.v1beta1.BatchPredictionJob.generate_explanation] is set to `true`. It's invalid to
- * specified it with generate_explanation set to false or unset.
+ * Explanation configuration for this BatchPredictionJob. Can be
+ * specified only if [generate_explanation][google.cloud.aiplatform.v1beta1.BatchPredictionJob.generate_explanation] is set to `true`.
* This value overrides the value of [Model.explanation_spec][google.cloud.aiplatform.v1beta1.Model.explanation_spec]. All fields of
- * [explanation_spec][google.cloud.aiplatform.v1beta1.BatchPredictionJob.explanation_spec] are optional in the request. If a field of
- * [explanation_spec][google.cloud.aiplatform.v1beta1.BatchPredictionJob.explanation_spec] is not populated, the value of the same field of
- * [Model.explanation_spec][google.cloud.aiplatform.v1beta1.Model.explanation_spec] is inherited. The corresponding
- * [Model.explanation_spec][google.cloud.aiplatform.v1beta1.Model.explanation_spec] must be populated, otherwise explanation for
- * this Model is not allowed.
+ * [explanation_spec][google.cloud.aiplatform.v1beta1.BatchPredictionJob.explanation_spec] are optional in the request. If a field of the
+ * [explanation_spec][google.cloud.aiplatform.v1beta1.BatchPredictionJob.explanation_spec] object is not populated, the corresponding field of
+ * the [Model.explanation_spec][google.cloud.aiplatform.v1beta1.Model.explanation_spec] object is inherited.
*
*
* .google.cloud.aiplatform.v1beta1.ExplanationSpec explanation_spec = 25;
@@ -8958,15 +9036,12 @@ public com.google.cloud.aiplatform.v1beta1.ExplanationSpec.Builder getExplanatio
*
*
*
- * Explanation configuration for this BatchPredictionJob. Can only be
- * specified if [generate_explanation][google.cloud.aiplatform.v1beta1.BatchPredictionJob.generate_explanation] is set to `true`. It's invalid to
- * specified it with generate_explanation set to false or unset.
+ * Explanation configuration for this BatchPredictionJob. Can be
+ * specified only if [generate_explanation][google.cloud.aiplatform.v1beta1.BatchPredictionJob.generate_explanation] is set to `true`.
* This value overrides the value of [Model.explanation_spec][google.cloud.aiplatform.v1beta1.Model.explanation_spec]. All fields of
- * [explanation_spec][google.cloud.aiplatform.v1beta1.BatchPredictionJob.explanation_spec] are optional in the request. If a field of
- * [explanation_spec][google.cloud.aiplatform.v1beta1.BatchPredictionJob.explanation_spec] is not populated, the value of the same field of
- * [Model.explanation_spec][google.cloud.aiplatform.v1beta1.Model.explanation_spec] is inherited. The corresponding
- * [Model.explanation_spec][google.cloud.aiplatform.v1beta1.Model.explanation_spec] must be populated, otherwise explanation for
- * this Model is not allowed.
+ * [explanation_spec][google.cloud.aiplatform.v1beta1.BatchPredictionJob.explanation_spec] are optional in the request. If a field of the
+ * [explanation_spec][google.cloud.aiplatform.v1beta1.BatchPredictionJob.explanation_spec] object is not populated, the corresponding field of
+ * the [Model.explanation_spec][google.cloud.aiplatform.v1beta1.Model.explanation_spec] object is inherited.
*
*
* .google.cloud.aiplatform.v1beta1.ExplanationSpec explanation_spec = 25;
@@ -11354,6 +11429,211 @@ public Builder putAllLabels(java.util.Map
+ * Customer-managed encryption key options for a BatchPredictionJob. If this
+ * is set, then all resources created by the BatchPredictionJob will be
+ * encrypted with the provided encryption key.
+ *
+ *
+ * .google.cloud.aiplatform.v1beta1.EncryptionSpec encryption_spec = 24;
+ *
+ * @return Whether the encryptionSpec field is set.
+ */
+ public boolean hasEncryptionSpec() {
+ return encryptionSpecBuilder_ != null || encryptionSpec_ != null;
+ }
+ /**
+ *
+ *
+ *
+ * Customer-managed encryption key options for a BatchPredictionJob. If this
+ * is set, then all resources created by the BatchPredictionJob will be
+ * encrypted with the provided encryption key.
+ *
+ *
+ * .google.cloud.aiplatform.v1beta1.EncryptionSpec encryption_spec = 24;
+ *
+ * @return The encryptionSpec.
+ */
+ public com.google.cloud.aiplatform.v1beta1.EncryptionSpec getEncryptionSpec() {
+ if (encryptionSpecBuilder_ == null) {
+ return encryptionSpec_ == null
+ ? com.google.cloud.aiplatform.v1beta1.EncryptionSpec.getDefaultInstance()
+ : encryptionSpec_;
+ } else {
+ return encryptionSpecBuilder_.getMessage();
+ }
+ }
+ /**
+ *
+ *
+ *
+ * Customer-managed encryption key options for a BatchPredictionJob. If this
+ * is set, then all resources created by the BatchPredictionJob will be
+ * encrypted with the provided encryption key.
+ *
+ *
+ * .google.cloud.aiplatform.v1beta1.EncryptionSpec encryption_spec = 24;
+ */
+ public Builder setEncryptionSpec(com.google.cloud.aiplatform.v1beta1.EncryptionSpec value) {
+ if (encryptionSpecBuilder_ == null) {
+ if (value == null) {
+ throw new NullPointerException();
+ }
+ encryptionSpec_ = value;
+ onChanged();
+ } else {
+ encryptionSpecBuilder_.setMessage(value);
+ }
+
+ return this;
+ }
+ /**
+ *
+ *
+ *
+ * Customer-managed encryption key options for a BatchPredictionJob. If this
+ * is set, then all resources created by the BatchPredictionJob will be
+ * encrypted with the provided encryption key.
+ *
+ *
+ * .google.cloud.aiplatform.v1beta1.EncryptionSpec encryption_spec = 24;
+ */
+ public Builder setEncryptionSpec(
+ com.google.cloud.aiplatform.v1beta1.EncryptionSpec.Builder builderForValue) {
+ if (encryptionSpecBuilder_ == null) {
+ encryptionSpec_ = builderForValue.build();
+ onChanged();
+ } else {
+ encryptionSpecBuilder_.setMessage(builderForValue.build());
+ }
+
+ return this;
+ }
+ /**
+ *
+ *
+ *
+ * Customer-managed encryption key options for a BatchPredictionJob. If this
+ * is set, then all resources created by the BatchPredictionJob will be
+ * encrypted with the provided encryption key.
+ *
+ *
+ * .google.cloud.aiplatform.v1beta1.EncryptionSpec encryption_spec = 24;
+ */
+ public Builder mergeEncryptionSpec(com.google.cloud.aiplatform.v1beta1.EncryptionSpec value) {
+ if (encryptionSpecBuilder_ == null) {
+ if (encryptionSpec_ != null) {
+ encryptionSpec_ =
+ com.google.cloud.aiplatform.v1beta1.EncryptionSpec.newBuilder(encryptionSpec_)
+ .mergeFrom(value)
+ .buildPartial();
+ } else {
+ encryptionSpec_ = value;
+ }
+ onChanged();
+ } else {
+ encryptionSpecBuilder_.mergeFrom(value);
+ }
+
+ return this;
+ }
+ /**
+ *
+ *
+ *
+ * Customer-managed encryption key options for a BatchPredictionJob. If this
+ * is set, then all resources created by the BatchPredictionJob will be
+ * encrypted with the provided encryption key.
+ *
+ *
+ * .google.cloud.aiplatform.v1beta1.EncryptionSpec encryption_spec = 24;
+ */
+ public Builder clearEncryptionSpec() {
+ if (encryptionSpecBuilder_ == null) {
+ encryptionSpec_ = null;
+ onChanged();
+ } else {
+ encryptionSpec_ = null;
+ encryptionSpecBuilder_ = null;
+ }
+
+ return this;
+ }
+ /**
+ *
+ *
+ *
+ * Customer-managed encryption key options for a BatchPredictionJob. If this
+ * is set, then all resources created by the BatchPredictionJob will be
+ * encrypted with the provided encryption key.
+ *
+ *
+ * .google.cloud.aiplatform.v1beta1.EncryptionSpec encryption_spec = 24;
+ */
+ public com.google.cloud.aiplatform.v1beta1.EncryptionSpec.Builder getEncryptionSpecBuilder() {
+
+ onChanged();
+ return getEncryptionSpecFieldBuilder().getBuilder();
+ }
+ /**
+ *
+ *
+ *
+ * Customer-managed encryption key options for a BatchPredictionJob. If this
+ * is set, then all resources created by the BatchPredictionJob will be
+ * encrypted with the provided encryption key.
+ *
+ *
+ * .google.cloud.aiplatform.v1beta1.EncryptionSpec encryption_spec = 24;
+ */
+ public com.google.cloud.aiplatform.v1beta1.EncryptionSpecOrBuilder
+ getEncryptionSpecOrBuilder() {
+ if (encryptionSpecBuilder_ != null) {
+ return encryptionSpecBuilder_.getMessageOrBuilder();
+ } else {
+ return encryptionSpec_ == null
+ ? com.google.cloud.aiplatform.v1beta1.EncryptionSpec.getDefaultInstance()
+ : encryptionSpec_;
+ }
+ }
+ /**
+ *
+ *
+ *
+ * Customer-managed encryption key options for a BatchPredictionJob. If this
+ * is set, then all resources created by the BatchPredictionJob will be
+ * encrypted with the provided encryption key.
+ *
+ *
+ * .google.cloud.aiplatform.v1beta1.EncryptionSpec encryption_spec = 24;
+ */
+ private com.google.protobuf.SingleFieldBuilderV3<
+ com.google.cloud.aiplatform.v1beta1.EncryptionSpec,
+ com.google.cloud.aiplatform.v1beta1.EncryptionSpec.Builder,
+ com.google.cloud.aiplatform.v1beta1.EncryptionSpecOrBuilder>
+ getEncryptionSpecFieldBuilder() {
+ if (encryptionSpecBuilder_ == null) {
+ encryptionSpecBuilder_ =
+ new com.google.protobuf.SingleFieldBuilderV3<
+ com.google.cloud.aiplatform.v1beta1.EncryptionSpec,
+ com.google.cloud.aiplatform.v1beta1.EncryptionSpec.Builder,
+ com.google.cloud.aiplatform.v1beta1.EncryptionSpecOrBuilder>(
+ getEncryptionSpec(), getParentForChildren(), isClean());
+ encryptionSpec_ = null;
+ }
+ return encryptionSpecBuilder_;
+ }
+
@java.lang.Override
public final Builder setUnknownFields(final com.google.protobuf.UnknownFieldSet unknownFields) {
return super.setUnknownFields(unknownFields);
diff --git a/proto-google-cloud-aiplatform-v1beta1/src/main/java/com/google/cloud/aiplatform/v1beta1/BatchPredictionJobOrBuilder.java b/proto-google-cloud-aiplatform-v1beta1/src/main/java/com/google/cloud/aiplatform/v1beta1/BatchPredictionJobOrBuilder.java
index 98a1d0819..bb249963f 100644
--- a/proto-google-cloud-aiplatform-v1beta1/src/main/java/com/google/cloud/aiplatform/v1beta1/BatchPredictionJobOrBuilder.java
+++ b/proto-google-cloud-aiplatform-v1beta1/src/main/java/com/google/cloud/aiplatform/v1beta1/BatchPredictionJobOrBuilder.java
@@ -369,15 +369,18 @@ public interface BatchPredictionJobOrBuilder
*
*
*
- * Generate explanation along with the batch prediction results.
- * When it's true, the batch prediction output will change based on the
- * [output format][BatchPredictionJob.output_config.predictions_format]:
- * * `bigquery`: output will include a column named `explanation`. The value
+ * Generate explanation with the batch prediction results.
+ * When set to `true`, the batch prediction output changes based on the
+ * `predictions_format` field of the
+ * [BatchPredictionJob.output_config][google.cloud.aiplatform.v1beta1.BatchPredictionJob.output_config] object:
+ * * `bigquery`: output includes a column named `explanation`. The value
* is a struct that conforms to the [Explanation][google.cloud.aiplatform.v1beta1.Explanation] object.
- * * `jsonl`: The JSON objects on each line will include an additional entry
+ * * `jsonl`: The JSON objects on each line include an additional entry
* keyed `explanation`. The value of the entry is a JSON object that
* conforms to the [Explanation][google.cloud.aiplatform.v1beta1.Explanation] object.
* * `csv`: Generating explanations for CSV format is not supported.
+ * If this field is set to true, the [Model.explanation_spec][google.cloud.aiplatform.v1beta1.Model.explanation_spec] must be
+ * populated.
*
*
* bool generate_explanation = 23;
@@ -390,15 +393,12 @@ public interface BatchPredictionJobOrBuilder
*
*
*
- * Explanation configuration for this BatchPredictionJob. Can only be
- * specified if [generate_explanation][google.cloud.aiplatform.v1beta1.BatchPredictionJob.generate_explanation] is set to `true`. It's invalid to
- * specified it with generate_explanation set to false or unset.
+ * Explanation configuration for this BatchPredictionJob. Can be
+ * specified only if [generate_explanation][google.cloud.aiplatform.v1beta1.BatchPredictionJob.generate_explanation] is set to `true`.
* This value overrides the value of [Model.explanation_spec][google.cloud.aiplatform.v1beta1.Model.explanation_spec]. All fields of
- * [explanation_spec][google.cloud.aiplatform.v1beta1.BatchPredictionJob.explanation_spec] are optional in the request. If a field of
- * [explanation_spec][google.cloud.aiplatform.v1beta1.BatchPredictionJob.explanation_spec] is not populated, the value of the same field of
- * [Model.explanation_spec][google.cloud.aiplatform.v1beta1.Model.explanation_spec] is inherited. The corresponding
- * [Model.explanation_spec][google.cloud.aiplatform.v1beta1.Model.explanation_spec] must be populated, otherwise explanation for
- * this Model is not allowed.
+ * [explanation_spec][google.cloud.aiplatform.v1beta1.BatchPredictionJob.explanation_spec] are optional in the request. If a field of the
+ * [explanation_spec][google.cloud.aiplatform.v1beta1.BatchPredictionJob.explanation_spec] object is not populated, the corresponding field of
+ * the [Model.explanation_spec][google.cloud.aiplatform.v1beta1.Model.explanation_spec] object is inherited.
*
*
* .google.cloud.aiplatform.v1beta1.ExplanationSpec explanation_spec = 25;
@@ -410,15 +410,12 @@ public interface BatchPredictionJobOrBuilder
*
*
*
- * Explanation configuration for this BatchPredictionJob. Can only be
- * specified if [generate_explanation][google.cloud.aiplatform.v1beta1.BatchPredictionJob.generate_explanation] is set to `true`. It's invalid to
- * specified it with generate_explanation set to false or unset.
+ * Explanation configuration for this BatchPredictionJob. Can be
+ * specified only if [generate_explanation][google.cloud.aiplatform.v1beta1.BatchPredictionJob.generate_explanation] is set to `true`.
* This value overrides the value of [Model.explanation_spec][google.cloud.aiplatform.v1beta1.Model.explanation_spec]. All fields of
- * [explanation_spec][google.cloud.aiplatform.v1beta1.BatchPredictionJob.explanation_spec] are optional in the request. If a field of
- * [explanation_spec][google.cloud.aiplatform.v1beta1.BatchPredictionJob.explanation_spec] is not populated, the value of the same field of
- * [Model.explanation_spec][google.cloud.aiplatform.v1beta1.Model.explanation_spec] is inherited. The corresponding
- * [Model.explanation_spec][google.cloud.aiplatform.v1beta1.Model.explanation_spec] must be populated, otherwise explanation for
- * this Model is not allowed.
+ * [explanation_spec][google.cloud.aiplatform.v1beta1.BatchPredictionJob.explanation_spec] are optional in the request. If a field of the
+ * [explanation_spec][google.cloud.aiplatform.v1beta1.BatchPredictionJob.explanation_spec] object is not populated, the corresponding field of
+ * the [Model.explanation_spec][google.cloud.aiplatform.v1beta1.Model.explanation_spec] object is inherited.
*
*
* .google.cloud.aiplatform.v1beta1.ExplanationSpec explanation_spec = 25;
@@ -430,15 +427,12 @@ public interface BatchPredictionJobOrBuilder
*
*
*
- * Explanation configuration for this BatchPredictionJob. Can only be
- * specified if [generate_explanation][google.cloud.aiplatform.v1beta1.BatchPredictionJob.generate_explanation] is set to `true`. It's invalid to
- * specified it with generate_explanation set to false or unset.
+ * Explanation configuration for this BatchPredictionJob. Can be
+ * specified only if [generate_explanation][google.cloud.aiplatform.v1beta1.BatchPredictionJob.generate_explanation] is set to `true`.
* This value overrides the value of [Model.explanation_spec][google.cloud.aiplatform.v1beta1.Model.explanation_spec]. All fields of
- * [explanation_spec][google.cloud.aiplatform.v1beta1.BatchPredictionJob.explanation_spec] are optional in the request. If a field of
- * [explanation_spec][google.cloud.aiplatform.v1beta1.BatchPredictionJob.explanation_spec] is not populated, the value of the same field of
- * [Model.explanation_spec][google.cloud.aiplatform.v1beta1.Model.explanation_spec] is inherited. The corresponding
- * [Model.explanation_spec][google.cloud.aiplatform.v1beta1.Model.explanation_spec] must be populated, otherwise explanation for
- * this Model is not allowed.
+ * [explanation_spec][google.cloud.aiplatform.v1beta1.BatchPredictionJob.explanation_spec] are optional in the request. If a field of the
+ * [explanation_spec][google.cloud.aiplatform.v1beta1.BatchPredictionJob.explanation_spec] object is not populated, the corresponding field of
+ * the [Model.explanation_spec][google.cloud.aiplatform.v1beta1.Model.explanation_spec] object is inherited.
*
*
* .google.cloud.aiplatform.v1beta1.ExplanationSpec explanation_spec = 25;
@@ -955,4 +949,45 @@ public interface BatchPredictionJobOrBuilder
* map<string, string> labels = 19;
*/
java.lang.String getLabelsOrThrow(java.lang.String key);
+
+ /**
+ *
+ *
+ *
+ * Customer-managed encryption key options for a BatchPredictionJob. If this
+ * is set, then all resources created by the BatchPredictionJob will be
+ * encrypted with the provided encryption key.
+ *
+ *
+ * .google.cloud.aiplatform.v1beta1.EncryptionSpec encryption_spec = 24;
+ *
+ * @return Whether the encryptionSpec field is set.
+ */
+ boolean hasEncryptionSpec();
+ /**
+ *
+ *
+ *
+ * Customer-managed encryption key options for a BatchPredictionJob. If this
+ * is set, then all resources created by the BatchPredictionJob will be
+ * encrypted with the provided encryption key.
+ *
+ *
+ * .google.cloud.aiplatform.v1beta1.EncryptionSpec encryption_spec = 24;
+ *
+ * @return The encryptionSpec.
+ */
+ com.google.cloud.aiplatform.v1beta1.EncryptionSpec getEncryptionSpec();
+ /**
+ *
+ *
+ *
+ * Customer-managed encryption key options for a BatchPredictionJob. If this
+ * is set, then all resources created by the BatchPredictionJob will be
+ * encrypted with the provided encryption key.
+ *
+ *
+ * .google.cloud.aiplatform.v1beta1.EncryptionSpec encryption_spec = 24;
+ */
+ com.google.cloud.aiplatform.v1beta1.EncryptionSpecOrBuilder getEncryptionSpecOrBuilder();
}
diff --git a/proto-google-cloud-aiplatform-v1beta1/src/main/java/com/google/cloud/aiplatform/v1beta1/BatchPredictionJobProto.java b/proto-google-cloud-aiplatform-v1beta1/src/main/java/com/google/cloud/aiplatform/v1beta1/BatchPredictionJobProto.java
index fb08c35ec..afffd8030 100644
--- a/proto-google-cloud-aiplatform-v1beta1/src/main/java/com/google/cloud/aiplatform/v1beta1/BatchPredictionJobProto.java
+++ b/proto-google-cloud-aiplatform-v1beta1/src/main/java/com/google/cloud/aiplatform/v1beta1/BatchPredictionJobProto.java
@@ -61,72 +61,75 @@ public static com.google.protobuf.Descriptors.FileDescriptor getDescriptor() {
+ "tform.v1beta1\032\037google/api/field_behavior"
+ ".proto\032\031google/api/resource.proto\0326googl"
+ "e/cloud/aiplatform/v1beta1/completion_st"
- + "ats.proto\0321google/cloud/aiplatform/v1bet"
- + "a1/explanation.proto\032(google/cloud/aipla"
- + "tform/v1beta1/io.proto\032/google/cloud/aip"
- + "latform/v1beta1/job_state.proto\0327google/"
- + "cloud/aiplatform/v1beta1/machine_resourc"
- + "es.proto\032Dgoogle/cloud/aiplatform/v1beta"
- + "1/manual_batch_tuning_parameters.proto\032\034"
- + "google/protobuf/struct.proto\032\037google/pro"
- + "tobuf/timestamp.proto\032\027google/rpc/status"
- + ".proto\032\034google/api/annotations.proto\"\241\020\n"
- + "\022BatchPredictionJob\022\021\n\004name\030\001 \001(\tB\003\340A\003\022\031"
- + "\n\014display_name\030\002 \001(\tB\003\340A\002\0226\n\005model\030\003 \001(\t"
- + "B\'\340A\002\372A!\n\037aiplatform.googleapis.com/Mode"
- + "l\022Z\n\014input_config\030\004 \001(\0132?.google.cloud.a"
- + "iplatform.v1beta1.BatchPredictionJob.Inp"
- + "utConfigB\003\340A\002\0220\n\020model_parameters\030\005 \001(\0132"
- + "\026.google.protobuf.Value\022\\\n\routput_config"
- + "\030\006 \001(\0132@.google.cloud.aiplatform.v1beta1"
- + ".BatchPredictionJob.OutputConfigB\003\340A\002\022U\n"
- + "\023dedicated_resources\030\007 \001(\01328.google.clou"
- + "d.aiplatform.v1beta1.BatchDedicatedResou"
- + "rces\022i\n\036manual_batch_tuning_parameters\030\010"
- + " \001(\0132<.google.cloud.aiplatform.v1beta1.M"
- + "anualBatchTuningParametersB\003\340A\005\022\034\n\024gener"
- + "ate_explanation\030\027 \001(\010\022J\n\020explanation_spe"
- + "c\030\031 \001(\01320.google.cloud.aiplatform.v1beta"
- + "1.ExplanationSpec\022X\n\013output_info\030\t \001(\0132>"
+ + "ats.proto\0325google/cloud/aiplatform/v1bet"
+ + "a1/encryption_spec.proto\0321google/cloud/a"
+ + "iplatform/v1beta1/explanation.proto\032(goo"
+ + "gle/cloud/aiplatform/v1beta1/io.proto\032/g"
+ + "oogle/cloud/aiplatform/v1beta1/job_state"
+ + ".proto\0327google/cloud/aiplatform/v1beta1/"
+ + "machine_resources.proto\032Dgoogle/cloud/ai"
+ + "platform/v1beta1/manual_batch_tuning_par"
+ + "ameters.proto\032\034google/protobuf/struct.pr"
+ + "oto\032\037google/protobuf/timestamp.proto\032\027go"
+ + "ogle/rpc/status.proto\032\034google/api/annota"
+ + "tions.proto\"\353\020\n\022BatchPredictionJob\022\021\n\004na"
+ + "me\030\001 \001(\tB\003\340A\003\022\031\n\014display_name\030\002 \001(\tB\003\340A\002"
+ + "\0226\n\005model\030\003 \001(\tB\'\340A\002\372A!\n\037aiplatform.goog"
+ + "leapis.com/Model\022Z\n\014input_config\030\004 \001(\0132?"
+ ".google.cloud.aiplatform.v1beta1.BatchPr"
- + "edictionJob.OutputInfoB\003\340A\003\022=\n\005state\030\n \001"
- + "(\0162).google.cloud.aiplatform.v1beta1.Job"
- + "StateB\003\340A\003\022&\n\005error\030\013 \001(\0132\022.google.rpc.S"
- + "tatusB\003\340A\003\0221\n\020partial_failures\030\014 \003(\0132\022.g"
- + "oogle.rpc.StatusB\003\340A\003\022S\n\022resources_consu"
- + "med\030\r \001(\01322.google.cloud.aiplatform.v1be"
- + "ta1.ResourcesConsumedB\003\340A\003\022O\n\020completion"
- + "_stats\030\016 \001(\01320.google.cloud.aiplatform.v"
- + "1beta1.CompletionStatsB\003\340A\003\0224\n\013create_ti"
- + "me\030\017 \001(\0132\032.google.protobuf.TimestampB\003\340A"
- + "\003\0223\n\nstart_time\030\020 \001(\0132\032.google.protobuf."
- + "TimestampB\003\340A\003\0221\n\010end_time\030\021 \001(\0132\032.googl"
- + "e.protobuf.TimestampB\003\340A\003\0224\n\013update_time"
- + "\030\022 \001(\0132\032.google.protobuf.TimestampB\003\340A\003\022"
- + "O\n\006labels\030\023 \003(\0132?.google.cloud.aiplatfor"
- + "m.v1beta1.BatchPredictionJob.LabelsEntry"
- + "\032\304\001\n\013InputConfig\022@\n\ngcs_source\030\002 \001(\0132*.g"
- + "oogle.cloud.aiplatform.v1beta1.GcsSource"
- + "H\000\022J\n\017bigquery_source\030\003 \001(\0132/.google.clo"
- + "ud.aiplatform.v1beta1.BigQuerySourceH\000\022\035"
- + "\n\020instances_format\030\001 \001(\tB\003\340A\002B\010\n\006source\032"
- + "\340\001\n\014OutputConfig\022J\n\017gcs_destination\030\002 \001("
- + "\0132/.google.cloud.aiplatform.v1beta1.GcsD"
- + "estinationH\000\022T\n\024bigquery_destination\030\003 \001"
- + "(\01324.google.cloud.aiplatform.v1beta1.Big"
- + "QueryDestinationH\000\022\037\n\022predictions_format"
- + "\030\001 \001(\tB\003\340A\002B\r\n\013destination\032l\n\nOutputInfo"
- + "\022#\n\024gcs_output_directory\030\001 \001(\tB\003\340A\003H\000\022&\n"
- + "\027bigquery_output_dataset\030\002 \001(\tB\003\340A\003H\000B\021\n"
- + "\017output_location\032-\n\013LabelsEntry\022\013\n\003key\030\001"
- + " \001(\t\022\r\n\005value\030\002 \001(\t:\0028\001:\206\001\352A\202\001\n,aiplatfo"
- + "rm.googleapis.com/BatchPredictionJob\022Rpr"
- + "ojects/{project}/locations/{location}/ba"
- + "tchPredictionJobs/{batch_prediction_job}"
- + "B\213\001\n#com.google.cloud.aiplatform.v1beta1"
- + "B\027BatchPredictionJobProtoP\001ZIgoogle.gola"
- + "ng.org/genproto/googleapis/cloud/aiplatf"
- + "orm/v1beta1;aiplatformb\006proto3"
+ + "edictionJob.InputConfigB\003\340A\002\0220\n\020model_pa"
+ + "rameters\030\005 \001(\0132\026.google.protobuf.Value\022\\"
+ + "\n\routput_config\030\006 \001(\0132@.google.cloud.aip"
+ + "latform.v1beta1.BatchPredictionJob.Outpu"
+ + "tConfigB\003\340A\002\022U\n\023dedicated_resources\030\007 \001("
+ + "\01328.google.cloud.aiplatform.v1beta1.Batc"
+ + "hDedicatedResources\022i\n\036manual_batch_tuni"
+ + "ng_parameters\030\010 \001(\0132<.google.cloud.aipla"
+ + "tform.v1beta1.ManualBatchTuningParameter"
+ + "sB\003\340A\005\022\034\n\024generate_explanation\030\027 \001(\010\022J\n\020"
+ + "explanation_spec\030\031 \001(\01320.google.cloud.ai"
+ + "platform.v1beta1.ExplanationSpec\022X\n\013outp"
+ + "ut_info\030\t \001(\0132>.google.cloud.aiplatform."
+ + "v1beta1.BatchPredictionJob.OutputInfoB\003\340"
+ + "A\003\022=\n\005state\030\n \001(\0162).google.cloud.aiplatf"
+ + "orm.v1beta1.JobStateB\003\340A\003\022&\n\005error\030\013 \001(\013"
+ + "2\022.google.rpc.StatusB\003\340A\003\0221\n\020partial_fai"
+ + "lures\030\014 \003(\0132\022.google.rpc.StatusB\003\340A\003\022S\n\022"
+ + "resources_consumed\030\r \001(\01322.google.cloud."
+ + "aiplatform.v1beta1.ResourcesConsumedB\003\340A"
+ + "\003\022O\n\020completion_stats\030\016 \001(\01320.google.clo"
+ + "ud.aiplatform.v1beta1.CompletionStatsB\003\340"
+ + "A\003\0224\n\013create_time\030\017 \001(\0132\032.google.protobu"
+ + "f.TimestampB\003\340A\003\0223\n\nstart_time\030\020 \001(\0132\032.g"
+ + "oogle.protobuf.TimestampB\003\340A\003\0221\n\010end_tim"
+ + "e\030\021 \001(\0132\032.google.protobuf.TimestampB\003\340A\003"
+ + "\0224\n\013update_time\030\022 \001(\0132\032.google.protobuf."
+ + "TimestampB\003\340A\003\022O\n\006labels\030\023 \003(\0132?.google."
+ + "cloud.aiplatform.v1beta1.BatchPrediction"
+ + "Job.LabelsEntry\022H\n\017encryption_spec\030\030 \001(\013"
+ + "2/.google.cloud.aiplatform.v1beta1.Encry"
+ + "ptionSpec\032\304\001\n\013InputConfig\022@\n\ngcs_source\030"
+ + "\002 \001(\0132*.google.cloud.aiplatform.v1beta1."
+ + "GcsSourceH\000\022J\n\017bigquery_source\030\003 \001(\0132/.g"
+ + "oogle.cloud.aiplatform.v1beta1.BigQueryS"
+ + "ourceH\000\022\035\n\020instances_format\030\001 \001(\tB\003\340A\002B\010"
+ + "\n\006source\032\340\001\n\014OutputConfig\022J\n\017gcs_destina"
+ + "tion\030\002 \001(\0132/.google.cloud.aiplatform.v1b"
+ + "eta1.GcsDestinationH\000\022T\n\024bigquery_destin"
+ + "ation\030\003 \001(\01324.google.cloud.aiplatform.v1"
+ + "beta1.BigQueryDestinationH\000\022\037\n\022predictio"
+ + "ns_format\030\001 \001(\tB\003\340A\002B\r\n\013destination\032l\n\nO"
+ + "utputInfo\022#\n\024gcs_output_directory\030\001 \001(\tB"
+ + "\003\340A\003H\000\022&\n\027bigquery_output_dataset\030\002 \001(\tB"
+ + "\003\340A\003H\000B\021\n\017output_location\032-\n\013LabelsEntry"
+ + "\022\013\n\003key\030\001 \001(\t\022\r\n\005value\030\002 \001(\t:\0028\001:\206\001\352A\202\001\n"
+ + ",aiplatform.googleapis.com/BatchPredicti"
+ + "onJob\022Rprojects/{project}/locations/{loc"
+ + "ation}/batchPredictionJobs/{batch_predic"
+ + "tion_job}B\213\001\n#com.google.cloud.aiplatfor"
+ + "m.v1beta1B\027BatchPredictionJobProtoP\001ZIgo"
+ + "ogle.golang.org/genproto/googleapis/clou"
+ + "d/aiplatform/v1beta1;aiplatformb\006proto3"
};
descriptor =
com.google.protobuf.Descriptors.FileDescriptor.internalBuildGeneratedFileFrom(
@@ -135,6 +138,7 @@ public static com.google.protobuf.Descriptors.FileDescriptor getDescriptor() {
com.google.api.FieldBehaviorProto.getDescriptor(),
com.google.api.ResourceProto.getDescriptor(),
com.google.cloud.aiplatform.v1beta1.CompletionStatsProto.getDescriptor(),
+ com.google.cloud.aiplatform.v1beta1.EncryptionSpecProto.getDescriptor(),
com.google.cloud.aiplatform.v1beta1.ExplanationProto.getDescriptor(),
com.google.cloud.aiplatform.v1beta1.IoProto.getDescriptor(),
com.google.cloud.aiplatform.v1beta1.JobStateProto.getDescriptor(),
@@ -172,6 +176,7 @@ public static com.google.protobuf.Descriptors.FileDescriptor getDescriptor() {
"EndTime",
"UpdateTime",
"Labels",
+ "EncryptionSpec",
});
internal_static_google_cloud_aiplatform_v1beta1_BatchPredictionJob_InputConfig_descriptor =
internal_static_google_cloud_aiplatform_v1beta1_BatchPredictionJob_descriptor
@@ -223,6 +228,7 @@ public static com.google.protobuf.Descriptors.FileDescriptor getDescriptor() {
com.google.api.FieldBehaviorProto.getDescriptor();
com.google.api.ResourceProto.getDescriptor();
com.google.cloud.aiplatform.v1beta1.CompletionStatsProto.getDescriptor();
+ com.google.cloud.aiplatform.v1beta1.EncryptionSpecProto.getDescriptor();
com.google.cloud.aiplatform.v1beta1.ExplanationProto.getDescriptor();
com.google.cloud.aiplatform.v1beta1.IoProto.getDescriptor();
com.google.cloud.aiplatform.v1beta1.JobStateProto.getDescriptor();
diff --git a/proto-google-cloud-aiplatform-v1beta1/src/main/java/com/google/cloud/aiplatform/v1beta1/BigQueryDestination.java b/proto-google-cloud-aiplatform-v1beta1/src/main/java/com/google/cloud/aiplatform/v1beta1/BigQueryDestination.java
index 21da5207e..7f47accd5 100644
--- a/proto-google-cloud-aiplatform-v1beta1/src/main/java/com/google/cloud/aiplatform/v1beta1/BigQueryDestination.java
+++ b/proto-google-cloud-aiplatform-v1beta1/src/main/java/com/google/cloud/aiplatform/v1beta1/BigQueryDestination.java
@@ -117,9 +117,13 @@ public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
*
*
*
- * Required. BigQuery URI to a project, up to 2000 characters long.
+ * Required. BigQuery URI to a project or table, up to 2000 characters long.
+ * When only project is specified, Dataset and Table is created.
+ * When full table reference is specified, Dataset must exist and table must
+ * not exist.
* Accepted forms:
- * * BigQuery path. For example: `bq://projectId`.
+ * * BigQuery path. For example:
+ * `bq://projectId` or `bq://projectId.bqDatasetId.bqTableId`.
*
*
* string output_uri = 1 [(.google.api.field_behavior) = REQUIRED];
@@ -142,9 +146,13 @@ public java.lang.String getOutputUri() {
*
*
*
- * Required. BigQuery URI to a project, up to 2000 characters long.
+ * Required. BigQuery URI to a project or table, up to 2000 characters long.
+ * When only project is specified, Dataset and Table is created.
+ * When full table reference is specified, Dataset must exist and table must
+ * not exist.
* Accepted forms:
- * * BigQuery path. For example: `bq://projectId`.
+ * * BigQuery path. For example:
+ * `bq://projectId` or `bq://projectId.bqDatasetId.bqTableId`.
*
*
* string output_uri = 1 [(.google.api.field_behavior) = REQUIRED];
@@ -488,9 +496,13 @@ public Builder mergeFrom(
*
*
*
- * Required. BigQuery URI to a project, up to 2000 characters long.
+ * Required. BigQuery URI to a project or table, up to 2000 characters long.
+ * When only project is specified, Dataset and Table is created.
+ * When full table reference is specified, Dataset must exist and table must
+ * not exist.
* Accepted forms:
- * * BigQuery path. For example: `bq://projectId`.
+ * * BigQuery path. For example:
+ * `bq://projectId` or `bq://projectId.bqDatasetId.bqTableId`.
*
*
* string output_uri = 1 [(.google.api.field_behavior) = REQUIRED];
@@ -512,9 +524,13 @@ public java.lang.String getOutputUri() {
*
*
*
- * Required. BigQuery URI to a project, up to 2000 characters long.
+ * Required. BigQuery URI to a project or table, up to 2000 characters long.
+ * When only project is specified, Dataset and Table is created.
+ * When full table reference is specified, Dataset must exist and table must
+ * not exist.
* Accepted forms:
- * * BigQuery path. For example: `bq://projectId`.
+ * * BigQuery path. For example:
+ * `bq://projectId` or `bq://projectId.bqDatasetId.bqTableId`.
*
*
* string output_uri = 1 [(.google.api.field_behavior) = REQUIRED];
@@ -536,9 +552,13 @@ public com.google.protobuf.ByteString getOutputUriBytes() {
*
*
*
- * Required. BigQuery URI to a project, up to 2000 characters long.
+ * Required. BigQuery URI to a project or table, up to 2000 characters long.
+ * When only project is specified, Dataset and Table is created.
+ * When full table reference is specified, Dataset must exist and table must
+ * not exist.
* Accepted forms:
- * * BigQuery path. For example: `bq://projectId`.
+ * * BigQuery path. For example:
+ * `bq://projectId` or `bq://projectId.bqDatasetId.bqTableId`.
*
*
* string output_uri = 1 [(.google.api.field_behavior) = REQUIRED];
@@ -559,9 +579,13 @@ public Builder setOutputUri(java.lang.String value) {
*
*
*
- * Required. BigQuery URI to a project, up to 2000 characters long.
+ * Required. BigQuery URI to a project or table, up to 2000 characters long.
+ * When only project is specified, Dataset and Table is created.
+ * When full table reference is specified, Dataset must exist and table must
+ * not exist.
* Accepted forms:
- * * BigQuery path. For example: `bq://projectId`.
+ * * BigQuery path. For example:
+ * `bq://projectId` or `bq://projectId.bqDatasetId.bqTableId`.
*
*
* string output_uri = 1 [(.google.api.field_behavior) = REQUIRED];
@@ -578,9 +602,13 @@ public Builder clearOutputUri() {
*
*
*
- * Required. BigQuery URI to a project, up to 2000 characters long.
+ * Required. BigQuery URI to a project or table, up to 2000 characters long.
+ * When only project is specified, Dataset and Table is created.
+ * When full table reference is specified, Dataset must exist and table must
+ * not exist.
* Accepted forms:
- * * BigQuery path. For example: `bq://projectId`.
+ * * BigQuery path. For example:
+ * `bq://projectId` or `bq://projectId.bqDatasetId.bqTableId`.
*
*
* string output_uri = 1 [(.google.api.field_behavior) = REQUIRED];
diff --git a/proto-google-cloud-aiplatform-v1beta1/src/main/java/com/google/cloud/aiplatform/v1beta1/BigQueryDestinationOrBuilder.java b/proto-google-cloud-aiplatform-v1beta1/src/main/java/com/google/cloud/aiplatform/v1beta1/BigQueryDestinationOrBuilder.java
index aff64073c..0ff99b258 100644
--- a/proto-google-cloud-aiplatform-v1beta1/src/main/java/com/google/cloud/aiplatform/v1beta1/BigQueryDestinationOrBuilder.java
+++ b/proto-google-cloud-aiplatform-v1beta1/src/main/java/com/google/cloud/aiplatform/v1beta1/BigQueryDestinationOrBuilder.java
@@ -27,9 +27,13 @@ public interface BigQueryDestinationOrBuilder
*
*
*
- * Required. BigQuery URI to a project, up to 2000 characters long.
+ * Required. BigQuery URI to a project or table, up to 2000 characters long.
+ * When only project is specified, Dataset and Table is created.
+ * When full table reference is specified, Dataset must exist and table must
+ * not exist.
* Accepted forms:
- * * BigQuery path. For example: `bq://projectId`.
+ * * BigQuery path. For example:
+ * `bq://projectId` or `bq://projectId.bqDatasetId.bqTableId`.
*
*
* string output_uri = 1 [(.google.api.field_behavior) = REQUIRED];
@@ -41,9 +45,13 @@ public interface BigQueryDestinationOrBuilder
*
*
*
- * Required. BigQuery URI to a project, up to 2000 characters long.
+ * Required. BigQuery URI to a project or table, up to 2000 characters long.
+ * When only project is specified, Dataset and Table is created.
+ * When full table reference is specified, Dataset must exist and table must
+ * not exist.
* Accepted forms:
- * * BigQuery path. For example: `bq://projectId`.
+ * * BigQuery path. For example:
+ * `bq://projectId` or `bq://projectId.bqDatasetId.bqTableId`.
*
*
* string output_uri = 1 [(.google.api.field_behavior) = REQUIRED];
diff --git a/proto-google-cloud-aiplatform-v1beta1/src/main/java/com/google/cloud/aiplatform/v1beta1/CustomJob.java b/proto-google-cloud-aiplatform-v1beta1/src/main/java/com/google/cloud/aiplatform/v1beta1/CustomJob.java
index 469374d81..058277cd5 100644
--- a/proto-google-cloud-aiplatform-v1beta1/src/main/java/com/google/cloud/aiplatform/v1beta1/CustomJob.java
+++ b/proto-google-cloud-aiplatform-v1beta1/src/main/java/com/google/cloud/aiplatform/v1beta1/CustomJob.java
@@ -199,6 +199,23 @@ private CustomJob(
input.readMessage(
LabelsDefaultEntryHolder.defaultEntry.getParserForType(), extensionRegistry);
labels_.getMutableMap().put(labels__.getKey(), labels__.getValue());
+ break;
+ }
+ case 98:
+ {
+ com.google.cloud.aiplatform.v1beta1.EncryptionSpec.Builder subBuilder = null;
+ if (encryptionSpec_ != null) {
+ subBuilder = encryptionSpec_.toBuilder();
+ }
+ encryptionSpec_ =
+ input.readMessage(
+ com.google.cloud.aiplatform.v1beta1.EncryptionSpec.parser(),
+ extensionRegistry);
+ if (subBuilder != null) {
+ subBuilder.mergeFrom(encryptionSpec_);
+ encryptionSpec_ = subBuilder.buildPartial();
+ }
+
break;
}
default:
@@ -807,6 +824,60 @@ public java.lang.String getLabelsOrThrow(java.lang.String key) {
return map.get(key);
}
+ public static final int ENCRYPTION_SPEC_FIELD_NUMBER = 12;
+ private com.google.cloud.aiplatform.v1beta1.EncryptionSpec encryptionSpec_;
+ /**
+ *
+ *
+ *
+ * Customer-managed encryption key options for a CustomJob. If this is set,
+ * then all resources created by the CustomJob will be encrypted with the
+ * provided encryption key.
+ *
+ *
+ * .google.cloud.aiplatform.v1beta1.EncryptionSpec encryption_spec = 12;
+ *
+ * @return Whether the encryptionSpec field is set.
+ */
+ @java.lang.Override
+ public boolean hasEncryptionSpec() {
+ return encryptionSpec_ != null;
+ }
+ /**
+ *
+ *
+ *
+ * Customer-managed encryption key options for a CustomJob. If this is set,
+ * then all resources created by the CustomJob will be encrypted with the
+ * provided encryption key.
+ *
+ *
+ * .google.cloud.aiplatform.v1beta1.EncryptionSpec encryption_spec = 12;
+ *
+ * @return The encryptionSpec.
+ */
+ @java.lang.Override
+ public com.google.cloud.aiplatform.v1beta1.EncryptionSpec getEncryptionSpec() {
+ return encryptionSpec_ == null
+ ? com.google.cloud.aiplatform.v1beta1.EncryptionSpec.getDefaultInstance()
+ : encryptionSpec_;
+ }
+ /**
+ *
+ *
+ *
+ * Customer-managed encryption key options for a CustomJob. If this is set,
+ * then all resources created by the CustomJob will be encrypted with the
+ * provided encryption key.
+ *
+ *
+ * .google.cloud.aiplatform.v1beta1.EncryptionSpec encryption_spec = 12;
+ */
+ @java.lang.Override
+ public com.google.cloud.aiplatform.v1beta1.EncryptionSpecOrBuilder getEncryptionSpecOrBuilder() {
+ return getEncryptionSpec();
+ }
+
private byte memoizedIsInitialized = -1;
@java.lang.Override
@@ -850,6 +921,9 @@ public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io
}
com.google.protobuf.GeneratedMessageV3.serializeStringMapTo(
output, internalGetLabels(), LabelsDefaultEntryHolder.defaultEntry, 11);
+ if (encryptionSpec_ != null) {
+ output.writeMessage(12, getEncryptionSpec());
+ }
unknownFields.writeTo(output);
}
@@ -896,6 +970,9 @@ public int getSerializedSize() {
.build();
size += com.google.protobuf.CodedOutputStream.computeMessageSize(11, labels__);
}
+ if (encryptionSpec_ != null) {
+ size += com.google.protobuf.CodedOutputStream.computeMessageSize(12, getEncryptionSpec());
+ }
size += unknownFields.getSerializedSize();
memoizedSize = size;
return size;
@@ -940,6 +1017,10 @@ public boolean equals(final java.lang.Object obj) {
if (!getError().equals(other.getError())) return false;
}
if (!internalGetLabels().equals(other.internalGetLabels())) return false;
+ if (hasEncryptionSpec() != other.hasEncryptionSpec()) return false;
+ if (hasEncryptionSpec()) {
+ if (!getEncryptionSpec().equals(other.getEncryptionSpec())) return false;
+ }
if (!unknownFields.equals(other.unknownFields)) return false;
return true;
}
@@ -985,6 +1066,10 @@ public int hashCode() {
hash = (37 * hash) + LABELS_FIELD_NUMBER;
hash = (53 * hash) + internalGetLabels().hashCode();
}
+ if (hasEncryptionSpec()) {
+ hash = (37 * hash) + ENCRYPTION_SPEC_FIELD_NUMBER;
+ hash = (53 * hash) + getEncryptionSpec().hashCode();
+ }
hash = (29 * hash) + unknownFields.hashCode();
memoizedHashCode = hash;
return hash;
@@ -1196,6 +1281,12 @@ public Builder clear() {
errorBuilder_ = null;
}
internalGetMutableLabels().clear();
+ if (encryptionSpecBuilder_ == null) {
+ encryptionSpec_ = null;
+ } else {
+ encryptionSpec_ = null;
+ encryptionSpecBuilder_ = null;
+ }
return this;
}
@@ -1259,6 +1350,11 @@ public com.google.cloud.aiplatform.v1beta1.CustomJob buildPartial() {
}
result.labels_ = internalGetLabels();
result.labels_.makeImmutable();
+ if (encryptionSpecBuilder_ == null) {
+ result.encryptionSpec_ = encryptionSpec_;
+ } else {
+ result.encryptionSpec_ = encryptionSpecBuilder_.build();
+ }
onBuilt();
return result;
}
@@ -1338,6 +1434,9 @@ public Builder mergeFrom(com.google.cloud.aiplatform.v1beta1.CustomJob other) {
mergeError(other.getError());
}
internalGetMutableLabels().mergeFrom(other.internalGetLabels());
+ if (other.hasEncryptionSpec()) {
+ mergeEncryptionSpec(other.getEncryptionSpec());
+ }
this.mergeUnknownFields(other.unknownFields);
onChanged();
return this;
@@ -3059,6 +3158,211 @@ public Builder putAllLabels(java.util.Map
+ * Customer-managed encryption key options for a CustomJob. If this is set,
+ * then all resources created by the CustomJob will be encrypted with the
+ * provided encryption key.
+ *
+ *
+ * .google.cloud.aiplatform.v1beta1.EncryptionSpec encryption_spec = 12;
+ *
+ * @return Whether the encryptionSpec field is set.
+ */
+ public boolean hasEncryptionSpec() {
+ return encryptionSpecBuilder_ != null || encryptionSpec_ != null;
+ }
+ /**
+ *
+ *
+ *
+ * Customer-managed encryption key options for a CustomJob. If this is set,
+ * then all resources created by the CustomJob will be encrypted with the
+ * provided encryption key.
+ *
+ *
+ * .google.cloud.aiplatform.v1beta1.EncryptionSpec encryption_spec = 12;
+ *
+ * @return The encryptionSpec.
+ */
+ public com.google.cloud.aiplatform.v1beta1.EncryptionSpec getEncryptionSpec() {
+ if (encryptionSpecBuilder_ == null) {
+ return encryptionSpec_ == null
+ ? com.google.cloud.aiplatform.v1beta1.EncryptionSpec.getDefaultInstance()
+ : encryptionSpec_;
+ } else {
+ return encryptionSpecBuilder_.getMessage();
+ }
+ }
+ /**
+ *
+ *
+ *
+ * Customer-managed encryption key options for a CustomJob. If this is set,
+ * then all resources created by the CustomJob will be encrypted with the
+ * provided encryption key.
+ *
+ *
+ * .google.cloud.aiplatform.v1beta1.EncryptionSpec encryption_spec = 12;
+ */
+ public Builder setEncryptionSpec(com.google.cloud.aiplatform.v1beta1.EncryptionSpec value) {
+ if (encryptionSpecBuilder_ == null) {
+ if (value == null) {
+ throw new NullPointerException();
+ }
+ encryptionSpec_ = value;
+ onChanged();
+ } else {
+ encryptionSpecBuilder_.setMessage(value);
+ }
+
+ return this;
+ }
+ /**
+ *
+ *
+ *
+ * Customer-managed encryption key options for a CustomJob. If this is set,
+ * then all resources created by the CustomJob will be encrypted with the
+ * provided encryption key.
+ *
+ *
+ * .google.cloud.aiplatform.v1beta1.EncryptionSpec encryption_spec = 12;
+ */
+ public Builder setEncryptionSpec(
+ com.google.cloud.aiplatform.v1beta1.EncryptionSpec.Builder builderForValue) {
+ if (encryptionSpecBuilder_ == null) {
+ encryptionSpec_ = builderForValue.build();
+ onChanged();
+ } else {
+ encryptionSpecBuilder_.setMessage(builderForValue.build());
+ }
+
+ return this;
+ }
+ /**
+ *
+ *
+ *
+ * Customer-managed encryption key options for a CustomJob. If this is set,
+ * then all resources created by the CustomJob will be encrypted with the
+ * provided encryption key.
+ *
+ *
+ * .google.cloud.aiplatform.v1beta1.EncryptionSpec encryption_spec = 12;
+ */
+ public Builder mergeEncryptionSpec(com.google.cloud.aiplatform.v1beta1.EncryptionSpec value) {
+ if (encryptionSpecBuilder_ == null) {
+ if (encryptionSpec_ != null) {
+ encryptionSpec_ =
+ com.google.cloud.aiplatform.v1beta1.EncryptionSpec.newBuilder(encryptionSpec_)
+ .mergeFrom(value)
+ .buildPartial();
+ } else {
+ encryptionSpec_ = value;
+ }
+ onChanged();
+ } else {
+ encryptionSpecBuilder_.mergeFrom(value);
+ }
+
+ return this;
+ }
+ /**
+ *
+ *
+ *
+ * Customer-managed encryption key options for a CustomJob. If this is set,
+ * then all resources created by the CustomJob will be encrypted with the
+ * provided encryption key.
+ *
+ *
+ * .google.cloud.aiplatform.v1beta1.EncryptionSpec encryption_spec = 12;
+ */
+ public Builder clearEncryptionSpec() {
+ if (encryptionSpecBuilder_ == null) {
+ encryptionSpec_ = null;
+ onChanged();
+ } else {
+ encryptionSpec_ = null;
+ encryptionSpecBuilder_ = null;
+ }
+
+ return this;
+ }
+ /**
+ *
+ *
+ *
+ * Customer-managed encryption key options for a CustomJob. If this is set,
+ * then all resources created by the CustomJob will be encrypted with the
+ * provided encryption key.
+ *
+ *
+ * .google.cloud.aiplatform.v1beta1.EncryptionSpec encryption_spec = 12;
+ */
+ public com.google.cloud.aiplatform.v1beta1.EncryptionSpec.Builder getEncryptionSpecBuilder() {
+
+ onChanged();
+ return getEncryptionSpecFieldBuilder().getBuilder();
+ }
+ /**
+ *
+ *
+ *
+ * Customer-managed encryption key options for a CustomJob. If this is set,
+ * then all resources created by the CustomJob will be encrypted with the
+ * provided encryption key.
+ *
+ *
+ * .google.cloud.aiplatform.v1beta1.EncryptionSpec encryption_spec = 12;
+ */
+ public com.google.cloud.aiplatform.v1beta1.EncryptionSpecOrBuilder
+ getEncryptionSpecOrBuilder() {
+ if (encryptionSpecBuilder_ != null) {
+ return encryptionSpecBuilder_.getMessageOrBuilder();
+ } else {
+ return encryptionSpec_ == null
+ ? com.google.cloud.aiplatform.v1beta1.EncryptionSpec.getDefaultInstance()
+ : encryptionSpec_;
+ }
+ }
+ /**
+ *
+ *
+ *
+ * Customer-managed encryption key options for a CustomJob. If this is set,
+ * then all resources created by the CustomJob will be encrypted with the
+ * provided encryption key.
+ *
+ *
+ * .google.cloud.aiplatform.v1beta1.EncryptionSpec encryption_spec = 12;
+ */
+ private com.google.protobuf.SingleFieldBuilderV3<
+ com.google.cloud.aiplatform.v1beta1.EncryptionSpec,
+ com.google.cloud.aiplatform.v1beta1.EncryptionSpec.Builder,
+ com.google.cloud.aiplatform.v1beta1.EncryptionSpecOrBuilder>
+ getEncryptionSpecFieldBuilder() {
+ if (encryptionSpecBuilder_ == null) {
+ encryptionSpecBuilder_ =
+ new com.google.protobuf.SingleFieldBuilderV3<
+ com.google.cloud.aiplatform.v1beta1.EncryptionSpec,
+ com.google.cloud.aiplatform.v1beta1.EncryptionSpec.Builder,
+ com.google.cloud.aiplatform.v1beta1.EncryptionSpecOrBuilder>(
+ getEncryptionSpec(), getParentForChildren(), isClean());
+ encryptionSpec_ = null;
+ }
+ return encryptionSpecBuilder_;
+ }
+
@java.lang.Override
public final Builder setUnknownFields(final com.google.protobuf.UnknownFieldSet unknownFields) {
return super.setUnknownFields(unknownFields);
diff --git a/proto-google-cloud-aiplatform-v1beta1/src/main/java/com/google/cloud/aiplatform/v1beta1/CustomJobOrBuilder.java b/proto-google-cloud-aiplatform-v1beta1/src/main/java/com/google/cloud/aiplatform/v1beta1/CustomJobOrBuilder.java
index 23e21ff9d..516d9aa0a 100644
--- a/proto-google-cloud-aiplatform-v1beta1/src/main/java/com/google/cloud/aiplatform/v1beta1/CustomJobOrBuilder.java
+++ b/proto-google-cloud-aiplatform-v1beta1/src/main/java/com/google/cloud/aiplatform/v1beta1/CustomJobOrBuilder.java
@@ -416,4 +416,45 @@ public interface CustomJobOrBuilder
* map<string, string> labels = 11;
*/
java.lang.String getLabelsOrThrow(java.lang.String key);
+
+ /**
+ *
+ *
+ *
+ * Customer-managed encryption key options for a CustomJob. If this is set,
+ * then all resources created by the CustomJob will be encrypted with the
+ * provided encryption key.
+ *
+ *
+ * .google.cloud.aiplatform.v1beta1.EncryptionSpec encryption_spec = 12;
+ *
+ * @return Whether the encryptionSpec field is set.
+ */
+ boolean hasEncryptionSpec();
+ /**
+ *
+ *
+ *
+ * Customer-managed encryption key options for a CustomJob. If this is set,
+ * then all resources created by the CustomJob will be encrypted with the
+ * provided encryption key.
+ *
+ *
+ * .google.cloud.aiplatform.v1beta1.EncryptionSpec encryption_spec = 12;
+ *
+ * @return The encryptionSpec.
+ */
+ com.google.cloud.aiplatform.v1beta1.EncryptionSpec getEncryptionSpec();
+ /**
+ *
+ *
+ *
+ * Customer-managed encryption key options for a CustomJob. If this is set,
+ * then all resources created by the CustomJob will be encrypted with the
+ * provided encryption key.
+ *
+ *
+ * .google.cloud.aiplatform.v1beta1.EncryptionSpec encryption_spec = 12;
+ */
+ com.google.cloud.aiplatform.v1beta1.EncryptionSpecOrBuilder getEncryptionSpecOrBuilder();
}
diff --git a/proto-google-cloud-aiplatform-v1beta1/src/main/java/com/google/cloud/aiplatform/v1beta1/CustomJobProto.java b/proto-google-cloud-aiplatform-v1beta1/src/main/java/com/google/cloud/aiplatform/v1beta1/CustomJobProto.java
index 0261623f3..78f43fce9 100644
--- a/proto-google-cloud-aiplatform-v1beta1/src/main/java/com/google/cloud/aiplatform/v1beta1/CustomJobProto.java
+++ b/proto-google-cloud-aiplatform-v1beta1/src/main/java/com/google/cloud/aiplatform/v1beta1/CustomJobProto.java
@@ -67,58 +67,62 @@ public static com.google.protobuf.Descriptors.FileDescriptor getDescriptor() {
"\n0google/cloud/aiplatform/v1beta1/custom"
+ "_job.proto\022\037google.cloud.aiplatform.v1be"
+ "ta1\032\037google/api/field_behavior.proto\032\031go"
- + "ogle/api/resource.proto\032-google/cloud/ai"
- + "platform/v1beta1/env_var.proto\032(google/c"
- + "loud/aiplatform/v1beta1/io.proto\032/google"
- + "/cloud/aiplatform/v1beta1/job_state.prot"
- + "o\0327google/cloud/aiplatform/v1beta1/machi"
- + "ne_resources.proto\032\036google/protobuf/dura"
- + "tion.proto\032\037google/protobuf/timestamp.pr"
- + "oto\032\027google/rpc/status.proto\032\034google/api"
- + "/annotations.proto\"\235\005\n\tCustomJob\022\021\n\004name"
- + "\030\001 \001(\tB\003\340A\003\022\031\n\014display_name\030\002 \001(\tB\003\340A\002\022E"
- + "\n\010job_spec\030\004 \001(\0132..google.cloud.aiplatfo"
- + "rm.v1beta1.CustomJobSpecB\003\340A\002\022=\n\005state\030\005"
- + " \001(\0162).google.cloud.aiplatform.v1beta1.J"
- + "obStateB\003\340A\003\0224\n\013create_time\030\006 \001(\0132\032.goog"
- + "le.protobuf.TimestampB\003\340A\003\0223\n\nstart_time"
- + "\030\007 \001(\0132\032.google.protobuf.TimestampB\003\340A\003\022"
- + "1\n\010end_time\030\010 \001(\0132\032.google.protobuf.Time"
- + "stampB\003\340A\003\0224\n\013update_time\030\t \001(\0132\032.google"
- + ".protobuf.TimestampB\003\340A\003\022&\n\005error\030\n \001(\0132"
- + "\022.google.rpc.StatusB\003\340A\003\022F\n\006labels\030\013 \003(\013"
- + "26.google.cloud.aiplatform.v1beta1.Custo"
- + "mJob.LabelsEntry\032-\n\013LabelsEntry\022\013\n\003key\030\001"
- + " \001(\t\022\r\n\005value\030\002 \001(\t:\0028\001:i\352Af\n#aiplatform"
- + ".googleapis.com/CustomJob\022?projects/{pro"
- + "ject}/locations/{location}/customJobs/{c"
- + "ustom_job}\"\233\002\n\rCustomJobSpec\022O\n\021worker_p"
- + "ool_specs\030\001 \003(\0132/.google.cloud.aiplatfor"
- + "m.v1beta1.WorkerPoolSpecB\003\340A\002\022?\n\nschedul"
- + "ing\030\003 \001(\0132+.google.cloud.aiplatform.v1be"
- + "ta1.Scheduling\022\027\n\017service_account\030\004 \001(\t\022"
- + "\017\n\007network\030\005 \001(\t\022N\n\025base_output_director"
- + "y\030\006 \001(\0132/.google.cloud.aiplatform.v1beta"
- + "1.GcsDestination\"\333\002\n\016WorkerPoolSpec\022H\n\016c"
- + "ontainer_spec\030\006 \001(\0132..google.cloud.aipla"
- + "tform.v1beta1.ContainerSpecH\000\022Q\n\023python_"
- + "package_spec\030\007 \001(\01322.google.cloud.aiplat"
- + "form.v1beta1.PythonPackageSpecH\000\022J\n\014mach"
- + "ine_spec\030\001 \001(\0132,.google.cloud.aiplatform"
- + ".v1beta1.MachineSpecB\006\340A\002\340A\005\022\032\n\rreplica_"
- + "count\030\002 \001(\003B\003\340A\002\022<\n\tdisk_spec\030\005 \001(\0132).go"
- + "ogle.cloud.aiplatform.v1beta1.DiskSpecB\006"
- + "\n\004task\"F\n\rContainerSpec\022\026\n\timage_uri\030\001 \001"
- + "(\tB\003\340A\002\022\017\n\007command\030\002 \003(\t\022\014\n\004args\030\003 \003(\t\"y"
- + "\n\021PythonPackageSpec\022\037\n\022executor_image_ur"
- + "i\030\001 \001(\tB\003\340A\002\022\031\n\014package_uris\030\002 \003(\tB\003\340A\002\022"
- + "\032\n\rpython_module\030\003 \001(\tB\003\340A\002\022\014\n\004args\030\004 \003("
- + "\t\"_\n\nScheduling\022*\n\007timeout\030\001 \001(\0132\031.googl"
- + "e.protobuf.Duration\022%\n\035restart_job_on_wo"
- + "rker_restart\030\003 \001(\010B\202\001\n#com.google.cloud."
- + "aiplatform.v1beta1B\016CustomJobProtoP\001ZIgo"
- + "ogle.golang.org/genproto/googleapis/clou"
- + "d/aiplatform/v1beta1;aiplatformb\006proto3"
+ + "ogle/api/resource.proto\0325google/cloud/ai"
+ + "platform/v1beta1/encryption_spec.proto\032-"
+ + "google/cloud/aiplatform/v1beta1/env_var."
+ + "proto\032(google/cloud/aiplatform/v1beta1/i"
+ + "o.proto\032/google/cloud/aiplatform/v1beta1"
+ + "/job_state.proto\0327google/cloud/aiplatfor"
+ + "m/v1beta1/machine_resources.proto\032\036googl"
+ + "e/protobuf/duration.proto\032\037google/protob"
+ + "uf/timestamp.proto\032\027google/rpc/status.pr"
+ + "oto\032\034google/api/annotations.proto\"\347\005\n\tCu"
+ + "stomJob\022\021\n\004name\030\001 \001(\tB\003\340A\003\022\031\n\014display_na"
+ + "me\030\002 \001(\tB\003\340A\002\022E\n\010job_spec\030\004 \001(\0132..google"
+ + ".cloud.aiplatform.v1beta1.CustomJobSpecB"
+ + "\003\340A\002\022=\n\005state\030\005 \001(\0162).google.cloud.aipla"
+ + "tform.v1beta1.JobStateB\003\340A\003\0224\n\013create_ti"
+ + "me\030\006 \001(\0132\032.google.protobuf.TimestampB\003\340A"
+ + "\003\0223\n\nstart_time\030\007 \001(\0132\032.google.protobuf."
+ + "TimestampB\003\340A\003\0221\n\010end_time\030\010 \001(\0132\032.googl"
+ + "e.protobuf.TimestampB\003\340A\003\0224\n\013update_time"
+ + "\030\t \001(\0132\032.google.protobuf.TimestampB\003\340A\003\022"
+ + "&\n\005error\030\n \001(\0132\022.google.rpc.StatusB\003\340A\003\022"
+ + "F\n\006labels\030\013 \003(\01326.google.cloud.aiplatfor"
+ + "m.v1beta1.CustomJob.LabelsEntry\022H\n\017encry"
+ + "ption_spec\030\014 \001(\0132/.google.cloud.aiplatfo"
+ + "rm.v1beta1.EncryptionSpec\032-\n\013LabelsEntry"
+ + "\022\013\n\003key\030\001 \001(\t\022\r\n\005value\030\002 \001(\t:\0028\001:i\352Af\n#a"
+ + "iplatform.googleapis.com/CustomJob\022?proj"
+ + "ects/{project}/locations/{location}/cust"
+ + "omJobs/{custom_job}\"\233\002\n\rCustomJobSpec\022O\n"
+ + "\021worker_pool_specs\030\001 \003(\0132/.google.cloud."
+ + "aiplatform.v1beta1.WorkerPoolSpecB\003\340A\002\022?"
+ + "\n\nscheduling\030\003 \001(\0132+.google.cloud.aiplat"
+ + "form.v1beta1.Scheduling\022\027\n\017service_accou"
+ + "nt\030\004 \001(\t\022\017\n\007network\030\005 \001(\t\022N\n\025base_output"
+ + "_directory\030\006 \001(\0132/.google.cloud.aiplatfo"
+ + "rm.v1beta1.GcsDestination\"\333\002\n\016WorkerPool"
+ + "Spec\022H\n\016container_spec\030\006 \001(\0132..google.cl"
+ + "oud.aiplatform.v1beta1.ContainerSpecH\000\022Q"
+ + "\n\023python_package_spec\030\007 \001(\01322.google.clo"
+ + "ud.aiplatform.v1beta1.PythonPackageSpecH"
+ + "\000\022J\n\014machine_spec\030\001 \001(\0132,.google.cloud.a"
+ + "iplatform.v1beta1.MachineSpecB\006\340A\001\340A\005\022\032\n"
+ + "\rreplica_count\030\002 \001(\003B\003\340A\001\022<\n\tdisk_spec\030\005"
+ + " \001(\0132).google.cloud.aiplatform.v1beta1.D"
+ + "iskSpecB\006\n\004task\"F\n\rContainerSpec\022\026\n\timag"
+ + "e_uri\030\001 \001(\tB\003\340A\002\022\017\n\007command\030\002 \003(\t\022\014\n\004arg"
+ + "s\030\003 \003(\t\"y\n\021PythonPackageSpec\022\037\n\022executor"
+ + "_image_uri\030\001 \001(\tB\003\340A\002\022\031\n\014package_uris\030\002 "
+ + "\003(\tB\003\340A\002\022\032\n\rpython_module\030\003 \001(\tB\003\340A\002\022\014\n\004"
+ + "args\030\004 \003(\t\"_\n\nScheduling\022*\n\007timeout\030\001 \001("
+ + "\0132\031.google.protobuf.Duration\022%\n\035restart_"
+ + "job_on_worker_restart\030\003 \001(\010B\202\001\n#com.goog"
+ + "le.cloud.aiplatform.v1beta1B\016CustomJobPr"
+ + "otoP\001ZIgoogle.golang.org/genproto/google"
+ + "apis/cloud/aiplatform/v1beta1;aiplatform"
+ + "b\006proto3"
};
descriptor =
com.google.protobuf.Descriptors.FileDescriptor.internalBuildGeneratedFileFrom(
@@ -126,6 +130,7 @@ public static com.google.protobuf.Descriptors.FileDescriptor getDescriptor() {
new com.google.protobuf.Descriptors.FileDescriptor[] {
com.google.api.FieldBehaviorProto.getDescriptor(),
com.google.api.ResourceProto.getDescriptor(),
+ com.google.cloud.aiplatform.v1beta1.EncryptionSpecProto.getDescriptor(),
com.google.cloud.aiplatform.v1beta1.EnvVarProto.getDescriptor(),
com.google.cloud.aiplatform.v1beta1.IoProto.getDescriptor(),
com.google.cloud.aiplatform.v1beta1.JobStateProto.getDescriptor(),
@@ -151,6 +156,7 @@ public static com.google.protobuf.Descriptors.FileDescriptor getDescriptor() {
"UpdateTime",
"Error",
"Labels",
+ "EncryptionSpec",
});
internal_static_google_cloud_aiplatform_v1beta1_CustomJob_LabelsEntry_descriptor =
internal_static_google_cloud_aiplatform_v1beta1_CustomJob_descriptor
@@ -215,6 +221,7 @@ public static com.google.protobuf.Descriptors.FileDescriptor getDescriptor() {
descriptor, registry);
com.google.api.FieldBehaviorProto.getDescriptor();
com.google.api.ResourceProto.getDescriptor();
+ com.google.cloud.aiplatform.v1beta1.EncryptionSpecProto.getDescriptor();
com.google.cloud.aiplatform.v1beta1.EnvVarProto.getDescriptor();
com.google.cloud.aiplatform.v1beta1.IoProto.getDescriptor();
com.google.cloud.aiplatform.v1beta1.JobStateProto.getDescriptor();
diff --git a/proto-google-cloud-aiplatform-v1beta1/src/main/java/com/google/cloud/aiplatform/v1beta1/CustomJobSpec.java b/proto-google-cloud-aiplatform-v1beta1/src/main/java/com/google/cloud/aiplatform/v1beta1/CustomJobSpec.java
index 137e2116c..b0c020694 100644
--- a/proto-google-cloud-aiplatform-v1beta1/src/main/java/com/google/cloud/aiplatform/v1beta1/CustomJobSpec.java
+++ b/proto-google-cloud-aiplatform-v1beta1/src/main/java/com/google/cloud/aiplatform/v1beta1/CustomJobSpec.java
@@ -307,6 +307,8 @@ public com.google.cloud.aiplatform.v1beta1.SchedulingOrBuilder getSchedulingOrBu
*
* Specifies the service account for workload run-as account.
* Users submitting jobs must have act-as permission on this run-as account.
+ * If unspecified, the AI Platform Custom Code Service Agent for the
+ * CustomJob's project is used.
*
*
* string service_account = 4;
@@ -331,6 +333,8 @@ public java.lang.String getServiceAccount() {
*
* Specifies the service account for workload run-as account.
* Users submitting jobs must have act-as permission on this run-as account.
+ * If unspecified, the AI Platform Custom Code Service Agent for the
+ * CustomJob's project is used.
*
*
* string service_account = 4;
@@ -358,11 +362,10 @@ public com.google.protobuf.ByteString getServiceAccountBytes() {
*
* The full name of the Compute Engine
* [network](/compute/docs/networks-and-firewalls#networks) to which the Job
- * should be peered. For example, projects/12345/global/networks/myVPC.
- * [Format](https:
- * //cloud.google.com/compute/docs/reference/rest/v1/networks/insert)
- * is of the form projects/{project}/global/networks/{network}.
- * Where {project} is a project number, as in '12345', and {network} is
+ * should be peered. For example, `projects/12345/global/networks/myVPC`.
+ * [Format](/compute/docs/reference/rest/v1/networks/insert)
+ * is of the form `projects/{project}/global/networks/{network}`.
+ * Where {project} is a project number, as in `12345`, and {network} is a
* network name.
* Private services access must already be configured for the network. If left
* unspecified, the job is not peered with any network.
@@ -390,11 +393,10 @@ public java.lang.String getNetwork() {
*
* The full name of the Compute Engine
* [network](/compute/docs/networks-and-firewalls#networks) to which the Job
- * should be peered. For example, projects/12345/global/networks/myVPC.
- * [Format](https:
- * //cloud.google.com/compute/docs/reference/rest/v1/networks/insert)
- * is of the form projects/{project}/global/networks/{network}.
- * Where {project} is a project number, as in '12345', and {network} is
+ * should be peered. For example, `projects/12345/global/networks/myVPC`.
+ * [Format](/compute/docs/reference/rest/v1/networks/insert)
+ * is of the form `projects/{project}/global/networks/{network}`.
+ * Where {project} is a project number, as in `12345`, and {network} is a
* network name.
* Private services access must already be configured for the network. If left
* unspecified, the job is not peered with any network.
@@ -423,14 +425,14 @@ public com.google.protobuf.ByteString getNetworkBytes() {
*
*
*
- * The Google Cloud Storage location to store the output of this CustomJob or
+ * The Cloud Storage location to store the output of this CustomJob or
* HyperparameterTuningJob. For HyperparameterTuningJob,
- * [base_output_directory][CustomJob.job_spec.base_output_directory] of
+ * the baseOutputDirectory of
* each child CustomJob backing a Trial is set to a subdirectory of name
- * [id][google.cloud.aiplatform.v1beta1.Trial.id] under parent HyperparameterTuningJob's
- * [base_output_directory][HyperparameterTuningJob.trial_job_spec.base_output_directory].
- * Following AI Platform environment variables will be passed to containers or
- * python modules when this field is set:
+ * [id][google.cloud.aiplatform.v1beta1.Trial.id] under its parent HyperparameterTuningJob's
+ * baseOutputDirectory.
+ * The following AI Platform environment variables will be passed to
+ * containers or python modules when this field is set:
* For CustomJob:
* * AIP_MODEL_DIR = `<base_output_directory>/model/`
* * AIP_CHECKPOINT_DIR = `<base_output_directory>/checkpoints/`
@@ -453,14 +455,14 @@ public boolean hasBaseOutputDirectory() {
*
*
*
- * The Google Cloud Storage location to store the output of this CustomJob or
+ * The Cloud Storage location to store the output of this CustomJob or
* HyperparameterTuningJob. For HyperparameterTuningJob,
- * [base_output_directory][CustomJob.job_spec.base_output_directory] of
+ * the baseOutputDirectory of
* each child CustomJob backing a Trial is set to a subdirectory of name
- * [id][google.cloud.aiplatform.v1beta1.Trial.id] under parent HyperparameterTuningJob's
- * [base_output_directory][HyperparameterTuningJob.trial_job_spec.base_output_directory].
- * Following AI Platform environment variables will be passed to containers or
- * python modules when this field is set:
+ * [id][google.cloud.aiplatform.v1beta1.Trial.id] under its parent HyperparameterTuningJob's
+ * baseOutputDirectory.
+ * The following AI Platform environment variables will be passed to
+ * containers or python modules when this field is set:
* For CustomJob:
* * AIP_MODEL_DIR = `<base_output_directory>/model/`
* * AIP_CHECKPOINT_DIR = `<base_output_directory>/checkpoints/`
@@ -485,14 +487,14 @@ public com.google.cloud.aiplatform.v1beta1.GcsDestination getBaseOutputDirectory
*
*
*
- * The Google Cloud Storage location to store the output of this CustomJob or
+ * The Cloud Storage location to store the output of this CustomJob or
* HyperparameterTuningJob. For HyperparameterTuningJob,
- * [base_output_directory][CustomJob.job_spec.base_output_directory] of
+ * the baseOutputDirectory of
* each child CustomJob backing a Trial is set to a subdirectory of name
- * [id][google.cloud.aiplatform.v1beta1.Trial.id] under parent HyperparameterTuningJob's
- * [base_output_directory][HyperparameterTuningJob.trial_job_spec.base_output_directory].
- * Following AI Platform environment variables will be passed to containers or
- * python modules when this field is set:
+ * [id][google.cloud.aiplatform.v1beta1.Trial.id] under its parent HyperparameterTuningJob's
+ * baseOutputDirectory.
+ * The following AI Platform environment variables will be passed to
+ * containers or python modules when this field is set:
* For CustomJob:
* * AIP_MODEL_DIR = `<base_output_directory>/model/`
* * AIP_CHECKPOINT_DIR = `<base_output_directory>/checkpoints/`
@@ -1550,6 +1552,8 @@ public com.google.cloud.aiplatform.v1beta1.SchedulingOrBuilder getSchedulingOrBu
*
* Specifies the service account for workload run-as account.
* Users submitting jobs must have act-as permission on this run-as account.
+ * If unspecified, the AI Platform Custom Code Service Agent for the
+ * CustomJob's project is used.
*
*
* string service_account = 4;
@@ -1573,6 +1577,8 @@ public java.lang.String getServiceAccount() {
*
* Specifies the service account for workload run-as account.
* Users submitting jobs must have act-as permission on this run-as account.
+ * If unspecified, the AI Platform Custom Code Service Agent for the
+ * CustomJob's project is used.
*
*
* string service_account = 4;
@@ -1596,6 +1602,8 @@ public com.google.protobuf.ByteString getServiceAccountBytes() {
*
* Specifies the service account for workload run-as account.
* Users submitting jobs must have act-as permission on this run-as account.
+ * If unspecified, the AI Platform Custom Code Service Agent for the
+ * CustomJob's project is used.
*
*
* string service_account = 4;
@@ -1618,6 +1626,8 @@ public Builder setServiceAccount(java.lang.String value) {
*
* Specifies the service account for workload run-as account.
* Users submitting jobs must have act-as permission on this run-as account.
+ * If unspecified, the AI Platform Custom Code Service Agent for the
+ * CustomJob's project is used.
*
*
* string service_account = 4;
@@ -1636,6 +1646,8 @@ public Builder clearServiceAccount() {
*
* Specifies the service account for workload run-as account.
* Users submitting jobs must have act-as permission on this run-as account.
+ * If unspecified, the AI Platform Custom Code Service Agent for the
+ * CustomJob's project is used.
*
*
* string service_account = 4;
@@ -1661,11 +1673,10 @@ public Builder setServiceAccountBytes(com.google.protobuf.ByteString value) {
*
* The full name of the Compute Engine
* [network](/compute/docs/networks-and-firewalls#networks) to which the Job
- * should be peered. For example, projects/12345/global/networks/myVPC.
- * [Format](https:
- * //cloud.google.com/compute/docs/reference/rest/v1/networks/insert)
- * is of the form projects/{project}/global/networks/{network}.
- * Where {project} is a project number, as in '12345', and {network} is
+ * should be peered. For example, `projects/12345/global/networks/myVPC`.
+ * [Format](/compute/docs/reference/rest/v1/networks/insert)
+ * is of the form `projects/{project}/global/networks/{network}`.
+ * Where {project} is a project number, as in `12345`, and {network} is a
* network name.
* Private services access must already be configured for the network. If left
* unspecified, the job is not peered with any network.
@@ -1692,11 +1703,10 @@ public java.lang.String getNetwork() {
*
* The full name of the Compute Engine
* [network](/compute/docs/networks-and-firewalls#networks) to which the Job
- * should be peered. For example, projects/12345/global/networks/myVPC.
- * [Format](https:
- * //cloud.google.com/compute/docs/reference/rest/v1/networks/insert)
- * is of the form projects/{project}/global/networks/{network}.
- * Where {project} is a project number, as in '12345', and {network} is
+ * should be peered. For example, `projects/12345/global/networks/myVPC`.
+ * [Format](/compute/docs/reference/rest/v1/networks/insert)
+ * is of the form `projects/{project}/global/networks/{network}`.
+ * Where {project} is a project number, as in `12345`, and {network} is a
* network name.
* Private services access must already be configured for the network. If left
* unspecified, the job is not peered with any network.
@@ -1723,11 +1733,10 @@ public com.google.protobuf.ByteString getNetworkBytes() {
*
* The full name of the Compute Engine
* [network](/compute/docs/networks-and-firewalls#networks) to which the Job
- * should be peered. For example, projects/12345/global/networks/myVPC.
- * [Format](https:
- * //cloud.google.com/compute/docs/reference/rest/v1/networks/insert)
- * is of the form projects/{project}/global/networks/{network}.
- * Where {project} is a project number, as in '12345', and {network} is
+ * should be peered. For example, `projects/12345/global/networks/myVPC`.
+ * [Format](/compute/docs/reference/rest/v1/networks/insert)
+ * is of the form `projects/{project}/global/networks/{network}`.
+ * Where {project} is a project number, as in `12345`, and {network} is a
* network name.
* Private services access must already be configured for the network. If left
* unspecified, the job is not peered with any network.
@@ -1753,11 +1762,10 @@ public Builder setNetwork(java.lang.String value) {
*
*
* Protobuf type {@code google.cloud.aiplatform.v1beta1.FractionSplit}
diff --git a/proto-google-cloud-aiplatform-v1beta1/src/main/java/com/google/cloud/aiplatform/v1beta1/HyperparameterTuningJob.java b/proto-google-cloud-aiplatform-v1beta1/src/main/java/com/google/cloud/aiplatform/v1beta1/HyperparameterTuningJob.java
index 69a860a4e..cd90bc6c8 100644
--- a/proto-google-cloud-aiplatform-v1beta1/src/main/java/com/google/cloud/aiplatform/v1beta1/HyperparameterTuningJob.java
+++ b/proto-google-cloud-aiplatform-v1beta1/src/main/java/com/google/cloud/aiplatform/v1beta1/HyperparameterTuningJob.java
@@ -241,6 +241,23 @@ private HyperparameterTuningJob(
input.readMessage(
LabelsDefaultEntryHolder.defaultEntry.getParserForType(), extensionRegistry);
labels_.getMutableMap().put(labels__.getKey(), labels__.getValue());
+ break;
+ }
+ case 138:
+ {
+ com.google.cloud.aiplatform.v1beta1.EncryptionSpec.Builder subBuilder = null;
+ if (encryptionSpec_ != null) {
+ subBuilder = encryptionSpec_.toBuilder();
+ }
+ encryptionSpec_ =
+ input.readMessage(
+ com.google.cloud.aiplatform.v1beta1.EncryptionSpec.parser(),
+ extensionRegistry);
+ if (subBuilder != null) {
+ subBuilder.mergeFrom(encryptionSpec_);
+ encryptionSpec_ = subBuilder.buildPartial();
+ }
+
break;
}
default:
@@ -1045,6 +1062,60 @@ public java.lang.String getLabelsOrThrow(java.lang.String key) {
return map.get(key);
}
+ public static final int ENCRYPTION_SPEC_FIELD_NUMBER = 17;
+ private com.google.cloud.aiplatform.v1beta1.EncryptionSpec encryptionSpec_;
+ /**
+ *
+ *
+ *
* The full name of the Compute Engine
* [network](/compute/docs/networks-and-firewalls#networks) to which the Job
- * should be peered. For example, projects/12345/global/networks/myVPC.
- * [Format](https:
- * //cloud.google.com/compute/docs/reference/rest/v1/networks/insert)
- * is of the form projects/{project}/global/networks/{network}.
- * Where {project} is a project number, as in '12345', and {network} is
+ * should be peered. For example, `projects/12345/global/networks/myVPC`.
+ * [Format](/compute/docs/reference/rest/v1/networks/insert)
+ * is of the form `projects/{project}/global/networks/{network}`.
+ * Where {project} is a project number, as in `12345`, and {network} is a
* network name.
* Private services access must already be configured for the network. If left
* unspecified, the job is not peered with any network.
@@ -1779,11 +1787,10 @@ public Builder clearNetwork() {
*
*
* Protobuf type {@code google.cloud.aiplatform.v1beta1.FractionSplit}
@@ -374,7 +374,7 @@ protected Builder newBuilderForType(com.google.protobuf.GeneratedMessageV3.Build
* `test_fraction` may optionally be provided, they must sum to up to 1. If the
* provided ones sum to less than 1, the remainder is assigned to sets as
* decided by AI Platform. If none of the fractions are set, by default roughly
- * 80% of data will be used for training, 10% for validation, and 10% for test.
+ * 80% of data is used for training, 10% for validation, and 10% for test.
*
* The full name of the Compute Engine
* [network](/compute/docs/networks-and-firewalls#networks) to which the Job
- * should be peered. For example, projects/12345/global/networks/myVPC.
- * [Format](https:
- * //cloud.google.com/compute/docs/reference/rest/v1/networks/insert)
- * is of the form projects/{project}/global/networks/{network}.
- * Where {project} is a project number, as in '12345', and {network} is
+ * should be peered. For example, `projects/12345/global/networks/myVPC`.
+ * [Format](/compute/docs/reference/rest/v1/networks/insert)
+ * is of the form `projects/{project}/global/networks/{network}`.
+ * Where {project} is a project number, as in `12345`, and {network} is a
* network name.
* Private services access must already be configured for the network. If left
* unspecified, the job is not peered with any network.
@@ -1815,14 +1822,14 @@ public Builder setNetworkBytes(com.google.protobuf.ByteString value) {
*
*
*
*
diff --git a/proto-google-cloud-aiplatform-v1beta1/src/main/java/com/google/cloud/aiplatform/v1beta1/FractionSplit.java b/proto-google-cloud-aiplatform-v1beta1/src/main/java/com/google/cloud/aiplatform/v1beta1/FractionSplit.java
index 5f81ef874..090e468b4 100644
--- a/proto-google-cloud-aiplatform-v1beta1/src/main/java/com/google/cloud/aiplatform/v1beta1/FractionSplit.java
+++ b/proto-google-cloud-aiplatform-v1beta1/src/main/java/com/google/cloud/aiplatform/v1beta1/FractionSplit.java
@@ -27,7 +27,7 @@
* `test_fraction` may optionally be provided, they must sum to up to 1. If the
* provided ones sum to less than 1, the remainder is assigned to sets as
* decided by AI Platform. If none of the fractions are set, by default roughly
- * 80% of data will be used for training, 10% for validation, and 10% for test.
+ * 80% of data is used for training, 10% for validation, and 10% for test.
*
- * The Google Cloud Storage location to store the output of this CustomJob or
+ * The Cloud Storage location to store the output of this CustomJob or
* HyperparameterTuningJob. For HyperparameterTuningJob,
- * [base_output_directory][CustomJob.job_spec.base_output_directory] of
+ * the baseOutputDirectory of
* each child CustomJob backing a Trial is set to a subdirectory of name
- * [id][google.cloud.aiplatform.v1beta1.Trial.id] under parent HyperparameterTuningJob's
- * [base_output_directory][HyperparameterTuningJob.trial_job_spec.base_output_directory].
- * Following AI Platform environment variables will be passed to containers or
- * python modules when this field is set:
+ * [id][google.cloud.aiplatform.v1beta1.Trial.id] under its parent HyperparameterTuningJob's
+ * baseOutputDirectory.
+ * The following AI Platform environment variables will be passed to
+ * containers or python modules when this field is set:
* For CustomJob:
* * AIP_MODEL_DIR = `<base_output_directory>/model/`
* * AIP_CHECKPOINT_DIR = `<base_output_directory>/checkpoints/`
@@ -1844,14 +1851,14 @@ public boolean hasBaseOutputDirectory() {
*
*
*
*
@@ -118,7 +118,7 @@ public interface FilterSplitOrBuilder
* this filter are used to test the Model. A filter with same syntax
* as the one used in [DatasetService.ListDataItems][google.cloud.aiplatform.v1beta1.DatasetService.ListDataItems] may be used. If a
* single DataItem is matched by more than one of the FilterSplit filters,
- * then it will be assigned to the first set that applies to it in the
+ * then it is assigned to the first set that applies to it in the
* training, validation, test order.
*
- * The Google Cloud Storage location to store the output of this CustomJob or
+ * The Cloud Storage location to store the output of this CustomJob or
* HyperparameterTuningJob. For HyperparameterTuningJob,
- * [base_output_directory][CustomJob.job_spec.base_output_directory] of
+ * the baseOutputDirectory of
* each child CustomJob backing a Trial is set to a subdirectory of name
- * [id][google.cloud.aiplatform.v1beta1.Trial.id] under parent HyperparameterTuningJob's
- * [base_output_directory][HyperparameterTuningJob.trial_job_spec.base_output_directory].
- * Following AI Platform environment variables will be passed to containers or
- * python modules when this field is set:
+ * [id][google.cloud.aiplatform.v1beta1.Trial.id] under its parent HyperparameterTuningJob's
+ * baseOutputDirectory.
+ * The following AI Platform environment variables will be passed to
+ * containers or python modules when this field is set:
* For CustomJob:
* * AIP_MODEL_DIR = `<base_output_directory>/model/`
* * AIP_CHECKPOINT_DIR = `<base_output_directory>/checkpoints/`
@@ -1879,14 +1886,14 @@ public com.google.cloud.aiplatform.v1beta1.GcsDestination getBaseOutputDirectory
*
*
*
*
@@ -101,7 +101,7 @@ public interface FilterSplitOrBuilder
* this filter are used to test the Model. A filter with same syntax
* as the one used in [DatasetService.ListDataItems][google.cloud.aiplatform.v1beta1.DatasetService.ListDataItems] may be used. If a
* single DataItem is matched by more than one of the FilterSplit filters,
- * then it will be assigned to the first set that applies to it in the
+ * then it is assigned to the first set that applies to it in the
* training, validation, test order.
*
- * The Google Cloud Storage location to store the output of this CustomJob or
+ * The Cloud Storage location to store the output of this CustomJob or
* HyperparameterTuningJob. For HyperparameterTuningJob,
- * [base_output_directory][CustomJob.job_spec.base_output_directory] of
+ * the baseOutputDirectory of
* each child CustomJob backing a Trial is set to a subdirectory of name
- * [id][google.cloud.aiplatform.v1beta1.Trial.id] under parent HyperparameterTuningJob's
- * [base_output_directory][HyperparameterTuningJob.trial_job_spec.base_output_directory].
- * Following AI Platform environment variables will be passed to containers or
- * python modules when this field is set:
+ * [id][google.cloud.aiplatform.v1beta1.Trial.id] under its parent HyperparameterTuningJob's
+ * baseOutputDirectory.
+ * The following AI Platform environment variables will be passed to
+ * containers or python modules when this field is set:
* For CustomJob:
* * AIP_MODEL_DIR = `<base_output_directory>/model/`
* * AIP_CHECKPOINT_DIR = `<base_output_directory>/checkpoints/`
@@ -1917,14 +1924,14 @@ public Builder setBaseOutputDirectory(
*
*
*
*
@@ -83,7 +83,7 @@ public interface FilterSplitOrBuilder
* this filter are used to validate the Model. A filter with same syntax
* as the one used in [DatasetService.ListDataItems][google.cloud.aiplatform.v1beta1.DatasetService.ListDataItems] may be used. If a
* single DataItem is matched by more than one of the FilterSplit filters,
- * then it will be assigned to the first set that applies to it in the
+ * then it is assigned to the first set that applies to it in the
* training, validation, test order.
*
- * The Google Cloud Storage location to store the output of this CustomJob or
+ * The Cloud Storage location to store the output of this CustomJob or
* HyperparameterTuningJob. For HyperparameterTuningJob,
- * [base_output_directory][CustomJob.job_spec.base_output_directory] of
+ * the baseOutputDirectory of
* each child CustomJob backing a Trial is set to a subdirectory of name
- * [id][google.cloud.aiplatform.v1beta1.Trial.id] under parent HyperparameterTuningJob's
- * [base_output_directory][HyperparameterTuningJob.trial_job_spec.base_output_directory].
- * Following AI Platform environment variables will be passed to containers or
- * python modules when this field is set:
+ * [id][google.cloud.aiplatform.v1beta1.Trial.id] under its parent HyperparameterTuningJob's
+ * baseOutputDirectory.
+ * The following AI Platform environment variables will be passed to
+ * containers or python modules when this field is set:
* For CustomJob:
* * AIP_MODEL_DIR = `<base_output_directory>/model/`
* * AIP_CHECKPOINT_DIR = `<base_output_directory>/checkpoints/`
@@ -1952,14 +1959,14 @@ public Builder setBaseOutputDirectory(
*
*
*
*
@@ -66,7 +66,7 @@ public interface FilterSplitOrBuilder
* this filter are used to validate the Model. A filter with same syntax
* as the one used in [DatasetService.ListDataItems][google.cloud.aiplatform.v1beta1.DatasetService.ListDataItems] may be used. If a
* single DataItem is matched by more than one of the FilterSplit filters,
- * then it will be assigned to the first set that applies to it in the
+ * then it is assigned to the first set that applies to it in the
* training, validation, test order.
*
- * The Google Cloud Storage location to store the output of this CustomJob or
+ * The Cloud Storage location to store the output of this CustomJob or
* HyperparameterTuningJob. For HyperparameterTuningJob,
- * [base_output_directory][CustomJob.job_spec.base_output_directory] of
+ * the baseOutputDirectory of
* each child CustomJob backing a Trial is set to a subdirectory of name
- * [id][google.cloud.aiplatform.v1beta1.Trial.id] under parent HyperparameterTuningJob's
- * [base_output_directory][HyperparameterTuningJob.trial_job_spec.base_output_directory].
- * Following AI Platform environment variables will be passed to containers or
- * python modules when this field is set:
+ * [id][google.cloud.aiplatform.v1beta1.Trial.id] under its parent HyperparameterTuningJob's
+ * baseOutputDirectory.
+ * The following AI Platform environment variables will be passed to
+ * containers or python modules when this field is set:
* For CustomJob:
* * AIP_MODEL_DIR = `<base_output_directory>/model/`
* * AIP_CHECKPOINT_DIR = `<base_output_directory>/checkpoints/`
@@ -1994,14 +2001,14 @@ public Builder mergeBaseOutputDirectory(
*
*
*
*
@@ -48,7 +48,7 @@ public interface FilterSplitOrBuilder
* this filter are used to train the Model. A filter with same syntax
* as the one used in [DatasetService.ListDataItems][google.cloud.aiplatform.v1beta1.DatasetService.ListDataItems] may be used. If a
* single DataItem is matched by more than one of the FilterSplit filters,
- * then it will be assigned to the first set that applies to it in the
+ * then it is assigned to the first set that applies to it in the
* training, validation, test order.
*
- * The Google Cloud Storage location to store the output of this CustomJob or
+ * The Cloud Storage location to store the output of this CustomJob or
* HyperparameterTuningJob. For HyperparameterTuningJob,
- * [base_output_directory][CustomJob.job_spec.base_output_directory] of
+ * the baseOutputDirectory of
* each child CustomJob backing a Trial is set to a subdirectory of name
- * [id][google.cloud.aiplatform.v1beta1.Trial.id] under parent HyperparameterTuningJob's
- * [base_output_directory][HyperparameterTuningJob.trial_job_spec.base_output_directory].
- * Following AI Platform environment variables will be passed to containers or
- * python modules when this field is set:
+ * [id][google.cloud.aiplatform.v1beta1.Trial.id] under its parent HyperparameterTuningJob's
+ * baseOutputDirectory.
+ * The following AI Platform environment variables will be passed to
+ * containers or python modules when this field is set:
* For CustomJob:
* * AIP_MODEL_DIR = `<base_output_directory>/model/`
* * AIP_CHECKPOINT_DIR = `<base_output_directory>/checkpoints/`
@@ -2029,14 +2036,14 @@ public Builder clearBaseOutputDirectory() {
*
*
*
*
diff --git a/proto-google-cloud-aiplatform-v1beta1/src/main/java/com/google/cloud/aiplatform/v1beta1/FilterSplitOrBuilder.java b/proto-google-cloud-aiplatform-v1beta1/src/main/java/com/google/cloud/aiplatform/v1beta1/FilterSplitOrBuilder.java
index 1ea8fdbbd..b859b4ca0 100644
--- a/proto-google-cloud-aiplatform-v1beta1/src/main/java/com/google/cloud/aiplatform/v1beta1/FilterSplitOrBuilder.java
+++ b/proto-google-cloud-aiplatform-v1beta1/src/main/java/com/google/cloud/aiplatform/v1beta1/FilterSplitOrBuilder.java
@@ -31,7 +31,7 @@ public interface FilterSplitOrBuilder
* this filter are used to train the Model. A filter with same syntax
* as the one used in [DatasetService.ListDataItems][google.cloud.aiplatform.v1beta1.DatasetService.ListDataItems] may be used. If a
* single DataItem is matched by more than one of the FilterSplit filters,
- * then it will be assigned to the first set that applies to it in the
+ * then it is assigned to the first set that applies to it in the
* training, validation, test order.
*
- * The Google Cloud Storage location to store the output of this CustomJob or
+ * The Cloud Storage location to store the output of this CustomJob or
* HyperparameterTuningJob. For HyperparameterTuningJob,
- * [base_output_directory][CustomJob.job_spec.base_output_directory] of
+ * the baseOutputDirectory of
* each child CustomJob backing a Trial is set to a subdirectory of name
- * [id][google.cloud.aiplatform.v1beta1.Trial.id] under parent HyperparameterTuningJob's
- * [base_output_directory][HyperparameterTuningJob.trial_job_spec.base_output_directory].
- * Following AI Platform environment variables will be passed to containers or
- * python modules when this field is set:
+ * [id][google.cloud.aiplatform.v1beta1.Trial.id] under its parent HyperparameterTuningJob's
+ * baseOutputDirectory.
+ * The following AI Platform environment variables will be passed to
+ * containers or python modules when this field is set:
* For CustomJob:
* * AIP_MODEL_DIR = `<base_output_directory>/model/`
* * AIP_CHECKPOINT_DIR = `<base_output_directory>/checkpoints/`
@@ -2059,14 +2066,14 @@ public Builder clearBaseOutputDirectory() {
*
*
*
*
@@ -1034,7 +1036,7 @@ public Builder clearTestFilter() {
* this filter are used to test the Model. A filter with same syntax
* as the one used in [DatasetService.ListDataItems][google.cloud.aiplatform.v1beta1.DatasetService.ListDataItems] may be used. If a
* single DataItem is matched by more than one of the FilterSplit filters,
- * then it will be assigned to the first set that applies to it in the
+ * then it is assigned to the first set that applies to it in the
* training, validation, test order.
*
- * The Google Cloud Storage location to store the output of this CustomJob or
+ * The Cloud Storage location to store the output of this CustomJob or
* HyperparameterTuningJob. For HyperparameterTuningJob,
- * [base_output_directory][CustomJob.job_spec.base_output_directory] of
+ * the baseOutputDirectory of
* each child CustomJob backing a Trial is set to a subdirectory of name
- * [id][google.cloud.aiplatform.v1beta1.Trial.id] under parent HyperparameterTuningJob's
- * [base_output_directory][HyperparameterTuningJob.trial_job_spec.base_output_directory].
- * Following AI Platform environment variables will be passed to containers or
- * python modules when this field is set:
+ * [id][google.cloud.aiplatform.v1beta1.Trial.id] under its parent HyperparameterTuningJob's
+ * baseOutputDirectory.
+ * The following AI Platform environment variables will be passed to
+ * containers or python modules when this field is set:
* For CustomJob:
* * AIP_MODEL_DIR = `<base_output_directory>/model/`
* * AIP_CHECKPOINT_DIR = `<base_output_directory>/checkpoints/`
@@ -2093,14 +2100,14 @@ public Builder clearBaseOutputDirectory() {
*
*
*
*
@@ -1012,7 +1014,7 @@ public Builder setTestFilter(java.lang.String value) {
* this filter are used to test the Model. A filter with same syntax
* as the one used in [DatasetService.ListDataItems][google.cloud.aiplatform.v1beta1.DatasetService.ListDataItems] may be used. If a
* single DataItem is matched by more than one of the FilterSplit filters,
- * then it will be assigned to the first set that applies to it in the
+ * then it is assigned to the first set that applies to it in the
* training, validation, test order.
*
- * The Google Cloud Storage location to store the output of this CustomJob or
+ * The Cloud Storage location to store the output of this CustomJob or
* HyperparameterTuningJob. For HyperparameterTuningJob,
- * [base_output_directory][CustomJob.job_spec.base_output_directory] of
+ * the baseOutputDirectory of
* each child CustomJob backing a Trial is set to a subdirectory of name
- * [id][google.cloud.aiplatform.v1beta1.Trial.id] under parent HyperparameterTuningJob's
- * [base_output_directory][HyperparameterTuningJob.trial_job_spec.base_output_directory].
- * Following AI Platform environment variables will be passed to containers or
- * python modules when this field is set:
+ * [id][google.cloud.aiplatform.v1beta1.Trial.id] under its parent HyperparameterTuningJob's
+ * baseOutputDirectory.
+ * The following AI Platform environment variables will be passed to
+ * containers or python modules when this field is set:
* For CustomJob:
* * AIP_MODEL_DIR = `<base_output_directory>/model/`
* * AIP_CHECKPOINT_DIR = `<base_output_directory>/checkpoints/`
diff --git a/proto-google-cloud-aiplatform-v1beta1/src/main/java/com/google/cloud/aiplatform/v1beta1/CustomJobSpecOrBuilder.java b/proto-google-cloud-aiplatform-v1beta1/src/main/java/com/google/cloud/aiplatform/v1beta1/CustomJobSpecOrBuilder.java
index a6c089ef2..483100384 100644
--- a/proto-google-cloud-aiplatform-v1beta1/src/main/java/com/google/cloud/aiplatform/v1beta1/CustomJobSpecOrBuilder.java
+++ b/proto-google-cloud-aiplatform-v1beta1/src/main/java/com/google/cloud/aiplatform/v1beta1/CustomJobSpecOrBuilder.java
@@ -127,6 +127,8 @@ com.google.cloud.aiplatform.v1beta1.WorkerPoolSpecOrBuilder getWorkerPoolSpecsOr
*
*
@@ -986,7 +988,7 @@ public com.google.protobuf.ByteString getTestFilterBytes() {
* this filter are used to test the Model. A filter with same syntax
* as the one used in [DatasetService.ListDataItems][google.cloud.aiplatform.v1beta1.DatasetService.ListDataItems] may be used. If a
* single DataItem is matched by more than one of the FilterSplit filters,
- * then it will be assigned to the first set that applies to it in the
+ * then it is assigned to the first set that applies to it in the
* training, validation, test order.
*
* Specifies the service account for workload run-as account.
* Users submitting jobs must have act-as permission on this run-as account.
+ * If unspecified, the AI Platform Custom Code Service Agent for the
+ * CustomJob's project is used.
*
*
* string service_account = 4;
@@ -140,6 +142,8 @@ com.google.cloud.aiplatform.v1beta1.WorkerPoolSpecOrBuilder getWorkerPoolSpecsOr
*
* Specifies the service account for workload run-as account.
* Users submitting jobs must have act-as permission on this run-as account.
+ * If unspecified, the AI Platform Custom Code Service Agent for the
+ * CustomJob's project is used.
*
*
* string service_account = 4;
@@ -154,11 +158,10 @@ com.google.cloud.aiplatform.v1beta1.WorkerPoolSpecOrBuilder getWorkerPoolSpecsOr
*
* The full name of the Compute Engine
* [network](/compute/docs/networks-and-firewalls#networks) to which the Job
- * should be peered. For example, projects/12345/global/networks/myVPC.
- * [Format](https:
- * //cloud.google.com/compute/docs/reference/rest/v1/networks/insert)
- * is of the form projects/{project}/global/networks/{network}.
- * Where {project} is a project number, as in '12345', and {network} is
+ * should be peered. For example, `projects/12345/global/networks/myVPC`.
+ * [Format](/compute/docs/reference/rest/v1/networks/insert)
+ * is of the form `projects/{project}/global/networks/{network}`.
+ * Where {project} is a project number, as in `12345`, and {network} is a
* network name.
* Private services access must already be configured for the network. If left
* unspecified, the job is not peered with any network.
@@ -175,11 +178,10 @@ com.google.cloud.aiplatform.v1beta1.WorkerPoolSpecOrBuilder getWorkerPoolSpecsOr
*
*
@@ -959,7 +961,7 @@ public java.lang.String getTestFilter() {
* this filter are used to test the Model. A filter with same syntax
* as the one used in [DatasetService.ListDataItems][google.cloud.aiplatform.v1beta1.DatasetService.ListDataItems] may be used. If a
* single DataItem is matched by more than one of the FilterSplit filters,
- * then it will be assigned to the first set that applies to it in the
+ * then it is assigned to the first set that applies to it in the
* training, validation, test order.
*
* The full name of the Compute Engine
* [network](/compute/docs/networks-and-firewalls#networks) to which the Job
- * should be peered. For example, projects/12345/global/networks/myVPC.
- * [Format](https:
- * //cloud.google.com/compute/docs/reference/rest/v1/networks/insert)
- * is of the form projects/{project}/global/networks/{network}.
- * Where {project} is a project number, as in '12345', and {network} is
+ * should be peered. For example, `projects/12345/global/networks/myVPC`.
+ * [Format](/compute/docs/reference/rest/v1/networks/insert)
+ * is of the form `projects/{project}/global/networks/{network}`.
+ * Where {project} is a project number, as in `12345`, and {network} is a
* network name.
* Private services access must already be configured for the network. If left
* unspecified, the job is not peered with any network.
@@ -195,14 +197,14 @@ com.google.cloud.aiplatform.v1beta1.WorkerPoolSpecOrBuilder getWorkerPoolSpecsOr
*
*
*
*
@@ -932,7 +934,7 @@ public Builder setValidationFilterBytes(com.google.protobuf.ByteString value) {
* this filter are used to test the Model. A filter with same syntax
* as the one used in [DatasetService.ListDataItems][google.cloud.aiplatform.v1beta1.DatasetService.ListDataItems] may be used. If a
* single DataItem is matched by more than one of the FilterSplit filters,
- * then it will be assigned to the first set that applies to it in the
+ * then it is assigned to the first set that applies to it in the
* training, validation, test order.
*
- * The Google Cloud Storage location to store the output of this CustomJob or
+ * The Cloud Storage location to store the output of this CustomJob or
* HyperparameterTuningJob. For HyperparameterTuningJob,
- * [base_output_directory][CustomJob.job_spec.base_output_directory] of
+ * the baseOutputDirectory of
* each child CustomJob backing a Trial is set to a subdirectory of name
- * [id][google.cloud.aiplatform.v1beta1.Trial.id] under parent HyperparameterTuningJob's
- * [base_output_directory][HyperparameterTuningJob.trial_job_spec.base_output_directory].
- * Following AI Platform environment variables will be passed to containers or
- * python modules when this field is set:
+ * [id][google.cloud.aiplatform.v1beta1.Trial.id] under its parent HyperparameterTuningJob's
+ * baseOutputDirectory.
+ * The following AI Platform environment variables will be passed to
+ * containers or python modules when this field is set:
* For CustomJob:
* * AIP_MODEL_DIR = `<base_output_directory>/model/`
* * AIP_CHECKPOINT_DIR = `<base_output_directory>/checkpoints/`
@@ -222,14 +224,14 @@ com.google.cloud.aiplatform.v1beta1.WorkerPoolSpecOrBuilder getWorkerPoolSpecsOr
*
*
*
*
@@ -903,7 +905,7 @@ public Builder clearValidationFilter() {
* this filter are used to validate the Model. A filter with same syntax
* as the one used in [DatasetService.ListDataItems][google.cloud.aiplatform.v1beta1.DatasetService.ListDataItems] may be used. If a
* single DataItem is matched by more than one of the FilterSplit filters,
- * then it will be assigned to the first set that applies to it in the
+ * then it is assigned to the first set that applies to it in the
* training, validation, test order.
*
- * The Google Cloud Storage location to store the output of this CustomJob or
+ * The Cloud Storage location to store the output of this CustomJob or
* HyperparameterTuningJob. For HyperparameterTuningJob,
- * [base_output_directory][CustomJob.job_spec.base_output_directory] of
+ * the baseOutputDirectory of
* each child CustomJob backing a Trial is set to a subdirectory of name
- * [id][google.cloud.aiplatform.v1beta1.Trial.id] under parent HyperparameterTuningJob's
- * [base_output_directory][HyperparameterTuningJob.trial_job_spec.base_output_directory].
- * Following AI Platform environment variables will be passed to containers or
- * python modules when this field is set:
+ * [id][google.cloud.aiplatform.v1beta1.Trial.id] under its parent HyperparameterTuningJob's
+ * baseOutputDirectory.
+ * The following AI Platform environment variables will be passed to
+ * containers or python modules when this field is set:
* For CustomJob:
* * AIP_MODEL_DIR = `<base_output_directory>/model/`
* * AIP_CHECKPOINT_DIR = `<base_output_directory>/checkpoints/`
@@ -249,14 +251,14 @@ com.google.cloud.aiplatform.v1beta1.WorkerPoolSpecOrBuilder getWorkerPoolSpecsOr
*
*
*
*
@@ -881,7 +883,7 @@ public Builder setValidationFilter(java.lang.String value) {
* this filter are used to validate the Model. A filter with same syntax
* as the one used in [DatasetService.ListDataItems][google.cloud.aiplatform.v1beta1.DatasetService.ListDataItems] may be used. If a
* single DataItem is matched by more than one of the FilterSplit filters,
- * then it will be assigned to the first set that applies to it in the
+ * then it is assigned to the first set that applies to it in the
* training, validation, test order.
*
- * The Google Cloud Storage location to store the output of this CustomJob or
+ * The Cloud Storage location to store the output of this CustomJob or
* HyperparameterTuningJob. For HyperparameterTuningJob,
- * [base_output_directory][CustomJob.job_spec.base_output_directory] of
+ * the baseOutputDirectory of
* each child CustomJob backing a Trial is set to a subdirectory of name
- * [id][google.cloud.aiplatform.v1beta1.Trial.id] under parent HyperparameterTuningJob's
- * [base_output_directory][HyperparameterTuningJob.trial_job_spec.base_output_directory].
- * Following AI Platform environment variables will be passed to containers or
- * python modules when this field is set:
+ * [id][google.cloud.aiplatform.v1beta1.Trial.id] under its parent HyperparameterTuningJob's
+ * baseOutputDirectory.
+ * The following AI Platform environment variables will be passed to
+ * containers or python modules when this field is set:
* For CustomJob:
* * AIP_MODEL_DIR = `<base_output_directory>/model/`
* * AIP_CHECKPOINT_DIR = `<base_output_directory>/checkpoints/`
diff --git a/proto-google-cloud-aiplatform-v1beta1/src/main/java/com/google/cloud/aiplatform/v1beta1/DataLabelingJob.java b/proto-google-cloud-aiplatform-v1beta1/src/main/java/com/google/cloud/aiplatform/v1beta1/DataLabelingJob.java
index 6e0fe044c..2df6ab022 100644
--- a/proto-google-cloud-aiplatform-v1beta1/src/main/java/com/google/cloud/aiplatform/v1beta1/DataLabelingJob.java
+++ b/proto-google-cloud-aiplatform-v1beta1/src/main/java/com/google/cloud/aiplatform/v1beta1/DataLabelingJob.java
@@ -229,6 +229,23 @@ private DataLabelingJob(
mutable_bitField0_ |= 0x00000008;
}
specialistPools_.add(s);
+ break;
+ }
+ case 162:
+ {
+ com.google.cloud.aiplatform.v1beta1.EncryptionSpec.Builder subBuilder = null;
+ if (encryptionSpec_ != null) {
+ subBuilder = encryptionSpec_.toBuilder();
+ }
+ encryptionSpec_ =
+ input.readMessage(
+ com.google.cloud.aiplatform.v1beta1.EncryptionSpec.parser(),
+ extensionRegistry);
+ if (subBuilder != null) {
+ subBuilder.mergeFrom(encryptionSpec_);
+ encryptionSpec_ = subBuilder.buildPartial();
+ }
+
break;
}
case 170:
@@ -1248,13 +1265,70 @@ public com.google.protobuf.ByteString getSpecialistPoolsBytes(int index) {
return specialistPools_.getByteString(index);
}
+ public static final int ENCRYPTION_SPEC_FIELD_NUMBER = 20;
+ private com.google.cloud.aiplatform.v1beta1.EncryptionSpec encryptionSpec_;
+ /**
+ *
+ *
+ *
*
- *
+ * Customer-managed encryption key spec for a DataLabelingJob. If set, this
+ * DataLabelingJob will be secured by this key.
+ * Note: Annotations created in the DataLabelingJob are associated with
+ * the EncryptionSpec of the Dataset they are exported to.
+ *
+ *
+ * .google.cloud.aiplatform.v1beta1.EncryptionSpec encryption_spec = 20;
+ *
+ * @return Whether the encryptionSpec field is set.
+ */
+ @java.lang.Override
+ public boolean hasEncryptionSpec() {
+ return encryptionSpec_ != null;
+ }
+ /**
+ *
+ *
+ *
+ * Customer-managed encryption key spec for a DataLabelingJob. If set, this
+ * DataLabelingJob will be secured by this key.
+ * Note: Annotations created in the DataLabelingJob are associated with
+ * the EncryptionSpec of the Dataset they are exported to.
+ *
+ *
+ * .google.cloud.aiplatform.v1beta1.EncryptionSpec encryption_spec = 20;
+ *
+ * @return The encryptionSpec.
+ */
+ @java.lang.Override
+ public com.google.cloud.aiplatform.v1beta1.EncryptionSpec getEncryptionSpec() {
+ return encryptionSpec_ == null
+ ? com.google.cloud.aiplatform.v1beta1.EncryptionSpec.getDefaultInstance()
+ : encryptionSpec_;
+ }
+ /**
+ *
+ *
+ *
+ * Customer-managed encryption key spec for a DataLabelingJob. If set, this
+ * DataLabelingJob will be secured by this key.
+ * Note: Annotations created in the DataLabelingJob are associated with
+ * the EncryptionSpec of the Dataset they are exported to.
+ *
+ *
+ * .google.cloud.aiplatform.v1beta1.EncryptionSpec encryption_spec = 20;
+ */
+ @java.lang.Override
+ public com.google.cloud.aiplatform.v1beta1.EncryptionSpecOrBuilder getEncryptionSpecOrBuilder() {
+ return getEncryptionSpec();
+ }
+
public static final int ACTIVE_LEARNING_CONFIG_FIELD_NUMBER = 21;
private com.google.cloud.aiplatform.v1beta1.ActiveLearningConfig activeLearningConfig_;
/**
*
*
*
- * Paramaters that configure active learning pipeline. Active learning will
+ * Parameters that configure active learning pipeline. Active learning will
* label the data incrementally via several iterations. For every iteration,
* it will select a batch of data based on the sampling strategy.
*
@@ -1271,7 +1345,7 @@ public boolean hasActiveLearningConfig() {
*
*
*
- * Paramaters that configure active learning pipeline. Active learning will
+ * Parameters that configure active learning pipeline. Active learning will
* label the data incrementally via several iterations. For every iteration,
* it will select a batch of data based on the sampling strategy.
*
@@ -1290,7 +1364,7 @@ public com.google.cloud.aiplatform.v1beta1.ActiveLearningConfig getActiveLearnin
*
*
*
- * Paramaters that configure active learning pipeline. Active learning will
+ * Parameters that configure active learning pipeline. Active learning will
* label the data incrementally via several iterations. For every iteration,
* it will select a batch of data based on the sampling strategy.
*
@@ -1360,6 +1434,9 @@ public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io
for (int i = 0; i < specialistPools_.size(); i++) {
com.google.protobuf.GeneratedMessageV3.writeString(output, 16, specialistPools_.getRaw(i));
}
+ if (encryptionSpec_ != null) {
+ output.writeMessage(20, getEncryptionSpec());
+ }
if (activeLearningConfig_ != null) {
output.writeMessage(21, getActiveLearningConfig());
}
@@ -1444,6 +1521,9 @@ public int getSerializedSize() {
size += dataSize;
size += 2 * getSpecialistPoolsList().size();
}
+ if (encryptionSpec_ != null) {
+ size += com.google.protobuf.CodedOutputStream.computeMessageSize(20, getEncryptionSpec());
+ }
if (activeLearningConfig_ != null) {
size +=
com.google.protobuf.CodedOutputStream.computeMessageSize(21, getActiveLearningConfig());
@@ -1498,6 +1578,10 @@ public boolean equals(final java.lang.Object obj) {
}
if (!internalGetLabels().equals(other.internalGetLabels())) return false;
if (!getSpecialistPoolsList().equals(other.getSpecialistPoolsList())) return false;
+ if (hasEncryptionSpec() != other.hasEncryptionSpec()) return false;
+ if (hasEncryptionSpec()) {
+ if (!getEncryptionSpec().equals(other.getEncryptionSpec())) return false;
+ }
if (hasActiveLearningConfig() != other.hasActiveLearningConfig()) return false;
if (hasActiveLearningConfig()) {
if (!getActiveLearningConfig().equals(other.getActiveLearningConfig())) return false;
@@ -1563,6 +1647,10 @@ public int hashCode() {
hash = (37 * hash) + SPECIALIST_POOLS_FIELD_NUMBER;
hash = (53 * hash) + getSpecialistPoolsList().hashCode();
}
+ if (hasEncryptionSpec()) {
+ hash = (37 * hash) + ENCRYPTION_SPEC_FIELD_NUMBER;
+ hash = (53 * hash) + getEncryptionSpec().hashCode();
+ }
if (hasActiveLearningConfig()) {
hash = (37 * hash) + ACTIVE_LEARNING_CONFIG_FIELD_NUMBER;
hash = (53 * hash) + getActiveLearningConfig().hashCode();
@@ -1787,6 +1875,12 @@ public Builder clear() {
internalGetMutableLabels().clear();
specialistPools_ = com.google.protobuf.LazyStringArrayList.EMPTY;
bitField0_ = (bitField0_ & ~0x00000008);
+ if (encryptionSpecBuilder_ == null) {
+ encryptionSpec_ = null;
+ } else {
+ encryptionSpec_ = null;
+ encryptionSpecBuilder_ = null;
+ }
if (activeLearningConfigBuilder_ == null) {
activeLearningConfig_ = null;
} else {
@@ -1867,6 +1961,11 @@ public com.google.cloud.aiplatform.v1beta1.DataLabelingJob buildPartial() {
bitField0_ = (bitField0_ & ~0x00000008);
}
result.specialistPools_ = specialistPools_;
+ if (encryptionSpecBuilder_ == null) {
+ result.encryptionSpec_ = encryptionSpec_;
+ } else {
+ result.encryptionSpec_ = encryptionSpecBuilder_.build();
+ }
if (activeLearningConfigBuilder_ == null) {
result.activeLearningConfig_ = activeLearningConfig_;
} else {
@@ -1984,6 +2083,9 @@ public Builder mergeFrom(com.google.cloud.aiplatform.v1beta1.DataLabelingJob oth
}
onChanged();
}
+ if (other.hasEncryptionSpec()) {
+ mergeEncryptionSpec(other.getEncryptionSpec());
+ }
if (other.hasActiveLearningConfig()) {
mergeActiveLearningConfig(other.getActiveLearningConfig());
}
@@ -4450,6 +4552,220 @@ public Builder addSpecialistPoolsBytes(com.google.protobuf.ByteString value) {
return this;
}
+ private com.google.cloud.aiplatform.v1beta1.EncryptionSpec encryptionSpec_;
+ private com.google.protobuf.SingleFieldBuilderV3<
+ com.google.cloud.aiplatform.v1beta1.EncryptionSpec,
+ com.google.cloud.aiplatform.v1beta1.EncryptionSpec.Builder,
+ com.google.cloud.aiplatform.v1beta1.EncryptionSpecOrBuilder>
+ encryptionSpecBuilder_;
+ /**
+ *
+ *
+ *
+ * Customer-managed encryption key spec for a DataLabelingJob. If set, this
+ * DataLabelingJob will be secured by this key.
+ * Note: Annotations created in the DataLabelingJob are associated with
+ * the EncryptionSpec of the Dataset they are exported to.
+ *
+ *
+ * .google.cloud.aiplatform.v1beta1.EncryptionSpec encryption_spec = 20;
+ *
+ * @return Whether the encryptionSpec field is set.
+ */
+ public boolean hasEncryptionSpec() {
+ return encryptionSpecBuilder_ != null || encryptionSpec_ != null;
+ }
+ /**
+ *
+ *
+ *
+ * Customer-managed encryption key spec for a DataLabelingJob. If set, this
+ * DataLabelingJob will be secured by this key.
+ * Note: Annotations created in the DataLabelingJob are associated with
+ * the EncryptionSpec of the Dataset they are exported to.
+ *
+ *
+ * .google.cloud.aiplatform.v1beta1.EncryptionSpec encryption_spec = 20;
+ *
+ * @return The encryptionSpec.
+ */
+ public com.google.cloud.aiplatform.v1beta1.EncryptionSpec getEncryptionSpec() {
+ if (encryptionSpecBuilder_ == null) {
+ return encryptionSpec_ == null
+ ? com.google.cloud.aiplatform.v1beta1.EncryptionSpec.getDefaultInstance()
+ : encryptionSpec_;
+ } else {
+ return encryptionSpecBuilder_.getMessage();
+ }
+ }
+ /**
+ *
+ *
+ *
+ * Customer-managed encryption key spec for a DataLabelingJob. If set, this
+ * DataLabelingJob will be secured by this key.
+ * Note: Annotations created in the DataLabelingJob are associated with
+ * the EncryptionSpec of the Dataset they are exported to.
+ *
+ *
+ * .google.cloud.aiplatform.v1beta1.EncryptionSpec encryption_spec = 20;
+ */
+ public Builder setEncryptionSpec(com.google.cloud.aiplatform.v1beta1.EncryptionSpec value) {
+ if (encryptionSpecBuilder_ == null) {
+ if (value == null) {
+ throw new NullPointerException();
+ }
+ encryptionSpec_ = value;
+ onChanged();
+ } else {
+ encryptionSpecBuilder_.setMessage(value);
+ }
+
+ return this;
+ }
+ /**
+ *
+ *
+ *
+ * Customer-managed encryption key spec for a DataLabelingJob. If set, this
+ * DataLabelingJob will be secured by this key.
+ * Note: Annotations created in the DataLabelingJob are associated with
+ * the EncryptionSpec of the Dataset they are exported to.
+ *
+ *
+ * .google.cloud.aiplatform.v1beta1.EncryptionSpec encryption_spec = 20;
+ */
+ public Builder setEncryptionSpec(
+ com.google.cloud.aiplatform.v1beta1.EncryptionSpec.Builder builderForValue) {
+ if (encryptionSpecBuilder_ == null) {
+ encryptionSpec_ = builderForValue.build();
+ onChanged();
+ } else {
+ encryptionSpecBuilder_.setMessage(builderForValue.build());
+ }
+
+ return this;
+ }
+ /**
+ *
+ *
+ *
+ * Customer-managed encryption key spec for a DataLabelingJob. If set, this
+ * DataLabelingJob will be secured by this key.
+ * Note: Annotations created in the DataLabelingJob are associated with
+ * the EncryptionSpec of the Dataset they are exported to.
+ *
+ *
+ * .google.cloud.aiplatform.v1beta1.EncryptionSpec encryption_spec = 20;
+ */
+ public Builder mergeEncryptionSpec(com.google.cloud.aiplatform.v1beta1.EncryptionSpec value) {
+ if (encryptionSpecBuilder_ == null) {
+ if (encryptionSpec_ != null) {
+ encryptionSpec_ =
+ com.google.cloud.aiplatform.v1beta1.EncryptionSpec.newBuilder(encryptionSpec_)
+ .mergeFrom(value)
+ .buildPartial();
+ } else {
+ encryptionSpec_ = value;
+ }
+ onChanged();
+ } else {
+ encryptionSpecBuilder_.mergeFrom(value);
+ }
+
+ return this;
+ }
+ /**
+ *
+ *
+ *
+ * Customer-managed encryption key spec for a DataLabelingJob. If set, this
+ * DataLabelingJob will be secured by this key.
+ * Note: Annotations created in the DataLabelingJob are associated with
+ * the EncryptionSpec of the Dataset they are exported to.
+ *
+ *
+ * .google.cloud.aiplatform.v1beta1.EncryptionSpec encryption_spec = 20;
+ */
+ public Builder clearEncryptionSpec() {
+ if (encryptionSpecBuilder_ == null) {
+ encryptionSpec_ = null;
+ onChanged();
+ } else {
+ encryptionSpec_ = null;
+ encryptionSpecBuilder_ = null;
+ }
+
+ return this;
+ }
+ /**
+ *
+ *
+ *
+ * Customer-managed encryption key spec for a DataLabelingJob. If set, this
+ * DataLabelingJob will be secured by this key.
+ * Note: Annotations created in the DataLabelingJob are associated with
+ * the EncryptionSpec of the Dataset they are exported to.
+ *
+ *
+ * .google.cloud.aiplatform.v1beta1.EncryptionSpec encryption_spec = 20;
+ */
+ public com.google.cloud.aiplatform.v1beta1.EncryptionSpec.Builder getEncryptionSpecBuilder() {
+
+ onChanged();
+ return getEncryptionSpecFieldBuilder().getBuilder();
+ }
+ /**
+ *
+ *
+ *
+ * Customer-managed encryption key spec for a DataLabelingJob. If set, this
+ * DataLabelingJob will be secured by this key.
+ * Note: Annotations created in the DataLabelingJob are associated with
+ * the EncryptionSpec of the Dataset they are exported to.
+ *
+ *
+ * .google.cloud.aiplatform.v1beta1.EncryptionSpec encryption_spec = 20;
+ */
+ public com.google.cloud.aiplatform.v1beta1.EncryptionSpecOrBuilder
+ getEncryptionSpecOrBuilder() {
+ if (encryptionSpecBuilder_ != null) {
+ return encryptionSpecBuilder_.getMessageOrBuilder();
+ } else {
+ return encryptionSpec_ == null
+ ? com.google.cloud.aiplatform.v1beta1.EncryptionSpec.getDefaultInstance()
+ : encryptionSpec_;
+ }
+ }
+ /**
+ *
+ *
+ *
+ * Customer-managed encryption key spec for a DataLabelingJob. If set, this
+ * DataLabelingJob will be secured by this key.
+ * Note: Annotations created in the DataLabelingJob are associated with
+ * the EncryptionSpec of the Dataset they are exported to.
+ *
+ *
+ * .google.cloud.aiplatform.v1beta1.EncryptionSpec encryption_spec = 20;
+ */
+ private com.google.protobuf.SingleFieldBuilderV3<
+ com.google.cloud.aiplatform.v1beta1.EncryptionSpec,
+ com.google.cloud.aiplatform.v1beta1.EncryptionSpec.Builder,
+ com.google.cloud.aiplatform.v1beta1.EncryptionSpecOrBuilder>
+ getEncryptionSpecFieldBuilder() {
+ if (encryptionSpecBuilder_ == null) {
+ encryptionSpecBuilder_ =
+ new com.google.protobuf.SingleFieldBuilderV3<
+ com.google.cloud.aiplatform.v1beta1.EncryptionSpec,
+ com.google.cloud.aiplatform.v1beta1.EncryptionSpec.Builder,
+ com.google.cloud.aiplatform.v1beta1.EncryptionSpecOrBuilder>(
+ getEncryptionSpec(), getParentForChildren(), isClean());
+ encryptionSpec_ = null;
+ }
+ return encryptionSpecBuilder_;
+ }
+
private com.google.cloud.aiplatform.v1beta1.ActiveLearningConfig activeLearningConfig_;
private com.google.protobuf.SingleFieldBuilderV3<
com.google.cloud.aiplatform.v1beta1.ActiveLearningConfig,
@@ -4460,7 +4776,7 @@ public Builder addSpecialistPoolsBytes(com.google.protobuf.ByteString value) {
*
*
*
- * Paramaters that configure active learning pipeline. Active learning will
+ * Parameters that configure active learning pipeline. Active learning will
* label the data incrementally via several iterations. For every iteration,
* it will select a batch of data based on the sampling strategy.
*
@@ -4477,7 +4793,7 @@ public boolean hasActiveLearningConfig() {
*
*
*
- * Paramaters that configure active learning pipeline. Active learning will
+ * Parameters that configure active learning pipeline. Active learning will
* label the data incrementally via several iterations. For every iteration,
* it will select a batch of data based on the sampling strategy.
*
@@ -4500,7 +4816,7 @@ public com.google.cloud.aiplatform.v1beta1.ActiveLearningConfig getActiveLearnin
*
*
*
- * Paramaters that configure active learning pipeline. Active learning will
+ * Parameters that configure active learning pipeline. Active learning will
* label the data incrementally via several iterations. For every iteration,
* it will select a batch of data based on the sampling strategy.
*
@@ -4526,7 +4842,7 @@ public Builder setActiveLearningConfig(
*
*
*
- * Paramaters that configure active learning pipeline. Active learning will
+ * Parameters that configure active learning pipeline. Active learning will
* label the data incrementally via several iterations. For every iteration,
* it will select a batch of data based on the sampling strategy.
*
@@ -4549,7 +4865,7 @@ public Builder setActiveLearningConfig(
*
*
*
- * Paramaters that configure active learning pipeline. Active learning will
+ * Parameters that configure active learning pipeline. Active learning will
* label the data incrementally via several iterations. For every iteration,
* it will select a batch of data based on the sampling strategy.
*
@@ -4580,7 +4896,7 @@ public Builder mergeActiveLearningConfig(
*
*
*
- * Paramaters that configure active learning pipeline. Active learning will
+ * Parameters that configure active learning pipeline. Active learning will
* label the data incrementally via several iterations. For every iteration,
* it will select a batch of data based on the sampling strategy.
*
@@ -4603,7 +4919,7 @@ public Builder clearActiveLearningConfig() {
*
*
*
- * Paramaters that configure active learning pipeline. Active learning will
+ * Parameters that configure active learning pipeline. Active learning will
* label the data incrementally via several iterations. For every iteration,
* it will select a batch of data based on the sampling strategy.
*
@@ -4621,7 +4937,7 @@ public Builder clearActiveLearningConfig() {
*
*
*
- * Paramaters that configure active learning pipeline. Active learning will
+ * Parameters that configure active learning pipeline. Active learning will
* label the data incrementally via several iterations. For every iteration,
* it will select a batch of data based on the sampling strategy.
*
@@ -4643,7 +4959,7 @@ public Builder clearActiveLearningConfig() {
*
*
*
- * Paramaters that configure active learning pipeline. Active learning will
+ * Parameters that configure active learning pipeline. Active learning will
* label the data incrementally via several iterations. For every iteration,
* it will select a batch of data based on the sampling strategy.
*
diff --git a/proto-google-cloud-aiplatform-v1beta1/src/main/java/com/google/cloud/aiplatform/v1beta1/DataLabelingJobOrBuilder.java b/proto-google-cloud-aiplatform-v1beta1/src/main/java/com/google/cloud/aiplatform/v1beta1/DataLabelingJobOrBuilder.java
index d14e83ae7..5e4ffea31 100644
--- a/proto-google-cloud-aiplatform-v1beta1/src/main/java/com/google/cloud/aiplatform/v1beta1/DataLabelingJobOrBuilder.java
+++ b/proto-google-cloud-aiplatform-v1beta1/src/main/java/com/google/cloud/aiplatform/v1beta1/DataLabelingJobOrBuilder.java
@@ -692,7 +692,51 @@ java.lang.String getAnnotationLabelsOrDefault(
*
*
*
- * Paramaters that configure active learning pipeline. Active learning will
+ * Customer-managed encryption key spec for a DataLabelingJob. If set, this
+ * DataLabelingJob will be secured by this key.
+ * Note: Annotations created in the DataLabelingJob are associated with
+ * the EncryptionSpec of the Dataset they are exported to.
+ *
+ *
+ * .google.cloud.aiplatform.v1beta1.EncryptionSpec encryption_spec = 20;
+ *
+ * @return Whether the encryptionSpec field is set.
+ */
+ boolean hasEncryptionSpec();
+ /**
+ *
+ *
+ *
+ * Customer-managed encryption key spec for a DataLabelingJob. If set, this
+ * DataLabelingJob will be secured by this key.
+ * Note: Annotations created in the DataLabelingJob are associated with
+ * the EncryptionSpec of the Dataset they are exported to.
+ *
+ *
+ * .google.cloud.aiplatform.v1beta1.EncryptionSpec encryption_spec = 20;
+ *
+ * @return The encryptionSpec.
+ */
+ com.google.cloud.aiplatform.v1beta1.EncryptionSpec getEncryptionSpec();
+ /**
+ *
+ *
+ *
+ * Customer-managed encryption key spec for a DataLabelingJob. If set, this
+ * DataLabelingJob will be secured by this key.
+ * Note: Annotations created in the DataLabelingJob are associated with
+ * the EncryptionSpec of the Dataset they are exported to.
+ *
+ *
+ * .google.cloud.aiplatform.v1beta1.EncryptionSpec encryption_spec = 20;
+ */
+ com.google.cloud.aiplatform.v1beta1.EncryptionSpecOrBuilder getEncryptionSpecOrBuilder();
+
+ /**
+ *
+ *
+ *
+ * Parameters that configure active learning pipeline. Active learning will
* label the data incrementally via several iterations. For every iteration,
* it will select a batch of data based on the sampling strategy.
*
@@ -706,7 +750,7 @@ java.lang.String getAnnotationLabelsOrDefault(
*
*
*
- * Paramaters that configure active learning pipeline. Active learning will
+ * Parameters that configure active learning pipeline. Active learning will
* label the data incrementally via several iterations. For every iteration,
* it will select a batch of data based on the sampling strategy.
*
@@ -720,7 +764,7 @@ java.lang.String getAnnotationLabelsOrDefault(
*
*
*
- * Paramaters that configure active learning pipeline. Active learning will
+ * Parameters that configure active learning pipeline. Active learning will
* label the data incrementally via several iterations. For every iteration,
* it will select a batch of data based on the sampling strategy.
*
diff --git a/proto-google-cloud-aiplatform-v1beta1/src/main/java/com/google/cloud/aiplatform/v1beta1/DataLabelingJobProto.java b/proto-google-cloud-aiplatform-v1beta1/src/main/java/com/google/cloud/aiplatform/v1beta1/DataLabelingJobProto.java
index ab991290d..3fb48edbb 100644
--- a/proto-google-cloud-aiplatform-v1beta1/src/main/java/com/google/cloud/aiplatform/v1beta1/DataLabelingJobProto.java
+++ b/proto-google-cloud-aiplatform-v1beta1/src/main/java/com/google/cloud/aiplatform/v1beta1/DataLabelingJobProto.java
@@ -65,59 +65,62 @@ public static com.google.protobuf.Descriptors.FileDescriptor getDescriptor() {
+ "rm.v1beta1\032\037google/api/field_behavior.pr"
+ "oto\032\031google/api/resource.proto\0326google/c"
+ "loud/aiplatform/v1beta1/accelerator_type"
- + ".proto\032/google/cloud/aiplatform/v1beta1/"
- + "job_state.proto\0325google/cloud/aiplatform"
- + "/v1beta1/specialist_pool.proto\032\034google/p"
- + "rotobuf/struct.proto\032\037google/protobuf/ti"
- + "mestamp.proto\032\027google/rpc/status.proto\032\027"
- + "google/type/money.proto\032\034google/api/anno"
- + "tations.proto\"\256\010\n\017DataLabelingJob\022\021\n\004nam"
- + "e\030\001 \001(\tB\003\340A\003\022\031\n\014display_name\030\002 \001(\tB\003\340A\002\022"
- + ";\n\010datasets\030\003 \003(\tB)\340A\002\372A#\n!aiplatform.go"
- + "ogleapis.com/Dataset\022a\n\021annotation_label"
- + "s\030\014 \003(\0132F.google.cloud.aiplatform.v1beta"
- + "1.DataLabelingJob.AnnotationLabelsEntry\022"
- + "\032\n\rlabeler_count\030\004 \001(\005B\003\340A\002\022\034\n\017instructi"
- + "on_uri\030\005 \001(\tB\003\340A\002\022\036\n\021inputs_schema_uri\030\006"
- + " \001(\tB\003\340A\002\022+\n\006inputs\030\007 \001(\0132\026.google.proto"
- + "buf.ValueB\003\340A\002\022=\n\005state\030\010 \001(\0162).google.c"
- + "loud.aiplatform.v1beta1.JobStateB\003\340A\003\022\036\n"
- + "\021labeling_progress\030\r \001(\005B\003\340A\003\022.\n\rcurrent"
- + "_spend\030\016 \001(\0132\022.google.type.MoneyB\003\340A\003\0224\n"
- + "\013create_time\030\t \001(\0132\032.google.protobuf.Tim"
- + "estampB\003\340A\003\0224\n\013update_time\030\n \001(\0132\032.googl"
- + "e.protobuf.TimestampB\003\340A\003\022&\n\005error\030\026 \001(\013"
- + "2\022.google.rpc.StatusB\003\340A\003\022L\n\006labels\030\013 \003("
- + "\0132<.google.cloud.aiplatform.v1beta1.Data"
- + "LabelingJob.LabelsEntry\022\030\n\020specialist_po"
- + "ols\030\020 \003(\t\022U\n\026active_learning_config\030\025 \001("
- + "\01325.google.cloud.aiplatform.v1beta1.Acti"
- + "veLearningConfig\0327\n\025AnnotationLabelsEntr"
- + "y\022\013\n\003key\030\001 \001(\t\022\r\n\005value\030\002 \001(\t:\0028\001\032-\n\013Lab"
- + "elsEntry\022\013\n\003key\030\001 \001(\t\022\r\n\005value\030\002 \001(\t:\0028\001"
- + ":|\352Ay\n)aiplatform.googleapis.com/DataLab"
- + "elingJob\022Lprojects/{project}/locations/{"
- + "location}/dataLabelingJobs/{data_labelin"
- + "g_job}\"\202\002\n\024ActiveLearningConfig\022\035\n\023max_d"
- + "ata_item_count\030\001 \001(\003H\000\022\"\n\030max_data_item_"
- + "percentage\030\002 \001(\005H\000\022D\n\rsample_config\030\003 \001("
- + "\0132-.google.cloud.aiplatform.v1beta1.Samp"
- + "leConfig\022H\n\017training_config\030\004 \001(\0132/.goog"
- + "le.cloud.aiplatform.v1beta1.TrainingConf"
- + "igB\027\n\025human_labeling_budget\"\275\002\n\014SampleCo"
- + "nfig\022)\n\037initial_batch_sample_percentage\030"
- + "\001 \001(\005H\000\022+\n!following_batch_sample_percen"
- + "tage\030\003 \001(\005H\001\022U\n\017sample_strategy\030\005 \001(\0162<."
- + "google.cloud.aiplatform.v1beta1.SampleCo"
- + "nfig.SampleStrategy\"B\n\016SampleStrategy\022\037\n"
- + "\033SAMPLE_STRATEGY_UNSPECIFIED\020\000\022\017\n\013UNCERT"
- + "AINTY\020\001B\033\n\031initial_batch_sample_sizeB\035\n\033"
- + "following_batch_sample_size\"6\n\016TrainingC"
- + "onfig\022$\n\034timeout_training_milli_hours\030\001 "
- + "\001(\003B\210\001\n#com.google.cloud.aiplatform.v1be"
- + "ta1B\024DataLabelingJobProtoP\001ZIgoogle.gola"
- + "ng.org/genproto/googleapis/cloud/aiplatf"
- + "orm/v1beta1;aiplatformb\006proto3"
+ + ".proto\0325google/cloud/aiplatform/v1beta1/"
+ + "encryption_spec.proto\032/google/cloud/aipl"
+ + "atform/v1beta1/job_state.proto\0325google/c"
+ + "loud/aiplatform/v1beta1/specialist_pool."
+ + "proto\032\034google/protobuf/struct.proto\032\037goo"
+ + "gle/protobuf/timestamp.proto\032\027google/rpc"
+ + "/status.proto\032\027google/type/money.proto\032\034"
+ + "google/api/annotations.proto\"\370\010\n\017DataLab"
+ + "elingJob\022\021\n\004name\030\001 \001(\tB\003\340A\003\022\031\n\014display_n"
+ + "ame\030\002 \001(\tB\003\340A\002\022;\n\010datasets\030\003 \003(\tB)\340A\002\372A#"
+ + "\n!aiplatform.googleapis.com/Dataset\022a\n\021a"
+ + "nnotation_labels\030\014 \003(\0132F.google.cloud.ai"
+ + "platform.v1beta1.DataLabelingJob.Annotat"
+ + "ionLabelsEntry\022\032\n\rlabeler_count\030\004 \001(\005B\003\340"
+ + "A\002\022\034\n\017instruction_uri\030\005 \001(\tB\003\340A\002\022\036\n\021inpu"
+ + "ts_schema_uri\030\006 \001(\tB\003\340A\002\022+\n\006inputs\030\007 \001(\013"
+ + "2\026.google.protobuf.ValueB\003\340A\002\022=\n\005state\030\010"
+ + " \001(\0162).google.cloud.aiplatform.v1beta1.J"
+ + "obStateB\003\340A\003\022\036\n\021labeling_progress\030\r \001(\005B"
+ + "\003\340A\003\022.\n\rcurrent_spend\030\016 \001(\0132\022.google.typ"
+ + "e.MoneyB\003\340A\003\0224\n\013create_time\030\t \001(\0132\032.goog"
+ + "le.protobuf.TimestampB\003\340A\003\0224\n\013update_tim"
+ + "e\030\n \001(\0132\032.google.protobuf.TimestampB\003\340A\003"
+ + "\022&\n\005error\030\026 \001(\0132\022.google.rpc.StatusB\003\340A\003"
+ + "\022L\n\006labels\030\013 \003(\0132<.google.cloud.aiplatfo"
+ + "rm.v1beta1.DataLabelingJob.LabelsEntry\022\030"
+ + "\n\020specialist_pools\030\020 \003(\t\022H\n\017encryption_s"
+ + "pec\030\024 \001(\0132/.google.cloud.aiplatform.v1be"
+ + "ta1.EncryptionSpec\022U\n\026active_learning_co"
+ + "nfig\030\025 \001(\01325.google.cloud.aiplatform.v1b"
+ + "eta1.ActiveLearningConfig\0327\n\025AnnotationL"
+ + "abelsEntry\022\013\n\003key\030\001 \001(\t\022\r\n\005value\030\002 \001(\t:\002"
+ + "8\001\032-\n\013LabelsEntry\022\013\n\003key\030\001 \001(\t\022\r\n\005value\030"
+ + "\002 \001(\t:\0028\001:|\352Ay\n)aiplatform.googleapis.co"
+ + "m/DataLabelingJob\022Lprojects/{project}/lo"
+ + "cations/{location}/dataLabelingJobs/{dat"
+ + "a_labeling_job}\"\202\002\n\024ActiveLearningConfig"
+ + "\022\035\n\023max_data_item_count\030\001 \001(\003H\000\022\"\n\030max_d"
+ + "ata_item_percentage\030\002 \001(\005H\000\022D\n\rsample_co"
+ + "nfig\030\003 \001(\0132-.google.cloud.aiplatform.v1b"
+ + "eta1.SampleConfig\022H\n\017training_config\030\004 \001"
+ + "(\0132/.google.cloud.aiplatform.v1beta1.Tra"
+ + "iningConfigB\027\n\025human_labeling_budget\"\275\002\n"
+ + "\014SampleConfig\022)\n\037initial_batch_sample_pe"
+ + "rcentage\030\001 \001(\005H\000\022+\n!following_batch_samp"
+ + "le_percentage\030\003 \001(\005H\001\022U\n\017sample_strategy"
+ + "\030\005 \001(\0162<.google.cloud.aiplatform.v1beta1"
+ + ".SampleConfig.SampleStrategy\"B\n\016SampleSt"
+ + "rategy\022\037\n\033SAMPLE_STRATEGY_UNSPECIFIED\020\000\022"
+ + "\017\n\013UNCERTAINTY\020\001B\033\n\031initial_batch_sample"
+ + "_sizeB\035\n\033following_batch_sample_size\"6\n\016"
+ + "TrainingConfig\022$\n\034timeout_training_milli"
+ + "_hours\030\001 \001(\003B\210\001\n#com.google.cloud.aiplat"
+ + "form.v1beta1B\024DataLabelingJobProtoP\001ZIgo"
+ + "ogle.golang.org/genproto/googleapis/clou"
+ + "d/aiplatform/v1beta1;aiplatformb\006proto3"
};
descriptor =
com.google.protobuf.Descriptors.FileDescriptor.internalBuildGeneratedFileFrom(
@@ -126,6 +129,7 @@ public static com.google.protobuf.Descriptors.FileDescriptor getDescriptor() {
com.google.api.FieldBehaviorProto.getDescriptor(),
com.google.api.ResourceProto.getDescriptor(),
com.google.cloud.aiplatform.v1beta1.AcceleratorTypeProto.getDescriptor(),
+ com.google.cloud.aiplatform.v1beta1.EncryptionSpecProto.getDescriptor(),
com.google.cloud.aiplatform.v1beta1.JobStateProto.getDescriptor(),
com.google.cloud.aiplatform.v1beta1.SpecialistPoolProto.getDescriptor(),
com.google.protobuf.StructProto.getDescriptor(),
@@ -156,6 +160,7 @@ public static com.google.protobuf.Descriptors.FileDescriptor getDescriptor() {
"Error",
"Labels",
"SpecialistPools",
+ "EncryptionSpec",
"ActiveLearningConfig",
});
internal_static_google_cloud_aiplatform_v1beta1_DataLabelingJob_AnnotationLabelsEntry_descriptor =
@@ -220,6 +225,7 @@ public static com.google.protobuf.Descriptors.FileDescriptor getDescriptor() {
com.google.api.FieldBehaviorProto.getDescriptor();
com.google.api.ResourceProto.getDescriptor();
com.google.cloud.aiplatform.v1beta1.AcceleratorTypeProto.getDescriptor();
+ com.google.cloud.aiplatform.v1beta1.EncryptionSpecProto.getDescriptor();
com.google.cloud.aiplatform.v1beta1.JobStateProto.getDescriptor();
com.google.cloud.aiplatform.v1beta1.SpecialistPoolProto.getDescriptor();
com.google.protobuf.StructProto.getDescriptor();
diff --git a/proto-google-cloud-aiplatform-v1beta1/src/main/java/com/google/cloud/aiplatform/v1beta1/Dataset.java b/proto-google-cloud-aiplatform-v1beta1/src/main/java/com/google/cloud/aiplatform/v1beta1/Dataset.java
index 01df6018b..839cb7af2 100644
--- a/proto-google-cloud-aiplatform-v1beta1/src/main/java/com/google/cloud/aiplatform/v1beta1/Dataset.java
+++ b/proto-google-cloud-aiplatform-v1beta1/src/main/java/com/google/cloud/aiplatform/v1beta1/Dataset.java
@@ -157,6 +157,23 @@ private Dataset(
metadata_ = subBuilder.buildPartial();
}
+ break;
+ }
+ case 90:
+ {
+ com.google.cloud.aiplatform.v1beta1.EncryptionSpec.Builder subBuilder = null;
+ if (encryptionSpec_ != null) {
+ subBuilder = encryptionSpec_.toBuilder();
+ }
+ encryptionSpec_ =
+ input.readMessage(
+ com.google.cloud.aiplatform.v1beta1.EncryptionSpec.parser(),
+ extensionRegistry);
+ if (subBuilder != null) {
+ subBuilder.mergeFrom(encryptionSpec_);
+ encryptionSpec_ = subBuilder.buildPartial();
+ }
+
break;
}
default:
@@ -696,6 +713,57 @@ public java.lang.String getLabelsOrThrow(java.lang.String key) {
return map.get(key);
}
+ public static final int ENCRYPTION_SPEC_FIELD_NUMBER = 11;
+ private com.google.cloud.aiplatform.v1beta1.EncryptionSpec encryptionSpec_;
+ /**
+ *
+ *
+ *
+ * Customer-managed encryption key spec for a Dataset. If set, this Dataset
+ * and all sub-resources of this Dataset will be secured by this key.
+ *
+ *
+ * .google.cloud.aiplatform.v1beta1.EncryptionSpec encryption_spec = 11;
+ *
+ * @return Whether the encryptionSpec field is set.
+ */
+ @java.lang.Override
+ public boolean hasEncryptionSpec() {
+ return encryptionSpec_ != null;
+ }
+ /**
+ *
+ *
+ *
+ * Customer-managed encryption key spec for a Dataset. If set, this Dataset
+ * and all sub-resources of this Dataset will be secured by this key.
+ *
+ *
+ * .google.cloud.aiplatform.v1beta1.EncryptionSpec encryption_spec = 11;
+ *
+ * @return The encryptionSpec.
+ */
+ @java.lang.Override
+ public com.google.cloud.aiplatform.v1beta1.EncryptionSpec getEncryptionSpec() {
+ return encryptionSpec_ == null
+ ? com.google.cloud.aiplatform.v1beta1.EncryptionSpec.getDefaultInstance()
+ : encryptionSpec_;
+ }
+ /**
+ *
+ *
+ *
+ * Customer-managed encryption key spec for a Dataset. If set, this Dataset
+ * and all sub-resources of this Dataset will be secured by this key.
+ *
+ *
+ * .google.cloud.aiplatform.v1beta1.EncryptionSpec encryption_spec = 11;
+ */
+ @java.lang.Override
+ public com.google.cloud.aiplatform.v1beta1.EncryptionSpecOrBuilder getEncryptionSpecOrBuilder() {
+ return getEncryptionSpec();
+ }
+
private byte memoizedIsInitialized = -1;
@java.lang.Override
@@ -733,6 +801,9 @@ public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io
if (metadata_ != null) {
output.writeMessage(8, getMetadata());
}
+ if (encryptionSpec_ != null) {
+ output.writeMessage(11, getEncryptionSpec());
+ }
unknownFields.writeTo(output);
}
@@ -773,6 +844,9 @@ public int getSerializedSize() {
if (metadata_ != null) {
size += com.google.protobuf.CodedOutputStream.computeMessageSize(8, getMetadata());
}
+ if (encryptionSpec_ != null) {
+ size += com.google.protobuf.CodedOutputStream.computeMessageSize(11, getEncryptionSpec());
+ }
size += unknownFields.getSerializedSize();
memoizedSize = size;
return size;
@@ -806,6 +880,10 @@ public boolean equals(final java.lang.Object obj) {
}
if (!getEtag().equals(other.getEtag())) return false;
if (!internalGetLabels().equals(other.internalGetLabels())) return false;
+ if (hasEncryptionSpec() != other.hasEncryptionSpec()) return false;
+ if (hasEncryptionSpec()) {
+ if (!getEncryptionSpec().equals(other.getEncryptionSpec())) return false;
+ }
if (!unknownFields.equals(other.unknownFields)) return false;
return true;
}
@@ -841,6 +919,10 @@ public int hashCode() {
hash = (37 * hash) + LABELS_FIELD_NUMBER;
hash = (53 * hash) + internalGetLabels().hashCode();
}
+ if (hasEncryptionSpec()) {
+ hash = (37 * hash) + ENCRYPTION_SPEC_FIELD_NUMBER;
+ hash = (53 * hash) + getEncryptionSpec().hashCode();
+ }
hash = (29 * hash) + unknownFields.hashCode();
memoizedHashCode = hash;
return hash;
@@ -1033,6 +1115,12 @@ public Builder clear() {
etag_ = "";
internalGetMutableLabels().clear();
+ if (encryptionSpecBuilder_ == null) {
+ encryptionSpec_ = null;
+ } else {
+ encryptionSpec_ = null;
+ encryptionSpecBuilder_ = null;
+ }
return this;
}
@@ -1082,6 +1170,11 @@ public com.google.cloud.aiplatform.v1beta1.Dataset buildPartial() {
result.etag_ = etag_;
result.labels_ = internalGetLabels();
result.labels_.makeImmutable();
+ if (encryptionSpecBuilder_ == null) {
+ result.encryptionSpec_ = encryptionSpec_;
+ } else {
+ result.encryptionSpec_ = encryptionSpecBuilder_.build();
+ }
onBuilt();
return result;
}
@@ -1157,6 +1250,9 @@ public Builder mergeFrom(com.google.cloud.aiplatform.v1beta1.Dataset other) {
onChanged();
}
internalGetMutableLabels().mergeFrom(other.internalGetLabels());
+ if (other.hasEncryptionSpec()) {
+ mergeEncryptionSpec(other.getEncryptionSpec());
+ }
this.mergeUnknownFields(other.unknownFields);
onChanged();
return this;
@@ -2455,6 +2551,202 @@ public Builder putAllLabels(java.util.Map
+ * Customer-managed encryption key spec for a Dataset. If set, this Dataset
+ * and all sub-resources of this Dataset will be secured by this key.
+ *
+ *
+ * .google.cloud.aiplatform.v1beta1.EncryptionSpec encryption_spec = 11;
+ *
+ * @return Whether the encryptionSpec field is set.
+ */
+ public boolean hasEncryptionSpec() {
+ return encryptionSpecBuilder_ != null || encryptionSpec_ != null;
+ }
+ /**
+ *
+ *
+ *
+ * Customer-managed encryption key spec for a Dataset. If set, this Dataset
+ * and all sub-resources of this Dataset will be secured by this key.
+ *
+ *
+ * .google.cloud.aiplatform.v1beta1.EncryptionSpec encryption_spec = 11;
+ *
+ * @return The encryptionSpec.
+ */
+ public com.google.cloud.aiplatform.v1beta1.EncryptionSpec getEncryptionSpec() {
+ if (encryptionSpecBuilder_ == null) {
+ return encryptionSpec_ == null
+ ? com.google.cloud.aiplatform.v1beta1.EncryptionSpec.getDefaultInstance()
+ : encryptionSpec_;
+ } else {
+ return encryptionSpecBuilder_.getMessage();
+ }
+ }
+ /**
+ *
+ *
+ *
+ * Customer-managed encryption key spec for a Dataset. If set, this Dataset
+ * and all sub-resources of this Dataset will be secured by this key.
+ *
+ *
+ * .google.cloud.aiplatform.v1beta1.EncryptionSpec encryption_spec = 11;
+ */
+ public Builder setEncryptionSpec(com.google.cloud.aiplatform.v1beta1.EncryptionSpec value) {
+ if (encryptionSpecBuilder_ == null) {
+ if (value == null) {
+ throw new NullPointerException();
+ }
+ encryptionSpec_ = value;
+ onChanged();
+ } else {
+ encryptionSpecBuilder_.setMessage(value);
+ }
+
+ return this;
+ }
+ /**
+ *
+ *
+ *
+ * Customer-managed encryption key spec for a Dataset. If set, this Dataset
+ * and all sub-resources of this Dataset will be secured by this key.
+ *
+ *
+ * .google.cloud.aiplatform.v1beta1.EncryptionSpec encryption_spec = 11;
+ */
+ public Builder setEncryptionSpec(
+ com.google.cloud.aiplatform.v1beta1.EncryptionSpec.Builder builderForValue) {
+ if (encryptionSpecBuilder_ == null) {
+ encryptionSpec_ = builderForValue.build();
+ onChanged();
+ } else {
+ encryptionSpecBuilder_.setMessage(builderForValue.build());
+ }
+
+ return this;
+ }
+ /**
+ *
+ *
+ *
+ * Customer-managed encryption key spec for a Dataset. If set, this Dataset
+ * and all sub-resources of this Dataset will be secured by this key.
+ *
+ *
+ * .google.cloud.aiplatform.v1beta1.EncryptionSpec encryption_spec = 11;
+ */
+ public Builder mergeEncryptionSpec(com.google.cloud.aiplatform.v1beta1.EncryptionSpec value) {
+ if (encryptionSpecBuilder_ == null) {
+ if (encryptionSpec_ != null) {
+ encryptionSpec_ =
+ com.google.cloud.aiplatform.v1beta1.EncryptionSpec.newBuilder(encryptionSpec_)
+ .mergeFrom(value)
+ .buildPartial();
+ } else {
+ encryptionSpec_ = value;
+ }
+ onChanged();
+ } else {
+ encryptionSpecBuilder_.mergeFrom(value);
+ }
+
+ return this;
+ }
+ /**
+ *
+ *
+ *
+ * Customer-managed encryption key spec for a Dataset. If set, this Dataset
+ * and all sub-resources of this Dataset will be secured by this key.
+ *
+ *
+ * .google.cloud.aiplatform.v1beta1.EncryptionSpec encryption_spec = 11;
+ */
+ public Builder clearEncryptionSpec() {
+ if (encryptionSpecBuilder_ == null) {
+ encryptionSpec_ = null;
+ onChanged();
+ } else {
+ encryptionSpec_ = null;
+ encryptionSpecBuilder_ = null;
+ }
+
+ return this;
+ }
+ /**
+ *
+ *
+ *
+ * Customer-managed encryption key spec for a Dataset. If set, this Dataset
+ * and all sub-resources of this Dataset will be secured by this key.
+ *
+ *
+ * .google.cloud.aiplatform.v1beta1.EncryptionSpec encryption_spec = 11;
+ */
+ public com.google.cloud.aiplatform.v1beta1.EncryptionSpec.Builder getEncryptionSpecBuilder() {
+
+ onChanged();
+ return getEncryptionSpecFieldBuilder().getBuilder();
+ }
+ /**
+ *
+ *
+ *
+ * Customer-managed encryption key spec for a Dataset. If set, this Dataset
+ * and all sub-resources of this Dataset will be secured by this key.
+ *
+ *
+ * .google.cloud.aiplatform.v1beta1.EncryptionSpec encryption_spec = 11;
+ */
+ public com.google.cloud.aiplatform.v1beta1.EncryptionSpecOrBuilder
+ getEncryptionSpecOrBuilder() {
+ if (encryptionSpecBuilder_ != null) {
+ return encryptionSpecBuilder_.getMessageOrBuilder();
+ } else {
+ return encryptionSpec_ == null
+ ? com.google.cloud.aiplatform.v1beta1.EncryptionSpec.getDefaultInstance()
+ : encryptionSpec_;
+ }
+ }
+ /**
+ *
+ *
+ *
+ * Customer-managed encryption key spec for a Dataset. If set, this Dataset
+ * and all sub-resources of this Dataset will be secured by this key.
+ *
+ *
+ * .google.cloud.aiplatform.v1beta1.EncryptionSpec encryption_spec = 11;
+ */
+ private com.google.protobuf.SingleFieldBuilderV3<
+ com.google.cloud.aiplatform.v1beta1.EncryptionSpec,
+ com.google.cloud.aiplatform.v1beta1.EncryptionSpec.Builder,
+ com.google.cloud.aiplatform.v1beta1.EncryptionSpecOrBuilder>
+ getEncryptionSpecFieldBuilder() {
+ if (encryptionSpecBuilder_ == null) {
+ encryptionSpecBuilder_ =
+ new com.google.protobuf.SingleFieldBuilderV3<
+ com.google.cloud.aiplatform.v1beta1.EncryptionSpec,
+ com.google.cloud.aiplatform.v1beta1.EncryptionSpec.Builder,
+ com.google.cloud.aiplatform.v1beta1.EncryptionSpecOrBuilder>(
+ getEncryptionSpec(), getParentForChildren(), isClean());
+ encryptionSpec_ = null;
+ }
+ return encryptionSpecBuilder_;
+ }
+
@java.lang.Override
public final Builder setUnknownFields(final com.google.protobuf.UnknownFieldSet unknownFields) {
return super.setUnknownFields(unknownFields);
diff --git a/proto-google-cloud-aiplatform-v1beta1/src/main/java/com/google/cloud/aiplatform/v1beta1/DatasetOrBuilder.java b/proto-google-cloud-aiplatform-v1beta1/src/main/java/com/google/cloud/aiplatform/v1beta1/DatasetOrBuilder.java
index f0196cae4..54bee561d 100644
--- a/proto-google-cloud-aiplatform-v1beta1/src/main/java/com/google/cloud/aiplatform/v1beta1/DatasetOrBuilder.java
+++ b/proto-google-cloud-aiplatform-v1beta1/src/main/java/com/google/cloud/aiplatform/v1beta1/DatasetOrBuilder.java
@@ -351,4 +351,42 @@ public interface DatasetOrBuilder
* map<string, string> labels = 7;
*/
java.lang.String getLabelsOrThrow(java.lang.String key);
+
+ /**
+ *
+ *
+ *
+ * Customer-managed encryption key spec for a Dataset. If set, this Dataset
+ * and all sub-resources of this Dataset will be secured by this key.
+ *
+ *
+ * .google.cloud.aiplatform.v1beta1.EncryptionSpec encryption_spec = 11;
+ *
+ * @return Whether the encryptionSpec field is set.
+ */
+ boolean hasEncryptionSpec();
+ /**
+ *
+ *
+ *
+ * Customer-managed encryption key spec for a Dataset. If set, this Dataset
+ * and all sub-resources of this Dataset will be secured by this key.
+ *
+ *
+ * .google.cloud.aiplatform.v1beta1.EncryptionSpec encryption_spec = 11;
+ *
+ * @return The encryptionSpec.
+ */
+ com.google.cloud.aiplatform.v1beta1.EncryptionSpec getEncryptionSpec();
+ /**
+ *
+ *
+ *
+ * Customer-managed encryption key spec for a Dataset. If set, this Dataset
+ * and all sub-resources of this Dataset will be secured by this key.
+ *
+ *
+ * .google.cloud.aiplatform.v1beta1.EncryptionSpec encryption_spec = 11;
+ */
+ com.google.cloud.aiplatform.v1beta1.EncryptionSpecOrBuilder getEncryptionSpecOrBuilder();
}
diff --git a/proto-google-cloud-aiplatform-v1beta1/src/main/java/com/google/cloud/aiplatform/v1beta1/DatasetProto.java b/proto-google-cloud-aiplatform-v1beta1/src/main/java/com/google/cloud/aiplatform/v1beta1/DatasetProto.java
index 8958051f7..bc261f981 100644
--- a/proto-google-cloud-aiplatform-v1beta1/src/main/java/com/google/cloud/aiplatform/v1beta1/DatasetProto.java
+++ b/proto-google-cloud-aiplatform-v1beta1/src/main/java/com/google/cloud/aiplatform/v1beta1/DatasetProto.java
@@ -59,36 +59,40 @@ public static com.google.protobuf.Descriptors.FileDescriptor getDescriptor() {
"\n-google/cloud/aiplatform/v1beta1/datase"
+ "t.proto\022\037google.cloud.aiplatform.v1beta1"
+ "\032\037google/api/field_behavior.proto\032\031googl"
- + "e/api/resource.proto\032(google/cloud/aipla"
- + "tform/v1beta1/io.proto\032\034google/protobuf/"
- + "struct.proto\032\037google/protobuf/timestamp."
- + "proto\032\034google/api/annotations.proto\"\333\003\n\007"
- + "Dataset\022\021\n\004name\030\001 \001(\tB\003\340A\003\022\031\n\014display_na"
- + "me\030\002 \001(\tB\003\340A\002\022 \n\023metadata_schema_uri\030\003 \001"
- + "(\tB\003\340A\002\022-\n\010metadata\030\010 \001(\0132\026.google.proto"
- + "buf.ValueB\003\340A\002\0224\n\013create_time\030\004 \001(\0132\032.go"
- + "ogle.protobuf.TimestampB\003\340A\003\0224\n\013update_t"
- + "ime\030\005 \001(\0132\032.google.protobuf.TimestampB\003\340"
- + "A\003\022\014\n\004etag\030\006 \001(\t\022D\n\006labels\030\007 \003(\01324.googl"
- + "e.cloud.aiplatform.v1beta1.Dataset.Label"
- + "sEntry\032-\n\013LabelsEntry\022\013\n\003key\030\001 \001(\t\022\r\n\005va"
- + "lue\030\002 \001(\t:\0028\001:b\352A_\n!aiplatform.googleapi"
- + "s.com/Dataset\022:projects/{project}/locati"
- + "ons/{location}/datasets/{dataset}\"\226\002\n\020Im"
- + "portDataConfig\022@\n\ngcs_source\030\001 \001(\0132*.goo"
- + "gle.cloud.aiplatform.v1beta1.GcsSourceH\000"
- + "\022_\n\020data_item_labels\030\002 \003(\0132E.google.clou"
- + "d.aiplatform.v1beta1.ImportDataConfig.Da"
- + "taItemLabelsEntry\022\036\n\021import_schema_uri\030\004"
- + " \001(\tB\003\340A\002\0325\n\023DataItemLabelsEntry\022\013\n\003key\030"
- + "\001 \001(\t\022\r\n\005value\030\002 \001(\t:\0028\001B\010\n\006source\"\211\001\n\020E"
- + "xportDataConfig\022J\n\017gcs_destination\030\001 \001(\013"
- + "2/.google.cloud.aiplatform.v1beta1.GcsDe"
- + "stinationH\000\022\032\n\022annotations_filter\030\002 \001(\tB"
- + "\r\n\013destinationB\200\001\n#com.google.cloud.aipl"
- + "atform.v1beta1B\014DatasetProtoP\001ZIgoogle.g"
- + "olang.org/genproto/googleapis/cloud/aipl"
- + "atform/v1beta1;aiplatformb\006proto3"
+ + "e/api/resource.proto\0325google/cloud/aipla"
+ + "tform/v1beta1/encryption_spec.proto\032(goo"
+ + "gle/cloud/aiplatform/v1beta1/io.proto\032\034g"
+ + "oogle/protobuf/struct.proto\032\037google/prot"
+ + "obuf/timestamp.proto\032\034google/api/annotat"
+ + "ions.proto\"\245\004\n\007Dataset\022\021\n\004name\030\001 \001(\tB\003\340A"
+ + "\003\022\031\n\014display_name\030\002 \001(\tB\003\340A\002\022 \n\023metadata"
+ + "_schema_uri\030\003 \001(\tB\003\340A\002\022-\n\010metadata\030\010 \001(\013"
+ + "2\026.google.protobuf.ValueB\003\340A\002\0224\n\013create_"
+ + "time\030\004 \001(\0132\032.google.protobuf.TimestampB\003"
+ + "\340A\003\0224\n\013update_time\030\005 \001(\0132\032.google.protob"
+ + "uf.TimestampB\003\340A\003\022\014\n\004etag\030\006 \001(\t\022D\n\006label"
+ + "s\030\007 \003(\01324.google.cloud.aiplatform.v1beta"
+ + "1.Dataset.LabelsEntry\022H\n\017encryption_spec"
+ + "\030\013 \001(\0132/.google.cloud.aiplatform.v1beta1"
+ + ".EncryptionSpec\032-\n\013LabelsEntry\022\013\n\003key\030\001 "
+ + "\001(\t\022\r\n\005value\030\002 \001(\t:\0028\001:b\352A_\n!aiplatform."
+ + "googleapis.com/Dataset\022:projects/{projec"
+ + "t}/locations/{location}/datasets/{datase"
+ + "t}\"\226\002\n\020ImportDataConfig\022@\n\ngcs_source\030\001 "
+ + "\001(\0132*.google.cloud.aiplatform.v1beta1.Gc"
+ + "sSourceH\000\022_\n\020data_item_labels\030\002 \003(\0132E.go"
+ + "ogle.cloud.aiplatform.v1beta1.ImportData"
+ + "Config.DataItemLabelsEntry\022\036\n\021import_sch"
+ + "ema_uri\030\004 \001(\tB\003\340A\002\0325\n\023DataItemLabelsEntr"
+ + "y\022\013\n\003key\030\001 \001(\t\022\r\n\005value\030\002 \001(\t:\0028\001B\010\n\006sou"
+ + "rce\"\211\001\n\020ExportDataConfig\022J\n\017gcs_destinat"
+ + "ion\030\001 \001(\0132/.google.cloud.aiplatform.v1be"
+ + "ta1.GcsDestinationH\000\022\032\n\022annotations_filt"
+ + "er\030\002 \001(\tB\r\n\013destinationB\200\001\n#com.google.c"
+ + "loud.aiplatform.v1beta1B\014DatasetProtoP\001Z"
+ + "Igoogle.golang.org/genproto/googleapis/c"
+ + "loud/aiplatform/v1beta1;aiplatformb\006prot"
+ + "o3"
};
descriptor =
com.google.protobuf.Descriptors.FileDescriptor.internalBuildGeneratedFileFrom(
@@ -96,6 +100,7 @@ public static com.google.protobuf.Descriptors.FileDescriptor getDescriptor() {
new com.google.protobuf.Descriptors.FileDescriptor[] {
com.google.api.FieldBehaviorProto.getDescriptor(),
com.google.api.ResourceProto.getDescriptor(),
+ com.google.cloud.aiplatform.v1beta1.EncryptionSpecProto.getDescriptor(),
com.google.cloud.aiplatform.v1beta1.IoProto.getDescriptor(),
com.google.protobuf.StructProto.getDescriptor(),
com.google.protobuf.TimestampProto.getDescriptor(),
@@ -115,6 +120,7 @@ public static com.google.protobuf.Descriptors.FileDescriptor getDescriptor() {
"UpdateTime",
"Etag",
"Labels",
+ "EncryptionSpec",
});
internal_static_google_cloud_aiplatform_v1beta1_Dataset_LabelsEntry_descriptor =
internal_static_google_cloud_aiplatform_v1beta1_Dataset_descriptor.getNestedTypes().get(0);
@@ -158,6 +164,7 @@ public static com.google.protobuf.Descriptors.FileDescriptor getDescriptor() {
descriptor, registry);
com.google.api.FieldBehaviorProto.getDescriptor();
com.google.api.ResourceProto.getDescriptor();
+ com.google.cloud.aiplatform.v1beta1.EncryptionSpecProto.getDescriptor();
com.google.cloud.aiplatform.v1beta1.IoProto.getDescriptor();
com.google.protobuf.StructProto.getDescriptor();
com.google.protobuf.TimestampProto.getDescriptor();
diff --git a/proto-google-cloud-aiplatform-v1beta1/src/main/java/com/google/cloud/aiplatform/v1beta1/DeployedModel.java b/proto-google-cloud-aiplatform-v1beta1/src/main/java/com/google/cloud/aiplatform/v1beta1/DeployedModel.java
index b6068bca9..de3249d36 100644
--- a/proto-google-cloud-aiplatform-v1beta1/src/main/java/com/google/cloud/aiplatform/v1beta1/DeployedModel.java
+++ b/proto-google-cloud-aiplatform-v1beta1/src/main/java/com/google/cloud/aiplatform/v1beta1/DeployedModel.java
@@ -715,7 +715,7 @@ public com.google.protobuf.ByteString getServiceAccountBytes() {
*
* If true, the container of the DeployedModel instances will send `stderr`
* and `stdout` streams to Stackdriver Logging.
- * Only supported for custom-trained Models and AutoML Tables Models.
+ * Only supported for custom-trained Models and AutoML Tabular Models.
*
*
* bool enable_container_logging = 12;
@@ -2664,7 +2664,7 @@ public Builder setServiceAccountBytes(com.google.protobuf.ByteString value) {
*
* If true, the container of the DeployedModel instances will send `stderr`
* and `stdout` streams to Stackdriver Logging.
- * Only supported for custom-trained Models and AutoML Tables Models.
+ * Only supported for custom-trained Models and AutoML Tabular Models.
*
*
* bool enable_container_logging = 12;
@@ -2681,7 +2681,7 @@ public boolean getEnableContainerLogging() {
*
* If true, the container of the DeployedModel instances will send `stderr`
* and `stdout` streams to Stackdriver Logging.
- * Only supported for custom-trained Models and AutoML Tables Models.
+ * Only supported for custom-trained Models and AutoML Tabular Models.
*
*
* bool enable_container_logging = 12;
@@ -2701,7 +2701,7 @@ public Builder setEnableContainerLogging(boolean value) {
*
* If true, the container of the DeployedModel instances will send `stderr`
* and `stdout` streams to Stackdriver Logging.
- * Only supported for custom-trained Models and AutoML Tables Models.
+ * Only supported for custom-trained Models and AutoML Tabular Models.
*
*
* bool enable_container_logging = 12;
diff --git a/proto-google-cloud-aiplatform-v1beta1/src/main/java/com/google/cloud/aiplatform/v1beta1/DeployedModelOrBuilder.java b/proto-google-cloud-aiplatform-v1beta1/src/main/java/com/google/cloud/aiplatform/v1beta1/DeployedModelOrBuilder.java
index dc2507765..0410f13c2 100644
--- a/proto-google-cloud-aiplatform-v1beta1/src/main/java/com/google/cloud/aiplatform/v1beta1/DeployedModelOrBuilder.java
+++ b/proto-google-cloud-aiplatform-v1beta1/src/main/java/com/google/cloud/aiplatform/v1beta1/DeployedModelOrBuilder.java
@@ -317,7 +317,7 @@ public interface DeployedModelOrBuilder
*
* If true, the container of the DeployedModel instances will send `stderr`
* and `stdout` streams to Stackdriver Logging.
- * Only supported for custom-trained Models and AutoML Tables Models.
+ * Only supported for custom-trained Models and AutoML Tabular Models.
*
*
* bool enable_container_logging = 12;
diff --git a/proto-google-cloud-aiplatform-v1beta1/src/main/java/com/google/cloud/aiplatform/v1beta1/DiskSpec.java b/proto-google-cloud-aiplatform-v1beta1/src/main/java/com/google/cloud/aiplatform/v1beta1/DiskSpec.java
index a7d7a5dc7..f1ca46c51 100644
--- a/proto-google-cloud-aiplatform-v1beta1/src/main/java/com/google/cloud/aiplatform/v1beta1/DiskSpec.java
+++ b/proto-google-cloud-aiplatform-v1beta1/src/main/java/com/google/cloud/aiplatform/v1beta1/DiskSpec.java
@@ -122,7 +122,7 @@ public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
*
*
*
- * Type of the boot disk (default is "pd-standard").
+ * Type of the boot disk (default is "pd-ssd").
* Valid values: "pd-ssd" (Persistent Disk Solid State Drive) or
* "pd-standard" (Persistent Disk Hard Disk Drive).
*
@@ -147,7 +147,7 @@ public java.lang.String getBootDiskType() {
*
*
*
- * Type of the boot disk (default is "pd-standard").
+ * Type of the boot disk (default is "pd-ssd").
* Valid values: "pd-ssd" (Persistent Disk Solid State Drive) or
* "pd-standard" (Persistent Disk Hard Disk Drive).
*
@@ -523,7 +523,7 @@ public Builder mergeFrom(
*
*
*
- * Type of the boot disk (default is "pd-standard").
+ * Type of the boot disk (default is "pd-ssd").
* Valid values: "pd-ssd" (Persistent Disk Solid State Drive) or
* "pd-standard" (Persistent Disk Hard Disk Drive).
*
@@ -547,7 +547,7 @@ public java.lang.String getBootDiskType() {
*
*
*
- * Type of the boot disk (default is "pd-standard").
+ * Type of the boot disk (default is "pd-ssd").
* Valid values: "pd-ssd" (Persistent Disk Solid State Drive) or
* "pd-standard" (Persistent Disk Hard Disk Drive).
*
@@ -571,7 +571,7 @@ public com.google.protobuf.ByteString getBootDiskTypeBytes() {
*
*
*
- * Type of the boot disk (default is "pd-standard").
+ * Type of the boot disk (default is "pd-ssd").
* Valid values: "pd-ssd" (Persistent Disk Solid State Drive) or
* "pd-standard" (Persistent Disk Hard Disk Drive).
*
@@ -594,7 +594,7 @@ public Builder setBootDiskType(java.lang.String value) {
*
*
*
- * Type of the boot disk (default is "pd-standard").
+ * Type of the boot disk (default is "pd-ssd").
* Valid values: "pd-ssd" (Persistent Disk Solid State Drive) or
* "pd-standard" (Persistent Disk Hard Disk Drive).
*
@@ -613,7 +613,7 @@ public Builder clearBootDiskType() {
*
*
*
- * Type of the boot disk (default is "pd-standard").
+ * Type of the boot disk (default is "pd-ssd").
* Valid values: "pd-ssd" (Persistent Disk Solid State Drive) or
* "pd-standard" (Persistent Disk Hard Disk Drive).
*
diff --git a/proto-google-cloud-aiplatform-v1beta1/src/main/java/com/google/cloud/aiplatform/v1beta1/DiskSpecOrBuilder.java b/proto-google-cloud-aiplatform-v1beta1/src/main/java/com/google/cloud/aiplatform/v1beta1/DiskSpecOrBuilder.java
index 54c1c5b64..288fd2baf 100644
--- a/proto-google-cloud-aiplatform-v1beta1/src/main/java/com/google/cloud/aiplatform/v1beta1/DiskSpecOrBuilder.java
+++ b/proto-google-cloud-aiplatform-v1beta1/src/main/java/com/google/cloud/aiplatform/v1beta1/DiskSpecOrBuilder.java
@@ -27,7 +27,7 @@ public interface DiskSpecOrBuilder
*
*
*
- * Type of the boot disk (default is "pd-standard").
+ * Type of the boot disk (default is "pd-ssd").
* Valid values: "pd-ssd" (Persistent Disk Solid State Drive) or
* "pd-standard" (Persistent Disk Hard Disk Drive).
*
@@ -41,7 +41,7 @@ public interface DiskSpecOrBuilder
*
*
*
- * Type of the boot disk (default is "pd-standard").
+ * Type of the boot disk (default is "pd-ssd").
* Valid values: "pd-ssd" (Persistent Disk Solid State Drive) or
* "pd-standard" (Persistent Disk Hard Disk Drive).
*
diff --git a/proto-google-cloud-aiplatform-v1beta1/src/main/java/com/google/cloud/aiplatform/v1beta1/EncryptionSpec.java b/proto-google-cloud-aiplatform-v1beta1/src/main/java/com/google/cloud/aiplatform/v1beta1/EncryptionSpec.java
new file mode 100644
index 000000000..388bb0d27
--- /dev/null
+++ b/proto-google-cloud-aiplatform-v1beta1/src/main/java/com/google/cloud/aiplatform/v1beta1/EncryptionSpec.java
@@ -0,0 +1,666 @@
+/*
+ * Copyright 2020 Google LLC
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * https://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+// Generated by the protocol buffer compiler. DO NOT EDIT!
+// source: google/cloud/aiplatform/v1beta1/encryption_spec.proto
+
+package com.google.cloud.aiplatform.v1beta1;
+
+/**
+ *
+ *
+ *
+ * Represents a customer-managed encryption key spec that can be applied to
+ * a top-level resource.
+ *
+ *
+ * Protobuf type {@code google.cloud.aiplatform.v1beta1.EncryptionSpec}
+ */
+public final class EncryptionSpec extends com.google.protobuf.GeneratedMessageV3
+ implements
+ // @@protoc_insertion_point(message_implements:google.cloud.aiplatform.v1beta1.EncryptionSpec)
+ EncryptionSpecOrBuilder {
+ private static final long serialVersionUID = 0L;
+ // Use EncryptionSpec.newBuilder() to construct.
+  private EncryptionSpec(com.google.protobuf.GeneratedMessageV3.Builder<?> builder) {
+ super(builder);
+ }
+
+ private EncryptionSpec() {
+ kmsKeyName_ = "";
+ }
+
+ @java.lang.Override
+ @SuppressWarnings({"unused"})
+ protected java.lang.Object newInstance(UnusedPrivateParameter unused) {
+ return new EncryptionSpec();
+ }
+
+ @java.lang.Override
+ public final com.google.protobuf.UnknownFieldSet getUnknownFields() {
+ return this.unknownFields;
+ }
+
+ private EncryptionSpec(
+ com.google.protobuf.CodedInputStream input,
+ com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+ throws com.google.protobuf.InvalidProtocolBufferException {
+ this();
+ if (extensionRegistry == null) {
+ throw new java.lang.NullPointerException();
+ }
+ com.google.protobuf.UnknownFieldSet.Builder unknownFields =
+ com.google.protobuf.UnknownFieldSet.newBuilder();
+ try {
+ boolean done = false;
+ while (!done) {
+ int tag = input.readTag();
+ switch (tag) {
+ case 0:
+ done = true;
+ break;
+ case 10:
+ {
+ java.lang.String s = input.readStringRequireUtf8();
+
+ kmsKeyName_ = s;
+ break;
+ }
+ default:
+ {
+ if (!parseUnknownField(input, unknownFields, extensionRegistry, tag)) {
+ done = true;
+ }
+ break;
+ }
+ }
+ }
+ } catch (com.google.protobuf.InvalidProtocolBufferException e) {
+ throw e.setUnfinishedMessage(this);
+ } catch (java.io.IOException e) {
+ throw new com.google.protobuf.InvalidProtocolBufferException(e).setUnfinishedMessage(this);
+ } finally {
+ this.unknownFields = unknownFields.build();
+ makeExtensionsImmutable();
+ }
+ }
+
+ public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
+ return com.google.cloud.aiplatform.v1beta1.EncryptionSpecProto
+ .internal_static_google_cloud_aiplatform_v1beta1_EncryptionSpec_descriptor;
+ }
+
+ @java.lang.Override
+ protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
+ internalGetFieldAccessorTable() {
+ return com.google.cloud.aiplatform.v1beta1.EncryptionSpecProto
+ .internal_static_google_cloud_aiplatform_v1beta1_EncryptionSpec_fieldAccessorTable
+ .ensureFieldAccessorsInitialized(
+ com.google.cloud.aiplatform.v1beta1.EncryptionSpec.class,
+ com.google.cloud.aiplatform.v1beta1.EncryptionSpec.Builder.class);
+ }
+
+ public static final int KMS_KEY_NAME_FIELD_NUMBER = 1;
+ private volatile java.lang.Object kmsKeyName_;
+ /**
+ *
+ *
+ *
+ * Required. The Cloud KMS resource identifier of the customer managed encryption key
+ * used to protect a resource. Has the form:
+ * `projects/my-project/locations/my-region/keyRings/my-kr/cryptoKeys/my-key`.
+ * The key needs to be in the same region as where the compute resource is
+ * created.
+ *
+ *
+ * string kms_key_name = 1 [(.google.api.field_behavior) = REQUIRED];
+ *
+ * @return The kmsKeyName.
+ */
+ @java.lang.Override
+ public java.lang.String getKmsKeyName() {
+ java.lang.Object ref = kmsKeyName_;
+ if (ref instanceof java.lang.String) {
+ return (java.lang.String) ref;
+ } else {
+ com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
+ java.lang.String s = bs.toStringUtf8();
+ kmsKeyName_ = s;
+ return s;
+ }
+ }
+ /**
+ *
+ *
+ *
+ * Required. The Cloud KMS resource identifier of the customer managed encryption key
+ * used to protect a resource. Has the form:
+ * `projects/my-project/locations/my-region/keyRings/my-kr/cryptoKeys/my-key`.
+ * The key needs to be in the same region as where the compute resource is
+ * created.
+ *
+ *
+ * string kms_key_name = 1 [(.google.api.field_behavior) = REQUIRED];
+ *
+ * @return The bytes for kmsKeyName.
+ */
+ @java.lang.Override
+ public com.google.protobuf.ByteString getKmsKeyNameBytes() {
+ java.lang.Object ref = kmsKeyName_;
+ if (ref instanceof java.lang.String) {
+ com.google.protobuf.ByteString b =
+ com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
+ kmsKeyName_ = b;
+ return b;
+ } else {
+ return (com.google.protobuf.ByteString) ref;
+ }
+ }
+
+ private byte memoizedIsInitialized = -1;
+
+ @java.lang.Override
+ public final boolean isInitialized() {
+ byte isInitialized = memoizedIsInitialized;
+ if (isInitialized == 1) return true;
+ if (isInitialized == 0) return false;
+
+ memoizedIsInitialized = 1;
+ return true;
+ }
+
+ @java.lang.Override
+ public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException {
+ if (!getKmsKeyNameBytes().isEmpty()) {
+ com.google.protobuf.GeneratedMessageV3.writeString(output, 1, kmsKeyName_);
+ }
+ unknownFields.writeTo(output);
+ }
+
+ @java.lang.Override
+ public int getSerializedSize() {
+ int size = memoizedSize;
+ if (size != -1) return size;
+
+ size = 0;
+ if (!getKmsKeyNameBytes().isEmpty()) {
+ size += com.google.protobuf.GeneratedMessageV3.computeStringSize(1, kmsKeyName_);
+ }
+ size += unknownFields.getSerializedSize();
+ memoizedSize = size;
+ return size;
+ }
+
+ @java.lang.Override
+ public boolean equals(final java.lang.Object obj) {
+ if (obj == this) {
+ return true;
+ }
+ if (!(obj instanceof com.google.cloud.aiplatform.v1beta1.EncryptionSpec)) {
+ return super.equals(obj);
+ }
+ com.google.cloud.aiplatform.v1beta1.EncryptionSpec other =
+ (com.google.cloud.aiplatform.v1beta1.EncryptionSpec) obj;
+
+ if (!getKmsKeyName().equals(other.getKmsKeyName())) return false;
+ if (!unknownFields.equals(other.unknownFields)) return false;
+ return true;
+ }
+
+ @java.lang.Override
+ public int hashCode() {
+ if (memoizedHashCode != 0) {
+ return memoizedHashCode;
+ }
+ int hash = 41;
+ hash = (19 * hash) + getDescriptor().hashCode();
+ hash = (37 * hash) + KMS_KEY_NAME_FIELD_NUMBER;
+ hash = (53 * hash) + getKmsKeyName().hashCode();
+ hash = (29 * hash) + unknownFields.hashCode();
+ memoizedHashCode = hash;
+ return hash;
+ }
+
+ public static com.google.cloud.aiplatform.v1beta1.EncryptionSpec parseFrom(
+ java.nio.ByteBuffer data) throws com.google.protobuf.InvalidProtocolBufferException {
+ return PARSER.parseFrom(data);
+ }
+
+ public static com.google.cloud.aiplatform.v1beta1.EncryptionSpec parseFrom(
+ java.nio.ByteBuffer data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+ throws com.google.protobuf.InvalidProtocolBufferException {
+ return PARSER.parseFrom(data, extensionRegistry);
+ }
+
+ public static com.google.cloud.aiplatform.v1beta1.EncryptionSpec parseFrom(
+ com.google.protobuf.ByteString data)
+ throws com.google.protobuf.InvalidProtocolBufferException {
+ return PARSER.parseFrom(data);
+ }
+
+ public static com.google.cloud.aiplatform.v1beta1.EncryptionSpec parseFrom(
+ com.google.protobuf.ByteString data,
+ com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+ throws com.google.protobuf.InvalidProtocolBufferException {
+ return PARSER.parseFrom(data, extensionRegistry);
+ }
+
+ public static com.google.cloud.aiplatform.v1beta1.EncryptionSpec parseFrom(byte[] data)
+ throws com.google.protobuf.InvalidProtocolBufferException {
+ return PARSER.parseFrom(data);
+ }
+
+ public static com.google.cloud.aiplatform.v1beta1.EncryptionSpec parseFrom(
+ byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+ throws com.google.protobuf.InvalidProtocolBufferException {
+ return PARSER.parseFrom(data, extensionRegistry);
+ }
+
+ public static com.google.cloud.aiplatform.v1beta1.EncryptionSpec parseFrom(
+ java.io.InputStream input) throws java.io.IOException {
+ return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
+ }
+
+ public static com.google.cloud.aiplatform.v1beta1.EncryptionSpec parseFrom(
+ java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+ throws java.io.IOException {
+ return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
+ PARSER, input, extensionRegistry);
+ }
+
+ public static com.google.cloud.aiplatform.v1beta1.EncryptionSpec parseDelimitedFrom(
+ java.io.InputStream input) throws java.io.IOException {
+ return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(PARSER, input);
+ }
+
+ public static com.google.cloud.aiplatform.v1beta1.EncryptionSpec parseDelimitedFrom(
+ java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+ throws java.io.IOException {
+ return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(
+ PARSER, input, extensionRegistry);
+ }
+
+ public static com.google.cloud.aiplatform.v1beta1.EncryptionSpec parseFrom(
+ com.google.protobuf.CodedInputStream input) throws java.io.IOException {
+ return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
+ }
+
+ public static com.google.cloud.aiplatform.v1beta1.EncryptionSpec parseFrom(
+ com.google.protobuf.CodedInputStream input,
+ com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+ throws java.io.IOException {
+ return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
+ PARSER, input, extensionRegistry);
+ }
+
+ @java.lang.Override
+ public Builder newBuilderForType() {
+ return newBuilder();
+ }
+
+ public static Builder newBuilder() {
+ return DEFAULT_INSTANCE.toBuilder();
+ }
+
+ public static Builder newBuilder(com.google.cloud.aiplatform.v1beta1.EncryptionSpec prototype) {
+ return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
+ }
+
+ @java.lang.Override
+ public Builder toBuilder() {
+ return this == DEFAULT_INSTANCE ? new Builder() : new Builder().mergeFrom(this);
+ }
+
+ @java.lang.Override
+ protected Builder newBuilderForType(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
+ Builder builder = new Builder(parent);
+ return builder;
+ }
+ /**
+ *
+ *
+ *
+ * Represents a customer-managed encryption key spec that can be applied to
+ * a top-level resource.
+ *
+ *
+ * Protobuf type {@code google.cloud.aiplatform.v1beta1.EncryptionSpec}
+ */
+  public static final class Builder extends com.google.protobuf.GeneratedMessageV3.Builder<Builder>
+ * Required. The Cloud KMS resource identifier of the customer managed encryption key
+ * used to protect a resource. Has the form:
+ * `projects/my-project/locations/my-region/keyRings/my-kr/cryptoKeys/my-key`.
+ * The key needs to be in the same region as where the compute resource is
+ * created.
+ *
+ *
+ * string kms_key_name = 1 [(.google.api.field_behavior) = REQUIRED];
+ *
+ * @return The kmsKeyName.
+ */
+ public java.lang.String getKmsKeyName() {
+ java.lang.Object ref = kmsKeyName_;
+ if (!(ref instanceof java.lang.String)) {
+ com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
+ java.lang.String s = bs.toStringUtf8();
+ kmsKeyName_ = s;
+ return s;
+ } else {
+ return (java.lang.String) ref;
+ }
+ }
+ /**
+ *
+ *
+ *
+ * Required. The Cloud KMS resource identifier of the customer managed encryption key
+ * used to protect a resource. Has the form:
+ * `projects/my-project/locations/my-region/keyRings/my-kr/cryptoKeys/my-key`.
+ * The key needs to be in the same region as where the compute resource is
+ * created.
+ *
+ *
+ * string kms_key_name = 1 [(.google.api.field_behavior) = REQUIRED];
+ *
+ * @return The bytes for kmsKeyName.
+ */
+ public com.google.protobuf.ByteString getKmsKeyNameBytes() {
+ java.lang.Object ref = kmsKeyName_;
+ if (ref instanceof String) {
+ com.google.protobuf.ByteString b =
+ com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
+ kmsKeyName_ = b;
+ return b;
+ } else {
+ return (com.google.protobuf.ByteString) ref;
+ }
+ }
+ /**
+ *
+ *
+ *
+ * Required. The Cloud KMS resource identifier of the customer managed encryption key
+ * used to protect a resource. Has the form:
+ * `projects/my-project/locations/my-region/keyRings/my-kr/cryptoKeys/my-key`.
+ * The key needs to be in the same region as where the compute resource is
+ * created.
+ *
+ *
+ * string kms_key_name = 1 [(.google.api.field_behavior) = REQUIRED];
+ *
+ * @param value The kmsKeyName to set.
+ * @return This builder for chaining.
+ */
+ public Builder setKmsKeyName(java.lang.String value) {
+ if (value == null) {
+ throw new NullPointerException();
+ }
+
+ kmsKeyName_ = value;
+ onChanged();
+ return this;
+ }
+ /**
+ *
+ *
+ *
+ * Required. The Cloud KMS resource identifier of the customer managed encryption key
+ * used to protect a resource. Has the form:
+ * `projects/my-project/locations/my-region/keyRings/my-kr/cryptoKeys/my-key`.
+ * The key needs to be in the same region as where the compute resource is
+ * created.
+ *
+ *
+ * string kms_key_name = 1 [(.google.api.field_behavior) = REQUIRED];
+ *
+ * @return This builder for chaining.
+ */
+ public Builder clearKmsKeyName() {
+
+ kmsKeyName_ = getDefaultInstance().getKmsKeyName();
+ onChanged();
+ return this;
+ }
+ /**
+ *
+ *
+ *
+ * Required. The Cloud KMS resource identifier of the customer managed encryption key
+ * used to protect a resource. Has the form:
+ * `projects/my-project/locations/my-region/keyRings/my-kr/cryptoKeys/my-key`.
+ * The key needs to be in the same region as where the compute resource is
+ * created.
+ *
+ *
+ * string kms_key_name = 1 [(.google.api.field_behavior) = REQUIRED];
+ *
+ * @param value The bytes for kmsKeyName to set.
+ * @return This builder for chaining.
+ */
+ public Builder setKmsKeyNameBytes(com.google.protobuf.ByteString value) {
+ if (value == null) {
+ throw new NullPointerException();
+ }
+ checkByteStringIsUtf8(value);
+
+ kmsKeyName_ = value;
+ onChanged();
+ return this;
+ }
+
+ @java.lang.Override
+ public final Builder setUnknownFields(final com.google.protobuf.UnknownFieldSet unknownFields) {
+ return super.setUnknownFields(unknownFields);
+ }
+
+ @java.lang.Override
+ public final Builder mergeUnknownFields(
+ final com.google.protobuf.UnknownFieldSet unknownFields) {
+ return super.mergeUnknownFields(unknownFields);
+ }
+
+ // @@protoc_insertion_point(builder_scope:google.cloud.aiplatform.v1beta1.EncryptionSpec)
+ }
+
+ // @@protoc_insertion_point(class_scope:google.cloud.aiplatform.v1beta1.EncryptionSpec)
+ private static final com.google.cloud.aiplatform.v1beta1.EncryptionSpec DEFAULT_INSTANCE;
+
+ static {
+ DEFAULT_INSTANCE = new com.google.cloud.aiplatform.v1beta1.EncryptionSpec();
+ }
+
+ public static com.google.cloud.aiplatform.v1beta1.EncryptionSpec getDefaultInstance() {
+ return DEFAULT_INSTANCE;
+ }
+
+  private static final com.google.protobuf.Parser<EncryptionSpec> PARSER =
+ * Required. The Cloud KMS resource identifier of the customer managed encryption key
+ * used to protect a resource. Has the form:
+ * `projects/my-project/locations/my-region/keyRings/my-kr/cryptoKeys/my-key`.
+ * The key needs to be in the same region as where the compute resource is
+ * created.
+ *
+ *
+ * string kms_key_name = 1 [(.google.api.field_behavior) = REQUIRED];
+ *
+ * @return The kmsKeyName.
+ */
+ java.lang.String getKmsKeyName();
+ /**
+ *
+ *
+ *
+ * Required. The Cloud KMS resource identifier of the customer managed encryption key
+ * used to protect a resource. Has the form:
+ * `projects/my-project/locations/my-region/keyRings/my-kr/cryptoKeys/my-key`.
+ * The key needs to be in the same region as where the compute resource is
+ * created.
+ *
+ *
+ * string kms_key_name = 1 [(.google.api.field_behavior) = REQUIRED];
+ *
+ * @return The bytes for kmsKeyName.
+ */
+ com.google.protobuf.ByteString getKmsKeyNameBytes();
+}
diff --git a/proto-google-cloud-aiplatform-v1beta1/src/main/java/com/google/cloud/aiplatform/v1beta1/EncryptionSpecProto.java b/proto-google-cloud-aiplatform-v1beta1/src/main/java/com/google/cloud/aiplatform/v1beta1/EncryptionSpecProto.java
new file mode 100644
index 000000000..097eddefb
--- /dev/null
+++ b/proto-google-cloud-aiplatform-v1beta1/src/main/java/com/google/cloud/aiplatform/v1beta1/EncryptionSpecProto.java
@@ -0,0 +1,78 @@
+/*
+ * Copyright 2020 Google LLC
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * https://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+// Generated by the protocol buffer compiler. DO NOT EDIT!
+// source: google/cloud/aiplatform/v1beta1/encryption_spec.proto
+
+package com.google.cloud.aiplatform.v1beta1;
+
+public final class EncryptionSpecProto {
+ private EncryptionSpecProto() {}
+
+ public static void registerAllExtensions(com.google.protobuf.ExtensionRegistryLite registry) {}
+
+ public static void registerAllExtensions(com.google.protobuf.ExtensionRegistry registry) {
+ registerAllExtensions((com.google.protobuf.ExtensionRegistryLite) registry);
+ }
+
+ static final com.google.protobuf.Descriptors.Descriptor
+ internal_static_google_cloud_aiplatform_v1beta1_EncryptionSpec_descriptor;
+ static final com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
+ internal_static_google_cloud_aiplatform_v1beta1_EncryptionSpec_fieldAccessorTable;
+
+ public static com.google.protobuf.Descriptors.FileDescriptor getDescriptor() {
+ return descriptor;
+ }
+
+ private static com.google.protobuf.Descriptors.FileDescriptor descriptor;
+
+ static {
+ java.lang.String[] descriptorData = {
+ "\n5google/cloud/aiplatform/v1beta1/encryp"
+ + "tion_spec.proto\022\037google.cloud.aiplatform"
+ + ".v1beta1\032\037google/api/field_behavior.prot"
+ + "o\032\034google/api/annotations.proto\"+\n\016Encry"
+ + "ptionSpec\022\031\n\014kms_key_name\030\001 \001(\tB\003\340A\002B\207\001\n"
+ + "#com.google.cloud.aiplatform.v1beta1B\023En"
+ + "cryptionSpecProtoP\001ZIgoogle.golang.org/g"
+ + "enproto/googleapis/cloud/aiplatform/v1be"
+ + "ta1;aiplatformb\006proto3"
+ };
+ descriptor =
+ com.google.protobuf.Descriptors.FileDescriptor.internalBuildGeneratedFileFrom(
+ descriptorData,
+ new com.google.protobuf.Descriptors.FileDescriptor[] {
+ com.google.api.FieldBehaviorProto.getDescriptor(),
+ com.google.api.AnnotationsProto.getDescriptor(),
+ });
+ internal_static_google_cloud_aiplatform_v1beta1_EncryptionSpec_descriptor =
+ getDescriptor().getMessageTypes().get(0);
+ internal_static_google_cloud_aiplatform_v1beta1_EncryptionSpec_fieldAccessorTable =
+ new com.google.protobuf.GeneratedMessageV3.FieldAccessorTable(
+ internal_static_google_cloud_aiplatform_v1beta1_EncryptionSpec_descriptor,
+ new java.lang.String[] {
+ "KmsKeyName",
+ });
+ com.google.protobuf.ExtensionRegistry registry =
+ com.google.protobuf.ExtensionRegistry.newInstance();
+ registry.add(com.google.api.FieldBehaviorProto.fieldBehavior);
+ com.google.protobuf.Descriptors.FileDescriptor.internalUpdateFileDescriptor(
+ descriptor, registry);
+ com.google.api.FieldBehaviorProto.getDescriptor();
+ com.google.api.AnnotationsProto.getDescriptor();
+ }
+
+ // @@protoc_insertion_point(outer_class_scope)
+}
diff --git a/proto-google-cloud-aiplatform-v1beta1/src/main/java/com/google/cloud/aiplatform/v1beta1/Endpoint.java b/proto-google-cloud-aiplatform-v1beta1/src/main/java/com/google/cloud/aiplatform/v1beta1/Endpoint.java
index 157c5ff3c..9ee75e4a9 100644
--- a/proto-google-cloud-aiplatform-v1beta1/src/main/java/com/google/cloud/aiplatform/v1beta1/Endpoint.java
+++ b/proto-google-cloud-aiplatform-v1beta1/src/main/java/com/google/cloud/aiplatform/v1beta1/Endpoint.java
@@ -173,6 +173,23 @@ private Endpoint(
updateTime_ = subBuilder.buildPartial();
}
+ break;
+ }
+ case 82:
+ {
+ com.google.cloud.aiplatform.v1beta1.EncryptionSpec.Builder subBuilder = null;
+ if (encryptionSpec_ != null) {
+ subBuilder = encryptionSpec_.toBuilder();
+ }
+ encryptionSpec_ =
+ input.readMessage(
+ com.google.cloud.aiplatform.v1beta1.EncryptionSpec.parser(),
+ extensionRegistry);
+ if (subBuilder != null) {
+ subBuilder.mergeFrom(encryptionSpec_);
+ encryptionSpec_ = subBuilder.buildPartial();
+ }
+
break;
}
default:
@@ -849,6 +866,60 @@ public com.google.protobuf.TimestampOrBuilder getUpdateTimeOrBuilder() {
return getUpdateTime();
}
+ public static final int ENCRYPTION_SPEC_FIELD_NUMBER = 10;
+ private com.google.cloud.aiplatform.v1beta1.EncryptionSpec encryptionSpec_;
+ /**
+ *
+ *
+ *
+ * Customer-managed encryption key spec for an Endpoint. If set, this
+ * Endpoint and all sub-resources of this Endpoint will be secured by
+ * this key.
+ *
+ *
+ * .google.cloud.aiplatform.v1beta1.EncryptionSpec encryption_spec = 10;
+ *
+ * @return Whether the encryptionSpec field is set.
+ */
+ @java.lang.Override
+ public boolean hasEncryptionSpec() {
+ return encryptionSpec_ != null;
+ }
+ /**
+ *
+ *
+ *
+ * Customer-managed encryption key spec for an Endpoint. If set, this
+ * Endpoint and all sub-resources of this Endpoint will be secured by
+ * this key.
+ *
+ *
+ * .google.cloud.aiplatform.v1beta1.EncryptionSpec encryption_spec = 10;
+ *
+ * @return The encryptionSpec.
+ */
+ @java.lang.Override
+ public com.google.cloud.aiplatform.v1beta1.EncryptionSpec getEncryptionSpec() {
+ return encryptionSpec_ == null
+ ? com.google.cloud.aiplatform.v1beta1.EncryptionSpec.getDefaultInstance()
+ : encryptionSpec_;
+ }
+ /**
+ *
+ *
+ *
+ * Customer-managed encryption key spec for an Endpoint. If set, this
+ * Endpoint and all sub-resources of this Endpoint will be secured by
+ * this key.
+ *
+ *
+ * .google.cloud.aiplatform.v1beta1.EncryptionSpec encryption_spec = 10;
+ */
+ @java.lang.Override
+ public com.google.cloud.aiplatform.v1beta1.EncryptionSpecOrBuilder getEncryptionSpecOrBuilder() {
+ return getEncryptionSpec();
+ }
+
private byte memoizedIsInitialized = -1;
@java.lang.Override
@@ -888,6 +959,9 @@ public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io
if (updateTime_ != null) {
output.writeMessage(9, getUpdateTime());
}
+ if (encryptionSpec_ != null) {
+ output.writeMessage(10, getEncryptionSpec());
+ }
unknownFields.writeTo(output);
}
@@ -938,6 +1012,9 @@ public int getSerializedSize() {
if (updateTime_ != null) {
size += com.google.protobuf.CodedOutputStream.computeMessageSize(9, getUpdateTime());
}
+ if (encryptionSpec_ != null) {
+ size += com.google.protobuf.CodedOutputStream.computeMessageSize(10, getEncryptionSpec());
+ }
size += unknownFields.getSerializedSize();
memoizedSize = size;
return size;
@@ -969,6 +1046,10 @@ public boolean equals(final java.lang.Object obj) {
if (hasUpdateTime()) {
if (!getUpdateTime().equals(other.getUpdateTime())) return false;
}
+ if (hasEncryptionSpec() != other.hasEncryptionSpec()) return false;
+ if (hasEncryptionSpec()) {
+ if (!getEncryptionSpec().equals(other.getEncryptionSpec())) return false;
+ }
if (!unknownFields.equals(other.unknownFields)) return false;
return true;
}
@@ -1008,6 +1089,10 @@ public int hashCode() {
hash = (37 * hash) + UPDATE_TIME_FIELD_NUMBER;
hash = (53 * hash) + getUpdateTime().hashCode();
}
+ if (hasEncryptionSpec()) {
+ hash = (37 * hash) + ENCRYPTION_SPEC_FIELD_NUMBER;
+ hash = (53 * hash) + getEncryptionSpec().hashCode();
+ }
hash = (29 * hash) + unknownFields.hashCode();
memoizedHashCode = hash;
return hash;
@@ -1208,6 +1293,12 @@ public Builder clear() {
updateTime_ = null;
updateTimeBuilder_ = null;
}
+ if (encryptionSpecBuilder_ == null) {
+ encryptionSpec_ = null;
+ } else {
+ encryptionSpec_ = null;
+ encryptionSpecBuilder_ = null;
+ }
return this;
}
@@ -1263,6 +1354,11 @@ public com.google.cloud.aiplatform.v1beta1.Endpoint buildPartial() {
} else {
result.updateTime_ = updateTimeBuilder_.build();
}
+ if (encryptionSpecBuilder_ == null) {
+ result.encryptionSpec_ = encryptionSpec_;
+ } else {
+ result.encryptionSpec_ = encryptionSpecBuilder_.build();
+ }
onBuilt();
return result;
}
@@ -1363,6 +1459,9 @@ public Builder mergeFrom(com.google.cloud.aiplatform.v1beta1.Endpoint other) {
if (other.hasUpdateTime()) {
mergeUpdateTime(other.getUpdateTime());
}
+ if (other.hasEncryptionSpec()) {
+ mergeEncryptionSpec(other.getEncryptionSpec());
+ }
this.mergeUnknownFields(other.unknownFields);
onChanged();
return this;
@@ -3046,6 +3145,211 @@ public com.google.protobuf.TimestampOrBuilder getUpdateTimeOrBuilder() {
return updateTimeBuilder_;
}
+ private com.google.cloud.aiplatform.v1beta1.EncryptionSpec encryptionSpec_;
+ private com.google.protobuf.SingleFieldBuilderV3<
+ com.google.cloud.aiplatform.v1beta1.EncryptionSpec,
+ com.google.cloud.aiplatform.v1beta1.EncryptionSpec.Builder,
+ com.google.cloud.aiplatform.v1beta1.EncryptionSpecOrBuilder>
+ encryptionSpecBuilder_;
+ /**
+ *
+ *
+ *
+ * Customer-managed encryption key spec for an Endpoint. If set, this
+ * Endpoint and all sub-resources of this Endpoint will be secured by
+ * this key.
+ *
+ *
+ * .google.cloud.aiplatform.v1beta1.EncryptionSpec encryption_spec = 10;
+ *
+ * @return Whether the encryptionSpec field is set.
+ */
+ public boolean hasEncryptionSpec() {
+ return encryptionSpecBuilder_ != null || encryptionSpec_ != null;
+ }
+ /**
+ *
+ *
+ *
+ * Customer-managed encryption key spec for an Endpoint. If set, this
+ * Endpoint and all sub-resources of this Endpoint will be secured by
+ * this key.
+ *
+ *
+ * .google.cloud.aiplatform.v1beta1.EncryptionSpec encryption_spec = 10;
+ *
+ * @return The encryptionSpec.
+ */
+ public com.google.cloud.aiplatform.v1beta1.EncryptionSpec getEncryptionSpec() {
+ if (encryptionSpecBuilder_ == null) {
+ return encryptionSpec_ == null
+ ? com.google.cloud.aiplatform.v1beta1.EncryptionSpec.getDefaultInstance()
+ : encryptionSpec_;
+ } else {
+ return encryptionSpecBuilder_.getMessage();
+ }
+ }
+ /**
+ *
+ *
+ *
+ * Customer-managed encryption key spec for an Endpoint. If set, this
+ * Endpoint and all sub-resources of this Endpoint will be secured by
+ * this key.
+ *
+ *
+ * .google.cloud.aiplatform.v1beta1.EncryptionSpec encryption_spec = 10;
+ */
+ public Builder setEncryptionSpec(com.google.cloud.aiplatform.v1beta1.EncryptionSpec value) {
+ if (encryptionSpecBuilder_ == null) {
+ if (value == null) {
+ throw new NullPointerException();
+ }
+ encryptionSpec_ = value;
+ onChanged();
+ } else {
+ encryptionSpecBuilder_.setMessage(value);
+ }
+
+ return this;
+ }
+ /**
+ *
+ *
+ *
+ * Customer-managed encryption key spec for an Endpoint. If set, this
+ * Endpoint and all sub-resources of this Endpoint will be secured by
+ * this key.
+ *
+ *
+ * .google.cloud.aiplatform.v1beta1.EncryptionSpec encryption_spec = 10;
+ */
+ public Builder setEncryptionSpec(
+ com.google.cloud.aiplatform.v1beta1.EncryptionSpec.Builder builderForValue) {
+ if (encryptionSpecBuilder_ == null) {
+ encryptionSpec_ = builderForValue.build();
+ onChanged();
+ } else {
+ encryptionSpecBuilder_.setMessage(builderForValue.build());
+ }
+
+ return this;
+ }
+ /**
+ *
+ *
+ *
+ * Customer-managed encryption key spec for an Endpoint. If set, this
+ * Endpoint and all sub-resources of this Endpoint will be secured by
+ * this key.
+ *
+ *
+ * .google.cloud.aiplatform.v1beta1.EncryptionSpec encryption_spec = 10;
+ */
+ public Builder mergeEncryptionSpec(com.google.cloud.aiplatform.v1beta1.EncryptionSpec value) {
+ if (encryptionSpecBuilder_ == null) {
+ if (encryptionSpec_ != null) {
+ encryptionSpec_ =
+ com.google.cloud.aiplatform.v1beta1.EncryptionSpec.newBuilder(encryptionSpec_)
+ .mergeFrom(value)
+ .buildPartial();
+ } else {
+ encryptionSpec_ = value;
+ }
+ onChanged();
+ } else {
+ encryptionSpecBuilder_.mergeFrom(value);
+ }
+
+ return this;
+ }
+ /**
+ *
+ *
+ *
+ * Customer-managed encryption key spec for an Endpoint. If set, this
+ * Endpoint and all sub-resources of this Endpoint will be secured by
+ * this key.
+ *
+ *
+ * .google.cloud.aiplatform.v1beta1.EncryptionSpec encryption_spec = 10;
+ */
+ public Builder clearEncryptionSpec() {
+ if (encryptionSpecBuilder_ == null) {
+ encryptionSpec_ = null;
+ onChanged();
+ } else {
+ encryptionSpec_ = null;
+ encryptionSpecBuilder_ = null;
+ }
+
+ return this;
+ }
+ /**
+ *
+ *
+ *
+ * Customer-managed encryption key spec for an Endpoint. If set, this
+ * Endpoint and all sub-resources of this Endpoint will be secured by
+ * this key.
+ *
+ *
+ * .google.cloud.aiplatform.v1beta1.EncryptionSpec encryption_spec = 10;
+ */
+ public com.google.cloud.aiplatform.v1beta1.EncryptionSpec.Builder getEncryptionSpecBuilder() {
+
+ onChanged();
+ return getEncryptionSpecFieldBuilder().getBuilder();
+ }
+ /**
+ *
+ *
+ *
+ * Customer-managed encryption key spec for an Endpoint. If set, this
+ * Endpoint and all sub-resources of this Endpoint will be secured by
+ * this key.
+ *
+ *
+ * .google.cloud.aiplatform.v1beta1.EncryptionSpec encryption_spec = 10;
+ */
+ public com.google.cloud.aiplatform.v1beta1.EncryptionSpecOrBuilder
+ getEncryptionSpecOrBuilder() {
+ if (encryptionSpecBuilder_ != null) {
+ return encryptionSpecBuilder_.getMessageOrBuilder();
+ } else {
+ return encryptionSpec_ == null
+ ? com.google.cloud.aiplatform.v1beta1.EncryptionSpec.getDefaultInstance()
+ : encryptionSpec_;
+ }
+ }
+ /**
+ *
+ *
+ *
+ * Customer-managed encryption key spec for an Endpoint. If set, this
+ * Endpoint and all sub-resources of this Endpoint will be secured by
+ * this key.
+ *
+ *
+ * .google.cloud.aiplatform.v1beta1.EncryptionSpec encryption_spec = 10;
+ */
+ private com.google.protobuf.SingleFieldBuilderV3<
+ com.google.cloud.aiplatform.v1beta1.EncryptionSpec,
+ com.google.cloud.aiplatform.v1beta1.EncryptionSpec.Builder,
+ com.google.cloud.aiplatform.v1beta1.EncryptionSpecOrBuilder>
+ getEncryptionSpecFieldBuilder() {
+ if (encryptionSpecBuilder_ == null) {
+ encryptionSpecBuilder_ =
+ new com.google.protobuf.SingleFieldBuilderV3<
+ com.google.cloud.aiplatform.v1beta1.EncryptionSpec,
+ com.google.cloud.aiplatform.v1beta1.EncryptionSpec.Builder,
+ com.google.cloud.aiplatform.v1beta1.EncryptionSpecOrBuilder>(
+ getEncryptionSpec(), getParentForChildren(), isClean());
+ encryptionSpec_ = null;
+ }
+ return encryptionSpecBuilder_;
+ }
+
@java.lang.Override
public final Builder setUnknownFields(final com.google.protobuf.UnknownFieldSet unknownFields) {
return super.setUnknownFields(unknownFields);
diff --git a/proto-google-cloud-aiplatform-v1beta1/src/main/java/com/google/cloud/aiplatform/v1beta1/EndpointOrBuilder.java b/proto-google-cloud-aiplatform-v1beta1/src/main/java/com/google/cloud/aiplatform/v1beta1/EndpointOrBuilder.java
index d23033be3..83d69a594 100644
--- a/proto-google-cloud-aiplatform-v1beta1/src/main/java/com/google/cloud/aiplatform/v1beta1/EndpointOrBuilder.java
+++ b/proto-google-cloud-aiplatform-v1beta1/src/main/java/com/google/cloud/aiplatform/v1beta1/EndpointOrBuilder.java
@@ -429,4 +429,45 @@ public interface EndpointOrBuilder
*
*/
com.google.protobuf.TimestampOrBuilder getUpdateTimeOrBuilder();
+
+ /**
+ *
+ *
+ *
+ * Customer-managed encryption key spec for an Endpoint. If set, this
+ * Endpoint and all sub-resources of this Endpoint will be secured by
+ * this key.
+ *
+ *
+ * .google.cloud.aiplatform.v1beta1.EncryptionSpec encryption_spec = 10;
+ *
+ * @return Whether the encryptionSpec field is set.
+ */
+ boolean hasEncryptionSpec();
+ /**
+ *
+ *
+ *
+ * Customer-managed encryption key spec for an Endpoint. If set, this
+ * Endpoint and all sub-resources of this Endpoint will be secured by
+ * this key.
+ *
+ *
+ * .google.cloud.aiplatform.v1beta1.EncryptionSpec encryption_spec = 10;
+ *
+ * @return The encryptionSpec.
+ */
+ com.google.cloud.aiplatform.v1beta1.EncryptionSpec getEncryptionSpec();
+ /**
+ *
+ *
+ *
+ * Customer-managed encryption key spec for an Endpoint. If set, this
+ * Endpoint and all sub-resources of this Endpoint will be secured by
+ * this key.
+ *
+ *
+ * .google.cloud.aiplatform.v1beta1.EncryptionSpec encryption_spec = 10;
+ */
+ com.google.cloud.aiplatform.v1beta1.EncryptionSpecOrBuilder getEncryptionSpecOrBuilder();
}
diff --git a/proto-google-cloud-aiplatform-v1beta1/src/main/java/com/google/cloud/aiplatform/v1beta1/EndpointProto.java b/proto-google-cloud-aiplatform-v1beta1/src/main/java/com/google/cloud/aiplatform/v1beta1/EndpointProto.java
index 2b47ee4c4..47c9a8e84 100644
--- a/proto-google-cloud-aiplatform-v1beta1/src/main/java/com/google/cloud/aiplatform/v1beta1/EndpointProto.java
+++ b/proto-google-cloud-aiplatform-v1beta1/src/main/java/com/google/cloud/aiplatform/v1beta1/EndpointProto.java
@@ -55,44 +55,47 @@ public static com.google.protobuf.Descriptors.FileDescriptor getDescriptor() {
"\n.google/cloud/aiplatform/v1beta1/endpoi"
+ "nt.proto\022\037google.cloud.aiplatform.v1beta"
+ "1\032\037google/api/field_behavior.proto\032\031goog"
- + "le/api/resource.proto\0321google/cloud/aipl"
- + "atform/v1beta1/explanation.proto\0327google"
- + "/cloud/aiplatform/v1beta1/machine_resour"
- + "ces.proto\032\037google/protobuf/timestamp.pro"
- + "to\032\034google/api/annotations.proto\"\373\004\n\010End"
- + "point\022\021\n\004name\030\001 \001(\tB\003\340A\003\022\031\n\014display_name"
- + "\030\002 \001(\tB\003\340A\002\022\023\n\013description\030\003 \001(\t\022L\n\017depl"
- + "oyed_models\030\004 \003(\0132..google.cloud.aiplatf"
- + "orm.v1beta1.DeployedModelB\003\340A\003\022R\n\rtraffi"
- + "c_split\030\005 \003(\0132;.google.cloud.aiplatform."
- + "v1beta1.Endpoint.TrafficSplitEntry\022\014\n\004et"
- + "ag\030\006 \001(\t\022E\n\006labels\030\007 \003(\01325.google.cloud."
- + "aiplatform.v1beta1.Endpoint.LabelsEntry\022"
- + "4\n\013create_time\030\010 \001(\0132\032.google.protobuf.T"
- + "imestampB\003\340A\003\0224\n\013update_time\030\t \001(\0132\032.goo"
- + "gle.protobuf.TimestampB\003\340A\003\0323\n\021TrafficSp"
- + "litEntry\022\013\n\003key\030\001 \001(\t\022\r\n\005value\030\002 \001(\005:\0028\001"
- + "\032-\n\013LabelsEntry\022\013\n\003key\030\001 \001(\t\022\r\n\005value\030\002 "
- + "\001(\t:\0028\001:e\352Ab\n\"aiplatform.googleapis.com/"
- + "Endpoint\022string value = 2;
+ * string value = 2 [(.google.api.field_behavior) = REQUIRED];
*
* @return The value.
*/
@@ -203,7 +203,7 @@ public java.lang.String getValue() {
*
*
*
- * Variables that reference a $(VAR_NAME) are expanded
+ * Required. Variables that reference a $(VAR_NAME) are expanded
* using the previous defined environment variables in the container and
* any service environment variables. If a variable cannot be resolved,
* the reference in the input string will be unchanged. The $(VAR_NAME)
@@ -212,7 +212,7 @@ public java.lang.String getValue() {
* exists or not.
*
*
- * string value = 2;
+ * string value = 2 [(.google.api.field_behavior) = REQUIRED];
*
* @return The bytes for value.
*/
@@ -672,7 +672,7 @@ public Builder setNameBytes(com.google.protobuf.ByteString value) {
*
*
*
- * Variables that reference a $(VAR_NAME) are expanded
+ * Required. Variables that reference a $(VAR_NAME) are expanded
* using the previous defined environment variables in the container and
* any service environment variables. If a variable cannot be resolved,
* the reference in the input string will be unchanged. The $(VAR_NAME)
@@ -681,7 +681,7 @@ public Builder setNameBytes(com.google.protobuf.ByteString value) {
* exists or not.
*
*
- * string value = 2;
+ * string value = 2 [(.google.api.field_behavior) = REQUIRED];
*
* @return The value.
*/
@@ -700,7 +700,7 @@ public java.lang.String getValue() {
*
*
*
- * Variables that reference a $(VAR_NAME) are expanded
+ * Required. Variables that reference a $(VAR_NAME) are expanded
* using the previous defined environment variables in the container and
* any service environment variables. If a variable cannot be resolved,
* the reference in the input string will be unchanged. The $(VAR_NAME)
@@ -709,7 +709,7 @@ public java.lang.String getValue() {
* exists or not.
*
*
- * string value = 2;
+ * string value = 2 [(.google.api.field_behavior) = REQUIRED];
*
* @return The bytes for value.
*/
@@ -728,7 +728,7 @@ public com.google.protobuf.ByteString getValueBytes() {
*
*
*
- * Variables that reference a $(VAR_NAME) are expanded
+ * Required. Variables that reference a $(VAR_NAME) are expanded
* using the previous defined environment variables in the container and
* any service environment variables. If a variable cannot be resolved,
* the reference in the input string will be unchanged. The $(VAR_NAME)
@@ -737,7 +737,7 @@ public com.google.protobuf.ByteString getValueBytes() {
* exists or not.
*
*
- * string value = 2;
+ * string value = 2 [(.google.api.field_behavior) = REQUIRED];
*
* @param value The value to set.
* @return This builder for chaining.
@@ -755,7 +755,7 @@ public Builder setValue(java.lang.String value) {
*
*
*
- * Variables that reference a $(VAR_NAME) are expanded
+ * Required. Variables that reference a $(VAR_NAME) are expanded
* using the previous defined environment variables in the container and
* any service environment variables. If a variable cannot be resolved,
* the reference in the input string will be unchanged. The $(VAR_NAME)
@@ -764,7 +764,7 @@ public Builder setValue(java.lang.String value) {
* exists or not.
*
*
- * string value = 2;
+ * string value = 2 [(.google.api.field_behavior) = REQUIRED];
*
* @return This builder for chaining.
*/
@@ -778,7 +778,7 @@ public Builder clearValue() {
*
*
*
- * Variables that reference a $(VAR_NAME) are expanded
+ * Required. Variables that reference a $(VAR_NAME) are expanded
* using the previous defined environment variables in the container and
* any service environment variables. If a variable cannot be resolved,
* the reference in the input string will be unchanged. The $(VAR_NAME)
@@ -787,7 +787,7 @@ public Builder clearValue() {
* exists or not.
*
*
- * string value = 2;
+ * string value = 2 [(.google.api.field_behavior) = REQUIRED];
*
* @param value The bytes for value to set.
* @return This builder for chaining.
diff --git a/proto-google-cloud-aiplatform-v1beta1/src/main/java/com/google/cloud/aiplatform/v1beta1/EnvVarOrBuilder.java b/proto-google-cloud-aiplatform-v1beta1/src/main/java/com/google/cloud/aiplatform/v1beta1/EnvVarOrBuilder.java
index e160feabf..2317b8c6d 100644
--- a/proto-google-cloud-aiplatform-v1beta1/src/main/java/com/google/cloud/aiplatform/v1beta1/EnvVarOrBuilder.java
+++ b/proto-google-cloud-aiplatform-v1beta1/src/main/java/com/google/cloud/aiplatform/v1beta1/EnvVarOrBuilder.java
@@ -52,7 +52,7 @@ public interface EnvVarOrBuilder
*
*
*
- * Variables that reference a $(VAR_NAME) are expanded
+ * Required. Variables that reference a $(VAR_NAME) are expanded
* using the previous defined environment variables in the container and
* any service environment variables. If a variable cannot be resolved,
* the reference in the input string will be unchanged. The $(VAR_NAME)
@@ -61,7 +61,7 @@ public interface EnvVarOrBuilder
* exists or not.
*
*
- * string value = 2;
+ * string value = 2 [(.google.api.field_behavior) = REQUIRED];
*
* @return The value.
*/
@@ -70,7 +70,7 @@ public interface EnvVarOrBuilder
*
*
*
- * Variables that reference a $(VAR_NAME) are expanded
+ * Required. Variables that reference a $(VAR_NAME) are expanded
* using the previous defined environment variables in the container and
* any service environment variables. If a variable cannot be resolved,
* the reference in the input string will be unchanged. The $(VAR_NAME)
@@ -79,7 +79,7 @@ public interface EnvVarOrBuilder
* exists or not.
*
*
- * string value = 2;
+ * string value = 2 [(.google.api.field_behavior) = REQUIRED];
*
* @return The bytes for value.
*/
diff --git a/proto-google-cloud-aiplatform-v1beta1/src/main/java/com/google/cloud/aiplatform/v1beta1/EnvVarProto.java b/proto-google-cloud-aiplatform-v1beta1/src/main/java/com/google/cloud/aiplatform/v1beta1/EnvVarProto.java
index ca406038d..9a6720956 100644
--- a/proto-google-cloud-aiplatform-v1beta1/src/main/java/com/google/cloud/aiplatform/v1beta1/EnvVarProto.java
+++ b/proto-google-cloud-aiplatform-v1beta1/src/main/java/com/google/cloud/aiplatform/v1beta1/EnvVarProto.java
@@ -43,12 +43,12 @@ public static com.google.protobuf.Descriptors.FileDescriptor getDescriptor() {
"\n-google/cloud/aiplatform/v1beta1/env_va"
+ "r.proto\022\037google.cloud.aiplatform.v1beta1"
+ "\032\037google/api/field_behavior.proto\032\034googl"
- + "e/api/annotations.proto\"*\n\006EnvVar\022\021\n\004nam"
- + "e\030\001 \001(\tB\003\340A\002\022\r\n\005value\030\002 \001(\tB\177\n#com.googl"
- + "e.cloud.aiplatform.v1beta1B\013EnvVarProtoP"
- + "\001ZIgoogle.golang.org/genproto/googleapis"
- + "/cloud/aiplatform/v1beta1;aiplatformb\006pr"
- + "oto3"
+ + "e/api/annotations.proto\"/\n\006EnvVar\022\021\n\004nam"
+ + "e\030\001 \001(\tB\003\340A\002\022\022\n\005value\030\002 \001(\tB\003\340A\002B\177\n#com."
+ + "google.cloud.aiplatform.v1beta1B\013EnvVarP"
+ + "rotoP\001ZIgoogle.golang.org/genproto/googl"
+ + "eapis/cloud/aiplatform/v1beta1;aiplatfor"
+ + "mb\006proto3"
};
descriptor =
com.google.protobuf.Descriptors.FileDescriptor.internalBuildGeneratedFileFrom(
diff --git a/proto-google-cloud-aiplatform-v1beta1/src/main/java/com/google/cloud/aiplatform/v1beta1/ExplainRequest.java b/proto-google-cloud-aiplatform-v1beta1/src/main/java/com/google/cloud/aiplatform/v1beta1/ExplainRequest.java
index 4c06cea68..f0985dbd5 100644
--- a/proto-google-cloud-aiplatform-v1beta1/src/main/java/com/google/cloud/aiplatform/v1beta1/ExplainRequest.java
+++ b/proto-google-cloud-aiplatform-v1beta1/src/main/java/com/google/cloud/aiplatform/v1beta1/ExplainRequest.java
@@ -110,6 +110,23 @@ private ExplainRequest(
parameters_ = subBuilder.buildPartial();
}
+ break;
+ }
+ case 42:
+ {
+ com.google.cloud.aiplatform.v1beta1.ExplanationSpecOverride.Builder subBuilder = null;
+ if (explanationSpecOverride_ != null) {
+ subBuilder = explanationSpecOverride_.toBuilder();
+ }
+ explanationSpecOverride_ =
+ input.readMessage(
+ com.google.cloud.aiplatform.v1beta1.ExplanationSpecOverride.parser(),
+ extensionRegistry);
+ if (subBuilder != null) {
+ subBuilder.mergeFrom(explanationSpecOverride_);
+ explanationSpecOverride_ = subBuilder.buildPartial();
+ }
+
break;
}
default:
@@ -374,6 +391,79 @@ public com.google.protobuf.ValueOrBuilder getParametersOrBuilder() {
return getParameters();
}
+ public static final int EXPLANATION_SPEC_OVERRIDE_FIELD_NUMBER = 5;
+ private com.google.cloud.aiplatform.v1beta1.ExplanationSpecOverride explanationSpecOverride_;
+ /**
+ *
+ *
+ *
+ * If specified, overrides the
+ * [explanation_spec][google.cloud.aiplatform.v1beta1.DeployedModel.explanation_spec] of the DeployedModel.
+ * Can be used for explaining prediction results with different
+ * configurations, such as:
+ * - Explaining top-5 predictions results as opposed to top-1;
+ * - Increasing path count or step count of the attribution methods to reduce
+ * approximate errors;
+ * - Using different baselines for explaining the prediction results.
+ *
+ *
+ * .google.cloud.aiplatform.v1beta1.ExplanationSpecOverride explanation_spec_override = 5;
+ *
+ *
+ * @return Whether the explanationSpecOverride field is set.
+ */
+ @java.lang.Override
+ public boolean hasExplanationSpecOverride() {
+ return explanationSpecOverride_ != null;
+ }
+ /**
+ *
+ *
+ *
+ * If specified, overrides the
+ * [explanation_spec][google.cloud.aiplatform.v1beta1.DeployedModel.explanation_spec] of the DeployedModel.
+ * Can be used for explaining prediction results with different
+ * configurations, such as:
+ * - Explaining top-5 predictions results as opposed to top-1;
+ * - Increasing path count or step count of the attribution methods to reduce
+ * approximate errors;
+ * - Using different baselines for explaining the prediction results.
+ *
+ *
+ * .google.cloud.aiplatform.v1beta1.ExplanationSpecOverride explanation_spec_override = 5;
+ *
+ *
+ * @return The explanationSpecOverride.
+ */
+ @java.lang.Override
+ public com.google.cloud.aiplatform.v1beta1.ExplanationSpecOverride getExplanationSpecOverride() {
+ return explanationSpecOverride_ == null
+ ? com.google.cloud.aiplatform.v1beta1.ExplanationSpecOverride.getDefaultInstance()
+ : explanationSpecOverride_;
+ }
+ /**
+ *
+ *
+ *
+ * If specified, overrides the
+ * [explanation_spec][google.cloud.aiplatform.v1beta1.DeployedModel.explanation_spec] of the DeployedModel.
+ * Can be used for explaining prediction results with different
+ * configurations, such as:
+ * - Explaining top-5 predictions results as opposed to top-1;
+ * - Increasing path count or step count of the attribution methods to reduce
+ * approximate errors;
+ * - Using different baselines for explaining the prediction results.
+ *
+ *
+ * .google.cloud.aiplatform.v1beta1.ExplanationSpecOverride explanation_spec_override = 5;
+ *
+ */
+ @java.lang.Override
+ public com.google.cloud.aiplatform.v1beta1.ExplanationSpecOverrideOrBuilder
+ getExplanationSpecOverrideOrBuilder() {
+ return getExplanationSpecOverride();
+ }
+
public static final int DEPLOYED_MODEL_ID_FIELD_NUMBER = 3;
private volatile java.lang.Object deployedModelId_;
/**
@@ -451,6 +541,9 @@ public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io
if (parameters_ != null) {
output.writeMessage(4, getParameters());
}
+ if (explanationSpecOverride_ != null) {
+ output.writeMessage(5, getExplanationSpecOverride());
+ }
unknownFields.writeTo(output);
}
@@ -472,6 +565,10 @@ public int getSerializedSize() {
if (parameters_ != null) {
size += com.google.protobuf.CodedOutputStream.computeMessageSize(4, getParameters());
}
+ if (explanationSpecOverride_ != null) {
+ size +=
+ com.google.protobuf.CodedOutputStream.computeMessageSize(5, getExplanationSpecOverride());
+ }
size += unknownFields.getSerializedSize();
memoizedSize = size;
return size;
@@ -494,6 +591,10 @@ public boolean equals(final java.lang.Object obj) {
if (hasParameters()) {
if (!getParameters().equals(other.getParameters())) return false;
}
+ if (hasExplanationSpecOverride() != other.hasExplanationSpecOverride()) return false;
+ if (hasExplanationSpecOverride()) {
+ if (!getExplanationSpecOverride().equals(other.getExplanationSpecOverride())) return false;
+ }
if (!getDeployedModelId().equals(other.getDeployedModelId())) return false;
if (!unknownFields.equals(other.unknownFields)) return false;
return true;
@@ -516,6 +617,10 @@ public int hashCode() {
hash = (37 * hash) + PARAMETERS_FIELD_NUMBER;
hash = (53 * hash) + getParameters().hashCode();
}
+ if (hasExplanationSpecOverride()) {
+ hash = (37 * hash) + EXPLANATION_SPEC_OVERRIDE_FIELD_NUMBER;
+ hash = (53 * hash) + getExplanationSpecOverride().hashCode();
+ }
hash = (37 * hash) + DEPLOYED_MODEL_ID_FIELD_NUMBER;
hash = (53 * hash) + getDeployedModelId().hashCode();
hash = (29 * hash) + unknownFields.hashCode();
@@ -679,6 +784,12 @@ public Builder clear() {
parameters_ = null;
parametersBuilder_ = null;
}
+ if (explanationSpecOverrideBuilder_ == null) {
+ explanationSpecOverride_ = null;
+ } else {
+ explanationSpecOverride_ = null;
+ explanationSpecOverrideBuilder_ = null;
+ }
deployedModelId_ = "";
return this;
@@ -724,6 +835,11 @@ public com.google.cloud.aiplatform.v1beta1.ExplainRequest buildPartial() {
} else {
result.parameters_ = parametersBuilder_.build();
}
+ if (explanationSpecOverrideBuilder_ == null) {
+ result.explanationSpecOverride_ = explanationSpecOverride_;
+ } else {
+ result.explanationSpecOverride_ = explanationSpecOverrideBuilder_.build();
+ }
result.deployedModelId_ = deployedModelId_;
onBuilt();
return result;
@@ -809,6 +925,9 @@ public Builder mergeFrom(com.google.cloud.aiplatform.v1beta1.ExplainRequest othe
if (other.hasParameters()) {
mergeParameters(other.getParameters());
}
+ if (other.hasExplanationSpecOverride()) {
+ mergeExplanationSpecOverride(other.getExplanationSpecOverride());
+ }
if (!other.getDeployedModelId().isEmpty()) {
deployedModelId_ = other.deployedModelId_;
onChanged();
@@ -1701,6 +1820,270 @@ public com.google.protobuf.ValueOrBuilder getParametersOrBuilder() {
return parametersBuilder_;
}
+ private com.google.cloud.aiplatform.v1beta1.ExplanationSpecOverride explanationSpecOverride_;
+ private com.google.protobuf.SingleFieldBuilderV3<
+ com.google.cloud.aiplatform.v1beta1.ExplanationSpecOverride,
+ com.google.cloud.aiplatform.v1beta1.ExplanationSpecOverride.Builder,
+ com.google.cloud.aiplatform.v1beta1.ExplanationSpecOverrideOrBuilder>
+ explanationSpecOverrideBuilder_;
+ /**
+ *
+ *
+ *
+ * If specified, overrides the
+ * [explanation_spec][google.cloud.aiplatform.v1beta1.DeployedModel.explanation_spec] of the DeployedModel.
+ * Can be used for explaining prediction results with different
+ * configurations, such as:
+ * - Explaining top-5 predictions results as opposed to top-1;
+ * - Increasing path count or step count of the attribution methods to reduce
+ * approximate errors;
+ * - Using different baselines for explaining the prediction results.
+ *
+ *
+ * .google.cloud.aiplatform.v1beta1.ExplanationSpecOverride explanation_spec_override = 5;
+ *
+ *
+ * @return Whether the explanationSpecOverride field is set.
+ */
+ public boolean hasExplanationSpecOverride() {
+ return explanationSpecOverrideBuilder_ != null || explanationSpecOverride_ != null;
+ }
+ /**
+ *
+ *
+ *
+ * If specified, overrides the
+ * [explanation_spec][google.cloud.aiplatform.v1beta1.DeployedModel.explanation_spec] of the DeployedModel.
+ * Can be used for explaining prediction results with different
+ * configurations, such as:
+ * - Explaining top-5 predictions results as opposed to top-1;
+ * - Increasing path count or step count of the attribution methods to reduce
+ * approximate errors;
+ * - Using different baselines for explaining the prediction results.
+ *
+ *
+ * .google.cloud.aiplatform.v1beta1.ExplanationSpecOverride explanation_spec_override = 5;
+ *
+ *
+ * @return The explanationSpecOverride.
+ */
+ public com.google.cloud.aiplatform.v1beta1.ExplanationSpecOverride
+ getExplanationSpecOverride() {
+ if (explanationSpecOverrideBuilder_ == null) {
+ return explanationSpecOverride_ == null
+ ? com.google.cloud.aiplatform.v1beta1.ExplanationSpecOverride.getDefaultInstance()
+ : explanationSpecOverride_;
+ } else {
+ return explanationSpecOverrideBuilder_.getMessage();
+ }
+ }
+ /**
+ *
+ *
+ *
+ * If specified, overrides the
+ * [explanation_spec][google.cloud.aiplatform.v1beta1.DeployedModel.explanation_spec] of the DeployedModel.
+ * Can be used for explaining prediction results with different
+ * configurations, such as:
+ * - Explaining top-5 predictions results as opposed to top-1;
+ * - Increasing path count or step count of the attribution methods to reduce
+ * approximate errors;
+ * - Using different baselines for explaining the prediction results.
+ *
+ *
+ * .google.cloud.aiplatform.v1beta1.ExplanationSpecOverride explanation_spec_override = 5;
+ *
+ */
+ public Builder setExplanationSpecOverride(
+ com.google.cloud.aiplatform.v1beta1.ExplanationSpecOverride value) {
+ if (explanationSpecOverrideBuilder_ == null) {
+ if (value == null) {
+ throw new NullPointerException();
+ }
+ explanationSpecOverride_ = value;
+ onChanged();
+ } else {
+ explanationSpecOverrideBuilder_.setMessage(value);
+ }
+
+ return this;
+ }
+ /**
+ *
+ *
+ *
+ * If specified, overrides the
+ * [explanation_spec][google.cloud.aiplatform.v1beta1.DeployedModel.explanation_spec] of the DeployedModel.
+ * Can be used for explaining prediction results with different
+ * configurations, such as:
+ * - Explaining top-5 predictions results as opposed to top-1;
+ * - Increasing path count or step count of the attribution methods to reduce
+ * approximate errors;
+ * - Using different baselines for explaining the prediction results.
+ *
+ *
+ * .google.cloud.aiplatform.v1beta1.ExplanationSpecOverride explanation_spec_override = 5;
+ *
+ */
+ public Builder setExplanationSpecOverride(
+ com.google.cloud.aiplatform.v1beta1.ExplanationSpecOverride.Builder builderForValue) {
+ if (explanationSpecOverrideBuilder_ == null) {
+ explanationSpecOverride_ = builderForValue.build();
+ onChanged();
+ } else {
+ explanationSpecOverrideBuilder_.setMessage(builderForValue.build());
+ }
+
+ return this;
+ }
+ /**
+ *
+ *
+ *
+ * If specified, overrides the
+ * [explanation_spec][google.cloud.aiplatform.v1beta1.DeployedModel.explanation_spec] of the DeployedModel.
+ * Can be used for explaining prediction results with different
+ * configurations, such as:
+ * - Explaining top-5 predictions results as opposed to top-1;
+ * - Increasing path count or step count of the attribution methods to reduce
+ * approximate errors;
+ * - Using different baselines for explaining the prediction results.
+ *
+ *
+ * .google.cloud.aiplatform.v1beta1.ExplanationSpecOverride explanation_spec_override = 5;
+ *
+ */
+ public Builder mergeExplanationSpecOverride(
+ com.google.cloud.aiplatform.v1beta1.ExplanationSpecOverride value) {
+ if (explanationSpecOverrideBuilder_ == null) {
+ if (explanationSpecOverride_ != null) {
+ explanationSpecOverride_ =
+ com.google.cloud.aiplatform.v1beta1.ExplanationSpecOverride.newBuilder(
+ explanationSpecOverride_)
+ .mergeFrom(value)
+ .buildPartial();
+ } else {
+ explanationSpecOverride_ = value;
+ }
+ onChanged();
+ } else {
+ explanationSpecOverrideBuilder_.mergeFrom(value);
+ }
+
+ return this;
+ }
+ /**
+ *
+ *
+ *
+ * If specified, overrides the
+ * [explanation_spec][google.cloud.aiplatform.v1beta1.DeployedModel.explanation_spec] of the DeployedModel.
+ * Can be used for explaining prediction results with different
+ * configurations, such as:
+ * - Explaining top-5 predictions results as opposed to top-1;
+ * - Increasing path count or step count of the attribution methods to reduce
+ * approximate errors;
+ * - Using different baselines for explaining the prediction results.
+ *
+ *
+ * .google.cloud.aiplatform.v1beta1.ExplanationSpecOverride explanation_spec_override = 5;
+ *
+ */
+ public Builder clearExplanationSpecOverride() {
+ if (explanationSpecOverrideBuilder_ == null) {
+ explanationSpecOverride_ = null;
+ onChanged();
+ } else {
+ explanationSpecOverride_ = null;
+ explanationSpecOverrideBuilder_ = null;
+ }
+
+ return this;
+ }
+ /**
+ *
+ *
+ *
+ * If specified, overrides the
+ * [explanation_spec][google.cloud.aiplatform.v1beta1.DeployedModel.explanation_spec] of the DeployedModel.
+ * Can be used for explaining prediction results with different
+ * configurations, such as:
+ * - Explaining top-5 predictions results as opposed to top-1;
+ * - Increasing path count or step count of the attribution methods to reduce
+ * approximate errors;
+ * - Using different baselines for explaining the prediction results.
+ *
+ *
+ * .google.cloud.aiplatform.v1beta1.ExplanationSpecOverride explanation_spec_override = 5;
+ *
+ */
+ public com.google.cloud.aiplatform.v1beta1.ExplanationSpecOverride.Builder
+ getExplanationSpecOverrideBuilder() {
+
+ onChanged();
+ return getExplanationSpecOverrideFieldBuilder().getBuilder();
+ }
+ /**
+ *
+ *
+ *
+ * If specified, overrides the
+ * [explanation_spec][google.cloud.aiplatform.v1beta1.DeployedModel.explanation_spec] of the DeployedModel.
+ * Can be used for explaining prediction results with different
+ * configurations, such as:
+ * - Explaining top-5 predictions results as opposed to top-1;
+ * - Increasing path count or step count of the attribution methods to reduce
+ * approximate errors;
+ * - Using different baselines for explaining the prediction results.
+ *
+ *
+ * .google.cloud.aiplatform.v1beta1.ExplanationSpecOverride explanation_spec_override = 5;
+ *
+ */
+ public com.google.cloud.aiplatform.v1beta1.ExplanationSpecOverrideOrBuilder
+ getExplanationSpecOverrideOrBuilder() {
+ if (explanationSpecOverrideBuilder_ != null) {
+ return explanationSpecOverrideBuilder_.getMessageOrBuilder();
+ } else {
+ return explanationSpecOverride_ == null
+ ? com.google.cloud.aiplatform.v1beta1.ExplanationSpecOverride.getDefaultInstance()
+ : explanationSpecOverride_;
+ }
+ }
+ /**
+ *
+ *
+ *
+ * If specified, overrides the
+ * [explanation_spec][google.cloud.aiplatform.v1beta1.DeployedModel.explanation_spec] of the DeployedModel.
+ * Can be used for explaining prediction results with different
+ * configurations, such as:
+ * - Explaining top-5 predictions results as opposed to top-1;
+ * - Increasing path count or step count of the attribution methods to reduce
+ * approximate errors;
+ * - Using different baselines for explaining the prediction results.
+ *
+ *
+ * .google.cloud.aiplatform.v1beta1.ExplanationSpecOverride explanation_spec_override = 5;
+ *
+ */
+ private com.google.protobuf.SingleFieldBuilderV3<
+ com.google.cloud.aiplatform.v1beta1.ExplanationSpecOverride,
+ com.google.cloud.aiplatform.v1beta1.ExplanationSpecOverride.Builder,
+ com.google.cloud.aiplatform.v1beta1.ExplanationSpecOverrideOrBuilder>
+ getExplanationSpecOverrideFieldBuilder() {
+ if (explanationSpecOverrideBuilder_ == null) {
+ explanationSpecOverrideBuilder_ =
+ new com.google.protobuf.SingleFieldBuilderV3<
+ com.google.cloud.aiplatform.v1beta1.ExplanationSpecOverride,
+ com.google.cloud.aiplatform.v1beta1.ExplanationSpecOverride.Builder,
+ com.google.cloud.aiplatform.v1beta1.ExplanationSpecOverrideOrBuilder>(
+ getExplanationSpecOverride(), getParentForChildren(), isClean());
+ explanationSpecOverride_ = null;
+ }
+ return explanationSpecOverrideBuilder_;
+ }
+
private java.lang.Object deployedModelId_ = "";
/**
*
diff --git a/proto-google-cloud-aiplatform-v1beta1/src/main/java/com/google/cloud/aiplatform/v1beta1/ExplainRequestOrBuilder.java b/proto-google-cloud-aiplatform-v1beta1/src/main/java/com/google/cloud/aiplatform/v1beta1/ExplainRequestOrBuilder.java
index 49654405e..05f8740af 100644
--- a/proto-google-cloud-aiplatform-v1beta1/src/main/java/com/google/cloud/aiplatform/v1beta1/ExplainRequestOrBuilder.java
+++ b/proto-google-cloud-aiplatform-v1beta1/src/main/java/com/google/cloud/aiplatform/v1beta1/ExplainRequestOrBuilder.java
@@ -196,6 +196,66 @@ public interface ExplainRequestOrBuilder
*/
com.google.protobuf.ValueOrBuilder getParametersOrBuilder();
+ /**
+ *
+ *
+ *
+ * If specified, overrides the
+ * [explanation_spec][google.cloud.aiplatform.v1beta1.DeployedModel.explanation_spec] of the DeployedModel.
+ * Can be used for explaining prediction results with different
+ * configurations, such as:
+ * - Explaining top-5 predictions results as opposed to top-1;
+ * - Increasing path count or step count of the attribution methods to reduce
+ * approximate errors;
+ * - Using different baselines for explaining the prediction results.
+ *
+ *
+ * .google.cloud.aiplatform.v1beta1.ExplanationSpecOverride explanation_spec_override = 5;
+ *
+ *
+ * @return Whether the explanationSpecOverride field is set.
+ */
+ boolean hasExplanationSpecOverride();
+ /**
+ *
+ *
+ *
+ * If specified, overrides the
+ * [explanation_spec][google.cloud.aiplatform.v1beta1.DeployedModel.explanation_spec] of the DeployedModel.
+ * Can be used for explaining prediction results with different
+ * configurations, such as:
+ * - Explaining top-5 predictions results as opposed to top-1;
+ * - Increasing path count or step count of the attribution methods to reduce
+ * approximate errors;
+ * - Using different baselines for explaining the prediction results.
+ *
+ *
+ * .google.cloud.aiplatform.v1beta1.ExplanationSpecOverride explanation_spec_override = 5;
+ *
+ *
+ * @return The explanationSpecOverride.
+ */
+ com.google.cloud.aiplatform.v1beta1.ExplanationSpecOverride getExplanationSpecOverride();
+ /**
+ *
+ *
+ *
+ * If specified, overrides the
+ * [explanation_spec][google.cloud.aiplatform.v1beta1.DeployedModel.explanation_spec] of the DeployedModel.
+ * Can be used for explaining prediction results with different
+ * configurations, such as:
+ * - Explaining top-5 predictions results as opposed to top-1;
+ * - Increasing path count or step count of the attribution methods to reduce
+ * approximate errors;
+ * - Using different baselines for explaining the prediction results.
+ *
+ *
+ * .google.cloud.aiplatform.v1beta1.ExplanationSpecOverride explanation_spec_override = 5;
+ *
+ */
+ com.google.cloud.aiplatform.v1beta1.ExplanationSpecOverrideOrBuilder
+ getExplanationSpecOverrideOrBuilder();
+
/**
*
*
diff --git a/proto-google-cloud-aiplatform-v1beta1/src/main/java/com/google/cloud/aiplatform/v1beta1/ExplanationMetadata.java b/proto-google-cloud-aiplatform-v1beta1/src/main/java/com/google/cloud/aiplatform/v1beta1/ExplanationMetadata.java
index fa4e9660d..511c22b5e 100644
--- a/proto-google-cloud-aiplatform-v1beta1/src/main/java/com/google/cloud/aiplatform/v1beta1/ExplanationMetadata.java
+++ b/proto-google-cloud-aiplatform-v1beta1/src/main/java/com/google/cloud/aiplatform/v1beta1/ExplanationMetadata.java
@@ -10318,8 +10318,8 @@ public int getInputsCount() {
* name specified as the key in [ExplanationMetadata.inputs][google.cloud.aiplatform.v1beta1.ExplanationMetadata.inputs]. The baseline
* of the empty feature is chosen by AI Platform.
* For AI Platform provided Tensorflow images, the key can be any friendly
- * name of the feature . Once specified, [
- * featureAttributions][Attribution.feature_attributions] will be keyed by
+ * name of the feature. Once specified,
+ * [featureAttributions][google.cloud.aiplatform.v1beta1.Attribution.feature_attributions] are keyed by
* this key (if not grouped with another feature).
* For custom images, the key must match with the key in
* [instance][google.cloud.aiplatform.v1beta1.ExplainRequest.instances].
@@ -10354,8 +10354,8 @@ public boolean containsInputs(java.lang.String key) {
* name specified as the key in [ExplanationMetadata.inputs][google.cloud.aiplatform.v1beta1.ExplanationMetadata.inputs]. The baseline
* of the empty feature is chosen by AI Platform.
* For AI Platform provided Tensorflow images, the key can be any friendly
- * name of the feature . Once specified, [
- * featureAttributions][Attribution.feature_attributions] will be keyed by
+ * name of the feature. Once specified,
+ * [featureAttributions][google.cloud.aiplatform.v1beta1.Attribution.feature_attributions] are keyed by
* this key (if not grouped with another feature).
* For custom images, the key must match with the key in
* [instance][google.cloud.aiplatform.v1beta1.ExplainRequest.instances].
@@ -10381,8 +10381,8 @@ public boolean containsInputs(java.lang.String key) {
* name specified as the key in [ExplanationMetadata.inputs][google.cloud.aiplatform.v1beta1.ExplanationMetadata.inputs]. The baseline
* of the empty feature is chosen by AI Platform.
* For AI Platform provided Tensorflow images, the key can be any friendly
- * name of the feature . Once specified, [
- * featureAttributions][Attribution.feature_attributions] will be keyed by
+ * name of the feature. Once specified,
+ * [featureAttributions][google.cloud.aiplatform.v1beta1.Attribution.feature_attributions] are keyed by
* this key (if not grouped with another feature).
* For custom images, the key must match with the key in
* [instance][google.cloud.aiplatform.v1beta1.ExplainRequest.instances].
@@ -10414,8 +10414,8 @@ public com.google.cloud.aiplatform.v1beta1.ExplanationMetadata.InputMetadata get
* name specified as the key in [ExplanationMetadata.inputs][google.cloud.aiplatform.v1beta1.ExplanationMetadata.inputs]. The baseline
* of the empty feature is chosen by AI Platform.
* For AI Platform provided Tensorflow images, the key can be any friendly
- * name of the feature . Once specified, [
- * featureAttributions][Attribution.feature_attributions] will be keyed by
+ * name of the feature. Once specified,
+ * [featureAttributions][google.cloud.aiplatform.v1beta1.Attribution.feature_attributions] are keyed by
* this key (if not grouped with another feature).
* For custom images, the key must match with the key in
* [instance][google.cloud.aiplatform.v1beta1.ExplainRequest.instances].
@@ -11093,8 +11093,8 @@ public int getInputsCount() {
* name specified as the key in [ExplanationMetadata.inputs][google.cloud.aiplatform.v1beta1.ExplanationMetadata.inputs]. The baseline
* of the empty feature is chosen by AI Platform.
* For AI Platform provided Tensorflow images, the key can be any friendly
- * name of the feature . Once specified, [
- * featureAttributions][Attribution.feature_attributions] will be keyed by
+ * name of the feature. Once specified,
+ * [featureAttributions][google.cloud.aiplatform.v1beta1.Attribution.feature_attributions] are keyed by
* this key (if not grouped with another feature).
* For custom images, the key must match with the key in
* [instance][google.cloud.aiplatform.v1beta1.ExplainRequest.instances].
@@ -11129,8 +11129,8 @@ public boolean containsInputs(java.lang.String key) {
* name specified as the key in [ExplanationMetadata.inputs][google.cloud.aiplatform.v1beta1.ExplanationMetadata.inputs]. The baseline
* of the empty feature is chosen by AI Platform.
* For AI Platform provided Tensorflow images, the key can be any friendly
- * name of the feature . Once specified, [
- * featureAttributions][Attribution.feature_attributions] will be keyed by
+ * name of the feature. Once specified,
+ * [featureAttributions][google.cloud.aiplatform.v1beta1.Attribution.feature_attributions] are keyed by
* this key (if not grouped with another feature).
* For custom images, the key must match with the key in
* [instance][google.cloud.aiplatform.v1beta1.ExplainRequest.instances].
@@ -11156,8 +11156,8 @@ public boolean containsInputs(java.lang.String key) {
* name specified as the key in [ExplanationMetadata.inputs][google.cloud.aiplatform.v1beta1.ExplanationMetadata.inputs]. The baseline
* of the empty feature is chosen by AI Platform.
* For AI Platform provided Tensorflow images, the key can be any friendly
- * name of the feature . Once specified, [
- * featureAttributions][Attribution.feature_attributions] will be keyed by
+ * name of the feature. Once specified,
+ * [featureAttributions][google.cloud.aiplatform.v1beta1.Attribution.feature_attributions] are keyed by
* this key (if not grouped with another feature).
* For custom images, the key must match with the key in
* [instance][google.cloud.aiplatform.v1beta1.ExplainRequest.instances].
@@ -11190,8 +11190,8 @@ public com.google.cloud.aiplatform.v1beta1.ExplanationMetadata.InputMetadata get
* name specified as the key in [ExplanationMetadata.inputs][google.cloud.aiplatform.v1beta1.ExplanationMetadata.inputs]. The baseline
* of the empty feature is chosen by AI Platform.
* For AI Platform provided Tensorflow images, the key can be any friendly
- * name of the feature . Once specified, [
- * featureAttributions][Attribution.feature_attributions] will be keyed by
+ * name of the feature. Once specified,
+ * [featureAttributions][google.cloud.aiplatform.v1beta1.Attribution.feature_attributions] are keyed by
* this key (if not grouped with another feature).
* For custom images, the key must match with the key in
* [instance][google.cloud.aiplatform.v1beta1.ExplainRequest.instances].
@@ -11231,8 +11231,8 @@ public Builder clearInputs() {
* name specified as the key in [ExplanationMetadata.inputs][google.cloud.aiplatform.v1beta1.ExplanationMetadata.inputs]. The baseline
* of the empty feature is chosen by AI Platform.
* For AI Platform provided Tensorflow images, the key can be any friendly
- * name of the feature . Once specified, [
- * featureAttributions][Attribution.feature_attributions] will be keyed by
+ * name of the feature. Once specified,
+ * [featureAttributions][google.cloud.aiplatform.v1beta1.Attribution.feature_attributions] are keyed by
* this key (if not grouped with another feature).
* For custom images, the key must match with the key in
* [instance][google.cloud.aiplatform.v1beta1.ExplainRequest.instances].
@@ -11266,8 +11266,8 @@ public Builder removeInputs(java.lang.String key) {
* name specified as the key in [ExplanationMetadata.inputs][google.cloud.aiplatform.v1beta1.ExplanationMetadata.inputs]. The baseline
* of the empty feature is chosen by AI Platform.
* For AI Platform provided Tensorflow images, the key can be any friendly
- * name of the feature . Once specified, [
- * featureAttributions][Attribution.feature_attributions] will be keyed by
+ * name of the feature. Once specified,
+ * [featureAttributions][google.cloud.aiplatform.v1beta1.Attribution.feature_attributions] are keyed by
* this key (if not grouped with another feature).
* For custom images, the key must match with the key in
* [instance][google.cloud.aiplatform.v1beta1.ExplainRequest.instances].
@@ -11299,8 +11299,8 @@ public Builder putInputs(
* name specified as the key in [ExplanationMetadata.inputs][google.cloud.aiplatform.v1beta1.ExplanationMetadata.inputs]. The baseline
* of the empty feature is chosen by AI Platform.
* For AI Platform provided Tensorflow images, the key can be any friendly
- * name of the feature . Once specified, [
- * featureAttributions][Attribution.feature_attributions] will be keyed by
+ * name of the feature. Once specified,
+ * [featureAttributions][google.cloud.aiplatform.v1beta1.Attribution.feature_attributions] are keyed by
* this key (if not grouped with another feature).
* For custom images, the key must match with the key in
* [instance][google.cloud.aiplatform.v1beta1.ExplainRequest.instances].
diff --git a/proto-google-cloud-aiplatform-v1beta1/src/main/java/com/google/cloud/aiplatform/v1beta1/ExplanationMetadataOrBuilder.java b/proto-google-cloud-aiplatform-v1beta1/src/main/java/com/google/cloud/aiplatform/v1beta1/ExplanationMetadataOrBuilder.java
index fc2ca5493..e543a0496 100644
--- a/proto-google-cloud-aiplatform-v1beta1/src/main/java/com/google/cloud/aiplatform/v1beta1/ExplanationMetadataOrBuilder.java
+++ b/proto-google-cloud-aiplatform-v1beta1/src/main/java/com/google/cloud/aiplatform/v1beta1/ExplanationMetadataOrBuilder.java
@@ -33,8 +33,8 @@ public interface ExplanationMetadataOrBuilder
* name specified as the key in [ExplanationMetadata.inputs][google.cloud.aiplatform.v1beta1.ExplanationMetadata.inputs]. The baseline
* of the empty feature is chosen by AI Platform.
* For AI Platform provided Tensorflow images, the key can be any friendly
- * name of the feature . Once specified, [
- * featureAttributions][Attribution.feature_attributions] will be keyed by
+ * name of the feature. Once specified,
+ * [featureAttributions][google.cloud.aiplatform.v1beta1.Attribution.feature_attributions] are keyed by
* this key (if not grouped with another feature).
* For custom images, the key must match with the key in
* [instance][google.cloud.aiplatform.v1beta1.ExplainRequest.instances].
@@ -55,8 +55,8 @@ public interface ExplanationMetadataOrBuilder
* name specified as the key in [ExplanationMetadata.inputs][google.cloud.aiplatform.v1beta1.ExplanationMetadata.inputs]. The baseline
* of the empty feature is chosen by AI Platform.
* For AI Platform provided Tensorflow images, the key can be any friendly
- * name of the feature . Once specified, [
- * featureAttributions][Attribution.feature_attributions] will be keyed by
+ * name of the feature. Once specified,
+ * [featureAttributions][google.cloud.aiplatform.v1beta1.Attribution.feature_attributions] are keyed by
* this key (if not grouped with another feature).
* For custom images, the key must match with the key in
* [instance][google.cloud.aiplatform.v1beta1.ExplainRequest.instances].
@@ -82,8 +82,8 @@ public interface ExplanationMetadataOrBuilder
* name specified as the key in [ExplanationMetadata.inputs][google.cloud.aiplatform.v1beta1.ExplanationMetadata.inputs]. The baseline
* of the empty feature is chosen by AI Platform.
* For AI Platform provided Tensorflow images, the key can be any friendly
- * name of the feature . Once specified, [
- * featureAttributions][Attribution.feature_attributions] will be keyed by
+ * name of the feature. Once specified,
+ * [featureAttributions][google.cloud.aiplatform.v1beta1.Attribution.feature_attributions] are keyed by
* this key (if not grouped with another feature).
* For custom images, the key must match with the key in
* [instance][google.cloud.aiplatform.v1beta1.ExplainRequest.instances].
@@ -106,8 +106,8 @@ public interface ExplanationMetadataOrBuilder
* name specified as the key in [ExplanationMetadata.inputs][google.cloud.aiplatform.v1beta1.ExplanationMetadata.inputs]. The baseline
* of the empty feature is chosen by AI Platform.
* For AI Platform provided Tensorflow images, the key can be any friendly
- * name of the feature . Once specified, [
- * featureAttributions][Attribution.feature_attributions] will be keyed by
+ * name of the feature. Once specified,
+ * [featureAttributions][google.cloud.aiplatform.v1beta1.Attribution.feature_attributions] are keyed by
* this key (if not grouped with another feature).
* For custom images, the key must match with the key in
* [instance][google.cloud.aiplatform.v1beta1.ExplainRequest.instances].
@@ -130,8 +130,8 @@ com.google.cloud.aiplatform.v1beta1.ExplanationMetadata.InputMetadata getInputsO
* name specified as the key in [ExplanationMetadata.inputs][google.cloud.aiplatform.v1beta1.ExplanationMetadata.inputs]. The baseline
* of the empty feature is chosen by AI Platform.
* For AI Platform provided Tensorflow images, the key can be any friendly
- * name of the feature . Once specified, [
- * featureAttributions][Attribution.feature_attributions] will be keyed by
+ * name of the feature. Once specified,
+ * [featureAttributions][google.cloud.aiplatform.v1beta1.Attribution.feature_attributions] are keyed by
* this key (if not grouped with another feature).
* For custom images, the key must match with the key in
* [instance][google.cloud.aiplatform.v1beta1.ExplainRequest.instances].
diff --git a/proto-google-cloud-aiplatform-v1beta1/src/main/java/com/google/cloud/aiplatform/v1beta1/ExplanationMetadataOverride.java b/proto-google-cloud-aiplatform-v1beta1/src/main/java/com/google/cloud/aiplatform/v1beta1/ExplanationMetadataOverride.java
new file mode 100644
index 000000000..e1ed5ec8d
--- /dev/null
+++ b/proto-google-cloud-aiplatform-v1beta1/src/main/java/com/google/cloud/aiplatform/v1beta1/ExplanationMetadataOverride.java
@@ -0,0 +1,2094 @@
+/*
+ * Copyright 2020 Google LLC
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * https://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+// Generated by the protocol buffer compiler. DO NOT EDIT!
+// source: google/cloud/aiplatform/v1beta1/explanation.proto
+
+package com.google.cloud.aiplatform.v1beta1;
+
+/**
+ *
+ *
+ *
+ * The [ExplanationMetadata][google.cloud.aiplatform.v1beta1.ExplanationMetadata] entries that can be overridden at
+ * [online explanation][google.cloud.aiplatform.v1beta1.PredictionService.Explain] time.
+ *
+ *
+ * Protobuf type {@code google.cloud.aiplatform.v1beta1.ExplanationMetadataOverride}
+ */
+public final class ExplanationMetadataOverride extends com.google.protobuf.GeneratedMessageV3
+ implements
+ // @@protoc_insertion_point(message_implements:google.cloud.aiplatform.v1beta1.ExplanationMetadataOverride)
+ ExplanationMetadataOverrideOrBuilder {
+ private static final long serialVersionUID = 0L;
+ // Use ExplanationMetadataOverride.newBuilder() to construct.
+ private ExplanationMetadataOverride(com.google.protobuf.GeneratedMessageV3.Builder> builder) {
+ super(builder);
+ }
+
+ private ExplanationMetadataOverride() {}
+
+ @java.lang.Override
+ @SuppressWarnings({"unused"})
+ protected java.lang.Object newInstance(UnusedPrivateParameter unused) {
+ return new ExplanationMetadataOverride();
+ }
+
+ @java.lang.Override
+ public final com.google.protobuf.UnknownFieldSet getUnknownFields() {
+ return this.unknownFields;
+ }
+
+ private ExplanationMetadataOverride(
+ com.google.protobuf.CodedInputStream input,
+ com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+ throws com.google.protobuf.InvalidProtocolBufferException {
+ this();
+ if (extensionRegistry == null) {
+ throw new java.lang.NullPointerException();
+ }
+ int mutable_bitField0_ = 0;
+ com.google.protobuf.UnknownFieldSet.Builder unknownFields =
+ com.google.protobuf.UnknownFieldSet.newBuilder();
+ try {
+ boolean done = false;
+ while (!done) {
+ int tag = input.readTag();
+ switch (tag) {
+ case 0:
+ done = true;
+ break;
+ case 10:
+ {
+ if (!((mutable_bitField0_ & 0x00000001) != 0)) {
+ inputs_ =
+ com.google.protobuf.MapField.newMapField(InputsDefaultEntryHolder.defaultEntry);
+ mutable_bitField0_ |= 0x00000001;
+ }
+ com.google.protobuf.MapEntry<
+ java.lang.String,
+ com.google.cloud.aiplatform.v1beta1.ExplanationMetadataOverride
+ .InputMetadataOverride>
+ inputs__ =
+ input.readMessage(
+ InputsDefaultEntryHolder.defaultEntry.getParserForType(),
+ extensionRegistry);
+ inputs_.getMutableMap().put(inputs__.getKey(), inputs__.getValue());
+ break;
+ }
+ default:
+ {
+ if (!parseUnknownField(input, unknownFields, extensionRegistry, tag)) {
+ done = true;
+ }
+ break;
+ }
+ }
+ }
+ } catch (com.google.protobuf.InvalidProtocolBufferException e) {
+ throw e.setUnfinishedMessage(this);
+ } catch (java.io.IOException e) {
+ throw new com.google.protobuf.InvalidProtocolBufferException(e).setUnfinishedMessage(this);
+ } finally {
+ this.unknownFields = unknownFields.build();
+ makeExtensionsImmutable();
+ }
+ }
+
+ public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
+ return com.google.cloud.aiplatform.v1beta1.ExplanationProto
+ .internal_static_google_cloud_aiplatform_v1beta1_ExplanationMetadataOverride_descriptor;
+ }
+
+ @SuppressWarnings({"rawtypes"})
+ @java.lang.Override
+ protected com.google.protobuf.MapField internalGetMapField(int number) {
+ switch (number) {
+ case 1:
+ return internalGetInputs();
+ default:
+ throw new RuntimeException("Invalid map field number: " + number);
+ }
+ }
+
+ @java.lang.Override
+ protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
+ internalGetFieldAccessorTable() {
+ return com.google.cloud.aiplatform.v1beta1.ExplanationProto
+ .internal_static_google_cloud_aiplatform_v1beta1_ExplanationMetadataOverride_fieldAccessorTable
+ .ensureFieldAccessorsInitialized(
+ com.google.cloud.aiplatform.v1beta1.ExplanationMetadataOverride.class,
+ com.google.cloud.aiplatform.v1beta1.ExplanationMetadataOverride.Builder.class);
+ }
+
+ public interface InputMetadataOverrideOrBuilder
+ extends
+ // @@protoc_insertion_point(interface_extends:google.cloud.aiplatform.v1beta1.ExplanationMetadataOverride.InputMetadataOverride)
+ com.google.protobuf.MessageOrBuilder {
+
+ /**
+ *
+ *
+ *
+ * Baseline inputs for this feature.
+ * This overrides the `input_baseline` field of the
+ * [ExplanationMetadata.InputMetadata][google.cloud.aiplatform.v1beta1.ExplanationMetadata.InputMetadata]
+ * object of the corresponding feature's input metadata. If it's not
+ * specified, the original baselines are not overridden.
+ *
+ *
+ * repeated .google.protobuf.Value input_baselines = 1;
+ */
+ java.util.List
+ * Baseline inputs for this feature.
+ * This overrides the `input_baseline` field of the
+ * [ExplanationMetadata.InputMetadata][google.cloud.aiplatform.v1beta1.ExplanationMetadata.InputMetadata]
+ * object of the corresponding feature's input metadata. If it's not
+ * specified, the original baselines are not overridden.
+ *
+ *
+ * repeated .google.protobuf.Value input_baselines = 1;
+ */
+ com.google.protobuf.Value getInputBaselines(int index);
+ /**
+ *
+ *
+ *
+ * Baseline inputs for this feature.
+ * This overrides the `input_baseline` field of the
+ * [ExplanationMetadata.InputMetadata][google.cloud.aiplatform.v1beta1.ExplanationMetadata.InputMetadata]
+ * object of the corresponding feature's input metadata. If it's not
+ * specified, the original baselines are not overridden.
+ *
+ *
+ * repeated .google.protobuf.Value input_baselines = 1;
+ */
+ int getInputBaselinesCount();
+ /**
+ *
+ *
+ *
+ * Baseline inputs for this feature.
+ * This overrides the `input_baseline` field of the
+ * [ExplanationMetadata.InputMetadata][google.cloud.aiplatform.v1beta1.ExplanationMetadata.InputMetadata]
+ * object of the corresponding feature's input metadata. If it's not
+ * specified, the original baselines are not overridden.
+ *
+ *
+ * repeated .google.protobuf.Value input_baselines = 1;
+ */
+ java.util.List extends com.google.protobuf.ValueOrBuilder> getInputBaselinesOrBuilderList();
+ /**
+ *
+ *
+ *
+ * Baseline inputs for this feature.
+ * This overrides the `input_baseline` field of the
+ * [ExplanationMetadata.InputMetadata][google.cloud.aiplatform.v1beta1.ExplanationMetadata.InputMetadata]
+ * object of the corresponding feature's input metadata. If it's not
+ * specified, the original baselines are not overridden.
+ *
+ *
+ * repeated .google.protobuf.Value input_baselines = 1;
+ */
+ com.google.protobuf.ValueOrBuilder getInputBaselinesOrBuilder(int index);
+ }
+ /**
+ *
+ *
+ *
+ * The [input metadata][google.cloud.aiplatform.v1beta1.ExplanationMetadata.InputMetadata] entries to be
+ * overridden.
+ *
+ *
+ * Protobuf type {@code
+ * google.cloud.aiplatform.v1beta1.ExplanationMetadataOverride.InputMetadataOverride}
+ */
+ public static final class InputMetadataOverride extends com.google.protobuf.GeneratedMessageV3
+ implements
+ // @@protoc_insertion_point(message_implements:google.cloud.aiplatform.v1beta1.ExplanationMetadataOverride.InputMetadataOverride)
+ InputMetadataOverrideOrBuilder {
+ private static final long serialVersionUID = 0L;
+ // Use InputMetadataOverride.newBuilder() to construct.
+ private InputMetadataOverride(com.google.protobuf.GeneratedMessageV3.Builder> builder) {
+ super(builder);
+ }
+
+ private InputMetadataOverride() {
+ inputBaselines_ = java.util.Collections.emptyList();
+ }
+
+ @java.lang.Override
+ @SuppressWarnings({"unused"})
+ protected java.lang.Object newInstance(UnusedPrivateParameter unused) {
+ return new InputMetadataOverride();
+ }
+
+ @java.lang.Override
+ public final com.google.protobuf.UnknownFieldSet getUnknownFields() {
+ return this.unknownFields;
+ }
+
+ private InputMetadataOverride(
+ com.google.protobuf.CodedInputStream input,
+ com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+ throws com.google.protobuf.InvalidProtocolBufferException {
+ this();
+ if (extensionRegistry == null) {
+ throw new java.lang.NullPointerException();
+ }
+ int mutable_bitField0_ = 0;
+ com.google.protobuf.UnknownFieldSet.Builder unknownFields =
+ com.google.protobuf.UnknownFieldSet.newBuilder();
+ try {
+ boolean done = false;
+ while (!done) {
+ int tag = input.readTag();
+ switch (tag) {
+ case 0:
+ done = true;
+ break;
+ case 10:
+ {
+ if (!((mutable_bitField0_ & 0x00000001) != 0)) {
+ inputBaselines_ = new java.util.ArrayList
+ * Baseline inputs for this feature.
+ * This overrides the `input_baseline` field of the
+ * [ExplanationMetadata.InputMetadata][google.cloud.aiplatform.v1beta1.ExplanationMetadata.InputMetadata]
+ * object of the corresponding feature's input metadata. If it's not
+ * specified, the original baselines are not overridden.
+ *
+ *
+ * repeated .google.protobuf.Value input_baselines = 1;
+ */
+ @java.lang.Override
+ public java.util.List
+ * Baseline inputs for this feature.
+ * This overrides the `input_baseline` field of the
+ * [ExplanationMetadata.InputMetadata][google.cloud.aiplatform.v1beta1.ExplanationMetadata.InputMetadata]
+ * object of the corresponding feature's input metadata. If it's not
+ * specified, the original baselines are not overridden.
+ *
+ *
+ * repeated .google.protobuf.Value input_baselines = 1;
+ */
+ @java.lang.Override
+ public java.util.List extends com.google.protobuf.ValueOrBuilder>
+ getInputBaselinesOrBuilderList() {
+ return inputBaselines_;
+ }
+ /**
+ *
+ *
+ *
+ * Baseline inputs for this feature.
+ * This overrides the `input_baseline` field of the
+ * [ExplanationMetadata.InputMetadata][google.cloud.aiplatform.v1beta1.ExplanationMetadata.InputMetadata]
+ * object of the corresponding feature's input metadata. If it's not
+ * specified, the original baselines are not overridden.
+ *
+ *
+ * repeated .google.protobuf.Value input_baselines = 1;
+ */
+ @java.lang.Override
+ public int getInputBaselinesCount() {
+ return inputBaselines_.size();
+ }
+ /**
+ *
+ *
+ *
+ * Baseline inputs for this feature.
+ * This overrides the `input_baseline` field of the
+ * [ExplanationMetadata.InputMetadata][google.cloud.aiplatform.v1beta1.ExplanationMetadata.InputMetadata]
+ * object of the corresponding feature's input metadata. If it's not
+ * specified, the original baselines are not overridden.
+ *
+ *
+ * repeated .google.protobuf.Value input_baselines = 1;
+ */
+ @java.lang.Override
+ public com.google.protobuf.Value getInputBaselines(int index) {
+ return inputBaselines_.get(index);
+ }
+ /**
+ *
+ *
+ *
+ * Baseline inputs for this feature.
+ * This overrides the `input_baseline` field of the
+ * [ExplanationMetadata.InputMetadata][google.cloud.aiplatform.v1beta1.ExplanationMetadata.InputMetadata]
+ * object of the corresponding feature's input metadata. If it's not
+ * specified, the original baselines are not overridden.
+ *
+ *
+ * repeated .google.protobuf.Value input_baselines = 1;
+ */
+ @java.lang.Override
+ public com.google.protobuf.ValueOrBuilder getInputBaselinesOrBuilder(int index) {
+ return inputBaselines_.get(index);
+ }
+
+ private byte memoizedIsInitialized = -1;
+
+ @java.lang.Override
+ public final boolean isInitialized() {
+ byte isInitialized = memoizedIsInitialized;
+ if (isInitialized == 1) return true;
+ if (isInitialized == 0) return false;
+
+ memoizedIsInitialized = 1;
+ return true;
+ }
+
+ @java.lang.Override
+ public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException {
+ for (int i = 0; i < inputBaselines_.size(); i++) {
+ output.writeMessage(1, inputBaselines_.get(i));
+ }
+ unknownFields.writeTo(output);
+ }
+
+ @java.lang.Override
+ public int getSerializedSize() {
+ int size = memoizedSize;
+ if (size != -1) return size;
+
+ size = 0;
+ for (int i = 0; i < inputBaselines_.size(); i++) {
+ size += com.google.protobuf.CodedOutputStream.computeMessageSize(1, inputBaselines_.get(i));
+ }
+ size += unknownFields.getSerializedSize();
+ memoizedSize = size;
+ return size;
+ }
+
+ @java.lang.Override
+ public boolean equals(final java.lang.Object obj) {
+ if (obj == this) {
+ return true;
+ }
+ if (!(obj
+ instanceof
+ com.google.cloud.aiplatform.v1beta1.ExplanationMetadataOverride.InputMetadataOverride)) {
+ return super.equals(obj);
+ }
+ com.google.cloud.aiplatform.v1beta1.ExplanationMetadataOverride.InputMetadataOverride other =
+ (com.google.cloud.aiplatform.v1beta1.ExplanationMetadataOverride.InputMetadataOverride)
+ obj;
+
+ if (!getInputBaselinesList().equals(other.getInputBaselinesList())) return false;
+ if (!unknownFields.equals(other.unknownFields)) return false;
+ return true;
+ }
+
+ @java.lang.Override
+ public int hashCode() {
+ if (memoizedHashCode != 0) {
+ return memoizedHashCode;
+ }
+ int hash = 41;
+ hash = (19 * hash) + getDescriptor().hashCode();
+ if (getInputBaselinesCount() > 0) {
+ hash = (37 * hash) + INPUT_BASELINES_FIELD_NUMBER;
+ hash = (53 * hash) + getInputBaselinesList().hashCode();
+ }
+ hash = (29 * hash) + unknownFields.hashCode();
+ memoizedHashCode = hash;
+ return hash;
+ }
+
+    // Standard generated parse entry points for InputMetadataOverride; each
+    // overload delegates to PARSER (or the GeneratedMessageV3 I/O helpers) for
+    // one input representation: ByteBuffer, ByteString, byte[], or streams.
+    public static com.google.cloud.aiplatform.v1beta1.ExplanationMetadataOverride
+            .InputMetadataOverride
+        parseFrom(java.nio.ByteBuffer data)
+            throws com.google.protobuf.InvalidProtocolBufferException {
+      return PARSER.parseFrom(data);
+    }
+
+    public static com.google.cloud.aiplatform.v1beta1.ExplanationMetadataOverride
+            .InputMetadataOverride
+        parseFrom(
+            java.nio.ByteBuffer data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+            throws com.google.protobuf.InvalidProtocolBufferException {
+      return PARSER.parseFrom(data, extensionRegistry);
+    }
+
+    public static com.google.cloud.aiplatform.v1beta1.ExplanationMetadataOverride
+            .InputMetadataOverride
+        parseFrom(com.google.protobuf.ByteString data)
+            throws com.google.protobuf.InvalidProtocolBufferException {
+      return PARSER.parseFrom(data);
+    }
+
+    public static com.google.cloud.aiplatform.v1beta1.ExplanationMetadataOverride
+            .InputMetadataOverride
+        parseFrom(
+            com.google.protobuf.ByteString data,
+            com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+            throws com.google.protobuf.InvalidProtocolBufferException {
+      return PARSER.parseFrom(data, extensionRegistry);
+    }
+
+    public static com.google.cloud.aiplatform.v1beta1.ExplanationMetadataOverride
+            .InputMetadataOverride
+        parseFrom(byte[] data) throws com.google.protobuf.InvalidProtocolBufferException {
+      return PARSER.parseFrom(data);
+    }
+
+    public static com.google.cloud.aiplatform.v1beta1.ExplanationMetadataOverride
+            .InputMetadataOverride
+        parseFrom(byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+            throws com.google.protobuf.InvalidProtocolBufferException {
+      return PARSER.parseFrom(data, extensionRegistry);
+    }
+
+    // Stream variants wrap IOExceptions rather than InvalidProtocolBufferException.
+    public static com.google.cloud.aiplatform.v1beta1.ExplanationMetadataOverride
+            .InputMetadataOverride
+        parseFrom(java.io.InputStream input) throws java.io.IOException {
+      return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
+    }
+
+    public static com.google.cloud.aiplatform.v1beta1.ExplanationMetadataOverride
+            .InputMetadataOverride
+        parseFrom(
+            java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+            throws java.io.IOException {
+      return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
+          PARSER, input, extensionRegistry);
+    }
+
+    // Delimited variants read a varint length prefix before the message bytes.
+    public static com.google.cloud.aiplatform.v1beta1.ExplanationMetadataOverride
+            .InputMetadataOverride
+        parseDelimitedFrom(java.io.InputStream input) throws java.io.IOException {
+      return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(PARSER, input);
+    }
+
+    public static com.google.cloud.aiplatform.v1beta1.ExplanationMetadataOverride
+            .InputMetadataOverride
+        parseDelimitedFrom(
+            java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+            throws java.io.IOException {
+      return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(
+          PARSER, input, extensionRegistry);
+    }
+
+    public static com.google.cloud.aiplatform.v1beta1.ExplanationMetadataOverride
+            .InputMetadataOverride
+        parseFrom(com.google.protobuf.CodedInputStream input) throws java.io.IOException {
+      return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
+    }
+
+    public static com.google.cloud.aiplatform.v1beta1.ExplanationMetadataOverride
+            .InputMetadataOverride
+        parseFrom(
+            com.google.protobuf.CodedInputStream input,
+            com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+            throws java.io.IOException {
+      return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
+          PARSER, input, extensionRegistry);
+    }
+
+    // Builder factory boilerplate: all builders derive from DEFAULT_INSTANCE
+    // so a fresh builder starts from the shared default state.
+    @java.lang.Override
+    public Builder newBuilderForType() {
+      return newBuilder();
+    }
+
+    public static Builder newBuilder() {
+      return DEFAULT_INSTANCE.toBuilder();
+    }
+
+    public static Builder newBuilder(
+        com.google.cloud.aiplatform.v1beta1.ExplanationMetadataOverride.InputMetadataOverride
+            prototype) {
+      return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
+    }
+
+    @java.lang.Override
+    public Builder toBuilder() {
+      // Skip the mergeFrom when this is the shared default instance.
+      return this == DEFAULT_INSTANCE ? new Builder() : new Builder().mergeFrom(this);
+    }
+
+    @java.lang.Override
+    protected Builder newBuilderForType(
+        com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
+      Builder builder = new Builder(parent);
+      return builder;
+    }
+ /**
+ *
+ *
+ *
+ * The [input metadata][google.cloud.aiplatform.v1beta1.ExplanationMetadata.InputMetadata] entries to be
+ * overridden.
+ *
+ *
+ * Protobuf type {@code
+ * google.cloud.aiplatform.v1beta1.ExplanationMetadataOverride.InputMetadataOverride}
+ */
+ public static final class Builder
+ extends com.google.protobuf.GeneratedMessageV3.Builder
+ * Baseline inputs for this feature.
+ * This overrides the `input_baseline` field of the
+ * [ExplanationMetadata.InputMetadata][google.cloud.aiplatform.v1beta1.ExplanationMetadata.InputMetadata]
+ * object of the corresponding feature's input metadata. If it's not
+ * specified, the original baselines are not overridden.
+ *
+ *
+ * repeated .google.protobuf.Value input_baselines = 1;
+ */
+ public java.util.List
+ * Baseline inputs for this feature.
+ * This overrides the `input_baseline` field of the
+ * [ExplanationMetadata.InputMetadata][google.cloud.aiplatform.v1beta1.ExplanationMetadata.InputMetadata]
+ * object of the corresponding feature's input metadata. If it's not
+ * specified, the original baselines are not overridden.
+ *
+ *
+ * repeated .google.protobuf.Value input_baselines = 1;
+ */
+      // Size of input_baselines; reads the nested field builder once it
+      // exists, otherwise the locally held list.
+      public int getInputBaselinesCount() {
+        if (inputBaselinesBuilder_ == null) {
+          return inputBaselines_.size();
+        } else {
+          return inputBaselinesBuilder_.getCount();
+        }
+      }
+ /**
+ *
+ *
+ *
+ * Baseline inputs for this feature.
+ * This overrides the `input_baseline` field of the
+ * [ExplanationMetadata.InputMetadata][google.cloud.aiplatform.v1beta1.ExplanationMetadata.InputMetadata]
+ * object of the corresponding feature's input metadata. If it's not
+ * specified, the original baselines are not overridden.
+ *
+ *
+ * repeated .google.protobuf.Value input_baselines = 1;
+ */
+      // Element accessor; delegates to the field builder when present.
+      public com.google.protobuf.Value getInputBaselines(int index) {
+        if (inputBaselinesBuilder_ == null) {
+          return inputBaselines_.get(index);
+        } else {
+          return inputBaselinesBuilder_.getMessage(index);
+        }
+      }
+ /**
+ *
+ *
+ *
+ * Baseline inputs for this feature.
+ * This overrides the `input_baseline` field of the
+ * [ExplanationMetadata.InputMetadata][google.cloud.aiplatform.v1beta1.ExplanationMetadata.InputMetadata]
+ * object of the corresponding feature's input metadata. If it's not
+ * specified, the original baselines are not overridden.
+ *
+ *
+ * repeated .google.protobuf.Value input_baselines = 1;
+ */
+      // Replaces the element at `index`; null values are rejected eagerly.
+      public Builder setInputBaselines(int index, com.google.protobuf.Value value) {
+        if (inputBaselinesBuilder_ == null) {
+          if (value == null) {
+            throw new NullPointerException();
+          }
+          ensureInputBaselinesIsMutable();
+          inputBaselines_.set(index, value);
+          onChanged();
+        } else {
+          inputBaselinesBuilder_.setMessage(index, value);
+        }
+        return this;
+      }
+ /**
+ *
+ *
+ *
+ * Baseline inputs for this feature.
+ * This overrides the `input_baseline` field of the
+ * [ExplanationMetadata.InputMetadata][google.cloud.aiplatform.v1beta1.ExplanationMetadata.InputMetadata]
+ * object of the corresponding feature's input metadata. If it's not
+ * specified, the original baselines are not overridden.
+ *
+ *
+ * repeated .google.protobuf.Value input_baselines = 1;
+ */
+      // Replaces the element at `index` with the built value of `builderForValue`.
+      public Builder setInputBaselines(
+          int index, com.google.protobuf.Value.Builder builderForValue) {
+        if (inputBaselinesBuilder_ == null) {
+          ensureInputBaselinesIsMutable();
+          inputBaselines_.set(index, builderForValue.build());
+          onChanged();
+        } else {
+          inputBaselinesBuilder_.setMessage(index, builderForValue.build());
+        }
+        return this;
+      }
+ /**
+ *
+ *
+ *
+ * Baseline inputs for this feature.
+ * This overrides the `input_baseline` field of the
+ * [ExplanationMetadata.InputMetadata][google.cloud.aiplatform.v1beta1.ExplanationMetadata.InputMetadata]
+ * object of the corresponding feature's input metadata. If it's not
+ * specified, the original baselines are not overridden.
+ *
+ *
+ * repeated .google.protobuf.Value input_baselines = 1;
+ */
+      // Appends a value; null is rejected eagerly.
+      public Builder addInputBaselines(com.google.protobuf.Value value) {
+        if (inputBaselinesBuilder_ == null) {
+          if (value == null) {
+            throw new NullPointerException();
+          }
+          ensureInputBaselinesIsMutable();
+          inputBaselines_.add(value);
+          onChanged();
+        } else {
+          inputBaselinesBuilder_.addMessage(value);
+        }
+        return this;
+      }
+ /**
+ *
+ *
+ *
+ * Baseline inputs for this feature.
+ * This overrides the `input_baseline` field of the
+ * [ExplanationMetadata.InputMetadata][google.cloud.aiplatform.v1beta1.ExplanationMetadata.InputMetadata]
+ * object of the corresponding feature's input metadata. If it's not
+ * specified, the original baselines are not overridden.
+ *
+ *
+ * repeated .google.protobuf.Value input_baselines = 1;
+ */
+      // Inserts a value at `index`, shifting later elements; null rejected.
+      public Builder addInputBaselines(int index, com.google.protobuf.Value value) {
+        if (inputBaselinesBuilder_ == null) {
+          if (value == null) {
+            throw new NullPointerException();
+          }
+          ensureInputBaselinesIsMutable();
+          inputBaselines_.add(index, value);
+          onChanged();
+        } else {
+          inputBaselinesBuilder_.addMessage(index, value);
+        }
+        return this;
+      }
+ /**
+ *
+ *
+ *
+ * Baseline inputs for this feature.
+ * This overrides the `input_baseline` field of the
+ * [ExplanationMetadata.InputMetadata][google.cloud.aiplatform.v1beta1.ExplanationMetadata.InputMetadata]
+ * object of the corresponding feature's input metadata. If it's not
+ * specified, the original baselines are not overridden.
+ *
+ *
+ * repeated .google.protobuf.Value input_baselines = 1;
+ */
+      // Appends the built value of `builderForValue`.
+      public Builder addInputBaselines(com.google.protobuf.Value.Builder builderForValue) {
+        if (inputBaselinesBuilder_ == null) {
+          ensureInputBaselinesIsMutable();
+          inputBaselines_.add(builderForValue.build());
+          onChanged();
+        } else {
+          inputBaselinesBuilder_.addMessage(builderForValue.build());
+        }
+        return this;
+      }
+ /**
+ *
+ *
+ *
+ * Baseline inputs for this feature.
+ * This overrides the `input_baseline` field of the
+ * [ExplanationMetadata.InputMetadata][google.cloud.aiplatform.v1beta1.ExplanationMetadata.InputMetadata]
+ * object of the corresponding feature's input metadata. If it's not
+ * specified, the original baselines are not overridden.
+ *
+ *
+ * repeated .google.protobuf.Value input_baselines = 1;
+ */
+      // Inserts the built value of `builderForValue` at `index`.
+      public Builder addInputBaselines(
+          int index, com.google.protobuf.Value.Builder builderForValue) {
+        if (inputBaselinesBuilder_ == null) {
+          ensureInputBaselinesIsMutable();
+          inputBaselines_.add(index, builderForValue.build());
+          onChanged();
+        } else {
+          inputBaselinesBuilder_.addMessage(index, builderForValue.build());
+        }
+        return this;
+      }
+ /**
+ *
+ *
+ *
+ * Baseline inputs for this feature.
+ * This overrides the `input_baseline` field of the
+ * [ExplanationMetadata.InputMetadata][google.cloud.aiplatform.v1beta1.ExplanationMetadata.InputMetadata]
+ * object of the corresponding feature's input metadata. If it's not
+ * specified, the original baselines are not overridden.
+ *
+ *
+ * repeated .google.protobuf.Value input_baselines = 1;
+ */
+      // Appends every element of `values`.
+      // Fix: the extraction stripped the `<? ` of the wildcard, leaving the
+      // invalid `java.lang.Iterable extends ...>`; protoc generates
+      // `Iterable<? extends com.google.protobuf.Value>` for repeated-message
+      // addAll accessors, restored here.
+      public Builder addAllInputBaselines(
+          java.lang.Iterable<? extends com.google.protobuf.Value> values) {
+        if (inputBaselinesBuilder_ == null) {
+          ensureInputBaselinesIsMutable();
+          com.google.protobuf.AbstractMessageLite.Builder.addAll(values, inputBaselines_);
+          onChanged();
+        } else {
+          inputBaselinesBuilder_.addAllMessages(values);
+        }
+        return this;
+      }
+ /**
+ *
+ *
+ *
+ * Baseline inputs for this feature.
+ * This overrides the `input_baseline` field of the
+ * [ExplanationMetadata.InputMetadata][google.cloud.aiplatform.v1beta1.ExplanationMetadata.InputMetadata]
+ * object of the corresponding feature's input metadata. If it's not
+ * specified, the original baselines are not overridden.
+ *
+ *
+ * repeated .google.protobuf.Value input_baselines = 1;
+ */
+      // Resets input_baselines to empty and clears its has-bit.
+      public Builder clearInputBaselines() {
+        if (inputBaselinesBuilder_ == null) {
+          inputBaselines_ = java.util.Collections.emptyList();
+          bitField0_ = (bitField0_ & ~0x00000001);
+          onChanged();
+        } else {
+          inputBaselinesBuilder_.clear();
+        }
+        return this;
+      }
+ /**
+ *
+ *
+ *
+ * Baseline inputs for this feature.
+ * This overrides the `input_baseline` field of the
+ * [ExplanationMetadata.InputMetadata][google.cloud.aiplatform.v1beta1.ExplanationMetadata.InputMetadata]
+ * object of the corresponding feature's input metadata. If it's not
+ * specified, the original baselines are not overridden.
+ *
+ *
+ * repeated .google.protobuf.Value input_baselines = 1;
+ */
+      // Removes the element at `index`, shifting later elements down.
+      public Builder removeInputBaselines(int index) {
+        if (inputBaselinesBuilder_ == null) {
+          ensureInputBaselinesIsMutable();
+          inputBaselines_.remove(index);
+          onChanged();
+        } else {
+          inputBaselinesBuilder_.remove(index);
+        }
+        return this;
+      }
+ /**
+ *
+ *
+ *
+ * Baseline inputs for this feature.
+ * This overrides the `input_baseline` field of the
+ * [ExplanationMetadata.InputMetadata][google.cloud.aiplatform.v1beta1.ExplanationMetadata.InputMetadata]
+ * object of the corresponding feature's input metadata. If it's not
+ * specified, the original baselines are not overridden.
+ *
+ *
+ * repeated .google.protobuf.Value input_baselines = 1;
+ */
+      // Mutable sub-builder for the element at `index`; forces creation of
+      // the repeated field builder.
+      public com.google.protobuf.Value.Builder getInputBaselinesBuilder(int index) {
+        return getInputBaselinesFieldBuilder().getBuilder(index);
+      }
+ /**
+ *
+ *
+ *
+ * Baseline inputs for this feature.
+ * This overrides the `input_baseline` field of the
+ * [ExplanationMetadata.InputMetadata][google.cloud.aiplatform.v1beta1.ExplanationMetadata.InputMetadata]
+ * object of the corresponding feature's input metadata. If it's not
+ * specified, the original baselines are not overridden.
+ *
+ *
+ * repeated .google.protobuf.Value input_baselines = 1;
+ */
+      // Read-only view of the element at `index` (message or live builder).
+      public com.google.protobuf.ValueOrBuilder getInputBaselinesOrBuilder(int index) {
+        if (inputBaselinesBuilder_ == null) {
+          return inputBaselines_.get(index);
+        } else {
+          return inputBaselinesBuilder_.getMessageOrBuilder(index);
+        }
+      }
+ /**
+ *
+ *
+ *
+ * Baseline inputs for this feature.
+ * This overrides the `input_baseline` field of the
+ * [ExplanationMetadata.InputMetadata][google.cloud.aiplatform.v1beta1.ExplanationMetadata.InputMetadata]
+ * object of the corresponding feature's input metadata. If it's not
+ * specified, the original baselines are not overridden.
+ *
+ *
+ * repeated .google.protobuf.Value input_baselines = 1;
+ */
+      // Read-only view of the whole repeated field.
+      // Fix: the extraction stripped the `<? ` of the wildcard, leaving the
+      // invalid `java.util.List extends ...>`; protoc generates
+      // `List<? extends com.google.protobuf.ValueOrBuilder>` for the
+      // *OrBuilderList accessor, restored here.
+      public java.util.List<? extends com.google.protobuf.ValueOrBuilder>
+          getInputBaselinesOrBuilderList() {
+        if (inputBaselinesBuilder_ != null) {
+          return inputBaselinesBuilder_.getMessageOrBuilderList();
+        } else {
+          return java.util.Collections.unmodifiableList(inputBaselines_);
+        }
+      }
+ /**
+ *
+ *
+ *
+ * Baseline inputs for this feature.
+ * This overrides the `input_baseline` field of the
+ * [ExplanationMetadata.InputMetadata][google.cloud.aiplatform.v1beta1.ExplanationMetadata.InputMetadata]
+ * object of the corresponding feature's input metadata. If it's not
+ * specified, the original baselines are not overridden.
+ *
+ *
+ * repeated .google.protobuf.Value input_baselines = 1;
+ */
+      // Appends a default-valued element and returns its mutable builder.
+      public com.google.protobuf.Value.Builder addInputBaselinesBuilder() {
+        return getInputBaselinesFieldBuilder()
+            .addBuilder(com.google.protobuf.Value.getDefaultInstance());
+      }
+ /**
+ *
+ *
+ *
+ * Baseline inputs for this feature.
+ * This overrides the `input_baseline` field of the
+ * [ExplanationMetadata.InputMetadata][google.cloud.aiplatform.v1beta1.ExplanationMetadata.InputMetadata]
+ * object of the corresponding feature's input metadata. If it's not
+ * specified, the original baselines are not overridden.
+ *
+ *
+ * repeated .google.protobuf.Value input_baselines = 1;
+ */
+      // Inserts a default-valued element at `index` and returns its builder.
+      public com.google.protobuf.Value.Builder addInputBaselinesBuilder(int index) {
+        return getInputBaselinesFieldBuilder()
+            .addBuilder(index, com.google.protobuf.Value.getDefaultInstance());
+      }
+ /**
+ *
+ *
+ *
+ * Baseline inputs for this feature.
+ * This overrides the `input_baseline` field of the
+ * [ExplanationMetadata.InputMetadata][google.cloud.aiplatform.v1beta1.ExplanationMetadata.InputMetadata]
+ * object of the corresponding feature's input metadata. If it's not
+ * specified, the original baselines are not overridden.
+ *
+ *
+ * repeated .google.protobuf.Value input_baselines = 1;
+ */
+ public java.util.List
+ * Required. Overrides the [input metadata][google.cloud.aiplatform.v1beta1.ExplanationMetadata.inputs] of the features.
+ * The key is the name of the feature to be overridden. The keys specified
+ * here must exist in the input metadata to be overridden. If a feature is
+ * not specified here, the corresponding feature's input metadata is not
+ * overridden.
+ *
+ *
+ *
+ * map<string, .google.cloud.aiplatform.v1beta1.ExplanationMetadataOverride.InputMetadataOverride> inputs = 1 [(.google.api.field_behavior) = REQUIRED];
+ *
+ */
+  // True if `key` has an override entry; map keys are never null.
+  @java.lang.Override
+  public boolean containsInputs(java.lang.String key) {
+    if (key == null) {
+      throw new java.lang.NullPointerException();
+    }
+    return internalGetInputs().getMap().containsKey(key);
+  }
+  /** Use {@link #getInputsMap()} instead. */
+  @java.lang.Override
+  @java.lang.Deprecated
+  public java.util.Map<
+          java.lang.String,
+          com.google.cloud.aiplatform.v1beta1.ExplanationMetadataOverride.InputMetadataOverride>
+      getInputs() {
+    return getInputsMap();
+  }
+ /**
+ *
+ *
+ *
+ * Required. Overrides the [input metadata][google.cloud.aiplatform.v1beta1.ExplanationMetadata.inputs] of the features.
+ * The key is the name of the feature to be overridden. The keys specified
+ * here must exist in the input metadata to be overridden. If a feature is
+ * not specified here, the corresponding feature's input metadata is not
+ * overridden.
+ *
+ *
+ *
+ * map<string, .google.cloud.aiplatform.v1beta1.ExplanationMetadataOverride.InputMetadataOverride> inputs = 1 [(.google.api.field_behavior) = REQUIRED];
+ *
+ */
+  @java.lang.Override
+  public java.util.Map<
+          java.lang.String,
+          com.google.cloud.aiplatform.v1beta1.ExplanationMetadataOverride.InputMetadataOverride>
+      getInputsMap() {
+    // Read-only map view backed by the MapField.
+    return internalGetInputs().getMap();
+  }
+ /**
+ *
+ *
+ *
+ * Required. Overrides the [input metadata][google.cloud.aiplatform.v1beta1.ExplanationMetadata.inputs] of the features.
+ * The key is the name of the feature to be overridden. The keys specified
+ * here must exist in the input metadata to be overridden. If a feature is
+ * not specified here, the corresponding feature's input metadata is not
+ * overridden.
+ *
+ *
+ *
+ * map<string, .google.cloud.aiplatform.v1beta1.ExplanationMetadataOverride.InputMetadataOverride> inputs = 1 [(.google.api.field_behavior) = REQUIRED];
+ *
+ */
+  // Returns the entry for `key`, or `defaultValue` when absent; null key rejected.
+  @java.lang.Override
+  public com.google.cloud.aiplatform.v1beta1.ExplanationMetadataOverride.InputMetadataOverride
+      getInputsOrDefault(
+          java.lang.String key,
+          com.google.cloud.aiplatform.v1beta1.ExplanationMetadataOverride.InputMetadataOverride
+              defaultValue) {
+    if (key == null) {
+      throw new java.lang.NullPointerException();
+    }
+    java.util.Map<
+            java.lang.String,
+            com.google.cloud.aiplatform.v1beta1.ExplanationMetadataOverride.InputMetadataOverride>
+        map = internalGetInputs().getMap();
+    return map.containsKey(key) ? map.get(key) : defaultValue;
+  }
+ /**
+ *
+ *
+ *
+ * Required. Overrides the [input metadata][google.cloud.aiplatform.v1beta1.ExplanationMetadata.inputs] of the features.
+ * The key is the name of the feature to be overridden. The keys specified
+ * here must exist in the input metadata to be overridden. If a feature is
+ * not specified here, the corresponding feature's input metadata is not
+ * overridden.
+ *
+ *
+ *
+ * map<string, .google.cloud.aiplatform.v1beta1.ExplanationMetadataOverride.InputMetadataOverride> inputs = 1 [(.google.api.field_behavior) = REQUIRED];
+ *
+ */
+  // Returns the entry for `key`; throws IllegalArgumentException when absent.
+  @java.lang.Override
+  public com.google.cloud.aiplatform.v1beta1.ExplanationMetadataOverride.InputMetadataOverride
+      getInputsOrThrow(java.lang.String key) {
+    if (key == null) {
+      throw new java.lang.NullPointerException();
+    }
+    java.util.Map<
+            java.lang.String,
+            com.google.cloud.aiplatform.v1beta1.ExplanationMetadataOverride.InputMetadataOverride>
+        map = internalGetInputs().getMap();
+    if (!map.containsKey(key)) {
+      throw new java.lang.IllegalArgumentException();
+    }
+    return map.get(key);
+  }
+
+  // Memoized init check: -1 = unknown, 0 = false, 1 = true. This message has
+  // no required fields, so it is always initialized after the first call.
+  private byte memoizedIsInitialized = -1;
+
+  @java.lang.Override
+  public final boolean isInitialized() {
+    byte isInitialized = memoizedIsInitialized;
+    if (isInitialized == 1) return true;
+    if (isInitialized == 0) return false;
+
+    memoizedIsInitialized = 1;
+    return true;
+  }
+
+  @java.lang.Override
+  public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException {
+    // Serializes the inputs map as repeated entry messages (field 1), then
+    // any unknown fields carried over from parsing.
+    com.google.protobuf.GeneratedMessageV3.serializeStringMapTo(
+        output, internalGetInputs(), InputsDefaultEntryHolder.defaultEntry, 1);
+    unknownFields.writeTo(output);
+  }
+
+  @java.lang.Override
+  public int getSerializedSize() {
+    // Memoized; -1 means not yet computed.
+    int size = memoizedSize;
+    if (size != -1) return size;
+
+    size = 0;
+    // Each map entry is sized as a synthetic MapEntry message on field 1.
+    for (java.util.Map.Entry<
+            java.lang.String,
+            com.google.cloud.aiplatform.v1beta1.ExplanationMetadataOverride.InputMetadataOverride>
+        entry : internalGetInputs().getMap().entrySet()) {
+      com.google.protobuf.MapEntry<
+              java.lang.String,
+              com.google.cloud.aiplatform.v1beta1.ExplanationMetadataOverride.InputMetadataOverride>
+          inputs__ =
+              InputsDefaultEntryHolder.defaultEntry
+                  .newBuilderForType()
+                  .setKey(entry.getKey())
+                  .setValue(entry.getValue())
+                  .build();
+      size += com.google.protobuf.CodedOutputStream.computeMessageSize(1, inputs__);
+    }
+    size += unknownFields.getSerializedSize();
+    memoizedSize = size;
+    return size;
+  }
+
+  @java.lang.Override
+  public boolean equals(final java.lang.Object obj) {
+    if (obj == this) {
+      return true;
+    }
+    if (!(obj instanceof com.google.cloud.aiplatform.v1beta1.ExplanationMetadataOverride)) {
+      return super.equals(obj);
+    }
+    com.google.cloud.aiplatform.v1beta1.ExplanationMetadataOverride other =
+        (com.google.cloud.aiplatform.v1beta1.ExplanationMetadataOverride) obj;
+
+    // Field-wise equality: inputs map plus unknown fields.
+    if (!internalGetInputs().equals(other.internalGetInputs())) return false;
+    if (!unknownFields.equals(other.unknownFields)) return false;
+    return true;
+  }
+
+  @java.lang.Override
+  public int hashCode() {
+    // Memoized; 0 means not yet computed (the mixing below never yields 0
+    // for the seed 41, so 0 is a safe sentinel).
+    if (memoizedHashCode != 0) {
+      return memoizedHashCode;
+    }
+    int hash = 41;
+    hash = (19 * hash) + getDescriptor().hashCode();
+    if (!internalGetInputs().getMap().isEmpty()) {
+      hash = (37 * hash) + INPUTS_FIELD_NUMBER;
+      hash = (53 * hash) + internalGetInputs().hashCode();
+    }
+    hash = (29 * hash) + unknownFields.hashCode();
+    memoizedHashCode = hash;
+    return hash;
+  }
+
+  // Standard generated parse entry points for ExplanationMetadataOverride;
+  // each overload delegates to PARSER or the GeneratedMessageV3 I/O helpers.
+  public static com.google.cloud.aiplatform.v1beta1.ExplanationMetadataOverride parseFrom(
+      java.nio.ByteBuffer data) throws com.google.protobuf.InvalidProtocolBufferException {
+    return PARSER.parseFrom(data);
+  }
+
+  public static com.google.cloud.aiplatform.v1beta1.ExplanationMetadataOverride parseFrom(
+      java.nio.ByteBuffer data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+      throws com.google.protobuf.InvalidProtocolBufferException {
+    return PARSER.parseFrom(data, extensionRegistry);
+  }
+
+  public static com.google.cloud.aiplatform.v1beta1.ExplanationMetadataOverride parseFrom(
+      com.google.protobuf.ByteString data)
+      throws com.google.protobuf.InvalidProtocolBufferException {
+    return PARSER.parseFrom(data);
+  }
+
+  public static com.google.cloud.aiplatform.v1beta1.ExplanationMetadataOverride parseFrom(
+      com.google.protobuf.ByteString data,
+      com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+      throws com.google.protobuf.InvalidProtocolBufferException {
+    return PARSER.parseFrom(data, extensionRegistry);
+  }
+
+  public static com.google.cloud.aiplatform.v1beta1.ExplanationMetadataOverride parseFrom(
+      byte[] data) throws com.google.protobuf.InvalidProtocolBufferException {
+    return PARSER.parseFrom(data);
+  }
+
+  public static com.google.cloud.aiplatform.v1beta1.ExplanationMetadataOverride parseFrom(
+      byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+      throws com.google.protobuf.InvalidProtocolBufferException {
+    return PARSER.parseFrom(data, extensionRegistry);
+  }
+
+  // Stream variants surface IOExceptions rather than InvalidProtocolBufferException.
+  public static com.google.cloud.aiplatform.v1beta1.ExplanationMetadataOverride parseFrom(
+      java.io.InputStream input) throws java.io.IOException {
+    return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
+  }
+
+  public static com.google.cloud.aiplatform.v1beta1.ExplanationMetadataOverride parseFrom(
+      java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+      throws java.io.IOException {
+    return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
+        PARSER, input, extensionRegistry);
+  }
+
+  // Delimited variants read a varint length prefix before the message bytes.
+  public static com.google.cloud.aiplatform.v1beta1.ExplanationMetadataOverride parseDelimitedFrom(
+      java.io.InputStream input) throws java.io.IOException {
+    return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(PARSER, input);
+  }
+
+  public static com.google.cloud.aiplatform.v1beta1.ExplanationMetadataOverride parseDelimitedFrom(
+      java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+      throws java.io.IOException {
+    return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(
+        PARSER, input, extensionRegistry);
+  }
+
+  public static com.google.cloud.aiplatform.v1beta1.ExplanationMetadataOverride parseFrom(
+      com.google.protobuf.CodedInputStream input) throws java.io.IOException {
+    return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
+  }
+
+  public static com.google.cloud.aiplatform.v1beta1.ExplanationMetadataOverride parseFrom(
+      com.google.protobuf.CodedInputStream input,
+      com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+      throws java.io.IOException {
+    return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
+        PARSER, input, extensionRegistry);
+  }
+
+  // Builder factory boilerplate: all builders derive from DEFAULT_INSTANCE.
+  @java.lang.Override
+  public Builder newBuilderForType() {
+    return newBuilder();
+  }
+
+  public static Builder newBuilder() {
+    return DEFAULT_INSTANCE.toBuilder();
+  }
+
+  public static Builder newBuilder(
+      com.google.cloud.aiplatform.v1beta1.ExplanationMetadataOverride prototype) {
+    return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
+  }
+
+  @java.lang.Override
+  public Builder toBuilder() {
+    // Skip the mergeFrom when this is the shared default instance.
+    return this == DEFAULT_INSTANCE ? new Builder() : new Builder().mergeFrom(this);
+  }
+
+  @java.lang.Override
+  protected Builder newBuilderForType(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
+    Builder builder = new Builder(parent);
+    return builder;
+  }
+ /**
+ *
+ *
+ *
+ * The [ExplanationMetadata][google.cloud.aiplatform.v1beta1.ExplanationMetadata] entries that can be overridden at
+ * [online explanation][google.cloud.aiplatform.v1beta1.PredictionService.Explain] time.
+ *
+ *
+ * Protobuf type {@code google.cloud.aiplatform.v1beta1.ExplanationMetadataOverride}
+ */
+ public static final class Builder extends com.google.protobuf.GeneratedMessageV3.Builder
+ * Required. Overrides the [input metadata][google.cloud.aiplatform.v1beta1.ExplanationMetadata.inputs] of the features.
+ * The key is the name of the feature to be overridden. The keys specified
+ * here must exist in the input metadata to be overridden. If a feature is
+ * not specified here, the corresponding feature's input metadata is not
+ * overridden.
+ *
+ *
+ *
+ * map<string, .google.cloud.aiplatform.v1beta1.ExplanationMetadataOverride.InputMetadataOverride> inputs = 1 [(.google.api.field_behavior) = REQUIRED];
+ *
+ */
+    // True if `key` has an override entry in this builder; null key rejected.
+    @java.lang.Override
+    public boolean containsInputs(java.lang.String key) {
+      if (key == null) {
+        throw new java.lang.NullPointerException();
+      }
+      return internalGetInputs().getMap().containsKey(key);
+    }
+    /** Use {@link #getInputsMap()} instead. */
+    @java.lang.Override
+    @java.lang.Deprecated
+    public java.util.Map<
+            java.lang.String,
+            com.google.cloud.aiplatform.v1beta1.ExplanationMetadataOverride.InputMetadataOverride>
+        getInputs() {
+      return getInputsMap();
+    }
+ /**
+ *
+ *
+ *
+ * Required. Overrides the [input metadata][google.cloud.aiplatform.v1beta1.ExplanationMetadata.inputs] of the features.
+ * The key is the name of the feature to be overridden. The keys specified
+ * here must exist in the input metadata to be overridden. If a feature is
+ * not specified here, the corresponding feature's input metadata is not
+ * overridden.
+ *
+ *
+ *
+ * map<string, .google.cloud.aiplatform.v1beta1.ExplanationMetadataOverride.InputMetadataOverride> inputs = 1 [(.google.api.field_behavior) = REQUIRED];
+ *
+ */
+    @java.lang.Override
+    public java.util.Map<
+            java.lang.String,
+            com.google.cloud.aiplatform.v1beta1.ExplanationMetadataOverride.InputMetadataOverride>
+        getInputsMap() {
+      // Read-only view of the builder's current inputs map.
+      return internalGetInputs().getMap();
+    }
+ /**
+ *
+ *
+ *
+ * Required. Overrides the [input metadata][google.cloud.aiplatform.v1beta1.ExplanationMetadata.inputs] of the features.
+ * The key is the name of the feature to be overridden. The keys specified
+ * here must exist in the input metadata to be overridden. If a feature is
+ * not specified here, the corresponding feature's input metadata is not
+ * overridden.
+ *
+ *
+ *
+ * map<string, .google.cloud.aiplatform.v1beta1.ExplanationMetadataOverride.InputMetadataOverride> inputs = 1 [(.google.api.field_behavior) = REQUIRED];
+ *
+ */
+    // Returns the entry for `key`, or `defaultValue` when absent; null key rejected.
+    @java.lang.Override
+    public com.google.cloud.aiplatform.v1beta1.ExplanationMetadataOverride.InputMetadataOverride
+        getInputsOrDefault(
+            java.lang.String key,
+            com.google.cloud.aiplatform.v1beta1.ExplanationMetadataOverride.InputMetadataOverride
+                defaultValue) {
+      if (key == null) {
+        throw new java.lang.NullPointerException();
+      }
+      java.util.Map<
+              java.lang.String,
+              com.google.cloud.aiplatform.v1beta1.ExplanationMetadataOverride.InputMetadataOverride>
+          map = internalGetInputs().getMap();
+      return map.containsKey(key) ? map.get(key) : defaultValue;
+    }
+ /**
+ *
+ *
+ *
+ * Required. Overrides the [input metadata][google.cloud.aiplatform.v1beta1.ExplanationMetadata.inputs] of the features.
+ * The key is the name of the feature to be overridden. The keys specified
+ * here must exist in the input metadata to be overridden. If a feature is
+ * not specified here, the corresponding feature's input metadata is not
+ * overridden.
+ *
+ *
+ *
+ * map<string, .google.cloud.aiplatform.v1beta1.ExplanationMetadataOverride.InputMetadataOverride> inputs = 1 [(.google.api.field_behavior) = REQUIRED];
+ *
+ */
+    // Returns the entry for `key`; throws IllegalArgumentException when absent.
+    @java.lang.Override
+    public com.google.cloud.aiplatform.v1beta1.ExplanationMetadataOverride.InputMetadataOverride
+        getInputsOrThrow(java.lang.String key) {
+      if (key == null) {
+        throw new java.lang.NullPointerException();
+      }
+      java.util.Map<
+              java.lang.String,
+              com.google.cloud.aiplatform.v1beta1.ExplanationMetadataOverride.InputMetadataOverride>
+          map = internalGetInputs().getMap();
+      if (!map.containsKey(key)) {
+        throw new java.lang.IllegalArgumentException();
+      }
+      return map.get(key);
+    }
+
+    // Removes every entry from the inputs map.
+    public Builder clearInputs() {
+      internalGetMutableInputs().getMutableMap().clear();
+      return this;
+    }
+ /**
+ *
+ *
+ *
+ * Required. Overrides the [input metadata][google.cloud.aiplatform.v1beta1.ExplanationMetadata.inputs] of the features.
+ * The key is the name of the feature to be overridden. The keys specified
+ * here must exist in the input metadata to be overridden. If a feature is
+ * not specified here, the corresponding feature's input metadata is not
+ * overridden.
+ *
+ *
+ *
+ * map<string, .google.cloud.aiplatform.v1beta1.ExplanationMetadataOverride.InputMetadataOverride> inputs = 1 [(.google.api.field_behavior) = REQUIRED];
+ *
+ */
+    // Removes the entry for `key` if present; null key rejected.
+    public Builder removeInputs(java.lang.String key) {
+      if (key == null) {
+        throw new java.lang.NullPointerException();
+      }
+      internalGetMutableInputs().getMutableMap().remove(key);
+      return this;
+    }
+    /** Use alternate mutation accessors instead. */
+    @java.lang.Deprecated
+    public java.util.Map<
+            java.lang.String,
+            com.google.cloud.aiplatform.v1beta1.ExplanationMetadataOverride.InputMetadataOverride>
+        getMutableInputs() {
+      // Live mutable view; deprecated in favor of putInputs/removeInputs.
+      return internalGetMutableInputs().getMutableMap();
+    }
+ /**
+ *
+ *
+ *
+ * Required. Overrides the [input metadata][google.cloud.aiplatform.v1beta1.ExplanationMetadata.inputs] of the features.
+ * The key is the name of the feature to be overridden. The keys specified
+ * here must exist in the input metadata to be overridden. If a feature is
+ * not specified here, the corresponding feature's input metadata is not
+ * overridden.
+ *
+ *
+ *
+ * map<string, .google.cloud.aiplatform.v1beta1.ExplanationMetadataOverride.InputMetadataOverride> inputs = 1 [(.google.api.field_behavior) = REQUIRED];
+ *
+ */
+    // Inserts or replaces the entry for `key`; null key or value rejected.
+    public Builder putInputs(
+        java.lang.String key,
+        com.google.cloud.aiplatform.v1beta1.ExplanationMetadataOverride.InputMetadataOverride
+            value) {
+      if (key == null) {
+        throw new java.lang.NullPointerException();
+      }
+      if (value == null) {
+        throw new java.lang.NullPointerException();
+      }
+      internalGetMutableInputs().getMutableMap().put(key, value);
+      return this;
+    }
+ /**
+ *
+ *
+ *
+ * Required. Overrides the [input metadata][google.cloud.aiplatform.v1beta1.ExplanationMetadata.inputs] of the features.
+ * The key is the name of the feature to be overridden. The keys specified
+ * here must exist in the input metadata to be overridden. If a feature is
+ * not specified here, the corresponding feature's input metadata is not
+ * overridden.
+ *
+ *
+ *
+ * map<string, .google.cloud.aiplatform.v1beta1.ExplanationMetadataOverride.InputMetadataOverride> inputs = 1 [(.google.api.field_behavior) = REQUIRED];
+ *
+ */
+ public Builder putAllInputs(
+ java.util.Map<
+ java.lang.String,
+ com.google.cloud.aiplatform.v1beta1.ExplanationMetadataOverride
+ .InputMetadataOverride>
+ values) {
+ internalGetMutableInputs().getMutableMap().putAll(values);
+ return this;
+ }
+
+ @java.lang.Override
+ public final Builder setUnknownFields(final com.google.protobuf.UnknownFieldSet unknownFields) {
+ return super.setUnknownFields(unknownFields);
+ }
+
+ @java.lang.Override
+ public final Builder mergeUnknownFields(
+ final com.google.protobuf.UnknownFieldSet unknownFields) {
+ return super.mergeUnknownFields(unknownFields);
+ }
+
+ // @@protoc_insertion_point(builder_scope:google.cloud.aiplatform.v1beta1.ExplanationMetadataOverride)
+ }
+
+ // @@protoc_insertion_point(class_scope:google.cloud.aiplatform.v1beta1.ExplanationMetadataOverride)
+ private static final com.google.cloud.aiplatform.v1beta1.ExplanationMetadataOverride
+ DEFAULT_INSTANCE;
+
+ static {
+ DEFAULT_INSTANCE = new com.google.cloud.aiplatform.v1beta1.ExplanationMetadataOverride();
+ }
+
+ public static com.google.cloud.aiplatform.v1beta1.ExplanationMetadataOverride
+ getDefaultInstance() {
+ return DEFAULT_INSTANCE;
+ }
+
+ private static final com.google.protobuf.Parser
+ * Required. Overrides the [input metadata][google.cloud.aiplatform.v1beta1.ExplanationMetadata.inputs] of the features.
+ * The key is the name of the feature to be overridden. The keys specified
+ * here must exist in the input metadata to be overridden. If a feature is
+ * not specified here, the corresponding feature's input metadata is not
+ * overridden.
+ *
+ *
+ *
+ * map<string, .google.cloud.aiplatform.v1beta1.ExplanationMetadataOverride.InputMetadataOverride> inputs = 1 [(.google.api.field_behavior) = REQUIRED];
+ *
+ */
+ int getInputsCount();
+ /**
+ *
+ *
+ *
+ * Required. Overrides the [input metadata][google.cloud.aiplatform.v1beta1.ExplanationMetadata.inputs] of the features.
+ * The key is the name of the feature to be overridden. The keys specified
+ * here must exist in the input metadata to be overridden. If a feature is
+ * not specified here, the corresponding feature's input metadata is not
+ * overridden.
+ *
+ *
+ *
+ * map<string, .google.cloud.aiplatform.v1beta1.ExplanationMetadataOverride.InputMetadataOverride> inputs = 1 [(.google.api.field_behavior) = REQUIRED];
+ *
+ */
+ boolean containsInputs(java.lang.String key);
+ /** Use {@link #getInputsMap()} instead. */
+ @java.lang.Deprecated
+ java.util.Map<
+ java.lang.String,
+ com.google.cloud.aiplatform.v1beta1.ExplanationMetadataOverride.InputMetadataOverride>
+ getInputs();
+ /**
+ *
+ *
+ *
+ * Required. Overrides the [input metadata][google.cloud.aiplatform.v1beta1.ExplanationMetadata.inputs] of the features.
+ * The key is the name of the feature to be overridden. The keys specified
+ * here must exist in the input metadata to be overridden. If a feature is
+ * not specified here, the corresponding feature's input metadata is not
+ * overridden.
+ *
+ *
+ *
+ * map<string, .google.cloud.aiplatform.v1beta1.ExplanationMetadataOverride.InputMetadataOverride> inputs = 1 [(.google.api.field_behavior) = REQUIRED];
+ *
+ */
+ java.util.Map<
+ java.lang.String,
+ com.google.cloud.aiplatform.v1beta1.ExplanationMetadataOverride.InputMetadataOverride>
+ getInputsMap();
+ /**
+ *
+ *
+ *
+ * Required. Overrides the [input metadata][google.cloud.aiplatform.v1beta1.ExplanationMetadata.inputs] of the features.
+ * The key is the name of the feature to be overridden. The keys specified
+ * here must exist in the input metadata to be overridden. If a feature is
+ * not specified here, the corresponding feature's input metadata is not
+ * overridden.
+ *
+ *
+ *
+ * map<string, .google.cloud.aiplatform.v1beta1.ExplanationMetadataOverride.InputMetadataOverride> inputs = 1 [(.google.api.field_behavior) = REQUIRED];
+ *
+ */
+ com.google.cloud.aiplatform.v1beta1.ExplanationMetadataOverride.InputMetadataOverride
+ getInputsOrDefault(
+ java.lang.String key,
+ com.google.cloud.aiplatform.v1beta1.ExplanationMetadataOverride.InputMetadataOverride
+ defaultValue);
+ /**
+ *
+ *
+ *
+ * Required. Overrides the [input metadata][google.cloud.aiplatform.v1beta1.ExplanationMetadata.inputs] of the features.
+ * The key is the name of the feature to be overridden. The keys specified
+ * here must exist in the input metadata to be overridden. If a feature is
+ * not specified here, the corresponding feature's input metadata is not
+ * overridden.
+ *
+ *
+ *
+ * map<string, .google.cloud.aiplatform.v1beta1.ExplanationMetadataOverride.InputMetadataOverride> inputs = 1 [(.google.api.field_behavior) = REQUIRED];
+ *
+ */
+ com.google.cloud.aiplatform.v1beta1.ExplanationMetadataOverride.InputMetadataOverride
+ getInputsOrThrow(java.lang.String key);
+}
diff --git a/proto-google-cloud-aiplatform-v1beta1/src/main/java/com/google/cloud/aiplatform/v1beta1/ExplanationParameters.java b/proto-google-cloud-aiplatform-v1beta1/src/main/java/com/google/cloud/aiplatform/v1beta1/ExplanationParameters.java
index e4612aeb9..eb75dc2a5 100644
--- a/proto-google-cloud-aiplatform-v1beta1/src/main/java/com/google/cloud/aiplatform/v1beta1/ExplanationParameters.java
+++ b/proto-google-cloud-aiplatform-v1beta1/src/main/java/com/google/cloud/aiplatform/v1beta1/ExplanationParameters.java
@@ -466,7 +466,7 @@ public int getTopK() {
*
*
* If populated, only returns attributions that have
- * [output_index][Attributions.output_index] contained in output_indices. It
+ * [output_index][google.cloud.aiplatform.v1beta1.Attribution.output_index] contained in output_indices. It
* must be an ndarray of integers, with the same shape of the output it's
* explaining.
* If not populated, returns attributions for [top_k][google.cloud.aiplatform.v1beta1.ExplanationParameters.top_k] indices of outputs.
@@ -489,7 +489,7 @@ public boolean hasOutputIndices() {
*
*
*
@@ -855,7 +857,7 @@ public com.google.protobuf.ByteString getValidationFilterBytes() {
* this filter are used to validate the Model. A filter with same syntax
* as the one used in [DatasetService.ListDataItems][google.cloud.aiplatform.v1beta1.DatasetService.ListDataItems] may be used. If a
* single DataItem is matched by more than one of the FilterSplit filters,
- * then it will be assigned to the first set that applies to it in the
+ * then it is assigned to the first set that applies to it in the
* training, validation, test order.
*
* If populated, only returns attributions that have
- * [output_index][Attributions.output_index] contained in output_indices. It
+ * [output_index][google.cloud.aiplatform.v1beta1.Attribution.output_index] contained in output_indices. It
* must be an ndarray of integers, with the same shape of the output it's
* explaining.
* If not populated, returns attributions for [top_k][google.cloud.aiplatform.v1beta1.ExplanationParameters.top_k] indices of outputs.
@@ -514,7 +514,7 @@ public com.google.protobuf.ListValue getOutputIndices() {
*
*
*
@@ -828,7 +830,7 @@ public java.lang.String getValidationFilter() {
* this filter are used to validate the Model. A filter with same syntax
* as the one used in [DatasetService.ListDataItems][google.cloud.aiplatform.v1beta1.DatasetService.ListDataItems] may be used. If a
* single DataItem is matched by more than one of the FilterSplit filters,
- * then it will be assigned to the first set that applies to it in the
+ * then it is assigned to the first set that applies to it in the
* training, validation, test order.
*
* If populated, only returns attributions that have
- * [output_index][Attributions.output_index] contained in output_indices. It
+ * [output_index][google.cloud.aiplatform.v1beta1.Attribution.output_index] contained in output_indices. It
* must be an ndarray of integers, with the same shape of the output it's
* explaining.
* If not populated, returns attributions for [top_k][google.cloud.aiplatform.v1beta1.ExplanationParameters.top_k] indices of outputs.
@@ -1860,7 +1860,7 @@ public Builder clearTopK() {
*
*
*
@@ -801,7 +803,7 @@ public Builder setTrainingFilterBytes(com.google.protobuf.ByteString value) {
* this filter are used to validate the Model. A filter with same syntax
* as the one used in [DatasetService.ListDataItems][google.cloud.aiplatform.v1beta1.DatasetService.ListDataItems] may be used. If a
* single DataItem is matched by more than one of the FilterSplit filters,
- * then it will be assigned to the first set that applies to it in the
+ * then it is assigned to the first set that applies to it in the
* training, validation, test order.
*
* If populated, only returns attributions that have
- * [output_index][Attributions.output_index] contained in output_indices. It
+ * [output_index][google.cloud.aiplatform.v1beta1.Attribution.output_index] contained in output_indices. It
* must be an ndarray of integers, with the same shape of the output it's
* explaining.
* If not populated, returns attributions for [top_k][google.cloud.aiplatform.v1beta1.ExplanationParameters.top_k] indices of outputs.
@@ -1882,7 +1882,7 @@ public boolean hasOutputIndices() {
*
*
*
@@ -772,7 +774,7 @@ public Builder clearTrainingFilter() {
* this filter are used to train the Model. A filter with same syntax
* as the one used in [DatasetService.ListDataItems][google.cloud.aiplatform.v1beta1.DatasetService.ListDataItems] may be used. If a
* single DataItem is matched by more than one of the FilterSplit filters,
- * then it will be assigned to the first set that applies to it in the
+ * then it is assigned to the first set that applies to it in the
* training, validation, test order.
*
* If populated, only returns attributions that have
- * [output_index][Attributions.output_index] contained in output_indices. It
+ * [output_index][google.cloud.aiplatform.v1beta1.Attribution.output_index] contained in output_indices. It
* must be an ndarray of integers, with the same shape of the output it's
* explaining.
* If not populated, returns attributions for [top_k][google.cloud.aiplatform.v1beta1.ExplanationParameters.top_k] indices of outputs.
@@ -1910,7 +1910,7 @@ public com.google.protobuf.ListValue getOutputIndices() {
*
*
*
@@ -750,7 +752,7 @@ public Builder setTrainingFilter(java.lang.String value) {
* this filter are used to train the Model. A filter with same syntax
* as the one used in [DatasetService.ListDataItems][google.cloud.aiplatform.v1beta1.DatasetService.ListDataItems] may be used. If a
* single DataItem is matched by more than one of the FilterSplit filters,
- * then it will be assigned to the first set that applies to it in the
+ * then it is assigned to the first set that applies to it in the
* training, validation, test order.
*
* If populated, only returns attributions that have
- * [output_index][Attributions.output_index] contained in output_indices. It
+ * [output_index][google.cloud.aiplatform.v1beta1.Attribution.output_index] contained in output_indices. It
* must be an ndarray of integers, with the same shape of the output it's
* explaining.
* If not populated, returns attributions for [top_k][google.cloud.aiplatform.v1beta1.ExplanationParameters.top_k] indices of outputs.
@@ -1940,7 +1940,7 @@ public Builder setOutputIndices(com.google.protobuf.ListValue value) {
*
*
*
@@ -724,7 +726,7 @@ public com.google.protobuf.ByteString getTrainingFilterBytes() {
* this filter are used to train the Model. A filter with same syntax
* as the one used in [DatasetService.ListDataItems][google.cloud.aiplatform.v1beta1.DatasetService.ListDataItems] may be used. If a
* single DataItem is matched by more than one of the FilterSplit filters,
- * then it will be assigned to the first set that applies to it in the
+ * then it is assigned to the first set that applies to it in the
* training, validation, test order.
*
* If populated, only returns attributions that have
- * [output_index][Attributions.output_index] contained in output_indices. It
+ * [output_index][google.cloud.aiplatform.v1beta1.Attribution.output_index] contained in output_indices. It
* must be an ndarray of integers, with the same shape of the output it's
* explaining.
* If not populated, returns attributions for [top_k][google.cloud.aiplatform.v1beta1.ExplanationParameters.top_k] indices of outputs.
@@ -1967,7 +1967,7 @@ public Builder setOutputIndices(com.google.protobuf.ListValue.Builder builderFor
*
*
*
@@ -697,7 +699,7 @@ public java.lang.String getTrainingFilter() {
* this filter are used to train the Model. A filter with same syntax
* as the one used in [DatasetService.ListDataItems][google.cloud.aiplatform.v1beta1.DatasetService.ListDataItems] may be used. If a
* single DataItem is matched by more than one of the FilterSplit filters,
- * then it will be assigned to the first set that applies to it in the
+ * then it is assigned to the first set that applies to it in the
* training, validation, test order.
*
* If populated, only returns attributions that have
- * [output_index][Attributions.output_index] contained in output_indices. It
+ * [output_index][google.cloud.aiplatform.v1beta1.Attribution.output_index] contained in output_indices. It
* must be an ndarray of integers, with the same shape of the output it's
* explaining.
* If not populated, returns attributions for [top_k][google.cloud.aiplatform.v1beta1.ExplanationParameters.top_k] indices of outputs.
@@ -2001,7 +2001,7 @@ public Builder mergeOutputIndices(com.google.protobuf.ListValue value) {
*
*
*
* Protobuf type {@code google.cloud.aiplatform.v1beta1.FilterSplit}
@@ -670,7 +672,7 @@ public Builder mergeFrom(
* this filter are used to train the Model. A filter with same syntax
* as the one used in [DatasetService.ListDataItems][google.cloud.aiplatform.v1beta1.DatasetService.ListDataItems] may be used. If a
* single DataItem is matched by more than one of the FilterSplit filters,
- * then it will be assigned to the first set that applies to it in the
+ * then it is assigned to the first set that applies to it in the
* training, validation, test order.
*
* If populated, only returns attributions that have
- * [output_index][Attributions.output_index] contained in output_indices. It
+ * [output_index][google.cloud.aiplatform.v1beta1.Attribution.output_index] contained in output_indices. It
* must be an ndarray of integers, with the same shape of the output it's
* explaining.
* If not populated, returns attributions for [top_k][google.cloud.aiplatform.v1beta1.ExplanationParameters.top_k] indices of outputs.
@@ -2029,7 +2029,7 @@ public Builder clearOutputIndices() {
*
*
*
@@ -494,6 +495,7 @@ protected Builder newBuilderForType(com.google.protobuf.GeneratedMessageV3.Build
* supported for Datasets containing DataItems.
* If any of the filters in this message are to match nothing, then they can be
* set as '-' (the minus sign).
+ * Supported only for unstructured Datasets.
*
* If populated, only returns attributions that have
- * [output_index][Attributions.output_index] contained in output_indices. It
+ * [output_index][google.cloud.aiplatform.v1beta1.Attribution.output_index] contained in output_indices. It
* must be an ndarray of integers, with the same shape of the output it's
* explaining.
* If not populated, returns attributions for [top_k][google.cloud.aiplatform.v1beta1.ExplanationParameters.top_k] indices of outputs.
@@ -2051,7 +2051,7 @@ public com.google.protobuf.ListValue.Builder getOutputIndicesBuilder() {
*
*
*
@@ -287,7 +288,7 @@ public java.lang.String getTestFilter() {
* this filter are used to test the Model. A filter with same syntax
* as the one used in [DatasetService.ListDataItems][google.cloud.aiplatform.v1beta1.DatasetService.ListDataItems] may be used. If a
* single DataItem is matched by more than one of the FilterSplit filters,
- * then it will be assigned to the first set that applies to it in the
+ * then it is assigned to the first set that applies to it in the
* training, validation, test order.
*
* If populated, only returns attributions that have
- * [output_index][Attributions.output_index] contained in output_indices. It
+ * [output_index][google.cloud.aiplatform.v1beta1.Attribution.output_index] contained in output_indices. It
* must be an ndarray of integers, with the same shape of the output it's
* explaining.
* If not populated, returns attributions for [top_k][google.cloud.aiplatform.v1beta1.ExplanationParameters.top_k] indices of outputs.
@@ -2077,7 +2077,7 @@ public com.google.protobuf.ListValueOrBuilder getOutputIndicesOrBuilder() {
*
*
*
@@ -259,7 +260,7 @@ public com.google.protobuf.ByteString getValidationFilterBytes() {
* this filter are used to test the Model. A filter with same syntax
* as the one used in [DatasetService.ListDataItems][google.cloud.aiplatform.v1beta1.DatasetService.ListDataItems] may be used. If a
* single DataItem is matched by more than one of the FilterSplit filters,
- * then it will be assigned to the first set that applies to it in the
+ * then it is assigned to the first set that applies to it in the
* training, validation, test order.
*
* If populated, only returns attributions that have
- * [output_index][Attributions.output_index] contained in output_indices. It
+ * [output_index][google.cloud.aiplatform.v1beta1.Attribution.output_index] contained in output_indices. It
* must be an ndarray of integers, with the same shape of the output it's
* explaining.
* If not populated, returns attributions for [top_k][google.cloud.aiplatform.v1beta1.ExplanationParameters.top_k] indices of outputs.
diff --git a/proto-google-cloud-aiplatform-v1beta1/src/main/java/com/google/cloud/aiplatform/v1beta1/ExplanationParametersOrBuilder.java b/proto-google-cloud-aiplatform-v1beta1/src/main/java/com/google/cloud/aiplatform/v1beta1/ExplanationParametersOrBuilder.java
index 49e328159..481e2c415 100644
--- a/proto-google-cloud-aiplatform-v1beta1/src/main/java/com/google/cloud/aiplatform/v1beta1/ExplanationParametersOrBuilder.java
+++ b/proto-google-cloud-aiplatform-v1beta1/src/main/java/com/google/cloud/aiplatform/v1beta1/ExplanationParametersOrBuilder.java
@@ -200,7 +200,7 @@ public interface ExplanationParametersOrBuilder
*
*
*
@@ -228,7 +229,7 @@ public java.lang.String getValidationFilter() {
* this filter are used to validate the Model. A filter with same syntax
* as the one used in [DatasetService.ListDataItems][google.cloud.aiplatform.v1beta1.DatasetService.ListDataItems] may be used. If a
* single DataItem is matched by more than one of the FilterSplit filters,
- * then it will be assigned to the first set that applies to it in the
+ * then it is assigned to the first set that applies to it in the
* training, validation, test order.
*
* If populated, only returns attributions that have
- * [output_index][Attributions.output_index] contained in output_indices. It
+ * [output_index][google.cloud.aiplatform.v1beta1.Attribution.output_index] contained in output_indices. It
* must be an ndarray of integers, with the same shape of the output it's
* explaining.
* If not populated, returns attributions for [top_k][google.cloud.aiplatform.v1beta1.ExplanationParameters.top_k] indices of outputs.
@@ -220,7 +220,7 @@ public interface ExplanationParametersOrBuilder
*
*
*
@@ -200,7 +201,7 @@ public com.google.protobuf.ByteString getTrainingFilterBytes() {
* this filter are used to validate the Model. A filter with same syntax
* as the one used in [DatasetService.ListDataItems][google.cloud.aiplatform.v1beta1.DatasetService.ListDataItems] may be used. If a
* single DataItem is matched by more than one of the FilterSplit filters,
- * then it will be assigned to the first set that applies to it in the
+ * then it is assigned to the first set that applies to it in the
* training, validation, test order.
*
* If populated, only returns attributions that have
- * [output_index][Attributions.output_index] contained in output_indices. It
+ * [output_index][google.cloud.aiplatform.v1beta1.Attribution.output_index] contained in output_indices. It
* must be an ndarray of integers, with the same shape of the output it's
* explaining.
* If not populated, returns attributions for [top_k][google.cloud.aiplatform.v1beta1.ExplanationParameters.top_k] indices of outputs.
@@ -240,7 +240,7 @@ public interface ExplanationParametersOrBuilder
*
*
*
@@ -169,7 +170,7 @@ public java.lang.String getTrainingFilter() {
* this filter are used to train the Model. A filter with same syntax
* as the one used in [DatasetService.ListDataItems][google.cloud.aiplatform.v1beta1.DatasetService.ListDataItems] may be used. If a
* single DataItem is matched by more than one of the FilterSplit filters,
- * then it will be assigned to the first set that applies to it in the
+ * then it is assigned to the first set that applies to it in the
* training, validation, test order.
*
* If populated, only returns attributions that have
- * [output_index][Attributions.output_index] contained in output_indices. It
+ * [output_index][google.cloud.aiplatform.v1beta1.Attribution.output_index] contained in output_indices. It
* must be an ndarray of integers, with the same shape of the output it's
* explaining.
* If not populated, returns attributions for [top_k][google.cloud.aiplatform.v1beta1.ExplanationParameters.top_k] indices of outputs.
diff --git a/proto-google-cloud-aiplatform-v1beta1/src/main/java/com/google/cloud/aiplatform/v1beta1/ExplanationProto.java b/proto-google-cloud-aiplatform-v1beta1/src/main/java/com/google/cloud/aiplatform/v1beta1/ExplanationProto.java
index 024a4c76c..5dba6213f 100644
--- a/proto-google-cloud-aiplatform-v1beta1/src/main/java/com/google/cloud/aiplatform/v1beta1/ExplanationProto.java
+++ b/proto-google-cloud-aiplatform-v1beta1/src/main/java/com/google/cloud/aiplatform/v1beta1/ExplanationProto.java
@@ -71,6 +71,22 @@ public static void registerAllExtensions(com.google.protobuf.ExtensionRegistry r
internal_static_google_cloud_aiplatform_v1beta1_FeatureNoiseSigma_NoiseSigmaForFeature_descriptor;
static final com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
internal_static_google_cloud_aiplatform_v1beta1_FeatureNoiseSigma_NoiseSigmaForFeature_fieldAccessorTable;
+ static final com.google.protobuf.Descriptors.Descriptor
+ internal_static_google_cloud_aiplatform_v1beta1_ExplanationSpecOverride_descriptor;
+ static final com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
+ internal_static_google_cloud_aiplatform_v1beta1_ExplanationSpecOverride_fieldAccessorTable;
+ static final com.google.protobuf.Descriptors.Descriptor
+ internal_static_google_cloud_aiplatform_v1beta1_ExplanationMetadataOverride_descriptor;
+ static final com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
+ internal_static_google_cloud_aiplatform_v1beta1_ExplanationMetadataOverride_fieldAccessorTable;
+ static final com.google.protobuf.Descriptors.Descriptor
+ internal_static_google_cloud_aiplatform_v1beta1_ExplanationMetadataOverride_InputMetadataOverride_descriptor;
+ static final com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
+ internal_static_google_cloud_aiplatform_v1beta1_ExplanationMetadataOverride_InputMetadataOverride_fieldAccessorTable;
+ static final com.google.protobuf.Descriptors.Descriptor
+ internal_static_google_cloud_aiplatform_v1beta1_ExplanationMetadataOverride_InputsEntry_descriptor;
+ static final com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
+ internal_static_google_cloud_aiplatform_v1beta1_ExplanationMetadataOverride_InputsEntry_fieldAccessorTable;
public static com.google.protobuf.Descriptors.FileDescriptor getDescriptor() {
return descriptor;
@@ -128,10 +144,23 @@ public static com.google.protobuf.Descriptors.FileDescriptor getDescriptor() {
+ "gle.cloud.aiplatform.v1beta1.FeatureNois"
+ "eSigma.NoiseSigmaForFeature\0323\n\024NoiseSigm"
+ "aForFeature\022\014\n\004name\030\001 \001(\t\022\r\n\005sigma\030\002 \001(\002"
- + "B\204\001\n#com.google.cloud.aiplatform.v1beta1"
- + "B\020ExplanationProtoP\001ZIgoogle.golang.org/"
- + "genproto/googleapis/cloud/aiplatform/v1b"
- + "eta1;aiplatformb\006proto3"
+ + "\"\265\001\n\027ExplanationSpecOverride\022J\n\nparamete"
+ + "rs\030\001 \001(\01326.google.cloud.aiplatform.v1bet"
+ + "a1.ExplanationParameters\022N\n\010metadata\030\002 \001"
+ + "(\0132<.google.cloud.aiplatform.v1beta1.Exp"
+ + "lanationMetadataOverride\"\312\002\n\033Explanation"
+ + "MetadataOverride\022]\n\006inputs\030\001 \003(\0132H.googl"
+ + "e.cloud.aiplatform.v1beta1.ExplanationMe"
+ + "tadataOverride.InputsEntryB\003\340A\002\032H\n\025Input"
+ + "MetadataOverride\022/\n\017input_baselines\030\001 \003("
+ + "\0132\026.google.protobuf.Value\032\201\001\n\013InputsEntr"
+ + "y\022\013\n\003key\030\001 \001(\t\022a\n\005value\030\002 \001(\0132R.google.c"
+ + "loud.aiplatform.v1beta1.ExplanationMetad"
+ + "ataOverride.InputMetadataOverride:\0028\001B\204\001"
+ + "\n#com.google.cloud.aiplatform.v1beta1B\020E"
+ + "xplanationProtoP\001ZIgoogle.golang.org/gen"
+ + "proto/googleapis/cloud/aiplatform/v1beta"
+ + "1;aiplatformb\006proto3"
};
descriptor =
com.google.protobuf.Descriptors.FileDescriptor.internalBuildGeneratedFileFrom(
@@ -243,6 +272,42 @@ public static com.google.protobuf.Descriptors.FileDescriptor getDescriptor() {
new java.lang.String[] {
"Name", "Sigma",
});
+ internal_static_google_cloud_aiplatform_v1beta1_ExplanationSpecOverride_descriptor =
+ getDescriptor().getMessageTypes().get(10);
+ internal_static_google_cloud_aiplatform_v1beta1_ExplanationSpecOverride_fieldAccessorTable =
+ new com.google.protobuf.GeneratedMessageV3.FieldAccessorTable(
+ internal_static_google_cloud_aiplatform_v1beta1_ExplanationSpecOverride_descriptor,
+ new java.lang.String[] {
+ "Parameters", "Metadata",
+ });
+ internal_static_google_cloud_aiplatform_v1beta1_ExplanationMetadataOverride_descriptor =
+ getDescriptor().getMessageTypes().get(11);
+ internal_static_google_cloud_aiplatform_v1beta1_ExplanationMetadataOverride_fieldAccessorTable =
+ new com.google.protobuf.GeneratedMessageV3.FieldAccessorTable(
+ internal_static_google_cloud_aiplatform_v1beta1_ExplanationMetadataOverride_descriptor,
+ new java.lang.String[] {
+ "Inputs",
+ });
+ internal_static_google_cloud_aiplatform_v1beta1_ExplanationMetadataOverride_InputMetadataOverride_descriptor =
+ internal_static_google_cloud_aiplatform_v1beta1_ExplanationMetadataOverride_descriptor
+ .getNestedTypes()
+ .get(0);
+ internal_static_google_cloud_aiplatform_v1beta1_ExplanationMetadataOverride_InputMetadataOverride_fieldAccessorTable =
+ new com.google.protobuf.GeneratedMessageV3.FieldAccessorTable(
+ internal_static_google_cloud_aiplatform_v1beta1_ExplanationMetadataOverride_InputMetadataOverride_descriptor,
+ new java.lang.String[] {
+ "InputBaselines",
+ });
+ internal_static_google_cloud_aiplatform_v1beta1_ExplanationMetadataOverride_InputsEntry_descriptor =
+ internal_static_google_cloud_aiplatform_v1beta1_ExplanationMetadataOverride_descriptor
+ .getNestedTypes()
+ .get(1);
+ internal_static_google_cloud_aiplatform_v1beta1_ExplanationMetadataOverride_InputsEntry_fieldAccessorTable =
+ new com.google.protobuf.GeneratedMessageV3.FieldAccessorTable(
+ internal_static_google_cloud_aiplatform_v1beta1_ExplanationMetadataOverride_InputsEntry_descriptor,
+ new java.lang.String[] {
+ "Key", "Value",
+ });
com.google.protobuf.ExtensionRegistry registry =
com.google.protobuf.ExtensionRegistry.newInstance();
registry.add(com.google.api.FieldBehaviorProto.fieldBehavior);
diff --git a/proto-google-cloud-aiplatform-v1beta1/src/main/java/com/google/cloud/aiplatform/v1beta1/ExplanationSpecOverride.java b/proto-google-cloud-aiplatform-v1beta1/src/main/java/com/google/cloud/aiplatform/v1beta1/ExplanationSpecOverride.java
new file mode 100644
index 000000000..0ceb1bee8
--- /dev/null
+++ b/proto-google-cloud-aiplatform-v1beta1/src/main/java/com/google/cloud/aiplatform/v1beta1/ExplanationSpecOverride.java
@@ -0,0 +1,1051 @@
+/*
+ * Copyright 2020 Google LLC
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * https://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+// Generated by the protocol buffer compiler. DO NOT EDIT!
+// source: google/cloud/aiplatform/v1beta1/explanation.proto
+
+package com.google.cloud.aiplatform.v1beta1;
+
+/**
+ *
+ *
+ *
*
* Protobuf type {@code google.cloud.aiplatform.v1beta1.FilterSplit}
@@ -141,7 +142,7 @@ public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
* this filter are used to train the Model. A filter with same syntax
* as the one used in [DatasetService.ListDataItems][google.cloud.aiplatform.v1beta1.DatasetService.ListDataItems] may be used. If a
* single DataItem is matched by more than one of the FilterSplit filters,
- * then it will be assigned to the first set that applies to it in the
+ * then it is assigned to the first set that applies to it in the
* training, validation, test order.
*
+ * The [ExplanationSpec][google.cloud.aiplatform.v1beta1.ExplanationSpec] entries that can be overridden at [online
+ * explanation][PredictionService.Explain][google.cloud.aiplatform.v1beta1.PredictionService.Explain] time.
+ *
+ *
+ * Protobuf type {@code google.cloud.aiplatform.v1beta1.ExplanationSpecOverride}
+ */
+public final class ExplanationSpecOverride extends com.google.protobuf.GeneratedMessageV3
+ implements
+ // @@protoc_insertion_point(message_implements:google.cloud.aiplatform.v1beta1.ExplanationSpecOverride)
+ ExplanationSpecOverrideOrBuilder {
+ private static final long serialVersionUID = 0L;
+ // Use ExplanationSpecOverride.newBuilder() to construct.
+ private ExplanationSpecOverride(com.google.protobuf.GeneratedMessageV3.Builder> builder) {
+ super(builder);
+ }
+
+ private ExplanationSpecOverride() {}
+
+ @java.lang.Override
+ @SuppressWarnings({"unused"})
+ protected java.lang.Object newInstance(UnusedPrivateParameter unused) {
+ return new ExplanationSpecOverride();
+ }
+
+ @java.lang.Override
+ public final com.google.protobuf.UnknownFieldSet getUnknownFields() {
+ return this.unknownFields;
+ }
+
+ private ExplanationSpecOverride(
+ com.google.protobuf.CodedInputStream input,
+ com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+ throws com.google.protobuf.InvalidProtocolBufferException {
+ this();
+ if (extensionRegistry == null) {
+ throw new java.lang.NullPointerException();
+ }
+ com.google.protobuf.UnknownFieldSet.Builder unknownFields =
+ com.google.protobuf.UnknownFieldSet.newBuilder();
+ try {
+ boolean done = false;
+ while (!done) {
+ int tag = input.readTag();
+ switch (tag) {
+ case 0:
+ done = true;
+ break;
+ case 10:
+ {
+ com.google.cloud.aiplatform.v1beta1.ExplanationParameters.Builder subBuilder = null;
+ if (parameters_ != null) {
+ subBuilder = parameters_.toBuilder();
+ }
+ parameters_ =
+ input.readMessage(
+ com.google.cloud.aiplatform.v1beta1.ExplanationParameters.parser(),
+ extensionRegistry);
+ if (subBuilder != null) {
+ subBuilder.mergeFrom(parameters_);
+ parameters_ = subBuilder.buildPartial();
+ }
+
+ break;
+ }
+ case 18:
+ {
+ com.google.cloud.aiplatform.v1beta1.ExplanationMetadataOverride.Builder subBuilder =
+ null;
+ if (metadata_ != null) {
+ subBuilder = metadata_.toBuilder();
+ }
+ metadata_ =
+ input.readMessage(
+ com.google.cloud.aiplatform.v1beta1.ExplanationMetadataOverride.parser(),
+ extensionRegistry);
+ if (subBuilder != null) {
+ subBuilder.mergeFrom(metadata_);
+ metadata_ = subBuilder.buildPartial();
+ }
+
+ break;
+ }
+ default:
+ {
+ if (!parseUnknownField(input, unknownFields, extensionRegistry, tag)) {
+ done = true;
+ }
+ break;
+ }
+ }
+ }
+ } catch (com.google.protobuf.InvalidProtocolBufferException e) {
+ throw e.setUnfinishedMessage(this);
+ } catch (java.io.IOException e) {
+ throw new com.google.protobuf.InvalidProtocolBufferException(e).setUnfinishedMessage(this);
+ } finally {
+ this.unknownFields = unknownFields.build();
+ makeExtensionsImmutable();
+ }
+ }
+
+ public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
+ return com.google.cloud.aiplatform.v1beta1.ExplanationProto
+ .internal_static_google_cloud_aiplatform_v1beta1_ExplanationSpecOverride_descriptor;
+ }
+
+ @java.lang.Override
+ protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
+ internalGetFieldAccessorTable() {
+ return com.google.cloud.aiplatform.v1beta1.ExplanationProto
+ .internal_static_google_cloud_aiplatform_v1beta1_ExplanationSpecOverride_fieldAccessorTable
+ .ensureFieldAccessorsInitialized(
+ com.google.cloud.aiplatform.v1beta1.ExplanationSpecOverride.class,
+ com.google.cloud.aiplatform.v1beta1.ExplanationSpecOverride.Builder.class);
+ }
+
+ public static final int PARAMETERS_FIELD_NUMBER = 1;
+ private com.google.cloud.aiplatform.v1beta1.ExplanationParameters parameters_;
+ /**
+ *
+ *
+ *
+ * The parameters to be overridden. Note that the
+ * [method][google.cloud.aiplatform.v1beta1.ExplanationParameters.method] cannot be changed. If not specified,
+ * no parameter is overridden.
+ *
+ *
+ * .google.cloud.aiplatform.v1beta1.ExplanationParameters parameters = 1;
+ *
+ * @return Whether the parameters field is set.
+ */
+ @java.lang.Override
+ public boolean hasParameters() {
+ return parameters_ != null;
+ }
+ /**
+ *
+ *
+ *
+ * The parameters to be overridden. Note that the
+ * [method][google.cloud.aiplatform.v1beta1.ExplanationParameters.method] cannot be changed. If not specified,
+ * no parameter is overridden.
+ *
+ *
+ * .google.cloud.aiplatform.v1beta1.ExplanationParameters parameters = 1;
+ *
+ * @return The parameters.
+ */
+ @java.lang.Override
+ public com.google.cloud.aiplatform.v1beta1.ExplanationParameters getParameters() {
+ return parameters_ == null
+ ? com.google.cloud.aiplatform.v1beta1.ExplanationParameters.getDefaultInstance()
+ : parameters_;
+ }
+ /**
+ *
+ *
+ *
+ * The parameters to be overridden. Note that the
+ * [method][google.cloud.aiplatform.v1beta1.ExplanationParameters.method] cannot be changed. If not specified,
+ * no parameter is overridden.
+ *
+ *
+ * .google.cloud.aiplatform.v1beta1.ExplanationParameters parameters = 1;
+ */
+ @java.lang.Override
+ public com.google.cloud.aiplatform.v1beta1.ExplanationParametersOrBuilder
+ getParametersOrBuilder() {
+ return getParameters();
+ }
+
+ public static final int METADATA_FIELD_NUMBER = 2;
+ private com.google.cloud.aiplatform.v1beta1.ExplanationMetadataOverride metadata_;
+ /**
+ *
+ *
+ *
+ * The metadata to be overridden. If not specified, no metadata is overridden.
+ *
+ *
+ * .google.cloud.aiplatform.v1beta1.ExplanationMetadataOverride metadata = 2;
+ *
+ * @return Whether the metadata field is set.
+ */
+ @java.lang.Override
+ public boolean hasMetadata() {
+ return metadata_ != null;
+ }
+ /**
+ *
+ *
+ *
+ * The metadata to be overridden. If not specified, no metadata is overridden.
+ *
+ *
+ * .google.cloud.aiplatform.v1beta1.ExplanationMetadataOverride metadata = 2;
+ *
+ * @return The metadata.
+ */
+ @java.lang.Override
+ public com.google.cloud.aiplatform.v1beta1.ExplanationMetadataOverride getMetadata() {
+ return metadata_ == null
+ ? com.google.cloud.aiplatform.v1beta1.ExplanationMetadataOverride.getDefaultInstance()
+ : metadata_;
+ }
+ /**
+ *
+ *
+ *
+ * The metadata to be overridden. If not specified, no metadata is overridden.
+ *
+ *
+ * .google.cloud.aiplatform.v1beta1.ExplanationMetadataOverride metadata = 2;
+ */
+ @java.lang.Override
+ public com.google.cloud.aiplatform.v1beta1.ExplanationMetadataOverrideOrBuilder
+ getMetadataOrBuilder() {
+ return getMetadata();
+ }
+
+ private byte memoizedIsInitialized = -1;
+
+ @java.lang.Override
+ public final boolean isInitialized() {
+ byte isInitialized = memoizedIsInitialized;
+ if (isInitialized == 1) return true;
+ if (isInitialized == 0) return false;
+
+ memoizedIsInitialized = 1;
+ return true;
+ }
+
+ @java.lang.Override
+ public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException {
+ if (parameters_ != null) {
+ output.writeMessage(1, getParameters());
+ }
+ if (metadata_ != null) {
+ output.writeMessage(2, getMetadata());
+ }
+ unknownFields.writeTo(output);
+ }
+
+ @java.lang.Override
+ public int getSerializedSize() {
+ int size = memoizedSize;
+ if (size != -1) return size;
+
+ size = 0;
+ if (parameters_ != null) {
+ size += com.google.protobuf.CodedOutputStream.computeMessageSize(1, getParameters());
+ }
+ if (metadata_ != null) {
+ size += com.google.protobuf.CodedOutputStream.computeMessageSize(2, getMetadata());
+ }
+ size += unknownFields.getSerializedSize();
+ memoizedSize = size;
+ return size;
+ }
+
+ @java.lang.Override
+ public boolean equals(final java.lang.Object obj) {
+ if (obj == this) {
+ return true;
+ }
+ if (!(obj instanceof com.google.cloud.aiplatform.v1beta1.ExplanationSpecOverride)) {
+ return super.equals(obj);
+ }
+ com.google.cloud.aiplatform.v1beta1.ExplanationSpecOverride other =
+ (com.google.cloud.aiplatform.v1beta1.ExplanationSpecOverride) obj;
+
+ if (hasParameters() != other.hasParameters()) return false;
+ if (hasParameters()) {
+ if (!getParameters().equals(other.getParameters())) return false;
+ }
+ if (hasMetadata() != other.hasMetadata()) return false;
+ if (hasMetadata()) {
+ if (!getMetadata().equals(other.getMetadata())) return false;
+ }
+ if (!unknownFields.equals(other.unknownFields)) return false;
+ return true;
+ }
+
+ @java.lang.Override
+ public int hashCode() {
+ if (memoizedHashCode != 0) {
+ return memoizedHashCode;
+ }
+ int hash = 41;
+ hash = (19 * hash) + getDescriptor().hashCode();
+ if (hasParameters()) {
+ hash = (37 * hash) + PARAMETERS_FIELD_NUMBER;
+ hash = (53 * hash) + getParameters().hashCode();
+ }
+ if (hasMetadata()) {
+ hash = (37 * hash) + METADATA_FIELD_NUMBER;
+ hash = (53 * hash) + getMetadata().hashCode();
+ }
+ hash = (29 * hash) + unknownFields.hashCode();
+ memoizedHashCode = hash;
+ return hash;
+ }
+
+ public static com.google.cloud.aiplatform.v1beta1.ExplanationSpecOverride parseFrom(
+ java.nio.ByteBuffer data) throws com.google.protobuf.InvalidProtocolBufferException {
+ return PARSER.parseFrom(data);
+ }
+
+ public static com.google.cloud.aiplatform.v1beta1.ExplanationSpecOverride parseFrom(
+ java.nio.ByteBuffer data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+ throws com.google.protobuf.InvalidProtocolBufferException {
+ return PARSER.parseFrom(data, extensionRegistry);
+ }
+
+ public static com.google.cloud.aiplatform.v1beta1.ExplanationSpecOverride parseFrom(
+ com.google.protobuf.ByteString data)
+ throws com.google.protobuf.InvalidProtocolBufferException {
+ return PARSER.parseFrom(data);
+ }
+
+ public static com.google.cloud.aiplatform.v1beta1.ExplanationSpecOverride parseFrom(
+ com.google.protobuf.ByteString data,
+ com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+ throws com.google.protobuf.InvalidProtocolBufferException {
+ return PARSER.parseFrom(data, extensionRegistry);
+ }
+
+ public static com.google.cloud.aiplatform.v1beta1.ExplanationSpecOverride parseFrom(byte[] data)
+ throws com.google.protobuf.InvalidProtocolBufferException {
+ return PARSER.parseFrom(data);
+ }
+
+ public static com.google.cloud.aiplatform.v1beta1.ExplanationSpecOverride parseFrom(
+ byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+ throws com.google.protobuf.InvalidProtocolBufferException {
+ return PARSER.parseFrom(data, extensionRegistry);
+ }
+
+ public static com.google.cloud.aiplatform.v1beta1.ExplanationSpecOverride parseFrom(
+ java.io.InputStream input) throws java.io.IOException {
+ return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
+ }
+
+ public static com.google.cloud.aiplatform.v1beta1.ExplanationSpecOverride parseFrom(
+ java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+ throws java.io.IOException {
+ return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
+ PARSER, input, extensionRegistry);
+ }
+
+ public static com.google.cloud.aiplatform.v1beta1.ExplanationSpecOverride parseDelimitedFrom(
+ java.io.InputStream input) throws java.io.IOException {
+ return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(PARSER, input);
+ }
+
+ public static com.google.cloud.aiplatform.v1beta1.ExplanationSpecOverride parseDelimitedFrom(
+ java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+ throws java.io.IOException {
+ return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(
+ PARSER, input, extensionRegistry);
+ }
+
+ public static com.google.cloud.aiplatform.v1beta1.ExplanationSpecOverride parseFrom(
+ com.google.protobuf.CodedInputStream input) throws java.io.IOException {
+ return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
+ }
+
+ public static com.google.cloud.aiplatform.v1beta1.ExplanationSpecOverride parseFrom(
+ com.google.protobuf.CodedInputStream input,
+ com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+ throws java.io.IOException {
+ return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
+ PARSER, input, extensionRegistry);
+ }
+
+ @java.lang.Override
+ public Builder newBuilderForType() {
+ return newBuilder();
+ }
+
+ public static Builder newBuilder() {
+ return DEFAULT_INSTANCE.toBuilder();
+ }
+
+ public static Builder newBuilder(
+ com.google.cloud.aiplatform.v1beta1.ExplanationSpecOverride prototype) {
+ return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
+ }
+
+ @java.lang.Override
+ public Builder toBuilder() {
+ return this == DEFAULT_INSTANCE ? new Builder() : new Builder().mergeFrom(this);
+ }
+
+ @java.lang.Override
+ protected Builder newBuilderForType(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
+ Builder builder = new Builder(parent);
+ return builder;
+ }
+ /**
+ *
+ *
+ *
+ * The [ExplanationSpec][google.cloud.aiplatform.v1beta1.ExplanationSpec] entries that can be overridden at [online
+ * explanation][PredictionService.Explain][google.cloud.aiplatform.v1beta1.PredictionService.Explain] time.
+ *
+ *
+ * Protobuf type {@code google.cloud.aiplatform.v1beta1.ExplanationSpecOverride}
+ */
+ public static final class Builder extends com.google.protobuf.GeneratedMessageV3.Builder
+ * The parameters to be overridden. Note that the
+ * [method][google.cloud.aiplatform.v1beta1.ExplanationParameters.method] cannot be changed. If not specified,
+ * no parameter is overridden.
+ *
+ *
+ * .google.cloud.aiplatform.v1beta1.ExplanationParameters parameters = 1;
+ *
+ * @return Whether the parameters field is set.
+ */
+ public boolean hasParameters() {
+ return parametersBuilder_ != null || parameters_ != null;
+ }
+ /**
+ *
+ *
+ *
+ * The parameters to be overridden. Note that the
+ * [method][google.cloud.aiplatform.v1beta1.ExplanationParameters.method] cannot be changed. If not specified,
+ * no parameter is overridden.
+ *
+ *
+ * .google.cloud.aiplatform.v1beta1.ExplanationParameters parameters = 1;
+ *
+ * @return The parameters.
+ */
+ public com.google.cloud.aiplatform.v1beta1.ExplanationParameters getParameters() {
+ if (parametersBuilder_ == null) {
+ return parameters_ == null
+ ? com.google.cloud.aiplatform.v1beta1.ExplanationParameters.getDefaultInstance()
+ : parameters_;
+ } else {
+ return parametersBuilder_.getMessage();
+ }
+ }
+ /**
+ *
+ *
+ *
+ * The parameters to be overridden. Note that the
+ * [method][google.cloud.aiplatform.v1beta1.ExplanationParameters.method] cannot be changed. If not specified,
+ * no parameter is overridden.
+ *
+ *
+ * .google.cloud.aiplatform.v1beta1.ExplanationParameters parameters = 1;
+ */
+ public Builder setParameters(com.google.cloud.aiplatform.v1beta1.ExplanationParameters value) {
+ if (parametersBuilder_ == null) {
+ if (value == null) {
+ throw new NullPointerException();
+ }
+ parameters_ = value;
+ onChanged();
+ } else {
+ parametersBuilder_.setMessage(value);
+ }
+
+ return this;
+ }
+ /**
+ *
+ *
+ *
+ * The parameters to be overridden. Note that the
+ * [method][google.cloud.aiplatform.v1beta1.ExplanationParameters.method] cannot be changed. If not specified,
+ * no parameter is overridden.
+ *
+ *
+ * .google.cloud.aiplatform.v1beta1.ExplanationParameters parameters = 1;
+ */
+ public Builder setParameters(
+ com.google.cloud.aiplatform.v1beta1.ExplanationParameters.Builder builderForValue) {
+ if (parametersBuilder_ == null) {
+ parameters_ = builderForValue.build();
+ onChanged();
+ } else {
+ parametersBuilder_.setMessage(builderForValue.build());
+ }
+
+ return this;
+ }
+ /**
+ *
+ *
+ *
+ * The parameters to be overridden. Note that the
+ * [method][google.cloud.aiplatform.v1beta1.ExplanationParameters.method] cannot be changed. If not specified,
+ * no parameter is overridden.
+ *
+ *
+ * .google.cloud.aiplatform.v1beta1.ExplanationParameters parameters = 1;
+ */
+ public Builder mergeParameters(
+ com.google.cloud.aiplatform.v1beta1.ExplanationParameters value) {
+ if (parametersBuilder_ == null) {
+ if (parameters_ != null) {
+ parameters_ =
+ com.google.cloud.aiplatform.v1beta1.ExplanationParameters.newBuilder(parameters_)
+ .mergeFrom(value)
+ .buildPartial();
+ } else {
+ parameters_ = value;
+ }
+ onChanged();
+ } else {
+ parametersBuilder_.mergeFrom(value);
+ }
+
+ return this;
+ }
+ /**
+ *
+ *
+ *
+ * The parameters to be overridden. Note that the
+ * [method][google.cloud.aiplatform.v1beta1.ExplanationParameters.method] cannot be changed. If not specified,
+ * no parameter is overridden.
+ *
+ *
+ * .google.cloud.aiplatform.v1beta1.ExplanationParameters parameters = 1;
+ */
+ public Builder clearParameters() {
+ if (parametersBuilder_ == null) {
+ parameters_ = null;
+ onChanged();
+ } else {
+ parameters_ = null;
+ parametersBuilder_ = null;
+ }
+
+ return this;
+ }
+ /**
+ *
+ *
+ *
+ * The parameters to be overridden. Note that the
+ * [method][google.cloud.aiplatform.v1beta1.ExplanationParameters.method] cannot be changed. If not specified,
+ * no parameter is overridden.
+ *
+ *
+ * .google.cloud.aiplatform.v1beta1.ExplanationParameters parameters = 1;
+ */
+ public com.google.cloud.aiplatform.v1beta1.ExplanationParameters.Builder
+ getParametersBuilder() {
+
+ onChanged();
+ return getParametersFieldBuilder().getBuilder();
+ }
+ /**
+ *
+ *
+ *
+ * The parameters to be overridden. Note that the
+ * [method][google.cloud.aiplatform.v1beta1.ExplanationParameters.method] cannot be changed. If not specified,
+ * no parameter is overridden.
+ *
+ *
+ * .google.cloud.aiplatform.v1beta1.ExplanationParameters parameters = 1;
+ */
+ public com.google.cloud.aiplatform.v1beta1.ExplanationParametersOrBuilder
+ getParametersOrBuilder() {
+ if (parametersBuilder_ != null) {
+ return parametersBuilder_.getMessageOrBuilder();
+ } else {
+ return parameters_ == null
+ ? com.google.cloud.aiplatform.v1beta1.ExplanationParameters.getDefaultInstance()
+ : parameters_;
+ }
+ }
+ /**
+ *
+ *
+ *
+ * The parameters to be overridden. Note that the
+ * [method][google.cloud.aiplatform.v1beta1.ExplanationParameters.method] cannot be changed. If not specified,
+ * no parameter is overridden.
+ *
+ *
+ * .google.cloud.aiplatform.v1beta1.ExplanationParameters parameters = 1;
+ */
+ private com.google.protobuf.SingleFieldBuilderV3<
+ com.google.cloud.aiplatform.v1beta1.ExplanationParameters,
+ com.google.cloud.aiplatform.v1beta1.ExplanationParameters.Builder,
+ com.google.cloud.aiplatform.v1beta1.ExplanationParametersOrBuilder>
+ getParametersFieldBuilder() {
+ if (parametersBuilder_ == null) {
+ parametersBuilder_ =
+ new com.google.protobuf.SingleFieldBuilderV3<
+ com.google.cloud.aiplatform.v1beta1.ExplanationParameters,
+ com.google.cloud.aiplatform.v1beta1.ExplanationParameters.Builder,
+ com.google.cloud.aiplatform.v1beta1.ExplanationParametersOrBuilder>(
+ getParameters(), getParentForChildren(), isClean());
+ parameters_ = null;
+ }
+ return parametersBuilder_;
+ }
+
+ private com.google.cloud.aiplatform.v1beta1.ExplanationMetadataOverride metadata_;
+ private com.google.protobuf.SingleFieldBuilderV3<
+ com.google.cloud.aiplatform.v1beta1.ExplanationMetadataOverride,
+ com.google.cloud.aiplatform.v1beta1.ExplanationMetadataOverride.Builder,
+ com.google.cloud.aiplatform.v1beta1.ExplanationMetadataOverrideOrBuilder>
+ metadataBuilder_;
+ /**
+ *
+ *
+ *
+ * The metadata to be overridden. If not specified, no metadata is overridden.
+ *
+ *
+ * .google.cloud.aiplatform.v1beta1.ExplanationMetadataOverride metadata = 2;
+ *
+ * @return Whether the metadata field is set.
+ */
+ public boolean hasMetadata() {
+ return metadataBuilder_ != null || metadata_ != null;
+ }
+ /**
+ *
+ *
+ *
+ * The metadata to be overridden. If not specified, no metadata is overridden.
+ *
+ *
+ * .google.cloud.aiplatform.v1beta1.ExplanationMetadataOverride metadata = 2;
+ *
+ * @return The metadata.
+ */
+ public com.google.cloud.aiplatform.v1beta1.ExplanationMetadataOverride getMetadata() {
+ if (metadataBuilder_ == null) {
+ return metadata_ == null
+ ? com.google.cloud.aiplatform.v1beta1.ExplanationMetadataOverride.getDefaultInstance()
+ : metadata_;
+ } else {
+ return metadataBuilder_.getMessage();
+ }
+ }
+ /**
+ *
+ *
+ *
+ * The metadata to be overridden. If not specified, no metadata is overridden.
+ *
+ *
+ * .google.cloud.aiplatform.v1beta1.ExplanationMetadataOverride metadata = 2;
+ */
+ public Builder setMetadata(
+ com.google.cloud.aiplatform.v1beta1.ExplanationMetadataOverride value) {
+ if (metadataBuilder_ == null) {
+ if (value == null) {
+ throw new NullPointerException();
+ }
+ metadata_ = value;
+ onChanged();
+ } else {
+ metadataBuilder_.setMessage(value);
+ }
+
+ return this;
+ }
+ /**
+ *
+ *
+ *
+ * The metadata to be overridden. If not specified, no metadata is overridden.
+ *
+ *
+ * .google.cloud.aiplatform.v1beta1.ExplanationMetadataOverride metadata = 2;
+ */
+ public Builder setMetadata(
+ com.google.cloud.aiplatform.v1beta1.ExplanationMetadataOverride.Builder builderForValue) {
+ if (metadataBuilder_ == null) {
+ metadata_ = builderForValue.build();
+ onChanged();
+ } else {
+ metadataBuilder_.setMessage(builderForValue.build());
+ }
+
+ return this;
+ }
+ /**
+ *
+ *
+ *
+ * The metadata to be overridden. If not specified, no metadata is overridden.
+ *
+ *
+ * .google.cloud.aiplatform.v1beta1.ExplanationMetadataOverride metadata = 2;
+ */
+ public Builder mergeMetadata(
+ com.google.cloud.aiplatform.v1beta1.ExplanationMetadataOverride value) {
+ if (metadataBuilder_ == null) {
+ if (metadata_ != null) {
+ metadata_ =
+ com.google.cloud.aiplatform.v1beta1.ExplanationMetadataOverride.newBuilder(metadata_)
+ .mergeFrom(value)
+ .buildPartial();
+ } else {
+ metadata_ = value;
+ }
+ onChanged();
+ } else {
+ metadataBuilder_.mergeFrom(value);
+ }
+
+ return this;
+ }
+ /**
+ *
+ *
+ *
+ * The metadata to be overridden. If not specified, no metadata is overridden.
+ *
+ *
+ * .google.cloud.aiplatform.v1beta1.ExplanationMetadataOverride metadata = 2;
+ */
+ public Builder clearMetadata() {
+ if (metadataBuilder_ == null) {
+ metadata_ = null;
+ onChanged();
+ } else {
+ metadata_ = null;
+ metadataBuilder_ = null;
+ }
+
+ return this;
+ }
+ /**
+ *
+ *
+ *
+ * The metadata to be overridden. If not specified, no metadata is overridden.
+ *
+ *
+ * .google.cloud.aiplatform.v1beta1.ExplanationMetadataOverride metadata = 2;
+ */
+ public com.google.cloud.aiplatform.v1beta1.ExplanationMetadataOverride.Builder
+ getMetadataBuilder() {
+
+ onChanged();
+ return getMetadataFieldBuilder().getBuilder();
+ }
+ /**
+ *
+ *
+ *
+ * The metadata to be overridden. If not specified, no metadata is overridden.
+ *
+ *
+ * .google.cloud.aiplatform.v1beta1.ExplanationMetadataOverride metadata = 2;
+ */
+ public com.google.cloud.aiplatform.v1beta1.ExplanationMetadataOverrideOrBuilder
+ getMetadataOrBuilder() {
+ if (metadataBuilder_ != null) {
+ return metadataBuilder_.getMessageOrBuilder();
+ } else {
+ return metadata_ == null
+ ? com.google.cloud.aiplatform.v1beta1.ExplanationMetadataOverride.getDefaultInstance()
+ : metadata_;
+ }
+ }
+ /**
+ *
+ *
+ *
+ * The metadata to be overridden. If not specified, no metadata is overridden.
+ *
+ *
+ * .google.cloud.aiplatform.v1beta1.ExplanationMetadataOverride metadata = 2;
+ */
+ private com.google.protobuf.SingleFieldBuilderV3<
+ com.google.cloud.aiplatform.v1beta1.ExplanationMetadataOverride,
+ com.google.cloud.aiplatform.v1beta1.ExplanationMetadataOverride.Builder,
+ com.google.cloud.aiplatform.v1beta1.ExplanationMetadataOverrideOrBuilder>
+ getMetadataFieldBuilder() {
+ if (metadataBuilder_ == null) {
+ metadataBuilder_ =
+ new com.google.protobuf.SingleFieldBuilderV3<
+ com.google.cloud.aiplatform.v1beta1.ExplanationMetadataOverride,
+ com.google.cloud.aiplatform.v1beta1.ExplanationMetadataOverride.Builder,
+ com.google.cloud.aiplatform.v1beta1.ExplanationMetadataOverrideOrBuilder>(
+ getMetadata(), getParentForChildren(), isClean());
+ metadata_ = null;
+ }
+ return metadataBuilder_;
+ }
+
+ @java.lang.Override
+ public final Builder setUnknownFields(final com.google.protobuf.UnknownFieldSet unknownFields) {
+ return super.setUnknownFields(unknownFields);
+ }
+
+ @java.lang.Override
+ public final Builder mergeUnknownFields(
+ final com.google.protobuf.UnknownFieldSet unknownFields) {
+ return super.mergeUnknownFields(unknownFields);
+ }
+
+ // @@protoc_insertion_point(builder_scope:google.cloud.aiplatform.v1beta1.ExplanationSpecOverride)
+ }
+
+ // @@protoc_insertion_point(class_scope:google.cloud.aiplatform.v1beta1.ExplanationSpecOverride)
+ private static final com.google.cloud.aiplatform.v1beta1.ExplanationSpecOverride DEFAULT_INSTANCE;
+
+ static {
+ DEFAULT_INSTANCE = new com.google.cloud.aiplatform.v1beta1.ExplanationSpecOverride();
+ }
+
+ public static com.google.cloud.aiplatform.v1beta1.ExplanationSpecOverride getDefaultInstance() {
+ return DEFAULT_INSTANCE;
+ }
+
+ private static final com.google.protobuf.Parser
+ * The parameters to be overridden. Note that the
+ * [method][google.cloud.aiplatform.v1beta1.ExplanationParameters.method] cannot be changed. If not specified,
+ * no parameter is overridden.
+ *
+ *
+ * .google.cloud.aiplatform.v1beta1.ExplanationParameters parameters = 1;
+ *
+ * @return Whether the parameters field is set.
+ */
+ boolean hasParameters();
+ /**
+ *
+ *
+ *
+ * The parameters to be overridden. Note that the
+ * [method][google.cloud.aiplatform.v1beta1.ExplanationParameters.method] cannot be changed. If not specified,
+ * no parameter is overridden.
+ *
+ *
+ * .google.cloud.aiplatform.v1beta1.ExplanationParameters parameters = 1;
+ *
+ * @return The parameters.
+ */
+ com.google.cloud.aiplatform.v1beta1.ExplanationParameters getParameters();
+ /**
+ *
+ *
+ *
+ * The parameters to be overridden. Note that the
+ * [method][google.cloud.aiplatform.v1beta1.ExplanationParameters.method] cannot be changed. If not specified,
+ * no parameter is overridden.
+ *
+ *
+ * .google.cloud.aiplatform.v1beta1.ExplanationParameters parameters = 1;
+ */
+ com.google.cloud.aiplatform.v1beta1.ExplanationParametersOrBuilder getParametersOrBuilder();
+
+ /**
+ *
+ *
+ *
+ * The metadata to be overridden. If not specified, no metadata is overridden.
+ *
+ *
+ * .google.cloud.aiplatform.v1beta1.ExplanationMetadataOverride metadata = 2;
+ *
+ * @return Whether the metadata field is set.
+ */
+ boolean hasMetadata();
+ /**
+ *
+ *
+ *
+ * The metadata to be overridden. If not specified, no metadata is overridden.
+ *
+ *
+ * .google.cloud.aiplatform.v1beta1.ExplanationMetadataOverride metadata = 2;
+ *
+ * @return The metadata.
+ */
+ com.google.cloud.aiplatform.v1beta1.ExplanationMetadataOverride getMetadata();
+ /**
+ *
+ *
+ *
+ * The metadata to be overridden. If not specified, no metadata is overridden.
+ *
+ *
+ * .google.cloud.aiplatform.v1beta1.ExplanationMetadataOverride metadata = 2;
+ */
+ com.google.cloud.aiplatform.v1beta1.ExplanationMetadataOverrideOrBuilder getMetadataOrBuilder();
+}
diff --git a/proto-google-cloud-aiplatform-v1beta1/src/main/java/com/google/cloud/aiplatform/v1beta1/ExportModelRequest.java b/proto-google-cloud-aiplatform-v1beta1/src/main/java/com/google/cloud/aiplatform/v1beta1/ExportModelRequest.java
index 12a7d312f..15577f997 100644
--- a/proto-google-cloud-aiplatform-v1beta1/src/main/java/com/google/cloud/aiplatform/v1beta1/ExportModelRequest.java
+++ b/proto-google-cloud-aiplatform-v1beta1/src/main/java/com/google/cloud/aiplatform/v1beta1/ExportModelRequest.java
@@ -169,14 +169,14 @@ public interface OutputConfigOrBuilder
*
*
*
- * The Google Cloud Storage location where the Model artifact is to be
+ * The Cloud Storage location where the Model artifact is to be
* written to. Under the directory given as the destination a new one with
* name "`model-export-<model-display-name>-<timestamp-of-export-call>`",
* where timestamp is in YYYY-MM-DDThh:mm:ss.sssZ ISO-8601 format,
* will be created. Inside, the Model and any of its supporting files
* will be written.
- * This field should only be set when
- * [Models.supported_export_formats.exportable_contents] contains ARTIFACT.
+ * This field should only be set when the `exportableContent` field of the
+ * [Model.supported_export_formats] object contains `ARTIFACT`.
*
*
* .google.cloud.aiplatform.v1beta1.GcsDestination artifact_destination = 3;
@@ -188,14 +188,14 @@ public interface OutputConfigOrBuilder
*
*
*
- * The Google Cloud Storage location where the Model artifact is to be
+ * The Cloud Storage location where the Model artifact is to be
* written to. Under the directory given as the destination a new one with
* name "`model-export-<model-display-name>-<timestamp-of-export-call>`",
* where timestamp is in YYYY-MM-DDThh:mm:ss.sssZ ISO-8601 format,
* will be created. Inside, the Model and any of its supporting files
* will be written.
- * This field should only be set when
- * [Models.supported_export_formats.exportable_contents] contains ARTIFACT.
+ * This field should only be set when the `exportableContent` field of the
+ * [Model.supported_export_formats] object contains `ARTIFACT`.
*
*
* .google.cloud.aiplatform.v1beta1.GcsDestination artifact_destination = 3;
@@ -207,14 +207,14 @@ public interface OutputConfigOrBuilder
*
*
*
- * The Google Cloud Storage location where the Model artifact is to be
+ * The Cloud Storage location where the Model artifact is to be
* written to. Under the directory given as the destination a new one with
* name "`model-export-<model-display-name>-<timestamp-of-export-call>`",
* where timestamp is in YYYY-MM-DDThh:mm:ss.sssZ ISO-8601 format,
* will be created. Inside, the Model and any of its supporting files
* will be written.
- * This field should only be set when
- * [Models.supported_export_formats.exportable_contents] contains ARTIFACT.
+ * This field should only be set when the `exportableContent` field of the
+ * [Model.supported_export_formats] object contains `ARTIFACT`.
*
*
* .google.cloud.aiplatform.v1beta1.GcsDestination artifact_destination = 3;
@@ -227,8 +227,8 @@ public interface OutputConfigOrBuilder
*
* The Google Container Registry or Artifact Registry uri where the
* Model container image will be copied to.
- * This field should only be set when
- * [Models.supported_export_formats.exportable_contents] contains IMAGE.
+ * This field should only be set when the `exportableContent` field of the
+ * [Model.supported_export_formats] object contains `IMAGE`.
*
*
* .google.cloud.aiplatform.v1beta1.ContainerRegistryDestination image_destination = 4;
@@ -243,8 +243,8 @@ public interface OutputConfigOrBuilder
*
* The Google Container Registry or Artifact Registry uri where the
* Model container image will be copied to.
- * This field should only be set when
- * [Models.supported_export_formats.exportable_contents] contains IMAGE.
+ * This field should only be set when the `exportableContent` field of the
+ * [Model.supported_export_formats] object contains `IMAGE`.
*
*
* .google.cloud.aiplatform.v1beta1.ContainerRegistryDestination image_destination = 4;
@@ -259,8 +259,8 @@ public interface OutputConfigOrBuilder
*
* The Google Container Registry or Artifact Registry uri where the
* Model container image will be copied to.
- * This field should only be set when
- * [Models.supported_export_formats.exportable_contents] contains IMAGE.
+ * This field should only be set when the `exportableContent` field of the
+ * [Model.supported_export_formats] object contains `IMAGE`.
*
*
* .google.cloud.aiplatform.v1beta1.ContainerRegistryDestination image_destination = 4;
@@ -458,14 +458,14 @@ public com.google.protobuf.ByteString getExportFormatIdBytes() {
*
*
*
- * The Google Cloud Storage location where the Model artifact is to be
+ * The Cloud Storage location where the Model artifact is to be
* written to. Under the directory given as the destination a new one with
* name "`model-export-<model-display-name>-<timestamp-of-export-call>`",
* where timestamp is in YYYY-MM-DDThh:mm:ss.sssZ ISO-8601 format,
* will be created. Inside, the Model and any of its supporting files
* will be written.
- * This field should only be set when
- * [Models.supported_export_formats.exportable_contents] contains ARTIFACT.
+ * This field should only be set when the `exportableContent` field of the
+ * [Model.supported_export_formats] object contains `ARTIFACT`.
*
*
* .google.cloud.aiplatform.v1beta1.GcsDestination artifact_destination = 3;
@@ -480,14 +480,14 @@ public boolean hasArtifactDestination() {
*
*
*
- * The Google Cloud Storage location where the Model artifact is to be
+ * The Cloud Storage location where the Model artifact is to be
* written to. Under the directory given as the destination a new one with
* name "`model-export-<model-display-name>-<timestamp-of-export-call>`",
* where timestamp is in YYYY-MM-DDThh:mm:ss.sssZ ISO-8601 format,
* will be created. Inside, the Model and any of its supporting files
* will be written.
- * This field should only be set when
- * [Models.supported_export_formats.exportable_contents] contains ARTIFACT.
+ * This field should only be set when the `exportableContent` field of the
+ * [Model.supported_export_formats] object contains `ARTIFACT`.
*
*
* .google.cloud.aiplatform.v1beta1.GcsDestination artifact_destination = 3;
@@ -504,14 +504,14 @@ public com.google.cloud.aiplatform.v1beta1.GcsDestination getArtifactDestination
*
*
*
- * The Google Cloud Storage location where the Model artifact is to be
+ * The Cloud Storage location where the Model artifact is to be
* written to. Under the directory given as the destination a new one with
* name "`model-export-<model-display-name>-<timestamp-of-export-call>`",
* where timestamp is in YYYY-MM-DDThh:mm:ss.sssZ ISO-8601 format,
* will be created. Inside, the Model and any of its supporting files
* will be written.
- * This field should only be set when
- * [Models.supported_export_formats.exportable_contents] contains ARTIFACT.
+ * This field should only be set when the `exportableContent` field of the
+ * [Model.supported_export_formats] object contains `ARTIFACT`.
*
*
* .google.cloud.aiplatform.v1beta1.GcsDestination artifact_destination = 3;
@@ -530,8 +530,8 @@ public com.google.cloud.aiplatform.v1beta1.GcsDestination getArtifactDestination
*
* The Google Container Registry or Artifact Registry uri where the
* Model container image will be copied to.
- * This field should only be set when
- * [Models.supported_export_formats.exportable_contents] contains IMAGE.
+ * This field should only be set when the `exportableContent` field of the
+ * [Model.supported_export_formats] object contains `IMAGE`.
*
*
* .google.cloud.aiplatform.v1beta1.ContainerRegistryDestination image_destination = 4;
@@ -549,8 +549,8 @@ public boolean hasImageDestination() {
*
* The Google Container Registry or Artifact Registry uri where the
* Model container image will be copied to.
- * This field should only be set when
- * [Models.supported_export_formats.exportable_contents] contains IMAGE.
+ * This field should only be set when the `exportableContent` field of the
+ * [Model.supported_export_formats] object contains `IMAGE`.
*
*
* .google.cloud.aiplatform.v1beta1.ContainerRegistryDestination image_destination = 4;
@@ -570,8 +570,8 @@ public com.google.cloud.aiplatform.v1beta1.ContainerRegistryDestination getImage
*
* The Google Container Registry or Artifact Registry uri where the
* Model container image will be copied to.
- * This field should only be set when
- * [Models.supported_export_formats.exportable_contents] contains IMAGE.
+ * This field should only be set when the `exportableContent` field of the
+ * [Model.supported_export_formats] object contains `IMAGE`.
*
*
* .google.cloud.aiplatform.v1beta1.ContainerRegistryDestination image_destination = 4;
@@ -1102,14 +1102,14 @@ public Builder setExportFormatIdBytes(com.google.protobuf.ByteString value) {
*
*
*
- * The Google Cloud Storage location where the Model artifact is to be
+ * The Cloud Storage location where the Model artifact is to be
* written to. Under the directory given as the destination a new one with
* name "`model-export-<model-display-name>-<timestamp-of-export-call>`",
* where timestamp is in YYYY-MM-DDThh:mm:ss.sssZ ISO-8601 format,
* will be created. Inside, the Model and any of its supporting files
* will be written.
- * This field should only be set when
- * [Models.supported_export_formats.exportable_contents] contains ARTIFACT.
+ * This field should only be set when the `exportableContent` field of the
+ * [Model.supported_export_formats] object contains `ARTIFACT`.
*
*
* .google.cloud.aiplatform.v1beta1.GcsDestination artifact_destination = 3;
@@ -1123,14 +1123,14 @@ public boolean hasArtifactDestination() {
*
*
*
- * The Google Cloud Storage location where the Model artifact is to be
+ * The Cloud Storage location where the Model artifact is to be
* written to. Under the directory given as the destination a new one with
* name "`model-export-<model-display-name>-<timestamp-of-export-call>`",
* where timestamp is in YYYY-MM-DDThh:mm:ss.sssZ ISO-8601 format,
* will be created. Inside, the Model and any of its supporting files
* will be written.
- * This field should only be set when
- * [Models.supported_export_formats.exportable_contents] contains ARTIFACT.
+ * This field should only be set when the `exportableContent` field of the
+ * [Model.supported_export_formats] object contains `ARTIFACT`.
*
*
* .google.cloud.aiplatform.v1beta1.GcsDestination artifact_destination = 3;
@@ -1150,14 +1150,14 @@ public com.google.cloud.aiplatform.v1beta1.GcsDestination getArtifactDestination
*
*
*
- * The Google Cloud Storage location where the Model artifact is to be
+ * The Cloud Storage location where the Model artifact is to be
* written to. Under the directory given as the destination a new one with
* name "`model-export-<model-display-name>-<timestamp-of-export-call>`",
* where timestamp is in YYYY-MM-DDThh:mm:ss.sssZ ISO-8601 format,
* will be created. Inside, the Model and any of its supporting files
* will be written.
- * This field should only be set when
- * [Models.supported_export_formats.exportable_contents] contains ARTIFACT.
+ * This field should only be set when the `exportableContent` field of the
+ * [Model.supported_export_formats] object contains `ARTIFACT`.
*
*
* .google.cloud.aiplatform.v1beta1.GcsDestination artifact_destination = 3;
@@ -1180,14 +1180,14 @@ public Builder setArtifactDestination(
*
*
*
- * The Google Cloud Storage location where the Model artifact is to be
+ * The Cloud Storage location where the Model artifact is to be
* written to. Under the directory given as the destination a new one with
* name "`model-export-<model-display-name>-<timestamp-of-export-call>`",
* where timestamp is in YYYY-MM-DDThh:mm:ss.sssZ ISO-8601 format,
* will be created. Inside, the Model and any of its supporting files
* will be written.
- * This field should only be set when
- * [Models.supported_export_formats.exportable_contents] contains ARTIFACT.
+ * This field should only be set when the `exportableContent` field of the
+ * [Model.supported_export_formats] object contains `ARTIFACT`.
*
*
* .google.cloud.aiplatform.v1beta1.GcsDestination artifact_destination = 3;
@@ -1207,14 +1207,14 @@ public Builder setArtifactDestination(
*
*
*
- * The Google Cloud Storage location where the Model artifact is to be
+ * The Cloud Storage location where the Model artifact is to be
* written to. Under the directory given as the destination a new one with
* name "`model-export-<model-display-name>-<timestamp-of-export-call>`",
* where timestamp is in YYYY-MM-DDThh:mm:ss.sssZ ISO-8601 format,
* will be created. Inside, the Model and any of its supporting files
* will be written.
- * This field should only be set when
- * [Models.supported_export_formats.exportable_contents] contains ARTIFACT.
+ * This field should only be set when the `exportableContent` field of the
+ * [Model.supported_export_formats] object contains `ARTIFACT`.
*
*
* .google.cloud.aiplatform.v1beta1.GcsDestination artifact_destination = 3;
@@ -1241,14 +1241,14 @@ public Builder mergeArtifactDestination(
*
*
*
- * The Google Cloud Storage location where the Model artifact is to be
+ * The Cloud Storage location where the Model artifact is to be
* written to. Under the directory given as the destination a new one with
* name "`model-export-<model-display-name>-<timestamp-of-export-call>`",
* where timestamp is in YYYY-MM-DDThh:mm:ss.sssZ ISO-8601 format,
* will be created. Inside, the Model and any of its supporting files
* will be written.
- * This field should only be set when
- * [Models.supported_export_formats.exportable_contents] contains ARTIFACT.
+ * This field should only be set when the `exportableContent` field of the
+ * [Model.supported_export_formats] object contains `ARTIFACT`.
*
*
* .google.cloud.aiplatform.v1beta1.GcsDestination artifact_destination = 3;
@@ -1268,14 +1268,14 @@ public Builder clearArtifactDestination() {
*
*
*
- * The Google Cloud Storage location where the Model artifact is to be
+ * The Cloud Storage location where the Model artifact is to be
* written to. Under the directory given as the destination a new one with
* name "`model-export-<model-display-name>-<timestamp-of-export-call>`",
* where timestamp is in YYYY-MM-DDThh:mm:ss.sssZ ISO-8601 format,
* will be created. Inside, the Model and any of its supporting files
* will be written.
- * This field should only be set when
- * [Models.supported_export_formats.exportable_contents] contains ARTIFACT.
+ * This field should only be set when the `exportableContent` field of the
+ * [Model.supported_export_formats] object contains `ARTIFACT`.
*
*
* .google.cloud.aiplatform.v1beta1.GcsDestination artifact_destination = 3;
@@ -1290,14 +1290,14 @@ public Builder clearArtifactDestination() {
*
*
*
- * The Google Cloud Storage location where the Model artifact is to be
+ * The Cloud Storage location where the Model artifact is to be
* written to. Under the directory given as the destination a new one with
* name "`model-export-<model-display-name>-<timestamp-of-export-call>`",
* where timestamp is in YYYY-MM-DDThh:mm:ss.sssZ ISO-8601 format,
* will be created. Inside, the Model and any of its supporting files
* will be written.
- * This field should only be set when
- * [Models.supported_export_formats.exportable_contents] contains ARTIFACT.
+ * This field should only be set when the `exportableContent` field of the
+ * [Model.supported_export_formats] object contains `ARTIFACT`.
*
*
* .google.cloud.aiplatform.v1beta1.GcsDestination artifact_destination = 3;
@@ -1316,14 +1316,14 @@ public Builder clearArtifactDestination() {
*
*
*
- * The Google Cloud Storage location where the Model artifact is to be
+ * The Cloud Storage location where the Model artifact is to be
* written to. Under the directory given as the destination a new one with
* name "`model-export-<model-display-name>-<timestamp-of-export-call>`",
* where timestamp is in YYYY-MM-DDThh:mm:ss.sssZ ISO-8601 format,
* will be created. Inside, the Model and any of its supporting files
* will be written.
- * This field should only be set when
- * [Models.supported_export_formats.exportable_contents] contains ARTIFACT.
+ * This field should only be set when the `exportableContent` field of the
+ * [Model.supported_export_formats] object contains `ARTIFACT`.
*
*
* .google.cloud.aiplatform.v1beta1.GcsDestination artifact_destination = 3;
@@ -1357,8 +1357,8 @@ public Builder clearArtifactDestination() {
*
* The Google Container Registry or Artifact Registry uri where the
* Model container image will be copied to.
- * This field should only be set when
- * [Models.supported_export_formats.exportable_contents] contains IMAGE.
+ * This field should only be set when the `exportableContent` field of the
+ * [Model.supported_export_formats] object contains `IMAGE`.
*
*
* .google.cloud.aiplatform.v1beta1.ContainerRegistryDestination image_destination = 4;
@@ -1375,8 +1375,8 @@ public boolean hasImageDestination() {
*
* The Google Container Registry or Artifact Registry uri where the
* Model container image will be copied to.
- * This field should only be set when
- * [Models.supported_export_formats.exportable_contents] contains IMAGE.
+ * This field should only be set when the `exportableContent` field of the
+ * [Model.supported_export_formats] object contains `IMAGE`.
*
*
* .google.cloud.aiplatform.v1beta1.ContainerRegistryDestination image_destination = 4;
@@ -1401,8 +1401,8 @@ public boolean hasImageDestination() {
*
* The Google Container Registry or Artifact Registry uri where the
* Model container image will be copied to.
- * This field should only be set when
- * [Models.supported_export_formats.exportable_contents] contains IMAGE.
+ * This field should only be set when the `exportableContent` field of the
+ * [Model.supported_export_formats] object contains `IMAGE`.
*
*
* .google.cloud.aiplatform.v1beta1.ContainerRegistryDestination image_destination = 4;
@@ -1428,8 +1428,8 @@ public Builder setImageDestination(
*
* The Google Container Registry or Artifact Registry uri where the
* Model container image will be copied to.
- * This field should only be set when
- * [Models.supported_export_formats.exportable_contents] contains IMAGE.
+ * This field should only be set when the `exportableContent` field of the
+ * [Model.supported_export_formats] object contains `IMAGE`.
*
*
* .google.cloud.aiplatform.v1beta1.ContainerRegistryDestination image_destination = 4;
@@ -1453,8 +1453,8 @@ public Builder setImageDestination(
*
* The Google Container Registry or Artifact Registry uri where the
* Model container image will be copied to.
- * This field should only be set when
- * [Models.supported_export_formats.exportable_contents] contains IMAGE.
+ * This field should only be set when the `exportableContent` field of the
+ * [Model.supported_export_formats] object contains `IMAGE`.
*
*
* .google.cloud.aiplatform.v1beta1.ContainerRegistryDestination image_destination = 4;
@@ -1485,8 +1485,8 @@ public Builder mergeImageDestination(
*
* The Google Container Registry or Artifact Registry uri where the
* Model container image will be copied to.
- * This field should only be set when
- * [Models.supported_export_formats.exportable_contents] contains IMAGE.
+ * This field should only be set when the `exportableContent` field of the
+ * [Model.supported_export_formats] object contains `IMAGE`.
*
*
* .google.cloud.aiplatform.v1beta1.ContainerRegistryDestination image_destination = 4;
@@ -1509,8 +1509,8 @@ public Builder clearImageDestination() {
*
* The Google Container Registry or Artifact Registry uri where the
* Model container image will be copied to.
- * This field should only be set when
- * [Models.supported_export_formats.exportable_contents] contains IMAGE.
+ * This field should only be set when the `exportableContent` field of the
+ * [Model.supported_export_formats] object contains `IMAGE`.
*
*
* .google.cloud.aiplatform.v1beta1.ContainerRegistryDestination image_destination = 4;
@@ -1528,8 +1528,8 @@ public Builder clearImageDestination() {
*
* The Google Container Registry or Artifact Registry uri where the
* Model container image will be copied to.
- * This field should only be set when
- * [Models.supported_export_formats.exportable_contents] contains IMAGE.
+ * This field should only be set when the `exportableContent` field of the
+ * [Model.supported_export_formats] object contains `IMAGE`.
*
*
* .google.cloud.aiplatform.v1beta1.ContainerRegistryDestination image_destination = 4;
@@ -1552,8 +1552,8 @@ public Builder clearImageDestination() {
*
* The Google Container Registry or Artifact Registry uri where the
* Model container image will be copied to.
- * This field should only be set when
- * [Models.supported_export_formats.exportable_contents] contains IMAGE.
+ * This field should only be set when the `exportableContent` field of the
+ * [Model.supported_export_formats] object contains `IMAGE`.
*
*
* .google.cloud.aiplatform.v1beta1.ContainerRegistryDestination image_destination = 4;
diff --git a/proto-google-cloud-aiplatform-v1beta1/src/main/java/com/google/cloud/aiplatform/v1beta1/FilterSplit.java b/proto-google-cloud-aiplatform-v1beta1/src/main/java/com/google/cloud/aiplatform/v1beta1/FilterSplit.java
index ec7a1316b..b76ec6200 100644
--- a/proto-google-cloud-aiplatform-v1beta1/src/main/java/com/google/cloud/aiplatform/v1beta1/FilterSplit.java
+++ b/proto-google-cloud-aiplatform-v1beta1/src/main/java/com/google/cloud/aiplatform/v1beta1/FilterSplit.java
@@ -27,6 +27,7 @@
* supported for Datasets containing DataItems.
* If any of the filters in this message are to match nothing, then they can be
* set as '-' (the minus sign).
+ * Supported only for unstructured Datasets.
*
+ * Customer-managed encryption key options for a HyperparameterTuningJob.
+ * If this is set, then all resources created by the HyperparameterTuningJob
+ * will be encrypted with the provided encryption key.
+ *
+ *
+ * .google.cloud.aiplatform.v1beta1.EncryptionSpec encryption_spec = 17;
+ *
+ * @return Whether the encryptionSpec field is set.
+ */
+ @java.lang.Override
+ public boolean hasEncryptionSpec() {
+ return encryptionSpec_ != null;
+ }
+ /**
+ *
+ *
+ *
+ * Customer-managed encryption key options for a HyperparameterTuningJob.
+ * If this is set, then all resources created by the HyperparameterTuningJob
+ * will be encrypted with the provided encryption key.
+ *
+ *
+ * .google.cloud.aiplatform.v1beta1.EncryptionSpec encryption_spec = 17;
+ *
+ * @return The encryptionSpec.
+ */
+ @java.lang.Override
+ public com.google.cloud.aiplatform.v1beta1.EncryptionSpec getEncryptionSpec() {
+ return encryptionSpec_ == null
+ ? com.google.cloud.aiplatform.v1beta1.EncryptionSpec.getDefaultInstance()
+ : encryptionSpec_;
+ }
+ /**
+ *
+ *
+ *
+ * Customer-managed encryption key options for a HyperparameterTuningJob.
+ * If this is set, then all resources created by the HyperparameterTuningJob
+ * will be encrypted with the provided encryption key.
+ *
+ *
+ * .google.cloud.aiplatform.v1beta1.EncryptionSpec encryption_spec = 17;
+ */
+ @java.lang.Override
+ public com.google.cloud.aiplatform.v1beta1.EncryptionSpecOrBuilder getEncryptionSpecOrBuilder() {
+ return getEncryptionSpec();
+ }
+
private byte memoizedIsInitialized = -1;
@java.lang.Override
@@ -1103,6 +1174,9 @@ public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io
}
com.google.protobuf.GeneratedMessageV3.serializeStringMapTo(
output, internalGetLabels(), LabelsDefaultEntryHolder.defaultEntry, 16);
+ if (encryptionSpec_ != null) {
+ output.writeMessage(17, getEncryptionSpec());
+ }
unknownFields.writeTo(output);
}
@@ -1164,6 +1238,9 @@ public int getSerializedSize() {
.build();
size += com.google.protobuf.CodedOutputStream.computeMessageSize(16, labels__);
}
+ if (encryptionSpec_ != null) {
+ size += com.google.protobuf.CodedOutputStream.computeMessageSize(17, getEncryptionSpec());
+ }
size += unknownFields.getSerializedSize();
memoizedSize = size;
return size;
@@ -1216,6 +1293,10 @@ public boolean equals(final java.lang.Object obj) {
if (!getError().equals(other.getError())) return false;
}
if (!internalGetLabels().equals(other.internalGetLabels())) return false;
+ if (hasEncryptionSpec() != other.hasEncryptionSpec()) return false;
+ if (hasEncryptionSpec()) {
+ if (!getEncryptionSpec().equals(other.getEncryptionSpec())) return false;
+ }
if (!unknownFields.equals(other.unknownFields)) return false;
return true;
}
@@ -1275,6 +1356,10 @@ public int hashCode() {
hash = (37 * hash) + LABELS_FIELD_NUMBER;
hash = (53 * hash) + internalGetLabels().hashCode();
}
+ if (hasEncryptionSpec()) {
+ hash = (37 * hash) + ENCRYPTION_SPEC_FIELD_NUMBER;
+ hash = (53 * hash) + getEncryptionSpec().hashCode();
+ }
hash = (29 * hash) + unknownFields.hashCode();
memoizedHashCode = hash;
return hash;
@@ -1506,6 +1591,12 @@ public Builder clear() {
errorBuilder_ = null;
}
internalGetMutableLabels().clear();
+ if (encryptionSpecBuilder_ == null) {
+ encryptionSpec_ = null;
+ } else {
+ encryptionSpec_ = null;
+ encryptionSpecBuilder_ = null;
+ }
return this;
}
@@ -1586,6 +1677,11 @@ public com.google.cloud.aiplatform.v1beta1.HyperparameterTuningJob buildPartial(
}
result.labels_ = internalGetLabels();
result.labels_.makeImmutable();
+ if (encryptionSpecBuilder_ == null) {
+ result.encryptionSpec_ = encryptionSpec_;
+ } else {
+ result.encryptionSpec_ = encryptionSpecBuilder_.build();
+ }
onBuilt();
return result;
}
@@ -1705,6 +1801,9 @@ public Builder mergeFrom(com.google.cloud.aiplatform.v1beta1.HyperparameterTunin
mergeError(other.getError());
}
internalGetMutableLabels().mergeFrom(other.internalGetLabels());
+ if (other.hasEncryptionSpec()) {
+ mergeEncryptionSpec(other.getEncryptionSpec());
+ }
this.mergeUnknownFields(other.unknownFields);
onChanged();
return this;
@@ -4199,6 +4298,211 @@ public Builder putAllLabels(java.util.Map
+ * Customer-managed encryption key options for a HyperparameterTuningJob.
+ * If this is set, then all resources created by the HyperparameterTuningJob
+ * will be encrypted with the provided encryption key.
+ *
+ *
+ * .google.cloud.aiplatform.v1beta1.EncryptionSpec encryption_spec = 17;
+ *
+ * @return Whether the encryptionSpec field is set.
+ */
+ public boolean hasEncryptionSpec() {
+ return encryptionSpecBuilder_ != null || encryptionSpec_ != null;
+ }
+ /**
+ *
+ *
+ *
+ * Customer-managed encryption key options for a HyperparameterTuningJob.
+ * If this is set, then all resources created by the HyperparameterTuningJob
+ * will be encrypted with the provided encryption key.
+ *
+ *
+ * .google.cloud.aiplatform.v1beta1.EncryptionSpec encryption_spec = 17;
+ *
+ * @return The encryptionSpec.
+ */
+ public com.google.cloud.aiplatform.v1beta1.EncryptionSpec getEncryptionSpec() {
+ if (encryptionSpecBuilder_ == null) {
+ return encryptionSpec_ == null
+ ? com.google.cloud.aiplatform.v1beta1.EncryptionSpec.getDefaultInstance()
+ : encryptionSpec_;
+ } else {
+ return encryptionSpecBuilder_.getMessage();
+ }
+ }
+ /**
+ *
+ *
+ *
+ * Customer-managed encryption key options for a HyperparameterTuningJob.
+ * If this is set, then all resources created by the HyperparameterTuningJob
+ * will be encrypted with the provided encryption key.
+ *
+ *
+ * .google.cloud.aiplatform.v1beta1.EncryptionSpec encryption_spec = 17;
+ */
+ public Builder setEncryptionSpec(com.google.cloud.aiplatform.v1beta1.EncryptionSpec value) {
+ if (encryptionSpecBuilder_ == null) {
+ if (value == null) {
+ throw new NullPointerException();
+ }
+ encryptionSpec_ = value;
+ onChanged();
+ } else {
+ encryptionSpecBuilder_.setMessage(value);
+ }
+
+ return this;
+ }
+ /**
+ *
+ *
+ *
+ * Customer-managed encryption key options for a HyperparameterTuningJob.
+ * If this is set, then all resources created by the HyperparameterTuningJob
+ * will be encrypted with the provided encryption key.
+ *
+ *
+ * .google.cloud.aiplatform.v1beta1.EncryptionSpec encryption_spec = 17;
+ */
+ public Builder setEncryptionSpec(
+ com.google.cloud.aiplatform.v1beta1.EncryptionSpec.Builder builderForValue) {
+ if (encryptionSpecBuilder_ == null) {
+ encryptionSpec_ = builderForValue.build();
+ onChanged();
+ } else {
+ encryptionSpecBuilder_.setMessage(builderForValue.build());
+ }
+
+ return this;
+ }
+ /**
+ *
+ *
+ *
+ * Customer-managed encryption key options for a HyperparameterTuningJob.
+ * If this is set, then all resources created by the HyperparameterTuningJob
+ * will be encrypted with the provided encryption key.
+ *
+ *
+ * .google.cloud.aiplatform.v1beta1.EncryptionSpec encryption_spec = 17;
+ */
+ public Builder mergeEncryptionSpec(com.google.cloud.aiplatform.v1beta1.EncryptionSpec value) {
+ if (encryptionSpecBuilder_ == null) {
+ if (encryptionSpec_ != null) {
+ encryptionSpec_ =
+ com.google.cloud.aiplatform.v1beta1.EncryptionSpec.newBuilder(encryptionSpec_)
+ .mergeFrom(value)
+ .buildPartial();
+ } else {
+ encryptionSpec_ = value;
+ }
+ onChanged();
+ } else {
+ encryptionSpecBuilder_.mergeFrom(value);
+ }
+
+ return this;
+ }
+ /**
+ *
+ *
+ *
+ * Customer-managed encryption key options for a HyperparameterTuningJob.
+ * If this is set, then all resources created by the HyperparameterTuningJob
+ * will be encrypted with the provided encryption key.
+ *
+ *
+ * .google.cloud.aiplatform.v1beta1.EncryptionSpec encryption_spec = 17;
+ */
+ public Builder clearEncryptionSpec() {
+ if (encryptionSpecBuilder_ == null) {
+ encryptionSpec_ = null;
+ onChanged();
+ } else {
+ encryptionSpec_ = null;
+ encryptionSpecBuilder_ = null;
+ }
+
+ return this;
+ }
+ /**
+ *
+ *
+ *
+ * Customer-managed encryption key options for a HyperparameterTuningJob.
+ * If this is set, then all resources created by the HyperparameterTuningJob
+ * will be encrypted with the provided encryption key.
+ *
+ *
+ * .google.cloud.aiplatform.v1beta1.EncryptionSpec encryption_spec = 17;
+ */
+ public com.google.cloud.aiplatform.v1beta1.EncryptionSpec.Builder getEncryptionSpecBuilder() {
+
+ onChanged();
+ return getEncryptionSpecFieldBuilder().getBuilder();
+ }
+ /**
+ *
+ *
+ *
+ * Customer-managed encryption key options for a HyperparameterTuningJob.
+ * If this is set, then all resources created by the HyperparameterTuningJob
+ * will be encrypted with the provided encryption key.
+ *
+ *
+ * .google.cloud.aiplatform.v1beta1.EncryptionSpec encryption_spec = 17;
+ */
+ public com.google.cloud.aiplatform.v1beta1.EncryptionSpecOrBuilder
+ getEncryptionSpecOrBuilder() {
+ if (encryptionSpecBuilder_ != null) {
+ return encryptionSpecBuilder_.getMessageOrBuilder();
+ } else {
+ return encryptionSpec_ == null
+ ? com.google.cloud.aiplatform.v1beta1.EncryptionSpec.getDefaultInstance()
+ : encryptionSpec_;
+ }
+ }
+ /**
+ *
+ *
+ *
+ * Customer-managed encryption key options for a HyperparameterTuningJob.
+ * If this is set, then all resources created by the HyperparameterTuningJob
+ * will be encrypted with the provided encryption key.
+ *
+ *
+ * .google.cloud.aiplatform.v1beta1.EncryptionSpec encryption_spec = 17;
+ */
+ private com.google.protobuf.SingleFieldBuilderV3<
+ com.google.cloud.aiplatform.v1beta1.EncryptionSpec,
+ com.google.cloud.aiplatform.v1beta1.EncryptionSpec.Builder,
+ com.google.cloud.aiplatform.v1beta1.EncryptionSpecOrBuilder>
+ getEncryptionSpecFieldBuilder() {
+ if (encryptionSpecBuilder_ == null) {
+ encryptionSpecBuilder_ =
+ new com.google.protobuf.SingleFieldBuilderV3<
+ com.google.cloud.aiplatform.v1beta1.EncryptionSpec,
+ com.google.cloud.aiplatform.v1beta1.EncryptionSpec.Builder,
+ com.google.cloud.aiplatform.v1beta1.EncryptionSpecOrBuilder>(
+ getEncryptionSpec(), getParentForChildren(), isClean());
+ encryptionSpec_ = null;
+ }
+ return encryptionSpecBuilder_;
+ }
+
@java.lang.Override
public final Builder setUnknownFields(final com.google.protobuf.UnknownFieldSet unknownFields) {
return super.setUnknownFields(unknownFields);
diff --git a/proto-google-cloud-aiplatform-v1beta1/src/main/java/com/google/cloud/aiplatform/v1beta1/HyperparameterTuningJobOrBuilder.java b/proto-google-cloud-aiplatform-v1beta1/src/main/java/com/google/cloud/aiplatform/v1beta1/HyperparameterTuningJobOrBuilder.java
index 7634369d1..8de927964 100644
--- a/proto-google-cloud-aiplatform-v1beta1/src/main/java/com/google/cloud/aiplatform/v1beta1/HyperparameterTuningJobOrBuilder.java
+++ b/proto-google-cloud-aiplatform-v1beta1/src/main/java/com/google/cloud/aiplatform/v1beta1/HyperparameterTuningJobOrBuilder.java
@@ -564,4 +564,45 @@ public interface HyperparameterTuningJobOrBuilder
* map<string, string> labels = 16;
*/
java.lang.String getLabelsOrThrow(java.lang.String key);
+
+ /**
+ *
+ *
+ *
+ * Customer-managed encryption key options for a HyperparameterTuningJob.
+ * If this is set, then all resources created by the HyperparameterTuningJob
+ * will be encrypted with the provided encryption key.
+ *
+ *
+ * .google.cloud.aiplatform.v1beta1.EncryptionSpec encryption_spec = 17;
+ *
+ * @return Whether the encryptionSpec field is set.
+ */
+ boolean hasEncryptionSpec();
+ /**
+ *
+ *
+ *
+ * Customer-managed encryption key options for a HyperparameterTuningJob.
+ * If this is set, then all resources created by the HyperparameterTuningJob
+ * will be encrypted with the provided encryption key.
+ *
+ *
+ * .google.cloud.aiplatform.v1beta1.EncryptionSpec encryption_spec = 17;
+ *
+ * @return The encryptionSpec.
+ */
+ com.google.cloud.aiplatform.v1beta1.EncryptionSpec getEncryptionSpec();
+ /**
+ *
+ *
+ *
+ * Customer-managed encryption key options for a HyperparameterTuningJob.
+ * If this is set, then all resources created by the HyperparameterTuningJob
+ * will be encrypted with the provided encryption key.
+ *
+ *
+ * .google.cloud.aiplatform.v1beta1.EncryptionSpec encryption_spec = 17;
+ */
+ com.google.cloud.aiplatform.v1beta1.EncryptionSpecOrBuilder getEncryptionSpecOrBuilder();
}
diff --git a/proto-google-cloud-aiplatform-v1beta1/src/main/java/com/google/cloud/aiplatform/v1beta1/HyperparameterTuningJobProto.java b/proto-google-cloud-aiplatform-v1beta1/src/main/java/com/google/cloud/aiplatform/v1beta1/HyperparameterTuningJobProto.java
index d96f4404b..fac475138 100644
--- a/proto-google-cloud-aiplatform-v1beta1/src/main/java/com/google/cloud/aiplatform/v1beta1/HyperparameterTuningJobProto.java
+++ b/proto-google-cloud-aiplatform-v1beta1/src/main/java/com/google/cloud/aiplatform/v1beta1/HyperparameterTuningJobProto.java
@@ -49,40 +49,43 @@ public static com.google.protobuf.Descriptors.FileDescriptor getDescriptor() {
+ "aiplatform.v1beta1\032\037google/api/field_beh"
+ "avior.proto\032\031google/api/resource.proto\0320"
+ "google/cloud/aiplatform/v1beta1/custom_j"
- + "ob.proto\032/google/cloud/aiplatform/v1beta"
- + "1/job_state.proto\032+google/cloud/aiplatfo"
- + "rm/v1beta1/study.proto\032\037google/protobuf/"
- + "timestamp.proto\032\027google/rpc/status.proto"
- + "\032\034google/api/annotations.proto\"\317\007\n\027Hyper"
- + "parameterTuningJob\022\021\n\004name\030\001 \001(\tB\003\340A\003\022\031\n"
- + "\014display_name\030\002 \001(\tB\003\340A\002\022C\n\nstudy_spec\030\004"
- + " \001(\0132*.google.cloud.aiplatform.v1beta1.S"
- + "tudySpecB\003\340A\002\022\034\n\017max_trial_count\030\005 \001(\005B\003"
- + "\340A\002\022!\n\024parallel_trial_count\030\006 \001(\005B\003\340A\002\022\036"
- + "\n\026max_failed_trial_count\030\007 \001(\005\022K\n\016trial_"
- + "job_spec\030\010 \001(\0132..google.cloud.aiplatform"
- + ".v1beta1.CustomJobSpecB\003\340A\002\022;\n\006trials\030\t "
- + "\003(\0132&.google.cloud.aiplatform.v1beta1.Tr"
- + "ialB\003\340A\003\022=\n\005state\030\n \001(\0162).google.cloud.a"
- + "iplatform.v1beta1.JobStateB\003\340A\003\0224\n\013creat"
- + "e_time\030\013 \001(\0132\032.google.protobuf.Timestamp"
- + "B\003\340A\003\0223\n\nstart_time\030\014 \001(\0132\032.google.proto"
- + "buf.TimestampB\003\340A\003\0221\n\010end_time\030\r \001(\0132\032.g"
- + "oogle.protobuf.TimestampB\003\340A\003\0224\n\013update_"
- + "time\030\016 \001(\0132\032.google.protobuf.TimestampB\003"
- + "\340A\003\022&\n\005error\030\017 \001(\0132\022.google.rpc.StatusB\003"
- + "\340A\003\022T\n\006labels\030\020 \003(\0132D.google.cloud.aipla"
- + "tform.v1beta1.HyperparameterTuningJob.La"
- + "belsEntry\032-\n\013LabelsEntry\022\013\n\003key\030\001 \001(\t\022\r\n"
- + "\005value\030\002 \001(\t:\0028\001:\225\001\352A\221\001\n1aiplatform.goog"
- + "leapis.com/HyperparameterTuningJob\022\\proj"
- + "ects/{project}/locations/{location}/hype"
- + "rparameterTuningJobs/{hyperparameter_tun"
- + "ing_job}B\220\001\n#com.google.cloud.aiplatform"
- + ".v1beta1B\034HyperparameterTuningJobProtoP\001"
- + "ZIgoogle.golang.org/genproto/googleapis/"
- + "cloud/aiplatform/v1beta1;aiplatformb\006pro"
- + "to3"
+ + "ob.proto\0325google/cloud/aiplatform/v1beta"
+ + "1/encryption_spec.proto\032/google/cloud/ai"
+ + "platform/v1beta1/job_state.proto\032+google"
+ + "/cloud/aiplatform/v1beta1/study.proto\032\037g"
+ + "oogle/protobuf/timestamp.proto\032\027google/r"
+ + "pc/status.proto\032\034google/api/annotations."
+ + "proto\"\231\010\n\027HyperparameterTuningJob\022\021\n\004nam"
+ + "e\030\001 \001(\tB\003\340A\003\022\031\n\014display_name\030\002 \001(\tB\003\340A\002\022"
+ + "C\n\nstudy_spec\030\004 \001(\0132*.google.cloud.aipla"
+ + "tform.v1beta1.StudySpecB\003\340A\002\022\034\n\017max_tria"
+ + "l_count\030\005 \001(\005B\003\340A\002\022!\n\024parallel_trial_cou"
+ + "nt\030\006 \001(\005B\003\340A\002\022\036\n\026max_failed_trial_count\030"
+ + "\007 \001(\005\022K\n\016trial_job_spec\030\010 \001(\0132..google.c"
+ + "loud.aiplatform.v1beta1.CustomJobSpecB\003\340"
+ + "A\002\022;\n\006trials\030\t \003(\0132&.google.cloud.aiplat"
+ + "form.v1beta1.TrialB\003\340A\003\022=\n\005state\030\n \001(\0162)"
+ + ".google.cloud.aiplatform.v1beta1.JobStat"
+ + "eB\003\340A\003\0224\n\013create_time\030\013 \001(\0132\032.google.pro"
+ + "tobuf.TimestampB\003\340A\003\0223\n\nstart_time\030\014 \001(\013"
+ + "2\032.google.protobuf.TimestampB\003\340A\003\0221\n\010end"
+ + "_time\030\r \001(\0132\032.google.protobuf.TimestampB"
+ + "\003\340A\003\0224\n\013update_time\030\016 \001(\0132\032.google.proto"
+ + "buf.TimestampB\003\340A\003\022&\n\005error\030\017 \001(\0132\022.goog"
+ + "le.rpc.StatusB\003\340A\003\022T\n\006labels\030\020 \003(\0132D.goo"
+ + "gle.cloud.aiplatform.v1beta1.Hyperparame"
+ + "terTuningJob.LabelsEntry\022H\n\017encryption_s"
+ + "pec\030\021 \001(\0132/.google.cloud.aiplatform.v1be"
+ + "ta1.EncryptionSpec\032-\n\013LabelsEntry\022\013\n\003key"
+ + "\030\001 \001(\t\022\r\n\005value\030\002 \001(\t:\0028\001:\225\001\352A\221\001\n1aiplat"
+ + "form.googleapis.com/HyperparameterTuning"
+ + "Job\022\\projects/{project}/locations/{locat"
+ + "ion}/hyperparameterTuningJobs/{hyperpara"
+ + "meter_tuning_job}B\220\001\n#com.google.cloud.a"
+ + "iplatform.v1beta1B\034HyperparameterTuningJ"
+ + "obProtoP\001ZIgoogle.golang.org/genproto/go"
+ + "ogleapis/cloud/aiplatform/v1beta1;aiplat"
+ + "formb\006proto3"
};
descriptor =
com.google.protobuf.Descriptors.FileDescriptor.internalBuildGeneratedFileFrom(
@@ -91,6 +94,7 @@ public static com.google.protobuf.Descriptors.FileDescriptor getDescriptor() {
com.google.api.FieldBehaviorProto.getDescriptor(),
com.google.api.ResourceProto.getDescriptor(),
com.google.cloud.aiplatform.v1beta1.CustomJobProto.getDescriptor(),
+ com.google.cloud.aiplatform.v1beta1.EncryptionSpecProto.getDescriptor(),
com.google.cloud.aiplatform.v1beta1.JobStateProto.getDescriptor(),
com.google.cloud.aiplatform.v1beta1.StudyProto.getDescriptor(),
com.google.protobuf.TimestampProto.getDescriptor(),
@@ -118,6 +122,7 @@ public static com.google.protobuf.Descriptors.FileDescriptor getDescriptor() {
"UpdateTime",
"Error",
"Labels",
+ "EncryptionSpec",
});
internal_static_google_cloud_aiplatform_v1beta1_HyperparameterTuningJob_LabelsEntry_descriptor =
internal_static_google_cloud_aiplatform_v1beta1_HyperparameterTuningJob_descriptor
@@ -138,6 +143,7 @@ public static com.google.protobuf.Descriptors.FileDescriptor getDescriptor() {
com.google.api.FieldBehaviorProto.getDescriptor();
com.google.api.ResourceProto.getDescriptor();
com.google.cloud.aiplatform.v1beta1.CustomJobProto.getDescriptor();
+ com.google.cloud.aiplatform.v1beta1.EncryptionSpecProto.getDescriptor();
com.google.cloud.aiplatform.v1beta1.JobStateProto.getDescriptor();
com.google.cloud.aiplatform.v1beta1.StudyProto.getDescriptor();
com.google.protobuf.TimestampProto.getDescriptor();
diff --git a/proto-google-cloud-aiplatform-v1beta1/src/main/java/com/google/cloud/aiplatform/v1beta1/InputDataConfig.java b/proto-google-cloud-aiplatform-v1beta1/src/main/java/com/google/cloud/aiplatform/v1beta1/InputDataConfig.java
index 3b6605cc0..eb71199dc 100644
--- a/proto-google-cloud-aiplatform-v1beta1/src/main/java/com/google/cloud/aiplatform/v1beta1/InputDataConfig.java
+++ b/proto-google-cloud-aiplatform-v1beta1/src/main/java/com/google/cloud/aiplatform/v1beta1/InputDataConfig.java
@@ -553,17 +553,17 @@ public com.google.cloud.aiplatform.v1beta1.TimestampSplitOrBuilder getTimestampS
*
*
*
- * The Google Cloud Storage location where the training data is to be
- * written to. In the given directory a new directory will be created with
+ * The Cloud Storage location where the training data is to be
+ * written to. In the given directory a new directory is created with
* name:
* `dataset-<dataset-id>-<annotation-type>-<timestamp-of-training-call>`
* where timestamp is in YYYY-MM-DDThh:mm:ss.sssZ ISO-8601 format.
- * All training input data will be written into that directory.
- * The AI Platform environment variables representing Google Cloud Storage
- * data URIs will always be represented in the Google Cloud Storage wildcard
+ * All training input data is written into that directory.
+ * The AI Platform environment variables representing Cloud Storage
+ * data URIs are represented in the Cloud Storage wildcard
* format to support sharded data. e.g.: "gs://.../training-*.jsonl"
* * AIP_DATA_FORMAT = "jsonl" for non-tabular data, "csv" for tabular data
- * * AIP_TRAINING_DATA_URI =
+ * * AIP_TRAINING_DATA_URI =
* "gcs_destination/dataset-<dataset-id>-<annotation-type>-<time>/training-*.${AIP_DATA_FORMAT}"
* * AIP_VALIDATION_DATA_URI =
* "gcs_destination/dataset-<dataset-id>-<annotation-type>-<time>/validation-*.${AIP_DATA_FORMAT}"
@@ -583,17 +583,17 @@ public boolean hasGcsDestination() {
*
*
*
- * The Google Cloud Storage location where the training data is to be
- * written to. In the given directory a new directory will be created with
+ * The Cloud Storage location where the training data is to be
+ * written to. In the given directory a new directory is created with
* name:
* `dataset-<dataset-id>-<annotation-type>-<timestamp-of-training-call>`
* where timestamp is in YYYY-MM-DDThh:mm:ss.sssZ ISO-8601 format.
- * All training input data will be written into that directory.
- * The AI Platform environment variables representing Google Cloud Storage
- * data URIs will always be represented in the Google Cloud Storage wildcard
+ * All training input data is written into that directory.
+ * The AI Platform environment variables representing Cloud Storage
+ * data URIs are represented in the Cloud Storage wildcard
* format to support sharded data. e.g.: "gs://.../training-*.jsonl"
* * AIP_DATA_FORMAT = "jsonl" for non-tabular data, "csv" for tabular data
- * * AIP_TRAINING_DATA_URI =
+ * * AIP_TRAINING_DATA_URI =
* "gcs_destination/dataset-<dataset-id>-<annotation-type>-<time>/training-*.${AIP_DATA_FORMAT}"
* * AIP_VALIDATION_DATA_URI =
* "gcs_destination/dataset-<dataset-id>-<annotation-type>-<time>/validation-*.${AIP_DATA_FORMAT}"
@@ -616,17 +616,17 @@ public com.google.cloud.aiplatform.v1beta1.GcsDestination getGcsDestination() {
*
*
*
- * The Google Cloud Storage location where the training data is to be
- * written to. In the given directory a new directory will be created with
+ * The Cloud Storage location where the training data is to be
+ * written to. In the given directory a new directory is created with
* name:
* `dataset-<dataset-id>-<annotation-type>-<timestamp-of-training-call>`
* where timestamp is in YYYY-MM-DDThh:mm:ss.sssZ ISO-8601 format.
- * All training input data will be written into that directory.
- * The AI Platform environment variables representing Google Cloud Storage
- * data URIs will always be represented in the Google Cloud Storage wildcard
+ * All training input data is written into that directory.
+ * The AI Platform environment variables representing Cloud Storage
+ * data URIs are represented in the Cloud Storage wildcard
* format to support sharded data. e.g.: "gs://.../training-*.jsonl"
* * AIP_DATA_FORMAT = "jsonl" for non-tabular data, "csv" for tabular data
- * * AIP_TRAINING_DATA_URI =
+ * * AIP_TRAINING_DATA_URI =
* "gcs_destination/dataset-<dataset-id>-<annotation-type>-<time>/training-*.${AIP_DATA_FORMAT}"
* * AIP_VALIDATION_DATA_URI =
* "gcs_destination/dataset-<dataset-id>-<annotation-type>-<time>/validation-*.${AIP_DATA_FORMAT}"
@@ -649,12 +649,14 @@ public com.google.cloud.aiplatform.v1beta1.GcsDestinationOrBuilder getGcsDestina
*
*
*
+ * Only applicable to custom training with tabular Dataset with BigQuery
+ * source.
* The BigQuery project location where the training data is to be written
* to. In the given project a new dataset is created with name
* `dataset_<dataset-id>_<annotation-type>_<timestamp-of-training-call>`
* where timestamp is in YYYY_MM_DDThh_mm_ss_sssZ format. All training
- * input data will be written into that dataset. In the dataset three
- * tables will be created, `training`, `validation` and `test`.
+ * input data is written into that dataset. In the dataset three
+ * tables are created, `training`, `validation` and `test`.
* * AIP_DATA_FORMAT = "bigquery".
* * AIP_TRAINING_DATA_URI =
* "bigquery_destination.dataset_<dataset-id>_<annotation-type>_<time>.training"
@@ -676,12 +678,14 @@ public boolean hasBigqueryDestination() {
*
*
*
+ * Only applicable to custom training with tabular Dataset with BigQuery
+ * source.
* The BigQuery project location where the training data is to be written
* to. In the given project a new dataset is created with name
* `dataset_<dataset-id>_<annotation-type>_<timestamp-of-training-call>`
* where timestamp is in YYYY_MM_DDThh_mm_ss_sssZ format. All training
- * input data will be written into that dataset. In the dataset three
- * tables will be created, `training`, `validation` and `test`.
+ * input data is written into that dataset. In the dataset three
+ * tables are created, `training`, `validation` and `test`.
* * AIP_DATA_FORMAT = "bigquery".
* * AIP_TRAINING_DATA_URI =
* "bigquery_destination.dataset_<dataset-id>_<annotation-type>_<time>.training"
@@ -706,12 +710,14 @@ public com.google.cloud.aiplatform.v1beta1.BigQueryDestination getBigqueryDestin
*
*
*
+ * Only applicable to custom training with tabular Dataset with BigQuery
+ * source.
* The BigQuery project location where the training data is to be written
* to. In the given project a new dataset is created with name
* `dataset_<dataset-id>_<annotation-type>_<timestamp-of-training-call>`
* where timestamp is in YYYY_MM_DDThh_mm_ss_sssZ format. All training
- * input data will be written into that dataset. In the dataset three
- * tables will be created, `training`, `validation` and `test`.
+ * input data is written into that dataset. In the dataset three
+ * tables are created, `training`, `validation` and `test`.
* * AIP_DATA_FORMAT = "bigquery".
* * AIP_TRAINING_DATA_URI =
* "bigquery_destination.dataset_<dataset-id>_<annotation-type>_<time>.training"
@@ -799,7 +805,7 @@ public com.google.protobuf.ByteString getDatasetIdBytes() {
*
*
*
- * Only applicable to Datasets that have DataItems and Annotations.
+ * Applicable only to Datasets that have DataItems and Annotations.
* A filter on Annotations of the Dataset. Only Annotations that both
* match this filter and belong to DataItems not ignored by the split method
* are used in respectively training, validation or test role, depending on
@@ -830,7 +836,7 @@ public java.lang.String getAnnotationsFilter() {
*
*
*
- * Only applicable to Datasets that have DataItems and Annotations.
+ * Applicable only to Datasets that have DataItems and Annotations.
* A filter on Annotations of the Dataset. Only Annotations that both
* match this filter and belong to DataItems not ignored by the split method
* are used in respectively training, validation or test role, depending on
@@ -864,14 +870,13 @@ public com.google.protobuf.ByteString getAnnotationsFilterBytes() {
*
*
*
- * Only applicable to custom training.
- * Google Cloud Storage URI points to a YAML file describing annotation
- * schema. The schema is defined as an OpenAPI 3.0.2 [Schema Object](
- * https:
- * //github.com/OAI/OpenAPI-Specification/b
- * // lob/master/versions/3.0.2.md#schema-object)
+ * Applicable only to custom training with Datasets that have DataItems and
+ * Annotations.
+ * Cloud Storage URI that points to a YAML file describing the annotation
+ * schema. The schema is defined as an OpenAPI 3.0.2
+ * [Schema Object](https://tinyurl.com/y538mdwt#schema-object).
* The schema files that can be used here are found in
- * gs://google-cloud-aiplatform/schema/dataset/annotation/, note that the
+ * gs://google-cloud-aiplatform/schema/dataset/annotation/ , note that the
* chosen schema must be consistent with
* [metadata][google.cloud.aiplatform.v1beta1.Dataset.metadata_schema_uri] of the Dataset specified by
* [dataset_id][google.cloud.aiplatform.v1beta1.InputDataConfig.dataset_id].
@@ -903,14 +908,13 @@ public java.lang.String getAnnotationSchemaUri() {
*
*
*
- * Only applicable to custom training.
- * Google Cloud Storage URI points to a YAML file describing annotation
- * schema. The schema is defined as an OpenAPI 3.0.2 [Schema Object](
- * https:
- * //github.com/OAI/OpenAPI-Specification/b
- * // lob/master/versions/3.0.2.md#schema-object)
+ * Applicable only to custom training with Datasets that have DataItems and
+ * Annotations.
+ * Cloud Storage URI that points to a YAML file describing the annotation
+ * schema. The schema is defined as an OpenAPI 3.0.2
+ * [Schema Object](https://tinyurl.com/y538mdwt#schema-object).
* The schema files that can be used here are found in
- * gs://google-cloud-aiplatform/schema/dataset/annotation/, note that the
+ * gs://google-cloud-aiplatform/schema/dataset/annotation/ , note that the
* chosen schema must be consistent with
* [metadata][google.cloud.aiplatform.v1beta1.Dataset.metadata_schema_uri] of the Dataset specified by
* [dataset_id][google.cloud.aiplatform.v1beta1.InputDataConfig.dataset_id].
@@ -2382,17 +2386,17 @@ public com.google.cloud.aiplatform.v1beta1.TimestampSplit.Builder getTimestampSp
*
*
*
- * The Google Cloud Storage location where the training data is to be
- * written to. In the given directory a new directory will be created with
+ * The Cloud Storage location where the training data is to be
+ * written to. In the given directory a new directory is created with
* name:
* `dataset-<dataset-id>-<annotation-type>-<timestamp-of-training-call>`
* where timestamp is in YYYY-MM-DDThh:mm:ss.sssZ ISO-8601 format.
- * All training input data will be written into that directory.
- * The AI Platform environment variables representing Google Cloud Storage
- * data URIs will always be represented in the Google Cloud Storage wildcard
+ * All training input data is written into that directory.
+ * The AI Platform environment variables representing Cloud Storage
+ * data URIs are represented in the Cloud Storage wildcard
* format to support sharded data. e.g.: "gs://.../training-*.jsonl"
* * AIP_DATA_FORMAT = "jsonl" for non-tabular data, "csv" for tabular data
- * * AIP_TRAINING_DATA_URI =
+ * * AIP_TRAINING_DATA_URI =
* "gcs_destination/dataset-<dataset-id>-<annotation-type>-<time>/training-*.${AIP_DATA_FORMAT}"
* * AIP_VALIDATION_DATA_URI =
* "gcs_destination/dataset-<dataset-id>-<annotation-type>-<time>/validation-*.${AIP_DATA_FORMAT}"
@@ -2412,17 +2416,17 @@ public boolean hasGcsDestination() {
*
*
*
- * The Google Cloud Storage location where the training data is to be
- * written to. In the given directory a new directory will be created with
+ * The Cloud Storage location where the training data is to be
+ * written to. In the given directory a new directory is created with
* name:
* `dataset-<dataset-id>-<annotation-type>-<timestamp-of-training-call>`
* where timestamp is in YYYY-MM-DDThh:mm:ss.sssZ ISO-8601 format.
- * All training input data will be written into that directory.
- * The AI Platform environment variables representing Google Cloud Storage
- * data URIs will always be represented in the Google Cloud Storage wildcard
+ * All training input data is written into that directory.
+ * The AI Platform environment variables representing Cloud Storage
+ * data URIs are represented in the Cloud Storage wildcard
* format to support sharded data. e.g.: "gs://.../training-*.jsonl"
* * AIP_DATA_FORMAT = "jsonl" for non-tabular data, "csv" for tabular data
- * * AIP_TRAINING_DATA_URI =
+ * * AIP_TRAINING_DATA_URI =
* "gcs_destination/dataset-<dataset-id>-<annotation-type>-<time>/training-*.${AIP_DATA_FORMAT}"
* * AIP_VALIDATION_DATA_URI =
* "gcs_destination/dataset-<dataset-id>-<annotation-type>-<time>/validation-*.${AIP_DATA_FORMAT}"
@@ -2452,17 +2456,17 @@ public com.google.cloud.aiplatform.v1beta1.GcsDestination getGcsDestination() {
*
*
*
- * The Google Cloud Storage location where the training data is to be
- * written to. In the given directory a new directory will be created with
+ * The Cloud Storage location where the training data is to be
+ * written to. In the given directory a new directory is created with
* name:
* `dataset-<dataset-id>-<annotation-type>-<timestamp-of-training-call>`
* where timestamp is in YYYY-MM-DDThh:mm:ss.sssZ ISO-8601 format.
- * All training input data will be written into that directory.
- * The AI Platform environment variables representing Google Cloud Storage
- * data URIs will always be represented in the Google Cloud Storage wildcard
+ * All training input data is written into that directory.
+ * The AI Platform environment variables representing Cloud Storage
+ * data URIs are represented in the Cloud Storage wildcard
* format to support sharded data. e.g.: "gs://.../training-*.jsonl"
* * AIP_DATA_FORMAT = "jsonl" for non-tabular data, "csv" for tabular data
- * * AIP_TRAINING_DATA_URI =
+ * * AIP_TRAINING_DATA_URI =
* "gcs_destination/dataset-<dataset-id>-<annotation-type>-<time>/training-*.${AIP_DATA_FORMAT}"
* * AIP_VALIDATION_DATA_URI =
* "gcs_destination/dataset-<dataset-id>-<annotation-type>-<time>/validation-*.${AIP_DATA_FORMAT}"
@@ -2489,17 +2493,17 @@ public Builder setGcsDestination(com.google.cloud.aiplatform.v1beta1.GcsDestinat
*
*
*
- * The Google Cloud Storage location where the training data is to be
- * written to. In the given directory a new directory will be created with
+ * The Cloud Storage location where the training data is to be
+ * written to. In the given directory a new directory is created with
* name:
* `dataset-<dataset-id>-<annotation-type>-<timestamp-of-training-call>`
* where timestamp is in YYYY-MM-DDThh:mm:ss.sssZ ISO-8601 format.
- * All training input data will be written into that directory.
- * The AI Platform environment variables representing Google Cloud Storage
- * data URIs will always be represented in the Google Cloud Storage wildcard
+ * All training input data is written into that directory.
+ * The AI Platform environment variables representing Cloud Storage
+ * data URIs are represented in the Cloud Storage wildcard
* format to support sharded data. e.g.: "gs://.../training-*.jsonl"
* * AIP_DATA_FORMAT = "jsonl" for non-tabular data, "csv" for tabular data
- * * AIP_TRAINING_DATA_URI =
+ * * AIP_TRAINING_DATA_URI =
* "gcs_destination/dataset-<dataset-id>-<annotation-type>-<time>/training-*.${AIP_DATA_FORMAT}"
* * AIP_VALIDATION_DATA_URI =
* "gcs_destination/dataset-<dataset-id>-<annotation-type>-<time>/validation-*.${AIP_DATA_FORMAT}"
@@ -2524,17 +2528,17 @@ public Builder setGcsDestination(
*
*
*
- * The Google Cloud Storage location where the training data is to be
- * written to. In the given directory a new directory will be created with
+ * The Cloud Storage location where the training data is to be
+ * written to. In the given directory a new directory is created with
* name:
* `dataset-<dataset-id>-<annotation-type>-<timestamp-of-training-call>`
* where timestamp is in YYYY-MM-DDThh:mm:ss.sssZ ISO-8601 format.
- * All training input data will be written into that directory.
- * The AI Platform environment variables representing Google Cloud Storage
- * data URIs will always be represented in the Google Cloud Storage wildcard
+ * All training input data is written into that directory.
+ * The AI Platform environment variables representing Cloud Storage
+ * data URIs are represented in the Cloud Storage wildcard
* format to support sharded data. e.g.: "gs://.../training-*.jsonl"
* * AIP_DATA_FORMAT = "jsonl" for non-tabular data, "csv" for tabular data
- * * AIP_TRAINING_DATA_URI =
+ * * AIP_TRAINING_DATA_URI =
* "gcs_destination/dataset-<dataset-id>-<annotation-type>-<time>/training-*.${AIP_DATA_FORMAT}"
* * AIP_VALIDATION_DATA_URI =
* "gcs_destination/dataset-<dataset-id>-<annotation-type>-<time>/validation-*.${AIP_DATA_FORMAT}"
@@ -2571,17 +2575,17 @@ public Builder mergeGcsDestination(com.google.cloud.aiplatform.v1beta1.GcsDestin
*
*
*
- * The Google Cloud Storage location where the training data is to be
- * written to. In the given directory a new directory will be created with
+ * The Cloud Storage location where the training data is to be
+ * written to. In the given directory a new directory is created with
* name:
* `dataset-<dataset-id>-<annotation-type>-<timestamp-of-training-call>`
* where timestamp is in YYYY-MM-DDThh:mm:ss.sssZ ISO-8601 format.
- * All training input data will be written into that directory.
- * The AI Platform environment variables representing Google Cloud Storage
- * data URIs will always be represented in the Google Cloud Storage wildcard
+ * All training input data is written into that directory.
+ * The AI Platform environment variables representing Cloud Storage
+ * data URIs are represented in the Cloud Storage wildcard
* format to support sharded data. e.g.: "gs://.../training-*.jsonl"
* * AIP_DATA_FORMAT = "jsonl" for non-tabular data, "csv" for tabular data
- * * AIP_TRAINING_DATA_URI =
+ * * AIP_TRAINING_DATA_URI =
* "gcs_destination/dataset-<dataset-id>-<annotation-type>-<time>/training-*.${AIP_DATA_FORMAT}"
* * AIP_VALIDATION_DATA_URI =
* "gcs_destination/dataset-<dataset-id>-<annotation-type>-<time>/validation-*.${AIP_DATA_FORMAT}"
@@ -2611,17 +2615,17 @@ public Builder clearGcsDestination() {
*
*
*
- * The Google Cloud Storage location where the training data is to be
- * written to. In the given directory a new directory will be created with
+ * The Cloud Storage location where the training data is to be
+ * written to. In the given directory a new directory is created with
* name:
* `dataset-<dataset-id>-<annotation-type>-<timestamp-of-training-call>`
* where timestamp is in YYYY-MM-DDThh:mm:ss.sssZ ISO-8601 format.
- * All training input data will be written into that directory.
- * The AI Platform environment variables representing Google Cloud Storage
- * data URIs will always be represented in the Google Cloud Storage wildcard
+ * All training input data is written into that directory.
+ * The AI Platform environment variables representing Cloud Storage
+ * data URIs are represented in the Cloud Storage wildcard
* format to support sharded data. e.g.: "gs://.../training-*.jsonl"
* * AIP_DATA_FORMAT = "jsonl" for non-tabular data, "csv" for tabular data
- * * AIP_TRAINING_DATA_URI =
+ * * AIP_TRAINING_DATA_URI =
* "gcs_destination/dataset-<dataset-id>-<annotation-type>-<time>/training-*.${AIP_DATA_FORMAT}"
* * AIP_VALIDATION_DATA_URI =
* "gcs_destination/dataset-<dataset-id>-<annotation-type>-<time>/validation-*.${AIP_DATA_FORMAT}"
@@ -2638,17 +2642,17 @@ public com.google.cloud.aiplatform.v1beta1.GcsDestination.Builder getGcsDestinat
*
*
*
- * The Google Cloud Storage location where the training data is to be
- * written to. In the given directory a new directory will be created with
+ * The Cloud Storage location where the training data is to be
+ * written to. In the given directory a new directory is created with
* name:
* `dataset-<dataset-id>-<annotation-type>-<timestamp-of-training-call>`
* where timestamp is in YYYY-MM-DDThh:mm:ss.sssZ ISO-8601 format.
- * All training input data will be written into that directory.
- * The AI Platform environment variables representing Google Cloud Storage
- * data URIs will always be represented in the Google Cloud Storage wildcard
+ * All training input data is written into that directory.
+ * The AI Platform environment variables representing Cloud Storage
+ * data URIs are represented in the Cloud Storage wildcard
* format to support sharded data. e.g.: "gs://.../training-*.jsonl"
* * AIP_DATA_FORMAT = "jsonl" for non-tabular data, "csv" for tabular data
- * * AIP_TRAINING_DATA_URI =
+ * * AIP_TRAINING_DATA_URI =
* "gcs_destination/dataset-<dataset-id>-<annotation-type>-<time>/training-*.${AIP_DATA_FORMAT}"
* * AIP_VALIDATION_DATA_URI =
* "gcs_destination/dataset-<dataset-id>-<annotation-type>-<time>/validation-*.${AIP_DATA_FORMAT}"
@@ -2674,17 +2678,17 @@ public com.google.cloud.aiplatform.v1beta1.GcsDestination.Builder getGcsDestinat
*
*
*
- * The Google Cloud Storage location where the training data is to be
- * written to. In the given directory a new directory will be created with
+ * The Cloud Storage location where the training data is to be
+ * written to. In the given directory a new directory is created with
* name:
* `dataset-<dataset-id>-<annotation-type>-<timestamp-of-training-call>`
* where timestamp is in YYYY-MM-DDThh:mm:ss.sssZ ISO-8601 format.
- * All training input data will be written into that directory.
- * The AI Platform environment variables representing Google Cloud Storage
- * data URIs will always be represented in the Google Cloud Storage wildcard
+ * All training input data is written into that directory.
+ * The AI Platform environment variables representing Cloud Storage
+ * data URIs are represented in the Cloud Storage wildcard
* format to support sharded data. e.g.: "gs://.../training-*.jsonl"
* * AIP_DATA_FORMAT = "jsonl" for non-tabular data, "csv" for tabular data
- * * AIP_TRAINING_DATA_URI =
+ * * AIP_TRAINING_DATA_URI =
* "gcs_destination/dataset-<dataset-id>-<annotation-type>-<time>/training-*.${AIP_DATA_FORMAT}"
* * AIP_VALIDATION_DATA_URI =
* "gcs_destination/dataset-<dataset-id>-<annotation-type>-<time>/validation-*.${AIP_DATA_FORMAT}"
@@ -2728,12 +2732,14 @@ public com.google.cloud.aiplatform.v1beta1.GcsDestination.Builder getGcsDestinat
*
*
*
+ * Only applicable to custom training with tabular Dataset with BigQuery
+ * source.
* The BigQuery project location where the training data is to be written
* to. In the given project a new dataset is created with name
* `dataset_<dataset-id>_<annotation-type>_<timestamp-of-training-call>`
* where timestamp is in YYYY_MM_DDThh_mm_ss_sssZ format. All training
- * input data will be written into that dataset. In the dataset three
- * tables will be created, `training`, `validation` and `test`.
+ * input data is written into that dataset. In the dataset three
+ * tables are created, `training`, `validation` and `test`.
* * AIP_DATA_FORMAT = "bigquery".
* * AIP_TRAINING_DATA_URI =
* "bigquery_destination.dataset_<dataset-id>_<annotation-type>_<time>.training"
@@ -2755,12 +2761,14 @@ public boolean hasBigqueryDestination() {
*
*
*
+ * Only applicable to custom training with tabular Dataset with BigQuery
+ * source.
* The BigQuery project location where the training data is to be written
* to. In the given project a new dataset is created with name
* `dataset_<dataset-id>_<annotation-type>_<timestamp-of-training-call>`
* where timestamp is in YYYY_MM_DDThh_mm_ss_sssZ format. All training
- * input data will be written into that dataset. In the dataset three
- * tables will be created, `training`, `validation` and `test`.
+ * input data is written into that dataset. In the dataset three
+ * tables are created, `training`, `validation` and `test`.
* * AIP_DATA_FORMAT = "bigquery".
* * AIP_TRAINING_DATA_URI =
* "bigquery_destination.dataset_<dataset-id>_<annotation-type>_<time>.training"
@@ -2792,12 +2800,14 @@ public com.google.cloud.aiplatform.v1beta1.BigQueryDestination getBigqueryDestin
*
*
*
+ * Only applicable to custom training with tabular Dataset with BigQuery
+ * source.
* The BigQuery project location where the training data is to be written
* to. In the given project a new dataset is created with name
* `dataset_<dataset-id>_<annotation-type>_<timestamp-of-training-call>`
* where timestamp is in YYYY_MM_DDThh_mm_ss_sssZ format. All training
- * input data will be written into that dataset. In the dataset three
- * tables will be created, `training`, `validation` and `test`.
+ * input data is written into that dataset. In the dataset three
+ * tables are created, `training`, `validation` and `test`.
* * AIP_DATA_FORMAT = "bigquery".
* * AIP_TRAINING_DATA_URI =
* "bigquery_destination.dataset_<dataset-id>_<annotation-type>_<time>.training"
@@ -2827,12 +2837,14 @@ public Builder setBigqueryDestination(
*
*
*
+ * Only applicable to custom training with tabular Dataset with BigQuery
+ * source.
* The BigQuery project location where the training data is to be written
* to. In the given project a new dataset is created with name
* `dataset_<dataset-id>_<annotation-type>_<timestamp-of-training-call>`
* where timestamp is in YYYY_MM_DDThh_mm_ss_sssZ format. All training
- * input data will be written into that dataset. In the dataset three
- * tables will be created, `training`, `validation` and `test`.
+ * input data is written into that dataset. In the dataset three
+ * tables are created, `training`, `validation` and `test`.
* * AIP_DATA_FORMAT = "bigquery".
* * AIP_TRAINING_DATA_URI =
* "bigquery_destination.dataset_<dataset-id>_<annotation-type>_<time>.training"
@@ -2859,12 +2871,14 @@ public Builder setBigqueryDestination(
*
*
*
+ * Only applicable to custom training with tabular Dataset with BigQuery
+ * source.
* The BigQuery project location where the training data is to be written
* to. In the given project a new dataset is created with name
* `dataset_<dataset-id>_<annotation-type>_<timestamp-of-training-call>`
* where timestamp is in YYYY_MM_DDThh_mm_ss_sssZ format. All training
- * input data will be written into that dataset. In the dataset three
- * tables will be created, `training`, `validation` and `test`.
+ * input data is written into that dataset. In the dataset three
+ * tables are created, `training`, `validation` and `test`.
* * AIP_DATA_FORMAT = "bigquery".
* * AIP_TRAINING_DATA_URI =
* "bigquery_destination.dataset_<dataset-id>_<annotation-type>_<time>.training"
@@ -2904,12 +2918,14 @@ public Builder mergeBigqueryDestination(
*
*
*
+ * Only applicable to custom training with tabular Dataset with BigQuery
+ * source.
* The BigQuery project location where the training data is to be written
* to. In the given project a new dataset is created with name
* `dataset_<dataset-id>_<annotation-type>_<timestamp-of-training-call>`
* where timestamp is in YYYY_MM_DDThh_mm_ss_sssZ format. All training
- * input data will be written into that dataset. In the dataset three
- * tables will be created, `training`, `validation` and `test`.
+ * input data is written into that dataset. In the dataset three
+ * tables are created, `training`, `validation` and `test`.
* * AIP_DATA_FORMAT = "bigquery".
* * AIP_TRAINING_DATA_URI =
* "bigquery_destination.dataset_<dataset-id>_<annotation-type>_<time>.training"
@@ -2941,12 +2957,14 @@ public Builder clearBigqueryDestination() {
*
*
*
+ * Only applicable to custom training with tabular Dataset with BigQuery
+ * source.
* The BigQuery project location where the training data is to be written
* to. In the given project a new dataset is created with name
* `dataset_<dataset-id>_<annotation-type>_<timestamp-of-training-call>`
* where timestamp is in YYYY_MM_DDThh_mm_ss_sssZ format. All training
- * input data will be written into that dataset. In the dataset three
- * tables will be created, `training`, `validation` and `test`.
+ * input data is written into that dataset. In the dataset three
+ * tables are created, `training`, `validation` and `test`.
* * AIP_DATA_FORMAT = "bigquery".
* * AIP_TRAINING_DATA_URI =
* "bigquery_destination.dataset_<dataset-id>_<annotation-type>_<time>.training"
@@ -2966,12 +2984,14 @@ public Builder clearBigqueryDestination() {
*
*
*
+ * Only applicable to custom training with tabular Dataset with BigQuery
+ * source.
* The BigQuery project location where the training data is to be written
* to. In the given project a new dataset is created with name
* `dataset_<dataset-id>_<annotation-type>_<timestamp-of-training-call>`
* where timestamp is in YYYY_MM_DDThh_mm_ss_sssZ format. All training
- * input data will be written into that dataset. In the dataset three
- * tables will be created, `training`, `validation` and `test`.
+ * input data is written into that dataset. In the dataset three
+ * tables are created, `training`, `validation` and `test`.
* * AIP_DATA_FORMAT = "bigquery".
* * AIP_TRAINING_DATA_URI =
* "bigquery_destination.dataset_<dataset-id>_<annotation-type>_<time>.training"
@@ -2999,12 +3019,14 @@ public Builder clearBigqueryDestination() {
*
*
*
+ * Only applicable to custom training with tabular Dataset with BigQuery
+ * source.
* The BigQuery project location where the training data is to be written
* to. In the given project a new dataset is created with name
* `dataset_<dataset-id>_<annotation-type>_<timestamp-of-training-call>`
* where timestamp is in YYYY_MM_DDThh_mm_ss_sssZ format. All training
- * input data will be written into that dataset. In the dataset three
- * tables will be created, `training`, `validation` and `test`.
+ * input data is written into that dataset. In the dataset three
+ * tables are created, `training`, `validation` and `test`.
* * AIP_DATA_FORMAT = "bigquery".
* * AIP_TRAINING_DATA_URI =
* "bigquery_destination.dataset_<dataset-id>_<annotation-type>_<time>.training"
@@ -3183,7 +3205,7 @@ public Builder setDatasetIdBytes(com.google.protobuf.ByteString value) {
*
*
*
- * Only applicable to Datasets that have DataItems and Annotations.
+ * Applicable only to Datasets that have DataItems and Annotations.
* A filter on Annotations of the Dataset. Only Annotations that both
* match this filter and belong to DataItems not ignored by the split method
* are used in respectively training, validation or test role, depending on
@@ -3213,7 +3235,7 @@ public java.lang.String getAnnotationsFilter() {
*
*
*
*
- *
- * Only applicable to Datasets that have DataItems and Annotations.
+ * Applicable only to Datasets that have DataItems and Annotations.
* A filter on Annotations of the Dataset. Only Annotations that both
* match this filter and belong to DataItems not ignored by the split method
* are used in respectively training, validation or test role, depending on
@@ -3243,7 +3265,7 @@ public com.google.protobuf.ByteString getAnnotationsFilterBytes() {
*
*
*
*
diff --git a/proto-google-cloud-aiplatform-v1beta1/src/main/java/com/google/cloud/aiplatform/v1beta1/ListModelsRequest.java b/proto-google-cloud-aiplatform-v1beta1/src/main/java/com/google/cloud/aiplatform/v1beta1/ListModelsRequest.java
index 4378609ba..f91dd6a21 100644
--- a/proto-google-cloud-aiplatform-v1beta1/src/main/java/com/google/cloud/aiplatform/v1beta1/ListModelsRequest.java
+++ b/proto-google-cloud-aiplatform-v1beta1/src/main/java/com/google/cloud/aiplatform/v1beta1/ListModelsRequest.java
@@ -208,7 +208,19 @@ public com.google.protobuf.ByteString getParentBytes() {
*
*
*
- * Only applicable to Datasets that have DataItems and Annotations.
+ * Applicable only to Datasets that have DataItems and Annotations.
* A filter on Annotations of the Dataset. Only Annotations that both
* match this filter and belong to DataItems not ignored by the split method
* are used in respectively training, validation or test role, depending on
@@ -3272,7 +3294,7 @@ public Builder setAnnotationsFilter(java.lang.String value) {
*
*
*
*
@@ -87,17 +85,15 @@ public interface ListEndpointsRequestOrBuilder
* Optional. An expression for filtering the results of the request. For field names
* both snake_case and camelCase are supported.
* * `endpoint` supports = and !=. `endpoint` represents the Endpoint ID,
- * ie. the last segment of the Endpoint's [resource name][google.cloud.aiplatform.v1beta1.Endpoint.name].
- * * `display_name` supports =, != and regex()
- * (uses [re2](https://github.com/google/re2/wiki/Syntax) syntax)
+ * i.e. the last segment of the Endpoint's [resource name][google.cloud.aiplatform.v1beta1.Endpoint.name].
+ * * `display_name` supports = and !=
* * `labels` supports general map functions that is:
- * `labels.key=value` - key:value equality
- * `labels.key:* or labels:key - key existence
- * A key including a space must be quoted. `labels."a key"`.
+ * * `labels.key=value` - key:value equality
+ * * `labels.key:*` or `labels:key` - key existence
+ * * A key including a space must be quoted. `labels."a key"`.
* Some examples:
* * `endpoint=1`
* * `displayName="myDisplayName"`
- * * `regex(display_name, "^A") -> The display name starts with an A.
* * `labels.myKey="myValue"`
*
- * Only applicable to Datasets that have DataItems and Annotations.
+ * Applicable only to Datasets that have DataItems and Annotations.
* A filter on Annotations of the Dataset. Only Annotations that both
* match this filter and belong to DataItems not ignored by the split method
* are used in respectively training, validation or test role, depending on
@@ -3297,7 +3319,7 @@ public Builder clearAnnotationsFilter() {
*
*
*
*
diff --git a/proto-google-cloud-aiplatform-v1beta1/src/main/java/com/google/cloud/aiplatform/v1beta1/ListEndpointsRequestOrBuilder.java b/proto-google-cloud-aiplatform-v1beta1/src/main/java/com/google/cloud/aiplatform/v1beta1/ListEndpointsRequestOrBuilder.java
index 11a5d8957..526e58a30 100644
--- a/proto-google-cloud-aiplatform-v1beta1/src/main/java/com/google/cloud/aiplatform/v1beta1/ListEndpointsRequestOrBuilder.java
+++ b/proto-google-cloud-aiplatform-v1beta1/src/main/java/com/google/cloud/aiplatform/v1beta1/ListEndpointsRequestOrBuilder.java
@@ -61,17 +61,15 @@ public interface ListEndpointsRequestOrBuilder
* Optional. An expression for filtering the results of the request. For field names
* both snake_case and camelCase are supported.
* * `endpoint` supports = and !=. `endpoint` represents the Endpoint ID,
- * ie. the last segment of the Endpoint's [resource name][google.cloud.aiplatform.v1beta1.Endpoint.name].
- * * `display_name` supports =, != and regex()
- * (uses [re2](https://github.com/google/re2/wiki/Syntax) syntax)
+ * i.e. the last segment of the Endpoint's [resource name][google.cloud.aiplatform.v1beta1.Endpoint.name].
+ * * `display_name` supports = and !=
* * `labels` supports general map functions that is:
- * `labels.key=value` - key:value equality
- * `labels.key:* or labels:key - key existence
- * A key including a space must be quoted. `labels."a key"`.
+ * * `labels.key=value` - key:value equality
+ * * `labels.key:*` or `labels:key` - key existence
+ * * A key including a space must be quoted. `labels."a key"`.
* Some examples:
* * `endpoint=1`
* * `displayName="myDisplayName"`
- * * `regex(display_name, "^A") -> The display name starts with an A.
* * `labels.myKey="myValue"`
*
- * Only applicable to Datasets that have DataItems and Annotations.
+ * Applicable only to Datasets that have DataItems and Annotations.
* A filter on Annotations of the Dataset. Only Annotations that both
* match this filter and belong to DataItems not ignored by the split method
* are used in respectively training, validation or test role, depending on
@@ -3329,14 +3351,13 @@ public Builder setAnnotationsFilterBytes(com.google.protobuf.ByteString value) {
*
*
*
*
@@ -1062,17 +1050,15 @@ public Builder clearFilter() {
* Optional. An expression for filtering the results of the request. For field names
* both snake_case and camelCase are supported.
* * `endpoint` supports = and !=. `endpoint` represents the Endpoint ID,
- * ie. the last segment of the Endpoint's [resource name][google.cloud.aiplatform.v1beta1.Endpoint.name].
- * * `display_name` supports =, != and regex()
- * (uses [re2](https://github.com/google/re2/wiki/Syntax) syntax)
+ * i.e. the last segment of the Endpoint's [resource name][google.cloud.aiplatform.v1beta1.Endpoint.name].
+ * * `display_name` supports = and !=
* * `labels` supports general map functions that is:
- * `labels.key=value` - key:value equality
- * `labels.key:* or labels:key - key existence
- * A key including a space must be quoted. `labels."a key"`.
+ * * `labels.key=value` - key:value equality
+ * * `labels.key:*` or `labels:key` - key existence
+ * * A key including a space must be quoted. `labels."a key"`.
* Some examples:
* * `endpoint=1`
* * `displayName="myDisplayName"`
- * * `regex(display_name, "^A") -> The display name starts with an A.
* * `labels.myKey="myValue"`
*
- * Only applicable to custom training.
- * Google Cloud Storage URI points to a YAML file describing annotation
- * schema. The schema is defined as an OpenAPI 3.0.2 [Schema Object](
- * https:
- * //github.com/OAI/OpenAPI-Specification/b
- * // lob/master/versions/3.0.2.md#schema-object)
+ * Applicable only to custom training with Datasets that have DataItems and
+ * Annotations.
+ * Cloud Storage URI that points to a YAML file describing the annotation
+ * schema. The schema is defined as an OpenAPI 3.0.2
+ * [Schema Object](https://tinyurl.com/y538mdwt#schema-object).
* The schema files that can be used here are found in
- * gs://google-cloud-aiplatform/schema/dataset/annotation/, note that the
+ * gs://google-cloud-aiplatform/schema/dataset/annotation/ , note that the
* chosen schema must be consistent with
* [metadata][google.cloud.aiplatform.v1beta1.Dataset.metadata_schema_uri] of the Dataset specified by
* [dataset_id][google.cloud.aiplatform.v1beta1.InputDataConfig.dataset_id].
@@ -3367,14 +3388,13 @@ public java.lang.String getAnnotationSchemaUri() {
*
*
*
*
@@ -1031,17 +1021,15 @@ public Builder setFilter(java.lang.String value) {
* Optional. An expression for filtering the results of the request. For field names
* both snake_case and camelCase are supported.
* * `endpoint` supports = and !=. `endpoint` represents the Endpoint ID,
- * ie. the last segment of the Endpoint's [resource name][google.cloud.aiplatform.v1beta1.Endpoint.name].
- * * `display_name` supports =, != and regex()
- * (uses [re2](https://github.com/google/re2/wiki/Syntax) syntax)
+ * i.e. the last segment of the Endpoint's [resource name][google.cloud.aiplatform.v1beta1.Endpoint.name].
+ * * `display_name` supports = and !=
* * `labels` supports general map functions that is:
- * `labels.key=value` - key:value equality
- * `labels.key:* or labels:key - key existence
- * A key including a space must be quoted. `labels."a key"`.
+ * * `labels.key=value` - key:value equality
+ * * `labels.key:*` or `labels:key` - key existence
+ * * A key including a space must be quoted. `labels."a key"`.
* Some examples:
* * `endpoint=1`
* * `displayName="myDisplayName"`
- * * `regex(display_name, "^A") -> The display name starts with an A.
* * `labels.myKey="myValue"`
*
- * Only applicable to custom training.
- * Google Cloud Storage URI points to a YAML file describing annotation
- * schema. The schema is defined as an OpenAPI 3.0.2 [Schema Object](
- * https:
- * //github.com/OAI/OpenAPI-Specification/b
- * // lob/master/versions/3.0.2.md#schema-object)
+ * Applicable only to custom training with Datasets that have DataItems and
+ * Annotations.
+ * Cloud Storage URI that points to a YAML file describing the annotation
+ * schema. The schema is defined as an OpenAPI 3.0.2
+ * [Schema Object](https://tinyurl.com/y538mdwt#schema-object).
* The schema files that can be used here are found in
- * gs://google-cloud-aiplatform/schema/dataset/annotation/, note that the
+ * gs://google-cloud-aiplatform/schema/dataset/annotation/ , note that the
* chosen schema must be consistent with
* [metadata][google.cloud.aiplatform.v1beta1.Dataset.metadata_schema_uri] of the Dataset specified by
* [dataset_id][google.cloud.aiplatform.v1beta1.InputDataConfig.dataset_id].
@@ -3405,14 +3425,13 @@ public com.google.protobuf.ByteString getAnnotationSchemaUriBytes() {
*
*
*
*
@@ -996,17 +988,15 @@ public com.google.protobuf.ByteString getFilterBytes() {
* Optional. An expression for filtering the results of the request. For field names
* both snake_case and camelCase are supported.
* * `endpoint` supports = and !=. `endpoint` represents the Endpoint ID,
- * ie. the last segment of the Endpoint's [resource name][google.cloud.aiplatform.v1beta1.Endpoint.name].
- * * `display_name` supports =, != and regex()
- * (uses [re2](https://github.com/google/re2/wiki/Syntax) syntax)
+ * i.e. the last segment of the Endpoint's [resource name][google.cloud.aiplatform.v1beta1.Endpoint.name].
+ * * `display_name` supports = and !=
* * `labels` supports general map functions that is:
- * `labels.key=value` - key:value equality
- * `labels.key:* or labels:key - key existence
- * A key including a space must be quoted. `labels."a key"`.
+ * * `labels.key=value` - key:value equality
+ * * `labels.key:*` or `labels:key` - key existence
+ * * A key including a space must be quoted. `labels."a key"`.
* Some examples:
* * `endpoint=1`
* * `displayName="myDisplayName"`
- * * `regex(display_name, "^A") -> The display name starts with an A.
* * `labels.myKey="myValue"`
*
- * Only applicable to custom training.
- * Google Cloud Storage URI points to a YAML file describing annotation
- * schema. The schema is defined as an OpenAPI 3.0.2 [Schema Object](
- * https:
- * //github.com/OAI/OpenAPI-Specification/b
- * // lob/master/versions/3.0.2.md#schema-object)
+ * Applicable only to custom training with Datasets that have DataItems and
+ * Annotations.
+ * Cloud Storage URI that points to a YAML file describing the annotation
+ * schema. The schema is defined as an OpenAPI 3.0.2
+ * [Schema Object](https://tinyurl.com/y538mdwt#schema-object).
* The schema files that can be used here are found in
- * gs://google-cloud-aiplatform/schema/dataset/annotation/, note that the
+ * gs://google-cloud-aiplatform/schema/dataset/annotation/ , note that the
* chosen schema must be consistent with
* [metadata][google.cloud.aiplatform.v1beta1.Dataset.metadata_schema_uri] of the Dataset specified by
* [dataset_id][google.cloud.aiplatform.v1beta1.InputDataConfig.dataset_id].
@@ -3442,14 +3461,13 @@ public Builder setAnnotationSchemaUri(java.lang.String value) {
*
*
*
*
@@ -960,17 +954,15 @@ public java.lang.String getFilter() {
* Optional. An expression for filtering the results of the request. For field names
* both snake_case and camelCase are supported.
* * `endpoint` supports = and !=. `endpoint` represents the Endpoint ID,
- * ie. the last segment of the Endpoint's [resource name][google.cloud.aiplatform.v1beta1.Endpoint.name].
- * * `display_name` supports =, != and regex()
- * (uses [re2](https://github.com/google/re2/wiki/Syntax) syntax)
+ * i.e. the last segment of the Endpoint's [resource name][google.cloud.aiplatform.v1beta1.Endpoint.name].
+ * * `display_name` supports = and !=
* * `labels` supports general map functions that is:
- * `labels.key=value` - key:value equality
- * `labels.key:* or labels:key - key existence
- * A key including a space must be quoted. `labels."a key"`.
+ * * `labels.key=value` - key:value equality
+ * * `labels.key:*` or `labels:key` - key existence
+ * * A key including a space must be quoted. `labels."a key"`.
* Some examples:
* * `endpoint=1`
* * `displayName="myDisplayName"`
- * * `regex(display_name, "^A") -> The display name starts with an A.
* * `labels.myKey="myValue"`
*
- * Only applicable to custom training.
- * Google Cloud Storage URI points to a YAML file describing annotation
- * schema. The schema is defined as an OpenAPI 3.0.2 [Schema Object](
- * https:
- * //github.com/OAI/OpenAPI-Specification/b
- * // lob/master/versions/3.0.2.md#schema-object)
+ * Applicable only to custom training with Datasets that have DataItems and
+ * Annotations.
+ * Cloud Storage URI that points to a YAML file describing the annotation
+ * schema. The schema is defined as an OpenAPI 3.0.2
+ * [Schema Object](https://tinyurl.com/y538mdwt#schema-object).
* The schema files that can be used here are found in
- * gs://google-cloud-aiplatform/schema/dataset/annotation/, note that the
+ * gs://google-cloud-aiplatform/schema/dataset/annotation/ , note that the
* chosen schema must be consistent with
* [metadata][google.cloud.aiplatform.v1beta1.Dataset.metadata_schema_uri] of the Dataset specified by
* [dataset_id][google.cloud.aiplatform.v1beta1.InputDataConfig.dataset_id].
@@ -3475,14 +3493,13 @@ public Builder clearAnnotationSchemaUri() {
*
*
*
*
@@ -924,17 +920,15 @@ public Builder setParentBytes(com.google.protobuf.ByteString value) {
* Optional. An expression for filtering the results of the request. For field names
* both snake_case and camelCase are supported.
* * `endpoint` supports = and !=. `endpoint` represents the Endpoint ID,
- * ie. the last segment of the Endpoint's [resource name][google.cloud.aiplatform.v1beta1.Endpoint.name].
- * * `display_name` supports =, != and regex()
- * (uses [re2](https://github.com/google/re2/wiki/Syntax) syntax)
+ * i.e. the last segment of the Endpoint's [resource name][google.cloud.aiplatform.v1beta1.Endpoint.name].
+ * * `display_name` supports = and !=
* * `labels` supports general map functions that is:
- * `labels.key=value` - key:value equality
- * `labels.key:* or labels:key - key existence
- * A key including a space must be quoted. `labels."a key"`.
+ * * `labels.key=value` - key:value equality
+ * * `labels.key:*` or `labels:key` - key existence
+ * * A key including a space must be quoted. `labels."a key"`.
* Some examples:
* * `endpoint=1`
* * `displayName="myDisplayName"`
- * * `regex(display_name, "^A") -> The display name starts with an A.
* * `labels.myKey="myValue"`
*
- * Only applicable to custom training.
- * Google Cloud Storage URI points to a YAML file describing annotation
- * schema. The schema is defined as an OpenAPI 3.0.2 [Schema Object](
- * https:
- * //github.com/OAI/OpenAPI-Specification/b
- * // lob/master/versions/3.0.2.md#schema-object)
+ * Applicable only to custom training with Datasets that have DataItems and
+ * Annotations.
+ * Cloud Storage URI that points to a YAML file describing the annotation
+ * schema. The schema is defined as an OpenAPI 3.0.2
+ * [Schema Object](https://tinyurl.com/y538mdwt#schema-object).
* The schema files that can be used here are found in
- * gs://google-cloud-aiplatform/schema/dataset/annotation/, note that the
+ * gs://google-cloud-aiplatform/schema/dataset/annotation/ , note that the
* chosen schema must be consistent with
* [metadata][google.cloud.aiplatform.v1beta1.Dataset.metadata_schema_uri] of the Dataset specified by
* [dataset_id][google.cloud.aiplatform.v1beta1.InputDataConfig.dataset_id].
diff --git a/proto-google-cloud-aiplatform-v1beta1/src/main/java/com/google/cloud/aiplatform/v1beta1/InputDataConfigOrBuilder.java b/proto-google-cloud-aiplatform-v1beta1/src/main/java/com/google/cloud/aiplatform/v1beta1/InputDataConfigOrBuilder.java
index aff3c1765..552e4ae07 100644
--- a/proto-google-cloud-aiplatform-v1beta1/src/main/java/com/google/cloud/aiplatform/v1beta1/InputDataConfigOrBuilder.java
+++ b/proto-google-cloud-aiplatform-v1beta1/src/main/java/com/google/cloud/aiplatform/v1beta1/InputDataConfigOrBuilder.java
@@ -173,17 +173,17 @@ public interface InputDataConfigOrBuilder
*
*
*
*
@@ -248,17 +246,15 @@ public java.lang.String getFilter() {
* Optional. An expression for filtering the results of the request. For field names
* both snake_case and camelCase are supported.
* * `endpoint` supports = and !=. `endpoint` represents the Endpoint ID,
- * ie. the last segment of the Endpoint's [resource name][google.cloud.aiplatform.v1beta1.Endpoint.name].
- * * `display_name` supports =, != and regex()
- * (uses [re2](https://github.com/google/re2/wiki/Syntax) syntax)
+ * i.e. the last segment of the Endpoint's [resource name][google.cloud.aiplatform.v1beta1.Endpoint.name].
+ * * `display_name` supports = and !=
* * `labels` supports general map functions that is:
- * `labels.key=value` - key:value equality
- * `labels.key:* or labels:key - key existence
- * A key including a space must be quoted. `labels."a key"`.
+ * * `labels.key=value` - key:value equality
+ * * `labels.key:*` or `labels:key` - key existence
+ * * A key including a space must be quoted. `labels."a key"`.
* Some examples:
* * `endpoint=1`
* * `displayName="myDisplayName"`
- * * `regex(display_name, "^A") -> The display name starts with an A.
* * `labels.myKey="myValue"`
*
- * The Google Cloud Storage location where the training data is to be
- * written to. In the given directory a new directory will be created with
+ * The Cloud Storage location where the training data is to be
+ * written to. In the given directory a new directory is created with
* name:
* `dataset-<dataset-id>-<annotation-type>-<timestamp-of-training-call>`
* where timestamp is in YYYY-MM-DDThh:mm:ss.sssZ ISO-8601 format.
- * All training input data will be written into that directory.
- * The AI Platform environment variables representing Google Cloud Storage
- * data URIs will always be represented in the Google Cloud Storage wildcard
+ * All training input data is written into that directory.
+ * The AI Platform environment variables representing Cloud Storage
+ * data URIs are represented in the Cloud Storage wildcard
* format to support sharded data. e.g.: "gs://.../training-*.jsonl"
* * AIP_DATA_FORMAT = "jsonl" for non-tabular data, "csv" for tabular data
- * * AIP_TRAINING_DATA_URI =
+ * * AIP_TRAINING_DATA_URI =
* "gcs_destination/dataset-<dataset-id>-<annotation-type>-<time>/training-*.${AIP_DATA_FORMAT}"
* * AIP_VALIDATION_DATA_URI =
* "gcs_destination/dataset-<dataset-id>-<annotation-type>-<time>/validation-*.${AIP_DATA_FORMAT}"
@@ -200,17 +200,17 @@ public interface InputDataConfigOrBuilder
*
*
*
*
diff --git a/proto-google-cloud-aiplatform-v1beta1/src/main/java/com/google/cloud/aiplatform/v1beta1/ListEndpointsRequest.java b/proto-google-cloud-aiplatform-v1beta1/src/main/java/com/google/cloud/aiplatform/v1beta1/ListEndpointsRequest.java
index ff2e69216..e39ebcc74 100644
--- a/proto-google-cloud-aiplatform-v1beta1/src/main/java/com/google/cloud/aiplatform/v1beta1/ListEndpointsRequest.java
+++ b/proto-google-cloud-aiplatform-v1beta1/src/main/java/com/google/cloud/aiplatform/v1beta1/ListEndpointsRequest.java
@@ -211,17 +211,15 @@ public com.google.protobuf.ByteString getParentBytes() {
* Optional. An expression for filtering the results of the request. For field names
* both snake_case and camelCase are supported.
* * `endpoint` supports = and !=. `endpoint` represents the Endpoint ID,
- * ie. the last segment of the Endpoint's [resource name][google.cloud.aiplatform.v1beta1.Endpoint.name].
- * * `display_name` supports =, != and regex()
- * (uses [re2](https://github.com/google/re2/wiki/Syntax) syntax)
+ * i.e. the last segment of the Endpoint's [resource name][google.cloud.aiplatform.v1beta1.Endpoint.name].
+ * * `display_name` supports = and !=
* * `labels` supports general map functions that is:
- * `labels.key=value` - key:value equality
- * `labels.key:* or labels:key - key existence
- * A key including a space must be quoted. `labels."a key"`.
+ * * `labels.key=value` - key:value equality
+ * * `labels.key:*` or `labels:key` - key existence
+ * * A key including a space must be quoted. `labels."a key"`.
* Some examples:
* * `endpoint=1`
* * `displayName="myDisplayName"`
- * * `regex(display_name, "^A") -> The display name starts with an A.
* * `labels.myKey="myValue"`
*
- * The Google Cloud Storage location where the training data is to be
- * written to. In the given directory a new directory will be created with
+ * The Cloud Storage location where the training data is to be
+ * written to. In the given directory a new directory is created with
* name:
* `dataset-<dataset-id>-<annotation-type>-<timestamp-of-training-call>`
* where timestamp is in YYYY-MM-DDThh:mm:ss.sssZ ISO-8601 format.
- * All training input data will be written into that directory.
- * The AI Platform environment variables representing Google Cloud Storage
- * data URIs will always be represented in the Google Cloud Storage wildcard
+ * All training input data is written into that directory.
+ * The AI Platform environment variables representing Cloud Storage
+ * data URIs are represented in the Cloud Storage wildcard
* format to support sharded data. e.g.: "gs://.../training-*.jsonl"
* * AIP_DATA_FORMAT = "jsonl" for non-tabular data, "csv" for tabular data
- * * AIP_TRAINING_DATA_URI =
+ * * AIP_TRAINING_DATA_URI =
* "gcs_destination/dataset-<dataset-id>-<annotation-type>-<time>/training-*.${AIP_DATA_FORMAT}"
* * AIP_VALIDATION_DATA_URI =
* "gcs_destination/dataset-<dataset-id>-<annotation-type>-<time>/validation-*.${AIP_DATA_FORMAT}"
@@ -227,17 +227,17 @@ public interface InputDataConfigOrBuilder
*
*
*
*
@@ -177,7 +197,7 @@ public interface ListDatasetsRequestOrBuilder
* Use "desc" after a field name for descending.
* Supported fields:
* * `display_name`
- * * `data_item_count` * `create_time`
+ * * `create_time`
* * `update_time`
*
- * The Google Cloud Storage location where the training data is to be
- * written to. In the given directory a new directory will be created with
+ * The Cloud Storage location where the training data is to be
+ * written to. In the given directory a new directory is created with
* name:
* `dataset-<dataset-id>-<annotation-type>-<timestamp-of-training-call>`
* where timestamp is in YYYY-MM-DDThh:mm:ss.sssZ ISO-8601 format.
- * All training input data will be written into that directory.
- * The AI Platform environment variables representing Google Cloud Storage
- * data URIs will always be represented in the Google Cloud Storage wildcard
+ * All training input data is written into that directory.
+ * The AI Platform environment variables representing Cloud Storage
+ * data URIs are represented in the Cloud Storage wildcard
* format to support sharded data. e.g.: "gs://.../training-*.jsonl"
* * AIP_DATA_FORMAT = "jsonl" for non-tabular data, "csv" for tabular data
- * * AIP_TRAINING_DATA_URI =
+ * * AIP_TRAINING_DATA_URI =
* "gcs_destination/dataset-<dataset-id>-<annotation-type>-<time>/training-*.${AIP_DATA_FORMAT}"
* * AIP_VALIDATION_DATA_URI =
* "gcs_destination/dataset-<dataset-id>-<annotation-type>-<time>/validation-*.${AIP_DATA_FORMAT}"
@@ -253,12 +253,14 @@ public interface InputDataConfigOrBuilder
*
*
*
*
diff --git a/proto-google-cloud-aiplatform-v1beta1/src/main/java/com/google/cloud/aiplatform/v1beta1/ListDatasetsRequestOrBuilder.java b/proto-google-cloud-aiplatform-v1beta1/src/main/java/com/google/cloud/aiplatform/v1beta1/ListDatasetsRequestOrBuilder.java
index 53573bf34..67015fddf 100644
--- a/proto-google-cloud-aiplatform-v1beta1/src/main/java/com/google/cloud/aiplatform/v1beta1/ListDatasetsRequestOrBuilder.java
+++ b/proto-google-cloud-aiplatform-v1beta1/src/main/java/com/google/cloud/aiplatform/v1beta1/ListDatasetsRequestOrBuilder.java
@@ -58,7 +58,17 @@ public interface ListDatasetsRequestOrBuilder
*
*
*
+ * Only applicable to custom training with tabular Dataset with BigQuery
+ * source.
* The BigQuery project location where the training data is to be written
* to. In the given project a new dataset is created with name
* `dataset_<dataset-id>_<annotation-type>_<timestamp-of-training-call>`
* where timestamp is in YYYY_MM_DDThh_mm_ss_sssZ format. All training
- * input data will be written into that dataset. In the dataset three
- * tables will be created, `training`, `validation` and `test`.
+ * input data is written into that dataset. In the dataset three
+ * tables are created, `training`, `validation` and `test`.
* * AIP_DATA_FORMAT = "bigquery".
* * AIP_TRAINING_DATA_URI =
* "bigquery_destination.dataset_<dataset-id>_<annotation-type>_<time>.training"
@@ -277,12 +279,14 @@ public interface InputDataConfigOrBuilder
*
*
*
*
@@ -1516,7 +1586,7 @@ public Builder clearOrderBy() {
* Use "desc" after a field name for descending.
* Supported fields:
* * `display_name`
- * * `data_item_count` * `create_time`
+ * * `create_time`
* * `update_time`
*
+ * Only applicable to custom training with tabular Dataset with BigQuery
+ * source.
* The BigQuery project location where the training data is to be written
* to. In the given project a new dataset is created with name
* `dataset_<dataset-id>_<annotation-type>_<timestamp-of-training-call>`
* where timestamp is in YYYY_MM_DDThh_mm_ss_sssZ format. All training
- * input data will be written into that dataset. In the dataset three
- * tables will be created, `training`, `validation` and `test`.
+ * input data is written into that dataset. In the dataset three
+ * tables are created, `training`, `validation` and `test`.
* * AIP_DATA_FORMAT = "bigquery".
* * AIP_TRAINING_DATA_URI =
* "bigquery_destination.dataset_<dataset-id>_<annotation-type>_<time>.training"
@@ -301,12 +305,14 @@ public interface InputDataConfigOrBuilder
*
*
*
*
@@ -1494,7 +1564,7 @@ public Builder setOrderBy(java.lang.String value) {
* Use "desc" after a field name for descending.
* Supported fields:
* * `display_name`
- * * `data_item_count` * `create_time`
+ * * `create_time`
* * `update_time`
*
+ * Only applicable to custom training with tabular Dataset with BigQuery
+ * source.
* The BigQuery project location where the training data is to be written
* to. In the given project a new dataset is created with name
* `dataset_<dataset-id>_<annotation-type>_<timestamp-of-training-call>`
* where timestamp is in YYYY_MM_DDThh_mm_ss_sssZ format. All training
- * input data will be written into that dataset. In the dataset three
- * tables will be created, `training`, `validation` and `test`.
+ * input data is written into that dataset. In the dataset three
+ * tables are created, `training`, `validation` and `test`.
* * AIP_DATA_FORMAT = "bigquery".
* * AIP_TRAINING_DATA_URI =
* "bigquery_destination.dataset_<dataset-id>_<annotation-type>_<time>.training"
@@ -362,7 +368,7 @@ public interface InputDataConfigOrBuilder
*
*
*
*
@@ -1468,7 +1538,7 @@ public com.google.protobuf.ByteString getOrderByBytes() {
* Use "desc" after a field name for descending.
* Supported fields:
* * `display_name`
- * * `data_item_count` * `create_time`
+ * * `create_time`
* * `update_time`
*
- * Only applicable to Datasets that have DataItems and Annotations.
+ * Applicable only to Datasets that have DataItems and Annotations.
* A filter on Annotations of the Dataset. Only Annotations that both
* match this filter and belong to DataItems not ignored by the split method
* are used in respectively training, validation or test role, depending on
@@ -382,7 +388,7 @@ public interface InputDataConfigOrBuilder
*
*
*
*
@@ -1441,7 +1511,7 @@ public java.lang.String getOrderBy() {
* Use "desc" after a field name for descending.
* Supported fields:
* * `display_name`
- * * `data_item_count` * `create_time`
+ * * `create_time`
* * `update_time`
*
- * Only applicable to Datasets that have DataItems and Annotations.
+ * Applicable only to Datasets that have DataItems and Annotations.
* A filter on Annotations of the Dataset. Only Annotations that both
* match this filter and belong to DataItems not ignored by the split method
* are used in respectively training, validation or test role, depending on
@@ -403,14 +409,13 @@ public interface InputDataConfigOrBuilder
*
*
*
*
@@ -967,7 +987,17 @@ public Builder setParentBytes(com.google.protobuf.ByteString value) {
*
*
*
- * Only applicable to custom training.
- * Google Cloud Storage URI points to a YAML file describing annotation
- * schema. The schema is defined as an OpenAPI 3.0.2 [Schema Object](
- * https:
- * //github.com/OAI/OpenAPI-Specification/b
- * // lob/master/versions/3.0.2.md#schema-object)
+ * Applicable only to custom training with Datasets that have DataItems and
+ * Annotations.
+ * Cloud Storage URI that points to a YAML file describing the annotation
+ * schema. The schema is defined as an OpenAPI 3.0.2
+ * [Schema Object](https://tinyurl.com/y538mdwt#schema-object).
* The schema files that can be used here are found in
- * gs://google-cloud-aiplatform/schema/dataset/annotation/, note that the
+ * gs://google-cloud-aiplatform/schema/dataset/annotation/ , note that the
* chosen schema must be consistent with
* [metadata][google.cloud.aiplatform.v1beta1.Dataset.metadata_schema_uri] of the Dataset specified by
* [dataset_id][google.cloud.aiplatform.v1beta1.InputDataConfig.dataset_id].
@@ -431,14 +436,13 @@ public interface InputDataConfigOrBuilder
*
*
*
*
@@ -410,7 +430,7 @@ public java.lang.String getOrderBy() {
* Use "desc" after a field name for descending.
* Supported fields:
* * `display_name`
- * * `data_item_count` * `create_time`
+ * * `create_time`
* * `update_time`
*
- * Only applicable to custom training.
- * Google Cloud Storage URI points to a YAML file describing annotation
- * schema. The schema is defined as an OpenAPI 3.0.2 [Schema Object](
- * https:
- * //github.com/OAI/OpenAPI-Specification/b
- * // lob/master/versions/3.0.2.md#schema-object)
+ * Applicable only to custom training with Datasets that have DataItems and
+ * Annotations.
+ * Cloud Storage URI that points to a YAML file describing the annotation
+ * schema. The schema is defined as an OpenAPI 3.0.2
+ * [Schema Object](https://tinyurl.com/y538mdwt#schema-object).
* The schema files that can be used here are found in
- * gs://google-cloud-aiplatform/schema/dataset/annotation/, note that the
+ * gs://google-cloud-aiplatform/schema/dataset/annotation/ , note that the
* chosen schema must be consistent with
* [metadata][google.cloud.aiplatform.v1beta1.Dataset.metadata_schema_uri] of the Dataset specified by
* [dataset_id][google.cloud.aiplatform.v1beta1.InputDataConfig.dataset_id].
diff --git a/proto-google-cloud-aiplatform-v1beta1/src/main/java/com/google/cloud/aiplatform/v1beta1/JobServiceProto.java b/proto-google-cloud-aiplatform-v1beta1/src/main/java/com/google/cloud/aiplatform/v1beta1/JobServiceProto.java
index 5f182f9f1..f90dbd52a 100644
--- a/proto-google-cloud-aiplatform-v1beta1/src/main/java/com/google/cloud/aiplatform/v1beta1/JobServiceProto.java
+++ b/proto-google-cloud-aiplatform-v1beta1/src/main/java/com/google/cloud/aiplatform/v1beta1/JobServiceProto.java
@@ -142,212 +142,214 @@ public static com.google.protobuf.Descriptors.FileDescriptor getDescriptor() {
+ "form/v1beta1/custom_job.proto\0327google/cl"
+ "oud/aiplatform/v1beta1/data_labeling_job"
+ ".proto\032?google/cloud/aiplatform/v1beta1/"
- + "hyperparameter_tuning_job.proto\032#google/"
- + "longrunning/operations.proto\032\033google/pro"
- + "tobuf/empty.proto\032 google/protobuf/field"
- + "_mask.proto\"\230\001\n\026CreateCustomJobRequest\0229"
- + "\n\006parent\030\001 \001(\tB)\340A\002\372A#\n!locations.google"
- + "apis.com/Location\022C\n\ncustom_job\030\002 \001(\0132*."
- + "google.cloud.aiplatform.v1beta1.CustomJo"
- + "bB\003\340A\002\"P\n\023GetCustomJobRequest\0229\n\004name\030\001 "
- + "\001(\tB+\340A\002\372A%\n#aiplatform.googleapis.com/C"
- + "ustomJob\"\270\001\n\025ListCustomJobsRequest\0229\n\006pa"
- + "rent\030\001 \001(\tB)\340A\002\372A#\n!locations.googleapis"
- + ".com/Location\022\016\n\006filter\030\002 \001(\t\022\021\n\tpage_si"
- + "ze\030\003 \001(\005\022\022\n\npage_token\030\004 \001(\t\022-\n\tread_mas"
- + "k\030\005 \001(\0132\032.google.protobuf.FieldMask\"r\n\026L"
- + "istCustomJobsResponse\022?\n\013custom_jobs\030\001 \003"
- + "(\0132*.google.cloud.aiplatform.v1beta1.Cus"
- + "tomJob\022\027\n\017next_page_token\030\002 \001(\t\"S\n\026Delet"
- + "eCustomJobRequest\0229\n\004name\030\001 \001(\tB+\340A\002\372A%\n"
- + "#aiplatform.googleapis.com/CustomJob\"S\n\026"
- + "CancelCustomJobRequest\0229\n\004name\030\001 \001(\tB+\340A"
- + "\002\372A%\n#aiplatform.googleapis.com/CustomJo"
- + "b\"\253\001\n\034CreateDataLabelingJobRequest\0229\n\006pa"
- + "rent\030\001 \001(\tB)\340A\002\372A#\n!locations.googleapis"
- + ".com/Location\022P\n\021data_labeling_job\030\002 \001(\013"
- + "20.google.cloud.aiplatform.v1beta1.DataL"
- + "abelingJobB\003\340A\002\"\\\n\031GetDataLabelingJobReq"
- + "uest\022?\n\004name\030\001 \001(\tB1\340A\002\372A+\n)aiplatform.g"
- + "oogleapis.com/DataLabelingJob\"\320\001\n\033ListDa"
- + "taLabelingJobsRequest\0229\n\006parent\030\001 \001(\tB)\340"
- + "A\002\372A#\n!locations.googleapis.com/Location"
- + "\022\016\n\006filter\030\002 \001(\t\022\021\n\tpage_size\030\003 \001(\005\022\022\n\np"
- + "age_token\030\004 \001(\t\022-\n\tread_mask\030\005 \001(\0132\032.goo"
- + "gle.protobuf.FieldMask\022\020\n\010order_by\030\006 \001(\t"
- + "\"\205\001\n\034ListDataLabelingJobsResponse\022L\n\022dat"
- + "a_labeling_jobs\030\001 \003(\01320.google.cloud.aip"
- + "latform.v1beta1.DataLabelingJob\022\027\n\017next_"
- + "page_token\030\002 \001(\t\"_\n\034DeleteDataLabelingJo"
- + "bRequest\022?\n\004name\030\001 \001(\tB1\340A\002\372A+\n)aiplatfo"
- + "rm.googleapis.com/DataLabelingJob\"_\n\034Can"
- + "celDataLabelingJobRequest\022?\n\004name\030\001 \001(\tB"
- + "1\340A\002\372A+\n)aiplatform.googleapis.com/DataL"
- + "abelingJob\"\303\001\n$CreateHyperparameterTunin"
- + "gJobRequest\0229\n\006parent\030\001 \001(\tB)\340A\002\372A#\n!loc"
- + "ations.googleapis.com/Location\022`\n\031hyperp"
- + "arameter_tuning_job\030\002 \001(\01328.google.cloud"
- + ".aiplatform.v1beta1.HyperparameterTuning"
- + "JobB\003\340A\002\"l\n!GetHyperparameterTuningJobRe"
- + "quest\022G\n\004name\030\001 \001(\tB9\340A\002\372A3\n1aiplatform."
- + "googleapis.com/HyperparameterTuningJob\"\306"
- + "\001\n#ListHyperparameterTuningJobsRequest\0229"
- + "\n\006parent\030\001 \001(\tB)\340A\002\372A#\n!locations.google"
- + "apis.com/Location\022\016\n\006filter\030\002 \001(\t\022\021\n\tpag"
- + "e_size\030\003 \001(\005\022\022\n\npage_token\030\004 \001(\t\022-\n\tread"
- + "_mask\030\005 \001(\0132\032.google.protobuf.FieldMask\""
- + "\235\001\n$ListHyperparameterTuningJobsResponse"
- + "\022\\\n\032hyperparameter_tuning_jobs\030\001 \003(\01328.g"
- + "oogle.cloud.aiplatform.v1beta1.Hyperpara"
- + "meterTuningJob\022\027\n\017next_page_token\030\002 \001(\t\""
- + "o\n$DeleteHyperparameterTuningJobRequest\022"
- + "G\n\004name\030\001 \001(\tB9\340A\002\372A3\n1aiplatform.google"
- + "apis.com/HyperparameterTuningJob\"o\n$Canc"
- + "elHyperparameterTuningJobRequest\022G\n\004name"
- + "\030\001 \001(\tB9\340A\002\372A3\n1aiplatform.googleapis.co"
- + "m/HyperparameterTuningJob\"\264\001\n\037CreateBatc"
- + "hPredictionJobRequest\0229\n\006parent\030\001 \001(\tB)\340"
- + "A\002\372A#\n!locations.googleapis.com/Location"
- + "\022V\n\024batch_prediction_job\030\002 \001(\01323.google."
- + "cloud.aiplatform.v1beta1.BatchPrediction"
- + "JobB\003\340A\002\"b\n\034GetBatchPredictionJobRequest"
- + "\022B\n\004name\030\001 \001(\tB4\340A\002\372A.\n,aiplatform.googl"
- + "eapis.com/BatchPredictionJob\"\301\001\n\036ListBat"
- + "chPredictionJobsRequest\0229\n\006parent\030\001 \001(\tB"
+ + "hyperparameter_tuning_job.proto\032/google/"
+ + "cloud/aiplatform/v1beta1/operation.proto"
+ + "\032#google/longrunning/operations.proto\032\033g"
+ + "oogle/protobuf/empty.proto\032 google/proto"
+ + "buf/field_mask.proto\032\037google/protobuf/ti"
+ + "mestamp.proto\"\230\001\n\026CreateCustomJobRequest"
+ + "\0229\n\006parent\030\001 \001(\tB)\340A\002\372A#\n!locations.goog"
+ + "leapis.com/Location\022C\n\ncustom_job\030\002 \001(\0132"
+ + "*.google.cloud.aiplatform.v1beta1.Custom"
+ + "JobB\003\340A\002\"P\n\023GetCustomJobRequest\0229\n\004name\030"
+ + "\001 \001(\tB+\340A\002\372A%\n#aiplatform.googleapis.com"
+ + "/CustomJob\"\270\001\n\025ListCustomJobsRequest\0229\n\006"
+ + "parent\030\001 \001(\tB)\340A\002\372A#\n!locations.googleap"
+ + "is.com/Location\022\016\n\006filter\030\002 \001(\t\022\021\n\tpage_"
+ + "size\030\003 \001(\005\022\022\n\npage_token\030\004 \001(\t\022-\n\tread_m"
+ + "ask\030\005 \001(\0132\032.google.protobuf.FieldMask\"r\n"
+ + "\026ListCustomJobsResponse\022?\n\013custom_jobs\030\001"
+ + " \003(\0132*.google.cloud.aiplatform.v1beta1.C"
+ + "ustomJob\022\027\n\017next_page_token\030\002 \001(\t\"S\n\026Del"
+ + "eteCustomJobRequest\0229\n\004name\030\001 \001(\tB+\340A\002\372A"
+ + "%\n#aiplatform.googleapis.com/CustomJob\"S"
+ + "\n\026CancelCustomJobRequest\0229\n\004name\030\001 \001(\tB+"
+ + "\340A\002\372A%\n#aiplatform.googleapis.com/Custom"
+ + "Job\"\253\001\n\034CreateDataLabelingJobRequest\0229\n\006"
+ + "parent\030\001 \001(\tB)\340A\002\372A#\n!locations.googleap"
+ + "is.com/Location\022P\n\021data_labeling_job\030\002 \001"
+ + "(\01320.google.cloud.aiplatform.v1beta1.Dat"
+ + "aLabelingJobB\003\340A\002\"\\\n\031GetDataLabelingJobR"
+ + "equest\022?\n\004name\030\001 \001(\tB1\340A\002\372A+\n)aiplatform"
+ + ".googleapis.com/DataLabelingJob\"\320\001\n\033List"
+ + "DataLabelingJobsRequest\0229\n\006parent\030\001 \001(\tB"
+ ")\340A\002\372A#\n!locations.googleapis.com/Locati"
+ "on\022\016\n\006filter\030\002 \001(\t\022\021\n\tpage_size\030\003 \001(\005\022\022\n"
+ "\npage_token\030\004 \001(\t\022-\n\tread_mask\030\005 \001(\0132\032.g"
- + "oogle.protobuf.FieldMask\"\216\001\n\037ListBatchPr"
- + "edictionJobsResponse\022R\n\025batch_prediction"
- + "_jobs\030\001 \003(\01323.google.cloud.aiplatform.v1"
- + "beta1.BatchPredictionJob\022\027\n\017next_page_to"
- + "ken\030\002 \001(\t\"e\n\037DeleteBatchPredictionJobReq"
- + "uest\022B\n\004name\030\001 \001(\tB4\340A\002\372A.\n,aiplatform.g"
- + "oogleapis.com/BatchPredictionJob\"e\n\037Canc"
- + "elBatchPredictionJobRequest\022B\n\004name\030\001 \001("
- + "\tB4\340A\002\372A.\n,aiplatform.googleapis.com/Bat"
- + "chPredictionJob2\333$\n\nJobService\022\323\001\n\017Creat"
- + "eCustomJob\0227.google.cloud.aiplatform.v1b"
- + "eta1.CreateCustomJobRequest\032*.google.clo"
- + "ud.aiplatform.v1beta1.CustomJob\"[\202\323\344\223\002A\""
- + "3/v1beta1/{parent=projects/*/locations/*"
- + "}/customJobs:\ncustom_job\332A\021parent,custom"
- + "_job\022\264\001\n\014GetCustomJob\0224.google.cloud.aip"
- + "latform.v1beta1.GetCustomJobRequest\032*.go"
- + "ogle.cloud.aiplatform.v1beta1.CustomJob\""
- + "B\202\323\344\223\0025\0223/v1beta1/{name=projects/*/locat"
- + "ions/*/customJobs/*}\332A\004name\022\307\001\n\016ListCust"
- + "omJobs\0226.google.cloud.aiplatform.v1beta1"
- + ".ListCustomJobsRequest\0327.google.cloud.ai"
- + "platform.v1beta1.ListCustomJobsResponse\""
- + "D\202\323\344\223\0025\0223/v1beta1/{parent=projects/*/loc"
- + "ations/*}/customJobs\332A\006parent\022\340\001\n\017Delete"
- + "CustomJob\0227.google.cloud.aiplatform.v1be"
- + "ta1.DeleteCustomJobRequest\032\035.google.long"
- + "running.Operation\"u\202\323\344\223\0025*3/v1beta1/{nam"
- + "e=projects/*/locations/*/customJobs/*}\332A"
- + "\004name\312A0\n\025google.protobuf.Empty\022\027DeleteO"
- + "perationMetadata\022\260\001\n\017CancelCustomJob\0227.g"
- + "oogle.cloud.aiplatform.v1beta1.CancelCus"
- + "tomJobRequest\032\026.google.protobuf.Empty\"L\202"
- + "\323\344\223\002?\":/v1beta1/{name=projects/*/locatio"
- + "ns/*/customJobs/*}:cancel:\001*\332A\004name\022\371\001\n\025"
- + "CreateDataLabelingJob\022=.google.cloud.aip"
- + "latform.v1beta1.CreateDataLabelingJobReq"
- + "uest\0320.google.cloud.aiplatform.v1beta1.D"
- + "ataLabelingJob\"o\202\323\344\223\002N\"9/v1beta1/{parent"
- + "=projects/*/locations/*}/dataLabelingJob"
- + "s:\021data_labeling_job\332A\030parent,data_label"
- + "ing_job\022\314\001\n\022GetDataLabelingJob\022:.google."
- + "cloud.aiplatform.v1beta1.GetDataLabeling"
- + "JobRequest\0320.google.cloud.aiplatform.v1b"
- + "eta1.DataLabelingJob\"H\202\323\344\223\002;\0229/v1beta1/{"
- + "name=projects/*/locations/*/dataLabeling"
- + "Jobs/*}\332A\004name\022\337\001\n\024ListDataLabelingJobs\022"
- + "<.google.cloud.aiplatform.v1beta1.ListDa"
- + "taLabelingJobsRequest\032=.google.cloud.aip"
- + "latform.v1beta1.ListDataLabelingJobsResp"
- + "onse\"J\202\323\344\223\002;\0229/v1beta1/{parent=projects/"
- + "*/locations/*}/dataLabelingJobs\332A\006parent"
- + "\022\362\001\n\025DeleteDataLabelingJob\022=.google.clou"
- + "d.aiplatform.v1beta1.DeleteDataLabelingJ"
- + "obRequest\032\035.google.longrunning.Operation"
- + "\"{\202\323\344\223\002;*9/v1beta1/{name=projects/*/loca"
- + "tions/*/dataLabelingJobs/*}\332A\004name\312A0\n\025g"
- + "oogle.protobuf.Empty\022\027DeleteOperationMet"
- + "adata\022\302\001\n\025CancelDataLabelingJob\022=.google"
- + ".cloud.aiplatform.v1beta1.CancelDataLabe"
- + "lingJobRequest\032\026.google.protobuf.Empty\"R"
- + "\202\323\344\223\002E\"@/v1beta1/{name=projects/*/locati"
- + "ons/*/dataLabelingJobs/*}:cancel:\001*\332A\004na"
- + "me\022\252\002\n\035CreateHyperparameterTuningJob\022E.g"
- + "oogle.cloud.aiplatform.v1beta1.CreateHyp"
- + "erparameterTuningJobRequest\0328.google.clo"
+ + "oogle.protobuf.FieldMask\022\020\n\010order_by\030\006 \001"
+ + "(\t\"\205\001\n\034ListDataLabelingJobsResponse\022L\n\022d"
+ + "ata_labeling_jobs\030\001 \003(\01320.google.cloud.a"
+ + "iplatform.v1beta1.DataLabelingJob\022\027\n\017nex"
+ + "t_page_token\030\002 \001(\t\"_\n\034DeleteDataLabeling"
+ + "JobRequest\022?\n\004name\030\001 \001(\tB1\340A\002\372A+\n)aiplat"
+ + "form.googleapis.com/DataLabelingJob\"_\n\034C"
+ + "ancelDataLabelingJobRequest\022?\n\004name\030\001 \001("
+ + "\tB1\340A\002\372A+\n)aiplatform.googleapis.com/Dat"
+ + "aLabelingJob\"\303\001\n$CreateHyperparameterTun"
+ + "ingJobRequest\0229\n\006parent\030\001 \001(\tB)\340A\002\372A#\n!l"
+ + "ocations.googleapis.com/Location\022`\n\031hype"
+ + "rparameter_tuning_job\030\002 \001(\01328.google.clo"
+ "ud.aiplatform.v1beta1.HyperparameterTuni"
- + "ngJob\"\207\001\202\323\344\223\002^\"A/v1beta1/{parent=project"
- + "s/*/locations/*}/hyperparameterTuningJob"
- + "s:\031hyperparameter_tuning_job\332A parent,hy"
- + "perparameter_tuning_job\022\354\001\n\032GetHyperpara"
- + "meterTuningJob\022B.google.cloud.aiplatform"
- + ".v1beta1.GetHyperparameterTuningJobReque"
- + "st\0328.google.cloud.aiplatform.v1beta1.Hyp"
- + "erparameterTuningJob\"P\202\323\344\223\002C\022A/v1beta1/{"
- + "name=projects/*/locations/*/hyperparamet"
- + "erTuningJobs/*}\332A\004name\022\377\001\n\034ListHyperpara"
- + "meterTuningJobs\022D.google.cloud.aiplatfor"
- + "m.v1beta1.ListHyperparameterTuningJobsRe"
- + "quest\032E.google.cloud.aiplatform.v1beta1."
- + "ListHyperparameterTuningJobsResponse\"R\202\323"
- + "\344\223\002C\022A/v1beta1/{parent=projects/*/locati"
- + "ons/*}/hyperparameterTuningJobs\332A\006parent"
- + "\022\213\002\n\035DeleteHyperparameterTuningJob\022E.goo"
- + "gle.cloud.aiplatform.v1beta1.DeleteHyper"
- + "parameterTuningJobRequest\032\035.google.longr"
- + "unning.Operation\"\203\001\202\323\344\223\002C*A/v1beta1/{nam"
- + "e=projects/*/locations/*/hyperparameterT"
- + "uningJobs/*}\332A\004name\312A0\n\025google.protobuf."
- + "Empty\022\027DeleteOperationMetadata\022\332\001\n\035Cance"
- + "lHyperparameterTuningJob\022E.google.cloud."
- + "aiplatform.v1beta1.CancelHyperparameterT"
- + "uningJobRequest\032\026.google.protobuf.Empty\""
- + "Z\202\323\344\223\002M\"H/v1beta1/{name=projects/*/locat"
- + "ions/*/hyperparameterTuningJobs/*}:cance"
- + "l:\001*\332A\004name\022\213\002\n\030CreateBatchPredictionJob"
- + "\022@.google.cloud.aiplatform.v1beta1.Creat"
- + "eBatchPredictionJobRequest\0323.google.clou"
- + "d.aiplatform.v1beta1.BatchPredictionJob\""
- + "x\202\323\344\223\002T\"\022\022*\022\022*
- * The standard list filter.
+ * An expression for filtering the results of the request. For field names
+ * both snake_case and camelCase are supported.
+ * * `display_name`: supports = and !=
+ * * `metadata_schema_uri`: supports = and !=
+ * * `labels` supports general map functions that is:
+ * * `labels.key=value` - key:value equality
+ * * `labels.key:* or labels:key - key existence
+ * * A key including a space must be quoted. `labels."a key"`.
+ * Some examples:
+ * * `displayName="myDisplayName"`
+ * * `labels.myKey="myValue"`
*
*
* string filter = 2;
@@ -239,7 +249,17 @@ public java.lang.String getFilter() {
*
*
*
- * The standard list filter.
+ * An expression for filtering the results of the request. For field names
+ * both snake_case and camelCase are supported.
+ * * `display_name`: supports = and !=
+ * * `metadata_schema_uri`: supports = and !=
+ * * `labels` supports general map functions that is:
+ * * `labels.key=value` - key:value equality
+ * * `labels.key:* or labels:key - key existence
+ * * A key including a space must be quoted. `labels."a key"`.
+ * Some examples:
+ * * `displayName="myDisplayName"`
+ * * `labels.myKey="myValue"`
*
*
* string filter = 2;
@@ -382,7 +402,7 @@ public com.google.protobuf.FieldMaskOrBuilder getReadMaskOrBuilder() {
* Use "desc" after a field name for descending.
* Supported fields:
* * `display_name`
- * * `data_item_count` * `create_time`
+ * * `create_time`
* * `update_time`
*
- * The standard list filter.
+ * An expression for filtering the results of the request. For field names
+ * both snake_case and camelCase are supported.
+ * * `display_name`: supports = and !=
+ * * `metadata_schema_uri`: supports = and !=
+ * * `labels` supports general map functions that is:
+ * * `labels.key=value` - key:value equality
+ * * `labels.key:* or labels:key - key existence
+ * * A key including a space must be quoted. `labels."a key"`.
+ * Some examples:
+ * * `displayName="myDisplayName"`
+ * * `labels.myKey="myValue"`
*
*
* string filter = 2;
@@ -989,7 +1019,17 @@ public java.lang.String getFilter() {
*
*
*
- * The standard list filter.
+ * An expression for filtering the results of the request. For field names
+ * both snake_case and camelCase are supported.
+ * * `display_name`: supports = and !=
+ * * `metadata_schema_uri`: supports = and !=
+ * * `labels` supports general map functions that is:
+ * * `labels.key=value` - key:value equality
+ * * `labels.key:* or labels:key - key existence
+ * * A key including a space must be quoted. `labels."a key"`.
+ * Some examples:
+ * * `displayName="myDisplayName"`
+ * * `labels.myKey="myValue"`
*
*
* string filter = 2;
@@ -1011,7 +1051,17 @@ public com.google.protobuf.ByteString getFilterBytes() {
*
*
*
- * The standard list filter.
+ * An expression for filtering the results of the request. For field names
+ * both snake_case and camelCase are supported.
+ * * `display_name`: supports = and !=
+ * * `metadata_schema_uri`: supports = and !=
+ * * `labels` supports general map functions that is:
+ * * `labels.key=value` - key:value equality
+ * * `labels.key:* or labels:key - key existence
+ * * A key including a space must be quoted. `labels."a key"`.
+ * Some examples:
+ * * `displayName="myDisplayName"`
+ * * `labels.myKey="myValue"`
*
*
* string filter = 2;
@@ -1032,7 +1082,17 @@ public Builder setFilter(java.lang.String value) {
*
*
*
- * The standard list filter.
+ * An expression for filtering the results of the request. For field names
+ * both snake_case and camelCase are supported.
+ * * `display_name`: supports = and !=
+ * * `metadata_schema_uri`: supports = and !=
+ * * `labels` supports general map functions that is:
+ * * `labels.key=value` - key:value equality
+ * * `labels.key:* or labels:key - key existence
+ * * A key including a space must be quoted. `labels."a key"`.
+ * Some examples:
+ * * `displayName="myDisplayName"`
+ * * `labels.myKey="myValue"`
*
*
* string filter = 2;
@@ -1049,7 +1109,17 @@ public Builder clearFilter() {
*
*
*
- * The standard list filter.
+ * An expression for filtering the results of the request. For field names
+ * both snake_case and camelCase are supported.
+ * * `display_name`: supports = and !=
+ * * `metadata_schema_uri`: supports = and !=
+ * * `labels` supports general map functions that is:
+ * * `labels.key=value` - key:value equality
+ * * `labels.key:* or labels:key - key existence
+ * * A key including a space must be quoted. `labels."a key"`.
+ * Some examples:
+ * * `displayName="myDisplayName"`
+ * * `labels.myKey="myValue"`
*
*
* string filter = 2;
@@ -1414,7 +1484,7 @@ public com.google.protobuf.FieldMaskOrBuilder getReadMaskOrBuilder() {
* Use "desc" after a field name for descending.
* Supported fields:
* * `display_name`
- * * `data_item_count` * `create_time`
+ * * `create_time`
* * `update_time`
*
- * The standard list filter.
+ * An expression for filtering the results of the request. For field names
+ * both snake_case and camelCase are supported.
+ * * `display_name`: supports = and !=
+ * * `metadata_schema_uri`: supports = and !=
+ * * `labels` supports general map functions that is:
+ * * `labels.key=value` - key:value equality
+ * * `labels.key:* or labels:key - key existence
+ * * A key including a space must be quoted. `labels."a key"`.
+ * Some examples:
+ * * `displayName="myDisplayName"`
+ * * `labels.myKey="myValue"`
*
*
* string filter = 2;
@@ -70,7 +80,17 @@ public interface ListDatasetsRequestOrBuilder
*
*
*
- * The standard list filter.
+ * An expression for filtering the results of the request. For field names
+ * both snake_case and camelCase are supported.
+ * * `display_name`: supports = and !=
+ * * `metadata_schema_uri`: supports = and !=
+ * * `labels` supports general map functions that is:
+ * * `labels.key=value` - key:value equality
+ * * `labels.key:* or labels:key - key existence
+ * * A key including a space must be quoted. `labels."a key"`.
+ * Some examples:
+ * * `displayName="myDisplayName"`
+ * * `labels.myKey="myValue"`
*
*
* string filter = 2;
@@ -160,7 +180,7 @@ public interface ListDatasetsRequestOrBuilder
* Use "desc" after a field name for descending.
* Supported fields:
* * `display_name`
- * * `data_item_count` * `create_time`
+ * * `create_time`
* * `update_time`
*
- * The standard list filter.
+ * An expression for filtering the results of the request. For field names
+ * both snake_case and camelCase are supported.
+ * * `model` supports = and !=. `model` represents the Model ID,
+ * i.e. the last segment of the Model's [resource name][google.cloud.aiplatform.v1beta1.Model.name].
+ * * `display_name` supports = and !=
+ * * `labels` supports general map functions that is:
+ * * `labels.key=value` - key:value equality
+ * * `labels.key:* or labels:key - key existence
+ * * A key including a space must be quoted. `labels."a key"`.
+ * Some examples:
+ * * `model=1234`
+ * * `displayName="myDisplayName"`
+ * * `labels.myKey="myValue"`
*
*
* string filter = 2;
@@ -231,7 +243,19 @@ public java.lang.String getFilter() {
*
*
*
- * The standard list filter.
+ * An expression for filtering the results of the request. For field names
+ * both snake_case and camelCase are supported.
+ * * `model` supports = and !=. `model` represents the Model ID,
+ * i.e. the last segment of the Model's [resource name][google.cloud.aiplatform.v1beta1.Model.name].
+ * * `display_name` supports = and !=
+ * * `labels` supports general map functions that is:
+ * * `labels.key=value` - key:value equality
+ * * `labels.key:* or labels:key - key existence
+ * * A key including a space must be quoted. `labels."a key"`.
+ * Some examples:
+ * * `model=1234`
+ * * `displayName="myDisplayName"`
+ * * `labels.myKey="myValue"`
*
*
* string filter = 2;
@@ -890,7 +914,19 @@ public Builder setParentBytes(com.google.protobuf.ByteString value) {
*
*
*
- * The standard list filter.
+ * An expression for filtering the results of the request. For field names
+ * both snake_case and camelCase are supported.
+ * * `model` supports = and !=. `model` represents the Model ID,
+ * i.e. the last segment of the Model's [resource name][google.cloud.aiplatform.v1beta1.Model.name].
+ * * `display_name` supports = and !=
+ * * `labels` supports general map functions that is:
+ * * `labels.key=value` - key:value equality
+ * * `labels.key:* or labels:key - key existence
+ * * A key including a space must be quoted. `labels."a key"`.
+ * Some examples:
+ * * `model=1234`
+ * * `displayName="myDisplayName"`
+ * * `labels.myKey="myValue"`
*
*
* string filter = 2;
@@ -912,7 +948,19 @@ public java.lang.String getFilter() {
*
*
*
- * The standard list filter.
+ * An expression for filtering the results of the request. For field names
+ * both snake_case and camelCase are supported.
+ * * `model` supports = and !=. `model` represents the Model ID,
+ * i.e. the last segment of the Model's [resource name][google.cloud.aiplatform.v1beta1.Model.name].
+ * * `display_name` supports = and !=
+ * * `labels` supports general map functions that is:
+ * * `labels.key=value` - key:value equality
+ * * `labels.key:* or labels:key - key existence
+ * * A key including a space must be quoted. `labels."a key"`.
+ * Some examples:
+ * * `model=1234`
+ * * `displayName="myDisplayName"`
+ * * `labels.myKey="myValue"`
*
*
* string filter = 2;
@@ -934,7 +982,19 @@ public com.google.protobuf.ByteString getFilterBytes() {
*
*
*
- * The standard list filter.
+ * An expression for filtering the results of the request. For field names
+ * both snake_case and camelCase are supported.
+ * * `model` supports = and !=. `model` represents the Model ID,
+ * i.e. the last segment of the Model's [resource name][google.cloud.aiplatform.v1beta1.Model.name].
+ * * `display_name` supports = and !=
+ * * `labels` supports general map functions that is:
+ * * `labels.key=value` - key:value equality
+ * * `labels.key:* or labels:key - key existence
+ * * A key including a space must be quoted. `labels."a key"`.
+ * Some examples:
+ * * `model=1234`
+ * * `displayName="myDisplayName"`
+ * * `labels.myKey="myValue"`
*
*
* string filter = 2;
@@ -955,7 +1015,19 @@ public Builder setFilter(java.lang.String value) {
*
*
*
- * The standard list filter.
+ * An expression for filtering the results of the request. For field names
+ * both snake_case and camelCase are supported.
+ * * `model` supports = and !=. `model` represents the Model ID,
+ * i.e. the last segment of the Model's [resource name][google.cloud.aiplatform.v1beta1.Model.name].
+ * * `display_name` supports = and !=
+ * * `labels` supports general map functions that is:
+ * * `labels.key=value` - key:value equality
+ * * `labels.key:* or labels:key - key existence
+ * * A key including a space must be quoted. `labels."a key"`.
+ * Some examples:
+ * * `model=1234`
+ * * `displayName="myDisplayName"`
+ * * `labels.myKey="myValue"`
*
*
* string filter = 2;
@@ -972,7 +1044,19 @@ public Builder clearFilter() {
*
*
*
- * The standard list filter.
+ * An expression for filtering the results of the request. For field names
+ * both snake_case and camelCase are supported.
+ * * `model` supports = and !=. `model` represents the Model ID,
+ * i.e. the last segment of the Model's [resource name][google.cloud.aiplatform.v1beta1.Model.name].
+ * * `display_name` supports = and !=
+ * * `labels` supports general map functions that is:
+ * * `labels.key=value` - key:value equality
+ * * `labels.key:* or labels:key - key existence
+ * * A key including a space must be quoted. `labels."a key"`.
+ * Some examples:
+ * * `model=1234`
+ * * `displayName="myDisplayName"`
+ * * `labels.myKey="myValue"`
*
*
* string filter = 2;
diff --git a/proto-google-cloud-aiplatform-v1beta1/src/main/java/com/google/cloud/aiplatform/v1beta1/ListModelsRequestOrBuilder.java b/proto-google-cloud-aiplatform-v1beta1/src/main/java/com/google/cloud/aiplatform/v1beta1/ListModelsRequestOrBuilder.java
index b72b77ea2..8b752cb44 100644
--- a/proto-google-cloud-aiplatform-v1beta1/src/main/java/com/google/cloud/aiplatform/v1beta1/ListModelsRequestOrBuilder.java
+++ b/proto-google-cloud-aiplatform-v1beta1/src/main/java/com/google/cloud/aiplatform/v1beta1/ListModelsRequestOrBuilder.java
@@ -58,7 +58,19 @@ public interface ListModelsRequestOrBuilder
*
*
*
- * The standard list filter.
+ * An expression for filtering the results of the request. For field names
+ * both snake_case and camelCase are supported.
+ * * `model` supports = and !=. `model` represents the Model ID,
+ * i.e. the last segment of the Model's [resource name][google.cloud.aiplatform.v1beta1.Model.name].
+ * * `display_name` supports = and !=
+ * * `labels` supports general map functions that is:
+ * * `labels.key=value` - key:value equality
+ * * `labels.key:* or labels:key - key existence
+ * * A key including a space must be quoted. `labels."a key"`.
+ * Some examples:
+ * * `model=1234`
+ * * `displayName="myDisplayName"`
+ * * `labels.myKey="myValue"`
*
*
* string filter = 2;
@@ -70,7 +82,19 @@ public interface ListModelsRequestOrBuilder
*
*
*
- * The standard list filter.
+ * An expression for filtering the results of the request. For field names
+ * both snake_case and camelCase are supported.
+ * * `model` supports = and !=. `model` represents the Model ID,
+ * i.e. the last segment of the Model's [resource name][google.cloud.aiplatform.v1beta1.Model.name].
+ * * `display_name` supports = and !=
+ * * `labels` supports general map functions that is:
+ * * `labels.key=value` - key:value equality
+ * * `labels.key:* or labels:key - key existence
+ * * A key including a space must be quoted. `labels."a key"`.
+ * Some examples:
+ * * `model=1234`
+ * * `displayName="myDisplayName"`
+ * * `labels.myKey="myValue"`
*
*
* string filter = 2;
diff --git a/proto-google-cloud-aiplatform-v1beta1/src/main/java/com/google/cloud/aiplatform/v1beta1/MachineSpec.java b/proto-google-cloud-aiplatform-v1beta1/src/main/java/com/google/cloud/aiplatform/v1beta1/MachineSpec.java
index 44b16c16c..1f71333d7 100644
--- a/proto-google-cloud-aiplatform-v1beta1/src/main/java/com/google/cloud/aiplatform/v1beta1/MachineSpec.java
+++ b/proto-google-cloud-aiplatform-v1beta1/src/main/java/com/google/cloud/aiplatform/v1beta1/MachineSpec.java
@@ -130,25 +130,12 @@ public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
*
*
*
- * Immutable. The type of the machine.
- * Following machine types are supported:
- * * `n1-standard-2`
- * * `n1-standard-4`
- * * `n1-standard-8`
- * * `n1-standard-16`
- * * `n1-standard-32`
- * * `n1-highmem-2`
- * * `n1-highmem-4`
- * * `n1-highmem-8`
- * * `n1-highmem-16`
- * * `n1-highmem-32`
- * * `n1-highcpu-2`
- * * `n1-highcpu-4`
- * * `n1-highcpu-8`
- * * `n1-highcpu-16`
- * * `n1-highcpu-32`
- * When used for [DeployedMode][] this field is optional and the default value
- * is `n1-standard-2`. If used for [BatchPredictionJob][google.cloud.aiplatform.v1beta1.BatchPredictionJob] or as part of
+ * Immutable. The type of the machine. For the machine types supported for prediction,
+ * see https://tinyurl.com/aip-docs/predictions/machine-types.
+ * For machine types supported for creating a custom training job, see
+ * https://tinyurl.com/aip-docs/training/configure-compute.
+ * For [DeployedModel][google.cloud.aiplatform.v1beta1.DeployedModel] this field is optional, and the default
+ * value is `n1-standard-2`. For [BatchPredictionJob][google.cloud.aiplatform.v1beta1.BatchPredictionJob] or as part of
* [WorkerPoolSpec][google.cloud.aiplatform.v1beta1.WorkerPoolSpec] this field is required.
*
*
@@ -172,25 +159,12 @@ public java.lang.String getMachineType() {
*
*
*
- * Immutable. The type of the machine.
- * Following machine types are supported:
- * * `n1-standard-2`
- * * `n1-standard-4`
- * * `n1-standard-8`
- * * `n1-standard-16`
- * * `n1-standard-32`
- * * `n1-highmem-2`
- * * `n1-highmem-4`
- * * `n1-highmem-8`
- * * `n1-highmem-16`
- * * `n1-highmem-32`
- * * `n1-highcpu-2`
- * * `n1-highcpu-4`
- * * `n1-highcpu-8`
- * * `n1-highcpu-16`
- * * `n1-highcpu-32`
- * When used for [DeployedMode][] this field is optional and the default value
- * is `n1-standard-2`. If used for [BatchPredictionJob][google.cloud.aiplatform.v1beta1.BatchPredictionJob] or as part of
+ * Immutable. The type of the machine. For the machine types supported for prediction,
+ * see https://tinyurl.com/aip-docs/predictions/machine-types.
+ * For machine types supported for creating a custom training job, see
+ * https://tinyurl.com/aip-docs/training/configure-compute.
+ * For [DeployedModel][google.cloud.aiplatform.v1beta1.DeployedModel] this field is optional, and the default
+ * value is `n1-standard-2`. For [BatchPredictionJob][google.cloud.aiplatform.v1beta1.BatchPredictionJob] or as part of
* [WorkerPoolSpec][google.cloud.aiplatform.v1beta1.WorkerPoolSpec] this field is required.
*
*
@@ -629,25 +603,12 @@ public Builder mergeFrom(
*
*
*
- * Immutable. The type of the machine.
- * Following machine types are supported:
- * * `n1-standard-2`
- * * `n1-standard-4`
- * * `n1-standard-8`
- * * `n1-standard-16`
- * * `n1-standard-32`
- * * `n1-highmem-2`
- * * `n1-highmem-4`
- * * `n1-highmem-8`
- * * `n1-highmem-16`
- * * `n1-highmem-32`
- * * `n1-highcpu-2`
- * * `n1-highcpu-4`
- * * `n1-highcpu-8`
- * * `n1-highcpu-16`
- * * `n1-highcpu-32`
- * When used for [DeployedMode][] this field is optional and the default value
- * is `n1-standard-2`. If used for [BatchPredictionJob][google.cloud.aiplatform.v1beta1.BatchPredictionJob] or as part of
+ * Immutable. The type of the machine. For the machine types supported for prediction,
+ * see https://tinyurl.com/aip-docs/predictions/machine-types.
+ * For machine types supported for creating a custom training job, see
+ * https://tinyurl.com/aip-docs/training/configure-compute.
+ * For [DeployedModel][google.cloud.aiplatform.v1beta1.DeployedModel] this field is optional, and the default
+ * value is `n1-standard-2`. For [BatchPredictionJob][google.cloud.aiplatform.v1beta1.BatchPredictionJob] or as part of
* [WorkerPoolSpec][google.cloud.aiplatform.v1beta1.WorkerPoolSpec] this field is required.
*
*
@@ -670,25 +631,12 @@ public java.lang.String getMachineType() {
*
*
*
- * Immutable. The type of the machine.
- * Following machine types are supported:
- * * `n1-standard-2`
- * * `n1-standard-4`
- * * `n1-standard-8`
- * * `n1-standard-16`
- * * `n1-standard-32`
- * * `n1-highmem-2`
- * * `n1-highmem-4`
- * * `n1-highmem-8`
- * * `n1-highmem-16`
- * * `n1-highmem-32`
- * * `n1-highcpu-2`
- * * `n1-highcpu-4`
- * * `n1-highcpu-8`
- * * `n1-highcpu-16`
- * * `n1-highcpu-32`
- * When used for [DeployedMode][] this field is optional and the default value
- * is `n1-standard-2`. If used for [BatchPredictionJob][google.cloud.aiplatform.v1beta1.BatchPredictionJob] or as part of
+ * Immutable. The type of the machine. For the machine types supported for prediction,
+ * see https://tinyurl.com/aip-docs/predictions/machine-types.
+ * For machine types supported for creating a custom training job, see
+ * https://tinyurl.com/aip-docs/training/configure-compute.
+ * For [DeployedModel][google.cloud.aiplatform.v1beta1.DeployedModel] this field is optional, and the default
+ * value is `n1-standard-2`. For [BatchPredictionJob][google.cloud.aiplatform.v1beta1.BatchPredictionJob] or as part of
* [WorkerPoolSpec][google.cloud.aiplatform.v1beta1.WorkerPoolSpec] this field is required.
*
*
@@ -711,25 +659,12 @@ public com.google.protobuf.ByteString getMachineTypeBytes() {
*
*
*
- * Immutable. The type of the machine.
- * Following machine types are supported:
- * * `n1-standard-2`
- * * `n1-standard-4`
- * * `n1-standard-8`
- * * `n1-standard-16`
- * * `n1-standard-32`
- * * `n1-highmem-2`
- * * `n1-highmem-4`
- * * `n1-highmem-8`
- * * `n1-highmem-16`
- * * `n1-highmem-32`
- * * `n1-highcpu-2`
- * * `n1-highcpu-4`
- * * `n1-highcpu-8`
- * * `n1-highcpu-16`
- * * `n1-highcpu-32`
- * When used for [DeployedMode][] this field is optional and the default value
- * is `n1-standard-2`. If used for [BatchPredictionJob][google.cloud.aiplatform.v1beta1.BatchPredictionJob] or as part of
+ * Immutable. The type of the machine. For the machine types supported for prediction,
+ * see https://tinyurl.com/aip-docs/predictions/machine-types.
+ * For machine types supported for creating a custom training job, see
+ * https://tinyurl.com/aip-docs/training/configure-compute.
+ * For [DeployedModel][google.cloud.aiplatform.v1beta1.DeployedModel] this field is optional, and the default
+ * value is `n1-standard-2`. For [BatchPredictionJob][google.cloud.aiplatform.v1beta1.BatchPredictionJob] or as part of
* [WorkerPoolSpec][google.cloud.aiplatform.v1beta1.WorkerPoolSpec] this field is required.
*
*
@@ -751,25 +686,12 @@ public Builder setMachineType(java.lang.String value) {
*
*
*
- * Immutable. The type of the machine.
- * Following machine types are supported:
- * * `n1-standard-2`
- * * `n1-standard-4`
- * * `n1-standard-8`
- * * `n1-standard-16`
- * * `n1-standard-32`
- * * `n1-highmem-2`
- * * `n1-highmem-4`
- * * `n1-highmem-8`
- * * `n1-highmem-16`
- * * `n1-highmem-32`
- * * `n1-highcpu-2`
- * * `n1-highcpu-4`
- * * `n1-highcpu-8`
- * * `n1-highcpu-16`
- * * `n1-highcpu-32`
- * When used for [DeployedMode][] this field is optional and the default value
- * is `n1-standard-2`. If used for [BatchPredictionJob][google.cloud.aiplatform.v1beta1.BatchPredictionJob] or as part of
+ * Immutable. The type of the machine. For the machine types supported for prediction,
+ * see https://tinyurl.com/aip-docs/predictions/machine-types.
+ * For machine types supported for creating a custom training job, see
+ * https://tinyurl.com/aip-docs/training/configure-compute.
+ * For [DeployedModel][google.cloud.aiplatform.v1beta1.DeployedModel] this field is optional, and the default
+ * value is `n1-standard-2`. For [BatchPredictionJob][google.cloud.aiplatform.v1beta1.BatchPredictionJob] or as part of
* [WorkerPoolSpec][google.cloud.aiplatform.v1beta1.WorkerPoolSpec] this field is required.
*
*
@@ -787,25 +709,12 @@ public Builder clearMachineType() {
*
*
*
- * Immutable. The type of the machine.
- * Following machine types are supported:
- * * `n1-standard-2`
- * * `n1-standard-4`
- * * `n1-standard-8`
- * * `n1-standard-16`
- * * `n1-standard-32`
- * * `n1-highmem-2`
- * * `n1-highmem-4`
- * * `n1-highmem-8`
- * * `n1-highmem-16`
- * * `n1-highmem-32`
- * * `n1-highcpu-2`
- * * `n1-highcpu-4`
- * * `n1-highcpu-8`
- * * `n1-highcpu-16`
- * * `n1-highcpu-32`
- * When used for [DeployedMode][] this field is optional and the default value
- * is `n1-standard-2`. If used for [BatchPredictionJob][google.cloud.aiplatform.v1beta1.BatchPredictionJob] or as part of
+ * Immutable. The type of the machine. For the machine types supported for prediction,
+ * see https://tinyurl.com/aip-docs/predictions/machine-types.
+ * For machine types supported for creating a custom training job, see
+ * https://tinyurl.com/aip-docs/training/configure-compute.
+ * For [DeployedModel][google.cloud.aiplatform.v1beta1.DeployedModel] this field is optional, and the default
+ * value is `n1-standard-2`. For [BatchPredictionJob][google.cloud.aiplatform.v1beta1.BatchPredictionJob] or as part of
* [WorkerPoolSpec][google.cloud.aiplatform.v1beta1.WorkerPoolSpec] this field is required.
*
*
diff --git a/proto-google-cloud-aiplatform-v1beta1/src/main/java/com/google/cloud/aiplatform/v1beta1/MachineSpecOrBuilder.java b/proto-google-cloud-aiplatform-v1beta1/src/main/java/com/google/cloud/aiplatform/v1beta1/MachineSpecOrBuilder.java
index 854e04550..43a7bc1d7 100644
--- a/proto-google-cloud-aiplatform-v1beta1/src/main/java/com/google/cloud/aiplatform/v1beta1/MachineSpecOrBuilder.java
+++ b/proto-google-cloud-aiplatform-v1beta1/src/main/java/com/google/cloud/aiplatform/v1beta1/MachineSpecOrBuilder.java
@@ -27,25 +27,12 @@ public interface MachineSpecOrBuilder
*
*
*
- * Immutable. The type of the machine.
- * Following machine types are supported:
- * * `n1-standard-2`
- * * `n1-standard-4`
- * * `n1-standard-8`
- * * `n1-standard-16`
- * * `n1-standard-32`
- * * `n1-highmem-2`
- * * `n1-highmem-4`
- * * `n1-highmem-8`
- * * `n1-highmem-16`
- * * `n1-highmem-32`
- * * `n1-highcpu-2`
- * * `n1-highcpu-4`
- * * `n1-highcpu-8`
- * * `n1-highcpu-16`
- * * `n1-highcpu-32`
- * When used for [DeployedMode][] this field is optional and the default value
- * is `n1-standard-2`. If used for [BatchPredictionJob][google.cloud.aiplatform.v1beta1.BatchPredictionJob] or as part of
+ * Immutable. The type of the machine. For the machine types supported for prediction,
+ * see https://tinyurl.com/aip-docs/predictions/machine-types.
+ * For machine types supported for creating a custom training job, see
+ * https://tinyurl.com/aip-docs/training/configure-compute.
+ * For [DeployedModel][google.cloud.aiplatform.v1beta1.DeployedModel] this field is optional, and the default
+ * value is `n1-standard-2`. For [BatchPredictionJob][google.cloud.aiplatform.v1beta1.BatchPredictionJob] or as part of
* [WorkerPoolSpec][google.cloud.aiplatform.v1beta1.WorkerPoolSpec] this field is required.
*
*
@@ -58,25 +45,12 @@ public interface MachineSpecOrBuilder
*
*
*
- * Immutable. The type of the machine.
- * Following machine types are supported:
- * * `n1-standard-2`
- * * `n1-standard-4`
- * * `n1-standard-8`
- * * `n1-standard-16`
- * * `n1-standard-32`
- * * `n1-highmem-2`
- * * `n1-highmem-4`
- * * `n1-highmem-8`
- * * `n1-highmem-16`
- * * `n1-highmem-32`
- * * `n1-highcpu-2`
- * * `n1-highcpu-4`
- * * `n1-highcpu-8`
- * * `n1-highcpu-16`
- * * `n1-highcpu-32`
- * When used for [DeployedMode][] this field is optional and the default value
- * is `n1-standard-2`. If used for [BatchPredictionJob][google.cloud.aiplatform.v1beta1.BatchPredictionJob] or as part of
+ * Immutable. The type of the machine. For the machine types supported for prediction,
+ * see https://tinyurl.com/aip-docs/predictions/machine-types.
+ * For machine types supported for creating a custom training job, see
+ * https://tinyurl.com/aip-docs/training/configure-compute.
+ * For [DeployedModel][google.cloud.aiplatform.v1beta1.DeployedModel] this field is optional, and the default
+ * value is `n1-standard-2`. For [BatchPredictionJob][google.cloud.aiplatform.v1beta1.BatchPredictionJob] or as part of
* [WorkerPoolSpec][google.cloud.aiplatform.v1beta1.WorkerPoolSpec] this field is required.
*
*
diff --git a/proto-google-cloud-aiplatform-v1beta1/src/main/java/com/google/cloud/aiplatform/v1beta1/MigrationServiceProto.java b/proto-google-cloud-aiplatform-v1beta1/src/main/java/com/google/cloud/aiplatform/v1beta1/MigrationServiceProto.java
index 787ef260e..81177ae42 100644
--- a/proto-google-cloud-aiplatform-v1beta1/src/main/java/com/google/cloud/aiplatform/v1beta1/MigrationServiceProto.java
+++ b/proto-google-cloud-aiplatform-v1beta1/src/main/java/com/google/cloud/aiplatform/v1beta1/MigrationServiceProto.java
@@ -75,6 +75,10 @@ public static void registerAllExtensions(com.google.protobuf.ExtensionRegistry r
internal_static_google_cloud_aiplatform_v1beta1_BatchMigrateResourcesOperationMetadata_descriptor;
static final com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
internal_static_google_cloud_aiplatform_v1beta1_BatchMigrateResourcesOperationMetadata_fieldAccessorTable;
+ static final com.google.protobuf.Descriptors.Descriptor
+ internal_static_google_cloud_aiplatform_v1beta1_BatchMigrateResourcesOperationMetadata_PartialResult_descriptor;
+ static final com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
+ internal_static_google_cloud_aiplatform_v1beta1_BatchMigrateResourcesOperationMetadata_PartialResult_fieldAccessorTable;
public static com.google.protobuf.Descriptors.FileDescriptor getDescriptor() {
return descriptor;
@@ -95,86 +99,97 @@ public static com.google.protobuf.Descriptors.FileDescriptor getDescriptor() {
+ "rm/v1beta1/migratable_resource.proto\032/go"
+ "ogle/cloud/aiplatform/v1beta1/operation."
+ "proto\032#google/longrunning/operations.pro"
- + "to\"\204\001\n SearchMigratableResourcesRequest\022"
- + "9\n\006parent\030\001 \001(\tB)\340A\002\372A#\n!locations.googl"
- + "eapis.com/Location\022\021\n\tpage_size\030\002 \001(\005\022\022\n"
- + "\npage_token\030\003 \001(\t\"\217\001\n!SearchMigratableRe"
- + "sourcesResponse\022Q\n\024migratable_resources\030"
- + "\001 \003(\01323.google.cloud.aiplatform.v1beta1."
- + "MigratableResource\022\027\n\017next_page_token\030\002 "
- + "\001(\t\"\272\001\n\034BatchMigrateResourcesRequest\0229\n\006"
- + "parent\030\001 \001(\tB)\340A\002\372A#\n!locations.googleap"
- + "is.com/Location\022_\n\031migrate_resource_requ"
- + "ests\030\002 \003(\01327.google.cloud.aiplatform.v1b"
- + "eta1.MigrateResourceRequestB\003\340A\002\"\374\n\n\026Mig"
- + "rateResourceRequest\022\213\001\n&migrate_ml_engin"
- + "e_model_version_config\030\001 \001(\0132Y.google.cl"
- + "oud.aiplatform.v1beta1.MigrateResourceRe"
- + "quest.MigrateMlEngineModelVersionConfigH"
- + "\000\022w\n\033migrate_automl_model_config\030\002 \001(\0132P"
+ + "to\032\027google/rpc/status.proto\"\224\001\n SearchMi"
+ + "gratableResourcesRequest\0229\n\006parent\030\001 \001(\t"
+ + "B)\340A\002\372A#\n!locations.googleapis.com/Locat"
+ + "ion\022\021\n\tpage_size\030\002 \001(\005\022\022\n\npage_token\030\003 \001"
+ + "(\t\022\016\n\006filter\030\004 \001(\t\"\217\001\n!SearchMigratableR"
+ + "esourcesResponse\022Q\n\024migratable_resources"
+ + "\030\001 \003(\01323.google.cloud.aiplatform.v1beta1"
+ + ".MigratableResource\022\027\n\017next_page_token\030\002"
+ + " \001(\t\"\272\001\n\034BatchMigrateResourcesRequest\0229\n"
+ + "\006parent\030\001 \001(\tB)\340A\002\372A#\n!locations.googlea"
+ + "pis.com/Location\022_\n\031migrate_resource_req"
+ + "uests\030\002 \003(\01327.google.cloud.aiplatform.v1"
+ + "beta1.MigrateResourceRequestB\003\340A\002\"\374\n\n\026Mi"
+ + "grateResourceRequest\022\213\001\n&migrate_ml_engi"
+ + "ne_model_version_config\030\001 \001(\0132Y.google.c"
+ + "loud.aiplatform.v1beta1.MigrateResourceR"
+ + "equest.MigrateMlEngineModelVersionConfig"
+ + "H\000\022w\n\033migrate_automl_model_config\030\002 \001(\0132"
+ + "P.google.cloud.aiplatform.v1beta1.Migrat"
+ + "eResourceRequest.MigrateAutomlModelConfi"
+ + "gH\000\022{\n\035migrate_automl_dataset_config\030\003 \001"
+ + "(\0132R.google.cloud.aiplatform.v1beta1.Mig"
+ + "rateResourceRequest.MigrateAutomlDataset"
+ + "ConfigH\000\022\210\001\n$migrate_data_labeling_datas"
+ + "et_config\030\004 \001(\0132X.google.cloud.aiplatfor"
+ + "m.v1beta1.MigrateResourceRequest.Migrate"
+ + "DataLabelingDatasetConfigH\000\032\225\001\n!MigrateM"
+ + "lEngineModelVersionConfig\022\025\n\010endpoint\030\001 "
+ + "\001(\tB\003\340A\002\0228\n\rmodel_version\030\002 \001(\tB!\340A\002\372A\033\n"
+ + "\031ml.googleapis.com/Version\022\037\n\022model_disp"
+ + "lay_name\030\003 \001(\tB\003\340A\002\032o\n\030MigrateAutomlMode"
+ + "lConfig\0222\n\005model\030\001 \001(\tB#\340A\002\372A\035\n\033automl.g"
+ + "oogleapis.com/Model\022\037\n\022model_display_nam"
+ + "e\030\002 \001(\tB\003\340A\001\032w\n\032MigrateAutomlDatasetConf"
+ + "ig\0226\n\007dataset\030\001 \001(\tB%\340A\002\372A\037\n\035automl.goog"
+ + "leapis.com/Dataset\022!\n\024dataset_display_na"
+ + "me\030\002 \001(\tB\003\340A\002\032\305\003\n MigrateDataLabelingDat"
+ + "asetConfig\022<\n\007dataset\030\001 \001(\tB+\340A\002\372A%\n#dat"
+ + "alabeling.googleapis.com/Dataset\022!\n\024data"
+ + "set_display_name\030\002 \001(\tB\003\340A\001\022\301\001\n/migrate_"
+ + "data_labeling_annotated_dataset_configs\030"
+ + "\003 \003(\0132\202\001.google.cloud.aiplatform.v1beta1"
+ + ".MigrateResourceRequest.MigrateDataLabel"
+ + "ingDatasetConfig.MigrateDataLabelingAnno"
+ + "tatedDatasetConfigB\003\340A\001\032|\n)MigrateDataLa"
+ + "belingAnnotatedDatasetConfig\022O\n\021annotate"
+ + "d_dataset\030\001 \001(\tB4\340A\002\372A.\n,datalabeling.go"
+ + "ogleapis.com/AnnotatedDatasetB\t\n\007request"
+ + "\"}\n\035BatchMigrateResourcesResponse\022\\\n\032mig"
+ + "rate_resource_responses\030\001 \003(\01328.google.c"
+ + "loud.aiplatform.v1beta1.MigrateResourceR"
+ + "esponse\"\362\001\n\027MigrateResourceResponse\0229\n\007d"
+ + "ataset\030\001 \001(\tB&\372A#\n!aiplatform.googleapis"
+ + ".com/DatasetH\000\0225\n\005model\030\002 \001(\tB$\372A!\n\037aipl"
+ + "atform.googleapis.com/ModelH\000\022P\n\023migrata"
+ + "ble_resource\030\003 \001(\01323.google.cloud.aiplat"
+ + "form.v1beta1.MigratableResourceB\023\n\021migra"
+ + "ted_resource\"\352\003\n&BatchMigrateResourcesOp"
+ + "erationMetadata\022S\n\020generic_metadata\030\001 \001("
+ + "\01329.google.cloud.aiplatform.v1beta1.Gene"
+ + "ricOperationMetadata\022n\n\017partial_results\030"
+ + "\002 \003(\0132U.google.cloud.aiplatform.v1beta1."
+ + "BatchMigrateResourcesOperationMetadata.P"
+ + "artialResult\032\372\001\n\rPartialResult\022#\n\005error\030"
+ + "\002 \001(\0132\022.google.rpc.StatusH\000\0225\n\005model\030\003 \001"
+ + "(\tB$\372A!\n\037aiplatform.googleapis.com/Model"
+ + "H\000\0229\n\007dataset\030\004 \001(\tB&\372A#\n!aiplatform.goo"
+ + "gleapis.com/DatasetH\000\022H\n\007request\030\001 \001(\01327"
+ ".google.cloud.aiplatform.v1beta1.Migrate"
- + "ResourceRequest.MigrateAutomlModelConfig"
- + "H\000\022{\n\035migrate_automl_dataset_config\030\003 \001("
- + "\0132R.google.cloud.aiplatform.v1beta1.Migr"
- + "ateResourceRequest.MigrateAutomlDatasetC"
- + "onfigH\000\022\210\001\n$migrate_data_labeling_datase"
- + "t_config\030\004 \001(\0132X.google.cloud.aiplatform"
- + ".v1beta1.MigrateResourceRequest.MigrateD"
- + "ataLabelingDatasetConfigH\000\032\225\001\n!MigrateMl"
- + "EngineModelVersionConfig\022\025\n\010endpoint\030\001 \001"
- + "(\tB\003\340A\002\0228\n\rmodel_version\030\002 \001(\tB!\340A\002\372A\033\n\031"
- + "ml.googleapis.com/Version\022\037\n\022model_displ"
- + "ay_name\030\003 \001(\tB\003\340A\002\032o\n\030MigrateAutomlModel"
- + "Config\0222\n\005model\030\001 \001(\tB#\340A\002\372A\035\n\033automl.go"
- + "ogleapis.com/Model\022\037\n\022model_display_name"
- + "\030\002 \001(\tB\003\340A\001\032w\n\032MigrateAutomlDatasetConfi"
- + "g\0226\n\007dataset\030\001 \001(\tB%\340A\002\372A\037\n\035automl.googl"
- + "eapis.com/Dataset\022!\n\024dataset_display_nam"
- + "e\030\002 \001(\tB\003\340A\002\032\305\003\n MigrateDataLabelingData"
- + "setConfig\022<\n\007dataset\030\001 \001(\tB+\340A\002\372A%\n#data"
- + "labeling.googleapis.com/Dataset\022!\n\024datas"
- + "et_display_name\030\002 \001(\tB\003\340A\001\022\301\001\n/migrate_d"
- + "ata_labeling_annotated_dataset_configs\030\003"
- + " \003(\0132\202\001.google.cloud.aiplatform.v1beta1."
- + "MigrateResourceRequest.MigrateDataLabeli"
- + "ngDatasetConfig.MigrateDataLabelingAnnot"
- + "atedDatasetConfigB\003\340A\001\032|\n)MigrateDataLab"
- + "elingAnnotatedDatasetConfig\022O\n\021annotated"
- + "_dataset\030\001 \001(\tB4\340A\002\372A.\n,datalabeling.goo"
- + "gleapis.com/AnnotatedDatasetB\t\n\007request\""
- + "}\n\035BatchMigrateResourcesResponse\022\\\n\032migr"
- + "ate_resource_responses\030\001 \003(\01328.google.cl"
- + "oud.aiplatform.v1beta1.MigrateResourceRe"
- + "sponse\"\362\001\n\027MigrateResourceResponse\0229\n\007da"
- + "taset\030\001 \001(\tB&\372A#\n!aiplatform.googleapis."
- + "com/DatasetH\000\0225\n\005model\030\002 \001(\tB$\372A!\n\037aipla"
- + "tform.googleapis.com/ModelH\000\022P\n\023migratab"
- + "le_resource\030\003 \001(\01323.google.cloud.aiplatf"
- + "orm.v1beta1.MigratableResourceB\023\n\021migrat"
- + "ed_resource\"}\n&BatchMigrateResourcesOper"
- + "ationMetadata\022S\n\020generic_metadata\030\001 \001(\0132"
- + "9.google.cloud.aiplatform.v1beta1.Generi"
- + "cOperationMetadata2\233\005\n\020MigrationService\022"
- + "\373\001\n\031SearchMigratableResources\022A.google.c"
- + "loud.aiplatform.v1beta1.SearchMigratable"
- + "ResourcesRequest\032B.google.cloud.aiplatfo"
- + "rm.v1beta1.SearchMigratableResourcesResp"
- + "onse\"W\202\323\344\223\002H\"C/v1beta1/{parent=projects/"
- + "*/locations/*}/migratableResources:searc"
- + "h:\001*\332A\006parent\022\271\002\n\025BatchMigrateResources\022"
- + "=.google.cloud.aiplatform.v1beta1.BatchM"
- + "igrateResourcesRequest\032\035.google.longrunn"
- + "ing.Operation\"\301\001\202\323\344\223\002N\"I/v1beta1/{parent"
- + "=projects/*/locations/*}/migratableResou"
- + "rces:batchMigrate:\001*\332A parent,migrate_re"
- + "source_requests\312AG\n\035BatchMigrateResource"
- + "sResponse\022&BatchMigrateResourcesOperatio"
- + "nMetadata\032M\312A\031aiplatform.googleapis.com\322"
- + "A.https://www.googleapis.com/auth/cloud-"
- + "platformB\211\001\n#com.google.cloud.aiplatform"
- + ".v1beta1B\025MigrationServiceProtoP\001ZIgoogl"
- + "e.golang.org/genproto/googleapis/cloud/a"
- + "iplatform/v1beta1;aiplatformb\006proto3"
+ + "ResourceRequestB\010\n\006result2\233\005\n\020MigrationS"
+ + "ervice\022\373\001\n\031SearchMigratableResources\022A.g"
+ + "oogle.cloud.aiplatform.v1beta1.SearchMig"
+ + "ratableResourcesRequest\032B.google.cloud.a"
+ + "iplatform.v1beta1.SearchMigratableResour"
+ + "cesResponse\"W\202\323\344\223\002H\"C/v1beta1/{parent=pr"
+ + "ojects/*/locations/*}/migratableResource"
+ + "s:search:\001*\332A\006parent\022\271\002\n\025BatchMigrateRes"
+ + "ources\022=.google.cloud.aiplatform.v1beta1"
+ + ".BatchMigrateResourcesRequest\032\035.google.l"
+ + "ongrunning.Operation\"\301\001\202\323\344\223\002N\"I/v1beta1/"
+ + "{parent=projects/*/locations/*}/migratab"
+ + "leResources:batchMigrate:\001*\332A parent,mig"
+ + "rate_resource_requests\312AG\n\035BatchMigrateR"
+ + "esourcesResponse\022&BatchMigrateResourcesO"
+ + "perationMetadata\032M\312A\031aiplatform.googleap"
+ + "is.com\322A.https://www.googleapis.com/auth"
+ + "/cloud-platformB\211\001\n#com.google.cloud.aip"
+ + "latform.v1beta1B\025MigrationServiceProtoP\001"
+ + "ZIgoogle.golang.org/genproto/googleapis/"
+ + "cloud/aiplatform/v1beta1;aiplatformb\006pro"
+ + "to3"
};
descriptor =
com.google.protobuf.Descriptors.FileDescriptor.internalBuildGeneratedFileFrom(
@@ -189,6 +204,7 @@ public static com.google.protobuf.Descriptors.FileDescriptor getDescriptor() {
com.google.cloud.aiplatform.v1beta1.MigratableResourceProto.getDescriptor(),
com.google.cloud.aiplatform.v1beta1.OperationProto.getDescriptor(),
com.google.longrunning.OperationsProto.getDescriptor(),
+ com.google.rpc.StatusProto.getDescriptor(),
});
internal_static_google_cloud_aiplatform_v1beta1_SearchMigratableResourcesRequest_descriptor =
getDescriptor().getMessageTypes().get(0);
@@ -196,7 +212,7 @@ public static com.google.protobuf.Descriptors.FileDescriptor getDescriptor() {
new com.google.protobuf.GeneratedMessageV3.FieldAccessorTable(
internal_static_google_cloud_aiplatform_v1beta1_SearchMigratableResourcesRequest_descriptor,
new java.lang.String[] {
- "Parent", "PageSize", "PageToken",
+ "Parent", "PageSize", "PageToken", "Filter",
});
internal_static_google_cloud_aiplatform_v1beta1_SearchMigratableResourcesResponse_descriptor =
getDescriptor().getMessageTypes().get(1);
@@ -298,7 +314,17 @@ public static com.google.protobuf.Descriptors.FileDescriptor getDescriptor() {
new com.google.protobuf.GeneratedMessageV3.FieldAccessorTable(
internal_static_google_cloud_aiplatform_v1beta1_BatchMigrateResourcesOperationMetadata_descriptor,
new java.lang.String[] {
- "GenericMetadata",
+ "GenericMetadata", "PartialResults",
+ });
+ internal_static_google_cloud_aiplatform_v1beta1_BatchMigrateResourcesOperationMetadata_PartialResult_descriptor =
+ internal_static_google_cloud_aiplatform_v1beta1_BatchMigrateResourcesOperationMetadata_descriptor
+ .getNestedTypes()
+ .get(0);
+ internal_static_google_cloud_aiplatform_v1beta1_BatchMigrateResourcesOperationMetadata_PartialResult_fieldAccessorTable =
+ new com.google.protobuf.GeneratedMessageV3.FieldAccessorTable(
+ internal_static_google_cloud_aiplatform_v1beta1_BatchMigrateResourcesOperationMetadata_PartialResult_descriptor,
+ new java.lang.String[] {
+ "Error", "Model", "Dataset", "Request", "Result",
});
com.google.protobuf.ExtensionRegistry registry =
com.google.protobuf.ExtensionRegistry.newInstance();
@@ -320,6 +346,7 @@ public static com.google.protobuf.Descriptors.FileDescriptor getDescriptor() {
com.google.cloud.aiplatform.v1beta1.MigratableResourceProto.getDescriptor();
com.google.cloud.aiplatform.v1beta1.OperationProto.getDescriptor();
com.google.longrunning.OperationsProto.getDescriptor();
+ com.google.rpc.StatusProto.getDescriptor();
}
// @@protoc_insertion_point(outer_class_scope)
diff --git a/proto-google-cloud-aiplatform-v1beta1/src/main/java/com/google/cloud/aiplatform/v1beta1/Model.java b/proto-google-cloud-aiplatform-v1beta1/src/main/java/com/google/cloud/aiplatform/v1beta1/Model.java
index 14d76fee0..e7fcfc7af 100644
--- a/proto-google-cloud-aiplatform-v1beta1/src/main/java/com/google/cloud/aiplatform/v1beta1/Model.java
+++ b/proto-google-cloud-aiplatform-v1beta1/src/main/java/com/google/cloud/aiplatform/v1beta1/Model.java
@@ -302,6 +302,23 @@ private Model(
explanationSpec_ = subBuilder.buildPartial();
}
+ break;
+ }
+ case 194:
+ {
+ com.google.cloud.aiplatform.v1beta1.EncryptionSpec.Builder subBuilder = null;
+ if (encryptionSpec_ != null) {
+ subBuilder = encryptionSpec_.toBuilder();
+ }
+ encryptionSpec_ =
+ input.readMessage(
+ com.google.cloud.aiplatform.v1beta1.EncryptionSpec.parser(),
+ extensionRegistry);
+ if (subBuilder != null) {
+ subBuilder.mergeFrom(encryptionSpec_);
+ encryptionSpec_ = subBuilder.buildPartial();
+ }
+
break;
}
case 210:
@@ -2790,6 +2807,10 @@ public int getSupportedDeploymentResourcesTypesValue(int index) {
* * `bigquery`
* Each instance is a single row in BigQuery. Uses
* [BigQuerySource][google.cloud.aiplatform.v1beta1.BatchPredictionJob.InputConfig.bigquery_source].
+ * * `file-list`
+ * Each line of the file is the location of an instance to process, uses
+ * `gcs_source` field of the
+ * [InputConfig][google.cloud.aiplatform.v1beta1.BatchPredictionJob.InputConfig] object.
* If this Model doesn't support any of these formats it means it cannot be
* used with a [BatchPredictionJob][google.cloud.aiplatform.v1beta1.BatchPredictionJob]. However, if it has
* [supported_deployment_resources_types][google.cloud.aiplatform.v1beta1.Model.supported_deployment_resources_types], it could serve online
@@ -2831,6 +2852,10 @@ public com.google.protobuf.ProtocolStringList getSupportedInputStorageFormatsLis
* * `bigquery`
* Each instance is a single row in BigQuery. Uses
* [BigQuerySource][google.cloud.aiplatform.v1beta1.BatchPredictionJob.InputConfig.bigquery_source].
+ * * `file-list`
+ * Each line of the file is the location of an instance to process, uses
+ * `gcs_source` field of the
+ * [InputConfig][google.cloud.aiplatform.v1beta1.BatchPredictionJob.InputConfig] object.
* If this Model doesn't support any of these formats it means it cannot be
* used with a [BatchPredictionJob][google.cloud.aiplatform.v1beta1.BatchPredictionJob]. However, if it has
* [supported_deployment_resources_types][google.cloud.aiplatform.v1beta1.Model.supported_deployment_resources_types], it could serve online
@@ -2872,6 +2897,10 @@ public int getSupportedInputStorageFormatsCount() {
* * `bigquery`
* Each instance is a single row in BigQuery. Uses
* [BigQuerySource][google.cloud.aiplatform.v1beta1.BatchPredictionJob.InputConfig.bigquery_source].
+ * * `file-list`
+ * Each line of the file is the location of an instance to process, uses
+ * `gcs_source` field of the
+ * [InputConfig][google.cloud.aiplatform.v1beta1.BatchPredictionJob.InputConfig] object.
* If this Model doesn't support any of these formats it means it cannot be
* used with a [BatchPredictionJob][google.cloud.aiplatform.v1beta1.BatchPredictionJob]. However, if it has
* [supported_deployment_resources_types][google.cloud.aiplatform.v1beta1.Model.supported_deployment_resources_types], it could serve online
@@ -2914,6 +2943,10 @@ public java.lang.String getSupportedInputStorageFormats(int index) {
* * `bigquery`
* Each instance is a single row in BigQuery. Uses
* [BigQuerySource][google.cloud.aiplatform.v1beta1.BatchPredictionJob.InputConfig.bigquery_source].
+ * * `file-list`
+ * Each line of the file is the location of an instance to process, uses
+ * `gcs_source` field of the
+ * [InputConfig][google.cloud.aiplatform.v1beta1.BatchPredictionJob.InputConfig] object.
* If this Model doesn't support any of these formats it means it cannot be
* used with a [BatchPredictionJob][google.cloud.aiplatform.v1beta1.BatchPredictionJob]. However, if it has
* [supported_deployment_resources_types][google.cloud.aiplatform.v1beta1.Model.supported_deployment_resources_types], it could serve online
@@ -3287,19 +3320,20 @@ public com.google.cloud.aiplatform.v1beta1.DeployedModelRefOrBuilder getDeployed
*
*
*
- * Output only. The default explanation specification for this Model.
- * Model can be used for [requesting explanation][google.cloud.aiplatform.v1beta1.PredictionService.Explain]
- * after being [deployed][google.cloud.aiplatform.v1beta1.EndpointService.DeployModel] iff it is populated.
+ * The default explanation specification for this Model.
+ * The Model can be used for [requesting
+ * explanation][PredictionService.Explain] after being
+ * [deployed][google.cloud.aiplatform.v1beta1.EndpointService.DeployModel] iff it is populated.
+ * The Model can be used for [batch
+ * explanation][BatchPredictionJob.generate_explanation] iff it is populated.
* All fields of the explanation_spec can be overridden by
* [explanation_spec][google.cloud.aiplatform.v1beta1.DeployedModel.explanation_spec] of
- * [DeployModelRequest.deployed_model][google.cloud.aiplatform.v1beta1.DeployModelRequest.deployed_model].
- * This field is populated only for tabular AutoML Models.
- * Specifying it with [ModelService.UploadModel][google.cloud.aiplatform.v1beta1.ModelService.UploadModel] is not supported.
+ * [DeployModelRequest.deployed_model][google.cloud.aiplatform.v1beta1.DeployModelRequest.deployed_model], or
+ * [explanation_spec][google.cloud.aiplatform.v1beta1.BatchPredictionJob.explanation_spec] of
+ * [BatchPredictionJob][google.cloud.aiplatform.v1beta1.BatchPredictionJob].
*
*
- *
- * .google.cloud.aiplatform.v1beta1.ExplanationSpec explanation_spec = 23 [(.google.api.field_behavior) = OUTPUT_ONLY];
- *
+ * .google.cloud.aiplatform.v1beta1.ExplanationSpec explanation_spec = 23;
*
* @return Whether the explanationSpec field is set.
*/
@@ -3311,19 +3345,20 @@ public boolean hasExplanationSpec() {
*
*
*
- * Output only. The default explanation specification for this Model.
- * Model can be used for [requesting explanation][google.cloud.aiplatform.v1beta1.PredictionService.Explain]
- * after being [deployed][google.cloud.aiplatform.v1beta1.EndpointService.DeployModel] iff it is populated.
+ * The default explanation specification for this Model.
+ * The Model can be used for [requesting
+ * explanation][PredictionService.Explain] after being
+ * [deployed][google.cloud.aiplatform.v1beta1.EndpointService.DeployModel] iff it is populated.
+ * The Model can be used for [batch
+ * explanation][BatchPredictionJob.generate_explanation] iff it is populated.
* All fields of the explanation_spec can be overridden by
* [explanation_spec][google.cloud.aiplatform.v1beta1.DeployedModel.explanation_spec] of
- * [DeployModelRequest.deployed_model][google.cloud.aiplatform.v1beta1.DeployModelRequest.deployed_model].
- * This field is populated only for tabular AutoML Models.
- * Specifying it with [ModelService.UploadModel][google.cloud.aiplatform.v1beta1.ModelService.UploadModel] is not supported.
+ * [DeployModelRequest.deployed_model][google.cloud.aiplatform.v1beta1.DeployModelRequest.deployed_model], or
+ * [explanation_spec][google.cloud.aiplatform.v1beta1.BatchPredictionJob.explanation_spec] of
+ * [BatchPredictionJob][google.cloud.aiplatform.v1beta1.BatchPredictionJob].
*
*
- *
- * .google.cloud.aiplatform.v1beta1.ExplanationSpec explanation_spec = 23 [(.google.api.field_behavior) = OUTPUT_ONLY];
- *
+ * .google.cloud.aiplatform.v1beta1.ExplanationSpec explanation_spec = 23;
*
* @return The explanationSpec.
*/
@@ -3337,19 +3372,20 @@ public com.google.cloud.aiplatform.v1beta1.ExplanationSpec getExplanationSpec()
*
*
*
- * Output only. The default explanation specification for this Model.
- * Model can be used for [requesting explanation][google.cloud.aiplatform.v1beta1.PredictionService.Explain]
- * after being [deployed][google.cloud.aiplatform.v1beta1.EndpointService.DeployModel] iff it is populated.
+ * The default explanation specification for this Model.
+ * The Model can be used for [requesting
+ * explanation][PredictionService.Explain] after being
+ * [deployed][google.cloud.aiplatform.v1beta1.EndpointService.DeployModel] iff it is populated.
+ * The Model can be used for [batch
+ * explanation][BatchPredictionJob.generate_explanation] iff it is populated.
* All fields of the explanation_spec can be overridden by
* [explanation_spec][google.cloud.aiplatform.v1beta1.DeployedModel.explanation_spec] of
- * [DeployModelRequest.deployed_model][google.cloud.aiplatform.v1beta1.DeployModelRequest.deployed_model].
- * This field is populated only for tabular AutoML Models.
- * Specifying it with [ModelService.UploadModel][google.cloud.aiplatform.v1beta1.ModelService.UploadModel] is not supported.
+ * [DeployModelRequest.deployed_model][google.cloud.aiplatform.v1beta1.DeployModelRequest.deployed_model], or
+ * [explanation_spec][google.cloud.aiplatform.v1beta1.BatchPredictionJob.explanation_spec] of
+ * [BatchPredictionJob][google.cloud.aiplatform.v1beta1.BatchPredictionJob].
*
*
- *
- * .google.cloud.aiplatform.v1beta1.ExplanationSpec explanation_spec = 23 [(.google.api.field_behavior) = OUTPUT_ONLY];
- *
+ * .google.cloud.aiplatform.v1beta1.ExplanationSpec explanation_spec = 23;
*/
@java.lang.Override
public com.google.cloud.aiplatform.v1beta1.ExplanationSpecOrBuilder
@@ -3522,6 +3558,57 @@ public java.lang.String getLabelsOrThrow(java.lang.String key) {
return map.get(key);
}
+ public static final int ENCRYPTION_SPEC_FIELD_NUMBER = 24;
+ private com.google.cloud.aiplatform.v1beta1.EncryptionSpec encryptionSpec_;
+ /**
+ *
+ *
+ *
+ * Customer-managed encryption key spec for a Model. If set, this
+ * Model and all sub-resources of this Model will be secured by this key.
+ *
+ *
+ * .google.cloud.aiplatform.v1beta1.EncryptionSpec encryption_spec = 24;
+ *
+ * @return Whether the encryptionSpec field is set.
+ */
+ @java.lang.Override
+ public boolean hasEncryptionSpec() {
+ return encryptionSpec_ != null;
+ }
+ /**
+ *
+ *
+ *
+ * Customer-managed encryption key spec for a Model. If set, this
+ * Model and all sub-resources of this Model will be secured by this key.
+ *
+ *
+ * .google.cloud.aiplatform.v1beta1.EncryptionSpec encryption_spec = 24;
+ *
+ * @return The encryptionSpec.
+ */
+ @java.lang.Override
+ public com.google.cloud.aiplatform.v1beta1.EncryptionSpec getEncryptionSpec() {
+ return encryptionSpec_ == null
+ ? com.google.cloud.aiplatform.v1beta1.EncryptionSpec.getDefaultInstance()
+ : encryptionSpec_;
+ }
+ /**
+ *
+ *
+ *
+ * Customer-managed encryption key spec for a Model. If set, this
+ * Model and all sub-resources of this Model will be secured by this key.
+ *
+ *
+ * .google.cloud.aiplatform.v1beta1.EncryptionSpec encryption_spec = 24;
+ */
+ @java.lang.Override
+ public com.google.cloud.aiplatform.v1beta1.EncryptionSpecOrBuilder getEncryptionSpecOrBuilder() {
+ return getEncryptionSpec();
+ }
+
private byte memoizedIsInitialized = -1;
@java.lang.Override
@@ -3596,6 +3683,9 @@ public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io
if (explanationSpec_ != null) {
output.writeMessage(23, getExplanationSpec());
}
+ if (encryptionSpec_ != null) {
+ output.writeMessage(24, getEncryptionSpec());
+ }
if (!getArtifactUriBytes().isEmpty()) {
com.google.protobuf.GeneratedMessageV3.writeString(output, 26, artifactUri_);
}
@@ -3692,6 +3782,9 @@ public int getSerializedSize() {
if (explanationSpec_ != null) {
size += com.google.protobuf.CodedOutputStream.computeMessageSize(23, getExplanationSpec());
}
+ if (encryptionSpec_ != null) {
+ size += com.google.protobuf.CodedOutputStream.computeMessageSize(24, getEncryptionSpec());
+ }
if (!getArtifactUriBytes().isEmpty()) {
size += com.google.protobuf.GeneratedMessageV3.computeStringSize(26, artifactUri_);
}
@@ -3752,6 +3845,10 @@ public boolean equals(final java.lang.Object obj) {
}
if (!getEtag().equals(other.getEtag())) return false;
if (!internalGetLabels().equals(other.internalGetLabels())) return false;
+ if (hasEncryptionSpec() != other.hasEncryptionSpec()) return false;
+ if (hasEncryptionSpec()) {
+ if (!getEncryptionSpec().equals(other.getEncryptionSpec())) return false;
+ }
if (!unknownFields.equals(other.unknownFields)) return false;
return true;
}
@@ -3825,6 +3922,10 @@ public int hashCode() {
hash = (37 * hash) + LABELS_FIELD_NUMBER;
hash = (53 * hash) + internalGetLabels().hashCode();
}
+ if (hasEncryptionSpec()) {
+ hash = (37 * hash) + ENCRYPTION_SPEC_FIELD_NUMBER;
+ hash = (53 * hash) + getEncryptionSpec().hashCode();
+ }
hash = (29 * hash) + unknownFields.hashCode();
memoizedHashCode = hash;
return hash;
@@ -4062,6 +4163,12 @@ public Builder clear() {
etag_ = "";
internalGetMutableLabels().clear();
+ if (encryptionSpecBuilder_ == null) {
+ encryptionSpec_ = null;
+ } else {
+ encryptionSpec_ = null;
+ encryptionSpecBuilder_ = null;
+ }
return this;
}
@@ -4163,6 +4270,11 @@ public com.google.cloud.aiplatform.v1beta1.Model buildPartial() {
result.etag_ = etag_;
result.labels_ = internalGetLabels();
result.labels_.makeImmutable();
+ if (encryptionSpecBuilder_ == null) {
+ result.encryptionSpec_ = encryptionSpec_;
+ } else {
+ result.encryptionSpec_ = encryptionSpecBuilder_.build();
+ }
onBuilt();
return result;
}
@@ -4343,6 +4455,9 @@ public Builder mergeFrom(com.google.cloud.aiplatform.v1beta1.Model other) {
onChanged();
}
internalGetMutableLabels().mergeFrom(other.internalGetLabels());
+ if (other.hasEncryptionSpec()) {
+ mergeEncryptionSpec(other.getEncryptionSpec());
+ }
this.mergeUnknownFields(other.unknownFields);
onChanged();
return this;
@@ -6564,6 +6679,10 @@ private void ensureSupportedInputStorageFormatsIsMutable() {
* * `bigquery`
* Each instance is a single row in BigQuery. Uses
* [BigQuerySource][google.cloud.aiplatform.v1beta1.BatchPredictionJob.InputConfig.bigquery_source].
+ * * `file-list`
+ * Each line of the file is the location of an instance to process, uses
+ * `gcs_source` field of the
+ * [InputConfig][google.cloud.aiplatform.v1beta1.BatchPredictionJob.InputConfig] object.
* If this Model doesn't support any of these formats it means it cannot be
* used with a [BatchPredictionJob][google.cloud.aiplatform.v1beta1.BatchPredictionJob]. However, if it has
* [supported_deployment_resources_types][google.cloud.aiplatform.v1beta1.Model.supported_deployment_resources_types], it could serve online
@@ -6605,6 +6724,10 @@ public com.google.protobuf.ProtocolStringList getSupportedInputStorageFormatsLis
* * `bigquery`
* Each instance is a single row in BigQuery. Uses
* [BigQuerySource][google.cloud.aiplatform.v1beta1.BatchPredictionJob.InputConfig.bigquery_source].
+ * * `file-list`
+ * Each line of the file is the location of an instance to process, uses
+ * `gcs_source` field of the
+ * [InputConfig][google.cloud.aiplatform.v1beta1.BatchPredictionJob.InputConfig] object.
* If this Model doesn't support any of these formats it means it cannot be
* used with a [BatchPredictionJob][google.cloud.aiplatform.v1beta1.BatchPredictionJob]. However, if it has
* [supported_deployment_resources_types][google.cloud.aiplatform.v1beta1.Model.supported_deployment_resources_types], it could serve online
@@ -6646,6 +6769,10 @@ public int getSupportedInputStorageFormatsCount() {
* * `bigquery`
* Each instance is a single row in BigQuery. Uses
* [BigQuerySource][google.cloud.aiplatform.v1beta1.BatchPredictionJob.InputConfig.bigquery_source].
+ * * `file-list`
+ * Each line of the file is the location of an instance to process, uses
+ * `gcs_source` field of the
+ * [InputConfig][google.cloud.aiplatform.v1beta1.BatchPredictionJob.InputConfig] object.
* If this Model doesn't support any of these formats it means it cannot be
* used with a [BatchPredictionJob][google.cloud.aiplatform.v1beta1.BatchPredictionJob]. However, if it has
* [supported_deployment_resources_types][google.cloud.aiplatform.v1beta1.Model.supported_deployment_resources_types], it could serve online
@@ -6688,6 +6815,10 @@ public java.lang.String getSupportedInputStorageFormats(int index) {
* * `bigquery`
* Each instance is a single row in BigQuery. Uses
* [BigQuerySource][google.cloud.aiplatform.v1beta1.BatchPredictionJob.InputConfig.bigquery_source].
+ * * `file-list`
+ * Each line of the file is the location of an instance to process, uses
+ * `gcs_source` field of the
+ * [InputConfig][google.cloud.aiplatform.v1beta1.BatchPredictionJob.InputConfig] object.
* If this Model doesn't support any of these formats it means it cannot be
* used with a [BatchPredictionJob][google.cloud.aiplatform.v1beta1.BatchPredictionJob]. However, if it has
* [supported_deployment_resources_types][google.cloud.aiplatform.v1beta1.Model.supported_deployment_resources_types], it could serve online
@@ -6730,6 +6861,10 @@ public com.google.protobuf.ByteString getSupportedInputStorageFormatsBytes(int i
* * `bigquery`
* Each instance is a single row in BigQuery. Uses
* [BigQuerySource][google.cloud.aiplatform.v1beta1.BatchPredictionJob.InputConfig.bigquery_source].
+ * * `file-list`
+ * Each line of the file is the location of an instance to process, uses
+ * `gcs_source` field of the
+ * [InputConfig][google.cloud.aiplatform.v1beta1.BatchPredictionJob.InputConfig] object.
* If this Model doesn't support any of these formats it means it cannot be
* used with a [BatchPredictionJob][google.cloud.aiplatform.v1beta1.BatchPredictionJob]. However, if it has
* [supported_deployment_resources_types][google.cloud.aiplatform.v1beta1.Model.supported_deployment_resources_types], it could serve online
@@ -6779,6 +6914,10 @@ public Builder setSupportedInputStorageFormats(int index, java.lang.String value
* * `bigquery`
* Each instance is a single row in BigQuery. Uses
* [BigQuerySource][google.cloud.aiplatform.v1beta1.BatchPredictionJob.InputConfig.bigquery_source].
+ * * `file-list`
+ * Each line of the file is the location of an instance to process, uses
+ * `gcs_source` field of the
+ * [InputConfig][google.cloud.aiplatform.v1beta1.BatchPredictionJob.InputConfig] object.
* If this Model doesn't support any of these formats it means it cannot be
* used with a [BatchPredictionJob][google.cloud.aiplatform.v1beta1.BatchPredictionJob]. However, if it has
* [supported_deployment_resources_types][google.cloud.aiplatform.v1beta1.Model.supported_deployment_resources_types], it could serve online
@@ -6827,6 +6966,10 @@ public Builder addSupportedInputStorageFormats(java.lang.String value) {
* * `bigquery`
* Each instance is a single row in BigQuery. Uses
* [BigQuerySource][google.cloud.aiplatform.v1beta1.BatchPredictionJob.InputConfig.bigquery_source].
+ * * `file-list`
+ * Each line of the file is the location of an instance to process, uses
+ * `gcs_source` field of the
+ * [InputConfig][google.cloud.aiplatform.v1beta1.BatchPredictionJob.InputConfig] object.
* If this Model doesn't support any of these formats it means it cannot be
* used with a [BatchPredictionJob][google.cloud.aiplatform.v1beta1.BatchPredictionJob]. However, if it has
* [supported_deployment_resources_types][google.cloud.aiplatform.v1beta1.Model.supported_deployment_resources_types], it could serve online
@@ -6872,6 +7015,10 @@ public Builder addAllSupportedInputStorageFormats(java.lang.Iterable
- * .google.cloud.aiplatform.v1beta1.ExplanationSpec explanation_spec = 23 [(.google.api.field_behavior) = OUTPUT_ONLY];
- *
+ * .google.cloud.aiplatform.v1beta1.ExplanationSpec explanation_spec = 23;
*
* @return Whether the explanationSpec field is set.
*/
@@ -8197,19 +8349,20 @@ public boolean hasExplanationSpec() {
*
*
*
- * Output only. The default explanation specification for this Model.
- * Model can be used for [requesting explanation][google.cloud.aiplatform.v1beta1.PredictionService.Explain]
- * after being [deployed][google.cloud.aiplatform.v1beta1.EndpointService.DeployModel] iff it is populated.
+ * The default explanation specification for this Model.
+ * The Model can be used for [requesting
+ * explanation][PredictionService.Explain] after being
+ * [deployed][google.cloud.aiplatform.v1beta1.EndpointService.DeployModel] iff it is populated.
+ * The Model can be used for [batch
+ * explanation][BatchPredictionJob.generate_explanation] iff it is populated.
* All fields of the explanation_spec can be overridden by
* [explanation_spec][google.cloud.aiplatform.v1beta1.DeployedModel.explanation_spec] of
- * [DeployModelRequest.deployed_model][google.cloud.aiplatform.v1beta1.DeployModelRequest.deployed_model].
- * This field is populated only for tabular AutoML Models.
- * Specifying it with [ModelService.UploadModel][google.cloud.aiplatform.v1beta1.ModelService.UploadModel] is not supported.
+ * [DeployModelRequest.deployed_model][google.cloud.aiplatform.v1beta1.DeployModelRequest.deployed_model], or
+ * [explanation_spec][google.cloud.aiplatform.v1beta1.BatchPredictionJob.explanation_spec] of
+ * [BatchPredictionJob][google.cloud.aiplatform.v1beta1.BatchPredictionJob].
*
*
- *
- * .google.cloud.aiplatform.v1beta1.ExplanationSpec explanation_spec = 23 [(.google.api.field_behavior) = OUTPUT_ONLY];
- *
+ * .google.cloud.aiplatform.v1beta1.ExplanationSpec explanation_spec = 23;
*
* @return The explanationSpec.
*/
@@ -8226,19 +8379,20 @@ public com.google.cloud.aiplatform.v1beta1.ExplanationSpec getExplanationSpec()
*
*
*
- * Output only. The default explanation specification for this Model.
- * Model can be used for [requesting explanation][google.cloud.aiplatform.v1beta1.PredictionService.Explain]
- * after being [deployed][google.cloud.aiplatform.v1beta1.EndpointService.DeployModel] iff it is populated.
+ * The default explanation specification for this Model.
+ * The Model can be used for [requesting
+ * explanation][PredictionService.Explain] after being
+ * [deployed][google.cloud.aiplatform.v1beta1.EndpointService.DeployModel] iff it is populated.
+ * The Model can be used for [batch
+ * explanation][BatchPredictionJob.generate_explanation] iff it is populated.
* All fields of the explanation_spec can be overridden by
* [explanation_spec][google.cloud.aiplatform.v1beta1.DeployedModel.explanation_spec] of
- * [DeployModelRequest.deployed_model][google.cloud.aiplatform.v1beta1.DeployModelRequest.deployed_model].
- * This field is populated only for tabular AutoML Models.
- * Specifying it with [ModelService.UploadModel][google.cloud.aiplatform.v1beta1.ModelService.UploadModel] is not supported.
+ * [DeployModelRequest.deployed_model][google.cloud.aiplatform.v1beta1.DeployModelRequest.deployed_model], or
+ * [explanation_spec][google.cloud.aiplatform.v1beta1.BatchPredictionJob.explanation_spec] of
+ * [BatchPredictionJob][google.cloud.aiplatform.v1beta1.BatchPredictionJob].
*
*
- *
- * .google.cloud.aiplatform.v1beta1.ExplanationSpec explanation_spec = 23 [(.google.api.field_behavior) = OUTPUT_ONLY];
- *
+ * .google.cloud.aiplatform.v1beta1.ExplanationSpec explanation_spec = 23;
*/
public Builder setExplanationSpec(com.google.cloud.aiplatform.v1beta1.ExplanationSpec value) {
if (explanationSpecBuilder_ == null) {
@@ -8257,19 +8411,20 @@ public Builder setExplanationSpec(com.google.cloud.aiplatform.v1beta1.Explanatio
*
*
*
- * Output only. The default explanation specification for this Model.
- * Model can be used for [requesting explanation][google.cloud.aiplatform.v1beta1.PredictionService.Explain]
- * after being [deployed][google.cloud.aiplatform.v1beta1.EndpointService.DeployModel] iff it is populated.
+ * The default explanation specification for this Model.
+ * The Model can be used for [requesting
+ * explanation][PredictionService.Explain] after being
+ * [deployed][google.cloud.aiplatform.v1beta1.EndpointService.DeployModel] iff it is populated.
+ * The Model can be used for [batch
+ * explanation][BatchPredictionJob.generate_explanation] iff it is populated.
* All fields of the explanation_spec can be overridden by
* [explanation_spec][google.cloud.aiplatform.v1beta1.DeployedModel.explanation_spec] of
- * [DeployModelRequest.deployed_model][google.cloud.aiplatform.v1beta1.DeployModelRequest.deployed_model].
- * This field is populated only for tabular AutoML Models.
- * Specifying it with [ModelService.UploadModel][google.cloud.aiplatform.v1beta1.ModelService.UploadModel] is not supported.
+ * [DeployModelRequest.deployed_model][google.cloud.aiplatform.v1beta1.DeployModelRequest.deployed_model], or
+ * [explanation_spec][google.cloud.aiplatform.v1beta1.BatchPredictionJob.explanation_spec] of
+ * [BatchPredictionJob][google.cloud.aiplatform.v1beta1.BatchPredictionJob].
*
*
- *
- * .google.cloud.aiplatform.v1beta1.ExplanationSpec explanation_spec = 23 [(.google.api.field_behavior) = OUTPUT_ONLY];
- *
+ * .google.cloud.aiplatform.v1beta1.ExplanationSpec explanation_spec = 23;
*/
public Builder setExplanationSpec(
com.google.cloud.aiplatform.v1beta1.ExplanationSpec.Builder builderForValue) {
@@ -8286,19 +8441,20 @@ public Builder setExplanationSpec(
*
*
*
- * Output only. The default explanation specification for this Model.
- * Model can be used for [requesting explanation][google.cloud.aiplatform.v1beta1.PredictionService.Explain]
- * after being [deployed][google.cloud.aiplatform.v1beta1.EndpointService.DeployModel] iff it is populated.
+ * The default explanation specification for this Model.
+ * The Model can be used for [requesting
+ * explanation][PredictionService.Explain] after being
+ * [deployed][google.cloud.aiplatform.v1beta1.EndpointService.DeployModel] iff it is populated.
+ * The Model can be used for [batch
+ * explanation][BatchPredictionJob.generate_explanation] iff it is populated.
* All fields of the explanation_spec can be overridden by
* [explanation_spec][google.cloud.aiplatform.v1beta1.DeployedModel.explanation_spec] of
- * [DeployModelRequest.deployed_model][google.cloud.aiplatform.v1beta1.DeployModelRequest.deployed_model].
- * This field is populated only for tabular AutoML Models.
- * Specifying it with [ModelService.UploadModel][google.cloud.aiplatform.v1beta1.ModelService.UploadModel] is not supported.
+ * [DeployModelRequest.deployed_model][google.cloud.aiplatform.v1beta1.DeployModelRequest.deployed_model], or
+ * [explanation_spec][google.cloud.aiplatform.v1beta1.BatchPredictionJob.explanation_spec] of
+ * [BatchPredictionJob][google.cloud.aiplatform.v1beta1.BatchPredictionJob].
*
*
- *
- * .google.cloud.aiplatform.v1beta1.ExplanationSpec explanation_spec = 23 [(.google.api.field_behavior) = OUTPUT_ONLY];
- *
+ * .google.cloud.aiplatform.v1beta1.ExplanationSpec explanation_spec = 23;
*/
public Builder mergeExplanationSpec(com.google.cloud.aiplatform.v1beta1.ExplanationSpec value) {
if (explanationSpecBuilder_ == null) {
@@ -8321,19 +8477,20 @@ public Builder mergeExplanationSpec(com.google.cloud.aiplatform.v1beta1.Explanat
*
*
*
- * Output only. The default explanation specification for this Model.
- * Model can be used for [requesting explanation][google.cloud.aiplatform.v1beta1.PredictionService.Explain]
- * after being [deployed][google.cloud.aiplatform.v1beta1.EndpointService.DeployModel] iff it is populated.
+ * The default explanation specification for this Model.
+ * The Model can be used for [requesting
+ * explanation][PredictionService.Explain] after being
+ * [deployed][google.cloud.aiplatform.v1beta1.EndpointService.DeployModel] iff it is populated.
+ * The Model can be used for [batch
+ * explanation][BatchPredictionJob.generate_explanation] iff it is populated.
* All fields of the explanation_spec can be overridden by
* [explanation_spec][google.cloud.aiplatform.v1beta1.DeployedModel.explanation_spec] of
- * [DeployModelRequest.deployed_model][google.cloud.aiplatform.v1beta1.DeployModelRequest.deployed_model].
- * This field is populated only for tabular AutoML Models.
- * Specifying it with [ModelService.UploadModel][google.cloud.aiplatform.v1beta1.ModelService.UploadModel] is not supported.
+ * [DeployModelRequest.deployed_model][google.cloud.aiplatform.v1beta1.DeployModelRequest.deployed_model], or
+ * [explanation_spec][google.cloud.aiplatform.v1beta1.BatchPredictionJob.explanation_spec] of
+ * [BatchPredictionJob][google.cloud.aiplatform.v1beta1.BatchPredictionJob].
*
*
- *
- * .google.cloud.aiplatform.v1beta1.ExplanationSpec explanation_spec = 23 [(.google.api.field_behavior) = OUTPUT_ONLY];
- *
+ * .google.cloud.aiplatform.v1beta1.ExplanationSpec explanation_spec = 23;
*/
public Builder clearExplanationSpec() {
if (explanationSpecBuilder_ == null) {
@@ -8350,19 +8507,20 @@ public Builder clearExplanationSpec() {
*
*
*
- * Output only. The default explanation specification for this Model.
- * Model can be used for [requesting explanation][google.cloud.aiplatform.v1beta1.PredictionService.Explain]
- * after being [deployed][google.cloud.aiplatform.v1beta1.EndpointService.DeployModel] iff it is populated.
+ * The default explanation specification for this Model.
+ * The Model can be used for [requesting
+ * explanation][PredictionService.Explain] after being
+ * [deployed][google.cloud.aiplatform.v1beta1.EndpointService.DeployModel] iff it is populated.
+ * The Model can be used for [batch
+ * explanation][BatchPredictionJob.generate_explanation] iff it is populated.
* All fields of the explanation_spec can be overridden by
* [explanation_spec][google.cloud.aiplatform.v1beta1.DeployedModel.explanation_spec] of
- * [DeployModelRequest.deployed_model][google.cloud.aiplatform.v1beta1.DeployModelRequest.deployed_model].
- * This field is populated only for tabular AutoML Models.
- * Specifying it with [ModelService.UploadModel][google.cloud.aiplatform.v1beta1.ModelService.UploadModel] is not supported.
+ * [DeployModelRequest.deployed_model][google.cloud.aiplatform.v1beta1.DeployModelRequest.deployed_model], or
+ * [explanation_spec][google.cloud.aiplatform.v1beta1.BatchPredictionJob.explanation_spec] of
+ * [BatchPredictionJob][google.cloud.aiplatform.v1beta1.BatchPredictionJob].
*
*
- *
- * .google.cloud.aiplatform.v1beta1.ExplanationSpec explanation_spec = 23 [(.google.api.field_behavior) = OUTPUT_ONLY];
- *
+ * .google.cloud.aiplatform.v1beta1.ExplanationSpec explanation_spec = 23;
*/
public com.google.cloud.aiplatform.v1beta1.ExplanationSpec.Builder getExplanationSpecBuilder() {
@@ -8373,19 +8531,20 @@ public com.google.cloud.aiplatform.v1beta1.ExplanationSpec.Builder getExplanatio
*
*
*
- * Output only. The default explanation specification for this Model.
- * Model can be used for [requesting explanation][google.cloud.aiplatform.v1beta1.PredictionService.Explain]
- * after being [deployed][google.cloud.aiplatform.v1beta1.EndpointService.DeployModel] iff it is populated.
+ * The default explanation specification for this Model.
+ * The Model can be used for [requesting
+ * explanation][PredictionService.Explain] after being
+ * [deployed][google.cloud.aiplatform.v1beta1.EndpointService.DeployModel] iff it is populated.
+ * The Model can be used for [batch
+ * explanation][BatchPredictionJob.generate_explanation] iff it is populated.
* All fields of the explanation_spec can be overridden by
* [explanation_spec][google.cloud.aiplatform.v1beta1.DeployedModel.explanation_spec] of
- * [DeployModelRequest.deployed_model][google.cloud.aiplatform.v1beta1.DeployModelRequest.deployed_model].
- * This field is populated only for tabular AutoML Models.
- * Specifying it with [ModelService.UploadModel][google.cloud.aiplatform.v1beta1.ModelService.UploadModel] is not supported.
+ * [DeployModelRequest.deployed_model][google.cloud.aiplatform.v1beta1.DeployModelRequest.deployed_model], or
+ * [explanation_spec][google.cloud.aiplatform.v1beta1.BatchPredictionJob.explanation_spec] of
+ * [BatchPredictionJob][google.cloud.aiplatform.v1beta1.BatchPredictionJob].
*
*
- *
- * .google.cloud.aiplatform.v1beta1.ExplanationSpec explanation_spec = 23 [(.google.api.field_behavior) = OUTPUT_ONLY];
- *
+ * .google.cloud.aiplatform.v1beta1.ExplanationSpec explanation_spec = 23;
*/
public com.google.cloud.aiplatform.v1beta1.ExplanationSpecOrBuilder
getExplanationSpecOrBuilder() {
@@ -8401,19 +8560,20 @@ public com.google.cloud.aiplatform.v1beta1.ExplanationSpec.Builder getExplanatio
*
*
*
- * Output only. The default explanation specification for this Model.
- * Model can be used for [requesting explanation][google.cloud.aiplatform.v1beta1.PredictionService.Explain]
- * after being [deployed][google.cloud.aiplatform.v1beta1.EndpointService.DeployModel] iff it is populated.
+ * The default explanation specification for this Model.
+ * The Model can be used for [requesting
+ * explanation][PredictionService.Explain] after being
+ * [deployed][google.cloud.aiplatform.v1beta1.EndpointService.DeployModel] iff it is populated.
+ * The Model can be used for [batch
+ * explanation][BatchPredictionJob.generate_explanation] iff it is populated.
* All fields of the explanation_spec can be overridden by
* [explanation_spec][google.cloud.aiplatform.v1beta1.DeployedModel.explanation_spec] of
- * [DeployModelRequest.deployed_model][google.cloud.aiplatform.v1beta1.DeployModelRequest.deployed_model].
- * This field is populated only for tabular AutoML Models.
- * Specifying it with [ModelService.UploadModel][google.cloud.aiplatform.v1beta1.ModelService.UploadModel] is not supported.
+ * [DeployModelRequest.deployed_model][google.cloud.aiplatform.v1beta1.DeployModelRequest.deployed_model], or
+ * [explanation_spec][google.cloud.aiplatform.v1beta1.BatchPredictionJob.explanation_spec] of
+ * [BatchPredictionJob][google.cloud.aiplatform.v1beta1.BatchPredictionJob].
*
*
- *
- * .google.cloud.aiplatform.v1beta1.ExplanationSpec explanation_spec = 23 [(.google.api.field_behavior) = OUTPUT_ONLY];
- *
+ * .google.cloud.aiplatform.v1beta1.ExplanationSpec explanation_spec = 23;
*/
private com.google.protobuf.SingleFieldBuilderV3<
com.google.cloud.aiplatform.v1beta1.ExplanationSpec,
@@ -8728,6 +8888,202 @@ public Builder putAllLabels(java.util.Map
+ * Customer-managed encryption key spec for a Model. If set, this
+ * Model and all sub-resources of this Model will be secured by this key.
+ *
+ *
+ * .google.cloud.aiplatform.v1beta1.EncryptionSpec encryption_spec = 24;
+ *
+ * @return Whether the encryptionSpec field is set.
+ */
+ public boolean hasEncryptionSpec() {
+ return encryptionSpecBuilder_ != null || encryptionSpec_ != null;
+ }
+ /**
+ *
+ *
+ *
+ * Customer-managed encryption key spec for a Model. If set, this
+ * Model and all sub-resources of this Model will be secured by this key.
+ *
+ *
+ * .google.cloud.aiplatform.v1beta1.EncryptionSpec encryption_spec = 24;
+ *
+ * @return The encryptionSpec.
+ */
+ public com.google.cloud.aiplatform.v1beta1.EncryptionSpec getEncryptionSpec() {
+ if (encryptionSpecBuilder_ == null) {
+ return encryptionSpec_ == null
+ ? com.google.cloud.aiplatform.v1beta1.EncryptionSpec.getDefaultInstance()
+ : encryptionSpec_;
+ } else {
+ return encryptionSpecBuilder_.getMessage();
+ }
+ }
+ /**
+ *
+ *
+ *
+ * Customer-managed encryption key spec for a Model. If set, this
+ * Model and all sub-resources of this Model will be secured by this key.
+ *
+ *
+ * .google.cloud.aiplatform.v1beta1.EncryptionSpec encryption_spec = 24;
+ */
+ public Builder setEncryptionSpec(com.google.cloud.aiplatform.v1beta1.EncryptionSpec value) {
+ if (encryptionSpecBuilder_ == null) {
+ if (value == null) {
+ throw new NullPointerException();
+ }
+ encryptionSpec_ = value;
+ onChanged();
+ } else {
+ encryptionSpecBuilder_.setMessage(value);
+ }
+
+ return this;
+ }
+ /**
+ *
+ *
+ *
+ * Customer-managed encryption key spec for a Model. If set, this
+ * Model and all sub-resources of this Model will be secured by this key.
+ *
+ *
+ * .google.cloud.aiplatform.v1beta1.EncryptionSpec encryption_spec = 24;
+ */
+ public Builder setEncryptionSpec(
+ com.google.cloud.aiplatform.v1beta1.EncryptionSpec.Builder builderForValue) {
+ if (encryptionSpecBuilder_ == null) {
+ encryptionSpec_ = builderForValue.build();
+ onChanged();
+ } else {
+ encryptionSpecBuilder_.setMessage(builderForValue.build());
+ }
+
+ return this;
+ }
+ /**
+ *
+ *
+ *
+ * Customer-managed encryption key spec for a Model. If set, this
+ * Model and all sub-resources of this Model will be secured by this key.
+ *
+ *
+ * .google.cloud.aiplatform.v1beta1.EncryptionSpec encryption_spec = 24;
+ */
+ public Builder mergeEncryptionSpec(com.google.cloud.aiplatform.v1beta1.EncryptionSpec value) {
+ if (encryptionSpecBuilder_ == null) {
+ if (encryptionSpec_ != null) {
+ encryptionSpec_ =
+ com.google.cloud.aiplatform.v1beta1.EncryptionSpec.newBuilder(encryptionSpec_)
+ .mergeFrom(value)
+ .buildPartial();
+ } else {
+ encryptionSpec_ = value;
+ }
+ onChanged();
+ } else {
+ encryptionSpecBuilder_.mergeFrom(value);
+ }
+
+ return this;
+ }
+ /**
+ *
+ *
+ *
+ * Customer-managed encryption key spec for a Model. If set, this
+ * Model and all sub-resources of this Model will be secured by this key.
+ *
+ *
+ * .google.cloud.aiplatform.v1beta1.EncryptionSpec encryption_spec = 24;
+ */
+ public Builder clearEncryptionSpec() {
+ if (encryptionSpecBuilder_ == null) {
+ encryptionSpec_ = null;
+ onChanged();
+ } else {
+ encryptionSpec_ = null;
+ encryptionSpecBuilder_ = null;
+ }
+
+ return this;
+ }
+ /**
+ *
+ *
+ *
+ * Customer-managed encryption key spec for a Model. If set, this
+ * Model and all sub-resources of this Model will be secured by this key.
+ *
+ *
+ * .google.cloud.aiplatform.v1beta1.EncryptionSpec encryption_spec = 24;
+ */
+ public com.google.cloud.aiplatform.v1beta1.EncryptionSpec.Builder getEncryptionSpecBuilder() {
+
+ onChanged();
+ return getEncryptionSpecFieldBuilder().getBuilder();
+ }
+ /**
+ *
+ *
+ *
+ * Customer-managed encryption key spec for a Model. If set, this
+ * Model and all sub-resources of this Model will be secured by this key.
+ *
+ *
+ * .google.cloud.aiplatform.v1beta1.EncryptionSpec encryption_spec = 24;
+ */
+ public com.google.cloud.aiplatform.v1beta1.EncryptionSpecOrBuilder
+ getEncryptionSpecOrBuilder() {
+ if (encryptionSpecBuilder_ != null) {
+ return encryptionSpecBuilder_.getMessageOrBuilder();
+ } else {
+ return encryptionSpec_ == null
+ ? com.google.cloud.aiplatform.v1beta1.EncryptionSpec.getDefaultInstance()
+ : encryptionSpec_;
+ }
+ }
+ /**
+ *
+ *
+ *
+ * Customer-managed encryption key spec for a Model. If set, this
+ * Model and all sub-resources of this Model will be secured by this key.
+ *
+ *
+ * .google.cloud.aiplatform.v1beta1.EncryptionSpec encryption_spec = 24;
+ */
+ private com.google.protobuf.SingleFieldBuilderV3<
+ com.google.cloud.aiplatform.v1beta1.EncryptionSpec,
+ com.google.cloud.aiplatform.v1beta1.EncryptionSpec.Builder,
+ com.google.cloud.aiplatform.v1beta1.EncryptionSpecOrBuilder>
+ getEncryptionSpecFieldBuilder() {
+ if (encryptionSpecBuilder_ == null) {
+ encryptionSpecBuilder_ =
+ new com.google.protobuf.SingleFieldBuilderV3<
+ com.google.cloud.aiplatform.v1beta1.EncryptionSpec,
+ com.google.cloud.aiplatform.v1beta1.EncryptionSpec.Builder,
+ com.google.cloud.aiplatform.v1beta1.EncryptionSpecOrBuilder>(
+ getEncryptionSpec(), getParentForChildren(), isClean());
+ encryptionSpec_ = null;
+ }
+ return encryptionSpecBuilder_;
+ }
+
@java.lang.Override
public final Builder setUnknownFields(final com.google.protobuf.UnknownFieldSet unknownFields) {
return super.setUnknownFields(unknownFields);
diff --git a/proto-google-cloud-aiplatform-v1beta1/src/main/java/com/google/cloud/aiplatform/v1beta1/ModelContainerSpec.java b/proto-google-cloud-aiplatform-v1beta1/src/main/java/com/google/cloud/aiplatform/v1beta1/ModelContainerSpec.java
index 3db576bb5..0f8cd2ff7 100644
--- a/proto-google-cloud-aiplatform-v1beta1/src/main/java/com/google/cloud/aiplatform/v1beta1/ModelContainerSpec.java
+++ b/proto-google-cloud-aiplatform-v1beta1/src/main/java/com/google/cloud/aiplatform/v1beta1/ModelContainerSpec.java
@@ -981,9 +981,8 @@ public com.google.cloud.aiplatform.v1beta1.PortOrBuilder getPortsOrBuilder(int i
* container's response in the API response.
* For example, if you set this field to `/foo`, then when AI Platform
* receives a prediction request, it forwards the request body in a POST
- * request to the following URL on the container:
- * <code>localhost:<var>PORT</var>/foo</code>
- * <var>PORT</var> refers to the first value of this `ModelContainerSpec`'s
+ * request to the `/foo` path on the port of your container specified by the
+ * first value of this `ModelContainerSpec`'s
* [ports][google.cloud.aiplatform.v1beta1.ModelContainerSpec.ports] field.
* If you don't specify this field, it defaults to the following value when
* you [deploy this Model to an Endpoint][google.cloud.aiplatform.v1beta1.EndpointService.DeployModel]:
@@ -1028,9 +1027,8 @@ public java.lang.String getPredictRoute() {
* container's response in the API response.
* For example, if you set this field to `/foo`, then when AI Platform
* receives a prediction request, it forwards the request body in a POST
- * request to the following URL on the container:
- * <code>localhost:<var>PORT</var>/foo</code>
- * <var>PORT</var> refers to the first value of this `ModelContainerSpec`'s
+ * request to the `/foo` path on the port of your container specified by the
+ * first value of this `ModelContainerSpec`'s
* [ports][google.cloud.aiplatform.v1beta1.ModelContainerSpec.ports] field.
* If you don't specify this field, it defaults to the following value when
* you [deploy this Model to an Endpoint][google.cloud.aiplatform.v1beta1.EndpointService.DeployModel]:
@@ -1077,9 +1075,8 @@ public com.google.protobuf.ByteString getPredictRouteBytes() {
* [health
* checks](https://tinyurl.com/cust-cont-reqs#checks).
* For example, if you set this field to `/bar`, then AI Platform
- * intermittently sends a GET request to the following URL on the container:
- * <code>localhost:<var>PORT</var>/bar</code>
- * <var>PORT</var> refers to the first value of this `ModelContainerSpec`'s
+ * intermittently sends a GET request to the `/bar` path on the port of your
+ * container specified by the first value of this `ModelContainerSpec`'s
* [ports][google.cloud.aiplatform.v1beta1.ModelContainerSpec.ports] field.
* If you don't specify this field, it defaults to the following value when
* you [deploy this Model to an Endpoint][google.cloud.aiplatform.v1beta1.EndpointService.DeployModel]:
@@ -1123,9 +1120,8 @@ public java.lang.String getHealthRoute() {
* [health
* checks](https://tinyurl.com/cust-cont-reqs#checks).
* For example, if you set this field to `/bar`, then AI Platform
- * intermittently sends a GET request to the following URL on the container:
- * <code>localhost:<var>PORT</var>/bar</code>
- * <var>PORT</var> refers to the first value of this `ModelContainerSpec`'s
+ * intermittently sends a GET request to the `/bar` path on the port of your
+ * container specified by the first value of this `ModelContainerSpec`'s
* [ports][google.cloud.aiplatform.v1beta1.ModelContainerSpec.ports] field.
* If you don't specify this field, it defaults to the following value when
* you [deploy this Model to an Endpoint][google.cloud.aiplatform.v1beta1.EndpointService.DeployModel]:
@@ -4183,9 +4179,8 @@ public java.util.List
+ * Explanation type.
+ * For AutoML Image Classification models, possible values are:
+ * * `image-integrated-gradients`
+ * * `image-xrai`
+ *
+ *
+ * string explanation_type = 1;
+ *
+ * @return The explanationType.
+ */
+ java.lang.String getExplanationType();
+ /**
+ *
+ *
+ *
+ * Explanation type.
+ * For AutoML Image Classification models, possible values are:
+ * * `image-integrated-gradients`
+ * * `image-xrai`
+ *
+ *
+ * string explanation_type = 1;
+ *
+ * @return The bytes for explanationType.
+ */
+ com.google.protobuf.ByteString getExplanationTypeBytes();
+
+ /**
+ *
+ *
+ *
+ * Explanation spec details.
+ *
+ *
+ * .google.cloud.aiplatform.v1beta1.ExplanationSpec explanation_spec = 2;
+ *
+ * @return Whether the explanationSpec field is set.
+ */
+ boolean hasExplanationSpec();
+ /**
+ *
+ *
+ *
+ * Explanation spec details.
+ *
+ *
+ * .google.cloud.aiplatform.v1beta1.ExplanationSpec explanation_spec = 2;
+ *
+ * @return The explanationSpec.
+ */
+ com.google.cloud.aiplatform.v1beta1.ExplanationSpec getExplanationSpec();
+ /**
+ *
+ *
+ *
+ * Explanation spec details.
+ *
+ *
+ * .google.cloud.aiplatform.v1beta1.ExplanationSpec explanation_spec = 2;
+ */
+ com.google.cloud.aiplatform.v1beta1.ExplanationSpecOrBuilder getExplanationSpecOrBuilder();
+ }
+ /**
+ * Protobuf type {@code
+ * google.cloud.aiplatform.v1beta1.ModelEvaluation.ModelEvaluationExplanationSpec}
+ */
+ public static final class ModelEvaluationExplanationSpec
+ extends com.google.protobuf.GeneratedMessageV3
+ implements
+ // @@protoc_insertion_point(message_implements:google.cloud.aiplatform.v1beta1.ModelEvaluation.ModelEvaluationExplanationSpec)
+ ModelEvaluationExplanationSpecOrBuilder {
+ private static final long serialVersionUID = 0L;
+ // Use ModelEvaluationExplanationSpec.newBuilder() to construct.
+ private ModelEvaluationExplanationSpec(
+ com.google.protobuf.GeneratedMessageV3.Builder> builder) {
+ super(builder);
+ }
+
+ private ModelEvaluationExplanationSpec() {
+ explanationType_ = "";
+ }
+
+ @java.lang.Override
+ @SuppressWarnings({"unused"})
+ protected java.lang.Object newInstance(UnusedPrivateParameter unused) {
+ return new ModelEvaluationExplanationSpec();
+ }
+
+ @java.lang.Override
+ public final com.google.protobuf.UnknownFieldSet getUnknownFields() {
+ return this.unknownFields;
+ }
+
+ private ModelEvaluationExplanationSpec(
+ com.google.protobuf.CodedInputStream input,
+ com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+ throws com.google.protobuf.InvalidProtocolBufferException {
+ this();
+ if (extensionRegistry == null) {
+ throw new java.lang.NullPointerException();
+ }
+ com.google.protobuf.UnknownFieldSet.Builder unknownFields =
+ com.google.protobuf.UnknownFieldSet.newBuilder();
+ try {
+ boolean done = false;
+ while (!done) {
+ int tag = input.readTag();
+ switch (tag) {
+ case 0:
+ done = true;
+ break;
+ case 10:
+ {
+ java.lang.String s = input.readStringRequireUtf8();
+
+ explanationType_ = s;
+ break;
+ }
+ case 18:
+ {
+ com.google.cloud.aiplatform.v1beta1.ExplanationSpec.Builder subBuilder = null;
+ if (explanationSpec_ != null) {
+ subBuilder = explanationSpec_.toBuilder();
+ }
+ explanationSpec_ =
+ input.readMessage(
+ com.google.cloud.aiplatform.v1beta1.ExplanationSpec.parser(),
+ extensionRegistry);
+ if (subBuilder != null) {
+ subBuilder.mergeFrom(explanationSpec_);
+ explanationSpec_ = subBuilder.buildPartial();
+ }
+
+ break;
+ }
+ default:
+ {
+ if (!parseUnknownField(input, unknownFields, extensionRegistry, tag)) {
+ done = true;
+ }
+ break;
+ }
+ }
+ }
+ } catch (com.google.protobuf.InvalidProtocolBufferException e) {
+ throw e.setUnfinishedMessage(this);
+ } catch (java.io.IOException e) {
+ throw new com.google.protobuf.InvalidProtocolBufferException(e).setUnfinishedMessage(this);
+ } finally {
+ this.unknownFields = unknownFields.build();
+ makeExtensionsImmutable();
+ }
+ }
+
+ public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
+ return com.google.cloud.aiplatform.v1beta1.ModelEvaluationProto
+ .internal_static_google_cloud_aiplatform_v1beta1_ModelEvaluation_ModelEvaluationExplanationSpec_descriptor;
+ }
+
+ @java.lang.Override
+ protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
+ internalGetFieldAccessorTable() {
+ return com.google.cloud.aiplatform.v1beta1.ModelEvaluationProto
+ .internal_static_google_cloud_aiplatform_v1beta1_ModelEvaluation_ModelEvaluationExplanationSpec_fieldAccessorTable
+ .ensureFieldAccessorsInitialized(
+ com.google.cloud.aiplatform.v1beta1.ModelEvaluation.ModelEvaluationExplanationSpec
+ .class,
+ com.google.cloud.aiplatform.v1beta1.ModelEvaluation.ModelEvaluationExplanationSpec
+ .Builder.class);
+ }
+
+ public static final int EXPLANATION_TYPE_FIELD_NUMBER = 1;
+ private volatile java.lang.Object explanationType_;
+ /**
+ *
+ *
+ *
+ * Explanation type.
+ * For AutoML Image Classification models, possible values are:
+ * * `image-integrated-gradients`
+ * * `image-xrai`
+ *
+ *
+ * string explanation_type = 1;
+ *
+ * @return The explanationType.
+ */
+ @java.lang.Override
+ public java.lang.String getExplanationType() {
+ java.lang.Object ref = explanationType_;
+ if (ref instanceof java.lang.String) {
+ return (java.lang.String) ref;
+ } else {
+ com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
+ java.lang.String s = bs.toStringUtf8();
+ explanationType_ = s;
+ return s;
+ }
+ }
+ /**
+ *
+ *
+ *
+ * Explanation type.
+ * For AutoML Image Classification models, possible values are:
+ * * `image-integrated-gradients`
+ * * `image-xrai`
+ *
+ *
+ * string explanation_type = 1;
+ *
+ * @return The bytes for explanationType.
+ */
+ @java.lang.Override
+ public com.google.protobuf.ByteString getExplanationTypeBytes() {
+ java.lang.Object ref = explanationType_;
+ if (ref instanceof java.lang.String) {
+ com.google.protobuf.ByteString b =
+ com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
+ explanationType_ = b;
+ return b;
+ } else {
+ return (com.google.protobuf.ByteString) ref;
+ }
+ }
+
+ public static final int EXPLANATION_SPEC_FIELD_NUMBER = 2;
+ private com.google.cloud.aiplatform.v1beta1.ExplanationSpec explanationSpec_;
+ /**
+ *
+ *
+ *
+ * Explanation spec details.
+ *
+ *
+ * .google.cloud.aiplatform.v1beta1.ExplanationSpec explanation_spec = 2;
+ *
+ * @return Whether the explanationSpec field is set.
+ */
+ @java.lang.Override
+ public boolean hasExplanationSpec() {
+ return explanationSpec_ != null;
+ }
+ /**
+ *
+ *
+ *
+ * Explanation spec details.
+ *
+ *
+ * .google.cloud.aiplatform.v1beta1.ExplanationSpec explanation_spec = 2;
+ *
+ * @return The explanationSpec.
+ */
+ @java.lang.Override
+ public com.google.cloud.aiplatform.v1beta1.ExplanationSpec getExplanationSpec() {
+ return explanationSpec_ == null
+ ? com.google.cloud.aiplatform.v1beta1.ExplanationSpec.getDefaultInstance()
+ : explanationSpec_;
+ }
+ /**
+ *
+ *
+ *
+ * Explanation spec details.
+ *
+ *
+ * .google.cloud.aiplatform.v1beta1.ExplanationSpec explanation_spec = 2;
+ */
+ @java.lang.Override
+ public com.google.cloud.aiplatform.v1beta1.ExplanationSpecOrBuilder
+ getExplanationSpecOrBuilder() {
+ return getExplanationSpec();
+ }
+
+ private byte memoizedIsInitialized = -1;
+
+ @java.lang.Override
+ public final boolean isInitialized() {
+ byte isInitialized = memoizedIsInitialized;
+ if (isInitialized == 1) return true;
+ if (isInitialized == 0) return false;
+
+ memoizedIsInitialized = 1;
+ return true;
+ }
+
+ @java.lang.Override
+ public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException {
+ if (!getExplanationTypeBytes().isEmpty()) {
+ com.google.protobuf.GeneratedMessageV3.writeString(output, 1, explanationType_);
+ }
+ if (explanationSpec_ != null) {
+ output.writeMessage(2, getExplanationSpec());
+ }
+ unknownFields.writeTo(output);
+ }
+
+ @java.lang.Override
+ public int getSerializedSize() {
+ int size = memoizedSize;
+ if (size != -1) return size;
+
+ size = 0;
+ if (!getExplanationTypeBytes().isEmpty()) {
+ size += com.google.protobuf.GeneratedMessageV3.computeStringSize(1, explanationType_);
+ }
+ if (explanationSpec_ != null) {
+ size += com.google.protobuf.CodedOutputStream.computeMessageSize(2, getExplanationSpec());
+ }
+ size += unknownFields.getSerializedSize();
+ memoizedSize = size;
+ return size;
+ }
+
+ @java.lang.Override
+ public boolean equals(final java.lang.Object obj) {
+ if (obj == this) {
+ return true;
+ }
+ if (!(obj
+ instanceof
+ com.google.cloud.aiplatform.v1beta1.ModelEvaluation.ModelEvaluationExplanationSpec)) {
+ return super.equals(obj);
+ }
+ com.google.cloud.aiplatform.v1beta1.ModelEvaluation.ModelEvaluationExplanationSpec other =
+ (com.google.cloud.aiplatform.v1beta1.ModelEvaluation.ModelEvaluationExplanationSpec) obj;
+
+ if (!getExplanationType().equals(other.getExplanationType())) return false;
+ if (hasExplanationSpec() != other.hasExplanationSpec()) return false;
+ if (hasExplanationSpec()) {
+ if (!getExplanationSpec().equals(other.getExplanationSpec())) return false;
+ }
+ if (!unknownFields.equals(other.unknownFields)) return false;
+ return true;
+ }
+
+ @java.lang.Override
+ public int hashCode() {
+ if (memoizedHashCode != 0) {
+ return memoizedHashCode;
+ }
+ int hash = 41;
+ hash = (19 * hash) + getDescriptor().hashCode();
+ hash = (37 * hash) + EXPLANATION_TYPE_FIELD_NUMBER;
+ hash = (53 * hash) + getExplanationType().hashCode();
+ if (hasExplanationSpec()) {
+ hash = (37 * hash) + EXPLANATION_SPEC_FIELD_NUMBER;
+ hash = (53 * hash) + getExplanationSpec().hashCode();
+ }
+ hash = (29 * hash) + unknownFields.hashCode();
+ memoizedHashCode = hash;
+ return hash;
+ }
+
+ public static com.google.cloud.aiplatform.v1beta1.ModelEvaluation.ModelEvaluationExplanationSpec
+ parseFrom(java.nio.ByteBuffer data)
+ throws com.google.protobuf.InvalidProtocolBufferException {
+ return PARSER.parseFrom(data);
+ }
+
+ public static com.google.cloud.aiplatform.v1beta1.ModelEvaluation.ModelEvaluationExplanationSpec
+ parseFrom(
+ java.nio.ByteBuffer data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+ throws com.google.protobuf.InvalidProtocolBufferException {
+ return PARSER.parseFrom(data, extensionRegistry);
+ }
+
+ public static com.google.cloud.aiplatform.v1beta1.ModelEvaluation.ModelEvaluationExplanationSpec
+ parseFrom(com.google.protobuf.ByteString data)
+ throws com.google.protobuf.InvalidProtocolBufferException {
+ return PARSER.parseFrom(data);
+ }
+
+ public static com.google.cloud.aiplatform.v1beta1.ModelEvaluation.ModelEvaluationExplanationSpec
+ parseFrom(
+ com.google.protobuf.ByteString data,
+ com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+ throws com.google.protobuf.InvalidProtocolBufferException {
+ return PARSER.parseFrom(data, extensionRegistry);
+ }
+
+ public static com.google.cloud.aiplatform.v1beta1.ModelEvaluation.ModelEvaluationExplanationSpec
+ parseFrom(byte[] data) throws com.google.protobuf.InvalidProtocolBufferException {
+ return PARSER.parseFrom(data);
+ }
+
+ public static com.google.cloud.aiplatform.v1beta1.ModelEvaluation.ModelEvaluationExplanationSpec
+ parseFrom(byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+ throws com.google.protobuf.InvalidProtocolBufferException {
+ return PARSER.parseFrom(data, extensionRegistry);
+ }
+
+ public static com.google.cloud.aiplatform.v1beta1.ModelEvaluation.ModelEvaluationExplanationSpec
+ parseFrom(java.io.InputStream input) throws java.io.IOException {
+ return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
+ }
+
+ public static com.google.cloud.aiplatform.v1beta1.ModelEvaluation.ModelEvaluationExplanationSpec
+ parseFrom(
+ java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+ throws java.io.IOException {
+ return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
+ PARSER, input, extensionRegistry);
+ }
+
+ public static com.google.cloud.aiplatform.v1beta1.ModelEvaluation.ModelEvaluationExplanationSpec
+ parseDelimitedFrom(java.io.InputStream input) throws java.io.IOException {
+ return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(PARSER, input);
+ }
+
+ public static com.google.cloud.aiplatform.v1beta1.ModelEvaluation.ModelEvaluationExplanationSpec
+ parseDelimitedFrom(
+ java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+ throws java.io.IOException {
+ return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(
+ PARSER, input, extensionRegistry);
+ }
+
+ public static com.google.cloud.aiplatform.v1beta1.ModelEvaluation.ModelEvaluationExplanationSpec
+ parseFrom(com.google.protobuf.CodedInputStream input) throws java.io.IOException {
+ return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
+ }
+
+ public static com.google.cloud.aiplatform.v1beta1.ModelEvaluation.ModelEvaluationExplanationSpec
+ parseFrom(
+ com.google.protobuf.CodedInputStream input,
+ com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+ throws java.io.IOException {
+ return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
+ PARSER, input, extensionRegistry);
+ }
+
+ @java.lang.Override
+ public Builder newBuilderForType() {
+ return newBuilder();
+ }
+
+ public static Builder newBuilder() {
+ return DEFAULT_INSTANCE.toBuilder();
+ }
+
+ public static Builder newBuilder(
+ com.google.cloud.aiplatform.v1beta1.ModelEvaluation.ModelEvaluationExplanationSpec
+ prototype) {
+ return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
+ }
+
+ @java.lang.Override
+ public Builder toBuilder() {
+ return this == DEFAULT_INSTANCE ? new Builder() : new Builder().mergeFrom(this);
+ }
+
+ @java.lang.Override
+ protected Builder newBuilderForType(
+ com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
+ Builder builder = new Builder(parent);
+ return builder;
+ }
+ /**
+ * Protobuf type {@code
+ * google.cloud.aiplatform.v1beta1.ModelEvaluation.ModelEvaluationExplanationSpec}
+ */
+ public static final class Builder
+ extends com.google.protobuf.GeneratedMessageV3.Builder
+ * Explanation type.
+ * For AutoML Image Classification models, possible values are:
+ * * `image-integrated-gradients`
+ * * `image-xrai`
+ *
+ *
+ * string explanation_type = 1;
+ *
+ * @return The explanationType.
+ */
+ public java.lang.String getExplanationType() {
+ java.lang.Object ref = explanationType_;
+ if (!(ref instanceof java.lang.String)) {
+ com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
+ java.lang.String s = bs.toStringUtf8();
+ explanationType_ = s;
+ return s;
+ } else {
+ return (java.lang.String) ref;
+ }
+ }
+ /**
+ *
+ *
+ *
+ * Explanation type.
+ * For AutoML Image Classification models, possible values are:
+ * * `image-integrated-gradients`
+ * * `image-xrai`
+ *
+ *
+ * string explanation_type = 1;
+ *
+ * @return The bytes for explanationType.
+ */
+ public com.google.protobuf.ByteString getExplanationTypeBytes() {
+ java.lang.Object ref = explanationType_;
+ if (ref instanceof String) {
+ com.google.protobuf.ByteString b =
+ com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
+ explanationType_ = b;
+ return b;
+ } else {
+ return (com.google.protobuf.ByteString) ref;
+ }
+ }
+ /**
+ *
+ *
+ *
+ * Explanation type.
+ * For AutoML Image Classification models, possible values are:
+ * * `image-integrated-gradients`
+ * * `image-xrai`
+ *
+ *
+ * string explanation_type = 1;
+ *
+ * @param value The explanationType to set.
+ * @return This builder for chaining.
+ */
+ public Builder setExplanationType(java.lang.String value) {
+ if (value == null) {
+ throw new NullPointerException();
+ }
+
+ explanationType_ = value;
+ onChanged();
+ return this;
+ }
+ /**
+ *
+ *
+ *
+ * Explanation type.
+ * For AutoML Image Classification models, possible values are:
+ * * `image-integrated-gradients`
+ * * `image-xrai`
+ *
+ *
+ * string explanation_type = 1;
+ *
+ * @return This builder for chaining.
+ */
+ public Builder clearExplanationType() {
+
+ explanationType_ = getDefaultInstance().getExplanationType();
+ onChanged();
+ return this;
+ }
+ /**
+ *
+ *
+ *
+ * Explanation type.
+ * For AutoML Image Classification models, possible values are:
+ * * `image-integrated-gradients`
+ * * `image-xrai`
+ *
+ *
+ * string explanation_type = 1;
+ *
+ * @param value The bytes for explanationType to set.
+ * @return This builder for chaining.
+ */
+ public Builder setExplanationTypeBytes(com.google.protobuf.ByteString value) {
+ if (value == null) {
+ throw new NullPointerException();
+ }
+ checkByteStringIsUtf8(value);
+
+ explanationType_ = value;
+ onChanged();
+ return this;
+ }
+
+ private com.google.cloud.aiplatform.v1beta1.ExplanationSpec explanationSpec_;
+ private com.google.protobuf.SingleFieldBuilderV3<
+ com.google.cloud.aiplatform.v1beta1.ExplanationSpec,
+ com.google.cloud.aiplatform.v1beta1.ExplanationSpec.Builder,
+ com.google.cloud.aiplatform.v1beta1.ExplanationSpecOrBuilder>
+ explanationSpecBuilder_;
+ /**
+ *
+ *
+ *
+ * Explanation spec details.
+ *
+ *
+ * .google.cloud.aiplatform.v1beta1.ExplanationSpec explanation_spec = 2;
+ *
+ * @return Whether the explanationSpec field is set.
+ */
+ public boolean hasExplanationSpec() {
+ return explanationSpecBuilder_ != null || explanationSpec_ != null;
+ }
+ /**
+ *
+ *
+ *
+ * Explanation spec details.
+ *
+ *
+ * .google.cloud.aiplatform.v1beta1.ExplanationSpec explanation_spec = 2;
+ *
+ * @return The explanationSpec.
+ */
+ public com.google.cloud.aiplatform.v1beta1.ExplanationSpec getExplanationSpec() {
+ if (explanationSpecBuilder_ == null) {
+ return explanationSpec_ == null
+ ? com.google.cloud.aiplatform.v1beta1.ExplanationSpec.getDefaultInstance()
+ : explanationSpec_;
+ } else {
+ return explanationSpecBuilder_.getMessage();
+ }
+ }
+ /**
+ *
+ *
+ *
+ * Explanation spec details.
+ *
+ *
+ * .google.cloud.aiplatform.v1beta1.ExplanationSpec explanation_spec = 2;
+ */
+ public Builder setExplanationSpec(com.google.cloud.aiplatform.v1beta1.ExplanationSpec value) {
+ if (explanationSpecBuilder_ == null) {
+ if (value == null) {
+ throw new NullPointerException();
+ }
+ explanationSpec_ = value;
+ onChanged();
+ } else {
+ explanationSpecBuilder_.setMessage(value);
+ }
+
+ return this;
+ }
+ /**
+ *
+ *
+ *
+ * Explanation spec details.
+ *
+ *
+ * .google.cloud.aiplatform.v1beta1.ExplanationSpec explanation_spec = 2;
+ */
+ public Builder setExplanationSpec(
+ com.google.cloud.aiplatform.v1beta1.ExplanationSpec.Builder builderForValue) {
+ if (explanationSpecBuilder_ == null) {
+ explanationSpec_ = builderForValue.build();
+ onChanged();
+ } else {
+ explanationSpecBuilder_.setMessage(builderForValue.build());
+ }
+
+ return this;
+ }
+ /**
+ *
+ *
+ *
+ * Explanation spec details.
+ *
+ *
+ * .google.cloud.aiplatform.v1beta1.ExplanationSpec explanation_spec = 2;
+ */
+ public Builder mergeExplanationSpec(
+ com.google.cloud.aiplatform.v1beta1.ExplanationSpec value) {
+ if (explanationSpecBuilder_ == null) {
+ if (explanationSpec_ != null) {
+ explanationSpec_ =
+ com.google.cloud.aiplatform.v1beta1.ExplanationSpec.newBuilder(explanationSpec_)
+ .mergeFrom(value)
+ .buildPartial();
+ } else {
+ explanationSpec_ = value;
+ }
+ onChanged();
+ } else {
+ explanationSpecBuilder_.mergeFrom(value);
+ }
+
+ return this;
+ }
+ /**
+ *
+ *
+ *
+ * Explanation spec details.
+ *
+ *
+ * .google.cloud.aiplatform.v1beta1.ExplanationSpec explanation_spec = 2;
+ */
+ public Builder clearExplanationSpec() {
+ if (explanationSpecBuilder_ == null) {
+ explanationSpec_ = null;
+ onChanged();
+ } else {
+ explanationSpec_ = null;
+ explanationSpecBuilder_ = null;
+ }
+
+ return this;
+ }
+ /**
+ *
+ *
+ *
+ * Explanation spec details.
+ *
+ *
+ * .google.cloud.aiplatform.v1beta1.ExplanationSpec explanation_spec = 2;
+ */
+ public com.google.cloud.aiplatform.v1beta1.ExplanationSpec.Builder
+ getExplanationSpecBuilder() {
+
+ onChanged();
+ return getExplanationSpecFieldBuilder().getBuilder();
+ }
+ /**
+ *
+ *
+ *
+ * Explanation spec details.
+ *
+ *
+ * .google.cloud.aiplatform.v1beta1.ExplanationSpec explanation_spec = 2;
+ */
+ public com.google.cloud.aiplatform.v1beta1.ExplanationSpecOrBuilder
+ getExplanationSpecOrBuilder() {
+ if (explanationSpecBuilder_ != null) {
+ return explanationSpecBuilder_.getMessageOrBuilder();
+ } else {
+ return explanationSpec_ == null
+ ? com.google.cloud.aiplatform.v1beta1.ExplanationSpec.getDefaultInstance()
+ : explanationSpec_;
+ }
+ }
+ /**
+ *
+ *
+ *
+ * Explanation spec details.
+ *
+ *
+ * .google.cloud.aiplatform.v1beta1.ExplanationSpec explanation_spec = 2;
+ */
+ private com.google.protobuf.SingleFieldBuilderV3<
+ com.google.cloud.aiplatform.v1beta1.ExplanationSpec,
+ com.google.cloud.aiplatform.v1beta1.ExplanationSpec.Builder,
+ com.google.cloud.aiplatform.v1beta1.ExplanationSpecOrBuilder>
+ getExplanationSpecFieldBuilder() {
+ if (explanationSpecBuilder_ == null) {
+ explanationSpecBuilder_ =
+ new com.google.protobuf.SingleFieldBuilderV3<
+ com.google.cloud.aiplatform.v1beta1.ExplanationSpec,
+ com.google.cloud.aiplatform.v1beta1.ExplanationSpec.Builder,
+ com.google.cloud.aiplatform.v1beta1.ExplanationSpecOrBuilder>(
+ getExplanationSpec(), getParentForChildren(), isClean());
+ explanationSpec_ = null;
+ }
+ return explanationSpecBuilder_;
+ }
+
+ @java.lang.Override
+ public final Builder setUnknownFields(
+ final com.google.protobuf.UnknownFieldSet unknownFields) {
+ return super.setUnknownFields(unknownFields);
+ }
+
+ @java.lang.Override
+ public final Builder mergeUnknownFields(
+ final com.google.protobuf.UnknownFieldSet unknownFields) {
+ return super.mergeUnknownFields(unknownFields);
+ }
+
+ // @@protoc_insertion_point(builder_scope:google.cloud.aiplatform.v1beta1.ModelEvaluation.ModelEvaluationExplanationSpec)
+ }
+
+ // @@protoc_insertion_point(class_scope:google.cloud.aiplatform.v1beta1.ModelEvaluation.ModelEvaluationExplanationSpec)
+ private static final com.google.cloud.aiplatform.v1beta1.ModelEvaluation
+ .ModelEvaluationExplanationSpec
+ DEFAULT_INSTANCE;
+
+ static {
+ DEFAULT_INSTANCE =
+ new com.google.cloud.aiplatform.v1beta1.ModelEvaluation.ModelEvaluationExplanationSpec();
+ }
+
+ public static com.google.cloud.aiplatform.v1beta1.ModelEvaluation.ModelEvaluationExplanationSpec
+ getDefaultInstance() {
+ return DEFAULT_INSTANCE;
+ }
+
+ private static final com.google.protobuf.Parser
+ * Output only. Describes the values of [ExplanationSpec][google.cloud.aiplatform.v1beta1.ExplanationSpec] that are used for explaining
+ * the predicted values on the evaluated data.
+ *
+ *
+ *
+ * repeated .google.cloud.aiplatform.v1beta1.ModelEvaluation.ModelEvaluationExplanationSpec explanation_specs = 9 [(.google.api.field_behavior) = OUTPUT_ONLY];
+ *
+ */
+ @java.lang.Override
+ public java.util.List<
+ com.google.cloud.aiplatform.v1beta1.ModelEvaluation.ModelEvaluationExplanationSpec>
+ getExplanationSpecsList() {
+ return explanationSpecs_;
+ }
+ /**
+ *
+ *
+ *
+ * Output only. Describes the values of [ExplanationSpec][google.cloud.aiplatform.v1beta1.ExplanationSpec] that are used for explaining
+ * the predicted values on the evaluated data.
+ *
+ *
+ *
+ * repeated .google.cloud.aiplatform.v1beta1.ModelEvaluation.ModelEvaluationExplanationSpec explanation_specs = 9 [(.google.api.field_behavior) = OUTPUT_ONLY];
+ *
+ */
+ @java.lang.Override
+ public java.util.List<
+ ? extends
+ com.google.cloud.aiplatform.v1beta1.ModelEvaluation
+ .ModelEvaluationExplanationSpecOrBuilder>
+ getExplanationSpecsOrBuilderList() {
+ return explanationSpecs_;
+ }
+ /**
+ *
+ *
+ *
+ * Output only. Describes the values of [ExplanationSpec][google.cloud.aiplatform.v1beta1.ExplanationSpec] that are used for explaining
+ * the predicted values on the evaluated data.
+ *
+ *
+ *
+ * repeated .google.cloud.aiplatform.v1beta1.ModelEvaluation.ModelEvaluationExplanationSpec explanation_specs = 9 [(.google.api.field_behavior) = OUTPUT_ONLY];
+ *
+ */
+ @java.lang.Override
+ public int getExplanationSpecsCount() {
+ return explanationSpecs_.size();
+ }
+ /**
+ *
+ *
+ *
+ * Output only. Describes the values of [ExplanationSpec][google.cloud.aiplatform.v1beta1.ExplanationSpec] that are used for explaining
+ * the predicted values on the evaluated data.
+ *
+ *
+ *
+ * repeated .google.cloud.aiplatform.v1beta1.ModelEvaluation.ModelEvaluationExplanationSpec explanation_specs = 9 [(.google.api.field_behavior) = OUTPUT_ONLY];
+ *
+ */
+ @java.lang.Override
+ public com.google.cloud.aiplatform.v1beta1.ModelEvaluation.ModelEvaluationExplanationSpec
+ getExplanationSpecs(int index) {
+ return explanationSpecs_.get(index);
+ }
+ /**
+ *
+ *
+ *
+ * Output only. Describes the values of [ExplanationSpec][google.cloud.aiplatform.v1beta1.ExplanationSpec] that are used for explaining
+ * the predicted values on the evaluated data.
+ *
+ *
+ *
+ * repeated .google.cloud.aiplatform.v1beta1.ModelEvaluation.ModelEvaluationExplanationSpec explanation_specs = 9 [(.google.api.field_behavior) = OUTPUT_ONLY];
+ *
+ */
+ @java.lang.Override
+ public com.google.cloud.aiplatform.v1beta1.ModelEvaluation.ModelEvaluationExplanationSpecOrBuilder
+ getExplanationSpecsOrBuilder(int index) {
+ return explanationSpecs_.get(index);
+ }
+
private byte memoizedIsInitialized = -1;
@java.lang.Override
@@ -549,6 +1687,9 @@ public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io
if (modelExplanation_ != null) {
output.writeMessage(8, getModelExplanation());
}
+ for (int i = 0; i < explanationSpecs_.size(); i++) {
+ output.writeMessage(9, explanationSpecs_.get(i));
+ }
unknownFields.writeTo(output);
}
@@ -581,6 +1722,9 @@ public int getSerializedSize() {
if (modelExplanation_ != null) {
size += com.google.protobuf.CodedOutputStream.computeMessageSize(8, getModelExplanation());
}
+ for (int i = 0; i < explanationSpecs_.size(); i++) {
+ size += com.google.protobuf.CodedOutputStream.computeMessageSize(9, explanationSpecs_.get(i));
+ }
size += unknownFields.getSerializedSize();
memoizedSize = size;
return size;
@@ -612,6 +1756,7 @@ public boolean equals(final java.lang.Object obj) {
if (hasModelExplanation()) {
if (!getModelExplanation().equals(other.getModelExplanation())) return false;
}
+ if (!getExplanationSpecsList().equals(other.getExplanationSpecsList())) return false;
if (!unknownFields.equals(other.unknownFields)) return false;
return true;
}
@@ -643,6 +1788,10 @@ public int hashCode() {
hash = (37 * hash) + MODEL_EXPLANATION_FIELD_NUMBER;
hash = (53 * hash) + getModelExplanation().hashCode();
}
+ if (getExplanationSpecsCount() > 0) {
+ hash = (37 * hash) + EXPLANATION_SPECS_FIELD_NUMBER;
+ hash = (53 * hash) + getExplanationSpecsList().hashCode();
+ }
hash = (29 * hash) + unknownFields.hashCode();
memoizedHashCode = hash;
return hash;
@@ -783,7 +1932,9 @@ private Builder(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
}
private void maybeForceBuilderInitialization() {
- if (com.google.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders) {}
+ if (com.google.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders) {
+ getExplanationSpecsFieldBuilder();
+ }
}
@java.lang.Override
@@ -813,6 +1964,12 @@ public Builder clear() {
modelExplanation_ = null;
modelExplanationBuilder_ = null;
}
+ if (explanationSpecsBuilder_ == null) {
+ explanationSpecs_ = java.util.Collections.emptyList();
+ bitField0_ = (bitField0_ & ~0x00000002);
+ } else {
+ explanationSpecsBuilder_.clear();
+ }
return this;
}
@@ -863,6 +2020,15 @@ public com.google.cloud.aiplatform.v1beta1.ModelEvaluation buildPartial() {
} else {
result.modelExplanation_ = modelExplanationBuilder_.build();
}
+ if (explanationSpecsBuilder_ == null) {
+ if (((bitField0_ & 0x00000002) != 0)) {
+ explanationSpecs_ = java.util.Collections.unmodifiableList(explanationSpecs_);
+ bitField0_ = (bitField0_ & ~0x00000002);
+ }
+ result.explanationSpecs_ = explanationSpecs_;
+ } else {
+ result.explanationSpecs_ = explanationSpecsBuilder_.build();
+ }
onBuilt();
return result;
}
@@ -940,6 +2106,33 @@ public Builder mergeFrom(com.google.cloud.aiplatform.v1beta1.ModelEvaluation oth
if (other.hasModelExplanation()) {
mergeModelExplanation(other.getModelExplanation());
}
+ if (explanationSpecsBuilder_ == null) {
+ if (!other.explanationSpecs_.isEmpty()) {
+ if (explanationSpecs_.isEmpty()) {
+ explanationSpecs_ = other.explanationSpecs_;
+ bitField0_ = (bitField0_ & ~0x00000002);
+ } else {
+ ensureExplanationSpecsIsMutable();
+ explanationSpecs_.addAll(other.explanationSpecs_);
+ }
+ onChanged();
+ }
+ } else {
+ if (!other.explanationSpecs_.isEmpty()) {
+ if (explanationSpecsBuilder_.isEmpty()) {
+ explanationSpecsBuilder_.dispose();
+ explanationSpecsBuilder_ = null;
+ explanationSpecs_ = other.explanationSpecs_;
+ bitField0_ = (bitField0_ & ~0x00000002);
+ explanationSpecsBuilder_ =
+ com.google.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders
+ ? getExplanationSpecsFieldBuilder()
+ : null;
+ } else {
+ explanationSpecsBuilder_.addAllMessages(other.explanationSpecs_);
+ }
+ }
+ }
this.mergeUnknownFields(other.unknownFields);
onChanged();
return this;
@@ -2016,6 +3209,458 @@ public Builder clearModelExplanation() {
return modelExplanationBuilder_;
}
+ private java.util.List<
+ com.google.cloud.aiplatform.v1beta1.ModelEvaluation.ModelEvaluationExplanationSpec>
+ explanationSpecs_ = java.util.Collections.emptyList();
+
+ private void ensureExplanationSpecsIsMutable() {
+ if (!((bitField0_ & 0x00000002) != 0)) {
+ explanationSpecs_ =
+ new java.util.ArrayList<
+ com.google.cloud.aiplatform.v1beta1.ModelEvaluation.ModelEvaluationExplanationSpec>(
+ explanationSpecs_);
+ bitField0_ |= 0x00000002;
+ }
+ }
+
+ private com.google.protobuf.RepeatedFieldBuilderV3<
+ com.google.cloud.aiplatform.v1beta1.ModelEvaluation.ModelEvaluationExplanationSpec,
+ com.google.cloud.aiplatform.v1beta1.ModelEvaluation.ModelEvaluationExplanationSpec
+ .Builder,
+ com.google.cloud.aiplatform.v1beta1.ModelEvaluation
+ .ModelEvaluationExplanationSpecOrBuilder>
+ explanationSpecsBuilder_;
+
+ /**
+ *
+ *
+ *
+ * Output only. Describes the values of [ExplanationSpec][google.cloud.aiplatform.v1beta1.ExplanationSpec] that are used for explaining
+ * the predicted values on the evaluated data.
+ *
+ *
+ *
+ * repeated .google.cloud.aiplatform.v1beta1.ModelEvaluation.ModelEvaluationExplanationSpec explanation_specs = 9 [(.google.api.field_behavior) = OUTPUT_ONLY];
+ *
+ */
+ public java.util.List<
+ com.google.cloud.aiplatform.v1beta1.ModelEvaluation.ModelEvaluationExplanationSpec>
+ getExplanationSpecsList() {
+ if (explanationSpecsBuilder_ == null) {
+ return java.util.Collections.unmodifiableList(explanationSpecs_);
+ } else {
+ return explanationSpecsBuilder_.getMessageList();
+ }
+ }
+ /**
+ *
+ *
+ *
+ * Output only. Describes the values of [ExplanationSpec][google.cloud.aiplatform.v1beta1.ExplanationSpec] that are used for explaining
+ * the predicted values on the evaluated data.
+ *
+ *
+ *
+ * repeated .google.cloud.aiplatform.v1beta1.ModelEvaluation.ModelEvaluationExplanationSpec explanation_specs = 9 [(.google.api.field_behavior) = OUTPUT_ONLY];
+ *
+ */
+ public int getExplanationSpecsCount() {
+ if (explanationSpecsBuilder_ == null) {
+ return explanationSpecs_.size();
+ } else {
+ return explanationSpecsBuilder_.getCount();
+ }
+ }
+ /**
+ *
+ *
+ *
+ * Output only. Describes the values of [ExplanationSpec][google.cloud.aiplatform.v1beta1.ExplanationSpec] that are used for explaining
+ * the predicted values on the evaluated data.
+ *
+ *
+ *
+ * repeated .google.cloud.aiplatform.v1beta1.ModelEvaluation.ModelEvaluationExplanationSpec explanation_specs = 9 [(.google.api.field_behavior) = OUTPUT_ONLY];
+ *
+ */
+ public com.google.cloud.aiplatform.v1beta1.ModelEvaluation.ModelEvaluationExplanationSpec
+ getExplanationSpecs(int index) {
+ if (explanationSpecsBuilder_ == null) {
+ return explanationSpecs_.get(index);
+ } else {
+ return explanationSpecsBuilder_.getMessage(index);
+ }
+ }
+ /**
+ *
+ *
+ *
+ * Output only. Describes the values of [ExplanationSpec][google.cloud.aiplatform.v1beta1.ExplanationSpec] that are used for explaining
+ * the predicted values on the evaluated data.
+ *
+ *
+ *
+ * repeated .google.cloud.aiplatform.v1beta1.ModelEvaluation.ModelEvaluationExplanationSpec explanation_specs = 9 [(.google.api.field_behavior) = OUTPUT_ONLY];
+ *
+ */
+ public Builder setExplanationSpecs(
+ int index,
+ com.google.cloud.aiplatform.v1beta1.ModelEvaluation.ModelEvaluationExplanationSpec value) {
+ if (explanationSpecsBuilder_ == null) {
+ if (value == null) {
+ throw new NullPointerException();
+ }
+ ensureExplanationSpecsIsMutable();
+ explanationSpecs_.set(index, value);
+ onChanged();
+ } else {
+ explanationSpecsBuilder_.setMessage(index, value);
+ }
+ return this;
+ }
+ /**
+ *
+ *
+ *
+ * Output only. Describes the values of [ExplanationSpec][google.cloud.aiplatform.v1beta1.ExplanationSpec] that are used for explaining
+ * the predicted values on the evaluated data.
+ *
+ *
+ *
+ * repeated .google.cloud.aiplatform.v1beta1.ModelEvaluation.ModelEvaluationExplanationSpec explanation_specs = 9 [(.google.api.field_behavior) = OUTPUT_ONLY];
+ *
+ */
+ public Builder setExplanationSpecs(
+ int index,
+ com.google.cloud.aiplatform.v1beta1.ModelEvaluation.ModelEvaluationExplanationSpec.Builder
+ builderForValue) {
+ if (explanationSpecsBuilder_ == null) {
+ ensureExplanationSpecsIsMutable();
+ explanationSpecs_.set(index, builderForValue.build());
+ onChanged();
+ } else {
+ explanationSpecsBuilder_.setMessage(index, builderForValue.build());
+ }
+ return this;
+ }
+ /**
+ *
+ *
+ *
+ * Output only. Describes the values of [ExplanationSpec][google.cloud.aiplatform.v1beta1.ExplanationSpec] that are used for explaining
+ * the predicted values on the evaluated data.
+ *
+ *
+ *
+ * repeated .google.cloud.aiplatform.v1beta1.ModelEvaluation.ModelEvaluationExplanationSpec explanation_specs = 9 [(.google.api.field_behavior) = OUTPUT_ONLY];
+ *
+ */
+ public Builder addExplanationSpecs(
+ com.google.cloud.aiplatform.v1beta1.ModelEvaluation.ModelEvaluationExplanationSpec value) {
+ if (explanationSpecsBuilder_ == null) {
+ if (value == null) {
+ throw new NullPointerException();
+ }
+ ensureExplanationSpecsIsMutable();
+ explanationSpecs_.add(value);
+ onChanged();
+ } else {
+ explanationSpecsBuilder_.addMessage(value);
+ }
+ return this;
+ }
+ /**
+ *
+ *
+ *
+ * Output only. Describes the values of [ExplanationSpec][google.cloud.aiplatform.v1beta1.ExplanationSpec] that are used for explaining
+ * the predicted values on the evaluated data.
+ *
+ *
+ *
+ * repeated .google.cloud.aiplatform.v1beta1.ModelEvaluation.ModelEvaluationExplanationSpec explanation_specs = 9 [(.google.api.field_behavior) = OUTPUT_ONLY];
+ *
+ */
+ public Builder addExplanationSpecs(
+ int index,
+ com.google.cloud.aiplatform.v1beta1.ModelEvaluation.ModelEvaluationExplanationSpec value) {
+ if (explanationSpecsBuilder_ == null) {
+ if (value == null) {
+ throw new NullPointerException();
+ }
+ ensureExplanationSpecsIsMutable();
+ explanationSpecs_.add(index, value);
+ onChanged();
+ } else {
+ explanationSpecsBuilder_.addMessage(index, value);
+ }
+ return this;
+ }
+ /**
+ *
+ *
+ *
+ * Output only. Describes the values of [ExplanationSpec][google.cloud.aiplatform.v1beta1.ExplanationSpec] that are used for explaining
+ * the predicted values on the evaluated data.
+ *
+ *
+ *
+ * repeated .google.cloud.aiplatform.v1beta1.ModelEvaluation.ModelEvaluationExplanationSpec explanation_specs = 9 [(.google.api.field_behavior) = OUTPUT_ONLY];
+ *
+ */
+ public Builder addExplanationSpecs(
+ com.google.cloud.aiplatform.v1beta1.ModelEvaluation.ModelEvaluationExplanationSpec.Builder
+ builderForValue) {
+ if (explanationSpecsBuilder_ == null) {
+ ensureExplanationSpecsIsMutable();
+ explanationSpecs_.add(builderForValue.build());
+ onChanged();
+ } else {
+ explanationSpecsBuilder_.addMessage(builderForValue.build());
+ }
+ return this;
+ }
+ /**
+ *
+ *
+ *
+ * Output only. Describes the values of [ExplanationSpec][google.cloud.aiplatform.v1beta1.ExplanationSpec] that are used for explaining
+ * the predicted values on the evaluated data.
+ *
+ *
+ *
+ * repeated .google.cloud.aiplatform.v1beta1.ModelEvaluation.ModelEvaluationExplanationSpec explanation_specs = 9 [(.google.api.field_behavior) = OUTPUT_ONLY];
+ *
+ */
+ public Builder addExplanationSpecs(
+ int index,
+ com.google.cloud.aiplatform.v1beta1.ModelEvaluation.ModelEvaluationExplanationSpec.Builder
+ builderForValue) {
+ if (explanationSpecsBuilder_ == null) {
+ ensureExplanationSpecsIsMutable();
+ explanationSpecs_.add(index, builderForValue.build());
+ onChanged();
+ } else {
+ explanationSpecsBuilder_.addMessage(index, builderForValue.build());
+ }
+ return this;
+ }
+ /**
+ *
+ *
+ *
+ * Output only. Describes the values of [ExplanationSpec][google.cloud.aiplatform.v1beta1.ExplanationSpec] that are used for explaining
+ * the predicted values on the evaluated data.
+ *
+ *
+ *
+ * repeated .google.cloud.aiplatform.v1beta1.ModelEvaluation.ModelEvaluationExplanationSpec explanation_specs = 9 [(.google.api.field_behavior) = OUTPUT_ONLY];
+ *
+ */
+ public Builder addAllExplanationSpecs(
+ java.lang.Iterable<
+ ? extends
+ com.google.cloud.aiplatform.v1beta1.ModelEvaluation
+ .ModelEvaluationExplanationSpec>
+ values) {
+ if (explanationSpecsBuilder_ == null) {
+ ensureExplanationSpecsIsMutable();
+ com.google.protobuf.AbstractMessageLite.Builder.addAll(values, explanationSpecs_);
+ onChanged();
+ } else {
+ explanationSpecsBuilder_.addAllMessages(values);
+ }
+ return this;
+ }
+ /**
+ *
+ *
+ *
+ * Output only. Describes the values of [ExplanationSpec][google.cloud.aiplatform.v1beta1.ExplanationSpec] that are used for explaining
+ * the predicted values on the evaluated data.
+ *
+ *
+ *
+ * repeated .google.cloud.aiplatform.v1beta1.ModelEvaluation.ModelEvaluationExplanationSpec explanation_specs = 9 [(.google.api.field_behavior) = OUTPUT_ONLY];
+ *
+ */
+ public Builder clearExplanationSpecs() {
+ if (explanationSpecsBuilder_ == null) {
+ explanationSpecs_ = java.util.Collections.emptyList();
+ bitField0_ = (bitField0_ & ~0x00000002);
+ onChanged();
+ } else {
+ explanationSpecsBuilder_.clear();
+ }
+ return this;
+ }
+ /**
+ *
+ *
+ *
+ * Output only. Describes the values of [ExplanationSpec][google.cloud.aiplatform.v1beta1.ExplanationSpec] that are used for explaining
+ * the predicted values on the evaluated data.
+ *
+ *
+ *
+ * repeated .google.cloud.aiplatform.v1beta1.ModelEvaluation.ModelEvaluationExplanationSpec explanation_specs = 9 [(.google.api.field_behavior) = OUTPUT_ONLY];
+ *
+ */
+ public Builder removeExplanationSpecs(int index) {
+ if (explanationSpecsBuilder_ == null) {
+ ensureExplanationSpecsIsMutable();
+ explanationSpecs_.remove(index);
+ onChanged();
+ } else {
+ explanationSpecsBuilder_.remove(index);
+ }
+ return this;
+ }
+ /**
+ *
+ *
+ *
+ * Output only. Describes the values of [ExplanationSpec][google.cloud.aiplatform.v1beta1.ExplanationSpec] that are used for explaining
+ * the predicted values on the evaluated data.
+ *
+ *
+ *
+ * repeated .google.cloud.aiplatform.v1beta1.ModelEvaluation.ModelEvaluationExplanationSpec explanation_specs = 9 [(.google.api.field_behavior) = OUTPUT_ONLY];
+ *
+ */
+ public com.google.cloud.aiplatform.v1beta1.ModelEvaluation.ModelEvaluationExplanationSpec
+ .Builder
+ getExplanationSpecsBuilder(int index) {
+ return getExplanationSpecsFieldBuilder().getBuilder(index);
+ }
+ /**
+ *
+ *
+ *
+ * Output only. Describes the values of [ExplanationSpec][google.cloud.aiplatform.v1beta1.ExplanationSpec] that are used for explaining
+ * the predicted values on the evaluated data.
+ *
+ *
+ *
+ * repeated .google.cloud.aiplatform.v1beta1.ModelEvaluation.ModelEvaluationExplanationSpec explanation_specs = 9 [(.google.api.field_behavior) = OUTPUT_ONLY];
+ *
+ */
+ public com.google.cloud.aiplatform.v1beta1.ModelEvaluation
+ .ModelEvaluationExplanationSpecOrBuilder
+ getExplanationSpecsOrBuilder(int index) {
+ if (explanationSpecsBuilder_ == null) {
+ return explanationSpecs_.get(index);
+ } else {
+ return explanationSpecsBuilder_.getMessageOrBuilder(index);
+ }
+ }
+ /**
+ *
+ *
+ *
+ * Output only. Describes the values of [ExplanationSpec][google.cloud.aiplatform.v1beta1.ExplanationSpec] that are used for explaining
+ * the predicted values on the evaluated data.
+ *
+ *
+ *
+ * repeated .google.cloud.aiplatform.v1beta1.ModelEvaluation.ModelEvaluationExplanationSpec explanation_specs = 9 [(.google.api.field_behavior) = OUTPUT_ONLY];
+ *
+ */
+ public java.util.List<
+ ? extends
+ com.google.cloud.aiplatform.v1beta1.ModelEvaluation
+ .ModelEvaluationExplanationSpecOrBuilder>
+ getExplanationSpecsOrBuilderList() {
+ if (explanationSpecsBuilder_ != null) {
+ return explanationSpecsBuilder_.getMessageOrBuilderList();
+ } else {
+ return java.util.Collections.unmodifiableList(explanationSpecs_);
+ }
+ }
+ /**
+ *
+ *
+ *
+ * Output only. Describes the values of [ExplanationSpec][google.cloud.aiplatform.v1beta1.ExplanationSpec] that are used for explaining
+ * the predicted values on the evaluated data.
+ *
+ *
+ *
+ * repeated .google.cloud.aiplatform.v1beta1.ModelEvaluation.ModelEvaluationExplanationSpec explanation_specs = 9 [(.google.api.field_behavior) = OUTPUT_ONLY];
+ *
+ */
+ public com.google.cloud.aiplatform.v1beta1.ModelEvaluation.ModelEvaluationExplanationSpec
+ .Builder
+ addExplanationSpecsBuilder() {
+ return getExplanationSpecsFieldBuilder()
+ .addBuilder(
+ com.google.cloud.aiplatform.v1beta1.ModelEvaluation.ModelEvaluationExplanationSpec
+ .getDefaultInstance());
+ }
+ /**
+ *
+ *
+ *
+ * Output only. Describes the values of [ExplanationSpec][google.cloud.aiplatform.v1beta1.ExplanationSpec] that are used for explaining
+ * the predicted values on the evaluated data.
+ *
+ *
+ *
+ * repeated .google.cloud.aiplatform.v1beta1.ModelEvaluation.ModelEvaluationExplanationSpec explanation_specs = 9 [(.google.api.field_behavior) = OUTPUT_ONLY];
+ *
+ */
+ public com.google.cloud.aiplatform.v1beta1.ModelEvaluation.ModelEvaluationExplanationSpec
+ .Builder
+ addExplanationSpecsBuilder(int index) {
+ return getExplanationSpecsFieldBuilder()
+ .addBuilder(
+ index,
+ com.google.cloud.aiplatform.v1beta1.ModelEvaluation.ModelEvaluationExplanationSpec
+ .getDefaultInstance());
+ }
+ /**
+ *
+ *
+ *
+ * Output only. Describes the values of [ExplanationSpec][google.cloud.aiplatform.v1beta1.ExplanationSpec] that are used for explaining
+ * the predicted values on the evaluated data.
+ *
+ *
+ *
+ * repeated .google.cloud.aiplatform.v1beta1.ModelEvaluation.ModelEvaluationExplanationSpec explanation_specs = 9 [(.google.api.field_behavior) = OUTPUT_ONLY];
+ *
+ */
+ public java.util.List<
+ com.google.cloud.aiplatform.v1beta1.ModelEvaluation.ModelEvaluationExplanationSpec
+ .Builder>
+ getExplanationSpecsBuilderList() {
+ return getExplanationSpecsFieldBuilder().getBuilderList();
+ }
+
+ private com.google.protobuf.RepeatedFieldBuilderV3<
+ com.google.cloud.aiplatform.v1beta1.ModelEvaluation.ModelEvaluationExplanationSpec,
+ com.google.cloud.aiplatform.v1beta1.ModelEvaluation.ModelEvaluationExplanationSpec
+ .Builder,
+ com.google.cloud.aiplatform.v1beta1.ModelEvaluation
+ .ModelEvaluationExplanationSpecOrBuilder>
+ getExplanationSpecsFieldBuilder() {
+ if (explanationSpecsBuilder_ == null) {
+ explanationSpecsBuilder_ =
+ new com.google.protobuf.RepeatedFieldBuilderV3<
+ com.google.cloud.aiplatform.v1beta1.ModelEvaluation.ModelEvaluationExplanationSpec,
+ com.google.cloud.aiplatform.v1beta1.ModelEvaluation.ModelEvaluationExplanationSpec
+ .Builder,
+ com.google.cloud.aiplatform.v1beta1.ModelEvaluation
+ .ModelEvaluationExplanationSpecOrBuilder>(
+ explanationSpecs_,
+ ((bitField0_ & 0x00000002) != 0),
+ getParentForChildren(),
+ isClean());
+ explanationSpecs_ = null;
+ }
+ return explanationSpecsBuilder_;
+ }
+
@java.lang.Override
public final Builder setUnknownFields(final com.google.protobuf.UnknownFieldSet unknownFields) {
return super.setUnknownFields(unknownFields);
diff --git a/proto-google-cloud-aiplatform-v1beta1/src/main/java/com/google/cloud/aiplatform/v1beta1/ModelEvaluationOrBuilder.java b/proto-google-cloud-aiplatform-v1beta1/src/main/java/com/google/cloud/aiplatform/v1beta1/ModelEvaluationOrBuilder.java
index 57cf0caf2..40c7fdd28 100644
--- a/proto-google-cloud-aiplatform-v1beta1/src/main/java/com/google/cloud/aiplatform/v1beta1/ModelEvaluationOrBuilder.java
+++ b/proto-google-cloud-aiplatform-v1beta1/src/main/java/com/google/cloud/aiplatform/v1beta1/ModelEvaluationOrBuilder.java
@@ -264,4 +264,77 @@ public interface ModelEvaluationOrBuilder
*
*/
com.google.cloud.aiplatform.v1beta1.ModelExplanationOrBuilder getModelExplanationOrBuilder();
+
+ /**
+ *
+ *
+ *
+ * Output only. Describes the values of [ExplanationSpec][google.cloud.aiplatform.v1beta1.ExplanationSpec] that are used for explaining
+ * the predicted values on the evaluated data.
+ *
+ *
+ *
+ * repeated .google.cloud.aiplatform.v1beta1.ModelEvaluation.ModelEvaluationExplanationSpec explanation_specs = 9 [(.google.api.field_behavior) = OUTPUT_ONLY];
+ *
+ */
+ java.util.List
+ * Output only. Describes the values of [ExplanationSpec][google.cloud.aiplatform.v1beta1.ExplanationSpec] that are used for explaining
+ * the predicted values on the evaluated data.
+ *
+ *
+ *
+ * repeated .google.cloud.aiplatform.v1beta1.ModelEvaluation.ModelEvaluationExplanationSpec explanation_specs = 9 [(.google.api.field_behavior) = OUTPUT_ONLY];
+ *
+ */
+ com.google.cloud.aiplatform.v1beta1.ModelEvaluation.ModelEvaluationExplanationSpec
+ getExplanationSpecs(int index);
+ /**
+ *
+ *
+ *
+ * Output only. Describes the values of [ExplanationSpec][google.cloud.aiplatform.v1beta1.ExplanationSpec] that are used for explaining
+ * the predicted values on the evaluated data.
+ *
+ *
+ *
+ * repeated .google.cloud.aiplatform.v1beta1.ModelEvaluation.ModelEvaluationExplanationSpec explanation_specs = 9 [(.google.api.field_behavior) = OUTPUT_ONLY];
+ *
+ */
+ int getExplanationSpecsCount();
+ /**
+ *
+ *
+ *
+ * Output only. Describes the values of [ExplanationSpec][google.cloud.aiplatform.v1beta1.ExplanationSpec] that are used for explaining
+ * the predicted values on the evaluated data.
+ *
+ *
+ *
+ * repeated .google.cloud.aiplatform.v1beta1.ModelEvaluation.ModelEvaluationExplanationSpec explanation_specs = 9 [(.google.api.field_behavior) = OUTPUT_ONLY];
+ *
+ */
+ java.util.List<
+ ? extends
+ com.google.cloud.aiplatform.v1beta1.ModelEvaluation
+ .ModelEvaluationExplanationSpecOrBuilder>
+ getExplanationSpecsOrBuilderList();
+ /**
+ *
+ *
+ *
+ * Output only. Describes the values of [ExplanationSpec][google.cloud.aiplatform.v1beta1.ExplanationSpec] that are used for explaining
+ * the predicted values on the evaluated data.
+ *
+ *
+ *
+ * repeated .google.cloud.aiplatform.v1beta1.ModelEvaluation.ModelEvaluationExplanationSpec explanation_specs = 9 [(.google.api.field_behavior) = OUTPUT_ONLY];
+ *
+ */
+ com.google.cloud.aiplatform.v1beta1.ModelEvaluation.ModelEvaluationExplanationSpecOrBuilder
+ getExplanationSpecsOrBuilder(int index);
}
diff --git a/proto-google-cloud-aiplatform-v1beta1/src/main/java/com/google/cloud/aiplatform/v1beta1/ModelEvaluationProto.java b/proto-google-cloud-aiplatform-v1beta1/src/main/java/com/google/cloud/aiplatform/v1beta1/ModelEvaluationProto.java
index bca98ace2..b219e56e7 100644
--- a/proto-google-cloud-aiplatform-v1beta1/src/main/java/com/google/cloud/aiplatform/v1beta1/ModelEvaluationProto.java
+++ b/proto-google-cloud-aiplatform-v1beta1/src/main/java/com/google/cloud/aiplatform/v1beta1/ModelEvaluationProto.java
@@ -31,6 +31,10 @@ public static void registerAllExtensions(com.google.protobuf.ExtensionRegistry r
internal_static_google_cloud_aiplatform_v1beta1_ModelEvaluation_descriptor;
static final com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
internal_static_google_cloud_aiplatform_v1beta1_ModelEvaluation_fieldAccessorTable;
+ static final com.google.protobuf.Descriptors.Descriptor
+ internal_static_google_cloud_aiplatform_v1beta1_ModelEvaluation_ModelEvaluationExplanationSpec_descriptor;
+ static final com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
+ internal_static_google_cloud_aiplatform_v1beta1_ModelEvaluation_ModelEvaluationExplanationSpec_fieldAccessorTable;
public static com.google.protobuf.Descriptors.FileDescriptor getDescriptor() {
return descriptor;
@@ -47,21 +51,27 @@ public static com.google.protobuf.Descriptors.FileDescriptor getDescriptor() {
+ "oud/aiplatform/v1beta1/explanation.proto"
+ "\032\034google/protobuf/struct.proto\032\037google/p"
+ "rotobuf/timestamp.proto\032\034google/api/anno"
- + "tations.proto\"\234\003\n\017ModelEvaluation\022\021\n\004nam"
+ + "tations.proto\"\226\005\n\017ModelEvaluation\022\021\n\004nam"
+ "e\030\001 \001(\tB\003\340A\003\022\037\n\022metrics_schema_uri\030\002 \001(\t"
+ "B\003\340A\003\022,\n\007metrics\030\003 \001(\0132\026.google.protobuf"
+ ".ValueB\003\340A\003\0224\n\013create_time\030\004 \001(\0132\032.googl"
+ "e.protobuf.TimestampB\003\340A\003\022\035\n\020slice_dimen"
+ "sions\030\005 \003(\tB\003\340A\003\022Q\n\021model_explanation\030\010 "
+ "\001(\01321.google.cloud.aiplatform.v1beta1.Mo"
- + "delExplanationB\003\340A\003:\177\352A|\n)aiplatform.goo"
- + "gleapis.com/ModelEvaluation\022Oprojects/{p"
- + "roject}/locations/{location}/models/{mod"
- + "el}/evaluations/{evaluation}B\210\001\n#com.goo"
- + "gle.cloud.aiplatform.v1beta1B\024ModelEvalu"
- + "ationProtoP\001ZIgoogle.golang.org/genproto"
- + "/googleapis/cloud/aiplatform/v1beta1;aip"
- + "latformb\006proto3"
+ + "delExplanationB\003\340A\003\022o\n\021explanation_specs"
+ + "\030\t \003(\0132O.google.cloud.aiplatform.v1beta1"
+ + ".ModelEvaluation.ModelEvaluationExplanat"
+ + "ionSpecB\003\340A\003\032\206\001\n\036ModelEvaluationExplanat"
+ + "ionSpec\022\030\n\020explanation_type\030\001 \001(\t\022J\n\020exp"
+ + "lanation_spec\030\002 \001(\01320.google.cloud.aipla"
+ + "tform.v1beta1.ExplanationSpec:\177\352A|\n)aipl"
+ + "atform.googleapis.com/ModelEvaluation\022Op"
+ + "rojects/{project}/locations/{location}/m"
+ + "odels/{model}/evaluations/{evaluation}B\210"
+ + "\001\n#com.google.cloud.aiplatform.v1beta1B\024"
+ + "ModelEvaluationProtoP\001ZIgoogle.golang.or"
+ + "g/genproto/googleapis/cloud/aiplatform/v"
+ + "1beta1;aiplatformb\006proto3"
};
descriptor =
com.google.protobuf.Descriptors.FileDescriptor.internalBuildGeneratedFileFrom(
@@ -86,6 +96,17 @@ public static com.google.protobuf.Descriptors.FileDescriptor getDescriptor() {
"CreateTime",
"SliceDimensions",
"ModelExplanation",
+ "ExplanationSpecs",
+ });
+ internal_static_google_cloud_aiplatform_v1beta1_ModelEvaluation_ModelEvaluationExplanationSpec_descriptor =
+ internal_static_google_cloud_aiplatform_v1beta1_ModelEvaluation_descriptor
+ .getNestedTypes()
+ .get(0);
+ internal_static_google_cloud_aiplatform_v1beta1_ModelEvaluation_ModelEvaluationExplanationSpec_fieldAccessorTable =
+ new com.google.protobuf.GeneratedMessageV3.FieldAccessorTable(
+ internal_static_google_cloud_aiplatform_v1beta1_ModelEvaluation_ModelEvaluationExplanationSpec_descriptor,
+ new java.lang.String[] {
+ "ExplanationType", "ExplanationSpec",
});
com.google.protobuf.ExtensionRegistry registry =
com.google.protobuf.ExtensionRegistry.newInstance();
diff --git a/proto-google-cloud-aiplatform-v1beta1/src/main/java/com/google/cloud/aiplatform/v1beta1/ModelExplanation.java b/proto-google-cloud-aiplatform-v1beta1/src/main/java/com/google/cloud/aiplatform/v1beta1/ModelExplanation.java
index 7afe891e0..42b2d89da 100644
--- a/proto-google-cloud-aiplatform-v1beta1/src/main/java/com/google/cloud/aiplatform/v1beta1/ModelExplanation.java
+++ b/proto-google-cloud-aiplatform-v1beta1/src/main/java/com/google/cloud/aiplatform/v1beta1/ModelExplanation.java
@@ -126,7 +126,7 @@ public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
*
*
*
- * Output only. Aggregated attributions explaning the Model's prediction outputs over the
+ * Output only. Aggregated attributions explaining the Model's prediction outputs over the
* set of instances. The attributions are grouped by outputs.
* For Models that predict only one output, such as regression Models that
* predict only one score, there is only one attibution that explains the
@@ -155,7 +155,7 @@ public java.util.List
- * Output only. Aggregated attributions explaning the Model's prediction outputs over the
+ * Output only. Aggregated attributions explaining the Model's prediction outputs over the
* set of instances. The attributions are grouped by outputs.
* For Models that predict only one output, such as regression Models that
* predict only one score, there is only one attibution that explains the
@@ -185,7 +185,7 @@ public java.util.List
- * Output only. Aggregated attributions explaning the Model's prediction outputs over the
+ * Output only. Aggregated attributions explaining the Model's prediction outputs over the
* set of instances. The attributions are grouped by outputs.
* For Models that predict only one output, such as regression Models that
* predict only one score, there is only one attibution that explains the
@@ -214,7 +214,7 @@ public int getMeanAttributionsCount() {
*
*
*
- * Output only. Aggregated attributions explaning the Model's prediction outputs over the
+ * Output only. Aggregated attributions explaining the Model's prediction outputs over the
* set of instances. The attributions are grouped by outputs.
* For Models that predict only one output, such as regression Models that
* predict only one score, there is only one attibution that explains the
@@ -243,7 +243,7 @@ public com.google.cloud.aiplatform.v1beta1.Attribution getMeanAttributions(int i
*
*
*
- * Output only. Aggregated attributions explaning the Model's prediction outputs over the
+ * Output only. Aggregated attributions explaining the Model's prediction outputs over the
* set of instances. The attributions are grouped by outputs.
* For Models that predict only one output, such as regression Models that
* predict only one score, there is only one attibution that explains the
@@ -652,7 +652,7 @@ private void ensureMeanAttributionsIsMutable() {
*
*
*
- * Output only. Aggregated attributions explaning the Model's prediction outputs over the
+ * Output only. Aggregated attributions explaining the Model's prediction outputs over the
* set of instances. The attributions are grouped by outputs.
* For Models that predict only one output, such as regression Models that
* predict only one score, there is only one attibution that explains the
@@ -685,7 +685,7 @@ private void ensureMeanAttributionsIsMutable() {
*
*
*
- * Output only. Aggregated attributions explaning the Model's prediction outputs over the
+ * Output only. Aggregated attributions explaining the Model's prediction outputs over the
* set of instances. The attributions are grouped by outputs.
* For Models that predict only one output, such as regression Models that
* predict only one score, there is only one attibution that explains the
@@ -717,7 +717,7 @@ public int getMeanAttributionsCount() {
*
*
*
- * Output only. Aggregated attributions explaning the Model's prediction outputs over the
+ * Output only. Aggregated attributions explaining the Model's prediction outputs over the
* set of instances. The attributions are grouped by outputs.
* For Models that predict only one output, such as regression Models that
* predict only one score, there is only one attibution that explains the
@@ -749,7 +749,7 @@ public com.google.cloud.aiplatform.v1beta1.Attribution getMeanAttributions(int i
*
*
*
- * Output only. Aggregated attributions explaning the Model's prediction outputs over the
+ * Output only. Aggregated attributions explaining the Model's prediction outputs over the
* set of instances. The attributions are grouped by outputs.
* For Models that predict only one output, such as regression Models that
* predict only one score, there is only one attibution that explains the
@@ -788,7 +788,7 @@ public Builder setMeanAttributions(
*
*
*
- * Output only. Aggregated attributions explaning the Model's prediction outputs over the
+ * Output only. Aggregated attributions explaining the Model's prediction outputs over the
* set of instances. The attributions are grouped by outputs.
* For Models that predict only one output, such as regression Models that
* predict only one score, there is only one attibution that explains the
@@ -824,7 +824,7 @@ public Builder setMeanAttributions(
*
*
*
- * Output only. Aggregated attributions explaning the Model's prediction outputs over the
+ * Output only. Aggregated attributions explaining the Model's prediction outputs over the
* set of instances. The attributions are grouped by outputs.
* For Models that predict only one output, such as regression Models that
* predict only one score, there is only one attibution that explains the
@@ -862,7 +862,7 @@ public Builder addMeanAttributions(com.google.cloud.aiplatform.v1beta1.Attributi
*
*
*
- * Output only. Aggregated attributions explaning the Model's prediction outputs over the
+ * Output only. Aggregated attributions explaining the Model's prediction outputs over the
* set of instances. The attributions are grouped by outputs.
* For Models that predict only one output, such as regression Models that
* predict only one score, there is only one attibution that explains the
@@ -901,7 +901,7 @@ public Builder addMeanAttributions(
*
*
*
- * Output only. Aggregated attributions explaning the Model's prediction outputs over the
+ * Output only. Aggregated attributions explaining the Model's prediction outputs over the
* set of instances. The attributions are grouped by outputs.
* For Models that predict only one output, such as regression Models that
* predict only one score, there is only one attibution that explains the
@@ -937,7 +937,7 @@ public Builder addMeanAttributions(
*
*
*
- * Output only. Aggregated attributions explaning the Model's prediction outputs over the
+ * Output only. Aggregated attributions explaining the Model's prediction outputs over the
* set of instances. The attributions are grouped by outputs.
* For Models that predict only one output, such as regression Models that
* predict only one score, there is only one attibution that explains the
@@ -973,7 +973,7 @@ public Builder addMeanAttributions(
*
*
*
- * Output only. Aggregated attributions explaning the Model's prediction outputs over the
+ * Output only. Aggregated attributions explaining the Model's prediction outputs over the
* set of instances. The attributions are grouped by outputs.
* For Models that predict only one output, such as regression Models that
* predict only one score, there is only one attibution that explains the
@@ -1009,7 +1009,7 @@ public Builder addAllMeanAttributions(
*
*
*
- * Output only. Aggregated attributions explaning the Model's prediction outputs over the
+ * Output only. Aggregated attributions explaining the Model's prediction outputs over the
* set of instances. The attributions are grouped by outputs.
* For Models that predict only one output, such as regression Models that
* predict only one score, there is only one attibution that explains the
@@ -1044,7 +1044,7 @@ public Builder clearMeanAttributions() {
*
*
*
- * Output only. Aggregated attributions explaning the Model's prediction outputs over the
+ * Output only. Aggregated attributions explaining the Model's prediction outputs over the
* set of instances. The attributions are grouped by outputs.
* For Models that predict only one output, such as regression Models that
* predict only one score, there is only one attibution that explains the
@@ -1079,7 +1079,7 @@ public Builder removeMeanAttributions(int index) {
*
*
*
- * Output only. Aggregated attributions explaning the Model's prediction outputs over the
+ * Output only. Aggregated attributions explaining the Model's prediction outputs over the
* set of instances. The attributions are grouped by outputs.
* For Models that predict only one output, such as regression Models that
* predict only one score, there is only one attibution that explains the
@@ -1108,7 +1108,7 @@ public com.google.cloud.aiplatform.v1beta1.Attribution.Builder getMeanAttributio
*
*
*
- * Output only. Aggregated attributions explaning the Model's prediction outputs over the
+ * Output only. Aggregated attributions explaining the Model's prediction outputs over the
* set of instances. The attributions are grouped by outputs.
* For Models that predict only one output, such as regression Models that
* predict only one score, there is only one attibution that explains the
@@ -1141,7 +1141,7 @@ public com.google.cloud.aiplatform.v1beta1.AttributionOrBuilder getMeanAttributi
*
*
*
- * Output only. Aggregated attributions explaning the Model's prediction outputs over the
+ * Output only. Aggregated attributions explaining the Model's prediction outputs over the
* set of instances. The attributions are grouped by outputs.
* For Models that predict only one output, such as regression Models that
* predict only one score, there is only one attibution that explains the
@@ -1174,7 +1174,7 @@ public com.google.cloud.aiplatform.v1beta1.AttributionOrBuilder getMeanAttributi
*
*
*
- * Output only. Aggregated attributions explaning the Model's prediction outputs over the
+ * Output only. Aggregated attributions explaining the Model's prediction outputs over the
* set of instances. The attributions are grouped by outputs.
* For Models that predict only one output, such as regression Models that
* predict only one score, there is only one attibution that explains the
@@ -1203,7 +1203,7 @@ public com.google.cloud.aiplatform.v1beta1.Attribution.Builder addMeanAttributio
*
*
*
- * Output only. Aggregated attributions explaning the Model's prediction outputs over the
+ * Output only. Aggregated attributions explaining the Model's prediction outputs over the
* set of instances. The attributions are grouped by outputs.
* For Models that predict only one output, such as regression Models that
* predict only one score, there is only one attibution that explains the
@@ -1233,7 +1233,7 @@ public com.google.cloud.aiplatform.v1beta1.Attribution.Builder addMeanAttributio
*
*
*
- * Output only. Aggregated attributions explaning the Model's prediction outputs over the
+ * Output only. Aggregated attributions explaining the Model's prediction outputs over the
* set of instances. The attributions are grouped by outputs.
* For Models that predict only one output, such as regression Models that
* predict only one score, there is only one attibution that explains the
diff --git a/proto-google-cloud-aiplatform-v1beta1/src/main/java/com/google/cloud/aiplatform/v1beta1/ModelExplanationOrBuilder.java b/proto-google-cloud-aiplatform-v1beta1/src/main/java/com/google/cloud/aiplatform/v1beta1/ModelExplanationOrBuilder.java
index 1ab25e538..8871d76a7 100644
--- a/proto-google-cloud-aiplatform-v1beta1/src/main/java/com/google/cloud/aiplatform/v1beta1/ModelExplanationOrBuilder.java
+++ b/proto-google-cloud-aiplatform-v1beta1/src/main/java/com/google/cloud/aiplatform/v1beta1/ModelExplanationOrBuilder.java
@@ -27,7 +27,7 @@ public interface ModelExplanationOrBuilder
*
*
*
- * Output only. Aggregated attributions explaning the Model's prediction outputs over the
+ * Output only. Aggregated attributions explaining the Model's prediction outputs over the
* set of instances. The attributions are grouped by outputs.
* For Models that predict only one output, such as regression Models that
* predict only one score, there is only one attibution that explains the
@@ -53,7 +53,7 @@ public interface ModelExplanationOrBuilder
*
*
*
- * Output only. Aggregated attributions explaning the Model's prediction outputs over the
+ * Output only. Aggregated attributions explaining the Model's prediction outputs over the
* set of instances. The attributions are grouped by outputs.
* For Models that predict only one output, such as regression Models that
* predict only one score, there is only one attibution that explains the
@@ -79,7 +79,7 @@ public interface ModelExplanationOrBuilder
*
*
*
- * Output only. Aggregated attributions explaning the Model's prediction outputs over the
+ * Output only. Aggregated attributions explaining the Model's prediction outputs over the
* set of instances. The attributions are grouped by outputs.
* For Models that predict only one output, such as regression Models that
* predict only one score, there is only one attibution that explains the
@@ -105,7 +105,7 @@ public interface ModelExplanationOrBuilder
*
*
*
- * Output only. Aggregated attributions explaning the Model's prediction outputs over the
+ * Output only. Aggregated attributions explaining the Model's prediction outputs over the
* set of instances. The attributions are grouped by outputs.
* For Models that predict only one output, such as regression Models that
* predict only one score, there is only one attibution that explains the
@@ -132,7 +132,7 @@ public interface ModelExplanationOrBuilder
*
*
*
- * Output only. Aggregated attributions explaning the Model's prediction outputs over the
+ * Output only. Aggregated attributions explaining the Model's prediction outputs over the
* set of instances. The attributions are grouped by outputs.
* For Models that predict only one output, such as regression Models that
* predict only one score, there is only one attibution that explains the
diff --git a/proto-google-cloud-aiplatform-v1beta1/src/main/java/com/google/cloud/aiplatform/v1beta1/ModelOrBuilder.java b/proto-google-cloud-aiplatform-v1beta1/src/main/java/com/google/cloud/aiplatform/v1beta1/ModelOrBuilder.java
index b15effdb0..226bd2b21 100644
--- a/proto-google-cloud-aiplatform-v1beta1/src/main/java/com/google/cloud/aiplatform/v1beta1/ModelOrBuilder.java
+++ b/proto-google-cloud-aiplatform-v1beta1/src/main/java/com/google/cloud/aiplatform/v1beta1/ModelOrBuilder.java
@@ -559,6 +559,10 @@ public interface ModelOrBuilder
* * `bigquery`
* Each instance is a single row in BigQuery. Uses
* [BigQuerySource][google.cloud.aiplatform.v1beta1.BatchPredictionJob.InputConfig.bigquery_source].
+ * * `file-list`
+ * Each line of the file is the location of an instance to process, uses
+ * `gcs_source` field of the
+ * [InputConfig][google.cloud.aiplatform.v1beta1.BatchPredictionJob.InputConfig] object.
* If this Model doesn't support any of these formats it means it cannot be
* used with a [BatchPredictionJob][google.cloud.aiplatform.v1beta1.BatchPredictionJob]. However, if it has
* [supported_deployment_resources_types][google.cloud.aiplatform.v1beta1.Model.supported_deployment_resources_types], it could serve online
@@ -598,6 +602,10 @@ public interface ModelOrBuilder
* * `bigquery`
* Each instance is a single row in BigQuery. Uses
* [BigQuerySource][google.cloud.aiplatform.v1beta1.BatchPredictionJob.InputConfig.bigquery_source].
+ * * `file-list`
+ * Each line of the file is the location of an instance to process, uses
+ * `gcs_source` field of the
+ * [InputConfig][google.cloud.aiplatform.v1beta1.BatchPredictionJob.InputConfig] object.
* If this Model doesn't support any of these formats it means it cannot be
* used with a [BatchPredictionJob][google.cloud.aiplatform.v1beta1.BatchPredictionJob]. However, if it has
* [supported_deployment_resources_types][google.cloud.aiplatform.v1beta1.Model.supported_deployment_resources_types], it could serve online
@@ -637,6 +645,10 @@ public interface ModelOrBuilder
* * `bigquery`
* Each instance is a single row in BigQuery. Uses
* [BigQuerySource][google.cloud.aiplatform.v1beta1.BatchPredictionJob.InputConfig.bigquery_source].
+ * * `file-list`
+ * Each line of the file is the location of an instance to process, uses
+ * `gcs_source` field of the
+ * [InputConfig][google.cloud.aiplatform.v1beta1.BatchPredictionJob.InputConfig] object.
* If this Model doesn't support any of these formats it means it cannot be
* used with a [BatchPredictionJob][google.cloud.aiplatform.v1beta1.BatchPredictionJob]. However, if it has
* [supported_deployment_resources_types][google.cloud.aiplatform.v1beta1.Model.supported_deployment_resources_types], it could serve online
@@ -677,6 +689,10 @@ public interface ModelOrBuilder
* * `bigquery`
* Each instance is a single row in BigQuery. Uses
* [BigQuerySource][google.cloud.aiplatform.v1beta1.BatchPredictionJob.InputConfig.bigquery_source].
+ * * `file-list`
+ * Each line of the file is the location of an instance to process, uses
+ * `gcs_source` field of the
+ * [InputConfig][google.cloud.aiplatform.v1beta1.BatchPredictionJob.InputConfig] object.
* If this Model doesn't support any of these formats it means it cannot be
* used with a [BatchPredictionJob][google.cloud.aiplatform.v1beta1.BatchPredictionJob]. However, if it has
* [supported_deployment_resources_types][google.cloud.aiplatform.v1beta1.Model.supported_deployment_resources_types], it could serve online
@@ -996,19 +1012,20 @@ com.google.cloud.aiplatform.v1beta1.DeployedModelRefOrBuilder getDeployedModelsO
*
*
*
- * Output only. The default explanation specification for this Model.
- * Model can be used for [requesting explanation][google.cloud.aiplatform.v1beta1.PredictionService.Explain]
- * after being [deployed][google.cloud.aiplatform.v1beta1.EndpointService.DeployModel] iff it is populated.
+ * The default explanation specification for this Model.
+ * The Model can be used for [requesting
+ * explanation][PredictionService.Explain] after being
+ * [deployed][google.cloud.aiplatform.v1beta1.EndpointService.DeployModel] iff it is populated.
+ * The Model can be used for [batch
+ * explanation][BatchPredictionJob.generate_explanation] iff it is populated.
* All fields of the explanation_spec can be overridden by
* [explanation_spec][google.cloud.aiplatform.v1beta1.DeployedModel.explanation_spec] of
- * [DeployModelRequest.deployed_model][google.cloud.aiplatform.v1beta1.DeployModelRequest.deployed_model].
- * This field is populated only for tabular AutoML Models.
- * Specifying it with [ModelService.UploadModel][google.cloud.aiplatform.v1beta1.ModelService.UploadModel] is not supported.
+ * [DeployModelRequest.deployed_model][google.cloud.aiplatform.v1beta1.DeployModelRequest.deployed_model], or
+ * [explanation_spec][google.cloud.aiplatform.v1beta1.BatchPredictionJob.explanation_spec] of
+ * [BatchPredictionJob][google.cloud.aiplatform.v1beta1.BatchPredictionJob].
*
*
- *
- * .google.cloud.aiplatform.v1beta1.ExplanationSpec explanation_spec = 23 [(.google.api.field_behavior) = OUTPUT_ONLY];
- *
+ * .google.cloud.aiplatform.v1beta1.ExplanationSpec explanation_spec = 23;
*
* @return Whether the explanationSpec field is set.
*/
@@ -1017,19 +1034,20 @@ com.google.cloud.aiplatform.v1beta1.DeployedModelRefOrBuilder getDeployedModelsO
*
*
*
- * Output only. The default explanation specification for this Model.
- * Model can be used for [requesting explanation][google.cloud.aiplatform.v1beta1.PredictionService.Explain]
- * after being [deployed][google.cloud.aiplatform.v1beta1.EndpointService.DeployModel] iff it is populated.
+ * The default explanation specification for this Model.
+ * The Model can be used for [requesting
+ * explanation][PredictionService.Explain] after being
+ * [deployed][google.cloud.aiplatform.v1beta1.EndpointService.DeployModel] iff it is populated.
+ * The Model can be used for [batch
+ * explanation][BatchPredictionJob.generate_explanation] iff it is populated.
* All fields of the explanation_spec can be overridden by
* [explanation_spec][google.cloud.aiplatform.v1beta1.DeployedModel.explanation_spec] of
- * [DeployModelRequest.deployed_model][google.cloud.aiplatform.v1beta1.DeployModelRequest.deployed_model].
- * This field is populated only for tabular AutoML Models.
- * Specifying it with [ModelService.UploadModel][google.cloud.aiplatform.v1beta1.ModelService.UploadModel] is not supported.
+ * [DeployModelRequest.deployed_model][google.cloud.aiplatform.v1beta1.DeployModelRequest.deployed_model], or
+ * [explanation_spec][google.cloud.aiplatform.v1beta1.BatchPredictionJob.explanation_spec] of
+ * [BatchPredictionJob][google.cloud.aiplatform.v1beta1.BatchPredictionJob].
*
*
- *
- * .google.cloud.aiplatform.v1beta1.ExplanationSpec explanation_spec = 23 [(.google.api.field_behavior) = OUTPUT_ONLY];
- *
+ * .google.cloud.aiplatform.v1beta1.ExplanationSpec explanation_spec = 23;
*
* @return The explanationSpec.
*/
@@ -1038,19 +1056,20 @@ com.google.cloud.aiplatform.v1beta1.DeployedModelRefOrBuilder getDeployedModelsO
*
*
*
- * Output only. The default explanation specification for this Model.
- * Model can be used for [requesting explanation][google.cloud.aiplatform.v1beta1.PredictionService.Explain]
- * after being [deployed][google.cloud.aiplatform.v1beta1.EndpointService.DeployModel] iff it is populated.
+ * The default explanation specification for this Model.
+ * The Model can be used for [requesting
+ * explanation][PredictionService.Explain] after being
+ * [deployed][google.cloud.aiplatform.v1beta1.EndpointService.DeployModel] iff it is populated.
+ * The Model can be used for [batch
+ * explanation][BatchPredictionJob.generate_explanation] iff it is populated.
* All fields of the explanation_spec can be overridden by
* [explanation_spec][google.cloud.aiplatform.v1beta1.DeployedModel.explanation_spec] of
- * [DeployModelRequest.deployed_model][google.cloud.aiplatform.v1beta1.DeployModelRequest.deployed_model].
- * This field is populated only for tabular AutoML Models.
- * Specifying it with [ModelService.UploadModel][google.cloud.aiplatform.v1beta1.ModelService.UploadModel] is not supported.
+ * [DeployModelRequest.deployed_model][google.cloud.aiplatform.v1beta1.DeployModelRequest.deployed_model], or
+ * [explanation_spec][google.cloud.aiplatform.v1beta1.BatchPredictionJob.explanation_spec] of
+ * [BatchPredictionJob][google.cloud.aiplatform.v1beta1.BatchPredictionJob].
*
*
- *
- * .google.cloud.aiplatform.v1beta1.ExplanationSpec explanation_spec = 23 [(.google.api.field_behavior) = OUTPUT_ONLY];
- *
+ * .google.cloud.aiplatform.v1beta1.ExplanationSpec explanation_spec = 23;
*/
com.google.cloud.aiplatform.v1beta1.ExplanationSpecOrBuilder getExplanationSpecOrBuilder();
@@ -1154,4 +1173,42 @@ com.google.cloud.aiplatform.v1beta1.DeployedModelRefOrBuilder getDeployedModelsO
* map<string, string> labels = 17;
*/
java.lang.String getLabelsOrThrow(java.lang.String key);
+
+ /**
+ *
+ *
+ *
+ * Customer-managed encryption key spec for a Model. If set, this
+ * Model and all sub-resources of this Model will be secured by this key.
+ *
+ *
+ * .google.cloud.aiplatform.v1beta1.EncryptionSpec encryption_spec = 24;
+ *
+ * @return Whether the encryptionSpec field is set.
+ */
+ boolean hasEncryptionSpec();
+ /**
+ *
+ *
+ *
+ * Customer-managed encryption key spec for a Model. If set, this
+ * Model and all sub-resources of this Model will be secured by this key.
+ *
+ *
+ * .google.cloud.aiplatform.v1beta1.EncryptionSpec encryption_spec = 24;
+ *
+ * @return The encryptionSpec.
+ */
+ com.google.cloud.aiplatform.v1beta1.EncryptionSpec getEncryptionSpec();
+ /**
+ *
+ *
+ *
+ * Customer-managed encryption key spec for a Model. If set, this
+ * Model and all sub-resources of this Model will be secured by this key.
+ *
+ *
+ * .google.cloud.aiplatform.v1beta1.EncryptionSpec encryption_spec = 24;
+ */
+ com.google.cloud.aiplatform.v1beta1.EncryptionSpecOrBuilder getEncryptionSpecOrBuilder();
}
diff --git a/proto-google-cloud-aiplatform-v1beta1/src/main/java/com/google/cloud/aiplatform/v1beta1/ModelProto.java b/proto-google-cloud-aiplatform-v1beta1/src/main/java/com/google/cloud/aiplatform/v1beta1/ModelProto.java
index 76874664e..c8b0bde7f 100644
--- a/proto-google-cloud-aiplatform-v1beta1/src/main/java/com/google/cloud/aiplatform/v1beta1/ModelProto.java
+++ b/proto-google-cloud-aiplatform-v1beta1/src/main/java/com/google/cloud/aiplatform/v1beta1/ModelProto.java
@@ -66,64 +66,67 @@ public static com.google.protobuf.Descriptors.FileDescriptor getDescriptor() {
+ "api/resource.proto\032-google/cloud/aiplatf"
+ "orm/v1beta1/dataset.proto\0328google/cloud/"
+ "aiplatform/v1beta1/deployed_model_ref.pr"
- + "oto\032-google/cloud/aiplatform/v1beta1/env"
- + "_var.proto\0321google/cloud/aiplatform/v1be"
- + "ta1/explanation.proto\032\034google/protobuf/s"
- + "truct.proto\032\037google/protobuf/timestamp.p"
- + "roto\032\034google/api/annotations.proto\"\214\014\n\005M"
- + "odel\022\014\n\004name\030\001 \001(\t\022\031\n\014display_name\030\002 \001(\t"
- + "B\003\340A\002\022\023\n\013description\030\003 \001(\t\022J\n\020predict_sc"
- + "hemata\030\004 \001(\01320.google.cloud.aiplatform.v"
- + "1beta1.PredictSchemata\022 \n\023metadata_schem"
- + "a_uri\030\005 \001(\tB\003\340A\005\022-\n\010metadata\030\006 \001(\0132\026.goo"
- + "gle.protobuf.ValueB\003\340A\005\022Z\n\030supported_exp"
- + "ort_formats\030\024 \003(\01323.google.cloud.aiplatf"
- + "orm.v1beta1.Model.ExportFormatB\003\340A\003\022M\n\021t"
- + "raining_pipeline\030\007 \001(\tB2\340A\003\372A,\n*aiplatfo"
- + "rm.googleapis.com/TrainingPipeline\022P\n\016co"
- + "ntainer_spec\030\t \001(\01323.google.cloud.aiplat"
- + "form.v1beta1.ModelContainerSpecB\003\340A\004\022\031\n\014"
- + "artifact_uri\030\032 \001(\tB\003\340A\005\022q\n$supported_dep"
- + "loyment_resources_types\030\n \003(\0162>.google.c"
- + "loud.aiplatform.v1beta1.Model.Deployment"
- + "ResourcesTypeB\003\340A\003\022,\n\037supported_input_st"
- + "orage_formats\030\013 \003(\tB\003\340A\003\022-\n supported_ou"
- + "tput_storage_formats\030\014 \003(\tB\003\340A\003\0224\n\013creat"
- + "e_time\030\r \001(\0132\032.google.protobuf.Timestamp"
- + "B\003\340A\003\0224\n\013update_time\030\016 \001(\0132\032.google.prot"
- + "obuf.TimestampB\003\340A\003\022O\n\017deployed_models\030\017"
- + " \003(\01321.google.cloud.aiplatform.v1beta1.D"
- + "eployedModelRefB\003\340A\003\022O\n\020explanation_spec"
- + "\030\027 \001(\01320.google.cloud.aiplatform.v1beta1"
- + ".ExplanationSpecB\003\340A\003\022\014\n\004etag\030\020 \001(\t\022B\n\006l"
- + "abels\030\021 \003(\01322.google.cloud.aiplatform.v1"
- + "beta1.Model.LabelsEntry\032\332\001\n\014ExportFormat"
- + "\022\017\n\002id\030\001 \001(\tB\003\340A\003\022g\n\023exportable_contents"
- + "\030\002 \003(\0162E.google.cloud.aiplatform.v1beta1"
- + ".Model.ExportFormat.ExportableContentB\003\340"
- + "A\003\"P\n\021ExportableContent\022\"\n\036EXPORTABLE_CO"
- + "NTENT_UNSPECIFIED\020\000\022\014\n\010ARTIFACT\020\001\022\t\n\005IMA"
- + "GE\020\002\032-\n\013LabelsEntry\022\013\n\003key\030\001 \001(\t\022\r\n\005valu"
- + "e\030\002 \001(\t:\0028\001\"v\n\027DeploymentResourcesType\022)"
- + "\n%DEPLOYMENT_RESOURCES_TYPE_UNSPECIFIED\020"
- + "\000\022\027\n\023DEDICATED_RESOURCES\020\001\022\027\n\023AUTOMATIC_"
- + "RESOURCES\020\002:\\\352AY\n\037aiplatform.googleapis."
- + "com/Model\0226projects/{project}/locations/"
- + "{location}/models/{model}\"{\n\017PredictSche"
- + "mata\022 \n\023instance_schema_uri\030\001 \001(\tB\003\340A\005\022\""
- + "\n\025parameters_schema_uri\030\002 \001(\tB\003\340A\005\022\"\n\025pr"
- + "ediction_schema_uri\030\003 \001(\tB\003\340A\005\"\205\002\n\022Model"
- + "ContainerSpec\022\031\n\timage_uri\030\001 \001(\tB\006\340A\002\340A\005"
- + "\022\024\n\007command\030\002 \003(\tB\003\340A\005\022\021\n\004args\030\003 \003(\tB\003\340A"
- + "\005\0229\n\003env\030\004 \003(\0132\'.google.cloud.aiplatform"
- + ".v1beta1.EnvVarB\003\340A\005\0229\n\005ports\030\005 \003(\0132%.go"
- + "ogle.cloud.aiplatform.v1beta1.PortB\003\340A\005\022"
- + "\032\n\rpredict_route\030\006 \001(\tB\003\340A\005\022\031\n\014health_ro"
- + "ute\030\007 \001(\tB\003\340A\005\"\036\n\004Port\022\026\n\016container_port"
- + "\030\003 \001(\005B~\n#com.google.cloud.aiplatform.v1"
- + "beta1B\nModelProtoP\001ZIgoogle.golang.org/g"
- + "enproto/googleapis/cloud/aiplatform/v1be"
- + "ta1;aiplatformb\006proto3"
+ + "oto\0325google/cloud/aiplatform/v1beta1/enc"
+ + "ryption_spec.proto\032-google/cloud/aiplatf"
+ + "orm/v1beta1/env_var.proto\0321google/cloud/"
+ + "aiplatform/v1beta1/explanation.proto\032\034go"
+ + "ogle/protobuf/struct.proto\032\037google/proto"
+ + "buf/timestamp.proto\032\034google/api/annotati"
+ + "ons.proto\"\321\014\n\005Model\022\014\n\004name\030\001 \001(\t\022\031\n\014dis"
+ + "play_name\030\002 \001(\tB\003\340A\002\022\023\n\013description\030\003 \001("
+ + "\t\022J\n\020predict_schemata\030\004 \001(\01320.google.clo"
+ + "ud.aiplatform.v1beta1.PredictSchemata\022 \n"
+ + "\023metadata_schema_uri\030\005 \001(\tB\003\340A\005\022-\n\010metad"
+ + "ata\030\006 \001(\0132\026.google.protobuf.ValueB\003\340A\005\022Z"
+ + "\n\030supported_export_formats\030\024 \003(\01323.googl"
+ + "e.cloud.aiplatform.v1beta1.Model.ExportF"
+ + "ormatB\003\340A\003\022M\n\021training_pipeline\030\007 \001(\tB2\340"
+ + "A\003\372A,\n*aiplatform.googleapis.com/Trainin"
+ + "gPipeline\022P\n\016container_spec\030\t \001(\01323.goog"
+ + "le.cloud.aiplatform.v1beta1.ModelContain"
+ + "erSpecB\003\340A\004\022\031\n\014artifact_uri\030\032 \001(\tB\003\340A\005\022q"
+ + "\n$supported_deployment_resources_types\030\n"
+ + " \003(\0162>.google.cloud.aiplatform.v1beta1.M"
+ + "odel.DeploymentResourcesTypeB\003\340A\003\022,\n\037sup"
+ + "ported_input_storage_formats\030\013 \003(\tB\003\340A\003\022"
+ + "-\n supported_output_storage_formats\030\014 \003("
+ + "\tB\003\340A\003\0224\n\013create_time\030\r \001(\0132\032.google.pro"
+ + "tobuf.TimestampB\003\340A\003\0224\n\013update_time\030\016 \001("
+ + "\0132\032.google.protobuf.TimestampB\003\340A\003\022O\n\017de"
+ + "ployed_models\030\017 \003(\01321.google.cloud.aipla"
+ + "tform.v1beta1.DeployedModelRefB\003\340A\003\022J\n\020e"
+ + "xplanation_spec\030\027 \001(\01320.google.cloud.aip"
+ + "latform.v1beta1.ExplanationSpec\022\014\n\004etag\030"
+ + "\020 \001(\t\022B\n\006labels\030\021 \003(\01322.google.cloud.aip"
+ + "latform.v1beta1.Model.LabelsEntry\022H\n\017enc"
+ + "ryption_spec\030\030 \001(\0132/.google.cloud.aiplat"
+ + "form.v1beta1.EncryptionSpec\032\332\001\n\014ExportFo"
+ + "rmat\022\017\n\002id\030\001 \001(\tB\003\340A\003\022g\n\023exportable_cont"
+ + "ents\030\002 \003(\0162E.google.cloud.aiplatform.v1b"
+ + "eta1.Model.ExportFormat.ExportableConten"
+ + "tB\003\340A\003\"P\n\021ExportableContent\022\"\n\036EXPORTABL"
+ + "E_CONTENT_UNSPECIFIED\020\000\022\014\n\010ARTIFACT\020\001\022\t\n"
+ + "\005IMAGE\020\002\032-\n\013LabelsEntry\022\013\n\003key\030\001 \001(\t\022\r\n\005"
+ + "value\030\002 \001(\t:\0028\001\"v\n\027DeploymentResourcesTy"
+ + "pe\022)\n%DEPLOYMENT_RESOURCES_TYPE_UNSPECIF"
+ + "IED\020\000\022\027\n\023DEDICATED_RESOURCES\020\001\022\027\n\023AUTOMA"
+ + "TIC_RESOURCES\020\002:\\\352AY\n\037aiplatform.googlea"
+ + "pis.com/Model\0226projects/{project}/locati"
+ + "ons/{location}/models/{model}\"{\n\017Predict"
+ + "Schemata\022 \n\023instance_schema_uri\030\001 \001(\tB\003\340"
+ + "A\005\022\"\n\025parameters_schema_uri\030\002 \001(\tB\003\340A\005\022\""
+ + "\n\025prediction_schema_uri\030\003 \001(\tB\003\340A\005\"\205\002\n\022M"
+ + "odelContainerSpec\022\031\n\timage_uri\030\001 \001(\tB\006\340A"
+ + "\002\340A\005\022\024\n\007command\030\002 \003(\tB\003\340A\005\022\021\n\004args\030\003 \003(\t"
+ + "B\003\340A\005\0229\n\003env\030\004 \003(\0132\'.google.cloud.aiplat"
+ + "form.v1beta1.EnvVarB\003\340A\005\0229\n\005ports\030\005 \003(\0132"
+ + "%.google.cloud.aiplatform.v1beta1.PortB\003"
+ + "\340A\005\022\032\n\rpredict_route\030\006 \001(\tB\003\340A\005\022\031\n\014healt"
+ + "h_route\030\007 \001(\tB\003\340A\005\"\036\n\004Port\022\026\n\016container_"
+ + "port\030\003 \001(\005B~\n#com.google.cloud.aiplatfor"
+ + "m.v1beta1B\nModelProtoP\001ZIgoogle.golang.o"
+ + "rg/genproto/googleapis/cloud/aiplatform/"
+ + "v1beta1;aiplatformb\006proto3"
};
descriptor =
com.google.protobuf.Descriptors.FileDescriptor.internalBuildGeneratedFileFrom(
@@ -133,6 +136,7 @@ public static com.google.protobuf.Descriptors.FileDescriptor getDescriptor() {
com.google.api.ResourceProto.getDescriptor(),
com.google.cloud.aiplatform.v1beta1.DatasetProto.getDescriptor(),
com.google.cloud.aiplatform.v1beta1.DeployedModelNameProto.getDescriptor(),
+ com.google.cloud.aiplatform.v1beta1.EncryptionSpecProto.getDescriptor(),
com.google.cloud.aiplatform.v1beta1.EnvVarProto.getDescriptor(),
com.google.cloud.aiplatform.v1beta1.ExplanationProto.getDescriptor(),
com.google.protobuf.StructProto.getDescriptor(),
@@ -164,6 +168,7 @@ public static com.google.protobuf.Descriptors.FileDescriptor getDescriptor() {
"ExplanationSpec",
"Etag",
"Labels",
+ "EncryptionSpec",
});
internal_static_google_cloud_aiplatform_v1beta1_Model_ExportFormat_descriptor =
internal_static_google_cloud_aiplatform_v1beta1_Model_descriptor.getNestedTypes().get(0);
@@ -216,6 +221,7 @@ public static com.google.protobuf.Descriptors.FileDescriptor getDescriptor() {
com.google.api.ResourceProto.getDescriptor();
com.google.cloud.aiplatform.v1beta1.DatasetProto.getDescriptor();
com.google.cloud.aiplatform.v1beta1.DeployedModelNameProto.getDescriptor();
+ com.google.cloud.aiplatform.v1beta1.EncryptionSpecProto.getDescriptor();
com.google.cloud.aiplatform.v1beta1.EnvVarProto.getDescriptor();
com.google.cloud.aiplatform.v1beta1.ExplanationProto.getDescriptor();
com.google.protobuf.StructProto.getDescriptor();
diff --git a/proto-google-cloud-aiplatform-v1beta1/src/main/java/com/google/cloud/aiplatform/v1beta1/PredictionServiceProto.java b/proto-google-cloud-aiplatform-v1beta1/src/main/java/com/google/cloud/aiplatform/v1beta1/PredictionServiceProto.java
index b8d6202ed..317cc7b1d 100644
--- a/proto-google-cloud-aiplatform-v1beta1/src/main/java/com/google/cloud/aiplatform/v1beta1/PredictionServiceProto.java
+++ b/proto-google-cloud-aiplatform-v1beta1/src/main/java/com/google/cloud/aiplatform/v1beta1/PredictionServiceProto.java
@@ -65,34 +65,36 @@ public static com.google.protobuf.Descriptors.FileDescriptor getDescriptor() {
+ "tobuf.ValueB\003\340A\002\022*\n\nparameters\030\003 \001(\0132\026.g"
+ "oogle.protobuf.Value\"Y\n\017PredictResponse\022"
+ "+\n\013predictions\030\001 \003(\0132\026.google.protobuf.V"
- + "alue\022\031\n\021deployed_model_id\030\002 \001(\t\"\305\001\n\016Expl"
+ + "alue\022\031\n\021deployed_model_id\030\002 \001(\t\"\242\002\n\016Expl"
+ "ainRequest\022<\n\010endpoint\030\001 \001(\tB*\340A\002\372A$\n\"ai"
+ "platform.googleapis.com/Endpoint\022.\n\tinst"
+ "ances\030\002 \003(\0132\026.google.protobuf.ValueB\003\340A\002"
+ "\022*\n\nparameters\030\004 \001(\0132\026.google.protobuf.V"
- + "alue\022\031\n\021deployed_model_id\030\003 \001(\t\"\235\001\n\017Expl"
- + "ainResponse\022B\n\014explanations\030\001 \003(\0132,.goog"
- + "le.cloud.aiplatform.v1beta1.Explanation\022"
- + "\031\n\021deployed_model_id\030\002 \001(\t\022+\n\013prediction"
- + "s\030\003 \003(\0132\026.google.protobuf.Value2\250\004\n\021Pred"
- + "ictionService\022\327\001\n\007Predict\022/.google.cloud"
- + ".aiplatform.v1beta1.PredictRequest\0320.goo"
- + "gle.cloud.aiplatform.v1beta1.PredictResp"
- + "onse\"i\202\323\344\223\002C\">/v1beta1/{endpoint=project"
- + "s/*/locations/*/endpoints/*}:predict:\001*\332"
- + "A\035endpoint,instances,parameters\022\351\001\n\007Expl"
- + "ain\022/.google.cloud.aiplatform.v1beta1.Ex"
- + "plainRequest\0320.google.cloud.aiplatform.v"
- + "1beta1.ExplainResponse\"{\202\323\344\223\002C\">/v1beta1"
- + "/{endpoint=projects/*/locations/*/endpoi"
- + "nts/*}:explain:\001*\332A/endpoint,instances,p"
- + "arameters,deployed_model_id\032M\312A\031aiplatfo"
- + "rm.googleapis.com\322A.https://www.googleap"
- + "is.com/auth/cloud-platformB\212\001\n#com.googl"
- + "e.cloud.aiplatform.v1beta1B\026PredictionSe"
- + "rviceProtoP\001ZIgoogle.golang.org/genproto"
- + "/googleapis/cloud/aiplatform/v1beta1;aip"
- + "latformb\006proto3"
+ + "alue\022[\n\031explanation_spec_override\030\005 \001(\0132"
+ + "8.google.cloud.aiplatform.v1beta1.Explan"
+ + "ationSpecOverride\022\031\n\021deployed_model_id\030\003"
+ + " \001(\t\"\235\001\n\017ExplainResponse\022B\n\014explanations"
+ + "\030\001 \003(\0132,.google.cloud.aiplatform.v1beta1"
+ + ".Explanation\022\031\n\021deployed_model_id\030\002 \001(\t\022"
+ + "+\n\013predictions\030\003 \003(\0132\026.google.protobuf.V"
+ + "alue2\250\004\n\021PredictionService\022\327\001\n\007Predict\022/"
+ + ".google.cloud.aiplatform.v1beta1.Predict"
+ + "Request\0320.google.cloud.aiplatform.v1beta"
+ + "1.PredictResponse\"i\202\323\344\223\002C\">/v1beta1/{end"
+ + "point=projects/*/locations/*/endpoints/*"
+ + "}:predict:\001*\332A\035endpoint,instances,parame"
+ + "ters\022\351\001\n\007Explain\022/.google.cloud.aiplatfo"
+ + "rm.v1beta1.ExplainRequest\0320.google.cloud"
+ + ".aiplatform.v1beta1.ExplainResponse\"{\202\323\344"
+ + "\223\002C\">/v1beta1/{endpoint=projects/*/locat"
+ + "ions/*/endpoints/*}:explain:\001*\332A/endpoin"
+ + "t,instances,parameters,deployed_model_id"
+ + "\032M\312A\031aiplatform.googleapis.com\322A.https:/"
+ + "/www.googleapis.com/auth/cloud-platformB"
+ + "\212\001\n#com.google.cloud.aiplatform.v1beta1B"
+ + "\026PredictionServiceProtoP\001ZIgoogle.golang"
+ + ".org/genproto/googleapis/cloud/aiplatfor"
+ + "m/v1beta1;aiplatformb\006proto3"
};
descriptor =
com.google.protobuf.Descriptors.FileDescriptor.internalBuildGeneratedFileFrom(
@@ -127,7 +129,7 @@ public static com.google.protobuf.Descriptors.FileDescriptor getDescriptor() {
new com.google.protobuf.GeneratedMessageV3.FieldAccessorTable(
internal_static_google_cloud_aiplatform_v1beta1_ExplainRequest_descriptor,
new java.lang.String[] {
- "Endpoint", "Instances", "Parameters", "DeployedModelId",
+ "Endpoint", "Instances", "Parameters", "ExplanationSpecOverride", "DeployedModelId",
});
internal_static_google_cloud_aiplatform_v1beta1_ExplainResponse_descriptor =
getDescriptor().getMessageTypes().get(3);
diff --git a/proto-google-cloud-aiplatform-v1beta1/src/main/java/com/google/cloud/aiplatform/v1beta1/SearchMigratableResourcesRequest.java b/proto-google-cloud-aiplatform-v1beta1/src/main/java/com/google/cloud/aiplatform/v1beta1/SearchMigratableResourcesRequest.java
index 53ac48b90..281e54cbc 100644
--- a/proto-google-cloud-aiplatform-v1beta1/src/main/java/com/google/cloud/aiplatform/v1beta1/SearchMigratableResourcesRequest.java
+++ b/proto-google-cloud-aiplatform-v1beta1/src/main/java/com/google/cloud/aiplatform/v1beta1/SearchMigratableResourcesRequest.java
@@ -41,6 +41,7 @@ private SearchMigratableResourcesRequest(
private SearchMigratableResourcesRequest() {
parent_ = "";
pageToken_ = "";
+ filter_ = "";
}
@java.lang.Override
@@ -91,6 +92,13 @@ private SearchMigratableResourcesRequest(
pageToken_ = s;
break;
}
+ case 34:
+ {
+ java.lang.String s = input.readStringRequireUtf8();
+
+ filter_ = s;
+ break;
+ }
default:
{
if (!parseUnknownField(input, unknownFields, extensionRegistry, tag)) {
@@ -254,6 +262,71 @@ public com.google.protobuf.ByteString getPageTokenBytes() {
}
}
+ public static final int FILTER_FIELD_NUMBER = 4;
+ private volatile java.lang.Object filter_;
+ /**
+ *
+ *
+ *
+ * Supported filters are:
+ * * Resource type: For a specific type of MigratableResource.
+ * * `ml_engine_model_version:*`
+ * * `automl_model:*`,
+ * * `automl_dataset:*`
+ * * `data_labeling_dataset:*`.
+ * * Migrated or not: Filter migrated resource or not by last_migrate_time.
+ * * `last_migrate_time:*` will filter migrated resources.
+ * * `NOT last_migrate_time:*` will filter not yet migrated resource.
+ *
+ *
+ * string filter = 4;
+ *
+ * @return The filter.
+ */
+ @java.lang.Override
+ public java.lang.String getFilter() {
+ java.lang.Object ref = filter_;
+ if (ref instanceof java.lang.String) {
+ return (java.lang.String) ref;
+ } else {
+ com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
+ java.lang.String s = bs.toStringUtf8();
+ filter_ = s;
+ return s;
+ }
+ }
+ /**
+ *
+ *
+ *
+ * Supported filters are:
+ * * Resource type: For a specific type of MigratableResource.
+ * * `ml_engine_model_version:*`
+ * * `automl_model:*`,
+ * * `automl_dataset:*`
+ * * `data_labeling_dataset:*`.
+ * * Migrated or not: Filter migrated resource or not by last_migrate_time.
+ * * `last_migrate_time:*` will filter migrated resources.
+ * * `NOT last_migrate_time:*` will filter not yet migrated resource.
+ *
+ *
+ * string filter = 4;
+ *
+ * @return The bytes for filter.
+ */
+ @java.lang.Override
+ public com.google.protobuf.ByteString getFilterBytes() {
+ java.lang.Object ref = filter_;
+ if (ref instanceof java.lang.String) {
+ com.google.protobuf.ByteString b =
+ com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
+ filter_ = b;
+ return b;
+ } else {
+ return (com.google.protobuf.ByteString) ref;
+ }
+ }
+
private byte memoizedIsInitialized = -1;
@java.lang.Override
@@ -277,6 +350,9 @@ public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io
if (!getPageTokenBytes().isEmpty()) {
com.google.protobuf.GeneratedMessageV3.writeString(output, 3, pageToken_);
}
+ if (!getFilterBytes().isEmpty()) {
+ com.google.protobuf.GeneratedMessageV3.writeString(output, 4, filter_);
+ }
unknownFields.writeTo(output);
}
@@ -295,6 +371,9 @@ public int getSerializedSize() {
if (!getPageTokenBytes().isEmpty()) {
size += com.google.protobuf.GeneratedMessageV3.computeStringSize(3, pageToken_);
}
+ if (!getFilterBytes().isEmpty()) {
+ size += com.google.protobuf.GeneratedMessageV3.computeStringSize(4, filter_);
+ }
size += unknownFields.getSerializedSize();
memoizedSize = size;
return size;
@@ -314,6 +393,7 @@ public boolean equals(final java.lang.Object obj) {
if (!getParent().equals(other.getParent())) return false;
if (getPageSize() != other.getPageSize()) return false;
if (!getPageToken().equals(other.getPageToken())) return false;
+ if (!getFilter().equals(other.getFilter())) return false;
if (!unknownFields.equals(other.unknownFields)) return false;
return true;
}
@@ -331,6 +411,8 @@ public int hashCode() {
hash = (53 * hash) + getPageSize();
hash = (37 * hash) + PAGE_TOKEN_FIELD_NUMBER;
hash = (53 * hash) + getPageToken().hashCode();
+ hash = (37 * hash) + FILTER_FIELD_NUMBER;
+ hash = (53 * hash) + getFilter().hashCode();
hash = (29 * hash) + unknownFields.hashCode();
memoizedHashCode = hash;
return hash;
@@ -485,6 +567,8 @@ public Builder clear() {
pageToken_ = "";
+ filter_ = "";
+
return this;
}
@@ -517,6 +601,7 @@ public com.google.cloud.aiplatform.v1beta1.SearchMigratableResourcesRequest buil
result.parent_ = parent_;
result.pageSize_ = pageSize_;
result.pageToken_ = pageToken_;
+ result.filter_ = filter_;
onBuilt();
return result;
}
@@ -581,6 +666,10 @@ public Builder mergeFrom(
pageToken_ = other.pageToken_;
onChanged();
}
+ if (!other.getFilter().isEmpty()) {
+ filter_ = other.filter_;
+ onChanged();
+ }
this.mergeUnknownFields(other.unknownFields);
onChanged();
return this;
@@ -909,6 +998,152 @@ public Builder setPageTokenBytes(com.google.protobuf.ByteString value) {
return this;
}
+ private java.lang.Object filter_ = "";
+ /**
+ *
+ *
+ *
+ * Supported filters are:
+ * * Resource type: For a specific type of MigratableResource.
+ * * `ml_engine_model_version:*`
+ * * `automl_model:*`,
+ * * `automl_dataset:*`
+ * * `data_labeling_dataset:*`.
+ * * Migrated or not: Filter migrated resource or not by last_migrate_time.
+ * * `last_migrate_time:*` will filter migrated resources.
+ * * `NOT last_migrate_time:*` will filter not yet migrated resource.
+ *
+ *
+ * string filter = 4;
+ *
+ * @return The filter.
+ */
+ public java.lang.String getFilter() {
+ java.lang.Object ref = filter_;
+ if (!(ref instanceof java.lang.String)) {
+ com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
+ java.lang.String s = bs.toStringUtf8();
+ filter_ = s;
+ return s;
+ } else {
+ return (java.lang.String) ref;
+ }
+ }
+ /**
+ *
+ *
+ *
+ * Supported filters are:
+ * * Resource type: For a specific type of MigratableResource.
+ * * `ml_engine_model_version:*`
+ * * `automl_model:*`,
+ * * `automl_dataset:*`
+ * * `data_labeling_dataset:*`.
+ * * Migrated or not: Filter migrated resource or not by last_migrate_time.
+ * * `last_migrate_time:*` will filter migrated resources.
+ * * `NOT last_migrate_time:*` will filter not yet migrated resource.
+ *
+ *
+ * string filter = 4;
+ *
+ * @return The bytes for filter.
+ */
+ public com.google.protobuf.ByteString getFilterBytes() {
+ java.lang.Object ref = filter_;
+ if (ref instanceof String) {
+ com.google.protobuf.ByteString b =
+ com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
+ filter_ = b;
+ return b;
+ } else {
+ return (com.google.protobuf.ByteString) ref;
+ }
+ }
+ /**
+ *
+ *
+ *
+ * Supported filters are:
+ * * Resource type: For a specific type of MigratableResource.
+ * * `ml_engine_model_version:*`
+ * * `automl_model:*`,
+ * * `automl_dataset:*`
+ * * `data_labeling_dataset:*`.
+ * * Migrated or not: Filter migrated resource or not by last_migrate_time.
+ * * `last_migrate_time:*` will filter migrated resources.
+ * * `NOT last_migrate_time:*` will filter not yet migrated resource.
+ *
+ *
+ * string filter = 4;
+ *
+ * @param value The filter to set.
+ * @return This builder for chaining.
+ */
+ public Builder setFilter(java.lang.String value) {
+ if (value == null) {
+ throw new NullPointerException();
+ }
+
+ filter_ = value;
+ onChanged();
+ return this;
+ }
+ /**
+ *
+ *
+ *
+ * Supported filters are:
+ * * Resource type: For a specific type of MigratableResource.
+ * * `ml_engine_model_version:*`
+ * * `automl_model:*`,
+ * * `automl_dataset:*`
+ * * `data_labeling_dataset:*`.
+ * * Migrated or not: Filter migrated resource or not by last_migrate_time.
+ * * `last_migrate_time:*` will filter migrated resources.
+ * * `NOT last_migrate_time:*` will filter not yet migrated resource.
+ *
+ *
+ * string filter = 4;
+ *
+ * @return This builder for chaining.
+ */
+ public Builder clearFilter() {
+
+ filter_ = getDefaultInstance().getFilter();
+ onChanged();
+ return this;
+ }
+ /**
+ *
+ *
+ *
+ * Supported filters are:
+ * * Resource type: For a specific type of MigratableResource.
+ * * `ml_engine_model_version:*`
+ * * `automl_model:*`,
+ * * `automl_dataset:*`
+ * * `data_labeling_dataset:*`.
+ * * Migrated or not: Filter migrated resource or not by last_migrate_time.
+ * * `last_migrate_time:*` will filter migrated resources.
+ * * `NOT last_migrate_time:*` will filter not yet migrated resource.
+ *
+ *
+ * string filter = 4;
+ *
+ * @param value The bytes for filter to set.
+ * @return This builder for chaining.
+ */
+ public Builder setFilterBytes(com.google.protobuf.ByteString value) {
+ if (value == null) {
+ throw new NullPointerException();
+ }
+ checkByteStringIsUtf8(value);
+
+ filter_ = value;
+ onChanged();
+ return this;
+ }
+
@java.lang.Override
public final Builder setUnknownFields(final com.google.protobuf.UnknownFieldSet unknownFields) {
return super.setUnknownFields(unknownFields);
diff --git a/proto-google-cloud-aiplatform-v1beta1/src/main/java/com/google/cloud/aiplatform/v1beta1/SearchMigratableResourcesRequestOrBuilder.java b/proto-google-cloud-aiplatform-v1beta1/src/main/java/com/google/cloud/aiplatform/v1beta1/SearchMigratableResourcesRequestOrBuilder.java
index 745f02b0f..a7fe855c4 100644
--- a/proto-google-cloud-aiplatform-v1beta1/src/main/java/com/google/cloud/aiplatform/v1beta1/SearchMigratableResourcesRequestOrBuilder.java
+++ b/proto-google-cloud-aiplatform-v1beta1/src/main/java/com/google/cloud/aiplatform/v1beta1/SearchMigratableResourcesRequestOrBuilder.java
@@ -98,4 +98,45 @@ public interface SearchMigratableResourcesRequestOrBuilder
* @return The bytes for pageToken.
*/
com.google.protobuf.ByteString getPageTokenBytes();
+
+ /**
+ *
+ *
+ *
+ * Supported filters are:
+ * * Resource type: For a specific type of MigratableResource.
+ * * `ml_engine_model_version:*`
+ * * `automl_model:*`,
+ * * `automl_dataset:*`
+ * * `data_labeling_dataset:*`.
+ * * Migrated or not: Filter migrated resource or not by last_migrate_time.
+ * * `last_migrate_time:*` will filter migrated resources.
+ * * `NOT last_migrate_time:*` will filter not yet migrated resource.
+ *
+ *
+ * string filter = 4;
+ *
+ * @return The filter.
+ */
+ java.lang.String getFilter();
+ /**
+ *
+ *
+ *
+ * Supported filters are:
+ * * Resource type: For a specific type of MigratableResource.
+ * * `ml_engine_model_version:*`
+ * * `automl_model:*`,
+ * * `automl_dataset:*`
+ * * `data_labeling_dataset:*`.
+ * * Migrated or not: Filter migrated resource or not by last_migrate_time.
+ * * `last_migrate_time:*` will filter migrated resources.
+ * * `NOT last_migrate_time:*` will filter not yet migrated resource.
+ *
+ *
+ * string filter = 4;
+ *
+ * @return The bytes for filter.
+ */
+ com.google.protobuf.ByteString getFilterBytes();
}
diff --git a/proto-google-cloud-aiplatform-v1beta1/src/main/java/com/google/cloud/aiplatform/v1beta1/SmoothGradConfig.java b/proto-google-cloud-aiplatform-v1beta1/src/main/java/com/google/cloud/aiplatform/v1beta1/SmoothGradConfig.java
index f2d574cd0..3948ee145 100644
--- a/proto-google-cloud-aiplatform-v1beta1/src/main/java/com/google/cloud/aiplatform/v1beta1/SmoothGradConfig.java
+++ b/proto-google-cloud-aiplatform-v1beta1/src/main/java/com/google/cloud/aiplatform/v1beta1/SmoothGradConfig.java
@@ -192,11 +192,9 @@ public GradientNoiseSigmaCase getGradientNoiseSigmaCase() {
* This is a single float value and will be used to add noise to all the
* features. Use this field when all features are normalized to have the
* same distribution: scale to range [0, 1], [-1, 1] or z-scoring, where
- * features are normalized to have 0-mean and 1-variance. Refer to
- * this doc for more details about normalization:
- * https:
- * //developers.google.com/machine-learning
- * // /data-prep/transform/normalization.
+ * features are normalized to have 0-mean and 1-variance. For more details
+ * about normalization:
+ * https://tinyurl.com/dgc-normalization.
* For best results the recommended value is about 10% - 20% of the standard
* deviation of the input feature. Refer to section 3.2 of the SmoothGrad
* paper: https://arxiv.org/pdf/1706.03825.pdf. Defaults to 0.1.
@@ -717,11 +715,9 @@ public Builder clearGradientNoiseSigma() {
* This is a single float value and will be used to add noise to all the
* features. Use this field when all features are normalized to have the
* same distribution: scale to range [0, 1], [-1, 1] or z-scoring, where
- * features are normalized to have 0-mean and 1-variance. Refer to
- * this doc for more details about normalization:
- * https:
- * //developers.google.com/machine-learning
- * // /data-prep/transform/normalization.
+ * features are normalized to have 0-mean and 1-variance. For more details
+ * about normalization:
+ * https://tinyurl.com/dgc-normalization.
* For best results the recommended value is about 10% - 20% of the standard
* deviation of the input feature. Refer to section 3.2 of the SmoothGrad
* paper: https://arxiv.org/pdf/1706.03825.pdf. Defaults to 0.1.
@@ -747,11 +743,9 @@ public float getNoiseSigma() {
* This is a single float value and will be used to add noise to all the
* features. Use this field when all features are normalized to have the
* same distribution: scale to range [0, 1], [-1, 1] or z-scoring, where
- * features are normalized to have 0-mean and 1-variance. Refer to
- * this doc for more details about normalization:
- * https:
- * //developers.google.com/machine-learning
- * // /data-prep/transform/normalization.
+ * features are normalized to have 0-mean and 1-variance. For more details
+ * about normalization:
+ * https://tinyurl.com/dgc-normalization.
* For best results the recommended value is about 10% - 20% of the standard
* deviation of the input feature. Refer to section 3.2 of the SmoothGrad
* paper: https://arxiv.org/pdf/1706.03825.pdf. Defaults to 0.1.
@@ -778,11 +772,9 @@ public Builder setNoiseSigma(float value) {
* This is a single float value and will be used to add noise to all the
* features. Use this field when all features are normalized to have the
* same distribution: scale to range [0, 1], [-1, 1] or z-scoring, where
- * features are normalized to have 0-mean and 1-variance. Refer to
- * this doc for more details about normalization:
- * https:
- * //developers.google.com/machine-learning
- * // /data-prep/transform/normalization.
+ * features are normalized to have 0-mean and 1-variance. For more details
+ * about normalization:
+ * https://tinyurl.com/dgc-normalization.
* For best results the recommended value is about 10% - 20% of the standard
* deviation of the input feature. Refer to section 3.2 of the SmoothGrad
* paper: https://arxiv.org/pdf/1706.03825.pdf. Defaults to 0.1.
diff --git a/proto-google-cloud-aiplatform-v1beta1/src/main/java/com/google/cloud/aiplatform/v1beta1/SmoothGradConfigOrBuilder.java b/proto-google-cloud-aiplatform-v1beta1/src/main/java/com/google/cloud/aiplatform/v1beta1/SmoothGradConfigOrBuilder.java
index 47d57d5ab..fd7bd2dfe 100644
--- a/proto-google-cloud-aiplatform-v1beta1/src/main/java/com/google/cloud/aiplatform/v1beta1/SmoothGradConfigOrBuilder.java
+++ b/proto-google-cloud-aiplatform-v1beta1/src/main/java/com/google/cloud/aiplatform/v1beta1/SmoothGradConfigOrBuilder.java
@@ -30,11 +30,9 @@ public interface SmoothGradConfigOrBuilder
* This is a single float value and will be used to add noise to all the
* features. Use this field when all features are normalized to have the
* same distribution: scale to range [0, 1], [-1, 1] or z-scoring, where
- * features are normalized to have 0-mean and 1-variance. Refer to
- * this doc for more details about normalization:
- * https:
- * //developers.google.com/machine-learning
- * // /data-prep/transform/normalization.
+ * features are normalized to have 0-mean and 1-variance. For more details
+ * about normalization:
+ * https://tinyurl.com/dgc-normalization.
* For best results the recommended value is about 10% - 20% of the standard
* deviation of the input feature. Refer to section 3.2 of the SmoothGrad
* paper: https://arxiv.org/pdf/1706.03825.pdf. Defaults to 0.1.
diff --git a/proto-google-cloud-aiplatform-v1beta1/src/main/java/com/google/cloud/aiplatform/v1beta1/StudyProto.java b/proto-google-cloud-aiplatform-v1beta1/src/main/java/com/google/cloud/aiplatform/v1beta1/StudyProto.java
index 4873428eb..d769cfb2d 100644
--- a/proto-google-cloud-aiplatform-v1beta1/src/main/java/com/google/cloud/aiplatform/v1beta1/StudyProto.java
+++ b/proto-google-cloud-aiplatform-v1beta1/src/main/java/com/google/cloud/aiplatform/v1beta1/StudyProto.java
@@ -101,87 +101,100 @@ public static com.google.protobuf.Descriptors.FileDescriptor getDescriptor() {
+ "google/api/field_behavior.proto\032\031google/"
+ "api/resource.proto\032\036google/protobuf/dura"
+ "tion.proto\032\034google/protobuf/struct.proto"
- + "\032\037google/protobuf/timestamp.proto\032\034googl"
- + "e/api/annotations.proto\"\330\004\n\005Trial\022\017\n\002id\030"
- + "\002 \001(\tB\003\340A\003\022@\n\005state\030\003 \001(\0162,.google.cloud"
- + ".aiplatform.v1beta1.Trial.StateB\003\340A\003\022I\n\n"
- + "parameters\030\004 \003(\01320.google.cloud.aiplatfo"
- + "rm.v1beta1.Trial.ParameterB\003\340A\003\022L\n\021final"
- + "_measurement\030\005 \001(\0132,.google.cloud.aiplat"
- + "form.v1beta1.MeasurementB\003\340A\003\0223\n\nstart_t"
- + "ime\030\007 \001(\0132\032.google.protobuf.TimestampB\003\340"
- + "A\003\0221\n\010end_time\030\010 \001(\0132\032.google.protobuf.T"
- + "imestampB\003\340A\003\022?\n\ncustom_job\030\013 \001(\tB+\340A\003\372A"
- + "%\n#aiplatform.googleapis.com/CustomJob\032R"
- + "\n\tParameter\022\031\n\014parameter_id\030\001 \001(\tB\003\340A\003\022*"
- + "\n\005value\030\002 \001(\0132\026.google.protobuf.ValueB\003\340"
- + "A\003\"f\n\005State\022\025\n\021STATE_UNSPECIFIED\020\000\022\r\n\tRE"
- + "QUESTED\020\001\022\n\n\006ACTIVE\020\002\022\014\n\010STOPPING\020\003\022\r\n\tS"
- + "UCCEEDED\020\004\022\016\n\nINFEASIBLE\020\005\"\310\021\n\tStudySpec"
- + "\022K\n\007metrics\030\001 \003(\01325.google.cloud.aiplatf"
- + "orm.v1beta1.StudySpec.MetricSpecB\003\340A\002\022Q\n"
- + "\nparameters\030\002 \003(\01328.google.cloud.aiplatf"
- + "orm.v1beta1.StudySpec.ParameterSpecB\003\340A\002"
- + "\022G\n\talgorithm\030\003 \001(\01624.google.cloud.aipla"
- + "tform.v1beta1.StudySpec.Algorithm\032\272\001\n\nMe"
- + "tricSpec\022\026\n\tmetric_id\030\001 \001(\tB\003\340A\002\022Q\n\004goal"
- + "\030\002 \001(\0162>.google.cloud.aiplatform.v1beta1"
- + ".StudySpec.MetricSpec.GoalTypeB\003\340A\002\"A\n\010G"
- + "oalType\022\031\n\025GOAL_TYPE_UNSPECIFIED\020\000\022\014\n\010MA"
- + "XIMIZE\020\001\022\014\n\010MINIMIZE\020\002\032\310\r\n\rParameterSpec"
- + "\022e\n\021double_value_spec\030\002 \001(\0132H.google.clo"
- + "ud.aiplatform.v1beta1.StudySpec.Paramete"
- + "rSpec.DoubleValueSpecH\000\022g\n\022integer_value"
- + "_spec\030\003 \001(\0132I.google.cloud.aiplatform.v1"
- + "beta1.StudySpec.ParameterSpec.IntegerVal"
- + "ueSpecH\000\022o\n\026categorical_value_spec\030\004 \001(\013"
- + "2M.google.cloud.aiplatform.v1beta1.Study"
- + "Spec.ParameterSpec.CategoricalValueSpecH"
- + "\000\022i\n\023discrete_value_spec\030\005 \001(\0132J.google."
- + "cloud.aiplatform.v1beta1.StudySpec.Param"
- + "eterSpec.DiscreteValueSpecH\000\022\031\n\014paramete"
- + "r_id\030\001 \001(\tB\003\340A\002\022V\n\nscale_type\030\006 \001(\0162B.go"
+ + "\032\037google/protobuf/timestamp.proto\032\036googl"
+ + "e/protobuf/wrappers.proto\032\034google/api/an"
+ + "notations.proto\"\306\005\n\005Trial\022\017\n\002id\030\002 \001(\tB\003\340"
+ + "A\003\022@\n\005state\030\003 \001(\0162,.google.cloud.aiplatf"
+ + "orm.v1beta1.Trial.StateB\003\340A\003\022I\n\nparamete"
+ + "rs\030\004 \003(\01320.google.cloud.aiplatform.v1bet"
+ + "a1.Trial.ParameterB\003\340A\003\022L\n\021final_measure"
+ + "ment\030\005 \001(\0132,.google.cloud.aiplatform.v1b"
+ + "eta1.MeasurementB\003\340A\003\0223\n\nstart_time\030\007 \001("
+ + "\0132\032.google.protobuf.TimestampB\003\340A\003\0221\n\010en"
+ + "d_time\030\010 \001(\0132\032.google.protobuf.Timestamp"
+ + "B\003\340A\003\022?\n\ncustom_job\030\013 \001(\tB+\340A\003\372A%\n#aipla"
+ + "tform.googleapis.com/CustomJob\032R\n\tParame"
+ + "ter\022\031\n\014parameter_id\030\001 \001(\tB\003\340A\003\022*\n\005value\030"
+ + "\002 \001(\0132\026.google.protobuf.ValueB\003\340A\003\"f\n\005St"
+ + "ate\022\025\n\021STATE_UNSPECIFIED\020\000\022\r\n\tREQUESTED\020"
+ + "\001\022\n\n\006ACTIVE\020\002\022\014\n\010STOPPING\020\003\022\r\n\tSUCCEEDED"
+ + "\020\004\022\016\n\nINFEASIBLE\020\005:l\352Ai\n\037aiplatform.goog"
+ + "leapis.com/Trial\022Fprojects/{project}/loc"
+ + "ations/{location}/studies/{study}/trials"
+ + "/{trial}\"\307\024\n\tStudySpec\022K\n\007metrics\030\001 \003(\0132"
+ + "5.google.cloud.aiplatform.v1beta1.StudyS"
+ + "pec.MetricSpecB\003\340A\002\022Q\n\nparameters\030\002 \003(\0132"
+ + "8.google.cloud.aiplatform.v1beta1.StudyS"
+ + "pec.ParameterSpecB\003\340A\002\022G\n\talgorithm\030\003 \001("
+ + "\01624.google.cloud.aiplatform.v1beta1.Stud"
+ + "ySpec.Algorithm\022V\n\021observation_noise\030\006 \001"
+ + "(\0162;.google.cloud.aiplatform.v1beta1.Stu"
+ + "dySpec.ObservationNoise\022g\n\032measurement_s"
+ + "election_type\030\007 \001(\0162C.google.cloud.aipla"
+ + "tform.v1beta1.StudySpec.MeasurementSelec"
+ + "tionType\032\272\001\n\nMetricSpec\022\026\n\tmetric_id\030\001 \001"
+ + "(\tB\003\340A\002\022Q\n\004goal\030\002 \001(\0162>.google.cloud.aip"
+ + "latform.v1beta1.StudySpec.MetricSpec.Goa"
+ + "lTypeB\003\340A\002\"A\n\010GoalType\022\031\n\025GOAL_TYPE_UNSP"
+ + "ECIFIED\020\000\022\014\n\010MAXIMIZE\020\001\022\014\n\010MINIMIZE\020\002\032\310\r"
+ + "\n\rParameterSpec\022e\n\021double_value_spec\030\002 \001"
+ + "(\0132H.google.cloud.aiplatform.v1beta1.Stu"
+ + "dySpec.ParameterSpec.DoubleValueSpecH\000\022g"
+ + "\n\022integer_value_spec\030\003 \001(\0132I.google.clou"
+ + "d.aiplatform.v1beta1.StudySpec.Parameter"
+ + "Spec.IntegerValueSpecH\000\022o\n\026categorical_v"
+ + "alue_spec\030\004 \001(\0132M.google.cloud.aiplatfor"
+ + "m.v1beta1.StudySpec.ParameterSpec.Catego"
+ + "ricalValueSpecH\000\022i\n\023discrete_value_spec\030"
+ + "\005 \001(\0132J.google.cloud.aiplatform.v1beta1."
+ + "StudySpec.ParameterSpec.DiscreteValueSpe"
+ + "cH\000\022\031\n\014parameter_id\030\001 \001(\tB\003\340A\002\022V\n\nscale_"
+ + "type\030\006 \001(\0162B.google.cloud.aiplatform.v1b"
+ + "eta1.StudySpec.ParameterSpec.ScaleType\022v"
+ + "\n\033conditional_parameter_specs\030\n \003(\0132Q.go"
+ "ogle.cloud.aiplatform.v1beta1.StudySpec."
- + "ParameterSpec.ScaleType\022v\n\033conditional_p"
- + "arameter_specs\030\n \003(\0132Q.google.cloud.aipl"
- + "atform.v1beta1.StudySpec.ParameterSpec.C"
- + "onditionalParameterSpec\032A\n\017DoubleValueSp"
- + "ec\022\026\n\tmin_value\030\001 \001(\001B\003\340A\002\022\026\n\tmax_value\030"
- + "\002 \001(\001B\003\340A\002\032B\n\020IntegerValueSpec\022\026\n\tmin_va"
- + "lue\030\001 \001(\003B\003\340A\002\022\026\n\tmax_value\030\002 \001(\003B\003\340A\002\032+"
- + "\n\024CategoricalValueSpec\022\023\n\006values\030\001 \003(\tB\003"
- + "\340A\002\032(\n\021DiscreteValueSpec\022\023\n\006values\030\001 \003(\001"
- + "B\003\340A\002\032\271\005\n\030ConditionalParameterSpec\022\212\001\n\026p"
- + "arent_discrete_values\030\002 \001(\0132h.google.clo"
- + "ud.aiplatform.v1beta1.StudySpec.Paramete"
- + "rSpec.ConditionalParameterSpec.DiscreteV"
- + "alueConditionH\000\022\200\001\n\021parent_int_values\030\003 "
- + "\001(\0132c.google.cloud.aiplatform.v1beta1.St"
- + "udySpec.ParameterSpec.ConditionalParamet"
- + "erSpec.IntValueConditionH\000\022\220\001\n\031parent_ca"
- + "tegorical_values\030\004 \001(\0132k.google.cloud.ai"
- + "platform.v1beta1.StudySpec.ParameterSpec"
- + ".ConditionalParameterSpec.CategoricalVal"
- + "ueConditionH\000\022U\n\016parameter_spec\030\001 \001(\01328."
+ + "ParameterSpec.ConditionalParameterSpec\032A"
+ + "\n\017DoubleValueSpec\022\026\n\tmin_value\030\001 \001(\001B\003\340A"
+ + "\002\022\026\n\tmax_value\030\002 \001(\001B\003\340A\002\032B\n\020IntegerValu"
+ + "eSpec\022\026\n\tmin_value\030\001 \001(\003B\003\340A\002\022\026\n\tmax_val"
+ + "ue\030\002 \001(\003B\003\340A\002\032+\n\024CategoricalValueSpec\022\023\n"
+ + "\006values\030\001 \003(\tB\003\340A\002\032(\n\021DiscreteValueSpec\022"
+ + "\023\n\006values\030\001 \003(\001B\003\340A\002\032\271\005\n\030ConditionalPara"
+ + "meterSpec\022\212\001\n\026parent_discrete_values\030\002 \001"
+ + "(\0132h.google.cloud.aiplatform.v1beta1.Stu"
+ + "dySpec.ParameterSpec.ConditionalParamete"
+ + "rSpec.DiscreteValueConditionH\000\022\200\001\n\021paren"
+ + "t_int_values\030\003 \001(\0132c.google.cloud.aiplat"
+ + "form.v1beta1.StudySpec.ParameterSpec.Con"
+ + "ditionalParameterSpec.IntValueConditionH"
+ + "\000\022\220\001\n\031parent_categorical_values\030\004 \001(\0132k."
+ "google.cloud.aiplatform.v1beta1.StudySpe"
- + "c.ParameterSpecB\003\340A\002\032-\n\026DiscreteValueCon"
- + "dition\022\023\n\006values\030\001 \003(\001B\003\340A\002\032(\n\021IntValueC"
- + "ondition\022\023\n\006values\030\001 \003(\003B\003\340A\002\0320\n\031Categor"
- + "icalValueCondition\022\023\n\006values\030\001 \003(\tB\003\340A\002B"
- + "\030\n\026parent_value_condition\"n\n\tScaleType\022\032"
- + "\n\026SCALE_TYPE_UNSPECIFIED\020\000\022\025\n\021UNIT_LINEA"
- + "R_SCALE\020\001\022\022\n\016UNIT_LOG_SCALE\020\002\022\032\n\026UNIT_RE"
- + "VERSE_LOG_SCALE\020\003B\026\n\024parameter_value_spe"
- + "c\"J\n\tAlgorithm\022\031\n\025ALGORITHM_UNSPECIFIED\020"
- + "\000\022\017\n\013GRID_SEARCH\020\002\022\021\n\rRANDOM_SEARCH\020\003\"\247\001"
- + "\n\013Measurement\022\027\n\nstep_count\030\002 \001(\003B\003\340A\003\022I"
- + "\n\007metrics\030\003 \003(\01323.google.cloud.aiplatfor"
- + "m.v1beta1.Measurement.MetricB\003\340A\003\0324\n\006Met"
- + "ric\022\026\n\tmetric_id\030\001 \001(\tB\003\340A\003\022\022\n\005value\030\002 \001"
- + "(\001B\003\340A\003B~\n#com.google.cloud.aiplatform.v"
- + "1beta1B\nStudyProtoP\001ZIgoogle.golang.org/"
- + "genproto/googleapis/cloud/aiplatform/v1b"
- + "eta1;aiplatformb\006proto3"
+ + "c.ParameterSpec.ConditionalParameterSpec"
+ + ".CategoricalValueConditionH\000\022U\n\016paramete"
+ + "r_spec\030\001 \001(\01328.google.cloud.aiplatform.v"
+ + "1beta1.StudySpec.ParameterSpecB\003\340A\002\032-\n\026D"
+ + "iscreteValueCondition\022\023\n\006values\030\001 \003(\001B\003\340"
+ + "A\002\032(\n\021IntValueCondition\022\023\n\006values\030\001 \003(\003B"
+ + "\003\340A\002\0320\n\031CategoricalValueCondition\022\023\n\006val"
+ + "ues\030\001 \003(\tB\003\340A\002B\030\n\026parent_value_condition"
+ + "\"n\n\tScaleType\022\032\n\026SCALE_TYPE_UNSPECIFIED\020"
+ + "\000\022\025\n\021UNIT_LINEAR_SCALE\020\001\022\022\n\016UNIT_LOG_SCA"
+ + "LE\020\002\022\032\n\026UNIT_REVERSE_LOG_SCALE\020\003B\026\n\024para"
+ + "meter_value_spec\"J\n\tAlgorithm\022\031\n\025ALGORIT"
+ + "HM_UNSPECIFIED\020\000\022\017\n\013GRID_SEARCH\020\002\022\021\n\rRAN"
+ + "DOM_SEARCH\020\003\"H\n\020ObservationNoise\022!\n\035OBSE"
+ + "RVATION_NOISE_UNSPECIFIED\020\000\022\007\n\003LOW\020\001\022\010\n\004"
+ + "HIGH\020\002\"r\n\030MeasurementSelectionType\022*\n&ME"
+ + "ASUREMENT_SELECTION_TYPE_UNSPECIFIED\020\000\022\024"
+ + "\n\020LAST_MEASUREMENT\020\001\022\024\n\020BEST_MEASUREMENT"
+ + "\020\002\"\247\001\n\013Measurement\022\027\n\nstep_count\030\002 \001(\003B\003"
+ + "\340A\003\022I\n\007metrics\030\003 \003(\01323.google.cloud.aipl"
+ + "atform.v1beta1.Measurement.MetricB\003\340A\003\0324"
+ + "\n\006Metric\022\026\n\tmetric_id\030\001 \001(\tB\003\340A\003\022\022\n\005valu"
+ + "e\030\002 \001(\001B\003\340A\003B~\n#com.google.cloud.aiplatf"
+ + "orm.v1beta1B\nStudyProtoP\001ZIgoogle.golang"
+ + ".org/genproto/googleapis/cloud/aiplatfor"
+ + "m/v1beta1;aiplatformb\006proto3"
};
descriptor =
com.google.protobuf.Descriptors.FileDescriptor.internalBuildGeneratedFileFrom(
@@ -192,6 +205,7 @@ public static com.google.protobuf.Descriptors.FileDescriptor getDescriptor() {
com.google.protobuf.DurationProto.getDescriptor(),
com.google.protobuf.StructProto.getDescriptor(),
com.google.protobuf.TimestampProto.getDescriptor(),
+ com.google.protobuf.WrappersProto.getDescriptor(),
com.google.api.AnnotationsProto.getDescriptor(),
});
internal_static_google_cloud_aiplatform_v1beta1_Trial_descriptor =
@@ -216,7 +230,7 @@ public static com.google.protobuf.Descriptors.FileDescriptor getDescriptor() {
new com.google.protobuf.GeneratedMessageV3.FieldAccessorTable(
internal_static_google_cloud_aiplatform_v1beta1_StudySpec_descriptor,
new java.lang.String[] {
- "Metrics", "Parameters", "Algorithm",
+ "Metrics", "Parameters", "Algorithm", "ObservationNoise", "MeasurementSelectionType",
});
internal_static_google_cloud_aiplatform_v1beta1_StudySpec_MetricSpec_descriptor =
internal_static_google_cloud_aiplatform_v1beta1_StudySpec_descriptor
@@ -350,6 +364,7 @@ public static com.google.protobuf.Descriptors.FileDescriptor getDescriptor() {
com.google.protobuf.ExtensionRegistry registry =
com.google.protobuf.ExtensionRegistry.newInstance();
registry.add(com.google.api.FieldBehaviorProto.fieldBehavior);
+ registry.add(com.google.api.ResourceProto.resource);
registry.add(com.google.api.ResourceProto.resourceReference);
com.google.protobuf.Descriptors.FileDescriptor.internalUpdateFileDescriptor(
descriptor, registry);
@@ -358,6 +373,7 @@ public static com.google.protobuf.Descriptors.FileDescriptor getDescriptor() {
com.google.protobuf.DurationProto.getDescriptor();
com.google.protobuf.StructProto.getDescriptor();
com.google.protobuf.TimestampProto.getDescriptor();
+ com.google.protobuf.WrappersProto.getDescriptor();
com.google.api.AnnotationsProto.getDescriptor();
}
diff --git a/proto-google-cloud-aiplatform-v1beta1/src/main/java/com/google/cloud/aiplatform/v1beta1/StudySpec.java b/proto-google-cloud-aiplatform-v1beta1/src/main/java/com/google/cloud/aiplatform/v1beta1/StudySpec.java
index 251335b9d..4b8c8ce8b 100644
--- a/proto-google-cloud-aiplatform-v1beta1/src/main/java/com/google/cloud/aiplatform/v1beta1/StudySpec.java
+++ b/proto-google-cloud-aiplatform-v1beta1/src/main/java/com/google/cloud/aiplatform/v1beta1/StudySpec.java
@@ -41,6 +41,8 @@ private StudySpec() {
metrics_ = java.util.Collections.emptyList();
parameters_ = java.util.Collections.emptyList();
algorithm_ = 0;
+ observationNoise_ = 0;
+ measurementSelectionType_ = 0;
}
@java.lang.Override
@@ -108,6 +110,20 @@ private StudySpec(
algorithm_ = rawValue;
break;
}
+ case 48:
+ {
+ int rawValue = input.readEnum();
+
+ observationNoise_ = rawValue;
+ break;
+ }
+ case 56:
+ {
+ int rawValue = input.readEnum();
+
+ measurementSelectionType_ = rawValue;
+ break;
+ }
default:
{
if (!parseUnknownField(input, unknownFields, extensionRegistry, tag)) {
@@ -307,6 +323,345 @@ private Algorithm(int value) {
// @@protoc_insertion_point(enum_scope:google.cloud.aiplatform.v1beta1.StudySpec.Algorithm)
}
+ /**
+ *
+ *
+ *
+ * Describes the noise level of the repeated observations.
+ * "Noisy" means that the repeated observations with the same Trial parameters
+ * may lead to different metric evaluations.
+ *
+ *
+ * Protobuf enum {@code google.cloud.aiplatform.v1beta1.StudySpec.ObservationNoise}
+ */
+ public enum ObservationNoise implements com.google.protobuf.ProtocolMessageEnum {
+ /**
+ *
+ *
+ *
+ * The default noise level chosen by the AI Platform service.
+ *
+ *
+ * OBSERVATION_NOISE_UNSPECIFIED = 0;
+ */
+ OBSERVATION_NOISE_UNSPECIFIED(0),
+ /**
+ *
+ *
+ *
+ * AI Platform Vizier assumes that the objective function is (nearly)
+ * perfectly reproducible, and will never repeat the same Trial
+ * parameters.
+ *
+ *
+ * LOW = 1;
+ */
+ LOW(1),
+ /**
+ *
+ *
+ *
+ * AI Platform Vizier will estimate the amount of noise in metric
+ * evaluations, it may repeat the same Trial parameters more than once.
+ *
+ *
+ * HIGH = 2;
+ */
+ HIGH(2),
+ UNRECOGNIZED(-1),
+ ;
+
+ /**
+ *
+ *
+ *
+ * The default noise level chosen by the AI Platform service.
+ *
+ *
+ * OBSERVATION_NOISE_UNSPECIFIED = 0;
+ */
+ public static final int OBSERVATION_NOISE_UNSPECIFIED_VALUE = 0;
+ /**
+ *
+ *
+ *
+ * AI Platform Vizier assumes that the objective function is (nearly)
+ * perfectly reproducible, and will never repeat the same Trial
+ * parameters.
+ *
+ *
+ * LOW = 1;
+ */
+ public static final int LOW_VALUE = 1;
+ /**
+ *
+ *
+ *
+ * AI Platform Vizier will estimate the amount of noise in metric
+ * evaluations, it may repeat the same Trial parameters more than once.
+ *
+ *
+ * HIGH = 2;
+ */
+ public static final int HIGH_VALUE = 2;
+
+ public final int getNumber() {
+ if (this == UNRECOGNIZED) {
+ throw new java.lang.IllegalArgumentException(
+ "Can't get the number of an unknown enum value.");
+ }
+ return value;
+ }
+
+ /**
+ * @param value The numeric wire value of the corresponding enum entry.
+ * @return The enum associated with the given numeric wire value.
+ * @deprecated Use {@link #forNumber(int)} instead.
+ */
+ @java.lang.Deprecated
+ public static ObservationNoise valueOf(int value) {
+ return forNumber(value);
+ }
+
+ /**
+ * @param value The numeric wire value of the corresponding enum entry.
+ * @return The enum associated with the given numeric wire value.
+ */
+ public static ObservationNoise forNumber(int value) {
+ switch (value) {
+ case 0:
+ return OBSERVATION_NOISE_UNSPECIFIED;
+ case 1:
+ return LOW;
+ case 2:
+ return HIGH;
+ default:
+ return null;
+ }
+ }
+
+ public static com.google.protobuf.Internal.EnumLiteMap
+ * This indicates which measurement to use if/when the service automatically
+ * selects the final measurement from previously reported intermediate
+ * measurements. Choose this based on two considerations:
+ * A) Do you expect your measurements to monotonically improve?
+ * If so, choose LAST_MEASUREMENT. On the other hand, if you're in a
+ * situation where your system can "over-train" and you expect the
+ * performance to get better for a while but then start declining,
+ * choose BEST_MEASUREMENT.
+ * B) Are your measurements significantly noisy and/or irreproducible?
+ * If so, BEST_MEASUREMENT will tend to be over-optimistic, and it
+ * may be better to choose LAST_MEASUREMENT.
+ * If both or neither of (A) and (B) apply, it doesn't matter which
+ * selection type is chosen.
+ *
+ *
+ * Protobuf enum {@code google.cloud.aiplatform.v1beta1.StudySpec.MeasurementSelectionType}
+ */
+ public enum MeasurementSelectionType implements com.google.protobuf.ProtocolMessageEnum {
+ /**
+ *
+ *
+ *
+ * Will be treated as LAST_MEASUREMENT.
+ *
+ *
+ * MEASUREMENT_SELECTION_TYPE_UNSPECIFIED = 0;
+ */
+ MEASUREMENT_SELECTION_TYPE_UNSPECIFIED(0),
+ /**
+ *
+ *
+ *
+ * Use the last measurement reported.
+ *
+ *
+ * LAST_MEASUREMENT = 1;
+ */
+ LAST_MEASUREMENT(1),
+ /**
+ *
+ *
+ *
+ * Use the best measurement reported.
+ *
+ *
+ * BEST_MEASUREMENT = 2;
+ */
+ BEST_MEASUREMENT(2),
+ UNRECOGNIZED(-1),
+ ;
+
+ /**
+ *
+ *
+ *
+ * Will be treated as LAST_MEASUREMENT.
+ *
+ *
+ * MEASUREMENT_SELECTION_TYPE_UNSPECIFIED = 0;
+ */
+ public static final int MEASUREMENT_SELECTION_TYPE_UNSPECIFIED_VALUE = 0;
+ /**
+ *
+ *
+ *
+ * Use the last measurement reported.
+ *
+ *
+ * LAST_MEASUREMENT = 1;
+ */
+ public static final int LAST_MEASUREMENT_VALUE = 1;
+ /**
+ *
+ *
+ *
+ * Use the best measurement reported.
+ *
+ *
+ * BEST_MEASUREMENT = 2;
+ */
+ public static final int BEST_MEASUREMENT_VALUE = 2;
+
+ public final int getNumber() {
+ if (this == UNRECOGNIZED) {
+ throw new java.lang.IllegalArgumentException(
+ "Can't get the number of an unknown enum value.");
+ }
+ return value;
+ }
+
+ /**
+ * @param value The numeric wire value of the corresponding enum entry.
+ * @return The enum associated with the given numeric wire value.
+ * @deprecated Use {@link #forNumber(int)} instead.
+ */
+ @java.lang.Deprecated
+ public static MeasurementSelectionType valueOf(int value) {
+ return forNumber(value);
+ }
+
+ /**
+ * @param value The numeric wire value of the corresponding enum entry.
+ * @return The enum associated with the given numeric wire value.
+ */
+ public static MeasurementSelectionType forNumber(int value) {
+ switch (value) {
+ case 0:
+ return MEASUREMENT_SELECTION_TYPE_UNSPECIFIED;
+ case 1:
+ return LAST_MEASUREMENT;
+ case 2:
+ return BEST_MEASUREMENT;
+ default:
+ return null;
+ }
+ }
+
+ public static com.google.protobuf.Internal.EnumLiteMap
+ * The observation noise level of the study.
+ * Currently only supported by the Vizier service. Not supported by
+ * HyperparameterTuningJob or TrainingPipeline.
+ *
+ *
+ * .google.cloud.aiplatform.v1beta1.StudySpec.ObservationNoise observation_noise = 6;
+ *
+ * @return The enum numeric value on the wire for observationNoise.
+ */
+ @java.lang.Override
+ public int getObservationNoiseValue() {
+ return observationNoise_;
+ }
+ /**
+ *
+ *
+ *
+ * The observation noise level of the study.
+ * Currently only supported by the Vizier service. Not supported by
+ * HyperparameterTuningJob or TrainingPipeline.
+ *
+ *
+ * .google.cloud.aiplatform.v1beta1.StudySpec.ObservationNoise observation_noise = 6;
+ *
+ * @return The observationNoise.
+ */
+ @java.lang.Override
+ public com.google.cloud.aiplatform.v1beta1.StudySpec.ObservationNoise getObservationNoise() {
+ @SuppressWarnings("deprecation")
+ com.google.cloud.aiplatform.v1beta1.StudySpec.ObservationNoise result =
+ com.google.cloud.aiplatform.v1beta1.StudySpec.ObservationNoise.valueOf(observationNoise_);
+ return result == null
+ ? com.google.cloud.aiplatform.v1beta1.StudySpec.ObservationNoise.UNRECOGNIZED
+ : result;
+ }
+
+ public static final int MEASUREMENT_SELECTION_TYPE_FIELD_NUMBER = 7;
+ private int measurementSelectionType_;
+ /**
+ *
+ *
+ *
+ * Describes which measurement selection type will be used.
+ *
+ *
+ *
+ * .google.cloud.aiplatform.v1beta1.StudySpec.MeasurementSelectionType measurement_selection_type = 7;
+ *
+ *
+ * @return The enum numeric value on the wire for measurementSelectionType.
+ */
+ @java.lang.Override
+ public int getMeasurementSelectionTypeValue() {
+ return measurementSelectionType_;
+ }
+ /**
+ *
+ *
+ *
+ * Describes which measurement selection type will be used.
+ *
+ *
+ *
+ * .google.cloud.aiplatform.v1beta1.StudySpec.MeasurementSelectionType measurement_selection_type = 7;
+ *
+ *
+ * @return The measurementSelectionType.
+ */
+ @java.lang.Override
+ public com.google.cloud.aiplatform.v1beta1.StudySpec.MeasurementSelectionType
+ getMeasurementSelectionType() {
+ @SuppressWarnings("deprecation")
+ com.google.cloud.aiplatform.v1beta1.StudySpec.MeasurementSelectionType result =
+ com.google.cloud.aiplatform.v1beta1.StudySpec.MeasurementSelectionType.valueOf(
+ measurementSelectionType_);
+ return result == null
+ ? com.google.cloud.aiplatform.v1beta1.StudySpec.MeasurementSelectionType.UNRECOGNIZED
+ : result;
+ }
+
private byte memoizedIsInitialized = -1;
@java.lang.Override
@@ -13004,6 +13445,18 @@ public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io
.getNumber()) {
output.writeEnum(3, algorithm_);
}
+ if (observationNoise_
+ != com.google.cloud.aiplatform.v1beta1.StudySpec.ObservationNoise
+ .OBSERVATION_NOISE_UNSPECIFIED
+ .getNumber()) {
+ output.writeEnum(6, observationNoise_);
+ }
+ if (measurementSelectionType_
+ != com.google.cloud.aiplatform.v1beta1.StudySpec.MeasurementSelectionType
+ .MEASUREMENT_SELECTION_TYPE_UNSPECIFIED
+ .getNumber()) {
+ output.writeEnum(7, measurementSelectionType_);
+ }
unknownFields.writeTo(output);
}
@@ -13024,6 +13477,18 @@ public int getSerializedSize() {
.getNumber()) {
size += com.google.protobuf.CodedOutputStream.computeEnumSize(3, algorithm_);
}
+ if (observationNoise_
+ != com.google.cloud.aiplatform.v1beta1.StudySpec.ObservationNoise
+ .OBSERVATION_NOISE_UNSPECIFIED
+ .getNumber()) {
+ size += com.google.protobuf.CodedOutputStream.computeEnumSize(6, observationNoise_);
+ }
+ if (measurementSelectionType_
+ != com.google.cloud.aiplatform.v1beta1.StudySpec.MeasurementSelectionType
+ .MEASUREMENT_SELECTION_TYPE_UNSPECIFIED
+ .getNumber()) {
+ size += com.google.protobuf.CodedOutputStream.computeEnumSize(7, measurementSelectionType_);
+ }
size += unknownFields.getSerializedSize();
memoizedSize = size;
return size;
@@ -13043,6 +13508,8 @@ public boolean equals(final java.lang.Object obj) {
if (!getMetricsList().equals(other.getMetricsList())) return false;
if (!getParametersList().equals(other.getParametersList())) return false;
if (algorithm_ != other.algorithm_) return false;
+ if (observationNoise_ != other.observationNoise_) return false;
+ if (measurementSelectionType_ != other.measurementSelectionType_) return false;
if (!unknownFields.equals(other.unknownFields)) return false;
return true;
}
@@ -13064,6 +13531,10 @@ public int hashCode() {
}
hash = (37 * hash) + ALGORITHM_FIELD_NUMBER;
hash = (53 * hash) + algorithm_;
+ hash = (37 * hash) + OBSERVATION_NOISE_FIELD_NUMBER;
+ hash = (53 * hash) + observationNoise_;
+ hash = (37 * hash) + MEASUREMENT_SELECTION_TYPE_FIELD_NUMBER;
+ hash = (53 * hash) + measurementSelectionType_;
hash = (29 * hash) + unknownFields.hashCode();
memoizedHashCode = hash;
return hash;
@@ -13226,6 +13697,10 @@ public Builder clear() {
}
algorithm_ = 0;
+ observationNoise_ = 0;
+
+ measurementSelectionType_ = 0;
+
return this;
}
@@ -13273,6 +13748,8 @@ public com.google.cloud.aiplatform.v1beta1.StudySpec buildPartial() {
result.parameters_ = parametersBuilder_.build();
}
result.algorithm_ = algorithm_;
+ result.observationNoise_ = observationNoise_;
+ result.measurementSelectionType_ = measurementSelectionType_;
onBuilt();
return result;
}
@@ -13379,6 +13856,12 @@ public Builder mergeFrom(com.google.cloud.aiplatform.v1beta1.StudySpec other) {
if (other.algorithm_ != 0) {
setAlgorithmValue(other.getAlgorithmValue());
}
+ if (other.observationNoise_ != 0) {
+ setObservationNoiseValue(other.getObservationNoiseValue());
+ }
+ if (other.measurementSelectionType_ != 0) {
+ setMeasurementSelectionTypeValue(other.getMeasurementSelectionTypeValue());
+ }
this.mergeUnknownFields(other.unknownFields);
onChanged();
return this;
@@ -14306,6 +14789,221 @@ public Builder clearAlgorithm() {
return this;
}
+ private int observationNoise_ = 0;
+ /**
+ *
+ *
+ *
+ * The observation noise level of the study.
+ * Currently only supported by the Vizier service. Not supported by
+ * HyperparameterTuningJob or TrainingPipeline.
+ *
+ *
+ * .google.cloud.aiplatform.v1beta1.StudySpec.ObservationNoise observation_noise = 6;
+ *
+ *
+ * @return The enum numeric value on the wire for observationNoise.
+ */
+ @java.lang.Override
+ public int getObservationNoiseValue() {
+ return observationNoise_;
+ }
+ /**
+ *
+ *
+ *
+ * The observation noise level of the study.
+ * Currently only supported by the Vizier service. Not supported by
+ * HyperparameterTuningJob or TrainingPipeline.
+ *
+ *
+ * .google.cloud.aiplatform.v1beta1.StudySpec.ObservationNoise observation_noise = 6;
+ *
+ *
+ * @param value The enum numeric value on the wire for observationNoise to set.
+ * @return This builder for chaining.
+ */
+ public Builder setObservationNoiseValue(int value) {
+
+ observationNoise_ = value;
+ onChanged();
+ return this;
+ }
+ /**
+ *
+ *
+ *
+ * The observation noise level of the study.
+ * Currently only supported by the Vizier service. Not supported by
+ * HyperparameterTuningJob or TrainingPipeline.
+ *
+ *
+ * .google.cloud.aiplatform.v1beta1.StudySpec.ObservationNoise observation_noise = 6;
+ *
+ *
+ * @return The observationNoise.
+ */
+ @java.lang.Override
+ public com.google.cloud.aiplatform.v1beta1.StudySpec.ObservationNoise getObservationNoise() {
+ @SuppressWarnings("deprecation")
+ com.google.cloud.aiplatform.v1beta1.StudySpec.ObservationNoise result =
+ com.google.cloud.aiplatform.v1beta1.StudySpec.ObservationNoise.valueOf(observationNoise_);
+ return result == null
+ ? com.google.cloud.aiplatform.v1beta1.StudySpec.ObservationNoise.UNRECOGNIZED
+ : result;
+ }
+ /**
+ *
+ *
+ *
+ * The observation noise level of the study.
+ * Currently only supported by the Vizier service. Not supported by
+ * HyperparameterTuningJob or TrainingPipeline.
+ *
+ *
+ * .google.cloud.aiplatform.v1beta1.StudySpec.ObservationNoise observation_noise = 6;
+ *
+ *
+ * @param value The observationNoise to set.
+ * @return This builder for chaining.
+ */
+ public Builder setObservationNoise(
+ com.google.cloud.aiplatform.v1beta1.StudySpec.ObservationNoise value) {
+ if (value == null) {
+ throw new NullPointerException();
+ }
+
+ observationNoise_ = value.getNumber();
+ onChanged();
+ return this;
+ }
+ /**
+ *
+ *
+ *
+ * The observation noise level of the study.
+ * Currently only supported by the Vizier service. Not supported by
+ * HyperparameterTuningJob or TrainingPipeline.
+ *
+ *
+ * .google.cloud.aiplatform.v1beta1.StudySpec.ObservationNoise observation_noise = 6;
+ *
+ *
+ * @return This builder for chaining.
+ */
+ public Builder clearObservationNoise() {
+
+ observationNoise_ = 0;
+ onChanged();
+ return this;
+ }
+
+ private int measurementSelectionType_ = 0;
+ /**
+ *
+ *
+ *
+ * Describes which measurement selection type will be used.
+ *
+ *
+ *
+ * .google.cloud.aiplatform.v1beta1.StudySpec.MeasurementSelectionType measurement_selection_type = 7;
+ *
+ *
+ * @return The enum numeric value on the wire for measurementSelectionType.
+ */
+ @java.lang.Override
+ public int getMeasurementSelectionTypeValue() {
+ return measurementSelectionType_;
+ }
+ /**
+ *
+ *
+ *
+ * Describes which measurement selection type will be used.
+ *
+ *
+ *
+ * .google.cloud.aiplatform.v1beta1.StudySpec.MeasurementSelectionType measurement_selection_type = 7;
+ *
+ *
+ * @param value The enum numeric value on the wire for measurementSelectionType to set.
+ * @return This builder for chaining.
+ */
+ public Builder setMeasurementSelectionTypeValue(int value) {
+
+ measurementSelectionType_ = value;
+ onChanged();
+ return this;
+ }
+ /**
+ *
+ *
+ *
+ * Describes which measurement selection type will be used.
+ *
+ *
+ *
+ * .google.cloud.aiplatform.v1beta1.StudySpec.MeasurementSelectionType measurement_selection_type = 7;
+ *
+ *
+ * @return The measurementSelectionType.
+ */
+ @java.lang.Override
+ public com.google.cloud.aiplatform.v1beta1.StudySpec.MeasurementSelectionType
+ getMeasurementSelectionType() {
+ @SuppressWarnings("deprecation")
+ com.google.cloud.aiplatform.v1beta1.StudySpec.MeasurementSelectionType result =
+ com.google.cloud.aiplatform.v1beta1.StudySpec.MeasurementSelectionType.valueOf(
+ measurementSelectionType_);
+ return result == null
+ ? com.google.cloud.aiplatform.v1beta1.StudySpec.MeasurementSelectionType.UNRECOGNIZED
+ : result;
+ }
+ /**
+ *
+ *
+ *
+ * Describes which measurement selection type will be used.
+ *
+ *
+ *
+ * .google.cloud.aiplatform.v1beta1.StudySpec.MeasurementSelectionType measurement_selection_type = 7;
+ *
+ *
+ * @param value The measurementSelectionType to set.
+ * @return This builder for chaining.
+ */
+ public Builder setMeasurementSelectionType(
+ com.google.cloud.aiplatform.v1beta1.StudySpec.MeasurementSelectionType value) {
+ if (value == null) {
+ throw new NullPointerException();
+ }
+
+ measurementSelectionType_ = value.getNumber();
+ onChanged();
+ return this;
+ }
+ /**
+ *
+ *
+ *
+ * Describes which measurement selection type will be used.
+ *
+ *
+ *
+ * .google.cloud.aiplatform.v1beta1.StudySpec.MeasurementSelectionType measurement_selection_type = 7;
+ *
+ *
+ * @return This builder for chaining.
+ */
+ public Builder clearMeasurementSelectionType() {
+
+ measurementSelectionType_ = 0;
+ onChanged();
+ return this;
+ }
+
@java.lang.Override
public final Builder setUnknownFields(final com.google.protobuf.UnknownFieldSet unknownFields) {
return super.setUnknownFields(unknownFields);
diff --git a/proto-google-cloud-aiplatform-v1beta1/src/main/java/com/google/cloud/aiplatform/v1beta1/StudySpecOrBuilder.java b/proto-google-cloud-aiplatform-v1beta1/src/main/java/com/google/cloud/aiplatform/v1beta1/StudySpecOrBuilder.java
index 558724456..fcbd0fde9 100644
--- a/proto-google-cloud-aiplatform-v1beta1/src/main/java/com/google/cloud/aiplatform/v1beta1/StudySpecOrBuilder.java
+++ b/proto-google-cloud-aiplatform-v1beta1/src/main/java/com/google/cloud/aiplatform/v1beta1/StudySpecOrBuilder.java
@@ -172,4 +172,63 @@ com.google.cloud.aiplatform.v1beta1.StudySpec.ParameterSpecOrBuilder getParamete
* @return The algorithm.
*/
com.google.cloud.aiplatform.v1beta1.StudySpec.Algorithm getAlgorithm();
+
+ /**
+ *
+ *
+ *
+ * The observation noise level of the study.
+ * Currently only supported by the Vizier service. Not supported by
+ * HyperparameterTuningJob or TrainingPipeline.
+ *
+ *
+ * .google.cloud.aiplatform.v1beta1.StudySpec.ObservationNoise observation_noise = 6;
+ *
+ * @return The enum numeric value on the wire for observationNoise.
+ */
+ int getObservationNoiseValue();
+ /**
+ *
+ *
+ *
+ * The observation noise level of the study.
+ * Currently only supported by the Vizier service. Not supported by
+ * HyperparameterTuningJob or TrainingPipeline.
+ *
+ *
+ * .google.cloud.aiplatform.v1beta1.StudySpec.ObservationNoise observation_noise = 6;
+ *
+ * @return The observationNoise.
+ */
+ com.google.cloud.aiplatform.v1beta1.StudySpec.ObservationNoise getObservationNoise();
+
+ /**
+ *
+ *
+ *
+ * Describes which measurement selection type will be used.
+ *
+ *
+ *
+ * .google.cloud.aiplatform.v1beta1.StudySpec.MeasurementSelectionType measurement_selection_type = 7;
+ *
+ *
+ * @return The enum numeric value on the wire for measurementSelectionType.
+ */
+ int getMeasurementSelectionTypeValue();
+ /**
+ *
+ *
+ *
+ * Describes which measurement selection type will be used.
+ *
+ *
+ *
+ * .google.cloud.aiplatform.v1beta1.StudySpec.MeasurementSelectionType measurement_selection_type = 7;
+ *
+ *
+ * @return The measurementSelectionType.
+ */
+ com.google.cloud.aiplatform.v1beta1.StudySpec.MeasurementSelectionType
+ getMeasurementSelectionType();
}
diff --git a/proto-google-cloud-aiplatform-v1beta1/src/main/java/com/google/cloud/aiplatform/v1beta1/TrainingPipeline.java b/proto-google-cloud-aiplatform-v1beta1/src/main/java/com/google/cloud/aiplatform/v1beta1/TrainingPipeline.java
index 353aa7d57..03f4b1206 100644
--- a/proto-google-cloud-aiplatform-v1beta1/src/main/java/com/google/cloud/aiplatform/v1beta1/TrainingPipeline.java
+++ b/proto-google-cloud-aiplatform-v1beta1/src/main/java/com/google/cloud/aiplatform/v1beta1/TrainingPipeline.java
@@ -254,6 +254,23 @@ private TrainingPipeline(
input.readMessage(
LabelsDefaultEntryHolder.defaultEntry.getParserForType(), extensionRegistry);
labels_.getMutableMap().put(labels__.getKey(), labels__.getValue());
+ break;
+ }
+ case 146:
+ {
+ com.google.cloud.aiplatform.v1beta1.EncryptionSpec.Builder subBuilder = null;
+ if (encryptionSpec_ != null) {
+ subBuilder = encryptionSpec_.toBuilder();
+ }
+ encryptionSpec_ =
+ input.readMessage(
+ com.google.cloud.aiplatform.v1beta1.EncryptionSpec.parser(),
+ extensionRegistry);
+ if (subBuilder != null) {
+ subBuilder.mergeFrom(encryptionSpec_);
+ encryptionSpec_ = subBuilder.buildPartial();
+ }
+
break;
}
default:
@@ -655,7 +672,7 @@ public com.google.protobuf.ValueOrBuilder getTrainingTaskMetadataOrBuilder() {
*
*
*
- * Describes the Model that may be uploaded (via [ModelService.UploadMode][])
+ * Describes the Model that may be uploaded (via [ModelService.UploadModel][google.cloud.aiplatform.v1beta1.ModelService.UploadModel])
* by this TrainingPipeline. The TrainingPipeline's
* [training_task_definition][google.cloud.aiplatform.v1beta1.TrainingPipeline.training_task_definition] should make clear whether this Model
* description should be populated, and if there are any special requirements
@@ -683,7 +700,7 @@ public boolean hasModelToUpload() {
*
*
*
- * Describes the Model that may be uploaded (via [ModelService.UploadMode][])
+ * Describes the Model that may be uploaded (via [ModelService.UploadModel][google.cloud.aiplatform.v1beta1.ModelService.UploadModel])
* by this TrainingPipeline. The TrainingPipeline's
* [training_task_definition][google.cloud.aiplatform.v1beta1.TrainingPipeline.training_task_definition] should make clear whether this Model
* description should be populated, and if there are any special requirements
@@ -713,7 +730,7 @@ public com.google.cloud.aiplatform.v1beta1.Model getModelToUpload() {
*
*
*
- * Describes the Model that may be uploaded (via [ModelService.UploadMode][])
+ * Describes the Model that may be uploaded (via [ModelService.UploadModel][google.cloud.aiplatform.v1beta1.ModelService.UploadModel])
* by this TrainingPipeline. The TrainingPipeline's
* [training_task_definition][google.cloud.aiplatform.v1beta1.TrainingPipeline.training_task_definition] should make clear whether this Model
* description should be populated, and if there are any special requirements
@@ -1144,6 +1161,63 @@ public java.lang.String getLabelsOrThrow(java.lang.String key) {
return map.get(key);
}
+ public static final int ENCRYPTION_SPEC_FIELD_NUMBER = 18;
+ private com.google.cloud.aiplatform.v1beta1.EncryptionSpec encryptionSpec_;
+ /**
+ *
+ *
+ *
+ * Customer-managed encryption key spec for a TrainingPipeline. If set, this
+ * TrainingPipeline will be secured by this key.
+ * Note: Model trained by this TrainingPipeline is also secured by this key if
+ * [model_to_upload][google.cloud.aiplatform.v1beta1.TrainingPipeline.model_to_upload] is not set separately.
+ *
+ *
+ * .google.cloud.aiplatform.v1beta1.EncryptionSpec encryption_spec = 18;
+ *
+ * @return Whether the encryptionSpec field is set.
+ */
+ @java.lang.Override
+ public boolean hasEncryptionSpec() {
+ return encryptionSpec_ != null;
+ }
+ /**
+ *
+ *
+ *
+ * Customer-managed encryption key spec for a TrainingPipeline. If set, this
+ * TrainingPipeline will be secured by this key.
+ * Note: Model trained by this TrainingPipeline is also secured by this key if
+ * [model_to_upload][google.cloud.aiplatform.v1beta1.TrainingPipeline.model_to_upload] is not set separately.
+ *
+ *
+ * .google.cloud.aiplatform.v1beta1.EncryptionSpec encryption_spec = 18;
+ *
+ * @return The encryptionSpec.
+ */
+ @java.lang.Override
+ public com.google.cloud.aiplatform.v1beta1.EncryptionSpec getEncryptionSpec() {
+ return encryptionSpec_ == null
+ ? com.google.cloud.aiplatform.v1beta1.EncryptionSpec.getDefaultInstance()
+ : encryptionSpec_;
+ }
+ /**
+ *
+ *
+ *
+ * Customer-managed encryption key spec for a TrainingPipeline. If set, this
+ * TrainingPipeline will be secured by this key.
+ * Note: Model trained by this TrainingPipeline is also secured by this key if
+ * [model_to_upload][google.cloud.aiplatform.v1beta1.TrainingPipeline.model_to_upload] is not set separately.
+ *
+ *
+ * .google.cloud.aiplatform.v1beta1.EncryptionSpec encryption_spec = 18;
+ */
+ @java.lang.Override
+ public com.google.cloud.aiplatform.v1beta1.EncryptionSpecOrBuilder getEncryptionSpecOrBuilder() {
+ return getEncryptionSpec();
+ }
+
private byte memoizedIsInitialized = -1;
@java.lang.Override
@@ -1201,6 +1275,9 @@ public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io
}
com.google.protobuf.GeneratedMessageV3.serializeStringMapTo(
output, internalGetLabels(), LabelsDefaultEntryHolder.defaultEntry, 15);
+ if (encryptionSpec_ != null) {
+ output.writeMessage(18, getEncryptionSpec());
+ }
unknownFields.writeTo(output);
}
@@ -1262,6 +1339,9 @@ public int getSerializedSize() {
.build();
size += com.google.protobuf.CodedOutputStream.computeMessageSize(15, labels__);
}
+ if (encryptionSpec_ != null) {
+ size += com.google.protobuf.CodedOutputStream.computeMessageSize(18, getEncryptionSpec());
+ }
size += unknownFields.getSerializedSize();
memoizedSize = size;
return size;
@@ -1319,6 +1399,10 @@ public boolean equals(final java.lang.Object obj) {
if (!getUpdateTime().equals(other.getUpdateTime())) return false;
}
if (!internalGetLabels().equals(other.internalGetLabels())) return false;
+ if (hasEncryptionSpec() != other.hasEncryptionSpec()) return false;
+ if (hasEncryptionSpec()) {
+ if (!getEncryptionSpec().equals(other.getEncryptionSpec())) return false;
+ }
if (!unknownFields.equals(other.unknownFields)) return false;
return true;
}
@@ -1378,6 +1462,10 @@ public int hashCode() {
hash = (37 * hash) + LABELS_FIELD_NUMBER;
hash = (53 * hash) + internalGetLabels().hashCode();
}
+ if (hasEncryptionSpec()) {
+ hash = (37 * hash) + ENCRYPTION_SPEC_FIELD_NUMBER;
+ hash = (53 * hash) + getEncryptionSpec().hashCode();
+ }
hash = (29 * hash) + unknownFields.hashCode();
memoizedHashCode = hash;
return hash;
@@ -1610,6 +1698,12 @@ public Builder clear() {
updateTimeBuilder_ = null;
}
internalGetMutableLabels().clear();
+ if (encryptionSpecBuilder_ == null) {
+ encryptionSpec_ = null;
+ } else {
+ encryptionSpec_ = null;
+ encryptionSpecBuilder_ = null;
+ }
return this;
}
@@ -1689,6 +1783,11 @@ public com.google.cloud.aiplatform.v1beta1.TrainingPipeline buildPartial() {
}
result.labels_ = internalGetLabels();
result.labels_.makeImmutable();
+ if (encryptionSpecBuilder_ == null) {
+ result.encryptionSpec_ = encryptionSpec_;
+ } else {
+ result.encryptionSpec_ = encryptionSpecBuilder_.build();
+ }
onBuilt();
return result;
}
@@ -1782,6 +1881,9 @@ public Builder mergeFrom(com.google.cloud.aiplatform.v1beta1.TrainingPipeline ot
mergeUpdateTime(other.getUpdateTime());
}
internalGetMutableLabels().mergeFrom(other.internalGetLabels());
+ if (other.hasEncryptionSpec()) {
+ mergeEncryptionSpec(other.getEncryptionSpec());
+ }
this.mergeUnknownFields(other.unknownFields);
onChanged();
return this;
@@ -2860,7 +2962,7 @@ public com.google.protobuf.ValueOrBuilder getTrainingTaskMetadataOrBuilder() {
*
*
*
- * Describes the Model that may be uploaded (via [ModelService.UploadMode][])
+ * Describes the Model that may be uploaded (via [ModelService.UploadModel][google.cloud.aiplatform.v1beta1.ModelService.UploadModel])
* by this TrainingPipeline. The TrainingPipeline's
* [training_task_definition][google.cloud.aiplatform.v1beta1.TrainingPipeline.training_task_definition] should make clear whether this Model
* description should be populated, and if there are any special requirements
@@ -2887,7 +2989,7 @@ public boolean hasModelToUpload() {
*
*
*
- * Describes the Model that may be uploaded (via [ModelService.UploadMode][])
+ * Describes the Model that may be uploaded (via [ModelService.UploadModel][google.cloud.aiplatform.v1beta1.ModelService.UploadModel])
* by this TrainingPipeline. The TrainingPipeline's
* [training_task_definition][google.cloud.aiplatform.v1beta1.TrainingPipeline.training_task_definition] should make clear whether this Model
* description should be populated, and if there are any special requirements
@@ -2920,7 +3022,7 @@ public com.google.cloud.aiplatform.v1beta1.Model getModelToUpload() {
*
*
*
- * Describes the Model that may be uploaded (via [ModelService.UploadMode][])
+ * Describes the Model that may be uploaded (via [ModelService.UploadModel][google.cloud.aiplatform.v1beta1.ModelService.UploadModel])
* by this TrainingPipeline. The TrainingPipeline's
* [training_task_definition][google.cloud.aiplatform.v1beta1.TrainingPipeline.training_task_definition] should make clear whether this Model
* description should be populated, and if there are any special requirements
@@ -2955,7 +3057,7 @@ public Builder setModelToUpload(com.google.cloud.aiplatform.v1beta1.Model value)
*
*
*
- * Describes the Model that may be uploaded (via [ModelService.UploadMode][])
+ * Describes the Model that may be uploaded (via [ModelService.UploadModel][google.cloud.aiplatform.v1beta1.ModelService.UploadModel])
* by this TrainingPipeline. The TrainingPipeline's
* [training_task_definition][google.cloud.aiplatform.v1beta1.TrainingPipeline.training_task_definition] should make clear whether this Model
* description should be populated, and if there are any special requirements
@@ -2988,7 +3090,7 @@ public Builder setModelToUpload(
*
*
*
- * Describes the Model that may be uploaded (via [ModelService.UploadMode][])
+ * Describes the Model that may be uploaded (via [ModelService.UploadModel][google.cloud.aiplatform.v1beta1.ModelService.UploadModel])
* by this TrainingPipeline. The TrainingPipeline's
* [training_task_definition][google.cloud.aiplatform.v1beta1.TrainingPipeline.training_task_definition] should make clear whether this Model
* description should be populated, and if there are any special requirements
@@ -3027,7 +3129,7 @@ public Builder mergeModelToUpload(com.google.cloud.aiplatform.v1beta1.Model valu
*
*
*
- * Describes the Model that may be uploaded (via [ModelService.UploadMode][])
+ * Describes the Model that may be uploaded (via [ModelService.UploadModel][google.cloud.aiplatform.v1beta1.ModelService.UploadModel])
* by this TrainingPipeline. The TrainingPipeline's
* [training_task_definition][google.cloud.aiplatform.v1beta1.TrainingPipeline.training_task_definition] should make clear whether this Model
* description should be populated, and if there are any special requirements
@@ -3060,7 +3162,7 @@ public Builder clearModelToUpload() {
*
*
*
- * Describes the Model that may be uploaded (via [ModelService.UploadMode][])
+ * Describes the Model that may be uploaded (via [ModelService.UploadModel][google.cloud.aiplatform.v1beta1.ModelService.UploadModel])
* by this TrainingPipeline. The TrainingPipeline's
* [training_task_definition][google.cloud.aiplatform.v1beta1.TrainingPipeline.training_task_definition] should make clear whether this Model
* description should be populated, and if there are any special requirements
@@ -3087,7 +3189,7 @@ public com.google.cloud.aiplatform.v1beta1.Model.Builder getModelToUploadBuilder
*
*
*
- * Describes the Model that may be uploaded (via [ModelService.UploadMode][])
+ * Describes the Model that may be uploaded (via [ModelService.UploadModel][google.cloud.aiplatform.v1beta1.ModelService.UploadModel])
* by this TrainingPipeline. The TrainingPipeline's
* [training_task_definition][google.cloud.aiplatform.v1beta1.TrainingPipeline.training_task_definition] should make clear whether this Model
* description should be populated, and if there are any special requirements
@@ -3118,7 +3220,7 @@ public com.google.cloud.aiplatform.v1beta1.ModelOrBuilder getModelToUploadOrBuil
*
*
*
- * Describes the Model that may be uploaded (via [ModelService.UploadMode][])
+ * Describes the Model that may be uploaded (via [ModelService.UploadModel][google.cloud.aiplatform.v1beta1.ModelService.UploadModel])
* by this TrainingPipeline. The TrainingPipeline's
* [training_task_definition][google.cloud.aiplatform.v1beta1.TrainingPipeline.training_task_definition] should make clear whether this Model
* description should be populated, and if there are any special requirements
@@ -4437,6 +4539,220 @@ public Builder putAllLabels(java.util.Map
+ * Customer-managed encryption key spec for a TrainingPipeline. If set, this
+ * TrainingPipeline will be secured by this key.
+ * Note: Model trained by this TrainingPipeline is also secured by this key if
+ * [model_to_upload][google.cloud.aiplatform.v1beta1.TrainingPipeline.encryption_spec] is not set separately.
+ *
+ *
+ * .google.cloud.aiplatform.v1beta1.EncryptionSpec encryption_spec = 18;
+ *
+ * @return Whether the encryptionSpec field is set.
+ */
+ public boolean hasEncryptionSpec() {
+ return encryptionSpecBuilder_ != null || encryptionSpec_ != null;
+ }
+ /**
+ *
+ *
+ *
+ * Customer-managed encryption key spec for a TrainingPipeline. If set, this
+ * TrainingPipeline will be secured by this key.
+ * Note: Model trained by this TrainingPipeline is also secured by this key if
+ * [model_to_upload][google.cloud.aiplatform.v1beta1.TrainingPipeline.encryption_spec] is not set separately.
+ *
+ *
+ * .google.cloud.aiplatform.v1beta1.EncryptionSpec encryption_spec = 18;
+ *
+ * @return The encryptionSpec.
+ */
+ public com.google.cloud.aiplatform.v1beta1.EncryptionSpec getEncryptionSpec() {
+ if (encryptionSpecBuilder_ == null) {
+ return encryptionSpec_ == null
+ ? com.google.cloud.aiplatform.v1beta1.EncryptionSpec.getDefaultInstance()
+ : encryptionSpec_;
+ } else {
+ return encryptionSpecBuilder_.getMessage();
+ }
+ }
+ /**
+ *
+ *
+ *
+ * Customer-managed encryption key spec for a TrainingPipeline. If set, this
+ * TrainingPipeline will be secured by this key.
+ * Note: Model trained by this TrainingPipeline is also secured by this key if
+ * [model_to_upload][google.cloud.aiplatform.v1beta1.TrainingPipeline.encryption_spec] is not set separately.
+ *
+ *
+ * .google.cloud.aiplatform.v1beta1.EncryptionSpec encryption_spec = 18;
+ */
+ public Builder setEncryptionSpec(com.google.cloud.aiplatform.v1beta1.EncryptionSpec value) {
+ if (encryptionSpecBuilder_ == null) {
+ if (value == null) {
+ throw new NullPointerException();
+ }
+ encryptionSpec_ = value;
+ onChanged();
+ } else {
+ encryptionSpecBuilder_.setMessage(value);
+ }
+
+ return this;
+ }
+ /**
+ *
+ *
+ *
+ * Customer-managed encryption key spec for a TrainingPipeline. If set, this
+ * TrainingPipeline will be secured by this key.
+ * Note: Model trained by this TrainingPipeline is also secured by this key if
+ * [model_to_upload][google.cloud.aiplatform.v1beta1.TrainingPipeline.encryption_spec] is not set separately.
+ *
+ *
+ * .google.cloud.aiplatform.v1beta1.EncryptionSpec encryption_spec = 18;
+ */
+ public Builder setEncryptionSpec(
+ com.google.cloud.aiplatform.v1beta1.EncryptionSpec.Builder builderForValue) {
+ if (encryptionSpecBuilder_ == null) {
+ encryptionSpec_ = builderForValue.build();
+ onChanged();
+ } else {
+ encryptionSpecBuilder_.setMessage(builderForValue.build());
+ }
+
+ return this;
+ }
+ /**
+ *
+ *
+ *
+ * Customer-managed encryption key spec for a TrainingPipeline. If set, this
+ * TrainingPipeline will be secured by this key.
+ * Note: Model trained by this TrainingPipeline is also secured by this key if
+ * [model_to_upload][google.cloud.aiplatform.v1beta1.TrainingPipeline.encryption_spec] is not set separately.
+ *
+ *
+ * .google.cloud.aiplatform.v1beta1.EncryptionSpec encryption_spec = 18;
+ */
+ public Builder mergeEncryptionSpec(com.google.cloud.aiplatform.v1beta1.EncryptionSpec value) {
+ if (encryptionSpecBuilder_ == null) {
+ if (encryptionSpec_ != null) {
+ encryptionSpec_ =
+ com.google.cloud.aiplatform.v1beta1.EncryptionSpec.newBuilder(encryptionSpec_)
+ .mergeFrom(value)
+ .buildPartial();
+ } else {
+ encryptionSpec_ = value;
+ }
+ onChanged();
+ } else {
+ encryptionSpecBuilder_.mergeFrom(value);
+ }
+
+ return this;
+ }
+ /**
+ *
+ *
+ *
+ * Customer-managed encryption key spec for a TrainingPipeline. If set, this
+ * TrainingPipeline will be secured by this key.
+ * Note: Model trained by this TrainingPipeline is also secured by this key if
+ * [model_to_upload][google.cloud.aiplatform.v1beta1.TrainingPipeline.encryption_spec] is not set separately.
+ *
+ *
+ * .google.cloud.aiplatform.v1beta1.EncryptionSpec encryption_spec = 18;
+ */
+ public Builder clearEncryptionSpec() {
+ if (encryptionSpecBuilder_ == null) {
+ encryptionSpec_ = null;
+ onChanged();
+ } else {
+ encryptionSpec_ = null;
+ encryptionSpecBuilder_ = null;
+ }
+
+ return this;
+ }
+ /**
+ *
+ *
+ *
+ * Customer-managed encryption key spec for a TrainingPipeline. If set, this
+ * TrainingPipeline will be secured by this key.
+ * Note: Model trained by this TrainingPipeline is also secured by this key if
+ * [model_to_upload][google.cloud.aiplatform.v1beta1.TrainingPipeline.encryption_spec] is not set separately.
+ *
+ *
+ * .google.cloud.aiplatform.v1beta1.EncryptionSpec encryption_spec = 18;
+ */
+ public com.google.cloud.aiplatform.v1beta1.EncryptionSpec.Builder getEncryptionSpecBuilder() {
+
+ onChanged();
+ return getEncryptionSpecFieldBuilder().getBuilder();
+ }
+ /**
+ *
+ *
+ *
+ * Customer-managed encryption key spec for a TrainingPipeline. If set, this
+ * TrainingPipeline will be secured by this key.
+ * Note: Model trained by this TrainingPipeline is also secured by this key if
+ * [model_to_upload][google.cloud.aiplatform.v1beta1.TrainingPipeline.encryption_spec] is not set separately.
+ *
+ *
+ * .google.cloud.aiplatform.v1beta1.EncryptionSpec encryption_spec = 18;
+ */
+ public com.google.cloud.aiplatform.v1beta1.EncryptionSpecOrBuilder
+ getEncryptionSpecOrBuilder() {
+ if (encryptionSpecBuilder_ != null) {
+ return encryptionSpecBuilder_.getMessageOrBuilder();
+ } else {
+ return encryptionSpec_ == null
+ ? com.google.cloud.aiplatform.v1beta1.EncryptionSpec.getDefaultInstance()
+ : encryptionSpec_;
+ }
+ }
+ /**
+ *
+ *
+ *
+ * Customer-managed encryption key spec for a TrainingPipeline. If set, this
+ * TrainingPipeline will be secured by this key.
+ * Note: Model trained by this TrainingPipeline is also secured by this key if
+ * [model_to_upload][google.cloud.aiplatform.v1beta1.TrainingPipeline.encryption_spec] is not set separately.
+ *
+ *
+ * .google.cloud.aiplatform.v1beta1.EncryptionSpec encryption_spec = 18;
+ */
+ private com.google.protobuf.SingleFieldBuilderV3<
+ com.google.cloud.aiplatform.v1beta1.EncryptionSpec,
+ com.google.cloud.aiplatform.v1beta1.EncryptionSpec.Builder,
+ com.google.cloud.aiplatform.v1beta1.EncryptionSpecOrBuilder>
+ getEncryptionSpecFieldBuilder() {
+ if (encryptionSpecBuilder_ == null) {
+ encryptionSpecBuilder_ =
+ new com.google.protobuf.SingleFieldBuilderV3<
+ com.google.cloud.aiplatform.v1beta1.EncryptionSpec,
+ com.google.cloud.aiplatform.v1beta1.EncryptionSpec.Builder,
+ com.google.cloud.aiplatform.v1beta1.EncryptionSpecOrBuilder>(
+ getEncryptionSpec(), getParentForChildren(), isClean());
+ encryptionSpec_ = null;
+ }
+ return encryptionSpecBuilder_;
+ }
+
@java.lang.Override
public final Builder setUnknownFields(final com.google.protobuf.UnknownFieldSet unknownFields) {
return super.setUnknownFields(unknownFields);
diff --git a/proto-google-cloud-aiplatform-v1beta1/src/main/java/com/google/cloud/aiplatform/v1beta1/TrainingPipelineOrBuilder.java b/proto-google-cloud-aiplatform-v1beta1/src/main/java/com/google/cloud/aiplatform/v1beta1/TrainingPipelineOrBuilder.java
index 671252855..925dbdf68 100644
--- a/proto-google-cloud-aiplatform-v1beta1/src/main/java/com/google/cloud/aiplatform/v1beta1/TrainingPipelineOrBuilder.java
+++ b/proto-google-cloud-aiplatform-v1beta1/src/main/java/com/google/cloud/aiplatform/v1beta1/TrainingPipelineOrBuilder.java
@@ -263,7 +263,7 @@ public interface TrainingPipelineOrBuilder
*
*
*
- * Describes the Model that may be uploaded (via [ModelService.UploadMode][])
+ * Describes the Model that may be uploaded (via [ModelService.UploadModel][google.cloud.aiplatform.v1beta1.ModelService.UploadModel])
* by this TrainingPipeline. The TrainingPipeline's
* [training_task_definition][google.cloud.aiplatform.v1beta1.TrainingPipeline.training_task_definition] should make clear whether this Model
* description should be populated, and if there are any special requirements
@@ -288,7 +288,7 @@ public interface TrainingPipelineOrBuilder
*
*
*
- * Describes the Model that may be uploaded (via [ModelService.UploadMode][])
+ * Describes the Model that may be uploaded (via [ModelService.UploadModel][google.cloud.aiplatform.v1beta1.ModelService.UploadModel])
* by this TrainingPipeline. The TrainingPipeline's
* [training_task_definition][google.cloud.aiplatform.v1beta1.TrainingPipeline.training_task_definition] should make clear whether this Model
* description should be populated, and if there are any special requirements
@@ -313,7 +313,7 @@ public interface TrainingPipelineOrBuilder
*
*
*
- * Describes the Model that may be uploaded (via [ModelService.UploadMode][])
+ * Describes the Model that may be uploaded (via [ModelService.UploadModel][google.cloud.aiplatform.v1beta1.ModelService.UploadModel])
* by this TrainingPipeline. The TrainingPipeline's
* [training_task_definition][google.cloud.aiplatform.v1beta1.TrainingPipeline.training_task_definition] should make clear whether this Model
* description should be populated, and if there are any special requirements
@@ -634,4 +634,48 @@ public interface TrainingPipelineOrBuilder
* map<string, string> labels = 15;
*/
java.lang.String getLabelsOrThrow(java.lang.String key);
+
+ /**
+ *
+ *
+ *
+ * Customer-managed encryption key spec for a TrainingPipeline. If set, this
+ * TrainingPipeline will be secured by this key.
+ * Note: Model trained by this TrainingPipeline is also secured by this key if
+ * [model_to_upload][google.cloud.aiplatform.v1beta1.TrainingPipeline.encryption_spec] is not set separately.
+ *
+ *
+ * .google.cloud.aiplatform.v1beta1.EncryptionSpec encryption_spec = 18;
+ *
+ * @return Whether the encryptionSpec field is set.
+ */
+ boolean hasEncryptionSpec();
+ /**
+ *
+ *
+ *
+ * Customer-managed encryption key spec for a TrainingPipeline. If set, this
+ * TrainingPipeline will be secured by this key.
+ * Note: Model trained by this TrainingPipeline is also secured by this key if
+ * [model_to_upload][google.cloud.aiplatform.v1beta1.TrainingPipeline.encryption_spec] is not set separately.
+ *
+ *
+ * .google.cloud.aiplatform.v1beta1.EncryptionSpec encryption_spec = 18;
+ *
+ * @return The encryptionSpec.
+ */
+ com.google.cloud.aiplatform.v1beta1.EncryptionSpec getEncryptionSpec();
+ /**
+ *
+ *
+ *
+ * Customer-managed encryption key spec for a TrainingPipeline. If set, this
+ * TrainingPipeline will be secured by this key.
+ * Note: Model trained by this TrainingPipeline is also secured by this key if
+ * [model_to_upload][google.cloud.aiplatform.v1beta1.TrainingPipeline.encryption_spec] is not set separately.
+ *
+ *
+ * .google.cloud.aiplatform.v1beta1.EncryptionSpec encryption_spec = 18;
+ */
+ com.google.cloud.aiplatform.v1beta1.EncryptionSpecOrBuilder getEncryptionSpecOrBuilder();
}
diff --git a/proto-google-cloud-aiplatform-v1beta1/src/main/java/com/google/cloud/aiplatform/v1beta1/TrainingPipelineProto.java b/proto-google-cloud-aiplatform-v1beta1/src/main/java/com/google/cloud/aiplatform/v1beta1/TrainingPipelineProto.java
index d5a1dc5a8..bd427e389 100644
--- a/proto-google-cloud-aiplatform-v1beta1/src/main/java/com/google/cloud/aiplatform/v1beta1/TrainingPipelineProto.java
+++ b/proto-google-cloud-aiplatform-v1beta1/src/main/java/com/google/cloud/aiplatform/v1beta1/TrainingPipelineProto.java
@@ -67,68 +67,71 @@ public static com.google.protobuf.Descriptors.FileDescriptor getDescriptor() {
"\n7google/cloud/aiplatform/v1beta1/traini"
+ "ng_pipeline.proto\022\037google.cloud.aiplatfo"
+ "rm.v1beta1\032\037google/api/field_behavior.pr"
- + "oto\032\031google/api/resource.proto\032(google/c"
- + "loud/aiplatform/v1beta1/io.proto\0327google"
- + "/cloud/aiplatform/v1beta1/machine_resour"
- + "ces.proto\032Dgoogle/cloud/aiplatform/v1bet"
- + "a1/manual_batch_tuning_parameters.proto\032"
- + "+google/cloud/aiplatform/v1beta1/model.p"
- + "roto\0324google/cloud/aiplatform/v1beta1/pi"
- + "peline_state.proto\032\034google/protobuf/stru"
- + "ct.proto\032\037google/protobuf/timestamp.prot"
- + "o\032\027google/rpc/status.proto\032\034google/api/a"
- + "nnotations.proto\"\253\007\n\020TrainingPipeline\022\021\n"
- + "\004name\030\001 \001(\tB\003\340A\003\022\031\n\014display_name\030\002 \001(\tB\003"
- + "\340A\002\022K\n\021input_data_config\030\003 \001(\01320.google."
- + "cloud.aiplatform.v1beta1.InputDataConfig"
- + "\022%\n\030training_task_definition\030\004 \001(\tB\003\340A\002\022"
- + "9\n\024training_task_inputs\030\005 \001(\0132\026.google.p"
- + "rotobuf.ValueB\003\340A\002\022;\n\026training_task_meta"
- + "data\030\006 \001(\0132\026.google.protobuf.ValueB\003\340A\003\022"
- + "?\n\017model_to_upload\030\007 \001(\0132&.google.cloud."
- + "aiplatform.v1beta1.Model\022B\n\005state\030\t \001(\0162"
- + "..google.cloud.aiplatform.v1beta1.Pipeli"
- + "neStateB\003\340A\003\022&\n\005error\030\n \001(\0132\022.google.rpc"
- + ".StatusB\003\340A\003\0224\n\013create_time\030\013 \001(\0132\032.goog"
- + "le.protobuf.TimestampB\003\340A\003\0223\n\nstart_time"
- + "\030\014 \001(\0132\032.google.protobuf.TimestampB\003\340A\003\022"
- + "1\n\010end_time\030\r \001(\0132\032.google.protobuf.Time"
- + "stampB\003\340A\003\0224\n\013update_time\030\016 \001(\0132\032.google"
- + ".protobuf.TimestampB\003\340A\003\022M\n\006labels\030\017 \003(\013"
- + "2=.google.cloud.aiplatform.v1beta1.Train"
- + "ingPipeline.LabelsEntry\032-\n\013LabelsEntry\022\013"
- + "\n\003key\030\001 \001(\t\022\r\n\005value\030\002 \001(\t:\0028\001:~\352A{\n*aip"
- + "latform.googleapis.com/TrainingPipeline\022"
- + "Mprojects/{project}/locations/{location}"
- + "/trainingPipelines/{training_pipeline}\"\311"
- + "\004\n\017InputDataConfig\022H\n\016fraction_split\030\002 \001"
- + "(\0132..google.cloud.aiplatform.v1beta1.Fra"
- + "ctionSplitH\000\022D\n\014filter_split\030\003 \001(\0132,.goo"
- + "gle.cloud.aiplatform.v1beta1.FilterSplit"
- + "H\000\022L\n\020predefined_split\030\004 \001(\01320.google.cl"
- + "oud.aiplatform.v1beta1.PredefinedSplitH\000"
- + "\022J\n\017timestamp_split\030\005 \001(\0132/.google.cloud"
- + ".aiplatform.v1beta1.TimestampSplitH\000\022J\n\017"
- + "gcs_destination\030\010 \001(\0132/.google.cloud.aip"
- + "latform.v1beta1.GcsDestinationH\001\022T\n\024bigq"
- + "uery_destination\030\n \001(\01324.google.cloud.ai"
- + "platform.v1beta1.BigQueryDestinationH\001\022\027"
- + "\n\ndataset_id\030\001 \001(\tB\003\340A\002\022\032\n\022annotations_f"
- + "ilter\030\006 \001(\t\022\035\n\025annotation_schema_uri\030\t \001"
- + "(\tB\007\n\005splitB\r\n\013destination\"^\n\rFractionSp"
- + "lit\022\031\n\021training_fraction\030\001 \001(\001\022\033\n\023valida"
- + "tion_fraction\030\002 \001(\001\022\025\n\rtest_fraction\030\003 \001"
- + "(\001\"e\n\013FilterSplit\022\034\n\017training_filter\030\001 \001"
- + "(\tB\003\340A\002\022\036\n\021validation_filter\030\002 \001(\tB\003\340A\002\022"
- + "\030\n\013test_filter\030\003 \001(\tB\003\340A\002\"#\n\017PredefinedS"
- + "plit\022\020\n\003key\030\001 \001(\tB\003\340A\002\"q\n\016TimestampSplit"
- + "\022\031\n\021training_fraction\030\001 \001(\001\022\033\n\023validatio"
- + "n_fraction\030\002 \001(\001\022\025\n\rtest_fraction\030\003 \001(\001\022"
- + "\020\n\003key\030\004 \001(\tB\003\340A\002B\211\001\n#com.google.cloud.a"
- + "iplatform.v1beta1B\025TrainingPipelineProto"
- + "P\001ZIgoogle.golang.org/genproto/googleapi"
- + "s/cloud/aiplatform/v1beta1;aiplatformb\006p"
- + "roto3"
+ + "oto\032\031google/api/resource.proto\0325google/c"
+ + "loud/aiplatform/v1beta1/encryption_spec."
+ + "proto\032(google/cloud/aiplatform/v1beta1/i"
+ + "o.proto\0327google/cloud/aiplatform/v1beta1"
+ + "/machine_resources.proto\032Dgoogle/cloud/a"
+ + "iplatform/v1beta1/manual_batch_tuning_pa"
+ + "rameters.proto\032+google/cloud/aiplatform/"
+ + "v1beta1/model.proto\0324google/cloud/aiplat"
+ + "form/v1beta1/pipeline_state.proto\032\034googl"
+ + "e/protobuf/struct.proto\032\037google/protobuf"
+ + "/timestamp.proto\032\027google/rpc/status.prot"
+ + "o\032\034google/api/annotations.proto\"\365\007\n\020Trai"
+ + "ningPipeline\022\021\n\004name\030\001 \001(\tB\003\340A\003\022\031\n\014displ"
+ + "ay_name\030\002 \001(\tB\003\340A\002\022K\n\021input_data_config\030"
+ + "\003 \001(\01320.google.cloud.aiplatform.v1beta1."
+ + "InputDataConfig\022%\n\030training_task_definit"
+ + "ion\030\004 \001(\tB\003\340A\002\0229\n\024training_task_inputs\030\005"
+ + " \001(\0132\026.google.protobuf.ValueB\003\340A\002\022;\n\026tra"
+ + "ining_task_metadata\030\006 \001(\0132\026.google.proto"
+ + "buf.ValueB\003\340A\003\022?\n\017model_to_upload\030\007 \001(\0132"
+ + "&.google.cloud.aiplatform.v1beta1.Model\022"
+ + "B\n\005state\030\t \001(\0162..google.cloud.aiplatform"
+ + ".v1beta1.PipelineStateB\003\340A\003\022&\n\005error\030\n \001"
+ + "(\0132\022.google.rpc.StatusB\003\340A\003\0224\n\013create_ti"
+ + "me\030\013 \001(\0132\032.google.protobuf.TimestampB\003\340A"
+ + "\003\0223\n\nstart_time\030\014 \001(\0132\032.google.protobuf."
+ + "TimestampB\003\340A\003\0221\n\010end_time\030\r \001(\0132\032.googl"
+ + "e.protobuf.TimestampB\003\340A\003\0224\n\013update_time"
+ + "\030\016 \001(\0132\032.google.protobuf.TimestampB\003\340A\003\022"
+ + "M\n\006labels\030\017 \003(\0132=.google.cloud.aiplatfor"
+ + "m.v1beta1.TrainingPipeline.LabelsEntry\022H"
+ + "\n\017encryption_spec\030\022 \001(\0132/.google.cloud.a"
+ + "iplatform.v1beta1.EncryptionSpec\032-\n\013Labe"
+ + "lsEntry\022\013\n\003key\030\001 \001(\t\022\r\n\005value\030\002 \001(\t:\0028\001:"
+ + "~\352A{\n*aiplatform.googleapis.com/Training"
+ + "Pipeline\022Mprojects/{project}/locations/{"
+ + "location}/trainingPipelines/{training_pi"
+ + "peline}\"\311\004\n\017InputDataConfig\022H\n\016fraction_"
+ + "split\030\002 \001(\0132..google.cloud.aiplatform.v1"
+ + "beta1.FractionSplitH\000\022D\n\014filter_split\030\003 "
+ + "\001(\0132,.google.cloud.aiplatform.v1beta1.Fi"
+ + "lterSplitH\000\022L\n\020predefined_split\030\004 \001(\01320."
+ + "google.cloud.aiplatform.v1beta1.Predefin"
+ + "edSplitH\000\022J\n\017timestamp_split\030\005 \001(\0132/.goo"
+ + "gle.cloud.aiplatform.v1beta1.TimestampSp"
+ + "litH\000\022J\n\017gcs_destination\030\010 \001(\0132/.google."
+ + "cloud.aiplatform.v1beta1.GcsDestinationH"
+ + "\001\022T\n\024bigquery_destination\030\n \001(\01324.google"
+ + ".cloud.aiplatform.v1beta1.BigQueryDestin"
+ + "ationH\001\022\027\n\ndataset_id\030\001 \001(\tB\003\340A\002\022\032\n\022anno"
+ + "tations_filter\030\006 \001(\t\022\035\n\025annotation_schem"
+ + "a_uri\030\t \001(\tB\007\n\005splitB\r\n\013destination\"^\n\rF"
+ + "ractionSplit\022\031\n\021training_fraction\030\001 \001(\001\022"
+ + "\033\n\023validation_fraction\030\002 \001(\001\022\025\n\rtest_fra"
+ + "ction\030\003 \001(\001\"e\n\013FilterSplit\022\034\n\017training_f"
+ + "ilter\030\001 \001(\tB\003\340A\002\022\036\n\021validation_filter\030\002 "
+ + "\001(\tB\003\340A\002\022\030\n\013test_filter\030\003 \001(\tB\003\340A\002\"#\n\017Pr"
+ + "edefinedSplit\022\020\n\003key\030\001 \001(\tB\003\340A\002\"q\n\016Times"
+ + "tampSplit\022\031\n\021training_fraction\030\001 \001(\001\022\033\n\023"
+ + "validation_fraction\030\002 \001(\001\022\025\n\rtest_fracti"
+ + "on\030\003 \001(\001\022\020\n\003key\030\004 \001(\tB\003\340A\002B\211\001\n#com.googl"
+ + "e.cloud.aiplatform.v1beta1B\025TrainingPipe"
+ + "lineProtoP\001ZIgoogle.golang.org/genproto/"
+ + "googleapis/cloud/aiplatform/v1beta1;aipl"
+ + "atformb\006proto3"
};
descriptor =
com.google.protobuf.Descriptors.FileDescriptor.internalBuildGeneratedFileFrom(
@@ -136,6 +139,7 @@ public static com.google.protobuf.Descriptors.FileDescriptor getDescriptor() {
new com.google.protobuf.Descriptors.FileDescriptor[] {
com.google.api.FieldBehaviorProto.getDescriptor(),
com.google.api.ResourceProto.getDescriptor(),
+ com.google.cloud.aiplatform.v1beta1.EncryptionSpecProto.getDescriptor(),
com.google.cloud.aiplatform.v1beta1.IoProto.getDescriptor(),
com.google.cloud.aiplatform.v1beta1.MachineResourcesProto.getDescriptor(),
com.google.cloud.aiplatform.v1beta1.ManualBatchTuningParametersProto.getDescriptor(),
@@ -166,6 +170,7 @@ public static com.google.protobuf.Descriptors.FileDescriptor getDescriptor() {
"EndTime",
"UpdateTime",
"Labels",
+ "EncryptionSpec",
});
internal_static_google_cloud_aiplatform_v1beta1_TrainingPipeline_LabelsEntry_descriptor =
internal_static_google_cloud_aiplatform_v1beta1_TrainingPipeline_descriptor
@@ -235,6 +240,7 @@ public static com.google.protobuf.Descriptors.FileDescriptor getDescriptor() {
descriptor, registry);
com.google.api.FieldBehaviorProto.getDescriptor();
com.google.api.ResourceProto.getDescriptor();
+ com.google.cloud.aiplatform.v1beta1.EncryptionSpecProto.getDescriptor();
com.google.cloud.aiplatform.v1beta1.IoProto.getDescriptor();
com.google.cloud.aiplatform.v1beta1.MachineResourcesProto.getDescriptor();
com.google.cloud.aiplatform.v1beta1.ManualBatchTuningParametersProto.getDescriptor();
diff --git a/proto-google-cloud-aiplatform-v1beta1/src/main/java/com/google/cloud/aiplatform/v1beta1/TrialName.java b/proto-google-cloud-aiplatform-v1beta1/src/main/java/com/google/cloud/aiplatform/v1beta1/TrialName.java
new file mode 100644
index 000000000..9697fffbb
--- /dev/null
+++ b/proto-google-cloud-aiplatform-v1beta1/src/main/java/com/google/cloud/aiplatform/v1beta1/TrialName.java
@@ -0,0 +1,257 @@
+/*
+ * Copyright 2020 Google LLC
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * https://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package com.google.cloud.aiplatform.v1beta1;
+
+import com.google.api.pathtemplate.PathTemplate;
+import com.google.api.resourcenames.ResourceName;
+import com.google.common.base.Preconditions;
+import com.google.common.collect.ImmutableMap;
+import java.util.ArrayList;
+import java.util.List;
+import java.util.Map;
+import java.util.Objects;
+import javax.annotation.Generated;
+
+// AUTO-GENERATED DOCUMENTATION AND CLASS.
+@Generated("by gapic-generator-java")
+public class TrialName implements ResourceName {
+ private static final PathTemplate PROJECT_LOCATION_STUDY_TRIAL =
+ PathTemplate.createWithoutUrlEncoding(
+ "projects/{project}/locations/{location}/studies/{study}/trials/{trial}");
+ private volatile Map
* Required. The update mask applies to the resource.
* For the `FieldMask` definition, see
- * [FieldMask](https:
- * //tinyurl.com/dev-google-protobuf#google.protobuf.FieldMask).
+ * [FieldMask](https://tinyurl.com/protobufs/google.protobuf#fieldmask).
* Updatable fields:
* * `display_name`
* * `description`
@@ -218,8 +217,7 @@ public boolean hasUpdateMask() {
*
* Required. The update mask applies to the resource.
* For the `FieldMask` definition, see
- * [FieldMask](https:
- * //tinyurl.com/dev-google-protobuf#google.protobuf.FieldMask).
+ * [FieldMask](https://tinyurl.com/protobufs/google.protobuf#fieldmask).
* Updatable fields:
* * `display_name`
* * `description`
@@ -241,8 +239,7 @@ public com.google.protobuf.FieldMask getUpdateMask() {
*
* Required. The update mask applies to the resource.
* For the `FieldMask` definition, see
- * [FieldMask](https:
- * //tinyurl.com/dev-google-protobuf#google.protobuf.FieldMask).
+ * [FieldMask](https://tinyurl.com/protobufs/google.protobuf#fieldmask).
* Updatable fields:
* * `display_name`
* * `description`
@@ -831,8 +828,7 @@ public com.google.cloud.aiplatform.v1beta1.DatasetOrBuilder getDatasetOrBuilder(
*
* Required. The update mask applies to the resource.
* For the `FieldMask` definition, see
- * [FieldMask](https:
- * //tinyurl.com/dev-google-protobuf#google.protobuf.FieldMask).
+ * [FieldMask](https://tinyurl.com/protobufs/google.protobuf#fieldmask).
* Updatable fields:
* * `display_name`
* * `description`
@@ -853,8 +849,7 @@ public boolean hasUpdateMask() {
*
* Required. The update mask applies to the resource.
* For the `FieldMask` definition, see
- * [FieldMask](https:
- * //tinyurl.com/dev-google-protobuf#google.protobuf.FieldMask).
+ * [FieldMask](https://tinyurl.com/protobufs/google.protobuf#fieldmask).
* Updatable fields:
* * `display_name`
* * `description`
@@ -881,8 +876,7 @@ public com.google.protobuf.FieldMask getUpdateMask() {
*
* Required. The update mask applies to the resource.
* For the `FieldMask` definition, see
- * [FieldMask](https:
- * //tinyurl.com/dev-google-protobuf#google.protobuf.FieldMask).
+ * [FieldMask](https://tinyurl.com/protobufs/google.protobuf#fieldmask).
* Updatable fields:
* * `display_name`
* * `description`
@@ -911,8 +905,7 @@ public Builder setUpdateMask(com.google.protobuf.FieldMask value) {
*
* Required. The update mask applies to the resource.
* For the `FieldMask` definition, see
- * [FieldMask](https:
- * //tinyurl.com/dev-google-protobuf#google.protobuf.FieldMask).
+ * [FieldMask](https://tinyurl.com/protobufs/google.protobuf#fieldmask).
* Updatable fields:
* * `display_name`
* * `description`
@@ -938,8 +931,7 @@ public Builder setUpdateMask(com.google.protobuf.FieldMask.Builder builderForVal
*
* Required. The update mask applies to the resource.
* For the `FieldMask` definition, see
- * [FieldMask](https:
- * //tinyurl.com/dev-google-protobuf#google.protobuf.FieldMask).
+ * [FieldMask](https://tinyurl.com/protobufs/google.protobuf#fieldmask).
* Updatable fields:
* * `display_name`
* * `description`
@@ -970,8 +962,7 @@ public Builder mergeUpdateMask(com.google.protobuf.FieldMask value) {
*
* Required. The update mask applies to the resource.
* For the `FieldMask` definition, see
- * [FieldMask](https:
- * //tinyurl.com/dev-google-protobuf#google.protobuf.FieldMask).
+ * [FieldMask](https://tinyurl.com/protobufs/google.protobuf#fieldmask).
* Updatable fields:
* * `display_name`
* * `description`
@@ -998,8 +989,7 @@ public Builder clearUpdateMask() {
*
* Required. The update mask applies to the resource.
* For the `FieldMask` definition, see
- * [FieldMask](https:
- * //tinyurl.com/dev-google-protobuf#google.protobuf.FieldMask).
+ * [FieldMask](https://tinyurl.com/protobufs/google.protobuf#fieldmask).
* Updatable fields:
* * `display_name`
* * `description`
@@ -1020,8 +1010,7 @@ public com.google.protobuf.FieldMask.Builder getUpdateMaskBuilder() {
*
* Required. The update mask applies to the resource.
* For the `FieldMask` definition, see
- * [FieldMask](https:
- * //tinyurl.com/dev-google-protobuf#google.protobuf.FieldMask).
+ * [FieldMask](https://tinyurl.com/protobufs/google.protobuf#fieldmask).
* Updatable fields:
* * `display_name`
* * `description`
@@ -1046,8 +1035,7 @@ public com.google.protobuf.FieldMaskOrBuilder getUpdateMaskOrBuilder() {
*
* Required. The update mask applies to the resource.
* For the `FieldMask` definition, see
- * [FieldMask](https:
- * //tinyurl.com/dev-google-protobuf#google.protobuf.FieldMask).
+ * [FieldMask](https://tinyurl.com/protobufs/google.protobuf#fieldmask).
* Updatable fields:
* * `display_name`
* * `description`
diff --git a/proto-google-cloud-aiplatform-v1beta1/src/main/java/com/google/cloud/aiplatform/v1beta1/UpdateDatasetRequestOrBuilder.java b/proto-google-cloud-aiplatform-v1beta1/src/main/java/com/google/cloud/aiplatform/v1beta1/UpdateDatasetRequestOrBuilder.java
index 6e34c4c84..3a5dc293a 100644
--- a/proto-google-cloud-aiplatform-v1beta1/src/main/java/com/google/cloud/aiplatform/v1beta1/UpdateDatasetRequestOrBuilder.java
+++ b/proto-google-cloud-aiplatform-v1beta1/src/main/java/com/google/cloud/aiplatform/v1beta1/UpdateDatasetRequestOrBuilder.java
@@ -70,8 +70,7 @@ public interface UpdateDatasetRequestOrBuilder
*
* Required. The update mask applies to the resource.
* For the `FieldMask` definition, see
- * [FieldMask](https:
- * //tinyurl.com/dev-google-protobuf#google.protobuf.FieldMask).
+ * [FieldMask](https://tinyurl.com/protobufs/google.protobuf#fieldmask).
* Updatable fields:
* * `display_name`
* * `description`
@@ -90,8 +89,7 @@ public interface UpdateDatasetRequestOrBuilder
*
* Required. The update mask applies to the resource.
* For the `FieldMask` definition, see
- * [FieldMask](https:
- * //tinyurl.com/dev-google-protobuf#google.protobuf.FieldMask).
+ * [FieldMask](https://tinyurl.com/protobufs/google.protobuf#fieldmask).
* Updatable fields:
* * `display_name`
* * `description`
@@ -110,8 +108,7 @@ public interface UpdateDatasetRequestOrBuilder
*
* Required. The update mask applies to the resource.
* For the `FieldMask` definition, see
- * [FieldMask](https:
- * //tinyurl.com/dev-google-protobuf#google.protobuf.FieldMask).
+ * [FieldMask](https://tinyurl.com/protobufs/google.protobuf#fieldmask).
* Updatable fields:
* * `display_name`
* * `description`
diff --git a/proto-google-cloud-aiplatform-v1beta1/src/main/java/com/google/cloud/aiplatform/v1beta1/UpdateEndpointRequest.java b/proto-google-cloud-aiplatform-v1beta1/src/main/java/com/google/cloud/aiplatform/v1beta1/UpdateEndpointRequest.java
index d9f0a4fbb..3e01d02c6 100644
--- a/proto-google-cloud-aiplatform-v1beta1/src/main/java/com/google/cloud/aiplatform/v1beta1/UpdateEndpointRequest.java
+++ b/proto-google-cloud-aiplatform-v1beta1/src/main/java/com/google/cloud/aiplatform/v1beta1/UpdateEndpointRequest.java
@@ -194,6 +194,8 @@ public com.google.cloud.aiplatform.v1beta1.EndpointOrBuilder getEndpointOrBuilde
*
*
* Required. The update mask applies to the resource.
+ * See
+ * [FieldMask](https://tinyurl.com/protobufs/google.protobuf#fieldmask).
*
*
* .google.protobuf.FieldMask update_mask = 2 [(.google.api.field_behavior) = REQUIRED];
@@ -210,6 +212,8 @@ public boolean hasUpdateMask() {
*
*
* Required. The update mask applies to the resource.
+ * See
+ * [FieldMask](https://tinyurl.com/protobufs/google.protobuf#fieldmask).
*
*
* .google.protobuf.FieldMask update_mask = 2 [(.google.api.field_behavior) = REQUIRED];
@@ -226,6 +230,8 @@ public com.google.protobuf.FieldMask getUpdateMask() {
*
*
* Required. The update mask applies to the resource.
+ * See
+ * [FieldMask](https://tinyurl.com/protobufs/google.protobuf#fieldmask).
*
*
* .google.protobuf.FieldMask update_mask = 2 [(.google.api.field_behavior) = REQUIRED];
@@ -810,6 +816,8 @@ public com.google.cloud.aiplatform.v1beta1.EndpointOrBuilder getEndpointOrBuilde
*
*
* Required. The update mask applies to the resource.
+ * See
+ * [FieldMask](https://tinyurl.com/protobufs/google.protobuf#fieldmask).
*
*
* .google.protobuf.FieldMask update_mask = 2 [(.google.api.field_behavior) = REQUIRED];
@@ -825,6 +833,8 @@ public boolean hasUpdateMask() {
*
*
* Required. The update mask applies to the resource.
+ * See
+ * [FieldMask](https://tinyurl.com/protobufs/google.protobuf#fieldmask).
*
*
* .google.protobuf.FieldMask update_mask = 2 [(.google.api.field_behavior) = REQUIRED];
@@ -846,6 +856,8 @@ public com.google.protobuf.FieldMask getUpdateMask() {
*
*
* Required. The update mask applies to the resource.
+ * See
+ * [FieldMask](https://tinyurl.com/protobufs/google.protobuf#fieldmask).
*
*
* .google.protobuf.FieldMask update_mask = 2 [(.google.api.field_behavior) = REQUIRED];
@@ -869,6 +881,8 @@ public Builder setUpdateMask(com.google.protobuf.FieldMask value) {
*
*
* Required. The update mask applies to the resource.
+ * See
+ * [FieldMask](https://tinyurl.com/protobufs/google.protobuf#fieldmask).
*
*
* .google.protobuf.FieldMask update_mask = 2 [(.google.api.field_behavior) = REQUIRED];
@@ -889,6 +903,8 @@ public Builder setUpdateMask(com.google.protobuf.FieldMask.Builder builderForVal
*
*
* Required. The update mask applies to the resource.
+ * See
+ * [FieldMask](https://tinyurl.com/protobufs/google.protobuf#fieldmask).
*
*
* .google.protobuf.FieldMask update_mask = 2 [(.google.api.field_behavior) = REQUIRED];
@@ -914,6 +930,8 @@ public Builder mergeUpdateMask(com.google.protobuf.FieldMask value) {
*
*
* Required. The update mask applies to the resource.
+ * See
+ * [FieldMask](https://tinyurl.com/protobufs/google.protobuf#fieldmask).
*
*
* .google.protobuf.FieldMask update_mask = 2 [(.google.api.field_behavior) = REQUIRED];
@@ -935,6 +953,8 @@ public Builder clearUpdateMask() {
*
*
* Required. The update mask applies to the resource.
+ * See
+ * [FieldMask](https://tinyurl.com/protobufs/google.protobuf#fieldmask).
*
*
* .google.protobuf.FieldMask update_mask = 2 [(.google.api.field_behavior) = REQUIRED];
@@ -950,6 +970,8 @@ public com.google.protobuf.FieldMask.Builder getUpdateMaskBuilder() {
*
*
* Required. The update mask applies to the resource.
+ * See
+ * [FieldMask](https://tinyurl.com/protobufs/google.protobuf#fieldmask).
*
*
* .google.protobuf.FieldMask update_mask = 2 [(.google.api.field_behavior) = REQUIRED];
@@ -969,6 +991,8 @@ public com.google.protobuf.FieldMaskOrBuilder getUpdateMaskOrBuilder() {
*
*
* Required. The update mask applies to the resource.
+ * See
+ * [FieldMask](https://tinyurl.com/protobufs/google.protobuf#fieldmask).
*
*
* .google.protobuf.FieldMask update_mask = 2 [(.google.api.field_behavior) = REQUIRED];
diff --git a/proto-google-cloud-aiplatform-v1beta1/src/main/java/com/google/cloud/aiplatform/v1beta1/UpdateEndpointRequestOrBuilder.java b/proto-google-cloud-aiplatform-v1beta1/src/main/java/com/google/cloud/aiplatform/v1beta1/UpdateEndpointRequestOrBuilder.java
index 615096d7a..d381f83e8 100644
--- a/proto-google-cloud-aiplatform-v1beta1/src/main/java/com/google/cloud/aiplatform/v1beta1/UpdateEndpointRequestOrBuilder.java
+++ b/proto-google-cloud-aiplatform-v1beta1/src/main/java/com/google/cloud/aiplatform/v1beta1/UpdateEndpointRequestOrBuilder.java
@@ -69,6 +69,8 @@ public interface UpdateEndpointRequestOrBuilder
*
*
* Required. The update mask applies to the resource.
+ * See
+ * [FieldMask](https://tinyurl.com/protobufs/google.protobuf#fieldmask).
*
*
* .google.protobuf.FieldMask update_mask = 2 [(.google.api.field_behavior) = REQUIRED];
@@ -82,6 +84,8 @@ public interface UpdateEndpointRequestOrBuilder
*
*
* Required. The update mask applies to the resource.
+ * See
+ * [FieldMask](https://tinyurl.com/protobufs/google.protobuf#fieldmask).
*
*
* .google.protobuf.FieldMask update_mask = 2 [(.google.api.field_behavior) = REQUIRED];
@@ -95,6 +99,8 @@ public interface UpdateEndpointRequestOrBuilder
*
*
* Required. The update mask applies to the resource.
+ * See
+ * [FieldMask](https://tinyurl.com/protobufs/google.protobuf#fieldmask).
*
*
* .google.protobuf.FieldMask update_mask = 2 [(.google.api.field_behavior) = REQUIRED];
diff --git a/proto-google-cloud-aiplatform-v1beta1/src/main/java/com/google/cloud/aiplatform/v1beta1/UpdateModelRequest.java b/proto-google-cloud-aiplatform-v1beta1/src/main/java/com/google/cloud/aiplatform/v1beta1/UpdateModelRequest.java
index bc165bd33..62902fada 100644
--- a/proto-google-cloud-aiplatform-v1beta1/src/main/java/com/google/cloud/aiplatform/v1beta1/UpdateModelRequest.java
+++ b/proto-google-cloud-aiplatform-v1beta1/src/main/java/com/google/cloud/aiplatform/v1beta1/UpdateModelRequest.java
@@ -193,9 +193,7 @@ public com.google.cloud.aiplatform.v1beta1.ModelOrBuilder getModelOrBuilder() {
*
* Required. The update mask applies to the resource.
* For the `FieldMask` definition, see
- * [FieldMask](https:
- * //developers.google.com/protocol-buffers
- * // /docs/reference/google.protobuf#fieldmask).
+ * [FieldMask](https://tinyurl.com/protobufs/google.protobuf#fieldmask).
*
*
* .google.protobuf.FieldMask update_mask = 2 [(.google.api.field_behavior) = REQUIRED];
@@ -213,9 +211,7 @@ public boolean hasUpdateMask() {
*
* Required. The update mask applies to the resource.
* For the `FieldMask` definition, see
- * [FieldMask](https:
- * //developers.google.com/protocol-buffers
- * // /docs/reference/google.protobuf#fieldmask).
+ * [FieldMask](https://tinyurl.com/protobufs/google.protobuf#fieldmask).
*
*
* .google.protobuf.FieldMask update_mask = 2 [(.google.api.field_behavior) = REQUIRED];
@@ -233,9 +229,7 @@ public com.google.protobuf.FieldMask getUpdateMask() {
*
* Required. The update mask applies to the resource.
* For the `FieldMask` definition, see
- * [FieldMask](https:
- * //developers.google.com/protocol-buffers
- * // /docs/reference/google.protobuf#fieldmask).
+ * [FieldMask](https://tinyurl.com/protobufs/google.protobuf#fieldmask).
*
*
* .google.protobuf.FieldMask update_mask = 2 [(.google.api.field_behavior) = REQUIRED];
@@ -820,9 +814,7 @@ public com.google.cloud.aiplatform.v1beta1.ModelOrBuilder getModelOrBuilder() {
*
* Required. The update mask applies to the resource.
* For the `FieldMask` definition, see
- * [FieldMask](https:
- * //developers.google.com/protocol-buffers
- * // /docs/reference/google.protobuf#fieldmask).
+ * [FieldMask](https://tinyurl.com/protobufs/google.protobuf#fieldmask).
*
*
* .google.protobuf.FieldMask update_mask = 2 [(.google.api.field_behavior) = REQUIRED];
@@ -839,9 +831,7 @@ public boolean hasUpdateMask() {
*
* Required. The update mask applies to the resource.
* For the `FieldMask` definition, see
- * [FieldMask](https:
- * //developers.google.com/protocol-buffers
- * // /docs/reference/google.protobuf#fieldmask).
+ * [FieldMask](https://tinyurl.com/protobufs/google.protobuf#fieldmask).
*
*
* .google.protobuf.FieldMask update_mask = 2 [(.google.api.field_behavior) = REQUIRED];
@@ -864,9 +854,7 @@ public com.google.protobuf.FieldMask getUpdateMask() {
*
* Required. The update mask applies to the resource.
* For the `FieldMask` definition, see
- * [FieldMask](https:
- * //developers.google.com/protocol-buffers
- * // /docs/reference/google.protobuf#fieldmask).
+ * [FieldMask](https://tinyurl.com/protobufs/google.protobuf#fieldmask).
*
*
* .google.protobuf.FieldMask update_mask = 2 [(.google.api.field_behavior) = REQUIRED];
@@ -891,9 +879,7 @@ public Builder setUpdateMask(com.google.protobuf.FieldMask value) {
*
* Required. The update mask applies to the resource.
* For the `FieldMask` definition, see
- * [FieldMask](https:
- * //developers.google.com/protocol-buffers
- * // /docs/reference/google.protobuf#fieldmask).
+ * [FieldMask](https://tinyurl.com/protobufs/google.protobuf#fieldmask).
*
*
* .google.protobuf.FieldMask update_mask = 2 [(.google.api.field_behavior) = REQUIRED];
@@ -915,9 +901,7 @@ public Builder setUpdateMask(com.google.protobuf.FieldMask.Builder builderForVal
*
* Required. The update mask applies to the resource.
* For the `FieldMask` definition, see
- * [FieldMask](https:
- * //developers.google.com/protocol-buffers
- * // /docs/reference/google.protobuf#fieldmask).
+ * [FieldMask](https://tinyurl.com/protobufs/google.protobuf#fieldmask).
*
*
* .google.protobuf.FieldMask update_mask = 2 [(.google.api.field_behavior) = REQUIRED];
@@ -944,9 +928,7 @@ public Builder mergeUpdateMask(com.google.protobuf.FieldMask value) {
*
* Required. The update mask applies to the resource.
* For the `FieldMask` definition, see
- * [FieldMask](https:
- * //developers.google.com/protocol-buffers
- * // /docs/reference/google.protobuf#fieldmask).
+ * [FieldMask](https://tinyurl.com/protobufs/google.protobuf#fieldmask).
*
*
* .google.protobuf.FieldMask update_mask = 2 [(.google.api.field_behavior) = REQUIRED];
@@ -969,9 +951,7 @@ public Builder clearUpdateMask() {
*