/*
 * Copyright 2020 Google LLC
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *     https://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
// Generated by the protocol buffer compiler. DO NOT EDIT!
// source: google/cloud/aiplatform/v1beta1/batch_prediction_job.proto

package com.google.cloud.aiplatform.v1beta1;

/**
 *
 *
 * <pre>
 * A job that uses a
 * [Model][google.cloud.aiplatform.v1beta1.BatchPredictionJob.model] to produce
 * predictions on multiple [input
 * instances][google.cloud.aiplatform.v1beta1.BatchPredictionJob.input_config].
 * If predictions for a significant portion of the instances fail, the job may
 * finish without attempting predictions for all remaining instances.
 * </pre>
 *
 * Protobuf type {@code google.cloud.aiplatform.v1beta1.BatchPredictionJob}
 */
public final class BatchPredictionJob extends com.google.protobuf.GeneratedMessageV3
    implements
    // @@protoc_insertion_point(message_implements:google.cloud.aiplatform.v1beta1.BatchPredictionJob)
    BatchPredictionJobOrBuilder {
  private static final long serialVersionUID = 0L;
  // Use BatchPredictionJob.newBuilder() to construct.
  private BatchPredictionJob(com.google.protobuf.GeneratedMessageV3.Builder<?> builder) {
    super(builder);
  }

  private BatchPredictionJob() {
    name_ = "";
    displayName_ = "";
    model_ = "";
    modelVersionId_ = "";
    serviceAccount_ = "";
    state_ = 0;
    partialFailures_ = java.util.Collections.emptyList();
    modelMonitoringStatsAnomalies_ = java.util.Collections.emptyList();
  }

  @java.lang.Override
  @SuppressWarnings({"unused"})
  protected java.lang.Object newInstance(UnusedPrivateParameter unused) {
    return new BatchPredictionJob();
  }

  @java.lang.Override
  public final com.google.protobuf.UnknownFieldSet getUnknownFields() {
    return this.unknownFields;
  }

  public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
    return com.google.cloud.aiplatform.v1beta1.BatchPredictionJobProto
        .internal_static_google_cloud_aiplatform_v1beta1_BatchPredictionJob_descriptor;
  }

  @SuppressWarnings({"rawtypes"})
  @java.lang.Override
  protected com.google.protobuf.MapField internalGetMapField(int number) {
    switch (number) {
      case 19:
        return internalGetLabels();
      default:
        throw new RuntimeException("Invalid map field number: " + number);
    }
  }

  @java.lang.Override
  protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
      internalGetFieldAccessorTable() {
    return com.google.cloud.aiplatform.v1beta1.BatchPredictionJobProto
        .internal_static_google_cloud_aiplatform_v1beta1_BatchPredictionJob_fieldAccessorTable
        .ensureFieldAccessorsInitialized(
            com.google.cloud.aiplatform.v1beta1.BatchPredictionJob.class,
com.google.cloud.aiplatform.v1beta1.BatchPredictionJob.Builder.class); 91 } 92 93 public interface InputConfigOrBuilder 94 extends 95 // @@protoc_insertion_point(interface_extends:google.cloud.aiplatform.v1beta1.BatchPredictionJob.InputConfig) 96 com.google.protobuf.MessageOrBuilder { 97 98 /** 99 * 100 * 101 * <pre> 102 * The Cloud Storage location for the input instances. 103 * </pre> 104 * 105 * <code>.google.cloud.aiplatform.v1beta1.GcsSource gcs_source = 2;</code> 106 * 107 * @return Whether the gcsSource field is set. 108 */ hasGcsSource()109 boolean hasGcsSource(); 110 /** 111 * 112 * 113 * <pre> 114 * The Cloud Storage location for the input instances. 115 * </pre> 116 * 117 * <code>.google.cloud.aiplatform.v1beta1.GcsSource gcs_source = 2;</code> 118 * 119 * @return The gcsSource. 120 */ getGcsSource()121 com.google.cloud.aiplatform.v1beta1.GcsSource getGcsSource(); 122 /** 123 * 124 * 125 * <pre> 126 * The Cloud Storage location for the input instances. 127 * </pre> 128 * 129 * <code>.google.cloud.aiplatform.v1beta1.GcsSource gcs_source = 2;</code> 130 */ getGcsSourceOrBuilder()131 com.google.cloud.aiplatform.v1beta1.GcsSourceOrBuilder getGcsSourceOrBuilder(); 132 133 /** 134 * 135 * 136 * <pre> 137 * The BigQuery location of the input table. 138 * The schema of the table should be in the format described by the given 139 * context OpenAPI Schema, if one is provided. The table may contain 140 * additional columns that are not described by the schema, and they will 141 * be ignored. 142 * </pre> 143 * 144 * <code>.google.cloud.aiplatform.v1beta1.BigQuerySource bigquery_source = 3;</code> 145 * 146 * @return Whether the bigquerySource field is set. 147 */ hasBigquerySource()148 boolean hasBigquerySource(); 149 /** 150 * 151 * 152 * <pre> 153 * The BigQuery location of the input table. 154 * The schema of the table should be in the format described by the given 155 * context OpenAPI Schema, if one is provided. The table may contain 156 * additional columns that are not described by the schema, and they will 157 * be ignored. 158 * </pre> 159 * 160 * <code>.google.cloud.aiplatform.v1beta1.BigQuerySource bigquery_source = 3;</code> 161 * 162 * @return The bigquerySource. 163 */ getBigquerySource()164 com.google.cloud.aiplatform.v1beta1.BigQuerySource getBigquerySource(); 165 /** 166 * 167 * 168 * <pre> 169 * The BigQuery location of the input table. 170 * The schema of the table should be in the format described by the given 171 * context OpenAPI Schema, if one is provided. The table may contain 172 * additional columns that are not described by the schema, and they will 173 * be ignored. 174 * </pre> 175 * 176 * <code>.google.cloud.aiplatform.v1beta1.BigQuerySource bigquery_source = 3;</code> 177 */ getBigquerySourceOrBuilder()178 com.google.cloud.aiplatform.v1beta1.BigQuerySourceOrBuilder getBigquerySourceOrBuilder(); 179 180 /** 181 * 182 * 183 * <pre> 184 * Required. The format in which instances are given, must be one of the 185 * [Model's][google.cloud.aiplatform.v1beta1.BatchPredictionJob.model] 186 * [supported_input_storage_formats][google.cloud.aiplatform.v1beta1.Model.supported_input_storage_formats]. 187 * </pre> 188 * 189 * <code>string instances_format = 1 [(.google.api.field_behavior) = REQUIRED];</code> 190 * 191 * @return The instancesFormat. 192 */ getInstancesFormat()193 java.lang.String getInstancesFormat(); 194 /** 195 * 196 * 197 * <pre> 198 * Required. 
The format in which instances are given, must be one of the 199 * [Model's][google.cloud.aiplatform.v1beta1.BatchPredictionJob.model] 200 * [supported_input_storage_formats][google.cloud.aiplatform.v1beta1.Model.supported_input_storage_formats]. 201 * </pre> 202 * 203 * <code>string instances_format = 1 [(.google.api.field_behavior) = REQUIRED];</code> 204 * 205 * @return The bytes for instancesFormat. 206 */ getInstancesFormatBytes()207 com.google.protobuf.ByteString getInstancesFormatBytes(); 208 209 public com.google.cloud.aiplatform.v1beta1.BatchPredictionJob.InputConfig.SourceCase getSourceCase()210 getSourceCase(); 211 } 212 /** 213 * 214 * 215 * <pre> 216 * Configures the input to 217 * [BatchPredictionJob][google.cloud.aiplatform.v1beta1.BatchPredictionJob]. 218 * See 219 * [Model.supported_input_storage_formats][google.cloud.aiplatform.v1beta1.Model.supported_input_storage_formats] 220 * for Model's supported input formats, and how instances should be expressed 221 * via any of them. 222 * </pre> 223 * 224 * Protobuf type {@code google.cloud.aiplatform.v1beta1.BatchPredictionJob.InputConfig} 225 */ 226 public static final class InputConfig extends com.google.protobuf.GeneratedMessageV3 227 implements 228 // @@protoc_insertion_point(message_implements:google.cloud.aiplatform.v1beta1.BatchPredictionJob.InputConfig) 229 InputConfigOrBuilder { 230 private static final long serialVersionUID = 0L; 231 // Use InputConfig.newBuilder() to construct. InputConfig(com.google.protobuf.GeneratedMessageV3.Builder<?> builder)232 private InputConfig(com.google.protobuf.GeneratedMessageV3.Builder<?> builder) { 233 super(builder); 234 } 235 InputConfig()236 private InputConfig() { 237 instancesFormat_ = ""; 238 } 239 240 @java.lang.Override 241 @SuppressWarnings({"unused"}) newInstance(UnusedPrivateParameter unused)242 protected java.lang.Object newInstance(UnusedPrivateParameter unused) { 243 return new InputConfig(); 244 } 245 246 @java.lang.Override getUnknownFields()247 public final com.google.protobuf.UnknownFieldSet getUnknownFields() { 248 return this.unknownFields; 249 } 250 getDescriptor()251 public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { 252 return com.google.cloud.aiplatform.v1beta1.BatchPredictionJobProto 253 .internal_static_google_cloud_aiplatform_v1beta1_BatchPredictionJob_InputConfig_descriptor; 254 } 255 256 @java.lang.Override 257 protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable()258 internalGetFieldAccessorTable() { 259 return com.google.cloud.aiplatform.v1beta1.BatchPredictionJobProto 260 .internal_static_google_cloud_aiplatform_v1beta1_BatchPredictionJob_InputConfig_fieldAccessorTable 261 .ensureFieldAccessorsInitialized( 262 com.google.cloud.aiplatform.v1beta1.BatchPredictionJob.InputConfig.class, 263 com.google.cloud.aiplatform.v1beta1.BatchPredictionJob.InputConfig.Builder.class); 264 } 265 266 private int sourceCase_ = 0; 267 private java.lang.Object source_; 268 269 public enum SourceCase 270 implements 271 com.google.protobuf.Internal.EnumLite, 272 com.google.protobuf.AbstractMessage.InternalOneOfEnum { 273 GCS_SOURCE(2), 274 BIGQUERY_SOURCE(3), 275 SOURCE_NOT_SET(0); 276 private final int value; 277 SourceCase(int value)278 private SourceCase(int value) { 279 this.value = value; 280 } 281 /** 282 * @param value The number of the enum to look for. 283 * @return The enum associated with the given number. 284 * @deprecated Use {@link #forNumber(int)} instead. 
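   *
   * <p>Illustrative sketch (not generated from the proto): {@code forNumber(int)} maps a
   * oneof field number to its case and returns {@code null} for numbers it does not know:
   *
   *   SourceCase c = SourceCase.forNumber(2);       // GCS_SOURCE
   *   int n = c.getNumber();                        // 2
   *   SourceCase unknown = SourceCase.forNumber(7); // null, no such field number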
285 */ 286 @java.lang.Deprecated valueOf(int value)287 public static SourceCase valueOf(int value) { 288 return forNumber(value); 289 } 290 forNumber(int value)291 public static SourceCase forNumber(int value) { 292 switch (value) { 293 case 2: 294 return GCS_SOURCE; 295 case 3: 296 return BIGQUERY_SOURCE; 297 case 0: 298 return SOURCE_NOT_SET; 299 default: 300 return null; 301 } 302 } 303 getNumber()304 public int getNumber() { 305 return this.value; 306 } 307 }; 308 getSourceCase()309 public SourceCase getSourceCase() { 310 return SourceCase.forNumber(sourceCase_); 311 } 312 313 public static final int GCS_SOURCE_FIELD_NUMBER = 2; 314 /** 315 * 316 * 317 * <pre> 318 * The Cloud Storage location for the input instances. 319 * </pre> 320 * 321 * <code>.google.cloud.aiplatform.v1beta1.GcsSource gcs_source = 2;</code> 322 * 323 * @return Whether the gcsSource field is set. 324 */ 325 @java.lang.Override hasGcsSource()326 public boolean hasGcsSource() { 327 return sourceCase_ == 2; 328 } 329 /** 330 * 331 * 332 * <pre> 333 * The Cloud Storage location for the input instances. 334 * </pre> 335 * 336 * <code>.google.cloud.aiplatform.v1beta1.GcsSource gcs_source = 2;</code> 337 * 338 * @return The gcsSource. 339 */ 340 @java.lang.Override getGcsSource()341 public com.google.cloud.aiplatform.v1beta1.GcsSource getGcsSource() { 342 if (sourceCase_ == 2) { 343 return (com.google.cloud.aiplatform.v1beta1.GcsSource) source_; 344 } 345 return com.google.cloud.aiplatform.v1beta1.GcsSource.getDefaultInstance(); 346 } 347 /** 348 * 349 * 350 * <pre> 351 * The Cloud Storage location for the input instances. 352 * </pre> 353 * 354 * <code>.google.cloud.aiplatform.v1beta1.GcsSource gcs_source = 2;</code> 355 */ 356 @java.lang.Override getGcsSourceOrBuilder()357 public com.google.cloud.aiplatform.v1beta1.GcsSourceOrBuilder getGcsSourceOrBuilder() { 358 if (sourceCase_ == 2) { 359 return (com.google.cloud.aiplatform.v1beta1.GcsSource) source_; 360 } 361 return com.google.cloud.aiplatform.v1beta1.GcsSource.getDefaultInstance(); 362 } 363 364 public static final int BIGQUERY_SOURCE_FIELD_NUMBER = 3; 365 /** 366 * 367 * 368 * <pre> 369 * The BigQuery location of the input table. 370 * The schema of the table should be in the format described by the given 371 * context OpenAPI Schema, if one is provided. The table may contain 372 * additional columns that are not described by the schema, and they will 373 * be ignored. 374 * </pre> 375 * 376 * <code>.google.cloud.aiplatform.v1beta1.BigQuerySource bigquery_source = 3;</code> 377 * 378 * @return Whether the bigquerySource field is set. 379 */ 380 @java.lang.Override hasBigquerySource()381 public boolean hasBigquerySource() { 382 return sourceCase_ == 3; 383 } 384 /** 385 * 386 * 387 * <pre> 388 * The BigQuery location of the input table. 389 * The schema of the table should be in the format described by the given 390 * context OpenAPI Schema, if one is provided. The table may contain 391 * additional columns that are not described by the schema, and they will 392 * be ignored. 393 * </pre> 394 * 395 * <code>.google.cloud.aiplatform.v1beta1.BigQuerySource bigquery_source = 3;</code> 396 * 397 * @return The bigquerySource. 
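   *
   * <p>Illustrative sketch (not taken from the proto documentation): because
   * {@code gcs_source} and {@code bigquery_source} share the {@code source} oneof,
   * callers usually inspect the case before reading either field; {@code handleGcs}
   * and {@code handleBigQuery} are hypothetical callbacks:
   *
   *   InputConfig config = ...;  // an InputConfig obtained elsewhere
   *   switch (config.getSourceCase()) {
   *     case GCS_SOURCE:
   *       handleGcs(config.getGcsSource());
   *       break;
   *     case BIGQUERY_SOURCE:
   *       handleBigQuery(config.getBigquerySource());
   *       break;
   *     case SOURCE_NOT_SET:
   *     default:
   *       // no input source configured
   *   }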
398 */ 399 @java.lang.Override getBigquerySource()400 public com.google.cloud.aiplatform.v1beta1.BigQuerySource getBigquerySource() { 401 if (sourceCase_ == 3) { 402 return (com.google.cloud.aiplatform.v1beta1.BigQuerySource) source_; 403 } 404 return com.google.cloud.aiplatform.v1beta1.BigQuerySource.getDefaultInstance(); 405 } 406 /** 407 * 408 * 409 * <pre> 410 * The BigQuery location of the input table. 411 * The schema of the table should be in the format described by the given 412 * context OpenAPI Schema, if one is provided. The table may contain 413 * additional columns that are not described by the schema, and they will 414 * be ignored. 415 * </pre> 416 * 417 * <code>.google.cloud.aiplatform.v1beta1.BigQuerySource bigquery_source = 3;</code> 418 */ 419 @java.lang.Override 420 public com.google.cloud.aiplatform.v1beta1.BigQuerySourceOrBuilder getBigquerySourceOrBuilder()421 getBigquerySourceOrBuilder() { 422 if (sourceCase_ == 3) { 423 return (com.google.cloud.aiplatform.v1beta1.BigQuerySource) source_; 424 } 425 return com.google.cloud.aiplatform.v1beta1.BigQuerySource.getDefaultInstance(); 426 } 427 428 public static final int INSTANCES_FORMAT_FIELD_NUMBER = 1; 429 430 @SuppressWarnings("serial") 431 private volatile java.lang.Object instancesFormat_ = ""; 432 /** 433 * 434 * 435 * <pre> 436 * Required. The format in which instances are given, must be one of the 437 * [Model's][google.cloud.aiplatform.v1beta1.BatchPredictionJob.model] 438 * [supported_input_storage_formats][google.cloud.aiplatform.v1beta1.Model.supported_input_storage_formats]. 439 * </pre> 440 * 441 * <code>string instances_format = 1 [(.google.api.field_behavior) = REQUIRED];</code> 442 * 443 * @return The instancesFormat. 444 */ 445 @java.lang.Override getInstancesFormat()446 public java.lang.String getInstancesFormat() { 447 java.lang.Object ref = instancesFormat_; 448 if (ref instanceof java.lang.String) { 449 return (java.lang.String) ref; 450 } else { 451 com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; 452 java.lang.String s = bs.toStringUtf8(); 453 instancesFormat_ = s; 454 return s; 455 } 456 } 457 /** 458 * 459 * 460 * <pre> 461 * Required. The format in which instances are given, must be one of the 462 * [Model's][google.cloud.aiplatform.v1beta1.BatchPredictionJob.model] 463 * [supported_input_storage_formats][google.cloud.aiplatform.v1beta1.Model.supported_input_storage_formats]. 464 * </pre> 465 * 466 * <code>string instances_format = 1 [(.google.api.field_behavior) = REQUIRED];</code> 467 * 468 * @return The bytes for instancesFormat. 
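   *
   * <p>Illustrative sketch ({@code config} stands for any {@code InputConfig} instance):
   * the field is exposed both as a {@code String} and as its UTF-8 bytes:
   *
   *   String format = config.getInstancesFormat();                     // e.g. "jsonl"
   *   com.google.protobuf.ByteString raw = config.getInstancesFormatBytes();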
469 */ 470 @java.lang.Override getInstancesFormatBytes()471 public com.google.protobuf.ByteString getInstancesFormatBytes() { 472 java.lang.Object ref = instancesFormat_; 473 if (ref instanceof java.lang.String) { 474 com.google.protobuf.ByteString b = 475 com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref); 476 instancesFormat_ = b; 477 return b; 478 } else { 479 return (com.google.protobuf.ByteString) ref; 480 } 481 } 482 483 private byte memoizedIsInitialized = -1; 484 485 @java.lang.Override isInitialized()486 public final boolean isInitialized() { 487 byte isInitialized = memoizedIsInitialized; 488 if (isInitialized == 1) return true; 489 if (isInitialized == 0) return false; 490 491 memoizedIsInitialized = 1; 492 return true; 493 } 494 495 @java.lang.Override writeTo(com.google.protobuf.CodedOutputStream output)496 public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException { 497 if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(instancesFormat_)) { 498 com.google.protobuf.GeneratedMessageV3.writeString(output, 1, instancesFormat_); 499 } 500 if (sourceCase_ == 2) { 501 output.writeMessage(2, (com.google.cloud.aiplatform.v1beta1.GcsSource) source_); 502 } 503 if (sourceCase_ == 3) { 504 output.writeMessage(3, (com.google.cloud.aiplatform.v1beta1.BigQuerySource) source_); 505 } 506 getUnknownFields().writeTo(output); 507 } 508 509 @java.lang.Override getSerializedSize()510 public int getSerializedSize() { 511 int size = memoizedSize; 512 if (size != -1) return size; 513 514 size = 0; 515 if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(instancesFormat_)) { 516 size += com.google.protobuf.GeneratedMessageV3.computeStringSize(1, instancesFormat_); 517 } 518 if (sourceCase_ == 2) { 519 size += 520 com.google.protobuf.CodedOutputStream.computeMessageSize( 521 2, (com.google.cloud.aiplatform.v1beta1.GcsSource) source_); 522 } 523 if (sourceCase_ == 3) { 524 size += 525 com.google.protobuf.CodedOutputStream.computeMessageSize( 526 3, (com.google.cloud.aiplatform.v1beta1.BigQuerySource) source_); 527 } 528 size += getUnknownFields().getSerializedSize(); 529 memoizedSize = size; 530 return size; 531 } 532 533 @java.lang.Override equals(final java.lang.Object obj)534 public boolean equals(final java.lang.Object obj) { 535 if (obj == this) { 536 return true; 537 } 538 if (!(obj instanceof com.google.cloud.aiplatform.v1beta1.BatchPredictionJob.InputConfig)) { 539 return super.equals(obj); 540 } 541 com.google.cloud.aiplatform.v1beta1.BatchPredictionJob.InputConfig other = 542 (com.google.cloud.aiplatform.v1beta1.BatchPredictionJob.InputConfig) obj; 543 544 if (!getInstancesFormat().equals(other.getInstancesFormat())) return false; 545 if (!getSourceCase().equals(other.getSourceCase())) return false; 546 switch (sourceCase_) { 547 case 2: 548 if (!getGcsSource().equals(other.getGcsSource())) return false; 549 break; 550 case 3: 551 if (!getBigquerySource().equals(other.getBigquerySource())) return false; 552 break; 553 case 0: 554 default: 555 } 556 if (!getUnknownFields().equals(other.getUnknownFields())) return false; 557 return true; 558 } 559 560 @java.lang.Override hashCode()561 public int hashCode() { 562 if (memoizedHashCode != 0) { 563 return memoizedHashCode; 564 } 565 int hash = 41; 566 hash = (19 * hash) + getDescriptor().hashCode(); 567 hash = (37 * hash) + INSTANCES_FORMAT_FIELD_NUMBER; 568 hash = (53 * hash) + getInstancesFormat().hashCode(); 569 switch (sourceCase_) { 570 case 2: 571 hash = (37 * hash) + 
GCS_SOURCE_FIELD_NUMBER; 572 hash = (53 * hash) + getGcsSource().hashCode(); 573 break; 574 case 3: 575 hash = (37 * hash) + BIGQUERY_SOURCE_FIELD_NUMBER; 576 hash = (53 * hash) + getBigquerySource().hashCode(); 577 break; 578 case 0: 579 default: 580 } 581 hash = (29 * hash) + getUnknownFields().hashCode(); 582 memoizedHashCode = hash; 583 return hash; 584 } 585 parseFrom( java.nio.ByteBuffer data)586 public static com.google.cloud.aiplatform.v1beta1.BatchPredictionJob.InputConfig parseFrom( 587 java.nio.ByteBuffer data) throws com.google.protobuf.InvalidProtocolBufferException { 588 return PARSER.parseFrom(data); 589 } 590 parseFrom( java.nio.ByteBuffer data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)591 public static com.google.cloud.aiplatform.v1beta1.BatchPredictionJob.InputConfig parseFrom( 592 java.nio.ByteBuffer data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) 593 throws com.google.protobuf.InvalidProtocolBufferException { 594 return PARSER.parseFrom(data, extensionRegistry); 595 } 596 parseFrom( com.google.protobuf.ByteString data)597 public static com.google.cloud.aiplatform.v1beta1.BatchPredictionJob.InputConfig parseFrom( 598 com.google.protobuf.ByteString data) 599 throws com.google.protobuf.InvalidProtocolBufferException { 600 return PARSER.parseFrom(data); 601 } 602 parseFrom( com.google.protobuf.ByteString data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)603 public static com.google.cloud.aiplatform.v1beta1.BatchPredictionJob.InputConfig parseFrom( 604 com.google.protobuf.ByteString data, 605 com.google.protobuf.ExtensionRegistryLite extensionRegistry) 606 throws com.google.protobuf.InvalidProtocolBufferException { 607 return PARSER.parseFrom(data, extensionRegistry); 608 } 609 parseFrom( byte[] data)610 public static com.google.cloud.aiplatform.v1beta1.BatchPredictionJob.InputConfig parseFrom( 611 byte[] data) throws com.google.protobuf.InvalidProtocolBufferException { 612 return PARSER.parseFrom(data); 613 } 614 parseFrom( byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)615 public static com.google.cloud.aiplatform.v1beta1.BatchPredictionJob.InputConfig parseFrom( 616 byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) 617 throws com.google.protobuf.InvalidProtocolBufferException { 618 return PARSER.parseFrom(data, extensionRegistry); 619 } 620 parseFrom( java.io.InputStream input)621 public static com.google.cloud.aiplatform.v1beta1.BatchPredictionJob.InputConfig parseFrom( 622 java.io.InputStream input) throws java.io.IOException { 623 return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input); 624 } 625 parseFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)626 public static com.google.cloud.aiplatform.v1beta1.BatchPredictionJob.InputConfig parseFrom( 627 java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) 628 throws java.io.IOException { 629 return com.google.protobuf.GeneratedMessageV3.parseWithIOException( 630 PARSER, input, extensionRegistry); 631 } 632 633 public static com.google.cloud.aiplatform.v1beta1.BatchPredictionJob.InputConfig parseDelimitedFrom(java.io.InputStream input)634 parseDelimitedFrom(java.io.InputStream input) throws java.io.IOException { 635 return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(PARSER, input); 636 } 637 638 public static com.google.cloud.aiplatform.v1beta1.BatchPredictionJob.InputConfig parseDelimitedFrom( 
java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)639 parseDelimitedFrom( 640 java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) 641 throws java.io.IOException { 642 return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException( 643 PARSER, input, extensionRegistry); 644 } 645 parseFrom( com.google.protobuf.CodedInputStream input)646 public static com.google.cloud.aiplatform.v1beta1.BatchPredictionJob.InputConfig parseFrom( 647 com.google.protobuf.CodedInputStream input) throws java.io.IOException { 648 return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input); 649 } 650 parseFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)651 public static com.google.cloud.aiplatform.v1beta1.BatchPredictionJob.InputConfig parseFrom( 652 com.google.protobuf.CodedInputStream input, 653 com.google.protobuf.ExtensionRegistryLite extensionRegistry) 654 throws java.io.IOException { 655 return com.google.protobuf.GeneratedMessageV3.parseWithIOException( 656 PARSER, input, extensionRegistry); 657 } 658 659 @java.lang.Override newBuilderForType()660 public Builder newBuilderForType() { 661 return newBuilder(); 662 } 663 newBuilder()664 public static Builder newBuilder() { 665 return DEFAULT_INSTANCE.toBuilder(); 666 } 667 newBuilder( com.google.cloud.aiplatform.v1beta1.BatchPredictionJob.InputConfig prototype)668 public static Builder newBuilder( 669 com.google.cloud.aiplatform.v1beta1.BatchPredictionJob.InputConfig prototype) { 670 return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype); 671 } 672 673 @java.lang.Override toBuilder()674 public Builder toBuilder() { 675 return this == DEFAULT_INSTANCE ? new Builder() : new Builder().mergeFrom(this); 676 } 677 678 @java.lang.Override newBuilderForType( com.google.protobuf.GeneratedMessageV3.BuilderParent parent)679 protected Builder newBuilderForType( 680 com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { 681 Builder builder = new Builder(parent); 682 return builder; 683 } 684 /** 685 * 686 * 687 * <pre> 688 * Configures the input to 689 * [BatchPredictionJob][google.cloud.aiplatform.v1beta1.BatchPredictionJob]. 690 * See 691 * [Model.supported_input_storage_formats][google.cloud.aiplatform.v1beta1.Model.supported_input_storage_formats] 692 * for Model's supported input formats, and how instances should be expressed 693 * via any of them. 
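   *
   * Illustrative sketch (not part of the proto comment above): builders are obtained
   * from the message class, and a message can be serialized and re-parsed with the
   * generated static methods; existingConfig is an assumed, pre-built instance, and
   * toByteArray() comes from the protobuf runtime base class:
   *
   *   BatchPredictionJob.InputConfig.Builder fresh = BatchPredictionJob.InputConfig.newBuilder();
   *   BatchPredictionJob.InputConfig.Builder edit = existingConfig.toBuilder();
   *   byte[] bytes = existingConfig.toByteArray();
   *   BatchPredictionJob.InputConfig parsed =
   *       BatchPredictionJob.InputConfig.parseFrom(bytes);  // throws InvalidProtocolBufferException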
694 * </pre> 695 * 696 * Protobuf type {@code google.cloud.aiplatform.v1beta1.BatchPredictionJob.InputConfig} 697 */ 698 public static final class Builder 699 extends com.google.protobuf.GeneratedMessageV3.Builder<Builder> 700 implements 701 // @@protoc_insertion_point(builder_implements:google.cloud.aiplatform.v1beta1.BatchPredictionJob.InputConfig) 702 com.google.cloud.aiplatform.v1beta1.BatchPredictionJob.InputConfigOrBuilder { getDescriptor()703 public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { 704 return com.google.cloud.aiplatform.v1beta1.BatchPredictionJobProto 705 .internal_static_google_cloud_aiplatform_v1beta1_BatchPredictionJob_InputConfig_descriptor; 706 } 707 708 @java.lang.Override 709 protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable()710 internalGetFieldAccessorTable() { 711 return com.google.cloud.aiplatform.v1beta1.BatchPredictionJobProto 712 .internal_static_google_cloud_aiplatform_v1beta1_BatchPredictionJob_InputConfig_fieldAccessorTable 713 .ensureFieldAccessorsInitialized( 714 com.google.cloud.aiplatform.v1beta1.BatchPredictionJob.InputConfig.class, 715 com.google.cloud.aiplatform.v1beta1.BatchPredictionJob.InputConfig.Builder.class); 716 } 717 718 // Construct using 719 // com.google.cloud.aiplatform.v1beta1.BatchPredictionJob.InputConfig.newBuilder() Builder()720 private Builder() {} 721 Builder(com.google.protobuf.GeneratedMessageV3.BuilderParent parent)722 private Builder(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { 723 super(parent); 724 } 725 726 @java.lang.Override clear()727 public Builder clear() { 728 super.clear(); 729 bitField0_ = 0; 730 if (gcsSourceBuilder_ != null) { 731 gcsSourceBuilder_.clear(); 732 } 733 if (bigquerySourceBuilder_ != null) { 734 bigquerySourceBuilder_.clear(); 735 } 736 instancesFormat_ = ""; 737 sourceCase_ = 0; 738 source_ = null; 739 return this; 740 } 741 742 @java.lang.Override getDescriptorForType()743 public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { 744 return com.google.cloud.aiplatform.v1beta1.BatchPredictionJobProto 745 .internal_static_google_cloud_aiplatform_v1beta1_BatchPredictionJob_InputConfig_descriptor; 746 } 747 748 @java.lang.Override 749 public com.google.cloud.aiplatform.v1beta1.BatchPredictionJob.InputConfig getDefaultInstanceForType()750 getDefaultInstanceForType() { 751 return com.google.cloud.aiplatform.v1beta1.BatchPredictionJob.InputConfig 752 .getDefaultInstance(); 753 } 754 755 @java.lang.Override build()756 public com.google.cloud.aiplatform.v1beta1.BatchPredictionJob.InputConfig build() { 757 com.google.cloud.aiplatform.v1beta1.BatchPredictionJob.InputConfig result = buildPartial(); 758 if (!result.isInitialized()) { 759 throw newUninitializedMessageException(result); 760 } 761 return result; 762 } 763 764 @java.lang.Override buildPartial()765 public com.google.cloud.aiplatform.v1beta1.BatchPredictionJob.InputConfig buildPartial() { 766 com.google.cloud.aiplatform.v1beta1.BatchPredictionJob.InputConfig result = 767 new com.google.cloud.aiplatform.v1beta1.BatchPredictionJob.InputConfig(this); 768 if (bitField0_ != 0) { 769 buildPartial0(result); 770 } 771 buildPartialOneofs(result); 772 onBuilt(); 773 return result; 774 } 775 buildPartial0( com.google.cloud.aiplatform.v1beta1.BatchPredictionJob.InputConfig result)776 private void buildPartial0( 777 com.google.cloud.aiplatform.v1beta1.BatchPredictionJob.InputConfig result) { 778 int from_bitField0_ = bitField0_; 779 if 
(((from_bitField0_ & 0x00000004) != 0)) { 780 result.instancesFormat_ = instancesFormat_; 781 } 782 } 783 buildPartialOneofs( com.google.cloud.aiplatform.v1beta1.BatchPredictionJob.InputConfig result)784 private void buildPartialOneofs( 785 com.google.cloud.aiplatform.v1beta1.BatchPredictionJob.InputConfig result) { 786 result.sourceCase_ = sourceCase_; 787 result.source_ = this.source_; 788 if (sourceCase_ == 2 && gcsSourceBuilder_ != null) { 789 result.source_ = gcsSourceBuilder_.build(); 790 } 791 if (sourceCase_ == 3 && bigquerySourceBuilder_ != null) { 792 result.source_ = bigquerySourceBuilder_.build(); 793 } 794 } 795 796 @java.lang.Override clone()797 public Builder clone() { 798 return super.clone(); 799 } 800 801 @java.lang.Override setField( com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value)802 public Builder setField( 803 com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { 804 return super.setField(field, value); 805 } 806 807 @java.lang.Override clearField(com.google.protobuf.Descriptors.FieldDescriptor field)808 public Builder clearField(com.google.protobuf.Descriptors.FieldDescriptor field) { 809 return super.clearField(field); 810 } 811 812 @java.lang.Override clearOneof(com.google.protobuf.Descriptors.OneofDescriptor oneof)813 public Builder clearOneof(com.google.protobuf.Descriptors.OneofDescriptor oneof) { 814 return super.clearOneof(oneof); 815 } 816 817 @java.lang.Override setRepeatedField( com.google.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value)818 public Builder setRepeatedField( 819 com.google.protobuf.Descriptors.FieldDescriptor field, 820 int index, 821 java.lang.Object value) { 822 return super.setRepeatedField(field, index, value); 823 } 824 825 @java.lang.Override addRepeatedField( com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value)826 public Builder addRepeatedField( 827 com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { 828 return super.addRepeatedField(field, value); 829 } 830 831 @java.lang.Override mergeFrom(com.google.protobuf.Message other)832 public Builder mergeFrom(com.google.protobuf.Message other) { 833 if (other instanceof com.google.cloud.aiplatform.v1beta1.BatchPredictionJob.InputConfig) { 834 return mergeFrom( 835 (com.google.cloud.aiplatform.v1beta1.BatchPredictionJob.InputConfig) other); 836 } else { 837 super.mergeFrom(other); 838 return this; 839 } 840 } 841 mergeFrom( com.google.cloud.aiplatform.v1beta1.BatchPredictionJob.InputConfig other)842 public Builder mergeFrom( 843 com.google.cloud.aiplatform.v1beta1.BatchPredictionJob.InputConfig other) { 844 if (other 845 == com.google.cloud.aiplatform.v1beta1.BatchPredictionJob.InputConfig 846 .getDefaultInstance()) return this; 847 if (!other.getInstancesFormat().isEmpty()) { 848 instancesFormat_ = other.instancesFormat_; 849 bitField0_ |= 0x00000004; 850 onChanged(); 851 } 852 switch (other.getSourceCase()) { 853 case GCS_SOURCE: 854 { 855 mergeGcsSource(other.getGcsSource()); 856 break; 857 } 858 case BIGQUERY_SOURCE: 859 { 860 mergeBigquerySource(other.getBigquerySource()); 861 break; 862 } 863 case SOURCE_NOT_SET: 864 { 865 break; 866 } 867 } 868 this.mergeUnknownFields(other.getUnknownFields()); 869 onChanged(); 870 return this; 871 } 872 873 @java.lang.Override isInitialized()874 public final boolean isInitialized() { 875 return true; 876 } 877 878 @java.lang.Override mergeFrom( com.google.protobuf.CodedInputStream input, 
com.google.protobuf.ExtensionRegistryLite extensionRegistry)879 public Builder mergeFrom( 880 com.google.protobuf.CodedInputStream input, 881 com.google.protobuf.ExtensionRegistryLite extensionRegistry) 882 throws java.io.IOException { 883 if (extensionRegistry == null) { 884 throw new java.lang.NullPointerException(); 885 } 886 try { 887 boolean done = false; 888 while (!done) { 889 int tag = input.readTag(); 890 switch (tag) { 891 case 0: 892 done = true; 893 break; 894 case 10: 895 { 896 instancesFormat_ = input.readStringRequireUtf8(); 897 bitField0_ |= 0x00000004; 898 break; 899 } // case 10 900 case 18: 901 { 902 input.readMessage(getGcsSourceFieldBuilder().getBuilder(), extensionRegistry); 903 sourceCase_ = 2; 904 break; 905 } // case 18 906 case 26: 907 { 908 input.readMessage( 909 getBigquerySourceFieldBuilder().getBuilder(), extensionRegistry); 910 sourceCase_ = 3; 911 break; 912 } // case 26 913 default: 914 { 915 if (!super.parseUnknownField(input, extensionRegistry, tag)) { 916 done = true; // was an endgroup tag 917 } 918 break; 919 } // default: 920 } // switch (tag) 921 } // while (!done) 922 } catch (com.google.protobuf.InvalidProtocolBufferException e) { 923 throw e.unwrapIOException(); 924 } finally { 925 onChanged(); 926 } // finally 927 return this; 928 } 929 930 private int sourceCase_ = 0; 931 private java.lang.Object source_; 932 getSourceCase()933 public SourceCase getSourceCase() { 934 return SourceCase.forNumber(sourceCase_); 935 } 936 clearSource()937 public Builder clearSource() { 938 sourceCase_ = 0; 939 source_ = null; 940 onChanged(); 941 return this; 942 } 943 944 private int bitField0_; 945 946 private com.google.protobuf.SingleFieldBuilderV3< 947 com.google.cloud.aiplatform.v1beta1.GcsSource, 948 com.google.cloud.aiplatform.v1beta1.GcsSource.Builder, 949 com.google.cloud.aiplatform.v1beta1.GcsSourceOrBuilder> 950 gcsSourceBuilder_; 951 /** 952 * 953 * 954 * <pre> 955 * The Cloud Storage location for the input instances. 956 * </pre> 957 * 958 * <code>.google.cloud.aiplatform.v1beta1.GcsSource gcs_source = 2;</code> 959 * 960 * @return Whether the gcsSource field is set. 961 */ 962 @java.lang.Override hasGcsSource()963 public boolean hasGcsSource() { 964 return sourceCase_ == 2; 965 } 966 /** 967 * 968 * 969 * <pre> 970 * The Cloud Storage location for the input instances. 971 * </pre> 972 * 973 * <code>.google.cloud.aiplatform.v1beta1.GcsSource gcs_source = 2;</code> 974 * 975 * @return The gcsSource. 976 */ 977 @java.lang.Override getGcsSource()978 public com.google.cloud.aiplatform.v1beta1.GcsSource getGcsSource() { 979 if (gcsSourceBuilder_ == null) { 980 if (sourceCase_ == 2) { 981 return (com.google.cloud.aiplatform.v1beta1.GcsSource) source_; 982 } 983 return com.google.cloud.aiplatform.v1beta1.GcsSource.getDefaultInstance(); 984 } else { 985 if (sourceCase_ == 2) { 986 return gcsSourceBuilder_.getMessage(); 987 } 988 return com.google.cloud.aiplatform.v1beta1.GcsSource.getDefaultInstance(); 989 } 990 } 991 /** 992 * 993 * 994 * <pre> 995 * The Cloud Storage location for the input instances. 
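   *
   * Illustrative sketch (gcsSource is an assumed, pre-built
   * com.google.cloud.aiplatform.v1beta1.GcsSource; its own builder is not shown here):
   *
   *   BatchPredictionJob.InputConfig input =
   *       BatchPredictionJob.InputConfig.newBuilder()
   *           .setInstancesFormat("jsonl")
   *           .setGcsSource(gcsSource)
   *           .build();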
996 * </pre> 997 * 998 * <code>.google.cloud.aiplatform.v1beta1.GcsSource gcs_source = 2;</code> 999 */ setGcsSource(com.google.cloud.aiplatform.v1beta1.GcsSource value)1000 public Builder setGcsSource(com.google.cloud.aiplatform.v1beta1.GcsSource value) { 1001 if (gcsSourceBuilder_ == null) { 1002 if (value == null) { 1003 throw new NullPointerException(); 1004 } 1005 source_ = value; 1006 onChanged(); 1007 } else { 1008 gcsSourceBuilder_.setMessage(value); 1009 } 1010 sourceCase_ = 2; 1011 return this; 1012 } 1013 /** 1014 * 1015 * 1016 * <pre> 1017 * The Cloud Storage location for the input instances. 1018 * </pre> 1019 * 1020 * <code>.google.cloud.aiplatform.v1beta1.GcsSource gcs_source = 2;</code> 1021 */ setGcsSource( com.google.cloud.aiplatform.v1beta1.GcsSource.Builder builderForValue)1022 public Builder setGcsSource( 1023 com.google.cloud.aiplatform.v1beta1.GcsSource.Builder builderForValue) { 1024 if (gcsSourceBuilder_ == null) { 1025 source_ = builderForValue.build(); 1026 onChanged(); 1027 } else { 1028 gcsSourceBuilder_.setMessage(builderForValue.build()); 1029 } 1030 sourceCase_ = 2; 1031 return this; 1032 } 1033 /** 1034 * 1035 * 1036 * <pre> 1037 * The Cloud Storage location for the input instances. 1038 * </pre> 1039 * 1040 * <code>.google.cloud.aiplatform.v1beta1.GcsSource gcs_source = 2;</code> 1041 */ mergeGcsSource(com.google.cloud.aiplatform.v1beta1.GcsSource value)1042 public Builder mergeGcsSource(com.google.cloud.aiplatform.v1beta1.GcsSource value) { 1043 if (gcsSourceBuilder_ == null) { 1044 if (sourceCase_ == 2 1045 && source_ != com.google.cloud.aiplatform.v1beta1.GcsSource.getDefaultInstance()) { 1046 source_ = 1047 com.google.cloud.aiplatform.v1beta1.GcsSource.newBuilder( 1048 (com.google.cloud.aiplatform.v1beta1.GcsSource) source_) 1049 .mergeFrom(value) 1050 .buildPartial(); 1051 } else { 1052 source_ = value; 1053 } 1054 onChanged(); 1055 } else { 1056 if (sourceCase_ == 2) { 1057 gcsSourceBuilder_.mergeFrom(value); 1058 } else { 1059 gcsSourceBuilder_.setMessage(value); 1060 } 1061 } 1062 sourceCase_ = 2; 1063 return this; 1064 } 1065 /** 1066 * 1067 * 1068 * <pre> 1069 * The Cloud Storage location for the input instances. 1070 * </pre> 1071 * 1072 * <code>.google.cloud.aiplatform.v1beta1.GcsSource gcs_source = 2;</code> 1073 */ clearGcsSource()1074 public Builder clearGcsSource() { 1075 if (gcsSourceBuilder_ == null) { 1076 if (sourceCase_ == 2) { 1077 sourceCase_ = 0; 1078 source_ = null; 1079 onChanged(); 1080 } 1081 } else { 1082 if (sourceCase_ == 2) { 1083 sourceCase_ = 0; 1084 source_ = null; 1085 } 1086 gcsSourceBuilder_.clear(); 1087 } 1088 return this; 1089 } 1090 /** 1091 * 1092 * 1093 * <pre> 1094 * The Cloud Storage location for the input instances. 1095 * </pre> 1096 * 1097 * <code>.google.cloud.aiplatform.v1beta1.GcsSource gcs_source = 2;</code> 1098 */ getGcsSourceBuilder()1099 public com.google.cloud.aiplatform.v1beta1.GcsSource.Builder getGcsSourceBuilder() { 1100 return getGcsSourceFieldBuilder().getBuilder(); 1101 } 1102 /** 1103 * 1104 * 1105 * <pre> 1106 * The Cloud Storage location for the input instances. 
1107 * </pre> 1108 * 1109 * <code>.google.cloud.aiplatform.v1beta1.GcsSource gcs_source = 2;</code> 1110 */ 1111 @java.lang.Override getGcsSourceOrBuilder()1112 public com.google.cloud.aiplatform.v1beta1.GcsSourceOrBuilder getGcsSourceOrBuilder() { 1113 if ((sourceCase_ == 2) && (gcsSourceBuilder_ != null)) { 1114 return gcsSourceBuilder_.getMessageOrBuilder(); 1115 } else { 1116 if (sourceCase_ == 2) { 1117 return (com.google.cloud.aiplatform.v1beta1.GcsSource) source_; 1118 } 1119 return com.google.cloud.aiplatform.v1beta1.GcsSource.getDefaultInstance(); 1120 } 1121 } 1122 /** 1123 * 1124 * 1125 * <pre> 1126 * The Cloud Storage location for the input instances. 1127 * </pre> 1128 * 1129 * <code>.google.cloud.aiplatform.v1beta1.GcsSource gcs_source = 2;</code> 1130 */ 1131 private com.google.protobuf.SingleFieldBuilderV3< 1132 com.google.cloud.aiplatform.v1beta1.GcsSource, 1133 com.google.cloud.aiplatform.v1beta1.GcsSource.Builder, 1134 com.google.cloud.aiplatform.v1beta1.GcsSourceOrBuilder> getGcsSourceFieldBuilder()1135 getGcsSourceFieldBuilder() { 1136 if (gcsSourceBuilder_ == null) { 1137 if (!(sourceCase_ == 2)) { 1138 source_ = com.google.cloud.aiplatform.v1beta1.GcsSource.getDefaultInstance(); 1139 } 1140 gcsSourceBuilder_ = 1141 new com.google.protobuf.SingleFieldBuilderV3< 1142 com.google.cloud.aiplatform.v1beta1.GcsSource, 1143 com.google.cloud.aiplatform.v1beta1.GcsSource.Builder, 1144 com.google.cloud.aiplatform.v1beta1.GcsSourceOrBuilder>( 1145 (com.google.cloud.aiplatform.v1beta1.GcsSource) source_, 1146 getParentForChildren(), 1147 isClean()); 1148 source_ = null; 1149 } 1150 sourceCase_ = 2; 1151 onChanged(); 1152 return gcsSourceBuilder_; 1153 } 1154 1155 private com.google.protobuf.SingleFieldBuilderV3< 1156 com.google.cloud.aiplatform.v1beta1.BigQuerySource, 1157 com.google.cloud.aiplatform.v1beta1.BigQuerySource.Builder, 1158 com.google.cloud.aiplatform.v1beta1.BigQuerySourceOrBuilder> 1159 bigquerySourceBuilder_; 1160 /** 1161 * 1162 * 1163 * <pre> 1164 * The BigQuery location of the input table. 1165 * The schema of the table should be in the format described by the given 1166 * context OpenAPI Schema, if one is provided. The table may contain 1167 * additional columns that are not described by the schema, and they will 1168 * be ignored. 1169 * </pre> 1170 * 1171 * <code>.google.cloud.aiplatform.v1beta1.BigQuerySource bigquery_source = 3;</code> 1172 * 1173 * @return Whether the bigquerySource field is set. 1174 */ 1175 @java.lang.Override hasBigquerySource()1176 public boolean hasBigquerySource() { 1177 return sourceCase_ == 3; 1178 } 1179 /** 1180 * 1181 * 1182 * <pre> 1183 * The BigQuery location of the input table. 1184 * The schema of the table should be in the format described by the given 1185 * context OpenAPI Schema, if one is provided. The table may contain 1186 * additional columns that are not described by the schema, and they will 1187 * be ignored. 1188 * </pre> 1189 * 1190 * <code>.google.cloud.aiplatform.v1beta1.BigQuerySource bigquery_source = 3;</code> 1191 * 1192 * @return The bigquerySource. 
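   *
   * <p>Illustrative sketch (bigQuerySource is an assumed, pre-built
   * com.google.cloud.aiplatform.v1beta1.BigQuerySource): setting it replaces any
   * previously set gcs_source, because both fields live in the same source oneof:
   *
   *   BatchPredictionJob.InputConfig input =
   *       BatchPredictionJob.InputConfig.newBuilder()
   *           .setInstancesFormat("bigquery")
   *           .setBigquerySource(bigQuerySource)
   *           .build();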
1193 */ 1194 @java.lang.Override getBigquerySource()1195 public com.google.cloud.aiplatform.v1beta1.BigQuerySource getBigquerySource() { 1196 if (bigquerySourceBuilder_ == null) { 1197 if (sourceCase_ == 3) { 1198 return (com.google.cloud.aiplatform.v1beta1.BigQuerySource) source_; 1199 } 1200 return com.google.cloud.aiplatform.v1beta1.BigQuerySource.getDefaultInstance(); 1201 } else { 1202 if (sourceCase_ == 3) { 1203 return bigquerySourceBuilder_.getMessage(); 1204 } 1205 return com.google.cloud.aiplatform.v1beta1.BigQuerySource.getDefaultInstance(); 1206 } 1207 } 1208 /** 1209 * 1210 * 1211 * <pre> 1212 * The BigQuery location of the input table. 1213 * The schema of the table should be in the format described by the given 1214 * context OpenAPI Schema, if one is provided. The table may contain 1215 * additional columns that are not described by the schema, and they will 1216 * be ignored. 1217 * </pre> 1218 * 1219 * <code>.google.cloud.aiplatform.v1beta1.BigQuerySource bigquery_source = 3;</code> 1220 */ setBigquerySource(com.google.cloud.aiplatform.v1beta1.BigQuerySource value)1221 public Builder setBigquerySource(com.google.cloud.aiplatform.v1beta1.BigQuerySource value) { 1222 if (bigquerySourceBuilder_ == null) { 1223 if (value == null) { 1224 throw new NullPointerException(); 1225 } 1226 source_ = value; 1227 onChanged(); 1228 } else { 1229 bigquerySourceBuilder_.setMessage(value); 1230 } 1231 sourceCase_ = 3; 1232 return this; 1233 } 1234 /** 1235 * 1236 * 1237 * <pre> 1238 * The BigQuery location of the input table. 1239 * The schema of the table should be in the format described by the given 1240 * context OpenAPI Schema, if one is provided. The table may contain 1241 * additional columns that are not described by the schema, and they will 1242 * be ignored. 1243 * </pre> 1244 * 1245 * <code>.google.cloud.aiplatform.v1beta1.BigQuerySource bigquery_source = 3;</code> 1246 */ setBigquerySource( com.google.cloud.aiplatform.v1beta1.BigQuerySource.Builder builderForValue)1247 public Builder setBigquerySource( 1248 com.google.cloud.aiplatform.v1beta1.BigQuerySource.Builder builderForValue) { 1249 if (bigquerySourceBuilder_ == null) { 1250 source_ = builderForValue.build(); 1251 onChanged(); 1252 } else { 1253 bigquerySourceBuilder_.setMessage(builderForValue.build()); 1254 } 1255 sourceCase_ = 3; 1256 return this; 1257 } 1258 /** 1259 * 1260 * 1261 * <pre> 1262 * The BigQuery location of the input table. 1263 * The schema of the table should be in the format described by the given 1264 * context OpenAPI Schema, if one is provided. The table may contain 1265 * additional columns that are not described by the schema, and they will 1266 * be ignored. 
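   *
   * Illustrative note (mirrors the generated method below): if the source oneof already
   * holds a BigQuerySource, mergeBigquerySource(value) merges value into it field by
   * field; otherwise it simply stores value. Either way the oneof case becomes
   * BIGQUERY_SOURCE afterwards, for example:
   *
   *   builder.mergeBigquerySource(partialSource);  // partialSource is an assumed partial message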
1267 * </pre> 1268 * 1269 * <code>.google.cloud.aiplatform.v1beta1.BigQuerySource bigquery_source = 3;</code> 1270 */ mergeBigquerySource(com.google.cloud.aiplatform.v1beta1.BigQuerySource value)1271 public Builder mergeBigquerySource(com.google.cloud.aiplatform.v1beta1.BigQuerySource value) { 1272 if (bigquerySourceBuilder_ == null) { 1273 if (sourceCase_ == 3 1274 && source_ 1275 != com.google.cloud.aiplatform.v1beta1.BigQuerySource.getDefaultInstance()) { 1276 source_ = 1277 com.google.cloud.aiplatform.v1beta1.BigQuerySource.newBuilder( 1278 (com.google.cloud.aiplatform.v1beta1.BigQuerySource) source_) 1279 .mergeFrom(value) 1280 .buildPartial(); 1281 } else { 1282 source_ = value; 1283 } 1284 onChanged(); 1285 } else { 1286 if (sourceCase_ == 3) { 1287 bigquerySourceBuilder_.mergeFrom(value); 1288 } else { 1289 bigquerySourceBuilder_.setMessage(value); 1290 } 1291 } 1292 sourceCase_ = 3; 1293 return this; 1294 } 1295 /** 1296 * 1297 * 1298 * <pre> 1299 * The BigQuery location of the input table. 1300 * The schema of the table should be in the format described by the given 1301 * context OpenAPI Schema, if one is provided. The table may contain 1302 * additional columns that are not described by the schema, and they will 1303 * be ignored. 1304 * </pre> 1305 * 1306 * <code>.google.cloud.aiplatform.v1beta1.BigQuerySource bigquery_source = 3;</code> 1307 */ clearBigquerySource()1308 public Builder clearBigquerySource() { 1309 if (bigquerySourceBuilder_ == null) { 1310 if (sourceCase_ == 3) { 1311 sourceCase_ = 0; 1312 source_ = null; 1313 onChanged(); 1314 } 1315 } else { 1316 if (sourceCase_ == 3) { 1317 sourceCase_ = 0; 1318 source_ = null; 1319 } 1320 bigquerySourceBuilder_.clear(); 1321 } 1322 return this; 1323 } 1324 /** 1325 * 1326 * 1327 * <pre> 1328 * The BigQuery location of the input table. 1329 * The schema of the table should be in the format described by the given 1330 * context OpenAPI Schema, if one is provided. The table may contain 1331 * additional columns that are not described by the schema, and they will 1332 * be ignored. 1333 * </pre> 1334 * 1335 * <code>.google.cloud.aiplatform.v1beta1.BigQuerySource bigquery_source = 3;</code> 1336 */ getBigquerySourceBuilder()1337 public com.google.cloud.aiplatform.v1beta1.BigQuerySource.Builder getBigquerySourceBuilder() { 1338 return getBigquerySourceFieldBuilder().getBuilder(); 1339 } 1340 /** 1341 * 1342 * 1343 * <pre> 1344 * The BigQuery location of the input table. 1345 * The schema of the table should be in the format described by the given 1346 * context OpenAPI Schema, if one is provided. The table may contain 1347 * additional columns that are not described by the schema, and they will 1348 * be ignored. 1349 * </pre> 1350 * 1351 * <code>.google.cloud.aiplatform.v1beta1.BigQuerySource bigquery_source = 3;</code> 1352 */ 1353 @java.lang.Override 1354 public com.google.cloud.aiplatform.v1beta1.BigQuerySourceOrBuilder getBigquerySourceOrBuilder()1355 getBigquerySourceOrBuilder() { 1356 if ((sourceCase_ == 3) && (bigquerySourceBuilder_ != null)) { 1357 return bigquerySourceBuilder_.getMessageOrBuilder(); 1358 } else { 1359 if (sourceCase_ == 3) { 1360 return (com.google.cloud.aiplatform.v1beta1.BigQuerySource) source_; 1361 } 1362 return com.google.cloud.aiplatform.v1beta1.BigQuerySource.getDefaultInstance(); 1363 } 1364 } 1365 /** 1366 * 1367 * 1368 * <pre> 1369 * The BigQuery location of the input table. 
1370 * The schema of the table should be in the format described by the given 1371 * context OpenAPI Schema, if one is provided. The table may contain 1372 * additional columns that are not described by the schema, and they will 1373 * be ignored. 1374 * </pre> 1375 * 1376 * <code>.google.cloud.aiplatform.v1beta1.BigQuerySource bigquery_source = 3;</code> 1377 */ 1378 private com.google.protobuf.SingleFieldBuilderV3< 1379 com.google.cloud.aiplatform.v1beta1.BigQuerySource, 1380 com.google.cloud.aiplatform.v1beta1.BigQuerySource.Builder, 1381 com.google.cloud.aiplatform.v1beta1.BigQuerySourceOrBuilder> getBigquerySourceFieldBuilder()1382 getBigquerySourceFieldBuilder() { 1383 if (bigquerySourceBuilder_ == null) { 1384 if (!(sourceCase_ == 3)) { 1385 source_ = com.google.cloud.aiplatform.v1beta1.BigQuerySource.getDefaultInstance(); 1386 } 1387 bigquerySourceBuilder_ = 1388 new com.google.protobuf.SingleFieldBuilderV3< 1389 com.google.cloud.aiplatform.v1beta1.BigQuerySource, 1390 com.google.cloud.aiplatform.v1beta1.BigQuerySource.Builder, 1391 com.google.cloud.aiplatform.v1beta1.BigQuerySourceOrBuilder>( 1392 (com.google.cloud.aiplatform.v1beta1.BigQuerySource) source_, 1393 getParentForChildren(), 1394 isClean()); 1395 source_ = null; 1396 } 1397 sourceCase_ = 3; 1398 onChanged(); 1399 return bigquerySourceBuilder_; 1400 } 1401 1402 private java.lang.Object instancesFormat_ = ""; 1403 /** 1404 * 1405 * 1406 * <pre> 1407 * Required. The format in which instances are given, must be one of the 1408 * [Model's][google.cloud.aiplatform.v1beta1.BatchPredictionJob.model] 1409 * [supported_input_storage_formats][google.cloud.aiplatform.v1beta1.Model.supported_input_storage_formats]. 1410 * </pre> 1411 * 1412 * <code>string instances_format = 1 [(.google.api.field_behavior) = REQUIRED];</code> 1413 * 1414 * @return The instancesFormat. 1415 */ getInstancesFormat()1416 public java.lang.String getInstancesFormat() { 1417 java.lang.Object ref = instancesFormat_; 1418 if (!(ref instanceof java.lang.String)) { 1419 com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; 1420 java.lang.String s = bs.toStringUtf8(); 1421 instancesFormat_ = s; 1422 return s; 1423 } else { 1424 return (java.lang.String) ref; 1425 } 1426 } 1427 /** 1428 * 1429 * 1430 * <pre> 1431 * Required. The format in which instances are given, must be one of the 1432 * [Model's][google.cloud.aiplatform.v1beta1.BatchPredictionJob.model] 1433 * [supported_input_storage_formats][google.cloud.aiplatform.v1beta1.Model.supported_input_storage_formats]. 1434 * </pre> 1435 * 1436 * <code>string instances_format = 1 [(.google.api.field_behavior) = REQUIRED];</code> 1437 * 1438 * @return The bytes for instancesFormat. 1439 */ getInstancesFormatBytes()1440 public com.google.protobuf.ByteString getInstancesFormatBytes() { 1441 java.lang.Object ref = instancesFormat_; 1442 if (ref instanceof String) { 1443 com.google.protobuf.ByteString b = 1444 com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref); 1445 instancesFormat_ = b; 1446 return b; 1447 } else { 1448 return (com.google.protobuf.ByteString) ref; 1449 } 1450 } 1451 /** 1452 * 1453 * 1454 * <pre> 1455 * Required. The format in which instances are given, must be one of the 1456 * [Model's][google.cloud.aiplatform.v1beta1.BatchPredictionJob.model] 1457 * [supported_input_storage_formats][google.cloud.aiplatform.v1beta1.Model.supported_input_storage_formats]. 
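   *
   * Illustrative sketch ("jsonl" is only an example value; the accepted formats are
   * defined by the target Model's supported_input_storage_formats):
   *
   *   builder.setInstancesFormat("jsonl");  // passing null throws NullPointerException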
1458 * </pre> 1459 * 1460 * <code>string instances_format = 1 [(.google.api.field_behavior) = REQUIRED];</code> 1461 * 1462 * @param value The instancesFormat to set. 1463 * @return This builder for chaining. 1464 */ setInstancesFormat(java.lang.String value)1465 public Builder setInstancesFormat(java.lang.String value) { 1466 if (value == null) { 1467 throw new NullPointerException(); 1468 } 1469 instancesFormat_ = value; 1470 bitField0_ |= 0x00000004; 1471 onChanged(); 1472 return this; 1473 } 1474 /** 1475 * 1476 * 1477 * <pre> 1478 * Required. The format in which instances are given, must be one of the 1479 * [Model's][google.cloud.aiplatform.v1beta1.BatchPredictionJob.model] 1480 * [supported_input_storage_formats][google.cloud.aiplatform.v1beta1.Model.supported_input_storage_formats]. 1481 * </pre> 1482 * 1483 * <code>string instances_format = 1 [(.google.api.field_behavior) = REQUIRED];</code> 1484 * 1485 * @return This builder for chaining. 1486 */ clearInstancesFormat()1487 public Builder clearInstancesFormat() { 1488 instancesFormat_ = getDefaultInstance().getInstancesFormat(); 1489 bitField0_ = (bitField0_ & ~0x00000004); 1490 onChanged(); 1491 return this; 1492 } 1493 /** 1494 * 1495 * 1496 * <pre> 1497 * Required. The format in which instances are given, must be one of the 1498 * [Model's][google.cloud.aiplatform.v1beta1.BatchPredictionJob.model] 1499 * [supported_input_storage_formats][google.cloud.aiplatform.v1beta1.Model.supported_input_storage_formats]. 1500 * </pre> 1501 * 1502 * <code>string instances_format = 1 [(.google.api.field_behavior) = REQUIRED];</code> 1503 * 1504 * @param value The bytes for instancesFormat to set. 1505 * @return This builder for chaining. 1506 */ setInstancesFormatBytes(com.google.protobuf.ByteString value)1507 public Builder setInstancesFormatBytes(com.google.protobuf.ByteString value) { 1508 if (value == null) { 1509 throw new NullPointerException(); 1510 } 1511 checkByteStringIsUtf8(value); 1512 instancesFormat_ = value; 1513 bitField0_ |= 0x00000004; 1514 onChanged(); 1515 return this; 1516 } 1517 1518 @java.lang.Override setUnknownFields( final com.google.protobuf.UnknownFieldSet unknownFields)1519 public final Builder setUnknownFields( 1520 final com.google.protobuf.UnknownFieldSet unknownFields) { 1521 return super.setUnknownFields(unknownFields); 1522 } 1523 1524 @java.lang.Override mergeUnknownFields( final com.google.protobuf.UnknownFieldSet unknownFields)1525 public final Builder mergeUnknownFields( 1526 final com.google.protobuf.UnknownFieldSet unknownFields) { 1527 return super.mergeUnknownFields(unknownFields); 1528 } 1529 1530 // @@protoc_insertion_point(builder_scope:google.cloud.aiplatform.v1beta1.BatchPredictionJob.InputConfig) 1531 } 1532 1533 // @@protoc_insertion_point(class_scope:google.cloud.aiplatform.v1beta1.BatchPredictionJob.InputConfig) 1534 private static final com.google.cloud.aiplatform.v1beta1.BatchPredictionJob.InputConfig 1535 DEFAULT_INSTANCE; 1536 1537 static { 1538 DEFAULT_INSTANCE = new com.google.cloud.aiplatform.v1beta1.BatchPredictionJob.InputConfig(); 1539 } 1540 1541 public static com.google.cloud.aiplatform.v1beta1.BatchPredictionJob.InputConfig getDefaultInstance()1542 getDefaultInstance() { 1543 return DEFAULT_INSTANCE; 1544 } 1545 1546 private static final com.google.protobuf.Parser<InputConfig> PARSER = 1547 new com.google.protobuf.AbstractParser<InputConfig>() { 1548 @java.lang.Override 1549 public InputConfig parsePartialFrom( 1550 com.google.protobuf.CodedInputStream input, 1551 
com.google.protobuf.ExtensionRegistryLite extensionRegistry) 1552 throws com.google.protobuf.InvalidProtocolBufferException { 1553 Builder builder = newBuilder(); 1554 try { 1555 builder.mergeFrom(input, extensionRegistry); 1556 } catch (com.google.protobuf.InvalidProtocolBufferException e) { 1557 throw e.setUnfinishedMessage(builder.buildPartial()); 1558 } catch (com.google.protobuf.UninitializedMessageException e) { 1559 throw e.asInvalidProtocolBufferException() 1560 .setUnfinishedMessage(builder.buildPartial()); 1561 } catch (java.io.IOException e) { 1562 throw new com.google.protobuf.InvalidProtocolBufferException(e) 1563 .setUnfinishedMessage(builder.buildPartial()); 1564 } 1565 return builder.buildPartial(); 1566 } 1567 }; 1568 parser()1569 public static com.google.protobuf.Parser<InputConfig> parser() { 1570 return PARSER; 1571 } 1572 1573 @java.lang.Override getParserForType()1574 public com.google.protobuf.Parser<InputConfig> getParserForType() { 1575 return PARSER; 1576 } 1577 1578 @java.lang.Override 1579 public com.google.cloud.aiplatform.v1beta1.BatchPredictionJob.InputConfig getDefaultInstanceForType()1580 getDefaultInstanceForType() { 1581 return DEFAULT_INSTANCE; 1582 } 1583 } 1584 1585 public interface InstanceConfigOrBuilder 1586 extends 1587 // @@protoc_insertion_point(interface_extends:google.cloud.aiplatform.v1beta1.BatchPredictionJob.InstanceConfig) 1588 com.google.protobuf.MessageOrBuilder { 1589 1590 /** 1591 * 1592 * 1593 * <pre> 1594 * The format of the instance that the Model accepts. Vertex AI will 1595 * convert compatible 1596 * [batch prediction input instance 1597 * formats][google.cloud.aiplatform.v1beta1.BatchPredictionJob.InputConfig.instances_format] 1598 * to the specified format. 1599 * Supported values are: 1600 * * `object`: Each input is converted to JSON object format. 1601 * * For `bigquery`, each row is converted to an object. 1602 * * For `jsonl`, each line of the JSONL input must be an object. 1603 * * Does not apply to `csv`, `file-list`, `tf-record`, or 1604 * `tf-record-gzip`. 1605 * * `array`: Each input is converted to JSON array format. 1606 * * For `bigquery`, each row is converted to an array. The order 1607 * of columns is determined by the BigQuery column order, unless 1608 * [included_fields][google.cloud.aiplatform.v1beta1.BatchPredictionJob.InstanceConfig.included_fields] 1609 * is populated. 1610 * [included_fields][google.cloud.aiplatform.v1beta1.BatchPredictionJob.InstanceConfig.included_fields] 1611 * must be populated for specifying field orders. 1612 * * For `jsonl`, if each line of the JSONL input is an object, 1613 * [included_fields][google.cloud.aiplatform.v1beta1.BatchPredictionJob.InstanceConfig.included_fields] 1614 * must be populated for specifying field orders. 1615 * * Does not apply to `csv`, `file-list`, `tf-record`, or 1616 * `tf-record-gzip`. 1617 * If not specified, Vertex AI converts the batch prediction input as 1618 * follows: 1619 * * For `bigquery` and `csv`, the behavior is the same as `array`. The 1620 * order of columns is the same as defined in the file or table, unless 1621 * [included_fields][google.cloud.aiplatform.v1beta1.BatchPredictionJob.InstanceConfig.included_fields] 1622 * is populated. 1623 * * For `jsonl`, the prediction instance format is determined by 1624 * each line of the input. 
     * * For `tf-record`/`tf-record-gzip`, each record will be converted to
     *   an object in the format of `{"b64": <value>}`, where `<value>` is
     *   the Base64-encoded string of the content of the record.
     * * For `file-list`, each file in the list will be converted to an
     *   object in the format of `{"b64": <value>}`, where `<value>` is
     *   the Base64-encoded string of the content of the file.
     * </pre>
     *
     * <code>string instance_type = 1;</code>
     *
     * @return The instanceType.
     */
    java.lang.String getInstanceType();
    /**
     *
     *
     * <pre>
     * The format of the instance that the Model accepts. Vertex AI will
     * convert compatible
     * [batch prediction input instance
     * formats][google.cloud.aiplatform.v1beta1.BatchPredictionJob.InputConfig.instances_format]
     * to the specified format.
     * Supported values are:
     * * `object`: Each input is converted to JSON object format.
     *    * For `bigquery`, each row is converted to an object.
     *    * For `jsonl`, each line of the JSONL input must be an object.
     *    * Does not apply to `csv`, `file-list`, `tf-record`, or
     *      `tf-record-gzip`.
     * * `array`: Each input is converted to JSON array format.
     *    * For `bigquery`, each row is converted to an array. The order
     *      of columns is determined by the BigQuery column order, unless
     *      [included_fields][google.cloud.aiplatform.v1beta1.BatchPredictionJob.InstanceConfig.included_fields]
     *      is populated.
     *      [included_fields][google.cloud.aiplatform.v1beta1.BatchPredictionJob.InstanceConfig.included_fields]
     *      must be populated for specifying field orders.
     *    * For `jsonl`, if each line of the JSONL input is an object,
     *      [included_fields][google.cloud.aiplatform.v1beta1.BatchPredictionJob.InstanceConfig.included_fields]
     *      must be populated for specifying field orders.
     *    * Does not apply to `csv`, `file-list`, `tf-record`, or
     *      `tf-record-gzip`.
     * If not specified, Vertex AI converts the batch prediction input as
     * follows:
     * * For `bigquery` and `csv`, the behavior is the same as `array`. The
     *   order of columns is the same as defined in the file or table, unless
     *   [included_fields][google.cloud.aiplatform.v1beta1.BatchPredictionJob.InstanceConfig.included_fields]
     *   is populated.
     * * For `jsonl`, the prediction instance format is determined by
     *   each line of the input.
     * * For `tf-record`/`tf-record-gzip`, each record will be converted to
     *   an object in the format of `{"b64": <value>}`, where `<value>` is
     *   the Base64-encoded string of the content of the record.
     * * For `file-list`, each file in the list will be converted to an
     *   object in the format of `{"b64": <value>}`, where `<value>` is
     *   the Base64-encoded string of the content of the file.
     * </pre>
     *
     * <code>string instance_type = 1;</code>
     *
     * @return The bytes for instanceType.
     */
    com.google.protobuf.ByteString getInstanceTypeBytes();

    /**
     *
     *
     * <pre>
     * The name of the field that is considered as a key.
     * The values identified by the key field are not included in the transformed
     * instances that are sent to the Model. This is similar to
     * specifying the name of this field in
     * [excluded_fields][google.cloud.aiplatform.v1beta1.BatchPredictionJob.InstanceConfig.excluded_fields].
     * In addition, the batch prediction output will not include the instances.
     * Instead the output will only include the value of the key field, in a
     * field named `key` in the output:
     * * For `jsonl` output format, the output will have a `key` field
     *   instead of the `instance` field.
     * * For `csv`/`bigquery` output format, the output will have a `key`
     *   column instead of the instance feature columns.
     * The input must be JSONL with objects at each line, CSV, BigQuery
     * or TfRecord.
     * </pre>
     *
     * <code>string key_field = 2;</code>
     *
     * @return The keyField.
     */
    java.lang.String getKeyField();
    /**
     *
     *
     * <pre>
     * The name of the field that is considered as a key.
     * The values identified by the key field are not included in the transformed
     * instances that are sent to the Model. This is similar to
     * specifying the name of this field in
     * [excluded_fields][google.cloud.aiplatform.v1beta1.BatchPredictionJob.InstanceConfig.excluded_fields].
     * In addition, the batch prediction output will not include the instances.
     * Instead the output will only include the value of the key field, in a
     * field named `key` in the output:
     * * For `jsonl` output format, the output will have a `key` field
     *   instead of the `instance` field.
     * * For `csv`/`bigquery` output format, the output will have a `key`
     *   column instead of the instance feature columns.
     * The input must be JSONL with objects at each line, CSV, BigQuery
     * or TfRecord.
     * </pre>
     *
     * <code>string key_field = 2;</code>
     *
     * @return The bytes for keyField.
     */
    com.google.protobuf.ByteString getKeyFieldBytes();

    /**
     *
     *
     * <pre>
     * Fields that will be included in the prediction instance that is
     * sent to the Model.
     * If
     * [instance_type][google.cloud.aiplatform.v1beta1.BatchPredictionJob.InstanceConfig.instance_type]
     * is `array`, the order of field names in included_fields also determines
     * the order of the values in the array.
     * When included_fields is populated,
     * [excluded_fields][google.cloud.aiplatform.v1beta1.BatchPredictionJob.InstanceConfig.excluded_fields]
     * must be empty.
     * The input must be JSONL with objects at each line, CSV, BigQuery
     * or TfRecord.
     * </pre>
     *
     * <code>repeated string included_fields = 3;</code>
     *
     * @return A list containing the includedFields.
     */
    java.util.List<java.lang.String> getIncludedFieldsList();
    /**
     *
     *
     * <pre>
     * Fields that will be included in the prediction instance that is
     * sent to the Model.
     * If
     * [instance_type][google.cloud.aiplatform.v1beta1.BatchPredictionJob.InstanceConfig.instance_type]
     * is `array`, the order of field names in included_fields also determines
     * the order of the values in the array.
     * When included_fields is populated,
     * [excluded_fields][google.cloud.aiplatform.v1beta1.BatchPredictionJob.InstanceConfig.excluded_fields]
     * must be empty.
     * The input must be JSONL with objects at each line, CSV, BigQuery
     * or TfRecord.
     * </pre>
     *
     * <code>repeated string included_fields = 3;</code>
     *
     * @return The count of includedFields.
1780 */ getIncludedFieldsCount()1781 int getIncludedFieldsCount(); 1782 /** 1783 * 1784 * 1785 * <pre> 1786 * Fields that will be included in the prediction instance that is 1787 * sent to the Model. 1788 * If 1789 * [instance_type][google.cloud.aiplatform.v1beta1.BatchPredictionJob.InstanceConfig.instance_type] 1790 * is `array`, the order of field names in included_fields also determines 1791 * the order of the values in the array. 1792 * When included_fields is populated, 1793 * [excluded_fields][google.cloud.aiplatform.v1beta1.BatchPredictionJob.InstanceConfig.excluded_fields] 1794 * must be empty. 1795 * The input must be JSONL with objects at each line, CSV, BigQuery 1796 * or TfRecord. 1797 * </pre> 1798 * 1799 * <code>repeated string included_fields = 3;</code> 1800 * 1801 * @param index The index of the element to return. 1802 * @return The includedFields at the given index. 1803 */ getIncludedFields(int index)1804 java.lang.String getIncludedFields(int index); 1805 /** 1806 * 1807 * 1808 * <pre> 1809 * Fields that will be included in the prediction instance that is 1810 * sent to the Model. 1811 * If 1812 * [instance_type][google.cloud.aiplatform.v1beta1.BatchPredictionJob.InstanceConfig.instance_type] 1813 * is `array`, the order of field names in included_fields also determines 1814 * the order of the values in the array. 1815 * When included_fields is populated, 1816 * [excluded_fields][google.cloud.aiplatform.v1beta1.BatchPredictionJob.InstanceConfig.excluded_fields] 1817 * must be empty. 1818 * The input must be JSONL with objects at each line, CSV, BigQuery 1819 * or TfRecord. 1820 * </pre> 1821 * 1822 * <code>repeated string included_fields = 3;</code> 1823 * 1824 * @param index The index of the value to return. 1825 * @return The bytes of the includedFields at the given index. 1826 */ getIncludedFieldsBytes(int index)1827 com.google.protobuf.ByteString getIncludedFieldsBytes(int index); 1828 1829 /** 1830 * 1831 * 1832 * <pre> 1833 * Fields that will be excluded in the prediction instance that is 1834 * sent to the Model. 1835 * Excluded will be attached to the batch prediction output if 1836 * [key_field][google.cloud.aiplatform.v1beta1.BatchPredictionJob.InstanceConfig.key_field] 1837 * is not specified. 1838 * When excluded_fields is populated, 1839 * [included_fields][google.cloud.aiplatform.v1beta1.BatchPredictionJob.InstanceConfig.included_fields] 1840 * must be empty. 1841 * The input must be JSONL with objects at each line, CSV, BigQuery 1842 * or TfRecord. 1843 * </pre> 1844 * 1845 * <code>repeated string excluded_fields = 4;</code> 1846 * 1847 * @return A list containing the excludedFields. 1848 */ getExcludedFieldsList()1849 java.util.List<java.lang.String> getExcludedFieldsList(); 1850 /** 1851 * 1852 * 1853 * <pre> 1854 * Fields that will be excluded in the prediction instance that is 1855 * sent to the Model. 1856 * Excluded will be attached to the batch prediction output if 1857 * [key_field][google.cloud.aiplatform.v1beta1.BatchPredictionJob.InstanceConfig.key_field] 1858 * is not specified. 1859 * When excluded_fields is populated, 1860 * [included_fields][google.cloud.aiplatform.v1beta1.BatchPredictionJob.InstanceConfig.included_fields] 1861 * must be empty. 1862 * The input must be JSONL with objects at each line, CSV, BigQuery 1863 * or TfRecord. 1864 * </pre> 1865 * 1866 * <code>repeated string excluded_fields = 4;</code> 1867 * 1868 * @return The count of excludedFields. 
1869 */ getExcludedFieldsCount()1870 int getExcludedFieldsCount(); 1871 /** 1872 * 1873 * 1874 * <pre> 1875 * Fields that will be excluded in the prediction instance that is 1876 * sent to the Model. 1877 * Excluded will be attached to the batch prediction output if 1878 * [key_field][google.cloud.aiplatform.v1beta1.BatchPredictionJob.InstanceConfig.key_field] 1879 * is not specified. 1880 * When excluded_fields is populated, 1881 * [included_fields][google.cloud.aiplatform.v1beta1.BatchPredictionJob.InstanceConfig.included_fields] 1882 * must be empty. 1883 * The input must be JSONL with objects at each line, CSV, BigQuery 1884 * or TfRecord. 1885 * </pre> 1886 * 1887 * <code>repeated string excluded_fields = 4;</code> 1888 * 1889 * @param index The index of the element to return. 1890 * @return The excludedFields at the given index. 1891 */ getExcludedFields(int index)1892 java.lang.String getExcludedFields(int index); 1893 /** 1894 * 1895 * 1896 * <pre> 1897 * Fields that will be excluded in the prediction instance that is 1898 * sent to the Model. 1899 * Excluded will be attached to the batch prediction output if 1900 * [key_field][google.cloud.aiplatform.v1beta1.BatchPredictionJob.InstanceConfig.key_field] 1901 * is not specified. 1902 * When excluded_fields is populated, 1903 * [included_fields][google.cloud.aiplatform.v1beta1.BatchPredictionJob.InstanceConfig.included_fields] 1904 * must be empty. 1905 * The input must be JSONL with objects at each line, CSV, BigQuery 1906 * or TfRecord. 1907 * </pre> 1908 * 1909 * <code>repeated string excluded_fields = 4;</code> 1910 * 1911 * @param index The index of the value to return. 1912 * @return The bytes of the excludedFields at the given index. 1913 */ getExcludedFieldsBytes(int index)1914 com.google.protobuf.ByteString getExcludedFieldsBytes(int index); 1915 } 1916 /** 1917 * 1918 * 1919 * <pre> 1920 * Configuration defining how to transform batch prediction input instances to 1921 * the instances that the Model accepts. 1922 * </pre> 1923 * 1924 * Protobuf type {@code google.cloud.aiplatform.v1beta1.BatchPredictionJob.InstanceConfig} 1925 */ 1926 public static final class InstanceConfig extends com.google.protobuf.GeneratedMessageV3 1927 implements 1928 // @@protoc_insertion_point(message_implements:google.cloud.aiplatform.v1beta1.BatchPredictionJob.InstanceConfig) 1929 InstanceConfigOrBuilder { 1930 private static final long serialVersionUID = 0L; 1931 // Use InstanceConfig.newBuilder() to construct. 
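    // Illustrative usage sketch (not part of the generated code): one way a caller might
    // configure this message so that each batch prediction input row is converted to a JSON
    // array whose element order follows included_fields. The field names "feature_a" and
    // "feature_b" are hypothetical examples, not fields defined by this proto.
    //
    //   BatchPredictionJob.InstanceConfig instanceConfig =
    //       BatchPredictionJob.InstanceConfig.newBuilder()
    //           .setInstanceType("array")
    //           .addIncludedFields("feature_a")
    //           .addIncludedFields("feature_b")
    //           .build();
    //
    // As documented above, included_fields and excluded_fields are mutually exclusive, and
    // key_field can be set instead to echo only a row key into the batch prediction output.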
InstanceConfig(com.google.protobuf.GeneratedMessageV3.Builder<?> builder)1932 private InstanceConfig(com.google.protobuf.GeneratedMessageV3.Builder<?> builder) { 1933 super(builder); 1934 } 1935 InstanceConfig()1936 private InstanceConfig() { 1937 instanceType_ = ""; 1938 keyField_ = ""; 1939 includedFields_ = com.google.protobuf.LazyStringArrayList.EMPTY; 1940 excludedFields_ = com.google.protobuf.LazyStringArrayList.EMPTY; 1941 } 1942 1943 @java.lang.Override 1944 @SuppressWarnings({"unused"}) newInstance(UnusedPrivateParameter unused)1945 protected java.lang.Object newInstance(UnusedPrivateParameter unused) { 1946 return new InstanceConfig(); 1947 } 1948 1949 @java.lang.Override getUnknownFields()1950 public final com.google.protobuf.UnknownFieldSet getUnknownFields() { 1951 return this.unknownFields; 1952 } 1953 getDescriptor()1954 public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { 1955 return com.google.cloud.aiplatform.v1beta1.BatchPredictionJobProto 1956 .internal_static_google_cloud_aiplatform_v1beta1_BatchPredictionJob_InstanceConfig_descriptor; 1957 } 1958 1959 @java.lang.Override 1960 protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable()1961 internalGetFieldAccessorTable() { 1962 return com.google.cloud.aiplatform.v1beta1.BatchPredictionJobProto 1963 .internal_static_google_cloud_aiplatform_v1beta1_BatchPredictionJob_InstanceConfig_fieldAccessorTable 1964 .ensureFieldAccessorsInitialized( 1965 com.google.cloud.aiplatform.v1beta1.BatchPredictionJob.InstanceConfig.class, 1966 com.google.cloud.aiplatform.v1beta1.BatchPredictionJob.InstanceConfig.Builder.class); 1967 } 1968 1969 public static final int INSTANCE_TYPE_FIELD_NUMBER = 1; 1970 1971 @SuppressWarnings("serial") 1972 private volatile java.lang.Object instanceType_ = ""; 1973 /** 1974 * 1975 * 1976 * <pre> 1977 * The format of the instance that the Model accepts. Vertex AI will 1978 * convert compatible 1979 * [batch prediction input instance 1980 * formats][google.cloud.aiplatform.v1beta1.BatchPredictionJob.InputConfig.instances_format] 1981 * to the specified format. 1982 * Supported values are: 1983 * * `object`: Each input is converted to JSON object format. 1984 * * For `bigquery`, each row is converted to an object. 1985 * * For `jsonl`, each line of the JSONL input must be an object. 1986 * * Does not apply to `csv`, `file-list`, `tf-record`, or 1987 * `tf-record-gzip`. 1988 * * `array`: Each input is converted to JSON array format. 1989 * * For `bigquery`, each row is converted to an array. The order 1990 * of columns is determined by the BigQuery column order, unless 1991 * [included_fields][google.cloud.aiplatform.v1beta1.BatchPredictionJob.InstanceConfig.included_fields] 1992 * is populated. 1993 * [included_fields][google.cloud.aiplatform.v1beta1.BatchPredictionJob.InstanceConfig.included_fields] 1994 * must be populated for specifying field orders. 1995 * * For `jsonl`, if each line of the JSONL input is an object, 1996 * [included_fields][google.cloud.aiplatform.v1beta1.BatchPredictionJob.InstanceConfig.included_fields] 1997 * must be populated for specifying field orders. 1998 * * Does not apply to `csv`, `file-list`, `tf-record`, or 1999 * `tf-record-gzip`. 2000 * If not specified, Vertex AI converts the batch prediction input as 2001 * follows: 2002 * * For `bigquery` and `csv`, the behavior is the same as `array`. 
The 2003 * order of columns is the same as defined in the file or table, unless 2004 * [included_fields][google.cloud.aiplatform.v1beta1.BatchPredictionJob.InstanceConfig.included_fields] 2005 * is populated. 2006 * * For `jsonl`, the prediction instance format is determined by 2007 * each line of the input. 2008 * * For `tf-record`/`tf-record-gzip`, each record will be converted to 2009 * an object in the format of `{"b64": <value>}`, where `<value>` is 2010 * the Base64-encoded string of the content of the record. 2011 * * For `file-list`, each file in the list will be converted to an 2012 * object in the format of `{"b64": <value>}`, where `<value>` is 2013 * the Base64-encoded string of the content of the file. 2014 * </pre> 2015 * 2016 * <code>string instance_type = 1;</code> 2017 * 2018 * @return The instanceType. 2019 */ 2020 @java.lang.Override getInstanceType()2021 public java.lang.String getInstanceType() { 2022 java.lang.Object ref = instanceType_; 2023 if (ref instanceof java.lang.String) { 2024 return (java.lang.String) ref; 2025 } else { 2026 com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; 2027 java.lang.String s = bs.toStringUtf8(); 2028 instanceType_ = s; 2029 return s; 2030 } 2031 } 2032 /** 2033 * 2034 * 2035 * <pre> 2036 * The format of the instance that the Model accepts. Vertex AI will 2037 * convert compatible 2038 * [batch prediction input instance 2039 * formats][google.cloud.aiplatform.v1beta1.BatchPredictionJob.InputConfig.instances_format] 2040 * to the specified format. 2041 * Supported values are: 2042 * * `object`: Each input is converted to JSON object format. 2043 * * For `bigquery`, each row is converted to an object. 2044 * * For `jsonl`, each line of the JSONL input must be an object. 2045 * * Does not apply to `csv`, `file-list`, `tf-record`, or 2046 * `tf-record-gzip`. 2047 * * `array`: Each input is converted to JSON array format. 2048 * * For `bigquery`, each row is converted to an array. The order 2049 * of columns is determined by the BigQuery column order, unless 2050 * [included_fields][google.cloud.aiplatform.v1beta1.BatchPredictionJob.InstanceConfig.included_fields] 2051 * is populated. 2052 * [included_fields][google.cloud.aiplatform.v1beta1.BatchPredictionJob.InstanceConfig.included_fields] 2053 * must be populated for specifying field orders. 2054 * * For `jsonl`, if each line of the JSONL input is an object, 2055 * [included_fields][google.cloud.aiplatform.v1beta1.BatchPredictionJob.InstanceConfig.included_fields] 2056 * must be populated for specifying field orders. 2057 * * Does not apply to `csv`, `file-list`, `tf-record`, or 2058 * `tf-record-gzip`. 2059 * If not specified, Vertex AI converts the batch prediction input as 2060 * follows: 2061 * * For `bigquery` and `csv`, the behavior is the same as `array`. The 2062 * order of columns is the same as defined in the file or table, unless 2063 * [included_fields][google.cloud.aiplatform.v1beta1.BatchPredictionJob.InstanceConfig.included_fields] 2064 * is populated. 2065 * * For `jsonl`, the prediction instance format is determined by 2066 * each line of the input. 2067 * * For `tf-record`/`tf-record-gzip`, each record will be converted to 2068 * an object in the format of `{"b64": <value>}`, where `<value>` is 2069 * the Base64-encoded string of the content of the record. 
     *    * For `file-list`, each file in the list will be converted to an
     *      object in the format of `{"b64": <value>}`, where `<value>` is
     *      the Base64-encoded string of the content of the file.
     * </pre>
     *
     * <code>string instance_type = 1;</code>
     *
     * @return The bytes for instanceType.
     */
    @java.lang.Override
    public com.google.protobuf.ByteString getInstanceTypeBytes() {
      java.lang.Object ref = instanceType_;
      if (ref instanceof java.lang.String) {
        com.google.protobuf.ByteString b =
            com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
        instanceType_ = b;
        return b;
      } else {
        return (com.google.protobuf.ByteString) ref;
      }
    }

    public static final int KEY_FIELD_FIELD_NUMBER = 2;

    @SuppressWarnings("serial")
    private volatile java.lang.Object keyField_ = "";
    /**
     *
     *
     * <pre>
     * The name of the field that is considered as a key.
     * The values identified by the key field are not included in the transformed
     * instances that are sent to the Model. This is similar to
     * specifying the name of this field in
     * [excluded_fields][google.cloud.aiplatform.v1beta1.BatchPredictionJob.InstanceConfig.excluded_fields].
     * In addition, the batch prediction output will not include the instances.
     * Instead the output will only include the value of the key field, in a
     * field named `key` in the output:
     * * For `jsonl` output format, the output will have a `key` field
     *   instead of the `instance` field.
     * * For `csv`/`bigquery` output format, the output will have a `key`
     *   column instead of the instance feature columns.
     * The input must be JSONL with objects at each line, CSV, BigQuery
     * or TfRecord.
     * </pre>
     *
     * <code>string key_field = 2;</code>
     *
     * @return The keyField.
     */
    @java.lang.Override
    public java.lang.String getKeyField() {
      java.lang.Object ref = keyField_;
      if (ref instanceof java.lang.String) {
        return (java.lang.String) ref;
      } else {
        com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
        java.lang.String s = bs.toStringUtf8();
        keyField_ = s;
        return s;
      }
    }
    /**
     *
     *
     * <pre>
     * The name of the field that is considered as a key.
     * The values identified by the key field are not included in the transformed
     * instances that are sent to the Model. This is similar to
     * specifying the name of this field in
     * [excluded_fields][google.cloud.aiplatform.v1beta1.BatchPredictionJob.InstanceConfig.excluded_fields].
     * In addition, the batch prediction output will not include the instances.
     * Instead the output will only include the value of the key field, in a
     * field named `key` in the output:
     * * For `jsonl` output format, the output will have a `key` field
     *   instead of the `instance` field.
     * * For `csv`/`bigquery` output format, the output will have a `key`
     *   column instead of the instance feature columns.
     * The input must be JSONL with objects at each line, CSV, BigQuery
     * or TfRecord.
     * </pre>
     *
     * <code>string key_field = 2;</code>
     *
     * @return The bytes for keyField.
2155 */ 2156 @java.lang.Override getKeyFieldBytes()2157 public com.google.protobuf.ByteString getKeyFieldBytes() { 2158 java.lang.Object ref = keyField_; 2159 if (ref instanceof java.lang.String) { 2160 com.google.protobuf.ByteString b = 2161 com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref); 2162 keyField_ = b; 2163 return b; 2164 } else { 2165 return (com.google.protobuf.ByteString) ref; 2166 } 2167 } 2168 2169 public static final int INCLUDED_FIELDS_FIELD_NUMBER = 3; 2170 2171 @SuppressWarnings("serial") 2172 private com.google.protobuf.LazyStringList includedFields_; 2173 /** 2174 * 2175 * 2176 * <pre> 2177 * Fields that will be included in the prediction instance that is 2178 * sent to the Model. 2179 * If 2180 * [instance_type][google.cloud.aiplatform.v1beta1.BatchPredictionJob.InstanceConfig.instance_type] 2181 * is `array`, the order of field names in included_fields also determines 2182 * the order of the values in the array. 2183 * When included_fields is populated, 2184 * [excluded_fields][google.cloud.aiplatform.v1beta1.BatchPredictionJob.InstanceConfig.excluded_fields] 2185 * must be empty. 2186 * The input must be JSONL with objects at each line, CSV, BigQuery 2187 * or TfRecord. 2188 * </pre> 2189 * 2190 * <code>repeated string included_fields = 3;</code> 2191 * 2192 * @return A list containing the includedFields. 2193 */ getIncludedFieldsList()2194 public com.google.protobuf.ProtocolStringList getIncludedFieldsList() { 2195 return includedFields_; 2196 } 2197 /** 2198 * 2199 * 2200 * <pre> 2201 * Fields that will be included in the prediction instance that is 2202 * sent to the Model. 2203 * If 2204 * [instance_type][google.cloud.aiplatform.v1beta1.BatchPredictionJob.InstanceConfig.instance_type] 2205 * is `array`, the order of field names in included_fields also determines 2206 * the order of the values in the array. 2207 * When included_fields is populated, 2208 * [excluded_fields][google.cloud.aiplatform.v1beta1.BatchPredictionJob.InstanceConfig.excluded_fields] 2209 * must be empty. 2210 * The input must be JSONL with objects at each line, CSV, BigQuery 2211 * or TfRecord. 2212 * </pre> 2213 * 2214 * <code>repeated string included_fields = 3;</code> 2215 * 2216 * @return The count of includedFields. 2217 */ getIncludedFieldsCount()2218 public int getIncludedFieldsCount() { 2219 return includedFields_.size(); 2220 } 2221 /** 2222 * 2223 * 2224 * <pre> 2225 * Fields that will be included in the prediction instance that is 2226 * sent to the Model. 2227 * If 2228 * [instance_type][google.cloud.aiplatform.v1beta1.BatchPredictionJob.InstanceConfig.instance_type] 2229 * is `array`, the order of field names in included_fields also determines 2230 * the order of the values in the array. 2231 * When included_fields is populated, 2232 * [excluded_fields][google.cloud.aiplatform.v1beta1.BatchPredictionJob.InstanceConfig.excluded_fields] 2233 * must be empty. 2234 * The input must be JSONL with objects at each line, CSV, BigQuery 2235 * or TfRecord. 2236 * </pre> 2237 * 2238 * <code>repeated string included_fields = 3;</code> 2239 * 2240 * @param index The index of the element to return. 2241 * @return The includedFields at the given index. 2242 */ getIncludedFields(int index)2243 public java.lang.String getIncludedFields(int index) { 2244 return includedFields_.get(index); 2245 } 2246 /** 2247 * 2248 * 2249 * <pre> 2250 * Fields that will be included in the prediction instance that is 2251 * sent to the Model. 
2252 * If 2253 * [instance_type][google.cloud.aiplatform.v1beta1.BatchPredictionJob.InstanceConfig.instance_type] 2254 * is `array`, the order of field names in included_fields also determines 2255 * the order of the values in the array. 2256 * When included_fields is populated, 2257 * [excluded_fields][google.cloud.aiplatform.v1beta1.BatchPredictionJob.InstanceConfig.excluded_fields] 2258 * must be empty. 2259 * The input must be JSONL with objects at each line, CSV, BigQuery 2260 * or TfRecord. 2261 * </pre> 2262 * 2263 * <code>repeated string included_fields = 3;</code> 2264 * 2265 * @param index The index of the value to return. 2266 * @return The bytes of the includedFields at the given index. 2267 */ getIncludedFieldsBytes(int index)2268 public com.google.protobuf.ByteString getIncludedFieldsBytes(int index) { 2269 return includedFields_.getByteString(index); 2270 } 2271 2272 public static final int EXCLUDED_FIELDS_FIELD_NUMBER = 4; 2273 2274 @SuppressWarnings("serial") 2275 private com.google.protobuf.LazyStringList excludedFields_; 2276 /** 2277 * 2278 * 2279 * <pre> 2280 * Fields that will be excluded in the prediction instance that is 2281 * sent to the Model. 2282 * Excluded will be attached to the batch prediction output if 2283 * [key_field][google.cloud.aiplatform.v1beta1.BatchPredictionJob.InstanceConfig.key_field] 2284 * is not specified. 2285 * When excluded_fields is populated, 2286 * [included_fields][google.cloud.aiplatform.v1beta1.BatchPredictionJob.InstanceConfig.included_fields] 2287 * must be empty. 2288 * The input must be JSONL with objects at each line, CSV, BigQuery 2289 * or TfRecord. 2290 * </pre> 2291 * 2292 * <code>repeated string excluded_fields = 4;</code> 2293 * 2294 * @return A list containing the excludedFields. 2295 */ getExcludedFieldsList()2296 public com.google.protobuf.ProtocolStringList getExcludedFieldsList() { 2297 return excludedFields_; 2298 } 2299 /** 2300 * 2301 * 2302 * <pre> 2303 * Fields that will be excluded in the prediction instance that is 2304 * sent to the Model. 2305 * Excluded will be attached to the batch prediction output if 2306 * [key_field][google.cloud.aiplatform.v1beta1.BatchPredictionJob.InstanceConfig.key_field] 2307 * is not specified. 2308 * When excluded_fields is populated, 2309 * [included_fields][google.cloud.aiplatform.v1beta1.BatchPredictionJob.InstanceConfig.included_fields] 2310 * must be empty. 2311 * The input must be JSONL with objects at each line, CSV, BigQuery 2312 * or TfRecord. 2313 * </pre> 2314 * 2315 * <code>repeated string excluded_fields = 4;</code> 2316 * 2317 * @return The count of excludedFields. 2318 */ getExcludedFieldsCount()2319 public int getExcludedFieldsCount() { 2320 return excludedFields_.size(); 2321 } 2322 /** 2323 * 2324 * 2325 * <pre> 2326 * Fields that will be excluded in the prediction instance that is 2327 * sent to the Model. 2328 * Excluded will be attached to the batch prediction output if 2329 * [key_field][google.cloud.aiplatform.v1beta1.BatchPredictionJob.InstanceConfig.key_field] 2330 * is not specified. 2331 * When excluded_fields is populated, 2332 * [included_fields][google.cloud.aiplatform.v1beta1.BatchPredictionJob.InstanceConfig.included_fields] 2333 * must be empty. 2334 * The input must be JSONL with objects at each line, CSV, BigQuery 2335 * or TfRecord. 2336 * </pre> 2337 * 2338 * <code>repeated string excluded_fields = 4;</code> 2339 * 2340 * @param index The index of the element to return. 2341 * @return The excludedFields at the given index. 
2342 */ getExcludedFields(int index)2343 public java.lang.String getExcludedFields(int index) { 2344 return excludedFields_.get(index); 2345 } 2346 /** 2347 * 2348 * 2349 * <pre> 2350 * Fields that will be excluded in the prediction instance that is 2351 * sent to the Model. 2352 * Excluded will be attached to the batch prediction output if 2353 * [key_field][google.cloud.aiplatform.v1beta1.BatchPredictionJob.InstanceConfig.key_field] 2354 * is not specified. 2355 * When excluded_fields is populated, 2356 * [included_fields][google.cloud.aiplatform.v1beta1.BatchPredictionJob.InstanceConfig.included_fields] 2357 * must be empty. 2358 * The input must be JSONL with objects at each line, CSV, BigQuery 2359 * or TfRecord. 2360 * </pre> 2361 * 2362 * <code>repeated string excluded_fields = 4;</code> 2363 * 2364 * @param index The index of the value to return. 2365 * @return The bytes of the excludedFields at the given index. 2366 */ getExcludedFieldsBytes(int index)2367 public com.google.protobuf.ByteString getExcludedFieldsBytes(int index) { 2368 return excludedFields_.getByteString(index); 2369 } 2370 2371 private byte memoizedIsInitialized = -1; 2372 2373 @java.lang.Override isInitialized()2374 public final boolean isInitialized() { 2375 byte isInitialized = memoizedIsInitialized; 2376 if (isInitialized == 1) return true; 2377 if (isInitialized == 0) return false; 2378 2379 memoizedIsInitialized = 1; 2380 return true; 2381 } 2382 2383 @java.lang.Override writeTo(com.google.protobuf.CodedOutputStream output)2384 public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException { 2385 if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(instanceType_)) { 2386 com.google.protobuf.GeneratedMessageV3.writeString(output, 1, instanceType_); 2387 } 2388 if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(keyField_)) { 2389 com.google.protobuf.GeneratedMessageV3.writeString(output, 2, keyField_); 2390 } 2391 for (int i = 0; i < includedFields_.size(); i++) { 2392 com.google.protobuf.GeneratedMessageV3.writeString(output, 3, includedFields_.getRaw(i)); 2393 } 2394 for (int i = 0; i < excludedFields_.size(); i++) { 2395 com.google.protobuf.GeneratedMessageV3.writeString(output, 4, excludedFields_.getRaw(i)); 2396 } 2397 getUnknownFields().writeTo(output); 2398 } 2399 2400 @java.lang.Override getSerializedSize()2401 public int getSerializedSize() { 2402 int size = memoizedSize; 2403 if (size != -1) return size; 2404 2405 size = 0; 2406 if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(instanceType_)) { 2407 size += com.google.protobuf.GeneratedMessageV3.computeStringSize(1, instanceType_); 2408 } 2409 if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(keyField_)) { 2410 size += com.google.protobuf.GeneratedMessageV3.computeStringSize(2, keyField_); 2411 } 2412 { 2413 int dataSize = 0; 2414 for (int i = 0; i < includedFields_.size(); i++) { 2415 dataSize += computeStringSizeNoTag(includedFields_.getRaw(i)); 2416 } 2417 size += dataSize; 2418 size += 1 * getIncludedFieldsList().size(); 2419 } 2420 { 2421 int dataSize = 0; 2422 for (int i = 0; i < excludedFields_.size(); i++) { 2423 dataSize += computeStringSizeNoTag(excludedFields_.getRaw(i)); 2424 } 2425 size += dataSize; 2426 size += 1 * getExcludedFieldsList().size(); 2427 } 2428 size += getUnknownFields().getSerializedSize(); 2429 memoizedSize = size; 2430 return size; 2431 } 2432 2433 @java.lang.Override equals(final java.lang.Object obj)2434 public boolean equals(final java.lang.Object obj) { 2435 
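      // Generated value equality: after the identity and type checks below, two
      // InstanceConfig messages are equal only if instanceType, keyField, includedFields,
      // excludedFields, and the unknown field set all match.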
if (obj == this) { 2436 return true; 2437 } 2438 if (!(obj instanceof com.google.cloud.aiplatform.v1beta1.BatchPredictionJob.InstanceConfig)) { 2439 return super.equals(obj); 2440 } 2441 com.google.cloud.aiplatform.v1beta1.BatchPredictionJob.InstanceConfig other = 2442 (com.google.cloud.aiplatform.v1beta1.BatchPredictionJob.InstanceConfig) obj; 2443 2444 if (!getInstanceType().equals(other.getInstanceType())) return false; 2445 if (!getKeyField().equals(other.getKeyField())) return false; 2446 if (!getIncludedFieldsList().equals(other.getIncludedFieldsList())) return false; 2447 if (!getExcludedFieldsList().equals(other.getExcludedFieldsList())) return false; 2448 if (!getUnknownFields().equals(other.getUnknownFields())) return false; 2449 return true; 2450 } 2451 2452 @java.lang.Override hashCode()2453 public int hashCode() { 2454 if (memoizedHashCode != 0) { 2455 return memoizedHashCode; 2456 } 2457 int hash = 41; 2458 hash = (19 * hash) + getDescriptor().hashCode(); 2459 hash = (37 * hash) + INSTANCE_TYPE_FIELD_NUMBER; 2460 hash = (53 * hash) + getInstanceType().hashCode(); 2461 hash = (37 * hash) + KEY_FIELD_FIELD_NUMBER; 2462 hash = (53 * hash) + getKeyField().hashCode(); 2463 if (getIncludedFieldsCount() > 0) { 2464 hash = (37 * hash) + INCLUDED_FIELDS_FIELD_NUMBER; 2465 hash = (53 * hash) + getIncludedFieldsList().hashCode(); 2466 } 2467 if (getExcludedFieldsCount() > 0) { 2468 hash = (37 * hash) + EXCLUDED_FIELDS_FIELD_NUMBER; 2469 hash = (53 * hash) + getExcludedFieldsList().hashCode(); 2470 } 2471 hash = (29 * hash) + getUnknownFields().hashCode(); 2472 memoizedHashCode = hash; 2473 return hash; 2474 } 2475 parseFrom( java.nio.ByteBuffer data)2476 public static com.google.cloud.aiplatform.v1beta1.BatchPredictionJob.InstanceConfig parseFrom( 2477 java.nio.ByteBuffer data) throws com.google.protobuf.InvalidProtocolBufferException { 2478 return PARSER.parseFrom(data); 2479 } 2480 parseFrom( java.nio.ByteBuffer data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)2481 public static com.google.cloud.aiplatform.v1beta1.BatchPredictionJob.InstanceConfig parseFrom( 2482 java.nio.ByteBuffer data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) 2483 throws com.google.protobuf.InvalidProtocolBufferException { 2484 return PARSER.parseFrom(data, extensionRegistry); 2485 } 2486 parseFrom( com.google.protobuf.ByteString data)2487 public static com.google.cloud.aiplatform.v1beta1.BatchPredictionJob.InstanceConfig parseFrom( 2488 com.google.protobuf.ByteString data) 2489 throws com.google.protobuf.InvalidProtocolBufferException { 2490 return PARSER.parseFrom(data); 2491 } 2492 parseFrom( com.google.protobuf.ByteString data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)2493 public static com.google.cloud.aiplatform.v1beta1.BatchPredictionJob.InstanceConfig parseFrom( 2494 com.google.protobuf.ByteString data, 2495 com.google.protobuf.ExtensionRegistryLite extensionRegistry) 2496 throws com.google.protobuf.InvalidProtocolBufferException { 2497 return PARSER.parseFrom(data, extensionRegistry); 2498 } 2499 parseFrom( byte[] data)2500 public static com.google.cloud.aiplatform.v1beta1.BatchPredictionJob.InstanceConfig parseFrom( 2501 byte[] data) throws com.google.protobuf.InvalidProtocolBufferException { 2502 return PARSER.parseFrom(data); 2503 } 2504 parseFrom( byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)2505 public static com.google.cloud.aiplatform.v1beta1.BatchPredictionJob.InstanceConfig parseFrom( 2506 byte[] data, 
com.google.protobuf.ExtensionRegistryLite extensionRegistry) 2507 throws com.google.protobuf.InvalidProtocolBufferException { 2508 return PARSER.parseFrom(data, extensionRegistry); 2509 } 2510 parseFrom( java.io.InputStream input)2511 public static com.google.cloud.aiplatform.v1beta1.BatchPredictionJob.InstanceConfig parseFrom( 2512 java.io.InputStream input) throws java.io.IOException { 2513 return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input); 2514 } 2515 parseFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)2516 public static com.google.cloud.aiplatform.v1beta1.BatchPredictionJob.InstanceConfig parseFrom( 2517 java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) 2518 throws java.io.IOException { 2519 return com.google.protobuf.GeneratedMessageV3.parseWithIOException( 2520 PARSER, input, extensionRegistry); 2521 } 2522 2523 public static com.google.cloud.aiplatform.v1beta1.BatchPredictionJob.InstanceConfig parseDelimitedFrom(java.io.InputStream input)2524 parseDelimitedFrom(java.io.InputStream input) throws java.io.IOException { 2525 return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(PARSER, input); 2526 } 2527 2528 public static com.google.cloud.aiplatform.v1beta1.BatchPredictionJob.InstanceConfig parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)2529 parseDelimitedFrom( 2530 java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) 2531 throws java.io.IOException { 2532 return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException( 2533 PARSER, input, extensionRegistry); 2534 } 2535 parseFrom( com.google.protobuf.CodedInputStream input)2536 public static com.google.cloud.aiplatform.v1beta1.BatchPredictionJob.InstanceConfig parseFrom( 2537 com.google.protobuf.CodedInputStream input) throws java.io.IOException { 2538 return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input); 2539 } 2540 parseFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)2541 public static com.google.cloud.aiplatform.v1beta1.BatchPredictionJob.InstanceConfig parseFrom( 2542 com.google.protobuf.CodedInputStream input, 2543 com.google.protobuf.ExtensionRegistryLite extensionRegistry) 2544 throws java.io.IOException { 2545 return com.google.protobuf.GeneratedMessageV3.parseWithIOException( 2546 PARSER, input, extensionRegistry); 2547 } 2548 2549 @java.lang.Override newBuilderForType()2550 public Builder newBuilderForType() { 2551 return newBuilder(); 2552 } 2553 newBuilder()2554 public static Builder newBuilder() { 2555 return DEFAULT_INSTANCE.toBuilder(); 2556 } 2557 newBuilder( com.google.cloud.aiplatform.v1beta1.BatchPredictionJob.InstanceConfig prototype)2558 public static Builder newBuilder( 2559 com.google.cloud.aiplatform.v1beta1.BatchPredictionJob.InstanceConfig prototype) { 2560 return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype); 2561 } 2562 2563 @java.lang.Override toBuilder()2564 public Builder toBuilder() { 2565 return this == DEFAULT_INSTANCE ? 
new Builder() : new Builder().mergeFrom(this); 2566 } 2567 2568 @java.lang.Override newBuilderForType( com.google.protobuf.GeneratedMessageV3.BuilderParent parent)2569 protected Builder newBuilderForType( 2570 com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { 2571 Builder builder = new Builder(parent); 2572 return builder; 2573 } 2574 /** 2575 * 2576 * 2577 * <pre> 2578 * Configuration defining how to transform batch prediction input instances to 2579 * the instances that the Model accepts. 2580 * </pre> 2581 * 2582 * Protobuf type {@code google.cloud.aiplatform.v1beta1.BatchPredictionJob.InstanceConfig} 2583 */ 2584 public static final class Builder 2585 extends com.google.protobuf.GeneratedMessageV3.Builder<Builder> 2586 implements 2587 // @@protoc_insertion_point(builder_implements:google.cloud.aiplatform.v1beta1.BatchPredictionJob.InstanceConfig) 2588 com.google.cloud.aiplatform.v1beta1.BatchPredictionJob.InstanceConfigOrBuilder { getDescriptor()2589 public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { 2590 return com.google.cloud.aiplatform.v1beta1.BatchPredictionJobProto 2591 .internal_static_google_cloud_aiplatform_v1beta1_BatchPredictionJob_InstanceConfig_descriptor; 2592 } 2593 2594 @java.lang.Override 2595 protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable()2596 internalGetFieldAccessorTable() { 2597 return com.google.cloud.aiplatform.v1beta1.BatchPredictionJobProto 2598 .internal_static_google_cloud_aiplatform_v1beta1_BatchPredictionJob_InstanceConfig_fieldAccessorTable 2599 .ensureFieldAccessorsInitialized( 2600 com.google.cloud.aiplatform.v1beta1.BatchPredictionJob.InstanceConfig.class, 2601 com.google.cloud.aiplatform.v1beta1.BatchPredictionJob.InstanceConfig.Builder 2602 .class); 2603 } 2604 2605 // Construct using 2606 // com.google.cloud.aiplatform.v1beta1.BatchPredictionJob.InstanceConfig.newBuilder() Builder()2607 private Builder() {} 2608 Builder(com.google.protobuf.GeneratedMessageV3.BuilderParent parent)2609 private Builder(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { 2610 super(parent); 2611 } 2612 2613 @java.lang.Override clear()2614 public Builder clear() { 2615 super.clear(); 2616 bitField0_ = 0; 2617 instanceType_ = ""; 2618 keyField_ = ""; 2619 includedFields_ = com.google.protobuf.LazyStringArrayList.EMPTY; 2620 bitField0_ = (bitField0_ & ~0x00000004); 2621 excludedFields_ = com.google.protobuf.LazyStringArrayList.EMPTY; 2622 bitField0_ = (bitField0_ & ~0x00000008); 2623 return this; 2624 } 2625 2626 @java.lang.Override getDescriptorForType()2627 public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { 2628 return com.google.cloud.aiplatform.v1beta1.BatchPredictionJobProto 2629 .internal_static_google_cloud_aiplatform_v1beta1_BatchPredictionJob_InstanceConfig_descriptor; 2630 } 2631 2632 @java.lang.Override 2633 public com.google.cloud.aiplatform.v1beta1.BatchPredictionJob.InstanceConfig getDefaultInstanceForType()2634 getDefaultInstanceForType() { 2635 return com.google.cloud.aiplatform.v1beta1.BatchPredictionJob.InstanceConfig 2636 .getDefaultInstance(); 2637 } 2638 2639 @java.lang.Override build()2640 public com.google.cloud.aiplatform.v1beta1.BatchPredictionJob.InstanceConfig build() { 2641 com.google.cloud.aiplatform.v1beta1.BatchPredictionJob.InstanceConfig result = 2642 buildPartial(); 2643 if (!result.isInitialized()) { 2644 throw newUninitializedMessageException(result); 2645 } 2646 return result; 2647 } 2648 2649 
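      // Note on the generated build path: build() above checks isInitialized() and then
      // delegates to buildPartial(), which copies only the fields whose presence bits are
      // set in bitField0_ and freezes includedFields_/excludedFields_ into unmodifiable
      // views before assigning them to the new message.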
@java.lang.Override buildPartial()2650 public com.google.cloud.aiplatform.v1beta1.BatchPredictionJob.InstanceConfig buildPartial() { 2651 com.google.cloud.aiplatform.v1beta1.BatchPredictionJob.InstanceConfig result = 2652 new com.google.cloud.aiplatform.v1beta1.BatchPredictionJob.InstanceConfig(this); 2653 buildPartialRepeatedFields(result); 2654 if (bitField0_ != 0) { 2655 buildPartial0(result); 2656 } 2657 onBuilt(); 2658 return result; 2659 } 2660 buildPartialRepeatedFields( com.google.cloud.aiplatform.v1beta1.BatchPredictionJob.InstanceConfig result)2661 private void buildPartialRepeatedFields( 2662 com.google.cloud.aiplatform.v1beta1.BatchPredictionJob.InstanceConfig result) { 2663 if (((bitField0_ & 0x00000004) != 0)) { 2664 includedFields_ = includedFields_.getUnmodifiableView(); 2665 bitField0_ = (bitField0_ & ~0x00000004); 2666 } 2667 result.includedFields_ = includedFields_; 2668 if (((bitField0_ & 0x00000008) != 0)) { 2669 excludedFields_ = excludedFields_.getUnmodifiableView(); 2670 bitField0_ = (bitField0_ & ~0x00000008); 2671 } 2672 result.excludedFields_ = excludedFields_; 2673 } 2674 buildPartial0( com.google.cloud.aiplatform.v1beta1.BatchPredictionJob.InstanceConfig result)2675 private void buildPartial0( 2676 com.google.cloud.aiplatform.v1beta1.BatchPredictionJob.InstanceConfig result) { 2677 int from_bitField0_ = bitField0_; 2678 if (((from_bitField0_ & 0x00000001) != 0)) { 2679 result.instanceType_ = instanceType_; 2680 } 2681 if (((from_bitField0_ & 0x00000002) != 0)) { 2682 result.keyField_ = keyField_; 2683 } 2684 } 2685 2686 @java.lang.Override clone()2687 public Builder clone() { 2688 return super.clone(); 2689 } 2690 2691 @java.lang.Override setField( com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value)2692 public Builder setField( 2693 com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { 2694 return super.setField(field, value); 2695 } 2696 2697 @java.lang.Override clearField(com.google.protobuf.Descriptors.FieldDescriptor field)2698 public Builder clearField(com.google.protobuf.Descriptors.FieldDescriptor field) { 2699 return super.clearField(field); 2700 } 2701 2702 @java.lang.Override clearOneof(com.google.protobuf.Descriptors.OneofDescriptor oneof)2703 public Builder clearOneof(com.google.protobuf.Descriptors.OneofDescriptor oneof) { 2704 return super.clearOneof(oneof); 2705 } 2706 2707 @java.lang.Override setRepeatedField( com.google.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value)2708 public Builder setRepeatedField( 2709 com.google.protobuf.Descriptors.FieldDescriptor field, 2710 int index, 2711 java.lang.Object value) { 2712 return super.setRepeatedField(field, index, value); 2713 } 2714 2715 @java.lang.Override addRepeatedField( com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value)2716 public Builder addRepeatedField( 2717 com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { 2718 return super.addRepeatedField(field, value); 2719 } 2720 2721 @java.lang.Override mergeFrom(com.google.protobuf.Message other)2722 public Builder mergeFrom(com.google.protobuf.Message other) { 2723 if (other 2724 instanceof com.google.cloud.aiplatform.v1beta1.BatchPredictionJob.InstanceConfig) { 2725 return mergeFrom( 2726 (com.google.cloud.aiplatform.v1beta1.BatchPredictionJob.InstanceConfig) other); 2727 } else { 2728 super.mergeFrom(other); 2729 return this; 2730 } 2731 } 2732 mergeFrom( 
com.google.cloud.aiplatform.v1beta1.BatchPredictionJob.InstanceConfig other)2733 public Builder mergeFrom( 2734 com.google.cloud.aiplatform.v1beta1.BatchPredictionJob.InstanceConfig other) { 2735 if (other 2736 == com.google.cloud.aiplatform.v1beta1.BatchPredictionJob.InstanceConfig 2737 .getDefaultInstance()) return this; 2738 if (!other.getInstanceType().isEmpty()) { 2739 instanceType_ = other.instanceType_; 2740 bitField0_ |= 0x00000001; 2741 onChanged(); 2742 } 2743 if (!other.getKeyField().isEmpty()) { 2744 keyField_ = other.keyField_; 2745 bitField0_ |= 0x00000002; 2746 onChanged(); 2747 } 2748 if (!other.includedFields_.isEmpty()) { 2749 if (includedFields_.isEmpty()) { 2750 includedFields_ = other.includedFields_; 2751 bitField0_ = (bitField0_ & ~0x00000004); 2752 } else { 2753 ensureIncludedFieldsIsMutable(); 2754 includedFields_.addAll(other.includedFields_); 2755 } 2756 onChanged(); 2757 } 2758 if (!other.excludedFields_.isEmpty()) { 2759 if (excludedFields_.isEmpty()) { 2760 excludedFields_ = other.excludedFields_; 2761 bitField0_ = (bitField0_ & ~0x00000008); 2762 } else { 2763 ensureExcludedFieldsIsMutable(); 2764 excludedFields_.addAll(other.excludedFields_); 2765 } 2766 onChanged(); 2767 } 2768 this.mergeUnknownFields(other.getUnknownFields()); 2769 onChanged(); 2770 return this; 2771 } 2772 2773 @java.lang.Override isInitialized()2774 public final boolean isInitialized() { 2775 return true; 2776 } 2777 2778 @java.lang.Override mergeFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)2779 public Builder mergeFrom( 2780 com.google.protobuf.CodedInputStream input, 2781 com.google.protobuf.ExtensionRegistryLite extensionRegistry) 2782 throws java.io.IOException { 2783 if (extensionRegistry == null) { 2784 throw new java.lang.NullPointerException(); 2785 } 2786 try { 2787 boolean done = false; 2788 while (!done) { 2789 int tag = input.readTag(); 2790 switch (tag) { 2791 case 0: 2792 done = true; 2793 break; 2794 case 10: 2795 { 2796 instanceType_ = input.readStringRequireUtf8(); 2797 bitField0_ |= 0x00000001; 2798 break; 2799 } // case 10 2800 case 18: 2801 { 2802 keyField_ = input.readStringRequireUtf8(); 2803 bitField0_ |= 0x00000002; 2804 break; 2805 } // case 18 2806 case 26: 2807 { 2808 java.lang.String s = input.readStringRequireUtf8(); 2809 ensureIncludedFieldsIsMutable(); 2810 includedFields_.add(s); 2811 break; 2812 } // case 26 2813 case 34: 2814 { 2815 java.lang.String s = input.readStringRequireUtf8(); 2816 ensureExcludedFieldsIsMutable(); 2817 excludedFields_.add(s); 2818 break; 2819 } // case 34 2820 default: 2821 { 2822 if (!super.parseUnknownField(input, extensionRegistry, tag)) { 2823 done = true; // was an endgroup tag 2824 } 2825 break; 2826 } // default: 2827 } // switch (tag) 2828 } // while (!done) 2829 } catch (com.google.protobuf.InvalidProtocolBufferException e) { 2830 throw e.unwrapIOException(); 2831 } finally { 2832 onChanged(); 2833 } // finally 2834 return this; 2835 } 2836 2837 private int bitField0_; 2838 2839 private java.lang.Object instanceType_ = ""; 2840 /** 2841 * 2842 * 2843 * <pre> 2844 * The format of the instance that the Model accepts. Vertex AI will 2845 * convert compatible 2846 * [batch prediction input instance 2847 * formats][google.cloud.aiplatform.v1beta1.BatchPredictionJob.InputConfig.instances_format] 2848 * to the specified format. 2849 * Supported values are: 2850 * * `object`: Each input is converted to JSON object format. 
2851 * * For `bigquery`, each row is converted to an object. 2852 * * For `jsonl`, each line of the JSONL input must be an object. 2853 * * Does not apply to `csv`, `file-list`, `tf-record`, or 2854 * `tf-record-gzip`. 2855 * * `array`: Each input is converted to JSON array format. 2856 * * For `bigquery`, each row is converted to an array. The order 2857 * of columns is determined by the BigQuery column order, unless 2858 * [included_fields][google.cloud.aiplatform.v1beta1.BatchPredictionJob.InstanceConfig.included_fields] 2859 * is populated. 2860 * [included_fields][google.cloud.aiplatform.v1beta1.BatchPredictionJob.InstanceConfig.included_fields] 2861 * must be populated for specifying field orders. 2862 * * For `jsonl`, if each line of the JSONL input is an object, 2863 * [included_fields][google.cloud.aiplatform.v1beta1.BatchPredictionJob.InstanceConfig.included_fields] 2864 * must be populated for specifying field orders. 2865 * * Does not apply to `csv`, `file-list`, `tf-record`, or 2866 * `tf-record-gzip`. 2867 * If not specified, Vertex AI converts the batch prediction input as 2868 * follows: 2869 * * For `bigquery` and `csv`, the behavior is the same as `array`. The 2870 * order of columns is the same as defined in the file or table, unless 2871 * [included_fields][google.cloud.aiplatform.v1beta1.BatchPredictionJob.InstanceConfig.included_fields] 2872 * is populated. 2873 * * For `jsonl`, the prediction instance format is determined by 2874 * each line of the input. 2875 * * For `tf-record`/`tf-record-gzip`, each record will be converted to 2876 * an object in the format of `{"b64": <value>}`, where `<value>` is 2877 * the Base64-encoded string of the content of the record. 2878 * * For `file-list`, each file in the list will be converted to an 2879 * object in the format of `{"b64": <value>}`, where `<value>` is 2880 * the Base64-encoded string of the content of the file. 2881 * </pre> 2882 * 2883 * <code>string instance_type = 1;</code> 2884 * 2885 * @return The instanceType. 2886 */ getInstanceType()2887 public java.lang.String getInstanceType() { 2888 java.lang.Object ref = instanceType_; 2889 if (!(ref instanceof java.lang.String)) { 2890 com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; 2891 java.lang.String s = bs.toStringUtf8(); 2892 instanceType_ = s; 2893 return s; 2894 } else { 2895 return (java.lang.String) ref; 2896 } 2897 } 2898 /** 2899 * 2900 * 2901 * <pre> 2902 * The format of the instance that the Model accepts. Vertex AI will 2903 * convert compatible 2904 * [batch prediction input instance 2905 * formats][google.cloud.aiplatform.v1beta1.BatchPredictionJob.InputConfig.instances_format] 2906 * to the specified format. 2907 * Supported values are: 2908 * * `object`: Each input is converted to JSON object format. 2909 * * For `bigquery`, each row is converted to an object. 2910 * * For `jsonl`, each line of the JSONL input must be an object. 2911 * * Does not apply to `csv`, `file-list`, `tf-record`, or 2912 * `tf-record-gzip`. 2913 * * `array`: Each input is converted to JSON array format. 2914 * * For `bigquery`, each row is converted to an array. The order 2915 * of columns is determined by the BigQuery column order, unless 2916 * [included_fields][google.cloud.aiplatform.v1beta1.BatchPredictionJob.InstanceConfig.included_fields] 2917 * is populated. 2918 * [included_fields][google.cloud.aiplatform.v1beta1.BatchPredictionJob.InstanceConfig.included_fields] 2919 * must be populated for specifying field orders. 
2920 * * For `jsonl`, if each line of the JSONL input is an object, 2921 * [included_fields][google.cloud.aiplatform.v1beta1.BatchPredictionJob.InstanceConfig.included_fields] 2922 * must be populated for specifying field orders. 2923 * * Does not apply to `csv`, `file-list`, `tf-record`, or 2924 * `tf-record-gzip`. 2925 * If not specified, Vertex AI converts the batch prediction input as 2926 * follows: 2927 * * For `bigquery` and `csv`, the behavior is the same as `array`. The 2928 * order of columns is the same as defined in the file or table, unless 2929 * [included_fields][google.cloud.aiplatform.v1beta1.BatchPredictionJob.InstanceConfig.included_fields] 2930 * is populated. 2931 * * For `jsonl`, the prediction instance format is determined by 2932 * each line of the input. 2933 * * For `tf-record`/`tf-record-gzip`, each record will be converted to 2934 * an object in the format of `{"b64": <value>}`, where `<value>` is 2935 * the Base64-encoded string of the content of the record. 2936 * * For `file-list`, each file in the list will be converted to an 2937 * object in the format of `{"b64": <value>}`, where `<value>` is 2938 * the Base64-encoded string of the content of the file. 2939 * </pre> 2940 * 2941 * <code>string instance_type = 1;</code> 2942 * 2943 * @return The bytes for instanceType. 2944 */ getInstanceTypeBytes()2945 public com.google.protobuf.ByteString getInstanceTypeBytes() { 2946 java.lang.Object ref = instanceType_; 2947 if (ref instanceof String) { 2948 com.google.protobuf.ByteString b = 2949 com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref); 2950 instanceType_ = b; 2951 return b; 2952 } else { 2953 return (com.google.protobuf.ByteString) ref; 2954 } 2955 } 2956 /** 2957 * 2958 * 2959 * <pre> 2960 * The format of the instance that the Model accepts. Vertex AI will 2961 * convert compatible 2962 * [batch prediction input instance 2963 * formats][google.cloud.aiplatform.v1beta1.BatchPredictionJob.InputConfig.instances_format] 2964 * to the specified format. 2965 * Supported values are: 2966 * * `object`: Each input is converted to JSON object format. 2967 * * For `bigquery`, each row is converted to an object. 2968 * * For `jsonl`, each line of the JSONL input must be an object. 2969 * * Does not apply to `csv`, `file-list`, `tf-record`, or 2970 * `tf-record-gzip`. 2971 * * `array`: Each input is converted to JSON array format. 2972 * * For `bigquery`, each row is converted to an array. The order 2973 * of columns is determined by the BigQuery column order, unless 2974 * [included_fields][google.cloud.aiplatform.v1beta1.BatchPredictionJob.InstanceConfig.included_fields] 2975 * is populated. 2976 * [included_fields][google.cloud.aiplatform.v1beta1.BatchPredictionJob.InstanceConfig.included_fields] 2977 * must be populated for specifying field orders. 2978 * * For `jsonl`, if each line of the JSONL input is an object, 2979 * [included_fields][google.cloud.aiplatform.v1beta1.BatchPredictionJob.InstanceConfig.included_fields] 2980 * must be populated for specifying field orders. 2981 * * Does not apply to `csv`, `file-list`, `tf-record`, or 2982 * `tf-record-gzip`. 2983 * If not specified, Vertex AI converts the batch prediction input as 2984 * follows: 2985 * * For `bigquery` and `csv`, the behavior is the same as `array`. The 2986 * order of columns is the same as defined in the file or table, unless 2987 * [included_fields][google.cloud.aiplatform.v1beta1.BatchPredictionJob.InstanceConfig.included_fields] 2988 * is populated. 
2989 * * For `jsonl`, the prediction instance format is determined by 2990 * each line of the input. 2991 * * For `tf-record`/`tf-record-gzip`, each record will be converted to 2992 * an object in the format of `{"b64": <value>}`, where `<value>` is 2993 * the Base64-encoded string of the content of the record. 2994 * * For `file-list`, each file in the list will be converted to an 2995 * object in the format of `{"b64": <value>}`, where `<value>` is 2996 * the Base64-encoded string of the content of the file. 2997 * </pre> 2998 * 2999 * <code>string instance_type = 1;</code> 3000 * 3001 * @param value The instanceType to set. 3002 * @return This builder for chaining. 3003 */ setInstanceType(java.lang.String value)3004 public Builder setInstanceType(java.lang.String value) { 3005 if (value == null) { 3006 throw new NullPointerException(); 3007 } 3008 instanceType_ = value; 3009 bitField0_ |= 0x00000001; 3010 onChanged(); 3011 return this; 3012 } 3013 /** 3014 * 3015 * 3016 * <pre> 3017 * The format of the instance that the Model accepts. Vertex AI will 3018 * convert compatible 3019 * [batch prediction input instance 3020 * formats][google.cloud.aiplatform.v1beta1.BatchPredictionJob.InputConfig.instances_format] 3021 * to the specified format. 3022 * Supported values are: 3023 * * `object`: Each input is converted to JSON object format. 3024 * * For `bigquery`, each row is converted to an object. 3025 * * For `jsonl`, each line of the JSONL input must be an object. 3026 * * Does not apply to `csv`, `file-list`, `tf-record`, or 3027 * `tf-record-gzip`. 3028 * * `array`: Each input is converted to JSON array format. 3029 * * For `bigquery`, each row is converted to an array. The order 3030 * of columns is determined by the BigQuery column order, unless 3031 * [included_fields][google.cloud.aiplatform.v1beta1.BatchPredictionJob.InstanceConfig.included_fields] 3032 * is populated. 3033 * [included_fields][google.cloud.aiplatform.v1beta1.BatchPredictionJob.InstanceConfig.included_fields] 3034 * must be populated for specifying field orders. 3035 * * For `jsonl`, if each line of the JSONL input is an object, 3036 * [included_fields][google.cloud.aiplatform.v1beta1.BatchPredictionJob.InstanceConfig.included_fields] 3037 * must be populated for specifying field orders. 3038 * * Does not apply to `csv`, `file-list`, `tf-record`, or 3039 * `tf-record-gzip`. 3040 * If not specified, Vertex AI converts the batch prediction input as 3041 * follows: 3042 * * For `bigquery` and `csv`, the behavior is the same as `array`. The 3043 * order of columns is the same as defined in the file or table, unless 3044 * [included_fields][google.cloud.aiplatform.v1beta1.BatchPredictionJob.InstanceConfig.included_fields] 3045 * is populated. 3046 * * For `jsonl`, the prediction instance format is determined by 3047 * each line of the input. 3048 * * For `tf-record`/`tf-record-gzip`, each record will be converted to 3049 * an object in the format of `{"b64": <value>}`, where `<value>` is 3050 * the Base64-encoded string of the content of the record. 3051 * * For `file-list`, each file in the list will be converted to an 3052 * object in the format of `{"b64": <value>}`, where `<value>` is 3053 * the Base64-encoded string of the content of the file. 3054 * </pre> 3055 * 3056 * <code>string instance_type = 1;</code> 3057 * 3058 * @return This builder for chaining. 
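 * <p>For orientation, a minimal sketch of configuring the instance type, assuming an
 * `array` conversion with explicitly ordered fields; the literal format value and the
 * field names below are placeholders chosen for the example:
 * <pre>{@code
 * BatchPredictionJob.InstanceConfig instanceConfig =
 *     BatchPredictionJob.InstanceConfig.newBuilder()
 *         .setInstanceType("array")
 *         // With "array", included_fields fixes the order of the array elements;
 *         // "feature_a" and "feature_b" are example column names.
 *         .addIncludedFields("feature_a")
 *         .addIncludedFields("feature_b")
 *         .build();
 * }</pre>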
3059 */ clearInstanceType()3060 public Builder clearInstanceType() { 3061 instanceType_ = getDefaultInstance().getInstanceType(); 3062 bitField0_ = (bitField0_ & ~0x00000001); 3063 onChanged(); 3064 return this; 3065 } 3066 /** 3067 * 3068 * 3069 * <pre> 3070 * The format of the instance that the Model accepts. Vertex AI will 3071 * convert compatible 3072 * [batch prediction input instance 3073 * formats][google.cloud.aiplatform.v1beta1.BatchPredictionJob.InputConfig.instances_format] 3074 * to the specified format. 3075 * Supported values are: 3076 * * `object`: Each input is converted to JSON object format. 3077 * * For `bigquery`, each row is converted to an object. 3078 * * For `jsonl`, each line of the JSONL input must be an object. 3079 * * Does not apply to `csv`, `file-list`, `tf-record`, or 3080 * `tf-record-gzip`. 3081 * * `array`: Each input is converted to JSON array format. 3082 * * For `bigquery`, each row is converted to an array. The order 3083 * of columns is determined by the BigQuery column order, unless 3084 * [included_fields][google.cloud.aiplatform.v1beta1.BatchPredictionJob.InstanceConfig.included_fields] 3085 * is populated. 3086 * [included_fields][google.cloud.aiplatform.v1beta1.BatchPredictionJob.InstanceConfig.included_fields] 3087 * must be populated for specifying field orders. 3088 * * For `jsonl`, if each line of the JSONL input is an object, 3089 * [included_fields][google.cloud.aiplatform.v1beta1.BatchPredictionJob.InstanceConfig.included_fields] 3090 * must be populated for specifying field orders. 3091 * * Does not apply to `csv`, `file-list`, `tf-record`, or 3092 * `tf-record-gzip`. 3093 * If not specified, Vertex AI converts the batch prediction input as 3094 * follows: 3095 * * For `bigquery` and `csv`, the behavior is the same as `array`. The 3096 * order of columns is the same as defined in the file or table, unless 3097 * [included_fields][google.cloud.aiplatform.v1beta1.BatchPredictionJob.InstanceConfig.included_fields] 3098 * is populated. 3099 * * For `jsonl`, the prediction instance format is determined by 3100 * each line of the input. 3101 * * For `tf-record`/`tf-record-gzip`, each record will be converted to 3102 * an object in the format of `{"b64": <value>}`, where `<value>` is 3103 * the Base64-encoded string of the content of the record. 3104 * * For `file-list`, each file in the list will be converted to an 3105 * object in the format of `{"b64": <value>}`, where `<value>` is 3106 * the Base64-encoded string of the content of the file. 3107 * </pre> 3108 * 3109 * <code>string instance_type = 1;</code> 3110 * 3111 * @param value The bytes for instanceType to set. 3112 * @return This builder for chaining. 3113 */ setInstanceTypeBytes(com.google.protobuf.ByteString value)3114 public Builder setInstanceTypeBytes(com.google.protobuf.ByteString value) { 3115 if (value == null) { 3116 throw new NullPointerException(); 3117 } 3118 checkByteStringIsUtf8(value); 3119 instanceType_ = value; 3120 bitField0_ |= 0x00000001; 3121 onChanged(); 3122 return this; 3123 } 3124 3125 private java.lang.Object keyField_ = ""; 3126 /** 3127 * 3128 * 3129 * <pre> 3130 * The name of the field that is considered as a key. 3131 * The values identified by the key field is not included in the transformed 3132 * instances that is sent to the Model. This is similar to 3133 * specifying this name of the field in 3134 * [excluded_fields][google.cloud.aiplatform.v1beta1.BatchPredictionJob.InstanceConfig.excluded_fields]. 
3135 * In addition, the batch prediction output will not include the instances. 3136 * Instead the output will only include the value of the key field, in a 3137 * field named `key` in the output: 3138 * * For `jsonl` output format, the output will have a `key` field 3139 * instead of the `instance` field. 3140 * * For `csv`/`bigquery` output format, the output will have have a `key` 3141 * column instead of the instance feature columns. 3142 * The input must be JSONL with objects at each line, CSV, BigQuery 3143 * or TfRecord. 3144 * </pre> 3145 * 3146 * <code>string key_field = 2;</code> 3147 * 3148 * @return The keyField. 3149 */ getKeyField()3150 public java.lang.String getKeyField() { 3151 java.lang.Object ref = keyField_; 3152 if (!(ref instanceof java.lang.String)) { 3153 com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; 3154 java.lang.String s = bs.toStringUtf8(); 3155 keyField_ = s; 3156 return s; 3157 } else { 3158 return (java.lang.String) ref; 3159 } 3160 } 3161 /** 3162 * 3163 * 3164 * <pre> 3165 * The name of the field that is considered as a key. 3166 * The values identified by the key field is not included in the transformed 3167 * instances that is sent to the Model. This is similar to 3168 * specifying this name of the field in 3169 * [excluded_fields][google.cloud.aiplatform.v1beta1.BatchPredictionJob.InstanceConfig.excluded_fields]. 3170 * In addition, the batch prediction output will not include the instances. 3171 * Instead the output will only include the value of the key field, in a 3172 * field named `key` in the output: 3173 * * For `jsonl` output format, the output will have a `key` field 3174 * instead of the `instance` field. 3175 * * For `csv`/`bigquery` output format, the output will have have a `key` 3176 * column instead of the instance feature columns. 3177 * The input must be JSONL with objects at each line, CSV, BigQuery 3178 * or TfRecord. 3179 * </pre> 3180 * 3181 * <code>string key_field = 2;</code> 3182 * 3183 * @return The bytes for keyField. 3184 */ getKeyFieldBytes()3185 public com.google.protobuf.ByteString getKeyFieldBytes() { 3186 java.lang.Object ref = keyField_; 3187 if (ref instanceof String) { 3188 com.google.protobuf.ByteString b = 3189 com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref); 3190 keyField_ = b; 3191 return b; 3192 } else { 3193 return (com.google.protobuf.ByteString) ref; 3194 } 3195 } 3196 /** 3197 * 3198 * 3199 * <pre> 3200 * The name of the field that is considered as a key. 3201 * The values identified by the key field is not included in the transformed 3202 * instances that is sent to the Model. This is similar to 3203 * specifying this name of the field in 3204 * [excluded_fields][google.cloud.aiplatform.v1beta1.BatchPredictionJob.InstanceConfig.excluded_fields]. 3205 * In addition, the batch prediction output will not include the instances. 3206 * Instead the output will only include the value of the key field, in a 3207 * field named `key` in the output: 3208 * * For `jsonl` output format, the output will have a `key` field 3209 * instead of the `instance` field. 3210 * * For `csv`/`bigquery` output format, the output will have have a `key` 3211 * column instead of the instance feature columns. 3212 * The input must be JSONL with objects at each line, CSV, BigQuery 3213 * or TfRecord. 3214 * </pre> 3215 * 3216 * <code>string key_field = 2;</code> 3217 * 3218 * @param value The keyField to set. 3219 * @return This builder for chaining. 
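 * <p>A minimal sketch of keying the output, assuming the input rows carry an
 * identifier column; "record_id" is a placeholder name, not part of the API:
 * <pre>{@code
 * BatchPredictionJob.InstanceConfig keyedConfig =
 *     BatchPredictionJob.InstanceConfig.newBuilder()
 *         // The value of "record_id" is echoed back in a `key` field or column
 *         // instead of the full instance.
 *         .setKeyField("record_id")
 *         .build();
 * }</pre>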
3220 */ setKeyField(java.lang.String value)3221 public Builder setKeyField(java.lang.String value) { 3222 if (value == null) { 3223 throw new NullPointerException(); 3224 } 3225 keyField_ = value; 3226 bitField0_ |= 0x00000002; 3227 onChanged(); 3228 return this; 3229 } 3230 /** 3231 * 3232 * 3233 * <pre> 3234 * The name of the field that is considered as a key. 3235 * The values identified by the key field is not included in the transformed 3236 * instances that is sent to the Model. This is similar to 3237 * specifying this name of the field in 3238 * [excluded_fields][google.cloud.aiplatform.v1beta1.BatchPredictionJob.InstanceConfig.excluded_fields]. 3239 * In addition, the batch prediction output will not include the instances. 3240 * Instead the output will only include the value of the key field, in a 3241 * field named `key` in the output: 3242 * * For `jsonl` output format, the output will have a `key` field 3243 * instead of the `instance` field. 3244 * * For `csv`/`bigquery` output format, the output will have have a `key` 3245 * column instead of the instance feature columns. 3246 * The input must be JSONL with objects at each line, CSV, BigQuery 3247 * or TfRecord. 3248 * </pre> 3249 * 3250 * <code>string key_field = 2;</code> 3251 * 3252 * @return This builder for chaining. 3253 */ clearKeyField()3254 public Builder clearKeyField() { 3255 keyField_ = getDefaultInstance().getKeyField(); 3256 bitField0_ = (bitField0_ & ~0x00000002); 3257 onChanged(); 3258 return this; 3259 } 3260 /** 3261 * 3262 * 3263 * <pre> 3264 * The name of the field that is considered as a key. 3265 * The values identified by the key field is not included in the transformed 3266 * instances that is sent to the Model. This is similar to 3267 * specifying this name of the field in 3268 * [excluded_fields][google.cloud.aiplatform.v1beta1.BatchPredictionJob.InstanceConfig.excluded_fields]. 3269 * In addition, the batch prediction output will not include the instances. 3270 * Instead the output will only include the value of the key field, in a 3271 * field named `key` in the output: 3272 * * For `jsonl` output format, the output will have a `key` field 3273 * instead of the `instance` field. 3274 * * For `csv`/`bigquery` output format, the output will have have a `key` 3275 * column instead of the instance feature columns. 3276 * The input must be JSONL with objects at each line, CSV, BigQuery 3277 * or TfRecord. 3278 * </pre> 3279 * 3280 * <code>string key_field = 2;</code> 3281 * 3282 * @param value The bytes for keyField to set. 3283 * @return This builder for chaining. 3284 */ setKeyFieldBytes(com.google.protobuf.ByteString value)3285 public Builder setKeyFieldBytes(com.google.protobuf.ByteString value) { 3286 if (value == null) { 3287 throw new NullPointerException(); 3288 } 3289 checkByteStringIsUtf8(value); 3290 keyField_ = value; 3291 bitField0_ |= 0x00000002; 3292 onChanged(); 3293 return this; 3294 } 3295 3296 private com.google.protobuf.LazyStringList includedFields_ = 3297 com.google.protobuf.LazyStringArrayList.EMPTY; 3298 ensureIncludedFieldsIsMutable()3299 private void ensureIncludedFieldsIsMutable() { 3300 if (!((bitField0_ & 0x00000004) != 0)) { 3301 includedFields_ = new com.google.protobuf.LazyStringArrayList(includedFields_); 3302 bitField0_ |= 0x00000004; 3303 } 3304 } 3305 /** 3306 * 3307 * 3308 * <pre> 3309 * Fields that will be included in the prediction instance that is 3310 * sent to the Model. 
3311 * If 3312 * [instance_type][google.cloud.aiplatform.v1beta1.BatchPredictionJob.InstanceConfig.instance_type] 3313 * is `array`, the order of field names in included_fields also determines 3314 * the order of the values in the array. 3315 * When included_fields is populated, 3316 * [excluded_fields][google.cloud.aiplatform.v1beta1.BatchPredictionJob.InstanceConfig.excluded_fields] 3317 * must be empty. 3318 * The input must be JSONL with objects at each line, CSV, BigQuery 3319 * or TfRecord. 3320 * </pre> 3321 * 3322 * <code>repeated string included_fields = 3;</code> 3323 * 3324 * @return A list containing the includedFields. 3325 */ getIncludedFieldsList()3326 public com.google.protobuf.ProtocolStringList getIncludedFieldsList() { 3327 return includedFields_.getUnmodifiableView(); 3328 } 3329 /** 3330 * 3331 * 3332 * <pre> 3333 * Fields that will be included in the prediction instance that is 3334 * sent to the Model. 3335 * If 3336 * [instance_type][google.cloud.aiplatform.v1beta1.BatchPredictionJob.InstanceConfig.instance_type] 3337 * is `array`, the order of field names in included_fields also determines 3338 * the order of the values in the array. 3339 * When included_fields is populated, 3340 * [excluded_fields][google.cloud.aiplatform.v1beta1.BatchPredictionJob.InstanceConfig.excluded_fields] 3341 * must be empty. 3342 * The input must be JSONL with objects at each line, CSV, BigQuery 3343 * or TfRecord. 3344 * </pre> 3345 * 3346 * <code>repeated string included_fields = 3;</code> 3347 * 3348 * @return The count of includedFields. 3349 */ getIncludedFieldsCount()3350 public int getIncludedFieldsCount() { 3351 return includedFields_.size(); 3352 } 3353 /** 3354 * 3355 * 3356 * <pre> 3357 * Fields that will be included in the prediction instance that is 3358 * sent to the Model. 3359 * If 3360 * [instance_type][google.cloud.aiplatform.v1beta1.BatchPredictionJob.InstanceConfig.instance_type] 3361 * is `array`, the order of field names in included_fields also determines 3362 * the order of the values in the array. 3363 * When included_fields is populated, 3364 * [excluded_fields][google.cloud.aiplatform.v1beta1.BatchPredictionJob.InstanceConfig.excluded_fields] 3365 * must be empty. 3366 * The input must be JSONL with objects at each line, CSV, BigQuery 3367 * or TfRecord. 3368 * </pre> 3369 * 3370 * <code>repeated string included_fields = 3;</code> 3371 * 3372 * @param index The index of the element to return. 3373 * @return The includedFields at the given index. 3374 */ getIncludedFields(int index)3375 public java.lang.String getIncludedFields(int index) { 3376 return includedFields_.get(index); 3377 } 3378 /** 3379 * 3380 * 3381 * <pre> 3382 * Fields that will be included in the prediction instance that is 3383 * sent to the Model. 3384 * If 3385 * [instance_type][google.cloud.aiplatform.v1beta1.BatchPredictionJob.InstanceConfig.instance_type] 3386 * is `array`, the order of field names in included_fields also determines 3387 * the order of the values in the array. 3388 * When included_fields is populated, 3389 * [excluded_fields][google.cloud.aiplatform.v1beta1.BatchPredictionJob.InstanceConfig.excluded_fields] 3390 * must be empty. 3391 * The input must be JSONL with objects at each line, CSV, BigQuery 3392 * or TfRecord. 3393 * </pre> 3394 * 3395 * <code>repeated string included_fields = 3;</code> 3396 * 3397 * @param index The index of the value to return. 3398 * @return The bytes of the includedFields at the given index. 
3399 */ getIncludedFieldsBytes(int index)3400 public com.google.protobuf.ByteString getIncludedFieldsBytes(int index) { 3401 return includedFields_.getByteString(index); 3402 } 3403 /** 3404 * 3405 * 3406 * <pre> 3407 * Fields that will be included in the prediction instance that is 3408 * sent to the Model. 3409 * If 3410 * [instance_type][google.cloud.aiplatform.v1beta1.BatchPredictionJob.InstanceConfig.instance_type] 3411 * is `array`, the order of field names in included_fields also determines 3412 * the order of the values in the array. 3413 * When included_fields is populated, 3414 * [excluded_fields][google.cloud.aiplatform.v1beta1.BatchPredictionJob.InstanceConfig.excluded_fields] 3415 * must be empty. 3416 * The input must be JSONL with objects at each line, CSV, BigQuery 3417 * or TfRecord. 3418 * </pre> 3419 * 3420 * <code>repeated string included_fields = 3;</code> 3421 * 3422 * @param index The index to set the value at. 3423 * @param value The includedFields to set. 3424 * @return This builder for chaining. 3425 */ setIncludedFields(int index, java.lang.String value)3426 public Builder setIncludedFields(int index, java.lang.String value) { 3427 if (value == null) { 3428 throw new NullPointerException(); 3429 } 3430 ensureIncludedFieldsIsMutable(); 3431 includedFields_.set(index, value); 3432 onChanged(); 3433 return this; 3434 } 3435 /** 3436 * 3437 * 3438 * <pre> 3439 * Fields that will be included in the prediction instance that is 3440 * sent to the Model. 3441 * If 3442 * [instance_type][google.cloud.aiplatform.v1beta1.BatchPredictionJob.InstanceConfig.instance_type] 3443 * is `array`, the order of field names in included_fields also determines 3444 * the order of the values in the array. 3445 * When included_fields is populated, 3446 * [excluded_fields][google.cloud.aiplatform.v1beta1.BatchPredictionJob.InstanceConfig.excluded_fields] 3447 * must be empty. 3448 * The input must be JSONL with objects at each line, CSV, BigQuery 3449 * or TfRecord. 3450 * </pre> 3451 * 3452 * <code>repeated string included_fields = 3;</code> 3453 * 3454 * @param value The includedFields to add. 3455 * @return This builder for chaining. 3456 */ addIncludedFields(java.lang.String value)3457 public Builder addIncludedFields(java.lang.String value) { 3458 if (value == null) { 3459 throw new NullPointerException(); 3460 } 3461 ensureIncludedFieldsIsMutable(); 3462 includedFields_.add(value); 3463 onChanged(); 3464 return this; 3465 } 3466 /** 3467 * 3468 * 3469 * <pre> 3470 * Fields that will be included in the prediction instance that is 3471 * sent to the Model. 3472 * If 3473 * [instance_type][google.cloud.aiplatform.v1beta1.BatchPredictionJob.InstanceConfig.instance_type] 3474 * is `array`, the order of field names in included_fields also determines 3475 * the order of the values in the array. 3476 * When included_fields is populated, 3477 * [excluded_fields][google.cloud.aiplatform.v1beta1.BatchPredictionJob.InstanceConfig.excluded_fields] 3478 * must be empty. 3479 * The input must be JSONL with objects at each line, CSV, BigQuery 3480 * or TfRecord. 3481 * </pre> 3482 * 3483 * <code>repeated string included_fields = 3;</code> 3484 * 3485 * @param values The includedFields to add. 3486 * @return This builder for chaining. 
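 * <p>A minimal sketch of restricting the prediction instance to a few fields,
 * assuming placeholder column names "age" and "income":
 * <pre>{@code
 * java.util.List<java.lang.String> kept = java.util.Arrays.asList("age", "income");
 * BatchPredictionJob.InstanceConfig projected =
 *     BatchPredictionJob.InstanceConfig.newBuilder()
 *         // excluded_fields must stay empty when included_fields is populated.
 *         .addAllIncludedFields(kept)
 *         .build();
 * }</pre>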
3487 */ addAllIncludedFields(java.lang.Iterable<java.lang.String> values)3488 public Builder addAllIncludedFields(java.lang.Iterable<java.lang.String> values) { 3489 ensureIncludedFieldsIsMutable(); 3490 com.google.protobuf.AbstractMessageLite.Builder.addAll(values, includedFields_); 3491 onChanged(); 3492 return this; 3493 } 3494 /** 3495 * 3496 * 3497 * <pre> 3498 * Fields that will be included in the prediction instance that is 3499 * sent to the Model. 3500 * If 3501 * [instance_type][google.cloud.aiplatform.v1beta1.BatchPredictionJob.InstanceConfig.instance_type] 3502 * is `array`, the order of field names in included_fields also determines 3503 * the order of the values in the array. 3504 * When included_fields is populated, 3505 * [excluded_fields][google.cloud.aiplatform.v1beta1.BatchPredictionJob.InstanceConfig.excluded_fields] 3506 * must be empty. 3507 * The input must be JSONL with objects at each line, CSV, BigQuery 3508 * or TfRecord. 3509 * </pre> 3510 * 3511 * <code>repeated string included_fields = 3;</code> 3512 * 3513 * @return This builder for chaining. 3514 */ clearIncludedFields()3515 public Builder clearIncludedFields() { 3516 includedFields_ = com.google.protobuf.LazyStringArrayList.EMPTY; 3517 bitField0_ = (bitField0_ & ~0x00000004); 3518 onChanged(); 3519 return this; 3520 } 3521 /** 3522 * 3523 * 3524 * <pre> 3525 * Fields that will be included in the prediction instance that is 3526 * sent to the Model. 3527 * If 3528 * [instance_type][google.cloud.aiplatform.v1beta1.BatchPredictionJob.InstanceConfig.instance_type] 3529 * is `array`, the order of field names in included_fields also determines 3530 * the order of the values in the array. 3531 * When included_fields is populated, 3532 * [excluded_fields][google.cloud.aiplatform.v1beta1.BatchPredictionJob.InstanceConfig.excluded_fields] 3533 * must be empty. 3534 * The input must be JSONL with objects at each line, CSV, BigQuery 3535 * or TfRecord. 3536 * </pre> 3537 * 3538 * <code>repeated string included_fields = 3;</code> 3539 * 3540 * @param value The bytes of the includedFields to add. 3541 * @return This builder for chaining. 3542 */ addIncludedFieldsBytes(com.google.protobuf.ByteString value)3543 public Builder addIncludedFieldsBytes(com.google.protobuf.ByteString value) { 3544 if (value == null) { 3545 throw new NullPointerException(); 3546 } 3547 checkByteStringIsUtf8(value); 3548 ensureIncludedFieldsIsMutable(); 3549 includedFields_.add(value); 3550 onChanged(); 3551 return this; 3552 } 3553 3554 private com.google.protobuf.LazyStringList excludedFields_ = 3555 com.google.protobuf.LazyStringArrayList.EMPTY; 3556 ensureExcludedFieldsIsMutable()3557 private void ensureExcludedFieldsIsMutable() { 3558 if (!((bitField0_ & 0x00000008) != 0)) { 3559 excludedFields_ = new com.google.protobuf.LazyStringArrayList(excludedFields_); 3560 bitField0_ |= 0x00000008; 3561 } 3562 } 3563 /** 3564 * 3565 * 3566 * <pre> 3567 * Fields that will be excluded in the prediction instance that is 3568 * sent to the Model. 3569 * Excluded will be attached to the batch prediction output if 3570 * [key_field][google.cloud.aiplatform.v1beta1.BatchPredictionJob.InstanceConfig.key_field] 3571 * is not specified. 3572 * When excluded_fields is populated, 3573 * [included_fields][google.cloud.aiplatform.v1beta1.BatchPredictionJob.InstanceConfig.included_fields] 3574 * must be empty. 3575 * The input must be JSONL with objects at each line, CSV, BigQuery 3576 * or TfRecord. 
3577 * </pre> 3578 * 3579 * <code>repeated string excluded_fields = 4;</code> 3580 * 3581 * @return A list containing the excludedFields. 3582 */ getExcludedFieldsList()3583 public com.google.protobuf.ProtocolStringList getExcludedFieldsList() { 3584 return excludedFields_.getUnmodifiableView(); 3585 } 3586 /** 3587 * 3588 * 3589 * <pre> 3590 * Fields that will be excluded in the prediction instance that is 3591 * sent to the Model. 3592 * Excluded will be attached to the batch prediction output if 3593 * [key_field][google.cloud.aiplatform.v1beta1.BatchPredictionJob.InstanceConfig.key_field] 3594 * is not specified. 3595 * When excluded_fields is populated, 3596 * [included_fields][google.cloud.aiplatform.v1beta1.BatchPredictionJob.InstanceConfig.included_fields] 3597 * must be empty. 3598 * The input must be JSONL with objects at each line, CSV, BigQuery 3599 * or TfRecord. 3600 * </pre> 3601 * 3602 * <code>repeated string excluded_fields = 4;</code> 3603 * 3604 * @return The count of excludedFields. 3605 */ getExcludedFieldsCount()3606 public int getExcludedFieldsCount() { 3607 return excludedFields_.size(); 3608 } 3609 /** 3610 * 3611 * 3612 * <pre> 3613 * Fields that will be excluded in the prediction instance that is 3614 * sent to the Model. 3615 * Excluded will be attached to the batch prediction output if 3616 * [key_field][google.cloud.aiplatform.v1beta1.BatchPredictionJob.InstanceConfig.key_field] 3617 * is not specified. 3618 * When excluded_fields is populated, 3619 * [included_fields][google.cloud.aiplatform.v1beta1.BatchPredictionJob.InstanceConfig.included_fields] 3620 * must be empty. 3621 * The input must be JSONL with objects at each line, CSV, BigQuery 3622 * or TfRecord. 3623 * </pre> 3624 * 3625 * <code>repeated string excluded_fields = 4;</code> 3626 * 3627 * @param index The index of the element to return. 3628 * @return The excludedFields at the given index. 3629 */ getExcludedFields(int index)3630 public java.lang.String getExcludedFields(int index) { 3631 return excludedFields_.get(index); 3632 } 3633 /** 3634 * 3635 * 3636 * <pre> 3637 * Fields that will be excluded in the prediction instance that is 3638 * sent to the Model. 3639 * Excluded will be attached to the batch prediction output if 3640 * [key_field][google.cloud.aiplatform.v1beta1.BatchPredictionJob.InstanceConfig.key_field] 3641 * is not specified. 3642 * When excluded_fields is populated, 3643 * [included_fields][google.cloud.aiplatform.v1beta1.BatchPredictionJob.InstanceConfig.included_fields] 3644 * must be empty. 3645 * The input must be JSONL with objects at each line, CSV, BigQuery 3646 * or TfRecord. 3647 * </pre> 3648 * 3649 * <code>repeated string excluded_fields = 4;</code> 3650 * 3651 * @param index The index of the value to return. 3652 * @return The bytes of the excludedFields at the given index. 3653 */ getExcludedFieldsBytes(int index)3654 public com.google.protobuf.ByteString getExcludedFieldsBytes(int index) { 3655 return excludedFields_.getByteString(index); 3656 } 3657 /** 3658 * 3659 * 3660 * <pre> 3661 * Fields that will be excluded in the prediction instance that is 3662 * sent to the Model. 3663 * Excluded will be attached to the batch prediction output if 3664 * [key_field][google.cloud.aiplatform.v1beta1.BatchPredictionJob.InstanceConfig.key_field] 3665 * is not specified. 3666 * When excluded_fields is populated, 3667 * [included_fields][google.cloud.aiplatform.v1beta1.BatchPredictionJob.InstanceConfig.included_fields] 3668 * must be empty. 
3669 * The input must be JSONL with objects at each line, CSV, BigQuery 3670 * or TfRecord. 3671 * </pre> 3672 * 3673 * <code>repeated string excluded_fields = 4;</code> 3674 * 3675 * @param index The index to set the value at. 3676 * @param value The excludedFields to set. 3677 * @return This builder for chaining. 3678 */ setExcludedFields(int index, java.lang.String value)3679 public Builder setExcludedFields(int index, java.lang.String value) { 3680 if (value == null) { 3681 throw new NullPointerException(); 3682 } 3683 ensureExcludedFieldsIsMutable(); 3684 excludedFields_.set(index, value); 3685 onChanged(); 3686 return this; 3687 } 3688 /** 3689 * 3690 * 3691 * <pre> 3692 * Fields that will be excluded in the prediction instance that is 3693 * sent to the Model. 3694 * Excluded will be attached to the batch prediction output if 3695 * [key_field][google.cloud.aiplatform.v1beta1.BatchPredictionJob.InstanceConfig.key_field] 3696 * is not specified. 3697 * When excluded_fields is populated, 3698 * [included_fields][google.cloud.aiplatform.v1beta1.BatchPredictionJob.InstanceConfig.included_fields] 3699 * must be empty. 3700 * The input must be JSONL with objects at each line, CSV, BigQuery 3701 * or TfRecord. 3702 * </pre> 3703 * 3704 * <code>repeated string excluded_fields = 4;</code> 3705 * 3706 * @param value The excludedFields to add. 3707 * @return This builder for chaining. 3708 */ addExcludedFields(java.lang.String value)3709 public Builder addExcludedFields(java.lang.String value) { 3710 if (value == null) { 3711 throw new NullPointerException(); 3712 } 3713 ensureExcludedFieldsIsMutable(); 3714 excludedFields_.add(value); 3715 onChanged(); 3716 return this; 3717 } 3718 /** 3719 * 3720 * 3721 * <pre> 3722 * Fields that will be excluded in the prediction instance that is 3723 * sent to the Model. 3724 * Excluded will be attached to the batch prediction output if 3725 * [key_field][google.cloud.aiplatform.v1beta1.BatchPredictionJob.InstanceConfig.key_field] 3726 * is not specified. 3727 * When excluded_fields is populated, 3728 * [included_fields][google.cloud.aiplatform.v1beta1.BatchPredictionJob.InstanceConfig.included_fields] 3729 * must be empty. 3730 * The input must be JSONL with objects at each line, CSV, BigQuery 3731 * or TfRecord. 3732 * </pre> 3733 * 3734 * <code>repeated string excluded_fields = 4;</code> 3735 * 3736 * @param values The excludedFields to add. 3737 * @return This builder for chaining. 3738 */ addAllExcludedFields(java.lang.Iterable<java.lang.String> values)3739 public Builder addAllExcludedFields(java.lang.Iterable<java.lang.String> values) { 3740 ensureExcludedFieldsIsMutable(); 3741 com.google.protobuf.AbstractMessageLite.Builder.addAll(values, excludedFields_); 3742 onChanged(); 3743 return this; 3744 } 3745 /** 3746 * 3747 * 3748 * <pre> 3749 * Fields that will be excluded in the prediction instance that is 3750 * sent to the Model. 3751 * Excluded will be attached to the batch prediction output if 3752 * [key_field][google.cloud.aiplatform.v1beta1.BatchPredictionJob.InstanceConfig.key_field] 3753 * is not specified. 3754 * When excluded_fields is populated, 3755 * [included_fields][google.cloud.aiplatform.v1beta1.BatchPredictionJob.InstanceConfig.included_fields] 3756 * must be empty. 3757 * The input must be JSONL with objects at each line, CSV, BigQuery 3758 * or TfRecord. 3759 * </pre> 3760 * 3761 * <code>repeated string excluded_fields = 4;</code> 3762 * 3763 * @return This builder for chaining. 
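 * <p>A minimal sketch of dropping fields instead of listing the kept ones;
 * "internal_id" and "notes" are placeholder column names:
 * <pre>{@code
 * BatchPredictionJob.InstanceConfig trimmed =
 *     BatchPredictionJob.InstanceConfig.newBuilder()
 *         .addExcludedFields("internal_id")
 *         .addExcludedFields("notes")
 *         // included_fields must stay empty when excluded_fields is populated.
 *         .build();
 * }</pre>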
3764 */ clearExcludedFields()3765 public Builder clearExcludedFields() { 3766 excludedFields_ = com.google.protobuf.LazyStringArrayList.EMPTY; 3767 bitField0_ = (bitField0_ & ~0x00000008); 3768 onChanged(); 3769 return this; 3770 } 3771 /** 3772 * 3773 * 3774 * <pre> 3775 * Fields that will be excluded in the prediction instance that is 3776 * sent to the Model. 3777 * Excluded will be attached to the batch prediction output if 3778 * [key_field][google.cloud.aiplatform.v1beta1.BatchPredictionJob.InstanceConfig.key_field] 3779 * is not specified. 3780 * When excluded_fields is populated, 3781 * [included_fields][google.cloud.aiplatform.v1beta1.BatchPredictionJob.InstanceConfig.included_fields] 3782 * must be empty. 3783 * The input must be JSONL with objects at each line, CSV, BigQuery 3784 * or TfRecord. 3785 * </pre> 3786 * 3787 * <code>repeated string excluded_fields = 4;</code> 3788 * 3789 * @param value The bytes of the excludedFields to add. 3790 * @return This builder for chaining. 3791 */ addExcludedFieldsBytes(com.google.protobuf.ByteString value)3792 public Builder addExcludedFieldsBytes(com.google.protobuf.ByteString value) { 3793 if (value == null) { 3794 throw new NullPointerException(); 3795 } 3796 checkByteStringIsUtf8(value); 3797 ensureExcludedFieldsIsMutable(); 3798 excludedFields_.add(value); 3799 onChanged(); 3800 return this; 3801 } 3802 3803 @java.lang.Override setUnknownFields( final com.google.protobuf.UnknownFieldSet unknownFields)3804 public final Builder setUnknownFields( 3805 final com.google.protobuf.UnknownFieldSet unknownFields) { 3806 return super.setUnknownFields(unknownFields); 3807 } 3808 3809 @java.lang.Override mergeUnknownFields( final com.google.protobuf.UnknownFieldSet unknownFields)3810 public final Builder mergeUnknownFields( 3811 final com.google.protobuf.UnknownFieldSet unknownFields) { 3812 return super.mergeUnknownFields(unknownFields); 3813 } 3814 3815 // @@protoc_insertion_point(builder_scope:google.cloud.aiplatform.v1beta1.BatchPredictionJob.InstanceConfig) 3816 } 3817 3818 // @@protoc_insertion_point(class_scope:google.cloud.aiplatform.v1beta1.BatchPredictionJob.InstanceConfig) 3819 private static final com.google.cloud.aiplatform.v1beta1.BatchPredictionJob.InstanceConfig 3820 DEFAULT_INSTANCE; 3821 3822 static { 3823 DEFAULT_INSTANCE = 3824 new com.google.cloud.aiplatform.v1beta1.BatchPredictionJob.InstanceConfig(); 3825 } 3826 3827 public static com.google.cloud.aiplatform.v1beta1.BatchPredictionJob.InstanceConfig getDefaultInstance()3828 getDefaultInstance() { 3829 return DEFAULT_INSTANCE; 3830 } 3831 3832 private static final com.google.protobuf.Parser<InstanceConfig> PARSER = 3833 new com.google.protobuf.AbstractParser<InstanceConfig>() { 3834 @java.lang.Override 3835 public InstanceConfig parsePartialFrom( 3836 com.google.protobuf.CodedInputStream input, 3837 com.google.protobuf.ExtensionRegistryLite extensionRegistry) 3838 throws com.google.protobuf.InvalidProtocolBufferException { 3839 Builder builder = newBuilder(); 3840 try { 3841 builder.mergeFrom(input, extensionRegistry); 3842 } catch (com.google.protobuf.InvalidProtocolBufferException e) { 3843 throw e.setUnfinishedMessage(builder.buildPartial()); 3844 } catch (com.google.protobuf.UninitializedMessageException e) { 3845 throw e.asInvalidProtocolBufferException() 3846 .setUnfinishedMessage(builder.buildPartial()); 3847 } catch (java.io.IOException e) { 3848 throw new com.google.protobuf.InvalidProtocolBufferException(e) 3849 .setUnfinishedMessage(builder.buildPartial()); 3850 } 
3851 return builder.buildPartial(); 3852 } 3853 }; 3854 parser()3855 public static com.google.protobuf.Parser<InstanceConfig> parser() { 3856 return PARSER; 3857 } 3858 3859 @java.lang.Override getParserForType()3860 public com.google.protobuf.Parser<InstanceConfig> getParserForType() { 3861 return PARSER; 3862 } 3863 3864 @java.lang.Override 3865 public com.google.cloud.aiplatform.v1beta1.BatchPredictionJob.InstanceConfig getDefaultInstanceForType()3866 getDefaultInstanceForType() { 3867 return DEFAULT_INSTANCE; 3868 } 3869 } 3870 3871 public interface OutputConfigOrBuilder 3872 extends 3873 // @@protoc_insertion_point(interface_extends:google.cloud.aiplatform.v1beta1.BatchPredictionJob.OutputConfig) 3874 com.google.protobuf.MessageOrBuilder { 3875 3876 /** 3877 * 3878 * 3879 * <pre> 3880 * The Cloud Storage location of the directory where the output is 3881 * to be written to. In the given directory a new directory is created. 3882 * Its name is `prediction-<model-display-name>-<job-create-time>`, 3883 * where timestamp is in YYYY-MM-DDThh:mm:ss.sssZ ISO-8601 format. 3884 * Inside of it files `predictions_0001.<extension>`, 3885 * `predictions_0002.<extension>`, ..., `predictions_N.<extension>` 3886 * are created where `<extension>` depends on chosen 3887 * [predictions_format][google.cloud.aiplatform.v1beta1.BatchPredictionJob.OutputConfig.predictions_format], 3888 * and N may equal 0001 and depends on the total number of successfully 3889 * predicted instances. If the Model has both 3890 * [instance][google.cloud.aiplatform.v1beta1.PredictSchemata.instance_schema_uri] 3891 * and 3892 * [prediction][google.cloud.aiplatform.v1beta1.PredictSchemata.parameters_schema_uri] 3893 * schemata defined then each such file contains predictions as per the 3894 * [predictions_format][google.cloud.aiplatform.v1beta1.BatchPredictionJob.OutputConfig.predictions_format]. 3895 * If prediction for any instance failed (partially or completely), then 3896 * an additional `errors_0001.<extension>`, `errors_0002.<extension>`,..., 3897 * `errors_N.<extension>` files are created (N depends on total number 3898 * of failed predictions). These files contain the failed instances, 3899 * as per their schema, followed by an additional `error` field which as 3900 * value has [google.rpc.Status][google.rpc.Status] 3901 * containing only `code` and `message` fields. 3902 * </pre> 3903 * 3904 * <code>.google.cloud.aiplatform.v1beta1.GcsDestination gcs_destination = 2;</code> 3905 * 3906 * @return Whether the gcsDestination field is set. 3907 */ hasGcsDestination()3908 boolean hasGcsDestination(); 3909 /** 3910 * 3911 * 3912 * <pre> 3913 * The Cloud Storage location of the directory where the output is 3914 * to be written to. In the given directory a new directory is created. 3915 * Its name is `prediction-<model-display-name>-<job-create-time>`, 3916 * where timestamp is in YYYY-MM-DDThh:mm:ss.sssZ ISO-8601 format. 3917 * Inside of it files `predictions_0001.<extension>`, 3918 * `predictions_0002.<extension>`, ..., `predictions_N.<extension>` 3919 * are created where `<extension>` depends on chosen 3920 * [predictions_format][google.cloud.aiplatform.v1beta1.BatchPredictionJob.OutputConfig.predictions_format], 3921 * and N may equal 0001 and depends on the total number of successfully 3922 * predicted instances. 
If the Model has both 3923 * [instance][google.cloud.aiplatform.v1beta1.PredictSchemata.instance_schema_uri] 3924 * and 3925 * [prediction][google.cloud.aiplatform.v1beta1.PredictSchemata.parameters_schema_uri] 3926 * schemata defined then each such file contains predictions as per the 3927 * [predictions_format][google.cloud.aiplatform.v1beta1.BatchPredictionJob.OutputConfig.predictions_format]. 3928 * If prediction for any instance failed (partially or completely), then 3929 * an additional `errors_0001.<extension>`, `errors_0002.<extension>`,..., 3930 * `errors_N.<extension>` files are created (N depends on total number 3931 * of failed predictions). These files contain the failed instances, 3932 * as per their schema, followed by an additional `error` field which as 3933 * value has [google.rpc.Status][google.rpc.Status] 3934 * containing only `code` and `message` fields. 3935 * </pre> 3936 * 3937 * <code>.google.cloud.aiplatform.v1beta1.GcsDestination gcs_destination = 2;</code> 3938 * 3939 * @return The gcsDestination. 3940 */ getGcsDestination()3941 com.google.cloud.aiplatform.v1beta1.GcsDestination getGcsDestination(); 3942 /** 3943 * 3944 * 3945 * <pre> 3946 * The Cloud Storage location of the directory where the output is 3947 * to be written to. In the given directory a new directory is created. 3948 * Its name is `prediction-<model-display-name>-<job-create-time>`, 3949 * where timestamp is in YYYY-MM-DDThh:mm:ss.sssZ ISO-8601 format. 3950 * Inside of it files `predictions_0001.<extension>`, 3951 * `predictions_0002.<extension>`, ..., `predictions_N.<extension>` 3952 * are created where `<extension>` depends on chosen 3953 * [predictions_format][google.cloud.aiplatform.v1beta1.BatchPredictionJob.OutputConfig.predictions_format], 3954 * and N may equal 0001 and depends on the total number of successfully 3955 * predicted instances. If the Model has both 3956 * [instance][google.cloud.aiplatform.v1beta1.PredictSchemata.instance_schema_uri] 3957 * and 3958 * [prediction][google.cloud.aiplatform.v1beta1.PredictSchemata.parameters_schema_uri] 3959 * schemata defined then each such file contains predictions as per the 3960 * [predictions_format][google.cloud.aiplatform.v1beta1.BatchPredictionJob.OutputConfig.predictions_format]. 3961 * If prediction for any instance failed (partially or completely), then 3962 * an additional `errors_0001.<extension>`, `errors_0002.<extension>`,..., 3963 * `errors_N.<extension>` files are created (N depends on total number 3964 * of failed predictions). These files contain the failed instances, 3965 * as per their schema, followed by an additional `error` field which as 3966 * value has [google.rpc.Status][google.rpc.Status] 3967 * containing only `code` and `message` fields. 3968 * </pre> 3969 * 3970 * <code>.google.cloud.aiplatform.v1beta1.GcsDestination gcs_destination = 2;</code> 3971 */ getGcsDestinationOrBuilder()3972 com.google.cloud.aiplatform.v1beta1.GcsDestinationOrBuilder getGcsDestinationOrBuilder(); 3973 3974 /** 3975 * 3976 * 3977 * <pre> 3978 * The BigQuery project or dataset location where the output is to be 3979 * written to. If project is provided, a new dataset is created with name 3980 * `prediction_<model-display-name>_<job-create-time>` 3981 * where <model-display-name> is made 3982 * BigQuery-dataset-name compatible (for example, most special characters 3983 * become underscores), and timestamp is in 3984 * YYYY_MM_DDThh_mm_ss_sssZ "based on ISO-8601" format. 
In the dataset 3985 * two tables will be created, `predictions`, and `errors`. 3986 * If the Model has both 3987 * [instance][google.cloud.aiplatform.v1beta1.PredictSchemata.instance_schema_uri] 3988 * and 3989 * [prediction][google.cloud.aiplatform.v1beta1.PredictSchemata.parameters_schema_uri] 3990 * schemata defined then the tables have columns as follows: The 3991 * `predictions` table contains instances for which the prediction 3992 * succeeded, it has columns as per a concatenation of the Model's 3993 * instance and prediction schemata. The `errors` table contains rows for 3994 * which the prediction has failed, it has instance columns, as per the 3995 * instance schema, followed by a single "errors" column, which as values 3996 * has [google.rpc.Status][google.rpc.Status] 3997 * represented as a STRUCT, and containing only `code` and `message`. 3998 * </pre> 3999 * 4000 * <code>.google.cloud.aiplatform.v1beta1.BigQueryDestination bigquery_destination = 3;</code> 4001 * 4002 * @return Whether the bigqueryDestination field is set. 4003 */ hasBigqueryDestination()4004 boolean hasBigqueryDestination(); 4005 /** 4006 * 4007 * 4008 * <pre> 4009 * The BigQuery project or dataset location where the output is to be 4010 * written to. If project is provided, a new dataset is created with name 4011 * `prediction_<model-display-name>_<job-create-time>` 4012 * where <model-display-name> is made 4013 * BigQuery-dataset-name compatible (for example, most special characters 4014 * become underscores), and timestamp is in 4015 * YYYY_MM_DDThh_mm_ss_sssZ "based on ISO-8601" format. In the dataset 4016 * two tables will be created, `predictions`, and `errors`. 4017 * If the Model has both 4018 * [instance][google.cloud.aiplatform.v1beta1.PredictSchemata.instance_schema_uri] 4019 * and 4020 * [prediction][google.cloud.aiplatform.v1beta1.PredictSchemata.parameters_schema_uri] 4021 * schemata defined then the tables have columns as follows: The 4022 * `predictions` table contains instances for which the prediction 4023 * succeeded, it has columns as per a concatenation of the Model's 4024 * instance and prediction schemata. The `errors` table contains rows for 4025 * which the prediction has failed, it has instance columns, as per the 4026 * instance schema, followed by a single "errors" column, which as values 4027 * has [google.rpc.Status][google.rpc.Status] 4028 * represented as a STRUCT, and containing only `code` and `message`. 4029 * </pre> 4030 * 4031 * <code>.google.cloud.aiplatform.v1beta1.BigQueryDestination bigquery_destination = 3;</code> 4032 * 4033 * @return The bigqueryDestination. 4034 */ getBigqueryDestination()4035 com.google.cloud.aiplatform.v1beta1.BigQueryDestination getBigqueryDestination(); 4036 /** 4037 * 4038 * 4039 * <pre> 4040 * The BigQuery project or dataset location where the output is to be 4041 * written to. If project is provided, a new dataset is created with name 4042 * `prediction_<model-display-name>_<job-create-time>` 4043 * where <model-display-name> is made 4044 * BigQuery-dataset-name compatible (for example, most special characters 4045 * become underscores), and timestamp is in 4046 * YYYY_MM_DDThh_mm_ss_sssZ "based on ISO-8601" format. In the dataset 4047 * two tables will be created, `predictions`, and `errors`. 
4048 * If the Model has both 4049 * [instance][google.cloud.aiplatform.v1beta1.PredictSchemata.instance_schema_uri] 4050 * and 4051 * [prediction][google.cloud.aiplatform.v1beta1.PredictSchemata.parameters_schema_uri] 4052 * schemata defined then the tables have columns as follows: The 4053 * `predictions` table contains instances for which the prediction 4054 * succeeded, it has columns as per a concatenation of the Model's 4055 * instance and prediction schemata. The `errors` table contains rows for 4056 * which the prediction has failed, it has instance columns, as per the 4057 * instance schema, followed by a single "errors" column, which as values 4058 * has [google.rpc.Status][google.rpc.Status] 4059 * represented as a STRUCT, and containing only `code` and `message`. 4060 * </pre> 4061 * 4062 * <code>.google.cloud.aiplatform.v1beta1.BigQueryDestination bigquery_destination = 3;</code> 4063 */ 4064 com.google.cloud.aiplatform.v1beta1.BigQueryDestinationOrBuilder getBigqueryDestinationOrBuilder()4065 getBigqueryDestinationOrBuilder(); 4066 4067 /** 4068 * 4069 * 4070 * <pre> 4071 * Required. The format in which Vertex AI gives the predictions, must be 4072 * one of the 4073 * [Model's][google.cloud.aiplatform.v1beta1.BatchPredictionJob.model] 4074 * [supported_output_storage_formats][google.cloud.aiplatform.v1beta1.Model.supported_output_storage_formats]. 4075 * </pre> 4076 * 4077 * <code>string predictions_format = 1 [(.google.api.field_behavior) = REQUIRED];</code> 4078 * 4079 * @return The predictionsFormat. 4080 */ getPredictionsFormat()4081 java.lang.String getPredictionsFormat(); 4082 /** 4083 * 4084 * 4085 * <pre> 4086 * Required. The format in which Vertex AI gives the predictions, must be 4087 * one of the 4088 * [Model's][google.cloud.aiplatform.v1beta1.BatchPredictionJob.model] 4089 * [supported_output_storage_formats][google.cloud.aiplatform.v1beta1.Model.supported_output_storage_formats]. 4090 * </pre> 4091 * 4092 * <code>string predictions_format = 1 [(.google.api.field_behavior) = REQUIRED];</code> 4093 * 4094 * @return The bytes for predictionsFormat. 4095 */ getPredictionsFormatBytes()4096 com.google.protobuf.ByteString getPredictionsFormatBytes(); 4097 4098 public com.google.cloud.aiplatform.v1beta1.BatchPredictionJob.OutputConfig.DestinationCase getDestinationCase()4099 getDestinationCase(); 4100 } 4101 /** 4102 * 4103 * 4104 * <pre> 4105 * Configures the output of 4106 * [BatchPredictionJob][google.cloud.aiplatform.v1beta1.BatchPredictionJob]. 4107 * See 4108 * [Model.supported_output_storage_formats][google.cloud.aiplatform.v1beta1.Model.supported_output_storage_formats] 4109 * for supported output formats, and how predictions are expressed via any of 4110 * them. 4111 * </pre> 4112 * 4113 * Protobuf type {@code google.cloud.aiplatform.v1beta1.BatchPredictionJob.OutputConfig} 4114 */ 4115 public static final class OutputConfig extends com.google.protobuf.GeneratedMessageV3 4116 implements 4117 // @@protoc_insertion_point(message_implements:google.cloud.aiplatform.v1beta1.BatchPredictionJob.OutputConfig) 4118 OutputConfigOrBuilder { 4119 private static final long serialVersionUID = 0L; 4120 // Use OutputConfig.newBuilder() to construct. 
OutputConfig(com.google.protobuf.GeneratedMessageV3.Builder<?> builder)4121 private OutputConfig(com.google.protobuf.GeneratedMessageV3.Builder<?> builder) { 4122 super(builder); 4123 } 4124 OutputConfig()4125 private OutputConfig() { 4126 predictionsFormat_ = ""; 4127 } 4128 4129 @java.lang.Override 4130 @SuppressWarnings({"unused"}) newInstance(UnusedPrivateParameter unused)4131 protected java.lang.Object newInstance(UnusedPrivateParameter unused) { 4132 return new OutputConfig(); 4133 } 4134 4135 @java.lang.Override getUnknownFields()4136 public final com.google.protobuf.UnknownFieldSet getUnknownFields() { 4137 return this.unknownFields; 4138 } 4139 getDescriptor()4140 public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { 4141 return com.google.cloud.aiplatform.v1beta1.BatchPredictionJobProto 4142 .internal_static_google_cloud_aiplatform_v1beta1_BatchPredictionJob_OutputConfig_descriptor; 4143 } 4144 4145 @java.lang.Override 4146 protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable()4147 internalGetFieldAccessorTable() { 4148 return com.google.cloud.aiplatform.v1beta1.BatchPredictionJobProto 4149 .internal_static_google_cloud_aiplatform_v1beta1_BatchPredictionJob_OutputConfig_fieldAccessorTable 4150 .ensureFieldAccessorsInitialized( 4151 com.google.cloud.aiplatform.v1beta1.BatchPredictionJob.OutputConfig.class, 4152 com.google.cloud.aiplatform.v1beta1.BatchPredictionJob.OutputConfig.Builder.class); 4153 } 4154 4155 private int destinationCase_ = 0; 4156 private java.lang.Object destination_; 4157 4158 public enum DestinationCase 4159 implements 4160 com.google.protobuf.Internal.EnumLite, 4161 com.google.protobuf.AbstractMessage.InternalOneOfEnum { 4162 GCS_DESTINATION(2), 4163 BIGQUERY_DESTINATION(3), 4164 DESTINATION_NOT_SET(0); 4165 private final int value; 4166 DestinationCase(int value)4167 private DestinationCase(int value) { 4168 this.value = value; 4169 } 4170 /** 4171 * @param value The number of the enum to look for. 4172 * @return The enum associated with the given number. 4173 * @deprecated Use {@link #forNumber(int)} instead. 4174 */ 4175 @java.lang.Deprecated valueOf(int value)4176 public static DestinationCase valueOf(int value) { 4177 return forNumber(value); 4178 } 4179 forNumber(int value)4180 public static DestinationCase forNumber(int value) { 4181 switch (value) { 4182 case 2: 4183 return GCS_DESTINATION; 4184 case 3: 4185 return BIGQUERY_DESTINATION; 4186 case 0: 4187 return DESTINATION_NOT_SET; 4188 default: 4189 return null; 4190 } 4191 } 4192 getNumber()4193 public int getNumber() { 4194 return this.value; 4195 } 4196 }; 4197 getDestinationCase()4198 public DestinationCase getDestinationCase() { 4199 return DestinationCase.forNumber(destinationCase_); 4200 } 4201 4202 public static final int GCS_DESTINATION_FIELD_NUMBER = 2; 4203 /** 4204 * 4205 * 4206 * <pre> 4207 * The Cloud Storage location of the directory where the output is 4208 * to be written to. In the given directory a new directory is created. 4209 * Its name is `prediction-<model-display-name>-<job-create-time>`, 4210 * where timestamp is in YYYY-MM-DDThh:mm:ss.sssZ ISO-8601 format. 
4211 * Inside of it files `predictions_0001.<extension>`, 4212 * `predictions_0002.<extension>`, ..., `predictions_N.<extension>` 4213 * are created where `<extension>` depends on chosen 4214 * [predictions_format][google.cloud.aiplatform.v1beta1.BatchPredictionJob.OutputConfig.predictions_format], 4215 * and N may equal 0001 and depends on the total number of successfully 4216 * predicted instances. If the Model has both 4217 * [instance][google.cloud.aiplatform.v1beta1.PredictSchemata.instance_schema_uri] 4218 * and 4219 * [prediction][google.cloud.aiplatform.v1beta1.PredictSchemata.parameters_schema_uri] 4220 * schemata defined then each such file contains predictions as per the 4221 * [predictions_format][google.cloud.aiplatform.v1beta1.BatchPredictionJob.OutputConfig.predictions_format]. 4222 * If prediction for any instance failed (partially or completely), then 4223 * an additional `errors_0001.<extension>`, `errors_0002.<extension>`,..., 4224 * `errors_N.<extension>` files are created (N depends on total number 4225 * of failed predictions). These files contain the failed instances, 4226 * as per their schema, followed by an additional `error` field which as 4227 * value has [google.rpc.Status][google.rpc.Status] 4228 * containing only `code` and `message` fields. 4229 * </pre> 4230 * 4231 * <code>.google.cloud.aiplatform.v1beta1.GcsDestination gcs_destination = 2;</code> 4232 * 4233 * @return Whether the gcsDestination field is set. 4234 */ 4235 @java.lang.Override hasGcsDestination()4236 public boolean hasGcsDestination() { 4237 return destinationCase_ == 2; 4238 } 4239 /** 4240 * 4241 * 4242 * <pre> 4243 * The Cloud Storage location of the directory where the output is 4244 * to be written to. In the given directory a new directory is created. 4245 * Its name is `prediction-<model-display-name>-<job-create-time>`, 4246 * where timestamp is in YYYY-MM-DDThh:mm:ss.sssZ ISO-8601 format. 4247 * Inside of it files `predictions_0001.<extension>`, 4248 * `predictions_0002.<extension>`, ..., `predictions_N.<extension>` 4249 * are created where `<extension>` depends on chosen 4250 * [predictions_format][google.cloud.aiplatform.v1beta1.BatchPredictionJob.OutputConfig.predictions_format], 4251 * and N may equal 0001 and depends on the total number of successfully 4252 * predicted instances. If the Model has both 4253 * [instance][google.cloud.aiplatform.v1beta1.PredictSchemata.instance_schema_uri] 4254 * and 4255 * [prediction][google.cloud.aiplatform.v1beta1.PredictSchemata.parameters_schema_uri] 4256 * schemata defined then each such file contains predictions as per the 4257 * [predictions_format][google.cloud.aiplatform.v1beta1.BatchPredictionJob.OutputConfig.predictions_format]. 4258 * If prediction for any instance failed (partially or completely), then 4259 * an additional `errors_0001.<extension>`, `errors_0002.<extension>`,..., 4260 * `errors_N.<extension>` files are created (N depends on total number 4261 * of failed predictions). These files contain the failed instances, 4262 * as per their schema, followed by an additional `error` field which as 4263 * value has [google.rpc.Status][google.rpc.Status] 4264 * containing only `code` and `message` fields. 4265 * </pre> 4266 * 4267 * <code>.google.cloud.aiplatform.v1beta1.GcsDestination gcs_destination = 2;</code> 4268 * 4269 * @return The gcsDestination. 
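 * <p>A minimal sketch of routing output to Cloud Storage; the
 * {@code setPredictionsFormat}, {@code setGcsDestination}, and
 * {@code GcsDestination.Builder.setOutputUriPrefix} calls are assumed from the usual
 * generated builder shape, and the bucket path is a placeholder:
 * <pre>{@code
 * BatchPredictionJob.OutputConfig outputConfig =
 *     BatchPredictionJob.OutputConfig.newBuilder()
 *         .setPredictionsFormat("jsonl")
 *         .setGcsDestination(
 *             GcsDestination.newBuilder()
 *                 .setOutputUriPrefix("gs://example-bucket/batch-output/")
 *                 .build())
 *         .build();
 * // The destination oneof can be inspected before use:
 * boolean toGcs = outputConfig.hasGcsDestination()
 *     && outputConfig.getDestinationCase()
 *         == BatchPredictionJob.OutputConfig.DestinationCase.GCS_DESTINATION;
 * }</pre>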
4270 */ 4271 @java.lang.Override getGcsDestination()4272 public com.google.cloud.aiplatform.v1beta1.GcsDestination getGcsDestination() { 4273 if (destinationCase_ == 2) { 4274 return (com.google.cloud.aiplatform.v1beta1.GcsDestination) destination_; 4275 } 4276 return com.google.cloud.aiplatform.v1beta1.GcsDestination.getDefaultInstance(); 4277 } 4278 /** 4279 * 4280 * 4281 * <pre> 4282 * The Cloud Storage location of the directory where the output is 4283 * to be written to. In the given directory a new directory is created. 4284 * Its name is `prediction-<model-display-name>-<job-create-time>`, 4285 * where timestamp is in YYYY-MM-DDThh:mm:ss.sssZ ISO-8601 format. 4286 * Inside of it files `predictions_0001.<extension>`, 4287 * `predictions_0002.<extension>`, ..., `predictions_N.<extension>` 4288 * are created where `<extension>` depends on chosen 4289 * [predictions_format][google.cloud.aiplatform.v1beta1.BatchPredictionJob.OutputConfig.predictions_format], 4290 * and N may equal 0001 and depends on the total number of successfully 4291 * predicted instances. If the Model has both 4292 * [instance][google.cloud.aiplatform.v1beta1.PredictSchemata.instance_schema_uri] 4293 * and 4294 * [prediction][google.cloud.aiplatform.v1beta1.PredictSchemata.parameters_schema_uri] 4295 * schemata defined then each such file contains predictions as per the 4296 * [predictions_format][google.cloud.aiplatform.v1beta1.BatchPredictionJob.OutputConfig.predictions_format]. 4297 * If prediction for any instance failed (partially or completely), then 4298 * an additional `errors_0001.<extension>`, `errors_0002.<extension>`,..., 4299 * `errors_N.<extension>` files are created (N depends on total number 4300 * of failed predictions). These files contain the failed instances, 4301 * as per their schema, followed by an additional `error` field which as 4302 * value has [google.rpc.Status][google.rpc.Status] 4303 * containing only `code` and `message` fields. 4304 * </pre> 4305 * 4306 * <code>.google.cloud.aiplatform.v1beta1.GcsDestination gcs_destination = 2;</code> 4307 */ 4308 @java.lang.Override 4309 public com.google.cloud.aiplatform.v1beta1.GcsDestinationOrBuilder getGcsDestinationOrBuilder()4310 getGcsDestinationOrBuilder() { 4311 if (destinationCase_ == 2) { 4312 return (com.google.cloud.aiplatform.v1beta1.GcsDestination) destination_; 4313 } 4314 return com.google.cloud.aiplatform.v1beta1.GcsDestination.getDefaultInstance(); 4315 } 4316 4317 public static final int BIGQUERY_DESTINATION_FIELD_NUMBER = 3; 4318 /** 4319 * 4320 * 4321 * <pre> 4322 * The BigQuery project or dataset location where the output is to be 4323 * written to. If project is provided, a new dataset is created with name 4324 * `prediction_<model-display-name>_<job-create-time>` 4325 * where <model-display-name> is made 4326 * BigQuery-dataset-name compatible (for example, most special characters 4327 * become underscores), and timestamp is in 4328 * YYYY_MM_DDThh_mm_ss_sssZ "based on ISO-8601" format. In the dataset 4329 * two tables will be created, `predictions`, and `errors`. 
4330 * If the Model has both 4331 * [instance][google.cloud.aiplatform.v1beta1.PredictSchemata.instance_schema_uri] 4332 * and 4333 * [prediction][google.cloud.aiplatform.v1beta1.PredictSchemata.parameters_schema_uri] 4334 * schemata defined then the tables have columns as follows: The 4335 * `predictions` table contains instances for which the prediction 4336 * succeeded, it has columns as per a concatenation of the Model's 4337 * instance and prediction schemata. The `errors` table contains rows for 4338 * which the prediction has failed, it has instance columns, as per the 4339 * instance schema, followed by a single "errors" column, which as values 4340 * has [google.rpc.Status][google.rpc.Status] 4341 * represented as a STRUCT, and containing only `code` and `message`. 4342 * </pre> 4343 * 4344 * <code>.google.cloud.aiplatform.v1beta1.BigQueryDestination bigquery_destination = 3;</code> 4345 * 4346 * @return Whether the bigqueryDestination field is set. 4347 */ 4348 @java.lang.Override hasBigqueryDestination()4349 public boolean hasBigqueryDestination() { 4350 return destinationCase_ == 3; 4351 } 4352 /** 4353 * 4354 * 4355 * <pre> 4356 * The BigQuery project or dataset location where the output is to be 4357 * written to. If project is provided, a new dataset is created with name 4358 * `prediction_<model-display-name>_<job-create-time>` 4359 * where <model-display-name> is made 4360 * BigQuery-dataset-name compatible (for example, most special characters 4361 * become underscores), and timestamp is in 4362 * YYYY_MM_DDThh_mm_ss_sssZ "based on ISO-8601" format. In the dataset 4363 * two tables will be created, `predictions`, and `errors`. 4364 * If the Model has both 4365 * [instance][google.cloud.aiplatform.v1beta1.PredictSchemata.instance_schema_uri] 4366 * and 4367 * [prediction][google.cloud.aiplatform.v1beta1.PredictSchemata.parameters_schema_uri] 4368 * schemata defined then the tables have columns as follows: The 4369 * `predictions` table contains instances for which the prediction 4370 * succeeded, it has columns as per a concatenation of the Model's 4371 * instance and prediction schemata. The `errors` table contains rows for 4372 * which the prediction has failed, it has instance columns, as per the 4373 * instance schema, followed by a single "errors" column, which as values 4374 * has [google.rpc.Status][google.rpc.Status] 4375 * represented as a STRUCT, and containing only `code` and `message`. 4376 * </pre> 4377 * 4378 * <code>.google.cloud.aiplatform.v1beta1.BigQueryDestination bigquery_destination = 3;</code> 4379 * 4380 * @return The bigqueryDestination. 4381 */ 4382 @java.lang.Override getBigqueryDestination()4383 public com.google.cloud.aiplatform.v1beta1.BigQueryDestination getBigqueryDestination() { 4384 if (destinationCase_ == 3) { 4385 return (com.google.cloud.aiplatform.v1beta1.BigQueryDestination) destination_; 4386 } 4387 return com.google.cloud.aiplatform.v1beta1.BigQueryDestination.getDefaultInstance(); 4388 } 4389 /** 4390 * 4391 * 4392 * <pre> 4393 * The BigQuery project or dataset location where the output is to be 4394 * written to. If project is provided, a new dataset is created with name 4395 * `prediction_<model-display-name>_<job-create-time>` 4396 * where <model-display-name> is made 4397 * BigQuery-dataset-name compatible (for example, most special characters 4398 * become underscores), and timestamp is in 4399 * YYYY_MM_DDThh_mm_ss_sssZ "based on ISO-8601" format. In the dataset 4400 * two tables will be created, `predictions`, and `errors`. 
4401 * If the Model has both 4402 * [instance][google.cloud.aiplatform.v1beta1.PredictSchemata.instance_schema_uri] 4403 * and 4404 * [prediction][google.cloud.aiplatform.v1beta1.PredictSchemata.parameters_schema_uri] 4405 * schemata defined then the tables have columns as follows: The 4406 * `predictions` table contains instances for which the prediction 4407 * succeeded, it has columns as per a concatenation of the Model's 4408 * instance and prediction schemata. The `errors` table contains rows for 4409 * which the prediction has failed, it has instance columns, as per the 4410 * instance schema, followed by a single "errors" column, which as values 4411 * has [google.rpc.Status][google.rpc.Status] 4412 * represented as a STRUCT, and containing only `code` and `message`. 4413 * </pre> 4414 * 4415 * <code>.google.cloud.aiplatform.v1beta1.BigQueryDestination bigquery_destination = 3;</code> 4416 */ 4417 @java.lang.Override 4418 public com.google.cloud.aiplatform.v1beta1.BigQueryDestinationOrBuilder getBigqueryDestinationOrBuilder()4419 getBigqueryDestinationOrBuilder() { 4420 if (destinationCase_ == 3) { 4421 return (com.google.cloud.aiplatform.v1beta1.BigQueryDestination) destination_; 4422 } 4423 return com.google.cloud.aiplatform.v1beta1.BigQueryDestination.getDefaultInstance(); 4424 } 4425 4426 public static final int PREDICTIONS_FORMAT_FIELD_NUMBER = 1; 4427 4428 @SuppressWarnings("serial") 4429 private volatile java.lang.Object predictionsFormat_ = ""; 4430 /** 4431 * 4432 * 4433 * <pre> 4434 * Required. The format in which Vertex AI gives the predictions, must be 4435 * one of the 4436 * [Model's][google.cloud.aiplatform.v1beta1.BatchPredictionJob.model] 4437 * [supported_output_storage_formats][google.cloud.aiplatform.v1beta1.Model.supported_output_storage_formats]. 4438 * </pre> 4439 * 4440 * <code>string predictions_format = 1 [(.google.api.field_behavior) = REQUIRED];</code> 4441 * 4442 * @return The predictionsFormat. 4443 */ 4444 @java.lang.Override getPredictionsFormat()4445 public java.lang.String getPredictionsFormat() { 4446 java.lang.Object ref = predictionsFormat_; 4447 if (ref instanceof java.lang.String) { 4448 return (java.lang.String) ref; 4449 } else { 4450 com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; 4451 java.lang.String s = bs.toStringUtf8(); 4452 predictionsFormat_ = s; 4453 return s; 4454 } 4455 } 4456 /** 4457 * 4458 * 4459 * <pre> 4460 * Required. The format in which Vertex AI gives the predictions, must be 4461 * one of the 4462 * [Model's][google.cloud.aiplatform.v1beta1.BatchPredictionJob.model] 4463 * [supported_output_storage_formats][google.cloud.aiplatform.v1beta1.Model.supported_output_storage_formats]. 4464 * </pre> 4465 * 4466 * <code>string predictions_format = 1 [(.google.api.field_behavior) = REQUIRED];</code> 4467 * 4468 * @return The bytes for predictionsFormat. 
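     *     <p>Illustrative read-side sketch only ({@code "jsonl"} is an example value,
     *     not a default; the accepted formats are whatever the Model's
     *     supported_output_storage_formats lists):
     *     <pre>
     *     String format = outputConfig.getPredictionsFormat(); // e.g. "jsonl"
     *     </pre>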
4469 */ 4470 @java.lang.Override getPredictionsFormatBytes()4471 public com.google.protobuf.ByteString getPredictionsFormatBytes() { 4472 java.lang.Object ref = predictionsFormat_; 4473 if (ref instanceof java.lang.String) { 4474 com.google.protobuf.ByteString b = 4475 com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref); 4476 predictionsFormat_ = b; 4477 return b; 4478 } else { 4479 return (com.google.protobuf.ByteString) ref; 4480 } 4481 } 4482 4483 private byte memoizedIsInitialized = -1; 4484 4485 @java.lang.Override isInitialized()4486 public final boolean isInitialized() { 4487 byte isInitialized = memoizedIsInitialized; 4488 if (isInitialized == 1) return true; 4489 if (isInitialized == 0) return false; 4490 4491 memoizedIsInitialized = 1; 4492 return true; 4493 } 4494 4495 @java.lang.Override writeTo(com.google.protobuf.CodedOutputStream output)4496 public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException { 4497 if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(predictionsFormat_)) { 4498 com.google.protobuf.GeneratedMessageV3.writeString(output, 1, predictionsFormat_); 4499 } 4500 if (destinationCase_ == 2) { 4501 output.writeMessage(2, (com.google.cloud.aiplatform.v1beta1.GcsDestination) destination_); 4502 } 4503 if (destinationCase_ == 3) { 4504 output.writeMessage( 4505 3, (com.google.cloud.aiplatform.v1beta1.BigQueryDestination) destination_); 4506 } 4507 getUnknownFields().writeTo(output); 4508 } 4509 4510 @java.lang.Override getSerializedSize()4511 public int getSerializedSize() { 4512 int size = memoizedSize; 4513 if (size != -1) return size; 4514 4515 size = 0; 4516 if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(predictionsFormat_)) { 4517 size += com.google.protobuf.GeneratedMessageV3.computeStringSize(1, predictionsFormat_); 4518 } 4519 if (destinationCase_ == 2) { 4520 size += 4521 com.google.protobuf.CodedOutputStream.computeMessageSize( 4522 2, (com.google.cloud.aiplatform.v1beta1.GcsDestination) destination_); 4523 } 4524 if (destinationCase_ == 3) { 4525 size += 4526 com.google.protobuf.CodedOutputStream.computeMessageSize( 4527 3, (com.google.cloud.aiplatform.v1beta1.BigQueryDestination) destination_); 4528 } 4529 size += getUnknownFields().getSerializedSize(); 4530 memoizedSize = size; 4531 return size; 4532 } 4533 4534 @java.lang.Override equals(final java.lang.Object obj)4535 public boolean equals(final java.lang.Object obj) { 4536 if (obj == this) { 4537 return true; 4538 } 4539 if (!(obj instanceof com.google.cloud.aiplatform.v1beta1.BatchPredictionJob.OutputConfig)) { 4540 return super.equals(obj); 4541 } 4542 com.google.cloud.aiplatform.v1beta1.BatchPredictionJob.OutputConfig other = 4543 (com.google.cloud.aiplatform.v1beta1.BatchPredictionJob.OutputConfig) obj; 4544 4545 if (!getPredictionsFormat().equals(other.getPredictionsFormat())) return false; 4546 if (!getDestinationCase().equals(other.getDestinationCase())) return false; 4547 switch (destinationCase_) { 4548 case 2: 4549 if (!getGcsDestination().equals(other.getGcsDestination())) return false; 4550 break; 4551 case 3: 4552 if (!getBigqueryDestination().equals(other.getBigqueryDestination())) return false; 4553 break; 4554 case 0: 4555 default: 4556 } 4557 if (!getUnknownFields().equals(other.getUnknownFields())) return false; 4558 return true; 4559 } 4560 4561 @java.lang.Override hashCode()4562 public int hashCode() { 4563 if (memoizedHashCode != 0) { 4564 return memoizedHashCode; 4565 } 4566 int hash = 41; 4567 hash = (19 * 
          hash) + getDescriptor().hashCode();
      hash = (37 * hash) + PREDICTIONS_FORMAT_FIELD_NUMBER;
      hash = (53 * hash) + getPredictionsFormat().hashCode();
      switch (destinationCase_) {
        case 2:
          hash = (37 * hash) + GCS_DESTINATION_FIELD_NUMBER;
          hash = (53 * hash) + getGcsDestination().hashCode();
          break;
        case 3:
          hash = (37 * hash) + BIGQUERY_DESTINATION_FIELD_NUMBER;
          hash = (53 * hash) + getBigqueryDestination().hashCode();
          break;
        case 0:
        default:
      }
      hash = (29 * hash) + getUnknownFields().hashCode();
      memoizedHashCode = hash;
      return hash;
    }

    public static com.google.cloud.aiplatform.v1beta1.BatchPredictionJob.OutputConfig parseFrom(
        java.nio.ByteBuffer data) throws com.google.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data);
    }

    public static com.google.cloud.aiplatform.v1beta1.BatchPredictionJob.OutputConfig parseFrom(
        java.nio.ByteBuffer data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws com.google.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data, extensionRegistry);
    }

    public static com.google.cloud.aiplatform.v1beta1.BatchPredictionJob.OutputConfig parseFrom(
        com.google.protobuf.ByteString data)
        throws com.google.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data);
    }

    public static com.google.cloud.aiplatform.v1beta1.BatchPredictionJob.OutputConfig parseFrom(
        com.google.protobuf.ByteString data,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws com.google.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data, extensionRegistry);
    }

    public static com.google.cloud.aiplatform.v1beta1.BatchPredictionJob.OutputConfig parseFrom(
        byte[] data) throws com.google.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data);
    }

    public static com.google.cloud.aiplatform.v1beta1.BatchPredictionJob.OutputConfig parseFrom(
        byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws com.google.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data, extensionRegistry);
    }

    public static com.google.cloud.aiplatform.v1beta1.BatchPredictionJob.OutputConfig parseFrom(
        java.io.InputStream input) throws java.io.IOException {
      return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
    }

    public static com.google.cloud.aiplatform.v1beta1.BatchPredictionJob.OutputConfig parseFrom(
        java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
          PARSER, input, extensionRegistry);
    }

    public static com.google.cloud.aiplatform.v1beta1.BatchPredictionJob.OutputConfig
        parseDelimitedFrom(java.io.InputStream input) throws java.io.IOException {
      return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(PARSER, input);
    }

    public static com.google.cloud.aiplatform.v1beta1.BatchPredictionJob.OutputConfig
        parseDelimitedFrom(
            java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
            throws java.io.IOException {
      return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(
          PARSER, input, extensionRegistry);
    }

    public static com.google.cloud.aiplatform.v1beta1.BatchPredictionJob.OutputConfig parseFrom(
        com.google.protobuf.CodedInputStream input) throws java.io.IOException {
      return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
    }

    public static com.google.cloud.aiplatform.v1beta1.BatchPredictionJob.OutputConfig parseFrom(
        com.google.protobuf.CodedInputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
          PARSER, input, extensionRegistry);
    }

    @java.lang.Override
    public Builder newBuilderForType() {
      return newBuilder();
    }

    public static Builder newBuilder() {
      return DEFAULT_INSTANCE.toBuilder();
    }

    public static Builder newBuilder(
        com.google.cloud.aiplatform.v1beta1.BatchPredictionJob.OutputConfig prototype) {
      return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
    }

    @java.lang.Override
    public Builder toBuilder() {
      return this == DEFAULT_INSTANCE ? new Builder() : new Builder().mergeFrom(this);
    }

    @java.lang.Override
    protected Builder newBuilderForType(
        com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
      Builder builder = new Builder(parent);
      return builder;
    }
    /**
     *
     *
     * <pre>
     * Configures the output of
     * [BatchPredictionJob][google.cloud.aiplatform.v1beta1.BatchPredictionJob].
     * See
     * [Model.supported_output_storage_formats][google.cloud.aiplatform.v1beta1.Model.supported_output_storage_formats]
     * for supported output formats, and how predictions are expressed via any of
     * them.
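     *
     * A minimal construction sketch (illustrative only; the "jsonl" format string and the
     * Cloud Storage path are placeholder values, and the GcsDestination.setOutputUriPrefix
     * call assumes that message's standard accessor):
     *
     *   BatchPredictionJob.OutputConfig outputConfig =
     *       BatchPredictionJob.OutputConfig.newBuilder()
     *           .setPredictionsFormat("jsonl")
     *           .setGcsDestination(
     *               GcsDestination.newBuilder().setOutputUriPrefix("gs://my-bucket/output/"))
     *           .build();
     *
     * Only one of setGcsDestination/setBigqueryDestination takes effect, since the
     * destination is a oneof; setting one replaces the other.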
4695 * </pre> 4696 * 4697 * Protobuf type {@code google.cloud.aiplatform.v1beta1.BatchPredictionJob.OutputConfig} 4698 */ 4699 public static final class Builder 4700 extends com.google.protobuf.GeneratedMessageV3.Builder<Builder> 4701 implements 4702 // @@protoc_insertion_point(builder_implements:google.cloud.aiplatform.v1beta1.BatchPredictionJob.OutputConfig) 4703 com.google.cloud.aiplatform.v1beta1.BatchPredictionJob.OutputConfigOrBuilder { getDescriptor()4704 public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { 4705 return com.google.cloud.aiplatform.v1beta1.BatchPredictionJobProto 4706 .internal_static_google_cloud_aiplatform_v1beta1_BatchPredictionJob_OutputConfig_descriptor; 4707 } 4708 4709 @java.lang.Override 4710 protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable()4711 internalGetFieldAccessorTable() { 4712 return com.google.cloud.aiplatform.v1beta1.BatchPredictionJobProto 4713 .internal_static_google_cloud_aiplatform_v1beta1_BatchPredictionJob_OutputConfig_fieldAccessorTable 4714 .ensureFieldAccessorsInitialized( 4715 com.google.cloud.aiplatform.v1beta1.BatchPredictionJob.OutputConfig.class, 4716 com.google.cloud.aiplatform.v1beta1.BatchPredictionJob.OutputConfig.Builder.class); 4717 } 4718 4719 // Construct using 4720 // com.google.cloud.aiplatform.v1beta1.BatchPredictionJob.OutputConfig.newBuilder() Builder()4721 private Builder() {} 4722 Builder(com.google.protobuf.GeneratedMessageV3.BuilderParent parent)4723 private Builder(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { 4724 super(parent); 4725 } 4726 4727 @java.lang.Override clear()4728 public Builder clear() { 4729 super.clear(); 4730 bitField0_ = 0; 4731 if (gcsDestinationBuilder_ != null) { 4732 gcsDestinationBuilder_.clear(); 4733 } 4734 if (bigqueryDestinationBuilder_ != null) { 4735 bigqueryDestinationBuilder_.clear(); 4736 } 4737 predictionsFormat_ = ""; 4738 destinationCase_ = 0; 4739 destination_ = null; 4740 return this; 4741 } 4742 4743 @java.lang.Override getDescriptorForType()4744 public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { 4745 return com.google.cloud.aiplatform.v1beta1.BatchPredictionJobProto 4746 .internal_static_google_cloud_aiplatform_v1beta1_BatchPredictionJob_OutputConfig_descriptor; 4747 } 4748 4749 @java.lang.Override 4750 public com.google.cloud.aiplatform.v1beta1.BatchPredictionJob.OutputConfig getDefaultInstanceForType()4751 getDefaultInstanceForType() { 4752 return com.google.cloud.aiplatform.v1beta1.BatchPredictionJob.OutputConfig 4753 .getDefaultInstance(); 4754 } 4755 4756 @java.lang.Override build()4757 public com.google.cloud.aiplatform.v1beta1.BatchPredictionJob.OutputConfig build() { 4758 com.google.cloud.aiplatform.v1beta1.BatchPredictionJob.OutputConfig result = buildPartial(); 4759 if (!result.isInitialized()) { 4760 throw newUninitializedMessageException(result); 4761 } 4762 return result; 4763 } 4764 4765 @java.lang.Override buildPartial()4766 public com.google.cloud.aiplatform.v1beta1.BatchPredictionJob.OutputConfig buildPartial() { 4767 com.google.cloud.aiplatform.v1beta1.BatchPredictionJob.OutputConfig result = 4768 new com.google.cloud.aiplatform.v1beta1.BatchPredictionJob.OutputConfig(this); 4769 if (bitField0_ != 0) { 4770 buildPartial0(result); 4771 } 4772 buildPartialOneofs(result); 4773 onBuilt(); 4774 return result; 4775 } 4776 buildPartial0( com.google.cloud.aiplatform.v1beta1.BatchPredictionJob.OutputConfig result)4777 private void buildPartial0( 4778 
com.google.cloud.aiplatform.v1beta1.BatchPredictionJob.OutputConfig result) { 4779 int from_bitField0_ = bitField0_; 4780 if (((from_bitField0_ & 0x00000004) != 0)) { 4781 result.predictionsFormat_ = predictionsFormat_; 4782 } 4783 } 4784 buildPartialOneofs( com.google.cloud.aiplatform.v1beta1.BatchPredictionJob.OutputConfig result)4785 private void buildPartialOneofs( 4786 com.google.cloud.aiplatform.v1beta1.BatchPredictionJob.OutputConfig result) { 4787 result.destinationCase_ = destinationCase_; 4788 result.destination_ = this.destination_; 4789 if (destinationCase_ == 2 && gcsDestinationBuilder_ != null) { 4790 result.destination_ = gcsDestinationBuilder_.build(); 4791 } 4792 if (destinationCase_ == 3 && bigqueryDestinationBuilder_ != null) { 4793 result.destination_ = bigqueryDestinationBuilder_.build(); 4794 } 4795 } 4796 4797 @java.lang.Override clone()4798 public Builder clone() { 4799 return super.clone(); 4800 } 4801 4802 @java.lang.Override setField( com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value)4803 public Builder setField( 4804 com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { 4805 return super.setField(field, value); 4806 } 4807 4808 @java.lang.Override clearField(com.google.protobuf.Descriptors.FieldDescriptor field)4809 public Builder clearField(com.google.protobuf.Descriptors.FieldDescriptor field) { 4810 return super.clearField(field); 4811 } 4812 4813 @java.lang.Override clearOneof(com.google.protobuf.Descriptors.OneofDescriptor oneof)4814 public Builder clearOneof(com.google.protobuf.Descriptors.OneofDescriptor oneof) { 4815 return super.clearOneof(oneof); 4816 } 4817 4818 @java.lang.Override setRepeatedField( com.google.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value)4819 public Builder setRepeatedField( 4820 com.google.protobuf.Descriptors.FieldDescriptor field, 4821 int index, 4822 java.lang.Object value) { 4823 return super.setRepeatedField(field, index, value); 4824 } 4825 4826 @java.lang.Override addRepeatedField( com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value)4827 public Builder addRepeatedField( 4828 com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { 4829 return super.addRepeatedField(field, value); 4830 } 4831 4832 @java.lang.Override mergeFrom(com.google.protobuf.Message other)4833 public Builder mergeFrom(com.google.protobuf.Message other) { 4834 if (other instanceof com.google.cloud.aiplatform.v1beta1.BatchPredictionJob.OutputConfig) { 4835 return mergeFrom( 4836 (com.google.cloud.aiplatform.v1beta1.BatchPredictionJob.OutputConfig) other); 4837 } else { 4838 super.mergeFrom(other); 4839 return this; 4840 } 4841 } 4842 mergeFrom( com.google.cloud.aiplatform.v1beta1.BatchPredictionJob.OutputConfig other)4843 public Builder mergeFrom( 4844 com.google.cloud.aiplatform.v1beta1.BatchPredictionJob.OutputConfig other) { 4845 if (other 4846 == com.google.cloud.aiplatform.v1beta1.BatchPredictionJob.OutputConfig 4847 .getDefaultInstance()) return this; 4848 if (!other.getPredictionsFormat().isEmpty()) { 4849 predictionsFormat_ = other.predictionsFormat_; 4850 bitField0_ |= 0x00000004; 4851 onChanged(); 4852 } 4853 switch (other.getDestinationCase()) { 4854 case GCS_DESTINATION: 4855 { 4856 mergeGcsDestination(other.getGcsDestination()); 4857 break; 4858 } 4859 case BIGQUERY_DESTINATION: 4860 { 4861 mergeBigqueryDestination(other.getBigqueryDestination()); 4862 break; 4863 } 4864 case DESTINATION_NOT_SET: 4865 { 4866 
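              // Nothing to merge: the destination oneof is unset on `other`, so this
              // builder's current destination (if any) is preserved.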
break; 4867 } 4868 } 4869 this.mergeUnknownFields(other.getUnknownFields()); 4870 onChanged(); 4871 return this; 4872 } 4873 4874 @java.lang.Override isInitialized()4875 public final boolean isInitialized() { 4876 return true; 4877 } 4878 4879 @java.lang.Override mergeFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)4880 public Builder mergeFrom( 4881 com.google.protobuf.CodedInputStream input, 4882 com.google.protobuf.ExtensionRegistryLite extensionRegistry) 4883 throws java.io.IOException { 4884 if (extensionRegistry == null) { 4885 throw new java.lang.NullPointerException(); 4886 } 4887 try { 4888 boolean done = false; 4889 while (!done) { 4890 int tag = input.readTag(); 4891 switch (tag) { 4892 case 0: 4893 done = true; 4894 break; 4895 case 10: 4896 { 4897 predictionsFormat_ = input.readStringRequireUtf8(); 4898 bitField0_ |= 0x00000004; 4899 break; 4900 } // case 10 4901 case 18: 4902 { 4903 input.readMessage( 4904 getGcsDestinationFieldBuilder().getBuilder(), extensionRegistry); 4905 destinationCase_ = 2; 4906 break; 4907 } // case 18 4908 case 26: 4909 { 4910 input.readMessage( 4911 getBigqueryDestinationFieldBuilder().getBuilder(), extensionRegistry); 4912 destinationCase_ = 3; 4913 break; 4914 } // case 26 4915 default: 4916 { 4917 if (!super.parseUnknownField(input, extensionRegistry, tag)) { 4918 done = true; // was an endgroup tag 4919 } 4920 break; 4921 } // default: 4922 } // switch (tag) 4923 } // while (!done) 4924 } catch (com.google.protobuf.InvalidProtocolBufferException e) { 4925 throw e.unwrapIOException(); 4926 } finally { 4927 onChanged(); 4928 } // finally 4929 return this; 4930 } 4931 4932 private int destinationCase_ = 0; 4933 private java.lang.Object destination_; 4934 getDestinationCase()4935 public DestinationCase getDestinationCase() { 4936 return DestinationCase.forNumber(destinationCase_); 4937 } 4938 clearDestination()4939 public Builder clearDestination() { 4940 destinationCase_ = 0; 4941 destination_ = null; 4942 onChanged(); 4943 return this; 4944 } 4945 4946 private int bitField0_; 4947 4948 private com.google.protobuf.SingleFieldBuilderV3< 4949 com.google.cloud.aiplatform.v1beta1.GcsDestination, 4950 com.google.cloud.aiplatform.v1beta1.GcsDestination.Builder, 4951 com.google.cloud.aiplatform.v1beta1.GcsDestinationOrBuilder> 4952 gcsDestinationBuilder_; 4953 /** 4954 * 4955 * 4956 * <pre> 4957 * The Cloud Storage location of the directory where the output is 4958 * to be written to. In the given directory a new directory is created. 4959 * Its name is `prediction-<model-display-name>-<job-create-time>`, 4960 * where timestamp is in YYYY-MM-DDThh:mm:ss.sssZ ISO-8601 format. 4961 * Inside of it files `predictions_0001.<extension>`, 4962 * `predictions_0002.<extension>`, ..., `predictions_N.<extension>` 4963 * are created where `<extension>` depends on chosen 4964 * [predictions_format][google.cloud.aiplatform.v1beta1.BatchPredictionJob.OutputConfig.predictions_format], 4965 * and N may equal 0001 and depends on the total number of successfully 4966 * predicted instances. If the Model has both 4967 * [instance][google.cloud.aiplatform.v1beta1.PredictSchemata.instance_schema_uri] 4968 * and 4969 * [prediction][google.cloud.aiplatform.v1beta1.PredictSchemata.parameters_schema_uri] 4970 * schemata defined then each such file contains predictions as per the 4971 * [predictions_format][google.cloud.aiplatform.v1beta1.BatchPredictionJob.OutputConfig.predictions_format]. 
4972 * If prediction for any instance failed (partially or completely), then 4973 * an additional `errors_0001.<extension>`, `errors_0002.<extension>`,..., 4974 * `errors_N.<extension>` files are created (N depends on total number 4975 * of failed predictions). These files contain the failed instances, 4976 * as per their schema, followed by an additional `error` field which as 4977 * value has [google.rpc.Status][google.rpc.Status] 4978 * containing only `code` and `message` fields. 4979 * </pre> 4980 * 4981 * <code>.google.cloud.aiplatform.v1beta1.GcsDestination gcs_destination = 2;</code> 4982 * 4983 * @return Whether the gcsDestination field is set. 4984 */ 4985 @java.lang.Override hasGcsDestination()4986 public boolean hasGcsDestination() { 4987 return destinationCase_ == 2; 4988 } 4989 /** 4990 * 4991 * 4992 * <pre> 4993 * The Cloud Storage location of the directory where the output is 4994 * to be written to. In the given directory a new directory is created. 4995 * Its name is `prediction-<model-display-name>-<job-create-time>`, 4996 * where timestamp is in YYYY-MM-DDThh:mm:ss.sssZ ISO-8601 format. 4997 * Inside of it files `predictions_0001.<extension>`, 4998 * `predictions_0002.<extension>`, ..., `predictions_N.<extension>` 4999 * are created where `<extension>` depends on chosen 5000 * [predictions_format][google.cloud.aiplatform.v1beta1.BatchPredictionJob.OutputConfig.predictions_format], 5001 * and N may equal 0001 and depends on the total number of successfully 5002 * predicted instances. If the Model has both 5003 * [instance][google.cloud.aiplatform.v1beta1.PredictSchemata.instance_schema_uri] 5004 * and 5005 * [prediction][google.cloud.aiplatform.v1beta1.PredictSchemata.parameters_schema_uri] 5006 * schemata defined then each such file contains predictions as per the 5007 * [predictions_format][google.cloud.aiplatform.v1beta1.BatchPredictionJob.OutputConfig.predictions_format]. 5008 * If prediction for any instance failed (partially or completely), then 5009 * an additional `errors_0001.<extension>`, `errors_0002.<extension>`,..., 5010 * `errors_N.<extension>` files are created (N depends on total number 5011 * of failed predictions). These files contain the failed instances, 5012 * as per their schema, followed by an additional `error` field which as 5013 * value has [google.rpc.Status][google.rpc.Status] 5014 * containing only `code` and `message` fields. 5015 * </pre> 5016 * 5017 * <code>.google.cloud.aiplatform.v1beta1.GcsDestination gcs_destination = 2;</code> 5018 * 5019 * @return The gcsDestination. 5020 */ 5021 @java.lang.Override getGcsDestination()5022 public com.google.cloud.aiplatform.v1beta1.GcsDestination getGcsDestination() { 5023 if (gcsDestinationBuilder_ == null) { 5024 if (destinationCase_ == 2) { 5025 return (com.google.cloud.aiplatform.v1beta1.GcsDestination) destination_; 5026 } 5027 return com.google.cloud.aiplatform.v1beta1.GcsDestination.getDefaultInstance(); 5028 } else { 5029 if (destinationCase_ == 2) { 5030 return gcsDestinationBuilder_.getMessage(); 5031 } 5032 return com.google.cloud.aiplatform.v1beta1.GcsDestination.getDefaultInstance(); 5033 } 5034 } 5035 /** 5036 * 5037 * 5038 * <pre> 5039 * The Cloud Storage location of the directory where the output is 5040 * to be written to. In the given directory a new directory is created. 5041 * Its name is `prediction-<model-display-name>-<job-create-time>`, 5042 * where timestamp is in YYYY-MM-DDThh:mm:ss.sssZ ISO-8601 format. 
5043 * Inside of it files `predictions_0001.<extension>`, 5044 * `predictions_0002.<extension>`, ..., `predictions_N.<extension>` 5045 * are created where `<extension>` depends on chosen 5046 * [predictions_format][google.cloud.aiplatform.v1beta1.BatchPredictionJob.OutputConfig.predictions_format], 5047 * and N may equal 0001 and depends on the total number of successfully 5048 * predicted instances. If the Model has both 5049 * [instance][google.cloud.aiplatform.v1beta1.PredictSchemata.instance_schema_uri] 5050 * and 5051 * [prediction][google.cloud.aiplatform.v1beta1.PredictSchemata.parameters_schema_uri] 5052 * schemata defined then each such file contains predictions as per the 5053 * [predictions_format][google.cloud.aiplatform.v1beta1.BatchPredictionJob.OutputConfig.predictions_format]. 5054 * If prediction for any instance failed (partially or completely), then 5055 * an additional `errors_0001.<extension>`, `errors_0002.<extension>`,..., 5056 * `errors_N.<extension>` files are created (N depends on total number 5057 * of failed predictions). These files contain the failed instances, 5058 * as per their schema, followed by an additional `error` field which as 5059 * value has [google.rpc.Status][google.rpc.Status] 5060 * containing only `code` and `message` fields. 5061 * </pre> 5062 * 5063 * <code>.google.cloud.aiplatform.v1beta1.GcsDestination gcs_destination = 2;</code> 5064 */ setGcsDestination(com.google.cloud.aiplatform.v1beta1.GcsDestination value)5065 public Builder setGcsDestination(com.google.cloud.aiplatform.v1beta1.GcsDestination value) { 5066 if (gcsDestinationBuilder_ == null) { 5067 if (value == null) { 5068 throw new NullPointerException(); 5069 } 5070 destination_ = value; 5071 onChanged(); 5072 } else { 5073 gcsDestinationBuilder_.setMessage(value); 5074 } 5075 destinationCase_ = 2; 5076 return this; 5077 } 5078 /** 5079 * 5080 * 5081 * <pre> 5082 * The Cloud Storage location of the directory where the output is 5083 * to be written to. In the given directory a new directory is created. 5084 * Its name is `prediction-<model-display-name>-<job-create-time>`, 5085 * where timestamp is in YYYY-MM-DDThh:mm:ss.sssZ ISO-8601 format. 5086 * Inside of it files `predictions_0001.<extension>`, 5087 * `predictions_0002.<extension>`, ..., `predictions_N.<extension>` 5088 * are created where `<extension>` depends on chosen 5089 * [predictions_format][google.cloud.aiplatform.v1beta1.BatchPredictionJob.OutputConfig.predictions_format], 5090 * and N may equal 0001 and depends on the total number of successfully 5091 * predicted instances. If the Model has both 5092 * [instance][google.cloud.aiplatform.v1beta1.PredictSchemata.instance_schema_uri] 5093 * and 5094 * [prediction][google.cloud.aiplatform.v1beta1.PredictSchemata.parameters_schema_uri] 5095 * schemata defined then each such file contains predictions as per the 5096 * [predictions_format][google.cloud.aiplatform.v1beta1.BatchPredictionJob.OutputConfig.predictions_format]. 5097 * If prediction for any instance failed (partially or completely), then 5098 * an additional `errors_0001.<extension>`, `errors_0002.<extension>`,..., 5099 * `errors_N.<extension>` files are created (N depends on total number 5100 * of failed predictions). These files contain the failed instances, 5101 * as per their schema, followed by an additional `error` field which as 5102 * value has [google.rpc.Status][google.rpc.Status] 5103 * containing only `code` and `message` fields. 
5104 * </pre> 5105 * 5106 * <code>.google.cloud.aiplatform.v1beta1.GcsDestination gcs_destination = 2;</code> 5107 */ setGcsDestination( com.google.cloud.aiplatform.v1beta1.GcsDestination.Builder builderForValue)5108 public Builder setGcsDestination( 5109 com.google.cloud.aiplatform.v1beta1.GcsDestination.Builder builderForValue) { 5110 if (gcsDestinationBuilder_ == null) { 5111 destination_ = builderForValue.build(); 5112 onChanged(); 5113 } else { 5114 gcsDestinationBuilder_.setMessage(builderForValue.build()); 5115 } 5116 destinationCase_ = 2; 5117 return this; 5118 } 5119 /** 5120 * 5121 * 5122 * <pre> 5123 * The Cloud Storage location of the directory where the output is 5124 * to be written to. In the given directory a new directory is created. 5125 * Its name is `prediction-<model-display-name>-<job-create-time>`, 5126 * where timestamp is in YYYY-MM-DDThh:mm:ss.sssZ ISO-8601 format. 5127 * Inside of it files `predictions_0001.<extension>`, 5128 * `predictions_0002.<extension>`, ..., `predictions_N.<extension>` 5129 * are created where `<extension>` depends on chosen 5130 * [predictions_format][google.cloud.aiplatform.v1beta1.BatchPredictionJob.OutputConfig.predictions_format], 5131 * and N may equal 0001 and depends on the total number of successfully 5132 * predicted instances. If the Model has both 5133 * [instance][google.cloud.aiplatform.v1beta1.PredictSchemata.instance_schema_uri] 5134 * and 5135 * [prediction][google.cloud.aiplatform.v1beta1.PredictSchemata.parameters_schema_uri] 5136 * schemata defined then each such file contains predictions as per the 5137 * [predictions_format][google.cloud.aiplatform.v1beta1.BatchPredictionJob.OutputConfig.predictions_format]. 5138 * If prediction for any instance failed (partially or completely), then 5139 * an additional `errors_0001.<extension>`, `errors_0002.<extension>`,..., 5140 * `errors_N.<extension>` files are created (N depends on total number 5141 * of failed predictions). These files contain the failed instances, 5142 * as per their schema, followed by an additional `error` field which as 5143 * value has [google.rpc.Status][google.rpc.Status] 5144 * containing only `code` and `message` fields. 5145 * </pre> 5146 * 5147 * <code>.google.cloud.aiplatform.v1beta1.GcsDestination gcs_destination = 2;</code> 5148 */ mergeGcsDestination(com.google.cloud.aiplatform.v1beta1.GcsDestination value)5149 public Builder mergeGcsDestination(com.google.cloud.aiplatform.v1beta1.GcsDestination value) { 5150 if (gcsDestinationBuilder_ == null) { 5151 if (destinationCase_ == 2 5152 && destination_ 5153 != com.google.cloud.aiplatform.v1beta1.GcsDestination.getDefaultInstance()) { 5154 destination_ = 5155 com.google.cloud.aiplatform.v1beta1.GcsDestination.newBuilder( 5156 (com.google.cloud.aiplatform.v1beta1.GcsDestination) destination_) 5157 .mergeFrom(value) 5158 .buildPartial(); 5159 } else { 5160 destination_ = value; 5161 } 5162 onChanged(); 5163 } else { 5164 if (destinationCase_ == 2) { 5165 gcsDestinationBuilder_.mergeFrom(value); 5166 } else { 5167 gcsDestinationBuilder_.setMessage(value); 5168 } 5169 } 5170 destinationCase_ = 2; 5171 return this; 5172 } 5173 /** 5174 * 5175 * 5176 * <pre> 5177 * The Cloud Storage location of the directory where the output is 5178 * to be written to. In the given directory a new directory is created. 5179 * Its name is `prediction-<model-display-name>-<job-create-time>`, 5180 * where timestamp is in YYYY-MM-DDThh:mm:ss.sssZ ISO-8601 format. 
5181 * Inside of it files `predictions_0001.<extension>`, 5182 * `predictions_0002.<extension>`, ..., `predictions_N.<extension>` 5183 * are created where `<extension>` depends on chosen 5184 * [predictions_format][google.cloud.aiplatform.v1beta1.BatchPredictionJob.OutputConfig.predictions_format], 5185 * and N may equal 0001 and depends on the total number of successfully 5186 * predicted instances. If the Model has both 5187 * [instance][google.cloud.aiplatform.v1beta1.PredictSchemata.instance_schema_uri] 5188 * and 5189 * [prediction][google.cloud.aiplatform.v1beta1.PredictSchemata.parameters_schema_uri] 5190 * schemata defined then each such file contains predictions as per the 5191 * [predictions_format][google.cloud.aiplatform.v1beta1.BatchPredictionJob.OutputConfig.predictions_format]. 5192 * If prediction for any instance failed (partially or completely), then 5193 * an additional `errors_0001.<extension>`, `errors_0002.<extension>`,..., 5194 * `errors_N.<extension>` files are created (N depends on total number 5195 * of failed predictions). These files contain the failed instances, 5196 * as per their schema, followed by an additional `error` field which as 5197 * value has [google.rpc.Status][google.rpc.Status] 5198 * containing only `code` and `message` fields. 5199 * </pre> 5200 * 5201 * <code>.google.cloud.aiplatform.v1beta1.GcsDestination gcs_destination = 2;</code> 5202 */ clearGcsDestination()5203 public Builder clearGcsDestination() { 5204 if (gcsDestinationBuilder_ == null) { 5205 if (destinationCase_ == 2) { 5206 destinationCase_ = 0; 5207 destination_ = null; 5208 onChanged(); 5209 } 5210 } else { 5211 if (destinationCase_ == 2) { 5212 destinationCase_ = 0; 5213 destination_ = null; 5214 } 5215 gcsDestinationBuilder_.clear(); 5216 } 5217 return this; 5218 } 5219 /** 5220 * 5221 * 5222 * <pre> 5223 * The Cloud Storage location of the directory where the output is 5224 * to be written to. In the given directory a new directory is created. 5225 * Its name is `prediction-<model-display-name>-<job-create-time>`, 5226 * where timestamp is in YYYY-MM-DDThh:mm:ss.sssZ ISO-8601 format. 5227 * Inside of it files `predictions_0001.<extension>`, 5228 * `predictions_0002.<extension>`, ..., `predictions_N.<extension>` 5229 * are created where `<extension>` depends on chosen 5230 * [predictions_format][google.cloud.aiplatform.v1beta1.BatchPredictionJob.OutputConfig.predictions_format], 5231 * and N may equal 0001 and depends on the total number of successfully 5232 * predicted instances. If the Model has both 5233 * [instance][google.cloud.aiplatform.v1beta1.PredictSchemata.instance_schema_uri] 5234 * and 5235 * [prediction][google.cloud.aiplatform.v1beta1.PredictSchemata.parameters_schema_uri] 5236 * schemata defined then each such file contains predictions as per the 5237 * [predictions_format][google.cloud.aiplatform.v1beta1.BatchPredictionJob.OutputConfig.predictions_format]. 5238 * If prediction for any instance failed (partially or completely), then 5239 * an additional `errors_0001.<extension>`, `errors_0002.<extension>`,..., 5240 * `errors_N.<extension>` files are created (N depends on total number 5241 * of failed predictions). These files contain the failed instances, 5242 * as per their schema, followed by an additional `error` field which as 5243 * value has [google.rpc.Status][google.rpc.Status] 5244 * containing only `code` and `message` fields. 
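       *
       * Illustrative sketch only (the bucket path is a placeholder, and setOutputUriPrefix
       * assumes GcsDestination's standard accessor); note that obtaining this builder
       * switches the destination oneof to gcs_destination:
       *
       *   outputConfigBuilder
       *       .getGcsDestinationBuilder()
       *       .setOutputUriPrefix("gs://my-bucket/output/");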
5245 * </pre> 5246 * 5247 * <code>.google.cloud.aiplatform.v1beta1.GcsDestination gcs_destination = 2;</code> 5248 */ getGcsDestinationBuilder()5249 public com.google.cloud.aiplatform.v1beta1.GcsDestination.Builder getGcsDestinationBuilder() { 5250 return getGcsDestinationFieldBuilder().getBuilder(); 5251 } 5252 /** 5253 * 5254 * 5255 * <pre> 5256 * The Cloud Storage location of the directory where the output is 5257 * to be written to. In the given directory a new directory is created. 5258 * Its name is `prediction-<model-display-name>-<job-create-time>`, 5259 * where timestamp is in YYYY-MM-DDThh:mm:ss.sssZ ISO-8601 format. 5260 * Inside of it files `predictions_0001.<extension>`, 5261 * `predictions_0002.<extension>`, ..., `predictions_N.<extension>` 5262 * are created where `<extension>` depends on chosen 5263 * [predictions_format][google.cloud.aiplatform.v1beta1.BatchPredictionJob.OutputConfig.predictions_format], 5264 * and N may equal 0001 and depends on the total number of successfully 5265 * predicted instances. If the Model has both 5266 * [instance][google.cloud.aiplatform.v1beta1.PredictSchemata.instance_schema_uri] 5267 * and 5268 * [prediction][google.cloud.aiplatform.v1beta1.PredictSchemata.parameters_schema_uri] 5269 * schemata defined then each such file contains predictions as per the 5270 * [predictions_format][google.cloud.aiplatform.v1beta1.BatchPredictionJob.OutputConfig.predictions_format]. 5271 * If prediction for any instance failed (partially or completely), then 5272 * an additional `errors_0001.<extension>`, `errors_0002.<extension>`,..., 5273 * `errors_N.<extension>` files are created (N depends on total number 5274 * of failed predictions). These files contain the failed instances, 5275 * as per their schema, followed by an additional `error` field which as 5276 * value has [google.rpc.Status][google.rpc.Status] 5277 * containing only `code` and `message` fields. 5278 * </pre> 5279 * 5280 * <code>.google.cloud.aiplatform.v1beta1.GcsDestination gcs_destination = 2;</code> 5281 */ 5282 @java.lang.Override 5283 public com.google.cloud.aiplatform.v1beta1.GcsDestinationOrBuilder getGcsDestinationOrBuilder()5284 getGcsDestinationOrBuilder() { 5285 if ((destinationCase_ == 2) && (gcsDestinationBuilder_ != null)) { 5286 return gcsDestinationBuilder_.getMessageOrBuilder(); 5287 } else { 5288 if (destinationCase_ == 2) { 5289 return (com.google.cloud.aiplatform.v1beta1.GcsDestination) destination_; 5290 } 5291 return com.google.cloud.aiplatform.v1beta1.GcsDestination.getDefaultInstance(); 5292 } 5293 } 5294 /** 5295 * 5296 * 5297 * <pre> 5298 * The Cloud Storage location of the directory where the output is 5299 * to be written to. In the given directory a new directory is created. 5300 * Its name is `prediction-<model-display-name>-<job-create-time>`, 5301 * where timestamp is in YYYY-MM-DDThh:mm:ss.sssZ ISO-8601 format. 5302 * Inside of it files `predictions_0001.<extension>`, 5303 * `predictions_0002.<extension>`, ..., `predictions_N.<extension>` 5304 * are created where `<extension>` depends on chosen 5305 * [predictions_format][google.cloud.aiplatform.v1beta1.BatchPredictionJob.OutputConfig.predictions_format], 5306 * and N may equal 0001 and depends on the total number of successfully 5307 * predicted instances. 
If the Model has both 5308 * [instance][google.cloud.aiplatform.v1beta1.PredictSchemata.instance_schema_uri] 5309 * and 5310 * [prediction][google.cloud.aiplatform.v1beta1.PredictSchemata.parameters_schema_uri] 5311 * schemata defined then each such file contains predictions as per the 5312 * [predictions_format][google.cloud.aiplatform.v1beta1.BatchPredictionJob.OutputConfig.predictions_format]. 5313 * If prediction for any instance failed (partially or completely), then 5314 * an additional `errors_0001.<extension>`, `errors_0002.<extension>`,..., 5315 * `errors_N.<extension>` files are created (N depends on total number 5316 * of failed predictions). These files contain the failed instances, 5317 * as per their schema, followed by an additional `error` field which as 5318 * value has [google.rpc.Status][google.rpc.Status] 5319 * containing only `code` and `message` fields. 5320 * </pre> 5321 * 5322 * <code>.google.cloud.aiplatform.v1beta1.GcsDestination gcs_destination = 2;</code> 5323 */ 5324 private com.google.protobuf.SingleFieldBuilderV3< 5325 com.google.cloud.aiplatform.v1beta1.GcsDestination, 5326 com.google.cloud.aiplatform.v1beta1.GcsDestination.Builder, 5327 com.google.cloud.aiplatform.v1beta1.GcsDestinationOrBuilder> getGcsDestinationFieldBuilder()5328 getGcsDestinationFieldBuilder() { 5329 if (gcsDestinationBuilder_ == null) { 5330 if (!(destinationCase_ == 2)) { 5331 destination_ = com.google.cloud.aiplatform.v1beta1.GcsDestination.getDefaultInstance(); 5332 } 5333 gcsDestinationBuilder_ = 5334 new com.google.protobuf.SingleFieldBuilderV3< 5335 com.google.cloud.aiplatform.v1beta1.GcsDestination, 5336 com.google.cloud.aiplatform.v1beta1.GcsDestination.Builder, 5337 com.google.cloud.aiplatform.v1beta1.GcsDestinationOrBuilder>( 5338 (com.google.cloud.aiplatform.v1beta1.GcsDestination) destination_, 5339 getParentForChildren(), 5340 isClean()); 5341 destination_ = null; 5342 } 5343 destinationCase_ = 2; 5344 onChanged(); 5345 return gcsDestinationBuilder_; 5346 } 5347 5348 private com.google.protobuf.SingleFieldBuilderV3< 5349 com.google.cloud.aiplatform.v1beta1.BigQueryDestination, 5350 com.google.cloud.aiplatform.v1beta1.BigQueryDestination.Builder, 5351 com.google.cloud.aiplatform.v1beta1.BigQueryDestinationOrBuilder> 5352 bigqueryDestinationBuilder_; 5353 /** 5354 * 5355 * 5356 * <pre> 5357 * The BigQuery project or dataset location where the output is to be 5358 * written to. If project is provided, a new dataset is created with name 5359 * `prediction_<model-display-name>_<job-create-time>` 5360 * where <model-display-name> is made 5361 * BigQuery-dataset-name compatible (for example, most special characters 5362 * become underscores), and timestamp is in 5363 * YYYY_MM_DDThh_mm_ss_sssZ "based on ISO-8601" format. In the dataset 5364 * two tables will be created, `predictions`, and `errors`. 5365 * If the Model has both 5366 * [instance][google.cloud.aiplatform.v1beta1.PredictSchemata.instance_schema_uri] 5367 * and 5368 * [prediction][google.cloud.aiplatform.v1beta1.PredictSchemata.parameters_schema_uri] 5369 * schemata defined then the tables have columns as follows: The 5370 * `predictions` table contains instances for which the prediction 5371 * succeeded, it has columns as per a concatenation of the Model's 5372 * instance and prediction schemata. 
The `errors` table contains rows for 5373 * which the prediction has failed, it has instance columns, as per the 5374 * instance schema, followed by a single "errors" column, which as values 5375 * has [google.rpc.Status][google.rpc.Status] 5376 * represented as a STRUCT, and containing only `code` and `message`. 5377 * </pre> 5378 * 5379 * <code>.google.cloud.aiplatform.v1beta1.BigQueryDestination bigquery_destination = 3;</code> 5380 * 5381 * @return Whether the bigqueryDestination field is set. 5382 */ 5383 @java.lang.Override hasBigqueryDestination()5384 public boolean hasBigqueryDestination() { 5385 return destinationCase_ == 3; 5386 } 5387 /** 5388 * 5389 * 5390 * <pre> 5391 * The BigQuery project or dataset location where the output is to be 5392 * written to. If project is provided, a new dataset is created with name 5393 * `prediction_<model-display-name>_<job-create-time>` 5394 * where <model-display-name> is made 5395 * BigQuery-dataset-name compatible (for example, most special characters 5396 * become underscores), and timestamp is in 5397 * YYYY_MM_DDThh_mm_ss_sssZ "based on ISO-8601" format. In the dataset 5398 * two tables will be created, `predictions`, and `errors`. 5399 * If the Model has both 5400 * [instance][google.cloud.aiplatform.v1beta1.PredictSchemata.instance_schema_uri] 5401 * and 5402 * [prediction][google.cloud.aiplatform.v1beta1.PredictSchemata.parameters_schema_uri] 5403 * schemata defined then the tables have columns as follows: The 5404 * `predictions` table contains instances for which the prediction 5405 * succeeded, it has columns as per a concatenation of the Model's 5406 * instance and prediction schemata. The `errors` table contains rows for 5407 * which the prediction has failed, it has instance columns, as per the 5408 * instance schema, followed by a single "errors" column, which as values 5409 * has [google.rpc.Status][google.rpc.Status] 5410 * represented as a STRUCT, and containing only `code` and `message`. 5411 * </pre> 5412 * 5413 * <code>.google.cloud.aiplatform.v1beta1.BigQueryDestination bigquery_destination = 3;</code> 5414 * 5415 * @return The bigqueryDestination. 5416 */ 5417 @java.lang.Override getBigqueryDestination()5418 public com.google.cloud.aiplatform.v1beta1.BigQueryDestination getBigqueryDestination() { 5419 if (bigqueryDestinationBuilder_ == null) { 5420 if (destinationCase_ == 3) { 5421 return (com.google.cloud.aiplatform.v1beta1.BigQueryDestination) destination_; 5422 } 5423 return com.google.cloud.aiplatform.v1beta1.BigQueryDestination.getDefaultInstance(); 5424 } else { 5425 if (destinationCase_ == 3) { 5426 return bigqueryDestinationBuilder_.getMessage(); 5427 } 5428 return com.google.cloud.aiplatform.v1beta1.BigQueryDestination.getDefaultInstance(); 5429 } 5430 } 5431 /** 5432 * 5433 * 5434 * <pre> 5435 * The BigQuery project or dataset location where the output is to be 5436 * written to. If project is provided, a new dataset is created with name 5437 * `prediction_<model-display-name>_<job-create-time>` 5438 * where <model-display-name> is made 5439 * BigQuery-dataset-name compatible (for example, most special characters 5440 * become underscores), and timestamp is in 5441 * YYYY_MM_DDThh_mm_ss_sssZ "based on ISO-8601" format. In the dataset 5442 * two tables will be created, `predictions`, and `errors`. 
5443 * If the Model has both 5444 * [instance][google.cloud.aiplatform.v1beta1.PredictSchemata.instance_schema_uri] 5445 * and 5446 * [prediction][google.cloud.aiplatform.v1beta1.PredictSchemata.parameters_schema_uri] 5447 * schemata defined then the tables have columns as follows: The 5448 * `predictions` table contains instances for which the prediction 5449 * succeeded, it has columns as per a concatenation of the Model's 5450 * instance and prediction schemata. The `errors` table contains rows for 5451 * which the prediction has failed, it has instance columns, as per the 5452 * instance schema, followed by a single "errors" column, which as values 5453 * has [google.rpc.Status][google.rpc.Status] 5454 * represented as a STRUCT, and containing only `code` and `message`. 5455 * </pre> 5456 * 5457 * <code>.google.cloud.aiplatform.v1beta1.BigQueryDestination bigquery_destination = 3;</code> 5458 */ setBigqueryDestination( com.google.cloud.aiplatform.v1beta1.BigQueryDestination value)5459 public Builder setBigqueryDestination( 5460 com.google.cloud.aiplatform.v1beta1.BigQueryDestination value) { 5461 if (bigqueryDestinationBuilder_ == null) { 5462 if (value == null) { 5463 throw new NullPointerException(); 5464 } 5465 destination_ = value; 5466 onChanged(); 5467 } else { 5468 bigqueryDestinationBuilder_.setMessage(value); 5469 } 5470 destinationCase_ = 3; 5471 return this; 5472 } 5473 /** 5474 * 5475 * 5476 * <pre> 5477 * The BigQuery project or dataset location where the output is to be 5478 * written to. If project is provided, a new dataset is created with name 5479 * `prediction_<model-display-name>_<job-create-time>` 5480 * where <model-display-name> is made 5481 * BigQuery-dataset-name compatible (for example, most special characters 5482 * become underscores), and timestamp is in 5483 * YYYY_MM_DDThh_mm_ss_sssZ "based on ISO-8601" format. In the dataset 5484 * two tables will be created, `predictions`, and `errors`. 5485 * If the Model has both 5486 * [instance][google.cloud.aiplatform.v1beta1.PredictSchemata.instance_schema_uri] 5487 * and 5488 * [prediction][google.cloud.aiplatform.v1beta1.PredictSchemata.parameters_schema_uri] 5489 * schemata defined then the tables have columns as follows: The 5490 * `predictions` table contains instances for which the prediction 5491 * succeeded, it has columns as per a concatenation of the Model's 5492 * instance and prediction schemata. The `errors` table contains rows for 5493 * which the prediction has failed, it has instance columns, as per the 5494 * instance schema, followed by a single "errors" column, which as values 5495 * has [google.rpc.Status][google.rpc.Status] 5496 * represented as a STRUCT, and containing only `code` and `message`. 5497 * </pre> 5498 * 5499 * <code>.google.cloud.aiplatform.v1beta1.BigQueryDestination bigquery_destination = 3;</code> 5500 */ setBigqueryDestination( com.google.cloud.aiplatform.v1beta1.BigQueryDestination.Builder builderForValue)5501 public Builder setBigqueryDestination( 5502 com.google.cloud.aiplatform.v1beta1.BigQueryDestination.Builder builderForValue) { 5503 if (bigqueryDestinationBuilder_ == null) { 5504 destination_ = builderForValue.build(); 5505 onChanged(); 5506 } else { 5507 bigqueryDestinationBuilder_.setMessage(builderForValue.build()); 5508 } 5509 destinationCase_ = 3; 5510 return this; 5511 } 5512 /** 5513 * 5514 * 5515 * <pre> 5516 * The BigQuery project or dataset location where the output is to be 5517 * written to. 
If project is provided, a new dataset is created with name 5518 * `prediction_<model-display-name>_<job-create-time>` 5519 * where <model-display-name> is made 5520 * BigQuery-dataset-name compatible (for example, most special characters 5521 * become underscores), and timestamp is in 5522 * YYYY_MM_DDThh_mm_ss_sssZ "based on ISO-8601" format. In the dataset 5523 * two tables will be created, `predictions`, and `errors`. 5524 * If the Model has both 5525 * [instance][google.cloud.aiplatform.v1beta1.PredictSchemata.instance_schema_uri] 5526 * and 5527 * [prediction][google.cloud.aiplatform.v1beta1.PredictSchemata.parameters_schema_uri] 5528 * schemata defined then the tables have columns as follows: The 5529 * `predictions` table contains instances for which the prediction 5530 * succeeded, it has columns as per a concatenation of the Model's 5531 * instance and prediction schemata. The `errors` table contains rows for 5532 * which the prediction has failed, it has instance columns, as per the 5533 * instance schema, followed by a single "errors" column, which as values 5534 * has [google.rpc.Status][google.rpc.Status] 5535 * represented as a STRUCT, and containing only `code` and `message`. 5536 * </pre> 5537 * 5538 * <code>.google.cloud.aiplatform.v1beta1.BigQueryDestination bigquery_destination = 3;</code> 5539 */ mergeBigqueryDestination( com.google.cloud.aiplatform.v1beta1.BigQueryDestination value)5540 public Builder mergeBigqueryDestination( 5541 com.google.cloud.aiplatform.v1beta1.BigQueryDestination value) { 5542 if (bigqueryDestinationBuilder_ == null) { 5543 if (destinationCase_ == 3 5544 && destination_ 5545 != com.google.cloud.aiplatform.v1beta1.BigQueryDestination.getDefaultInstance()) { 5546 destination_ = 5547 com.google.cloud.aiplatform.v1beta1.BigQueryDestination.newBuilder( 5548 (com.google.cloud.aiplatform.v1beta1.BigQueryDestination) destination_) 5549 .mergeFrom(value) 5550 .buildPartial(); 5551 } else { 5552 destination_ = value; 5553 } 5554 onChanged(); 5555 } else { 5556 if (destinationCase_ == 3) { 5557 bigqueryDestinationBuilder_.mergeFrom(value); 5558 } else { 5559 bigqueryDestinationBuilder_.setMessage(value); 5560 } 5561 } 5562 destinationCase_ = 3; 5563 return this; 5564 } 5565 /** 5566 * 5567 * 5568 * <pre> 5569 * The BigQuery project or dataset location where the output is to be 5570 * written to. If project is provided, a new dataset is created with name 5571 * `prediction_<model-display-name>_<job-create-time>` 5572 * where <model-display-name> is made 5573 * BigQuery-dataset-name compatible (for example, most special characters 5574 * become underscores), and timestamp is in 5575 * YYYY_MM_DDThh_mm_ss_sssZ "based on ISO-8601" format. In the dataset 5576 * two tables will be created, `predictions`, and `errors`. 5577 * If the Model has both 5578 * [instance][google.cloud.aiplatform.v1beta1.PredictSchemata.instance_schema_uri] 5579 * and 5580 * [prediction][google.cloud.aiplatform.v1beta1.PredictSchemata.parameters_schema_uri] 5581 * schemata defined then the tables have columns as follows: The 5582 * `predictions` table contains instances for which the prediction 5583 * succeeded, it has columns as per a concatenation of the Model's 5584 * instance and prediction schemata. 
The `errors` table contains rows for 5585 * which the prediction has failed, it has instance columns, as per the 5586 * instance schema, followed by a single "errors" column, which as values 5587 * has [google.rpc.Status][google.rpc.Status] 5588 * represented as a STRUCT, and containing only `code` and `message`. 5589 * </pre> 5590 * 5591 * <code>.google.cloud.aiplatform.v1beta1.BigQueryDestination bigquery_destination = 3;</code> 5592 */ clearBigqueryDestination()5593 public Builder clearBigqueryDestination() { 5594 if (bigqueryDestinationBuilder_ == null) { 5595 if (destinationCase_ == 3) { 5596 destinationCase_ = 0; 5597 destination_ = null; 5598 onChanged(); 5599 } 5600 } else { 5601 if (destinationCase_ == 3) { 5602 destinationCase_ = 0; 5603 destination_ = null; 5604 } 5605 bigqueryDestinationBuilder_.clear(); 5606 } 5607 return this; 5608 } 5609 /** 5610 * 5611 * 5612 * <pre> 5613 * The BigQuery project or dataset location where the output is to be 5614 * written to. If project is provided, a new dataset is created with name 5615 * `prediction_<model-display-name>_<job-create-time>` 5616 * where <model-display-name> is made 5617 * BigQuery-dataset-name compatible (for example, most special characters 5618 * become underscores), and timestamp is in 5619 * YYYY_MM_DDThh_mm_ss_sssZ "based on ISO-8601" format. In the dataset 5620 * two tables will be created, `predictions`, and `errors`. 5621 * If the Model has both 5622 * [instance][google.cloud.aiplatform.v1beta1.PredictSchemata.instance_schema_uri] 5623 * and 5624 * [prediction][google.cloud.aiplatform.v1beta1.PredictSchemata.parameters_schema_uri] 5625 * schemata defined then the tables have columns as follows: The 5626 * `predictions` table contains instances for which the prediction 5627 * succeeded, it has columns as per a concatenation of the Model's 5628 * instance and prediction schemata. The `errors` table contains rows for 5629 * which the prediction has failed, it has instance columns, as per the 5630 * instance schema, followed by a single "errors" column, which as values 5631 * has [google.rpc.Status][google.rpc.Status] 5632 * represented as a STRUCT, and containing only `code` and `message`. 5633 * </pre> 5634 * 5635 * <code>.google.cloud.aiplatform.v1beta1.BigQueryDestination bigquery_destination = 3;</code> 5636 */ 5637 public com.google.cloud.aiplatform.v1beta1.BigQueryDestination.Builder getBigqueryDestinationBuilder()5638 getBigqueryDestinationBuilder() { 5639 return getBigqueryDestinationFieldBuilder().getBuilder(); 5640 } 5641 /** 5642 * 5643 * 5644 * <pre> 5645 * The BigQuery project or dataset location where the output is to be 5646 * written to. If project is provided, a new dataset is created with name 5647 * `prediction_<model-display-name>_<job-create-time>` 5648 * where <model-display-name> is made 5649 * BigQuery-dataset-name compatible (for example, most special characters 5650 * become underscores), and timestamp is in 5651 * YYYY_MM_DDThh_mm_ss_sssZ "based on ISO-8601" format. In the dataset 5652 * two tables will be created, `predictions`, and `errors`. 
5653 * If the Model has both 5654 * [instance][google.cloud.aiplatform.v1beta1.PredictSchemata.instance_schema_uri] 5655 * and 5656 * [prediction][google.cloud.aiplatform.v1beta1.PredictSchemata.parameters_schema_uri] 5657 * schemata defined then the tables have columns as follows: The 5658 * `predictions` table contains instances for which the prediction 5659 * succeeded, it has columns as per a concatenation of the Model's 5660 * instance and prediction schemata. The `errors` table contains rows for 5661 * which the prediction has failed, it has instance columns, as per the 5662 * instance schema, followed by a single "errors" column, which as values 5663 * has [google.rpc.Status][google.rpc.Status] 5664 * represented as a STRUCT, and containing only `code` and `message`. 5665 * </pre> 5666 * 5667 * <code>.google.cloud.aiplatform.v1beta1.BigQueryDestination bigquery_destination = 3;</code> 5668 */ 5669 @java.lang.Override 5670 public com.google.cloud.aiplatform.v1beta1.BigQueryDestinationOrBuilder getBigqueryDestinationOrBuilder()5671 getBigqueryDestinationOrBuilder() { 5672 if ((destinationCase_ == 3) && (bigqueryDestinationBuilder_ != null)) { 5673 return bigqueryDestinationBuilder_.getMessageOrBuilder(); 5674 } else { 5675 if (destinationCase_ == 3) { 5676 return (com.google.cloud.aiplatform.v1beta1.BigQueryDestination) destination_; 5677 } 5678 return com.google.cloud.aiplatform.v1beta1.BigQueryDestination.getDefaultInstance(); 5679 } 5680 } 5681 /** 5682 * 5683 * 5684 * <pre> 5685 * The BigQuery project or dataset location where the output is to be 5686 * written to. If project is provided, a new dataset is created with name 5687 * `prediction_<model-display-name>_<job-create-time>` 5688 * where <model-display-name> is made 5689 * BigQuery-dataset-name compatible (for example, most special characters 5690 * become underscores), and timestamp is in 5691 * YYYY_MM_DDThh_mm_ss_sssZ "based on ISO-8601" format. In the dataset 5692 * two tables will be created, `predictions`, and `errors`. 5693 * If the Model has both 5694 * [instance][google.cloud.aiplatform.v1beta1.PredictSchemata.instance_schema_uri] 5695 * and 5696 * [prediction][google.cloud.aiplatform.v1beta1.PredictSchemata.parameters_schema_uri] 5697 * schemata defined then the tables have columns as follows: The 5698 * `predictions` table contains instances for which the prediction 5699 * succeeded, it has columns as per a concatenation of the Model's 5700 * instance and prediction schemata. The `errors` table contains rows for 5701 * which the prediction has failed, it has instance columns, as per the 5702 * instance schema, followed by a single "errors" column, which as values 5703 * has [google.rpc.Status][google.rpc.Status] 5704 * represented as a STRUCT, and containing only `code` and `message`. 
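* For illustration, under the naming rules above and with made-up values: a
* Model displayed as "my model", run at 2024-01-15T12:34:56.789Z, would get a
* dataset named roughly
*   prediction_my_model_2024_01_15T12_34_56_789Z
* holding the two tables `predictions` and `errors`.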
5705 * </pre> 5706 * 5707 * <code>.google.cloud.aiplatform.v1beta1.BigQueryDestination bigquery_destination = 3;</code> 5708 */ 5709 private com.google.protobuf.SingleFieldBuilderV3< 5710 com.google.cloud.aiplatform.v1beta1.BigQueryDestination, 5711 com.google.cloud.aiplatform.v1beta1.BigQueryDestination.Builder, 5712 com.google.cloud.aiplatform.v1beta1.BigQueryDestinationOrBuilder> getBigqueryDestinationFieldBuilder()5713 getBigqueryDestinationFieldBuilder() { 5714 if (bigqueryDestinationBuilder_ == null) { 5715 if (!(destinationCase_ == 3)) { 5716 destination_ = 5717 com.google.cloud.aiplatform.v1beta1.BigQueryDestination.getDefaultInstance(); 5718 } 5719 bigqueryDestinationBuilder_ = 5720 new com.google.protobuf.SingleFieldBuilderV3< 5721 com.google.cloud.aiplatform.v1beta1.BigQueryDestination, 5722 com.google.cloud.aiplatform.v1beta1.BigQueryDestination.Builder, 5723 com.google.cloud.aiplatform.v1beta1.BigQueryDestinationOrBuilder>( 5724 (com.google.cloud.aiplatform.v1beta1.BigQueryDestination) destination_, 5725 getParentForChildren(), 5726 isClean()); 5727 destination_ = null; 5728 } 5729 destinationCase_ = 3; 5730 onChanged(); 5731 return bigqueryDestinationBuilder_; 5732 } 5733 5734 private java.lang.Object predictionsFormat_ = ""; 5735 /** 5736 * 5737 * 5738 * <pre> 5739 * Required. The format in which Vertex AI gives the predictions, must be 5740 * one of the 5741 * [Model's][google.cloud.aiplatform.v1beta1.BatchPredictionJob.model] 5742 * [supported_output_storage_formats][google.cloud.aiplatform.v1beta1.Model.supported_output_storage_formats]. 5743 * </pre> 5744 * 5745 * <code>string predictions_format = 1 [(.google.api.field_behavior) = REQUIRED];</code> 5746 * 5747 * @return The predictionsFormat. 5748 */ getPredictionsFormat()5749 public java.lang.String getPredictionsFormat() { 5750 java.lang.Object ref = predictionsFormat_; 5751 if (!(ref instanceof java.lang.String)) { 5752 com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; 5753 java.lang.String s = bs.toStringUtf8(); 5754 predictionsFormat_ = s; 5755 return s; 5756 } else { 5757 return (java.lang.String) ref; 5758 } 5759 } 5760 /** 5761 * 5762 * 5763 * <pre> 5764 * Required. The format in which Vertex AI gives the predictions, must be 5765 * one of the 5766 * [Model's][google.cloud.aiplatform.v1beta1.BatchPredictionJob.model] 5767 * [supported_output_storage_formats][google.cloud.aiplatform.v1beta1.Model.supported_output_storage_formats]. 5768 * </pre> 5769 * 5770 * <code>string predictions_format = 1 [(.google.api.field_behavior) = REQUIRED];</code> 5771 * 5772 * @return The bytes for predictionsFormat. 5773 */ getPredictionsFormatBytes()5774 public com.google.protobuf.ByteString getPredictionsFormatBytes() { 5775 java.lang.Object ref = predictionsFormat_; 5776 if (ref instanceof String) { 5777 com.google.protobuf.ByteString b = 5778 com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref); 5779 predictionsFormat_ = b; 5780 return b; 5781 } else { 5782 return (com.google.protobuf.ByteString) ref; 5783 } 5784 } 5785 /** 5786 * 5787 * 5788 * <pre> 5789 * Required. The format in which Vertex AI gives the predictions, must be 5790 * one of the 5791 * [Model's][google.cloud.aiplatform.v1beta1.BatchPredictionJob.model] 5792 * [supported_output_storage_formats][google.cloud.aiplatform.v1beta1.Model.supported_output_storage_formats]. 
5793 * </pre> 5794 * 5795 * <code>string predictions_format = 1 [(.google.api.field_behavior) = REQUIRED];</code> 5796 * 5797 * @param value The predictionsFormat to set. 5798 * @return This builder for chaining. 5799 */ setPredictionsFormat(java.lang.String value)5800 public Builder setPredictionsFormat(java.lang.String value) { 5801 if (value == null) { 5802 throw new NullPointerException(); 5803 } 5804 predictionsFormat_ = value; 5805 bitField0_ |= 0x00000004; 5806 onChanged(); 5807 return this; 5808 } 5809 /** 5810 * 5811 * 5812 * <pre> 5813 * Required. The format in which Vertex AI gives the predictions, must be 5814 * one of the 5815 * [Model's][google.cloud.aiplatform.v1beta1.BatchPredictionJob.model] 5816 * [supported_output_storage_formats][google.cloud.aiplatform.v1beta1.Model.supported_output_storage_formats]. 5817 * </pre> 5818 * 5819 * <code>string predictions_format = 1 [(.google.api.field_behavior) = REQUIRED];</code> 5820 * 5821 * @return This builder for chaining. 5822 */ clearPredictionsFormat()5823 public Builder clearPredictionsFormat() { 5824 predictionsFormat_ = getDefaultInstance().getPredictionsFormat(); 5825 bitField0_ = (bitField0_ & ~0x00000004); 5826 onChanged(); 5827 return this; 5828 } 5829 /** 5830 * 5831 * 5832 * <pre> 5833 * Required. The format in which Vertex AI gives the predictions, must be 5834 * one of the 5835 * [Model's][google.cloud.aiplatform.v1beta1.BatchPredictionJob.model] 5836 * [supported_output_storage_formats][google.cloud.aiplatform.v1beta1.Model.supported_output_storage_formats]. 5837 * </pre> 5838 * 5839 * <code>string predictions_format = 1 [(.google.api.field_behavior) = REQUIRED];</code> 5840 * 5841 * @param value The bytes for predictionsFormat to set. 5842 * @return This builder for chaining. 
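* <p>A minimal sketch of assembling an OutputConfig that writes BigQuery
* output, assuming {@code BigQueryDestination} exposes {@code setOutputUri}
* for its output_uri field; the project URI and the "bigquery" format are
* illustrative stand-ins for values drawn from the Model's
* supported_output_storage_formats:
*
* <pre>{@code
* BatchPredictionJob.OutputConfig outputConfig =
*     BatchPredictionJob.OutputConfig.newBuilder()
*         .setPredictionsFormat("bigquery") // example format, not prescriptive
*         .setBigqueryDestination(
*             BigQueryDestination.newBuilder()
*                 .setOutputUri("bq://my-project")) // assumed setter, made-up URI
*         .build();
* }</pre>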
5843 */ setPredictionsFormatBytes(com.google.protobuf.ByteString value)5844 public Builder setPredictionsFormatBytes(com.google.protobuf.ByteString value) { 5845 if (value == null) { 5846 throw new NullPointerException(); 5847 } 5848 checkByteStringIsUtf8(value); 5849 predictionsFormat_ = value; 5850 bitField0_ |= 0x00000004; 5851 onChanged(); 5852 return this; 5853 } 5854 5855 @java.lang.Override setUnknownFields( final com.google.protobuf.UnknownFieldSet unknownFields)5856 public final Builder setUnknownFields( 5857 final com.google.protobuf.UnknownFieldSet unknownFields) { 5858 return super.setUnknownFields(unknownFields); 5859 } 5860 5861 @java.lang.Override mergeUnknownFields( final com.google.protobuf.UnknownFieldSet unknownFields)5862 public final Builder mergeUnknownFields( 5863 final com.google.protobuf.UnknownFieldSet unknownFields) { 5864 return super.mergeUnknownFields(unknownFields); 5865 } 5866 5867 // @@protoc_insertion_point(builder_scope:google.cloud.aiplatform.v1beta1.BatchPredictionJob.OutputConfig) 5868 } 5869 5870 // @@protoc_insertion_point(class_scope:google.cloud.aiplatform.v1beta1.BatchPredictionJob.OutputConfig) 5871 private static final com.google.cloud.aiplatform.v1beta1.BatchPredictionJob.OutputConfig 5872 DEFAULT_INSTANCE; 5873 5874 static { 5875 DEFAULT_INSTANCE = new com.google.cloud.aiplatform.v1beta1.BatchPredictionJob.OutputConfig(); 5876 } 5877 5878 public static com.google.cloud.aiplatform.v1beta1.BatchPredictionJob.OutputConfig getDefaultInstance()5879 getDefaultInstance() { 5880 return DEFAULT_INSTANCE; 5881 } 5882 5883 private static final com.google.protobuf.Parser<OutputConfig> PARSER = 5884 new com.google.protobuf.AbstractParser<OutputConfig>() { 5885 @java.lang.Override 5886 public OutputConfig parsePartialFrom( 5887 com.google.protobuf.CodedInputStream input, 5888 com.google.protobuf.ExtensionRegistryLite extensionRegistry) 5889 throws com.google.protobuf.InvalidProtocolBufferException { 5890 Builder builder = newBuilder(); 5891 try { 5892 builder.mergeFrom(input, extensionRegistry); 5893 } catch (com.google.protobuf.InvalidProtocolBufferException e) { 5894 throw e.setUnfinishedMessage(builder.buildPartial()); 5895 } catch (com.google.protobuf.UninitializedMessageException e) { 5896 throw e.asInvalidProtocolBufferException() 5897 .setUnfinishedMessage(builder.buildPartial()); 5898 } catch (java.io.IOException e) { 5899 throw new com.google.protobuf.InvalidProtocolBufferException(e) 5900 .setUnfinishedMessage(builder.buildPartial()); 5901 } 5902 return builder.buildPartial(); 5903 } 5904 }; 5905 parser()5906 public static com.google.protobuf.Parser<OutputConfig> parser() { 5907 return PARSER; 5908 } 5909 5910 @java.lang.Override getParserForType()5911 public com.google.protobuf.Parser<OutputConfig> getParserForType() { 5912 return PARSER; 5913 } 5914 5915 @java.lang.Override 5916 public com.google.cloud.aiplatform.v1beta1.BatchPredictionJob.OutputConfig getDefaultInstanceForType()5917 getDefaultInstanceForType() { 5918 return DEFAULT_INSTANCE; 5919 } 5920 } 5921 5922 public interface OutputInfoOrBuilder 5923 extends 5924 // @@protoc_insertion_point(interface_extends:google.cloud.aiplatform.v1beta1.BatchPredictionJob.OutputInfo) 5925 com.google.protobuf.MessageOrBuilder { 5926 5927 /** 5928 * 5929 * 5930 * <pre> 5931 * Output only. The full path of the Cloud Storage directory created, into 5932 * which the prediction output is written. 
5933 * </pre> 5934 * 5935 * <code>string gcs_output_directory = 1 [(.google.api.field_behavior) = OUTPUT_ONLY];</code> 5936 * 5937 * @return Whether the gcsOutputDirectory field is set. 5938 */ hasGcsOutputDirectory()5939 boolean hasGcsOutputDirectory(); 5940 /** 5941 * 5942 * 5943 * <pre> 5944 * Output only. The full path of the Cloud Storage directory created, into 5945 * which the prediction output is written. 5946 * </pre> 5947 * 5948 * <code>string gcs_output_directory = 1 [(.google.api.field_behavior) = OUTPUT_ONLY];</code> 5949 * 5950 * @return The gcsOutputDirectory. 5951 */ getGcsOutputDirectory()5952 java.lang.String getGcsOutputDirectory(); 5953 /** 5954 * 5955 * 5956 * <pre> 5957 * Output only. The full path of the Cloud Storage directory created, into 5958 * which the prediction output is written. 5959 * </pre> 5960 * 5961 * <code>string gcs_output_directory = 1 [(.google.api.field_behavior) = OUTPUT_ONLY];</code> 5962 * 5963 * @return The bytes for gcsOutputDirectory. 5964 */ getGcsOutputDirectoryBytes()5965 com.google.protobuf.ByteString getGcsOutputDirectoryBytes(); 5966 5967 /** 5968 * 5969 * 5970 * <pre> 5971 * Output only. The path of the BigQuery dataset created, in 5972 * `bq://projectId.bqDatasetId` 5973 * format, into which the prediction output is written. 5974 * </pre> 5975 * 5976 * <code>string bigquery_output_dataset = 2 [(.google.api.field_behavior) = OUTPUT_ONLY];</code> 5977 * 5978 * @return Whether the bigqueryOutputDataset field is set. 5979 */ hasBigqueryOutputDataset()5980 boolean hasBigqueryOutputDataset(); 5981 /** 5982 * 5983 * 5984 * <pre> 5985 * Output only. The path of the BigQuery dataset created, in 5986 * `bq://projectId.bqDatasetId` 5987 * format, into which the prediction output is written. 5988 * </pre> 5989 * 5990 * <code>string bigquery_output_dataset = 2 [(.google.api.field_behavior) = OUTPUT_ONLY];</code> 5991 * 5992 * @return The bigqueryOutputDataset. 5993 */ getBigqueryOutputDataset()5994 java.lang.String getBigqueryOutputDataset(); 5995 /** 5996 * 5997 * 5998 * <pre> 5999 * Output only. The path of the BigQuery dataset created, in 6000 * `bq://projectId.bqDatasetId` 6001 * format, into which the prediction output is written. 6002 * </pre> 6003 * 6004 * <code>string bigquery_output_dataset = 2 [(.google.api.field_behavior) = OUTPUT_ONLY];</code> 6005 * 6006 * @return The bytes for bigqueryOutputDataset. 6007 */ getBigqueryOutputDatasetBytes()6008 com.google.protobuf.ByteString getBigqueryOutputDatasetBytes(); 6009 6010 /** 6011 * 6012 * 6013 * <pre> 6014 * Output only. The name of the BigQuery table created, in 6015 * `predictions_<timestamp>` 6016 * format, into which the prediction output is written. 6017 * Can be used by UI to generate the BigQuery output path, for example. 6018 * </pre> 6019 * 6020 * <code>string bigquery_output_table = 4 [(.google.api.field_behavior) = OUTPUT_ONLY];</code> 6021 * 6022 * @return The bigqueryOutputTable. 6023 */ getBigqueryOutputTable()6024 java.lang.String getBigqueryOutputTable(); 6025 /** 6026 * 6027 * 6028 * <pre> 6029 * Output only. The name of the BigQuery table created, in 6030 * `predictions_<timestamp>` 6031 * format, into which the prediction output is written. 6032 * Can be used by UI to generate the BigQuery output path, for example. 6033 * </pre> 6034 * 6035 * <code>string bigquery_output_table = 4 [(.google.api.field_behavior) = OUTPUT_ONLY];</code> 6036 * 6037 * @return The bytes for bigqueryOutputTable. 
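* <p>A minimal sketch of consuming an OutputInfo by branching on its
* output_location oneof; {@code job.getOutputInfo()} is assumed here as the
* enclosing BatchPredictionJob's accessor for its output_info field:
*
* <pre>{@code
* BatchPredictionJob.OutputInfo info = job.getOutputInfo(); // assumed accessor
* switch (info.getOutputLocationCase()) {
*   case GCS_OUTPUT_DIRECTORY:
*     System.out.println("GCS directory: " + info.getGcsOutputDirectory());
*     break;
*   case BIGQUERY_OUTPUT_DATASET:
*     System.out.println("BigQuery dataset: " + info.getBigqueryOutputDataset());
*     break;
*   case OUTPUTLOCATION_NOT_SET:
*     break;
* }
* }</pre>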
6038 */ getBigqueryOutputTableBytes()6039 com.google.protobuf.ByteString getBigqueryOutputTableBytes(); 6040 6041 public com.google.cloud.aiplatform.v1beta1.BatchPredictionJob.OutputInfo.OutputLocationCase getOutputLocationCase()6042 getOutputLocationCase(); 6043 } 6044 /** 6045 * 6046 * 6047 * <pre> 6048 * Further describes this job's output. 6049 * Supplements 6050 * [output_config][google.cloud.aiplatform.v1beta1.BatchPredictionJob.output_config]. 6051 * </pre> 6052 * 6053 * Protobuf type {@code google.cloud.aiplatform.v1beta1.BatchPredictionJob.OutputInfo} 6054 */ 6055 public static final class OutputInfo extends com.google.protobuf.GeneratedMessageV3 6056 implements 6057 // @@protoc_insertion_point(message_implements:google.cloud.aiplatform.v1beta1.BatchPredictionJob.OutputInfo) 6058 OutputInfoOrBuilder { 6059 private static final long serialVersionUID = 0L; 6060 // Use OutputInfo.newBuilder() to construct. OutputInfo(com.google.protobuf.GeneratedMessageV3.Builder<?> builder)6061 private OutputInfo(com.google.protobuf.GeneratedMessageV3.Builder<?> builder) { 6062 super(builder); 6063 } 6064 OutputInfo()6065 private OutputInfo() { 6066 bigqueryOutputTable_ = ""; 6067 } 6068 6069 @java.lang.Override 6070 @SuppressWarnings({"unused"}) newInstance(UnusedPrivateParameter unused)6071 protected java.lang.Object newInstance(UnusedPrivateParameter unused) { 6072 return new OutputInfo(); 6073 } 6074 6075 @java.lang.Override getUnknownFields()6076 public final com.google.protobuf.UnknownFieldSet getUnknownFields() { 6077 return this.unknownFields; 6078 } 6079 getDescriptor()6080 public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { 6081 return com.google.cloud.aiplatform.v1beta1.BatchPredictionJobProto 6082 .internal_static_google_cloud_aiplatform_v1beta1_BatchPredictionJob_OutputInfo_descriptor; 6083 } 6084 6085 @java.lang.Override 6086 protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable()6087 internalGetFieldAccessorTable() { 6088 return com.google.cloud.aiplatform.v1beta1.BatchPredictionJobProto 6089 .internal_static_google_cloud_aiplatform_v1beta1_BatchPredictionJob_OutputInfo_fieldAccessorTable 6090 .ensureFieldAccessorsInitialized( 6091 com.google.cloud.aiplatform.v1beta1.BatchPredictionJob.OutputInfo.class, 6092 com.google.cloud.aiplatform.v1beta1.BatchPredictionJob.OutputInfo.Builder.class); 6093 } 6094 6095 private int outputLocationCase_ = 0; 6096 private java.lang.Object outputLocation_; 6097 6098 public enum OutputLocationCase 6099 implements 6100 com.google.protobuf.Internal.EnumLite, 6101 com.google.protobuf.AbstractMessage.InternalOneOfEnum { 6102 GCS_OUTPUT_DIRECTORY(1), 6103 BIGQUERY_OUTPUT_DATASET(2), 6104 OUTPUTLOCATION_NOT_SET(0); 6105 private final int value; 6106 OutputLocationCase(int value)6107 private OutputLocationCase(int value) { 6108 this.value = value; 6109 } 6110 /** 6111 * @param value The number of the enum to look for. 6112 * @return The enum associated with the given number. 6113 * @deprecated Use {@link #forNumber(int)} instead. 
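*     As a small illustration, {@code forNumber} maps proto field numbers to
*     cases and yields null for numbers it does not recognize:
*
* <pre>{@code
* OutputLocationCase bq = OutputLocationCase.forNumber(2);   // BIGQUERY_OUTPUT_DATASET
* OutputLocationCase none = OutputLocationCase.forNumber(7); // null: not a known field number
* }</pre>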
6114 */ 6115 @java.lang.Deprecated valueOf(int value)6116 public static OutputLocationCase valueOf(int value) { 6117 return forNumber(value); 6118 } 6119 forNumber(int value)6120 public static OutputLocationCase forNumber(int value) { 6121 switch (value) { 6122 case 1: 6123 return GCS_OUTPUT_DIRECTORY; 6124 case 2: 6125 return BIGQUERY_OUTPUT_DATASET; 6126 case 0: 6127 return OUTPUTLOCATION_NOT_SET; 6128 default: 6129 return null; 6130 } 6131 } 6132 getNumber()6133 public int getNumber() { 6134 return this.value; 6135 } 6136 }; 6137 getOutputLocationCase()6138 public OutputLocationCase getOutputLocationCase() { 6139 return OutputLocationCase.forNumber(outputLocationCase_); 6140 } 6141 6142 public static final int GCS_OUTPUT_DIRECTORY_FIELD_NUMBER = 1; 6143 /** 6144 * 6145 * 6146 * <pre> 6147 * Output only. The full path of the Cloud Storage directory created, into 6148 * which the prediction output is written. 6149 * </pre> 6150 * 6151 * <code>string gcs_output_directory = 1 [(.google.api.field_behavior) = OUTPUT_ONLY];</code> 6152 * 6153 * @return Whether the gcsOutputDirectory field is set. 6154 */ hasGcsOutputDirectory()6155 public boolean hasGcsOutputDirectory() { 6156 return outputLocationCase_ == 1; 6157 } 6158 /** 6159 * 6160 * 6161 * <pre> 6162 * Output only. The full path of the Cloud Storage directory created, into 6163 * which the prediction output is written. 6164 * </pre> 6165 * 6166 * <code>string gcs_output_directory = 1 [(.google.api.field_behavior) = OUTPUT_ONLY];</code> 6167 * 6168 * @return The gcsOutputDirectory. 6169 */ getGcsOutputDirectory()6170 public java.lang.String getGcsOutputDirectory() { 6171 java.lang.Object ref = ""; 6172 if (outputLocationCase_ == 1) { 6173 ref = outputLocation_; 6174 } 6175 if (ref instanceof java.lang.String) { 6176 return (java.lang.String) ref; 6177 } else { 6178 com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; 6179 java.lang.String s = bs.toStringUtf8(); 6180 if (outputLocationCase_ == 1) { 6181 outputLocation_ = s; 6182 } 6183 return s; 6184 } 6185 } 6186 /** 6187 * 6188 * 6189 * <pre> 6190 * Output only. The full path of the Cloud Storage directory created, into 6191 * which the prediction output is written. 6192 * </pre> 6193 * 6194 * <code>string gcs_output_directory = 1 [(.google.api.field_behavior) = OUTPUT_ONLY];</code> 6195 * 6196 * @return The bytes for gcsOutputDirectory. 6197 */ getGcsOutputDirectoryBytes()6198 public com.google.protobuf.ByteString getGcsOutputDirectoryBytes() { 6199 java.lang.Object ref = ""; 6200 if (outputLocationCase_ == 1) { 6201 ref = outputLocation_; 6202 } 6203 if (ref instanceof java.lang.String) { 6204 com.google.protobuf.ByteString b = 6205 com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref); 6206 if (outputLocationCase_ == 1) { 6207 outputLocation_ = b; 6208 } 6209 return b; 6210 } else { 6211 return (com.google.protobuf.ByteString) ref; 6212 } 6213 } 6214 6215 public static final int BIGQUERY_OUTPUT_DATASET_FIELD_NUMBER = 2; 6216 /** 6217 * 6218 * 6219 * <pre> 6220 * Output only. The path of the BigQuery dataset created, in 6221 * `bq://projectId.bqDatasetId` 6222 * format, into which the prediction output is written. 6223 * </pre> 6224 * 6225 * <code>string bigquery_output_dataset = 2 [(.google.api.field_behavior) = OUTPUT_ONLY];</code> 6226 * 6227 * @return Whether the bigqueryOutputDataset field is set. 
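* <p>A minimal sketch, with a made-up value, of splitting the documented
* {@code bq://projectId.bqDatasetId} shape into its parts:
*
* <pre>{@code
* String path = info.getBigqueryOutputDataset();
* // e.g. "bq://my-project.prediction_my_model_2024_01_15T12_34_56_789Z" (made up)
* String rest = path.substring("bq://".length());
* String projectId = rest.substring(0, rest.indexOf('.'));
* String datasetId = rest.substring(rest.indexOf('.') + 1);
* }</pre>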
6228 */ hasBigqueryOutputDataset()6229 public boolean hasBigqueryOutputDataset() { 6230 return outputLocationCase_ == 2; 6231 } 6232 /** 6233 * 6234 * 6235 * <pre> 6236 * Output only. The path of the BigQuery dataset created, in 6237 * `bq://projectId.bqDatasetId` 6238 * format, into which the prediction output is written. 6239 * </pre> 6240 * 6241 * <code>string bigquery_output_dataset = 2 [(.google.api.field_behavior) = OUTPUT_ONLY];</code> 6242 * 6243 * @return The bigqueryOutputDataset. 6244 */ getBigqueryOutputDataset()6245 public java.lang.String getBigqueryOutputDataset() { 6246 java.lang.Object ref = ""; 6247 if (outputLocationCase_ == 2) { 6248 ref = outputLocation_; 6249 } 6250 if (ref instanceof java.lang.String) { 6251 return (java.lang.String) ref; 6252 } else { 6253 com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; 6254 java.lang.String s = bs.toStringUtf8(); 6255 if (outputLocationCase_ == 2) { 6256 outputLocation_ = s; 6257 } 6258 return s; 6259 } 6260 } 6261 /** 6262 * 6263 * 6264 * <pre> 6265 * Output only. The path of the BigQuery dataset created, in 6266 * `bq://projectId.bqDatasetId` 6267 * format, into which the prediction output is written. 6268 * </pre> 6269 * 6270 * <code>string bigquery_output_dataset = 2 [(.google.api.field_behavior) = OUTPUT_ONLY];</code> 6271 * 6272 * @return The bytes for bigqueryOutputDataset. 6273 */ getBigqueryOutputDatasetBytes()6274 public com.google.protobuf.ByteString getBigqueryOutputDatasetBytes() { 6275 java.lang.Object ref = ""; 6276 if (outputLocationCase_ == 2) { 6277 ref = outputLocation_; 6278 } 6279 if (ref instanceof java.lang.String) { 6280 com.google.protobuf.ByteString b = 6281 com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref); 6282 if (outputLocationCase_ == 2) { 6283 outputLocation_ = b; 6284 } 6285 return b; 6286 } else { 6287 return (com.google.protobuf.ByteString) ref; 6288 } 6289 } 6290 6291 public static final int BIGQUERY_OUTPUT_TABLE_FIELD_NUMBER = 4; 6292 6293 @SuppressWarnings("serial") 6294 private volatile java.lang.Object bigqueryOutputTable_ = ""; 6295 /** 6296 * 6297 * 6298 * <pre> 6299 * Output only. The name of the BigQuery table created, in 6300 * `predictions_<timestamp>` 6301 * format, into which the prediction output is written. 6302 * Can be used by UI to generate the BigQuery output path, for example. 6303 * </pre> 6304 * 6305 * <code>string bigquery_output_table = 4 [(.google.api.field_behavior) = OUTPUT_ONLY];</code> 6306 * 6307 * @return The bigqueryOutputTable. 6308 */ 6309 @java.lang.Override getBigqueryOutputTable()6310 public java.lang.String getBigqueryOutputTable() { 6311 java.lang.Object ref = bigqueryOutputTable_; 6312 if (ref instanceof java.lang.String) { 6313 return (java.lang.String) ref; 6314 } else { 6315 com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; 6316 java.lang.String s = bs.toStringUtf8(); 6317 bigqueryOutputTable_ = s; 6318 return s; 6319 } 6320 } 6321 /** 6322 * 6323 * 6324 * <pre> 6325 * Output only. The name of the BigQuery table created, in 6326 * `predictions_<timestamp>` 6327 * format, into which the prediction output is written. 6328 * Can be used by UI to generate the BigQuery output path, for example. 6329 * </pre> 6330 * 6331 * <code>string bigquery_output_table = 4 [(.google.api.field_behavior) = OUTPUT_ONLY];</code> 6332 * 6333 * @return The bytes for bigqueryOutputTable. 
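* <p>One plausible way, not prescribed by this API, to combine the dataset
* path with this table name into a single reference (sample values made up):
*
* <pre>{@code
* String tableRef = info.getBigqueryOutputDataset() + "." + info.getBigqueryOutputTable();
* // e.g. "bq://my-project.prediction_my_model_2024_01_15T12_34_56_789Z.predictions_2024_01_15T12_34_56_789Z"
* }</pre>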
6334 */ 6335 @java.lang.Override getBigqueryOutputTableBytes()6336 public com.google.protobuf.ByteString getBigqueryOutputTableBytes() { 6337 java.lang.Object ref = bigqueryOutputTable_; 6338 if (ref instanceof java.lang.String) { 6339 com.google.protobuf.ByteString b = 6340 com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref); 6341 bigqueryOutputTable_ = b; 6342 return b; 6343 } else { 6344 return (com.google.protobuf.ByteString) ref; 6345 } 6346 } 6347 6348 private byte memoizedIsInitialized = -1; 6349 6350 @java.lang.Override isInitialized()6351 public final boolean isInitialized() { 6352 byte isInitialized = memoizedIsInitialized; 6353 if (isInitialized == 1) return true; 6354 if (isInitialized == 0) return false; 6355 6356 memoizedIsInitialized = 1; 6357 return true; 6358 } 6359 6360 @java.lang.Override writeTo(com.google.protobuf.CodedOutputStream output)6361 public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException { 6362 if (outputLocationCase_ == 1) { 6363 com.google.protobuf.GeneratedMessageV3.writeString(output, 1, outputLocation_); 6364 } 6365 if (outputLocationCase_ == 2) { 6366 com.google.protobuf.GeneratedMessageV3.writeString(output, 2, outputLocation_); 6367 } 6368 if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(bigqueryOutputTable_)) { 6369 com.google.protobuf.GeneratedMessageV3.writeString(output, 4, bigqueryOutputTable_); 6370 } 6371 getUnknownFields().writeTo(output); 6372 } 6373 6374 @java.lang.Override getSerializedSize()6375 public int getSerializedSize() { 6376 int size = memoizedSize; 6377 if (size != -1) return size; 6378 6379 size = 0; 6380 if (outputLocationCase_ == 1) { 6381 size += com.google.protobuf.GeneratedMessageV3.computeStringSize(1, outputLocation_); 6382 } 6383 if (outputLocationCase_ == 2) { 6384 size += com.google.protobuf.GeneratedMessageV3.computeStringSize(2, outputLocation_); 6385 } 6386 if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(bigqueryOutputTable_)) { 6387 size += com.google.protobuf.GeneratedMessageV3.computeStringSize(4, bigqueryOutputTable_); 6388 } 6389 size += getUnknownFields().getSerializedSize(); 6390 memoizedSize = size; 6391 return size; 6392 } 6393 6394 @java.lang.Override equals(final java.lang.Object obj)6395 public boolean equals(final java.lang.Object obj) { 6396 if (obj == this) { 6397 return true; 6398 } 6399 if (!(obj instanceof com.google.cloud.aiplatform.v1beta1.BatchPredictionJob.OutputInfo)) { 6400 return super.equals(obj); 6401 } 6402 com.google.cloud.aiplatform.v1beta1.BatchPredictionJob.OutputInfo other = 6403 (com.google.cloud.aiplatform.v1beta1.BatchPredictionJob.OutputInfo) obj; 6404 6405 if (!getBigqueryOutputTable().equals(other.getBigqueryOutputTable())) return false; 6406 if (!getOutputLocationCase().equals(other.getOutputLocationCase())) return false; 6407 switch (outputLocationCase_) { 6408 case 1: 6409 if (!getGcsOutputDirectory().equals(other.getGcsOutputDirectory())) return false; 6410 break; 6411 case 2: 6412 if (!getBigqueryOutputDataset().equals(other.getBigqueryOutputDataset())) return false; 6413 break; 6414 case 0: 6415 default: 6416 } 6417 if (!getUnknownFields().equals(other.getUnknownFields())) return false; 6418 return true; 6419 } 6420 6421 @java.lang.Override hashCode()6422 public int hashCode() { 6423 if (memoizedHashCode != 0) { 6424 return memoizedHashCode; 6425 } 6426 int hash = 41; 6427 hash = (19 * hash) + getDescriptor().hashCode(); 6428 hash = (37 * hash) + BIGQUERY_OUTPUT_TABLE_FIELD_NUMBER; 6429 hash = (53 * 
hash) + getBigqueryOutputTable().hashCode(); 6430 switch (outputLocationCase_) { 6431 case 1: 6432 hash = (37 * hash) + GCS_OUTPUT_DIRECTORY_FIELD_NUMBER; 6433 hash = (53 * hash) + getGcsOutputDirectory().hashCode(); 6434 break; 6435 case 2: 6436 hash = (37 * hash) + BIGQUERY_OUTPUT_DATASET_FIELD_NUMBER; 6437 hash = (53 * hash) + getBigqueryOutputDataset().hashCode(); 6438 break; 6439 case 0: 6440 default: 6441 } 6442 hash = (29 * hash) + getUnknownFields().hashCode(); 6443 memoizedHashCode = hash; 6444 return hash; 6445 } 6446 parseFrom( java.nio.ByteBuffer data)6447 public static com.google.cloud.aiplatform.v1beta1.BatchPredictionJob.OutputInfo parseFrom( 6448 java.nio.ByteBuffer data) throws com.google.protobuf.InvalidProtocolBufferException { 6449 return PARSER.parseFrom(data); 6450 } 6451 parseFrom( java.nio.ByteBuffer data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)6452 public static com.google.cloud.aiplatform.v1beta1.BatchPredictionJob.OutputInfo parseFrom( 6453 java.nio.ByteBuffer data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) 6454 throws com.google.protobuf.InvalidProtocolBufferException { 6455 return PARSER.parseFrom(data, extensionRegistry); 6456 } 6457 parseFrom( com.google.protobuf.ByteString data)6458 public static com.google.cloud.aiplatform.v1beta1.BatchPredictionJob.OutputInfo parseFrom( 6459 com.google.protobuf.ByteString data) 6460 throws com.google.protobuf.InvalidProtocolBufferException { 6461 return PARSER.parseFrom(data); 6462 } 6463 parseFrom( com.google.protobuf.ByteString data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)6464 public static com.google.cloud.aiplatform.v1beta1.BatchPredictionJob.OutputInfo parseFrom( 6465 com.google.protobuf.ByteString data, 6466 com.google.protobuf.ExtensionRegistryLite extensionRegistry) 6467 throws com.google.protobuf.InvalidProtocolBufferException { 6468 return PARSER.parseFrom(data, extensionRegistry); 6469 } 6470 parseFrom( byte[] data)6471 public static com.google.cloud.aiplatform.v1beta1.BatchPredictionJob.OutputInfo parseFrom( 6472 byte[] data) throws com.google.protobuf.InvalidProtocolBufferException { 6473 return PARSER.parseFrom(data); 6474 } 6475 parseFrom( byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)6476 public static com.google.cloud.aiplatform.v1beta1.BatchPredictionJob.OutputInfo parseFrom( 6477 byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) 6478 throws com.google.protobuf.InvalidProtocolBufferException { 6479 return PARSER.parseFrom(data, extensionRegistry); 6480 } 6481 parseFrom( java.io.InputStream input)6482 public static com.google.cloud.aiplatform.v1beta1.BatchPredictionJob.OutputInfo parseFrom( 6483 java.io.InputStream input) throws java.io.IOException { 6484 return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input); 6485 } 6486 parseFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)6487 public static com.google.cloud.aiplatform.v1beta1.BatchPredictionJob.OutputInfo parseFrom( 6488 java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) 6489 throws java.io.IOException { 6490 return com.google.protobuf.GeneratedMessageV3.parseWithIOException( 6491 PARSER, input, extensionRegistry); 6492 } 6493 6494 public static com.google.cloud.aiplatform.v1beta1.BatchPredictionJob.OutputInfo parseDelimitedFrom(java.io.InputStream input)6495 parseDelimitedFrom(java.io.InputStream input) throws java.io.IOException { 6496 
return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(PARSER, input); 6497 } 6498 6499 public static com.google.cloud.aiplatform.v1beta1.BatchPredictionJob.OutputInfo parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)6500 parseDelimitedFrom( 6501 java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) 6502 throws java.io.IOException { 6503 return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException( 6504 PARSER, input, extensionRegistry); 6505 } 6506 parseFrom( com.google.protobuf.CodedInputStream input)6507 public static com.google.cloud.aiplatform.v1beta1.BatchPredictionJob.OutputInfo parseFrom( 6508 com.google.protobuf.CodedInputStream input) throws java.io.IOException { 6509 return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input); 6510 } 6511 parseFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)6512 public static com.google.cloud.aiplatform.v1beta1.BatchPredictionJob.OutputInfo parseFrom( 6513 com.google.protobuf.CodedInputStream input, 6514 com.google.protobuf.ExtensionRegistryLite extensionRegistry) 6515 throws java.io.IOException { 6516 return com.google.protobuf.GeneratedMessageV3.parseWithIOException( 6517 PARSER, input, extensionRegistry); 6518 } 6519 6520 @java.lang.Override newBuilderForType()6521 public Builder newBuilderForType() { 6522 return newBuilder(); 6523 } 6524 newBuilder()6525 public static Builder newBuilder() { 6526 return DEFAULT_INSTANCE.toBuilder(); 6527 } 6528 newBuilder( com.google.cloud.aiplatform.v1beta1.BatchPredictionJob.OutputInfo prototype)6529 public static Builder newBuilder( 6530 com.google.cloud.aiplatform.v1beta1.BatchPredictionJob.OutputInfo prototype) { 6531 return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype); 6532 } 6533 6534 @java.lang.Override toBuilder()6535 public Builder toBuilder() { 6536 return this == DEFAULT_INSTANCE ? new Builder() : new Builder().mergeFrom(this); 6537 } 6538 6539 @java.lang.Override newBuilderForType( com.google.protobuf.GeneratedMessageV3.BuilderParent parent)6540 protected Builder newBuilderForType( 6541 com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { 6542 Builder builder = new Builder(parent); 6543 return builder; 6544 } 6545 /** 6546 * 6547 * 6548 * <pre> 6549 * Further describes this job's output. 6550 * Supplements 6551 * [output_config][google.cloud.aiplatform.v1beta1.BatchPredictionJob.output_config]. 
6552 * </pre> 6553 * 6554 * Protobuf type {@code google.cloud.aiplatform.v1beta1.BatchPredictionJob.OutputInfo} 6555 */ 6556 public static final class Builder 6557 extends com.google.protobuf.GeneratedMessageV3.Builder<Builder> 6558 implements 6559 // @@protoc_insertion_point(builder_implements:google.cloud.aiplatform.v1beta1.BatchPredictionJob.OutputInfo) 6560 com.google.cloud.aiplatform.v1beta1.BatchPredictionJob.OutputInfoOrBuilder { getDescriptor()6561 public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { 6562 return com.google.cloud.aiplatform.v1beta1.BatchPredictionJobProto 6563 .internal_static_google_cloud_aiplatform_v1beta1_BatchPredictionJob_OutputInfo_descriptor; 6564 } 6565 6566 @java.lang.Override 6567 protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable()6568 internalGetFieldAccessorTable() { 6569 return com.google.cloud.aiplatform.v1beta1.BatchPredictionJobProto 6570 .internal_static_google_cloud_aiplatform_v1beta1_BatchPredictionJob_OutputInfo_fieldAccessorTable 6571 .ensureFieldAccessorsInitialized( 6572 com.google.cloud.aiplatform.v1beta1.BatchPredictionJob.OutputInfo.class, 6573 com.google.cloud.aiplatform.v1beta1.BatchPredictionJob.OutputInfo.Builder.class); 6574 } 6575 6576 // Construct using 6577 // com.google.cloud.aiplatform.v1beta1.BatchPredictionJob.OutputInfo.newBuilder() Builder()6578 private Builder() {} 6579 Builder(com.google.protobuf.GeneratedMessageV3.BuilderParent parent)6580 private Builder(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { 6581 super(parent); 6582 } 6583 6584 @java.lang.Override clear()6585 public Builder clear() { 6586 super.clear(); 6587 bitField0_ = 0; 6588 bigqueryOutputTable_ = ""; 6589 outputLocationCase_ = 0; 6590 outputLocation_ = null; 6591 return this; 6592 } 6593 6594 @java.lang.Override getDescriptorForType()6595 public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { 6596 return com.google.cloud.aiplatform.v1beta1.BatchPredictionJobProto 6597 .internal_static_google_cloud_aiplatform_v1beta1_BatchPredictionJob_OutputInfo_descriptor; 6598 } 6599 6600 @java.lang.Override 6601 public com.google.cloud.aiplatform.v1beta1.BatchPredictionJob.OutputInfo getDefaultInstanceForType()6602 getDefaultInstanceForType() { 6603 return com.google.cloud.aiplatform.v1beta1.BatchPredictionJob.OutputInfo 6604 .getDefaultInstance(); 6605 } 6606 6607 @java.lang.Override build()6608 public com.google.cloud.aiplatform.v1beta1.BatchPredictionJob.OutputInfo build() { 6609 com.google.cloud.aiplatform.v1beta1.BatchPredictionJob.OutputInfo result = buildPartial(); 6610 if (!result.isInitialized()) { 6611 throw newUninitializedMessageException(result); 6612 } 6613 return result; 6614 } 6615 6616 @java.lang.Override buildPartial()6617 public com.google.cloud.aiplatform.v1beta1.BatchPredictionJob.OutputInfo buildPartial() { 6618 com.google.cloud.aiplatform.v1beta1.BatchPredictionJob.OutputInfo result = 6619 new com.google.cloud.aiplatform.v1beta1.BatchPredictionJob.OutputInfo(this); 6620 if (bitField0_ != 0) { 6621 buildPartial0(result); 6622 } 6623 buildPartialOneofs(result); 6624 onBuilt(); 6625 return result; 6626 } 6627 buildPartial0( com.google.cloud.aiplatform.v1beta1.BatchPredictionJob.OutputInfo result)6628 private void buildPartial0( 6629 com.google.cloud.aiplatform.v1beta1.BatchPredictionJob.OutputInfo result) { 6630 int from_bitField0_ = bitField0_; 6631 if (((from_bitField0_ & 0x00000004) != 0)) { 6632 result.bigqueryOutputTable_ = 
bigqueryOutputTable_; 6633 } 6634 } 6635 buildPartialOneofs( com.google.cloud.aiplatform.v1beta1.BatchPredictionJob.OutputInfo result)6636 private void buildPartialOneofs( 6637 com.google.cloud.aiplatform.v1beta1.BatchPredictionJob.OutputInfo result) { 6638 result.outputLocationCase_ = outputLocationCase_; 6639 result.outputLocation_ = this.outputLocation_; 6640 } 6641 6642 @java.lang.Override clone()6643 public Builder clone() { 6644 return super.clone(); 6645 } 6646 6647 @java.lang.Override setField( com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value)6648 public Builder setField( 6649 com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { 6650 return super.setField(field, value); 6651 } 6652 6653 @java.lang.Override clearField(com.google.protobuf.Descriptors.FieldDescriptor field)6654 public Builder clearField(com.google.protobuf.Descriptors.FieldDescriptor field) { 6655 return super.clearField(field); 6656 } 6657 6658 @java.lang.Override clearOneof(com.google.protobuf.Descriptors.OneofDescriptor oneof)6659 public Builder clearOneof(com.google.protobuf.Descriptors.OneofDescriptor oneof) { 6660 return super.clearOneof(oneof); 6661 } 6662 6663 @java.lang.Override setRepeatedField( com.google.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value)6664 public Builder setRepeatedField( 6665 com.google.protobuf.Descriptors.FieldDescriptor field, 6666 int index, 6667 java.lang.Object value) { 6668 return super.setRepeatedField(field, index, value); 6669 } 6670 6671 @java.lang.Override addRepeatedField( com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value)6672 public Builder addRepeatedField( 6673 com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { 6674 return super.addRepeatedField(field, value); 6675 } 6676 6677 @java.lang.Override mergeFrom(com.google.protobuf.Message other)6678 public Builder mergeFrom(com.google.protobuf.Message other) { 6679 if (other instanceof com.google.cloud.aiplatform.v1beta1.BatchPredictionJob.OutputInfo) { 6680 return mergeFrom( 6681 (com.google.cloud.aiplatform.v1beta1.BatchPredictionJob.OutputInfo) other); 6682 } else { 6683 super.mergeFrom(other); 6684 return this; 6685 } 6686 } 6687 mergeFrom( com.google.cloud.aiplatform.v1beta1.BatchPredictionJob.OutputInfo other)6688 public Builder mergeFrom( 6689 com.google.cloud.aiplatform.v1beta1.BatchPredictionJob.OutputInfo other) { 6690 if (other 6691 == com.google.cloud.aiplatform.v1beta1.BatchPredictionJob.OutputInfo 6692 .getDefaultInstance()) return this; 6693 if (!other.getBigqueryOutputTable().isEmpty()) { 6694 bigqueryOutputTable_ = other.bigqueryOutputTable_; 6695 bitField0_ |= 0x00000004; 6696 onChanged(); 6697 } 6698 switch (other.getOutputLocationCase()) { 6699 case GCS_OUTPUT_DIRECTORY: 6700 { 6701 outputLocationCase_ = 1; 6702 outputLocation_ = other.outputLocation_; 6703 onChanged(); 6704 break; 6705 } 6706 case BIGQUERY_OUTPUT_DATASET: 6707 { 6708 outputLocationCase_ = 2; 6709 outputLocation_ = other.outputLocation_; 6710 onChanged(); 6711 break; 6712 } 6713 case OUTPUTLOCATION_NOT_SET: 6714 { 6715 break; 6716 } 6717 } 6718 this.mergeUnknownFields(other.getUnknownFields()); 6719 onChanged(); 6720 return this; 6721 } 6722 6723 @java.lang.Override isInitialized()6724 public final boolean isInitialized() { 6725 return true; 6726 } 6727 6728 @java.lang.Override mergeFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)6729 
public Builder mergeFrom( 6730 com.google.protobuf.CodedInputStream input, 6731 com.google.protobuf.ExtensionRegistryLite extensionRegistry) 6732 throws java.io.IOException { 6733 if (extensionRegistry == null) { 6734 throw new java.lang.NullPointerException(); 6735 } 6736 try { 6737 boolean done = false; 6738 while (!done) { 6739 int tag = input.readTag(); 6740 switch (tag) { 6741 case 0: 6742 done = true; 6743 break; 6744 case 10: 6745 { 6746 java.lang.String s = input.readStringRequireUtf8(); 6747 outputLocationCase_ = 1; 6748 outputLocation_ = s; 6749 break; 6750 } // case 10 6751 case 18: 6752 { 6753 java.lang.String s = input.readStringRequireUtf8(); 6754 outputLocationCase_ = 2; 6755 outputLocation_ = s; 6756 break; 6757 } // case 18 6758 case 34: 6759 { 6760 bigqueryOutputTable_ = input.readStringRequireUtf8(); 6761 bitField0_ |= 0x00000004; 6762 break; 6763 } // case 34 6764 default: 6765 { 6766 if (!super.parseUnknownField(input, extensionRegistry, tag)) { 6767 done = true; // was an endgroup tag 6768 } 6769 break; 6770 } // default: 6771 } // switch (tag) 6772 } // while (!done) 6773 } catch (com.google.protobuf.InvalidProtocolBufferException e) { 6774 throw e.unwrapIOException(); 6775 } finally { 6776 onChanged(); 6777 } // finally 6778 return this; 6779 } 6780 6781 private int outputLocationCase_ = 0; 6782 private java.lang.Object outputLocation_; 6783 getOutputLocationCase()6784 public OutputLocationCase getOutputLocationCase() { 6785 return OutputLocationCase.forNumber(outputLocationCase_); 6786 } 6787 clearOutputLocation()6788 public Builder clearOutputLocation() { 6789 outputLocationCase_ = 0; 6790 outputLocation_ = null; 6791 onChanged(); 6792 return this; 6793 } 6794 6795 private int bitField0_; 6796 6797 /** 6798 * 6799 * 6800 * <pre> 6801 * Output only. The full path of the Cloud Storage directory created, into 6802 * which the prediction output is written. 6803 * </pre> 6804 * 6805 * <code>string gcs_output_directory = 1 [(.google.api.field_behavior) = OUTPUT_ONLY];</code> 6806 * 6807 * @return Whether the gcsOutputDirectory field is set. 6808 */ 6809 @java.lang.Override hasGcsOutputDirectory()6810 public boolean hasGcsOutputDirectory() { 6811 return outputLocationCase_ == 1; 6812 } 6813 /** 6814 * 6815 * 6816 * <pre> 6817 * Output only. The full path of the Cloud Storage directory created, into 6818 * which the prediction output is written. 6819 * </pre> 6820 * 6821 * <code>string gcs_output_directory = 1 [(.google.api.field_behavior) = OUTPUT_ONLY];</code> 6822 * 6823 * @return The gcsOutputDirectory. 6824 */ 6825 @java.lang.Override getGcsOutputDirectory()6826 public java.lang.String getGcsOutputDirectory() { 6827 java.lang.Object ref = ""; 6828 if (outputLocationCase_ == 1) { 6829 ref = outputLocation_; 6830 } 6831 if (!(ref instanceof java.lang.String)) { 6832 com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; 6833 java.lang.String s = bs.toStringUtf8(); 6834 if (outputLocationCase_ == 1) { 6835 outputLocation_ = s; 6836 } 6837 return s; 6838 } else { 6839 return (java.lang.String) ref; 6840 } 6841 } 6842 /** 6843 * 6844 * 6845 * <pre> 6846 * Output only. The full path of the Cloud Storage directory created, into 6847 * which the prediction output is written. 6848 * </pre> 6849 * 6850 * <code>string gcs_output_directory = 1 [(.google.api.field_behavior) = OUTPUT_ONLY];</code> 6851 * 6852 * @return The bytes for gcsOutputDirectory. 
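* <p>Since these fields are OUTPUT_ONLY, the builder setters are mainly
* handy for stubbing server responses in tests; a minimal sketch with a
* made-up bucket path:
*
* <pre>{@code
* BatchPredictionJob.OutputInfo stub =
*     BatchPredictionJob.OutputInfo.newBuilder()
*         .setGcsOutputDirectory("gs://my-bucket/prediction-output/") // made-up path
*         .build();
* }</pre>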
6853 */ 6854 @java.lang.Override getGcsOutputDirectoryBytes()6855 public com.google.protobuf.ByteString getGcsOutputDirectoryBytes() { 6856 java.lang.Object ref = ""; 6857 if (outputLocationCase_ == 1) { 6858 ref = outputLocation_; 6859 } 6860 if (ref instanceof String) { 6861 com.google.protobuf.ByteString b = 6862 com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref); 6863 if (outputLocationCase_ == 1) { 6864 outputLocation_ = b; 6865 } 6866 return b; 6867 } else { 6868 return (com.google.protobuf.ByteString) ref; 6869 } 6870 } 6871 /** 6872 * 6873 * 6874 * <pre> 6875 * Output only. The full path of the Cloud Storage directory created, into 6876 * which the prediction output is written. 6877 * </pre> 6878 * 6879 * <code>string gcs_output_directory = 1 [(.google.api.field_behavior) = OUTPUT_ONLY];</code> 6880 * 6881 * @param value The gcsOutputDirectory to set. 6882 * @return This builder for chaining. 6883 */ setGcsOutputDirectory(java.lang.String value)6884 public Builder setGcsOutputDirectory(java.lang.String value) { 6885 if (value == null) { 6886 throw new NullPointerException(); 6887 } 6888 outputLocationCase_ = 1; 6889 outputLocation_ = value; 6890 onChanged(); 6891 return this; 6892 } 6893 /** 6894 * 6895 * 6896 * <pre> 6897 * Output only. The full path of the Cloud Storage directory created, into 6898 * which the prediction output is written. 6899 * </pre> 6900 * 6901 * <code>string gcs_output_directory = 1 [(.google.api.field_behavior) = OUTPUT_ONLY];</code> 6902 * 6903 * @return This builder for chaining. 6904 */ clearGcsOutputDirectory()6905 public Builder clearGcsOutputDirectory() { 6906 if (outputLocationCase_ == 1) { 6907 outputLocationCase_ = 0; 6908 outputLocation_ = null; 6909 onChanged(); 6910 } 6911 return this; 6912 } 6913 /** 6914 * 6915 * 6916 * <pre> 6917 * Output only. The full path of the Cloud Storage directory created, into 6918 * which the prediction output is written. 6919 * </pre> 6920 * 6921 * <code>string gcs_output_directory = 1 [(.google.api.field_behavior) = OUTPUT_ONLY];</code> 6922 * 6923 * @param value The bytes for gcsOutputDirectory to set. 6924 * @return This builder for chaining. 6925 */ setGcsOutputDirectoryBytes(com.google.protobuf.ByteString value)6926 public Builder setGcsOutputDirectoryBytes(com.google.protobuf.ByteString value) { 6927 if (value == null) { 6928 throw new NullPointerException(); 6929 } 6930 checkByteStringIsUtf8(value); 6931 outputLocationCase_ = 1; 6932 outputLocation_ = value; 6933 onChanged(); 6934 return this; 6935 } 6936 6937 /** 6938 * 6939 * 6940 * <pre> 6941 * Output only. The path of the BigQuery dataset created, in 6942 * `bq://projectId.bqDatasetId` 6943 * format, into which the prediction output is written. 6944 * </pre> 6945 * 6946 * <code>string bigquery_output_dataset = 2 [(.google.api.field_behavior) = OUTPUT_ONLY]; 6947 * </code> 6948 * 6949 * @return Whether the bigqueryOutputDataset field is set. 6950 */ 6951 @java.lang.Override hasBigqueryOutputDataset()6952 public boolean hasBigqueryOutputDataset() { 6953 return outputLocationCase_ == 2; 6954 } 6955 /** 6956 * 6957 * 6958 * <pre> 6959 * Output only. The path of the BigQuery dataset created, in 6960 * `bq://projectId.bqDatasetId` 6961 * format, into which the prediction output is written. 6962 * </pre> 6963 * 6964 * <code>string bigquery_output_dataset = 2 [(.google.api.field_behavior) = OUTPUT_ONLY]; 6965 * </code> 6966 * 6967 * @return The bigqueryOutputDataset. 
6968 */ 6969 @java.lang.Override getBigqueryOutputDataset()6970 public java.lang.String getBigqueryOutputDataset() { 6971 java.lang.Object ref = ""; 6972 if (outputLocationCase_ == 2) { 6973 ref = outputLocation_; 6974 } 6975 if (!(ref instanceof java.lang.String)) { 6976 com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; 6977 java.lang.String s = bs.toStringUtf8(); 6978 if (outputLocationCase_ == 2) { 6979 outputLocation_ = s; 6980 } 6981 return s; 6982 } else { 6983 return (java.lang.String) ref; 6984 } 6985 } 6986 /** 6987 * 6988 * 6989 * <pre> 6990 * Output only. The path of the BigQuery dataset created, in 6991 * `bq://projectId.bqDatasetId` 6992 * format, into which the prediction output is written. 6993 * </pre> 6994 * 6995 * <code>string bigquery_output_dataset = 2 [(.google.api.field_behavior) = OUTPUT_ONLY]; 6996 * </code> 6997 * 6998 * @return The bytes for bigqueryOutputDataset. 6999 */ 7000 @java.lang.Override getBigqueryOutputDatasetBytes()7001 public com.google.protobuf.ByteString getBigqueryOutputDatasetBytes() { 7002 java.lang.Object ref = ""; 7003 if (outputLocationCase_ == 2) { 7004 ref = outputLocation_; 7005 } 7006 if (ref instanceof String) { 7007 com.google.protobuf.ByteString b = 7008 com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref); 7009 if (outputLocationCase_ == 2) { 7010 outputLocation_ = b; 7011 } 7012 return b; 7013 } else { 7014 return (com.google.protobuf.ByteString) ref; 7015 } 7016 } 7017 /** 7018 * 7019 * 7020 * <pre> 7021 * Output only. The path of the BigQuery dataset created, in 7022 * `bq://projectId.bqDatasetId` 7023 * format, into which the prediction output is written. 7024 * </pre> 7025 * 7026 * <code>string bigquery_output_dataset = 2 [(.google.api.field_behavior) = OUTPUT_ONLY]; 7027 * </code> 7028 * 7029 * @param value The bigqueryOutputDataset to set. 7030 * @return This builder for chaining. 7031 */ setBigqueryOutputDataset(java.lang.String value)7032 public Builder setBigqueryOutputDataset(java.lang.String value) { 7033 if (value == null) { 7034 throw new NullPointerException(); 7035 } 7036 outputLocationCase_ = 2; 7037 outputLocation_ = value; 7038 onChanged(); 7039 return this; 7040 } 7041 /** 7042 * 7043 * 7044 * <pre> 7045 * Output only. The path of the BigQuery dataset created, in 7046 * `bq://projectId.bqDatasetId` 7047 * format, into which the prediction output is written. 7048 * </pre> 7049 * 7050 * <code>string bigquery_output_dataset = 2 [(.google.api.field_behavior) = OUTPUT_ONLY]; 7051 * </code> 7052 * 7053 * @return This builder for chaining. 7054 */ clearBigqueryOutputDataset()7055 public Builder clearBigqueryOutputDataset() { 7056 if (outputLocationCase_ == 2) { 7057 outputLocationCase_ = 0; 7058 outputLocation_ = null; 7059 onChanged(); 7060 } 7061 return this; 7062 } 7063 /** 7064 * 7065 * 7066 * <pre> 7067 * Output only. The path of the BigQuery dataset created, in 7068 * `bq://projectId.bqDatasetId` 7069 * format, into which the prediction output is written. 7070 * </pre> 7071 * 7072 * <code>string bigquery_output_dataset = 2 [(.google.api.field_behavior) = OUTPUT_ONLY]; 7073 * </code> 7074 * 7075 * @param value The bytes for bigqueryOutputDataset to set. 7076 * @return This builder for chaining. 
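* <p>A minimal sketch of the oneof behavior visible in this builder:
* gcs_output_directory and bigquery_output_dataset share the output_location
* oneof, so setting one replaces the other (values below are made up):
*
* <pre>{@code
* BatchPredictionJob.OutputInfo.Builder b = BatchPredictionJob.OutputInfo.newBuilder();
* b.setGcsOutputDirectory("gs://my-bucket/out/");
* b.setBigqueryOutputDataset("bq://my-project.my_dataset");
* boolean gcsStillSet = b.hasGcsOutputDirectory(); // false: the oneof now holds the dataset
* b.clearOutputLocation();                         // afterwards neither member is set
* }</pre>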
7077 */ setBigqueryOutputDatasetBytes(com.google.protobuf.ByteString value)7078 public Builder setBigqueryOutputDatasetBytes(com.google.protobuf.ByteString value) { 7079 if (value == null) { 7080 throw new NullPointerException(); 7081 } 7082 checkByteStringIsUtf8(value); 7083 outputLocationCase_ = 2; 7084 outputLocation_ = value; 7085 onChanged(); 7086 return this; 7087 } 7088 7089 private java.lang.Object bigqueryOutputTable_ = ""; 7090 /** 7091 * 7092 * 7093 * <pre> 7094 * Output only. The name of the BigQuery table created, in 7095 * `predictions_<timestamp>` 7096 * format, into which the prediction output is written. 7097 * Can be used by UI to generate the BigQuery output path, for example. 7098 * </pre> 7099 * 7100 * <code>string bigquery_output_table = 4 [(.google.api.field_behavior) = OUTPUT_ONLY];</code> 7101 * 7102 * @return The bigqueryOutputTable. 7103 */ getBigqueryOutputTable()7104 public java.lang.String getBigqueryOutputTable() { 7105 java.lang.Object ref = bigqueryOutputTable_; 7106 if (!(ref instanceof java.lang.String)) { 7107 com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; 7108 java.lang.String s = bs.toStringUtf8(); 7109 bigqueryOutputTable_ = s; 7110 return s; 7111 } else { 7112 return (java.lang.String) ref; 7113 } 7114 } 7115 /** 7116 * 7117 * 7118 * <pre> 7119 * Output only. The name of the BigQuery table created, in 7120 * `predictions_<timestamp>` 7121 * format, into which the prediction output is written. 7122 * Can be used by UI to generate the BigQuery output path, for example. 7123 * </pre> 7124 * 7125 * <code>string bigquery_output_table = 4 [(.google.api.field_behavior) = OUTPUT_ONLY];</code> 7126 * 7127 * @return The bytes for bigqueryOutputTable. 7128 */ getBigqueryOutputTableBytes()7129 public com.google.protobuf.ByteString getBigqueryOutputTableBytes() { 7130 java.lang.Object ref = bigqueryOutputTable_; 7131 if (ref instanceof String) { 7132 com.google.protobuf.ByteString b = 7133 com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref); 7134 bigqueryOutputTable_ = b; 7135 return b; 7136 } else { 7137 return (com.google.protobuf.ByteString) ref; 7138 } 7139 } 7140 /** 7141 * 7142 * 7143 * <pre> 7144 * Output only. The name of the BigQuery table created, in 7145 * `predictions_<timestamp>` 7146 * format, into which the prediction output is written. 7147 * Can be used by UI to generate the BigQuery output path, for example. 7148 * </pre> 7149 * 7150 * <code>string bigquery_output_table = 4 [(.google.api.field_behavior) = OUTPUT_ONLY];</code> 7151 * 7152 * @param value The bigqueryOutputTable to set. 7153 * @return This builder for chaining. 7154 */ setBigqueryOutputTable(java.lang.String value)7155 public Builder setBigqueryOutputTable(java.lang.String value) { 7156 if (value == null) { 7157 throw new NullPointerException(); 7158 } 7159 bigqueryOutputTable_ = value; 7160 bitField0_ |= 0x00000004; 7161 onChanged(); 7162 return this; 7163 } 7164 /** 7165 * 7166 * 7167 * <pre> 7168 * Output only. The name of the BigQuery table created, in 7169 * `predictions_<timestamp>` 7170 * format, into which the prediction output is written. 7171 * Can be used by UI to generate the BigQuery output path, for example. 7172 * </pre> 7173 * 7174 * <code>string bigquery_output_table = 4 [(.google.api.field_behavior) = OUTPUT_ONLY];</code> 7175 * 7176 * @return This builder for chaining. 
7177 */ clearBigqueryOutputTable()7178 public Builder clearBigqueryOutputTable() { 7179 bigqueryOutputTable_ = getDefaultInstance().getBigqueryOutputTable(); 7180 bitField0_ = (bitField0_ & ~0x00000004); 7181 onChanged(); 7182 return this; 7183 } 7184 /** 7185 * 7186 * 7187 * <pre> 7188 * Output only. The name of the BigQuery table created, in 7189 * `predictions_<timestamp>` 7190 * format, into which the prediction output is written. 7191 * Can be used by UI to generate the BigQuery output path, for example. 7192 * </pre> 7193 * 7194 * <code>string bigquery_output_table = 4 [(.google.api.field_behavior) = OUTPUT_ONLY];</code> 7195 * 7196 * @param value The bytes for bigqueryOutputTable to set. 7197 * @return This builder for chaining. 7198 */ setBigqueryOutputTableBytes(com.google.protobuf.ByteString value)7199 public Builder setBigqueryOutputTableBytes(com.google.protobuf.ByteString value) { 7200 if (value == null) { 7201 throw new NullPointerException(); 7202 } 7203 checkByteStringIsUtf8(value); 7204 bigqueryOutputTable_ = value; 7205 bitField0_ |= 0x00000004; 7206 onChanged(); 7207 return this; 7208 } 7209 7210 @java.lang.Override setUnknownFields( final com.google.protobuf.UnknownFieldSet unknownFields)7211 public final Builder setUnknownFields( 7212 final com.google.protobuf.UnknownFieldSet unknownFields) { 7213 return super.setUnknownFields(unknownFields); 7214 } 7215 7216 @java.lang.Override mergeUnknownFields( final com.google.protobuf.UnknownFieldSet unknownFields)7217 public final Builder mergeUnknownFields( 7218 final com.google.protobuf.UnknownFieldSet unknownFields) { 7219 return super.mergeUnknownFields(unknownFields); 7220 } 7221 7222 // @@protoc_insertion_point(builder_scope:google.cloud.aiplatform.v1beta1.BatchPredictionJob.OutputInfo) 7223 } 7224 7225 // @@protoc_insertion_point(class_scope:google.cloud.aiplatform.v1beta1.BatchPredictionJob.OutputInfo) 7226 private static final com.google.cloud.aiplatform.v1beta1.BatchPredictionJob.OutputInfo 7227 DEFAULT_INSTANCE; 7228 7229 static { 7230 DEFAULT_INSTANCE = new com.google.cloud.aiplatform.v1beta1.BatchPredictionJob.OutputInfo(); 7231 } 7232 7233 public static com.google.cloud.aiplatform.v1beta1.BatchPredictionJob.OutputInfo getDefaultInstance()7234 getDefaultInstance() { 7235 return DEFAULT_INSTANCE; 7236 } 7237 7238 private static final com.google.protobuf.Parser<OutputInfo> PARSER = 7239 new com.google.protobuf.AbstractParser<OutputInfo>() { 7240 @java.lang.Override 7241 public OutputInfo parsePartialFrom( 7242 com.google.protobuf.CodedInputStream input, 7243 com.google.protobuf.ExtensionRegistryLite extensionRegistry) 7244 throws com.google.protobuf.InvalidProtocolBufferException { 7245 Builder builder = newBuilder(); 7246 try { 7247 builder.mergeFrom(input, extensionRegistry); 7248 } catch (com.google.protobuf.InvalidProtocolBufferException e) { 7249 throw e.setUnfinishedMessage(builder.buildPartial()); 7250 } catch (com.google.protobuf.UninitializedMessageException e) { 7251 throw e.asInvalidProtocolBufferException() 7252 .setUnfinishedMessage(builder.buildPartial()); 7253 } catch (java.io.IOException e) { 7254 throw new com.google.protobuf.InvalidProtocolBufferException(e) 7255 .setUnfinishedMessage(builder.buildPartial()); 7256 } 7257 return builder.buildPartial(); 7258 } 7259 }; 7260 parser()7261 public static com.google.protobuf.Parser<OutputInfo> parser() { 7262 return PARSER; 7263 } 7264 7265 @java.lang.Override getParserForType()7266 public com.google.protobuf.Parser<OutputInfo> getParserForType() { 7267 
return PARSER; 7268 } 7269 7270 @java.lang.Override 7271 public com.google.cloud.aiplatform.v1beta1.BatchPredictionJob.OutputInfo getDefaultInstanceForType()7272 getDefaultInstanceForType() { 7273 return DEFAULT_INSTANCE; 7274 } 7275 } 7276 7277 public static final int NAME_FIELD_NUMBER = 1; 7278 7279 @SuppressWarnings("serial") 7280 private volatile java.lang.Object name_ = ""; 7281 /** 7282 * 7283 * 7284 * <pre> 7285 * Output only. Resource name of the BatchPredictionJob. 7286 * </pre> 7287 * 7288 * <code>string name = 1 [(.google.api.field_behavior) = OUTPUT_ONLY];</code> 7289 * 7290 * @return The name. 7291 */ 7292 @java.lang.Override getName()7293 public java.lang.String getName() { 7294 java.lang.Object ref = name_; 7295 if (ref instanceof java.lang.String) { 7296 return (java.lang.String) ref; 7297 } else { 7298 com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; 7299 java.lang.String s = bs.toStringUtf8(); 7300 name_ = s; 7301 return s; 7302 } 7303 } 7304 /** 7305 * 7306 * 7307 * <pre> 7308 * Output only. Resource name of the BatchPredictionJob. 7309 * </pre> 7310 * 7311 * <code>string name = 1 [(.google.api.field_behavior) = OUTPUT_ONLY];</code> 7312 * 7313 * @return The bytes for name. 7314 */ 7315 @java.lang.Override getNameBytes()7316 public com.google.protobuf.ByteString getNameBytes() { 7317 java.lang.Object ref = name_; 7318 if (ref instanceof java.lang.String) { 7319 com.google.protobuf.ByteString b = 7320 com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref); 7321 name_ = b; 7322 return b; 7323 } else { 7324 return (com.google.protobuf.ByteString) ref; 7325 } 7326 } 7327 7328 public static final int DISPLAY_NAME_FIELD_NUMBER = 2; 7329 7330 @SuppressWarnings("serial") 7331 private volatile java.lang.Object displayName_ = ""; 7332 /** 7333 * 7334 * 7335 * <pre> 7336 * Required. The user-defined name of this BatchPredictionJob. 7337 * </pre> 7338 * 7339 * <code>string display_name = 2 [(.google.api.field_behavior) = REQUIRED];</code> 7340 * 7341 * @return The displayName. 7342 */ 7343 @java.lang.Override getDisplayName()7344 public java.lang.String getDisplayName() { 7345 java.lang.Object ref = displayName_; 7346 if (ref instanceof java.lang.String) { 7347 return (java.lang.String) ref; 7348 } else { 7349 com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; 7350 java.lang.String s = bs.toStringUtf8(); 7351 displayName_ = s; 7352 return s; 7353 } 7354 } 7355 /** 7356 * 7357 * 7358 * <pre> 7359 * Required. The user-defined name of this BatchPredictionJob. 7360 * </pre> 7361 * 7362 * <code>string display_name = 2 [(.google.api.field_behavior) = REQUIRED];</code> 7363 * 7364 * @return The bytes for displayName. 7365 */ 7366 @java.lang.Override getDisplayNameBytes()7367 public com.google.protobuf.ByteString getDisplayNameBytes() { 7368 java.lang.Object ref = displayName_; 7369 if (ref instanceof java.lang.String) { 7370 com.google.protobuf.ByteString b = 7371 com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref); 7372 displayName_ = b; 7373 return b; 7374 } else { 7375 return (com.google.protobuf.ByteString) ref; 7376 } 7377 } 7378 7379 public static final int MODEL_FIELD_NUMBER = 3; 7380 7381 @SuppressWarnings("serial") 7382 private volatile java.lang.Object model_ = ""; 7383 /** 7384 * 7385 * 7386 * <pre> 7387 * The name of the Model resource that produces the predictions via this job, 7388 * must share the same ancestor Location. 
7389 * Starting this job has no impact on any existing deployments of the Model 7390 * and their resources. 7391 * Exactly one of model and unmanaged_container_model must be set. 7392 * The model resource name may contain version id or version alias to specify 7393 * the version. 7394 * Example: `projects/{project}/locations/{location}/models/{model}@2` 7395 * or 7396 * `projects/{project}/locations/{location}/models/{model}@golden` 7397 * if no version is specified, the default version will be deployed. 7398 * </pre> 7399 * 7400 * <code>string model = 3 [(.google.api.resource_reference) = { ... }</code> 7401 * 7402 * @return The model. 7403 */ 7404 @java.lang.Override getModel()7405 public java.lang.String getModel() { 7406 java.lang.Object ref = model_; 7407 if (ref instanceof java.lang.String) { 7408 return (java.lang.String) ref; 7409 } else { 7410 com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; 7411 java.lang.String s = bs.toStringUtf8(); 7412 model_ = s; 7413 return s; 7414 } 7415 } 7416 /** 7417 * 7418 * 7419 * <pre> 7420 * The name of the Model resource that produces the predictions via this job, 7421 * must share the same ancestor Location. 7422 * Starting this job has no impact on any existing deployments of the Model 7423 * and their resources. 7424 * Exactly one of model and unmanaged_container_model must be set. 7425 * The model resource name may contain version id or version alias to specify 7426 * the version. 7427 * Example: `projects/{project}/locations/{location}/models/{model}@2` 7428 * or 7429 * `projects/{project}/locations/{location}/models/{model}@golden` 7430 * if no version is specified, the default version will be deployed. 7431 * </pre> 7432 * 7433 * <code>string model = 3 [(.google.api.resource_reference) = { ... }</code> 7434 * 7435 * @return The bytes for model. 7436 */ 7437 @java.lang.Override getModelBytes()7438 public com.google.protobuf.ByteString getModelBytes() { 7439 java.lang.Object ref = model_; 7440 if (ref instanceof java.lang.String) { 7441 com.google.protobuf.ByteString b = 7442 com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref); 7443 model_ = b; 7444 return b; 7445 } else { 7446 return (com.google.protobuf.ByteString) ref; 7447 } 7448 } 7449 7450 public static final int MODEL_VERSION_ID_FIELD_NUMBER = 30; 7451 7452 @SuppressWarnings("serial") 7453 private volatile java.lang.Object modelVersionId_ = ""; 7454 /** 7455 * 7456 * 7457 * <pre> 7458 * Output only. The version ID of the Model that produces the predictions via 7459 * this job. 7460 * </pre> 7461 * 7462 * <code>string model_version_id = 30 [(.google.api.field_behavior) = OUTPUT_ONLY];</code> 7463 * 7464 * @return The modelVersionId. 7465 */ 7466 @java.lang.Override getModelVersionId()7467 public java.lang.String getModelVersionId() { 7468 java.lang.Object ref = modelVersionId_; 7469 if (ref instanceof java.lang.String) { 7470 return (java.lang.String) ref; 7471 } else { 7472 com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; 7473 java.lang.String s = bs.toStringUtf8(); 7474 modelVersionId_ = s; 7475 return s; 7476 } 7477 } 7478 /** 7479 * 7480 * 7481 * <pre> 7482 * Output only. The version ID of the Model that produces the predictions via 7483 * this job. 7484 * </pre> 7485 * 7486 * <code>string model_version_id = 30 [(.google.api.field_behavior) = OUTPUT_ONLY];</code> 7487 * 7488 * @return The bytes for modelVersionId. 
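 * <p>Illustrative sketch (only the accessors shown in this file are used): the
 * two values together reveal which concrete version served the job even when the
 * request referenced a version alias.
 * <pre>
 * // Sketch only: `job` is a BatchPredictionJob retrieved elsewhere.
 * String modelResource = job.getModel();            // may end in "@2" or "@golden"
 * String resolvedVersion = job.getModelVersionId(); // concrete version ID, output only
 * </pre>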
7489 */ 7490 @java.lang.Override getModelVersionIdBytes()7491 public com.google.protobuf.ByteString getModelVersionIdBytes() { 7492 java.lang.Object ref = modelVersionId_; 7493 if (ref instanceof java.lang.String) { 7494 com.google.protobuf.ByteString b = 7495 com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref); 7496 modelVersionId_ = b; 7497 return b; 7498 } else { 7499 return (com.google.protobuf.ByteString) ref; 7500 } 7501 } 7502 7503 public static final int UNMANAGED_CONTAINER_MODEL_FIELD_NUMBER = 28; 7504 private com.google.cloud.aiplatform.v1beta1.UnmanagedContainerModel unmanagedContainerModel_; 7505 /** 7506 * 7507 * 7508 * <pre> 7509 * Contains model information necessary to perform batch prediction without 7510 * requiring uploading to model registry. 7511 * Exactly one of model and unmanaged_container_model must be set. 7512 * </pre> 7513 * 7514 * <code>.google.cloud.aiplatform.v1beta1.UnmanagedContainerModel unmanaged_container_model = 28; 7515 * </code> 7516 * 7517 * @return Whether the unmanagedContainerModel field is set. 7518 */ 7519 @java.lang.Override hasUnmanagedContainerModel()7520 public boolean hasUnmanagedContainerModel() { 7521 return unmanagedContainerModel_ != null; 7522 } 7523 /** 7524 * 7525 * 7526 * <pre> 7527 * Contains model information necessary to perform batch prediction without 7528 * requiring uploading to model registry. 7529 * Exactly one of model and unmanaged_container_model must be set. 7530 * </pre> 7531 * 7532 * <code>.google.cloud.aiplatform.v1beta1.UnmanagedContainerModel unmanaged_container_model = 28; 7533 * </code> 7534 * 7535 * @return The unmanagedContainerModel. 7536 */ 7537 @java.lang.Override getUnmanagedContainerModel()7538 public com.google.cloud.aiplatform.v1beta1.UnmanagedContainerModel getUnmanagedContainerModel() { 7539 return unmanagedContainerModel_ == null 7540 ? com.google.cloud.aiplatform.v1beta1.UnmanagedContainerModel.getDefaultInstance() 7541 : unmanagedContainerModel_; 7542 } 7543 /** 7544 * 7545 * 7546 * <pre> 7547 * Contains model information necessary to perform batch prediction without 7548 * requiring uploading to model registry. 7549 * Exactly one of model and unmanaged_container_model must be set. 7550 * </pre> 7551 * 7552 * <code>.google.cloud.aiplatform.v1beta1.UnmanagedContainerModel unmanaged_container_model = 28; 7553 * </code> 7554 */ 7555 @java.lang.Override 7556 public com.google.cloud.aiplatform.v1beta1.UnmanagedContainerModelOrBuilder getUnmanagedContainerModelOrBuilder()7557 getUnmanagedContainerModelOrBuilder() { 7558 return unmanagedContainerModel_ == null 7559 ? com.google.cloud.aiplatform.v1beta1.UnmanagedContainerModel.getDefaultInstance() 7560 : unmanagedContainerModel_; 7561 } 7562 7563 public static final int INPUT_CONFIG_FIELD_NUMBER = 4; 7564 private com.google.cloud.aiplatform.v1beta1.BatchPredictionJob.InputConfig inputConfig_; 7565 /** 7566 * 7567 * 7568 * <pre> 7569 * Required. Input configuration of the instances on which predictions are 7570 * performed. The schema of any single instance may be specified via the 7571 * [Model's][google.cloud.aiplatform.v1beta1.BatchPredictionJob.model] 7572 * [PredictSchemata's][google.cloud.aiplatform.v1beta1.Model.predict_schemata] 7573 * [instance_schema_uri][google.cloud.aiplatform.v1beta1.PredictSchemata.instance_schema_uri]. 
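 *
 * (Illustrative sketch, not generated documentation: a minimal InputConfig built
 * from the fields above; the builder setter names are assumptions derived from
 * the field names.)
 *   // Sketch only.
 *   BatchPredictionJob.InputConfig inputConfig =
 *       BatchPredictionJob.InputConfig.newBuilder()
 *           .setInstancesFormat("jsonl")
 *           .setGcsSource(GcsSource.newBuilder().addUris("gs://my-bucket/instances.jsonl"))
 *           .build();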
7574 * </pre> 7575 * 7576 * <code> 7577 * .google.cloud.aiplatform.v1beta1.BatchPredictionJob.InputConfig input_config = 4 [(.google.api.field_behavior) = REQUIRED]; 7578 * </code> 7579 * 7580 * @return Whether the inputConfig field is set. 7581 */ 7582 @java.lang.Override hasInputConfig()7583 public boolean hasInputConfig() { 7584 return inputConfig_ != null; 7585 } 7586 /** 7587 * 7588 * 7589 * <pre> 7590 * Required. Input configuration of the instances on which predictions are 7591 * performed. The schema of any single instance may be specified via the 7592 * [Model's][google.cloud.aiplatform.v1beta1.BatchPredictionJob.model] 7593 * [PredictSchemata's][google.cloud.aiplatform.v1beta1.Model.predict_schemata] 7594 * [instance_schema_uri][google.cloud.aiplatform.v1beta1.PredictSchemata.instance_schema_uri]. 7595 * </pre> 7596 * 7597 * <code> 7598 * .google.cloud.aiplatform.v1beta1.BatchPredictionJob.InputConfig input_config = 4 [(.google.api.field_behavior) = REQUIRED]; 7599 * </code> 7600 * 7601 * @return The inputConfig. 7602 */ 7603 @java.lang.Override getInputConfig()7604 public com.google.cloud.aiplatform.v1beta1.BatchPredictionJob.InputConfig getInputConfig() { 7605 return inputConfig_ == null 7606 ? com.google.cloud.aiplatform.v1beta1.BatchPredictionJob.InputConfig.getDefaultInstance() 7607 : inputConfig_; 7608 } 7609 /** 7610 * 7611 * 7612 * <pre> 7613 * Required. Input configuration of the instances on which predictions are 7614 * performed. The schema of any single instance may be specified via the 7615 * [Model's][google.cloud.aiplatform.v1beta1.BatchPredictionJob.model] 7616 * [PredictSchemata's][google.cloud.aiplatform.v1beta1.Model.predict_schemata] 7617 * [instance_schema_uri][google.cloud.aiplatform.v1beta1.PredictSchemata.instance_schema_uri]. 7618 * </pre> 7619 * 7620 * <code> 7621 * .google.cloud.aiplatform.v1beta1.BatchPredictionJob.InputConfig input_config = 4 [(.google.api.field_behavior) = REQUIRED]; 7622 * </code> 7623 */ 7624 @java.lang.Override 7625 public com.google.cloud.aiplatform.v1beta1.BatchPredictionJob.InputConfigOrBuilder getInputConfigOrBuilder()7626 getInputConfigOrBuilder() { 7627 return inputConfig_ == null 7628 ? com.google.cloud.aiplatform.v1beta1.BatchPredictionJob.InputConfig.getDefaultInstance() 7629 : inputConfig_; 7630 } 7631 7632 public static final int INSTANCE_CONFIG_FIELD_NUMBER = 27; 7633 private com.google.cloud.aiplatform.v1beta1.BatchPredictionJob.InstanceConfig instanceConfig_; 7634 /** 7635 * 7636 * 7637 * <pre> 7638 * Configuration for how to convert batch prediction input instances to the 7639 * prediction instances that are sent to the Model. 7640 * </pre> 7641 * 7642 * <code>.google.cloud.aiplatform.v1beta1.BatchPredictionJob.InstanceConfig instance_config = 27; 7643 * </code> 7644 * 7645 * @return Whether the instanceConfig field is set. 7646 */ 7647 @java.lang.Override hasInstanceConfig()7648 public boolean hasInstanceConfig() { 7649 return instanceConfig_ != null; 7650 } 7651 /** 7652 * 7653 * 7654 * <pre> 7655 * Configuration for how to convert batch prediction input instances to the 7656 * prediction instances that are sent to the Model. 7657 * </pre> 7658 * 7659 * <code>.google.cloud.aiplatform.v1beta1.BatchPredictionJob.InstanceConfig instance_config = 27; 7660 * </code> 7661 * 7662 * @return The instanceConfig. 7663 */ 7664 @java.lang.Override getInstanceConfig()7665 public com.google.cloud.aiplatform.v1beta1.BatchPredictionJob.InstanceConfig getInstanceConfig() { 7666 return instanceConfig_ == null 7667 ? 
com.google.cloud.aiplatform.v1beta1.BatchPredictionJob.InstanceConfig.getDefaultInstance() 7668 : instanceConfig_; 7669 } 7670 /** 7671 * 7672 * 7673 * <pre> 7674 * Configuration for how to convert batch prediction input instances to the 7675 * prediction instances that are sent to the Model. 7676 * </pre> 7677 * 7678 * <code>.google.cloud.aiplatform.v1beta1.BatchPredictionJob.InstanceConfig instance_config = 27; 7679 * </code> 7680 */ 7681 @java.lang.Override 7682 public com.google.cloud.aiplatform.v1beta1.BatchPredictionJob.InstanceConfigOrBuilder getInstanceConfigOrBuilder()7683 getInstanceConfigOrBuilder() { 7684 return instanceConfig_ == null 7685 ? com.google.cloud.aiplatform.v1beta1.BatchPredictionJob.InstanceConfig.getDefaultInstance() 7686 : instanceConfig_; 7687 } 7688 7689 public static final int MODEL_PARAMETERS_FIELD_NUMBER = 5; 7690 private com.google.protobuf.Value modelParameters_; 7691 /** 7692 * 7693 * 7694 * <pre> 7695 * The parameters that govern the predictions. The schema of the parameters 7696 * may be specified via the 7697 * [Model's][google.cloud.aiplatform.v1beta1.BatchPredictionJob.model] 7698 * [PredictSchemata's][google.cloud.aiplatform.v1beta1.Model.predict_schemata] 7699 * [parameters_schema_uri][google.cloud.aiplatform.v1beta1.PredictSchemata.parameters_schema_uri]. 7700 * </pre> 7701 * 7702 * <code>.google.protobuf.Value model_parameters = 5;</code> 7703 * 7704 * @return Whether the modelParameters field is set. 7705 */ 7706 @java.lang.Override hasModelParameters()7707 public boolean hasModelParameters() { 7708 return modelParameters_ != null; 7709 } 7710 /** 7711 * 7712 * 7713 * <pre> 7714 * The parameters that govern the predictions. The schema of the parameters 7715 * may be specified via the 7716 * [Model's][google.cloud.aiplatform.v1beta1.BatchPredictionJob.model] 7717 * [PredictSchemata's][google.cloud.aiplatform.v1beta1.Model.predict_schemata] 7718 * [parameters_schema_uri][google.cloud.aiplatform.v1beta1.PredictSchemata.parameters_schema_uri]. 7719 * </pre> 7720 * 7721 * <code>.google.protobuf.Value model_parameters = 5;</code> 7722 * 7723 * @return The modelParameters. 7724 */ 7725 @java.lang.Override getModelParameters()7726 public com.google.protobuf.Value getModelParameters() { 7727 return modelParameters_ == null 7728 ? com.google.protobuf.Value.getDefaultInstance() 7729 : modelParameters_; 7730 } 7731 /** 7732 * 7733 * 7734 * <pre> 7735 * The parameters that govern the predictions. The schema of the parameters 7736 * may be specified via the 7737 * [Model's][google.cloud.aiplatform.v1beta1.BatchPredictionJob.model] 7738 * [PredictSchemata's][google.cloud.aiplatform.v1beta1.Model.predict_schemata] 7739 * [parameters_schema_uri][google.cloud.aiplatform.v1beta1.PredictSchemata.parameters_schema_uri]. 7740 * </pre> 7741 * 7742 * <code>.google.protobuf.Value model_parameters = 5;</code> 7743 */ 7744 @java.lang.Override getModelParametersOrBuilder()7745 public com.google.protobuf.ValueOrBuilder getModelParametersOrBuilder() { 7746 return modelParameters_ == null 7747 ? com.google.protobuf.Value.getDefaultInstance() 7748 : modelParameters_; 7749 } 7750 7751 public static final int OUTPUT_CONFIG_FIELD_NUMBER = 6; 7752 private com.google.cloud.aiplatform.v1beta1.BatchPredictionJob.OutputConfig outputConfig_; 7753 /** 7754 * 7755 * 7756 * <pre> 7757 * Required. The Configuration specifying where output predictions should 7758 * be written. 
7759 * The schema of any single prediction may be specified as a concatenation 7760 * of [Model's][google.cloud.aiplatform.v1beta1.BatchPredictionJob.model] 7761 * [PredictSchemata's][google.cloud.aiplatform.v1beta1.Model.predict_schemata] 7762 * [instance_schema_uri][google.cloud.aiplatform.v1beta1.PredictSchemata.instance_schema_uri] 7763 * and 7764 * [prediction_schema_uri][google.cloud.aiplatform.v1beta1.PredictSchemata.prediction_schema_uri]. 7765 * </pre> 7766 * 7767 * <code> 7768 * .google.cloud.aiplatform.v1beta1.BatchPredictionJob.OutputConfig output_config = 6 [(.google.api.field_behavior) = REQUIRED]; 7769 * </code> 7770 * 7771 * @return Whether the outputConfig field is set. 7772 */ 7773 @java.lang.Override hasOutputConfig()7774 public boolean hasOutputConfig() { 7775 return outputConfig_ != null; 7776 } 7777 /** 7778 * 7779 * 7780 * <pre> 7781 * Required. The Configuration specifying where output predictions should 7782 * be written. 7783 * The schema of any single prediction may be specified as a concatenation 7784 * of [Model's][google.cloud.aiplatform.v1beta1.BatchPredictionJob.model] 7785 * [PredictSchemata's][google.cloud.aiplatform.v1beta1.Model.predict_schemata] 7786 * [instance_schema_uri][google.cloud.aiplatform.v1beta1.PredictSchemata.instance_schema_uri] 7787 * and 7788 * [prediction_schema_uri][google.cloud.aiplatform.v1beta1.PredictSchemata.prediction_schema_uri]. 7789 * </pre> 7790 * 7791 * <code> 7792 * .google.cloud.aiplatform.v1beta1.BatchPredictionJob.OutputConfig output_config = 6 [(.google.api.field_behavior) = REQUIRED]; 7793 * </code> 7794 * 7795 * @return The outputConfig. 7796 */ 7797 @java.lang.Override getOutputConfig()7798 public com.google.cloud.aiplatform.v1beta1.BatchPredictionJob.OutputConfig getOutputConfig() { 7799 return outputConfig_ == null 7800 ? com.google.cloud.aiplatform.v1beta1.BatchPredictionJob.OutputConfig.getDefaultInstance() 7801 : outputConfig_; 7802 } 7803 /** 7804 * 7805 * 7806 * <pre> 7807 * Required. The Configuration specifying where output predictions should 7808 * be written. 7809 * The schema of any single prediction may be specified as a concatenation 7810 * of [Model's][google.cloud.aiplatform.v1beta1.BatchPredictionJob.model] 7811 * [PredictSchemata's][google.cloud.aiplatform.v1beta1.Model.predict_schemata] 7812 * [instance_schema_uri][google.cloud.aiplatform.v1beta1.PredictSchemata.instance_schema_uri] 7813 * and 7814 * [prediction_schema_uri][google.cloud.aiplatform.v1beta1.PredictSchemata.prediction_schema_uri]. 7815 * </pre> 7816 * 7817 * <code> 7818 * .google.cloud.aiplatform.v1beta1.BatchPredictionJob.OutputConfig output_config = 6 [(.google.api.field_behavior) = REQUIRED]; 7819 * </code> 7820 */ 7821 @java.lang.Override 7822 public com.google.cloud.aiplatform.v1beta1.BatchPredictionJob.OutputConfigOrBuilder getOutputConfigOrBuilder()7823 getOutputConfigOrBuilder() { 7824 return outputConfig_ == null 7825 ? com.google.cloud.aiplatform.v1beta1.BatchPredictionJob.OutputConfig.getDefaultInstance() 7826 : outputConfig_; 7827 } 7828 7829 public static final int DEDICATED_RESOURCES_FIELD_NUMBER = 7; 7830 private com.google.cloud.aiplatform.v1beta1.BatchDedicatedResources dedicatedResources_; 7831 /** 7832 * 7833 * 7834 * <pre> 7835 * The config of resources used by the Model during the batch prediction. 
If 7836 * the Model 7837 * [supports][google.cloud.aiplatform.v1beta1.Model.supported_deployment_resources_types] 7838 * DEDICATED_RESOURCES this config may be provided (and the job will use these 7839 * resources), if the Model doesn't support AUTOMATIC_RESOURCES, this config 7840 * must be provided. 7841 * </pre> 7842 * 7843 * <code>.google.cloud.aiplatform.v1beta1.BatchDedicatedResources dedicated_resources = 7;</code> 7844 * 7845 * @return Whether the dedicatedResources field is set. 7846 */ 7847 @java.lang.Override hasDedicatedResources()7848 public boolean hasDedicatedResources() { 7849 return dedicatedResources_ != null; 7850 } 7851 /** 7852 * 7853 * 7854 * <pre> 7855 * The config of resources used by the Model during the batch prediction. If 7856 * the Model 7857 * [supports][google.cloud.aiplatform.v1beta1.Model.supported_deployment_resources_types] 7858 * DEDICATED_RESOURCES this config may be provided (and the job will use these 7859 * resources), if the Model doesn't support AUTOMATIC_RESOURCES, this config 7860 * must be provided. 7861 * </pre> 7862 * 7863 * <code>.google.cloud.aiplatform.v1beta1.BatchDedicatedResources dedicated_resources = 7;</code> 7864 * 7865 * @return The dedicatedResources. 7866 */ 7867 @java.lang.Override getDedicatedResources()7868 public com.google.cloud.aiplatform.v1beta1.BatchDedicatedResources getDedicatedResources() { 7869 return dedicatedResources_ == null 7870 ? com.google.cloud.aiplatform.v1beta1.BatchDedicatedResources.getDefaultInstance() 7871 : dedicatedResources_; 7872 } 7873 /** 7874 * 7875 * 7876 * <pre> 7877 * The config of resources used by the Model during the batch prediction. If 7878 * the Model 7879 * [supports][google.cloud.aiplatform.v1beta1.Model.supported_deployment_resources_types] 7880 * DEDICATED_RESOURCES this config may be provided (and the job will use these 7881 * resources), if the Model doesn't support AUTOMATIC_RESOURCES, this config 7882 * must be provided. 7883 * </pre> 7884 * 7885 * <code>.google.cloud.aiplatform.v1beta1.BatchDedicatedResources dedicated_resources = 7;</code> 7886 */ 7887 @java.lang.Override 7888 public com.google.cloud.aiplatform.v1beta1.BatchDedicatedResourcesOrBuilder getDedicatedResourcesOrBuilder()7889 getDedicatedResourcesOrBuilder() { 7890 return dedicatedResources_ == null 7891 ? com.google.cloud.aiplatform.v1beta1.BatchDedicatedResources.getDefaultInstance() 7892 : dedicatedResources_; 7893 } 7894 7895 public static final int SERVICE_ACCOUNT_FIELD_NUMBER = 29; 7896 7897 @SuppressWarnings("serial") 7898 private volatile java.lang.Object serviceAccount_ = ""; 7899 /** 7900 * 7901 * 7902 * <pre> 7903 * The service account that the DeployedModel's container runs as. If not 7904 * specified, a system generated one will be used, which 7905 * has minimal permissions and the custom container, if used, may not have 7906 * enough permission to access other Google Cloud resources. 7907 * Users deploying the Model must have the `iam.serviceAccounts.actAs` 7908 * permission on this service account. 7909 * </pre> 7910 * 7911 * <code>string service_account = 29;</code> 7912 * 7913 * @return The serviceAccount. 
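 * <p>Illustrative sketch (only the accessors shown in this file are assumed):
 * <pre>
 * // Sketch only: `job` is a BatchPredictionJob.
 * if (job.hasDedicatedResources()) {
 *   // Explicitly configured machines are used instead of AUTOMATIC_RESOURCES.
 *   BatchDedicatedResources resources = job.getDedicatedResources();
 * }
 * String runAs = job.getServiceAccount(); // empty means the system-generated default
 * </pre>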
7914 */ 7915 @java.lang.Override getServiceAccount()7916 public java.lang.String getServiceAccount() { 7917 java.lang.Object ref = serviceAccount_; 7918 if (ref instanceof java.lang.String) { 7919 return (java.lang.String) ref; 7920 } else { 7921 com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; 7922 java.lang.String s = bs.toStringUtf8(); 7923 serviceAccount_ = s; 7924 return s; 7925 } 7926 } 7927 /** 7928 * 7929 * 7930 * <pre> 7931 * The service account that the DeployedModel's container runs as. If not 7932 * specified, a system generated one will be used, which 7933 * has minimal permissions and the custom container, if used, may not have 7934 * enough permission to access other Google Cloud resources. 7935 * Users deploying the Model must have the `iam.serviceAccounts.actAs` 7936 * permission on this service account. 7937 * </pre> 7938 * 7939 * <code>string service_account = 29;</code> 7940 * 7941 * @return The bytes for serviceAccount. 7942 */ 7943 @java.lang.Override getServiceAccountBytes()7944 public com.google.protobuf.ByteString getServiceAccountBytes() { 7945 java.lang.Object ref = serviceAccount_; 7946 if (ref instanceof java.lang.String) { 7947 com.google.protobuf.ByteString b = 7948 com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref); 7949 serviceAccount_ = b; 7950 return b; 7951 } else { 7952 return (com.google.protobuf.ByteString) ref; 7953 } 7954 } 7955 7956 public static final int MANUAL_BATCH_TUNING_PARAMETERS_FIELD_NUMBER = 8; 7957 private com.google.cloud.aiplatform.v1beta1.ManualBatchTuningParameters 7958 manualBatchTuningParameters_; 7959 /** 7960 * 7961 * 7962 * <pre> 7963 * Immutable. Parameters configuring the batch behavior. Currently only 7964 * applicable when 7965 * [dedicated_resources][google.cloud.aiplatform.v1beta1.BatchPredictionJob.dedicated_resources] 7966 * are used (in other cases Vertex AI does the tuning itself). 7967 * </pre> 7968 * 7969 * <code> 7970 * .google.cloud.aiplatform.v1beta1.ManualBatchTuningParameters manual_batch_tuning_parameters = 8 [(.google.api.field_behavior) = IMMUTABLE]; 7971 * </code> 7972 * 7973 * @return Whether the manualBatchTuningParameters field is set. 7974 */ 7975 @java.lang.Override hasManualBatchTuningParameters()7976 public boolean hasManualBatchTuningParameters() { 7977 return manualBatchTuningParameters_ != null; 7978 } 7979 /** 7980 * 7981 * 7982 * <pre> 7983 * Immutable. Parameters configuring the batch behavior. Currently only 7984 * applicable when 7985 * [dedicated_resources][google.cloud.aiplatform.v1beta1.BatchPredictionJob.dedicated_resources] 7986 * are used (in other cases Vertex AI does the tuning itself). 7987 * </pre> 7988 * 7989 * <code> 7990 * .google.cloud.aiplatform.v1beta1.ManualBatchTuningParameters manual_batch_tuning_parameters = 8 [(.google.api.field_behavior) = IMMUTABLE]; 7991 * </code> 7992 * 7993 * @return The manualBatchTuningParameters. 7994 */ 7995 @java.lang.Override 7996 public com.google.cloud.aiplatform.v1beta1.ManualBatchTuningParameters getManualBatchTuningParameters()7997 getManualBatchTuningParameters() { 7998 return manualBatchTuningParameters_ == null 7999 ? com.google.cloud.aiplatform.v1beta1.ManualBatchTuningParameters.getDefaultInstance() 8000 : manualBatchTuningParameters_; 8001 } 8002 /** 8003 * 8004 * 8005 * <pre> 8006 * Immutable. Parameters configuring the batch behavior. 
Currently only 8007 * applicable when 8008 * [dedicated_resources][google.cloud.aiplatform.v1beta1.BatchPredictionJob.dedicated_resources] 8009 * are used (in other cases Vertex AI does the tuning itself). 8010 * </pre> 8011 * 8012 * <code> 8013 * .google.cloud.aiplatform.v1beta1.ManualBatchTuningParameters manual_batch_tuning_parameters = 8 [(.google.api.field_behavior) = IMMUTABLE]; 8014 * </code> 8015 */ 8016 @java.lang.Override 8017 public com.google.cloud.aiplatform.v1beta1.ManualBatchTuningParametersOrBuilder getManualBatchTuningParametersOrBuilder()8018 getManualBatchTuningParametersOrBuilder() { 8019 return manualBatchTuningParameters_ == null 8020 ? com.google.cloud.aiplatform.v1beta1.ManualBatchTuningParameters.getDefaultInstance() 8021 : manualBatchTuningParameters_; 8022 } 8023 8024 public static final int GENERATE_EXPLANATION_FIELD_NUMBER = 23; 8025 private boolean generateExplanation_ = false; 8026 /** 8027 * 8028 * 8029 * <pre> 8030 * Generate explanation with the batch prediction results. 8031 * When set to `true`, the batch prediction output changes based on the 8032 * `predictions_format` field of the 8033 * [BatchPredictionJob.output_config][google.cloud.aiplatform.v1beta1.BatchPredictionJob.output_config] 8034 * object: 8035 * * `bigquery`: output includes a column named `explanation`. The value 8036 * is a struct that conforms to the 8037 * [Explanation][google.cloud.aiplatform.v1beta1.Explanation] object. 8038 * * `jsonl`: The JSON objects on each line include an additional entry 8039 * keyed `explanation`. The value of the entry is a JSON object that 8040 * conforms to the 8041 * [Explanation][google.cloud.aiplatform.v1beta1.Explanation] object. 8042 * * `csv`: Generating explanations for CSV format is not supported. 8043 * If this field is set to true, either the 8044 * [Model.explanation_spec][google.cloud.aiplatform.v1beta1.Model.explanation_spec] 8045 * or 8046 * [explanation_spec][google.cloud.aiplatform.v1beta1.BatchPredictionJob.explanation_spec] 8047 * must be populated. 8048 * </pre> 8049 * 8050 * <code>bool generate_explanation = 23;</code> 8051 * 8052 * @return The generateExplanation. 8053 */ 8054 @java.lang.Override getGenerateExplanation()8055 public boolean getGenerateExplanation() { 8056 return generateExplanation_; 8057 } 8058 8059 public static final int EXPLANATION_SPEC_FIELD_NUMBER = 25; 8060 private com.google.cloud.aiplatform.v1beta1.ExplanationSpec explanationSpec_; 8061 /** 8062 * 8063 * 8064 * <pre> 8065 * Explanation configuration for this BatchPredictionJob. Can be 8066 * specified only if 8067 * [generate_explanation][google.cloud.aiplatform.v1beta1.BatchPredictionJob.generate_explanation] 8068 * is set to `true`. 8069 * This value overrides the value of 8070 * [Model.explanation_spec][google.cloud.aiplatform.v1beta1.Model.explanation_spec]. 8071 * All fields of 8072 * [explanation_spec][google.cloud.aiplatform.v1beta1.BatchPredictionJob.explanation_spec] 8073 * are optional in the request. If a field of the 8074 * [explanation_spec][google.cloud.aiplatform.v1beta1.BatchPredictionJob.explanation_spec] 8075 * object is not populated, the corresponding field of the 8076 * [Model.explanation_spec][google.cloud.aiplatform.v1beta1.Model.explanation_spec] 8077 * object is inherited. 8078 * </pre> 8079 * 8080 * <code>.google.cloud.aiplatform.v1beta1.ExplanationSpec explanation_spec = 25;</code> 8081 * 8082 * @return Whether the explanationSpec field is set. 
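 * <p>Illustrative sketch of the constraint described above (builder setter names
 * are assumptions derived from the field names; `explanationSpec` stands for a
 * previously built ExplanationSpec):
 * <pre>
 * // Sketch only: either Model.explanation_spec or the job-level spec must be set.
 * BatchPredictionJob.Builder builder = BatchPredictionJob.newBuilder()
 *     .setGenerateExplanation(true)
 *     .setExplanationSpec(explanationSpec); // omit if the Model already carries one
 * </pre>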
8083 */ 8084 @java.lang.Override hasExplanationSpec()8085 public boolean hasExplanationSpec() { 8086 return explanationSpec_ != null; 8087 } 8088 /** 8089 * 8090 * 8091 * <pre> 8092 * Explanation configuration for this BatchPredictionJob. Can be 8093 * specified only if 8094 * [generate_explanation][google.cloud.aiplatform.v1beta1.BatchPredictionJob.generate_explanation] 8095 * is set to `true`. 8096 * This value overrides the value of 8097 * [Model.explanation_spec][google.cloud.aiplatform.v1beta1.Model.explanation_spec]. 8098 * All fields of 8099 * [explanation_spec][google.cloud.aiplatform.v1beta1.BatchPredictionJob.explanation_spec] 8100 * are optional in the request. If a field of the 8101 * [explanation_spec][google.cloud.aiplatform.v1beta1.BatchPredictionJob.explanation_spec] 8102 * object is not populated, the corresponding field of the 8103 * [Model.explanation_spec][google.cloud.aiplatform.v1beta1.Model.explanation_spec] 8104 * object is inherited. 8105 * </pre> 8106 * 8107 * <code>.google.cloud.aiplatform.v1beta1.ExplanationSpec explanation_spec = 25;</code> 8108 * 8109 * @return The explanationSpec. 8110 */ 8111 @java.lang.Override getExplanationSpec()8112 public com.google.cloud.aiplatform.v1beta1.ExplanationSpec getExplanationSpec() { 8113 return explanationSpec_ == null 8114 ? com.google.cloud.aiplatform.v1beta1.ExplanationSpec.getDefaultInstance() 8115 : explanationSpec_; 8116 } 8117 /** 8118 * 8119 * 8120 * <pre> 8121 * Explanation configuration for this BatchPredictionJob. Can be 8122 * specified only if 8123 * [generate_explanation][google.cloud.aiplatform.v1beta1.BatchPredictionJob.generate_explanation] 8124 * is set to `true`. 8125 * This value overrides the value of 8126 * [Model.explanation_spec][google.cloud.aiplatform.v1beta1.Model.explanation_spec]. 8127 * All fields of 8128 * [explanation_spec][google.cloud.aiplatform.v1beta1.BatchPredictionJob.explanation_spec] 8129 * are optional in the request. If a field of the 8130 * [explanation_spec][google.cloud.aiplatform.v1beta1.BatchPredictionJob.explanation_spec] 8131 * object is not populated, the corresponding field of the 8132 * [Model.explanation_spec][google.cloud.aiplatform.v1beta1.Model.explanation_spec] 8133 * object is inherited. 8134 * </pre> 8135 * 8136 * <code>.google.cloud.aiplatform.v1beta1.ExplanationSpec explanation_spec = 25;</code> 8137 */ 8138 @java.lang.Override 8139 public com.google.cloud.aiplatform.v1beta1.ExplanationSpecOrBuilder getExplanationSpecOrBuilder()8140 getExplanationSpecOrBuilder() { 8141 return explanationSpec_ == null 8142 ? com.google.cloud.aiplatform.v1beta1.ExplanationSpec.getDefaultInstance() 8143 : explanationSpec_; 8144 } 8145 8146 public static final int OUTPUT_INFO_FIELD_NUMBER = 9; 8147 private com.google.cloud.aiplatform.v1beta1.BatchPredictionJob.OutputInfo outputInfo_; 8148 /** 8149 * 8150 * 8151 * <pre> 8152 * Output only. Information further describing the output of this job. 8153 * </pre> 8154 * 8155 * <code> 8156 * .google.cloud.aiplatform.v1beta1.BatchPredictionJob.OutputInfo output_info = 9 [(.google.api.field_behavior) = OUTPUT_ONLY]; 8157 * </code> 8158 * 8159 * @return Whether the outputInfo field is set. 8160 */ 8161 @java.lang.Override hasOutputInfo()8162 public boolean hasOutputInfo() { 8163 return outputInfo_ != null; 8164 } 8165 /** 8166 * 8167 * 8168 * <pre> 8169 * Output only. Information further describing the output of this job. 
8170 * </pre> 8171 * 8172 * <code> 8173 * .google.cloud.aiplatform.v1beta1.BatchPredictionJob.OutputInfo output_info = 9 [(.google.api.field_behavior) = OUTPUT_ONLY]; 8174 * </code> 8175 * 8176 * @return The outputInfo. 8177 */ 8178 @java.lang.Override getOutputInfo()8179 public com.google.cloud.aiplatform.v1beta1.BatchPredictionJob.OutputInfo getOutputInfo() { 8180 return outputInfo_ == null 8181 ? com.google.cloud.aiplatform.v1beta1.BatchPredictionJob.OutputInfo.getDefaultInstance() 8182 : outputInfo_; 8183 } 8184 /** 8185 * 8186 * 8187 * <pre> 8188 * Output only. Information further describing the output of this job. 8189 * </pre> 8190 * 8191 * <code> 8192 * .google.cloud.aiplatform.v1beta1.BatchPredictionJob.OutputInfo output_info = 9 [(.google.api.field_behavior) = OUTPUT_ONLY]; 8193 * </code> 8194 */ 8195 @java.lang.Override 8196 public com.google.cloud.aiplatform.v1beta1.BatchPredictionJob.OutputInfoOrBuilder getOutputInfoOrBuilder()8197 getOutputInfoOrBuilder() { 8198 return outputInfo_ == null 8199 ? com.google.cloud.aiplatform.v1beta1.BatchPredictionJob.OutputInfo.getDefaultInstance() 8200 : outputInfo_; 8201 } 8202 8203 public static final int STATE_FIELD_NUMBER = 10; 8204 private int state_ = 0; 8205 /** 8206 * 8207 * 8208 * <pre> 8209 * Output only. The detailed state of the job. 8210 * </pre> 8211 * 8212 * <code> 8213 * .google.cloud.aiplatform.v1beta1.JobState state = 10 [(.google.api.field_behavior) = OUTPUT_ONLY]; 8214 * </code> 8215 * 8216 * @return The enum numeric value on the wire for state. 8217 */ 8218 @java.lang.Override getStateValue()8219 public int getStateValue() { 8220 return state_; 8221 } 8222 /** 8223 * 8224 * 8225 * <pre> 8226 * Output only. The detailed state of the job. 8227 * </pre> 8228 * 8229 * <code> 8230 * .google.cloud.aiplatform.v1beta1.JobState state = 10 [(.google.api.field_behavior) = OUTPUT_ONLY]; 8231 * </code> 8232 * 8233 * @return The state. 8234 */ 8235 @java.lang.Override getState()8236 public com.google.cloud.aiplatform.v1beta1.JobState getState() { 8237 com.google.cloud.aiplatform.v1beta1.JobState result = 8238 com.google.cloud.aiplatform.v1beta1.JobState.forNumber(state_); 8239 return result == null ? com.google.cloud.aiplatform.v1beta1.JobState.UNRECOGNIZED : result; 8240 } 8241 8242 public static final int ERROR_FIELD_NUMBER = 11; 8243 private com.google.rpc.Status error_; 8244 /** 8245 * 8246 * 8247 * <pre> 8248 * Output only. Only populated when the job's state is JOB_STATE_FAILED or 8249 * JOB_STATE_CANCELLED. 8250 * </pre> 8251 * 8252 * <code>.google.rpc.Status error = 11 [(.google.api.field_behavior) = OUTPUT_ONLY];</code> 8253 * 8254 * @return Whether the error field is set. 8255 */ 8256 @java.lang.Override hasError()8257 public boolean hasError() { 8258 return error_ != null; 8259 } 8260 /** 8261 * 8262 * 8263 * <pre> 8264 * Output only. Only populated when the job's state is JOB_STATE_FAILED or 8265 * JOB_STATE_CANCELLED. 8266 * </pre> 8267 * 8268 * <code>.google.rpc.Status error = 11 [(.google.api.field_behavior) = OUTPUT_ONLY];</code> 8269 * 8270 * @return The error. 8271 */ 8272 @java.lang.Override getError()8273 public com.google.rpc.Status getError() { 8274 return error_ == null ? com.google.rpc.Status.getDefaultInstance() : error_; 8275 } 8276 /** 8277 * 8278 * 8279 * <pre> 8280 * Output only. Only populated when the job's state is JOB_STATE_FAILED or 8281 * JOB_STATE_CANCELLED. 
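 *
 * (Illustrative sketch of the state/error contract described above; only
 * accessors shown in this file are used.)
 *   // Sketch only.
 *   if (job.getState() == JobState.JOB_STATE_FAILED && job.hasError()) {
 *     com.google.rpc.Status error = job.getError();
 *     System.err.println(error.getCode() + ": " + error.getMessage());
 *   }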
8282 * </pre> 8283 * 8284 * <code>.google.rpc.Status error = 11 [(.google.api.field_behavior) = OUTPUT_ONLY];</code> 8285 */ 8286 @java.lang.Override getErrorOrBuilder()8287 public com.google.rpc.StatusOrBuilder getErrorOrBuilder() { 8288 return error_ == null ? com.google.rpc.Status.getDefaultInstance() : error_; 8289 } 8290 8291 public static final int PARTIAL_FAILURES_FIELD_NUMBER = 12; 8292 8293 @SuppressWarnings("serial") 8294 private java.util.List<com.google.rpc.Status> partialFailures_; 8295 /** 8296 * 8297 * 8298 * <pre> 8299 * Output only. Partial failures encountered. 8300 * For example, single files that can't be read. 8301 * This field never exceeds 20 entries. 8302 * Status details fields contain standard Google Cloud error details. 8303 * </pre> 8304 * 8305 * <code> 8306 * repeated .google.rpc.Status partial_failures = 12 [(.google.api.field_behavior) = OUTPUT_ONLY]; 8307 * </code> 8308 */ 8309 @java.lang.Override getPartialFailuresList()8310 public java.util.List<com.google.rpc.Status> getPartialFailuresList() { 8311 return partialFailures_; 8312 } 8313 /** 8314 * 8315 * 8316 * <pre> 8317 * Output only. Partial failures encountered. 8318 * For example, single files that can't be read. 8319 * This field never exceeds 20 entries. 8320 * Status details fields contain standard Google Cloud error details. 8321 * </pre> 8322 * 8323 * <code> 8324 * repeated .google.rpc.Status partial_failures = 12 [(.google.api.field_behavior) = OUTPUT_ONLY]; 8325 * </code> 8326 */ 8327 @java.lang.Override 8328 public java.util.List<? extends com.google.rpc.StatusOrBuilder> getPartialFailuresOrBuilderList()8329 getPartialFailuresOrBuilderList() { 8330 return partialFailures_; 8331 } 8332 /** 8333 * 8334 * 8335 * <pre> 8336 * Output only. Partial failures encountered. 8337 * For example, single files that can't be read. 8338 * This field never exceeds 20 entries. 8339 * Status details fields contain standard Google Cloud error details. 8340 * </pre> 8341 * 8342 * <code> 8343 * repeated .google.rpc.Status partial_failures = 12 [(.google.api.field_behavior) = OUTPUT_ONLY]; 8344 * </code> 8345 */ 8346 @java.lang.Override getPartialFailuresCount()8347 public int getPartialFailuresCount() { 8348 return partialFailures_.size(); 8349 } 8350 /** 8351 * 8352 * 8353 * <pre> 8354 * Output only. Partial failures encountered. 8355 * For example, single files that can't be read. 8356 * This field never exceeds 20 entries. 8357 * Status details fields contain standard Google Cloud error details. 8358 * </pre> 8359 * 8360 * <code> 8361 * repeated .google.rpc.Status partial_failures = 12 [(.google.api.field_behavior) = OUTPUT_ONLY]; 8362 * </code> 8363 */ 8364 @java.lang.Override getPartialFailures(int index)8365 public com.google.rpc.Status getPartialFailures(int index) { 8366 return partialFailures_.get(index); 8367 } 8368 /** 8369 * 8370 * 8371 * <pre> 8372 * Output only. Partial failures encountered. 8373 * For example, single files that can't be read. 8374 * This field never exceeds 20 entries. 8375 * Status details fields contain standard Google Cloud error details. 
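 *
 * (Illustrative sketch: the repeated-field accessors above allow either
 * index-based or list-based iteration.)
 *   // Sketch only.
 *   for (com.google.rpc.Status failure : job.getPartialFailuresList()) {
 *     System.out.println("partial failure: " + failure.getMessage()); // at most 20 entries
 *   }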
8376 * </pre> 8377 * 8378 * <code> 8379 * repeated .google.rpc.Status partial_failures = 12 [(.google.api.field_behavior) = OUTPUT_ONLY]; 8380 * </code> 8381 */ 8382 @java.lang.Override getPartialFailuresOrBuilder(int index)8383 public com.google.rpc.StatusOrBuilder getPartialFailuresOrBuilder(int index) { 8384 return partialFailures_.get(index); 8385 } 8386 8387 public static final int RESOURCES_CONSUMED_FIELD_NUMBER = 13; 8388 private com.google.cloud.aiplatform.v1beta1.ResourcesConsumed resourcesConsumed_; 8389 /** 8390 * 8391 * 8392 * <pre> 8393 * Output only. Information about resources that had been consumed by this 8394 * job. Provided in real time at best effort basis, as well as a final value 8395 * once the job completes. 8396 * Note: This field currently may be not populated for batch predictions that 8397 * use AutoML Models. 8398 * </pre> 8399 * 8400 * <code> 8401 * .google.cloud.aiplatform.v1beta1.ResourcesConsumed resources_consumed = 13 [(.google.api.field_behavior) = OUTPUT_ONLY]; 8402 * </code> 8403 * 8404 * @return Whether the resourcesConsumed field is set. 8405 */ 8406 @java.lang.Override hasResourcesConsumed()8407 public boolean hasResourcesConsumed() { 8408 return resourcesConsumed_ != null; 8409 } 8410 /** 8411 * 8412 * 8413 * <pre> 8414 * Output only. Information about resources that had been consumed by this 8415 * job. Provided in real time at best effort basis, as well as a final value 8416 * once the job completes. 8417 * Note: This field currently may be not populated for batch predictions that 8418 * use AutoML Models. 8419 * </pre> 8420 * 8421 * <code> 8422 * .google.cloud.aiplatform.v1beta1.ResourcesConsumed resources_consumed = 13 [(.google.api.field_behavior) = OUTPUT_ONLY]; 8423 * </code> 8424 * 8425 * @return The resourcesConsumed. 8426 */ 8427 @java.lang.Override getResourcesConsumed()8428 public com.google.cloud.aiplatform.v1beta1.ResourcesConsumed getResourcesConsumed() { 8429 return resourcesConsumed_ == null 8430 ? com.google.cloud.aiplatform.v1beta1.ResourcesConsumed.getDefaultInstance() 8431 : resourcesConsumed_; 8432 } 8433 /** 8434 * 8435 * 8436 * <pre> 8437 * Output only. Information about resources that had been consumed by this 8438 * job. Provided in real time at best effort basis, as well as a final value 8439 * once the job completes. 8440 * Note: This field currently may be not populated for batch predictions that 8441 * use AutoML Models. 8442 * </pre> 8443 * 8444 * <code> 8445 * .google.cloud.aiplatform.v1beta1.ResourcesConsumed resources_consumed = 13 [(.google.api.field_behavior) = OUTPUT_ONLY]; 8446 * </code> 8447 */ 8448 @java.lang.Override 8449 public com.google.cloud.aiplatform.v1beta1.ResourcesConsumedOrBuilder getResourcesConsumedOrBuilder()8450 getResourcesConsumedOrBuilder() { 8451 return resourcesConsumed_ == null 8452 ? com.google.cloud.aiplatform.v1beta1.ResourcesConsumed.getDefaultInstance() 8453 : resourcesConsumed_; 8454 } 8455 8456 public static final int COMPLETION_STATS_FIELD_NUMBER = 14; 8457 private com.google.cloud.aiplatform.v1beta1.CompletionStats completionStats_; 8458 /** 8459 * 8460 * 8461 * <pre> 8462 * Output only. Statistics on completed and failed prediction instances. 8463 * </pre> 8464 * 8465 * <code> 8466 * .google.cloud.aiplatform.v1beta1.CompletionStats completion_stats = 14 [(.google.api.field_behavior) = OUTPUT_ONLY]; 8467 * </code> 8468 * 8469 * @return Whether the completionStats field is set. 
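 * <p>Illustrative sketch combining the two progress-related fields (the getter
 * names on ResourcesConsumed and CompletionStats are assumptions based on their
 * proto field names, which are not shown in this file):
 * <pre>
 * // Sketch only.
 * if (job.hasCompletionStats()) {
 *   CompletionStats stats = job.getCompletionStats();
 *   long succeeded = stats.getSuccessfulCount();
 *   long failed = stats.getFailedCount();
 * }
 * if (job.hasResourcesConsumed()) {
 *   double replicaHours = job.getResourcesConsumed().getReplicaHours();
 * }
 * </pre>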
8470 */ 8471 @java.lang.Override hasCompletionStats()8472 public boolean hasCompletionStats() { 8473 return completionStats_ != null; 8474 } 8475 /** 8476 * 8477 * 8478 * <pre> 8479 * Output only. Statistics on completed and failed prediction instances. 8480 * </pre> 8481 * 8482 * <code> 8483 * .google.cloud.aiplatform.v1beta1.CompletionStats completion_stats = 14 [(.google.api.field_behavior) = OUTPUT_ONLY]; 8484 * </code> 8485 * 8486 * @return The completionStats. 8487 */ 8488 @java.lang.Override getCompletionStats()8489 public com.google.cloud.aiplatform.v1beta1.CompletionStats getCompletionStats() { 8490 return completionStats_ == null 8491 ? com.google.cloud.aiplatform.v1beta1.CompletionStats.getDefaultInstance() 8492 : completionStats_; 8493 } 8494 /** 8495 * 8496 * 8497 * <pre> 8498 * Output only. Statistics on completed and failed prediction instances. 8499 * </pre> 8500 * 8501 * <code> 8502 * .google.cloud.aiplatform.v1beta1.CompletionStats completion_stats = 14 [(.google.api.field_behavior) = OUTPUT_ONLY]; 8503 * </code> 8504 */ 8505 @java.lang.Override 8506 public com.google.cloud.aiplatform.v1beta1.CompletionStatsOrBuilder getCompletionStatsOrBuilder()8507 getCompletionStatsOrBuilder() { 8508 return completionStats_ == null 8509 ? com.google.cloud.aiplatform.v1beta1.CompletionStats.getDefaultInstance() 8510 : completionStats_; 8511 } 8512 8513 public static final int CREATE_TIME_FIELD_NUMBER = 15; 8514 private com.google.protobuf.Timestamp createTime_; 8515 /** 8516 * 8517 * 8518 * <pre> 8519 * Output only. Time when the BatchPredictionJob was created. 8520 * </pre> 8521 * 8522 * <code>.google.protobuf.Timestamp create_time = 15 [(.google.api.field_behavior) = OUTPUT_ONLY]; 8523 * </code> 8524 * 8525 * @return Whether the createTime field is set. 8526 */ 8527 @java.lang.Override hasCreateTime()8528 public boolean hasCreateTime() { 8529 return createTime_ != null; 8530 } 8531 /** 8532 * 8533 * 8534 * <pre> 8535 * Output only. Time when the BatchPredictionJob was created. 8536 * </pre> 8537 * 8538 * <code>.google.protobuf.Timestamp create_time = 15 [(.google.api.field_behavior) = OUTPUT_ONLY]; 8539 * </code> 8540 * 8541 * @return The createTime. 8542 */ 8543 @java.lang.Override getCreateTime()8544 public com.google.protobuf.Timestamp getCreateTime() { 8545 return createTime_ == null ? com.google.protobuf.Timestamp.getDefaultInstance() : createTime_; 8546 } 8547 /** 8548 * 8549 * 8550 * <pre> 8551 * Output only. Time when the BatchPredictionJob was created. 8552 * </pre> 8553 * 8554 * <code>.google.protobuf.Timestamp create_time = 15 [(.google.api.field_behavior) = OUTPUT_ONLY]; 8555 * </code> 8556 */ 8557 @java.lang.Override getCreateTimeOrBuilder()8558 public com.google.protobuf.TimestampOrBuilder getCreateTimeOrBuilder() { 8559 return createTime_ == null ? com.google.protobuf.Timestamp.getDefaultInstance() : createTime_; 8560 } 8561 8562 public static final int START_TIME_FIELD_NUMBER = 16; 8563 private com.google.protobuf.Timestamp startTime_; 8564 /** 8565 * 8566 * 8567 * <pre> 8568 * Output only. Time when the BatchPredictionJob for the first time entered 8569 * the `JOB_STATE_RUNNING` state. 8570 * </pre> 8571 * 8572 * <code>.google.protobuf.Timestamp start_time = 16 [(.google.api.field_behavior) = OUTPUT_ONLY]; 8573 * </code> 8574 * 8575 * @return Whether the startTime field is set. 8576 */ 8577 @java.lang.Override hasStartTime()8578 public boolean hasStartTime() { 8579 return startTime_ != null; 8580 } 8581 /** 8582 * 8583 * 8584 * <pre> 8585 * Output only. 
Time when the BatchPredictionJob for the first time entered 8586 * the `JOB_STATE_RUNNING` state. 8587 * </pre> 8588 * 8589 * <code>.google.protobuf.Timestamp start_time = 16 [(.google.api.field_behavior) = OUTPUT_ONLY]; 8590 * </code> 8591 * 8592 * @return The startTime. 8593 */ 8594 @java.lang.Override getStartTime()8595 public com.google.protobuf.Timestamp getStartTime() { 8596 return startTime_ == null ? com.google.protobuf.Timestamp.getDefaultInstance() : startTime_; 8597 } 8598 /** 8599 * 8600 * 8601 * <pre> 8602 * Output only. Time when the BatchPredictionJob for the first time entered 8603 * the `JOB_STATE_RUNNING` state. 8604 * </pre> 8605 * 8606 * <code>.google.protobuf.Timestamp start_time = 16 [(.google.api.field_behavior) = OUTPUT_ONLY]; 8607 * </code> 8608 */ 8609 @java.lang.Override getStartTimeOrBuilder()8610 public com.google.protobuf.TimestampOrBuilder getStartTimeOrBuilder() { 8611 return startTime_ == null ? com.google.protobuf.Timestamp.getDefaultInstance() : startTime_; 8612 } 8613 8614 public static final int END_TIME_FIELD_NUMBER = 17; 8615 private com.google.protobuf.Timestamp endTime_; 8616 /** 8617 * 8618 * 8619 * <pre> 8620 * Output only. Time when the BatchPredictionJob entered any of the following 8621 * states: `JOB_STATE_SUCCEEDED`, `JOB_STATE_FAILED`, `JOB_STATE_CANCELLED`. 8622 * </pre> 8623 * 8624 * <code>.google.protobuf.Timestamp end_time = 17 [(.google.api.field_behavior) = OUTPUT_ONLY]; 8625 * </code> 8626 * 8627 * @return Whether the endTime field is set. 8628 */ 8629 @java.lang.Override hasEndTime()8630 public boolean hasEndTime() { 8631 return endTime_ != null; 8632 } 8633 /** 8634 * 8635 * 8636 * <pre> 8637 * Output only. Time when the BatchPredictionJob entered any of the following 8638 * states: `JOB_STATE_SUCCEEDED`, `JOB_STATE_FAILED`, `JOB_STATE_CANCELLED`. 8639 * </pre> 8640 * 8641 * <code>.google.protobuf.Timestamp end_time = 17 [(.google.api.field_behavior) = OUTPUT_ONLY]; 8642 * </code> 8643 * 8644 * @return The endTime. 8645 */ 8646 @java.lang.Override getEndTime()8647 public com.google.protobuf.Timestamp getEndTime() { 8648 return endTime_ == null ? com.google.protobuf.Timestamp.getDefaultInstance() : endTime_; 8649 } 8650 /** 8651 * 8652 * 8653 * <pre> 8654 * Output only. Time when the BatchPredictionJob entered any of the following 8655 * states: `JOB_STATE_SUCCEEDED`, `JOB_STATE_FAILED`, `JOB_STATE_CANCELLED`. 8656 * </pre> 8657 * 8658 * <code>.google.protobuf.Timestamp end_time = 17 [(.google.api.field_behavior) = OUTPUT_ONLY]; 8659 * </code> 8660 */ 8661 @java.lang.Override getEndTimeOrBuilder()8662 public com.google.protobuf.TimestampOrBuilder getEndTimeOrBuilder() { 8663 return endTime_ == null ? com.google.protobuf.Timestamp.getDefaultInstance() : endTime_; 8664 } 8665 8666 public static final int UPDATE_TIME_FIELD_NUMBER = 18; 8667 private com.google.protobuf.Timestamp updateTime_; 8668 /** 8669 * 8670 * 8671 * <pre> 8672 * Output only. Time when the BatchPredictionJob was most recently updated. 8673 * </pre> 8674 * 8675 * <code>.google.protobuf.Timestamp update_time = 18 [(.google.api.field_behavior) = OUTPUT_ONLY]; 8676 * </code> 8677 * 8678 * @return Whether the updateTime field is set. 8679 */ 8680 @java.lang.Override hasUpdateTime()8681 public boolean hasUpdateTime() { 8682 return updateTime_ != null; 8683 } 8684 /** 8685 * 8686 * 8687 * <pre> 8688 * Output only. Time when the BatchPredictionJob was most recently updated. 
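 *
 * (Illustrative sketch: the create/start/end/update timestamps describe the job
 * lifecycle; the Timestamps helper comes from protobuf-java-util and is an
 * assumption, not part of this class.)
 *   // Sketch only.
 *   if (job.hasStartTime() && job.hasEndTime()) {
 *     com.google.protobuf.Duration runtime =
 *         com.google.protobuf.util.Timestamps.between(job.getStartTime(), job.getEndTime());
 *   }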
8689 * </pre> 8690 * 8691 * <code>.google.protobuf.Timestamp update_time = 18 [(.google.api.field_behavior) = OUTPUT_ONLY]; 8692 * </code> 8693 * 8694 * @return The updateTime. 8695 */ 8696 @java.lang.Override getUpdateTime()8697 public com.google.protobuf.Timestamp getUpdateTime() { 8698 return updateTime_ == null ? com.google.protobuf.Timestamp.getDefaultInstance() : updateTime_; 8699 } 8700 /** 8701 * 8702 * 8703 * <pre> 8704 * Output only. Time when the BatchPredictionJob was most recently updated. 8705 * </pre> 8706 * 8707 * <code>.google.protobuf.Timestamp update_time = 18 [(.google.api.field_behavior) = OUTPUT_ONLY]; 8708 * </code> 8709 */ 8710 @java.lang.Override getUpdateTimeOrBuilder()8711 public com.google.protobuf.TimestampOrBuilder getUpdateTimeOrBuilder() { 8712 return updateTime_ == null ? com.google.protobuf.Timestamp.getDefaultInstance() : updateTime_; 8713 } 8714 8715 public static final int LABELS_FIELD_NUMBER = 19; 8716 8717 private static final class LabelsDefaultEntryHolder { 8718 static final com.google.protobuf.MapEntry<java.lang.String, java.lang.String> defaultEntry = 8719 com.google.protobuf.MapEntry.<java.lang.String, java.lang.String>newDefaultInstance( 8720 com.google.cloud.aiplatform.v1beta1.BatchPredictionJobProto 8721 .internal_static_google_cloud_aiplatform_v1beta1_BatchPredictionJob_LabelsEntry_descriptor, 8722 com.google.protobuf.WireFormat.FieldType.STRING, 8723 "", 8724 com.google.protobuf.WireFormat.FieldType.STRING, 8725 ""); 8726 } 8727 8728 @SuppressWarnings("serial") 8729 private com.google.protobuf.MapField<java.lang.String, java.lang.String> labels_; 8730 internalGetLabels()8731 private com.google.protobuf.MapField<java.lang.String, java.lang.String> internalGetLabels() { 8732 if (labels_ == null) { 8733 return com.google.protobuf.MapField.emptyMapField(LabelsDefaultEntryHolder.defaultEntry); 8734 } 8735 return labels_; 8736 } 8737 getLabelsCount()8738 public int getLabelsCount() { 8739 return internalGetLabels().getMap().size(); 8740 } 8741 /** 8742 * 8743 * 8744 * <pre> 8745 * The labels with user-defined metadata to organize BatchPredictionJobs. 8746 * Label keys and values can be no longer than 64 characters 8747 * (Unicode codepoints), can only contain lowercase letters, numeric 8748 * characters, underscores and dashes. International characters are allowed. 8749 * See https://goo.gl/xmQnxf for more information and examples of labels. 8750 * </pre> 8751 * 8752 * <code>map<string, string> labels = 19;</code> 8753 */ 8754 @java.lang.Override containsLabels(java.lang.String key)8755 public boolean containsLabels(java.lang.String key) { 8756 if (key == null) { 8757 throw new NullPointerException("map key"); 8758 } 8759 return internalGetLabels().getMap().containsKey(key); 8760 } 8761 /** Use {@link #getLabelsMap()} instead. */ 8762 @java.lang.Override 8763 @java.lang.Deprecated getLabels()8764 public java.util.Map<java.lang.String, java.lang.String> getLabels() { 8765 return getLabelsMap(); 8766 } 8767 /** 8768 * 8769 * 8770 * <pre> 8771 * The labels with user-defined metadata to organize BatchPredictionJobs. 8772 * Label keys and values can be no longer than 64 characters 8773 * (Unicode codepoints), can only contain lowercase letters, numeric 8774 * characters, underscores and dashes. International characters are allowed. 8775 * See https://goo.gl/xmQnxf for more information and examples of labels. 
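 *
 * (Illustrative sketch of reading labels through the map accessors; "team" is a
 * hypothetical label key.)
 *   // Sketch only.
 *   String team = job.getLabelsOrDefault("team", "unknown");
 *   for (java.util.Map.Entry<String, String> e : job.getLabelsMap().entrySet()) {
 *     System.out.println(e.getKey() + "=" + e.getValue());
 *   }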
8776 * </pre> 8777 * 8778 * <code>map<string, string> labels = 19;</code> 8779 */ 8780 @java.lang.Override getLabelsMap()8781 public java.util.Map<java.lang.String, java.lang.String> getLabelsMap() { 8782 return internalGetLabels().getMap(); 8783 } 8784 /** 8785 * 8786 * 8787 * <pre> 8788 * The labels with user-defined metadata to organize BatchPredictionJobs. 8789 * Label keys and values can be no longer than 64 characters 8790 * (Unicode codepoints), can only contain lowercase letters, numeric 8791 * characters, underscores and dashes. International characters are allowed. 8792 * See https://goo.gl/xmQnxf for more information and examples of labels. 8793 * </pre> 8794 * 8795 * <code>map<string, string> labels = 19;</code> 8796 */ 8797 @java.lang.Override getLabelsOrDefault( java.lang.String key, java.lang.String defaultValue)8798 public /* nullable */ java.lang.String getLabelsOrDefault( 8799 java.lang.String key, 8800 /* nullable */ 8801 java.lang.String defaultValue) { 8802 if (key == null) { 8803 throw new NullPointerException("map key"); 8804 } 8805 java.util.Map<java.lang.String, java.lang.String> map = internalGetLabels().getMap(); 8806 return map.containsKey(key) ? map.get(key) : defaultValue; 8807 } 8808 /** 8809 * 8810 * 8811 * <pre> 8812 * The labels with user-defined metadata to organize BatchPredictionJobs. 8813 * Label keys and values can be no longer than 64 characters 8814 * (Unicode codepoints), can only contain lowercase letters, numeric 8815 * characters, underscores and dashes. International characters are allowed. 8816 * See https://goo.gl/xmQnxf for more information and examples of labels. 8817 * </pre> 8818 * 8819 * <code>map<string, string> labels = 19;</code> 8820 */ 8821 @java.lang.Override getLabelsOrThrow(java.lang.String key)8822 public java.lang.String getLabelsOrThrow(java.lang.String key) { 8823 if (key == null) { 8824 throw new NullPointerException("map key"); 8825 } 8826 java.util.Map<java.lang.String, java.lang.String> map = internalGetLabels().getMap(); 8827 if (!map.containsKey(key)) { 8828 throw new java.lang.IllegalArgumentException(); 8829 } 8830 return map.get(key); 8831 } 8832 8833 public static final int ENCRYPTION_SPEC_FIELD_NUMBER = 24; 8834 private com.google.cloud.aiplatform.v1beta1.EncryptionSpec encryptionSpec_; 8835 /** 8836 * 8837 * 8838 * <pre> 8839 * Customer-managed encryption key options for a BatchPredictionJob. If this 8840 * is set, then all resources created by the BatchPredictionJob will be 8841 * encrypted with the provided encryption key. 8842 * </pre> 8843 * 8844 * <code>.google.cloud.aiplatform.v1beta1.EncryptionSpec encryption_spec = 24;</code> 8845 * 8846 * @return Whether the encryptionSpec field is set. 8847 */ 8848 @java.lang.Override hasEncryptionSpec()8849 public boolean hasEncryptionSpec() { 8850 return encryptionSpec_ != null; 8851 } 8852 /** 8853 * 8854 * 8855 * <pre> 8856 * Customer-managed encryption key options for a BatchPredictionJob. If this 8857 * is set, then all resources created by the BatchPredictionJob will be 8858 * encrypted with the provided encryption key. 8859 * </pre> 8860 * 8861 * <code>.google.cloud.aiplatform.v1beta1.EncryptionSpec encryption_spec = 24;</code> 8862 * 8863 * @return The encryptionSpec. 8864 */ 8865 @java.lang.Override getEncryptionSpec()8866 public com.google.cloud.aiplatform.v1beta1.EncryptionSpec getEncryptionSpec() { 8867 return encryptionSpec_ == null 8868 ? 
com.google.cloud.aiplatform.v1beta1.EncryptionSpec.getDefaultInstance() 8869 : encryptionSpec_; 8870 } 8871 /** 8872 * 8873 * 8874 * <pre> 8875 * Customer-managed encryption key options for a BatchPredictionJob. If this 8876 * is set, then all resources created by the BatchPredictionJob will be 8877 * encrypted with the provided encryption key. 8878 * </pre> 8879 * 8880 * <code>.google.cloud.aiplatform.v1beta1.EncryptionSpec encryption_spec = 24;</code> 8881 */ 8882 @java.lang.Override getEncryptionSpecOrBuilder()8883 public com.google.cloud.aiplatform.v1beta1.EncryptionSpecOrBuilder getEncryptionSpecOrBuilder() { 8884 return encryptionSpec_ == null 8885 ? com.google.cloud.aiplatform.v1beta1.EncryptionSpec.getDefaultInstance() 8886 : encryptionSpec_; 8887 } 8888 8889 public static final int MODEL_MONITORING_CONFIG_FIELD_NUMBER = 26; 8890 private com.google.cloud.aiplatform.v1beta1.ModelMonitoringConfig modelMonitoringConfig_; 8891 /** 8892 * 8893 * 8894 * <pre> 8895 * Model monitoring config will be used for analysis model behaviors, based on 8896 * the input and output to the batch prediction job, as well as the provided 8897 * training dataset. 8898 * </pre> 8899 * 8900 * <code>.google.cloud.aiplatform.v1beta1.ModelMonitoringConfig model_monitoring_config = 26; 8901 * </code> 8902 * 8903 * @return Whether the modelMonitoringConfig field is set. 8904 */ 8905 @java.lang.Override hasModelMonitoringConfig()8906 public boolean hasModelMonitoringConfig() { 8907 return modelMonitoringConfig_ != null; 8908 } 8909 /** 8910 * 8911 * 8912 * <pre> 8913 * Model monitoring config will be used for analysis model behaviors, based on 8914 * the input and output to the batch prediction job, as well as the provided 8915 * training dataset. 8916 * </pre> 8917 * 8918 * <code>.google.cloud.aiplatform.v1beta1.ModelMonitoringConfig model_monitoring_config = 26; 8919 * </code> 8920 * 8921 * @return The modelMonitoringConfig. 8922 */ 8923 @java.lang.Override getModelMonitoringConfig()8924 public com.google.cloud.aiplatform.v1beta1.ModelMonitoringConfig getModelMonitoringConfig() { 8925 return modelMonitoringConfig_ == null 8926 ? com.google.cloud.aiplatform.v1beta1.ModelMonitoringConfig.getDefaultInstance() 8927 : modelMonitoringConfig_; 8928 } 8929 /** 8930 * 8931 * 8932 * <pre> 8933 * Model monitoring config will be used for analysis model behaviors, based on 8934 * the input and output to the batch prediction job, as well as the provided 8935 * training dataset. 8936 * </pre> 8937 * 8938 * <code>.google.cloud.aiplatform.v1beta1.ModelMonitoringConfig model_monitoring_config = 26; 8939 * </code> 8940 */ 8941 @java.lang.Override 8942 public com.google.cloud.aiplatform.v1beta1.ModelMonitoringConfigOrBuilder getModelMonitoringConfigOrBuilder()8943 getModelMonitoringConfigOrBuilder() { 8944 return modelMonitoringConfig_ == null 8945 ? com.google.cloud.aiplatform.v1beta1.ModelMonitoringConfig.getDefaultInstance() 8946 : modelMonitoringConfig_; 8947 } 8948 8949 public static final int MODEL_MONITORING_STATS_ANOMALIES_FIELD_NUMBER = 31; 8950 8951 @SuppressWarnings("serial") 8952 private java.util.List<com.google.cloud.aiplatform.v1beta1.ModelMonitoringStatsAnomalies> 8953 modelMonitoringStatsAnomalies_; 8954 /** 8955 * 8956 * 8957 * <pre> 8958 * Get batch prediction job monitoring statistics. 
8959 * </pre> 8960 * 8961 * <code> 8962 * repeated .google.cloud.aiplatform.v1beta1.ModelMonitoringStatsAnomalies model_monitoring_stats_anomalies = 31; 8963 * </code> 8964 */ 8965 @java.lang.Override 8966 public java.util.List<com.google.cloud.aiplatform.v1beta1.ModelMonitoringStatsAnomalies> getModelMonitoringStatsAnomaliesList()8967 getModelMonitoringStatsAnomaliesList() { 8968 return modelMonitoringStatsAnomalies_; 8969 } 8970 /** 8971 * 8972 * 8973 * <pre> 8974 * Get batch prediction job monitoring statistics. 8975 * </pre> 8976 * 8977 * <code> 8978 * repeated .google.cloud.aiplatform.v1beta1.ModelMonitoringStatsAnomalies model_monitoring_stats_anomalies = 31; 8979 * </code> 8980 */ 8981 @java.lang.Override 8982 public java.util.List< 8983 ? extends com.google.cloud.aiplatform.v1beta1.ModelMonitoringStatsAnomaliesOrBuilder> getModelMonitoringStatsAnomaliesOrBuilderList()8984 getModelMonitoringStatsAnomaliesOrBuilderList() { 8985 return modelMonitoringStatsAnomalies_; 8986 } 8987 /** 8988 * 8989 * 8990 * <pre> 8991 * Get batch prediction job monitoring statistics. 8992 * </pre> 8993 * 8994 * <code> 8995 * repeated .google.cloud.aiplatform.v1beta1.ModelMonitoringStatsAnomalies model_monitoring_stats_anomalies = 31; 8996 * </code> 8997 */ 8998 @java.lang.Override getModelMonitoringStatsAnomaliesCount()8999 public int getModelMonitoringStatsAnomaliesCount() { 9000 return modelMonitoringStatsAnomalies_.size(); 9001 } 9002 /** 9003 * 9004 * 9005 * <pre> 9006 * Get batch prediction job monitoring statistics. 9007 * </pre> 9008 * 9009 * <code> 9010 * repeated .google.cloud.aiplatform.v1beta1.ModelMonitoringStatsAnomalies model_monitoring_stats_anomalies = 31; 9011 * </code> 9012 */ 9013 @java.lang.Override 9014 public com.google.cloud.aiplatform.v1beta1.ModelMonitoringStatsAnomalies getModelMonitoringStatsAnomalies(int index)9015 getModelMonitoringStatsAnomalies(int index) { 9016 return modelMonitoringStatsAnomalies_.get(index); 9017 } 9018 /** 9019 * 9020 * 9021 * <pre> 9022 * Get batch prediction job monitoring statistics. 9023 * </pre> 9024 * 9025 * <code> 9026 * repeated .google.cloud.aiplatform.v1beta1.ModelMonitoringStatsAnomalies model_monitoring_stats_anomalies = 31; 9027 * </code> 9028 */ 9029 @java.lang.Override 9030 public com.google.cloud.aiplatform.v1beta1.ModelMonitoringStatsAnomaliesOrBuilder getModelMonitoringStatsAnomaliesOrBuilder(int index)9031 getModelMonitoringStatsAnomaliesOrBuilder(int index) { 9032 return modelMonitoringStatsAnomalies_.get(index); 9033 } 9034 9035 public static final int MODEL_MONITORING_STATUS_FIELD_NUMBER = 32; 9036 private com.google.rpc.Status modelMonitoringStatus_; 9037 /** 9038 * 9039 * 9040 * <pre> 9041 * Output only. The running status of the model monitoring pipeline. 9042 * </pre> 9043 * 9044 * <code> 9045 * .google.rpc.Status model_monitoring_status = 32 [(.google.api.field_behavior) = OUTPUT_ONLY]; 9046 * </code> 9047 * 9048 * @return Whether the modelMonitoringStatus field is set. 9049 */ 9050 @java.lang.Override hasModelMonitoringStatus()9051 public boolean hasModelMonitoringStatus() { 9052 return modelMonitoringStatus_ != null; 9053 } 9054 /** 9055 * 9056 * 9057 * <pre> 9058 * Output only. The running status of the model monitoring pipeline. 9059 * </pre> 9060 * 9061 * <code> 9062 * .google.rpc.Status model_monitoring_status = 32 [(.google.api.field_behavior) = OUTPUT_ONLY]; 9063 * </code> 9064 * 9065 * @return The modelMonitoringStatus. 
9066 */ 9067 @java.lang.Override getModelMonitoringStatus()9068 public com.google.rpc.Status getModelMonitoringStatus() { 9069 return modelMonitoringStatus_ == null 9070 ? com.google.rpc.Status.getDefaultInstance() 9071 : modelMonitoringStatus_; 9072 } 9073 /** 9074 * 9075 * 9076 * <pre> 9077 * Output only. The running status of the model monitoring pipeline. 9078 * </pre> 9079 * 9080 * <code> 9081 * .google.rpc.Status model_monitoring_status = 32 [(.google.api.field_behavior) = OUTPUT_ONLY]; 9082 * </code> 9083 */ 9084 @java.lang.Override getModelMonitoringStatusOrBuilder()9085 public com.google.rpc.StatusOrBuilder getModelMonitoringStatusOrBuilder() { 9086 return modelMonitoringStatus_ == null 9087 ? com.google.rpc.Status.getDefaultInstance() 9088 : modelMonitoringStatus_; 9089 } 9090 9091 public static final int DISABLE_CONTAINER_LOGGING_FIELD_NUMBER = 34; 9092 private boolean disableContainerLogging_ = false; 9093 /** 9094 * 9095 * 9096 * <pre> 9097 * For custom-trained Models and AutoML Tabular Models, the container of the 9098 * DeployedModel instances will send `stderr` and `stdout` streams to 9099 * Cloud Logging by default. Please note that the logs incur cost, 9100 * which are subject to [Cloud Logging 9101 * pricing](https://cloud.google.com/logging/pricing). 9102 * User can disable container logging by setting this flag to true. 9103 * </pre> 9104 * 9105 * <code>bool disable_container_logging = 34;</code> 9106 * 9107 * @return The disableContainerLogging. 9108 */ 9109 @java.lang.Override getDisableContainerLogging()9110 public boolean getDisableContainerLogging() { 9111 return disableContainerLogging_; 9112 } 9113 9114 private byte memoizedIsInitialized = -1; 9115 9116 @java.lang.Override isInitialized()9117 public final boolean isInitialized() { 9118 byte isInitialized = memoizedIsInitialized; 9119 if (isInitialized == 1) return true; 9120 if (isInitialized == 0) return false; 9121 9122 memoizedIsInitialized = 1; 9123 return true; 9124 } 9125 9126 @java.lang.Override writeTo(com.google.protobuf.CodedOutputStream output)9127 public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException { 9128 if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(name_)) { 9129 com.google.protobuf.GeneratedMessageV3.writeString(output, 1, name_); 9130 } 9131 if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(displayName_)) { 9132 com.google.protobuf.GeneratedMessageV3.writeString(output, 2, displayName_); 9133 } 9134 if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(model_)) { 9135 com.google.protobuf.GeneratedMessageV3.writeString(output, 3, model_); 9136 } 9137 if (inputConfig_ != null) { 9138 output.writeMessage(4, getInputConfig()); 9139 } 9140 if (modelParameters_ != null) { 9141 output.writeMessage(5, getModelParameters()); 9142 } 9143 if (outputConfig_ != null) { 9144 output.writeMessage(6, getOutputConfig()); 9145 } 9146 if (dedicatedResources_ != null) { 9147 output.writeMessage(7, getDedicatedResources()); 9148 } 9149 if (manualBatchTuningParameters_ != null) { 9150 output.writeMessage(8, getManualBatchTuningParameters()); 9151 } 9152 if (outputInfo_ != null) { 9153 output.writeMessage(9, getOutputInfo()); 9154 } 9155 if (state_ != com.google.cloud.aiplatform.v1beta1.JobState.JOB_STATE_UNSPECIFIED.getNumber()) { 9156 output.writeEnum(10, state_); 9157 } 9158 if (error_ != null) { 9159 output.writeMessage(11, getError()); 9160 } 9161 for (int i = 0; i < partialFailures_.size(); i++) { 9162 output.writeMessage(12, 
partialFailures_.get(i)); 9163 } 9164 if (resourcesConsumed_ != null) { 9165 output.writeMessage(13, getResourcesConsumed()); 9166 } 9167 if (completionStats_ != null) { 9168 output.writeMessage(14, getCompletionStats()); 9169 } 9170 if (createTime_ != null) { 9171 output.writeMessage(15, getCreateTime()); 9172 } 9173 if (startTime_ != null) { 9174 output.writeMessage(16, getStartTime()); 9175 } 9176 if (endTime_ != null) { 9177 output.writeMessage(17, getEndTime()); 9178 } 9179 if (updateTime_ != null) { 9180 output.writeMessage(18, getUpdateTime()); 9181 } 9182 com.google.protobuf.GeneratedMessageV3.serializeStringMapTo( 9183 output, internalGetLabels(), LabelsDefaultEntryHolder.defaultEntry, 19); 9184 if (generateExplanation_ != false) { 9185 output.writeBool(23, generateExplanation_); 9186 } 9187 if (encryptionSpec_ != null) { 9188 output.writeMessage(24, getEncryptionSpec()); 9189 } 9190 if (explanationSpec_ != null) { 9191 output.writeMessage(25, getExplanationSpec()); 9192 } 9193 if (modelMonitoringConfig_ != null) { 9194 output.writeMessage(26, getModelMonitoringConfig()); 9195 } 9196 if (instanceConfig_ != null) { 9197 output.writeMessage(27, getInstanceConfig()); 9198 } 9199 if (unmanagedContainerModel_ != null) { 9200 output.writeMessage(28, getUnmanagedContainerModel()); 9201 } 9202 if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(serviceAccount_)) { 9203 com.google.protobuf.GeneratedMessageV3.writeString(output, 29, serviceAccount_); 9204 } 9205 if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(modelVersionId_)) { 9206 com.google.protobuf.GeneratedMessageV3.writeString(output, 30, modelVersionId_); 9207 } 9208 for (int i = 0; i < modelMonitoringStatsAnomalies_.size(); i++) { 9209 output.writeMessage(31, modelMonitoringStatsAnomalies_.get(i)); 9210 } 9211 if (modelMonitoringStatus_ != null) { 9212 output.writeMessage(32, getModelMonitoringStatus()); 9213 } 9214 if (disableContainerLogging_ != false) { 9215 output.writeBool(34, disableContainerLogging_); 9216 } 9217 getUnknownFields().writeTo(output); 9218 } 9219 9220 @java.lang.Override getSerializedSize()9221 public int getSerializedSize() { 9222 int size = memoizedSize; 9223 if (size != -1) return size; 9224 9225 size = 0; 9226 if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(name_)) { 9227 size += com.google.protobuf.GeneratedMessageV3.computeStringSize(1, name_); 9228 } 9229 if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(displayName_)) { 9230 size += com.google.protobuf.GeneratedMessageV3.computeStringSize(2, displayName_); 9231 } 9232 if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(model_)) { 9233 size += com.google.protobuf.GeneratedMessageV3.computeStringSize(3, model_); 9234 } 9235 if (inputConfig_ != null) { 9236 size += com.google.protobuf.CodedOutputStream.computeMessageSize(4, getInputConfig()); 9237 } 9238 if (modelParameters_ != null) { 9239 size += com.google.protobuf.CodedOutputStream.computeMessageSize(5, getModelParameters()); 9240 } 9241 if (outputConfig_ != null) { 9242 size += com.google.protobuf.CodedOutputStream.computeMessageSize(6, getOutputConfig()); 9243 } 9244 if (dedicatedResources_ != null) { 9245 size += com.google.protobuf.CodedOutputStream.computeMessageSize(7, getDedicatedResources()); 9246 } 9247 if (manualBatchTuningParameters_ != null) { 9248 size += 9249 com.google.protobuf.CodedOutputStream.computeMessageSize( 9250 8, getManualBatchTuningParameters()); 9251 } 9252 if (outputInfo_ != null) { 9253 size += 
com.google.protobuf.CodedOutputStream.computeMessageSize(9, getOutputInfo()); 9254 } 9255 if (state_ != com.google.cloud.aiplatform.v1beta1.JobState.JOB_STATE_UNSPECIFIED.getNumber()) { 9256 size += com.google.protobuf.CodedOutputStream.computeEnumSize(10, state_); 9257 } 9258 if (error_ != null) { 9259 size += com.google.protobuf.CodedOutputStream.computeMessageSize(11, getError()); 9260 } 9261 for (int i = 0; i < partialFailures_.size(); i++) { 9262 size += com.google.protobuf.CodedOutputStream.computeMessageSize(12, partialFailures_.get(i)); 9263 } 9264 if (resourcesConsumed_ != null) { 9265 size += com.google.protobuf.CodedOutputStream.computeMessageSize(13, getResourcesConsumed()); 9266 } 9267 if (completionStats_ != null) { 9268 size += com.google.protobuf.CodedOutputStream.computeMessageSize(14, getCompletionStats()); 9269 } 9270 if (createTime_ != null) { 9271 size += com.google.protobuf.CodedOutputStream.computeMessageSize(15, getCreateTime()); 9272 } 9273 if (startTime_ != null) { 9274 size += com.google.protobuf.CodedOutputStream.computeMessageSize(16, getStartTime()); 9275 } 9276 if (endTime_ != null) { 9277 size += com.google.protobuf.CodedOutputStream.computeMessageSize(17, getEndTime()); 9278 } 9279 if (updateTime_ != null) { 9280 size += com.google.protobuf.CodedOutputStream.computeMessageSize(18, getUpdateTime()); 9281 } 9282 for (java.util.Map.Entry<java.lang.String, java.lang.String> entry : 9283 internalGetLabels().getMap().entrySet()) { 9284 com.google.protobuf.MapEntry<java.lang.String, java.lang.String> labels__ = 9285 LabelsDefaultEntryHolder.defaultEntry 9286 .newBuilderForType() 9287 .setKey(entry.getKey()) 9288 .setValue(entry.getValue()) 9289 .build(); 9290 size += com.google.protobuf.CodedOutputStream.computeMessageSize(19, labels__); 9291 } 9292 if (generateExplanation_ != false) { 9293 size += com.google.protobuf.CodedOutputStream.computeBoolSize(23, generateExplanation_); 9294 } 9295 if (encryptionSpec_ != null) { 9296 size += com.google.protobuf.CodedOutputStream.computeMessageSize(24, getEncryptionSpec()); 9297 } 9298 if (explanationSpec_ != null) { 9299 size += com.google.protobuf.CodedOutputStream.computeMessageSize(25, getExplanationSpec()); 9300 } 9301 if (modelMonitoringConfig_ != null) { 9302 size += 9303 com.google.protobuf.CodedOutputStream.computeMessageSize(26, getModelMonitoringConfig()); 9304 } 9305 if (instanceConfig_ != null) { 9306 size += com.google.protobuf.CodedOutputStream.computeMessageSize(27, getInstanceConfig()); 9307 } 9308 if (unmanagedContainerModel_ != null) { 9309 size += 9310 com.google.protobuf.CodedOutputStream.computeMessageSize( 9311 28, getUnmanagedContainerModel()); 9312 } 9313 if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(serviceAccount_)) { 9314 size += com.google.protobuf.GeneratedMessageV3.computeStringSize(29, serviceAccount_); 9315 } 9316 if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(modelVersionId_)) { 9317 size += com.google.protobuf.GeneratedMessageV3.computeStringSize(30, modelVersionId_); 9318 } 9319 for (int i = 0; i < modelMonitoringStatsAnomalies_.size(); i++) { 9320 size += 9321 com.google.protobuf.CodedOutputStream.computeMessageSize( 9322 31, modelMonitoringStatsAnomalies_.get(i)); 9323 } 9324 if (modelMonitoringStatus_ != null) { 9325 size += 9326 com.google.protobuf.CodedOutputStream.computeMessageSize(32, getModelMonitoringStatus()); 9327 } 9328 if (disableContainerLogging_ != false) { 9329 size += com.google.protobuf.CodedOutputStream.computeBoolSize(34, 
disableContainerLogging_); 9330 } 9331 size += getUnknownFields().getSerializedSize(); 9332 memoizedSize = size; 9333 return size; 9334 } 9335 9336 @java.lang.Override equals(final java.lang.Object obj)9337 public boolean equals(final java.lang.Object obj) { 9338 if (obj == this) { 9339 return true; 9340 } 9341 if (!(obj instanceof com.google.cloud.aiplatform.v1beta1.BatchPredictionJob)) { 9342 return super.equals(obj); 9343 } 9344 com.google.cloud.aiplatform.v1beta1.BatchPredictionJob other = 9345 (com.google.cloud.aiplatform.v1beta1.BatchPredictionJob) obj; 9346 9347 if (!getName().equals(other.getName())) return false; 9348 if (!getDisplayName().equals(other.getDisplayName())) return false; 9349 if (!getModel().equals(other.getModel())) return false; 9350 if (!getModelVersionId().equals(other.getModelVersionId())) return false; 9351 if (hasUnmanagedContainerModel() != other.hasUnmanagedContainerModel()) return false; 9352 if (hasUnmanagedContainerModel()) { 9353 if (!getUnmanagedContainerModel().equals(other.getUnmanagedContainerModel())) return false; 9354 } 9355 if (hasInputConfig() != other.hasInputConfig()) return false; 9356 if (hasInputConfig()) { 9357 if (!getInputConfig().equals(other.getInputConfig())) return false; 9358 } 9359 if (hasInstanceConfig() != other.hasInstanceConfig()) return false; 9360 if (hasInstanceConfig()) { 9361 if (!getInstanceConfig().equals(other.getInstanceConfig())) return false; 9362 } 9363 if (hasModelParameters() != other.hasModelParameters()) return false; 9364 if (hasModelParameters()) { 9365 if (!getModelParameters().equals(other.getModelParameters())) return false; 9366 } 9367 if (hasOutputConfig() != other.hasOutputConfig()) return false; 9368 if (hasOutputConfig()) { 9369 if (!getOutputConfig().equals(other.getOutputConfig())) return false; 9370 } 9371 if (hasDedicatedResources() != other.hasDedicatedResources()) return false; 9372 if (hasDedicatedResources()) { 9373 if (!getDedicatedResources().equals(other.getDedicatedResources())) return false; 9374 } 9375 if (!getServiceAccount().equals(other.getServiceAccount())) return false; 9376 if (hasManualBatchTuningParameters() != other.hasManualBatchTuningParameters()) return false; 9377 if (hasManualBatchTuningParameters()) { 9378 if (!getManualBatchTuningParameters().equals(other.getManualBatchTuningParameters())) 9379 return false; 9380 } 9381 if (getGenerateExplanation() != other.getGenerateExplanation()) return false; 9382 if (hasExplanationSpec() != other.hasExplanationSpec()) return false; 9383 if (hasExplanationSpec()) { 9384 if (!getExplanationSpec().equals(other.getExplanationSpec())) return false; 9385 } 9386 if (hasOutputInfo() != other.hasOutputInfo()) return false; 9387 if (hasOutputInfo()) { 9388 if (!getOutputInfo().equals(other.getOutputInfo())) return false; 9389 } 9390 if (state_ != other.state_) return false; 9391 if (hasError() != other.hasError()) return false; 9392 if (hasError()) { 9393 if (!getError().equals(other.getError())) return false; 9394 } 9395 if (!getPartialFailuresList().equals(other.getPartialFailuresList())) return false; 9396 if (hasResourcesConsumed() != other.hasResourcesConsumed()) return false; 9397 if (hasResourcesConsumed()) { 9398 if (!getResourcesConsumed().equals(other.getResourcesConsumed())) return false; 9399 } 9400 if (hasCompletionStats() != other.hasCompletionStats()) return false; 9401 if (hasCompletionStats()) { 9402 if (!getCompletionStats().equals(other.getCompletionStats())) return false; 9403 } 9404 if (hasCreateTime() != 
other.hasCreateTime()) return false; 9405 if (hasCreateTime()) { 9406 if (!getCreateTime().equals(other.getCreateTime())) return false; 9407 } 9408 if (hasStartTime() != other.hasStartTime()) return false; 9409 if (hasStartTime()) { 9410 if (!getStartTime().equals(other.getStartTime())) return false; 9411 } 9412 if (hasEndTime() != other.hasEndTime()) return false; 9413 if (hasEndTime()) { 9414 if (!getEndTime().equals(other.getEndTime())) return false; 9415 } 9416 if (hasUpdateTime() != other.hasUpdateTime()) return false; 9417 if (hasUpdateTime()) { 9418 if (!getUpdateTime().equals(other.getUpdateTime())) return false; 9419 } 9420 if (!internalGetLabels().equals(other.internalGetLabels())) return false; 9421 if (hasEncryptionSpec() != other.hasEncryptionSpec()) return false; 9422 if (hasEncryptionSpec()) { 9423 if (!getEncryptionSpec().equals(other.getEncryptionSpec())) return false; 9424 } 9425 if (hasModelMonitoringConfig() != other.hasModelMonitoringConfig()) return false; 9426 if (hasModelMonitoringConfig()) { 9427 if (!getModelMonitoringConfig().equals(other.getModelMonitoringConfig())) return false; 9428 } 9429 if (!getModelMonitoringStatsAnomaliesList() 9430 .equals(other.getModelMonitoringStatsAnomaliesList())) return false; 9431 if (hasModelMonitoringStatus() != other.hasModelMonitoringStatus()) return false; 9432 if (hasModelMonitoringStatus()) { 9433 if (!getModelMonitoringStatus().equals(other.getModelMonitoringStatus())) return false; 9434 } 9435 if (getDisableContainerLogging() != other.getDisableContainerLogging()) return false; 9436 if (!getUnknownFields().equals(other.getUnknownFields())) return false; 9437 return true; 9438 } 9439 9440 @java.lang.Override hashCode()9441 public int hashCode() { 9442 if (memoizedHashCode != 0) { 9443 return memoizedHashCode; 9444 } 9445 int hash = 41; 9446 hash = (19 * hash) + getDescriptor().hashCode(); 9447 hash = (37 * hash) + NAME_FIELD_NUMBER; 9448 hash = (53 * hash) + getName().hashCode(); 9449 hash = (37 * hash) + DISPLAY_NAME_FIELD_NUMBER; 9450 hash = (53 * hash) + getDisplayName().hashCode(); 9451 hash = (37 * hash) + MODEL_FIELD_NUMBER; 9452 hash = (53 * hash) + getModel().hashCode(); 9453 hash = (37 * hash) + MODEL_VERSION_ID_FIELD_NUMBER; 9454 hash = (53 * hash) + getModelVersionId().hashCode(); 9455 if (hasUnmanagedContainerModel()) { 9456 hash = (37 * hash) + UNMANAGED_CONTAINER_MODEL_FIELD_NUMBER; 9457 hash = (53 * hash) + getUnmanagedContainerModel().hashCode(); 9458 } 9459 if (hasInputConfig()) { 9460 hash = (37 * hash) + INPUT_CONFIG_FIELD_NUMBER; 9461 hash = (53 * hash) + getInputConfig().hashCode(); 9462 } 9463 if (hasInstanceConfig()) { 9464 hash = (37 * hash) + INSTANCE_CONFIG_FIELD_NUMBER; 9465 hash = (53 * hash) + getInstanceConfig().hashCode(); 9466 } 9467 if (hasModelParameters()) { 9468 hash = (37 * hash) + MODEL_PARAMETERS_FIELD_NUMBER; 9469 hash = (53 * hash) + getModelParameters().hashCode(); 9470 } 9471 if (hasOutputConfig()) { 9472 hash = (37 * hash) + OUTPUT_CONFIG_FIELD_NUMBER; 9473 hash = (53 * hash) + getOutputConfig().hashCode(); 9474 } 9475 if (hasDedicatedResources()) { 9476 hash = (37 * hash) + DEDICATED_RESOURCES_FIELD_NUMBER; 9477 hash = (53 * hash) + getDedicatedResources().hashCode(); 9478 } 9479 hash = (37 * hash) + SERVICE_ACCOUNT_FIELD_NUMBER; 9480 hash = (53 * hash) + getServiceAccount().hashCode(); 9481 if (hasManualBatchTuningParameters()) { 9482 hash = (37 * hash) + MANUAL_BATCH_TUNING_PARAMETERS_FIELD_NUMBER; 9483 hash = (53 * hash) + getManualBatchTuningParameters().hashCode(); 9484 } 
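    // Each populated field is folded into the hash with the same two-step pattern used
    // above and below: multiply the running hash by 37 and add the field number, then
    // multiply by 53 and add the field value's own hashCode(). The running hash starts
    // from the descriptor hash, ends with the unknown-field set, and is memoized in
    // memoizedHashCode, so messages that compare equal via equals() always hash equally.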
9485 hash = (37 * hash) + GENERATE_EXPLANATION_FIELD_NUMBER; 9486 hash = (53 * hash) + com.google.protobuf.Internal.hashBoolean(getGenerateExplanation()); 9487 if (hasExplanationSpec()) { 9488 hash = (37 * hash) + EXPLANATION_SPEC_FIELD_NUMBER; 9489 hash = (53 * hash) + getExplanationSpec().hashCode(); 9490 } 9491 if (hasOutputInfo()) { 9492 hash = (37 * hash) + OUTPUT_INFO_FIELD_NUMBER; 9493 hash = (53 * hash) + getOutputInfo().hashCode(); 9494 } 9495 hash = (37 * hash) + STATE_FIELD_NUMBER; 9496 hash = (53 * hash) + state_; 9497 if (hasError()) { 9498 hash = (37 * hash) + ERROR_FIELD_NUMBER; 9499 hash = (53 * hash) + getError().hashCode(); 9500 } 9501 if (getPartialFailuresCount() > 0) { 9502 hash = (37 * hash) + PARTIAL_FAILURES_FIELD_NUMBER; 9503 hash = (53 * hash) + getPartialFailuresList().hashCode(); 9504 } 9505 if (hasResourcesConsumed()) { 9506 hash = (37 * hash) + RESOURCES_CONSUMED_FIELD_NUMBER; 9507 hash = (53 * hash) + getResourcesConsumed().hashCode(); 9508 } 9509 if (hasCompletionStats()) { 9510 hash = (37 * hash) + COMPLETION_STATS_FIELD_NUMBER; 9511 hash = (53 * hash) + getCompletionStats().hashCode(); 9512 } 9513 if (hasCreateTime()) { 9514 hash = (37 * hash) + CREATE_TIME_FIELD_NUMBER; 9515 hash = (53 * hash) + getCreateTime().hashCode(); 9516 } 9517 if (hasStartTime()) { 9518 hash = (37 * hash) + START_TIME_FIELD_NUMBER; 9519 hash = (53 * hash) + getStartTime().hashCode(); 9520 } 9521 if (hasEndTime()) { 9522 hash = (37 * hash) + END_TIME_FIELD_NUMBER; 9523 hash = (53 * hash) + getEndTime().hashCode(); 9524 } 9525 if (hasUpdateTime()) { 9526 hash = (37 * hash) + UPDATE_TIME_FIELD_NUMBER; 9527 hash = (53 * hash) + getUpdateTime().hashCode(); 9528 } 9529 if (!internalGetLabels().getMap().isEmpty()) { 9530 hash = (37 * hash) + LABELS_FIELD_NUMBER; 9531 hash = (53 * hash) + internalGetLabels().hashCode(); 9532 } 9533 if (hasEncryptionSpec()) { 9534 hash = (37 * hash) + ENCRYPTION_SPEC_FIELD_NUMBER; 9535 hash = (53 * hash) + getEncryptionSpec().hashCode(); 9536 } 9537 if (hasModelMonitoringConfig()) { 9538 hash = (37 * hash) + MODEL_MONITORING_CONFIG_FIELD_NUMBER; 9539 hash = (53 * hash) + getModelMonitoringConfig().hashCode(); 9540 } 9541 if (getModelMonitoringStatsAnomaliesCount() > 0) { 9542 hash = (37 * hash) + MODEL_MONITORING_STATS_ANOMALIES_FIELD_NUMBER; 9543 hash = (53 * hash) + getModelMonitoringStatsAnomaliesList().hashCode(); 9544 } 9545 if (hasModelMonitoringStatus()) { 9546 hash = (37 * hash) + MODEL_MONITORING_STATUS_FIELD_NUMBER; 9547 hash = (53 * hash) + getModelMonitoringStatus().hashCode(); 9548 } 9549 hash = (37 * hash) + DISABLE_CONTAINER_LOGGING_FIELD_NUMBER; 9550 hash = (53 * hash) + com.google.protobuf.Internal.hashBoolean(getDisableContainerLogging()); 9551 hash = (29 * hash) + getUnknownFields().hashCode(); 9552 memoizedHashCode = hash; 9553 return hash; 9554 } 9555 parseFrom( java.nio.ByteBuffer data)9556 public static com.google.cloud.aiplatform.v1beta1.BatchPredictionJob parseFrom( 9557 java.nio.ByteBuffer data) throws com.google.protobuf.InvalidProtocolBufferException { 9558 return PARSER.parseFrom(data); 9559 } 9560 parseFrom( java.nio.ByteBuffer data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)9561 public static com.google.cloud.aiplatform.v1beta1.BatchPredictionJob parseFrom( 9562 java.nio.ByteBuffer data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) 9563 throws com.google.protobuf.InvalidProtocolBufferException { 9564 return PARSER.parseFrom(data, extensionRegistry); 9565 } 9566 parseFrom( 
com.google.protobuf.ByteString data)9567 public static com.google.cloud.aiplatform.v1beta1.BatchPredictionJob parseFrom( 9568 com.google.protobuf.ByteString data) 9569 throws com.google.protobuf.InvalidProtocolBufferException { 9570 return PARSER.parseFrom(data); 9571 } 9572 parseFrom( com.google.protobuf.ByteString data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)9573 public static com.google.cloud.aiplatform.v1beta1.BatchPredictionJob parseFrom( 9574 com.google.protobuf.ByteString data, 9575 com.google.protobuf.ExtensionRegistryLite extensionRegistry) 9576 throws com.google.protobuf.InvalidProtocolBufferException { 9577 return PARSER.parseFrom(data, extensionRegistry); 9578 } 9579 parseFrom(byte[] data)9580 public static com.google.cloud.aiplatform.v1beta1.BatchPredictionJob parseFrom(byte[] data) 9581 throws com.google.protobuf.InvalidProtocolBufferException { 9582 return PARSER.parseFrom(data); 9583 } 9584 parseFrom( byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)9585 public static com.google.cloud.aiplatform.v1beta1.BatchPredictionJob parseFrom( 9586 byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) 9587 throws com.google.protobuf.InvalidProtocolBufferException { 9588 return PARSER.parseFrom(data, extensionRegistry); 9589 } 9590 parseFrom( java.io.InputStream input)9591 public static com.google.cloud.aiplatform.v1beta1.BatchPredictionJob parseFrom( 9592 java.io.InputStream input) throws java.io.IOException { 9593 return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input); 9594 } 9595 parseFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)9596 public static com.google.cloud.aiplatform.v1beta1.BatchPredictionJob parseFrom( 9597 java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) 9598 throws java.io.IOException { 9599 return com.google.protobuf.GeneratedMessageV3.parseWithIOException( 9600 PARSER, input, extensionRegistry); 9601 } 9602 parseDelimitedFrom( java.io.InputStream input)9603 public static com.google.cloud.aiplatform.v1beta1.BatchPredictionJob parseDelimitedFrom( 9604 java.io.InputStream input) throws java.io.IOException { 9605 return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(PARSER, input); 9606 } 9607 parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)9608 public static com.google.cloud.aiplatform.v1beta1.BatchPredictionJob parseDelimitedFrom( 9609 java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) 9610 throws java.io.IOException { 9611 return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException( 9612 PARSER, input, extensionRegistry); 9613 } 9614 parseFrom( com.google.protobuf.CodedInputStream input)9615 public static com.google.cloud.aiplatform.v1beta1.BatchPredictionJob parseFrom( 9616 com.google.protobuf.CodedInputStream input) throws java.io.IOException { 9617 return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input); 9618 } 9619 parseFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)9620 public static com.google.cloud.aiplatform.v1beta1.BatchPredictionJob parseFrom( 9621 com.google.protobuf.CodedInputStream input, 9622 com.google.protobuf.ExtensionRegistryLite extensionRegistry) 9623 throws java.io.IOException { 9624 return com.google.protobuf.GeneratedMessageV3.parseWithIOException( 9625 
PARSER, input, extensionRegistry); 9626 } 9627 9628 @java.lang.Override newBuilderForType()9629 public Builder newBuilderForType() { 9630 return newBuilder(); 9631 } 9632 newBuilder()9633 public static Builder newBuilder() { 9634 return DEFAULT_INSTANCE.toBuilder(); 9635 } 9636 newBuilder( com.google.cloud.aiplatform.v1beta1.BatchPredictionJob prototype)9637 public static Builder newBuilder( 9638 com.google.cloud.aiplatform.v1beta1.BatchPredictionJob prototype) { 9639 return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype); 9640 } 9641 9642 @java.lang.Override toBuilder()9643 public Builder toBuilder() { 9644 return this == DEFAULT_INSTANCE ? new Builder() : new Builder().mergeFrom(this); 9645 } 9646 9647 @java.lang.Override newBuilderForType(com.google.protobuf.GeneratedMessageV3.BuilderParent parent)9648 protected Builder newBuilderForType(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { 9649 Builder builder = new Builder(parent); 9650 return builder; 9651 } 9652 /** 9653 * 9654 * 9655 * <pre> 9656 * A job that uses a 9657 * [Model][google.cloud.aiplatform.v1beta1.BatchPredictionJob.model] to produce 9658 * predictions on multiple [input 9659 * instances][google.cloud.aiplatform.v1beta1.BatchPredictionJob.input_config]. 9660 * If predictions for significant portion of the instances fail, the job may 9661 * finish without attempting predictions for all remaining instances. 9662 * </pre> 9663 * 9664 * Protobuf type {@code google.cloud.aiplatform.v1beta1.BatchPredictionJob} 9665 */ 9666 public static final class Builder extends com.google.protobuf.GeneratedMessageV3.Builder<Builder> 9667 implements 9668 // @@protoc_insertion_point(builder_implements:google.cloud.aiplatform.v1beta1.BatchPredictionJob) 9669 com.google.cloud.aiplatform.v1beta1.BatchPredictionJobOrBuilder { getDescriptor()9670 public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { 9671 return com.google.cloud.aiplatform.v1beta1.BatchPredictionJobProto 9672 .internal_static_google_cloud_aiplatform_v1beta1_BatchPredictionJob_descriptor; 9673 } 9674 9675 @SuppressWarnings({"rawtypes"}) internalGetMapField(int number)9676 protected com.google.protobuf.MapField internalGetMapField(int number) { 9677 switch (number) { 9678 case 19: 9679 return internalGetLabels(); 9680 default: 9681 throw new RuntimeException("Invalid map field number: " + number); 9682 } 9683 } 9684 9685 @SuppressWarnings({"rawtypes"}) internalGetMutableMapField(int number)9686 protected com.google.protobuf.MapField internalGetMutableMapField(int number) { 9687 switch (number) { 9688 case 19: 9689 return internalGetMutableLabels(); 9690 default: 9691 throw new RuntimeException("Invalid map field number: " + number); 9692 } 9693 } 9694 9695 @java.lang.Override 9696 protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable()9697 internalGetFieldAccessorTable() { 9698 return com.google.cloud.aiplatform.v1beta1.BatchPredictionJobProto 9699 .internal_static_google_cloud_aiplatform_v1beta1_BatchPredictionJob_fieldAccessorTable 9700 .ensureFieldAccessorsInitialized( 9701 com.google.cloud.aiplatform.v1beta1.BatchPredictionJob.class, 9702 com.google.cloud.aiplatform.v1beta1.BatchPredictionJob.Builder.class); 9703 } 9704 9705 // Construct using com.google.cloud.aiplatform.v1beta1.BatchPredictionJob.newBuilder() Builder()9706 private Builder() {} 9707 Builder(com.google.protobuf.GeneratedMessageV3.BuilderParent parent)9708 private Builder(com.google.protobuf.GeneratedMessageV3.BuilderParent 
parent) { 9709 super(parent); 9710 } 9711 9712 @java.lang.Override clear()9713 public Builder clear() { 9714 super.clear(); 9715 bitField0_ = 0; 9716 name_ = ""; 9717 displayName_ = ""; 9718 model_ = ""; 9719 modelVersionId_ = ""; 9720 unmanagedContainerModel_ = null; 9721 if (unmanagedContainerModelBuilder_ != null) { 9722 unmanagedContainerModelBuilder_.dispose(); 9723 unmanagedContainerModelBuilder_ = null; 9724 } 9725 inputConfig_ = null; 9726 if (inputConfigBuilder_ != null) { 9727 inputConfigBuilder_.dispose(); 9728 inputConfigBuilder_ = null; 9729 } 9730 instanceConfig_ = null; 9731 if (instanceConfigBuilder_ != null) { 9732 instanceConfigBuilder_.dispose(); 9733 instanceConfigBuilder_ = null; 9734 } 9735 modelParameters_ = null; 9736 if (modelParametersBuilder_ != null) { 9737 modelParametersBuilder_.dispose(); 9738 modelParametersBuilder_ = null; 9739 } 9740 outputConfig_ = null; 9741 if (outputConfigBuilder_ != null) { 9742 outputConfigBuilder_.dispose(); 9743 outputConfigBuilder_ = null; 9744 } 9745 dedicatedResources_ = null; 9746 if (dedicatedResourcesBuilder_ != null) { 9747 dedicatedResourcesBuilder_.dispose(); 9748 dedicatedResourcesBuilder_ = null; 9749 } 9750 serviceAccount_ = ""; 9751 manualBatchTuningParameters_ = null; 9752 if (manualBatchTuningParametersBuilder_ != null) { 9753 manualBatchTuningParametersBuilder_.dispose(); 9754 manualBatchTuningParametersBuilder_ = null; 9755 } 9756 generateExplanation_ = false; 9757 explanationSpec_ = null; 9758 if (explanationSpecBuilder_ != null) { 9759 explanationSpecBuilder_.dispose(); 9760 explanationSpecBuilder_ = null; 9761 } 9762 outputInfo_ = null; 9763 if (outputInfoBuilder_ != null) { 9764 outputInfoBuilder_.dispose(); 9765 outputInfoBuilder_ = null; 9766 } 9767 state_ = 0; 9768 error_ = null; 9769 if (errorBuilder_ != null) { 9770 errorBuilder_.dispose(); 9771 errorBuilder_ = null; 9772 } 9773 if (partialFailuresBuilder_ == null) { 9774 partialFailures_ = java.util.Collections.emptyList(); 9775 } else { 9776 partialFailures_ = null; 9777 partialFailuresBuilder_.clear(); 9778 } 9779 bitField0_ = (bitField0_ & ~0x00020000); 9780 resourcesConsumed_ = null; 9781 if (resourcesConsumedBuilder_ != null) { 9782 resourcesConsumedBuilder_.dispose(); 9783 resourcesConsumedBuilder_ = null; 9784 } 9785 completionStats_ = null; 9786 if (completionStatsBuilder_ != null) { 9787 completionStatsBuilder_.dispose(); 9788 completionStatsBuilder_ = null; 9789 } 9790 createTime_ = null; 9791 if (createTimeBuilder_ != null) { 9792 createTimeBuilder_.dispose(); 9793 createTimeBuilder_ = null; 9794 } 9795 startTime_ = null; 9796 if (startTimeBuilder_ != null) { 9797 startTimeBuilder_.dispose(); 9798 startTimeBuilder_ = null; 9799 } 9800 endTime_ = null; 9801 if (endTimeBuilder_ != null) { 9802 endTimeBuilder_.dispose(); 9803 endTimeBuilder_ = null; 9804 } 9805 updateTime_ = null; 9806 if (updateTimeBuilder_ != null) { 9807 updateTimeBuilder_.dispose(); 9808 updateTimeBuilder_ = null; 9809 } 9810 internalGetMutableLabels().clear(); 9811 encryptionSpec_ = null; 9812 if (encryptionSpecBuilder_ != null) { 9813 encryptionSpecBuilder_.dispose(); 9814 encryptionSpecBuilder_ = null; 9815 } 9816 modelMonitoringConfig_ = null; 9817 if (modelMonitoringConfigBuilder_ != null) { 9818 modelMonitoringConfigBuilder_.dispose(); 9819 modelMonitoringConfigBuilder_ = null; 9820 } 9821 if (modelMonitoringStatsAnomaliesBuilder_ == null) { 9822 modelMonitoringStatsAnomalies_ = java.util.Collections.emptyList(); 9823 } else { 9824 modelMonitoringStatsAnomalies_ = null; 
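    // When a repeated-field builder exists it owns the element list, so the local
    // reference is dropped and the nested builder is cleared instead of the list,
    // mirroring the partialFailures_ handling above.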
9825 modelMonitoringStatsAnomaliesBuilder_.clear(); 9826 } 9827 bitField0_ = (bitField0_ & ~0x08000000); 9828 modelMonitoringStatus_ = null; 9829 if (modelMonitoringStatusBuilder_ != null) { 9830 modelMonitoringStatusBuilder_.dispose(); 9831 modelMonitoringStatusBuilder_ = null; 9832 } 9833 disableContainerLogging_ = false; 9834 return this; 9835 } 9836 9837 @java.lang.Override getDescriptorForType()9838 public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { 9839 return com.google.cloud.aiplatform.v1beta1.BatchPredictionJobProto 9840 .internal_static_google_cloud_aiplatform_v1beta1_BatchPredictionJob_descriptor; 9841 } 9842 9843 @java.lang.Override getDefaultInstanceForType()9844 public com.google.cloud.aiplatform.v1beta1.BatchPredictionJob getDefaultInstanceForType() { 9845 return com.google.cloud.aiplatform.v1beta1.BatchPredictionJob.getDefaultInstance(); 9846 } 9847 9848 @java.lang.Override build()9849 public com.google.cloud.aiplatform.v1beta1.BatchPredictionJob build() { 9850 com.google.cloud.aiplatform.v1beta1.BatchPredictionJob result = buildPartial(); 9851 if (!result.isInitialized()) { 9852 throw newUninitializedMessageException(result); 9853 } 9854 return result; 9855 } 9856 9857 @java.lang.Override buildPartial()9858 public com.google.cloud.aiplatform.v1beta1.BatchPredictionJob buildPartial() { 9859 com.google.cloud.aiplatform.v1beta1.BatchPredictionJob result = 9860 new com.google.cloud.aiplatform.v1beta1.BatchPredictionJob(this); 9861 buildPartialRepeatedFields(result); 9862 if (bitField0_ != 0) { 9863 buildPartial0(result); 9864 } 9865 onBuilt(); 9866 return result; 9867 } 9868 buildPartialRepeatedFields( com.google.cloud.aiplatform.v1beta1.BatchPredictionJob result)9869 private void buildPartialRepeatedFields( 9870 com.google.cloud.aiplatform.v1beta1.BatchPredictionJob result) { 9871 if (partialFailuresBuilder_ == null) { 9872 if (((bitField0_ & 0x00020000) != 0)) { 9873 partialFailures_ = java.util.Collections.unmodifiableList(partialFailures_); 9874 bitField0_ = (bitField0_ & ~0x00020000); 9875 } 9876 result.partialFailures_ = partialFailures_; 9877 } else { 9878 result.partialFailures_ = partialFailuresBuilder_.build(); 9879 } 9880 if (modelMonitoringStatsAnomaliesBuilder_ == null) { 9881 if (((bitField0_ & 0x08000000) != 0)) { 9882 modelMonitoringStatsAnomalies_ = 9883 java.util.Collections.unmodifiableList(modelMonitoringStatsAnomalies_); 9884 bitField0_ = (bitField0_ & ~0x08000000); 9885 } 9886 result.modelMonitoringStatsAnomalies_ = modelMonitoringStatsAnomalies_; 9887 } else { 9888 result.modelMonitoringStatsAnomalies_ = modelMonitoringStatsAnomaliesBuilder_.build(); 9889 } 9890 } 9891 buildPartial0(com.google.cloud.aiplatform.v1beta1.BatchPredictionJob result)9892 private void buildPartial0(com.google.cloud.aiplatform.v1beta1.BatchPredictionJob result) { 9893 int from_bitField0_ = bitField0_; 9894 if (((from_bitField0_ & 0x00000001) != 0)) { 9895 result.name_ = name_; 9896 } 9897 if (((from_bitField0_ & 0x00000002) != 0)) { 9898 result.displayName_ = displayName_; 9899 } 9900 if (((from_bitField0_ & 0x00000004) != 0)) { 9901 result.model_ = model_; 9902 } 9903 if (((from_bitField0_ & 0x00000008) != 0)) { 9904 result.modelVersionId_ = modelVersionId_; 9905 } 9906 if (((from_bitField0_ & 0x00000010) != 0)) { 9907 result.unmanagedContainerModel_ = 9908 unmanagedContainerModelBuilder_ == null 9909 ? 
unmanagedContainerModel_ 9910 : unmanagedContainerModelBuilder_.build(); 9911 } 9912 if (((from_bitField0_ & 0x00000020) != 0)) { 9913 result.inputConfig_ = 9914 inputConfigBuilder_ == null ? inputConfig_ : inputConfigBuilder_.build(); 9915 } 9916 if (((from_bitField0_ & 0x00000040) != 0)) { 9917 result.instanceConfig_ = 9918 instanceConfigBuilder_ == null ? instanceConfig_ : instanceConfigBuilder_.build(); 9919 } 9920 if (((from_bitField0_ & 0x00000080) != 0)) { 9921 result.modelParameters_ = 9922 modelParametersBuilder_ == null ? modelParameters_ : modelParametersBuilder_.build(); 9923 } 9924 if (((from_bitField0_ & 0x00000100) != 0)) { 9925 result.outputConfig_ = 9926 outputConfigBuilder_ == null ? outputConfig_ : outputConfigBuilder_.build(); 9927 } 9928 if (((from_bitField0_ & 0x00000200) != 0)) { 9929 result.dedicatedResources_ = 9930 dedicatedResourcesBuilder_ == null 9931 ? dedicatedResources_ 9932 : dedicatedResourcesBuilder_.build(); 9933 } 9934 if (((from_bitField0_ & 0x00000400) != 0)) { 9935 result.serviceAccount_ = serviceAccount_; 9936 } 9937 if (((from_bitField0_ & 0x00000800) != 0)) { 9938 result.manualBatchTuningParameters_ = 9939 manualBatchTuningParametersBuilder_ == null 9940 ? manualBatchTuningParameters_ 9941 : manualBatchTuningParametersBuilder_.build(); 9942 } 9943 if (((from_bitField0_ & 0x00001000) != 0)) { 9944 result.generateExplanation_ = generateExplanation_; 9945 } 9946 if (((from_bitField0_ & 0x00002000) != 0)) { 9947 result.explanationSpec_ = 9948 explanationSpecBuilder_ == null ? explanationSpec_ : explanationSpecBuilder_.build(); 9949 } 9950 if (((from_bitField0_ & 0x00004000) != 0)) { 9951 result.outputInfo_ = outputInfoBuilder_ == null ? outputInfo_ : outputInfoBuilder_.build(); 9952 } 9953 if (((from_bitField0_ & 0x00008000) != 0)) { 9954 result.state_ = state_; 9955 } 9956 if (((from_bitField0_ & 0x00010000) != 0)) { 9957 result.error_ = errorBuilder_ == null ? error_ : errorBuilder_.build(); 9958 } 9959 if (((from_bitField0_ & 0x00040000) != 0)) { 9960 result.resourcesConsumed_ = 9961 resourcesConsumedBuilder_ == null 9962 ? resourcesConsumed_ 9963 : resourcesConsumedBuilder_.build(); 9964 } 9965 if (((from_bitField0_ & 0x00080000) != 0)) { 9966 result.completionStats_ = 9967 completionStatsBuilder_ == null ? completionStats_ : completionStatsBuilder_.build(); 9968 } 9969 if (((from_bitField0_ & 0x00100000) != 0)) { 9970 result.createTime_ = createTimeBuilder_ == null ? createTime_ : createTimeBuilder_.build(); 9971 } 9972 if (((from_bitField0_ & 0x00200000) != 0)) { 9973 result.startTime_ = startTimeBuilder_ == null ? startTime_ : startTimeBuilder_.build(); 9974 } 9975 if (((from_bitField0_ & 0x00400000) != 0)) { 9976 result.endTime_ = endTimeBuilder_ == null ? endTime_ : endTimeBuilder_.build(); 9977 } 9978 if (((from_bitField0_ & 0x00800000) != 0)) { 9979 result.updateTime_ = updateTimeBuilder_ == null ? updateTime_ : updateTimeBuilder_.build(); 9980 } 9981 if (((from_bitField0_ & 0x01000000) != 0)) { 9982 result.labels_ = internalGetLabels(); 9983 result.labels_.makeImmutable(); 9984 } 9985 if (((from_bitField0_ & 0x02000000) != 0)) { 9986 result.encryptionSpec_ = 9987 encryptionSpecBuilder_ == null ? encryptionSpec_ : encryptionSpecBuilder_.build(); 9988 } 9989 if (((from_bitField0_ & 0x04000000) != 0)) { 9990 result.modelMonitoringConfig_ = 9991 modelMonitoringConfigBuilder_ == null 9992 ? 
modelMonitoringConfig_ 9993 : modelMonitoringConfigBuilder_.build(); 9994 } 9995 if (((from_bitField0_ & 0x10000000) != 0)) { 9996 result.modelMonitoringStatus_ = 9997 modelMonitoringStatusBuilder_ == null 9998 ? modelMonitoringStatus_ 9999 : modelMonitoringStatusBuilder_.build(); 10000 } 10001 if (((from_bitField0_ & 0x20000000) != 0)) { 10002 result.disableContainerLogging_ = disableContainerLogging_; 10003 } 10004 } 10005 10006 @java.lang.Override clone()10007 public Builder clone() { 10008 return super.clone(); 10009 } 10010 10011 @java.lang.Override setField( com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value)10012 public Builder setField( 10013 com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { 10014 return super.setField(field, value); 10015 } 10016 10017 @java.lang.Override clearField(com.google.protobuf.Descriptors.FieldDescriptor field)10018 public Builder clearField(com.google.protobuf.Descriptors.FieldDescriptor field) { 10019 return super.clearField(field); 10020 } 10021 10022 @java.lang.Override clearOneof(com.google.protobuf.Descriptors.OneofDescriptor oneof)10023 public Builder clearOneof(com.google.protobuf.Descriptors.OneofDescriptor oneof) { 10024 return super.clearOneof(oneof); 10025 } 10026 10027 @java.lang.Override setRepeatedField( com.google.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value)10028 public Builder setRepeatedField( 10029 com.google.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) { 10030 return super.setRepeatedField(field, index, value); 10031 } 10032 10033 @java.lang.Override addRepeatedField( com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value)10034 public Builder addRepeatedField( 10035 com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { 10036 return super.addRepeatedField(field, value); 10037 } 10038 10039 @java.lang.Override mergeFrom(com.google.protobuf.Message other)10040 public Builder mergeFrom(com.google.protobuf.Message other) { 10041 if (other instanceof com.google.cloud.aiplatform.v1beta1.BatchPredictionJob) { 10042 return mergeFrom((com.google.cloud.aiplatform.v1beta1.BatchPredictionJob) other); 10043 } else { 10044 super.mergeFrom(other); 10045 return this; 10046 } 10047 } 10048 mergeFrom(com.google.cloud.aiplatform.v1beta1.BatchPredictionJob other)10049 public Builder mergeFrom(com.google.cloud.aiplatform.v1beta1.BatchPredictionJob other) { 10050 if (other == com.google.cloud.aiplatform.v1beta1.BatchPredictionJob.getDefaultInstance()) 10051 return this; 10052 if (!other.getName().isEmpty()) { 10053 name_ = other.name_; 10054 bitField0_ |= 0x00000001; 10055 onChanged(); 10056 } 10057 if (!other.getDisplayName().isEmpty()) { 10058 displayName_ = other.displayName_; 10059 bitField0_ |= 0x00000002; 10060 onChanged(); 10061 } 10062 if (!other.getModel().isEmpty()) { 10063 model_ = other.model_; 10064 bitField0_ |= 0x00000004; 10065 onChanged(); 10066 } 10067 if (!other.getModelVersionId().isEmpty()) { 10068 modelVersionId_ = other.modelVersionId_; 10069 bitField0_ |= 0x00000008; 10070 onChanged(); 10071 } 10072 if (other.hasUnmanagedContainerModel()) { 10073 mergeUnmanagedContainerModel(other.getUnmanagedContainerModel()); 10074 } 10075 if (other.hasInputConfig()) { 10076 mergeInputConfig(other.getInputConfig()); 10077 } 10078 if (other.hasInstanceConfig()) { 10079 mergeInstanceConfig(other.getInstanceConfig()); 10080 } 10081 if (other.hasModelParameters()) { 10082 
mergeModelParameters(other.getModelParameters()); 10083 } 10084 if (other.hasOutputConfig()) { 10085 mergeOutputConfig(other.getOutputConfig()); 10086 } 10087 if (other.hasDedicatedResources()) { 10088 mergeDedicatedResources(other.getDedicatedResources()); 10089 } 10090 if (!other.getServiceAccount().isEmpty()) { 10091 serviceAccount_ = other.serviceAccount_; 10092 bitField0_ |= 0x00000400; 10093 onChanged(); 10094 } 10095 if (other.hasManualBatchTuningParameters()) { 10096 mergeManualBatchTuningParameters(other.getManualBatchTuningParameters()); 10097 } 10098 if (other.getGenerateExplanation() != false) { 10099 setGenerateExplanation(other.getGenerateExplanation()); 10100 } 10101 if (other.hasExplanationSpec()) { 10102 mergeExplanationSpec(other.getExplanationSpec()); 10103 } 10104 if (other.hasOutputInfo()) { 10105 mergeOutputInfo(other.getOutputInfo()); 10106 } 10107 if (other.state_ != 0) { 10108 setStateValue(other.getStateValue()); 10109 } 10110 if (other.hasError()) { 10111 mergeError(other.getError()); 10112 } 10113 if (partialFailuresBuilder_ == null) { 10114 if (!other.partialFailures_.isEmpty()) { 10115 if (partialFailures_.isEmpty()) { 10116 partialFailures_ = other.partialFailures_; 10117 bitField0_ = (bitField0_ & ~0x00020000); 10118 } else { 10119 ensurePartialFailuresIsMutable(); 10120 partialFailures_.addAll(other.partialFailures_); 10121 } 10122 onChanged(); 10123 } 10124 } else { 10125 if (!other.partialFailures_.isEmpty()) { 10126 if (partialFailuresBuilder_.isEmpty()) { 10127 partialFailuresBuilder_.dispose(); 10128 partialFailuresBuilder_ = null; 10129 partialFailures_ = other.partialFailures_; 10130 bitField0_ = (bitField0_ & ~0x00020000); 10131 partialFailuresBuilder_ = 10132 com.google.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders 10133 ? 
getPartialFailuresFieldBuilder() 10134 : null; 10135 } else { 10136 partialFailuresBuilder_.addAllMessages(other.partialFailures_); 10137 } 10138 } 10139 } 10140 if (other.hasResourcesConsumed()) { 10141 mergeResourcesConsumed(other.getResourcesConsumed()); 10142 } 10143 if (other.hasCompletionStats()) { 10144 mergeCompletionStats(other.getCompletionStats()); 10145 } 10146 if (other.hasCreateTime()) { 10147 mergeCreateTime(other.getCreateTime()); 10148 } 10149 if (other.hasStartTime()) { 10150 mergeStartTime(other.getStartTime()); 10151 } 10152 if (other.hasEndTime()) { 10153 mergeEndTime(other.getEndTime()); 10154 } 10155 if (other.hasUpdateTime()) { 10156 mergeUpdateTime(other.getUpdateTime()); 10157 } 10158 internalGetMutableLabels().mergeFrom(other.internalGetLabels()); 10159 bitField0_ |= 0x01000000; 10160 if (other.hasEncryptionSpec()) { 10161 mergeEncryptionSpec(other.getEncryptionSpec()); 10162 } 10163 if (other.hasModelMonitoringConfig()) { 10164 mergeModelMonitoringConfig(other.getModelMonitoringConfig()); 10165 } 10166 if (modelMonitoringStatsAnomaliesBuilder_ == null) { 10167 if (!other.modelMonitoringStatsAnomalies_.isEmpty()) { 10168 if (modelMonitoringStatsAnomalies_.isEmpty()) { 10169 modelMonitoringStatsAnomalies_ = other.modelMonitoringStatsAnomalies_; 10170 bitField0_ = (bitField0_ & ~0x08000000); 10171 } else { 10172 ensureModelMonitoringStatsAnomaliesIsMutable(); 10173 modelMonitoringStatsAnomalies_.addAll(other.modelMonitoringStatsAnomalies_); 10174 } 10175 onChanged(); 10176 } 10177 } else { 10178 if (!other.modelMonitoringStatsAnomalies_.isEmpty()) { 10179 if (modelMonitoringStatsAnomaliesBuilder_.isEmpty()) { 10180 modelMonitoringStatsAnomaliesBuilder_.dispose(); 10181 modelMonitoringStatsAnomaliesBuilder_ = null; 10182 modelMonitoringStatsAnomalies_ = other.modelMonitoringStatsAnomalies_; 10183 bitField0_ = (bitField0_ & ~0x08000000); 10184 modelMonitoringStatsAnomaliesBuilder_ = 10185 com.google.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders 10186 ? 
getModelMonitoringStatsAnomaliesFieldBuilder() 10187 : null; 10188 } else { 10189 modelMonitoringStatsAnomaliesBuilder_.addAllMessages( 10190 other.modelMonitoringStatsAnomalies_); 10191 } 10192 } 10193 } 10194 if (other.hasModelMonitoringStatus()) { 10195 mergeModelMonitoringStatus(other.getModelMonitoringStatus()); 10196 } 10197 if (other.getDisableContainerLogging() != false) { 10198 setDisableContainerLogging(other.getDisableContainerLogging()); 10199 } 10200 this.mergeUnknownFields(other.getUnknownFields()); 10201 onChanged(); 10202 return this; 10203 } 10204 10205 @java.lang.Override isInitialized()10206 public final boolean isInitialized() { 10207 return true; 10208 } 10209 10210 @java.lang.Override mergeFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)10211 public Builder mergeFrom( 10212 com.google.protobuf.CodedInputStream input, 10213 com.google.protobuf.ExtensionRegistryLite extensionRegistry) 10214 throws java.io.IOException { 10215 if (extensionRegistry == null) { 10216 throw new java.lang.NullPointerException(); 10217 } 10218 try { 10219 boolean done = false; 10220 while (!done) { 10221 int tag = input.readTag(); 10222 switch (tag) { 10223 case 0: 10224 done = true; 10225 break; 10226 case 10: 10227 { 10228 name_ = input.readStringRequireUtf8(); 10229 bitField0_ |= 0x00000001; 10230 break; 10231 } // case 10 10232 case 18: 10233 { 10234 displayName_ = input.readStringRequireUtf8(); 10235 bitField0_ |= 0x00000002; 10236 break; 10237 } // case 18 10238 case 26: 10239 { 10240 model_ = input.readStringRequireUtf8(); 10241 bitField0_ |= 0x00000004; 10242 break; 10243 } // case 26 10244 case 34: 10245 { 10246 input.readMessage(getInputConfigFieldBuilder().getBuilder(), extensionRegistry); 10247 bitField0_ |= 0x00000020; 10248 break; 10249 } // case 34 10250 case 42: 10251 { 10252 input.readMessage(getModelParametersFieldBuilder().getBuilder(), extensionRegistry); 10253 bitField0_ |= 0x00000080; 10254 break; 10255 } // case 42 10256 case 50: 10257 { 10258 input.readMessage(getOutputConfigFieldBuilder().getBuilder(), extensionRegistry); 10259 bitField0_ |= 0x00000100; 10260 break; 10261 } // case 50 10262 case 58: 10263 { 10264 input.readMessage( 10265 getDedicatedResourcesFieldBuilder().getBuilder(), extensionRegistry); 10266 bitField0_ |= 0x00000200; 10267 break; 10268 } // case 58 10269 case 66: 10270 { 10271 input.readMessage( 10272 getManualBatchTuningParametersFieldBuilder().getBuilder(), extensionRegistry); 10273 bitField0_ |= 0x00000800; 10274 break; 10275 } // case 66 10276 case 74: 10277 { 10278 input.readMessage(getOutputInfoFieldBuilder().getBuilder(), extensionRegistry); 10279 bitField0_ |= 0x00004000; 10280 break; 10281 } // case 74 10282 case 80: 10283 { 10284 state_ = input.readEnum(); 10285 bitField0_ |= 0x00008000; 10286 break; 10287 } // case 80 10288 case 90: 10289 { 10290 input.readMessage(getErrorFieldBuilder().getBuilder(), extensionRegistry); 10291 bitField0_ |= 0x00010000; 10292 break; 10293 } // case 90 10294 case 98: 10295 { 10296 com.google.rpc.Status m = 10297 input.readMessage(com.google.rpc.Status.parser(), extensionRegistry); 10298 if (partialFailuresBuilder_ == null) { 10299 ensurePartialFailuresIsMutable(); 10300 partialFailures_.add(m); 10301 } else { 10302 partialFailuresBuilder_.addMessage(m); 10303 } 10304 break; 10305 } // case 98 10306 case 106: 10307 { 10308 input.readMessage( 10309 getResourcesConsumedFieldBuilder().getBuilder(), extensionRegistry); 10310 bitField0_ |= 0x00040000; 
10311 break; 10312 } // case 106 10313 case 114: 10314 { 10315 input.readMessage(getCompletionStatsFieldBuilder().getBuilder(), extensionRegistry); 10316 bitField0_ |= 0x00080000; 10317 break; 10318 } // case 114 10319 case 122: 10320 { 10321 input.readMessage(getCreateTimeFieldBuilder().getBuilder(), extensionRegistry); 10322 bitField0_ |= 0x00100000; 10323 break; 10324 } // case 122 10325 case 130: 10326 { 10327 input.readMessage(getStartTimeFieldBuilder().getBuilder(), extensionRegistry); 10328 bitField0_ |= 0x00200000; 10329 break; 10330 } // case 130 10331 case 138: 10332 { 10333 input.readMessage(getEndTimeFieldBuilder().getBuilder(), extensionRegistry); 10334 bitField0_ |= 0x00400000; 10335 break; 10336 } // case 138 10337 case 146: 10338 { 10339 input.readMessage(getUpdateTimeFieldBuilder().getBuilder(), extensionRegistry); 10340 bitField0_ |= 0x00800000; 10341 break; 10342 } // case 146 10343 case 154: 10344 { 10345 com.google.protobuf.MapEntry<java.lang.String, java.lang.String> labels__ = 10346 input.readMessage( 10347 LabelsDefaultEntryHolder.defaultEntry.getParserForType(), 10348 extensionRegistry); 10349 internalGetMutableLabels() 10350 .getMutableMap() 10351 .put(labels__.getKey(), labels__.getValue()); 10352 bitField0_ |= 0x01000000; 10353 break; 10354 } // case 154 10355 case 184: 10356 { 10357 generateExplanation_ = input.readBool(); 10358 bitField0_ |= 0x00001000; 10359 break; 10360 } // case 184 10361 case 194: 10362 { 10363 input.readMessage(getEncryptionSpecFieldBuilder().getBuilder(), extensionRegistry); 10364 bitField0_ |= 0x02000000; 10365 break; 10366 } // case 194 10367 case 202: 10368 { 10369 input.readMessage(getExplanationSpecFieldBuilder().getBuilder(), extensionRegistry); 10370 bitField0_ |= 0x00002000; 10371 break; 10372 } // case 202 10373 case 210: 10374 { 10375 input.readMessage( 10376 getModelMonitoringConfigFieldBuilder().getBuilder(), extensionRegistry); 10377 bitField0_ |= 0x04000000; 10378 break; 10379 } // case 210 10380 case 218: 10381 { 10382 input.readMessage(getInstanceConfigFieldBuilder().getBuilder(), extensionRegistry); 10383 bitField0_ |= 0x00000040; 10384 break; 10385 } // case 218 10386 case 226: 10387 { 10388 input.readMessage( 10389 getUnmanagedContainerModelFieldBuilder().getBuilder(), extensionRegistry); 10390 bitField0_ |= 0x00000010; 10391 break; 10392 } // case 226 10393 case 234: 10394 { 10395 serviceAccount_ = input.readStringRequireUtf8(); 10396 bitField0_ |= 0x00000400; 10397 break; 10398 } // case 234 10399 case 242: 10400 { 10401 modelVersionId_ = input.readStringRequireUtf8(); 10402 bitField0_ |= 0x00000008; 10403 break; 10404 } // case 242 10405 case 250: 10406 { 10407 com.google.cloud.aiplatform.v1beta1.ModelMonitoringStatsAnomalies m = 10408 input.readMessage( 10409 com.google.cloud.aiplatform.v1beta1.ModelMonitoringStatsAnomalies.parser(), 10410 extensionRegistry); 10411 if (modelMonitoringStatsAnomaliesBuilder_ == null) { 10412 ensureModelMonitoringStatsAnomaliesIsMutable(); 10413 modelMonitoringStatsAnomalies_.add(m); 10414 } else { 10415 modelMonitoringStatsAnomaliesBuilder_.addMessage(m); 10416 } 10417 break; 10418 } // case 250 10419 case 258: 10420 { 10421 input.readMessage( 10422 getModelMonitoringStatusFieldBuilder().getBuilder(), extensionRegistry); 10423 bitField0_ |= 0x10000000; 10424 break; 10425 } // case 258 10426 case 272: 10427 { 10428 disableContainerLogging_ = input.readBool(); 10429 bitField0_ |= 0x20000000; 10430 break; 10431 } // case 272 10432 default: 10433 { 10434 if 
(!super.parseUnknownField(input, extensionRegistry, tag)) { 10435 done = true; // was an endgroup tag 10436 } 10437 break; 10438 } // default: 10439 } // switch (tag) 10440 } // while (!done) 10441 } catch (com.google.protobuf.InvalidProtocolBufferException e) { 10442 throw e.unwrapIOException(); 10443 } finally { 10444 onChanged(); 10445 } // finally 10446 return this; 10447 } 10448 10449 private int bitField0_; 10450 10451 private java.lang.Object name_ = ""; 10452 /** 10453 * 10454 * 10455 * <pre> 10456 * Output only. Resource name of the BatchPredictionJob. 10457 * </pre> 10458 * 10459 * <code>string name = 1 [(.google.api.field_behavior) = OUTPUT_ONLY];</code> 10460 * 10461 * @return The name. 10462 */ getName()10463 public java.lang.String getName() { 10464 java.lang.Object ref = name_; 10465 if (!(ref instanceof java.lang.String)) { 10466 com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; 10467 java.lang.String s = bs.toStringUtf8(); 10468 name_ = s; 10469 return s; 10470 } else { 10471 return (java.lang.String) ref; 10472 } 10473 } 10474 /** 10475 * 10476 * 10477 * <pre> 10478 * Output only. Resource name of the BatchPredictionJob. 10479 * </pre> 10480 * 10481 * <code>string name = 1 [(.google.api.field_behavior) = OUTPUT_ONLY];</code> 10482 * 10483 * @return The bytes for name. 10484 */ getNameBytes()10485 public com.google.protobuf.ByteString getNameBytes() { 10486 java.lang.Object ref = name_; 10487 if (ref instanceof String) { 10488 com.google.protobuf.ByteString b = 10489 com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref); 10490 name_ = b; 10491 return b; 10492 } else { 10493 return (com.google.protobuf.ByteString) ref; 10494 } 10495 } 10496 /** 10497 * 10498 * 10499 * <pre> 10500 * Output only. Resource name of the BatchPredictionJob. 10501 * </pre> 10502 * 10503 * <code>string name = 1 [(.google.api.field_behavior) = OUTPUT_ONLY];</code> 10504 * 10505 * @param value The name to set. 10506 * @return This builder for chaining. 10507 */ setName(java.lang.String value)10508 public Builder setName(java.lang.String value) { 10509 if (value == null) { 10510 throw new NullPointerException(); 10511 } 10512 name_ = value; 10513 bitField0_ |= 0x00000001; 10514 onChanged(); 10515 return this; 10516 } 10517 /** 10518 * 10519 * 10520 * <pre> 10521 * Output only. Resource name of the BatchPredictionJob. 10522 * </pre> 10523 * 10524 * <code>string name = 1 [(.google.api.field_behavior) = OUTPUT_ONLY];</code> 10525 * 10526 * @return This builder for chaining. 10527 */ clearName()10528 public Builder clearName() { 10529 name_ = getDefaultInstance().getName(); 10530 bitField0_ = (bitField0_ & ~0x00000001); 10531 onChanged(); 10532 return this; 10533 } 10534 /** 10535 * 10536 * 10537 * <pre> 10538 * Output only. Resource name of the BatchPredictionJob. 10539 * </pre> 10540 * 10541 * <code>string name = 1 [(.google.api.field_behavior) = OUTPUT_ONLY];</code> 10542 * 10543 * @param value The bytes for name to set. 10544 * @return This builder for chaining. 10545 */ setNameBytes(com.google.protobuf.ByteString value)10546 public Builder setNameBytes(com.google.protobuf.ByteString value) { 10547 if (value == null) { 10548 throw new NullPointerException(); 10549 } 10550 checkByteStringIsUtf8(value); 10551 name_ = value; 10552 bitField0_ |= 0x00000001; 10553 onChanged(); 10554 return this; 10555 } 10556 10557 private java.lang.Object displayName_ = ""; 10558 /** 10559 * 10560 * 10561 * <pre> 10562 * Required. The user-defined name of this BatchPredictionJob. 
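     *
     * A minimal usage sketch (not part of the generated API surface; the display name and
     * model resource path below are placeholder values, and only methods defined in this
     * class -- newBuilder(), setDisplayName, setModel, build(), parseFrom, toByteArray --
     * are assumed):
     *
     *   BatchPredictionJob job =
     *       BatchPredictionJob.newBuilder()
     *           .setDisplayName("nightly-scoring")  // required by the service
     *           .setModel("projects/my-project/locations/us-central1/models/my-model@1")
     *           .build();  // input_config and output_config are also required; omitted here
     *   // Round-trips through the same wire format that mergeFrom(CodedInputStream) parses
     *   // (parseFrom throws InvalidProtocolBufferException on malformed input).
     *   BatchPredictionJob copy = BatchPredictionJob.parseFrom(job.toByteArray());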
10563 * </pre> 10564 * 10565 * <code>string display_name = 2 [(.google.api.field_behavior) = REQUIRED];</code> 10566 * 10567 * @return The displayName. 10568 */ getDisplayName()10569 public java.lang.String getDisplayName() { 10570 java.lang.Object ref = displayName_; 10571 if (!(ref instanceof java.lang.String)) { 10572 com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; 10573 java.lang.String s = bs.toStringUtf8(); 10574 displayName_ = s; 10575 return s; 10576 } else { 10577 return (java.lang.String) ref; 10578 } 10579 } 10580 /** 10581 * 10582 * 10583 * <pre> 10584 * Required. The user-defined name of this BatchPredictionJob. 10585 * </pre> 10586 * 10587 * <code>string display_name = 2 [(.google.api.field_behavior) = REQUIRED];</code> 10588 * 10589 * @return The bytes for displayName. 10590 */ getDisplayNameBytes()10591 public com.google.protobuf.ByteString getDisplayNameBytes() { 10592 java.lang.Object ref = displayName_; 10593 if (ref instanceof String) { 10594 com.google.protobuf.ByteString b = 10595 com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref); 10596 displayName_ = b; 10597 return b; 10598 } else { 10599 return (com.google.protobuf.ByteString) ref; 10600 } 10601 } 10602 /** 10603 * 10604 * 10605 * <pre> 10606 * Required. The user-defined name of this BatchPredictionJob. 10607 * </pre> 10608 * 10609 * <code>string display_name = 2 [(.google.api.field_behavior) = REQUIRED];</code> 10610 * 10611 * @param value The displayName to set. 10612 * @return This builder for chaining. 10613 */ setDisplayName(java.lang.String value)10614 public Builder setDisplayName(java.lang.String value) { 10615 if (value == null) { 10616 throw new NullPointerException(); 10617 } 10618 displayName_ = value; 10619 bitField0_ |= 0x00000002; 10620 onChanged(); 10621 return this; 10622 } 10623 /** 10624 * 10625 * 10626 * <pre> 10627 * Required. The user-defined name of this BatchPredictionJob. 10628 * </pre> 10629 * 10630 * <code>string display_name = 2 [(.google.api.field_behavior) = REQUIRED];</code> 10631 * 10632 * @return This builder for chaining. 10633 */ clearDisplayName()10634 public Builder clearDisplayName() { 10635 displayName_ = getDefaultInstance().getDisplayName(); 10636 bitField0_ = (bitField0_ & ~0x00000002); 10637 onChanged(); 10638 return this; 10639 } 10640 /** 10641 * 10642 * 10643 * <pre> 10644 * Required. The user-defined name of this BatchPredictionJob. 10645 * </pre> 10646 * 10647 * <code>string display_name = 2 [(.google.api.field_behavior) = REQUIRED];</code> 10648 * 10649 * @param value The bytes for displayName to set. 10650 * @return This builder for chaining. 10651 */ setDisplayNameBytes(com.google.protobuf.ByteString value)10652 public Builder setDisplayNameBytes(com.google.protobuf.ByteString value) { 10653 if (value == null) { 10654 throw new NullPointerException(); 10655 } 10656 checkByteStringIsUtf8(value); 10657 displayName_ = value; 10658 bitField0_ |= 0x00000002; 10659 onChanged(); 10660 return this; 10661 } 10662 10663 private java.lang.Object model_ = ""; 10664 /** 10665 * 10666 * 10667 * <pre> 10668 * The name of the Model resource that produces the predictions via this job, 10669 * must share the same ancestor Location. 10670 * Starting this job has no impact on any existing deployments of the Model 10671 * and their resources. 10672 * Exactly one of model and unmanaged_container_model must be set. 10673 * The model resource name may contain version id or version alias to specify 10674 * the version. 
10675 * Example: `projects/{project}/locations/{location}/models/{model}@2` 10676 * or 10677 * `projects/{project}/locations/{location}/models/{model}@golden` 10678 * if no version is specified, the default version will be deployed. 10679 * </pre> 10680 * 10681 * <code>string model = 3 [(.google.api.resource_reference) = { ... }</code> 10682 * 10683 * @return The model. 10684 */ getModel()10685 public java.lang.String getModel() { 10686 java.lang.Object ref = model_; 10687 if (!(ref instanceof java.lang.String)) { 10688 com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; 10689 java.lang.String s = bs.toStringUtf8(); 10690 model_ = s; 10691 return s; 10692 } else { 10693 return (java.lang.String) ref; 10694 } 10695 } 10696 /** 10697 * 10698 * 10699 * <pre> 10700 * The name of the Model resource that produces the predictions via this job, 10701 * must share the same ancestor Location. 10702 * Starting this job has no impact on any existing deployments of the Model 10703 * and their resources. 10704 * Exactly one of model and unmanaged_container_model must be set. 10705 * The model resource name may contain version id or version alias to specify 10706 * the version. 10707 * Example: `projects/{project}/locations/{location}/models/{model}@2` 10708 * or 10709 * `projects/{project}/locations/{location}/models/{model}@golden` 10710 * if no version is specified, the default version will be deployed. 10711 * </pre> 10712 * 10713 * <code>string model = 3 [(.google.api.resource_reference) = { ... }</code> 10714 * 10715 * @return The bytes for model. 10716 */ getModelBytes()10717 public com.google.protobuf.ByteString getModelBytes() { 10718 java.lang.Object ref = model_; 10719 if (ref instanceof String) { 10720 com.google.protobuf.ByteString b = 10721 com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref); 10722 model_ = b; 10723 return b; 10724 } else { 10725 return (com.google.protobuf.ByteString) ref; 10726 } 10727 } 10728 /** 10729 * 10730 * 10731 * <pre> 10732 * The name of the Model resource that produces the predictions via this job, 10733 * must share the same ancestor Location. 10734 * Starting this job has no impact on any existing deployments of the Model 10735 * and their resources. 10736 * Exactly one of model and unmanaged_container_model must be set. 10737 * The model resource name may contain version id or version alias to specify 10738 * the version. 10739 * Example: `projects/{project}/locations/{location}/models/{model}@2` 10740 * or 10741 * `projects/{project}/locations/{location}/models/{model}@golden` 10742 * if no version is specified, the default version will be deployed. 10743 * </pre> 10744 * 10745 * <code>string model = 3 [(.google.api.resource_reference) = { ... }</code> 10746 * 10747 * @param value The model to set. 10748 * @return This builder for chaining. 10749 */ setModel(java.lang.String value)10750 public Builder setModel(java.lang.String value) { 10751 if (value == null) { 10752 throw new NullPointerException(); 10753 } 10754 model_ = value; 10755 bitField0_ |= 0x00000004; 10756 onChanged(); 10757 return this; 10758 } 10759 /** 10760 * 10761 * 10762 * <pre> 10763 * The name of the Model resource that produces the predictions via this job, 10764 * must share the same ancestor Location. 10765 * Starting this job has no impact on any existing deployments of the Model 10766 * and their resources. 10767 * Exactly one of model and unmanaged_container_model must be set. 
10768 * The model resource name may contain version id or version alias to specify 10769 * the version. 10770 * Example: `projects/{project}/locations/{location}/models/{model}@2` 10771 * or 10772 * `projects/{project}/locations/{location}/models/{model}@golden` 10773 * if no version is specified, the default version will be deployed. 10774 * </pre> 10775 * 10776 * <code>string model = 3 [(.google.api.resource_reference) = { ... }</code> 10777 * 10778 * @return This builder for chaining. 10779 */ clearModel()10780 public Builder clearModel() { 10781 model_ = getDefaultInstance().getModel(); 10782 bitField0_ = (bitField0_ & ~0x00000004); 10783 onChanged(); 10784 return this; 10785 } 10786 /** 10787 * 10788 * 10789 * <pre> 10790 * The name of the Model resource that produces the predictions via this job, 10791 * must share the same ancestor Location. 10792 * Starting this job has no impact on any existing deployments of the Model 10793 * and their resources. 10794 * Exactly one of model and unmanaged_container_model must be set. 10795 * The model resource name may contain version id or version alias to specify 10796 * the version. 10797 * Example: `projects/{project}/locations/{location}/models/{model}@2` 10798 * or 10799 * `projects/{project}/locations/{location}/models/{model}@golden` 10800 * if no version is specified, the default version will be deployed. 10801 * </pre> 10802 * 10803 * <code>string model = 3 [(.google.api.resource_reference) = { ... }</code> 10804 * 10805 * @param value The bytes for model to set. 10806 * @return This builder for chaining. 10807 */ setModelBytes(com.google.protobuf.ByteString value)10808 public Builder setModelBytes(com.google.protobuf.ByteString value) { 10809 if (value == null) { 10810 throw new NullPointerException(); 10811 } 10812 checkByteStringIsUtf8(value); 10813 model_ = value; 10814 bitField0_ |= 0x00000004; 10815 onChanged(); 10816 return this; 10817 } 10818 10819 private java.lang.Object modelVersionId_ = ""; 10820 /** 10821 * 10822 * 10823 * <pre> 10824 * Output only. The version ID of the Model that produces the predictions via 10825 * this job. 10826 * </pre> 10827 * 10828 * <code>string model_version_id = 30 [(.google.api.field_behavior) = OUTPUT_ONLY];</code> 10829 * 10830 * @return The modelVersionId. 10831 */ getModelVersionId()10832 public java.lang.String getModelVersionId() { 10833 java.lang.Object ref = modelVersionId_; 10834 if (!(ref instanceof java.lang.String)) { 10835 com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; 10836 java.lang.String s = bs.toStringUtf8(); 10837 modelVersionId_ = s; 10838 return s; 10839 } else { 10840 return (java.lang.String) ref; 10841 } 10842 } 10843 /** 10844 * 10845 * 10846 * <pre> 10847 * Output only. The version ID of the Model that produces the predictions via 10848 * this job. 10849 * </pre> 10850 * 10851 * <code>string model_version_id = 30 [(.google.api.field_behavior) = OUTPUT_ONLY];</code> 10852 * 10853 * @return The bytes for modelVersionId. 10854 */ getModelVersionIdBytes()10855 public com.google.protobuf.ByteString getModelVersionIdBytes() { 10856 java.lang.Object ref = modelVersionId_; 10857 if (ref instanceof String) { 10858 com.google.protobuf.ByteString b = 10859 com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref); 10860 modelVersionId_ = b; 10861 return b; 10862 } else { 10863 return (com.google.protobuf.ByteString) ref; 10864 } 10865 } 10866 /** 10867 * 10868 * 10869 * <pre> 10870 * Output only. 
The version ID of the Model that produces the predictions via 10871 * this job. 10872 * </pre> 10873 * 10874 * <code>string model_version_id = 30 [(.google.api.field_behavior) = OUTPUT_ONLY];</code> 10875 * 10876 * @param value The modelVersionId to set. 10877 * @return This builder for chaining. 10878 */ setModelVersionId(java.lang.String value)10879 public Builder setModelVersionId(java.lang.String value) { 10880 if (value == null) { 10881 throw new NullPointerException(); 10882 } 10883 modelVersionId_ = value; 10884 bitField0_ |= 0x00000008; 10885 onChanged(); 10886 return this; 10887 } 10888 /** 10889 * 10890 * 10891 * <pre> 10892 * Output only. The version ID of the Model that produces the predictions via 10893 * this job. 10894 * </pre> 10895 * 10896 * <code>string model_version_id = 30 [(.google.api.field_behavior) = OUTPUT_ONLY];</code> 10897 * 10898 * @return This builder for chaining. 10899 */ clearModelVersionId()10900 public Builder clearModelVersionId() { 10901 modelVersionId_ = getDefaultInstance().getModelVersionId(); 10902 bitField0_ = (bitField0_ & ~0x00000008); 10903 onChanged(); 10904 return this; 10905 } 10906 /** 10907 * 10908 * 10909 * <pre> 10910 * Output only. The version ID of the Model that produces the predictions via 10911 * this job. 10912 * </pre> 10913 * 10914 * <code>string model_version_id = 30 [(.google.api.field_behavior) = OUTPUT_ONLY];</code> 10915 * 10916 * @param value The bytes for modelVersionId to set. 10917 * @return This builder for chaining. 10918 */ setModelVersionIdBytes(com.google.protobuf.ByteString value)10919 public Builder setModelVersionIdBytes(com.google.protobuf.ByteString value) { 10920 if (value == null) { 10921 throw new NullPointerException(); 10922 } 10923 checkByteStringIsUtf8(value); 10924 modelVersionId_ = value; 10925 bitField0_ |= 0x00000008; 10926 onChanged(); 10927 return this; 10928 } 10929 10930 private com.google.cloud.aiplatform.v1beta1.UnmanagedContainerModel unmanagedContainerModel_; 10931 private com.google.protobuf.SingleFieldBuilderV3< 10932 com.google.cloud.aiplatform.v1beta1.UnmanagedContainerModel, 10933 com.google.cloud.aiplatform.v1beta1.UnmanagedContainerModel.Builder, 10934 com.google.cloud.aiplatform.v1beta1.UnmanagedContainerModelOrBuilder> 10935 unmanagedContainerModelBuilder_; 10936 /** 10937 * 10938 * 10939 * <pre> 10940 * Contains model information necessary to perform batch prediction without 10941 * requiring uploading to model registry. 10942 * Exactly one of model and unmanaged_container_model must be set. 10943 * </pre> 10944 * 10945 * <code> 10946 * .google.cloud.aiplatform.v1beta1.UnmanagedContainerModel unmanaged_container_model = 28; 10947 * </code> 10948 * 10949 * @return Whether the unmanagedContainerModel field is set. 10950 */ hasUnmanagedContainerModel()10951 public boolean hasUnmanagedContainerModel() { 10952 return ((bitField0_ & 0x00000010) != 0); 10953 } 10954 /** 10955 * 10956 * 10957 * <pre> 10958 * Contains model information necessary to perform batch prediction without 10959 * requiring uploading to model registry. 10960 * Exactly one of model and unmanaged_container_model must be set. 10961 * </pre> 10962 * 10963 * <code> 10964 * .google.cloud.aiplatform.v1beta1.UnmanagedContainerModel unmanaged_container_model = 28; 10965 * </code> 10966 * 10967 * @return The unmanagedContainerModel. 
10968 */ 10969 public com.google.cloud.aiplatform.v1beta1.UnmanagedContainerModel getUnmanagedContainerModel()10970 getUnmanagedContainerModel() { 10971 if (unmanagedContainerModelBuilder_ == null) { 10972 return unmanagedContainerModel_ == null 10973 ? com.google.cloud.aiplatform.v1beta1.UnmanagedContainerModel.getDefaultInstance() 10974 : unmanagedContainerModel_; 10975 } else { 10976 return unmanagedContainerModelBuilder_.getMessage(); 10977 } 10978 } 10979 /** 10980 * 10981 * 10982 * <pre> 10983 * Contains model information necessary to perform batch prediction without 10984 * requiring uploading to model registry. 10985 * Exactly one of model and unmanaged_container_model must be set. 10986 * </pre> 10987 * 10988 * <code> 10989 * .google.cloud.aiplatform.v1beta1.UnmanagedContainerModel unmanaged_container_model = 28; 10990 * </code> 10991 */ setUnmanagedContainerModel( com.google.cloud.aiplatform.v1beta1.UnmanagedContainerModel value)10992 public Builder setUnmanagedContainerModel( 10993 com.google.cloud.aiplatform.v1beta1.UnmanagedContainerModel value) { 10994 if (unmanagedContainerModelBuilder_ == null) { 10995 if (value == null) { 10996 throw new NullPointerException(); 10997 } 10998 unmanagedContainerModel_ = value; 10999 } else { 11000 unmanagedContainerModelBuilder_.setMessage(value); 11001 } 11002 bitField0_ |= 0x00000010; 11003 onChanged(); 11004 return this; 11005 } 11006 /** 11007 * 11008 * 11009 * <pre> 11010 * Contains model information necessary to perform batch prediction without 11011 * requiring uploading to model registry. 11012 * Exactly one of model and unmanaged_container_model must be set. 11013 * </pre> 11014 * 11015 * <code> 11016 * .google.cloud.aiplatform.v1beta1.UnmanagedContainerModel unmanaged_container_model = 28; 11017 * </code> 11018 */ setUnmanagedContainerModel( com.google.cloud.aiplatform.v1beta1.UnmanagedContainerModel.Builder builderForValue)11019 public Builder setUnmanagedContainerModel( 11020 com.google.cloud.aiplatform.v1beta1.UnmanagedContainerModel.Builder builderForValue) { 11021 if (unmanagedContainerModelBuilder_ == null) { 11022 unmanagedContainerModel_ = builderForValue.build(); 11023 } else { 11024 unmanagedContainerModelBuilder_.setMessage(builderForValue.build()); 11025 } 11026 bitField0_ |= 0x00000010; 11027 onChanged(); 11028 return this; 11029 } 11030 /** 11031 * 11032 * 11033 * <pre> 11034 * Contains model information necessary to perform batch prediction without 11035 * requiring uploading to model registry. 11036 * Exactly one of model and unmanaged_container_model must be set. 
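     *
     * A hypothetical sketch of the unmanaged route (UnmanagedContainerModel's artifactUri
     * setter is assumed from its own generated class, and the GCS path is a placeholder);
     * set either this field or model, never both:
     *
     *   UnmanagedContainerModel unmanaged =
     *       UnmanagedContainerModel.newBuilder()
     *           .setArtifactUri("gs://my-bucket/model-artifacts/")  // placeholder path
     *           .build();
     *   BatchPredictionJob.newBuilder()
     *       .setUnmanagedContainerModel(unmanaged)  // therefore no setModel(...) call
     *       .build();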
11037 * </pre> 11038 * 11039 * <code> 11040 * .google.cloud.aiplatform.v1beta1.UnmanagedContainerModel unmanaged_container_model = 28; 11041 * </code> 11042 */ mergeUnmanagedContainerModel( com.google.cloud.aiplatform.v1beta1.UnmanagedContainerModel value)11043 public Builder mergeUnmanagedContainerModel( 11044 com.google.cloud.aiplatform.v1beta1.UnmanagedContainerModel value) { 11045 if (unmanagedContainerModelBuilder_ == null) { 11046 if (((bitField0_ & 0x00000010) != 0) 11047 && unmanagedContainerModel_ != null 11048 && unmanagedContainerModel_ 11049 != com.google.cloud.aiplatform.v1beta1.UnmanagedContainerModel 11050 .getDefaultInstance()) { 11051 getUnmanagedContainerModelBuilder().mergeFrom(value); 11052 } else { 11053 unmanagedContainerModel_ = value; 11054 } 11055 } else { 11056 unmanagedContainerModelBuilder_.mergeFrom(value); 11057 } 11058 bitField0_ |= 0x00000010; 11059 onChanged(); 11060 return this; 11061 } 11062 /** 11063 * 11064 * 11065 * <pre> 11066 * Contains model information necessary to perform batch prediction without 11067 * requiring uploading to model registry. 11068 * Exactly one of model and unmanaged_container_model must be set. 11069 * </pre> 11070 * 11071 * <code> 11072 * .google.cloud.aiplatform.v1beta1.UnmanagedContainerModel unmanaged_container_model = 28; 11073 * </code> 11074 */ clearUnmanagedContainerModel()11075 public Builder clearUnmanagedContainerModel() { 11076 bitField0_ = (bitField0_ & ~0x00000010); 11077 unmanagedContainerModel_ = null; 11078 if (unmanagedContainerModelBuilder_ != null) { 11079 unmanagedContainerModelBuilder_.dispose(); 11080 unmanagedContainerModelBuilder_ = null; 11081 } 11082 onChanged(); 11083 return this; 11084 } 11085 /** 11086 * 11087 * 11088 * <pre> 11089 * Contains model information necessary to perform batch prediction without 11090 * requiring uploading to model registry. 11091 * Exactly one of model and unmanaged_container_model must be set. 11092 * </pre> 11093 * 11094 * <code> 11095 * .google.cloud.aiplatform.v1beta1.UnmanagedContainerModel unmanaged_container_model = 28; 11096 * </code> 11097 */ 11098 public com.google.cloud.aiplatform.v1beta1.UnmanagedContainerModel.Builder getUnmanagedContainerModelBuilder()11099 getUnmanagedContainerModelBuilder() { 11100 bitField0_ |= 0x00000010; 11101 onChanged(); 11102 return getUnmanagedContainerModelFieldBuilder().getBuilder(); 11103 } 11104 /** 11105 * 11106 * 11107 * <pre> 11108 * Contains model information necessary to perform batch prediction without 11109 * requiring uploading to model registry. 11110 * Exactly one of model and unmanaged_container_model must be set. 11111 * </pre> 11112 * 11113 * <code> 11114 * .google.cloud.aiplatform.v1beta1.UnmanagedContainerModel unmanaged_container_model = 28; 11115 * </code> 11116 */ 11117 public com.google.cloud.aiplatform.v1beta1.UnmanagedContainerModelOrBuilder getUnmanagedContainerModelOrBuilder()11118 getUnmanagedContainerModelOrBuilder() { 11119 if (unmanagedContainerModelBuilder_ != null) { 11120 return unmanagedContainerModelBuilder_.getMessageOrBuilder(); 11121 } else { 11122 return unmanagedContainerModel_ == null 11123 ? com.google.cloud.aiplatform.v1beta1.UnmanagedContainerModel.getDefaultInstance() 11124 : unmanagedContainerModel_; 11125 } 11126 } 11127 /** 11128 * 11129 * 11130 * <pre> 11131 * Contains model information necessary to perform batch prediction without 11132 * requiring uploading to model registry. 11133 * Exactly one of model and unmanaged_container_model must be set. 
11134 * </pre> 11135 * 11136 * <code> 11137 * .google.cloud.aiplatform.v1beta1.UnmanagedContainerModel unmanaged_container_model = 28; 11138 * </code> 11139 */ 11140 private com.google.protobuf.SingleFieldBuilderV3< 11141 com.google.cloud.aiplatform.v1beta1.UnmanagedContainerModel, 11142 com.google.cloud.aiplatform.v1beta1.UnmanagedContainerModel.Builder, 11143 com.google.cloud.aiplatform.v1beta1.UnmanagedContainerModelOrBuilder> getUnmanagedContainerModelFieldBuilder()11144 getUnmanagedContainerModelFieldBuilder() { 11145 if (unmanagedContainerModelBuilder_ == null) { 11146 unmanagedContainerModelBuilder_ = 11147 new com.google.protobuf.SingleFieldBuilderV3< 11148 com.google.cloud.aiplatform.v1beta1.UnmanagedContainerModel, 11149 com.google.cloud.aiplatform.v1beta1.UnmanagedContainerModel.Builder, 11150 com.google.cloud.aiplatform.v1beta1.UnmanagedContainerModelOrBuilder>( 11151 getUnmanagedContainerModel(), getParentForChildren(), isClean()); 11152 unmanagedContainerModel_ = null; 11153 } 11154 return unmanagedContainerModelBuilder_; 11155 } 11156 11157 private com.google.cloud.aiplatform.v1beta1.BatchPredictionJob.InputConfig inputConfig_; 11158 private com.google.protobuf.SingleFieldBuilderV3< 11159 com.google.cloud.aiplatform.v1beta1.BatchPredictionJob.InputConfig, 11160 com.google.cloud.aiplatform.v1beta1.BatchPredictionJob.InputConfig.Builder, 11161 com.google.cloud.aiplatform.v1beta1.BatchPredictionJob.InputConfigOrBuilder> 11162 inputConfigBuilder_; 11163 /** 11164 * 11165 * 11166 * <pre> 11167 * Required. Input configuration of the instances on which predictions are 11168 * performed. The schema of any single instance may be specified via the 11169 * [Model's][google.cloud.aiplatform.v1beta1.BatchPredictionJob.model] 11170 * [PredictSchemata's][google.cloud.aiplatform.v1beta1.Model.predict_schemata] 11171 * [instance_schema_uri][google.cloud.aiplatform.v1beta1.PredictSchemata.instance_schema_uri]. 11172 * </pre> 11173 * 11174 * <code> 11175 * .google.cloud.aiplatform.v1beta1.BatchPredictionJob.InputConfig input_config = 4 [(.google.api.field_behavior) = REQUIRED]; 11176 * </code> 11177 * 11178 * @return Whether the inputConfig field is set. 11179 */ hasInputConfig()11180 public boolean hasInputConfig() { 11181 return ((bitField0_ & 0x00000020) != 0); 11182 } 11183 /** 11184 * 11185 * 11186 * <pre> 11187 * Required. Input configuration of the instances on which predictions are 11188 * performed. The schema of any single instance may be specified via the 11189 * [Model's][google.cloud.aiplatform.v1beta1.BatchPredictionJob.model] 11190 * [PredictSchemata's][google.cloud.aiplatform.v1beta1.Model.predict_schemata] 11191 * [instance_schema_uri][google.cloud.aiplatform.v1beta1.PredictSchemata.instance_schema_uri]. 11192 * </pre> 11193 * 11194 * <code> 11195 * .google.cloud.aiplatform.v1beta1.BatchPredictionJob.InputConfig input_config = 4 [(.google.api.field_behavior) = REQUIRED]; 11196 * </code> 11197 * 11198 * @return The inputConfig. 11199 */ getInputConfig()11200 public com.google.cloud.aiplatform.v1beta1.BatchPredictionJob.InputConfig getInputConfig() { 11201 if (inputConfigBuilder_ == null) { 11202 return inputConfig_ == null 11203 ? com.google.cloud.aiplatform.v1beta1.BatchPredictionJob.InputConfig 11204 .getDefaultInstance() 11205 : inputConfig_; 11206 } else { 11207 return inputConfigBuilder_.getMessage(); 11208 } 11209 } 11210 /** 11211 * 11212 * 11213 * <pre> 11214 * Required. Input configuration of the instances on which predictions are 11215 * performed. 
The schema of any single instance may be specified via the 11216 * [Model's][google.cloud.aiplatform.v1beta1.BatchPredictionJob.model] 11217 * [PredictSchemata's][google.cloud.aiplatform.v1beta1.Model.predict_schemata] 11218 * [instance_schema_uri][google.cloud.aiplatform.v1beta1.PredictSchemata.instance_schema_uri]. 11219 * </pre> 11220 * 11221 * <code> 11222 * .google.cloud.aiplatform.v1beta1.BatchPredictionJob.InputConfig input_config = 4 [(.google.api.field_behavior) = REQUIRED]; 11223 * </code> 11224 */ setInputConfig( com.google.cloud.aiplatform.v1beta1.BatchPredictionJob.InputConfig value)11225 public Builder setInputConfig( 11226 com.google.cloud.aiplatform.v1beta1.BatchPredictionJob.InputConfig value) { 11227 if (inputConfigBuilder_ == null) { 11228 if (value == null) { 11229 throw new NullPointerException(); 11230 } 11231 inputConfig_ = value; 11232 } else { 11233 inputConfigBuilder_.setMessage(value); 11234 } 11235 bitField0_ |= 0x00000020; 11236 onChanged(); 11237 return this; 11238 } 11239 /** 11240 * 11241 * 11242 * <pre> 11243 * Required. Input configuration of the instances on which predictions are 11244 * performed. The schema of any single instance may be specified via the 11245 * [Model's][google.cloud.aiplatform.v1beta1.BatchPredictionJob.model] 11246 * [PredictSchemata's][google.cloud.aiplatform.v1beta1.Model.predict_schemata] 11247 * [instance_schema_uri][google.cloud.aiplatform.v1beta1.PredictSchemata.instance_schema_uri]. 11248 * </pre> 11249 * 11250 * <code> 11251 * .google.cloud.aiplatform.v1beta1.BatchPredictionJob.InputConfig input_config = 4 [(.google.api.field_behavior) = REQUIRED]; 11252 * </code> 11253 */ setInputConfig( com.google.cloud.aiplatform.v1beta1.BatchPredictionJob.InputConfig.Builder builderForValue)11254 public Builder setInputConfig( 11255 com.google.cloud.aiplatform.v1beta1.BatchPredictionJob.InputConfig.Builder 11256 builderForValue) { 11257 if (inputConfigBuilder_ == null) { 11258 inputConfig_ = builderForValue.build(); 11259 } else { 11260 inputConfigBuilder_.setMessage(builderForValue.build()); 11261 } 11262 bitField0_ |= 0x00000020; 11263 onChanged(); 11264 return this; 11265 } 11266 /** 11267 * 11268 * 11269 * <pre> 11270 * Required. Input configuration of the instances on which predictions are 11271 * performed. The schema of any single instance may be specified via the 11272 * [Model's][google.cloud.aiplatform.v1beta1.BatchPredictionJob.model] 11273 * [PredictSchemata's][google.cloud.aiplatform.v1beta1.Model.predict_schemata] 11274 * [instance_schema_uri][google.cloud.aiplatform.v1beta1.PredictSchemata.instance_schema_uri]. 
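     *
     * A sketch of a typical input configuration (the "jsonl" format and bucket URI are
     * placeholders, and GcsSource.Builder.addUris is assumed from the GcsSource message
     * generated elsewhere in this package):
     *
     *   BatchPredictionJob.InputConfig inputConfig =
     *       BatchPredictionJob.InputConfig.newBuilder()
     *           .setInstancesFormat("jsonl")  // must be a supported_input_storage_format
     *           .setGcsSource(GcsSource.newBuilder().addUris("gs://my-bucket/instances.jsonl"))
     *           .build();
     *   BatchPredictionJob.newBuilder().setInputConfig(inputConfig);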
11275 * </pre> 11276 * 11277 * <code> 11278 * .google.cloud.aiplatform.v1beta1.BatchPredictionJob.InputConfig input_config = 4 [(.google.api.field_behavior) = REQUIRED]; 11279 * </code> 11280 */ mergeInputConfig( com.google.cloud.aiplatform.v1beta1.BatchPredictionJob.InputConfig value)11281 public Builder mergeInputConfig( 11282 com.google.cloud.aiplatform.v1beta1.BatchPredictionJob.InputConfig value) { 11283 if (inputConfigBuilder_ == null) { 11284 if (((bitField0_ & 0x00000020) != 0) 11285 && inputConfig_ != null 11286 && inputConfig_ 11287 != com.google.cloud.aiplatform.v1beta1.BatchPredictionJob.InputConfig 11288 .getDefaultInstance()) { 11289 getInputConfigBuilder().mergeFrom(value); 11290 } else { 11291 inputConfig_ = value; 11292 } 11293 } else { 11294 inputConfigBuilder_.mergeFrom(value); 11295 } 11296 bitField0_ |= 0x00000020; 11297 onChanged(); 11298 return this; 11299 } 11300 /** 11301 * 11302 * 11303 * <pre> 11304 * Required. Input configuration of the instances on which predictions are 11305 * performed. The schema of any single instance may be specified via the 11306 * [Model's][google.cloud.aiplatform.v1beta1.BatchPredictionJob.model] 11307 * [PredictSchemata's][google.cloud.aiplatform.v1beta1.Model.predict_schemata] 11308 * [instance_schema_uri][google.cloud.aiplatform.v1beta1.PredictSchemata.instance_schema_uri]. 11309 * </pre> 11310 * 11311 * <code> 11312 * .google.cloud.aiplatform.v1beta1.BatchPredictionJob.InputConfig input_config = 4 [(.google.api.field_behavior) = REQUIRED]; 11313 * </code> 11314 */ clearInputConfig()11315 public Builder clearInputConfig() { 11316 bitField0_ = (bitField0_ & ~0x00000020); 11317 inputConfig_ = null; 11318 if (inputConfigBuilder_ != null) { 11319 inputConfigBuilder_.dispose(); 11320 inputConfigBuilder_ = null; 11321 } 11322 onChanged(); 11323 return this; 11324 } 11325 /** 11326 * 11327 * 11328 * <pre> 11329 * Required. Input configuration of the instances on which predictions are 11330 * performed. The schema of any single instance may be specified via the 11331 * [Model's][google.cloud.aiplatform.v1beta1.BatchPredictionJob.model] 11332 * [PredictSchemata's][google.cloud.aiplatform.v1beta1.Model.predict_schemata] 11333 * [instance_schema_uri][google.cloud.aiplatform.v1beta1.PredictSchemata.instance_schema_uri]. 11334 * </pre> 11335 * 11336 * <code> 11337 * .google.cloud.aiplatform.v1beta1.BatchPredictionJob.InputConfig input_config = 4 [(.google.api.field_behavior) = REQUIRED]; 11338 * </code> 11339 */ 11340 public com.google.cloud.aiplatform.v1beta1.BatchPredictionJob.InputConfig.Builder getInputConfigBuilder()11341 getInputConfigBuilder() { 11342 bitField0_ |= 0x00000020; 11343 onChanged(); 11344 return getInputConfigFieldBuilder().getBuilder(); 11345 } 11346 /** 11347 * 11348 * 11349 * <pre> 11350 * Required. Input configuration of the instances on which predictions are 11351 * performed. The schema of any single instance may be specified via the 11352 * [Model's][google.cloud.aiplatform.v1beta1.BatchPredictionJob.model] 11353 * [PredictSchemata's][google.cloud.aiplatform.v1beta1.Model.predict_schemata] 11354 * [instance_schema_uri][google.cloud.aiplatform.v1beta1.PredictSchemata.instance_schema_uri]. 
11355 * </pre> 11356 * 11357 * <code> 11358 * .google.cloud.aiplatform.v1beta1.BatchPredictionJob.InputConfig input_config = 4 [(.google.api.field_behavior) = REQUIRED]; 11359 * </code> 11360 */ 11361 public com.google.cloud.aiplatform.v1beta1.BatchPredictionJob.InputConfigOrBuilder getInputConfigOrBuilder()11362 getInputConfigOrBuilder() { 11363 if (inputConfigBuilder_ != null) { 11364 return inputConfigBuilder_.getMessageOrBuilder(); 11365 } else { 11366 return inputConfig_ == null 11367 ? com.google.cloud.aiplatform.v1beta1.BatchPredictionJob.InputConfig 11368 .getDefaultInstance() 11369 : inputConfig_; 11370 } 11371 } 11372 /** 11373 * 11374 * 11375 * <pre> 11376 * Required. Input configuration of the instances on which predictions are 11377 * performed. The schema of any single instance may be specified via the 11378 * [Model's][google.cloud.aiplatform.v1beta1.BatchPredictionJob.model] 11379 * [PredictSchemata's][google.cloud.aiplatform.v1beta1.Model.predict_schemata] 11380 * [instance_schema_uri][google.cloud.aiplatform.v1beta1.PredictSchemata.instance_schema_uri]. 11381 * </pre> 11382 * 11383 * <code> 11384 * .google.cloud.aiplatform.v1beta1.BatchPredictionJob.InputConfig input_config = 4 [(.google.api.field_behavior) = REQUIRED]; 11385 * </code> 11386 */ 11387 private com.google.protobuf.SingleFieldBuilderV3< 11388 com.google.cloud.aiplatform.v1beta1.BatchPredictionJob.InputConfig, 11389 com.google.cloud.aiplatform.v1beta1.BatchPredictionJob.InputConfig.Builder, 11390 com.google.cloud.aiplatform.v1beta1.BatchPredictionJob.InputConfigOrBuilder> getInputConfigFieldBuilder()11391 getInputConfigFieldBuilder() { 11392 if (inputConfigBuilder_ == null) { 11393 inputConfigBuilder_ = 11394 new com.google.protobuf.SingleFieldBuilderV3< 11395 com.google.cloud.aiplatform.v1beta1.BatchPredictionJob.InputConfig, 11396 com.google.cloud.aiplatform.v1beta1.BatchPredictionJob.InputConfig.Builder, 11397 com.google.cloud.aiplatform.v1beta1.BatchPredictionJob.InputConfigOrBuilder>( 11398 getInputConfig(), getParentForChildren(), isClean()); 11399 inputConfig_ = null; 11400 } 11401 return inputConfigBuilder_; 11402 } 11403 11404 private com.google.cloud.aiplatform.v1beta1.BatchPredictionJob.InstanceConfig instanceConfig_; 11405 private com.google.protobuf.SingleFieldBuilderV3< 11406 com.google.cloud.aiplatform.v1beta1.BatchPredictionJob.InstanceConfig, 11407 com.google.cloud.aiplatform.v1beta1.BatchPredictionJob.InstanceConfig.Builder, 11408 com.google.cloud.aiplatform.v1beta1.BatchPredictionJob.InstanceConfigOrBuilder> 11409 instanceConfigBuilder_; 11410 /** 11411 * 11412 * 11413 * <pre> 11414 * Configuration for how to convert batch prediction input instances to the 11415 * prediction instances that are sent to the Model. 11416 * </pre> 11417 * 11418 * <code> 11419 * .google.cloud.aiplatform.v1beta1.BatchPredictionJob.InstanceConfig instance_config = 27; 11420 * </code> 11421 * 11422 * @return Whether the instanceConfig field is set. 11423 */ hasInstanceConfig()11424 public boolean hasInstanceConfig() { 11425 return ((bitField0_ & 0x00000040) != 0); 11426 } 11427 /** 11428 * 11429 * 11430 * <pre> 11431 * Configuration for how to convert batch prediction input instances to the 11432 * prediction instances that are sent to the Model. 11433 * </pre> 11434 * 11435 * <code> 11436 * .google.cloud.aiplatform.v1beta1.BatchPredictionJob.InstanceConfig instance_config = 27; 11437 * </code> 11438 * 11439 * @return The instanceConfig. 
11440 */ 11441 public com.google.cloud.aiplatform.v1beta1.BatchPredictionJob.InstanceConfig getInstanceConfig()11442 getInstanceConfig() { 11443 if (instanceConfigBuilder_ == null) { 11444 return instanceConfig_ == null 11445 ? com.google.cloud.aiplatform.v1beta1.BatchPredictionJob.InstanceConfig 11446 .getDefaultInstance() 11447 : instanceConfig_; 11448 } else { 11449 return instanceConfigBuilder_.getMessage(); 11450 } 11451 } 11452 /** 11453 * 11454 * 11455 * <pre> 11456 * Configuration for how to convert batch prediction input instances to the 11457 * prediction instances that are sent to the Model. 11458 * </pre> 11459 * 11460 * <code> 11461 * .google.cloud.aiplatform.v1beta1.BatchPredictionJob.InstanceConfig instance_config = 27; 11462 * </code> 11463 */ setInstanceConfig( com.google.cloud.aiplatform.v1beta1.BatchPredictionJob.InstanceConfig value)11464 public Builder setInstanceConfig( 11465 com.google.cloud.aiplatform.v1beta1.BatchPredictionJob.InstanceConfig value) { 11466 if (instanceConfigBuilder_ == null) { 11467 if (value == null) { 11468 throw new NullPointerException(); 11469 } 11470 instanceConfig_ = value; 11471 } else { 11472 instanceConfigBuilder_.setMessage(value); 11473 } 11474 bitField0_ |= 0x00000040; 11475 onChanged(); 11476 return this; 11477 } 11478 /** 11479 * 11480 * 11481 * <pre> 11482 * Configuration for how to convert batch prediction input instances to the 11483 * prediction instances that are sent to the Model. 11484 * </pre> 11485 * 11486 * <code> 11487 * .google.cloud.aiplatform.v1beta1.BatchPredictionJob.InstanceConfig instance_config = 27; 11488 * </code> 11489 */ setInstanceConfig( com.google.cloud.aiplatform.v1beta1.BatchPredictionJob.InstanceConfig.Builder builderForValue)11490 public Builder setInstanceConfig( 11491 com.google.cloud.aiplatform.v1beta1.BatchPredictionJob.InstanceConfig.Builder 11492 builderForValue) { 11493 if (instanceConfigBuilder_ == null) { 11494 instanceConfig_ = builderForValue.build(); 11495 } else { 11496 instanceConfigBuilder_.setMessage(builderForValue.build()); 11497 } 11498 bitField0_ |= 0x00000040; 11499 onChanged(); 11500 return this; 11501 } 11502 /** 11503 * 11504 * 11505 * <pre> 11506 * Configuration for how to convert batch prediction input instances to the 11507 * prediction instances that are sent to the Model. 11508 * </pre> 11509 * 11510 * <code> 11511 * .google.cloud.aiplatform.v1beta1.BatchPredictionJob.InstanceConfig instance_config = 27; 11512 * </code> 11513 */ mergeInstanceConfig( com.google.cloud.aiplatform.v1beta1.BatchPredictionJob.InstanceConfig value)11514 public Builder mergeInstanceConfig( 11515 com.google.cloud.aiplatform.v1beta1.BatchPredictionJob.InstanceConfig value) { 11516 if (instanceConfigBuilder_ == null) { 11517 if (((bitField0_ & 0x00000040) != 0) 11518 && instanceConfig_ != null 11519 && instanceConfig_ 11520 != com.google.cloud.aiplatform.v1beta1.BatchPredictionJob.InstanceConfig 11521 .getDefaultInstance()) { 11522 getInstanceConfigBuilder().mergeFrom(value); 11523 } else { 11524 instanceConfig_ = value; 11525 } 11526 } else { 11527 instanceConfigBuilder_.mergeFrom(value); 11528 } 11529 bitField0_ |= 0x00000040; 11530 onChanged(); 11531 return this; 11532 } 11533 /** 11534 * 11535 * 11536 * <pre> 11537 * Configuration for how to convert batch prediction input instances to the 11538 * prediction instances that are sent to the Model. 
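     *
     * A sketch of an instance_config (the instanceType and includedFields members are
     * assumptions about the InstanceConfig message; the values are placeholders):
     *
     *   BatchPredictionJob.InstanceConfig instanceConfig =
     *       BatchPredictionJob.InstanceConfig.newBuilder()
     *           .setInstanceType("object")       // send each input row as a JSON object
     *           .addIncludedFields("feature_1")  // forward only selected input columns
     *           .addIncludedFields("feature_2")
     *           .build();
     *   BatchPredictionJob.newBuilder().setInstanceConfig(instanceConfig);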
11539 * </pre> 11540 * 11541 * <code> 11542 * .google.cloud.aiplatform.v1beta1.BatchPredictionJob.InstanceConfig instance_config = 27; 11543 * </code> 11544 */ clearInstanceConfig()11545 public Builder clearInstanceConfig() { 11546 bitField0_ = (bitField0_ & ~0x00000040); 11547 instanceConfig_ = null; 11548 if (instanceConfigBuilder_ != null) { 11549 instanceConfigBuilder_.dispose(); 11550 instanceConfigBuilder_ = null; 11551 } 11552 onChanged(); 11553 return this; 11554 } 11555 /** 11556 * 11557 * 11558 * <pre> 11559 * Configuration for how to convert batch prediction input instances to the 11560 * prediction instances that are sent to the Model. 11561 * </pre> 11562 * 11563 * <code> 11564 * .google.cloud.aiplatform.v1beta1.BatchPredictionJob.InstanceConfig instance_config = 27; 11565 * </code> 11566 */ 11567 public com.google.cloud.aiplatform.v1beta1.BatchPredictionJob.InstanceConfig.Builder getInstanceConfigBuilder()11568 getInstanceConfigBuilder() { 11569 bitField0_ |= 0x00000040; 11570 onChanged(); 11571 return getInstanceConfigFieldBuilder().getBuilder(); 11572 } 11573 /** 11574 * 11575 * 11576 * <pre> 11577 * Configuration for how to convert batch prediction input instances to the 11578 * prediction instances that are sent to the Model. 11579 * </pre> 11580 * 11581 * <code> 11582 * .google.cloud.aiplatform.v1beta1.BatchPredictionJob.InstanceConfig instance_config = 27; 11583 * </code> 11584 */ 11585 public com.google.cloud.aiplatform.v1beta1.BatchPredictionJob.InstanceConfigOrBuilder getInstanceConfigOrBuilder()11586 getInstanceConfigOrBuilder() { 11587 if (instanceConfigBuilder_ != null) { 11588 return instanceConfigBuilder_.getMessageOrBuilder(); 11589 } else { 11590 return instanceConfig_ == null 11591 ? com.google.cloud.aiplatform.v1beta1.BatchPredictionJob.InstanceConfig 11592 .getDefaultInstance() 11593 : instanceConfig_; 11594 } 11595 } 11596 /** 11597 * 11598 * 11599 * <pre> 11600 * Configuration for how to convert batch prediction input instances to the 11601 * prediction instances that are sent to the Model. 11602 * </pre> 11603 * 11604 * <code> 11605 * .google.cloud.aiplatform.v1beta1.BatchPredictionJob.InstanceConfig instance_config = 27; 11606 * </code> 11607 */ 11608 private com.google.protobuf.SingleFieldBuilderV3< 11609 com.google.cloud.aiplatform.v1beta1.BatchPredictionJob.InstanceConfig, 11610 com.google.cloud.aiplatform.v1beta1.BatchPredictionJob.InstanceConfig.Builder, 11611 com.google.cloud.aiplatform.v1beta1.BatchPredictionJob.InstanceConfigOrBuilder> getInstanceConfigFieldBuilder()11612 getInstanceConfigFieldBuilder() { 11613 if (instanceConfigBuilder_ == null) { 11614 instanceConfigBuilder_ = 11615 new com.google.protobuf.SingleFieldBuilderV3< 11616 com.google.cloud.aiplatform.v1beta1.BatchPredictionJob.InstanceConfig, 11617 com.google.cloud.aiplatform.v1beta1.BatchPredictionJob.InstanceConfig.Builder, 11618 com.google.cloud.aiplatform.v1beta1.BatchPredictionJob.InstanceConfigOrBuilder>( 11619 getInstanceConfig(), getParentForChildren(), isClean()); 11620 instanceConfig_ = null; 11621 } 11622 return instanceConfigBuilder_; 11623 } 11624 11625 private com.google.protobuf.Value modelParameters_; 11626 private com.google.protobuf.SingleFieldBuilderV3< 11627 com.google.protobuf.Value, 11628 com.google.protobuf.Value.Builder, 11629 com.google.protobuf.ValueOrBuilder> 11630 modelParametersBuilder_; 11631 /** 11632 * 11633 * 11634 * <pre> 11635 * The parameters that govern the predictions. 
The schema of the parameters 11636 * may be specified via the 11637 * [Model's][google.cloud.aiplatform.v1beta1.BatchPredictionJob.model] 11638 * [PredictSchemata's][google.cloud.aiplatform.v1beta1.Model.predict_schemata] 11639 * [parameters_schema_uri][google.cloud.aiplatform.v1beta1.PredictSchemata.parameters_schema_uri]. 11640 * </pre> 11641 * 11642 * <code>.google.protobuf.Value model_parameters = 5;</code> 11643 * 11644 * @return Whether the modelParameters field is set. 11645 */ hasModelParameters()11646 public boolean hasModelParameters() { 11647 return ((bitField0_ & 0x00000080) != 0); 11648 } 11649 /** 11650 * 11651 * 11652 * <pre> 11653 * The parameters that govern the predictions. The schema of the parameters 11654 * may be specified via the 11655 * [Model's][google.cloud.aiplatform.v1beta1.BatchPredictionJob.model] 11656 * [PredictSchemata's][google.cloud.aiplatform.v1beta1.Model.predict_schemata] 11657 * [parameters_schema_uri][google.cloud.aiplatform.v1beta1.PredictSchemata.parameters_schema_uri]. 11658 * </pre> 11659 * 11660 * <code>.google.protobuf.Value model_parameters = 5;</code> 11661 * 11662 * @return The modelParameters. 11663 */ getModelParameters()11664 public com.google.protobuf.Value getModelParameters() { 11665 if (modelParametersBuilder_ == null) { 11666 return modelParameters_ == null 11667 ? com.google.protobuf.Value.getDefaultInstance() 11668 : modelParameters_; 11669 } else { 11670 return modelParametersBuilder_.getMessage(); 11671 } 11672 } 11673 /** 11674 * 11675 * 11676 * <pre> 11677 * The parameters that govern the predictions. The schema of the parameters 11678 * may be specified via the 11679 * [Model's][google.cloud.aiplatform.v1beta1.BatchPredictionJob.model] 11680 * [PredictSchemata's][google.cloud.aiplatform.v1beta1.Model.predict_schemata] 11681 * [parameters_schema_uri][google.cloud.aiplatform.v1beta1.PredictSchemata.parameters_schema_uri]. 11682 * </pre> 11683 * 11684 * <code>.google.protobuf.Value model_parameters = 5;</code> 11685 */ setModelParameters(com.google.protobuf.Value value)11686 public Builder setModelParameters(com.google.protobuf.Value value) { 11687 if (modelParametersBuilder_ == null) { 11688 if (value == null) { 11689 throw new NullPointerException(); 11690 } 11691 modelParameters_ = value; 11692 } else { 11693 modelParametersBuilder_.setMessage(value); 11694 } 11695 bitField0_ |= 0x00000080; 11696 onChanged(); 11697 return this; 11698 } 11699 /** 11700 * 11701 * 11702 * <pre> 11703 * The parameters that govern the predictions. The schema of the parameters 11704 * may be specified via the 11705 * [Model's][google.cloud.aiplatform.v1beta1.BatchPredictionJob.model] 11706 * [PredictSchemata's][google.cloud.aiplatform.v1beta1.Model.predict_schemata] 11707 * [parameters_schema_uri][google.cloud.aiplatform.v1beta1.PredictSchemata.parameters_schema_uri]. 11708 * </pre> 11709 * 11710 * <code>.google.protobuf.Value model_parameters = 5;</code> 11711 */ setModelParameters(com.google.protobuf.Value.Builder builderForValue)11712 public Builder setModelParameters(com.google.protobuf.Value.Builder builderForValue) { 11713 if (modelParametersBuilder_ == null) { 11714 modelParameters_ = builderForValue.build(); 11715 } else { 11716 modelParametersBuilder_.setMessage(builderForValue.build()); 11717 } 11718 bitField0_ |= 0x00000080; 11719 onChanged(); 11720 return this; 11721 } 11722 /** 11723 * 11724 * 11725 * <pre> 11726 * The parameters that govern the predictions. 
The schema of the parameters 11727 * may be specified via the 11728 * [Model's][google.cloud.aiplatform.v1beta1.BatchPredictionJob.model] 11729 * [PredictSchemata's][google.cloud.aiplatform.v1beta1.Model.predict_schemata] 11730 * [parameters_schema_uri][google.cloud.aiplatform.v1beta1.PredictSchemata.parameters_schema_uri]. 11731 * </pre> 11732 * 11733 * <code>.google.protobuf.Value model_parameters = 5;</code> 11734 */ mergeModelParameters(com.google.protobuf.Value value)11735 public Builder mergeModelParameters(com.google.protobuf.Value value) { 11736 if (modelParametersBuilder_ == null) { 11737 if (((bitField0_ & 0x00000080) != 0) 11738 && modelParameters_ != null 11739 && modelParameters_ != com.google.protobuf.Value.getDefaultInstance()) { 11740 getModelParametersBuilder().mergeFrom(value); 11741 } else { 11742 modelParameters_ = value; 11743 } 11744 } else { 11745 modelParametersBuilder_.mergeFrom(value); 11746 } 11747 bitField0_ |= 0x00000080; 11748 onChanged(); 11749 return this; 11750 } 11751 /** 11752 * 11753 * 11754 * <pre> 11755 * The parameters that govern the predictions. The schema of the parameters 11756 * may be specified via the 11757 * [Model's][google.cloud.aiplatform.v1beta1.BatchPredictionJob.model] 11758 * [PredictSchemata's][google.cloud.aiplatform.v1beta1.Model.predict_schemata] 11759 * [parameters_schema_uri][google.cloud.aiplatform.v1beta1.PredictSchemata.parameters_schema_uri]. 11760 * </pre> 11761 * 11762 * <code>.google.protobuf.Value model_parameters = 5;</code> 11763 */ clearModelParameters()11764 public Builder clearModelParameters() { 11765 bitField0_ = (bitField0_ & ~0x00000080); 11766 modelParameters_ = null; 11767 if (modelParametersBuilder_ != null) { 11768 modelParametersBuilder_.dispose(); 11769 modelParametersBuilder_ = null; 11770 } 11771 onChanged(); 11772 return this; 11773 } 11774 /** 11775 * 11776 * 11777 * <pre> 11778 * The parameters that govern the predictions. The schema of the parameters 11779 * may be specified via the 11780 * [Model's][google.cloud.aiplatform.v1beta1.BatchPredictionJob.model] 11781 * [PredictSchemata's][google.cloud.aiplatform.v1beta1.Model.predict_schemata] 11782 * [parameters_schema_uri][google.cloud.aiplatform.v1beta1.PredictSchemata.parameters_schema_uri]. 11783 * </pre> 11784 * 11785 * <code>.google.protobuf.Value model_parameters = 5;</code> 11786 */ getModelParametersBuilder()11787 public com.google.protobuf.Value.Builder getModelParametersBuilder() { 11788 bitField0_ |= 0x00000080; 11789 onChanged(); 11790 return getModelParametersFieldBuilder().getBuilder(); 11791 } 11792 /** 11793 * 11794 * 11795 * <pre> 11796 * The parameters that govern the predictions. The schema of the parameters 11797 * may be specified via the 11798 * [Model's][google.cloud.aiplatform.v1beta1.BatchPredictionJob.model] 11799 * [PredictSchemata's][google.cloud.aiplatform.v1beta1.Model.predict_schemata] 11800 * [parameters_schema_uri][google.cloud.aiplatform.v1beta1.PredictSchemata.parameters_schema_uri]. 11801 * </pre> 11802 * 11803 * <code>.google.protobuf.Value model_parameters = 5;</code> 11804 */ getModelParametersOrBuilder()11805 public com.google.protobuf.ValueOrBuilder getModelParametersOrBuilder() { 11806 if (modelParametersBuilder_ != null) { 11807 return modelParametersBuilder_.getMessageOrBuilder(); 11808 } else { 11809 return modelParameters_ == null 11810 ? 
com.google.protobuf.Value.getDefaultInstance() 11811 : modelParameters_; 11812 } 11813 } 11814 /** 11815 * 11816 * 11817 * <pre> 11818 * The parameters that govern the predictions. The schema of the parameters 11819 * may be specified via the 11820 * [Model's][google.cloud.aiplatform.v1beta1.BatchPredictionJob.model] 11821 * [PredictSchemata's][google.cloud.aiplatform.v1beta1.Model.predict_schemata] 11822 * [parameters_schema_uri][google.cloud.aiplatform.v1beta1.PredictSchemata.parameters_schema_uri]. 11823 * </pre> 11824 * 11825 * <code>.google.protobuf.Value model_parameters = 5;</code> 11826 */ 11827 private com.google.protobuf.SingleFieldBuilderV3< 11828 com.google.protobuf.Value, 11829 com.google.protobuf.Value.Builder, 11830 com.google.protobuf.ValueOrBuilder> getModelParametersFieldBuilder()11831 getModelParametersFieldBuilder() { 11832 if (modelParametersBuilder_ == null) { 11833 modelParametersBuilder_ = 11834 new com.google.protobuf.SingleFieldBuilderV3< 11835 com.google.protobuf.Value, 11836 com.google.protobuf.Value.Builder, 11837 com.google.protobuf.ValueOrBuilder>( 11838 getModelParameters(), getParentForChildren(), isClean()); 11839 modelParameters_ = null; 11840 } 11841 return modelParametersBuilder_; 11842 } 11843 11844 private com.google.cloud.aiplatform.v1beta1.BatchPredictionJob.OutputConfig outputConfig_; 11845 private com.google.protobuf.SingleFieldBuilderV3< 11846 com.google.cloud.aiplatform.v1beta1.BatchPredictionJob.OutputConfig, 11847 com.google.cloud.aiplatform.v1beta1.BatchPredictionJob.OutputConfig.Builder, 11848 com.google.cloud.aiplatform.v1beta1.BatchPredictionJob.OutputConfigOrBuilder> 11849 outputConfigBuilder_; 11850 /** 11851 * 11852 * 11853 * <pre> 11854 * Required. The Configuration specifying where output predictions should 11855 * be written. 11856 * The schema of any single prediction may be specified as a concatenation 11857 * of [Model's][google.cloud.aiplatform.v1beta1.BatchPredictionJob.model] 11858 * [PredictSchemata's][google.cloud.aiplatform.v1beta1.Model.predict_schemata] 11859 * [instance_schema_uri][google.cloud.aiplatform.v1beta1.PredictSchemata.instance_schema_uri] 11860 * and 11861 * [prediction_schema_uri][google.cloud.aiplatform.v1beta1.PredictSchemata.prediction_schema_uri]. 11862 * </pre> 11863 * 11864 * <code> 11865 * .google.cloud.aiplatform.v1beta1.BatchPredictionJob.OutputConfig output_config = 6 [(.google.api.field_behavior) = REQUIRED]; 11866 * </code> 11867 * 11868 * @return Whether the outputConfig field is set. 11869 */ hasOutputConfig()11870 public boolean hasOutputConfig() { 11871 return ((bitField0_ & 0x00000100) != 0); 11872 } 11873 /** 11874 * 11875 * 11876 * <pre> 11877 * Required. The Configuration specifying where output predictions should 11878 * be written. 11879 * The schema of any single prediction may be specified as a concatenation 11880 * of [Model's][google.cloud.aiplatform.v1beta1.BatchPredictionJob.model] 11881 * [PredictSchemata's][google.cloud.aiplatform.v1beta1.Model.predict_schemata] 11882 * [instance_schema_uri][google.cloud.aiplatform.v1beta1.PredictSchemata.instance_schema_uri] 11883 * and 11884 * [prediction_schema_uri][google.cloud.aiplatform.v1beta1.PredictSchemata.prediction_schema_uri]. 11885 * </pre> 11886 * 11887 * <code> 11888 * .google.cloud.aiplatform.v1beta1.BatchPredictionJob.OutputConfig output_config = 6 [(.google.api.field_behavior) = REQUIRED]; 11889 * </code> 11890 * 11891 * @return The outputConfig. 
11892 */ getOutputConfig()11893 public com.google.cloud.aiplatform.v1beta1.BatchPredictionJob.OutputConfig getOutputConfig() { 11894 if (outputConfigBuilder_ == null) { 11895 return outputConfig_ == null 11896 ? com.google.cloud.aiplatform.v1beta1.BatchPredictionJob.OutputConfig 11897 .getDefaultInstance() 11898 : outputConfig_; 11899 } else { 11900 return outputConfigBuilder_.getMessage(); 11901 } 11902 } 11903 /** 11904 * 11905 * 11906 * <pre> 11907 * Required. The Configuration specifying where output predictions should 11908 * be written. 11909 * The schema of any single prediction may be specified as a concatenation 11910 * of [Model's][google.cloud.aiplatform.v1beta1.BatchPredictionJob.model] 11911 * [PredictSchemata's][google.cloud.aiplatform.v1beta1.Model.predict_schemata] 11912 * [instance_schema_uri][google.cloud.aiplatform.v1beta1.PredictSchemata.instance_schema_uri] 11913 * and 11914 * [prediction_schema_uri][google.cloud.aiplatform.v1beta1.PredictSchemata.prediction_schema_uri]. 11915 * </pre> 11916 * 11917 * <code> 11918 * .google.cloud.aiplatform.v1beta1.BatchPredictionJob.OutputConfig output_config = 6 [(.google.api.field_behavior) = REQUIRED]; 11919 * </code> 11920 */ setOutputConfig( com.google.cloud.aiplatform.v1beta1.BatchPredictionJob.OutputConfig value)11921 public Builder setOutputConfig( 11922 com.google.cloud.aiplatform.v1beta1.BatchPredictionJob.OutputConfig value) { 11923 if (outputConfigBuilder_ == null) { 11924 if (value == null) { 11925 throw new NullPointerException(); 11926 } 11927 outputConfig_ = value; 11928 } else { 11929 outputConfigBuilder_.setMessage(value); 11930 } 11931 bitField0_ |= 0x00000100; 11932 onChanged(); 11933 return this; 11934 } 11935 /** 11936 * 11937 * 11938 * <pre> 11939 * Required. The Configuration specifying where output predictions should 11940 * be written. 11941 * The schema of any single prediction may be specified as a concatenation 11942 * of [Model's][google.cloud.aiplatform.v1beta1.BatchPredictionJob.model] 11943 * [PredictSchemata's][google.cloud.aiplatform.v1beta1.Model.predict_schemata] 11944 * [instance_schema_uri][google.cloud.aiplatform.v1beta1.PredictSchemata.instance_schema_uri] 11945 * and 11946 * [prediction_schema_uri][google.cloud.aiplatform.v1beta1.PredictSchemata.prediction_schema_uri]. 11947 * </pre> 11948 * 11949 * <code> 11950 * .google.cloud.aiplatform.v1beta1.BatchPredictionJob.OutputConfig output_config = 6 [(.google.api.field_behavior) = REQUIRED]; 11951 * </code> 11952 */ setOutputConfig( com.google.cloud.aiplatform.v1beta1.BatchPredictionJob.OutputConfig.Builder builderForValue)11953 public Builder setOutputConfig( 11954 com.google.cloud.aiplatform.v1beta1.BatchPredictionJob.OutputConfig.Builder 11955 builderForValue) { 11956 if (outputConfigBuilder_ == null) { 11957 outputConfig_ = builderForValue.build(); 11958 } else { 11959 outputConfigBuilder_.setMessage(builderForValue.build()); 11960 } 11961 bitField0_ |= 0x00000100; 11962 onChanged(); 11963 return this; 11964 } 11965 /** 11966 * 11967 * 11968 * <pre> 11969 * Required. The Configuration specifying where output predictions should 11970 * be written. 
11971 * The schema of any single prediction may be specified as a concatenation 11972 * of [Model's][google.cloud.aiplatform.v1beta1.BatchPredictionJob.model] 11973 * [PredictSchemata's][google.cloud.aiplatform.v1beta1.Model.predict_schemata] 11974 * [instance_schema_uri][google.cloud.aiplatform.v1beta1.PredictSchemata.instance_schema_uri] 11975 * and 11976 * [prediction_schema_uri][google.cloud.aiplatform.v1beta1.PredictSchemata.prediction_schema_uri]. 11977 * </pre> 11978 * 11979 * <code> 11980 * .google.cloud.aiplatform.v1beta1.BatchPredictionJob.OutputConfig output_config = 6 [(.google.api.field_behavior) = REQUIRED]; 11981 * </code> 11982 */ mergeOutputConfig( com.google.cloud.aiplatform.v1beta1.BatchPredictionJob.OutputConfig value)11983 public Builder mergeOutputConfig( 11984 com.google.cloud.aiplatform.v1beta1.BatchPredictionJob.OutputConfig value) { 11985 if (outputConfigBuilder_ == null) { 11986 if (((bitField0_ & 0x00000100) != 0) 11987 && outputConfig_ != null 11988 && outputConfig_ 11989 != com.google.cloud.aiplatform.v1beta1.BatchPredictionJob.OutputConfig 11990 .getDefaultInstance()) { 11991 getOutputConfigBuilder().mergeFrom(value); 11992 } else { 11993 outputConfig_ = value; 11994 } 11995 } else { 11996 outputConfigBuilder_.mergeFrom(value); 11997 } 11998 bitField0_ |= 0x00000100; 11999 onChanged(); 12000 return this; 12001 } 12002 /** 12003 * 12004 * 12005 * <pre> 12006 * Required. The Configuration specifying where output predictions should 12007 * be written. 12008 * The schema of any single prediction may be specified as a concatenation 12009 * of [Model's][google.cloud.aiplatform.v1beta1.BatchPredictionJob.model] 12010 * [PredictSchemata's][google.cloud.aiplatform.v1beta1.Model.predict_schemata] 12011 * [instance_schema_uri][google.cloud.aiplatform.v1beta1.PredictSchemata.instance_schema_uri] 12012 * and 12013 * [prediction_schema_uri][google.cloud.aiplatform.v1beta1.PredictSchemata.prediction_schema_uri]. 12014 * </pre> 12015 * 12016 * <code> 12017 * .google.cloud.aiplatform.v1beta1.BatchPredictionJob.OutputConfig output_config = 6 [(.google.api.field_behavior) = REQUIRED]; 12018 * </code> 12019 */ clearOutputConfig()12020 public Builder clearOutputConfig() { 12021 bitField0_ = (bitField0_ & ~0x00000100); 12022 outputConfig_ = null; 12023 if (outputConfigBuilder_ != null) { 12024 outputConfigBuilder_.dispose(); 12025 outputConfigBuilder_ = null; 12026 } 12027 onChanged(); 12028 return this; 12029 } 12030 /** 12031 * 12032 * 12033 * <pre> 12034 * Required. The Configuration specifying where output predictions should 12035 * be written. 12036 * The schema of any single prediction may be specified as a concatenation 12037 * of [Model's][google.cloud.aiplatform.v1beta1.BatchPredictionJob.model] 12038 * [PredictSchemata's][google.cloud.aiplatform.v1beta1.Model.predict_schemata] 12039 * [instance_schema_uri][google.cloud.aiplatform.v1beta1.PredictSchemata.instance_schema_uri] 12040 * and 12041 * [prediction_schema_uri][google.cloud.aiplatform.v1beta1.PredictSchemata.prediction_schema_uri]. 
12042 * </pre> 12043 * 12044 * <code> 12045 * .google.cloud.aiplatform.v1beta1.BatchPredictionJob.OutputConfig output_config = 6 [(.google.api.field_behavior) = REQUIRED]; 12046 * </code> 12047 */ 12048 public com.google.cloud.aiplatform.v1beta1.BatchPredictionJob.OutputConfig.Builder getOutputConfigBuilder()12049 getOutputConfigBuilder() { 12050 bitField0_ |= 0x00000100; 12051 onChanged(); 12052 return getOutputConfigFieldBuilder().getBuilder(); 12053 } 12054 /** 12055 * 12056 * 12057 * <pre> 12058 * Required. The Configuration specifying where output predictions should 12059 * be written. 12060 * The schema of any single prediction may be specified as a concatenation 12061 * of [Model's][google.cloud.aiplatform.v1beta1.BatchPredictionJob.model] 12062 * [PredictSchemata's][google.cloud.aiplatform.v1beta1.Model.predict_schemata] 12063 * [instance_schema_uri][google.cloud.aiplatform.v1beta1.PredictSchemata.instance_schema_uri] 12064 * and 12065 * [prediction_schema_uri][google.cloud.aiplatform.v1beta1.PredictSchemata.prediction_schema_uri]. 12066 * </pre> 12067 * 12068 * <code> 12069 * .google.cloud.aiplatform.v1beta1.BatchPredictionJob.OutputConfig output_config = 6 [(.google.api.field_behavior) = REQUIRED]; 12070 * </code> 12071 */ 12072 public com.google.cloud.aiplatform.v1beta1.BatchPredictionJob.OutputConfigOrBuilder getOutputConfigOrBuilder()12073 getOutputConfigOrBuilder() { 12074 if (outputConfigBuilder_ != null) { 12075 return outputConfigBuilder_.getMessageOrBuilder(); 12076 } else { 12077 return outputConfig_ == null 12078 ? com.google.cloud.aiplatform.v1beta1.BatchPredictionJob.OutputConfig 12079 .getDefaultInstance() 12080 : outputConfig_; 12081 } 12082 } 12083 /** 12084 * 12085 * 12086 * <pre> 12087 * Required. The Configuration specifying where output predictions should 12088 * be written. 12089 * The schema of any single prediction may be specified as a concatenation 12090 * of [Model's][google.cloud.aiplatform.v1beta1.BatchPredictionJob.model] 12091 * [PredictSchemata's][google.cloud.aiplatform.v1beta1.Model.predict_schemata] 12092 * [instance_schema_uri][google.cloud.aiplatform.v1beta1.PredictSchemata.instance_schema_uri] 12093 * and 12094 * [prediction_schema_uri][google.cloud.aiplatform.v1beta1.PredictSchemata.prediction_schema_uri]. 
12095 * </pre> 12096 * 12097 * <code> 12098 * .google.cloud.aiplatform.v1beta1.BatchPredictionJob.OutputConfig output_config = 6 [(.google.api.field_behavior) = REQUIRED]; 12099 * </code> 12100 */ 12101 private com.google.protobuf.SingleFieldBuilderV3< 12102 com.google.cloud.aiplatform.v1beta1.BatchPredictionJob.OutputConfig, 12103 com.google.cloud.aiplatform.v1beta1.BatchPredictionJob.OutputConfig.Builder, 12104 com.google.cloud.aiplatform.v1beta1.BatchPredictionJob.OutputConfigOrBuilder> getOutputConfigFieldBuilder()12105 getOutputConfigFieldBuilder() { 12106 if (outputConfigBuilder_ == null) { 12107 outputConfigBuilder_ = 12108 new com.google.protobuf.SingleFieldBuilderV3< 12109 com.google.cloud.aiplatform.v1beta1.BatchPredictionJob.OutputConfig, 12110 com.google.cloud.aiplatform.v1beta1.BatchPredictionJob.OutputConfig.Builder, 12111 com.google.cloud.aiplatform.v1beta1.BatchPredictionJob.OutputConfigOrBuilder>( 12112 getOutputConfig(), getParentForChildren(), isClean()); 12113 outputConfig_ = null; 12114 } 12115 return outputConfigBuilder_; 12116 } 12117 12118 private com.google.cloud.aiplatform.v1beta1.BatchDedicatedResources dedicatedResources_; 12119 private com.google.protobuf.SingleFieldBuilderV3< 12120 com.google.cloud.aiplatform.v1beta1.BatchDedicatedResources, 12121 com.google.cloud.aiplatform.v1beta1.BatchDedicatedResources.Builder, 12122 com.google.cloud.aiplatform.v1beta1.BatchDedicatedResourcesOrBuilder> 12123 dedicatedResourcesBuilder_; 12124 /** 12125 * 12126 * 12127 * <pre> 12128 * The config of resources used by the Model during the batch prediction. If 12129 * the Model 12130 * [supports][google.cloud.aiplatform.v1beta1.Model.supported_deployment_resources_types] 12131 * DEDICATED_RESOURCES this config may be provided (and the job will use these 12132 * resources), if the Model doesn't support AUTOMATIC_RESOURCES, this config 12133 * must be provided. 12134 * </pre> 12135 * 12136 * <code>.google.cloud.aiplatform.v1beta1.BatchDedicatedResources dedicated_resources = 7; 12137 * </code> 12138 * 12139 * @return Whether the dedicatedResources field is set. 12140 */ hasDedicatedResources()12141 public boolean hasDedicatedResources() { 12142 return ((bitField0_ & 0x00000200) != 0); 12143 } 12144 /** 12145 * 12146 * 12147 * <pre> 12148 * The config of resources used by the Model during the batch prediction. If 12149 * the Model 12150 * [supports][google.cloud.aiplatform.v1beta1.Model.supported_deployment_resources_types] 12151 * DEDICATED_RESOURCES this config may be provided (and the job will use these 12152 * resources), if the Model doesn't support AUTOMATIC_RESOURCES, this config 12153 * must be provided. 12154 * </pre> 12155 * 12156 * <code>.google.cloud.aiplatform.v1beta1.BatchDedicatedResources dedicated_resources = 7; 12157 * </code> 12158 * 12159 * @return The dedicatedResources. 12160 */ getDedicatedResources()12161 public com.google.cloud.aiplatform.v1beta1.BatchDedicatedResources getDedicatedResources() { 12162 if (dedicatedResourcesBuilder_ == null) { 12163 return dedicatedResources_ == null 12164 ? com.google.cloud.aiplatform.v1beta1.BatchDedicatedResources.getDefaultInstance() 12165 : dedicatedResources_; 12166 } else { 12167 return dedicatedResourcesBuilder_.getMessage(); 12168 } 12169 } 12170 /** 12171 * 12172 * 12173 * <pre> 12174 * The config of resources used by the Model during the batch prediction. 
If 12175 * the Model 12176 * [supports][google.cloud.aiplatform.v1beta1.Model.supported_deployment_resources_types] 12177 * DEDICATED_RESOURCES this config may be provided (and the job will use these 12178 * resources), if the Model doesn't support AUTOMATIC_RESOURCES, this config 12179 * must be provided. 12180 * </pre> 12181 * 12182 * <code>.google.cloud.aiplatform.v1beta1.BatchDedicatedResources dedicated_resources = 7; 12183 * </code> 12184 */ setDedicatedResources( com.google.cloud.aiplatform.v1beta1.BatchDedicatedResources value)12185 public Builder setDedicatedResources( 12186 com.google.cloud.aiplatform.v1beta1.BatchDedicatedResources value) { 12187 if (dedicatedResourcesBuilder_ == null) { 12188 if (value == null) { 12189 throw new NullPointerException(); 12190 } 12191 dedicatedResources_ = value; 12192 } else { 12193 dedicatedResourcesBuilder_.setMessage(value); 12194 } 12195 bitField0_ |= 0x00000200; 12196 onChanged(); 12197 return this; 12198 } 12199 /** 12200 * 12201 * 12202 * <pre> 12203 * The config of resources used by the Model during the batch prediction. If 12204 * the Model 12205 * [supports][google.cloud.aiplatform.v1beta1.Model.supported_deployment_resources_types] 12206 * DEDICATED_RESOURCES this config may be provided (and the job will use these 12207 * resources), if the Model doesn't support AUTOMATIC_RESOURCES, this config 12208 * must be provided. 12209 * </pre> 12210 * 12211 * <code>.google.cloud.aiplatform.v1beta1.BatchDedicatedResources dedicated_resources = 7; 12212 * </code> 12213 */ setDedicatedResources( com.google.cloud.aiplatform.v1beta1.BatchDedicatedResources.Builder builderForValue)12214 public Builder setDedicatedResources( 12215 com.google.cloud.aiplatform.v1beta1.BatchDedicatedResources.Builder builderForValue) { 12216 if (dedicatedResourcesBuilder_ == null) { 12217 dedicatedResources_ = builderForValue.build(); 12218 } else { 12219 dedicatedResourcesBuilder_.setMessage(builderForValue.build()); 12220 } 12221 bitField0_ |= 0x00000200; 12222 onChanged(); 12223 return this; 12224 } 12225 /** 12226 * 12227 * 12228 * <pre> 12229 * The config of resources used by the Model during the batch prediction. If 12230 * the Model 12231 * [supports][google.cloud.aiplatform.v1beta1.Model.supported_deployment_resources_types] 12232 * DEDICATED_RESOURCES this config may be provided (and the job will use these 12233 * resources), if the Model doesn't support AUTOMATIC_RESOURCES, this config 12234 * must be provided. 12235 * </pre> 12236 * 12237 * <code>.google.cloud.aiplatform.v1beta1.BatchDedicatedResources dedicated_resources = 7; 12238 * </code> 12239 */ mergeDedicatedResources( com.google.cloud.aiplatform.v1beta1.BatchDedicatedResources value)12240 public Builder mergeDedicatedResources( 12241 com.google.cloud.aiplatform.v1beta1.BatchDedicatedResources value) { 12242 if (dedicatedResourcesBuilder_ == null) { 12243 if (((bitField0_ & 0x00000200) != 0) 12244 && dedicatedResources_ != null 12245 && dedicatedResources_ 12246 != com.google.cloud.aiplatform.v1beta1.BatchDedicatedResources 12247 .getDefaultInstance()) { 12248 getDedicatedResourcesBuilder().mergeFrom(value); 12249 } else { 12250 dedicatedResources_ = value; 12251 } 12252 } else { 12253 dedicatedResourcesBuilder_.mergeFrom(value); 12254 } 12255 bitField0_ |= 0x00000200; 12256 onChanged(); 12257 return this; 12258 } 12259 /** 12260 * 12261 * 12262 * <pre> 12263 * The config of resources used by the Model during the batch prediction. 
If 12264 * the Model 12265 * [supports][google.cloud.aiplatform.v1beta1.Model.supported_deployment_resources_types] 12266 * DEDICATED_RESOURCES this config may be provided (and the job will use these 12267 * resources), if the Model doesn't support AUTOMATIC_RESOURCES, this config 12268 * must be provided. 12269 * </pre> 12270 * 12271 * <code>.google.cloud.aiplatform.v1beta1.BatchDedicatedResources dedicated_resources = 7; 12272 * </code> 12273 */ clearDedicatedResources()12274 public Builder clearDedicatedResources() { 12275 bitField0_ = (bitField0_ & ~0x00000200); 12276 dedicatedResources_ = null; 12277 if (dedicatedResourcesBuilder_ != null) { 12278 dedicatedResourcesBuilder_.dispose(); 12279 dedicatedResourcesBuilder_ = null; 12280 } 12281 onChanged(); 12282 return this; 12283 } 12284 /** 12285 * 12286 * 12287 * <pre> 12288 * The config of resources used by the Model during the batch prediction. If 12289 * the Model 12290 * [supports][google.cloud.aiplatform.v1beta1.Model.supported_deployment_resources_types] 12291 * DEDICATED_RESOURCES this config may be provided (and the job will use these 12292 * resources), if the Model doesn't support AUTOMATIC_RESOURCES, this config 12293 * must be provided. 12294 * </pre> 12295 * 12296 * <code>.google.cloud.aiplatform.v1beta1.BatchDedicatedResources dedicated_resources = 7; 12297 * </code> 12298 */ 12299 public com.google.cloud.aiplatform.v1beta1.BatchDedicatedResources.Builder getDedicatedResourcesBuilder()12300 getDedicatedResourcesBuilder() { 12301 bitField0_ |= 0x00000200; 12302 onChanged(); 12303 return getDedicatedResourcesFieldBuilder().getBuilder(); 12304 } 12305 /** 12306 * 12307 * 12308 * <pre> 12309 * The config of resources used by the Model during the batch prediction. If 12310 * the Model 12311 * [supports][google.cloud.aiplatform.v1beta1.Model.supported_deployment_resources_types] 12312 * DEDICATED_RESOURCES this config may be provided (and the job will use these 12313 * resources), if the Model doesn't support AUTOMATIC_RESOURCES, this config 12314 * must be provided. 12315 * </pre> 12316 * 12317 * <code>.google.cloud.aiplatform.v1beta1.BatchDedicatedResources dedicated_resources = 7; 12318 * </code> 12319 */ 12320 public com.google.cloud.aiplatform.v1beta1.BatchDedicatedResourcesOrBuilder getDedicatedResourcesOrBuilder()12321 getDedicatedResourcesOrBuilder() { 12322 if (dedicatedResourcesBuilder_ != null) { 12323 return dedicatedResourcesBuilder_.getMessageOrBuilder(); 12324 } else { 12325 return dedicatedResources_ == null 12326 ? com.google.cloud.aiplatform.v1beta1.BatchDedicatedResources.getDefaultInstance() 12327 : dedicatedResources_; 12328 } 12329 } 12330 /** 12331 * 12332 * 12333 * <pre> 12334 * The config of resources used by the Model during the batch prediction. If 12335 * the Model 12336 * [supports][google.cloud.aiplatform.v1beta1.Model.supported_deployment_resources_types] 12337 * DEDICATED_RESOURCES this config may be provided (and the job will use these 12338 * resources), if the Model doesn't support AUTOMATIC_RESOURCES, this config 12339 * must be provided. 
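     *
     * Illustrative sketch only; the machine type and replica counts below are
     * placeholders rather than recommendations, and "jobBuilder" is a
     * hypothetical BatchPredictionJob.Builder:
     *
     *   jobBuilder.setDedicatedResources(
     *       BatchDedicatedResources.newBuilder()
     *           // Placeholder machine shape.
     *           .setMachineSpec(MachineSpec.newBuilder().setMachineType("n1-standard-4"))
     *           .setStartingReplicaCount(1)
     *           .setMaxReplicaCount(2));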
12340 * </pre> 12341 * 12342 * <code>.google.cloud.aiplatform.v1beta1.BatchDedicatedResources dedicated_resources = 7; 12343 * </code> 12344 */ 12345 private com.google.protobuf.SingleFieldBuilderV3< 12346 com.google.cloud.aiplatform.v1beta1.BatchDedicatedResources, 12347 com.google.cloud.aiplatform.v1beta1.BatchDedicatedResources.Builder, 12348 com.google.cloud.aiplatform.v1beta1.BatchDedicatedResourcesOrBuilder> getDedicatedResourcesFieldBuilder()12349 getDedicatedResourcesFieldBuilder() { 12350 if (dedicatedResourcesBuilder_ == null) { 12351 dedicatedResourcesBuilder_ = 12352 new com.google.protobuf.SingleFieldBuilderV3< 12353 com.google.cloud.aiplatform.v1beta1.BatchDedicatedResources, 12354 com.google.cloud.aiplatform.v1beta1.BatchDedicatedResources.Builder, 12355 com.google.cloud.aiplatform.v1beta1.BatchDedicatedResourcesOrBuilder>( 12356 getDedicatedResources(), getParentForChildren(), isClean()); 12357 dedicatedResources_ = null; 12358 } 12359 return dedicatedResourcesBuilder_; 12360 } 12361 12362 private java.lang.Object serviceAccount_ = ""; 12363 /** 12364 * 12365 * 12366 * <pre> 12367 * The service account that the DeployedModel's container runs as. If not 12368 * specified, a system generated one will be used, which 12369 * has minimal permissions and the custom container, if used, may not have 12370 * enough permission to access other Google Cloud resources. 12371 * Users deploying the Model must have the `iam.serviceAccounts.actAs` 12372 * permission on this service account. 12373 * </pre> 12374 * 12375 * <code>string service_account = 29;</code> 12376 * 12377 * @return The serviceAccount. 12378 */ getServiceAccount()12379 public java.lang.String getServiceAccount() { 12380 java.lang.Object ref = serviceAccount_; 12381 if (!(ref instanceof java.lang.String)) { 12382 com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; 12383 java.lang.String s = bs.toStringUtf8(); 12384 serviceAccount_ = s; 12385 return s; 12386 } else { 12387 return (java.lang.String) ref; 12388 } 12389 } 12390 /** 12391 * 12392 * 12393 * <pre> 12394 * The service account that the DeployedModel's container runs as. If not 12395 * specified, a system generated one will be used, which 12396 * has minimal permissions and the custom container, if used, may not have 12397 * enough permission to access other Google Cloud resources. 12398 * Users deploying the Model must have the `iam.serviceAccounts.actAs` 12399 * permission on this service account. 12400 * </pre> 12401 * 12402 * <code>string service_account = 29;</code> 12403 * 12404 * @return The bytes for serviceAccount. 12405 */ getServiceAccountBytes()12406 public com.google.protobuf.ByteString getServiceAccountBytes() { 12407 java.lang.Object ref = serviceAccount_; 12408 if (ref instanceof String) { 12409 com.google.protobuf.ByteString b = 12410 com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref); 12411 serviceAccount_ = b; 12412 return b; 12413 } else { 12414 return (com.google.protobuf.ByteString) ref; 12415 } 12416 } 12417 /** 12418 * 12419 * 12420 * <pre> 12421 * The service account that the DeployedModel's container runs as. If not 12422 * specified, a system generated one will be used, which 12423 * has minimal permissions and the custom container, if used, may not have 12424 * enough permission to access other Google Cloud resources. 12425 * Users deploying the Model must have the `iam.serviceAccounts.actAs` 12426 * permission on this service account. 
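     *
     * Illustrative sketch only; the service account address below is a
     * placeholder, and "jobBuilder" is a hypothetical BatchPredictionJob.Builder:
     *
     *   jobBuilder.setServiceAccount(
     *       // Placeholder account; the caller needs iam.serviceAccounts.actAs on it.
     *       "batch-runner@my-project.iam.gserviceaccount.com");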
12427 * </pre> 12428 * 12429 * <code>string service_account = 29;</code> 12430 * 12431 * @param value The serviceAccount to set. 12432 * @return This builder for chaining. 12433 */ setServiceAccount(java.lang.String value)12434 public Builder setServiceAccount(java.lang.String value) { 12435 if (value == null) { 12436 throw new NullPointerException(); 12437 } 12438 serviceAccount_ = value; 12439 bitField0_ |= 0x00000400; 12440 onChanged(); 12441 return this; 12442 } 12443 /** 12444 * 12445 * 12446 * <pre> 12447 * The service account that the DeployedModel's container runs as. If not 12448 * specified, a system generated one will be used, which 12449 * has minimal permissions and the custom container, if used, may not have 12450 * enough permission to access other Google Cloud resources. 12451 * Users deploying the Model must have the `iam.serviceAccounts.actAs` 12452 * permission on this service account. 12453 * </pre> 12454 * 12455 * <code>string service_account = 29;</code> 12456 * 12457 * @return This builder for chaining. 12458 */ clearServiceAccount()12459 public Builder clearServiceAccount() { 12460 serviceAccount_ = getDefaultInstance().getServiceAccount(); 12461 bitField0_ = (bitField0_ & ~0x00000400); 12462 onChanged(); 12463 return this; 12464 } 12465 /** 12466 * 12467 * 12468 * <pre> 12469 * The service account that the DeployedModel's container runs as. If not 12470 * specified, a system generated one will be used, which 12471 * has minimal permissions and the custom container, if used, may not have 12472 * enough permission to access other Google Cloud resources. 12473 * Users deploying the Model must have the `iam.serviceAccounts.actAs` 12474 * permission on this service account. 12475 * </pre> 12476 * 12477 * <code>string service_account = 29;</code> 12478 * 12479 * @param value The bytes for serviceAccount to set. 12480 * @return This builder for chaining. 12481 */ setServiceAccountBytes(com.google.protobuf.ByteString value)12482 public Builder setServiceAccountBytes(com.google.protobuf.ByteString value) { 12483 if (value == null) { 12484 throw new NullPointerException(); 12485 } 12486 checkByteStringIsUtf8(value); 12487 serviceAccount_ = value; 12488 bitField0_ |= 0x00000400; 12489 onChanged(); 12490 return this; 12491 } 12492 12493 private com.google.cloud.aiplatform.v1beta1.ManualBatchTuningParameters 12494 manualBatchTuningParameters_; 12495 private com.google.protobuf.SingleFieldBuilderV3< 12496 com.google.cloud.aiplatform.v1beta1.ManualBatchTuningParameters, 12497 com.google.cloud.aiplatform.v1beta1.ManualBatchTuningParameters.Builder, 12498 com.google.cloud.aiplatform.v1beta1.ManualBatchTuningParametersOrBuilder> 12499 manualBatchTuningParametersBuilder_; 12500 /** 12501 * 12502 * 12503 * <pre> 12504 * Immutable. Parameters configuring the batch behavior. Currently only 12505 * applicable when 12506 * [dedicated_resources][google.cloud.aiplatform.v1beta1.BatchPredictionJob.dedicated_resources] 12507 * are used (in other cases Vertex AI does the tuning itself). 12508 * </pre> 12509 * 12510 * <code> 12511 * .google.cloud.aiplatform.v1beta1.ManualBatchTuningParameters manual_batch_tuning_parameters = 8 [(.google.api.field_behavior) = IMMUTABLE]; 12512 * </code> 12513 * 12514 * @return Whether the manualBatchTuningParameters field is set. 12515 */ hasManualBatchTuningParameters()12516 public boolean hasManualBatchTuningParameters() { 12517 return ((bitField0_ & 0x00000800) != 0); 12518 } 12519 /** 12520 * 12521 * 12522 * <pre> 12523 * Immutable. 
Parameters configuring the batch behavior. Currently only 12524 * applicable when 12525 * [dedicated_resources][google.cloud.aiplatform.v1beta1.BatchPredictionJob.dedicated_resources] 12526 * are used (in other cases Vertex AI does the tuning itself). 12527 * </pre> 12528 * 12529 * <code> 12530 * .google.cloud.aiplatform.v1beta1.ManualBatchTuningParameters manual_batch_tuning_parameters = 8 [(.google.api.field_behavior) = IMMUTABLE]; 12531 * </code> 12532 * 12533 * @return The manualBatchTuningParameters. 12534 */ 12535 public com.google.cloud.aiplatform.v1beta1.ManualBatchTuningParameters getManualBatchTuningParameters()12536 getManualBatchTuningParameters() { 12537 if (manualBatchTuningParametersBuilder_ == null) { 12538 return manualBatchTuningParameters_ == null 12539 ? com.google.cloud.aiplatform.v1beta1.ManualBatchTuningParameters.getDefaultInstance() 12540 : manualBatchTuningParameters_; 12541 } else { 12542 return manualBatchTuningParametersBuilder_.getMessage(); 12543 } 12544 } 12545 /** 12546 * 12547 * 12548 * <pre> 12549 * Immutable. Parameters configuring the batch behavior. Currently only 12550 * applicable when 12551 * [dedicated_resources][google.cloud.aiplatform.v1beta1.BatchPredictionJob.dedicated_resources] 12552 * are used (in other cases Vertex AI does the tuning itself). 12553 * </pre> 12554 * 12555 * <code> 12556 * .google.cloud.aiplatform.v1beta1.ManualBatchTuningParameters manual_batch_tuning_parameters = 8 [(.google.api.field_behavior) = IMMUTABLE]; 12557 * </code> 12558 */ setManualBatchTuningParameters( com.google.cloud.aiplatform.v1beta1.ManualBatchTuningParameters value)12559 public Builder setManualBatchTuningParameters( 12560 com.google.cloud.aiplatform.v1beta1.ManualBatchTuningParameters value) { 12561 if (manualBatchTuningParametersBuilder_ == null) { 12562 if (value == null) { 12563 throw new NullPointerException(); 12564 } 12565 manualBatchTuningParameters_ = value; 12566 } else { 12567 manualBatchTuningParametersBuilder_.setMessage(value); 12568 } 12569 bitField0_ |= 0x00000800; 12570 onChanged(); 12571 return this; 12572 } 12573 /** 12574 * 12575 * 12576 * <pre> 12577 * Immutable. Parameters configuring the batch behavior. Currently only 12578 * applicable when 12579 * [dedicated_resources][google.cloud.aiplatform.v1beta1.BatchPredictionJob.dedicated_resources] 12580 * are used (in other cases Vertex AI does the tuning itself). 12581 * </pre> 12582 * 12583 * <code> 12584 * .google.cloud.aiplatform.v1beta1.ManualBatchTuningParameters manual_batch_tuning_parameters = 8 [(.google.api.field_behavior) = IMMUTABLE]; 12585 * </code> 12586 */ setManualBatchTuningParameters( com.google.cloud.aiplatform.v1beta1.ManualBatchTuningParameters.Builder builderForValue)12587 public Builder setManualBatchTuningParameters( 12588 com.google.cloud.aiplatform.v1beta1.ManualBatchTuningParameters.Builder builderForValue) { 12589 if (manualBatchTuningParametersBuilder_ == null) { 12590 manualBatchTuningParameters_ = builderForValue.build(); 12591 } else { 12592 manualBatchTuningParametersBuilder_.setMessage(builderForValue.build()); 12593 } 12594 bitField0_ |= 0x00000800; 12595 onChanged(); 12596 return this; 12597 } 12598 /** 12599 * 12600 * 12601 * <pre> 12602 * Immutable. Parameters configuring the batch behavior. Currently only 12603 * applicable when 12604 * [dedicated_resources][google.cloud.aiplatform.v1beta1.BatchPredictionJob.dedicated_resources] 12605 * are used (in other cases Vertex AI does the tuning itself). 
12606 * </pre> 12607 * 12608 * <code> 12609 * .google.cloud.aiplatform.v1beta1.ManualBatchTuningParameters manual_batch_tuning_parameters = 8 [(.google.api.field_behavior) = IMMUTABLE]; 12610 * </code> 12611 */ mergeManualBatchTuningParameters( com.google.cloud.aiplatform.v1beta1.ManualBatchTuningParameters value)12612 public Builder mergeManualBatchTuningParameters( 12613 com.google.cloud.aiplatform.v1beta1.ManualBatchTuningParameters value) { 12614 if (manualBatchTuningParametersBuilder_ == null) { 12615 if (((bitField0_ & 0x00000800) != 0) 12616 && manualBatchTuningParameters_ != null 12617 && manualBatchTuningParameters_ 12618 != com.google.cloud.aiplatform.v1beta1.ManualBatchTuningParameters 12619 .getDefaultInstance()) { 12620 getManualBatchTuningParametersBuilder().mergeFrom(value); 12621 } else { 12622 manualBatchTuningParameters_ = value; 12623 } 12624 } else { 12625 manualBatchTuningParametersBuilder_.mergeFrom(value); 12626 } 12627 bitField0_ |= 0x00000800; 12628 onChanged(); 12629 return this; 12630 } 12631 /** 12632 * 12633 * 12634 * <pre> 12635 * Immutable. Parameters configuring the batch behavior. Currently only 12636 * applicable when 12637 * [dedicated_resources][google.cloud.aiplatform.v1beta1.BatchPredictionJob.dedicated_resources] 12638 * are used (in other cases Vertex AI does the tuning itself). 12639 * </pre> 12640 * 12641 * <code> 12642 * .google.cloud.aiplatform.v1beta1.ManualBatchTuningParameters manual_batch_tuning_parameters = 8 [(.google.api.field_behavior) = IMMUTABLE]; 12643 * </code> 12644 */ clearManualBatchTuningParameters()12645 public Builder clearManualBatchTuningParameters() { 12646 bitField0_ = (bitField0_ & ~0x00000800); 12647 manualBatchTuningParameters_ = null; 12648 if (manualBatchTuningParametersBuilder_ != null) { 12649 manualBatchTuningParametersBuilder_.dispose(); 12650 manualBatchTuningParametersBuilder_ = null; 12651 } 12652 onChanged(); 12653 return this; 12654 } 12655 /** 12656 * 12657 * 12658 * <pre> 12659 * Immutable. Parameters configuring the batch behavior. Currently only 12660 * applicable when 12661 * [dedicated_resources][google.cloud.aiplatform.v1beta1.BatchPredictionJob.dedicated_resources] 12662 * are used (in other cases Vertex AI does the tuning itself). 12663 * </pre> 12664 * 12665 * <code> 12666 * .google.cloud.aiplatform.v1beta1.ManualBatchTuningParameters manual_batch_tuning_parameters = 8 [(.google.api.field_behavior) = IMMUTABLE]; 12667 * </code> 12668 */ 12669 public com.google.cloud.aiplatform.v1beta1.ManualBatchTuningParameters.Builder getManualBatchTuningParametersBuilder()12670 getManualBatchTuningParametersBuilder() { 12671 bitField0_ |= 0x00000800; 12672 onChanged(); 12673 return getManualBatchTuningParametersFieldBuilder().getBuilder(); 12674 } 12675 /** 12676 * 12677 * 12678 * <pre> 12679 * Immutable. Parameters configuring the batch behavior. Currently only 12680 * applicable when 12681 * [dedicated_resources][google.cloud.aiplatform.v1beta1.BatchPredictionJob.dedicated_resources] 12682 * are used (in other cases Vertex AI does the tuning itself). 
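     *
     * A sketch of pairing this field with dedicated_resources, as described
     * above ("jobBuilder" and "dedicatedResources" are hypothetical locals,
     * and the batch size of 16 is an arbitrary placeholder):
     *
     *   jobBuilder
     *       .setDedicatedResources(dedicatedResources)
     *       .setManualBatchTuningParameters(
     *           ManualBatchTuningParameters.newBuilder().setBatchSize(16));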
12683 * </pre> 12684 * 12685 * <code> 12686 * .google.cloud.aiplatform.v1beta1.ManualBatchTuningParameters manual_batch_tuning_parameters = 8 [(.google.api.field_behavior) = IMMUTABLE]; 12687 * </code> 12688 */ 12689 public com.google.cloud.aiplatform.v1beta1.ManualBatchTuningParametersOrBuilder getManualBatchTuningParametersOrBuilder()12690 getManualBatchTuningParametersOrBuilder() { 12691 if (manualBatchTuningParametersBuilder_ != null) { 12692 return manualBatchTuningParametersBuilder_.getMessageOrBuilder(); 12693 } else { 12694 return manualBatchTuningParameters_ == null 12695 ? com.google.cloud.aiplatform.v1beta1.ManualBatchTuningParameters.getDefaultInstance() 12696 : manualBatchTuningParameters_; 12697 } 12698 } 12699 /** 12700 * 12701 * 12702 * <pre> 12703 * Immutable. Parameters configuring the batch behavior. Currently only 12704 * applicable when 12705 * [dedicated_resources][google.cloud.aiplatform.v1beta1.BatchPredictionJob.dedicated_resources] 12706 * are used (in other cases Vertex AI does the tuning itself). 12707 * </pre> 12708 * 12709 * <code> 12710 * .google.cloud.aiplatform.v1beta1.ManualBatchTuningParameters manual_batch_tuning_parameters = 8 [(.google.api.field_behavior) = IMMUTABLE]; 12711 * </code> 12712 */ 12713 private com.google.protobuf.SingleFieldBuilderV3< 12714 com.google.cloud.aiplatform.v1beta1.ManualBatchTuningParameters, 12715 com.google.cloud.aiplatform.v1beta1.ManualBatchTuningParameters.Builder, 12716 com.google.cloud.aiplatform.v1beta1.ManualBatchTuningParametersOrBuilder> getManualBatchTuningParametersFieldBuilder()12717 getManualBatchTuningParametersFieldBuilder() { 12718 if (manualBatchTuningParametersBuilder_ == null) { 12719 manualBatchTuningParametersBuilder_ = 12720 new com.google.protobuf.SingleFieldBuilderV3< 12721 com.google.cloud.aiplatform.v1beta1.ManualBatchTuningParameters, 12722 com.google.cloud.aiplatform.v1beta1.ManualBatchTuningParameters.Builder, 12723 com.google.cloud.aiplatform.v1beta1.ManualBatchTuningParametersOrBuilder>( 12724 getManualBatchTuningParameters(), getParentForChildren(), isClean()); 12725 manualBatchTuningParameters_ = null; 12726 } 12727 return manualBatchTuningParametersBuilder_; 12728 } 12729 12730 private boolean generateExplanation_; 12731 /** 12732 * 12733 * 12734 * <pre> 12735 * Generate explanation with the batch prediction results. 12736 * When set to `true`, the batch prediction output changes based on the 12737 * `predictions_format` field of the 12738 * [BatchPredictionJob.output_config][google.cloud.aiplatform.v1beta1.BatchPredictionJob.output_config] 12739 * object: 12740 * * `bigquery`: output includes a column named `explanation`. The value 12741 * is a struct that conforms to the 12742 * [Explanation][google.cloud.aiplatform.v1beta1.Explanation] object. 12743 * * `jsonl`: The JSON objects on each line include an additional entry 12744 * keyed `explanation`. The value of the entry is a JSON object that 12745 * conforms to the 12746 * [Explanation][google.cloud.aiplatform.v1beta1.Explanation] object. 12747 * * `csv`: Generating explanations for CSV format is not supported. 12748 * If this field is set to true, either the 12749 * [Model.explanation_spec][google.cloud.aiplatform.v1beta1.Model.explanation_spec] 12750 * or 12751 * [explanation_spec][google.cloud.aiplatform.v1beta1.BatchPredictionJob.explanation_spec] 12752 * must be populated. 12753 * </pre> 12754 * 12755 * <code>bool generate_explanation = 23;</code> 12756 * 12757 * @return The generateExplanation. 
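     * <p>A sketch of opting in to explanations; per the description above, an
     * explanation spec must then be available either on the Model or on the
     * job itself ({@code jobBuilder} and {@code explanationSpec} are
     * hypothetical locals):
     *
     * <pre>{@code
     * jobBuilder
     *     .setGenerateExplanation(true)
     *     // Needed unless Model.explanation_spec is already populated.
     *     .setExplanationSpec(explanationSpec);
     * }</pre>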
12758 */ 12759 @java.lang.Override getGenerateExplanation()12760 public boolean getGenerateExplanation() { 12761 return generateExplanation_; 12762 } 12763 /** 12764 * 12765 * 12766 * <pre> 12767 * Generate explanation with the batch prediction results. 12768 * When set to `true`, the batch prediction output changes based on the 12769 * `predictions_format` field of the 12770 * [BatchPredictionJob.output_config][google.cloud.aiplatform.v1beta1.BatchPredictionJob.output_config] 12771 * object: 12772 * * `bigquery`: output includes a column named `explanation`. The value 12773 * is a struct that conforms to the 12774 * [Explanation][google.cloud.aiplatform.v1beta1.Explanation] object. 12775 * * `jsonl`: The JSON objects on each line include an additional entry 12776 * keyed `explanation`. The value of the entry is a JSON object that 12777 * conforms to the 12778 * [Explanation][google.cloud.aiplatform.v1beta1.Explanation] object. 12779 * * `csv`: Generating explanations for CSV format is not supported. 12780 * If this field is set to true, either the 12781 * [Model.explanation_spec][google.cloud.aiplatform.v1beta1.Model.explanation_spec] 12782 * or 12783 * [explanation_spec][google.cloud.aiplatform.v1beta1.BatchPredictionJob.explanation_spec] 12784 * must be populated. 12785 * </pre> 12786 * 12787 * <code>bool generate_explanation = 23;</code> 12788 * 12789 * @param value The generateExplanation to set. 12790 * @return This builder for chaining. 12791 */ setGenerateExplanation(boolean value)12792 public Builder setGenerateExplanation(boolean value) { 12793 12794 generateExplanation_ = value; 12795 bitField0_ |= 0x00001000; 12796 onChanged(); 12797 return this; 12798 } 12799 /** 12800 * 12801 * 12802 * <pre> 12803 * Generate explanation with the batch prediction results. 12804 * When set to `true`, the batch prediction output changes based on the 12805 * `predictions_format` field of the 12806 * [BatchPredictionJob.output_config][google.cloud.aiplatform.v1beta1.BatchPredictionJob.output_config] 12807 * object: 12808 * * `bigquery`: output includes a column named `explanation`. The value 12809 * is a struct that conforms to the 12810 * [Explanation][google.cloud.aiplatform.v1beta1.Explanation] object. 12811 * * `jsonl`: The JSON objects on each line include an additional entry 12812 * keyed `explanation`. The value of the entry is a JSON object that 12813 * conforms to the 12814 * [Explanation][google.cloud.aiplatform.v1beta1.Explanation] object. 12815 * * `csv`: Generating explanations for CSV format is not supported. 12816 * If this field is set to true, either the 12817 * [Model.explanation_spec][google.cloud.aiplatform.v1beta1.Model.explanation_spec] 12818 * or 12819 * [explanation_spec][google.cloud.aiplatform.v1beta1.BatchPredictionJob.explanation_spec] 12820 * must be populated. 12821 * </pre> 12822 * 12823 * <code>bool generate_explanation = 23;</code> 12824 * 12825 * @return This builder for chaining. 
12826 */ clearGenerateExplanation()12827 public Builder clearGenerateExplanation() { 12828 bitField0_ = (bitField0_ & ~0x00001000); 12829 generateExplanation_ = false; 12830 onChanged(); 12831 return this; 12832 } 12833 12834 private com.google.cloud.aiplatform.v1beta1.ExplanationSpec explanationSpec_; 12835 private com.google.protobuf.SingleFieldBuilderV3< 12836 com.google.cloud.aiplatform.v1beta1.ExplanationSpec, 12837 com.google.cloud.aiplatform.v1beta1.ExplanationSpec.Builder, 12838 com.google.cloud.aiplatform.v1beta1.ExplanationSpecOrBuilder> 12839 explanationSpecBuilder_; 12840 /** 12841 * 12842 * 12843 * <pre> 12844 * Explanation configuration for this BatchPredictionJob. Can be 12845 * specified only if 12846 * [generate_explanation][google.cloud.aiplatform.v1beta1.BatchPredictionJob.generate_explanation] 12847 * is set to `true`. 12848 * This value overrides the value of 12849 * [Model.explanation_spec][google.cloud.aiplatform.v1beta1.Model.explanation_spec]. 12850 * All fields of 12851 * [explanation_spec][google.cloud.aiplatform.v1beta1.BatchPredictionJob.explanation_spec] 12852 * are optional in the request. If a field of the 12853 * [explanation_spec][google.cloud.aiplatform.v1beta1.BatchPredictionJob.explanation_spec] 12854 * object is not populated, the corresponding field of the 12855 * [Model.explanation_spec][google.cloud.aiplatform.v1beta1.Model.explanation_spec] 12856 * object is inherited. 12857 * </pre> 12858 * 12859 * <code>.google.cloud.aiplatform.v1beta1.ExplanationSpec explanation_spec = 25;</code> 12860 * 12861 * @return Whether the explanationSpec field is set. 12862 */ hasExplanationSpec()12863 public boolean hasExplanationSpec() { 12864 return ((bitField0_ & 0x00002000) != 0); 12865 } 12866 /** 12867 * 12868 * 12869 * <pre> 12870 * Explanation configuration for this BatchPredictionJob. Can be 12871 * specified only if 12872 * [generate_explanation][google.cloud.aiplatform.v1beta1.BatchPredictionJob.generate_explanation] 12873 * is set to `true`. 12874 * This value overrides the value of 12875 * [Model.explanation_spec][google.cloud.aiplatform.v1beta1.Model.explanation_spec]. 12876 * All fields of 12877 * [explanation_spec][google.cloud.aiplatform.v1beta1.BatchPredictionJob.explanation_spec] 12878 * are optional in the request. If a field of the 12879 * [explanation_spec][google.cloud.aiplatform.v1beta1.BatchPredictionJob.explanation_spec] 12880 * object is not populated, the corresponding field of the 12881 * [Model.explanation_spec][google.cloud.aiplatform.v1beta1.Model.explanation_spec] 12882 * object is inherited. 12883 * </pre> 12884 * 12885 * <code>.google.cloud.aiplatform.v1beta1.ExplanationSpec explanation_spec = 25;</code> 12886 * 12887 * @return The explanationSpec. 12888 */ getExplanationSpec()12889 public com.google.cloud.aiplatform.v1beta1.ExplanationSpec getExplanationSpec() { 12890 if (explanationSpecBuilder_ == null) { 12891 return explanationSpec_ == null 12892 ? com.google.cloud.aiplatform.v1beta1.ExplanationSpec.getDefaultInstance() 12893 : explanationSpec_; 12894 } else { 12895 return explanationSpecBuilder_.getMessage(); 12896 } 12897 } 12898 /** 12899 * 12900 * 12901 * <pre> 12902 * Explanation configuration for this BatchPredictionJob. Can be 12903 * specified only if 12904 * [generate_explanation][google.cloud.aiplatform.v1beta1.BatchPredictionJob.generate_explanation] 12905 * is set to `true`. 12906 * This value overrides the value of 12907 * [Model.explanation_spec][google.cloud.aiplatform.v1beta1.Model.explanation_spec]. 
12908 * All fields of 12909 * [explanation_spec][google.cloud.aiplatform.v1beta1.BatchPredictionJob.explanation_spec] 12910 * are optional in the request. If a field of the 12911 * [explanation_spec][google.cloud.aiplatform.v1beta1.BatchPredictionJob.explanation_spec] 12912 * object is not populated, the corresponding field of the 12913 * [Model.explanation_spec][google.cloud.aiplatform.v1beta1.Model.explanation_spec] 12914 * object is inherited. 12915 * </pre> 12916 * 12917 * <code>.google.cloud.aiplatform.v1beta1.ExplanationSpec explanation_spec = 25;</code> 12918 */ setExplanationSpec(com.google.cloud.aiplatform.v1beta1.ExplanationSpec value)12919 public Builder setExplanationSpec(com.google.cloud.aiplatform.v1beta1.ExplanationSpec value) { 12920 if (explanationSpecBuilder_ == null) { 12921 if (value == null) { 12922 throw new NullPointerException(); 12923 } 12924 explanationSpec_ = value; 12925 } else { 12926 explanationSpecBuilder_.setMessage(value); 12927 } 12928 bitField0_ |= 0x00002000; 12929 onChanged(); 12930 return this; 12931 } 12932 /** 12933 * 12934 * 12935 * <pre> 12936 * Explanation configuration for this BatchPredictionJob. Can be 12937 * specified only if 12938 * [generate_explanation][google.cloud.aiplatform.v1beta1.BatchPredictionJob.generate_explanation] 12939 * is set to `true`. 12940 * This value overrides the value of 12941 * [Model.explanation_spec][google.cloud.aiplatform.v1beta1.Model.explanation_spec]. 12942 * All fields of 12943 * [explanation_spec][google.cloud.aiplatform.v1beta1.BatchPredictionJob.explanation_spec] 12944 * are optional in the request. If a field of the 12945 * [explanation_spec][google.cloud.aiplatform.v1beta1.BatchPredictionJob.explanation_spec] 12946 * object is not populated, the corresponding field of the 12947 * [Model.explanation_spec][google.cloud.aiplatform.v1beta1.Model.explanation_spec] 12948 * object is inherited. 12949 * </pre> 12950 * 12951 * <code>.google.cloud.aiplatform.v1beta1.ExplanationSpec explanation_spec = 25;</code> 12952 */ setExplanationSpec( com.google.cloud.aiplatform.v1beta1.ExplanationSpec.Builder builderForValue)12953 public Builder setExplanationSpec( 12954 com.google.cloud.aiplatform.v1beta1.ExplanationSpec.Builder builderForValue) { 12955 if (explanationSpecBuilder_ == null) { 12956 explanationSpec_ = builderForValue.build(); 12957 } else { 12958 explanationSpecBuilder_.setMessage(builderForValue.build()); 12959 } 12960 bitField0_ |= 0x00002000; 12961 onChanged(); 12962 return this; 12963 } 12964 /** 12965 * 12966 * 12967 * <pre> 12968 * Explanation configuration for this BatchPredictionJob. Can be 12969 * specified only if 12970 * [generate_explanation][google.cloud.aiplatform.v1beta1.BatchPredictionJob.generate_explanation] 12971 * is set to `true`. 12972 * This value overrides the value of 12973 * [Model.explanation_spec][google.cloud.aiplatform.v1beta1.Model.explanation_spec]. 12974 * All fields of 12975 * [explanation_spec][google.cloud.aiplatform.v1beta1.BatchPredictionJob.explanation_spec] 12976 * are optional in the request. If a field of the 12977 * [explanation_spec][google.cloud.aiplatform.v1beta1.BatchPredictionJob.explanation_spec] 12978 * object is not populated, the corresponding field of the 12979 * [Model.explanation_spec][google.cloud.aiplatform.v1beta1.Model.explanation_spec] 12980 * object is inherited. 
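     *
     * As a sketch of the override behavior described above, setting only the
     * parameters leaves the remaining fields to be inherited from
     * Model.explanation_spec (the Sampled Shapley path count of 10 is a
     * placeholder, and "jobBuilder" is a hypothetical BatchPredictionJob.Builder):
     *
     *   jobBuilder.setExplanationSpec(
     *       ExplanationSpec.newBuilder()
     *           .setParameters(
     *               ExplanationParameters.newBuilder()
     *                   .setSampledShapleyAttribution(
     *                       SampledShapleyAttribution.newBuilder().setPathCount(10))));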
12981 * </pre> 12982 * 12983 * <code>.google.cloud.aiplatform.v1beta1.ExplanationSpec explanation_spec = 25;</code> 12984 */ mergeExplanationSpec(com.google.cloud.aiplatform.v1beta1.ExplanationSpec value)12985 public Builder mergeExplanationSpec(com.google.cloud.aiplatform.v1beta1.ExplanationSpec value) { 12986 if (explanationSpecBuilder_ == null) { 12987 if (((bitField0_ & 0x00002000) != 0) 12988 && explanationSpec_ != null 12989 && explanationSpec_ 12990 != com.google.cloud.aiplatform.v1beta1.ExplanationSpec.getDefaultInstance()) { 12991 getExplanationSpecBuilder().mergeFrom(value); 12992 } else { 12993 explanationSpec_ = value; 12994 } 12995 } else { 12996 explanationSpecBuilder_.mergeFrom(value); 12997 } 12998 bitField0_ |= 0x00002000; 12999 onChanged(); 13000 return this; 13001 } 13002 /** 13003 * 13004 * 13005 * <pre> 13006 * Explanation configuration for this BatchPredictionJob. Can be 13007 * specified only if 13008 * [generate_explanation][google.cloud.aiplatform.v1beta1.BatchPredictionJob.generate_explanation] 13009 * is set to `true`. 13010 * This value overrides the value of 13011 * [Model.explanation_spec][google.cloud.aiplatform.v1beta1.Model.explanation_spec]. 13012 * All fields of 13013 * [explanation_spec][google.cloud.aiplatform.v1beta1.BatchPredictionJob.explanation_spec] 13014 * are optional in the request. If a field of the 13015 * [explanation_spec][google.cloud.aiplatform.v1beta1.BatchPredictionJob.explanation_spec] 13016 * object is not populated, the corresponding field of the 13017 * [Model.explanation_spec][google.cloud.aiplatform.v1beta1.Model.explanation_spec] 13018 * object is inherited. 13019 * </pre> 13020 * 13021 * <code>.google.cloud.aiplatform.v1beta1.ExplanationSpec explanation_spec = 25;</code> 13022 */ clearExplanationSpec()13023 public Builder clearExplanationSpec() { 13024 bitField0_ = (bitField0_ & ~0x00002000); 13025 explanationSpec_ = null; 13026 if (explanationSpecBuilder_ != null) { 13027 explanationSpecBuilder_.dispose(); 13028 explanationSpecBuilder_ = null; 13029 } 13030 onChanged(); 13031 return this; 13032 } 13033 /** 13034 * 13035 * 13036 * <pre> 13037 * Explanation configuration for this BatchPredictionJob. Can be 13038 * specified only if 13039 * [generate_explanation][google.cloud.aiplatform.v1beta1.BatchPredictionJob.generate_explanation] 13040 * is set to `true`. 13041 * This value overrides the value of 13042 * [Model.explanation_spec][google.cloud.aiplatform.v1beta1.Model.explanation_spec]. 13043 * All fields of 13044 * [explanation_spec][google.cloud.aiplatform.v1beta1.BatchPredictionJob.explanation_spec] 13045 * are optional in the request. If a field of the 13046 * [explanation_spec][google.cloud.aiplatform.v1beta1.BatchPredictionJob.explanation_spec] 13047 * object is not populated, the corresponding field of the 13048 * [Model.explanation_spec][google.cloud.aiplatform.v1beta1.Model.explanation_spec] 13049 * object is inherited. 13050 * </pre> 13051 * 13052 * <code>.google.cloud.aiplatform.v1beta1.ExplanationSpec explanation_spec = 25;</code> 13053 */ getExplanationSpecBuilder()13054 public com.google.cloud.aiplatform.v1beta1.ExplanationSpec.Builder getExplanationSpecBuilder() { 13055 bitField0_ |= 0x00002000; 13056 onChanged(); 13057 return getExplanationSpecFieldBuilder().getBuilder(); 13058 } 13059 /** 13060 * 13061 * 13062 * <pre> 13063 * Explanation configuration for this BatchPredictionJob. 
Can be 13064 * specified only if 13065 * [generate_explanation][google.cloud.aiplatform.v1beta1.BatchPredictionJob.generate_explanation] 13066 * is set to `true`. 13067 * This value overrides the value of 13068 * [Model.explanation_spec][google.cloud.aiplatform.v1beta1.Model.explanation_spec]. 13069 * All fields of 13070 * [explanation_spec][google.cloud.aiplatform.v1beta1.BatchPredictionJob.explanation_spec] 13071 * are optional in the request. If a field of the 13072 * [explanation_spec][google.cloud.aiplatform.v1beta1.BatchPredictionJob.explanation_spec] 13073 * object is not populated, the corresponding field of the 13074 * [Model.explanation_spec][google.cloud.aiplatform.v1beta1.Model.explanation_spec] 13075 * object is inherited. 13076 * </pre> 13077 * 13078 * <code>.google.cloud.aiplatform.v1beta1.ExplanationSpec explanation_spec = 25;</code> 13079 */ 13080 public com.google.cloud.aiplatform.v1beta1.ExplanationSpecOrBuilder getExplanationSpecOrBuilder()13081 getExplanationSpecOrBuilder() { 13082 if (explanationSpecBuilder_ != null) { 13083 return explanationSpecBuilder_.getMessageOrBuilder(); 13084 } else { 13085 return explanationSpec_ == null 13086 ? com.google.cloud.aiplatform.v1beta1.ExplanationSpec.getDefaultInstance() 13087 : explanationSpec_; 13088 } 13089 } 13090 /** 13091 * 13092 * 13093 * <pre> 13094 * Explanation configuration for this BatchPredictionJob. Can be 13095 * specified only if 13096 * [generate_explanation][google.cloud.aiplatform.v1beta1.BatchPredictionJob.generate_explanation] 13097 * is set to `true`. 13098 * This value overrides the value of 13099 * [Model.explanation_spec][google.cloud.aiplatform.v1beta1.Model.explanation_spec]. 13100 * All fields of 13101 * [explanation_spec][google.cloud.aiplatform.v1beta1.BatchPredictionJob.explanation_spec] 13102 * are optional in the request. If a field of the 13103 * [explanation_spec][google.cloud.aiplatform.v1beta1.BatchPredictionJob.explanation_spec] 13104 * object is not populated, the corresponding field of the 13105 * [Model.explanation_spec][google.cloud.aiplatform.v1beta1.Model.explanation_spec] 13106 * object is inherited. 13107 * </pre> 13108 * 13109 * <code>.google.cloud.aiplatform.v1beta1.ExplanationSpec explanation_spec = 25;</code> 13110 */ 13111 private com.google.protobuf.SingleFieldBuilderV3< 13112 com.google.cloud.aiplatform.v1beta1.ExplanationSpec, 13113 com.google.cloud.aiplatform.v1beta1.ExplanationSpec.Builder, 13114 com.google.cloud.aiplatform.v1beta1.ExplanationSpecOrBuilder> getExplanationSpecFieldBuilder()13115 getExplanationSpecFieldBuilder() { 13116 if (explanationSpecBuilder_ == null) { 13117 explanationSpecBuilder_ = 13118 new com.google.protobuf.SingleFieldBuilderV3< 13119 com.google.cloud.aiplatform.v1beta1.ExplanationSpec, 13120 com.google.cloud.aiplatform.v1beta1.ExplanationSpec.Builder, 13121 com.google.cloud.aiplatform.v1beta1.ExplanationSpecOrBuilder>( 13122 getExplanationSpec(), getParentForChildren(), isClean()); 13123 explanationSpec_ = null; 13124 } 13125 return explanationSpecBuilder_; 13126 } 13127 13128 private com.google.cloud.aiplatform.v1beta1.BatchPredictionJob.OutputInfo outputInfo_; 13129 private com.google.protobuf.SingleFieldBuilderV3< 13130 com.google.cloud.aiplatform.v1beta1.BatchPredictionJob.OutputInfo, 13131 com.google.cloud.aiplatform.v1beta1.BatchPredictionJob.OutputInfo.Builder, 13132 com.google.cloud.aiplatform.v1beta1.BatchPredictionJob.OutputInfoOrBuilder> 13133 outputInfoBuilder_; 13134 /** 13135 * 13136 * 13137 * <pre> 13138 * Output only. 
Information further describing the output of this job. 13139 * </pre> 13140 * 13141 * <code> 13142 * .google.cloud.aiplatform.v1beta1.BatchPredictionJob.OutputInfo output_info = 9 [(.google.api.field_behavior) = OUTPUT_ONLY]; 13143 * </code> 13144 * 13145 * @return Whether the outputInfo field is set. 13146 */ hasOutputInfo()13147 public boolean hasOutputInfo() { 13148 return ((bitField0_ & 0x00004000) != 0); 13149 } 13150 /** 13151 * 13152 * 13153 * <pre> 13154 * Output only. Information further describing the output of this job. 13155 * </pre> 13156 * 13157 * <code> 13158 * .google.cloud.aiplatform.v1beta1.BatchPredictionJob.OutputInfo output_info = 9 [(.google.api.field_behavior) = OUTPUT_ONLY]; 13159 * </code> 13160 * 13161 * @return The outputInfo. 13162 */ getOutputInfo()13163 public com.google.cloud.aiplatform.v1beta1.BatchPredictionJob.OutputInfo getOutputInfo() { 13164 if (outputInfoBuilder_ == null) { 13165 return outputInfo_ == null 13166 ? com.google.cloud.aiplatform.v1beta1.BatchPredictionJob.OutputInfo.getDefaultInstance() 13167 : outputInfo_; 13168 } else { 13169 return outputInfoBuilder_.getMessage(); 13170 } 13171 } 13172 /** 13173 * 13174 * 13175 * <pre> 13176 * Output only. Information further describing the output of this job. 13177 * </pre> 13178 * 13179 * <code> 13180 * .google.cloud.aiplatform.v1beta1.BatchPredictionJob.OutputInfo output_info = 9 [(.google.api.field_behavior) = OUTPUT_ONLY]; 13181 * </code> 13182 */ setOutputInfo( com.google.cloud.aiplatform.v1beta1.BatchPredictionJob.OutputInfo value)13183 public Builder setOutputInfo( 13184 com.google.cloud.aiplatform.v1beta1.BatchPredictionJob.OutputInfo value) { 13185 if (outputInfoBuilder_ == null) { 13186 if (value == null) { 13187 throw new NullPointerException(); 13188 } 13189 outputInfo_ = value; 13190 } else { 13191 outputInfoBuilder_.setMessage(value); 13192 } 13193 bitField0_ |= 0x00004000; 13194 onChanged(); 13195 return this; 13196 } 13197 /** 13198 * 13199 * 13200 * <pre> 13201 * Output only. Information further describing the output of this job. 13202 * </pre> 13203 * 13204 * <code> 13205 * .google.cloud.aiplatform.v1beta1.BatchPredictionJob.OutputInfo output_info = 9 [(.google.api.field_behavior) = OUTPUT_ONLY]; 13206 * </code> 13207 */ setOutputInfo( com.google.cloud.aiplatform.v1beta1.BatchPredictionJob.OutputInfo.Builder builderForValue)13208 public Builder setOutputInfo( 13209 com.google.cloud.aiplatform.v1beta1.BatchPredictionJob.OutputInfo.Builder builderForValue) { 13210 if (outputInfoBuilder_ == null) { 13211 outputInfo_ = builderForValue.build(); 13212 } else { 13213 outputInfoBuilder_.setMessage(builderForValue.build()); 13214 } 13215 bitField0_ |= 0x00004000; 13216 onChanged(); 13217 return this; 13218 } 13219 /** 13220 * 13221 * 13222 * <pre> 13223 * Output only. Information further describing the output of this job. 
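     *
     * Because the field is output only, callers normally read it from a
     * finished job rather than set it; for example ("job" is a hypothetical
     * BatchPredictionJob returned by the service, assumed to have written to
     * Cloud Storage):
     *
     *   if (job.hasOutputInfo()) {
     *     String outputDirectory = job.getOutputInfo().getGcsOutputDirectory();
     *   }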
13224 * </pre> 13225 * 13226 * <code> 13227 * .google.cloud.aiplatform.v1beta1.BatchPredictionJob.OutputInfo output_info = 9 [(.google.api.field_behavior) = OUTPUT_ONLY]; 13228 * </code> 13229 */ mergeOutputInfo( com.google.cloud.aiplatform.v1beta1.BatchPredictionJob.OutputInfo value)13230 public Builder mergeOutputInfo( 13231 com.google.cloud.aiplatform.v1beta1.BatchPredictionJob.OutputInfo value) { 13232 if (outputInfoBuilder_ == null) { 13233 if (((bitField0_ & 0x00004000) != 0) 13234 && outputInfo_ != null 13235 && outputInfo_ 13236 != com.google.cloud.aiplatform.v1beta1.BatchPredictionJob.OutputInfo 13237 .getDefaultInstance()) { 13238 getOutputInfoBuilder().mergeFrom(value); 13239 } else { 13240 outputInfo_ = value; 13241 } 13242 } else { 13243 outputInfoBuilder_.mergeFrom(value); 13244 } 13245 bitField0_ |= 0x00004000; 13246 onChanged(); 13247 return this; 13248 } 13249 /** 13250 * 13251 * 13252 * <pre> 13253 * Output only. Information further describing the output of this job. 13254 * </pre> 13255 * 13256 * <code> 13257 * .google.cloud.aiplatform.v1beta1.BatchPredictionJob.OutputInfo output_info = 9 [(.google.api.field_behavior) = OUTPUT_ONLY]; 13258 * </code> 13259 */ clearOutputInfo()13260 public Builder clearOutputInfo() { 13261 bitField0_ = (bitField0_ & ~0x00004000); 13262 outputInfo_ = null; 13263 if (outputInfoBuilder_ != null) { 13264 outputInfoBuilder_.dispose(); 13265 outputInfoBuilder_ = null; 13266 } 13267 onChanged(); 13268 return this; 13269 } 13270 /** 13271 * 13272 * 13273 * <pre> 13274 * Output only. Information further describing the output of this job. 13275 * </pre> 13276 * 13277 * <code> 13278 * .google.cloud.aiplatform.v1beta1.BatchPredictionJob.OutputInfo output_info = 9 [(.google.api.field_behavior) = OUTPUT_ONLY]; 13279 * </code> 13280 */ 13281 public com.google.cloud.aiplatform.v1beta1.BatchPredictionJob.OutputInfo.Builder getOutputInfoBuilder()13282 getOutputInfoBuilder() { 13283 bitField0_ |= 0x00004000; 13284 onChanged(); 13285 return getOutputInfoFieldBuilder().getBuilder(); 13286 } 13287 /** 13288 * 13289 * 13290 * <pre> 13291 * Output only. Information further describing the output of this job. 13292 * </pre> 13293 * 13294 * <code> 13295 * .google.cloud.aiplatform.v1beta1.BatchPredictionJob.OutputInfo output_info = 9 [(.google.api.field_behavior) = OUTPUT_ONLY]; 13296 * </code> 13297 */ 13298 public com.google.cloud.aiplatform.v1beta1.BatchPredictionJob.OutputInfoOrBuilder getOutputInfoOrBuilder()13299 getOutputInfoOrBuilder() { 13300 if (outputInfoBuilder_ != null) { 13301 return outputInfoBuilder_.getMessageOrBuilder(); 13302 } else { 13303 return outputInfo_ == null 13304 ? com.google.cloud.aiplatform.v1beta1.BatchPredictionJob.OutputInfo.getDefaultInstance() 13305 : outputInfo_; 13306 } 13307 } 13308 /** 13309 * 13310 * 13311 * <pre> 13312 * Output only. Information further describing the output of this job. 
13313 * </pre> 13314 * 13315 * <code> 13316 * .google.cloud.aiplatform.v1beta1.BatchPredictionJob.OutputInfo output_info = 9 [(.google.api.field_behavior) = OUTPUT_ONLY]; 13317 * </code> 13318 */ 13319 private com.google.protobuf.SingleFieldBuilderV3< 13320 com.google.cloud.aiplatform.v1beta1.BatchPredictionJob.OutputInfo, 13321 com.google.cloud.aiplatform.v1beta1.BatchPredictionJob.OutputInfo.Builder, 13322 com.google.cloud.aiplatform.v1beta1.BatchPredictionJob.OutputInfoOrBuilder> getOutputInfoFieldBuilder()13323 getOutputInfoFieldBuilder() { 13324 if (outputInfoBuilder_ == null) { 13325 outputInfoBuilder_ = 13326 new com.google.protobuf.SingleFieldBuilderV3< 13327 com.google.cloud.aiplatform.v1beta1.BatchPredictionJob.OutputInfo, 13328 com.google.cloud.aiplatform.v1beta1.BatchPredictionJob.OutputInfo.Builder, 13329 com.google.cloud.aiplatform.v1beta1.BatchPredictionJob.OutputInfoOrBuilder>( 13330 getOutputInfo(), getParentForChildren(), isClean()); 13331 outputInfo_ = null; 13332 } 13333 return outputInfoBuilder_; 13334 } 13335 13336 private int state_ = 0; 13337 /** 13338 * 13339 * 13340 * <pre> 13341 * Output only. The detailed state of the job. 13342 * </pre> 13343 * 13344 * <code> 13345 * .google.cloud.aiplatform.v1beta1.JobState state = 10 [(.google.api.field_behavior) = OUTPUT_ONLY]; 13346 * </code> 13347 * 13348 * @return The enum numeric value on the wire for state. 13349 */ 13350 @java.lang.Override getStateValue()13351 public int getStateValue() { 13352 return state_; 13353 } 13354 /** 13355 * 13356 * 13357 * <pre> 13358 * Output only. The detailed state of the job. 13359 * </pre> 13360 * 13361 * <code> 13362 * .google.cloud.aiplatform.v1beta1.JobState state = 10 [(.google.api.field_behavior) = OUTPUT_ONLY]; 13363 * </code> 13364 * 13365 * @param value The enum numeric value on the wire for state to set. 13366 * @return This builder for chaining. 13367 */ setStateValue(int value)13368 public Builder setStateValue(int value) { 13369 state_ = value; 13370 bitField0_ |= 0x00008000; 13371 onChanged(); 13372 return this; 13373 } 13374 /** 13375 * 13376 * 13377 * <pre> 13378 * Output only. The detailed state of the job. 13379 * </pre> 13380 * 13381 * <code> 13382 * .google.cloud.aiplatform.v1beta1.JobState state = 10 [(.google.api.field_behavior) = OUTPUT_ONLY]; 13383 * </code> 13384 * 13385 * @return The state. 13386 */ 13387 @java.lang.Override getState()13388 public com.google.cloud.aiplatform.v1beta1.JobState getState() { 13389 com.google.cloud.aiplatform.v1beta1.JobState result = 13390 com.google.cloud.aiplatform.v1beta1.JobState.forNumber(state_); 13391 return result == null ? com.google.cloud.aiplatform.v1beta1.JobState.UNRECOGNIZED : result; 13392 } 13393 /** 13394 * 13395 * 13396 * <pre> 13397 * Output only. The detailed state of the job. 13398 * </pre> 13399 * 13400 * <code> 13401 * .google.cloud.aiplatform.v1beta1.JobState state = 10 [(.google.api.field_behavior) = OUTPUT_ONLY]; 13402 * </code> 13403 * 13404 * @param value The state to set. 13405 * @return This builder for chaining. 13406 */ setState(com.google.cloud.aiplatform.v1beta1.JobState value)13407 public Builder setState(com.google.cloud.aiplatform.v1beta1.JobState value) { 13408 if (value == null) { 13409 throw new NullPointerException(); 13410 } 13411 bitField0_ |= 0x00008000; 13412 state_ = value.getNumber(); 13413 onChanged(); 13414 return this; 13415 } 13416 /** 13417 * 13418 * 13419 * <pre> 13420 * Output only. The detailed state of the job. 
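     * (Editorial usage sketch, not part of the generated description; `job` is a
     * hypothetical, already-fetched BatchPredictionJob.)
     *   com.google.cloud.aiplatform.v1beta1.JobState state = job.getState();
     *   boolean terminal =
     *       state == com.google.cloud.aiplatform.v1beta1.JobState.JOB_STATE_SUCCEEDED
     *           || state == com.google.cloud.aiplatform.v1beta1.JobState.JOB_STATE_FAILED
     *           || state == com.google.cloud.aiplatform.v1beta1.JobState.JOB_STATE_CANCELLED;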
13421 * </pre> 13422 * 13423 * <code> 13424 * .google.cloud.aiplatform.v1beta1.JobState state = 10 [(.google.api.field_behavior) = OUTPUT_ONLY]; 13425 * </code> 13426 * 13427 * @return This builder for chaining. 13428 */ clearState()13429 public Builder clearState() { 13430 bitField0_ = (bitField0_ & ~0x00008000); 13431 state_ = 0; 13432 onChanged(); 13433 return this; 13434 } 13435 13436 private com.google.rpc.Status error_; 13437 private com.google.protobuf.SingleFieldBuilderV3< 13438 com.google.rpc.Status, com.google.rpc.Status.Builder, com.google.rpc.StatusOrBuilder> 13439 errorBuilder_; 13440 /** 13441 * 13442 * 13443 * <pre> 13444 * Output only. Only populated when the job's state is JOB_STATE_FAILED or 13445 * JOB_STATE_CANCELLED. 13446 * </pre> 13447 * 13448 * <code>.google.rpc.Status error = 11 [(.google.api.field_behavior) = OUTPUT_ONLY];</code> 13449 * 13450 * @return Whether the error field is set. 13451 */ hasError()13452 public boolean hasError() { 13453 return ((bitField0_ & 0x00010000) != 0); 13454 } 13455 /** 13456 * 13457 * 13458 * <pre> 13459 * Output only. Only populated when the job's state is JOB_STATE_FAILED or 13460 * JOB_STATE_CANCELLED. 13461 * </pre> 13462 * 13463 * <code>.google.rpc.Status error = 11 [(.google.api.field_behavior) = OUTPUT_ONLY];</code> 13464 * 13465 * @return The error. 13466 */ getError()13467 public com.google.rpc.Status getError() { 13468 if (errorBuilder_ == null) { 13469 return error_ == null ? com.google.rpc.Status.getDefaultInstance() : error_; 13470 } else { 13471 return errorBuilder_.getMessage(); 13472 } 13473 } 13474 /** 13475 * 13476 * 13477 * <pre> 13478 * Output only. Only populated when the job's state is JOB_STATE_FAILED or 13479 * JOB_STATE_CANCELLED. 13480 * </pre> 13481 * 13482 * <code>.google.rpc.Status error = 11 [(.google.api.field_behavior) = OUTPUT_ONLY];</code> 13483 */ setError(com.google.rpc.Status value)13484 public Builder setError(com.google.rpc.Status value) { 13485 if (errorBuilder_ == null) { 13486 if (value == null) { 13487 throw new NullPointerException(); 13488 } 13489 error_ = value; 13490 } else { 13491 errorBuilder_.setMessage(value); 13492 } 13493 bitField0_ |= 0x00010000; 13494 onChanged(); 13495 return this; 13496 } 13497 /** 13498 * 13499 * 13500 * <pre> 13501 * Output only. Only populated when the job's state is JOB_STATE_FAILED or 13502 * JOB_STATE_CANCELLED. 13503 * </pre> 13504 * 13505 * <code>.google.rpc.Status error = 11 [(.google.api.field_behavior) = OUTPUT_ONLY];</code> 13506 */ setError(com.google.rpc.Status.Builder builderForValue)13507 public Builder setError(com.google.rpc.Status.Builder builderForValue) { 13508 if (errorBuilder_ == null) { 13509 error_ = builderForValue.build(); 13510 } else { 13511 errorBuilder_.setMessage(builderForValue.build()); 13512 } 13513 bitField0_ |= 0x00010000; 13514 onChanged(); 13515 return this; 13516 } 13517 /** 13518 * 13519 * 13520 * <pre> 13521 * Output only. Only populated when the job's state is JOB_STATE_FAILED or 13522 * JOB_STATE_CANCELLED. 
     * </pre>
     *
     * <code>.google.rpc.Status error = 11 [(.google.api.field_behavior) = OUTPUT_ONLY];</code>
     */
    public Builder mergeError(com.google.rpc.Status value) {
      if (errorBuilder_ == null) {
        if (((bitField0_ & 0x00010000) != 0)
            && error_ != null
            && error_ != com.google.rpc.Status.getDefaultInstance()) {
          getErrorBuilder().mergeFrom(value);
        } else {
          error_ = value;
        }
      } else {
        errorBuilder_.mergeFrom(value);
      }
      bitField0_ |= 0x00010000;
      onChanged();
      return this;
    }
    /**
     *
     *
     * <pre>
     * Output only. Only populated when the job's state is JOB_STATE_FAILED or
     * JOB_STATE_CANCELLED.
     * </pre>
     *
     * <code>.google.rpc.Status error = 11 [(.google.api.field_behavior) = OUTPUT_ONLY];</code>
     */
    public Builder clearError() {
      bitField0_ = (bitField0_ & ~0x00010000);
      error_ = null;
      if (errorBuilder_ != null) {
        errorBuilder_.dispose();
        errorBuilder_ = null;
      }
      onChanged();
      return this;
    }
    /**
     *
     *
     * <pre>
     * Output only. Only populated when the job's state is JOB_STATE_FAILED or
     * JOB_STATE_CANCELLED.
     * </pre>
     *
     * <code>.google.rpc.Status error = 11 [(.google.api.field_behavior) = OUTPUT_ONLY];</code>
     */
    public com.google.rpc.Status.Builder getErrorBuilder() {
      bitField0_ |= 0x00010000;
      onChanged();
      return getErrorFieldBuilder().getBuilder();
    }
    /**
     *
     *
     * <pre>
     * Output only. Only populated when the job's state is JOB_STATE_FAILED or
     * JOB_STATE_CANCELLED.
     * </pre>
     *
     * <code>.google.rpc.Status error = 11 [(.google.api.field_behavior) = OUTPUT_ONLY];</code>
     */
    public com.google.rpc.StatusOrBuilder getErrorOrBuilder() {
      if (errorBuilder_ != null) {
        return errorBuilder_.getMessageOrBuilder();
      } else {
        return error_ == null ? com.google.rpc.Status.getDefaultInstance() : error_;
      }
    }
    /**
     *
     *
     * <pre>
     * Output only. Only populated when the job's state is JOB_STATE_FAILED or
     * JOB_STATE_CANCELLED.
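     * (Editorial usage sketch, not part of the generated description; `job` is a
     * hypothetical, already-fetched BatchPredictionJob.)
     *   if (job.hasError()) {
     *     com.google.rpc.Status error = job.getError();
     *     System.err.println("Job failed: code=" + error.getCode() + ", " + error.getMessage());
     *   }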
13601 * </pre> 13602 * 13603 * <code>.google.rpc.Status error = 11 [(.google.api.field_behavior) = OUTPUT_ONLY];</code> 13604 */ 13605 private com.google.protobuf.SingleFieldBuilderV3< 13606 com.google.rpc.Status, com.google.rpc.Status.Builder, com.google.rpc.StatusOrBuilder> getErrorFieldBuilder()13607 getErrorFieldBuilder() { 13608 if (errorBuilder_ == null) { 13609 errorBuilder_ = 13610 new com.google.protobuf.SingleFieldBuilderV3< 13611 com.google.rpc.Status, 13612 com.google.rpc.Status.Builder, 13613 com.google.rpc.StatusOrBuilder>(getError(), getParentForChildren(), isClean()); 13614 error_ = null; 13615 } 13616 return errorBuilder_; 13617 } 13618 13619 private java.util.List<com.google.rpc.Status> partialFailures_ = 13620 java.util.Collections.emptyList(); 13621 ensurePartialFailuresIsMutable()13622 private void ensurePartialFailuresIsMutable() { 13623 if (!((bitField0_ & 0x00020000) != 0)) { 13624 partialFailures_ = new java.util.ArrayList<com.google.rpc.Status>(partialFailures_); 13625 bitField0_ |= 0x00020000; 13626 } 13627 } 13628 13629 private com.google.protobuf.RepeatedFieldBuilderV3< 13630 com.google.rpc.Status, com.google.rpc.Status.Builder, com.google.rpc.StatusOrBuilder> 13631 partialFailuresBuilder_; 13632 13633 /** 13634 * 13635 * 13636 * <pre> 13637 * Output only. Partial failures encountered. 13638 * For example, single files that can't be read. 13639 * This field never exceeds 20 entries. 13640 * Status details fields contain standard Google Cloud error details. 13641 * </pre> 13642 * 13643 * <code> 13644 * repeated .google.rpc.Status partial_failures = 12 [(.google.api.field_behavior) = OUTPUT_ONLY]; 13645 * </code> 13646 */ getPartialFailuresList()13647 public java.util.List<com.google.rpc.Status> getPartialFailuresList() { 13648 if (partialFailuresBuilder_ == null) { 13649 return java.util.Collections.unmodifiableList(partialFailures_); 13650 } else { 13651 return partialFailuresBuilder_.getMessageList(); 13652 } 13653 } 13654 /** 13655 * 13656 * 13657 * <pre> 13658 * Output only. Partial failures encountered. 13659 * For example, single files that can't be read. 13660 * This field never exceeds 20 entries. 13661 * Status details fields contain standard Google Cloud error details. 13662 * </pre> 13663 * 13664 * <code> 13665 * repeated .google.rpc.Status partial_failures = 12 [(.google.api.field_behavior) = OUTPUT_ONLY]; 13666 * </code> 13667 */ getPartialFailuresCount()13668 public int getPartialFailuresCount() { 13669 if (partialFailuresBuilder_ == null) { 13670 return partialFailures_.size(); 13671 } else { 13672 return partialFailuresBuilder_.getCount(); 13673 } 13674 } 13675 /** 13676 * 13677 * 13678 * <pre> 13679 * Output only. Partial failures encountered. 13680 * For example, single files that can't be read. 13681 * This field never exceeds 20 entries. 13682 * Status details fields contain standard Google Cloud error details. 13683 * </pre> 13684 * 13685 * <code> 13686 * repeated .google.rpc.Status partial_failures = 12 [(.google.api.field_behavior) = OUTPUT_ONLY]; 13687 * </code> 13688 */ getPartialFailures(int index)13689 public com.google.rpc.Status getPartialFailures(int index) { 13690 if (partialFailuresBuilder_ == null) { 13691 return partialFailures_.get(index); 13692 } else { 13693 return partialFailuresBuilder_.getMessage(index); 13694 } 13695 } 13696 /** 13697 * 13698 * 13699 * <pre> 13700 * Output only. Partial failures encountered. 13701 * For example, single files that can't be read. 13702 * This field never exceeds 20 entries. 
13703 * Status details fields contain standard Google Cloud error details. 13704 * </pre> 13705 * 13706 * <code> 13707 * repeated .google.rpc.Status partial_failures = 12 [(.google.api.field_behavior) = OUTPUT_ONLY]; 13708 * </code> 13709 */ setPartialFailures(int index, com.google.rpc.Status value)13710 public Builder setPartialFailures(int index, com.google.rpc.Status value) { 13711 if (partialFailuresBuilder_ == null) { 13712 if (value == null) { 13713 throw new NullPointerException(); 13714 } 13715 ensurePartialFailuresIsMutable(); 13716 partialFailures_.set(index, value); 13717 onChanged(); 13718 } else { 13719 partialFailuresBuilder_.setMessage(index, value); 13720 } 13721 return this; 13722 } 13723 /** 13724 * 13725 * 13726 * <pre> 13727 * Output only. Partial failures encountered. 13728 * For example, single files that can't be read. 13729 * This field never exceeds 20 entries. 13730 * Status details fields contain standard Google Cloud error details. 13731 * </pre> 13732 * 13733 * <code> 13734 * repeated .google.rpc.Status partial_failures = 12 [(.google.api.field_behavior) = OUTPUT_ONLY]; 13735 * </code> 13736 */ setPartialFailures(int index, com.google.rpc.Status.Builder builderForValue)13737 public Builder setPartialFailures(int index, com.google.rpc.Status.Builder builderForValue) { 13738 if (partialFailuresBuilder_ == null) { 13739 ensurePartialFailuresIsMutable(); 13740 partialFailures_.set(index, builderForValue.build()); 13741 onChanged(); 13742 } else { 13743 partialFailuresBuilder_.setMessage(index, builderForValue.build()); 13744 } 13745 return this; 13746 } 13747 /** 13748 * 13749 * 13750 * <pre> 13751 * Output only. Partial failures encountered. 13752 * For example, single files that can't be read. 13753 * This field never exceeds 20 entries. 13754 * Status details fields contain standard Google Cloud error details. 13755 * </pre> 13756 * 13757 * <code> 13758 * repeated .google.rpc.Status partial_failures = 12 [(.google.api.field_behavior) = OUTPUT_ONLY]; 13759 * </code> 13760 */ addPartialFailures(com.google.rpc.Status value)13761 public Builder addPartialFailures(com.google.rpc.Status value) { 13762 if (partialFailuresBuilder_ == null) { 13763 if (value == null) { 13764 throw new NullPointerException(); 13765 } 13766 ensurePartialFailuresIsMutable(); 13767 partialFailures_.add(value); 13768 onChanged(); 13769 } else { 13770 partialFailuresBuilder_.addMessage(value); 13771 } 13772 return this; 13773 } 13774 /** 13775 * 13776 * 13777 * <pre> 13778 * Output only. Partial failures encountered. 13779 * For example, single files that can't be read. 13780 * This field never exceeds 20 entries. 13781 * Status details fields contain standard Google Cloud error details. 13782 * </pre> 13783 * 13784 * <code> 13785 * repeated .google.rpc.Status partial_failures = 12 [(.google.api.field_behavior) = OUTPUT_ONLY]; 13786 * </code> 13787 */ addPartialFailures(int index, com.google.rpc.Status value)13788 public Builder addPartialFailures(int index, com.google.rpc.Status value) { 13789 if (partialFailuresBuilder_ == null) { 13790 if (value == null) { 13791 throw new NullPointerException(); 13792 } 13793 ensurePartialFailuresIsMutable(); 13794 partialFailures_.add(index, value); 13795 onChanged(); 13796 } else { 13797 partialFailuresBuilder_.addMessage(index, value); 13798 } 13799 return this; 13800 } 13801 /** 13802 * 13803 * 13804 * <pre> 13805 * Output only. Partial failures encountered. 13806 * For example, single files that can't be read. 
13807 * This field never exceeds 20 entries. 13808 * Status details fields contain standard Google Cloud error details. 13809 * </pre> 13810 * 13811 * <code> 13812 * repeated .google.rpc.Status partial_failures = 12 [(.google.api.field_behavior) = OUTPUT_ONLY]; 13813 * </code> 13814 */ addPartialFailures(com.google.rpc.Status.Builder builderForValue)13815 public Builder addPartialFailures(com.google.rpc.Status.Builder builderForValue) { 13816 if (partialFailuresBuilder_ == null) { 13817 ensurePartialFailuresIsMutable(); 13818 partialFailures_.add(builderForValue.build()); 13819 onChanged(); 13820 } else { 13821 partialFailuresBuilder_.addMessage(builderForValue.build()); 13822 } 13823 return this; 13824 } 13825 /** 13826 * 13827 * 13828 * <pre> 13829 * Output only. Partial failures encountered. 13830 * For example, single files that can't be read. 13831 * This field never exceeds 20 entries. 13832 * Status details fields contain standard Google Cloud error details. 13833 * </pre> 13834 * 13835 * <code> 13836 * repeated .google.rpc.Status partial_failures = 12 [(.google.api.field_behavior) = OUTPUT_ONLY]; 13837 * </code> 13838 */ addPartialFailures(int index, com.google.rpc.Status.Builder builderForValue)13839 public Builder addPartialFailures(int index, com.google.rpc.Status.Builder builderForValue) { 13840 if (partialFailuresBuilder_ == null) { 13841 ensurePartialFailuresIsMutable(); 13842 partialFailures_.add(index, builderForValue.build()); 13843 onChanged(); 13844 } else { 13845 partialFailuresBuilder_.addMessage(index, builderForValue.build()); 13846 } 13847 return this; 13848 } 13849 /** 13850 * 13851 * 13852 * <pre> 13853 * Output only. Partial failures encountered. 13854 * For example, single files that can't be read. 13855 * This field never exceeds 20 entries. 13856 * Status details fields contain standard Google Cloud error details. 13857 * </pre> 13858 * 13859 * <code> 13860 * repeated .google.rpc.Status partial_failures = 12 [(.google.api.field_behavior) = OUTPUT_ONLY]; 13861 * </code> 13862 */ addAllPartialFailures( java.lang.Iterable<? extends com.google.rpc.Status> values)13863 public Builder addAllPartialFailures( 13864 java.lang.Iterable<? extends com.google.rpc.Status> values) { 13865 if (partialFailuresBuilder_ == null) { 13866 ensurePartialFailuresIsMutable(); 13867 com.google.protobuf.AbstractMessageLite.Builder.addAll(values, partialFailures_); 13868 onChanged(); 13869 } else { 13870 partialFailuresBuilder_.addAllMessages(values); 13871 } 13872 return this; 13873 } 13874 /** 13875 * 13876 * 13877 * <pre> 13878 * Output only. Partial failures encountered. 13879 * For example, single files that can't be read. 13880 * This field never exceeds 20 entries. 13881 * Status details fields contain standard Google Cloud error details. 13882 * </pre> 13883 * 13884 * <code> 13885 * repeated .google.rpc.Status partial_failures = 12 [(.google.api.field_behavior) = OUTPUT_ONLY]; 13886 * </code> 13887 */ clearPartialFailures()13888 public Builder clearPartialFailures() { 13889 if (partialFailuresBuilder_ == null) { 13890 partialFailures_ = java.util.Collections.emptyList(); 13891 bitField0_ = (bitField0_ & ~0x00020000); 13892 onChanged(); 13893 } else { 13894 partialFailuresBuilder_.clear(); 13895 } 13896 return this; 13897 } 13898 /** 13899 * 13900 * 13901 * <pre> 13902 * Output only. Partial failures encountered. 13903 * For example, single files that can't be read. 13904 * This field never exceeds 20 entries. 
13905 * Status details fields contain standard Google Cloud error details. 13906 * </pre> 13907 * 13908 * <code> 13909 * repeated .google.rpc.Status partial_failures = 12 [(.google.api.field_behavior) = OUTPUT_ONLY]; 13910 * </code> 13911 */ removePartialFailures(int index)13912 public Builder removePartialFailures(int index) { 13913 if (partialFailuresBuilder_ == null) { 13914 ensurePartialFailuresIsMutable(); 13915 partialFailures_.remove(index); 13916 onChanged(); 13917 } else { 13918 partialFailuresBuilder_.remove(index); 13919 } 13920 return this; 13921 } 13922 /** 13923 * 13924 * 13925 * <pre> 13926 * Output only. Partial failures encountered. 13927 * For example, single files that can't be read. 13928 * This field never exceeds 20 entries. 13929 * Status details fields contain standard Google Cloud error details. 13930 * </pre> 13931 * 13932 * <code> 13933 * repeated .google.rpc.Status partial_failures = 12 [(.google.api.field_behavior) = OUTPUT_ONLY]; 13934 * </code> 13935 */ getPartialFailuresBuilder(int index)13936 public com.google.rpc.Status.Builder getPartialFailuresBuilder(int index) { 13937 return getPartialFailuresFieldBuilder().getBuilder(index); 13938 } 13939 /** 13940 * 13941 * 13942 * <pre> 13943 * Output only. Partial failures encountered. 13944 * For example, single files that can't be read. 13945 * This field never exceeds 20 entries. 13946 * Status details fields contain standard Google Cloud error details. 13947 * </pre> 13948 * 13949 * <code> 13950 * repeated .google.rpc.Status partial_failures = 12 [(.google.api.field_behavior) = OUTPUT_ONLY]; 13951 * </code> 13952 */ getPartialFailuresOrBuilder(int index)13953 public com.google.rpc.StatusOrBuilder getPartialFailuresOrBuilder(int index) { 13954 if (partialFailuresBuilder_ == null) { 13955 return partialFailures_.get(index); 13956 } else { 13957 return partialFailuresBuilder_.getMessageOrBuilder(index); 13958 } 13959 } 13960 /** 13961 * 13962 * 13963 * <pre> 13964 * Output only. Partial failures encountered. 13965 * For example, single files that can't be read. 13966 * This field never exceeds 20 entries. 13967 * Status details fields contain standard Google Cloud error details. 13968 * </pre> 13969 * 13970 * <code> 13971 * repeated .google.rpc.Status partial_failures = 12 [(.google.api.field_behavior) = OUTPUT_ONLY]; 13972 * </code> 13973 */ 13974 public java.util.List<? extends com.google.rpc.StatusOrBuilder> getPartialFailuresOrBuilderList()13975 getPartialFailuresOrBuilderList() { 13976 if (partialFailuresBuilder_ != null) { 13977 return partialFailuresBuilder_.getMessageOrBuilderList(); 13978 } else { 13979 return java.util.Collections.unmodifiableList(partialFailures_); 13980 } 13981 } 13982 /** 13983 * 13984 * 13985 * <pre> 13986 * Output only. Partial failures encountered. 13987 * For example, single files that can't be read. 13988 * This field never exceeds 20 entries. 13989 * Status details fields contain standard Google Cloud error details. 13990 * </pre> 13991 * 13992 * <code> 13993 * repeated .google.rpc.Status partial_failures = 12 [(.google.api.field_behavior) = OUTPUT_ONLY]; 13994 * </code> 13995 */ addPartialFailuresBuilder()13996 public com.google.rpc.Status.Builder addPartialFailuresBuilder() { 13997 return getPartialFailuresFieldBuilder() 13998 .addBuilder(com.google.rpc.Status.getDefaultInstance()); 13999 } 14000 /** 14001 * 14002 * 14003 * <pre> 14004 * Output only. Partial failures encountered. 14005 * For example, single files that can't be read. 14006 * This field never exceeds 20 entries. 
14007 * Status details fields contain standard Google Cloud error details. 14008 * </pre> 14009 * 14010 * <code> 14011 * repeated .google.rpc.Status partial_failures = 12 [(.google.api.field_behavior) = OUTPUT_ONLY]; 14012 * </code> 14013 */ addPartialFailuresBuilder(int index)14014 public com.google.rpc.Status.Builder addPartialFailuresBuilder(int index) { 14015 return getPartialFailuresFieldBuilder() 14016 .addBuilder(index, com.google.rpc.Status.getDefaultInstance()); 14017 } 14018 /** 14019 * 14020 * 14021 * <pre> 14022 * Output only. Partial failures encountered. 14023 * For example, single files that can't be read. 14024 * This field never exceeds 20 entries. 14025 * Status details fields contain standard Google Cloud error details. 14026 * </pre> 14027 * 14028 * <code> 14029 * repeated .google.rpc.Status partial_failures = 12 [(.google.api.field_behavior) = OUTPUT_ONLY]; 14030 * </code> 14031 */ getPartialFailuresBuilderList()14032 public java.util.List<com.google.rpc.Status.Builder> getPartialFailuresBuilderList() { 14033 return getPartialFailuresFieldBuilder().getBuilderList(); 14034 } 14035 14036 private com.google.protobuf.RepeatedFieldBuilderV3< 14037 com.google.rpc.Status, com.google.rpc.Status.Builder, com.google.rpc.StatusOrBuilder> getPartialFailuresFieldBuilder()14038 getPartialFailuresFieldBuilder() { 14039 if (partialFailuresBuilder_ == null) { 14040 partialFailuresBuilder_ = 14041 new com.google.protobuf.RepeatedFieldBuilderV3< 14042 com.google.rpc.Status, 14043 com.google.rpc.Status.Builder, 14044 com.google.rpc.StatusOrBuilder>( 14045 partialFailures_, 14046 ((bitField0_ & 0x00020000) != 0), 14047 getParentForChildren(), 14048 isClean()); 14049 partialFailures_ = null; 14050 } 14051 return partialFailuresBuilder_; 14052 } 14053 14054 private com.google.cloud.aiplatform.v1beta1.ResourcesConsumed resourcesConsumed_; 14055 private com.google.protobuf.SingleFieldBuilderV3< 14056 com.google.cloud.aiplatform.v1beta1.ResourcesConsumed, 14057 com.google.cloud.aiplatform.v1beta1.ResourcesConsumed.Builder, 14058 com.google.cloud.aiplatform.v1beta1.ResourcesConsumedOrBuilder> 14059 resourcesConsumedBuilder_; 14060 /** 14061 * 14062 * 14063 * <pre> 14064 * Output only. Information about resources that had been consumed by this 14065 * job. Provided in real time at best effort basis, as well as a final value 14066 * once the job completes. 14067 * Note: This field currently may be not populated for batch predictions that 14068 * use AutoML Models. 14069 * </pre> 14070 * 14071 * <code> 14072 * .google.cloud.aiplatform.v1beta1.ResourcesConsumed resources_consumed = 13 [(.google.api.field_behavior) = OUTPUT_ONLY]; 14073 * </code> 14074 * 14075 * @return Whether the resourcesConsumed field is set. 14076 */ hasResourcesConsumed()14077 public boolean hasResourcesConsumed() { 14078 return ((bitField0_ & 0x00040000) != 0); 14079 } 14080 /** 14081 * 14082 * 14083 * <pre> 14084 * Output only. Information about resources that had been consumed by this 14085 * job. Provided in real time at best effort basis, as well as a final value 14086 * once the job completes. 14087 * Note: This field currently may be not populated for batch predictions that 14088 * use AutoML Models. 14089 * </pre> 14090 * 14091 * <code> 14092 * .google.cloud.aiplatform.v1beta1.ResourcesConsumed resources_consumed = 13 [(.google.api.field_behavior) = OUTPUT_ONLY]; 14093 * </code> 14094 * 14095 * @return The resourcesConsumed. 
     */
    public com.google.cloud.aiplatform.v1beta1.ResourcesConsumed getResourcesConsumed() {
      if (resourcesConsumedBuilder_ == null) {
        return resourcesConsumed_ == null
            ? com.google.cloud.aiplatform.v1beta1.ResourcesConsumed.getDefaultInstance()
            : resourcesConsumed_;
      } else {
        return resourcesConsumedBuilder_.getMessage();
      }
    }
    /**
     *
     *
     * <pre>
     * Output only. Information about resources that had been consumed by this
     * job. Provided in real time at best effort basis, as well as a final value
     * once the job completes.
     * Note: This field currently may be not populated for batch predictions that
     * use AutoML Models.
     * </pre>
     *
     * <code>
     * .google.cloud.aiplatform.v1beta1.ResourcesConsumed resources_consumed = 13 [(.google.api.field_behavior) = OUTPUT_ONLY];
     * </code>
     */
    public Builder setResourcesConsumed(
        com.google.cloud.aiplatform.v1beta1.ResourcesConsumed value) {
      if (resourcesConsumedBuilder_ == null) {
        if (value == null) {
          throw new NullPointerException();
        }
        resourcesConsumed_ = value;
      } else {
        resourcesConsumedBuilder_.setMessage(value);
      }
      bitField0_ |= 0x00040000;
      onChanged();
      return this;
    }
    /**
     *
     *
     * <pre>
     * Output only. Information about resources that had been consumed by this
     * job. Provided in real time at best effort basis, as well as a final value
     * once the job completes.
     * Note: This field currently may be not populated for batch predictions that
     * use AutoML Models.
     * </pre>
     *
     * <code>
     * .google.cloud.aiplatform.v1beta1.ResourcesConsumed resources_consumed = 13 [(.google.api.field_behavior) = OUTPUT_ONLY];
     * </code>
     */
    public Builder setResourcesConsumed(
        com.google.cloud.aiplatform.v1beta1.ResourcesConsumed.Builder builderForValue) {
      if (resourcesConsumedBuilder_ == null) {
        resourcesConsumed_ = builderForValue.build();
      } else {
        resourcesConsumedBuilder_.setMessage(builderForValue.build());
      }
      bitField0_ |= 0x00040000;
      onChanged();
      return this;
    }
    /**
     *
     *
     * <pre>
     * Output only. Information about resources that had been consumed by this
     * job. Provided in real time at best effort basis, as well as a final value
     * once the job completes.
     * Note: This field currently may be not populated for batch predictions that
     * use AutoML Models.
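     * (Editorial usage sketch, not part of the generated description; `job` is a
     * hypothetical, already-fetched BatchPredictionJob, and the replica-hours accessor is
     * assumed from the ResourcesConsumed message.)
     *   if (job.hasResourcesConsumed()) {
     *     double replicaHours = job.getResourcesConsumed().getReplicaHours();
     *   }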
14170 * </pre> 14171 * 14172 * <code> 14173 * .google.cloud.aiplatform.v1beta1.ResourcesConsumed resources_consumed = 13 [(.google.api.field_behavior) = OUTPUT_ONLY]; 14174 * </code> 14175 */ mergeResourcesConsumed( com.google.cloud.aiplatform.v1beta1.ResourcesConsumed value)14176 public Builder mergeResourcesConsumed( 14177 com.google.cloud.aiplatform.v1beta1.ResourcesConsumed value) { 14178 if (resourcesConsumedBuilder_ == null) { 14179 if (((bitField0_ & 0x00040000) != 0) 14180 && resourcesConsumed_ != null 14181 && resourcesConsumed_ 14182 != com.google.cloud.aiplatform.v1beta1.ResourcesConsumed.getDefaultInstance()) { 14183 getResourcesConsumedBuilder().mergeFrom(value); 14184 } else { 14185 resourcesConsumed_ = value; 14186 } 14187 } else { 14188 resourcesConsumedBuilder_.mergeFrom(value); 14189 } 14190 bitField0_ |= 0x00040000; 14191 onChanged(); 14192 return this; 14193 } 14194 /** 14195 * 14196 * 14197 * <pre> 14198 * Output only. Information about resources that had been consumed by this 14199 * job. Provided in real time at best effort basis, as well as a final value 14200 * once the job completes. 14201 * Note: This field currently may be not populated for batch predictions that 14202 * use AutoML Models. 14203 * </pre> 14204 * 14205 * <code> 14206 * .google.cloud.aiplatform.v1beta1.ResourcesConsumed resources_consumed = 13 [(.google.api.field_behavior) = OUTPUT_ONLY]; 14207 * </code> 14208 */ clearResourcesConsumed()14209 public Builder clearResourcesConsumed() { 14210 bitField0_ = (bitField0_ & ~0x00040000); 14211 resourcesConsumed_ = null; 14212 if (resourcesConsumedBuilder_ != null) { 14213 resourcesConsumedBuilder_.dispose(); 14214 resourcesConsumedBuilder_ = null; 14215 } 14216 onChanged(); 14217 return this; 14218 } 14219 /** 14220 * 14221 * 14222 * <pre> 14223 * Output only. Information about resources that had been consumed by this 14224 * job. Provided in real time at best effort basis, as well as a final value 14225 * once the job completes. 14226 * Note: This field currently may be not populated for batch predictions that 14227 * use AutoML Models. 14228 * </pre> 14229 * 14230 * <code> 14231 * .google.cloud.aiplatform.v1beta1.ResourcesConsumed resources_consumed = 13 [(.google.api.field_behavior) = OUTPUT_ONLY]; 14232 * </code> 14233 */ 14234 public com.google.cloud.aiplatform.v1beta1.ResourcesConsumed.Builder getResourcesConsumedBuilder()14235 getResourcesConsumedBuilder() { 14236 bitField0_ |= 0x00040000; 14237 onChanged(); 14238 return getResourcesConsumedFieldBuilder().getBuilder(); 14239 } 14240 /** 14241 * 14242 * 14243 * <pre> 14244 * Output only. Information about resources that had been consumed by this 14245 * job. Provided in real time at best effort basis, as well as a final value 14246 * once the job completes. 14247 * Note: This field currently may be not populated for batch predictions that 14248 * use AutoML Models. 14249 * </pre> 14250 * 14251 * <code> 14252 * .google.cloud.aiplatform.v1beta1.ResourcesConsumed resources_consumed = 13 [(.google.api.field_behavior) = OUTPUT_ONLY]; 14253 * </code> 14254 */ 14255 public com.google.cloud.aiplatform.v1beta1.ResourcesConsumedOrBuilder getResourcesConsumedOrBuilder()14256 getResourcesConsumedOrBuilder() { 14257 if (resourcesConsumedBuilder_ != null) { 14258 return resourcesConsumedBuilder_.getMessageOrBuilder(); 14259 } else { 14260 return resourcesConsumed_ == null 14261 ? 
              com.google.cloud.aiplatform.v1beta1.ResourcesConsumed.getDefaultInstance()
            : resourcesConsumed_;
      }
    }
    /**
     *
     *
     * <pre>
     * Output only. Information about resources that had been consumed by this
     * job. Provided in real time at best effort basis, as well as a final value
     * once the job completes.
     * Note: This field currently may be not populated for batch predictions that
     * use AutoML Models.
     * </pre>
     *
     * <code>
     * .google.cloud.aiplatform.v1beta1.ResourcesConsumed resources_consumed = 13 [(.google.api.field_behavior) = OUTPUT_ONLY];
     * </code>
     */
    private com.google.protobuf.SingleFieldBuilderV3<
            com.google.cloud.aiplatform.v1beta1.ResourcesConsumed,
            com.google.cloud.aiplatform.v1beta1.ResourcesConsumed.Builder,
            com.google.cloud.aiplatform.v1beta1.ResourcesConsumedOrBuilder>
        getResourcesConsumedFieldBuilder() {
      if (resourcesConsumedBuilder_ == null) {
        resourcesConsumedBuilder_ =
            new com.google.protobuf.SingleFieldBuilderV3<
                com.google.cloud.aiplatform.v1beta1.ResourcesConsumed,
                com.google.cloud.aiplatform.v1beta1.ResourcesConsumed.Builder,
                com.google.cloud.aiplatform.v1beta1.ResourcesConsumedOrBuilder>(
                getResourcesConsumed(), getParentForChildren(), isClean());
        resourcesConsumed_ = null;
      }
      return resourcesConsumedBuilder_;
    }

    private com.google.cloud.aiplatform.v1beta1.CompletionStats completionStats_;
    private com.google.protobuf.SingleFieldBuilderV3<
            com.google.cloud.aiplatform.v1beta1.CompletionStats,
            com.google.cloud.aiplatform.v1beta1.CompletionStats.Builder,
            com.google.cloud.aiplatform.v1beta1.CompletionStatsOrBuilder>
        completionStatsBuilder_;
    /**
     *
     *
     * <pre>
     * Output only. Statistics on completed and failed prediction instances.
     * </pre>
     *
     * <code>
     * .google.cloud.aiplatform.v1beta1.CompletionStats completion_stats = 14 [(.google.api.field_behavior) = OUTPUT_ONLY];
     * </code>
     *
     * @return Whether the completionStats field is set.
     */
    public boolean hasCompletionStats() {
      return ((bitField0_ & 0x00080000) != 0);
    }
    /**
     *
     *
     * <pre>
     * Output only. Statistics on completed and failed prediction instances.
     * </pre>
     *
     * <code>
     * .google.cloud.aiplatform.v1beta1.CompletionStats completion_stats = 14 [(.google.api.field_behavior) = OUTPUT_ONLY];
     * </code>
     *
     * @return The completionStats.
     */
    public com.google.cloud.aiplatform.v1beta1.CompletionStats getCompletionStats() {
      if (completionStatsBuilder_ == null) {
        return completionStats_ == null
            ? com.google.cloud.aiplatform.v1beta1.CompletionStats.getDefaultInstance()
            : completionStats_;
      } else {
        return completionStatsBuilder_.getMessage();
      }
    }
    /**
     *
     *
     * <pre>
     * Output only. Statistics on completed and failed prediction instances.
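     * (Editorial usage sketch, not part of the generated description; `job` is a
     * hypothetical, already-fetched BatchPredictionJob, and the count accessors are
     * assumed from the CompletionStats message.)
     *   if (job.hasCompletionStats()) {
     *     com.google.cloud.aiplatform.v1beta1.CompletionStats stats = job.getCompletionStats();
     *     long succeeded = stats.getSuccessfulCount();
     *     long failed = stats.getFailedCount();
     *   }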
14346 * </pre> 14347 * 14348 * <code> 14349 * .google.cloud.aiplatform.v1beta1.CompletionStats completion_stats = 14 [(.google.api.field_behavior) = OUTPUT_ONLY]; 14350 * </code> 14351 */ setCompletionStats(com.google.cloud.aiplatform.v1beta1.CompletionStats value)14352 public Builder setCompletionStats(com.google.cloud.aiplatform.v1beta1.CompletionStats value) { 14353 if (completionStatsBuilder_ == null) { 14354 if (value == null) { 14355 throw new NullPointerException(); 14356 } 14357 completionStats_ = value; 14358 } else { 14359 completionStatsBuilder_.setMessage(value); 14360 } 14361 bitField0_ |= 0x00080000; 14362 onChanged(); 14363 return this; 14364 } 14365 /** 14366 * 14367 * 14368 * <pre> 14369 * Output only. Statistics on completed and failed prediction instances. 14370 * </pre> 14371 * 14372 * <code> 14373 * .google.cloud.aiplatform.v1beta1.CompletionStats completion_stats = 14 [(.google.api.field_behavior) = OUTPUT_ONLY]; 14374 * </code> 14375 */ setCompletionStats( com.google.cloud.aiplatform.v1beta1.CompletionStats.Builder builderForValue)14376 public Builder setCompletionStats( 14377 com.google.cloud.aiplatform.v1beta1.CompletionStats.Builder builderForValue) { 14378 if (completionStatsBuilder_ == null) { 14379 completionStats_ = builderForValue.build(); 14380 } else { 14381 completionStatsBuilder_.setMessage(builderForValue.build()); 14382 } 14383 bitField0_ |= 0x00080000; 14384 onChanged(); 14385 return this; 14386 } 14387 /** 14388 * 14389 * 14390 * <pre> 14391 * Output only. Statistics on completed and failed prediction instances. 14392 * </pre> 14393 * 14394 * <code> 14395 * .google.cloud.aiplatform.v1beta1.CompletionStats completion_stats = 14 [(.google.api.field_behavior) = OUTPUT_ONLY]; 14396 * </code> 14397 */ mergeCompletionStats(com.google.cloud.aiplatform.v1beta1.CompletionStats value)14398 public Builder mergeCompletionStats(com.google.cloud.aiplatform.v1beta1.CompletionStats value) { 14399 if (completionStatsBuilder_ == null) { 14400 if (((bitField0_ & 0x00080000) != 0) 14401 && completionStats_ != null 14402 && completionStats_ 14403 != com.google.cloud.aiplatform.v1beta1.CompletionStats.getDefaultInstance()) { 14404 getCompletionStatsBuilder().mergeFrom(value); 14405 } else { 14406 completionStats_ = value; 14407 } 14408 } else { 14409 completionStatsBuilder_.mergeFrom(value); 14410 } 14411 bitField0_ |= 0x00080000; 14412 onChanged(); 14413 return this; 14414 } 14415 /** 14416 * 14417 * 14418 * <pre> 14419 * Output only. Statistics on completed and failed prediction instances. 14420 * </pre> 14421 * 14422 * <code> 14423 * .google.cloud.aiplatform.v1beta1.CompletionStats completion_stats = 14 [(.google.api.field_behavior) = OUTPUT_ONLY]; 14424 * </code> 14425 */ clearCompletionStats()14426 public Builder clearCompletionStats() { 14427 bitField0_ = (bitField0_ & ~0x00080000); 14428 completionStats_ = null; 14429 if (completionStatsBuilder_ != null) { 14430 completionStatsBuilder_.dispose(); 14431 completionStatsBuilder_ = null; 14432 } 14433 onChanged(); 14434 return this; 14435 } 14436 /** 14437 * 14438 * 14439 * <pre> 14440 * Output only. Statistics on completed and failed prediction instances. 
14441 * </pre> 14442 * 14443 * <code> 14444 * .google.cloud.aiplatform.v1beta1.CompletionStats completion_stats = 14 [(.google.api.field_behavior) = OUTPUT_ONLY]; 14445 * </code> 14446 */ getCompletionStatsBuilder()14447 public com.google.cloud.aiplatform.v1beta1.CompletionStats.Builder getCompletionStatsBuilder() { 14448 bitField0_ |= 0x00080000; 14449 onChanged(); 14450 return getCompletionStatsFieldBuilder().getBuilder(); 14451 } 14452 /** 14453 * 14454 * 14455 * <pre> 14456 * Output only. Statistics on completed and failed prediction instances. 14457 * </pre> 14458 * 14459 * <code> 14460 * .google.cloud.aiplatform.v1beta1.CompletionStats completion_stats = 14 [(.google.api.field_behavior) = OUTPUT_ONLY]; 14461 * </code> 14462 */ 14463 public com.google.cloud.aiplatform.v1beta1.CompletionStatsOrBuilder getCompletionStatsOrBuilder()14464 getCompletionStatsOrBuilder() { 14465 if (completionStatsBuilder_ != null) { 14466 return completionStatsBuilder_.getMessageOrBuilder(); 14467 } else { 14468 return completionStats_ == null 14469 ? com.google.cloud.aiplatform.v1beta1.CompletionStats.getDefaultInstance() 14470 : completionStats_; 14471 } 14472 } 14473 /** 14474 * 14475 * 14476 * <pre> 14477 * Output only. Statistics on completed and failed prediction instances. 14478 * </pre> 14479 * 14480 * <code> 14481 * .google.cloud.aiplatform.v1beta1.CompletionStats completion_stats = 14 [(.google.api.field_behavior) = OUTPUT_ONLY]; 14482 * </code> 14483 */ 14484 private com.google.protobuf.SingleFieldBuilderV3< 14485 com.google.cloud.aiplatform.v1beta1.CompletionStats, 14486 com.google.cloud.aiplatform.v1beta1.CompletionStats.Builder, 14487 com.google.cloud.aiplatform.v1beta1.CompletionStatsOrBuilder> getCompletionStatsFieldBuilder()14488 getCompletionStatsFieldBuilder() { 14489 if (completionStatsBuilder_ == null) { 14490 completionStatsBuilder_ = 14491 new com.google.protobuf.SingleFieldBuilderV3< 14492 com.google.cloud.aiplatform.v1beta1.CompletionStats, 14493 com.google.cloud.aiplatform.v1beta1.CompletionStats.Builder, 14494 com.google.cloud.aiplatform.v1beta1.CompletionStatsOrBuilder>( 14495 getCompletionStats(), getParentForChildren(), isClean()); 14496 completionStats_ = null; 14497 } 14498 return completionStatsBuilder_; 14499 } 14500 14501 private com.google.protobuf.Timestamp createTime_; 14502 private com.google.protobuf.SingleFieldBuilderV3< 14503 com.google.protobuf.Timestamp, 14504 com.google.protobuf.Timestamp.Builder, 14505 com.google.protobuf.TimestampOrBuilder> 14506 createTimeBuilder_; 14507 /** 14508 * 14509 * 14510 * <pre> 14511 * Output only. Time when the BatchPredictionJob was created. 14512 * </pre> 14513 * 14514 * <code> 14515 * .google.protobuf.Timestamp create_time = 15 [(.google.api.field_behavior) = OUTPUT_ONLY]; 14516 * </code> 14517 * 14518 * @return Whether the createTime field is set. 14519 */ hasCreateTime()14520 public boolean hasCreateTime() { 14521 return ((bitField0_ & 0x00100000) != 0); 14522 } 14523 /** 14524 * 14525 * 14526 * <pre> 14527 * Output only. Time when the BatchPredictionJob was created. 14528 * </pre> 14529 * 14530 * <code> 14531 * .google.protobuf.Timestamp create_time = 15 [(.google.api.field_behavior) = OUTPUT_ONLY]; 14532 * </code> 14533 * 14534 * @return The createTime. 14535 */ getCreateTime()14536 public com.google.protobuf.Timestamp getCreateTime() { 14537 if (createTimeBuilder_ == null) { 14538 return createTime_ == null 14539 ? 
com.google.protobuf.Timestamp.getDefaultInstance() 14540 : createTime_; 14541 } else { 14542 return createTimeBuilder_.getMessage(); 14543 } 14544 } 14545 /** 14546 * 14547 * 14548 * <pre> 14549 * Output only. Time when the BatchPredictionJob was created. 14550 * </pre> 14551 * 14552 * <code> 14553 * .google.protobuf.Timestamp create_time = 15 [(.google.api.field_behavior) = OUTPUT_ONLY]; 14554 * </code> 14555 */ setCreateTime(com.google.protobuf.Timestamp value)14556 public Builder setCreateTime(com.google.protobuf.Timestamp value) { 14557 if (createTimeBuilder_ == null) { 14558 if (value == null) { 14559 throw new NullPointerException(); 14560 } 14561 createTime_ = value; 14562 } else { 14563 createTimeBuilder_.setMessage(value); 14564 } 14565 bitField0_ |= 0x00100000; 14566 onChanged(); 14567 return this; 14568 } 14569 /** 14570 * 14571 * 14572 * <pre> 14573 * Output only. Time when the BatchPredictionJob was created. 14574 * </pre> 14575 * 14576 * <code> 14577 * .google.protobuf.Timestamp create_time = 15 [(.google.api.field_behavior) = OUTPUT_ONLY]; 14578 * </code> 14579 */ setCreateTime(com.google.protobuf.Timestamp.Builder builderForValue)14580 public Builder setCreateTime(com.google.protobuf.Timestamp.Builder builderForValue) { 14581 if (createTimeBuilder_ == null) { 14582 createTime_ = builderForValue.build(); 14583 } else { 14584 createTimeBuilder_.setMessage(builderForValue.build()); 14585 } 14586 bitField0_ |= 0x00100000; 14587 onChanged(); 14588 return this; 14589 } 14590 /** 14591 * 14592 * 14593 * <pre> 14594 * Output only. Time when the BatchPredictionJob was created. 14595 * </pre> 14596 * 14597 * <code> 14598 * .google.protobuf.Timestamp create_time = 15 [(.google.api.field_behavior) = OUTPUT_ONLY]; 14599 * </code> 14600 */ mergeCreateTime(com.google.protobuf.Timestamp value)14601 public Builder mergeCreateTime(com.google.protobuf.Timestamp value) { 14602 if (createTimeBuilder_ == null) { 14603 if (((bitField0_ & 0x00100000) != 0) 14604 && createTime_ != null 14605 && createTime_ != com.google.protobuf.Timestamp.getDefaultInstance()) { 14606 getCreateTimeBuilder().mergeFrom(value); 14607 } else { 14608 createTime_ = value; 14609 } 14610 } else { 14611 createTimeBuilder_.mergeFrom(value); 14612 } 14613 bitField0_ |= 0x00100000; 14614 onChanged(); 14615 return this; 14616 } 14617 /** 14618 * 14619 * 14620 * <pre> 14621 * Output only. Time when the BatchPredictionJob was created. 14622 * </pre> 14623 * 14624 * <code> 14625 * .google.protobuf.Timestamp create_time = 15 [(.google.api.field_behavior) = OUTPUT_ONLY]; 14626 * </code> 14627 */ clearCreateTime()14628 public Builder clearCreateTime() { 14629 bitField0_ = (bitField0_ & ~0x00100000); 14630 createTime_ = null; 14631 if (createTimeBuilder_ != null) { 14632 createTimeBuilder_.dispose(); 14633 createTimeBuilder_ = null; 14634 } 14635 onChanged(); 14636 return this; 14637 } 14638 /** 14639 * 14640 * 14641 * <pre> 14642 * Output only. Time when the BatchPredictionJob was created. 14643 * </pre> 14644 * 14645 * <code> 14646 * .google.protobuf.Timestamp create_time = 15 [(.google.api.field_behavior) = OUTPUT_ONLY]; 14647 * </code> 14648 */ getCreateTimeBuilder()14649 public com.google.protobuf.Timestamp.Builder getCreateTimeBuilder() { 14650 bitField0_ |= 0x00100000; 14651 onChanged(); 14652 return getCreateTimeFieldBuilder().getBuilder(); 14653 } 14654 /** 14655 * 14656 * 14657 * <pre> 14658 * Output only. Time when the BatchPredictionJob was created. 
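     * (Editorial usage sketch, not part of the generated description; assumes the optional
     * protobuf-java-util dependency for com.google.protobuf.util.Timestamps, and `job` is a
     * hypothetical, already-fetched BatchPredictionJob.)
     *   if (job.hasCreateTime()) {
     *     String createdAt = com.google.protobuf.util.Timestamps.toString(job.getCreateTime());
     *   }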
14659 * </pre> 14660 * 14661 * <code> 14662 * .google.protobuf.Timestamp create_time = 15 [(.google.api.field_behavior) = OUTPUT_ONLY]; 14663 * </code> 14664 */ getCreateTimeOrBuilder()14665 public com.google.protobuf.TimestampOrBuilder getCreateTimeOrBuilder() { 14666 if (createTimeBuilder_ != null) { 14667 return createTimeBuilder_.getMessageOrBuilder(); 14668 } else { 14669 return createTime_ == null 14670 ? com.google.protobuf.Timestamp.getDefaultInstance() 14671 : createTime_; 14672 } 14673 } 14674 /** 14675 * 14676 * 14677 * <pre> 14678 * Output only. Time when the BatchPredictionJob was created. 14679 * </pre> 14680 * 14681 * <code> 14682 * .google.protobuf.Timestamp create_time = 15 [(.google.api.field_behavior) = OUTPUT_ONLY]; 14683 * </code> 14684 */ 14685 private com.google.protobuf.SingleFieldBuilderV3< 14686 com.google.protobuf.Timestamp, 14687 com.google.protobuf.Timestamp.Builder, 14688 com.google.protobuf.TimestampOrBuilder> getCreateTimeFieldBuilder()14689 getCreateTimeFieldBuilder() { 14690 if (createTimeBuilder_ == null) { 14691 createTimeBuilder_ = 14692 new com.google.protobuf.SingleFieldBuilderV3< 14693 com.google.protobuf.Timestamp, 14694 com.google.protobuf.Timestamp.Builder, 14695 com.google.protobuf.TimestampOrBuilder>( 14696 getCreateTime(), getParentForChildren(), isClean()); 14697 createTime_ = null; 14698 } 14699 return createTimeBuilder_; 14700 } 14701 14702 private com.google.protobuf.Timestamp startTime_; 14703 private com.google.protobuf.SingleFieldBuilderV3< 14704 com.google.protobuf.Timestamp, 14705 com.google.protobuf.Timestamp.Builder, 14706 com.google.protobuf.TimestampOrBuilder> 14707 startTimeBuilder_; 14708 /** 14709 * 14710 * 14711 * <pre> 14712 * Output only. Time when the BatchPredictionJob for the first time entered 14713 * the `JOB_STATE_RUNNING` state. 14714 * </pre> 14715 * 14716 * <code> 14717 * .google.protobuf.Timestamp start_time = 16 [(.google.api.field_behavior) = OUTPUT_ONLY]; 14718 * </code> 14719 * 14720 * @return Whether the startTime field is set. 14721 */ hasStartTime()14722 public boolean hasStartTime() { 14723 return ((bitField0_ & 0x00200000) != 0); 14724 } 14725 /** 14726 * 14727 * 14728 * <pre> 14729 * Output only. Time when the BatchPredictionJob for the first time entered 14730 * the `JOB_STATE_RUNNING` state. 14731 * </pre> 14732 * 14733 * <code> 14734 * .google.protobuf.Timestamp start_time = 16 [(.google.api.field_behavior) = OUTPUT_ONLY]; 14735 * </code> 14736 * 14737 * @return The startTime. 14738 */ getStartTime()14739 public com.google.protobuf.Timestamp getStartTime() { 14740 if (startTimeBuilder_ == null) { 14741 return startTime_ == null ? com.google.protobuf.Timestamp.getDefaultInstance() : startTime_; 14742 } else { 14743 return startTimeBuilder_.getMessage(); 14744 } 14745 } 14746 /** 14747 * 14748 * 14749 * <pre> 14750 * Output only. Time when the BatchPredictionJob for the first time entered 14751 * the `JOB_STATE_RUNNING` state. 
14752 * </pre> 14753 * 14754 * <code> 14755 * .google.protobuf.Timestamp start_time = 16 [(.google.api.field_behavior) = OUTPUT_ONLY]; 14756 * </code> 14757 */ setStartTime(com.google.protobuf.Timestamp value)14758 public Builder setStartTime(com.google.protobuf.Timestamp value) { 14759 if (startTimeBuilder_ == null) { 14760 if (value == null) { 14761 throw new NullPointerException(); 14762 } 14763 startTime_ = value; 14764 } else { 14765 startTimeBuilder_.setMessage(value); 14766 } 14767 bitField0_ |= 0x00200000; 14768 onChanged(); 14769 return this; 14770 } 14771 /** 14772 * 14773 * 14774 * <pre> 14775 * Output only. Time when the BatchPredictionJob for the first time entered 14776 * the `JOB_STATE_RUNNING` state. 14777 * </pre> 14778 * 14779 * <code> 14780 * .google.protobuf.Timestamp start_time = 16 [(.google.api.field_behavior) = OUTPUT_ONLY]; 14781 * </code> 14782 */ setStartTime(com.google.protobuf.Timestamp.Builder builderForValue)14783 public Builder setStartTime(com.google.protobuf.Timestamp.Builder builderForValue) { 14784 if (startTimeBuilder_ == null) { 14785 startTime_ = builderForValue.build(); 14786 } else { 14787 startTimeBuilder_.setMessage(builderForValue.build()); 14788 } 14789 bitField0_ |= 0x00200000; 14790 onChanged(); 14791 return this; 14792 } 14793 /** 14794 * 14795 * 14796 * <pre> 14797 * Output only. Time when the BatchPredictionJob for the first time entered 14798 * the `JOB_STATE_RUNNING` state. 14799 * </pre> 14800 * 14801 * <code> 14802 * .google.protobuf.Timestamp start_time = 16 [(.google.api.field_behavior) = OUTPUT_ONLY]; 14803 * </code> 14804 */ mergeStartTime(com.google.protobuf.Timestamp value)14805 public Builder mergeStartTime(com.google.protobuf.Timestamp value) { 14806 if (startTimeBuilder_ == null) { 14807 if (((bitField0_ & 0x00200000) != 0) 14808 && startTime_ != null 14809 && startTime_ != com.google.protobuf.Timestamp.getDefaultInstance()) { 14810 getStartTimeBuilder().mergeFrom(value); 14811 } else { 14812 startTime_ = value; 14813 } 14814 } else { 14815 startTimeBuilder_.mergeFrom(value); 14816 } 14817 bitField0_ |= 0x00200000; 14818 onChanged(); 14819 return this; 14820 } 14821 /** 14822 * 14823 * 14824 * <pre> 14825 * Output only. Time when the BatchPredictionJob for the first time entered 14826 * the `JOB_STATE_RUNNING` state. 14827 * </pre> 14828 * 14829 * <code> 14830 * .google.protobuf.Timestamp start_time = 16 [(.google.api.field_behavior) = OUTPUT_ONLY]; 14831 * </code> 14832 */ clearStartTime()14833 public Builder clearStartTime() { 14834 bitField0_ = (bitField0_ & ~0x00200000); 14835 startTime_ = null; 14836 if (startTimeBuilder_ != null) { 14837 startTimeBuilder_.dispose(); 14838 startTimeBuilder_ = null; 14839 } 14840 onChanged(); 14841 return this; 14842 } 14843 /** 14844 * 14845 * 14846 * <pre> 14847 * Output only. Time when the BatchPredictionJob for the first time entered 14848 * the `JOB_STATE_RUNNING` state. 14849 * </pre> 14850 * 14851 * <code> 14852 * .google.protobuf.Timestamp start_time = 16 [(.google.api.field_behavior) = OUTPUT_ONLY]; 14853 * </code> 14854 */ getStartTimeBuilder()14855 public com.google.protobuf.Timestamp.Builder getStartTimeBuilder() { 14856 bitField0_ |= 0x00200000; 14857 onChanged(); 14858 return getStartTimeFieldBuilder().getBuilder(); 14859 } 14860 /** 14861 * 14862 * 14863 * <pre> 14864 * Output only. Time when the BatchPredictionJob for the first time entered 14865 * the `JOB_STATE_RUNNING` state. 
14866 * </pre> 14867 * 14868 * <code> 14869 * .google.protobuf.Timestamp start_time = 16 [(.google.api.field_behavior) = OUTPUT_ONLY]; 14870 * </code> 14871 */ getStartTimeOrBuilder()14872 public com.google.protobuf.TimestampOrBuilder getStartTimeOrBuilder() { 14873 if (startTimeBuilder_ != null) { 14874 return startTimeBuilder_.getMessageOrBuilder(); 14875 } else { 14876 return startTime_ == null ? com.google.protobuf.Timestamp.getDefaultInstance() : startTime_; 14877 } 14878 } 14879 /** 14880 * 14881 * 14882 * <pre> 14883 * Output only. Time when the BatchPredictionJob for the first time entered 14884 * the `JOB_STATE_RUNNING` state. 14885 * </pre> 14886 * 14887 * <code> 14888 * .google.protobuf.Timestamp start_time = 16 [(.google.api.field_behavior) = OUTPUT_ONLY]; 14889 * </code> 14890 */ 14891 private com.google.protobuf.SingleFieldBuilderV3< 14892 com.google.protobuf.Timestamp, 14893 com.google.protobuf.Timestamp.Builder, 14894 com.google.protobuf.TimestampOrBuilder> getStartTimeFieldBuilder()14895 getStartTimeFieldBuilder() { 14896 if (startTimeBuilder_ == null) { 14897 startTimeBuilder_ = 14898 new com.google.protobuf.SingleFieldBuilderV3< 14899 com.google.protobuf.Timestamp, 14900 com.google.protobuf.Timestamp.Builder, 14901 com.google.protobuf.TimestampOrBuilder>( 14902 getStartTime(), getParentForChildren(), isClean()); 14903 startTime_ = null; 14904 } 14905 return startTimeBuilder_; 14906 } 14907 14908 private com.google.protobuf.Timestamp endTime_; 14909 private com.google.protobuf.SingleFieldBuilderV3< 14910 com.google.protobuf.Timestamp, 14911 com.google.protobuf.Timestamp.Builder, 14912 com.google.protobuf.TimestampOrBuilder> 14913 endTimeBuilder_; 14914 /** 14915 * 14916 * 14917 * <pre> 14918 * Output only. Time when the BatchPredictionJob entered any of the following 14919 * states: `JOB_STATE_SUCCEEDED`, `JOB_STATE_FAILED`, `JOB_STATE_CANCELLED`. 14920 * </pre> 14921 * 14922 * <code>.google.protobuf.Timestamp end_time = 17 [(.google.api.field_behavior) = OUTPUT_ONLY]; 14923 * </code> 14924 * 14925 * @return Whether the endTime field is set. 14926 */ hasEndTime()14927 public boolean hasEndTime() { 14928 return ((bitField0_ & 0x00400000) != 0); 14929 } 14930 /** 14931 * 14932 * 14933 * <pre> 14934 * Output only. Time when the BatchPredictionJob entered any of the following 14935 * states: `JOB_STATE_SUCCEEDED`, `JOB_STATE_FAILED`, `JOB_STATE_CANCELLED`. 14936 * </pre> 14937 * 14938 * <code>.google.protobuf.Timestamp end_time = 17 [(.google.api.field_behavior) = OUTPUT_ONLY]; 14939 * </code> 14940 * 14941 * @return The endTime. 14942 */ getEndTime()14943 public com.google.protobuf.Timestamp getEndTime() { 14944 if (endTimeBuilder_ == null) { 14945 return endTime_ == null ? com.google.protobuf.Timestamp.getDefaultInstance() : endTime_; 14946 } else { 14947 return endTimeBuilder_.getMessage(); 14948 } 14949 } 14950 /** 14951 * 14952 * 14953 * <pre> 14954 * Output only. Time when the BatchPredictionJob entered any of the following 14955 * states: `JOB_STATE_SUCCEEDED`, `JOB_STATE_FAILED`, `JOB_STATE_CANCELLED`. 
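     * (Editorial usage sketch, not part of the generated description; assumes the optional
     * protobuf-java-util dependency, and `job` is a hypothetical, already-fetched
     * BatchPredictionJob.)
     *   if (job.hasStartTime() && job.hasEndTime()) {
     *     com.google.protobuf.Duration ran =
     *         com.google.protobuf.util.Timestamps.between(job.getStartTime(), job.getEndTime());
     *   }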
14956 * </pre> 14957 * 14958 * <code>.google.protobuf.Timestamp end_time = 17 [(.google.api.field_behavior) = OUTPUT_ONLY]; 14959 * </code> 14960 */ setEndTime(com.google.protobuf.Timestamp value)14961 public Builder setEndTime(com.google.protobuf.Timestamp value) { 14962 if (endTimeBuilder_ == null) { 14963 if (value == null) { 14964 throw new NullPointerException(); 14965 } 14966 endTime_ = value; 14967 } else { 14968 endTimeBuilder_.setMessage(value); 14969 } 14970 bitField0_ |= 0x00400000; 14971 onChanged(); 14972 return this; 14973 } 14974 /** 14975 * 14976 * 14977 * <pre> 14978 * Output only. Time when the BatchPredictionJob entered any of the following 14979 * states: `JOB_STATE_SUCCEEDED`, `JOB_STATE_FAILED`, `JOB_STATE_CANCELLED`. 14980 * </pre> 14981 * 14982 * <code>.google.protobuf.Timestamp end_time = 17 [(.google.api.field_behavior) = OUTPUT_ONLY]; 14983 * </code> 14984 */ setEndTime(com.google.protobuf.Timestamp.Builder builderForValue)14985 public Builder setEndTime(com.google.protobuf.Timestamp.Builder builderForValue) { 14986 if (endTimeBuilder_ == null) { 14987 endTime_ = builderForValue.build(); 14988 } else { 14989 endTimeBuilder_.setMessage(builderForValue.build()); 14990 } 14991 bitField0_ |= 0x00400000; 14992 onChanged(); 14993 return this; 14994 } 14995 /** 14996 * 14997 * 14998 * <pre> 14999 * Output only. Time when the BatchPredictionJob entered any of the following 15000 * states: `JOB_STATE_SUCCEEDED`, `JOB_STATE_FAILED`, `JOB_STATE_CANCELLED`. 15001 * </pre> 15002 * 15003 * <code>.google.protobuf.Timestamp end_time = 17 [(.google.api.field_behavior) = OUTPUT_ONLY]; 15004 * </code> 15005 */ mergeEndTime(com.google.protobuf.Timestamp value)15006 public Builder mergeEndTime(com.google.protobuf.Timestamp value) { 15007 if (endTimeBuilder_ == null) { 15008 if (((bitField0_ & 0x00400000) != 0) 15009 && endTime_ != null 15010 && endTime_ != com.google.protobuf.Timestamp.getDefaultInstance()) { 15011 getEndTimeBuilder().mergeFrom(value); 15012 } else { 15013 endTime_ = value; 15014 } 15015 } else { 15016 endTimeBuilder_.mergeFrom(value); 15017 } 15018 bitField0_ |= 0x00400000; 15019 onChanged(); 15020 return this; 15021 } 15022 /** 15023 * 15024 * 15025 * <pre> 15026 * Output only. Time when the BatchPredictionJob entered any of the following 15027 * states: `JOB_STATE_SUCCEEDED`, `JOB_STATE_FAILED`, `JOB_STATE_CANCELLED`. 15028 * </pre> 15029 * 15030 * <code>.google.protobuf.Timestamp end_time = 17 [(.google.api.field_behavior) = OUTPUT_ONLY]; 15031 * </code> 15032 */ clearEndTime()15033 public Builder clearEndTime() { 15034 bitField0_ = (bitField0_ & ~0x00400000); 15035 endTime_ = null; 15036 if (endTimeBuilder_ != null) { 15037 endTimeBuilder_.dispose(); 15038 endTimeBuilder_ = null; 15039 } 15040 onChanged(); 15041 return this; 15042 } 15043 /** 15044 * 15045 * 15046 * <pre> 15047 * Output only. Time when the BatchPredictionJob entered any of the following 15048 * states: `JOB_STATE_SUCCEEDED`, `JOB_STATE_FAILED`, `JOB_STATE_CANCELLED`. 15049 * </pre> 15050 * 15051 * <code>.google.protobuf.Timestamp end_time = 17 [(.google.api.field_behavior) = OUTPUT_ONLY]; 15052 * </code> 15053 */ getEndTimeBuilder()15054 public com.google.protobuf.Timestamp.Builder getEndTimeBuilder() { 15055 bitField0_ |= 0x00400000; 15056 onChanged(); 15057 return getEndTimeFieldBuilder().getBuilder(); 15058 } 15059 /** 15060 * 15061 * 15062 * <pre> 15063 * Output only. 
Time when the BatchPredictionJob entered any of the following 15064 * states: `JOB_STATE_SUCCEEDED`, `JOB_STATE_FAILED`, `JOB_STATE_CANCELLED`. 15065 * </pre> 15066 * 15067 * <code>.google.protobuf.Timestamp end_time = 17 [(.google.api.field_behavior) = OUTPUT_ONLY]; 15068 * </code> 15069 */ getEndTimeOrBuilder()15070 public com.google.protobuf.TimestampOrBuilder getEndTimeOrBuilder() { 15071 if (endTimeBuilder_ != null) { 15072 return endTimeBuilder_.getMessageOrBuilder(); 15073 } else { 15074 return endTime_ == null ? com.google.protobuf.Timestamp.getDefaultInstance() : endTime_; 15075 } 15076 } 15077 /** 15078 * 15079 * 15080 * <pre> 15081 * Output only. Time when the BatchPredictionJob entered any of the following 15082 * states: `JOB_STATE_SUCCEEDED`, `JOB_STATE_FAILED`, `JOB_STATE_CANCELLED`. 15083 * </pre> 15084 * 15085 * <code>.google.protobuf.Timestamp end_time = 17 [(.google.api.field_behavior) = OUTPUT_ONLY]; 15086 * </code> 15087 */ 15088 private com.google.protobuf.SingleFieldBuilderV3< 15089 com.google.protobuf.Timestamp, 15090 com.google.protobuf.Timestamp.Builder, 15091 com.google.protobuf.TimestampOrBuilder> getEndTimeFieldBuilder()15092 getEndTimeFieldBuilder() { 15093 if (endTimeBuilder_ == null) { 15094 endTimeBuilder_ = 15095 new com.google.protobuf.SingleFieldBuilderV3< 15096 com.google.protobuf.Timestamp, 15097 com.google.protobuf.Timestamp.Builder, 15098 com.google.protobuf.TimestampOrBuilder>( 15099 getEndTime(), getParentForChildren(), isClean()); 15100 endTime_ = null; 15101 } 15102 return endTimeBuilder_; 15103 } 15104 15105 private com.google.protobuf.Timestamp updateTime_; 15106 private com.google.protobuf.SingleFieldBuilderV3< 15107 com.google.protobuf.Timestamp, 15108 com.google.protobuf.Timestamp.Builder, 15109 com.google.protobuf.TimestampOrBuilder> 15110 updateTimeBuilder_; 15111 /** 15112 * 15113 * 15114 * <pre> 15115 * Output only. Time when the BatchPredictionJob was most recently updated. 15116 * </pre> 15117 * 15118 * <code> 15119 * .google.protobuf.Timestamp update_time = 18 [(.google.api.field_behavior) = OUTPUT_ONLY]; 15120 * </code> 15121 * 15122 * @return Whether the updateTime field is set. 15123 */ hasUpdateTime()15124 public boolean hasUpdateTime() { 15125 return ((bitField0_ & 0x00800000) != 0); 15126 } 15127 /** 15128 * 15129 * 15130 * <pre> 15131 * Output only. Time when the BatchPredictionJob was most recently updated. 15132 * </pre> 15133 * 15134 * <code> 15135 * .google.protobuf.Timestamp update_time = 18 [(.google.api.field_behavior) = OUTPUT_ONLY]; 15136 * </code> 15137 * 15138 * @return The updateTime. 15139 */ getUpdateTime()15140 public com.google.protobuf.Timestamp getUpdateTime() { 15141 if (updateTimeBuilder_ == null) { 15142 return updateTime_ == null 15143 ? com.google.protobuf.Timestamp.getDefaultInstance() 15144 : updateTime_; 15145 } else { 15146 return updateTimeBuilder_.getMessage(); 15147 } 15148 } 15149 /** 15150 * 15151 * 15152 * <pre> 15153 * Output only. Time when the BatchPredictionJob was most recently updated. 
15154 * </pre> 15155 * 15156 * <code> 15157 * .google.protobuf.Timestamp update_time = 18 [(.google.api.field_behavior) = OUTPUT_ONLY]; 15158 * </code> 15159 */ setUpdateTime(com.google.protobuf.Timestamp value)15160 public Builder setUpdateTime(com.google.protobuf.Timestamp value) { 15161 if (updateTimeBuilder_ == null) { 15162 if (value == null) { 15163 throw new NullPointerException(); 15164 } 15165 updateTime_ = value; 15166 } else { 15167 updateTimeBuilder_.setMessage(value); 15168 } 15169 bitField0_ |= 0x00800000; 15170 onChanged(); 15171 return this; 15172 } 15173 /** 15174 * 15175 * 15176 * <pre> 15177 * Output only. Time when the BatchPredictionJob was most recently updated. 15178 * </pre> 15179 * 15180 * <code> 15181 * .google.protobuf.Timestamp update_time = 18 [(.google.api.field_behavior) = OUTPUT_ONLY]; 15182 * </code> 15183 */ setUpdateTime(com.google.protobuf.Timestamp.Builder builderForValue)15184 public Builder setUpdateTime(com.google.protobuf.Timestamp.Builder builderForValue) { 15185 if (updateTimeBuilder_ == null) { 15186 updateTime_ = builderForValue.build(); 15187 } else { 15188 updateTimeBuilder_.setMessage(builderForValue.build()); 15189 } 15190 bitField0_ |= 0x00800000; 15191 onChanged(); 15192 return this; 15193 } 15194 /** 15195 * 15196 * 15197 * <pre> 15198 * Output only. Time when the BatchPredictionJob was most recently updated. 15199 * </pre> 15200 * 15201 * <code> 15202 * .google.protobuf.Timestamp update_time = 18 [(.google.api.field_behavior) = OUTPUT_ONLY]; 15203 * </code> 15204 */ mergeUpdateTime(com.google.protobuf.Timestamp value)15205 public Builder mergeUpdateTime(com.google.protobuf.Timestamp value) { 15206 if (updateTimeBuilder_ == null) { 15207 if (((bitField0_ & 0x00800000) != 0) 15208 && updateTime_ != null 15209 && updateTime_ != com.google.protobuf.Timestamp.getDefaultInstance()) { 15210 getUpdateTimeBuilder().mergeFrom(value); 15211 } else { 15212 updateTime_ = value; 15213 } 15214 } else { 15215 updateTimeBuilder_.mergeFrom(value); 15216 } 15217 bitField0_ |= 0x00800000; 15218 onChanged(); 15219 return this; 15220 } 15221 /** 15222 * 15223 * 15224 * <pre> 15225 * Output only. Time when the BatchPredictionJob was most recently updated. 15226 * </pre> 15227 * 15228 * <code> 15229 * .google.protobuf.Timestamp update_time = 18 [(.google.api.field_behavior) = OUTPUT_ONLY]; 15230 * </code> 15231 */ clearUpdateTime()15232 public Builder clearUpdateTime() { 15233 bitField0_ = (bitField0_ & ~0x00800000); 15234 updateTime_ = null; 15235 if (updateTimeBuilder_ != null) { 15236 updateTimeBuilder_.dispose(); 15237 updateTimeBuilder_ = null; 15238 } 15239 onChanged(); 15240 return this; 15241 } 15242 /** 15243 * 15244 * 15245 * <pre> 15246 * Output only. Time when the BatchPredictionJob was most recently updated. 15247 * </pre> 15248 * 15249 * <code> 15250 * .google.protobuf.Timestamp update_time = 18 [(.google.api.field_behavior) = OUTPUT_ONLY]; 15251 * </code> 15252 */ getUpdateTimeBuilder()15253 public com.google.protobuf.Timestamp.Builder getUpdateTimeBuilder() { 15254 bitField0_ |= 0x00800000; 15255 onChanged(); 15256 return getUpdateTimeFieldBuilder().getBuilder(); 15257 } 15258 /** 15259 * 15260 * 15261 * <pre> 15262 * Output only. Time when the BatchPredictionJob was most recently updated. 
15263 * </pre> 15264 * 15265 * <code> 15266 * .google.protobuf.Timestamp update_time = 18 [(.google.api.field_behavior) = OUTPUT_ONLY]; 15267 * </code> 15268 */ getUpdateTimeOrBuilder()15269 public com.google.protobuf.TimestampOrBuilder getUpdateTimeOrBuilder() { 15270 if (updateTimeBuilder_ != null) { 15271 return updateTimeBuilder_.getMessageOrBuilder(); 15272 } else { 15273 return updateTime_ == null 15274 ? com.google.protobuf.Timestamp.getDefaultInstance() 15275 : updateTime_; 15276 } 15277 } 15278 /** 15279 * 15280 * 15281 * <pre> 15282 * Output only. Time when the BatchPredictionJob was most recently updated. 15283 * </pre> 15284 * 15285 * <code> 15286 * .google.protobuf.Timestamp update_time = 18 [(.google.api.field_behavior) = OUTPUT_ONLY]; 15287 * </code> 15288 */ 15289 private com.google.protobuf.SingleFieldBuilderV3< 15290 com.google.protobuf.Timestamp, 15291 com.google.protobuf.Timestamp.Builder, 15292 com.google.protobuf.TimestampOrBuilder> getUpdateTimeFieldBuilder()15293 getUpdateTimeFieldBuilder() { 15294 if (updateTimeBuilder_ == null) { 15295 updateTimeBuilder_ = 15296 new com.google.protobuf.SingleFieldBuilderV3< 15297 com.google.protobuf.Timestamp, 15298 com.google.protobuf.Timestamp.Builder, 15299 com.google.protobuf.TimestampOrBuilder>( 15300 getUpdateTime(), getParentForChildren(), isClean()); 15301 updateTime_ = null; 15302 } 15303 return updateTimeBuilder_; 15304 } 15305 15306 private com.google.protobuf.MapField<java.lang.String, java.lang.String> labels_; 15307 internalGetLabels()15308 private com.google.protobuf.MapField<java.lang.String, java.lang.String> internalGetLabels() { 15309 if (labels_ == null) { 15310 return com.google.protobuf.MapField.emptyMapField(LabelsDefaultEntryHolder.defaultEntry); 15311 } 15312 return labels_; 15313 } 15314 15315 private com.google.protobuf.MapField<java.lang.String, java.lang.String> internalGetMutableLabels()15316 internalGetMutableLabels() { 15317 if (labels_ == null) { 15318 labels_ = com.google.protobuf.MapField.newMapField(LabelsDefaultEntryHolder.defaultEntry); 15319 } 15320 if (!labels_.isMutable()) { 15321 labels_ = labels_.copy(); 15322 } 15323 bitField0_ |= 0x01000000; 15324 onChanged(); 15325 return labels_; 15326 } 15327 getLabelsCount()15328 public int getLabelsCount() { 15329 return internalGetLabels().getMap().size(); 15330 } 15331 /** 15332 * 15333 * 15334 * <pre> 15335 * The labels with user-defined metadata to organize BatchPredictionJobs. 15336 * Label keys and values can be no longer than 64 characters 15337 * (Unicode codepoints), can only contain lowercase letters, numeric 15338 * characters, underscores and dashes. International characters are allowed. 15339 * See https://goo.gl/xmQnxf for more information and examples of labels. 15340 * </pre> 15341 * 15342 * <code>map<string, string> labels = 19;</code> 15343 */ 15344 @java.lang.Override containsLabels(java.lang.String key)15345 public boolean containsLabels(java.lang.String key) { 15346 if (key == null) { 15347 throw new NullPointerException("map key"); 15348 } 15349 return internalGetLabels().getMap().containsKey(key); 15350 } 15351 /** Use {@link #getLabelsMap()} instead. */ 15352 @java.lang.Override 15353 @java.lang.Deprecated getLabels()15354 public java.util.Map<java.lang.String, java.lang.String> getLabels() { 15355 return getLabelsMap(); 15356 } 15357 /** 15358 * 15359 * 15360 * <pre> 15361 * The labels with user-defined metadata to organize BatchPredictionJobs. 
15362 * Label keys and values can be no longer than 64 characters 15363 * (Unicode codepoints), can only contain lowercase letters, numeric 15364 * characters, underscores and dashes. International characters are allowed. 15365 * See https://goo.gl/xmQnxf for more information and examples of labels. 15366 * </pre> 15367 * 15368 * <code>map<string, string> labels = 19;</code> 15369 */ 15370 @java.lang.Override getLabelsMap()15371 public java.util.Map<java.lang.String, java.lang.String> getLabelsMap() { 15372 return internalGetLabels().getMap(); 15373 } 15374 /** 15375 * 15376 * 15377 * <pre> 15378 * The labels with user-defined metadata to organize BatchPredictionJobs. 15379 * Label keys and values can be no longer than 64 characters 15380 * (Unicode codepoints), can only contain lowercase letters, numeric 15381 * characters, underscores and dashes. International characters are allowed. 15382 * See https://goo.gl/xmQnxf for more information and examples of labels. 15383 * </pre> 15384 * 15385 * <code>map<string, string> labels = 19;</code> 15386 */ 15387 @java.lang.Override getLabelsOrDefault( java.lang.String key, java.lang.String defaultValue)15388 public /* nullable */ java.lang.String getLabelsOrDefault( 15389 java.lang.String key, 15390 /* nullable */ 15391 java.lang.String defaultValue) { 15392 if (key == null) { 15393 throw new NullPointerException("map key"); 15394 } 15395 java.util.Map<java.lang.String, java.lang.String> map = internalGetLabels().getMap(); 15396 return map.containsKey(key) ? map.get(key) : defaultValue; 15397 } 15398 /** 15399 * 15400 * 15401 * <pre> 15402 * The labels with user-defined metadata to organize BatchPredictionJobs. 15403 * Label keys and values can be no longer than 64 characters 15404 * (Unicode codepoints), can only contain lowercase letters, numeric 15405 * characters, underscores and dashes. International characters are allowed. 15406 * See https://goo.gl/xmQnxf for more information and examples of labels. 15407 * </pre> 15408 * 15409 * <code>map<string, string> labels = 19;</code> 15410 */ 15411 @java.lang.Override getLabelsOrThrow(java.lang.String key)15412 public java.lang.String getLabelsOrThrow(java.lang.String key) { 15413 if (key == null) { 15414 throw new NullPointerException("map key"); 15415 } 15416 java.util.Map<java.lang.String, java.lang.String> map = internalGetLabels().getMap(); 15417 if (!map.containsKey(key)) { 15418 throw new java.lang.IllegalArgumentException(); 15419 } 15420 return map.get(key); 15421 } 15422 clearLabels()15423 public Builder clearLabels() { 15424 bitField0_ = (bitField0_ & ~0x01000000); 15425 internalGetMutableLabels().getMutableMap().clear(); 15426 return this; 15427 } 15428 /** 15429 * 15430 * 15431 * <pre> 15432 * The labels with user-defined metadata to organize BatchPredictionJobs. 15433 * Label keys and values can be no longer than 64 characters 15434 * (Unicode codepoints), can only contain lowercase letters, numeric 15435 * characters, underscores and dashes. International characters are allowed. 15436 * See https://goo.gl/xmQnxf for more information and examples of labels. 15437 * </pre> 15438 * 15439 * <code>map<string, string> labels = 19;</code> 15440 */ removeLabels(java.lang.String key)15441 public Builder removeLabels(java.lang.String key) { 15442 if (key == null) { 15443 throw new NullPointerException("map key"); 15444 } 15445 internalGetMutableLabels().getMutableMap().remove(key); 15446 return this; 15447 } 15448 /** Use alternate mutation accessors instead. 
*/ 15449 @java.lang.Deprecated getMutableLabels()15450 public java.util.Map<java.lang.String, java.lang.String> getMutableLabels() { 15451 bitField0_ |= 0x01000000; 15452 return internalGetMutableLabels().getMutableMap(); 15453 } 15454 /** 15455 * 15456 * 15457 * <pre> 15458 * The labels with user-defined metadata to organize BatchPredictionJobs. 15459 * Label keys and values can be no longer than 64 characters 15460 * (Unicode codepoints), can only contain lowercase letters, numeric 15461 * characters, underscores and dashes. International characters are allowed. 15462 * See https://goo.gl/xmQnxf for more information and examples of labels. 15463 * </pre> 15464 * 15465 * <code>map<string, string> labels = 19;</code> 15466 */ putLabels(java.lang.String key, java.lang.String value)15467 public Builder putLabels(java.lang.String key, java.lang.String value) { 15468 if (key == null) { 15469 throw new NullPointerException("map key"); 15470 } 15471 if (value == null) { 15472 throw new NullPointerException("map value"); 15473 } 15474 internalGetMutableLabels().getMutableMap().put(key, value); 15475 bitField0_ |= 0x01000000; 15476 return this; 15477 } 15478 /** 15479 * 15480 * 15481 * <pre> 15482 * The labels with user-defined metadata to organize BatchPredictionJobs. 15483 * Label keys and values can be no longer than 64 characters 15484 * (Unicode codepoints), can only contain lowercase letters, numeric 15485 * characters, underscores and dashes. International characters are allowed. 15486 * See https://goo.gl/xmQnxf for more information and examples of labels. 15487 * </pre> 15488 * 15489 * <code>map<string, string> labels = 19;</code> 15490 */ putAllLabels(java.util.Map<java.lang.String, java.lang.String> values)15491 public Builder putAllLabels(java.util.Map<java.lang.String, java.lang.String> values) { 15492 internalGetMutableLabels().getMutableMap().putAll(values); 15493 bitField0_ |= 0x01000000; 15494 return this; 15495 } 15496 15497 private com.google.cloud.aiplatform.v1beta1.EncryptionSpec encryptionSpec_; 15498 private com.google.protobuf.SingleFieldBuilderV3< 15499 com.google.cloud.aiplatform.v1beta1.EncryptionSpec, 15500 com.google.cloud.aiplatform.v1beta1.EncryptionSpec.Builder, 15501 com.google.cloud.aiplatform.v1beta1.EncryptionSpecOrBuilder> 15502 encryptionSpecBuilder_; 15503 /** 15504 * 15505 * 15506 * <pre> 15507 * Customer-managed encryption key options for a BatchPredictionJob. If this 15508 * is set, then all resources created by the BatchPredictionJob will be 15509 * encrypted with the provided encryption key. 15510 * </pre> 15511 * 15512 * <code>.google.cloud.aiplatform.v1beta1.EncryptionSpec encryption_spec = 24;</code> 15513 * 15514 * @return Whether the encryptionSpec field is set. 15515 */ hasEncryptionSpec()15516 public boolean hasEncryptionSpec() { 15517 return ((bitField0_ & 0x02000000) != 0); 15518 } 15519 /** 15520 * 15521 * 15522 * <pre> 15523 * Customer-managed encryption key options for a BatchPredictionJob. If this 15524 * is set, then all resources created by the BatchPredictionJob will be 15525 * encrypted with the provided encryption key. 15526 * </pre> 15527 * 15528 * <code>.google.cloud.aiplatform.v1beta1.EncryptionSpec encryption_spec = 24;</code> 15529 * 15530 * @return The encryptionSpec. 15531 */ getEncryptionSpec()15532 public com.google.cloud.aiplatform.v1beta1.EncryptionSpec getEncryptionSpec() { 15533 if (encryptionSpecBuilder_ == null) { 15534 return encryptionSpec_ == null 15535 ? 
com.google.cloud.aiplatform.v1beta1.EncryptionSpec.getDefaultInstance() 15536 : encryptionSpec_; 15537 } else { 15538 return encryptionSpecBuilder_.getMessage(); 15539 } 15540 } 15541 /** 15542 * 15543 * 15544 * <pre> 15545 * Customer-managed encryption key options for a BatchPredictionJob. If this 15546 * is set, then all resources created by the BatchPredictionJob will be 15547 * encrypted with the provided encryption key. 15548 * </pre> 15549 * 15550 * <code>.google.cloud.aiplatform.v1beta1.EncryptionSpec encryption_spec = 24;</code> 15551 */ setEncryptionSpec(com.google.cloud.aiplatform.v1beta1.EncryptionSpec value)15552 public Builder setEncryptionSpec(com.google.cloud.aiplatform.v1beta1.EncryptionSpec value) { 15553 if (encryptionSpecBuilder_ == null) { 15554 if (value == null) { 15555 throw new NullPointerException(); 15556 } 15557 encryptionSpec_ = value; 15558 } else { 15559 encryptionSpecBuilder_.setMessage(value); 15560 } 15561 bitField0_ |= 0x02000000; 15562 onChanged(); 15563 return this; 15564 } 15565 /** 15566 * 15567 * 15568 * <pre> 15569 * Customer-managed encryption key options for a BatchPredictionJob. If this 15570 * is set, then all resources created by the BatchPredictionJob will be 15571 * encrypted with the provided encryption key. 15572 * </pre> 15573 * 15574 * <code>.google.cloud.aiplatform.v1beta1.EncryptionSpec encryption_spec = 24;</code> 15575 */ setEncryptionSpec( com.google.cloud.aiplatform.v1beta1.EncryptionSpec.Builder builderForValue)15576 public Builder setEncryptionSpec( 15577 com.google.cloud.aiplatform.v1beta1.EncryptionSpec.Builder builderForValue) { 15578 if (encryptionSpecBuilder_ == null) { 15579 encryptionSpec_ = builderForValue.build(); 15580 } else { 15581 encryptionSpecBuilder_.setMessage(builderForValue.build()); 15582 } 15583 bitField0_ |= 0x02000000; 15584 onChanged(); 15585 return this; 15586 } 15587 /** 15588 * 15589 * 15590 * <pre> 15591 * Customer-managed encryption key options for a BatchPredictionJob. If this 15592 * is set, then all resources created by the BatchPredictionJob will be 15593 * encrypted with the provided encryption key. 15594 * </pre> 15595 * 15596 * <code>.google.cloud.aiplatform.v1beta1.EncryptionSpec encryption_spec = 24;</code> 15597 */ mergeEncryptionSpec(com.google.cloud.aiplatform.v1beta1.EncryptionSpec value)15598 public Builder mergeEncryptionSpec(com.google.cloud.aiplatform.v1beta1.EncryptionSpec value) { 15599 if (encryptionSpecBuilder_ == null) { 15600 if (((bitField0_ & 0x02000000) != 0) 15601 && encryptionSpec_ != null 15602 && encryptionSpec_ 15603 != com.google.cloud.aiplatform.v1beta1.EncryptionSpec.getDefaultInstance()) { 15604 getEncryptionSpecBuilder().mergeFrom(value); 15605 } else { 15606 encryptionSpec_ = value; 15607 } 15608 } else { 15609 encryptionSpecBuilder_.mergeFrom(value); 15610 } 15611 bitField0_ |= 0x02000000; 15612 onChanged(); 15613 return this; 15614 } 15615 /** 15616 * 15617 * 15618 * <pre> 15619 * Customer-managed encryption key options for a BatchPredictionJob. If this 15620 * is set, then all resources created by the BatchPredictionJob will be 15621 * encrypted with the provided encryption key. 
15622 * </pre> 15623 * 15624 * <code>.google.cloud.aiplatform.v1beta1.EncryptionSpec encryption_spec = 24;</code> 15625 */ clearEncryptionSpec()15626 public Builder clearEncryptionSpec() { 15627 bitField0_ = (bitField0_ & ~0x02000000); 15628 encryptionSpec_ = null; 15629 if (encryptionSpecBuilder_ != null) { 15630 encryptionSpecBuilder_.dispose(); 15631 encryptionSpecBuilder_ = null; 15632 } 15633 onChanged(); 15634 return this; 15635 } 15636 /** 15637 * 15638 * 15639 * <pre> 15640 * Customer-managed encryption key options for a BatchPredictionJob. If this 15641 * is set, then all resources created by the BatchPredictionJob will be 15642 * encrypted with the provided encryption key. 15643 * </pre> 15644 * 15645 * <code>.google.cloud.aiplatform.v1beta1.EncryptionSpec encryption_spec = 24;</code> 15646 */ getEncryptionSpecBuilder()15647 public com.google.cloud.aiplatform.v1beta1.EncryptionSpec.Builder getEncryptionSpecBuilder() { 15648 bitField0_ |= 0x02000000; 15649 onChanged(); 15650 return getEncryptionSpecFieldBuilder().getBuilder(); 15651 } 15652 /** 15653 * 15654 * 15655 * <pre> 15656 * Customer-managed encryption key options for a BatchPredictionJob. If this 15657 * is set, then all resources created by the BatchPredictionJob will be 15658 * encrypted with the provided encryption key. 15659 * </pre> 15660 * 15661 * <code>.google.cloud.aiplatform.v1beta1.EncryptionSpec encryption_spec = 24;</code> 15662 */ 15663 public com.google.cloud.aiplatform.v1beta1.EncryptionSpecOrBuilder getEncryptionSpecOrBuilder()15664 getEncryptionSpecOrBuilder() { 15665 if (encryptionSpecBuilder_ != null) { 15666 return encryptionSpecBuilder_.getMessageOrBuilder(); 15667 } else { 15668 return encryptionSpec_ == null 15669 ? com.google.cloud.aiplatform.v1beta1.EncryptionSpec.getDefaultInstance() 15670 : encryptionSpec_; 15671 } 15672 } 15673 /** 15674 * 15675 * 15676 * <pre> 15677 * Customer-managed encryption key options for a BatchPredictionJob. If this 15678 * is set, then all resources created by the BatchPredictionJob will be 15679 * encrypted with the provided encryption key. 
15680 * </pre> 15681 * 15682 * <code>.google.cloud.aiplatform.v1beta1.EncryptionSpec encryption_spec = 24;</code> 15683 */ 15684 private com.google.protobuf.SingleFieldBuilderV3< 15685 com.google.cloud.aiplatform.v1beta1.EncryptionSpec, 15686 com.google.cloud.aiplatform.v1beta1.EncryptionSpec.Builder, 15687 com.google.cloud.aiplatform.v1beta1.EncryptionSpecOrBuilder> getEncryptionSpecFieldBuilder()15688 getEncryptionSpecFieldBuilder() { 15689 if (encryptionSpecBuilder_ == null) { 15690 encryptionSpecBuilder_ = 15691 new com.google.protobuf.SingleFieldBuilderV3< 15692 com.google.cloud.aiplatform.v1beta1.EncryptionSpec, 15693 com.google.cloud.aiplatform.v1beta1.EncryptionSpec.Builder, 15694 com.google.cloud.aiplatform.v1beta1.EncryptionSpecOrBuilder>( 15695 getEncryptionSpec(), getParentForChildren(), isClean()); 15696 encryptionSpec_ = null; 15697 } 15698 return encryptionSpecBuilder_; 15699 } 15700 15701 private com.google.cloud.aiplatform.v1beta1.ModelMonitoringConfig modelMonitoringConfig_; 15702 private com.google.protobuf.SingleFieldBuilderV3< 15703 com.google.cloud.aiplatform.v1beta1.ModelMonitoringConfig, 15704 com.google.cloud.aiplatform.v1beta1.ModelMonitoringConfig.Builder, 15705 com.google.cloud.aiplatform.v1beta1.ModelMonitoringConfigOrBuilder> 15706 modelMonitoringConfigBuilder_; 15707 /** 15708 * 15709 * 15710 * <pre> 15711 * Model monitoring config will be used for analysis model behaviors, based on 15712 * the input and output to the batch prediction job, as well as the provided 15713 * training dataset. 15714 * </pre> 15715 * 15716 * <code>.google.cloud.aiplatform.v1beta1.ModelMonitoringConfig model_monitoring_config = 26; 15717 * </code> 15718 * 15719 * @return Whether the modelMonitoringConfig field is set. 15720 */ hasModelMonitoringConfig()15721 public boolean hasModelMonitoringConfig() { 15722 return ((bitField0_ & 0x04000000) != 0); 15723 } 15724 /** 15725 * 15726 * 15727 * <pre> 15728 * Model monitoring config will be used for analysis model behaviors, based on 15729 * the input and output to the batch prediction job, as well as the provided 15730 * training dataset. 15731 * </pre> 15732 * 15733 * <code>.google.cloud.aiplatform.v1beta1.ModelMonitoringConfig model_monitoring_config = 26; 15734 * </code> 15735 * 15736 * @return The modelMonitoringConfig. 15737 */ getModelMonitoringConfig()15738 public com.google.cloud.aiplatform.v1beta1.ModelMonitoringConfig getModelMonitoringConfig() { 15739 if (modelMonitoringConfigBuilder_ == null) { 15740 return modelMonitoringConfig_ == null 15741 ? com.google.cloud.aiplatform.v1beta1.ModelMonitoringConfig.getDefaultInstance() 15742 : modelMonitoringConfig_; 15743 } else { 15744 return modelMonitoringConfigBuilder_.getMessage(); 15745 } 15746 } 15747 /** 15748 * 15749 * 15750 * <pre> 15751 * Model monitoring config will be used for analysis model behaviors, based on 15752 * the input and output to the batch prediction job, as well as the provided 15753 * training dataset. 
15754 * </pre> 15755 * 15756 * <code>.google.cloud.aiplatform.v1beta1.ModelMonitoringConfig model_monitoring_config = 26; 15757 * </code> 15758 */ setModelMonitoringConfig( com.google.cloud.aiplatform.v1beta1.ModelMonitoringConfig value)15759 public Builder setModelMonitoringConfig( 15760 com.google.cloud.aiplatform.v1beta1.ModelMonitoringConfig value) { 15761 if (modelMonitoringConfigBuilder_ == null) { 15762 if (value == null) { 15763 throw new NullPointerException(); 15764 } 15765 modelMonitoringConfig_ = value; 15766 } else { 15767 modelMonitoringConfigBuilder_.setMessage(value); 15768 } 15769 bitField0_ |= 0x04000000; 15770 onChanged(); 15771 return this; 15772 } 15773 /** 15774 * 15775 * 15776 * <pre> 15777 * Model monitoring config will be used for analysis model behaviors, based on 15778 * the input and output to the batch prediction job, as well as the provided 15779 * training dataset. 15780 * </pre> 15781 * 15782 * <code>.google.cloud.aiplatform.v1beta1.ModelMonitoringConfig model_monitoring_config = 26; 15783 * </code> 15784 */ setModelMonitoringConfig( com.google.cloud.aiplatform.v1beta1.ModelMonitoringConfig.Builder builderForValue)15785 public Builder setModelMonitoringConfig( 15786 com.google.cloud.aiplatform.v1beta1.ModelMonitoringConfig.Builder builderForValue) { 15787 if (modelMonitoringConfigBuilder_ == null) { 15788 modelMonitoringConfig_ = builderForValue.build(); 15789 } else { 15790 modelMonitoringConfigBuilder_.setMessage(builderForValue.build()); 15791 } 15792 bitField0_ |= 0x04000000; 15793 onChanged(); 15794 return this; 15795 } 15796 /** 15797 * 15798 * 15799 * <pre> 15800 * Model monitoring config will be used for analysis model behaviors, based on 15801 * the input and output to the batch prediction job, as well as the provided 15802 * training dataset. 15803 * </pre> 15804 * 15805 * <code>.google.cloud.aiplatform.v1beta1.ModelMonitoringConfig model_monitoring_config = 26; 15806 * </code> 15807 */ mergeModelMonitoringConfig( com.google.cloud.aiplatform.v1beta1.ModelMonitoringConfig value)15808 public Builder mergeModelMonitoringConfig( 15809 com.google.cloud.aiplatform.v1beta1.ModelMonitoringConfig value) { 15810 if (modelMonitoringConfigBuilder_ == null) { 15811 if (((bitField0_ & 0x04000000) != 0) 15812 && modelMonitoringConfig_ != null 15813 && modelMonitoringConfig_ 15814 != com.google.cloud.aiplatform.v1beta1.ModelMonitoringConfig.getDefaultInstance()) { 15815 getModelMonitoringConfigBuilder().mergeFrom(value); 15816 } else { 15817 modelMonitoringConfig_ = value; 15818 } 15819 } else { 15820 modelMonitoringConfigBuilder_.mergeFrom(value); 15821 } 15822 bitField0_ |= 0x04000000; 15823 onChanged(); 15824 return this; 15825 } 15826 /** 15827 * 15828 * 15829 * <pre> 15830 * Model monitoring config will be used for analysis model behaviors, based on 15831 * the input and output to the batch prediction job, as well as the provided 15832 * training dataset. 
15833 * </pre> 15834 * 15835 * <code>.google.cloud.aiplatform.v1beta1.ModelMonitoringConfig model_monitoring_config = 26; 15836 * </code> 15837 */ clearModelMonitoringConfig()15838 public Builder clearModelMonitoringConfig() { 15839 bitField0_ = (bitField0_ & ~0x04000000); 15840 modelMonitoringConfig_ = null; 15841 if (modelMonitoringConfigBuilder_ != null) { 15842 modelMonitoringConfigBuilder_.dispose(); 15843 modelMonitoringConfigBuilder_ = null; 15844 } 15845 onChanged(); 15846 return this; 15847 } 15848 /** 15849 * 15850 * 15851 * <pre> 15852 * Model monitoring config will be used for analysis model behaviors, based on 15853 * the input and output to the batch prediction job, as well as the provided 15854 * training dataset. 15855 * </pre> 15856 * 15857 * <code>.google.cloud.aiplatform.v1beta1.ModelMonitoringConfig model_monitoring_config = 26; 15858 * </code> 15859 */ 15860 public com.google.cloud.aiplatform.v1beta1.ModelMonitoringConfig.Builder getModelMonitoringConfigBuilder()15861 getModelMonitoringConfigBuilder() { 15862 bitField0_ |= 0x04000000; 15863 onChanged(); 15864 return getModelMonitoringConfigFieldBuilder().getBuilder(); 15865 } 15866 /** 15867 * 15868 * 15869 * <pre> 15870 * Model monitoring config will be used for analysis model behaviors, based on 15871 * the input and output to the batch prediction job, as well as the provided 15872 * training dataset. 15873 * </pre> 15874 * 15875 * <code>.google.cloud.aiplatform.v1beta1.ModelMonitoringConfig model_monitoring_config = 26; 15876 * </code> 15877 */ 15878 public com.google.cloud.aiplatform.v1beta1.ModelMonitoringConfigOrBuilder getModelMonitoringConfigOrBuilder()15879 getModelMonitoringConfigOrBuilder() { 15880 if (modelMonitoringConfigBuilder_ != null) { 15881 return modelMonitoringConfigBuilder_.getMessageOrBuilder(); 15882 } else { 15883 return modelMonitoringConfig_ == null 15884 ? com.google.cloud.aiplatform.v1beta1.ModelMonitoringConfig.getDefaultInstance() 15885 : modelMonitoringConfig_; 15886 } 15887 } 15888 /** 15889 * 15890 * 15891 * <pre> 15892 * Model monitoring config will be used for analysis model behaviors, based on 15893 * the input and output to the batch prediction job, as well as the provided 15894 * training dataset. 
15895 * </pre> 15896 * 15897 * <code>.google.cloud.aiplatform.v1beta1.ModelMonitoringConfig model_monitoring_config = 26; 15898 * </code> 15899 */ 15900 private com.google.protobuf.SingleFieldBuilderV3< 15901 com.google.cloud.aiplatform.v1beta1.ModelMonitoringConfig, 15902 com.google.cloud.aiplatform.v1beta1.ModelMonitoringConfig.Builder, 15903 com.google.cloud.aiplatform.v1beta1.ModelMonitoringConfigOrBuilder> getModelMonitoringConfigFieldBuilder()15904 getModelMonitoringConfigFieldBuilder() { 15905 if (modelMonitoringConfigBuilder_ == null) { 15906 modelMonitoringConfigBuilder_ = 15907 new com.google.protobuf.SingleFieldBuilderV3< 15908 com.google.cloud.aiplatform.v1beta1.ModelMonitoringConfig, 15909 com.google.cloud.aiplatform.v1beta1.ModelMonitoringConfig.Builder, 15910 com.google.cloud.aiplatform.v1beta1.ModelMonitoringConfigOrBuilder>( 15911 getModelMonitoringConfig(), getParentForChildren(), isClean()); 15912 modelMonitoringConfig_ = null; 15913 } 15914 return modelMonitoringConfigBuilder_; 15915 } 15916 15917 private java.util.List<com.google.cloud.aiplatform.v1beta1.ModelMonitoringStatsAnomalies> 15918 modelMonitoringStatsAnomalies_ = java.util.Collections.emptyList(); 15919 ensureModelMonitoringStatsAnomaliesIsMutable()15920 private void ensureModelMonitoringStatsAnomaliesIsMutable() { 15921 if (!((bitField0_ & 0x08000000) != 0)) { 15922 modelMonitoringStatsAnomalies_ = 15923 new java.util.ArrayList< 15924 com.google.cloud.aiplatform.v1beta1.ModelMonitoringStatsAnomalies>( 15925 modelMonitoringStatsAnomalies_); 15926 bitField0_ |= 0x08000000; 15927 } 15928 } 15929 15930 private com.google.protobuf.RepeatedFieldBuilderV3< 15931 com.google.cloud.aiplatform.v1beta1.ModelMonitoringStatsAnomalies, 15932 com.google.cloud.aiplatform.v1beta1.ModelMonitoringStatsAnomalies.Builder, 15933 com.google.cloud.aiplatform.v1beta1.ModelMonitoringStatsAnomaliesOrBuilder> 15934 modelMonitoringStatsAnomaliesBuilder_; 15935 15936 /** 15937 * 15938 * 15939 * <pre> 15940 * Get batch prediction job monitoring statistics. 15941 * </pre> 15942 * 15943 * <code> 15944 * repeated .google.cloud.aiplatform.v1beta1.ModelMonitoringStatsAnomalies model_monitoring_stats_anomalies = 31; 15945 * </code> 15946 */ 15947 public java.util.List<com.google.cloud.aiplatform.v1beta1.ModelMonitoringStatsAnomalies> getModelMonitoringStatsAnomaliesList()15948 getModelMonitoringStatsAnomaliesList() { 15949 if (modelMonitoringStatsAnomaliesBuilder_ == null) { 15950 return java.util.Collections.unmodifiableList(modelMonitoringStatsAnomalies_); 15951 } else { 15952 return modelMonitoringStatsAnomaliesBuilder_.getMessageList(); 15953 } 15954 } 15955 /** 15956 * 15957 * 15958 * <pre> 15959 * Get batch prediction job monitoring statistics. 15960 * </pre> 15961 * 15962 * <code> 15963 * repeated .google.cloud.aiplatform.v1beta1.ModelMonitoringStatsAnomalies model_monitoring_stats_anomalies = 31; 15964 * </code> 15965 */ getModelMonitoringStatsAnomaliesCount()15966 public int getModelMonitoringStatsAnomaliesCount() { 15967 if (modelMonitoringStatsAnomaliesBuilder_ == null) { 15968 return modelMonitoringStatsAnomalies_.size(); 15969 } else { 15970 return modelMonitoringStatsAnomaliesBuilder_.getCount(); 15971 } 15972 } 15973 /** 15974 * 15975 * 15976 * <pre> 15977 * Get batch prediction job monitoring statistics. 
15978 * </pre> 15979 * 15980 * <code> 15981 * repeated .google.cloud.aiplatform.v1beta1.ModelMonitoringStatsAnomalies model_monitoring_stats_anomalies = 31; 15982 * </code> 15983 */ 15984 public com.google.cloud.aiplatform.v1beta1.ModelMonitoringStatsAnomalies getModelMonitoringStatsAnomalies(int index)15985 getModelMonitoringStatsAnomalies(int index) { 15986 if (modelMonitoringStatsAnomaliesBuilder_ == null) { 15987 return modelMonitoringStatsAnomalies_.get(index); 15988 } else { 15989 return modelMonitoringStatsAnomaliesBuilder_.getMessage(index); 15990 } 15991 } 15992 /** 15993 * 15994 * 15995 * <pre> 15996 * Get batch prediction job monitoring statistics. 15997 * </pre> 15998 * 15999 * <code> 16000 * repeated .google.cloud.aiplatform.v1beta1.ModelMonitoringStatsAnomalies model_monitoring_stats_anomalies = 31; 16001 * </code> 16002 */ setModelMonitoringStatsAnomalies( int index, com.google.cloud.aiplatform.v1beta1.ModelMonitoringStatsAnomalies value)16003 public Builder setModelMonitoringStatsAnomalies( 16004 int index, com.google.cloud.aiplatform.v1beta1.ModelMonitoringStatsAnomalies value) { 16005 if (modelMonitoringStatsAnomaliesBuilder_ == null) { 16006 if (value == null) { 16007 throw new NullPointerException(); 16008 } 16009 ensureModelMonitoringStatsAnomaliesIsMutable(); 16010 modelMonitoringStatsAnomalies_.set(index, value); 16011 onChanged(); 16012 } else { 16013 modelMonitoringStatsAnomaliesBuilder_.setMessage(index, value); 16014 } 16015 return this; 16016 } 16017 /** 16018 * 16019 * 16020 * <pre> 16021 * Get batch prediction job monitoring statistics. 16022 * </pre> 16023 * 16024 * <code> 16025 * repeated .google.cloud.aiplatform.v1beta1.ModelMonitoringStatsAnomalies model_monitoring_stats_anomalies = 31; 16026 * </code> 16027 */ setModelMonitoringStatsAnomalies( int index, com.google.cloud.aiplatform.v1beta1.ModelMonitoringStatsAnomalies.Builder builderForValue)16028 public Builder setModelMonitoringStatsAnomalies( 16029 int index, 16030 com.google.cloud.aiplatform.v1beta1.ModelMonitoringStatsAnomalies.Builder builderForValue) { 16031 if (modelMonitoringStatsAnomaliesBuilder_ == null) { 16032 ensureModelMonitoringStatsAnomaliesIsMutable(); 16033 modelMonitoringStatsAnomalies_.set(index, builderForValue.build()); 16034 onChanged(); 16035 } else { 16036 modelMonitoringStatsAnomaliesBuilder_.setMessage(index, builderForValue.build()); 16037 } 16038 return this; 16039 } 16040 /** 16041 * 16042 * 16043 * <pre> 16044 * Get batch prediction job monitoring statistics. 16045 * </pre> 16046 * 16047 * <code> 16048 * repeated .google.cloud.aiplatform.v1beta1.ModelMonitoringStatsAnomalies model_monitoring_stats_anomalies = 31; 16049 * </code> 16050 */ addModelMonitoringStatsAnomalies( com.google.cloud.aiplatform.v1beta1.ModelMonitoringStatsAnomalies value)16051 public Builder addModelMonitoringStatsAnomalies( 16052 com.google.cloud.aiplatform.v1beta1.ModelMonitoringStatsAnomalies value) { 16053 if (modelMonitoringStatsAnomaliesBuilder_ == null) { 16054 if (value == null) { 16055 throw new NullPointerException(); 16056 } 16057 ensureModelMonitoringStatsAnomaliesIsMutable(); 16058 modelMonitoringStatsAnomalies_.add(value); 16059 onChanged(); 16060 } else { 16061 modelMonitoringStatsAnomaliesBuilder_.addMessage(value); 16062 } 16063 return this; 16064 } 16065 /** 16066 * 16067 * 16068 * <pre> 16069 * Get batch prediction job monitoring statistics. 
16070 * </pre> 16071 * 16072 * <code> 16073 * repeated .google.cloud.aiplatform.v1beta1.ModelMonitoringStatsAnomalies model_monitoring_stats_anomalies = 31; 16074 * </code> 16075 */ addModelMonitoringStatsAnomalies( int index, com.google.cloud.aiplatform.v1beta1.ModelMonitoringStatsAnomalies value)16076 public Builder addModelMonitoringStatsAnomalies( 16077 int index, com.google.cloud.aiplatform.v1beta1.ModelMonitoringStatsAnomalies value) { 16078 if (modelMonitoringStatsAnomaliesBuilder_ == null) { 16079 if (value == null) { 16080 throw new NullPointerException(); 16081 } 16082 ensureModelMonitoringStatsAnomaliesIsMutable(); 16083 modelMonitoringStatsAnomalies_.add(index, value); 16084 onChanged(); 16085 } else { 16086 modelMonitoringStatsAnomaliesBuilder_.addMessage(index, value); 16087 } 16088 return this; 16089 } 16090 /** 16091 * 16092 * 16093 * <pre> 16094 * Get batch prediction job monitoring statistics. 16095 * </pre> 16096 * 16097 * <code> 16098 * repeated .google.cloud.aiplatform.v1beta1.ModelMonitoringStatsAnomalies model_monitoring_stats_anomalies = 31; 16099 * </code> 16100 */ addModelMonitoringStatsAnomalies( com.google.cloud.aiplatform.v1beta1.ModelMonitoringStatsAnomalies.Builder builderForValue)16101 public Builder addModelMonitoringStatsAnomalies( 16102 com.google.cloud.aiplatform.v1beta1.ModelMonitoringStatsAnomalies.Builder builderForValue) { 16103 if (modelMonitoringStatsAnomaliesBuilder_ == null) { 16104 ensureModelMonitoringStatsAnomaliesIsMutable(); 16105 modelMonitoringStatsAnomalies_.add(builderForValue.build()); 16106 onChanged(); 16107 } else { 16108 modelMonitoringStatsAnomaliesBuilder_.addMessage(builderForValue.build()); 16109 } 16110 return this; 16111 } 16112 /** 16113 * 16114 * 16115 * <pre> 16116 * Get batch prediction job monitoring statistics. 16117 * </pre> 16118 * 16119 * <code> 16120 * repeated .google.cloud.aiplatform.v1beta1.ModelMonitoringStatsAnomalies model_monitoring_stats_anomalies = 31; 16121 * </code> 16122 */ addModelMonitoringStatsAnomalies( int index, com.google.cloud.aiplatform.v1beta1.ModelMonitoringStatsAnomalies.Builder builderForValue)16123 public Builder addModelMonitoringStatsAnomalies( 16124 int index, 16125 com.google.cloud.aiplatform.v1beta1.ModelMonitoringStatsAnomalies.Builder builderForValue) { 16126 if (modelMonitoringStatsAnomaliesBuilder_ == null) { 16127 ensureModelMonitoringStatsAnomaliesIsMutable(); 16128 modelMonitoringStatsAnomalies_.add(index, builderForValue.build()); 16129 onChanged(); 16130 } else { 16131 modelMonitoringStatsAnomaliesBuilder_.addMessage(index, builderForValue.build()); 16132 } 16133 return this; 16134 } 16135 /** 16136 * 16137 * 16138 * <pre> 16139 * Get batch prediction job monitoring statistics. 16140 * </pre> 16141 * 16142 * <code> 16143 * repeated .google.cloud.aiplatform.v1beta1.ModelMonitoringStatsAnomalies model_monitoring_stats_anomalies = 31; 16144 * </code> 16145 */ addAllModelMonitoringStatsAnomalies( java.lang.Iterable< ? extends com.google.cloud.aiplatform.v1beta1.ModelMonitoringStatsAnomalies> values)16146 public Builder addAllModelMonitoringStatsAnomalies( 16147 java.lang.Iterable< 16148 ? 
extends com.google.cloud.aiplatform.v1beta1.ModelMonitoringStatsAnomalies> 16149 values) { 16150 if (modelMonitoringStatsAnomaliesBuilder_ == null) { 16151 ensureModelMonitoringStatsAnomaliesIsMutable(); 16152 com.google.protobuf.AbstractMessageLite.Builder.addAll( 16153 values, modelMonitoringStatsAnomalies_); 16154 onChanged(); 16155 } else { 16156 modelMonitoringStatsAnomaliesBuilder_.addAllMessages(values); 16157 } 16158 return this; 16159 } 16160 /** 16161 * 16162 * 16163 * <pre> 16164 * Get batch prediction job monitoring statistics. 16165 * </pre> 16166 * 16167 * <code> 16168 * repeated .google.cloud.aiplatform.v1beta1.ModelMonitoringStatsAnomalies model_monitoring_stats_anomalies = 31; 16169 * </code> 16170 */ clearModelMonitoringStatsAnomalies()16171 public Builder clearModelMonitoringStatsAnomalies() { 16172 if (modelMonitoringStatsAnomaliesBuilder_ == null) { 16173 modelMonitoringStatsAnomalies_ = java.util.Collections.emptyList(); 16174 bitField0_ = (bitField0_ & ~0x08000000); 16175 onChanged(); 16176 } else { 16177 modelMonitoringStatsAnomaliesBuilder_.clear(); 16178 } 16179 return this; 16180 } 16181 /** 16182 * 16183 * 16184 * <pre> 16185 * Get batch prediction job monitoring statistics. 16186 * </pre> 16187 * 16188 * <code> 16189 * repeated .google.cloud.aiplatform.v1beta1.ModelMonitoringStatsAnomalies model_monitoring_stats_anomalies = 31; 16190 * </code> 16191 */ removeModelMonitoringStatsAnomalies(int index)16192 public Builder removeModelMonitoringStatsAnomalies(int index) { 16193 if (modelMonitoringStatsAnomaliesBuilder_ == null) { 16194 ensureModelMonitoringStatsAnomaliesIsMutable(); 16195 modelMonitoringStatsAnomalies_.remove(index); 16196 onChanged(); 16197 } else { 16198 modelMonitoringStatsAnomaliesBuilder_.remove(index); 16199 } 16200 return this; 16201 } 16202 /** 16203 * 16204 * 16205 * <pre> 16206 * Get batch prediction job monitoring statistics. 16207 * </pre> 16208 * 16209 * <code> 16210 * repeated .google.cloud.aiplatform.v1beta1.ModelMonitoringStatsAnomalies model_monitoring_stats_anomalies = 31; 16211 * </code> 16212 */ 16213 public com.google.cloud.aiplatform.v1beta1.ModelMonitoringStatsAnomalies.Builder getModelMonitoringStatsAnomaliesBuilder(int index)16214 getModelMonitoringStatsAnomaliesBuilder(int index) { 16215 return getModelMonitoringStatsAnomaliesFieldBuilder().getBuilder(index); 16216 } 16217 /** 16218 * 16219 * 16220 * <pre> 16221 * Get batch prediction job monitoring statistics. 16222 * </pre> 16223 * 16224 * <code> 16225 * repeated .google.cloud.aiplatform.v1beta1.ModelMonitoringStatsAnomalies model_monitoring_stats_anomalies = 31; 16226 * </code> 16227 */ 16228 public com.google.cloud.aiplatform.v1beta1.ModelMonitoringStatsAnomaliesOrBuilder getModelMonitoringStatsAnomaliesOrBuilder(int index)16229 getModelMonitoringStatsAnomaliesOrBuilder(int index) { 16230 if (modelMonitoringStatsAnomaliesBuilder_ == null) { 16231 return modelMonitoringStatsAnomalies_.get(index); 16232 } else { 16233 return modelMonitoringStatsAnomaliesBuilder_.getMessageOrBuilder(index); 16234 } 16235 } 16236 /** 16237 * 16238 * 16239 * <pre> 16240 * Get batch prediction job monitoring statistics. 16241 * </pre> 16242 * 16243 * <code> 16244 * repeated .google.cloud.aiplatform.v1beta1.ModelMonitoringStatsAnomalies model_monitoring_stats_anomalies = 31; 16245 * </code> 16246 */ 16247 public java.util.List< 16248 ? 
extends com.google.cloud.aiplatform.v1beta1.ModelMonitoringStatsAnomaliesOrBuilder> getModelMonitoringStatsAnomaliesOrBuilderList()16249 getModelMonitoringStatsAnomaliesOrBuilderList() { 16250 if (modelMonitoringStatsAnomaliesBuilder_ != null) { 16251 return modelMonitoringStatsAnomaliesBuilder_.getMessageOrBuilderList(); 16252 } else { 16253 return java.util.Collections.unmodifiableList(modelMonitoringStatsAnomalies_); 16254 } 16255 } 16256 /** 16257 * 16258 * 16259 * <pre> 16260 * Get batch prediction job monitoring statistics. 16261 * </pre> 16262 * 16263 * <code> 16264 * repeated .google.cloud.aiplatform.v1beta1.ModelMonitoringStatsAnomalies model_monitoring_stats_anomalies = 31; 16265 * </code> 16266 */ 16267 public com.google.cloud.aiplatform.v1beta1.ModelMonitoringStatsAnomalies.Builder addModelMonitoringStatsAnomaliesBuilder()16268 addModelMonitoringStatsAnomaliesBuilder() { 16269 return getModelMonitoringStatsAnomaliesFieldBuilder() 16270 .addBuilder( 16271 com.google.cloud.aiplatform.v1beta1.ModelMonitoringStatsAnomalies 16272 .getDefaultInstance()); 16273 } 16274 /** 16275 * 16276 * 16277 * <pre> 16278 * Get batch prediction job monitoring statistics. 16279 * </pre> 16280 * 16281 * <code> 16282 * repeated .google.cloud.aiplatform.v1beta1.ModelMonitoringStatsAnomalies model_monitoring_stats_anomalies = 31; 16283 * </code> 16284 */ 16285 public com.google.cloud.aiplatform.v1beta1.ModelMonitoringStatsAnomalies.Builder addModelMonitoringStatsAnomaliesBuilder(int index)16286 addModelMonitoringStatsAnomaliesBuilder(int index) { 16287 return getModelMonitoringStatsAnomaliesFieldBuilder() 16288 .addBuilder( 16289 index, 16290 com.google.cloud.aiplatform.v1beta1.ModelMonitoringStatsAnomalies 16291 .getDefaultInstance()); 16292 } 16293 /** 16294 * 16295 * 16296 * <pre> 16297 * Get batch prediction job monitoring statistics. 
16298 * </pre> 16299 * 16300 * <code> 16301 * repeated .google.cloud.aiplatform.v1beta1.ModelMonitoringStatsAnomalies model_monitoring_stats_anomalies = 31; 16302 * </code> 16303 */ 16304 public java.util.List<com.google.cloud.aiplatform.v1beta1.ModelMonitoringStatsAnomalies.Builder> getModelMonitoringStatsAnomaliesBuilderList()16305 getModelMonitoringStatsAnomaliesBuilderList() { 16306 return getModelMonitoringStatsAnomaliesFieldBuilder().getBuilderList(); 16307 } 16308 16309 private com.google.protobuf.RepeatedFieldBuilderV3< 16310 com.google.cloud.aiplatform.v1beta1.ModelMonitoringStatsAnomalies, 16311 com.google.cloud.aiplatform.v1beta1.ModelMonitoringStatsAnomalies.Builder, 16312 com.google.cloud.aiplatform.v1beta1.ModelMonitoringStatsAnomaliesOrBuilder> getModelMonitoringStatsAnomaliesFieldBuilder()16313 getModelMonitoringStatsAnomaliesFieldBuilder() { 16314 if (modelMonitoringStatsAnomaliesBuilder_ == null) { 16315 modelMonitoringStatsAnomaliesBuilder_ = 16316 new com.google.protobuf.RepeatedFieldBuilderV3< 16317 com.google.cloud.aiplatform.v1beta1.ModelMonitoringStatsAnomalies, 16318 com.google.cloud.aiplatform.v1beta1.ModelMonitoringStatsAnomalies.Builder, 16319 com.google.cloud.aiplatform.v1beta1.ModelMonitoringStatsAnomaliesOrBuilder>( 16320 modelMonitoringStatsAnomalies_, 16321 ((bitField0_ & 0x08000000) != 0), 16322 getParentForChildren(), 16323 isClean()); 16324 modelMonitoringStatsAnomalies_ = null; 16325 } 16326 return modelMonitoringStatsAnomaliesBuilder_; 16327 } 16328 16329 private com.google.rpc.Status modelMonitoringStatus_; 16330 private com.google.protobuf.SingleFieldBuilderV3< 16331 com.google.rpc.Status, com.google.rpc.Status.Builder, com.google.rpc.StatusOrBuilder> 16332 modelMonitoringStatusBuilder_; 16333 /** 16334 * 16335 * 16336 * <pre> 16337 * Output only. The running status of the model monitoring pipeline. 16338 * </pre> 16339 * 16340 * <code> 16341 * .google.rpc.Status model_monitoring_status = 32 [(.google.api.field_behavior) = OUTPUT_ONLY]; 16342 * </code> 16343 * 16344 * @return Whether the modelMonitoringStatus field is set. 16345 */ hasModelMonitoringStatus()16346 public boolean hasModelMonitoringStatus() { 16347 return ((bitField0_ & 0x10000000) != 0); 16348 } 16349 /** 16350 * 16351 * 16352 * <pre> 16353 * Output only. The running status of the model monitoring pipeline. 16354 * </pre> 16355 * 16356 * <code> 16357 * .google.rpc.Status model_monitoring_status = 32 [(.google.api.field_behavior) = OUTPUT_ONLY]; 16358 * </code> 16359 * 16360 * @return The modelMonitoringStatus. 16361 */ getModelMonitoringStatus()16362 public com.google.rpc.Status getModelMonitoringStatus() { 16363 if (modelMonitoringStatusBuilder_ == null) { 16364 return modelMonitoringStatus_ == null 16365 ? com.google.rpc.Status.getDefaultInstance() 16366 : modelMonitoringStatus_; 16367 } else { 16368 return modelMonitoringStatusBuilder_.getMessage(); 16369 } 16370 } 16371 /** 16372 * 16373 * 16374 * <pre> 16375 * Output only. The running status of the model monitoring pipeline. 
16376 * </pre> 16377 * 16378 * <code> 16379 * .google.rpc.Status model_monitoring_status = 32 [(.google.api.field_behavior) = OUTPUT_ONLY]; 16380 * </code> 16381 */ setModelMonitoringStatus(com.google.rpc.Status value)16382 public Builder setModelMonitoringStatus(com.google.rpc.Status value) { 16383 if (modelMonitoringStatusBuilder_ == null) { 16384 if (value == null) { 16385 throw new NullPointerException(); 16386 } 16387 modelMonitoringStatus_ = value; 16388 } else { 16389 modelMonitoringStatusBuilder_.setMessage(value); 16390 } 16391 bitField0_ |= 0x10000000; 16392 onChanged(); 16393 return this; 16394 } 16395 /** 16396 * 16397 * 16398 * <pre> 16399 * Output only. The running status of the model monitoring pipeline. 16400 * </pre> 16401 * 16402 * <code> 16403 * .google.rpc.Status model_monitoring_status = 32 [(.google.api.field_behavior) = OUTPUT_ONLY]; 16404 * </code> 16405 */ setModelMonitoringStatus(com.google.rpc.Status.Builder builderForValue)16406 public Builder setModelMonitoringStatus(com.google.rpc.Status.Builder builderForValue) { 16407 if (modelMonitoringStatusBuilder_ == null) { 16408 modelMonitoringStatus_ = builderForValue.build(); 16409 } else { 16410 modelMonitoringStatusBuilder_.setMessage(builderForValue.build()); 16411 } 16412 bitField0_ |= 0x10000000; 16413 onChanged(); 16414 return this; 16415 } 16416 /** 16417 * 16418 * 16419 * <pre> 16420 * Output only. The running status of the model monitoring pipeline. 16421 * </pre> 16422 * 16423 * <code> 16424 * .google.rpc.Status model_monitoring_status = 32 [(.google.api.field_behavior) = OUTPUT_ONLY]; 16425 * </code> 16426 */ mergeModelMonitoringStatus(com.google.rpc.Status value)16427 public Builder mergeModelMonitoringStatus(com.google.rpc.Status value) { 16428 if (modelMonitoringStatusBuilder_ == null) { 16429 if (((bitField0_ & 0x10000000) != 0) 16430 && modelMonitoringStatus_ != null 16431 && modelMonitoringStatus_ != com.google.rpc.Status.getDefaultInstance()) { 16432 getModelMonitoringStatusBuilder().mergeFrom(value); 16433 } else { 16434 modelMonitoringStatus_ = value; 16435 } 16436 } else { 16437 modelMonitoringStatusBuilder_.mergeFrom(value); 16438 } 16439 bitField0_ |= 0x10000000; 16440 onChanged(); 16441 return this; 16442 } 16443 /** 16444 * 16445 * 16446 * <pre> 16447 * Output only. The running status of the model monitoring pipeline. 16448 * </pre> 16449 * 16450 * <code> 16451 * .google.rpc.Status model_monitoring_status = 32 [(.google.api.field_behavior) = OUTPUT_ONLY]; 16452 * </code> 16453 */ clearModelMonitoringStatus()16454 public Builder clearModelMonitoringStatus() { 16455 bitField0_ = (bitField0_ & ~0x10000000); 16456 modelMonitoringStatus_ = null; 16457 if (modelMonitoringStatusBuilder_ != null) { 16458 modelMonitoringStatusBuilder_.dispose(); 16459 modelMonitoringStatusBuilder_ = null; 16460 } 16461 onChanged(); 16462 return this; 16463 } 16464 /** 16465 * 16466 * 16467 * <pre> 16468 * Output only. The running status of the model monitoring pipeline. 16469 * </pre> 16470 * 16471 * <code> 16472 * .google.rpc.Status model_monitoring_status = 32 [(.google.api.field_behavior) = OUTPUT_ONLY]; 16473 * </code> 16474 */ getModelMonitoringStatusBuilder()16475 public com.google.rpc.Status.Builder getModelMonitoringStatusBuilder() { 16476 bitField0_ |= 0x10000000; 16477 onChanged(); 16478 return getModelMonitoringStatusFieldBuilder().getBuilder(); 16479 } 16480 /** 16481 * 16482 * 16483 * <pre> 16484 * Output only. The running status of the model monitoring pipeline. 
16485 * </pre> 16486 * 16487 * <code> 16488 * .google.rpc.Status model_monitoring_status = 32 [(.google.api.field_behavior) = OUTPUT_ONLY]; 16489 * </code> 16490 */ getModelMonitoringStatusOrBuilder()16491 public com.google.rpc.StatusOrBuilder getModelMonitoringStatusOrBuilder() { 16492 if (modelMonitoringStatusBuilder_ != null) { 16493 return modelMonitoringStatusBuilder_.getMessageOrBuilder(); 16494 } else { 16495 return modelMonitoringStatus_ == null 16496 ? com.google.rpc.Status.getDefaultInstance() 16497 : modelMonitoringStatus_; 16498 } 16499 } 16500 /** 16501 * 16502 * 16503 * <pre> 16504 * Output only. The running status of the model monitoring pipeline. 16505 * </pre> 16506 * 16507 * <code> 16508 * .google.rpc.Status model_monitoring_status = 32 [(.google.api.field_behavior) = OUTPUT_ONLY]; 16509 * </code> 16510 */ 16511 private com.google.protobuf.SingleFieldBuilderV3< 16512 com.google.rpc.Status, com.google.rpc.Status.Builder, com.google.rpc.StatusOrBuilder> getModelMonitoringStatusFieldBuilder()16513 getModelMonitoringStatusFieldBuilder() { 16514 if (modelMonitoringStatusBuilder_ == null) { 16515 modelMonitoringStatusBuilder_ = 16516 new com.google.protobuf.SingleFieldBuilderV3< 16517 com.google.rpc.Status, 16518 com.google.rpc.Status.Builder, 16519 com.google.rpc.StatusOrBuilder>( 16520 getModelMonitoringStatus(), getParentForChildren(), isClean()); 16521 modelMonitoringStatus_ = null; 16522 } 16523 return modelMonitoringStatusBuilder_; 16524 } 16525 16526 private boolean disableContainerLogging_; 16527 /** 16528 * 16529 * 16530 * <pre> 16531 * For custom-trained Models and AutoML Tabular Models, the container of the 16532 * DeployedModel instances will send `stderr` and `stdout` streams to 16533 * Cloud Logging by default. Please note that the logs incur cost, 16534 * which are subject to [Cloud Logging 16535 * pricing](https://cloud.google.com/logging/pricing). 16536 * User can disable container logging by setting this flag to true. 16537 * </pre> 16538 * 16539 * <code>bool disable_container_logging = 34;</code> 16540 * 16541 * @return The disableContainerLogging. 16542 */ 16543 @java.lang.Override getDisableContainerLogging()16544 public boolean getDisableContainerLogging() { 16545 return disableContainerLogging_; 16546 } 16547 /** 16548 * 16549 * 16550 * <pre> 16551 * For custom-trained Models and AutoML Tabular Models, the container of the 16552 * DeployedModel instances will send `stderr` and `stdout` streams to 16553 * Cloud Logging by default. Please note that the logs incur cost, 16554 * which are subject to [Cloud Logging 16555 * pricing](https://cloud.google.com/logging/pricing). 16556 * User can disable container logging by setting this flag to true. 16557 * </pre> 16558 * 16559 * <code>bool disable_container_logging = 34;</code> 16560 * 16561 * @param value The disableContainerLogging to set. 16562 * @return This builder for chaining. 16563 */ setDisableContainerLogging(boolean value)16564 public Builder setDisableContainerLogging(boolean value) { 16565 16566 disableContainerLogging_ = value; 16567 bitField0_ |= 0x20000000; 16568 onChanged(); 16569 return this; 16570 } 16571 /** 16572 * 16573 * 16574 * <pre> 16575 * For custom-trained Models and AutoML Tabular Models, the container of the 16576 * DeployedModel instances will send `stderr` and `stdout` streams to 16577 * Cloud Logging by default. Please note that the logs incur cost, 16578 * which are subject to [Cloud Logging 16579 * pricing](https://cloud.google.com/logging/pricing). 
16580 * User can disable container logging by setting this flag to true. 16581 * </pre> 16582 * 16583 * <code>bool disable_container_logging = 34;</code> 16584 * 16585 * @return This builder for chaining. 16586 */ clearDisableContainerLogging()16587 public Builder clearDisableContainerLogging() { 16588 bitField0_ = (bitField0_ & ~0x20000000); 16589 disableContainerLogging_ = false; 16590 onChanged(); 16591 return this; 16592 } 16593 16594 @java.lang.Override setUnknownFields(final com.google.protobuf.UnknownFieldSet unknownFields)16595 public final Builder setUnknownFields(final com.google.protobuf.UnknownFieldSet unknownFields) { 16596 return super.setUnknownFields(unknownFields); 16597 } 16598 16599 @java.lang.Override mergeUnknownFields( final com.google.protobuf.UnknownFieldSet unknownFields)16600 public final Builder mergeUnknownFields( 16601 final com.google.protobuf.UnknownFieldSet unknownFields) { 16602 return super.mergeUnknownFields(unknownFields); 16603 } 16604 16605 // @@protoc_insertion_point(builder_scope:google.cloud.aiplatform.v1beta1.BatchPredictionJob) 16606 } 16607 16608 // @@protoc_insertion_point(class_scope:google.cloud.aiplatform.v1beta1.BatchPredictionJob) 16609 private static final com.google.cloud.aiplatform.v1beta1.BatchPredictionJob DEFAULT_INSTANCE; 16610 16611 static { 16612 DEFAULT_INSTANCE = new com.google.cloud.aiplatform.v1beta1.BatchPredictionJob(); 16613 } 16614 getDefaultInstance()16615 public static com.google.cloud.aiplatform.v1beta1.BatchPredictionJob getDefaultInstance() { 16616 return DEFAULT_INSTANCE; 16617 } 16618 16619 private static final com.google.protobuf.Parser<BatchPredictionJob> PARSER = 16620 new com.google.protobuf.AbstractParser<BatchPredictionJob>() { 16621 @java.lang.Override 16622 public BatchPredictionJob parsePartialFrom( 16623 com.google.protobuf.CodedInputStream input, 16624 com.google.protobuf.ExtensionRegistryLite extensionRegistry) 16625 throws com.google.protobuf.InvalidProtocolBufferException { 16626 Builder builder = newBuilder(); 16627 try { 16628 builder.mergeFrom(input, extensionRegistry); 16629 } catch (com.google.protobuf.InvalidProtocolBufferException e) { 16630 throw e.setUnfinishedMessage(builder.buildPartial()); 16631 } catch (com.google.protobuf.UninitializedMessageException e) { 16632 throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial()); 16633 } catch (java.io.IOException e) { 16634 throw new com.google.protobuf.InvalidProtocolBufferException(e) 16635 .setUnfinishedMessage(builder.buildPartial()); 16636 } 16637 return builder.buildPartial(); 16638 } 16639 }; 16640 parser()16641 public static com.google.protobuf.Parser<BatchPredictionJob> parser() { 16642 return PARSER; 16643 } 16644 16645 @java.lang.Override getParserForType()16646 public com.google.protobuf.Parser<BatchPredictionJob> getParserForType() { 16647 return PARSER; 16648 } 16649 16650 @java.lang.Override getDefaultInstanceForType()16651 public com.google.cloud.aiplatform.v1beta1.BatchPredictionJob getDefaultInstanceForType() { 16652 return DEFAULT_INSTANCE; 16653 } 16654 } 16655
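/*
 * Editor's illustrative sketch (not emitted by protoc): the snippet below shows how the
 * Builder accessors defined above are typically combined to populate the user-settable
 * fields of a BatchPredictionJob. The class name, display name, resource names, KMS key,
 * and label values are hypothetical placeholders. Output-only fields such as start_time,
 * end_time, update_time, and model_monitoring_status are populated by the service and are
 * only read back through their getters (e.g. getEndTime()).
 */
final class BatchPredictionJobUsageSketch {
  private BatchPredictionJobUsageSketch() {}

  static com.google.cloud.aiplatform.v1beta1.BatchPredictionJob buildExample() {
    return com.google.cloud.aiplatform.v1beta1.BatchPredictionJob.newBuilder()
        // Human-readable job name (required by the message's documentation).
        .setDisplayName("example-batch-prediction-job")
        // Model resource name; the path below is a placeholder, not a real resource.
        .setModel("projects/my-project/locations/us-central1/models/my-model")
        // Labels: lowercase letters, digits, underscores, dashes; at most 64 codepoints each.
        .putLabels("team", "ml-platform")
        // Optional customer-managed encryption key applied to all resources the job creates;
        // the key name format follows the EncryptionSpec proto and the value is a placeholder.
        .setEncryptionSpec(
            com.google.cloud.aiplatform.v1beta1.EncryptionSpec.newBuilder()
                .setKmsKeyName(
                    "projects/my-project/locations/us-central1/keyRings/my-ring/cryptoKeys/my-key"))
        // Attach a (here empty) model monitoring config; a real config would carry
        // monitoring objectives and alerting settings.
        .setModelMonitoringConfig(
            com.google.cloud.aiplatform.v1beta1.ModelMonitoringConfig.getDefaultInstance())
        // Opt out of streaming the DeployedModel container's stdout/stderr to Cloud Logging.
        .setDisableContainerLogging(true)
        .build();
  }
}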