• Home
  • Line#
  • Scopes#
  • Navigate#
  • Raw
  • Download
/*
 * Copyright 2020 Google LLC
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *     https://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
// Generated by the protocol buffer compiler.  DO NOT EDIT!
// source: google/cloud/aiplatform/v1/batch_prediction_job.proto
18 
19 package com.google.cloud.aiplatform.v1;
20 
/**
 * <pre>
 * A job that uses a
 * [Model][google.cloud.aiplatform.v1.BatchPredictionJob.model] to produce
 * predictions on multiple [input
 * instances][google.cloud.aiplatform.v1.BatchPredictionJob.input_config]. If
 * predictions for significant portion of the instances fail, the job may finish
 * without attempting predictions for all remaining instances.
 * </pre>
 *
 * Protobuf type {@code google.cloud.aiplatform.v1.BatchPredictionJob}
 */
35 public final class BatchPredictionJob extends com.google.protobuf.GeneratedMessageV3
36     implements
37     // @@protoc_insertion_point(message_implements:google.cloud.aiplatform.v1.BatchPredictionJob)
38     BatchPredictionJobOrBuilder {
39   private static final long serialVersionUID = 0L;
40   // Use BatchPredictionJob.newBuilder() to construct.
BatchPredictionJob(com.google.protobuf.GeneratedMessageV3.Builder<?> builder)41   private BatchPredictionJob(com.google.protobuf.GeneratedMessageV3.Builder<?> builder) {
42     super(builder);
43   }
44 
BatchPredictionJob()45   private BatchPredictionJob() {
46     name_ = "";
47     displayName_ = "";
48     model_ = "";
49     modelVersionId_ = "";
50     serviceAccount_ = "";
51     state_ = 0;
52     partialFailures_ = java.util.Collections.emptyList();
53   }
54 
55   @java.lang.Override
56   @SuppressWarnings({"unused"})
newInstance(UnusedPrivateParameter unused)57   protected java.lang.Object newInstance(UnusedPrivateParameter unused) {
58     return new BatchPredictionJob();
59   }
60 
61   @java.lang.Override
getUnknownFields()62   public final com.google.protobuf.UnknownFieldSet getUnknownFields() {
63     return this.unknownFields;
64   }
65 
getDescriptor()66   public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
67     return com.google.cloud.aiplatform.v1.BatchPredictionJobProto
68         .internal_static_google_cloud_aiplatform_v1_BatchPredictionJob_descriptor;
69   }
70 
71   @SuppressWarnings({"rawtypes"})
72   @java.lang.Override
internalGetMapField(int number)73   protected com.google.protobuf.MapField internalGetMapField(int number) {
74     switch (number) {
75       case 19:
76         return internalGetLabels();
77       default:
78         throw new RuntimeException("Invalid map field number: " + number);
79     }
80   }
81 
82   @java.lang.Override
83   protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
internalGetFieldAccessorTable()84       internalGetFieldAccessorTable() {
85     return com.google.cloud.aiplatform.v1.BatchPredictionJobProto
86         .internal_static_google_cloud_aiplatform_v1_BatchPredictionJob_fieldAccessorTable
87         .ensureFieldAccessorsInitialized(
88             com.google.cloud.aiplatform.v1.BatchPredictionJob.class,
89             com.google.cloud.aiplatform.v1.BatchPredictionJob.Builder.class);
90   }
91 
92   public interface InputConfigOrBuilder
93       extends
94       // @@protoc_insertion_point(interface_extends:google.cloud.aiplatform.v1.BatchPredictionJob.InputConfig)
95       com.google.protobuf.MessageOrBuilder {
96 
97     /**
98      *
99      *
100      * <pre>
101      * The Cloud Storage location for the input instances.
102      * </pre>
103      *
104      * <code>.google.cloud.aiplatform.v1.GcsSource gcs_source = 2;</code>
105      *
106      * @return Whether the gcsSource field is set.
107      */
hasGcsSource()108     boolean hasGcsSource();
109     /**
110      *
111      *
112      * <pre>
113      * The Cloud Storage location for the input instances.
114      * </pre>
115      *
116      * <code>.google.cloud.aiplatform.v1.GcsSource gcs_source = 2;</code>
117      *
118      * @return The gcsSource.
119      */
getGcsSource()120     com.google.cloud.aiplatform.v1.GcsSource getGcsSource();
121     /**
122      *
123      *
124      * <pre>
125      * The Cloud Storage location for the input instances.
126      * </pre>
127      *
128      * <code>.google.cloud.aiplatform.v1.GcsSource gcs_source = 2;</code>
129      */
getGcsSourceOrBuilder()130     com.google.cloud.aiplatform.v1.GcsSourceOrBuilder getGcsSourceOrBuilder();
131 
132     /**
133      *
134      *
135      * <pre>
136      * The BigQuery location of the input table.
137      * The schema of the table should be in the format described by the given
138      * context OpenAPI Schema, if one is provided. The table may contain
139      * additional columns that are not described by the schema, and they will
140      * be ignored.
141      * </pre>
142      *
143      * <code>.google.cloud.aiplatform.v1.BigQuerySource bigquery_source = 3;</code>
144      *
145      * @return Whether the bigquerySource field is set.
146      */
hasBigquerySource()147     boolean hasBigquerySource();
148     /**
149      *
150      *
151      * <pre>
152      * The BigQuery location of the input table.
153      * The schema of the table should be in the format described by the given
154      * context OpenAPI Schema, if one is provided. The table may contain
155      * additional columns that are not described by the schema, and they will
156      * be ignored.
157      * </pre>
158      *
159      * <code>.google.cloud.aiplatform.v1.BigQuerySource bigquery_source = 3;</code>
160      *
161      * @return The bigquerySource.
162      */
getBigquerySource()163     com.google.cloud.aiplatform.v1.BigQuerySource getBigquerySource();
164     /**
165      *
166      *
167      * <pre>
168      * The BigQuery location of the input table.
169      * The schema of the table should be in the format described by the given
170      * context OpenAPI Schema, if one is provided. The table may contain
171      * additional columns that are not described by the schema, and they will
172      * be ignored.
173      * </pre>
174      *
175      * <code>.google.cloud.aiplatform.v1.BigQuerySource bigquery_source = 3;</code>
176      */
getBigquerySourceOrBuilder()177     com.google.cloud.aiplatform.v1.BigQuerySourceOrBuilder getBigquerySourceOrBuilder();
178 
179     /**
180      *
181      *
182      * <pre>
183      * Required. The format in which instances are given, must be one of the
184      * [Model's][google.cloud.aiplatform.v1.BatchPredictionJob.model]
185      * [supported_input_storage_formats][google.cloud.aiplatform.v1.Model.supported_input_storage_formats].
186      * </pre>
187      *
188      * <code>string instances_format = 1 [(.google.api.field_behavior) = REQUIRED];</code>
189      *
190      * @return The instancesFormat.
191      */
getInstancesFormat()192     java.lang.String getInstancesFormat();
193     /**
194      *
195      *
196      * <pre>
197      * Required. The format in which instances are given, must be one of the
198      * [Model's][google.cloud.aiplatform.v1.BatchPredictionJob.model]
199      * [supported_input_storage_formats][google.cloud.aiplatform.v1.Model.supported_input_storage_formats].
200      * </pre>
201      *
202      * <code>string instances_format = 1 [(.google.api.field_behavior) = REQUIRED];</code>
203      *
204      * @return The bytes for instancesFormat.
205      */
getInstancesFormatBytes()206     com.google.protobuf.ByteString getInstancesFormatBytes();
207 
getSourceCase()208     public com.google.cloud.aiplatform.v1.BatchPredictionJob.InputConfig.SourceCase getSourceCase();
209   }
  /**
   * <pre>
   * Configures the input to
   * [BatchPredictionJob][google.cloud.aiplatform.v1.BatchPredictionJob]. See
   * [Model.supported_input_storage_formats][google.cloud.aiplatform.v1.Model.supported_input_storage_formats]
   * for Model's supported input formats, and how instances should be expressed
   * via any of them.
   * </pre>
   *
   * Protobuf type {@code google.cloud.aiplatform.v1.BatchPredictionJob.InputConfig}
   */
223   public static final class InputConfig extends com.google.protobuf.GeneratedMessageV3
224       implements
225       // @@protoc_insertion_point(message_implements:google.cloud.aiplatform.v1.BatchPredictionJob.InputConfig)
226       InputConfigOrBuilder {
227     private static final long serialVersionUID = 0L;
228     // Use InputConfig.newBuilder() to construct.
InputConfig(com.google.protobuf.GeneratedMessageV3.Builder<?> builder)229     private InputConfig(com.google.protobuf.GeneratedMessageV3.Builder<?> builder) {
230       super(builder);
231     }
232 
InputConfig()233     private InputConfig() {
234       instancesFormat_ = "";
235     }
236 
237     @java.lang.Override
238     @SuppressWarnings({"unused"})
newInstance(UnusedPrivateParameter unused)239     protected java.lang.Object newInstance(UnusedPrivateParameter unused) {
240       return new InputConfig();
241     }
242 
243     @java.lang.Override
getUnknownFields()244     public final com.google.protobuf.UnknownFieldSet getUnknownFields() {
245       return this.unknownFields;
246     }
247 
getDescriptor()248     public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
249       return com.google.cloud.aiplatform.v1.BatchPredictionJobProto
250           .internal_static_google_cloud_aiplatform_v1_BatchPredictionJob_InputConfig_descriptor;
251     }
252 
253     @java.lang.Override
254     protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
internalGetFieldAccessorTable()255         internalGetFieldAccessorTable() {
256       return com.google.cloud.aiplatform.v1.BatchPredictionJobProto
257           .internal_static_google_cloud_aiplatform_v1_BatchPredictionJob_InputConfig_fieldAccessorTable
258           .ensureFieldAccessorsInitialized(
259               com.google.cloud.aiplatform.v1.BatchPredictionJob.InputConfig.class,
260               com.google.cloud.aiplatform.v1.BatchPredictionJob.InputConfig.Builder.class);
261     }
262 
263     private int sourceCase_ = 0;
264     private java.lang.Object source_;
265 
266     public enum SourceCase
267         implements
268             com.google.protobuf.Internal.EnumLite,
269             com.google.protobuf.AbstractMessage.InternalOneOfEnum {
270       GCS_SOURCE(2),
271       BIGQUERY_SOURCE(3),
272       SOURCE_NOT_SET(0);
273       private final int value;
274 
SourceCase(int value)275       private SourceCase(int value) {
276         this.value = value;
277       }
278       /**
279        * @param value The number of the enum to look for.
280        * @return The enum associated with the given number.
281        * @deprecated Use {@link #forNumber(int)} instead.
282        */
283       @java.lang.Deprecated
valueOf(int value)284       public static SourceCase valueOf(int value) {
285         return forNumber(value);
286       }
287 
forNumber(int value)288       public static SourceCase forNumber(int value) {
289         switch (value) {
290           case 2:
291             return GCS_SOURCE;
292           case 3:
293             return BIGQUERY_SOURCE;
294           case 0:
295             return SOURCE_NOT_SET;
296           default:
297             return null;
298         }
299       }
300 
getNumber()301       public int getNumber() {
302         return this.value;
303       }
304     };
305 
getSourceCase()306     public SourceCase getSourceCase() {
307       return SourceCase.forNumber(sourceCase_);
308     }
309 
310     public static final int GCS_SOURCE_FIELD_NUMBER = 2;
311     /**
312      *
313      *
314      * <pre>
315      * The Cloud Storage location for the input instances.
316      * </pre>
317      *
318      * <code>.google.cloud.aiplatform.v1.GcsSource gcs_source = 2;</code>
319      *
320      * @return Whether the gcsSource field is set.
321      */
322     @java.lang.Override
hasGcsSource()323     public boolean hasGcsSource() {
324       return sourceCase_ == 2;
325     }
326     /**
327      *
328      *
329      * <pre>
330      * The Cloud Storage location for the input instances.
331      * </pre>
332      *
333      * <code>.google.cloud.aiplatform.v1.GcsSource gcs_source = 2;</code>
334      *
335      * @return The gcsSource.
336      */
337     @java.lang.Override
getGcsSource()338     public com.google.cloud.aiplatform.v1.GcsSource getGcsSource() {
339       if (sourceCase_ == 2) {
340         return (com.google.cloud.aiplatform.v1.GcsSource) source_;
341       }
342       return com.google.cloud.aiplatform.v1.GcsSource.getDefaultInstance();
343     }
344     /**
345      *
346      *
347      * <pre>
348      * The Cloud Storage location for the input instances.
349      * </pre>
350      *
351      * <code>.google.cloud.aiplatform.v1.GcsSource gcs_source = 2;</code>
352      */
353     @java.lang.Override
getGcsSourceOrBuilder()354     public com.google.cloud.aiplatform.v1.GcsSourceOrBuilder getGcsSourceOrBuilder() {
355       if (sourceCase_ == 2) {
356         return (com.google.cloud.aiplatform.v1.GcsSource) source_;
357       }
358       return com.google.cloud.aiplatform.v1.GcsSource.getDefaultInstance();
359     }
360 
361     public static final int BIGQUERY_SOURCE_FIELD_NUMBER = 3;
362     /**
363      *
364      *
365      * <pre>
366      * The BigQuery location of the input table.
367      * The schema of the table should be in the format described by the given
368      * context OpenAPI Schema, if one is provided. The table may contain
369      * additional columns that are not described by the schema, and they will
370      * be ignored.
371      * </pre>
372      *
373      * <code>.google.cloud.aiplatform.v1.BigQuerySource bigquery_source = 3;</code>
374      *
375      * @return Whether the bigquerySource field is set.
376      */
377     @java.lang.Override
hasBigquerySource()378     public boolean hasBigquerySource() {
379       return sourceCase_ == 3;
380     }
381     /**
382      *
383      *
384      * <pre>
385      * The BigQuery location of the input table.
386      * The schema of the table should be in the format described by the given
387      * context OpenAPI Schema, if one is provided. The table may contain
388      * additional columns that are not described by the schema, and they will
389      * be ignored.
390      * </pre>
391      *
392      * <code>.google.cloud.aiplatform.v1.BigQuerySource bigquery_source = 3;</code>
393      *
394      * @return The bigquerySource.
395      */
396     @java.lang.Override
getBigquerySource()397     public com.google.cloud.aiplatform.v1.BigQuerySource getBigquerySource() {
398       if (sourceCase_ == 3) {
399         return (com.google.cloud.aiplatform.v1.BigQuerySource) source_;
400       }
401       return com.google.cloud.aiplatform.v1.BigQuerySource.getDefaultInstance();
402     }
403     /**
404      *
405      *
406      * <pre>
407      * The BigQuery location of the input table.
408      * The schema of the table should be in the format described by the given
409      * context OpenAPI Schema, if one is provided. The table may contain
410      * additional columns that are not described by the schema, and they will
411      * be ignored.
412      * </pre>
413      *
414      * <code>.google.cloud.aiplatform.v1.BigQuerySource bigquery_source = 3;</code>
415      */
416     @java.lang.Override
getBigquerySourceOrBuilder()417     public com.google.cloud.aiplatform.v1.BigQuerySourceOrBuilder getBigquerySourceOrBuilder() {
418       if (sourceCase_ == 3) {
419         return (com.google.cloud.aiplatform.v1.BigQuerySource) source_;
420       }
421       return com.google.cloud.aiplatform.v1.BigQuerySource.getDefaultInstance();
422     }
423 
424     public static final int INSTANCES_FORMAT_FIELD_NUMBER = 1;
425 
426     @SuppressWarnings("serial")
427     private volatile java.lang.Object instancesFormat_ = "";
428     /**
429      *
430      *
431      * <pre>
432      * Required. The format in which instances are given, must be one of the
433      * [Model's][google.cloud.aiplatform.v1.BatchPredictionJob.model]
434      * [supported_input_storage_formats][google.cloud.aiplatform.v1.Model.supported_input_storage_formats].
435      * </pre>
436      *
437      * <code>string instances_format = 1 [(.google.api.field_behavior) = REQUIRED];</code>
438      *
439      * @return The instancesFormat.
440      */
441     @java.lang.Override
getInstancesFormat()442     public java.lang.String getInstancesFormat() {
443       java.lang.Object ref = instancesFormat_;
444       if (ref instanceof java.lang.String) {
445         return (java.lang.String) ref;
446       } else {
447         com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
448         java.lang.String s = bs.toStringUtf8();
449         instancesFormat_ = s;
450         return s;
451       }
452     }
453     /**
454      *
455      *
456      * <pre>
457      * Required. The format in which instances are given, must be one of the
458      * [Model's][google.cloud.aiplatform.v1.BatchPredictionJob.model]
459      * [supported_input_storage_formats][google.cloud.aiplatform.v1.Model.supported_input_storage_formats].
460      * </pre>
461      *
462      * <code>string instances_format = 1 [(.google.api.field_behavior) = REQUIRED];</code>
463      *
464      * @return The bytes for instancesFormat.
465      */
466     @java.lang.Override
getInstancesFormatBytes()467     public com.google.protobuf.ByteString getInstancesFormatBytes() {
468       java.lang.Object ref = instancesFormat_;
469       if (ref instanceof java.lang.String) {
470         com.google.protobuf.ByteString b =
471             com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
472         instancesFormat_ = b;
473         return b;
474       } else {
475         return (com.google.protobuf.ByteString) ref;
476       }
477     }
478 
479     private byte memoizedIsInitialized = -1;
480 
481     @java.lang.Override
isInitialized()482     public final boolean isInitialized() {
483       byte isInitialized = memoizedIsInitialized;
484       if (isInitialized == 1) return true;
485       if (isInitialized == 0) return false;
486 
487       memoizedIsInitialized = 1;
488       return true;
489     }
490 
491     @java.lang.Override
writeTo(com.google.protobuf.CodedOutputStream output)492     public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException {
493       if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(instancesFormat_)) {
494         com.google.protobuf.GeneratedMessageV3.writeString(output, 1, instancesFormat_);
495       }
496       if (sourceCase_ == 2) {
497         output.writeMessage(2, (com.google.cloud.aiplatform.v1.GcsSource) source_);
498       }
499       if (sourceCase_ == 3) {
500         output.writeMessage(3, (com.google.cloud.aiplatform.v1.BigQuerySource) source_);
501       }
502       getUnknownFields().writeTo(output);
503     }
504 
505     @java.lang.Override
getSerializedSize()506     public int getSerializedSize() {
507       int size = memoizedSize;
508       if (size != -1) return size;
509 
510       size = 0;
511       if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(instancesFormat_)) {
512         size += com.google.protobuf.GeneratedMessageV3.computeStringSize(1, instancesFormat_);
513       }
514       if (sourceCase_ == 2) {
515         size +=
516             com.google.protobuf.CodedOutputStream.computeMessageSize(
517                 2, (com.google.cloud.aiplatform.v1.GcsSource) source_);
518       }
519       if (sourceCase_ == 3) {
520         size +=
521             com.google.protobuf.CodedOutputStream.computeMessageSize(
522                 3, (com.google.cloud.aiplatform.v1.BigQuerySource) source_);
523       }
524       size += getUnknownFields().getSerializedSize();
525       memoizedSize = size;
526       return size;
527     }
528 
529     @java.lang.Override
equals(final java.lang.Object obj)530     public boolean equals(final java.lang.Object obj) {
531       if (obj == this) {
532         return true;
533       }
534       if (!(obj instanceof com.google.cloud.aiplatform.v1.BatchPredictionJob.InputConfig)) {
535         return super.equals(obj);
536       }
537       com.google.cloud.aiplatform.v1.BatchPredictionJob.InputConfig other =
538           (com.google.cloud.aiplatform.v1.BatchPredictionJob.InputConfig) obj;
539 
540       if (!getInstancesFormat().equals(other.getInstancesFormat())) return false;
541       if (!getSourceCase().equals(other.getSourceCase())) return false;
542       switch (sourceCase_) {
543         case 2:
544           if (!getGcsSource().equals(other.getGcsSource())) return false;
545           break;
546         case 3:
547           if (!getBigquerySource().equals(other.getBigquerySource())) return false;
548           break;
549         case 0:
550         default:
551       }
552       if (!getUnknownFields().equals(other.getUnknownFields())) return false;
553       return true;
554     }
555 
556     @java.lang.Override
hashCode()557     public int hashCode() {
558       if (memoizedHashCode != 0) {
559         return memoizedHashCode;
560       }
561       int hash = 41;
562       hash = (19 * hash) + getDescriptor().hashCode();
563       hash = (37 * hash) + INSTANCES_FORMAT_FIELD_NUMBER;
564       hash = (53 * hash) + getInstancesFormat().hashCode();
565       switch (sourceCase_) {
566         case 2:
567           hash = (37 * hash) + GCS_SOURCE_FIELD_NUMBER;
568           hash = (53 * hash) + getGcsSource().hashCode();
569           break;
570         case 3:
571           hash = (37 * hash) + BIGQUERY_SOURCE_FIELD_NUMBER;
572           hash = (53 * hash) + getBigquerySource().hashCode();
573           break;
574         case 0:
575         default:
576       }
577       hash = (29 * hash) + getUnknownFields().hashCode();
578       memoizedHashCode = hash;
579       return hash;
580     }
581 
parseFrom( java.nio.ByteBuffer data)582     public static com.google.cloud.aiplatform.v1.BatchPredictionJob.InputConfig parseFrom(
583         java.nio.ByteBuffer data) throws com.google.protobuf.InvalidProtocolBufferException {
584       return PARSER.parseFrom(data);
585     }
586 
parseFrom( java.nio.ByteBuffer data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)587     public static com.google.cloud.aiplatform.v1.BatchPredictionJob.InputConfig parseFrom(
588         java.nio.ByteBuffer data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
589         throws com.google.protobuf.InvalidProtocolBufferException {
590       return PARSER.parseFrom(data, extensionRegistry);
591     }
592 
parseFrom( com.google.protobuf.ByteString data)593     public static com.google.cloud.aiplatform.v1.BatchPredictionJob.InputConfig parseFrom(
594         com.google.protobuf.ByteString data)
595         throws com.google.protobuf.InvalidProtocolBufferException {
596       return PARSER.parseFrom(data);
597     }
598 
parseFrom( com.google.protobuf.ByteString data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)599     public static com.google.cloud.aiplatform.v1.BatchPredictionJob.InputConfig parseFrom(
600         com.google.protobuf.ByteString data,
601         com.google.protobuf.ExtensionRegistryLite extensionRegistry)
602         throws com.google.protobuf.InvalidProtocolBufferException {
603       return PARSER.parseFrom(data, extensionRegistry);
604     }
605 
parseFrom( byte[] data)606     public static com.google.cloud.aiplatform.v1.BatchPredictionJob.InputConfig parseFrom(
607         byte[] data) throws com.google.protobuf.InvalidProtocolBufferException {
608       return PARSER.parseFrom(data);
609     }
610 
parseFrom( byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)611     public static com.google.cloud.aiplatform.v1.BatchPredictionJob.InputConfig parseFrom(
612         byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
613         throws com.google.protobuf.InvalidProtocolBufferException {
614       return PARSER.parseFrom(data, extensionRegistry);
615     }
616 
parseFrom( java.io.InputStream input)617     public static com.google.cloud.aiplatform.v1.BatchPredictionJob.InputConfig parseFrom(
618         java.io.InputStream input) throws java.io.IOException {
619       return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
620     }
621 
parseFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)622     public static com.google.cloud.aiplatform.v1.BatchPredictionJob.InputConfig parseFrom(
623         java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
624         throws java.io.IOException {
625       return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
626           PARSER, input, extensionRegistry);
627     }
628 
parseDelimitedFrom( java.io.InputStream input)629     public static com.google.cloud.aiplatform.v1.BatchPredictionJob.InputConfig parseDelimitedFrom(
630         java.io.InputStream input) throws java.io.IOException {
631       return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(PARSER, input);
632     }
633 
parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)634     public static com.google.cloud.aiplatform.v1.BatchPredictionJob.InputConfig parseDelimitedFrom(
635         java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
636         throws java.io.IOException {
637       return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(
638           PARSER, input, extensionRegistry);
639     }
640 
parseFrom( com.google.protobuf.CodedInputStream input)641     public static com.google.cloud.aiplatform.v1.BatchPredictionJob.InputConfig parseFrom(
642         com.google.protobuf.CodedInputStream input) throws java.io.IOException {
643       return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
644     }
645 
parseFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)646     public static com.google.cloud.aiplatform.v1.BatchPredictionJob.InputConfig parseFrom(
647         com.google.protobuf.CodedInputStream input,
648         com.google.protobuf.ExtensionRegistryLite extensionRegistry)
649         throws java.io.IOException {
650       return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
651           PARSER, input, extensionRegistry);
652     }
653 
654     @java.lang.Override
newBuilderForType()655     public Builder newBuilderForType() {
656       return newBuilder();
657     }
658 
newBuilder()659     public static Builder newBuilder() {
660       return DEFAULT_INSTANCE.toBuilder();
661     }
662 
newBuilder( com.google.cloud.aiplatform.v1.BatchPredictionJob.InputConfig prototype)663     public static Builder newBuilder(
664         com.google.cloud.aiplatform.v1.BatchPredictionJob.InputConfig prototype) {
665       return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
666     }
667 
668     @java.lang.Override
toBuilder()669     public Builder toBuilder() {
670       return this == DEFAULT_INSTANCE ? new Builder() : new Builder().mergeFrom(this);
671     }
672 
673     @java.lang.Override
newBuilderForType( com.google.protobuf.GeneratedMessageV3.BuilderParent parent)674     protected Builder newBuilderForType(
675         com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
676       Builder builder = new Builder(parent);
677       return builder;
678     }
    /**
     * <pre>
     * Configures the input to
     * [BatchPredictionJob][google.cloud.aiplatform.v1.BatchPredictionJob]. See
     * [Model.supported_input_storage_formats][google.cloud.aiplatform.v1.Model.supported_input_storage_formats]
     * for Model's supported input formats, and how instances should be expressed
     * via any of them.
     * </pre>
     *
     * Protobuf type {@code google.cloud.aiplatform.v1.BatchPredictionJob.InputConfig}
     */
692     public static final class Builder
693         extends com.google.protobuf.GeneratedMessageV3.Builder<Builder>
694         implements
695         // @@protoc_insertion_point(builder_implements:google.cloud.aiplatform.v1.BatchPredictionJob.InputConfig)
696         com.google.cloud.aiplatform.v1.BatchPredictionJob.InputConfigOrBuilder {
getDescriptor()697       public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
698         return com.google.cloud.aiplatform.v1.BatchPredictionJobProto
699             .internal_static_google_cloud_aiplatform_v1_BatchPredictionJob_InputConfig_descriptor;
700       }
701 
702       @java.lang.Override
703       protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
internalGetFieldAccessorTable()704           internalGetFieldAccessorTable() {
705         return com.google.cloud.aiplatform.v1.BatchPredictionJobProto
706             .internal_static_google_cloud_aiplatform_v1_BatchPredictionJob_InputConfig_fieldAccessorTable
707             .ensureFieldAccessorsInitialized(
708                 com.google.cloud.aiplatform.v1.BatchPredictionJob.InputConfig.class,
709                 com.google.cloud.aiplatform.v1.BatchPredictionJob.InputConfig.Builder.class);
710       }
711 
712       // Construct using com.google.cloud.aiplatform.v1.BatchPredictionJob.InputConfig.newBuilder()
Builder()713       private Builder() {}
714 
Builder(com.google.protobuf.GeneratedMessageV3.BuilderParent parent)715       private Builder(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
716         super(parent);
717       }
718 
719       @java.lang.Override
clear()720       public Builder clear() {
721         super.clear();
722         bitField0_ = 0;
723         if (gcsSourceBuilder_ != null) {
724           gcsSourceBuilder_.clear();
725         }
726         if (bigquerySourceBuilder_ != null) {
727           bigquerySourceBuilder_.clear();
728         }
729         instancesFormat_ = "";
730         sourceCase_ = 0;
731         source_ = null;
732         return this;
733       }
734 
735       @java.lang.Override
getDescriptorForType()736       public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() {
737         return com.google.cloud.aiplatform.v1.BatchPredictionJobProto
738             .internal_static_google_cloud_aiplatform_v1_BatchPredictionJob_InputConfig_descriptor;
739       }
740 
741       @java.lang.Override
742       public com.google.cloud.aiplatform.v1.BatchPredictionJob.InputConfig
getDefaultInstanceForType()743           getDefaultInstanceForType() {
744         return com.google.cloud.aiplatform.v1.BatchPredictionJob.InputConfig.getDefaultInstance();
745       }
746 
      // Builds the message, throwing if required fields are missing
      // (InputConfig has no proto-required fields, so isInitialized() is
      // always true — see isInitialized() below).
      @java.lang.Override
      public com.google.cloud.aiplatform.v1.BatchPredictionJob.InputConfig build() {
        com.google.cloud.aiplatform.v1.BatchPredictionJob.InputConfig result = buildPartial();
        if (!result.isInitialized()) {
          throw newUninitializedMessageException(result);
        }
        return result;
      }

      // Builds the message without the initialization check, copying only the
      // fields whose presence bits are set plus the oneof state.
      @java.lang.Override
      public com.google.cloud.aiplatform.v1.BatchPredictionJob.InputConfig buildPartial() {
        com.google.cloud.aiplatform.v1.BatchPredictionJob.InputConfig result =
            new com.google.cloud.aiplatform.v1.BatchPredictionJob.InputConfig(this);
        if (bitField0_ != 0) {
          buildPartial0(result);
        }
        buildPartialOneofs(result);
        onBuilt();
        return result;
      }
767 
      // Copies set singular fields from this builder into `result`.
      private void buildPartial0(
          com.google.cloud.aiplatform.v1.BatchPredictionJob.InputConfig result) {
        int from_bitField0_ = bitField0_;
        // 0x00000004 is the presence bit for instances_format (field 1).
        if (((from_bitField0_ & 0x00000004) != 0)) {
          result.instancesFormat_ = instancesFormat_;
        }
      }

      // Copies the `source` oneof into `result`, materializing the value from
      // a nested builder when one is active for the set case.
      private void buildPartialOneofs(
          com.google.cloud.aiplatform.v1.BatchPredictionJob.InputConfig result) {
        result.sourceCase_ = sourceCase_;
        result.source_ = this.source_;
        if (sourceCase_ == 2 && gcsSourceBuilder_ != null) {
          result.source_ = gcsSourceBuilder_.build();
        }
        if (sourceCase_ == 3 && bigquerySourceBuilder_ != null) {
          result.source_ = bigquerySourceBuilder_.build();
        }
      }
787 
      // The overrides below exist only to narrow the return type to this
      // Builder; each delegates directly to GeneratedMessageV3.Builder.

      @java.lang.Override
      public Builder clone() {
        return super.clone();
      }

      @java.lang.Override
      public Builder setField(
          com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
        return super.setField(field, value);
      }

      @java.lang.Override
      public Builder clearField(com.google.protobuf.Descriptors.FieldDescriptor field) {
        return super.clearField(field);
      }

      @java.lang.Override
      public Builder clearOneof(com.google.protobuf.Descriptors.OneofDescriptor oneof) {
        return super.clearOneof(oneof);
      }

      @java.lang.Override
      public Builder setRepeatedField(
          com.google.protobuf.Descriptors.FieldDescriptor field,
          int index,
          java.lang.Object value) {
        return super.setRepeatedField(field, index, value);
      }

      @java.lang.Override
      public Builder addRepeatedField(
          com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
        return super.addRepeatedField(field, value);
      }
822 
      // Dispatches to the strongly-typed merge when `other` is an InputConfig;
      // otherwise falls back to the reflective descriptor-based merge.
      @java.lang.Override
      public Builder mergeFrom(com.google.protobuf.Message other) {
        if (other instanceof com.google.cloud.aiplatform.v1.BatchPredictionJob.InputConfig) {
          return mergeFrom((com.google.cloud.aiplatform.v1.BatchPredictionJob.InputConfig) other);
        } else {
          super.mergeFrom(other);
          return this;
        }
      }

      // Merges all set fields of `other` into this builder. Merging the
      // default instance is a no-op. For the `source` oneof, the case set in
      // `other` wins (message values are merged recursively).
      public Builder mergeFrom(
          com.google.cloud.aiplatform.v1.BatchPredictionJob.InputConfig other) {
        if (other
            == com.google.cloud.aiplatform.v1.BatchPredictionJob.InputConfig.getDefaultInstance())
          return this;
        if (!other.getInstancesFormat().isEmpty()) {
          instancesFormat_ = other.instancesFormat_;
          bitField0_ |= 0x00000004;
          onChanged();
        }
        switch (other.getSourceCase()) {
          case GCS_SOURCE:
            {
              mergeGcsSource(other.getGcsSource());
              break;
            }
          case BIGQUERY_SOURCE:
            {
              mergeBigquerySource(other.getBigquerySource());
              break;
            }
          case SOURCE_NOT_SET:
            {
              break;
            }
        }
        this.mergeUnknownFields(other.getUnknownFields());
        onChanged();
        return this;
      }
863 
      // Always true: InputConfig declares no proto2-style required fields.
      @java.lang.Override
      public final boolean isInitialized() {
        return true;
      }
868 
      // Parses wire-format bytes from `input` and merges them into this
      // builder. Tag values: 10 = field 1 (instances_format, length-delimited
      // string), 18 = field 2 (gcs_source, message), 26 = field 3
      // (bigquery_source, message); tag 0 marks end of stream. Unknown fields
      // are preserved via parseUnknownField.
      @java.lang.Override
      public Builder mergeFrom(
          com.google.protobuf.CodedInputStream input,
          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
          throws java.io.IOException {
        if (extensionRegistry == null) {
          throw new java.lang.NullPointerException();
        }
        try {
          boolean done = false;
          while (!done) {
            int tag = input.readTag();
            switch (tag) {
              case 0:
                done = true;
                break;
              case 10:
                {
                  instancesFormat_ = input.readStringRequireUtf8();
                  bitField0_ |= 0x00000004;
                  break;
                } // case 10
              case 18:
                {
                  input.readMessage(getGcsSourceFieldBuilder().getBuilder(), extensionRegistry);
                  sourceCase_ = 2;
                  break;
                } // case 18
              case 26:
                {
                  input.readMessage(
                      getBigquerySourceFieldBuilder().getBuilder(), extensionRegistry);
                  sourceCase_ = 3;
                  break;
                } // case 26
              default:
                {
                  if (!super.parseUnknownField(input, extensionRegistry, tag)) {
                    done = true; // was an endgroup tag
                  }
                  break;
                } // default:
            } // switch (tag)
          } // while (!done)
        } catch (com.google.protobuf.InvalidProtocolBufferException e) {
          throw e.unwrapIOException();
        } finally {
          // Notify parent builders even when parsing failed part-way through.
          onChanged();
        } // finally
        return this;
      }
920 
      // Discriminator for the `source` oneof: 0 = not set, 2 = gcs_source,
      // 3 = bigquery_source (matches the proto field numbers).
      private int sourceCase_ = 0;
      // Holds the currently-set oneof value (GcsSource or BigQuerySource),
      // or null when no nested builder owns it / nothing is set.
      private java.lang.Object source_;

      public SourceCase getSourceCase() {
        return SourceCase.forNumber(sourceCase_);
      }

      // Clears whichever member of the `source` oneof is currently set.
      public Builder clearSource() {
        sourceCase_ = 0;
        source_ = null;
        onChanged();
        return this;
      }

      // Presence bits for singular fields (see buildPartial0).
      private int bitField0_;
936 
      // Lazily-created nested builder for gcs_source; null until
      // getGcsSourceFieldBuilder() is first invoked.
      private com.google.protobuf.SingleFieldBuilderV3<
              com.google.cloud.aiplatform.v1.GcsSource,
              com.google.cloud.aiplatform.v1.GcsSource.Builder,
              com.google.cloud.aiplatform.v1.GcsSourceOrBuilder>
          gcsSourceBuilder_;
      /**
       *
       *
       * <pre>
       * The Cloud Storage location for the input instances.
       * </pre>
       *
       * <code>.google.cloud.aiplatform.v1.GcsSource gcs_source = 2;</code>
       *
       * @return Whether the gcsSource field is set.
       */
      @java.lang.Override
      public boolean hasGcsSource() {
        return sourceCase_ == 2;
      }
      /**
       *
       *
       * <pre>
       * The Cloud Storage location for the input instances.
       * </pre>
       *
       * <code>.google.cloud.aiplatform.v1.GcsSource gcs_source = 2;</code>
       *
       * @return The gcsSource.
       */
      @java.lang.Override
      public com.google.cloud.aiplatform.v1.GcsSource getGcsSource() {
        if (gcsSourceBuilder_ == null) {
          // No nested builder: the value (if set) lives directly in source_.
          if (sourceCase_ == 2) {
            return (com.google.cloud.aiplatform.v1.GcsSource) source_;
          }
          return com.google.cloud.aiplatform.v1.GcsSource.getDefaultInstance();
        } else {
          if (sourceCase_ == 2) {
            return gcsSourceBuilder_.getMessage();
          }
          return com.google.cloud.aiplatform.v1.GcsSource.getDefaultInstance();
        }
      }
      /**
       *
       *
       * <pre>
       * The Cloud Storage location for the input instances.
       * </pre>
       *
       * <code>.google.cloud.aiplatform.v1.GcsSource gcs_source = 2;</code>
       */
      public Builder setGcsSource(com.google.cloud.aiplatform.v1.GcsSource value) {
        if (gcsSourceBuilder_ == null) {
          if (value == null) {
            throw new NullPointerException();
          }
          source_ = value;
          onChanged();
        } else {
          gcsSourceBuilder_.setMessage(value);
        }
        // Setting gcs_source switches the oneof to case 2, displacing any
        // previously-set bigquery_source.
        sourceCase_ = 2;
        return this;
      }
      /**
       *
       *
       * <pre>
       * The Cloud Storage location for the input instances.
       * </pre>
       *
       * <code>.google.cloud.aiplatform.v1.GcsSource gcs_source = 2;</code>
       */
      public Builder setGcsSource(
          com.google.cloud.aiplatform.v1.GcsSource.Builder builderForValue) {
        if (gcsSourceBuilder_ == null) {
          source_ = builderForValue.build();
          onChanged();
        } else {
          gcsSourceBuilder_.setMessage(builderForValue.build());
        }
        sourceCase_ = 2;
        return this;
      }
      /**
       *
       *
       * <pre>
       * The Cloud Storage location for the input instances.
       * </pre>
       *
       * <code>.google.cloud.aiplatform.v1.GcsSource gcs_source = 2;</code>
       */
      public Builder mergeGcsSource(com.google.cloud.aiplatform.v1.GcsSource value) {
        if (gcsSourceBuilder_ == null) {
          // Merge field-by-field only when a non-default gcs_source is already
          // set; otherwise just adopt `value` wholesale.
          if (sourceCase_ == 2
              && source_ != com.google.cloud.aiplatform.v1.GcsSource.getDefaultInstance()) {
            source_ =
                com.google.cloud.aiplatform.v1.GcsSource.newBuilder(
                        (com.google.cloud.aiplatform.v1.GcsSource) source_)
                    .mergeFrom(value)
                    .buildPartial();
          } else {
            source_ = value;
          }
          onChanged();
        } else {
          if (sourceCase_ == 2) {
            gcsSourceBuilder_.mergeFrom(value);
          } else {
            gcsSourceBuilder_.setMessage(value);
          }
        }
        sourceCase_ = 2;
        return this;
      }
      /**
       *
       *
       * <pre>
       * The Cloud Storage location for the input instances.
       * </pre>
       *
       * <code>.google.cloud.aiplatform.v1.GcsSource gcs_source = 2;</code>
       */
      public Builder clearGcsSource() {
        if (gcsSourceBuilder_ == null) {
          // Only clear the oneof if gcs_source is the member currently set.
          if (sourceCase_ == 2) {
            sourceCase_ = 0;
            source_ = null;
            onChanged();
          }
        } else {
          if (sourceCase_ == 2) {
            sourceCase_ = 0;
            source_ = null;
          }
          gcsSourceBuilder_.clear();
        }
        return this;
      }
      /**
       *
       *
       * <pre>
       * The Cloud Storage location for the input instances.
       * </pre>
       *
       * <code>.google.cloud.aiplatform.v1.GcsSource gcs_source = 2;</code>
       */
      public com.google.cloud.aiplatform.v1.GcsSource.Builder getGcsSourceBuilder() {
        return getGcsSourceFieldBuilder().getBuilder();
      }
      /**
       *
       *
       * <pre>
       * The Cloud Storage location for the input instances.
       * </pre>
       *
       * <code>.google.cloud.aiplatform.v1.GcsSource gcs_source = 2;</code>
       */
      @java.lang.Override
      public com.google.cloud.aiplatform.v1.GcsSourceOrBuilder getGcsSourceOrBuilder() {
        if ((sourceCase_ == 2) && (gcsSourceBuilder_ != null)) {
          return gcsSourceBuilder_.getMessageOrBuilder();
        } else {
          if (sourceCase_ == 2) {
            return (com.google.cloud.aiplatform.v1.GcsSource) source_;
          }
          return com.google.cloud.aiplatform.v1.GcsSource.getDefaultInstance();
        }
      }
      /**
       *
       *
       * <pre>
       * The Cloud Storage location for the input instances.
       * </pre>
       *
       * <code>.google.cloud.aiplatform.v1.GcsSource gcs_source = 2;</code>
       */
      private com.google.protobuf.SingleFieldBuilderV3<
              com.google.cloud.aiplatform.v1.GcsSource,
              com.google.cloud.aiplatform.v1.GcsSource.Builder,
              com.google.cloud.aiplatform.v1.GcsSourceOrBuilder>
          getGcsSourceFieldBuilder() {
        // Lazily create the nested builder, seeding it with the current value
        // (or the default instance when another oneof case is set). After
        // creation the builder owns the value, so source_ is nulled out.
        if (gcsSourceBuilder_ == null) {
          if (!(sourceCase_ == 2)) {
            source_ = com.google.cloud.aiplatform.v1.GcsSource.getDefaultInstance();
          }
          gcsSourceBuilder_ =
              new com.google.protobuf.SingleFieldBuilderV3<
                  com.google.cloud.aiplatform.v1.GcsSource,
                  com.google.cloud.aiplatform.v1.GcsSource.Builder,
                  com.google.cloud.aiplatform.v1.GcsSourceOrBuilder>(
                  (com.google.cloud.aiplatform.v1.GcsSource) source_,
                  getParentForChildren(),
                  isClean());
          source_ = null;
        }
        // Accessing the field builder switches the oneof to gcs_source.
        sourceCase_ = 2;
        onChanged();
        return gcsSourceBuilder_;
      }
1145 
      // Lazily-created nested builder for bigquery_source; null until
      // getBigquerySourceFieldBuilder() is first invoked.
      private com.google.protobuf.SingleFieldBuilderV3<
              com.google.cloud.aiplatform.v1.BigQuerySource,
              com.google.cloud.aiplatform.v1.BigQuerySource.Builder,
              com.google.cloud.aiplatform.v1.BigQuerySourceOrBuilder>
          bigquerySourceBuilder_;
      /**
       *
       *
       * <pre>
       * The BigQuery location of the input table.
       * The schema of the table should be in the format described by the given
       * context OpenAPI Schema, if one is provided. The table may contain
       * additional columns that are not described by the schema, and they will
       * be ignored.
       * </pre>
       *
       * <code>.google.cloud.aiplatform.v1.BigQuerySource bigquery_source = 3;</code>
       *
       * @return Whether the bigquerySource field is set.
       */
      @java.lang.Override
      public boolean hasBigquerySource() {
        return sourceCase_ == 3;
      }
      /**
       *
       *
       * <pre>
       * The BigQuery location of the input table.
       * The schema of the table should be in the format described by the given
       * context OpenAPI Schema, if one is provided. The table may contain
       * additional columns that are not described by the schema, and they will
       * be ignored.
       * </pre>
       *
       * <code>.google.cloud.aiplatform.v1.BigQuerySource bigquery_source = 3;</code>
       *
       * @return The bigquerySource.
       */
      @java.lang.Override
      public com.google.cloud.aiplatform.v1.BigQuerySource getBigquerySource() {
        if (bigquerySourceBuilder_ == null) {
          // No nested builder: the value (if set) lives directly in source_.
          if (sourceCase_ == 3) {
            return (com.google.cloud.aiplatform.v1.BigQuerySource) source_;
          }
          return com.google.cloud.aiplatform.v1.BigQuerySource.getDefaultInstance();
        } else {
          if (sourceCase_ == 3) {
            return bigquerySourceBuilder_.getMessage();
          }
          return com.google.cloud.aiplatform.v1.BigQuerySource.getDefaultInstance();
        }
      }
      /**
       *
       *
       * <pre>
       * The BigQuery location of the input table.
       * The schema of the table should be in the format described by the given
       * context OpenAPI Schema, if one is provided. The table may contain
       * additional columns that are not described by the schema, and they will
       * be ignored.
       * </pre>
       *
       * <code>.google.cloud.aiplatform.v1.BigQuerySource bigquery_source = 3;</code>
       */
      public Builder setBigquerySource(com.google.cloud.aiplatform.v1.BigQuerySource value) {
        if (bigquerySourceBuilder_ == null) {
          if (value == null) {
            throw new NullPointerException();
          }
          source_ = value;
          onChanged();
        } else {
          bigquerySourceBuilder_.setMessage(value);
        }
        // Setting bigquery_source switches the oneof to case 3, displacing
        // any previously-set gcs_source.
        sourceCase_ = 3;
        return this;
      }
      /**
       *
       *
       * <pre>
       * The BigQuery location of the input table.
       * The schema of the table should be in the format described by the given
       * context OpenAPI Schema, if one is provided. The table may contain
       * additional columns that are not described by the schema, and they will
       * be ignored.
       * </pre>
       *
       * <code>.google.cloud.aiplatform.v1.BigQuerySource bigquery_source = 3;</code>
       */
      public Builder setBigquerySource(
          com.google.cloud.aiplatform.v1.BigQuerySource.Builder builderForValue) {
        if (bigquerySourceBuilder_ == null) {
          source_ = builderForValue.build();
          onChanged();
        } else {
          bigquerySourceBuilder_.setMessage(builderForValue.build());
        }
        sourceCase_ = 3;
        return this;
      }
      /**
       *
       *
       * <pre>
       * The BigQuery location of the input table.
       * The schema of the table should be in the format described by the given
       * context OpenAPI Schema, if one is provided. The table may contain
       * additional columns that are not described by the schema, and they will
       * be ignored.
       * </pre>
       *
       * <code>.google.cloud.aiplatform.v1.BigQuerySource bigquery_source = 3;</code>
       */
      public Builder mergeBigquerySource(com.google.cloud.aiplatform.v1.BigQuerySource value) {
        if (bigquerySourceBuilder_ == null) {
          // Merge field-by-field only when a non-default bigquery_source is
          // already set; otherwise just adopt `value` wholesale.
          if (sourceCase_ == 3
              && source_ != com.google.cloud.aiplatform.v1.BigQuerySource.getDefaultInstance()) {
            source_ =
                com.google.cloud.aiplatform.v1.BigQuerySource.newBuilder(
                        (com.google.cloud.aiplatform.v1.BigQuerySource) source_)
                    .mergeFrom(value)
                    .buildPartial();
          } else {
            source_ = value;
          }
          onChanged();
        } else {
          if (sourceCase_ == 3) {
            bigquerySourceBuilder_.mergeFrom(value);
          } else {
            bigquerySourceBuilder_.setMessage(value);
          }
        }
        sourceCase_ = 3;
        return this;
      }
      /**
       *
       *
       * <pre>
       * The BigQuery location of the input table.
       * The schema of the table should be in the format described by the given
       * context OpenAPI Schema, if one is provided. The table may contain
       * additional columns that are not described by the schema, and they will
       * be ignored.
       * </pre>
       *
       * <code>.google.cloud.aiplatform.v1.BigQuerySource bigquery_source = 3;</code>
       */
      public Builder clearBigquerySource() {
        if (bigquerySourceBuilder_ == null) {
          // Only clear the oneof if bigquery_source is the member currently set.
          if (sourceCase_ == 3) {
            sourceCase_ = 0;
            source_ = null;
            onChanged();
          }
        } else {
          if (sourceCase_ == 3) {
            sourceCase_ = 0;
            source_ = null;
          }
          bigquerySourceBuilder_.clear();
        }
        return this;
      }
      /**
       *
       *
       * <pre>
       * The BigQuery location of the input table.
       * The schema of the table should be in the format described by the given
       * context OpenAPI Schema, if one is provided. The table may contain
       * additional columns that are not described by the schema, and they will
       * be ignored.
       * </pre>
       *
       * <code>.google.cloud.aiplatform.v1.BigQuerySource bigquery_source = 3;</code>
       */
      public com.google.cloud.aiplatform.v1.BigQuerySource.Builder getBigquerySourceBuilder() {
        return getBigquerySourceFieldBuilder().getBuilder();
      }
      /**
       *
       *
       * <pre>
       * The BigQuery location of the input table.
       * The schema of the table should be in the format described by the given
       * context OpenAPI Schema, if one is provided. The table may contain
       * additional columns that are not described by the schema, and they will
       * be ignored.
       * </pre>
       *
       * <code>.google.cloud.aiplatform.v1.BigQuerySource bigquery_source = 3;</code>
       */
      @java.lang.Override
      public com.google.cloud.aiplatform.v1.BigQuerySourceOrBuilder getBigquerySourceOrBuilder() {
        if ((sourceCase_ == 3) && (bigquerySourceBuilder_ != null)) {
          return bigquerySourceBuilder_.getMessageOrBuilder();
        } else {
          if (sourceCase_ == 3) {
            return (com.google.cloud.aiplatform.v1.BigQuerySource) source_;
          }
          return com.google.cloud.aiplatform.v1.BigQuerySource.getDefaultInstance();
        }
      }
      /**
       *
       *
       * <pre>
       * The BigQuery location of the input table.
       * The schema of the table should be in the format described by the given
       * context OpenAPI Schema, if one is provided. The table may contain
       * additional columns that are not described by the schema, and they will
       * be ignored.
       * </pre>
       *
       * <code>.google.cloud.aiplatform.v1.BigQuerySource bigquery_source = 3;</code>
       */
      private com.google.protobuf.SingleFieldBuilderV3<
              com.google.cloud.aiplatform.v1.BigQuerySource,
              com.google.cloud.aiplatform.v1.BigQuerySource.Builder,
              com.google.cloud.aiplatform.v1.BigQuerySourceOrBuilder>
          getBigquerySourceFieldBuilder() {
        // Lazily create the nested builder, seeding it with the current value
        // (or the default instance when another oneof case is set). After
        // creation the builder owns the value, so source_ is nulled out.
        if (bigquerySourceBuilder_ == null) {
          if (!(sourceCase_ == 3)) {
            source_ = com.google.cloud.aiplatform.v1.BigQuerySource.getDefaultInstance();
          }
          bigquerySourceBuilder_ =
              new com.google.protobuf.SingleFieldBuilderV3<
                  com.google.cloud.aiplatform.v1.BigQuerySource,
                  com.google.cloud.aiplatform.v1.BigQuerySource.Builder,
                  com.google.cloud.aiplatform.v1.BigQuerySourceOrBuilder>(
                  (com.google.cloud.aiplatform.v1.BigQuerySource) source_,
                  getParentForChildren(),
                  isClean());
          source_ = null;
        }
        // Accessing the field builder switches the oneof to bigquery_source.
        sourceCase_ = 3;
        onChanged();
        return bigquerySourceBuilder_;
      }
1390 
      // Backing store for instances_format; holds either a String or a
      // ByteString and is converted/cached lazily by the accessors below.
      private java.lang.Object instancesFormat_ = "";
      /**
       *
       *
       * <pre>
       * Required. The format in which instances are given, must be one of the
       * [Model's][google.cloud.aiplatform.v1.BatchPredictionJob.model]
       * [supported_input_storage_formats][google.cloud.aiplatform.v1.Model.supported_input_storage_formats].
       * </pre>
       *
       * <code>string instances_format = 1 [(.google.api.field_behavior) = REQUIRED];</code>
       *
       * @return The instancesFormat.
       */
      public java.lang.String getInstancesFormat() {
        java.lang.Object ref = instancesFormat_;
        if (!(ref instanceof java.lang.String)) {
          // Decode the cached ByteString once and memoize the String form.
          com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
          java.lang.String s = bs.toStringUtf8();
          instancesFormat_ = s;
          return s;
        } else {
          return (java.lang.String) ref;
        }
      }
      /**
       *
       *
       * <pre>
       * Required. The format in which instances are given, must be one of the
       * [Model's][google.cloud.aiplatform.v1.BatchPredictionJob.model]
       * [supported_input_storage_formats][google.cloud.aiplatform.v1.Model.supported_input_storage_formats].
       * </pre>
       *
       * <code>string instances_format = 1 [(.google.api.field_behavior) = REQUIRED];</code>
       *
       * @return The bytes for instancesFormat.
       */
      public com.google.protobuf.ByteString getInstancesFormatBytes() {
        java.lang.Object ref = instancesFormat_;
        if (ref instanceof String) {
          // Encode the cached String once and memoize the ByteString form.
          com.google.protobuf.ByteString b =
              com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
          instancesFormat_ = b;
          return b;
        } else {
          return (com.google.protobuf.ByteString) ref;
        }
      }
      /**
       *
       *
       * <pre>
       * Required. The format in which instances are given, must be one of the
       * [Model's][google.cloud.aiplatform.v1.BatchPredictionJob.model]
       * [supported_input_storage_formats][google.cloud.aiplatform.v1.Model.supported_input_storage_formats].
       * </pre>
       *
       * <code>string instances_format = 1 [(.google.api.field_behavior) = REQUIRED];</code>
       *
       * @param value The instancesFormat to set.
       * @return This builder for chaining.
       */
      public Builder setInstancesFormat(java.lang.String value) {
        if (value == null) {
          throw new NullPointerException();
        }
        instancesFormat_ = value;
        // 0x00000004 marks instances_format as explicitly set.
        bitField0_ |= 0x00000004;
        onChanged();
        return this;
      }
      /**
       *
       *
       * <pre>
       * Required. The format in which instances are given, must be one of the
       * [Model's][google.cloud.aiplatform.v1.BatchPredictionJob.model]
       * [supported_input_storage_formats][google.cloud.aiplatform.v1.Model.supported_input_storage_formats].
       * </pre>
       *
       * <code>string instances_format = 1 [(.google.api.field_behavior) = REQUIRED];</code>
       *
       * @return This builder for chaining.
       */
      public Builder clearInstancesFormat() {
        // Restore the default ("" from the enclosing message's default
        // instance) and drop the presence bit.
        instancesFormat_ = getDefaultInstance().getInstancesFormat();
        bitField0_ = (bitField0_ & ~0x00000004);
        onChanged();
        return this;
      }
      /**
       *
       *
       * <pre>
       * Required. The format in which instances are given, must be one of the
       * [Model's][google.cloud.aiplatform.v1.BatchPredictionJob.model]
       * [supported_input_storage_formats][google.cloud.aiplatform.v1.Model.supported_input_storage_formats].
       * </pre>
       *
       * <code>string instances_format = 1 [(.google.api.field_behavior) = REQUIRED];</code>
       *
       * @param value The bytes for instancesFormat to set.
       * @return This builder for chaining.
       */
      public Builder setInstancesFormatBytes(com.google.protobuf.ByteString value) {
        if (value == null) {
          throw new NullPointerException();
        }
        // proto3 strings must be valid UTF-8; reject invalid bytes up front.
        checkByteStringIsUtf8(value);
        instancesFormat_ = value;
        bitField0_ |= 0x00000004;
        onChanged();
        return this;
      }
1506 
      // Covariant-return delegates for unknown-field handling.

      @java.lang.Override
      public final Builder setUnknownFields(
          final com.google.protobuf.UnknownFieldSet unknownFields) {
        return super.setUnknownFields(unknownFields);
      }

      @java.lang.Override
      public final Builder mergeUnknownFields(
          final com.google.protobuf.UnknownFieldSet unknownFields) {
        return super.mergeUnknownFields(unknownFields);
      }
1518 
1519       // @@protoc_insertion_point(builder_scope:google.cloud.aiplatform.v1.BatchPredictionJob.InputConfig)
1520     }
1521 
    // @@protoc_insertion_point(class_scope:google.cloud.aiplatform.v1.BatchPredictionJob.InputConfig)
    // Shared immutable singleton holding the proto default value for every field.
    private static final com.google.cloud.aiplatform.v1.BatchPredictionJob.InputConfig
        DEFAULT_INSTANCE;

    static {
      DEFAULT_INSTANCE = new com.google.cloud.aiplatform.v1.BatchPredictionJob.InputConfig();
    }
1529 
    /** Returns the shared immutable default instance of {@code InputConfig}. */
    public static com.google.cloud.aiplatform.v1.BatchPredictionJob.InputConfig
        getDefaultInstance() {
      return DEFAULT_INSTANCE;
    }
1534 
    // Wire-format parser for InputConfig. On any parse failure the partially-built
    // message is attached to the thrown exception so callers can inspect what was read.
    private static final com.google.protobuf.Parser<InputConfig> PARSER =
        new com.google.protobuf.AbstractParser<InputConfig>() {
          @java.lang.Override
          public InputConfig parsePartialFrom(
              com.google.protobuf.CodedInputStream input,
              com.google.protobuf.ExtensionRegistryLite extensionRegistry)
              throws com.google.protobuf.InvalidProtocolBufferException {
            Builder builder = newBuilder();
            try {
              builder.mergeFrom(input, extensionRegistry);
            } catch (com.google.protobuf.InvalidProtocolBufferException e) {
              throw e.setUnfinishedMessage(builder.buildPartial());
            } catch (com.google.protobuf.UninitializedMessageException e) {
              // Convert the missing-required-fields error into the checked parse exception.
              throw e.asInvalidProtocolBufferException()
                  .setUnfinishedMessage(builder.buildPartial());
            } catch (java.io.IOException e) {
              // Underlying stream errors are surfaced as parse failures as well.
              throw new com.google.protobuf.InvalidProtocolBufferException(e)
                  .setUnfinishedMessage(builder.buildPartial());
            }
            return builder.buildPartial();
          }
        };
1557 
    /** Returns the singleton wire-format parser for {@code InputConfig}. */
    public static com.google.protobuf.Parser<InputConfig> parser() {
      return PARSER;
    }
1561 
    @java.lang.Override
    public com.google.protobuf.Parser<InputConfig> getParserForType() {
      // Instance-level accessor required by the Message interface; same singleton as parser().
      return PARSER;
    }
1566 
    @java.lang.Override
    public com.google.cloud.aiplatform.v1.BatchPredictionJob.InputConfig
        getDefaultInstanceForType() {
      // Instance-level accessor required by the Message interface; same singleton as
      // getDefaultInstance().
      return DEFAULT_INSTANCE;
    }
1572   }
1573 
  /**
   * Read-only accessor interface for
   * {@code google.cloud.aiplatform.v1.BatchPredictionJob.InstanceConfig} fields
   * (instance_type, key_field, included_fields, excluded_fields).
   */
  public interface InstanceConfigOrBuilder
      extends
      // @@protoc_insertion_point(interface_extends:google.cloud.aiplatform.v1.BatchPredictionJob.InstanceConfig)
      com.google.protobuf.MessageOrBuilder {

    /**
     *
     *
     * <pre>
     * The format of the instance that the Model accepts. Vertex AI will
     * convert compatible
     * [batch prediction input instance
     * formats][google.cloud.aiplatform.v1.BatchPredictionJob.InputConfig.instances_format]
     * to the specified format.
     * Supported values are:
     * * `object`: Each input is converted to JSON object format.
     *     * For `bigquery`, each row is converted to an object.
     *     * For `jsonl`, each line of the JSONL input must be an object.
     *     * Does not apply to `csv`, `file-list`, `tf-record`, or
     *       `tf-record-gzip`.
     * * `array`: Each input is converted to JSON array format.
     *     * For `bigquery`, each row is converted to an array. The order
     *       of columns is determined by the BigQuery column order, unless
     *       [included_fields][google.cloud.aiplatform.v1.BatchPredictionJob.InstanceConfig.included_fields]
     *       is populated.
     *       [included_fields][google.cloud.aiplatform.v1.BatchPredictionJob.InstanceConfig.included_fields]
     *       must be populated for specifying field orders.
     *     * For `jsonl`, if each line of the JSONL input is an object,
     *       [included_fields][google.cloud.aiplatform.v1.BatchPredictionJob.InstanceConfig.included_fields]
     *       must be populated for specifying field orders.
     *     * Does not apply to `csv`, `file-list`, `tf-record`, or
     *       `tf-record-gzip`.
     * If not specified, Vertex AI converts the batch prediction input as
     * follows:
     *  * For `bigquery` and `csv`, the behavior is the same as `array`. The
     *    order of columns is the same as defined in the file or table, unless
     *    [included_fields][google.cloud.aiplatform.v1.BatchPredictionJob.InstanceConfig.included_fields]
     *    is populated.
     *  * For `jsonl`, the prediction instance format is determined by
     *    each line of the input.
     *  * For `tf-record`/`tf-record-gzip`, each record will be converted to
     *    an object in the format of `{"b64": &lt;value&gt;}`, where `&lt;value&gt;` is
     *    the Base64-encoded string of the content of the record.
     *  * For `file-list`, each file in the list will be converted to an
     *    object in the format of `{"b64": &lt;value&gt;}`, where `&lt;value&gt;` is
     *    the Base64-encoded string of the content of the file.
     * </pre>
     *
     * <code>string instance_type = 1;</code>
     *
     * @return The instanceType.
     */
    java.lang.String getInstanceType();
    /**
     *
     *
     * <pre>
     * The format of the instance that the Model accepts. Vertex AI will
     * convert compatible
     * [batch prediction input instance
     * formats][google.cloud.aiplatform.v1.BatchPredictionJob.InputConfig.instances_format]
     * to the specified format.
     * Supported values are:
     * * `object`: Each input is converted to JSON object format.
     *     * For `bigquery`, each row is converted to an object.
     *     * For `jsonl`, each line of the JSONL input must be an object.
     *     * Does not apply to `csv`, `file-list`, `tf-record`, or
     *       `tf-record-gzip`.
     * * `array`: Each input is converted to JSON array format.
     *     * For `bigquery`, each row is converted to an array. The order
     *       of columns is determined by the BigQuery column order, unless
     *       [included_fields][google.cloud.aiplatform.v1.BatchPredictionJob.InstanceConfig.included_fields]
     *       is populated.
     *       [included_fields][google.cloud.aiplatform.v1.BatchPredictionJob.InstanceConfig.included_fields]
     *       must be populated for specifying field orders.
     *     * For `jsonl`, if each line of the JSONL input is an object,
     *       [included_fields][google.cloud.aiplatform.v1.BatchPredictionJob.InstanceConfig.included_fields]
     *       must be populated for specifying field orders.
     *     * Does not apply to `csv`, `file-list`, `tf-record`, or
     *       `tf-record-gzip`.
     * If not specified, Vertex AI converts the batch prediction input as
     * follows:
     *  * For `bigquery` and `csv`, the behavior is the same as `array`. The
     *    order of columns is the same as defined in the file or table, unless
     *    [included_fields][google.cloud.aiplatform.v1.BatchPredictionJob.InstanceConfig.included_fields]
     *    is populated.
     *  * For `jsonl`, the prediction instance format is determined by
     *    each line of the input.
     *  * For `tf-record`/`tf-record-gzip`, each record will be converted to
     *    an object in the format of `{"b64": &lt;value&gt;}`, where `&lt;value&gt;` is
     *    the Base64-encoded string of the content of the record.
     *  * For `file-list`, each file in the list will be converted to an
     *    object in the format of `{"b64": &lt;value&gt;}`, where `&lt;value&gt;` is
     *    the Base64-encoded string of the content of the file.
     * </pre>
     *
     * <code>string instance_type = 1;</code>
     *
     * @return The bytes for instanceType.
     */
    com.google.protobuf.ByteString getInstanceTypeBytes();

    /**
     *
     *
     * <pre>
     * The name of the field that is considered as a key.
     * The values identified by the key field is not included in the transformed
     * instances that is sent to the Model. This is similar to
     * specifying this name of the field in
     * [excluded_fields][google.cloud.aiplatform.v1.BatchPredictionJob.InstanceConfig.excluded_fields].
     * In addition, the batch prediction output will not include the instances.
     * Instead the output will only include the value of the key field, in a
     * field named `key` in the output:
     *  * For `jsonl` output format, the output will have a `key` field
     *    instead of the `instance` field.
     *  * For `csv`/`bigquery` output format, the output will have have a `key`
     *    column instead of the instance feature columns.
     * The input must be JSONL with objects at each line, CSV, BigQuery
     * or TfRecord.
     * </pre>
     *
     * <code>string key_field = 2;</code>
     *
     * @return The keyField.
     */
    java.lang.String getKeyField();
    /**
     *
     *
     * <pre>
     * The name of the field that is considered as a key.
     * The values identified by the key field is not included in the transformed
     * instances that is sent to the Model. This is similar to
     * specifying this name of the field in
     * [excluded_fields][google.cloud.aiplatform.v1.BatchPredictionJob.InstanceConfig.excluded_fields].
     * In addition, the batch prediction output will not include the instances.
     * Instead the output will only include the value of the key field, in a
     * field named `key` in the output:
     *  * For `jsonl` output format, the output will have a `key` field
     *    instead of the `instance` field.
     *  * For `csv`/`bigquery` output format, the output will have have a `key`
     *    column instead of the instance feature columns.
     * The input must be JSONL with objects at each line, CSV, BigQuery
     * or TfRecord.
     * </pre>
     *
     * <code>string key_field = 2;</code>
     *
     * @return The bytes for keyField.
     */
    com.google.protobuf.ByteString getKeyFieldBytes();

    /**
     *
     *
     * <pre>
     * Fields that will be included in the prediction instance that is
     * sent to the Model.
     * If
     * [instance_type][google.cloud.aiplatform.v1.BatchPredictionJob.InstanceConfig.instance_type]
     * is `array`, the order of field names in included_fields also determines
     * the order of the values in the array.
     * When included_fields is populated,
     * [excluded_fields][google.cloud.aiplatform.v1.BatchPredictionJob.InstanceConfig.excluded_fields]
     * must be empty.
     * The input must be JSONL with objects at each line, CSV, BigQuery
     * or TfRecord.
     * </pre>
     *
     * <code>repeated string included_fields = 3;</code>
     *
     * @return A list containing the includedFields.
     */
    java.util.List<java.lang.String> getIncludedFieldsList();
    /**
     *
     *
     * <pre>
     * Fields that will be included in the prediction instance that is
     * sent to the Model.
     * If
     * [instance_type][google.cloud.aiplatform.v1.BatchPredictionJob.InstanceConfig.instance_type]
     * is `array`, the order of field names in included_fields also determines
     * the order of the values in the array.
     * When included_fields is populated,
     * [excluded_fields][google.cloud.aiplatform.v1.BatchPredictionJob.InstanceConfig.excluded_fields]
     * must be empty.
     * The input must be JSONL with objects at each line, CSV, BigQuery
     * or TfRecord.
     * </pre>
     *
     * <code>repeated string included_fields = 3;</code>
     *
     * @return The count of includedFields.
     */
    int getIncludedFieldsCount();
    /**
     *
     *
     * <pre>
     * Fields that will be included in the prediction instance that is
     * sent to the Model.
     * If
     * [instance_type][google.cloud.aiplatform.v1.BatchPredictionJob.InstanceConfig.instance_type]
     * is `array`, the order of field names in included_fields also determines
     * the order of the values in the array.
     * When included_fields is populated,
     * [excluded_fields][google.cloud.aiplatform.v1.BatchPredictionJob.InstanceConfig.excluded_fields]
     * must be empty.
     * The input must be JSONL with objects at each line, CSV, BigQuery
     * or TfRecord.
     * </pre>
     *
     * <code>repeated string included_fields = 3;</code>
     *
     * @param index The index of the element to return.
     * @return The includedFields at the given index.
     */
    java.lang.String getIncludedFields(int index);
    /**
     *
     *
     * <pre>
     * Fields that will be included in the prediction instance that is
     * sent to the Model.
     * If
     * [instance_type][google.cloud.aiplatform.v1.BatchPredictionJob.InstanceConfig.instance_type]
     * is `array`, the order of field names in included_fields also determines
     * the order of the values in the array.
     * When included_fields is populated,
     * [excluded_fields][google.cloud.aiplatform.v1.BatchPredictionJob.InstanceConfig.excluded_fields]
     * must be empty.
     * The input must be JSONL with objects at each line, CSV, BigQuery
     * or TfRecord.
     * </pre>
     *
     * <code>repeated string included_fields = 3;</code>
     *
     * @param index The index of the value to return.
     * @return The bytes of the includedFields at the given index.
     */
    com.google.protobuf.ByteString getIncludedFieldsBytes(int index);

    /**
     *
     *
     * <pre>
     * Fields that will be excluded in the prediction instance that is
     * sent to the Model.
     * Excluded will be attached to the batch prediction output if
     * [key_field][google.cloud.aiplatform.v1.BatchPredictionJob.InstanceConfig.key_field]
     * is not specified.
     * When excluded_fields is populated,
     * [included_fields][google.cloud.aiplatform.v1.BatchPredictionJob.InstanceConfig.included_fields]
     * must be empty.
     * The input must be JSONL with objects at each line, CSV, BigQuery
     * or TfRecord.
     * </pre>
     *
     * <code>repeated string excluded_fields = 4;</code>
     *
     * @return A list containing the excludedFields.
     */
    java.util.List<java.lang.String> getExcludedFieldsList();
    /**
     *
     *
     * <pre>
     * Fields that will be excluded in the prediction instance that is
     * sent to the Model.
     * Excluded will be attached to the batch prediction output if
     * [key_field][google.cloud.aiplatform.v1.BatchPredictionJob.InstanceConfig.key_field]
     * is not specified.
     * When excluded_fields is populated,
     * [included_fields][google.cloud.aiplatform.v1.BatchPredictionJob.InstanceConfig.included_fields]
     * must be empty.
     * The input must be JSONL with objects at each line, CSV, BigQuery
     * or TfRecord.
     * </pre>
     *
     * <code>repeated string excluded_fields = 4;</code>
     *
     * @return The count of excludedFields.
     */
    int getExcludedFieldsCount();
    /**
     *
     *
     * <pre>
     * Fields that will be excluded in the prediction instance that is
     * sent to the Model.
     * Excluded will be attached to the batch prediction output if
     * [key_field][google.cloud.aiplatform.v1.BatchPredictionJob.InstanceConfig.key_field]
     * is not specified.
     * When excluded_fields is populated,
     * [included_fields][google.cloud.aiplatform.v1.BatchPredictionJob.InstanceConfig.included_fields]
     * must be empty.
     * The input must be JSONL with objects at each line, CSV, BigQuery
     * or TfRecord.
     * </pre>
     *
     * <code>repeated string excluded_fields = 4;</code>
     *
     * @param index The index of the element to return.
     * @return The excludedFields at the given index.
     */
    java.lang.String getExcludedFields(int index);
    /**
     *
     *
     * <pre>
     * Fields that will be excluded in the prediction instance that is
     * sent to the Model.
     * Excluded will be attached to the batch prediction output if
     * [key_field][google.cloud.aiplatform.v1.BatchPredictionJob.InstanceConfig.key_field]
     * is not specified.
     * When excluded_fields is populated,
     * [included_fields][google.cloud.aiplatform.v1.BatchPredictionJob.InstanceConfig.included_fields]
     * must be empty.
     * The input must be JSONL with objects at each line, CSV, BigQuery
     * or TfRecord.
     * </pre>
     *
     * <code>repeated string excluded_fields = 4;</code>
     *
     * @param index The index of the value to return.
     * @return The bytes of the excludedFields at the given index.
     */
    com.google.protobuf.ByteString getExcludedFieldsBytes(int index);
  }
1905   /**
1906    *
1907    *
1908    * <pre>
1909    * Configuration defining how to transform batch prediction input instances to
1910    * the instances that the Model accepts.
1911    * </pre>
1912    *
1913    * Protobuf type {@code google.cloud.aiplatform.v1.BatchPredictionJob.InstanceConfig}
1914    */
1915   public static final class InstanceConfig extends com.google.protobuf.GeneratedMessageV3
1916       implements
1917       // @@protoc_insertion_point(message_implements:google.cloud.aiplatform.v1.BatchPredictionJob.InstanceConfig)
1918       InstanceConfigOrBuilder {
1919     private static final long serialVersionUID = 0L;
    // Use InstanceConfig.newBuilder() to construct.
    // Private: messages are created only via the builder or the parser.
    private InstanceConfig(com.google.protobuf.GeneratedMessageV3.Builder<?> builder) {
      super(builder);
    }
1924 
    // Initializes every field to its proto3 default (empty string / empty list).
    private InstanceConfig() {
      instanceType_ = "";
      keyField_ = "";
      includedFields_ = com.google.protobuf.LazyStringArrayList.EMPTY;
      excludedFields_ = com.google.protobuf.LazyStringArrayList.EMPTY;
    }
1931 
    @java.lang.Override
    @SuppressWarnings({"unused"})
    protected java.lang.Object newInstance(UnusedPrivateParameter unused) {
      // Factory hook used by the protobuf runtime to create fresh instances.
      return new InstanceConfig();
    }
1937 
    @java.lang.Override
    public final com.google.protobuf.UnknownFieldSet getUnknownFields() {
      // Fields present on the wire but not defined in this message's schema version.
      return this.unknownFields;
    }
1942 
    /** Returns the protobuf descriptor for the {@code InstanceConfig} message type. */
    public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
      return com.google.cloud.aiplatform.v1.BatchPredictionJobProto
          .internal_static_google_cloud_aiplatform_v1_BatchPredictionJob_InstanceConfig_descriptor;
    }
1947 
    @java.lang.Override
    protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
        internalGetFieldAccessorTable() {
      // Binds the generated descriptor to this class and its Builder for reflection.
      return com.google.cloud.aiplatform.v1.BatchPredictionJobProto
          .internal_static_google_cloud_aiplatform_v1_BatchPredictionJob_InstanceConfig_fieldAccessorTable
          .ensureFieldAccessorsInitialized(
              com.google.cloud.aiplatform.v1.BatchPredictionJob.InstanceConfig.class,
              com.google.cloud.aiplatform.v1.BatchPredictionJob.InstanceConfig.Builder.class);
    }
1957 
    public static final int INSTANCE_TYPE_FIELD_NUMBER = 1;

    @SuppressWarnings("serial")
    // Holds either a java.lang.String or a ByteString; converted lazily and cached
    // by getInstanceType()/getInstanceTypeBytes().
    private volatile java.lang.Object instanceType_ = "";
    /**
     *
     *
     * <pre>
     * The format of the instance that the Model accepts. Vertex AI will
     * convert compatible
     * [batch prediction input instance
     * formats][google.cloud.aiplatform.v1.BatchPredictionJob.InputConfig.instances_format]
     * to the specified format.
     * Supported values are:
     * * `object`: Each input is converted to JSON object format.
     *     * For `bigquery`, each row is converted to an object.
     *     * For `jsonl`, each line of the JSONL input must be an object.
     *     * Does not apply to `csv`, `file-list`, `tf-record`, or
     *       `tf-record-gzip`.
     * * `array`: Each input is converted to JSON array format.
     *     * For `bigquery`, each row is converted to an array. The order
     *       of columns is determined by the BigQuery column order, unless
     *       [included_fields][google.cloud.aiplatform.v1.BatchPredictionJob.InstanceConfig.included_fields]
     *       is populated.
     *       [included_fields][google.cloud.aiplatform.v1.BatchPredictionJob.InstanceConfig.included_fields]
     *       must be populated for specifying field orders.
     *     * For `jsonl`, if each line of the JSONL input is an object,
     *       [included_fields][google.cloud.aiplatform.v1.BatchPredictionJob.InstanceConfig.included_fields]
     *       must be populated for specifying field orders.
     *     * Does not apply to `csv`, `file-list`, `tf-record`, or
     *       `tf-record-gzip`.
     * If not specified, Vertex AI converts the batch prediction input as
     * follows:
     *  * For `bigquery` and `csv`, the behavior is the same as `array`. The
     *    order of columns is the same as defined in the file or table, unless
     *    [included_fields][google.cloud.aiplatform.v1.BatchPredictionJob.InstanceConfig.included_fields]
     *    is populated.
     *  * For `jsonl`, the prediction instance format is determined by
     *    each line of the input.
     *  * For `tf-record`/`tf-record-gzip`, each record will be converted to
     *    an object in the format of `{"b64": &lt;value&gt;}`, where `&lt;value&gt;` is
     *    the Base64-encoded string of the content of the record.
     *  * For `file-list`, each file in the list will be converted to an
     *    object in the format of `{"b64": &lt;value&gt;}`, where `&lt;value&gt;` is
     *    the Base64-encoded string of the content of the file.
     * </pre>
     *
     * <code>string instance_type = 1;</code>
     *
     * @return The instanceType.
     */
    @java.lang.Override
    public java.lang.String getInstanceType() {
      java.lang.Object ref = instanceType_;
      if (ref instanceof java.lang.String) {
        return (java.lang.String) ref;
      } else {
        // Field was parsed as raw bytes; decode UTF-8 once and cache the String form.
        com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
        java.lang.String s = bs.toStringUtf8();
        instanceType_ = s;
        return s;
      }
    }
    /**
     *
     *
     * <pre>
     * The format of the instance that the Model accepts. Vertex AI will
     * convert compatible
     * [batch prediction input instance
     * formats][google.cloud.aiplatform.v1.BatchPredictionJob.InputConfig.instances_format]
     * to the specified format.
     * Supported values are:
     * * `object`: Each input is converted to JSON object format.
     *     * For `bigquery`, each row is converted to an object.
     *     * For `jsonl`, each line of the JSONL input must be an object.
     *     * Does not apply to `csv`, `file-list`, `tf-record`, or
     *       `tf-record-gzip`.
     * * `array`: Each input is converted to JSON array format.
     *     * For `bigquery`, each row is converted to an array. The order
     *       of columns is determined by the BigQuery column order, unless
     *       [included_fields][google.cloud.aiplatform.v1.BatchPredictionJob.InstanceConfig.included_fields]
     *       is populated.
     *       [included_fields][google.cloud.aiplatform.v1.BatchPredictionJob.InstanceConfig.included_fields]
     *       must be populated for specifying field orders.
     *     * For `jsonl`, if each line of the JSONL input is an object,
     *       [included_fields][google.cloud.aiplatform.v1.BatchPredictionJob.InstanceConfig.included_fields]
     *       must be populated for specifying field orders.
     *     * Does not apply to `csv`, `file-list`, `tf-record`, or
     *       `tf-record-gzip`.
     * If not specified, Vertex AI converts the batch prediction input as
     * follows:
     *  * For `bigquery` and `csv`, the behavior is the same as `array`. The
     *    order of columns is the same as defined in the file or table, unless
     *    [included_fields][google.cloud.aiplatform.v1.BatchPredictionJob.InstanceConfig.included_fields]
     *    is populated.
     *  * For `jsonl`, the prediction instance format is determined by
     *    each line of the input.
     *  * For `tf-record`/`tf-record-gzip`, each record will be converted to
     *    an object in the format of `{"b64": &lt;value&gt;}`, where `&lt;value&gt;` is
     *    the Base64-encoded string of the content of the record.
     *  * For `file-list`, each file in the list will be converted to an
     *    object in the format of `{"b64": &lt;value&gt;}`, where `&lt;value&gt;` is
     *    the Base64-encoded string of the content of the file.
     * </pre>
     *
     * <code>string instance_type = 1;</code>
     *
     * @return The bytes for instanceType.
     */
    @java.lang.Override
    public com.google.protobuf.ByteString getInstanceTypeBytes() {
      java.lang.Object ref = instanceType_;
      if (ref instanceof java.lang.String) {
        // Field is currently a String; encode to UTF-8 once and cache the ByteString form.
        com.google.protobuf.ByteString b =
            com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
        instanceType_ = b;
        return b;
      } else {
        return (com.google.protobuf.ByteString) ref;
      }
    }
2080 
    public static final int KEY_FIELD_FIELD_NUMBER = 2;

    @SuppressWarnings("serial")
    // Holds either a java.lang.String or a ByteString; converted lazily and cached
    // by getKeyField()/getKeyFieldBytes().
    private volatile java.lang.Object keyField_ = "";
    /**
     *
     *
     * <pre>
     * The name of the field that is considered as a key.
     * The values identified by the key field is not included in the transformed
     * instances that is sent to the Model. This is similar to
     * specifying this name of the field in
     * [excluded_fields][google.cloud.aiplatform.v1.BatchPredictionJob.InstanceConfig.excluded_fields].
     * In addition, the batch prediction output will not include the instances.
     * Instead the output will only include the value of the key field, in a
     * field named `key` in the output:
     *  * For `jsonl` output format, the output will have a `key` field
     *    instead of the `instance` field.
     *  * For `csv`/`bigquery` output format, the output will have have a `key`
     *    column instead of the instance feature columns.
     * The input must be JSONL with objects at each line, CSV, BigQuery
     * or TfRecord.
     * </pre>
     *
     * <code>string key_field = 2;</code>
     *
     * @return The keyField.
     */
    @java.lang.Override
    public java.lang.String getKeyField() {
      java.lang.Object ref = keyField_;
      if (ref instanceof java.lang.String) {
        return (java.lang.String) ref;
      } else {
        // Field was parsed as raw bytes; decode UTF-8 once and cache the String form.
        com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
        java.lang.String s = bs.toStringUtf8();
        keyField_ = s;
        return s;
      }
    }
    /**
     *
     *
     * <pre>
     * The name of the field that is considered as a key.
     * The values identified by the key field is not included in the transformed
     * instances that is sent to the Model. This is similar to
     * specifying this name of the field in
     * [excluded_fields][google.cloud.aiplatform.v1.BatchPredictionJob.InstanceConfig.excluded_fields].
     * In addition, the batch prediction output will not include the instances.
     * Instead the output will only include the value of the key field, in a
     * field named `key` in the output:
     *  * For `jsonl` output format, the output will have a `key` field
     *    instead of the `instance` field.
     *  * For `csv`/`bigquery` output format, the output will have have a `key`
     *    column instead of the instance feature columns.
     * The input must be JSONL with objects at each line, CSV, BigQuery
     * or TfRecord.
     * </pre>
     *
     * <code>string key_field = 2;</code>
     *
     * @return The bytes for keyField.
     */
    @java.lang.Override
    public com.google.protobuf.ByteString getKeyFieldBytes() {
      java.lang.Object ref = keyField_;
      if (ref instanceof java.lang.String) {
        // Field is currently a String; encode to UTF-8 once and cache the ByteString form.
        com.google.protobuf.ByteString b =
            com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
        keyField_ = b;
        return b;
      } else {
        return (com.google.protobuf.ByteString) ref;
      }
    }
2157 
    public static final int INCLUDED_FIELDS_FIELD_NUMBER = 3;

    @SuppressWarnings("serial")
    // Repeated string field backing store; defaults to LazyStringArrayList.EMPTY.
    private com.google.protobuf.LazyStringList includedFields_;
    /**
     *
     *
     * <pre>
     * Fields that will be included in the prediction instance that is
     * sent to the Model.
     * If
     * [instance_type][google.cloud.aiplatform.v1.BatchPredictionJob.InstanceConfig.instance_type]
     * is `array`, the order of field names in included_fields also determines
     * the order of the values in the array.
     * When included_fields is populated,
     * [excluded_fields][google.cloud.aiplatform.v1.BatchPredictionJob.InstanceConfig.excluded_fields]
     * must be empty.
     * The input must be JSONL with objects at each line, CSV, BigQuery
     * or TfRecord.
     * </pre>
     *
     * <code>repeated string included_fields = 3;</code>
     *
     * @return A list containing the includedFields.
     */
    public com.google.protobuf.ProtocolStringList getIncludedFieldsList() {
      // Returns the backing list directly; callers must treat it as read-only.
      return includedFields_;
    }
2186     /**
2187      *
2188      *
2189      * <pre>
2190      * Fields that will be included in the prediction instance that is
2191      * sent to the Model.
2192      * If
2193      * [instance_type][google.cloud.aiplatform.v1.BatchPredictionJob.InstanceConfig.instance_type]
2194      * is `array`, the order of field names in included_fields also determines
2195      * the order of the values in the array.
2196      * When included_fields is populated,
2197      * [excluded_fields][google.cloud.aiplatform.v1.BatchPredictionJob.InstanceConfig.excluded_fields]
2198      * must be empty.
2199      * The input must be JSONL with objects at each line, CSV, BigQuery
2200      * or TfRecord.
2201      * </pre>
2202      *
2203      * <code>repeated string included_fields = 3;</code>
2204      *
2205      * @return The count of includedFields.
2206      */
getIncludedFieldsCount()2207     public int getIncludedFieldsCount() {
2208       return includedFields_.size();
2209     }
2210     /**
2211      *
2212      *
2213      * <pre>
2214      * Fields that will be included in the prediction instance that is
2215      * sent to the Model.
2216      * If
2217      * [instance_type][google.cloud.aiplatform.v1.BatchPredictionJob.InstanceConfig.instance_type]
2218      * is `array`, the order of field names in included_fields also determines
2219      * the order of the values in the array.
2220      * When included_fields is populated,
2221      * [excluded_fields][google.cloud.aiplatform.v1.BatchPredictionJob.InstanceConfig.excluded_fields]
2222      * must be empty.
2223      * The input must be JSONL with objects at each line, CSV, BigQuery
2224      * or TfRecord.
2225      * </pre>
2226      *
2227      * <code>repeated string included_fields = 3;</code>
2228      *
2229      * @param index The index of the element to return.
2230      * @return The includedFields at the given index.
2231      */
getIncludedFields(int index)2232     public java.lang.String getIncludedFields(int index) {
2233       return includedFields_.get(index);
2234     }
2235     /**
2236      *
2237      *
2238      * <pre>
2239      * Fields that will be included in the prediction instance that is
2240      * sent to the Model.
2241      * If
2242      * [instance_type][google.cloud.aiplatform.v1.BatchPredictionJob.InstanceConfig.instance_type]
2243      * is `array`, the order of field names in included_fields also determines
2244      * the order of the values in the array.
2245      * When included_fields is populated,
2246      * [excluded_fields][google.cloud.aiplatform.v1.BatchPredictionJob.InstanceConfig.excluded_fields]
2247      * must be empty.
2248      * The input must be JSONL with objects at each line, CSV, BigQuery
2249      * or TfRecord.
2250      * </pre>
2251      *
2252      * <code>repeated string included_fields = 3;</code>
2253      *
2254      * @param index The index of the value to return.
2255      * @return The bytes of the includedFields at the given index.
2256      */
getIncludedFieldsBytes(int index)2257     public com.google.protobuf.ByteString getIncludedFieldsBytes(int index) {
2258       return includedFields_.getByteString(index);
2259     }
2260 
    public static final int EXCLUDED_FIELDS_FIELD_NUMBER = 4;

    // Backing storage for the repeated `excluded_fields` proto field.
    @SuppressWarnings("serial")
    private com.google.protobuf.LazyStringList excludedFields_;
    /**
     *
     *
     * <pre>
     * Fields that will be excluded in the prediction instance that is
     * sent to the Model.
     * Excluded will be attached to the batch prediction output if
     * [key_field][google.cloud.aiplatform.v1.BatchPredictionJob.InstanceConfig.key_field]
     * is not specified.
     * When excluded_fields is populated,
     * [included_fields][google.cloud.aiplatform.v1.BatchPredictionJob.InstanceConfig.included_fields]
     * must be empty.
     * The input must be JSONL with objects at each line, CSV, BigQuery
     * or TfRecord.
     * </pre>
     *
     * <code>repeated string excluded_fields = 4;</code>
     *
     * @return A list containing the excludedFields.
     */
    public com.google.protobuf.ProtocolStringList getExcludedFieldsList() {
      return excludedFields_;
    }
    /**
     * See {@link #getExcludedFieldsList()} for the field's full documentation.
     *
     * <code>repeated string excluded_fields = 4;</code>
     *
     * @return The count of excludedFields.
     */
    public int getExcludedFieldsCount() {
      return excludedFields_.size();
    }
    /**
     * See {@link #getExcludedFieldsList()} for the field's full documentation.
     *
     * <code>repeated string excluded_fields = 4;</code>
     *
     * @param index The index of the element to return.
     * @return The excludedFields at the given index.
     */
    public java.lang.String getExcludedFields(int index) {
      return excludedFields_.get(index);
    }
    /**
     * See {@link #getExcludedFieldsList()} for the field's full documentation.
     *
     * <code>repeated string excluded_fields = 4;</code>
     *
     * @param index The index of the value to return.
     * @return The bytes of the excludedFields at the given index.
     */
    public com.google.protobuf.ByteString getExcludedFieldsBytes(int index) {
      return excludedFields_.getByteString(index);
    }
2359 
    // Memoized initialization state: -1 = not yet computed, 0 = false, 1 = true.
    private byte memoizedIsInitialized = -1;

    @java.lang.Override
    public final boolean isInitialized() {
      byte isInitialized = memoizedIsInitialized;
      if (isInitialized == 1) return true;
      if (isInitialized == 0) return false;

      // This message has no required fields, so it is always initialized.
      memoizedIsInitialized = 1;
      return true;
    }
2371 
    @java.lang.Override
    public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException {
      // Scalar string fields (1, 2) are written only when non-empty; repeated
      // fields (3, 4) write one length-delimited entry per element, in order.
      if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(instanceType_)) {
        com.google.protobuf.GeneratedMessageV3.writeString(output, 1, instanceType_);
      }
      if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(keyField_)) {
        com.google.protobuf.GeneratedMessageV3.writeString(output, 2, keyField_);
      }
      for (int i = 0; i < includedFields_.size(); i++) {
        com.google.protobuf.GeneratedMessageV3.writeString(output, 3, includedFields_.getRaw(i));
      }
      for (int i = 0; i < excludedFields_.size(); i++) {
        com.google.protobuf.GeneratedMessageV3.writeString(output, 4, excludedFields_.getRaw(i));
      }
      // Preserve any fields that were unknown at parse time.
      getUnknownFields().writeTo(output);
    }
2388 
    @java.lang.Override
    public int getSerializedSize() {
      // Result is memoized; -1 marks "not yet computed".
      int size = memoizedSize;
      if (size != -1) return size;

      size = 0;
      if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(instanceType_)) {
        size += com.google.protobuf.GeneratedMessageV3.computeStringSize(1, instanceType_);
      }
      if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(keyField_)) {
        size += com.google.protobuf.GeneratedMessageV3.computeStringSize(2, keyField_);
      }
      {
        int dataSize = 0;
        for (int i = 0; i < includedFields_.size(); i++) {
          dataSize += computeStringSizeNoTag(includedFields_.getRaw(i));
        }
        size += dataSize;
        // One byte of tag overhead per element (field number 3 fits a 1-byte tag).
        size += 1 * getIncludedFieldsList().size();
      }
      {
        int dataSize = 0;
        for (int i = 0; i < excludedFields_.size(); i++) {
          dataSize += computeStringSizeNoTag(excludedFields_.getRaw(i));
        }
        size += dataSize;
        // One byte of tag overhead per element (field number 4 fits a 1-byte tag).
        size += 1 * getExcludedFieldsList().size();
      }
      size += getUnknownFields().getSerializedSize();
      memoizedSize = size;
      return size;
    }
2421 
2422     @java.lang.Override
equals(final java.lang.Object obj)2423     public boolean equals(final java.lang.Object obj) {
2424       if (obj == this) {
2425         return true;
2426       }
2427       if (!(obj instanceof com.google.cloud.aiplatform.v1.BatchPredictionJob.InstanceConfig)) {
2428         return super.equals(obj);
2429       }
2430       com.google.cloud.aiplatform.v1.BatchPredictionJob.InstanceConfig other =
2431           (com.google.cloud.aiplatform.v1.BatchPredictionJob.InstanceConfig) obj;
2432 
2433       if (!getInstanceType().equals(other.getInstanceType())) return false;
2434       if (!getKeyField().equals(other.getKeyField())) return false;
2435       if (!getIncludedFieldsList().equals(other.getIncludedFieldsList())) return false;
2436       if (!getExcludedFieldsList().equals(other.getExcludedFieldsList())) return false;
2437       if (!getUnknownFields().equals(other.getUnknownFields())) return false;
2438       return true;
2439     }
2440 
    @java.lang.Override
    public int hashCode() {
      // Memoized; 0 marks "not yet computed". Folds in the descriptor, each
      // field number/value pair (repeated fields only when non-empty, matching
      // equals()), and the unknown field set.
      if (memoizedHashCode != 0) {
        return memoizedHashCode;
      }
      int hash = 41;
      hash = (19 * hash) + getDescriptor().hashCode();
      hash = (37 * hash) + INSTANCE_TYPE_FIELD_NUMBER;
      hash = (53 * hash) + getInstanceType().hashCode();
      hash = (37 * hash) + KEY_FIELD_FIELD_NUMBER;
      hash = (53 * hash) + getKeyField().hashCode();
      if (getIncludedFieldsCount() > 0) {
        hash = (37 * hash) + INCLUDED_FIELDS_FIELD_NUMBER;
        hash = (53 * hash) + getIncludedFieldsList().hashCode();
      }
      if (getExcludedFieldsCount() > 0) {
        hash = (37 * hash) + EXCLUDED_FIELDS_FIELD_NUMBER;
        hash = (53 * hash) + getExcludedFieldsList().hashCode();
      }
      hash = (29 * hash) + getUnknownFields().hashCode();
      memoizedHashCode = hash;
      return hash;
    }
2464 
    // -------------------------------------------------------------------------
    // Standard protobuf parsing entry points. Each overload delegates to PARSER
    // (directly, or through GeneratedMessageV3 helpers for stream inputs); they
    // differ only in the input representation accepted.
    // -------------------------------------------------------------------------
    public static com.google.cloud.aiplatform.v1.BatchPredictionJob.InstanceConfig parseFrom(
        java.nio.ByteBuffer data) throws com.google.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data);
    }

    public static com.google.cloud.aiplatform.v1.BatchPredictionJob.InstanceConfig parseFrom(
        java.nio.ByteBuffer data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws com.google.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data, extensionRegistry);
    }

    public static com.google.cloud.aiplatform.v1.BatchPredictionJob.InstanceConfig parseFrom(
        com.google.protobuf.ByteString data)
        throws com.google.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data);
    }

    public static com.google.cloud.aiplatform.v1.BatchPredictionJob.InstanceConfig parseFrom(
        com.google.protobuf.ByteString data,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws com.google.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data, extensionRegistry);
    }

    public static com.google.cloud.aiplatform.v1.BatchPredictionJob.InstanceConfig parseFrom(
        byte[] data) throws com.google.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data);
    }

    public static com.google.cloud.aiplatform.v1.BatchPredictionJob.InstanceConfig parseFrom(
        byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws com.google.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data, extensionRegistry);
    }

    public static com.google.cloud.aiplatform.v1.BatchPredictionJob.InstanceConfig parseFrom(
        java.io.InputStream input) throws java.io.IOException {
      return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
    }

    public static com.google.cloud.aiplatform.v1.BatchPredictionJob.InstanceConfig parseFrom(
        java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
          PARSER, input, extensionRegistry);
    }

    // Delimited variants read a varint length prefix before the message bytes.
    public static com.google.cloud.aiplatform.v1.BatchPredictionJob.InstanceConfig
        parseDelimitedFrom(java.io.InputStream input) throws java.io.IOException {
      return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(PARSER, input);
    }

    public static com.google.cloud.aiplatform.v1.BatchPredictionJob.InstanceConfig
        parseDelimitedFrom(
            java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
            throws java.io.IOException {
      return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(
          PARSER, input, extensionRegistry);
    }

    public static com.google.cloud.aiplatform.v1.BatchPredictionJob.InstanceConfig parseFrom(
        com.google.protobuf.CodedInputStream input) throws java.io.IOException {
      return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
    }

    public static com.google.cloud.aiplatform.v1.BatchPredictionJob.InstanceConfig parseFrom(
        com.google.protobuf.CodedInputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
          PARSER, input, extensionRegistry);
    }
2537 
    @java.lang.Override
    public Builder newBuilderForType() {
      return newBuilder();
    }

    // Returns a builder initialized with default (empty) field values.
    public static Builder newBuilder() {
      return DEFAULT_INSTANCE.toBuilder();
    }

    // Returns a builder pre-populated with the given message's field values.
    public static Builder newBuilder(
        com.google.cloud.aiplatform.v1.BatchPredictionJob.InstanceConfig prototype) {
      return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
    }
2551 
2552     @java.lang.Override
toBuilder()2553     public Builder toBuilder() {
2554       return this == DEFAULT_INSTANCE ? new Builder() : new Builder().mergeFrom(this);
2555     }
2556 
2557     @java.lang.Override
newBuilderForType( com.google.protobuf.GeneratedMessageV3.BuilderParent parent)2558     protected Builder newBuilderForType(
2559         com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
2560       Builder builder = new Builder(parent);
2561       return builder;
2562     }
2563     /**
2564      *
2565      *
2566      * <pre>
2567      * Configuration defining how to transform batch prediction input instances to
2568      * the instances that the Model accepts.
2569      * </pre>
2570      *
2571      * Protobuf type {@code google.cloud.aiplatform.v1.BatchPredictionJob.InstanceConfig}
2572      */
2573     public static final class Builder
2574         extends com.google.protobuf.GeneratedMessageV3.Builder<Builder>
2575         implements
2576         // @@protoc_insertion_point(builder_implements:google.cloud.aiplatform.v1.BatchPredictionJob.InstanceConfig)
2577         com.google.cloud.aiplatform.v1.BatchPredictionJob.InstanceConfigOrBuilder {
      // Proto descriptor for InstanceConfig, shared with the message class.
      public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
        return com.google.cloud.aiplatform.v1.BatchPredictionJobProto
            .internal_static_google_cloud_aiplatform_v1_BatchPredictionJob_InstanceConfig_descriptor;
      }
2582 
      @java.lang.Override
      protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
          internalGetFieldAccessorTable() {
        // Maps proto field descriptors to the generated Java accessors for
        // reflective get/set support.
        return com.google.cloud.aiplatform.v1.BatchPredictionJobProto
            .internal_static_google_cloud_aiplatform_v1_BatchPredictionJob_InstanceConfig_fieldAccessorTable
            .ensureFieldAccessorsInitialized(
                com.google.cloud.aiplatform.v1.BatchPredictionJob.InstanceConfig.class,
                com.google.cloud.aiplatform.v1.BatchPredictionJob.InstanceConfig.Builder.class);
      }
2592 
      // Construct using
      // com.google.cloud.aiplatform.v1.BatchPredictionJob.InstanceConfig.newBuilder()
      private Builder() {}

      // Parent-scoped constructor used when this builder is nested inside
      // another builder (change notifications propagate to the parent).
      private Builder(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
        super(parent);
      }
2600 
      @java.lang.Override
      public Builder clear() {
        // Reset every field to its default and drop all bitField0_ flags
        // (0x4 = included_fields mutable, 0x8 = excluded_fields mutable).
        super.clear();
        bitField0_ = 0;
        instanceType_ = "";
        keyField_ = "";
        includedFields_ = com.google.protobuf.LazyStringArrayList.EMPTY;
        bitField0_ = (bitField0_ & ~0x00000004);
        excludedFields_ = com.google.protobuf.LazyStringArrayList.EMPTY;
        bitField0_ = (bitField0_ & ~0x00000008);
        return this;
      }
2613 
      @java.lang.Override
      public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() {
        // Same descriptor as the message type this builder produces.
        return com.google.cloud.aiplatform.v1.BatchPredictionJobProto
            .internal_static_google_cloud_aiplatform_v1_BatchPredictionJob_InstanceConfig_descriptor;
      }
2619 
      @java.lang.Override
      public com.google.cloud.aiplatform.v1.BatchPredictionJob.InstanceConfig
          getDefaultInstanceForType() {
        // Singleton instance with all fields at their default values.
        return com.google.cloud.aiplatform.v1.BatchPredictionJob.InstanceConfig
            .getDefaultInstance();
      }
2626 
      @java.lang.Override
      public com.google.cloud.aiplatform.v1.BatchPredictionJob.InstanceConfig build() {
        // buildPartial() assembles the message; the isInitialized() check throws
        // if any required field is unset (none exist for this message).
        com.google.cloud.aiplatform.v1.BatchPredictionJob.InstanceConfig result = buildPartial();
        if (!result.isInitialized()) {
          throw newUninitializedMessageException(result);
        }
        return result;
      }
2635 
      @java.lang.Override
      public com.google.cloud.aiplatform.v1.BatchPredictionJob.InstanceConfig buildPartial() {
        // Copy repeated fields first, then scalar fields flagged in bitField0_,
        // then notify the framework that a message was built.
        com.google.cloud.aiplatform.v1.BatchPredictionJob.InstanceConfig result =
            new com.google.cloud.aiplatform.v1.BatchPredictionJob.InstanceConfig(this);
        buildPartialRepeatedFields(result);
        if (bitField0_ != 0) {
          buildPartial0(result);
        }
        onBuilt();
        return result;
      }
2647 
      // Transfers the repeated fields into the result message, freezing each
      // list into an unmodifiable view and clearing the builder's mutability
      // bit so later builder edits cannot alias the built message's lists.
      private void buildPartialRepeatedFields(
          com.google.cloud.aiplatform.v1.BatchPredictionJob.InstanceConfig result) {
        if (((bitField0_ & 0x00000004) != 0)) {
          includedFields_ = includedFields_.getUnmodifiableView();
          bitField0_ = (bitField0_ & ~0x00000004);
        }
        result.includedFields_ = includedFields_;
        if (((bitField0_ & 0x00000008) != 0)) {
          excludedFields_ = excludedFields_.getUnmodifiableView();
          bitField0_ = (bitField0_ & ~0x00000008);
        }
        result.excludedFields_ = excludedFields_;
      }
2661 
      // Copies the scalar string fields that were explicitly set in this
      // builder (bit 0x1 = instance_type, bit 0x2 = key_field) into the result.
      private void buildPartial0(
          com.google.cloud.aiplatform.v1.BatchPredictionJob.InstanceConfig result) {
        int from_bitField0_ = bitField0_;
        if (((from_bitField0_ & 0x00000001) != 0)) {
          result.instanceType_ = instanceType_;
        }
        if (((from_bitField0_ & 0x00000002) != 0)) {
          result.keyField_ = keyField_;
        }
      }
2672 
      // The overrides below delegate straight to GeneratedMessageV3.Builder;
      // they exist so the fluent return type is this concrete Builder.
      @java.lang.Override
      public Builder clone() {
        return super.clone();
      }

      @java.lang.Override
      public Builder setField(
          com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
        return super.setField(field, value);
      }

      @java.lang.Override
      public Builder clearField(com.google.protobuf.Descriptors.FieldDescriptor field) {
        return super.clearField(field);
      }

      @java.lang.Override
      public Builder clearOneof(com.google.protobuf.Descriptors.OneofDescriptor oneof) {
        return super.clearOneof(oneof);
      }

      @java.lang.Override
      public Builder setRepeatedField(
          com.google.protobuf.Descriptors.FieldDescriptor field,
          int index,
          java.lang.Object value) {
        return super.setRepeatedField(field, index, value);
      }

      @java.lang.Override
      public Builder addRepeatedField(
          com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
        return super.addRepeatedField(field, value);
      }
2707 
2708       @java.lang.Override
mergeFrom(com.google.protobuf.Message other)2709       public Builder mergeFrom(com.google.protobuf.Message other) {
2710         if (other instanceof com.google.cloud.aiplatform.v1.BatchPredictionJob.InstanceConfig) {
2711           return mergeFrom(
2712               (com.google.cloud.aiplatform.v1.BatchPredictionJob.InstanceConfig) other);
2713         } else {
2714           super.mergeFrom(other);
2715           return this;
2716         }
2717       }
2718 
      // Merges another InstanceConfig into this builder: non-empty scalar
      // strings overwrite, repeated fields are concatenated (or shared when
      // this builder's list is still empty), and unknown fields are merged.
      public Builder mergeFrom(
          com.google.cloud.aiplatform.v1.BatchPredictionJob.InstanceConfig other) {
        if (other
            == com.google.cloud.aiplatform.v1.BatchPredictionJob.InstanceConfig
                .getDefaultInstance()) return this;
        if (!other.getInstanceType().isEmpty()) {
          instanceType_ = other.instanceType_;
          bitField0_ |= 0x00000001;
          onChanged();
        }
        if (!other.getKeyField().isEmpty()) {
          keyField_ = other.keyField_;
          bitField0_ |= 0x00000002;
          onChanged();
        }
        if (!other.includedFields_.isEmpty()) {
          if (includedFields_.isEmpty()) {
            // Share the other message's (immutable) list; clear our mutable bit.
            includedFields_ = other.includedFields_;
            bitField0_ = (bitField0_ & ~0x00000004);
          } else {
            ensureIncludedFieldsIsMutable();
            includedFields_.addAll(other.includedFields_);
          }
          onChanged();
        }
        if (!other.excludedFields_.isEmpty()) {
          if (excludedFields_.isEmpty()) {
            // Share the other message's (immutable) list; clear our mutable bit.
            excludedFields_ = other.excludedFields_;
            bitField0_ = (bitField0_ & ~0x00000008);
          } else {
            ensureExcludedFieldsIsMutable();
            excludedFields_.addAll(other.excludedFields_);
          }
          onChanged();
        }
        this.mergeUnknownFields(other.getUnknownFields());
        onChanged();
        return this;
      }
2758 
      @java.lang.Override
      public final boolean isInitialized() {
        // No required fields exist, so any state is valid to build.
        return true;
      }
2763 
      @java.lang.Override
      public Builder mergeFrom(
          com.google.protobuf.CodedInputStream input,
          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
          throws java.io.IOException {
        if (extensionRegistry == null) {
          throw new java.lang.NullPointerException();
        }
        // Wire tags are (field_number << 3) | wire_type; 10/18/26/34 are
        // fields 1-4 with the length-delimited wire type (2). Tag 0 means
        // end of input; unrecognized tags go to parseUnknownField.
        try {
          boolean done = false;
          while (!done) {
            int tag = input.readTag();
            switch (tag) {
              case 0:
                done = true;
                break;
              case 10:
                {
                  instanceType_ = input.readStringRequireUtf8();
                  bitField0_ |= 0x00000001;
                  break;
                } // case 10
              case 18:
                {
                  keyField_ = input.readStringRequireUtf8();
                  bitField0_ |= 0x00000002;
                  break;
                } // case 18
              case 26:
                {
                  java.lang.String s = input.readStringRequireUtf8();
                  ensureIncludedFieldsIsMutable();
                  includedFields_.add(s);
                  break;
                } // case 26
              case 34:
                {
                  java.lang.String s = input.readStringRequireUtf8();
                  ensureExcludedFieldsIsMutable();
                  excludedFields_.add(s);
                  break;
                } // case 34
              default:
                {
                  if (!super.parseUnknownField(input, extensionRegistry, tag)) {
                    done = true; // was an endgroup tag
                  }
                  break;
                } // default:
            } // switch (tag)
          } // while (!done)
        } catch (com.google.protobuf.InvalidProtocolBufferException e) {
          throw e.unwrapIOException();
        } finally {
          // Notify listeners even on parse failure, since fields consumed
          // before the error remain set.
          onChanged();
        } // finally
        return this;
      }
2822 
      // Tracks which fields have been explicitly set on this builder:
      // 0x1 = instance_type, 0x2 = key_field, 0x4 = included_fields mutable,
      // 0x8 = excluded_fields mutable (see clear()/buildPartial0()).
      private int bitField0_;

      // Holds either a String or a ByteString; lazily decoded on access.
      private java.lang.Object instanceType_ = "";
2826       /**
2827        *
2828        *
2829        * <pre>
2830        * The format of the instance that the Model accepts. Vertex AI will
2831        * convert compatible
2832        * [batch prediction input instance
2833        * formats][google.cloud.aiplatform.v1.BatchPredictionJob.InputConfig.instances_format]
2834        * to the specified format.
2835        * Supported values are:
2836        * * `object`: Each input is converted to JSON object format.
2837        *     * For `bigquery`, each row is converted to an object.
2838        *     * For `jsonl`, each line of the JSONL input must be an object.
2839        *     * Does not apply to `csv`, `file-list`, `tf-record`, or
2840        *       `tf-record-gzip`.
2841        * * `array`: Each input is converted to JSON array format.
2842        *     * For `bigquery`, each row is converted to an array. The order
2843        *       of columns is determined by the BigQuery column order, unless
2844        *       [included_fields][google.cloud.aiplatform.v1.BatchPredictionJob.InstanceConfig.included_fields]
2845        *       is populated.
2846        *       [included_fields][google.cloud.aiplatform.v1.BatchPredictionJob.InstanceConfig.included_fields]
2847        *       must be populated for specifying field orders.
2848        *     * For `jsonl`, if each line of the JSONL input is an object,
2849        *       [included_fields][google.cloud.aiplatform.v1.BatchPredictionJob.InstanceConfig.included_fields]
2850        *       must be populated for specifying field orders.
2851        *     * Does not apply to `csv`, `file-list`, `tf-record`, or
2852        *       `tf-record-gzip`.
2853        * If not specified, Vertex AI converts the batch prediction input as
2854        * follows:
2855        *  * For `bigquery` and `csv`, the behavior is the same as `array`. The
2856        *    order of columns is the same as defined in the file or table, unless
2857        *    [included_fields][google.cloud.aiplatform.v1.BatchPredictionJob.InstanceConfig.included_fields]
2858        *    is populated.
2859        *  * For `jsonl`, the prediction instance format is determined by
2860        *    each line of the input.
2861        *  * For `tf-record`/`tf-record-gzip`, each record will be converted to
2862        *    an object in the format of `{"b64": &lt;value&gt;}`, where `&lt;value&gt;` is
2863        *    the Base64-encoded string of the content of the record.
2864        *  * For `file-list`, each file in the list will be converted to an
2865        *    object in the format of `{"b64": &lt;value&gt;}`, where `&lt;value&gt;` is
2866        *    the Base64-encoded string of the content of the file.
2867        * </pre>
2868        *
2869        * <code>string instance_type = 1;</code>
2870        *
2871        * @return The instanceType.
2872        */
getInstanceType()2873       public java.lang.String getInstanceType() {
2874         java.lang.Object ref = instanceType_;
2875         if (!(ref instanceof java.lang.String)) {
2876           com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
2877           java.lang.String s = bs.toStringUtf8();
2878           instanceType_ = s;
2879           return s;
2880         } else {
2881           return (java.lang.String) ref;
2882         }
2883       }
2884       /**
2885        *
2886        *
2887        * <pre>
2888        * The format of the instance that the Model accepts. Vertex AI will
2889        * convert compatible
2890        * [batch prediction input instance
2891        * formats][google.cloud.aiplatform.v1.BatchPredictionJob.InputConfig.instances_format]
2892        * to the specified format.
2893        * Supported values are:
2894        * * `object`: Each input is converted to JSON object format.
2895        *     * For `bigquery`, each row is converted to an object.
2896        *     * For `jsonl`, each line of the JSONL input must be an object.
2897        *     * Does not apply to `csv`, `file-list`, `tf-record`, or
2898        *       `tf-record-gzip`.
2899        * * `array`: Each input is converted to JSON array format.
2900        *     * For `bigquery`, each row is converted to an array. The order
2901        *       of columns is determined by the BigQuery column order, unless
2902        *       [included_fields][google.cloud.aiplatform.v1.BatchPredictionJob.InstanceConfig.included_fields]
2903        *       is populated.
2904        *       [included_fields][google.cloud.aiplatform.v1.BatchPredictionJob.InstanceConfig.included_fields]
2905        *       must be populated for specifying field orders.
2906        *     * For `jsonl`, if each line of the JSONL input is an object,
2907        *       [included_fields][google.cloud.aiplatform.v1.BatchPredictionJob.InstanceConfig.included_fields]
2908        *       must be populated for specifying field orders.
2909        *     * Does not apply to `csv`, `file-list`, `tf-record`, or
2910        *       `tf-record-gzip`.
2911        * If not specified, Vertex AI converts the batch prediction input as
2912        * follows:
2913        *  * For `bigquery` and `csv`, the behavior is the same as `array`. The
2914        *    order of columns is the same as defined in the file or table, unless
2915        *    [included_fields][google.cloud.aiplatform.v1.BatchPredictionJob.InstanceConfig.included_fields]
2916        *    is populated.
2917        *  * For `jsonl`, the prediction instance format is determined by
2918        *    each line of the input.
2919        *  * For `tf-record`/`tf-record-gzip`, each record will be converted to
2920        *    an object in the format of `{"b64": &lt;value&gt;}`, where `&lt;value&gt;` is
2921        *    the Base64-encoded string of the content of the record.
2922        *  * For `file-list`, each file in the list will be converted to an
2923        *    object in the format of `{"b64": &lt;value&gt;}`, where `&lt;value&gt;` is
2924        *    the Base64-encoded string of the content of the file.
2925        * </pre>
2926        *
2927        * <code>string instance_type = 1;</code>
2928        *
2929        * @return The bytes for instanceType.
2930        */
getInstanceTypeBytes()2931       public com.google.protobuf.ByteString getInstanceTypeBytes() {
2932         java.lang.Object ref = instanceType_;
2933         if (ref instanceof String) {
2934           com.google.protobuf.ByteString b =
2935               com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
2936           instanceType_ = b;
2937           return b;
2938         } else {
2939           return (com.google.protobuf.ByteString) ref;
2940         }
2941       }
2942       /**
2943        *
2944        *
2945        * <pre>
2946        * The format of the instance that the Model accepts. Vertex AI will
2947        * convert compatible
2948        * [batch prediction input instance
2949        * formats][google.cloud.aiplatform.v1.BatchPredictionJob.InputConfig.instances_format]
2950        * to the specified format.
2951        * Supported values are:
2952        * * `object`: Each input is converted to JSON object format.
2953        *     * For `bigquery`, each row is converted to an object.
2954        *     * For `jsonl`, each line of the JSONL input must be an object.
2955        *     * Does not apply to `csv`, `file-list`, `tf-record`, or
2956        *       `tf-record-gzip`.
2957        * * `array`: Each input is converted to JSON array format.
2958        *     * For `bigquery`, each row is converted to an array. The order
2959        *       of columns is determined by the BigQuery column order, unless
2960        *       [included_fields][google.cloud.aiplatform.v1.BatchPredictionJob.InstanceConfig.included_fields]
2961        *       is populated.
2962        *       [included_fields][google.cloud.aiplatform.v1.BatchPredictionJob.InstanceConfig.included_fields]
2963        *       must be populated for specifying field orders.
2964        *     * For `jsonl`, if each line of the JSONL input is an object,
2965        *       [included_fields][google.cloud.aiplatform.v1.BatchPredictionJob.InstanceConfig.included_fields]
2966        *       must be populated for specifying field orders.
2967        *     * Does not apply to `csv`, `file-list`, `tf-record`, or
2968        *       `tf-record-gzip`.
2969        * If not specified, Vertex AI converts the batch prediction input as
2970        * follows:
2971        *  * For `bigquery` and `csv`, the behavior is the same as `array`. The
2972        *    order of columns is the same as defined in the file or table, unless
2973        *    [included_fields][google.cloud.aiplatform.v1.BatchPredictionJob.InstanceConfig.included_fields]
2974        *    is populated.
2975        *  * For `jsonl`, the prediction instance format is determined by
2976        *    each line of the input.
2977        *  * For `tf-record`/`tf-record-gzip`, each record will be converted to
2978        *    an object in the format of `{"b64": &lt;value&gt;}`, where `&lt;value&gt;` is
2979        *    the Base64-encoded string of the content of the record.
2980        *  * For `file-list`, each file in the list will be converted to an
2981        *    object in the format of `{"b64": &lt;value&gt;}`, where `&lt;value&gt;` is
2982        *    the Base64-encoded string of the content of the file.
2983        * </pre>
2984        *
2985        * <code>string instance_type = 1;</code>
2986        *
2987        * @param value The instanceType to set.
2988        * @return This builder for chaining.
2989        */
setInstanceType(java.lang.String value)2990       public Builder setInstanceType(java.lang.String value) {
2991         if (value == null) {
2992           throw new NullPointerException();
2993         }
2994         instanceType_ = value;
2995         bitField0_ |= 0x00000001;
2996         onChanged();
2997         return this;
2998       }
2999       /**
3000        *
3001        *
3002        * <pre>
3003        * The format of the instance that the Model accepts. Vertex AI will
3004        * convert compatible
3005        * [batch prediction input instance
3006        * formats][google.cloud.aiplatform.v1.BatchPredictionJob.InputConfig.instances_format]
3007        * to the specified format.
3008        * Supported values are:
3009        * * `object`: Each input is converted to JSON object format.
3010        *     * For `bigquery`, each row is converted to an object.
3011        *     * For `jsonl`, each line of the JSONL input must be an object.
3012        *     * Does not apply to `csv`, `file-list`, `tf-record`, or
3013        *       `tf-record-gzip`.
3014        * * `array`: Each input is converted to JSON array format.
3015        *     * For `bigquery`, each row is converted to an array. The order
3016        *       of columns is determined by the BigQuery column order, unless
3017        *       [included_fields][google.cloud.aiplatform.v1.BatchPredictionJob.InstanceConfig.included_fields]
3018        *       is populated.
3019        *       [included_fields][google.cloud.aiplatform.v1.BatchPredictionJob.InstanceConfig.included_fields]
3020        *       must be populated for specifying field orders.
3021        *     * For `jsonl`, if each line of the JSONL input is an object,
3022        *       [included_fields][google.cloud.aiplatform.v1.BatchPredictionJob.InstanceConfig.included_fields]
3023        *       must be populated for specifying field orders.
3024        *     * Does not apply to `csv`, `file-list`, `tf-record`, or
3025        *       `tf-record-gzip`.
3026        * If not specified, Vertex AI converts the batch prediction input as
3027        * follows:
3028        *  * For `bigquery` and `csv`, the behavior is the same as `array`. The
3029        *    order of columns is the same as defined in the file or table, unless
3030        *    [included_fields][google.cloud.aiplatform.v1.BatchPredictionJob.InstanceConfig.included_fields]
3031        *    is populated.
3032        *  * For `jsonl`, the prediction instance format is determined by
3033        *    each line of the input.
3034        *  * For `tf-record`/`tf-record-gzip`, each record will be converted to
3035        *    an object in the format of `{"b64": &lt;value&gt;}`, where `&lt;value&gt;` is
3036        *    the Base64-encoded string of the content of the record.
3037        *  * For `file-list`, each file in the list will be converted to an
3038        *    object in the format of `{"b64": &lt;value&gt;}`, where `&lt;value&gt;` is
3039        *    the Base64-encoded string of the content of the file.
3040        * </pre>
3041        *
3042        * <code>string instance_type = 1;</code>
3043        *
3044        * @return This builder for chaining.
3045        */
clearInstanceType()3046       public Builder clearInstanceType() {
3047         instanceType_ = getDefaultInstance().getInstanceType();
3048         bitField0_ = (bitField0_ & ~0x00000001);
3049         onChanged();
3050         return this;
3051       }
3052       /**
3053        *
3054        *
3055        * <pre>
3056        * The format of the instance that the Model accepts. Vertex AI will
3057        * convert compatible
3058        * [batch prediction input instance
3059        * formats][google.cloud.aiplatform.v1.BatchPredictionJob.InputConfig.instances_format]
3060        * to the specified format.
3061        * Supported values are:
3062        * * `object`: Each input is converted to JSON object format.
3063        *     * For `bigquery`, each row is converted to an object.
3064        *     * For `jsonl`, each line of the JSONL input must be an object.
3065        *     * Does not apply to `csv`, `file-list`, `tf-record`, or
3066        *       `tf-record-gzip`.
3067        * * `array`: Each input is converted to JSON array format.
3068        *     * For `bigquery`, each row is converted to an array. The order
3069        *       of columns is determined by the BigQuery column order, unless
3070        *       [included_fields][google.cloud.aiplatform.v1.BatchPredictionJob.InstanceConfig.included_fields]
3071        *       is populated.
3072        *       [included_fields][google.cloud.aiplatform.v1.BatchPredictionJob.InstanceConfig.included_fields]
3073        *       must be populated for specifying field orders.
3074        *     * For `jsonl`, if each line of the JSONL input is an object,
3075        *       [included_fields][google.cloud.aiplatform.v1.BatchPredictionJob.InstanceConfig.included_fields]
3076        *       must be populated for specifying field orders.
3077        *     * Does not apply to `csv`, `file-list`, `tf-record`, or
3078        *       `tf-record-gzip`.
3079        * If not specified, Vertex AI converts the batch prediction input as
3080        * follows:
3081        *  * For `bigquery` and `csv`, the behavior is the same as `array`. The
3082        *    order of columns is the same as defined in the file or table, unless
3083        *    [included_fields][google.cloud.aiplatform.v1.BatchPredictionJob.InstanceConfig.included_fields]
3084        *    is populated.
3085        *  * For `jsonl`, the prediction instance format is determined by
3086        *    each line of the input.
3087        *  * For `tf-record`/`tf-record-gzip`, each record will be converted to
3088        *    an object in the format of `{"b64": &lt;value&gt;}`, where `&lt;value&gt;` is
3089        *    the Base64-encoded string of the content of the record.
3090        *  * For `file-list`, each file in the list will be converted to an
3091        *    object in the format of `{"b64": &lt;value&gt;}`, where `&lt;value&gt;` is
3092        *    the Base64-encoded string of the content of the file.
3093        * </pre>
3094        *
3095        * <code>string instance_type = 1;</code>
3096        *
3097        * @param value The bytes for instanceType to set.
3098        * @return This builder for chaining.
3099        */
setInstanceTypeBytes(com.google.protobuf.ByteString value)3100       public Builder setInstanceTypeBytes(com.google.protobuf.ByteString value) {
3101         if (value == null) {
3102           throw new NullPointerException();
3103         }
3104         checkByteStringIsUtf8(value);
3105         instanceType_ = value;
3106         bitField0_ |= 0x00000001;
3107         onChanged();
3108         return this;
3109       }
3110 
      // Backing store for key_field: holds the decoded String, or the raw
      // ByteString as parsed from the wire until first String access.
      private java.lang.Object keyField_ = "";
3112       /**
3113        *
3114        *
3115        * <pre>
3116        * The name of the field that is considered as a key.
3117        * The values identified by the key field is not included in the transformed
3118        * instances that is sent to the Model. This is similar to
3119        * specifying this name of the field in
3120        * [excluded_fields][google.cloud.aiplatform.v1.BatchPredictionJob.InstanceConfig.excluded_fields].
3121        * In addition, the batch prediction output will not include the instances.
3122        * Instead the output will only include the value of the key field, in a
3123        * field named `key` in the output:
3124        *  * For `jsonl` output format, the output will have a `key` field
3125        *    instead of the `instance` field.
3126        *  * For `csv`/`bigquery` output format, the output will have have a `key`
3127        *    column instead of the instance feature columns.
3128        * The input must be JSONL with objects at each line, CSV, BigQuery
3129        * or TfRecord.
3130        * </pre>
3131        *
3132        * <code>string key_field = 2;</code>
3133        *
3134        * @return The keyField.
3135        */
getKeyField()3136       public java.lang.String getKeyField() {
3137         java.lang.Object ref = keyField_;
3138         if (!(ref instanceof java.lang.String)) {
3139           com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
3140           java.lang.String s = bs.toStringUtf8();
3141           keyField_ = s;
3142           return s;
3143         } else {
3144           return (java.lang.String) ref;
3145         }
3146       }
3147       /**
3148        *
3149        *
3150        * <pre>
3151        * The name of the field that is considered as a key.
3152        * The values identified by the key field is not included in the transformed
3153        * instances that is sent to the Model. This is similar to
3154        * specifying this name of the field in
3155        * [excluded_fields][google.cloud.aiplatform.v1.BatchPredictionJob.InstanceConfig.excluded_fields].
3156        * In addition, the batch prediction output will not include the instances.
3157        * Instead the output will only include the value of the key field, in a
3158        * field named `key` in the output:
3159        *  * For `jsonl` output format, the output will have a `key` field
3160        *    instead of the `instance` field.
3161        *  * For `csv`/`bigquery` output format, the output will have have a `key`
3162        *    column instead of the instance feature columns.
3163        * The input must be JSONL with objects at each line, CSV, BigQuery
3164        * or TfRecord.
3165        * </pre>
3166        *
3167        * <code>string key_field = 2;</code>
3168        *
3169        * @return The bytes for keyField.
3170        */
getKeyFieldBytes()3171       public com.google.protobuf.ByteString getKeyFieldBytes() {
3172         java.lang.Object ref = keyField_;
3173         if (ref instanceof String) {
3174           com.google.protobuf.ByteString b =
3175               com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
3176           keyField_ = b;
3177           return b;
3178         } else {
3179           return (com.google.protobuf.ByteString) ref;
3180         }
3181       }
3182       /**
3183        *
3184        *
3185        * <pre>
3186        * The name of the field that is considered as a key.
3187        * The values identified by the key field is not included in the transformed
3188        * instances that is sent to the Model. This is similar to
3189        * specifying this name of the field in
3190        * [excluded_fields][google.cloud.aiplatform.v1.BatchPredictionJob.InstanceConfig.excluded_fields].
3191        * In addition, the batch prediction output will not include the instances.
3192        * Instead the output will only include the value of the key field, in a
3193        * field named `key` in the output:
3194        *  * For `jsonl` output format, the output will have a `key` field
3195        *    instead of the `instance` field.
3196        *  * For `csv`/`bigquery` output format, the output will have have a `key`
3197        *    column instead of the instance feature columns.
3198        * The input must be JSONL with objects at each line, CSV, BigQuery
3199        * or TfRecord.
3200        * </pre>
3201        *
3202        * <code>string key_field = 2;</code>
3203        *
3204        * @param value The keyField to set.
3205        * @return This builder for chaining.
3206        */
setKeyField(java.lang.String value)3207       public Builder setKeyField(java.lang.String value) {
3208         if (value == null) {
3209           throw new NullPointerException();
3210         }
3211         keyField_ = value;
3212         bitField0_ |= 0x00000002;
3213         onChanged();
3214         return this;
3215       }
3216       /**
3217        *
3218        *
3219        * <pre>
3220        * The name of the field that is considered as a key.
3221        * The values identified by the key field is not included in the transformed
3222        * instances that is sent to the Model. This is similar to
3223        * specifying this name of the field in
3224        * [excluded_fields][google.cloud.aiplatform.v1.BatchPredictionJob.InstanceConfig.excluded_fields].
3225        * In addition, the batch prediction output will not include the instances.
3226        * Instead the output will only include the value of the key field, in a
3227        * field named `key` in the output:
3228        *  * For `jsonl` output format, the output will have a `key` field
3229        *    instead of the `instance` field.
3230        *  * For `csv`/`bigquery` output format, the output will have have a `key`
3231        *    column instead of the instance feature columns.
3232        * The input must be JSONL with objects at each line, CSV, BigQuery
3233        * or TfRecord.
3234        * </pre>
3235        *
3236        * <code>string key_field = 2;</code>
3237        *
3238        * @return This builder for chaining.
3239        */
clearKeyField()3240       public Builder clearKeyField() {
3241         keyField_ = getDefaultInstance().getKeyField();
3242         bitField0_ = (bitField0_ & ~0x00000002);
3243         onChanged();
3244         return this;
3245       }
3246       /**
3247        *
3248        *
3249        * <pre>
3250        * The name of the field that is considered as a key.
3251        * The values identified by the key field is not included in the transformed
3252        * instances that is sent to the Model. This is similar to
3253        * specifying this name of the field in
3254        * [excluded_fields][google.cloud.aiplatform.v1.BatchPredictionJob.InstanceConfig.excluded_fields].
3255        * In addition, the batch prediction output will not include the instances.
3256        * Instead the output will only include the value of the key field, in a
3257        * field named `key` in the output:
3258        *  * For `jsonl` output format, the output will have a `key` field
3259        *    instead of the `instance` field.
3260        *  * For `csv`/`bigquery` output format, the output will have have a `key`
3261        *    column instead of the instance feature columns.
3262        * The input must be JSONL with objects at each line, CSV, BigQuery
3263        * or TfRecord.
3264        * </pre>
3265        *
3266        * <code>string key_field = 2;</code>
3267        *
3268        * @param value The bytes for keyField to set.
3269        * @return This builder for chaining.
3270        */
setKeyFieldBytes(com.google.protobuf.ByteString value)3271       public Builder setKeyFieldBytes(com.google.protobuf.ByteString value) {
3272         if (value == null) {
3273           throw new NullPointerException();
3274         }
3275         checkByteStringIsUtf8(value);
3276         keyField_ = value;
3277         bitField0_ |= 0x00000002;
3278         onChanged();
3279         return this;
3280       }
3281 
      // Backing list for included_fields; starts as the shared immutable EMPTY
      // list and is replaced by a mutable copy on first modification (see
      // ensureIncludedFieldsIsMutable()).
      private com.google.protobuf.LazyStringList includedFields_ =
          com.google.protobuf.LazyStringArrayList.EMPTY;
3284 
ensureIncludedFieldsIsMutable()3285       private void ensureIncludedFieldsIsMutable() {
3286         if (!((bitField0_ & 0x00000004) != 0)) {
3287           includedFields_ = new com.google.protobuf.LazyStringArrayList(includedFields_);
3288           bitField0_ |= 0x00000004;
3289         }
3290       }
3291       /**
3292        *
3293        *
3294        * <pre>
3295        * Fields that will be included in the prediction instance that is
3296        * sent to the Model.
3297        * If
3298        * [instance_type][google.cloud.aiplatform.v1.BatchPredictionJob.InstanceConfig.instance_type]
3299        * is `array`, the order of field names in included_fields also determines
3300        * the order of the values in the array.
3301        * When included_fields is populated,
3302        * [excluded_fields][google.cloud.aiplatform.v1.BatchPredictionJob.InstanceConfig.excluded_fields]
3303        * must be empty.
3304        * The input must be JSONL with objects at each line, CSV, BigQuery
3305        * or TfRecord.
3306        * </pre>
3307        *
3308        * <code>repeated string included_fields = 3;</code>
3309        *
3310        * @return A list containing the includedFields.
3311        */
getIncludedFieldsList()3312       public com.google.protobuf.ProtocolStringList getIncludedFieldsList() {
3313         return includedFields_.getUnmodifiableView();
3314       }
3315       /**
3316        *
3317        *
3318        * <pre>
3319        * Fields that will be included in the prediction instance that is
3320        * sent to the Model.
3321        * If
3322        * [instance_type][google.cloud.aiplatform.v1.BatchPredictionJob.InstanceConfig.instance_type]
3323        * is `array`, the order of field names in included_fields also determines
3324        * the order of the values in the array.
3325        * When included_fields is populated,
3326        * [excluded_fields][google.cloud.aiplatform.v1.BatchPredictionJob.InstanceConfig.excluded_fields]
3327        * must be empty.
3328        * The input must be JSONL with objects at each line, CSV, BigQuery
3329        * or TfRecord.
3330        * </pre>
3331        *
3332        * <code>repeated string included_fields = 3;</code>
3333        *
3334        * @return The count of includedFields.
3335        */
getIncludedFieldsCount()3336       public int getIncludedFieldsCount() {
3337         return includedFields_.size();
3338       }
3339       /**
3340        *
3341        *
3342        * <pre>
3343        * Fields that will be included in the prediction instance that is
3344        * sent to the Model.
3345        * If
3346        * [instance_type][google.cloud.aiplatform.v1.BatchPredictionJob.InstanceConfig.instance_type]
3347        * is `array`, the order of field names in included_fields also determines
3348        * the order of the values in the array.
3349        * When included_fields is populated,
3350        * [excluded_fields][google.cloud.aiplatform.v1.BatchPredictionJob.InstanceConfig.excluded_fields]
3351        * must be empty.
3352        * The input must be JSONL with objects at each line, CSV, BigQuery
3353        * or TfRecord.
3354        * </pre>
3355        *
3356        * <code>repeated string included_fields = 3;</code>
3357        *
3358        * @param index The index of the element to return.
3359        * @return The includedFields at the given index.
3360        */
getIncludedFields(int index)3361       public java.lang.String getIncludedFields(int index) {
3362         return includedFields_.get(index);
3363       }
3364       /**
3365        *
3366        *
3367        * <pre>
3368        * Fields that will be included in the prediction instance that is
3369        * sent to the Model.
3370        * If
3371        * [instance_type][google.cloud.aiplatform.v1.BatchPredictionJob.InstanceConfig.instance_type]
3372        * is `array`, the order of field names in included_fields also determines
3373        * the order of the values in the array.
3374        * When included_fields is populated,
3375        * [excluded_fields][google.cloud.aiplatform.v1.BatchPredictionJob.InstanceConfig.excluded_fields]
3376        * must be empty.
3377        * The input must be JSONL with objects at each line, CSV, BigQuery
3378        * or TfRecord.
3379        * </pre>
3380        *
3381        * <code>repeated string included_fields = 3;</code>
3382        *
3383        * @param index The index of the value to return.
3384        * @return The bytes of the includedFields at the given index.
3385        */
getIncludedFieldsBytes(int index)3386       public com.google.protobuf.ByteString getIncludedFieldsBytes(int index) {
3387         return includedFields_.getByteString(index);
3388       }
3389       /**
3390        *
3391        *
3392        * <pre>
3393        * Fields that will be included in the prediction instance that is
3394        * sent to the Model.
3395        * If
3396        * [instance_type][google.cloud.aiplatform.v1.BatchPredictionJob.InstanceConfig.instance_type]
3397        * is `array`, the order of field names in included_fields also determines
3398        * the order of the values in the array.
3399        * When included_fields is populated,
3400        * [excluded_fields][google.cloud.aiplatform.v1.BatchPredictionJob.InstanceConfig.excluded_fields]
3401        * must be empty.
3402        * The input must be JSONL with objects at each line, CSV, BigQuery
3403        * or TfRecord.
3404        * </pre>
3405        *
3406        * <code>repeated string included_fields = 3;</code>
3407        *
3408        * @param index The index to set the value at.
3409        * @param value The includedFields to set.
3410        * @return This builder for chaining.
3411        */
setIncludedFields(int index, java.lang.String value)3412       public Builder setIncludedFields(int index, java.lang.String value) {
3413         if (value == null) {
3414           throw new NullPointerException();
3415         }
3416         ensureIncludedFieldsIsMutable();
3417         includedFields_.set(index, value);
3418         onChanged();
3419         return this;
3420       }
3421       /**
3422        *
3423        *
3424        * <pre>
3425        * Fields that will be included in the prediction instance that is
3426        * sent to the Model.
3427        * If
3428        * [instance_type][google.cloud.aiplatform.v1.BatchPredictionJob.InstanceConfig.instance_type]
3429        * is `array`, the order of field names in included_fields also determines
3430        * the order of the values in the array.
3431        * When included_fields is populated,
3432        * [excluded_fields][google.cloud.aiplatform.v1.BatchPredictionJob.InstanceConfig.excluded_fields]
3433        * must be empty.
3434        * The input must be JSONL with objects at each line, CSV, BigQuery
3435        * or TfRecord.
3436        * </pre>
3437        *
3438        * <code>repeated string included_fields = 3;</code>
3439        *
3440        * @param value The includedFields to add.
3441        * @return This builder for chaining.
3442        */
addIncludedFields(java.lang.String value)3443       public Builder addIncludedFields(java.lang.String value) {
3444         if (value == null) {
3445           throw new NullPointerException();
3446         }
3447         ensureIncludedFieldsIsMutable();
3448         includedFields_.add(value);
3449         onChanged();
3450         return this;
3451       }
3452       /**
3453        *
3454        *
3455        * <pre>
3456        * Fields that will be included in the prediction instance that is
3457        * sent to the Model.
3458        * If
3459        * [instance_type][google.cloud.aiplatform.v1.BatchPredictionJob.InstanceConfig.instance_type]
3460        * is `array`, the order of field names in included_fields also determines
3461        * the order of the values in the array.
3462        * When included_fields is populated,
3463        * [excluded_fields][google.cloud.aiplatform.v1.BatchPredictionJob.InstanceConfig.excluded_fields]
3464        * must be empty.
3465        * The input must be JSONL with objects at each line, CSV, BigQuery
3466        * or TfRecord.
3467        * </pre>
3468        *
3469        * <code>repeated string included_fields = 3;</code>
3470        *
3471        * @param values The includedFields to add.
3472        * @return This builder for chaining.
3473        */
addAllIncludedFields(java.lang.Iterable<java.lang.String> values)3474       public Builder addAllIncludedFields(java.lang.Iterable<java.lang.String> values) {
3475         ensureIncludedFieldsIsMutable();
3476         com.google.protobuf.AbstractMessageLite.Builder.addAll(values, includedFields_);
3477         onChanged();
3478         return this;
3479       }
3480       /**
3481        *
3482        *
3483        * <pre>
3484        * Fields that will be included in the prediction instance that is
3485        * sent to the Model.
3486        * If
3487        * [instance_type][google.cloud.aiplatform.v1.BatchPredictionJob.InstanceConfig.instance_type]
3488        * is `array`, the order of field names in included_fields also determines
3489        * the order of the values in the array.
3490        * When included_fields is populated,
3491        * [excluded_fields][google.cloud.aiplatform.v1.BatchPredictionJob.InstanceConfig.excluded_fields]
3492        * must be empty.
3493        * The input must be JSONL with objects at each line, CSV, BigQuery
3494        * or TfRecord.
3495        * </pre>
3496        *
3497        * <code>repeated string included_fields = 3;</code>
3498        *
3499        * @return This builder for chaining.
3500        */
      // Resets `included_fields` to empty and clears its has-bit (0x04) so the
      // shared EMPTY sentinel is never mutated by later adds.
      public Builder clearIncludedFields() {
        includedFields_ = com.google.protobuf.LazyStringArrayList.EMPTY;
        bitField0_ = (bitField0_ & ~0x00000004);
        onChanged();
        return this;
      }
3507       /**
3508        *
3509        *
3510        * <pre>
3511        * Fields that will be included in the prediction instance that is
3512        * sent to the Model.
3513        * If
3514        * [instance_type][google.cloud.aiplatform.v1.BatchPredictionJob.InstanceConfig.instance_type]
3515        * is `array`, the order of field names in included_fields also determines
3516        * the order of the values in the array.
3517        * When included_fields is populated,
3518        * [excluded_fields][google.cloud.aiplatform.v1.BatchPredictionJob.InstanceConfig.excluded_fields]
3519        * must be empty.
3520        * The input must be JSONL with objects at each line, CSV, BigQuery
3521        * or TfRecord.
3522        * </pre>
3523        *
3524        * <code>repeated string included_fields = 3;</code>
3525        *
3526        * @param value The bytes of the includedFields to add.
3527        * @return This builder for chaining.
3528        */
      // Appends one value, given as raw bytes, to the repeated `included_fields` field.
      public Builder addIncludedFieldsBytes(com.google.protobuf.ByteString value) {
        if (value == null) {
          throw new NullPointerException();
        }
        // Proto3 strings must be valid UTF-8; reject malformed bytes up front.
        checkByteStringIsUtf8(value);
        ensureIncludedFieldsIsMutable();
        includedFields_.add(value);
        onChanged();
        return this;
      }
3539 
      // Backing storage for the repeated `excluded_fields` field (proto field 4).
      // Starts as the shared immutable EMPTY sentinel until first mutation.
      private com.google.protobuf.LazyStringList excludedFields_ =
          com.google.protobuf.LazyStringArrayList.EMPTY;

      // Copy-on-write guard: bit 0x08 of bitField0_ records whether this builder
      // already owns a private mutable copy of excludedFields_.
      private void ensureExcludedFieldsIsMutable() {
        if (!((bitField0_ & 0x00000008) != 0)) {
          excludedFields_ = new com.google.protobuf.LazyStringArrayList(excludedFields_);
          bitField0_ |= 0x00000008;
        }
      }
3549       /**
3550        *
3551        *
3552        * <pre>
3553        * Fields that will be excluded in the prediction instance that is
3554        * sent to the Model.
3555        * Excluded will be attached to the batch prediction output if
3556        * [key_field][google.cloud.aiplatform.v1.BatchPredictionJob.InstanceConfig.key_field]
3557        * is not specified.
3558        * When excluded_fields is populated,
3559        * [included_fields][google.cloud.aiplatform.v1.BatchPredictionJob.InstanceConfig.included_fields]
3560        * must be empty.
3561        * The input must be JSONL with objects at each line, CSV, BigQuery
3562        * or TfRecord.
3563        * </pre>
3564        *
3565        * <code>repeated string excluded_fields = 4;</code>
3566        *
3567        * @return A list containing the excludedFields.
3568        */
      // Returns a read-only view of `excluded_fields`; callers cannot mutate builder state through it.
      public com.google.protobuf.ProtocolStringList getExcludedFieldsList() {
        return excludedFields_.getUnmodifiableView();
      }
3572       /**
3573        *
3574        *
3575        * <pre>
3576        * Fields that will be excluded in the prediction instance that is
3577        * sent to the Model.
3578        * Excluded will be attached to the batch prediction output if
3579        * [key_field][google.cloud.aiplatform.v1.BatchPredictionJob.InstanceConfig.key_field]
3580        * is not specified.
3581        * When excluded_fields is populated,
3582        * [included_fields][google.cloud.aiplatform.v1.BatchPredictionJob.InstanceConfig.included_fields]
3583        * must be empty.
3584        * The input must be JSONL with objects at each line, CSV, BigQuery
3585        * or TfRecord.
3586        * </pre>
3587        *
3588        * <code>repeated string excluded_fields = 4;</code>
3589        *
3590        * @return The count of excludedFields.
3591        */
      // Number of elements currently in the repeated `excluded_fields` field.
      public int getExcludedFieldsCount() {
        return excludedFields_.size();
      }
3595       /**
3596        *
3597        *
3598        * <pre>
3599        * Fields that will be excluded in the prediction instance that is
3600        * sent to the Model.
3601        * Excluded will be attached to the batch prediction output if
3602        * [key_field][google.cloud.aiplatform.v1.BatchPredictionJob.InstanceConfig.key_field]
3603        * is not specified.
3604        * When excluded_fields is populated,
3605        * [included_fields][google.cloud.aiplatform.v1.BatchPredictionJob.InstanceConfig.included_fields]
3606        * must be empty.
3607        * The input must be JSONL with objects at each line, CSV, BigQuery
3608        * or TfRecord.
3609        * </pre>
3610        *
3611        * <code>repeated string excluded_fields = 4;</code>
3612        *
3613        * @param index The index of the element to return.
3614        * @return The excludedFields at the given index.
3615        */
      // Element of `excluded_fields` at `index`; LazyStringList decodes bytes to String lazily.
      public java.lang.String getExcludedFields(int index) {
        return excludedFields_.get(index);
      }
3619       /**
3620        *
3621        *
3622        * <pre>
3623        * Fields that will be excluded in the prediction instance that is
3624        * sent to the Model.
3625        * Excluded will be attached to the batch prediction output if
3626        * [key_field][google.cloud.aiplatform.v1.BatchPredictionJob.InstanceConfig.key_field]
3627        * is not specified.
3628        * When excluded_fields is populated,
3629        * [included_fields][google.cloud.aiplatform.v1.BatchPredictionJob.InstanceConfig.included_fields]
3630        * must be empty.
3631        * The input must be JSONL with objects at each line, CSV, BigQuery
3632        * or TfRecord.
3633        * </pre>
3634        *
3635        * <code>repeated string excluded_fields = 4;</code>
3636        *
3637        * @param index The index of the value to return.
3638        * @return The bytes of the excludedFields at the given index.
3639        */
      // Raw UTF-8 bytes of the `excluded_fields` element at `index`.
      public com.google.protobuf.ByteString getExcludedFieldsBytes(int index) {
        return excludedFields_.getByteString(index);
      }
3643       /**
3644        *
3645        *
3646        * <pre>
3647        * Fields that will be excluded in the prediction instance that is
3648        * sent to the Model.
3649        * Excluded will be attached to the batch prediction output if
3650        * [key_field][google.cloud.aiplatform.v1.BatchPredictionJob.InstanceConfig.key_field]
3651        * is not specified.
3652        * When excluded_fields is populated,
3653        * [included_fields][google.cloud.aiplatform.v1.BatchPredictionJob.InstanceConfig.included_fields]
3654        * must be empty.
3655        * The input must be JSONL with objects at each line, CSV, BigQuery
3656        * or TfRecord.
3657        * </pre>
3658        *
3659        * <code>repeated string excluded_fields = 4;</code>
3660        *
3661        * @param index The index to set the value at.
3662        * @param value The excludedFields to set.
3663        * @return This builder for chaining.
3664        */
      // Replaces the `excluded_fields` element at `index` with `value`.
      public Builder setExcludedFields(int index, java.lang.String value) {
        if (value == null) {
          throw new NullPointerException();
        }
        ensureExcludedFieldsIsMutable();
        // Throws IndexOutOfBoundsException for an invalid index, as List.set does.
        excludedFields_.set(index, value);
        onChanged();
        return this;
      }
3674       /**
3675        *
3676        *
3677        * <pre>
3678        * Fields that will be excluded in the prediction instance that is
3679        * sent to the Model.
3680        * Excluded will be attached to the batch prediction output if
3681        * [key_field][google.cloud.aiplatform.v1.BatchPredictionJob.InstanceConfig.key_field]
3682        * is not specified.
3683        * When excluded_fields is populated,
3684        * [included_fields][google.cloud.aiplatform.v1.BatchPredictionJob.InstanceConfig.included_fields]
3685        * must be empty.
3686        * The input must be JSONL with objects at each line, CSV, BigQuery
3687        * or TfRecord.
3688        * </pre>
3689        *
3690        * <code>repeated string excluded_fields = 4;</code>
3691        *
3692        * @param value The excludedFields to add.
3693        * @return This builder for chaining.
3694        */
      // Appends one value to the repeated `excluded_fields` field (proto field 4).
      public Builder addExcludedFields(java.lang.String value) {
        if (value == null) {
          throw new NullPointerException();
        }
        // Copy-on-write: ensure this builder owns a mutable list before appending.
        ensureExcludedFieldsIsMutable();
        excludedFields_.add(value);
        onChanged();
        return this;
      }
3704       /**
3705        *
3706        *
3707        * <pre>
3708        * Fields that will be excluded in the prediction instance that is
3709        * sent to the Model.
3710        * Excluded will be attached to the batch prediction output if
3711        * [key_field][google.cloud.aiplatform.v1.BatchPredictionJob.InstanceConfig.key_field]
3712        * is not specified.
3713        * When excluded_fields is populated,
3714        * [included_fields][google.cloud.aiplatform.v1.BatchPredictionJob.InstanceConfig.included_fields]
3715        * must be empty.
3716        * The input must be JSONL with objects at each line, CSV, BigQuery
3717        * or TfRecord.
3718        * </pre>
3719        *
3720        * <code>repeated string excluded_fields = 4;</code>
3721        *
3722        * @param values The excludedFields to add.
3723        * @return This builder for chaining.
3724        */
      // Appends every value from `values` to the repeated `excluded_fields` field.
      public Builder addAllExcludedFields(java.lang.Iterable<java.lang.String> values) {
        ensureExcludedFieldsIsMutable();
        // Shared protobuf helper: null-checks each element while copying into the list.
        com.google.protobuf.AbstractMessageLite.Builder.addAll(values, excludedFields_);
        onChanged();
        return this;
      }
3731       /**
3732        *
3733        *
3734        * <pre>
3735        * Fields that will be excluded in the prediction instance that is
3736        * sent to the Model.
3737        * Excluded will be attached to the batch prediction output if
3738        * [key_field][google.cloud.aiplatform.v1.BatchPredictionJob.InstanceConfig.key_field]
3739        * is not specified.
3740        * When excluded_fields is populated,
3741        * [included_fields][google.cloud.aiplatform.v1.BatchPredictionJob.InstanceConfig.included_fields]
3742        * must be empty.
3743        * The input must be JSONL with objects at each line, CSV, BigQuery
3744        * or TfRecord.
3745        * </pre>
3746        *
3747        * <code>repeated string excluded_fields = 4;</code>
3748        *
3749        * @return This builder for chaining.
3750        */
      // Resets `excluded_fields` to empty and clears its has-bit (0x08) so the
      // shared EMPTY sentinel is never mutated by later adds.
      public Builder clearExcludedFields() {
        excludedFields_ = com.google.protobuf.LazyStringArrayList.EMPTY;
        bitField0_ = (bitField0_ & ~0x00000008);
        onChanged();
        return this;
      }
3757       /**
3758        *
3759        *
3760        * <pre>
3761        * Fields that will be excluded in the prediction instance that is
3762        * sent to the Model.
3763        * Excluded will be attached to the batch prediction output if
3764        * [key_field][google.cloud.aiplatform.v1.BatchPredictionJob.InstanceConfig.key_field]
3765        * is not specified.
3766        * When excluded_fields is populated,
3767        * [included_fields][google.cloud.aiplatform.v1.BatchPredictionJob.InstanceConfig.included_fields]
3768        * must be empty.
3769        * The input must be JSONL with objects at each line, CSV, BigQuery
3770        * or TfRecord.
3771        * </pre>
3772        *
3773        * <code>repeated string excluded_fields = 4;</code>
3774        *
3775        * @param value The bytes of the excludedFields to add.
3776        * @return This builder for chaining.
3777        */
      // Appends one value, given as raw bytes, to the repeated `excluded_fields` field.
      public Builder addExcludedFieldsBytes(com.google.protobuf.ByteString value) {
        if (value == null) {
          throw new NullPointerException();
        }
        // Proto3 strings must be valid UTF-8; reject malformed bytes up front.
        checkByteStringIsUtf8(value);
        ensureExcludedFieldsIsMutable();
        excludedFields_.add(value);
        onChanged();
        return this;
      }
3788 
      @java.lang.Override
      // Delegates unknown-field handling to GeneratedMessageV3.Builder; declared
      // final so generated subclasses cannot alter wire-compatibility behavior.
      public final Builder setUnknownFields(
          final com.google.protobuf.UnknownFieldSet unknownFields) {
        return super.setUnknownFields(unknownFields);
      }
3794 
      @java.lang.Override
      // Merges (rather than replaces) unknown fields; delegates to the superclass.
      public final Builder mergeUnknownFields(
          final com.google.protobuf.UnknownFieldSet unknownFields) {
        return super.mergeUnknownFields(unknownFields);
      }
3800 
3801       // @@protoc_insertion_point(builder_scope:google.cloud.aiplatform.v1.BatchPredictionJob.InstanceConfig)
3802     }
3803 
3804     // @@protoc_insertion_point(class_scope:google.cloud.aiplatform.v1.BatchPredictionJob.InstanceConfig)
    // Singleton default (all-fields-unset) instance, created eagerly at class load.
    private static final com.google.cloud.aiplatform.v1.BatchPredictionJob.InstanceConfig
        DEFAULT_INSTANCE;

    static {
      DEFAULT_INSTANCE = new com.google.cloud.aiplatform.v1.BatchPredictionJob.InstanceConfig();
    }

    // Returns the shared immutable default instance of InstanceConfig.
    public static com.google.cloud.aiplatform.v1.BatchPredictionJob.InstanceConfig
        getDefaultInstance() {
      return DEFAULT_INSTANCE;
    }
3816 
    // Wire-format parser for InstanceConfig. Every failure path attaches the
    // partially-built message via setUnfinishedMessage so callers can inspect
    // whatever was successfully parsed before the error.
    private static final com.google.protobuf.Parser<InstanceConfig> PARSER =
        new com.google.protobuf.AbstractParser<InstanceConfig>() {
          @java.lang.Override
          public InstanceConfig parsePartialFrom(
              com.google.protobuf.CodedInputStream input,
              com.google.protobuf.ExtensionRegistryLite extensionRegistry)
              throws com.google.protobuf.InvalidProtocolBufferException {
            Builder builder = newBuilder();
            try {
              builder.mergeFrom(input, extensionRegistry);
            } catch (com.google.protobuf.InvalidProtocolBufferException e) {
              throw e.setUnfinishedMessage(builder.buildPartial());
            } catch (com.google.protobuf.UninitializedMessageException e) {
              // Missing required fields: surface as InvalidProtocolBufferException.
              throw e.asInvalidProtocolBufferException()
                  .setUnfinishedMessage(builder.buildPartial());
            } catch (java.io.IOException e) {
              // Wrap stream errors in the protobuf exception type callers expect.
              throw new com.google.protobuf.InvalidProtocolBufferException(e)
                  .setUnfinishedMessage(builder.buildPartial());
            }
            // buildPartial: do not enforce required-field checks here.
            return builder.buildPartial();
          }
        };
3839 
    // Static accessor for the shared parser instance.
    public static com.google.protobuf.Parser<InstanceConfig> parser() {
      return PARSER;
    }
3843 
    @java.lang.Override
    // Instance-level accessor required by the Message interface; same shared parser.
    public com.google.protobuf.Parser<InstanceConfig> getParserForType() {
      return PARSER;
    }
3848 
    @java.lang.Override
    // Instance-level accessor for the shared default instance (Message contract).
    public com.google.cloud.aiplatform.v1.BatchPredictionJob.InstanceConfig
        getDefaultInstanceForType() {
      return DEFAULT_INSTANCE;
    }
3854   }
3855 
3856   public interface OutputConfigOrBuilder
3857       extends
3858       // @@protoc_insertion_point(interface_extends:google.cloud.aiplatform.v1.BatchPredictionJob.OutputConfig)
3859       com.google.protobuf.MessageOrBuilder {
3860 
3861     /**
3862      *
3863      *
3864      * <pre>
3865      * The Cloud Storage location of the directory where the output is
3866      * to be written to. In the given directory a new directory is created.
3867      * Its name is `prediction-&lt;model-display-name&gt;-&lt;job-create-time&gt;`,
3868      * where timestamp is in YYYY-MM-DDThh:mm:ss.sssZ ISO-8601 format.
3869      * Inside of it files `predictions_0001.&lt;extension&gt;`,
3870      * `predictions_0002.&lt;extension&gt;`, ..., `predictions_N.&lt;extension&gt;`
3871      * are created where `&lt;extension&gt;` depends on chosen
3872      * [predictions_format][google.cloud.aiplatform.v1.BatchPredictionJob.OutputConfig.predictions_format],
3873      * and N may equal 0001 and depends on the total number of successfully
3874      * predicted instances. If the Model has both
3875      * [instance][google.cloud.aiplatform.v1.PredictSchemata.instance_schema_uri]
3876      * and
3877      * [prediction][google.cloud.aiplatform.v1.PredictSchemata.parameters_schema_uri]
3878      * schemata defined then each such file contains predictions as per the
3879      * [predictions_format][google.cloud.aiplatform.v1.BatchPredictionJob.OutputConfig.predictions_format].
3880      * If prediction for any instance failed (partially or completely), then
3881      * an additional `errors_0001.&lt;extension&gt;`, `errors_0002.&lt;extension&gt;`,...,
3882      * `errors_N.&lt;extension&gt;` files are created (N depends on total number
3883      * of failed predictions). These files contain the failed instances,
3884      * as per their schema, followed by an additional `error` field which as
3885      * value has [google.rpc.Status][google.rpc.Status]
3886      * containing only `code` and `message` fields.
3887      * </pre>
3888      *
3889      * <code>.google.cloud.aiplatform.v1.GcsDestination gcs_destination = 2;</code>
3890      *
3891      * @return Whether the gcsDestination field is set.
3892      */
    boolean hasGcsDestination(); // true when the `destination` oneof holds gcs_destination (field 2)
3894     /**
3895      *
3896      *
3897      * <pre>
3898      * The Cloud Storage location of the directory where the output is
3899      * to be written to. In the given directory a new directory is created.
3900      * Its name is `prediction-&lt;model-display-name&gt;-&lt;job-create-time&gt;`,
3901      * where timestamp is in YYYY-MM-DDThh:mm:ss.sssZ ISO-8601 format.
3902      * Inside of it files `predictions_0001.&lt;extension&gt;`,
3903      * `predictions_0002.&lt;extension&gt;`, ..., `predictions_N.&lt;extension&gt;`
3904      * are created where `&lt;extension&gt;` depends on chosen
3905      * [predictions_format][google.cloud.aiplatform.v1.BatchPredictionJob.OutputConfig.predictions_format],
3906      * and N may equal 0001 and depends on the total number of successfully
3907      * predicted instances. If the Model has both
3908      * [instance][google.cloud.aiplatform.v1.PredictSchemata.instance_schema_uri]
3909      * and
3910      * [prediction][google.cloud.aiplatform.v1.PredictSchemata.parameters_schema_uri]
3911      * schemata defined then each such file contains predictions as per the
3912      * [predictions_format][google.cloud.aiplatform.v1.BatchPredictionJob.OutputConfig.predictions_format].
3913      * If prediction for any instance failed (partially or completely), then
3914      * an additional `errors_0001.&lt;extension&gt;`, `errors_0002.&lt;extension&gt;`,...,
3915      * `errors_N.&lt;extension&gt;` files are created (N depends on total number
3916      * of failed predictions). These files contain the failed instances,
3917      * as per their schema, followed by an additional `error` field which as
3918      * value has [google.rpc.Status][google.rpc.Status]
3919      * containing only `code` and `message` fields.
3920      * </pre>
3921      *
3922      * <code>.google.cloud.aiplatform.v1.GcsDestination gcs_destination = 2;</code>
3923      *
3924      * @return The gcsDestination.
3925      */
    com.google.cloud.aiplatform.v1.GcsDestination getGcsDestination(); // default instance if unset
3927     /**
3928      *
3929      *
3930      * <pre>
3931      * The Cloud Storage location of the directory where the output is
3932      * to be written to. In the given directory a new directory is created.
3933      * Its name is `prediction-&lt;model-display-name&gt;-&lt;job-create-time&gt;`,
3934      * where timestamp is in YYYY-MM-DDThh:mm:ss.sssZ ISO-8601 format.
3935      * Inside of it files `predictions_0001.&lt;extension&gt;`,
3936      * `predictions_0002.&lt;extension&gt;`, ..., `predictions_N.&lt;extension&gt;`
3937      * are created where `&lt;extension&gt;` depends on chosen
3938      * [predictions_format][google.cloud.aiplatform.v1.BatchPredictionJob.OutputConfig.predictions_format],
3939      * and N may equal 0001 and depends on the total number of successfully
3940      * predicted instances. If the Model has both
3941      * [instance][google.cloud.aiplatform.v1.PredictSchemata.instance_schema_uri]
3942      * and
3943      * [prediction][google.cloud.aiplatform.v1.PredictSchemata.parameters_schema_uri]
3944      * schemata defined then each such file contains predictions as per the
3945      * [predictions_format][google.cloud.aiplatform.v1.BatchPredictionJob.OutputConfig.predictions_format].
3946      * If prediction for any instance failed (partially or completely), then
3947      * an additional `errors_0001.&lt;extension&gt;`, `errors_0002.&lt;extension&gt;`,...,
3948      * `errors_N.&lt;extension&gt;` files are created (N depends on total number
3949      * of failed predictions). These files contain the failed instances,
3950      * as per their schema, followed by an additional `error` field which as
3951      * value has [google.rpc.Status][google.rpc.Status]
3952      * containing only `code` and `message` fields.
3953      * </pre>
3954      *
3955      * <code>.google.cloud.aiplatform.v1.GcsDestination gcs_destination = 2;</code>
3956      */
    com.google.cloud.aiplatform.v1.GcsDestinationOrBuilder getGcsDestinationOrBuilder(); // no-copy read view
3958 
3959     /**
3960      *
3961      *
3962      * <pre>
3963      * The BigQuery project or dataset location where the output is to be
3964      * written to. If project is provided, a new dataset is created with name
3965      * `prediction_&lt;model-display-name&gt;_&lt;job-create-time&gt;`
3966      * where &lt;model-display-name&gt; is made
3967      * BigQuery-dataset-name compatible (for example, most special characters
3968      * become underscores), and timestamp is in
3969      * YYYY_MM_DDThh_mm_ss_sssZ "based on ISO-8601" format. In the dataset
3970      * two tables will be created, `predictions`, and `errors`.
3971      * If the Model has both
3972      * [instance][google.cloud.aiplatform.v1.PredictSchemata.instance_schema_uri]
3973      * and
3974      * [prediction][google.cloud.aiplatform.v1.PredictSchemata.parameters_schema_uri]
3975      * schemata defined then the tables have columns as follows: The
3976      * `predictions` table contains instances for which the prediction
3977      * succeeded, it has columns as per a concatenation of the Model's
3978      * instance and prediction schemata. The `errors` table contains rows for
3979      * which the prediction has failed, it has instance columns, as per the
3980      * instance schema, followed by a single "errors" column, which as values
3981      * has [google.rpc.Status][google.rpc.Status]
3982      * represented as a STRUCT, and containing only `code` and `message`.
3983      * </pre>
3984      *
3985      * <code>.google.cloud.aiplatform.v1.BigQueryDestination bigquery_destination = 3;</code>
3986      *
3987      * @return Whether the bigqueryDestination field is set.
3988      */
    boolean hasBigqueryDestination(); // true when the `destination` oneof holds bigquery_destination (field 3)
3990     /**
3991      *
3992      *
3993      * <pre>
3994      * The BigQuery project or dataset location where the output is to be
3995      * written to. If project is provided, a new dataset is created with name
3996      * `prediction_&lt;model-display-name&gt;_&lt;job-create-time&gt;`
3997      * where &lt;model-display-name&gt; is made
3998      * BigQuery-dataset-name compatible (for example, most special characters
3999      * become underscores), and timestamp is in
4000      * YYYY_MM_DDThh_mm_ss_sssZ "based on ISO-8601" format. In the dataset
4001      * two tables will be created, `predictions`, and `errors`.
4002      * If the Model has both
4003      * [instance][google.cloud.aiplatform.v1.PredictSchemata.instance_schema_uri]
4004      * and
4005      * [prediction][google.cloud.aiplatform.v1.PredictSchemata.parameters_schema_uri]
4006      * schemata defined then the tables have columns as follows: The
4007      * `predictions` table contains instances for which the prediction
4008      * succeeded, it has columns as per a concatenation of the Model's
4009      * instance and prediction schemata. The `errors` table contains rows for
4010      * which the prediction has failed, it has instance columns, as per the
4011      * instance schema, followed by a single "errors" column, which as values
4012      * has [google.rpc.Status][google.rpc.Status]
4013      * represented as a STRUCT, and containing only `code` and `message`.
4014      * </pre>
4015      *
4016      * <code>.google.cloud.aiplatform.v1.BigQueryDestination bigquery_destination = 3;</code>
4017      *
4018      * @return The bigqueryDestination.
4019      */
    com.google.cloud.aiplatform.v1.BigQueryDestination getBigqueryDestination(); // default instance if unset
4021     /**
4022      *
4023      *
4024      * <pre>
4025      * The BigQuery project or dataset location where the output is to be
4026      * written to. If project is provided, a new dataset is created with name
4027      * `prediction_&lt;model-display-name&gt;_&lt;job-create-time&gt;`
4028      * where &lt;model-display-name&gt; is made
4029      * BigQuery-dataset-name compatible (for example, most special characters
4030      * become underscores), and timestamp is in
4031      * YYYY_MM_DDThh_mm_ss_sssZ "based on ISO-8601" format. In the dataset
4032      * two tables will be created, `predictions`, and `errors`.
4033      * If the Model has both
4034      * [instance][google.cloud.aiplatform.v1.PredictSchemata.instance_schema_uri]
4035      * and
4036      * [prediction][google.cloud.aiplatform.v1.PredictSchemata.parameters_schema_uri]
4037      * schemata defined then the tables have columns as follows: The
4038      * `predictions` table contains instances for which the prediction
4039      * succeeded, it has columns as per a concatenation of the Model's
4040      * instance and prediction schemata. The `errors` table contains rows for
4041      * which the prediction has failed, it has instance columns, as per the
4042      * instance schema, followed by a single "errors" column, which as values
4043      * has [google.rpc.Status][google.rpc.Status]
4044      * represented as a STRUCT, and containing only `code` and `message`.
4045      * </pre>
4046      *
4047      * <code>.google.cloud.aiplatform.v1.BigQueryDestination bigquery_destination = 3;</code>
4048      */
    com.google.cloud.aiplatform.v1.BigQueryDestinationOrBuilder getBigqueryDestinationOrBuilder(); // no-copy read view
4050 
4051     /**
4052      *
4053      *
4054      * <pre>
4055      * Required. The format in which Vertex AI gives the predictions, must be
4056      * one of the [Model's][google.cloud.aiplatform.v1.BatchPredictionJob.model]
4057      * [supported_output_storage_formats][google.cloud.aiplatform.v1.Model.supported_output_storage_formats].
4058      * </pre>
4059      *
4060      * <code>string predictions_format = 1 [(.google.api.field_behavior) = REQUIRED];</code>
4061      *
4062      * @return The predictionsFormat.
4063      */
    java.lang.String getPredictionsFormat(); // required `predictions_format` (proto field 1)
4065     /**
4066      *
4067      *
4068      * <pre>
4069      * Required. The format in which Vertex AI gives the predictions, must be
4070      * one of the [Model's][google.cloud.aiplatform.v1.BatchPredictionJob.model]
4071      * [supported_output_storage_formats][google.cloud.aiplatform.v1.Model.supported_output_storage_formats].
4072      * </pre>
4073      *
4074      * <code>string predictions_format = 1 [(.google.api.field_behavior) = REQUIRED];</code>
4075      *
4076      * @return The bytes for predictionsFormat.
4077      */
    com.google.protobuf.ByteString getPredictionsFormatBytes(); // UTF-8 bytes of predictions_format
4079 
    // Identifies which member of the `destination` oneof is currently set.
    public com.google.cloud.aiplatform.v1.BatchPredictionJob.OutputConfig.DestinationCase
        getDestinationCase();
4082   }
4083   /**
4084    *
4085    *
4086    * <pre>
4087    * Configures the output of
4088    * [BatchPredictionJob][google.cloud.aiplatform.v1.BatchPredictionJob]. See
4089    * [Model.supported_output_storage_formats][google.cloud.aiplatform.v1.Model.supported_output_storage_formats]
4090    * for supported output formats, and how predictions are expressed via any of
4091    * them.
4092    * </pre>
4093    *
4094    * Protobuf type {@code google.cloud.aiplatform.v1.BatchPredictionJob.OutputConfig}
4095    */
4096   public static final class OutputConfig extends com.google.protobuf.GeneratedMessageV3
4097       implements
4098       // @@protoc_insertion_point(message_implements:google.cloud.aiplatform.v1.BatchPredictionJob.OutputConfig)
4099       OutputConfigOrBuilder {
4100     private static final long serialVersionUID = 0L;
4101     // Use OutputConfig.newBuilder() to construct.
    // Builder-based constructor; all field values come from the builder.
    private OutputConfig(com.google.protobuf.GeneratedMessageV3.Builder<?> builder) {
      super(builder);
    }
4105 
    // Default constructor used for the singleton default instance; initializes
    // the only non-oneof field to its proto3 default (empty string).
    private OutputConfig() {
      predictionsFormat_ = "";
    }
4109 
    @java.lang.Override
    @SuppressWarnings({"unused"})
    // Reflection hook used by the protobuf runtime to allocate fresh instances.
    protected java.lang.Object newInstance(UnusedPrivateParameter unused) {
      return new OutputConfig();
    }
4115 
    @java.lang.Override
    // Fields present on the wire but not in this message's schema.
    public final com.google.protobuf.UnknownFieldSet getUnknownFields() {
      return this.unknownFields;
    }
4120 
    // Static descriptor for OutputConfig, resolved from the generated file-level proto class.
    public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
      return com.google.cloud.aiplatform.v1.BatchPredictionJobProto
          .internal_static_google_cloud_aiplatform_v1_BatchPredictionJob_OutputConfig_descriptor;
    }
4125 
    @java.lang.Override
    // Binds the proto descriptor to this class and its Builder for reflective field access.
    protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
        internalGetFieldAccessorTable() {
      return com.google.cloud.aiplatform.v1.BatchPredictionJobProto
          .internal_static_google_cloud_aiplatform_v1_BatchPredictionJob_OutputConfig_fieldAccessorTable
          .ensureFieldAccessorsInitialized(
              com.google.cloud.aiplatform.v1.BatchPredictionJob.OutputConfig.class,
              com.google.cloud.aiplatform.v1.BatchPredictionJob.OutputConfig.Builder.class);
    }
4135 
    // Backing state for the `destination` oneof: the active field number
    // (0 = none set) and the single shared value slot.
    private int destinationCase_ = 0;
    private java.lang.Object destination_;
4138 
4139     public enum DestinationCase
4140         implements
4141             com.google.protobuf.Internal.EnumLite,
4142             com.google.protobuf.AbstractMessage.InternalOneOfEnum {
4143       GCS_DESTINATION(2),
4144       BIGQUERY_DESTINATION(3),
4145       DESTINATION_NOT_SET(0);
4146       private final int value;
4147 
DestinationCase(int value)4148       private DestinationCase(int value) {
4149         this.value = value;
4150       }
4151       /**
4152        * @param value The number of the enum to look for.
4153        * @return The enum associated with the given number.
4154        * @deprecated Use {@link #forNumber(int)} instead.
4155        */
4156       @java.lang.Deprecated
valueOf(int value)4157       public static DestinationCase valueOf(int value) {
4158         return forNumber(value);
4159       }
4160 
forNumber(int value)4161       public static DestinationCase forNumber(int value) {
4162         switch (value) {
4163           case 2:
4164             return GCS_DESTINATION;
4165           case 3:
4166             return BIGQUERY_DESTINATION;
4167           case 0:
4168             return DESTINATION_NOT_SET;
4169           default:
4170             return null;
4171         }
4172       }
4173 
getNumber()4174       public int getNumber() {
4175         return this.value;
4176       }
4177     };
4178 
    // Returns which `destination` oneof field is set (never null: field
    // numbers stored in destinationCase_ always map to a case).
    public DestinationCase getDestinationCase() {
      return DestinationCase.forNumber(destinationCase_);
    }
4182 
4183     public static final int GCS_DESTINATION_FIELD_NUMBER = 2;
4184     /**
4185      *
4186      *
4187      * <pre>
4188      * The Cloud Storage location of the directory where the output is
4189      * to be written to. In the given directory a new directory is created.
4190      * Its name is `prediction-&lt;model-display-name&gt;-&lt;job-create-time&gt;`,
4191      * where timestamp is in YYYY-MM-DDThh:mm:ss.sssZ ISO-8601 format.
4192      * Inside of it files `predictions_0001.&lt;extension&gt;`,
4193      * `predictions_0002.&lt;extension&gt;`, ..., `predictions_N.&lt;extension&gt;`
4194      * are created where `&lt;extension&gt;` depends on chosen
4195      * [predictions_format][google.cloud.aiplatform.v1.BatchPredictionJob.OutputConfig.predictions_format],
4196      * and N may equal 0001 and depends on the total number of successfully
4197      * predicted instances. If the Model has both
4198      * [instance][google.cloud.aiplatform.v1.PredictSchemata.instance_schema_uri]
4199      * and
4200      * [prediction][google.cloud.aiplatform.v1.PredictSchemata.parameters_schema_uri]
4201      * schemata defined then each such file contains predictions as per the
4202      * [predictions_format][google.cloud.aiplatform.v1.BatchPredictionJob.OutputConfig.predictions_format].
4203      * If prediction for any instance failed (partially or completely), then
4204      * an additional `errors_0001.&lt;extension&gt;`, `errors_0002.&lt;extension&gt;`,...,
4205      * `errors_N.&lt;extension&gt;` files are created (N depends on total number
4206      * of failed predictions). These files contain the failed instances,
4207      * as per their schema, followed by an additional `error` field which as
4208      * value has [google.rpc.Status][google.rpc.Status]
4209      * containing only `code` and `message` fields.
4210      * </pre>
4211      *
4212      * <code>.google.cloud.aiplatform.v1.GcsDestination gcs_destination = 2;</code>
4213      *
4214      * @return Whether the gcsDestination field is set.
4215      */
4216     @java.lang.Override
hasGcsDestination()4217     public boolean hasGcsDestination() {
4218       return destinationCase_ == 2;
4219     }
4220     /**
4221      *
4222      *
4223      * <pre>
4224      * The Cloud Storage location of the directory where the output is
4225      * to be written to. In the given directory a new directory is created.
4226      * Its name is `prediction-&lt;model-display-name&gt;-&lt;job-create-time&gt;`,
4227      * where timestamp is in YYYY-MM-DDThh:mm:ss.sssZ ISO-8601 format.
4228      * Inside of it files `predictions_0001.&lt;extension&gt;`,
4229      * `predictions_0002.&lt;extension&gt;`, ..., `predictions_N.&lt;extension&gt;`
4230      * are created where `&lt;extension&gt;` depends on chosen
4231      * [predictions_format][google.cloud.aiplatform.v1.BatchPredictionJob.OutputConfig.predictions_format],
4232      * and N may equal 0001 and depends on the total number of successfully
4233      * predicted instances. If the Model has both
4234      * [instance][google.cloud.aiplatform.v1.PredictSchemata.instance_schema_uri]
4235      * and
4236      * [prediction][google.cloud.aiplatform.v1.PredictSchemata.parameters_schema_uri]
4237      * schemata defined then each such file contains predictions as per the
4238      * [predictions_format][google.cloud.aiplatform.v1.BatchPredictionJob.OutputConfig.predictions_format].
4239      * If prediction for any instance failed (partially or completely), then
4240      * an additional `errors_0001.&lt;extension&gt;`, `errors_0002.&lt;extension&gt;`,...,
4241      * `errors_N.&lt;extension&gt;` files are created (N depends on total number
4242      * of failed predictions). These files contain the failed instances,
4243      * as per their schema, followed by an additional `error` field which as
4244      * value has [google.rpc.Status][google.rpc.Status]
4245      * containing only `code` and `message` fields.
4246      * </pre>
4247      *
4248      * <code>.google.cloud.aiplatform.v1.GcsDestination gcs_destination = 2;</code>
4249      *
4250      * @return The gcsDestination.
4251      */
4252     @java.lang.Override
getGcsDestination()4253     public com.google.cloud.aiplatform.v1.GcsDestination getGcsDestination() {
4254       if (destinationCase_ == 2) {
4255         return (com.google.cloud.aiplatform.v1.GcsDestination) destination_;
4256       }
4257       return com.google.cloud.aiplatform.v1.GcsDestination.getDefaultInstance();
4258     }
4259     /**
4260      *
4261      *
4262      * <pre>
4263      * The Cloud Storage location of the directory where the output is
4264      * to be written to. In the given directory a new directory is created.
4265      * Its name is `prediction-&lt;model-display-name&gt;-&lt;job-create-time&gt;`,
4266      * where timestamp is in YYYY-MM-DDThh:mm:ss.sssZ ISO-8601 format.
4267      * Inside of it files `predictions_0001.&lt;extension&gt;`,
4268      * `predictions_0002.&lt;extension&gt;`, ..., `predictions_N.&lt;extension&gt;`
4269      * are created where `&lt;extension&gt;` depends on chosen
4270      * [predictions_format][google.cloud.aiplatform.v1.BatchPredictionJob.OutputConfig.predictions_format],
4271      * and N may equal 0001 and depends on the total number of successfully
4272      * predicted instances. If the Model has both
4273      * [instance][google.cloud.aiplatform.v1.PredictSchemata.instance_schema_uri]
4274      * and
4275      * [prediction][google.cloud.aiplatform.v1.PredictSchemata.parameters_schema_uri]
4276      * schemata defined then each such file contains predictions as per the
4277      * [predictions_format][google.cloud.aiplatform.v1.BatchPredictionJob.OutputConfig.predictions_format].
4278      * If prediction for any instance failed (partially or completely), then
4279      * an additional `errors_0001.&lt;extension&gt;`, `errors_0002.&lt;extension&gt;`,...,
4280      * `errors_N.&lt;extension&gt;` files are created (N depends on total number
4281      * of failed predictions). These files contain the failed instances,
4282      * as per their schema, followed by an additional `error` field which as
4283      * value has [google.rpc.Status][google.rpc.Status]
4284      * containing only `code` and `message` fields.
4285      * </pre>
4286      *
4287      * <code>.google.cloud.aiplatform.v1.GcsDestination gcs_destination = 2;</code>
4288      */
4289     @java.lang.Override
getGcsDestinationOrBuilder()4290     public com.google.cloud.aiplatform.v1.GcsDestinationOrBuilder getGcsDestinationOrBuilder() {
4291       if (destinationCase_ == 2) {
4292         return (com.google.cloud.aiplatform.v1.GcsDestination) destination_;
4293       }
4294       return com.google.cloud.aiplatform.v1.GcsDestination.getDefaultInstance();
4295     }
4296 
4297     public static final int BIGQUERY_DESTINATION_FIELD_NUMBER = 3;
4298     /**
4299      *
4300      *
4301      * <pre>
4302      * The BigQuery project or dataset location where the output is to be
4303      * written to. If project is provided, a new dataset is created with name
4304      * `prediction_&lt;model-display-name&gt;_&lt;job-create-time&gt;`
4305      * where &lt;model-display-name&gt; is made
4306      * BigQuery-dataset-name compatible (for example, most special characters
4307      * become underscores), and timestamp is in
4308      * YYYY_MM_DDThh_mm_ss_sssZ "based on ISO-8601" format. In the dataset
4309      * two tables will be created, `predictions`, and `errors`.
4310      * If the Model has both
4311      * [instance][google.cloud.aiplatform.v1.PredictSchemata.instance_schema_uri]
4312      * and
4313      * [prediction][google.cloud.aiplatform.v1.PredictSchemata.parameters_schema_uri]
4314      * schemata defined then the tables have columns as follows: The
4315      * `predictions` table contains instances for which the prediction
4316      * succeeded, it has columns as per a concatenation of the Model's
4317      * instance and prediction schemata. The `errors` table contains rows for
4318      * which the prediction has failed, it has instance columns, as per the
4319      * instance schema, followed by a single "errors" column, which as values
4320      * has [google.rpc.Status][google.rpc.Status]
4321      * represented as a STRUCT, and containing only `code` and `message`.
4322      * </pre>
4323      *
4324      * <code>.google.cloud.aiplatform.v1.BigQueryDestination bigquery_destination = 3;</code>
4325      *
4326      * @return Whether the bigqueryDestination field is set.
4327      */
4328     @java.lang.Override
hasBigqueryDestination()4329     public boolean hasBigqueryDestination() {
4330       return destinationCase_ == 3;
4331     }
4332     /**
4333      *
4334      *
4335      * <pre>
4336      * The BigQuery project or dataset location where the output is to be
4337      * written to. If project is provided, a new dataset is created with name
4338      * `prediction_&lt;model-display-name&gt;_&lt;job-create-time&gt;`
4339      * where &lt;model-display-name&gt; is made
4340      * BigQuery-dataset-name compatible (for example, most special characters
4341      * become underscores), and timestamp is in
4342      * YYYY_MM_DDThh_mm_ss_sssZ "based on ISO-8601" format. In the dataset
4343      * two tables will be created, `predictions`, and `errors`.
4344      * If the Model has both
4345      * [instance][google.cloud.aiplatform.v1.PredictSchemata.instance_schema_uri]
4346      * and
4347      * [prediction][google.cloud.aiplatform.v1.PredictSchemata.parameters_schema_uri]
4348      * schemata defined then the tables have columns as follows: The
4349      * `predictions` table contains instances for which the prediction
4350      * succeeded, it has columns as per a concatenation of the Model's
4351      * instance and prediction schemata. The `errors` table contains rows for
4352      * which the prediction has failed, it has instance columns, as per the
4353      * instance schema, followed by a single "errors" column, which as values
4354      * has [google.rpc.Status][google.rpc.Status]
4355      * represented as a STRUCT, and containing only `code` and `message`.
4356      * </pre>
4357      *
4358      * <code>.google.cloud.aiplatform.v1.BigQueryDestination bigquery_destination = 3;</code>
4359      *
4360      * @return The bigqueryDestination.
4361      */
4362     @java.lang.Override
getBigqueryDestination()4363     public com.google.cloud.aiplatform.v1.BigQueryDestination getBigqueryDestination() {
4364       if (destinationCase_ == 3) {
4365         return (com.google.cloud.aiplatform.v1.BigQueryDestination) destination_;
4366       }
4367       return com.google.cloud.aiplatform.v1.BigQueryDestination.getDefaultInstance();
4368     }
4369     /**
4370      *
4371      *
4372      * <pre>
4373      * The BigQuery project or dataset location where the output is to be
4374      * written to. If project is provided, a new dataset is created with name
4375      * `prediction_&lt;model-display-name&gt;_&lt;job-create-time&gt;`
4376      * where &lt;model-display-name&gt; is made
4377      * BigQuery-dataset-name compatible (for example, most special characters
4378      * become underscores), and timestamp is in
4379      * YYYY_MM_DDThh_mm_ss_sssZ "based on ISO-8601" format. In the dataset
4380      * two tables will be created, `predictions`, and `errors`.
4381      * If the Model has both
4382      * [instance][google.cloud.aiplatform.v1.PredictSchemata.instance_schema_uri]
4383      * and
4384      * [prediction][google.cloud.aiplatform.v1.PredictSchemata.parameters_schema_uri]
4385      * schemata defined then the tables have columns as follows: The
4386      * `predictions` table contains instances for which the prediction
4387      * succeeded, it has columns as per a concatenation of the Model's
4388      * instance and prediction schemata. The `errors` table contains rows for
4389      * which the prediction has failed, it has instance columns, as per the
4390      * instance schema, followed by a single "errors" column, which as values
4391      * has [google.rpc.Status][google.rpc.Status]
4392      * represented as a STRUCT, and containing only `code` and `message`.
4393      * </pre>
4394      *
4395      * <code>.google.cloud.aiplatform.v1.BigQueryDestination bigquery_destination = 3;</code>
4396      */
4397     @java.lang.Override
4398     public com.google.cloud.aiplatform.v1.BigQueryDestinationOrBuilder
getBigqueryDestinationOrBuilder()4399         getBigqueryDestinationOrBuilder() {
4400       if (destinationCase_ == 3) {
4401         return (com.google.cloud.aiplatform.v1.BigQueryDestination) destination_;
4402       }
4403       return com.google.cloud.aiplatform.v1.BigQueryDestination.getDefaultInstance();
4404     }
4405 
4406     public static final int PREDICTIONS_FORMAT_FIELD_NUMBER = 1;
4407 
4408     @SuppressWarnings("serial")
4409     private volatile java.lang.Object predictionsFormat_ = "";
4410     /**
4411      *
4412      *
4413      * <pre>
4414      * Required. The format in which Vertex AI gives the predictions, must be
4415      * one of the [Model's][google.cloud.aiplatform.v1.BatchPredictionJob.model]
4416      * [supported_output_storage_formats][google.cloud.aiplatform.v1.Model.supported_output_storage_formats].
4417      * </pre>
4418      *
4419      * <code>string predictions_format = 1 [(.google.api.field_behavior) = REQUIRED];</code>
4420      *
4421      * @return The predictionsFormat.
4422      */
4423     @java.lang.Override
getPredictionsFormat()4424     public java.lang.String getPredictionsFormat() {
4425       java.lang.Object ref = predictionsFormat_;
4426       if (ref instanceof java.lang.String) {
4427         return (java.lang.String) ref;
4428       } else {
4429         com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
4430         java.lang.String s = bs.toStringUtf8();
4431         predictionsFormat_ = s;
4432         return s;
4433       }
4434     }
4435     /**
4436      *
4437      *
4438      * <pre>
4439      * Required. The format in which Vertex AI gives the predictions, must be
4440      * one of the [Model's][google.cloud.aiplatform.v1.BatchPredictionJob.model]
4441      * [supported_output_storage_formats][google.cloud.aiplatform.v1.Model.supported_output_storage_formats].
4442      * </pre>
4443      *
4444      * <code>string predictions_format = 1 [(.google.api.field_behavior) = REQUIRED];</code>
4445      *
4446      * @return The bytes for predictionsFormat.
4447      */
4448     @java.lang.Override
getPredictionsFormatBytes()4449     public com.google.protobuf.ByteString getPredictionsFormatBytes() {
4450       java.lang.Object ref = predictionsFormat_;
4451       if (ref instanceof java.lang.String) {
4452         com.google.protobuf.ByteString b =
4453             com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
4454         predictionsFormat_ = b;
4455         return b;
4456       } else {
4457         return (com.google.protobuf.ByteString) ref;
4458       }
4459     }
4460 
    // Memoized tri-state: -1 unknown, 0 not initialized, 1 initialized.
    private byte memoizedIsInitialized = -1;

    // This message has no required proto2-style fields, so it is always
    // initialized; the result is cached after the first call.
    @java.lang.Override
    public final boolean isInitialized() {
      byte isInitialized = memoizedIsInitialized;
      if (isInitialized == 1) return true;
      if (isInitialized == 0) return false;

      memoizedIsInitialized = 1;
      return true;
    }
4472 
    // Serializes in field-number order: predictions_format (1), then whichever
    // destination oneof case is set (2 or 3), then any unknown fields.
    @java.lang.Override
    public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException {
      if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(predictionsFormat_)) {
        com.google.protobuf.GeneratedMessageV3.writeString(output, 1, predictionsFormat_);
      }
      if (destinationCase_ == 2) {
        output.writeMessage(2, (com.google.cloud.aiplatform.v1.GcsDestination) destination_);
      }
      if (destinationCase_ == 3) {
        output.writeMessage(3, (com.google.cloud.aiplatform.v1.BigQueryDestination) destination_);
      }
      getUnknownFields().writeTo(output);
    }
4486 
    // Computes (and memoizes in memoizedSize) the wire size; mirrors the field
    // order and conditions of writeTo exactly.
    @java.lang.Override
    public int getSerializedSize() {
      int size = memoizedSize;
      if (size != -1) return size;

      size = 0;
      if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(predictionsFormat_)) {
        size += com.google.protobuf.GeneratedMessageV3.computeStringSize(1, predictionsFormat_);
      }
      if (destinationCase_ == 2) {
        size +=
            com.google.protobuf.CodedOutputStream.computeMessageSize(
                2, (com.google.cloud.aiplatform.v1.GcsDestination) destination_);
      }
      if (destinationCase_ == 3) {
        size +=
            com.google.protobuf.CodedOutputStream.computeMessageSize(
                3, (com.google.cloud.aiplatform.v1.BigQueryDestination) destination_);
      }
      size += getUnknownFields().getSerializedSize();
      memoizedSize = size;
      return size;
    }
4510 
    // Value equality over predictions_format, the active oneof case and its
    // payload, and any unknown fields.
    @java.lang.Override
    public boolean equals(final java.lang.Object obj) {
      if (obj == this) {
        return true;
      }
      if (!(obj instanceof com.google.cloud.aiplatform.v1.BatchPredictionJob.OutputConfig)) {
        return super.equals(obj);
      }
      com.google.cloud.aiplatform.v1.BatchPredictionJob.OutputConfig other =
          (com.google.cloud.aiplatform.v1.BatchPredictionJob.OutputConfig) obj;

      if (!getPredictionsFormat().equals(other.getPredictionsFormat())) return false;
      // Cases must match before comparing the oneof payload below.
      if (!getDestinationCase().equals(other.getDestinationCase())) return false;
      switch (destinationCase_) {
        case 2:
          if (!getGcsDestination().equals(other.getGcsDestination())) return false;
          break;
        case 3:
          if (!getBigqueryDestination().equals(other.getBigqueryDestination())) return false;
          break;
        case 0:
        default:
      }
      if (!getUnknownFields().equals(other.getUnknownFields())) return false;
      return true;
    }
4537 
    // Hash over the same components as equals(); memoized in memoizedHashCode
    // (0 means "not yet computed").
    @java.lang.Override
    public int hashCode() {
      if (memoizedHashCode != 0) {
        return memoizedHashCode;
      }
      int hash = 41;
      hash = (19 * hash) + getDescriptor().hashCode();
      hash = (37 * hash) + PREDICTIONS_FORMAT_FIELD_NUMBER;
      hash = (53 * hash) + getPredictionsFormat().hashCode();
      switch (destinationCase_) {
        case 2:
          hash = (37 * hash) + GCS_DESTINATION_FIELD_NUMBER;
          hash = (53 * hash) + getGcsDestination().hashCode();
          break;
        case 3:
          hash = (37 * hash) + BIGQUERY_DESTINATION_FIELD_NUMBER;
          hash = (53 * hash) + getBigqueryDestination().hashCode();
          break;
        case 0:
        default:
      }
      hash = (29 * hash) + getUnknownFields().hashCode();
      memoizedHashCode = hash;
      return hash;
    }
4563 
    // --- Standard generated parse entry points. The byte-array/ByteBuffer/
    // ByteString overloads delegate to PARSER directly; the stream overloads
    // go through GeneratedMessageV3 helpers that convert protocol errors to
    // IOException. parseDelimitedFrom reads a varint length prefix first. ---

    public static com.google.cloud.aiplatform.v1.BatchPredictionJob.OutputConfig parseFrom(
        java.nio.ByteBuffer data) throws com.google.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data);
    }

    public static com.google.cloud.aiplatform.v1.BatchPredictionJob.OutputConfig parseFrom(
        java.nio.ByteBuffer data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws com.google.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data, extensionRegistry);
    }

    public static com.google.cloud.aiplatform.v1.BatchPredictionJob.OutputConfig parseFrom(
        com.google.protobuf.ByteString data)
        throws com.google.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data);
    }

    public static com.google.cloud.aiplatform.v1.BatchPredictionJob.OutputConfig parseFrom(
        com.google.protobuf.ByteString data,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws com.google.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data, extensionRegistry);
    }

    public static com.google.cloud.aiplatform.v1.BatchPredictionJob.OutputConfig parseFrom(
        byte[] data) throws com.google.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data);
    }

    public static com.google.cloud.aiplatform.v1.BatchPredictionJob.OutputConfig parseFrom(
        byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws com.google.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data, extensionRegistry);
    }

    public static com.google.cloud.aiplatform.v1.BatchPredictionJob.OutputConfig parseFrom(
        java.io.InputStream input) throws java.io.IOException {
      return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
    }

    public static com.google.cloud.aiplatform.v1.BatchPredictionJob.OutputConfig parseFrom(
        java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
          PARSER, input, extensionRegistry);
    }

    public static com.google.cloud.aiplatform.v1.BatchPredictionJob.OutputConfig parseDelimitedFrom(
        java.io.InputStream input) throws java.io.IOException {
      return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(PARSER, input);
    }

    public static com.google.cloud.aiplatform.v1.BatchPredictionJob.OutputConfig parseDelimitedFrom(
        java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(
          PARSER, input, extensionRegistry);
    }

    public static com.google.cloud.aiplatform.v1.BatchPredictionJob.OutputConfig parseFrom(
        com.google.protobuf.CodedInputStream input) throws java.io.IOException {
      return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
    }

    public static com.google.cloud.aiplatform.v1.BatchPredictionJob.OutputConfig parseFrom(
        com.google.protobuf.CodedInputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
          PARSER, input, extensionRegistry);
    }
4635 
    // Builder factories: newBuilder() starts from the default instance;
    // newBuilder(prototype) starts pre-populated from an existing message.
    @java.lang.Override
    public Builder newBuilderForType() {
      return newBuilder();
    }

    public static Builder newBuilder() {
      return DEFAULT_INSTANCE.toBuilder();
    }

    public static Builder newBuilder(
        com.google.cloud.aiplatform.v1.BatchPredictionJob.OutputConfig prototype) {
      return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
    }

    // Avoids the mergeFrom cost when called on the default instance itself.
    @java.lang.Override
    public Builder toBuilder() {
      return this == DEFAULT_INSTANCE ? new Builder() : new Builder().mergeFrom(this);
    }
4654 
4655     @java.lang.Override
newBuilderForType( com.google.protobuf.GeneratedMessageV3.BuilderParent parent)4656     protected Builder newBuilderForType(
4657         com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
4658       Builder builder = new Builder(parent);
4659       return builder;
4660     }
4661     /**
4662      *
4663      *
4664      * <pre>
4665      * Configures the output of
4666      * [BatchPredictionJob][google.cloud.aiplatform.v1.BatchPredictionJob]. See
4667      * [Model.supported_output_storage_formats][google.cloud.aiplatform.v1.Model.supported_output_storage_formats]
4668      * for supported output formats, and how predictions are expressed via any of
4669      * them.
4670      * </pre>
4671      *
4672      * Protobuf type {@code google.cloud.aiplatform.v1.BatchPredictionJob.OutputConfig}
4673      */
4674     public static final class Builder
4675         extends com.google.protobuf.GeneratedMessageV3.Builder<Builder>
4676         implements
4677         // @@protoc_insertion_point(builder_implements:google.cloud.aiplatform.v1.BatchPredictionJob.OutputConfig)
4678         com.google.cloud.aiplatform.v1.BatchPredictionJob.OutputConfigOrBuilder {
      // Same message descriptor as the enclosing OutputConfig type.
      public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
        return com.google.cloud.aiplatform.v1.BatchPredictionJobProto
            .internal_static_google_cloud_aiplatform_v1_BatchPredictionJob_OutputConfig_descriptor;
      }
4683 
      // Same reflective accessor table as the message class.
      @java.lang.Override
      protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
          internalGetFieldAccessorTable() {
        return com.google.cloud.aiplatform.v1.BatchPredictionJobProto
            .internal_static_google_cloud_aiplatform_v1_BatchPredictionJob_OutputConfig_fieldAccessorTable
            .ensureFieldAccessorsInitialized(
                com.google.cloud.aiplatform.v1.BatchPredictionJob.OutputConfig.class,
                com.google.cloud.aiplatform.v1.BatchPredictionJob.OutputConfig.Builder.class);
      }
4693 
      // Construct using com.google.cloud.aiplatform.v1.BatchPredictionJob.OutputConfig.newBuilder()
      private Builder() {}

      // Parent-aware constructor used for nested-builder change propagation.
      private Builder(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
        super(parent);
      }
4700 
      /**
       * Resets every field to its default: clears both destination sub-builders
       * (if they were ever materialized), empties predictions_format, and
       * unsets the destination oneof.
       */
      @java.lang.Override
      public Builder clear() {
        super.clear();
        bitField0_ = 0;
        if (gcsDestinationBuilder_ != null) {
          gcsDestinationBuilder_.clear();
        }
        if (bigqueryDestinationBuilder_ != null) {
          bigqueryDestinationBuilder_.clear();
        }
        predictionsFormat_ = "";
        destinationCase_ = 0;
        destination_ = null;
        return this;
      }
4716 
      // Descriptor for the message type this builder produces.
      @java.lang.Override
      public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() {
        return com.google.cloud.aiplatform.v1.BatchPredictionJobProto
            .internal_static_google_cloud_aiplatform_v1_BatchPredictionJob_OutputConfig_descriptor;
      }
4722 
      // Default (all-fields-unset) instance of the built message type.
      @java.lang.Override
      public com.google.cloud.aiplatform.v1.BatchPredictionJob.OutputConfig
          getDefaultInstanceForType() {
        return com.google.cloud.aiplatform.v1.BatchPredictionJob.OutputConfig.getDefaultInstance();
      }
4728 
      /**
       * Builds the message, throwing if it is not fully initialized (this type
       * has no required fields, so buildPartial's result always passes).
       */
      @java.lang.Override
      public com.google.cloud.aiplatform.v1.BatchPredictionJob.OutputConfig build() {
        com.google.cloud.aiplatform.v1.BatchPredictionJob.OutputConfig result = buildPartial();
        if (!result.isInitialized()) {
          throw newUninitializedMessageException(result);
        }
        return result;
      }
4737 
      /**
       * Builds without the initialization check: copies singular fields (when
       * any presence bit is set) and the oneof state into a new message.
       */
      @java.lang.Override
      public com.google.cloud.aiplatform.v1.BatchPredictionJob.OutputConfig buildPartial() {
        com.google.cloud.aiplatform.v1.BatchPredictionJob.OutputConfig result =
            new com.google.cloud.aiplatform.v1.BatchPredictionJob.OutputConfig(this);
        if (bitField0_ != 0) {
          buildPartial0(result);
        }
        buildPartialOneofs(result);
        onBuilt();
        return result;
      }
4749 
      // Copies non-oneof fields whose presence bit is set; bit 0x00000004
      // tracks predictions_format.
      private void buildPartial0(
          com.google.cloud.aiplatform.v1.BatchPredictionJob.OutputConfig result) {
        int from_bitField0_ = bitField0_;
        if (((from_bitField0_ & 0x00000004) != 0)) {
          result.predictionsFormat_ = predictionsFormat_;
        }
      }
4757 
      // Copies the destination oneof into the result; when the active case has
      // a live sub-builder, its built message overrides the raw destination_.
      private void buildPartialOneofs(
          com.google.cloud.aiplatform.v1.BatchPredictionJob.OutputConfig result) {
        result.destinationCase_ = destinationCase_;
        result.destination_ = this.destination_;
        if (destinationCase_ == 2 && gcsDestinationBuilder_ != null) {
          result.destination_ = gcsDestinationBuilder_.build();
        }
        if (destinationCase_ == 3 && bigqueryDestinationBuilder_ != null) {
          result.destination_ = bigqueryDestinationBuilder_.build();
        }
      }
4769 
4770       @java.lang.Override
clone()4771       public Builder clone() {
4772         return super.clone();
4773       }
4774 
4775       @java.lang.Override
setField( com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value)4776       public Builder setField(
4777           com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
4778         return super.setField(field, value);
4779       }
4780 
4781       @java.lang.Override
clearField(com.google.protobuf.Descriptors.FieldDescriptor field)4782       public Builder clearField(com.google.protobuf.Descriptors.FieldDescriptor field) {
4783         return super.clearField(field);
4784       }
4785 
4786       @java.lang.Override
clearOneof(com.google.protobuf.Descriptors.OneofDescriptor oneof)4787       public Builder clearOneof(com.google.protobuf.Descriptors.OneofDescriptor oneof) {
4788         return super.clearOneof(oneof);
4789       }
4790 
4791       @java.lang.Override
setRepeatedField( com.google.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value)4792       public Builder setRepeatedField(
4793           com.google.protobuf.Descriptors.FieldDescriptor field,
4794           int index,
4795           java.lang.Object value) {
4796         return super.setRepeatedField(field, index, value);
4797       }
4798 
4799       @java.lang.Override
addRepeatedField( com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value)4800       public Builder addRepeatedField(
4801           com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
4802         return super.addRepeatedField(field, value);
4803       }
4804 
4805       @java.lang.Override
mergeFrom(com.google.protobuf.Message other)4806       public Builder mergeFrom(com.google.protobuf.Message other) {
4807         if (other instanceof com.google.cloud.aiplatform.v1.BatchPredictionJob.OutputConfig) {
4808           return mergeFrom((com.google.cloud.aiplatform.v1.BatchPredictionJob.OutputConfig) other);
4809         } else {
4810           super.mergeFrom(other);
4811           return this;
4812         }
4813       }
4814 
mergeFrom( com.google.cloud.aiplatform.v1.BatchPredictionJob.OutputConfig other)4815       public Builder mergeFrom(
4816           com.google.cloud.aiplatform.v1.BatchPredictionJob.OutputConfig other) {
4817         if (other
4818             == com.google.cloud.aiplatform.v1.BatchPredictionJob.OutputConfig.getDefaultInstance())
4819           return this;
4820         if (!other.getPredictionsFormat().isEmpty()) {
4821           predictionsFormat_ = other.predictionsFormat_;
4822           bitField0_ |= 0x00000004;
4823           onChanged();
4824         }
4825         switch (other.getDestinationCase()) {
4826           case GCS_DESTINATION:
4827             {
4828               mergeGcsDestination(other.getGcsDestination());
4829               break;
4830             }
4831           case BIGQUERY_DESTINATION:
4832             {
4833               mergeBigqueryDestination(other.getBigqueryDestination());
4834               break;
4835             }
4836           case DESTINATION_NOT_SET:
4837             {
4838               break;
4839             }
4840         }
4841         this.mergeUnknownFields(other.getUnknownFields());
4842         onChanged();
4843         return this;
4844       }
4845 
      @java.lang.Override
      public final boolean isInitialized() {
        // OutputConfig declares no required fields, so every state is valid.
        return true;
      }
4850 
      // Parses serialized fields from the wire and merges them into this builder.
      @java.lang.Override
      public Builder mergeFrom(
          com.google.protobuf.CodedInputStream input,
          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
          throws java.io.IOException {
        if (extensionRegistry == null) {
          throw new java.lang.NullPointerException();
        }
        try {
          boolean done = false;
          while (!done) {
            int tag = input.readTag();
            switch (tag) {
              case 0:
                // Tag 0 marks end of input.
                done = true;
                break;
              case 10:
                {
                  // Field 1, wire type 2: predictions_format (string).
                  predictionsFormat_ = input.readStringRequireUtf8();
                  bitField0_ |= 0x00000004;
                  break;
                } // case 10
              case 18:
                {
                  // Field 2, wire type 2: gcs_destination (message in the oneof).
                  input.readMessage(
                      getGcsDestinationFieldBuilder().getBuilder(), extensionRegistry);
                  destinationCase_ = 2;
                  break;
                } // case 18
              case 26:
                {
                  // Field 3, wire type 2: bigquery_destination (message in the oneof).
                  input.readMessage(
                      getBigqueryDestinationFieldBuilder().getBuilder(), extensionRegistry);
                  destinationCase_ = 3;
                  break;
                } // case 26
              default:
                {
                  if (!super.parseUnknownField(input, extensionRegistry, tag)) {
                    done = true; // was an endgroup tag
                  }
                  break;
                } // default:
            } // switch (tag)
          } // while (!done)
        } catch (com.google.protobuf.InvalidProtocolBufferException e) {
          throw e.unwrapIOException();
        } finally {
          // Notify listeners even when parsing aborts with an exception.
          onChanged();
        } // finally
        return this;
      }
4903 
      // Discriminator for the `destination` oneof: 0 = not set, 2 = gcs_destination,
      // 3 = bigquery_destination (values match the proto field numbers).
      private int destinationCase_ = 0;
      // Value currently held by the oneof; its runtime type depends on the case.
      private java.lang.Object destination_;
4906 
getDestinationCase()4907       public DestinationCase getDestinationCase() {
4908         return DestinationCase.forNumber(destinationCase_);
4909       }
4910 
clearDestination()4911       public Builder clearDestination() {
4912         destinationCase_ = 0;
4913         destination_ = null;
4914         onChanged();
4915         return this;
4916       }
4917 
4918       private int bitField0_;
4919 
      // Lazily-created nested builder for the gcs_destination oneof member;
      // remains null until getGcsDestinationFieldBuilder() is first invoked.
      private com.google.protobuf.SingleFieldBuilderV3<
              com.google.cloud.aiplatform.v1.GcsDestination,
              com.google.cloud.aiplatform.v1.GcsDestination.Builder,
              com.google.cloud.aiplatform.v1.GcsDestinationOrBuilder>
          gcsDestinationBuilder_;
4925       /**
4926        *
4927        *
4928        * <pre>
4929        * The Cloud Storage location of the directory where the output is
4930        * to be written to. In the given directory a new directory is created.
4931        * Its name is `prediction-&lt;model-display-name&gt;-&lt;job-create-time&gt;`,
4932        * where timestamp is in YYYY-MM-DDThh:mm:ss.sssZ ISO-8601 format.
4933        * Inside of it files `predictions_0001.&lt;extension&gt;`,
4934        * `predictions_0002.&lt;extension&gt;`, ..., `predictions_N.&lt;extension&gt;`
4935        * are created where `&lt;extension&gt;` depends on chosen
4936        * [predictions_format][google.cloud.aiplatform.v1.BatchPredictionJob.OutputConfig.predictions_format],
4937        * and N may equal 0001 and depends on the total number of successfully
4938        * predicted instances. If the Model has both
4939        * [instance][google.cloud.aiplatform.v1.PredictSchemata.instance_schema_uri]
4940        * and
4941        * [prediction][google.cloud.aiplatform.v1.PredictSchemata.parameters_schema_uri]
4942        * schemata defined then each such file contains predictions as per the
4943        * [predictions_format][google.cloud.aiplatform.v1.BatchPredictionJob.OutputConfig.predictions_format].
4944        * If prediction for any instance failed (partially or completely), then
4945        * an additional `errors_0001.&lt;extension&gt;`, `errors_0002.&lt;extension&gt;`,...,
4946        * `errors_N.&lt;extension&gt;` files are created (N depends on total number
4947        * of failed predictions). These files contain the failed instances,
4948        * as per their schema, followed by an additional `error` field which as
4949        * value has [google.rpc.Status][google.rpc.Status]
4950        * containing only `code` and `message` fields.
4951        * </pre>
4952        *
4953        * <code>.google.cloud.aiplatform.v1.GcsDestination gcs_destination = 2;</code>
4954        *
4955        * @return Whether the gcsDestination field is set.
4956        */
4957       @java.lang.Override
hasGcsDestination()4958       public boolean hasGcsDestination() {
4959         return destinationCase_ == 2;
4960       }
4961       /**
4962        *
4963        *
4964        * <pre>
4965        * The Cloud Storage location of the directory where the output is
4966        * to be written to. In the given directory a new directory is created.
4967        * Its name is `prediction-&lt;model-display-name&gt;-&lt;job-create-time&gt;`,
4968        * where timestamp is in YYYY-MM-DDThh:mm:ss.sssZ ISO-8601 format.
4969        * Inside of it files `predictions_0001.&lt;extension&gt;`,
4970        * `predictions_0002.&lt;extension&gt;`, ..., `predictions_N.&lt;extension&gt;`
4971        * are created where `&lt;extension&gt;` depends on chosen
4972        * [predictions_format][google.cloud.aiplatform.v1.BatchPredictionJob.OutputConfig.predictions_format],
4973        * and N may equal 0001 and depends on the total number of successfully
4974        * predicted instances. If the Model has both
4975        * [instance][google.cloud.aiplatform.v1.PredictSchemata.instance_schema_uri]
4976        * and
4977        * [prediction][google.cloud.aiplatform.v1.PredictSchemata.parameters_schema_uri]
4978        * schemata defined then each such file contains predictions as per the
4979        * [predictions_format][google.cloud.aiplatform.v1.BatchPredictionJob.OutputConfig.predictions_format].
4980        * If prediction for any instance failed (partially or completely), then
4981        * an additional `errors_0001.&lt;extension&gt;`, `errors_0002.&lt;extension&gt;`,...,
4982        * `errors_N.&lt;extension&gt;` files are created (N depends on total number
4983        * of failed predictions). These files contain the failed instances,
4984        * as per their schema, followed by an additional `error` field which as
4985        * value has [google.rpc.Status][google.rpc.Status]
4986        * containing only `code` and `message` fields.
4987        * </pre>
4988        *
4989        * <code>.google.cloud.aiplatform.v1.GcsDestination gcs_destination = 2;</code>
4990        *
4991        * @return The gcsDestination.
4992        */
4993       @java.lang.Override
getGcsDestination()4994       public com.google.cloud.aiplatform.v1.GcsDestination getGcsDestination() {
4995         if (gcsDestinationBuilder_ == null) {
4996           if (destinationCase_ == 2) {
4997             return (com.google.cloud.aiplatform.v1.GcsDestination) destination_;
4998           }
4999           return com.google.cloud.aiplatform.v1.GcsDestination.getDefaultInstance();
5000         } else {
5001           if (destinationCase_ == 2) {
5002             return gcsDestinationBuilder_.getMessage();
5003           }
5004           return com.google.cloud.aiplatform.v1.GcsDestination.getDefaultInstance();
5005         }
5006       }
5007       /**
5008        *
5009        *
5010        * <pre>
5011        * The Cloud Storage location of the directory where the output is
5012        * to be written to. In the given directory a new directory is created.
5013        * Its name is `prediction-&lt;model-display-name&gt;-&lt;job-create-time&gt;`,
5014        * where timestamp is in YYYY-MM-DDThh:mm:ss.sssZ ISO-8601 format.
5015        * Inside of it files `predictions_0001.&lt;extension&gt;`,
5016        * `predictions_0002.&lt;extension&gt;`, ..., `predictions_N.&lt;extension&gt;`
5017        * are created where `&lt;extension&gt;` depends on chosen
5018        * [predictions_format][google.cloud.aiplatform.v1.BatchPredictionJob.OutputConfig.predictions_format],
5019        * and N may equal 0001 and depends on the total number of successfully
5020        * predicted instances. If the Model has both
5021        * [instance][google.cloud.aiplatform.v1.PredictSchemata.instance_schema_uri]
5022        * and
5023        * [prediction][google.cloud.aiplatform.v1.PredictSchemata.parameters_schema_uri]
5024        * schemata defined then each such file contains predictions as per the
5025        * [predictions_format][google.cloud.aiplatform.v1.BatchPredictionJob.OutputConfig.predictions_format].
5026        * If prediction for any instance failed (partially or completely), then
5027        * an additional `errors_0001.&lt;extension&gt;`, `errors_0002.&lt;extension&gt;`,...,
5028        * `errors_N.&lt;extension&gt;` files are created (N depends on total number
5029        * of failed predictions). These files contain the failed instances,
5030        * as per their schema, followed by an additional `error` field which as
5031        * value has [google.rpc.Status][google.rpc.Status]
5032        * containing only `code` and `message` fields.
5033        * </pre>
5034        *
5035        * <code>.google.cloud.aiplatform.v1.GcsDestination gcs_destination = 2;</code>
5036        */
setGcsDestination(com.google.cloud.aiplatform.v1.GcsDestination value)5037       public Builder setGcsDestination(com.google.cloud.aiplatform.v1.GcsDestination value) {
5038         if (gcsDestinationBuilder_ == null) {
5039           if (value == null) {
5040             throw new NullPointerException();
5041           }
5042           destination_ = value;
5043           onChanged();
5044         } else {
5045           gcsDestinationBuilder_.setMessage(value);
5046         }
5047         destinationCase_ = 2;
5048         return this;
5049       }
5050       /**
5051        *
5052        *
5053        * <pre>
5054        * The Cloud Storage location of the directory where the output is
5055        * to be written to. In the given directory a new directory is created.
5056        * Its name is `prediction-&lt;model-display-name&gt;-&lt;job-create-time&gt;`,
5057        * where timestamp is in YYYY-MM-DDThh:mm:ss.sssZ ISO-8601 format.
5058        * Inside of it files `predictions_0001.&lt;extension&gt;`,
5059        * `predictions_0002.&lt;extension&gt;`, ..., `predictions_N.&lt;extension&gt;`
5060        * are created where `&lt;extension&gt;` depends on chosen
5061        * [predictions_format][google.cloud.aiplatform.v1.BatchPredictionJob.OutputConfig.predictions_format],
5062        * and N may equal 0001 and depends on the total number of successfully
5063        * predicted instances. If the Model has both
5064        * [instance][google.cloud.aiplatform.v1.PredictSchemata.instance_schema_uri]
5065        * and
5066        * [prediction][google.cloud.aiplatform.v1.PredictSchemata.parameters_schema_uri]
5067        * schemata defined then each such file contains predictions as per the
5068        * [predictions_format][google.cloud.aiplatform.v1.BatchPredictionJob.OutputConfig.predictions_format].
5069        * If prediction for any instance failed (partially or completely), then
5070        * an additional `errors_0001.&lt;extension&gt;`, `errors_0002.&lt;extension&gt;`,...,
5071        * `errors_N.&lt;extension&gt;` files are created (N depends on total number
5072        * of failed predictions). These files contain the failed instances,
5073        * as per their schema, followed by an additional `error` field which as
5074        * value has [google.rpc.Status][google.rpc.Status]
5075        * containing only `code` and `message` fields.
5076        * </pre>
5077        *
5078        * <code>.google.cloud.aiplatform.v1.GcsDestination gcs_destination = 2;</code>
5079        */
setGcsDestination( com.google.cloud.aiplatform.v1.GcsDestination.Builder builderForValue)5080       public Builder setGcsDestination(
5081           com.google.cloud.aiplatform.v1.GcsDestination.Builder builderForValue) {
5082         if (gcsDestinationBuilder_ == null) {
5083           destination_ = builderForValue.build();
5084           onChanged();
5085         } else {
5086           gcsDestinationBuilder_.setMessage(builderForValue.build());
5087         }
5088         destinationCase_ = 2;
5089         return this;
5090       }
5091       /**
5092        *
5093        *
5094        * <pre>
5095        * The Cloud Storage location of the directory where the output is
5096        * to be written to. In the given directory a new directory is created.
5097        * Its name is `prediction-&lt;model-display-name&gt;-&lt;job-create-time&gt;`,
5098        * where timestamp is in YYYY-MM-DDThh:mm:ss.sssZ ISO-8601 format.
5099        * Inside of it files `predictions_0001.&lt;extension&gt;`,
5100        * `predictions_0002.&lt;extension&gt;`, ..., `predictions_N.&lt;extension&gt;`
5101        * are created where `&lt;extension&gt;` depends on chosen
5102        * [predictions_format][google.cloud.aiplatform.v1.BatchPredictionJob.OutputConfig.predictions_format],
5103        * and N may equal 0001 and depends on the total number of successfully
5104        * predicted instances. If the Model has both
5105        * [instance][google.cloud.aiplatform.v1.PredictSchemata.instance_schema_uri]
5106        * and
5107        * [prediction][google.cloud.aiplatform.v1.PredictSchemata.parameters_schema_uri]
5108        * schemata defined then each such file contains predictions as per the
5109        * [predictions_format][google.cloud.aiplatform.v1.BatchPredictionJob.OutputConfig.predictions_format].
5110        * If prediction for any instance failed (partially or completely), then
5111        * an additional `errors_0001.&lt;extension&gt;`, `errors_0002.&lt;extension&gt;`,...,
5112        * `errors_N.&lt;extension&gt;` files are created (N depends on total number
5113        * of failed predictions). These files contain the failed instances,
5114        * as per their schema, followed by an additional `error` field which as
5115        * value has [google.rpc.Status][google.rpc.Status]
5116        * containing only `code` and `message` fields.
5117        * </pre>
5118        *
5119        * <code>.google.cloud.aiplatform.v1.GcsDestination gcs_destination = 2;</code>
5120        */
mergeGcsDestination(com.google.cloud.aiplatform.v1.GcsDestination value)5121       public Builder mergeGcsDestination(com.google.cloud.aiplatform.v1.GcsDestination value) {
5122         if (gcsDestinationBuilder_ == null) {
5123           if (destinationCase_ == 2
5124               && destination_
5125                   != com.google.cloud.aiplatform.v1.GcsDestination.getDefaultInstance()) {
5126             destination_ =
5127                 com.google.cloud.aiplatform.v1.GcsDestination.newBuilder(
5128                         (com.google.cloud.aiplatform.v1.GcsDestination) destination_)
5129                     .mergeFrom(value)
5130                     .buildPartial();
5131           } else {
5132             destination_ = value;
5133           }
5134           onChanged();
5135         } else {
5136           if (destinationCase_ == 2) {
5137             gcsDestinationBuilder_.mergeFrom(value);
5138           } else {
5139             gcsDestinationBuilder_.setMessage(value);
5140           }
5141         }
5142         destinationCase_ = 2;
5143         return this;
5144       }
5145       /**
5146        *
5147        *
5148        * <pre>
5149        * The Cloud Storage location of the directory where the output is
5150        * to be written to. In the given directory a new directory is created.
5151        * Its name is `prediction-&lt;model-display-name&gt;-&lt;job-create-time&gt;`,
5152        * where timestamp is in YYYY-MM-DDThh:mm:ss.sssZ ISO-8601 format.
5153        * Inside of it files `predictions_0001.&lt;extension&gt;`,
5154        * `predictions_0002.&lt;extension&gt;`, ..., `predictions_N.&lt;extension&gt;`
5155        * are created where `&lt;extension&gt;` depends on chosen
5156        * [predictions_format][google.cloud.aiplatform.v1.BatchPredictionJob.OutputConfig.predictions_format],
5157        * and N may equal 0001 and depends on the total number of successfully
5158        * predicted instances. If the Model has both
5159        * [instance][google.cloud.aiplatform.v1.PredictSchemata.instance_schema_uri]
5160        * and
5161        * [prediction][google.cloud.aiplatform.v1.PredictSchemata.parameters_schema_uri]
5162        * schemata defined then each such file contains predictions as per the
5163        * [predictions_format][google.cloud.aiplatform.v1.BatchPredictionJob.OutputConfig.predictions_format].
5164        * If prediction for any instance failed (partially or completely), then
5165        * an additional `errors_0001.&lt;extension&gt;`, `errors_0002.&lt;extension&gt;`,...,
5166        * `errors_N.&lt;extension&gt;` files are created (N depends on total number
5167        * of failed predictions). These files contain the failed instances,
5168        * as per their schema, followed by an additional `error` field which as
5169        * value has [google.rpc.Status][google.rpc.Status]
5170        * containing only `code` and `message` fields.
5171        * </pre>
5172        *
5173        * <code>.google.cloud.aiplatform.v1.GcsDestination gcs_destination = 2;</code>
5174        */
clearGcsDestination()5175       public Builder clearGcsDestination() {
5176         if (gcsDestinationBuilder_ == null) {
5177           if (destinationCase_ == 2) {
5178             destinationCase_ = 0;
5179             destination_ = null;
5180             onChanged();
5181           }
5182         } else {
5183           if (destinationCase_ == 2) {
5184             destinationCase_ = 0;
5185             destination_ = null;
5186           }
5187           gcsDestinationBuilder_.clear();
5188         }
5189         return this;
5190       }
5191       /**
5192        *
5193        *
5194        * <pre>
5195        * The Cloud Storage location of the directory where the output is
5196        * to be written to. In the given directory a new directory is created.
5197        * Its name is `prediction-&lt;model-display-name&gt;-&lt;job-create-time&gt;`,
5198        * where timestamp is in YYYY-MM-DDThh:mm:ss.sssZ ISO-8601 format.
5199        * Inside of it files `predictions_0001.&lt;extension&gt;`,
5200        * `predictions_0002.&lt;extension&gt;`, ..., `predictions_N.&lt;extension&gt;`
5201        * are created where `&lt;extension&gt;` depends on chosen
5202        * [predictions_format][google.cloud.aiplatform.v1.BatchPredictionJob.OutputConfig.predictions_format],
5203        * and N may equal 0001 and depends on the total number of successfully
5204        * predicted instances. If the Model has both
5205        * [instance][google.cloud.aiplatform.v1.PredictSchemata.instance_schema_uri]
5206        * and
5207        * [prediction][google.cloud.aiplatform.v1.PredictSchemata.parameters_schema_uri]
5208        * schemata defined then each such file contains predictions as per the
5209        * [predictions_format][google.cloud.aiplatform.v1.BatchPredictionJob.OutputConfig.predictions_format].
5210        * If prediction for any instance failed (partially or completely), then
5211        * an additional `errors_0001.&lt;extension&gt;`, `errors_0002.&lt;extension&gt;`,...,
5212        * `errors_N.&lt;extension&gt;` files are created (N depends on total number
5213        * of failed predictions). These files contain the failed instances,
5214        * as per their schema, followed by an additional `error` field which as
5215        * value has [google.rpc.Status][google.rpc.Status]
5216        * containing only `code` and `message` fields.
5217        * </pre>
5218        *
5219        * <code>.google.cloud.aiplatform.v1.GcsDestination gcs_destination = 2;</code>
5220        */
getGcsDestinationBuilder()5221       public com.google.cloud.aiplatform.v1.GcsDestination.Builder getGcsDestinationBuilder() {
5222         return getGcsDestinationFieldBuilder().getBuilder();
5223       }
5224       /**
5225        *
5226        *
5227        * <pre>
5228        * The Cloud Storage location of the directory where the output is
5229        * to be written to. In the given directory a new directory is created.
5230        * Its name is `prediction-&lt;model-display-name&gt;-&lt;job-create-time&gt;`,
5231        * where timestamp is in YYYY-MM-DDThh:mm:ss.sssZ ISO-8601 format.
5232        * Inside of it files `predictions_0001.&lt;extension&gt;`,
5233        * `predictions_0002.&lt;extension&gt;`, ..., `predictions_N.&lt;extension&gt;`
5234        * are created where `&lt;extension&gt;` depends on chosen
5235        * [predictions_format][google.cloud.aiplatform.v1.BatchPredictionJob.OutputConfig.predictions_format],
5236        * and N may equal 0001 and depends on the total number of successfully
5237        * predicted instances. If the Model has both
5238        * [instance][google.cloud.aiplatform.v1.PredictSchemata.instance_schema_uri]
5239        * and
5240        * [prediction][google.cloud.aiplatform.v1.PredictSchemata.parameters_schema_uri]
5241        * schemata defined then each such file contains predictions as per the
5242        * [predictions_format][google.cloud.aiplatform.v1.BatchPredictionJob.OutputConfig.predictions_format].
5243        * If prediction for any instance failed (partially or completely), then
5244        * an additional `errors_0001.&lt;extension&gt;`, `errors_0002.&lt;extension&gt;`,...,
5245        * `errors_N.&lt;extension&gt;` files are created (N depends on total number
5246        * of failed predictions). These files contain the failed instances,
5247        * as per their schema, followed by an additional `error` field which as
5248        * value has [google.rpc.Status][google.rpc.Status]
5249        * containing only `code` and `message` fields.
5250        * </pre>
5251        *
5252        * <code>.google.cloud.aiplatform.v1.GcsDestination gcs_destination = 2;</code>
5253        */
5254       @java.lang.Override
getGcsDestinationOrBuilder()5255       public com.google.cloud.aiplatform.v1.GcsDestinationOrBuilder getGcsDestinationOrBuilder() {
5256         if ((destinationCase_ == 2) && (gcsDestinationBuilder_ != null)) {
5257           return gcsDestinationBuilder_.getMessageOrBuilder();
5258         } else {
5259           if (destinationCase_ == 2) {
5260             return (com.google.cloud.aiplatform.v1.GcsDestination) destination_;
5261           }
5262           return com.google.cloud.aiplatform.v1.GcsDestination.getDefaultInstance();
5263         }
5264       }
5265       /**
5266        *
5267        *
5268        * <pre>
5269        * The Cloud Storage location of the directory where the output is
5270        * to be written to. In the given directory a new directory is created.
5271        * Its name is `prediction-&lt;model-display-name&gt;-&lt;job-create-time&gt;`,
5272        * where timestamp is in YYYY-MM-DDThh:mm:ss.sssZ ISO-8601 format.
5273        * Inside of it files `predictions_0001.&lt;extension&gt;`,
5274        * `predictions_0002.&lt;extension&gt;`, ..., `predictions_N.&lt;extension&gt;`
5275        * are created where `&lt;extension&gt;` depends on chosen
5276        * [predictions_format][google.cloud.aiplatform.v1.BatchPredictionJob.OutputConfig.predictions_format],
5277        * and N may equal 0001 and depends on the total number of successfully
5278        * predicted instances. If the Model has both
5279        * [instance][google.cloud.aiplatform.v1.PredictSchemata.instance_schema_uri]
5280        * and
5281        * [prediction][google.cloud.aiplatform.v1.PredictSchemata.parameters_schema_uri]
5282        * schemata defined then each such file contains predictions as per the
5283        * [predictions_format][google.cloud.aiplatform.v1.BatchPredictionJob.OutputConfig.predictions_format].
5284        * If prediction for any instance failed (partially or completely), then
5285        * an additional `errors_0001.&lt;extension&gt;`, `errors_0002.&lt;extension&gt;`,...,
5286        * `errors_N.&lt;extension&gt;` files are created (N depends on total number
5287        * of failed predictions). These files contain the failed instances,
5288        * as per their schema, followed by an additional `error` field which as
5289        * value has [google.rpc.Status][google.rpc.Status]
5290        * containing only `code` and `message` fields.
5291        * </pre>
5292        *
5293        * <code>.google.cloud.aiplatform.v1.GcsDestination gcs_destination = 2;</code>
5294        */
      // Returns the nested builder for gcs_destination, creating it on first use.
      // Side effects: selects gcs_destination as the active oneof case and fires
      // onChanged() on every call.
      private com.google.protobuf.SingleFieldBuilderV3<
              com.google.cloud.aiplatform.v1.GcsDestination,
              com.google.cloud.aiplatform.v1.GcsDestination.Builder,
              com.google.cloud.aiplatform.v1.GcsDestinationOrBuilder>
          getGcsDestinationFieldBuilder() {
        if (gcsDestinationBuilder_ == null) {
          // Seed with the current oneof value when gcs_destination is already
          // selected; otherwise start from the default instance.
          if (!(destinationCase_ == 2)) {
            destination_ = com.google.cloud.aiplatform.v1.GcsDestination.getDefaultInstance();
          }
          gcsDestinationBuilder_ =
              new com.google.protobuf.SingleFieldBuilderV3<
                  com.google.cloud.aiplatform.v1.GcsDestination,
                  com.google.cloud.aiplatform.v1.GcsDestination.Builder,
                  com.google.cloud.aiplatform.v1.GcsDestinationOrBuilder>(
                  (com.google.cloud.aiplatform.v1.GcsDestination) destination_,
                  getParentForChildren(),
                  isClean());
          // Ownership of the value moves into the nested builder.
          destination_ = null;
        }
        destinationCase_ = 2;
        onChanged();
        return gcsDestinationBuilder_;
      }
5318 
      // Lazily-created nested builder for the bigquery_destination oneof member;
      // remains null until its field-builder accessor is first invoked.
      private com.google.protobuf.SingleFieldBuilderV3<
              com.google.cloud.aiplatform.v1.BigQueryDestination,
              com.google.cloud.aiplatform.v1.BigQueryDestination.Builder,
              com.google.cloud.aiplatform.v1.BigQueryDestinationOrBuilder>
          bigqueryDestinationBuilder_;
5324       /**
5325        *
5326        *
5327        * <pre>
5328        * The BigQuery project or dataset location where the output is to be
5329        * written to. If project is provided, a new dataset is created with name
5330        * `prediction_&lt;model-display-name&gt;_&lt;job-create-time&gt;`
5331        * where &lt;model-display-name&gt; is made
5332        * BigQuery-dataset-name compatible (for example, most special characters
5333        * become underscores), and timestamp is in
5334        * YYYY_MM_DDThh_mm_ss_sssZ "based on ISO-8601" format. In the dataset
5335        * two tables will be created, `predictions`, and `errors`.
5336        * If the Model has both
5337        * [instance][google.cloud.aiplatform.v1.PredictSchemata.instance_schema_uri]
5338        * and
5339        * [prediction][google.cloud.aiplatform.v1.PredictSchemata.parameters_schema_uri]
5340        * schemata defined then the tables have columns as follows: The
5341        * `predictions` table contains instances for which the prediction
5342        * succeeded, it has columns as per a concatenation of the Model's
5343        * instance and prediction schemata. The `errors` table contains rows for
5344        * which the prediction has failed, it has instance columns, as per the
5345        * instance schema, followed by a single "errors" column, which as values
5346        * has [google.rpc.Status][google.rpc.Status]
5347        * represented as a STRUCT, and containing only `code` and `message`.
5348        * </pre>
5349        *
5350        * <code>.google.cloud.aiplatform.v1.BigQueryDestination bigquery_destination = 3;</code>
5351        *
5352        * @return Whether the bigqueryDestination field is set.
5353        */
5354       @java.lang.Override
hasBigqueryDestination()5355       public boolean hasBigqueryDestination() {
5356         return destinationCase_ == 3;
5357       }
5358       /**
5359        *
5360        *
5361        * <pre>
5362        * The BigQuery project or dataset location where the output is to be
5363        * written to. If project is provided, a new dataset is created with name
5364        * `prediction_&lt;model-display-name&gt;_&lt;job-create-time&gt;`
5365        * where &lt;model-display-name&gt; is made
5366        * BigQuery-dataset-name compatible (for example, most special characters
5367        * become underscores), and timestamp is in
5368        * YYYY_MM_DDThh_mm_ss_sssZ "based on ISO-8601" format. In the dataset
5369        * two tables will be created, `predictions`, and `errors`.
5370        * If the Model has both
5371        * [instance][google.cloud.aiplatform.v1.PredictSchemata.instance_schema_uri]
5372        * and
5373        * [prediction][google.cloud.aiplatform.v1.PredictSchemata.parameters_schema_uri]
5374        * schemata defined then the tables have columns as follows: The
5375        * `predictions` table contains instances for which the prediction
5376        * succeeded, it has columns as per a concatenation of the Model's
5377        * instance and prediction schemata. The `errors` table contains rows for
5378        * which the prediction has failed, it has instance columns, as per the
5379        * instance schema, followed by a single "errors" column, which as values
5380        * has [google.rpc.Status][google.rpc.Status]
5381        * represented as a STRUCT, and containing only `code` and `message`.
5382        * </pre>
5383        *
5384        * <code>.google.cloud.aiplatform.v1.BigQueryDestination bigquery_destination = 3;</code>
5385        *
5386        * @return The bigqueryDestination.
5387        */
5388       @java.lang.Override
getBigqueryDestination()5389       public com.google.cloud.aiplatform.v1.BigQueryDestination getBigqueryDestination() {
5390         if (bigqueryDestinationBuilder_ == null) {
5391           if (destinationCase_ == 3) {
5392             return (com.google.cloud.aiplatform.v1.BigQueryDestination) destination_;
5393           }
5394           return com.google.cloud.aiplatform.v1.BigQueryDestination.getDefaultInstance();
5395         } else {
5396           if (destinationCase_ == 3) {
5397             return bigqueryDestinationBuilder_.getMessage();
5398           }
5399           return com.google.cloud.aiplatform.v1.BigQueryDestination.getDefaultInstance();
5400         }
5401       }
5402       /**
5403        *
5404        *
5405        * <pre>
5406        * The BigQuery project or dataset location where the output is to be
5407        * written to. If project is provided, a new dataset is created with name
5408        * `prediction_&lt;model-display-name&gt;_&lt;job-create-time&gt;`
5409        * where &lt;model-display-name&gt; is made
5410        * BigQuery-dataset-name compatible (for example, most special characters
5411        * become underscores), and timestamp is in
5412        * YYYY_MM_DDThh_mm_ss_sssZ "based on ISO-8601" format. In the dataset
5413        * two tables will be created, `predictions`, and `errors`.
5414        * If the Model has both
5415        * [instance][google.cloud.aiplatform.v1.PredictSchemata.instance_schema_uri]
5416        * and
5417        * [prediction][google.cloud.aiplatform.v1.PredictSchemata.parameters_schema_uri]
5418        * schemata defined then the tables have columns as follows: The
5419        * `predictions` table contains instances for which the prediction
5420        * succeeded, it has columns as per a concatenation of the Model's
5421        * instance and prediction schemata. The `errors` table contains rows for
5422        * which the prediction has failed, it has instance columns, as per the
5423        * instance schema, followed by a single "errors" column, which as values
5424        * has [google.rpc.Status][google.rpc.Status]
5425        * represented as a STRUCT, and containing only `code` and `message`.
5426        * </pre>
5427        *
5428        * <code>.google.cloud.aiplatform.v1.BigQueryDestination bigquery_destination = 3;</code>
5429        */
setBigqueryDestination( com.google.cloud.aiplatform.v1.BigQueryDestination value)5430       public Builder setBigqueryDestination(
5431           com.google.cloud.aiplatform.v1.BigQueryDestination value) {
5432         if (bigqueryDestinationBuilder_ == null) {
5433           if (value == null) {
5434             throw new NullPointerException();
5435           }
5436           destination_ = value;
5437           onChanged();
5438         } else {
5439           bigqueryDestinationBuilder_.setMessage(value);
5440         }
5441         destinationCase_ = 3;
5442         return this;
5443       }
5444       /**
5445        *
5446        *
5447        * <pre>
5448        * The BigQuery project or dataset location where the output is to be
5449        * written to. If project is provided, a new dataset is created with name
5450        * `prediction_&lt;model-display-name&gt;_&lt;job-create-time&gt;`
5451        * where &lt;model-display-name&gt; is made
5452        * BigQuery-dataset-name compatible (for example, most special characters
5453        * become underscores), and timestamp is in
5454        * YYYY_MM_DDThh_mm_ss_sssZ "based on ISO-8601" format. In the dataset
5455        * two tables will be created, `predictions`, and `errors`.
5456        * If the Model has both
5457        * [instance][google.cloud.aiplatform.v1.PredictSchemata.instance_schema_uri]
5458        * and
5459        * [prediction][google.cloud.aiplatform.v1.PredictSchemata.parameters_schema_uri]
5460        * schemata defined then the tables have columns as follows: The
5461        * `predictions` table contains instances for which the prediction
5462        * succeeded, it has columns as per a concatenation of the Model's
5463        * instance and prediction schemata. The `errors` table contains rows for
5464        * which the prediction has failed, it has instance columns, as per the
5465        * instance schema, followed by a single "errors" column, which as values
5466        * has [google.rpc.Status][google.rpc.Status]
5467        * represented as a STRUCT, and containing only `code` and `message`.
5468        * </pre>
5469        *
5470        * <code>.google.cloud.aiplatform.v1.BigQueryDestination bigquery_destination = 3;</code>
5471        */
setBigqueryDestination( com.google.cloud.aiplatform.v1.BigQueryDestination.Builder builderForValue)5472       public Builder setBigqueryDestination(
5473           com.google.cloud.aiplatform.v1.BigQueryDestination.Builder builderForValue) {
5474         if (bigqueryDestinationBuilder_ == null) {
5475           destination_ = builderForValue.build();
5476           onChanged();
5477         } else {
5478           bigqueryDestinationBuilder_.setMessage(builderForValue.build());
5479         }
5480         destinationCase_ = 3;
5481         return this;
5482       }
5483       /**
5484        *
5485        *
5486        * <pre>
5487        * The BigQuery project or dataset location where the output is to be
5488        * written to. If project is provided, a new dataset is created with name
5489        * `prediction_&lt;model-display-name&gt;_&lt;job-create-time&gt;`
5490        * where &lt;model-display-name&gt; is made
5491        * BigQuery-dataset-name compatible (for example, most special characters
5492        * become underscores), and timestamp is in
5493        * YYYY_MM_DDThh_mm_ss_sssZ "based on ISO-8601" format. In the dataset
5494        * two tables will be created, `predictions`, and `errors`.
5495        * If the Model has both
5496        * [instance][google.cloud.aiplatform.v1.PredictSchemata.instance_schema_uri]
5497        * and
5498        * [prediction][google.cloud.aiplatform.v1.PredictSchemata.parameters_schema_uri]
5499        * schemata defined then the tables have columns as follows: The
5500        * `predictions` table contains instances for which the prediction
5501        * succeeded, it has columns as per a concatenation of the Model's
5502        * instance and prediction schemata. The `errors` table contains rows for
5503        * which the prediction has failed, it has instance columns, as per the
5504        * instance schema, followed by a single "errors" column, which as values
5505        * has [google.rpc.Status][google.rpc.Status]
5506        * represented as a STRUCT, and containing only `code` and `message`.
5507        * </pre>
5508        *
5509        * <code>.google.cloud.aiplatform.v1.BigQueryDestination bigquery_destination = 3;</code>
5510        */
mergeBigqueryDestination( com.google.cloud.aiplatform.v1.BigQueryDestination value)5511       public Builder mergeBigqueryDestination(
5512           com.google.cloud.aiplatform.v1.BigQueryDestination value) {
5513         if (bigqueryDestinationBuilder_ == null) {
5514           if (destinationCase_ == 3
5515               && destination_
5516                   != com.google.cloud.aiplatform.v1.BigQueryDestination.getDefaultInstance()) {
5517             destination_ =
5518                 com.google.cloud.aiplatform.v1.BigQueryDestination.newBuilder(
5519                         (com.google.cloud.aiplatform.v1.BigQueryDestination) destination_)
5520                     .mergeFrom(value)
5521                     .buildPartial();
5522           } else {
5523             destination_ = value;
5524           }
5525           onChanged();
5526         } else {
5527           if (destinationCase_ == 3) {
5528             bigqueryDestinationBuilder_.mergeFrom(value);
5529           } else {
5530             bigqueryDestinationBuilder_.setMessage(value);
5531           }
5532         }
5533         destinationCase_ = 3;
5534         return this;
5535       }
5536       /**
5537        *
5538        *
5539        * <pre>
5540        * The BigQuery project or dataset location where the output is to be
5541        * written to. If project is provided, a new dataset is created with name
5542        * `prediction_&lt;model-display-name&gt;_&lt;job-create-time&gt;`
5543        * where &lt;model-display-name&gt; is made
5544        * BigQuery-dataset-name compatible (for example, most special characters
5545        * become underscores), and timestamp is in
5546        * YYYY_MM_DDThh_mm_ss_sssZ "based on ISO-8601" format. In the dataset
5547        * two tables will be created, `predictions`, and `errors`.
5548        * If the Model has both
5549        * [instance][google.cloud.aiplatform.v1.PredictSchemata.instance_schema_uri]
5550        * and
5551        * [prediction][google.cloud.aiplatform.v1.PredictSchemata.parameters_schema_uri]
5552        * schemata defined then the tables have columns as follows: The
5553        * `predictions` table contains instances for which the prediction
5554        * succeeded, it has columns as per a concatenation of the Model's
5555        * instance and prediction schemata. The `errors` table contains rows for
5556        * which the prediction has failed, it has instance columns, as per the
5557        * instance schema, followed by a single "errors" column, which as values
5558        * has [google.rpc.Status][google.rpc.Status]
5559        * represented as a STRUCT, and containing only `code` and `message`.
5560        * </pre>
5561        *
5562        * <code>.google.cloud.aiplatform.v1.BigQueryDestination bigquery_destination = 3;</code>
5563        */
clearBigqueryDestination()5564       public Builder clearBigqueryDestination() {
5565         if (bigqueryDestinationBuilder_ == null) {
5566           if (destinationCase_ == 3) {
5567             destinationCase_ = 0;
5568             destination_ = null;
5569             onChanged();
5570           }
5571         } else {
5572           if (destinationCase_ == 3) {
5573             destinationCase_ = 0;
5574             destination_ = null;
5575           }
5576           bigqueryDestinationBuilder_.clear();
5577         }
5578         return this;
5579       }
5580       /**
5581        *
5582        *
5583        * <pre>
5584        * The BigQuery project or dataset location where the output is to be
5585        * written to. If project is provided, a new dataset is created with name
5586        * `prediction_&lt;model-display-name&gt;_&lt;job-create-time&gt;`
5587        * where &lt;model-display-name&gt; is made
5588        * BigQuery-dataset-name compatible (for example, most special characters
5589        * become underscores), and timestamp is in
5590        * YYYY_MM_DDThh_mm_ss_sssZ "based on ISO-8601" format. In the dataset
5591        * two tables will be created, `predictions`, and `errors`.
5592        * If the Model has both
5593        * [instance][google.cloud.aiplatform.v1.PredictSchemata.instance_schema_uri]
5594        * and
5595        * [prediction][google.cloud.aiplatform.v1.PredictSchemata.parameters_schema_uri]
5596        * schemata defined then the tables have columns as follows: The
5597        * `predictions` table contains instances for which the prediction
5598        * succeeded, it has columns as per a concatenation of the Model's
5599        * instance and prediction schemata. The `errors` table contains rows for
5600        * which the prediction has failed, it has instance columns, as per the
5601        * instance schema, followed by a single "errors" column, which as values
5602        * has [google.rpc.Status][google.rpc.Status]
5603        * represented as a STRUCT, and containing only `code` and `message`.
5604        * </pre>
5605        *
5606        * <code>.google.cloud.aiplatform.v1.BigQueryDestination bigquery_destination = 3;</code>
5607        */
      // Forces creation of the field builder (selecting this oneof case) and
      // returns its mutable view.
      public com.google.cloud.aiplatform.v1.BigQueryDestination.Builder
          getBigqueryDestinationBuilder() {
        return getBigqueryDestinationFieldBuilder().getBuilder();
      }
5612       /**
5613        *
5614        *
5615        * <pre>
5616        * The BigQuery project or dataset location where the output is to be
5617        * written to. If project is provided, a new dataset is created with name
5618        * `prediction_&lt;model-display-name&gt;_&lt;job-create-time&gt;`
5619        * where &lt;model-display-name&gt; is made
5620        * BigQuery-dataset-name compatible (for example, most special characters
5621        * become underscores), and timestamp is in
5622        * YYYY_MM_DDThh_mm_ss_sssZ "based on ISO-8601" format. In the dataset
5623        * two tables will be created, `predictions`, and `errors`.
5624        * If the Model has both
5625        * [instance][google.cloud.aiplatform.v1.PredictSchemata.instance_schema_uri]
5626        * and
5627        * [prediction][google.cloud.aiplatform.v1.PredictSchemata.parameters_schema_uri]
5628        * schemata defined then the tables have columns as follows: The
5629        * `predictions` table contains instances for which the prediction
5630        * succeeded, it has columns as per a concatenation of the Model's
5631        * instance and prediction schemata. The `errors` table contains rows for
5632        * which the prediction has failed, it has instance columns, as per the
5633        * instance schema, followed by a single "errors" column, which as values
5634        * has [google.rpc.Status][google.rpc.Status]
5635        * represented as a STRUCT, and containing only `code` and `message`.
5636        * </pre>
5637        *
5638        * <code>.google.cloud.aiplatform.v1.BigQueryDestination bigquery_destination = 3;</code>
5639        */
5640       @java.lang.Override
5641       public com.google.cloud.aiplatform.v1.BigQueryDestinationOrBuilder
getBigqueryDestinationOrBuilder()5642           getBigqueryDestinationOrBuilder() {
5643         if ((destinationCase_ == 3) && (bigqueryDestinationBuilder_ != null)) {
5644           return bigqueryDestinationBuilder_.getMessageOrBuilder();
5645         } else {
5646           if (destinationCase_ == 3) {
5647             return (com.google.cloud.aiplatform.v1.BigQueryDestination) destination_;
5648           }
5649           return com.google.cloud.aiplatform.v1.BigQueryDestination.getDefaultInstance();
5650         }
5651       }
5652       /**
5653        *
5654        *
5655        * <pre>
5656        * The BigQuery project or dataset location where the output is to be
5657        * written to. If project is provided, a new dataset is created with name
5658        * `prediction_&lt;model-display-name&gt;_&lt;job-create-time&gt;`
5659        * where &lt;model-display-name&gt; is made
5660        * BigQuery-dataset-name compatible (for example, most special characters
5661        * become underscores), and timestamp is in
5662        * YYYY_MM_DDThh_mm_ss_sssZ "based on ISO-8601" format. In the dataset
5663        * two tables will be created, `predictions`, and `errors`.
5664        * If the Model has both
5665        * [instance][google.cloud.aiplatform.v1.PredictSchemata.instance_schema_uri]
5666        * and
5667        * [prediction][google.cloud.aiplatform.v1.PredictSchemata.parameters_schema_uri]
5668        * schemata defined then the tables have columns as follows: The
5669        * `predictions` table contains instances for which the prediction
5670        * succeeded, it has columns as per a concatenation of the Model's
5671        * instance and prediction schemata. The `errors` table contains rows for
5672        * which the prediction has failed, it has instance columns, as per the
5673        * instance schema, followed by a single "errors" column, which as values
5674        * has [google.rpc.Status][google.rpc.Status]
5675        * represented as a STRUCT, and containing only `code` and `message`.
5676        * </pre>
5677        *
5678        * <code>.google.cloud.aiplatform.v1.BigQueryDestination bigquery_destination = 3;</code>
5679        */
5680       private com.google.protobuf.SingleFieldBuilderV3<
5681               com.google.cloud.aiplatform.v1.BigQueryDestination,
5682               com.google.cloud.aiplatform.v1.BigQueryDestination.Builder,
5683               com.google.cloud.aiplatform.v1.BigQueryDestinationOrBuilder>
getBigqueryDestinationFieldBuilder()5684           getBigqueryDestinationFieldBuilder() {
5685         if (bigqueryDestinationBuilder_ == null) {
5686           if (!(destinationCase_ == 3)) {
5687             destination_ = com.google.cloud.aiplatform.v1.BigQueryDestination.getDefaultInstance();
5688           }
5689           bigqueryDestinationBuilder_ =
5690               new com.google.protobuf.SingleFieldBuilderV3<
5691                   com.google.cloud.aiplatform.v1.BigQueryDestination,
5692                   com.google.cloud.aiplatform.v1.BigQueryDestination.Builder,
5693                   com.google.cloud.aiplatform.v1.BigQueryDestinationOrBuilder>(
5694                   (com.google.cloud.aiplatform.v1.BigQueryDestination) destination_,
5695                   getParentForChildren(),
5696                   isClean());
5697           destination_ = null;
5698         }
5699         destinationCase_ = 3;
5700         onChanged();
5701         return bigqueryDestinationBuilder_;
5702       }
5703 
5704       private java.lang.Object predictionsFormat_ = "";
5705       /**
5706        *
5707        *
5708        * <pre>
5709        * Required. The format in which Vertex AI gives the predictions, must be
5710        * one of the [Model's][google.cloud.aiplatform.v1.BatchPredictionJob.model]
5711        * [supported_output_storage_formats][google.cloud.aiplatform.v1.Model.supported_output_storage_formats].
5712        * </pre>
5713        *
5714        * <code>string predictions_format = 1 [(.google.api.field_behavior) = REQUIRED];</code>
5715        *
5716        * @return The predictionsFormat.
5717        */
getPredictionsFormat()5718       public java.lang.String getPredictionsFormat() {
5719         java.lang.Object ref = predictionsFormat_;
5720         if (!(ref instanceof java.lang.String)) {
5721           com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
5722           java.lang.String s = bs.toStringUtf8();
5723           predictionsFormat_ = s;
5724           return s;
5725         } else {
5726           return (java.lang.String) ref;
5727         }
5728       }
5729       /**
5730        *
5731        *
5732        * <pre>
5733        * Required. The format in which Vertex AI gives the predictions, must be
5734        * one of the [Model's][google.cloud.aiplatform.v1.BatchPredictionJob.model]
5735        * [supported_output_storage_formats][google.cloud.aiplatform.v1.Model.supported_output_storage_formats].
5736        * </pre>
5737        *
5738        * <code>string predictions_format = 1 [(.google.api.field_behavior) = REQUIRED];</code>
5739        *
5740        * @return The bytes for predictionsFormat.
5741        */
getPredictionsFormatBytes()5742       public com.google.protobuf.ByteString getPredictionsFormatBytes() {
5743         java.lang.Object ref = predictionsFormat_;
5744         if (ref instanceof String) {
5745           com.google.protobuf.ByteString b =
5746               com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
5747           predictionsFormat_ = b;
5748           return b;
5749         } else {
5750           return (com.google.protobuf.ByteString) ref;
5751         }
5752       }
5753       /**
5754        *
5755        *
5756        * <pre>
5757        * Required. The format in which Vertex AI gives the predictions, must be
5758        * one of the [Model's][google.cloud.aiplatform.v1.BatchPredictionJob.model]
5759        * [supported_output_storage_formats][google.cloud.aiplatform.v1.Model.supported_output_storage_formats].
5760        * </pre>
5761        *
5762        * <code>string predictions_format = 1 [(.google.api.field_behavior) = REQUIRED];</code>
5763        *
5764        * @param value The predictionsFormat to set.
5765        * @return This builder for chaining.
5766        */
setPredictionsFormat(java.lang.String value)5767       public Builder setPredictionsFormat(java.lang.String value) {
5768         if (value == null) {
5769           throw new NullPointerException();
5770         }
5771         predictionsFormat_ = value;
5772         bitField0_ |= 0x00000004;
5773         onChanged();
5774         return this;
5775       }
5776       /**
5777        *
5778        *
5779        * <pre>
5780        * Required. The format in which Vertex AI gives the predictions, must be
5781        * one of the [Model's][google.cloud.aiplatform.v1.BatchPredictionJob.model]
5782        * [supported_output_storage_formats][google.cloud.aiplatform.v1.Model.supported_output_storage_formats].
5783        * </pre>
5784        *
5785        * <code>string predictions_format = 1 [(.google.api.field_behavior) = REQUIRED];</code>
5786        *
5787        * @return This builder for chaining.
5788        */
clearPredictionsFormat()5789       public Builder clearPredictionsFormat() {
5790         predictionsFormat_ = getDefaultInstance().getPredictionsFormat();
5791         bitField0_ = (bitField0_ & ~0x00000004);
5792         onChanged();
5793         return this;
5794       }
5795       /**
5796        *
5797        *
5798        * <pre>
5799        * Required. The format in which Vertex AI gives the predictions, must be
5800        * one of the [Model's][google.cloud.aiplatform.v1.BatchPredictionJob.model]
5801        * [supported_output_storage_formats][google.cloud.aiplatform.v1.Model.supported_output_storage_formats].
5802        * </pre>
5803        *
5804        * <code>string predictions_format = 1 [(.google.api.field_behavior) = REQUIRED];</code>
5805        *
5806        * @param value The bytes for predictionsFormat to set.
5807        * @return This builder for chaining.
5808        */
setPredictionsFormatBytes(com.google.protobuf.ByteString value)5809       public Builder setPredictionsFormatBytes(com.google.protobuf.ByteString value) {
5810         if (value == null) {
5811           throw new NullPointerException();
5812         }
5813         checkByteStringIsUtf8(value);
5814         predictionsFormat_ = value;
5815         bitField0_ |= 0x00000004;
5816         onChanged();
5817         return this;
5818       }
5819 
      @java.lang.Override
      // Pure delegation to the base builder; kept final by generated-code convention.
      public final Builder setUnknownFields(
          final com.google.protobuf.UnknownFieldSet unknownFields) {
        return super.setUnknownFields(unknownFields);
      }
5825 
      @java.lang.Override
      // Pure delegation to the base builder; kept final by generated-code convention.
      public final Builder mergeUnknownFields(
          final com.google.protobuf.UnknownFieldSet unknownFields) {
        return super.mergeUnknownFields(unknownFields);
      }
5831 
5832       // @@protoc_insertion_point(builder_scope:google.cloud.aiplatform.v1.BatchPredictionJob.OutputConfig)
5833     }
5834 
5835     // @@protoc_insertion_point(class_scope:google.cloud.aiplatform.v1.BatchPredictionJob.OutputConfig)
    // Singleton default instance shared by all callers; created eagerly at class load.
    private static final com.google.cloud.aiplatform.v1.BatchPredictionJob.OutputConfig
        DEFAULT_INSTANCE;

    static {
      DEFAULT_INSTANCE = new com.google.cloud.aiplatform.v1.BatchPredictionJob.OutputConfig();
    }
5842 
    // Returns the shared immutable default instance of OutputConfig.
    public static com.google.cloud.aiplatform.v1.BatchPredictionJob.OutputConfig
        getDefaultInstance() {
      return DEFAULT_INSTANCE;
    }
5847 
    // Wire-format parser: delegates to Builder.mergeFrom and, on any failure,
    // attaches the partially-parsed message to the rethrown exception.
    private static final com.google.protobuf.Parser<OutputConfig> PARSER =
        new com.google.protobuf.AbstractParser<OutputConfig>() {
          @java.lang.Override
          public OutputConfig parsePartialFrom(
              com.google.protobuf.CodedInputStream input,
              com.google.protobuf.ExtensionRegistryLite extensionRegistry)
              throws com.google.protobuf.InvalidProtocolBufferException {
            Builder builder = newBuilder();
            try {
              builder.mergeFrom(input, extensionRegistry);
            } catch (com.google.protobuf.InvalidProtocolBufferException e) {
              throw e.setUnfinishedMessage(builder.buildPartial());
            } catch (com.google.protobuf.UninitializedMessageException e) {
              // Convert missing-required-field errors to the protobuf exception type.
              throw e.asInvalidProtocolBufferException()
                  .setUnfinishedMessage(builder.buildPartial());
            } catch (java.io.IOException e) {
              // Wrap plain I/O errors in the protobuf exception type.
              throw new com.google.protobuf.InvalidProtocolBufferException(e)
                  .setUnfinishedMessage(builder.buildPartial());
            }
            return builder.buildPartial();
          }
        };
5870 
parser()5871     public static com.google.protobuf.Parser<OutputConfig> parser() {
5872       return PARSER;
5873     }
5874 
    @java.lang.Override
    // Instance-level accessor required by the Message contract; same shared parser.
    public com.google.protobuf.Parser<OutputConfig> getParserForType() {
      return PARSER;
    }
5879 
    @java.lang.Override
    // Instance-level accessor required by the Message contract; same shared default.
    public com.google.cloud.aiplatform.v1.BatchPredictionJob.OutputConfig
        getDefaultInstanceForType() {
      return DEFAULT_INSTANCE;
    }
5885   }
5886 
5887   public interface OutputInfoOrBuilder
5888       extends
5889       // @@protoc_insertion_point(interface_extends:google.cloud.aiplatform.v1.BatchPredictionJob.OutputInfo)
5890       com.google.protobuf.MessageOrBuilder {
5891 
5892     /**
5893      *
5894      *
5895      * <pre>
5896      * Output only. The full path of the Cloud Storage directory created, into
5897      * which the prediction output is written.
5898      * </pre>
5899      *
5900      * <code>string gcs_output_directory = 1 [(.google.api.field_behavior) = OUTPUT_ONLY];</code>
5901      *
5902      * @return Whether the gcsOutputDirectory field is set.
5903      */
hasGcsOutputDirectory()5904     boolean hasGcsOutputDirectory();
5905     /**
5906      *
5907      *
5908      * <pre>
5909      * Output only. The full path of the Cloud Storage directory created, into
5910      * which the prediction output is written.
5911      * </pre>
5912      *
5913      * <code>string gcs_output_directory = 1 [(.google.api.field_behavior) = OUTPUT_ONLY];</code>
5914      *
5915      * @return The gcsOutputDirectory.
5916      */
getGcsOutputDirectory()5917     java.lang.String getGcsOutputDirectory();
5918     /**
5919      *
5920      *
5921      * <pre>
5922      * Output only. The full path of the Cloud Storage directory created, into
5923      * which the prediction output is written.
5924      * </pre>
5925      *
5926      * <code>string gcs_output_directory = 1 [(.google.api.field_behavior) = OUTPUT_ONLY];</code>
5927      *
5928      * @return The bytes for gcsOutputDirectory.
5929      */
getGcsOutputDirectoryBytes()5930     com.google.protobuf.ByteString getGcsOutputDirectoryBytes();
5931 
5932     /**
5933      *
5934      *
5935      * <pre>
5936      * Output only. The path of the BigQuery dataset created, in
5937      * `bq://projectId.bqDatasetId`
5938      * format, into which the prediction output is written.
5939      * </pre>
5940      *
5941      * <code>string bigquery_output_dataset = 2 [(.google.api.field_behavior) = OUTPUT_ONLY];</code>
5942      *
5943      * @return Whether the bigqueryOutputDataset field is set.
5944      */
hasBigqueryOutputDataset()5945     boolean hasBigqueryOutputDataset();
5946     /**
5947      *
5948      *
5949      * <pre>
5950      * Output only. The path of the BigQuery dataset created, in
5951      * `bq://projectId.bqDatasetId`
5952      * format, into which the prediction output is written.
5953      * </pre>
5954      *
5955      * <code>string bigquery_output_dataset = 2 [(.google.api.field_behavior) = OUTPUT_ONLY];</code>
5956      *
5957      * @return The bigqueryOutputDataset.
5958      */
getBigqueryOutputDataset()5959     java.lang.String getBigqueryOutputDataset();
5960     /**
5961      *
5962      *
5963      * <pre>
5964      * Output only. The path of the BigQuery dataset created, in
5965      * `bq://projectId.bqDatasetId`
5966      * format, into which the prediction output is written.
5967      * </pre>
5968      *
5969      * <code>string bigquery_output_dataset = 2 [(.google.api.field_behavior) = OUTPUT_ONLY];</code>
5970      *
5971      * @return The bytes for bigqueryOutputDataset.
5972      */
getBigqueryOutputDatasetBytes()5973     com.google.protobuf.ByteString getBigqueryOutputDatasetBytes();
5974 
5975     /**
5976      *
5977      *
5978      * <pre>
5979      * Output only. The name of the BigQuery table created, in
5980      * `predictions_&lt;timestamp&gt;`
5981      * format, into which the prediction output is written.
5982      * Can be used by UI to generate the BigQuery output path, for example.
5983      * </pre>
5984      *
5985      * <code>string bigquery_output_table = 4 [(.google.api.field_behavior) = OUTPUT_ONLY];</code>
5986      *
5987      * @return The bigqueryOutputTable.
5988      */
getBigqueryOutputTable()5989     java.lang.String getBigqueryOutputTable();
5990     /**
5991      *
5992      *
5993      * <pre>
5994      * Output only. The name of the BigQuery table created, in
5995      * `predictions_&lt;timestamp&gt;`
5996      * format, into which the prediction output is written.
5997      * Can be used by UI to generate the BigQuery output path, for example.
5998      * </pre>
5999      *
6000      * <code>string bigquery_output_table = 4 [(.google.api.field_behavior) = OUTPUT_ONLY];</code>
6001      *
6002      * @return The bytes for bigqueryOutputTable.
6003      */
getBigqueryOutputTableBytes()6004     com.google.protobuf.ByteString getBigqueryOutputTableBytes();
6005 
    /**
     * Returns which member of the {@code output_location} oneof is set, or
     * {@code OUTPUTLOCATION_NOT_SET} if none is.
     *
     * @return The case enum for the {@code output_location} oneof.
     */
    public com.google.cloud.aiplatform.v1.BatchPredictionJob.OutputInfo.OutputLocationCase
        getOutputLocationCase();
6008   }
6009   /**
6010    *
6011    *
6012    * <pre>
6013    * Further describes this job's output.
6014    * Supplements
6015    * [output_config][google.cloud.aiplatform.v1.BatchPredictionJob.output_config].
6016    * </pre>
6017    *
6018    * Protobuf type {@code google.cloud.aiplatform.v1.BatchPredictionJob.OutputInfo}
6019    */
6020   public static final class OutputInfo extends com.google.protobuf.GeneratedMessageV3
6021       implements
6022       // @@protoc_insertion_point(message_implements:google.cloud.aiplatform.v1.BatchPredictionJob.OutputInfo)
6023       OutputInfoOrBuilder {
6024     private static final long serialVersionUID = 0L;
6025     // Use OutputInfo.newBuilder() to construct.
OutputInfo(com.google.protobuf.GeneratedMessageV3.Builder<?> builder)6026     private OutputInfo(com.google.protobuf.GeneratedMessageV3.Builder<?> builder) {
6027       super(builder);
6028     }
6029 
OutputInfo()6030     private OutputInfo() {
6031       bigqueryOutputTable_ = "";
6032     }
6033 
6034     @java.lang.Override
6035     @SuppressWarnings({"unused"})
newInstance(UnusedPrivateParameter unused)6036     protected java.lang.Object newInstance(UnusedPrivateParameter unused) {
6037       return new OutputInfo();
6038     }
6039 
    // Exposes fields that arrived on the wire but are not defined in this
    // message's schema (kept so re-serialization is lossless).
    @java.lang.Override
    public final com.google.protobuf.UnknownFieldSet getUnknownFields() {
      return this.unknownFields;
    }

    // Descriptor for this nested message type, shared via the file-level proto class.
    public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
      return com.google.cloud.aiplatform.v1.BatchPredictionJobProto
          .internal_static_google_cloud_aiplatform_v1_BatchPredictionJob_OutputInfo_descriptor;
    }

    // Wires up reflective field access for this message and its Builder.
    @java.lang.Override
    protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
        internalGetFieldAccessorTable() {
      return com.google.cloud.aiplatform.v1.BatchPredictionJobProto
          .internal_static_google_cloud_aiplatform_v1_BatchPredictionJob_OutputInfo_fieldAccessorTable
          .ensureFieldAccessorsInitialized(
              com.google.cloud.aiplatform.v1.BatchPredictionJob.OutputInfo.class,
              com.google.cloud.aiplatform.v1.BatchPredictionJob.OutputInfo.Builder.class);
    }
6059 
    // Discriminator for the output_location oneof: 0 = unset, otherwise the
    // proto field number (1 = gcs_output_directory, 2 = bigquery_output_dataset).
    private int outputLocationCase_ = 0;
    // Value of whichever oneof member is set; holds a String or a ByteString
    // (lazily decoded by the getters below).
    private java.lang.Object outputLocation_;
6062 
6063     public enum OutputLocationCase
6064         implements
6065             com.google.protobuf.Internal.EnumLite,
6066             com.google.protobuf.AbstractMessage.InternalOneOfEnum {
6067       GCS_OUTPUT_DIRECTORY(1),
6068       BIGQUERY_OUTPUT_DATASET(2),
6069       OUTPUTLOCATION_NOT_SET(0);
6070       private final int value;
6071 
OutputLocationCase(int value)6072       private OutputLocationCase(int value) {
6073         this.value = value;
6074       }
6075       /**
6076        * @param value The number of the enum to look for.
6077        * @return The enum associated with the given number.
6078        * @deprecated Use {@link #forNumber(int)} instead.
6079        */
6080       @java.lang.Deprecated
valueOf(int value)6081       public static OutputLocationCase valueOf(int value) {
6082         return forNumber(value);
6083       }
6084 
forNumber(int value)6085       public static OutputLocationCase forNumber(int value) {
6086         switch (value) {
6087           case 1:
6088             return GCS_OUTPUT_DIRECTORY;
6089           case 2:
6090             return BIGQUERY_OUTPUT_DATASET;
6091           case 0:
6092             return OUTPUTLOCATION_NOT_SET;
6093           default:
6094             return null;
6095         }
6096       }
6097 
getNumber()6098       public int getNumber() {
6099         return this.value;
6100       }
6101     };
6102 
getOutputLocationCase()6103     public OutputLocationCase getOutputLocationCase() {
6104       return OutputLocationCase.forNumber(outputLocationCase_);
6105     }
6106 
    public static final int GCS_OUTPUT_DIRECTORY_FIELD_NUMBER = 1;
    /**
     *
     *
     * <pre>
     * Output only. The full path of the Cloud Storage directory created, into
     * which the prediction output is written.
     * </pre>
     *
     * <code>string gcs_output_directory = 1 [(.google.api.field_behavior) = OUTPUT_ONLY];</code>
     *
     * @return Whether the gcsOutputDirectory field is set.
     */
    public boolean hasGcsOutputDirectory() {
      // The oneof case field holds the set member's field number; 1 == this field.
      return outputLocationCase_ == 1;
    }
    /**
     *
     *
     * <pre>
     * Output only. The full path of the Cloud Storage directory created, into
     * which the prediction output is written.
     * </pre>
     *
     * <code>string gcs_output_directory = 1 [(.google.api.field_behavior) = OUTPUT_ONLY];</code>
     *
     * @return The gcsOutputDirectory.
     */
    public java.lang.String getGcsOutputDirectory() {
      // Defaults to "" when this oneof member is not the one set.
      java.lang.Object ref = "";
      if (outputLocationCase_ == 1) {
        ref = outputLocation_;
      }
      if (ref instanceof java.lang.String) {
        return (java.lang.String) ref;
      } else {
        // Value is still a ByteString (e.g. straight off the wire): decode it once
        // and cache the String back into the oneof slot — but only if this member
        // is still the one that is set.
        com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
        java.lang.String s = bs.toStringUtf8();
        if (outputLocationCase_ == 1) {
          outputLocation_ = s;
        }
        return s;
      }
    }
6151     /**
6152      *
6153      *
6154      * <pre>
6155      * Output only. The full path of the Cloud Storage directory created, into
6156      * which the prediction output is written.
6157      * </pre>
6158      *
6159      * <code>string gcs_output_directory = 1 [(.google.api.field_behavior) = OUTPUT_ONLY];</code>
6160      *
6161      * @return The bytes for gcsOutputDirectory.
6162      */
getGcsOutputDirectoryBytes()6163     public com.google.protobuf.ByteString getGcsOutputDirectoryBytes() {
6164       java.lang.Object ref = "";
6165       if (outputLocationCase_ == 1) {
6166         ref = outputLocation_;
6167       }
6168       if (ref instanceof java.lang.String) {
6169         com.google.protobuf.ByteString b =
6170             com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
6171         if (outputLocationCase_ == 1) {
6172           outputLocation_ = b;
6173         }
6174         return b;
6175       } else {
6176         return (com.google.protobuf.ByteString) ref;
6177       }
6178     }
6179 
    public static final int BIGQUERY_OUTPUT_DATASET_FIELD_NUMBER = 2;
    /**
     *
     *
     * <pre>
     * Output only. The path of the BigQuery dataset created, in
     * `bq://projectId.bqDatasetId`
     * format, into which the prediction output is written.
     * </pre>
     *
     * <code>string bigquery_output_dataset = 2 [(.google.api.field_behavior) = OUTPUT_ONLY];</code>
     *
     * @return Whether the bigqueryOutputDataset field is set.
     */
    public boolean hasBigqueryOutputDataset() {
      // The oneof case field holds the set member's field number; 2 == this field.
      return outputLocationCase_ == 2;
    }
    /**
     *
     *
     * <pre>
     * Output only. The path of the BigQuery dataset created, in
     * `bq://projectId.bqDatasetId`
     * format, into which the prediction output is written.
     * </pre>
     *
     * <code>string bigquery_output_dataset = 2 [(.google.api.field_behavior) = OUTPUT_ONLY];</code>
     *
     * @return The bigqueryOutputDataset.
     */
    public java.lang.String getBigqueryOutputDataset() {
      // Defaults to "" when this oneof member is not the one set.
      java.lang.Object ref = "";
      if (outputLocationCase_ == 2) {
        ref = outputLocation_;
      }
      if (ref instanceof java.lang.String) {
        return (java.lang.String) ref;
      } else {
        // Decode the wire-format ByteString once and cache the String back into
        // the oneof slot if this member is still the one that is set.
        com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
        java.lang.String s = bs.toStringUtf8();
        if (outputLocationCase_ == 2) {
          outputLocation_ = s;
        }
        return s;
      }
    }
6226     /**
6227      *
6228      *
6229      * <pre>
6230      * Output only. The path of the BigQuery dataset created, in
6231      * `bq://projectId.bqDatasetId`
6232      * format, into which the prediction output is written.
6233      * </pre>
6234      *
6235      * <code>string bigquery_output_dataset = 2 [(.google.api.field_behavior) = OUTPUT_ONLY];</code>
6236      *
6237      * @return The bytes for bigqueryOutputDataset.
6238      */
getBigqueryOutputDatasetBytes()6239     public com.google.protobuf.ByteString getBigqueryOutputDatasetBytes() {
6240       java.lang.Object ref = "";
6241       if (outputLocationCase_ == 2) {
6242         ref = outputLocation_;
6243       }
6244       if (ref instanceof java.lang.String) {
6245         com.google.protobuf.ByteString b =
6246             com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
6247         if (outputLocationCase_ == 2) {
6248           outputLocation_ = b;
6249         }
6250         return b;
6251       } else {
6252         return (com.google.protobuf.ByteString) ref;
6253       }
6254     }
6255 
    public static final int BIGQUERY_OUTPUT_TABLE_FIELD_NUMBER = 4;

    // Holds a String or a ByteString; volatile so the lazy decode below
    // publishes safely across threads. Not part of the oneof.
    @SuppressWarnings("serial")
    private volatile java.lang.Object bigqueryOutputTable_ = "";
    /**
     *
     *
     * <pre>
     * Output only. The name of the BigQuery table created, in
     * `predictions_&lt;timestamp&gt;`
     * format, into which the prediction output is written.
     * Can be used by UI to generate the BigQuery output path, for example.
     * </pre>
     *
     * <code>string bigquery_output_table = 4 [(.google.api.field_behavior) = OUTPUT_ONLY];</code>
     *
     * @return The bigqueryOutputTable.
     */
    @java.lang.Override
    public java.lang.String getBigqueryOutputTable() {
      java.lang.Object ref = bigqueryOutputTable_;
      if (ref instanceof java.lang.String) {
        return (java.lang.String) ref;
      } else {
        // Decode the wire-format ByteString once and cache the String.
        com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
        java.lang.String s = bs.toStringUtf8();
        bigqueryOutputTable_ = s;
        return s;
      }
    }
6286     /**
6287      *
6288      *
6289      * <pre>
6290      * Output only. The name of the BigQuery table created, in
6291      * `predictions_&lt;timestamp&gt;`
6292      * format, into which the prediction output is written.
6293      * Can be used by UI to generate the BigQuery output path, for example.
6294      * </pre>
6295      *
6296      * <code>string bigquery_output_table = 4 [(.google.api.field_behavior) = OUTPUT_ONLY];</code>
6297      *
6298      * @return The bytes for bigqueryOutputTable.
6299      */
6300     @java.lang.Override
getBigqueryOutputTableBytes()6301     public com.google.protobuf.ByteString getBigqueryOutputTableBytes() {
6302       java.lang.Object ref = bigqueryOutputTable_;
6303       if (ref instanceof java.lang.String) {
6304         com.google.protobuf.ByteString b =
6305             com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
6306         bigqueryOutputTable_ = b;
6307         return b;
6308       } else {
6309         return (com.google.protobuf.ByteString) ref;
6310       }
6311     }
6312 
    // Tri-state cache: -1 = not computed yet, 0 = not initialized, 1 = initialized.
    private byte memoizedIsInitialized = -1;

    @java.lang.Override
    public final boolean isInitialized() {
      byte isInitialized = memoizedIsInitialized;
      if (isInitialized == 1) return true;
      if (isInitialized == 0) return false;

      // No required fields to verify, so the message is always initialized.
      memoizedIsInitialized = 1;
      return true;
    }
6324 
    // Serializes this message to the wire: the set output_location oneof member
    // (field 1 or 2), then bigquery_output_table (field 4) when non-empty, then
    // any unknown fields carried over from parsing.
    @java.lang.Override
    public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException {
      if (outputLocationCase_ == 1) {
        com.google.protobuf.GeneratedMessageV3.writeString(output, 1, outputLocation_);
      }
      if (outputLocationCase_ == 2) {
        com.google.protobuf.GeneratedMessageV3.writeString(output, 2, outputLocation_);
      }
      if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(bigqueryOutputTable_)) {
        com.google.protobuf.GeneratedMessageV3.writeString(output, 4, bigqueryOutputTable_);
      }
      getUnknownFields().writeTo(output);
    }
6338 
    // Computes (and memoizes) the exact wire size; mirrors writeTo field-for-field
    // so the two stay in sync.
    @java.lang.Override
    public int getSerializedSize() {
      int size = memoizedSize;
      if (size != -1) return size;

      size = 0;
      if (outputLocationCase_ == 1) {
        size += com.google.protobuf.GeneratedMessageV3.computeStringSize(1, outputLocation_);
      }
      if (outputLocationCase_ == 2) {
        size += com.google.protobuf.GeneratedMessageV3.computeStringSize(2, outputLocation_);
      }
      if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(bigqueryOutputTable_)) {
        size += com.google.protobuf.GeneratedMessageV3.computeStringSize(4, bigqueryOutputTable_);
      }
      size += getUnknownFields().getSerializedSize();
      memoizedSize = size;
      return size;
    }
6358 
    // Value equality: compares bigquery_output_table, the oneof case, the value of
    // whichever oneof member is set, and the unknown-field set.
    @java.lang.Override
    public boolean equals(final java.lang.Object obj) {
      if (obj == this) {
        return true;
      }
      if (!(obj instanceof com.google.cloud.aiplatform.v1.BatchPredictionJob.OutputInfo)) {
        return super.equals(obj);
      }
      com.google.cloud.aiplatform.v1.BatchPredictionJob.OutputInfo other =
          (com.google.cloud.aiplatform.v1.BatchPredictionJob.OutputInfo) obj;

      if (!getBigqueryOutputTable().equals(other.getBigqueryOutputTable())) return false;
      if (!getOutputLocationCase().equals(other.getOutputLocationCase())) return false;
      switch (outputLocationCase_) {
        case 1:
          if (!getGcsOutputDirectory().equals(other.getGcsOutputDirectory())) return false;
          break;
        case 2:
          if (!getBigqueryOutputDataset().equals(other.getBigqueryOutputDataset())) return false;
          break;
        case 0:
        default:
      }
      if (!getUnknownFields().equals(other.getUnknownFields())) return false;
      return true;
    }
6385 
    // Hash is memoized (0 means "not yet computed") and folds in the descriptor,
    // each present field tagged by its field number, and the unknown fields —
    // consistent with equals above.
    @java.lang.Override
    public int hashCode() {
      if (memoizedHashCode != 0) {
        return memoizedHashCode;
      }
      int hash = 41;
      hash = (19 * hash) + getDescriptor().hashCode();
      hash = (37 * hash) + BIGQUERY_OUTPUT_TABLE_FIELD_NUMBER;
      hash = (53 * hash) + getBigqueryOutputTable().hashCode();
      switch (outputLocationCase_) {
        case 1:
          hash = (37 * hash) + GCS_OUTPUT_DIRECTORY_FIELD_NUMBER;
          hash = (53 * hash) + getGcsOutputDirectory().hashCode();
          break;
        case 2:
          hash = (37 * hash) + BIGQUERY_OUTPUT_DATASET_FIELD_NUMBER;
          hash = (53 * hash) + getBigqueryOutputDataset().hashCode();
          break;
        case 0:
        default:
      }
      hash = (29 * hash) + getUnknownFields().hashCode();
      memoizedHashCode = hash;
      return hash;
    }
6411 
parseFrom( java.nio.ByteBuffer data)6412     public static com.google.cloud.aiplatform.v1.BatchPredictionJob.OutputInfo parseFrom(
6413         java.nio.ByteBuffer data) throws com.google.protobuf.InvalidProtocolBufferException {
6414       return PARSER.parseFrom(data);
6415     }
6416 
parseFrom( java.nio.ByteBuffer data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)6417     public static com.google.cloud.aiplatform.v1.BatchPredictionJob.OutputInfo parseFrom(
6418         java.nio.ByteBuffer data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
6419         throws com.google.protobuf.InvalidProtocolBufferException {
6420       return PARSER.parseFrom(data, extensionRegistry);
6421     }
6422 
parseFrom( com.google.protobuf.ByteString data)6423     public static com.google.cloud.aiplatform.v1.BatchPredictionJob.OutputInfo parseFrom(
6424         com.google.protobuf.ByteString data)
6425         throws com.google.protobuf.InvalidProtocolBufferException {
6426       return PARSER.parseFrom(data);
6427     }
6428 
parseFrom( com.google.protobuf.ByteString data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)6429     public static com.google.cloud.aiplatform.v1.BatchPredictionJob.OutputInfo parseFrom(
6430         com.google.protobuf.ByteString data,
6431         com.google.protobuf.ExtensionRegistryLite extensionRegistry)
6432         throws com.google.protobuf.InvalidProtocolBufferException {
6433       return PARSER.parseFrom(data, extensionRegistry);
6434     }
6435 
parseFrom( byte[] data)6436     public static com.google.cloud.aiplatform.v1.BatchPredictionJob.OutputInfo parseFrom(
6437         byte[] data) throws com.google.protobuf.InvalidProtocolBufferException {
6438       return PARSER.parseFrom(data);
6439     }
6440 
parseFrom( byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)6441     public static com.google.cloud.aiplatform.v1.BatchPredictionJob.OutputInfo parseFrom(
6442         byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
6443         throws com.google.protobuf.InvalidProtocolBufferException {
6444       return PARSER.parseFrom(data, extensionRegistry);
6445     }
6446 
parseFrom( java.io.InputStream input)6447     public static com.google.cloud.aiplatform.v1.BatchPredictionJob.OutputInfo parseFrom(
6448         java.io.InputStream input) throws java.io.IOException {
6449       return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
6450     }
6451 
parseFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)6452     public static com.google.cloud.aiplatform.v1.BatchPredictionJob.OutputInfo parseFrom(
6453         java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
6454         throws java.io.IOException {
6455       return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
6456           PARSER, input, extensionRegistry);
6457     }
6458 
parseDelimitedFrom( java.io.InputStream input)6459     public static com.google.cloud.aiplatform.v1.BatchPredictionJob.OutputInfo parseDelimitedFrom(
6460         java.io.InputStream input) throws java.io.IOException {
6461       return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(PARSER, input);
6462     }
6463 
parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)6464     public static com.google.cloud.aiplatform.v1.BatchPredictionJob.OutputInfo parseDelimitedFrom(
6465         java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
6466         throws java.io.IOException {
6467       return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(
6468           PARSER, input, extensionRegistry);
6469     }
6470 
parseFrom( com.google.protobuf.CodedInputStream input)6471     public static com.google.cloud.aiplatform.v1.BatchPredictionJob.OutputInfo parseFrom(
6472         com.google.protobuf.CodedInputStream input) throws java.io.IOException {
6473       return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
6474     }
6475 
parseFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)6476     public static com.google.cloud.aiplatform.v1.BatchPredictionJob.OutputInfo parseFrom(
6477         com.google.protobuf.CodedInputStream input,
6478         com.google.protobuf.ExtensionRegistryLite extensionRegistry)
6479         throws java.io.IOException {
6480       return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
6481           PARSER, input, extensionRegistry);
6482     }
6483 
    @java.lang.Override
    public Builder newBuilderForType() {
      return newBuilder();
    }

    // Fresh builder seeded from the default (empty) instance.
    public static Builder newBuilder() {
      return DEFAULT_INSTANCE.toBuilder();
    }

    // Builder pre-populated with the given message's state.
    public static Builder newBuilder(
        com.google.cloud.aiplatform.v1.BatchPredictionJob.OutputInfo prototype) {
      return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
    }

    @java.lang.Override
    public Builder toBuilder() {
      // Avoids a needless mergeFrom when this is the default instance.
      return this == DEFAULT_INSTANCE ? new Builder() : new Builder().mergeFrom(this);
    }

    // Parent-aware builder used by the runtime when this message is built as a
    // field of an enclosing message.
    @java.lang.Override
    protected Builder newBuilderForType(
        com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
      Builder builder = new Builder(parent);
      return builder;
    }
6509     /**
6510      *
6511      *
6512      * <pre>
6513      * Further describes this job's output.
6514      * Supplements
6515      * [output_config][google.cloud.aiplatform.v1.BatchPredictionJob.output_config].
6516      * </pre>
6517      *
6518      * Protobuf type {@code google.cloud.aiplatform.v1.BatchPredictionJob.OutputInfo}
6519      */
6520     public static final class Builder
6521         extends com.google.protobuf.GeneratedMessageV3.Builder<Builder>
6522         implements
6523         // @@protoc_insertion_point(builder_implements:google.cloud.aiplatform.v1.BatchPredictionJob.OutputInfo)
6524         com.google.cloud.aiplatform.v1.BatchPredictionJob.OutputInfoOrBuilder {
getDescriptor()6525       public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
6526         return com.google.cloud.aiplatform.v1.BatchPredictionJobProto
6527             .internal_static_google_cloud_aiplatform_v1_BatchPredictionJob_OutputInfo_descriptor;
6528       }
6529 
6530       @java.lang.Override
6531       protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
internalGetFieldAccessorTable()6532           internalGetFieldAccessorTable() {
6533         return com.google.cloud.aiplatform.v1.BatchPredictionJobProto
6534             .internal_static_google_cloud_aiplatform_v1_BatchPredictionJob_OutputInfo_fieldAccessorTable
6535             .ensureFieldAccessorsInitialized(
6536                 com.google.cloud.aiplatform.v1.BatchPredictionJob.OutputInfo.class,
6537                 com.google.cloud.aiplatform.v1.BatchPredictionJob.OutputInfo.Builder.class);
6538       }
6539 
6540       // Construct using com.google.cloud.aiplatform.v1.BatchPredictionJob.OutputInfo.newBuilder()
Builder()6541       private Builder() {}
6542 
Builder(com.google.protobuf.GeneratedMessageV3.BuilderParent parent)6543       private Builder(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
6544         super(parent);
6545       }
6546 
      // Resets all fields to defaults: clears the presence bits, the table name,
      // and the output_location oneof.
      @java.lang.Override
      public Builder clear() {
        super.clear();
        bitField0_ = 0;
        bigqueryOutputTable_ = "";
        outputLocationCase_ = 0;
        outputLocation_ = null;
        return this;
      }
6556 
      @java.lang.Override
      public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() {
        return com.google.cloud.aiplatform.v1.BatchPredictionJobProto
            .internal_static_google_cloud_aiplatform_v1_BatchPredictionJob_OutputInfo_descriptor;
      }

      // Shared immutable empty instance of the message this builder produces.
      @java.lang.Override
      public com.google.cloud.aiplatform.v1.BatchPredictionJob.OutputInfo
          getDefaultInstanceForType() {
        return com.google.cloud.aiplatform.v1.BatchPredictionJob.OutputInfo.getDefaultInstance();
      }
6568 
      // Builds the message, rejecting uninitialized results (no required fields
      // here, so isInitialized() is always true — kept for API uniformity).
      @java.lang.Override
      public com.google.cloud.aiplatform.v1.BatchPredictionJob.OutputInfo build() {
        com.google.cloud.aiplatform.v1.BatchPredictionJob.OutputInfo result = buildPartial();
        if (!result.isInitialized()) {
          throw newUninitializedMessageException(result);
        }
        return result;
      }

      // Builds without the initialization check; copies regular fields only when
      // their presence bit is set, then the oneof state.
      @java.lang.Override
      public com.google.cloud.aiplatform.v1.BatchPredictionJob.OutputInfo buildPartial() {
        com.google.cloud.aiplatform.v1.BatchPredictionJob.OutputInfo result =
            new com.google.cloud.aiplatform.v1.BatchPredictionJob.OutputInfo(this);
        if (bitField0_ != 0) {
          buildPartial0(result);
        }
        buildPartialOneofs(result);
        onBuilt();
        return result;
      }

      // Copies presence-tracked scalar fields: bit 0x4 guards bigquery_output_table.
      private void buildPartial0(
          com.google.cloud.aiplatform.v1.BatchPredictionJob.OutputInfo result) {
        int from_bitField0_ = bitField0_;
        if (((from_bitField0_ & 0x00000004) != 0)) {
          result.bigqueryOutputTable_ = bigqueryOutputTable_;
        }
      }

      // Copies the output_location oneof (case discriminator plus value) verbatim.
      private void buildPartialOneofs(
          com.google.cloud.aiplatform.v1.BatchPredictionJob.OutputInfo result) {
        result.outputLocationCase_ = outputLocationCase_;
        result.outputLocation_ = this.outputLocation_;
      }
6603 
      // GeneratedMessageV3.Builder plumbing: explicit overrides that delegate
      // straight to super, kept by the generator for binary compatibility.
      @java.lang.Override
      public Builder clone() {
        return super.clone();
      }

      @java.lang.Override
      public Builder setField(
          com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
        return super.setField(field, value);
      }

      @java.lang.Override
      public Builder clearField(com.google.protobuf.Descriptors.FieldDescriptor field) {
        return super.clearField(field);
      }

      @java.lang.Override
      public Builder clearOneof(com.google.protobuf.Descriptors.OneofDescriptor oneof) {
        return super.clearOneof(oneof);
      }

      @java.lang.Override
      public Builder setRepeatedField(
          com.google.protobuf.Descriptors.FieldDescriptor field,
          int index,
          java.lang.Object value) {
        return super.setRepeatedField(field, index, value);
      }

      @java.lang.Override
      public Builder addRepeatedField(
          com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
        return super.addRepeatedField(field, value);
      }
6638 
      // Dynamic dispatch: uses the typed merge when possible, otherwise falls back
      // to the reflective merge in the superclass.
      @java.lang.Override
      public Builder mergeFrom(com.google.protobuf.Message other) {
        if (other instanceof com.google.cloud.aiplatform.v1.BatchPredictionJob.OutputInfo) {
          return mergeFrom((com.google.cloud.aiplatform.v1.BatchPredictionJob.OutputInfo) other);
        } else {
          super.mergeFrom(other);
          return this;
        }
      }

      // Typed merge: non-empty scalar fields from `other` overwrite ours (setting
      // the presence bit), and a set oneof member in `other` replaces our oneof.
      public Builder mergeFrom(com.google.cloud.aiplatform.v1.BatchPredictionJob.OutputInfo other) {
        if (other
            == com.google.cloud.aiplatform.v1.BatchPredictionJob.OutputInfo.getDefaultInstance())
          return this;
        if (!other.getBigqueryOutputTable().isEmpty()) {
          bigqueryOutputTable_ = other.bigqueryOutputTable_;
          bitField0_ |= 0x00000004;
          onChanged();
        }
        switch (other.getOutputLocationCase()) {
          case GCS_OUTPUT_DIRECTORY:
            {
              outputLocationCase_ = 1;
              outputLocation_ = other.outputLocation_;
              onChanged();
              break;
            }
          case BIGQUERY_OUTPUT_DATASET:
            {
              outputLocationCase_ = 2;
              outputLocation_ = other.outputLocation_;
              onChanged();
              break;
            }
          case OUTPUTLOCATION_NOT_SET:
            {
              break;
            }
        }
        this.mergeUnknownFields(other.getUnknownFields());
        onChanged();
        return this;
      }
6682 
      // No required fields in this message, so a builder is always initialized.
      @java.lang.Override
      public final boolean isInitialized() {
        return true;
      }
6687 
      // Wire-format parse loop. Each tag is (field_number << 3) | wire_type, so:
      // 10 = field 1 (gcs_output_directory), 18 = field 2 (bigquery_output_dataset),
      // 34 = field 4 (bigquery_output_table), all length-delimited strings.
      @java.lang.Override
      public Builder mergeFrom(
          com.google.protobuf.CodedInputStream input,
          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
          throws java.io.IOException {
        if (extensionRegistry == null) {
          throw new java.lang.NullPointerException();
        }
        try {
          boolean done = false;
          while (!done) {
            int tag = input.readTag();
            switch (tag) {
              case 0:
                done = true;
                break;
              case 10:
                {
                  java.lang.String s = input.readStringRequireUtf8();
                  outputLocationCase_ = 1;
                  outputLocation_ = s;
                  break;
                } // case 10
              case 18:
                {
                  java.lang.String s = input.readStringRequireUtf8();
                  outputLocationCase_ = 2;
                  outputLocation_ = s;
                  break;
                } // case 18
              case 34:
                {
                  bigqueryOutputTable_ = input.readStringRequireUtf8();
                  bitField0_ |= 0x00000004;
                  break;
                } // case 34
              default:
                {
                  // Unknown fields are preserved; a false return means end-of-group.
                  if (!super.parseUnknownField(input, extensionRegistry, tag)) {
                    done = true; // was an endgroup tag
                  }
                  break;
                } // default:
            } // switch (tag)
          } // while (!done)
        } catch (com.google.protobuf.InvalidProtocolBufferException e) {
          throw e.unwrapIOException();
        } finally {
          // Notify listeners even on failure: fields consumed so far were mutated.
          onChanged();
        } // finally
        return this;
      }
6740 
      // Discriminator for the output_location oneof: 0 = not set,
      // 1 = gcs_output_directory, 2 = bigquery_output_dataset.
      private int outputLocationCase_ = 0;
      // Value of whichever output_location member is set; holds either a
      // java.lang.String or a com.google.protobuf.ByteString (lazily decoded).
      private java.lang.Object outputLocation_;

      public OutputLocationCase getOutputLocationCase() {
        return OutputLocationCase.forNumber(outputLocationCase_);
      }

      // Resets the output_location oneof to the "not set" state.
      public Builder clearOutputLocation() {
        outputLocationCase_ = 0;
        outputLocation_ = null;
        onChanged();
        return this;
      }

      // Presence bits for non-oneof fields (bit 0x4 = bigquery_output_table).
      private int bitField0_;
6756 
6757       /**
6758        *
6759        *
6760        * <pre>
6761        * Output only. The full path of the Cloud Storage directory created, into
6762        * which the prediction output is written.
6763        * </pre>
6764        *
6765        * <code>string gcs_output_directory = 1 [(.google.api.field_behavior) = OUTPUT_ONLY];</code>
6766        *
6767        * @return Whether the gcsOutputDirectory field is set.
6768        */
6769       @java.lang.Override
hasGcsOutputDirectory()6770       public boolean hasGcsOutputDirectory() {
6771         return outputLocationCase_ == 1;
6772       }
6773       /**
6774        *
6775        *
6776        * <pre>
6777        * Output only. The full path of the Cloud Storage directory created, into
6778        * which the prediction output is written.
6779        * </pre>
6780        *
6781        * <code>string gcs_output_directory = 1 [(.google.api.field_behavior) = OUTPUT_ONLY];</code>
6782        *
6783        * @return The gcsOutputDirectory.
6784        */
6785       @java.lang.Override
getGcsOutputDirectory()6786       public java.lang.String getGcsOutputDirectory() {
6787         java.lang.Object ref = "";
6788         if (outputLocationCase_ == 1) {
6789           ref = outputLocation_;
6790         }
6791         if (!(ref instanceof java.lang.String)) {
6792           com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
6793           java.lang.String s = bs.toStringUtf8();
6794           if (outputLocationCase_ == 1) {
6795             outputLocation_ = s;
6796           }
6797           return s;
6798         } else {
6799           return (java.lang.String) ref;
6800         }
6801       }
6802       /**
6803        *
6804        *
6805        * <pre>
6806        * Output only. The full path of the Cloud Storage directory created, into
6807        * which the prediction output is written.
6808        * </pre>
6809        *
6810        * <code>string gcs_output_directory = 1 [(.google.api.field_behavior) = OUTPUT_ONLY];</code>
6811        *
6812        * @return The bytes for gcsOutputDirectory.
6813        */
6814       @java.lang.Override
getGcsOutputDirectoryBytes()6815       public com.google.protobuf.ByteString getGcsOutputDirectoryBytes() {
6816         java.lang.Object ref = "";
6817         if (outputLocationCase_ == 1) {
6818           ref = outputLocation_;
6819         }
6820         if (ref instanceof String) {
6821           com.google.protobuf.ByteString b =
6822               com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
6823           if (outputLocationCase_ == 1) {
6824             outputLocation_ = b;
6825           }
6826           return b;
6827         } else {
6828           return (com.google.protobuf.ByteString) ref;
6829         }
6830       }
6831       /**
6832        *
6833        *
6834        * <pre>
6835        * Output only. The full path of the Cloud Storage directory created, into
6836        * which the prediction output is written.
6837        * </pre>
6838        *
6839        * <code>string gcs_output_directory = 1 [(.google.api.field_behavior) = OUTPUT_ONLY];</code>
6840        *
6841        * @param value The gcsOutputDirectory to set.
6842        * @return This builder for chaining.
6843        */
setGcsOutputDirectory(java.lang.String value)6844       public Builder setGcsOutputDirectory(java.lang.String value) {
6845         if (value == null) {
6846           throw new NullPointerException();
6847         }
6848         outputLocationCase_ = 1;
6849         outputLocation_ = value;
6850         onChanged();
6851         return this;
6852       }
6853       /**
6854        *
6855        *
6856        * <pre>
6857        * Output only. The full path of the Cloud Storage directory created, into
6858        * which the prediction output is written.
6859        * </pre>
6860        *
6861        * <code>string gcs_output_directory = 1 [(.google.api.field_behavior) = OUTPUT_ONLY];</code>
6862        *
6863        * @return This builder for chaining.
6864        */
clearGcsOutputDirectory()6865       public Builder clearGcsOutputDirectory() {
6866         if (outputLocationCase_ == 1) {
6867           outputLocationCase_ = 0;
6868           outputLocation_ = null;
6869           onChanged();
6870         }
6871         return this;
6872       }
6873       /**
6874        *
6875        *
6876        * <pre>
6877        * Output only. The full path of the Cloud Storage directory created, into
6878        * which the prediction output is written.
6879        * </pre>
6880        *
6881        * <code>string gcs_output_directory = 1 [(.google.api.field_behavior) = OUTPUT_ONLY];</code>
6882        *
6883        * @param value The bytes for gcsOutputDirectory to set.
6884        * @return This builder for chaining.
6885        */
setGcsOutputDirectoryBytes(com.google.protobuf.ByteString value)6886       public Builder setGcsOutputDirectoryBytes(com.google.protobuf.ByteString value) {
6887         if (value == null) {
6888           throw new NullPointerException();
6889         }
6890         checkByteStringIsUtf8(value);
6891         outputLocationCase_ = 1;
6892         outputLocation_ = value;
6893         onChanged();
6894         return this;
6895       }
6896 
6897       /**
6898        *
6899        *
6900        * <pre>
6901        * Output only. The path of the BigQuery dataset created, in
6902        * `bq://projectId.bqDatasetId`
6903        * format, into which the prediction output is written.
6904        * </pre>
6905        *
6906        * <code>string bigquery_output_dataset = 2 [(.google.api.field_behavior) = OUTPUT_ONLY];
6907        * </code>
6908        *
6909        * @return Whether the bigqueryOutputDataset field is set.
6910        */
6911       @java.lang.Override
hasBigqueryOutputDataset()6912       public boolean hasBigqueryOutputDataset() {
6913         return outputLocationCase_ == 2;
6914       }
6915       /**
6916        *
6917        *
6918        * <pre>
6919        * Output only. The path of the BigQuery dataset created, in
6920        * `bq://projectId.bqDatasetId`
6921        * format, into which the prediction output is written.
6922        * </pre>
6923        *
6924        * <code>string bigquery_output_dataset = 2 [(.google.api.field_behavior) = OUTPUT_ONLY];
6925        * </code>
6926        *
6927        * @return The bigqueryOutputDataset.
6928        */
6929       @java.lang.Override
getBigqueryOutputDataset()6930       public java.lang.String getBigqueryOutputDataset() {
6931         java.lang.Object ref = "";
6932         if (outputLocationCase_ == 2) {
6933           ref = outputLocation_;
6934         }
6935         if (!(ref instanceof java.lang.String)) {
6936           com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
6937           java.lang.String s = bs.toStringUtf8();
6938           if (outputLocationCase_ == 2) {
6939             outputLocation_ = s;
6940           }
6941           return s;
6942         } else {
6943           return (java.lang.String) ref;
6944         }
6945       }
6946       /**
6947        *
6948        *
6949        * <pre>
6950        * Output only. The path of the BigQuery dataset created, in
6951        * `bq://projectId.bqDatasetId`
6952        * format, into which the prediction output is written.
6953        * </pre>
6954        *
6955        * <code>string bigquery_output_dataset = 2 [(.google.api.field_behavior) = OUTPUT_ONLY];
6956        * </code>
6957        *
6958        * @return The bytes for bigqueryOutputDataset.
6959        */
6960       @java.lang.Override
getBigqueryOutputDatasetBytes()6961       public com.google.protobuf.ByteString getBigqueryOutputDatasetBytes() {
6962         java.lang.Object ref = "";
6963         if (outputLocationCase_ == 2) {
6964           ref = outputLocation_;
6965         }
6966         if (ref instanceof String) {
6967           com.google.protobuf.ByteString b =
6968               com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
6969           if (outputLocationCase_ == 2) {
6970             outputLocation_ = b;
6971           }
6972           return b;
6973         } else {
6974           return (com.google.protobuf.ByteString) ref;
6975         }
6976       }
6977       /**
6978        *
6979        *
6980        * <pre>
6981        * Output only. The path of the BigQuery dataset created, in
6982        * `bq://projectId.bqDatasetId`
6983        * format, into which the prediction output is written.
6984        * </pre>
6985        *
6986        * <code>string bigquery_output_dataset = 2 [(.google.api.field_behavior) = OUTPUT_ONLY];
6987        * </code>
6988        *
6989        * @param value The bigqueryOutputDataset to set.
6990        * @return This builder for chaining.
6991        */
setBigqueryOutputDataset(java.lang.String value)6992       public Builder setBigqueryOutputDataset(java.lang.String value) {
6993         if (value == null) {
6994           throw new NullPointerException();
6995         }
6996         outputLocationCase_ = 2;
6997         outputLocation_ = value;
6998         onChanged();
6999         return this;
7000       }
7001       /**
7002        *
7003        *
7004        * <pre>
7005        * Output only. The path of the BigQuery dataset created, in
7006        * `bq://projectId.bqDatasetId`
7007        * format, into which the prediction output is written.
7008        * </pre>
7009        *
7010        * <code>string bigquery_output_dataset = 2 [(.google.api.field_behavior) = OUTPUT_ONLY];
7011        * </code>
7012        *
7013        * @return This builder for chaining.
7014        */
clearBigqueryOutputDataset()7015       public Builder clearBigqueryOutputDataset() {
7016         if (outputLocationCase_ == 2) {
7017           outputLocationCase_ = 0;
7018           outputLocation_ = null;
7019           onChanged();
7020         }
7021         return this;
7022       }
7023       /**
7024        *
7025        *
7026        * <pre>
7027        * Output only. The path of the BigQuery dataset created, in
7028        * `bq://projectId.bqDatasetId`
7029        * format, into which the prediction output is written.
7030        * </pre>
7031        *
7032        * <code>string bigquery_output_dataset = 2 [(.google.api.field_behavior) = OUTPUT_ONLY];
7033        * </code>
7034        *
7035        * @param value The bytes for bigqueryOutputDataset to set.
7036        * @return This builder for chaining.
7037        */
setBigqueryOutputDatasetBytes(com.google.protobuf.ByteString value)7038       public Builder setBigqueryOutputDatasetBytes(com.google.protobuf.ByteString value) {
7039         if (value == null) {
7040           throw new NullPointerException();
7041         }
7042         checkByteStringIsUtf8(value);
7043         outputLocationCase_ = 2;
7044         outputLocation_ = value;
7045         onChanged();
7046         return this;
7047       }
7048 
      // Regular (non-oneof) string field; holds a String or a lazily-decoded
      // ByteString. Presence is tracked by bit 0x4 of bitField0_.
      private java.lang.Object bigqueryOutputTable_ = "";
      /**
       *
       *
       * <pre>
       * Output only. The name of the BigQuery table created, in
       * `predictions_&lt;timestamp&gt;`
       * format, into which the prediction output is written.
       * Can be used by UI to generate the BigQuery output path, for example.
       * </pre>
       *
       * <code>string bigquery_output_table = 4 [(.google.api.field_behavior) = OUTPUT_ONLY];</code>
       *
       * @return The bigqueryOutputTable.
       */
      public java.lang.String getBigqueryOutputTable() {
        java.lang.Object ref = bigqueryOutputTable_;
        if (!(ref instanceof java.lang.String)) {
          // Decode the ByteString once and cache the String form.
          com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
          java.lang.String s = bs.toStringUtf8();
          bigqueryOutputTable_ = s;
          return s;
        } else {
          return (java.lang.String) ref;
        }
      }
      /**
       *
       *
       * <pre>
       * Output only. The name of the BigQuery table created, in
       * `predictions_&lt;timestamp&gt;`
       * format, into which the prediction output is written.
       * Can be used by UI to generate the BigQuery output path, for example.
       * </pre>
       *
       * <code>string bigquery_output_table = 4 [(.google.api.field_behavior) = OUTPUT_ONLY];</code>
       *
       * @return The bytes for bigqueryOutputTable.
       */
      public com.google.protobuf.ByteString getBigqueryOutputTableBytes() {
        java.lang.Object ref = bigqueryOutputTable_;
        if (ref instanceof String) {
          // Encode the String once and cache the ByteString form.
          com.google.protobuf.ByteString b =
              com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
          bigqueryOutputTable_ = b;
          return b;
        } else {
          return (com.google.protobuf.ByteString) ref;
        }
      }
      /**
       *
       *
       * <pre>
       * Output only. The name of the BigQuery table created, in
       * `predictions_&lt;timestamp&gt;`
       * format, into which the prediction output is written.
       * Can be used by UI to generate the BigQuery output path, for example.
       * </pre>
       *
       * <code>string bigquery_output_table = 4 [(.google.api.field_behavior) = OUTPUT_ONLY];</code>
       *
       * @param value The bigqueryOutputTable to set.
       * @return This builder for chaining.
       */
      public Builder setBigqueryOutputTable(java.lang.String value) {
        if (value == null) {
          throw new NullPointerException();
        }
        bigqueryOutputTable_ = value;
        bitField0_ |= 0x00000004;
        onChanged();
        return this;
      }
      /**
       *
       *
       * <pre>
       * Output only. The name of the BigQuery table created, in
       * `predictions_&lt;timestamp&gt;`
       * format, into which the prediction output is written.
       * Can be used by UI to generate the BigQuery output path, for example.
       * </pre>
       *
       * <code>string bigquery_output_table = 4 [(.google.api.field_behavior) = OUTPUT_ONLY];</code>
       *
       * @return This builder for chaining.
       */
      public Builder clearBigqueryOutputTable() {
        // Restore the default value and drop the presence bit.
        bigqueryOutputTable_ = getDefaultInstance().getBigqueryOutputTable();
        bitField0_ = (bitField0_ & ~0x00000004);
        onChanged();
        return this;
      }
      /**
       *
       *
       * <pre>
       * Output only. The name of the BigQuery table created, in
       * `predictions_&lt;timestamp&gt;`
       * format, into which the prediction output is written.
       * Can be used by UI to generate the BigQuery output path, for example.
       * </pre>
       *
       * <code>string bigquery_output_table = 4 [(.google.api.field_behavior) = OUTPUT_ONLY];</code>
       *
       * @param value The bytes for bigqueryOutputTable to set.
       * @return This builder for chaining.
       */
      public Builder setBigqueryOutputTableBytes(com.google.protobuf.ByteString value) {
        if (value == null) {
          throw new NullPointerException();
        }
        // Proto3 strings must be valid UTF-8; reject invalid bytes up front.
        checkByteStringIsUtf8(value);
        bigqueryOutputTable_ = value;
        bitField0_ |= 0x00000004;
        onChanged();
        return this;
      }
7169 
      @java.lang.Override
      // Delegates straight to the generated-message base class.
      public final Builder setUnknownFields(
          final com.google.protobuf.UnknownFieldSet unknownFields) {
        return super.setUnknownFields(unknownFields);
      }

      @java.lang.Override
      // Delegates straight to the generated-message base class.
      public final Builder mergeUnknownFields(
          final com.google.protobuf.UnknownFieldSet unknownFields) {
        return super.mergeUnknownFields(unknownFields);
      }
7181 
7182       // @@protoc_insertion_point(builder_scope:google.cloud.aiplatform.v1.BatchPredictionJob.OutputInfo)
7183     }
7184 
7185     // @@protoc_insertion_point(class_scope:google.cloud.aiplatform.v1.BatchPredictionJob.OutputInfo)
    // Shared immutable default instance of OutputInfo (all fields unset).
    private static final com.google.cloud.aiplatform.v1.BatchPredictionJob.OutputInfo
        DEFAULT_INSTANCE;

    static {
      DEFAULT_INSTANCE = new com.google.cloud.aiplatform.v1.BatchPredictionJob.OutputInfo();
    }

    public static com.google.cloud.aiplatform.v1.BatchPredictionJob.OutputInfo
        getDefaultInstance() {
      return DEFAULT_INSTANCE;
    }
7197 
    // Wire-format parser for OutputInfo. Builds partially even on failure so
    // the unfinished message can be attached to the thrown exception.
    private static final com.google.protobuf.Parser<OutputInfo> PARSER =
        new com.google.protobuf.AbstractParser<OutputInfo>() {
          @java.lang.Override
          public OutputInfo parsePartialFrom(
              com.google.protobuf.CodedInputStream input,
              com.google.protobuf.ExtensionRegistryLite extensionRegistry)
              throws com.google.protobuf.InvalidProtocolBufferException {
            Builder builder = newBuilder();
            try {
              builder.mergeFrom(input, extensionRegistry);
            } catch (com.google.protobuf.InvalidProtocolBufferException e) {
              throw e.setUnfinishedMessage(builder.buildPartial());
            } catch (com.google.protobuf.UninitializedMessageException e) {
              throw e.asInvalidProtocolBufferException()
                  .setUnfinishedMessage(builder.buildPartial());
            } catch (java.io.IOException e) {
              // Wrap plain I/O failures so callers only see protobuf exceptions.
              throw new com.google.protobuf.InvalidProtocolBufferException(e)
                  .setUnfinishedMessage(builder.buildPartial());
            }
            return builder.buildPartial();
          }
        };

    public static com.google.protobuf.Parser<OutputInfo> parser() {
      return PARSER;
    }

    @java.lang.Override
    public com.google.protobuf.Parser<OutputInfo> getParserForType() {
      return PARSER;
    }

    @java.lang.Override
    public com.google.cloud.aiplatform.v1.BatchPredictionJob.OutputInfo
        getDefaultInstanceForType() {
      return DEFAULT_INSTANCE;
    }
7235   }
7236 
  public static final int NAME_FIELD_NUMBER = 1;

  // Holds a String or a lazily-decoded ByteString; volatile because the
  // decode-and-cache in the getters may race across threads.
  @SuppressWarnings("serial")
  private volatile java.lang.Object name_ = "";
  /**
   *
   *
   * <pre>
   * Output only. Resource name of the BatchPredictionJob.
   * </pre>
   *
   * <code>string name = 1 [(.google.api.field_behavior) = OUTPUT_ONLY];</code>
   *
   * @return The name.
   */
  @java.lang.Override
  public java.lang.String getName() {
    java.lang.Object ref = name_;
    if (ref instanceof java.lang.String) {
      return (java.lang.String) ref;
    } else {
      // Decode the ByteString once and cache the String form.
      com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
      java.lang.String s = bs.toStringUtf8();
      name_ = s;
      return s;
    }
  }
  /**
   *
   *
   * <pre>
   * Output only. Resource name of the BatchPredictionJob.
   * </pre>
   *
   * <code>string name = 1 [(.google.api.field_behavior) = OUTPUT_ONLY];</code>
   *
   * @return The bytes for name.
   */
  @java.lang.Override
  public com.google.protobuf.ByteString getNameBytes() {
    java.lang.Object ref = name_;
    if (ref instanceof java.lang.String) {
      // Encode the String once and cache the ByteString form.
      com.google.protobuf.ByteString b =
          com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
      name_ = b;
      return b;
    } else {
      return (com.google.protobuf.ByteString) ref;
    }
  }
7287 
  public static final int DISPLAY_NAME_FIELD_NUMBER = 2;

  // Holds a String or a lazily-decoded ByteString (see name_ for the pattern).
  @SuppressWarnings("serial")
  private volatile java.lang.Object displayName_ = "";
  /**
   *
   *
   * <pre>
   * Required. The user-defined name of this BatchPredictionJob.
   * </pre>
   *
   * <code>string display_name = 2 [(.google.api.field_behavior) = REQUIRED];</code>
   *
   * @return The displayName.
   */
  @java.lang.Override
  public java.lang.String getDisplayName() {
    java.lang.Object ref = displayName_;
    if (ref instanceof java.lang.String) {
      return (java.lang.String) ref;
    } else {
      // Decode the ByteString once and cache the String form.
      com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
      java.lang.String s = bs.toStringUtf8();
      displayName_ = s;
      return s;
    }
  }
  /**
   *
   *
   * <pre>
   * Required. The user-defined name of this BatchPredictionJob.
   * </pre>
   *
   * <code>string display_name = 2 [(.google.api.field_behavior) = REQUIRED];</code>
   *
   * @return The bytes for displayName.
   */
  @java.lang.Override
  public com.google.protobuf.ByteString getDisplayNameBytes() {
    java.lang.Object ref = displayName_;
    if (ref instanceof java.lang.String) {
      // Encode the String once and cache the ByteString form.
      com.google.protobuf.ByteString b =
          com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
      displayName_ = b;
      return b;
    } else {
      return (com.google.protobuf.ByteString) ref;
    }
  }
7338 
  public static final int MODEL_FIELD_NUMBER = 3;

  // Holds a String or a lazily-decoded ByteString (see name_ for the pattern).
  @SuppressWarnings("serial")
  private volatile java.lang.Object model_ = "";
  /**
   *
   *
   * <pre>
   * The name of the Model resource that produces the predictions via this job,
   * must share the same ancestor Location.
   * Starting this job has no impact on any existing deployments of the Model
   * and their resources.
   * Exactly one of model and unmanaged_container_model must be set.
   * The model resource name may contain version id or version alias to specify
   * the version.
   *  Example: `projects/{project}/locations/{location}/models/{model}&#64;2`
   *              or
   *            `projects/{project}/locations/{location}/models/{model}&#64;golden`
   * if no version is specified, the default version will be deployed.
   * </pre>
   *
   * <code>string model = 3 [(.google.api.resource_reference) = { ... }</code>
   *
   * @return The model.
   */
  @java.lang.Override
  public java.lang.String getModel() {
    java.lang.Object ref = model_;
    if (ref instanceof java.lang.String) {
      return (java.lang.String) ref;
    } else {
      // Decode the ByteString once and cache the String form.
      com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
      java.lang.String s = bs.toStringUtf8();
      model_ = s;
      return s;
    }
  }
  /**
   *
   *
   * <pre>
   * The name of the Model resource that produces the predictions via this job,
   * must share the same ancestor Location.
   * Starting this job has no impact on any existing deployments of the Model
   * and their resources.
   * Exactly one of model and unmanaged_container_model must be set.
   * The model resource name may contain version id or version alias to specify
   * the version.
   *  Example: `projects/{project}/locations/{location}/models/{model}&#64;2`
   *              or
   *            `projects/{project}/locations/{location}/models/{model}&#64;golden`
   * if no version is specified, the default version will be deployed.
   * </pre>
   *
   * <code>string model = 3 [(.google.api.resource_reference) = { ... }</code>
   *
   * @return The bytes for model.
   */
  @java.lang.Override
  public com.google.protobuf.ByteString getModelBytes() {
    java.lang.Object ref = model_;
    if (ref instanceof java.lang.String) {
      // Encode the String once and cache the ByteString form.
      com.google.protobuf.ByteString b =
          com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
      model_ = b;
      return b;
    } else {
      return (com.google.protobuf.ByteString) ref;
    }
  }
7409 
  public static final int MODEL_VERSION_ID_FIELD_NUMBER = 30;

  // Holds a String or a lazily-decoded ByteString (see name_ for the pattern).
  @SuppressWarnings("serial")
  private volatile java.lang.Object modelVersionId_ = "";
  /**
   *
   *
   * <pre>
   * Output only. The version ID of the Model that produces the predictions via
   * this job.
   * </pre>
   *
   * <code>string model_version_id = 30 [(.google.api.field_behavior) = OUTPUT_ONLY];</code>
   *
   * @return The modelVersionId.
   */
  @java.lang.Override
  public java.lang.String getModelVersionId() {
    java.lang.Object ref = modelVersionId_;
    if (ref instanceof java.lang.String) {
      return (java.lang.String) ref;
    } else {
      // Decode the ByteString once and cache the String form.
      com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
      java.lang.String s = bs.toStringUtf8();
      modelVersionId_ = s;
      return s;
    }
  }
  /**
   *
   *
   * <pre>
   * Output only. The version ID of the Model that produces the predictions via
   * this job.
   * </pre>
   *
   * <code>string model_version_id = 30 [(.google.api.field_behavior) = OUTPUT_ONLY];</code>
   *
   * @return The bytes for modelVersionId.
   */
  @java.lang.Override
  public com.google.protobuf.ByteString getModelVersionIdBytes() {
    java.lang.Object ref = modelVersionId_;
    if (ref instanceof java.lang.String) {
      // Encode the String once and cache the ByteString form.
      com.google.protobuf.ByteString b =
          com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
      modelVersionId_ = b;
      return b;
    } else {
      return (com.google.protobuf.ByteString) ref;
    }
  }
7462 
  public static final int UNMANAGED_CONTAINER_MODEL_FIELD_NUMBER = 28;
  // Singular message field; null means "not set".
  private com.google.cloud.aiplatform.v1.UnmanagedContainerModel unmanagedContainerModel_;
  /**
   *
   *
   * <pre>
   * Contains model information necessary to perform batch prediction without
   * requiring uploading to model registry.
   * Exactly one of model and unmanaged_container_model must be set.
   * </pre>
   *
   * <code>.google.cloud.aiplatform.v1.UnmanagedContainerModel unmanaged_container_model = 28;
   * </code>
   *
   * @return Whether the unmanagedContainerModel field is set.
   */
  @java.lang.Override
  public boolean hasUnmanagedContainerModel() {
    return unmanagedContainerModel_ != null;
  }
  /**
   *
   *
   * <pre>
   * Contains model information necessary to perform batch prediction without
   * requiring uploading to model registry.
   * Exactly one of model and unmanaged_container_model must be set.
   * </pre>
   *
   * <code>.google.cloud.aiplatform.v1.UnmanagedContainerModel unmanaged_container_model = 28;
   * </code>
   *
   * @return The unmanagedContainerModel.
   */
  @java.lang.Override
  public com.google.cloud.aiplatform.v1.UnmanagedContainerModel getUnmanagedContainerModel() {
    // Never returns null: falls back to the default instance when unset.
    return unmanagedContainerModel_ == null
        ? com.google.cloud.aiplatform.v1.UnmanagedContainerModel.getDefaultInstance()
        : unmanagedContainerModel_;
  }
  /**
   *
   *
   * <pre>
   * Contains model information necessary to perform batch prediction without
   * requiring uploading to model registry.
   * Exactly one of model and unmanaged_container_model must be set.
   * </pre>
   *
   * <code>.google.cloud.aiplatform.v1.UnmanagedContainerModel unmanaged_container_model = 28;
   * </code>
   */
  @java.lang.Override
  public com.google.cloud.aiplatform.v1.UnmanagedContainerModelOrBuilder
      getUnmanagedContainerModelOrBuilder() {
    // Never returns null: falls back to the default instance when unset.
    return unmanagedContainerModel_ == null
        ? com.google.cloud.aiplatform.v1.UnmanagedContainerModel.getDefaultInstance()
        : unmanagedContainerModel_;
  }
7522 
  public static final int INPUT_CONFIG_FIELD_NUMBER = 4;
  // Singular message field; null means "not set".
  private com.google.cloud.aiplatform.v1.BatchPredictionJob.InputConfig inputConfig_;
  /**
   *
   *
   * <pre>
   * Required. Input configuration of the instances on which predictions are
   * performed. The schema of any single instance may be specified via the
   * [Model's][google.cloud.aiplatform.v1.BatchPredictionJob.model]
   * [PredictSchemata's][google.cloud.aiplatform.v1.Model.predict_schemata]
   * [instance_schema_uri][google.cloud.aiplatform.v1.PredictSchemata.instance_schema_uri].
   * </pre>
   *
   * <code>
   * .google.cloud.aiplatform.v1.BatchPredictionJob.InputConfig input_config = 4 [(.google.api.field_behavior) = REQUIRED];
   * </code>
   *
   * @return Whether the inputConfig field is set.
   */
  @java.lang.Override
  public boolean hasInputConfig() {
    return inputConfig_ != null;
  }
  /**
   *
   *
   * <pre>
   * Required. Input configuration of the instances on which predictions are
   * performed. The schema of any single instance may be specified via the
   * [Model's][google.cloud.aiplatform.v1.BatchPredictionJob.model]
   * [PredictSchemata's][google.cloud.aiplatform.v1.Model.predict_schemata]
   * [instance_schema_uri][google.cloud.aiplatform.v1.PredictSchemata.instance_schema_uri].
   * </pre>
   *
   * <code>
   * .google.cloud.aiplatform.v1.BatchPredictionJob.InputConfig input_config = 4 [(.google.api.field_behavior) = REQUIRED];
   * </code>
   *
   * @return The inputConfig.
   */
  @java.lang.Override
  public com.google.cloud.aiplatform.v1.BatchPredictionJob.InputConfig getInputConfig() {
    // Never returns null: falls back to the default instance when unset.
    return inputConfig_ == null
        ? com.google.cloud.aiplatform.v1.BatchPredictionJob.InputConfig.getDefaultInstance()
        : inputConfig_;
  }
7569   /**
7570    *
7571    *
7572    * <pre>
7573    * Required. Input configuration of the instances on which predictions are
7574    * performed. The schema of any single instance may be specified via the
7575    * [Model's][google.cloud.aiplatform.v1.BatchPredictionJob.model]
7576    * [PredictSchemata's][google.cloud.aiplatform.v1.Model.predict_schemata]
7577    * [instance_schema_uri][google.cloud.aiplatform.v1.PredictSchemata.instance_schema_uri].
7578    * </pre>
7579    *
7580    * <code>
7581    * .google.cloud.aiplatform.v1.BatchPredictionJob.InputConfig input_config = 4 [(.google.api.field_behavior) = REQUIRED];
7582    * </code>
7583    */
7584   @java.lang.Override
7585   public com.google.cloud.aiplatform.v1.BatchPredictionJob.InputConfigOrBuilder
getInputConfigOrBuilder()7586       getInputConfigOrBuilder() {
7587     return inputConfig_ == null
7588         ? com.google.cloud.aiplatform.v1.BatchPredictionJob.InputConfig.getDefaultInstance()
7589         : inputConfig_;
7590   }
7591 
7592   public static final int INSTANCE_CONFIG_FIELD_NUMBER = 27;
7593   private com.google.cloud.aiplatform.v1.BatchPredictionJob.InstanceConfig instanceConfig_;
7594   /**
7595    *
7596    *
7597    * <pre>
7598    * Configuration for how to convert batch prediction input instances to the
7599    * prediction instances that are sent to the Model.
7600    * </pre>
7601    *
7602    * <code>.google.cloud.aiplatform.v1.BatchPredictionJob.InstanceConfig instance_config = 27;
7603    * </code>
7604    *
7605    * @return Whether the instanceConfig field is set.
7606    */
7607   @java.lang.Override
hasInstanceConfig()7608   public boolean hasInstanceConfig() {
7609     return instanceConfig_ != null;
7610   }
7611   /**
7612    *
7613    *
7614    * <pre>
7615    * Configuration for how to convert batch prediction input instances to the
7616    * prediction instances that are sent to the Model.
7617    * </pre>
7618    *
7619    * <code>.google.cloud.aiplatform.v1.BatchPredictionJob.InstanceConfig instance_config = 27;
7620    * </code>
7621    *
7622    * @return The instanceConfig.
7623    */
7624   @java.lang.Override
getInstanceConfig()7625   public com.google.cloud.aiplatform.v1.BatchPredictionJob.InstanceConfig getInstanceConfig() {
7626     return instanceConfig_ == null
7627         ? com.google.cloud.aiplatform.v1.BatchPredictionJob.InstanceConfig.getDefaultInstance()
7628         : instanceConfig_;
7629   }
7630   /**
7631    *
7632    *
7633    * <pre>
7634    * Configuration for how to convert batch prediction input instances to the
7635    * prediction instances that are sent to the Model.
7636    * </pre>
7637    *
7638    * <code>.google.cloud.aiplatform.v1.BatchPredictionJob.InstanceConfig instance_config = 27;
7639    * </code>
7640    */
7641   @java.lang.Override
7642   public com.google.cloud.aiplatform.v1.BatchPredictionJob.InstanceConfigOrBuilder
getInstanceConfigOrBuilder()7643       getInstanceConfigOrBuilder() {
7644     return instanceConfig_ == null
7645         ? com.google.cloud.aiplatform.v1.BatchPredictionJob.InstanceConfig.getDefaultInstance()
7646         : instanceConfig_;
7647   }
7648 
7649   public static final int MODEL_PARAMETERS_FIELD_NUMBER = 5;
7650   private com.google.protobuf.Value modelParameters_;
7651   /**
7652    *
7653    *
7654    * <pre>
7655    * The parameters that govern the predictions. The schema of the parameters
7656    * may be specified via the
7657    * [Model's][google.cloud.aiplatform.v1.BatchPredictionJob.model]
7658    * [PredictSchemata's][google.cloud.aiplatform.v1.Model.predict_schemata]
7659    * [parameters_schema_uri][google.cloud.aiplatform.v1.PredictSchemata.parameters_schema_uri].
7660    * </pre>
7661    *
7662    * <code>.google.protobuf.Value model_parameters = 5;</code>
7663    *
7664    * @return Whether the modelParameters field is set.
7665    */
7666   @java.lang.Override
hasModelParameters()7667   public boolean hasModelParameters() {
7668     return modelParameters_ != null;
7669   }
7670   /**
7671    *
7672    *
7673    * <pre>
7674    * The parameters that govern the predictions. The schema of the parameters
7675    * may be specified via the
7676    * [Model's][google.cloud.aiplatform.v1.BatchPredictionJob.model]
7677    * [PredictSchemata's][google.cloud.aiplatform.v1.Model.predict_schemata]
7678    * [parameters_schema_uri][google.cloud.aiplatform.v1.PredictSchemata.parameters_schema_uri].
7679    * </pre>
7680    *
7681    * <code>.google.protobuf.Value model_parameters = 5;</code>
7682    *
7683    * @return The modelParameters.
7684    */
7685   @java.lang.Override
getModelParameters()7686   public com.google.protobuf.Value getModelParameters() {
7687     return modelParameters_ == null
7688         ? com.google.protobuf.Value.getDefaultInstance()
7689         : modelParameters_;
7690   }
7691   /**
7692    *
7693    *
7694    * <pre>
7695    * The parameters that govern the predictions. The schema of the parameters
7696    * may be specified via the
7697    * [Model's][google.cloud.aiplatform.v1.BatchPredictionJob.model]
7698    * [PredictSchemata's][google.cloud.aiplatform.v1.Model.predict_schemata]
7699    * [parameters_schema_uri][google.cloud.aiplatform.v1.PredictSchemata.parameters_schema_uri].
7700    * </pre>
7701    *
7702    * <code>.google.protobuf.Value model_parameters = 5;</code>
7703    */
7704   @java.lang.Override
getModelParametersOrBuilder()7705   public com.google.protobuf.ValueOrBuilder getModelParametersOrBuilder() {
7706     return modelParameters_ == null
7707         ? com.google.protobuf.Value.getDefaultInstance()
7708         : modelParameters_;
7709   }
7710 
7711   public static final int OUTPUT_CONFIG_FIELD_NUMBER = 6;
7712   private com.google.cloud.aiplatform.v1.BatchPredictionJob.OutputConfig outputConfig_;
7713   /**
7714    *
7715    *
7716    * <pre>
7717    * Required. The Configuration specifying where output predictions should
7718    * be written.
7719    * The schema of any single prediction may be specified as a concatenation
7720    * of [Model's][google.cloud.aiplatform.v1.BatchPredictionJob.model]
7721    * [PredictSchemata's][google.cloud.aiplatform.v1.Model.predict_schemata]
7722    * [instance_schema_uri][google.cloud.aiplatform.v1.PredictSchemata.instance_schema_uri]
7723    * and
7724    * [prediction_schema_uri][google.cloud.aiplatform.v1.PredictSchemata.prediction_schema_uri].
7725    * </pre>
7726    *
7727    * <code>
7728    * .google.cloud.aiplatform.v1.BatchPredictionJob.OutputConfig output_config = 6 [(.google.api.field_behavior) = REQUIRED];
7729    * </code>
7730    *
7731    * @return Whether the outputConfig field is set.
7732    */
7733   @java.lang.Override
hasOutputConfig()7734   public boolean hasOutputConfig() {
7735     return outputConfig_ != null;
7736   }
7737   /**
7738    *
7739    *
7740    * <pre>
7741    * Required. The Configuration specifying where output predictions should
7742    * be written.
7743    * The schema of any single prediction may be specified as a concatenation
7744    * of [Model's][google.cloud.aiplatform.v1.BatchPredictionJob.model]
7745    * [PredictSchemata's][google.cloud.aiplatform.v1.Model.predict_schemata]
7746    * [instance_schema_uri][google.cloud.aiplatform.v1.PredictSchemata.instance_schema_uri]
7747    * and
7748    * [prediction_schema_uri][google.cloud.aiplatform.v1.PredictSchemata.prediction_schema_uri].
7749    * </pre>
7750    *
7751    * <code>
7752    * .google.cloud.aiplatform.v1.BatchPredictionJob.OutputConfig output_config = 6 [(.google.api.field_behavior) = REQUIRED];
7753    * </code>
7754    *
7755    * @return The outputConfig.
7756    */
7757   @java.lang.Override
getOutputConfig()7758   public com.google.cloud.aiplatform.v1.BatchPredictionJob.OutputConfig getOutputConfig() {
7759     return outputConfig_ == null
7760         ? com.google.cloud.aiplatform.v1.BatchPredictionJob.OutputConfig.getDefaultInstance()
7761         : outputConfig_;
7762   }
7763   /**
7764    *
7765    *
7766    * <pre>
7767    * Required. The Configuration specifying where output predictions should
7768    * be written.
7769    * The schema of any single prediction may be specified as a concatenation
7770    * of [Model's][google.cloud.aiplatform.v1.BatchPredictionJob.model]
7771    * [PredictSchemata's][google.cloud.aiplatform.v1.Model.predict_schemata]
7772    * [instance_schema_uri][google.cloud.aiplatform.v1.PredictSchemata.instance_schema_uri]
7773    * and
7774    * [prediction_schema_uri][google.cloud.aiplatform.v1.PredictSchemata.prediction_schema_uri].
7775    * </pre>
7776    *
7777    * <code>
7778    * .google.cloud.aiplatform.v1.BatchPredictionJob.OutputConfig output_config = 6 [(.google.api.field_behavior) = REQUIRED];
7779    * </code>
7780    */
7781   @java.lang.Override
7782   public com.google.cloud.aiplatform.v1.BatchPredictionJob.OutputConfigOrBuilder
getOutputConfigOrBuilder()7783       getOutputConfigOrBuilder() {
7784     return outputConfig_ == null
7785         ? com.google.cloud.aiplatform.v1.BatchPredictionJob.OutputConfig.getDefaultInstance()
7786         : outputConfig_;
7787   }
7788 
7789   public static final int DEDICATED_RESOURCES_FIELD_NUMBER = 7;
7790   private com.google.cloud.aiplatform.v1.BatchDedicatedResources dedicatedResources_;
7791   /**
7792    *
7793    *
7794    * <pre>
7795    * The config of resources used by the Model during the batch prediction. If
7796    * the Model
7797    * [supports][google.cloud.aiplatform.v1.Model.supported_deployment_resources_types]
7798    * DEDICATED_RESOURCES this config may be provided (and the job will use these
7799    * resources), if the Model doesn't support AUTOMATIC_RESOURCES, this config
7800    * must be provided.
7801    * </pre>
7802    *
7803    * <code>.google.cloud.aiplatform.v1.BatchDedicatedResources dedicated_resources = 7;</code>
7804    *
7805    * @return Whether the dedicatedResources field is set.
7806    */
7807   @java.lang.Override
hasDedicatedResources()7808   public boolean hasDedicatedResources() {
7809     return dedicatedResources_ != null;
7810   }
7811   /**
7812    *
7813    *
7814    * <pre>
7815    * The config of resources used by the Model during the batch prediction. If
7816    * the Model
7817    * [supports][google.cloud.aiplatform.v1.Model.supported_deployment_resources_types]
7818    * DEDICATED_RESOURCES this config may be provided (and the job will use these
7819    * resources), if the Model doesn't support AUTOMATIC_RESOURCES, this config
7820    * must be provided.
7821    * </pre>
7822    *
7823    * <code>.google.cloud.aiplatform.v1.BatchDedicatedResources dedicated_resources = 7;</code>
7824    *
7825    * @return The dedicatedResources.
7826    */
7827   @java.lang.Override
getDedicatedResources()7828   public com.google.cloud.aiplatform.v1.BatchDedicatedResources getDedicatedResources() {
7829     return dedicatedResources_ == null
7830         ? com.google.cloud.aiplatform.v1.BatchDedicatedResources.getDefaultInstance()
7831         : dedicatedResources_;
7832   }
7833   /**
7834    *
7835    *
7836    * <pre>
7837    * The config of resources used by the Model during the batch prediction. If
7838    * the Model
7839    * [supports][google.cloud.aiplatform.v1.Model.supported_deployment_resources_types]
7840    * DEDICATED_RESOURCES this config may be provided (and the job will use these
7841    * resources), if the Model doesn't support AUTOMATIC_RESOURCES, this config
7842    * must be provided.
7843    * </pre>
7844    *
7845    * <code>.google.cloud.aiplatform.v1.BatchDedicatedResources dedicated_resources = 7;</code>
7846    */
7847   @java.lang.Override
7848   public com.google.cloud.aiplatform.v1.BatchDedicatedResourcesOrBuilder
getDedicatedResourcesOrBuilder()7849       getDedicatedResourcesOrBuilder() {
7850     return dedicatedResources_ == null
7851         ? com.google.cloud.aiplatform.v1.BatchDedicatedResources.getDefaultInstance()
7852         : dedicatedResources_;
7853   }
7854 
7855   public static final int SERVICE_ACCOUNT_FIELD_NUMBER = 29;
7856 
7857   @SuppressWarnings("serial")
7858   private volatile java.lang.Object serviceAccount_ = "";
7859   /**
7860    *
7861    *
7862    * <pre>
7863    * The service account that the DeployedModel's container runs as. If not
7864    * specified, a system generated one will be used, which
7865    * has minimal permissions and the custom container, if used, may not have
7866    * enough permission to access other Google Cloud resources.
7867    * Users deploying the Model must have the `iam.serviceAccounts.actAs`
7868    * permission on this service account.
7869    * </pre>
7870    *
7871    * <code>string service_account = 29;</code>
7872    *
7873    * @return The serviceAccount.
7874    */
7875   @java.lang.Override
getServiceAccount()7876   public java.lang.String getServiceAccount() {
7877     java.lang.Object ref = serviceAccount_;
7878     if (ref instanceof java.lang.String) {
7879       return (java.lang.String) ref;
7880     } else {
7881       com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
7882       java.lang.String s = bs.toStringUtf8();
7883       serviceAccount_ = s;
7884       return s;
7885     }
7886   }
7887   /**
7888    *
7889    *
7890    * <pre>
7891    * The service account that the DeployedModel's container runs as. If not
7892    * specified, a system generated one will be used, which
7893    * has minimal permissions and the custom container, if used, may not have
7894    * enough permission to access other Google Cloud resources.
7895    * Users deploying the Model must have the `iam.serviceAccounts.actAs`
7896    * permission on this service account.
7897    * </pre>
7898    *
7899    * <code>string service_account = 29;</code>
7900    *
7901    * @return The bytes for serviceAccount.
7902    */
7903   @java.lang.Override
getServiceAccountBytes()7904   public com.google.protobuf.ByteString getServiceAccountBytes() {
7905     java.lang.Object ref = serviceAccount_;
7906     if (ref instanceof java.lang.String) {
7907       com.google.protobuf.ByteString b =
7908           com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
7909       serviceAccount_ = b;
7910       return b;
7911     } else {
7912       return (com.google.protobuf.ByteString) ref;
7913     }
7914   }
7915 
7916   public static final int MANUAL_BATCH_TUNING_PARAMETERS_FIELD_NUMBER = 8;
7917   private com.google.cloud.aiplatform.v1.ManualBatchTuningParameters manualBatchTuningParameters_;
7918   /**
7919    *
7920    *
7921    * <pre>
7922    * Immutable. Parameters configuring the batch behavior. Currently only
7923    * applicable when
7924    * [dedicated_resources][google.cloud.aiplatform.v1.BatchPredictionJob.dedicated_resources]
7925    * are used (in other cases Vertex AI does the tuning itself).
7926    * </pre>
7927    *
7928    * <code>
7929    * .google.cloud.aiplatform.v1.ManualBatchTuningParameters manual_batch_tuning_parameters = 8 [(.google.api.field_behavior) = IMMUTABLE];
7930    * </code>
7931    *
7932    * @return Whether the manualBatchTuningParameters field is set.
7933    */
7934   @java.lang.Override
hasManualBatchTuningParameters()7935   public boolean hasManualBatchTuningParameters() {
7936     return manualBatchTuningParameters_ != null;
7937   }
7938   /**
7939    *
7940    *
7941    * <pre>
7942    * Immutable. Parameters configuring the batch behavior. Currently only
7943    * applicable when
7944    * [dedicated_resources][google.cloud.aiplatform.v1.BatchPredictionJob.dedicated_resources]
7945    * are used (in other cases Vertex AI does the tuning itself).
7946    * </pre>
7947    *
7948    * <code>
7949    * .google.cloud.aiplatform.v1.ManualBatchTuningParameters manual_batch_tuning_parameters = 8 [(.google.api.field_behavior) = IMMUTABLE];
7950    * </code>
7951    *
7952    * @return The manualBatchTuningParameters.
7953    */
7954   @java.lang.Override
7955   public com.google.cloud.aiplatform.v1.ManualBatchTuningParameters
getManualBatchTuningParameters()7956       getManualBatchTuningParameters() {
7957     return manualBatchTuningParameters_ == null
7958         ? com.google.cloud.aiplatform.v1.ManualBatchTuningParameters.getDefaultInstance()
7959         : manualBatchTuningParameters_;
7960   }
7961   /**
7962    *
7963    *
7964    * <pre>
7965    * Immutable. Parameters configuring the batch behavior. Currently only
7966    * applicable when
7967    * [dedicated_resources][google.cloud.aiplatform.v1.BatchPredictionJob.dedicated_resources]
7968    * are used (in other cases Vertex AI does the tuning itself).
7969    * </pre>
7970    *
7971    * <code>
7972    * .google.cloud.aiplatform.v1.ManualBatchTuningParameters manual_batch_tuning_parameters = 8 [(.google.api.field_behavior) = IMMUTABLE];
7973    * </code>
7974    */
7975   @java.lang.Override
7976   public com.google.cloud.aiplatform.v1.ManualBatchTuningParametersOrBuilder
getManualBatchTuningParametersOrBuilder()7977       getManualBatchTuningParametersOrBuilder() {
7978     return manualBatchTuningParameters_ == null
7979         ? com.google.cloud.aiplatform.v1.ManualBatchTuningParameters.getDefaultInstance()
7980         : manualBatchTuningParameters_;
7981   }
7982 
7983   public static final int GENERATE_EXPLANATION_FIELD_NUMBER = 23;
7984   private boolean generateExplanation_ = false;
7985   /**
7986    *
7987    *
7988    * <pre>
7989    * Generate explanation with the batch prediction results.
7990    * When set to `true`, the batch prediction output changes based on the
7991    * `predictions_format` field of the
7992    * [BatchPredictionJob.output_config][google.cloud.aiplatform.v1.BatchPredictionJob.output_config]
7993    * object:
7994    *  * `bigquery`: output includes a column named `explanation`. The value
7995    *    is a struct that conforms to the
7996    *    [Explanation][google.cloud.aiplatform.v1.Explanation] object.
7997    *  * `jsonl`: The JSON objects on each line include an additional entry
7998    *    keyed `explanation`. The value of the entry is a JSON object that
7999    *    conforms to the [Explanation][google.cloud.aiplatform.v1.Explanation]
8000    *    object.
8001    *  * `csv`: Generating explanations for CSV format is not supported.
8002    * If this field is set to true, either the
8003    * [Model.explanation_spec][google.cloud.aiplatform.v1.Model.explanation_spec]
8004    * or
8005    * [explanation_spec][google.cloud.aiplatform.v1.BatchPredictionJob.explanation_spec]
8006    * must be populated.
8007    * </pre>
8008    *
8009    * <code>bool generate_explanation = 23;</code>
8010    *
8011    * @return The generateExplanation.
8012    */
8013   @java.lang.Override
getGenerateExplanation()8014   public boolean getGenerateExplanation() {
8015     return generateExplanation_;
8016   }
8017 
8018   public static final int EXPLANATION_SPEC_FIELD_NUMBER = 25;
8019   private com.google.cloud.aiplatform.v1.ExplanationSpec explanationSpec_;
8020   /**
8021    *
8022    *
8023    * <pre>
8024    * Explanation configuration for this BatchPredictionJob. Can be
8025    * specified only if
8026    * [generate_explanation][google.cloud.aiplatform.v1.BatchPredictionJob.generate_explanation]
8027    * is set to `true`.
8028    * This value overrides the value of
8029    * [Model.explanation_spec][google.cloud.aiplatform.v1.Model.explanation_spec].
8030    * All fields of
8031    * [explanation_spec][google.cloud.aiplatform.v1.BatchPredictionJob.explanation_spec]
8032    * are optional in the request. If a field of the
8033    * [explanation_spec][google.cloud.aiplatform.v1.BatchPredictionJob.explanation_spec]
8034    * object is not populated, the corresponding field of the
8035    * [Model.explanation_spec][google.cloud.aiplatform.v1.Model.explanation_spec]
8036    * object is inherited.
8037    * </pre>
8038    *
8039    * <code>.google.cloud.aiplatform.v1.ExplanationSpec explanation_spec = 25;</code>
8040    *
8041    * @return Whether the explanationSpec field is set.
8042    */
8043   @java.lang.Override
hasExplanationSpec()8044   public boolean hasExplanationSpec() {
8045     return explanationSpec_ != null;
8046   }
8047   /**
8048    *
8049    *
8050    * <pre>
8051    * Explanation configuration for this BatchPredictionJob. Can be
8052    * specified only if
8053    * [generate_explanation][google.cloud.aiplatform.v1.BatchPredictionJob.generate_explanation]
8054    * is set to `true`.
8055    * This value overrides the value of
8056    * [Model.explanation_spec][google.cloud.aiplatform.v1.Model.explanation_spec].
8057    * All fields of
8058    * [explanation_spec][google.cloud.aiplatform.v1.BatchPredictionJob.explanation_spec]
8059    * are optional in the request. If a field of the
8060    * [explanation_spec][google.cloud.aiplatform.v1.BatchPredictionJob.explanation_spec]
8061    * object is not populated, the corresponding field of the
8062    * [Model.explanation_spec][google.cloud.aiplatform.v1.Model.explanation_spec]
8063    * object is inherited.
8064    * </pre>
8065    *
8066    * <code>.google.cloud.aiplatform.v1.ExplanationSpec explanation_spec = 25;</code>
8067    *
8068    * @return The explanationSpec.
8069    */
8070   @java.lang.Override
getExplanationSpec()8071   public com.google.cloud.aiplatform.v1.ExplanationSpec getExplanationSpec() {
8072     return explanationSpec_ == null
8073         ? com.google.cloud.aiplatform.v1.ExplanationSpec.getDefaultInstance()
8074         : explanationSpec_;
8075   }
8076   /**
8077    *
8078    *
8079    * <pre>
8080    * Explanation configuration for this BatchPredictionJob. Can be
8081    * specified only if
8082    * [generate_explanation][google.cloud.aiplatform.v1.BatchPredictionJob.generate_explanation]
8083    * is set to `true`.
8084    * This value overrides the value of
8085    * [Model.explanation_spec][google.cloud.aiplatform.v1.Model.explanation_spec].
8086    * All fields of
8087    * [explanation_spec][google.cloud.aiplatform.v1.BatchPredictionJob.explanation_spec]
8088    * are optional in the request. If a field of the
8089    * [explanation_spec][google.cloud.aiplatform.v1.BatchPredictionJob.explanation_spec]
8090    * object is not populated, the corresponding field of the
8091    * [Model.explanation_spec][google.cloud.aiplatform.v1.Model.explanation_spec]
8092    * object is inherited.
8093    * </pre>
8094    *
8095    * <code>.google.cloud.aiplatform.v1.ExplanationSpec explanation_spec = 25;</code>
8096    */
8097   @java.lang.Override
getExplanationSpecOrBuilder()8098   public com.google.cloud.aiplatform.v1.ExplanationSpecOrBuilder getExplanationSpecOrBuilder() {
8099     return explanationSpec_ == null
8100         ? com.google.cloud.aiplatform.v1.ExplanationSpec.getDefaultInstance()
8101         : explanationSpec_;
8102   }
8103 
8104   public static final int OUTPUT_INFO_FIELD_NUMBER = 9;
8105   private com.google.cloud.aiplatform.v1.BatchPredictionJob.OutputInfo outputInfo_;
8106   /**
8107    *
8108    *
8109    * <pre>
8110    * Output only. Information further describing the output of this job.
8111    * </pre>
8112    *
8113    * <code>
8114    * .google.cloud.aiplatform.v1.BatchPredictionJob.OutputInfo output_info = 9 [(.google.api.field_behavior) = OUTPUT_ONLY];
8115    * </code>
8116    *
8117    * @return Whether the outputInfo field is set.
8118    */
8119   @java.lang.Override
hasOutputInfo()8120   public boolean hasOutputInfo() {
8121     return outputInfo_ != null;
8122   }
8123   /**
8124    *
8125    *
8126    * <pre>
8127    * Output only. Information further describing the output of this job.
8128    * </pre>
8129    *
8130    * <code>
8131    * .google.cloud.aiplatform.v1.BatchPredictionJob.OutputInfo output_info = 9 [(.google.api.field_behavior) = OUTPUT_ONLY];
8132    * </code>
8133    *
8134    * @return The outputInfo.
8135    */
8136   @java.lang.Override
getOutputInfo()8137   public com.google.cloud.aiplatform.v1.BatchPredictionJob.OutputInfo getOutputInfo() {
8138     return outputInfo_ == null
8139         ? com.google.cloud.aiplatform.v1.BatchPredictionJob.OutputInfo.getDefaultInstance()
8140         : outputInfo_;
8141   }
8142   /**
8143    *
8144    *
8145    * <pre>
8146    * Output only. Information further describing the output of this job.
8147    * </pre>
8148    *
8149    * <code>
8150    * .google.cloud.aiplatform.v1.BatchPredictionJob.OutputInfo output_info = 9 [(.google.api.field_behavior) = OUTPUT_ONLY];
8151    * </code>
8152    */
8153   @java.lang.Override
8154   public com.google.cloud.aiplatform.v1.BatchPredictionJob.OutputInfoOrBuilder
getOutputInfoOrBuilder()8155       getOutputInfoOrBuilder() {
8156     return outputInfo_ == null
8157         ? com.google.cloud.aiplatform.v1.BatchPredictionJob.OutputInfo.getDefaultInstance()
8158         : outputInfo_;
8159   }
8160 
8161   public static final int STATE_FIELD_NUMBER = 10;
8162   private int state_ = 0;
8163   /**
8164    *
8165    *
8166    * <pre>
8167    * Output only. The detailed state of the job.
8168    * </pre>
8169    *
8170    * <code>
8171    * .google.cloud.aiplatform.v1.JobState state = 10 [(.google.api.field_behavior) = OUTPUT_ONLY];
8172    * </code>
8173    *
8174    * @return The enum numeric value on the wire for state.
8175    */
8176   @java.lang.Override
getStateValue()8177   public int getStateValue() {
8178     return state_;
8179   }
8180   /**
8181    *
8182    *
8183    * <pre>
8184    * Output only. The detailed state of the job.
8185    * </pre>
8186    *
8187    * <code>
8188    * .google.cloud.aiplatform.v1.JobState state = 10 [(.google.api.field_behavior) = OUTPUT_ONLY];
8189    * </code>
8190    *
8191    * @return The state.
8192    */
8193   @java.lang.Override
getState()8194   public com.google.cloud.aiplatform.v1.JobState getState() {
8195     com.google.cloud.aiplatform.v1.JobState result =
8196         com.google.cloud.aiplatform.v1.JobState.forNumber(state_);
8197     return result == null ? com.google.cloud.aiplatform.v1.JobState.UNRECOGNIZED : result;
8198   }
8199 
8200   public static final int ERROR_FIELD_NUMBER = 11;
8201   private com.google.rpc.Status error_;
8202   /**
8203    *
8204    *
8205    * <pre>
8206    * Output only. Only populated when the job's state is JOB_STATE_FAILED or
8207    * JOB_STATE_CANCELLED.
8208    * </pre>
8209    *
8210    * <code>.google.rpc.Status error = 11 [(.google.api.field_behavior) = OUTPUT_ONLY];</code>
8211    *
8212    * @return Whether the error field is set.
8213    */
8214   @java.lang.Override
hasError()8215   public boolean hasError() {
8216     return error_ != null;
8217   }
8218   /**
8219    *
8220    *
8221    * <pre>
8222    * Output only. Only populated when the job's state is JOB_STATE_FAILED or
8223    * JOB_STATE_CANCELLED.
8224    * </pre>
8225    *
8226    * <code>.google.rpc.Status error = 11 [(.google.api.field_behavior) = OUTPUT_ONLY];</code>
8227    *
8228    * @return The error.
8229    */
8230   @java.lang.Override
getError()8231   public com.google.rpc.Status getError() {
8232     return error_ == null ? com.google.rpc.Status.getDefaultInstance() : error_;
8233   }
8234   /**
8235    *
8236    *
8237    * <pre>
8238    * Output only. Only populated when the job's state is JOB_STATE_FAILED or
8239    * JOB_STATE_CANCELLED.
8240    * </pre>
8241    *
8242    * <code>.google.rpc.Status error = 11 [(.google.api.field_behavior) = OUTPUT_ONLY];</code>
8243    */
8244   @java.lang.Override
getErrorOrBuilder()8245   public com.google.rpc.StatusOrBuilder getErrorOrBuilder() {
8246     return error_ == null ? com.google.rpc.Status.getDefaultInstance() : error_;
8247   }
8248 
8249   public static final int PARTIAL_FAILURES_FIELD_NUMBER = 12;
8250 
8251   @SuppressWarnings("serial")
8252   private java.util.List<com.google.rpc.Status> partialFailures_;
8253   /**
8254    *
8255    *
8256    * <pre>
8257    * Output only. Partial failures encountered.
8258    * For example, single files that can't be read.
8259    * This field never exceeds 20 entries.
8260    * Status details fields contain standard Google Cloud error details.
8261    * </pre>
8262    *
8263    * <code>
8264    * repeated .google.rpc.Status partial_failures = 12 [(.google.api.field_behavior) = OUTPUT_ONLY];
8265    * </code>
8266    */
8267   @java.lang.Override
getPartialFailuresList()8268   public java.util.List<com.google.rpc.Status> getPartialFailuresList() {
8269     return partialFailures_;
8270   }
8271   /**
8272    *
8273    *
8274    * <pre>
8275    * Output only. Partial failures encountered.
8276    * For example, single files that can't be read.
8277    * This field never exceeds 20 entries.
8278    * Status details fields contain standard Google Cloud error details.
8279    * </pre>
8280    *
8281    * <code>
8282    * repeated .google.rpc.Status partial_failures = 12 [(.google.api.field_behavior) = OUTPUT_ONLY];
8283    * </code>
8284    */
8285   @java.lang.Override
8286   public java.util.List<? extends com.google.rpc.StatusOrBuilder>
getPartialFailuresOrBuilderList()8287       getPartialFailuresOrBuilderList() {
8288     return partialFailures_;
8289   }
8290   /**
8291    *
8292    *
8293    * <pre>
8294    * Output only. Partial failures encountered.
8295    * For example, single files that can't be read.
8296    * This field never exceeds 20 entries.
8297    * Status details fields contain standard Google Cloud error details.
8298    * </pre>
8299    *
8300    * <code>
8301    * repeated .google.rpc.Status partial_failures = 12 [(.google.api.field_behavior) = OUTPUT_ONLY];
8302    * </code>
8303    */
8304   @java.lang.Override
getPartialFailuresCount()8305   public int getPartialFailuresCount() {
8306     return partialFailures_.size();
8307   }
8308   /**
8309    *
8310    *
8311    * <pre>
8312    * Output only. Partial failures encountered.
8313    * For example, single files that can't be read.
8314    * This field never exceeds 20 entries.
8315    * Status details fields contain standard Google Cloud error details.
8316    * </pre>
8317    *
8318    * <code>
8319    * repeated .google.rpc.Status partial_failures = 12 [(.google.api.field_behavior) = OUTPUT_ONLY];
8320    * </code>
8321    */
8322   @java.lang.Override
getPartialFailures(int index)8323   public com.google.rpc.Status getPartialFailures(int index) {
8324     return partialFailures_.get(index);
8325   }
8326   /**
8327    *
8328    *
8329    * <pre>
8330    * Output only. Partial failures encountered.
8331    * For example, single files that can't be read.
8332    * This field never exceeds 20 entries.
8333    * Status details fields contain standard Google Cloud error details.
8334    * </pre>
8335    *
8336    * <code>
8337    * repeated .google.rpc.Status partial_failures = 12 [(.google.api.field_behavior) = OUTPUT_ONLY];
8338    * </code>
8339    */
8340   @java.lang.Override
getPartialFailuresOrBuilder(int index)8341   public com.google.rpc.StatusOrBuilder getPartialFailuresOrBuilder(int index) {
8342     return partialFailures_.get(index);
8343   }
8344 
  public static final int RESOURCES_CONSUMED_FIELD_NUMBER = 13;
  // Null while the field is unset; accessors substitute the default instance.
  private com.google.cloud.aiplatform.v1.ResourcesConsumed resourcesConsumed_;
8347   /**
8348    *
8349    *
8350    * <pre>
8351    * Output only. Information about resources that had been consumed by this
8352    * job. Provided in real time at best effort basis, as well as a final value
8353    * once the job completes.
8354    * Note: This field currently may be not populated for batch predictions that
8355    * use AutoML Models.
8356    * </pre>
8357    *
8358    * <code>
8359    * .google.cloud.aiplatform.v1.ResourcesConsumed resources_consumed = 13 [(.google.api.field_behavior) = OUTPUT_ONLY];
8360    * </code>
8361    *
8362    * @return Whether the resourcesConsumed field is set.
8363    */
8364   @java.lang.Override
hasResourcesConsumed()8365   public boolean hasResourcesConsumed() {
8366     return resourcesConsumed_ != null;
8367   }
8368   /**
8369    *
8370    *
8371    * <pre>
8372    * Output only. Information about resources that had been consumed by this
8373    * job. Provided in real time at best effort basis, as well as a final value
8374    * once the job completes.
8375    * Note: This field currently may be not populated for batch predictions that
8376    * use AutoML Models.
8377    * </pre>
8378    *
8379    * <code>
8380    * .google.cloud.aiplatform.v1.ResourcesConsumed resources_consumed = 13 [(.google.api.field_behavior) = OUTPUT_ONLY];
8381    * </code>
8382    *
8383    * @return The resourcesConsumed.
8384    */
8385   @java.lang.Override
getResourcesConsumed()8386   public com.google.cloud.aiplatform.v1.ResourcesConsumed getResourcesConsumed() {
8387     return resourcesConsumed_ == null
8388         ? com.google.cloud.aiplatform.v1.ResourcesConsumed.getDefaultInstance()
8389         : resourcesConsumed_;
8390   }
8391   /**
8392    *
8393    *
8394    * <pre>
8395    * Output only. Information about resources that had been consumed by this
8396    * job. Provided in real time at best effort basis, as well as a final value
8397    * once the job completes.
8398    * Note: This field currently may be not populated for batch predictions that
8399    * use AutoML Models.
8400    * </pre>
8401    *
8402    * <code>
8403    * .google.cloud.aiplatform.v1.ResourcesConsumed resources_consumed = 13 [(.google.api.field_behavior) = OUTPUT_ONLY];
8404    * </code>
8405    */
8406   @java.lang.Override
getResourcesConsumedOrBuilder()8407   public com.google.cloud.aiplatform.v1.ResourcesConsumedOrBuilder getResourcesConsumedOrBuilder() {
8408     return resourcesConsumed_ == null
8409         ? com.google.cloud.aiplatform.v1.ResourcesConsumed.getDefaultInstance()
8410         : resourcesConsumed_;
8411   }
8412 
  public static final int COMPLETION_STATS_FIELD_NUMBER = 14;
  // Null while the field is unset; accessors substitute the default instance.
  private com.google.cloud.aiplatform.v1.CompletionStats completionStats_;
8415   /**
8416    *
8417    *
8418    * <pre>
8419    * Output only. Statistics on completed and failed prediction instances.
8420    * </pre>
8421    *
8422    * <code>
8423    * .google.cloud.aiplatform.v1.CompletionStats completion_stats = 14 [(.google.api.field_behavior) = OUTPUT_ONLY];
8424    * </code>
8425    *
8426    * @return Whether the completionStats field is set.
8427    */
8428   @java.lang.Override
hasCompletionStats()8429   public boolean hasCompletionStats() {
8430     return completionStats_ != null;
8431   }
8432   /**
8433    *
8434    *
8435    * <pre>
8436    * Output only. Statistics on completed and failed prediction instances.
8437    * </pre>
8438    *
8439    * <code>
8440    * .google.cloud.aiplatform.v1.CompletionStats completion_stats = 14 [(.google.api.field_behavior) = OUTPUT_ONLY];
8441    * </code>
8442    *
8443    * @return The completionStats.
8444    */
8445   @java.lang.Override
getCompletionStats()8446   public com.google.cloud.aiplatform.v1.CompletionStats getCompletionStats() {
8447     return completionStats_ == null
8448         ? com.google.cloud.aiplatform.v1.CompletionStats.getDefaultInstance()
8449         : completionStats_;
8450   }
8451   /**
8452    *
8453    *
8454    * <pre>
8455    * Output only. Statistics on completed and failed prediction instances.
8456    * </pre>
8457    *
8458    * <code>
8459    * .google.cloud.aiplatform.v1.CompletionStats completion_stats = 14 [(.google.api.field_behavior) = OUTPUT_ONLY];
8460    * </code>
8461    */
8462   @java.lang.Override
getCompletionStatsOrBuilder()8463   public com.google.cloud.aiplatform.v1.CompletionStatsOrBuilder getCompletionStatsOrBuilder() {
8464     return completionStats_ == null
8465         ? com.google.cloud.aiplatform.v1.CompletionStats.getDefaultInstance()
8466         : completionStats_;
8467   }
8468 
  public static final int CREATE_TIME_FIELD_NUMBER = 15;
  // Null while the field is unset; accessors substitute Timestamp.getDefaultInstance().
  private com.google.protobuf.Timestamp createTime_;
8471   /**
8472    *
8473    *
8474    * <pre>
8475    * Output only. Time when the BatchPredictionJob was created.
8476    * </pre>
8477    *
8478    * <code>.google.protobuf.Timestamp create_time = 15 [(.google.api.field_behavior) = OUTPUT_ONLY];
8479    * </code>
8480    *
8481    * @return Whether the createTime field is set.
8482    */
8483   @java.lang.Override
hasCreateTime()8484   public boolean hasCreateTime() {
8485     return createTime_ != null;
8486   }
8487   /**
8488    *
8489    *
8490    * <pre>
8491    * Output only. Time when the BatchPredictionJob was created.
8492    * </pre>
8493    *
8494    * <code>.google.protobuf.Timestamp create_time = 15 [(.google.api.field_behavior) = OUTPUT_ONLY];
8495    * </code>
8496    *
8497    * @return The createTime.
8498    */
8499   @java.lang.Override
getCreateTime()8500   public com.google.protobuf.Timestamp getCreateTime() {
8501     return createTime_ == null ? com.google.protobuf.Timestamp.getDefaultInstance() : createTime_;
8502   }
8503   /**
8504    *
8505    *
8506    * <pre>
8507    * Output only. Time when the BatchPredictionJob was created.
8508    * </pre>
8509    *
8510    * <code>.google.protobuf.Timestamp create_time = 15 [(.google.api.field_behavior) = OUTPUT_ONLY];
8511    * </code>
8512    */
8513   @java.lang.Override
getCreateTimeOrBuilder()8514   public com.google.protobuf.TimestampOrBuilder getCreateTimeOrBuilder() {
8515     return createTime_ == null ? com.google.protobuf.Timestamp.getDefaultInstance() : createTime_;
8516   }
8517 
  public static final int START_TIME_FIELD_NUMBER = 16;
  // Null while the field is unset; accessors substitute Timestamp.getDefaultInstance().
  private com.google.protobuf.Timestamp startTime_;
8520   /**
8521    *
8522    *
8523    * <pre>
8524    * Output only. Time when the BatchPredictionJob for the first time entered
8525    * the `JOB_STATE_RUNNING` state.
8526    * </pre>
8527    *
8528    * <code>.google.protobuf.Timestamp start_time = 16 [(.google.api.field_behavior) = OUTPUT_ONLY];
8529    * </code>
8530    *
8531    * @return Whether the startTime field is set.
8532    */
8533   @java.lang.Override
hasStartTime()8534   public boolean hasStartTime() {
8535     return startTime_ != null;
8536   }
8537   /**
8538    *
8539    *
8540    * <pre>
8541    * Output only. Time when the BatchPredictionJob for the first time entered
8542    * the `JOB_STATE_RUNNING` state.
8543    * </pre>
8544    *
8545    * <code>.google.protobuf.Timestamp start_time = 16 [(.google.api.field_behavior) = OUTPUT_ONLY];
8546    * </code>
8547    *
8548    * @return The startTime.
8549    */
8550   @java.lang.Override
getStartTime()8551   public com.google.protobuf.Timestamp getStartTime() {
8552     return startTime_ == null ? com.google.protobuf.Timestamp.getDefaultInstance() : startTime_;
8553   }
8554   /**
8555    *
8556    *
8557    * <pre>
8558    * Output only. Time when the BatchPredictionJob for the first time entered
8559    * the `JOB_STATE_RUNNING` state.
8560    * </pre>
8561    *
8562    * <code>.google.protobuf.Timestamp start_time = 16 [(.google.api.field_behavior) = OUTPUT_ONLY];
8563    * </code>
8564    */
8565   @java.lang.Override
getStartTimeOrBuilder()8566   public com.google.protobuf.TimestampOrBuilder getStartTimeOrBuilder() {
8567     return startTime_ == null ? com.google.protobuf.Timestamp.getDefaultInstance() : startTime_;
8568   }
8569 
  public static final int END_TIME_FIELD_NUMBER = 17;
  // Null while the field is unset; accessors substitute Timestamp.getDefaultInstance().
  private com.google.protobuf.Timestamp endTime_;
8572   /**
8573    *
8574    *
8575    * <pre>
8576    * Output only. Time when the BatchPredictionJob entered any of the following
8577    * states: `JOB_STATE_SUCCEEDED`, `JOB_STATE_FAILED`, `JOB_STATE_CANCELLED`.
8578    * </pre>
8579    *
8580    * <code>.google.protobuf.Timestamp end_time = 17 [(.google.api.field_behavior) = OUTPUT_ONLY];
8581    * </code>
8582    *
8583    * @return Whether the endTime field is set.
8584    */
8585   @java.lang.Override
hasEndTime()8586   public boolean hasEndTime() {
8587     return endTime_ != null;
8588   }
8589   /**
8590    *
8591    *
8592    * <pre>
8593    * Output only. Time when the BatchPredictionJob entered any of the following
8594    * states: `JOB_STATE_SUCCEEDED`, `JOB_STATE_FAILED`, `JOB_STATE_CANCELLED`.
8595    * </pre>
8596    *
8597    * <code>.google.protobuf.Timestamp end_time = 17 [(.google.api.field_behavior) = OUTPUT_ONLY];
8598    * </code>
8599    *
8600    * @return The endTime.
8601    */
8602   @java.lang.Override
getEndTime()8603   public com.google.protobuf.Timestamp getEndTime() {
8604     return endTime_ == null ? com.google.protobuf.Timestamp.getDefaultInstance() : endTime_;
8605   }
8606   /**
8607    *
8608    *
8609    * <pre>
8610    * Output only. Time when the BatchPredictionJob entered any of the following
8611    * states: `JOB_STATE_SUCCEEDED`, `JOB_STATE_FAILED`, `JOB_STATE_CANCELLED`.
8612    * </pre>
8613    *
8614    * <code>.google.protobuf.Timestamp end_time = 17 [(.google.api.field_behavior) = OUTPUT_ONLY];
8615    * </code>
8616    */
8617   @java.lang.Override
getEndTimeOrBuilder()8618   public com.google.protobuf.TimestampOrBuilder getEndTimeOrBuilder() {
8619     return endTime_ == null ? com.google.protobuf.Timestamp.getDefaultInstance() : endTime_;
8620   }
8621 
  public static final int UPDATE_TIME_FIELD_NUMBER = 18;
  // Null while the field is unset; accessors substitute Timestamp.getDefaultInstance().
  private com.google.protobuf.Timestamp updateTime_;
8624   /**
8625    *
8626    *
8627    * <pre>
8628    * Output only. Time when the BatchPredictionJob was most recently updated.
8629    * </pre>
8630    *
8631    * <code>.google.protobuf.Timestamp update_time = 18 [(.google.api.field_behavior) = OUTPUT_ONLY];
8632    * </code>
8633    *
8634    * @return Whether the updateTime field is set.
8635    */
8636   @java.lang.Override
hasUpdateTime()8637   public boolean hasUpdateTime() {
8638     return updateTime_ != null;
8639   }
8640   /**
8641    *
8642    *
8643    * <pre>
8644    * Output only. Time when the BatchPredictionJob was most recently updated.
8645    * </pre>
8646    *
8647    * <code>.google.protobuf.Timestamp update_time = 18 [(.google.api.field_behavior) = OUTPUT_ONLY];
8648    * </code>
8649    *
8650    * @return The updateTime.
8651    */
8652   @java.lang.Override
getUpdateTime()8653   public com.google.protobuf.Timestamp getUpdateTime() {
8654     return updateTime_ == null ? com.google.protobuf.Timestamp.getDefaultInstance() : updateTime_;
8655   }
8656   /**
8657    *
8658    *
8659    * <pre>
8660    * Output only. Time when the BatchPredictionJob was most recently updated.
8661    * </pre>
8662    *
8663    * <code>.google.protobuf.Timestamp update_time = 18 [(.google.api.field_behavior) = OUTPUT_ONLY];
8664    * </code>
8665    */
8666   @java.lang.Override
getUpdateTimeOrBuilder()8667   public com.google.protobuf.TimestampOrBuilder getUpdateTimeOrBuilder() {
8668     return updateTime_ == null ? com.google.protobuf.Timestamp.getDefaultInstance() : updateTime_;
8669   }
8670 
  public static final int LABELS_FIELD_NUMBER = 19;

  // Holder class defers construction of the labels map's default entry
  // (string key -> string value) until first use, as in all generated maps.
  private static final class LabelsDefaultEntryHolder {
    static final com.google.protobuf.MapEntry<java.lang.String, java.lang.String> defaultEntry =
        com.google.protobuf.MapEntry.<java.lang.String, java.lang.String>newDefaultInstance(
            com.google.cloud.aiplatform.v1.BatchPredictionJobProto
                .internal_static_google_cloud_aiplatform_v1_BatchPredictionJob_LabelsEntry_descriptor,
            com.google.protobuf.WireFormat.FieldType.STRING,
            "",
            com.google.protobuf.WireFormat.FieldType.STRING,
            "");
  }

  @SuppressWarnings("serial")
  // Null until populated; internalGetLabels() substitutes a shared empty MapField.
  private com.google.protobuf.MapField<java.lang.String, java.lang.String> labels_;
8686 
internalGetLabels()8687   private com.google.protobuf.MapField<java.lang.String, java.lang.String> internalGetLabels() {
8688     if (labels_ == null) {
8689       return com.google.protobuf.MapField.emptyMapField(LabelsDefaultEntryHolder.defaultEntry);
8690     }
8691     return labels_;
8692   }
8693 
getLabelsCount()8694   public int getLabelsCount() {
8695     return internalGetLabels().getMap().size();
8696   }
8697   /**
8698    *
8699    *
8700    * <pre>
8701    * The labels with user-defined metadata to organize BatchPredictionJobs.
8702    * Label keys and values can be no longer than 64 characters
8703    * (Unicode codepoints), can only contain lowercase letters, numeric
8704    * characters, underscores and dashes. International characters are allowed.
8705    * See https://goo.gl/xmQnxf for more information and examples of labels.
8706    * </pre>
8707    *
8708    * <code>map&lt;string, string&gt; labels = 19;</code>
8709    */
8710   @java.lang.Override
containsLabels(java.lang.String key)8711   public boolean containsLabels(java.lang.String key) {
8712     if (key == null) {
8713       throw new NullPointerException("map key");
8714     }
8715     return internalGetLabels().getMap().containsKey(key);
8716   }
8717   /** Use {@link #getLabelsMap()} instead. */
8718   @java.lang.Override
8719   @java.lang.Deprecated
getLabels()8720   public java.util.Map<java.lang.String, java.lang.String> getLabels() {
8721     return getLabelsMap();
8722   }
8723   /**
8724    *
8725    *
8726    * <pre>
8727    * The labels with user-defined metadata to organize BatchPredictionJobs.
8728    * Label keys and values can be no longer than 64 characters
8729    * (Unicode codepoints), can only contain lowercase letters, numeric
8730    * characters, underscores and dashes. International characters are allowed.
8731    * See https://goo.gl/xmQnxf for more information and examples of labels.
8732    * </pre>
8733    *
8734    * <code>map&lt;string, string&gt; labels = 19;</code>
8735    */
8736   @java.lang.Override
getLabelsMap()8737   public java.util.Map<java.lang.String, java.lang.String> getLabelsMap() {
8738     return internalGetLabels().getMap();
8739   }
8740   /**
8741    *
8742    *
8743    * <pre>
8744    * The labels with user-defined metadata to organize BatchPredictionJobs.
8745    * Label keys and values can be no longer than 64 characters
8746    * (Unicode codepoints), can only contain lowercase letters, numeric
8747    * characters, underscores and dashes. International characters are allowed.
8748    * See https://goo.gl/xmQnxf for more information and examples of labels.
8749    * </pre>
8750    *
8751    * <code>map&lt;string, string&gt; labels = 19;</code>
8752    */
8753   @java.lang.Override
getLabelsOrDefault( java.lang.String key, java.lang.String defaultValue)8754   public /* nullable */ java.lang.String getLabelsOrDefault(
8755       java.lang.String key,
8756       /* nullable */
8757       java.lang.String defaultValue) {
8758     if (key == null) {
8759       throw new NullPointerException("map key");
8760     }
8761     java.util.Map<java.lang.String, java.lang.String> map = internalGetLabels().getMap();
8762     return map.containsKey(key) ? map.get(key) : defaultValue;
8763   }
8764   /**
8765    *
8766    *
8767    * <pre>
8768    * The labels with user-defined metadata to organize BatchPredictionJobs.
8769    * Label keys and values can be no longer than 64 characters
8770    * (Unicode codepoints), can only contain lowercase letters, numeric
8771    * characters, underscores and dashes. International characters are allowed.
8772    * See https://goo.gl/xmQnxf for more information and examples of labels.
8773    * </pre>
8774    *
8775    * <code>map&lt;string, string&gt; labels = 19;</code>
8776    */
8777   @java.lang.Override
getLabelsOrThrow(java.lang.String key)8778   public java.lang.String getLabelsOrThrow(java.lang.String key) {
8779     if (key == null) {
8780       throw new NullPointerException("map key");
8781     }
8782     java.util.Map<java.lang.String, java.lang.String> map = internalGetLabels().getMap();
8783     if (!map.containsKey(key)) {
8784       throw new java.lang.IllegalArgumentException();
8785     }
8786     return map.get(key);
8787   }
8788 
  public static final int ENCRYPTION_SPEC_FIELD_NUMBER = 24;
  // Null while the field is unset; accessors substitute the default instance.
  private com.google.cloud.aiplatform.v1.EncryptionSpec encryptionSpec_;
8791   /**
8792    *
8793    *
8794    * <pre>
8795    * Customer-managed encryption key options for a BatchPredictionJob. If this
8796    * is set, then all resources created by the BatchPredictionJob will be
8797    * encrypted with the provided encryption key.
8798    * </pre>
8799    *
8800    * <code>.google.cloud.aiplatform.v1.EncryptionSpec encryption_spec = 24;</code>
8801    *
8802    * @return Whether the encryptionSpec field is set.
8803    */
8804   @java.lang.Override
hasEncryptionSpec()8805   public boolean hasEncryptionSpec() {
8806     return encryptionSpec_ != null;
8807   }
8808   /**
8809    *
8810    *
8811    * <pre>
8812    * Customer-managed encryption key options for a BatchPredictionJob. If this
8813    * is set, then all resources created by the BatchPredictionJob will be
8814    * encrypted with the provided encryption key.
8815    * </pre>
8816    *
8817    * <code>.google.cloud.aiplatform.v1.EncryptionSpec encryption_spec = 24;</code>
8818    *
8819    * @return The encryptionSpec.
8820    */
8821   @java.lang.Override
getEncryptionSpec()8822   public com.google.cloud.aiplatform.v1.EncryptionSpec getEncryptionSpec() {
8823     return encryptionSpec_ == null
8824         ? com.google.cloud.aiplatform.v1.EncryptionSpec.getDefaultInstance()
8825         : encryptionSpec_;
8826   }
8827   /**
8828    *
8829    *
8830    * <pre>
8831    * Customer-managed encryption key options for a BatchPredictionJob. If this
8832    * is set, then all resources created by the BatchPredictionJob will be
8833    * encrypted with the provided encryption key.
8834    * </pre>
8835    *
8836    * <code>.google.cloud.aiplatform.v1.EncryptionSpec encryption_spec = 24;</code>
8837    */
8838   @java.lang.Override
getEncryptionSpecOrBuilder()8839   public com.google.cloud.aiplatform.v1.EncryptionSpecOrBuilder getEncryptionSpecOrBuilder() {
8840     return encryptionSpec_ == null
8841         ? com.google.cloud.aiplatform.v1.EncryptionSpec.getDefaultInstance()
8842         : encryptionSpec_;
8843   }
8844 
  public static final int DISABLE_CONTAINER_LOGGING_FIELD_NUMBER = 34;
  // Plain proto3 bool field; defaults to false and is omitted from the wire
  // format when false (see writeTo/getSerializedSize).
  private boolean disableContainerLogging_ = false;
  /**
   *
   *
   * <pre>
   * For custom-trained Models and AutoML Tabular Models, the container of the
   * DeployedModel instances will send `stderr` and `stdout` streams to
   * Cloud Logging by default. Please note that the logs incur cost,
   * which are subject to [Cloud Logging
   * pricing](https://cloud.google.com/logging/pricing).
   * User can disable container logging by setting this flag to true.
   * </pre>
   *
   * <code>bool disable_container_logging = 34;</code>
   *
   * @return The disableContainerLogging.
   */
  @java.lang.Override
  public boolean getDisableContainerLogging() {
    return disableContainerLogging_;
  }
8867 
  // Memoized result of isInitialized(): -1 = not yet computed, 0 = false, 1 = true.
  private byte memoizedIsInitialized = -1;

  @java.lang.Override
  public final boolean isInitialized() {
    byte isInitialized = memoizedIsInitialized;
    if (isInitialized == 1) return true;
    if (isInitialized == 0) return false;

    // This message declares no required fields, so it is always initialized;
    // cache that result for subsequent calls.
    memoizedIsInitialized = 1;
    return true;
  }
8879 
  // Serializes all set fields in ascending field-number order. Proto3 scalar
  // fields are skipped at their default value (empty string, false, enum 0);
  // message fields are skipped when null. The emission order here must stay
  // in lockstep with getSerializedSize().
  @java.lang.Override
  public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException {
    if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(name_)) {
      com.google.protobuf.GeneratedMessageV3.writeString(output, 1, name_);
    }
    if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(displayName_)) {
      com.google.protobuf.GeneratedMessageV3.writeString(output, 2, displayName_);
    }
    if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(model_)) {
      com.google.protobuf.GeneratedMessageV3.writeString(output, 3, model_);
    }
    if (inputConfig_ != null) {
      output.writeMessage(4, getInputConfig());
    }
    if (modelParameters_ != null) {
      output.writeMessage(5, getModelParameters());
    }
    if (outputConfig_ != null) {
      output.writeMessage(6, getOutputConfig());
    }
    if (dedicatedResources_ != null) {
      output.writeMessage(7, getDedicatedResources());
    }
    if (manualBatchTuningParameters_ != null) {
      output.writeMessage(8, getManualBatchTuningParameters());
    }
    if (outputInfo_ != null) {
      output.writeMessage(9, getOutputInfo());
    }
    if (state_ != com.google.cloud.aiplatform.v1.JobState.JOB_STATE_UNSPECIFIED.getNumber()) {
      output.writeEnum(10, state_);
    }
    if (error_ != null) {
      output.writeMessage(11, getError());
    }
    // Repeated field: every element is written, even if the list is empty-ish.
    for (int i = 0; i < partialFailures_.size(); i++) {
      output.writeMessage(12, partialFailures_.get(i));
    }
    if (resourcesConsumed_ != null) {
      output.writeMessage(13, getResourcesConsumed());
    }
    if (completionStats_ != null) {
      output.writeMessage(14, getCompletionStats());
    }
    if (createTime_ != null) {
      output.writeMessage(15, getCreateTime());
    }
    if (startTime_ != null) {
      output.writeMessage(16, getStartTime());
    }
    if (endTime_ != null) {
      output.writeMessage(17, getEndTime());
    }
    if (updateTime_ != null) {
      output.writeMessage(18, getUpdateTime());
    }
    // Labels map is serialized as repeated MapEntry messages at field 19.
    com.google.protobuf.GeneratedMessageV3.serializeStringMapTo(
        output, internalGetLabels(), LabelsDefaultEntryHolder.defaultEntry, 19);
    if (generateExplanation_ != false) {
      output.writeBool(23, generateExplanation_);
    }
    if (encryptionSpec_ != null) {
      output.writeMessage(24, getEncryptionSpec());
    }
    if (explanationSpec_ != null) {
      output.writeMessage(25, getExplanationSpec());
    }
    if (instanceConfig_ != null) {
      output.writeMessage(27, getInstanceConfig());
    }
    if (unmanagedContainerModel_ != null) {
      output.writeMessage(28, getUnmanagedContainerModel());
    }
    if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(serviceAccount_)) {
      com.google.protobuf.GeneratedMessageV3.writeString(output, 29, serviceAccount_);
    }
    if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(modelVersionId_)) {
      com.google.protobuf.GeneratedMessageV3.writeString(output, 30, modelVersionId_);
    }
    if (disableContainerLogging_ != false) {
      output.writeBool(34, disableContainerLogging_);
    }
    // Fields unknown to this generated version are preserved and re-emitted.
    getUnknownFields().writeTo(output);
  }
8964 
  // Computes (and memoizes in memoizedSize) the exact byte length writeTo()
  // will emit. Field-skipping logic must mirror writeTo() exactly.
  @java.lang.Override
  public int getSerializedSize() {
    int size = memoizedSize;
    if (size != -1) return size;

    size = 0;
    if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(name_)) {
      size += com.google.protobuf.GeneratedMessageV3.computeStringSize(1, name_);
    }
    if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(displayName_)) {
      size += com.google.protobuf.GeneratedMessageV3.computeStringSize(2, displayName_);
    }
    if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(model_)) {
      size += com.google.protobuf.GeneratedMessageV3.computeStringSize(3, model_);
    }
    if (inputConfig_ != null) {
      size += com.google.protobuf.CodedOutputStream.computeMessageSize(4, getInputConfig());
    }
    if (modelParameters_ != null) {
      size += com.google.protobuf.CodedOutputStream.computeMessageSize(5, getModelParameters());
    }
    if (outputConfig_ != null) {
      size += com.google.protobuf.CodedOutputStream.computeMessageSize(6, getOutputConfig());
    }
    if (dedicatedResources_ != null) {
      size += com.google.protobuf.CodedOutputStream.computeMessageSize(7, getDedicatedResources());
    }
    if (manualBatchTuningParameters_ != null) {
      size +=
          com.google.protobuf.CodedOutputStream.computeMessageSize(
              8, getManualBatchTuningParameters());
    }
    if (outputInfo_ != null) {
      size += com.google.protobuf.CodedOutputStream.computeMessageSize(9, getOutputInfo());
    }
    if (state_ != com.google.cloud.aiplatform.v1.JobState.JOB_STATE_UNSPECIFIED.getNumber()) {
      size += com.google.protobuf.CodedOutputStream.computeEnumSize(10, state_);
    }
    if (error_ != null) {
      size += com.google.protobuf.CodedOutputStream.computeMessageSize(11, getError());
    }
    for (int i = 0; i < partialFailures_.size(); i++) {
      size += com.google.protobuf.CodedOutputStream.computeMessageSize(12, partialFailures_.get(i));
    }
    if (resourcesConsumed_ != null) {
      size += com.google.protobuf.CodedOutputStream.computeMessageSize(13, getResourcesConsumed());
    }
    if (completionStats_ != null) {
      size += com.google.protobuf.CodedOutputStream.computeMessageSize(14, getCompletionStats());
    }
    if (createTime_ != null) {
      size += com.google.protobuf.CodedOutputStream.computeMessageSize(15, getCreateTime());
    }
    if (startTime_ != null) {
      size += com.google.protobuf.CodedOutputStream.computeMessageSize(16, getStartTime());
    }
    if (endTime_ != null) {
      size += com.google.protobuf.CodedOutputStream.computeMessageSize(17, getEndTime());
    }
    if (updateTime_ != null) {
      size += com.google.protobuf.CodedOutputStream.computeMessageSize(18, getUpdateTime());
    }
    // Each labels entry is sized as a synthetic MapEntry message at field 19,
    // matching serializeStringMapTo() in writeTo().
    for (java.util.Map.Entry<java.lang.String, java.lang.String> entry :
        internalGetLabels().getMap().entrySet()) {
      com.google.protobuf.MapEntry<java.lang.String, java.lang.String> labels__ =
          LabelsDefaultEntryHolder.defaultEntry
              .newBuilderForType()
              .setKey(entry.getKey())
              .setValue(entry.getValue())
              .build();
      size += com.google.protobuf.CodedOutputStream.computeMessageSize(19, labels__);
    }
    if (generateExplanation_ != false) {
      size += com.google.protobuf.CodedOutputStream.computeBoolSize(23, generateExplanation_);
    }
    if (encryptionSpec_ != null) {
      size += com.google.protobuf.CodedOutputStream.computeMessageSize(24, getEncryptionSpec());
    }
    if (explanationSpec_ != null) {
      size += com.google.protobuf.CodedOutputStream.computeMessageSize(25, getExplanationSpec());
    }
    if (instanceConfig_ != null) {
      size += com.google.protobuf.CodedOutputStream.computeMessageSize(27, getInstanceConfig());
    }
    if (unmanagedContainerModel_ != null) {
      size +=
          com.google.protobuf.CodedOutputStream.computeMessageSize(
              28, getUnmanagedContainerModel());
    }
    if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(serviceAccount_)) {
      size += com.google.protobuf.GeneratedMessageV3.computeStringSize(29, serviceAccount_);
    }
    if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(modelVersionId_)) {
      size += com.google.protobuf.GeneratedMessageV3.computeStringSize(30, modelVersionId_);
    }
    if (disableContainerLogging_ != false) {
      size += com.google.protobuf.CodedOutputStream.computeBoolSize(34, disableContainerLogging_);
    }
    size += getUnknownFields().getSerializedSize();
    // Cache the computed size; cleared to -1 by the runtime when state changes.
    memoizedSize = size;
    return size;
  }
9067 
9068   @java.lang.Override
equals(final java.lang.Object obj)9069   public boolean equals(final java.lang.Object obj) {
9070     if (obj == this) {
9071       return true;
9072     }
9073     if (!(obj instanceof com.google.cloud.aiplatform.v1.BatchPredictionJob)) {
9074       return super.equals(obj);
9075     }
9076     com.google.cloud.aiplatform.v1.BatchPredictionJob other =
9077         (com.google.cloud.aiplatform.v1.BatchPredictionJob) obj;
9078 
9079     if (!getName().equals(other.getName())) return false;
9080     if (!getDisplayName().equals(other.getDisplayName())) return false;
9081     if (!getModel().equals(other.getModel())) return false;
9082     if (!getModelVersionId().equals(other.getModelVersionId())) return false;
9083     if (hasUnmanagedContainerModel() != other.hasUnmanagedContainerModel()) return false;
9084     if (hasUnmanagedContainerModel()) {
9085       if (!getUnmanagedContainerModel().equals(other.getUnmanagedContainerModel())) return false;
9086     }
9087     if (hasInputConfig() != other.hasInputConfig()) return false;
9088     if (hasInputConfig()) {
9089       if (!getInputConfig().equals(other.getInputConfig())) return false;
9090     }
9091     if (hasInstanceConfig() != other.hasInstanceConfig()) return false;
9092     if (hasInstanceConfig()) {
9093       if (!getInstanceConfig().equals(other.getInstanceConfig())) return false;
9094     }
9095     if (hasModelParameters() != other.hasModelParameters()) return false;
9096     if (hasModelParameters()) {
9097       if (!getModelParameters().equals(other.getModelParameters())) return false;
9098     }
9099     if (hasOutputConfig() != other.hasOutputConfig()) return false;
9100     if (hasOutputConfig()) {
9101       if (!getOutputConfig().equals(other.getOutputConfig())) return false;
9102     }
9103     if (hasDedicatedResources() != other.hasDedicatedResources()) return false;
9104     if (hasDedicatedResources()) {
9105       if (!getDedicatedResources().equals(other.getDedicatedResources())) return false;
9106     }
9107     if (!getServiceAccount().equals(other.getServiceAccount())) return false;
9108     if (hasManualBatchTuningParameters() != other.hasManualBatchTuningParameters()) return false;
9109     if (hasManualBatchTuningParameters()) {
9110       if (!getManualBatchTuningParameters().equals(other.getManualBatchTuningParameters()))
9111         return false;
9112     }
9113     if (getGenerateExplanation() != other.getGenerateExplanation()) return false;
9114     if (hasExplanationSpec() != other.hasExplanationSpec()) return false;
9115     if (hasExplanationSpec()) {
9116       if (!getExplanationSpec().equals(other.getExplanationSpec())) return false;
9117     }
9118     if (hasOutputInfo() != other.hasOutputInfo()) return false;
9119     if (hasOutputInfo()) {
9120       if (!getOutputInfo().equals(other.getOutputInfo())) return false;
9121     }
9122     if (state_ != other.state_) return false;
9123     if (hasError() != other.hasError()) return false;
9124     if (hasError()) {
9125       if (!getError().equals(other.getError())) return false;
9126     }
9127     if (!getPartialFailuresList().equals(other.getPartialFailuresList())) return false;
9128     if (hasResourcesConsumed() != other.hasResourcesConsumed()) return false;
9129     if (hasResourcesConsumed()) {
9130       if (!getResourcesConsumed().equals(other.getResourcesConsumed())) return false;
9131     }
9132     if (hasCompletionStats() != other.hasCompletionStats()) return false;
9133     if (hasCompletionStats()) {
9134       if (!getCompletionStats().equals(other.getCompletionStats())) return false;
9135     }
9136     if (hasCreateTime() != other.hasCreateTime()) return false;
9137     if (hasCreateTime()) {
9138       if (!getCreateTime().equals(other.getCreateTime())) return false;
9139     }
9140     if (hasStartTime() != other.hasStartTime()) return false;
9141     if (hasStartTime()) {
9142       if (!getStartTime().equals(other.getStartTime())) return false;
9143     }
9144     if (hasEndTime() != other.hasEndTime()) return false;
9145     if (hasEndTime()) {
9146       if (!getEndTime().equals(other.getEndTime())) return false;
9147     }
9148     if (hasUpdateTime() != other.hasUpdateTime()) return false;
9149     if (hasUpdateTime()) {
9150       if (!getUpdateTime().equals(other.getUpdateTime())) return false;
9151     }
9152     if (!internalGetLabels().equals(other.internalGetLabels())) return false;
9153     if (hasEncryptionSpec() != other.hasEncryptionSpec()) return false;
9154     if (hasEncryptionSpec()) {
9155       if (!getEncryptionSpec().equals(other.getEncryptionSpec())) return false;
9156     }
9157     if (getDisableContainerLogging() != other.getDisableContainerLogging()) return false;
9158     if (!getUnknownFields().equals(other.getUnknownFields())) return false;
9159     return true;
9160   }
9161 
  @java.lang.Override
  public int hashCode() {
    // Lazily computed and cached; 0 is the "not yet computed" sentinel.
    if (memoizedHashCode != 0) {
      return memoizedHashCode;
    }
    int hash = 41;
    hash = (19 * hash) + getDescriptor().hashCode();
    // Proto3 strings without explicit presence are always mixed in.
    hash = (37 * hash) + NAME_FIELD_NUMBER;
    hash = (53 * hash) + getName().hashCode();
    hash = (37 * hash) + DISPLAY_NAME_FIELD_NUMBER;
    hash = (53 * hash) + getDisplayName().hashCode();
    hash = (37 * hash) + MODEL_FIELD_NUMBER;
    hash = (53 * hash) + getModel().hashCode();
    hash = (37 * hash) + MODEL_VERSION_ID_FIELD_NUMBER;
    hash = (53 * hash) + getModelVersionId().hashCode();
    // Singular message fields contribute only when present, mirroring equals().
    if (hasUnmanagedContainerModel()) {
      hash = (37 * hash) + UNMANAGED_CONTAINER_MODEL_FIELD_NUMBER;
      hash = (53 * hash) + getUnmanagedContainerModel().hashCode();
    }
    if (hasInputConfig()) {
      hash = (37 * hash) + INPUT_CONFIG_FIELD_NUMBER;
      hash = (53 * hash) + getInputConfig().hashCode();
    }
    if (hasInstanceConfig()) {
      hash = (37 * hash) + INSTANCE_CONFIG_FIELD_NUMBER;
      hash = (53 * hash) + getInstanceConfig().hashCode();
    }
    if (hasModelParameters()) {
      hash = (37 * hash) + MODEL_PARAMETERS_FIELD_NUMBER;
      hash = (53 * hash) + getModelParameters().hashCode();
    }
    if (hasOutputConfig()) {
      hash = (37 * hash) + OUTPUT_CONFIG_FIELD_NUMBER;
      hash = (53 * hash) + getOutputConfig().hashCode();
    }
    if (hasDedicatedResources()) {
      hash = (37 * hash) + DEDICATED_RESOURCES_FIELD_NUMBER;
      hash = (53 * hash) + getDedicatedResources().hashCode();
    }
    hash = (37 * hash) + SERVICE_ACCOUNT_FIELD_NUMBER;
    hash = (53 * hash) + getServiceAccount().hashCode();
    if (hasManualBatchTuningParameters()) {
      hash = (37 * hash) + MANUAL_BATCH_TUNING_PARAMETERS_FIELD_NUMBER;
      hash = (53 * hash) + getManualBatchTuningParameters().hashCode();
    }
    hash = (37 * hash) + GENERATE_EXPLANATION_FIELD_NUMBER;
    hash = (53 * hash) + com.google.protobuf.Internal.hashBoolean(getGenerateExplanation());
    if (hasExplanationSpec()) {
      hash = (37 * hash) + EXPLANATION_SPEC_FIELD_NUMBER;
      hash = (53 * hash) + getExplanationSpec().hashCode();
    }
    if (hasOutputInfo()) {
      hash = (37 * hash) + OUTPUT_INFO_FIELD_NUMBER;
      hash = (53 * hash) + getOutputInfo().hashCode();
    }
    // Enum hashed by its raw wire value.
    hash = (37 * hash) + STATE_FIELD_NUMBER;
    hash = (53 * hash) + state_;
    if (hasError()) {
      hash = (37 * hash) + ERROR_FIELD_NUMBER;
      hash = (53 * hash) + getError().hashCode();
    }
    // Repeated field contributes only when non-empty.
    if (getPartialFailuresCount() > 0) {
      hash = (37 * hash) + PARTIAL_FAILURES_FIELD_NUMBER;
      hash = (53 * hash) + getPartialFailuresList().hashCode();
    }
    if (hasResourcesConsumed()) {
      hash = (37 * hash) + RESOURCES_CONSUMED_FIELD_NUMBER;
      hash = (53 * hash) + getResourcesConsumed().hashCode();
    }
    if (hasCompletionStats()) {
      hash = (37 * hash) + COMPLETION_STATS_FIELD_NUMBER;
      hash = (53 * hash) + getCompletionStats().hashCode();
    }
    if (hasCreateTime()) {
      hash = (37 * hash) + CREATE_TIME_FIELD_NUMBER;
      hash = (53 * hash) + getCreateTime().hashCode();
    }
    if (hasStartTime()) {
      hash = (37 * hash) + START_TIME_FIELD_NUMBER;
      hash = (53 * hash) + getStartTime().hashCode();
    }
    if (hasEndTime()) {
      hash = (37 * hash) + END_TIME_FIELD_NUMBER;
      hash = (53 * hash) + getEndTime().hashCode();
    }
    if (hasUpdateTime()) {
      hash = (37 * hash) + UPDATE_TIME_FIELD_NUMBER;
      hash = (53 * hash) + getUpdateTime().hashCode();
    }
    // Map field contributes only when non-empty.
    if (!internalGetLabels().getMap().isEmpty()) {
      hash = (37 * hash) + LABELS_FIELD_NUMBER;
      hash = (53 * hash) + internalGetLabels().hashCode();
    }
    if (hasEncryptionSpec()) {
      hash = (37 * hash) + ENCRYPTION_SPEC_FIELD_NUMBER;
      hash = (53 * hash) + getEncryptionSpec().hashCode();
    }
    hash = (37 * hash) + DISABLE_CONTAINER_LOGGING_FIELD_NUMBER;
    hash = (53 * hash) + com.google.protobuf.Internal.hashBoolean(getDisableContainerLogging());
    hash = (29 * hash) + getUnknownFields().hashCode();
    memoizedHashCode = hash;
    return hash;
  }
9265 
  /** Parses a {@code BatchPredictionJob} from a serialized {@link java.nio.ByteBuffer}. */
  public static com.google.cloud.aiplatform.v1.BatchPredictionJob parseFrom(
      java.nio.ByteBuffer data) throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data);
  }
9270 
  /** Parses from a {@link java.nio.ByteBuffer}, resolving extensions via {@code extensionRegistry}. */
  public static com.google.cloud.aiplatform.v1.BatchPredictionJob parseFrom(
      java.nio.ByteBuffer data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data, extensionRegistry);
  }
9276 
  /** Parses a {@code BatchPredictionJob} from a serialized {@link com.google.protobuf.ByteString}. */
  public static com.google.cloud.aiplatform.v1.BatchPredictionJob parseFrom(
      com.google.protobuf.ByteString data)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data);
  }
9282 
  /** Parses from a {@link com.google.protobuf.ByteString} with an extension registry. */
  public static com.google.cloud.aiplatform.v1.BatchPredictionJob parseFrom(
      com.google.protobuf.ByteString data,
      com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data, extensionRegistry);
  }
9289 
  /** Parses a {@code BatchPredictionJob} from a serialized byte array. */
  public static com.google.cloud.aiplatform.v1.BatchPredictionJob parseFrom(byte[] data)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data);
  }
9294 
  /** Parses from a byte array with an extension registry. */
  public static com.google.cloud.aiplatform.v1.BatchPredictionJob parseFrom(
      byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data, extensionRegistry);
  }
9300 
  /** Parses a {@code BatchPredictionJob} by reading the whole {@code input} stream. */
  public static com.google.cloud.aiplatform.v1.BatchPredictionJob parseFrom(
      java.io.InputStream input) throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
  }
9305 
  /** Parses from an {@link java.io.InputStream} with an extension registry. */
  public static com.google.cloud.aiplatform.v1.BatchPredictionJob parseFrom(
      java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
        PARSER, input, extensionRegistry);
  }
9312 
  /** Parses one length-delimited {@code BatchPredictionJob} from {@code input}. */
  public static com.google.cloud.aiplatform.v1.BatchPredictionJob parseDelimitedFrom(
      java.io.InputStream input) throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(PARSER, input);
  }
9317 
  /** Parses one length-delimited message from {@code input} with an extension registry. */
  public static com.google.cloud.aiplatform.v1.BatchPredictionJob parseDelimitedFrom(
      java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(
        PARSER, input, extensionRegistry);
  }
9324 
  /** Parses a {@code BatchPredictionJob} from a {@link com.google.protobuf.CodedInputStream}. */
  public static com.google.cloud.aiplatform.v1.BatchPredictionJob parseFrom(
      com.google.protobuf.CodedInputStream input) throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
  }
9329 
  /** Parses from a {@link com.google.protobuf.CodedInputStream} with an extension registry. */
  public static com.google.cloud.aiplatform.v1.BatchPredictionJob parseFrom(
      com.google.protobuf.CodedInputStream input,
      com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
        PARSER, input, extensionRegistry);
  }
9337 
  @java.lang.Override
  public Builder newBuilderForType() {
    // Delegates to the static factory; the builder starts from the default instance.
    return newBuilder();
  }
9342 
  /** Creates a fresh builder seeded from the singleton default instance. */
  public static Builder newBuilder() {
    return DEFAULT_INSTANCE.toBuilder();
  }
9346 
  /** Creates a builder pre-populated with all fields of {@code prototype}. */
  public static Builder newBuilder(com.google.cloud.aiplatform.v1.BatchPredictionJob prototype) {
    return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
  }
9350 
9351   @java.lang.Override
toBuilder()9352   public Builder toBuilder() {
9353     return this == DEFAULT_INSTANCE ? new Builder() : new Builder().mergeFrom(this);
9354   }
9355 
9356   @java.lang.Override
newBuilderForType(com.google.protobuf.GeneratedMessageV3.BuilderParent parent)9357   protected Builder newBuilderForType(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
9358     Builder builder = new Builder(parent);
9359     return builder;
9360   }
9361   /**
9362    *
9363    *
9364    * <pre>
9365    * A job that uses a
9366    * [Model][google.cloud.aiplatform.v1.BatchPredictionJob.model] to produce
9367    * predictions on multiple [input
9368    * instances][google.cloud.aiplatform.v1.BatchPredictionJob.input_config]. If
   * predictions for a significant portion of the instances fail, the job may finish
9370    * without attempting predictions for all remaining instances.
9371    * </pre>
9372    *
9373    * Protobuf type {@code google.cloud.aiplatform.v1.BatchPredictionJob}
9374    */
9375   public static final class Builder extends com.google.protobuf.GeneratedMessageV3.Builder<Builder>
9376       implements
9377       // @@protoc_insertion_point(builder_implements:google.cloud.aiplatform.v1.BatchPredictionJob)
9378       com.google.cloud.aiplatform.v1.BatchPredictionJobOrBuilder {
    /** Returns the message descriptor for {@code google.cloud.aiplatform.v1.BatchPredictionJob}. */
    public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
      return com.google.cloud.aiplatform.v1.BatchPredictionJobProto
          .internal_static_google_cloud_aiplatform_v1_BatchPredictionJob_descriptor;
    }
9383 
    /** Dispatches map-field access by field number; 19 is the {@code labels} map. */
    @SuppressWarnings({"rawtypes"})
    protected com.google.protobuf.MapField internalGetMapField(int number) {
      switch (number) {
        case 19:
          return internalGetLabels();
        default:
          throw new RuntimeException("Invalid map field number: " + number);
      }
    }
9393 
    /** Mutable counterpart of {@link #internalGetMapField}; 19 is the {@code labels} map. */
    @SuppressWarnings({"rawtypes"})
    protected com.google.protobuf.MapField internalGetMutableMapField(int number) {
      switch (number) {
        case 19:
          return internalGetMutableLabels();
        default:
          throw new RuntimeException("Invalid map field number: " + number);
      }
    }
9403 
    @java.lang.Override
    protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
        internalGetFieldAccessorTable() {
      // Binds the generated accessor table to the message and builder classes.
      return com.google.cloud.aiplatform.v1.BatchPredictionJobProto
          .internal_static_google_cloud_aiplatform_v1_BatchPredictionJob_fieldAccessorTable
          .ensureFieldAccessorsInitialized(
              com.google.cloud.aiplatform.v1.BatchPredictionJob.class,
              com.google.cloud.aiplatform.v1.BatchPredictionJob.Builder.class);
    }
9413 
    // Construct using com.google.cloud.aiplatform.v1.BatchPredictionJob.newBuilder()
    // (kept private; external callers go through the static factories).
    private Builder() {}
9416 
    // Parent-attached variant used by newBuilderForType(BuilderParent).
    private Builder(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
      super(parent);
    }
9420 
    @java.lang.Override
    public Builder clear() {
      // Resets every field to its proto3 default, clears all presence bits,
      // and disposes any lazily-created single-field sub-builders.
      super.clear();
      bitField0_ = 0;
      name_ = "";
      displayName_ = "";
      model_ = "";
      modelVersionId_ = "";
      unmanagedContainerModel_ = null;
      if (unmanagedContainerModelBuilder_ != null) {
        unmanagedContainerModelBuilder_.dispose();
        unmanagedContainerModelBuilder_ = null;
      }
      inputConfig_ = null;
      if (inputConfigBuilder_ != null) {
        inputConfigBuilder_.dispose();
        inputConfigBuilder_ = null;
      }
      instanceConfig_ = null;
      if (instanceConfigBuilder_ != null) {
        instanceConfigBuilder_.dispose();
        instanceConfigBuilder_ = null;
      }
      modelParameters_ = null;
      if (modelParametersBuilder_ != null) {
        modelParametersBuilder_.dispose();
        modelParametersBuilder_ = null;
      }
      outputConfig_ = null;
      if (outputConfigBuilder_ != null) {
        outputConfigBuilder_.dispose();
        outputConfigBuilder_ = null;
      }
      dedicatedResources_ = null;
      if (dedicatedResourcesBuilder_ != null) {
        dedicatedResourcesBuilder_.dispose();
        dedicatedResourcesBuilder_ = null;
      }
      serviceAccount_ = "";
      manualBatchTuningParameters_ = null;
      if (manualBatchTuningParametersBuilder_ != null) {
        manualBatchTuningParametersBuilder_.dispose();
        manualBatchTuningParametersBuilder_ = null;
      }
      generateExplanation_ = false;
      explanationSpec_ = null;
      if (explanationSpecBuilder_ != null) {
        explanationSpecBuilder_.dispose();
        explanationSpecBuilder_ = null;
      }
      outputInfo_ = null;
      if (outputInfoBuilder_ != null) {
        outputInfoBuilder_.dispose();
        outputInfoBuilder_ = null;
      }
      state_ = 0;
      error_ = null;
      if (errorBuilder_ != null) {
        errorBuilder_.dispose();
        errorBuilder_ = null;
      }
      // Repeated partial_failures: reset the plain list, or clear the
      // repeated-field builder if one was materialized.
      if (partialFailuresBuilder_ == null) {
        partialFailures_ = java.util.Collections.emptyList();
      } else {
        partialFailures_ = null;
        partialFailuresBuilder_.clear();
      }
      // Drop the "partial_failures is mutable" bit (0x00020000).
      bitField0_ = (bitField0_ & ~0x00020000);
      resourcesConsumed_ = null;
      if (resourcesConsumedBuilder_ != null) {
        resourcesConsumedBuilder_.dispose();
        resourcesConsumedBuilder_ = null;
      }
      completionStats_ = null;
      if (completionStatsBuilder_ != null) {
        completionStatsBuilder_.dispose();
        completionStatsBuilder_ = null;
      }
      createTime_ = null;
      if (createTimeBuilder_ != null) {
        createTimeBuilder_.dispose();
        createTimeBuilder_ = null;
      }
      startTime_ = null;
      if (startTimeBuilder_ != null) {
        startTimeBuilder_.dispose();
        startTimeBuilder_ = null;
      }
      endTime_ = null;
      if (endTimeBuilder_ != null) {
        endTimeBuilder_.dispose();
        endTimeBuilder_ = null;
      }
      updateTime_ = null;
      if (updateTimeBuilder_ != null) {
        updateTimeBuilder_.dispose();
        updateTimeBuilder_ = null;
      }
      internalGetMutableLabels().clear();
      encryptionSpec_ = null;
      if (encryptionSpecBuilder_ != null) {
        encryptionSpecBuilder_.dispose();
        encryptionSpecBuilder_ = null;
      }
      disableContainerLogging_ = false;
      return this;
    }
9528 
    @java.lang.Override
    public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() {
      // Same descriptor the message class exposes via getDescriptor().
      return com.google.cloud.aiplatform.v1.BatchPredictionJobProto
          .internal_static_google_cloud_aiplatform_v1_BatchPredictionJob_descriptor;
    }
9534 
    @java.lang.Override
    public com.google.cloud.aiplatform.v1.BatchPredictionJob getDefaultInstanceForType() {
      return com.google.cloud.aiplatform.v1.BatchPredictionJob.getDefaultInstance();
    }
9539 
9540     @java.lang.Override
build()9541     public com.google.cloud.aiplatform.v1.BatchPredictionJob build() {
9542       com.google.cloud.aiplatform.v1.BatchPredictionJob result = buildPartial();
9543       if (!result.isInitialized()) {
9544         throw newUninitializedMessageException(result);
9545       }
9546       return result;
9547     }
9548 
    @java.lang.Override
    public com.google.cloud.aiplatform.v1.BatchPredictionJob buildPartial() {
      com.google.cloud.aiplatform.v1.BatchPredictionJob result =
          new com.google.cloud.aiplatform.v1.BatchPredictionJob(this);
      // Repeated fields are copied unconditionally; singular fields only when
      // any presence bit in bitField0_ is set.
      buildPartialRepeatedFields(result);
      if (bitField0_ != 0) {
        buildPartial0(result);
      }
      onBuilt();
      return result;
    }
9560 
    // Transfers the repeated partial_failures field into `result`. When no
    // repeated-field builder exists, the backing list is frozen (made
    // unmodifiable) once and shared; bit 0x00020000 tracks its mutability.
    private void buildPartialRepeatedFields(
        com.google.cloud.aiplatform.v1.BatchPredictionJob result) {
      if (partialFailuresBuilder_ == null) {
        if (((bitField0_ & 0x00020000) != 0)) {
          partialFailures_ = java.util.Collections.unmodifiableList(partialFailures_);
          bitField0_ = (bitField0_ & ~0x00020000);
        }
        result.partialFailures_ = partialFailures_;
      } else {
        result.partialFailures_ = partialFailuresBuilder_.build();
      }
    }
9573 
    // Copies each singular field into `result` when its presence bit is set in
    // bitField0_ (bits assigned in field-declaration order; 0x00020000 is
    // skipped — it belongs to the repeated partial_failures field). Message
    // fields prefer the sub-builder's built value when one exists.
    private void buildPartial0(com.google.cloud.aiplatform.v1.BatchPredictionJob result) {
      int from_bitField0_ = bitField0_;
      if (((from_bitField0_ & 0x00000001) != 0)) {
        result.name_ = name_;
      }
      if (((from_bitField0_ & 0x00000002) != 0)) {
        result.displayName_ = displayName_;
      }
      if (((from_bitField0_ & 0x00000004) != 0)) {
        result.model_ = model_;
      }
      if (((from_bitField0_ & 0x00000008) != 0)) {
        result.modelVersionId_ = modelVersionId_;
      }
      if (((from_bitField0_ & 0x00000010) != 0)) {
        result.unmanagedContainerModel_ =
            unmanagedContainerModelBuilder_ == null
                ? unmanagedContainerModel_
                : unmanagedContainerModelBuilder_.build();
      }
      if (((from_bitField0_ & 0x00000020) != 0)) {
        result.inputConfig_ =
            inputConfigBuilder_ == null ? inputConfig_ : inputConfigBuilder_.build();
      }
      if (((from_bitField0_ & 0x00000040) != 0)) {
        result.instanceConfig_ =
            instanceConfigBuilder_ == null ? instanceConfig_ : instanceConfigBuilder_.build();
      }
      if (((from_bitField0_ & 0x00000080) != 0)) {
        result.modelParameters_ =
            modelParametersBuilder_ == null ? modelParameters_ : modelParametersBuilder_.build();
      }
      if (((from_bitField0_ & 0x00000100) != 0)) {
        result.outputConfig_ =
            outputConfigBuilder_ == null ? outputConfig_ : outputConfigBuilder_.build();
      }
      if (((from_bitField0_ & 0x00000200) != 0)) {
        result.dedicatedResources_ =
            dedicatedResourcesBuilder_ == null
                ? dedicatedResources_
                : dedicatedResourcesBuilder_.build();
      }
      if (((from_bitField0_ & 0x00000400) != 0)) {
        result.serviceAccount_ = serviceAccount_;
      }
      if (((from_bitField0_ & 0x00000800) != 0)) {
        result.manualBatchTuningParameters_ =
            manualBatchTuningParametersBuilder_ == null
                ? manualBatchTuningParameters_
                : manualBatchTuningParametersBuilder_.build();
      }
      if (((from_bitField0_ & 0x00001000) != 0)) {
        result.generateExplanation_ = generateExplanation_;
      }
      if (((from_bitField0_ & 0x00002000) != 0)) {
        result.explanationSpec_ =
            explanationSpecBuilder_ == null ? explanationSpec_ : explanationSpecBuilder_.build();
      }
      if (((from_bitField0_ & 0x00004000) != 0)) {
        result.outputInfo_ = outputInfoBuilder_ == null ? outputInfo_ : outputInfoBuilder_.build();
      }
      if (((from_bitField0_ & 0x00008000) != 0)) {
        result.state_ = state_;
      }
      if (((from_bitField0_ & 0x00010000) != 0)) {
        result.error_ = errorBuilder_ == null ? error_ : errorBuilder_.build();
      }
      if (((from_bitField0_ & 0x00040000) != 0)) {
        result.resourcesConsumed_ =
            resourcesConsumedBuilder_ == null
                ? resourcesConsumed_
                : resourcesConsumedBuilder_.build();
      }
      if (((from_bitField0_ & 0x00080000) != 0)) {
        result.completionStats_ =
            completionStatsBuilder_ == null ? completionStats_ : completionStatsBuilder_.build();
      }
      if (((from_bitField0_ & 0x00100000) != 0)) {
        result.createTime_ = createTimeBuilder_ == null ? createTime_ : createTimeBuilder_.build();
      }
      if (((from_bitField0_ & 0x00200000) != 0)) {
        result.startTime_ = startTimeBuilder_ == null ? startTime_ : startTimeBuilder_.build();
      }
      if (((from_bitField0_ & 0x00400000) != 0)) {
        result.endTime_ = endTimeBuilder_ == null ? endTime_ : endTimeBuilder_.build();
      }
      if (((from_bitField0_ & 0x00800000) != 0)) {
        result.updateTime_ = updateTimeBuilder_ == null ? updateTime_ : updateTimeBuilder_.build();
      }
      if (((from_bitField0_ & 0x01000000) != 0)) {
        // The labels map is shared with the message and frozen.
        result.labels_ = internalGetLabels();
        result.labels_.makeImmutable();
      }
      if (((from_bitField0_ & 0x02000000) != 0)) {
        result.encryptionSpec_ =
            encryptionSpecBuilder_ == null ? encryptionSpec_ : encryptionSpecBuilder_.build();
      }
      if (((from_bitField0_ & 0x04000000) != 0)) {
        result.disableContainerLogging_ = disableContainerLogging_;
      }
    }
9675 
    @java.lang.Override
    public Builder clone() {
      return super.clone();
    }
9680 
    // Reflective field setter; delegates to the generated-message base class.
    @java.lang.Override
    public Builder setField(
        com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
      return super.setField(field, value);
    }
9686 
    // Reflective field clearer; delegates to the generated-message base class.
    @java.lang.Override
    public Builder clearField(com.google.protobuf.Descriptors.FieldDescriptor field) {
      return super.clearField(field);
    }
9691 
    // Reflective oneof clearer; delegates to the generated-message base class.
    @java.lang.Override
    public Builder clearOneof(com.google.protobuf.Descriptors.OneofDescriptor oneof) {
      return super.clearOneof(oneof);
    }
9696 
    // Reflective repeated-field element setter; delegates to the base class.
    @java.lang.Override
    public Builder setRepeatedField(
        com.google.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) {
      return super.setRepeatedField(field, index, value);
    }
9702 
    // Reflective repeated-field appender; delegates to the base class.
    @java.lang.Override
    public Builder addRepeatedField(
        com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
      return super.addRepeatedField(field, value);
    }
9708 
9709     @java.lang.Override
mergeFrom(com.google.protobuf.Message other)9710     public Builder mergeFrom(com.google.protobuf.Message other) {
9711       if (other instanceof com.google.cloud.aiplatform.v1.BatchPredictionJob) {
9712         return mergeFrom((com.google.cloud.aiplatform.v1.BatchPredictionJob) other);
9713       } else {
9714         super.mergeFrom(other);
9715         return this;
9716       }
9717     }
9718 
    /**
     * Merges every populated field of {@code other} into this builder: proto3
     * strings/bools/enums are copied only when non-default on {@code other},
     * singular message fields are recursively merged, {@code partial_failures}
     * is concatenated, and the {@code labels} map is merged via
     * {@code MapField#mergeFrom}. Unknown fields are merged last.
     */
    public Builder mergeFrom(com.google.cloud.aiplatform.v1.BatchPredictionJob other) {
      if (other == com.google.cloud.aiplatform.v1.BatchPredictionJob.getDefaultInstance())
        return this;
      if (!other.getName().isEmpty()) {
        name_ = other.name_;
        bitField0_ |= 0x00000001;
        onChanged();
      }
      if (!other.getDisplayName().isEmpty()) {
        displayName_ = other.displayName_;
        bitField0_ |= 0x00000002;
        onChanged();
      }
      if (!other.getModel().isEmpty()) {
        model_ = other.model_;
        bitField0_ |= 0x00000004;
        onChanged();
      }
      if (!other.getModelVersionId().isEmpty()) {
        modelVersionId_ = other.modelVersionId_;
        bitField0_ |= 0x00000008;
        onChanged();
      }
      if (other.hasUnmanagedContainerModel()) {
        mergeUnmanagedContainerModel(other.getUnmanagedContainerModel());
      }
      if (other.hasInputConfig()) {
        mergeInputConfig(other.getInputConfig());
      }
      if (other.hasInstanceConfig()) {
        mergeInstanceConfig(other.getInstanceConfig());
      }
      if (other.hasModelParameters()) {
        mergeModelParameters(other.getModelParameters());
      }
      if (other.hasOutputConfig()) {
        mergeOutputConfig(other.getOutputConfig());
      }
      if (other.hasDedicatedResources()) {
        mergeDedicatedResources(other.getDedicatedResources());
      }
      if (!other.getServiceAccount().isEmpty()) {
        serviceAccount_ = other.serviceAccount_;
        bitField0_ |= 0x00000400;
        onChanged();
      }
      if (other.hasManualBatchTuningParameters()) {
        mergeManualBatchTuningParameters(other.getManualBatchTuningParameters());
      }
      if (other.getGenerateExplanation() != false) {
        setGenerateExplanation(other.getGenerateExplanation());
      }
      if (other.hasExplanationSpec()) {
        mergeExplanationSpec(other.getExplanationSpec());
      }
      if (other.hasOutputInfo()) {
        mergeOutputInfo(other.getOutputInfo());
      }
      // Enum merged by raw wire value so unrecognized values survive.
      if (other.state_ != 0) {
        setStateValue(other.getStateValue());
      }
      if (other.hasError()) {
        mergeError(other.getError());
      }
      // Repeated partial_failures: if this builder's list is still empty the
      // other message's (immutable) list is adopted by reference; otherwise
      // elements are appended. The builder branch mirrors the same logic.
      if (partialFailuresBuilder_ == null) {
        if (!other.partialFailures_.isEmpty()) {
          if (partialFailures_.isEmpty()) {
            partialFailures_ = other.partialFailures_;
            bitField0_ = (bitField0_ & ~0x00020000);
          } else {
            ensurePartialFailuresIsMutable();
            partialFailures_.addAll(other.partialFailures_);
          }
          onChanged();
        }
      } else {
        if (!other.partialFailures_.isEmpty()) {
          if (partialFailuresBuilder_.isEmpty()) {
            partialFailuresBuilder_.dispose();
            partialFailuresBuilder_ = null;
            partialFailures_ = other.partialFailures_;
            bitField0_ = (bitField0_ & ~0x00020000);
            partialFailuresBuilder_ =
                com.google.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders
                    ? getPartialFailuresFieldBuilder()
                    : null;
          } else {
            partialFailuresBuilder_.addAllMessages(other.partialFailures_);
          }
        }
      }
      if (other.hasResourcesConsumed()) {
        mergeResourcesConsumed(other.getResourcesConsumed());
      }
      if (other.hasCompletionStats()) {
        mergeCompletionStats(other.getCompletionStats());
      }
      if (other.hasCreateTime()) {
        mergeCreateTime(other.getCreateTime());
      }
      if (other.hasStartTime()) {
        mergeStartTime(other.getStartTime());
      }
      if (other.hasEndTime()) {
        mergeEndTime(other.getEndTime());
      }
      if (other.hasUpdateTime()) {
        mergeUpdateTime(other.getUpdateTime());
      }
      internalGetMutableLabels().mergeFrom(other.internalGetLabels());
      bitField0_ |= 0x01000000;
      if (other.hasEncryptionSpec()) {
        mergeEncryptionSpec(other.getEncryptionSpec());
      }
      if (other.getDisableContainerLogging() != false) {
        setDisableContainerLogging(other.getDisableContainerLogging());
      }
      this.mergeUnknownFields(other.getUnknownFields());
      onChanged();
      return this;
    }
9840 
    @java.lang.Override
    public final boolean isInitialized() {
      // Proto3 message: there are no required fields, so every builder state is
      // considered initialized.
      return true;
    }
9845 
    /**
     * Reads serialized {@code BatchPredictionJob} data from {@code input} and merges the decoded
     * fields into this builder. Each {@code case} below corresponds to one wire-format tag
     * (field number {@code tag >> 3}); unrecognized tags are preserved as unknown fields.
     *
     * @param input the wire-format stream to decode
     * @param extensionRegistry registry for resolving extensions; must not be null
     * @return this builder for chaining
     * @throws java.io.IOException if the stream is truncated or malformed
     */
    @java.lang.Override
    public Builder mergeFrom(
        com.google.protobuf.CodedInputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      if (extensionRegistry == null) {
        throw new java.lang.NullPointerException();
      }
      try {
        boolean done = false;
        while (!done) {
          int tag = input.readTag();
          switch (tag) {
            case 0:
              // Tag 0 marks end of stream (or end of a length-delimited scope).
              done = true;
              break;
            case 10:
              {
                // string name = 1
                name_ = input.readStringRequireUtf8();
                bitField0_ |= 0x00000001;
                break;
              } // case 10
            case 18:
              {
                // string display_name = 2
                displayName_ = input.readStringRequireUtf8();
                bitField0_ |= 0x00000002;
                break;
              } // case 18
            case 26:
              {
                // string model = 3
                model_ = input.readStringRequireUtf8();
                bitField0_ |= 0x00000004;
                break;
              } // case 26
            case 34:
              {
                // message input_config = 4
                input.readMessage(getInputConfigFieldBuilder().getBuilder(), extensionRegistry);
                bitField0_ |= 0x00000020;
                break;
              } // case 34
            case 42:
              {
                // message model_parameters = 5
                input.readMessage(getModelParametersFieldBuilder().getBuilder(), extensionRegistry);
                bitField0_ |= 0x00000080;
                break;
              } // case 42
            case 50:
              {
                // message output_config = 6
                input.readMessage(getOutputConfigFieldBuilder().getBuilder(), extensionRegistry);
                bitField0_ |= 0x00000100;
                break;
              } // case 50
            case 58:
              {
                // message dedicated_resources = 7
                input.readMessage(
                    getDedicatedResourcesFieldBuilder().getBuilder(), extensionRegistry);
                bitField0_ |= 0x00000200;
                break;
              } // case 58
            case 66:
              {
                // message manual_batch_tuning_parameters = 8
                input.readMessage(
                    getManualBatchTuningParametersFieldBuilder().getBuilder(), extensionRegistry);
                bitField0_ |= 0x00000800;
                break;
              } // case 66
            case 74:
              {
                // message output_info = 9
                input.readMessage(getOutputInfoFieldBuilder().getBuilder(), extensionRegistry);
                bitField0_ |= 0x00004000;
                break;
              } // case 74
            case 80:
              {
                // enum state = 10 (stored as raw int; unknown values kept as-is)
                state_ = input.readEnum();
                bitField0_ |= 0x00008000;
                break;
              } // case 80
            case 90:
              {
                // message error = 11
                input.readMessage(getErrorFieldBuilder().getBuilder(), extensionRegistry);
                bitField0_ |= 0x00010000;
                break;
              } // case 90
            case 98:
              {
                // repeated google.rpc.Status partial_failures = 12
                com.google.rpc.Status m =
                    input.readMessage(com.google.rpc.Status.parser(), extensionRegistry);
                if (partialFailuresBuilder_ == null) {
                  ensurePartialFailuresIsMutable();
                  partialFailures_.add(m);
                } else {
                  partialFailuresBuilder_.addMessage(m);
                }
                break;
              } // case 98
            case 106:
              {
                // message resources_consumed = 13
                input.readMessage(
                    getResourcesConsumedFieldBuilder().getBuilder(), extensionRegistry);
                bitField0_ |= 0x00040000;
                break;
              } // case 106
            case 114:
              {
                // message completion_stats = 14
                input.readMessage(getCompletionStatsFieldBuilder().getBuilder(), extensionRegistry);
                bitField0_ |= 0x00080000;
                break;
              } // case 114
            case 122:
              {
                // message create_time = 15
                input.readMessage(getCreateTimeFieldBuilder().getBuilder(), extensionRegistry);
                bitField0_ |= 0x00100000;
                break;
              } // case 122
            case 130:
              {
                // message start_time = 16
                input.readMessage(getStartTimeFieldBuilder().getBuilder(), extensionRegistry);
                bitField0_ |= 0x00200000;
                break;
              } // case 130
            case 138:
              {
                // message end_time = 17
                input.readMessage(getEndTimeFieldBuilder().getBuilder(), extensionRegistry);
                bitField0_ |= 0x00400000;
                break;
              } // case 138
            case 146:
              {
                // message update_time = 18
                input.readMessage(getUpdateTimeFieldBuilder().getBuilder(), extensionRegistry);
                bitField0_ |= 0x00800000;
                break;
              } // case 146
            case 154:
              {
                // map<string, string> labels = 19 — one MapEntry message per map entry
                com.google.protobuf.MapEntry<java.lang.String, java.lang.String> labels__ =
                    input.readMessage(
                        LabelsDefaultEntryHolder.defaultEntry.getParserForType(),
                        extensionRegistry);
                internalGetMutableLabels()
                    .getMutableMap()
                    .put(labels__.getKey(), labels__.getValue());
                bitField0_ |= 0x01000000;
                break;
              } // case 154
            case 184:
              {
                // bool generate_explanation = 23
                generateExplanation_ = input.readBool();
                bitField0_ |= 0x00001000;
                break;
              } // case 184
            case 194:
              {
                // message encryption_spec = 24
                input.readMessage(getEncryptionSpecFieldBuilder().getBuilder(), extensionRegistry);
                bitField0_ |= 0x02000000;
                break;
              } // case 194
            case 202:
              {
                // message explanation_spec = 25
                input.readMessage(getExplanationSpecFieldBuilder().getBuilder(), extensionRegistry);
                bitField0_ |= 0x00002000;
                break;
              } // case 202
            case 218:
              {
                // message instance_config = 27
                input.readMessage(getInstanceConfigFieldBuilder().getBuilder(), extensionRegistry);
                bitField0_ |= 0x00000040;
                break;
              } // case 218
            case 226:
              {
                // message unmanaged_container_model = 28
                input.readMessage(
                    getUnmanagedContainerModelFieldBuilder().getBuilder(), extensionRegistry);
                bitField0_ |= 0x00000010;
                break;
              } // case 226
            case 234:
              {
                // string service_account = 29
                serviceAccount_ = input.readStringRequireUtf8();
                bitField0_ |= 0x00000400;
                break;
              } // case 234
            case 242:
              {
                // string model_version_id = 30
                modelVersionId_ = input.readStringRequireUtf8();
                bitField0_ |= 0x00000008;
                break;
              } // case 242
            case 272:
              {
                // bool disable_container_logging = 34
                disableContainerLogging_ = input.readBool();
                bitField0_ |= 0x04000000;
                break;
              } // case 272
            default:
              {
                if (!super.parseUnknownField(input, extensionRegistry, tag)) {
                  done = true; // was an endgroup tag
                }
                break;
              } // default:
          } // switch (tag)
        } // while (!done)
      } catch (com.google.protobuf.InvalidProtocolBufferException e) {
        throw e.unwrapIOException();
      } finally {
        // Signal listeners even on failure: fields decoded before the error remain set.
        onChanged();
      } // finally
      return this;
    }
10056 
10057     private int bitField0_;
10058 
10059     private java.lang.Object name_ = "";
10060     /**
10061      *
10062      *
10063      * <pre>
10064      * Output only. Resource name of the BatchPredictionJob.
10065      * </pre>
10066      *
10067      * <code>string name = 1 [(.google.api.field_behavior) = OUTPUT_ONLY];</code>
10068      *
10069      * @return The name.
10070      */
getName()10071     public java.lang.String getName() {
10072       java.lang.Object ref = name_;
10073       if (!(ref instanceof java.lang.String)) {
10074         com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
10075         java.lang.String s = bs.toStringUtf8();
10076         name_ = s;
10077         return s;
10078       } else {
10079         return (java.lang.String) ref;
10080       }
10081     }
10082     /**
10083      *
10084      *
10085      * <pre>
10086      * Output only. Resource name of the BatchPredictionJob.
10087      * </pre>
10088      *
10089      * <code>string name = 1 [(.google.api.field_behavior) = OUTPUT_ONLY];</code>
10090      *
10091      * @return The bytes for name.
10092      */
getNameBytes()10093     public com.google.protobuf.ByteString getNameBytes() {
10094       java.lang.Object ref = name_;
10095       if (ref instanceof String) {
10096         com.google.protobuf.ByteString b =
10097             com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
10098         name_ = b;
10099         return b;
10100       } else {
10101         return (com.google.protobuf.ByteString) ref;
10102       }
10103     }
10104     /**
10105      *
10106      *
10107      * <pre>
10108      * Output only. Resource name of the BatchPredictionJob.
10109      * </pre>
10110      *
10111      * <code>string name = 1 [(.google.api.field_behavior) = OUTPUT_ONLY];</code>
10112      *
10113      * @param value The name to set.
10114      * @return This builder for chaining.
10115      */
setName(java.lang.String value)10116     public Builder setName(java.lang.String value) {
10117       if (value == null) {
10118         throw new NullPointerException();
10119       }
10120       name_ = value;
10121       bitField0_ |= 0x00000001;
10122       onChanged();
10123       return this;
10124     }
10125     /**
10126      *
10127      *
10128      * <pre>
10129      * Output only. Resource name of the BatchPredictionJob.
10130      * </pre>
10131      *
10132      * <code>string name = 1 [(.google.api.field_behavior) = OUTPUT_ONLY];</code>
10133      *
10134      * @return This builder for chaining.
10135      */
clearName()10136     public Builder clearName() {
10137       name_ = getDefaultInstance().getName();
10138       bitField0_ = (bitField0_ & ~0x00000001);
10139       onChanged();
10140       return this;
10141     }
10142     /**
10143      *
10144      *
10145      * <pre>
10146      * Output only. Resource name of the BatchPredictionJob.
10147      * </pre>
10148      *
10149      * <code>string name = 1 [(.google.api.field_behavior) = OUTPUT_ONLY];</code>
10150      *
10151      * @param value The bytes for name to set.
10152      * @return This builder for chaining.
10153      */
setNameBytes(com.google.protobuf.ByteString value)10154     public Builder setNameBytes(com.google.protobuf.ByteString value) {
10155       if (value == null) {
10156         throw new NullPointerException();
10157       }
10158       checkByteStringIsUtf8(value);
10159       name_ = value;
10160       bitField0_ |= 0x00000001;
10161       onChanged();
10162       return this;
10163     }
10164 
10165     private java.lang.Object displayName_ = "";
10166     /**
10167      *
10168      *
10169      * <pre>
10170      * Required. The user-defined name of this BatchPredictionJob.
10171      * </pre>
10172      *
10173      * <code>string display_name = 2 [(.google.api.field_behavior) = REQUIRED];</code>
10174      *
10175      * @return The displayName.
10176      */
getDisplayName()10177     public java.lang.String getDisplayName() {
10178       java.lang.Object ref = displayName_;
10179       if (!(ref instanceof java.lang.String)) {
10180         com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
10181         java.lang.String s = bs.toStringUtf8();
10182         displayName_ = s;
10183         return s;
10184       } else {
10185         return (java.lang.String) ref;
10186       }
10187     }
10188     /**
10189      *
10190      *
10191      * <pre>
10192      * Required. The user-defined name of this BatchPredictionJob.
10193      * </pre>
10194      *
10195      * <code>string display_name = 2 [(.google.api.field_behavior) = REQUIRED];</code>
10196      *
10197      * @return The bytes for displayName.
10198      */
getDisplayNameBytes()10199     public com.google.protobuf.ByteString getDisplayNameBytes() {
10200       java.lang.Object ref = displayName_;
10201       if (ref instanceof String) {
10202         com.google.protobuf.ByteString b =
10203             com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
10204         displayName_ = b;
10205         return b;
10206       } else {
10207         return (com.google.protobuf.ByteString) ref;
10208       }
10209     }
10210     /**
10211      *
10212      *
10213      * <pre>
10214      * Required. The user-defined name of this BatchPredictionJob.
10215      * </pre>
10216      *
10217      * <code>string display_name = 2 [(.google.api.field_behavior) = REQUIRED];</code>
10218      *
10219      * @param value The displayName to set.
10220      * @return This builder for chaining.
10221      */
setDisplayName(java.lang.String value)10222     public Builder setDisplayName(java.lang.String value) {
10223       if (value == null) {
10224         throw new NullPointerException();
10225       }
10226       displayName_ = value;
10227       bitField0_ |= 0x00000002;
10228       onChanged();
10229       return this;
10230     }
10231     /**
10232      *
10233      *
10234      * <pre>
10235      * Required. The user-defined name of this BatchPredictionJob.
10236      * </pre>
10237      *
10238      * <code>string display_name = 2 [(.google.api.field_behavior) = REQUIRED];</code>
10239      *
10240      * @return This builder for chaining.
10241      */
clearDisplayName()10242     public Builder clearDisplayName() {
10243       displayName_ = getDefaultInstance().getDisplayName();
10244       bitField0_ = (bitField0_ & ~0x00000002);
10245       onChanged();
10246       return this;
10247     }
10248     /**
10249      *
10250      *
10251      * <pre>
10252      * Required. The user-defined name of this BatchPredictionJob.
10253      * </pre>
10254      *
10255      * <code>string display_name = 2 [(.google.api.field_behavior) = REQUIRED];</code>
10256      *
10257      * @param value The bytes for displayName to set.
10258      * @return This builder for chaining.
10259      */
setDisplayNameBytes(com.google.protobuf.ByteString value)10260     public Builder setDisplayNameBytes(com.google.protobuf.ByteString value) {
10261       if (value == null) {
10262         throw new NullPointerException();
10263       }
10264       checkByteStringIsUtf8(value);
10265       displayName_ = value;
10266       bitField0_ |= 0x00000002;
10267       onChanged();
10268       return this;
10269     }
10270 
10271     private java.lang.Object model_ = "";
10272     /**
10273      *
10274      *
10275      * <pre>
10276      * The name of the Model resource that produces the predictions via this job,
10277      * must share the same ancestor Location.
10278      * Starting this job has no impact on any existing deployments of the Model
10279      * and their resources.
10280      * Exactly one of model and unmanaged_container_model must be set.
10281      * The model resource name may contain version id or version alias to specify
10282      * the version.
10283      *  Example: `projects/{project}/locations/{location}/models/{model}&#64;2`
10284      *              or
10285      *            `projects/{project}/locations/{location}/models/{model}&#64;golden`
10286      * if no version is specified, the default version will be deployed.
10287      * </pre>
10288      *
10289      * <code>string model = 3 [(.google.api.resource_reference) = { ... }</code>
10290      *
10291      * @return The model.
10292      */
getModel()10293     public java.lang.String getModel() {
10294       java.lang.Object ref = model_;
10295       if (!(ref instanceof java.lang.String)) {
10296         com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
10297         java.lang.String s = bs.toStringUtf8();
10298         model_ = s;
10299         return s;
10300       } else {
10301         return (java.lang.String) ref;
10302       }
10303     }
10304     /**
10305      *
10306      *
10307      * <pre>
10308      * The name of the Model resource that produces the predictions via this job,
10309      * must share the same ancestor Location.
10310      * Starting this job has no impact on any existing deployments of the Model
10311      * and their resources.
10312      * Exactly one of model and unmanaged_container_model must be set.
10313      * The model resource name may contain version id or version alias to specify
10314      * the version.
10315      *  Example: `projects/{project}/locations/{location}/models/{model}&#64;2`
10316      *              or
10317      *            `projects/{project}/locations/{location}/models/{model}&#64;golden`
10318      * if no version is specified, the default version will be deployed.
10319      * </pre>
10320      *
10321      * <code>string model = 3 [(.google.api.resource_reference) = { ... }</code>
10322      *
10323      * @return The bytes for model.
10324      */
getModelBytes()10325     public com.google.protobuf.ByteString getModelBytes() {
10326       java.lang.Object ref = model_;
10327       if (ref instanceof String) {
10328         com.google.protobuf.ByteString b =
10329             com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
10330         model_ = b;
10331         return b;
10332       } else {
10333         return (com.google.protobuf.ByteString) ref;
10334       }
10335     }
10336     /**
10337      *
10338      *
10339      * <pre>
10340      * The name of the Model resource that produces the predictions via this job,
10341      * must share the same ancestor Location.
10342      * Starting this job has no impact on any existing deployments of the Model
10343      * and their resources.
10344      * Exactly one of model and unmanaged_container_model must be set.
10345      * The model resource name may contain version id or version alias to specify
10346      * the version.
10347      *  Example: `projects/{project}/locations/{location}/models/{model}&#64;2`
10348      *              or
10349      *            `projects/{project}/locations/{location}/models/{model}&#64;golden`
10350      * if no version is specified, the default version will be deployed.
10351      * </pre>
10352      *
10353      * <code>string model = 3 [(.google.api.resource_reference) = { ... }</code>
10354      *
10355      * @param value The model to set.
10356      * @return This builder for chaining.
10357      */
setModel(java.lang.String value)10358     public Builder setModel(java.lang.String value) {
10359       if (value == null) {
10360         throw new NullPointerException();
10361       }
10362       model_ = value;
10363       bitField0_ |= 0x00000004;
10364       onChanged();
10365       return this;
10366     }
10367     /**
10368      *
10369      *
10370      * <pre>
10371      * The name of the Model resource that produces the predictions via this job,
10372      * must share the same ancestor Location.
10373      * Starting this job has no impact on any existing deployments of the Model
10374      * and their resources.
10375      * Exactly one of model and unmanaged_container_model must be set.
10376      * The model resource name may contain version id or version alias to specify
10377      * the version.
10378      *  Example: `projects/{project}/locations/{location}/models/{model}&#64;2`
10379      *              or
10380      *            `projects/{project}/locations/{location}/models/{model}&#64;golden`
10381      * if no version is specified, the default version will be deployed.
10382      * </pre>
10383      *
10384      * <code>string model = 3 [(.google.api.resource_reference) = { ... }</code>
10385      *
10386      * @return This builder for chaining.
10387      */
clearModel()10388     public Builder clearModel() {
10389       model_ = getDefaultInstance().getModel();
10390       bitField0_ = (bitField0_ & ~0x00000004);
10391       onChanged();
10392       return this;
10393     }
10394     /**
10395      *
10396      *
10397      * <pre>
10398      * The name of the Model resource that produces the predictions via this job,
10399      * must share the same ancestor Location.
10400      * Starting this job has no impact on any existing deployments of the Model
10401      * and their resources.
10402      * Exactly one of model and unmanaged_container_model must be set.
10403      * The model resource name may contain version id or version alias to specify
10404      * the version.
10405      *  Example: `projects/{project}/locations/{location}/models/{model}&#64;2`
10406      *              or
10407      *            `projects/{project}/locations/{location}/models/{model}&#64;golden`
10408      * if no version is specified, the default version will be deployed.
10409      * </pre>
10410      *
10411      * <code>string model = 3 [(.google.api.resource_reference) = { ... }</code>
10412      *
10413      * @param value The bytes for model to set.
10414      * @return This builder for chaining.
10415      */
setModelBytes(com.google.protobuf.ByteString value)10416     public Builder setModelBytes(com.google.protobuf.ByteString value) {
10417       if (value == null) {
10418         throw new NullPointerException();
10419       }
10420       checkByteStringIsUtf8(value);
10421       model_ = value;
10422       bitField0_ |= 0x00000004;
10423       onChanged();
10424       return this;
10425     }
10426 
10427     private java.lang.Object modelVersionId_ = "";
10428     /**
10429      *
10430      *
10431      * <pre>
10432      * Output only. The version ID of the Model that produces the predictions via
10433      * this job.
10434      * </pre>
10435      *
10436      * <code>string model_version_id = 30 [(.google.api.field_behavior) = OUTPUT_ONLY];</code>
10437      *
10438      * @return The modelVersionId.
10439      */
getModelVersionId()10440     public java.lang.String getModelVersionId() {
10441       java.lang.Object ref = modelVersionId_;
10442       if (!(ref instanceof java.lang.String)) {
10443         com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
10444         java.lang.String s = bs.toStringUtf8();
10445         modelVersionId_ = s;
10446         return s;
10447       } else {
10448         return (java.lang.String) ref;
10449       }
10450     }
10451     /**
10452      *
10453      *
10454      * <pre>
10455      * Output only. The version ID of the Model that produces the predictions via
10456      * this job.
10457      * </pre>
10458      *
10459      * <code>string model_version_id = 30 [(.google.api.field_behavior) = OUTPUT_ONLY];</code>
10460      *
10461      * @return The bytes for modelVersionId.
10462      */
getModelVersionIdBytes()10463     public com.google.protobuf.ByteString getModelVersionIdBytes() {
10464       java.lang.Object ref = modelVersionId_;
10465       if (ref instanceof String) {
10466         com.google.protobuf.ByteString b =
10467             com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
10468         modelVersionId_ = b;
10469         return b;
10470       } else {
10471         return (com.google.protobuf.ByteString) ref;
10472       }
10473     }
10474     /**
10475      *
10476      *
10477      * <pre>
10478      * Output only. The version ID of the Model that produces the predictions via
10479      * this job.
10480      * </pre>
10481      *
10482      * <code>string model_version_id = 30 [(.google.api.field_behavior) = OUTPUT_ONLY];</code>
10483      *
10484      * @param value The modelVersionId to set.
10485      * @return This builder for chaining.
10486      */
setModelVersionId(java.lang.String value)10487     public Builder setModelVersionId(java.lang.String value) {
10488       if (value == null) {
10489         throw new NullPointerException();
10490       }
10491       modelVersionId_ = value;
10492       bitField0_ |= 0x00000008;
10493       onChanged();
10494       return this;
10495     }
10496     /**
10497      *
10498      *
10499      * <pre>
10500      * Output only. The version ID of the Model that produces the predictions via
10501      * this job.
10502      * </pre>
10503      *
10504      * <code>string model_version_id = 30 [(.google.api.field_behavior) = OUTPUT_ONLY];</code>
10505      *
10506      * @return This builder for chaining.
10507      */
clearModelVersionId()10508     public Builder clearModelVersionId() {
10509       modelVersionId_ = getDefaultInstance().getModelVersionId();
10510       bitField0_ = (bitField0_ & ~0x00000008);
10511       onChanged();
10512       return this;
10513     }
10514     /**
10515      *
10516      *
10517      * <pre>
10518      * Output only. The version ID of the Model that produces the predictions via
10519      * this job.
10520      * </pre>
10521      *
10522      * <code>string model_version_id = 30 [(.google.api.field_behavior) = OUTPUT_ONLY];</code>
10523      *
10524      * @param value The bytes for modelVersionId to set.
10525      * @return This builder for chaining.
10526      */
setModelVersionIdBytes(com.google.protobuf.ByteString value)10527     public Builder setModelVersionIdBytes(com.google.protobuf.ByteString value) {
10528       if (value == null) {
10529         throw new NullPointerException();
10530       }
10531       checkByteStringIsUtf8(value);
10532       modelVersionId_ = value;
10533       bitField0_ |= 0x00000008;
10534       onChanged();
10535       return this;
10536     }
10537 
    // Raw field value; authoritative only while unmanagedContainerModelBuilder_ is null.
    private com.google.cloud.aiplatform.v1.UnmanagedContainerModel unmanagedContainerModel_;
    // Lazily created field builder; once non-null, all reads/writes route through it.
    private com.google.protobuf.SingleFieldBuilderV3<
            com.google.cloud.aiplatform.v1.UnmanagedContainerModel,
            com.google.cloud.aiplatform.v1.UnmanagedContainerModel.Builder,
            com.google.cloud.aiplatform.v1.UnmanagedContainerModelOrBuilder>
        unmanagedContainerModelBuilder_;
    /**
     *
     *
     * <pre>
     * Contains model information necessary to perform batch prediction without
     * requiring uploading to model registry.
     * Exactly one of model and unmanaged_container_model must be set.
     * </pre>
     *
     * <code>.google.cloud.aiplatform.v1.UnmanagedContainerModel unmanaged_container_model = 28;
     * </code>
     *
     * @return Whether the unmanagedContainerModel field is set.
     */
    public boolean hasUnmanagedContainerModel() {
      // Presence is tracked by bit 0x10 of bitField0_, set by the setters/mergers below.
      return ((bitField0_ & 0x00000010) != 0);
    }
    /**
     *
     *
     * <pre>
     * Contains model information necessary to perform batch prediction without
     * requiring uploading to model registry.
     * Exactly one of model and unmanaged_container_model must be set.
     * </pre>
     *
     * <code>.google.cloud.aiplatform.v1.UnmanagedContainerModel unmanaged_container_model = 28;
     * </code>
     *
     * @return The unmanagedContainerModel.
     */
    public com.google.cloud.aiplatform.v1.UnmanagedContainerModel getUnmanagedContainerModel() {
      if (unmanagedContainerModelBuilder_ == null) {
        // Never return null: fall back to the default instance when unset.
        return unmanagedContainerModel_ == null
            ? com.google.cloud.aiplatform.v1.UnmanagedContainerModel.getDefaultInstance()
            : unmanagedContainerModel_;
      } else {
        return unmanagedContainerModelBuilder_.getMessage();
      }
    }
    /**
     *
     *
     * <pre>
     * Contains model information necessary to perform batch prediction without
     * requiring uploading to model registry.
     * Exactly one of model and unmanaged_container_model must be set.
     * </pre>
     *
     * <code>.google.cloud.aiplatform.v1.UnmanagedContainerModel unmanaged_container_model = 28;
     * </code>
     *
     * @param value the message to store; must not be null
     * @return this builder for chaining
     */
    public Builder setUnmanagedContainerModel(
        com.google.cloud.aiplatform.v1.UnmanagedContainerModel value) {
      if (unmanagedContainerModelBuilder_ == null) {
        if (value == null) {
          throw new NullPointerException();
        }
        unmanagedContainerModel_ = value;
      } else {
        unmanagedContainerModelBuilder_.setMessage(value);
      }
      bitField0_ |= 0x00000010;
      onChanged();
      return this;
    }
    /**
     *
     *
     * <pre>
     * Contains model information necessary to perform batch prediction without
     * requiring uploading to model registry.
     * Exactly one of model and unmanaged_container_model must be set.
     * </pre>
     *
     * <code>.google.cloud.aiplatform.v1.UnmanagedContainerModel unmanaged_container_model = 28;
     * </code>
     *
     * @param builderForValue builder whose built message becomes the field value
     * @return this builder for chaining
     */
    public Builder setUnmanagedContainerModel(
        com.google.cloud.aiplatform.v1.UnmanagedContainerModel.Builder builderForValue) {
      if (unmanagedContainerModelBuilder_ == null) {
        unmanagedContainerModel_ = builderForValue.build();
      } else {
        unmanagedContainerModelBuilder_.setMessage(builderForValue.build());
      }
      bitField0_ |= 0x00000010;
      onChanged();
      return this;
    }
    /**
     *
     *
     * <pre>
     * Contains model information necessary to perform batch prediction without
     * requiring uploading to model registry.
     * Exactly one of model and unmanaged_container_model must be set.
     * </pre>
     *
     * <code>.google.cloud.aiplatform.v1.UnmanagedContainerModel unmanaged_container_model = 28;
     * </code>
     *
     * @param value the message to merge into the current field value
     * @return this builder for chaining
     */
    public Builder mergeUnmanagedContainerModel(
        com.google.cloud.aiplatform.v1.UnmanagedContainerModel value) {
      if (unmanagedContainerModelBuilder_ == null) {
        // Merge field-by-field only when a non-default value is already present;
        // otherwise simply replace (reference comparison against the shared
        // default instance is the generated-code idiom for "unset").
        if (((bitField0_ & 0x00000010) != 0)
            && unmanagedContainerModel_ != null
            && unmanagedContainerModel_
                != com.google.cloud.aiplatform.v1.UnmanagedContainerModel.getDefaultInstance()) {
          getUnmanagedContainerModelBuilder().mergeFrom(value);
        } else {
          unmanagedContainerModel_ = value;
        }
      } else {
        unmanagedContainerModelBuilder_.mergeFrom(value);
      }
      bitField0_ |= 0x00000010;
      onChanged();
      return this;
    }
10663     /**
10664      *
10665      *
10666      * <pre>
10667      * Contains model information necessary to perform batch prediction without
10668      * requiring uploading to model registry.
10669      * Exactly one of model and unmanaged_container_model must be set.
10670      * </pre>
10671      *
10672      * <code>.google.cloud.aiplatform.v1.UnmanagedContainerModel unmanaged_container_model = 28;
10673      * </code>
10674      */
clearUnmanagedContainerModel()10675     public Builder clearUnmanagedContainerModel() {
10676       bitField0_ = (bitField0_ & ~0x00000010);
10677       unmanagedContainerModel_ = null;
10678       if (unmanagedContainerModelBuilder_ != null) {
10679         unmanagedContainerModelBuilder_.dispose();
10680         unmanagedContainerModelBuilder_ = null;
10681       }
10682       onChanged();
10683       return this;
10684     }
10685     /**
10686      *
10687      *
10688      * <pre>
10689      * Contains model information necessary to perform batch prediction without
10690      * requiring uploading to model registry.
10691      * Exactly one of model and unmanaged_container_model must be set.
10692      * </pre>
10693      *
10694      * <code>.google.cloud.aiplatform.v1.UnmanagedContainerModel unmanaged_container_model = 28;
10695      * </code>
10696      */
10697     public com.google.cloud.aiplatform.v1.UnmanagedContainerModel.Builder
getUnmanagedContainerModelBuilder()10698         getUnmanagedContainerModelBuilder() {
10699       bitField0_ |= 0x00000010;
10700       onChanged();
10701       return getUnmanagedContainerModelFieldBuilder().getBuilder();
10702     }
10703     /**
10704      *
10705      *
10706      * <pre>
10707      * Contains model information necessary to perform batch prediction without
10708      * requiring uploading to model registry.
10709      * Exactly one of model and unmanaged_container_model must be set.
10710      * </pre>
10711      *
10712      * <code>.google.cloud.aiplatform.v1.UnmanagedContainerModel unmanaged_container_model = 28;
10713      * </code>
10714      */
10715     public com.google.cloud.aiplatform.v1.UnmanagedContainerModelOrBuilder
getUnmanagedContainerModelOrBuilder()10716         getUnmanagedContainerModelOrBuilder() {
10717       if (unmanagedContainerModelBuilder_ != null) {
10718         return unmanagedContainerModelBuilder_.getMessageOrBuilder();
10719       } else {
10720         return unmanagedContainerModel_ == null
10721             ? com.google.cloud.aiplatform.v1.UnmanagedContainerModel.getDefaultInstance()
10722             : unmanagedContainerModel_;
10723       }
10724     }
10725     /**
10726      *
10727      *
10728      * <pre>
10729      * Contains model information necessary to perform batch prediction without
10730      * requiring uploading to model registry.
10731      * Exactly one of model and unmanaged_container_model must be set.
10732      * </pre>
10733      *
10734      * <code>.google.cloud.aiplatform.v1.UnmanagedContainerModel unmanaged_container_model = 28;
10735      * </code>
10736      */
10737     private com.google.protobuf.SingleFieldBuilderV3<
10738             com.google.cloud.aiplatform.v1.UnmanagedContainerModel,
10739             com.google.cloud.aiplatform.v1.UnmanagedContainerModel.Builder,
10740             com.google.cloud.aiplatform.v1.UnmanagedContainerModelOrBuilder>
getUnmanagedContainerModelFieldBuilder()10741         getUnmanagedContainerModelFieldBuilder() {
10742       if (unmanagedContainerModelBuilder_ == null) {
10743         unmanagedContainerModelBuilder_ =
10744             new com.google.protobuf.SingleFieldBuilderV3<
10745                 com.google.cloud.aiplatform.v1.UnmanagedContainerModel,
10746                 com.google.cloud.aiplatform.v1.UnmanagedContainerModel.Builder,
10747                 com.google.cloud.aiplatform.v1.UnmanagedContainerModelOrBuilder>(
10748                 getUnmanagedContainerModel(), getParentForChildren(), isClean());
10749         unmanagedContainerModel_ = null;
10750       }
10751       return unmanagedContainerModelBuilder_;
10752     }
10753 
    // Message value for the required `input_config` field (field 4); null until set.
    private com.google.cloud.aiplatform.v1.BatchPredictionJob.InputConfig inputConfig_;
    // Lazily-created nested builder for `input_config`; when non-null it owns the field's
    // value and accessors delegate to it instead of inputConfig_.
    private com.google.protobuf.SingleFieldBuilderV3<
            com.google.cloud.aiplatform.v1.BatchPredictionJob.InputConfig,
            com.google.cloud.aiplatform.v1.BatchPredictionJob.InputConfig.Builder,
            com.google.cloud.aiplatform.v1.BatchPredictionJob.InputConfigOrBuilder>
        inputConfigBuilder_;
10760     /**
10761      *
10762      *
10763      * <pre>
10764      * Required. Input configuration of the instances on which predictions are
10765      * performed. The schema of any single instance may be specified via the
10766      * [Model's][google.cloud.aiplatform.v1.BatchPredictionJob.model]
10767      * [PredictSchemata's][google.cloud.aiplatform.v1.Model.predict_schemata]
10768      * [instance_schema_uri][google.cloud.aiplatform.v1.PredictSchemata.instance_schema_uri].
10769      * </pre>
10770      *
10771      * <code>
10772      * .google.cloud.aiplatform.v1.BatchPredictionJob.InputConfig input_config = 4 [(.google.api.field_behavior) = REQUIRED];
10773      * </code>
10774      *
10775      * @return Whether the inputConfig field is set.
10776      */
hasInputConfig()10777     public boolean hasInputConfig() {
10778       return ((bitField0_ & 0x00000020) != 0);
10779     }
10780     /**
10781      *
10782      *
10783      * <pre>
10784      * Required. Input configuration of the instances on which predictions are
10785      * performed. The schema of any single instance may be specified via the
10786      * [Model's][google.cloud.aiplatform.v1.BatchPredictionJob.model]
10787      * [PredictSchemata's][google.cloud.aiplatform.v1.Model.predict_schemata]
10788      * [instance_schema_uri][google.cloud.aiplatform.v1.PredictSchemata.instance_schema_uri].
10789      * </pre>
10790      *
10791      * <code>
10792      * .google.cloud.aiplatform.v1.BatchPredictionJob.InputConfig input_config = 4 [(.google.api.field_behavior) = REQUIRED];
10793      * </code>
10794      *
10795      * @return The inputConfig.
10796      */
getInputConfig()10797     public com.google.cloud.aiplatform.v1.BatchPredictionJob.InputConfig getInputConfig() {
10798       if (inputConfigBuilder_ == null) {
10799         return inputConfig_ == null
10800             ? com.google.cloud.aiplatform.v1.BatchPredictionJob.InputConfig.getDefaultInstance()
10801             : inputConfig_;
10802       } else {
10803         return inputConfigBuilder_.getMessage();
10804       }
10805     }
10806     /**
10807      *
10808      *
10809      * <pre>
10810      * Required. Input configuration of the instances on which predictions are
10811      * performed. The schema of any single instance may be specified via the
10812      * [Model's][google.cloud.aiplatform.v1.BatchPredictionJob.model]
10813      * [PredictSchemata's][google.cloud.aiplatform.v1.Model.predict_schemata]
10814      * [instance_schema_uri][google.cloud.aiplatform.v1.PredictSchemata.instance_schema_uri].
10815      * </pre>
10816      *
10817      * <code>
10818      * .google.cloud.aiplatform.v1.BatchPredictionJob.InputConfig input_config = 4 [(.google.api.field_behavior) = REQUIRED];
10819      * </code>
10820      */
setInputConfig( com.google.cloud.aiplatform.v1.BatchPredictionJob.InputConfig value)10821     public Builder setInputConfig(
10822         com.google.cloud.aiplatform.v1.BatchPredictionJob.InputConfig value) {
10823       if (inputConfigBuilder_ == null) {
10824         if (value == null) {
10825           throw new NullPointerException();
10826         }
10827         inputConfig_ = value;
10828       } else {
10829         inputConfigBuilder_.setMessage(value);
10830       }
10831       bitField0_ |= 0x00000020;
10832       onChanged();
10833       return this;
10834     }
10835     /**
10836      *
10837      *
10838      * <pre>
10839      * Required. Input configuration of the instances on which predictions are
10840      * performed. The schema of any single instance may be specified via the
10841      * [Model's][google.cloud.aiplatform.v1.BatchPredictionJob.model]
10842      * [PredictSchemata's][google.cloud.aiplatform.v1.Model.predict_schemata]
10843      * [instance_schema_uri][google.cloud.aiplatform.v1.PredictSchemata.instance_schema_uri].
10844      * </pre>
10845      *
10846      * <code>
10847      * .google.cloud.aiplatform.v1.BatchPredictionJob.InputConfig input_config = 4 [(.google.api.field_behavior) = REQUIRED];
10848      * </code>
10849      */
setInputConfig( com.google.cloud.aiplatform.v1.BatchPredictionJob.InputConfig.Builder builderForValue)10850     public Builder setInputConfig(
10851         com.google.cloud.aiplatform.v1.BatchPredictionJob.InputConfig.Builder builderForValue) {
10852       if (inputConfigBuilder_ == null) {
10853         inputConfig_ = builderForValue.build();
10854       } else {
10855         inputConfigBuilder_.setMessage(builderForValue.build());
10856       }
10857       bitField0_ |= 0x00000020;
10858       onChanged();
10859       return this;
10860     }
10861     /**
10862      *
10863      *
10864      * <pre>
10865      * Required. Input configuration of the instances on which predictions are
10866      * performed. The schema of any single instance may be specified via the
10867      * [Model's][google.cloud.aiplatform.v1.BatchPredictionJob.model]
10868      * [PredictSchemata's][google.cloud.aiplatform.v1.Model.predict_schemata]
10869      * [instance_schema_uri][google.cloud.aiplatform.v1.PredictSchemata.instance_schema_uri].
10870      * </pre>
10871      *
10872      * <code>
10873      * .google.cloud.aiplatform.v1.BatchPredictionJob.InputConfig input_config = 4 [(.google.api.field_behavior) = REQUIRED];
10874      * </code>
10875      */
mergeInputConfig( com.google.cloud.aiplatform.v1.BatchPredictionJob.InputConfig value)10876     public Builder mergeInputConfig(
10877         com.google.cloud.aiplatform.v1.BatchPredictionJob.InputConfig value) {
10878       if (inputConfigBuilder_ == null) {
10879         if (((bitField0_ & 0x00000020) != 0)
10880             && inputConfig_ != null
10881             && inputConfig_
10882                 != com.google.cloud.aiplatform.v1.BatchPredictionJob.InputConfig
10883                     .getDefaultInstance()) {
10884           getInputConfigBuilder().mergeFrom(value);
10885         } else {
10886           inputConfig_ = value;
10887         }
10888       } else {
10889         inputConfigBuilder_.mergeFrom(value);
10890       }
10891       bitField0_ |= 0x00000020;
10892       onChanged();
10893       return this;
10894     }
10895     /**
10896      *
10897      *
10898      * <pre>
10899      * Required. Input configuration of the instances on which predictions are
10900      * performed. The schema of any single instance may be specified via the
10901      * [Model's][google.cloud.aiplatform.v1.BatchPredictionJob.model]
10902      * [PredictSchemata's][google.cloud.aiplatform.v1.Model.predict_schemata]
10903      * [instance_schema_uri][google.cloud.aiplatform.v1.PredictSchemata.instance_schema_uri].
10904      * </pre>
10905      *
10906      * <code>
10907      * .google.cloud.aiplatform.v1.BatchPredictionJob.InputConfig input_config = 4 [(.google.api.field_behavior) = REQUIRED];
10908      * </code>
10909      */
clearInputConfig()10910     public Builder clearInputConfig() {
10911       bitField0_ = (bitField0_ & ~0x00000020);
10912       inputConfig_ = null;
10913       if (inputConfigBuilder_ != null) {
10914         inputConfigBuilder_.dispose();
10915         inputConfigBuilder_ = null;
10916       }
10917       onChanged();
10918       return this;
10919     }
10920     /**
10921      *
10922      *
10923      * <pre>
10924      * Required. Input configuration of the instances on which predictions are
10925      * performed. The schema of any single instance may be specified via the
10926      * [Model's][google.cloud.aiplatform.v1.BatchPredictionJob.model]
10927      * [PredictSchemata's][google.cloud.aiplatform.v1.Model.predict_schemata]
10928      * [instance_schema_uri][google.cloud.aiplatform.v1.PredictSchemata.instance_schema_uri].
10929      * </pre>
10930      *
10931      * <code>
10932      * .google.cloud.aiplatform.v1.BatchPredictionJob.InputConfig input_config = 4 [(.google.api.field_behavior) = REQUIRED];
10933      * </code>
10934      */
10935     public com.google.cloud.aiplatform.v1.BatchPredictionJob.InputConfig.Builder
getInputConfigBuilder()10936         getInputConfigBuilder() {
10937       bitField0_ |= 0x00000020;
10938       onChanged();
10939       return getInputConfigFieldBuilder().getBuilder();
10940     }
10941     /**
10942      *
10943      *
10944      * <pre>
10945      * Required. Input configuration of the instances on which predictions are
10946      * performed. The schema of any single instance may be specified via the
10947      * [Model's][google.cloud.aiplatform.v1.BatchPredictionJob.model]
10948      * [PredictSchemata's][google.cloud.aiplatform.v1.Model.predict_schemata]
10949      * [instance_schema_uri][google.cloud.aiplatform.v1.PredictSchemata.instance_schema_uri].
10950      * </pre>
10951      *
10952      * <code>
10953      * .google.cloud.aiplatform.v1.BatchPredictionJob.InputConfig input_config = 4 [(.google.api.field_behavior) = REQUIRED];
10954      * </code>
10955      */
10956     public com.google.cloud.aiplatform.v1.BatchPredictionJob.InputConfigOrBuilder
getInputConfigOrBuilder()10957         getInputConfigOrBuilder() {
10958       if (inputConfigBuilder_ != null) {
10959         return inputConfigBuilder_.getMessageOrBuilder();
10960       } else {
10961         return inputConfig_ == null
10962             ? com.google.cloud.aiplatform.v1.BatchPredictionJob.InputConfig.getDefaultInstance()
10963             : inputConfig_;
10964       }
10965     }
10966     /**
10967      *
10968      *
10969      * <pre>
10970      * Required. Input configuration of the instances on which predictions are
10971      * performed. The schema of any single instance may be specified via the
10972      * [Model's][google.cloud.aiplatform.v1.BatchPredictionJob.model]
10973      * [PredictSchemata's][google.cloud.aiplatform.v1.Model.predict_schemata]
10974      * [instance_schema_uri][google.cloud.aiplatform.v1.PredictSchemata.instance_schema_uri].
10975      * </pre>
10976      *
10977      * <code>
10978      * .google.cloud.aiplatform.v1.BatchPredictionJob.InputConfig input_config = 4 [(.google.api.field_behavior) = REQUIRED];
10979      * </code>
10980      */
10981     private com.google.protobuf.SingleFieldBuilderV3<
10982             com.google.cloud.aiplatform.v1.BatchPredictionJob.InputConfig,
10983             com.google.cloud.aiplatform.v1.BatchPredictionJob.InputConfig.Builder,
10984             com.google.cloud.aiplatform.v1.BatchPredictionJob.InputConfigOrBuilder>
getInputConfigFieldBuilder()10985         getInputConfigFieldBuilder() {
10986       if (inputConfigBuilder_ == null) {
10987         inputConfigBuilder_ =
10988             new com.google.protobuf.SingleFieldBuilderV3<
10989                 com.google.cloud.aiplatform.v1.BatchPredictionJob.InputConfig,
10990                 com.google.cloud.aiplatform.v1.BatchPredictionJob.InputConfig.Builder,
10991                 com.google.cloud.aiplatform.v1.BatchPredictionJob.InputConfigOrBuilder>(
10992                 getInputConfig(), getParentForChildren(), isClean());
10993         inputConfig_ = null;
10994       }
10995       return inputConfigBuilder_;
10996     }
10997 
    // Message value for the optional `instance_config` field (field 27); null until set.
    private com.google.cloud.aiplatform.v1.BatchPredictionJob.InstanceConfig instanceConfig_;
    // Lazily-created nested builder for `instance_config`; when non-null it owns the field's
    // value and accessors delegate to it instead of instanceConfig_.
    private com.google.protobuf.SingleFieldBuilderV3<
            com.google.cloud.aiplatform.v1.BatchPredictionJob.InstanceConfig,
            com.google.cloud.aiplatform.v1.BatchPredictionJob.InstanceConfig.Builder,
            com.google.cloud.aiplatform.v1.BatchPredictionJob.InstanceConfigOrBuilder>
        instanceConfigBuilder_;
11004     /**
11005      *
11006      *
11007      * <pre>
11008      * Configuration for how to convert batch prediction input instances to the
11009      * prediction instances that are sent to the Model.
11010      * </pre>
11011      *
11012      * <code>.google.cloud.aiplatform.v1.BatchPredictionJob.InstanceConfig instance_config = 27;
11013      * </code>
11014      *
11015      * @return Whether the instanceConfig field is set.
11016      */
hasInstanceConfig()11017     public boolean hasInstanceConfig() {
11018       return ((bitField0_ & 0x00000040) != 0);
11019     }
11020     /**
11021      *
11022      *
11023      * <pre>
11024      * Configuration for how to convert batch prediction input instances to the
11025      * prediction instances that are sent to the Model.
11026      * </pre>
11027      *
11028      * <code>.google.cloud.aiplatform.v1.BatchPredictionJob.InstanceConfig instance_config = 27;
11029      * </code>
11030      *
11031      * @return The instanceConfig.
11032      */
getInstanceConfig()11033     public com.google.cloud.aiplatform.v1.BatchPredictionJob.InstanceConfig getInstanceConfig() {
11034       if (instanceConfigBuilder_ == null) {
11035         return instanceConfig_ == null
11036             ? com.google.cloud.aiplatform.v1.BatchPredictionJob.InstanceConfig.getDefaultInstance()
11037             : instanceConfig_;
11038       } else {
11039         return instanceConfigBuilder_.getMessage();
11040       }
11041     }
11042     /**
11043      *
11044      *
11045      * <pre>
11046      * Configuration for how to convert batch prediction input instances to the
11047      * prediction instances that are sent to the Model.
11048      * </pre>
11049      *
11050      * <code>.google.cloud.aiplatform.v1.BatchPredictionJob.InstanceConfig instance_config = 27;
11051      * </code>
11052      */
setInstanceConfig( com.google.cloud.aiplatform.v1.BatchPredictionJob.InstanceConfig value)11053     public Builder setInstanceConfig(
11054         com.google.cloud.aiplatform.v1.BatchPredictionJob.InstanceConfig value) {
11055       if (instanceConfigBuilder_ == null) {
11056         if (value == null) {
11057           throw new NullPointerException();
11058         }
11059         instanceConfig_ = value;
11060       } else {
11061         instanceConfigBuilder_.setMessage(value);
11062       }
11063       bitField0_ |= 0x00000040;
11064       onChanged();
11065       return this;
11066     }
11067     /**
11068      *
11069      *
11070      * <pre>
11071      * Configuration for how to convert batch prediction input instances to the
11072      * prediction instances that are sent to the Model.
11073      * </pre>
11074      *
11075      * <code>.google.cloud.aiplatform.v1.BatchPredictionJob.InstanceConfig instance_config = 27;
11076      * </code>
11077      */
setInstanceConfig( com.google.cloud.aiplatform.v1.BatchPredictionJob.InstanceConfig.Builder builderForValue)11078     public Builder setInstanceConfig(
11079         com.google.cloud.aiplatform.v1.BatchPredictionJob.InstanceConfig.Builder builderForValue) {
11080       if (instanceConfigBuilder_ == null) {
11081         instanceConfig_ = builderForValue.build();
11082       } else {
11083         instanceConfigBuilder_.setMessage(builderForValue.build());
11084       }
11085       bitField0_ |= 0x00000040;
11086       onChanged();
11087       return this;
11088     }
11089     /**
11090      *
11091      *
11092      * <pre>
11093      * Configuration for how to convert batch prediction input instances to the
11094      * prediction instances that are sent to the Model.
11095      * </pre>
11096      *
11097      * <code>.google.cloud.aiplatform.v1.BatchPredictionJob.InstanceConfig instance_config = 27;
11098      * </code>
11099      */
mergeInstanceConfig( com.google.cloud.aiplatform.v1.BatchPredictionJob.InstanceConfig value)11100     public Builder mergeInstanceConfig(
11101         com.google.cloud.aiplatform.v1.BatchPredictionJob.InstanceConfig value) {
11102       if (instanceConfigBuilder_ == null) {
11103         if (((bitField0_ & 0x00000040) != 0)
11104             && instanceConfig_ != null
11105             && instanceConfig_
11106                 != com.google.cloud.aiplatform.v1.BatchPredictionJob.InstanceConfig
11107                     .getDefaultInstance()) {
11108           getInstanceConfigBuilder().mergeFrom(value);
11109         } else {
11110           instanceConfig_ = value;
11111         }
11112       } else {
11113         instanceConfigBuilder_.mergeFrom(value);
11114       }
11115       bitField0_ |= 0x00000040;
11116       onChanged();
11117       return this;
11118     }
11119     /**
11120      *
11121      *
11122      * <pre>
11123      * Configuration for how to convert batch prediction input instances to the
11124      * prediction instances that are sent to the Model.
11125      * </pre>
11126      *
11127      * <code>.google.cloud.aiplatform.v1.BatchPredictionJob.InstanceConfig instance_config = 27;
11128      * </code>
11129      */
clearInstanceConfig()11130     public Builder clearInstanceConfig() {
11131       bitField0_ = (bitField0_ & ~0x00000040);
11132       instanceConfig_ = null;
11133       if (instanceConfigBuilder_ != null) {
11134         instanceConfigBuilder_.dispose();
11135         instanceConfigBuilder_ = null;
11136       }
11137       onChanged();
11138       return this;
11139     }
11140     /**
11141      *
11142      *
11143      * <pre>
11144      * Configuration for how to convert batch prediction input instances to the
11145      * prediction instances that are sent to the Model.
11146      * </pre>
11147      *
11148      * <code>.google.cloud.aiplatform.v1.BatchPredictionJob.InstanceConfig instance_config = 27;
11149      * </code>
11150      */
11151     public com.google.cloud.aiplatform.v1.BatchPredictionJob.InstanceConfig.Builder
getInstanceConfigBuilder()11152         getInstanceConfigBuilder() {
11153       bitField0_ |= 0x00000040;
11154       onChanged();
11155       return getInstanceConfigFieldBuilder().getBuilder();
11156     }
11157     /**
11158      *
11159      *
11160      * <pre>
11161      * Configuration for how to convert batch prediction input instances to the
11162      * prediction instances that are sent to the Model.
11163      * </pre>
11164      *
11165      * <code>.google.cloud.aiplatform.v1.BatchPredictionJob.InstanceConfig instance_config = 27;
11166      * </code>
11167      */
11168     public com.google.cloud.aiplatform.v1.BatchPredictionJob.InstanceConfigOrBuilder
getInstanceConfigOrBuilder()11169         getInstanceConfigOrBuilder() {
11170       if (instanceConfigBuilder_ != null) {
11171         return instanceConfigBuilder_.getMessageOrBuilder();
11172       } else {
11173         return instanceConfig_ == null
11174             ? com.google.cloud.aiplatform.v1.BatchPredictionJob.InstanceConfig.getDefaultInstance()
11175             : instanceConfig_;
11176       }
11177     }
11178     /**
11179      *
11180      *
11181      * <pre>
11182      * Configuration for how to convert batch prediction input instances to the
11183      * prediction instances that are sent to the Model.
11184      * </pre>
11185      *
11186      * <code>.google.cloud.aiplatform.v1.BatchPredictionJob.InstanceConfig instance_config = 27;
11187      * </code>
11188      */
11189     private com.google.protobuf.SingleFieldBuilderV3<
11190             com.google.cloud.aiplatform.v1.BatchPredictionJob.InstanceConfig,
11191             com.google.cloud.aiplatform.v1.BatchPredictionJob.InstanceConfig.Builder,
11192             com.google.cloud.aiplatform.v1.BatchPredictionJob.InstanceConfigOrBuilder>
getInstanceConfigFieldBuilder()11193         getInstanceConfigFieldBuilder() {
11194       if (instanceConfigBuilder_ == null) {
11195         instanceConfigBuilder_ =
11196             new com.google.protobuf.SingleFieldBuilderV3<
11197                 com.google.cloud.aiplatform.v1.BatchPredictionJob.InstanceConfig,
11198                 com.google.cloud.aiplatform.v1.BatchPredictionJob.InstanceConfig.Builder,
11199                 com.google.cloud.aiplatform.v1.BatchPredictionJob.InstanceConfigOrBuilder>(
11200                 getInstanceConfig(), getParentForChildren(), isClean());
11201         instanceConfig_ = null;
11202       }
11203       return instanceConfigBuilder_;
11204     }
11205 
    // Well-known `google.protobuf.Value` for the `model_parameters` field (field 5); null
    // until set.
    private com.google.protobuf.Value modelParameters_;
    // Lazily-created nested builder for `model_parameters`; when non-null it owns the field's
    // value and accessors delegate to it instead of modelParameters_.
    private com.google.protobuf.SingleFieldBuilderV3<
            com.google.protobuf.Value,
            com.google.protobuf.Value.Builder,
            com.google.protobuf.ValueOrBuilder>
        modelParametersBuilder_;
11212     /**
11213      *
11214      *
11215      * <pre>
11216      * The parameters that govern the predictions. The schema of the parameters
11217      * may be specified via the
11218      * [Model's][google.cloud.aiplatform.v1.BatchPredictionJob.model]
11219      * [PredictSchemata's][google.cloud.aiplatform.v1.Model.predict_schemata]
11220      * [parameters_schema_uri][google.cloud.aiplatform.v1.PredictSchemata.parameters_schema_uri].
11221      * </pre>
11222      *
11223      * <code>.google.protobuf.Value model_parameters = 5;</code>
11224      *
11225      * @return Whether the modelParameters field is set.
11226      */
hasModelParameters()11227     public boolean hasModelParameters() {
11228       return ((bitField0_ & 0x00000080) != 0);
11229     }
11230     /**
11231      *
11232      *
11233      * <pre>
11234      * The parameters that govern the predictions. The schema of the parameters
11235      * may be specified via the
11236      * [Model's][google.cloud.aiplatform.v1.BatchPredictionJob.model]
11237      * [PredictSchemata's][google.cloud.aiplatform.v1.Model.predict_schemata]
11238      * [parameters_schema_uri][google.cloud.aiplatform.v1.PredictSchemata.parameters_schema_uri].
11239      * </pre>
11240      *
11241      * <code>.google.protobuf.Value model_parameters = 5;</code>
11242      *
11243      * @return The modelParameters.
11244      */
getModelParameters()11245     public com.google.protobuf.Value getModelParameters() {
11246       if (modelParametersBuilder_ == null) {
11247         return modelParameters_ == null
11248             ? com.google.protobuf.Value.getDefaultInstance()
11249             : modelParameters_;
11250       } else {
11251         return modelParametersBuilder_.getMessage();
11252       }
11253     }
11254     /**
11255      *
11256      *
11257      * <pre>
11258      * The parameters that govern the predictions. The schema of the parameters
11259      * may be specified via the
11260      * [Model's][google.cloud.aiplatform.v1.BatchPredictionJob.model]
11261      * [PredictSchemata's][google.cloud.aiplatform.v1.Model.predict_schemata]
11262      * [parameters_schema_uri][google.cloud.aiplatform.v1.PredictSchemata.parameters_schema_uri].
11263      * </pre>
11264      *
11265      * <code>.google.protobuf.Value model_parameters = 5;</code>
11266      */
setModelParameters(com.google.protobuf.Value value)11267     public Builder setModelParameters(com.google.protobuf.Value value) {
11268       if (modelParametersBuilder_ == null) {
11269         if (value == null) {
11270           throw new NullPointerException();
11271         }
11272         modelParameters_ = value;
11273       } else {
11274         modelParametersBuilder_.setMessage(value);
11275       }
11276       bitField0_ |= 0x00000080;
11277       onChanged();
11278       return this;
11279     }
11280     /**
11281      *
11282      *
11283      * <pre>
11284      * The parameters that govern the predictions. The schema of the parameters
11285      * may be specified via the
11286      * [Model's][google.cloud.aiplatform.v1.BatchPredictionJob.model]
11287      * [PredictSchemata's][google.cloud.aiplatform.v1.Model.predict_schemata]
11288      * [parameters_schema_uri][google.cloud.aiplatform.v1.PredictSchemata.parameters_schema_uri].
11289      * </pre>
11290      *
11291      * <code>.google.protobuf.Value model_parameters = 5;</code>
11292      */
setModelParameters(com.google.protobuf.Value.Builder builderForValue)11293     public Builder setModelParameters(com.google.protobuf.Value.Builder builderForValue) {
11294       if (modelParametersBuilder_ == null) {
11295         modelParameters_ = builderForValue.build();
11296       } else {
11297         modelParametersBuilder_.setMessage(builderForValue.build());
11298       }
11299       bitField0_ |= 0x00000080;
11300       onChanged();
11301       return this;
11302     }
11303     /**
11304      *
11305      *
11306      * <pre>
11307      * The parameters that govern the predictions. The schema of the parameters
11308      * may be specified via the
11309      * [Model's][google.cloud.aiplatform.v1.BatchPredictionJob.model]
11310      * [PredictSchemata's][google.cloud.aiplatform.v1.Model.predict_schemata]
11311      * [parameters_schema_uri][google.cloud.aiplatform.v1.PredictSchemata.parameters_schema_uri].
11312      * </pre>
11313      *
11314      * <code>.google.protobuf.Value model_parameters = 5;</code>
11315      */
mergeModelParameters(com.google.protobuf.Value value)11316     public Builder mergeModelParameters(com.google.protobuf.Value value) {
11317       if (modelParametersBuilder_ == null) {
11318         if (((bitField0_ & 0x00000080) != 0)
11319             && modelParameters_ != null
11320             && modelParameters_ != com.google.protobuf.Value.getDefaultInstance()) {
11321           getModelParametersBuilder().mergeFrom(value);
11322         } else {
11323           modelParameters_ = value;
11324         }
11325       } else {
11326         modelParametersBuilder_.mergeFrom(value);
11327       }
11328       bitField0_ |= 0x00000080;
11329       onChanged();
11330       return this;
11331     }
11332     /**
11333      *
11334      *
11335      * <pre>
11336      * The parameters that govern the predictions. The schema of the parameters
11337      * may be specified via the
11338      * [Model's][google.cloud.aiplatform.v1.BatchPredictionJob.model]
11339      * [PredictSchemata's][google.cloud.aiplatform.v1.Model.predict_schemata]
11340      * [parameters_schema_uri][google.cloud.aiplatform.v1.PredictSchemata.parameters_schema_uri].
11341      * </pre>
11342      *
11343      * <code>.google.protobuf.Value model_parameters = 5;</code>
11344      */
    public Builder clearModelParameters() {
      // Drop the has-bit for model_parameters (field 5).
      bitField0_ = (bitField0_ & ~0x00000080);
      modelParameters_ = null;
      // Release any lazily-created field builder as well.
      if (modelParametersBuilder_ != null) {
        modelParametersBuilder_.dispose();
        modelParametersBuilder_ = null;
      }
      onChanged();
      return this;
    }
11355     /**
11356      *
11357      *
11358      * <pre>
11359      * The parameters that govern the predictions. The schema of the parameters
11360      * may be specified via the
11361      * [Model's][google.cloud.aiplatform.v1.BatchPredictionJob.model]
11362      * [PredictSchemata's][google.cloud.aiplatform.v1.Model.predict_schemata]
11363      * [parameters_schema_uri][google.cloud.aiplatform.v1.PredictSchemata.parameters_schema_uri].
11364      * </pre>
11365      *
11366      * <code>.google.protobuf.Value model_parameters = 5;</code>
11367      */
    public com.google.protobuf.Value.Builder getModelParametersBuilder() {
      // Handing out a mutable builder counts as setting the field.
      bitField0_ |= 0x00000080;
      onChanged();
      return getModelParametersFieldBuilder().getBuilder();
    }
11373     /**
11374      *
11375      *
11376      * <pre>
11377      * The parameters that govern the predictions. The schema of the parameters
11378      * may be specified via the
11379      * [Model's][google.cloud.aiplatform.v1.BatchPredictionJob.model]
11380      * [PredictSchemata's][google.cloud.aiplatform.v1.Model.predict_schemata]
11381      * [parameters_schema_uri][google.cloud.aiplatform.v1.PredictSchemata.parameters_schema_uri].
11382      * </pre>
11383      *
11384      * <code>.google.protobuf.Value model_parameters = 5;</code>
11385      */
    public com.google.protobuf.ValueOrBuilder getModelParametersOrBuilder() {
      if (modelParametersBuilder_ != null) {
        return modelParametersBuilder_.getMessageOrBuilder();
      } else {
        // Never return null: fall back to the default instance when unset.
        return modelParameters_ == null
            ? com.google.protobuf.Value.getDefaultInstance()
            : modelParameters_;
      }
    }
11395     /**
11396      *
11397      *
11398      * <pre>
11399      * The parameters that govern the predictions. The schema of the parameters
11400      * may be specified via the
11401      * [Model's][google.cloud.aiplatform.v1.BatchPredictionJob.model]
11402      * [PredictSchemata's][google.cloud.aiplatform.v1.Model.predict_schemata]
11403      * [parameters_schema_uri][google.cloud.aiplatform.v1.PredictSchemata.parameters_schema_uri].
11404      * </pre>
11405      *
11406      * <code>.google.protobuf.Value model_parameters = 5;</code>
11407      */
    // Lazily creates the single-field builder for model_parameters (field 5).
    private com.google.protobuf.SingleFieldBuilderV3<
            com.google.protobuf.Value,
            com.google.protobuf.Value.Builder,
            com.google.protobuf.ValueOrBuilder>
        getModelParametersFieldBuilder() {
      if (modelParametersBuilder_ == null) {
        modelParametersBuilder_ =
            new com.google.protobuf.SingleFieldBuilderV3<
                com.google.protobuf.Value,
                com.google.protobuf.Value.Builder,
                com.google.protobuf.ValueOrBuilder>(
                getModelParameters(), getParentForChildren(), isClean());
        // The builder now owns the message; clear the plain field to avoid
        // holding two copies of the state.
        modelParameters_ = null;
      }
      return modelParametersBuilder_;
    }
11424 
    // Cached message for output_config (field 6); null until set, or once the
    // field builder below takes ownership of the value.
    private com.google.cloud.aiplatform.v1.BatchPredictionJob.OutputConfig outputConfig_;
    // Lazily-created single-field builder; non-null only after
    // getOutputConfigFieldBuilder() has been called.
    private com.google.protobuf.SingleFieldBuilderV3<
            com.google.cloud.aiplatform.v1.BatchPredictionJob.OutputConfig,
            com.google.cloud.aiplatform.v1.BatchPredictionJob.OutputConfig.Builder,
            com.google.cloud.aiplatform.v1.BatchPredictionJob.OutputConfigOrBuilder>
        outputConfigBuilder_;
11431     /**
11432      *
11433      *
11434      * <pre>
11435      * Required. The Configuration specifying where output predictions should
11436      * be written.
11437      * The schema of any single prediction may be specified as a concatenation
11438      * of [Model's][google.cloud.aiplatform.v1.BatchPredictionJob.model]
11439      * [PredictSchemata's][google.cloud.aiplatform.v1.Model.predict_schemata]
11440      * [instance_schema_uri][google.cloud.aiplatform.v1.PredictSchemata.instance_schema_uri]
11441      * and
11442      * [prediction_schema_uri][google.cloud.aiplatform.v1.PredictSchemata.prediction_schema_uri].
11443      * </pre>
11444      *
11445      * <code>
11446      * .google.cloud.aiplatform.v1.BatchPredictionJob.OutputConfig output_config = 6 [(.google.api.field_behavior) = REQUIRED];
11447      * </code>
11448      *
11449      * @return Whether the outputConfig field is set.
11450      */
    public boolean hasOutputConfig() {
      // Has-bit 0x00000100 tracks output_config (field 6).
      return ((bitField0_ & 0x00000100) != 0);
    }
11454     /**
11455      *
11456      *
11457      * <pre>
11458      * Required. The Configuration specifying where output predictions should
11459      * be written.
11460      * The schema of any single prediction may be specified as a concatenation
11461      * of [Model's][google.cloud.aiplatform.v1.BatchPredictionJob.model]
11462      * [PredictSchemata's][google.cloud.aiplatform.v1.Model.predict_schemata]
11463      * [instance_schema_uri][google.cloud.aiplatform.v1.PredictSchemata.instance_schema_uri]
11464      * and
11465      * [prediction_schema_uri][google.cloud.aiplatform.v1.PredictSchemata.prediction_schema_uri].
11466      * </pre>
11467      *
11468      * <code>
11469      * .google.cloud.aiplatform.v1.BatchPredictionJob.OutputConfig output_config = 6 [(.google.api.field_behavior) = REQUIRED];
11470      * </code>
11471      *
11472      * @return The outputConfig.
11473      */
    public com.google.cloud.aiplatform.v1.BatchPredictionJob.OutputConfig getOutputConfig() {
      if (outputConfigBuilder_ == null) {
        // Never return null: fall back to the default instance when unset.
        return outputConfig_ == null
            ? com.google.cloud.aiplatform.v1.BatchPredictionJob.OutputConfig.getDefaultInstance()
            : outputConfig_;
      } else {
        return outputConfigBuilder_.getMessage();
      }
    }
11483     /**
11484      *
11485      *
11486      * <pre>
11487      * Required. The Configuration specifying where output predictions should
11488      * be written.
11489      * The schema of any single prediction may be specified as a concatenation
11490      * of [Model's][google.cloud.aiplatform.v1.BatchPredictionJob.model]
11491      * [PredictSchemata's][google.cloud.aiplatform.v1.Model.predict_schemata]
11492      * [instance_schema_uri][google.cloud.aiplatform.v1.PredictSchemata.instance_schema_uri]
11493      * and
11494      * [prediction_schema_uri][google.cloud.aiplatform.v1.PredictSchemata.prediction_schema_uri].
11495      * </pre>
11496      *
11497      * <code>
11498      * .google.cloud.aiplatform.v1.BatchPredictionJob.OutputConfig output_config = 6 [(.google.api.field_behavior) = REQUIRED];
11499      * </code>
11500      */
    public Builder setOutputConfig(
        com.google.cloud.aiplatform.v1.BatchPredictionJob.OutputConfig value) {
      if (outputConfigBuilder_ == null) {
        // Protobuf setters are null-hostile; use clearOutputConfig() to unset.
        if (value == null) {
          throw new NullPointerException();
        }
        outputConfig_ = value;
      } else {
        outputConfigBuilder_.setMessage(value);
      }
      // Has-bit 0x00000100 marks output_config (field 6) as explicitly set.
      bitField0_ |= 0x00000100;
      onChanged();
      return this;
    }
11515     /**
11516      *
11517      *
11518      * <pre>
11519      * Required. The Configuration specifying where output predictions should
11520      * be written.
11521      * The schema of any single prediction may be specified as a concatenation
11522      * of [Model's][google.cloud.aiplatform.v1.BatchPredictionJob.model]
11523      * [PredictSchemata's][google.cloud.aiplatform.v1.Model.predict_schemata]
11524      * [instance_schema_uri][google.cloud.aiplatform.v1.PredictSchemata.instance_schema_uri]
11525      * and
11526      * [prediction_schema_uri][google.cloud.aiplatform.v1.PredictSchemata.prediction_schema_uri].
11527      * </pre>
11528      *
11529      * <code>
11530      * .google.cloud.aiplatform.v1.BatchPredictionJob.OutputConfig output_config = 6 [(.google.api.field_behavior) = REQUIRED];
11531      * </code>
11532      */
    public Builder setOutputConfig(
        com.google.cloud.aiplatform.v1.BatchPredictionJob.OutputConfig.Builder builderForValue) {
      if (outputConfigBuilder_ == null) {
        // No field builder yet: build and store the message directly.
        outputConfig_ = builderForValue.build();
      } else {
        outputConfigBuilder_.setMessage(builderForValue.build());
      }
      // Mark output_config (field 6) as set.
      bitField0_ |= 0x00000100;
      onChanged();
      return this;
    }
11544     /**
11545      *
11546      *
11547      * <pre>
11548      * Required. The Configuration specifying where output predictions should
11549      * be written.
11550      * The schema of any single prediction may be specified as a concatenation
11551      * of [Model's][google.cloud.aiplatform.v1.BatchPredictionJob.model]
11552      * [PredictSchemata's][google.cloud.aiplatform.v1.Model.predict_schemata]
11553      * [instance_schema_uri][google.cloud.aiplatform.v1.PredictSchemata.instance_schema_uri]
11554      * and
11555      * [prediction_schema_uri][google.cloud.aiplatform.v1.PredictSchemata.prediction_schema_uri].
11556      * </pre>
11557      *
11558      * <code>
11559      * .google.cloud.aiplatform.v1.BatchPredictionJob.OutputConfig output_config = 6 [(.google.api.field_behavior) = REQUIRED];
11560      * </code>
11561      */
    public Builder mergeOutputConfig(
        com.google.cloud.aiplatform.v1.BatchPredictionJob.OutputConfig value) {
      if (outputConfigBuilder_ == null) {
        // Field-merge only when a set, non-null, non-default message already
        // exists; otherwise simply adopt the incoming value.
        if (((bitField0_ & 0x00000100) != 0)
            && outputConfig_ != null
            && outputConfig_
                != com.google.cloud.aiplatform.v1.BatchPredictionJob.OutputConfig
                    .getDefaultInstance()) {
          getOutputConfigBuilder().mergeFrom(value);
        } else {
          outputConfig_ = value;
        }
      } else {
        outputConfigBuilder_.mergeFrom(value);
      }
      // Mark output_config (field 6) as set.
      bitField0_ |= 0x00000100;
      onChanged();
      return this;
    }
11581     /**
11582      *
11583      *
11584      * <pre>
11585      * Required. The Configuration specifying where output predictions should
11586      * be written.
11587      * The schema of any single prediction may be specified as a concatenation
11588      * of [Model's][google.cloud.aiplatform.v1.BatchPredictionJob.model]
11589      * [PredictSchemata's][google.cloud.aiplatform.v1.Model.predict_schemata]
11590      * [instance_schema_uri][google.cloud.aiplatform.v1.PredictSchemata.instance_schema_uri]
11591      * and
11592      * [prediction_schema_uri][google.cloud.aiplatform.v1.PredictSchemata.prediction_schema_uri].
11593      * </pre>
11594      *
11595      * <code>
11596      * .google.cloud.aiplatform.v1.BatchPredictionJob.OutputConfig output_config = 6 [(.google.api.field_behavior) = REQUIRED];
11597      * </code>
11598      */
    public Builder clearOutputConfig() {
      // Drop the has-bit for output_config (field 6).
      bitField0_ = (bitField0_ & ~0x00000100);
      outputConfig_ = null;
      // Release any lazily-created field builder as well.
      if (outputConfigBuilder_ != null) {
        outputConfigBuilder_.dispose();
        outputConfigBuilder_ = null;
      }
      onChanged();
      return this;
    }
11609     /**
11610      *
11611      *
11612      * <pre>
11613      * Required. The Configuration specifying where output predictions should
11614      * be written.
11615      * The schema of any single prediction may be specified as a concatenation
11616      * of [Model's][google.cloud.aiplatform.v1.BatchPredictionJob.model]
11617      * [PredictSchemata's][google.cloud.aiplatform.v1.Model.predict_schemata]
11618      * [instance_schema_uri][google.cloud.aiplatform.v1.PredictSchemata.instance_schema_uri]
11619      * and
11620      * [prediction_schema_uri][google.cloud.aiplatform.v1.PredictSchemata.prediction_schema_uri].
11621      * </pre>
11622      *
11623      * <code>
11624      * .google.cloud.aiplatform.v1.BatchPredictionJob.OutputConfig output_config = 6 [(.google.api.field_behavior) = REQUIRED];
11625      * </code>
11626      */
    public com.google.cloud.aiplatform.v1.BatchPredictionJob.OutputConfig.Builder
        getOutputConfigBuilder() {
      // Handing out a mutable builder counts as setting the field.
      bitField0_ |= 0x00000100;
      onChanged();
      return getOutputConfigFieldBuilder().getBuilder();
    }
11633     /**
11634      *
11635      *
11636      * <pre>
11637      * Required. The Configuration specifying where output predictions should
11638      * be written.
11639      * The schema of any single prediction may be specified as a concatenation
11640      * of [Model's][google.cloud.aiplatform.v1.BatchPredictionJob.model]
11641      * [PredictSchemata's][google.cloud.aiplatform.v1.Model.predict_schemata]
11642      * [instance_schema_uri][google.cloud.aiplatform.v1.PredictSchemata.instance_schema_uri]
11643      * and
11644      * [prediction_schema_uri][google.cloud.aiplatform.v1.PredictSchemata.prediction_schema_uri].
11645      * </pre>
11646      *
11647      * <code>
11648      * .google.cloud.aiplatform.v1.BatchPredictionJob.OutputConfig output_config = 6 [(.google.api.field_behavior) = REQUIRED];
11649      * </code>
11650      */
    public com.google.cloud.aiplatform.v1.BatchPredictionJob.OutputConfigOrBuilder
        getOutputConfigOrBuilder() {
      if (outputConfigBuilder_ != null) {
        return outputConfigBuilder_.getMessageOrBuilder();
      } else {
        // Never return null: fall back to the default instance when unset.
        return outputConfig_ == null
            ? com.google.cloud.aiplatform.v1.BatchPredictionJob.OutputConfig.getDefaultInstance()
            : outputConfig_;
      }
    }
11661     /**
11662      *
11663      *
11664      * <pre>
11665      * Required. The Configuration specifying where output predictions should
11666      * be written.
11667      * The schema of any single prediction may be specified as a concatenation
11668      * of [Model's][google.cloud.aiplatform.v1.BatchPredictionJob.model]
11669      * [PredictSchemata's][google.cloud.aiplatform.v1.Model.predict_schemata]
11670      * [instance_schema_uri][google.cloud.aiplatform.v1.PredictSchemata.instance_schema_uri]
11671      * and
11672      * [prediction_schema_uri][google.cloud.aiplatform.v1.PredictSchemata.prediction_schema_uri].
11673      * </pre>
11674      *
11675      * <code>
11676      * .google.cloud.aiplatform.v1.BatchPredictionJob.OutputConfig output_config = 6 [(.google.api.field_behavior) = REQUIRED];
11677      * </code>
11678      */
    // Lazily creates the single-field builder for output_config (field 6).
    private com.google.protobuf.SingleFieldBuilderV3<
            com.google.cloud.aiplatform.v1.BatchPredictionJob.OutputConfig,
            com.google.cloud.aiplatform.v1.BatchPredictionJob.OutputConfig.Builder,
            com.google.cloud.aiplatform.v1.BatchPredictionJob.OutputConfigOrBuilder>
        getOutputConfigFieldBuilder() {
      if (outputConfigBuilder_ == null) {
        outputConfigBuilder_ =
            new com.google.protobuf.SingleFieldBuilderV3<
                com.google.cloud.aiplatform.v1.BatchPredictionJob.OutputConfig,
                com.google.cloud.aiplatform.v1.BatchPredictionJob.OutputConfig.Builder,
                com.google.cloud.aiplatform.v1.BatchPredictionJob.OutputConfigOrBuilder>(
                getOutputConfig(), getParentForChildren(), isClean());
        // The builder now owns the message; clear the plain field to avoid
        // holding two copies of the state.
        outputConfig_ = null;
      }
      return outputConfigBuilder_;
    }
11695 
    // Cached message for dedicated_resources (field 7); null until set, or once
    // the field builder below takes ownership of the value.
    private com.google.cloud.aiplatform.v1.BatchDedicatedResources dedicatedResources_;
    // Lazily-created single-field builder; non-null only after
    // getDedicatedResourcesFieldBuilder() has been called.
    private com.google.protobuf.SingleFieldBuilderV3<
            com.google.cloud.aiplatform.v1.BatchDedicatedResources,
            com.google.cloud.aiplatform.v1.BatchDedicatedResources.Builder,
            com.google.cloud.aiplatform.v1.BatchDedicatedResourcesOrBuilder>
        dedicatedResourcesBuilder_;
11702     /**
11703      *
11704      *
11705      * <pre>
11706      * The config of resources used by the Model during the batch prediction. If
11707      * the Model
11708      * [supports][google.cloud.aiplatform.v1.Model.supported_deployment_resources_types]
11709      * DEDICATED_RESOURCES this config may be provided (and the job will use these
11710      * resources), if the Model doesn't support AUTOMATIC_RESOURCES, this config
11711      * must be provided.
11712      * </pre>
11713      *
11714      * <code>.google.cloud.aiplatform.v1.BatchDedicatedResources dedicated_resources = 7;</code>
11715      *
11716      * @return Whether the dedicatedResources field is set.
11717      */
    public boolean hasDedicatedResources() {
      // Has-bit 0x00000200 tracks dedicated_resources (field 7).
      return ((bitField0_ & 0x00000200) != 0);
    }
11721     /**
11722      *
11723      *
11724      * <pre>
11725      * The config of resources used by the Model during the batch prediction. If
11726      * the Model
11727      * [supports][google.cloud.aiplatform.v1.Model.supported_deployment_resources_types]
11728      * DEDICATED_RESOURCES this config may be provided (and the job will use these
11729      * resources), if the Model doesn't support AUTOMATIC_RESOURCES, this config
11730      * must be provided.
11731      * </pre>
11732      *
11733      * <code>.google.cloud.aiplatform.v1.BatchDedicatedResources dedicated_resources = 7;</code>
11734      *
11735      * @return The dedicatedResources.
11736      */
    public com.google.cloud.aiplatform.v1.BatchDedicatedResources getDedicatedResources() {
      if (dedicatedResourcesBuilder_ == null) {
        // Never return null: fall back to the default instance when unset.
        return dedicatedResources_ == null
            ? com.google.cloud.aiplatform.v1.BatchDedicatedResources.getDefaultInstance()
            : dedicatedResources_;
      } else {
        return dedicatedResourcesBuilder_.getMessage();
      }
    }
11746     /**
11747      *
11748      *
11749      * <pre>
11750      * The config of resources used by the Model during the batch prediction. If
11751      * the Model
11752      * [supports][google.cloud.aiplatform.v1.Model.supported_deployment_resources_types]
11753      * DEDICATED_RESOURCES this config may be provided (and the job will use these
11754      * resources), if the Model doesn't support AUTOMATIC_RESOURCES, this config
11755      * must be provided.
11756      * </pre>
11757      *
11758      * <code>.google.cloud.aiplatform.v1.BatchDedicatedResources dedicated_resources = 7;</code>
11759      */
    public Builder setDedicatedResources(
        com.google.cloud.aiplatform.v1.BatchDedicatedResources value) {
      if (dedicatedResourcesBuilder_ == null) {
        // Protobuf setters are null-hostile; use clearDedicatedResources() to unset.
        if (value == null) {
          throw new NullPointerException();
        }
        dedicatedResources_ = value;
      } else {
        dedicatedResourcesBuilder_.setMessage(value);
      }
      // Has-bit 0x00000200 marks dedicated_resources (field 7) as explicitly set.
      bitField0_ |= 0x00000200;
      onChanged();
      return this;
    }
11774     /**
11775      *
11776      *
11777      * <pre>
11778      * The config of resources used by the Model during the batch prediction. If
11779      * the Model
11780      * [supports][google.cloud.aiplatform.v1.Model.supported_deployment_resources_types]
11781      * DEDICATED_RESOURCES this config may be provided (and the job will use these
11782      * resources), if the Model doesn't support AUTOMATIC_RESOURCES, this config
11783      * must be provided.
11784      * </pre>
11785      *
11786      * <code>.google.cloud.aiplatform.v1.BatchDedicatedResources dedicated_resources = 7;</code>
11787      */
    public Builder setDedicatedResources(
        com.google.cloud.aiplatform.v1.BatchDedicatedResources.Builder builderForValue) {
      if (dedicatedResourcesBuilder_ == null) {
        // No field builder yet: build and store the message directly.
        dedicatedResources_ = builderForValue.build();
      } else {
        dedicatedResourcesBuilder_.setMessage(builderForValue.build());
      }
      // Mark dedicated_resources (field 7) as set.
      bitField0_ |= 0x00000200;
      onChanged();
      return this;
    }
11799     /**
11800      *
11801      *
11802      * <pre>
11803      * The config of resources used by the Model during the batch prediction. If
11804      * the Model
11805      * [supports][google.cloud.aiplatform.v1.Model.supported_deployment_resources_types]
11806      * DEDICATED_RESOURCES this config may be provided (and the job will use these
11807      * resources), if the Model doesn't support AUTOMATIC_RESOURCES, this config
11808      * must be provided.
11809      * </pre>
11810      *
11811      * <code>.google.cloud.aiplatform.v1.BatchDedicatedResources dedicated_resources = 7;</code>
11812      */
    public Builder mergeDedicatedResources(
        com.google.cloud.aiplatform.v1.BatchDedicatedResources value) {
      if (dedicatedResourcesBuilder_ == null) {
        // Field-merge only when a set, non-null, non-default message already
        // exists; otherwise simply adopt the incoming value.
        if (((bitField0_ & 0x00000200) != 0)
            && dedicatedResources_ != null
            && dedicatedResources_
                != com.google.cloud.aiplatform.v1.BatchDedicatedResources.getDefaultInstance()) {
          getDedicatedResourcesBuilder().mergeFrom(value);
        } else {
          dedicatedResources_ = value;
        }
      } else {
        dedicatedResourcesBuilder_.mergeFrom(value);
      }
      // Mark dedicated_resources (field 7) as set.
      bitField0_ |= 0x00000200;
      onChanged();
      return this;
    }
11831     /**
11832      *
11833      *
11834      * <pre>
11835      * The config of resources used by the Model during the batch prediction. If
11836      * the Model
11837      * [supports][google.cloud.aiplatform.v1.Model.supported_deployment_resources_types]
11838      * DEDICATED_RESOURCES this config may be provided (and the job will use these
11839      * resources), if the Model doesn't support AUTOMATIC_RESOURCES, this config
11840      * must be provided.
11841      * </pre>
11842      *
11843      * <code>.google.cloud.aiplatform.v1.BatchDedicatedResources dedicated_resources = 7;</code>
11844      */
    public Builder clearDedicatedResources() {
      // Drop the has-bit for dedicated_resources (field 7).
      bitField0_ = (bitField0_ & ~0x00000200);
      dedicatedResources_ = null;
      // Release any lazily-created field builder as well.
      if (dedicatedResourcesBuilder_ != null) {
        dedicatedResourcesBuilder_.dispose();
        dedicatedResourcesBuilder_ = null;
      }
      onChanged();
      return this;
    }
11855     /**
11856      *
11857      *
11858      * <pre>
11859      * The config of resources used by the Model during the batch prediction. If
11860      * the Model
11861      * [supports][google.cloud.aiplatform.v1.Model.supported_deployment_resources_types]
11862      * DEDICATED_RESOURCES this config may be provided (and the job will use these
11863      * resources), if the Model doesn't support AUTOMATIC_RESOURCES, this config
11864      * must be provided.
11865      * </pre>
11866      *
11867      * <code>.google.cloud.aiplatform.v1.BatchDedicatedResources dedicated_resources = 7;</code>
11868      */
    public com.google.cloud.aiplatform.v1.BatchDedicatedResources.Builder
        getDedicatedResourcesBuilder() {
      // Handing out a mutable builder counts as setting the field.
      bitField0_ |= 0x00000200;
      onChanged();
      return getDedicatedResourcesFieldBuilder().getBuilder();
    }
11875     /**
11876      *
11877      *
11878      * <pre>
11879      * The config of resources used by the Model during the batch prediction. If
11880      * the Model
11881      * [supports][google.cloud.aiplatform.v1.Model.supported_deployment_resources_types]
11882      * DEDICATED_RESOURCES this config may be provided (and the job will use these
11883      * resources), if the Model doesn't support AUTOMATIC_RESOURCES, this config
11884      * must be provided.
11885      * </pre>
11886      *
11887      * <code>.google.cloud.aiplatform.v1.BatchDedicatedResources dedicated_resources = 7;</code>
11888      */
    public com.google.cloud.aiplatform.v1.BatchDedicatedResourcesOrBuilder
        getDedicatedResourcesOrBuilder() {
      if (dedicatedResourcesBuilder_ != null) {
        return dedicatedResourcesBuilder_.getMessageOrBuilder();
      } else {
        // Never return null: fall back to the default instance when unset.
        return dedicatedResources_ == null
            ? com.google.cloud.aiplatform.v1.BatchDedicatedResources.getDefaultInstance()
            : dedicatedResources_;
      }
    }
11899     /**
11900      *
11901      *
11902      * <pre>
11903      * The config of resources used by the Model during the batch prediction. If
11904      * the Model
11905      * [supports][google.cloud.aiplatform.v1.Model.supported_deployment_resources_types]
11906      * DEDICATED_RESOURCES this config may be provided (and the job will use these
11907      * resources), if the Model doesn't support AUTOMATIC_RESOURCES, this config
11908      * must be provided.
11909      * </pre>
11910      *
11911      * <code>.google.cloud.aiplatform.v1.BatchDedicatedResources dedicated_resources = 7;</code>
11912      */
    // Lazily creates the single-field builder for dedicated_resources (field 7).
    private com.google.protobuf.SingleFieldBuilderV3<
            com.google.cloud.aiplatform.v1.BatchDedicatedResources,
            com.google.cloud.aiplatform.v1.BatchDedicatedResources.Builder,
            com.google.cloud.aiplatform.v1.BatchDedicatedResourcesOrBuilder>
        getDedicatedResourcesFieldBuilder() {
      if (dedicatedResourcesBuilder_ == null) {
        dedicatedResourcesBuilder_ =
            new com.google.protobuf.SingleFieldBuilderV3<
                com.google.cloud.aiplatform.v1.BatchDedicatedResources,
                com.google.cloud.aiplatform.v1.BatchDedicatedResources.Builder,
                com.google.cloud.aiplatform.v1.BatchDedicatedResourcesOrBuilder>(
                getDedicatedResources(), getParentForChildren(), isClean());
        // The builder now owns the message; clear the plain field to avoid
        // holding two copies of the state.
        dedicatedResources_ = null;
      }
      return dedicatedResourcesBuilder_;
    }
11929 
    // Backing storage for service_account (field 29): holds either a String or
    // a not-yet-decoded ByteString (see getServiceAccount()).
    private java.lang.Object serviceAccount_ = "";
11931     /**
11932      *
11933      *
11934      * <pre>
11935      * The service account that the DeployedModel's container runs as. If not
11936      * specified, a system generated one will be used, which
11937      * has minimal permissions and the custom container, if used, may not have
11938      * enough permission to access other Google Cloud resources.
11939      * Users deploying the Model must have the `iam.serviceAccounts.actAs`
11940      * permission on this service account.
11941      * </pre>
11942      *
11943      * <code>string service_account = 29;</code>
11944      *
11945      * @return The serviceAccount.
11946      */
    public java.lang.String getServiceAccount() {
      java.lang.Object ref = serviceAccount_;
      if (!(ref instanceof java.lang.String)) {
        // Field still holds the wire-format ByteString: decode it once and
        // cache the String so subsequent reads are cheap.
        com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
        java.lang.String s = bs.toStringUtf8();
        serviceAccount_ = s;
        return s;
      } else {
        return (java.lang.String) ref;
      }
    }
11958     /**
11959      *
11960      *
11961      * <pre>
11962      * The service account that the DeployedModel's container runs as. If not
11963      * specified, a system generated one will be used, which
11964      * has minimal permissions and the custom container, if used, may not have
11965      * enough permission to access other Google Cloud resources.
11966      * Users deploying the Model must have the `iam.serviceAccounts.actAs`
11967      * permission on this service account.
11968      * </pre>
11969      *
11970      * <code>string service_account = 29;</code>
11971      *
11972      * @return The bytes for serviceAccount.
11973      */
    public com.google.protobuf.ByteString getServiceAccountBytes() {
      java.lang.Object ref = serviceAccount_;
      if (ref instanceof String) {
        // Field currently holds a String: encode it once and cache the
        // ByteString so subsequent byte reads are cheap.
        com.google.protobuf.ByteString b =
            com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
        serviceAccount_ = b;
        return b;
      } else {
        return (com.google.protobuf.ByteString) ref;
      }
    }
11985     /**
11986      *
11987      *
11988      * <pre>
11989      * The service account that the DeployedModel's container runs as. If not
11990      * specified, a system generated one will be used, which
11991      * has minimal permissions and the custom container, if used, may not have
11992      * enough permission to access other Google Cloud resources.
11993      * Users deploying the Model must have the `iam.serviceAccounts.actAs`
11994      * permission on this service account.
11995      * </pre>
11996      *
11997      * <code>string service_account = 29;</code>
11998      *
11999      * @param value The serviceAccount to set.
12000      * @return This builder for chaining.
12001      */
    public Builder setServiceAccount(java.lang.String value) {
      // Protobuf setters are null-hostile; use clearServiceAccount() to unset.
      if (value == null) {
        throw new NullPointerException();
      }
      serviceAccount_ = value;
      // Has-bit 0x00000400 marks service_account (field 29) as explicitly set.
      bitField0_ |= 0x00000400;
      onChanged();
      return this;
    }
12011     /**
12012      *
12013      *
12014      * <pre>
12015      * The service account that the DeployedModel's container runs as. If not
12016      * specified, a system generated one will be used, which
12017      * has minimal permissions and the custom container, if used, may not have
12018      * enough permission to access other Google Cloud resources.
12019      * Users deploying the Model must have the `iam.serviceAccounts.actAs`
12020      * permission on this service account.
12021      * </pre>
12022      *
12023      * <code>string service_account = 29;</code>
12024      *
12025      * @return This builder for chaining.
12026      */
clearServiceAccount()12027     public Builder clearServiceAccount() {
12028       serviceAccount_ = getDefaultInstance().getServiceAccount();
12029       bitField0_ = (bitField0_ & ~0x00000400);
12030       onChanged();
12031       return this;
12032     }
12033     /**
12034      *
12035      *
12036      * <pre>
12037      * The service account that the DeployedModel's container runs as. If not
12038      * specified, a system generated one will be used, which
12039      * has minimal permissions and the custom container, if used, may not have
12040      * enough permission to access other Google Cloud resources.
12041      * Users deploying the Model must have the `iam.serviceAccounts.actAs`
12042      * permission on this service account.
12043      * </pre>
12044      *
12045      * <code>string service_account = 29;</code>
12046      *
12047      * @param value The bytes for serviceAccount to set.
12048      * @return This builder for chaining.
12049      */
setServiceAccountBytes(com.google.protobuf.ByteString value)12050     public Builder setServiceAccountBytes(com.google.protobuf.ByteString value) {
12051       if (value == null) {
12052         throw new NullPointerException();
12053       }
12054       checkByteStringIsUtf8(value);
12055       serviceAccount_ = value;
12056       bitField0_ |= 0x00000400;
12057       onChanged();
12058       return this;
12059     }
12060 
    // Backing message and lazily created single-field builder for the
    // `manual_batch_tuning_parameters` proto field (field number 8). Exactly
    // one of the two is authoritative at a time: the plain field until a
    // nested builder is requested, the field builder afterwards.
    private com.google.cloud.aiplatform.v1.ManualBatchTuningParameters manualBatchTuningParameters_;
    private com.google.protobuf.SingleFieldBuilderV3<
            com.google.cloud.aiplatform.v1.ManualBatchTuningParameters,
            com.google.cloud.aiplatform.v1.ManualBatchTuningParameters.Builder,
            com.google.cloud.aiplatform.v1.ManualBatchTuningParametersOrBuilder>
        manualBatchTuningParametersBuilder_;
12067     /**
12068      *
12069      *
12070      * <pre>
12071      * Immutable. Parameters configuring the batch behavior. Currently only
12072      * applicable when
12073      * [dedicated_resources][google.cloud.aiplatform.v1.BatchPredictionJob.dedicated_resources]
12074      * are used (in other cases Vertex AI does the tuning itself).
12075      * </pre>
12076      *
12077      * <code>
12078      * .google.cloud.aiplatform.v1.ManualBatchTuningParameters manual_batch_tuning_parameters = 8 [(.google.api.field_behavior) = IMMUTABLE];
12079      * </code>
12080      *
12081      * @return Whether the manualBatchTuningParameters field is set.
12082      */
hasManualBatchTuningParameters()12083     public boolean hasManualBatchTuningParameters() {
12084       return ((bitField0_ & 0x00000800) != 0);
12085     }
12086     /**
12087      *
12088      *
12089      * <pre>
12090      * Immutable. Parameters configuring the batch behavior. Currently only
12091      * applicable when
12092      * [dedicated_resources][google.cloud.aiplatform.v1.BatchPredictionJob.dedicated_resources]
12093      * are used (in other cases Vertex AI does the tuning itself).
12094      * </pre>
12095      *
12096      * <code>
12097      * .google.cloud.aiplatform.v1.ManualBatchTuningParameters manual_batch_tuning_parameters = 8 [(.google.api.field_behavior) = IMMUTABLE];
12098      * </code>
12099      *
12100      * @return The manualBatchTuningParameters.
12101      */
12102     public com.google.cloud.aiplatform.v1.ManualBatchTuningParameters
getManualBatchTuningParameters()12103         getManualBatchTuningParameters() {
12104       if (manualBatchTuningParametersBuilder_ == null) {
12105         return manualBatchTuningParameters_ == null
12106             ? com.google.cloud.aiplatform.v1.ManualBatchTuningParameters.getDefaultInstance()
12107             : manualBatchTuningParameters_;
12108       } else {
12109         return manualBatchTuningParametersBuilder_.getMessage();
12110       }
12111     }
12112     /**
12113      *
12114      *
12115      * <pre>
12116      * Immutable. Parameters configuring the batch behavior. Currently only
12117      * applicable when
12118      * [dedicated_resources][google.cloud.aiplatform.v1.BatchPredictionJob.dedicated_resources]
12119      * are used (in other cases Vertex AI does the tuning itself).
12120      * </pre>
12121      *
12122      * <code>
12123      * .google.cloud.aiplatform.v1.ManualBatchTuningParameters manual_batch_tuning_parameters = 8 [(.google.api.field_behavior) = IMMUTABLE];
12124      * </code>
12125      */
setManualBatchTuningParameters( com.google.cloud.aiplatform.v1.ManualBatchTuningParameters value)12126     public Builder setManualBatchTuningParameters(
12127         com.google.cloud.aiplatform.v1.ManualBatchTuningParameters value) {
12128       if (manualBatchTuningParametersBuilder_ == null) {
12129         if (value == null) {
12130           throw new NullPointerException();
12131         }
12132         manualBatchTuningParameters_ = value;
12133       } else {
12134         manualBatchTuningParametersBuilder_.setMessage(value);
12135       }
12136       bitField0_ |= 0x00000800;
12137       onChanged();
12138       return this;
12139     }
12140     /**
12141      *
12142      *
12143      * <pre>
12144      * Immutable. Parameters configuring the batch behavior. Currently only
12145      * applicable when
12146      * [dedicated_resources][google.cloud.aiplatform.v1.BatchPredictionJob.dedicated_resources]
12147      * are used (in other cases Vertex AI does the tuning itself).
12148      * </pre>
12149      *
12150      * <code>
12151      * .google.cloud.aiplatform.v1.ManualBatchTuningParameters manual_batch_tuning_parameters = 8 [(.google.api.field_behavior) = IMMUTABLE];
12152      * </code>
12153      */
setManualBatchTuningParameters( com.google.cloud.aiplatform.v1.ManualBatchTuningParameters.Builder builderForValue)12154     public Builder setManualBatchTuningParameters(
12155         com.google.cloud.aiplatform.v1.ManualBatchTuningParameters.Builder builderForValue) {
12156       if (manualBatchTuningParametersBuilder_ == null) {
12157         manualBatchTuningParameters_ = builderForValue.build();
12158       } else {
12159         manualBatchTuningParametersBuilder_.setMessage(builderForValue.build());
12160       }
12161       bitField0_ |= 0x00000800;
12162       onChanged();
12163       return this;
12164     }
12165     /**
12166      *
12167      *
12168      * <pre>
12169      * Immutable. Parameters configuring the batch behavior. Currently only
12170      * applicable when
12171      * [dedicated_resources][google.cloud.aiplatform.v1.BatchPredictionJob.dedicated_resources]
12172      * are used (in other cases Vertex AI does the tuning itself).
12173      * </pre>
12174      *
12175      * <code>
12176      * .google.cloud.aiplatform.v1.ManualBatchTuningParameters manual_batch_tuning_parameters = 8 [(.google.api.field_behavior) = IMMUTABLE];
12177      * </code>
12178      */
mergeManualBatchTuningParameters( com.google.cloud.aiplatform.v1.ManualBatchTuningParameters value)12179     public Builder mergeManualBatchTuningParameters(
12180         com.google.cloud.aiplatform.v1.ManualBatchTuningParameters value) {
12181       if (manualBatchTuningParametersBuilder_ == null) {
12182         if (((bitField0_ & 0x00000800) != 0)
12183             && manualBatchTuningParameters_ != null
12184             && manualBatchTuningParameters_
12185                 != com.google.cloud.aiplatform.v1.ManualBatchTuningParameters
12186                     .getDefaultInstance()) {
12187           getManualBatchTuningParametersBuilder().mergeFrom(value);
12188         } else {
12189           manualBatchTuningParameters_ = value;
12190         }
12191       } else {
12192         manualBatchTuningParametersBuilder_.mergeFrom(value);
12193       }
12194       bitField0_ |= 0x00000800;
12195       onChanged();
12196       return this;
12197     }
12198     /**
12199      *
12200      *
12201      * <pre>
12202      * Immutable. Parameters configuring the batch behavior. Currently only
12203      * applicable when
12204      * [dedicated_resources][google.cloud.aiplatform.v1.BatchPredictionJob.dedicated_resources]
12205      * are used (in other cases Vertex AI does the tuning itself).
12206      * </pre>
12207      *
12208      * <code>
12209      * .google.cloud.aiplatform.v1.ManualBatchTuningParameters manual_batch_tuning_parameters = 8 [(.google.api.field_behavior) = IMMUTABLE];
12210      * </code>
12211      */
clearManualBatchTuningParameters()12212     public Builder clearManualBatchTuningParameters() {
12213       bitField0_ = (bitField0_ & ~0x00000800);
12214       manualBatchTuningParameters_ = null;
12215       if (manualBatchTuningParametersBuilder_ != null) {
12216         manualBatchTuningParametersBuilder_.dispose();
12217         manualBatchTuningParametersBuilder_ = null;
12218       }
12219       onChanged();
12220       return this;
12221     }
    /**
     *
     *
     * <pre>
     * Immutable. Parameters configuring the batch behavior. Currently only
     * applicable when
     * [dedicated_resources][google.cloud.aiplatform.v1.BatchPredictionJob.dedicated_resources]
     * are used (in other cases Vertex AI does the tuning itself).
     * </pre>
     *
     * <code>
     * .google.cloud.aiplatform.v1.ManualBatchTuningParameters manual_batch_tuning_parameters = 8 [(.google.api.field_behavior) = IMMUTABLE];
     * </code>
     *
     * @return a mutable builder for the manualBatchTuningParameters field;
     *     changes made through it are reflected in this message.
     */
    public com.google.cloud.aiplatform.v1.ManualBatchTuningParameters.Builder
        getManualBatchTuningParametersBuilder() {
      // Mark the field present and notify the parent up front, because the
      // caller may mutate the returned builder without going through a setter.
      bitField0_ |= 0x00000800;
      onChanged();
      return getManualBatchTuningParametersFieldBuilder().getBuilder();
    }
12242     /**
12243      *
12244      *
12245      * <pre>
12246      * Immutable. Parameters configuring the batch behavior. Currently only
12247      * applicable when
12248      * [dedicated_resources][google.cloud.aiplatform.v1.BatchPredictionJob.dedicated_resources]
12249      * are used (in other cases Vertex AI does the tuning itself).
12250      * </pre>
12251      *
12252      * <code>
12253      * .google.cloud.aiplatform.v1.ManualBatchTuningParameters manual_batch_tuning_parameters = 8 [(.google.api.field_behavior) = IMMUTABLE];
12254      * </code>
12255      */
12256     public com.google.cloud.aiplatform.v1.ManualBatchTuningParametersOrBuilder
getManualBatchTuningParametersOrBuilder()12257         getManualBatchTuningParametersOrBuilder() {
12258       if (manualBatchTuningParametersBuilder_ != null) {
12259         return manualBatchTuningParametersBuilder_.getMessageOrBuilder();
12260       } else {
12261         return manualBatchTuningParameters_ == null
12262             ? com.google.cloud.aiplatform.v1.ManualBatchTuningParameters.getDefaultInstance()
12263             : manualBatchTuningParameters_;
12264       }
12265     }
    /**
     *
     *
     * <pre>
     * Immutable. Parameters configuring the batch behavior. Currently only
     * applicable when
     * [dedicated_resources][google.cloud.aiplatform.v1.BatchPredictionJob.dedicated_resources]
     * are used (in other cases Vertex AI does the tuning itself).
     * </pre>
     *
     * <code>
     * .google.cloud.aiplatform.v1.ManualBatchTuningParameters manual_batch_tuning_parameters = 8 [(.google.api.field_behavior) = IMMUTABLE];
     * </code>
     */
    private com.google.protobuf.SingleFieldBuilderV3<
            com.google.cloud.aiplatform.v1.ManualBatchTuningParameters,
            com.google.cloud.aiplatform.v1.ManualBatchTuningParameters.Builder,
            com.google.cloud.aiplatform.v1.ManualBatchTuningParametersOrBuilder>
        getManualBatchTuningParametersFieldBuilder() {
      if (manualBatchTuningParametersBuilder_ == null) {
        // Lazily create the field builder, seeding it with the current message
        // (or the default instance). Ownership of the value transfers to the
        // builder, so the plain field reference is cleared afterwards.
        manualBatchTuningParametersBuilder_ =
            new com.google.protobuf.SingleFieldBuilderV3<
                com.google.cloud.aiplatform.v1.ManualBatchTuningParameters,
                com.google.cloud.aiplatform.v1.ManualBatchTuningParameters.Builder,
                com.google.cloud.aiplatform.v1.ManualBatchTuningParametersOrBuilder>(
                getManualBatchTuningParameters(), getParentForChildren(), isClean());
        manualBatchTuningParameters_ = null;
      }
      return manualBatchTuningParametersBuilder_;
    }
12296 
    // Backing value for the `generate_explanation` proto field (field 23);
    // presence is tracked via bitField0_ bit 0x00001000.
    private boolean generateExplanation_;
12298     /**
12299      *
12300      *
12301      * <pre>
12302      * Generate explanation with the batch prediction results.
12303      * When set to `true`, the batch prediction output changes based on the
12304      * `predictions_format` field of the
12305      * [BatchPredictionJob.output_config][google.cloud.aiplatform.v1.BatchPredictionJob.output_config]
12306      * object:
12307      *  * `bigquery`: output includes a column named `explanation`. The value
12308      *    is a struct that conforms to the
12309      *    [Explanation][google.cloud.aiplatform.v1.Explanation] object.
12310      *  * `jsonl`: The JSON objects on each line include an additional entry
12311      *    keyed `explanation`. The value of the entry is a JSON object that
12312      *    conforms to the [Explanation][google.cloud.aiplatform.v1.Explanation]
12313      *    object.
12314      *  * `csv`: Generating explanations for CSV format is not supported.
12315      * If this field is set to true, either the
12316      * [Model.explanation_spec][google.cloud.aiplatform.v1.Model.explanation_spec]
12317      * or
12318      * [explanation_spec][google.cloud.aiplatform.v1.BatchPredictionJob.explanation_spec]
12319      * must be populated.
12320      * </pre>
12321      *
12322      * <code>bool generate_explanation = 23;</code>
12323      *
12324      * @return The generateExplanation.
12325      */
12326     @java.lang.Override
getGenerateExplanation()12327     public boolean getGenerateExplanation() {
12328       return generateExplanation_;
12329     }
12330     /**
12331      *
12332      *
12333      * <pre>
12334      * Generate explanation with the batch prediction results.
12335      * When set to `true`, the batch prediction output changes based on the
12336      * `predictions_format` field of the
12337      * [BatchPredictionJob.output_config][google.cloud.aiplatform.v1.BatchPredictionJob.output_config]
12338      * object:
12339      *  * `bigquery`: output includes a column named `explanation`. The value
12340      *    is a struct that conforms to the
12341      *    [Explanation][google.cloud.aiplatform.v1.Explanation] object.
12342      *  * `jsonl`: The JSON objects on each line include an additional entry
12343      *    keyed `explanation`. The value of the entry is a JSON object that
12344      *    conforms to the [Explanation][google.cloud.aiplatform.v1.Explanation]
12345      *    object.
12346      *  * `csv`: Generating explanations for CSV format is not supported.
12347      * If this field is set to true, either the
12348      * [Model.explanation_spec][google.cloud.aiplatform.v1.Model.explanation_spec]
12349      * or
12350      * [explanation_spec][google.cloud.aiplatform.v1.BatchPredictionJob.explanation_spec]
12351      * must be populated.
12352      * </pre>
12353      *
12354      * <code>bool generate_explanation = 23;</code>
12355      *
12356      * @param value The generateExplanation to set.
12357      * @return This builder for chaining.
12358      */
setGenerateExplanation(boolean value)12359     public Builder setGenerateExplanation(boolean value) {
12360 
12361       generateExplanation_ = value;
12362       bitField0_ |= 0x00001000;
12363       onChanged();
12364       return this;
12365     }
12366     /**
12367      *
12368      *
12369      * <pre>
12370      * Generate explanation with the batch prediction results.
12371      * When set to `true`, the batch prediction output changes based on the
12372      * `predictions_format` field of the
12373      * [BatchPredictionJob.output_config][google.cloud.aiplatform.v1.BatchPredictionJob.output_config]
12374      * object:
12375      *  * `bigquery`: output includes a column named `explanation`. The value
12376      *    is a struct that conforms to the
12377      *    [Explanation][google.cloud.aiplatform.v1.Explanation] object.
12378      *  * `jsonl`: The JSON objects on each line include an additional entry
12379      *    keyed `explanation`. The value of the entry is a JSON object that
12380      *    conforms to the [Explanation][google.cloud.aiplatform.v1.Explanation]
12381      *    object.
12382      *  * `csv`: Generating explanations for CSV format is not supported.
12383      * If this field is set to true, either the
12384      * [Model.explanation_spec][google.cloud.aiplatform.v1.Model.explanation_spec]
12385      * or
12386      * [explanation_spec][google.cloud.aiplatform.v1.BatchPredictionJob.explanation_spec]
12387      * must be populated.
12388      * </pre>
12389      *
12390      * <code>bool generate_explanation = 23;</code>
12391      *
12392      * @return This builder for chaining.
12393      */
clearGenerateExplanation()12394     public Builder clearGenerateExplanation() {
12395       bitField0_ = (bitField0_ & ~0x00001000);
12396       generateExplanation_ = false;
12397       onChanged();
12398       return this;
12399     }
12400 
    // Backing message and lazily created single-field builder for the
    // `explanation_spec` proto field (field number 25). The plain field holds
    // the value until a nested builder is created; after that the field
    // builder is authoritative.
    private com.google.cloud.aiplatform.v1.ExplanationSpec explanationSpec_;
    private com.google.protobuf.SingleFieldBuilderV3<
            com.google.cloud.aiplatform.v1.ExplanationSpec,
            com.google.cloud.aiplatform.v1.ExplanationSpec.Builder,
            com.google.cloud.aiplatform.v1.ExplanationSpecOrBuilder>
        explanationSpecBuilder_;
12407     /**
12408      *
12409      *
12410      * <pre>
12411      * Explanation configuration for this BatchPredictionJob. Can be
12412      * specified only if
12413      * [generate_explanation][google.cloud.aiplatform.v1.BatchPredictionJob.generate_explanation]
12414      * is set to `true`.
12415      * This value overrides the value of
12416      * [Model.explanation_spec][google.cloud.aiplatform.v1.Model.explanation_spec].
12417      * All fields of
12418      * [explanation_spec][google.cloud.aiplatform.v1.BatchPredictionJob.explanation_spec]
12419      * are optional in the request. If a field of the
12420      * [explanation_spec][google.cloud.aiplatform.v1.BatchPredictionJob.explanation_spec]
12421      * object is not populated, the corresponding field of the
12422      * [Model.explanation_spec][google.cloud.aiplatform.v1.Model.explanation_spec]
12423      * object is inherited.
12424      * </pre>
12425      *
12426      * <code>.google.cloud.aiplatform.v1.ExplanationSpec explanation_spec = 25;</code>
12427      *
12428      * @return Whether the explanationSpec field is set.
12429      */
hasExplanationSpec()12430     public boolean hasExplanationSpec() {
12431       return ((bitField0_ & 0x00002000) != 0);
12432     }
12433     /**
12434      *
12435      *
12436      * <pre>
12437      * Explanation configuration for this BatchPredictionJob. Can be
12438      * specified only if
12439      * [generate_explanation][google.cloud.aiplatform.v1.BatchPredictionJob.generate_explanation]
12440      * is set to `true`.
12441      * This value overrides the value of
12442      * [Model.explanation_spec][google.cloud.aiplatform.v1.Model.explanation_spec].
12443      * All fields of
12444      * [explanation_spec][google.cloud.aiplatform.v1.BatchPredictionJob.explanation_spec]
12445      * are optional in the request. If a field of the
12446      * [explanation_spec][google.cloud.aiplatform.v1.BatchPredictionJob.explanation_spec]
12447      * object is not populated, the corresponding field of the
12448      * [Model.explanation_spec][google.cloud.aiplatform.v1.Model.explanation_spec]
12449      * object is inherited.
12450      * </pre>
12451      *
12452      * <code>.google.cloud.aiplatform.v1.ExplanationSpec explanation_spec = 25;</code>
12453      *
12454      * @return The explanationSpec.
12455      */
getExplanationSpec()12456     public com.google.cloud.aiplatform.v1.ExplanationSpec getExplanationSpec() {
12457       if (explanationSpecBuilder_ == null) {
12458         return explanationSpec_ == null
12459             ? com.google.cloud.aiplatform.v1.ExplanationSpec.getDefaultInstance()
12460             : explanationSpec_;
12461       } else {
12462         return explanationSpecBuilder_.getMessage();
12463       }
12464     }
12465     /**
12466      *
12467      *
12468      * <pre>
12469      * Explanation configuration for this BatchPredictionJob. Can be
12470      * specified only if
12471      * [generate_explanation][google.cloud.aiplatform.v1.BatchPredictionJob.generate_explanation]
12472      * is set to `true`.
12473      * This value overrides the value of
12474      * [Model.explanation_spec][google.cloud.aiplatform.v1.Model.explanation_spec].
12475      * All fields of
12476      * [explanation_spec][google.cloud.aiplatform.v1.BatchPredictionJob.explanation_spec]
12477      * are optional in the request. If a field of the
12478      * [explanation_spec][google.cloud.aiplatform.v1.BatchPredictionJob.explanation_spec]
12479      * object is not populated, the corresponding field of the
12480      * [Model.explanation_spec][google.cloud.aiplatform.v1.Model.explanation_spec]
12481      * object is inherited.
12482      * </pre>
12483      *
12484      * <code>.google.cloud.aiplatform.v1.ExplanationSpec explanation_spec = 25;</code>
12485      */
setExplanationSpec(com.google.cloud.aiplatform.v1.ExplanationSpec value)12486     public Builder setExplanationSpec(com.google.cloud.aiplatform.v1.ExplanationSpec value) {
12487       if (explanationSpecBuilder_ == null) {
12488         if (value == null) {
12489           throw new NullPointerException();
12490         }
12491         explanationSpec_ = value;
12492       } else {
12493         explanationSpecBuilder_.setMessage(value);
12494       }
12495       bitField0_ |= 0x00002000;
12496       onChanged();
12497       return this;
12498     }
12499     /**
12500      *
12501      *
12502      * <pre>
12503      * Explanation configuration for this BatchPredictionJob. Can be
12504      * specified only if
12505      * [generate_explanation][google.cloud.aiplatform.v1.BatchPredictionJob.generate_explanation]
12506      * is set to `true`.
12507      * This value overrides the value of
12508      * [Model.explanation_spec][google.cloud.aiplatform.v1.Model.explanation_spec].
12509      * All fields of
12510      * [explanation_spec][google.cloud.aiplatform.v1.BatchPredictionJob.explanation_spec]
12511      * are optional in the request. If a field of the
12512      * [explanation_spec][google.cloud.aiplatform.v1.BatchPredictionJob.explanation_spec]
12513      * object is not populated, the corresponding field of the
12514      * [Model.explanation_spec][google.cloud.aiplatform.v1.Model.explanation_spec]
12515      * object is inherited.
12516      * </pre>
12517      *
12518      * <code>.google.cloud.aiplatform.v1.ExplanationSpec explanation_spec = 25;</code>
12519      */
setExplanationSpec( com.google.cloud.aiplatform.v1.ExplanationSpec.Builder builderForValue)12520     public Builder setExplanationSpec(
12521         com.google.cloud.aiplatform.v1.ExplanationSpec.Builder builderForValue) {
12522       if (explanationSpecBuilder_ == null) {
12523         explanationSpec_ = builderForValue.build();
12524       } else {
12525         explanationSpecBuilder_.setMessage(builderForValue.build());
12526       }
12527       bitField0_ |= 0x00002000;
12528       onChanged();
12529       return this;
12530     }
12531     /**
12532      *
12533      *
12534      * <pre>
12535      * Explanation configuration for this BatchPredictionJob. Can be
12536      * specified only if
12537      * [generate_explanation][google.cloud.aiplatform.v1.BatchPredictionJob.generate_explanation]
12538      * is set to `true`.
12539      * This value overrides the value of
12540      * [Model.explanation_spec][google.cloud.aiplatform.v1.Model.explanation_spec].
12541      * All fields of
12542      * [explanation_spec][google.cloud.aiplatform.v1.BatchPredictionJob.explanation_spec]
12543      * are optional in the request. If a field of the
12544      * [explanation_spec][google.cloud.aiplatform.v1.BatchPredictionJob.explanation_spec]
12545      * object is not populated, the corresponding field of the
12546      * [Model.explanation_spec][google.cloud.aiplatform.v1.Model.explanation_spec]
12547      * object is inherited.
12548      * </pre>
12549      *
12550      * <code>.google.cloud.aiplatform.v1.ExplanationSpec explanation_spec = 25;</code>
12551      */
mergeExplanationSpec(com.google.cloud.aiplatform.v1.ExplanationSpec value)12552     public Builder mergeExplanationSpec(com.google.cloud.aiplatform.v1.ExplanationSpec value) {
12553       if (explanationSpecBuilder_ == null) {
12554         if (((bitField0_ & 0x00002000) != 0)
12555             && explanationSpec_ != null
12556             && explanationSpec_
12557                 != com.google.cloud.aiplatform.v1.ExplanationSpec.getDefaultInstance()) {
12558           getExplanationSpecBuilder().mergeFrom(value);
12559         } else {
12560           explanationSpec_ = value;
12561         }
12562       } else {
12563         explanationSpecBuilder_.mergeFrom(value);
12564       }
12565       bitField0_ |= 0x00002000;
12566       onChanged();
12567       return this;
12568     }
12569     /**
12570      *
12571      *
12572      * <pre>
12573      * Explanation configuration for this BatchPredictionJob. Can be
12574      * specified only if
12575      * [generate_explanation][google.cloud.aiplatform.v1.BatchPredictionJob.generate_explanation]
12576      * is set to `true`.
12577      * This value overrides the value of
12578      * [Model.explanation_spec][google.cloud.aiplatform.v1.Model.explanation_spec].
12579      * All fields of
12580      * [explanation_spec][google.cloud.aiplatform.v1.BatchPredictionJob.explanation_spec]
12581      * are optional in the request. If a field of the
12582      * [explanation_spec][google.cloud.aiplatform.v1.BatchPredictionJob.explanation_spec]
12583      * object is not populated, the corresponding field of the
12584      * [Model.explanation_spec][google.cloud.aiplatform.v1.Model.explanation_spec]
12585      * object is inherited.
12586      * </pre>
12587      *
12588      * <code>.google.cloud.aiplatform.v1.ExplanationSpec explanation_spec = 25;</code>
12589      */
clearExplanationSpec()12590     public Builder clearExplanationSpec() {
12591       bitField0_ = (bitField0_ & ~0x00002000);
12592       explanationSpec_ = null;
12593       if (explanationSpecBuilder_ != null) {
12594         explanationSpecBuilder_.dispose();
12595         explanationSpecBuilder_ = null;
12596       }
12597       onChanged();
12598       return this;
12599     }
    /**
     *
     *
     * <pre>
     * Explanation configuration for this BatchPredictionJob. Can be
     * specified only if
     * [generate_explanation][google.cloud.aiplatform.v1.BatchPredictionJob.generate_explanation]
     * is set to `true`.
     * This value overrides the value of
     * [Model.explanation_spec][google.cloud.aiplatform.v1.Model.explanation_spec].
     * All fields of
     * [explanation_spec][google.cloud.aiplatform.v1.BatchPredictionJob.explanation_spec]
     * are optional in the request. If a field of the
     * [explanation_spec][google.cloud.aiplatform.v1.BatchPredictionJob.explanation_spec]
     * object is not populated, the corresponding field of the
     * [Model.explanation_spec][google.cloud.aiplatform.v1.Model.explanation_spec]
     * object is inherited.
     * </pre>
     *
     * <code>.google.cloud.aiplatform.v1.ExplanationSpec explanation_spec = 25;</code>
     *
     * @return a mutable builder for the explanationSpec field; changes made
     *     through it are reflected in this message.
     */
    public com.google.cloud.aiplatform.v1.ExplanationSpec.Builder getExplanationSpecBuilder() {
      // Mark the field present and notify the parent up front, because the
      // caller may mutate the returned builder without going through a setter.
      bitField0_ |= 0x00002000;
      onChanged();
      return getExplanationSpecFieldBuilder().getBuilder();
    }
12626     /**
12627      *
12628      *
12629      * <pre>
12630      * Explanation configuration for this BatchPredictionJob. Can be
12631      * specified only if
12632      * [generate_explanation][google.cloud.aiplatform.v1.BatchPredictionJob.generate_explanation]
12633      * is set to `true`.
12634      * This value overrides the value of
12635      * [Model.explanation_spec][google.cloud.aiplatform.v1.Model.explanation_spec].
12636      * All fields of
12637      * [explanation_spec][google.cloud.aiplatform.v1.BatchPredictionJob.explanation_spec]
12638      * are optional in the request. If a field of the
12639      * [explanation_spec][google.cloud.aiplatform.v1.BatchPredictionJob.explanation_spec]
12640      * object is not populated, the corresponding field of the
12641      * [Model.explanation_spec][google.cloud.aiplatform.v1.Model.explanation_spec]
12642      * object is inherited.
12643      * </pre>
12644      *
12645      * <code>.google.cloud.aiplatform.v1.ExplanationSpec explanation_spec = 25;</code>
12646      */
getExplanationSpecOrBuilder()12647     public com.google.cloud.aiplatform.v1.ExplanationSpecOrBuilder getExplanationSpecOrBuilder() {
12648       if (explanationSpecBuilder_ != null) {
12649         return explanationSpecBuilder_.getMessageOrBuilder();
12650       } else {
12651         return explanationSpec_ == null
12652             ? com.google.cloud.aiplatform.v1.ExplanationSpec.getDefaultInstance()
12653             : explanationSpec_;
12654       }
12655     }
    /**
     *
     *
     * <pre>
     * Explanation configuration for this BatchPredictionJob. Can be
     * specified only if
     * [generate_explanation][google.cloud.aiplatform.v1.BatchPredictionJob.generate_explanation]
     * is set to `true`.
     * This value overrides the value of
     * [Model.explanation_spec][google.cloud.aiplatform.v1.Model.explanation_spec].
     * All fields of
     * [explanation_spec][google.cloud.aiplatform.v1.BatchPredictionJob.explanation_spec]
     * are optional in the request. If a field of the
     * [explanation_spec][google.cloud.aiplatform.v1.BatchPredictionJob.explanation_spec]
     * object is not populated, the corresponding field of the
     * [Model.explanation_spec][google.cloud.aiplatform.v1.Model.explanation_spec]
     * object is inherited.
     * </pre>
     *
     * <code>.google.cloud.aiplatform.v1.ExplanationSpec explanation_spec = 25;</code>
     */
    private com.google.protobuf.SingleFieldBuilderV3<
            com.google.cloud.aiplatform.v1.ExplanationSpec,
            com.google.cloud.aiplatform.v1.ExplanationSpec.Builder,
            com.google.cloud.aiplatform.v1.ExplanationSpecOrBuilder>
        getExplanationSpecFieldBuilder() {
      if (explanationSpecBuilder_ == null) {
        // Lazily create the field builder, seeding it with the current message
        // (or the default instance). Ownership of the value transfers to the
        // builder, so the plain field reference is cleared afterwards.
        explanationSpecBuilder_ =
            new com.google.protobuf.SingleFieldBuilderV3<
                com.google.cloud.aiplatform.v1.ExplanationSpec,
                com.google.cloud.aiplatform.v1.ExplanationSpec.Builder,
                com.google.cloud.aiplatform.v1.ExplanationSpecOrBuilder>(
                getExplanationSpec(), getParentForChildren(), isClean());
        explanationSpec_ = null;
      }
      return explanationSpecBuilder_;
    }
12693 
    // output_info (field 9): either outputInfo_ holds the message directly, or
    // outputInfoBuilder_ (once created) owns the value; never both. Presence is
    // tracked by bit 0x00004000 of bitField0_.
    private com.google.cloud.aiplatform.v1.BatchPredictionJob.OutputInfo outputInfo_;
    private com.google.protobuf.SingleFieldBuilderV3<
            com.google.cloud.aiplatform.v1.BatchPredictionJob.OutputInfo,
            com.google.cloud.aiplatform.v1.BatchPredictionJob.OutputInfo.Builder,
            com.google.cloud.aiplatform.v1.BatchPredictionJob.OutputInfoOrBuilder>
        outputInfoBuilder_;
    /**
     *
     *
     * <pre>
     * Output only. Information further describing the output of this job.
     * </pre>
     *
     * <code>
     * .google.cloud.aiplatform.v1.BatchPredictionJob.OutputInfo output_info = 9 [(.google.api.field_behavior) = OUTPUT_ONLY];
     * </code>
     *
     * @return Whether the outputInfo field is set.
     */
    public boolean hasOutputInfo() {
      return ((bitField0_ & 0x00004000) != 0);
    }
    /**
     *
     *
     * <pre>
     * Output only. Information further describing the output of this job.
     * </pre>
     *
     * <code>
     * .google.cloud.aiplatform.v1.BatchPredictionJob.OutputInfo output_info = 9 [(.google.api.field_behavior) = OUTPUT_ONLY];
     * </code>
     *
     * @return The outputInfo.
     */
    public com.google.cloud.aiplatform.v1.BatchPredictionJob.OutputInfo getOutputInfo() {
      // Reads from whichever side currently owns the value; never returns null
      // (falls back to the shared default instance when unset).
      if (outputInfoBuilder_ == null) {
        return outputInfo_ == null
            ? com.google.cloud.aiplatform.v1.BatchPredictionJob.OutputInfo.getDefaultInstance()
            : outputInfo_;
      } else {
        return outputInfoBuilder_.getMessage();
      }
    }
    /**
     *
     *
     * <pre>
     * Output only. Information further describing the output of this job.
     * </pre>
     *
     * <code>
     * .google.cloud.aiplatform.v1.BatchPredictionJob.OutputInfo output_info = 9 [(.google.api.field_behavior) = OUTPUT_ONLY];
     * </code>
     */
    public Builder setOutputInfo(
        com.google.cloud.aiplatform.v1.BatchPredictionJob.OutputInfo value) {
      if (outputInfoBuilder_ == null) {
        if (value == null) {
          throw new NullPointerException();
        }
        outputInfo_ = value;
      } else {
        outputInfoBuilder_.setMessage(value);
      }
      bitField0_ |= 0x00004000;
      onChanged();
      return this;
    }
    /**
     *
     *
     * <pre>
     * Output only. Information further describing the output of this job.
     * </pre>
     *
     * <code>
     * .google.cloud.aiplatform.v1.BatchPredictionJob.OutputInfo output_info = 9 [(.google.api.field_behavior) = OUTPUT_ONLY];
     * </code>
     */
    public Builder setOutputInfo(
        com.google.cloud.aiplatform.v1.BatchPredictionJob.OutputInfo.Builder builderForValue) {
      if (outputInfoBuilder_ == null) {
        outputInfo_ = builderForValue.build();
      } else {
        outputInfoBuilder_.setMessage(builderForValue.build());
      }
      bitField0_ |= 0x00004000;
      onChanged();
      return this;
    }
    /**
     *
     *
     * <pre>
     * Output only. Information further describing the output of this job.
     * </pre>
     *
     * <code>
     * .google.cloud.aiplatform.v1.BatchPredictionJob.OutputInfo output_info = 9 [(.google.api.field_behavior) = OUTPUT_ONLY];
     * </code>
     */
    public Builder mergeOutputInfo(
        com.google.cloud.aiplatform.v1.BatchPredictionJob.OutputInfo value) {
      if (outputInfoBuilder_ == null) {
        // Merge field-by-field only when a non-default value is already present;
        // the != here is an intentional reference comparison against the
        // default-instance singleton. Otherwise simply adopt the new value.
        if (((bitField0_ & 0x00004000) != 0)
            && outputInfo_ != null
            && outputInfo_
                != com.google.cloud.aiplatform.v1.BatchPredictionJob.OutputInfo
                    .getDefaultInstance()) {
          getOutputInfoBuilder().mergeFrom(value);
        } else {
          outputInfo_ = value;
        }
      } else {
        outputInfoBuilder_.mergeFrom(value);
      }
      bitField0_ |= 0x00004000;
      onChanged();
      return this;
    }
    /**
     *
     *
     * <pre>
     * Output only. Information further describing the output of this job.
     * </pre>
     *
     * <code>
     * .google.cloud.aiplatform.v1.BatchPredictionJob.OutputInfo output_info = 9 [(.google.api.field_behavior) = OUTPUT_ONLY];
     * </code>
     */
    public Builder clearOutputInfo() {
      // Clears the presence bit and releases both the message and the nested
      // builder (disposed so it detaches from this parent builder).
      bitField0_ = (bitField0_ & ~0x00004000);
      outputInfo_ = null;
      if (outputInfoBuilder_ != null) {
        outputInfoBuilder_.dispose();
        outputInfoBuilder_ = null;
      }
      onChanged();
      return this;
    }
    /**
     *
     *
     * <pre>
     * Output only. Information further describing the output of this job.
     * </pre>
     *
     * <code>
     * .google.cloud.aiplatform.v1.BatchPredictionJob.OutputInfo output_info = 9 [(.google.api.field_behavior) = OUTPUT_ONLY];
     * </code>
     */
    public com.google.cloud.aiplatform.v1.BatchPredictionJob.OutputInfo.Builder
        getOutputInfoBuilder() {
      // Handing out a mutable builder counts as setting the field.
      bitField0_ |= 0x00004000;
      onChanged();
      return getOutputInfoFieldBuilder().getBuilder();
    }
    /**
     *
     *
     * <pre>
     * Output only. Information further describing the output of this job.
     * </pre>
     *
     * <code>
     * .google.cloud.aiplatform.v1.BatchPredictionJob.OutputInfo output_info = 9 [(.google.api.field_behavior) = OUTPUT_ONLY];
     * </code>
     */
    public com.google.cloud.aiplatform.v1.BatchPredictionJob.OutputInfoOrBuilder
        getOutputInfoOrBuilder() {
      if (outputInfoBuilder_ != null) {
        return outputInfoBuilder_.getMessageOrBuilder();
      } else {
        return outputInfo_ == null
            ? com.google.cloud.aiplatform.v1.BatchPredictionJob.OutputInfo.getDefaultInstance()
            : outputInfo_;
      }
    }
    /**
     *
     *
     * <pre>
     * Output only. Information further describing the output of this job.
     * </pre>
     *
     * <code>
     * .google.cloud.aiplatform.v1.BatchPredictionJob.OutputInfo output_info = 9 [(.google.api.field_behavior) = OUTPUT_ONLY];
     * </code>
     */
    private com.google.protobuf.SingleFieldBuilderV3<
            com.google.cloud.aiplatform.v1.BatchPredictionJob.OutputInfo,
            com.google.cloud.aiplatform.v1.BatchPredictionJob.OutputInfo.Builder,
            com.google.cloud.aiplatform.v1.BatchPredictionJob.OutputInfoOrBuilder>
        getOutputInfoFieldBuilder() {
      // Lazy init: the builder takes ownership of the current value, so the
      // plain message reference is nulled afterwards.
      if (outputInfoBuilder_ == null) {
        outputInfoBuilder_ =
            new com.google.protobuf.SingleFieldBuilderV3<
                com.google.cloud.aiplatform.v1.BatchPredictionJob.OutputInfo,
                com.google.cloud.aiplatform.v1.BatchPredictionJob.OutputInfo.Builder,
                com.google.cloud.aiplatform.v1.BatchPredictionJob.OutputInfoOrBuilder>(
                getOutputInfo(), getParentForChildren(), isClean());
        outputInfo_ = null;
      }
      return outputInfoBuilder_;
    }
12901 
    // state (enum field 10): stored as the raw wire integer so that enum values
    // unknown to this generated code are preserved round-trip. Presence bit is
    // 0x00008000 of bitField0_.
    private int state_ = 0;
    /**
     *
     *
     * <pre>
     * Output only. The detailed state of the job.
     * </pre>
     *
     * <code>
     * .google.cloud.aiplatform.v1.JobState state = 10 [(.google.api.field_behavior) = OUTPUT_ONLY];
     * </code>
     *
     * @return The enum numeric value on the wire for state.
     */
    @java.lang.Override
    public int getStateValue() {
      return state_;
    }
    /**
     *
     *
     * <pre>
     * Output only. The detailed state of the job.
     * </pre>
     *
     * <code>
     * .google.cloud.aiplatform.v1.JobState state = 10 [(.google.api.field_behavior) = OUTPUT_ONLY];
     * </code>
     *
     * @param value The enum numeric value on the wire for state to set.
     * @return This builder for chaining.
     */
    public Builder setStateValue(int value) {
      // Accepts any integer, including values with no JobState constant.
      state_ = value;
      bitField0_ |= 0x00008000;
      onChanged();
      return this;
    }
    /**
     *
     *
     * <pre>
     * Output only. The detailed state of the job.
     * </pre>
     *
     * <code>
     * .google.cloud.aiplatform.v1.JobState state = 10 [(.google.api.field_behavior) = OUTPUT_ONLY];
     * </code>
     *
     * @return The state.
     */
    @java.lang.Override
    public com.google.cloud.aiplatform.v1.JobState getState() {
      // Maps the stored wire value to its enum constant; unknown values are
      // surfaced as UNRECOGNIZED rather than null.
      com.google.cloud.aiplatform.v1.JobState result =
          com.google.cloud.aiplatform.v1.JobState.forNumber(state_);
      return result == null ? com.google.cloud.aiplatform.v1.JobState.UNRECOGNIZED : result;
    }
    /**
     *
     *
     * <pre>
     * Output only. The detailed state of the job.
     * </pre>
     *
     * <code>
     * .google.cloud.aiplatform.v1.JobState state = 10 [(.google.api.field_behavior) = OUTPUT_ONLY];
     * </code>
     *
     * @param value The state to set.
     * @return This builder for chaining.
     */
    public Builder setState(com.google.cloud.aiplatform.v1.JobState value) {
      if (value == null) {
        throw new NullPointerException();
      }
      bitField0_ |= 0x00008000;
      state_ = value.getNumber();
      onChanged();
      return this;
    }
    /**
     *
     *
     * <pre>
     * Output only. The detailed state of the job.
     * </pre>
     *
     * <code>
     * .google.cloud.aiplatform.v1.JobState state = 10 [(.google.api.field_behavior) = OUTPUT_ONLY];
     * </code>
     *
     * @return This builder for chaining.
     */
    public Builder clearState() {
      // Resets to the default enum value (numeric 0) and clears presence.
      bitField0_ = (bitField0_ & ~0x00008000);
      state_ = 0;
      onChanged();
      return this;
    }
13001 
    // error (field 11, google.rpc.Status): same single-field ownership scheme as
    // output_info — either error_ or errorBuilder_ holds the value, never both.
    // Presence bit is 0x00010000 of bitField0_.
    private com.google.rpc.Status error_;
    private com.google.protobuf.SingleFieldBuilderV3<
            com.google.rpc.Status, com.google.rpc.Status.Builder, com.google.rpc.StatusOrBuilder>
        errorBuilder_;
    /**
     *
     *
     * <pre>
     * Output only. Only populated when the job's state is JOB_STATE_FAILED or
     * JOB_STATE_CANCELLED.
     * </pre>
     *
     * <code>.google.rpc.Status error = 11 [(.google.api.field_behavior) = OUTPUT_ONLY];</code>
     *
     * @return Whether the error field is set.
     */
    public boolean hasError() {
      return ((bitField0_ & 0x00010000) != 0);
    }
    /**
     *
     *
     * <pre>
     * Output only. Only populated when the job's state is JOB_STATE_FAILED or
     * JOB_STATE_CANCELLED.
     * </pre>
     *
     * <code>.google.rpc.Status error = 11 [(.google.api.field_behavior) = OUTPUT_ONLY];</code>
     *
     * @return The error.
     */
    public com.google.rpc.Status getError() {
      // Never returns null; unset reads yield the default Status instance.
      if (errorBuilder_ == null) {
        return error_ == null ? com.google.rpc.Status.getDefaultInstance() : error_;
      } else {
        return errorBuilder_.getMessage();
      }
    }
    /**
     *
     *
     * <pre>
     * Output only. Only populated when the job's state is JOB_STATE_FAILED or
     * JOB_STATE_CANCELLED.
     * </pre>
     *
     * <code>.google.rpc.Status error = 11 [(.google.api.field_behavior) = OUTPUT_ONLY];</code>
     */
    public Builder setError(com.google.rpc.Status value) {
      if (errorBuilder_ == null) {
        if (value == null) {
          throw new NullPointerException();
        }
        error_ = value;
      } else {
        errorBuilder_.setMessage(value);
      }
      bitField0_ |= 0x00010000;
      onChanged();
      return this;
    }
    /**
     *
     *
     * <pre>
     * Output only. Only populated when the job's state is JOB_STATE_FAILED or
     * JOB_STATE_CANCELLED.
     * </pre>
     *
     * <code>.google.rpc.Status error = 11 [(.google.api.field_behavior) = OUTPUT_ONLY];</code>
     */
    public Builder setError(com.google.rpc.Status.Builder builderForValue) {
      if (errorBuilder_ == null) {
        error_ = builderForValue.build();
      } else {
        errorBuilder_.setMessage(builderForValue.build());
      }
      bitField0_ |= 0x00010000;
      onChanged();
      return this;
    }
    /**
     *
     *
     * <pre>
     * Output only. Only populated when the job's state is JOB_STATE_FAILED or
     * JOB_STATE_CANCELLED.
     * </pre>
     *
     * <code>.google.rpc.Status error = 11 [(.google.api.field_behavior) = OUTPUT_ONLY];</code>
     */
    public Builder mergeError(com.google.rpc.Status value) {
      if (errorBuilder_ == null) {
        // Field-wise merge only when a non-default value is present; the != is
        // an intentional reference comparison against the default singleton.
        if (((bitField0_ & 0x00010000) != 0)
            && error_ != null
            && error_ != com.google.rpc.Status.getDefaultInstance()) {
          getErrorBuilder().mergeFrom(value);
        } else {
          error_ = value;
        }
      } else {
        errorBuilder_.mergeFrom(value);
      }
      bitField0_ |= 0x00010000;
      onChanged();
      return this;
    }
    /**
     *
     *
     * <pre>
     * Output only. Only populated when the job's state is JOB_STATE_FAILED or
     * JOB_STATE_CANCELLED.
     * </pre>
     *
     * <code>.google.rpc.Status error = 11 [(.google.api.field_behavior) = OUTPUT_ONLY];</code>
     */
    public Builder clearError() {
      bitField0_ = (bitField0_ & ~0x00010000);
      error_ = null;
      if (errorBuilder_ != null) {
        errorBuilder_.dispose();
        errorBuilder_ = null;
      }
      onChanged();
      return this;
    }
    /**
     *
     *
     * <pre>
     * Output only. Only populated when the job's state is JOB_STATE_FAILED or
     * JOB_STATE_CANCELLED.
     * </pre>
     *
     * <code>.google.rpc.Status error = 11 [(.google.api.field_behavior) = OUTPUT_ONLY];</code>
     */
    public com.google.rpc.Status.Builder getErrorBuilder() {
      // Handing out a mutable builder counts as setting the field.
      bitField0_ |= 0x00010000;
      onChanged();
      return getErrorFieldBuilder().getBuilder();
    }
    /**
     *
     *
     * <pre>
     * Output only. Only populated when the job's state is JOB_STATE_FAILED or
     * JOB_STATE_CANCELLED.
     * </pre>
     *
     * <code>.google.rpc.Status error = 11 [(.google.api.field_behavior) = OUTPUT_ONLY];</code>
     */
    public com.google.rpc.StatusOrBuilder getErrorOrBuilder() {
      if (errorBuilder_ != null) {
        return errorBuilder_.getMessageOrBuilder();
      } else {
        return error_ == null ? com.google.rpc.Status.getDefaultInstance() : error_;
      }
    }
    /**
     *
     *
     * <pre>
     * Output only. Only populated when the job's state is JOB_STATE_FAILED or
     * JOB_STATE_CANCELLED.
     * </pre>
     *
     * <code>.google.rpc.Status error = 11 [(.google.api.field_behavior) = OUTPUT_ONLY];</code>
     */
    private com.google.protobuf.SingleFieldBuilderV3<
            com.google.rpc.Status, com.google.rpc.Status.Builder, com.google.rpc.StatusOrBuilder>
        getErrorFieldBuilder() {
      // Lazy init; builder takes ownership of the current value.
      if (errorBuilder_ == null) {
        errorBuilder_ =
            new com.google.protobuf.SingleFieldBuilderV3<
                com.google.rpc.Status,
                com.google.rpc.Status.Builder,
                com.google.rpc.StatusOrBuilder>(getError(), getParentForChildren(), isClean());
        error_ = null;
      }
      return errorBuilder_;
    }
13184 
13185     private java.util.List<com.google.rpc.Status> partialFailures_ =
13186         java.util.Collections.emptyList();
13187 
ensurePartialFailuresIsMutable()13188     private void ensurePartialFailuresIsMutable() {
13189       if (!((bitField0_ & 0x00020000) != 0)) {
13190         partialFailures_ = new java.util.ArrayList<com.google.rpc.Status>(partialFailures_);
13191         bitField0_ |= 0x00020000;
13192       }
13193     }
13194 
13195     private com.google.protobuf.RepeatedFieldBuilderV3<
13196             com.google.rpc.Status, com.google.rpc.Status.Builder, com.google.rpc.StatusOrBuilder>
13197         partialFailuresBuilder_;
13198 
13199     /**
13200      *
13201      *
13202      * <pre>
13203      * Output only. Partial failures encountered.
13204      * For example, single files that can't be read.
13205      * This field never exceeds 20 entries.
13206      * Status details fields contain standard Google Cloud error details.
13207      * </pre>
13208      *
13209      * <code>
13210      * repeated .google.rpc.Status partial_failures = 12 [(.google.api.field_behavior) = OUTPUT_ONLY];
13211      * </code>
13212      */
getPartialFailuresList()13213     public java.util.List<com.google.rpc.Status> getPartialFailuresList() {
13214       if (partialFailuresBuilder_ == null) {
13215         return java.util.Collections.unmodifiableList(partialFailures_);
13216       } else {
13217         return partialFailuresBuilder_.getMessageList();
13218       }
13219     }
13220     /**
13221      *
13222      *
13223      * <pre>
13224      * Output only. Partial failures encountered.
13225      * For example, single files that can't be read.
13226      * This field never exceeds 20 entries.
13227      * Status details fields contain standard Google Cloud error details.
13228      * </pre>
13229      *
13230      * <code>
13231      * repeated .google.rpc.Status partial_failures = 12 [(.google.api.field_behavior) = OUTPUT_ONLY];
13232      * </code>
13233      */
getPartialFailuresCount()13234     public int getPartialFailuresCount() {
13235       if (partialFailuresBuilder_ == null) {
13236         return partialFailures_.size();
13237       } else {
13238         return partialFailuresBuilder_.getCount();
13239       }
13240     }
13241     /**
13242      *
13243      *
13244      * <pre>
13245      * Output only. Partial failures encountered.
13246      * For example, single files that can't be read.
13247      * This field never exceeds 20 entries.
13248      * Status details fields contain standard Google Cloud error details.
13249      * </pre>
13250      *
13251      * <code>
13252      * repeated .google.rpc.Status partial_failures = 12 [(.google.api.field_behavior) = OUTPUT_ONLY];
13253      * </code>
13254      */
getPartialFailures(int index)13255     public com.google.rpc.Status getPartialFailures(int index) {
13256       if (partialFailuresBuilder_ == null) {
13257         return partialFailures_.get(index);
13258       } else {
13259         return partialFailuresBuilder_.getMessage(index);
13260       }
13261     }
13262     /**
13263      *
13264      *
13265      * <pre>
13266      * Output only. Partial failures encountered.
13267      * For example, single files that can't be read.
13268      * This field never exceeds 20 entries.
13269      * Status details fields contain standard Google Cloud error details.
13270      * </pre>
13271      *
13272      * <code>
13273      * repeated .google.rpc.Status partial_failures = 12 [(.google.api.field_behavior) = OUTPUT_ONLY];
13274      * </code>
13275      */
setPartialFailures(int index, com.google.rpc.Status value)13276     public Builder setPartialFailures(int index, com.google.rpc.Status value) {
13277       if (partialFailuresBuilder_ == null) {
13278         if (value == null) {
13279           throw new NullPointerException();
13280         }
13281         ensurePartialFailuresIsMutable();
13282         partialFailures_.set(index, value);
13283         onChanged();
13284       } else {
13285         partialFailuresBuilder_.setMessage(index, value);
13286       }
13287       return this;
13288     }
13289     /**
13290      *
13291      *
13292      * <pre>
13293      * Output only. Partial failures encountered.
13294      * For example, single files that can't be read.
13295      * This field never exceeds 20 entries.
13296      * Status details fields contain standard Google Cloud error details.
13297      * </pre>
13298      *
13299      * <code>
13300      * repeated .google.rpc.Status partial_failures = 12 [(.google.api.field_behavior) = OUTPUT_ONLY];
13301      * </code>
13302      */
setPartialFailures(int index, com.google.rpc.Status.Builder builderForValue)13303     public Builder setPartialFailures(int index, com.google.rpc.Status.Builder builderForValue) {
13304       if (partialFailuresBuilder_ == null) {
13305         ensurePartialFailuresIsMutable();
13306         partialFailures_.set(index, builderForValue.build());
13307         onChanged();
13308       } else {
13309         partialFailuresBuilder_.setMessage(index, builderForValue.build());
13310       }
13311       return this;
13312     }
13313     /**
13314      *
13315      *
13316      * <pre>
13317      * Output only. Partial failures encountered.
13318      * For example, single files that can't be read.
13319      * This field never exceeds 20 entries.
13320      * Status details fields contain standard Google Cloud error details.
13321      * </pre>
13322      *
13323      * <code>
13324      * repeated .google.rpc.Status partial_failures = 12 [(.google.api.field_behavior) = OUTPUT_ONLY];
13325      * </code>
13326      */
addPartialFailures(com.google.rpc.Status value)13327     public Builder addPartialFailures(com.google.rpc.Status value) {
13328       if (partialFailuresBuilder_ == null) {
13329         if (value == null) {
13330           throw new NullPointerException();
13331         }
13332         ensurePartialFailuresIsMutable();
13333         partialFailures_.add(value);
13334         onChanged();
13335       } else {
13336         partialFailuresBuilder_.addMessage(value);
13337       }
13338       return this;
13339     }
13340     /**
13341      *
13342      *
13343      * <pre>
13344      * Output only. Partial failures encountered.
13345      * For example, single files that can't be read.
13346      * This field never exceeds 20 entries.
13347      * Status details fields contain standard Google Cloud error details.
13348      * </pre>
13349      *
13350      * <code>
13351      * repeated .google.rpc.Status partial_failures = 12 [(.google.api.field_behavior) = OUTPUT_ONLY];
13352      * </code>
13353      */
addPartialFailures(int index, com.google.rpc.Status value)13354     public Builder addPartialFailures(int index, com.google.rpc.Status value) {
13355       if (partialFailuresBuilder_ == null) {
13356         if (value == null) {
13357           throw new NullPointerException();
13358         }
13359         ensurePartialFailuresIsMutable();
13360         partialFailures_.add(index, value);
13361         onChanged();
13362       } else {
13363         partialFailuresBuilder_.addMessage(index, value);
13364       }
13365       return this;
13366     }
13367     /**
13368      *
13369      *
13370      * <pre>
13371      * Output only. Partial failures encountered.
13372      * For example, single files that can't be read.
13373      * This field never exceeds 20 entries.
13374      * Status details fields contain standard Google Cloud error details.
13375      * </pre>
13376      *
13377      * <code>
13378      * repeated .google.rpc.Status partial_failures = 12 [(.google.api.field_behavior) = OUTPUT_ONLY];
13379      * </code>
13380      */
addPartialFailures(com.google.rpc.Status.Builder builderForValue)13381     public Builder addPartialFailures(com.google.rpc.Status.Builder builderForValue) {
13382       if (partialFailuresBuilder_ == null) {
13383         ensurePartialFailuresIsMutable();
13384         partialFailures_.add(builderForValue.build());
13385         onChanged();
13386       } else {
13387         partialFailuresBuilder_.addMessage(builderForValue.build());
13388       }
13389       return this;
13390     }
13391     /**
13392      *
13393      *
13394      * <pre>
13395      * Output only. Partial failures encountered.
13396      * For example, single files that can't be read.
13397      * This field never exceeds 20 entries.
13398      * Status details fields contain standard Google Cloud error details.
13399      * </pre>
13400      *
13401      * <code>
13402      * repeated .google.rpc.Status partial_failures = 12 [(.google.api.field_behavior) = OUTPUT_ONLY];
13403      * </code>
13404      */
addPartialFailures(int index, com.google.rpc.Status.Builder builderForValue)13405     public Builder addPartialFailures(int index, com.google.rpc.Status.Builder builderForValue) {
13406       if (partialFailuresBuilder_ == null) {
13407         ensurePartialFailuresIsMutable();
13408         partialFailures_.add(index, builderForValue.build());
13409         onChanged();
13410       } else {
13411         partialFailuresBuilder_.addMessage(index, builderForValue.build());
13412       }
13413       return this;
13414     }
13415     /**
13416      *
13417      *
13418      * <pre>
13419      * Output only. Partial failures encountered.
13420      * For example, single files that can't be read.
13421      * This field never exceeds 20 entries.
13422      * Status details fields contain standard Google Cloud error details.
13423      * </pre>
13424      *
13425      * <code>
13426      * repeated .google.rpc.Status partial_failures = 12 [(.google.api.field_behavior) = OUTPUT_ONLY];
13427      * </code>
13428      */
addAllPartialFailures( java.lang.Iterable<? extends com.google.rpc.Status> values)13429     public Builder addAllPartialFailures(
13430         java.lang.Iterable<? extends com.google.rpc.Status> values) {
13431       if (partialFailuresBuilder_ == null) {
13432         ensurePartialFailuresIsMutable();
13433         com.google.protobuf.AbstractMessageLite.Builder.addAll(values, partialFailures_);
13434         onChanged();
13435       } else {
13436         partialFailuresBuilder_.addAllMessages(values);
13437       }
13438       return this;
13439     }
13440     /**
13441      *
13442      *
13443      * <pre>
13444      * Output only. Partial failures encountered.
13445      * For example, single files that can't be read.
13446      * This field never exceeds 20 entries.
13447      * Status details fields contain standard Google Cloud error details.
13448      * </pre>
13449      *
13450      * <code>
13451      * repeated .google.rpc.Status partial_failures = 12 [(.google.api.field_behavior) = OUTPUT_ONLY];
13452      * </code>
13453      */
clearPartialFailures()13454     public Builder clearPartialFailures() {
13455       if (partialFailuresBuilder_ == null) {
13456         partialFailures_ = java.util.Collections.emptyList();
13457         bitField0_ = (bitField0_ & ~0x00020000);
13458         onChanged();
13459       } else {
13460         partialFailuresBuilder_.clear();
13461       }
13462       return this;
13463     }
13464     /**
13465      *
13466      *
13467      * <pre>
13468      * Output only. Partial failures encountered.
13469      * For example, single files that can't be read.
13470      * This field never exceeds 20 entries.
13471      * Status details fields contain standard Google Cloud error details.
13472      * </pre>
13473      *
13474      * <code>
13475      * repeated .google.rpc.Status partial_failures = 12 [(.google.api.field_behavior) = OUTPUT_ONLY];
13476      * </code>
13477      */
removePartialFailures(int index)13478     public Builder removePartialFailures(int index) {
13479       if (partialFailuresBuilder_ == null) {
13480         ensurePartialFailuresIsMutable();
13481         partialFailures_.remove(index);
13482         onChanged();
13483       } else {
13484         partialFailuresBuilder_.remove(index);
13485       }
13486       return this;
13487     }
13488     /**
13489      *
13490      *
13491      * <pre>
13492      * Output only. Partial failures encountered.
13493      * For example, single files that can't be read.
13494      * This field never exceeds 20 entries.
13495      * Status details fields contain standard Google Cloud error details.
13496      * </pre>
13497      *
13498      * <code>
13499      * repeated .google.rpc.Status partial_failures = 12 [(.google.api.field_behavior) = OUTPUT_ONLY];
13500      * </code>
13501      */
getPartialFailuresBuilder(int index)13502     public com.google.rpc.Status.Builder getPartialFailuresBuilder(int index) {
13503       return getPartialFailuresFieldBuilder().getBuilder(index);
13504     }
    /**
     * Returns a read-only view ({@code StatusOrBuilder}) of the
     * {@code partial_failures} entry at {@code index}, without forcing the
     * field into builder-backed mode.
     *
     * <p>Field: {@code repeated .google.rpc.Status partial_failures = 12}
     * (output only).
     */
    public com.google.rpc.StatusOrBuilder getPartialFailuresOrBuilder(int index) {
      if (partialFailuresBuilder_ == null) {
        // List-backed mode: the message itself implements the OrBuilder view.
        return partialFailures_.get(index);
      } else {
        return partialFailuresBuilder_.getMessageOrBuilder(index);
      }
    }
    /**
     * Returns an unmodifiable, read-only view of every {@code partial_failures}
     * entry, in either backing mode.
     *
     * <p>Field: {@code repeated .google.rpc.Status partial_failures = 12}
     * (output only).
     */
    public java.util.List<? extends com.google.rpc.StatusOrBuilder>
        getPartialFailuresOrBuilderList() {
      if (partialFailuresBuilder_ != null) {
        return partialFailuresBuilder_.getMessageOrBuilderList();
      } else {
        // Wrap so callers cannot mutate the backing list behind the builder.
        return java.util.Collections.unmodifiableList(partialFailures_);
      }
    }
    /**
     * Appends a new, default-initialized {@code google.rpc.Status} entry to
     * {@code partial_failures} and returns its builder for in-place editing.
     * Switches the field into builder-backed mode.
     *
     * <p>Field: {@code repeated .google.rpc.Status partial_failures = 12}
     * (output only).
     */
    public com.google.rpc.Status.Builder addPartialFailuresBuilder() {
      return getPartialFailuresFieldBuilder()
          .addBuilder(com.google.rpc.Status.getDefaultInstance());
    }
    /**
     * Inserts a new, default-initialized {@code google.rpc.Status} entry at
     * {@code index} in {@code partial_failures} and returns its builder.
     * Switches the field into builder-backed mode.
     *
     * <p>Field: {@code repeated .google.rpc.Status partial_failures = 12}
     * (output only).
     */
    public com.google.rpc.Status.Builder addPartialFailuresBuilder(int index) {
      return getPartialFailuresFieldBuilder()
          .addBuilder(index, com.google.rpc.Status.getDefaultInstance());
    }
    /**
     * Returns builders for every {@code partial_failures} entry. Switches the
     * field into builder-backed mode on first use.
     *
     * <p>Field: {@code repeated .google.rpc.Status partial_failures = 12}
     * (output only).
     */
    public java.util.List<com.google.rpc.Status.Builder> getPartialFailuresBuilderList() {
      return getPartialFailuresFieldBuilder().getBuilderList();
    }
13601 
    /**
     * Lazily creates the {@link com.google.protobuf.RepeatedFieldBuilderV3} for
     * {@code partial_failures}, handing it the current list and then nulling the
     * list reference: from that point on the builder is the single source of
     * truth for this field.
     */
    private com.google.protobuf.RepeatedFieldBuilderV3<
            com.google.rpc.Status, com.google.rpc.Status.Builder, com.google.rpc.StatusOrBuilder>
        getPartialFailuresFieldBuilder() {
      if (partialFailuresBuilder_ == null) {
        partialFailuresBuilder_ =
            new com.google.protobuf.RepeatedFieldBuilderV3<
                com.google.rpc.Status,
                com.google.rpc.Status.Builder,
                com.google.rpc.StatusOrBuilder>(
                partialFailures_,
                // Bit 0x00020000 of bitField0_ tracks whether the list was
                // already forced mutable (i.e. is owned by this builder).
                ((bitField0_ & 0x00020000) != 0),
                getParentForChildren(),
                isClean());
        partialFailures_ = null;
      }
      return partialFailuresBuilder_;
    }
13619 
    // Backing state for the singular `resources_consumed = 13` field. Exactly
    // one of these is authoritative at a time: the plain message until a
    // builder view is requested, the SingleFieldBuilderV3 afterwards.
    private com.google.cloud.aiplatform.v1.ResourcesConsumed resourcesConsumed_;
    private com.google.protobuf.SingleFieldBuilderV3<
            com.google.cloud.aiplatform.v1.ResourcesConsumed,
            com.google.cloud.aiplatform.v1.ResourcesConsumed.Builder,
            com.google.cloud.aiplatform.v1.ResourcesConsumedOrBuilder>
        resourcesConsumedBuilder_;
    /**
     * Reports whether the output-only
     * {@code .google.cloud.aiplatform.v1.ResourcesConsumed resources_consumed = 13}
     * field has been explicitly set (bit 0x00040000 of {@code bitField0_}).
     *
     * @return Whether the resourcesConsumed field is set.
     */
    public boolean hasResourcesConsumed() {
      return ((bitField0_ & 0x00040000) != 0);
    }
    /**
     * Returns the current {@code resources_consumed = 13} value, or the type's
     * default instance when unset. Never returns {@code null}.
     *
     * @return The resourcesConsumed.
     */
    public com.google.cloud.aiplatform.v1.ResourcesConsumed getResourcesConsumed() {
      if (resourcesConsumedBuilder_ == null) {
        return resourcesConsumed_ == null
            ? com.google.cloud.aiplatform.v1.ResourcesConsumed.getDefaultInstance()
            : resourcesConsumed_;
      } else {
        return resourcesConsumedBuilder_.getMessage();
      }
    }
    /**
     * Sets {@code resources_consumed = 13} to the given message, marking the
     * field as present.
     *
     * @param value the new value; must not be {@code null}
     * @return this builder, for call chaining
     * @throws NullPointerException if {@code value} is null (list-backed mode)
     */
    public Builder setResourcesConsumed(com.google.cloud.aiplatform.v1.ResourcesConsumed value) {
      if (resourcesConsumedBuilder_ == null) {
        if (value == null) {
          throw new NullPointerException();
        }
        resourcesConsumed_ = value;
      } else {
        resourcesConsumedBuilder_.setMessage(value);
      }
      bitField0_ |= 0x00040000;
      onChanged();
      return this;
    }
    /**
     * Sets {@code resources_consumed = 13} from a builder, building it
     * immediately, and marks the field as present.
     *
     * @param builderForValue builder whose built message becomes the new value
     * @return this builder, for call chaining
     */
    public Builder setResourcesConsumed(
        com.google.cloud.aiplatform.v1.ResourcesConsumed.Builder builderForValue) {
      if (resourcesConsumedBuilder_ == null) {
        resourcesConsumed_ = builderForValue.build();
      } else {
        resourcesConsumedBuilder_.setMessage(builderForValue.build());
      }
      bitField0_ |= 0x00040000;
      onChanged();
      return this;
    }
    /**
     * Merges {@code value} into {@code resources_consumed = 13}: field-merges
     * when a non-default value is already present, otherwise replaces outright.
     *
     * @param value the message to merge in
     * @return this builder, for call chaining
     */
    public Builder mergeResourcesConsumed(com.google.cloud.aiplatform.v1.ResourcesConsumed value) {
      if (resourcesConsumedBuilder_ == null) {
        // NOTE: `!=` is a deliberate reference comparison against the
        // default-instance singleton, not an equals() check.
        if (((bitField0_ & 0x00040000) != 0)
            && resourcesConsumed_ != null
            && resourcesConsumed_
                != com.google.cloud.aiplatform.v1.ResourcesConsumed.getDefaultInstance()) {
          getResourcesConsumedBuilder().mergeFrom(value);
        } else {
          resourcesConsumed_ = value;
        }
      } else {
        resourcesConsumedBuilder_.mergeFrom(value);
      }
      bitField0_ |= 0x00040000;
      onChanged();
      return this;
    }
    /**
     * Clears {@code resources_consumed = 13}: drops the presence bit, the
     * cached message, and any nested builder.
     *
     * @return this builder, for call chaining
     */
    public Builder clearResourcesConsumed() {
      bitField0_ = (bitField0_ & ~0x00040000);
      resourcesConsumed_ = null;
      if (resourcesConsumedBuilder_ != null) {
        resourcesConsumedBuilder_.dispose();
        resourcesConsumedBuilder_ = null;
      }
      onChanged();
      return this;
    }
    /**
     * Returns a builder for {@code resources_consumed = 13}, marking the field
     * as present and switching it into builder-backed mode.
     */
    public com.google.cloud.aiplatform.v1.ResourcesConsumed.Builder getResourcesConsumedBuilder() {
      bitField0_ |= 0x00040000;
      onChanged();
      return getResourcesConsumedFieldBuilder().getBuilder();
    }
    /**
     * Returns a read-only view of {@code resources_consumed = 13} without
     * changing the field's backing mode; falls back to the default instance
     * when unset.
     */
    public com.google.cloud.aiplatform.v1.ResourcesConsumedOrBuilder
        getResourcesConsumedOrBuilder() {
      if (resourcesConsumedBuilder_ != null) {
        return resourcesConsumedBuilder_.getMessageOrBuilder();
      } else {
        return resourcesConsumed_ == null
            ? com.google.cloud.aiplatform.v1.ResourcesConsumed.getDefaultInstance()
            : resourcesConsumed_;
      }
    }
    /**
     * Lazily creates the {@link com.google.protobuf.SingleFieldBuilderV3} for
     * {@code resources_consumed = 13}, seeding it with the current value and
     * nulling the plain-message reference so the builder becomes the single
     * source of truth.
     */
    private com.google.protobuf.SingleFieldBuilderV3<
            com.google.cloud.aiplatform.v1.ResourcesConsumed,
            com.google.cloud.aiplatform.v1.ResourcesConsumed.Builder,
            com.google.cloud.aiplatform.v1.ResourcesConsumedOrBuilder>
        getResourcesConsumedFieldBuilder() {
      if (resourcesConsumedBuilder_ == null) {
        resourcesConsumedBuilder_ =
            new com.google.protobuf.SingleFieldBuilderV3<
                com.google.cloud.aiplatform.v1.ResourcesConsumed,
                com.google.cloud.aiplatform.v1.ResourcesConsumed.Builder,
                com.google.cloud.aiplatform.v1.ResourcesConsumedOrBuilder>(
                getResourcesConsumed(), getParentForChildren(), isClean());
        resourcesConsumed_ = null;
      }
      return resourcesConsumedBuilder_;
    }
13859 
    // Backing state for the singular `completion_stats = 14` field: plain
    // message until a builder view is requested, SingleFieldBuilderV3 after.
    private com.google.cloud.aiplatform.v1.CompletionStats completionStats_;
    private com.google.protobuf.SingleFieldBuilderV3<
            com.google.cloud.aiplatform.v1.CompletionStats,
            com.google.cloud.aiplatform.v1.CompletionStats.Builder,
            com.google.cloud.aiplatform.v1.CompletionStatsOrBuilder>
        completionStatsBuilder_;
    /**
     * Reports whether the output-only
     * {@code .google.cloud.aiplatform.v1.CompletionStats completion_stats = 14}
     * field has been explicitly set (bit 0x00080000 of {@code bitField0_}).
     *
     * @return Whether the completionStats field is set.
     */
    public boolean hasCompletionStats() {
      return ((bitField0_ & 0x00080000) != 0);
    }
    /**
     * Returns the current {@code completion_stats = 14} value, or the type's
     * default instance when unset. Never returns {@code null}.
     *
     * @return The completionStats.
     */
    public com.google.cloud.aiplatform.v1.CompletionStats getCompletionStats() {
      if (completionStatsBuilder_ == null) {
        return completionStats_ == null
            ? com.google.cloud.aiplatform.v1.CompletionStats.getDefaultInstance()
            : completionStats_;
      } else {
        return completionStatsBuilder_.getMessage();
      }
    }
    /**
     * Sets {@code completion_stats = 14} to the given message, marking the
     * field as present.
     *
     * @param value the new value; must not be {@code null}
     * @return this builder, for call chaining
     */
    public Builder setCompletionStats(com.google.cloud.aiplatform.v1.CompletionStats value) {
      if (completionStatsBuilder_ == null) {
        if (value == null) {
          throw new NullPointerException();
        }
        completionStats_ = value;
      } else {
        completionStatsBuilder_.setMessage(value);
      }
      bitField0_ |= 0x00080000;
      onChanged();
      return this;
    }
    /**
     * Sets {@code completion_stats = 14} from a builder, building it
     * immediately, and marks the field as present.
     *
     * @param builderForValue builder whose built message becomes the new value
     * @return this builder, for call chaining
     */
    public Builder setCompletionStats(
        com.google.cloud.aiplatform.v1.CompletionStats.Builder builderForValue) {
      if (completionStatsBuilder_ == null) {
        completionStats_ = builderForValue.build();
      } else {
        completionStatsBuilder_.setMessage(builderForValue.build());
      }
      bitField0_ |= 0x00080000;
      onChanged();
      return this;
    }
    /**
     * Merges {@code value} into {@code completion_stats = 14}: field-merges
     * when a non-default value is already present, otherwise replaces outright.
     *
     * @param value the message to merge in
     * @return this builder, for call chaining
     */
    public Builder mergeCompletionStats(com.google.cloud.aiplatform.v1.CompletionStats value) {
      if (completionStatsBuilder_ == null) {
        // NOTE: `!=` is a deliberate reference comparison against the
        // default-instance singleton, not an equals() check.
        if (((bitField0_ & 0x00080000) != 0)
            && completionStats_ != null
            && completionStats_
                != com.google.cloud.aiplatform.v1.CompletionStats.getDefaultInstance()) {
          getCompletionStatsBuilder().mergeFrom(value);
        } else {
          completionStats_ = value;
        }
      } else {
        completionStatsBuilder_.mergeFrom(value);
      }
      bitField0_ |= 0x00080000;
      onChanged();
      return this;
    }
    /**
     * Clears {@code completion_stats = 14}: drops the presence bit, the cached
     * message, and any nested builder.
     *
     * @return this builder, for call chaining
     */
    public Builder clearCompletionStats() {
      bitField0_ = (bitField0_ & ~0x00080000);
      completionStats_ = null;
      if (completionStatsBuilder_ != null) {
        completionStatsBuilder_.dispose();
        completionStatsBuilder_ = null;
      }
      onChanged();
      return this;
    }
    /**
     * Returns a builder for {@code completion_stats = 14}, marking the field as
     * present and switching it into builder-backed mode.
     */
    public com.google.cloud.aiplatform.v1.CompletionStats.Builder getCompletionStatsBuilder() {
      bitField0_ |= 0x00080000;
      onChanged();
      return getCompletionStatsFieldBuilder().getBuilder();
    }
    /**
     * Returns a read-only view of {@code completion_stats = 14} without
     * changing the field's backing mode; falls back to the default instance
     * when unset.
     */
    public com.google.cloud.aiplatform.v1.CompletionStatsOrBuilder getCompletionStatsOrBuilder() {
      if (completionStatsBuilder_ != null) {
        return completionStatsBuilder_.getMessageOrBuilder();
      } else {
        return completionStats_ == null
            ? com.google.cloud.aiplatform.v1.CompletionStats.getDefaultInstance()
            : completionStats_;
      }
    }
    /**
     * Lazily creates the {@link com.google.protobuf.SingleFieldBuilderV3} for
     * {@code completion_stats = 14}, seeding it with the current value and
     * nulling the plain-message reference so the builder becomes the single
     * source of truth.
     */
    private com.google.protobuf.SingleFieldBuilderV3<
            com.google.cloud.aiplatform.v1.CompletionStats,
            com.google.cloud.aiplatform.v1.CompletionStats.Builder,
            com.google.cloud.aiplatform.v1.CompletionStatsOrBuilder>
        getCompletionStatsFieldBuilder() {
      if (completionStatsBuilder_ == null) {
        completionStatsBuilder_ =
            new com.google.protobuf.SingleFieldBuilderV3<
                com.google.cloud.aiplatform.v1.CompletionStats,
                com.google.cloud.aiplatform.v1.CompletionStats.Builder,
                com.google.cloud.aiplatform.v1.CompletionStatsOrBuilder>(
                getCompletionStats(), getParentForChildren(), isClean());
        completionStats_ = null;
      }
      return completionStatsBuilder_;
    }
14062 
    // Backing state for the singular `create_time = 15` timestamp field: plain
    // message until a builder view is requested, SingleFieldBuilderV3 after.
    private com.google.protobuf.Timestamp createTime_;
    private com.google.protobuf.SingleFieldBuilderV3<
            com.google.protobuf.Timestamp,
            com.google.protobuf.Timestamp.Builder,
            com.google.protobuf.TimestampOrBuilder>
        createTimeBuilder_;
    /**
     * Reports whether the output-only
     * {@code .google.protobuf.Timestamp create_time = 15} field (time the
     * BatchPredictionJob was created) has been explicitly set
     * (bit 0x00100000 of {@code bitField0_}).
     *
     * @return Whether the createTime field is set.
     */
    public boolean hasCreateTime() {
      return ((bitField0_ & 0x00100000) != 0);
    }
    /**
     * Returns the current {@code create_time = 15} value, or the Timestamp
     * default instance when unset. Never returns {@code null}.
     *
     * @return The createTime.
     */
    public com.google.protobuf.Timestamp getCreateTime() {
      if (createTimeBuilder_ == null) {
        return createTime_ == null
            ? com.google.protobuf.Timestamp.getDefaultInstance()
            : createTime_;
      } else {
        return createTimeBuilder_.getMessage();
      }
    }
    /**
     * Sets {@code create_time = 15} to the given timestamp, marking the field
     * as present.
     *
     * @param value the new value; must not be {@code null}
     * @return this builder, for call chaining
     */
    public Builder setCreateTime(com.google.protobuf.Timestamp value) {
      if (createTimeBuilder_ == null) {
        if (value == null) {
          throw new NullPointerException();
        }
        createTime_ = value;
      } else {
        createTimeBuilder_.setMessage(value);
      }
      bitField0_ |= 0x00100000;
      onChanged();
      return this;
    }
    /**
     * Sets {@code create_time = 15} from a builder, building it immediately,
     * and marks the field as present.
     *
     * @param builderForValue builder whose built timestamp becomes the value
     * @return this builder, for call chaining
     */
    public Builder setCreateTime(com.google.protobuf.Timestamp.Builder builderForValue) {
      if (createTimeBuilder_ == null) {
        createTime_ = builderForValue.build();
      } else {
        createTimeBuilder_.setMessage(builderForValue.build());
      }
      bitField0_ |= 0x00100000;
      onChanged();
      return this;
    }
    /**
     * Merges {@code value} into {@code create_time = 15}: field-merges when a
     * non-default value is already present, otherwise replaces outright.
     *
     * @param value the timestamp to merge in
     * @return this builder, for call chaining
     */
    public Builder mergeCreateTime(com.google.protobuf.Timestamp value) {
      if (createTimeBuilder_ == null) {
        // NOTE: `!=` is a deliberate reference comparison against the
        // default-instance singleton, not an equals() check.
        if (((bitField0_ & 0x00100000) != 0)
            && createTime_ != null
            && createTime_ != com.google.protobuf.Timestamp.getDefaultInstance()) {
          getCreateTimeBuilder().mergeFrom(value);
        } else {
          createTime_ = value;
        }
      } else {
        createTimeBuilder_.mergeFrom(value);
      }
      bitField0_ |= 0x00100000;
      onChanged();
      return this;
    }
    /**
     * Clears {@code create_time = 15}: drops the presence bit, the cached
     * timestamp, and any nested builder.
     *
     * @return this builder, for call chaining
     */
    public Builder clearCreateTime() {
      bitField0_ = (bitField0_ & ~0x00100000);
      createTime_ = null;
      if (createTimeBuilder_ != null) {
        createTimeBuilder_.dispose();
        createTimeBuilder_ = null;
      }
      onChanged();
      return this;
    }
    /**
     * Returns a builder for {@code create_time = 15}, marking the field as
     * present and switching it into builder-backed mode.
     */
    public com.google.protobuf.Timestamp.Builder getCreateTimeBuilder() {
      bitField0_ |= 0x00100000;
      onChanged();
      return getCreateTimeFieldBuilder().getBuilder();
    }
    /**
     * Returns a read-only view of {@code create_time = 15} without changing
     * the field's backing mode; falls back to the default instance when unset.
     */
    public com.google.protobuf.TimestampOrBuilder getCreateTimeOrBuilder() {
      if (createTimeBuilder_ != null) {
        return createTimeBuilder_.getMessageOrBuilder();
      } else {
        return createTime_ == null
            ? com.google.protobuf.Timestamp.getDefaultInstance()
            : createTime_;
      }
    }
    /**
     * Lazily creates the {@link com.google.protobuf.SingleFieldBuilderV3} for
     * {@code create_time = 15}, seeding it with the current value and nulling
     * the plain-message reference so the builder becomes the single source of
     * truth.
     */
    private com.google.protobuf.SingleFieldBuilderV3<
            com.google.protobuf.Timestamp,
            com.google.protobuf.Timestamp.Builder,
            com.google.protobuf.TimestampOrBuilder>
        getCreateTimeFieldBuilder() {
      if (createTimeBuilder_ == null) {
        createTimeBuilder_ =
            new com.google.protobuf.SingleFieldBuilderV3<
                com.google.protobuf.Timestamp,
                com.google.protobuf.Timestamp.Builder,
                com.google.protobuf.TimestampOrBuilder>(
                getCreateTime(), getParentForChildren(), isClean());
        createTime_ = null;
      }
      return createTimeBuilder_;
    }
14263 
    // Backing state for the singular `start_time = 16` timestamp field: plain
    // message until a builder view is requested, SingleFieldBuilderV3 after.
    private com.google.protobuf.Timestamp startTime_;
    private com.google.protobuf.SingleFieldBuilderV3<
            com.google.protobuf.Timestamp,
            com.google.protobuf.Timestamp.Builder,
            com.google.protobuf.TimestampOrBuilder>
        startTimeBuilder_;
    /**
     * Reports whether the output-only
     * {@code .google.protobuf.Timestamp start_time = 16} field (time the job
     * first entered {@code JOB_STATE_RUNNING}) has been explicitly set
     * (bit 0x00200000 of {@code bitField0_}).
     *
     * @return Whether the startTime field is set.
     */
    public boolean hasStartTime() {
      return ((bitField0_ & 0x00200000) != 0);
    }
    /**
     * Returns the current {@code start_time = 16} value, or the Timestamp
     * default instance when unset. Never returns {@code null}.
     *
     * @return The startTime.
     */
    public com.google.protobuf.Timestamp getStartTime() {
      if (startTimeBuilder_ == null) {
        return startTime_ == null ? com.google.protobuf.Timestamp.getDefaultInstance() : startTime_;
      } else {
        return startTimeBuilder_.getMessage();
      }
    }
14308     /**
14309      *
14310      *
14311      * <pre>
14312      * Output only. Time when the BatchPredictionJob for the first time entered
14313      * the `JOB_STATE_RUNNING` state.
14314      * </pre>
14315      *
14316      * <code>
14317      * .google.protobuf.Timestamp start_time = 16 [(.google.api.field_behavior) = OUTPUT_ONLY];
14318      * </code>
14319      */
setStartTime(com.google.protobuf.Timestamp value)14320     public Builder setStartTime(com.google.protobuf.Timestamp value) {
14321       if (startTimeBuilder_ == null) {
14322         if (value == null) {
14323           throw new NullPointerException();
14324         }
14325         startTime_ = value;
14326       } else {
14327         startTimeBuilder_.setMessage(value);
14328       }
14329       bitField0_ |= 0x00200000;
14330       onChanged();
14331       return this;
14332     }
14333     /**
14334      *
14335      *
14336      * <pre>
14337      * Output only. Time when the BatchPredictionJob for the first time entered
14338      * the `JOB_STATE_RUNNING` state.
14339      * </pre>
14340      *
14341      * <code>
14342      * .google.protobuf.Timestamp start_time = 16 [(.google.api.field_behavior) = OUTPUT_ONLY];
14343      * </code>
14344      */
setStartTime(com.google.protobuf.Timestamp.Builder builderForValue)14345     public Builder setStartTime(com.google.protobuf.Timestamp.Builder builderForValue) {
14346       if (startTimeBuilder_ == null) {
14347         startTime_ = builderForValue.build();
14348       } else {
14349         startTimeBuilder_.setMessage(builderForValue.build());
14350       }
14351       bitField0_ |= 0x00200000;
14352       onChanged();
14353       return this;
14354     }
    /**
     *
     *
     * <pre>
     * Output only. Time when the BatchPredictionJob for the first time entered
     * the `JOB_STATE_RUNNING` state.
     * </pre>
     *
     * <code>
     * .google.protobuf.Timestamp start_time = 16 [(.google.api.field_behavior) = OUTPUT_ONLY];
     * </code>
     */
    public Builder mergeStartTime(com.google.protobuf.Timestamp value) {
      if (startTimeBuilder_ == null) {
        // Intentional reference (not equals) comparison against the shared
        // default instance: merging into the default is equivalent to simply
        // adopting `value`, so the merge path is taken only when a real
        // message has already been set.
        if (((bitField0_ & 0x00200000) != 0)
            && startTime_ != null
            && startTime_ != com.google.protobuf.Timestamp.getDefaultInstance()) {
          getStartTimeBuilder().mergeFrom(value);
        } else {
          startTime_ = value;
        }
      } else {
        startTimeBuilder_.mergeFrom(value);
      }
      bitField0_ |= 0x00200000;
      onChanged();
      return this;
    }
14383     /**
14384      *
14385      *
14386      * <pre>
14387      * Output only. Time when the BatchPredictionJob for the first time entered
14388      * the `JOB_STATE_RUNNING` state.
14389      * </pre>
14390      *
14391      * <code>
14392      * .google.protobuf.Timestamp start_time = 16 [(.google.api.field_behavior) = OUTPUT_ONLY];
14393      * </code>
14394      */
clearStartTime()14395     public Builder clearStartTime() {
14396       bitField0_ = (bitField0_ & ~0x00200000);
14397       startTime_ = null;
14398       if (startTimeBuilder_ != null) {
14399         startTimeBuilder_.dispose();
14400         startTimeBuilder_ = null;
14401       }
14402       onChanged();
14403       return this;
14404     }
14405     /**
14406      *
14407      *
14408      * <pre>
14409      * Output only. Time when the BatchPredictionJob for the first time entered
14410      * the `JOB_STATE_RUNNING` state.
14411      * </pre>
14412      *
14413      * <code>
14414      * .google.protobuf.Timestamp start_time = 16 [(.google.api.field_behavior) = OUTPUT_ONLY];
14415      * </code>
14416      */
getStartTimeBuilder()14417     public com.google.protobuf.Timestamp.Builder getStartTimeBuilder() {
14418       bitField0_ |= 0x00200000;
14419       onChanged();
14420       return getStartTimeFieldBuilder().getBuilder();
14421     }
14422     /**
14423      *
14424      *
14425      * <pre>
14426      * Output only. Time when the BatchPredictionJob for the first time entered
14427      * the `JOB_STATE_RUNNING` state.
14428      * </pre>
14429      *
14430      * <code>
14431      * .google.protobuf.Timestamp start_time = 16 [(.google.api.field_behavior) = OUTPUT_ONLY];
14432      * </code>
14433      */
getStartTimeOrBuilder()14434     public com.google.protobuf.TimestampOrBuilder getStartTimeOrBuilder() {
14435       if (startTimeBuilder_ != null) {
14436         return startTimeBuilder_.getMessageOrBuilder();
14437       } else {
14438         return startTime_ == null ? com.google.protobuf.Timestamp.getDefaultInstance() : startTime_;
14439       }
14440     }
    /**
     *
     *
     * <pre>
     * Output only. Time when the BatchPredictionJob for the first time entered
     * the `JOB_STATE_RUNNING` state.
     * </pre>
     *
     * <code>
     * .google.protobuf.Timestamp start_time = 16 [(.google.api.field_behavior) = OUTPUT_ONLY];
     * </code>
     */
    private com.google.protobuf.SingleFieldBuilderV3<
            com.google.protobuf.Timestamp,
            com.google.protobuf.Timestamp.Builder,
            com.google.protobuf.TimestampOrBuilder>
        getStartTimeFieldBuilder() {
      if (startTimeBuilder_ == null) {
        // Seed the builder with the current value via getStartTime() BEFORE
        // nulling startTime_; from here on the builder owns the field state.
        startTimeBuilder_ =
            new com.google.protobuf.SingleFieldBuilderV3<
                com.google.protobuf.Timestamp,
                com.google.protobuf.Timestamp.Builder,
                com.google.protobuf.TimestampOrBuilder>(
                getStartTime(), getParentForChildren(), isClean());
        startTime_ = null;
      }
      return startTimeBuilder_;
    }
14469 
    // `end_time` (field 17). Plain message is authoritative until a nested
    // builder is created via getEndTimeFieldBuilder(), after which the builder
    // owns the value and `endTime_` is set to null.
    private com.google.protobuf.Timestamp endTime_;
    private com.google.protobuf.SingleFieldBuilderV3<
            com.google.protobuf.Timestamp,
            com.google.protobuf.Timestamp.Builder,
            com.google.protobuf.TimestampOrBuilder>
        endTimeBuilder_;
14476     /**
14477      *
14478      *
14479      * <pre>
14480      * Output only. Time when the BatchPredictionJob entered any of the following
14481      * states: `JOB_STATE_SUCCEEDED`, `JOB_STATE_FAILED`, `JOB_STATE_CANCELLED`.
14482      * </pre>
14483      *
14484      * <code>.google.protobuf.Timestamp end_time = 17 [(.google.api.field_behavior) = OUTPUT_ONLY];
14485      * </code>
14486      *
14487      * @return Whether the endTime field is set.
14488      */
hasEndTime()14489     public boolean hasEndTime() {
14490       return ((bitField0_ & 0x00400000) != 0);
14491     }
14492     /**
14493      *
14494      *
14495      * <pre>
14496      * Output only. Time when the BatchPredictionJob entered any of the following
14497      * states: `JOB_STATE_SUCCEEDED`, `JOB_STATE_FAILED`, `JOB_STATE_CANCELLED`.
14498      * </pre>
14499      *
14500      * <code>.google.protobuf.Timestamp end_time = 17 [(.google.api.field_behavior) = OUTPUT_ONLY];
14501      * </code>
14502      *
14503      * @return The endTime.
14504      */
getEndTime()14505     public com.google.protobuf.Timestamp getEndTime() {
14506       if (endTimeBuilder_ == null) {
14507         return endTime_ == null ? com.google.protobuf.Timestamp.getDefaultInstance() : endTime_;
14508       } else {
14509         return endTimeBuilder_.getMessage();
14510       }
14511     }
14512     /**
14513      *
14514      *
14515      * <pre>
14516      * Output only. Time when the BatchPredictionJob entered any of the following
14517      * states: `JOB_STATE_SUCCEEDED`, `JOB_STATE_FAILED`, `JOB_STATE_CANCELLED`.
14518      * </pre>
14519      *
14520      * <code>.google.protobuf.Timestamp end_time = 17 [(.google.api.field_behavior) = OUTPUT_ONLY];
14521      * </code>
14522      */
setEndTime(com.google.protobuf.Timestamp value)14523     public Builder setEndTime(com.google.protobuf.Timestamp value) {
14524       if (endTimeBuilder_ == null) {
14525         if (value == null) {
14526           throw new NullPointerException();
14527         }
14528         endTime_ = value;
14529       } else {
14530         endTimeBuilder_.setMessage(value);
14531       }
14532       bitField0_ |= 0x00400000;
14533       onChanged();
14534       return this;
14535     }
14536     /**
14537      *
14538      *
14539      * <pre>
14540      * Output only. Time when the BatchPredictionJob entered any of the following
14541      * states: `JOB_STATE_SUCCEEDED`, `JOB_STATE_FAILED`, `JOB_STATE_CANCELLED`.
14542      * </pre>
14543      *
14544      * <code>.google.protobuf.Timestamp end_time = 17 [(.google.api.field_behavior) = OUTPUT_ONLY];
14545      * </code>
14546      */
setEndTime(com.google.protobuf.Timestamp.Builder builderForValue)14547     public Builder setEndTime(com.google.protobuf.Timestamp.Builder builderForValue) {
14548       if (endTimeBuilder_ == null) {
14549         endTime_ = builderForValue.build();
14550       } else {
14551         endTimeBuilder_.setMessage(builderForValue.build());
14552       }
14553       bitField0_ |= 0x00400000;
14554       onChanged();
14555       return this;
14556     }
    /**
     *
     *
     * <pre>
     * Output only. Time when the BatchPredictionJob entered any of the following
     * states: `JOB_STATE_SUCCEEDED`, `JOB_STATE_FAILED`, `JOB_STATE_CANCELLED`.
     * </pre>
     *
     * <code>.google.protobuf.Timestamp end_time = 17 [(.google.api.field_behavior) = OUTPUT_ONLY];
     * </code>
     */
    public Builder mergeEndTime(com.google.protobuf.Timestamp value) {
      if (endTimeBuilder_ == null) {
        // Intentional reference (not equals) comparison against the shared
        // default instance: only field-merge when a real message was already
        // set; otherwise simply adopt `value`.
        if (((bitField0_ & 0x00400000) != 0)
            && endTime_ != null
            && endTime_ != com.google.protobuf.Timestamp.getDefaultInstance()) {
          getEndTimeBuilder().mergeFrom(value);
        } else {
          endTime_ = value;
        }
      } else {
        endTimeBuilder_.mergeFrom(value);
      }
      bitField0_ |= 0x00400000;
      onChanged();
      return this;
    }
14584     /**
14585      *
14586      *
14587      * <pre>
14588      * Output only. Time when the BatchPredictionJob entered any of the following
14589      * states: `JOB_STATE_SUCCEEDED`, `JOB_STATE_FAILED`, `JOB_STATE_CANCELLED`.
14590      * </pre>
14591      *
14592      * <code>.google.protobuf.Timestamp end_time = 17 [(.google.api.field_behavior) = OUTPUT_ONLY];
14593      * </code>
14594      */
clearEndTime()14595     public Builder clearEndTime() {
14596       bitField0_ = (bitField0_ & ~0x00400000);
14597       endTime_ = null;
14598       if (endTimeBuilder_ != null) {
14599         endTimeBuilder_.dispose();
14600         endTimeBuilder_ = null;
14601       }
14602       onChanged();
14603       return this;
14604     }
14605     /**
14606      *
14607      *
14608      * <pre>
14609      * Output only. Time when the BatchPredictionJob entered any of the following
14610      * states: `JOB_STATE_SUCCEEDED`, `JOB_STATE_FAILED`, `JOB_STATE_CANCELLED`.
14611      * </pre>
14612      *
14613      * <code>.google.protobuf.Timestamp end_time = 17 [(.google.api.field_behavior) = OUTPUT_ONLY];
14614      * </code>
14615      */
getEndTimeBuilder()14616     public com.google.protobuf.Timestamp.Builder getEndTimeBuilder() {
14617       bitField0_ |= 0x00400000;
14618       onChanged();
14619       return getEndTimeFieldBuilder().getBuilder();
14620     }
14621     /**
14622      *
14623      *
14624      * <pre>
14625      * Output only. Time when the BatchPredictionJob entered any of the following
14626      * states: `JOB_STATE_SUCCEEDED`, `JOB_STATE_FAILED`, `JOB_STATE_CANCELLED`.
14627      * </pre>
14628      *
14629      * <code>.google.protobuf.Timestamp end_time = 17 [(.google.api.field_behavior) = OUTPUT_ONLY];
14630      * </code>
14631      */
getEndTimeOrBuilder()14632     public com.google.protobuf.TimestampOrBuilder getEndTimeOrBuilder() {
14633       if (endTimeBuilder_ != null) {
14634         return endTimeBuilder_.getMessageOrBuilder();
14635       } else {
14636         return endTime_ == null ? com.google.protobuf.Timestamp.getDefaultInstance() : endTime_;
14637       }
14638     }
    /**
     *
     *
     * <pre>
     * Output only. Time when the BatchPredictionJob entered any of the following
     * states: `JOB_STATE_SUCCEEDED`, `JOB_STATE_FAILED`, `JOB_STATE_CANCELLED`.
     * </pre>
     *
     * <code>.google.protobuf.Timestamp end_time = 17 [(.google.api.field_behavior) = OUTPUT_ONLY];
     * </code>
     */
    private com.google.protobuf.SingleFieldBuilderV3<
            com.google.protobuf.Timestamp,
            com.google.protobuf.Timestamp.Builder,
            com.google.protobuf.TimestampOrBuilder>
        getEndTimeFieldBuilder() {
      if (endTimeBuilder_ == null) {
        // Seed the builder with the current value via getEndTime() BEFORE
        // nulling endTime_; from here on the builder owns the field state.
        endTimeBuilder_ =
            new com.google.protobuf.SingleFieldBuilderV3<
                com.google.protobuf.Timestamp,
                com.google.protobuf.Timestamp.Builder,
                com.google.protobuf.TimestampOrBuilder>(
                getEndTime(), getParentForChildren(), isClean());
        endTime_ = null;
      }
      return endTimeBuilder_;
    }
14666 
    // `update_time` (field 18). Plain message is authoritative until a nested
    // builder is created via getUpdateTimeFieldBuilder(), after which the
    // builder owns the value and `updateTime_` is set to null.
    private com.google.protobuf.Timestamp updateTime_;
    private com.google.protobuf.SingleFieldBuilderV3<
            com.google.protobuf.Timestamp,
            com.google.protobuf.Timestamp.Builder,
            com.google.protobuf.TimestampOrBuilder>
        updateTimeBuilder_;
14673     /**
14674      *
14675      *
14676      * <pre>
14677      * Output only. Time when the BatchPredictionJob was most recently updated.
14678      * </pre>
14679      *
14680      * <code>
14681      * .google.protobuf.Timestamp update_time = 18 [(.google.api.field_behavior) = OUTPUT_ONLY];
14682      * </code>
14683      *
14684      * @return Whether the updateTime field is set.
14685      */
hasUpdateTime()14686     public boolean hasUpdateTime() {
14687       return ((bitField0_ & 0x00800000) != 0);
14688     }
14689     /**
14690      *
14691      *
14692      * <pre>
14693      * Output only. Time when the BatchPredictionJob was most recently updated.
14694      * </pre>
14695      *
14696      * <code>
14697      * .google.protobuf.Timestamp update_time = 18 [(.google.api.field_behavior) = OUTPUT_ONLY];
14698      * </code>
14699      *
14700      * @return The updateTime.
14701      */
getUpdateTime()14702     public com.google.protobuf.Timestamp getUpdateTime() {
14703       if (updateTimeBuilder_ == null) {
14704         return updateTime_ == null
14705             ? com.google.protobuf.Timestamp.getDefaultInstance()
14706             : updateTime_;
14707       } else {
14708         return updateTimeBuilder_.getMessage();
14709       }
14710     }
14711     /**
14712      *
14713      *
14714      * <pre>
14715      * Output only. Time when the BatchPredictionJob was most recently updated.
14716      * </pre>
14717      *
14718      * <code>
14719      * .google.protobuf.Timestamp update_time = 18 [(.google.api.field_behavior) = OUTPUT_ONLY];
14720      * </code>
14721      */
setUpdateTime(com.google.protobuf.Timestamp value)14722     public Builder setUpdateTime(com.google.protobuf.Timestamp value) {
14723       if (updateTimeBuilder_ == null) {
14724         if (value == null) {
14725           throw new NullPointerException();
14726         }
14727         updateTime_ = value;
14728       } else {
14729         updateTimeBuilder_.setMessage(value);
14730       }
14731       bitField0_ |= 0x00800000;
14732       onChanged();
14733       return this;
14734     }
14735     /**
14736      *
14737      *
14738      * <pre>
14739      * Output only. Time when the BatchPredictionJob was most recently updated.
14740      * </pre>
14741      *
14742      * <code>
14743      * .google.protobuf.Timestamp update_time = 18 [(.google.api.field_behavior) = OUTPUT_ONLY];
14744      * </code>
14745      */
setUpdateTime(com.google.protobuf.Timestamp.Builder builderForValue)14746     public Builder setUpdateTime(com.google.protobuf.Timestamp.Builder builderForValue) {
14747       if (updateTimeBuilder_ == null) {
14748         updateTime_ = builderForValue.build();
14749       } else {
14750         updateTimeBuilder_.setMessage(builderForValue.build());
14751       }
14752       bitField0_ |= 0x00800000;
14753       onChanged();
14754       return this;
14755     }
    /**
     *
     *
     * <pre>
     * Output only. Time when the BatchPredictionJob was most recently updated.
     * </pre>
     *
     * <code>
     * .google.protobuf.Timestamp update_time = 18 [(.google.api.field_behavior) = OUTPUT_ONLY];
     * </code>
     */
    public Builder mergeUpdateTime(com.google.protobuf.Timestamp value) {
      if (updateTimeBuilder_ == null) {
        // Intentional reference (not equals) comparison against the shared
        // default instance: only field-merge when a real message was already
        // set; otherwise simply adopt `value`.
        if (((bitField0_ & 0x00800000) != 0)
            && updateTime_ != null
            && updateTime_ != com.google.protobuf.Timestamp.getDefaultInstance()) {
          getUpdateTimeBuilder().mergeFrom(value);
        } else {
          updateTime_ = value;
        }
      } else {
        updateTimeBuilder_.mergeFrom(value);
      }
      bitField0_ |= 0x00800000;
      onChanged();
      return this;
    }
14783     /**
14784      *
14785      *
14786      * <pre>
14787      * Output only. Time when the BatchPredictionJob was most recently updated.
14788      * </pre>
14789      *
14790      * <code>
14791      * .google.protobuf.Timestamp update_time = 18 [(.google.api.field_behavior) = OUTPUT_ONLY];
14792      * </code>
14793      */
clearUpdateTime()14794     public Builder clearUpdateTime() {
14795       bitField0_ = (bitField0_ & ~0x00800000);
14796       updateTime_ = null;
14797       if (updateTimeBuilder_ != null) {
14798         updateTimeBuilder_.dispose();
14799         updateTimeBuilder_ = null;
14800       }
14801       onChanged();
14802       return this;
14803     }
14804     /**
14805      *
14806      *
14807      * <pre>
14808      * Output only. Time when the BatchPredictionJob was most recently updated.
14809      * </pre>
14810      *
14811      * <code>
14812      * .google.protobuf.Timestamp update_time = 18 [(.google.api.field_behavior) = OUTPUT_ONLY];
14813      * </code>
14814      */
getUpdateTimeBuilder()14815     public com.google.protobuf.Timestamp.Builder getUpdateTimeBuilder() {
14816       bitField0_ |= 0x00800000;
14817       onChanged();
14818       return getUpdateTimeFieldBuilder().getBuilder();
14819     }
14820     /**
14821      *
14822      *
14823      * <pre>
14824      * Output only. Time when the BatchPredictionJob was most recently updated.
14825      * </pre>
14826      *
14827      * <code>
14828      * .google.protobuf.Timestamp update_time = 18 [(.google.api.field_behavior) = OUTPUT_ONLY];
14829      * </code>
14830      */
getUpdateTimeOrBuilder()14831     public com.google.protobuf.TimestampOrBuilder getUpdateTimeOrBuilder() {
14832       if (updateTimeBuilder_ != null) {
14833         return updateTimeBuilder_.getMessageOrBuilder();
14834       } else {
14835         return updateTime_ == null
14836             ? com.google.protobuf.Timestamp.getDefaultInstance()
14837             : updateTime_;
14838       }
14839     }
    /**
     *
     *
     * <pre>
     * Output only. Time when the BatchPredictionJob was most recently updated.
     * </pre>
     *
     * <code>
     * .google.protobuf.Timestamp update_time = 18 [(.google.api.field_behavior) = OUTPUT_ONLY];
     * </code>
     */
    private com.google.protobuf.SingleFieldBuilderV3<
            com.google.protobuf.Timestamp,
            com.google.protobuf.Timestamp.Builder,
            com.google.protobuf.TimestampOrBuilder>
        getUpdateTimeFieldBuilder() {
      if (updateTimeBuilder_ == null) {
        // Seed the builder with the current value via getUpdateTime() BEFORE
        // nulling updateTime_; from here on the builder owns the field state.
        updateTimeBuilder_ =
            new com.google.protobuf.SingleFieldBuilderV3<
                com.google.protobuf.Timestamp,
                com.google.protobuf.Timestamp.Builder,
                com.google.protobuf.TimestampOrBuilder>(
                getUpdateTime(), getParentForChildren(), isClean());
        updateTime_ = null;
      }
      return updateTimeBuilder_;
    }
14867 
14868     private com.google.protobuf.MapField<java.lang.String, java.lang.String> labels_;
14869 
internalGetLabels()14870     private com.google.protobuf.MapField<java.lang.String, java.lang.String> internalGetLabels() {
14871       if (labels_ == null) {
14872         return com.google.protobuf.MapField.emptyMapField(LabelsDefaultEntryHolder.defaultEntry);
14873       }
14874       return labels_;
14875     }
14876 
14877     private com.google.protobuf.MapField<java.lang.String, java.lang.String>
internalGetMutableLabels()14878         internalGetMutableLabels() {
14879       if (labels_ == null) {
14880         labels_ = com.google.protobuf.MapField.newMapField(LabelsDefaultEntryHolder.defaultEntry);
14881       }
14882       if (!labels_.isMutable()) {
14883         labels_ = labels_.copy();
14884       }
14885       bitField0_ |= 0x01000000;
14886       onChanged();
14887       return labels_;
14888     }
14889 
getLabelsCount()14890     public int getLabelsCount() {
14891       return internalGetLabels().getMap().size();
14892     }
14893     /**
14894      *
14895      *
14896      * <pre>
14897      * The labels with user-defined metadata to organize BatchPredictionJobs.
14898      * Label keys and values can be no longer than 64 characters
14899      * (Unicode codepoints), can only contain lowercase letters, numeric
14900      * characters, underscores and dashes. International characters are allowed.
14901      * See https://goo.gl/xmQnxf for more information and examples of labels.
14902      * </pre>
14903      *
14904      * <code>map&lt;string, string&gt; labels = 19;</code>
14905      */
14906     @java.lang.Override
containsLabels(java.lang.String key)14907     public boolean containsLabels(java.lang.String key) {
14908       if (key == null) {
14909         throw new NullPointerException("map key");
14910       }
14911       return internalGetLabels().getMap().containsKey(key);
14912     }
    /** Use {@link #getLabelsMap()} instead. */
    @java.lang.Override
    @java.lang.Deprecated
    public java.util.Map<java.lang.String, java.lang.String> getLabels() {
      // Retained only for backward compatibility; delegates to the preferred
      // map accessor.
      return getLabelsMap();
    }
    /**
     *
     *
     * <pre>
     * The labels with user-defined metadata to organize BatchPredictionJobs.
     * Label keys and values can be no longer than 64 characters
     * (Unicode codepoints), can only contain lowercase letters, numeric
     * characters, underscores and dashes. International characters are allowed.
     * See https://goo.gl/xmQnxf for more information and examples of labels.
     * </pre>
     *
     * <code>map&lt;string, string&gt; labels = 19;</code>
     */
    @java.lang.Override
    public java.util.Map<java.lang.String, java.lang.String> getLabelsMap() {
      // NOTE(review): presumably a read-only view per MapField.getMap — confirm
      // against the protobuf runtime before mutating the returned map.
      return internalGetLabels().getMap();
    }
14936     /**
14937      *
14938      *
14939      * <pre>
14940      * The labels with user-defined metadata to organize BatchPredictionJobs.
14941      * Label keys and values can be no longer than 64 characters
14942      * (Unicode codepoints), can only contain lowercase letters, numeric
14943      * characters, underscores and dashes. International characters are allowed.
14944      * See https://goo.gl/xmQnxf for more information and examples of labels.
14945      * </pre>
14946      *
14947      * <code>map&lt;string, string&gt; labels = 19;</code>
14948      */
14949     @java.lang.Override
getLabelsOrDefault( java.lang.String key, java.lang.String defaultValue)14950     public /* nullable */ java.lang.String getLabelsOrDefault(
14951         java.lang.String key,
14952         /* nullable */
14953         java.lang.String defaultValue) {
14954       if (key == null) {
14955         throw new NullPointerException("map key");
14956       }
14957       java.util.Map<java.lang.String, java.lang.String> map = internalGetLabels().getMap();
14958       return map.containsKey(key) ? map.get(key) : defaultValue;
14959     }
14960     /**
14961      *
14962      *
14963      * <pre>
14964      * The labels with user-defined metadata to organize BatchPredictionJobs.
14965      * Label keys and values can be no longer than 64 characters
14966      * (Unicode codepoints), can only contain lowercase letters, numeric
14967      * characters, underscores and dashes. International characters are allowed.
14968      * See https://goo.gl/xmQnxf for more information and examples of labels.
14969      * </pre>
14970      *
14971      * <code>map&lt;string, string&gt; labels = 19;</code>
14972      */
14973     @java.lang.Override
getLabelsOrThrow(java.lang.String key)14974     public java.lang.String getLabelsOrThrow(java.lang.String key) {
14975       if (key == null) {
14976         throw new NullPointerException("map key");
14977       }
14978       java.util.Map<java.lang.String, java.lang.String> map = internalGetLabels().getMap();
14979       if (!map.containsKey(key)) {
14980         throw new java.lang.IllegalArgumentException();
14981       }
14982       return map.get(key);
14983     }
14984 
clearLabels()14985     public Builder clearLabels() {
14986       bitField0_ = (bitField0_ & ~0x01000000);
14987       internalGetMutableLabels().getMutableMap().clear();
14988       return this;
14989     }
14990     /**
14991      *
14992      *
14993      * <pre>
14994      * The labels with user-defined metadata to organize BatchPredictionJobs.
14995      * Label keys and values can be no longer than 64 characters
14996      * (Unicode codepoints), can only contain lowercase letters, numeric
14997      * characters, underscores and dashes. International characters are allowed.
14998      * See https://goo.gl/xmQnxf for more information and examples of labels.
14999      * </pre>
15000      *
15001      * <code>map&lt;string, string&gt; labels = 19;</code>
15002      */
removeLabels(java.lang.String key)15003     public Builder removeLabels(java.lang.String key) {
15004       if (key == null) {
15005         throw new NullPointerException("map key");
15006       }
15007       internalGetMutableLabels().getMutableMap().remove(key);
15008       return this;
15009     }
    /** Use alternate mutation accessors instead. */
    @java.lang.Deprecated
    public java.util.Map<java.lang.String, java.lang.String> getMutableLabels() {
      // Exposes the live mutable map; prefer putLabels/removeLabels/clearLabels
      // so presence tracking stays consistent with other generated code.
      bitField0_ |= 0x01000000;
      return internalGetMutableLabels().getMutableMap();
    }
15016     /**
15017      *
15018      *
15019      * <pre>
15020      * The labels with user-defined metadata to organize BatchPredictionJobs.
15021      * Label keys and values can be no longer than 64 characters
15022      * (Unicode codepoints), can only contain lowercase letters, numeric
15023      * characters, underscores and dashes. International characters are allowed.
15024      * See https://goo.gl/xmQnxf for more information and examples of labels.
15025      * </pre>
15026      *
15027      * <code>map&lt;string, string&gt; labels = 19;</code>
15028      */
putLabels(java.lang.String key, java.lang.String value)15029     public Builder putLabels(java.lang.String key, java.lang.String value) {
15030       if (key == null) {
15031         throw new NullPointerException("map key");
15032       }
15033       if (value == null) {
15034         throw new NullPointerException("map value");
15035       }
15036       internalGetMutableLabels().getMutableMap().put(key, value);
15037       bitField0_ |= 0x01000000;
15038       return this;
15039     }
15040     /**
15041      *
15042      *
15043      * <pre>
15044      * The labels with user-defined metadata to organize BatchPredictionJobs.
15045      * Label keys and values can be no longer than 64 characters
15046      * (Unicode codepoints), can only contain lowercase letters, numeric
15047      * characters, underscores and dashes. International characters are allowed.
15048      * See https://goo.gl/xmQnxf for more information and examples of labels.
15049      * </pre>
15050      *
15051      * <code>map&lt;string, string&gt; labels = 19;</code>
15052      */
putAllLabels(java.util.Map<java.lang.String, java.lang.String> values)15053     public Builder putAllLabels(java.util.Map<java.lang.String, java.lang.String> values) {
15054       internalGetMutableLabels().getMutableMap().putAll(values);
15055       bitField0_ |= 0x01000000;
15056       return this;
15057     }
15058 
    // `encryption_spec` (field 24). Plain message is authoritative until a
    // nested builder is created, after which the builder owns the value.
    private com.google.cloud.aiplatform.v1.EncryptionSpec encryptionSpec_;
    private com.google.protobuf.SingleFieldBuilderV3<
            com.google.cloud.aiplatform.v1.EncryptionSpec,
            com.google.cloud.aiplatform.v1.EncryptionSpec.Builder,
            com.google.cloud.aiplatform.v1.EncryptionSpecOrBuilder>
        encryptionSpecBuilder_;
15065     /**
15066      *
15067      *
15068      * <pre>
15069      * Customer-managed encryption key options for a BatchPredictionJob. If this
15070      * is set, then all resources created by the BatchPredictionJob will be
15071      * encrypted with the provided encryption key.
15072      * </pre>
15073      *
15074      * <code>.google.cloud.aiplatform.v1.EncryptionSpec encryption_spec = 24;</code>
15075      *
15076      * @return Whether the encryptionSpec field is set.
15077      */
hasEncryptionSpec()15078     public boolean hasEncryptionSpec() {
15079       return ((bitField0_ & 0x02000000) != 0);
15080     }
15081     /**
15082      *
15083      *
15084      * <pre>
15085      * Customer-managed encryption key options for a BatchPredictionJob. If this
15086      * is set, then all resources created by the BatchPredictionJob will be
15087      * encrypted with the provided encryption key.
15088      * </pre>
15089      *
15090      * <code>.google.cloud.aiplatform.v1.EncryptionSpec encryption_spec = 24;</code>
15091      *
15092      * @return The encryptionSpec.
15093      */
getEncryptionSpec()15094     public com.google.cloud.aiplatform.v1.EncryptionSpec getEncryptionSpec() {
15095       if (encryptionSpecBuilder_ == null) {
15096         return encryptionSpec_ == null
15097             ? com.google.cloud.aiplatform.v1.EncryptionSpec.getDefaultInstance()
15098             : encryptionSpec_;
15099       } else {
15100         return encryptionSpecBuilder_.getMessage();
15101       }
15102     }
15103     /**
15104      *
15105      *
15106      * <pre>
15107      * Customer-managed encryption key options for a BatchPredictionJob. If this
15108      * is set, then all resources created by the BatchPredictionJob will be
15109      * encrypted with the provided encryption key.
15110      * </pre>
15111      *
15112      * <code>.google.cloud.aiplatform.v1.EncryptionSpec encryption_spec = 24;</code>
15113      */
setEncryptionSpec(com.google.cloud.aiplatform.v1.EncryptionSpec value)15114     public Builder setEncryptionSpec(com.google.cloud.aiplatform.v1.EncryptionSpec value) {
15115       if (encryptionSpecBuilder_ == null) {
15116         if (value == null) {
15117           throw new NullPointerException();
15118         }
15119         encryptionSpec_ = value;
15120       } else {
15121         encryptionSpecBuilder_.setMessage(value);
15122       }
15123       bitField0_ |= 0x02000000;
15124       onChanged();
15125       return this;
15126     }
15127     /**
15128      *
15129      *
15130      * <pre>
15131      * Customer-managed encryption key options for a BatchPredictionJob. If this
15132      * is set, then all resources created by the BatchPredictionJob will be
15133      * encrypted with the provided encryption key.
15134      * </pre>
15135      *
15136      * <code>.google.cloud.aiplatform.v1.EncryptionSpec encryption_spec = 24;</code>
15137      */
setEncryptionSpec( com.google.cloud.aiplatform.v1.EncryptionSpec.Builder builderForValue)15138     public Builder setEncryptionSpec(
15139         com.google.cloud.aiplatform.v1.EncryptionSpec.Builder builderForValue) {
15140       if (encryptionSpecBuilder_ == null) {
15141         encryptionSpec_ = builderForValue.build();
15142       } else {
15143         encryptionSpecBuilder_.setMessage(builderForValue.build());
15144       }
15145       bitField0_ |= 0x02000000;
15146       onChanged();
15147       return this;
15148     }
15149     /**
15150      *
15151      *
15152      * <pre>
15153      * Customer-managed encryption key options for a BatchPredictionJob. If this
15154      * is set, then all resources created by the BatchPredictionJob will be
15155      * encrypted with the provided encryption key.
15156      * </pre>
15157      *
15158      * <code>.google.cloud.aiplatform.v1.EncryptionSpec encryption_spec = 24;</code>
15159      */
mergeEncryptionSpec(com.google.cloud.aiplatform.v1.EncryptionSpec value)15160     public Builder mergeEncryptionSpec(com.google.cloud.aiplatform.v1.EncryptionSpec value) {
15161       if (encryptionSpecBuilder_ == null) {
15162         if (((bitField0_ & 0x02000000) != 0)
15163             && encryptionSpec_ != null
15164             && encryptionSpec_
15165                 != com.google.cloud.aiplatform.v1.EncryptionSpec.getDefaultInstance()) {
15166           getEncryptionSpecBuilder().mergeFrom(value);
15167         } else {
15168           encryptionSpec_ = value;
15169         }
15170       } else {
15171         encryptionSpecBuilder_.mergeFrom(value);
15172       }
15173       bitField0_ |= 0x02000000;
15174       onChanged();
15175       return this;
15176     }
15177     /**
15178      *
15179      *
15180      * <pre>
15181      * Customer-managed encryption key options for a BatchPredictionJob. If this
15182      * is set, then all resources created by the BatchPredictionJob will be
15183      * encrypted with the provided encryption key.
15184      * </pre>
15185      *
15186      * <code>.google.cloud.aiplatform.v1.EncryptionSpec encryption_spec = 24;</code>
15187      */
clearEncryptionSpec()15188     public Builder clearEncryptionSpec() {
15189       bitField0_ = (bitField0_ & ~0x02000000);
15190       encryptionSpec_ = null;
15191       if (encryptionSpecBuilder_ != null) {
15192         encryptionSpecBuilder_.dispose();
15193         encryptionSpecBuilder_ = null;
15194       }
15195       onChanged();
15196       return this;
15197     }
15198     /**
15199      *
15200      *
15201      * <pre>
15202      * Customer-managed encryption key options for a BatchPredictionJob. If this
15203      * is set, then all resources created by the BatchPredictionJob will be
15204      * encrypted with the provided encryption key.
15205      * </pre>
15206      *
15207      * <code>.google.cloud.aiplatform.v1.EncryptionSpec encryption_spec = 24;</code>
15208      */
getEncryptionSpecBuilder()15209     public com.google.cloud.aiplatform.v1.EncryptionSpec.Builder getEncryptionSpecBuilder() {
15210       bitField0_ |= 0x02000000;
15211       onChanged();
15212       return getEncryptionSpecFieldBuilder().getBuilder();
15213     }
15214     /**
15215      *
15216      *
15217      * <pre>
15218      * Customer-managed encryption key options for a BatchPredictionJob. If this
15219      * is set, then all resources created by the BatchPredictionJob will be
15220      * encrypted with the provided encryption key.
15221      * </pre>
15222      *
15223      * <code>.google.cloud.aiplatform.v1.EncryptionSpec encryption_spec = 24;</code>
15224      */
getEncryptionSpecOrBuilder()15225     public com.google.cloud.aiplatform.v1.EncryptionSpecOrBuilder getEncryptionSpecOrBuilder() {
15226       if (encryptionSpecBuilder_ != null) {
15227         return encryptionSpecBuilder_.getMessageOrBuilder();
15228       } else {
15229         return encryptionSpec_ == null
15230             ? com.google.cloud.aiplatform.v1.EncryptionSpec.getDefaultInstance()
15231             : encryptionSpec_;
15232       }
15233     }
15234     /**
15235      *
15236      *
15237      * <pre>
15238      * Customer-managed encryption key options for a BatchPredictionJob. If this
15239      * is set, then all resources created by the BatchPredictionJob will be
15240      * encrypted with the provided encryption key.
15241      * </pre>
15242      *
15243      * <code>.google.cloud.aiplatform.v1.EncryptionSpec encryption_spec = 24;</code>
15244      */
15245     private com.google.protobuf.SingleFieldBuilderV3<
15246             com.google.cloud.aiplatform.v1.EncryptionSpec,
15247             com.google.cloud.aiplatform.v1.EncryptionSpec.Builder,
15248             com.google.cloud.aiplatform.v1.EncryptionSpecOrBuilder>
getEncryptionSpecFieldBuilder()15249         getEncryptionSpecFieldBuilder() {
15250       if (encryptionSpecBuilder_ == null) {
15251         encryptionSpecBuilder_ =
15252             new com.google.protobuf.SingleFieldBuilderV3<
15253                 com.google.cloud.aiplatform.v1.EncryptionSpec,
15254                 com.google.cloud.aiplatform.v1.EncryptionSpec.Builder,
15255                 com.google.cloud.aiplatform.v1.EncryptionSpecOrBuilder>(
15256                 getEncryptionSpec(), getParentForChildren(), isClean());
15257         encryptionSpec_ = null;
15258       }
15259       return encryptionSpecBuilder_;
15260     }
15261 
    // Backing field for the disable_container_logging proto field (tag 34).
    private boolean disableContainerLogging_;
    /**
     *
     *
     * <pre>
     * For custom-trained Models and AutoML Tabular Models, the container of the
     * DeployedModel instances will send `stderr` and `stdout` streams to
     * Cloud Logging by default. Please note that the logs incur cost,
     * which are subject to [Cloud Logging
     * pricing](https://cloud.google.com/logging/pricing).
     * User can disable container logging by setting this flag to true.
     * </pre>
     *
     * <code>bool disable_container_logging = 34;</code>
     *
     * @return The disableContainerLogging.
     */
    @java.lang.Override
    public boolean getDisableContainerLogging() {
      return disableContainerLogging_;
    }
15283     /**
15284      *
15285      *
15286      * <pre>
15287      * For custom-trained Models and AutoML Tabular Models, the container of the
15288      * DeployedModel instances will send `stderr` and `stdout` streams to
15289      * Cloud Logging by default. Please note that the logs incur cost,
15290      * which are subject to [Cloud Logging
15291      * pricing](https://cloud.google.com/logging/pricing).
15292      * User can disable container logging by setting this flag to true.
15293      * </pre>
15294      *
15295      * <code>bool disable_container_logging = 34;</code>
15296      *
15297      * @param value The disableContainerLogging to set.
15298      * @return This builder for chaining.
15299      */
setDisableContainerLogging(boolean value)15300     public Builder setDisableContainerLogging(boolean value) {
15301 
15302       disableContainerLogging_ = value;
15303       bitField0_ |= 0x04000000;
15304       onChanged();
15305       return this;
15306     }
15307     /**
15308      *
15309      *
15310      * <pre>
15311      * For custom-trained Models and AutoML Tabular Models, the container of the
15312      * DeployedModel instances will send `stderr` and `stdout` streams to
15313      * Cloud Logging by default. Please note that the logs incur cost,
15314      * which are subject to [Cloud Logging
15315      * pricing](https://cloud.google.com/logging/pricing).
15316      * User can disable container logging by setting this flag to true.
15317      * </pre>
15318      *
15319      * <code>bool disable_container_logging = 34;</code>
15320      *
15321      * @return This builder for chaining.
15322      */
clearDisableContainerLogging()15323     public Builder clearDisableContainerLogging() {
15324       bitField0_ = (bitField0_ & ~0x04000000);
15325       disableContainerLogging_ = false;
15326       onChanged();
15327       return this;
15328     }
15329 
    @java.lang.Override
    // Final pass-through to GeneratedMessageV3.Builder; declared here so the
    // generated builder type is returned for fluent chaining.
    public final Builder setUnknownFields(final com.google.protobuf.UnknownFieldSet unknownFields) {
      return super.setUnknownFields(unknownFields);
    }
15334 
    @java.lang.Override
    // Final pass-through to GeneratedMessageV3.Builder; declared here so the
    // generated builder type is returned for fluent chaining.
    public final Builder mergeUnknownFields(
        final com.google.protobuf.UnknownFieldSet unknownFields) {
      return super.mergeUnknownFields(unknownFields);
    }
15340 
15341     // @@protoc_insertion_point(builder_scope:google.cloud.aiplatform.v1.BatchPredictionJob)
15342   }
15343 
15344   // @@protoc_insertion_point(class_scope:google.cloud.aiplatform.v1.BatchPredictionJob)
15345   private static final com.google.cloud.aiplatform.v1.BatchPredictionJob DEFAULT_INSTANCE;
15346 
15347   static {
15348     DEFAULT_INSTANCE = new com.google.cloud.aiplatform.v1.BatchPredictionJob();
15349   }
15350 
getDefaultInstance()15351   public static com.google.cloud.aiplatform.v1.BatchPredictionJob getDefaultInstance() {
15352     return DEFAULT_INSTANCE;
15353   }
15354 
  // Wire-format parser. parsePartialFrom converts low-level failures into
  // InvalidProtocolBufferException while attaching the partially-built
  // message, so callers can inspect whatever was decoded before the error.
  private static final com.google.protobuf.Parser<BatchPredictionJob> PARSER =
      new com.google.protobuf.AbstractParser<BatchPredictionJob>() {
        @java.lang.Override
        public BatchPredictionJob parsePartialFrom(
            com.google.protobuf.CodedInputStream input,
            com.google.protobuf.ExtensionRegistryLite extensionRegistry)
            throws com.google.protobuf.InvalidProtocolBufferException {
          Builder builder = newBuilder();
          try {
            builder.mergeFrom(input, extensionRegistry);
          } catch (com.google.protobuf.InvalidProtocolBufferException e) {
            // Re-throw with the partial message preserved.
            throw e.setUnfinishedMessage(builder.buildPartial());
          } catch (com.google.protobuf.UninitializedMessageException e) {
            throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial());
          } catch (java.io.IOException e) {
            // Stream-level I/O errors are surfaced as parse failures.
            throw new com.google.protobuf.InvalidProtocolBufferException(e)
                .setUnfinishedMessage(builder.buildPartial());
          }
          return builder.buildPartial();
        }
      };
15376 
parser()15377   public static com.google.protobuf.Parser<BatchPredictionJob> parser() {
15378     return PARSER;
15379   }
15380 
  @java.lang.Override
  // Instance-level accessor required by the Message interface; delegates to
  // the shared static PARSER.
  public com.google.protobuf.Parser<BatchPredictionJob> getParserForType() {
    return PARSER;
  }
15385 
  @java.lang.Override
  // Instance-level accessor required by the MessageOrBuilder interface.
  public com.google.cloud.aiplatform.v1.BatchPredictionJob getDefaultInstanceForType() {
    return DEFAULT_INSTANCE;
  }
15390 }
15391