• Home
  • Line#
  • Scopes#
  • Navigate#
  • Raw
  • Download
1 /*
2  * Copyright 2020 Google LLC
3  *
4  * Licensed under the Apache License, Version 2.0 (the "License");
5  * you may not use this file except in compliance with the License.
6  * You may obtain a copy of the License at
7  *
8  *     https://www.apache.org/licenses/LICENSE-2.0
9  *
10  * Unless required by applicable law or agreed to in writing, software
11  * distributed under the License is distributed on an "AS IS" BASIS,
12  * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13  * See the License for the specific language governing permissions and
14  * limitations under the License.
15  */
16 // Generated by the protocol buffer compiler.  DO NOT EDIT!
17 // source: google/cloud/dataproc/v1/jobs.proto
18 
19 package com.google.cloud.dataproc.v1;
20 
21 /**
22  *
23  *
24  * <pre>
25  * A Dataproc job resource.
26  * </pre>
27  *
28  * Protobuf type {@code google.cloud.dataproc.v1.Job}
29  */
30 public final class Job extends com.google.protobuf.GeneratedMessageV3
31     implements
32     // @@protoc_insertion_point(message_implements:google.cloud.dataproc.v1.Job)
33     JobOrBuilder {
34   private static final long serialVersionUID = 0L;
35   // Use Job.newBuilder() to construct.
  // Builder-based constructor; callers obtain instances via Job.newBuilder().
  private Job(com.google.protobuf.GeneratedMessageV3.Builder<?> builder) {
    super(builder);
  }
39 
  // Default constructor used for the shared default instance and by
  // newInstance(); initializes repeated fields to empty and strings to "".
  private Job() {
    statusHistory_ = java.util.Collections.emptyList();
    yarnApplications_ = java.util.Collections.emptyList();
    driverOutputResourceUri_ = "";
    driverControlFilesUri_ = "";
    jobUuid_ = "";
  }
47 
  // Factory hook invoked reflectively by the protobuf runtime to create
  // fresh instances during parsing; the parameter is a marker and unused.
  @java.lang.Override
  @SuppressWarnings({"unused"})
  protected java.lang.Object newInstance(UnusedPrivateParameter unused) {
    return new Job();
  }
53 
  // Exposes fields that were present on the wire but unknown to this
  // generated schema version, so they survive re-serialization.
  @java.lang.Override
  public final com.google.protobuf.UnknownFieldSet getUnknownFields() {
    return this.unknownFields;
  }
58 
  // Returns the proto descriptor for google.cloud.dataproc.v1.Job, shared
  // via the file-level JobsProto holder.
  public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
    return com.google.cloud.dataproc.v1.JobsProto
        .internal_static_google_cloud_dataproc_v1_Job_descriptor;
  }
63 
  // Runtime hook for reflective access to map fields. Field 18 is the only
  // map field on this message (labels); any other number is a programming
  // error in the runtime, hence the unchecked throw.
  @SuppressWarnings({"rawtypes"})
  @java.lang.Override
  protected com.google.protobuf.MapField internalGetMapField(int number) {
    switch (number) {
      case 18:
        return internalGetLabels();
      default:
        throw new RuntimeException("Invalid map field number: " + number);
    }
  }
74 
  // Supplies the reflection table mapping proto field descriptors to the
  // generated Java accessors for Job and Job.Builder.
  @java.lang.Override
  protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
      internalGetFieldAccessorTable() {
    return com.google.cloud.dataproc.v1.JobsProto
        .internal_static_google_cloud_dataproc_v1_Job_fieldAccessorTable
        .ensureFieldAccessorsInitialized(
            com.google.cloud.dataproc.v1.Job.class, com.google.cloud.dataproc.v1.Job.Builder.class);
  }
83 
  // Backing state for the type_job oneof: typeJobCase_ holds the proto field
  // number of the active case (0 = none), and typeJob_ holds the matching
  // message instance for that case.
  private int typeJobCase_ = 0;
  private java.lang.Object typeJob_;
86 
87   public enum TypeJobCase
88       implements
89           com.google.protobuf.Internal.EnumLite,
90           com.google.protobuf.AbstractMessage.InternalOneOfEnum {
91     HADOOP_JOB(3),
92     SPARK_JOB(4),
93     PYSPARK_JOB(5),
94     HIVE_JOB(6),
95     PIG_JOB(7),
96     SPARK_R_JOB(21),
97     SPARK_SQL_JOB(12),
98     PRESTO_JOB(23),
99     TRINO_JOB(28),
100     TYPEJOB_NOT_SET(0);
101     private final int value;
102 
TypeJobCase(int value)103     private TypeJobCase(int value) {
104       this.value = value;
105     }
106     /**
107      * @param value The number of the enum to look for.
108      * @return The enum associated with the given number.
109      * @deprecated Use {@link #forNumber(int)} instead.
110      */
111     @java.lang.Deprecated
valueOf(int value)112     public static TypeJobCase valueOf(int value) {
113       return forNumber(value);
114     }
115 
forNumber(int value)116     public static TypeJobCase forNumber(int value) {
117       switch (value) {
118         case 3:
119           return HADOOP_JOB;
120         case 4:
121           return SPARK_JOB;
122         case 5:
123           return PYSPARK_JOB;
124         case 6:
125           return HIVE_JOB;
126         case 7:
127           return PIG_JOB;
128         case 21:
129           return SPARK_R_JOB;
130         case 12:
131           return SPARK_SQL_JOB;
132         case 23:
133           return PRESTO_JOB;
134         case 28:
135           return TRINO_JOB;
136         case 0:
137           return TYPEJOB_NOT_SET;
138         default:
139           return null;
140       }
141     }
142 
getNumber()143     public int getNumber() {
144       return this.value;
145     }
146   };
147 
  // Resolves the stored case number into its TypeJobCase constant;
  // returns TYPEJOB_NOT_SET when no type_job field is populated.
  public TypeJobCase getTypeJobCase() {
    return TypeJobCase.forNumber(typeJobCase_);
  }
151 
  public static final int REFERENCE_FIELD_NUMBER = 1;
  // Null when unset; proto3 message fields track presence via null here.
  private com.google.cloud.dataproc.v1.JobReference reference_;
  /**
   * <pre>
   * Optional. The fully qualified reference to the job, which can be used to
   * obtain the equivalent REST path of the job resource. If this property
   * is not specified when a job is created, the server generates a
   * &lt;code&gt;job_id&lt;/code&gt;.
   * </pre>
   *
   * <code>
   * .google.cloud.dataproc.v1.JobReference reference = 1 [(.google.api.field_behavior) = OPTIONAL];
   * </code>
   *
   * @return Whether the reference field is set.
   */
  @java.lang.Override
  public boolean hasReference() {
    return reference_ != null;
  }
  /**
   * Returns the job reference (see {@link #hasReference}); when unset, the
   * default (empty) {@code JobReference} instance is returned, never null.
   *
   * @return The reference.
   */
  @java.lang.Override
  public com.google.cloud.dataproc.v1.JobReference getReference() {
    return reference_ == null
        ? com.google.cloud.dataproc.v1.JobReference.getDefaultInstance()
        : reference_;
  }
  /**
   * Returns the job reference as its read-only interface view; never null.
   */
  @java.lang.Override
  public com.google.cloud.dataproc.v1.JobReferenceOrBuilder getReferenceOrBuilder() {
    return reference_ == null
        ? com.google.cloud.dataproc.v1.JobReference.getDefaultInstance()
        : reference_;
  }
216 
  public static final int PLACEMENT_FIELD_NUMBER = 2;
  // Null when unset; proto3 message fields track presence via null here.
  private com.google.cloud.dataproc.v1.JobPlacement placement_;
  /**
   * <pre>
   * Required. Job information, including how, when, and where to
   * run the job.
   * </pre>
   *
   * <code>
   * .google.cloud.dataproc.v1.JobPlacement placement = 2 [(.google.api.field_behavior) = REQUIRED];
   * </code>
   *
   * @return Whether the placement field is set.
   */
  @java.lang.Override
  public boolean hasPlacement() {
    return placement_ != null;
  }
  /**
   * Returns the job placement (see {@link #hasPlacement}); when unset, the
   * default (empty) {@code JobPlacement} instance is returned, never null.
   *
   * @return The placement.
   */
  @java.lang.Override
  public com.google.cloud.dataproc.v1.JobPlacement getPlacement() {
    return placement_ == null
        ? com.google.cloud.dataproc.v1.JobPlacement.getDefaultInstance()
        : placement_;
  }
  /**
   * Returns the job placement as its read-only interface view; never null.
   */
  @java.lang.Override
  public com.google.cloud.dataproc.v1.JobPlacementOrBuilder getPlacementOrBuilder() {
    return placement_ == null
        ? com.google.cloud.dataproc.v1.JobPlacement.getDefaultInstance()
        : placement_;
  }
275 
  public static final int HADOOP_JOB_FIELD_NUMBER = 3;
  /**
   * <pre>
   * Optional. Job is a Hadoop job.
   * </pre>
   *
   * <code>
   * .google.cloud.dataproc.v1.HadoopJob hadoop_job = 3 [(.google.api.field_behavior) = OPTIONAL];
   * </code>
   *
   * @return Whether the hadoopJob field is set.
   */
  @java.lang.Override
  public boolean hasHadoopJob() {
    return typeJobCase_ == 3;
  }
  /**
   * Returns the Hadoop job, or the default instance when this oneof case
   * is not the active one.
   *
   * @return The hadoopJob.
   */
  @java.lang.Override
  public com.google.cloud.dataproc.v1.HadoopJob getHadoopJob() {
    if (typeJobCase_ == 3) {
      // typeJob_ only ever holds a HadoopJob while case 3 is active.
      return (com.google.cloud.dataproc.v1.HadoopJob) typeJob_;
    }
    return com.google.cloud.dataproc.v1.HadoopJob.getDefaultInstance();
  }
  /**
   * Returns the Hadoop job as its read-only interface view; never null.
   */
  @java.lang.Override
  public com.google.cloud.dataproc.v1.HadoopJobOrBuilder getHadoopJobOrBuilder() {
    if (typeJobCase_ == 3) {
      return (com.google.cloud.dataproc.v1.HadoopJob) typeJob_;
    }
    return com.google.cloud.dataproc.v1.HadoopJob.getDefaultInstance();
  }
332 
  public static final int SPARK_JOB_FIELD_NUMBER = 4;
  /**
   * <pre>
   * Optional. Job is a Spark job.
   * </pre>
   *
   * <code>
   * .google.cloud.dataproc.v1.SparkJob spark_job = 4 [(.google.api.field_behavior) = OPTIONAL];
   * </code>
   *
   * @return Whether the sparkJob field is set.
   */
  @java.lang.Override
  public boolean hasSparkJob() {
    return typeJobCase_ == 4;
  }
  /**
   * Returns the Spark job, or the default instance when this oneof case
   * is not the active one.
   *
   * @return The sparkJob.
   */
  @java.lang.Override
  public com.google.cloud.dataproc.v1.SparkJob getSparkJob() {
    if (typeJobCase_ == 4) {
      // typeJob_ only ever holds a SparkJob while case 4 is active.
      return (com.google.cloud.dataproc.v1.SparkJob) typeJob_;
    }
    return com.google.cloud.dataproc.v1.SparkJob.getDefaultInstance();
  }
  /**
   * Returns the Spark job as its read-only interface view; never null.
   */
  @java.lang.Override
  public com.google.cloud.dataproc.v1.SparkJobOrBuilder getSparkJobOrBuilder() {
    if (typeJobCase_ == 4) {
      return (com.google.cloud.dataproc.v1.SparkJob) typeJob_;
    }
    return com.google.cloud.dataproc.v1.SparkJob.getDefaultInstance();
  }
389 
  public static final int PYSPARK_JOB_FIELD_NUMBER = 5;
  /**
   * <pre>
   * Optional. Job is a PySpark job.
   * </pre>
   *
   * <code>
   * .google.cloud.dataproc.v1.PySparkJob pyspark_job = 5 [(.google.api.field_behavior) = OPTIONAL];
   * </code>
   *
   * @return Whether the pysparkJob field is set.
   */
  @java.lang.Override
  public boolean hasPysparkJob() {
    return typeJobCase_ == 5;
  }
  /**
   * Returns the PySpark job, or the default instance when this oneof case
   * is not the active one.
   *
   * @return The pysparkJob.
   */
  @java.lang.Override
  public com.google.cloud.dataproc.v1.PySparkJob getPysparkJob() {
    if (typeJobCase_ == 5) {
      // typeJob_ only ever holds a PySparkJob while case 5 is active.
      return (com.google.cloud.dataproc.v1.PySparkJob) typeJob_;
    }
    return com.google.cloud.dataproc.v1.PySparkJob.getDefaultInstance();
  }
  /**
   * Returns the PySpark job as its read-only interface view; never null.
   */
  @java.lang.Override
  public com.google.cloud.dataproc.v1.PySparkJobOrBuilder getPysparkJobOrBuilder() {
    if (typeJobCase_ == 5) {
      return (com.google.cloud.dataproc.v1.PySparkJob) typeJob_;
    }
    return com.google.cloud.dataproc.v1.PySparkJob.getDefaultInstance();
  }
446 
  public static final int HIVE_JOB_FIELD_NUMBER = 6;
  /**
   * <pre>
   * Optional. Job is a Hive job.
   * </pre>
   *
   * <code>.google.cloud.dataproc.v1.HiveJob hive_job = 6 [(.google.api.field_behavior) = OPTIONAL];
   * </code>
   *
   * @return Whether the hiveJob field is set.
   */
  @java.lang.Override
  public boolean hasHiveJob() {
    return typeJobCase_ == 6;
  }
  /**
   * Returns the Hive job, or the default instance when this oneof case
   * is not the active one.
   *
   * @return The hiveJob.
   */
  @java.lang.Override
  public com.google.cloud.dataproc.v1.HiveJob getHiveJob() {
    if (typeJobCase_ == 6) {
      // typeJob_ only ever holds a HiveJob while case 6 is active.
      return (com.google.cloud.dataproc.v1.HiveJob) typeJob_;
    }
    return com.google.cloud.dataproc.v1.HiveJob.getDefaultInstance();
  }
  /**
   * Returns the Hive job as its read-only interface view; never null.
   */
  @java.lang.Override
  public com.google.cloud.dataproc.v1.HiveJobOrBuilder getHiveJobOrBuilder() {
    if (typeJobCase_ == 6) {
      return (com.google.cloud.dataproc.v1.HiveJob) typeJob_;
    }
    return com.google.cloud.dataproc.v1.HiveJob.getDefaultInstance();
  }
500 
  public static final int PIG_JOB_FIELD_NUMBER = 7;
  /**
   * <pre>
   * Optional. Job is a Pig job.
   * </pre>
   *
   * <code>.google.cloud.dataproc.v1.PigJob pig_job = 7 [(.google.api.field_behavior) = OPTIONAL];
   * </code>
   *
   * @return Whether the pigJob field is set.
   */
  @java.lang.Override
  public boolean hasPigJob() {
    return typeJobCase_ == 7;
  }
  /**
   * Returns the Pig job, or the default instance when this oneof case
   * is not the active one.
   *
   * @return The pigJob.
   */
  @java.lang.Override
  public com.google.cloud.dataproc.v1.PigJob getPigJob() {
    if (typeJobCase_ == 7) {
      // typeJob_ only ever holds a PigJob while case 7 is active.
      return (com.google.cloud.dataproc.v1.PigJob) typeJob_;
    }
    return com.google.cloud.dataproc.v1.PigJob.getDefaultInstance();
  }
  /**
   * Returns the Pig job as its read-only interface view; never null.
   */
  @java.lang.Override
  public com.google.cloud.dataproc.v1.PigJobOrBuilder getPigJobOrBuilder() {
    if (typeJobCase_ == 7) {
      return (com.google.cloud.dataproc.v1.PigJob) typeJob_;
    }
    return com.google.cloud.dataproc.v1.PigJob.getDefaultInstance();
  }
554 
  public static final int SPARK_R_JOB_FIELD_NUMBER = 21;
  /**
   * <pre>
   * Optional. Job is a SparkR job.
   * </pre>
   *
   * <code>
   * .google.cloud.dataproc.v1.SparkRJob spark_r_job = 21 [(.google.api.field_behavior) = OPTIONAL];
   * </code>
   *
   * @return Whether the sparkRJob field is set.
   */
  @java.lang.Override
  public boolean hasSparkRJob() {
    return typeJobCase_ == 21;
  }
  /**
   * Returns the SparkR job, or the default instance when this oneof case
   * is not the active one.
   *
   * @return The sparkRJob.
   */
  @java.lang.Override
  public com.google.cloud.dataproc.v1.SparkRJob getSparkRJob() {
    if (typeJobCase_ == 21) {
      // typeJob_ only ever holds a SparkRJob while case 21 is active.
      return (com.google.cloud.dataproc.v1.SparkRJob) typeJob_;
    }
    return com.google.cloud.dataproc.v1.SparkRJob.getDefaultInstance();
  }
  /**
   * Returns the SparkR job as its read-only interface view; never null.
   */
  @java.lang.Override
  public com.google.cloud.dataproc.v1.SparkRJobOrBuilder getSparkRJobOrBuilder() {
    if (typeJobCase_ == 21) {
      return (com.google.cloud.dataproc.v1.SparkRJob) typeJob_;
    }
    return com.google.cloud.dataproc.v1.SparkRJob.getDefaultInstance();
  }
611 
  public static final int SPARK_SQL_JOB_FIELD_NUMBER = 12;
  /**
   * <pre>
   * Optional. Job is a SparkSql job.
   * </pre>
   *
   * <code>
   * .google.cloud.dataproc.v1.SparkSqlJob spark_sql_job = 12 [(.google.api.field_behavior) = OPTIONAL];
   * </code>
   *
   * @return Whether the sparkSqlJob field is set.
   */
  @java.lang.Override
  public boolean hasSparkSqlJob() {
    return typeJobCase_ == 12;
  }
  /**
   * Returns the SparkSql job, or the default instance when this oneof case
   * is not the active one.
   *
   * @return The sparkSqlJob.
   */
  @java.lang.Override
  public com.google.cloud.dataproc.v1.SparkSqlJob getSparkSqlJob() {
    if (typeJobCase_ == 12) {
      // typeJob_ only ever holds a SparkSqlJob while case 12 is active.
      return (com.google.cloud.dataproc.v1.SparkSqlJob) typeJob_;
    }
    return com.google.cloud.dataproc.v1.SparkSqlJob.getDefaultInstance();
  }
  /**
   * Returns the SparkSql job as its read-only interface view; never null.
   */
  @java.lang.Override
  public com.google.cloud.dataproc.v1.SparkSqlJobOrBuilder getSparkSqlJobOrBuilder() {
    if (typeJobCase_ == 12) {
      return (com.google.cloud.dataproc.v1.SparkSqlJob) typeJob_;
    }
    return com.google.cloud.dataproc.v1.SparkSqlJob.getDefaultInstance();
  }
668 
  public static final int PRESTO_JOB_FIELD_NUMBER = 23;
  /**
   * <pre>
   * Optional. Job is a Presto job.
   * </pre>
   *
   * <code>
   * .google.cloud.dataproc.v1.PrestoJob presto_job = 23 [(.google.api.field_behavior) = OPTIONAL];
   * </code>
   *
   * @return Whether the prestoJob field is set.
   */
  @java.lang.Override
  public boolean hasPrestoJob() {
    return typeJobCase_ == 23;
  }
  /**
   * Returns the Presto job, or the default instance when this oneof case
   * is not the active one.
   *
   * @return The prestoJob.
   */
  @java.lang.Override
  public com.google.cloud.dataproc.v1.PrestoJob getPrestoJob() {
    if (typeJobCase_ == 23) {
      // typeJob_ only ever holds a PrestoJob while case 23 is active.
      return (com.google.cloud.dataproc.v1.PrestoJob) typeJob_;
    }
    return com.google.cloud.dataproc.v1.PrestoJob.getDefaultInstance();
  }
  /**
   * Returns the Presto job as its read-only interface view; never null.
   */
  @java.lang.Override
  public com.google.cloud.dataproc.v1.PrestoJobOrBuilder getPrestoJobOrBuilder() {
    if (typeJobCase_ == 23) {
      return (com.google.cloud.dataproc.v1.PrestoJob) typeJob_;
    }
    return com.google.cloud.dataproc.v1.PrestoJob.getDefaultInstance();
  }
725 
  public static final int TRINO_JOB_FIELD_NUMBER = 28;
  /**
   * <pre>
   * Optional. Job is a Trino job.
   * </pre>
   *
   * <code>
   * .google.cloud.dataproc.v1.TrinoJob trino_job = 28 [(.google.api.field_behavior) = OPTIONAL];
   * </code>
   *
   * @return Whether the trinoJob field is set.
   */
  @java.lang.Override
  public boolean hasTrinoJob() {
    return typeJobCase_ == 28;
  }
  /**
   * Returns the Trino job, or the default instance when this oneof case
   * is not the active one.
   *
   * @return The trinoJob.
   */
  @java.lang.Override
  public com.google.cloud.dataproc.v1.TrinoJob getTrinoJob() {
    if (typeJobCase_ == 28) {
      // typeJob_ only ever holds a TrinoJob while case 28 is active.
      return (com.google.cloud.dataproc.v1.TrinoJob) typeJob_;
    }
    return com.google.cloud.dataproc.v1.TrinoJob.getDefaultInstance();
  }
  /**
   * Returns the Trino job as its read-only interface view; never null.
   */
  @java.lang.Override
  public com.google.cloud.dataproc.v1.TrinoJobOrBuilder getTrinoJobOrBuilder() {
    if (typeJobCase_ == 28) {
      return (com.google.cloud.dataproc.v1.TrinoJob) typeJob_;
    }
    return com.google.cloud.dataproc.v1.TrinoJob.getDefaultInstance();
  }
782 
  public static final int STATUS_FIELD_NUMBER = 8;
  // Null until the server populates it; this is an output-only field.
  private com.google.cloud.dataproc.v1.JobStatus status_;
  /**
   * <pre>
   * Output only. The job status. Additional application-specific
   * status information may be contained in the &lt;code&gt;type_job&lt;/code&gt;
   * and &lt;code&gt;yarn_applications&lt;/code&gt; fields.
   * </pre>
   *
   * <code>
   * .google.cloud.dataproc.v1.JobStatus status = 8 [(.google.api.field_behavior) = OUTPUT_ONLY];
   * </code>
   *
   * @return Whether the status field is set.
   */
  @java.lang.Override
  public boolean hasStatus() {
    return status_ != null;
  }
  /**
   * Returns the job status (see {@link #hasStatus}); when unset, the default
   * (empty) {@code JobStatus} instance is returned, never null.
   *
   * @return The status.
   */
  @java.lang.Override
  public com.google.cloud.dataproc.v1.JobStatus getStatus() {
    return status_ == null ? com.google.cloud.dataproc.v1.JobStatus.getDefaultInstance() : status_;
  }
  /**
   * Returns the job status as its read-only interface view; never null.
   */
  @java.lang.Override
  public com.google.cloud.dataproc.v1.JobStatusOrBuilder getStatusOrBuilder() {
    return status_ == null ? com.google.cloud.dataproc.v1.JobStatus.getDefaultInstance() : status_;
  }
840 
  public static final int STATUS_HISTORY_FIELD_NUMBER = 13;

  @SuppressWarnings("serial")
  // Empty immutable list when no history exists (see the no-arg constructor).
  private java.util.List<com.google.cloud.dataproc.v1.JobStatus> statusHistory_;
  /**
   * <pre>
   * Output only. The previous job status.
   * </pre>
   *
   * <code>
   * repeated .google.cloud.dataproc.v1.JobStatus status_history = 13 [(.google.api.field_behavior) = OUTPUT_ONLY];
   * </code>
   */
  @java.lang.Override
  public java.util.List<com.google.cloud.dataproc.v1.JobStatus> getStatusHistoryList() {
    return statusHistory_;
  }
  /**
   * Returns the status history as a list of read-only views; same backing
   * list as {@link #getStatusHistoryList}.
   */
  @java.lang.Override
  public java.util.List<? extends com.google.cloud.dataproc.v1.JobStatusOrBuilder>
      getStatusHistoryOrBuilderList() {
    return statusHistory_;
  }
  /**
   * Returns the number of status-history entries.
   */
  @java.lang.Override
  public int getStatusHistoryCount() {
    return statusHistory_.size();
  }
  /**
   * Returns the status-history entry at {@code index}.
   */
  @java.lang.Override
  public com.google.cloud.dataproc.v1.JobStatus getStatusHistory(int index) {
    return statusHistory_.get(index);
  }
  /**
   * Returns the status-history entry at {@code index} as a read-only view.
   */
  @java.lang.Override
  public com.google.cloud.dataproc.v1.JobStatusOrBuilder getStatusHistoryOrBuilder(int index) {
    return statusHistory_.get(index);
  }
921 
  public static final int YARN_APPLICATIONS_FIELD_NUMBER = 9;

  @SuppressWarnings("serial")
  // Empty immutable list when no applications exist (see the no-arg constructor).
  private java.util.List<com.google.cloud.dataproc.v1.YarnApplication> yarnApplications_;
  /**
   * <pre>
   * Output only. The collection of YARN applications spun up by this job.
   * **Beta** Feature: This report is available for testing purposes only. It
   * may be changed before final release.
   * </pre>
   *
   * <code>
   * repeated .google.cloud.dataproc.v1.YarnApplication yarn_applications = 9 [(.google.api.field_behavior) = OUTPUT_ONLY];
   * </code>
   */
  @java.lang.Override
  public java.util.List<com.google.cloud.dataproc.v1.YarnApplication> getYarnApplicationsList() {
    return yarnApplications_;
  }
  /**
   * Returns the YARN applications as a list of read-only views; same backing
   * list as {@link #getYarnApplicationsList}.
   */
  @java.lang.Override
  public java.util.List<? extends com.google.cloud.dataproc.v1.YarnApplicationOrBuilder>
      getYarnApplicationsOrBuilderList() {
    return yarnApplications_;
  }
  /**
   * Returns the number of YARN-application entries.
   */
  @java.lang.Override
  public int getYarnApplicationsCount() {
    return yarnApplications_.size();
  }
  /**
   * Returns the YARN-application entry at {@code index}.
   */
  @java.lang.Override
  public com.google.cloud.dataproc.v1.YarnApplication getYarnApplications(int index) {
    return yarnApplications_.get(index);
  }
  /**
   * Returns the YARN-application entry at {@code index} as a read-only view.
   */
  @java.lang.Override
  public com.google.cloud.dataproc.v1.YarnApplicationOrBuilder getYarnApplicationsOrBuilder(
      int index) {
    return yarnApplications_.get(index);
  }
1013 
1014   public static final int DRIVER_OUTPUT_RESOURCE_URI_FIELD_NUMBER = 17;
1015 
1016   @SuppressWarnings("serial")
1017   private volatile java.lang.Object driverOutputResourceUri_ = "";
1018   /**
1019    *
1020    *
1021    * <pre>
1022    * Output only. A URI pointing to the location of the stdout of the job's
1023    * driver program.
1024    * </pre>
1025    *
1026    * <code>string driver_output_resource_uri = 17 [(.google.api.field_behavior) = OUTPUT_ONLY];
1027    * </code>
1028    *
1029    * @return The driverOutputResourceUri.
1030    */
1031   @java.lang.Override
getDriverOutputResourceUri()1032   public java.lang.String getDriverOutputResourceUri() {
1033     java.lang.Object ref = driverOutputResourceUri_;
1034     if (ref instanceof java.lang.String) {
1035       return (java.lang.String) ref;
1036     } else {
1037       com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
1038       java.lang.String s = bs.toStringUtf8();
1039       driverOutputResourceUri_ = s;
1040       return s;
1041     }
1042   }
1043   /**
1044    *
1045    *
1046    * <pre>
1047    * Output only. A URI pointing to the location of the stdout of the job's
1048    * driver program.
1049    * </pre>
1050    *
1051    * <code>string driver_output_resource_uri = 17 [(.google.api.field_behavior) = OUTPUT_ONLY];
1052    * </code>
1053    *
1054    * @return The bytes for driverOutputResourceUri.
1055    */
1056   @java.lang.Override
getDriverOutputResourceUriBytes()1057   public com.google.protobuf.ByteString getDriverOutputResourceUriBytes() {
1058     java.lang.Object ref = driverOutputResourceUri_;
1059     if (ref instanceof java.lang.String) {
1060       com.google.protobuf.ByteString b =
1061           com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
1062       driverOutputResourceUri_ = b;
1063       return b;
1064     } else {
1065       return (com.google.protobuf.ByteString) ref;
1066     }
1067   }
1068 
1069   public static final int DRIVER_CONTROL_FILES_URI_FIELD_NUMBER = 15;
1070 
1071   @SuppressWarnings("serial")
1072   private volatile java.lang.Object driverControlFilesUri_ = "";
1073   /**
1074    *
1075    *
1076    * <pre>
1077    * Output only. If present, the location of miscellaneous control files
1078    * which may be used as part of job setup and handling. If not present,
1079    * control files may be placed in the same location as `driver_output_uri`.
1080    * </pre>
1081    *
1082    * <code>string driver_control_files_uri = 15 [(.google.api.field_behavior) = OUTPUT_ONLY];</code>
1083    *
1084    * @return The driverControlFilesUri.
1085    */
1086   @java.lang.Override
getDriverControlFilesUri()1087   public java.lang.String getDriverControlFilesUri() {
1088     java.lang.Object ref = driverControlFilesUri_;
1089     if (ref instanceof java.lang.String) {
1090       return (java.lang.String) ref;
1091     } else {
1092       com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
1093       java.lang.String s = bs.toStringUtf8();
1094       driverControlFilesUri_ = s;
1095       return s;
1096     }
1097   }
1098   /**
1099    *
1100    *
1101    * <pre>
1102    * Output only. If present, the location of miscellaneous control files
1103    * which may be used as part of job setup and handling. If not present,
1104    * control files may be placed in the same location as `driver_output_uri`.
1105    * </pre>
1106    *
1107    * <code>string driver_control_files_uri = 15 [(.google.api.field_behavior) = OUTPUT_ONLY];</code>
1108    *
1109    * @return The bytes for driverControlFilesUri.
1110    */
1111   @java.lang.Override
getDriverControlFilesUriBytes()1112   public com.google.protobuf.ByteString getDriverControlFilesUriBytes() {
1113     java.lang.Object ref = driverControlFilesUri_;
1114     if (ref instanceof java.lang.String) {
1115       com.google.protobuf.ByteString b =
1116           com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
1117       driverControlFilesUri_ = b;
1118       return b;
1119     } else {
1120       return (com.google.protobuf.ByteString) ref;
1121     }
1122   }
1123 
  public static final int LABELS_FIELD_NUMBER = 18;

  // Holder for the prototype map-entry message used to (de)serialize the
  // string->string labels map; initialized lazily on first class use.
  private static final class LabelsDefaultEntryHolder {
    static final com.google.protobuf.MapEntry<java.lang.String, java.lang.String> defaultEntry =
        com.google.protobuf.MapEntry.<java.lang.String, java.lang.String>newDefaultInstance(
            com.google.cloud.dataproc.v1.JobsProto
                .internal_static_google_cloud_dataproc_v1_Job_LabelsEntry_descriptor,
            com.google.protobuf.WireFormat.FieldType.STRING,
            "",
            com.google.protobuf.WireFormat.FieldType.STRING,
            "");
  }

  // Null until the labels map is populated; accessors go through
  // internalGetLabels() so they never observe the null.
  @SuppressWarnings("serial")
  private com.google.protobuf.MapField<java.lang.String, java.lang.String> labels_;

  // Returns the backing MapField, substituting a shared empty map when the
  // field was never set.
  private com.google.protobuf.MapField<java.lang.String, java.lang.String> internalGetLabels() {
    if (labels_ == null) {
      return com.google.protobuf.MapField.emptyMapField(LabelsDefaultEntryHolder.defaultEntry);
    }
    return labels_;
  }
1146 
getLabelsCount()1147   public int getLabelsCount() {
1148     return internalGetLabels().getMap().size();
1149   }
1150   /**
1151    *
1152    *
1153    * <pre>
1154    * Optional. The labels to associate with this job.
1155    * Label **keys** must contain 1 to 63 characters, and must conform to
1156    * [RFC 1035](https://www.ietf.org/rfc/rfc1035.txt).
1157    * Label **values** may be empty, but, if present, must contain 1 to 63
1158    * characters, and must conform to [RFC
1159    * 1035](https://www.ietf.org/rfc/rfc1035.txt). No more than 32 labels can be
1160    * associated with a job.
1161    * </pre>
1162    *
1163    * <code>map&lt;string, string&gt; labels = 18 [(.google.api.field_behavior) = OPTIONAL];</code>
1164    */
1165   @java.lang.Override
containsLabels(java.lang.String key)1166   public boolean containsLabels(java.lang.String key) {
1167     if (key == null) {
1168       throw new NullPointerException("map key");
1169     }
1170     return internalGetLabels().getMap().containsKey(key);
1171   }
1172   /** Use {@link #getLabelsMap()} instead. */
1173   @java.lang.Override
1174   @java.lang.Deprecated
getLabels()1175   public java.util.Map<java.lang.String, java.lang.String> getLabels() {
1176     return getLabelsMap();
1177   }
1178   /**
1179    *
1180    *
1181    * <pre>
1182    * Optional. The labels to associate with this job.
1183    * Label **keys** must contain 1 to 63 characters, and must conform to
1184    * [RFC 1035](https://www.ietf.org/rfc/rfc1035.txt).
1185    * Label **values** may be empty, but, if present, must contain 1 to 63
1186    * characters, and must conform to [RFC
1187    * 1035](https://www.ietf.org/rfc/rfc1035.txt). No more than 32 labels can be
1188    * associated with a job.
1189    * </pre>
1190    *
1191    * <code>map&lt;string, string&gt; labels = 18 [(.google.api.field_behavior) = OPTIONAL];</code>
1192    */
1193   @java.lang.Override
getLabelsMap()1194   public java.util.Map<java.lang.String, java.lang.String> getLabelsMap() {
1195     return internalGetLabels().getMap();
1196   }
1197   /**
1198    *
1199    *
1200    * <pre>
1201    * Optional. The labels to associate with this job.
1202    * Label **keys** must contain 1 to 63 characters, and must conform to
1203    * [RFC 1035](https://www.ietf.org/rfc/rfc1035.txt).
1204    * Label **values** may be empty, but, if present, must contain 1 to 63
1205    * characters, and must conform to [RFC
1206    * 1035](https://www.ietf.org/rfc/rfc1035.txt). No more than 32 labels can be
1207    * associated with a job.
1208    * </pre>
1209    *
1210    * <code>map&lt;string, string&gt; labels = 18 [(.google.api.field_behavior) = OPTIONAL];</code>
1211    */
1212   @java.lang.Override
getLabelsOrDefault( java.lang.String key, java.lang.String defaultValue)1213   public /* nullable */ java.lang.String getLabelsOrDefault(
1214       java.lang.String key,
1215       /* nullable */
1216       java.lang.String defaultValue) {
1217     if (key == null) {
1218       throw new NullPointerException("map key");
1219     }
1220     java.util.Map<java.lang.String, java.lang.String> map = internalGetLabels().getMap();
1221     return map.containsKey(key) ? map.get(key) : defaultValue;
1222   }
1223   /**
1224    *
1225    *
1226    * <pre>
1227    * Optional. The labels to associate with this job.
1228    * Label **keys** must contain 1 to 63 characters, and must conform to
1229    * [RFC 1035](https://www.ietf.org/rfc/rfc1035.txt).
1230    * Label **values** may be empty, but, if present, must contain 1 to 63
1231    * characters, and must conform to [RFC
1232    * 1035](https://www.ietf.org/rfc/rfc1035.txt). No more than 32 labels can be
1233    * associated with a job.
1234    * </pre>
1235    *
1236    * <code>map&lt;string, string&gt; labels = 18 [(.google.api.field_behavior) = OPTIONAL];</code>
1237    */
1238   @java.lang.Override
getLabelsOrThrow(java.lang.String key)1239   public java.lang.String getLabelsOrThrow(java.lang.String key) {
1240     if (key == null) {
1241       throw new NullPointerException("map key");
1242     }
1243     java.util.Map<java.lang.String, java.lang.String> map = internalGetLabels().getMap();
1244     if (!map.containsKey(key)) {
1245       throw new java.lang.IllegalArgumentException();
1246     }
1247     return map.get(key);
1248   }
1249 
1250   public static final int SCHEDULING_FIELD_NUMBER = 20;
1251   private com.google.cloud.dataproc.v1.JobScheduling scheduling_;
1252   /**
1253    *
1254    *
1255    * <pre>
1256    * Optional. Job scheduling configuration.
1257    * </pre>
1258    *
1259    * <code>
1260    * .google.cloud.dataproc.v1.JobScheduling scheduling = 20 [(.google.api.field_behavior) = OPTIONAL];
1261    * </code>
1262    *
1263    * @return Whether the scheduling field is set.
1264    */
1265   @java.lang.Override
hasScheduling()1266   public boolean hasScheduling() {
1267     return scheduling_ != null;
1268   }
1269   /**
1270    *
1271    *
1272    * <pre>
1273    * Optional. Job scheduling configuration.
1274    * </pre>
1275    *
1276    * <code>
1277    * .google.cloud.dataproc.v1.JobScheduling scheduling = 20 [(.google.api.field_behavior) = OPTIONAL];
1278    * </code>
1279    *
1280    * @return The scheduling.
1281    */
1282   @java.lang.Override
getScheduling()1283   public com.google.cloud.dataproc.v1.JobScheduling getScheduling() {
1284     return scheduling_ == null
1285         ? com.google.cloud.dataproc.v1.JobScheduling.getDefaultInstance()
1286         : scheduling_;
1287   }
1288   /**
1289    *
1290    *
1291    * <pre>
1292    * Optional. Job scheduling configuration.
1293    * </pre>
1294    *
1295    * <code>
1296    * .google.cloud.dataproc.v1.JobScheduling scheduling = 20 [(.google.api.field_behavior) = OPTIONAL];
1297    * </code>
1298    */
1299   @java.lang.Override
getSchedulingOrBuilder()1300   public com.google.cloud.dataproc.v1.JobSchedulingOrBuilder getSchedulingOrBuilder() {
1301     return scheduling_ == null
1302         ? com.google.cloud.dataproc.v1.JobScheduling.getDefaultInstance()
1303         : scheduling_;
1304   }
1305 
1306   public static final int JOB_UUID_FIELD_NUMBER = 22;
1307 
1308   @SuppressWarnings("serial")
1309   private volatile java.lang.Object jobUuid_ = "";
1310   /**
1311    *
1312    *
1313    * <pre>
1314    * Output only. A UUID that uniquely identifies a job within the project
1315    * over time. This is in contrast to a user-settable reference.job_id that
1316    * may be reused over time.
1317    * </pre>
1318    *
1319    * <code>string job_uuid = 22 [(.google.api.field_behavior) = OUTPUT_ONLY];</code>
1320    *
1321    * @return The jobUuid.
1322    */
1323   @java.lang.Override
getJobUuid()1324   public java.lang.String getJobUuid() {
1325     java.lang.Object ref = jobUuid_;
1326     if (ref instanceof java.lang.String) {
1327       return (java.lang.String) ref;
1328     } else {
1329       com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
1330       java.lang.String s = bs.toStringUtf8();
1331       jobUuid_ = s;
1332       return s;
1333     }
1334   }
1335   /**
1336    *
1337    *
1338    * <pre>
1339    * Output only. A UUID that uniquely identifies a job within the project
1340    * over time. This is in contrast to a user-settable reference.job_id that
1341    * may be reused over time.
1342    * </pre>
1343    *
1344    * <code>string job_uuid = 22 [(.google.api.field_behavior) = OUTPUT_ONLY];</code>
1345    *
1346    * @return The bytes for jobUuid.
1347    */
1348   @java.lang.Override
getJobUuidBytes()1349   public com.google.protobuf.ByteString getJobUuidBytes() {
1350     java.lang.Object ref = jobUuid_;
1351     if (ref instanceof java.lang.String) {
1352       com.google.protobuf.ByteString b =
1353           com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
1354       jobUuid_ = b;
1355       return b;
1356     } else {
1357       return (com.google.protobuf.ByteString) ref;
1358     }
1359   }
1360 
1361   public static final int DONE_FIELD_NUMBER = 24;
1362   private boolean done_ = false;
1363   /**
1364    *
1365    *
1366    * <pre>
1367    * Output only. Indicates whether the job is completed. If the value is
1368    * `false`, the job is still in progress. If `true`, the job is completed, and
1369    * `status.state` field will indicate if it was successful, failed,
1370    * or cancelled.
1371    * </pre>
1372    *
1373    * <code>bool done = 24 [(.google.api.field_behavior) = OUTPUT_ONLY];</code>
1374    *
1375    * @return The done.
1376    */
1377   @java.lang.Override
getDone()1378   public boolean getDone() {
1379     return done_;
1380   }
1381 
1382   public static final int DRIVER_SCHEDULING_CONFIG_FIELD_NUMBER = 27;
1383   private com.google.cloud.dataproc.v1.DriverSchedulingConfig driverSchedulingConfig_;
1384   /**
1385    *
1386    *
1387    * <pre>
1388    * Optional. Driver scheduling configuration.
1389    * </pre>
1390    *
1391    * <code>
1392    * .google.cloud.dataproc.v1.DriverSchedulingConfig driver_scheduling_config = 27 [(.google.api.field_behavior) = OPTIONAL];
1393    * </code>
1394    *
1395    * @return Whether the driverSchedulingConfig field is set.
1396    */
1397   @java.lang.Override
hasDriverSchedulingConfig()1398   public boolean hasDriverSchedulingConfig() {
1399     return driverSchedulingConfig_ != null;
1400   }
1401   /**
1402    *
1403    *
1404    * <pre>
1405    * Optional. Driver scheduling configuration.
1406    * </pre>
1407    *
1408    * <code>
1409    * .google.cloud.dataproc.v1.DriverSchedulingConfig driver_scheduling_config = 27 [(.google.api.field_behavior) = OPTIONAL];
1410    * </code>
1411    *
1412    * @return The driverSchedulingConfig.
1413    */
1414   @java.lang.Override
getDriverSchedulingConfig()1415   public com.google.cloud.dataproc.v1.DriverSchedulingConfig getDriverSchedulingConfig() {
1416     return driverSchedulingConfig_ == null
1417         ? com.google.cloud.dataproc.v1.DriverSchedulingConfig.getDefaultInstance()
1418         : driverSchedulingConfig_;
1419   }
1420   /**
1421    *
1422    *
1423    * <pre>
1424    * Optional. Driver scheduling configuration.
1425    * </pre>
1426    *
1427    * <code>
1428    * .google.cloud.dataproc.v1.DriverSchedulingConfig driver_scheduling_config = 27 [(.google.api.field_behavior) = OPTIONAL];
1429    * </code>
1430    */
1431   @java.lang.Override
1432   public com.google.cloud.dataproc.v1.DriverSchedulingConfigOrBuilder
getDriverSchedulingConfigOrBuilder()1433       getDriverSchedulingConfigOrBuilder() {
1434     return driverSchedulingConfig_ == null
1435         ? com.google.cloud.dataproc.v1.DriverSchedulingConfig.getDefaultInstance()
1436         : driverSchedulingConfig_;
1437   }
1438 
1439   private byte memoizedIsInitialized = -1;
1440 
1441   @java.lang.Override
isInitialized()1442   public final boolean isInitialized() {
1443     byte isInitialized = memoizedIsInitialized;
1444     if (isInitialized == 1) return true;
1445     if (isInitialized == 0) return false;
1446 
1447     memoizedIsInitialized = 1;
1448     return true;
1449   }
1450 
  @java.lang.Override
  public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException {
    // Serializes set fields in ascending field-number order. Singular message
    // fields are written only when non-null; string fields only when non-empty;
    // the typeJob oneof writes whichever case typeJobCase_ currently selects.
    if (reference_ != null) {
      output.writeMessage(1, getReference());
    }
    if (placement_ != null) {
      output.writeMessage(2, getPlacement());
    }
    if (typeJobCase_ == 3) {
      output.writeMessage(3, (com.google.cloud.dataproc.v1.HadoopJob) typeJob_);
    }
    if (typeJobCase_ == 4) {
      output.writeMessage(4, (com.google.cloud.dataproc.v1.SparkJob) typeJob_);
    }
    if (typeJobCase_ == 5) {
      output.writeMessage(5, (com.google.cloud.dataproc.v1.PySparkJob) typeJob_);
    }
    if (typeJobCase_ == 6) {
      output.writeMessage(6, (com.google.cloud.dataproc.v1.HiveJob) typeJob_);
    }
    if (typeJobCase_ == 7) {
      output.writeMessage(7, (com.google.cloud.dataproc.v1.PigJob) typeJob_);
    }
    if (status_ != null) {
      output.writeMessage(8, getStatus());
    }
    for (int i = 0; i < yarnApplications_.size(); i++) {
      output.writeMessage(9, yarnApplications_.get(i));
    }
    if (typeJobCase_ == 12) {
      output.writeMessage(12, (com.google.cloud.dataproc.v1.SparkSqlJob) typeJob_);
    }
    for (int i = 0; i < statusHistory_.size(); i++) {
      output.writeMessage(13, statusHistory_.get(i));
    }
    if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(driverControlFilesUri_)) {
      com.google.protobuf.GeneratedMessageV3.writeString(output, 15, driverControlFilesUri_);
    }
    if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(driverOutputResourceUri_)) {
      com.google.protobuf.GeneratedMessageV3.writeString(output, 17, driverOutputResourceUri_);
    }
    // Map entries are emitted as repeated MapEntry messages under field 18.
    com.google.protobuf.GeneratedMessageV3.serializeStringMapTo(
        output, internalGetLabels(), LabelsDefaultEntryHolder.defaultEntry, 18);
    if (scheduling_ != null) {
      output.writeMessage(20, getScheduling());
    }
    if (typeJobCase_ == 21) {
      output.writeMessage(21, (com.google.cloud.dataproc.v1.SparkRJob) typeJob_);
    }
    if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(jobUuid_)) {
      com.google.protobuf.GeneratedMessageV3.writeString(output, 22, jobUuid_);
    }
    if (typeJobCase_ == 23) {
      output.writeMessage(23, (com.google.cloud.dataproc.v1.PrestoJob) typeJob_);
    }
    if (done_ != false) {
      output.writeBool(24, done_);
    }
    if (driverSchedulingConfig_ != null) {
      output.writeMessage(27, getDriverSchedulingConfig());
    }
    if (typeJobCase_ == 28) {
      output.writeMessage(28, (com.google.cloud.dataproc.v1.TrinoJob) typeJob_);
    }
    // Unknown fields preserved from parsing are re-emitted last.
    getUnknownFields().writeTo(output);
  }
1517 
  @java.lang.Override
  public int getSerializedSize() {
    // Memoized: memoizedSize is -1 until first computed, then cached. The
    // accumulation below mirrors writeTo() field-for-field so the computed
    // size matches what serialization actually emits.
    int size = memoizedSize;
    if (size != -1) return size;

    size = 0;
    if (reference_ != null) {
      size += com.google.protobuf.CodedOutputStream.computeMessageSize(1, getReference());
    }
    if (placement_ != null) {
      size += com.google.protobuf.CodedOutputStream.computeMessageSize(2, getPlacement());
    }
    if (typeJobCase_ == 3) {
      size +=
          com.google.protobuf.CodedOutputStream.computeMessageSize(
              3, (com.google.cloud.dataproc.v1.HadoopJob) typeJob_);
    }
    if (typeJobCase_ == 4) {
      size +=
          com.google.protobuf.CodedOutputStream.computeMessageSize(
              4, (com.google.cloud.dataproc.v1.SparkJob) typeJob_);
    }
    if (typeJobCase_ == 5) {
      size +=
          com.google.protobuf.CodedOutputStream.computeMessageSize(
              5, (com.google.cloud.dataproc.v1.PySparkJob) typeJob_);
    }
    if (typeJobCase_ == 6) {
      size +=
          com.google.protobuf.CodedOutputStream.computeMessageSize(
              6, (com.google.cloud.dataproc.v1.HiveJob) typeJob_);
    }
    if (typeJobCase_ == 7) {
      size +=
          com.google.protobuf.CodedOutputStream.computeMessageSize(
              7, (com.google.cloud.dataproc.v1.PigJob) typeJob_);
    }
    if (status_ != null) {
      size += com.google.protobuf.CodedOutputStream.computeMessageSize(8, getStatus());
    }
    for (int i = 0; i < yarnApplications_.size(); i++) {
      size += com.google.protobuf.CodedOutputStream.computeMessageSize(9, yarnApplications_.get(i));
    }
    if (typeJobCase_ == 12) {
      size +=
          com.google.protobuf.CodedOutputStream.computeMessageSize(
              12, (com.google.cloud.dataproc.v1.SparkSqlJob) typeJob_);
    }
    for (int i = 0; i < statusHistory_.size(); i++) {
      size += com.google.protobuf.CodedOutputStream.computeMessageSize(13, statusHistory_.get(i));
    }
    if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(driverControlFilesUri_)) {
      size += com.google.protobuf.GeneratedMessageV3.computeStringSize(15, driverControlFilesUri_);
    }
    if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(driverOutputResourceUri_)) {
      size +=
          com.google.protobuf.GeneratedMessageV3.computeStringSize(17, driverOutputResourceUri_);
    }
    // Each labels entry is sized as a standalone MapEntry message (field 18).
    for (java.util.Map.Entry<java.lang.String, java.lang.String> entry :
        internalGetLabels().getMap().entrySet()) {
      com.google.protobuf.MapEntry<java.lang.String, java.lang.String> labels__ =
          LabelsDefaultEntryHolder.defaultEntry
              .newBuilderForType()
              .setKey(entry.getKey())
              .setValue(entry.getValue())
              .build();
      size += com.google.protobuf.CodedOutputStream.computeMessageSize(18, labels__);
    }
    if (scheduling_ != null) {
      size += com.google.protobuf.CodedOutputStream.computeMessageSize(20, getScheduling());
    }
    if (typeJobCase_ == 21) {
      size +=
          com.google.protobuf.CodedOutputStream.computeMessageSize(
              21, (com.google.cloud.dataproc.v1.SparkRJob) typeJob_);
    }
    if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(jobUuid_)) {
      size += com.google.protobuf.GeneratedMessageV3.computeStringSize(22, jobUuid_);
    }
    if (typeJobCase_ == 23) {
      size +=
          com.google.protobuf.CodedOutputStream.computeMessageSize(
              23, (com.google.cloud.dataproc.v1.PrestoJob) typeJob_);
    }
    if (done_ != false) {
      size += com.google.protobuf.CodedOutputStream.computeBoolSize(24, done_);
    }
    if (driverSchedulingConfig_ != null) {
      size +=
          com.google.protobuf.CodedOutputStream.computeMessageSize(27, getDriverSchedulingConfig());
    }
    if (typeJobCase_ == 28) {
      size +=
          com.google.protobuf.CodedOutputStream.computeMessageSize(
              28, (com.google.cloud.dataproc.v1.TrinoJob) typeJob_);
    }
    size += getUnknownFields().getSerializedSize();
    memoizedSize = size;
    return size;
  }
1618 
  @java.lang.Override
  public boolean equals(final java.lang.Object obj) {
    if (obj == this) {
      return true;
    }
    if (!(obj instanceof com.google.cloud.dataproc.v1.Job)) {
      return super.equals(obj);
    }
    com.google.cloud.dataproc.v1.Job other = (com.google.cloud.dataproc.v1.Job) obj;

    // Optional message fields compare presence first, then value when both set.
    if (hasReference() != other.hasReference()) return false;
    if (hasReference()) {
      if (!getReference().equals(other.getReference())) return false;
    }
    if (hasPlacement() != other.hasPlacement()) return false;
    if (hasPlacement()) {
      if (!getPlacement().equals(other.getPlacement())) return false;
    }
    if (hasStatus() != other.hasStatus()) return false;
    if (hasStatus()) {
      if (!getStatus().equals(other.getStatus())) return false;
    }
    if (!getStatusHistoryList().equals(other.getStatusHistoryList())) return false;
    if (!getYarnApplicationsList().equals(other.getYarnApplicationsList())) return false;
    if (!getDriverOutputResourceUri().equals(other.getDriverOutputResourceUri())) return false;
    if (!getDriverControlFilesUri().equals(other.getDriverControlFilesUri())) return false;
    if (!internalGetLabels().equals(other.internalGetLabels())) return false;
    if (hasScheduling() != other.hasScheduling()) return false;
    if (hasScheduling()) {
      if (!getScheduling().equals(other.getScheduling())) return false;
    }
    if (!getJobUuid().equals(other.getJobUuid())) return false;
    if (getDone() != other.getDone()) return false;
    if (hasDriverSchedulingConfig() != other.hasDriverSchedulingConfig()) return false;
    if (hasDriverSchedulingConfig()) {
      if (!getDriverSchedulingConfig().equals(other.getDriverSchedulingConfig())) return false;
    }
    // The oneof: cases must match, then the active member is compared.
    if (!getTypeJobCase().equals(other.getTypeJobCase())) return false;
    switch (typeJobCase_) {
      case 3:
        if (!getHadoopJob().equals(other.getHadoopJob())) return false;
        break;
      case 4:
        if (!getSparkJob().equals(other.getSparkJob())) return false;
        break;
      case 5:
        if (!getPysparkJob().equals(other.getPysparkJob())) return false;
        break;
      case 6:
        if (!getHiveJob().equals(other.getHiveJob())) return false;
        break;
      case 7:
        if (!getPigJob().equals(other.getPigJob())) return false;
        break;
      case 21:
        if (!getSparkRJob().equals(other.getSparkRJob())) return false;
        break;
      case 12:
        if (!getSparkSqlJob().equals(other.getSparkSqlJob())) return false;
        break;
      case 23:
        if (!getPrestoJob().equals(other.getPrestoJob())) return false;
        break;
      case 28:
        if (!getTrinoJob().equals(other.getTrinoJob())) return false;
        break;
      case 0:
      default:
        // TYPEJOB_NOT_SET: nothing more to compare.
    }
    if (!getUnknownFields().equals(other.getUnknownFields())) return false;
    return true;
  }
1691 
  @java.lang.Override
  public int hashCode() {
    // Memoized; a computed hash is never 0 here because it always mixes in the
    // descriptor hash, so 0 safely means "not yet computed".
    if (memoizedHashCode != 0) {
      return memoizedHashCode;
    }
    // Standard generated-message scheme: fold each set field's number and
    // value hash into the accumulator with the 37/53 multipliers.
    int hash = 41;
    hash = (19 * hash) + getDescriptor().hashCode();
    if (hasReference()) {
      hash = (37 * hash) + REFERENCE_FIELD_NUMBER;
      hash = (53 * hash) + getReference().hashCode();
    }
    if (hasPlacement()) {
      hash = (37 * hash) + PLACEMENT_FIELD_NUMBER;
      hash = (53 * hash) + getPlacement().hashCode();
    }
    if (hasStatus()) {
      hash = (37 * hash) + STATUS_FIELD_NUMBER;
      hash = (53 * hash) + getStatus().hashCode();
    }
    if (getStatusHistoryCount() > 0) {
      hash = (37 * hash) + STATUS_HISTORY_FIELD_NUMBER;
      hash = (53 * hash) + getStatusHistoryList().hashCode();
    }
    if (getYarnApplicationsCount() > 0) {
      hash = (37 * hash) + YARN_APPLICATIONS_FIELD_NUMBER;
      hash = (53 * hash) + getYarnApplicationsList().hashCode();
    }
    hash = (37 * hash) + DRIVER_OUTPUT_RESOURCE_URI_FIELD_NUMBER;
    hash = (53 * hash) + getDriverOutputResourceUri().hashCode();
    hash = (37 * hash) + DRIVER_CONTROL_FILES_URI_FIELD_NUMBER;
    hash = (53 * hash) + getDriverControlFilesUri().hashCode();
    if (!internalGetLabels().getMap().isEmpty()) {
      hash = (37 * hash) + LABELS_FIELD_NUMBER;
      hash = (53 * hash) + internalGetLabels().hashCode();
    }
    if (hasScheduling()) {
      hash = (37 * hash) + SCHEDULING_FIELD_NUMBER;
      hash = (53 * hash) + getScheduling().hashCode();
    }
    hash = (37 * hash) + JOB_UUID_FIELD_NUMBER;
    hash = (53 * hash) + getJobUuid().hashCode();
    hash = (37 * hash) + DONE_FIELD_NUMBER;
    hash = (53 * hash) + com.google.protobuf.Internal.hashBoolean(getDone());
    if (hasDriverSchedulingConfig()) {
      hash = (37 * hash) + DRIVER_SCHEDULING_CONFIG_FIELD_NUMBER;
      hash = (53 * hash) + getDriverSchedulingConfig().hashCode();
    }
    // Only the active oneof member (if any) contributes to the hash.
    switch (typeJobCase_) {
      case 3:
        hash = (37 * hash) + HADOOP_JOB_FIELD_NUMBER;
        hash = (53 * hash) + getHadoopJob().hashCode();
        break;
      case 4:
        hash = (37 * hash) + SPARK_JOB_FIELD_NUMBER;
        hash = (53 * hash) + getSparkJob().hashCode();
        break;
      case 5:
        hash = (37 * hash) + PYSPARK_JOB_FIELD_NUMBER;
        hash = (53 * hash) + getPysparkJob().hashCode();
        break;
      case 6:
        hash = (37 * hash) + HIVE_JOB_FIELD_NUMBER;
        hash = (53 * hash) + getHiveJob().hashCode();
        break;
      case 7:
        hash = (37 * hash) + PIG_JOB_FIELD_NUMBER;
        hash = (53 * hash) + getPigJob().hashCode();
        break;
      case 21:
        hash = (37 * hash) + SPARK_R_JOB_FIELD_NUMBER;
        hash = (53 * hash) + getSparkRJob().hashCode();
        break;
      case 12:
        hash = (37 * hash) + SPARK_SQL_JOB_FIELD_NUMBER;
        hash = (53 * hash) + getSparkSqlJob().hashCode();
        break;
      case 23:
        hash = (37 * hash) + PRESTO_JOB_FIELD_NUMBER;
        hash = (53 * hash) + getPrestoJob().hashCode();
        break;
      case 28:
        hash = (37 * hash) + TRINO_JOB_FIELD_NUMBER;
        hash = (53 * hash) + getTrinoJob().hashCode();
        break;
      case 0:
      default:
    }
    hash = (29 * hash) + getUnknownFields().hashCode();
    memoizedHashCode = hash;
    return hash;
  }
1783 
  // Static parse helpers. Each overload delegates to the shared PARSER; the
  // ExtensionRegistryLite variants resolve extensions while parsing. Byte-based
  // overloads throw InvalidProtocolBufferException on malformed input; the
  // stream-based overload surfaces I/O failures as IOException.
  public static com.google.cloud.dataproc.v1.Job parseFrom(java.nio.ByteBuffer data)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data);
  }

  public static com.google.cloud.dataproc.v1.Job parseFrom(
      java.nio.ByteBuffer data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data, extensionRegistry);
  }

  public static com.google.cloud.dataproc.v1.Job parseFrom(com.google.protobuf.ByteString data)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data);
  }

  public static com.google.cloud.dataproc.v1.Job parseFrom(
      com.google.protobuf.ByteString data,
      com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data, extensionRegistry);
  }

  public static com.google.cloud.dataproc.v1.Job parseFrom(byte[] data)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data);
  }

  public static com.google.cloud.dataproc.v1.Job parseFrom(
      byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data, extensionRegistry);
  }

  public static com.google.cloud.dataproc.v1.Job parseFrom(java.io.InputStream input)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
  }
1822 
parseFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)1823   public static com.google.cloud.dataproc.v1.Job parseFrom(
1824       java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
1825       throws java.io.IOException {
1826     return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
1827         PARSER, input, extensionRegistry);
1828   }
1829 
parseDelimitedFrom(java.io.InputStream input)1830   public static com.google.cloud.dataproc.v1.Job parseDelimitedFrom(java.io.InputStream input)
1831       throws java.io.IOException {
1832     return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(PARSER, input);
1833   }
1834 
parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)1835   public static com.google.cloud.dataproc.v1.Job parseDelimitedFrom(
1836       java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
1837       throws java.io.IOException {
1838     return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(
1839         PARSER, input, extensionRegistry);
1840   }
1841 
parseFrom( com.google.protobuf.CodedInputStream input)1842   public static com.google.cloud.dataproc.v1.Job parseFrom(
1843       com.google.protobuf.CodedInputStream input) throws java.io.IOException {
1844     return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
1845   }
1846 
parseFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)1847   public static com.google.cloud.dataproc.v1.Job parseFrom(
1848       com.google.protobuf.CodedInputStream input,
1849       com.google.protobuf.ExtensionRegistryLite extensionRegistry)
1850       throws java.io.IOException {
1851     return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
1852         PARSER, input, extensionRegistry);
1853   }
1854 
1855   @java.lang.Override
newBuilderForType()1856   public Builder newBuilderForType() {
1857     return newBuilder();
1858   }
1859 
newBuilder()1860   public static Builder newBuilder() {
1861     return DEFAULT_INSTANCE.toBuilder();
1862   }
1863 
newBuilder(com.google.cloud.dataproc.v1.Job prototype)1864   public static Builder newBuilder(com.google.cloud.dataproc.v1.Job prototype) {
1865     return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
1866   }
1867 
1868   @java.lang.Override
toBuilder()1869   public Builder toBuilder() {
1870     return this == DEFAULT_INSTANCE ? new Builder() : new Builder().mergeFrom(this);
1871   }
1872 
1873   @java.lang.Override
newBuilderForType(com.google.protobuf.GeneratedMessageV3.BuilderParent parent)1874   protected Builder newBuilderForType(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
1875     Builder builder = new Builder(parent);
1876     return builder;
1877   }
1878   /**
1879    *
1880    *
1881    * <pre>
1882    * A Dataproc job resource.
1883    * </pre>
1884    *
1885    * Protobuf type {@code google.cloud.dataproc.v1.Job}
1886    */
1887   public static final class Builder extends com.google.protobuf.GeneratedMessageV3.Builder<Builder>
1888       implements
1889       // @@protoc_insertion_point(builder_implements:google.cloud.dataproc.v1.Job)
1890       com.google.cloud.dataproc.v1.JobOrBuilder {
getDescriptor()1891     public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
1892       return com.google.cloud.dataproc.v1.JobsProto
1893           .internal_static_google_cloud_dataproc_v1_Job_descriptor;
1894     }
1895 
1896     @SuppressWarnings({"rawtypes"})
internalGetMapField(int number)1897     protected com.google.protobuf.MapField internalGetMapField(int number) {
1898       switch (number) {
1899         case 18:
1900           return internalGetLabels();
1901         default:
1902           throw new RuntimeException("Invalid map field number: " + number);
1903       }
1904     }
1905 
1906     @SuppressWarnings({"rawtypes"})
internalGetMutableMapField(int number)1907     protected com.google.protobuf.MapField internalGetMutableMapField(int number) {
1908       switch (number) {
1909         case 18:
1910           return internalGetMutableLabels();
1911         default:
1912           throw new RuntimeException("Invalid map field number: " + number);
1913       }
1914     }
1915 
1916     @java.lang.Override
1917     protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
internalGetFieldAccessorTable()1918         internalGetFieldAccessorTable() {
1919       return com.google.cloud.dataproc.v1.JobsProto
1920           .internal_static_google_cloud_dataproc_v1_Job_fieldAccessorTable
1921           .ensureFieldAccessorsInitialized(
1922               com.google.cloud.dataproc.v1.Job.class,
1923               com.google.cloud.dataproc.v1.Job.Builder.class);
1924     }
1925 
    // Construct using com.google.cloud.dataproc.v1.Job.newBuilder()
    // Private: external callers obtain builders via Job.newBuilder()/toBuilder().
    private Builder() {}
1928 
    // Builder attached to a parent so nested-builder changes notify the enclosing message.
    private Builder(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
      super(parent);
    }
1932 
    @java.lang.Override
    public Builder clear() {
      // Resets every field to its proto3 default: message fields to null (disposing any live
      // nested builders), repeated fields to empty lists, strings to "", bool to false, the
      // labels map to empty, and the type_job oneof to "not set". Also zeroes the has-bits in
      // bitField0_.
      super.clear();
      bitField0_ = 0;
      reference_ = null;
      if (referenceBuilder_ != null) {
        referenceBuilder_.dispose();
        referenceBuilder_ = null;
      }
      placement_ = null;
      if (placementBuilder_ != null) {
        placementBuilder_.dispose();
        placementBuilder_ = null;
      }
      // Oneof members: the nested builders are cleared but retained; the active case is reset
      // at the bottom via typeJobCase_ = 0.
      if (hadoopJobBuilder_ != null) {
        hadoopJobBuilder_.clear();
      }
      if (sparkJobBuilder_ != null) {
        sparkJobBuilder_.clear();
      }
      if (pysparkJobBuilder_ != null) {
        pysparkJobBuilder_.clear();
      }
      if (hiveJobBuilder_ != null) {
        hiveJobBuilder_.clear();
      }
      if (pigJobBuilder_ != null) {
        pigJobBuilder_.clear();
      }
      if (sparkRJobBuilder_ != null) {
        sparkRJobBuilder_.clear();
      }
      if (sparkSqlJobBuilder_ != null) {
        sparkSqlJobBuilder_.clear();
      }
      if (prestoJobBuilder_ != null) {
        prestoJobBuilder_.clear();
      }
      if (trinoJobBuilder_ != null) {
        trinoJobBuilder_.clear();
      }
      status_ = null;
      if (statusBuilder_ != null) {
        statusBuilder_.dispose();
        statusBuilder_ = null;
      }
      // Repeated status_history (bit 0x00001000): list mode vs. repeated-field-builder mode.
      if (statusHistoryBuilder_ == null) {
        statusHistory_ = java.util.Collections.emptyList();
      } else {
        statusHistory_ = null;
        statusHistoryBuilder_.clear();
      }
      bitField0_ = (bitField0_ & ~0x00001000);
      // Repeated yarn_applications (bit 0x00002000), same two-mode handling.
      if (yarnApplicationsBuilder_ == null) {
        yarnApplications_ = java.util.Collections.emptyList();
      } else {
        yarnApplications_ = null;
        yarnApplicationsBuilder_.clear();
      }
      bitField0_ = (bitField0_ & ~0x00002000);
      driverOutputResourceUri_ = "";
      driverControlFilesUri_ = "";
      internalGetMutableLabels().clear();
      scheduling_ = null;
      if (schedulingBuilder_ != null) {
        schedulingBuilder_.dispose();
        schedulingBuilder_ = null;
      }
      jobUuid_ = "";
      done_ = false;
      driverSchedulingConfig_ = null;
      if (driverSchedulingConfigBuilder_ != null) {
        driverSchedulingConfigBuilder_.dispose();
        driverSchedulingConfigBuilder_ = null;
      }
      typeJobCase_ = 0;
      typeJob_ = null;
      return this;
    }
2012 
2013     @java.lang.Override
getDescriptorForType()2014     public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() {
2015       return com.google.cloud.dataproc.v1.JobsProto
2016           .internal_static_google_cloud_dataproc_v1_Job_descriptor;
2017     }
2018 
2019     @java.lang.Override
getDefaultInstanceForType()2020     public com.google.cloud.dataproc.v1.Job getDefaultInstanceForType() {
2021       return com.google.cloud.dataproc.v1.Job.getDefaultInstance();
2022     }
2023 
2024     @java.lang.Override
build()2025     public com.google.cloud.dataproc.v1.Job build() {
2026       com.google.cloud.dataproc.v1.Job result = buildPartial();
2027       if (!result.isInitialized()) {
2028         throw newUninitializedMessageException(result);
2029       }
2030       return result;
2031     }
2032 
    @java.lang.Override
    public com.google.cloud.dataproc.v1.Job buildPartial() {
      // Assembles the message without an isInitialized() check. The three build steps must run
      // in this order: repeated fields first (they flip bitField0_ bits), then scalar/message
      // fields gated on bitField0_, then the type_job oneof.
      com.google.cloud.dataproc.v1.Job result = new com.google.cloud.dataproc.v1.Job(this);
      buildPartialRepeatedFields(result);
      if (bitField0_ != 0) {
        buildPartial0(result);
      }
      buildPartialOneofs(result);
      onBuilt();
      return result;
    }
2044 
    // Copies the two repeated fields (status_history, yarn_applications) into the result.
    // In list mode the builder's list is frozen (wrapped unmodifiable) and shared with the
    // result, clearing the "mutable" bit so later builder edits re-copy; in builder mode the
    // repeated-field builder produces the list.
    private void buildPartialRepeatedFields(com.google.cloud.dataproc.v1.Job result) {
      if (statusHistoryBuilder_ == null) {
        if (((bitField0_ & 0x00001000) != 0)) {
          statusHistory_ = java.util.Collections.unmodifiableList(statusHistory_);
          bitField0_ = (bitField0_ & ~0x00001000);
        }
        result.statusHistory_ = statusHistory_;
      } else {
        result.statusHistory_ = statusHistoryBuilder_.build();
      }
      if (yarnApplicationsBuilder_ == null) {
        if (((bitField0_ & 0x00002000) != 0)) {
          yarnApplications_ = java.util.Collections.unmodifiableList(yarnApplications_);
          bitField0_ = (bitField0_ & ~0x00002000);
        }
        result.yarnApplications_ = yarnApplications_;
      } else {
        result.yarnApplications_ = yarnApplicationsBuilder_.build();
      }
    }
2065 
    // Copies singular fields into the result, one has-bit of bitField0_ per field; only fields
    // whose bit is set are written. Message fields prefer the live nested builder's output over
    // the cached message.
    private void buildPartial0(com.google.cloud.dataproc.v1.Job result) {
      int from_bitField0_ = bitField0_;
      if (((from_bitField0_ & 0x00000001) != 0)) {
        result.reference_ = referenceBuilder_ == null ? reference_ : referenceBuilder_.build();
      }
      if (((from_bitField0_ & 0x00000002) != 0)) {
        result.placement_ = placementBuilder_ == null ? placement_ : placementBuilder_.build();
      }
      if (((from_bitField0_ & 0x00000800) != 0)) {
        result.status_ = statusBuilder_ == null ? status_ : statusBuilder_.build();
      }
      if (((from_bitField0_ & 0x00004000) != 0)) {
        result.driverOutputResourceUri_ = driverOutputResourceUri_;
      }
      if (((from_bitField0_ & 0x00008000) != 0)) {
        result.driverControlFilesUri_ = driverControlFilesUri_;
      }
      if (((from_bitField0_ & 0x00010000) != 0)) {
        // The labels map is shared with the result and frozen in place.
        result.labels_ = internalGetLabels();
        result.labels_.makeImmutable();
      }
      if (((from_bitField0_ & 0x00020000) != 0)) {
        result.scheduling_ = schedulingBuilder_ == null ? scheduling_ : schedulingBuilder_.build();
      }
      if (((from_bitField0_ & 0x00040000) != 0)) {
        result.jobUuid_ = jobUuid_;
      }
      if (((from_bitField0_ & 0x00080000) != 0)) {
        result.done_ = done_;
      }
      if (((from_bitField0_ & 0x00100000) != 0)) {
        result.driverSchedulingConfig_ =
            driverSchedulingConfigBuilder_ == null
                ? driverSchedulingConfig_
                : driverSchedulingConfigBuilder_.build();
      }
    }
2103 
buildPartialOneofs(com.google.cloud.dataproc.v1.Job result)2104     private void buildPartialOneofs(com.google.cloud.dataproc.v1.Job result) {
2105       result.typeJobCase_ = typeJobCase_;
2106       result.typeJob_ = this.typeJob_;
2107       if (typeJobCase_ == 3 && hadoopJobBuilder_ != null) {
2108         result.typeJob_ = hadoopJobBuilder_.build();
2109       }
2110       if (typeJobCase_ == 4 && sparkJobBuilder_ != null) {
2111         result.typeJob_ = sparkJobBuilder_.build();
2112       }
2113       if (typeJobCase_ == 5 && pysparkJobBuilder_ != null) {
2114         result.typeJob_ = pysparkJobBuilder_.build();
2115       }
2116       if (typeJobCase_ == 6 && hiveJobBuilder_ != null) {
2117         result.typeJob_ = hiveJobBuilder_.build();
2118       }
2119       if (typeJobCase_ == 7 && pigJobBuilder_ != null) {
2120         result.typeJob_ = pigJobBuilder_.build();
2121       }
2122       if (typeJobCase_ == 21 && sparkRJobBuilder_ != null) {
2123         result.typeJob_ = sparkRJobBuilder_.build();
2124       }
2125       if (typeJobCase_ == 12 && sparkSqlJobBuilder_ != null) {
2126         result.typeJob_ = sparkSqlJobBuilder_.build();
2127       }
2128       if (typeJobCase_ == 23 && prestoJobBuilder_ != null) {
2129         result.typeJob_ = prestoJobBuilder_.build();
2130       }
2131       if (typeJobCase_ == 28 && trinoJobBuilder_ != null) {
2132         result.typeJob_ = trinoJobBuilder_.build();
2133       }
2134     }
2135 
    // Deep-copies this builder via the generated-message base implementation.
    @java.lang.Override
    public Builder clone() {
      return super.clone();
    }
2140 
    // Reflection API: sets a field identified by its descriptor.
    @java.lang.Override
    public Builder setField(
        com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
      return super.setField(field, value);
    }
2146 
    // Reflection API: clears a field identified by its descriptor.
    @java.lang.Override
    public Builder clearField(com.google.protobuf.Descriptors.FieldDescriptor field) {
      return super.clearField(field);
    }
2151 
    // Reflection API: clears whichever member of the given oneof is set.
    @java.lang.Override
    public Builder clearOneof(com.google.protobuf.Descriptors.OneofDescriptor oneof) {
      return super.clearOneof(oneof);
    }
2156 
    // Reflection API: replaces one element of a repeated field.
    @java.lang.Override
    public Builder setRepeatedField(
        com.google.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) {
      return super.setRepeatedField(field, index, value);
    }
2162 
    // Reflection API: appends one element to a repeated field.
    @java.lang.Override
    public Builder addRepeatedField(
        com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
      return super.addRepeatedField(field, value);
    }
2168 
2169     @java.lang.Override
mergeFrom(com.google.protobuf.Message other)2170     public Builder mergeFrom(com.google.protobuf.Message other) {
2171       if (other instanceof com.google.cloud.dataproc.v1.Job) {
2172         return mergeFrom((com.google.cloud.dataproc.v1.Job) other);
2173       } else {
2174         super.mergeFrom(other);
2175         return this;
2176       }
2177     }
2178 
    /**
     * Merges {@code other} into this builder: singular message fields merge recursively,
     * repeated fields concatenate, strings/bool overwrite only when {@code other} carries a
     * non-default value, labels merge key-by-key, and the type_job oneof adopts whichever
     * member {@code other} has set. No-op when {@code other} is the default instance.
     *
     * @param other the job to merge from
     * @return this builder, for chaining
     */
    public Builder mergeFrom(com.google.cloud.dataproc.v1.Job other) {
      if (other == com.google.cloud.dataproc.v1.Job.getDefaultInstance()) return this;
      if (other.hasReference()) {
        mergeReference(other.getReference());
      }
      if (other.hasPlacement()) {
        mergePlacement(other.getPlacement());
      }
      if (other.hasStatus()) {
        mergeStatus(other.getStatus());
      }
      // status_history: in list mode, adopt other's list when ours is empty (clearing the
      // mutable bit so it is copied before any edit); otherwise append.
      if (statusHistoryBuilder_ == null) {
        if (!other.statusHistory_.isEmpty()) {
          if (statusHistory_.isEmpty()) {
            statusHistory_ = other.statusHistory_;
            bitField0_ = (bitField0_ & ~0x00001000);
          } else {
            ensureStatusHistoryIsMutable();
            statusHistory_.addAll(other.statusHistory_);
          }
          onChanged();
        }
      } else {
        // Builder mode: if our builder is empty, drop it and adopt other's list directly
        // (re-creating the builder only when alwaysUseFieldBuilders is on); otherwise append.
        if (!other.statusHistory_.isEmpty()) {
          if (statusHistoryBuilder_.isEmpty()) {
            statusHistoryBuilder_.dispose();
            statusHistoryBuilder_ = null;
            statusHistory_ = other.statusHistory_;
            bitField0_ = (bitField0_ & ~0x00001000);
            statusHistoryBuilder_ =
                com.google.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders
                    ? getStatusHistoryFieldBuilder()
                    : null;
          } else {
            statusHistoryBuilder_.addAllMessages(other.statusHistory_);
          }
        }
      }
      // yarn_applications: same two-mode merge as status_history.
      if (yarnApplicationsBuilder_ == null) {
        if (!other.yarnApplications_.isEmpty()) {
          if (yarnApplications_.isEmpty()) {
            yarnApplications_ = other.yarnApplications_;
            bitField0_ = (bitField0_ & ~0x00002000);
          } else {
            ensureYarnApplicationsIsMutable();
            yarnApplications_.addAll(other.yarnApplications_);
          }
          onChanged();
        }
      } else {
        if (!other.yarnApplications_.isEmpty()) {
          if (yarnApplicationsBuilder_.isEmpty()) {
            yarnApplicationsBuilder_.dispose();
            yarnApplicationsBuilder_ = null;
            yarnApplications_ = other.yarnApplications_;
            bitField0_ = (bitField0_ & ~0x00002000);
            yarnApplicationsBuilder_ =
                com.google.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders
                    ? getYarnApplicationsFieldBuilder()
                    : null;
          } else {
            yarnApplicationsBuilder_.addAllMessages(other.yarnApplications_);
          }
        }
      }
      if (!other.getDriverOutputResourceUri().isEmpty()) {
        driverOutputResourceUri_ = other.driverOutputResourceUri_;
        bitField0_ |= 0x00004000;
        onChanged();
      }
      if (!other.getDriverControlFilesUri().isEmpty()) {
        driverControlFilesUri_ = other.driverControlFilesUri_;
        bitField0_ |= 0x00008000;
        onChanged();
      }
      // labels: entries from other overwrite same-key entries here.
      internalGetMutableLabels().mergeFrom(other.internalGetLabels());
      bitField0_ |= 0x00010000;
      if (other.hasScheduling()) {
        mergeScheduling(other.getScheduling());
      }
      if (!other.getJobUuid().isEmpty()) {
        jobUuid_ = other.jobUuid_;
        bitField0_ |= 0x00040000;
        onChanged();
      }
      if (other.getDone() != false) {
        setDone(other.getDone());
      }
      if (other.hasDriverSchedulingConfig()) {
        mergeDriverSchedulingConfig(other.getDriverSchedulingConfig());
      }
      // type_job oneof: merge whichever member other has set (per-member merge handles the
      // case where this builder has the same member set).
      switch (other.getTypeJobCase()) {
        case HADOOP_JOB:
          {
            mergeHadoopJob(other.getHadoopJob());
            break;
          }
        case SPARK_JOB:
          {
            mergeSparkJob(other.getSparkJob());
            break;
          }
        case PYSPARK_JOB:
          {
            mergePysparkJob(other.getPysparkJob());
            break;
          }
        case HIVE_JOB:
          {
            mergeHiveJob(other.getHiveJob());
            break;
          }
        case PIG_JOB:
          {
            mergePigJob(other.getPigJob());
            break;
          }
        case SPARK_R_JOB:
          {
            mergeSparkRJob(other.getSparkRJob());
            break;
          }
        case SPARK_SQL_JOB:
          {
            mergeSparkSqlJob(other.getSparkSqlJob());
            break;
          }
        case PRESTO_JOB:
          {
            mergePrestoJob(other.getPrestoJob());
            break;
          }
        case TRINO_JOB:
          {
            mergeTrinoJob(other.getTrinoJob());
            break;
          }
        case TYPEJOB_NOT_SET:
          {
            break;
          }
      }
      this.mergeUnknownFields(other.getUnknownFields());
      onChanged();
      return this;
    }
2325 
    // Proto3 message with no required fields: any state is initialized.
    @java.lang.Override
    public final boolean isInitialized() {
      return true;
    }
2330 
    /**
     * Parses wire-format bytes from {@code input} directly into this builder. Each case value
     * is the full tag (field number &lt;&lt; 3 | wire type); unrecognized tags are preserved as
     * unknown fields. {@code onChanged()} always fires, even on parse failure, because fields
     * read before the error remain set.
     *
     * @throws java.io.IOException if the stream is malformed or cannot be read
     */
    @java.lang.Override
    public Builder mergeFrom(
        com.google.protobuf.CodedInputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      if (extensionRegistry == null) {
        throw new java.lang.NullPointerException();
      }
      try {
        boolean done = false;
        while (!done) {
          int tag = input.readTag();
          switch (tag) {
            case 0:
              // Tag 0 marks end of stream.
              done = true;
              break;
            case 10:
              {
                input.readMessage(getReferenceFieldBuilder().getBuilder(), extensionRegistry);
                bitField0_ |= 0x00000001;
                break;
              } // case 10
            case 18:
              {
                input.readMessage(getPlacementFieldBuilder().getBuilder(), extensionRegistry);
                bitField0_ |= 0x00000002;
                break;
              } // case 18
            case 26:
              {
                // type_job oneof members record the active case instead of a has-bit.
                input.readMessage(getHadoopJobFieldBuilder().getBuilder(), extensionRegistry);
                typeJobCase_ = 3;
                break;
              } // case 26
            case 34:
              {
                input.readMessage(getSparkJobFieldBuilder().getBuilder(), extensionRegistry);
                typeJobCase_ = 4;
                break;
              } // case 34
            case 42:
              {
                input.readMessage(getPysparkJobFieldBuilder().getBuilder(), extensionRegistry);
                typeJobCase_ = 5;
                break;
              } // case 42
            case 50:
              {
                input.readMessage(getHiveJobFieldBuilder().getBuilder(), extensionRegistry);
                typeJobCase_ = 6;
                break;
              } // case 50
            case 58:
              {
                input.readMessage(getPigJobFieldBuilder().getBuilder(), extensionRegistry);
                typeJobCase_ = 7;
                break;
              } // case 58
            case 66:
              {
                input.readMessage(getStatusFieldBuilder().getBuilder(), extensionRegistry);
                bitField0_ |= 0x00000800;
                break;
              } // case 66
            case 74:
              {
                // Repeated yarn_applications: append each parsed element.
                com.google.cloud.dataproc.v1.YarnApplication m =
                    input.readMessage(
                        com.google.cloud.dataproc.v1.YarnApplication.parser(), extensionRegistry);
                if (yarnApplicationsBuilder_ == null) {
                  ensureYarnApplicationsIsMutable();
                  yarnApplications_.add(m);
                } else {
                  yarnApplicationsBuilder_.addMessage(m);
                }
                break;
              } // case 74
            case 98:
              {
                input.readMessage(getSparkSqlJobFieldBuilder().getBuilder(), extensionRegistry);
                typeJobCase_ = 12;
                break;
              } // case 98
            case 106:
              {
                // Repeated status_history: append each parsed element.
                com.google.cloud.dataproc.v1.JobStatus m =
                    input.readMessage(
                        com.google.cloud.dataproc.v1.JobStatus.parser(), extensionRegistry);
                if (statusHistoryBuilder_ == null) {
                  ensureStatusHistoryIsMutable();
                  statusHistory_.add(m);
                } else {
                  statusHistoryBuilder_.addMessage(m);
                }
                break;
              } // case 106
            case 122:
              {
                driverControlFilesUri_ = input.readStringRequireUtf8();
                bitField0_ |= 0x00008000;
                break;
              } // case 122
            case 138:
              {
                driverOutputResourceUri_ = input.readStringRequireUtf8();
                bitField0_ |= 0x00004000;
                break;
              } // case 138
            case 146:
              {
                // labels map entry: parsed as a key/value sub-message.
                com.google.protobuf.MapEntry<java.lang.String, java.lang.String> labels__ =
                    input.readMessage(
                        LabelsDefaultEntryHolder.defaultEntry.getParserForType(),
                        extensionRegistry);
                internalGetMutableLabels()
                    .getMutableMap()
                    .put(labels__.getKey(), labels__.getValue());
                bitField0_ |= 0x00010000;
                break;
              } // case 146
            case 162:
              {
                input.readMessage(getSchedulingFieldBuilder().getBuilder(), extensionRegistry);
                bitField0_ |= 0x00020000;
                break;
              } // case 162
            case 170:
              {
                input.readMessage(getSparkRJobFieldBuilder().getBuilder(), extensionRegistry);
                typeJobCase_ = 21;
                break;
              } // case 170
            case 178:
              {
                jobUuid_ = input.readStringRequireUtf8();
                bitField0_ |= 0x00040000;
                break;
              } // case 178
            case 186:
              {
                input.readMessage(getPrestoJobFieldBuilder().getBuilder(), extensionRegistry);
                typeJobCase_ = 23;
                break;
              } // case 186
            case 192:
              {
                done_ = input.readBool();
                bitField0_ |= 0x00080000;
                break;
              } // case 192
            case 218:
              {
                input.readMessage(
                    getDriverSchedulingConfigFieldBuilder().getBuilder(), extensionRegistry);
                bitField0_ |= 0x00100000;
                break;
              } // case 218
            case 226:
              {
                input.readMessage(getTrinoJobFieldBuilder().getBuilder(), extensionRegistry);
                typeJobCase_ = 28;
                break;
              } // case 226
            default:
              {
                if (!super.parseUnknownField(input, extensionRegistry, tag)) {
                  done = true; // was an endgroup tag
                }
                break;
              } // default:
          } // switch (tag)
        } // while (!done)
      } catch (com.google.protobuf.InvalidProtocolBufferException e) {
        throw e.unwrapIOException();
      } finally {
        onChanged();
      } // finally
      return this;
    }
2510 
    // Active member of the type_job oneof (field number of the set member, 0 = not set).
    private int typeJobCase_ = 0;
    // Value of the active type_job member; its runtime type depends on typeJobCase_.
    private java.lang.Object typeJob_;
2513 
getTypeJobCase()2514     public TypeJobCase getTypeJobCase() {
2515       return TypeJobCase.forNumber(typeJobCase_);
2516     }
2517 
clearTypeJob()2518     public Builder clearTypeJob() {
2519       typeJobCase_ = 0;
2520       typeJob_ = null;
2521       onChanged();
2522       return this;
2523     }
2524 
2525     private int bitField0_;
2526 
    // Cached value of the singular `reference` field; used when no nested builder is active.
    private com.google.cloud.dataproc.v1.JobReference reference_;
    // Lazily created nested builder for `reference`; when non-null it owns the field's state.
    private com.google.protobuf.SingleFieldBuilderV3<
            com.google.cloud.dataproc.v1.JobReference,
            com.google.cloud.dataproc.v1.JobReference.Builder,
            com.google.cloud.dataproc.v1.JobReferenceOrBuilder>
        referenceBuilder_;
    /**
     * Returns whether {@code reference} (proto field 1, optional
     * {@code .google.cloud.dataproc.v1.JobReference}) has been explicitly set.
     *
     * <pre>
     * Optional. The fully qualified reference to the job, which can be used to
     * obtain the equivalent REST path of the job resource. If this property
     * is not specified when a job is created, the server generates a
     * &lt;code&gt;job_id&lt;/code&gt;.
     * </pre>
     *
     * @return Whether the reference field is set.
     */
    public boolean hasReference() {
      return ((bitField0_ & 0x00000001) != 0);
    }
    /**
     * Returns the current {@code reference} value, or the default instance when
     * unset. Reads from the nested builder once one exists.
     *
     * @return The reference.
     */
    public com.google.cloud.dataproc.v1.JobReference getReference() {
      if (referenceBuilder_ == null) {
        return reference_ == null
            ? com.google.cloud.dataproc.v1.JobReference.getDefaultInstance()
            : reference_;
      } else {
        return referenceBuilder_.getMessage();
      }
    }
    /**
     * Sets {@code reference} to the given message and records presence.
     *
     * @throws NullPointerException if {@code value} is null.
     */
    public Builder setReference(com.google.cloud.dataproc.v1.JobReference value) {
      if (referenceBuilder_ == null) {
        if (value == null) {
          throw new NullPointerException();
        }
        reference_ = value;
      } else {
        referenceBuilder_.setMessage(value);
      }
      bitField0_ |= 0x00000001;
      onChanged();
      return this;
    }
    /**
     * Sets {@code reference} from a builder, building it immediately.
     */
    public Builder setReference(com.google.cloud.dataproc.v1.JobReference.Builder builderForValue) {
      if (referenceBuilder_ == null) {
        reference_ = builderForValue.build();
      } else {
        referenceBuilder_.setMessage(builderForValue.build());
      }
      bitField0_ |= 0x00000001;
      onChanged();
      return this;
    }
    /**
     * Merges {@code value} into {@code reference}. If a non-default value is
     * already present it is field-merged (proto merge semantics); otherwise the
     * incoming message simply replaces the stored value.
     */
    public Builder mergeReference(com.google.cloud.dataproc.v1.JobReference value) {
      if (referenceBuilder_ == null) {
        if (((bitField0_ & 0x00000001) != 0)
            && reference_ != null
            && reference_ != com.google.cloud.dataproc.v1.JobReference.getDefaultInstance()) {
          getReferenceBuilder().mergeFrom(value);
        } else {
          reference_ = value;
        }
      } else {
        referenceBuilder_.mergeFrom(value);
      }
      bitField0_ |= 0x00000001;
      onChanged();
      return this;
    }
    /**
     * Clears {@code reference}: drops the presence bit, the plain value, and
     * disposes any nested builder.
     */
    public Builder clearReference() {
      bitField0_ = (bitField0_ & ~0x00000001);
      reference_ = null;
      if (referenceBuilder_ != null) {
        referenceBuilder_.dispose();
        referenceBuilder_ = null;
      }
      onChanged();
      return this;
    }
    /**
     * Returns a mutable builder for {@code reference}, marking the field
     * present (mutation through the builder counts as setting it).
     */
    public com.google.cloud.dataproc.v1.JobReference.Builder getReferenceBuilder() {
      bitField0_ |= 0x00000001;
      onChanged();
      return getReferenceFieldBuilder().getBuilder();
    }
    /**
     * Read-only view of {@code reference} without forcing builder creation;
     * falls back to the default instance when unset.
     */
    public com.google.cloud.dataproc.v1.JobReferenceOrBuilder getReferenceOrBuilder() {
      if (referenceBuilder_ != null) {
        return referenceBuilder_.getMessageOrBuilder();
      } else {
        return reference_ == null
            ? com.google.cloud.dataproc.v1.JobReference.getDefaultInstance()
            : reference_;
      }
    }
    /**
     * Lazily creates the nested {@link com.google.protobuf.SingleFieldBuilderV3}
     * for {@code reference}. On first call the current plain value is handed to
     * the builder and {@code reference_} is nulled — the builder becomes the
     * single source of truth from then on.
     */
    private com.google.protobuf.SingleFieldBuilderV3<
            com.google.cloud.dataproc.v1.JobReference,
            com.google.cloud.dataproc.v1.JobReference.Builder,
            com.google.cloud.dataproc.v1.JobReferenceOrBuilder>
        getReferenceFieldBuilder() {
      if (referenceBuilder_ == null) {
        referenceBuilder_ =
            new com.google.protobuf.SingleFieldBuilderV3<
                com.google.cloud.dataproc.v1.JobReference,
                com.google.cloud.dataproc.v1.JobReference.Builder,
                com.google.cloud.dataproc.v1.JobReferenceOrBuilder>(
                getReference(), getParentForChildren(), isClean());
        reference_ = null;
      }
      return referenceBuilder_;
    }
2754 
    // Plain value for {@code placement} (field 2); ownership moves to
    // placementBuilder_ once a nested builder is requested.
    private com.google.cloud.dataproc.v1.JobPlacement placement_;
    // Lazily-created nested builder for {@code placement}.
    private com.google.protobuf.SingleFieldBuilderV3<
            com.google.cloud.dataproc.v1.JobPlacement,
            com.google.cloud.dataproc.v1.JobPlacement.Builder,
            com.google.cloud.dataproc.v1.JobPlacementOrBuilder>
        placementBuilder_;
    /**
     * Returns whether {@code placement} (proto field 2, required
     * {@code .google.cloud.dataproc.v1.JobPlacement}) has been explicitly set.
     *
     * <pre>
     * Required. Job information, including how, when, and where to
     * run the job.
     * </pre>
     *
     * @return Whether the placement field is set.
     */
    public boolean hasPlacement() {
      return ((bitField0_ & 0x00000002) != 0);
    }
    /**
     * Returns the current {@code placement} value, or the default instance when
     * unset. Reads from the nested builder once one exists.
     *
     * @return The placement.
     */
    public com.google.cloud.dataproc.v1.JobPlacement getPlacement() {
      if (placementBuilder_ == null) {
        return placement_ == null
            ? com.google.cloud.dataproc.v1.JobPlacement.getDefaultInstance()
            : placement_;
      } else {
        return placementBuilder_.getMessage();
      }
    }
    /**
     * Sets {@code placement} to the given message and records presence.
     *
     * @throws NullPointerException if {@code value} is null.
     */
    public Builder setPlacement(com.google.cloud.dataproc.v1.JobPlacement value) {
      if (placementBuilder_ == null) {
        if (value == null) {
          throw new NullPointerException();
        }
        placement_ = value;
      } else {
        placementBuilder_.setMessage(value);
      }
      bitField0_ |= 0x00000002;
      onChanged();
      return this;
    }
    /**
     * Sets {@code placement} from a builder, building it immediately.
     */
    public Builder setPlacement(com.google.cloud.dataproc.v1.JobPlacement.Builder builderForValue) {
      if (placementBuilder_ == null) {
        placement_ = builderForValue.build();
      } else {
        placementBuilder_.setMessage(builderForValue.build());
      }
      bitField0_ |= 0x00000002;
      onChanged();
      return this;
    }
    /**
     * Merges {@code value} into {@code placement}. If a non-default value is
     * already present it is field-merged (proto merge semantics); otherwise the
     * incoming message simply replaces the stored value.
     */
    public Builder mergePlacement(com.google.cloud.dataproc.v1.JobPlacement value) {
      if (placementBuilder_ == null) {
        if (((bitField0_ & 0x00000002) != 0)
            && placement_ != null
            && placement_ != com.google.cloud.dataproc.v1.JobPlacement.getDefaultInstance()) {
          getPlacementBuilder().mergeFrom(value);
        } else {
          placement_ = value;
        }
      } else {
        placementBuilder_.mergeFrom(value);
      }
      bitField0_ |= 0x00000002;
      onChanged();
      return this;
    }
    /**
     * Clears {@code placement}: drops the presence bit, the plain value, and
     * disposes any nested builder.
     */
    public Builder clearPlacement() {
      bitField0_ = (bitField0_ & ~0x00000002);
      placement_ = null;
      if (placementBuilder_ != null) {
        placementBuilder_.dispose();
        placementBuilder_ = null;
      }
      onChanged();
      return this;
    }
    /**
     * Returns a mutable builder for {@code placement}, marking the field
     * present (mutation through the builder counts as setting it).
     */
    public com.google.cloud.dataproc.v1.JobPlacement.Builder getPlacementBuilder() {
      bitField0_ |= 0x00000002;
      onChanged();
      return getPlacementFieldBuilder().getBuilder();
    }
    /**
     * Read-only view of {@code placement} without forcing builder creation;
     * falls back to the default instance when unset.
     */
    public com.google.cloud.dataproc.v1.JobPlacementOrBuilder getPlacementOrBuilder() {
      if (placementBuilder_ != null) {
        return placementBuilder_.getMessageOrBuilder();
      } else {
        return placement_ == null
            ? com.google.cloud.dataproc.v1.JobPlacement.getDefaultInstance()
            : placement_;
      }
    }
    /**
     * Lazily creates the nested {@link com.google.protobuf.SingleFieldBuilderV3}
     * for {@code placement}. On first call the current plain value is handed to
     * the builder and {@code placement_} is nulled — the builder becomes the
     * single source of truth from then on.
     */
    private com.google.protobuf.SingleFieldBuilderV3<
            com.google.cloud.dataproc.v1.JobPlacement,
            com.google.cloud.dataproc.v1.JobPlacement.Builder,
            com.google.cloud.dataproc.v1.JobPlacementOrBuilder>
        getPlacementFieldBuilder() {
      if (placementBuilder_ == null) {
        placementBuilder_ =
            new com.google.protobuf.SingleFieldBuilderV3<
                com.google.cloud.dataproc.v1.JobPlacement,
                com.google.cloud.dataproc.v1.JobPlacement.Builder,
                com.google.cloud.dataproc.v1.JobPlacementOrBuilder>(
                getPlacement(), getParentForChildren(), isClean());
        placement_ = null;
      }
      return placementBuilder_;
    }
2964 
    // Lazily-created nested builder for the {@code hadoop_job} oneof case
    // (field 3). The value itself lives in typeJob_ until the builder exists.
    private com.google.protobuf.SingleFieldBuilderV3<
            com.google.cloud.dataproc.v1.HadoopJob,
            com.google.cloud.dataproc.v1.HadoopJob.Builder,
            com.google.cloud.dataproc.v1.HadoopJobOrBuilder>
        hadoopJobBuilder_;
    /**
     * Returns whether the {@code type_job} oneof currently holds
     * {@code hadoop_job} (field 3).
     *
     * <pre>
     * Optional. Job is a Hadoop job.
     * </pre>
     *
     * @return Whether the hadoopJob field is set.
     */
    @java.lang.Override
    public boolean hasHadoopJob() {
      return typeJobCase_ == 3;
    }
    /**
     * Returns the {@code hadoop_job} value when that oneof case is set,
     * otherwise the default instance.
     *
     * @return The hadoopJob.
     */
    @java.lang.Override
    public com.google.cloud.dataproc.v1.HadoopJob getHadoopJob() {
      if (hadoopJobBuilder_ == null) {
        if (typeJobCase_ == 3) {
          return (com.google.cloud.dataproc.v1.HadoopJob) typeJob_;
        }
        return com.google.cloud.dataproc.v1.HadoopJob.getDefaultInstance();
      } else {
        if (typeJobCase_ == 3) {
          return hadoopJobBuilder_.getMessage();
        }
        return com.google.cloud.dataproc.v1.HadoopJob.getDefaultInstance();
      }
    }
    /**
     * Sets the oneof to {@code hadoop_job} with the given message, replacing any
     * other case.
     *
     * @throws NullPointerException if {@code value} is null.
     */
    public Builder setHadoopJob(com.google.cloud.dataproc.v1.HadoopJob value) {
      if (hadoopJobBuilder_ == null) {
        if (value == null) {
          throw new NullPointerException();
        }
        typeJob_ = value;
        onChanged();
      } else {
        hadoopJobBuilder_.setMessage(value);
      }
      typeJobCase_ = 3;
      return this;
    }
    /**
     * Sets the oneof to {@code hadoop_job} from a builder, building it
     * immediately.
     */
    public Builder setHadoopJob(com.google.cloud.dataproc.v1.HadoopJob.Builder builderForValue) {
      if (hadoopJobBuilder_ == null) {
        typeJob_ = builderForValue.build();
        onChanged();
      } else {
        hadoopJobBuilder_.setMessage(builderForValue.build());
      }
      typeJobCase_ = 3;
      return this;
    }
    /**
     * Merges {@code value} into {@code hadoop_job}. If this case already holds a
     * non-default message the two are field-merged; otherwise the incoming
     * message replaces whatever case was set. Ends with the oneof on case 3.
     */
    public Builder mergeHadoopJob(com.google.cloud.dataproc.v1.HadoopJob value) {
      if (hadoopJobBuilder_ == null) {
        if (typeJobCase_ == 3
            && typeJob_ != com.google.cloud.dataproc.v1.HadoopJob.getDefaultInstance()) {
          typeJob_ =
              com.google.cloud.dataproc.v1.HadoopJob.newBuilder(
                      (com.google.cloud.dataproc.v1.HadoopJob) typeJob_)
                  .mergeFrom(value)
                  .buildPartial();
        } else {
          typeJob_ = value;
        }
        onChanged();
      } else {
        if (typeJobCase_ == 3) {
          hadoopJobBuilder_.mergeFrom(value);
        } else {
          hadoopJobBuilder_.setMessage(value);
        }
      }
      typeJobCase_ = 3;
      return this;
    }
    /**
     * Clears {@code hadoop_job} only if it is the currently-set oneof case;
     * other cases are left untouched.
     */
    public Builder clearHadoopJob() {
      if (hadoopJobBuilder_ == null) {
        if (typeJobCase_ == 3) {
          typeJobCase_ = 0;
          typeJob_ = null;
          onChanged();
        }
      } else {
        if (typeJobCase_ == 3) {
          typeJobCase_ = 0;
          typeJob_ = null;
        }
        hadoopJobBuilder_.clear();
      }
      return this;
    }
    /**
     * Returns a mutable builder for {@code hadoop_job}, forcing the oneof onto
     * case 3 (see {@link #getHadoopJobFieldBuilder}).
     */
    public com.google.cloud.dataproc.v1.HadoopJob.Builder getHadoopJobBuilder() {
      return getHadoopJobFieldBuilder().getBuilder();
    }
    /**
     * Read-only view of {@code hadoop_job} without forcing builder creation;
     * falls back to the default instance when another case is set.
     */
    @java.lang.Override
    public com.google.cloud.dataproc.v1.HadoopJobOrBuilder getHadoopJobOrBuilder() {
      if ((typeJobCase_ == 3) && (hadoopJobBuilder_ != null)) {
        return hadoopJobBuilder_.getMessageOrBuilder();
      } else {
        if (typeJobCase_ == 3) {
          return (com.google.cloud.dataproc.v1.HadoopJob) typeJob_;
        }
        return com.google.cloud.dataproc.v1.HadoopJob.getDefaultInstance();
      }
    }
    /**
     * Lazily creates the nested field builder for {@code hadoop_job}, seeding it
     * with the current case-3 value (or the default instance when another case
     * is set). Note: this switches the oneof to case 3 as a side effect.
     */
    private com.google.protobuf.SingleFieldBuilderV3<
            com.google.cloud.dataproc.v1.HadoopJob,
            com.google.cloud.dataproc.v1.HadoopJob.Builder,
            com.google.cloud.dataproc.v1.HadoopJobOrBuilder>
        getHadoopJobFieldBuilder() {
      if (hadoopJobBuilder_ == null) {
        if (!(typeJobCase_ == 3)) {
          typeJob_ = com.google.cloud.dataproc.v1.HadoopJob.getDefaultInstance();
        }
        hadoopJobBuilder_ =
            new com.google.protobuf.SingleFieldBuilderV3<
                com.google.cloud.dataproc.v1.HadoopJob,
                com.google.cloud.dataproc.v1.HadoopJob.Builder,
                com.google.cloud.dataproc.v1.HadoopJobOrBuilder>(
                (com.google.cloud.dataproc.v1.HadoopJob) typeJob_,
                getParentForChildren(),
                isClean());
        typeJob_ = null;
      }
      typeJobCase_ = 3;
      onChanged();
      return hadoopJobBuilder_;
    }
3190 
    // Lazily-created nested builder for the {@code spark_job} oneof case
    // (field 4). The value itself lives in typeJob_ until the builder exists.
    private com.google.protobuf.SingleFieldBuilderV3<
            com.google.cloud.dataproc.v1.SparkJob,
            com.google.cloud.dataproc.v1.SparkJob.Builder,
            com.google.cloud.dataproc.v1.SparkJobOrBuilder>
        sparkJobBuilder_;
    /**
     * Returns whether the {@code type_job} oneof currently holds
     * {@code spark_job} (field 4).
     *
     * <pre>
     * Optional. Job is a Spark job.
     * </pre>
     *
     * @return Whether the sparkJob field is set.
     */
    @java.lang.Override
    public boolean hasSparkJob() {
      return typeJobCase_ == 4;
    }
    /**
     * Returns the {@code spark_job} value when that oneof case is set,
     * otherwise the default instance.
     *
     * @return The sparkJob.
     */
    @java.lang.Override
    public com.google.cloud.dataproc.v1.SparkJob getSparkJob() {
      if (sparkJobBuilder_ == null) {
        if (typeJobCase_ == 4) {
          return (com.google.cloud.dataproc.v1.SparkJob) typeJob_;
        }
        return com.google.cloud.dataproc.v1.SparkJob.getDefaultInstance();
      } else {
        if (typeJobCase_ == 4) {
          return sparkJobBuilder_.getMessage();
        }
        return com.google.cloud.dataproc.v1.SparkJob.getDefaultInstance();
      }
    }
    /**
     * Sets the oneof to {@code spark_job} with the given message, replacing any
     * other case.
     *
     * @throws NullPointerException if {@code value} is null.
     */
    public Builder setSparkJob(com.google.cloud.dataproc.v1.SparkJob value) {
      if (sparkJobBuilder_ == null) {
        if (value == null) {
          throw new NullPointerException();
        }
        typeJob_ = value;
        onChanged();
      } else {
        sparkJobBuilder_.setMessage(value);
      }
      typeJobCase_ = 4;
      return this;
    }
    /**
     * Sets the oneof to {@code spark_job} from a builder, building it
     * immediately.
     */
    public Builder setSparkJob(com.google.cloud.dataproc.v1.SparkJob.Builder builderForValue) {
      if (sparkJobBuilder_ == null) {
        typeJob_ = builderForValue.build();
        onChanged();
      } else {
        sparkJobBuilder_.setMessage(builderForValue.build());
      }
      typeJobCase_ = 4;
      return this;
    }
    /**
     * Merges {@code value} into {@code spark_job}. If this case already holds a
     * non-default message the two are field-merged; otherwise the incoming
     * message replaces whatever case was set. Ends with the oneof on case 4.
     */
    public Builder mergeSparkJob(com.google.cloud.dataproc.v1.SparkJob value) {
      if (sparkJobBuilder_ == null) {
        if (typeJobCase_ == 4
            && typeJob_ != com.google.cloud.dataproc.v1.SparkJob.getDefaultInstance()) {
          typeJob_ =
              com.google.cloud.dataproc.v1.SparkJob.newBuilder(
                      (com.google.cloud.dataproc.v1.SparkJob) typeJob_)
                  .mergeFrom(value)
                  .buildPartial();
        } else {
          typeJob_ = value;
        }
        onChanged();
      } else {
        if (typeJobCase_ == 4) {
          sparkJobBuilder_.mergeFrom(value);
        } else {
          sparkJobBuilder_.setMessage(value);
        }
      }
      typeJobCase_ = 4;
      return this;
    }
    /**
     * Clears {@code spark_job} only if it is the currently-set oneof case;
     * other cases are left untouched.
     */
    public Builder clearSparkJob() {
      if (sparkJobBuilder_ == null) {
        if (typeJobCase_ == 4) {
          typeJobCase_ = 0;
          typeJob_ = null;
          onChanged();
        }
      } else {
        if (typeJobCase_ == 4) {
          typeJobCase_ = 0;
          typeJob_ = null;
        }
        sparkJobBuilder_.clear();
      }
      return this;
    }
    /**
     * Returns a mutable builder for {@code spark_job}, forcing the oneof onto
     * case 4 (see {@link #getSparkJobFieldBuilder}).
     */
    public com.google.cloud.dataproc.v1.SparkJob.Builder getSparkJobBuilder() {
      return getSparkJobFieldBuilder().getBuilder();
    }
    /**
     * Read-only view of {@code spark_job} without forcing builder creation;
     * falls back to the default instance when another case is set.
     */
    @java.lang.Override
    public com.google.cloud.dataproc.v1.SparkJobOrBuilder getSparkJobOrBuilder() {
      if ((typeJobCase_ == 4) && (sparkJobBuilder_ != null)) {
        return sparkJobBuilder_.getMessageOrBuilder();
      } else {
        if (typeJobCase_ == 4) {
          return (com.google.cloud.dataproc.v1.SparkJob) typeJob_;
        }
        return com.google.cloud.dataproc.v1.SparkJob.getDefaultInstance();
      }
    }
    /**
     * Lazily creates the nested field builder for {@code spark_job}, seeding it
     * with the current case-4 value (or the default instance when another case
     * is set). Note: this switches the oneof to case 4 as a side effect.
     */
    private com.google.protobuf.SingleFieldBuilderV3<
            com.google.cloud.dataproc.v1.SparkJob,
            com.google.cloud.dataproc.v1.SparkJob.Builder,
            com.google.cloud.dataproc.v1.SparkJobOrBuilder>
        getSparkJobFieldBuilder() {
      if (sparkJobBuilder_ == null) {
        if (!(typeJobCase_ == 4)) {
          typeJob_ = com.google.cloud.dataproc.v1.SparkJob.getDefaultInstance();
        }
        sparkJobBuilder_ =
            new com.google.protobuf.SingleFieldBuilderV3<
                com.google.cloud.dataproc.v1.SparkJob,
                com.google.cloud.dataproc.v1.SparkJob.Builder,
                com.google.cloud.dataproc.v1.SparkJobOrBuilder>(
                (com.google.cloud.dataproc.v1.SparkJob) typeJob_,
                getParentForChildren(),
                isClean());
        typeJob_ = null;
      }
      typeJobCase_ = 4;
      onChanged();
      return sparkJobBuilder_;
    }
3416 
3417     private com.google.protobuf.SingleFieldBuilderV3<
3418             com.google.cloud.dataproc.v1.PySparkJob,
3419             com.google.cloud.dataproc.v1.PySparkJob.Builder,
3420             com.google.cloud.dataproc.v1.PySparkJobOrBuilder>
3421         pysparkJobBuilder_;
3422     /**
3423      *
3424      *
3425      * <pre>
3426      * Optional. Job is a PySpark job.
3427      * </pre>
3428      *
3429      * <code>
3430      * .google.cloud.dataproc.v1.PySparkJob pyspark_job = 5 [(.google.api.field_behavior) = OPTIONAL];
3431      * </code>
3432      *
3433      * @return Whether the pysparkJob field is set.
3434      */
    @java.lang.Override
    public boolean hasPysparkJob() {
      // The type_job oneof holds a PySparkJob exactly when the case discriminant is 5.
      return typeJobCase_ == 5;
    }
3439     /**
3440      *
3441      *
3442      * <pre>
3443      * Optional. Job is a PySpark job.
3444      * </pre>
3445      *
3446      * <code>
3447      * .google.cloud.dataproc.v1.PySparkJob pyspark_job = 5 [(.google.api.field_behavior) = OPTIONAL];
3448      * </code>
3449      *
3450      * @return The pysparkJob.
3451      */
    @java.lang.Override
    public com.google.cloud.dataproc.v1.PySparkJob getPysparkJob() {
      if (pysparkJobBuilder_ == null) {
        // No nested builder yet: the message (if set) lives directly in typeJob_.
        if (typeJobCase_ == 5) {
          return (com.google.cloud.dataproc.v1.PySparkJob) typeJob_;
        }
        return com.google.cloud.dataproc.v1.PySparkJob.getDefaultInstance();
      } else {
        // A nested builder exists and owns the value while this case is active.
        if (typeJobCase_ == 5) {
          return pysparkJobBuilder_.getMessage();
        }
        return com.google.cloud.dataproc.v1.PySparkJob.getDefaultInstance();
      }
    }
3466     /**
3467      *
3468      *
3469      * <pre>
3470      * Optional. Job is a PySpark job.
3471      * </pre>
3472      *
3473      * <code>
3474      * .google.cloud.dataproc.v1.PySparkJob pyspark_job = 5 [(.google.api.field_behavior) = OPTIONAL];
3475      * </code>
3476      */
    public Builder setPysparkJob(com.google.cloud.dataproc.v1.PySparkJob value) {
      if (pysparkJobBuilder_ == null) {
        if (value == null) {
          throw new NullPointerException();
        }
        typeJob_ = value;
        onChanged();
      } else {
        pysparkJobBuilder_.setMessage(value);
      }
      // Selecting this field implicitly clears any other active type_job case.
      typeJobCase_ = 5;
      return this;
    }
3490     /**
3491      *
3492      *
3493      * <pre>
3494      * Optional. Job is a PySpark job.
3495      * </pre>
3496      *
3497      * <code>
3498      * .google.cloud.dataproc.v1.PySparkJob pyspark_job = 5 [(.google.api.field_behavior) = OPTIONAL];
3499      * </code>
3500      */
    public Builder setPysparkJob(com.google.cloud.dataproc.v1.PySparkJob.Builder builderForValue) {
      if (pysparkJobBuilder_ == null) {
        // Snapshot the caller's builder state into the oneof slot.
        typeJob_ = builderForValue.build();
        onChanged();
      } else {
        pysparkJobBuilder_.setMessage(builderForValue.build());
      }
      typeJobCase_ = 5;
      return this;
    }
3511     /**
3512      *
3513      *
3514      * <pre>
3515      * Optional. Job is a PySpark job.
3516      * </pre>
3517      *
3518      * <code>
3519      * .google.cloud.dataproc.v1.PySparkJob pyspark_job = 5 [(.google.api.field_behavior) = OPTIONAL];
3520      * </code>
3521      */
    public Builder mergePysparkJob(com.google.cloud.dataproc.v1.PySparkJob value) {
      if (pysparkJobBuilder_ == null) {
        // Reference comparison is intentional: the default instance is a singleton,
        // so '!=' cheaply detects "already holds a non-default PySparkJob".
        if (typeJobCase_ == 5
            && typeJob_ != com.google.cloud.dataproc.v1.PySparkJob.getDefaultInstance()) {
          // Merge into the existing message field-by-field.
          typeJob_ =
              com.google.cloud.dataproc.v1.PySparkJob.newBuilder(
                      (com.google.cloud.dataproc.v1.PySparkJob) typeJob_)
                  .mergeFrom(value)
                  .buildPartial();
        } else {
          typeJob_ = value;
        }
        onChanged();
      } else {
        if (typeJobCase_ == 5) {
          pysparkJobBuilder_.mergeFrom(value);
        } else {
          // A different case was active: replace rather than merge.
          pysparkJobBuilder_.setMessage(value);
        }
      }
      typeJobCase_ = 5;
      return this;
    }
3545     /**
3546      *
3547      *
3548      * <pre>
3549      * Optional. Job is a PySpark job.
3550      * </pre>
3551      *
3552      * <code>
3553      * .google.cloud.dataproc.v1.PySparkJob pyspark_job = 5 [(.google.api.field_behavior) = OPTIONAL];
3554      * </code>
3555      */
    public Builder clearPysparkJob() {
      if (pysparkJobBuilder_ == null) {
        // Only clear if this field was the active oneof case.
        if (typeJobCase_ == 5) {
          typeJobCase_ = 0;
          typeJob_ = null;
          onChanged();
        }
      } else {
        // Reset the case marker only when this field was active, then
        // unconditionally clear the nested builder.
        if (typeJobCase_ == 5) {
          typeJobCase_ = 0;
          typeJob_ = null;
        }
        pysparkJobBuilder_.clear();
      }
      return this;
    }
3572     /**
3573      *
3574      *
3575      * <pre>
3576      * Optional. Job is a PySpark job.
3577      * </pre>
3578      *
3579      * <code>
3580      * .google.cloud.dataproc.v1.PySparkJob pyspark_job = 5 [(.google.api.field_behavior) = OPTIONAL];
3581      * </code>
3582      */
    public com.google.cloud.dataproc.v1.PySparkJob.Builder getPysparkJobBuilder() {
      // Forces creation of the field builder, which also selects this oneof case.
      return getPysparkJobFieldBuilder().getBuilder();
    }
3586     /**
3587      *
3588      *
3589      * <pre>
3590      * Optional. Job is a PySpark job.
3591      * </pre>
3592      *
3593      * <code>
3594      * .google.cloud.dataproc.v1.PySparkJob pyspark_job = 5 [(.google.api.field_behavior) = OPTIONAL];
3595      * </code>
3596      */
    @java.lang.Override
    public com.google.cloud.dataproc.v1.PySparkJobOrBuilder getPysparkJobOrBuilder() {
      if ((typeJobCase_ == 5) && (pysparkJobBuilder_ != null)) {
        return pysparkJobBuilder_.getMessageOrBuilder();
      } else {
        // Fall back to the stored message, or the immutable default when unset.
        if (typeJobCase_ == 5) {
          return (com.google.cloud.dataproc.v1.PySparkJob) typeJob_;
        }
        return com.google.cloud.dataproc.v1.PySparkJob.getDefaultInstance();
      }
    }
3608     /**
3609      *
3610      *
3611      * <pre>
3612      * Optional. Job is a PySpark job.
3613      * </pre>
3614      *
3615      * <code>
3616      * .google.cloud.dataproc.v1.PySparkJob pyspark_job = 5 [(.google.api.field_behavior) = OPTIONAL];
3617      * </code>
3618      */
    private com.google.protobuf.SingleFieldBuilderV3<
            com.google.cloud.dataproc.v1.PySparkJob,
            com.google.cloud.dataproc.v1.PySparkJob.Builder,
            com.google.cloud.dataproc.v1.PySparkJobOrBuilder>
        getPysparkJobFieldBuilder() {
      if (pysparkJobBuilder_ == null) {
        // Seed with the default instance when a different type_job case is active.
        if (!(typeJobCase_ == 5)) {
          typeJob_ = com.google.cloud.dataproc.v1.PySparkJob.getDefaultInstance();
        }
        pysparkJobBuilder_ =
            new com.google.protobuf.SingleFieldBuilderV3<
                com.google.cloud.dataproc.v1.PySparkJob,
                com.google.cloud.dataproc.v1.PySparkJob.Builder,
                com.google.cloud.dataproc.v1.PySparkJobOrBuilder>(
                (com.google.cloud.dataproc.v1.PySparkJob) typeJob_,
                getParentForChildren(),
                isClean());
        // Ownership of the value moves into the nested builder.
        typeJob_ = null;
      }
      // Accessing the field builder always selects the pyspark_job case (field 5).
      typeJobCase_ = 5;
      onChanged();
      return pysparkJobBuilder_;
    }
3642 
    // Lazily-created nested builder for the hive_job oneof case (field 6);
    // null until getHiveJobFieldBuilder() is first invoked.
    private com.google.protobuf.SingleFieldBuilderV3<
            com.google.cloud.dataproc.v1.HiveJob,
            com.google.cloud.dataproc.v1.HiveJob.Builder,
            com.google.cloud.dataproc.v1.HiveJobOrBuilder>
        hiveJobBuilder_;
3648     /**
3649      *
3650      *
3651      * <pre>
3652      * Optional. Job is a Hive job.
3653      * </pre>
3654      *
3655      * <code>
3656      * .google.cloud.dataproc.v1.HiveJob hive_job = 6 [(.google.api.field_behavior) = OPTIONAL];
3657      * </code>
3658      *
3659      * @return Whether the hiveJob field is set.
3660      */
    @java.lang.Override
    public boolean hasHiveJob() {
      // The type_job oneof holds a HiveJob exactly when the case discriminant is 6.
      return typeJobCase_ == 6;
    }
3665     /**
3666      *
3667      *
3668      * <pre>
3669      * Optional. Job is a Hive job.
3670      * </pre>
3671      *
3672      * <code>
3673      * .google.cloud.dataproc.v1.HiveJob hive_job = 6 [(.google.api.field_behavior) = OPTIONAL];
3674      * </code>
3675      *
3676      * @return The hiveJob.
3677      */
    @java.lang.Override
    public com.google.cloud.dataproc.v1.HiveJob getHiveJob() {
      if (hiveJobBuilder_ == null) {
        // No nested builder yet: the message (if set) lives directly in typeJob_.
        if (typeJobCase_ == 6) {
          return (com.google.cloud.dataproc.v1.HiveJob) typeJob_;
        }
        return com.google.cloud.dataproc.v1.HiveJob.getDefaultInstance();
      } else {
        // A nested builder exists and owns the value while this case is active.
        if (typeJobCase_ == 6) {
          return hiveJobBuilder_.getMessage();
        }
        return com.google.cloud.dataproc.v1.HiveJob.getDefaultInstance();
      }
    }
3692     /**
3693      *
3694      *
3695      * <pre>
3696      * Optional. Job is a Hive job.
3697      * </pre>
3698      *
3699      * <code>
3700      * .google.cloud.dataproc.v1.HiveJob hive_job = 6 [(.google.api.field_behavior) = OPTIONAL];
3701      * </code>
3702      */
    public Builder setHiveJob(com.google.cloud.dataproc.v1.HiveJob value) {
      if (hiveJobBuilder_ == null) {
        if (value == null) {
          throw new NullPointerException();
        }
        typeJob_ = value;
        onChanged();
      } else {
        hiveJobBuilder_.setMessage(value);
      }
      // Selecting this field implicitly clears any other active type_job case.
      typeJobCase_ = 6;
      return this;
    }
3716     /**
3717      *
3718      *
3719      * <pre>
3720      * Optional. Job is a Hive job.
3721      * </pre>
3722      *
3723      * <code>
3724      * .google.cloud.dataproc.v1.HiveJob hive_job = 6 [(.google.api.field_behavior) = OPTIONAL];
3725      * </code>
3726      */
    public Builder setHiveJob(com.google.cloud.dataproc.v1.HiveJob.Builder builderForValue) {
      if (hiveJobBuilder_ == null) {
        // Snapshot the caller's builder state into the oneof slot.
        typeJob_ = builderForValue.build();
        onChanged();
      } else {
        hiveJobBuilder_.setMessage(builderForValue.build());
      }
      typeJobCase_ = 6;
      return this;
    }
3737     /**
3738      *
3739      *
3740      * <pre>
3741      * Optional. Job is a Hive job.
3742      * </pre>
3743      *
3744      * <code>
3745      * .google.cloud.dataproc.v1.HiveJob hive_job = 6 [(.google.api.field_behavior) = OPTIONAL];
3746      * </code>
3747      */
    public Builder mergeHiveJob(com.google.cloud.dataproc.v1.HiveJob value) {
      if (hiveJobBuilder_ == null) {
        // Reference comparison is intentional: the default instance is a singleton,
        // so '!=' cheaply detects "already holds a non-default HiveJob".
        if (typeJobCase_ == 6
            && typeJob_ != com.google.cloud.dataproc.v1.HiveJob.getDefaultInstance()) {
          // Merge into the existing message field-by-field.
          typeJob_ =
              com.google.cloud.dataproc.v1.HiveJob.newBuilder(
                      (com.google.cloud.dataproc.v1.HiveJob) typeJob_)
                  .mergeFrom(value)
                  .buildPartial();
        } else {
          typeJob_ = value;
        }
        onChanged();
      } else {
        if (typeJobCase_ == 6) {
          hiveJobBuilder_.mergeFrom(value);
        } else {
          // A different case was active: replace rather than merge.
          hiveJobBuilder_.setMessage(value);
        }
      }
      typeJobCase_ = 6;
      return this;
    }
3771     /**
3772      *
3773      *
3774      * <pre>
3775      * Optional. Job is a Hive job.
3776      * </pre>
3777      *
3778      * <code>
3779      * .google.cloud.dataproc.v1.HiveJob hive_job = 6 [(.google.api.field_behavior) = OPTIONAL];
3780      * </code>
3781      */
    public Builder clearHiveJob() {
      if (hiveJobBuilder_ == null) {
        // Only clear if this field was the active oneof case.
        if (typeJobCase_ == 6) {
          typeJobCase_ = 0;
          typeJob_ = null;
          onChanged();
        }
      } else {
        // Reset the case marker only when this field was active, then
        // unconditionally clear the nested builder.
        if (typeJobCase_ == 6) {
          typeJobCase_ = 0;
          typeJob_ = null;
        }
        hiveJobBuilder_.clear();
      }
      return this;
    }
3798     /**
3799      *
3800      *
3801      * <pre>
3802      * Optional. Job is a Hive job.
3803      * </pre>
3804      *
3805      * <code>
3806      * .google.cloud.dataproc.v1.HiveJob hive_job = 6 [(.google.api.field_behavior) = OPTIONAL];
3807      * </code>
3808      */
    public com.google.cloud.dataproc.v1.HiveJob.Builder getHiveJobBuilder() {
      // Forces creation of the field builder, which also selects this oneof case.
      return getHiveJobFieldBuilder().getBuilder();
    }
3812     /**
3813      *
3814      *
3815      * <pre>
3816      * Optional. Job is a Hive job.
3817      * </pre>
3818      *
3819      * <code>
3820      * .google.cloud.dataproc.v1.HiveJob hive_job = 6 [(.google.api.field_behavior) = OPTIONAL];
3821      * </code>
3822      */
    @java.lang.Override
    public com.google.cloud.dataproc.v1.HiveJobOrBuilder getHiveJobOrBuilder() {
      if ((typeJobCase_ == 6) && (hiveJobBuilder_ != null)) {
        return hiveJobBuilder_.getMessageOrBuilder();
      } else {
        // Fall back to the stored message, or the immutable default when unset.
        if (typeJobCase_ == 6) {
          return (com.google.cloud.dataproc.v1.HiveJob) typeJob_;
        }
        return com.google.cloud.dataproc.v1.HiveJob.getDefaultInstance();
      }
    }
3834     /**
3835      *
3836      *
3837      * <pre>
3838      * Optional. Job is a Hive job.
3839      * </pre>
3840      *
3841      * <code>
3842      * .google.cloud.dataproc.v1.HiveJob hive_job = 6 [(.google.api.field_behavior) = OPTIONAL];
3843      * </code>
3844      */
    private com.google.protobuf.SingleFieldBuilderV3<
            com.google.cloud.dataproc.v1.HiveJob,
            com.google.cloud.dataproc.v1.HiveJob.Builder,
            com.google.cloud.dataproc.v1.HiveJobOrBuilder>
        getHiveJobFieldBuilder() {
      if (hiveJobBuilder_ == null) {
        // Seed with the default instance when a different type_job case is active.
        if (!(typeJobCase_ == 6)) {
          typeJob_ = com.google.cloud.dataproc.v1.HiveJob.getDefaultInstance();
        }
        hiveJobBuilder_ =
            new com.google.protobuf.SingleFieldBuilderV3<
                com.google.cloud.dataproc.v1.HiveJob,
                com.google.cloud.dataproc.v1.HiveJob.Builder,
                com.google.cloud.dataproc.v1.HiveJobOrBuilder>(
                (com.google.cloud.dataproc.v1.HiveJob) typeJob_, getParentForChildren(), isClean());
        // Ownership of the value moves into the nested builder.
        typeJob_ = null;
      }
      // Accessing the field builder always selects the hive_job case (field 6).
      typeJobCase_ = 6;
      onChanged();
      return hiveJobBuilder_;
    }
3866 
    // Lazily-created nested builder for the pig_job oneof case (field 7);
    // null until getPigJobFieldBuilder() is first invoked.
    private com.google.protobuf.SingleFieldBuilderV3<
            com.google.cloud.dataproc.v1.PigJob,
            com.google.cloud.dataproc.v1.PigJob.Builder,
            com.google.cloud.dataproc.v1.PigJobOrBuilder>
        pigJobBuilder_;
3872     /**
3873      *
3874      *
3875      * <pre>
3876      * Optional. Job is a Pig job.
3877      * </pre>
3878      *
3879      * <code>.google.cloud.dataproc.v1.PigJob pig_job = 7 [(.google.api.field_behavior) = OPTIONAL];
3880      * </code>
3881      *
3882      * @return Whether the pigJob field is set.
3883      */
    @java.lang.Override
    public boolean hasPigJob() {
      // The type_job oneof holds a PigJob exactly when the case discriminant is 7.
      return typeJobCase_ == 7;
    }
3888     /**
3889      *
3890      *
3891      * <pre>
3892      * Optional. Job is a Pig job.
3893      * </pre>
3894      *
3895      * <code>.google.cloud.dataproc.v1.PigJob pig_job = 7 [(.google.api.field_behavior) = OPTIONAL];
3896      * </code>
3897      *
3898      * @return The pigJob.
3899      */
    @java.lang.Override
    public com.google.cloud.dataproc.v1.PigJob getPigJob() {
      if (pigJobBuilder_ == null) {
        // No nested builder yet: the message (if set) lives directly in typeJob_.
        if (typeJobCase_ == 7) {
          return (com.google.cloud.dataproc.v1.PigJob) typeJob_;
        }
        return com.google.cloud.dataproc.v1.PigJob.getDefaultInstance();
      } else {
        // A nested builder exists and owns the value while this case is active.
        if (typeJobCase_ == 7) {
          return pigJobBuilder_.getMessage();
        }
        return com.google.cloud.dataproc.v1.PigJob.getDefaultInstance();
      }
    }
3914     /**
3915      *
3916      *
3917      * <pre>
3918      * Optional. Job is a Pig job.
3919      * </pre>
3920      *
3921      * <code>.google.cloud.dataproc.v1.PigJob pig_job = 7 [(.google.api.field_behavior) = OPTIONAL];
3922      * </code>
3923      */
    public Builder setPigJob(com.google.cloud.dataproc.v1.PigJob value) {
      if (pigJobBuilder_ == null) {
        if (value == null) {
          throw new NullPointerException();
        }
        typeJob_ = value;
        onChanged();
      } else {
        pigJobBuilder_.setMessage(value);
      }
      // Selecting this field implicitly clears any other active type_job case.
      typeJobCase_ = 7;
      return this;
    }
3937     /**
3938      *
3939      *
3940      * <pre>
3941      * Optional. Job is a Pig job.
3942      * </pre>
3943      *
3944      * <code>.google.cloud.dataproc.v1.PigJob pig_job = 7 [(.google.api.field_behavior) = OPTIONAL];
3945      * </code>
3946      */
    public Builder setPigJob(com.google.cloud.dataproc.v1.PigJob.Builder builderForValue) {
      if (pigJobBuilder_ == null) {
        // Snapshot the caller's builder state into the oneof slot.
        typeJob_ = builderForValue.build();
        onChanged();
      } else {
        pigJobBuilder_.setMessage(builderForValue.build());
      }
      typeJobCase_ = 7;
      return this;
    }
3957     /**
3958      *
3959      *
3960      * <pre>
3961      * Optional. Job is a Pig job.
3962      * </pre>
3963      *
3964      * <code>.google.cloud.dataproc.v1.PigJob pig_job = 7 [(.google.api.field_behavior) = OPTIONAL];
3965      * </code>
3966      */
    public Builder mergePigJob(com.google.cloud.dataproc.v1.PigJob value) {
      if (pigJobBuilder_ == null) {
        // Reference comparison is intentional: the default instance is a singleton,
        // so '!=' cheaply detects "already holds a non-default PigJob".
        if (typeJobCase_ == 7
            && typeJob_ != com.google.cloud.dataproc.v1.PigJob.getDefaultInstance()) {
          // Merge into the existing message field-by-field.
          typeJob_ =
              com.google.cloud.dataproc.v1.PigJob.newBuilder(
                      (com.google.cloud.dataproc.v1.PigJob) typeJob_)
                  .mergeFrom(value)
                  .buildPartial();
        } else {
          typeJob_ = value;
        }
        onChanged();
      } else {
        if (typeJobCase_ == 7) {
          pigJobBuilder_.mergeFrom(value);
        } else {
          // A different case was active: replace rather than merge.
          pigJobBuilder_.setMessage(value);
        }
      }
      typeJobCase_ = 7;
      return this;
    }
3990     /**
3991      *
3992      *
3993      * <pre>
3994      * Optional. Job is a Pig job.
3995      * </pre>
3996      *
3997      * <code>.google.cloud.dataproc.v1.PigJob pig_job = 7 [(.google.api.field_behavior) = OPTIONAL];
3998      * </code>
3999      */
    public Builder clearPigJob() {
      if (pigJobBuilder_ == null) {
        // Only clear if this field was the active oneof case.
        if (typeJobCase_ == 7) {
          typeJobCase_ = 0;
          typeJob_ = null;
          onChanged();
        }
      } else {
        // Reset the case marker only when this field was active, then
        // unconditionally clear the nested builder.
        if (typeJobCase_ == 7) {
          typeJobCase_ = 0;
          typeJob_ = null;
        }
        pigJobBuilder_.clear();
      }
      return this;
    }
4016     /**
4017      *
4018      *
4019      * <pre>
4020      * Optional. Job is a Pig job.
4021      * </pre>
4022      *
4023      * <code>.google.cloud.dataproc.v1.PigJob pig_job = 7 [(.google.api.field_behavior) = OPTIONAL];
4024      * </code>
4025      */
    public com.google.cloud.dataproc.v1.PigJob.Builder getPigJobBuilder() {
      // Forces creation of the field builder, which also selects this oneof case.
      return getPigJobFieldBuilder().getBuilder();
    }
4029     /**
4030      *
4031      *
4032      * <pre>
4033      * Optional. Job is a Pig job.
4034      * </pre>
4035      *
4036      * <code>.google.cloud.dataproc.v1.PigJob pig_job = 7 [(.google.api.field_behavior) = OPTIONAL];
4037      * </code>
4038      */
    @java.lang.Override
    public com.google.cloud.dataproc.v1.PigJobOrBuilder getPigJobOrBuilder() {
      if ((typeJobCase_ == 7) && (pigJobBuilder_ != null)) {
        return pigJobBuilder_.getMessageOrBuilder();
      } else {
        // Fall back to the stored message, or the immutable default when unset.
        if (typeJobCase_ == 7) {
          return (com.google.cloud.dataproc.v1.PigJob) typeJob_;
        }
        return com.google.cloud.dataproc.v1.PigJob.getDefaultInstance();
      }
    }
4050     /**
4051      *
4052      *
4053      * <pre>
4054      * Optional. Job is a Pig job.
4055      * </pre>
4056      *
4057      * <code>.google.cloud.dataproc.v1.PigJob pig_job = 7 [(.google.api.field_behavior) = OPTIONAL];
4058      * </code>
4059      */
    private com.google.protobuf.SingleFieldBuilderV3<
            com.google.cloud.dataproc.v1.PigJob,
            com.google.cloud.dataproc.v1.PigJob.Builder,
            com.google.cloud.dataproc.v1.PigJobOrBuilder>
        getPigJobFieldBuilder() {
      if (pigJobBuilder_ == null) {
        // Seed with the default instance when a different type_job case is active.
        if (!(typeJobCase_ == 7)) {
          typeJob_ = com.google.cloud.dataproc.v1.PigJob.getDefaultInstance();
        }
        pigJobBuilder_ =
            new com.google.protobuf.SingleFieldBuilderV3<
                com.google.cloud.dataproc.v1.PigJob,
                com.google.cloud.dataproc.v1.PigJob.Builder,
                com.google.cloud.dataproc.v1.PigJobOrBuilder>(
                (com.google.cloud.dataproc.v1.PigJob) typeJob_, getParentForChildren(), isClean());
        // Ownership of the value moves into the nested builder.
        typeJob_ = null;
      }
      // Accessing the field builder always selects the pig_job case (field 7).
      typeJobCase_ = 7;
      onChanged();
      return pigJobBuilder_;
    }
4081 
    // Lazily-created nested builder for the spark_r_job oneof case (field 21);
    // null until getSparkRJobFieldBuilder() is first invoked.
    private com.google.protobuf.SingleFieldBuilderV3<
            com.google.cloud.dataproc.v1.SparkRJob,
            com.google.cloud.dataproc.v1.SparkRJob.Builder,
            com.google.cloud.dataproc.v1.SparkRJobOrBuilder>
        sparkRJobBuilder_;
4087     /**
4088      *
4089      *
4090      * <pre>
4091      * Optional. Job is a SparkR job.
4092      * </pre>
4093      *
4094      * <code>
4095      * .google.cloud.dataproc.v1.SparkRJob spark_r_job = 21 [(.google.api.field_behavior) = OPTIONAL];
4096      * </code>
4097      *
4098      * @return Whether the sparkRJob field is set.
4099      */
    @java.lang.Override
    public boolean hasSparkRJob() {
      // The type_job oneof holds a SparkRJob exactly when the case discriminant is 21.
      return typeJobCase_ == 21;
    }
4104     /**
4105      *
4106      *
4107      * <pre>
4108      * Optional. Job is a SparkR job.
4109      * </pre>
4110      *
4111      * <code>
4112      * .google.cloud.dataproc.v1.SparkRJob spark_r_job = 21 [(.google.api.field_behavior) = OPTIONAL];
4113      * </code>
4114      *
4115      * @return The sparkRJob.
4116      */
    @java.lang.Override
    public com.google.cloud.dataproc.v1.SparkRJob getSparkRJob() {
      if (sparkRJobBuilder_ == null) {
        // No nested builder yet: the message (if set) lives directly in typeJob_.
        if (typeJobCase_ == 21) {
          return (com.google.cloud.dataproc.v1.SparkRJob) typeJob_;
        }
        return com.google.cloud.dataproc.v1.SparkRJob.getDefaultInstance();
      } else {
        // A nested builder exists and owns the value while this case is active.
        if (typeJobCase_ == 21) {
          return sparkRJobBuilder_.getMessage();
        }
        return com.google.cloud.dataproc.v1.SparkRJob.getDefaultInstance();
      }
    }
4131     /**
4132      *
4133      *
4134      * <pre>
4135      * Optional. Job is a SparkR job.
4136      * </pre>
4137      *
4138      * <code>
4139      * .google.cloud.dataproc.v1.SparkRJob spark_r_job = 21 [(.google.api.field_behavior) = OPTIONAL];
4140      * </code>
4141      */
    public Builder setSparkRJob(com.google.cloud.dataproc.v1.SparkRJob value) {
      if (sparkRJobBuilder_ == null) {
        if (value == null) {
          throw new NullPointerException();
        }
        typeJob_ = value;
        onChanged();
      } else {
        sparkRJobBuilder_.setMessage(value);
      }
      // Selecting this field implicitly clears any other active type_job case.
      typeJobCase_ = 21;
      return this;
    }
4155     /**
4156      *
4157      *
4158      * <pre>
4159      * Optional. Job is a SparkR job.
4160      * </pre>
4161      *
4162      * <code>
4163      * .google.cloud.dataproc.v1.SparkRJob spark_r_job = 21 [(.google.api.field_behavior) = OPTIONAL];
4164      * </code>
4165      */
    public Builder setSparkRJob(com.google.cloud.dataproc.v1.SparkRJob.Builder builderForValue) {
      if (sparkRJobBuilder_ == null) {
        // Snapshot the caller's builder state into the oneof slot.
        typeJob_ = builderForValue.build();
        onChanged();
      } else {
        sparkRJobBuilder_.setMessage(builderForValue.build());
      }
      typeJobCase_ = 21;
      return this;
    }
4176     /**
4177      *
4178      *
4179      * <pre>
4180      * Optional. Job is a SparkR job.
4181      * </pre>
4182      *
4183      * <code>
4184      * .google.cloud.dataproc.v1.SparkRJob spark_r_job = 21 [(.google.api.field_behavior) = OPTIONAL];
4185      * </code>
4186      */
    public Builder mergeSparkRJob(com.google.cloud.dataproc.v1.SparkRJob value) {
      if (sparkRJobBuilder_ == null) {
        // Reference comparison is intentional: the default instance is a singleton,
        // so '!=' cheaply detects "already holds a non-default SparkRJob".
        if (typeJobCase_ == 21
            && typeJob_ != com.google.cloud.dataproc.v1.SparkRJob.getDefaultInstance()) {
          // Merge into the existing message field-by-field.
          typeJob_ =
              com.google.cloud.dataproc.v1.SparkRJob.newBuilder(
                      (com.google.cloud.dataproc.v1.SparkRJob) typeJob_)
                  .mergeFrom(value)
                  .buildPartial();
        } else {
          typeJob_ = value;
        }
        onChanged();
      } else {
        if (typeJobCase_ == 21) {
          sparkRJobBuilder_.mergeFrom(value);
        } else {
          // A different case was active: replace rather than merge.
          sparkRJobBuilder_.setMessage(value);
        }
      }
      typeJobCase_ = 21;
      return this;
    }
4210     /**
4211      *
4212      *
4213      * <pre>
4214      * Optional. Job is a SparkR job.
4215      * </pre>
4216      *
4217      * <code>
4218      * .google.cloud.dataproc.v1.SparkRJob spark_r_job = 21 [(.google.api.field_behavior) = OPTIONAL];
4219      * </code>
4220      */
    public Builder clearSparkRJob() {
      if (sparkRJobBuilder_ == null) {
        // Only clear if this field was the active oneof case.
        if (typeJobCase_ == 21) {
          typeJobCase_ = 0;
          typeJob_ = null;
          onChanged();
        }
      } else {
        // Reset the case marker only when this field was active, then
        // unconditionally clear the nested builder.
        if (typeJobCase_ == 21) {
          typeJobCase_ = 0;
          typeJob_ = null;
        }
        sparkRJobBuilder_.clear();
      }
      return this;
    }
4237     /**
4238      *
4239      *
4240      * <pre>
4241      * Optional. Job is a SparkR job.
4242      * </pre>
4243      *
4244      * <code>
4245      * .google.cloud.dataproc.v1.SparkRJob spark_r_job = 21 [(.google.api.field_behavior) = OPTIONAL];
4246      * </code>
4247      */
    public com.google.cloud.dataproc.v1.SparkRJob.Builder getSparkRJobBuilder() {
      // Forces creation of the field builder, which also selects this oneof case.
      return getSparkRJobFieldBuilder().getBuilder();
    }
4251     /**
4252      *
4253      *
4254      * <pre>
4255      * Optional. Job is a SparkR job.
4256      * </pre>
4257      *
4258      * <code>
4259      * .google.cloud.dataproc.v1.SparkRJob spark_r_job = 21 [(.google.api.field_behavior) = OPTIONAL];
4260      * </code>
4261      */
    @java.lang.Override
    public com.google.cloud.dataproc.v1.SparkRJobOrBuilder getSparkRJobOrBuilder() {
      if ((typeJobCase_ == 21) && (sparkRJobBuilder_ != null)) {
        return sparkRJobBuilder_.getMessageOrBuilder();
      } else {
        // Fall back to the stored message, or the immutable default when unset.
        if (typeJobCase_ == 21) {
          return (com.google.cloud.dataproc.v1.SparkRJob) typeJob_;
        }
        return com.google.cloud.dataproc.v1.SparkRJob.getDefaultInstance();
      }
    }
4273     /**
4274      *
4275      *
4276      * <pre>
4277      * Optional. Job is a SparkR job.
4278      * </pre>
4279      *
4280      * <code>
4281      * .google.cloud.dataproc.v1.SparkRJob spark_r_job = 21 [(.google.api.field_behavior) = OPTIONAL];
4282      * </code>
4283      */
    private com.google.protobuf.SingleFieldBuilderV3<
            com.google.cloud.dataproc.v1.SparkRJob,
            com.google.cloud.dataproc.v1.SparkRJob.Builder,
            com.google.cloud.dataproc.v1.SparkRJobOrBuilder>
        getSparkRJobFieldBuilder() {
      if (sparkRJobBuilder_ == null) {
        // Seed with the default instance when a different type_job case is active.
        if (!(typeJobCase_ == 21)) {
          typeJob_ = com.google.cloud.dataproc.v1.SparkRJob.getDefaultInstance();
        }
        sparkRJobBuilder_ =
            new com.google.protobuf.SingleFieldBuilderV3<
                com.google.cloud.dataproc.v1.SparkRJob,
                com.google.cloud.dataproc.v1.SparkRJob.Builder,
                com.google.cloud.dataproc.v1.SparkRJobOrBuilder>(
                (com.google.cloud.dataproc.v1.SparkRJob) typeJob_,
                getParentForChildren(),
                isClean());
        // Ownership of the value moves into the nested builder.
        typeJob_ = null;
      }
      // Accessing the field builder always selects the spark_r_job case (field 21).
      typeJobCase_ = 21;
      onChanged();
      return sparkRJobBuilder_;
    }
4307 
    // Lazily-created nested builder for the spark_sql_job oneof case (field 12);
    // null until getSparkSqlJobFieldBuilder() is first invoked.
    private com.google.protobuf.SingleFieldBuilderV3<
            com.google.cloud.dataproc.v1.SparkSqlJob,
            com.google.cloud.dataproc.v1.SparkSqlJob.Builder,
            com.google.cloud.dataproc.v1.SparkSqlJobOrBuilder>
        sparkSqlJobBuilder_;
4313     /**
4314      *
4315      *
4316      * <pre>
4317      * Optional. Job is a SparkSql job.
4318      * </pre>
4319      *
4320      * <code>
4321      * .google.cloud.dataproc.v1.SparkSqlJob spark_sql_job = 12 [(.google.api.field_behavior) = OPTIONAL];
4322      * </code>
4323      *
4324      * @return Whether the sparkSqlJob field is set.
4325      */
    @java.lang.Override
    public boolean hasSparkSqlJob() {
      // The type_job oneof holds a SparkSqlJob exactly when the case discriminant is 12.
      return typeJobCase_ == 12;
    }
4330     /**
4331      *
4332      *
4333      * <pre>
4334      * Optional. Job is a SparkSql job.
4335      * </pre>
4336      *
4337      * <code>
4338      * .google.cloud.dataproc.v1.SparkSqlJob spark_sql_job = 12 [(.google.api.field_behavior) = OPTIONAL];
4339      * </code>
4340      *
4341      * @return The sparkSqlJob.
4342      */
    @java.lang.Override
    public com.google.cloud.dataproc.v1.SparkSqlJob getSparkSqlJob() {
      if (sparkSqlJobBuilder_ == null) {
        // No nested builder yet: the message (if set) lives directly in typeJob_.
        if (typeJobCase_ == 12) {
          return (com.google.cloud.dataproc.v1.SparkSqlJob) typeJob_;
        }
        return com.google.cloud.dataproc.v1.SparkSqlJob.getDefaultInstance();
      } else {
        // A nested builder exists and owns the value while this case is active.
        if (typeJobCase_ == 12) {
          return sparkSqlJobBuilder_.getMessage();
        }
        return com.google.cloud.dataproc.v1.SparkSqlJob.getDefaultInstance();
      }
    }
4357     /**
4358      *
4359      *
4360      * <pre>
4361      * Optional. Job is a SparkSql job.
4362      * </pre>
4363      *
4364      * <code>
4365      * .google.cloud.dataproc.v1.SparkSqlJob spark_sql_job = 12 [(.google.api.field_behavior) = OPTIONAL];
4366      * </code>
4367      */
setSparkSqlJob(com.google.cloud.dataproc.v1.SparkSqlJob value)4368     public Builder setSparkSqlJob(com.google.cloud.dataproc.v1.SparkSqlJob value) {
4369       if (sparkSqlJobBuilder_ == null) {
4370         if (value == null) {
4371           throw new NullPointerException();
4372         }
4373         typeJob_ = value;
4374         onChanged();
4375       } else {
4376         sparkSqlJobBuilder_.setMessage(value);
4377       }
4378       typeJobCase_ = 12;
4379       return this;
4380     }
4381     /**
4382      *
4383      *
4384      * <pre>
4385      * Optional. Job is a SparkSql job.
4386      * </pre>
4387      *
4388      * <code>
4389      * .google.cloud.dataproc.v1.SparkSqlJob spark_sql_job = 12 [(.google.api.field_behavior) = OPTIONAL];
4390      * </code>
4391      */
setSparkSqlJob( com.google.cloud.dataproc.v1.SparkSqlJob.Builder builderForValue)4392     public Builder setSparkSqlJob(
4393         com.google.cloud.dataproc.v1.SparkSqlJob.Builder builderForValue) {
4394       if (sparkSqlJobBuilder_ == null) {
4395         typeJob_ = builderForValue.build();
4396         onChanged();
4397       } else {
4398         sparkSqlJobBuilder_.setMessage(builderForValue.build());
4399       }
4400       typeJobCase_ = 12;
4401       return this;
4402     }
4403     /**
4404      *
4405      *
4406      * <pre>
4407      * Optional. Job is a SparkSql job.
4408      * </pre>
4409      *
4410      * <code>
4411      * .google.cloud.dataproc.v1.SparkSqlJob spark_sql_job = 12 [(.google.api.field_behavior) = OPTIONAL];
4412      * </code>
4413      */
mergeSparkSqlJob(com.google.cloud.dataproc.v1.SparkSqlJob value)4414     public Builder mergeSparkSqlJob(com.google.cloud.dataproc.v1.SparkSqlJob value) {
4415       if (sparkSqlJobBuilder_ == null) {
4416         if (typeJobCase_ == 12
4417             && typeJob_ != com.google.cloud.dataproc.v1.SparkSqlJob.getDefaultInstance()) {
4418           typeJob_ =
4419               com.google.cloud.dataproc.v1.SparkSqlJob.newBuilder(
4420                       (com.google.cloud.dataproc.v1.SparkSqlJob) typeJob_)
4421                   .mergeFrom(value)
4422                   .buildPartial();
4423         } else {
4424           typeJob_ = value;
4425         }
4426         onChanged();
4427       } else {
4428         if (typeJobCase_ == 12) {
4429           sparkSqlJobBuilder_.mergeFrom(value);
4430         } else {
4431           sparkSqlJobBuilder_.setMessage(value);
4432         }
4433       }
4434       typeJobCase_ = 12;
4435       return this;
4436     }
4437     /**
4438      *
4439      *
4440      * <pre>
4441      * Optional. Job is a SparkSql job.
4442      * </pre>
4443      *
4444      * <code>
4445      * .google.cloud.dataproc.v1.SparkSqlJob spark_sql_job = 12 [(.google.api.field_behavior) = OPTIONAL];
4446      * </code>
4447      */
clearSparkSqlJob()4448     public Builder clearSparkSqlJob() {
4449       if (sparkSqlJobBuilder_ == null) {
4450         if (typeJobCase_ == 12) {
4451           typeJobCase_ = 0;
4452           typeJob_ = null;
4453           onChanged();
4454         }
4455       } else {
4456         if (typeJobCase_ == 12) {
4457           typeJobCase_ = 0;
4458           typeJob_ = null;
4459         }
4460         sparkSqlJobBuilder_.clear();
4461       }
4462       return this;
4463     }
4464     /**
4465      *
4466      *
4467      * <pre>
4468      * Optional. Job is a SparkSql job.
4469      * </pre>
4470      *
4471      * <code>
4472      * .google.cloud.dataproc.v1.SparkSqlJob spark_sql_job = 12 [(.google.api.field_behavior) = OPTIONAL];
4473      * </code>
4474      */
getSparkSqlJobBuilder()4475     public com.google.cloud.dataproc.v1.SparkSqlJob.Builder getSparkSqlJobBuilder() {
4476       return getSparkSqlJobFieldBuilder().getBuilder();
4477     }
4478     /**
4479      *
4480      *
4481      * <pre>
4482      * Optional. Job is a SparkSql job.
4483      * </pre>
4484      *
4485      * <code>
4486      * .google.cloud.dataproc.v1.SparkSqlJob spark_sql_job = 12 [(.google.api.field_behavior) = OPTIONAL];
4487      * </code>
4488      */
4489     @java.lang.Override
getSparkSqlJobOrBuilder()4490     public com.google.cloud.dataproc.v1.SparkSqlJobOrBuilder getSparkSqlJobOrBuilder() {
4491       if ((typeJobCase_ == 12) && (sparkSqlJobBuilder_ != null)) {
4492         return sparkSqlJobBuilder_.getMessageOrBuilder();
4493       } else {
4494         if (typeJobCase_ == 12) {
4495           return (com.google.cloud.dataproc.v1.SparkSqlJob) typeJob_;
4496         }
4497         return com.google.cloud.dataproc.v1.SparkSqlJob.getDefaultInstance();
4498       }
4499     }
4500     /**
4501      *
4502      *
4503      * <pre>
4504      * Optional. Job is a SparkSql job.
4505      * </pre>
4506      *
4507      * <code>
4508      * .google.cloud.dataproc.v1.SparkSqlJob spark_sql_job = 12 [(.google.api.field_behavior) = OPTIONAL];
4509      * </code>
4510      */
4511     private com.google.protobuf.SingleFieldBuilderV3<
4512             com.google.cloud.dataproc.v1.SparkSqlJob,
4513             com.google.cloud.dataproc.v1.SparkSqlJob.Builder,
4514             com.google.cloud.dataproc.v1.SparkSqlJobOrBuilder>
getSparkSqlJobFieldBuilder()4515         getSparkSqlJobFieldBuilder() {
4516       if (sparkSqlJobBuilder_ == null) {
4517         if (!(typeJobCase_ == 12)) {
4518           typeJob_ = com.google.cloud.dataproc.v1.SparkSqlJob.getDefaultInstance();
4519         }
4520         sparkSqlJobBuilder_ =
4521             new com.google.protobuf.SingleFieldBuilderV3<
4522                 com.google.cloud.dataproc.v1.SparkSqlJob,
4523                 com.google.cloud.dataproc.v1.SparkSqlJob.Builder,
4524                 com.google.cloud.dataproc.v1.SparkSqlJobOrBuilder>(
4525                 (com.google.cloud.dataproc.v1.SparkSqlJob) typeJob_,
4526                 getParentForChildren(),
4527                 isClean());
4528         typeJob_ = null;
4529       }
4530       typeJobCase_ = 12;
4531       onChanged();
4532       return sparkSqlJobBuilder_;
4533     }
4534 
    // Lazily created field builder for the presto_job member of the
    // type_job oneof; remains null until getPrestoJobFieldBuilder() runs.
    private com.google.protobuf.SingleFieldBuilderV3<
            com.google.cloud.dataproc.v1.PrestoJob,
            com.google.cloud.dataproc.v1.PrestoJob.Builder,
            com.google.cloud.dataproc.v1.PrestoJobOrBuilder>
        prestoJobBuilder_;
    /**
     * <pre>
     * Optional. Job is a Presto job.
     * </pre>
     *
     * <code>
     * .google.cloud.dataproc.v1.PrestoJob presto_job = 23 [(.google.api.field_behavior) = OPTIONAL];
     * </code>
     *
     * @return Whether the prestoJob field is set.
     */
    @java.lang.Override
    public boolean hasPrestoJob() {
      // Oneof case 23 identifies presto_job as the active type_job member.
      return typeJobCase_ == 23;
    }
    /**
     * <pre>
     * Optional. Job is a Presto job.
     * </pre>
     *
     * <code>
     * .google.cloud.dataproc.v1.PrestoJob presto_job = 23 [(.google.api.field_behavior) = OPTIONAL];
     * </code>
     *
     * @return The prestoJob.
     */
    @java.lang.Override
    public com.google.cloud.dataproc.v1.PrestoJob getPrestoJob() {
      if (prestoJobBuilder_ == null) {
        // No field builder yet: the oneof payload (if any) lives in typeJob_.
        if (typeJobCase_ == 23) {
          return (com.google.cloud.dataproc.v1.PrestoJob) typeJob_;
        }
        return com.google.cloud.dataproc.v1.PrestoJob.getDefaultInstance();
      } else {
        // The builder owns the value while it exists.
        if (typeJobCase_ == 23) {
          return prestoJobBuilder_.getMessage();
        }
        return com.google.cloud.dataproc.v1.PrestoJob.getDefaultInstance();
      }
    }
    /**
     * Sets presto_job as the active type_job oneof member, replacing any
     * previously set member.
     *
     * <pre>
     * Optional. Job is a Presto job.
     * </pre>
     *
     * <code>
     * .google.cloud.dataproc.v1.PrestoJob presto_job = 23 [(.google.api.field_behavior) = OPTIONAL];
     * </code>
     */
    public Builder setPrestoJob(com.google.cloud.dataproc.v1.PrestoJob value) {
      if (prestoJobBuilder_ == null) {
        if (value == null) {
          throw new NullPointerException();
        }
        typeJob_ = value;
        onChanged();
      } else {
        prestoJobBuilder_.setMessage(value);
      }
      // Record the active oneof case regardless of which storage path was used.
      typeJobCase_ = 23;
      return this;
    }
    /**
     * Sets presto_job from a builder, replacing any previously set type_job
     * member.
     *
     * <pre>
     * Optional. Job is a Presto job.
     * </pre>
     *
     * <code>
     * .google.cloud.dataproc.v1.PrestoJob presto_job = 23 [(.google.api.field_behavior) = OPTIONAL];
     * </code>
     */
    public Builder setPrestoJob(com.google.cloud.dataproc.v1.PrestoJob.Builder builderForValue) {
      if (prestoJobBuilder_ == null) {
        typeJob_ = builderForValue.build();
        onChanged();
      } else {
        prestoJobBuilder_.setMessage(builderForValue.build());
      }
      typeJobCase_ = 23;
      return this;
    }
    /**
     * Merges {@code value} into any presto_job already set; if a different
     * oneof member (or the default) is present, {@code value} replaces it.
     *
     * <pre>
     * Optional. Job is a Presto job.
     * </pre>
     *
     * <code>
     * .google.cloud.dataproc.v1.PrestoJob presto_job = 23 [(.google.api.field_behavior) = OPTIONAL];
     * </code>
     */
    public Builder mergePrestoJob(com.google.cloud.dataproc.v1.PrestoJob value) {
      if (prestoJobBuilder_ == null) {
        // Reference (not equals) comparison against the default instance is
        // intentional generated-code style: merge only when a non-default
        // presto_job object was previously stored.
        if (typeJobCase_ == 23
            && typeJob_ != com.google.cloud.dataproc.v1.PrestoJob.getDefaultInstance()) {
          typeJob_ =
              com.google.cloud.dataproc.v1.PrestoJob.newBuilder(
                      (com.google.cloud.dataproc.v1.PrestoJob) typeJob_)
                  .mergeFrom(value)
                  .buildPartial();
        } else {
          typeJob_ = value;
        }
        onChanged();
      } else {
        if (typeJobCase_ == 23) {
          prestoJobBuilder_.mergeFrom(value);
        } else {
          prestoJobBuilder_.setMessage(value);
        }
      }
      typeJobCase_ = 23;
      return this;
    }
    /**
     * Clears presto_job if it is the active type_job member; other members
     * are left untouched.
     *
     * <pre>
     * Optional. Job is a Presto job.
     * </pre>
     *
     * <code>
     * .google.cloud.dataproc.v1.PrestoJob presto_job = 23 [(.google.api.field_behavior) = OPTIONAL];
     * </code>
     */
    public Builder clearPrestoJob() {
      if (prestoJobBuilder_ == null) {
        if (typeJobCase_ == 23) {
          typeJobCase_ = 0;
          typeJob_ = null;
          onChanged();
        }
      } else {
        if (typeJobCase_ == 23) {
          typeJobCase_ = 0;
          typeJob_ = null;
        }
        // The field builder's clear() triggers its own change notification.
        prestoJobBuilder_.clear();
      }
      return this;
    }
    /**
     * Returns a mutable builder for presto_job, making it the active
     * type_job member.
     *
     * <pre>
     * Optional. Job is a Presto job.
     * </pre>
     *
     * <code>
     * .google.cloud.dataproc.v1.PrestoJob presto_job = 23 [(.google.api.field_behavior) = OPTIONAL];
     * </code>
     */
    public com.google.cloud.dataproc.v1.PrestoJob.Builder getPrestoJobBuilder() {
      return getPrestoJobFieldBuilder().getBuilder();
    }
    /**
     * <pre>
     * Optional. Job is a Presto job.
     * </pre>
     *
     * <code>
     * .google.cloud.dataproc.v1.PrestoJob presto_job = 23 [(.google.api.field_behavior) = OPTIONAL];
     * </code>
     */
    @java.lang.Override
    public com.google.cloud.dataproc.v1.PrestoJobOrBuilder getPrestoJobOrBuilder() {
      if ((typeJobCase_ == 23) && (prestoJobBuilder_ != null)) {
        return prestoJobBuilder_.getMessageOrBuilder();
      } else {
        if (typeJobCase_ == 23) {
          return (com.google.cloud.dataproc.v1.PrestoJob) typeJob_;
        }
        return com.google.cloud.dataproc.v1.PrestoJob.getDefaultInstance();
      }
    }
    /**
     * Lazily creates the presto_job field builder, migrating any payload
     * currently held in {@code typeJob_} into it, and marks presto_job as the
     * active oneof member.
     *
     * <pre>
     * Optional. Job is a Presto job.
     * </pre>
     *
     * <code>
     * .google.cloud.dataproc.v1.PrestoJob presto_job = 23 [(.google.api.field_behavior) = OPTIONAL];
     * </code>
     */
    private com.google.protobuf.SingleFieldBuilderV3<
            com.google.cloud.dataproc.v1.PrestoJob,
            com.google.cloud.dataproc.v1.PrestoJob.Builder,
            com.google.cloud.dataproc.v1.PrestoJobOrBuilder>
        getPrestoJobFieldBuilder() {
      if (prestoJobBuilder_ == null) {
        // Seed the builder with a default instance when another member (or
        // nothing) was active.
        if (!(typeJobCase_ == 23)) {
          typeJob_ = com.google.cloud.dataproc.v1.PrestoJob.getDefaultInstance();
        }
        prestoJobBuilder_ =
            new com.google.protobuf.SingleFieldBuilderV3<
                com.google.cloud.dataproc.v1.PrestoJob,
                com.google.cloud.dataproc.v1.PrestoJob.Builder,
                com.google.cloud.dataproc.v1.PrestoJobOrBuilder>(
                (com.google.cloud.dataproc.v1.PrestoJob) typeJob_,
                getParentForChildren(),
                isClean());
        // The builder now owns the value; drop the direct reference.
        typeJob_ = null;
      }
      typeJobCase_ = 23;
      onChanged();
      return prestoJobBuilder_;
    }
4760 
    // Lazily created field builder for the trino_job member of the
    // type_job oneof; remains null until getTrinoJobFieldBuilder() runs.
    private com.google.protobuf.SingleFieldBuilderV3<
            com.google.cloud.dataproc.v1.TrinoJob,
            com.google.cloud.dataproc.v1.TrinoJob.Builder,
            com.google.cloud.dataproc.v1.TrinoJobOrBuilder>
        trinoJobBuilder_;
    /**
     * <pre>
     * Optional. Job is a Trino job.
     * </pre>
     *
     * <code>
     * .google.cloud.dataproc.v1.TrinoJob trino_job = 28 [(.google.api.field_behavior) = OPTIONAL];
     * </code>
     *
     * @return Whether the trinoJob field is set.
     */
    @java.lang.Override
    public boolean hasTrinoJob() {
      // Oneof case 28 identifies trino_job as the active type_job member.
      return typeJobCase_ == 28;
    }
    /**
     * <pre>
     * Optional. Job is a Trino job.
     * </pre>
     *
     * <code>
     * .google.cloud.dataproc.v1.TrinoJob trino_job = 28 [(.google.api.field_behavior) = OPTIONAL];
     * </code>
     *
     * @return The trinoJob.
     */
    @java.lang.Override
    public com.google.cloud.dataproc.v1.TrinoJob getTrinoJob() {
      if (trinoJobBuilder_ == null) {
        // No field builder yet: the oneof payload (if any) lives in typeJob_.
        if (typeJobCase_ == 28) {
          return (com.google.cloud.dataproc.v1.TrinoJob) typeJob_;
        }
        return com.google.cloud.dataproc.v1.TrinoJob.getDefaultInstance();
      } else {
        // The builder owns the value while it exists.
        if (typeJobCase_ == 28) {
          return trinoJobBuilder_.getMessage();
        }
        return com.google.cloud.dataproc.v1.TrinoJob.getDefaultInstance();
      }
    }
    /**
     * Sets trino_job as the active type_job oneof member, replacing any
     * previously set member.
     *
     * <pre>
     * Optional. Job is a Trino job.
     * </pre>
     *
     * <code>
     * .google.cloud.dataproc.v1.TrinoJob trino_job = 28 [(.google.api.field_behavior) = OPTIONAL];
     * </code>
     */
    public Builder setTrinoJob(com.google.cloud.dataproc.v1.TrinoJob value) {
      if (trinoJobBuilder_ == null) {
        if (value == null) {
          throw new NullPointerException();
        }
        typeJob_ = value;
        onChanged();
      } else {
        trinoJobBuilder_.setMessage(value);
      }
      // Record the active oneof case regardless of which storage path was used.
      typeJobCase_ = 28;
      return this;
    }
    /**
     * Sets trino_job from a builder, replacing any previously set type_job
     * member.
     *
     * <pre>
     * Optional. Job is a Trino job.
     * </pre>
     *
     * <code>
     * .google.cloud.dataproc.v1.TrinoJob trino_job = 28 [(.google.api.field_behavior) = OPTIONAL];
     * </code>
     */
    public Builder setTrinoJob(com.google.cloud.dataproc.v1.TrinoJob.Builder builderForValue) {
      if (trinoJobBuilder_ == null) {
        typeJob_ = builderForValue.build();
        onChanged();
      } else {
        trinoJobBuilder_.setMessage(builderForValue.build());
      }
      typeJobCase_ = 28;
      return this;
    }
    /**
     * Merges {@code value} into any trino_job already set; if a different
     * oneof member (or the default) is present, {@code value} replaces it.
     *
     * <pre>
     * Optional. Job is a Trino job.
     * </pre>
     *
     * <code>
     * .google.cloud.dataproc.v1.TrinoJob trino_job = 28 [(.google.api.field_behavior) = OPTIONAL];
     * </code>
     */
    public Builder mergeTrinoJob(com.google.cloud.dataproc.v1.TrinoJob value) {
      if (trinoJobBuilder_ == null) {
        // Reference (not equals) comparison against the default instance is
        // intentional generated-code style: merge only when a non-default
        // trino_job object was previously stored.
        if (typeJobCase_ == 28
            && typeJob_ != com.google.cloud.dataproc.v1.TrinoJob.getDefaultInstance()) {
          typeJob_ =
              com.google.cloud.dataproc.v1.TrinoJob.newBuilder(
                      (com.google.cloud.dataproc.v1.TrinoJob) typeJob_)
                  .mergeFrom(value)
                  .buildPartial();
        } else {
          typeJob_ = value;
        }
        onChanged();
      } else {
        if (typeJobCase_ == 28) {
          trinoJobBuilder_.mergeFrom(value);
        } else {
          trinoJobBuilder_.setMessage(value);
        }
      }
      typeJobCase_ = 28;
      return this;
    }
    /**
     * Clears trino_job if it is the active type_job member; other members are
     * left untouched.
     *
     * <pre>
     * Optional. Job is a Trino job.
     * </pre>
     *
     * <code>
     * .google.cloud.dataproc.v1.TrinoJob trino_job = 28 [(.google.api.field_behavior) = OPTIONAL];
     * </code>
     */
    public Builder clearTrinoJob() {
      if (trinoJobBuilder_ == null) {
        if (typeJobCase_ == 28) {
          typeJobCase_ = 0;
          typeJob_ = null;
          onChanged();
        }
      } else {
        if (typeJobCase_ == 28) {
          typeJobCase_ = 0;
          typeJob_ = null;
        }
        // The field builder's clear() triggers its own change notification.
        trinoJobBuilder_.clear();
      }
      return this;
    }
    /**
     * Returns a mutable builder for trino_job, making it the active type_job
     * member.
     *
     * <pre>
     * Optional. Job is a Trino job.
     * </pre>
     *
     * <code>
     * .google.cloud.dataproc.v1.TrinoJob trino_job = 28 [(.google.api.field_behavior) = OPTIONAL];
     * </code>
     */
    public com.google.cloud.dataproc.v1.TrinoJob.Builder getTrinoJobBuilder() {
      return getTrinoJobFieldBuilder().getBuilder();
    }
    /**
     * <pre>
     * Optional. Job is a Trino job.
     * </pre>
     *
     * <code>
     * .google.cloud.dataproc.v1.TrinoJob trino_job = 28 [(.google.api.field_behavior) = OPTIONAL];
     * </code>
     */
    @java.lang.Override
    public com.google.cloud.dataproc.v1.TrinoJobOrBuilder getTrinoJobOrBuilder() {
      if ((typeJobCase_ == 28) && (trinoJobBuilder_ != null)) {
        return trinoJobBuilder_.getMessageOrBuilder();
      } else {
        if (typeJobCase_ == 28) {
          return (com.google.cloud.dataproc.v1.TrinoJob) typeJob_;
        }
        return com.google.cloud.dataproc.v1.TrinoJob.getDefaultInstance();
      }
    }
    /**
     * Lazily creates the trino_job field builder, migrating any payload
     * currently held in {@code typeJob_} into it, and marks trino_job as the
     * active oneof member.
     *
     * <pre>
     * Optional. Job is a Trino job.
     * </pre>
     *
     * <code>
     * .google.cloud.dataproc.v1.TrinoJob trino_job = 28 [(.google.api.field_behavior) = OPTIONAL];
     * </code>
     */
    private com.google.protobuf.SingleFieldBuilderV3<
            com.google.cloud.dataproc.v1.TrinoJob,
            com.google.cloud.dataproc.v1.TrinoJob.Builder,
            com.google.cloud.dataproc.v1.TrinoJobOrBuilder>
        getTrinoJobFieldBuilder() {
      if (trinoJobBuilder_ == null) {
        // Seed the builder with a default instance when another member (or
        // nothing) was active.
        if (!(typeJobCase_ == 28)) {
          typeJob_ = com.google.cloud.dataproc.v1.TrinoJob.getDefaultInstance();
        }
        trinoJobBuilder_ =
            new com.google.protobuf.SingleFieldBuilderV3<
                com.google.cloud.dataproc.v1.TrinoJob,
                com.google.cloud.dataproc.v1.TrinoJob.Builder,
                com.google.cloud.dataproc.v1.TrinoJobOrBuilder>(
                (com.google.cloud.dataproc.v1.TrinoJob) typeJob_,
                getParentForChildren(),
                isClean());
        // The builder now owns the value; drop the direct reference.
        typeJob_ = null;
      }
      typeJobCase_ = 28;
      onChanged();
      return trinoJobBuilder_;
    }
4986 
    // Backing message for the singular status field; presence is tracked by
    // bit 0x00000800 of bitField0_, not by null-ness alone.
    private com.google.cloud.dataproc.v1.JobStatus status_;
    // Lazily created field builder for status; when non-null it owns the
    // value instead of status_.
    private com.google.protobuf.SingleFieldBuilderV3<
            com.google.cloud.dataproc.v1.JobStatus,
            com.google.cloud.dataproc.v1.JobStatus.Builder,
            com.google.cloud.dataproc.v1.JobStatusOrBuilder>
        statusBuilder_;
    /**
     * <pre>
     * Output only. The job status. Additional application-specific
     * status information may be contained in the &lt;code&gt;type_job&lt;/code&gt;
     * and &lt;code&gt;yarn_applications&lt;/code&gt; fields.
     * </pre>
     *
     * <code>
     * .google.cloud.dataproc.v1.JobStatus status = 8 [(.google.api.field_behavior) = OUTPUT_ONLY];
     * </code>
     *
     * @return Whether the status field is set.
     */
    public boolean hasStatus() {
      return ((bitField0_ & 0x00000800) != 0);
    }
    /**
     * <pre>
     * Output only. The job status. Additional application-specific
     * status information may be contained in the &lt;code&gt;type_job&lt;/code&gt;
     * and &lt;code&gt;yarn_applications&lt;/code&gt; fields.
     * </pre>
     *
     * <code>
     * .google.cloud.dataproc.v1.JobStatus status = 8 [(.google.api.field_behavior) = OUTPUT_ONLY];
     * </code>
     *
     * @return The status.
     */
    public com.google.cloud.dataproc.v1.JobStatus getStatus() {
      if (statusBuilder_ == null) {
        // Never returns null: fall back to the default instance when unset.
        return status_ == null
            ? com.google.cloud.dataproc.v1.JobStatus.getDefaultInstance()
            : status_;
      } else {
        return statusBuilder_.getMessage();
      }
    }
    /**
     * Sets status, marking the field present.
     *
     * <pre>
     * Output only. The job status. Additional application-specific
     * status information may be contained in the &lt;code&gt;type_job&lt;/code&gt;
     * and &lt;code&gt;yarn_applications&lt;/code&gt; fields.
     * </pre>
     *
     * <code>
     * .google.cloud.dataproc.v1.JobStatus status = 8 [(.google.api.field_behavior) = OUTPUT_ONLY];
     * </code>
     */
    public Builder setStatus(com.google.cloud.dataproc.v1.JobStatus value) {
      if (statusBuilder_ == null) {
        if (value == null) {
          throw new NullPointerException();
        }
        status_ = value;
      } else {
        statusBuilder_.setMessage(value);
      }
      // Mark the field present on either storage path.
      bitField0_ |= 0x00000800;
      onChanged();
      return this;
    }
    /**
     * Sets status from a builder, marking the field present.
     *
     * <pre>
     * Output only. The job status. Additional application-specific
     * status information may be contained in the &lt;code&gt;type_job&lt;/code&gt;
     * and &lt;code&gt;yarn_applications&lt;/code&gt; fields.
     * </pre>
     *
     * <code>
     * .google.cloud.dataproc.v1.JobStatus status = 8 [(.google.api.field_behavior) = OUTPUT_ONLY];
     * </code>
     */
    public Builder setStatus(com.google.cloud.dataproc.v1.JobStatus.Builder builderForValue) {
      if (statusBuilder_ == null) {
        status_ = builderForValue.build();
      } else {
        statusBuilder_.setMessage(builderForValue.build());
      }
      bitField0_ |= 0x00000800;
      onChanged();
      return this;
    }
    /**
     * Merges {@code value} into any status already set; otherwise stores
     * {@code value} directly.
     *
     * <pre>
     * Output only. The job status. Additional application-specific
     * status information may be contained in the &lt;code&gt;type_job&lt;/code&gt;
     * and &lt;code&gt;yarn_applications&lt;/code&gt; fields.
     * </pre>
     *
     * <code>
     * .google.cloud.dataproc.v1.JobStatus status = 8 [(.google.api.field_behavior) = OUTPUT_ONLY];
     * </code>
     */
    public Builder mergeStatus(com.google.cloud.dataproc.v1.JobStatus value) {
      if (statusBuilder_ == null) {
        // Merge only when a non-default status is already present (reference
        // comparison against the default instance is intentional generated
        // code style); otherwise simply replace.
        if (((bitField0_ & 0x00000800) != 0)
            && status_ != null
            && status_ != com.google.cloud.dataproc.v1.JobStatus.getDefaultInstance()) {
          getStatusBuilder().mergeFrom(value);
        } else {
          status_ = value;
        }
      } else {
        statusBuilder_.mergeFrom(value);
      }
      bitField0_ |= 0x00000800;
      onChanged();
      return this;
    }
    /**
     * Clears status and its presence bit, disposing any field builder.
     *
     * <pre>
     * Output only. The job status. Additional application-specific
     * status information may be contained in the &lt;code&gt;type_job&lt;/code&gt;
     * and &lt;code&gt;yarn_applications&lt;/code&gt; fields.
     * </pre>
     *
     * <code>
     * .google.cloud.dataproc.v1.JobStatus status = 8 [(.google.api.field_behavior) = OUTPUT_ONLY];
     * </code>
     */
    public Builder clearStatus() {
      bitField0_ = (bitField0_ & ~0x00000800);
      status_ = null;
      if (statusBuilder_ != null) {
        statusBuilder_.dispose();
        statusBuilder_ = null;
      }
      onChanged();
      return this;
    }
    /**
     * Returns a mutable builder for status, marking the field present.
     *
     * <pre>
     * Output only. The job status. Additional application-specific
     * status information may be contained in the &lt;code&gt;type_job&lt;/code&gt;
     * and &lt;code&gt;yarn_applications&lt;/code&gt; fields.
     * </pre>
     *
     * <code>
     * .google.cloud.dataproc.v1.JobStatus status = 8 [(.google.api.field_behavior) = OUTPUT_ONLY];
     * </code>
     */
    public com.google.cloud.dataproc.v1.JobStatus.Builder getStatusBuilder() {
      bitField0_ |= 0x00000800;
      onChanged();
      return getStatusFieldBuilder().getBuilder();
    }
    /**
     * <pre>
     * Output only. The job status. Additional application-specific
     * status information may be contained in the &lt;code&gt;type_job&lt;/code&gt;
     * and &lt;code&gt;yarn_applications&lt;/code&gt; fields.
     * </pre>
     *
     * <code>
     * .google.cloud.dataproc.v1.JobStatus status = 8 [(.google.api.field_behavior) = OUTPUT_ONLY];
     * </code>
     */
    public com.google.cloud.dataproc.v1.JobStatusOrBuilder getStatusOrBuilder() {
      if (statusBuilder_ != null) {
        return statusBuilder_.getMessageOrBuilder();
      } else {
        return status_ == null
            ? com.google.cloud.dataproc.v1.JobStatus.getDefaultInstance()
            : status_;
      }
    }
    /**
     * Lazily creates the status field builder seeded with the current value;
     * the builder takes ownership, so status_ is nulled afterwards.
     *
     * <pre>
     * Output only. The job status. Additional application-specific
     * status information may be contained in the &lt;code&gt;type_job&lt;/code&gt;
     * and &lt;code&gt;yarn_applications&lt;/code&gt; fields.
     * </pre>
     *
     * <code>
     * .google.cloud.dataproc.v1.JobStatus status = 8 [(.google.api.field_behavior) = OUTPUT_ONLY];
     * </code>
     */
    private com.google.protobuf.SingleFieldBuilderV3<
            com.google.cloud.dataproc.v1.JobStatus,
            com.google.cloud.dataproc.v1.JobStatus.Builder,
            com.google.cloud.dataproc.v1.JobStatusOrBuilder>
        getStatusFieldBuilder() {
      if (statusBuilder_ == null) {
        statusBuilder_ =
            new com.google.protobuf.SingleFieldBuilderV3<
                com.google.cloud.dataproc.v1.JobStatus,
                com.google.cloud.dataproc.v1.JobStatus.Builder,
                com.google.cloud.dataproc.v1.JobStatusOrBuilder>(
                getStatus(), getParentForChildren(), isClean());
        status_ = null;
      }
      return statusBuilder_;
    }
5205 
    // Backing list for the repeated status_history field; starts as the
    // shared immutable empty list and is copied on first mutation.
    private java.util.List<com.google.cloud.dataproc.v1.JobStatus> statusHistory_ =
        java.util.Collections.emptyList();

    // Copy-on-first-write guard: bit 0x00001000 of bitField0_ records that
    // statusHistory_ is already a private, mutable ArrayList copy.
    private void ensureStatusHistoryIsMutable() {
      if (!((bitField0_ & 0x00001000) != 0)) {
        statusHistory_ =
            new java.util.ArrayList<com.google.cloud.dataproc.v1.JobStatus>(statusHistory_);
        bitField0_ |= 0x00001000;
      }
    }

    // Lazily created repeated-field builder for status_history; when non-null
    // it owns the elements instead of statusHistory_.
    private com.google.protobuf.RepeatedFieldBuilderV3<
            com.google.cloud.dataproc.v1.JobStatus,
            com.google.cloud.dataproc.v1.JobStatus.Builder,
            com.google.cloud.dataproc.v1.JobStatusOrBuilder>
        statusHistoryBuilder_;
    /**
     * Returns an unmodifiable view of status_history.
     *
     * <pre>
     * Output only. The previous job status.
     * </pre>
     *
     * <code>
     * repeated .google.cloud.dataproc.v1.JobStatus status_history = 13 [(.google.api.field_behavior) = OUTPUT_ONLY];
     * </code>
     */
    public java.util.List<com.google.cloud.dataproc.v1.JobStatus> getStatusHistoryList() {
      if (statusHistoryBuilder_ == null) {
        // Wrap the backing list so callers cannot mutate builder state.
        return java.util.Collections.unmodifiableList(statusHistory_);
      } else {
        return statusHistoryBuilder_.getMessageList();
      }
    }
    /**
     * Returns the number of status_history elements.
     *
     * <pre>
     * Output only. The previous job status.
     * </pre>
     *
     * <code>
     * repeated .google.cloud.dataproc.v1.JobStatus status_history = 13 [(.google.api.field_behavior) = OUTPUT_ONLY];
     * </code>
     */
    public int getStatusHistoryCount() {
      if (statusHistoryBuilder_ == null) {
        return statusHistory_.size();
      } else {
        return statusHistoryBuilder_.getCount();
      }
    }
    /**
     * Returns the status_history element at {@code index}.
     *
     * <pre>
     * Output only. The previous job status.
     * </pre>
     *
     * <code>
     * repeated .google.cloud.dataproc.v1.JobStatus status_history = 13 [(.google.api.field_behavior) = OUTPUT_ONLY];
     * </code>
     */
    public com.google.cloud.dataproc.v1.JobStatus getStatusHistory(int index) {
      if (statusHistoryBuilder_ == null) {
        return statusHistory_.get(index);
      } else {
        return statusHistoryBuilder_.getMessage(index);
      }
    }
5277     /**
5278      *
5279      *
5280      * <pre>
5281      * Output only. The previous job status.
5282      * </pre>
5283      *
5284      * <code>
5285      * repeated .google.cloud.dataproc.v1.JobStatus status_history = 13 [(.google.api.field_behavior) = OUTPUT_ONLY];
5286      * </code>
5287      */
setStatusHistory(int index, com.google.cloud.dataproc.v1.JobStatus value)5288     public Builder setStatusHistory(int index, com.google.cloud.dataproc.v1.JobStatus value) {
5289       if (statusHistoryBuilder_ == null) {
5290         if (value == null) {
5291           throw new NullPointerException();
5292         }
5293         ensureStatusHistoryIsMutable();
5294         statusHistory_.set(index, value);
5295         onChanged();
5296       } else {
5297         statusHistoryBuilder_.setMessage(index, value);
5298       }
5299       return this;
5300     }
5301     /**
5302      *
5303      *
5304      * <pre>
5305      * Output only. The previous job status.
5306      * </pre>
5307      *
5308      * <code>
5309      * repeated .google.cloud.dataproc.v1.JobStatus status_history = 13 [(.google.api.field_behavior) = OUTPUT_ONLY];
5310      * </code>
5311      */
setStatusHistory( int index, com.google.cloud.dataproc.v1.JobStatus.Builder builderForValue)5312     public Builder setStatusHistory(
5313         int index, com.google.cloud.dataproc.v1.JobStatus.Builder builderForValue) {
5314       if (statusHistoryBuilder_ == null) {
5315         ensureStatusHistoryIsMutable();
5316         statusHistory_.set(index, builderForValue.build());
5317         onChanged();
5318       } else {
5319         statusHistoryBuilder_.setMessage(index, builderForValue.build());
5320       }
5321       return this;
5322     }
5323     /**
5324      *
5325      *
5326      * <pre>
5327      * Output only. The previous job status.
5328      * </pre>
5329      *
5330      * <code>
5331      * repeated .google.cloud.dataproc.v1.JobStatus status_history = 13 [(.google.api.field_behavior) = OUTPUT_ONLY];
5332      * </code>
5333      */
addStatusHistory(com.google.cloud.dataproc.v1.JobStatus value)5334     public Builder addStatusHistory(com.google.cloud.dataproc.v1.JobStatus value) {
5335       if (statusHistoryBuilder_ == null) {
5336         if (value == null) {
5337           throw new NullPointerException();
5338         }
5339         ensureStatusHistoryIsMutable();
5340         statusHistory_.add(value);
5341         onChanged();
5342       } else {
5343         statusHistoryBuilder_.addMessage(value);
5344       }
5345       return this;
5346     }
5347     /**
5348      *
5349      *
5350      * <pre>
5351      * Output only. The previous job status.
5352      * </pre>
5353      *
5354      * <code>
5355      * repeated .google.cloud.dataproc.v1.JobStatus status_history = 13 [(.google.api.field_behavior) = OUTPUT_ONLY];
5356      * </code>
5357      */
addStatusHistory(int index, com.google.cloud.dataproc.v1.JobStatus value)5358     public Builder addStatusHistory(int index, com.google.cloud.dataproc.v1.JobStatus value) {
5359       if (statusHistoryBuilder_ == null) {
5360         if (value == null) {
5361           throw new NullPointerException();
5362         }
5363         ensureStatusHistoryIsMutable();
5364         statusHistory_.add(index, value);
5365         onChanged();
5366       } else {
5367         statusHistoryBuilder_.addMessage(index, value);
5368       }
5369       return this;
5370     }
5371     /**
5372      *
5373      *
5374      * <pre>
5375      * Output only. The previous job status.
5376      * </pre>
5377      *
5378      * <code>
5379      * repeated .google.cloud.dataproc.v1.JobStatus status_history = 13 [(.google.api.field_behavior) = OUTPUT_ONLY];
5380      * </code>
5381      */
addStatusHistory( com.google.cloud.dataproc.v1.JobStatus.Builder builderForValue)5382     public Builder addStatusHistory(
5383         com.google.cloud.dataproc.v1.JobStatus.Builder builderForValue) {
5384       if (statusHistoryBuilder_ == null) {
5385         ensureStatusHistoryIsMutable();
5386         statusHistory_.add(builderForValue.build());
5387         onChanged();
5388       } else {
5389         statusHistoryBuilder_.addMessage(builderForValue.build());
5390       }
5391       return this;
5392     }
5393     /**
5394      *
5395      *
5396      * <pre>
5397      * Output only. The previous job status.
5398      * </pre>
5399      *
5400      * <code>
5401      * repeated .google.cloud.dataproc.v1.JobStatus status_history = 13 [(.google.api.field_behavior) = OUTPUT_ONLY];
5402      * </code>
5403      */
addStatusHistory( int index, com.google.cloud.dataproc.v1.JobStatus.Builder builderForValue)5404     public Builder addStatusHistory(
5405         int index, com.google.cloud.dataproc.v1.JobStatus.Builder builderForValue) {
5406       if (statusHistoryBuilder_ == null) {
5407         ensureStatusHistoryIsMutable();
5408         statusHistory_.add(index, builderForValue.build());
5409         onChanged();
5410       } else {
5411         statusHistoryBuilder_.addMessage(index, builderForValue.build());
5412       }
5413       return this;
5414     }
5415     /**
5416      *
5417      *
5418      * <pre>
5419      * Output only. The previous job status.
5420      * </pre>
5421      *
5422      * <code>
5423      * repeated .google.cloud.dataproc.v1.JobStatus status_history = 13 [(.google.api.field_behavior) = OUTPUT_ONLY];
5424      * </code>
5425      */
addAllStatusHistory( java.lang.Iterable<? extends com.google.cloud.dataproc.v1.JobStatus> values)5426     public Builder addAllStatusHistory(
5427         java.lang.Iterable<? extends com.google.cloud.dataproc.v1.JobStatus> values) {
5428       if (statusHistoryBuilder_ == null) {
5429         ensureStatusHistoryIsMutable();
5430         com.google.protobuf.AbstractMessageLite.Builder.addAll(values, statusHistory_);
5431         onChanged();
5432       } else {
5433         statusHistoryBuilder_.addAllMessages(values);
5434       }
5435       return this;
5436     }
5437     /**
5438      *
5439      *
5440      * <pre>
5441      * Output only. The previous job status.
5442      * </pre>
5443      *
5444      * <code>
5445      * repeated .google.cloud.dataproc.v1.JobStatus status_history = 13 [(.google.api.field_behavior) = OUTPUT_ONLY];
5446      * </code>
5447      */
clearStatusHistory()5448     public Builder clearStatusHistory() {
5449       if (statusHistoryBuilder_ == null) {
5450         statusHistory_ = java.util.Collections.emptyList();
5451         bitField0_ = (bitField0_ & ~0x00001000);
5452         onChanged();
5453       } else {
5454         statusHistoryBuilder_.clear();
5455       }
5456       return this;
5457     }
5458     /**
5459      *
5460      *
5461      * <pre>
5462      * Output only. The previous job status.
5463      * </pre>
5464      *
5465      * <code>
5466      * repeated .google.cloud.dataproc.v1.JobStatus status_history = 13 [(.google.api.field_behavior) = OUTPUT_ONLY];
5467      * </code>
5468      */
removeStatusHistory(int index)5469     public Builder removeStatusHistory(int index) {
5470       if (statusHistoryBuilder_ == null) {
5471         ensureStatusHistoryIsMutable();
5472         statusHistory_.remove(index);
5473         onChanged();
5474       } else {
5475         statusHistoryBuilder_.remove(index);
5476       }
5477       return this;
5478     }
5479     /**
5480      *
5481      *
5482      * <pre>
5483      * Output only. The previous job status.
5484      * </pre>
5485      *
5486      * <code>
5487      * repeated .google.cloud.dataproc.v1.JobStatus status_history = 13 [(.google.api.field_behavior) = OUTPUT_ONLY];
5488      * </code>
5489      */
getStatusHistoryBuilder(int index)5490     public com.google.cloud.dataproc.v1.JobStatus.Builder getStatusHistoryBuilder(int index) {
5491       return getStatusHistoryFieldBuilder().getBuilder(index);
5492     }
5493     /**
5494      *
5495      *
5496      * <pre>
5497      * Output only. The previous job status.
5498      * </pre>
5499      *
5500      * <code>
5501      * repeated .google.cloud.dataproc.v1.JobStatus status_history = 13 [(.google.api.field_behavior) = OUTPUT_ONLY];
5502      * </code>
5503      */
getStatusHistoryOrBuilder(int index)5504     public com.google.cloud.dataproc.v1.JobStatusOrBuilder getStatusHistoryOrBuilder(int index) {
5505       if (statusHistoryBuilder_ == null) {
5506         return statusHistory_.get(index);
5507       } else {
5508         return statusHistoryBuilder_.getMessageOrBuilder(index);
5509       }
5510     }
5511     /**
5512      *
5513      *
5514      * <pre>
5515      * Output only. The previous job status.
5516      * </pre>
5517      *
5518      * <code>
5519      * repeated .google.cloud.dataproc.v1.JobStatus status_history = 13 [(.google.api.field_behavior) = OUTPUT_ONLY];
5520      * </code>
5521      */
5522     public java.util.List<? extends com.google.cloud.dataproc.v1.JobStatusOrBuilder>
getStatusHistoryOrBuilderList()5523         getStatusHistoryOrBuilderList() {
5524       if (statusHistoryBuilder_ != null) {
5525         return statusHistoryBuilder_.getMessageOrBuilderList();
5526       } else {
5527         return java.util.Collections.unmodifiableList(statusHistory_);
5528       }
5529     }
5530     /**
5531      *
5532      *
5533      * <pre>
5534      * Output only. The previous job status.
5535      * </pre>
5536      *
5537      * <code>
5538      * repeated .google.cloud.dataproc.v1.JobStatus status_history = 13 [(.google.api.field_behavior) = OUTPUT_ONLY];
5539      * </code>
5540      */
addStatusHistoryBuilder()5541     public com.google.cloud.dataproc.v1.JobStatus.Builder addStatusHistoryBuilder() {
5542       return getStatusHistoryFieldBuilder()
5543           .addBuilder(com.google.cloud.dataproc.v1.JobStatus.getDefaultInstance());
5544     }
5545     /**
5546      *
5547      *
5548      * <pre>
5549      * Output only. The previous job status.
5550      * </pre>
5551      *
5552      * <code>
5553      * repeated .google.cloud.dataproc.v1.JobStatus status_history = 13 [(.google.api.field_behavior) = OUTPUT_ONLY];
5554      * </code>
5555      */
addStatusHistoryBuilder(int index)5556     public com.google.cloud.dataproc.v1.JobStatus.Builder addStatusHistoryBuilder(int index) {
5557       return getStatusHistoryFieldBuilder()
5558           .addBuilder(index, com.google.cloud.dataproc.v1.JobStatus.getDefaultInstance());
5559     }
5560     /**
5561      *
5562      *
5563      * <pre>
5564      * Output only. The previous job status.
5565      * </pre>
5566      *
5567      * <code>
5568      * repeated .google.cloud.dataproc.v1.JobStatus status_history = 13 [(.google.api.field_behavior) = OUTPUT_ONLY];
5569      * </code>
5570      */
5571     public java.util.List<com.google.cloud.dataproc.v1.JobStatus.Builder>
getStatusHistoryBuilderList()5572         getStatusHistoryBuilderList() {
5573       return getStatusHistoryFieldBuilder().getBuilderList();
5574     }
5575 
5576     private com.google.protobuf.RepeatedFieldBuilderV3<
5577             com.google.cloud.dataproc.v1.JobStatus,
5578             com.google.cloud.dataproc.v1.JobStatus.Builder,
5579             com.google.cloud.dataproc.v1.JobStatusOrBuilder>
getStatusHistoryFieldBuilder()5580         getStatusHistoryFieldBuilder() {
5581       if (statusHistoryBuilder_ == null) {
5582         statusHistoryBuilder_ =
5583             new com.google.protobuf.RepeatedFieldBuilderV3<
5584                 com.google.cloud.dataproc.v1.JobStatus,
5585                 com.google.cloud.dataproc.v1.JobStatus.Builder,
5586                 com.google.cloud.dataproc.v1.JobStatusOrBuilder>(
5587                 statusHistory_,
5588                 ((bitField0_ & 0x00001000) != 0),
5589                 getParentForChildren(),
5590                 isClean());
5591         statusHistory_ = null;
5592       }
5593       return statusHistoryBuilder_;
5594     }
5595 
    // Backing list for the repeated `yarn_applications` field. Starts as the
    // shared immutable empty list; ensureYarnApplicationsIsMutable() swaps in an
    // ArrayList before mutation. Null once yarnApplicationsBuilder_ takes over.
    private java.util.List<com.google.cloud.dataproc.v1.YarnApplication> yarnApplications_ =
        java.util.Collections.emptyList();
5598 
ensureYarnApplicationsIsMutable()5599     private void ensureYarnApplicationsIsMutable() {
5600       if (!((bitField0_ & 0x00002000) != 0)) {
5601         yarnApplications_ =
5602             new java.util.ArrayList<com.google.cloud.dataproc.v1.YarnApplication>(
5603                 yarnApplications_);
5604         bitField0_ |= 0x00002000;
5605       }
5606     }
5607 
    // Lazily-created nested-builder support for `yarn_applications`. While null,
    // the plain yarnApplications_ list is authoritative; afterwards all access
    // goes through this field builder (see getYarnApplicationsFieldBuilder()).
    private com.google.protobuf.RepeatedFieldBuilderV3<
            com.google.cloud.dataproc.v1.YarnApplication,
            com.google.cloud.dataproc.v1.YarnApplication.Builder,
            com.google.cloud.dataproc.v1.YarnApplicationOrBuilder>
        yarnApplicationsBuilder_;
5613 
5614     /**
5615      *
5616      *
5617      * <pre>
5618      * Output only. The collection of YARN applications spun up by this job.
5619      * **Beta** Feature: This report is available for testing purposes only. It
5620      * may be changed before final release.
5621      * </pre>
5622      *
5623      * <code>
5624      * repeated .google.cloud.dataproc.v1.YarnApplication yarn_applications = 9 [(.google.api.field_behavior) = OUTPUT_ONLY];
5625      * </code>
5626      */
getYarnApplicationsList()5627     public java.util.List<com.google.cloud.dataproc.v1.YarnApplication> getYarnApplicationsList() {
5628       if (yarnApplicationsBuilder_ == null) {
5629         return java.util.Collections.unmodifiableList(yarnApplications_);
5630       } else {
5631         return yarnApplicationsBuilder_.getMessageList();
5632       }
5633     }
5634     /**
5635      *
5636      *
5637      * <pre>
5638      * Output only. The collection of YARN applications spun up by this job.
5639      * **Beta** Feature: This report is available for testing purposes only. It
5640      * may be changed before final release.
5641      * </pre>
5642      *
5643      * <code>
5644      * repeated .google.cloud.dataproc.v1.YarnApplication yarn_applications = 9 [(.google.api.field_behavior) = OUTPUT_ONLY];
5645      * </code>
5646      */
getYarnApplicationsCount()5647     public int getYarnApplicationsCount() {
5648       if (yarnApplicationsBuilder_ == null) {
5649         return yarnApplications_.size();
5650       } else {
5651         return yarnApplicationsBuilder_.getCount();
5652       }
5653     }
5654     /**
5655      *
5656      *
5657      * <pre>
5658      * Output only. The collection of YARN applications spun up by this job.
5659      * **Beta** Feature: This report is available for testing purposes only. It
5660      * may be changed before final release.
5661      * </pre>
5662      *
5663      * <code>
5664      * repeated .google.cloud.dataproc.v1.YarnApplication yarn_applications = 9 [(.google.api.field_behavior) = OUTPUT_ONLY];
5665      * </code>
5666      */
getYarnApplications(int index)5667     public com.google.cloud.dataproc.v1.YarnApplication getYarnApplications(int index) {
5668       if (yarnApplicationsBuilder_ == null) {
5669         return yarnApplications_.get(index);
5670       } else {
5671         return yarnApplicationsBuilder_.getMessage(index);
5672       }
5673     }
5674     /**
5675      *
5676      *
5677      * <pre>
5678      * Output only. The collection of YARN applications spun up by this job.
5679      * **Beta** Feature: This report is available for testing purposes only. It
5680      * may be changed before final release.
5681      * </pre>
5682      *
5683      * <code>
5684      * repeated .google.cloud.dataproc.v1.YarnApplication yarn_applications = 9 [(.google.api.field_behavior) = OUTPUT_ONLY];
5685      * </code>
5686      */
setYarnApplications( int index, com.google.cloud.dataproc.v1.YarnApplication value)5687     public Builder setYarnApplications(
5688         int index, com.google.cloud.dataproc.v1.YarnApplication value) {
5689       if (yarnApplicationsBuilder_ == null) {
5690         if (value == null) {
5691           throw new NullPointerException();
5692         }
5693         ensureYarnApplicationsIsMutable();
5694         yarnApplications_.set(index, value);
5695         onChanged();
5696       } else {
5697         yarnApplicationsBuilder_.setMessage(index, value);
5698       }
5699       return this;
5700     }
5701     /**
5702      *
5703      *
5704      * <pre>
5705      * Output only. The collection of YARN applications spun up by this job.
5706      * **Beta** Feature: This report is available for testing purposes only. It
5707      * may be changed before final release.
5708      * </pre>
5709      *
5710      * <code>
5711      * repeated .google.cloud.dataproc.v1.YarnApplication yarn_applications = 9 [(.google.api.field_behavior) = OUTPUT_ONLY];
5712      * </code>
5713      */
setYarnApplications( int index, com.google.cloud.dataproc.v1.YarnApplication.Builder builderForValue)5714     public Builder setYarnApplications(
5715         int index, com.google.cloud.dataproc.v1.YarnApplication.Builder builderForValue) {
5716       if (yarnApplicationsBuilder_ == null) {
5717         ensureYarnApplicationsIsMutable();
5718         yarnApplications_.set(index, builderForValue.build());
5719         onChanged();
5720       } else {
5721         yarnApplicationsBuilder_.setMessage(index, builderForValue.build());
5722       }
5723       return this;
5724     }
5725     /**
5726      *
5727      *
5728      * <pre>
5729      * Output only. The collection of YARN applications spun up by this job.
5730      * **Beta** Feature: This report is available for testing purposes only. It
5731      * may be changed before final release.
5732      * </pre>
5733      *
5734      * <code>
5735      * repeated .google.cloud.dataproc.v1.YarnApplication yarn_applications = 9 [(.google.api.field_behavior) = OUTPUT_ONLY];
5736      * </code>
5737      */
addYarnApplications(com.google.cloud.dataproc.v1.YarnApplication value)5738     public Builder addYarnApplications(com.google.cloud.dataproc.v1.YarnApplication value) {
5739       if (yarnApplicationsBuilder_ == null) {
5740         if (value == null) {
5741           throw new NullPointerException();
5742         }
5743         ensureYarnApplicationsIsMutable();
5744         yarnApplications_.add(value);
5745         onChanged();
5746       } else {
5747         yarnApplicationsBuilder_.addMessage(value);
5748       }
5749       return this;
5750     }
5751     /**
5752      *
5753      *
5754      * <pre>
5755      * Output only. The collection of YARN applications spun up by this job.
5756      * **Beta** Feature: This report is available for testing purposes only. It
5757      * may be changed before final release.
5758      * </pre>
5759      *
5760      * <code>
5761      * repeated .google.cloud.dataproc.v1.YarnApplication yarn_applications = 9 [(.google.api.field_behavior) = OUTPUT_ONLY];
5762      * </code>
5763      */
addYarnApplications( int index, com.google.cloud.dataproc.v1.YarnApplication value)5764     public Builder addYarnApplications(
5765         int index, com.google.cloud.dataproc.v1.YarnApplication value) {
5766       if (yarnApplicationsBuilder_ == null) {
5767         if (value == null) {
5768           throw new NullPointerException();
5769         }
5770         ensureYarnApplicationsIsMutable();
5771         yarnApplications_.add(index, value);
5772         onChanged();
5773       } else {
5774         yarnApplicationsBuilder_.addMessage(index, value);
5775       }
5776       return this;
5777     }
5778     /**
5779      *
5780      *
5781      * <pre>
5782      * Output only. The collection of YARN applications spun up by this job.
5783      * **Beta** Feature: This report is available for testing purposes only. It
5784      * may be changed before final release.
5785      * </pre>
5786      *
5787      * <code>
5788      * repeated .google.cloud.dataproc.v1.YarnApplication yarn_applications = 9 [(.google.api.field_behavior) = OUTPUT_ONLY];
5789      * </code>
5790      */
addYarnApplications( com.google.cloud.dataproc.v1.YarnApplication.Builder builderForValue)5791     public Builder addYarnApplications(
5792         com.google.cloud.dataproc.v1.YarnApplication.Builder builderForValue) {
5793       if (yarnApplicationsBuilder_ == null) {
5794         ensureYarnApplicationsIsMutable();
5795         yarnApplications_.add(builderForValue.build());
5796         onChanged();
5797       } else {
5798         yarnApplicationsBuilder_.addMessage(builderForValue.build());
5799       }
5800       return this;
5801     }
5802     /**
5803      *
5804      *
5805      * <pre>
5806      * Output only. The collection of YARN applications spun up by this job.
5807      * **Beta** Feature: This report is available for testing purposes only. It
5808      * may be changed before final release.
5809      * </pre>
5810      *
5811      * <code>
5812      * repeated .google.cloud.dataproc.v1.YarnApplication yarn_applications = 9 [(.google.api.field_behavior) = OUTPUT_ONLY];
5813      * </code>
5814      */
addYarnApplications( int index, com.google.cloud.dataproc.v1.YarnApplication.Builder builderForValue)5815     public Builder addYarnApplications(
5816         int index, com.google.cloud.dataproc.v1.YarnApplication.Builder builderForValue) {
5817       if (yarnApplicationsBuilder_ == null) {
5818         ensureYarnApplicationsIsMutable();
5819         yarnApplications_.add(index, builderForValue.build());
5820         onChanged();
5821       } else {
5822         yarnApplicationsBuilder_.addMessage(index, builderForValue.build());
5823       }
5824       return this;
5825     }
5826     /**
5827      *
5828      *
5829      * <pre>
5830      * Output only. The collection of YARN applications spun up by this job.
5831      * **Beta** Feature: This report is available for testing purposes only. It
5832      * may be changed before final release.
5833      * </pre>
5834      *
5835      * <code>
5836      * repeated .google.cloud.dataproc.v1.YarnApplication yarn_applications = 9 [(.google.api.field_behavior) = OUTPUT_ONLY];
5837      * </code>
5838      */
addAllYarnApplications( java.lang.Iterable<? extends com.google.cloud.dataproc.v1.YarnApplication> values)5839     public Builder addAllYarnApplications(
5840         java.lang.Iterable<? extends com.google.cloud.dataproc.v1.YarnApplication> values) {
5841       if (yarnApplicationsBuilder_ == null) {
5842         ensureYarnApplicationsIsMutable();
5843         com.google.protobuf.AbstractMessageLite.Builder.addAll(values, yarnApplications_);
5844         onChanged();
5845       } else {
5846         yarnApplicationsBuilder_.addAllMessages(values);
5847       }
5848       return this;
5849     }
5850     /**
5851      *
5852      *
5853      * <pre>
5854      * Output only. The collection of YARN applications spun up by this job.
5855      * **Beta** Feature: This report is available for testing purposes only. It
5856      * may be changed before final release.
5857      * </pre>
5858      *
5859      * <code>
5860      * repeated .google.cloud.dataproc.v1.YarnApplication yarn_applications = 9 [(.google.api.field_behavior) = OUTPUT_ONLY];
5861      * </code>
5862      */
clearYarnApplications()5863     public Builder clearYarnApplications() {
5864       if (yarnApplicationsBuilder_ == null) {
5865         yarnApplications_ = java.util.Collections.emptyList();
5866         bitField0_ = (bitField0_ & ~0x00002000);
5867         onChanged();
5868       } else {
5869         yarnApplicationsBuilder_.clear();
5870       }
5871       return this;
5872     }
5873     /**
5874      *
5875      *
5876      * <pre>
5877      * Output only. The collection of YARN applications spun up by this job.
5878      * **Beta** Feature: This report is available for testing purposes only. It
5879      * may be changed before final release.
5880      * </pre>
5881      *
5882      * <code>
5883      * repeated .google.cloud.dataproc.v1.YarnApplication yarn_applications = 9 [(.google.api.field_behavior) = OUTPUT_ONLY];
5884      * </code>
5885      */
removeYarnApplications(int index)5886     public Builder removeYarnApplications(int index) {
5887       if (yarnApplicationsBuilder_ == null) {
5888         ensureYarnApplicationsIsMutable();
5889         yarnApplications_.remove(index);
5890         onChanged();
5891       } else {
5892         yarnApplicationsBuilder_.remove(index);
5893       }
5894       return this;
5895     }
5896     /**
5897      *
5898      *
5899      * <pre>
5900      * Output only. The collection of YARN applications spun up by this job.
5901      * **Beta** Feature: This report is available for testing purposes only. It
5902      * may be changed before final release.
5903      * </pre>
5904      *
5905      * <code>
5906      * repeated .google.cloud.dataproc.v1.YarnApplication yarn_applications = 9 [(.google.api.field_behavior) = OUTPUT_ONLY];
5907      * </code>
5908      */
getYarnApplicationsBuilder( int index)5909     public com.google.cloud.dataproc.v1.YarnApplication.Builder getYarnApplicationsBuilder(
5910         int index) {
5911       return getYarnApplicationsFieldBuilder().getBuilder(index);
5912     }
5913     /**
5914      *
5915      *
5916      * <pre>
5917      * Output only. The collection of YARN applications spun up by this job.
5918      * **Beta** Feature: This report is available for testing purposes only. It
5919      * may be changed before final release.
5920      * </pre>
5921      *
5922      * <code>
5923      * repeated .google.cloud.dataproc.v1.YarnApplication yarn_applications = 9 [(.google.api.field_behavior) = OUTPUT_ONLY];
5924      * </code>
5925      */
getYarnApplicationsOrBuilder( int index)5926     public com.google.cloud.dataproc.v1.YarnApplicationOrBuilder getYarnApplicationsOrBuilder(
5927         int index) {
5928       if (yarnApplicationsBuilder_ == null) {
5929         return yarnApplications_.get(index);
5930       } else {
5931         return yarnApplicationsBuilder_.getMessageOrBuilder(index);
5932       }
5933     }
5934     /**
5935      *
5936      *
5937      * <pre>
5938      * Output only. The collection of YARN applications spun up by this job.
5939      * **Beta** Feature: This report is available for testing purposes only. It
5940      * may be changed before final release.
5941      * </pre>
5942      *
5943      * <code>
5944      * repeated .google.cloud.dataproc.v1.YarnApplication yarn_applications = 9 [(.google.api.field_behavior) = OUTPUT_ONLY];
5945      * </code>
5946      */
5947     public java.util.List<? extends com.google.cloud.dataproc.v1.YarnApplicationOrBuilder>
getYarnApplicationsOrBuilderList()5948         getYarnApplicationsOrBuilderList() {
5949       if (yarnApplicationsBuilder_ != null) {
5950         return yarnApplicationsBuilder_.getMessageOrBuilderList();
5951       } else {
5952         return java.util.Collections.unmodifiableList(yarnApplications_);
5953       }
5954     }
5955     /**
5956      *
5957      *
5958      * <pre>
5959      * Output only. The collection of YARN applications spun up by this job.
5960      * **Beta** Feature: This report is available for testing purposes only. It
5961      * may be changed before final release.
5962      * </pre>
5963      *
5964      * <code>
5965      * repeated .google.cloud.dataproc.v1.YarnApplication yarn_applications = 9 [(.google.api.field_behavior) = OUTPUT_ONLY];
5966      * </code>
5967      */
addYarnApplicationsBuilder()5968     public com.google.cloud.dataproc.v1.YarnApplication.Builder addYarnApplicationsBuilder() {
5969       return getYarnApplicationsFieldBuilder()
5970           .addBuilder(com.google.cloud.dataproc.v1.YarnApplication.getDefaultInstance());
5971     }
5972     /**
5973      *
5974      *
5975      * <pre>
5976      * Output only. The collection of YARN applications spun up by this job.
5977      * **Beta** Feature: This report is available for testing purposes only. It
5978      * may be changed before final release.
5979      * </pre>
5980      *
5981      * <code>
5982      * repeated .google.cloud.dataproc.v1.YarnApplication yarn_applications = 9 [(.google.api.field_behavior) = OUTPUT_ONLY];
5983      * </code>
5984      */
addYarnApplicationsBuilder( int index)5985     public com.google.cloud.dataproc.v1.YarnApplication.Builder addYarnApplicationsBuilder(
5986         int index) {
5987       return getYarnApplicationsFieldBuilder()
5988           .addBuilder(index, com.google.cloud.dataproc.v1.YarnApplication.getDefaultInstance());
5989     }
5990     /**
5991      *
5992      *
5993      * <pre>
5994      * Output only. The collection of YARN applications spun up by this job.
5995      * **Beta** Feature: This report is available for testing purposes only. It
5996      * may be changed before final release.
5997      * </pre>
5998      *
5999      * <code>
6000      * repeated .google.cloud.dataproc.v1.YarnApplication yarn_applications = 9 [(.google.api.field_behavior) = OUTPUT_ONLY];
6001      * </code>
6002      */
6003     public java.util.List<com.google.cloud.dataproc.v1.YarnApplication.Builder>
getYarnApplicationsBuilderList()6004         getYarnApplicationsBuilderList() {
6005       return getYarnApplicationsFieldBuilder().getBuilderList();
6006     }
6007 
6008     private com.google.protobuf.RepeatedFieldBuilderV3<
6009             com.google.cloud.dataproc.v1.YarnApplication,
6010             com.google.cloud.dataproc.v1.YarnApplication.Builder,
6011             com.google.cloud.dataproc.v1.YarnApplicationOrBuilder>
getYarnApplicationsFieldBuilder()6012         getYarnApplicationsFieldBuilder() {
6013       if (yarnApplicationsBuilder_ == null) {
6014         yarnApplicationsBuilder_ =
6015             new com.google.protobuf.RepeatedFieldBuilderV3<
6016                 com.google.cloud.dataproc.v1.YarnApplication,
6017                 com.google.cloud.dataproc.v1.YarnApplication.Builder,
6018                 com.google.cloud.dataproc.v1.YarnApplicationOrBuilder>(
6019                 yarnApplications_,
6020                 ((bitField0_ & 0x00002000) != 0),
6021                 getParentForChildren(),
6022                 isClean());
6023         yarnApplications_ = null;
6024       }
6025       return yarnApplicationsBuilder_;
6026     }
6027 
    // Holds either a java.lang.String or a com.google.protobuf.ByteString;
    // the accessors below convert lazily and cache the converted form.
    private java.lang.Object driverOutputResourceUri_ = "";
6029     /**
6030      *
6031      *
6032      * <pre>
6033      * Output only. A URI pointing to the location of the stdout of the job's
6034      * driver program.
6035      * </pre>
6036      *
6037      * <code>string driver_output_resource_uri = 17 [(.google.api.field_behavior) = OUTPUT_ONLY];
6038      * </code>
6039      *
6040      * @return The driverOutputResourceUri.
6041      */
getDriverOutputResourceUri()6042     public java.lang.String getDriverOutputResourceUri() {
6043       java.lang.Object ref = driverOutputResourceUri_;
6044       if (!(ref instanceof java.lang.String)) {
6045         com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
6046         java.lang.String s = bs.toStringUtf8();
6047         driverOutputResourceUri_ = s;
6048         return s;
6049       } else {
6050         return (java.lang.String) ref;
6051       }
6052     }
6053     /**
6054      *
6055      *
6056      * <pre>
6057      * Output only. A URI pointing to the location of the stdout of the job's
6058      * driver program.
6059      * </pre>
6060      *
6061      * <code>string driver_output_resource_uri = 17 [(.google.api.field_behavior) = OUTPUT_ONLY];
6062      * </code>
6063      *
6064      * @return The bytes for driverOutputResourceUri.
6065      */
getDriverOutputResourceUriBytes()6066     public com.google.protobuf.ByteString getDriverOutputResourceUriBytes() {
6067       java.lang.Object ref = driverOutputResourceUri_;
6068       if (ref instanceof String) {
6069         com.google.protobuf.ByteString b =
6070             com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
6071         driverOutputResourceUri_ = b;
6072         return b;
6073       } else {
6074         return (com.google.protobuf.ByteString) ref;
6075       }
6076     }
6077     /**
6078      *
6079      *
6080      * <pre>
6081      * Output only. A URI pointing to the location of the stdout of the job's
6082      * driver program.
6083      * </pre>
6084      *
6085      * <code>string driver_output_resource_uri = 17 [(.google.api.field_behavior) = OUTPUT_ONLY];
6086      * </code>
6087      *
6088      * @param value The driverOutputResourceUri to set.
6089      * @return This builder for chaining.
6090      */
setDriverOutputResourceUri(java.lang.String value)6091     public Builder setDriverOutputResourceUri(java.lang.String value) {
6092       if (value == null) {
6093         throw new NullPointerException();
6094       }
6095       driverOutputResourceUri_ = value;
6096       bitField0_ |= 0x00004000;
6097       onChanged();
6098       return this;
6099     }
6100     /**
6101      *
6102      *
6103      * <pre>
6104      * Output only. A URI pointing to the location of the stdout of the job's
6105      * driver program.
6106      * </pre>
6107      *
6108      * <code>string driver_output_resource_uri = 17 [(.google.api.field_behavior) = OUTPUT_ONLY];
6109      * </code>
6110      *
6111      * @return This builder for chaining.
6112      */
clearDriverOutputResourceUri()6113     public Builder clearDriverOutputResourceUri() {
6114       driverOutputResourceUri_ = getDefaultInstance().getDriverOutputResourceUri();
6115       bitField0_ = (bitField0_ & ~0x00004000);
6116       onChanged();
6117       return this;
6118     }
6119     /**
6120      *
6121      *
6122      * <pre>
6123      * Output only. A URI pointing to the location of the stdout of the job's
6124      * driver program.
6125      * </pre>
6126      *
6127      * <code>string driver_output_resource_uri = 17 [(.google.api.field_behavior) = OUTPUT_ONLY];
6128      * </code>
6129      *
6130      * @param value The bytes for driverOutputResourceUri to set.
6131      * @return This builder for chaining.
6132      */
setDriverOutputResourceUriBytes(com.google.protobuf.ByteString value)6133     public Builder setDriverOutputResourceUriBytes(com.google.protobuf.ByteString value) {
6134       if (value == null) {
6135         throw new NullPointerException();
6136       }
6137       checkByteStringIsUtf8(value);
6138       driverOutputResourceUri_ = value;
6139       bitField0_ |= 0x00004000;
6140       onChanged();
6141       return this;
6142     }
6143 
    // Holds either a String or a ByteString; decoded lazily on first String access.
    private java.lang.Object driverControlFilesUri_ = "";
    /**
     *
     *
     * <pre>
     * Output only. If present, the location of miscellaneous control files
     * which may be used as part of job setup and handling. If not present,
     * control files may be placed in the same location as `driver_output_uri`.
     * </pre>
     *
     * <code>string driver_control_files_uri = 15 [(.google.api.field_behavior) = OUTPUT_ONLY];
     * </code>
     *
     * @return The driverControlFilesUri.
     */
    public java.lang.String getDriverControlFilesUri() {
      java.lang.Object ref = driverControlFilesUri_;
      if (!(ref instanceof java.lang.String)) {
        com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
        java.lang.String s = bs.toStringUtf8();
        // Cache the decoded String so subsequent calls skip the UTF-8 conversion.
        driverControlFilesUri_ = s;
        return s;
      } else {
        return (java.lang.String) ref;
      }
    }
6170     /**
6171      *
6172      *
6173      * <pre>
6174      * Output only. If present, the location of miscellaneous control files
6175      * which may be used as part of job setup and handling. If not present,
6176      * control files may be placed in the same location as `driver_output_uri`.
6177      * </pre>
6178      *
6179      * <code>string driver_control_files_uri = 15 [(.google.api.field_behavior) = OUTPUT_ONLY];
6180      * </code>
6181      *
6182      * @return The bytes for driverControlFilesUri.
6183      */
getDriverControlFilesUriBytes()6184     public com.google.protobuf.ByteString getDriverControlFilesUriBytes() {
6185       java.lang.Object ref = driverControlFilesUri_;
6186       if (ref instanceof String) {
6187         com.google.protobuf.ByteString b =
6188             com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
6189         driverControlFilesUri_ = b;
6190         return b;
6191       } else {
6192         return (com.google.protobuf.ByteString) ref;
6193       }
6194     }
6195     /**
6196      *
6197      *
6198      * <pre>
6199      * Output only. If present, the location of miscellaneous control files
6200      * which may be used as part of job setup and handling. If not present,
6201      * control files may be placed in the same location as `driver_output_uri`.
6202      * </pre>
6203      *
6204      * <code>string driver_control_files_uri = 15 [(.google.api.field_behavior) = OUTPUT_ONLY];
6205      * </code>
6206      *
6207      * @param value The driverControlFilesUri to set.
6208      * @return This builder for chaining.
6209      */
setDriverControlFilesUri(java.lang.String value)6210     public Builder setDriverControlFilesUri(java.lang.String value) {
6211       if (value == null) {
6212         throw new NullPointerException();
6213       }
6214       driverControlFilesUri_ = value;
6215       bitField0_ |= 0x00008000;
6216       onChanged();
6217       return this;
6218     }
6219     /**
6220      *
6221      *
6222      * <pre>
6223      * Output only. If present, the location of miscellaneous control files
6224      * which may be used as part of job setup and handling. If not present,
6225      * control files may be placed in the same location as `driver_output_uri`.
6226      * </pre>
6227      *
6228      * <code>string driver_control_files_uri = 15 [(.google.api.field_behavior) = OUTPUT_ONLY];
6229      * </code>
6230      *
6231      * @return This builder for chaining.
6232      */
clearDriverControlFilesUri()6233     public Builder clearDriverControlFilesUri() {
6234       driverControlFilesUri_ = getDefaultInstance().getDriverControlFilesUri();
6235       bitField0_ = (bitField0_ & ~0x00008000);
6236       onChanged();
6237       return this;
6238     }
6239     /**
6240      *
6241      *
6242      * <pre>
6243      * Output only. If present, the location of miscellaneous control files
6244      * which may be used as part of job setup and handling. If not present,
6245      * control files may be placed in the same location as `driver_output_uri`.
6246      * </pre>
6247      *
6248      * <code>string driver_control_files_uri = 15 [(.google.api.field_behavior) = OUTPUT_ONLY];
6249      * </code>
6250      *
6251      * @param value The bytes for driverControlFilesUri to set.
6252      * @return This builder for chaining.
6253      */
setDriverControlFilesUriBytes(com.google.protobuf.ByteString value)6254     public Builder setDriverControlFilesUriBytes(com.google.protobuf.ByteString value) {
6255       if (value == null) {
6256         throw new NullPointerException();
6257       }
6258       checkByteStringIsUtf8(value);
6259       driverControlFilesUri_ = value;
6260       bitField0_ |= 0x00008000;
6261       onChanged();
6262       return this;
6263     }
6264 
    private com.google.protobuf.MapField<java.lang.String, java.lang.String> labels_;

    // Read-only view of the labels map; returns a shared empty MapField when unset.
    private com.google.protobuf.MapField<java.lang.String, java.lang.String> internalGetLabels() {
      if (labels_ == null) {
        return com.google.protobuf.MapField.emptyMapField(LabelsDefaultEntryHolder.defaultEntry);
      }
      return labels_;
    }
6273 
    // Mutable access to the labels map: creates the MapField on demand, copies it
    // if it is currently immutable, marks the has-bit, and notifies the parent.
    private com.google.protobuf.MapField<java.lang.String, java.lang.String>
        internalGetMutableLabels() {
      if (labels_ == null) {
        labels_ = com.google.protobuf.MapField.newMapField(LabelsDefaultEntryHolder.defaultEntry);
      }
      if (!labels_.isMutable()) {
        labels_ = labels_.copy(); // copy-on-write before mutation
      }
      bitField0_ |= 0x00010000;
      onChanged();
      return labels_;
    }
6286 
getLabelsCount()6287     public int getLabelsCount() {
6288       return internalGetLabels().getMap().size();
6289     }
6290     /**
6291      *
6292      *
6293      * <pre>
6294      * Optional. The labels to associate with this job.
6295      * Label **keys** must contain 1 to 63 characters, and must conform to
6296      * [RFC 1035](https://www.ietf.org/rfc/rfc1035.txt).
6297      * Label **values** may be empty, but, if present, must contain 1 to 63
6298      * characters, and must conform to [RFC
6299      * 1035](https://www.ietf.org/rfc/rfc1035.txt). No more than 32 labels can be
6300      * associated with a job.
6301      * </pre>
6302      *
6303      * <code>map&lt;string, string&gt; labels = 18 [(.google.api.field_behavior) = OPTIONAL];</code>
6304      */
6305     @java.lang.Override
containsLabels(java.lang.String key)6306     public boolean containsLabels(java.lang.String key) {
6307       if (key == null) {
6308         throw new NullPointerException("map key");
6309       }
6310       return internalGetLabels().getMap().containsKey(key);
6311     }
    /** Use {@link #getLabelsMap()} instead. */
    @java.lang.Override
    @java.lang.Deprecated
    public java.util.Map<java.lang.String, java.lang.String> getLabels() {
      return getLabelsMap(); // identical behavior; retained for backward compatibility
    }
6318     /**
6319      *
6320      *
6321      * <pre>
6322      * Optional. The labels to associate with this job.
6323      * Label **keys** must contain 1 to 63 characters, and must conform to
6324      * [RFC 1035](https://www.ietf.org/rfc/rfc1035.txt).
6325      * Label **values** may be empty, but, if present, must contain 1 to 63
6326      * characters, and must conform to [RFC
6327      * 1035](https://www.ietf.org/rfc/rfc1035.txt). No more than 32 labels can be
6328      * associated with a job.
6329      * </pre>
6330      *
6331      * <code>map&lt;string, string&gt; labels = 18 [(.google.api.field_behavior) = OPTIONAL];</code>
6332      */
6333     @java.lang.Override
getLabelsMap()6334     public java.util.Map<java.lang.String, java.lang.String> getLabelsMap() {
6335       return internalGetLabels().getMap();
6336     }
6337     /**
6338      *
6339      *
6340      * <pre>
6341      * Optional. The labels to associate with this job.
6342      * Label **keys** must contain 1 to 63 characters, and must conform to
6343      * [RFC 1035](https://www.ietf.org/rfc/rfc1035.txt).
6344      * Label **values** may be empty, but, if present, must contain 1 to 63
6345      * characters, and must conform to [RFC
6346      * 1035](https://www.ietf.org/rfc/rfc1035.txt). No more than 32 labels can be
6347      * associated with a job.
6348      * </pre>
6349      *
6350      * <code>map&lt;string, string&gt; labels = 18 [(.google.api.field_behavior) = OPTIONAL];</code>
6351      */
6352     @java.lang.Override
getLabelsOrDefault( java.lang.String key, java.lang.String defaultValue)6353     public /* nullable */ java.lang.String getLabelsOrDefault(
6354         java.lang.String key,
6355         /* nullable */
6356         java.lang.String defaultValue) {
6357       if (key == null) {
6358         throw new NullPointerException("map key");
6359       }
6360       java.util.Map<java.lang.String, java.lang.String> map = internalGetLabels().getMap();
6361       return map.containsKey(key) ? map.get(key) : defaultValue;
6362     }
6363     /**
6364      *
6365      *
6366      * <pre>
6367      * Optional. The labels to associate with this job.
6368      * Label **keys** must contain 1 to 63 characters, and must conform to
6369      * [RFC 1035](https://www.ietf.org/rfc/rfc1035.txt).
6370      * Label **values** may be empty, but, if present, must contain 1 to 63
6371      * characters, and must conform to [RFC
6372      * 1035](https://www.ietf.org/rfc/rfc1035.txt). No more than 32 labels can be
6373      * associated with a job.
6374      * </pre>
6375      *
6376      * <code>map&lt;string, string&gt; labels = 18 [(.google.api.field_behavior) = OPTIONAL];</code>
6377      */
6378     @java.lang.Override
getLabelsOrThrow(java.lang.String key)6379     public java.lang.String getLabelsOrThrow(java.lang.String key) {
6380       if (key == null) {
6381         throw new NullPointerException("map key");
6382       }
6383       java.util.Map<java.lang.String, java.lang.String> map = internalGetLabels().getMap();
6384       if (!map.containsKey(key)) {
6385         throw new java.lang.IllegalArgumentException();
6386       }
6387       return map.get(key);
6388     }
6389 
clearLabels()6390     public Builder clearLabels() {
6391       bitField0_ = (bitField0_ & ~0x00010000);
6392       internalGetMutableLabels().getMutableMap().clear();
6393       return this;
6394     }
6395     /**
6396      *
6397      *
6398      * <pre>
6399      * Optional. The labels to associate with this job.
6400      * Label **keys** must contain 1 to 63 characters, and must conform to
6401      * [RFC 1035](https://www.ietf.org/rfc/rfc1035.txt).
6402      * Label **values** may be empty, but, if present, must contain 1 to 63
6403      * characters, and must conform to [RFC
6404      * 1035](https://www.ietf.org/rfc/rfc1035.txt). No more than 32 labels can be
6405      * associated with a job.
6406      * </pre>
6407      *
6408      * <code>map&lt;string, string&gt; labels = 18 [(.google.api.field_behavior) = OPTIONAL];</code>
6409      */
removeLabels(java.lang.String key)6410     public Builder removeLabels(java.lang.String key) {
6411       if (key == null) {
6412         throw new NullPointerException("map key");
6413       }
6414       internalGetMutableLabels().getMutableMap().remove(key);
6415       return this;
6416     }
    /** Use alternate mutation accessors instead. */
    @java.lang.Deprecated
    public java.util.Map<java.lang.String, java.lang.String> getMutableLabels() {
      bitField0_ |= 0x00010000; // mark as set since callers may mutate the returned map directly
      return internalGetMutableLabels().getMutableMap();
    }
6423     /**
6424      *
6425      *
6426      * <pre>
6427      * Optional. The labels to associate with this job.
6428      * Label **keys** must contain 1 to 63 characters, and must conform to
6429      * [RFC 1035](https://www.ietf.org/rfc/rfc1035.txt).
6430      * Label **values** may be empty, but, if present, must contain 1 to 63
6431      * characters, and must conform to [RFC
6432      * 1035](https://www.ietf.org/rfc/rfc1035.txt). No more than 32 labels can be
6433      * associated with a job.
6434      * </pre>
6435      *
6436      * <code>map&lt;string, string&gt; labels = 18 [(.google.api.field_behavior) = OPTIONAL];</code>
6437      */
putLabels(java.lang.String key, java.lang.String value)6438     public Builder putLabels(java.lang.String key, java.lang.String value) {
6439       if (key == null) {
6440         throw new NullPointerException("map key");
6441       }
6442       if (value == null) {
6443         throw new NullPointerException("map value");
6444       }
6445       internalGetMutableLabels().getMutableMap().put(key, value);
6446       bitField0_ |= 0x00010000;
6447       return this;
6448     }
6449     /**
6450      *
6451      *
6452      * <pre>
6453      * Optional. The labels to associate with this job.
6454      * Label **keys** must contain 1 to 63 characters, and must conform to
6455      * [RFC 1035](https://www.ietf.org/rfc/rfc1035.txt).
6456      * Label **values** may be empty, but, if present, must contain 1 to 63
6457      * characters, and must conform to [RFC
6458      * 1035](https://www.ietf.org/rfc/rfc1035.txt). No more than 32 labels can be
6459      * associated with a job.
6460      * </pre>
6461      *
6462      * <code>map&lt;string, string&gt; labels = 18 [(.google.api.field_behavior) = OPTIONAL];</code>
6463      */
putAllLabels(java.util.Map<java.lang.String, java.lang.String> values)6464     public Builder putAllLabels(java.util.Map<java.lang.String, java.lang.String> values) {
6465       internalGetMutableLabels().getMutableMap().putAll(values);
6466       bitField0_ |= 0x00010000;
6467       return this;
6468     }
6469 
    private com.google.cloud.dataproc.v1.JobScheduling scheduling_;
    // Lazily created single-field builder; while non-null it owns the message.
    private com.google.protobuf.SingleFieldBuilderV3<
            com.google.cloud.dataproc.v1.JobScheduling,
            com.google.cloud.dataproc.v1.JobScheduling.Builder,
            com.google.cloud.dataproc.v1.JobSchedulingOrBuilder>
        schedulingBuilder_;
    /**
     *
     *
     * <pre>
     * Optional. Job scheduling configuration.
     * </pre>
     *
     * <code>
     * .google.cloud.dataproc.v1.JobScheduling scheduling = 20 [(.google.api.field_behavior) = OPTIONAL];
     * </code>
     *
     * @return Whether the scheduling field is set.
     */
    public boolean hasScheduling() {
      return ((bitField0_ & 0x00020000) != 0);
    }
6492     /**
6493      *
6494      *
6495      * <pre>
6496      * Optional. Job scheduling configuration.
6497      * </pre>
6498      *
6499      * <code>
6500      * .google.cloud.dataproc.v1.JobScheduling scheduling = 20 [(.google.api.field_behavior) = OPTIONAL];
6501      * </code>
6502      *
6503      * @return The scheduling.
6504      */
getScheduling()6505     public com.google.cloud.dataproc.v1.JobScheduling getScheduling() {
6506       if (schedulingBuilder_ == null) {
6507         return scheduling_ == null
6508             ? com.google.cloud.dataproc.v1.JobScheduling.getDefaultInstance()
6509             : scheduling_;
6510       } else {
6511         return schedulingBuilder_.getMessage();
6512       }
6513     }
6514     /**
6515      *
6516      *
6517      * <pre>
6518      * Optional. Job scheduling configuration.
6519      * </pre>
6520      *
6521      * <code>
6522      * .google.cloud.dataproc.v1.JobScheduling scheduling = 20 [(.google.api.field_behavior) = OPTIONAL];
6523      * </code>
6524      */
setScheduling(com.google.cloud.dataproc.v1.JobScheduling value)6525     public Builder setScheduling(com.google.cloud.dataproc.v1.JobScheduling value) {
6526       if (schedulingBuilder_ == null) {
6527         if (value == null) {
6528           throw new NullPointerException();
6529         }
6530         scheduling_ = value;
6531       } else {
6532         schedulingBuilder_.setMessage(value);
6533       }
6534       bitField0_ |= 0x00020000;
6535       onChanged();
6536       return this;
6537     }
6538     /**
6539      *
6540      *
6541      * <pre>
6542      * Optional. Job scheduling configuration.
6543      * </pre>
6544      *
6545      * <code>
6546      * .google.cloud.dataproc.v1.JobScheduling scheduling = 20 [(.google.api.field_behavior) = OPTIONAL];
6547      * </code>
6548      */
setScheduling( com.google.cloud.dataproc.v1.JobScheduling.Builder builderForValue)6549     public Builder setScheduling(
6550         com.google.cloud.dataproc.v1.JobScheduling.Builder builderForValue) {
6551       if (schedulingBuilder_ == null) {
6552         scheduling_ = builderForValue.build();
6553       } else {
6554         schedulingBuilder_.setMessage(builderForValue.build());
6555       }
6556       bitField0_ |= 0x00020000;
6557       onChanged();
6558       return this;
6559     }
6560     /**
6561      *
6562      *
6563      * <pre>
6564      * Optional. Job scheduling configuration.
6565      * </pre>
6566      *
6567      * <code>
6568      * .google.cloud.dataproc.v1.JobScheduling scheduling = 20 [(.google.api.field_behavior) = OPTIONAL];
6569      * </code>
6570      */
mergeScheduling(com.google.cloud.dataproc.v1.JobScheduling value)6571     public Builder mergeScheduling(com.google.cloud.dataproc.v1.JobScheduling value) {
6572       if (schedulingBuilder_ == null) {
6573         if (((bitField0_ & 0x00020000) != 0)
6574             && scheduling_ != null
6575             && scheduling_ != com.google.cloud.dataproc.v1.JobScheduling.getDefaultInstance()) {
6576           getSchedulingBuilder().mergeFrom(value);
6577         } else {
6578           scheduling_ = value;
6579         }
6580       } else {
6581         schedulingBuilder_.mergeFrom(value);
6582       }
6583       bitField0_ |= 0x00020000;
6584       onChanged();
6585       return this;
6586     }
6587     /**
6588      *
6589      *
6590      * <pre>
6591      * Optional. Job scheduling configuration.
6592      * </pre>
6593      *
6594      * <code>
6595      * .google.cloud.dataproc.v1.JobScheduling scheduling = 20 [(.google.api.field_behavior) = OPTIONAL];
6596      * </code>
6597      */
clearScheduling()6598     public Builder clearScheduling() {
6599       bitField0_ = (bitField0_ & ~0x00020000);
6600       scheduling_ = null;
6601       if (schedulingBuilder_ != null) {
6602         schedulingBuilder_.dispose();
6603         schedulingBuilder_ = null;
6604       }
6605       onChanged();
6606       return this;
6607     }
6608     /**
6609      *
6610      *
6611      * <pre>
6612      * Optional. Job scheduling configuration.
6613      * </pre>
6614      *
6615      * <code>
6616      * .google.cloud.dataproc.v1.JobScheduling scheduling = 20 [(.google.api.field_behavior) = OPTIONAL];
6617      * </code>
6618      */
getSchedulingBuilder()6619     public com.google.cloud.dataproc.v1.JobScheduling.Builder getSchedulingBuilder() {
6620       bitField0_ |= 0x00020000;
6621       onChanged();
6622       return getSchedulingFieldBuilder().getBuilder();
6623     }
6624     /**
6625      *
6626      *
6627      * <pre>
6628      * Optional. Job scheduling configuration.
6629      * </pre>
6630      *
6631      * <code>
6632      * .google.cloud.dataproc.v1.JobScheduling scheduling = 20 [(.google.api.field_behavior) = OPTIONAL];
6633      * </code>
6634      */
getSchedulingOrBuilder()6635     public com.google.cloud.dataproc.v1.JobSchedulingOrBuilder getSchedulingOrBuilder() {
6636       if (schedulingBuilder_ != null) {
6637         return schedulingBuilder_.getMessageOrBuilder();
6638       } else {
6639         return scheduling_ == null
6640             ? com.google.cloud.dataproc.v1.JobScheduling.getDefaultInstance()
6641             : scheduling_;
6642       }
6643     }
6644     /**
6645      *
6646      *
6647      * <pre>
6648      * Optional. Job scheduling configuration.
6649      * </pre>
6650      *
6651      * <code>
6652      * .google.cloud.dataproc.v1.JobScheduling scheduling = 20 [(.google.api.field_behavior) = OPTIONAL];
6653      * </code>
6654      */
6655     private com.google.protobuf.SingleFieldBuilderV3<
6656             com.google.cloud.dataproc.v1.JobScheduling,
6657             com.google.cloud.dataproc.v1.JobScheduling.Builder,
6658             com.google.cloud.dataproc.v1.JobSchedulingOrBuilder>
getSchedulingFieldBuilder()6659         getSchedulingFieldBuilder() {
6660       if (schedulingBuilder_ == null) {
6661         schedulingBuilder_ =
6662             new com.google.protobuf.SingleFieldBuilderV3<
6663                 com.google.cloud.dataproc.v1.JobScheduling,
6664                 com.google.cloud.dataproc.v1.JobScheduling.Builder,
6665                 com.google.cloud.dataproc.v1.JobSchedulingOrBuilder>(
6666                 getScheduling(), getParentForChildren(), isClean());
6667         scheduling_ = null;
6668       }
6669       return schedulingBuilder_;
6670     }
6671 
    // Holds either a String or a ByteString; decoded lazily on first String access.
    private java.lang.Object jobUuid_ = "";
    /**
     *
     *
     * <pre>
     * Output only. A UUID that uniquely identifies a job within the project
     * over time. This is in contrast to a user-settable reference.job_id that
     * may be reused over time.
     * </pre>
     *
     * <code>string job_uuid = 22 [(.google.api.field_behavior) = OUTPUT_ONLY];</code>
     *
     * @return The jobUuid.
     */
    public java.lang.String getJobUuid() {
      java.lang.Object ref = jobUuid_;
      if (!(ref instanceof java.lang.String)) {
        com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
        java.lang.String s = bs.toStringUtf8();
        // Cache the decoded String so subsequent calls skip the UTF-8 conversion.
        jobUuid_ = s;
        return s;
      } else {
        return (java.lang.String) ref;
      }
    }
6697     /**
6698      *
6699      *
6700      * <pre>
6701      * Output only. A UUID that uniquely identifies a job within the project
6702      * over time. This is in contrast to a user-settable reference.job_id that
6703      * may be reused over time.
6704      * </pre>
6705      *
6706      * <code>string job_uuid = 22 [(.google.api.field_behavior) = OUTPUT_ONLY];</code>
6707      *
6708      * @return The bytes for jobUuid.
6709      */
getJobUuidBytes()6710     public com.google.protobuf.ByteString getJobUuidBytes() {
6711       java.lang.Object ref = jobUuid_;
6712       if (ref instanceof String) {
6713         com.google.protobuf.ByteString b =
6714             com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
6715         jobUuid_ = b;
6716         return b;
6717       } else {
6718         return (com.google.protobuf.ByteString) ref;
6719       }
6720     }
6721     /**
6722      *
6723      *
6724      * <pre>
6725      * Output only. A UUID that uniquely identifies a job within the project
6726      * over time. This is in contrast to a user-settable reference.job_id that
6727      * may be reused over time.
6728      * </pre>
6729      *
6730      * <code>string job_uuid = 22 [(.google.api.field_behavior) = OUTPUT_ONLY];</code>
6731      *
6732      * @param value The jobUuid to set.
6733      * @return This builder for chaining.
6734      */
setJobUuid(java.lang.String value)6735     public Builder setJobUuid(java.lang.String value) {
6736       if (value == null) {
6737         throw new NullPointerException();
6738       }
6739       jobUuid_ = value;
6740       bitField0_ |= 0x00040000;
6741       onChanged();
6742       return this;
6743     }
6744     /**
6745      *
6746      *
6747      * <pre>
6748      * Output only. A UUID that uniquely identifies a job within the project
6749      * over time. This is in contrast to a user-settable reference.job_id that
6750      * may be reused over time.
6751      * </pre>
6752      *
6753      * <code>string job_uuid = 22 [(.google.api.field_behavior) = OUTPUT_ONLY];</code>
6754      *
6755      * @return This builder for chaining.
6756      */
clearJobUuid()6757     public Builder clearJobUuid() {
6758       jobUuid_ = getDefaultInstance().getJobUuid();
6759       bitField0_ = (bitField0_ & ~0x00040000);
6760       onChanged();
6761       return this;
6762     }
6763     /**
6764      *
6765      *
6766      * <pre>
6767      * Output only. A UUID that uniquely identifies a job within the project
6768      * over time. This is in contrast to a user-settable reference.job_id that
6769      * may be reused over time.
6770      * </pre>
6771      *
6772      * <code>string job_uuid = 22 [(.google.api.field_behavior) = OUTPUT_ONLY];</code>
6773      *
6774      * @param value The bytes for jobUuid to set.
6775      * @return This builder for chaining.
6776      */
setJobUuidBytes(com.google.protobuf.ByteString value)6777     public Builder setJobUuidBytes(com.google.protobuf.ByteString value) {
6778       if (value == null) {
6779         throw new NullPointerException();
6780       }
6781       checkByteStringIsUtf8(value);
6782       jobUuid_ = value;
6783       bitField0_ |= 0x00040000;
6784       onChanged();
6785       return this;
6786     }
6787 
    private boolean done_;
    /**
     *
     *
     * <pre>
     * Output only. Indicates whether the job is completed. If the value is
     * `false`, the job is still in progress. If `true`, the job is completed, and
     * `status.state` field will indicate if it was successful, failed,
     * or cancelled.
     * </pre>
     *
     * <code>bool done = 24 [(.google.api.field_behavior) = OUTPUT_ONLY];</code>
     *
     * @return The done.
     */
    @java.lang.Override
    public boolean getDone() {
      return done_;
    }
6807     /**
6808      *
6809      *
6810      * <pre>
6811      * Output only. Indicates whether the job is completed. If the value is
6812      * `false`, the job is still in progress. If `true`, the job is completed, and
6813      * `status.state` field will indicate if it was successful, failed,
6814      * or cancelled.
6815      * </pre>
6816      *
6817      * <code>bool done = 24 [(.google.api.field_behavior) = OUTPUT_ONLY];</code>
6818      *
6819      * @param value The done to set.
6820      * @return This builder for chaining.
6821      */
setDone(boolean value)6822     public Builder setDone(boolean value) {
6823 
6824       done_ = value;
6825       bitField0_ |= 0x00080000;
6826       onChanged();
6827       return this;
6828     }
6829     /**
6830      *
6831      *
6832      * <pre>
6833      * Output only. Indicates whether the job is completed. If the value is
6834      * `false`, the job is still in progress. If `true`, the job is completed, and
6835      * `status.state` field will indicate if it was successful, failed,
6836      * or cancelled.
6837      * </pre>
6838      *
6839      * <code>bool done = 24 [(.google.api.field_behavior) = OUTPUT_ONLY];</code>
6840      *
6841      * @return This builder for chaining.
6842      */
clearDone()6843     public Builder clearDone() {
6844       bitField0_ = (bitField0_ & ~0x00080000);
6845       done_ = false;
6846       onChanged();
6847       return this;
6848     }
6849 
    private com.google.cloud.dataproc.v1.DriverSchedulingConfig driverSchedulingConfig_;
    // Lazily created single-field builder; while non-null it owns the message.
    private com.google.protobuf.SingleFieldBuilderV3<
            com.google.cloud.dataproc.v1.DriverSchedulingConfig,
            com.google.cloud.dataproc.v1.DriverSchedulingConfig.Builder,
            com.google.cloud.dataproc.v1.DriverSchedulingConfigOrBuilder>
        driverSchedulingConfigBuilder_;
    /**
     *
     *
     * <pre>
     * Optional. Driver scheduling configuration.
     * </pre>
     *
     * <code>
     * .google.cloud.dataproc.v1.DriverSchedulingConfig driver_scheduling_config = 27 [(.google.api.field_behavior) = OPTIONAL];
     * </code>
     *
     * @return Whether the driverSchedulingConfig field is set.
     */
    public boolean hasDriverSchedulingConfig() {
      return ((bitField0_ & 0x00100000) != 0);
    }
6872     /**
6873      *
6874      *
6875      * <pre>
6876      * Optional. Driver scheduling configuration.
6877      * </pre>
6878      *
6879      * <code>
6880      * .google.cloud.dataproc.v1.DriverSchedulingConfig driver_scheduling_config = 27 [(.google.api.field_behavior) = OPTIONAL];
6881      * </code>
6882      *
6883      * @return The driverSchedulingConfig.
6884      */
getDriverSchedulingConfig()6885     public com.google.cloud.dataproc.v1.DriverSchedulingConfig getDriverSchedulingConfig() {
6886       if (driverSchedulingConfigBuilder_ == null) {
6887         return driverSchedulingConfig_ == null
6888             ? com.google.cloud.dataproc.v1.DriverSchedulingConfig.getDefaultInstance()
6889             : driverSchedulingConfig_;
6890       } else {
6891         return driverSchedulingConfigBuilder_.getMessage();
6892       }
6893     }
6894     /**
6895      *
6896      *
6897      * <pre>
6898      * Optional. Driver scheduling configuration.
6899      * </pre>
6900      *
6901      * <code>
6902      * .google.cloud.dataproc.v1.DriverSchedulingConfig driver_scheduling_config = 27 [(.google.api.field_behavior) = OPTIONAL];
6903      * </code>
6904      */
setDriverSchedulingConfig( com.google.cloud.dataproc.v1.DriverSchedulingConfig value)6905     public Builder setDriverSchedulingConfig(
6906         com.google.cloud.dataproc.v1.DriverSchedulingConfig value) {
6907       if (driverSchedulingConfigBuilder_ == null) {
6908         if (value == null) {
6909           throw new NullPointerException();
6910         }
6911         driverSchedulingConfig_ = value;
6912       } else {
6913         driverSchedulingConfigBuilder_.setMessage(value);
6914       }
6915       bitField0_ |= 0x00100000;
6916       onChanged();
6917       return this;
6918     }
6919     /**
6920      *
6921      *
6922      * <pre>
6923      * Optional. Driver scheduling configuration.
6924      * </pre>
6925      *
6926      * <code>
6927      * .google.cloud.dataproc.v1.DriverSchedulingConfig driver_scheduling_config = 27 [(.google.api.field_behavior) = OPTIONAL];
6928      * </code>
6929      */
setDriverSchedulingConfig( com.google.cloud.dataproc.v1.DriverSchedulingConfig.Builder builderForValue)6930     public Builder setDriverSchedulingConfig(
6931         com.google.cloud.dataproc.v1.DriverSchedulingConfig.Builder builderForValue) {
6932       if (driverSchedulingConfigBuilder_ == null) {
6933         driverSchedulingConfig_ = builderForValue.build();
6934       } else {
6935         driverSchedulingConfigBuilder_.setMessage(builderForValue.build());
6936       }
6937       bitField0_ |= 0x00100000;
6938       onChanged();
6939       return this;
6940     }
6941     /**
6942      *
6943      *
6944      * <pre>
6945      * Optional. Driver scheduling configuration.
6946      * </pre>
6947      *
6948      * <code>
6949      * .google.cloud.dataproc.v1.DriverSchedulingConfig driver_scheduling_config = 27 [(.google.api.field_behavior) = OPTIONAL];
6950      * </code>
6951      */
mergeDriverSchedulingConfig( com.google.cloud.dataproc.v1.DriverSchedulingConfig value)6952     public Builder mergeDriverSchedulingConfig(
6953         com.google.cloud.dataproc.v1.DriverSchedulingConfig value) {
6954       if (driverSchedulingConfigBuilder_ == null) {
6955         if (((bitField0_ & 0x00100000) != 0)
6956             && driverSchedulingConfig_ != null
6957             && driverSchedulingConfig_
6958                 != com.google.cloud.dataproc.v1.DriverSchedulingConfig.getDefaultInstance()) {
6959           getDriverSchedulingConfigBuilder().mergeFrom(value);
6960         } else {
6961           driverSchedulingConfig_ = value;
6962         }
6963       } else {
6964         driverSchedulingConfigBuilder_.mergeFrom(value);
6965       }
6966       bitField0_ |= 0x00100000;
6967       onChanged();
6968       return this;
6969     }
6970     /**
6971      *
6972      *
6973      * <pre>
6974      * Optional. Driver scheduling configuration.
6975      * </pre>
6976      *
6977      * <code>
6978      * .google.cloud.dataproc.v1.DriverSchedulingConfig driver_scheduling_config = 27 [(.google.api.field_behavior) = OPTIONAL];
6979      * </code>
6980      */
clearDriverSchedulingConfig()6981     public Builder clearDriverSchedulingConfig() {
6982       bitField0_ = (bitField0_ & ~0x00100000);
6983       driverSchedulingConfig_ = null;
6984       if (driverSchedulingConfigBuilder_ != null) {
6985         driverSchedulingConfigBuilder_.dispose();
6986         driverSchedulingConfigBuilder_ = null;
6987       }
6988       onChanged();
6989       return this;
6990     }
6991     /**
6992      *
6993      *
6994      * <pre>
6995      * Optional. Driver scheduling configuration.
6996      * </pre>
6997      *
6998      * <code>
6999      * .google.cloud.dataproc.v1.DriverSchedulingConfig driver_scheduling_config = 27 [(.google.api.field_behavior) = OPTIONAL];
7000      * </code>
7001      */
7002     public com.google.cloud.dataproc.v1.DriverSchedulingConfig.Builder
getDriverSchedulingConfigBuilder()7003         getDriverSchedulingConfigBuilder() {
7004       bitField0_ |= 0x00100000;
7005       onChanged();
7006       return getDriverSchedulingConfigFieldBuilder().getBuilder();
7007     }
7008     /**
7009      *
7010      *
7011      * <pre>
7012      * Optional. Driver scheduling configuration.
7013      * </pre>
7014      *
7015      * <code>
7016      * .google.cloud.dataproc.v1.DriverSchedulingConfig driver_scheduling_config = 27 [(.google.api.field_behavior) = OPTIONAL];
7017      * </code>
7018      */
7019     public com.google.cloud.dataproc.v1.DriverSchedulingConfigOrBuilder
getDriverSchedulingConfigOrBuilder()7020         getDriverSchedulingConfigOrBuilder() {
7021       if (driverSchedulingConfigBuilder_ != null) {
7022         return driverSchedulingConfigBuilder_.getMessageOrBuilder();
7023       } else {
7024         return driverSchedulingConfig_ == null
7025             ? com.google.cloud.dataproc.v1.DriverSchedulingConfig.getDefaultInstance()
7026             : driverSchedulingConfig_;
7027       }
7028     }
7029     /**
7030      *
7031      *
7032      * <pre>
7033      * Optional. Driver scheduling configuration.
7034      * </pre>
7035      *
7036      * <code>
7037      * .google.cloud.dataproc.v1.DriverSchedulingConfig driver_scheduling_config = 27 [(.google.api.field_behavior) = OPTIONAL];
7038      * </code>
7039      */
7040     private com.google.protobuf.SingleFieldBuilderV3<
7041             com.google.cloud.dataproc.v1.DriverSchedulingConfig,
7042             com.google.cloud.dataproc.v1.DriverSchedulingConfig.Builder,
7043             com.google.cloud.dataproc.v1.DriverSchedulingConfigOrBuilder>
getDriverSchedulingConfigFieldBuilder()7044         getDriverSchedulingConfigFieldBuilder() {
7045       if (driverSchedulingConfigBuilder_ == null) {
7046         driverSchedulingConfigBuilder_ =
7047             new com.google.protobuf.SingleFieldBuilderV3<
7048                 com.google.cloud.dataproc.v1.DriverSchedulingConfig,
7049                 com.google.cloud.dataproc.v1.DriverSchedulingConfig.Builder,
7050                 com.google.cloud.dataproc.v1.DriverSchedulingConfigOrBuilder>(
7051                 getDriverSchedulingConfig(), getParentForChildren(), isClean());
7052         driverSchedulingConfig_ = null;
7053       }
7054       return driverSchedulingConfigBuilder_;
7055     }
7056 
    @java.lang.Override
    // Plain delegation to the GeneratedMessageV3.Builder implementation.
    public final Builder setUnknownFields(final com.google.protobuf.UnknownFieldSet unknownFields) {
      return super.setUnknownFields(unknownFields);
    }
7061 
    @java.lang.Override
    // Plain delegation to the GeneratedMessageV3.Builder implementation.
    public final Builder mergeUnknownFields(
        final com.google.protobuf.UnknownFieldSet unknownFields) {
      return super.mergeUnknownFields(unknownFields);
    }
7067 
7068     // @@protoc_insertion_point(builder_scope:google.cloud.dataproc.v1.Job)
7069   }
7070 
7071   // @@protoc_insertion_point(class_scope:google.cloud.dataproc.v1.Job)
  // Shared immutable default instance of Job (all fields at their defaults),
  // created eagerly at class-load time.
  private static final com.google.cloud.dataproc.v1.Job DEFAULT_INSTANCE;

  static {
    DEFAULT_INSTANCE = new com.google.cloud.dataproc.v1.Job();
  }
7077 
  // Returns the singleton default instance of Job.
  public static com.google.cloud.dataproc.v1.Job getDefaultInstance() {
    return DEFAULT_INSTANCE;
  }
7081 
  // Wire-format parser for Job. Parsing is delegated to Builder.mergeFrom; on
  // any failure the partially-built message is attached to the thrown
  // InvalidProtocolBufferException so callers can inspect what was read.
  private static final com.google.protobuf.Parser<Job> PARSER =
      new com.google.protobuf.AbstractParser<Job>() {
        @java.lang.Override
        public Job parsePartialFrom(
            com.google.protobuf.CodedInputStream input,
            com.google.protobuf.ExtensionRegistryLite extensionRegistry)
            throws com.google.protobuf.InvalidProtocolBufferException {
          Builder builder = newBuilder();
          try {
            builder.mergeFrom(input, extensionRegistry);
          } catch (com.google.protobuf.InvalidProtocolBufferException e) {
            throw e.setUnfinishedMessage(builder.buildPartial());
          } catch (com.google.protobuf.UninitializedMessageException e) {
            // Convert to the checked protobuf exception type, keeping context.
            throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial());
          } catch (java.io.IOException e) {
            // Wrap low-level I/O errors, preserving the cause.
            throw new com.google.protobuf.InvalidProtocolBufferException(e)
                .setUnfinishedMessage(builder.buildPartial());
          }
          return builder.buildPartial();
        }
      };
7103 
  // Static accessor for the shared Job parser.
  public static com.google.protobuf.Parser<Job> parser() {
    return PARSER;
  }
7107 
  @java.lang.Override
  // Instance accessor required by the Message interface; same shared parser.
  public com.google.protobuf.Parser<Job> getParserForType() {
    return PARSER;
  }
7112 
  @java.lang.Override
  // Instance accessor required by the MessageLite interface; returns the
  // class-wide singleton default.
  public com.google.cloud.dataproc.v1.Job getDefaultInstanceForType() {
    return DEFAULT_INSTANCE;
  }
7117 }
7118