• Home
  • Line#
  • Scopes#
  • Navigate#
  • Raw
  • Download
1 /*
2  * Copyright 2020 Google LLC
3  *
4  * Licensed under the Apache License, Version 2.0 (the "License");
5  * you may not use this file except in compliance with the License.
6  * You may obtain a copy of the License at
7  *
8  *     https://www.apache.org/licenses/LICENSE-2.0
9  *
10  * Unless required by applicable law or agreed to in writing, software
11  * distributed under the License is distributed on an "AS IS" BASIS,
12  * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13  * See the License for the specific language governing permissions and
14  * limitations under the License.
15  */
16 // Generated by the protocol buffer compiler.  DO NOT EDIT!
17 // source: google/cloud/dataproc/v1/jobs.proto
18 
19 package com.google.cloud.dataproc.v1;
20 
21 /**
22  *
23  *
24  * <pre>
25  * A Dataproc job for running [Apache Spark
26  * SQL](https://spark.apache.org/sql/) queries.
27  * </pre>
28  *
29  * Protobuf type {@code google.cloud.dataproc.v1.SparkSqlJob}
30  */
31 public final class SparkSqlJob extends com.google.protobuf.GeneratedMessageV3
32     implements
33     // @@protoc_insertion_point(message_implements:google.cloud.dataproc.v1.SparkSqlJob)
34     SparkSqlJobOrBuilder {
35   private static final long serialVersionUID = 0L;
  // Use SparkSqlJob.newBuilder() to construct.
  // Invoked by the generated Builder's build() path; messages are immutable once built.
  private SparkSqlJob(com.google.protobuf.GeneratedMessageV3.Builder<?> builder) {
    super(builder);
  }
40 
  // Default-instance constructor: initializes the repeated jar_file_uris field to the
  // shared immutable empty list so the default instance allocates nothing extra.
  private SparkSqlJob() {
    jarFileUris_ = com.google.protobuf.LazyStringArrayList.EMPTY;
  }
44 
  @java.lang.Override
  @SuppressWarnings({"unused"})
  protected java.lang.Object newInstance(UnusedPrivateParameter unused) {
    // Called reflectively by the protobuf runtime to allocate fresh instances during parsing.
    return new SparkSqlJob();
  }
50 
  @java.lang.Override
  public final com.google.protobuf.UnknownFieldSet getUnknownFields() {
    // Fields that were present on the wire but are not part of this message's schema.
    return this.unknownFields;
  }
55 
  /** Returns the proto descriptor for {@code google.cloud.dataproc.v1.SparkSqlJob}. */
  public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
    return com.google.cloud.dataproc.v1.JobsProto
        .internal_static_google_cloud_dataproc_v1_SparkSqlJob_descriptor;
  }
60 
  @SuppressWarnings({"rawtypes"})
  @java.lang.Override
  protected com.google.protobuf.MapField internalGetMapField(int number) {
    // Dispatches reflective map-field access by proto field number
    // (3 = script_variables, 4 = properties).
    switch (number) {
      case 3:
        return internalGetScriptVariables();
      case 4:
        return internalGetProperties();
      default:
        throw new RuntimeException("Invalid map field number: " + number);
    }
  }
73 
  @java.lang.Override
  protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
      internalGetFieldAccessorTable() {
    // Binds the generated field accessors to this message class and its Builder.
    return com.google.cloud.dataproc.v1.JobsProto
        .internal_static_google_cloud_dataproc_v1_SparkSqlJob_fieldAccessorTable
        .ensureFieldAccessorsInitialized(
            com.google.cloud.dataproc.v1.SparkSqlJob.class,
            com.google.cloud.dataproc.v1.SparkSqlJob.Builder.class);
  }
83 
  // Discriminator for the "queries" oneof: 0 = unset, otherwise the set field's number.
  private int queriesCase_ = 0;
  // Value slot shared by all "queries" oneof members (String/ByteString or QueryList).
  private java.lang.Object queries_;

  /** Enumerates the possible cases of the {@code queries} oneof. */
  public enum QueriesCase
      implements
          com.google.protobuf.Internal.EnumLite,
          com.google.protobuf.AbstractMessage.InternalOneOfEnum {
    QUERY_FILE_URI(1),
    QUERY_LIST(2),
    QUERIES_NOT_SET(0);
    private final int value;

    private QueriesCase(int value) {
      this.value = value;
    }
    /**
     * @param value The number of the enum to look for.
     * @return The enum associated with the given number.
     * @deprecated Use {@link #forNumber(int)} instead.
     */
    @java.lang.Deprecated
    public static QueriesCase valueOf(int value) {
      return forNumber(value);
    }

    /** Returns the case for the given field number, or {@code null} if unrecognized. */
    public static QueriesCase forNumber(int value) {
      switch (value) {
        case 1:
          return QUERY_FILE_URI;
        case 2:
          return QUERY_LIST;
        case 0:
          return QUERIES_NOT_SET;
        default:
          return null;
      }
    }

    public int getNumber() {
      return this.value;
    }
  };
126 
getQueriesCase()127   public QueriesCase getQueriesCase() {
128     return QueriesCase.forNumber(queriesCase_);
129   }
130 
  public static final int QUERY_FILE_URI_FIELD_NUMBER = 1;
  /**
   *
   *
   * <pre>
   * The HCFS URI of the script that contains SQL queries.
   * </pre>
   *
   * <code>string query_file_uri = 1;</code>
   *
   * @return Whether the queryFileUri field is set.
   */
  public boolean hasQueryFileUri() {
    return queriesCase_ == 1;
  }
  /**
   *
   *
   * <pre>
   * The HCFS URI of the script that contains SQL queries.
   * </pre>
   *
   * <code>string query_file_uri = 1;</code>
   *
   * @return The queryFileUri.
   */
  public java.lang.String getQueryFileUri() {
    java.lang.Object ref = "";
    if (queriesCase_ == 1) {
      ref = queries_;
    }
    if (ref instanceof java.lang.String) {
      return (java.lang.String) ref;
    } else {
      // The parser stores strings as ByteString; decode lazily and cache the String
      // back into the oneof slot so later calls skip the UTF-8 conversion.
      com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
      java.lang.String s = bs.toStringUtf8();
      if (queriesCase_ == 1) {
        queries_ = s;
      }
      return s;
    }
  }
  /**
   *
   *
   * <pre>
   * The HCFS URI of the script that contains SQL queries.
   * </pre>
   *
   * <code>string query_file_uri = 1;</code>
   *
   * @return The bytes for queryFileUri.
   */
  public com.google.protobuf.ByteString getQueryFileUriBytes() {
    java.lang.Object ref = "";
    if (queriesCase_ == 1) {
      ref = queries_;
    }
    if (ref instanceof java.lang.String) {
      // Encode once and cache the ByteString back into the oneof slot.
      com.google.protobuf.ByteString b =
          com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
      if (queriesCase_ == 1) {
        queries_ = b;
      }
      return b;
    } else {
      return (com.google.protobuf.ByteString) ref;
    }
  }
200 
  public static final int QUERY_LIST_FIELD_NUMBER = 2;
  /**
   *
   *
   * <pre>
   * A list of queries.
   * </pre>
   *
   * <code>.google.cloud.dataproc.v1.QueryList query_list = 2;</code>
   *
   * @return Whether the queryList field is set.
   */
  @java.lang.Override
  public boolean hasQueryList() {
    return queriesCase_ == 2;
  }
  /**
   *
   *
   * <pre>
   * A list of queries.
   * </pre>
   *
   * <code>.google.cloud.dataproc.v1.QueryList query_list = 2;</code>
   *
   * @return The queryList.
   */
  @java.lang.Override
  public com.google.cloud.dataproc.v1.QueryList getQueryList() {
    if (queriesCase_ == 2) {
      return (com.google.cloud.dataproc.v1.QueryList) queries_;
    }
    // Oneof not set to query_list: return the shared default instance, never null.
    return com.google.cloud.dataproc.v1.QueryList.getDefaultInstance();
  }
  /**
   *
   *
   * <pre>
   * A list of queries.
   * </pre>
   *
   * <code>.google.cloud.dataproc.v1.QueryList query_list = 2;</code>
   */
  @java.lang.Override
  public com.google.cloud.dataproc.v1.QueryListOrBuilder getQueryListOrBuilder() {
    if (queriesCase_ == 2) {
      return (com.google.cloud.dataproc.v1.QueryList) queries_;
    }
    return com.google.cloud.dataproc.v1.QueryList.getDefaultInstance();
  }
251 
  public static final int SCRIPT_VARIABLES_FIELD_NUMBER = 3;

  // Holder class: lazily initializes the default MapEntry template used to
  // (de)serialize entries of the script_variables map field.
  private static final class ScriptVariablesDefaultEntryHolder {
    static final com.google.protobuf.MapEntry<java.lang.String, java.lang.String> defaultEntry =
        com.google.protobuf.MapEntry.<java.lang.String, java.lang.String>newDefaultInstance(
            com.google.cloud.dataproc.v1.JobsProto
                .internal_static_google_cloud_dataproc_v1_SparkSqlJob_ScriptVariablesEntry_descriptor,
            com.google.protobuf.WireFormat.FieldType.STRING,
            "",
            com.google.protobuf.WireFormat.FieldType.STRING,
            "");
  }

  @SuppressWarnings("serial")
  private com.google.protobuf.MapField<java.lang.String, java.lang.String> scriptVariables_;

  // Returns the backing MapField, or a shared empty map when never populated.
  private com.google.protobuf.MapField<java.lang.String, java.lang.String>
      internalGetScriptVariables() {
    if (scriptVariables_ == null) {
      return com.google.protobuf.MapField.emptyMapField(
          ScriptVariablesDefaultEntryHolder.defaultEntry);
    }
    return scriptVariables_;
  }

  public int getScriptVariablesCount() {
    return internalGetScriptVariables().getMap().size();
  }
  /**
   *
   *
   * <pre>
   * Optional. Mapping of query variable names to values (equivalent to the
   * Spark SQL command: SET `name="value";`).
   * </pre>
   *
   * <code>map&lt;string, string&gt; script_variables = 3 [(.google.api.field_behavior) = OPTIONAL];
   * </code>
   */
  @java.lang.Override
  public boolean containsScriptVariables(java.lang.String key) {
    if (key == null) {
      throw new NullPointerException("map key");
    }
    return internalGetScriptVariables().getMap().containsKey(key);
  }
  /** Use {@link #getScriptVariablesMap()} instead. */
  @java.lang.Override
  @java.lang.Deprecated
  public java.util.Map<java.lang.String, java.lang.String> getScriptVariables() {
    return getScriptVariablesMap();
  }
  /**
   *
   *
   * <pre>
   * Optional. Mapping of query variable names to values (equivalent to the
   * Spark SQL command: SET `name="value";`).
   * </pre>
   *
   * <code>map&lt;string, string&gt; script_variables = 3 [(.google.api.field_behavior) = OPTIONAL];
   * </code>
   */
  @java.lang.Override
  public java.util.Map<java.lang.String, java.lang.String> getScriptVariablesMap() {
    return internalGetScriptVariables().getMap();
  }
  /**
   *
   *
   * <pre>
   * Optional. Mapping of query variable names to values (equivalent to the
   * Spark SQL command: SET `name="value";`).
   * </pre>
   *
   * <code>map&lt;string, string&gt; script_variables = 3 [(.google.api.field_behavior) = OPTIONAL];
   * </code>
   */
  @java.lang.Override
  public /* nullable */ java.lang.String getScriptVariablesOrDefault(
      java.lang.String key,
      /* nullable */
      java.lang.String defaultValue) {
    if (key == null) {
      throw new NullPointerException("map key");
    }
    java.util.Map<java.lang.String, java.lang.String> map = internalGetScriptVariables().getMap();
    return map.containsKey(key) ? map.get(key) : defaultValue;
  }
  /**
   *
   *
   * <pre>
   * Optional. Mapping of query variable names to values (equivalent to the
   * Spark SQL command: SET `name="value";`).
   * </pre>
   *
   * <code>map&lt;string, string&gt; script_variables = 3 [(.google.api.field_behavior) = OPTIONAL];
   * </code>
   */
  @java.lang.Override
  public java.lang.String getScriptVariablesOrThrow(java.lang.String key) {
    if (key == null) {
      throw new NullPointerException("map key");
    }
    java.util.Map<java.lang.String, java.lang.String> map = internalGetScriptVariables().getMap();
    if (!map.containsKey(key)) {
      // Absent key is a caller error for the *OrThrow accessor.
      throw new java.lang.IllegalArgumentException();
    }
    return map.get(key);
  }
363 
  public static final int PROPERTIES_FIELD_NUMBER = 4;

  // Holder class: lazily initializes the default MapEntry template used to
  // (de)serialize entries of the properties map field.
  private static final class PropertiesDefaultEntryHolder {
    static final com.google.protobuf.MapEntry<java.lang.String, java.lang.String> defaultEntry =
        com.google.protobuf.MapEntry.<java.lang.String, java.lang.String>newDefaultInstance(
            com.google.cloud.dataproc.v1.JobsProto
                .internal_static_google_cloud_dataproc_v1_SparkSqlJob_PropertiesEntry_descriptor,
            com.google.protobuf.WireFormat.FieldType.STRING,
            "",
            com.google.protobuf.WireFormat.FieldType.STRING,
            "");
  }

  @SuppressWarnings("serial")
  private com.google.protobuf.MapField<java.lang.String, java.lang.String> properties_;

  // Returns the backing MapField, or a shared empty map when never populated.
  private com.google.protobuf.MapField<java.lang.String, java.lang.String> internalGetProperties() {
    if (properties_ == null) {
      return com.google.protobuf.MapField.emptyMapField(PropertiesDefaultEntryHolder.defaultEntry);
    }
    return properties_;
  }

  public int getPropertiesCount() {
    return internalGetProperties().getMap().size();
  }
  /**
   *
   *
   * <pre>
   * Optional. A mapping of property names to values, used to configure
   * Spark SQL's SparkConf. Properties that conflict with values set by the
   * Dataproc API may be overwritten.
   * </pre>
   *
   * <code>map&lt;string, string&gt; properties = 4 [(.google.api.field_behavior) = OPTIONAL];
   * </code>
   */
  @java.lang.Override
  public boolean containsProperties(java.lang.String key) {
    if (key == null) {
      throw new NullPointerException("map key");
    }
    return internalGetProperties().getMap().containsKey(key);
  }
  /** Use {@link #getPropertiesMap()} instead. */
  @java.lang.Override
  @java.lang.Deprecated
  public java.util.Map<java.lang.String, java.lang.String> getProperties() {
    return getPropertiesMap();
  }
  /**
   *
   *
   * <pre>
   * Optional. A mapping of property names to values, used to configure
   * Spark SQL's SparkConf. Properties that conflict with values set by the
   * Dataproc API may be overwritten.
   * </pre>
   *
   * <code>map&lt;string, string&gt; properties = 4 [(.google.api.field_behavior) = OPTIONAL];
   * </code>
   */
  @java.lang.Override
  public java.util.Map<java.lang.String, java.lang.String> getPropertiesMap() {
    return internalGetProperties().getMap();
  }
  /**
   *
   *
   * <pre>
   * Optional. A mapping of property names to values, used to configure
   * Spark SQL's SparkConf. Properties that conflict with values set by the
   * Dataproc API may be overwritten.
   * </pre>
   *
   * <code>map&lt;string, string&gt; properties = 4 [(.google.api.field_behavior) = OPTIONAL];
   * </code>
   */
  @java.lang.Override
  public /* nullable */ java.lang.String getPropertiesOrDefault(
      java.lang.String key,
      /* nullable */
      java.lang.String defaultValue) {
    if (key == null) {
      throw new NullPointerException("map key");
    }
    java.util.Map<java.lang.String, java.lang.String> map = internalGetProperties().getMap();
    return map.containsKey(key) ? map.get(key) : defaultValue;
  }
  /**
   *
   *
   * <pre>
   * Optional. A mapping of property names to values, used to configure
   * Spark SQL's SparkConf. Properties that conflict with values set by the
   * Dataproc API may be overwritten.
   * </pre>
   *
   * <code>map&lt;string, string&gt; properties = 4 [(.google.api.field_behavior) = OPTIONAL];
   * </code>
   */
  @java.lang.Override
  public java.lang.String getPropertiesOrThrow(java.lang.String key) {
    if (key == null) {
      throw new NullPointerException("map key");
    }
    java.util.Map<java.lang.String, java.lang.String> map = internalGetProperties().getMap();
    if (!map.containsKey(key)) {
      // Absent key is a caller error for the *OrThrow accessor.
      throw new java.lang.IllegalArgumentException();
    }
    return map.get(key);
  }
477 
  public static final int JAR_FILE_URIS_FIELD_NUMBER = 56;

  @SuppressWarnings("serial")
  private com.google.protobuf.LazyStringList jarFileUris_;
  /**
   *
   *
   * <pre>
   * Optional. HCFS URIs of jar files to be added to the Spark CLASSPATH.
   * </pre>
   *
   * <code>repeated string jar_file_uris = 56 [(.google.api.field_behavior) = OPTIONAL];</code>
   *
   * @return A list containing the jarFileUris.
   */
  public com.google.protobuf.ProtocolStringList getJarFileUrisList() {
    return jarFileUris_;
  }
  /**
   *
   *
   * <pre>
   * Optional. HCFS URIs of jar files to be added to the Spark CLASSPATH.
   * </pre>
   *
   * <code>repeated string jar_file_uris = 56 [(.google.api.field_behavior) = OPTIONAL];</code>
   *
   * @return The count of jarFileUris.
   */
  public int getJarFileUrisCount() {
    return jarFileUris_.size();
  }
  /**
   *
   *
   * <pre>
   * Optional. HCFS URIs of jar files to be added to the Spark CLASSPATH.
   * </pre>
   *
   * <code>repeated string jar_file_uris = 56 [(.google.api.field_behavior) = OPTIONAL];</code>
   *
   * @param index The index of the element to return.
   * @return The jarFileUris at the given index.
   */
  public java.lang.String getJarFileUris(int index) {
    return jarFileUris_.get(index);
  }
  /**
   *
   *
   * <pre>
   * Optional. HCFS URIs of jar files to be added to the Spark CLASSPATH.
   * </pre>
   *
   * <code>repeated string jar_file_uris = 56 [(.google.api.field_behavior) = OPTIONAL];</code>
   *
   * @param index The index of the value to return.
   * @return The bytes of the jarFileUris at the given index.
   */
  public com.google.protobuf.ByteString getJarFileUrisBytes(int index) {
    return jarFileUris_.getByteString(index);
  }
540 
  public static final int LOGGING_CONFIG_FIELD_NUMBER = 6;
  // Nullable: null means the optional message field is unset (see hasLoggingConfig()).
  private com.google.cloud.dataproc.v1.LoggingConfig loggingConfig_;
  /**
   *
   *
   * <pre>
   * Optional. The runtime log config for job execution.
   * </pre>
   *
   * <code>
   * .google.cloud.dataproc.v1.LoggingConfig logging_config = 6 [(.google.api.field_behavior) = OPTIONAL];
   * </code>
   *
   * @return Whether the loggingConfig field is set.
   */
  @java.lang.Override
  public boolean hasLoggingConfig() {
    return loggingConfig_ != null;
  }
  /**
   *
   *
   * <pre>
   * Optional. The runtime log config for job execution.
   * </pre>
   *
   * <code>
   * .google.cloud.dataproc.v1.LoggingConfig logging_config = 6 [(.google.api.field_behavior) = OPTIONAL];
   * </code>
   *
   * @return The loggingConfig.
   */
  @java.lang.Override
  public com.google.cloud.dataproc.v1.LoggingConfig getLoggingConfig() {
    // Never returns null: the default instance stands in for an unset field.
    return loggingConfig_ == null
        ? com.google.cloud.dataproc.v1.LoggingConfig.getDefaultInstance()
        : loggingConfig_;
  }
  /**
   *
   *
   * <pre>
   * Optional. The runtime log config for job execution.
   * </pre>
   *
   * <code>
   * .google.cloud.dataproc.v1.LoggingConfig logging_config = 6 [(.google.api.field_behavior) = OPTIONAL];
   * </code>
   */
  @java.lang.Override
  public com.google.cloud.dataproc.v1.LoggingConfigOrBuilder getLoggingConfigOrBuilder() {
    return loggingConfig_ == null
        ? com.google.cloud.dataproc.v1.LoggingConfig.getDefaultInstance()
        : loggingConfig_;
  }
596 
  // Memoized initialization check: -1 = not computed, 0 = false, 1 = true.
  private byte memoizedIsInitialized = -1;

  @java.lang.Override
  public final boolean isInitialized() {
    byte isInitialized = memoizedIsInitialized;
    if (isInitialized == 1) return true;
    if (isInitialized == 0) return false;

    // No required fields in this message, so it is always initialized.
    memoizedIsInitialized = 1;
    return true;
  }
608 
  @java.lang.Override
  public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException {
    // Serialization order follows field numbers: 1, 2, 3, 4, 6, 56, then unknown fields.
    if (queriesCase_ == 1) {
      com.google.protobuf.GeneratedMessageV3.writeString(output, 1, queries_);
    }
    if (queriesCase_ == 2) {
      output.writeMessage(2, (com.google.cloud.dataproc.v1.QueryList) queries_);
    }
    com.google.protobuf.GeneratedMessageV3.serializeStringMapTo(
        output, internalGetScriptVariables(), ScriptVariablesDefaultEntryHolder.defaultEntry, 3);
    com.google.protobuf.GeneratedMessageV3.serializeStringMapTo(
        output, internalGetProperties(), PropertiesDefaultEntryHolder.defaultEntry, 4);
    if (loggingConfig_ != null) {
      output.writeMessage(6, getLoggingConfig());
    }
    for (int i = 0; i < jarFileUris_.size(); i++) {
      com.google.protobuf.GeneratedMessageV3.writeString(output, 56, jarFileUris_.getRaw(i));
    }
    getUnknownFields().writeTo(output);
  }
629 
  @java.lang.Override
  public int getSerializedSize() {
    // Memoized: -1 means not yet computed (safe because messages are immutable).
    int size = memoizedSize;
    if (size != -1) return size;

    size = 0;
    if (queriesCase_ == 1) {
      size += com.google.protobuf.GeneratedMessageV3.computeStringSize(1, queries_);
    }
    if (queriesCase_ == 2) {
      size +=
          com.google.protobuf.CodedOutputStream.computeMessageSize(
              2, (com.google.cloud.dataproc.v1.QueryList) queries_);
    }
    // Map fields are sized entry-by-entry as synthetic MapEntry messages.
    for (java.util.Map.Entry<java.lang.String, java.lang.String> entry :
        internalGetScriptVariables().getMap().entrySet()) {
      com.google.protobuf.MapEntry<java.lang.String, java.lang.String> scriptVariables__ =
          ScriptVariablesDefaultEntryHolder.defaultEntry
              .newBuilderForType()
              .setKey(entry.getKey())
              .setValue(entry.getValue())
              .build();
      size += com.google.protobuf.CodedOutputStream.computeMessageSize(3, scriptVariables__);
    }
    for (java.util.Map.Entry<java.lang.String, java.lang.String> entry :
        internalGetProperties().getMap().entrySet()) {
      com.google.protobuf.MapEntry<java.lang.String, java.lang.String> properties__ =
          PropertiesDefaultEntryHolder.defaultEntry
              .newBuilderForType()
              .setKey(entry.getKey())
              .setValue(entry.getValue())
              .build();
      size += com.google.protobuf.CodedOutputStream.computeMessageSize(4, properties__);
    }
    if (loggingConfig_ != null) {
      size += com.google.protobuf.CodedOutputStream.computeMessageSize(6, getLoggingConfig());
    }
    {
      int dataSize = 0;
      for (int i = 0; i < jarFileUris_.size(); i++) {
        dataSize += computeStringSizeNoTag(jarFileUris_.getRaw(i));
      }
      size += dataSize;
      // Field number 56 requires a 2-byte tag per element.
      size += 2 * getJarFileUrisList().size();
    }
    size += getUnknownFields().getSerializedSize();
    memoizedSize = size;
    return size;
  }
679 
  @java.lang.Override
  public boolean equals(final java.lang.Object obj) {
    if (obj == this) {
      return true;
    }
    if (!(obj instanceof com.google.cloud.dataproc.v1.SparkSqlJob)) {
      return super.equals(obj);
    }
    com.google.cloud.dataproc.v1.SparkSqlJob other = (com.google.cloud.dataproc.v1.SparkSqlJob) obj;

    if (!internalGetScriptVariables().equals(other.internalGetScriptVariables())) return false;
    if (!internalGetProperties().equals(other.internalGetProperties())) return false;
    if (!getJarFileUrisList().equals(other.getJarFileUrisList())) return false;
    if (hasLoggingConfig() != other.hasLoggingConfig()) return false;
    if (hasLoggingConfig()) {
      if (!getLoggingConfig().equals(other.getLoggingConfig())) return false;
    }
    // Oneof: compare the case first, then the value of whichever member is set.
    if (!getQueriesCase().equals(other.getQueriesCase())) return false;
    switch (queriesCase_) {
      case 1:
        if (!getQueryFileUri().equals(other.getQueryFileUri())) return false;
        break;
      case 2:
        if (!getQueryList().equals(other.getQueryList())) return false;
        break;
      case 0:
      default:
    }
    if (!getUnknownFields().equals(other.getUnknownFields())) return false;
    return true;
  }
711 
  @java.lang.Override
  public int hashCode() {
    // Memoized: 0 means not yet computed (safe because messages are immutable).
    if (memoizedHashCode != 0) {
      return memoizedHashCode;
    }
    int hash = 41;
    hash = (19 * hash) + getDescriptor().hashCode();
    // Only fields that are set/non-empty contribute, keeping hash consistent with equals().
    if (!internalGetScriptVariables().getMap().isEmpty()) {
      hash = (37 * hash) + SCRIPT_VARIABLES_FIELD_NUMBER;
      hash = (53 * hash) + internalGetScriptVariables().hashCode();
    }
    if (!internalGetProperties().getMap().isEmpty()) {
      hash = (37 * hash) + PROPERTIES_FIELD_NUMBER;
      hash = (53 * hash) + internalGetProperties().hashCode();
    }
    if (getJarFileUrisCount() > 0) {
      hash = (37 * hash) + JAR_FILE_URIS_FIELD_NUMBER;
      hash = (53 * hash) + getJarFileUrisList().hashCode();
    }
    if (hasLoggingConfig()) {
      hash = (37 * hash) + LOGGING_CONFIG_FIELD_NUMBER;
      hash = (53 * hash) + getLoggingConfig().hashCode();
    }
    switch (queriesCase_) {
      case 1:
        hash = (37 * hash) + QUERY_FILE_URI_FIELD_NUMBER;
        hash = (53 * hash) + getQueryFileUri().hashCode();
        break;
      case 2:
        hash = (37 * hash) + QUERY_LIST_FIELD_NUMBER;
        hash = (53 * hash) + getQueryList().hashCode();
        break;
      case 0:
      default:
    }
    hash = (29 * hash) + getUnknownFields().hashCode();
    memoizedHashCode = hash;
    return hash;
  }
751 
  // ---- Static parsing entry points; all delegate to the generated PARSER. ----

  public static com.google.cloud.dataproc.v1.SparkSqlJob parseFrom(java.nio.ByteBuffer data)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data);
  }

  public static com.google.cloud.dataproc.v1.SparkSqlJob parseFrom(
      java.nio.ByteBuffer data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data, extensionRegistry);
  }

  public static com.google.cloud.dataproc.v1.SparkSqlJob parseFrom(
      com.google.protobuf.ByteString data)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data);
  }

  public static com.google.cloud.dataproc.v1.SparkSqlJob parseFrom(
      com.google.protobuf.ByteString data,
      com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data, extensionRegistry);
  }

  public static com.google.cloud.dataproc.v1.SparkSqlJob parseFrom(byte[] data)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data);
  }

  public static com.google.cloud.dataproc.v1.SparkSqlJob parseFrom(
      byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data, extensionRegistry);
  }

  public static com.google.cloud.dataproc.v1.SparkSqlJob parseFrom(java.io.InputStream input)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
  }

  public static com.google.cloud.dataproc.v1.SparkSqlJob parseFrom(
      java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
        PARSER, input, extensionRegistry);
  }

  // Delimited variants read a varint length prefix before the message payload.
  public static com.google.cloud.dataproc.v1.SparkSqlJob parseDelimitedFrom(
      java.io.InputStream input) throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(PARSER, input);
  }

  public static com.google.cloud.dataproc.v1.SparkSqlJob parseDelimitedFrom(
      java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(
        PARSER, input, extensionRegistry);
  }

  public static com.google.cloud.dataproc.v1.SparkSqlJob parseFrom(
      com.google.protobuf.CodedInputStream input) throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
  }

  public static com.google.cloud.dataproc.v1.SparkSqlJob parseFrom(
      com.google.protobuf.CodedInputStream input,
      com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
        PARSER, input, extensionRegistry);
  }
823 
824   @java.lang.Override
newBuilderForType()825   public Builder newBuilderForType() {
826     return newBuilder();
827   }
828 
  /** Returns a fresh {@code SparkSqlJob.Builder} with all fields unset. */
  public static Builder newBuilder() {
    return DEFAULT_INSTANCE.toBuilder();
  }
832 
  /** Returns a builder pre-populated with the field values of {@code prototype}. */
  public static Builder newBuilder(com.google.cloud.dataproc.v1.SparkSqlJob prototype) {
    return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
  }
836 
837   @java.lang.Override
toBuilder()838   public Builder toBuilder() {
839     return this == DEFAULT_INSTANCE ? new Builder() : new Builder().mergeFrom(this);
840   }
841 
842   @java.lang.Override
newBuilderForType(com.google.protobuf.GeneratedMessageV3.BuilderParent parent)843   protected Builder newBuilderForType(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
844     Builder builder = new Builder(parent);
845     return builder;
846   }
847   /**
848    *
849    *
850    * <pre>
851    * A Dataproc job for running [Apache Spark
852    * SQL](https://spark.apache.org/sql/) queries.
853    * </pre>
854    *
855    * Protobuf type {@code google.cloud.dataproc.v1.SparkSqlJob}
856    */
857   public static final class Builder extends com.google.protobuf.GeneratedMessageV3.Builder<Builder>
858       implements
859       // @@protoc_insertion_point(builder_implements:google.cloud.dataproc.v1.SparkSqlJob)
860       com.google.cloud.dataproc.v1.SparkSqlJobOrBuilder {
    /** Returns the protobuf descriptor for {@code google.cloud.dataproc.v1.SparkSqlJob}. */
    public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
      return com.google.cloud.dataproc.v1.JobsProto
          .internal_static_google_cloud_dataproc_v1_SparkSqlJob_descriptor;
    }
865 
    // Reflection hook: maps a map-field number to its backing MapField
    // (field 3 = script_variables, field 4 = properties).
    @SuppressWarnings({"rawtypes"})
    protected com.google.protobuf.MapField internalGetMapField(int number) {
      switch (number) {
        case 3:
          return internalGetScriptVariables();
        case 4:
          return internalGetProperties();
        default:
          throw new RuntimeException("Invalid map field number: " + number);
      }
    }
877 
    // Reflection hook: mutable counterpart of internalGetMapField, same field numbers.
    @SuppressWarnings({"rawtypes"})
    protected com.google.protobuf.MapField internalGetMutableMapField(int number) {
      switch (number) {
        case 3:
          return internalGetMutableScriptVariables();
        case 4:
          return internalGetMutableProperties();
        default:
          throw new RuntimeException("Invalid map field number: " + number);
      }
    }
889 
    // Supplies the reflective field accessors used by the GeneratedMessageV3 runtime.
    @java.lang.Override
    protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
        internalGetFieldAccessorTable() {
      return com.google.cloud.dataproc.v1.JobsProto
          .internal_static_google_cloud_dataproc_v1_SparkSqlJob_fieldAccessorTable
          .ensureFieldAccessorsInitialized(
              com.google.cloud.dataproc.v1.SparkSqlJob.class,
              com.google.cloud.dataproc.v1.SparkSqlJob.Builder.class);
    }
899 
    // Construct using com.google.cloud.dataproc.v1.SparkSqlJob.newBuilder()
    // Intentionally empty: all fields start unset / at their defaults.
    private Builder() {}
902 
    // Parent-aware constructor used by newBuilderForType(BuilderParent) so nested
    // builders can propagate change notifications upward.
    private Builder(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
      super(parent);
    }
906 
    /** Resets every field (and the {@code queries} oneof) back to its default state. */
    @java.lang.Override
    public Builder clear() {
      super.clear();
      bitField0_ = 0;
      if (queryListBuilder_ != null) {
        queryListBuilder_.clear();
      }
      internalGetMutableScriptVariables().clear();
      internalGetMutableProperties().clear();
      jarFileUris_ = com.google.protobuf.LazyStringArrayList.EMPTY;
      // Drop the "jarFileUris is mutable" bit (0x10) now that we hold the shared EMPTY list.
      bitField0_ = (bitField0_ & ~0x00000010);
      loggingConfig_ = null;
      if (loggingConfigBuilder_ != null) {
        loggingConfigBuilder_.dispose();
        loggingConfigBuilder_ = null;
      }
      // Clear the oneof: case 0 == QUERIES_NOT_SET.
      queriesCase_ = 0;
      queries_ = null;
      return this;
    }
927 
    /** Returns the descriptor of the message this builder produces. */
    @java.lang.Override
    public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() {
      return com.google.cloud.dataproc.v1.JobsProto
          .internal_static_google_cloud_dataproc_v1_SparkSqlJob_descriptor;
    }
933 
    /** Returns the immutable singleton default instance of {@code SparkSqlJob}. */
    @java.lang.Override
    public com.google.cloud.dataproc.v1.SparkSqlJob getDefaultInstanceForType() {
      return com.google.cloud.dataproc.v1.SparkSqlJob.getDefaultInstance();
    }
938 
    /**
     * Builds the message, throwing if any required field is unset. SparkSqlJob has no
     * required fields (isInitialized() always returns true), so this never throws here.
     */
    @java.lang.Override
    public com.google.cloud.dataproc.v1.SparkSqlJob build() {
      com.google.cloud.dataproc.v1.SparkSqlJob result = buildPartial();
      if (!result.isInitialized()) {
        throw newUninitializedMessageException(result);
      }
      return result;
    }
947 
    /** Builds the message without checking required fields; delegates per field group. */
    @java.lang.Override
    public com.google.cloud.dataproc.v1.SparkSqlJob buildPartial() {
      com.google.cloud.dataproc.v1.SparkSqlJob result =
          new com.google.cloud.dataproc.v1.SparkSqlJob(this);
      buildPartialRepeatedFields(result);
      // Only copy scalar/message fields when at least one "has been set" bit is on.
      if (bitField0_ != 0) {
        buildPartial0(result);
      }
      buildPartialOneofs(result);
      onBuilt();
      return result;
    }
960 
    // Transfers repeated fields into the result. Bit 0x10 marks jarFileUris_ as a
    // builder-owned mutable list; freeze it once so the built message shares it safely.
    private void buildPartialRepeatedFields(com.google.cloud.dataproc.v1.SparkSqlJob result) {
      if (((bitField0_ & 0x00000010) != 0)) {
        jarFileUris_ = jarFileUris_.getUnmodifiableView();
        bitField0_ = (bitField0_ & ~0x00000010);
      }
      result.jarFileUris_ = jarFileUris_;
    }
968 
    // Copies set fields into the result: 0x04 = script_variables, 0x08 = properties,
    // 0x20 = logging_config (built from its sub-builder when one exists).
    private void buildPartial0(com.google.cloud.dataproc.v1.SparkSqlJob result) {
      int from_bitField0_ = bitField0_;
      if (((from_bitField0_ & 0x00000004) != 0)) {
        result.scriptVariables_ = internalGetScriptVariables();
        result.scriptVariables_.makeImmutable();
      }
      if (((from_bitField0_ & 0x00000008) != 0)) {
        result.properties_ = internalGetProperties();
        result.properties_.makeImmutable();
      }
      if (((from_bitField0_ & 0x00000020) != 0)) {
        result.loggingConfig_ =
            loggingConfigBuilder_ == null ? loggingConfig_ : loggingConfigBuilder_.build();
      }
    }
984 
    // Transfers the `queries` oneof. When query_list (case 2) is managed by a
    // sub-builder, build it now so the result holds an immutable message.
    private void buildPartialOneofs(com.google.cloud.dataproc.v1.SparkSqlJob result) {
      result.queriesCase_ = queriesCase_;
      result.queries_ = this.queries_;
      if (queriesCase_ == 2 && queryListBuilder_ != null) {
        result.queries_ = queryListBuilder_.build();
      }
    }
992 
    /** Returns an independent copy of this builder (deep-copied per GeneratedMessageV3). */
    @java.lang.Override
    public Builder clone() {
      return super.clone();
    }
997 
    // Reflective setter; re-declared only to narrow the return type to Builder.
    @java.lang.Override
    public Builder setField(
        com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
      return super.setField(field, value);
    }
1003 
    // Reflective clear; re-declared only to narrow the return type to Builder.
    @java.lang.Override
    public Builder clearField(com.google.protobuf.Descriptors.FieldDescriptor field) {
      return super.clearField(field);
    }
1008 
    // Reflective oneof clear; re-declared only to narrow the return type to Builder.
    @java.lang.Override
    public Builder clearOneof(com.google.protobuf.Descriptors.OneofDescriptor oneof) {
      return super.clearOneof(oneof);
    }
1013 
    // Reflective repeated-field setter; re-declared only to narrow the return type.
    @java.lang.Override
    public Builder setRepeatedField(
        com.google.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) {
      return super.setRepeatedField(field, index, value);
    }
1019 
    // Reflective repeated-field appender; re-declared only to narrow the return type.
    @java.lang.Override
    public Builder addRepeatedField(
        com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
      return super.addRepeatedField(field, value);
    }
1025 
1026     @java.lang.Override
mergeFrom(com.google.protobuf.Message other)1027     public Builder mergeFrom(com.google.protobuf.Message other) {
1028       if (other instanceof com.google.cloud.dataproc.v1.SparkSqlJob) {
1029         return mergeFrom((com.google.cloud.dataproc.v1.SparkSqlJob) other);
1030       } else {
1031         super.mergeFrom(other);
1032         return this;
1033       }
1034     }
1035 
    /**
     * Merges the fields of {@code other} into this builder: maps are merged
     * entry-wise, jar_file_uris is concatenated, logging_config is message-merged,
     * and the `queries` oneof adopts whichever case {@code other} has set.
     */
    public Builder mergeFrom(com.google.cloud.dataproc.v1.SparkSqlJob other) {
      if (other == com.google.cloud.dataproc.v1.SparkSqlJob.getDefaultInstance()) return this;
      internalGetMutableScriptVariables().mergeFrom(other.internalGetScriptVariables());
      bitField0_ |= 0x00000004;
      internalGetMutableProperties().mergeFrom(other.internalGetProperties());
      bitField0_ |= 0x00000008;
      if (!other.jarFileUris_.isEmpty()) {
        if (jarFileUris_.isEmpty()) {
          // Share other's (immutable) list directly and clear our mutable bit.
          jarFileUris_ = other.jarFileUris_;
          bitField0_ = (bitField0_ & ~0x00000010);
        } else {
          ensureJarFileUrisIsMutable();
          jarFileUris_.addAll(other.jarFileUris_);
        }
        onChanged();
      }
      if (other.hasLoggingConfig()) {
        mergeLoggingConfig(other.getLoggingConfig());
      }
      switch (other.getQueriesCase()) {
        case QUERY_FILE_URI:
          {
            queriesCase_ = 1;
            queries_ = other.queries_;
            onChanged();
            break;
          }
        case QUERY_LIST:
          {
            // Recursive message merge rather than replacement, per proto semantics.
            mergeQueryList(other.getQueryList());
            break;
          }
        case QUERIES_NOT_SET:
          {
            break;
          }
      }
      this.mergeUnknownFields(other.getUnknownFields());
      onChanged();
      return this;
    }
1077 
    // Always true: SparkSqlJob declares no required fields (proto3).
    @java.lang.Override
    public final boolean isInitialized() {
      return true;
    }
1082 
    /**
     * Parses serialized fields from {@code input} directly into this builder.
     * Tag values are (field_number << 3) | wire_type: 10 = query_file_uri,
     * 18 = query_list, 26 = script_variables entry, 34 = properties entry,
     * 50 = logging_config, 450 = jar_file_uris (field 56).
     */
    @java.lang.Override
    public Builder mergeFrom(
        com.google.protobuf.CodedInputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      if (extensionRegistry == null) {
        throw new java.lang.NullPointerException();
      }
      try {
        boolean done = false;
        while (!done) {
          int tag = input.readTag();
          switch (tag) {
            case 0:
              done = true;
              break;
            case 10:
              {
                java.lang.String s = input.readStringRequireUtf8();
                queriesCase_ = 1;
                queries_ = s;
                break;
              } // case 10
            case 18:
              {
                input.readMessage(getQueryListFieldBuilder().getBuilder(), extensionRegistry);
                queriesCase_ = 2;
                break;
              } // case 18
            case 26:
              {
                com.google.protobuf.MapEntry<java.lang.String, java.lang.String> scriptVariables__ =
                    input.readMessage(
                        ScriptVariablesDefaultEntryHolder.defaultEntry.getParserForType(),
                        extensionRegistry);
                internalGetMutableScriptVariables()
                    .getMutableMap()
                    .put(scriptVariables__.getKey(), scriptVariables__.getValue());
                bitField0_ |= 0x00000004;
                break;
              } // case 26
            case 34:
              {
                com.google.protobuf.MapEntry<java.lang.String, java.lang.String> properties__ =
                    input.readMessage(
                        PropertiesDefaultEntryHolder.defaultEntry.getParserForType(),
                        extensionRegistry);
                internalGetMutableProperties()
                    .getMutableMap()
                    .put(properties__.getKey(), properties__.getValue());
                bitField0_ |= 0x00000008;
                break;
              } // case 34
            case 50:
              {
                input.readMessage(getLoggingConfigFieldBuilder().getBuilder(), extensionRegistry);
                bitField0_ |= 0x00000020;
                break;
              } // case 50
            case 450:
              {
                java.lang.String s = input.readStringRequireUtf8();
                ensureJarFileUrisIsMutable();
                jarFileUris_.add(s);
                break;
              } // case 450
            default:
              {
                if (!super.parseUnknownField(input, extensionRegistry, tag)) {
                  done = true; // was an endgroup tag
                }
                break;
              } // default:
          } // switch (tag)
        } // while (!done)
      } catch (com.google.protobuf.InvalidProtocolBufferException e) {
        throw e.unwrapIOException();
      } finally {
        // Notify listeners even on failure so partially-read state is observed.
        onChanged();
      } // finally
      return this;
    }
1165 
    // `queries` oneof state: 0 = not set, 1 = query_file_uri, 2 = query_list.
    private int queriesCase_ = 0;
    // Holds a String/ByteString (case 1) or a QueryList/its value (case 2).
    private java.lang.Object queries_;
1168 
    /** Returns which member of the {@code queries} oneof is currently set. */
    public QueriesCase getQueriesCase() {
      return QueriesCase.forNumber(queriesCase_);
    }
1172 
    /** Clears the {@code queries} oneof back to QUERIES_NOT_SET. */
    public Builder clearQueries() {
      queriesCase_ = 0;
      queries_ = null;
      onChanged();
      return this;
    }
1179 
    // Presence/mutability bits: 0x04 script_variables, 0x08 properties,
    // 0x10 jarFileUris mutable, 0x20 logging_config.
    private int bitField0_;
1181 
1182     /**
1183      *
1184      *
1185      * <pre>
1186      * The HCFS URI of the script that contains SQL queries.
1187      * </pre>
1188      *
1189      * <code>string query_file_uri = 1;</code>
1190      *
1191      * @return Whether the queryFileUri field is set.
1192      */
    // True iff the oneof currently holds query_file_uri (case 1).
    @java.lang.Override
    public boolean hasQueryFileUri() {
      return queriesCase_ == 1;
    }
1197     /**
1198      *
1199      *
1200      * <pre>
1201      * The HCFS URI of the script that contains SQL queries.
1202      * </pre>
1203      *
1204      * <code>string query_file_uri = 1;</code>
1205      *
1206      * @return The queryFileUri.
1207      */
    @java.lang.Override
    public java.lang.String getQueryFileUri() {
      java.lang.Object ref = "";
      if (queriesCase_ == 1) {
        ref = queries_;
      }
      if (!(ref instanceof java.lang.String)) {
        // Lazily decode a ByteString received off the wire, caching the String
        // back into the oneof slot so later calls skip the UTF-8 decode.
        com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
        java.lang.String s = bs.toStringUtf8();
        if (queriesCase_ == 1) {
          queries_ = s;
        }
        return s;
      } else {
        return (java.lang.String) ref;
      }
    }
1225     /**
1226      *
1227      *
1228      * <pre>
1229      * The HCFS URI of the script that contains SQL queries.
1230      * </pre>
1231      *
1232      * <code>string query_file_uri = 1;</code>
1233      *
1234      * @return The bytes for queryFileUri.
1235      */
    @java.lang.Override
    public com.google.protobuf.ByteString getQueryFileUriBytes() {
      java.lang.Object ref = "";
      if (queriesCase_ == 1) {
        ref = queries_;
      }
      if (ref instanceof String) {
        // Encode and cache the ByteString form, mirroring getQueryFileUri's caching.
        com.google.protobuf.ByteString b =
            com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
        if (queriesCase_ == 1) {
          queries_ = b;
        }
        return b;
      } else {
        return (com.google.protobuf.ByteString) ref;
      }
    }
1253     /**
1254      *
1255      *
1256      * <pre>
1257      * The HCFS URI of the script that contains SQL queries.
1258      * </pre>
1259      *
1260      * <code>string query_file_uri = 1;</code>
1261      *
1262      * @param value The queryFileUri to set.
1263      * @return This builder for chaining.
1264      */
    /** Sets query_file_uri, switching the oneof to case 1. Rejects null. */
    public Builder setQueryFileUri(java.lang.String value) {
      if (value == null) {
        throw new NullPointerException();
      }
      queriesCase_ = 1;
      queries_ = value;
      onChanged();
      return this;
    }
1274     /**
1275      *
1276      *
1277      * <pre>
1278      * The HCFS URI of the script that contains SQL queries.
1279      * </pre>
1280      *
1281      * <code>string query_file_uri = 1;</code>
1282      *
1283      * @return This builder for chaining.
1284      */
    /** Clears query_file_uri; a no-op when a different oneof case is set. */
    public Builder clearQueryFileUri() {
      if (queriesCase_ == 1) {
        queriesCase_ = 0;
        queries_ = null;
        onChanged();
      }
      return this;
    }
1293     /**
1294      *
1295      *
1296      * <pre>
1297      * The HCFS URI of the script that contains SQL queries.
1298      * </pre>
1299      *
1300      * <code>string query_file_uri = 1;</code>
1301      *
1302      * @param value The bytes for queryFileUri to set.
1303      * @return This builder for chaining.
1304      */
    /** Sets query_file_uri from raw bytes after validating they are well-formed UTF-8. */
    public Builder setQueryFileUriBytes(com.google.protobuf.ByteString value) {
      if (value == null) {
        throw new NullPointerException();
      }
      checkByteStringIsUtf8(value);
      queriesCase_ = 1;
      queries_ = value;
      onChanged();
      return this;
    }
1315 
    // Lazily-created sub-builder for the query_list oneof member; while non-null
    // it, not queries_, owns the current QueryList value.
    private com.google.protobuf.SingleFieldBuilderV3<
            com.google.cloud.dataproc.v1.QueryList,
            com.google.cloud.dataproc.v1.QueryList.Builder,
            com.google.cloud.dataproc.v1.QueryListOrBuilder>
        queryListBuilder_;
1321     /**
1322      *
1323      *
1324      * <pre>
1325      * A list of queries.
1326      * </pre>
1327      *
1328      * <code>.google.cloud.dataproc.v1.QueryList query_list = 2;</code>
1329      *
1330      * @return Whether the queryList field is set.
1331      */
    // True iff the oneof currently holds query_list (case 2).
    @java.lang.Override
    public boolean hasQueryList() {
      return queriesCase_ == 2;
    }
1336     /**
1337      *
1338      *
1339      * <pre>
1340      * A list of queries.
1341      * </pre>
1342      *
1343      * <code>.google.cloud.dataproc.v1.QueryList query_list = 2;</code>
1344      *
1345      * @return The queryList.
1346      */
1347     @java.lang.Override
getQueryList()1348     public com.google.cloud.dataproc.v1.QueryList getQueryList() {
1349       if (queryListBuilder_ == null) {
1350         if (queriesCase_ == 2) {
1351           return (com.google.cloud.dataproc.v1.QueryList) queries_;
1352         }
1353         return com.google.cloud.dataproc.v1.QueryList.getDefaultInstance();
1354       } else {
1355         if (queriesCase_ == 2) {
1356           return queryListBuilder_.getMessage();
1357         }
1358         return com.google.cloud.dataproc.v1.QueryList.getDefaultInstance();
1359       }
1360     }
1361     /**
1362      *
1363      *
1364      * <pre>
1365      * A list of queries.
1366      * </pre>
1367      *
1368      * <code>.google.cloud.dataproc.v1.QueryList query_list = 2;</code>
1369      */
    /** Sets query_list to {@code value}, switching the oneof to case 2. Rejects null. */
    public Builder setQueryList(com.google.cloud.dataproc.v1.QueryList value) {
      if (queryListBuilder_ == null) {
        if (value == null) {
          throw new NullPointerException();
        }
        queries_ = value;
        onChanged();
      } else {
        queryListBuilder_.setMessage(value);
      }
      queriesCase_ = 2;
      return this;
    }
1383     /**
1384      *
1385      *
1386      * <pre>
1387      * A list of queries.
1388      * </pre>
1389      *
1390      * <code>.google.cloud.dataproc.v1.QueryList query_list = 2;</code>
1391      */
    /** Sets query_list from a builder, switching the oneof to case 2. */
    public Builder setQueryList(com.google.cloud.dataproc.v1.QueryList.Builder builderForValue) {
      if (queryListBuilder_ == null) {
        queries_ = builderForValue.build();
        onChanged();
      } else {
        queryListBuilder_.setMessage(builderForValue.build());
      }
      queriesCase_ = 2;
      return this;
    }
1402     /**
1403      *
1404      *
1405      * <pre>
1406      * A list of queries.
1407      * </pre>
1408      *
1409      * <code>.google.cloud.dataproc.v1.QueryList query_list = 2;</code>
1410      */
    /**
     * Merges {@code value} into the current query_list (proto message-merge
     * semantics) and switches the oneof to case 2.
     */
    public Builder mergeQueryList(com.google.cloud.dataproc.v1.QueryList value) {
      if (queryListBuilder_ == null) {
        // Reference (not equals) comparison against the default-instance singleton:
        // an empty slot is simply replaced instead of merged.
        if (queriesCase_ == 2
            && queries_ != com.google.cloud.dataproc.v1.QueryList.getDefaultInstance()) {
          queries_ =
              com.google.cloud.dataproc.v1.QueryList.newBuilder(
                      (com.google.cloud.dataproc.v1.QueryList) queries_)
                  .mergeFrom(value)
                  .buildPartial();
        } else {
          queries_ = value;
        }
        onChanged();
      } else {
        if (queriesCase_ == 2) {
          queryListBuilder_.mergeFrom(value);
        } else {
          queryListBuilder_.setMessage(value);
        }
      }
      queriesCase_ = 2;
      return this;
    }
1434     /**
1435      *
1436      *
1437      * <pre>
1438      * A list of queries.
1439      * </pre>
1440      *
1441      * <code>.google.cloud.dataproc.v1.QueryList query_list = 2;</code>
1442      */
    /** Clears query_list (and its sub-builder); a no-op when a different case is set. */
    public Builder clearQueryList() {
      if (queryListBuilder_ == null) {
        if (queriesCase_ == 2) {
          queriesCase_ = 0;
          queries_ = null;
          onChanged();
        }
      } else {
        if (queriesCase_ == 2) {
          queriesCase_ = 0;
          queries_ = null;
        }
        // SingleFieldBuilderV3.clear() fires its own change notification.
        queryListBuilder_.clear();
      }
      return this;
    }
1459     /**
1460      *
1461      *
1462      * <pre>
1463      * A list of queries.
1464      * </pre>
1465      *
1466      * <code>.google.cloud.dataproc.v1.QueryList query_list = 2;</code>
1467      */
    /** Returns a mutable builder for query_list, forcing the oneof to case 2. */
    public com.google.cloud.dataproc.v1.QueryList.Builder getQueryListBuilder() {
      return getQueryListFieldBuilder().getBuilder();
    }
1471     /**
1472      *
1473      *
1474      * <pre>
1475      * A list of queries.
1476      * </pre>
1477      *
1478      * <code>.google.cloud.dataproc.v1.QueryList query_list = 2;</code>
1479      */
    // Read-only view of query_list without forcing sub-builder creation.
    @java.lang.Override
    public com.google.cloud.dataproc.v1.QueryListOrBuilder getQueryListOrBuilder() {
      if ((queriesCase_ == 2) && (queryListBuilder_ != null)) {
        return queryListBuilder_.getMessageOrBuilder();
      } else {
        if (queriesCase_ == 2) {
          return (com.google.cloud.dataproc.v1.QueryList) queries_;
        }
        return com.google.cloud.dataproc.v1.QueryList.getDefaultInstance();
      }
    }
1491     /**
1492      *
1493      *
1494      * <pre>
1495      * A list of queries.
1496      * </pre>
1497      *
1498      * <code>.google.cloud.dataproc.v1.QueryList query_list = 2;</code>
1499      */
    // Lazily creates the query_list sub-builder, moving any value held in the
    // oneof slot into it. NOTE: this forces the oneof to case 2 as a side effect.
    private com.google.protobuf.SingleFieldBuilderV3<
            com.google.cloud.dataproc.v1.QueryList,
            com.google.cloud.dataproc.v1.QueryList.Builder,
            com.google.cloud.dataproc.v1.QueryListOrBuilder>
        getQueryListFieldBuilder() {
      if (queryListBuilder_ == null) {
        if (!(queriesCase_ == 2)) {
          queries_ = com.google.cloud.dataproc.v1.QueryList.getDefaultInstance();
        }
        queryListBuilder_ =
            new com.google.protobuf.SingleFieldBuilderV3<
                com.google.cloud.dataproc.v1.QueryList,
                com.google.cloud.dataproc.v1.QueryList.Builder,
                com.google.cloud.dataproc.v1.QueryListOrBuilder>(
                (com.google.cloud.dataproc.v1.QueryList) queries_,
                getParentForChildren(),
                isClean());
        // Ownership moved to the sub-builder; drop the direct reference.
        queries_ = null;
      }
      queriesCase_ = 2;
      onChanged();
      return queryListBuilder_;
    }
1523 
    // Backing storage for the script_variables map (field 3); null until first use.
    private com.google.protobuf.MapField<java.lang.String, java.lang.String> scriptVariables_;
1525 
    // Read-only accessor: substitutes the shared empty MapField while unset.
    private com.google.protobuf.MapField<java.lang.String, java.lang.String>
        internalGetScriptVariables() {
      if (scriptVariables_ == null) {
        return com.google.protobuf.MapField.emptyMapField(
            ScriptVariablesDefaultEntryHolder.defaultEntry);
      }
      return scriptVariables_;
    }
1534 
    // Mutable accessor: allocates/copy-on-writes the map, marks presence bit 0x04,
    // and notifies the parent of the pending change.
    private com.google.protobuf.MapField<java.lang.String, java.lang.String>
        internalGetMutableScriptVariables() {
      if (scriptVariables_ == null) {
        scriptVariables_ =
            com.google.protobuf.MapField.newMapField(
                ScriptVariablesDefaultEntryHolder.defaultEntry);
      }
      if (!scriptVariables_.isMutable()) {
        scriptVariables_ = scriptVariables_.copy();
      }
      bitField0_ |= 0x00000004;
      onChanged();
      return scriptVariables_;
    }
1549 
    /** Returns the number of entries in the script_variables map. */
    public int getScriptVariablesCount() {
      return internalGetScriptVariables().getMap().size();
    }
1553     /**
1554      *
1555      *
1556      * <pre>
1557      * Optional. Mapping of query variable names to values (equivalent to the
1558      * Spark SQL command: SET `name="value";`).
1559      * </pre>
1560      *
1561      * <code>
1562      * map&lt;string, string&gt; script_variables = 3 [(.google.api.field_behavior) = OPTIONAL];
1563      * </code>
1564      */
    @java.lang.Override
    public boolean containsScriptVariables(java.lang.String key) {
      // Map keys are null-hostile across all generated map accessors.
      if (key == null) {
        throw new NullPointerException("map key");
      }
      return internalGetScriptVariables().getMap().containsKey(key);
    }
1572     /** Use {@link #getScriptVariablesMap()} instead. */
    // Deprecated alias retained for source compatibility; delegates to the Map form.
    @java.lang.Override
    @java.lang.Deprecated
    public java.util.Map<java.lang.String, java.lang.String> getScriptVariables() {
      return getScriptVariablesMap();
    }
1578     /**
1579      *
1580      *
1581      * <pre>
1582      * Optional. Mapping of query variable names to values (equivalent to the
1583      * Spark SQL command: SET `name="value";`).
1584      * </pre>
1585      *
1586      * <code>
1587      * map&lt;string, string&gt; script_variables = 3 [(.google.api.field_behavior) = OPTIONAL];
1588      * </code>
1589      */
    // Returns an unmodifiable view of the current script_variables entries.
    @java.lang.Override
    public java.util.Map<java.lang.String, java.lang.String> getScriptVariablesMap() {
      return internalGetScriptVariables().getMap();
    }
1594     /**
1595      *
1596      *
1597      * <pre>
1598      * Optional. Mapping of query variable names to values (equivalent to the
1599      * Spark SQL command: SET `name="value";`).
1600      * </pre>
1601      *
1602      * <code>
1603      * map&lt;string, string&gt; script_variables = 3 [(.google.api.field_behavior) = OPTIONAL];
1604      * </code>
1605      */
    // Returns the mapped value, or defaultValue (which may be null) when absent.
    @java.lang.Override
    public /* nullable */ java.lang.String getScriptVariablesOrDefault(
        java.lang.String key,
        /* nullable */
        java.lang.String defaultValue) {
      if (key == null) {
        throw new NullPointerException("map key");
      }
      java.util.Map<java.lang.String, java.lang.String> map = internalGetScriptVariables().getMap();
      return map.containsKey(key) ? map.get(key) : defaultValue;
    }
1617     /**
1618      *
1619      *
1620      * <pre>
1621      * Optional. Mapping of query variable names to values (equivalent to the
1622      * Spark SQL command: SET `name="value";`).
1623      * </pre>
1624      *
1625      * <code>
1626      * map&lt;string, string&gt; script_variables = 3 [(.google.api.field_behavior) = OPTIONAL];
1627      * </code>
1628      */
    // Returns the mapped value; throws IllegalArgumentException when the key is absent.
    @java.lang.Override
    public java.lang.String getScriptVariablesOrThrow(java.lang.String key) {
      if (key == null) {
        throw new NullPointerException("map key");
      }
      java.util.Map<java.lang.String, java.lang.String> map = internalGetScriptVariables().getMap();
      if (!map.containsKey(key)) {
        throw new java.lang.IllegalArgumentException();
      }
      return map.get(key);
    }
1640 
    /** Removes every script_variables entry and drops its presence bit. */
    public Builder clearScriptVariables() {
      bitField0_ = (bitField0_ & ~0x00000004);
      internalGetMutableScriptVariables().getMutableMap().clear();
      return this;
    }
1646     /**
1647      *
1648      *
1649      * <pre>
1650      * Optional. Mapping of query variable names to values (equivalent to the
1651      * Spark SQL command: SET `name="value";`).
1652      * </pre>
1653      *
1654      * <code>
1655      * map&lt;string, string&gt; script_variables = 3 [(.google.api.field_behavior) = OPTIONAL];
1656      * </code>
1657      */
    /** Removes one script_variables entry by key; rejects a null key. */
    public Builder removeScriptVariables(java.lang.String key) {
      if (key == null) {
        throw new NullPointerException("map key");
      }
      internalGetMutableScriptVariables().getMutableMap().remove(key);
      return this;
    }
1665     /** Use alternate mutation accessors instead. */
    // Deprecated direct-mutation escape hatch; prefer put/remove/clear accessors.
    @java.lang.Deprecated
    public java.util.Map<java.lang.String, java.lang.String> getMutableScriptVariables() {
      bitField0_ |= 0x00000004;
      return internalGetMutableScriptVariables().getMutableMap();
    }
1671     /**
1672      *
1673      *
1674      * <pre>
1675      * Optional. Mapping of query variable names to values (equivalent to the
1676      * Spark SQL command: SET `name="value";`).
1677      * </pre>
1678      *
1679      * <code>
1680      * map&lt;string, string&gt; script_variables = 3 [(.google.api.field_behavior) = OPTIONAL];
1681      * </code>
1682      */
putScriptVariables(java.lang.String key, java.lang.String value)1683     public Builder putScriptVariables(java.lang.String key, java.lang.String value) {
1684       if (key == null) {
1685         throw new NullPointerException("map key");
1686       }
1687       if (value == null) {
1688         throw new NullPointerException("map value");
1689       }
1690       internalGetMutableScriptVariables().getMutableMap().put(key, value);
1691       bitField0_ |= 0x00000004;
1692       return this;
1693     }
1694     /**
1695      *
1696      *
1697      * <pre>
1698      * Optional. Mapping of query variable names to values (equivalent to the
1699      * Spark SQL command: SET `name="value";`).
1700      * </pre>
1701      *
1702      * <code>
1703      * map&lt;string, string&gt; script_variables = 3 [(.google.api.field_behavior) = OPTIONAL];
1704      * </code>
1705      */
putAllScriptVariables(java.util.Map<java.lang.String, java.lang.String> values)1706     public Builder putAllScriptVariables(java.util.Map<java.lang.String, java.lang.String> values) {
1707       internalGetMutableScriptVariables().getMutableMap().putAll(values);
1708       bitField0_ |= 0x00000004;
1709       return this;
1710     }
1711 
    // Backing storage for the `properties` map field; lazily initialized.
    private com.google.protobuf.MapField<java.lang.String, java.lang.String> properties_;

    // Read-only view; returns a shared empty MapField while the field is unset
    // so readers never see null.
    private com.google.protobuf.MapField<java.lang.String, java.lang.String>
        internalGetProperties() {
      if (properties_ == null) {
        return com.google.protobuf.MapField.emptyMapField(
            PropertiesDefaultEntryHolder.defaultEntry);
      }
      return properties_;
    }

    // Mutable access with copy-on-write: an immutable MapField (shared with a
    // built message) is copied before mutation, then the presence bit is set.
    private com.google.protobuf.MapField<java.lang.String, java.lang.String>
        internalGetMutableProperties() {
      if (properties_ == null) {
        properties_ =
            com.google.protobuf.MapField.newMapField(PropertiesDefaultEntryHolder.defaultEntry);
      }
      if (!properties_.isMutable()) {
        properties_ = properties_.copy();
      }
      // 0x00000008 is the generated presence bit for properties in this builder.
      bitField0_ |= 0x00000008;
      onChanged();
      return properties_;
    }

    /** Returns the number of entries in the {@code properties} map. */
    public int getPropertiesCount() {
      return internalGetProperties().getMap().size();
    }
    /**
     *
     *
     * <pre>
     * Optional. A mapping of property names to values, used to configure
     * Spark SQL's SparkConf. Properties that conflict with values set by the
     * Dataproc API may be overwritten.
     * </pre>
     *
     * <code>map&lt;string, string&gt; properties = 4 [(.google.api.field_behavior) = OPTIONAL];
     * </code>
     */
    @java.lang.Override
    public boolean containsProperties(java.lang.String key) {
      if (key == null) {
        throw new NullPointerException("map key");
      }
      return internalGetProperties().getMap().containsKey(key);
    }
    /** Use {@link #getPropertiesMap()} instead. */
    @java.lang.Override
    @java.lang.Deprecated
    public java.util.Map<java.lang.String, java.lang.String> getProperties() {
      return getPropertiesMap();
    }
    /**
     *
     *
     * <pre>
     * Optional. A mapping of property names to values, used to configure
     * Spark SQL's SparkConf. Properties that conflict with values set by the
     * Dataproc API may be overwritten.
     * </pre>
     *
     * <code>map&lt;string, string&gt; properties = 4 [(.google.api.field_behavior) = OPTIONAL];
     * </code>
     */
    @java.lang.Override
    public java.util.Map<java.lang.String, java.lang.String> getPropertiesMap() {
      return internalGetProperties().getMap();
    }
    /**
     *
     *
     * <pre>
     * Optional. A mapping of property names to values, used to configure
     * Spark SQL's SparkConf. Properties that conflict with values set by the
     * Dataproc API may be overwritten.
     * </pre>
     *
     * <code>map&lt;string, string&gt; properties = 4 [(.google.api.field_behavior) = OPTIONAL];
     * </code>
     */
    @java.lang.Override
    public /* nullable */ java.lang.String getPropertiesOrDefault(
        java.lang.String key,
        /* nullable */
        java.lang.String defaultValue) {
      if (key == null) {
        throw new NullPointerException("map key");
      }
      java.util.Map<java.lang.String, java.lang.String> map = internalGetProperties().getMap();
      return map.containsKey(key) ? map.get(key) : defaultValue;
    }
    /**
     *
     *
     * <pre>
     * Optional. A mapping of property names to values, used to configure
     * Spark SQL's SparkConf. Properties that conflict with values set by the
     * Dataproc API may be overwritten.
     * </pre>
     *
     * <code>map&lt;string, string&gt; properties = 4 [(.google.api.field_behavior) = OPTIONAL];
     * </code>
     */
    @java.lang.Override
    public java.lang.String getPropertiesOrThrow(java.lang.String key) {
      if (key == null) {
        throw new NullPointerException("map key");
      }
      java.util.Map<java.lang.String, java.lang.String> map = internalGetProperties().getMap();
      // Unlike the *OrDefault accessor, a missing key here is a caller error.
      if (!map.containsKey(key)) {
        throw new java.lang.IllegalArgumentException();
      }
      return map.get(key);
    }
1827 
    /**
     * Clears the {@code properties} map and its presence bit.
     *
     * <code>map&lt;string, string&gt; properties = 4 [(.google.api.field_behavior) = OPTIONAL];
     * </code>
     */
    public Builder clearProperties() {
      // 0x00000008 is the generated presence bit for properties in this builder.
      bitField0_ = (bitField0_ & ~0x00000008);
      internalGetMutableProperties().getMutableMap().clear();
      return this;
    }
    /**
     *
     *
     * <pre>
     * Optional. A mapping of property names to values, used to configure
     * Spark SQL's SparkConf. Properties that conflict with values set by the
     * Dataproc API may be overwritten.
     * </pre>
     *
     * <code>map&lt;string, string&gt; properties = 4 [(.google.api.field_behavior) = OPTIONAL];
     * </code>
     */
    public Builder removeProperties(java.lang.String key) {
      if (key == null) {
        throw new NullPointerException("map key");
      }
      internalGetMutableProperties().getMutableMap().remove(key);
      return this;
    }
    /** Use alternate mutation accessors instead. */
    @java.lang.Deprecated
    public java.util.Map<java.lang.String, java.lang.String> getMutableProperties() {
      // Marks the field present before handing out the live mutable map.
      bitField0_ |= 0x00000008;
      return internalGetMutableProperties().getMutableMap();
    }
    /**
     *
     *
     * <pre>
     * Optional. A mapping of property names to values, used to configure
     * Spark SQL's SparkConf. Properties that conflict with values set by the
     * Dataproc API may be overwritten.
     * </pre>
     *
     * <code>map&lt;string, string&gt; properties = 4 [(.google.api.field_behavior) = OPTIONAL];
     * </code>
     */
    public Builder putProperties(java.lang.String key, java.lang.String value) {
      // Proto3 maps are null-hostile for both keys and values.
      if (key == null) {
        throw new NullPointerException("map key");
      }
      if (value == null) {
        throw new NullPointerException("map value");
      }
      internalGetMutableProperties().getMutableMap().put(key, value);
      bitField0_ |= 0x00000008;
      return this;
    }
    /**
     *
     *
     * <pre>
     * Optional. A mapping of property names to values, used to configure
     * Spark SQL's SparkConf. Properties that conflict with values set by the
     * Dataproc API may be overwritten.
     * </pre>
     *
     * <code>map&lt;string, string&gt; properties = 4 [(.google.api.field_behavior) = OPTIONAL];
     * </code>
     */
    public Builder putAllProperties(java.util.Map<java.lang.String, java.lang.String> values) {
      internalGetMutableProperties().getMutableMap().putAll(values);
      bitField0_ |= 0x00000008;
      return this;
    }
1898 
    // Backing storage for the repeated `jar_file_uris` field. Starts as the
    // shared immutable EMPTY list until the first mutation.
    private com.google.protobuf.LazyStringList jarFileUris_ =
        com.google.protobuf.LazyStringArrayList.EMPTY;

    // Copy-on-write: replaces a list shared with a built message by a private
    // copy before the first mutation. 0x00000010 tracks "builder owns the list".
    private void ensureJarFileUrisIsMutable() {
      if (!((bitField0_ & 0x00000010) != 0)) {
        jarFileUris_ = new com.google.protobuf.LazyStringArrayList(jarFileUris_);
        bitField0_ |= 0x00000010;
      }
    }
    /**
     *
     *
     * <pre>
     * Optional. HCFS URIs of jar files to be added to the Spark CLASSPATH.
     * </pre>
     *
     * <code>repeated string jar_file_uris = 56 [(.google.api.field_behavior) = OPTIONAL];</code>
     *
     * @return A list containing the jarFileUris.
     */
    public com.google.protobuf.ProtocolStringList getJarFileUrisList() {
      // Unmodifiable view so callers cannot bypass ensureJarFileUrisIsMutable().
      return jarFileUris_.getUnmodifiableView();
    }
    /**
     *
     *
     * <pre>
     * Optional. HCFS URIs of jar files to be added to the Spark CLASSPATH.
     * </pre>
     *
     * <code>repeated string jar_file_uris = 56 [(.google.api.field_behavior) = OPTIONAL];</code>
     *
     * @return The count of jarFileUris.
     */
    public int getJarFileUrisCount() {
      return jarFileUris_.size();
    }
    /**
     *
     *
     * <pre>
     * Optional. HCFS URIs of jar files to be added to the Spark CLASSPATH.
     * </pre>
     *
     * <code>repeated string jar_file_uris = 56 [(.google.api.field_behavior) = OPTIONAL];</code>
     *
     * @param index The index of the element to return.
     * @return The jarFileUris at the given index.
     */
    public java.lang.String getJarFileUris(int index) {
      return jarFileUris_.get(index);
    }
    /**
     *
     *
     * <pre>
     * Optional. HCFS URIs of jar files to be added to the Spark CLASSPATH.
     * </pre>
     *
     * <code>repeated string jar_file_uris = 56 [(.google.api.field_behavior) = OPTIONAL];</code>
     *
     * @param index The index of the value to return.
     * @return The bytes of the jarFileUris at the given index.
     */
    public com.google.protobuf.ByteString getJarFileUrisBytes(int index) {
      return jarFileUris_.getByteString(index);
    }
    /**
     *
     *
     * <pre>
     * Optional. HCFS URIs of jar files to be added to the Spark CLASSPATH.
     * </pre>
     *
     * <code>repeated string jar_file_uris = 56 [(.google.api.field_behavior) = OPTIONAL];</code>
     *
     * @param index The index to set the value at.
     * @param value The jarFileUris to set.
     * @return This builder for chaining.
     */
    public Builder setJarFileUris(int index, java.lang.String value) {
      if (value == null) {
        throw new NullPointerException();
      }
      ensureJarFileUrisIsMutable();
      jarFileUris_.set(index, value);
      onChanged();
      return this;
    }
    /**
     *
     *
     * <pre>
     * Optional. HCFS URIs of jar files to be added to the Spark CLASSPATH.
     * </pre>
     *
     * <code>repeated string jar_file_uris = 56 [(.google.api.field_behavior) = OPTIONAL];</code>
     *
     * @param value The jarFileUris to add.
     * @return This builder for chaining.
     */
    public Builder addJarFileUris(java.lang.String value) {
      if (value == null) {
        throw new NullPointerException();
      }
      ensureJarFileUrisIsMutable();
      jarFileUris_.add(value);
      onChanged();
      return this;
    }
    /**
     *
     *
     * <pre>
     * Optional. HCFS URIs of jar files to be added to the Spark CLASSPATH.
     * </pre>
     *
     * <code>repeated string jar_file_uris = 56 [(.google.api.field_behavior) = OPTIONAL];</code>
     *
     * @param values The jarFileUris to add.
     * @return This builder for chaining.
     */
    public Builder addAllJarFileUris(java.lang.Iterable<java.lang.String> values) {
      ensureJarFileUrisIsMutable();
      com.google.protobuf.AbstractMessageLite.Builder.addAll(values, jarFileUris_);
      onChanged();
      return this;
    }
    /**
     *
     *
     * <pre>
     * Optional. HCFS URIs of jar files to be added to the Spark CLASSPATH.
     * </pre>
     *
     * <code>repeated string jar_file_uris = 56 [(.google.api.field_behavior) = OPTIONAL];</code>
     *
     * @return This builder for chaining.
     */
    public Builder clearJarFileUris() {
      // Drop back to the shared EMPTY list and clear the ownership bit.
      jarFileUris_ = com.google.protobuf.LazyStringArrayList.EMPTY;
      bitField0_ = (bitField0_ & ~0x00000010);
      onChanged();
      return this;
    }
    /**
     *
     *
     * <pre>
     * Optional. HCFS URIs of jar files to be added to the Spark CLASSPATH.
     * </pre>
     *
     * <code>repeated string jar_file_uris = 56 [(.google.api.field_behavior) = OPTIONAL];</code>
     *
     * @param value The bytes of the jarFileUris to add.
     * @return This builder for chaining.
     */
    public Builder addJarFileUrisBytes(com.google.protobuf.ByteString value) {
      if (value == null) {
        throw new NullPointerException();
      }
      // Proto3 strings must be valid UTF-8; verify before storing raw bytes.
      checkByteStringIsUtf8(value);
      ensureJarFileUrisIsMutable();
      jarFileUris_.add(value);
      onChanged();
      return this;
    }
2066 
    // Message-typed `logging_config` field: either the plain message is stored
    // in loggingConfig_, or (after getLoggingConfigBuilder() is first called)
    // a SingleFieldBuilderV3 takes over and loggingConfig_ is nulled.
    private com.google.cloud.dataproc.v1.LoggingConfig loggingConfig_;
    private com.google.protobuf.SingleFieldBuilderV3<
            com.google.cloud.dataproc.v1.LoggingConfig,
            com.google.cloud.dataproc.v1.LoggingConfig.Builder,
            com.google.cloud.dataproc.v1.LoggingConfigOrBuilder>
        loggingConfigBuilder_;
    /**
     *
     *
     * <pre>
     * Optional. The runtime log config for job execution.
     * </pre>
     *
     * <code>
     * .google.cloud.dataproc.v1.LoggingConfig logging_config = 6 [(.google.api.field_behavior) = OPTIONAL];
     * </code>
     *
     * @return Whether the loggingConfig field is set.
     */
    public boolean hasLoggingConfig() {
      // 0x00000020 is the generated presence bit for logging_config.
      return ((bitField0_ & 0x00000020) != 0);
    }
    /**
     *
     *
     * <pre>
     * Optional. The runtime log config for job execution.
     * </pre>
     *
     * <code>
     * .google.cloud.dataproc.v1.LoggingConfig logging_config = 6 [(.google.api.field_behavior) = OPTIONAL];
     * </code>
     *
     * @return The loggingConfig.
     */
    public com.google.cloud.dataproc.v1.LoggingConfig getLoggingConfig() {
      // Never returns null: falls back to the default instance when unset.
      if (loggingConfigBuilder_ == null) {
        return loggingConfig_ == null
            ? com.google.cloud.dataproc.v1.LoggingConfig.getDefaultInstance()
            : loggingConfig_;
      } else {
        return loggingConfigBuilder_.getMessage();
      }
    }
    /**
     *
     *
     * <pre>
     * Optional. The runtime log config for job execution.
     * </pre>
     *
     * <code>
     * .google.cloud.dataproc.v1.LoggingConfig logging_config = 6 [(.google.api.field_behavior) = OPTIONAL];
     * </code>
     */
    public Builder setLoggingConfig(com.google.cloud.dataproc.v1.LoggingConfig value) {
      if (loggingConfigBuilder_ == null) {
        if (value == null) {
          throw new NullPointerException();
        }
        loggingConfig_ = value;
      } else {
        // Field-builder mode: delegate storage to the nested builder.
        loggingConfigBuilder_.setMessage(value);
      }
      bitField0_ |= 0x00000020;
      onChanged();
      return this;
    }
    /**
     *
     *
     * <pre>
     * Optional. The runtime log config for job execution.
     * </pre>
     *
     * <code>
     * .google.cloud.dataproc.v1.LoggingConfig logging_config = 6 [(.google.api.field_behavior) = OPTIONAL];
     * </code>
     */
    public Builder setLoggingConfig(
        com.google.cloud.dataproc.v1.LoggingConfig.Builder builderForValue) {
      if (loggingConfigBuilder_ == null) {
        loggingConfig_ = builderForValue.build();
      } else {
        loggingConfigBuilder_.setMessage(builderForValue.build());
      }
      bitField0_ |= 0x00000020;
      onChanged();
      return this;
    }
    /**
     *
     *
     * <pre>
     * Optional. The runtime log config for job execution.
     * </pre>
     *
     * <code>
     * .google.cloud.dataproc.v1.LoggingConfig logging_config = 6 [(.google.api.field_behavior) = OPTIONAL];
     * </code>
     */
    public Builder mergeLoggingConfig(com.google.cloud.dataproc.v1.LoggingConfig value) {
      if (loggingConfigBuilder_ == null) {
        // Merge field-by-field only if a non-default value is already present;
        // otherwise a plain replace is equivalent and cheaper.
        if (((bitField0_ & 0x00000020) != 0)
            && loggingConfig_ != null
            && loggingConfig_ != com.google.cloud.dataproc.v1.LoggingConfig.getDefaultInstance()) {
          getLoggingConfigBuilder().mergeFrom(value);
        } else {
          loggingConfig_ = value;
        }
      } else {
        loggingConfigBuilder_.mergeFrom(value);
      }
      bitField0_ |= 0x00000020;
      onChanged();
      return this;
    }
    /**
     *
     *
     * <pre>
     * Optional. The runtime log config for job execution.
     * </pre>
     *
     * <code>
     * .google.cloud.dataproc.v1.LoggingConfig logging_config = 6 [(.google.api.field_behavior) = OPTIONAL];
     * </code>
     */
    public Builder clearLoggingConfig() {
      bitField0_ = (bitField0_ & ~0x00000020);
      loggingConfig_ = null;
      // Also discard any nested field builder so future reads start fresh.
      if (loggingConfigBuilder_ != null) {
        loggingConfigBuilder_.dispose();
        loggingConfigBuilder_ = null;
      }
      onChanged();
      return this;
    }
    /**
     *
     *
     * <pre>
     * Optional. The runtime log config for job execution.
     * </pre>
     *
     * <code>
     * .google.cloud.dataproc.v1.LoggingConfig logging_config = 6 [(.google.api.field_behavior) = OPTIONAL];
     * </code>
     */
    public com.google.cloud.dataproc.v1.LoggingConfig.Builder getLoggingConfigBuilder() {
      // Obtaining the builder counts as setting the field.
      bitField0_ |= 0x00000020;
      onChanged();
      return getLoggingConfigFieldBuilder().getBuilder();
    }
    /**
     *
     *
     * <pre>
     * Optional. The runtime log config for job execution.
     * </pre>
     *
     * <code>
     * .google.cloud.dataproc.v1.LoggingConfig logging_config = 6 [(.google.api.field_behavior) = OPTIONAL];
     * </code>
     */
    public com.google.cloud.dataproc.v1.LoggingConfigOrBuilder getLoggingConfigOrBuilder() {
      if (loggingConfigBuilder_ != null) {
        return loggingConfigBuilder_.getMessageOrBuilder();
      } else {
        return loggingConfig_ == null
            ? com.google.cloud.dataproc.v1.LoggingConfig.getDefaultInstance()
            : loggingConfig_;
      }
    }
    /**
     *
     *
     * <pre>
     * Optional. The runtime log config for job execution.
     * </pre>
     *
     * <code>
     * .google.cloud.dataproc.v1.LoggingConfig logging_config = 6 [(.google.api.field_behavior) = OPTIONAL];
     * </code>
     */
    private com.google.protobuf.SingleFieldBuilderV3<
            com.google.cloud.dataproc.v1.LoggingConfig,
            com.google.cloud.dataproc.v1.LoggingConfig.Builder,
            com.google.cloud.dataproc.v1.LoggingConfigOrBuilder>
        getLoggingConfigFieldBuilder() {
      // Lazily switch from plain-message storage to field-builder mode; the
      // current value is handed to the builder and loggingConfig_ is cleared.
      if (loggingConfigBuilder_ == null) {
        loggingConfigBuilder_ =
            new com.google.protobuf.SingleFieldBuilderV3<
                com.google.cloud.dataproc.v1.LoggingConfig,
                com.google.cloud.dataproc.v1.LoggingConfig.Builder,
                com.google.cloud.dataproc.v1.LoggingConfigOrBuilder>(
                getLoggingConfig(), getParentForChildren(), isClean());
        loggingConfig_ = null;
      }
      return loggingConfigBuilder_;
    }
2268 
    // Unknown-field handling is inherited unchanged from GeneratedMessageV3;
    // these overrides exist only to make the methods final on this builder.
    @java.lang.Override
    public final Builder setUnknownFields(final com.google.protobuf.UnknownFieldSet unknownFields) {
      return super.setUnknownFields(unknownFields);
    }

    @java.lang.Override
    public final Builder mergeUnknownFields(
        final com.google.protobuf.UnknownFieldSet unknownFields) {
      return super.mergeUnknownFields(unknownFields);
    }
2279 
2280     // @@protoc_insertion_point(builder_scope:google.cloud.dataproc.v1.SparkSqlJob)
2281   }
2282 
2283   // @@protoc_insertion_point(class_scope:google.cloud.dataproc.v1.SparkSqlJob)
  // Shared immutable default instance; all unset message fields resolve to it.
  private static final com.google.cloud.dataproc.v1.SparkSqlJob DEFAULT_INSTANCE;

  static {
    DEFAULT_INSTANCE = new com.google.cloud.dataproc.v1.SparkSqlJob();
  }

  public static com.google.cloud.dataproc.v1.SparkSqlJob getDefaultInstance() {
    return DEFAULT_INSTANCE;
  }

  // Wire-format parser. Every failure path attaches the partially-parsed
  // message via setUnfinishedMessage so callers can inspect what was read.
  private static final com.google.protobuf.Parser<SparkSqlJob> PARSER =
      new com.google.protobuf.AbstractParser<SparkSqlJob>() {
        @java.lang.Override
        public SparkSqlJob parsePartialFrom(
            com.google.protobuf.CodedInputStream input,
            com.google.protobuf.ExtensionRegistryLite extensionRegistry)
            throws com.google.protobuf.InvalidProtocolBufferException {
          Builder builder = newBuilder();
          try {
            builder.mergeFrom(input, extensionRegistry);
          } catch (com.google.protobuf.InvalidProtocolBufferException e) {
            throw e.setUnfinishedMessage(builder.buildPartial());
          } catch (com.google.protobuf.UninitializedMessageException e) {
            throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial());
          } catch (java.io.IOException e) {
            // Wrap plain I/O failures as protocol errors for a uniform contract.
            throw new com.google.protobuf.InvalidProtocolBufferException(e)
                .setUnfinishedMessage(builder.buildPartial());
          }
          return builder.buildPartial();
        }
      };

  public static com.google.protobuf.Parser<SparkSqlJob> parser() {
    return PARSER;
  }

  @java.lang.Override
  public com.google.protobuf.Parser<SparkSqlJob> getParserForType() {
    return PARSER;
  }

  @java.lang.Override
  public com.google.cloud.dataproc.v1.SparkSqlJob getDefaultInstanceForType() {
    return DEFAULT_INSTANCE;
  }
2329 }
2330