• Home
  • Line#
  • Scopes#
  • Navigate#
  • Raw
  • Download
1 /*
2  * Copyright 2020 Google LLC
3  *
4  * Licensed under the Apache License, Version 2.0 (the "License");
5  * you may not use this file except in compliance with the License.
6  * You may obtain a copy of the License at
7  *
8  *     https://www.apache.org/licenses/LICENSE-2.0
9  *
10  * Unless required by applicable law or agreed to in writing, software
11  * distributed under the License is distributed on an "AS IS" BASIS,
12  * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13  * See the License for the specific language governing permissions and
14  * limitations under the License.
15  */
16 // Generated by the protocol buffer compiler.  DO NOT EDIT!
17 // source: google/cloud/retail/v2/import_config.proto
18 
19 package com.google.cloud.retail.v2;
20 
21 /**
22  *
23  *
24  * <pre>
25  * BigQuery source import data from.
26  * </pre>
27  *
28  * Protobuf type {@code google.cloud.retail.v2.BigQuerySource}
29  */
30 public final class BigQuerySource extends com.google.protobuf.GeneratedMessageV3
31     implements
32     // @@protoc_insertion_point(message_implements:google.cloud.retail.v2.BigQuerySource)
33     BigQuerySourceOrBuilder {
34   private static final long serialVersionUID = 0L;
35   // Use BigQuerySource.newBuilder() to construct.
  // Builder-based constructor; field values are copied from the builder by the superclass.
  private BigQuerySource(com.google.protobuf.GeneratedMessageV3.Builder<?> builder) {
    super(builder);
  }

  // Default-instance constructor: every string field starts as the empty string.
  private BigQuerySource() {
    projectId_ = "";
    datasetId_ = "";
    tableId_ = "";
    gcsStagingDir_ = "";
    dataSchema_ = "";
  }

  // Reflection hook the protobuf runtime uses to create fresh instances of this message.
  @java.lang.Override
  @SuppressWarnings({"unused"})
  protected java.lang.Object newInstance(UnusedPrivateParameter unused) {
    return new BigQuerySource();
  }

  // Fields that were present on the wire but are unknown to this generated schema version.
  @java.lang.Override
  public final com.google.protobuf.UnknownFieldSet getUnknownFields() {
    return this.unknownFields;
  }

  // Message descriptor for google.cloud.retail.v2.BigQuerySource (from import_config.proto).
  public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
    return com.google.cloud.retail.v2.ImportConfigProto
        .internal_static_google_cloud_retail_v2_BigQuerySource_descriptor;
  }

  // Maps proto field names to the generated accessors for reflection-based field access.
  @java.lang.Override
  protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
      internalGetFieldAccessorTable() {
    return com.google.cloud.retail.v2.ImportConfigProto
        .internal_static_google_cloud_retail_v2_BigQuerySource_fieldAccessorTable
        .ensureFieldAccessorsInitialized(
            com.google.cloud.retail.v2.BigQuerySource.class,
            com.google.cloud.retail.v2.BigQuerySource.Builder.class);
  }
73 
74   private int partitionCase_ = 0;
75   private java.lang.Object partition_;
76 
  /** Discriminator for the {@code partition} oneof: which member, if any, is currently set. */
  public enum PartitionCase
      implements
          com.google.protobuf.Internal.EnumLite,
          com.google.protobuf.AbstractMessage.InternalOneOfEnum {
    PARTITION_DATE(6),
    PARTITION_NOT_SET(0);
    // Wire field number of the oneof member; 0 means no member is set.
    private final int value;

    private PartitionCase(int value) {
      this.value = value;
    }
    /**
     * @param value The number of the enum to look for.
     * @return The enum associated with the given number.
     * @deprecated Use {@link #forNumber(int)} instead.
     */
    @java.lang.Deprecated
    public static PartitionCase valueOf(int value) {
      return forNumber(value);
    }

    /** Maps a field number to its case constant; returns null for unrecognized numbers. */
    public static PartitionCase forNumber(int value) {
      switch (value) {
        case 6:
          return PARTITION_DATE;
        case 0:
          return PARTITION_NOT_SET;
        default:
          return null;
      }
    }

    /** The wire field number associated with this case. */
    public int getNumber() {
      return this.value;
    }
  };

  /** Which member of the {@code partition} oneof is set on this message. */
  public PartitionCase getPartitionCase() {
    return PartitionCase.forNumber(partitionCase_);
  }
117 
118   public static final int PARTITION_DATE_FIELD_NUMBER = 6;
119   /**
120    *
121    *
122    * <pre>
123    * BigQuery time partitioned table's _PARTITIONDATE in YYYY-MM-DD format.
124    * Only supported in
125    * [ImportProductsRequest][google.cloud.retail.v2.ImportProductsRequest].
126    * </pre>
127    *
128    * <code>.google.type.Date partition_date = 6;</code>
129    *
130    * @return Whether the partitionDate field is set.
131    */
  @java.lang.Override
  public boolean hasPartitionDate() {
    // 6 is the partition_date field number within the partition oneof.
    return partitionCase_ == 6;
  }
  /**
   *
   *
   * <pre>
   * BigQuery time partitioned table's _PARTITIONDATE in YYYY-MM-DD format.
   * Only supported in
   * [ImportProductsRequest][google.cloud.retail.v2.ImportProductsRequest].
   * </pre>
   *
   * <code>.google.type.Date partition_date = 6;</code>
   *
   * @return The partitionDate.
   */
  @java.lang.Override
  public com.google.type.Date getPartitionDate() {
    if (partitionCase_ == 6) {
      return (com.google.type.Date) partition_;
    }
    // Oneof not set to partition_date: fall back to the shared default Date instance.
    return com.google.type.Date.getDefaultInstance();
  }
  /**
   *
   *
   * <pre>
   * BigQuery time partitioned table's _PARTITIONDATE in YYYY-MM-DD format.
   * Only supported in
   * [ImportProductsRequest][google.cloud.retail.v2.ImportProductsRequest].
   * </pre>
   *
   * <code>.google.type.Date partition_date = 6;</code>
   */
  @java.lang.Override
  public com.google.type.DateOrBuilder getPartitionDateOrBuilder() {
    if (partitionCase_ == 6) {
      return (com.google.type.Date) partition_;
    }
    return com.google.type.Date.getDefaultInstance();
  }
174 
175   public static final int PROJECT_ID_FIELD_NUMBER = 5;
176 
177   @SuppressWarnings("serial")
178   private volatile java.lang.Object projectId_ = "";
179   /**
180    *
181    *
182    * <pre>
183    * The project ID (can be project # or ID) that the BigQuery source is in with
184    * a length limit of 128 characters. If not specified, inherits the project
185    * ID from the parent request.
186    * </pre>
187    *
188    * <code>string project_id = 5;</code>
189    *
190    * @return The projectId.
191    */
192   @java.lang.Override
getProjectId()193   public java.lang.String getProjectId() {
194     java.lang.Object ref = projectId_;
195     if (ref instanceof java.lang.String) {
196       return (java.lang.String) ref;
197     } else {
198       com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
199       java.lang.String s = bs.toStringUtf8();
200       projectId_ = s;
201       return s;
202     }
203   }
204   /**
205    *
206    *
207    * <pre>
208    * The project ID (can be project # or ID) that the BigQuery source is in with
209    * a length limit of 128 characters. If not specified, inherits the project
210    * ID from the parent request.
211    * </pre>
212    *
213    * <code>string project_id = 5;</code>
214    *
215    * @return The bytes for projectId.
216    */
217   @java.lang.Override
getProjectIdBytes()218   public com.google.protobuf.ByteString getProjectIdBytes() {
219     java.lang.Object ref = projectId_;
220     if (ref instanceof java.lang.String) {
221       com.google.protobuf.ByteString b =
222           com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
223       projectId_ = b;
224       return b;
225     } else {
226       return (com.google.protobuf.ByteString) ref;
227     }
228   }
229 
230   public static final int DATASET_ID_FIELD_NUMBER = 1;
231 
232   @SuppressWarnings("serial")
233   private volatile java.lang.Object datasetId_ = "";
234   /**
235    *
236    *
237    * <pre>
238    * Required. The BigQuery data set to copy the data from with a length limit
239    * of 1,024 characters.
240    * </pre>
241    *
242    * <code>string dataset_id = 1 [(.google.api.field_behavior) = REQUIRED];</code>
243    *
244    * @return The datasetId.
245    */
246   @java.lang.Override
getDatasetId()247   public java.lang.String getDatasetId() {
248     java.lang.Object ref = datasetId_;
249     if (ref instanceof java.lang.String) {
250       return (java.lang.String) ref;
251     } else {
252       com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
253       java.lang.String s = bs.toStringUtf8();
254       datasetId_ = s;
255       return s;
256     }
257   }
258   /**
259    *
260    *
261    * <pre>
262    * Required. The BigQuery data set to copy the data from with a length limit
263    * of 1,024 characters.
264    * </pre>
265    *
266    * <code>string dataset_id = 1 [(.google.api.field_behavior) = REQUIRED];</code>
267    *
268    * @return The bytes for datasetId.
269    */
270   @java.lang.Override
getDatasetIdBytes()271   public com.google.protobuf.ByteString getDatasetIdBytes() {
272     java.lang.Object ref = datasetId_;
273     if (ref instanceof java.lang.String) {
274       com.google.protobuf.ByteString b =
275           com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
276       datasetId_ = b;
277       return b;
278     } else {
279       return (com.google.protobuf.ByteString) ref;
280     }
281   }
282 
283   public static final int TABLE_ID_FIELD_NUMBER = 2;
284 
285   @SuppressWarnings("serial")
286   private volatile java.lang.Object tableId_ = "";
287   /**
288    *
289    *
290    * <pre>
291    * Required. The BigQuery table to copy the data from with a length limit of
292    * 1,024 characters.
293    * </pre>
294    *
295    * <code>string table_id = 2 [(.google.api.field_behavior) = REQUIRED];</code>
296    *
297    * @return The tableId.
298    */
299   @java.lang.Override
getTableId()300   public java.lang.String getTableId() {
301     java.lang.Object ref = tableId_;
302     if (ref instanceof java.lang.String) {
303       return (java.lang.String) ref;
304     } else {
305       com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
306       java.lang.String s = bs.toStringUtf8();
307       tableId_ = s;
308       return s;
309     }
310   }
311   /**
312    *
313    *
314    * <pre>
315    * Required. The BigQuery table to copy the data from with a length limit of
316    * 1,024 characters.
317    * </pre>
318    *
319    * <code>string table_id = 2 [(.google.api.field_behavior) = REQUIRED];</code>
320    *
321    * @return The bytes for tableId.
322    */
323   @java.lang.Override
getTableIdBytes()324   public com.google.protobuf.ByteString getTableIdBytes() {
325     java.lang.Object ref = tableId_;
326     if (ref instanceof java.lang.String) {
327       com.google.protobuf.ByteString b =
328           com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
329       tableId_ = b;
330       return b;
331     } else {
332       return (com.google.protobuf.ByteString) ref;
333     }
334   }
335 
336   public static final int GCS_STAGING_DIR_FIELD_NUMBER = 3;
337 
338   @SuppressWarnings("serial")
339   private volatile java.lang.Object gcsStagingDir_ = "";
340   /**
341    *
342    *
343    * <pre>
344    * Intermediate Cloud Storage directory used for the import with a length
345    * limit of 2,000 characters. Can be specified if one wants to have the
346    * BigQuery export to a specific Cloud Storage directory.
347    * </pre>
348    *
349    * <code>string gcs_staging_dir = 3;</code>
350    *
351    * @return The gcsStagingDir.
352    */
353   @java.lang.Override
getGcsStagingDir()354   public java.lang.String getGcsStagingDir() {
355     java.lang.Object ref = gcsStagingDir_;
356     if (ref instanceof java.lang.String) {
357       return (java.lang.String) ref;
358     } else {
359       com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
360       java.lang.String s = bs.toStringUtf8();
361       gcsStagingDir_ = s;
362       return s;
363     }
364   }
365   /**
366    *
367    *
368    * <pre>
369    * Intermediate Cloud Storage directory used for the import with a length
370    * limit of 2,000 characters. Can be specified if one wants to have the
371    * BigQuery export to a specific Cloud Storage directory.
372    * </pre>
373    *
374    * <code>string gcs_staging_dir = 3;</code>
375    *
376    * @return The bytes for gcsStagingDir.
377    */
378   @java.lang.Override
getGcsStagingDirBytes()379   public com.google.protobuf.ByteString getGcsStagingDirBytes() {
380     java.lang.Object ref = gcsStagingDir_;
381     if (ref instanceof java.lang.String) {
382       com.google.protobuf.ByteString b =
383           com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
384       gcsStagingDir_ = b;
385       return b;
386     } else {
387       return (com.google.protobuf.ByteString) ref;
388     }
389   }
390 
391   public static final int DATA_SCHEMA_FIELD_NUMBER = 4;
392 
393   @SuppressWarnings("serial")
394   private volatile java.lang.Object dataSchema_ = "";
395   /**
396    *
397    *
398    * <pre>
399    * The schema to use when parsing the data from the source.
400    * Supported values for product imports:
401    * * `product` (default): One JSON [Product][google.cloud.retail.v2.Product]
402    * per line. Each product must
403    *   have a valid [Product.id][google.cloud.retail.v2.Product.id].
404    * * `product_merchant_center`: See [Importing catalog data from Merchant
405    *   Center](https://cloud.google.com/retail/recommendations-ai/docs/upload-catalog#mc).
406    * Supported values for user events imports:
407    * * `user_event` (default): One JSON
408    * [UserEvent][google.cloud.retail.v2.UserEvent] per line.
409    * * `user_event_ga360`:
410    *   The schema is available here:
411    *   https://support.google.com/analytics/answer/3437719.
412    * * `user_event_ga4`:
413    *   The schema is available here:
414    *   https://support.google.com/analytics/answer/7029846.
415    * Supported values for autocomplete imports:
416    * * `suggestions` (default): One JSON completion suggestion per line.
417    * * `denylist`:  One JSON deny suggestion per line.
418    * * `allowlist`:  One JSON allow suggestion per line.
419    * </pre>
420    *
421    * <code>string data_schema = 4;</code>
422    *
423    * @return The dataSchema.
424    */
425   @java.lang.Override
getDataSchema()426   public java.lang.String getDataSchema() {
427     java.lang.Object ref = dataSchema_;
428     if (ref instanceof java.lang.String) {
429       return (java.lang.String) ref;
430     } else {
431       com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
432       java.lang.String s = bs.toStringUtf8();
433       dataSchema_ = s;
434       return s;
435     }
436   }
437   /**
438    *
439    *
440    * <pre>
441    * The schema to use when parsing the data from the source.
442    * Supported values for product imports:
443    * * `product` (default): One JSON [Product][google.cloud.retail.v2.Product]
444    * per line. Each product must
445    *   have a valid [Product.id][google.cloud.retail.v2.Product.id].
446    * * `product_merchant_center`: See [Importing catalog data from Merchant
447    *   Center](https://cloud.google.com/retail/recommendations-ai/docs/upload-catalog#mc).
448    * Supported values for user events imports:
449    * * `user_event` (default): One JSON
450    * [UserEvent][google.cloud.retail.v2.UserEvent] per line.
451    * * `user_event_ga360`:
452    *   The schema is available here:
453    *   https://support.google.com/analytics/answer/3437719.
454    * * `user_event_ga4`:
455    *   The schema is available here:
456    *   https://support.google.com/analytics/answer/7029846.
457    * Supported values for autocomplete imports:
458    * * `suggestions` (default): One JSON completion suggestion per line.
459    * * `denylist`:  One JSON deny suggestion per line.
460    * * `allowlist`:  One JSON allow suggestion per line.
461    * </pre>
462    *
463    * <code>string data_schema = 4;</code>
464    *
465    * @return The bytes for dataSchema.
466    */
467   @java.lang.Override
getDataSchemaBytes()468   public com.google.protobuf.ByteString getDataSchemaBytes() {
469     java.lang.Object ref = dataSchema_;
470     if (ref instanceof java.lang.String) {
471       com.google.protobuf.ByteString b =
472           com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
473       dataSchema_ = b;
474       return b;
475     } else {
476       return (com.google.protobuf.ByteString) ref;
477     }
478   }
479 
480   private byte memoizedIsInitialized = -1;
481 
  @java.lang.Override
  public final boolean isInitialized() {
    // Memoized tri-state: -1 = not yet computed, 1 = initialized, 0 = not initialized.
    byte isInitialized = memoizedIsInitialized;
    if (isInitialized == 1) return true;
    if (isInitialized == 0) return false;

    // This message has no required sub-messages to validate, so it is always initialized.
    memoizedIsInitialized = 1;
    return true;
  }
491 
  // Serializes this message to the output stream. String fields are emitted only when
  // non-empty, in ascending field-number order (1..5), followed by the oneof and unknown fields.
  @java.lang.Override
  public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException {
    if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(datasetId_)) {
      com.google.protobuf.GeneratedMessageV3.writeString(output, 1, datasetId_);
    }
    if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(tableId_)) {
      com.google.protobuf.GeneratedMessageV3.writeString(output, 2, tableId_);
    }
    if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(gcsStagingDir_)) {
      com.google.protobuf.GeneratedMessageV3.writeString(output, 3, gcsStagingDir_);
    }
    if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(dataSchema_)) {
      com.google.protobuf.GeneratedMessageV3.writeString(output, 4, dataSchema_);
    }
    if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(projectId_)) {
      com.google.protobuf.GeneratedMessageV3.writeString(output, 5, projectId_);
    }
    // The partition oneof (field 6) is written only when partition_date is the active member.
    if (partitionCase_ == 6) {
      output.writeMessage(6, (com.google.type.Date) partition_);
    }
    getUnknownFields().writeTo(output);
  }
514 
  // Computes the serialized byte size; must mirror writeTo exactly. The result is memoized
  // in memoizedSize (-1 = not yet computed).
  @java.lang.Override
  public int getSerializedSize() {
    int size = memoizedSize;
    if (size != -1) return size;

    size = 0;
    if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(datasetId_)) {
      size += com.google.protobuf.GeneratedMessageV3.computeStringSize(1, datasetId_);
    }
    if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(tableId_)) {
      size += com.google.protobuf.GeneratedMessageV3.computeStringSize(2, tableId_);
    }
    if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(gcsStagingDir_)) {
      size += com.google.protobuf.GeneratedMessageV3.computeStringSize(3, gcsStagingDir_);
    }
    if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(dataSchema_)) {
      size += com.google.protobuf.GeneratedMessageV3.computeStringSize(4, dataSchema_);
    }
    if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(projectId_)) {
      size += com.google.protobuf.GeneratedMessageV3.computeStringSize(5, projectId_);
    }
    // partition_date contributes only when the oneof is set to field 6.
    if (partitionCase_ == 6) {
      size +=
          com.google.protobuf.CodedOutputStream.computeMessageSize(
              6, (com.google.type.Date) partition_);
    }
    size += getUnknownFields().getSerializedSize();
    memoizedSize = size;
    return size;
  }
545 
  // Value equality over all fields, the oneof case (and its payload), and unknown fields.
  @java.lang.Override
  public boolean equals(final java.lang.Object obj) {
    if (obj == this) {
      return true;
    }
    if (!(obj instanceof com.google.cloud.retail.v2.BigQuerySource)) {
      return super.equals(obj);
    }
    com.google.cloud.retail.v2.BigQuerySource other =
        (com.google.cloud.retail.v2.BigQuerySource) obj;

    if (!getProjectId().equals(other.getProjectId())) return false;
    if (!getDatasetId().equals(other.getDatasetId())) return false;
    if (!getTableId().equals(other.getTableId())) return false;
    if (!getGcsStagingDir().equals(other.getGcsStagingDir())) return false;
    if (!getDataSchema().equals(other.getDataSchema())) return false;
    // Compare the oneof case first, then the payload of the active member.
    if (!getPartitionCase().equals(other.getPartitionCase())) return false;
    switch (partitionCase_) {
      case 6:
        if (!getPartitionDate().equals(other.getPartitionDate())) return false;
        break;
      case 0:
      default:
    }
    if (!getUnknownFields().equals(other.getUnknownFields())) return false;
    return true;
  }
573 
  // Hash over the same components equals() compares; memoized (0 = not yet computed).
  // Field numbers are folded in so distinct fields with equal values hash differently.
  @java.lang.Override
  public int hashCode() {
    if (memoizedHashCode != 0) {
      return memoizedHashCode;
    }
    int hash = 41;
    hash = (19 * hash) + getDescriptor().hashCode();
    hash = (37 * hash) + PROJECT_ID_FIELD_NUMBER;
    hash = (53 * hash) + getProjectId().hashCode();
    hash = (37 * hash) + DATASET_ID_FIELD_NUMBER;
    hash = (53 * hash) + getDatasetId().hashCode();
    hash = (37 * hash) + TABLE_ID_FIELD_NUMBER;
    hash = (53 * hash) + getTableId().hashCode();
    hash = (37 * hash) + GCS_STAGING_DIR_FIELD_NUMBER;
    hash = (53 * hash) + getGcsStagingDir().hashCode();
    hash = (37 * hash) + DATA_SCHEMA_FIELD_NUMBER;
    hash = (53 * hash) + getDataSchema().hashCode();
    // Only the active oneof member contributes to the hash.
    switch (partitionCase_) {
      case 6:
        hash = (37 * hash) + PARTITION_DATE_FIELD_NUMBER;
        hash = (53 * hash) + getPartitionDate().hashCode();
        break;
      case 0:
      default:
    }
    hash = (29 * hash) + getUnknownFields().hashCode();
    memoizedHashCode = hash;
    return hash;
  }
603 
  // ---- Static parsers: decode a BigQuerySource from various input forms. All throw
  // InvalidProtocolBufferException (byte-based) or IOException (stream-based) on bad input. ----
  public static com.google.cloud.retail.v2.BigQuerySource parseFrom(java.nio.ByteBuffer data)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data);
  }

  public static com.google.cloud.retail.v2.BigQuerySource parseFrom(
      java.nio.ByteBuffer data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data, extensionRegistry);
  }

  public static com.google.cloud.retail.v2.BigQuerySource parseFrom(
      com.google.protobuf.ByteString data)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data);
  }

  public static com.google.cloud.retail.v2.BigQuerySource parseFrom(
      com.google.protobuf.ByteString data,
      com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data, extensionRegistry);
  }

  public static com.google.cloud.retail.v2.BigQuerySource parseFrom(byte[] data)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data);
  }

  public static com.google.cloud.retail.v2.BigQuerySource parseFrom(
      byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data, extensionRegistry);
  }

  public static com.google.cloud.retail.v2.BigQuerySource parseFrom(java.io.InputStream input)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
  }

  public static com.google.cloud.retail.v2.BigQuerySource parseFrom(
      java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
        PARSER, input, extensionRegistry);
  }

  // Delimited variants read a length prefix first, for streams containing multiple messages.
  public static com.google.cloud.retail.v2.BigQuerySource parseDelimitedFrom(
      java.io.InputStream input) throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(PARSER, input);
  }

  public static com.google.cloud.retail.v2.BigQuerySource parseDelimitedFrom(
      java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(
        PARSER, input, extensionRegistry);
  }

  public static com.google.cloud.retail.v2.BigQuerySource parseFrom(
      com.google.protobuf.CodedInputStream input) throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
  }

  public static com.google.cloud.retail.v2.BigQuerySource parseFrom(
      com.google.protobuf.CodedInputStream input,
      com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
        PARSER, input, extensionRegistry);
  }
675 
  @java.lang.Override
  public Builder newBuilderForType() {
    return newBuilder();
  }

  // A new builder initialized to default field values.
  public static Builder newBuilder() {
    return DEFAULT_INSTANCE.toBuilder();
  }

  // A new builder pre-populated from an existing message.
  public static Builder newBuilder(com.google.cloud.retail.v2.BigQuerySource prototype) {
    return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
  }

  @java.lang.Override
  public Builder toBuilder() {
    // Skip the mergeFrom when this is the default instance (nothing to copy).
    return this == DEFAULT_INSTANCE ? new Builder() : new Builder().mergeFrom(this);
  }

  // Runtime hook: creates a builder attached to a parent for nested-builder change propagation.
  @java.lang.Override
  protected Builder newBuilderForType(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
    Builder builder = new Builder(parent);
    return builder;
  }
699   /**
700    *
701    *
702    * <pre>
703    * BigQuery source import data from.
704    * </pre>
705    *
706    * Protobuf type {@code google.cloud.retail.v2.BigQuerySource}
707    */
708   public static final class Builder extends com.google.protobuf.GeneratedMessageV3.Builder<Builder>
709       implements
710       // @@protoc_insertion_point(builder_implements:google.cloud.retail.v2.BigQuerySource)
711       com.google.cloud.retail.v2.BigQuerySourceOrBuilder {
    // Same descriptor as the message class; exposed for reflection on the builder.
    public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
      return com.google.cloud.retail.v2.ImportConfigProto
          .internal_static_google_cloud_retail_v2_BigQuerySource_descriptor;
    }

    @java.lang.Override
    protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
        internalGetFieldAccessorTable() {
      return com.google.cloud.retail.v2.ImportConfigProto
          .internal_static_google_cloud_retail_v2_BigQuerySource_fieldAccessorTable
          .ensureFieldAccessorsInitialized(
              com.google.cloud.retail.v2.BigQuerySource.class,
              com.google.cloud.retail.v2.BigQuerySource.Builder.class);
    }

    // Construct using com.google.cloud.retail.v2.BigQuerySource.newBuilder()
    private Builder() {}

    // Parent-attached constructor used by the runtime for nested builders.
    private Builder(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
      super(parent);
    }
733 
    // Resets every field, the has-bits, and the partition oneof to their default state.
    @java.lang.Override
    public Builder clear() {
      super.clear();
      bitField0_ = 0;
      if (partitionDateBuilder_ != null) {
        partitionDateBuilder_.clear();
      }
      projectId_ = "";
      datasetId_ = "";
      tableId_ = "";
      gcsStagingDir_ = "";
      dataSchema_ = "";
      partitionCase_ = 0;
      partition_ = null;
      return this;
    }
750 
    @java.lang.Override
    public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() {
      return com.google.cloud.retail.v2.ImportConfigProto
          .internal_static_google_cloud_retail_v2_BigQuerySource_descriptor;
    }

    // The immutable default instance this builder produces when nothing is set.
    @java.lang.Override
    public com.google.cloud.retail.v2.BigQuerySource getDefaultInstanceForType() {
      return com.google.cloud.retail.v2.BigQuerySource.getDefaultInstance();
    }
761 
    // Builds the message, throwing if it is not fully initialized.
    @java.lang.Override
    public com.google.cloud.retail.v2.BigQuerySource build() {
      com.google.cloud.retail.v2.BigQuerySource result = buildPartial();
      if (!result.isInitialized()) {
        throw newUninitializedMessageException(result);
      }
      return result;
    }

    // Builds without the initialization check; copies set fields and the oneof into the result.
    @java.lang.Override
    public com.google.cloud.retail.v2.BigQuerySource buildPartial() {
      com.google.cloud.retail.v2.BigQuerySource result =
          new com.google.cloud.retail.v2.BigQuerySource(this);
      if (bitField0_ != 0) {
        buildPartial0(result);
      }
      buildPartialOneofs(result);
      onBuilt();
      return result;
    }

    // Copies each explicitly-set scalar field; bits 0x2..0x20 track project_id, dataset_id,
    // table_id, gcs_staging_dir and data_schema respectively.
    // NOTE(review): bit 0x1 is not handled here — presumably reserved for the oneof
    // message builder; confirm against the full generated file.
    private void buildPartial0(com.google.cloud.retail.v2.BigQuerySource result) {
      int from_bitField0_ = bitField0_;
      if (((from_bitField0_ & 0x00000002) != 0)) {
        result.projectId_ = projectId_;
      }
      if (((from_bitField0_ & 0x00000004) != 0)) {
        result.datasetId_ = datasetId_;
      }
      if (((from_bitField0_ & 0x00000008) != 0)) {
        result.tableId_ = tableId_;
      }
      if (((from_bitField0_ & 0x00000010) != 0)) {
        result.gcsStagingDir_ = gcsStagingDir_;
      }
      if (((from_bitField0_ & 0x00000020) != 0)) {
        result.dataSchema_ = dataSchema_;
      }
    }

    // Copies the oneof case/payload; if a nested builder exists for partition_date, its
    // built message takes precedence over the raw partition_ value.
    private void buildPartialOneofs(com.google.cloud.retail.v2.BigQuerySource result) {
      result.partitionCase_ = partitionCase_;
      result.partition_ = this.partition_;
      if (partitionCase_ == 6 && partitionDateBuilder_ != null) {
        result.partition_ = partitionDateBuilder_.build();
      }
    }
809 
    /** Delegates to the generated-message builder's deep clone. */
    @java.lang.Override
    public Builder clone() {
      return super.clone();
    }
814 
    /** Reflective field set; forwarded to the base builder. */
    @java.lang.Override
    public Builder setField(
        com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
      return super.setField(field, value);
    }
820 
    /** Reflective field clear; forwarded to the base builder. */
    @java.lang.Override
    public Builder clearField(com.google.protobuf.Descriptors.FieldDescriptor field) {
      return super.clearField(field);
    }
825 
    /** Reflective oneof clear; forwarded to the base builder. */
    @java.lang.Override
    public Builder clearOneof(com.google.protobuf.Descriptors.OneofDescriptor oneof) {
      return super.clearOneof(oneof);
    }
830 
    /** Reflective repeated-field element set; forwarded to the base builder. */
    @java.lang.Override
    public Builder setRepeatedField(
        com.google.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) {
      return super.setRepeatedField(field, index, value);
    }
836 
    /** Reflective repeated-field append; forwarded to the base builder. */
    @java.lang.Override
    public Builder addRepeatedField(
        com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
      return super.addRepeatedField(field, value);
    }
842 
    @java.lang.Override
    public Builder mergeFrom(com.google.protobuf.Message other) {
      // Use the fast typed merge when possible; otherwise fall back to the
      // reflective descriptor-based merge in the base class.
      if (other instanceof com.google.cloud.retail.v2.BigQuerySource) {
        return mergeFrom((com.google.cloud.retail.v2.BigQuerySource) other);
      } else {
        super.mergeFrom(other);
        return this;
      }
    }
852 
    /**
     * Merges {@code other} into this builder using proto3 semantics: each
     * non-empty string field of {@code other} overwrites this builder's value,
     * and a set {@code partition} oneof in {@code other} is merged in.
     */
    public Builder mergeFrom(com.google.cloud.retail.v2.BigQuerySource other) {
      if (other == com.google.cloud.retail.v2.BigQuerySource.getDefaultInstance()) return this;
      if (!other.getProjectId().isEmpty()) {
        projectId_ = other.projectId_;
        bitField0_ |= 0x00000002;
        onChanged();
      }
      if (!other.getDatasetId().isEmpty()) {
        datasetId_ = other.datasetId_;
        bitField0_ |= 0x00000004;
        onChanged();
      }
      if (!other.getTableId().isEmpty()) {
        tableId_ = other.tableId_;
        bitField0_ |= 0x00000008;
        onChanged();
      }
      if (!other.getGcsStagingDir().isEmpty()) {
        gcsStagingDir_ = other.gcsStagingDir_;
        bitField0_ |= 0x00000010;
        onChanged();
      }
      if (!other.getDataSchema().isEmpty()) {
        dataSchema_ = other.dataSchema_;
        bitField0_ |= 0x00000020;
        onChanged();
      }
      // Merge the oneof: only partition_date (field 6) carries data.
      switch (other.getPartitionCase()) {
        case PARTITION_DATE:
          {
            mergePartitionDate(other.getPartitionDate());
            break;
          }
        case PARTITION_NOT_SET:
          {
            break;
          }
      }
      this.mergeUnknownFields(other.getUnknownFields());
      onChanged();
      return this;
    }
895 
    // Proto3 message with no required fields: always initialized.
    @java.lang.Override
    public final boolean isInitialized() {
      return true;
    }
900 
    /**
     * Parses wire-format bytes from {@code input} into this builder.
     *
     * <p>Each case label is a wire tag: (field_number << 3) | wire_type, where
     * wire type 2 is length-delimited. Tag 10 = dataset_id (field 1),
     * 18 = table_id (2), 26 = gcs_staging_dir (3), 34 = data_schema (4),
     * 42 = project_id (5), 50 = partition_date (6). Unknown tags are preserved
     * via parseUnknownField; tag 0 or an end-group tag terminates the loop.
     *
     * @throws java.io.IOException if the input is malformed or unreadable
     */
    @java.lang.Override
    public Builder mergeFrom(
        com.google.protobuf.CodedInputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      if (extensionRegistry == null) {
        throw new java.lang.NullPointerException();
      }
      try {
        boolean done = false;
        while (!done) {
          int tag = input.readTag();
          switch (tag) {
            case 0:
              done = true;
              break;
            case 10:
              {
                datasetId_ = input.readStringRequireUtf8();
                bitField0_ |= 0x00000004;
                break;
              } // case 10
            case 18:
              {
                tableId_ = input.readStringRequireUtf8();
                bitField0_ |= 0x00000008;
                break;
              } // case 18
            case 26:
              {
                gcsStagingDir_ = input.readStringRequireUtf8();
                bitField0_ |= 0x00000010;
                break;
              } // case 26
            case 34:
              {
                dataSchema_ = input.readStringRequireUtf8();
                bitField0_ |= 0x00000020;
                break;
              } // case 34
            case 42:
              {
                projectId_ = input.readStringRequireUtf8();
                bitField0_ |= 0x00000002;
                break;
              } // case 42
            case 50:
              {
                input.readMessage(getPartitionDateFieldBuilder().getBuilder(), extensionRegistry);
                partitionCase_ = 6;
                break;
              } // case 50
            default:
              {
                if (!super.parseUnknownField(input, extensionRegistry, tag)) {
                  done = true; // was an endgroup tag
                }
                break;
              } // default:
          } // switch (tag)
        } // while (!done)
      } catch (com.google.protobuf.InvalidProtocolBufferException e) {
        throw e.unwrapIOException();
      } finally {
        // Notify listeners even on partial parses so observed state is current.
        onChanged();
      } // finally
      return this;
    }
969 
    // Discriminator for the `partition` oneof: 0 = not set, 6 = partition_date.
    private int partitionCase_ = 0;
    // Current oneof value; a com.google.type.Date when partitionCase_ == 6.
    private java.lang.Object partition_;

    /** Returns which field of the {@code partition} oneof is set, if any. */
    public PartitionCase getPartitionCase() {
      return PartitionCase.forNumber(partitionCase_);
    }
976 
    /** Clears whichever field of the {@code partition} oneof is currently set. */
    public Builder clearPartition() {
      partitionCase_ = 0;
      partition_ = null;
      onChanged();
      return this;
    }

    // Has-bits for this builder's explicitly-set fields (see buildPartial0).
    private int bitField0_;
985 
    // Lazily-created nested builder for partition_date; null until first use.
    private com.google.protobuf.SingleFieldBuilderV3<
            com.google.type.Date, com.google.type.Date.Builder, com.google.type.DateOrBuilder>
        partitionDateBuilder_;
    /**
     *
     *
     * <pre>
     * BigQuery time partitioned table's _PARTITIONDATE in YYYY-MM-DD format.
     * Only supported in
     * [ImportProductsRequest][google.cloud.retail.v2.ImportProductsRequest].
     * </pre>
     *
     * <code>.google.type.Date partition_date = 6;</code>
     *
     * @return Whether the partitionDate field is set.
     */
    @java.lang.Override
    public boolean hasPartitionDate() {
      return partitionCase_ == 6;
    }
    /**
     *
     *
     * <pre>
     * BigQuery time partitioned table's _PARTITIONDATE in YYYY-MM-DD format.
     * Only supported in
     * [ImportProductsRequest][google.cloud.retail.v2.ImportProductsRequest].
     * </pre>
     *
     * <code>.google.type.Date partition_date = 6;</code>
     *
     * @return The partitionDate.
     */
    @java.lang.Override
    public com.google.type.Date getPartitionDate() {
      // Read from the nested builder when one exists, otherwise from the raw
      // oneof slot; default instance when the oneof holds another field.
      if (partitionDateBuilder_ == null) {
        if (partitionCase_ == 6) {
          return (com.google.type.Date) partition_;
        }
        return com.google.type.Date.getDefaultInstance();
      } else {
        if (partitionCase_ == 6) {
          return partitionDateBuilder_.getMessage();
        }
        return com.google.type.Date.getDefaultInstance();
      }
    }
    /**
     *
     *
     * <pre>
     * BigQuery time partitioned table's _PARTITIONDATE in YYYY-MM-DD format.
     * Only supported in
     * [ImportProductsRequest][google.cloud.retail.v2.ImportProductsRequest].
     * </pre>
     *
     * <code>.google.type.Date partition_date = 6;</code>
     */
    public Builder setPartitionDate(com.google.type.Date value) {
      if (partitionDateBuilder_ == null) {
        if (value == null) {
          throw new NullPointerException();
        }
        partition_ = value;
        onChanged();
      } else {
        partitionDateBuilder_.setMessage(value);
      }
      // Setting this field claims the oneof for partition_date (field 6).
      partitionCase_ = 6;
      return this;
    }
    /**
     *
     *
     * <pre>
     * BigQuery time partitioned table's _PARTITIONDATE in YYYY-MM-DD format.
     * Only supported in
     * [ImportProductsRequest][google.cloud.retail.v2.ImportProductsRequest].
     * </pre>
     *
     * <code>.google.type.Date partition_date = 6;</code>
     */
    public Builder setPartitionDate(com.google.type.Date.Builder builderForValue) {
      if (partitionDateBuilder_ == null) {
        partition_ = builderForValue.build();
        onChanged();
      } else {
        partitionDateBuilder_.setMessage(builderForValue.build());
      }
      partitionCase_ = 6;
      return this;
    }
    /**
     *
     *
     * <pre>
     * BigQuery time partitioned table's _PARTITIONDATE in YYYY-MM-DD format.
     * Only supported in
     * [ImportProductsRequest][google.cloud.retail.v2.ImportProductsRequest].
     * </pre>
     *
     * <code>.google.type.Date partition_date = 6;</code>
     */
    public Builder mergePartitionDate(com.google.type.Date value) {
      if (partitionDateBuilder_ == null) {
        // Field-merge only when an existing non-default Date occupies the oneof
        // (reference compare against the singleton default is intentional);
        // otherwise just take `value` wholesale.
        if (partitionCase_ == 6 && partition_ != com.google.type.Date.getDefaultInstance()) {
          partition_ =
              com.google.type.Date.newBuilder((com.google.type.Date) partition_)
                  .mergeFrom(value)
                  .buildPartial();
        } else {
          partition_ = value;
        }
        onChanged();
      } else {
        if (partitionCase_ == 6) {
          partitionDateBuilder_.mergeFrom(value);
        } else {
          partitionDateBuilder_.setMessage(value);
        }
      }
      partitionCase_ = 6;
      return this;
    }
    /**
     *
     *
     * <pre>
     * BigQuery time partitioned table's _PARTITIONDATE in YYYY-MM-DD format.
     * Only supported in
     * [ImportProductsRequest][google.cloud.retail.v2.ImportProductsRequest].
     * </pre>
     *
     * <code>.google.type.Date partition_date = 6;</code>
     */
    public Builder clearPartitionDate() {
      if (partitionDateBuilder_ == null) {
        // Only clear the oneof if partition_date is the field currently set.
        if (partitionCase_ == 6) {
          partitionCase_ = 0;
          partition_ = null;
          onChanged();
        }
      } else {
        if (partitionCase_ == 6) {
          partitionCase_ = 0;
          partition_ = null;
        }
        partitionDateBuilder_.clear();
      }
      return this;
    }
    /**
     *
     *
     * <pre>
     * BigQuery time partitioned table's _PARTITIONDATE in YYYY-MM-DD format.
     * Only supported in
     * [ImportProductsRequest][google.cloud.retail.v2.ImportProductsRequest].
     * </pre>
     *
     * <code>.google.type.Date partition_date = 6;</code>
     */
    public com.google.type.Date.Builder getPartitionDateBuilder() {
      // Forces lazy creation of the nested builder and marks the oneof set.
      return getPartitionDateFieldBuilder().getBuilder();
    }
    /**
     *
     *
     * <pre>
     * BigQuery time partitioned table's _PARTITIONDATE in YYYY-MM-DD format.
     * Only supported in
     * [ImportProductsRequest][google.cloud.retail.v2.ImportProductsRequest].
     * </pre>
     *
     * <code>.google.type.Date partition_date = 6;</code>
     */
    @java.lang.Override
    public com.google.type.DateOrBuilder getPartitionDateOrBuilder() {
      // Prefer the live nested builder; fall back to the stored message or
      // the default instance when the oneof isn't set to partition_date.
      if ((partitionCase_ == 6) && (partitionDateBuilder_ != null)) {
        return partitionDateBuilder_.getMessageOrBuilder();
      } else {
        if (partitionCase_ == 6) {
          return (com.google.type.Date) partition_;
        }
        return com.google.type.Date.getDefaultInstance();
      }
    }
    /**
     *
     *
     * <pre>
     * BigQuery time partitioned table's _PARTITIONDATE in YYYY-MM-DD format.
     * Only supported in
     * [ImportProductsRequest][google.cloud.retail.v2.ImportProductsRequest].
     * </pre>
     *
     * <code>.google.type.Date partition_date = 6;</code>
     */
    private com.google.protobuf.SingleFieldBuilderV3<
            com.google.type.Date, com.google.type.Date.Builder, com.google.type.DateOrBuilder>
        getPartitionDateFieldBuilder() {
      // Lazily create the nested builder, seeding it from partition_ when the
      // oneof already holds a Date. Ownership of the value moves into the
      // builder, so the raw slot is nulled out afterwards.
      if (partitionDateBuilder_ == null) {
        if (!(partitionCase_ == 6)) {
          partition_ = com.google.type.Date.getDefaultInstance();
        }
        partitionDateBuilder_ =
            new com.google.protobuf.SingleFieldBuilderV3<
                com.google.type.Date, com.google.type.Date.Builder, com.google.type.DateOrBuilder>(
                (com.google.type.Date) partition_, getParentForChildren(), isClean());
        partition_ = null;
      }
      // Accessing the field builder claims the oneof for partition_date.
      partitionCase_ = 6;
      onChanged();
      return partitionDateBuilder_;
    }
1201 
    // Stored as String or ByteString; decoded lazily and cached (see getter).
    private java.lang.Object projectId_ = "";
    /**
     *
     *
     * <pre>
     * The project ID (can be project # or ID) that the BigQuery source is in with
     * a length limit of 128 characters. If not specified, inherits the project
     * ID from the parent request.
     * </pre>
     *
     * <code>string project_id = 5;</code>
     *
     * @return The projectId.
     */
    public java.lang.String getProjectId() {
      java.lang.Object ref = projectId_;
      if (!(ref instanceof java.lang.String)) {
        // Lazily decode the UTF-8 ByteString and cache the String form.
        com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
        java.lang.String s = bs.toStringUtf8();
        projectId_ = s;
        return s;
      } else {
        return (java.lang.String) ref;
      }
    }
    /**
     *
     *
     * <pre>
     * The project ID (can be project # or ID) that the BigQuery source is in with
     * a length limit of 128 characters. If not specified, inherits the project
     * ID from the parent request.
     * </pre>
     *
     * <code>string project_id = 5;</code>
     *
     * @return The bytes for projectId.
     */
    public com.google.protobuf.ByteString getProjectIdBytes() {
      java.lang.Object ref = projectId_;
      if (ref instanceof String) {
        // Lazily encode the String and cache the ByteString form.
        com.google.protobuf.ByteString b =
            com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
        projectId_ = b;
        return b;
      } else {
        return (com.google.protobuf.ByteString) ref;
      }
    }
    /**
     *
     *
     * <pre>
     * The project ID (can be project # or ID) that the BigQuery source is in with
     * a length limit of 128 characters. If not specified, inherits the project
     * ID from the parent request.
     * </pre>
     *
     * <code>string project_id = 5;</code>
     *
     * @param value The projectId to set.
     * @return This builder for chaining.
     */
    public Builder setProjectId(java.lang.String value) {
      if (value == null) {
        throw new NullPointerException();
      }
      projectId_ = value;
      bitField0_ |= 0x00000002; // mark project_id as explicitly set
      onChanged();
      return this;
    }
    /**
     *
     *
     * <pre>
     * The project ID (can be project # or ID) that the BigQuery source is in with
     * a length limit of 128 characters. If not specified, inherits the project
     * ID from the parent request.
     * </pre>
     *
     * <code>string project_id = 5;</code>
     *
     * @return This builder for chaining.
     */
    public Builder clearProjectId() {
      projectId_ = getDefaultInstance().getProjectId();
      bitField0_ = (bitField0_ & ~0x00000002); // drop the has-bit
      onChanged();
      return this;
    }
    /**
     *
     *
     * <pre>
     * The project ID (can be project # or ID) that the BigQuery source is in with
     * a length limit of 128 characters. If not specified, inherits the project
     * ID from the parent request.
     * </pre>
     *
     * <code>string project_id = 5;</code>
     *
     * @param value The bytes for projectId to set.
     * @return This builder for chaining.
     */
    public Builder setProjectIdBytes(com.google.protobuf.ByteString value) {
      if (value == null) {
        throw new NullPointerException();
      }
      checkByteStringIsUtf8(value); // proto3 strings must be valid UTF-8
      projectId_ = value;
      bitField0_ |= 0x00000002;
      onChanged();
      return this;
    }
1317 
    // Stored as String or ByteString; decoded lazily and cached (see getter).
    private java.lang.Object datasetId_ = "";
    /**
     *
     *
     * <pre>
     * Required. The BigQuery data set to copy the data from with a length limit
     * of 1,024 characters.
     * </pre>
     *
     * <code>string dataset_id = 1 [(.google.api.field_behavior) = REQUIRED];</code>
     *
     * @return The datasetId.
     */
    public java.lang.String getDatasetId() {
      java.lang.Object ref = datasetId_;
      if (!(ref instanceof java.lang.String)) {
        // Lazily decode the UTF-8 ByteString and cache the String form.
        com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
        java.lang.String s = bs.toStringUtf8();
        datasetId_ = s;
        return s;
      } else {
        return (java.lang.String) ref;
      }
    }
    /**
     *
     *
     * <pre>
     * Required. The BigQuery data set to copy the data from with a length limit
     * of 1,024 characters.
     * </pre>
     *
     * <code>string dataset_id = 1 [(.google.api.field_behavior) = REQUIRED];</code>
     *
     * @return The bytes for datasetId.
     */
    public com.google.protobuf.ByteString getDatasetIdBytes() {
      java.lang.Object ref = datasetId_;
      if (ref instanceof String) {
        // Lazily encode the String and cache the ByteString form.
        com.google.protobuf.ByteString b =
            com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
        datasetId_ = b;
        return b;
      } else {
        return (com.google.protobuf.ByteString) ref;
      }
    }
    /**
     *
     *
     * <pre>
     * Required. The BigQuery data set to copy the data from with a length limit
     * of 1,024 characters.
     * </pre>
     *
     * <code>string dataset_id = 1 [(.google.api.field_behavior) = REQUIRED];</code>
     *
     * @param value The datasetId to set.
     * @return This builder for chaining.
     */
    public Builder setDatasetId(java.lang.String value) {
      if (value == null) {
        throw new NullPointerException();
      }
      datasetId_ = value;
      bitField0_ |= 0x00000004; // mark dataset_id as explicitly set
      onChanged();
      return this;
    }
    /**
     *
     *
     * <pre>
     * Required. The BigQuery data set to copy the data from with a length limit
     * of 1,024 characters.
     * </pre>
     *
     * <code>string dataset_id = 1 [(.google.api.field_behavior) = REQUIRED];</code>
     *
     * @return This builder for chaining.
     */
    public Builder clearDatasetId() {
      datasetId_ = getDefaultInstance().getDatasetId();
      bitField0_ = (bitField0_ & ~0x00000004); // drop the has-bit
      onChanged();
      return this;
    }
    /**
     *
     *
     * <pre>
     * Required. The BigQuery data set to copy the data from with a length limit
     * of 1,024 characters.
     * </pre>
     *
     * <code>string dataset_id = 1 [(.google.api.field_behavior) = REQUIRED];</code>
     *
     * @param value The bytes for datasetId to set.
     * @return This builder for chaining.
     */
    public Builder setDatasetIdBytes(com.google.protobuf.ByteString value) {
      if (value == null) {
        throw new NullPointerException();
      }
      checkByteStringIsUtf8(value); // proto3 strings must be valid UTF-8
      datasetId_ = value;
      bitField0_ |= 0x00000004;
      onChanged();
      return this;
    }
1428 
    // Stored as String or ByteString; decoded lazily and cached (see getter).
    private java.lang.Object tableId_ = "";
    /**
     *
     *
     * <pre>
     * Required. The BigQuery table to copy the data from with a length limit of
     * 1,024 characters.
     * </pre>
     *
     * <code>string table_id = 2 [(.google.api.field_behavior) = REQUIRED];</code>
     *
     * @return The tableId.
     */
    public java.lang.String getTableId() {
      java.lang.Object ref = tableId_;
      if (!(ref instanceof java.lang.String)) {
        // Lazily decode the UTF-8 ByteString and cache the String form.
        com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
        java.lang.String s = bs.toStringUtf8();
        tableId_ = s;
        return s;
      } else {
        return (java.lang.String) ref;
      }
    }
    /**
     *
     *
     * <pre>
     * Required. The BigQuery table to copy the data from with a length limit of
     * 1,024 characters.
     * </pre>
     *
     * <code>string table_id = 2 [(.google.api.field_behavior) = REQUIRED];</code>
     *
     * @return The bytes for tableId.
     */
    public com.google.protobuf.ByteString getTableIdBytes() {
      java.lang.Object ref = tableId_;
      if (ref instanceof String) {
        // Lazily encode the String and cache the ByteString form.
        com.google.protobuf.ByteString b =
            com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
        tableId_ = b;
        return b;
      } else {
        return (com.google.protobuf.ByteString) ref;
      }
    }
    /**
     *
     *
     * <pre>
     * Required. The BigQuery table to copy the data from with a length limit of
     * 1,024 characters.
     * </pre>
     *
     * <code>string table_id = 2 [(.google.api.field_behavior) = REQUIRED];</code>
     *
     * @param value The tableId to set.
     * @return This builder for chaining.
     */
    public Builder setTableId(java.lang.String value) {
      if (value == null) {
        throw new NullPointerException();
      }
      tableId_ = value;
      bitField0_ |= 0x00000008; // mark table_id as explicitly set
      onChanged();
      return this;
    }
    /**
     *
     *
     * <pre>
     * Required. The BigQuery table to copy the data from with a length limit of
     * 1,024 characters.
     * </pre>
     *
     * <code>string table_id = 2 [(.google.api.field_behavior) = REQUIRED];</code>
     *
     * @return This builder for chaining.
     */
    public Builder clearTableId() {
      tableId_ = getDefaultInstance().getTableId();
      bitField0_ = (bitField0_ & ~0x00000008); // drop the has-bit
      onChanged();
      return this;
    }
    /**
     *
     *
     * <pre>
     * Required. The BigQuery table to copy the data from with a length limit of
     * 1,024 characters.
     * </pre>
     *
     * <code>string table_id = 2 [(.google.api.field_behavior) = REQUIRED];</code>
     *
     * @param value The bytes for tableId to set.
     * @return This builder for chaining.
     */
    public Builder setTableIdBytes(com.google.protobuf.ByteString value) {
      if (value == null) {
        throw new NullPointerException();
      }
      checkByteStringIsUtf8(value); // proto3 strings must be valid UTF-8
      tableId_ = value;
      bitField0_ |= 0x00000008;
      onChanged();
      return this;
    }
1539 
    // Stored as String or ByteString; decoded lazily and cached (see getter).
    private java.lang.Object gcsStagingDir_ = "";
    /**
     *
     *
     * <pre>
     * Intermediate Cloud Storage directory used for the import with a length
     * limit of 2,000 characters. Can be specified if one wants to have the
     * BigQuery export to a specific Cloud Storage directory.
     * </pre>
     *
     * <code>string gcs_staging_dir = 3;</code>
     *
     * @return The gcsStagingDir.
     */
    public java.lang.String getGcsStagingDir() {
      java.lang.Object ref = gcsStagingDir_;
      if (!(ref instanceof java.lang.String)) {
        // Lazily decode the UTF-8 ByteString and cache the String form.
        com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
        java.lang.String s = bs.toStringUtf8();
        gcsStagingDir_ = s;
        return s;
      } else {
        return (java.lang.String) ref;
      }
    }
    /**
     *
     *
     * <pre>
     * Intermediate Cloud Storage directory used for the import with a length
     * limit of 2,000 characters. Can be specified if one wants to have the
     * BigQuery export to a specific Cloud Storage directory.
     * </pre>
     *
     * <code>string gcs_staging_dir = 3;</code>
     *
     * @return The bytes for gcsStagingDir.
     */
    public com.google.protobuf.ByteString getGcsStagingDirBytes() {
      java.lang.Object ref = gcsStagingDir_;
      if (ref instanceof String) {
        // Lazily encode the String and cache the ByteString form.
        com.google.protobuf.ByteString b =
            com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
        gcsStagingDir_ = b;
        return b;
      } else {
        return (com.google.protobuf.ByteString) ref;
      }
    }
    /**
     *
     *
     * <pre>
     * Intermediate Cloud Storage directory used for the import with a length
     * limit of 2,000 characters. Can be specified if one wants to have the
     * BigQuery export to a specific Cloud Storage directory.
     * </pre>
     *
     * <code>string gcs_staging_dir = 3;</code>
     *
     * @param value The gcsStagingDir to set.
     * @return This builder for chaining.
     */
    public Builder setGcsStagingDir(java.lang.String value) {
      if (value == null) {
        throw new NullPointerException();
      }
      gcsStagingDir_ = value;
      bitField0_ |= 0x00000010; // mark gcs_staging_dir as explicitly set
      onChanged();
      return this;
    }
    /**
     *
     *
     * <pre>
     * Intermediate Cloud Storage directory used for the import with a length
     * limit of 2,000 characters. Can be specified if one wants to have the
     * BigQuery export to a specific Cloud Storage directory.
     * </pre>
     *
     * <code>string gcs_staging_dir = 3;</code>
     *
     * @return This builder for chaining.
     */
    public Builder clearGcsStagingDir() {
      gcsStagingDir_ = getDefaultInstance().getGcsStagingDir();
      bitField0_ = (bitField0_ & ~0x00000010); // drop the has-bit
      onChanged();
      return this;
    }
    /**
     *
     *
     * <pre>
     * Intermediate Cloud Storage directory used for the import with a length
     * limit of 2,000 characters. Can be specified if one wants to have the
     * BigQuery export to a specific Cloud Storage directory.
     * </pre>
     *
     * <code>string gcs_staging_dir = 3;</code>
     *
     * @param value The bytes for gcsStagingDir to set.
     * @return This builder for chaining.
     */
    public Builder setGcsStagingDirBytes(com.google.protobuf.ByteString value) {
      if (value == null) {
        throw new NullPointerException();
      }
      checkByteStringIsUtf8(value); // proto3 strings must be valid UTF-8
      gcsStagingDir_ = value;
      bitField0_ |= 0x00000010;
      onChanged();
      return this;
    }
1655 
    // Stored as String or ByteString; decoded lazily and cached (see getter).
    private java.lang.Object dataSchema_ = "";
    /**
     *
     *
     * <pre>
     * The schema to use when parsing the data from the source.
     * Supported values for product imports:
     * * `product` (default): One JSON [Product][google.cloud.retail.v2.Product]
     * per line. Each product must
     *   have a valid [Product.id][google.cloud.retail.v2.Product.id].
     * * `product_merchant_center`: See [Importing catalog data from Merchant
     *   Center](https://cloud.google.com/retail/recommendations-ai/docs/upload-catalog#mc).
     * Supported values for user events imports:
     * * `user_event` (default): One JSON
     * [UserEvent][google.cloud.retail.v2.UserEvent] per line.
     * * `user_event_ga360`:
     *   The schema is available here:
     *   https://support.google.com/analytics/answer/3437719.
     * * `user_event_ga4`:
     *   The schema is available here:
     *   https://support.google.com/analytics/answer/7029846.
     * Supported values for autocomplete imports:
     * * `suggestions` (default): One JSON completion suggestion per line.
     * * `denylist`:  One JSON deny suggestion per line.
     * * `allowlist`:  One JSON allow suggestion per line.
     * </pre>
     *
     * <code>string data_schema = 4;</code>
     *
     * @return The dataSchema.
     */
    public java.lang.String getDataSchema() {
      java.lang.Object ref = dataSchema_;
      if (!(ref instanceof java.lang.String)) {
        // Lazily decode the UTF-8 ByteString and cache the String form.
        com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
        java.lang.String s = bs.toStringUtf8();
        dataSchema_ = s;
        return s;
      } else {
        return (java.lang.String) ref;
      }
    }
1698     /**
1699      *
1700      *
1701      * <pre>
1702      * The schema to use when parsing the data from the source.
1703      * Supported values for product imports:
1704      * * `product` (default): One JSON [Product][google.cloud.retail.v2.Product]
1705      * per line. Each product must
1706      *   have a valid [Product.id][google.cloud.retail.v2.Product.id].
1707      * * `product_merchant_center`: See [Importing catalog data from Merchant
1708      *   Center](https://cloud.google.com/retail/recommendations-ai/docs/upload-catalog#mc).
1709      * Supported values for user events imports:
1710      * * `user_event` (default): One JSON
1711      * [UserEvent][google.cloud.retail.v2.UserEvent] per line.
1712      * * `user_event_ga360`:
1713      *   The schema is available here:
1714      *   https://support.google.com/analytics/answer/3437719.
1715      * * `user_event_ga4`:
1716      *   The schema is available here:
1717      *   https://support.google.com/analytics/answer/7029846.
1718      * Supported values for autocomplete imports:
1719      * * `suggestions` (default): One JSON completion suggestion per line.
1720      * * `denylist`:  One JSON deny suggestion per line.
1721      * * `allowlist`:  One JSON allow suggestion per line.
1722      * </pre>
1723      *
1724      * <code>string data_schema = 4;</code>
1725      *
1726      * @return The bytes for dataSchema.
1727      */
getDataSchemaBytes()1728     public com.google.protobuf.ByteString getDataSchemaBytes() {
1729       java.lang.Object ref = dataSchema_;
1730       if (ref instanceof String) {
1731         com.google.protobuf.ByteString b =
1732             com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
1733         dataSchema_ = b;
1734         return b;
1735       } else {
1736         return (com.google.protobuf.ByteString) ref;
1737       }
1738     }
1739     /**
1740      *
1741      *
1742      * <pre>
1743      * The schema to use when parsing the data from the source.
1744      * Supported values for product imports:
1745      * * `product` (default): One JSON [Product][google.cloud.retail.v2.Product]
1746      * per line. Each product must
1747      *   have a valid [Product.id][google.cloud.retail.v2.Product.id].
1748      * * `product_merchant_center`: See [Importing catalog data from Merchant
1749      *   Center](https://cloud.google.com/retail/recommendations-ai/docs/upload-catalog#mc).
1750      * Supported values for user events imports:
1751      * * `user_event` (default): One JSON
1752      * [UserEvent][google.cloud.retail.v2.UserEvent] per line.
1753      * * `user_event_ga360`:
1754      *   The schema is available here:
1755      *   https://support.google.com/analytics/answer/3437719.
1756      * * `user_event_ga4`:
1757      *   The schema is available here:
1758      *   https://support.google.com/analytics/answer/7029846.
1759      * Supported values for autocomplete imports:
1760      * * `suggestions` (default): One JSON completion suggestion per line.
1761      * * `denylist`:  One JSON deny suggestion per line.
1762      * * `allowlist`:  One JSON allow suggestion per line.
1763      * </pre>
1764      *
1765      * <code>string data_schema = 4;</code>
1766      *
1767      * @param value The dataSchema to set.
1768      * @return This builder for chaining.
1769      */
setDataSchema(java.lang.String value)1770     public Builder setDataSchema(java.lang.String value) {
1771       if (value == null) {
1772         throw new NullPointerException();
1773       }
1774       dataSchema_ = value;
1775       bitField0_ |= 0x00000020;
1776       onChanged();
1777       return this;
1778     }
1779     /**
1780      *
1781      *
1782      * <pre>
1783      * The schema to use when parsing the data from the source.
1784      * Supported values for product imports:
1785      * * `product` (default): One JSON [Product][google.cloud.retail.v2.Product]
1786      * per line. Each product must
1787      *   have a valid [Product.id][google.cloud.retail.v2.Product.id].
1788      * * `product_merchant_center`: See [Importing catalog data from Merchant
1789      *   Center](https://cloud.google.com/retail/recommendations-ai/docs/upload-catalog#mc).
1790      * Supported values for user events imports:
1791      * * `user_event` (default): One JSON
1792      * [UserEvent][google.cloud.retail.v2.UserEvent] per line.
1793      * * `user_event_ga360`:
1794      *   The schema is available here:
1795      *   https://support.google.com/analytics/answer/3437719.
1796      * * `user_event_ga4`:
1797      *   The schema is available here:
1798      *   https://support.google.com/analytics/answer/7029846.
1799      * Supported values for autocomplete imports:
1800      * * `suggestions` (default): One JSON completion suggestion per line.
1801      * * `denylist`:  One JSON deny suggestion per line.
1802      * * `allowlist`:  One JSON allow suggestion per line.
1803      * </pre>
1804      *
1805      * <code>string data_schema = 4;</code>
1806      *
1807      * @return This builder for chaining.
1808      */
clearDataSchema()1809     public Builder clearDataSchema() {
1810       dataSchema_ = getDefaultInstance().getDataSchema();
1811       bitField0_ = (bitField0_ & ~0x00000020);
1812       onChanged();
1813       return this;
1814     }
1815     /**
1816      *
1817      *
1818      * <pre>
1819      * The schema to use when parsing the data from the source.
1820      * Supported values for product imports:
1821      * * `product` (default): One JSON [Product][google.cloud.retail.v2.Product]
1822      * per line. Each product must
1823      *   have a valid [Product.id][google.cloud.retail.v2.Product.id].
1824      * * `product_merchant_center`: See [Importing catalog data from Merchant
1825      *   Center](https://cloud.google.com/retail/recommendations-ai/docs/upload-catalog#mc).
1826      * Supported values for user events imports:
1827      * * `user_event` (default): One JSON
1828      * [UserEvent][google.cloud.retail.v2.UserEvent] per line.
1829      * * `user_event_ga360`:
1830      *   The schema is available here:
1831      *   https://support.google.com/analytics/answer/3437719.
1832      * * `user_event_ga4`:
1833      *   The schema is available here:
1834      *   https://support.google.com/analytics/answer/7029846.
1835      * Supported values for autocomplete imports:
1836      * * `suggestions` (default): One JSON completion suggestion per line.
1837      * * `denylist`:  One JSON deny suggestion per line.
1838      * * `allowlist`:  One JSON allow suggestion per line.
1839      * </pre>
1840      *
1841      * <code>string data_schema = 4;</code>
1842      *
1843      * @param value The bytes for dataSchema to set.
1844      * @return This builder for chaining.
1845      */
setDataSchemaBytes(com.google.protobuf.ByteString value)1846     public Builder setDataSchemaBytes(com.google.protobuf.ByteString value) {
1847       if (value == null) {
1848         throw new NullPointerException();
1849       }
1850       checkByteStringIsUtf8(value);
1851       dataSchema_ = value;
1852       bitField0_ |= 0x00000020;
1853       onChanged();
1854       return this;
1855     }
1856 
    @java.lang.Override
    public final Builder setUnknownFields(final com.google.protobuf.UnknownFieldSet unknownFields) {
      // Pure delegation to GeneratedMessageV3.Builder: replaces any previously
      // stored unknown fields wholesale.
      return super.setUnknownFields(unknownFields);
    }
1861 
    @java.lang.Override
    public final Builder mergeUnknownFields(
        final com.google.protobuf.UnknownFieldSet unknownFields) {
      // Pure delegation to GeneratedMessageV3.Builder: merges the given unknown
      // fields into those already accumulated, rather than replacing them.
      return super.mergeUnknownFields(unknownFields);
    }
1867 
1868     // @@protoc_insertion_point(builder_scope:google.cloud.retail.v2.BigQuerySource)
1869   }
1870 
1871   // @@protoc_insertion_point(class_scope:google.cloud.retail.v2.BigQuerySource)
1872   private static final com.google.cloud.retail.v2.BigQuerySource DEFAULT_INSTANCE;
1873 
1874   static {
1875     DEFAULT_INSTANCE = new com.google.cloud.retail.v2.BigQuerySource();
1876   }
1877 
getDefaultInstance()1878   public static com.google.cloud.retail.v2.BigQuerySource getDefaultInstance() {
1879     return DEFAULT_INSTANCE;
1880   }
1881 
  private static final com.google.protobuf.Parser<BigQuerySource> PARSER =
      new com.google.protobuf.AbstractParser<BigQuerySource>() {
        @java.lang.Override
        public BigQuerySource parsePartialFrom(
            com.google.protobuf.CodedInputStream input,
            com.google.protobuf.ExtensionRegistryLite extensionRegistry)
            throws com.google.protobuf.InvalidProtocolBufferException {
          // Parse into a fresh builder; on any failure, attach whatever was
          // parsed so far as the "unfinished message" so callers can inspect it.
          Builder builder = newBuilder();
          try {
            builder.mergeFrom(input, extensionRegistry);
          } catch (com.google.protobuf.InvalidProtocolBufferException e) {
            throw e.setUnfinishedMessage(builder.buildPartial());
          } catch (com.google.protobuf.UninitializedMessageException e) {
            // Missing required fields: rethrow in the checked exception type
            // declared by this method.
            throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial());
          } catch (java.io.IOException e) {
            // Wrap low-level stream errors in the protobuf exception type.
            throw new com.google.protobuf.InvalidProtocolBufferException(e)
                .setUnfinishedMessage(builder.buildPartial());
          }
          return builder.buildPartial();
        }
      };
1903 
  /** Returns the shared parser for {@code BigQuerySource} messages. */
  public static com.google.protobuf.Parser<BigQuerySource> parser() {
    return PARSER;
  }
1907 
  @java.lang.Override
  public com.google.protobuf.Parser<BigQuerySource> getParserForType() {
    // Same shared parser as the static parser() accessor.
    return PARSER;
  }
1912 
  @java.lang.Override
  public com.google.cloud.retail.v2.BigQuerySource getDefaultInstanceForType() {
    // Instance-level accessor required by the Message interface; returns the
    // same singleton as the static getDefaultInstance().
    return DEFAULT_INSTANCE;
  }
1917 }
1918