• Home
  • Line#
  • Scopes#
  • Navigate#
  • Raw
  • Download
1 /*
2  * Copyright 2020 Google LLC
3  *
4  * Licensed under the Apache License, Version 2.0 (the "License");
5  * you may not use this file except in compliance with the License.
6  * You may obtain a copy of the License at
7  *
8  *     https://www.apache.org/licenses/LICENSE-2.0
9  *
10  * Unless required by applicable law or agreed to in writing, software
11  * distributed under the License is distributed on an "AS IS" BASIS,
12  * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13  * See the License for the specific language governing permissions and
14  * limitations under the License.
15  */
16 // Generated by the protocol buffer compiler.  DO NOT EDIT!
17 // source: google/cloud/automl/v1/io.proto
18 
19 package com.google.cloud.automl.v1;
20 
21 /**
22  *
23  *
24  * <pre>
25  * Input configuration for [AutoMl.ImportData][google.cloud.automl.v1.AutoMl.ImportData] action.
26  * The format of input depends on dataset_metadata the Dataset into which
27  * the import is happening has. As input source the
28  * [gcs_source][google.cloud.automl.v1.InputConfig.gcs_source]
29  * is expected, unless specified otherwise. Additionally any input .CSV file
30  * by itself must be 100MB or smaller, unless specified otherwise.
31  * If an "example" file (that is, image, video etc.) with identical content
32  * (even if it had different `GCS_FILE_PATH`) is mentioned multiple times, then
33  * its label, bounding boxes etc. are appended. The same file should be always
34  * provided with the same `ML_USE` and `GCS_FILE_PATH`, if it is not, then
35  * these values are nondeterministically selected from the given ones.
36  * The formats are represented in EBNF with commas being literal and with
37  * non-terminal symbols defined near the end of this comment. The formats are:
38  * &lt;h4&gt;AutoML Vision&lt;/h4&gt;
39  * &lt;div class="ds-selector-tabs"&gt;&lt;section&gt;&lt;h5&gt;Classification&lt;/h5&gt;
40  * See [Preparing your training
41  * data](https://cloud.google.com/vision/automl/docs/prepare) for more
42  * information.
43  * CSV file(s) with each line in format:
44  *     ML_USE,GCS_FILE_PATH,LABEL,LABEL,...
45  * *   `ML_USE` - Identifies the data set that the current row (file) applies
46  * to.
47  *     This value can be one of the following:
48  *     * `TRAIN` - Rows in this file are used to train the model.
49  *     * `TEST` - Rows in this file are used to test the model during training.
50  *     * `UNASSIGNED` - Rows in this file are not categorized. They are
51  *        Automatically divided into train and test data. 80% for training and
52  *        20% for testing.
53  * *   `GCS_FILE_PATH` - The Google Cloud Storage location of an image of up to
54  *      30MB in size. Supported extensions: .JPEG, .GIF, .PNG, .WEBP, .BMP,
55  *      .TIFF, .ICO.
56  * *   `LABEL` - A label that identifies the object in the image.
57  * For the `MULTICLASS` classification type, at most one `LABEL` is allowed
58  * per image. If an image has not yet been labeled, then it should be
59  * mentioned just once with no `LABEL`.
60  * Some sample rows:
61  *     TRAIN,gs://folder/image1.jpg,daisy
62  *     TEST,gs://folder/image2.jpg,dandelion,tulip,rose
63  *     UNASSIGNED,gs://folder/image3.jpg,daisy
64  *     UNASSIGNED,gs://folder/image4.jpg
65  * &lt;/section&gt;&lt;section&gt;&lt;h5&gt;Object Detection&lt;/h5&gt;
66  * See [Preparing your training
67  * data](https://cloud.google.com/vision/automl/object-detection/docs/prepare)
68  * for more information.
69  * A CSV file(s) with each line in format:
70  *     ML_USE,GCS_FILE_PATH,[LABEL],(BOUNDING_BOX | ,,,,,,,)
71  * *   `ML_USE` - Identifies the data set that the current row (file) applies
72  * to.
73  *     This value can be one of the following:
74  *     * `TRAIN` - Rows in this file are used to train the model.
75  *     * `TEST` - Rows in this file are used to test the model during training.
76  *     * `UNASSIGNED` - Rows in this file are not categorized. They are
77  *        Automatically divided into train and test data. 80% for training and
78  *        20% for testing.
79  * *  `GCS_FILE_PATH` - The Google Cloud Storage location of an image of up to
80  *     30MB in size. Supported extensions: .JPEG, .GIF, .PNG. Each image
81  *     is assumed to be exhaustively labeled.
82  * *  `LABEL` - A label that identifies the object in the image specified by the
83  *    `BOUNDING_BOX`.
84  * *  `BOUNDING BOX` - The vertices of an object in the example image.
85  *    The minimum allowed `BOUNDING_BOX` edge length is 0.01, and no more than
86  *    500 `BOUNDING_BOX` instances per image are allowed (one `BOUNDING_BOX`
87  *    per line). If an image has no looked for objects then it should be
88  *    mentioned just once with no LABEL and the ",,,,,,," in place of the
89  *   `BOUNDING_BOX`.
90  * **Four sample rows:**
91  *     TRAIN,gs://folder/image1.png,car,0.1,0.1,,,0.3,0.3,,
92  *     TRAIN,gs://folder/image1.png,bike,.7,.6,,,.8,.9,,
93  *     UNASSIGNED,gs://folder/im2.png,car,0.1,0.1,0.2,0.1,0.2,0.3,0.1,0.3
94  *     TEST,gs://folder/im3.png,,,,,,,,,
95  *   &lt;/section&gt;
96  * &lt;/div&gt;
97  * &lt;h4&gt;AutoML Video Intelligence&lt;/h4&gt;
98  * &lt;div class="ds-selector-tabs"&gt;&lt;section&gt;&lt;h5&gt;Classification&lt;/h5&gt;
99  * See [Preparing your training
100  * data](https://cloud.google.com/video-intelligence/automl/docs/prepare) for
101  * more information.
102  * CSV file(s) with each line in format:
103  *     ML_USE,GCS_FILE_PATH
104  * For `ML_USE`, do not use `VALIDATE`.
105  * `GCS_FILE_PATH` is the path to another .csv file that describes training
106  * example for a given `ML_USE`, using the following row format:
107  *     GCS_FILE_PATH,(LABEL,TIME_SEGMENT_START,TIME_SEGMENT_END | ,,)
108  * Here `GCS_FILE_PATH` leads to a video of up to 50GB in size and up
109  * to 3h duration. Supported extensions: .MOV, .MPEG4, .MP4, .AVI.
110  * `TIME_SEGMENT_START` and `TIME_SEGMENT_END` must be within the
111  * length of the video, and the end time must be after the start time. Any
112  * segment of a video which has one or more labels on it, is considered a
113  * hard negative for all other labels. Any segment with no labels on
114  * it is considered to be unknown. If a whole video is unknown, then
115  * it should be mentioned just once with ",," in place of `LABEL,
116  * TIME_SEGMENT_START,TIME_SEGMENT_END`.
117  * Sample top level CSV file:
118  *     TRAIN,gs://folder/train_videos.csv
119  *     TEST,gs://folder/test_videos.csv
120  *     UNASSIGNED,gs://folder/other_videos.csv
121  * Sample rows of a CSV file for a particular ML_USE:
122  *     gs://folder/video1.avi,car,120,180.000021
123  *     gs://folder/video1.avi,bike,150,180.000021
124  *     gs://folder/vid2.avi,car,0,60.5
125  *     gs://folder/vid3.avi,,,
126  * &lt;/section&gt;&lt;section&gt;&lt;h5&gt;Object Tracking&lt;/h5&gt;
127  * See [Preparing your training
128  * data](/video-intelligence/automl/object-tracking/docs/prepare) for more
129  * information.
130  * CSV file(s) with each line in format:
131  *     ML_USE,GCS_FILE_PATH
132  * For `ML_USE`, do not use `VALIDATE`.
133  * `GCS_FILE_PATH` is the path to another .csv file that describes training
134  * example for a given `ML_USE`, using the following row format:
135  *     GCS_FILE_PATH,LABEL,[INSTANCE_ID],TIMESTAMP,BOUNDING_BOX
136  * or
137  *     GCS_FILE_PATH,,,,,,,,,,
138  * Here `GCS_FILE_PATH` leads to a video of up to 50GB in size and up
139  * to 3h duration. Supported extensions: .MOV, .MPEG4, .MP4, .AVI.
140  * Providing `INSTANCE_ID`s can help to obtain a better model. When
141  * a specific labeled entity leaves the video frame, and shows up
142  * afterwards it is not required, albeit preferable, that the same
143  * `INSTANCE_ID` is given to it.
144  * `TIMESTAMP` must be within the length of the video, the
145  * `BOUNDING_BOX` is assumed to be drawn on the closest video's frame
146  * to the `TIMESTAMP`. Any mentioned by the `TIMESTAMP` frame is expected
147  * to be exhaustively labeled and no more than 500 `BOUNDING_BOX`-es per
148  * frame are allowed. If a whole video is unknown, then it should be
149  * mentioned just once with ",,,,,,,,,," in place of `LABEL,
150  * [INSTANCE_ID],TIMESTAMP,BOUNDING_BOX`.
151  * Sample top level CSV file:
152  *      TRAIN,gs://folder/train_videos.csv
153  *      TEST,gs://folder/test_videos.csv
154  *      UNASSIGNED,gs://folder/other_videos.csv
155  * Seven sample rows of a CSV file for a particular ML_USE:
156  *      gs://folder/video1.avi,car,1,12.10,0.8,0.8,0.9,0.8,0.9,0.9,0.8,0.9
157  *      gs://folder/video1.avi,car,1,12.90,0.4,0.8,0.5,0.8,0.5,0.9,0.4,0.9
158  *      gs://folder/video1.avi,car,2,12.10,.4,.2,.5,.2,.5,.3,.4,.3
159  *      gs://folder/video1.avi,car,2,12.90,.8,.2,,,.9,.3,,
160  *      gs://folder/video1.avi,bike,,12.50,.45,.45,,,.55,.55,,
161  *      gs://folder/video2.avi,car,1,0,.1,.9,,,.9,.1,,
162  *      gs://folder/video2.avi,,,,,,,,,,,
163  *   &lt;/section&gt;
164  * &lt;/div&gt;
165  * &lt;h4&gt;AutoML Natural Language&lt;/h4&gt;
166  * &lt;div class="ds-selector-tabs"&gt;&lt;section&gt;&lt;h5&gt;Entity Extraction&lt;/h5&gt;
167  * See [Preparing your training
168  * data](/natural-language/automl/entity-analysis/docs/prepare) for more
169  * information.
170  * One or more CSV file(s) with each line in the following format:
171  *     ML_USE,GCS_FILE_PATH
172  * *   `ML_USE` - Identifies the data set that the current row (file) applies
173  * to.
174  *     This value can be one of the following:
175  *     * `TRAIN` - Rows in this file are used to train the model.
176  *     * `TEST` - Rows in this file are used to test the model during training.
177  *     * `UNASSIGNED` - Rows in this file are not categorized. They are
178  *        Automatically divided into train and test data. 80% for training and
179  *        20% for testing.
180  *   `GCS_FILE_PATH` - Identifies a JSON Lines (.JSONL) file stored in
181  *      Google Cloud Storage that contains in-line text as documents
182  *      for model training.
183  * After the training data set has been determined from the `TRAIN` and
184  * `UNASSIGNED` CSV files, the training data is divided into train and
185  * validation data sets. 70% for training and 30% for validation.
186  * For example:
187  *     TRAIN,gs://folder/file1.jsonl
188  *     VALIDATE,gs://folder/file2.jsonl
189  *     TEST,gs://folder/file3.jsonl
190  * **In-line JSONL files**
191  * In-line .JSONL files contain, per line, a JSON document that wraps a
192  * [`text_snippet`][google.cloud.automl.v1.TextSnippet] field followed by
193  * one or more [`annotations`][google.cloud.automl.v1.AnnotationPayload]
194  * fields, which have `display_name` and `text_extraction` fields to describe
195  * the entity from the text snippet. Multiple JSON documents can be separated
196  * using line breaks (&#92;n).
197  * The supplied text must be annotated exhaustively. For example, if you
198  * include the text "horse", but do not label it as "animal",
199  * then "horse" is assumed to not be an "animal".
200  * Any given text snippet content must have 30,000 characters or
201  * less, and also be UTF-8 NFC encoded. ASCII is accepted as it is
202  * UTF-8 NFC encoded.
203  * For example:
204  *     {
205  *       "text_snippet": {
206  *         "content": "dog car cat"
207  *       },
208  *       "annotations": [
209  *          {
210  *            "display_name": "animal",
211  *            "text_extraction": {
212  *              "text_segment": {"start_offset": 0, "end_offset": 2}
213  *           }
214  *          },
215  *          {
216  *           "display_name": "vehicle",
217  *            "text_extraction": {
218  *              "text_segment": {"start_offset": 4, "end_offset": 6}
219  *            }
220  *          },
221  *          {
222  *            "display_name": "animal",
223  *            "text_extraction": {
224  *              "text_segment": {"start_offset": 8, "end_offset": 10}
225  *            }
226  *          }
227  *      ]
228  *     }&#92;n
229  *     {
230  *        "text_snippet": {
231  *          "content": "This dog is good."
232  *        },
233  *        "annotations": [
234  *           {
235  *             "display_name": "animal",
236  *             "text_extraction": {
237  *               "text_segment": {"start_offset": 5, "end_offset": 7}
238  *             }
239  *           }
240  *        ]
241  *     }
242  * **JSONL files that reference documents**
243  * .JSONL files contain, per line, a JSON document that wraps a
244  * `input_config` that contains the path to a source document.
245  * Multiple JSON documents can be separated using line breaks (&#92;n).
246  * Supported document extensions: .PDF, .TIF, .TIFF
247  * For example:
248  *     {
249  *       "document": {
250  *         "input_config": {
251  *           "gcs_source": { "input_uris": [ "gs://folder/document1.pdf" ]
252  *           }
253  *         }
254  *       }
255  *     }&#92;n
256  *     {
257  *       "document": {
258  *         "input_config": {
259  *           "gcs_source": { "input_uris": [ "gs://folder/document2.tif" ]
260  *           }
261  *         }
262  *       }
263  *     }
264  * **In-line JSONL files with document layout information**
265  * **Note:** You can only annotate documents using the UI. The format described
266  * below applies to annotated documents exported using the UI or `exportData`.
267  * In-line .JSONL files for documents contain, per line, a JSON document
268  * that wraps a `document` field that provides the textual content of the
269  * document and the layout information.
270  * For example:
271  *     {
272  *       "document": {
273  *               "document_text": {
274  *                 "content": "dog car cat"
275  *               }
276  *               "layout": [
277  *                 {
278  *                   "text_segment": {
279  *                     "start_offset": 0,
280  *                     "end_offset": 11,
281  *                    },
282  *                    "page_number": 1,
283  *                    "bounding_poly": {
284  *                       "normalized_vertices": [
285  *                         {"x": 0.1, "y": 0.1},
286  *                         {"x": 0.1, "y": 0.3},
287  *                         {"x": 0.3, "y": 0.3},
288  *                         {"x": 0.3, "y": 0.1},
289  *                       ],
290  *                     },
291  *                     "text_segment_type": TOKEN,
292  *                 }
293  *               ],
294  *               "document_dimensions": {
295  *                 "width": 8.27,
296  *                 "height": 11.69,
297  *                 "unit": INCH,
298  *               }
299  *               "page_count": 3,
300  *             },
301  *             "annotations": [
302  *               {
303  *                 "display_name": "animal",
304  *                 "text_extraction": {
305  *                   "text_segment": {"start_offset": 0, "end_offset": 3}
306  *                 }
307  *               },
308  *               {
309  *                 "display_name": "vehicle",
310  *                 "text_extraction": {
311  *                   "text_segment": {"start_offset": 4, "end_offset": 7}
312  *                 }
313  *               },
314  *               {
315  *                 "display_name": "animal",
316  *                 "text_extraction": {
317  *                   "text_segment": {"start_offset": 8, "end_offset": 11}
318  *                 }
319  *               },
320  *             ],
321  * &lt;/section&gt;&lt;section&gt;&lt;h5&gt;Classification&lt;/h5&gt;
322  * See [Preparing your training
323  * data](https://cloud.google.com/natural-language/automl/docs/prepare) for more
324  * information.
325  * One or more CSV file(s) with each line in the following format:
326  *     ML_USE,(TEXT_SNIPPET | GCS_FILE_PATH),LABEL,LABEL,...
327  * *   `ML_USE` - Identifies the data set that the current row (file) applies
328  * to.
329  *     This value can be one of the following:
330  *     * `TRAIN` - Rows in this file are used to train the model.
331  *     * `TEST` - Rows in this file are used to test the model during training.
332  *     * `UNASSIGNED` - Rows in this file are not categorized. They are
333  *        Automatically divided into train and test data. 80% for training and
334  *        20% for testing.
335  * *   `TEXT_SNIPPET` and `GCS_FILE_PATH` are distinguished by a pattern. If
336  *     the column content is a valid Google Cloud Storage file path, that is,
337  *     prefixed by "gs://", it is treated as a `GCS_FILE_PATH`. Otherwise, if
338  *     the content is enclosed in double quotes (""), it is treated as a
339  *     `TEXT_SNIPPET`. For `GCS_FILE_PATH`, the path must lead to a
340  *     file with supported extension and UTF-8 encoding, for example,
341  *     "gs://folder/content.txt" AutoML imports the file content
342  *     as a text snippet. For `TEXT_SNIPPET`, AutoML imports the column content
343  *     excluding quotes. In both cases, size of the content must be 10MB or
344  *     less in size. For zip files, the size of each file inside the zip must be
345  *     10MB or less in size.
346  *     For the `MULTICLASS` classification type, at most one `LABEL` is allowed.
347  *     The `ML_USE` and `LABEL` columns are optional.
348  *     Supported file extensions: .TXT, .PDF, .TIF, .TIFF, .ZIP
349  * A maximum of 100 unique labels are allowed per CSV row.
350  * Sample rows:
351  *     TRAIN,"They have bad food and very rude",RudeService,BadFood
352  *     gs://folder/content.txt,SlowService
353  *     TEST,gs://folder/document.pdf
354  *     VALIDATE,gs://folder/text_files.zip,BadFood
355  * &lt;/section&gt;&lt;section&gt;&lt;h5&gt;Sentiment Analysis&lt;/h5&gt;
356  * See [Preparing your training
357  * data](https://cloud.google.com/natural-language/automl/docs/prepare) for more
358  * information.
359  * CSV file(s) with each line in format:
360  *     ML_USE,(TEXT_SNIPPET | GCS_FILE_PATH),SENTIMENT
361  * *   `ML_USE` - Identifies the data set that the current row (file) applies
362  * to.
363  *     This value can be one of the following:
364  *     * `TRAIN` - Rows in this file are used to train the model.
365  *     * `TEST` - Rows in this file are used to test the model during training.
366  *     * `UNASSIGNED` - Rows in this file are not categorized. They are
367  *        Automatically divided into train and test data. 80% for training and
368  *        20% for testing.
369  * *   `TEXT_SNIPPET` and `GCS_FILE_PATH` are distinguished by a pattern. If
370  *     the column content is a valid  Google Cloud Storage file path, that is,
371  *     prefixed by "gs://", it is treated as a `GCS_FILE_PATH`. Otherwise, if
372  *     the content is enclosed in double quotes (""), it is treated as a
373  *     `TEXT_SNIPPET`. For `GCS_FILE_PATH`, the path must lead to a
374  *     file with supported extension and UTF-8 encoding, for example,
375  *     "gs://folder/content.txt" AutoML imports the file content
376  *     as a text snippet. For `TEXT_SNIPPET`, AutoML imports the column content
377  *     excluding quotes. In both cases, size of the content must be 128kB or
378  *     less in size. For zip files, the size of each file inside the zip must be
379  *     128kB or less in size.
380  *     The `ML_USE` and `SENTIMENT` columns are optional.
381  *     Supported file extensions: .TXT, .PDF, .TIF, .TIFF, .ZIP
382  * *  `SENTIMENT` - An integer between 0 and
383  *     Dataset.text_sentiment_dataset_metadata.sentiment_max
384  *     (inclusive). Describes the ordinal of the sentiment - higher
385  *     value means a more positive sentiment. All the values are
386  *     completely relative, i.e. neither 0 needs to mean a negative or
387  *     neutral sentiment nor sentiment_max needs to mean a positive one -
388  *     it is just required that 0 is the least positive sentiment
389  *     in the data, and sentiment_max is the  most positive one.
390  *     The SENTIMENT shouldn't be confused with "score" or "magnitude"
391  *     from the previous Natural Language Sentiment Analysis API.
392  *     All SENTIMENT values between 0 and sentiment_max must be
393  *     represented in the imported data. On prediction the same 0 to
394  *     sentiment_max range will be used. The difference between
395  *     neighboring sentiment values needs not to be uniform, e.g. 1 and
396  *     2 may be similar whereas the difference between 2 and 3 may be
397  *     large.
398  * Sample rows:
399  *     TRAIN,"&#64;freewrytin this is way too good for your product",2
400  *     gs://folder/content.txt,3
401  *     TEST,gs://folder/document.pdf
402  *     VALIDATE,gs://folder/text_files.zip,2
403  *   &lt;/section&gt;
404  * &lt;/div&gt;
405  * &lt;h4&gt;AutoML Tables&lt;/h4&gt;&lt;div class="ui-datasection-main"&gt;&lt;section
406  * class="selected"&gt;
407  * See [Preparing your training
408  * data](https://cloud.google.com/automl-tables/docs/prepare) for more
409  * information.
410  * You can use either
411  * [gcs_source][google.cloud.automl.v1.InputConfig.gcs_source] or
412  * [bigquery_source][google.cloud.automl.v1.InputConfig.bigquery_source].
413  * All input is concatenated into a
414  * single
415  * [primary_table_spec_id][google.cloud.automl.v1.TablesDatasetMetadata.primary_table_spec_id]
416  * **For gcs_source:**
417  * CSV file(s), where the first row of the first file is the header,
418  * containing unique column names. If the first row of a subsequent
419  * file is the same as the header, then it is also treated as a
420  * header. All other rows contain values for the corresponding
421  * columns.
422  * Each .CSV file by itself must be 10GB or smaller, and their total
423  * size must be 100GB or smaller.
424  * First three sample rows of a CSV file:
425  * &lt;pre&gt;
426  * "Id","First Name","Last Name","Dob","Addresses"
427  * "1","John","Doe","1968-01-22","[{"status":"current","address":"123_First_Avenue","city":"Seattle","state":"WA","zip":"11111","numberOfYears":"1"},{"status":"previous","address":"456_Main_Street","city":"Portland","state":"OR","zip":"22222","numberOfYears":"5"}]"
428  * "2","Jane","Doe","1980-10-16","[{"status":"current","address":"789_Any_Avenue","city":"Albany","state":"NY","zip":"33333","numberOfYears":"2"},{"status":"previous","address":"321_Main_Street","city":"Hoboken","state":"NJ","zip":"44444","numberOfYears":"3"}]}
429  * &lt;/pre&gt;
430  * **For bigquery_source:**
431  * An URI of a BigQuery table. The user data size of the BigQuery
432  * table must be 100GB or smaller.
433  * An imported table must have between 2 and 1,000 columns, inclusive,
434  * and between 1000 and 100,000,000 rows, inclusive. There are at most 5
435  * import data running in parallel.
436  *   &lt;/section&gt;
437  * &lt;/div&gt;
438  * **Input field definitions:**
439  * `ML_USE`
440  * : ("TRAIN" | "VALIDATE" | "TEST" | "UNASSIGNED")
441  *   Describes how the given example (file) should be used for model
442  *   training. "UNASSIGNED" can be used when user has no preference.
443  * `GCS_FILE_PATH`
444  * : The path to a file on Google Cloud Storage. For example,
445  *   "gs://folder/image1.png".
446  * `LABEL`
447  * : A display name of an object on an image, video etc., e.g. "dog".
448  *   Must be up to 32 characters long and can consist only of ASCII
449  *   Latin letters A-Z and a-z, underscores(_), and ASCII digits 0-9.
450  *   For each label an AnnotationSpec is created which display_name
451  *   becomes the label; AnnotationSpecs are given back in predictions.
452  * `INSTANCE_ID`
453  * : A positive integer that identifies a specific instance of a
454  *   labeled entity on an example. Used e.g. to track two cars on
455  *   a video while being able to tell apart which one is which.
456  * `BOUNDING_BOX`
457  * : (`VERTEX,VERTEX,VERTEX,VERTEX` | `VERTEX,,,VERTEX,,`)
458  *   A rectangle parallel to the frame of the example (image,
459  *   video). If 4 vertices are given they are connected by edges
460  *   in the order provided, if 2 are given they are recognized
461  *   as diagonally opposite vertices of the rectangle.
462  * `VERTEX`
463  * : (`COORDINATE,COORDINATE`)
464  *   First coordinate is horizontal (x), the second is vertical (y).
465  * `COORDINATE`
466  * : A float in 0 to 1 range, relative to total length of
467  *   image or video in given dimension. For fractions the
468  *   leading non-decimal 0 can be omitted (i.e. 0.3 = .3).
469  *   Point 0,0 is in top left.
470  * `TIME_SEGMENT_START`
471  * : (`TIME_OFFSET`)
472  *   Expresses a beginning, inclusive, of a time segment
473  *   within an example that has a time dimension
474  *   (e.g. video).
475  * `TIME_SEGMENT_END`
476  * : (`TIME_OFFSET`)
477  *   Expresses an end, exclusive, of a time segment within
478  *   an example that has a time dimension (e.g. video).
479  * `TIME_OFFSET`
480  * : A number of seconds as measured from the start of an
481  *   example (e.g. video). Fractions are allowed, up to a
482  *   microsecond precision. "inf" is allowed, and it means the end
483  *   of the example.
484  * `TEXT_SNIPPET`
485  * : The content of a text snippet, UTF-8 encoded, enclosed within
486  *   double quotes ("").
487  * `DOCUMENT`
488  * : A field that provides the textual content with document and the layout
489  *   information.
490  *  **Errors:**
491  *  If any of the provided CSV files can't be parsed or if more than certain
492  *  percent of CSV rows cannot be processed then the operation fails and
493  *  nothing is imported. Regardless of overall success or failure the per-row
494  *  failures, up to a certain count cap, is listed in
495  *  Operation.metadata.partial_failures.
496  * </pre>
497  *
498  * Protobuf type {@code google.cloud.automl.v1.InputConfig}
499  */
500 public final class InputConfig extends com.google.protobuf.GeneratedMessageV3
501     implements
502     // @@protoc_insertion_point(message_implements:google.cloud.automl.v1.InputConfig)
503     InputConfigOrBuilder {
504   private static final long serialVersionUID = 0L;
505   // Use InputConfig.newBuilder() to construct.
InputConfig(com.google.protobuf.GeneratedMessageV3.Builder<?> builder)506   private InputConfig(com.google.protobuf.GeneratedMessageV3.Builder<?> builder) {
507     super(builder);
508   }
509 
InputConfig()510   private InputConfig() {}
511 
512   @java.lang.Override
513   @SuppressWarnings({"unused"})
newInstance(UnusedPrivateParameter unused)514   protected java.lang.Object newInstance(UnusedPrivateParameter unused) {
515     return new InputConfig();
516   }
517 
518   @java.lang.Override
getUnknownFields()519   public final com.google.protobuf.UnknownFieldSet getUnknownFields() {
520     return this.unknownFields;
521   }
522 
getDescriptor()523   public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
524     return com.google.cloud.automl.v1.Io
525         .internal_static_google_cloud_automl_v1_InputConfig_descriptor;
526   }
527 
528   @SuppressWarnings({"rawtypes"})
529   @java.lang.Override
internalGetMapField(int number)530   protected com.google.protobuf.MapField internalGetMapField(int number) {
531     switch (number) {
532       case 2:
533         return internalGetParams();
534       default:
535         throw new RuntimeException("Invalid map field number: " + number);
536     }
537   }
538 
539   @java.lang.Override
540   protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
internalGetFieldAccessorTable()541       internalGetFieldAccessorTable() {
542     return com.google.cloud.automl.v1.Io
543         .internal_static_google_cloud_automl_v1_InputConfig_fieldAccessorTable
544         .ensureFieldAccessorsInitialized(
545             com.google.cloud.automl.v1.InputConfig.class,
546             com.google.cloud.automl.v1.InputConfig.Builder.class);
547   }
548 
549   private int sourceCase_ = 0;
550   private java.lang.Object source_;
551 
552   public enum SourceCase
553       implements
554           com.google.protobuf.Internal.EnumLite,
555           com.google.protobuf.AbstractMessage.InternalOneOfEnum {
556     GCS_SOURCE(1),
557     SOURCE_NOT_SET(0);
558     private final int value;
559 
SourceCase(int value)560     private SourceCase(int value) {
561       this.value = value;
562     }
563     /**
564      * @param value The number of the enum to look for.
565      * @return The enum associated with the given number.
566      * @deprecated Use {@link #forNumber(int)} instead.
567      */
568     @java.lang.Deprecated
valueOf(int value)569     public static SourceCase valueOf(int value) {
570       return forNumber(value);
571     }
572 
forNumber(int value)573     public static SourceCase forNumber(int value) {
574       switch (value) {
575         case 1:
576           return GCS_SOURCE;
577         case 0:
578           return SOURCE_NOT_SET;
579         default:
580           return null;
581       }
582     }
583 
getNumber()584     public int getNumber() {
585       return this.value;
586     }
587   };
588 
getSourceCase()589   public SourceCase getSourceCase() {
590     return SourceCase.forNumber(sourceCase_);
591   }
592 
593   public static final int GCS_SOURCE_FIELD_NUMBER = 1;
594   /**
595    *
596    *
597    * <pre>
598    * The Google Cloud Storage location for the input content.
599    * For [AutoMl.ImportData][google.cloud.automl.v1.AutoMl.ImportData], `gcs_source` points to a CSV file with
600    * a structure described in [InputConfig][google.cloud.automl.v1.InputConfig].
601    * </pre>
602    *
603    * <code>.google.cloud.automl.v1.GcsSource gcs_source = 1;</code>
604    *
605    * @return Whether the gcsSource field is set.
606    */
607   @java.lang.Override
hasGcsSource()608   public boolean hasGcsSource() {
609     return sourceCase_ == 1;
610   }
611   /**
612    *
613    *
614    * <pre>
615    * The Google Cloud Storage location for the input content.
616    * For [AutoMl.ImportData][google.cloud.automl.v1.AutoMl.ImportData], `gcs_source` points to a CSV file with
617    * a structure described in [InputConfig][google.cloud.automl.v1.InputConfig].
618    * </pre>
619    *
620    * <code>.google.cloud.automl.v1.GcsSource gcs_source = 1;</code>
621    *
622    * @return The gcsSource.
623    */
624   @java.lang.Override
getGcsSource()625   public com.google.cloud.automl.v1.GcsSource getGcsSource() {
626     if (sourceCase_ == 1) {
627       return (com.google.cloud.automl.v1.GcsSource) source_;
628     }
629     return com.google.cloud.automl.v1.GcsSource.getDefaultInstance();
630   }
631   /**
632    *
633    *
634    * <pre>
635    * The Google Cloud Storage location for the input content.
636    * For [AutoMl.ImportData][google.cloud.automl.v1.AutoMl.ImportData], `gcs_source` points to a CSV file with
637    * a structure described in [InputConfig][google.cloud.automl.v1.InputConfig].
638    * </pre>
639    *
640    * <code>.google.cloud.automl.v1.GcsSource gcs_source = 1;</code>
641    */
642   @java.lang.Override
getGcsSourceOrBuilder()643   public com.google.cloud.automl.v1.GcsSourceOrBuilder getGcsSourceOrBuilder() {
644     if (sourceCase_ == 1) {
645       return (com.google.cloud.automl.v1.GcsSource) source_;
646     }
647     return com.google.cloud.automl.v1.GcsSource.getDefaultInstance();
648   }
649 
  public static final int PARAMS_FIELD_NUMBER = 2;

  // Lazy holder for the default MapEntry of the `params` map field
  // (string key -> string value, both defaulting to the empty string).
  private static final class ParamsDefaultEntryHolder {
    static final com.google.protobuf.MapEntry<java.lang.String, java.lang.String> defaultEntry =
        com.google.protobuf.MapEntry.<java.lang.String, java.lang.String>newDefaultInstance(
            com.google.cloud.automl.v1.Io
                .internal_static_google_cloud_automl_v1_InputConfig_ParamsEntry_descriptor,
            com.google.protobuf.WireFormat.FieldType.STRING,
            "",
            com.google.protobuf.WireFormat.FieldType.STRING,
            "");
  }

  // Backing storage for the `params` map; remains null until populated by the parser/builder.
  @SuppressWarnings("serial")
  private com.google.protobuf.MapField<java.lang.String, java.lang.String> params_;

  // Returns the map field, substituting a shared empty map while unset.
  private com.google.protobuf.MapField<java.lang.String, java.lang.String> internalGetParams() {
    if (params_ == null) {
      return com.google.protobuf.MapField.emptyMapField(ParamsDefaultEntryHolder.defaultEntry);
    }
    return params_;
  }
672 
getParamsCount()673   public int getParamsCount() {
674     return internalGetParams().getMap().size();
675   }
676   /**
677    *
678    *
679    * <pre>
680    * Additional domain-specific parameters describing the semantic of the
681    * imported data, any string must be up to 25000
682    * characters long.
683    * &lt;h4&gt;AutoML Tables&lt;/h4&gt;
684    * `schema_inference_version`
685    * : (integer) This value must be supplied.
686    *   The version of the
687    *   algorithm to use for the initial inference of the
688    *   column data types of the imported table. Allowed values: "1".
689    * </pre>
690    *
691    * <code>map&lt;string, string&gt; params = 2;</code>
692    */
693   @java.lang.Override
containsParams(java.lang.String key)694   public boolean containsParams(java.lang.String key) {
695     if (key == null) {
696       throw new NullPointerException("map key");
697     }
698     return internalGetParams().getMap().containsKey(key);
699   }
  /** Use {@link #getParamsMap()} instead. */
  @java.lang.Override
  @java.lang.Deprecated
  public java.util.Map<java.lang.String, java.lang.String> getParams() {
    // Retained for backward compatibility; simply delegates to the preferred accessor.
    return getParamsMap();
  }
706   /**
707    *
708    *
709    * <pre>
710    * Additional domain-specific parameters describing the semantic of the
711    * imported data, any string must be up to 25000
712    * characters long.
713    * &lt;h4&gt;AutoML Tables&lt;/h4&gt;
714    * `schema_inference_version`
715    * : (integer) This value must be supplied.
716    *   The version of the
717    *   algorithm to use for the initial inference of the
718    *   column data types of the imported table. Allowed values: "1".
719    * </pre>
720    *
721    * <code>map&lt;string, string&gt; params = 2;</code>
722    */
723   @java.lang.Override
getParamsMap()724   public java.util.Map<java.lang.String, java.lang.String> getParamsMap() {
725     return internalGetParams().getMap();
726   }
727   /**
728    *
729    *
730    * <pre>
731    * Additional domain-specific parameters describing the semantic of the
732    * imported data, any string must be up to 25000
733    * characters long.
734    * &lt;h4&gt;AutoML Tables&lt;/h4&gt;
735    * `schema_inference_version`
736    * : (integer) This value must be supplied.
737    *   The version of the
738    *   algorithm to use for the initial inference of the
739    *   column data types of the imported table. Allowed values: "1".
740    * </pre>
741    *
742    * <code>map&lt;string, string&gt; params = 2;</code>
743    */
744   @java.lang.Override
getParamsOrDefault( java.lang.String key, java.lang.String defaultValue)745   public /* nullable */ java.lang.String getParamsOrDefault(
746       java.lang.String key,
747       /* nullable */
748       java.lang.String defaultValue) {
749     if (key == null) {
750       throw new NullPointerException("map key");
751     }
752     java.util.Map<java.lang.String, java.lang.String> map = internalGetParams().getMap();
753     return map.containsKey(key) ? map.get(key) : defaultValue;
754   }
755   /**
756    *
757    *
758    * <pre>
759    * Additional domain-specific parameters describing the semantic of the
760    * imported data, any string must be up to 25000
761    * characters long.
762    * &lt;h4&gt;AutoML Tables&lt;/h4&gt;
763    * `schema_inference_version`
764    * : (integer) This value must be supplied.
765    *   The version of the
766    *   algorithm to use for the initial inference of the
767    *   column data types of the imported table. Allowed values: "1".
768    * </pre>
769    *
770    * <code>map&lt;string, string&gt; params = 2;</code>
771    */
772   @java.lang.Override
getParamsOrThrow(java.lang.String key)773   public java.lang.String getParamsOrThrow(java.lang.String key) {
774     if (key == null) {
775       throw new NullPointerException("map key");
776     }
777     java.util.Map<java.lang.String, java.lang.String> map = internalGetParams().getMap();
778     if (!map.containsKey(key)) {
779       throw new java.lang.IllegalArgumentException();
780     }
781     return map.get(key);
782   }
783 
  // Memoized result of isInitialized(): -1 = not yet computed, 0 = false, 1 = true.
  private byte memoizedIsInitialized = -1;

  @java.lang.Override
  public final boolean isInitialized() {
    byte isInitialized = memoizedIsInitialized;
    if (isInitialized == 1) return true;
    if (isInitialized == 0) return false;

    // This message has no required fields, so it is always initialized; cache that.
    memoizedIsInitialized = 1;
    return true;
  }
795 
  @java.lang.Override
  public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException {
    // Field 1: gcs_source, written only while this oneof case is selected.
    if (sourceCase_ == 1) {
      output.writeMessage(1, (com.google.cloud.automl.v1.GcsSource) source_);
    }
    // Field 2: each params entry is serialized as a nested MapEntry message.
    com.google.protobuf.GeneratedMessageV3.serializeStringMapTo(
        output, internalGetParams(), ParamsDefaultEntryHolder.defaultEntry, 2);
    // Round-trip any fields this runtime did not recognize when parsing.
    getUnknownFields().writeTo(output);
  }
805 
  @java.lang.Override
  public int getSerializedSize() {
    // Serialized size is cached after the first computation; -1 means "unknown".
    int size = memoizedSize;
    if (size != -1) return size;

    size = 0;
    // Field 1: gcs_source, counted only while the oneof holds it.
    if (sourceCase_ == 1) {
      size +=
          com.google.protobuf.CodedOutputStream.computeMessageSize(
              1, (com.google.cloud.automl.v1.GcsSource) source_);
    }
    // Field 2: build a transient MapEntry per params entry to size it on the wire.
    for (java.util.Map.Entry<java.lang.String, java.lang.String> entry :
        internalGetParams().getMap().entrySet()) {
      com.google.protobuf.MapEntry<java.lang.String, java.lang.String> params__ =
          ParamsDefaultEntryHolder.defaultEntry
              .newBuilderForType()
              .setKey(entry.getKey())
              .setValue(entry.getValue())
              .build();
      size += com.google.protobuf.CodedOutputStream.computeMessageSize(2, params__);
    }
    size += getUnknownFields().getSerializedSize();
    memoizedSize = size;  // Safe to cache: messages are immutable once built.
    return size;
  }
831 
  @java.lang.Override
  public boolean equals(final java.lang.Object obj) {
    if (obj == this) {
      return true;
    }
    if (!(obj instanceof com.google.cloud.automl.v1.InputConfig)) {
      return super.equals(obj);
    }
    com.google.cloud.automl.v1.InputConfig other = (com.google.cloud.automl.v1.InputConfig) obj;

    // Compare the params map, the oneof case, the selected oneof payload,
    // and finally the unknown field set.
    if (!internalGetParams().equals(other.internalGetParams())) return false;
    if (!getSourceCase().equals(other.getSourceCase())) return false;
    switch (sourceCase_) {
      case 1:
        if (!getGcsSource().equals(other.getGcsSource())) return false;
        break;
      case 0:
      default:
        // Case 0 (oneof unset) has no payload to compare.
    }
    if (!getUnknownFields().equals(other.getUnknownFields())) return false;
    return true;
  }
854 
  @java.lang.Override
  public int hashCode() {
    // Hash is memoized; 0 doubles as the "not yet computed" sentinel.
    if (memoizedHashCode != 0) {
      return memoizedHashCode;
    }
    int hash = 41;
    hash = (19 * hash) + getDescriptor().hashCode();
    // Mix in the params map only when non-empty, mirroring equals().
    if (!internalGetParams().getMap().isEmpty()) {
      hash = (37 * hash) + PARAMS_FIELD_NUMBER;
      hash = (53 * hash) + internalGetParams().hashCode();
    }
    // Mix in the selected oneof payload, tagged by its field number.
    switch (sourceCase_) {
      case 1:
        hash = (37 * hash) + GCS_SOURCE_FIELD_NUMBER;
        hash = (53 * hash) + getGcsSource().hashCode();
        break;
      case 0:
      default:
        // Oneof unset: nothing to mix in.
    }
    hash = (29 * hash) + getUnknownFields().hashCode();
    memoizedHashCode = hash;
    return hash;
  }
878 
  // -------------------------------------------------------------------------
  // Static parsing entry points. Every overload delegates to PARSER; the
  // stream-based variants route through GeneratedMessageV3 helpers so that
  // IOExceptions from the underlying stream propagate correctly.
  // -------------------------------------------------------------------------

  public static com.google.cloud.automl.v1.InputConfig parseFrom(java.nio.ByteBuffer data)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data);
  }

  public static com.google.cloud.automl.v1.InputConfig parseFrom(
      java.nio.ByteBuffer data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data, extensionRegistry);
  }

  public static com.google.cloud.automl.v1.InputConfig parseFrom(
      com.google.protobuf.ByteString data)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data);
  }

  public static com.google.cloud.automl.v1.InputConfig parseFrom(
      com.google.protobuf.ByteString data,
      com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data, extensionRegistry);
  }

  public static com.google.cloud.automl.v1.InputConfig parseFrom(byte[] data)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data);
  }

  public static com.google.cloud.automl.v1.InputConfig parseFrom(
      byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data, extensionRegistry);
  }

  public static com.google.cloud.automl.v1.InputConfig parseFrom(java.io.InputStream input)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
  }

  public static com.google.cloud.automl.v1.InputConfig parseFrom(
      java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
        PARSER, input, extensionRegistry);
  }

  // Delimited variants read a length-prefixed message (see writeDelimitedTo).
  public static com.google.cloud.automl.v1.InputConfig parseDelimitedFrom(java.io.InputStream input)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(PARSER, input);
  }

  public static com.google.cloud.automl.v1.InputConfig parseDelimitedFrom(
      java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(
        PARSER, input, extensionRegistry);
  }

  public static com.google.cloud.automl.v1.InputConfig parseFrom(
      com.google.protobuf.CodedInputStream input) throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
  }

  public static com.google.cloud.automl.v1.InputConfig parseFrom(
      com.google.protobuf.CodedInputStream input,
      com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
        PARSER, input, extensionRegistry);
  }
950 
  @java.lang.Override
  public Builder newBuilderForType() {
    return newBuilder();
  }

  /** Returns a new builder initialized to default values. */
  public static Builder newBuilder() {
    return DEFAULT_INSTANCE.toBuilder();
  }

  /** Returns a new builder pre-populated with the fields of {@code prototype}. */
  public static Builder newBuilder(com.google.cloud.automl.v1.InputConfig prototype) {
    return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
  }

  @java.lang.Override
  public Builder toBuilder() {
    // Skip the mergeFrom copy when this is the shared default instance.
    return this == DEFAULT_INSTANCE ? new Builder() : new Builder().mergeFrom(this);
  }

  @java.lang.Override
  protected Builder newBuilderForType(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
    Builder builder = new Builder(parent);
    return builder;
  }
974   /**
975    *
976    *
977    * <pre>
978    * Input configuration for [AutoMl.ImportData][google.cloud.automl.v1.AutoMl.ImportData] action.
979    * The format of input depends on dataset_metadata the Dataset into which
980    * the import is happening has. As input source the
981    * [gcs_source][google.cloud.automl.v1.InputConfig.gcs_source]
982    * is expected, unless specified otherwise. Additionally any input .CSV file
983    * by itself must be 100MB or smaller, unless specified otherwise.
984    * If an "example" file (that is, image, video etc.) with identical content
985    * (even if it had different `GCS_FILE_PATH`) is mentioned multiple times, then
986    * its label, bounding boxes etc. are appended. The same file should be always
987    * provided with the same `ML_USE` and `GCS_FILE_PATH`, if it is not, then
988    * these values are nondeterministically selected from the given ones.
989    * The formats are represented in EBNF with commas being literal and with
990    * non-terminal symbols defined near the end of this comment. The formats are:
991    * &lt;h4&gt;AutoML Vision&lt;/h4&gt;
992    * &lt;div class="ds-selector-tabs"&gt;&lt;section&gt;&lt;h5&gt;Classification&lt;/h5&gt;
993    * See [Preparing your training
994    * data](https://cloud.google.com/vision/automl/docs/prepare) for more
995    * information.
996    * CSV file(s) with each line in format:
997    *     ML_USE,GCS_FILE_PATH,LABEL,LABEL,...
998    * *   `ML_USE` - Identifies the data set that the current row (file) applies
999    * to.
1000    *     This value can be one of the following:
1001    *     * `TRAIN` - Rows in this file are used to train the model.
1002    *     * `TEST` - Rows in this file are used to test the model during training.
1003    *     * `UNASSIGNED` - Rows in this file are not categorized. They are
1004    *        Automatically divided into train and test data. 80% for training and
1005    *        20% for testing.
1006    * *   `GCS_FILE_PATH` - The Google Cloud Storage location of an image of up to
1007    *      30MB in size. Supported extensions: .JPEG, .GIF, .PNG, .WEBP, .BMP,
1008    *      .TIFF, .ICO.
1009    * *   `LABEL` - A label that identifies the object in the image.
1010    * For the `MULTICLASS` classification type, at most one `LABEL` is allowed
1011    * per image. If an image has not yet been labeled, then it should be
1012    * mentioned just once with no `LABEL`.
1013    * Some sample rows:
1014    *     TRAIN,gs://folder/image1.jpg,daisy
1015    *     TEST,gs://folder/image2.jpg,dandelion,tulip,rose
1016    *     UNASSIGNED,gs://folder/image3.jpg,daisy
1017    *     UNASSIGNED,gs://folder/image4.jpg
1018    * &lt;/section&gt;&lt;section&gt;&lt;h5&gt;Object Detection&lt;/h5&gt;
1019    * See [Preparing your training
1020    * data](https://cloud.google.com/vision/automl/object-detection/docs/prepare)
1021    * for more information.
1022    * A CSV file(s) with each line in format:
1023    *     ML_USE,GCS_FILE_PATH,[LABEL],(BOUNDING_BOX | ,,,,,,,)
1024    * *   `ML_USE` - Identifies the data set that the current row (file) applies
1025    * to.
1026    *     This value can be one of the following:
1027    *     * `TRAIN` - Rows in this file are used to train the model.
1028    *     * `TEST` - Rows in this file are used to test the model during training.
1029    *     * `UNASSIGNED` - Rows in this file are not categorized. They are
1030    *        Automatically divided into train and test data. 80% for training and
1031    *        20% for testing.
1032    * *  `GCS_FILE_PATH` - The Google Cloud Storage location of an image of up to
1033    *     30MB in size. Supported extensions: .JPEG, .GIF, .PNG. Each image
1034    *     is assumed to be exhaustively labeled.
1035    * *  `LABEL` - A label that identifies the object in the image specified by the
1036    *    `BOUNDING_BOX`.
1037    * *  `BOUNDING BOX` - The vertices of an object in the example image.
1038    *    The minimum allowed `BOUNDING_BOX` edge length is 0.01, and no more than
1039    *    500 `BOUNDING_BOX` instances per image are allowed (one `BOUNDING_BOX`
1040    *    per line). If an image has no looked for objects then it should be
1041    *    mentioned just once with no LABEL and the ",,,,,,," in place of the
1042    *   `BOUNDING_BOX`.
1043    * **Four sample rows:**
1044    *     TRAIN,gs://folder/image1.png,car,0.1,0.1,,,0.3,0.3,,
1045    *     TRAIN,gs://folder/image1.png,bike,.7,.6,,,.8,.9,,
1046    *     UNASSIGNED,gs://folder/im2.png,car,0.1,0.1,0.2,0.1,0.2,0.3,0.1,0.3
1047    *     TEST,gs://folder/im3.png,,,,,,,,,
1048    *   &lt;/section&gt;
1049    * &lt;/div&gt;
1050    * &lt;h4&gt;AutoML Video Intelligence&lt;/h4&gt;
1051    * &lt;div class="ds-selector-tabs"&gt;&lt;section&gt;&lt;h5&gt;Classification&lt;/h5&gt;
1052    * See [Preparing your training
1053    * data](https://cloud.google.com/video-intelligence/automl/docs/prepare) for
1054    * more information.
1055    * CSV file(s) with each line in format:
1056    *     ML_USE,GCS_FILE_PATH
1057    * For `ML_USE`, do not use `VALIDATE`.
1058    * `GCS_FILE_PATH` is the path to another .csv file that describes training
1059    * example for a given `ML_USE`, using the following row format:
1060    *     GCS_FILE_PATH,(LABEL,TIME_SEGMENT_START,TIME_SEGMENT_END | ,,)
1061    * Here `GCS_FILE_PATH` leads to a video of up to 50GB in size and up
1062    * to 3h duration. Supported extensions: .MOV, .MPEG4, .MP4, .AVI.
1063    * `TIME_SEGMENT_START` and `TIME_SEGMENT_END` must be within the
1064    * length of the video, and the end time must be after the start time. Any
1065    * segment of a video which has one or more labels on it, is considered a
1066    * hard negative for all other labels. Any segment with no labels on
1067    * it is considered to be unknown. If a whole video is unknown, then
1068    * it should be mentioned just once with ",," in place of `LABEL,
1069    * TIME_SEGMENT_START,TIME_SEGMENT_END`.
1070    * Sample top level CSV file:
1071    *     TRAIN,gs://folder/train_videos.csv
1072    *     TEST,gs://folder/test_videos.csv
1073    *     UNASSIGNED,gs://folder/other_videos.csv
1074    * Sample rows of a CSV file for a particular ML_USE:
1075    *     gs://folder/video1.avi,car,120,180.000021
1076    *     gs://folder/video1.avi,bike,150,180.000021
1077    *     gs://folder/vid2.avi,car,0,60.5
1078    *     gs://folder/vid3.avi,,,
1079    * &lt;/section&gt;&lt;section&gt;&lt;h5&gt;Object Tracking&lt;/h5&gt;
1080    * See [Preparing your training
1081    * data](/video-intelligence/automl/object-tracking/docs/prepare) for more
1082    * information.
1083    * CSV file(s) with each line in format:
1084    *     ML_USE,GCS_FILE_PATH
1085    * For `ML_USE`, do not use `VALIDATE`.
1086    * `GCS_FILE_PATH` is the path to another .csv file that describes training
1087    * example for a given `ML_USE`, using the following row format:
1088    *     GCS_FILE_PATH,LABEL,[INSTANCE_ID],TIMESTAMP,BOUNDING_BOX
1089    * or
1090    *     GCS_FILE_PATH,,,,,,,,,,
1091    * Here `GCS_FILE_PATH` leads to a video of up to 50GB in size and up
1092    * to 3h duration. Supported extensions: .MOV, .MPEG4, .MP4, .AVI.
1093    * Providing `INSTANCE_ID`s can help to obtain a better model. When
1094    * a specific labeled entity leaves the video frame, and shows up
1095    * afterwards it is not required, albeit preferable, that the same
1096    * `INSTANCE_ID` is given to it.
1097    * `TIMESTAMP` must be within the length of the video, the
1098    * `BOUNDING_BOX` is assumed to be drawn on the closest video's frame
1099    * to the `TIMESTAMP`. Any mentioned by the `TIMESTAMP` frame is expected
1100    * to be exhaustively labeled and no more than 500 `BOUNDING_BOX`-es per
1101    * frame are allowed. If a whole video is unknown, then it should be
1102    * mentioned just once with ",,,,,,,,,," in place of `LABEL,
1103    * [INSTANCE_ID],TIMESTAMP,BOUNDING_BOX`.
1104    * Sample top level CSV file:
1105    *      TRAIN,gs://folder/train_videos.csv
1106    *      TEST,gs://folder/test_videos.csv
1107    *      UNASSIGNED,gs://folder/other_videos.csv
1108    * Seven sample rows of a CSV file for a particular ML_USE:
1109    *      gs://folder/video1.avi,car,1,12.10,0.8,0.8,0.9,0.8,0.9,0.9,0.8,0.9
1110    *      gs://folder/video1.avi,car,1,12.90,0.4,0.8,0.5,0.8,0.5,0.9,0.4,0.9
1111    *      gs://folder/video1.avi,car,2,12.10,.4,.2,.5,.2,.5,.3,.4,.3
1112    *      gs://folder/video1.avi,car,2,12.90,.8,.2,,,.9,.3,,
1113    *      gs://folder/video1.avi,bike,,12.50,.45,.45,,,.55,.55,,
1114    *      gs://folder/video2.avi,car,1,0,.1,.9,,,.9,.1,,
1115    *      gs://folder/video2.avi,,,,,,,,,,,
1116    *   &lt;/section&gt;
1117    * &lt;/div&gt;
1118    * &lt;h4&gt;AutoML Natural Language&lt;/h4&gt;
1119    * &lt;div class="ds-selector-tabs"&gt;&lt;section&gt;&lt;h5&gt;Entity Extraction&lt;/h5&gt;
1120    * See [Preparing your training
1121    * data](/natural-language/automl/entity-analysis/docs/prepare) for more
1122    * information.
1123    * One or more CSV file(s) with each line in the following format:
1124    *     ML_USE,GCS_FILE_PATH
1125    * *   `ML_USE` - Identifies the data set that the current row (file) applies
1126    * to.
1127    *     This value can be one of the following:
1128    *     * `TRAIN` - Rows in this file are used to train the model.
1129    *     * `TEST` - Rows in this file are used to test the model during training.
1130    *     * `UNASSIGNED` - Rows in this file are not categorized. They are
1131    *        Automatically divided into train and test data. 80% for training and
   *        20% for testing.
   * *   `GCS_FILE_PATH` - Identifies a JSON Lines (.JSONL) file stored in
   *      Google Cloud Storage that contains in-line text as documents
   *      for model training.
1136    * After the training data set has been determined from the `TRAIN` and
1137    * `UNASSIGNED` CSV files, the training data is divided into train and
1138    * validation data sets. 70% for training and 30% for validation.
1139    * For example:
1140    *     TRAIN,gs://folder/file1.jsonl
1141    *     VALIDATE,gs://folder/file2.jsonl
1142    *     TEST,gs://folder/file3.jsonl
1143    * **In-line JSONL files**
1144    * In-line .JSONL files contain, per line, a JSON document that wraps a
1145    * [`text_snippet`][google.cloud.automl.v1.TextSnippet] field followed by
1146    * one or more [`annotations`][google.cloud.automl.v1.AnnotationPayload]
1147    * fields, which have `display_name` and `text_extraction` fields to describe
1148    * the entity from the text snippet. Multiple JSON documents can be separated
1149    * using line breaks (&#92;n).
1150    * The supplied text must be annotated exhaustively. For example, if you
1151    * include the text "horse", but do not label it as "animal",
1152    * then "horse" is assumed to not be an "animal".
1153    * Any given text snippet content must have 30,000 characters or
1154    * less, and also be UTF-8 NFC encoded. ASCII is accepted as it is
1155    * UTF-8 NFC encoded.
1156    * For example:
1157    *     {
1158    *       "text_snippet": {
1159    *         "content": "dog car cat"
1160    *       },
1161    *       "annotations": [
1162    *          {
1163    *            "display_name": "animal",
1164    *            "text_extraction": {
1165    *              "text_segment": {"start_offset": 0, "end_offset": 2}
1166    *           }
1167    *          },
1168    *          {
1169    *           "display_name": "vehicle",
1170    *            "text_extraction": {
1171    *              "text_segment": {"start_offset": 4, "end_offset": 6}
1172    *            }
1173    *          },
1174    *          {
1175    *            "display_name": "animal",
1176    *            "text_extraction": {
1177    *              "text_segment": {"start_offset": 8, "end_offset": 10}
1178    *            }
1179    *          }
1180    *      ]
1181    *     }&#92;n
1182    *     {
1183    *        "text_snippet": {
1184    *          "content": "This dog is good."
1185    *        },
1186    *        "annotations": [
1187    *           {
1188    *             "display_name": "animal",
1189    *             "text_extraction": {
1190    *               "text_segment": {"start_offset": 5, "end_offset": 7}
1191    *             }
1192    *           }
1193    *        ]
1194    *     }
1195    * **JSONL files that reference documents**
1196    * .JSONL files contain, per line, a JSON document that wraps a
1197    * `input_config` that contains the path to a source document.
1198    * Multiple JSON documents can be separated using line breaks (&#92;n).
1199    * Supported document extensions: .PDF, .TIF, .TIFF
1200    * For example:
1201    *     {
1202    *       "document": {
1203    *         "input_config": {
1204    *           "gcs_source": { "input_uris": [ "gs://folder/document1.pdf" ]
1205    *           }
1206    *         }
1207    *       }
1208    *     }&#92;n
1209    *     {
1210    *       "document": {
1211    *         "input_config": {
1212    *           "gcs_source": { "input_uris": [ "gs://folder/document2.tif" ]
1213    *           }
1214    *         }
1215    *       }
1216    *     }
1217    * **In-line JSONL files with document layout information**
1218    * **Note:** You can only annotate documents using the UI. The format described
1219    * below applies to annotated documents exported using the UI or `exportData`.
1220    * In-line .JSONL files for documents contain, per line, a JSON document
1221    * that wraps a `document` field that provides the textual content of the
1222    * document and the layout information.
1223    * For example:
1224    *     {
1225    *       "document": {
1226    *               "document_text": {
1227    *                 "content": "dog car cat"
1228    *               }
1229    *               "layout": [
1230    *                 {
1231    *                   "text_segment": {
1232    *                     "start_offset": 0,
1233    *                     "end_offset": 11,
1234    *                    },
1235    *                    "page_number": 1,
1236    *                    "bounding_poly": {
1237    *                       "normalized_vertices": [
1238    *                         {"x": 0.1, "y": 0.1},
1239    *                         {"x": 0.1, "y": 0.3},
1240    *                         {"x": 0.3, "y": 0.3},
1241    *                         {"x": 0.3, "y": 0.1},
1242    *                       ],
1243    *                     },
1244    *                     "text_segment_type": TOKEN,
1245    *                 }
1246    *               ],
1247    *               "document_dimensions": {
1248    *                 "width": 8.27,
1249    *                 "height": 11.69,
1250    *                 "unit": INCH,
1251    *               }
1252    *               "page_count": 3,
1253    *             },
1254    *             "annotations": [
1255    *               {
1256    *                 "display_name": "animal",
1257    *                 "text_extraction": {
1258    *                   "text_segment": {"start_offset": 0, "end_offset": 3}
1259    *                 }
1260    *               },
1261    *               {
1262    *                 "display_name": "vehicle",
1263    *                 "text_extraction": {
1264    *                   "text_segment": {"start_offset": 4, "end_offset": 7}
1265    *                 }
1266    *               },
1267    *               {
1268    *                 "display_name": "animal",
1269    *                 "text_extraction": {
1270    *                   "text_segment": {"start_offset": 8, "end_offset": 11}
1271    *                 }
1272    *               },
1273    *             ],
1274    * &lt;/section&gt;&lt;section&gt;&lt;h5&gt;Classification&lt;/h5&gt;
1275    * See [Preparing your training
1276    * data](https://cloud.google.com/natural-language/automl/docs/prepare) for more
1277    * information.
1278    * One or more CSV file(s) with each line in the following format:
1279    *     ML_USE,(TEXT_SNIPPET | GCS_FILE_PATH),LABEL,LABEL,...
1280    * *   `ML_USE` - Identifies the data set that the current row (file) applies
1281    * to.
1282    *     This value can be one of the following:
1283    *     * `TRAIN` - Rows in this file are used to train the model.
1284    *     * `TEST` - Rows in this file are used to test the model during training.
1285    *     * `UNASSIGNED` - Rows in this file are not categorized. They are
1286    *        Automatically divided into train and test data. 80% for training and
1287    *        20% for testing.
1288    * *   `TEXT_SNIPPET` and `GCS_FILE_PATH` are distinguished by a pattern. If
1289    *     the column content is a valid Google Cloud Storage file path, that is,
1290    *     prefixed by "gs://", it is treated as a `GCS_FILE_PATH`. Otherwise, if
1291    *     the content is enclosed in double quotes (""), it is treated as a
1292    *     `TEXT_SNIPPET`. For `GCS_FILE_PATH`, the path must lead to a
1293    *     file with supported extension and UTF-8 encoding, for example,
1294    *     "gs://folder/content.txt" AutoML imports the file content
1295    *     as a text snippet. For `TEXT_SNIPPET`, AutoML imports the column content
1296    *     excluding quotes. In both cases, size of the content must be 10MB or
1297    *     less in size. For zip files, the size of each file inside the zip must be
1298    *     10MB or less in size.
1299    *     For the `MULTICLASS` classification type, at most one `LABEL` is allowed.
1300    *     The `ML_USE` and `LABEL` columns are optional.
1301    *     Supported file extensions: .TXT, .PDF, .TIF, .TIFF, .ZIP
1302    * A maximum of 100 unique labels are allowed per CSV row.
1303    * Sample rows:
1304    *     TRAIN,"They have bad food and very rude",RudeService,BadFood
1305    *     gs://folder/content.txt,SlowService
1306    *     TEST,gs://folder/document.pdf
1307    *     VALIDATE,gs://folder/text_files.zip,BadFood
1308    * &lt;/section&gt;&lt;section&gt;&lt;h5&gt;Sentiment Analysis&lt;/h5&gt;
1309    * See [Preparing your training
1310    * data](https://cloud.google.com/natural-language/automl/docs/prepare) for more
1311    * information.
1312    * CSV file(s) with each line in format:
1313    *     ML_USE,(TEXT_SNIPPET | GCS_FILE_PATH),SENTIMENT
1314    * *   `ML_USE` - Identifies the data set that the current row (file) applies
1315    * to.
1316    *     This value can be one of the following:
1317    *     * `TRAIN` - Rows in this file are used to train the model.
1318    *     * `TEST` - Rows in this file are used to test the model during training.
1319    *     * `UNASSIGNED` - Rows in this file are not categorized. They are
1320    *        Automatically divided into train and test data. 80% for training and
1321    *        20% for testing.
1322    * *   `TEXT_SNIPPET` and `GCS_FILE_PATH` are distinguished by a pattern. If
1323    *     the column content is a valid  Google Cloud Storage file path, that is,
1324    *     prefixed by "gs://", it is treated as a `GCS_FILE_PATH`. Otherwise, if
1325    *     the content is enclosed in double quotes (""), it is treated as a
1326    *     `TEXT_SNIPPET`. For `GCS_FILE_PATH`, the path must lead to a
1327    *     file with supported extension and UTF-8 encoding, for example,
1328    *     "gs://folder/content.txt" AutoML imports the file content
1329    *     as a text snippet. For `TEXT_SNIPPET`, AutoML imports the column content
1330    *     excluding quotes. In both cases, size of the content must be 128kB or
1331    *     less in size. For zip files, the size of each file inside the zip must be
1332    *     128kB or less in size.
1333    *     The `ML_USE` and `SENTIMENT` columns are optional.
1334    *     Supported file extensions: .TXT, .PDF, .TIF, .TIFF, .ZIP
1335    * *  `SENTIMENT` - An integer between 0 and
1336    *     Dataset.text_sentiment_dataset_metadata.sentiment_max
1337    *     (inclusive). Describes the ordinal of the sentiment - higher
1338    *     value means a more positive sentiment. All the values are
1339    *     completely relative, i.e. neither 0 needs to mean a negative or
1340    *     neutral sentiment nor sentiment_max needs to mean a positive one -
1341    *     it is just required that 0 is the least positive sentiment
1342    *     in the data, and sentiment_max is the  most positive one.
1343    *     The SENTIMENT shouldn't be confused with "score" or "magnitude"
1344    *     from the previous Natural Language Sentiment Analysis API.
1345    *     All SENTIMENT values between 0 and sentiment_max must be
1346    *     represented in the imported data. On prediction the same 0 to
1347    *     sentiment_max range will be used. The difference between
1348    *     neighboring sentiment values needs not to be uniform, e.g. 1 and
1349    *     2 may be similar whereas the difference between 2 and 3 may be
1350    *     large.
1351    * Sample rows:
1352    *     TRAIN,"&#64;freewrytin this is way too good for your product",2
1353    *     gs://folder/content.txt,3
1354    *     TEST,gs://folder/document.pdf
1355    *     VALIDATE,gs://folder/text_files.zip,2
1356    *   &lt;/section&gt;
1357    * &lt;/div&gt;
1358    * &lt;h4&gt;AutoML Tables&lt;/h4&gt;&lt;div class="ui-datasection-main"&gt;&lt;section
1359    * class="selected"&gt;
1360    * See [Preparing your training
1361    * data](https://cloud.google.com/automl-tables/docs/prepare) for more
1362    * information.
1363    * You can use either
1364    * [gcs_source][google.cloud.automl.v1.InputConfig.gcs_source] or
1365    * [bigquery_source][google.cloud.automl.v1.InputConfig.bigquery_source].
1366    * All input is concatenated into a
1367    * single
1368    * [primary_table_spec_id][google.cloud.automl.v1.TablesDatasetMetadata.primary_table_spec_id]
1369    * **For gcs_source:**
1370    * CSV file(s), where the first row of the first file is the header,
1371    * containing unique column names. If the first row of a subsequent
1372    * file is the same as the header, then it is also treated as a
1373    * header. All other rows contain values for the corresponding
1374    * columns.
1375    * Each .CSV file by itself must be 10GB or smaller, and their total
1376    * size must be 100GB or smaller.
1377    * First three sample rows of a CSV file:
1378    * &lt;pre&gt;
1379    * "Id","First Name","Last Name","Dob","Addresses"
1380    * "1","John","Doe","1968-01-22","[{"status":"current","address":"123_First_Avenue","city":"Seattle","state":"WA","zip":"11111","numberOfYears":"1"},{"status":"previous","address":"456_Main_Street","city":"Portland","state":"OR","zip":"22222","numberOfYears":"5"}]"
   * "2","Jane","Doe","1980-10-16","[{"status":"current","address":"789_Any_Avenue","city":"Albany","state":"NY","zip":"33333","numberOfYears":"2"},{"status":"previous","address":"321_Main_Street","city":"Hoboken","state":"NJ","zip":"44444","numberOfYears":"3"}]"
1382    * &lt;/pre&gt;
1383    * **For bigquery_source:**
1384    * An URI of a BigQuery table. The user data size of the BigQuery
1385    * table must be 100GB or smaller.
1386    * An imported table must have between 2 and 1,000 columns, inclusive,
   * and between 1,000 and 100,000,000 rows, inclusive. At most 5 import data
   * operations can run in parallel.
1389    *   &lt;/section&gt;
1390    * &lt;/div&gt;
1391    * **Input field definitions:**
1392    * `ML_USE`
1393    * : ("TRAIN" | "VALIDATE" | "TEST" | "UNASSIGNED")
1394    *   Describes how the given example (file) should be used for model
1395    *   training. "UNASSIGNED" can be used when user has no preference.
1396    * `GCS_FILE_PATH`
1397    * : The path to a file on Google Cloud Storage. For example,
1398    *   "gs://folder/image1.png".
1399    * `LABEL`
1400    * : A display name of an object on an image, video etc., e.g. "dog".
1401    *   Must be up to 32 characters long and can consist only of ASCII
1402    *   Latin letters A-Z and a-z, underscores(_), and ASCII digits 0-9.
1403    *   For each label an AnnotationSpec is created which display_name
1404    *   becomes the label; AnnotationSpecs are given back in predictions.
1405    * `INSTANCE_ID`
1406    * : A positive integer that identifies a specific instance of a
1407    *   labeled entity on an example. Used e.g. to track two cars on
1408    *   a video while being able to tell apart which one is which.
1409    * `BOUNDING_BOX`
1410    * : (`VERTEX,VERTEX,VERTEX,VERTEX` | `VERTEX,,,VERTEX,,`)
1411    *   A rectangle parallel to the frame of the example (image,
1412    *   video). If 4 vertices are given they are connected by edges
1413    *   in the order provided, if 2 are given they are recognized
1414    *   as diagonally opposite vertices of the rectangle.
1415    * `VERTEX`
1416    * : (`COORDINATE,COORDINATE`)
1417    *   First coordinate is horizontal (x), the second is vertical (y).
1418    * `COORDINATE`
1419    * : A float in 0 to 1 range, relative to total length of
1420    *   image or video in given dimension. For fractions the
1421    *   leading non-decimal 0 can be omitted (i.e. 0.3 = .3).
1422    *   Point 0,0 is in top left.
1423    * `TIME_SEGMENT_START`
1424    * : (`TIME_OFFSET`)
1425    *   Expresses a beginning, inclusive, of a time segment
1426    *   within an example that has a time dimension
1427    *   (e.g. video).
1428    * `TIME_SEGMENT_END`
1429    * : (`TIME_OFFSET`)
1430    *   Expresses an end, exclusive, of a time segment within
   *   an example that has a time dimension (e.g. video).
1432    * `TIME_OFFSET`
1433    * : A number of seconds as measured from the start of an
1434    *   example (e.g. video). Fractions are allowed, up to a
1435    *   microsecond precision. "inf" is allowed, and it means the end
1436    *   of the example.
1437    * `TEXT_SNIPPET`
1438    * : The content of a text snippet, UTF-8 encoded, enclosed within
1439    *   double quotes ("").
1440    * `DOCUMENT`
1441    * : A field that provides the textual content with document and the layout
1442    *   information.
1443    *  **Errors:**
1444    *  If any of the provided CSV files can't be parsed or if more than certain
1445    *  percent of CSV rows cannot be processed then the operation fails and
1446    *  nothing is imported. Regardless of overall success or failure the per-row
1447    *  failures, up to a certain count cap, is listed in
1448    *  Operation.metadata.partial_failures.
1449    * </pre>
1450    *
1451    * Protobuf type {@code google.cloud.automl.v1.InputConfig}
1452    */
1453   public static final class Builder extends com.google.protobuf.GeneratedMessageV3.Builder<Builder>
1454       implements
1455       // @@protoc_insertion_point(builder_implements:google.cloud.automl.v1.InputConfig)
1456       com.google.cloud.automl.v1.InputConfigOrBuilder {
    // Returns the message descriptor for InputConfig, loaded from the generated
    // Io descriptor holder class.
    public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
      return com.google.cloud.automl.v1.Io
          .internal_static_google_cloud_automl_v1_InputConfig_descriptor;
    }

    // Reflection hook used by the protobuf runtime to access map fields by
    // field number. Field 2 is the `params` map; any other number is a
    // programming error in the runtime/generated code.
    @SuppressWarnings({"rawtypes"})
    protected com.google.protobuf.MapField internalGetMapField(int number) {
      switch (number) {
        case 2:
          return internalGetParams();
        default:
          throw new RuntimeException("Invalid map field number: " + number);
      }
    }

    // Mutable counterpart of internalGetMapField, used by the runtime when it
    // needs to write into the `params` map (field 2).
    @SuppressWarnings({"rawtypes"})
    protected com.google.protobuf.MapField internalGetMutableMapField(int number) {
      switch (number) {
        case 2:
          return internalGetMutableParams();
        default:
          throw new RuntimeException("Invalid map field number: " + number);
      }
    }

    // Wires the InputConfig message/builder classes to their generated field
    // accessor table so reflective field access works.
    @java.lang.Override
    protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
        internalGetFieldAccessorTable() {
      return com.google.cloud.automl.v1.Io
          .internal_static_google_cloud_automl_v1_InputConfig_fieldAccessorTable
          .ensureFieldAccessorsInitialized(
              com.google.cloud.automl.v1.InputConfig.class,
              com.google.cloud.automl.v1.InputConfig.Builder.class);
    }
1491 
    // Construct using com.google.cloud.automl.v1.InputConfig.newBuilder()
    // Constructors are private; builders are obtained via the factory methods above.
    private Builder() {}

    // Constructor used when this builder is a child of another builder tree;
    // the parent is notified of changes via onChanged().
    private Builder(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
      super(parent);
    }
1498 
    // Resets every field of this builder to its default state: clears the
    // presence bits, the nested gcs_source builder (if any), the params map,
    // and the `source` oneof.
    @java.lang.Override
    public Builder clear() {
      super.clear();
      bitField0_ = 0;
      if (gcsSourceBuilder_ != null) {
        gcsSourceBuilder_.clear();
      }
      internalGetMutableParams().clear();
      sourceCase_ = 0;
      source_ = null;
      return this;
    }

    // Descriptor for the message type this builder produces (InputConfig).
    @java.lang.Override
    public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() {
      return com.google.cloud.automl.v1.Io
          .internal_static_google_cloud_automl_v1_InputConfig_descriptor;
    }

    // Default (empty) InputConfig instance.
    @java.lang.Override
    public com.google.cloud.automl.v1.InputConfig getDefaultInstanceForType() {
      return com.google.cloud.automl.v1.InputConfig.getDefaultInstance();
    }
1522 
    // Builds the message, throwing if any required field is unset. For this
    // message isInitialized() is always true, so the throw branch is
    // effectively unreachable but kept for the Message.Builder contract.
    @java.lang.Override
    public com.google.cloud.automl.v1.InputConfig build() {
      com.google.cloud.automl.v1.InputConfig result = buildPartial();
      if (!result.isInitialized()) {
        throw newUninitializedMessageException(result);
      }
      return result;
    }

    // Builds the message without the initialization check, copying regular
    // fields and oneof state from this builder into a fresh instance.
    @java.lang.Override
    public com.google.cloud.automl.v1.InputConfig buildPartial() {
      com.google.cloud.automl.v1.InputConfig result =
          new com.google.cloud.automl.v1.InputConfig(this);
      if (bitField0_ != 0) {
        buildPartial0(result);
      }
      buildPartialOneofs(result);
      onBuilt();
      return result;
    }

    // Copies non-oneof fields into `result`. Bit 0x2 tracks whether the
    // `params` map has been touched; the map is frozen before being shared
    // with the immutable message.
    private void buildPartial0(com.google.cloud.automl.v1.InputConfig result) {
      int from_bitField0_ = bitField0_;
      if (((from_bitField0_ & 0x00000002) != 0)) {
        result.params_ = internalGetParams();
        result.params_.makeImmutable();
      }
    }

    // Copies the `source` oneof into `result`. If the gcs_source case is
    // active through a nested builder, build that message now so the result
    // holds an immutable GcsSource rather than a builder.
    private void buildPartialOneofs(com.google.cloud.automl.v1.InputConfig result) {
      result.sourceCase_ = sourceCase_;
      result.source_ = this.source_;
      if (sourceCase_ == 1 && gcsSourceBuilder_ != null) {
        result.source_ = gcsSourceBuilder_.build();
      }
    }
1559 
    // The overrides below simply delegate to GeneratedMessageV3.Builder; they
    // are regenerated here so the protoc insertion points and covariant return
    // type (Builder) are preserved.
    @java.lang.Override
    public Builder clone() {
      return super.clone();
    }

    @java.lang.Override
    public Builder setField(
        com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
      return super.setField(field, value);
    }

    @java.lang.Override
    public Builder clearField(com.google.protobuf.Descriptors.FieldDescriptor field) {
      return super.clearField(field);
    }

    @java.lang.Override
    public Builder clearOneof(com.google.protobuf.Descriptors.OneofDescriptor oneof) {
      return super.clearOneof(oneof);
    }

    @java.lang.Override
    public Builder setRepeatedField(
        com.google.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) {
      return super.setRepeatedField(field, index, value);
    }

    @java.lang.Override
    public Builder addRepeatedField(
        com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
      return super.addRepeatedField(field, value);
    }
1592 
    // Generic merge entry point: dispatches to the typed overload when `other`
    // is an InputConfig, otherwise falls back to reflective field-by-field merge.
    @java.lang.Override
    public Builder mergeFrom(com.google.protobuf.Message other) {
      if (other instanceof com.google.cloud.automl.v1.InputConfig) {
        return mergeFrom((com.google.cloud.automl.v1.InputConfig) other);
      } else {
        super.mergeFrom(other);
        return this;
      }
    }

    // Merges another InputConfig into this builder: params map entries are
    // merged (and the params presence bit set), the `source` oneof is merged
    // per its active case, and unknown fields are carried over.
    public Builder mergeFrom(com.google.cloud.automl.v1.InputConfig other) {
      if (other == com.google.cloud.automl.v1.InputConfig.getDefaultInstance()) return this;
      internalGetMutableParams().mergeFrom(other.internalGetParams());
      bitField0_ |= 0x00000002;
      switch (other.getSourceCase()) {
        case GCS_SOURCE:
          {
            mergeGcsSource(other.getGcsSource());
            break;
          }
        case SOURCE_NOT_SET:
          {
            // Nothing to merge when the other message has no source set.
            break;
          }
      }
      this.mergeUnknownFields(other.getUnknownFields());
      onChanged();
      return this;
    }

    // InputConfig has no required fields, so a builder is always initialized.
    @java.lang.Override
    public final boolean isInitialized() {
      return true;
    }
1627 
    // Parses serialized InputConfig data from the wire, one tag at a time,
    // merging it into this builder. Tag 10 = field 1 (gcs_source, length-
    // delimited); tag 18 = field 2 (a params map entry). Unrecognized tags go
    // to the unknown-field set.
    @java.lang.Override
    public Builder mergeFrom(
        com.google.protobuf.CodedInputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      if (extensionRegistry == null) {
        throw new java.lang.NullPointerException();
      }
      try {
        boolean done = false;
        while (!done) {
          int tag = input.readTag();
          switch (tag) {
            case 0:
              // Tag 0 marks end of input.
              done = true;
              break;
            case 10:
              {
                // Field 1: parse the GcsSource submessage into its builder and
                // activate the gcs_source oneof case.
                input.readMessage(getGcsSourceFieldBuilder().getBuilder(), extensionRegistry);
                sourceCase_ = 1;
                break;
              } // case 10
            case 18:
              {
                // Field 2: each wire entry is one key/value pair of the
                // `params` map; later entries overwrite earlier ones.
                com.google.protobuf.MapEntry<java.lang.String, java.lang.String> params__ =
                    input.readMessage(
                        ParamsDefaultEntryHolder.defaultEntry.getParserForType(),
                        extensionRegistry);
                internalGetMutableParams()
                    .getMutableMap()
                    .put(params__.getKey(), params__.getValue());
                bitField0_ |= 0x00000002;
                break;
              } // case 18
            default:
              {
                if (!super.parseUnknownField(input, extensionRegistry, tag)) {
                  done = true; // was an endgroup tag
                }
                break;
              } // default:
          } // switch (tag)
        } // while (!done)
      } catch (com.google.protobuf.InvalidProtocolBufferException e) {
        throw e.unwrapIOException();
      } finally {
        // Notify the parent builder even if parsing failed partway through,
        // since fields may already have been modified.
        onChanged();
      } // finally
      return this;
    }
1678 
    // Which case of the `source` oneof is set: 0 = none, 1 = gcs_source.
    private int sourceCase_ = 0;
    // Value of the active `source` oneof case (a GcsSource when sourceCase_ == 1).
    private java.lang.Object source_;

    // Returns the enum constant for the active `source` oneof case.
    public SourceCase getSourceCase() {
      return SourceCase.forNumber(sourceCase_);
    }

    // Clears the `source` oneof entirely, regardless of which case was set.
    public Builder clearSource() {
      sourceCase_ = 0;
      source_ = null;
      onChanged();
      return this;
    }

    // Presence bits for non-oneof fields; bit 0x2 tracks the `params` map.
    private int bitField0_;
1694 
    // Lazily-created nested builder for the gcs_source field. While null, the
    // field value (if any) lives directly in `source_`.
    private com.google.protobuf.SingleFieldBuilderV3<
            com.google.cloud.automl.v1.GcsSource,
            com.google.cloud.automl.v1.GcsSource.Builder,
            com.google.cloud.automl.v1.GcsSourceOrBuilder>
        gcsSourceBuilder_;
1700     /**
1701      *
1702      *
1703      * <pre>
1704      * The Google Cloud Storage location for the input content.
1705      * For [AutoMl.ImportData][google.cloud.automl.v1.AutoMl.ImportData], `gcs_source` points to a CSV file with
1706      * a structure described in [InputConfig][google.cloud.automl.v1.InputConfig].
1707      * </pre>
1708      *
1709      * <code>.google.cloud.automl.v1.GcsSource gcs_source = 1;</code>
1710      *
1711      * @return Whether the gcsSource field is set.
1712      */
    @java.lang.Override
    public boolean hasGcsSource() {
      // gcs_source is set exactly when its oneof case (1) is active.
      return sourceCase_ == 1;
    }
1717     /**
1718      *
1719      *
1720      * <pre>
1721      * The Google Cloud Storage location for the input content.
1722      * For [AutoMl.ImportData][google.cloud.automl.v1.AutoMl.ImportData], `gcs_source` points to a CSV file with
1723      * a structure described in [InputConfig][google.cloud.automl.v1.InputConfig].
1724      * </pre>
1725      *
1726      * <code>.google.cloud.automl.v1.GcsSource gcs_source = 1;</code>
1727      *
1728      * @return The gcsSource.
1729      */
    @java.lang.Override
    public com.google.cloud.automl.v1.GcsSource getGcsSource() {
      // Read from the plain oneof value or from the nested builder, whichever
      // currently owns the field; fall back to the default instance when the
      // gcs_source case is not active.
      if (gcsSourceBuilder_ == null) {
        if (sourceCase_ == 1) {
          return (com.google.cloud.automl.v1.GcsSource) source_;
        }
        return com.google.cloud.automl.v1.GcsSource.getDefaultInstance();
      } else {
        if (sourceCase_ == 1) {
          return gcsSourceBuilder_.getMessage();
        }
        return com.google.cloud.automl.v1.GcsSource.getDefaultInstance();
      }
    }
1744     /**
1745      *
1746      *
1747      * <pre>
1748      * The Google Cloud Storage location for the input content.
1749      * For [AutoMl.ImportData][google.cloud.automl.v1.AutoMl.ImportData], `gcs_source` points to a CSV file with
1750      * a structure described in [InputConfig][google.cloud.automl.v1.InputConfig].
1751      * </pre>
1752      *
1753      * <code>.google.cloud.automl.v1.GcsSource gcs_source = 1;</code>
1754      */
    public Builder setGcsSource(com.google.cloud.automl.v1.GcsSource value) {
      // Store the value directly or through the nested builder, then mark the
      // gcs_source oneof case active.
      if (gcsSourceBuilder_ == null) {
        if (value == null) {
          throw new NullPointerException();
        }
        source_ = value;
        onChanged();
      } else {
        gcsSourceBuilder_.setMessage(value);
      }
      sourceCase_ = 1;
      return this;
    }
1768     /**
1769      *
1770      *
1771      * <pre>
1772      * The Google Cloud Storage location for the input content.
1773      * For [AutoMl.ImportData][google.cloud.automl.v1.AutoMl.ImportData], `gcs_source` points to a CSV file with
1774      * a structure described in [InputConfig][google.cloud.automl.v1.InputConfig].
1775      * </pre>
1776      *
1777      * <code>.google.cloud.automl.v1.GcsSource gcs_source = 1;</code>
1778      */
    public Builder setGcsSource(com.google.cloud.automl.v1.GcsSource.Builder builderForValue) {
      // Overload taking a sub-builder: build it immediately and store the
      // resulting message, activating the gcs_source oneof case.
      if (gcsSourceBuilder_ == null) {
        source_ = builderForValue.build();
        onChanged();
      } else {
        gcsSourceBuilder_.setMessage(builderForValue.build());
      }
      sourceCase_ = 1;
      return this;
    }
1789     /**
1790      *
1791      *
1792      * <pre>
1793      * The Google Cloud Storage location for the input content.
1794      * For [AutoMl.ImportData][google.cloud.automl.v1.AutoMl.ImportData], `gcs_source` points to a CSV file with
1795      * a structure described in [InputConfig][google.cloud.automl.v1.InputConfig].
1796      * </pre>
1797      *
1798      * <code>.google.cloud.automl.v1.GcsSource gcs_source = 1;</code>
1799      */
    public Builder mergeGcsSource(com.google.cloud.automl.v1.GcsSource value) {
      // Merge semantics: if gcs_source is already set to a non-default value,
      // field-merge `value` into it; otherwise replace it outright. Either way
      // the gcs_source oneof case ends up active.
      if (gcsSourceBuilder_ == null) {
        if (sourceCase_ == 1
            && source_ != com.google.cloud.automl.v1.GcsSource.getDefaultInstance()) {
          source_ =
              com.google.cloud.automl.v1.GcsSource.newBuilder(
                      (com.google.cloud.automl.v1.GcsSource) source_)
                  .mergeFrom(value)
                  .buildPartial();
        } else {
          source_ = value;
        }
        onChanged();
      } else {
        if (sourceCase_ == 1) {
          gcsSourceBuilder_.mergeFrom(value);
        } else {
          gcsSourceBuilder_.setMessage(value);
        }
      }
      sourceCase_ = 1;
      return this;
    }
1823     /**
1824      *
1825      *
1826      * <pre>
1827      * The Google Cloud Storage location for the input content.
1828      * For [AutoMl.ImportData][google.cloud.automl.v1.AutoMl.ImportData], `gcs_source` points to a CSV file with
1829      * a structure described in [InputConfig][google.cloud.automl.v1.InputConfig].
1830      * </pre>
1831      *
1832      * <code>.google.cloud.automl.v1.GcsSource gcs_source = 1;</code>
1833      */
    public Builder clearGcsSource() {
      // Clear the gcs_source value and deactivate its oneof case; if a nested
      // builder exists, reset it too so no stale state survives.
      if (gcsSourceBuilder_ == null) {
        if (sourceCase_ == 1) {
          sourceCase_ = 0;
          source_ = null;
          onChanged();
        }
      } else {
        if (sourceCase_ == 1) {
          sourceCase_ = 0;
          source_ = null;
        }
        gcsSourceBuilder_.clear();
      }
      return this;
    }
1850     /**
1851      *
1852      *
1853      * <pre>
1854      * The Google Cloud Storage location for the input content.
1855      * For [AutoMl.ImportData][google.cloud.automl.v1.AutoMl.ImportData], `gcs_source` points to a CSV file with
1856      * a structure described in [InputConfig][google.cloud.automl.v1.InputConfig].
1857      * </pre>
1858      *
1859      * <code>.google.cloud.automl.v1.GcsSource gcs_source = 1;</code>
1860      */
    public com.google.cloud.automl.v1.GcsSource.Builder getGcsSourceBuilder() {
      // Forces creation of the nested field builder (activating the oneof case).
      return getGcsSourceFieldBuilder().getBuilder();
    }
1864     /**
1865      *
1866      *
1867      * <pre>
1868      * The Google Cloud Storage location for the input content.
1869      * For [AutoMl.ImportData][google.cloud.automl.v1.AutoMl.ImportData], `gcs_source` points to a CSV file with
1870      * a structure described in [InputConfig][google.cloud.automl.v1.InputConfig].
1871      * </pre>
1872      *
1873      * <code>.google.cloud.automl.v1.GcsSource gcs_source = 1;</code>
1874      */
    @java.lang.Override
    public com.google.cloud.automl.v1.GcsSourceOrBuilder getGcsSourceOrBuilder() {
      // Prefer the live nested builder when one exists for the active case;
      // otherwise return the stored message or the default instance.
      if ((sourceCase_ == 1) && (gcsSourceBuilder_ != null)) {
        return gcsSourceBuilder_.getMessageOrBuilder();
      } else {
        if (sourceCase_ == 1) {
          return (com.google.cloud.automl.v1.GcsSource) source_;
        }
        return com.google.cloud.automl.v1.GcsSource.getDefaultInstance();
      }
    }
1886     /**
1887      *
1888      *
1889      * <pre>
1890      * The Google Cloud Storage location for the input content.
1891      * For [AutoMl.ImportData][google.cloud.automl.v1.AutoMl.ImportData], `gcs_source` points to a CSV file with
1892      * a structure described in [InputConfig][google.cloud.automl.v1.InputConfig].
1893      * </pre>
1894      *
1895      * <code>.google.cloud.automl.v1.GcsSource gcs_source = 1;</code>
1896      */
    private com.google.protobuf.SingleFieldBuilderV3<
            com.google.cloud.automl.v1.GcsSource,
            com.google.cloud.automl.v1.GcsSource.Builder,
            com.google.cloud.automl.v1.GcsSourceOrBuilder>
        getGcsSourceFieldBuilder() {
      // Lazily create the nested field builder, seeding it with the current
      // gcs_source value (or the default instance if another case is active).
      // Once created, the builder owns the field state, so `source_` is nulled
      // out and the gcs_source oneof case is marked active.
      if (gcsSourceBuilder_ == null) {
        if (!(sourceCase_ == 1)) {
          source_ = com.google.cloud.automl.v1.GcsSource.getDefaultInstance();
        }
        gcsSourceBuilder_ =
            new com.google.protobuf.SingleFieldBuilderV3<
                com.google.cloud.automl.v1.GcsSource,
                com.google.cloud.automl.v1.GcsSource.Builder,
                com.google.cloud.automl.v1.GcsSourceOrBuilder>(
                (com.google.cloud.automl.v1.GcsSource) source_, getParentForChildren(), isClean());
        source_ = null;
      }
      sourceCase_ = 1;
      onChanged();
      return gcsSourceBuilder_;
    }
1918 
    // Backing storage for the `params` map field; null until first use.
    private com.google.protobuf.MapField<java.lang.String, java.lang.String> params_;

    // Read-only access to the params map; returns a shared empty map while the
    // field is still unallocated.
    private com.google.protobuf.MapField<java.lang.String, java.lang.String> internalGetParams() {
      if (params_ == null) {
        return com.google.protobuf.MapField.emptyMapField(ParamsDefaultEntryHolder.defaultEntry);
      }
      return params_;
    }

    // Mutable access to the params map: allocates it on first use, copies it
    // if a previously-built message froze it (copy-on-write), sets the
    // presence bit, and notifies the parent builder.
    private com.google.protobuf.MapField<java.lang.String, java.lang.String>
        internalGetMutableParams() {
      if (params_ == null) {
        params_ = com.google.protobuf.MapField.newMapField(ParamsDefaultEntryHolder.defaultEntry);
      }
      if (!params_.isMutable()) {
        params_ = params_.copy();
      }
      bitField0_ |= 0x00000002;
      onChanged();
      return params_;
    }
1940 
    // Number of entries currently in the `params` map.
    public int getParamsCount() {
      return internalGetParams().getMap().size();
    }
1944     /**
1945      *
1946      *
1947      * <pre>
1948      * Additional domain-specific parameters describing the semantic of the
1949      * imported data, any string must be up to 25000
1950      * characters long.
1951      * &lt;h4&gt;AutoML Tables&lt;/h4&gt;
1952      * `schema_inference_version`
1953      * : (integer) This value must be supplied.
1954      *   The version of the
1955      *   algorithm to use for the initial inference of the
1956      *   column data types of the imported table. Allowed values: "1".
1957      * </pre>
1958      *
1959      * <code>map&lt;string, string&gt; params = 2;</code>
1960      */
    @java.lang.Override
    public boolean containsParams(java.lang.String key) {
      // Null keys are rejected up front; protobuf map accessors are null-hostile.
      if (key == null) {
        throw new NullPointerException("map key");
      }
      return internalGetParams().getMap().containsKey(key);
    }
    /** Use {@link #getParamsMap()} instead. */
    @java.lang.Override
    @java.lang.Deprecated
    public java.util.Map<java.lang.String, java.lang.String> getParams() {
      return getParamsMap();
    }
1974     /**
1975      *
1976      *
1977      * <pre>
1978      * Additional domain-specific parameters describing the semantic of the
1979      * imported data, any string must be up to 25000
1980      * characters long.
1981      * &lt;h4&gt;AutoML Tables&lt;/h4&gt;
1982      * `schema_inference_version`
1983      * : (integer) This value must be supplied.
1984      *   The version of the
1985      *   algorithm to use for the initial inference of the
1986      *   column data types of the imported table. Allowed values: "1".
1987      * </pre>
1988      *
1989      * <code>map&lt;string, string&gt; params = 2;</code>
1990      */
    @java.lang.Override
    public java.util.Map<java.lang.String, java.lang.String> getParamsMap() {
      // View of the params map backed by the MapField storage.
      return internalGetParams().getMap();
    }
1995     /**
1996      *
1997      *
1998      * <pre>
1999      * Additional domain-specific parameters describing the semantic of the
2000      * imported data, any string must be up to 25000
2001      * characters long.
2002      * &lt;h4&gt;AutoML Tables&lt;/h4&gt;
2003      * `schema_inference_version`
2004      * : (integer) This value must be supplied.
2005      *   The version of the
2006      *   algorithm to use for the initial inference of the
2007      *   column data types of the imported table. Allowed values: "1".
2008      * </pre>
2009      *
2010      * <code>map&lt;string, string&gt; params = 2;</code>
2011      */
2012     @java.lang.Override
getParamsOrDefault( java.lang.String key, java.lang.String defaultValue)2013     public /* nullable */ java.lang.String getParamsOrDefault(
2014         java.lang.String key,
2015         /* nullable */
2016         java.lang.String defaultValue) {
2017       if (key == null) {
2018         throw new NullPointerException("map key");
2019       }
2020       java.util.Map<java.lang.String, java.lang.String> map = internalGetParams().getMap();
2021       return map.containsKey(key) ? map.get(key) : defaultValue;
2022     }
2023     /**
2024      *
2025      *
2026      * <pre>
2027      * Additional domain-specific parameters describing the semantic of the
2028      * imported data, any string must be up to 25000
2029      * characters long.
2030      * &lt;h4&gt;AutoML Tables&lt;/h4&gt;
2031      * `schema_inference_version`
2032      * : (integer) This value must be supplied.
2033      *   The version of the
2034      *   algorithm to use for the initial inference of the
2035      *   column data types of the imported table. Allowed values: "1".
2036      * </pre>
2037      *
2038      * <code>map&lt;string, string&gt; params = 2;</code>
2039      */
2040     @java.lang.Override
getParamsOrThrow(java.lang.String key)2041     public java.lang.String getParamsOrThrow(java.lang.String key) {
2042       if (key == null) {
2043         throw new NullPointerException("map key");
2044       }
2045       java.util.Map<java.lang.String, java.lang.String> map = internalGetParams().getMap();
2046       if (!map.containsKey(key)) {
2047         throw new java.lang.IllegalArgumentException();
2048       }
2049       return map.get(key);
2050     }
2051 
clearParams()2052     public Builder clearParams() {
2053       bitField0_ = (bitField0_ & ~0x00000002);
2054       internalGetMutableParams().getMutableMap().clear();
2055       return this;
2056     }
2057     /**
2058      *
2059      *
2060      * <pre>
2061      * Additional domain-specific parameters describing the semantic of the
2062      * imported data, any string must be up to 25000
2063      * characters long.
2064      * &lt;h4&gt;AutoML Tables&lt;/h4&gt;
2065      * `schema_inference_version`
2066      * : (integer) This value must be supplied.
2067      *   The version of the
2068      *   algorithm to use for the initial inference of the
2069      *   column data types of the imported table. Allowed values: "1".
2070      * </pre>
2071      *
2072      * <code>map&lt;string, string&gt; params = 2;</code>
2073      */
removeParams(java.lang.String key)2074     public Builder removeParams(java.lang.String key) {
2075       if (key == null) {
2076         throw new NullPointerException("map key");
2077       }
2078       internalGetMutableParams().getMutableMap().remove(key);
2079       return this;
2080     }
2081     /** Use alternate mutation accessors instead. */
2082     @java.lang.Deprecated
getMutableParams()2083     public java.util.Map<java.lang.String, java.lang.String> getMutableParams() {
2084       bitField0_ |= 0x00000002;
2085       return internalGetMutableParams().getMutableMap();
2086     }
2087     /**
2088      *
2089      *
2090      * <pre>
2091      * Additional domain-specific parameters describing the semantic of the
2092      * imported data, any string must be up to 25000
2093      * characters long.
2094      * &lt;h4&gt;AutoML Tables&lt;/h4&gt;
2095      * `schema_inference_version`
2096      * : (integer) This value must be supplied.
2097      *   The version of the
2098      *   algorithm to use for the initial inference of the
2099      *   column data types of the imported table. Allowed values: "1".
2100      * </pre>
2101      *
2102      * <code>map&lt;string, string&gt; params = 2;</code>
2103      */
putParams(java.lang.String key, java.lang.String value)2104     public Builder putParams(java.lang.String key, java.lang.String value) {
2105       if (key == null) {
2106         throw new NullPointerException("map key");
2107       }
2108       if (value == null) {
2109         throw new NullPointerException("map value");
2110       }
2111       internalGetMutableParams().getMutableMap().put(key, value);
2112       bitField0_ |= 0x00000002;
2113       return this;
2114     }
2115     /**
2116      *
2117      *
2118      * <pre>
2119      * Additional domain-specific parameters describing the semantic of the
2120      * imported data, any string must be up to 25000
2121      * characters long.
2122      * &lt;h4&gt;AutoML Tables&lt;/h4&gt;
2123      * `schema_inference_version`
2124      * : (integer) This value must be supplied.
2125      *   The version of the
2126      *   algorithm to use for the initial inference of the
2127      *   column data types of the imported table. Allowed values: "1".
2128      * </pre>
2129      *
2130      * <code>map&lt;string, string&gt; params = 2;</code>
2131      */
putAllParams(java.util.Map<java.lang.String, java.lang.String> values)2132     public Builder putAllParams(java.util.Map<java.lang.String, java.lang.String> values) {
2133       internalGetMutableParams().getMutableMap().putAll(values);
2134       bitField0_ |= 0x00000002;
2135       return this;
2136     }
2137 
    @java.lang.Override
    public final Builder setUnknownFields(final com.google.protobuf.UnknownFieldSet unknownFields) {
      // Plain delegation to the generated-message base class.
      return super.setUnknownFields(unknownFields);
    }
2142 
    @java.lang.Override
    public final Builder mergeUnknownFields(
        final com.google.protobuf.UnknownFieldSet unknownFields) {
      // Plain delegation to the generated-message base class.
      return super.mergeUnknownFields(unknownFields);
    }
2148 
2149     // @@protoc_insertion_point(builder_scope:google.cloud.automl.v1.InputConfig)
2150   }
2151 
2152   // @@protoc_insertion_point(class_scope:google.cloud.automl.v1.InputConfig)
  // Singleton default instance, returned by getDefaultInstance() and
  // getDefaultInstanceForType().
  private static final com.google.cloud.automl.v1.InputConfig DEFAULT_INSTANCE;

  static {
    DEFAULT_INSTANCE = new com.google.cloud.automl.v1.InputConfig();
  }
2158 
getDefaultInstance()2159   public static com.google.cloud.automl.v1.InputConfig getDefaultInstance() {
2160     return DEFAULT_INSTANCE;
2161   }
2162 
  // Shared parser for InputConfig. On any failure the partially-built message
  // is attached via setUnfinishedMessage so callers can inspect what was read.
  private static final com.google.protobuf.Parser<InputConfig> PARSER =
      new com.google.protobuf.AbstractParser<InputConfig>() {
        @java.lang.Override
        public InputConfig parsePartialFrom(
            com.google.protobuf.CodedInputStream input,
            com.google.protobuf.ExtensionRegistryLite extensionRegistry)
            throws com.google.protobuf.InvalidProtocolBufferException {
          Builder builder = newBuilder();
          try {
            builder.mergeFrom(input, extensionRegistry);
          } catch (com.google.protobuf.InvalidProtocolBufferException e) {
            // Re-throw with the partial message attached.
            throw e.setUnfinishedMessage(builder.buildPartial());
          } catch (com.google.protobuf.UninitializedMessageException e) {
            // Convert to the checked protobuf exception type, keeping the partial.
            throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial());
          } catch (java.io.IOException e) {
            // Wrap raw stream errors in the protobuf exception type.
            throw new com.google.protobuf.InvalidProtocolBufferException(e)
                .setUnfinishedMessage(builder.buildPartial());
          }
          return builder.buildPartial();
        }
      };
2184 
parser()2185   public static com.google.protobuf.Parser<InputConfig> parser() {
2186     return PARSER;
2187   }
2188 
2189   @java.lang.Override
getParserForType()2190   public com.google.protobuf.Parser<InputConfig> getParserForType() {
2191     return PARSER;
2192   }
2193 
2194   @java.lang.Override
getDefaultInstanceForType()2195   public com.google.cloud.automl.v1.InputConfig getDefaultInstanceForType() {
2196     return DEFAULT_INSTANCE;
2197   }
2198 }
2199