// Copyright 2022 Google LLC
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
//     http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.

syntax = "proto3";

package google.cloud.discoveryengine.v1alpha;

import "google/api/field_behavior.proto";
import "google/api/resource.proto";
import "google/cloud/discoveryengine/v1alpha/completion.proto";
import "google/cloud/discoveryengine/v1alpha/document.proto";
import "google/cloud/discoveryengine/v1alpha/user_event.proto";
import "google/protobuf/field_mask.proto";
import "google/protobuf/timestamp.proto";
import "google/rpc/status.proto";
import "google/type/date.proto";

option csharp_namespace = "Google.Cloud.DiscoveryEngine.V1Alpha";
option go_package = "cloud.google.com/go/discoveryengine/apiv1alpha/discoveryenginepb;discoveryenginepb";
option java_multiple_files = true;
option java_outer_classname = "ImportConfigProto";
option java_package = "com.google.cloud.discoveryengine.v1alpha";
option objc_class_prefix = "DISCOVERYENGINE";
option php_namespace = "Google\\Cloud\\DiscoveryEngine\\V1alpha";
option ruby_package = "Google::Cloud::DiscoveryEngine::V1alpha";

// Cloud Storage location for input content.
message GcsSource {
  // Required. Cloud Storage URIs to input files. URI can be up to
  // 2000 characters long. URIs can match the full object path (for example,
  // `gs://bucket/directory/object.json`) or a pattern matching one or more
  // files, such as `gs://bucket/directory/*.json`.
  //
  // A request can contain at most 100 files (or 100,000 files if `data_schema`
  // is `content`). Each file can be up to 2 GB (or 100 MB if `data_schema` is
  // `content`).
  repeated string input_uris = 1 [(google.api.field_behavior) = REQUIRED];

  // The schema to use when parsing the data from the source.
  //
  // Supported values for document imports:
  //
  // * `document` (default): One JSON
  //   [Document][google.cloud.discoveryengine.v1alpha.Document] per line. Each
  //   document must have a valid
  //   [Document.id][google.cloud.discoveryengine.v1alpha.Document.id].
  // * `content`: Unstructured data (e.g. PDF, HTML). Each file matched by
  //   `input_uris` becomes a document, with the ID set to the first 128
  //   bits of SHA256(URI) encoded as a hex string.
  // * `custom`: One custom data JSON per row in arbitrary format that conforms
  //   to the defined [Schema][google.cloud.discoveryengine.v1alpha.Schema] of
  //   the data store. This can only be used by the GENERIC Data Store vertical.
  // * `csv`: A CSV file with header conforming to the defined
  //   [Schema][google.cloud.discoveryengine.v1alpha.Schema] of the
  //   data store. Each entry after the header is imported as a Document.
  //   This can only be used by the GENERIC Data Store vertical.
  //
  // Supported values for user event imports:
  //
  // * `user_event` (default): One JSON
  //   [UserEvent][google.cloud.discoveryengine.v1alpha.UserEvent] per line.
  string data_schema = 2;
}
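
// Illustrative example (not part of the API definition): a GcsSource for
// importing unstructured files could be populated as follows; the bucket and
// path are hypothetical.
//
//   input_uris: "gs://example-bucket/manuals/*.pdf"
//   data_schema: "content"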

// The BigQuery source to import data from.
message BigQuerySource {
  // BigQuery table partition info. Leave this empty if the BigQuery table
  // is not partitioned.
  oneof partition {
    // BigQuery time partitioned table's _PARTITIONDATE in YYYY-MM-DD format.
    google.type.Date partition_date = 5;
  }

  // The project ID or project number that the BigQuery source is in, with
  // a length limit of 128 characters. If not specified, inherits the project
  // ID from the parent request.
  string project_id = 1;

  // Required. The BigQuery data set to copy the data from with a length limit
  // of 1,024 characters.
  string dataset_id = 2 [(google.api.field_behavior) = REQUIRED];

  // Required. The BigQuery table to copy the data from with a length limit of
  // 1,024 characters.
  string table_id = 3 [(google.api.field_behavior) = REQUIRED];

  // Intermediate Cloud Storage directory used for the import with a length
  // limit of 2,000 characters. Can be specified if one wants to have the
  // BigQuery export to a specific Cloud Storage directory.
  string gcs_staging_dir = 4;

  // The schema to use when parsing the data from the source.
  //
  // Supported values for user event imports:
  //
  // * `user_event` (default): One
  //   [UserEvent][google.cloud.discoveryengine.v1alpha.UserEvent] per row.
  //
  // Supported values for document imports:
  //
  // * `document` (default): One
  //   [Document][google.cloud.discoveryengine.v1alpha.Document] format per
  //   row. Each document must have a valid
  //   [Document.id][google.cloud.discoveryengine.v1alpha.Document.id] and one
  //   of
  //   [Document.json_data][google.cloud.discoveryengine.v1alpha.Document.json_data]
  //   or
  //   [Document.struct_data][google.cloud.discoveryengine.v1alpha.Document.struct_data].
  // * `custom`: One custom data per row in arbitrary format that conforms to
  //   the defined [Schema][google.cloud.discoveryengine.v1alpha.Schema] of the
  //   data store. This can only be used by the GENERIC Data Store vertical.
  string data_schema = 6;
}
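
// Illustrative example (not part of the API definition): a BigQuerySource
// that reads one Document per row from a hypothetical table
// `example-project.example_dataset.documents` could look like:
//
//   project_id: "example-project"
//   dataset_id: "example_dataset"
//   table_id: "documents"
//   data_schema: "document"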

// The Spanner source for importing data.
message SpannerSource {
  // The project ID that the Spanner source is in with a length limit of 128
  // characters. If not specified, inherits the project ID from the parent
  // request.
  string project_id = 1;

  // Required. The instance ID of the source Spanner table.
  string instance_id = 2 [(google.api.field_behavior) = REQUIRED];

  // Required. The database ID of the source Spanner table.
  string database_id = 3 [(google.api.field_behavior) = REQUIRED];

  // Required. The table name of the Spanner database that needs to be imported.
  string table_id = 4 [(google.api.field_behavior) = REQUIRED];

  // Whether to apply data boost on Spanner export. Enabling this option will
  // incur additional cost. More info can be found
  // [here](https://cloud.google.com/spanner/docs/databoost/databoost-overview#billing_and_quotas).
  bool enable_data_boost = 5;
}
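
// Illustrative example (not part of the API definition): a SpannerSource
// pointing at a hypothetical Spanner table, with Data Boost enabled so the
// export runs on independent compute:
//
//   project_id: "example-project"
//   instance_id: "example-instance"
//   database_id: "example-database"
//   table_id: "Documents"
//   enable_data_boost: true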

// The Bigtable Options object that contains information to support
// the import.
message BigtableOptions {
  // The column family of the Bigtable.
  message BigtableColumnFamily {
    // The field name to use for this column family in the document. The
    // name has to match the pattern `[a-zA-Z0-9][a-zA-Z0-9-_]*`. If not set,
    // it is parsed from the family name with best effort. However, due to
    // different naming patterns, field name collisions could happen, where
    // parsing behavior is undefined.
    string field_name = 1;

    // The encoding mode of the values when the type is not STRING.
    // Acceptable encoding values are:
    //
    // * `TEXT`: indicates values are alphanumeric text strings.
    // * `BINARY`: indicates values are encoded using `HBase Bytes.toBytes`
    //   family of functions. This can be overridden for a specific column
    //   by listing that column in `columns` and specifying an encoding for it.
    Encoding encoding = 2;

    // The type of values in this column family.
    // The values are expected to be encoded using `HBase Bytes.toBytes`
    // function when the encoding value is set to `BINARY`.
    Type type = 3;

    // The list of objects that contains column level information for each
    // column. If a column is not present in this list it will be ignored.
    repeated BigtableColumn columns = 4;
  }

  // The column of the Bigtable.
  message BigtableColumn {
    // Required. Qualifier of the column. If it cannot be decoded with utf-8,
    // use a base-64 encoded string instead.
    bytes qualifier = 1 [(google.api.field_behavior) = REQUIRED];

    // The field name to use for this column in the document. The name has to
    // match the pattern `[a-zA-Z0-9][a-zA-Z0-9-_]*`.
    // If not set, it is parsed from the qualifier bytes with best effort.
    // However, due to different naming patterns, field name collisions could
    // happen, where parsing behavior is undefined.
    string field_name = 2;

    // The encoding mode of the values when the type is not `STRING`.
    // Acceptable encoding values are:
    //
    // * `TEXT`: indicates values are alphanumeric text strings.
    // * `BINARY`: indicates values are encoded using `HBase Bytes.toBytes`
    //   family of functions. This can be overridden for a specific column
    //   by listing that column in `columns` and specifying an encoding for it.
    Encoding encoding = 3;

    // The type of values in this column.
    // The values are expected to be encoded using `HBase Bytes.toBytes`
    // function when the encoding value is set to `BINARY`.
    Type type = 4;
  }

  // The type of values in a Bigtable column or column family.
  // The values are expected to be encoded using
  // [HBase
  // Bytes.toBytes](https://hbase.apache.org/apidocs/org/apache/hadoop/hbase/util/Bytes.html)
  // function when the encoding value is set to `BINARY`.
  enum Type {
    // The type is unspecified.
    TYPE_UNSPECIFIED = 0;

    // String type.
    STRING = 1;

    // Numerical type.
    NUMBER = 2;

    // Integer type.
    INTEGER = 3;

    // Variable length integer type.
    VAR_INTEGER = 4;

    // BigDecimal type.
    BIG_NUMERIC = 5;

    // Boolean type.
    BOOLEAN = 6;

    // JSON type.
    JSON = 7;
  }

  // The encoding mode of a Bigtable column or column family.
  enum Encoding {
    // The encoding is unspecified.
    ENCODING_UNSPECIFIED = 0;

    // Text encoding.
    TEXT = 1;

    // Binary encoding.
    BINARY = 2;
  }

  // The field name used for saving the row key value in the document. The name
  // has to match the pattern `[a-zA-Z0-9][a-zA-Z0-9-_]*`.
  string key_field_name = 1;

  // The mapping from family names to an object that contains column family
  // level information for the given column family. If a family is not present
  // in this map it will be ignored.
  map<string, BigtableColumnFamily> families = 2;
}
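
// Illustrative example (not part of the API definition): BigtableOptions that
// map a hypothetical column family `profile` into document fields, storing the
// row key under `user_id` and decoding one BINARY-encoded column as an integer:
//
//   key_field_name: "user_id"
//   families {
//     key: "profile"
//     value {
//       field_name: "profile"
//       encoding: TEXT
//       type: STRING
//       columns {
//         qualifier: "age"
//         field_name: "age"
//         encoding: BINARY
//         type: INTEGER
//       }
//     }
//   }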

// The Cloud Bigtable source for importing data.
message BigtableSource {
  // The project ID that the Bigtable source is in with a length limit of 128
  // characters. If not specified, inherits the project ID from the parent
  // request.
  string project_id = 1;

  // Required. The instance ID of the Cloud Bigtable that needs to be imported.
  string instance_id = 2 [(google.api.field_behavior) = REQUIRED];

  // Required. The table ID of the Cloud Bigtable that needs to be imported.
  string table_id = 3 [(google.api.field_behavior) = REQUIRED];

  // Required. Bigtable options that contain information needed when parsing
  // data into typed structures. For example, column type annotations.
  BigtableOptions bigtable_options = 4 [(google.api.field_behavior) = REQUIRED];
}

// The Cloud FhirStore source to import data from.
message FhirStoreSource {
  // Required. The full resource name of the FHIR store to import data from, in
  // the format of
  // `projects/{project}/locations/{location}/datasets/{dataset}/fhirStores/{fhir_store}`.
  string fhir_store = 1 [
    (google.api.field_behavior) = REQUIRED,
    (google.api.resource_reference) = {
      type: "healthcare.googleapis.com/FhirStore"
    }
  ];

  // Intermediate Cloud Storage directory used for the import with a length
  // limit of 2,000 characters. Can be specified if one wants to have the
  // FhirStore export to a specific Cloud Storage directory.
  string gcs_staging_dir = 2;
}

// The Cloud SQL source to import data from.
message CloudSqlSource {
  // The project ID that the Cloud SQL source is in with a length limit of 128
  // characters. If not specified, inherits the project ID from the parent
  // request.
  string project_id = 1;

  // Required. The Cloud SQL instance to copy the data from with a length limit
  // of 256 characters.
  string instance_id = 2 [(google.api.field_behavior) = REQUIRED];

  // Required. The Cloud SQL database to copy the data from with a length limit
  // of 256 characters.
  string database_id = 3 [(google.api.field_behavior) = REQUIRED];

  // Required. The Cloud SQL table to copy the data from with a length limit of
  // 256 characters.
  string table_id = 4 [(google.api.field_behavior) = REQUIRED];

  // Intermediate Cloud Storage directory used for the import with a length
  // limit of 2,000 characters. Can be specified if one wants to have the
  // Cloud SQL export to a specific Cloud Storage directory.
  //
  // Ensure that the Cloud SQL service account has the necessary Cloud
  // Storage Admin permissions to access the specified Cloud Storage directory.
  string gcs_staging_dir = 5;

  // Option for serverless export. Enabling this option will incur additional
  // cost. More info can be found
  // [here](https://cloud.google.com/sql/pricing#serverless).
  bool offload = 6;
}
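
// Illustrative example (not part of the API definition): a CloudSqlSource that
// stages its export through a hypothetical Cloud Storage directory and uses
// serverless export to reduce load on the primary instance:
//
//   project_id: "example-project"
//   instance_id: "example-instance"
//   database_id: "example-database"
//   table_id: "documents"
//   gcs_staging_dir: "gs://example-bucket/cloudsql-staging"
//   offload: true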

// The Firestore source to import data from.
message FirestoreSource {
  // The project ID that the Firestore source is in with a length limit of 128
  // characters. If not specified, inherits the project ID from the parent
  // request.
  string project_id = 1;

  // Required. The Firestore database to copy the data from with a length limit
  // of 256 characters.
  string database_id = 2 [(google.api.field_behavior) = REQUIRED];

  // Required. The Firestore collection to copy the data from with a length
  // limit of 1,500 characters.
  string collection_id = 3 [(google.api.field_behavior) = REQUIRED];

  // Intermediate Cloud Storage directory used for the import with a length
  // limit of 2,000 characters. Can be specified if one wants to have the
  // Firestore export to a specific Cloud Storage directory.
  //
  // Ensure that the Firestore service account has the necessary Cloud
  // Storage Admin permissions to access the specified Cloud Storage directory.
  string gcs_staging_dir = 4;
}

// Configuration of the destination for Import-related errors.
message ImportErrorConfig {
  // Required. Errors destination.
  oneof destination {
    // Cloud Storage prefix for import errors. This must be an empty,
    // existing Cloud Storage directory. Import errors are written to
    // sharded files in this directory, one per line, as a JSON-encoded
    // `google.rpc.Status` message.
    string gcs_prefix = 1;
  }
}
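
// Illustrative example (not part of the API definition): an ImportErrorConfig
// that writes errors under a hypothetical, empty Cloud Storage directory:
//
//   gcs_prefix: "gs://example-bucket/import-errors/"
//
// Each line of the sharded output files would then hold one JSON-encoded
// `google.rpc.Status`, e.g. `{"code": 3, "message": "Invalid document ID"}`.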

// Request message for the ImportUserEvents method.
message ImportUserEventsRequest {
  // The inline source for the input config for the ImportUserEvents method.
  message InlineSource {
    // Required. A list of user events to import. Recommended max of 10k items.
    repeated UserEvent user_events = 1 [(google.api.field_behavior) = REQUIRED];
  }

  // Required. The desired input source of the user event data.
  oneof source {
    // The Inline source for the input content for UserEvents.
    InlineSource inline_source = 2;

    // Cloud Storage location for the input content.
    GcsSource gcs_source = 3;

    // BigQuery input source.
    BigQuerySource bigquery_source = 4;
  }

  // Required. Parent DataStore resource name, of the form
  // `projects/{project}/locations/{location}/collections/{collection}/dataStores/{data_store}`
  string parent = 1 [
    (google.api.field_behavior) = REQUIRED,
    (google.api.resource_reference) = {
      type: "discoveryengine.googleapis.com/DataStore"
    }
  ];

  // The desired location of errors incurred during the Import. Cannot be set
  // for inline user event imports.
  ImportErrorConfig error_config = 5;
}
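
// Illustrative example (not part of the API definition): an
// ImportUserEventsRequest that reads user events from Cloud Storage; the
// project, data store, and bucket names are hypothetical.
//
//   parent: "projects/example-project/locations/global/collections/default_collection/dataStores/example-data-store"
//   gcs_source {
//     input_uris: "gs://example-bucket/events/*.json"
//     data_schema: "user_event"
//   }
//   error_config { gcs_prefix: "gs://example-bucket/import-errors/" }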

// Response of the ImportUserEventsRequest. If the long running
// operation is done, then this message is returned by the
// google.longrunning.Operations.response field if the operation was successful.
message ImportUserEventsResponse {
  // A sample of errors encountered while processing the request.
  repeated google.rpc.Status error_samples = 1;

  // Echoes the destination for the complete errors if this field was set in
  // the request.
  ImportErrorConfig error_config = 2;

  // Count of user events imported with complete existing Documents.
  int64 joined_events_count = 3;

  // Count of user events imported, but with Document information not found
  // in the existing Branch.
  int64 unjoined_events_count = 4;
}

// Metadata related to the progress of the Import operation. This is
// returned by the google.longrunning.Operation.metadata field.
message ImportUserEventsMetadata {
  // Operation create time.
  google.protobuf.Timestamp create_time = 1;

  // Operation last update time. If the operation is done, this is also the
  // finish time.
  google.protobuf.Timestamp update_time = 2;

  // Count of entries that were processed successfully.
  int64 success_count = 3;

  // Count of entries that encountered errors while processing.
  int64 failure_count = 4;
}

// Metadata related to the progress of the ImportDocuments operation. This is
// returned by the google.longrunning.Operation.metadata field.
message ImportDocumentsMetadata {
  // Operation create time.
  google.protobuf.Timestamp create_time = 1;

  // Operation last update time. If the operation is done, this is also the
  // finish time.
  google.protobuf.Timestamp update_time = 2;

  // Count of entries that were processed successfully.
  int64 success_count = 3;

  // Count of entries that encountered errors while processing.
  int64 failure_count = 4;

  // Total count of entries that were processed.
  int64 total_count = 5;
}

// Request message for Import methods.
message ImportDocumentsRequest {
  // The inline source for the input config for the ImportDocuments method.
  message InlineSource {
    // Required. A list of documents to update/create. Each document must have a
    // valid [Document.id][google.cloud.discoveryengine.v1alpha.Document.id].
    // Recommended max of 100 items.
    repeated Document documents = 1 [(google.api.field_behavior) = REQUIRED];
  }

  // Indicates how imported documents are reconciled with the existing documents
  // created or imported before.
  enum ReconciliationMode {
    // Defaults to `INCREMENTAL`.
    RECONCILIATION_MODE_UNSPECIFIED = 0;

    // Inserts new documents or updates existing documents.
    INCREMENTAL = 1;

    // Calculates diff and replaces the entire document dataset. Existing
    // documents may be deleted if they are not present in the source location.
    FULL = 2;
  }

  // Required. The source of the input.
  oneof source {
    // The Inline source for the input content for documents.
    InlineSource inline_source = 2;

    // Cloud Storage location for the input content.
    GcsSource gcs_source = 3;

    // BigQuery input source.
    BigQuerySource bigquery_source = 4;

    // FhirStore input source.
    FhirStoreSource fhir_store_source = 10;

    // Spanner input source.
    SpannerSource spanner_source = 11;

    // Cloud SQL input source.
    CloudSqlSource cloud_sql_source = 12;

    // Firestore input source.
    FirestoreSource firestore_source = 13;

    // Cloud Bigtable input source.
    BigtableSource bigtable_source = 15;
  }

  // Required. The parent branch resource name, such as
  // `projects/{project}/locations/{location}/collections/{collection}/dataStores/{data_store}/branches/{branch}`.
  // Requires create/update permission.
  string parent = 1 [
    (google.api.field_behavior) = REQUIRED,
    (google.api.resource_reference) = {
      type: "discoveryengine.googleapis.com/Branch"
    }
  ];

  // The desired location of errors incurred during the Import.
  ImportErrorConfig error_config = 5;

  // The mode of reconciliation between existing documents and the documents to
  // be imported. Defaults to
  // [ReconciliationMode.INCREMENTAL][google.cloud.discoveryengine.v1alpha.ImportDocumentsRequest.ReconciliationMode.INCREMENTAL].
  ReconciliationMode reconciliation_mode = 6;

  // Indicates which fields in the provided imported documents to update. If
  // not set, the default is to update all fields.
  google.protobuf.FieldMask update_mask = 7;

  // Whether to automatically generate IDs for the documents if absent.
  //
  // If set to `true`,
  // [Document.id][google.cloud.discoveryengine.v1alpha.Document.id]s are
  // automatically generated based on the hash of the payload, so IDs may not
  // be consistent across multiple imports. In that case,
  // [ReconciliationMode.FULL][google.cloud.discoveryengine.v1alpha.ImportDocumentsRequest.ReconciliationMode.FULL]
  // is highly recommended to avoid duplicate contents. If unset or set to
  // `false`, [Document.id][google.cloud.discoveryengine.v1alpha.Document.id]s
  // have to be specified using
  // [id_field][google.cloud.discoveryengine.v1alpha.ImportDocumentsRequest.id_field],
  // otherwise, documents without IDs fail to be imported.
  //
  // Supported data sources:
  //
  // * [GcsSource][google.cloud.discoveryengine.v1alpha.GcsSource].
  //   [GcsSource.data_schema][google.cloud.discoveryengine.v1alpha.GcsSource.data_schema]
  //   must be `custom` or `csv`. Otherwise, an INVALID_ARGUMENT error is thrown.
  // * [BigQuerySource][google.cloud.discoveryengine.v1alpha.BigQuerySource].
  //   [BigQuerySource.data_schema][google.cloud.discoveryengine.v1alpha.BigQuerySource.data_schema]
  //   must be `custom` or `csv`. Otherwise, an INVALID_ARGUMENT error is thrown.
  // * [SpannerSource][google.cloud.discoveryengine.v1alpha.SpannerSource].
  // * [CloudSqlSource][google.cloud.discoveryengine.v1alpha.CloudSqlSource].
  // * [FirestoreSource][google.cloud.discoveryengine.v1alpha.FirestoreSource].
  // * [BigtableSource][google.cloud.discoveryengine.v1alpha.BigtableSource].
  bool auto_generate_ids = 8;

  // The field indicates the ID field or column to be used as unique IDs of
  // the documents.
  //
  // For [GcsSource][google.cloud.discoveryengine.v1alpha.GcsSource] it is the
  // key of the JSON field. For instance, `my_id` for JSON `{"my_id":
  // "some_uuid"}`. For others, it may be the column name of the table where the
  // unique IDs are stored.
  //
  // The values of the JSON field or the table column are used as the
  // [Document.id][google.cloud.discoveryengine.v1alpha.Document.id]s. The JSON
  // field or the table column must be of string type, and the values must be
  // valid strings that conform to
  // [RFC-1034](https://tools.ietf.org/html/rfc1034), with 1-63 characters.
  // Otherwise, documents without valid IDs fail to be imported.
  //
  // Only set this field when
  // [auto_generate_ids][google.cloud.discoveryengine.v1alpha.ImportDocumentsRequest.auto_generate_ids]
  // is unset or set as `false`. Otherwise, an INVALID_ARGUMENT error is thrown.
  //
  // If it is unset, a default value `_id` is used when importing from the
  // allowed data sources.
  //
  // Supported data sources:
  //
  // * [GcsSource][google.cloud.discoveryengine.v1alpha.GcsSource].
  //   [GcsSource.data_schema][google.cloud.discoveryengine.v1alpha.GcsSource.data_schema]
  //   must be `custom` or `csv`. Otherwise, an INVALID_ARGUMENT error is thrown.
  // * [BigQuerySource][google.cloud.discoveryengine.v1alpha.BigQuerySource].
  //   [BigQuerySource.data_schema][google.cloud.discoveryengine.v1alpha.BigQuerySource.data_schema]
  //   must be `custom` or `csv`. Otherwise, an INVALID_ARGUMENT error is thrown.
  // * [SpannerSource][google.cloud.discoveryengine.v1alpha.SpannerSource].
  // * [CloudSqlSource][google.cloud.discoveryengine.v1alpha.CloudSqlSource].
  // * [FirestoreSource][google.cloud.discoveryengine.v1alpha.FirestoreSource].
  // * [BigtableSource][google.cloud.discoveryengine.v1alpha.BigtableSource].
  string id_field = 9;
}
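
// Illustrative example (not part of the API definition): an
// ImportDocumentsRequest that imports rows of a hypothetical BigQuery table in
// `custom` schema, auto-generating document IDs and fully replacing the
// existing document dataset:
//
//   parent: "projects/example-project/locations/global/collections/default_collection/dataStores/example-data-store/branches/default_branch"
//   bigquery_source {
//     project_id: "example-project"
//     dataset_id: "example_dataset"
//     table_id: "documents"
//     data_schema: "custom"
//   }
//   reconciliation_mode: FULL
//   auto_generate_ids: true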

// Response of the
// [ImportDocumentsRequest][google.cloud.discoveryengine.v1alpha.ImportDocumentsRequest].
// If the long running operation is done, then this message is returned by the
// google.longrunning.Operations.response field if the operation was successful.
message ImportDocumentsResponse {
  // A sample of errors encountered while processing the request.
  repeated google.rpc.Status error_samples = 1;

  // Echoes the destination for the complete errors in the request if set.
  ImportErrorConfig error_config = 2;
}

// Request message for
// [CompletionService.ImportSuggestionDenyListEntries][google.cloud.discoveryengine.v1alpha.CompletionService.ImportSuggestionDenyListEntries]
// method.
message ImportSuggestionDenyListEntriesRequest {
  // The inline source for SuggestionDenyListEntry.
  message InlineSource {
    // Required. A list of all denylist entries to import. Max of 1000 items.
    repeated SuggestionDenyListEntry entries = 1
        [(google.api.field_behavior) = REQUIRED];
  }

  // The source of the updated SuggestionDenyList.
  oneof source {
    // The Inline source for the input content for suggestion deny list entries.
    InlineSource inline_source = 2;

    // Cloud Storage location for the input content.
    //
    // Only one file can be specified, containing all entries to import.
    // Supported values of `gcs_source.data_schema` for autocomplete suggestion
    // deny list entry imports:
    //
    // * `suggestion_deny_list` (default): One JSON [SuggestionDenyListEntry]
    //   per line.
    GcsSource gcs_source = 3;
  }

  // Required. The parent data store resource name for which to import denylist
  // entries. Follows pattern
  // `projects/*/locations/*/collections/*/dataStores/*`.
  string parent = 1 [
    (google.api.field_behavior) = REQUIRED,
    (google.api.resource_reference) = {
      type: "discoveryengine.googleapis.com/DataStore"
    }
  ];
}
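
// Illustrative example (not part of the API definition): an
// ImportSuggestionDenyListEntriesRequest that loads all denylist entries from
// a single hypothetical Cloud Storage file:
//
//   parent: "projects/example-project/locations/global/collections/default_collection/dataStores/example-data-store"
//   gcs_source {
//     input_uris: "gs://example-bucket/deny_list.jsonl"
//     data_schema: "suggestion_deny_list"
//   }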

// Response message for
// [CompletionService.ImportSuggestionDenyListEntries][google.cloud.discoveryengine.v1alpha.CompletionService.ImportSuggestionDenyListEntries]
// method.
message ImportSuggestionDenyListEntriesResponse {
  // A sample of errors encountered while processing the request.
  repeated google.rpc.Status error_samples = 1;

  // Count of deny list entries successfully imported.
  int64 imported_entries_count = 2;

  // Count of deny list entries that failed to be imported.
  int64 failed_entries_count = 3;
}

// Metadata related to the progress of the ImportSuggestionDenyListEntries
// operation. This is returned by the google.longrunning.Operation.metadata
// field.
message ImportSuggestionDenyListEntriesMetadata {
  // Operation create time.
  google.protobuf.Timestamp create_time = 1;

  // Operation last update time. If the operation is done, this is also the
  // finish time.
  google.protobuf.Timestamp update_time = 2;
}
