/* Copyright 2016 The TensorFlow Authors. All Rights Reserved.

Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at

    http://www.apache.org/licenses/LICENSE-2.0

Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
==============================================================================*/

/* This file registers Bigquery reader ops. */

18 #include "tensorflow/core/framework/op.h"
19 #include "tensorflow/core/framework/shape_inference.h"
20 namespace tensorflow {
21 
22 using shape_inference::InferenceContext;
23 
24 REGISTER_OP("BigQueryReader")
25     .Attr("container: string = ''")
26     .Attr("shared_name: string = ''")
27     .Attr("project_id: string")
28     .Attr("dataset_id: string")
29     .Attr("table_id: string")
30     .Attr("columns: list(string)")
31     .Attr("timestamp_millis: int")
32     .Attr("test_end_point: string = ''")
33     .Output("reader_handle: Ref(string)")
34     .SetIsStateful()
__anon97f137840102(InferenceContext* c) 35     .SetShapeFn([](InferenceContext* c) {
36       c->set_output(0, c->Vector(2));
37       return Status::OK();
38     })
39     .Doc(R"doc(
40 A Reader that outputs rows from a BigQuery table as tensorflow Examples.
41 
42 container: If non-empty, this reader is placed in the given container.
43            Otherwise, a default container is used.
44 shared_name: If non-empty, this reader is named in the given bucket
45              with this shared_name. Otherwise, the node name is used instead.
46 project_id: GCP project ID.
47 dataset_id: BigQuery Dataset ID.
48 table_id: Table to read.
49 columns: List of columns to read. Leave empty to read all columns.
50 timestamp_millis: Table snapshot timestamp in millis since epoch. Relative
51 (negative or zero) snapshot times are not allowed. For more details, see
52 'Table Decorators' in BigQuery docs.
53 test_end_point: Do not use. For testing purposes only.
54 reader_handle: The handle to reference the Reader.
55 )doc");
56 
57 REGISTER_OP("GenerateBigQueryReaderPartitions")
58     .Attr("project_id: string")
59     .Attr("dataset_id: string")
60     .Attr("table_id: string")
61     .Attr("columns: list(string)")
62     .Attr("timestamp_millis: int")
63     .Attr("num_partitions: int")
64     .Attr("test_end_point: string = ''")
65     .Output("partitions: string")
__anon97f137840202(InferenceContext* c) 66     .SetShapeFn([](InferenceContext* c) {
67       c->set_output(0, c->Vector(InferenceContext::kUnknownDim));
68       return Status::OK();
69     })
70     .Doc(R"doc(
71 Generates serialized partition messages suitable for batch reads.
72 
73 This op should not be used directly by clients. Instead, the
74 bigquery_reader_ops.py file defines a clean interface to the reader.
75 
76 project_id: GCP project ID.
77 dataset_id: BigQuery Dataset ID.
78 table_id: Table to read.
79 columns: List of columns to read. Leave empty to read all columns.
80 timestamp_millis: Table snapshot timestamp in millis since epoch. Relative
81 (negative or zero) snapshot times are not allowed. For more details, see
82 'Table Decorators' in BigQuery docs.
83 num_partitions: Number of partitions to split the table into.
84 test_end_point: Do not use. For testing purposes only.
85 partitions: Serialized table partitions.
86 )doc");
87 
88 }  // namespace tensorflow
89