
Searched for refs:dataset_id (results 1 – 9 of 9), sorted by relevance

/external/grpc-grpc/tools/gcp/utils/
big_query_utils.py
  38: def create_dataset(biq_query, project_id, dataset_id):
  43:   'datasetId': dataset_id
  53:   print 'Warning: The dataset %s already exists' % dataset_id
  56:   print 'Error in creating dataset: %s. Err: %s' % (dataset_id,
  62: def create_table(big_query, project_id, dataset_id, table_id, table_schema,
  69:   return create_table2(big_query, project_id, dataset_id, table_id, fields,
  75:   dataset_id,
  89:   return create_table2(big_query, project_id, dataset_id, table_id, fields,
  95:   dataset_id,
 109:   'datasetId': dataset_id,
[all …]
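
The hits above outline grpc's thin BigQuery helper module. Below is a minimal sketch of how a caller might drive these helpers; only create_dataset, create_table and create_table2 appear in the hits, so the create_big_query() client factory, the schema layout and all concrete values are assumptions:

    import big_query_utils

    _PROJECT_ID = 'my-gcp-project'  # placeholder project

    bq = big_query_utils.create_big_query()  # assumed client factory
    # create_dataset() builds a body containing {'datasetId': dataset_id}
    # (line 43) and only warns if the dataset already exists (line 53).
    big_query_utils.create_dataset(bq, _PROJECT_ID, 'test_dataset')

    # create_table() (line 62) expands its schema argument and delegates to
    # create_table2() (line 69); the field layout below is illustrative only.
    schema = [('run_id', 'STRING', 'unique identifier of the test run'),
              ('latency_ms', 'FLOAT', 'observed latency')]
    big_query_utils.create_table(bq, _PROJECT_ID, 'test_dataset', 'results',
                                 schema, 'benchmark results')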
/external/grpc-grpc/tools/run_tests/performance/
bq_upload_result.py
  37: def _upload_netperf_latency_csv_to_bigquery(dataset_id, table_id, result_file):
  56:   _create_results_table(bq, dataset_id, table_id)
  59:   bq, dataset_id, table_id, scenario_result, flatten=False):
  64: def _upload_scenario_result_to_bigquery(dataset_id, table_id, result_file):
  69:   _create_results_table(bq, dataset_id, table_id)
  71:   if not _insert_result(bq, dataset_id, table_id, scenario_result):
  76: def _insert_result(bq, dataset_id, table_id, scenario_result, flatten=True):
  81:   return big_query_utils.insert_rows(bq, _PROJECT_ID, dataset_id, table_id,
  85: def _create_results_table(bq, dataset_id, table_id):
  90:   return big_query_utils.create_table2(bq, _PROJECT_ID, dataset_id, table_id,
[all …]
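
bq_upload_result.py drives those helpers when uploading benchmark results: ensure the results table exists, then insert the parsed result. A sketch of that flow, with the JSON handling and error message assumed (only the function names and the create_table2/insert_rows calls appear in the hits):

    import json
    import big_query_utils

    _PROJECT_ID = 'grpc-testing'  # the constant's actual value is not shown

    def upload_scenario_result(bq, dataset_id, table_id, result_file):
        with open(result_file, 'r') as f:
            scenario_result = json.load(f)
        # _create_results_table() wraps big_query_utils.create_table2 (line 90).
        _create_results_table(bq, dataset_id, table_id)
        # _insert_result() wraps big_query_utils.insert_rows (line 81).
        if not _insert_result(bq, dataset_id, table_id, scenario_result):
            print('Error uploading result to bigquery.')  # hypothetical message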
patch_scenario_results_schema.py
  36: def _patch_results_table(dataset_id, table_id):
  42:   return big_query_utils.patch_table(bq, _PROJECT_ID, dataset_id, table_id,
  57: dataset_id, table_id = args.bq_result_table.split('.', 2)
  59: _patch_results_table(dataset_id, table_id)
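
The patch tool takes the target table as a single --bq_result_table flag and splits it into the two ids (line 57). A sketch of that plumbing, with the argparse wiring assumed; note that split('.', 2) permits up to three fields, so a value with two dots would fail the two-way unpack (maxsplit=1 would pin it to exactly two):

    import argparse

    parser = argparse.ArgumentParser()
    parser.add_argument('--bq_result_table', required=True,
                        help='BigQuery table to patch, as <dataset>.<table>')
    args = parser.parse_args()

    # Mirrors line 57 above.
    dataset_id, table_id = args.bq_result_table.split('.', 2)
    _patch_results_table(dataset_id, table_id)  # wraps patch_table (line 42)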
/external/tensorflow/tensorflow/contrib/cloud/kernels/
bigquery_reader_ops.cc
  35: string* dataset_id, string* table_id,  (in GetTableAttrs)
  39: TF_RETURN_IF_ERROR(context->GetAttr("dataset_id", dataset_id));  (in GetTableAttrs)
  99: string dataset_id;  (in BigQueryReaderOp)
 105: GetTableAttrs(context, &project_id, &dataset_id, &table_id,  (in BigQueryReaderOp)
 109: project_id, dataset_id, table_id, timestamp_millis,  (in BigQueryReaderOp)
 130: string dataset_id;  (in GenerateBigQueryReaderPartitionsOp)
 137: GetTableAttrs(context, &project_id, &dataset_id, &table_id,  (in GenerateBigQueryReaderPartitionsOp)
 141: project_id, dataset_id, table_id, timestamp_millis,  (in GenerateBigQueryReaderPartitionsOp)
bigquery_table_accessor.h
  65: static Status New(const string& project_id, const string& dataset_id,
 106: static Status New(const string& project_id, const string& dataset_id,
 117: const string& project_id, const string& dataset_id,
bigquery_table_accessor.cc
  74: const string& project_id, const string& dataset_id, const string& table_id,  (in New)
  78: return New(project_id, dataset_id, table_id, timestamp_millis,  (in New)
  84: const string& project_id, const string& dataset_id, const string& table_id,  (in New)
 105: project_id, dataset_id, table_id, timestamp_millis, row_buffer_size,  (in New)
 113: const string& project_id, const string& dataset_id, const string& table_id,  (in BigQueryTableAccessor)
 119: dataset_id_(dataset_id),  (in BigQueryTableAccessor)
bigquery_table_accessor_test.cc
  68: Status CreateTableAccessor(const string& project_id, const string& dataset_id,  (in CreateTableAccessor)
  74: project_id, dataset_id, table_id, timestamp_millis, row_buffer_size, "",  (in CreateTableAccessor)
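
The kernel reads the same ids as op attributes (bigquery_reader_ops.cc, line 39) and threads them into BigQueryTableAccessor::New(). Below is a rough Python analogue of that factory, purely to illustrate the parameter flow between the two New() overloads (lines 74 and 84) and the constructor (line 113); the trailing partition argument is inferred from the "" passed at line 74 of the test and is an assumption:

    class BigQueryTableAccessor(object):
        """Illustrative analogue only; the real accessor is the C++ class."""

        @classmethod
        def new(cls, project_id, dataset_id, table_id, timestamp_millis,
                row_buffer_size, partition=''):
            # The short New() overload (line 74) forwards to the fuller one
            # (line 84), which constructs the accessor (line 105).
            return cls(project_id, dataset_id, table_id, timestamp_millis,
                       row_buffer_size, partition)

        def __init__(self, project_id, dataset_id, table_id, timestamp_millis,
                     row_buffer_size, partition):
            self._project_id = project_id
            self._dataset_id = dataset_id  # mirrors dataset_id_ (line 119)
            self._table_id = table_id
            self._timestamp_millis = timestamp_millis
            self._row_buffer_size = row_buffer_size
            self._partition = partition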
/external/tensorflow/tensorflow/contrib/cloud/python/ops/
bigquery_reader_ops.py
  69: dataset_id,
 111: self._dataset_id = dataset_id
 120: dataset_id=self._dataset_id,
 142: dataset_id=self._dataset_id,
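
From Python, those attrs are supplied through the BigQueryReader constructor, which stores dataset_id (line 111) and forwards it both when generating partitions and when building the reader op (lines 120 and 142). A usage sketch with placeholder values; only project_id, dataset_id, table_id and timestamp_millis are confirmed by the hits, and the remaining arguments follow the contrib API as I recall it:

    import tensorflow as tf
    from tensorflow.contrib.cloud.python.ops import bigquery_reader_ops

    reader = bigquery_reader_ops.BigQueryReader(
        project_id='my-project',
        dataset_id='my_dataset',         # stored as self._dataset_id (line 111)
        table_id='samples',
        timestamp_millis=1514764800000,  # table snapshot to read
        num_partitions=4,
        features={'name': tf.FixedLenFeature([1], tf.string)})

    # Each partition is read as (key, value) records.
    queue = tf.train.string_input_producer(reader.partitions())
    key, value = reader.read(queue)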
bigquery_reader_ops_test.py
 211: dataset_id=_DATASET,
 260: dataset_id=_DATASET,