/external/grpc-grpc/tools/gcp/utils/
big_query_utils.py
     38: def create_dataset(biq_query, project_id, dataset_id):    [argument]
     43: 'datasetId': dataset_id
     53: print 'Warning: The dataset %s already exists' % dataset_id
     56: print 'Error in creating dataset: %s. Err: %s' % (dataset_id,
     62: def create_table(big_query, project_id, dataset_id, table_id, table_schema,    [argument]
     69: return create_table2(big_query, project_id, dataset_id, table_id, fields,
     75: dataset_id,    [argument]
     89: return create_table2(big_query, project_id, dataset_id, table_id, fields,
     95: dataset_id,    [argument]
    109: 'datasetId': dataset_id,
    [all …]
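These helpers address BigQuery entirely through the (project_id, dataset_id, table_id) triple. Below is a minimal sketch of how they might be chained, based only on the signatures visible in the matches above; the create_big_query() client helper, the (name, type, description) schema tuples, and the insertAll-style row payload are assumptions for illustration, not part of this listing.

    # Sketch of chaining the big_query_utils helpers listed above.
    # create_big_query() and the exact schema/row formats are assumed.
    import uuid

    import big_query_utils

    bq = big_query_utils.create_big_query()   # assumed helper returning an authorized client
    project_id = 'my-gcp-project'             # placeholder values
    dataset_id = 'perf_results'
    table_id = 'latencies'

    # create_dataset(biq_query, project_id, dataset_id); the warning on
    # line 53 above suggests it tolerates an already-existing dataset.
    big_query_utils.create_dataset(bq, project_id, dataset_id)

    # create_table(...) forwards a field list to create_table2(...) (line 69);
    # (name, type, description) tuples are an assumption here.
    schema = [('latency_ms', 'FLOAT', '50th percentile latency'),
              ('timestamp', 'TIMESTAMP', 'measurement time')]
    big_query_utils.create_table(bq, project_id, dataset_id, table_id, schema,
                                 'example results table')

    # insert_rows(...) is invoked the same way bq_upload_result.py does on
    # line 81 below; the insertId/json shape follows BigQuery's
    # tabledata.insertAll format and is assumed.
    rows = [{'insertId': str(uuid.uuid4()),
             'json': {'latency_ms': 3.2, 'timestamp': '2018-01-01 00:00:00'}}]
    big_query_utils.insert_rows(bq, project_id, dataset_id, table_id, rows)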
/external/grpc-grpc/tools/run_tests/performance/ |
bq_upload_result.py
     37: def _upload_netperf_latency_csv_to_bigquery(dataset_id, table_id, result_file):    [argument]
     56: _create_results_table(bq, dataset_id, table_id)
     59: bq, dataset_id, table_id, scenario_result, flatten=False):
     64: def _upload_scenario_result_to_bigquery(dataset_id, table_id, result_file):    [argument]
     69: _create_results_table(bq, dataset_id, table_id)
     71: if not _insert_result(bq, dataset_id, table_id, scenario_result):
     76: def _insert_result(bq, dataset_id, table_id, scenario_result, flatten=True):    [argument]
     81: return big_query_utils.insert_rows(bq, _PROJECT_ID, dataset_id, table_id,
     85: def _create_results_table(bq, dataset_id, table_id):    [argument]
     90: return big_query_utils.create_table2(bq, _PROJECT_ID, dataset_id, table_id,
    [all …]
patch_scenario_results_schema.py
     36: def _patch_results_table(dataset_id, table_id):    [argument]
     42: return big_query_utils.patch_table(bq, _PROJECT_ID, dataset_id, table_id,
     57: dataset_id, table_id = args.bq_result_table.split('.', 2)    [variable]
     59: _patch_results_table(dataset_id, table_id)
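Both performance scripts identify the target table with a single 'dataset.table' argument and split it into dataset_id and table_id (line 57 above) before handing the pair to big_query_utils. A hedged sketch of that pattern follows; the _PROJECT_ID value, the client construction, and the schema payload passed to patch_table are illustrative assumptions.

    # Sketch of the dataset.table parsing used by the performance scripts
    # (line 57 of patch_scenario_results_schema.py above); project constant,
    # client creation and schema payload are assumptions.
    import big_query_utils

    _PROJECT_ID = 'grpc-testing'   # assumed; defined inside the scripts themselves

    def patch_results_table(bq_result_table, new_fields):
        # 'perf_results.scenario_results' -> ('perf_results', 'scenario_results')
        dataset_id, table_id = bq_result_table.split('.', 2)
        bq = big_query_utils.create_big_query()   # assumed helper
        return big_query_utils.patch_table(bq, _PROJECT_ID, dataset_id, table_id,
                                           new_fields)

    patch_results_table('perf_results.scenario_results',
                        [('new_metric', 'FLOAT', 'newly added column')])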
/external/tensorflow/tensorflow/contrib/cloud/kernels/ |
bigquery_reader_ops.cc
     35: string* dataset_id, string* table_id,    [in GetTableAttrs(), argument]
     39: TF_RETURN_IF_ERROR(context->GetAttr("dataset_id", dataset_id));    [in GetTableAttrs()]
     99: string dataset_id;    [in BigQueryReaderOp(), local]
    105: GetTableAttrs(context, &project_id, &dataset_id, &table_id,    [in BigQueryReaderOp()]
    109: project_id, dataset_id, table_id, timestamp_millis,    [in BigQueryReaderOp()]
    130: string dataset_id;    [in GenerateBigQueryReaderPartitionsOp(), local]
    137: GetTableAttrs(context, &project_id, &dataset_id, &table_id,    [in GenerateBigQueryReaderPartitionsOp()]
    141: project_id, dataset_id, table_id, timestamp_millis,    [in GenerateBigQueryReaderPartitionsOp()]
bigquery_table_accessor.h
     65: static Status New(const string& project_id, const string& dataset_id,
    106: static Status New(const string& project_id, const string& dataset_id,
    117: const string& project_id, const string& dataset_id,
bigquery_table_accessor.cc
     74: const string& project_id, const string& dataset_id, const string& table_id,    [in New(), argument]
     78: return New(project_id, dataset_id, table_id, timestamp_millis,
     84: const string& project_id, const string& dataset_id, const string& table_id,    [in New(), argument]
    105: project_id, dataset_id, table_id, timestamp_millis, row_buffer_size,
    113: const string& project_id, const string& dataset_id, const string& table_id,    [in BigQueryTableAccessor(), argument]
    119: dataset_id_(dataset_id),    [in BigQueryTableAccessor()]
bigquery_table_accessor_test.cc
     68: Status CreateTableAccessor(const string& project_id, const string& dataset_id,    [in CreateTableAccessor(), argument]
     74: project_id, dataset_id, table_id, timestamp_millis, row_buffer_size, "",    [in CreateTableAccessor()]
/external/tensorflow/tensorflow/contrib/cloud/python/ops/ |
bigquery_reader_ops.py
     69: dataset_id,    [argument]
    111: self._dataset_id = dataset_id
    120: dataset_id=self._dataset_id,
    142: dataset_id=self._dataset_id,
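On the Python side, dataset_id is a constructor argument of the contrib BigQueryReader and is forwarded to the kernels above as the dataset_id attr (lines 120 and 142). A minimal TF 1.x usage sketch follows; the project/dataset/table names, timestamp, and feature spec are placeholders, and the queue-based read pattern is the generic Reader workflow rather than anything taken verbatim from this listing.

    # Rough TF 1.x sketch for the contrib BigQueryReader wrapper listed above.
    # All identifiers, the snapshot timestamp, and the feature spec are placeholders.
    import tensorflow as tf
    from tensorflow.contrib.cloud.python.ops import bigquery_reader_ops

    features = {
        'name': tf.FixedLenFeature([1], tf.string),
        'age': tf.FixedLenFeature([1], tf.int64),
    }

    reader = bigquery_reader_ops.BigQueryReader(
        project_id='my-gcp-project',
        dataset_id='my_dataset',          # becomes the dataset_id attr (lines 120/142)
        table_id='my_table',
        timestamp_millis=1514764800000,   # table snapshot time for repeatable reads
        num_partitions=4,
        features=features)

    # Partition specs feed a queue; reader.read() pops one row per call.
    queue = tf.train.string_input_producer(reader.partitions())
    row_id, example_serialized = reader.read(queue)
    example = tf.parse_single_example(example_serialized, features=features)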
bigquery_reader_ops_test.py
    211: dataset_id=_DATASET,
    260: dataset_id=_DATASET,