
Searched refs:sharding_type (Results 1 – 4 of 4) sorted by relevance

/external/tensorflow/tensorflow/compiler/mlir/tensorflow/utils/
xla_sharding_util.cc
660 const auto sharding_type = sharding.type(); in GetMetadataArgumentMapping() local
661 if (sharding_type == xla::OpSharding::OTHER) { in GetMetadataArgumentMapping()
664 } else if (sharding_type == xla::OpSharding::REPLICATED) { in GetMetadataArgumentMapping()
667 assert(sharding_type == xla::OpSharding::MAXIMAL); in GetMetadataArgumentMapping()
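The hit above branches on the xla::OpSharding type of a TPU computation argument (OTHER means tiled, REPLICATED means copied to every core, MAXIMAL means placed on a single core). Below is a minimal, self-contained sketch of that branching pattern, under the assumption that it is used to count how many cores receive the argument; NumAssignedCores and num_cores_per_replica are illustrative names, not part of the TensorFlow source.

#include <cassert>

#include "tensorflow/compiler/xla/xla_data.pb.h"  // defines xla::OpSharding

// Hypothetical helper: how many cores of a replica receive (a piece of) an
// argument with the given sharding.
int NumAssignedCores(const xla::OpSharding& sharding, int num_cores_per_replica) {
  const auto sharding_type = sharding.type();
  if (sharding_type == xla::OpSharding::OTHER) {
    // Tiled sharding: the argument is split across the cores of the replica.
    return num_cores_per_replica;
  } else if (sharding_type == xla::OpSharding::REPLICATED) {
    // Replicated: every core gets a full copy of the argument.
    return num_cores_per_replica;
  }
  // Remaining case, as asserted in the hit above: maximal sharding, i.e. the
  // whole argument lives on exactly one core.
  assert(sharding_type == xla::OpSharding::MAXIMAL);
  return 1;
}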
/external/tensorflow/tensorflow/core/kernels/
eigen_contraction_kernel.h
273 template <typename StorageIndex, int sharding_type>
275 sharding_type> {
298 if (sharding_type == ShardByCol) {
341 template <typename StorageIndex, int sharding_type>
343 StorageIndex, sharding_type> {
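These hits show the compile-time dispatch in eigen_contraction_kernel.h: the blocking class is a template over an integer sharding_type and compares it against Eigen's ShardByCol constant to decide how contraction work is split. A self-contained sketch of that pattern follows; the enum values and the HypotheticalBlocking name are local stand-ins for illustration, not the Eigen or TensorFlow definitions.

// Local stand-ins for Eigen's contraction sharding constants; the real values
// live in Eigen's tensor contraction headers.
enum { ShardByRow = 0, ShardByCol = 1 };

// Minimal sketch of a blocking policy selected at compile time by the
// sharding_type template parameter, as in the hits above.
template <typename StorageIndex, int sharding_type>
struct HypotheticalBlocking {
  HypotheticalBlocking(StorageIndex rows, StorageIndex cols)
      : rows_(rows), cols_(cols) {}

  StorageIndex ShardedDimension() const {
    if (sharding_type == ShardByCol) {
      // Column sharding: threads split work over output columns.
      return cols_;
    }
    // Otherwise shard over output rows.
    return rows_;
  }

  StorageIndex rows_;
  StorageIndex cols_;
};

Because sharding_type is a template parameter, the comparison is resolved at compile time, so a specialization such as HypotheticalBlocking<int, ShardByCol> pays no runtime dispatch cost.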
/external/tensorflow/tensorflow/compiler/xla/experimental/xla_sharding/
xla_sharding.py
126 for sharding_type in subgroup_modes:
127 if sharding_type not in [
/external/tensorflow/tensorflow/core/tpu/kernels/
tpu_program_group.cc
330 TpuProgramShardingType sharding_type) const { in tpu_programs()
336 tpu_programs_[i], sharding_type)); in tpu_programs()
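The last hit is an accessor that returns one TPU program per core for a requested sharding type. The sketch below shows only the general shape of that loop with placeholder types; Program, WrapForSharding, and ProgramsFor are invented for illustration, while the real code works with XLA_TpuProgram* handles and TpuProgramShardingType through the TPU C API.

#include <vector>

// Placeholder program handle; the real code stores XLA_TpuProgram* pointers.
struct Program {};

// Placeholder for deriving a per-sharding-type handle; the real code calls
// into the TPU C API. Here we simply pass the program through.
Program* WrapForSharding(Program* program, int /*sharding_type*/) {
  return program;
}

// Sketch of the accessor pattern: produce one program per core, each derived
// from the stored per-core program for the requested sharding type.
std::vector<Program*> ProgramsFor(const std::vector<Program*>& tpu_programs,
                                  int sharding_type) {
  std::vector<Program*> result;
  result.reserve(tpu_programs.size());
  for (Program* program : tpu_programs) {
    result.push_back(WrapForSharding(program, sharding_type));
  }
  return result;
}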