Searched refs:input_dim_size (Results 1 – 8 of 8) sorted by relevance
100   int input_dim_size, int offset) {  in GetInputDimension() argument
106   if (padded_dimension >= input_dim_size) {  in GetInputDimension()
107   padded_dimension -= input_dim_size;  in GetInputDimension()
108   const int original_ind = input_dim_size - (1 + offset);  in GetInputDimension()
1159  const int64_t input_dim_size =  in HandleReshape() local
1163  VLOG(2) << "input_dim_size: " << input_dim_size  in HandleReshape()
1166  if (input_dim_size == output_dim_size) {  in HandleReshape()
1172  if (input_dim_size > output_dim_size) {  in HandleReshape()
1173  TF_RET_CHECK(input_dim_size % output_dim_size == 0)  in HandleReshape()
1175  const int64_t divisor = input_dim_size / output_dim_size;  in HandleReshape()
1189  if (input_dim_size < output_dim_size) {  in HandleReshape()
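Note on the HandleReshape hits above: the branch structure distinguishes a dimension that is preserved (equal sizes), split (input larger, required to divide evenly, with divisor = input_dim_size / output_dim_size), or merged (input smaller). A minimal standalone sketch of the split arithmetic, using illustrative names rather than the actual XLA code:

  #include <cassert>
  #include <cstdint>
  #include <iostream>

  // Sketch only: mirrors the split branch seen in HandleReshape above.
  // A larger input dimension must divide evenly into the output dimension;
  // the quotient is how many output elements each input chunk covers.
  int64_t SplitDivisor(int64_t input_dim_size, int64_t output_dim_size) {
    assert(input_dim_size > output_dim_size);
    assert(input_dim_size % output_dim_size == 0);  // analogous to the TF_RET_CHECK
    return input_dim_size / output_dim_size;
  }

  int main() {
    // Reshaping a dimension of size 12 onto an output dimension of size 4
    // splits it by a factor of 3.
    std::cout << SplitDivisor(12, 4) << "\n";  // prints 3
  }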
2675  const int64_t input_dim_size = operand_shape.dimensions(dim);  in InferDynamicSliceShape() local
2681  if (slice_dim_size > input_dim_size) {  in InferDynamicSliceShape()
2684  slice_dim_size, input_dim_size);  in InferDynamicSliceShape()
2792  const int64_t input_dim_size = operand_shape.dimensions(dim);  in InferDynamicUpdateSliceShape() local
2799  if (update_dim_size > input_dim_size) {  in InferDynamicUpdateSliceShape()
2802  update_dim_size, input_dim_size);  in InferDynamicUpdateSliceShape()
75    low_padding_for_conv, kernel_spatial_dim_size, input_dim_size;  member
3566  const int64_t input_dim_size =  in GetConvolutionDetails() local
3589  const int64_t spatial_size = input_dim_size +  in GetConvolutionDetails()
3622  input_dim_size};  in GetConvolutionDetails()
1551  int64_t input_dim_size =  in verify() local
1553  if (ShapedType::isDynamic(input_dim_size)) return success();  in verify()
1557  if (input_dim_size % op.getNumResults() != 0)  in verify()
1584  int64_t input_dim_size =  in verify() local
1586  if (ShapedType::isDynamic(input_dim_size)) return success();  in verify()
1615  if (!dynamic_dim_index && total_dim_size != input_dim_size)  in verify()
1619  << total_dim_size << " vs " << input_dim_size;  in verify()
1621  if (dynamic_dim_index && total_dim_size > input_dim_size)  in verify()
1625  << total_dim_size << " vs " << input_dim_size;  in verify()
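Note on the verify() hits above: the first block checks an even split (the split dimension must be divisible by the number of results), the second an explicit-size split (the sizes must sum to the dimension exactly, or to at most the dimension when one size is left dynamic). A hedged standalone restatement of those constraints, not the MLIR verifier itself:

  #include <cstddef>
  #include <cstdint>
  #include <optional>
  #include <vector>

  // Sketch only: the real verifier emits op errors instead of returning bool.
  bool EvenSplitIsValid(int64_t input_dim_size, int64_t num_results) {
    return input_dim_size % num_results == 0;
  }

  bool ExplicitSplitIsValid(int64_t input_dim_size,
                            const std::vector<int64_t>& split_sizes,
                            std::optional<std::size_t> dynamic_dim_index) {
    int64_t total_dim_size = 0;
    for (std::size_t i = 0; i < split_sizes.size(); ++i) {
      if (dynamic_dim_index && i == *dynamic_dim_index) continue;
      total_dim_size += split_sizes[i];
    }
    // No dynamic entry: the sizes must cover the dimension exactly.
    // One dynamic entry: the known sizes must not exceed the dimension.
    return dynamic_dim_index ? total_dim_size <= input_dim_size
                             : total_dim_size == input_dim_size;
  }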
3331  int64_t input_dim_size = input_type.getDimSize(dim_index);  in matchAndRewrite() local
3334  if (ShapedType::isDynamic(input_dim_size)) return failure();  in matchAndRewrite()
3337  int64_t slice_size = input_dim_size / num_splits;  in matchAndRewrite()
3400  Value input_dim_size =  in matchAndRewrite() local
3407  rewriter.create<arith::DivSIOp>(loc, input_dim_size, num_splits_value);  in matchAndRewrite()
3533  int64_t input_dim_size = input_type.getDimSize(dim_index);  in matchAndRewrite() local
3534  if (ShapedType::isDynamic(input_dim_size)) return failure();  in matchAndRewrite()
3536  assert(((dynamic_dim_index && total_dim_size <= input_dim_size) ||  in matchAndRewrite()
3537  (!dynamic_dim_index && total_dim_size == input_dim_size)) &&  in matchAndRewrite()
3542  split_sizes[*dynamic_dim_index] = input_dim_size - total_dim_size;  in matchAndRewrite()
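Note on the matchAndRewrite hits above: an even split gives every slice input_dim_size / num_splits along the split axis (computed with arith::DivSIOp when the size is only known at runtime), while a split with one unspecified size assigns the leftover extent to that entry. A small illustrative sketch of the size arithmetic, not the rewriter code:

  #include <cstddef>
  #include <cstdint>
  #include <optional>
  #include <vector>

  // Even split: every output slice has the same extent along the split axis.
  std::vector<int64_t> EvenSplitSizes(int64_t input_dim_size, int64_t num_splits) {
    // The verifier has already guaranteed divisibility at this point.
    int64_t slice_size = input_dim_size / num_splits;
    return std::vector<int64_t>(num_splits, slice_size);
  }

  // Uneven split: the single dynamic (-1) entry absorbs whatever is left over.
  std::vector<int64_t> ResolveDynamicSplitSize(std::vector<int64_t> split_sizes,
                                               std::optional<std::size_t> dynamic_dim_index,
                                               int64_t input_dim_size) {
    if (dynamic_dim_index) {
      int64_t total_dim_size = 0;
      for (std::size_t i = 0; i < split_sizes.size(); ++i) {
        if (i != *dynamic_dim_index) total_dim_size += split_sizes[i];
      }
      split_sizes[*dynamic_dim_index] = input_dim_size - total_dim_size;
    }
    return split_sizes;
  }

For example, EvenSplitSizes(12, 3) yields {4, 4, 4}, and ResolveDynamicSplitSize({2, -1, 5}, 1, 10) yields {2, 3, 5}.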
2700  int64_t input_dim_size = operand.base_shape().dimensions(input_sharded_dim);  in HandleReshape() local
2705  if (input_dim_size % output_dim_size == 0) {  in HandleReshape()
2707  int64_t split_factor = input_dim_size / output_dim_size;  in HandleReshape()
2723  input_dim_size);  in HandleReshape()
2746  } else if (output_dim_size % input_dim_size == 0) {  in HandleReshape()
2748  int64_t merge_factor = output_dim_size / input_dim_size;  in HandleReshape()
1276 output_dim_size = input_dim_size - kernel_dim_size + 1 (requires: input_dim_size >= kernel_dim_size…
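Note on the last result above: it is the comment form of the valid (unpadded, stride-1) window rule, output_dim_size = input_dim_size - kernel_dim_size + 1. A quick worked check, assuming stride 1 and no dilation:

  #include <cassert>
  #include <cstdint>

  // A kernel of size k has input_dim_size - k + 1 valid starting positions
  // when there is no padding and the stride is 1.
  int64_t ValidOutputDimSize(int64_t input_dim_size, int64_t kernel_dim_size) {
    assert(input_dim_size >= kernel_dim_size);
    return input_dim_size - kernel_dim_size + 1;
  }

  int main() {
    assert(ValidOutputDimSize(7, 3) == 5);  // a width-3 kernel fits at 5 positions in 7
  }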