/external/mesa3d/src/gallium/frontends/lavapipe/ |
D | lvp_descriptor_set.c
      35   struct lvp_descriptor_set_layout *set_layout;  in lvp_CreateDescriptorSetLayout() local
      47   (max_binding + 1) * sizeof(set_layout->binding[0]) +  in lvp_CreateDescriptorSetLayout()
      50   set_layout = vk_zalloc2(&device->vk.alloc, pAllocator, size, 8,  in lvp_CreateDescriptorSetLayout()
      52   if (!set_layout)  in lvp_CreateDescriptorSetLayout()
      55   vk_object_base_init(&device->vk, &set_layout->base,  in lvp_CreateDescriptorSetLayout()
      59   (struct lvp_sampler **)&set_layout->binding[max_binding + 1];  in lvp_CreateDescriptorSetLayout()
      61   set_layout->binding_count = max_binding + 1;  in lvp_CreateDescriptorSetLayout()
      62   set_layout->shader_stages = 0;  in lvp_CreateDescriptorSetLayout()
      63   set_layout->size = 0;  in lvp_CreateDescriptorSetLayout()
      71   set_layout->binding[b].array_size = binding->descriptorCount;  in lvp_CreateDescriptorSetLayout()
      [all …]
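
The lavapipe hits above (and the anv/radv/tu/v3dv entries below) follow one pattern: vkCreateDescriptorSetLayout sizes a single allocation holding the layout header, the per-binding array, and the immutable-sampler storage, then carves the sampler area out at &set_layout->binding[max_binding + 1]. A minimal sketch of that pattern; my_set_layout, my_binding_layout, and my_sampler are illustrative names, not Mesa's types:

    #include <cassert>
    #include <cstdint>
    #include <cstdlib>

    struct my_sampler { uint64_t state[4]; };            // stand-in for the driver's sampler state

    struct my_binding_layout {
       uint32_t array_size;
       const my_sampler *immutable_samplers;             // NULL when the binding has none
    };

    struct my_set_layout {
       uint32_t binding_count;
       my_binding_layout *binding;                       // points into the same allocation
    };

    // One allocation: [my_set_layout][binding_count bindings][sampler_count samplers]
    static my_set_layout *create_set_layout(uint32_t max_binding, uint32_t sampler_count)
    {
       const uint32_t binding_count = max_binding + 1;
       const size_t size = sizeof(my_set_layout) +
                           binding_count * sizeof(my_binding_layout) +
                           sampler_count * sizeof(my_sampler);

       char *mem = static_cast<char *>(calloc(1, size)); // zeroed, like vk_zalloc2
       if (!mem)
          return nullptr;

       my_set_layout *layout = reinterpret_cast<my_set_layout *>(mem);
       layout->binding_count = binding_count;
       layout->binding = reinterpret_cast<my_binding_layout *>(mem + sizeof(my_set_layout));

       // Immutable samplers live right after the binding array, which is what the
       // "(struct lvp_sampler **)&set_layout->binding[max_binding + 1]" style casts do.
       my_sampler *samplers =
          reinterpret_cast<my_sampler *>(layout->binding + binding_count);
       assert(reinterpret_cast<char *>(samplers + sampler_count) == mem + size);
       (void)samplers;                                   // each binding's immutable_samplers would point in here
       return layout;
    }
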
|
/external/mesa3d/src/intel/vulkan/ |
D | anv_descriptor_set.c
      375  struct anv_descriptor_set_layout *set_layout;  in anv_CreateDescriptorSetLayout() local
      384  anv_multialloc_add(&ma, &set_layout, 1);  in anv_CreateDescriptorSetLayout()
      392  memset(set_layout, 0, sizeof(*set_layout));  in anv_CreateDescriptorSetLayout()
      393  vk_object_base_init(&device->vk, &set_layout->base,  in anv_CreateDescriptorSetLayout()
      395  set_layout->ref_cnt = 1;  in anv_CreateDescriptorSetLayout()
      396  set_layout->binding_count = max_binding + 1;  in anv_CreateDescriptorSetLayout()
      400  memset(&set_layout->binding[b], -1, sizeof(set_layout->binding[b]));  in anv_CreateDescriptorSetLayout()
      402  set_layout->binding[b].flags = 0;  in anv_CreateDescriptorSetLayout()
      403  set_layout->binding[b].data = 0;  in anv_CreateDescriptorSetLayout()
      404  set_layout->binding[b].max_plane_count = 0;  in anv_CreateDescriptorSetLayout()
      [all …]
|
D | anv_cmd_buffer.c
      805   struct anv_descriptor_set_layout *set_layout =  in anv_cmd_buffer_bind_descriptor_set() local
      808   VkShaderStageFlags stages = set_layout->shader_stages;  in anv_cmd_buffer_bind_descriptor_set()
      840   if (set_layout->dynamic_offset_count > 0) {  in anv_cmd_buffer_bind_descriptor_set()
      848   assert(set_layout->dynamic_offset_count <= *dynamic_offset_count);  in anv_cmd_buffer_bind_descriptor_set()
      849   assert(dynamic_offset_start + set_layout->dynamic_offset_count <=  in anv_cmd_buffer_bind_descriptor_set()
      852   for (uint32_t i = 0; i < set_layout->dynamic_offset_count; i++) {  in anv_cmd_buffer_bind_descriptor_set()
      859   dirty_stages |= set_layout->dynamic_offset_stages[i] & stages;  in anv_cmd_buffer_bind_descriptor_set()
      863   *dynamic_offsets += set_layout->dynamic_offset_count;  in anv_cmd_buffer_bind_descriptor_set()
      864   *dynamic_offset_count -= set_layout->dynamic_offset_count;  in anv_cmd_buffer_bind_descriptor_set()
      1291  struct anv_descriptor_set_layout *set_layout = layout->set[_set].layout;  in anv_CmdPushDescriptorSetKHR() local
      [all …]
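
The anv_cmd_buffer_bind_descriptor_set hits show how the pDynamicOffsets array passed to vkCmdBindDescriptorSets is consumed: each bound set takes set_layout->dynamic_offset_count entries from the front of the array, then advances the cursor so the next set reads its own slice. A hedged sketch of that bookkeeping; cmd_state and set_layout_info are illustrative types, not anv's:

    #include <cassert>
    #include <cstdint>

    struct set_layout_info {
       uint32_t dynamic_offset_count;   // dynamic UBO/SSBO descriptors in this set
    };

    struct cmd_state {
       uint32_t dynamic_offsets[64];    // flat per-pipeline-layout offset storage
    };

    // Copy this set's slice of pDynamicOffsets into the command-buffer state and
    // advance the caller's cursor past the offsets this set consumed.
    static void bind_set_dynamic_offsets(cmd_state *state,
                                         const set_layout_info *set_layout,
                                         uint32_t dynamic_offset_start,
                                         const uint32_t **dynamic_offsets,
                                         uint32_t *dynamic_offset_count)
    {
       assert(set_layout->dynamic_offset_count <= *dynamic_offset_count);
       for (uint32_t i = 0; i < set_layout->dynamic_offset_count; i++)
          state->dynamic_offsets[dynamic_offset_start + i] = (*dynamic_offsets)[i];

       *dynamic_offsets += set_layout->dynamic_offset_count;
       *dynamic_offset_count -= set_layout->dynamic_offset_count;
    }
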
|
D | anv_nir_lower_ycbcr_textures.c
      211  const struct anv_descriptor_set_layout *set_layout =  in try_lower_tex_ycbcr() local
      214  &set_layout->binding[var->data.binding];  in try_lower_tex_ycbcr()
|
D | anv_nir_apply_pipeline_layout.c
      1210  struct anv_descriptor_set_layout *set_layout = layout->set[set].layout;  in anv_nir_apply_pipeline_layout() local
      1211  for (unsigned b = 0; b < set_layout->binding_count; b++) {  in anv_nir_apply_pipeline_layout()
      1223  const struct anv_descriptor_set_layout *set_layout = layout->set[set].layout;  in anv_nir_apply_pipeline_layout() local
      1224  for (unsigned b = 0; b < set_layout->binding_count; b++) {  in anv_nir_apply_pipeline_layout()
|
/external/mesa3d/src/amd/vulkan/ |
D | radv_descriptor_set.c
      79   struct radv_descriptor_set_layout *set_layout;  in radv_CreateDescriptorSetLayout() local
      116  set_layout = vk_zalloc2(&device->vk.alloc, pAllocator, size, 8,  in radv_CreateDescriptorSetLayout()
      118  if (!set_layout)  in radv_CreateDescriptorSetLayout()
      121  vk_object_base_init(&device->vk, &set_layout->base,  in radv_CreateDescriptorSetLayout()
      124  set_layout->flags = pCreateInfo->flags;  in radv_CreateDescriptorSetLayout()
      125  set_layout->layout_size = size;  in radv_CreateDescriptorSetLayout()
      128  uint32_t *samplers = (uint32_t*)&set_layout->binding[num_bindings];  in radv_CreateDescriptorSetLayout()
      134  set_layout->ycbcr_sampler_offsets_offset = (char*)ycbcr_sampler_offsets - (char*)set_layout;  in radv_CreateDescriptorSetLayout()
      140  set_layout->ycbcr_sampler_offsets_offset = 0;  in radv_CreateDescriptorSetLayout()
      145  vk_object_base_finish(&set_layout->base);  in radv_CreateDescriptorSetLayout()
      [all …]
|
D | radv_nir_lower_ycbcr_textures.c
      236  const struct radv_descriptor_set_layout *set_layout =  in try_lower_tex_ycbcr() local
      239  &set_layout->binding[var->data.binding];  in try_lower_tex_ycbcr()
      241  radv_immutable_ycbcr_samplers(set_layout, var->data.binding);  in try_lower_tex_ycbcr()
|
/external/mesa3d/src/freedreno/vulkan/ |
D | tu_descriptor_set.c
      112  struct tu_descriptor_set_layout *set_layout;  in tu_CreateDescriptorSetLayout() local
      151  set_layout = vk_object_zalloc(&device->vk, pAllocator, size,  in tu_CreateDescriptorSetLayout()
      153  if (!set_layout)  in tu_CreateDescriptorSetLayout()
      156  set_layout->flags = pCreateInfo->flags;  in tu_CreateDescriptorSetLayout()
      159  struct tu_sampler *samplers = (void*) &set_layout->binding[max_binding + 1];  in tu_CreateDescriptorSetLayout()
      166  vk_object_free(&device->vk, pAllocator, set_layout);  in tu_CreateDescriptorSetLayout()
      170  set_layout->binding_count = max_binding + 1;  in tu_CreateDescriptorSetLayout()
      171  set_layout->shader_stages = 0;  in tu_CreateDescriptorSetLayout()
      172  set_layout->has_immutable_samplers = false;  in tu_CreateDescriptorSetLayout()
      173  set_layout->size = 0;  in tu_CreateDescriptorSetLayout()
      [all …]
|
D | tu_shader.c
      219  struct tu_descriptor_set_layout *set_layout = layout->set[set].layout;  in lower_vulkan_resource_index() local
      221  &set_layout->binding[binding];  in lower_vulkan_resource_index()
      488  const struct tu_descriptor_set_layout *set_layout =  in lower_tex_ycbcr() local
      491  &set_layout->binding[var->data.binding];  in lower_tex_ycbcr()
      493  tu_immutable_ycbcr_samplers(set_layout, binding);  in lower_tex_ycbcr()
|
D | tu_pipeline.c
      74   struct tu_descriptor_set_layout *set_layout = pipeline->layout->set[i].layout;  in tu6_load_state_size() local
      75   for (unsigned j = 0; j < set_layout->binding_count; j++) {  in tu6_load_state_size()
      76   struct tu_descriptor_set_binding_layout *binding = &set_layout->binding[j];  in tu6_load_state_size()
      157  struct tu_descriptor_set_layout *set_layout = layout->set[i].layout;  in tu6_emit_load_state() local
      158  for (unsigned j = 0; j < set_layout->binding_count; j++) {  in tu6_emit_load_state()
      159  struct tu_descriptor_set_binding_layout *binding = &set_layout->binding[j];  in tu6_emit_load_state()
|
/external/mesa3d/src/broadcom/vulkan/ |
D | v3dv_descriptor_set.c
      353  V3DV_FROM_HANDLE(v3dv_descriptor_set_layout, set_layout,  in v3dv_CreatePipelineLayout()
      355  layout->set[set].layout = set_layout;  in v3dv_CreatePipelineLayout()
      358  for (uint32_t b = 0; b < set_layout->binding_count; b++) {  in v3dv_CreatePipelineLayout()
      359  dynamic_offset_count += set_layout->binding[b].array_size *  in v3dv_CreatePipelineLayout()
      360  set_layout->binding[b].dynamic_offset_count;  in v3dv_CreatePipelineLayout()
      593  struct v3dv_descriptor_set_layout *set_layout;  in v3dv_CreateDescriptorSetLayout() local
      622  (max_binding + 1) * sizeof(set_layout->binding[0]);  in v3dv_CreateDescriptorSetLayout()
      626  set_layout = vk_alloc2(&device->alloc, pAllocator, size, 8,  in v3dv_CreateDescriptorSetLayout()
      629  if (!set_layout)  in v3dv_CreateDescriptorSetLayout()
      633  struct v3dv_sampler *samplers = (void*) &set_layout->binding[max_binding + 1];  in v3dv_CreateDescriptorSetLayout()
      [all …]
|
D | v3dv_pipeline.c
      594  struct v3dv_descriptor_set_layout *set_layout = layout->set[set].layout;  in lower_vulkan_resource_index() local
      596  &set_layout->binding[binding];  in lower_vulkan_resource_index()
      732  struct v3dv_descriptor_set_layout *set_layout = layout->set[set].layout;  in lower_tex_src_to_offset() local
      734  &set_layout->binding[binding];  in lower_tex_src_to_offset()
      833  struct v3dv_descriptor_set_layout *set_layout = layout->set[set].layout;  in lower_image_deref() local
      835  &set_layout->binding[binding];  in lower_image_deref()
|
/external/tensorflow/tensorflow/core/kernels/ |
D | cudnn_pooling_gpu.cc
      76   .set_layout(se::dnn::DataLayout::kBatchDepthYX);  in Compute()
      80   .set_layout(se::dnn::DataLayout::kBatchDepthYX);  in Compute()
      210  .set_layout(se::dnn::DataLayout::kBatchDepthYX);  in Compute()
      215  .set_layout(se::dnn::DataLayout::kBatchDepthYX);  in Compute()
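
The TensorFlow kernels in this directory all build se::dnn::BatchDescriptor objects the same way: the set_* mutators return the descriptor, so the .set_layout(...) hits are the tail of a chained builder expression. A sketch of the pattern for a pooling input/output pair, assuming NCHW shapes; the function and variable names are illustrative:

    #include "tensorflow/stream_executor/dnn.h"

    namespace se = stream_executor;

    // Describe an NCHW input tensor and the matching pooled output for cuDNN.
    void describe_pooling_tensors(int64_t batch, int64_t depth,
                                  int64_t in_rows, int64_t in_cols,
                                  int64_t out_rows, int64_t out_cols,
                                  se::dnn::BatchDescriptor *input_desc,
                                  se::dnn::BatchDescriptor *output_desc) {
      input_desc->set_count(batch)
          .set_feature_map_count(depth)
          .set_height(in_rows)
          .set_width(in_cols)
          .set_layout(se::dnn::DataLayout::kBatchDepthYX);   // NCHW

      output_desc->set_count(batch)
          .set_feature_map_count(depth)
          .set_height(out_rows)
          .set_width(out_cols)
          .set_layout(se::dnn::DataLayout::kBatchDepthYX);
    }
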
|
D | pooling_ops_common.cc
      367  .set_layout(data_layout);  in Compute()
      374  .set_layout(data_layout);  in Compute()
      682  .set_layout(data_layout);  in Compute()
      689  .set_layout(data_layout);  in Compute()
|
D | lrn_op.cc
      208  .set_layout(se::dnn::DataLayout::kBatchYXDepth);  in launch()
      261  .set_layout(perftools::gputools::dnn::DataLayout::kBatchDepthYX);  in launch()
      500  .set_layout(se::dnn::DataLayout::kBatchYXDepth);  in launch()
      575  .set_layout(perftools::gputools::dnn::DataLayout::kBatchDepthYX);  in launch()
|
D | conv_grad_input_ops.cc
      254  .set_layout(compute_data_layout);  in operator ()()
      260  .set_layout(compute_data_layout);  in operator ()()
      266  .set_layout(filter_layout);  in operator ()()
|
D | conv_ops_3d.cc
      403  .set_layout(compute_data_layout);  in launch()
      410  .set_layout(compute_data_layout);  in launch()
      417  .set_layout(filter_layout);  in launch()
|
D | conv_ops_fused_impl.h
      577  .set_layout(se::dnn::DataLayout::kBatchDepthYX);
      588  .set_layout(se::dnn::DataLayout::kBatchDepthYX);
      602  .set_layout(se::dnn::DataLayout::kBatchDepthYX);
|
D | conv_grad_ops_3d.cc
      1304  .set_layout(compute_data_layout);  in Compute()
      1311  .set_layout(compute_data_layout);  in Compute()
      1318  .set_layout(filter_layout);  in Compute()
      1801  .set_layout(compute_data_layout);  in Compute()
      1808  .set_layout(compute_data_layout);  in Compute()
      1815  .set_layout(filter_layout);  in Compute()
|
D | conv_grad_filter_ops.cc
      859  .set_layout(compute_data_layout);  in operator ()()
      865  .set_layout(compute_data_layout);  in operator ()()
      871  .set_layout(filter_layout);  in operator ()()
|
D | conv_ops.cc
      884  .set_layout(compute_data_layout);  in operator ()()
      890  .set_layout(compute_data_layout);  in operator ()()
      896  .set_layout(filter_layout);  in operator ()()
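
In the convolution kernels (conv_ops.cc, the conv_grad_* kernels, and gpu_conv_runner.cc below), the same chaining covers a third descriptor: the FilterDescriptor, whose layout is set with se::dnn::FilterLayout rather than DataLayout. A hedged sketch of the filter and convolution-geometry half, assuming OIHW filters and symmetric stride/padding; the names are illustrative:

    #include "tensorflow/stream_executor/dnn.h"

    namespace se = stream_executor;

    // Describe an OIHW filter and the convolution geometry for cuDNN.
    void describe_conv(int64_t out_depth, int64_t in_depth,
                       int64_t filter_rows, int64_t filter_cols,
                       int64_t stride, int64_t padding,
                       se::dnn::FilterDescriptor *filter_desc,
                       se::dnn::ConvolutionDescriptor *conv_desc) {
      filter_desc->set_input_filter_height(filter_rows)
          .set_input_filter_width(filter_cols)
          .set_input_feature_map_count(in_depth)
          .set_output_feature_map_count(out_depth)
          .set_layout(se::dnn::FilterLayout::kOutputInputYX);   // OIHW

      conv_desc->set_vertical_filter_stride(stride)
          .set_horizontal_filter_stride(stride)
          .set_zero_padding_height(padding)
          .set_zero_padding_width(padding);
    }
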
|
D | fused_batch_norm_op.cc
      895   .set_layout(data_layout);  in operator ()()
      902   .set_layout(se::dnn::DataLayout::kBatchDepthYX);  in operator ()()
      1086  .set_layout(data_layout);  in operator ()()
      1093  .set_layout(se::dnn::DataLayout::kBatchDepthYX);  in operator ()()
|
/external/tensorflow/tensorflow/compiler/xla/service/gpu/ |
D | gpu_conv_runner.cc
      109  .set_layout(params.config.output_descriptor.layout());  in RunGpuConvForwardActivation()
      376  input_descriptor.set_layout(input_dl)  in GetGpuConvConfig()
      389  filter_descriptor.set_layout(filter_dl)  in GetGpuConvConfig()
      418  output_descriptor.set_layout(output_dl)  in GetGpuConvConfig()
|
D | cudnn_batchnorm_runner.cc
      98   batch_descs.input_desc.set_layout(se::dnn::DataLayout::kBatchDepthYX)  in MakeBatchNormDescriptors()
      104  batch_descs.scale_offset_desc.set_layout(se::dnn::DataLayout::kBatchDepthYX)  in MakeBatchNormDescriptors()
|
/external/tensorflow/tensorflow/stream_executor/ |
D | dnn.cc
      274  set_layout(DataLayout::kYXDepthBatch);  in BatchDescriptor()
      416  set_layout(FilterLayout::kOutputInputYX);  in FilterDescriptor()
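
The dnn.cc hits are the constructors: a default-constructed BatchDescriptor starts out with DataLayout::kYXDepthBatch and a FilterDescriptor with FilterLayout::kOutputInputYX, which is why the kernels above call set_layout() explicitly whenever they want anything else. A small sketch of that behavior; the read-back uses BatchDescriptor::layout(), the same accessor the gpu_conv_runner.cc hit relies on:

    #include <cassert>
    #include "tensorflow/stream_executor/dnn.h"

    namespace se = stream_executor;

    void layout_defaults() {
      se::dnn::BatchDescriptor batch_desc;                        // default layout set in dnn.cc:274
      assert(batch_desc.layout() == se::dnn::DataLayout::kYXDepthBatch);

      batch_desc.set_layout(se::dnn::DataLayout::kBatchDepthYX);  // override to NCHW
      assert(batch_desc.layout() == se::dnn::DataLayout::kBatchDepthYX);
    }
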
|