/drivers/memory/tegra/ |
D | mc.c |
    68  static int tegra_mc_setup_latency_allowance(struct tegra_mc *mc)  in tegra_mc_setup_latency_allowance() argument
    75  tick = (unsigned long long)mc->tick * clk_get_rate(mc->clk);  in tegra_mc_setup_latency_allowance()
    78  value = readl(mc->regs + MC_EMEM_ARB_CFG);  in tegra_mc_setup_latency_allowance()
    81  writel(value, mc->regs + MC_EMEM_ARB_CFG);  in tegra_mc_setup_latency_allowance()
    84  for (i = 0; i < mc->soc->num_clients; i++) {  in tegra_mc_setup_latency_allowance()
    85  const struct tegra_mc_la *la = &mc->soc->clients[i].la;  in tegra_mc_setup_latency_allowance()
    88  value = readl(mc->regs + la->reg);  in tegra_mc_setup_latency_allowance()
    91  writel(value, mc->regs + la->reg);  in tegra_mc_setup_latency_allowance()
    97  void tegra_mc_write_emem_configuration(struct tegra_mc *mc, unsigned long rate)  in tegra_mc_write_emem_configuration() argument
   102  for (i = 0; i < mc->num_timings; i++) {  in tegra_mc_write_emem_configuration()
   [all …]
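The excerpt above shows the Tegra MC driver doing a read-modify-write of MC_EMEM_ARB_CFG and of one latency-allowance register per client. The following is a minimal user-space sketch of that read-modify-write pattern only; the register offset, the field mask, and the array standing in for the readl()/writel() MMIO window are all assumptions, not the real driver layout.

#include <stdint.h>
#include <stdio.h>

/* Illustrative sketch: a fake register file stands in for the MMIO window
 * the real driver accesses via readl()/writel(). Offset and mask are
 * hypothetical. */
#define MC_EMEM_ARB_CFG  0x90
#define ARB_TIMING_MASK  0x1ffu          /* assumed field layout */

static uint32_t regs[0x100];             /* pretend MMIO aperture, u32-indexed */

static uint32_t mc_readl(uint32_t offs)              { return regs[offs / 4]; }
static void     mc_writel(uint32_t val, uint32_t offs) { regs[offs / 4] = val; }

/* Read-modify-write: replace one field, keep the rest of the register. */
static void mc_set_arb_timing(uint32_t ticks)
{
	uint32_t value = mc_readl(MC_EMEM_ARB_CFG);

	value &= ~ARB_TIMING_MASK;
	value |= ticks & ARB_TIMING_MASK;
	mc_writel(value, MC_EMEM_ARB_CFG);
}

int main(void)
{
	mc_writel(0xabcd0000, MC_EMEM_ARB_CFG);
	mc_set_arb_timing(0x42);
	printf("MC_EMEM_ARB_CFG = 0x%08x\n", (unsigned)mc_readl(MC_EMEM_ARB_CFG));
	return 0;
}

The same shape repeats per client in the loop at line 84: read la->reg, update the field, write it back.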
|
D | Makefile |
     1  tegra-mc-y := mc.o
     3  tegra-mc-$(CONFIG_ARCH_TEGRA_3x_SOC) += tegra30.o
     4  tegra-mc-$(CONFIG_ARCH_TEGRA_114_SOC) += tegra114.o
     5  tegra-mc-$(CONFIG_ARCH_TEGRA_124_SOC) += tegra124.o
     6  tegra-mc-$(CONFIG_ARCH_TEGRA_132_SOC) += tegra124.o
     7  tegra-mc-$(CONFIG_ARCH_TEGRA_210_SOC) += tegra210.o
     9  obj-$(CONFIG_TEGRA_MC) += tegra-mc.o
|
/drivers/gpio/ |
D | gpio-mc33880.c |
    55  static int mc33880_write_config(struct mc33880 *mc)  in mc33880_write_config() argument
    57  return spi_write(mc->spi, &mc->port_config, sizeof(mc->port_config));  in mc33880_write_config()
    61  static int __mc33880_set(struct mc33880 *mc, unsigned offset, int value)  in __mc33880_set() argument
    64  mc->port_config |= 1 << offset;  in __mc33880_set()
    66  mc->port_config &= ~(1 << offset);  in __mc33880_set()
    68  return mc33880_write_config(mc);  in __mc33880_set()
    74  struct mc33880 *mc = container_of(chip, struct mc33880, chip);  in mc33880_set() local
    76  mutex_lock(&mc->lock);  in mc33880_set()
    78  __mc33880_set(mc, offset, value);  in mc33880_set()
    80  mutex_unlock(&mc->lock);  in mc33880_set()
   [all …]
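These hits show the usual shadow-register pattern for a write-only SPI expander: a cached port_config bitmask is updated under mc->lock and then pushed out whole in one SPI transfer. Below is a standalone sketch of the set/clear half of that pattern; the SPI write is stubbed with printf and the struct is not the real driver's, just an illustration.

#include <stdint.h>
#include <stdio.h>

/* Shadow-register pattern: the device latch is write-only, so a cached
 * copy (port_config) is modified and then written out in full. */
struct mc_shadow {
	uint8_t port_config;   /* cached copy of the output latch */
};

/* Stand-in for spi_write() of the cached byte. */
static int push_config(struct mc_shadow *mc)
{
	printf("spi write: 0x%02x\n", mc->port_config);
	return 0;
}

static int shadow_set(struct mc_shadow *mc, unsigned offset, int value)
{
	if (value)
		mc->port_config |= 1u << offset;     /* drive the output high */
	else
		mc->port_config &= ~(1u << offset);  /* drive the output low  */

	return push_config(mc);                      /* one full-register write */
}

int main(void)
{
	struct mc_shadow mc = { .port_config = 0 };

	shadow_set(&mc, 3, 1);
	shadow_set(&mc, 3, 0);
	return 0;
}

In the real driver the update and the SPI write happen with the mutex held (lines 76–80 above), so concurrent callers cannot interleave a stale shadow value.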
|
/drivers/gpu/drm/nouveau/nvkm/subdev/mc/ |
D | base.c |
    29  nvkm_mc_unk260(struct nvkm_mc *mc, u32 data)  in nvkm_mc_unk260() argument
    31  if (mc->func->unk260)  in nvkm_mc_unk260()
    32  mc->func->unk260(mc, data);  in nvkm_mc_unk260()
    36  nvkm_mc_intr_unarm(struct nvkm_mc *mc)  in nvkm_mc_intr_unarm() argument
    38  return mc->func->intr_unarm(mc);  in nvkm_mc_intr_unarm()
    42  nvkm_mc_intr_rearm(struct nvkm_mc *mc)  in nvkm_mc_intr_rearm() argument
    44  return mc->func->intr_rearm(mc);  in nvkm_mc_intr_rearm()
    48  nvkm_mc_intr_mask(struct nvkm_mc *mc)  in nvkm_mc_intr_mask() argument
    50  u32 intr = mc->func->intr_mask(mc);  in nvkm_mc_intr_mask()
    57  nvkm_mc_intr(struct nvkm_mc *mc, bool *handled)  in nvkm_mc_intr() argument
   [all …]
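base.c is the chip-independent layer: every operation is a thin wrapper that indirects through a per-chip function table (mc->func), and optional hooks such as unk260 are guarded by a NULL check. Here is a compact sketch of that ops-table dispatch with made-up operation and chip names; it is not the nvkm API, only the shape of it.

#include <stdint.h>
#include <stdio.h>

/* Per-chip operation table; chip files (nv04.c, gf100.c, ...) would fill one in. */
struct mc;
struct mc_func {
	void (*intr_unarm)(struct mc *);
	uint32_t (*intr_mask)(struct mc *);
	void (*unk260)(struct mc *, uint32_t);   /* optional hook */
};

struct mc {
	const struct mc_func *func;
};

/* Chip-independent wrappers: mandatory ops are called directly, optional
 * ones only when the chip provides them. */
static void mc_intr_unarm(struct mc *mc) { mc->func->intr_unarm(mc); }

static void mc_unk260(struct mc *mc, uint32_t data)
{
	if (mc->func->unk260)
		mc->func->unk260(mc, data);
}

/* A minimal "chip": one concrete implementation of the table. */
static void chip_unarm(struct mc *mc) { (void)mc; puts("interrupts unarmed"); }
static uint32_t chip_mask(struct mc *mc) { (void)mc; return 0; }

static const struct mc_func chip_func = {
	.intr_unarm = chip_unarm,
	.intr_mask  = chip_mask,
	/* .unk260 deliberately left NULL */
};

int main(void)
{
	struct mc mc = { .func = &chip_func };

	mc_intr_unarm(&mc);
	mc_unk260(&mc, 0x12345678);   /* silently skipped: chip has no hook */
	return 0;
}

The nv04.c and gf100.c entries below are exactly such per-chip tables, each implementing the unarm/rearm/mask hooks against its own register layout.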
|
D | nv04.c |
    42  nv04_mc_intr_unarm(struct nvkm_mc *mc)  in nv04_mc_intr_unarm() argument
    44  struct nvkm_device *device = mc->subdev.device;  in nv04_mc_intr_unarm()
    50  nv04_mc_intr_rearm(struct nvkm_mc *mc)  in nv04_mc_intr_rearm() argument
    52  struct nvkm_device *device = mc->subdev.device;  in nv04_mc_intr_rearm()
    57  nv04_mc_intr_mask(struct nvkm_mc *mc)  in nv04_mc_intr_mask() argument
    59  return nvkm_rd32(mc->subdev.device, 0x000100);  in nv04_mc_intr_mask()
    63  nv04_mc_init(struct nvkm_mc *mc)  in nv04_mc_init() argument
    65  struct nvkm_device *device = mc->subdev.device;  in nv04_mc_init()
|
D | gf100.c |
    52  gf100_mc_intr_unarm(struct nvkm_mc *mc)  in gf100_mc_intr_unarm() argument
    54  struct nvkm_device *device = mc->subdev.device;  in gf100_mc_intr_unarm()
    61  gf100_mc_intr_rearm(struct nvkm_mc *mc)  in gf100_mc_intr_rearm() argument
    63  struct nvkm_device *device = mc->subdev.device;  in gf100_mc_intr_rearm()
    69  gf100_mc_intr_mask(struct nvkm_mc *mc)  in gf100_mc_intr_mask() argument
    71  struct nvkm_device *device = mc->subdev.device;  in gf100_mc_intr_mask()
    78  gf100_mc_unk260(struct nvkm_mc *mc, u32 data)  in gf100_mc_unk260() argument
    80  nvkm_wr32(mc->subdev.device, 0x000260, data);  in gf100_mc_unk260()
|
D | Kbuild |
     1  nvkm-y += nvkm/subdev/mc/base.o
     2  nvkm-y += nvkm/subdev/mc/nv04.o
     3  nvkm-y += nvkm/subdev/mc/nv44.o
     4  nvkm-y += nvkm/subdev/mc/nv50.o
     5  nvkm-y += nvkm/subdev/mc/g98.o
     6  nvkm-y += nvkm/subdev/mc/gf100.o
     7  nvkm-y += nvkm/subdev/mc/gk20a.o
|
/drivers/net/can/usb/peak_usb/ |
D | pcan_usb.c |
   348  static int pcan_usb_update_ts(struct pcan_usb_msg_context *mc)  in pcan_usb_update_ts() argument
   352  if ((mc->ptr+2) > mc->end)  in pcan_usb_update_ts()
   355  memcpy(&tmp16, mc->ptr, 2);  in pcan_usb_update_ts()
   357  mc->ts16 = le16_to_cpu(tmp16);  in pcan_usb_update_ts()
   359  if (mc->rec_idx > 0)  in pcan_usb_update_ts()
   360  peak_usb_update_ts_now(&mc->pdev->time_ref, mc->ts16);  in pcan_usb_update_ts()
   362  peak_usb_set_ts_now(&mc->pdev->time_ref, mc->ts16);  in pcan_usb_update_ts()
   370  static int pcan_usb_decode_ts(struct pcan_usb_msg_context *mc, u8 first_packet)  in pcan_usb_decode_ts() argument
   376  if ((mc->ptr + 2) > mc->end)  in pcan_usb_decode_ts()
   379  memcpy(&tmp16, mc->ptr, 2);  in pcan_usb_decode_ts()
   [all …]
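Both helpers follow the same recipe for pulling a little-endian u16 timestamp out of an incoming USB message: bounds-check the cursor against the end of the buffer, copy two bytes (the data may be unaligned), then convert with le16_to_cpu(). A self-contained sketch of that bounded little-endian read follows; the context struct, helper names, and error code are illustrative, not the driver's.

#include <stdint.h>
#include <stdio.h>

/* Minimal stand-in for the driver's message-parsing cursor. */
struct msg_ctx {
	const uint8_t *ptr;   /* current read position */
	const uint8_t *end;   /* one past the last valid byte */
};

/* Portable le16_to_cpu(): assemble the value from individual bytes,
 * which also sidesteps any alignment concerns. */
static uint16_t get_le16(const uint8_t *p)
{
	return (uint16_t)(p[0] | (p[1] << 8));
}

/* Read a 16-bit little-endian field, refusing to run past the buffer. */
static int read_ts16(struct msg_ctx *mc, uint16_t *ts)
{
	if (mc->ptr + 2 > mc->end)
		return -1;                /* truncated message */

	*ts = get_le16(mc->ptr);
	mc->ptr += 2;
	return 0;
}

int main(void)
{
	const uint8_t msg[] = { 0x34, 0x12 };
	struct msg_ctx mc = { msg, msg + sizeof(msg) };
	uint16_t ts;

	if (read_ts16(&mc, &ts) == 0)
		printf("ts = 0x%04x\n", ts);   /* prints 0x1234 */
	return 0;
}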
|
/drivers/memory/ |
D | tegra20-mc.c |
    55  static inline u32 mc_readl(struct tegra20_mc *mc, u32 offs)  in mc_readl() argument
    60  val = readl(mc->regs[0] + offs);  in mc_readl()
    62  val = readl(mc->regs[1] + offs - 0x3c);  in mc_readl()
    67  static inline void mc_writel(struct tegra20_mc *mc, u32 val, u32 offs)  in mc_writel() argument
    70  writel(val, mc->regs[0] + offs);  in mc_writel()
    72  writel(val, mc->regs[1] + offs - 0x3c);  in mc_writel()
   130  static void tegra20_mc_decode(struct tegra20_mc *mc, int n)  in tegra20_mc_decode() argument
   161  dev_err_ratelimited(mc->dev, "Unknown interrupt status %08lx\n",  in tegra20_mc_decode()
   166  req = mc_readl(mc, reg[idx].offset);  in tegra20_mc_decode()
   171  addr = mc_readl(mc, reg[idx].offset + sizeof(u32));  in tegra20_mc_decode()
   [all …]
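The interesting detail in mc_readl()/mc_writel() is that the register space is split across two mapped resources: low offsets go to the first window, higher offsets are redirected to the second window with the offset rebased (the 0x3c visible in the excerpt). The sketch below models that banked access with arrays in place of the two ioremapped windows; the split point is an assumption, only the 0x3c rebase comes from the excerpt.

#include <stdint.h>
#include <stdio.h>

/* Sketch of a split register space: low offsets live in bank 0, higher
 * offsets live in bank 1 but are rebased by a fixed amount. Valid upper
 * offsets are assumed to start at the rebase value. */
#define BANK_SPLIT   0x24      /* assumed: first offset served by bank 1 */
#define BANK1_REBASE 0x3c      /* taken from the excerpt */

static uint32_t bank0[0x10];   /* stand-ins for the two ioremapped windows */
static uint32_t bank1[0x100];

static uint32_t mc_readl(uint32_t offs)
{
	if (offs < BANK_SPLIT)
		return bank0[offs / 4];
	return bank1[(offs - BANK1_REBASE) / 4];
}

static void mc_writel(uint32_t val, uint32_t offs)
{
	if (offs < BANK_SPLIT)
		bank0[offs / 4] = val;
	else
		bank1[(offs - BANK1_REBASE) / 4] = val;
}

int main(void)
{
	mc_writel(0x11, 0x10);     /* lands in bank 0 */
	mc_writel(0x22, 0x40);     /* lands in bank 1 at rebased offset 0x04 */
	printf("0x10 -> 0x%02x, 0x40 -> 0x%02x\n",
	       (unsigned)mc_readl(0x10), (unsigned)mc_readl(0x40));
	return 0;
}

Callers such as tegra20_mc_decode() (lines 166 and 171) never touch the windows directly; they always go through these accessors, so the banking stays in one place.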
|
/drivers/misc/ |
D | pti.c |
    59  struct pti_masterchannel *mc;  member
   107  static void pti_write_to_aperture(struct pti_masterchannel *mc,  in pti_write_to_aperture() argument
   122  aperture = drv_data->pti_ioaddr + (mc->master << 15)  in pti_write_to_aperture()
   123  + (mc->channel << 8);  in pti_write_to_aperture()
   165  static void pti_control_frame_built_and_sent(struct pti_masterchannel *mc,  in pti_control_frame_built_and_sent() argument
   195  snprintf(control_frame, CONTROL_FRAME_LEN, control_format, mc->master,  in pti_control_frame_built_and_sent()
   196  mc->channel, thread_name_p);  in pti_control_frame_built_and_sent()
   214  static void pti_write_full_frame_to_aperture(struct pti_masterchannel *mc,  in pti_write_full_frame_to_aperture() argument
   218  pti_control_frame_built_and_sent(mc, NULL);  in pti_write_full_frame_to_aperture()
   219  pti_write_to_aperture(mc, (u8 *)buf, len);  in pti_write_full_frame_to_aperture()
   [all …]
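pti_write_to_aperture() computes the destination MMIO address from the master/channel pair by shifting each into a fixed bit position (master << 15, channel << 8) and adding the result to the mapped base, i.e. a 32 KiB stride per master and a 256-byte stride per channel. A tiny sketch of just that address arithmetic; the base address below is fabricated.

#include <stdint.h>
#include <stdio.h>

/* Per-master/per-channel aperture offset, as in the excerpt:
 * each master owns a 32 KiB window, each channel a 256-byte slot. */
static uint64_t pti_aperture(uint64_t io_base, unsigned master, unsigned channel)
{
	return io_base + ((uint64_t)master << 15) + ((uint64_t)channel << 8);
}

int main(void)
{
	uint64_t base = 0xfe000000ull;   /* fabricated MMIO base */

	printf("master 2, channel 5 -> 0x%llx\n",
	       (unsigned long long)pti_aperture(base, 2, 5));   /* 0xfe010500 */
	return 0;
}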
|
/drivers/clk/mediatek/ |
D | clk-mtk.c |
   138  struct clk * __init mtk_clk_register_composite(const struct mtk_composite *mc,  in mtk_clk_register_composite() argument
   152  if (mc->mux_shift >= 0) {  in mtk_clk_register_composite()
   157  mux->reg = base + mc->mux_reg;  in mtk_clk_register_composite()
   158  mux->mask = BIT(mc->mux_width) - 1;  in mtk_clk_register_composite()
   159  mux->shift = mc->mux_shift;  in mtk_clk_register_composite()
   165  parent_names = mc->parent_names;  in mtk_clk_register_composite()
   166  num_parents = mc->num_parents;  in mtk_clk_register_composite()
   168  parent = mc->parent;  in mtk_clk_register_composite()
   173  if (mc->gate_shift >= 0) {  in mtk_clk_register_composite()
   180  gate->reg = base + mc->gate_reg;  in mtk_clk_register_composite()
   [all …]
|
/drivers/gpu/drm/radeon/ |
D | r520.c |
    98  rdev->mc.vram_width = 128;  in r520_vram_get_type()
    99  rdev->mc.vram_is_ddr = true;  in r520_vram_get_type()
   103  rdev->mc.vram_width = 32;  in r520_vram_get_type()
   106  rdev->mc.vram_width = 64;  in r520_vram_get_type()
   109  rdev->mc.vram_width = 128;  in r520_vram_get_type()
   112  rdev->mc.vram_width = 256;  in r520_vram_get_type()
   115  rdev->mc.vram_width = 128;  in r520_vram_get_type()
   119  rdev->mc.vram_width *= 2;  in r520_vram_get_type()
   127  radeon_vram_location(rdev, &rdev->mc, 0);  in r520_mc_init()
   128  rdev->mc.gtt_base_align = 0;  in r520_mc_init()
   [all …]
|
D | radeon_device.c |
   574  void radeon_vram_location(struct radeon_device *rdev, struct radeon_mc *mc, u64 base)  in radeon_vram_location() argument
   578  mc->vram_start = base;  in radeon_vram_location()
   579  if (mc->mc_vram_size > (rdev->mc.mc_mask - base + 1)) {  in radeon_vram_location()
   581  mc->real_vram_size = mc->aper_size;  in radeon_vram_location()
   582  mc->mc_vram_size = mc->aper_size;  in radeon_vram_location()
   584  mc->vram_end = mc->vram_start + mc->mc_vram_size - 1;  in radeon_vram_location()
   585  if (rdev->flags & RADEON_IS_AGP && mc->vram_end > mc->gtt_start && mc->vram_start <= mc->gtt_end) {  in radeon_vram_location()
   587  mc->real_vram_size = mc->aper_size;  in radeon_vram_location()
   588  mc->mc_vram_size = mc->aper_size;  in radeon_vram_location()
   590  mc->vram_end = mc->vram_start + mc->mc_vram_size - 1;  in radeon_vram_location()
   [all …]
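radeon_vram_location() clamps the VRAM range so it fits below the MC address mask (and, on AGP, so it does not overlap the GTT range); whenever it has to shrink, it falls back to the PCI aperture size. The sketch below works through only the address-mask clamp with small made-up numbers and omits the AGP-overlap case shown at line 585.

#include <stdint.h>
#include <stdio.h>

/* Toy version of the VRAM-placement clamp: keep VRAM inside the range the
 * memory controller can decode, otherwise fall back to the (smaller) PCI
 * aperture size. All numbers are made up. */
struct toy_mc {
	uint64_t mc_mask;        /* highest address the MC can decode */
	uint64_t aper_size;      /* size of the PCI BAR aperture      */
	uint64_t mc_vram_size;   /* requested size of the VRAM range  */
	uint64_t real_vram_size;
	uint64_t vram_start, vram_end;
};

static void vram_location(struct toy_mc *mc, uint64_t base)
{
	mc->vram_start = base;
	if (mc->mc_vram_size > mc->mc_mask - base + 1) {
		/* doesn't fit below the mask: limit to the aperture */
		mc->real_vram_size = mc->aper_size;
		mc->mc_vram_size = mc->aper_size;
	}
	mc->vram_end = mc->vram_start + mc->mc_vram_size - 1;
}

int main(void)
{
	struct toy_mc mc = {
		.mc_mask = 0xffffffffull,      /* 32-bit-capable MC */
		.aper_size = 256ull << 20,     /* 256 MiB BAR       */
		.mc_vram_size = 8ull << 30,    /* 8 GiB requested   */
		.real_vram_size = 8ull << 30,
	};

	vram_location(&mc, 0);
	printf("vram: 0x%llx - 0x%llx (%llu MiB usable)\n",
	       (unsigned long long)mc.vram_start,
	       (unsigned long long)mc.vram_end,
	       (unsigned long long)(mc.real_vram_size >> 20));
	return 0;
}

The amdgpu_device.c entry further down carries the same logic, with an extra user-supplied limit on real_vram_size.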
|
D | rv770.c |
   923  WREG32(VM_CONTEXT0_PAGE_TABLE_START_ADDR, rdev->mc.gtt_start >> 12);  in rv770_pcie_gart_enable()
   924  WREG32(VM_CONTEXT0_PAGE_TABLE_END_ADDR, rdev->mc.gtt_end >> 12);  in rv770_pcie_gart_enable()
   935  (unsigned)(rdev->mc.gtt_size >> 20),  in rv770_pcie_gart_enable()
  1029  if (rdev->mc.vram_start < rdev->mc.gtt_start) {  in rv770_mc_program()
  1032  rdev->mc.vram_start >> 12);  in rv770_mc_program()
  1034  rdev->mc.gtt_end >> 12);  in rv770_mc_program()
  1038  rdev->mc.gtt_start >> 12);  in rv770_mc_program()
  1040  rdev->mc.vram_end >> 12);  in rv770_mc_program()
  1044  rdev->mc.vram_start >> 12);  in rv770_mc_program()
  1046  rdev->mc.vram_end >> 12);  in rv770_mc_program()
   [all …]
|
D | radeon_agp.c |
   244  rdev->mc.agp_base = rdev->ddev->agp->agp_info.aper_base;  in radeon_agp_init()
   245  rdev->mc.gtt_size = rdev->ddev->agp->agp_info.aper_size << 20;  in radeon_agp_init()
   246  rdev->mc.gtt_start = rdev->mc.agp_base;  in radeon_agp_init()
   247  rdev->mc.gtt_end = rdev->mc.gtt_start + rdev->mc.gtt_size - 1;  in radeon_agp_init()
   249  rdev->mc.gtt_size >> 20, rdev->mc.gtt_start, rdev->mc.gtt_end);  in radeon_agp_init()
|
D | rs400.c |
    41  switch (rdev->mc.gtt_size/(1024*1024)) {  in rs400_gart_adjust_size()
    52  (unsigned)(rdev->mc.gtt_size >> 20));  in rs400_gart_adjust_size()
    55  rdev->mc.gtt_size = 32 * 1024 * 1024;  in rs400_gart_adjust_size()
    85  switch(rdev->mc.gtt_size / (1024 * 1024)) {  in rs400_gart_init()
   116  switch(rdev->mc.gtt_size / (1024 * 1024)) {  in rs400_gart_enable()
   149  tmp = REG_SET(RS690_MC_AGP_TOP, rdev->mc.gtt_end >> 16);  in rs400_gart_enable()
   150  tmp |= REG_SET(RS690_MC_AGP_START, rdev->mc.gtt_start >> 16);  in rs400_gart_enable()
   188  (unsigned)(rdev->mc.gtt_size >> 20),  in rs400_gart_enable()
   268  rdev->mc.igp_sideport_enabled = radeon_combios_sideport_present(rdev);  in rs400_mc_init()
   270  rdev->mc.vram_is_ddr = true;  in rs400_mc_init()
   [all …]
|
D | radeon_object.c |
   107  rbo->rdev->mc.visible_vram_size < rbo->rdev->mc.real_vram_size) {  in radeon_ttm_placement_from_domain()
   109  rbo->rdev->mc.visible_vram_size >> PAGE_SHIFT;  in radeon_ttm_placement_from_domain()
   172  rbo->rdev->mc.visible_vram_size >> PAGE_SHIFT;  in radeon_ttm_placement_from_domain()
   347  domain_start = bo->rdev->mc.vram_start;  in radeon_bo_pin_restricted()
   349  domain_start = bo->rdev->mc.gtt_start;  in radeon_bo_pin_restricted()
   361  (!max_offset || max_offset > bo->rdev->mc.visible_vram_size))  in radeon_bo_pin_restricted()
   363  bo->rdev->mc.visible_vram_size >> PAGE_SHIFT;  in radeon_bo_pin_restricted()
   421  if (rdev->mc.igp_sideport_enabled == false)  in radeon_bo_evict_vram()
   451  arch_io_reserve_memtype_wc(rdev->mc.aper_base,  in radeon_bo_init()
   452  rdev->mc.aper_size);  in radeon_bo_init()
   [all …]
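Several of these hits (and the matching ones in amdgpu_object.c below) limit buffer placement to the CPU-visible part of VRAM by converting visible_vram_size from bytes into a page frame number (>> PAGE_SHIFT) and using it as the placement's lpfn ceiling. A short sketch of that byte-to-pfn clamp; the placement struct and PAGE_SHIFT value here are simplified stand-ins, not the TTM types.

#include <stdint.h>
#include <stdio.h>

#define PAGE_SHIFT 12   /* assume 4 KiB pages */

/* Simplified placement: fpfn/lpfn bound the page range a buffer may use. */
struct toy_placement {
	uint32_t fpfn;   /* first allowed page frame number */
	uint32_t lpfn;   /* last allowed page frame number (0 = no limit) */
};

/* Restrict a placement to the CPU-visible slice of VRAM. */
static void clamp_to_visible(struct toy_placement *p, uint64_t visible_vram_size)
{
	p->fpfn = 0;
	p->lpfn = (uint32_t)(visible_vram_size >> PAGE_SHIFT);
}

int main(void)
{
	struct toy_placement p;

	clamp_to_visible(&p, 256ull << 20);            /* 256 MiB visible */
	printf("lpfn = %u pages\n", (unsigned)p.lpfn); /* 65536 */
	return 0;
}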
|
/drivers/gpu/drm/amd/amdgpu/ |
D | gmc_v8_0.c |
   220  err = request_firmware(&adev->mc.fw, fw_name, adev->dev);  in gmc_v8_0_init_microcode()
   223  err = amdgpu_ucode_validate(adev->mc.fw);  in gmc_v8_0_init_microcode()
   230  release_firmware(adev->mc.fw);  in gmc_v8_0_init_microcode()
   231  adev->mc.fw = NULL;  in gmc_v8_0_init_microcode()
   252  if (!adev->mc.fw)  in gmc_v8_0_mc_load_microcode()
   255  hdr = (const struct mc_firmware_header_v1_0 *)adev->mc.fw->data;  in gmc_v8_0_mc_load_microcode()
   258  adev->mc.fw_version = le32_to_cpu(hdr->header.ucode_version);  in gmc_v8_0_mc_load_microcode()
   261  (adev->mc.fw->data + le32_to_cpu(hdr->io_debug_array_offset_bytes));  in gmc_v8_0_mc_load_microcode()
   264  (adev->mc.fw->data + le32_to_cpu(hdr->header.ucode_array_offset_bytes));  in gmc_v8_0_mc_load_microcode()
   314  struct amdgpu_mc *mc)  in gmc_v8_0_vram_gtt_location() argument
   [all …]
|
D | gmc_v7_0.c |
   180  err = request_firmware(&adev->mc.fw, fw_name, adev->dev);  in gmc_v7_0_init_microcode()
   183  err = amdgpu_ucode_validate(adev->mc.fw);  in gmc_v7_0_init_microcode()
   190  release_firmware(adev->mc.fw);  in gmc_v7_0_init_microcode()
   191  adev->mc.fw = NULL;  in gmc_v7_0_init_microcode()
   212  if (!adev->mc.fw)  in gmc_v7_0_mc_load_microcode()
   215  hdr = (const struct mc_firmware_header_v1_0 *)adev->mc.fw->data;  in gmc_v7_0_mc_load_microcode()
   218  adev->mc.fw_version = le32_to_cpu(hdr->header.ucode_version);  in gmc_v7_0_mc_load_microcode()
   221  (adev->mc.fw->data + le32_to_cpu(hdr->io_debug_array_offset_bytes));  in gmc_v7_0_mc_load_microcode()
   224  (adev->mc.fw->data + le32_to_cpu(hdr->header.ucode_array_offset_bytes));  in gmc_v7_0_mc_load_microcode()
   274  struct amdgpu_mc *mc)  in gmc_v7_0_vram_gtt_location() argument
   [all …]
|
D | amdgpu_object.c |
    48  if (mem->start << PAGE_SHIFT < adev->mc.visible_vram_size) {  in amdgpu_get_vis_part_size()
    50  adev->mc.visible_vram_size ?  in amdgpu_get_vis_part_size()
    51  adev->mc.visible_vram_size - (mem->start << PAGE_SHIFT) :  in amdgpu_get_vis_part_size()
   128  adev->mc.visible_vram_size < adev->mc.real_vram_size) {  in amdgpu_ttm_placement_init()
   130  adev->mc.visible_vram_size >> PAGE_SHIFT;  in amdgpu_ttm_placement_init()
   192  adev->mc.visible_vram_size >> PAGE_SHIFT;  in amdgpu_ttm_placement_init()
   384  domain_start = bo->adev->mc.vram_start;  in amdgpu_bo_pin_restricted()
   386  domain_start = bo->adev->mc.gtt_start;  in amdgpu_bo_pin_restricted()
   398  (!max_offset || max_offset > bo->adev->mc.visible_vram_size)) {  in amdgpu_bo_pin_restricted()
   400  bo->adev->mc.visible_vram_size))  in amdgpu_bo_pin_restricted()
   [all …]
|
D | amdgpu_device.c |
   557  void amdgpu_vram_location(struct amdgpu_device *adev, struct amdgpu_mc *mc, u64 base)  in amdgpu_vram_location() argument
   561  mc->vram_start = base;  in amdgpu_vram_location()
   562  if (mc->mc_vram_size > (adev->mc.mc_mask - base + 1)) {  in amdgpu_vram_location()
   564  mc->real_vram_size = mc->aper_size;  in amdgpu_vram_location()
   565  mc->mc_vram_size = mc->aper_size;  in amdgpu_vram_location()
   567  mc->vram_end = mc->vram_start + mc->mc_vram_size - 1;  in amdgpu_vram_location()
   568  if (limit && limit < mc->real_vram_size)  in amdgpu_vram_location()
   569  mc->real_vram_size = limit;  in amdgpu_vram_location()
   571  mc->mc_vram_size >> 20, mc->vram_start,  in amdgpu_vram_location()
   572  mc->vram_end, mc->real_vram_size >> 20);  in amdgpu_vram_location()
   [all …]
|
/drivers/infiniband/core/ |
D | ucma.c |
   117  struct ucma_multicast *mc;  member
   217  struct ucma_multicast *mc;  in ucma_alloc_multicast() local
   219  mc = kzalloc(sizeof(*mc), GFP_KERNEL);  in ucma_alloc_multicast()
   220  if (!mc)  in ucma_alloc_multicast()
   224  mc->id = idr_alloc(&multicast_idr, NULL, 0, 0, GFP_KERNEL);  in ucma_alloc_multicast()
   226  if (mc->id < 0)  in ucma_alloc_multicast()
   229  mc->ctx = ctx;  in ucma_alloc_multicast()
   230  list_add_tail(&mc->list, &ctx->mc_list);  in ucma_alloc_multicast()
   231  return mc;  in ucma_alloc_multicast()
   234  kfree(mc);  in ucma_alloc_multicast()
   [all …]
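ucma_alloc_multicast() is a standard allocate / assign-ID / link / unwind sequence: zero-allocate the object, give it an ID from an IDR, link it onto the parent context's list, and free it on the error path. The user-space sketch below keeps only that shape; the counter standing in for idr_alloc(), the singly-linked list standing in for list_add_tail(), and all the names are illustrative, not the ucma API.

#include <stdlib.h>
#include <stdio.h>

/* Illustrative only: a counter replaces the kernel IDR, a singly-linked
 * list replaces list_add_tail(). */
struct ctx {
	struct mcast *mc_list;   /* head of this context's multicast list */
	int next_id;
};

struct mcast {
	int id;
	struct ctx *ctx;
	struct mcast *next;
};

static struct mcast *alloc_multicast(struct ctx *ctx)
{
	struct mcast *mc = calloc(1, sizeof(*mc));

	if (!mc)
		return NULL;

	mc->id = ctx->next_id++;        /* "idr_alloc": cannot fail in this toy */
	if (mc->id < 0)
		goto err;                   /* unwind on ID allocation failure */

	mc->ctx = ctx;
	mc->next = ctx->mc_list;        /* link into the parent context */
	ctx->mc_list = mc;
	return mc;

err:
	free(mc);
	return NULL;
}

int main(void)
{
	struct ctx ctx = { .mc_list = NULL, .next_id = 0 };
	struct mcast *mc = alloc_multicast(&ctx);

	if (mc)
		printf("allocated multicast id %d\n", mc->id);
	free(mc);
	return 0;
}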
|
/drivers/scsi/qla2xxx/ |
D | qla_mbx.c |
   406  mbx_cmd_t mc;  in qla2x00_load_ram() local
   407  mbx_cmd_t *mcp = &mc;  in qla2x00_load_ram()
   472  mbx_cmd_t mc;  in qla2x00_execute_fw() local
   473  mbx_cmd_t *mcp = &mc;  in qla2x00_execute_fw()
   543  mbx_cmd_t mc;  in qla2x00_get_fw_version() local
   544  mbx_cmd_t *mcp = &mc;  in qla2x00_get_fw_version()
   640  mbx_cmd_t mc;  in qla2x00_get_fw_options() local
   641  mbx_cmd_t *mcp = &mc;  in qla2x00_get_fw_options()
   688  mbx_cmd_t mc;  in qla2x00_set_fw_options() local
   689  mbx_cmd_t *mcp = &mc;  in qla2x00_set_fw_options()
   [all …]
|
/drivers/staging/fsl-mc/bus/ |
D | Makefile |
     8  obj-$(CONFIG_FSL_MC_BUS) += mc-bus-driver.o
    10  mc-bus-driver-objs := mc-bus.o \
    11  mc-sys.o \
    15  mc-allocator.o \
|
/drivers/gpu/drm/nouveau/nvkm/engine/device/ |
D | base.c |
    86  .mc = nv04_mc_new,
   107  .mc = nv04_mc_new,
   129  .mc = nv04_mc_new,
   149  .mc = nv04_mc_new,
   171  .mc = nv04_mc_new,
   193  .mc = nv04_mc_new,
   215  .mc = nv04_mc_new,
   237  .mc = nv04_mc_new,
   259  .mc = nv04_mc_new,
   281  .mc = nv04_mc_new,
   [all …]
|