/drivers/soc/ti/

knav_qmss_acc.c:

    #define knav_range_offset_to_inst(kdev, range, q) \
            (range->queue_base_inst + (q << kdev->inst_shift))

    static void __knav_acc_notify(struct knav_range_info *range,
                                  ...)
    {
            struct knav_device *kdev = range->kdev;
            ...
            range_base = kdev->base_id + range->queue_base;

            if (range->flags & RANGE_MULTI_QUEUE) {
                    for (queue = 0; queue < range->num_queues; queue++) {
                            inst = knav_range_offset_to_inst(kdev, range,
                                                             ...
            ...
            queue = acc->channel - range->acc_info.start_channel;
            inst = knav_range_offset_to_inst(kdev, range, queue);
    [all …]

knav_qmss_queue.c:

    static int knav_queue_setup_irq(struct knav_range_info *range,
                                    ...)
    {
            unsigned queue = inst->id - range->queue_base;
            ...
            if (range->flags & RANGE_HAS_IRQ) {
                    irq = range->irqs[queue].irq;
                    ...
                    if (range->irqs[queue].cpu_mask) {
                            ret = irq_set_affinity_hint(irq, range->irqs[queue].cpu_mask);
                            ...
                            dev_warn(range->kdev->dev,
                                     ...
    ...
    /* in knav_queue_free_irq() */
            struct knav_range_info *range = inst->range;
            unsigned queue = inst->id - inst->range->queue_base;
            ...
            if (range->flags & RANGE_HAS_IRQ) {
    [all …]

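The pattern both knav files lean on is the macro at the top of knav_qmss_acc.c: a queue instance lives at a fixed base plus the queue index shifted by a per-device instance size. A minimal standalone sketch of that arithmetic, with invented stand-in types (the real device and instance structs carry much more):

    #include <assert.h>
    #include <stdint.h>

    /* Invented stand-in: only the two fields the macro reads. */
    struct kdev {
        uintptr_t base_inst;        /* start of the instance block */
        unsigned int inst_shift;    /* log2 of one instance's size */
    };

    /* Mirror of knav_range_offset_to_inst(): base + (q << inst_shift). */
    static uintptr_t offset_to_inst(const struct kdev *kdev, unsigned int q)
    {
        return kdev->base_inst + ((uintptr_t)q << kdev->inst_shift);
    }

    int main(void)
    {
        struct kdev kdev = { .base_inst = 0x1000, .inst_shift = 6 };

        assert(offset_to_inst(&kdev, 0) == 0x1000);
        assert(offset_to_inst(&kdev, 3) == 0x10c0);    /* 3 * 64 bytes in */
        return 0;
    }
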
/drivers/staging/android/

ashmem.c:

    static inline unsigned long range_size(struct ashmem_range *range)
    {
        return range->pgend - range->pgstart + 1;
    }

    static inline bool range_on_lru(struct ashmem_range *range)
    {
        return range->purged == ASHMEM_NOT_PURGED;
    }

    static inline bool page_range_subsumes_range(struct ashmem_range *range,
                                                 size_t start, size_t end)
    {
        return (range->pgstart >= start) && (range->pgend <= end);
    }

    static inline bool page_range_subsumed_by_range(struct ashmem_range *range,
                                                    size_t start, size_t end)
    {
        return (range->pgstart <= start) && (range->pgend >= end);
    }

    static inline bool page_in_range(struct ashmem_range *range, size_t page)
    {
        return (range->pgstart <= page) && (range->pgend >= page);
    }
    [all …]

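The ashmem helpers above all treat pgstart/pgend as an inclusive page range. The sketch below reproduces the same predicates in a self-contained form; the struct is cut down to the two fields the checks actually read, and the names mirror the driver's only for readability:

    #include <assert.h>
    #include <stdbool.h>
    #include <stddef.h>

    /* Reduced stand-in for struct ashmem_range: inclusive [pgstart, pgend]. */
    struct range { size_t pgstart, pgend; };

    static size_t range_size(const struct range *r)
    {
        return r->pgend - r->pgstart + 1;    /* inclusive bounds */
    }

    static bool page_in_range(const struct range *r, size_t page)
    {
        return r->pgstart <= page && page <= r->pgend;
    }

    /* [r->pgstart, r->pgend] lies entirely inside [start, end]. */
    static bool range_subsumed_by(const struct range *r, size_t start, size_t end)
    {
        return r->pgstart >= start && r->pgend <= end;
    }

    int main(void)
    {
        struct range r = { .pgstart = 4, .pgend = 7 };

        assert(range_size(&r) == 4);
        assert(page_in_range(&r, 4) && page_in_range(&r, 7));
        assert(!page_in_range(&r, 8));
        assert(range_subsumed_by(&r, 0, 16) && !range_subsumed_by(&r, 5, 16));
        return 0;
    }
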
/drivers/dax/

kmem.c:

    static int dax_kmem_range(struct dev_dax *dev_dax, int i, struct range *r)
    {
        ...
        struct range *range = &dax_range->range;
        ...
        r->start = ALIGN(range->start, memory_block_size_bytes());
        r->end = ALIGN_DOWN(range->end + 1, memory_block_size_bytes()) - 1;
        ...
        r->start = range->start;
        r->end = range->end;
    ...
    /* in dev_dax_kmem_probe() */
        struct range range;
        ...
        rc = dax_kmem_range(dev_dax, i, &range);
        ...
            i, range.start, range.end);
        ...
        total_len += range_len(&range);
    [all …]

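dax_kmem_range() shrinks a byte range inward so both ends land on memory-block boundaries (start rounded up, end + 1 rounded down). A sketch of just that rounding, with the kernel's ALIGN/ALIGN_DOWN spelled out and a hypothetical 128 MiB block size standing in for memory_block_size_bytes(); both macros assume a power-of-two alignment, as the real ones do:

    #include <assert.h>
    #include <stdint.h>

    /* Round down/up to a power-of-two boundary, as the kernel macros do. */
    #define ALIGN_DOWN(x, a)    ((x) & ~((uint64_t)(a) - 1))
    #define ALIGN(x, a)         ALIGN_DOWN((x) + (a) - 1, a)

    struct range { uint64_t start, end; };    /* inclusive, like struct range */

    /* Shrink [start, end] inward to whole "memory blocks" of size bsz. */
    static struct range trim_to_blocks(struct range r, uint64_t bsz)
    {
        struct range out;

        out.start = ALIGN(r.start, bsz);
        out.end = ALIGN_DOWN(r.end + 1, bsz) - 1;
        return out;
    }

    int main(void)
    {
        const uint64_t bsz = 128ULL << 20;    /* assumed block size */
        struct range r = { .start = 0x9000000, .end = 0x20ffffffe };
        struct range t = trim_to_blocks(r, bsz);

        assert(t.start % bsz == 0);
        assert((t.end + 1) % bsz == 0);
        assert(t.start >= r.start && t.end <= r.end);
        return 0;
    }
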
bus.c:

    /* in dev_dax_size() */
        size += range_len(&dev_dax->ranges[i].range);
    ...
    /* in trim_dev_dax_range() */
        struct range *range = &dev_dax->ranges[i].range;
        ...
            (unsigned long long)range->start,
            (unsigned long long)range->end);
        ...
        __release_region(&dax_region->res, range->start, range_len(range));
    ...
    /* among alloc_dax_region()'s arguments: */
            struct range *range, int target_node, unsigned int align,
    ...
        if (!IS_ALIGNED(range->start, align)
                || !IS_ALIGNED(range_len(range), align))
        ...
            .start = range->start,
            .end = range->end,
    [all …]

/drivers/pci/hotplug/

ibmphp_res.c:

    static int add_bus_range(int type, struct range_node *range, struct bus_node *bus_cur)
    {
        ...
        if (range->start < range_cur->start)
        ...
        bus_cur->rangeMem = range;
        ...
        bus_cur->rangePFMem = range;
        ...
        bus_cur->rangeIO = range;
        ...
        range->next = range_cur;
        range->rangeno = 1;
        ...
        range->next = NULL;
        range_prev->next = range;
        range->rangeno = range_prev->rangeno + 1;
    [all …]

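add_bus_range() is a sorted insert into a singly linked list followed by renumbering. The sketch below shows the same idea generically, assuming a reduced node with only start, rangeno and next; it uses the pointer-to-pointer idiom instead of the driver's explicit prev tracking, and renumbers the whole list in one pass rather than patching numbers from the insertion point:

    #include <assert.h>
    #include <stddef.h>

    struct range_node {
        unsigned int start;
        int rangeno;
        struct range_node *next;
    };

    /* Insert 'range' into the list at *head, kept sorted by ->start,
     * then renumber every node so rangeno stays 1, 2, 3, ... */
    static void add_range_sorted(struct range_node **head, struct range_node *range)
    {
        struct range_node **pp = head;
        int n = 1;

        while (*pp && (*pp)->start < range->start)
            pp = &(*pp)->next;
        range->next = *pp;
        *pp = range;

        for (struct range_node *cur = *head; cur; cur = cur->next)
            cur->rangeno = n++;
    }

    int main(void)
    {
        struct range_node a = { .start = 10 }, b = { .start = 30 }, c = { .start = 20 };
        struct range_node *head = NULL;

        add_range_sorted(&head, &a);
        add_range_sorted(&head, &b);
        add_range_sorted(&head, &c);
        assert(head == &a && a.next == &c && c.next == &b);
        assert(a.rangeno == 1 && c.rangeno == 2 && b.rangeno == 3);
        return 0;
    }
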
/drivers/of/

address.c:

    /* struct of_bus member */
    u64 (*map)(__be32 *addr, const __be32 *range,
               ...);

    static u64 of_bus_default_map(__be32 *addr, const __be32 *range,
                                  ...)
    {
        ...
        cp = of_read_number(range, na);
        s = of_read_number(range + na + pna, ns);
    ...
    static u64 of_bus_pci_map(__be32 *addr, const __be32 *range, int na, int ns,
                              ...)
    {
        ...
        rf = of_bus_pci_get_flags(range);
        ...
        cp = of_read_number(range + 1, na - 1);
        s = of_read_number(range + na + pna, ns);
    ...
    int of_pci_range_to_resource(struct of_pci_range *range,
                                 ...)
    {
        ...
        res->flags = range->flags;
    [all …]

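of_bus_default_map() reads the child base and window size out of one ranges entry and returns the offset of the address into that window, or OF_BAD_ADDR when it falls outside. A sketch under simplifying assumptions: cells are plain host-order uint32_t rather than __be32, and read_cells() is a stand-in for of_read_number():

    #include <assert.h>
    #include <stdint.h>

    /* Concatenate 'cells' 32-bit cells into one number, in the spirit of
     * of_read_number(); cells are already in host order here. */
    static uint64_t read_cells(const uint32_t *cell, int cells)
    {
        uint64_t v = 0;

        while (cells--)
            v = (v << 32) | *cell++;
        return v;
    }

    #define OF_BAD_ADDR    ((uint64_t)-1)

    /* Default ranges translation: an entry is <child-addr parent-addr size>,
     * and na/pna/ns are the cell counts of each field.  Returns the offset
     * of addr into the child window, or OF_BAD_ADDR if it lies outside. */
    static uint64_t bus_default_map(const uint32_t *addr, const uint32_t *range,
                                    int na, int ns, int pna)
    {
        uint64_t cp = read_cells(range, na);              /* child base */
        uint64_t s = read_cells(range + na + pna, ns);    /* window size */
        uint64_t da = read_cells(addr, na);               /* address to map */

        if (da < cp || da >= cp + s)
            return OF_BAD_ADDR;
        return da - cp;
    }

    int main(void)
    {
        /* <child=0x1000 parent=0x90000000 size=0x100>, all single-cell */
        uint32_t range[] = { 0x1000, 0x90000000, 0x100 };
        uint32_t addr[] = { 0x1040 };

        assert(bus_default_map(addr, range, 1, 1, 1) == 0x40);
        assert(bus_default_map((uint32_t[]){ 0x2000 }, range, 1, 1, 1) == OF_BAD_ADDR);
        return 0;
    }
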
/drivers/gpu/drm/sprd/

megacores_pll.c:

    /* in dphy_timing_config() */
        u32 range[2], constant;
        ...
        range[L] = 50 * scale;
        range[H] = INFINITY;
        val[CLK] = DIV_ROUND_UP(range[L] * (factor << 1), t_byteck) - 2;
        ...
        range[L] = 38 * scale;
        range[H] = 95 * scale;
        tmp = AVERAGE(range[L], range[H]);
        val[CLK] = DIV_ROUND_UP(AVERAGE(range[L], range[H]), t_half_byteck) - 1;
        range[L] = 40 * scale + 4 * t_ui;
        range[H] = 85 * scale + 6 * t_ui;
    [all …]

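The dphy_timing_config() excerpt keeps refilling range[L]/range[H] with a spec window and then deriving a register value from it, usually the window midpoint divided (rounding up) by a clock period, minus a hardware offset. A sketch of that one step; the numbers in the test are made up for illustration, not taken from the MIPI D-PHY spec:

    #include <assert.h>

    #define DIV_ROUND_UP(n, d)    (((n) + (d) - 1) / (d))
    #define AVERAGE(a, b)         (((a) + (b)) / 2)

    /* Pick a timing field the way dphy_timing_config() does: take the
     * midpoint of the allowed [low, high] window, express it in clock
     * units rounding up, and subtract the hardware's built-in bias. */
    static unsigned int timing_value(unsigned int low, unsigned int high,
                                     unsigned int t_clk, unsigned int bias)
    {
        return DIV_ROUND_UP(AVERAGE(low, high), t_clk) - bias;
    }

    int main(void)
    {
        /* e.g. a 38..95 window, a 12-unit clock period, a bias of 1 */
        assert(timing_value(38, 95, 12, 1) == 5);    /* avg 66 -> ceil 6 -> 5 */
        return 0;
    }
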
/drivers/pinctrl/

core.c:

    static inline int gpio_to_pin(struct pinctrl_gpio_range *range,
                                  ...)
    {
        unsigned int offset = gpio - range->base;

        if (range->pins)
            return range->pins[offset];
        ...
        return range->pin_base + offset;
    }

    /* in pinctrl_match_gpio_range() */
        struct pinctrl_gpio_range *range;
        ...
        list_for_each_entry(range, &pctldev->gpio_ranges, node) {
            ...
            if (gpio >= range->base &&
                gpio < range->base + range->npins) {
                ...
                return range;
    [all …]

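gpio_to_pin() resolves a global GPIO number through a range in one of two ways: an explicit pins[] table, or a linear pin_base offset. The sketch below combines that with the containment test from pinctrl_match_gpio_range(); the struct is a trimmed stand-in for struct pinctrl_gpio_range, and returning -1 replaces the driver's list-walk miss:

    #include <assert.h>

    struct gpio_range {
        unsigned int base;             /* first GPIO covered */
        unsigned int npins;            /* number of GPIOs covered */
        unsigned int pin_base;         /* used when pins == NULL (linear) */
        const unsigned int *pins;      /* optional explicit map */
    };

    static int gpio_to_pin(const struct gpio_range *range, unsigned int gpio)
    {
        unsigned int offset;

        if (gpio < range->base || gpio >= range->base + range->npins)
            return -1;    /* not in this range */
        offset = gpio - range->base;
        return range->pins ? (int)range->pins[offset]
                           : (int)(range->pin_base + offset);
    }

    int main(void)
    {
        static const unsigned int sparse[] = { 7, 3, 11 };
        struct gpio_range linear = { .base = 32, .npins = 8, .pin_base = 100 };
        struct gpio_range mapped = { .base = 0, .npins = 3, .pins = sparse };

        assert(gpio_to_pin(&linear, 35) == 103);
        assert(gpio_to_pin(&mapped, 1) == 3);
        assert(gpio_to_pin(&linear, 40) == -1);    /* past base + npins */
        return 0;
    }
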
/drivers/regulator/

qcom_spmi-regulator.c:

    /* struct member */
    struct spmi_voltage_range *range;
    ...
        .range = name##_ranges, \
    ...
    /* in spmi_regulator_select_voltage() */
        const struct spmi_voltage_range *range;
        ...
        lim_min_uV = vreg->set_points->range[0].set_point_min_uV;
        ...
            vreg->set_points->range[vreg->set_points->count - 1].set_point_max_uV;
        ...
        range_max_uV = vreg->set_points->range[i - 1].set_point_max_uV;
        ...
        range = &vreg->set_points->range[range_id];
        ...
        voltage_sel = DIV_ROUND_UP(uV - range->min_uV, range->step_uV);
        uV = voltage_sel * range->step_uV + range->min_uV;
        ...
        selector += vreg->set_points->range[i].n_voltages;
    [all …]

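The selector math in spmi_regulator_select_voltage() snaps a requested microvolt value onto a linear range by rounding the selector up, then recomputing the voltage from the selector. A sketch of that snap with a reduced range struct (the real one also carries a range selector and set-point bounds):

    #include <assert.h>

    #define DIV_ROUND_UP(n, d)    (((n) + (d) - 1) / (d))

    struct linear_range { int min_uV; int step_uV; int n_voltages; };

    /* Pick the lowest selector whose voltage is >= uV; return the snapped
     * voltage through *out_uV, or -1 if uV falls above the range. */
    static int select_voltage(const struct linear_range *r, int uV, int *out_uV)
    {
        int sel;

        if (uV < r->min_uV)
            uV = r->min_uV;    /* clamp up, as the driver's limit check does */
        sel = DIV_ROUND_UP(uV - r->min_uV, r->step_uV);
        if (sel >= r->n_voltages)
            return -1;
        *out_uV = r->min_uV + sel * r->step_uV;
        return sel;
    }

    int main(void)
    {
        struct linear_range r = { .min_uV = 1500000, .step_uV = 12500, .n_voltages = 128 };
        int uV;

        assert(select_voltage(&r, 1503000, &uV) == 1);    /* rounds up */
        assert(uV == 1512500);
        assert(select_voltage(&r, 1500000, &uV) == 0 && uV == 1500000);
        return 0;
    }
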
helpers.c:

    /* in regulator_get_voltage_sel_pickable_regmap() */
        int range;
        ...
        range = regulator_range_selector_to_index(rdev, r_val);
        if (range < 0)
            ...
        voltages = linear_range_values_in_range_array(r, range);
    ...
    /* in regulator_set_voltage_sel_pickable_regmap() */
        unsigned int range;
        ...
        range = rdev->desc->linear_range_selectors[i];
        ...
            rdev->desc->vsel_mask, sel | range);
        ...
            rdev->desc->vsel_range_mask, range);
    ...
    /* in regulator_map_voltage_linear_range() */
        const struct linear_range *range;
        ...
        range = &rdev->desc->linear_ranges[i];
    [all …]

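regulator_map_voltage_linear_range() generalizes the same snap to an array of linear ranges, scanning for the range that contains the request and offsetting by the range's first selector. A sketch under the assumption of non-overlapping ranges with a non-zero step, using a cut-down range struct:

    #include <assert.h>

    #define DIV_ROUND_UP(n, d)    (((n) + (d) - 1) / (d))

    struct linear_range { int min_uV, step_uV; int min_sel, n; };

    /* Map a voltage to a selector by scanning an array of linear ranges,
     * in the spirit of regulator_map_voltage_linear_range(). */
    static int map_voltage(const struct linear_range *r, int nranges, int uV)
    {
        for (int i = 0; i < nranges; i++) {
            int max_uV = r[i].min_uV + (r[i].n - 1) * r[i].step_uV;

            if (uV < r[i].min_uV || uV > max_uV)
                continue;
            return r[i].min_sel + DIV_ROUND_UP(uV - r[i].min_uV, r[i].step_uV);
        }
        return -1;    /* no range covers this voltage */
    }

    int main(void)
    {
        static const struct linear_range ranges[] = {
            { .min_uV = 800000, .step_uV = 25000, .min_sel = 0, .n = 16 },
            { .min_uV = 1300000, .step_uV = 50000, .min_sel = 16, .n = 8 },
        };

        assert(map_voltage(ranges, 2, 800000) == 0);
        assert(map_voltage(ranges, 2, 1310000) == 17);    /* rounds up in range 2 */
        assert(map_voltage(ranges, 2, 2000000) == -1);
        return 0;
    }
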
/drivers/base/

map.c:

    /* struct probe member */
    unsigned long range;
    ...
    int kobj_map(struct kobj_map *domain, dev_t dev, unsigned long range,
                 ...)
    {
        unsigned int n = MAJOR(dev + range - 1) - MAJOR(dev) + 1;
        ...
        p->range = range;
        ...
        while (*s && (*s)->range < range)
        ...
    void kobj_unmap(struct kobj_map *domain, dev_t dev, unsigned long range)
    {
        unsigned int n = MAJOR(dev + range - 1) - MAJOR(dev) + 1;
        ...
        if (p->dev == dev && p->range == range) {
        ...
    /* in kobj_lookup() */
        if (p->dev > dev || p->dev + p->range - 1 < dev)
            ...
        if (p->range - 1 >= best)
    [all …]

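kobj_lookup() scans (dev, range) windows for one containing the requested dev_t; because kobj_map() inserts entries sorted by ascending range, the first containing window is also the narrowest, so specific registrations shadow broad ones. A sketch of that containment scan over a plain sorted array:

    #include <assert.h>
    #include <stddef.h>
    #include <string.h>

    struct probe {
        unsigned int dev;       /* first device number covered */
        unsigned long range;    /* how many numbers the window spans */
        const char *owner;
    };

    /* First containing window wins; the array is assumed sorted by
     * ascending range, as kobj_map() keeps its list. */
    static const struct probe *lookup(const struct probe *p, size_t n,
                                      unsigned int dev)
    {
        for (size_t i = 0; i < n; i++)
            if (p[i].dev <= dev && dev <= p[i].dev + p[i].range - 1)
                return &p[i];
        return NULL;
    }

    int main(void)
    {
        static const struct probe probes[] = {
            { .dev = 64, .range = 16, .owner = "narrow" },
            { .dev = 0, .range = 256, .owner = "wide" },
        };

        assert(strcmp(lookup(probes, 2, 70)->owner, "narrow") == 0);
        assert(strcmp(lookup(probes, 2, 5)->owner, "wide") == 0);
        assert(lookup(probes, 2, 300) == NULL);
        return 0;
    }
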
/drivers/virt/acrn/

ioreq.c:

    /* in acrn_ioreq_range_add() */
        struct acrn_ioreq_range *range;
        ...
        range = kzalloc(sizeof(*range), GFP_KERNEL);
        if (!range)
            ...
        range->type = type;
        range->start = start;
        range->end = end;
        ...
        list_add(&range->list, &client->range_list);
    ...
    /* in acrn_ioreq_range_del() */
        struct acrn_ioreq_range *range;
        ...
        list_for_each_entry(range, &client->range_list, list) {
            if (type == range->type &&
                ...
    [all …]

/drivers/nvdimm/

badrange.c:

    /* badblocks_populate(), final arguments: */
            struct badblocks *bb, const struct range *range)
    {
        ...
        if (bre_end < range->start)
            ...
        if (bre->start > range->end)
            ...
        if (bre->start >= range->start) {
            ...
            if (bre_end <= range->end)
                ...
            len = range->start + range_len(range)
                ...
            __add_badblock_range(bb, start - range->start, len);
        ...
        if (bre->start < range->start) {
            ...
            if (bre_end < range->end)
                len = bre->start + bre->length - range->start;
    [all …]

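badblocks_populate() is an intersection: each stored bad extent is clipped against the target range and re-expressed as an offset/length relative to range->start. The sketch below folds the driver's several overlap cases into one lo/hi clamp on inclusive bounds; the two-struct split (inclusive range vs. start+length extent) mirrors the driver's:

    #include <assert.h>
    #include <stdint.h>

    struct range { uint64_t start, end; };       /* inclusive */
    struct bad { uint64_t start, length; };      /* [start, start + length) */

    /* Clip 'b' against 'r'; on overlap, return 1 and fill offset (relative
     * to r->start) and len with the overlapping part. */
    static int clip_bad(const struct bad *b, const struct range *r,
                        uint64_t *offset, uint64_t *len)
    {
        uint64_t b_end = b->start + b->length - 1;    /* inclusive end */
        uint64_t lo = b->start > r->start ? b->start : r->start;
        uint64_t hi = b_end < r->end ? b_end : r->end;

        if (lo > hi)
            return 0;    /* no overlap */
        *offset = lo - r->start;
        *len = hi - lo + 1;
        return 1;
    }

    int main(void)
    {
        struct range r = { .start = 100, .end = 199 };
        struct bad b = { .start = 90, .length = 30 };    /* covers 90..119 */
        struct bad miss = { .start = 300, .length = 5 };
        uint64_t off, len;

        assert(clip_bad(&b, &r, &off, &len) == 1);
        assert(off == 0 && len == 20);                   /* 100..119 */
        assert(clip_bad(&miss, &r, &off, &len) == 0);
        return 0;
    }
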
/drivers/gpu/drm/logicvc/

logicvc_of.c:

    /* in the property descriptors */
        .range = {
            ...
        .range = {
            ...
        .range = { 8, 24 },
        ...
        .range = { 8, 24 },
        ...
        .range = {
            ...
        .range = {
            ...
    /* in logicvc_of_property_parse_u32() */
        if (property->range[0] || property->range[1])
            if (value < property->range[0] || value > property->range[1])

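The parse_u32 check at the bottom only enforces bounds when the range is non-zero, so a descriptor that leaves .range as { 0, 0 } is unconstrained. A tiny sketch of that convention with a trimmed property struct:

    #include <assert.h>
    #include <stdint.h>

    struct property { const char *name; uint32_t range[2]; };

    /* Accept 'value' unless a non-zero [min, max] range is set and the
     * value falls outside it; { 0, 0 } means unconstrained. */
    static int check_value(const struct property *p, uint32_t value)
    {
        if (p->range[0] || p->range[1])
            if (value < p->range[0] || value > p->range[1])
                return -1;
        return 0;
    }

    int main(void)
    {
        struct property depth = { .name = "depth", .range = { 8, 24 } };
        struct property any = { .name = "any" };    /* { 0, 0 }: no bounds */

        assert(check_value(&depth, 16) == 0);
        assert(check_value(&depth, 32) == -1);
        assert(check_value(&any, 12345) == 0);
        return 0;
    }
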
/drivers/comedi/drivers/

jr3_pci.c:

    char _reserved[offsetof(struct comedi_lrange, range[1])];
    ...
    union jr3_pci_single_range range[9];
    ...
    /* in jr3_pci_poll_subdevice() */
        union jr3_pci_single_range *r = spriv->range;
        ...
        r[0].l.range[0].min = -get_s16(&fs->fx) * 1000;
        r[0].l.range[0].max = get_s16(&fs->fx) * 1000;
        r[1].l.range[0].min = -get_s16(&fs->fy) * 1000;
        r[1].l.range[0].max = get_s16(&fs->fy) * 1000;
        r[2].l.range[0].min = -get_s16(&fs->fz) * 1000;
        r[2].l.range[0].max = get_s16(&fs->fz) * 1000;
        r[3].l.range[0].min = -get_s16(&fs->mx) * 100;
    [all …]

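Each jr3 range is built symmetrically from a full-scale sensor reading, scaled by 1000 for the force channels (and by 100 for moments). A sketch of that construction with a two-field stand-in for the comedi range struct:

    #include <assert.h>

    struct krange { int min, max; };    /* reduced comedi-style range */

    /* Build a symmetric range from a full-scale reading and a scale
     * factor, the way the fx/fy/fz channels are set up above. */
    static struct krange fs_to_range(int fs, int scale)
    {
        struct krange r = { .min = -fs * scale, .max = fs * scale };
        return r;
    }

    int main(void)
    {
        struct krange r = fs_to_range(25, 1000);    /* hypothetical reading */

        assert(r.min == -25000 && r.max == 25000);
        return 0;
    }
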
/drivers/pci/

of.c:

    /* in devm_of_pci_get_host_bridge_resources() */
        struct of_pci_range range;
        ...
        for_each_of_pci_range(&parser, &range) {
            ...
            if ((range.flags & IORESOURCE_TYPE_BITS) == IORESOURCE_IO)
                ...
            else if ((range.flags & IORESOURCE_TYPE_BITS) == IORESOURCE_MEM)
                ...
                range_type, range.cpu_addr,
                range.cpu_addr + range.size - 1, range.pci_addr);
            ...
            if (range.cpu_addr == OF_BAD_ADDR || range.size == 0)
                ...
            err = of_pci_range_to_resource(&range, dev_node, &tmp_res);
            ...
            *io_base = range.cpu_addr;
            ...
            pci_add_resource_offset(resources, res, res->start - range.pci_addr);
    [all …]

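The last line of the loop is the key arithmetic: a host bridge window maps CPU addresses to PCI bus addresses at a fixed offset of cpu_addr - pci_addr, which is what gets handed to pci_add_resource_offset(). A sketch of converting through such a window; the struct is an invented reduction of struct of_pci_range:

    #include <assert.h>
    #include <stdint.h>

    /* One host bridge window: a CPU-visible range and the PCI bus
     * address it maps to. */
    struct bridge_window {
        uint64_t cpu_addr, size, pci_addr;
    };

    static uint64_t cpu_to_pci(const struct bridge_window *w, uint64_t cpu)
    {
        assert(cpu >= w->cpu_addr && cpu - w->cpu_addr < w->size);
        return cpu - (w->cpu_addr - w->pci_addr);    /* subtract the offset */
    }

    int main(void)
    {
        struct bridge_window w = {
            .cpu_addr = 0x40000000, .size = 0x10000000, .pci_addr = 0x0,
        };

        assert(cpu_to_pci(&w, 0x40001000) == 0x1000);
        return 0;
    }
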
/drivers/firmware/efi/

fake_mem.c:

    /* in cmp_fake_mem() */
        if (m1->range.start < m2->range.start)
            ...
        if (m1->range.start > m2->range.start)
            ...
    /* in efi_fake_range() */
        new_nr_map += efi_memmap_split_count(md, &efi_range->range);
    ...
    /* in setup_fake_mem() */
        efi_fake_mems[nr_fake_mem].range.start = start;
        efi_fake_mems[nr_fake_mem].range.end = start + mem_size - 1;
        ...
            efi_fake_mems[i].attribute, efi_fake_mems[i].range.start,
            efi_fake_mems[i].range.end);

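cmp_fake_mem() is a plain three-way comparator on range.start, written for the kernel's sort(); the same body drops straight into qsort():

    #include <assert.h>
    #include <stdint.h>
    #include <stdlib.h>

    struct range { uint64_t start, end; };
    struct fake_mem { struct range range; uint64_t attribute; };

    /* Order entries by range.start, returning <0 / 0 / >0 like strcmp(). */
    static int cmp_fake_mem(const void *x1, const void *x2)
    {
        const struct fake_mem *m1 = x1, *m2 = x2;

        if (m1->range.start < m2->range.start)
            return -1;
        if (m1->range.start > m2->range.start)
            return 1;
        return 0;
    }

    int main(void)
    {
        struct fake_mem mems[] = {
            { .range = { 0x2000, 0x2fff } },
            { .range = { 0x0000, 0x0fff } },
            { .range = { 0x1000, 0x1fff } },
        };

        qsort(mems, 3, sizeof(mems[0]), cmp_fake_mem);
        assert(mems[0].range.start == 0x0000);
        assert(mems[2].range.start == 0x2000);
        return 0;
    }
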
/drivers/infiniband/core/

umem_odp.c:

    /* in ib_umem_odp_map_dma_and_lock() */
        struct hmm_range range = {};
        ...
        range.notifier = &umem_odp->notifier;
        range.start = ALIGN_DOWN(user_virt, 1UL << page_shift);
        range.end = ALIGN(user_virt + bcnt, 1UL << page_shift);
        pfn_start_idx = (range.start - ib_umem_start(umem_odp)) >> PAGE_SHIFT;
        num_pfns = (range.end - range.start) >> PAGE_SHIFT;
        ...
        range.default_flags = HMM_PFN_REQ_FAULT;
        ...
            range.default_flags |= HMM_PFN_REQ_WRITE;
        ...
        range.hmm_pfns = &(umem_odp->pfn_list[pfn_start_idx]);
        ...
        current_seq = range.notifier_seq =
            ...
    [all …]

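The hmm_range setup widens [user_virt, user_virt + bcnt) outward to the boundaries implied by page_shift and then counts pages from the widened window. A sketch of just that alignment arithmetic (ALIGN/ALIGN_DOWN again assume a power-of-two page size):

    #include <assert.h>
    #include <stdint.h>

    #define ALIGN_DOWN(x, a)    ((x) & ~((uint64_t)(a) - 1))
    #define ALIGN(x, a)         ALIGN_DOWN((x) + (a) - 1, a)

    /* Widen [virt, virt + len) outward to 1 << page_shift boundaries and
     * report how many pages the widened window spans. */
    static uint64_t span_pages(uint64_t virt, uint64_t len, unsigned int page_shift,
                               uint64_t *start, uint64_t *end)
    {
        uint64_t page = 1ULL << page_shift;

        *start = ALIGN_DOWN(virt, page);
        *end = ALIGN(virt + len, page);
        return (*end - *start) >> page_shift;
    }

    int main(void)
    {
        uint64_t start, end;

        /* 100 bytes straddling a 4 KiB boundary span two pages */
        assert(span_pages(0xfff0, 100, 12, &start, &end) == 2);
        assert(start == 0xf000 && end == 0x11000);
        return 0;
    }
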
/drivers/staging/rtl8192u/

r8192U_wx.c:

    /* in rtl8180_wx_get_range() */
        struct iw_range *range = (struct iw_range *)extra;
        struct iw_range_with_scan_capa *tmp = (struct iw_range_with_scan_capa *)range;
        ...
        wrqu->data.length = sizeof(*range);
        memset(range, 0, sizeof(*range));
        ...
        range->throughput = 5 * 1000 * 1000;
        ...
        range->sensitivity = priv->max_sens;    /* signal level threshold range */
        ...
        range->max_qual.qual = 100;
        ...
        range->max_qual.level = 0;
        range->max_qual.noise = 0x100 - 98;
        range->max_qual.updated = 7;    /* Updated all three */
    [all …]

/drivers/net/wireless/intel/iwlwifi/fw/

dbg.c:

    /* in iwl_dump_ini_prph_mac_iter() */
        struct iwl_fw_ini_error_dump_range *range = range_ptr;
        __le32 *val = range->data;
        ...
        range->internal_base_addr = cpu_to_le32(addr);
        range->range_data_size = reg->dev_addr.size;
        ...
        return sizeof(*range) + le32_to_cpu(range->range_data_size);
    ...
    /* in iwl_dump_ini_prph_phy_iter() */
        struct iwl_fw_ini_error_dump_range *range = range_ptr;
        __le32 *val = range->data;
        ...
        range->internal_base_addr = cpu_to_le32(addr);
        range->range_data_size = reg->dev_addr.size;
        ...
        return sizeof(*range) + le32_to_cpu(range->range_data_size);
    [all …]

/drivers/gpu/drm/i915/selftests/

intel_uncore.c:

    /* in intel_shadow_table_check() */
        const struct i915_range *range;
        ...
        range = range_lists[j].regs;
        for (i = 0, prev = -1; i < range_lists[j].size; i++, range++) {
            if (range->end < range->start) {
                ...
                __func__, i, range->start, range->end);
                ...
            if (prev >= (s32)range->start) {
                ...
                __func__, i, range->start, range->end, prev);
                ...
            if (range->start % 4) {
                ...
                __func__, i, range->start, range->end);
                ...
            prev = range->end;

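The selftest applies three rules to each range table: end must not precede start, entries must be sorted and non-overlapping against the previous end, and starts must be 4-byte aligned. A standalone checker with the same three rejections, minus the selftest's per-failure logging:

    #include <assert.h>
    #include <stdint.h>
    #include <stddef.h>

    struct reg_range { uint32_t start, end; };    /* inclusive */

    /* Reject tables with inverted, overlapping/unsorted, or unaligned
     * entries; prev starts at -1 so a range at offset 0 is accepted. */
    static int check_table(const struct reg_range *r, size_t n)
    {
        int64_t prev = -1;

        for (size_t i = 0; i < n; i++) {
            if (r[i].end < r[i].start)
                return -1;    /* inverted */
            if (prev >= (int64_t)r[i].start)
                return -1;    /* overlapping or unsorted */
            if (r[i].start % 4)
                return -1;    /* unaligned */
            prev = r[i].end;
        }
        return 0;
    }

    int main(void)
    {
        static const struct reg_range good[] = { { 0x0, 0xc }, { 0x10, 0x1c } };
        static const struct reg_range bad[] = { { 0x0, 0x20 }, { 0x10, 0x1c } };

        assert(check_table(good, 2) == 0);
        assert(check_table(bad, 2) == -1);    /* second starts inside first */
        return 0;
    }
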
/drivers/clk/at91/

clk-generated.c:

    /* struct member */
    struct clk_range range;
    ...
    /* in clk_generated_determine_rate() */
        if (gck->range.max && req->rate > gck->range.max)
            req->rate = gck->range.max;
        if (gck->range.min && req->rate < gck->range.min)
            req->rate = gck->range.min;
        ...
            (gck->range.max && min_rate > gck->range.max))
        ...
        if (best_rate < 0 || (gck->range.max && best_rate > gck->range.max))
    ...
    /* in clk_generated_set_rate() */
        if (gck->range.max && rate > gck->range.max)
    ...
    /* among at91_clk_register_generated()'s arguments: */
            const struct clk_range *range,
        ...
        gck->range = *range;
    [all …]

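Both at91 clock files gate everything on a clk_range where a zero bound means "no limit": rates get clamped into [min, max] before any divisor search. A sketch of that clamp and its zero-means-unbounded convention:

    #include <assert.h>

    struct clk_range { unsigned long min, max; };    /* 0 means no bound */

    static unsigned long clamp_rate(unsigned long rate, const struct clk_range *r)
    {
        if (r->max && rate > r->max)
            rate = r->max;
        if (r->min && rate < r->min)
            rate = r->min;
        return rate;
    }

    int main(void)
    {
        struct clk_range r = { .min = 1000000, .max = 100000000 };
        struct clk_range unbounded = { 0 };    /* unconstrained */

        assert(clamp_rate(500000, &r) == 1000000);
        assert(clamp_rate(200000000, &r) == 100000000);
        assert(clamp_rate(123, &unbounded) == 123);
        return 0;
    }
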
clk-peripheral.c:

    /* struct member */
    struct clk_range range;
    ...
    /* in clk_sam9x5_peripheral_autodiv() */
        if (periph->range.max) {
            ...
            if (parent_rate >> shift <= periph->range.max)
        ...
    /* in clk_sam9x5_peripheral_determine_rate() */
        if (periph->id < PERIPHERAL_ID_MIN || !periph->range.max)
            ...
        if (periph->range.max && tmp_rate > periph->range.max)
            ...
            (periph->range.max && best_rate > periph->range.max))
        ...
    /* in clk_sam9x5_peripheral_round_rate() */
        if (periph->id < PERIPHERAL_ID_MIN || !periph->range.max)
            ...
        if (periph->range.max) {
            ...
            if (cur_rate <= periph->range.max)
        ...
    /* in clk_sam9x5_peripheral_set_rate() */
        if (periph->id < PERIPHERAL_ID_MIN || !periph->range.max) {
    [all …]

/drivers/gpu/drm/amd/display/dc/dsc/

dc_dsc.c:

    /* forward declaration, ending with: */
            struct dc_dsc_bw_range *range);
    ...
    /* dc_dsc_compute_bandwidth_range(), final argument: */
            struct dc_dsc_bw_range *range)
    {
        ...
            config.num_slices_h, &dsc_common_caps, timing, range);
    ...
    /* decide_dsc_bandwidth_range(), final argument: */
            struct dc_dsc_bw_range *range)
    {
        ...
        memset(range, 0, sizeof(*range));
        ...
        range->max_target_bpp_x16 = preferred_bpp_x16;
        range->min_target_bpp_x16 = preferred_bpp_x16;
        ...
        range->max_target_bpp_x16 = MIN(dsc_caps->edp_sink_max_bits_per_pixel,
            ...
        range->min_target_bpp_x16 = min_bpp_x16;
        ...
        range->max_target_bpp_x16 = max_bpp_x16;
    [all …]

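decide_dsc_bandwidth_range() collapses the bandwidth window to a single point when a preferred bits-per-pixel is set, and otherwise keeps the caller's [min, max]; the eDP sink-cap branch is dropped here. A sketch of that selection on x16 fixed-point bpp values (the test numbers are arbitrary):

    #include <assert.h>

    struct bw_range { int min_bpp_x16, max_bpp_x16; };

    /* If a preferred bits-per-pixel is given (non-zero), pin both bounds
     * to it; otherwise keep the caller's [min, max] window. */
    static struct bw_range decide_range(int preferred_x16, int min_x16, int max_x16)
    {
        struct bw_range r;

        if (preferred_x16) {
            r.min_bpp_x16 = preferred_x16;
            r.max_bpp_x16 = preferred_x16;
        } else {
            r.min_bpp_x16 = min_x16;
            r.max_bpp_x16 = max_x16;
        }
        return r;
    }

    int main(void)
    {
        struct bw_range fixed = decide_range(8 * 16, 6 * 16, 12 * 16);
        struct bw_range open = decide_range(0, 6 * 16, 12 * 16);

        assert(fixed.min_bpp_x16 == 128 && fixed.max_bpp_x16 == 128);
        assert(open.min_bpp_x16 == 96 && open.max_bpp_x16 == 192);
        return 0;
    }
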