Searched refs:range (Results 1 – 25 of 547) sorted by relevance

/drivers/soc/ti/
knav_qmss_acc.c
20 #define knav_range_offset_to_inst(kdev, range, q) \ argument
21 (range->queue_base_inst + (q << kdev->inst_shift))
23 static void __knav_acc_notify(struct knav_range_info *range, in __knav_acc_notify() argument
26 struct knav_device *kdev = range->kdev; in __knav_acc_notify()
30 range_base = kdev->base_id + range->queue_base; in __knav_acc_notify()
32 if (range->flags & RANGE_MULTI_QUEUE) { in __knav_acc_notify()
33 for (queue = 0; queue < range->num_queues; queue++) { in __knav_acc_notify()
34 inst = knav_range_offset_to_inst(kdev, range, in __knav_acc_notify()
44 queue = acc->channel - range->acc_info.start_channel; in __knav_acc_notify()
45 inst = knav_range_offset_to_inst(kdev, range, queue); in __knav_acc_notify()
[all …]
knav_qmss_queue.c
112 static int knav_queue_setup_irq(struct knav_range_info *range, in knav_queue_setup_irq() argument
115 unsigned queue = inst->id - range->queue_base; in knav_queue_setup_irq()
118 if (range->flags & RANGE_HAS_IRQ) { in knav_queue_setup_irq()
119 irq = range->irqs[queue].irq; in knav_queue_setup_irq()
125 if (range->irqs[queue].cpu_mask) { in knav_queue_setup_irq()
126 ret = irq_set_affinity_hint(irq, range->irqs[queue].cpu_mask); in knav_queue_setup_irq()
128 dev_warn(range->kdev->dev, in knav_queue_setup_irq()
139 struct knav_range_info *range = inst->range; in knav_queue_free_irq() local
140 unsigned queue = inst->id - inst->range->queue_base; in knav_queue_free_irq()
143 if (range->flags & RANGE_HAS_IRQ) { in knav_queue_free_irq()
[all …]
/drivers/staging/android/
ashmem.c
108 static inline unsigned long range_size(struct ashmem_range *range) in range_size() argument
110 return range->pgend - range->pgstart + 1; in range_size()
113 static inline bool range_on_lru(struct ashmem_range *range) in range_on_lru() argument
115 return range->purged == ASHMEM_NOT_PURGED; in range_on_lru()
118 static inline bool page_range_subsumes_range(struct ashmem_range *range, in page_range_subsumes_range() argument
121 return (range->pgstart >= start) && (range->pgend <= end); in page_range_subsumes_range()
124 static inline bool page_range_subsumed_by_range(struct ashmem_range *range, in page_range_subsumed_by_range() argument
127 return (range->pgstart <= start) && (range->pgend >= end); in page_range_subsumed_by_range()
130 static inline bool page_in_range(struct ashmem_range *range, size_t page) in page_in_range() argument
132 return (range->pgstart <= page) && (range->pgend >= page); in page_in_range()
[all …]
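
These ashmem helpers are all inclusive interval tests over page indices (range_size() above is pgend - pgstart + 1). A minimal standalone sketch of the three predicates, using a hypothetical struct range_pg in place of struct ashmem_range:

#include <stdbool.h>
#include <stddef.h>
#include <stdio.h>

/* Hypothetical stand-in for struct ashmem_range: inclusive page bounds. */
struct range_pg { size_t pgstart, pgend; };

/* [start, end] fully contains the range. */
static bool page_range_subsumes_range(const struct range_pg *r, size_t start, size_t end)
{
        return r->pgstart >= start && r->pgend <= end;
}

/* The range fully contains [start, end]. */
static bool page_range_subsumed_by_range(const struct range_pg *r, size_t start, size_t end)
{
        return r->pgstart <= start && r->pgend >= end;
}

/* A single page falls inside the range. */
static bool page_in_range(const struct range_pg *r, size_t page)
{
        return r->pgstart <= page && r->pgend >= page;
}

int main(void)
{
        struct range_pg r = { .pgstart = 4, .pgend = 9 };

        printf("%d %d %d\n",
               page_range_subsumes_range(&r, 0, 16),    /* 1: [4,9] inside [0,16] */
               page_range_subsumed_by_range(&r, 5, 8),  /* 1: [5,8] inside [4,9]  */
               page_in_range(&r, 10));                  /* 0: 10 is past pgend 9  */
        return 0;
}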
/drivers/dax/
kmem.c
30 static int dax_kmem_range(struct dev_dax *dev_dax, int i, struct range *r) in dax_kmem_range()
33 struct range *range = &dax_range->range; in dax_kmem_range() local
36 r->start = ALIGN(range->start, memory_block_size_bytes()); in dax_kmem_range()
37 r->end = ALIGN_DOWN(range->end + 1, memory_block_size_bytes()) - 1; in dax_kmem_range()
39 r->start = range->start; in dax_kmem_range()
40 r->end = range->end; in dax_kmem_range()
75 struct range range; in dev_dax_kmem_probe() local
77 rc = dax_kmem_range(dev_dax, i, &range); in dev_dax_kmem_probe()
80 i, range.start, range.end); in dev_dax_kmem_probe()
83 total_len += range_len(&range); in dev_dax_kmem_probe()
[all …]
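
dax_kmem_range() shrinks a device range inward so that only whole memory blocks get hotplugged: the start is rounded up, and end + 1 rounded down, to the block size (struct range uses an inclusive end). A compilable sketch of that rounding, assuming an illustrative 128 MiB block size in place of memory_block_size_bytes():

#include <stdint.h>
#include <stdio.h>

#define ALIGN_DOWN(x, a)  ((x) & ~((uint64_t)(a) - 1))
#define ALIGN(x, a)       ALIGN_DOWN((x) + (a) - 1, (a))

struct range { uint64_t start, end; };          /* end is inclusive, as in the kernel */

/* Shrink [start, end] to the largest sub-range covering only whole blocks. */
static void trim_to_blocks(const struct range *in, struct range *out, uint64_t block)
{
        out->start = ALIGN(in->start, block);            /* round start up   */
        out->end   = ALIGN_DOWN(in->end + 1, block) - 1; /* round end+1 down */
}

int main(void)
{
        uint64_t block = 128ULL << 20;                   /* assumed 128 MiB */
        struct range in = { .start = 0x6400000, .end = 0x1f3ffffff }, out;

        trim_to_blocks(&in, &out, block);
        printf("trimmed: [%#llx, %#llx]\n",
               (unsigned long long)out.start, (unsigned long long)out.end);
        return 0;
}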
bus.c
186 size += range_len(&dev_dax->ranges[i].range); in dev_dax_size()
416 struct range *range = &dev_dax->ranges[i].range; in trim_dev_dax_range() local
421 (unsigned long long)range->start, in trim_dev_dax_range()
422 (unsigned long long)range->end); in trim_dev_dax_range()
424 __release_region(&dax_region->res, range->start, range_len(range)); in trim_dev_dax_range()
602 struct range *range, int target_node, unsigned int align, in alloc_dax_region() argument
617 if (!IS_ALIGNED(range->start, align) in alloc_dax_region()
618 || !IS_ALIGNED(range_len(range), align)) in alloc_dax_region()
633 .start = range->start, in alloc_dax_region()
634 .end = range->end, in alloc_dax_region()
[all …]
/drivers/pci/hotplug/
ibmphp_res.c
368 static int add_bus_range(int type, struct range_node *range, struct bus_node *bus_cur) in add_bus_range() argument
392 if (range->start < range_cur->start) in add_bus_range()
402 bus_cur->rangeMem = range; in add_bus_range()
405 bus_cur->rangePFMem = range; in add_bus_range()
408 bus_cur->rangeIO = range; in add_bus_range()
411 range->next = range_cur; in add_bus_range()
412 range->rangeno = 1; in add_bus_range()
416 range->next = NULL; in add_bus_range()
417 range_prev->next = range; in add_bus_range()
418 range->rangeno = range_prev->rangeno + 1; in add_bus_range()
[all …]
/drivers/of/
address.c
45 u64 (*map)(__be32 *addr, const __be32 *range,
65 static u64 of_bus_default_map(__be32 *addr, const __be32 *range, in of_bus_default_map() argument
70 cp = of_read_number(range, na); in of_bus_default_map()
71 s = of_read_number(range + na + pna, ns); in of_bus_default_map()
103 static u64 of_bus_default_flags_map(__be32 *addr, const __be32 *range, int na, in of_bus_default_flags_map() argument
109 if (*addr != *range) in of_bus_default_flags_map()
113 cp = of_read_number(range + 1, na - 1); in of_bus_default_flags_map()
114 s = of_read_number(range + na + pna, ns); in of_bus_default_flags_map()
194 static u64 of_bus_pci_map(__be32 *addr, const __be32 *range, int na, int ns, in of_bus_pci_map() argument
201 rf = of_bus_pci_get_flags(range); in of_bus_pci_map()
[all …]
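
A "ranges" entry is laid out as (child-addr : parent-addr : size), na, pna and ns cells wide respectively. of_bus_default_map() tests whether the child address falls inside the entry and yields its offset; the parent base is applied later by of_translate_one(). The simplified sketch below folds both steps into one function, reads host-order cells instead of __be32, and returns (u64)-1 to mirror OF_BAD_ADDR:

#include <stdint.h>
#include <stdio.h>

/* Host-order stand-in for of_read_number(); the kernel reads __be32 cells. */
static uint64_t read_cells(const uint32_t *c, int n)
{
        uint64_t v = 0;
        while (n--)
                v = (v << 32) | *c++;
        return v;
}

/* Translate child address da through one ranges entry, or fail. */
static uint64_t map_range(uint64_t da, const uint32_t *range, int na, int pna, int ns)
{
        uint64_t cp = read_cells(range, na);             /* child base  */
        uint64_t pa = read_cells(range + na, pna);       /* parent base */
        uint64_t s  = read_cells(range + na + pna, ns);  /* size        */

        if (da < cp || da >= cp + s)
                return UINT64_MAX;                       /* outside this entry */
        return pa + (da - cp);
}

int main(void)
{
        /* <0x40000000  0x0 0xe0000000  0x10000000>: na=1, pna=2, ns=1 */
        uint32_t ranges[] = { 0x40000000, 0x0, 0xe0000000, 0x10000000 };

        printf("%#llx\n", (unsigned long long)
               map_range(0x40001000, ranges, 1, 2, 1));  /* -> 0xe0001000 */
        return 0;
}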
/drivers/gpu/drm/sprd/
megacores_pll.c
221 u32 range[2], constant; in dphy_timing_config() local
236 range[L] = 50 * scale; in dphy_timing_config()
237 range[H] = INFINITY; in dphy_timing_config()
238 val[CLK] = DIV_ROUND_UP(range[L] * (factor << 1), t_byteck) - 2; in dphy_timing_config()
243 range[L] = 38 * scale; in dphy_timing_config()
244 range[H] = 95 * scale; in dphy_timing_config()
245 tmp = AVERAGE(range[L], range[H]); in dphy_timing_config()
246 val[CLK] = DIV_ROUND_UP(AVERAGE(range[L], range[H]), t_half_byteck) - 1; in dphy_timing_config()
247 range[L] = 40 * scale + 4 * t_ui; in dphy_timing_config()
248 range[H] = 85 * scale + 6 * t_ui; in dphy_timing_config()
[all …]
/drivers/pinctrl/
core.c
280 static inline int gpio_to_pin(struct pinctrl_gpio_range *range, in gpio_to_pin() argument
283 unsigned int offset = gpio - range->base; in gpio_to_pin()
284 if (range->pins) in gpio_to_pin()
285 return range->pins[offset]; in gpio_to_pin()
287 return range->pin_base + offset; in gpio_to_pin()
301 struct pinctrl_gpio_range *range; in pinctrl_match_gpio_range() local
305 list_for_each_entry(range, &pctldev->gpio_ranges, node) { in pinctrl_match_gpio_range()
307 if (gpio >= range->base && in pinctrl_match_gpio_range()
308 gpio < range->base + range->npins) { in pinctrl_match_gpio_range()
310 return range; in pinctrl_match_gpio_range()
[all …]
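
pinctrl_match_gpio_range() selects the range whose [base, base + npins) window covers the GPIO number, and gpio_to_pin() then maps it either linearly from pin_base or through an explicit pins[] table. A standalone sketch of both paths, with illustrative numbers:

#include <stdbool.h>
#include <stdio.h>

/* Trimmed-down view of struct pinctrl_gpio_range. */
struct gpio_range {
        unsigned int base;          /* first GPIO number covered  */
        unsigned int npins;         /* number of GPIOs in range   */
        unsigned int pin_base;      /* first pin, for linear maps */
        const unsigned int *pins;   /* explicit map, or NULL      */
};

static bool gpio_in_range(const struct gpio_range *r, unsigned int gpio)
{
        return gpio >= r->base && gpio < r->base + r->npins;
}

static int gpio_to_pin(const struct gpio_range *r, unsigned int gpio)
{
        unsigned int offset = gpio - r->base;
        return r->pins ? (int)r->pins[offset] : (int)(r->pin_base + offset);
}

int main(void)
{
        static const unsigned int sparse[] = { 14, 15, 22 };
        struct gpio_range linear = { .base = 32, .npins = 8, .pin_base = 100 };
        struct gpio_range mapped = { .base = 48, .npins = 3, .pins = sparse };

        if (gpio_in_range(&linear, 35))
                printf("gpio 35 -> pin %d\n", gpio_to_pin(&linear, 35)); /* 103 */
        if (gpio_in_range(&mapped, 50))
                printf("gpio 50 -> pin %d\n", gpio_to_pin(&mapped, 50)); /* 22  */
        return 0;
}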
/drivers/regulator/
qcom_spmi-regulator.c
403 struct spmi_voltage_range *range; member
481 .range = name##_ranges, \
669 const struct spmi_voltage_range *range; in spmi_regulator_select_voltage() local
675 lim_min_uV = vreg->set_points->range[0].set_point_min_uV; in spmi_regulator_select_voltage()
677 vreg->set_points->range[vreg->set_points->count - 1].set_point_max_uV; in spmi_regulator_select_voltage()
691 range_max_uV = vreg->set_points->range[i - 1].set_point_max_uV; in spmi_regulator_select_voltage()
697 range = &vreg->set_points->range[range_id]; in spmi_regulator_select_voltage()
703 voltage_sel = DIV_ROUND_UP(uV - range->min_uV, range->step_uV); in spmi_regulator_select_voltage()
704 uV = voltage_sel * range->step_uV + range->min_uV; in spmi_regulator_select_voltage()
716 selector += vreg->set_points->range[i].n_voltages; in spmi_regulator_select_voltage()
[all …]
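
spmi_regulator_select_voltage() rounds the requested voltage up to the next step of the selected linear range, then recomputes the voltage that will actually be programmed. A simplified worked sketch with illustrative base/step values (not taken from the driver's tables):

#include <stdio.h>

#define DIV_ROUND_UP(n, d) (((n) + (d) - 1) / (d))

/* Snap a request up to the next step of a linear voltage range. */
static int select_voltage(int uV, int min_uV, int step_uV, int *sel)
{
        if (uV < min_uV)
                uV = min_uV;
        *sel = DIV_ROUND_UP(uV - min_uV, step_uV);
        return *sel * step_uV + min_uV;      /* actual programmed voltage */
}

int main(void)
{
        int sel;
        /* Assumed range: 1.4 V base, 12.5 mV steps (illustrative only). */
        int uV = select_voltage(1412000, 1400000, 12500, &sel);

        printf("selector %d -> %d uV\n", sel, uV); /* selector 1 -> 1412500 uV */
        return 0;
}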
helpers.c
134 int range; in regulator_get_voltage_sel_pickable_regmap() local
154 range = regulator_range_selector_to_index(rdev, r_val); in regulator_get_voltage_sel_pickable_regmap()
155 if (range < 0) in regulator_get_voltage_sel_pickable_regmap()
158 voltages = linear_range_values_in_range_array(r, range); in regulator_get_voltage_sel_pickable_regmap()
178 unsigned int range; in regulator_set_voltage_sel_pickable_regmap() local
199 range = rdev->desc->linear_range_selectors_bitfield[i]; in regulator_set_voltage_sel_pickable_regmap()
200 range <<= ffs(rdev->desc->vsel_range_mask) - 1; in regulator_set_voltage_sel_pickable_regmap()
206 rdev->desc->vsel_mask, sel | range); in regulator_set_voltage_sel_pickable_regmap()
210 rdev->desc->vsel_range_mask, range); in regulator_set_voltage_sel_pickable_regmap()
413 const struct linear_range *range; in regulator_map_voltage_linear_range() local
[all …]
/drivers/net/ethernet/mellanox/mlxsw/
spectrum_port_range.c
13 struct mlxsw_sp_port_range range; member
36 mlxsw_reg_pprr_src_set(pprr_pl, prr->range.source); in mlxsw_sp_port_range_reg_configure()
37 mlxsw_reg_pprr_dst_set(pprr_pl, !prr->range.source); in mlxsw_sp_port_range_reg_configure()
40 mlxsw_reg_pprr_port_range_min_set(pprr_pl, prr->range.min); in mlxsw_sp_port_range_reg_configure()
41 mlxsw_reg_pprr_port_range_max_set(pprr_pl, prr->range.max); in mlxsw_sp_port_range_reg_configure()
48 const struct mlxsw_sp_port_range *range, in mlxsw_sp_port_range_reg_create() argument
59 prr->range = *range; in mlxsw_sp_port_range_reg_create()
99 const struct mlxsw_sp_port_range *range) in mlxsw_sp_port_range_reg_find() argument
106 if (prr->range.min == range->min && in mlxsw_sp_port_range_reg_find()
107 prr->range.max == range->max && in mlxsw_sp_port_range_reg_find()
[all …]
/drivers/gpu/drm/nouveau/
nouveau_uvmm.c
67 u64 range; member
84 u64 range; member
90 u64 addr, u64 range) in nouveau_uvmm_vmm_sparse_ref() argument
94 return nvif_vmm_raw_sparse(vmm, addr, range, true); in nouveau_uvmm_vmm_sparse_ref()
99 u64 addr, u64 range) in nouveau_uvmm_vmm_sparse_unref() argument
103 return nvif_vmm_raw_sparse(vmm, addr, range, false); in nouveau_uvmm_vmm_sparse_unref()
108 u64 addr, u64 range) in nouveau_uvmm_vmm_get() argument
112 return nvif_vmm_raw_get(vmm, addr, range, PAGE_SHIFT); in nouveau_uvmm_vmm_get()
117 u64 addr, u64 range) in nouveau_uvmm_vmm_put() argument
121 return nvif_vmm_raw_put(vmm, addr, range, PAGE_SHIFT); in nouveau_uvmm_vmm_put()
[all …]
/drivers/gpu/drm/
drm_gpuva_mgr.c
603 #define GPUVA_LAST(node) ((node)->va.addr + (node)->va.range - 1)
617 drm_gpuva_check_overflow(u64 addr, u64 range) in drm_gpuva_check_overflow() argument
621 return WARN(check_add_overflow(addr, range, &end), in drm_gpuva_check_overflow()
626 drm_gpuva_in_mm_range(struct drm_gpuva_manager *mgr, u64 addr, u64 range) in drm_gpuva_in_mm_range() argument
628 u64 end = addr + range; in drm_gpuva_in_mm_range()
636 drm_gpuva_in_kernel_node(struct drm_gpuva_manager *mgr, u64 addr, u64 range) in drm_gpuva_in_kernel_node() argument
638 u64 end = addr + range; in drm_gpuva_in_kernel_node()
640 u64 krange = mgr->kernel_alloc_node.va.range; in drm_gpuva_in_kernel_node()
648 u64 addr, u64 range) in drm_gpuva_range_valid() argument
650 return !drm_gpuva_check_overflow(addr, range) && in drm_gpuva_range_valid()
[all …]
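
drm_gpuva_check_overflow() rejects addr + range combinations that wrap, and the in-range test then bounds the request against the managed VA span. A simplified sketch combining the two checks (it omits the kernel_alloc_node exclusion handled by drm_gpuva_in_kernel_node(), and mm_start/mm_size here stand in for the manager's fields); __builtin_add_overflow is the userspace counterpart of check_add_overflow():

#include <stdbool.h>
#include <stdint.h>
#include <stdio.h>

static bool va_range_valid(uint64_t mm_start, uint64_t mm_size,
                           uint64_t addr, uint64_t range)
{
        uint64_t end;

        if (__builtin_add_overflow(addr, range, &end))
                return false;                        /* addr + range wrapped */
        return addr >= mm_start && end <= mm_start + mm_size;
}

int main(void)
{
        printf("%d\n", va_range_valid(0x1000, 0x100000, 0x2000, 0x1000)); /* 1 */
        printf("%d\n", va_range_valid(0x1000, 0x100000, UINT64_MAX, 2));  /* 0 */
        return 0;
}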
/drivers/base/
map.c
23 unsigned long range; member
32 int kobj_map(struct kobj_map *domain, dev_t dev, unsigned long range, in kobj_map() argument
36 unsigned int n = MAJOR(dev + range - 1) - MAJOR(dev) + 1; in kobj_map()
53 p->range = range; in kobj_map()
59 while (*s && (*s)->range < range) in kobj_map()
68 void kobj_unmap(struct kobj_map *domain, dev_t dev, unsigned long range) in kobj_unmap() argument
70 unsigned int n = MAJOR(dev + range - 1) - MAJOR(dev) + 1; in kobj_unmap()
83 if (p->dev == dev && p->range == range) { in kobj_unmap()
108 if (p->dev > dev || p->dev + p->range - 1 < dev) in kobj_lookup()
110 if (p->range - 1 >= best) in kobj_lookup()
[all …]
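
kobj_map() registers a probe once per major device number spanned by [dev, dev + range - 1]; the count comes from the MAJOR() arithmetic on snippet line 36 above. A standalone sketch of that count, using the kernel's 20-bit minor layout:

#include <stdio.h>

/* Kernel dev_t layout: 20-bit minor, major above it. */
#define MINORBITS       20
#define MKDEV(ma, mi)   (((unsigned int)(ma) << MINORBITS) | (mi))
#define MAJOR(dev)      ((unsigned int)((dev) >> MINORBITS))

/* How many distinct majors does [dev, dev + range - 1] touch? */
static unsigned int majors_spanned(unsigned int dev, unsigned long range)
{
        return MAJOR(dev + range - 1) - MAJOR(dev) + 1;
}

int main(void)
{
        /* 256 minors starting at (8, 0): one major. */
        printf("%u\n", majors_spanned(MKDEV(8, 0), 256));
        /* A range starting near the top of major 8's minor space that
         * spills into major 9: two majors. */
        printf("%u\n", majors_spanned(MKDEV(8, (1 << MINORBITS) - 16), 32));
        return 0;
}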
/drivers/virt/acrn/
ioreq.c
118 struct acrn_ioreq_range *range; in acrn_ioreq_range_add() local
126 range = kzalloc(sizeof(*range), GFP_KERNEL); in acrn_ioreq_range_add()
127 if (!range) in acrn_ioreq_range_add()
130 range->type = type; in acrn_ioreq_range_add()
131 range->start = start; in acrn_ioreq_range_add()
132 range->end = end; in acrn_ioreq_range_add()
135 list_add(&range->list, &client->range_list); in acrn_ioreq_range_add()
151 struct acrn_ioreq_range *range; in acrn_ioreq_range_del() local
154 list_for_each_entry(range, &client->range_list, list) { in acrn_ioreq_range_del()
155 if (type == range->type && in acrn_ioreq_range_del()
[all …]
/drivers/nvdimm/
badrange.c
214 struct badblocks *bb, const struct range *range) in badblocks_populate() argument
225 if (bre_end < range->start) in badblocks_populate()
227 if (bre->start > range->end) in badblocks_populate()
230 if (bre->start >= range->start) { in badblocks_populate()
234 if (bre_end <= range->end) in badblocks_populate()
237 len = range->start + range_len(range) in badblocks_populate()
239 __add_badblock_range(bb, start - range->start, len); in badblocks_populate()
246 if (bre->start < range->start) { in badblocks_populate()
249 if (bre_end < range->end) in badblocks_populate()
250 len = bre->start + bre->length - range->start; in badblocks_populate()
[all …]
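
badblocks_populate() clips each stored bad-range extent against the queried range before adding it. The sketch below shows only that interval clipping, assuming inclusive window bounds; the driver additionally rebases the result to range->start and works in 512-byte sectors:

#include <stdint.h>
#include <stdio.h>

/* Clip extent [s, s + len) against the inclusive window [w_start, w_end];
 * returns the clipped length and writes the clipped start, or 0 if disjoint. */
static uint64_t clip_extent(uint64_t s, uint64_t len,
                            uint64_t w_start, uint64_t w_end, uint64_t *out_s)
{
        uint64_t e = s + len - 1;             /* inclusive extent end */

        if (e < w_start || s > w_end)
                return 0;                     /* no overlap */
        if (s < w_start)
                s = w_start;
        if (e > w_end)
                e = w_end;
        *out_s = s;
        return e - s + 1;
}

int main(void)
{
        uint64_t start, len;

        len = clip_extent(80, 100, 100, 149, &start); /* -> [100, 149], len 50 */
        printf("start %llu len %llu\n",
               (unsigned long long)start, (unsigned long long)len);
        return 0;
}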
/drivers/gpu/drm/logicvc/
logicvc_of.c
42 .range = {
50 .range = {
57 .range = { 8, 24 },
79 .range = { 8, 24 },
84 .range = {
92 .range = {
158 if (property->range[0] || property->range[1]) in logicvc_of_property_parse_u32()
159 if (value < property->range[0] || value > property->range[1]) in logicvc_of_property_parse_u32()
/drivers/comedi/drivers/
jr3_pci.c
94 char _reserved[offsetof(struct comedi_lrange, range[1])];
112 union jr3_pci_single_range range[9]; member
508 union jr3_pci_single_range *r = spriv->range; in jr3_pci_poll_subdevice()
511 r[0].l.range[0].min = -get_s16(&fs->fx) * 1000; in jr3_pci_poll_subdevice()
512 r[0].l.range[0].max = get_s16(&fs->fx) * 1000; in jr3_pci_poll_subdevice()
513 r[1].l.range[0].min = -get_s16(&fs->fy) * 1000; in jr3_pci_poll_subdevice()
514 r[1].l.range[0].max = get_s16(&fs->fy) * 1000; in jr3_pci_poll_subdevice()
515 r[2].l.range[0].min = -get_s16(&fs->fz) * 1000; in jr3_pci_poll_subdevice()
516 r[2].l.range[0].max = get_s16(&fs->fz) * 1000; in jr3_pci_poll_subdevice()
517 r[3].l.range[0].min = -get_s16(&fs->mx) * 100; in jr3_pci_poll_subdevice()
[all …]
/drivers/pci/
of.c
290 struct of_pci_range range; in devm_of_pci_get_host_bridge_resources() local
323 for_each_of_pci_range(&parser, &range) { in devm_of_pci_get_host_bridge_resources()
325 if ((range.flags & IORESOURCE_TYPE_BITS) == IORESOURCE_IO) in devm_of_pci_get_host_bridge_resources()
327 else if ((range.flags & IORESOURCE_TYPE_BITS) == IORESOURCE_MEM) in devm_of_pci_get_host_bridge_resources()
332 range_type, range.cpu_addr, in devm_of_pci_get_host_bridge_resources()
333 range.cpu_addr + range.size - 1, range.pci_addr); in devm_of_pci_get_host_bridge_resources()
339 if (range.cpu_addr == OF_BAD_ADDR || range.size == 0) in devm_of_pci_get_host_bridge_resources()
342 err = of_pci_range_to_resource(&range, dev_node, &tmp_res); in devm_of_pci_get_host_bridge_resources()
362 *io_base = range.cpu_addr; in devm_of_pci_get_host_bridge_resources()
367 pci_add_resource_offset(resources, res, res->start - range.pci_addr); in devm_of_pci_get_host_bridge_resources()
[all …]
/drivers/infiniband/core/
umem_odp.c
355 struct hmm_range range = {}; in ib_umem_odp_map_dma_and_lock() local
378 range.notifier = &umem_odp->notifier; in ib_umem_odp_map_dma_and_lock()
379 range.start = ALIGN_DOWN(user_virt, 1UL << page_shift); in ib_umem_odp_map_dma_and_lock()
380 range.end = ALIGN(user_virt + bcnt, 1UL << page_shift); in ib_umem_odp_map_dma_and_lock()
381 pfn_start_idx = (range.start - ib_umem_start(umem_odp)) >> PAGE_SHIFT; in ib_umem_odp_map_dma_and_lock()
382 num_pfns = (range.end - range.start) >> PAGE_SHIFT; in ib_umem_odp_map_dma_and_lock()
384 range.default_flags = HMM_PFN_REQ_FAULT; in ib_umem_odp_map_dma_and_lock()
387 range.default_flags |= HMM_PFN_REQ_WRITE; in ib_umem_odp_map_dma_and_lock()
390 range.hmm_pfns = &(umem_odp->pfn_list[pfn_start_idx]); in ib_umem_odp_map_dma_and_lock()
394 current_seq = range.notifier_seq = in ib_umem_odp_map_dma_and_lock()
[all …]
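
Before faulting pages with hmm_range_fault(), the ODP code expands [user_virt, user_virt + bcnt) outward to page boundaries and derives the pfn count from the aligned span. A small sketch of that arithmetic, assuming 4 KiB pages and illustrative addresses:

#include <stdint.h>
#include <stdio.h>

#define ALIGN_DOWN(x, a) ((x) & ~((uint64_t)(a) - 1))
#define ALIGN(x, a)      ALIGN_DOWN((x) + (a) - 1, (a))

int main(void)
{
        unsigned int page_shift = 12;                 /* assumed 4 KiB pages */
        uint64_t page = 1ULL << page_shift;
        uint64_t user_virt = 0x7f12345678, bcnt = 0x2345;

        /* Expand the byte range outward to page boundaries. */
        uint64_t start = ALIGN_DOWN(user_virt, page);
        uint64_t end   = ALIGN(user_virt + bcnt, page);

        printf("start %#llx end %#llx pfns %llu\n",
               (unsigned long long)start, (unsigned long long)end,
               (unsigned long long)((end - start) >> page_shift)); /* 4 pfns */
        return 0;
}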
/drivers/staging/rtl8192u/
r8192U_wx.c
224 struct iw_range *range = (struct iw_range *)extra; in rtl8180_wx_get_range() local
225 struct iw_range_with_scan_capa *tmp = (struct iw_range_with_scan_capa *)range; in rtl8180_wx_get_range()
230 wrqu->data.length = sizeof(*range); in rtl8180_wx_get_range()
231 memset(range, 0, sizeof(*range)); in rtl8180_wx_get_range()
242 range->throughput = 5 * 1000 * 1000; in rtl8180_wx_get_range()
254 range->sensitivity = priv->max_sens; /* signal level threshold range */ in rtl8180_wx_get_range()
256 range->max_qual.qual = 100; in rtl8180_wx_get_range()
258 range->max_qual.level = 0; in rtl8180_wx_get_range()
259 range->max_qual.noise = 0x100 - 98; in rtl8180_wx_get_range()
260 range->max_qual.updated = 7; /* Updated all three */ in rtl8180_wx_get_range()
[all …]
/drivers/firmware/efi/
unaccepted_memory.c
35 struct accept_range range, *entry; in accept_memory() local
90 range.start = start / unit_size; in accept_memory()
91 range.end = DIV_ROUND_UP(end, unit_size); in accept_memory()
103 if (entry->end <= range.start) in accept_memory()
105 if (entry->start >= range.end) in accept_memory()
121 list_add(&range.list, &accepting_list); in accept_memory()
123 range_start = range.start; in accept_memory()
125 range.end) { in accept_memory()
150 list_del(&range.list); in accept_memory()
/drivers/gpu/drm/i915/selftests/
intel_uncore.c
76 const struct i915_range *range; in intel_shadow_table_check() local
81 range = range_lists[j].regs; in intel_shadow_table_check()
82 for (i = 0, prev = -1; i < range_lists[j].size; i++, range++) { in intel_shadow_table_check()
83 if (range->end < range->start) { in intel_shadow_table_check()
85 __func__, i, range->start, range->end); in intel_shadow_table_check()
89 if (prev >= (s32)range->start) { in intel_shadow_table_check()
91 __func__, i, range->start, range->end, prev); in intel_shadow_table_check()
95 if (range->start % 4) { in intel_shadow_table_check()
97 __func__, i, range->start, range->end); in intel_shadow_table_check()
101 prev = range->end; in intel_shadow_table_check()
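
The selftest walks each shadow table and rejects inverted, unsorted, overlapping, or misaligned register ranges. A standalone sketch of the same walk, with illustrative register offsets:

#include <stdint.h>
#include <stdio.h>

struct reg_range { uint32_t start, end; };    /* inclusive register range */

/* Sanity-check a table the way intel_shadow_table_check() does. */
static int check_table(const struct reg_range *r, int n)
{
        int64_t prev = -1;

        for (int i = 0; i < n; i++) {
                if (r[i].end < r[i].start)
                        return -1;            /* inverted range      */
                if ((int64_t)r[i].start <= prev)
                        return -1;            /* unsorted or overlap */
                if (r[i].start % 4)
                        return -1;            /* misaligned          */
                prev = r[i].end;
        }
        return 0;
}

int main(void)
{
        static const struct reg_range good[] = { {0x2030, 0x2037}, {0x2550, 0x2557} };
        static const struct reg_range bad[]  = { {0x2030, 0x2037}, {0x2034, 0x2557} };

        printf("%d %d\n", check_table(good, 2), check_table(bad, 2)); /* 0 -1 */
        return 0;
}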
/drivers/clk/at91/
clk-generated.c
24 struct clk_range range; member
146 if (gck->range.max && req->rate > gck->range.max) in clk_generated_determine_rate()
147 req->rate = gck->range.max; in clk_generated_determine_rate()
148 if (gck->range.min && req->rate < gck->range.min) in clk_generated_determine_rate()
149 req->rate = gck->range.min; in clk_generated_determine_rate()
162 (gck->range.max && min_rate > gck->range.max)) in clk_generated_determine_rate()
212 if (best_rate < 0 || (gck->range.max && best_rate > gck->range.max)) in clk_generated_determine_rate()
253 if (gck->range.max && rate > gck->range.max) in clk_generated_set_rate()
324 const struct clk_range *range, in at91_clk_register_generated() argument
354 gck->range = *range; in at91_clk_register_generated()
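
clk_generated_determine_rate() clamps the requested rate into the clock's range, where a bound of 0 means unconstrained. A minimal sketch of that clamp:

#include <stdio.h>

struct clk_range { unsigned long min, max; }; /* 0 means "no bound" */

/* Clamp a requested rate into the range, treating 0 as unbounded. */
static unsigned long clamp_rate(unsigned long rate, const struct clk_range *r)
{
        if (r->max && rate > r->max)
                rate = r->max;
        if (r->min && rate < r->min)
                rate = r->min;
        return rate;
}

int main(void)
{
        struct clk_range r = { .min = 1000000, .max = 50000000 };

        printf("%lu\n", clamp_rate(400000, &r));   /* -> 1000000  */
        printf("%lu\n", clamp_rate(80000000, &r)); /* -> 50000000 */
        return 0;
}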
