/include/trace/events/ |
D | maple_tree.h |
    22   __field(unsigned long, max)
    31   __entry->max = mas->max;
    41   (unsigned long) __entry->max,
    55   __field(unsigned long, max)
    64   __entry->max = mas->max;
    74   (unsigned long) __entry->max,
    90   __field(unsigned long, max)
    101  __entry->max = mas->max;
    113  (unsigned long) __entry->max,
|
D | regulator.h |
    111  TP_PROTO(const char *name, int min, int max),
    113  TP_ARGS(name, min, max),
    118  __field( int, max )
    124  __entry->max = max;
    128  (int)__entry->min, (int)__entry->max)
    133  TP_PROTO(const char *name, int min, int max),
    135  TP_ARGS(name, min, max)
|
D | wbt.h |
    40   __entry->rmax = stat[0].max;
    44   __entry->wmax = stat[1].max;
    93   unsigned int normal, unsigned int max),
    95   TP_ARGS(bdi, msg, step, window, bg, normal, max),
    104  __field(unsigned int, max)
    115  __entry->max = max;
    120  __entry->bg, __entry->normal, __entry->max)
|
D | clk.h |
    137  TP_PROTO(struct clk_core *core, unsigned long min, unsigned long max),
    139  TP_ARGS(core, min, max),
    144  __field(unsigned long, max )
    150  __entry->max = max;
    155  (unsigned long)__entry->max)
    160  TP_PROTO(struct clk_core *core, unsigned long min, unsigned long max),
    162  TP_ARGS(core, min, max)
|
/include/linux/ |
D | prime_numbers.h |
    21   #define for_each_prime_number(prime, max) \   argument
    22   for_each_prime_number_from((prime), 2, (max))
    35   #define for_each_prime_number_from(prime, from, max) \   argument
    36   for (prime = (from); prime <= (max); prime = next_prime_number(prime))
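The two macros above expand into a plain for loop driven by next_prime_number(). Below is a standalone userspace sketch of the same iteration pattern; next_prime() and is_prime() are naive illustrative stand-ins for the kernel's next_prime_number(), not kernel API.

#include <stdbool.h>
#include <stdio.h>

static bool is_prime(unsigned long n)
{
	if (n < 2)
		return false;
	for (unsigned long d = 2; d * d <= n; d++)
		if (n % d == 0)
			return false;
	return true;
}

/* Naive stand-in for the kernel's next_prime_number(). */
static unsigned long next_prime(unsigned long n)
{
	do {
		n++;
	} while (!is_prime(n));
	return n;
}

#define for_each_prime_number_from(prime, from, max) \
	for (prime = (from); prime <= (max); prime = next_prime(prime))

#define for_each_prime_number(prime, max) \
	for_each_prime_number_from((prime), 2, (max))

int main(void)
{
	unsigned long p;

	for_each_prime_number(p, 30)	/* prints 2 3 5 7 11 13 17 19 23 29 */
		printf("%lu ", p);
	printf("\n");
	return 0;
}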
|
D | delay.h |
    62   void usleep_range_state(unsigned long min, unsigned long max,
    65   static inline void usleep_range(unsigned long min, unsigned long max)   in usleep_range() argument
    67   usleep_range_state(min, max, TASK_UNINTERRUPTIBLE);   in usleep_range()
    70   static inline void usleep_idle_range(unsigned long min, unsigned long max)   in usleep_idle_range() argument
    72   usleep_range_state(min, max, TASK_IDLE);   in usleep_idle_range()
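usleep_range() and usleep_idle_range() are thin wrappers that differ only in the task state handed to usleep_range_state(); the [min, max] window gives the timer code room to coalesce wakeups. A rough userspace approximation of the calling pattern follows; usleep_range_approx() is a made-up helper that simply sleeps for the midpoint of the window, nothing like the kernel's timer-based implementation.

#include <stdio.h>
#include <time.h>

/* Illustrative only: sleep somewhere inside [min_us, max_us]. */
static void usleep_range_approx(unsigned long min_us, unsigned long max_us)
{
	unsigned long us = min_us + (max_us - min_us) / 2;
	struct timespec ts = {
		.tv_sec = us / 1000000,
		.tv_nsec = (long)(us % 1000000) * 1000,
	};

	nanosleep(&ts, NULL);
}

int main(void)
{
	/* A driver would call usleep_range(1000, 2000) while polling hardware. */
	usleep_range_approx(1000, 2000);
	puts("slept roughly 1-2 ms");
	return 0;
}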
|
D | maple_tree.h |
    314  unsigned long max, gfp_t gfp);
    317  unsigned long max, gfp_t gfp);
    384  unsigned long max; /* The maximum index of this node - implied pivot max */   member
    441  .max = ULONG_MAX, \
    464  void *mas_find(struct ma_state *mas, unsigned long max);
    465  void *mas_find_range(struct ma_state *mas, unsigned long max);
    467  void *mas_find_range_rev(struct ma_state *mas, unsigned long max);
    478  void *mas_prev_range(struct ma_state *mas, unsigned long max);
    479  void *mas_next(struct ma_state *mas, unsigned long max);
    480  void *mas_next_range(struct ma_state *mas, unsigned long max);
    [all …]
|
D | page_counter.h |
    37   unsigned long max;   member
    51   counter->max = PAGE_COUNTER_MAX;   in page_counter_init()
    76   int page_counter_memparse(const char *buf, const char *max,
|
D | ioasid.h |
    10   typedef ioasid_t (*ioasid_alloc_fn_t)(ioasid_t min, ioasid_t max, void *data);
    35   ioasid_t ioasid_alloc(struct ioasid_set *set, ioasid_t min, ioasid_t max,
    50   ioasid_t max, void *private)   in ioasid_alloc() argument
|
D | minmax.h |
    52   #define max(x, y) __careful_cmp(x, y, >)   macro
    68   #define max3(x, y, z) max((typeof(x))max(x, y), z)
    89   #define clamp(val, lo, hi) min((typeof(val))max(val, lo), hi)
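Line 89 shows that clamp() is just min() composed with max(). A simplified userspace re-implementation of that composition follows; the kernel's __careful_cmp type checking is omitted, and the names carry a trailing underscore to avoid clashing with libc macros.

#include <stdio.h>

#define max_(x, y)		((x) > (y) ? (x) : (y))
#define min_(x, y)		((x) < (y) ? (x) : (y))
#define max3_(x, y, z)		max_(max_(x, y), z)
#define clamp_(val, lo, hi)	min_(max_(val, lo), hi)	/* same shape as line 89 */

int main(void)
{
	printf("%d\n", max3_(3, 9, 7));		/* 9 */
	printf("%d\n", clamp_(15, 0, 10));	/* 10: clamped to the upper bound */
	printf("%d\n", clamp_(-4, 0, 10));	/* 0:  clamped to the lower bound */
	return 0;
}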
|
D | nfsacl.h |
    29   w += max(acl_access ? (int)acl_access->a_count : 3, 4) * 12;   in nfsacl_size()
    31   w += max((int)acl_default->a_count, 4) * 12;   in nfsacl_size()
|
D | xarray.h |
    240   u32 max;   member
    244   #define XA_LIMIT(_min, _max) (struct xa_limit) { .min = _min, .max = _max }
    360   unsigned long max, xa_mark_t) __attribute__((nonnull(2)));
    362   unsigned long max, xa_mark_t) __attribute__((nonnull(2)));
    364   unsigned long max, unsigned int n, xa_mark_t);
    1516  void *xas_find(struct xa_state *, unsigned long max);
    1522  void *xas_find_marked(struct xa_state *, unsigned long max, xa_mark_t);
    1669  static inline void *xas_next_entry(struct xa_state *xas, unsigned long max)   in xas_next_entry() argument
    1676  return xas_find(xas, max);   in xas_next_entry()
    1679  if (unlikely(xas->xa_index >= max))   in xas_next_entry()
    [all …]
|
D | cpufreq.h |
    69    unsigned int max; /* in kHz */   member
    178   unsigned int max; /* in kHz */   member
    465   unsigned int max)   in cpufreq_verify_within_limits() argument
    469   if (policy->max < min)   in cpufreq_verify_within_limits()
    470   policy->max = min;   in cpufreq_verify_within_limits()
    471   if (policy->min > max)   in cpufreq_verify_within_limits()
    472   policy->min = max;   in cpufreq_verify_within_limits()
    473   if (policy->max > max)   in cpufreq_verify_within_limits()
    474   policy->max = max;   in cpufreq_verify_within_limits()
    475   if (policy->min > policy->max)   in cpufreq_verify_within_limits()
    [all …]
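Lines 469-475 force a policy's [min, max] frequency window to stay inside the caller-supplied limits and then resolve any min > max inversion. A standalone sketch of that clamping pattern on a toy policy struct; this is not the kernel function itself, and the initial policy->min < min step is reconstructed from context rather than shown in the excerpt.

#include <stdio.h>

struct policy {
	unsigned int min;	/* in kHz */
	unsigned int max;	/* in kHz */
};

static void verify_within_limits(struct policy *policy,
				 unsigned int min, unsigned int max)
{
	if (policy->min < min)
		policy->min = min;
	if (policy->max < min)
		policy->max = min;
	if (policy->min > max)
		policy->min = max;
	if (policy->max > max)
		policy->max = max;
	if (policy->min > policy->max)
		policy->min = policy->max;
}

int main(void)
{
	struct policy p = { .min = 200000, .max = 3600000 };

	verify_within_limits(&p, 400000, 2800000);
	printf("min=%u kHz max=%u kHz\n", p.min, p.max);	/* 400000 / 2800000 */
	return 0;
}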
|
D | rbtree_augmented.h |
    125  RBTYPE max = RBCOMPUTE(node); \
    128  if (child->RBAUGMENTED > max) \
    129  max = child->RBAUGMENTED; \
    133  if (child->RBAUGMENTED > max) \
    134  max = child->RBAUGMENTED; \
    136  if (exit && node->RBAUGMENTED == max) \
    138  node->RBAUGMENTED = max; \
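This fragment is the propagation step generated by the RB_DECLARE_CALLBACKS_MAX helper: a node's cached augmented value is recomputed as the maximum of its own RBCOMPUTE() value and its children's cached values, and propagation can stop early once the cache is already correct. A minimal sketch of that step on a hand-rolled node type; the field and function names here are illustrative, not the kernel's.

#include <stdbool.h>
#include <stdio.h>

struct node {
	struct node *left, *right;
	unsigned long key;		/* what RBCOMPUTE() would return */
	unsigned long subtree_max;	/* the RBAUGMENTED field */
};

/* Returns true when the cached value was already up to date (the "exit" path). */
static bool compute_subtree_max(struct node *node, bool exit)
{
	unsigned long max = node->key;

	if (node->left && node->left->subtree_max > max)
		max = node->left->subtree_max;
	if (node->right && node->right->subtree_max > max)
		max = node->right->subtree_max;
	if (exit && node->subtree_max == max)
		return true;
	node->subtree_max = max;
	return false;
}

int main(void)
{
	struct node l = { .key = 5, .subtree_max = 5 };
	struct node r = { .key = 9, .subtree_max = 12 };
	struct node root = { .left = &l, .right = &r, .key = 7, .subtree_max = 0 };

	compute_subtree_max(&root, false);
	printf("subtree max = %lu\n", root.subtree_max);	/* 12 */
	return 0;
}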
|
/include/trace/hooks/ |
D | topology.h |
    17   TP_PROTO(const struct cpumask *cpus, unsigned long freq, unsigned long max,
    19   TP_ARGS(cpus, freq, max, scale));
    23   #define trace_android_vh_arch_set_freq_scale(cpus, freq, max, scale)   argument
|
/include/asm-generic/ |
D | pci_iomap.h |
    13   extern void __iomem *pci_iomap(struct pci_dev *dev, int bar, unsigned long max);
    14   extern void __iomem *pci_iomap_wc(struct pci_dev *dev, int bar, unsigned long max);
    35   static inline void __iomem *pci_iomap(struct pci_dev *dev, int bar, unsigned long max)   in pci_iomap() argument
    40   static inline void __iomem *pci_iomap_wc(struct pci_dev *dev, int bar, unsigned long max)   in pci_iomap_wc() argument
|
/include/uapi/linux/netfilter/ |
D | xt_hashlimit.h |
    39   __u32 max; /* max number of entries */   member
    63   __u32 max; /* max number of entries */   member
    77   __u32 max; /* max number of entries */   member
    91   __u32 max; /* max number of entries */   member
|
D | xt_length.h | 8 __u16 min, max; member
|
/include/sound/ |
D | pcm_params.h |
    219  i->max = UINT_MAX;   in snd_interval_any()
    232  return (i->min > i->max ||   in snd_interval_checkempty()
    233  (i->min == i->max && (i->openmin || i->openmax)));   in snd_interval_checkempty()
    243  return (i->min == i->max ||   in snd_interval_single()
    244  (i->min + 1 == i->max && (i->openmin || i->openmax)));   in snd_interval_single()
    250  return i->max;   in snd_interval_value()
    262  v = i->max;   in snd_interval_max()
    271  i->max < val || (i->max == val && i->openmax)));   in snd_interval_test()
    283  if (i->openmin && i->openmax && i->min == i->max)   in snd_interval_setinteger()
    296  i1->max == i2->max && i1->openmax == i2->openmax;   in snd_interval_eq()
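The snd_interval helpers treat (min, max, openmin, openmax) as a possibly half-open integer range: checkempty() detects an impossible range, single() detects a range with exactly one admissible value. A userspace sketch of those two predicates on a reduced interval struct; the type and function names are illustrative, not the ALSA ones.

#include <stdbool.h>
#include <stdio.h>

struct interval {
	unsigned int min, max;
	unsigned int openmin:1, openmax:1;	/* 1 = bound is exclusive */
};

/* Empty: min above max, or a zero-width span with an open end. */
static bool interval_checkempty(const struct interval *i)
{
	return i->min > i->max ||
	       (i->min == i->max && (i->openmin || i->openmax));
}

/* Single: exactly one admissible value remains. */
static bool interval_single(const struct interval *i)
{
	return i->min == i->max ||
	       (i->min + 1 == i->max && (i->openmin || i->openmax));
}

int main(void)
{
	struct interval a = { .min = 44100, .max = 44100 };
	struct interval b = { .min = 44100, .max = 44101, .openmax = 1 };

	/* Both intervals pin the rate to exactly 44100. */
	printf("a: empty=%d single=%d\n", interval_checkempty(&a), interval_single(&a));
	printf("b: empty=%d single=%d\n", interval_checkempty(&b), interval_single(&b));
	return 0;
}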
|
D | soc.h |
    35   .rshift = shift_right, .max = xmax, \
    40   .rshift = shift_right, .min = xmin, .max = xmax, \
    46   {.reg = xreg, .max = xmax, .invert = xinvert})
    50   .max = xmax, .invert = xinvert})
    54   .max = xmax, .min = xmin, .sign_bit = xsign_bit, \
    59   .min = xmin, .max = xmax, .invert = xinvert})
    60   #define SOC_SINGLE(xname, reg, shift, max, invert) \   argument
    64   .private_value = SOC_SINGLE_VALUE(reg, shift, max, invert, 0) }
    71   .rshift = xshift, .min = xmin, .max = xmax, \
    73   #define SOC_SINGLE_TLV(xname, reg, shift, max, invert, tlv_array) \   argument
    [all …]
|
/include/drm/ |
D | drm_of.h |
    54   const unsigned int min, const unsigned int max);
    58   const unsigned int max);
    117  const unsigned int min, const unsigned int max)   in drm_of_get_data_lanes_count() argument
    126  const unsigned int max)   in drm_of_get_data_lanes_count_ep() argument
|
D | drm_color_mgmt.h |
    45   u32 max = 0xffff >> (16 - bit_precision);   in drm_color_lut_extract() local
    53   return clamp_val(val, 0, max);   in drm_color_lut_extract()
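drm_color_lut_extract() reduces a 16-bit LUT entry to the hardware's bit_precision; the max computed on line 45 is the largest value representable at that precision and serves as the clamp ceiling on line 53. Below is a userspace sketch of that scale-with-rounding-then-clamp idea, not the kernel implementation itself.

#include <stdint.h>
#include <stdio.h>

static uint32_t lut_extract(uint32_t user_input, unsigned int bit_precision)
{
	uint32_t max = 0xffff >> (16 - bit_precision);	/* largest N-bit value */
	uint32_t val = user_input;

	if (bit_precision < 16) {
		val += 1u << (16 - bit_precision - 1);	/* round to nearest */
		val >>= 16 - bit_precision;
	}
	return val > max ? max : val;	/* the clamp_val(val, 0, max) step */
}

int main(void)
{
	printf("%u\n", lut_extract(0xffff, 8));		/* 255: full scale at 8 bits */
	printf("%u\n", lut_extract(0x8000, 10));	/* 512: mid scale at 10 bits */
	return 0;
}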
|
D | drm_buddy.h |
    16   #define range_overflows(start, size, max) ({ \   argument
    19   typeof(max) max__ = (max); \
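range_overflows() checks whether [start, start + size) stays below max without risking wraparound in start + size: it compares size against max - start instead. A userspace sketch of the same overflow-safe test as a plain function; this toy helper is not the kernel macro, which additionally enforces matching operand types via the __ locals shown above.

#include <stdbool.h>
#include <stdint.h>
#include <stdio.h>

static bool range_overflows_u64(uint64_t start, uint64_t size, uint64_t max)
{
	/* start + size could wrap, so test size > max - start instead. */
	return start >= max || size > max - start;
}

int main(void)
{
	printf("%d\n", range_overflows_u64(10, 20, 64));		/* 0: fits */
	printf("%d\n", range_overflows_u64(60, 10, 64));		/* 1: spills past max */
	printf("%d\n", range_overflows_u64(UINT64_MAX - 1, 16, 64));	/* 1: would wrap */
	return 0;
}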
|
/include/xen/interface/io/ |
D | sndif.h |
    880  uint32_t max;   member
    884  uint32_t max;   member
    888  uint32_t max;   member
    892  uint32_t max;   member
|
/include/net/ |
D | xdp_sock_drv.h |
    19   u32 xsk_tx_peek_release_desc_batch(struct xsk_buff_pool *pool, u32 max);
    93   static inline u32 xsk_buff_alloc_batch(struct xsk_buff_pool *pool, struct xdp_buff **xdp, u32 max)   in xsk_buff_alloc_batch() argument
    95   return xp_alloc_batch(pool, xdp, max);   in xsk_buff_alloc_batch()
    157  static inline u32 xsk_tx_peek_release_desc_batch(struct xsk_buff_pool *pool, u32 max)   in xsk_tx_peek_release_desc_batch() argument
    244  static inline u32 xsk_buff_alloc_batch(struct xsk_buff_pool *pool, struct xdp_buff **xdp, u32 max)   in xsk_buff_alloc_batch() argument
|