/drivers/net/wireless/intel/iwlwifi/ |
D | iwl-debug.h |
    38 #define CHECK_FOR_NEWLINE(f) BUILD_BUG_ON(f[sizeof(f) - 2] != '\n') argument
    41 #define IWL_ERR_DEV(d, f, a...) \ argument
    43 CHECK_FOR_NEWLINE(f); \
    44 __iwl_err((d), false, false, f, ## a); \
    46 #define IWL_ERR(m, f, a...) \ argument
    47 IWL_ERR_DEV((m)->dev, f, ## a)
    48 #define IWL_WARN(m, f, a...) \ argument
    50 CHECK_FOR_NEWLINE(f); \
    51 __iwl_warn((m)->dev, f, ## a); \
    53 #define IWL_INFO(m, f, a...) \ argument
    [all …]
|
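The CHECK_FOR_NEWLINE() hits above make the wrapper macros reject, at build time, any format string that does not end in '\n': sizeof() on a string literal counts the trailing NUL, so index sizeof(f) - 2 is the last visible character, and BUILD_BUG_ON() breaks the build when it is not a newline. The sketch below is a userspace analog under that reading; MY_ERR and the runtime assert() are stand-ins, since BUILD_BUG_ON() needs kernel-only machinery to fail at compile time.

    #include <assert.h>
    #include <stdio.h>

    /* Stand-in for the kernel check: same sizeof(f) - 2 indexing, but a
     * runtime assert() instead of a compile-time BUILD_BUG_ON(). */
    #define CHECK_FOR_NEWLINE(f) assert((f)[sizeof(f) - 2] == '\n')

    /* ##__VA_ARGS__ is the GNU extension the kernel's a... macros rely on. */
    #define MY_ERR(fmt, ...) do {                \
            CHECK_FOR_NEWLINE(fmt);              \
            fprintf(stderr, fmt, ##__VA_ARGS__); \
        } while (0)

    int main(void)
    {
        MY_ERR("firmware load failed: %d\n", -5); /* ends in '\n': passes   */
        /* MY_ERR("missing newline %d", -5);         would trip the assert */
        return 0;
    }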
/drivers/spi/ |
D | spi-nxp-fspi.c |
    380 static inline int needs_ip_only(struct nxp_fspi *f) in needs_ip_only() argument
    382 return f->devtype_data->quirks & FSPI_QUIRK_USE_IP_ONLY; in needs_ip_only()
    392 static void fspi_writel(struct nxp_fspi *f, u32 val, void __iomem *addr) in fspi_writel() argument
    394 if (f->devtype_data->little_endian) in fspi_writel()
    400 static u32 fspi_readl(struct nxp_fspi *f, void __iomem *addr) in fspi_readl() argument
    402 if (f->devtype_data->little_endian) in fspi_readl()
    410 struct nxp_fspi *f = dev_id; in nxp_fspi_irq_handler() local
    414 reg = fspi_readl(f, f->iobase + FSPI_INTR); in nxp_fspi_irq_handler()
    415 fspi_writel(f, FSPI_INTR_IPCMDDONE, f->iobase + FSPI_INTR); in nxp_fspi_irq_handler()
    418 complete(&f->c); in nxp_fspi_irq_handler()
    [all …]
|
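fspi_writel()/fspi_readl() above route every register access through one pair of helpers and pick the little- or big-endian accessor from a per-SoC flag. A rough userspace analog of that pattern, with invented names and a plain byte buffer standing in for the mapped register window:

    #include <endian.h>
    #include <stdint.h>
    #include <stdio.h>
    #include <string.h>

    /* Userspace analog (names invented): one flag says how the register
     * block is wired, and every access goes through the same two helpers. */
    struct fake_ctrl {
        int little_endian;
        uint8_t regs[64];            /* stands in for the ioremap()ed window */
    };

    static void ctrl_writel(struct fake_ctrl *c, uint32_t val, size_t off)
    {
        uint32_t raw = c->little_endian ? htole32(val) : htobe32(val);

        memcpy(c->regs + off, &raw, sizeof(raw));
    }

    static uint32_t ctrl_readl(struct fake_ctrl *c, size_t off)
    {
        uint32_t raw;

        memcpy(&raw, c->regs + off, sizeof(raw));
        return c->little_endian ? le32toh(raw) : be32toh(raw);
    }

    int main(void)
    {
        struct fake_ctrl c = { .little_endian = 0 };

        ctrl_writel(&c, 0x12345678, 0x14);
        printf("read back 0x%08x, first byte 0x%02x\n",
               (unsigned int)ctrl_readl(&c, 0x14),
               (unsigned int)c.regs[0x14]);   /* 0x12 on the BE wiring */
        return 0;
    }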
/drivers/dma-buf/ |
D | st-dma-fence.c |
    22 } *to_mock_fence(struct dma_fence *f) { in to_mock_fence() argument
    23 return container_of(f, struct mock_fence, base); in to_mock_fence()
    26 static const char *mock_name(struct dma_fence *f) in mock_name() argument
    31 static void mock_fence_release(struct dma_fence *f) in mock_fence_release() argument
    33 kmem_cache_free(slab_fences, to_mock_fence(f)); in mock_fence_release()
    41 static void mock_wakeup(struct dma_fence *f, struct dma_fence_cb *cb) in mock_wakeup() argument
    46 static long mock_wait(struct dma_fence *f, bool intr, long timeout) in mock_wait() argument
    51 if (dma_fence_add_callback(f, &cb.cb, mock_wakeup)) in mock_wait()
    57 if (test_bit(DMA_FENCE_FLAG_SIGNALED_BIT, &f->flags)) in mock_wait()
    67 if (!dma_fence_remove_callback(f, &cb.cb)) in mock_wait()
    [all …]
|
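to_mock_fence() above is the usual container_of() downcast: the test fence embeds a struct dma_fence as its base member, and callbacks that only receive the base pointer recover the wrapper by subtracting the member offset. A minimal standalone version of the same idiom (plain-C container_of(), invented types):

    #include <stddef.h>
    #include <stdio.h>

    /* Plain-C container_of(): recover the wrapping object from a pointer
     * to one of its members (the kernel version adds type checking). */
    #define container_of(ptr, type, member) \
        ((type *)((char *)(ptr) - offsetof(type, member)))

    struct base_fence { unsigned long flags; };      /* invented stand-ins */

    struct mock_fence {
        struct base_fence base;
        const char *label;
    };

    static struct mock_fence *to_mock_fence(struct base_fence *f)
    {
        return container_of(f, struct mock_fence, base);
    }

    int main(void)
    {
        struct mock_fence mf = { .base = { .flags = 0 }, .label = "mock" };
        struct base_fence *f = &mf.base;      /* what a callback would get */

        printf("label = %s\n", to_mock_fence(f)->label);
        return 0;
    }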
/drivers/media/pci/ivtv/ |
D | ivtv-yuv.c |
    32 struct yuv_frame_info *f = &yi->new_frame_info[frame]; in ivtv_yuv_prep_user_dma() local
    40 y_decode_height = uv_decode_height = f->src_h + f->src_y; in ivtv_yuv_prep_user_dma()
    42 if (f->offset_y) in ivtv_yuv_prep_user_dma()
    122 if (f->offset_y && yi->blanking_dmaptr) { in ivtv_yuv_prep_user_dma()
    211 static void ivtv_yuv_handle_horizontal(struct ivtv *itv, struct yuv_frame_info *f) in ivtv_yuv_handle_horizontal() argument
    224 f->tru_w, f->src_w, f->dst_w, f->src_x, f->dst_x); in ivtv_yuv_handle_horizontal()
    227 x_cutoff = f->src_w + f->src_x; in ivtv_yuv_handle_horizontal()
    230 reg_2834 = f->dst_w; in ivtv_yuv_handle_horizontal()
    234 reg_2890 = f->dst_x; in ivtv_yuv_handle_horizontal()
    245 if (f->vis_w == 720) { in ivtv_yuv_handle_horizontal()
    [all …]
|
/drivers/net/ethernet/chelsio/cxgb4/ |
D | cxgb4_filter.c |
    56 static int set_tcb_field(struct adapter *adap, struct filter_entry *f, in set_tcb_field() argument
    75 set_wr_txq(skb, CPL_PRIORITY_CONTROL, f->fs.val.iport & 0x3); in set_tcb_field()
    82 static int set_tcb_tflag(struct adapter *adap, struct filter_entry *f, in set_tcb_tflag() argument
    86 return set_tcb_field(adap, f, ftid, TCB_T_FLAGS_W, 1ULL << bit_pos, in set_tcb_tflag()
    120 static void mk_set_tcb_ulp(struct filter_entry *f, in mk_set_tcb_ulp() argument
    132 OPCODE_TID(req) = htonl(MK_OPCODE_TID(CPL_SET_TCB_FIELD, f->tid)); in mk_set_tcb_ulp()
    143 static int configure_filter_smac(struct adapter *adap, struct filter_entry *f) in configure_filter_smac() argument
    148 err = set_tcb_field(adap, f, f->tid, TCB_SMAC_SEL_W, in configure_filter_smac()
    150 TCB_SMAC_SEL_V(f->smt->idx), 1); in configure_filter_smac()
    154 err = set_tcb_tflag(adap, f, f->tid, TF_CCTRL_CWR_S, 1, 1); in configure_filter_smac()
    [all …]
|
D | cxgb4_tc_u32_parse.h |
    41 int (*val)(struct ch_filter_specification *f, __be32 val, __be32 mask);
    45 static inline int cxgb4_fill_ipv4_tos(struct ch_filter_specification *f, in cxgb4_fill_ipv4_tos() argument
    48 f->val.tos = (ntohl(val) >> 16) & 0x000000FF; in cxgb4_fill_ipv4_tos()
    49 f->mask.tos = (ntohl(mask) >> 16) & 0x000000FF; in cxgb4_fill_ipv4_tos()
    54 static inline int cxgb4_fill_ipv4_frag(struct ch_filter_specification *f, in cxgb4_fill_ipv4_frag() argument
    64 f->val.frag = 1; in cxgb4_fill_ipv4_frag()
    65 f->mask.frag = 1; in cxgb4_fill_ipv4_frag()
    67 f->val.frag = 0; in cxgb4_fill_ipv4_frag()
    68 f->mask.frag = 1; in cxgb4_fill_ipv4_frag()
    76 static inline int cxgb4_fill_ipv4_proto(struct ch_filter_specification *f, in cxgb4_fill_ipv4_proto() argument
    [all …]
|
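cxgb4_fill_ipv4_tos() above receives the first 32-bit word of the IPv4 header as a big-endian value; after ntohl() the TOS byte occupies bits 23..16, hence the shift by 16 and the 0xFF mask. A small self-contained check of that arithmetic (function name invented):

    #include <arpa/inet.h>
    #include <stdint.h>
    #include <stdio.h>

    /* Word 0 of an IPv4 header is version/IHL, TOS, total length.  After
     * ntohl(), TOS sits at bits 23..16, matching the driver's expression. */
    static uint8_t ipv4_word0_tos(uint32_t word0_be)
    {
        return (ntohl(word0_be) >> 16) & 0x000000FF;
    }

    int main(void)
    {
        /* version 4, IHL 5, TOS 0xb8 (EF), total length 84 */
        uint32_t word0 = htonl(0x45b80054);

        printf("tos = 0x%02x\n", (unsigned int)ipv4_word0_tos(word0)); /* 0xb8 */
        return 0;
    }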
/drivers/gpu/drm/i915/ |
D | i915_sw_fence_work.c |
    9 static void fence_complete(struct dma_fence_work *f) in fence_complete() argument
    11 if (f->ops->release) in fence_complete()
    12 f->ops->release(f); in fence_complete()
    13 dma_fence_signal(&f->dma); in fence_complete()
    18 struct dma_fence_work *f = container_of(work, typeof(*f), work); in fence_work() local
    20 f->ops->work(f); in fence_work()
    22 fence_complete(f); in fence_work()
    23 dma_fence_put(&f->dma); in fence_work()
    29 struct dma_fence_work *f = container_of(fence, typeof(*f), chain); in fence_notify() local
    34 dma_fence_set_error(&f->dma, fence->error); in fence_notify()
    [all …]
|
/drivers/net/ethernet/pensando/ionic/ |
D | ionic_rx_filter.c |
    13 void ionic_rx_filter_free(struct ionic_lif *lif, struct ionic_rx_filter *f) in ionic_rx_filter_free() argument
    17 hlist_del(&f->by_id); in ionic_rx_filter_free()
    18 hlist_del(&f->by_hash); in ionic_rx_filter_free()
    19 devm_kfree(dev, f); in ionic_rx_filter_free()
    27 struct ionic_rx_filter *f; in ionic_rx_filter_replay() local
    39 hlist_for_each_entry_safe(f, tmp, head, by_id) { in ionic_rx_filter_replay()
    41 memcpy(ac, &f->cmd, sizeof(f->cmd)); in ionic_rx_filter_replay()
    66 ionic_rx_filter_free(lif, f); in ionic_rx_filter_replay()
    74 hlist_del(&f->by_id); in ionic_rx_filter_replay()
    76 f->filter_id = le32_to_cpu(ctx.comp.rx_filter_add.filter_id); in ionic_rx_filter_replay()
    [all …]
|
/drivers/media/test-drivers/vivid/ |
D | vivid-touch-cap.c |
    16 struct v4l2_pix_format *f = &dev->tch_format; in touch_cap_queue_setup() local
    17 unsigned int size = f->sizeimage; in touch_cap_queue_setup()
    36 struct v4l2_pix_format *f = &dev->tch_format; in touch_cap_buf_prepare() local
    37 unsigned int size = f->sizeimage; in touch_cap_buf_prepare()
    120 int vivid_enum_fmt_tch(struct file *file, void *priv, struct v4l2_fmtdesc *f) in vivid_enum_fmt_tch() argument
    122 if (f->index) in vivid_enum_fmt_tch()
    125 f->pixelformat = V4L2_TCH_FMT_DELTA_TD16; in vivid_enum_fmt_tch()
    129 int vivid_g_fmt_tch(struct file *file, void *priv, struct v4l2_format *f) in vivid_g_fmt_tch() argument
    135 f->fmt.pix = dev->tch_format; in vivid_g_fmt_tch()
    139 int vivid_g_fmt_tch_mplane(struct file *file, void *priv, struct v4l2_format *f) in vivid_g_fmt_tch_mplane() argument
    [all …]
|
/drivers/gpu/drm/amd/amdgpu/ |
D | amdgpu_sync.c |
    66 struct dma_fence *f) in amdgpu_sync_same_dev() argument
    68 struct drm_sched_fence *s_fence = to_drm_sched_fence(f); in amdgpu_sync_same_dev()
    87 static void *amdgpu_sync_get_owner(struct dma_fence *f) in amdgpu_sync_get_owner() argument
    92 if (!f) in amdgpu_sync_get_owner()
    95 s_fence = to_drm_sched_fence(f); in amdgpu_sync_get_owner()
    99 kfd_fence = to_amdgpu_amdkfd_fence(f); in amdgpu_sync_get_owner()
    133 static bool amdgpu_sync_add_later(struct amdgpu_sync *sync, struct dma_fence *f) in amdgpu_sync_add_later() argument
    137 hash_for_each_possible(sync->fences, e, node, f->context) { in amdgpu_sync_add_later()
    138 if (unlikely(e->fence->context != f->context)) in amdgpu_sync_add_later()
    141 amdgpu_sync_keep_later(&e->fence, f); in amdgpu_sync_add_later()
    [all …]
|
/drivers/net/ethernet/ibm/emac/ |
D | debug.h |
    32 # define DBG(d,f,x...) EMAC_DBG(d, emac, f, ##x) argument
    33 # define MAL_DBG(d,f,x...) EMAC_DBG(d, mal, f, ##x) argument
    34 # define ZMII_DBG(d,f,x...) EMAC_DBG(d, zmii, f, ##x) argument
    35 # define RGMII_DBG(d,f,x...) EMAC_DBG(d, rgmii, f, ##x) argument
    38 # define DBG(f,x...) ((void)0) argument
    39 # define MAL_DBG(d,f,x...) ((void)0) argument
    40 # define ZMII_DBG(d,f,x...) ((void)0) argument
    41 # define RGMII_DBG(d,f,x...) ((void)0) argument
    44 # define DBG2(d,f,x...) DBG(d,f, ##x) argument
    45 # define MAL_DBG2(d,f,x...) MAL_DBG(d,f, ##x) argument
    [all …]
|
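The emac DBG family above has two definitions: with debugging built in it forwards to EMAC_DBG(), otherwise it collapses to ((void)0), so call sites still need a semicolon but emit no code and evaluate no arguments. A userspace sketch of the same two-branch macro, gated on -DDEBUG instead of the driver's own config symbol:

    #include <stdio.h>

    /* Same two-branch shape as the emac header (DBG reused as a stand-in):
     * enabled builds print, disabled builds compile to a harmless no-op. */
    #ifdef DEBUG
    # define DBG(fmt, ...) fprintf(stderr, "emac: " fmt, ##__VA_ARGS__)
    #else
    # define DBG(fmt, ...) ((void)0)
    #endif

    int main(void)
    {
        DBG("link up, speed %d\n", 1000);   /* no-op unless built with -DDEBUG */
        puts("done");
        return 0;
    }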
/drivers/video/fbdev/ |
D | w100fb.c |
    270 if (status.f.cmdfifo_avail >= entries) in w100_fifo_wait()
    285 if (!status.f.gui_active) in w100fb_sync()
    314 dp_cntl.f.dst_x_dir = 1; in w100_init_graphic_engine()
    315 dp_cntl.f.dst_y_dir = 1; in w100_init_graphic_engine()
    316 dp_cntl.f.src_x_dir = 1; in w100_init_graphic_engine()
    317 dp_cntl.f.src_y_dir = 1; in w100_init_graphic_engine()
    318 dp_cntl.f.dst_major_x = 1; in w100_init_graphic_engine()
    319 dp_cntl.f.src_major_x = 1; in w100_init_graphic_engine()
    323 gmc.f.gmc_src_pitch_offset_cntl = 1; in w100_init_graphic_engine()
    324 gmc.f.gmc_dst_pitch_offset_cntl = 1; in w100_init_graphic_engine()
    [all …]
|
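The w100fb hits access registers through unions whose f member is a bitfield view of the raw 32-bit word, so code can set named fields (dp_cntl.f.dst_x_dir = 1) and then write the whole value once. A minimal sketch with an invented layout; note that C bitfield ordering is compiler/ABI specific, which is why such unions stay private to one driver:

    #include <stdint.h>
    #include <stdio.h>

    /* Invented layout: a raw register word overlaid with named bitfields,
     * in the style of the driver's dp_cntl/gmc unions. */
    union dp_cntl {
        uint32_t val;
        struct {
            uint32_t dst_x_dir   : 1;
            uint32_t dst_y_dir   : 1;
            uint32_t src_x_dir   : 1;
            uint32_t src_y_dir   : 1;
            uint32_t dst_major_x : 1;
            uint32_t src_major_x : 1;
            uint32_t reserved    : 26;
        } f;
    };

    int main(void)
    {
        union dp_cntl dp = { .val = 0 };

        dp.f.dst_x_dir = 1;               /* set individual fields ...     */
        dp.f.src_major_x = 1;
        printf("raw word = 0x%08x\n",     /* ... then use the whole word   */
               (unsigned int)dp.val);
        return 0;
    }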
/drivers/gpu/host1x/ |
D | fence.c |
    34 static const char *host1x_syncpt_fence_get_driver_name(struct dma_fence *f) in host1x_syncpt_fence_get_driver_name() argument
    39 static const char *host1x_syncpt_fence_get_timeline_name(struct dma_fence *f) in host1x_syncpt_fence_get_timeline_name() argument
    44 static struct host1x_syncpt_fence *to_host1x_fence(struct dma_fence *f) in to_host1x_fence() argument
    46 return container_of(f, struct host1x_syncpt_fence, base); in to_host1x_fence()
    49 static bool host1x_syncpt_fence_enable_signaling(struct dma_fence *f) in host1x_syncpt_fence_enable_signaling() argument
    51 struct host1x_syncpt_fence *sf = to_host1x_fence(f); in host1x_syncpt_fence_enable_signaling()
    57 dma_fence_get(f); in host1x_syncpt_fence_enable_signaling()
    72 HOST1X_INTR_ACTION_SIGNAL_FENCE, f, in host1x_syncpt_fence_enable_signaling()
    76 dma_fence_put(f); in host1x_syncpt_fence_enable_signaling()
    92 static void host1x_syncpt_fence_release(struct dma_fence *f) in host1x_syncpt_fence_release() argument
    [all …]
|
/drivers/hid/ |
D | hid-debug.c |
    446 static char *resolv_usage_page(unsigned page, struct seq_file *f) { in resolv_usage_page() argument
    450 if (!f) { in resolv_usage_page()
    458 if (!f) { in resolv_usage_page()
    464 seq_printf(f, "%s", p->description); in resolv_usage_page()
    468 if (!f) in resolv_usage_page()
    471 seq_printf(f, "%04x", page); in resolv_usage_page()
    475 char *hid_resolv_usage(unsigned usage, struct seq_file *f) { in hid_resolv_usage() argument
    480 buf = resolv_usage_page(usage >> 16, f); in hid_resolv_usage()
    487 if (!f) { in hid_resolv_usage()
    493 seq_printf(f, "."); in hid_resolv_usage()
    [all …]
|
D | wacom_wac.h |
    161 #define WACOM_BATTERY_USAGE(f) (((f)->hid == HID_DG_BATTERYSTRENGTH) || \ argument
    162 ((f)->hid == WACOM_HID_WD_BATTERY_CHARGING) || \
    163 ((f)->hid == WACOM_HID_WD_BATTERY_LEVEL))
    165 #define WACOM_PAD_FIELD(f) (((f)->physical == HID_DG_TABLETFUNCTIONKEY) || \ argument
    166 ((f)->physical == WACOM_HID_WD_DIGITIZERFNKEYS) || \
    167 ((f)->physical == WACOM_HID_WD_DIGITIZERINFO))
    169 #define WACOM_PEN_FIELD(f) (((f)->logical == HID_DG_STYLUS) || \ argument
    170 ((f)->physical == HID_DG_STYLUS) || \
    171 ((f)->physical == HID_DG_PEN) || \
    172 ((f)->application == HID_DG_PEN) || \
    [all …]
|
/drivers/net/wireless/broadcom/brcm80211/brcmsmac/ |
D | debug.h |
    54 #define brcms_dbg(core, l, f, a...) __brcms_dbg(&(core)->dev, l, __func__, f, ##a) argument
    55 #define brcms_info(core, f, a...) __brcms_info(&(core)->dev, f, ##a) argument
    56 #define brcms_warn(core, f, a...) __brcms_warn(&(core)->dev, f, ##a) argument
    57 #define brcms_err(core, f, a...) __brcms_err(&(core)->dev, f, ##a) argument
    58 #define brcms_crit(core, f, a...) __brcms_crit(&(core)->dev, f, ##a) argument
    60 #define brcms_dbg_info(core, f, a...) brcms_dbg(core, BRCM_DL_INFO, f, ##a) argument
    61 #define brcms_dbg_mac80211(core, f, a...) brcms_dbg(core, BRCM_DL_MAC80211, f, ##a) argument
    62 #define brcms_dbg_rx(core, f, a...) brcms_dbg(core, BRCM_DL_RX, f, ##a) argument
    63 #define brcms_dbg_tx(core, f, a...) brcms_dbg(core, BRCM_DL_TX, f, ##a) argument
    64 #define brcms_dbg_int(core, f, a...) brcms_dbg(core, BRCM_DL_INT, f, ##a) argument
    [all …]
|
/drivers/block/aoe/ |
D | aoecmd.c |
    82 struct frame *f; in getframe_deferred() local
    86 f = list_entry(pos, struct frame, head); in getframe_deferred()
    87 if (f->tag == tag) { in getframe_deferred()
    89 return f; in getframe_deferred()
    98 struct frame *f; in getframe() local
    105 f = list_entry(pos, struct frame, head); in getframe()
    106 if (f->tag == tag) { in getframe()
    108 return f; in getframe()
    193 aoe_freetframe(struct frame *f) in aoe_freetframe() argument
    197 t = f->t; in aoe_freetframe()
    [all …]
|
/drivers/gpu/drm/nouveau/include/nvhw/ |
D | drf.h |
    63 #define NVVAL_N(X,d,r,f, v) NVVAL_X(d##_##r##_##f, (v)) argument
    64 #define NVVAL_I(X,d,r,f,i,v) NVVAL_X(d##_##r##_##f(i), (v)) argument
    68 #define NVDEF_N(X,d,r,f, v) NVVAL_X(d##_##r##_##f, d##_##r##_##f##_##v) argument
    69 #define NVDEF_I(X,d,r,f,i,v) NVVAL_X(d##_##r##_##f(i), d##_##r##_##f##_##v) argument
    74 #define NVVAL_GET_N(X,o,d,r,f ) NVVAL_GET_X(o, d##_##r##_##f) argument
    75 #define NVVAL_GET_I(X,o,d,r,f,i) NVVAL_GET_X(o, d##_##r##_##f(i)) argument
    80 #define NVVAL_TEST_N(X,o,d,r,f, cmp,v) NVVAL_TEST_X(o, d##_##r##_##f , cmp, (v)) argument
    81 #define NVVAL_TEST_I(X,o,d,r,f,i,cmp,v) NVVAL_TEST_X(o, d##_##r##_##f(i), cmp, (v)) argument
    85 #define NVDEF_TEST_N(X,o,d,r,f, cmp,v) NVVAL_TEST_X(o, d##_##r##_##f , cmp, d##_##r##_##f##_##v) argument
    86 #define NVDEF_TEST_I(X,o,d,r,f,i,cmp,v) NVVAL_TEST_X(o, d##_##r##_##f(i), cmp, d##_##r##_##f##_##v) argument
    [all …]
|
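The drf.h macros above never name a field directly; they paste d##_##r##_##f (plus an optional _##v) into a symbol that other headers define, so callers only write short device/register/field tokens. A cut-down illustration of that naming scheme (not Nouveau's real macros, and the MYDEV_* symbols are invented):

    #include <stdio.h>

    /* Invented field description in the <dev>_<reg>_<field> style. */
    #define MYDEV_CTRL_MODE_MASK   0x00000030u
    #define MYDEV_CTRL_MODE_SHIFT  4
    #define MYDEV_CTRL_MODE_FAST   0x2u

    /* NVDEF-style helper: ## pastes the tokens into the symbols above, so
     * the caller only writes the short names. */
    #define FIELD_DEF(d, r, f, v) \
        (((d##_##r##_##f##_##v) << (d##_##r##_##f##_SHIFT)) & (d##_##r##_##f##_MASK))

    int main(void)
    {
        unsigned int reg = FIELD_DEF(MYDEV, CTRL, MODE, FAST);

        printf("reg = 0x%08x\n", reg);    /* 0x00000020 */
        return 0;
    }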
/drivers/char/mwave/ |
D | mwavedd.h |
    74 #define PRINTK_1(f,s) \ argument
    75 if (f & (mwave_debug)) { \
    79 #define PRINTK_2(f,s,v1) \ argument
    80 if (f & (mwave_debug)) { \
    84 #define PRINTK_3(f,s,v1,v2) \ argument
    85 if (f & (mwave_debug)) { \
    89 #define PRINTK_4(f,s,v1,v2,v3) \ argument
    90 if (f & (mwave_debug)) { \
    94 #define PRINTK_5(f,s,v1,v2,v3,v4) \ argument
    95 if (f & (mwave_debug)) { \
    [all …]
|
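PRINTK_1..PRINTK_5 above gate each trace statement on a bit in the global mwave_debug mask, so verbosity is chosen at run time per debug class. A sketch of the same gating with invented class bits; unlike the header above, which expands to a bare if block, the sketch wraps the body in do { } while (0) so call sites are safe next to an else.

    #include <stdio.h>

    /* Invented class bits and a runtime mask, as in mwavedd.h. */
    static unsigned int mwave_debug = 0x1;         /* enable class 0x1 only */

    #define PRINTK_1(f, s) \
        do { if ((f) & mwave_debug) printf("%s", (s)); } while (0)
    #define PRINTK_2(f, s, v1) \
        do { if ((f) & mwave_debug) printf((s), (v1)); } while (0)

    int main(void)
    {
        PRINTK_1(0x1, "tp3780i: enter\n");         /* printed               */
        PRINTK_2(0x2, "tp3780i: irq=%d\n", 10);    /* filtered by the mask  */
        return 0;
    }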
/drivers/irqchip/ |
D | irq-versatile-fpga.c |
    56 struct fpga_irq_data *f = irq_data_get_irq_chip_data(d); in fpga_irq_mask() local
    59 writel(mask, f->base + IRQ_ENABLE_CLEAR); in fpga_irq_mask()
    64 struct fpga_irq_data *f = irq_data_get_irq_chip_data(d); in fpga_irq_unmask() local
    67 writel(mask, f->base + IRQ_ENABLE_SET); in fpga_irq_unmask()
    73 struct fpga_irq_data *f = irq_desc_get_handler_data(desc); in fpga_irq_handle() local
    78 status = readl(f->base + IRQ_STATUS); in fpga_irq_handle()
    88 generic_handle_domain_irq(f->domain, irq); in fpga_irq_handle()
    100 static int handle_one_fpga(struct fpga_irq_data *f, struct pt_regs *regs) in handle_one_fpga() argument
    106 while ((status = readl(f->base + IRQ_STATUS))) { in handle_one_fpga()
    108 handle_domain_irq(f->domain, irq, regs); in handle_one_fpga()
    [all …]
|
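fpga_irq_mask()/fpga_irq_unmask() above mask and unmask by writing a single bit to dedicated ENABLE_CLEAR/ENABLE_SET registers, so no read-modify-write (and no lock around it) is needed; compare the ftintc010 entry below, which reads, modifies and rewrites one MASK register. A small simulation of the set/clear style, with a shadow word standing in for the hardware register:

    #include <stdio.h>

    #define BIT(n) (1u << (n))

    /* Shadow word standing in for the ENABLE state; the two helpers mimic
     * write-1-to-set and write-1-to-clear registers. */
    struct fake_fpga_irq { unsigned int enable; };

    static void enable_set(struct fake_fpga_irq *f, unsigned int v)
    {
        f->enable |= v;            /* hardware ORs written bits in  */
    }

    static void enable_clear(struct fake_fpga_irq *f, unsigned int v)
    {
        f->enable &= ~v;           /* hardware clears written bits  */
    }

    int main(void)
    {
        struct fake_fpga_irq f = { 0 };

        enable_set(&f, BIT(3));           /* unmask hwirq 3          */
        enable_set(&f, BIT(7));           /* unmask hwirq 7          */
        enable_clear(&f, BIT(3));         /* mask hwirq 3 again      */
        printf("enable = 0x%08x\n", f.enable);   /* 0x00000080       */
        return 0;
    }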
D | irq-ftintc010.c |
    56 struct ft010_irq_data *f = irq_data_get_irq_chip_data(d); in ft010_irq_mask() local
    59 mask = readl(FT010_IRQ_MASK(f->base)); in ft010_irq_mask()
    61 writel(mask, FT010_IRQ_MASK(f->base)); in ft010_irq_mask()
    66 struct ft010_irq_data *f = irq_data_get_irq_chip_data(d); in ft010_irq_unmask() local
    69 mask = readl(FT010_IRQ_MASK(f->base)); in ft010_irq_unmask()
    71 writel(mask, FT010_IRQ_MASK(f->base)); in ft010_irq_unmask()
    76 struct ft010_irq_data *f = irq_data_get_irq_chip_data(d); in ft010_irq_ack() local
    78 writel(BIT(irqd_to_hwirq(d)), FT010_IRQ_CLEAR(f->base)); in ft010_irq_ack()
    83 struct ft010_irq_data *f = irq_data_get_irq_chip_data(d); in ft010_irq_set_type() local
    87 mode = readl(FT010_IRQ_MODE(f->base)); in ft010_irq_set_type()
    [all …]
|
/drivers/net/ethernet/marvell/prestera/ |
D | prestera_flow.c |
    16 struct tc_cls_matchall_offload *f) in prestera_flow_block_mall_cb() argument
    18 switch (f->command) { in prestera_flow_block_mall_cb()
    20 return prestera_span_replace(block, f); in prestera_flow_block_mall_cb()
    30 struct flow_cls_offload *f) in prestera_flow_block_flower_cb() argument
    32 if (f->common.chain_index != 0) in prestera_flow_block_flower_cb()
    35 switch (f->command) { in prestera_flow_block_flower_cb()
    37 return prestera_flower_replace(block, f); in prestera_flow_block_flower_cb()
    39 prestera_flower_destroy(block, f); in prestera_flow_block_flower_cb()
    42 return prestera_flower_stats(block, f); in prestera_flow_block_flower_cb()
    72 struct flow_block_offload *f, in prestera_flow_block_get() argument
    [all …]
|
/drivers/media/platform/s5p-g2d/ |
D | g2d.c |
    71 static struct g2d_fmt *find_fmt(struct v4l2_format *f) in find_fmt() argument
    75 if (formats[i].fourcc == f->fmt.pix.pixelformat) in find_fmt()
    100 struct g2d_frame *f = get_frame(ctx, vq->type); in g2d_queue_setup() local
    102 if (IS_ERR(f)) in g2d_queue_setup()
    103 return PTR_ERR(f); in g2d_queue_setup()
    105 sizes[0] = f->size; in g2d_queue_setup()
    117 struct g2d_frame *f = get_frame(ctx, vb->vb2_queue->type); in g2d_buf_prepare() local
    119 if (IS_ERR(f)) in g2d_buf_prepare()
    120 return PTR_ERR(f); in g2d_buf_prepare()
    121 vb2_set_plane_payload(vb, 0, f->size); in g2d_buf_prepare()
    [all …]
|
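find_fmt() above is a plain linear scan of a static format table keyed by the V4L2 fourcc. A standalone version of the same lookup (table contents and the FOURCC() helper are invented for illustration):

    #include <stddef.h>
    #include <stdint.h>
    #include <stdio.h>

    #define FOURCC(a, b, c, d) \
        ((uint32_t)(a) | ((uint32_t)(b) << 8) | ((uint32_t)(c) << 16) | ((uint32_t)(d) << 24))

    struct fmt { uint32_t fourcc; int depth; };

    /* Invented table; the driver's version stores V4L2 pixel formats. */
    static const struct fmt formats[] = {
        { FOURCC('R', 'G', 'B', '4'), 32 },
        { FOURCC('R', 'G', 'B', 'P'), 16 },
    };

    static const struct fmt *find_fmt(uint32_t fourcc)
    {
        for (size_t i = 0; i < sizeof(formats) / sizeof(formats[0]); i++)
            if (formats[i].fourcc == fourcc)
                return &formats[i];
        return NULL;                       /* unsupported format */
    }

    int main(void)
    {
        const struct fmt *f = find_fmt(FOURCC('R', 'G', 'B', 'P'));

        printf("depth = %d\n", f ? f->depth : -1);
        return 0;
    }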
/drivers/gpu/drm/virtio/ |
D | virtgpu_fence.c |
    31 container_of(x, struct virtio_gpu_fence, f)
    33 static const char *virtio_gpu_get_driver_name(struct dma_fence *f) in virtio_gpu_get_driver_name() argument
    38 static const char *virtio_gpu_get_timeline_name(struct dma_fence *f) in virtio_gpu_get_timeline_name() argument
    43 static bool virtio_gpu_fence_signaled(struct dma_fence *f) in virtio_gpu_fence_signaled() argument
    48 WARN_ON_ONCE(f->seqno == 0); in virtio_gpu_fence_signaled()
    52 static void virtio_gpu_fence_value_str(struct dma_fence *f, char *str, int size) in virtio_gpu_fence_value_str() argument
    54 snprintf(str, size, "[%llu, %llu]", f->context, f->seqno); in virtio_gpu_fence_value_str()
    57 static void virtio_gpu_timeline_value_str(struct dma_fence *f, char *str, in virtio_gpu_timeline_value_str() argument
    60 struct virtio_gpu_fence *fence = to_virtio_gpu_fence(f); in virtio_gpu_timeline_value_str()
    88 dma_fence_init(&fence->f, &virtio_gpu_fence_ops, &drv->lock, drv->context, in virtio_gpu_fence_alloc()
    [all …]
|
/drivers/staging/media/atomisp/pci/ |
D | css_trace.h |
    216 #define FIELD_VALUE_PACK(f) FIELD_PACK(f, FIELD_VALUE_MASK, FIELD_VALUE_OFFSET) argument
    217 #define FIELD_VALUE_UNPACK(f) FIELD_UNPACK(f, FIELD_VALUE_MASK, FIELD_VALUE_OFFSET) argument
    222 #define FIELD_MINOR_PACK(f) FIELD_PACK(f, FIELD_MINOR_MASK, FIELD_MINOR_OFFSET) argument
    223 #define FIELD_MINOR_UNPACK(f) FIELD_UNPACK(f, FIELD_MINOR_MASK, FIELD_MINOR_OFFSET) argument
    228 #define FIELD_MAJOR_PACK(f) FIELD_PACK(f, FIELD_MAJOR_MASK, FIELD_MAJOR_OFFSET) argument
    229 #define FIELD_MAJOR_UNPACK(f) FIELD_UNPACK(f, FIELD_MAJOR_MASK, FIELD_MAJOR_OFFSET) argument
    234 #define FIELD_FULL_MAJOR_PACK(f) FIELD_PACK(f, FIELD_FULL_MAJOR_MASK, FIELD_MAJOR_OFFSET) argument
    243 #define FIELD_MAJOR_W_FMT_PACK(f) FIELD_PACK(f, FIELD_MAJOR_W_FMT_MASK, FIELD_MAJOR_W_FMT_OFFSET) argument
    244 #define FIELD_MAJOR_W_FMT_UNPACK(f) FIELD_UNPACK(f, FIELD_MAJOR_W_FMT_MASK, FIELD_MAJOR_W_FMT_OFFSE… argument
    249 #define FIELD_FORMAT_PACK(f) FIELD_PACK(f, FIELD_FORMAT_MASK, FIELD_FORMAT_OFFSET) argument
    [all …]
|
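css_trace.h derives all of the *_PACK/*_UNPACK helpers above from one generic FIELD_PACK/FIELD_UNPACK pair parameterised by a mask and an offset. A self-contained sketch of that scheme with an invented two-field layout:

    #include <stdio.h>

    /* Generic pair, as in css_trace.h: every field is just a mask plus an
     * offset.  The two TRACE_* fields below are invented. */
    #define FIELD_PACK(v, mask, off)    (((v) << (off)) & (mask))
    #define FIELD_UNPACK(w, mask, off)  (((w) & (mask)) >> (off))

    #define TRACE_VALUE_MASK   0x0000FFFFu
    #define TRACE_VALUE_OFFSET 0
    #define TRACE_MAJOR_MASK   0x00FF0000u
    #define TRACE_MAJOR_OFFSET 16

    int main(void)
    {
        unsigned int w = FIELD_PACK(0x1234u, TRACE_VALUE_MASK, TRACE_VALUE_OFFSET) |
                         FIELD_PACK(0x42u,   TRACE_MAJOR_MASK, TRACE_MAJOR_OFFSET);

        printf("major=0x%02x value=0x%04x\n",
               FIELD_UNPACK(w, TRACE_MAJOR_MASK, TRACE_MAJOR_OFFSET),
               FIELD_UNPACK(w, TRACE_VALUE_MASK, TRACE_VALUE_OFFSET));
        return 0;
    }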