/drivers/net/ethernet/amd/xgbe/ |
D | xgbe-ptp.c |
    130  u64 nsec;  in xgbe_cc_read() local
    132  nsec = pdata->hw_if.get_tstamp_time(pdata);  in xgbe_cc_read()
    134  return nsec;  in xgbe_cc_read()
    188  u64 nsec;  in xgbe_gettime() local
    192  nsec = timecounter_read(&pdata->tstamp_tc);  in xgbe_gettime()
    196  *ts = ns_to_timespec64(nsec);  in xgbe_gettime()
    208  u64 nsec;  in xgbe_settime() local
    210  nsec = timespec64_to_ns(ts);  in xgbe_settime()
    214  timecounter_init(&pdata->tstamp_tc, &pdata->tstamp_cc, nsec);  in xgbe_settime()
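
The xgbe hits above show the usual PHC gettime/settime shape: timecounter_read() yields a nanosecond count that is split into a timespec, and settime flattens a timespec back into nanoseconds before timecounter_init(). A minimal, hedged user-space sketch of just the ns <-> sec/nsec conversion (plain-C stand-ins for ns_to_timespec64()/timespec64_to_ns(), not the kernel helpers themselves):

#include <stdint.h>
#include <stdio.h>

struct ts64 { int64_t sec; long nsec; };          /* stand-in for struct timespec64 */

static struct ts64 ns_to_ts(uint64_t ns)
{
    struct ts64 ts;
    ts.sec  = ns / 1000000000ULL;                 /* whole seconds */
    ts.nsec = ns % 1000000000ULL;                 /* leftover nanoseconds */
    return ts;
}

static uint64_t ts_to_ns(struct ts64 ts)
{
    return (uint64_t)ts.sec * 1000000000ULL + ts.nsec;
}

int main(void)
{
    struct ts64 ts = ns_to_ts(1700000123456789ULL);
    printf("%lld.%09ld -> %llu ns\n", (long long)ts.sec, ts.nsec,
           (unsigned long long)ts_to_ns(ts));
    return 0;
}
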
|
/drivers/net/ethernet/stmicro/stmmac/ |
D | stmmac_hwtstamp.c |
    60   static int init_systime(void __iomem *ioaddr, u32 sec, u32 nsec)  in init_systime() argument
    65   writel(nsec, ioaddr + PTP_STNSUR);  in init_systime()
    101  static int adjust_systime(void __iomem *ioaddr, u32 sec, u32 nsec,  in adjust_systime() argument
    117  nsec = (PTP_DIGITAL_ROLLOVER_MODE - nsec);  in adjust_systime()
    119  nsec = (PTP_BINARY_ROLLOVER_MODE - nsec);  in adjust_systime()
    123  value = (add_sub << PTP_STNSUR_ADDSUB_SHIFT) | nsec;  in adjust_systime()
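
The adjust_systime() hits suggest the usual "subtract by programming the complement" trick: for a negative adjustment the sub-second register is written as (rollover - nsec) with the add/sub flag set. A hedged sketch of that encoding, assuming the conventional 10^9 digital-rollover value; the helper name and flag bit position are illustrative, not the driver's:

#include <stdint.h>
#include <stdio.h>

#define DIGITAL_ROLLOVER 1000000000U      /* sub-second counter wraps at 10^9 (assumed) */
#define ADDSUB_SHIFT     31               /* illustrative bit position for the add/sub flag */

static uint32_t encode_subsecond(uint32_t nsec, int neg_adj)
{
    uint32_t add_sub = neg_adj ? 1U : 0U;
    if (neg_adj)
        nsec = DIGITAL_ROLLOVER - nsec;   /* program the complement when subtracting */
    return (add_sub << ADDSUB_SHIFT) | nsec;
}

int main(void)
{
    printf("0x%08x\n", encode_subsecond(250000000U, 1));
    return 0;
}
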
|
D | stmmac_ptp.c |
    61   u32 sec, nsec;  in stmmac_adjust_time() local
    75   nsec = reminder;  in stmmac_adjust_time()
    78   stmmac_adjust_systime(priv, priv->ptpaddr, sec, nsec, neg_adj, xmac);  in stmmac_adjust_time()
    150  cfg->start.tv_nsec = rq->perout.start.nsec;  in stmmac_enable()
    152  cfg->period.tv_nsec = rq->perout.period.nsec;  in stmmac_enable()
|
/drivers/pps/ |
D | kapi.c |
    29   ts->nsec += offset->nsec;  in pps_add_offset()
    30   while (ts->nsec >= NSEC_PER_SEC) {  in pps_add_offset()
    31   ts->nsec -= NSEC_PER_SEC;  in pps_add_offset()
    34   while (ts->nsec < 0) {  in pps_add_offset()
    35   ts->nsec += NSEC_PER_SEC;  in pps_add_offset()
    164  struct pps_ktime ts_real = { .sec = 0, .nsec = 0, .flags = 0 };  in pps_event()
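
pps_add_offset() is the classic sec/nsec normalisation: add the nanosecond parts, then carry or borrow whole seconds until nsec is back in [0, NSEC_PER_SEC). A small self-contained sketch of the same arithmetic (pps_time is a stand-in for struct pps_ktime):

#include <stdio.h>

#define NSEC_PER_SEC 1000000000L

struct pps_time { long sec; long nsec; };

static void add_offset(struct pps_time *ts, const struct pps_time *off)
{
    ts->nsec += off->nsec;
    while (ts->nsec >= NSEC_PER_SEC) {    /* carry overflow into the seconds field */
        ts->nsec -= NSEC_PER_SEC;
        ts->sec++;
    }
    while (ts->nsec < 0) {                /* borrow a second on underflow */
        ts->nsec += NSEC_PER_SEC;
        ts->sec--;
    }
    ts->sec += off->sec;
}

int main(void)
{
    struct pps_time t = { 10, 900000000L }, off = { 0, 250000000L };
    add_offset(&t, &off);
    printf("%ld.%09ld\n", t.sec, t.nsec);  /* 11.150000000 */
    return 0;
}
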
|
D | sysfs.c |
    27   (long long) pps->assert_tu.sec, pps->assert_tu.nsec,  in assert_show()
    41   (long long) pps->clear_tu.sec, pps->clear_tu.nsec,  in clear_show()
|
/drivers/ptp/ |
D | ptp_chardev.c |
    216  perout->on.nsec > perout->period.nsec)) {  in ptp_ioctl()
    229  perout->phase.nsec >= perout->period.nsec)) {  in ptp_ioctl()
    246  enable = req.perout.period.sec || req.perout.period.nsec;  in ptp_ioctl()
    280  precise_offset.device.nsec = ts.tv_nsec;  in ptp_ioctl()
    283  precise_offset.sys_realtime.nsec = ts.tv_nsec;  in ptp_ioctl()
    286  precise_offset.sys_monoraw.nsec = ts.tv_nsec;  in ptp_ioctl()
    314  extoff->ts[i][0].nsec = sts.pre_ts.tv_nsec;  in ptp_ioctl()
    316  extoff->ts[i][1].nsec = ts.tv_nsec;  in ptp_ioctl()
    318  extoff->ts[i][2].nsec = sts.post_ts.tv_nsec;  in ptp_ioctl()
    340  pct->nsec = ts.tv_nsec;  in ptp_ioctl()
    [all …]
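
The ioctl hits sketch the periodic-output sanity checks: the "on" time may not exceed the period, the phase must fall inside one period, and an all-zero period means disable. A hedged illustration of those checks, comparing via a flattened nanosecond value rather than the field-by-field comparison the driver itself uses:

#include <stdbool.h>
#include <stdint.h>
#include <stdio.h>

struct ptp_time { int64_t sec; uint32_t nsec; };       /* illustrative sec/nsec pair */

static int64_t to_ns(struct ptp_time t)
{
    return t.sec * 1000000000LL + t.nsec;
}

static bool perout_request_ok(struct ptp_time on, struct ptp_time phase,
                              struct ptp_time period)
{
    if (to_ns(on) > to_ns(period))         /* duty cycle longer than the period */
        return false;
    if (to_ns(phase) >= to_ns(period))     /* phase offset outside one period */
        return false;
    return true;
}

int main(void)
{
    struct ptp_time on = { 0, 500000000 }, phase = { 0, 0 }, period = { 1, 0 };
    bool enable = period.sec || period.nsec;           /* zero period disables output */
    printf("ok=%d enable=%d\n", perout_request_ok(on, phase, period), enable);
    return 0;
}
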
|
D | ptp_idt82p33.c |
    40   s32 nsec;  in idt82p33_byte_array_to_timespec() local
    43   nsec = buf[3];  in idt82p33_byte_array_to_timespec()
    45   nsec <<= 8;  in idt82p33_byte_array_to_timespec()
    46   nsec |= buf[2 - i];  in idt82p33_byte_array_to_timespec()
    56   ts->tv_nsec = nsec;  in idt82p33_byte_array_to_timespec()
    63   s32 nsec;  in idt82p33_timespec_to_byte_array() local
    66   nsec = ts->tv_nsec;  in idt82p33_timespec_to_byte_array()
    70   buf[i] = nsec & 0xff;  in idt82p33_timespec_to_byte_array()
    71   nsec >>= 8;  in idt82p33_timespec_to_byte_array()
    665  else if (rq->perout.start.nsec || rq->perout.period.sec != 1 ||  in idt82p33_enable()
    [all …]
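
The idt82p33 hits read and write the nanoseconds field as a little-endian byte array: pack by emitting the least significant byte first, unpack by starting from the most significant byte and shifting the rest back in. A self-contained sketch of that round trip (a 4-byte field width is assumed for illustration):

#include <stdint.h>
#include <stdio.h>

static void nsec_to_bytes(int32_t nsec, uint8_t buf[4])
{
    for (int i = 0; i < 4; i++) {
        buf[i] = nsec & 0xff;          /* least significant byte first */
        nsec >>= 8;
    }
}

static int32_t bytes_to_nsec(const uint8_t buf[4])
{
    int32_t nsec = buf[3];             /* start from the most significant byte */
    for (int i = 1; i < 4; i++) {
        nsec <<= 8;
        nsec |= buf[3 - i];            /* shift the lower bytes back in */
    }
    return nsec;
}

int main(void)
{
    uint8_t buf[4];
    nsec_to_bytes(123456789, buf);
    printf("%d\n", bytes_to_nsec(buf));   /* 123456789 */
    return 0;
}
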
|
D | ptp_sysfs.c |
    90   event.index, event.t.sec, event.t.nsec);  in extts_fifo_show()
    107  &req.perout.start.sec, &req.perout.start.nsec,  in period_store()
    108  &req.perout.period.sec, &req.perout.period.nsec);  in period_store()
    114  enable = req.perout.period.sec || req.perout.period.nsec;  in period_store()
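
period_store() appears to scan a start time and a period as sec/nsec pairs out of the sysfs buffer and treats an all-zero period as "disable". A hedged sketch of that kind of parsing; the field order and format string here are assumptions, not the driver's exact sysfs syntax:

#include <stdio.h>

int main(void)
{
    const char *line = "0 1700000000 0 1 0";   /* index start.sec start.nsec period.sec period.nsec */
    unsigned int index, start_nsec, period_nsec;
    long long start_sec, period_sec;

    if (sscanf(line, "%u %lld %u %lld %u", &index,
               &start_sec, &start_nsec, &period_sec, &period_nsec) != 5)
        return 1;

    int enable = period_sec || period_nsec;    /* a zero period turns the output off */
    printf("index=%u enable=%d\n", index, enable);
    return 0;
}
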
|
/drivers/net/ethernet/marvell/octeontx2/nic/ |
D | otx2_ptp.c |
    81   u64 nsec;  in otx2_ptp_gettime() local
    84   nsec = timecounter_read(&ptp->time_counter);  in otx2_ptp_gettime()
    87   *ts = ns_to_timespec64(nsec);  in otx2_ptp_gettime()
    98   u64 nsec;  in otx2_ptp_settime() local
    100  nsec = timespec64_to_ns(ts);  in otx2_ptp_settime()
    103  timecounter_init(&ptp->time_counter, &ptp->cycle_counter, nsec);  in otx2_ptp_settime()
|
/drivers/net/ethernet/mellanox/mlxsw/ |
D | spectrum_ptp.c |
    114  static u64 mlxsw_sp1_ptp_ns2cycles(const struct timecounter *tc, u64 nsec)  in mlxsw_sp1_ptp_ns2cycles() argument
    116  u64 cycles = (u64) nsec;  in mlxsw_sp1_ptp_ns2cycles()
    125  mlxsw_sp1_ptp_phc_settime(struct mlxsw_sp_ptp_clock *clock, u64 nsec)  in mlxsw_sp1_ptp_phc_settime() argument
    133  next_sec = div_u64(nsec, NSEC_PER_SEC) + 1;  in mlxsw_sp1_ptp_phc_settime()
    184  u64 nsec;  in mlxsw_sp1_ptp_adjtime() local
    188  nsec = timecounter_read(&clock->tc);  in mlxsw_sp1_ptp_adjtime()
    191  return mlxsw_sp1_ptp_phc_settime(clock, nsec);  in mlxsw_sp1_ptp_adjtime()
    200  u64 cycles, nsec;  in mlxsw_sp1_ptp_gettimex() local
    204  nsec = timecounter_cyc2time(&clock->tc, cycles);  in mlxsw_sp1_ptp_gettimex()
    207  *ts = ns_to_timespec64(nsec);  in mlxsw_sp1_ptp_gettimex()
    [all …]
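
mlxsw_sp1_ptp_ns2cycles() is the inverse of the usual cyclecounter scaling: cycles become nanoseconds as (cycles * mult) >> shift, so nanoseconds go back to cycles as (nsec << shift) / mult. A small sketch with made-up mult/shift values for a counter that ticks every 8 ns; the values are examples only:

#include <stdint.h>
#include <stdio.h>

static uint64_t cycles_to_ns(uint64_t cycles, uint32_t mult, uint32_t shift)
{
    return (cycles * mult) >> shift;       /* forward cyclecounter scaling */
}

static uint64_t ns_to_cycles(uint64_t ns, uint32_t mult, uint32_t shift)
{
    return (ns << shift) / mult;           /* approximate inverse, as in ns2cycles */
}

int main(void)
{
    uint32_t mult = 8 << 16, shift = 16;   /* example values: 1 cycle = 8 ns */
    uint64_t ns = cycles_to_ns(1000, mult, shift);
    printf("1000 cycles = %llu ns -> %llu cycles\n",
           (unsigned long long)ns,
           (unsigned long long)ns_to_cycles(ns, mult, shift));
    return 0;
}
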
|
/drivers/gpu/drm/nouveau/nvkm/subdev/pmu/ |
D | memx.c |
    108  u32 addr, u32 mask, u32 data, u32 nsec)  in nvkm_memx_wait() argument
    111  addr, mask, data, nsec);  in nvkm_memx_wait()
    112  memx_cmd(memx, MEMX_WAIT, 4, (u32[]){ addr, mask, data, nsec });  in nvkm_memx_wait()
    117  nvkm_memx_nsec(struct nvkm_memx *memx, u32 nsec)  in nvkm_memx_nsec() argument
    119  nvkm_debug(&memx->pmu->subdev, " DELAY = %d ns\n", nsec);  in nvkm_memx_nsec()
    120  memx_cmd(memx, MEMX_DELAY, 1, (u32[]){ nsec });  in nvkm_memx_nsec()
|
/drivers/misc/sgi-gru/ |
D | gruhandles.c |
    32   unsigned long nsec;  in update_mcs_stats() local
    34   nsec = CLKS2NSEC(clks);  in update_mcs_stats()
    36   atomic_long_add(nsec, &mcs_op_statistics[op].total);  in update_mcs_stats()
    37   if (mcs_op_statistics[op].max < nsec)  in update_mcs_stats()
    38   mcs_op_statistics[op].max = nsec;  in update_mcs_stats()
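
update_mcs_stats() converts a clock-tick delta to nanoseconds, adds it to a per-operation total, and keeps a running maximum. A single-threaded sketch of the same bookkeeping; the struct layout and count field are assumptions, and the kernel code does the additions atomically:

#include <stdint.h>
#include <stdio.h>

struct op_stats { uint64_t count, total, max; };

static void update_stats(struct op_stats *s, uint64_t nsec)
{
    s->count++;             /* one more operation observed */
    s->total += nsec;       /* accumulate total latency in nanoseconds */
    if (s->max < nsec)      /* remember the worst case */
        s->max = nsec;
}

int main(void)
{
    struct op_stats s = { 0, 0, 0 };
    update_stats(&s, 1200);
    update_stats(&s, 800);
    printf("count=%llu total=%llu max=%llu\n",
           (unsigned long long)s.count, (unsigned long long)s.total,
           (unsigned long long)s.max);
    return 0;
}
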
|
/drivers/gpu/drm/nouveau/nvkm/subdev/timer/ |
D | base.c |
    54   nvkm_timer_wait_init(struct nvkm_device *device, u64 nsec,  in nvkm_timer_wait_init() argument
    58   wait->limit = nsec;  in nvkm_timer_wait_init()
    106  nvkm_timer_alarm(struct nvkm_timer *tmr, u32 nsec, struct nvkm_alarm *alarm)  in nvkm_timer_alarm() argument
    119  if (nsec) {  in nvkm_timer_alarm()
    121  alarm->timestamp = nvkm_timer_read(tmr) + nsec;  in nvkm_timer_alarm()
|
/drivers/net/ethernet/mellanox/mlx5/core/lib/ |
D | clock.h |
    50   u64 nsec;  in mlx5_timecounter_cyc2time() local
    54   nsec = timecounter_cyc2time(&timer->tc, timestamp);  in mlx5_timecounter_cyc2time()
    57   return ns_to_ktime(nsec);  in mlx5_timecounter_cyc2time()
|
/drivers/net/ethernet/cavium/common/ |
D | cavium_ptp.c |
    167  u64 nsec;  in cavium_ptp_gettime() local
    170  nsec = timecounter_read(&clock->time_counter);  in cavium_ptp_gettime()
    173  *ts = ns_to_timespec64(nsec);  in cavium_ptp_gettime()
    189  u64 nsec;  in cavium_ptp_settime() local
    191  nsec = timespec64_to_ns(ts);  in cavium_ptp_settime()
    194  timecounter_init(&clock->time_counter, &clock->cycle_counter, nsec);  in cavium_ptp_settime()
|
/drivers/gpu/drm/nouveau/nvkm/subdev/fb/ |
D | ramfuc.h |
    123  ramfuc_wait(struct ramfuc *ram, u32 addr, u32 mask, u32 data, u32 nsec)  in ramfuc_wait() argument
    125  nvkm_memx_wait(ram->memx, addr, mask, data, nsec);  in ramfuc_wait()
    129  ramfuc_nsec(struct ramfuc *ram, u32 nsec)  in ramfuc_nsec() argument
    131  nvkm_memx_nsec(ram->memx, nsec);  in ramfuc_nsec()
|
/drivers/net/ethernet/intel/igc/ |
D | igc_tsn.c |
    67   u32 sec, nsec, cycle;  in igc_tsn_enable_offload() local
    104  nsec = rd32(IGC_SYSTIML);  in igc_tsn_enable_offload()
    107  systim = ktime_set(sec, nsec);  in igc_tsn_enable_offload()
|
/drivers/gpu/drm/nouveau/nvif/ |
D | timer.c |
    50   nvif_timer_wait_init(struct nvif_device *device, u64 nsec,  in nvif_timer_wait_init() argument
    54   wait->limit = nsec;  in nvif_timer_wait_init()
|
/drivers/gpu/drm/nouveau/nvkm/subdev/bus/ |
D | hwsq.c |
    167  nvkm_hwsq_nsec(struct nvkm_hwsq *hwsq, u32 nsec)  in nvkm_hwsq_nsec() argument
    169  u8 shift = 0, usec = nsec / 1000;  in nvkm_hwsq_nsec()
    175  nvkm_debug(hwsq->subdev, " DELAY = %d ns\n", nsec);  in nvkm_hwsq_nsec()
|
/drivers/clocksource/ |
D | jcore-pit.c |
    47   u32 seclo, nsec, seclo0;  in jcore_sched_clock_read() local
    53   nsec = readl(base + REG_NSEC);  in jcore_sched_clock_read()
    57   return seclo * NSEC_PER_SEC + nsec;  in jcore_sched_clock_read()
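
The seclo/seclo0 pair in jcore_sched_clock_read() hints at torn-read protection for a split sec/nsec counter: read seconds, read nanoseconds, re-read seconds, and retry if the seconds register ticked in between. A hedged reconstruction of that pattern, with stubbed register accessors so the example runs; the retry interpretation is an assumption based on the variable names:

#include <stdint.h>
#include <stdio.h>

#define NSEC_PER_SEC 1000000000ULL

/* stand-in "hardware registers" so the example is self-contained */
static uint32_t fake_sec = 42, fake_nsec = 500000000;
static uint32_t read_sec_reg(void)  { return fake_sec; }
static uint32_t read_nsec_reg(void) { return fake_nsec; }

static uint64_t read_clock_ns(void)
{
    uint32_t sec, nsec, sec0;

    sec = read_sec_reg();
    do {
        sec0 = sec;                   /* seconds value we will pair with nsec */
        nsec = read_nsec_reg();       /* sub-second part */
        sec  = read_sec_reg();        /* re-read to detect a rollover mid-read */
    } while (sec != sec0);

    return (uint64_t)sec * NSEC_PER_SEC + nsec;
}

int main(void)
{
    printf("%llu\n", (unsigned long long)read_clock_ns());
    return 0;
}
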
|
/drivers/net/ethernet/freescale/dpaa2/ |
D | dpaa2-eth.h |
    245  u32 nsec;  member
    250  u64 sec, nsec;  in ns_to_ptp_tstamp() local
    253  nsec = do_div(sec, 1000000000);  in ns_to_ptp_tstamp()
    257  tstamp->nsec = nsec;  in ns_to_ptp_tstamp()
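
ns_to_ptp_tstamp() relies on the kernel's do_div(), which divides its first argument in place and returns the remainder, so after the call "sec" holds whole seconds and "nsec" the leftover nanoseconds. A minimal sketch using plain division and an illustrative struct layout (the msb/lsb split for a 48-bit PTP seconds field is an assumption, not the driver's exact struct):

#include <stdint.h>
#include <stdio.h>

struct ptp_tstamp_example {
    uint16_t sec_msb;              /* upper bits of the 48-bit seconds field (assumed) */
    uint32_t sec_lsb;              /* lower 32 bits of the seconds field */
    uint32_t nsec;                 /* sub-second part, 0..999999999 */
};

static void ns_to_tstamp(struct ptp_tstamp_example *t, uint64_t ns)
{
    uint64_t sec = ns / 1000000000ULL;        /* do_div() would update sec in place */
    t->nsec    = (uint32_t)(ns % 1000000000ULL);
    t->sec_lsb = (uint32_t)sec;
    t->sec_msb = (uint16_t)(sec >> 32);
}

int main(void)
{
    struct ptp_tstamp_example t;
    ns_to_tstamp(&t, 1700000000123456789ULL);
    printf("sec=%llu nsec=%u\n",
           (unsigned long long)(((uint64_t)t.sec_msb << 32) | t.sec_lsb), t.nsec);
    return 0;
}
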
|
/drivers/gpu/drm/nouveau/include/nvkm/subdev/ |
D | pmu.h |
    60   void nvkm_memx_wait(struct nvkm_memx *, u32 addr, u32 mask, u32 data, u32 nsec);
    61   void nvkm_memx_nsec(struct nvkm_memx *, u32 nsec);
|
D | timer.h |
    29   void nvkm_timer_alarm(struct nvkm_timer *, u32 nsec, struct nvkm_alarm *);
    39   void nvkm_timer_wait_init(struct nvkm_device *, u64 nsec,
|
/drivers/net/ethernet/mellanox/mlx4/ |
D | en_clock.c |
    66   u64 nsec;  in mlx4_en_fill_hwtstamps() local
    70   nsec = timecounter_cyc2time(&mdev->clock, timestamp);  in mlx4_en_fill_hwtstamps()
    74   hwts->hwtstamp = ns_to_ktime(nsec);  in mlx4_en_fill_hwtstamps()
|
/drivers/net/ethernet/mscc/ |
D | ocelot_ptp.c |
    217  ts_period.tv_nsec = rq->perout.period.nsec;  in ocelot_ptp_enable()
    233  ts_phase.tv_nsec = rq->perout.phase.nsec;  in ocelot_ptp_enable()
    237  ts_phase.tv_nsec = rq->perout.start.nsec;  in ocelot_ptp_enable()
    252  ts_on.tv_nsec = rq->perout.on.nsec;  in ocelot_ptp_enable()
|