Searched refs:rq_depth (Results 1 – 5 of 5) sorted by relevance
238  struct rq_depth *rqd = &rwb->rq_depth;                      in latency_exceeded()
291  struct rq_depth *rqd = &rwb->rq_depth;                      in rwb_trace_step()
301  } else if (rwb->rq_depth.max_depth <= 2) {                  in calc_wb_limits()
302  	rwb->wb_normal = rwb->rq_depth.max_depth;                 in calc_wb_limits()
305  	rwb->wb_normal = (rwb->rq_depth.max_depth + 1) / 2;       in calc_wb_limits()
306  	rwb->wb_background = (rwb->rq_depth.max_depth + 3) / 4;   in calc_wb_limits()
312  if (!rq_depth_scale_up(&rwb->rq_depth))                     in scale_up()
322  if (!rq_depth_scale_down(&rwb->rq_depth, hard_throttle))    in scale_down()
331  struct rq_depth *rqd = &rwb->rq_depth;                      in rwb_arm_timer()
356  struct rq_depth *rqd = &rwb->rq_depth;                      in wb_timer_fn()
[all …]
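The calc_wb_limits() hits above show how the writeback throttler splits its allowed depth: normal writes get roughly half of max_depth and background writeback roughly a quarter. Below is a minimal sketch of that split; the struct fields are trimmed to the ones visible in the hits, the "throttling disabled" branch of the upstream function is omitted, and the value used for wb_background in the shallow-depth branch is recalled from mainline rather than shown in the results, so it may differ by kernel version.

```c
/*
 * Sketch of the depth split implied by the calc_wb_limits() hits above.
 * Fields are reduced to what the search results show; the real rq_wb
 * carries more state.
 */
struct rq_wb_sketch {
	unsigned int max_depth;		/* current scaled write depth */
	unsigned int wb_normal;		/* limit for normal buffered writes */
	unsigned int wb_background;	/* limit for pure background writeback */
};

static void calc_wb_limits_sketch(struct rq_wb_sketch *rwb)
{
	if (rwb->max_depth <= 2) {
		/* Too shallow to split: writes may use the whole depth. */
		rwb->wb_normal = rwb->max_depth;
		rwb->wb_background = 1;	/* recalled from mainline, not in the hits */
	} else {
		/* Normal writes get ~1/2 of the depth, background ~1/4. */
		rwb->wb_normal = (rwb->max_depth + 1) / 2;
		rwb->wb_background = (rwb->max_depth + 3) / 4;
	}
}
```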
143  	struct rq_depth rq_depth;                                               member
282  	return rq_wait_inc_below(rqw, iolat->rq_depth.max_depth);               in iolat_acquire_inflight()
374  	unsigned long old = iolat->rq_depth.max_depth;                          in scale_change()
386  	iolat->rq_depth.max_depth = old;                                        in scale_change()
391  	iolat->rq_depth.max_depth = max(old, 1UL);                              in scale_change()
449  	if (iolat->rq_depth.max_depth == 1 && direction < 0) {                  in check_scale_change()
457  	iolat->rq_depth.max_depth = UINT_MAX;                                   in check_scale_change()
512  	if (unlikely(issue_as_root && iolat->rq_depth.max_depth != UINT_MAX)) { in iolatency_record_time()
923  	if (iolat->rq_depth.max_depth == UINT_MAX)                              in iolatency_ssd_stat()
931  		iolat->rq_depth.max_depth);                                           in iolatency_ssd_stat()
[all …]
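blk-iolatency embeds the same struct as a per-cgroup inflight limit: iolat_acquire_inflight() gates submissions on max_depth, scale_change() never lets a downward step drop below 1, and check_scale_change() clears the limit by writing UINT_MAX. The sketch below only illustrates that sentinel-and-clamp pattern; the halving step and the helper names are assumptions for illustration, not the kernel's code.

```c
#include <limits.h>

/* Hypothetical illustration of the pattern in the blk-iolatency hits above. */
struct iolat_sketch {
	unsigned int max_depth;		/* UINT_MAX == not throttled */
};

/* Mirrors the max(old, 1UL) clamp seen in scale_change(): never drop below 1. */
static void iolat_scale_down_sketch(struct iolat_sketch *iolat)
{
	unsigned long old = iolat->max_depth;

	old >>= 1;			/* halving step is an assumption */
	iolat->max_depth = old > 1 ? old : 1;
}

/* Mirrors the check_scale_change() hit: UINT_MAX removes the limit entirely. */
static void iolat_clear_limit_sketch(struct iolat_sketch *iolat)
{
	iolat->max_depth = UINT_MAX;
}
```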
52   struct rq_depth {                                                struct
151  bool rq_depth_scale_up(struct rq_depth *rqd);
152  bool rq_depth_scale_down(struct rq_depth *rqd, bool hard_throttle);
153  bool rq_depth_calc_max_depth(struct rq_depth *rqd);
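For context, here is the struct behind these prototypes, reproduced from memory of block/blk-rq-qos.h in recent mainline kernels; the field set may differ between versions, so treat it as a reference sketch rather than the authoritative header.

```c
/* struct rq_depth, recalled from block/blk-rq-qos.h (may vary by kernel version). */
struct rq_depth {
	unsigned int max_depth;		/* currently allowed inflight depth */

	int scale_step;			/* > 0: throttled below default, < 0: boosted above it */
	bool scaled_max;		/* scaling up already hit the ceiling */

	unsigned int queue_depth;	/* depth of the underlying queue/device */
	unsigned int default_depth;	/* depth used when scale_step == 0 */
};

/* The three helpers declared above operate on this struct. */
bool rq_depth_scale_up(struct rq_depth *rqd);
bool rq_depth_scale_down(struct rq_depth *rqd, bool hard_throttle);
bool rq_depth_calc_max_depth(struct rq_depth *rqd);
```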
70 struct rq_depth rq_depth; member
116  bool rq_depth_calc_max_depth(struct rq_depth *rqd)                  in rq_depth_calc_max_depth()
164  bool rq_depth_scale_up(struct rq_depth *rqd)                        in rq_depth_scale_up()
183  bool rq_depth_scale_down(struct rq_depth *rqd, bool hard_throttle)  in rq_depth_scale_down()
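These three definitions are the shared scaling engine that both the wbt and iolatency call sites above use: scale_up()/scale_down() move scale_step and then recompute max_depth from it. The sketch below is a simplified, self-contained model of that mechanism; the real code additionally handles queue_depth == 1, caps growth at roughly 3/4 of the queue depth, and tracks scaled_max, and the details here are recalled from mainline rather than quoted, so exact behaviour may differ.

```c
#include <stdbool.h>

/* Simplified model of the scale_step mechanism (not the verbatim kernel code). */
struct rq_depth_sketch {
	unsigned int max_depth;
	unsigned int default_depth;
	unsigned int queue_depth;
	int scale_step;			/* > 0: shrink below default, < 0: grow above it */
};

static void sketch_calc_max_depth(struct rq_depth_sketch *rqd)
{
	unsigned int depth = rqd->default_depth;

	if (rqd->scale_step > 0)
		depth = 1 + ((depth - 1) >> rqd->scale_step);	/* throttle: shift down */
	else if (rqd->scale_step < 0)
		depth = 1 + ((depth - 1) << -rqd->scale_step);	/* boost: shift up */

	if (depth > rqd->queue_depth)
		depth = rqd->queue_depth;			/* never exceed the queue */
	rqd->max_depth = depth;
}

static bool sketch_scale_up(struct rq_depth_sketch *rqd)
{
	rqd->scale_step--;
	sketch_calc_max_depth(rqd);
	return true;
}

static bool sketch_scale_down(struct rq_depth_sketch *rqd, bool hard_throttle)
{
	if (rqd->max_depth == 1)
		return false;		/* already fully throttled, tell the caller */

	/* A hard throttle snaps a boosted (negative) step back to the default. */
	if (hard_throttle && rqd->scale_step < 0)
		rqd->scale_step = 0;
	else
		rqd->scale_step++;

	sketch_calc_max_depth(rqd);
	return true;
}
```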