Lines matching refs: cq — inline CQ helpers from the Chelsio cxgb4 iWARP driver (drivers/infiniband/hw/cxgb4/t4.h)
/* Ring the GTS doorbell, preferring the BAR2 user doorbell region when mapped. */
static inline void write_gts(struct t4_cq *cq, u32 val)
{
	if (cq->bar2_va)
		writel(val | INGRESSQID_V(cq->bar2_qid),
		       cq->bar2_va + SGE_UDB_GTS);
	else
		writel(val | INGRESSQID_V(cq->cqid), cq->gts);
}
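/*
 * Usage sketch (an illustration, not part of t4.h; the helper name is
 * hypothetical): the GTS value packs the solicited-event arm flag, the
 * CIDX credit count, and a timer register index. Returning credits
 * without arming, as t4_hwcq_consume() does below, looks like this:
 */
static inline void example_return_cidx_credits(struct t4_cq *cq, u16 credits)
{
	/* SEINTARM_V(0): do not arm; timer index 7 is used here, as in
	 * the credit-return paths in this file. */
	write_gts(cq, SEINTARM_V(0) | CIDXINC_V(credits) | TIMERREG_V(7));
}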
static inline int t4_clear_cq_armed(struct t4_cq *cq)
{
	return test_and_clear_bit(CQ_ARMED, &cq->flags);
}
static inline int t4_arm_cq(struct t4_cq *cq, int se)
{
	u32 val;

	set_bit(CQ_ARMED, &cq->flags);
	/* Return accumulated CIDX credits in CIDXINC_M-sized chunks first. */
	while (cq->cidx_inc > CIDXINC_M) {
		val = SEINTARM_V(0) | CIDXINC_V(CIDXINC_M) | TIMERREG_V(7);
		write_gts(cq, val);
		cq->cidx_inc -= CIDXINC_M;
	}
	val = SEINTARM_V(se) | CIDXINC_V(cq->cidx_inc) | TIMERREG_V(6);
	write_gts(cq, val);
	cq->cidx_inc = 0;
	return 0;
}
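/*
 * Hedged usage sketch (the wrapper name is hypothetical, not from this
 * file): an ib_req_notify_cq()-style path arms the CQ, passing se != 0
 * to request an event for solicited completions only.
 */
static inline int example_req_notify(struct t4_cq *cq, bool solicited_only)
{
	return t4_arm_cq(cq, solicited_only ? 1 : 0);
}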
static inline void t4_swcq_produce(struct t4_cq *cq)
{
	cq->sw_in_use++;
	if (cq->sw_in_use == cq->size) {
		pr_warn("%s cxgb4 sw cq overflow cqid %u\n",
			__func__, cq->cqid);
		cq->error = 1;
		cq->sw_in_use--;
		return;
	}
	if (++cq->sw_pidx == cq->size)
		cq->sw_pidx = 0;
}
static inline void t4_swcq_consume(struct t4_cq *cq)
{
	cq->sw_in_use--;
	if (++cq->sw_cidx == cq->size)
		cq->sw_cidx = 0;
}
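/*
 * Illustration only (the helper name is hypothetical): the software CQ
 * is a ring indexed by sw_pidx/sw_cidx. A flush-style path writes a CQE
 * at sw_pidx and then calls t4_swcq_produce(); the poll path reads from
 * sw_cidx and calls t4_swcq_consume() once the entry is reaped.
 */
static inline void example_swcq_insert(struct t4_cq *cq, struct t4_cqe *cqe)
{
	cq->sw_queue[cq->sw_pidx] = *cqe;
	t4_swcq_produce(cq);
}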
static inline void t4_hwcq_consume(struct t4_cq *cq)
{
	cq->bits_type_ts = cq->queue[cq->cidx].bits_type_ts;
	/* Return CIDX credits once 1/16th of the ring (or CIDXINC_M) accrues. */
	if (++cq->cidx_inc == (cq->size >> 4) || cq->cidx_inc == CIDXINC_M) {
		u32 val;

		val = SEINTARM_V(0) | CIDXINC_V(cq->cidx_inc) | TIMERREG_V(7);
		write_gts(cq, val);
		cq->cidx_inc = 0;
	}
	if (++cq->cidx == cq->size) {
		cq->cidx = 0;
		cq->gen ^= 1;
	}
}
static inline int t4_valid_cqe(struct t4_cq *cq, struct t4_cqe *cqe)
{
	return (CQE_GENBIT(cqe) == cq->gen);
}
static inline int t4_cq_notempty(struct t4_cq *cq)
{
	return cq->sw_in_use || t4_valid_cqe(cq, &cq->queue[cq->cidx]);
}
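/*
 * Hedged sketch (hypothetical wrapper, not from this file): with
 * "report missed events" semantics, the caller checks for pending
 * entries after arming so a completion that raced with the arm is not
 * lost.
 */
static inline int example_arm_report_missed(struct t4_cq *cq, int se)
{
	t4_arm_cq(cq, se);
	return t4_cq_notempty(cq); /* nonzero: entries already pending */
}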
static inline int t4_next_hw_cqe(struct t4_cq *cq, struct t4_cqe **cqe)
{
	int ret;
	u16 prev_cidx;

	if (cq->cidx == 0)
		prev_cidx = cq->size - 1;
	else
		prev_cidx = cq->cidx - 1;

	if (cq->queue[prev_cidx].bits_type_ts != cq->bits_type_ts) {
		ret = -EOVERFLOW;
		cq->error = 1;
		pr_err("cq overflow cqid %u\n", cq->cqid);
	} else if (t4_valid_cqe(cq, &cq->queue[cq->cidx])) {
		/* Ensure the CQE is read only after the genbit check. */
		rmb();
		*cqe = &cq->queue[cq->cidx];
		ret = 0;
	} else {
		ret = -ENODATA;
	}
	return ret;
}
static inline struct t4_cqe *t4_next_sw_cqe(struct t4_cq *cq)
{
	if (cq->sw_in_use == cq->size) {
		pr_warn("%s cxgb4 sw cq overflow cqid %u\n",
			__func__, cq->cqid);
		cq->error = 1;
		return NULL;
	}
	if (cq->sw_in_use)
		return &cq->sw_queue[cq->sw_cidx];
	return NULL;
}
static inline int t4_next_cqe(struct t4_cq *cq, struct t4_cqe **cqe)
{
	int ret = 0;

	if (cq->error)
		ret = -ENODATA;
	else if (cq->sw_in_use)
		*cqe = &cq->sw_queue[cq->sw_cidx];
	else
		ret = t4_next_hw_cqe(cq, cqe);
	return ret;
}
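/*
 * Hedged poll-loop sketch (simplified; not the driver's real poll
 * routine, and the function name is hypothetical): drain valid CQEs,
 * consuming from the software queue while it holds entries and from the
 * hardware queue otherwise.
 */
static inline int example_drain_cq(struct t4_cq *cq)
{
	struct t4_cqe *cqe;
	int npolled = 0;

	while (t4_next_cqe(cq, &cqe) == 0) {
		/* ... translate *cqe into a work completion here ... */
		if (cq->sw_in_use)
			t4_swcq_consume(cq);
		else
			t4_hwcq_consume(cq);
		npolled++;
	}
	return npolled;
}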
static inline int t4_cq_in_error(struct t4_cq *cq)
{
	return *cq->qp_errp;
}
static inline void t4_set_cq_in_error(struct t4_cq *cq)
{
	*cq->qp_errp = 1;
}
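/*
 * Sketch of the error contract (an assumption about usage, not from
 * this file): qp_errp points at error-flag storage shared with the QP,
 * so t4_set_cq_in_error() makes later t4_cq_in_error() checks fail fast.
 */
static inline bool example_cq_usable(struct t4_cq *cq)
{
	return !t4_cq_in_error(cq) && !cq->error;
}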