Lines Matching refs:sge
170 struct qib_sge *sge = &ss->sge; in qib_copy_sge() local
173 u32 len = sge->length; in qib_copy_sge()
177 if (len > sge->sge_length) in qib_copy_sge()
178 len = sge->sge_length; in qib_copy_sge()
180 memcpy(sge->vaddr, data, len); in qib_copy_sge()
181 sge->vaddr += len; in qib_copy_sge()
182 sge->length -= len; in qib_copy_sge()
183 sge->sge_length -= len; in qib_copy_sge()
184 if (sge->sge_length == 0) { in qib_copy_sge()
186 qib_put_mr(sge->mr); in qib_copy_sge()
188 *sge = *ss->sg_list++; in qib_copy_sge()
189 } else if (sge->length == 0 && sge->mr->lkey) { in qib_copy_sge()
190 if (++sge->n >= QIB_SEGSZ) { in qib_copy_sge()
191 if (++sge->m >= sge->mr->mapsz) in qib_copy_sge()
193 sge->n = 0; in qib_copy_sge()
195 sge->vaddr = in qib_copy_sge()
196 sge->mr->map[sge->m]->segs[sge->n].vaddr; in qib_copy_sge()
197 sge->length = in qib_copy_sge()
198 sge->mr->map[sge->m]->segs[sge->n].length; in qib_copy_sge()
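
The qib_copy_sge() lines above are the driver's standard scatter pattern: each pass clamps the chunk to both the remaining request and the remaining bytes of the current SGE, copies it, and then either loads the next SGE from sg_list or steps to the next segment of the same memory region (the mr->map[m]->segs[n] table). Below is a minimal user-space model of that walk, assuming simplified model_* types with a flat segs[] array in place of the driver's two-level map, and omitting the qib_put_mr() release and the lkey special case; it is a sketch of the technique, not the kernel code.

#include <stdint.h>
#include <string.h>

/* Simplified stand-ins for struct qib_mr / qib_sge / qib_sge_state
 * (assumption: a flat segs[] array replaces the map[m]->segs[n] table). */
struct model_seg { char *vaddr; uint32_t length; };
struct model_mr  { struct model_seg *segs; uint32_t nsegs; };
struct model_sge {
	struct model_mr *mr;
	char *vaddr;          /* current position                  */
	uint32_t length;      /* bytes left in the current segment */
	uint32_t sge_length;  /* bytes left in the whole SGE       */
	uint32_t n;           /* index of the current segment      */
};
struct model_sge_state {
	struct model_sge *sg_list; /* entries after the current one  */
	struct model_sge sge;      /* entry currently being consumed */
	uint8_t num_sge;
};

/* The advance step these functions share: when the whole SGE is consumed,
 * load the next entry; when only the current segment is consumed, step to
 * the region's next segment.  Returns 0 when the region has no further
 * segments (the driver breaks out of its loop in that case). */
static int model_advance(struct model_sge_state *ss)
{
	struct model_sge *sge = &ss->sge;

	if (sge->sge_length == 0) {
		if (--ss->num_sge)
			*sge = *ss->sg_list++;
	} else if (sge->length == 0) {
		if (++sge->n >= sge->mr->nsegs)
			return 0;
		sge->vaddr = sge->mr->segs[sge->n].vaddr;
		sge->length = sge->mr->segs[sge->n].length;
	}
	return 1;
}

/* Scatter 'length' bytes of 'data' across the SG list; as in the driver,
 * the caller guarantees 'length' does not exceed what the list describes. */
static void model_copy_sge(struct model_sge_state *ss,
			   const void *data, uint32_t length)
{
	struct model_sge *sge = &ss->sge;

	while (length) {
		uint32_t len = sge->length;

		/* clamp to the request, then to the SGE's remaining bytes */
		if (len > length)
			len = length;
		if (len > sge->sge_length)
			len = sge->sge_length;

		memcpy(sge->vaddr, data, len);
		sge->vaddr += len;
		sge->length -= len;
		sge->sge_length -= len;

		if (!model_advance(ss))
			break;
		data = (const char *)data + len;
		length -= len;
	}
}
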
212 struct qib_sge *sge = &ss->sge; in qib_skip_sge() local
215 u32 len = sge->length; in qib_skip_sge()
219 if (len > sge->sge_length) in qib_skip_sge()
220 len = sge->sge_length; in qib_skip_sge()
222 sge->vaddr += len; in qib_skip_sge()
223 sge->length -= len; in qib_skip_sge()
224 sge->sge_length -= len; in qib_skip_sge()
225 if (sge->sge_length == 0) { in qib_skip_sge()
227 qib_put_mr(sge->mr); in qib_skip_sge()
229 *sge = *ss->sg_list++; in qib_skip_sge()
230 } else if (sge->length == 0 && sge->mr->lkey) { in qib_skip_sge()
231 if (++sge->n >= QIB_SEGSZ) { in qib_skip_sge()
232 if (++sge->m >= sge->mr->mapsz) in qib_skip_sge()
234 sge->n = 0; in qib_skip_sge()
236 sge->vaddr = in qib_skip_sge()
237 sge->mr->map[sge->m]->segs[sge->n].vaddr; in qib_skip_sge()
238 sge->length = in qib_skip_sge()
239 sge->mr->map[sge->m]->segs[sge->n].length; in qib_skip_sge()
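
qib_skip_sge() is the same walk with the data placement removed: it steps over bytes in the SG list without touching them. A sketch, reusing the model types and the model_advance() helper from the sketch above:

/* Skip 'length' bytes without copying anything (mirrors qib_skip_sge()). */
static void model_skip_sge(struct model_sge_state *ss, uint32_t length)
{
	struct model_sge *sge = &ss->sge;

	while (length) {
		uint32_t len = sge->length;

		if (len > length)
			len = length;
		if (len > sge->sge_length)
			len = sge->sge_length;

		/* no memcpy(): only the position bookkeeping */
		sge->vaddr += len;
		sge->length -= len;
		sge->sge_length -= len;
		if (!model_advance(ss))
			break;
		length -= len;
	}
}
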
253 struct qib_sge sge = ss->sge; in qib_count_sge() local
258 u32 len = sge.length; in qib_count_sge()
262 if (len > sge.sge_length) in qib_count_sge()
263 len = sge.sge_length; in qib_count_sge()
265 if (((long) sge.vaddr & (sizeof(u32) - 1)) || in qib_count_sge()
271 sge.vaddr += len; in qib_count_sge()
272 sge.length -= len; in qib_count_sge()
273 sge.sge_length -= len; in qib_count_sge()
274 if (sge.sge_length == 0) { in qib_count_sge()
276 sge = *sg_list++; in qib_count_sge()
277 } else if (sge.length == 0 && sge.mr->lkey) { in qib_count_sge()
278 if (++sge.n >= QIB_SEGSZ) { in qib_count_sge()
279 if (++sge.m >= sge.mr->mapsz) in qib_count_sge()
281 sge.n = 0; in qib_count_sge()
283 sge.vaddr = in qib_count_sge()
284 sge.mr->map[sge.m]->segs[sge.n].vaddr; in qib_count_sge()
285 sge.length = in qib_count_sge()
286 sge.mr->map[sge.m]->segs[sge.n].length; in qib_count_sge()
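
Unlike the other walks, qib_count_sge() works on a local copy of the state (struct qib_sge sge = ss->sge), so the caller's position is left untouched; it only counts how the payload splits into chunks and tests each chunk's 32-bit alignment, which is what the (long) sge.vaddr & (sizeof(u32) - 1) test is doing. A hedged sketch on the model types from above; the second half of the alignment test and the meaning of the return value (a descriptor count, with zero meaning the payload cannot be handed over word-granular as-is) are assumptions about lines the listing elides:

/* Count the chunks needed for 'length' bytes without modifying *ss.
 * Returns 0 if any chunk is unsuitable for word-granular transfer. */
static uint32_t model_count_sge(const struct model_sge_state *ss, uint32_t length)
{
	const struct model_sge *sg_list = ss->sg_list;
	struct model_sge sge = ss->sge;  /* local copy: caller's state unchanged */
	uint8_t num_sge = ss->num_sge;
	uint32_t ndesc = 1;              /* assumption: one descriptor for the header */

	while (length) {
		uint32_t len = sge.length;

		if (len > length)
			len = length;
		if (len > sge.sge_length)
			len = sge.sge_length;
		/* reject chunks that start off a dword boundary; the assumed
		 * continuation of the test also rejects middle chunks whose
		 * length is not a whole number of dwords */
		if (((uintptr_t)sge.vaddr & (sizeof(uint32_t) - 1)) ||
		    (len != length && (len & (sizeof(uint32_t) - 1))))
			return 0;
		ndesc++;
		sge.vaddr += len;
		sge.length -= len;
		sge.sge_length -= len;
		if (sge.sge_length == 0) {
			if (--num_sge)
				sge = *sg_list++;
		} else if (sge.length == 0) {
			if (++sge.n >= sge.mr->nsegs)
				break;
			sge.vaddr = sge.mr->segs[sge.n].vaddr;
			sge.length = sge.mr->segs[sge.n].length;
		}
		length -= len;
	}
	return ndesc;
}
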
298 struct qib_sge *sge = &ss->sge; in qib_copy_from_sge() local
301 u32 len = sge->length; in qib_copy_from_sge()
305 if (len > sge->sge_length) in qib_copy_from_sge()
306 len = sge->sge_length; in qib_copy_from_sge()
308 memcpy(data, sge->vaddr, len); in qib_copy_from_sge()
309 sge->vaddr += len; in qib_copy_from_sge()
310 sge->length -= len; in qib_copy_from_sge()
311 sge->sge_length -= len; in qib_copy_from_sge()
312 if (sge->sge_length == 0) { in qib_copy_from_sge()
314 *sge = *ss->sg_list++; in qib_copy_from_sge()
315 } else if (sge->length == 0 && sge->mr->lkey) { in qib_copy_from_sge()
316 if (++sge->n >= QIB_SEGSZ) { in qib_copy_from_sge()
317 if (++sge->m >= sge->mr->mapsz) in qib_copy_from_sge()
319 sge->n = 0; in qib_copy_from_sge()
321 sge->vaddr = in qib_copy_from_sge()
322 sge->mr->map[sge->m]->segs[sge->n].vaddr; in qib_copy_from_sge()
323 sge->length = in qib_copy_from_sge()
324 sge->mr->map[sge->m]->segs[sge->n].length; in qib_copy_from_sge()
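
qib_copy_from_sge() is the gather direction: the same clamping walk, with the memcpy() operands swapped so data is collected out of the SG list into a flat buffer, and with no qib_put_mr() on the exhausted-SGE branch (the matched lines show none, so the caller evidently keeps its MR references). A sketch, again on the model types above:

/* Gather 'length' bytes from the SG list into 'data' (mirrors
 * qib_copy_from_sge()); the state advances exactly as in the scatter
 * variant. */
static void model_copy_from_sge(void *data, struct model_sge_state *ss,
				uint32_t length)
{
	struct model_sge *sge = &ss->sge;

	while (length) {
		uint32_t len = sge->length;

		if (len > length)
			len = length;
		if (len > sge->sge_length)
			len = sge->sge_length;

		memcpy(data, sge->vaddr, len);  /* direction reversed */
		sge->vaddr += len;
		sge->length -= len;
		sge->sge_length -= len;
		if (!model_advance(ss))
			break;
		data = (char *)data + len;
		length -= len;
	}
}
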
437 struct qib_sge *sge = &wqe->sg_list[--j]; in qib_post_one_send() local
439 qib_put_mr(sge->mr); in qib_post_one_send()
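
The two qib_post_one_send() lines are an error-unwind path: each SGE copied into the work request holds a reference on its memory region, so if a later entry fails validation, the entries already stored in wqe->sg_list must have their references dropped before the request is rejected. A sketch of that unwind on the model types, with a hypothetical model_put_mr() standing in for qib_put_mr():

/* Hypothetical stand-in for qib_put_mr(); in the driver this drops the
 * reference taken when the SGE was checked against its memory region. */
static void model_put_mr(struct model_mr *mr)
{
	(void)mr;
}

/* Drop the references held by the 'j' SGEs already set up in a work
 * request before it was abandoned (mirrors the matched lines). */
static void model_unwind_sg_list(struct model_sge *sg_list, unsigned int j)
{
	while (j) {
		struct model_sge *sge = &sg_list[--j];

		model_put_mr(sge->mr);
	}
}
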
734 struct qib_sge *sge = &ss->sge; in update_sge() local
736 sge->vaddr += length; in update_sge()
737 sge->length -= length; in update_sge()
738 sge->sge_length -= length; in update_sge()
739 if (sge->sge_length == 0) { in update_sge()
741 *sge = *ss->sg_list++; in update_sge()
742 } else if (sge->length == 0 && sge->mr->lkey) { in update_sge()
743 if (++sge->n >= QIB_SEGSZ) { in update_sge()
744 if (++sge->m >= sge->mr->mapsz) in update_sge()
746 sge->n = 0; in update_sge()
748 sge->vaddr = sge->mr->map[sge->m]->segs[sge->n].vaddr; in update_sge()
749 sge->length = sge->mr->map[sge->m]->segs[sge->n].length; in update_sge()
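
update_sge() is the same advance with the clamping loop removed: the callers on the PIO send path (copy_io() and qib_verbs_send_pio(), whose fragments follow) have already bounded 'length' to the current chunk, so only the bookkeeping and the two-branch advance are needed. On the model types:

/* Advance by 'length' bytes that the caller has already bounded to the
 * current chunk (mirrors update_sge()). */
static void model_update_sge(struct model_sge_state *ss, uint32_t length)
{
	struct model_sge *sge = &ss->sge;

	sge->vaddr += length;
	sge->length -= length;
	sge->sge_length -= length;
	(void)model_advance(ss);
}
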
797 u32 len = ss->sge.length; in copy_io()
802 if (len > ss->sge.sge_length) in copy_io()
803 len = ss->sge.sge_length; in copy_io()
806 off = (unsigned long)ss->sge.vaddr & (sizeof(u32) - 1); in copy_io()
808 u32 *addr = (u32 *)((unsigned long)ss->sge.vaddr & in copy_io()
839 u32 *addr = (u32 *) ss->sge.vaddr; in copy_io()
892 qib_pio_copy(piobuf, ss->sge.vaddr, w - 1); in copy_io()
894 last = ((u32 *) ss->sge.vaddr)[w - 1]; in copy_io()
899 qib_pio_copy(piobuf, ss->sge.vaddr, w); in copy_io()
904 u32 v = ((u32 *) ss->sge.vaddr)[w]; in copy_io()
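
The copy_io() fragments handle the awkward PIO case: the chip's PIO buffer is written in whole 32-bit words, but an SGE's vaddr may not be word-aligned (hence off = vaddr & (sizeof(u32) - 1) and the re-read from the rounded-down address) and a chunk may end mid-word. The sketch below shows only the core idea, assembling words from an arbitrarily aligned source, in portable form; the real function additionally carries partial words across chunk boundaries, uses qib_pio_copy() for runs of whole words, and holds back the final word (the 'last' / w - 1 lines) so it can be written with the flush/trigger store.

/* Write 'nbytes' from an arbitrarily aligned source to a word-granular
 * sink (stand-in for the PIO buffer), zero-padding the tail word.  A
 * simplified model of the alignment handling in copy_io(). */
static void model_copy_words(volatile uint32_t *piobuf,
			     const void *src, uint32_t nbytes)
{
	while (nbytes >= sizeof(uint32_t)) {
		uint32_t w;

		memcpy(&w, src, sizeof(w));     /* safe for any source alignment */
		*piobuf++ = w;
		src = (const char *)src + sizeof(w);
		nbytes -= sizeof(w);
	}
	if (nbytes) {
		uint32_t w = 0;

		memcpy(&w, src, nbytes);        /* partial tail, zero padded */
		*piobuf = w;
	}
}
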
1322 if (likely(ss->num_sge == 1 && len <= ss->sge.length && in qib_verbs_send_pio()
1323 !((unsigned long)ss->sge.vaddr & (sizeof(u32) - 1)))) { in qib_verbs_send_pio()
1324 u32 *addr = (u32 *) ss->sge.vaddr; in qib_verbs_send_pio()
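
qib_verbs_send_pio() has a fast path for the common case: if the payload lives in a single SGE, fits inside that SGE's current chunk, and starts on a 32-bit boundary, the state can be advanced in one step and the data copied straight to the PIO buffer, bypassing the word assembly in copy_io(). A sketch reusing the model helpers above; the dword rounding assumes, as the driver's dword count does, that the registered buffer is readable out to a word boundary, and the ordering of the update relative to the copy is taken from the surrounding (elided) code, so treat it as an assumption.

/* Fast path mirroring the likely() test in qib_verbs_send_pio(); the
 * general case would fall back to a copy_io()-style walk (not modeled). */
static void model_send_payload(volatile uint32_t *piobuf,
			       struct model_sge_state *ss, uint32_t len)
{
	if (ss->num_sge == 1 && len <= ss->sge.length &&
	    !((uintptr_t)ss->sge.vaddr & (sizeof(uint32_t) - 1))) {
		const uint32_t *addr = (const uint32_t *)(void *)ss->sge.vaddr;
		uint32_t dwords = (len + 3) >> 2;
		uint32_t i;

		/* advance the state, then stream whole dwords to the chip */
		model_update_sge(ss, len);
		for (i = 0; i < dwords; i++)    /* qib_pio_copy() stand-in */
			piobuf[i] = addr[i];
	} else {
		/* multiple or misaligned SGEs: the driver walks them with
		 * copy_io(), assembling words as in the sketch above */
	}
}
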