• Home
  • Raw
  • Download

Lines matching references to: r

53 void ring_buffer_init(struct ring_buffer* r) {  in ring_buffer_init()  argument
54 r->guest_version = 1; in ring_buffer_init()
55 r->write_pos = 0; in ring_buffer_init()
56 r->read_pos = 0; in ring_buffer_init()
58 r->read_live_count = 0; in ring_buffer_init()
59 r->read_yield_count = 0; in ring_buffer_init()
60 r->read_sleep_us_count = 0; in ring_buffer_init()
62 r->state = 0; in ring_buffer_init()
69 bool ring_buffer_can_write(const struct ring_buffer* r, uint32_t bytes) { in ring_buffer_can_write() argument
71 __atomic_load(&r->read_pos, &read_view, __ATOMIC_SEQ_CST); in ring_buffer_can_write()
72 return get_ring_pos(read_view - r->write_pos - 1) >= bytes; in ring_buffer_can_write()
75 bool ring_buffer_can_read(const struct ring_buffer* r, uint32_t bytes) { in ring_buffer_can_read() argument
77 __atomic_load(&r->write_pos, &write_view, __ATOMIC_SEQ_CST); in ring_buffer_can_read()
78 return get_ring_pos(write_view - r->read_pos) >= bytes; in ring_buffer_can_read()
82 struct ring_buffer* r, const void* data, uint32_t step_size, uint32_t steps) { in ring_buffer_write() argument
87 if (!ring_buffer_can_write(r, step_size)) { in ring_buffer_write()
94 RING_BUFFER_SIZE - get_ring_pos(r->write_pos); in ring_buffer_write()
99 &r->buf[get_ring_pos(r->write_pos)], in ring_buffer_write()
103 &r->buf[get_ring_pos(r->write_pos + available_at_end)], in ring_buffer_write()
108 &r->buf[get_ring_pos(r->write_pos)], in ring_buffer_write()
113 __atomic_add_fetch(&r->write_pos, step_size, __ATOMIC_SEQ_CST); in ring_buffer_write()
121 struct ring_buffer* r, void* data, uint32_t step_size, uint32_t steps) { in ring_buffer_read() argument
126 if (!ring_buffer_can_read(r, step_size)) { in ring_buffer_read()
133 RING_BUFFER_SIZE - get_ring_pos(r->read_pos); in ring_buffer_read()
139 &r->buf[get_ring_pos(r->read_pos)], in ring_buffer_read()
143 &r->buf[get_ring_pos(r->read_pos + available_at_end)], in ring_buffer_read()
148 &r->buf[get_ring_pos(r->read_pos)], in ring_buffer_read()
152 __atomic_add_fetch(&r->read_pos, step_size, __ATOMIC_SEQ_CST); in ring_buffer_read()
160 struct ring_buffer* r, uint32_t step_size, uint32_t steps) { in ring_buffer_advance_write() argument
164 if (!ring_buffer_can_write(r, step_size)) { in ring_buffer_advance_write()
169 __atomic_add_fetch(&r->write_pos, step_size, __ATOMIC_SEQ_CST); in ring_buffer_advance_write()
177 struct ring_buffer* r, uint32_t step_size, uint32_t steps) { in ring_buffer_advance_read() argument
181 if (!ring_buffer_can_read(r, step_size)) { in ring_buffer_advance_read()
186 __atomic_add_fetch(&r->read_pos, step_size, __ATOMIC_SEQ_CST); in ring_buffer_advance_read()
207 struct ring_buffer* r, in ring_buffer_view_init() argument
214 ring_buffer_init(r); in ring_buffer_view_init()
240 const struct ring_buffer* r, in ring_buffer_view_can_write() argument
244 __atomic_load(&r->read_pos, &read_view, __ATOMIC_SEQ_CST); in ring_buffer_view_can_write()
246 v, read_view - r->write_pos - 1) >= bytes; in ring_buffer_view_can_write()
250 const struct ring_buffer* r, in ring_buffer_view_can_read() argument
254 __atomic_load(&r->write_pos, &write_view, __ATOMIC_SEQ_CST); in ring_buffer_view_can_read()
256 v, write_view - r->read_pos) >= bytes; in ring_buffer_view_can_read()
260 const struct ring_buffer* r, in ring_buffer_available_read() argument
263 __atomic_load(&r->write_pos, &write_view, __ATOMIC_SEQ_CST); in ring_buffer_available_read()
266 v, write_view - r->read_pos); in ring_buffer_available_read()
268 return get_ring_pos(write_view - r->read_pos); in ring_buffer_available_read()
273 const struct ring_buffer* r, in ring_buffer_copy_contents() argument
279 ring_buffer_available_read(r, v); in ring_buffer_copy_contents()
284 v->size - ring_buffer_view_get_ring_pos(v, r->read_pos); in ring_buffer_copy_contents()
287 RING_BUFFER_SIZE - get_ring_pos(r->write_pos); in ring_buffer_copy_contents()
298 &v->buf[ring_buffer_view_get_ring_pos(v, r->read_pos)], in ring_buffer_copy_contents()
301 &v->buf[ring_buffer_view_get_ring_pos(v, r->read_pos + available_at_end)], in ring_buffer_copy_contents()
305 &v->buf[ring_buffer_view_get_ring_pos(v, r->read_pos)], in ring_buffer_copy_contents()
312 &r->buf[get_ring_pos(r->read_pos)], in ring_buffer_copy_contents()
315 &r->buf[get_ring_pos(r->read_pos + available_at_end)], in ring_buffer_copy_contents()
319 &r->buf[get_ring_pos(r->read_pos)], in ring_buffer_copy_contents()
327 struct ring_buffer* r, in ring_buffer_view_write() argument
335 if (!ring_buffer_view_can_write(r, v, step_size)) { in ring_buffer_view_write()
342 v->size - ring_buffer_view_get_ring_pos(v, r->write_pos); in ring_buffer_view_write()
347 &v->buf[ring_buffer_view_get_ring_pos(v, r->write_pos)], in ring_buffer_view_write()
351 &v->buf[ring_buffer_view_get_ring_pos(v, r->write_pos + available_at_end)], in ring_buffer_view_write()
356 &v->buf[ring_buffer_view_get_ring_pos(v, r->write_pos)], in ring_buffer_view_write()
361 __atomic_add_fetch(&r->write_pos, step_size, __ATOMIC_SEQ_CST); in ring_buffer_view_write()
370 struct ring_buffer* r, in ring_buffer_view_read() argument
377 if (!ring_buffer_view_can_read(r, v, step_size)) { in ring_buffer_view_read()
384 v->size - ring_buffer_view_get_ring_pos(v, r->read_pos); in ring_buffer_view_read()
390 &v->buf[ring_buffer_view_get_ring_pos(v, r->read_pos)], in ring_buffer_view_read()
394 &v->buf[ring_buffer_view_get_ring_pos(v, r->read_pos + available_at_end)], in ring_buffer_view_read()
398 &v->buf[ring_buffer_view_get_ring_pos(v, r->read_pos)], in ring_buffer_view_read()
401 __atomic_add_fetch(&r->read_pos, step_size, __ATOMIC_SEQ_CST); in ring_buffer_view_read()
419 const struct ring_buffer* r, in ring_buffer_wait_write() argument
425 v ? ring_buffer_view_can_write(r, v, bytes) : in ring_buffer_wait_write()
426 ring_buffer_can_write(r, bytes); in ring_buffer_wait_write()
431 v ? ring_buffer_view_can_write(r, v, bytes) : in ring_buffer_wait_write()
432 ring_buffer_can_write(r, bytes); in ring_buffer_wait_write()
439 const struct ring_buffer* r, in ring_buffer_wait_read() argument
445 v ? ring_buffer_view_can_read(r, v, bytes) : in ring_buffer_wait_read()
446 ring_buffer_can_read(r, bytes); in ring_buffer_wait_read()
451 v ? ring_buffer_view_can_read(r, v, bytes) : in ring_buffer_wait_read()
452 ring_buffer_can_read(r, bytes); in ring_buffer_wait_read()
455 ((struct ring_buffer*)r)->read_live_count++; in ring_buffer_wait_read()
460 struct ring_buffer* r, in get_step_size() argument
471 struct ring_buffer* r, in ring_buffer_write_fully() argument
475 ring_buffer_write_fully_with_abort(r, v, data, bytes, 0, 0); in ring_buffer_write_fully()
479 struct ring_buffer* r, in ring_buffer_read_fully() argument
483 ring_buffer_read_fully_with_abort(r, v, data, bytes, 0, 0); in ring_buffer_read_fully()
487 struct ring_buffer* r, in ring_buffer_write_fully_with_abort() argument
494 uint32_t candidate_step = get_step_size(r, v, bytes); in ring_buffer_write_fully_with_abort()
505 ring_buffer_wait_write(r, v, candidate_step, (uint64_t)(-1)); in ring_buffer_write_fully_with_abort()
508 processed_here = ring_buffer_view_write(r, v, dst + processed, candidate_step, 1); in ring_buffer_write_fully_with_abort()
510 processed_here = ring_buffer_write(r, dst + processed, candidate_step, 1); in ring_buffer_write_fully_with_abort()
524 struct ring_buffer* r, in ring_buffer_read_fully_with_abort() argument
531 uint32_t candidate_step = get_step_size(r, v, bytes); in ring_buffer_read_fully_with_abort()
543 ring_buffer_wait_read(r, v, candidate_step, (uint64_t)(-1)); in ring_buffer_read_fully_with_abort()
546 processed_here = ring_buffer_view_read(r, v, dst + processed, candidate_step, 1); in ring_buffer_read_fully_with_abort()
548 processed_here = ring_buffer_read(r, dst + processed, candidate_step, 1); in ring_buffer_read_fully_with_abort()
561 void ring_buffer_sync_init(struct ring_buffer* r) { in ring_buffer_sync_init() argument
562 __atomic_store_n(&r->state, RING_BUFFER_SYNC_PRODUCER_IDLE, __ATOMIC_SEQ_CST); in ring_buffer_sync_init()
565 bool ring_buffer_producer_acquire(struct ring_buffer* r) { in ring_buffer_producer_acquire() argument
568 &r->state, in ring_buffer_producer_acquire()
577 bool ring_buffer_producer_acquire_from_hangup(struct ring_buffer* r) { in ring_buffer_producer_acquire_from_hangup() argument
580 &r->state, in ring_buffer_producer_acquire_from_hangup()
589 void ring_buffer_producer_wait_hangup(struct ring_buffer* r) { in ring_buffer_producer_wait_hangup() argument
590 while (__atomic_load_n(&r->state, __ATOMIC_SEQ_CST) != in ring_buffer_producer_wait_hangup()
596 void ring_buffer_producer_idle(struct ring_buffer* r) { in ring_buffer_producer_idle() argument
597 __atomic_store_n(&r->state, RING_BUFFER_SYNC_PRODUCER_IDLE, __ATOMIC_SEQ_CST); in ring_buffer_producer_idle()
600 bool ring_buffer_consumer_hangup(struct ring_buffer* r) { in ring_buffer_consumer_hangup() argument
603 &r->state, in ring_buffer_consumer_hangup()
612 void ring_buffer_consumer_wait_producer_idle(struct ring_buffer* r) { in ring_buffer_consumer_wait_producer_idle() argument
613 while (__atomic_load_n(&r->state, __ATOMIC_SEQ_CST) != in ring_buffer_consumer_wait_producer_idle()
619 void ring_buffer_consumer_hung_up(struct ring_buffer* r) { in ring_buffer_consumer_hung_up() argument
620 __atomic_store_n(&r->state, RING_BUFFER_SYNC_CONSUMER_HUNG_UP, __ATOMIC_SEQ_CST); in ring_buffer_consumer_hung_up()