Lines matching references to pt

34 struct lws_context_per_thread *pt = lws_container_of(timer, in lws_uv_sultimer_cb() local
38 lws_pt_lock(pt, __func__); in lws_uv_sultimer_cb()
39 us = __lws_sul_service_ripe(&pt->pt_sul_owner, lws_now_usecs()); in lws_uv_sultimer_cb()
41 uv_timer_start(&pt->uv.sultimer, lws_uv_sultimer_cb, in lws_uv_sultimer_cb()
43 lws_pt_unlock(pt); in lws_uv_sultimer_cb()
53 struct lws_context_per_thread *pt = lws_container_of(handle, in lws_uv_idle() local
57 lws_service_do_ripe_rxflow(pt); in lws_uv_idle()
62 if (!lws_service_adjust_timeout(pt->context, 1, pt->tid)) in lws_uv_idle()
64 _lws_plat_service_forced_tsi(pt->context, pt->tid); in lws_uv_idle()
68 lws_pt_lock(pt, __func__); in lws_uv_idle()
69 us = __lws_sul_service_ripe(&pt->pt_sul_owner, lws_now_usecs()); in lws_uv_idle()
71 uv_timer_start(&pt->uv.sultimer, lws_uv_sultimer_cb, in lws_uv_idle()
73 lws_pt_unlock(pt); in lws_uv_idle()
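
The lws_uv_sultimer_cb() and lws_uv_idle() matches above show how lws drives its scheduled-event list ("sul") from libuv: under the per-thread lock it services whatever is ripe (__lws_sul_service_ripe()) and then re-arms pt->uv.sultimer for the next deadline. Below is a minimal standalone sketch of that re-arm pattern; struct scheduler, pending and service_ripe() are illustrative stand-ins, not lws internals.

#include <stdio.h>
#include <uv.h>

/* illustrative stand-in for the per-thread scheduled-event state */
struct scheduler {
    uv_timer_t sultimer;
    int pending;                      /* pretend event count */
};

/* stand-in for a ripe-event servicer: run what is due and return the
 * delay in ms until the next event, or 0 if nothing is left */
static uint64_t service_ripe(struct scheduler *s)
{
    if (!s->pending)
        return 0;
    s->pending--;
    printf("serviced one event, %d left\n", s->pending);
    return 100;
}

static void sultimer_cb(uv_timer_t *timer)
{
    struct scheduler *s = timer->data;
    uint64_t next_ms = service_ripe(s);

    /* re-arm the timer for the next deadline (one-shot here) */
    if (next_ms)
        uv_timer_start(timer, sultimer_cb, next_ms, 0);
}

int main(void)
{
    struct scheduler s = { .pending = 3 };

    uv_timer_init(uv_default_loop(), &s.sultimer);
    s.sultimer.data = &s;
    uv_timer_start(&s.sultimer, sultimer_cb, 100, 0);

    uv_run(uv_default_loop(), UV_RUN_DEFAULT);
    return 0;
}
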
84 struct lws_context_per_thread *pt = &context->pt[(int)wsi->tsi]; in lws_io_cb() local
87 if (pt->is_destroyed) in lws_io_cb()
122 if (pt->destroy_self) { in lws_io_cb()
123 lws_context_destroy(pt->context); in lws_io_cb()
127 uv_idle_start(&pt->uv.idle, lws_uv_idle); in lws_io_cb()
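
lws_io_cb() above is the uv_poll callback attached to a connection's fd: it bails out early if the per-thread structure is being destroyed (pt->is_destroyed), destroys the context when pt->destroy_self is set, and otherwise kicks pt->uv.idle so deferred servicing runs once the immediate I/O has been handled. A hedged sketch of that poll-then-idle shape, using a plain POSIX pipe in place of a wsi's socket:

#include <stdint.h>
#include <stdio.h>
#include <unistd.h>
#include <uv.h>

static uv_idle_t idle_handle;

static void idle_cb(uv_idle_t *handle)
{
    /* deferred follow-up work runs here; stop so the loop can sleep */
    printf("deferred work after the I/O burst\n");
    uv_idle_stop(handle);
}

static void io_cb(uv_poll_t *watcher, int status, int events)
{
    char buf[16];
    ssize_t n;

    if (status < 0)
        return;                        /* watcher error, nothing to do */

    if (events & UV_READABLE) {
        n = read((int)(intptr_t)watcher->data, buf, sizeof(buf));
        (void)n;
        uv_poll_stop(watcher);         /* demo: one event is enough */
        /* kick the idle handle, as lws_io_cb() does with pt->uv.idle */
        uv_idle_start(&idle_handle, idle_cb);
    }
}

int main(void)
{
    uv_poll_t watcher;
    int fds[2];

    if (pipe(fds))
        return 1;
    (void)write(fds[1], "x", 1);        /* make the read end readable */

    uv_idle_init(uv_default_loop(), &idle_handle);
    uv_poll_init(uv_default_loop(), &watcher, fds[0]);
    watcher.data = (void *)(intptr_t)fds[0];
    uv_poll_start(&watcher, UV_READABLE, io_cb);

    uv_run(uv_default_loop(), UV_RUN_DEFAULT);

    close(fds[0]);
    close(fds[1]);
    return 0;
}
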
139 struct lws_context_per_thread *pt; in lws_libuv_stop() local
159 pt = &context->pt[m]; in lws_libuv_stop()
161 if (pt->pipe_wsi) { in lws_libuv_stop()
162 uv_poll_stop(pt->pipe_wsi->w_read.uv.pwatcher); in lws_libuv_stop()
163 lws_destroy_event_pipe(pt->pipe_wsi); in lws_libuv_stop()
164 pt->pipe_wsi = NULL; in lws_libuv_stop()
167 for (n = 0; (unsigned int)n < context->pt[m].fds_count; n++) { in lws_libuv_stop()
168 struct lws *wsi = wsi_from_fd(context, pt->fds[n].fd); in lws_libuv_stop()
230 struct lws_context_per_thread *pt = &context->pt[n]; in lws_uv_close_cb_sa() local
232 if (pt->uv.io_loop && !pt->event_loop_foreign) in lws_uv_close_cb_sa()
233 uv_stop(pt->uv.io_loop); in lws_uv_close_cb_sa()
236 if (!context->pt[0].event_loop_foreign) { in lws_uv_close_cb_sa()
287 if (context->pt[tsi].uv.io_loop) in lws_libuv_stop_without_kill()
288 uv_stop(context->pt[tsi].uv.io_loop); in lws_libuv_stop_without_kill()
296 if (context->pt[tsi].uv.io_loop) in lws_uv_getloop()
297 return context->pt[tsi].uv.io_loop; in lws_uv_getloop()
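
lws_uv_close_cb_sa() above stops the per-thread loop only when it exists and is not foreign (pt->event_loop_foreign), while lws_libuv_stop_without_kill() and lws_uv_getloop() guard on pt->uv.io_loop being set at all: lws never stops or closes a loop the application supplied. A small sketch of that ownership guard; struct per_thread here is an illustrative stand-in, not the lws definition.

#include <stdlib.h>
#include <uv.h>

/* illustrative stand-in for the fields checked in the listing */
struct per_thread {
    uv_loop_t *io_loop;
    int        event_loop_foreign;   /* 1: the application owns this loop */
};

/* stop the loop only if it exists and we created it ourselves */
static void stop_own_loop(struct per_thread *pt)
{
    if (pt->io_loop && !pt->event_loop_foreign)
        uv_stop(pt->io_loop);
}

int main(void)
{
    struct per_thread pt = { 0 };

    /* case 1: a loop the library allocated itself may be stopped */
    pt.io_loop = malloc(sizeof(*pt.io_loop));
    uv_loop_init(pt.io_loop);
    pt.event_loop_foreign = 0;
    stop_own_loop(&pt);

    uv_loop_close(pt.io_loop);
    free(pt.io_loop);

    /* case 2: a foreign loop is left untouched by design */
    pt.io_loop = uv_default_loop();
    pt.event_loop_foreign = 1;
    stop_own_loop(&pt);               /* no-op */

    return 0;
}
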
485 context->pt[n].w_sigint.context = context; in elops_init_context_uv()
493 struct lws_context_per_thread *pt; in elops_destroy_context1_uv() local
498 pt = &context->pt[n]; in elops_destroy_context1_uv()
502 if (!pt->event_loop_foreign) { in elops_destroy_context1_uv()
504 while (budget-- && (m = uv_run(pt->uv.io_loop, in elops_destroy_context1_uv()
515 return !context->pt[0].event_loop_foreign; in elops_destroy_context1_uv()
521 struct lws_context_per_thread *pt; in elops_destroy_context2_uv() local
525 pt = &context->pt[n]; in elops_destroy_context2_uv()
529 if (!pt->event_loop_foreign && pt->uv.io_loop) { in elops_destroy_context2_uv()
532 uv_stop(pt->uv.io_loop); in elops_destroy_context2_uv()
535 uv_loop_close(pt->uv.io_loop); in elops_destroy_context2_uv()
537 lws_free_set_NULL(pt->uv.io_loop); in elops_destroy_context2_uv()
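
elops_destroy_context1_uv() above spins the loop for a bounded number of passes (the budget around uv_run()) so outstanding close callbacks get to run, and elops_destroy_context2_uv() then stops, closes and frees the loop, again only when it is not foreign. A hedged sketch of that budgeted drain-then-close sequence; the uv_walk() close callback and the budget of 100 are illustrative choices, not values taken from the lws source.

#include <stdio.h>
#include <stdlib.h>
#include <uv.h>

static void on_closed(uv_handle_t *h)
{
    (void)h;                            /* nothing else owns the handle */
}

/* ask every handle that is still open to close */
static void close_walk_cb(uv_handle_t *h, void *arg)
{
    (void)arg;
    if (!uv_is_closing(h))
        uv_close(h, on_closed);
}

int main(void)
{
    uv_loop_t *loop = malloc(sizeof(*loop));
    uv_timer_t timer;
    int budget = 100;

    uv_loop_init(loop);
    uv_timer_init(loop, &timer);         /* an open handle to drain out */

    uv_walk(loop, close_walk_cb, NULL);  /* request all handles close */

    /* run the loop a bounded number of times so the close callbacks
     * complete, in the spirit of the budgeted uv_run() in the listing */
    while (budget-- && uv_run(loop, UV_RUN_NOWAIT))
        ;

    uv_stop(loop);                        /* mirrors the listing's order */
    if (uv_loop_close(loop))              /* nonzero: handles still open */
        fprintf(stderr, "loop not fully drained\n");
    free(loop);

    return 0;
}
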
614 struct lws_context_per_thread *pt = &wsi->context->pt[(int)wsi->tsi]; in elops_accept_uv() local
624 uv_poll_init(pt->uv.io_loop, wsi->w_read.uv.pwatcher, in elops_accept_uv()
627 uv_poll_init_socket(pt->uv.io_loop, in elops_accept_uv()
639 struct lws_context_per_thread *pt = &wsi->context->pt[(int)wsi->tsi]; in elops_io_uv() local
647 if (!pt->uv.io_loop || !w->context) { in elops_io_uv()
692 struct lws_context_per_thread *pt; in elops_init_vhost_listen_wsi_uv() local
700 pt = &wsi->context->pt[(int)wsi->tsi]; in elops_init_vhost_listen_wsi_uv()
701 if (!pt->uv.io_loop) in elops_init_vhost_listen_wsi_uv()
711 n = uv_poll_init_socket(pt->uv.io_loop, wsi->w_read.uv.pwatcher, in elops_init_vhost_listen_wsi_uv()
730 if (context->pt[tsi].uv.io_loop) in elops_run_pt_uv()
731 uv_run(context->pt[tsi].uv.io_loop, 0); in elops_run_pt_uv()
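
elops_accept_uv() and elops_init_vhost_listen_wsi_uv() above attach a uv_poll watcher to a socket that already exists (uv_poll_init() for a plain fd, uv_poll_init_socket() for a socket handle, a distinction that mainly matters on Windows), and elops_run_pt_uv() then just calls uv_run(loop, 0), where 0 is UV_RUN_DEFAULT. The sketch below wires a pre-existing socket into the default loop the same way; the socketpair stands in for a listen or accepted socket and is not how lws creates its fds.

#include <stdio.h>
#include <sys/socket.h>
#include <unistd.h>
#include <uv.h>

static void sock_cb(uv_poll_t *watcher, int status, int events)
{
    if (status < 0)
        return;

    if (events & UV_READABLE) {
        /* a real server would accept()/read() here and give each new
         * connection its own uv_poll watcher, as elops_accept_uv()
         * does per wsi */
        printf("socket is readable\n");
        uv_poll_stop(watcher);          /* let the demo loop fall idle */
    }
}

int main(void)
{
    uv_poll_t watcher;
    int sv[2];

    /* a connected socket pair stands in for the listen socket */
    if (socketpair(AF_UNIX, SOCK_STREAM, 0, sv))
        return 1;
    send(sv[1], "x", 1, 0);             /* make sv[0] readable */

    /* attach the already-created socket, as the listing does */
    uv_poll_init_socket(uv_default_loop(), &watcher, sv[0]);
    uv_poll_start(&watcher, UV_READABLE, sock_cb);

    /* same mode as elops_run_pt_uv(): 0 == UV_RUN_DEFAULT */
    uv_run(uv_default_loop(), UV_RUN_DEFAULT);

    close(sv[0]);
    close(sv[1]);
    return 0;
}
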
737 struct lws_context_per_thread *pt = &context->pt[tsi]; in elops_destroy_pt_uv() local
745 if (!pt->uv.io_loop) in elops_destroy_pt_uv()
748 if (pt->event_loop_destroy_processing_done) in elops_destroy_pt_uv()
751 pt->event_loop_destroy_processing_done = 1; in elops_destroy_pt_uv()
753 if (!pt->event_loop_foreign) { in elops_destroy_pt_uv()
754 uv_signal_stop(&pt->w_sigint.uv.watcher); in elops_destroy_pt_uv()
762 uv_signal_stop(&pt->uv.signals[m]); in elops_destroy_pt_uv()
763 uv_close((uv_handle_t *)&pt->uv.signals[m], in elops_destroy_pt_uv()
769 uv_timer_stop(&pt->uv.sultimer); in elops_destroy_pt_uv()
770 uv_close((uv_handle_t *)&pt->uv.sultimer, lws_uv_close_cb_sa); in elops_destroy_pt_uv()
772 uv_idle_stop(&pt->uv.idle); in elops_destroy_pt_uv()
773 uv_close((uv_handle_t *)&pt->uv.idle, lws_uv_close_cb_sa); in elops_destroy_pt_uv()
786 struct lws_context_per_thread *pt = &context->pt[tsi]; in elops_init_pt_uv() local
791 if (!pt->uv.io_loop) { in elops_init_pt_uv()
804 pt->event_loop_foreign = 0; in elops_init_pt_uv()
807 pt->event_loop_foreign = 1; in elops_init_pt_uv()
810 pt->uv.io_loop = loop; in elops_init_pt_uv()
811 uv_idle_init(loop, &pt->uv.idle); in elops_init_pt_uv()
812 LWS_UV_REFCOUNT_STATIC_HANDLE_NEW(&pt->uv.idle, context); in elops_init_pt_uv()
820 if (!pt->event_loop_foreign) { in elops_init_pt_uv()
821 assert(ns <= (int)LWS_ARRAY_SIZE(pt->uv.signals)); in elops_init_pt_uv()
823 uv_signal_init(loop, &pt->uv.signals[n]); in elops_init_pt_uv()
824 LWS_UV_REFCOUNT_STATIC_HANDLE_NEW(&pt->uv.signals[n], in elops_init_pt_uv()
826 pt->uv.signals[n].data = pt->context; in elops_init_pt_uv()
827 uv_signal_start(&pt->uv.signals[n], in elops_init_pt_uv()
850 uv_timer_init(pt->uv.io_loop, &pt->uv.sultimer); in elops_init_pt_uv()
851 LWS_UV_REFCOUNT_STATIC_HANDLE_NEW(&pt->uv.sultimer, context); in elops_init_pt_uv()
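
elops_init_pt_uv() above either creates a loop for the thread or adopts a caller-supplied one, recording which case applies in pt->event_loop_foreign, then initialises the static per-thread handles: the idle handle, the signal watchers (only on a loop lws owns) and the sul timer. elops_destroy_pt_uv() earlier in the listing undoes this by stopping each handle and closing it with a close callback. A condensed, hedged sketch of that handle lifecycle on one loop; the foreign-loop branch, the choice of SIGINT and the close accounting are simplified illustrations rather than the lws logic.

#include <signal.h>
#include <stdlib.h>
#include <uv.h>

/* illustrative per-thread state, loosely modelled on the listing */
struct per_thread {
    uv_loop_t   *io_loop;
    uv_idle_t    idle;
    uv_timer_t   sultimer;
    uv_signal_t  sigint_watcher;
    int          event_loop_foreign;
};

static void on_static_closed(uv_handle_t *h)
{
    (void)h;                       /* one of the static handles is gone */
}

static void idle_cb(uv_idle_t *h)      { uv_idle_stop(h); }
static void sultimer_cb(uv_timer_t *h) { (void)h; }

static void signal_cb(uv_signal_t *h, int signum)
{
    (void)signum;
    uv_stop(h->loop);              /* request an orderly shutdown */
}

/* init: adopt a foreign loop if given one, otherwise create our own,
 * then set up the idle, signal and timer handles */
static void init_pt(struct per_thread *pt, uv_loop_t *foreign)
{
    if (foreign) {
        pt->io_loop = foreign;
        pt->event_loop_foreign = 1;
    } else {
        pt->io_loop = malloc(sizeof(*pt->io_loop));
        uv_loop_init(pt->io_loop);
        pt->event_loop_foreign = 0;
    }

    uv_idle_init(pt->io_loop, &pt->idle);
    uv_timer_init(pt->io_loop, &pt->sultimer);

    /* signal handlers only go on a loop we own, as in the listing */
    if (!pt->event_loop_foreign) {
        uv_signal_init(pt->io_loop, &pt->sigint_watcher);
        uv_signal_start(&pt->sigint_watcher, signal_cb, SIGINT);
    }

    uv_idle_start(&pt->idle, idle_cb);
    uv_timer_start(&pt->sultimer, sultimer_cb, 10, 0);
}

/* destroy: stop every static handle and close it, as the
 * uv_*_stop()/uv_close() matches above do */
static void destroy_pt(struct per_thread *pt)
{
    if (!pt->event_loop_foreign) {
        uv_signal_stop(&pt->sigint_watcher);
        uv_close((uv_handle_t *)&pt->sigint_watcher, on_static_closed);
    }

    uv_timer_stop(&pt->sultimer);
    uv_close((uv_handle_t *)&pt->sultimer, on_static_closed);

    uv_idle_stop(&pt->idle);
    uv_close((uv_handle_t *)&pt->idle, on_static_closed);
}

int main(void)
{
    struct per_thread pt = { 0 };

    init_pt(&pt, NULL);                 /* no foreign loop in this demo */
    uv_run(pt.io_loop, UV_RUN_NOWAIT);  /* let the handles tick once */

    destroy_pt(&pt);
    uv_run(pt.io_loop, UV_RUN_DEFAULT); /* drain the close callbacks */

    uv_loop_close(pt.io_loop);
    free(pt.io_loop);
    return 0;
}
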
861 struct lws_context_per_thread *pt = &context->pt[(int)wsi->tsi]; in lws_libuv_closewsi() local
881 lws_pt_lock(pt, __func__); in lws_libuv_closewsi()
883 lws_pt_unlock(pt); in lws_libuv_closewsi()
925 context->pt[0].event_loop_foreign) { in lws_libuv_closewsi()