/*
 *
 * Copyright 2015 gRPC authors.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 *
 */

#include <grpc/support/port_platform.h>

#include <grpc/support/alloc.h>
#include <grpc/support/log.h>
#include "src/core/lib/channel/channel_stack.h"
#include "src/core/lib/gpr/alloc.h"

#include <stdlib.h>
#include <string.h>

grpc_core::TraceFlag grpc_trace_channel(false, "channel");

/* Memory layouts.

   Channel stack is laid out as: {
     grpc_channel_stack stk;
     padding to GPR_MAX_ALIGNMENT
     grpc_channel_element[stk.count];
     per-filter memory, aligned to GPR_MAX_ALIGNMENT
   }

   Call stack is laid out as: {
     grpc_call_stack stk;
     padding to GPR_MAX_ALIGNMENT
     grpc_call_element[stk.count];
     per-filter memory, aligned to GPR_MAX_ALIGNMENT
   } */
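
/* Worked example (illustrative only, all sizes hypothetical): assume
   GPR_MAX_ALIGNMENT == 16, sizeof(grpc_channel_stack) == 24,
   sizeof(grpc_channel_element) == 16, and two filters whose
   sizeof_channel_data are 40 and 8. The header pads to 32 bytes, the element
   array occupies 32 bytes, and the per-filter regions round up to 48 and 16
   bytes, so grpc_channel_stack_size() returns 32 + 32 + 48 + 16 = 128 and the
   filters' channel_data regions start at offsets 64 and 112 from the start of
   the stack. */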

size_t grpc_channel_stack_size(const grpc_channel_filter** filters,
                               size_t filter_count) {
  /* always need the header, and size for the channel elements */
  size_t size = GPR_ROUND_UP_TO_ALIGNMENT_SIZE(sizeof(grpc_channel_stack)) +
                GPR_ROUND_UP_TO_ALIGNMENT_SIZE(filter_count *
                                               sizeof(grpc_channel_element));
  size_t i;

  GPR_ASSERT((GPR_MAX_ALIGNMENT & (GPR_MAX_ALIGNMENT - 1)) == 0 &&
             "GPR_MAX_ALIGNMENT must be a power of two");

  /* add the size for each filter */
  for (i = 0; i < filter_count; i++) {
    size += GPR_ROUND_UP_TO_ALIGNMENT_SIZE(filters[i]->sizeof_channel_data);
  }

  return size;
}

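/* Address of the first element in a stack: skip the stack header, rounded up
   to GPR_MAX_ALIGNMENT, per the layouts described above. */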
#define CHANNEL_ELEMS_FROM_STACK(stk)                                     \
  ((grpc_channel_element*)((char*)(stk) + GPR_ROUND_UP_TO_ALIGNMENT_SIZE( \
                                              sizeof(grpc_channel_stack))))

#define CALL_ELEMS_FROM_STACK(stk)                                     \
  ((grpc_call_element*)((char*)(stk) + GPR_ROUND_UP_TO_ALIGNMENT_SIZE( \
                                           sizeof(grpc_call_stack))))

grpc_channel_element* grpc_channel_stack_element(
    grpc_channel_stack* channel_stack, size_t index) {
  return CHANNEL_ELEMS_FROM_STACK(channel_stack) + index;
}

grpc_channel_element* grpc_channel_stack_last_element(
    grpc_channel_stack* channel_stack) {
  return grpc_channel_stack_element(channel_stack, channel_stack->count - 1);
}

grpc_call_element* grpc_call_stack_element(grpc_call_stack* call_stack,
                                           size_t index) {
  return CALL_ELEMS_FROM_STACK(call_stack) + index;
}

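/* Initialize a channel stack in memory sized by grpc_channel_stack_size():
   wires up each element's filter and channel_data pointers, calls
   init_channel_elem on every filter, and accumulates the per-call size into
   stack->call_stack_size. The first initialization error (if any) is
   returned; later errors are unref'd. A caller might use it roughly like this
   (illustrative sketch only; "filters", "num_filters", "args" and
   "destroy_cb" are placeholders):

     grpc_channel_stack* stack = static_cast<grpc_channel_stack*>(
         gpr_zalloc(grpc_channel_stack_size(filters, num_filters)));
     grpc_error* error =
         grpc_channel_stack_init(1, destroy_cb, stack, filters, num_filters,
                                 args, nullptr, "CHANNEL", stack);
*/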
grpc_error* grpc_channel_stack_init(
    int initial_refs, grpc_iomgr_cb_func destroy, void* destroy_arg,
    const grpc_channel_filter** filters, size_t filter_count,
    const grpc_channel_args* channel_args, grpc_transport* optional_transport,
    const char* name, grpc_channel_stack* stack) {
  size_t call_size =
      GPR_ROUND_UP_TO_ALIGNMENT_SIZE(sizeof(grpc_call_stack)) +
      GPR_ROUND_UP_TO_ALIGNMENT_SIZE(filter_count * sizeof(grpc_call_element));
  grpc_channel_element* elems;
  grpc_channel_element_args args;
  char* user_data;
  size_t i;

  stack->count = filter_count;
  GRPC_STREAM_REF_INIT(&stack->refcount, initial_refs, destroy, destroy_arg,
                       name);
  elems = CHANNEL_ELEMS_FROM_STACK(stack);
  user_data = (reinterpret_cast<char*>(elems)) +
              GPR_ROUND_UP_TO_ALIGNMENT_SIZE(filter_count *
                                             sizeof(grpc_channel_element));

  /* init per-filter data */
  grpc_error* first_error = GRPC_ERROR_NONE;
  for (i = 0; i < filter_count; i++) {
    args.channel_stack = stack;
    args.channel_args = channel_args;
    args.optional_transport = optional_transport;
    args.is_first = i == 0;
    args.is_last = i == (filter_count - 1);
    elems[i].filter = filters[i];
    elems[i].channel_data = user_data;
    grpc_error* error = elems[i].filter->init_channel_elem(&elems[i], &args);
    if (error != GRPC_ERROR_NONE) {
      if (first_error == GRPC_ERROR_NONE) {
        first_error = error;
      } else {
        GRPC_ERROR_UNREF(error);
      }
    }
    user_data +=
        GPR_ROUND_UP_TO_ALIGNMENT_SIZE(filters[i]->sizeof_channel_data);
    call_size += GPR_ROUND_UP_TO_ALIGNMENT_SIZE(filters[i]->sizeof_call_data);
  }

  GPR_ASSERT(user_data > (char*)stack);
  GPR_ASSERT((uintptr_t)(user_data - (char*)stack) ==
             grpc_channel_stack_size(filters, filter_count));

  stack->call_stack_size = call_size;
  return first_error;
}

void grpc_channel_stack_destroy(grpc_channel_stack* stack) {
  grpc_channel_element* channel_elems = CHANNEL_ELEMS_FROM_STACK(stack);
  size_t count = stack->count;
  size_t i;

  /* destroy per-filter data */
  for (i = 0; i < count; i++) {
    channel_elems[i].filter->destroy_channel_elem(&channel_elems[i]);
  }
}

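/* Initialize a call stack (elem_args->call_stack), whose storage was sized
   via channel_stack->call_stack_size during grpc_channel_stack_init() above:
   the first pass wires each call element to its filter, channel_data, and
   per-call data region; the second pass invokes init_call_elem on every
   filter. As above, the first error is returned and any subsequent errors
   are unref'd. */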
grpc_error* grpc_call_stack_init(grpc_channel_stack* channel_stack,
                                 int initial_refs, grpc_iomgr_cb_func destroy,
                                 void* destroy_arg,
                                 const grpc_call_element_args* elem_args) {
  grpc_channel_element* channel_elems = CHANNEL_ELEMS_FROM_STACK(channel_stack);
  size_t count = channel_stack->count;
  grpc_call_element* call_elems;
  char* user_data;

  elem_args->call_stack->count = count;
  GRPC_STREAM_REF_INIT(&elem_args->call_stack->refcount, initial_refs, destroy,
                       destroy_arg, "CALL_STACK");
  call_elems = CALL_ELEMS_FROM_STACK(elem_args->call_stack);
  user_data = (reinterpret_cast<char*>(call_elems)) +
              GPR_ROUND_UP_TO_ALIGNMENT_SIZE(count * sizeof(grpc_call_element));

  /* init per-filter data */
  grpc_error* first_error = GRPC_ERROR_NONE;
  for (size_t i = 0; i < count; i++) {
    call_elems[i].filter = channel_elems[i].filter;
    call_elems[i].channel_data = channel_elems[i].channel_data;
    call_elems[i].call_data = user_data;
    user_data +=
        GPR_ROUND_UP_TO_ALIGNMENT_SIZE(call_elems[i].filter->sizeof_call_data);
  }
  for (size_t i = 0; i < count; i++) {
    grpc_error* error =
        call_elems[i].filter->init_call_elem(&call_elems[i], elem_args);
    if (error != GRPC_ERROR_NONE) {
      if (first_error == GRPC_ERROR_NONE) {
        first_error = error;
      } else {
        GRPC_ERROR_UNREF(error);
      }
    }
  }
  return first_error;
}

void grpc_call_stack_set_pollset_or_pollset_set(grpc_call_stack* call_stack,
                                                grpc_polling_entity* pollent) {
  size_t count = call_stack->count;
  grpc_call_element* call_elems;
  size_t i;

  call_elems = CALL_ELEMS_FROM_STACK(call_stack);

  /* propagate the polling entity to each filter */
  for (i = 0; i < count; i++) {
    call_elems[i].filter->set_pollset_or_pollset_set(&call_elems[i], pollent);
  }
}

void grpc_call_stack_ignore_set_pollset_or_pollset_set(
    grpc_call_element* /*elem*/, grpc_polling_entity* /*pollent*/) {}

void grpc_call_stack_destroy(grpc_call_stack* stack,
                             const grpc_call_final_info* final_info,
                             grpc_closure* then_schedule_closure) {
  grpc_call_element* elems = CALL_ELEMS_FROM_STACK(stack);
  size_t count = stack->count;
  size_t i;

  /* destroy per-filter data */
  for (i = 0; i < count; i++) {
    elems[i].filter->destroy_call_elem(
        &elems[i], final_info,
        i == count - 1 ? then_schedule_closure : nullptr);
  }
}

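/* Pass-through helpers: each forwards its operation (or query) to the next
   element down the stack, i.e. elem + 1. */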
void grpc_call_next_op(grpc_call_element* elem,
                       grpc_transport_stream_op_batch* op) {
  grpc_call_element* next_elem = elem + 1;
  GRPC_CALL_LOG_OP(GPR_INFO, next_elem, op);
  next_elem->filter->start_transport_stream_op_batch(next_elem, op);
}

void grpc_channel_next_get_info(grpc_channel_element* elem,
                                const grpc_channel_info* channel_info) {
  grpc_channel_element* next_elem = elem + 1;
  next_elem->filter->get_channel_info(next_elem, channel_info);
}

void grpc_channel_next_op(grpc_channel_element* elem, grpc_transport_op* op) {
  grpc_channel_element* next_elem = elem + 1;
  next_elem->filter->start_transport_op(next_elem, op);
}

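/* Recover the owning stack from a pointer to its first (top) element by
   undoing the aligned-header offset applied by the *_ELEMS_FROM_STACK
   macros. */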
grpc_channel_stack* grpc_channel_stack_from_top_element(
    grpc_channel_element* elem) {
  return reinterpret_cast<grpc_channel_stack*>(
      reinterpret_cast<char*>(elem) -
      GPR_ROUND_UP_TO_ALIGNMENT_SIZE(sizeof(grpc_channel_stack)));
}

grpc_call_stack* grpc_call_stack_from_top_element(grpc_call_element* elem) {
  return reinterpret_cast<grpc_call_stack*>(
      reinterpret_cast<char*>(elem) -
      GPR_ROUND_UP_TO_ALIGNMENT_SIZE(sizeof(grpc_call_stack)));
}