Lines matching references to d (the struct gnet_dump being filled) in net/core/gen_stats.c:
In gnet_stats_copy():
  28  gnet_stats_copy(struct gnet_dump *d, int type, void *buf, int size)
  30          if (nla_put(d->skb, type, size, buf))
  35          kfree(d->xstats);
  36          d->xstats = NULL;
  37          d->xstats_len = 0;
  38          spin_unlock_bh(d->lock);
In gnet_stats_start_copy_compat():
  62          int xstats_type, spinlock_t *lock, struct gnet_dump *d)
  65          memset(d, 0, sizeof(*d));
  68          d->lock = lock;
  70          d->tail = (struct nlattr *)skb_tail_pointer(skb);
  71          d->skb = skb;
  72          d->compat_tc_stats = tc_stats_type;
  73          d->compat_xstats = xstats_type;
  75          if (d->tail)
  76                  return gnet_stats_copy(d, type, NULL, 0);
In gnet_stats_start_copy():
  97          struct gnet_dump *d)
  99          return gnet_stats_start_copy_compat(skb, type, 0, 0, lock, d);
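gnet_stats_start_copy() (lines 97-99) is simply the compat variant with both
legacy attribute types zeroed, for callers that only want the nested
TCA_STATS2-style attributes. A minimal sketch of the open/close bracket it
forms with gnet_stats_finish_copy(); skb, lock and the TCA_STATS2 type are
assumptions borrowed from the usual netlink dump context:

#include <linux/rtnetlink.h>
#include <net/gen_stats.h>

static int example_open_close(struct sk_buff *skb, spinlock_t *lock)
{
        struct gnet_dump d;

        /* Takes the lock and records the freshly added TCA_STATS2
         * attribute in d.tail, so that gnet_stats_finish_copy() can
         * patch its nla_len once the nested statistics are appended. */
        if (gnet_stats_start_copy(skb, TCA_STATS2, lock, &d) < 0)
                return -1;

        /* ... gnet_stats_copy_*() calls would go here ... */

        /* Fixes up d.tail->nla_len and releases the lock. */
        return gnet_stats_finish_copy(&d);
}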
In gnet_stats_copy_basic():
 152  gnet_stats_copy_basic(struct gnet_dump *d,
 160          if (d->compat_tc_stats) {
 161                  d->tc_stats.bytes = bstats.bytes;
 162                  d->tc_stats.packets = bstats.packets;
 165          if (d->tail) {
 171                  return gnet_stats_copy(d, TCA_STATS_BASIC, &sb, sizeof(sb));
In gnet_stats_copy_rate_est():
 190  gnet_stats_copy_rate_est(struct gnet_dump *d,
 204          if (d->compat_tc_stats) {
 205                  d->tc_stats.bps = est.bps;
 206                  d->tc_stats.pps = est.pps;
 209          if (d->tail) {
 210                  res = gnet_stats_copy(d, TCA_STATS_RATE_EST, &est, sizeof(est));
 214                  return gnet_stats_copy(d, TCA_STATS_RATE_EST64, r, sizeof(*r));
In gnet_stats_copy_queue():
 271  gnet_stats_copy_queue(struct gnet_dump *d,
 279          if (d->compat_tc_stats) {
 280                  d->tc_stats.drops = qstats.drops;
 281                  d->tc_stats.qlen = qstats.qlen;
 282                  d->tc_stats.backlog = qstats.backlog;
 283                  d->tc_stats.overlimits = qstats.overlimits;
 286          if (d->tail)
 287                  return gnet_stats_copy(d, TCA_STATS_QUEUE,
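The three per-type helpers above are normally called back to back by the core
dump code, right after the dump has been opened. A minimal sketch of that
middle step, assuming a struct Qdisc as the source of the counters; the helper
signatures match roughly this era of gen_stats.c and have gained extra
arguments in later kernels, so treat them as assumptions:

#include <net/sch_generic.h>

/* Feed a qdisc's counters to the per-type copy helpers.  Each one fills
 * its nested TCA_STATS_* attribute and, on compat dumps, also mirrors the
 * numbers into d->tc_stats for the legacy flat TCA_STATS struct. */
static int example_copy_qdisc_stats(struct gnet_dump *d, struct Qdisc *q)
{
        if (gnet_stats_copy_basic(d, q->cpu_bstats, &q->bstats) < 0 ||
            gnet_stats_copy_rate_est(d, &q->bstats, &q->rate_est) < 0 ||
            gnet_stats_copy_queue(d, q->cpu_qstats, &q->qstats,
                                  q->q.qlen) < 0)
                return -1;      /* the failing helper already dropped the lock */

        return 0;
}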
In gnet_stats_copy_app():
 308  gnet_stats_copy_app(struct gnet_dump *d, void *st, int len)
 310          if (d->compat_xstats) {
 311                  d->xstats = kmemdup(st, len, GFP_ATOMIC);
 312                  if (!d->xstats)
 314                  d->xstats_len = len;
 317          if (d->tail)
 318                  return gnet_stats_copy(d, TCA_STATS_APP, st, len);
 323          d->xstats_len = 0;
 324          spin_unlock_bh(d->lock);
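gnet_stats_copy_app() is what a qdisc's own ->dump_stats() callback typically
calls with its private statistics block; on compat dumps the kmemdup() at line
311 keeps a copy so gnet_stats_finish_copy() can re-emit it as the legacy
xstats attribute. A minimal sketch of such a callback, with tc_example_xstats
and example_sched_data as hypothetical stand-ins for a real qdisc's types
(compare the pattern in e.g. sch_red.c):

#include <net/sch_generic.h>

struct tc_example_xstats {              /* hypothetical xstats layout */
        __u32 early_drops;
};

struct example_sched_data {             /* hypothetical qdisc private data */
        u32 early_drops;
};

static int example_dump_stats(struct Qdisc *sch, struct gnet_dump *d)
{
        struct example_sched_data *q = qdisc_priv(sch);
        struct tc_example_xstats st = {
                .early_drops = q->early_drops,
        };

        /* Emits TCA_STATS_APP inside the TCA_STATS2 nest and, on a compat
         * dump, squirrels the bytes away for gnet_stats_finish_copy(). */
        return gnet_stats_copy_app(d, &st, sizeof(st));
}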
In gnet_stats_finish_copy():
 342  gnet_stats_finish_copy(struct gnet_dump *d)
 344          if (d->tail)
 345                  d->tail->nla_len = skb_tail_pointer(d->skb) - (u8 *)d->tail;
 347          if (d->compat_tc_stats)
 348                  if (gnet_stats_copy(d, d->compat_tc_stats, &d->tc_stats,
 349                                      sizeof(d->tc_stats)) < 0)
 352          if (d->compat_xstats && d->xstats) {
 353                  if (gnet_stats_copy(d, d->compat_xstats, d->xstats,
 354                                      d->xstats_len) < 0)
 358          kfree(d->xstats);
 359          d->xstats = NULL;
 360          d->xstats_len = 0;
 361          spin_unlock_bh(d->lock);
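gnet_stats_finish_copy() closes the bracket opened by gnet_stats_start_copy*():
it patches the length of the TCA_STATS2 nest recorded in d->tail, emits the
accumulated compat attributes and releases the lock. A minimal sketch of the
whole sequence, loosely modelled on the qdisc dump path in net/sched/sch_api.c
and reusing the hypothetical helpers from the sketches above; the attribute
types and the lock parameter are assumptions borrowed from that path:

static int example_fill_stats(struct sk_buff *skb, struct Qdisc *q,
                              spinlock_t *lock)
{
        struct gnet_dump d;

        /* Open a TCA_STATS2 nest and ask for the legacy TCA_STATS and
         * TCA_XSTATS attributes to be mirrored as well ("compat" mode). */
        if (gnet_stats_start_copy_compat(skb, TCA_STATS2, TCA_STATS,
                                         TCA_XSTATS, lock, &d) < 0)
                return -1;

        if (example_copy_qdisc_stats(&d, q) < 0 ||      /* sketch above */
            example_dump_stats(q, &d) < 0)              /* sketch above */
                return -1;      /* lock already dropped by the failing helper */

        /* Fix up the nest length, emit TCA_STATS/TCA_XSTATS, unlock. */
        return gnet_stats_finish_copy(&d);
}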