Lines matching refs: head (net/sched/cls_matchall.c)

30 	struct cls_mall_head *head = rcu_dereference_bh(tp->root);  in mall_classify()  local
32 if (unlikely(!head)) in mall_classify()
35 if (tc_skip_sw(head->flags)) in mall_classify()
38 *res = head->res; in mall_classify()
39 __this_cpu_inc(head->pf->rhit); in mall_classify()
40 return tcf_exts_exec(skb, &head->exts, res); in mall_classify()
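
Taken together, the mall_classify() hits show the software fast path: bail out if no matchall filter is installed on the tcf_proto, bail out if the filter is marked skip_sw (it is meant to be handled purely in hardware), otherwise copy the stored result, bump the per-CPU hit counter and run the attached actions. Below is a minimal user-space sketch of that decision flow; only the TCA_CLS_FLAGS_SKIP_SW bit value is taken from include/uapi/linux/pkt_cls.h, every type and helper name is invented for the sketch.

    #include <stdio.h>

    /* real UAPI bit from include/uapi/linux/pkt_cls.h; the rest is invented */
    #define TCA_CLS_FLAGS_SKIP_SW   (1 << 1)

    struct mall_head_sketch {
        unsigned int flags;      /* TCA_CLS_FLAGS_* */
        unsigned int classid;    /* classification result set at config time */
        unsigned long rhit;      /* per-CPU counter in the kernel, plain here */
    };

    /* -1 means "no verdict here, keep looking", 0 means "classified" */
    static int classify_sketch(struct mall_head_sketch *head,
                               unsigned int *res_classid)
    {
        if (!head)                               /* no filter installed */
            return -1;
        if (head->flags & TCA_CLS_FLAGS_SKIP_SW) /* hardware-only filter */
            return -1;
        *res_classid = head->classid;            /* matchall matches everything */
        head->rhit++;                            /* software hit counter */
        return 0;                                /* kernel would run the actions */
    }

    int main(void)
    {
        struct mall_head_sketch h = { .flags = 0, .classid = 0x10001 };
        unsigned int classid = 0;

        if (classify_sketch(&h, &classid) == 0)
            printf("matched, classid 0x%x, hits %lu\n", classid, h.rhit);
        return 0;
    }
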
48 static void __mall_destroy(struct cls_mall_head *head) in __mall_destroy() argument
50 tcf_exts_destroy(&head->exts); in __mall_destroy()
51 tcf_exts_put_net(&head->exts); in __mall_destroy()
52 free_percpu(head->pf); in __mall_destroy()
53 kfree(head); in __mall_destroy()
58 struct cls_mall_head *head = container_of(to_rcu_work(work), in mall_destroy_work() local
62 __mall_destroy(head); in mall_destroy_work()
67 struct cls_mall_head *head, in mall_destroy_hw_filter() argument
74 tc_cls_common_offload_init(&cls_mall.common, tp, head->flags, extack); in mall_destroy_hw_filter()
79 &head->flags, &head->in_hw_count, true); in mall_destroy_hw_filter()
83 struct cls_mall_head *head, in mall_replace_hw_filter() argument
89 bool skip_sw = tc_skip_sw(head->flags); in mall_replace_hw_filter()
92 cls_mall.rule = flow_rule_alloc(tcf_exts_num_actions(&head->exts)); in mall_replace_hw_filter()
96 tc_cls_common_offload_init(&cls_mall.common, tp, head->flags, extack); in mall_replace_hw_filter()
100 err = tc_setup_flow_action(&cls_mall.rule->action, &head->exts); in mall_replace_hw_filter()
103 mall_destroy_hw_filter(tp, head, cookie, NULL); in mall_replace_hw_filter()
113 skip_sw, &head->flags, &head->in_hw_count, true); in mall_replace_hw_filter()
118 mall_destroy_hw_filter(tp, head, cookie, NULL); in mall_replace_hw_filter()
122 if (skip_sw && !(head->flags & TCA_CLS_FLAGS_IN_HW)) in mall_replace_hw_filter()
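
mall_replace_hw_filter() drives the offload handshake: allocate a flow rule sized for the filter's actions, translate the tcf_exts actions into flow actions, and hand the rule to the block callbacks; if that call fails the partially installed hardware filter is torn down again, and a skip_sw filter that did not end up flagged TCA_CLS_FLAGS_IN_HW makes the whole replace fail. A standalone sketch of that policy, assuming a hypothetical driver_add_filter() hook; only the two flag bit values are taken from the real UAPI header.

    #include <errno.h>
    #include <stdbool.h>
    #include <stdio.h>

    /* real UAPI bits from include/uapi/linux/pkt_cls.h */
    #define TCA_CLS_FLAGS_SKIP_SW   (1 << 1)
    #define TCA_CLS_FLAGS_IN_HW     (1 << 2)

    /* invented driver hook: 0 on success, with *in_hw saying whether the
     * rule actually landed in hardware */
    static int driver_add_filter(bool *in_hw)
    {
        *in_hw = true;
        return 0;
    }

    /* mirrors the shape of mall_replace_hw_filter(): offload, then enforce
     * the skip_sw policy against the resulting flags */
    static int replace_hw_sketch(unsigned int *flags)
    {
        bool skip_sw = *flags & TCA_CLS_FLAGS_SKIP_SW;
        bool in_hw = false;
        int err = driver_add_filter(&in_hw);

        if (err)
            return err;      /* kernel also tears down partial hw state here */
        if (in_hw)
            *flags |= TCA_CLS_FLAGS_IN_HW;
        /* a skip_sw filter that is not in hardware is useless: fail */
        if (skip_sw && !(*flags & TCA_CLS_FLAGS_IN_HW))
            return -EINVAL;
        return 0;
    }

    int main(void)
    {
        unsigned int flags = TCA_CLS_FLAGS_SKIP_SW;

        printf("replace: %d, flags 0x%x\n", replace_hw_sketch(&flags), flags);
        return 0;
    }
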
131 struct cls_mall_head *head = rtnl_dereference(tp->root); in mall_destroy() local
133 if (!head) in mall_destroy()
136 tcf_unbind_filter(tp, &head->res); in mall_destroy()
138 if (!tc_skip_hw(head->flags)) in mall_destroy()
139 mall_destroy_hw_filter(tp, head, (unsigned long) head, extack); in mall_destroy()
141 if (tcf_exts_get_net(&head->exts)) in mall_destroy()
142 tcf_queue_work(&head->rwork, mall_destroy_work); in mall_destroy()
144 __mall_destroy(head); in mall_destroy()
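
mall_destroy() unbinds the filter, removes any hardware instance, and then frees the head either through the deferred work item (mall_destroy_work() calling __mall_destroy()) when a network-namespace reference could be taken, or inline otherwise; deferring through tcf_queue_work() lets RCU readers still running mall_classify() finish before the per-CPU counters and the head itself go away. A rough user-space analogue of the "free later in a worker vs. free inline" branch, with a pthread standing in for the kernel work queue and all names invented for the sketch (build with -pthread):

    #include <pthread.h>
    #include <stdbool.h>
    #include <stdlib.h>

    /* stand-in for struct cls_mall_head; all names are invented */
    struct head_sketch {
        long *pf;               /* stand-in for the per-CPU packet counters */
    };

    static void head_free(struct head_sketch *head)
    {
        free(head->pf);         /* free_percpu(head->pf) in the kernel */
        free(head);             /* kfree(head) */
    }

    /* runs "later", like mall_destroy_work() queued via tcf_queue_work() */
    static void *destroy_work(void *arg)
    {
        head_free(arg);
        return NULL;
    }

    int main(void)
    {
        struct head_sketch *head = calloc(1, sizeof(*head));
        bool can_defer = true;  /* kernel: tcf_exts_get_net() succeeded */

        head->pf = calloc(1, sizeof(*head->pf));
        if (can_defer) {
            pthread_t worker;

            pthread_create(&worker, NULL, destroy_work, head);
            pthread_join(worker, NULL);
        } else {
            head_free(head);    /* no deferral possible: free inline */
        }
        return 0;
    }
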
149 struct cls_mall_head *head = rtnl_dereference(tp->root); in mall_get() local
151 if (head && head->handle == handle) in mall_get()
152 return head; in mall_get()
164 struct cls_mall_head *head, in mall_set_parms() argument
171 err = tcf_exts_validate(net, tp, tb, est, &head->exts, flags, extack); in mall_set_parms()
176 head->res.classid = nla_get_u32(tb[TCA_MATCHALL_CLASSID]); in mall_set_parms()
177 tcf_bind_filter(tp, &head->res, base); in mall_set_parms()
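
mall_set_parms() validates the actions, then reads the class handle out of the TCA_MATCHALL_CLASSID netlink attribute with nla_get_u32() and binds the filter to that class. For reference, a small user-space sketch of the attribute layout that read relies on: a 4-byte struct nlattr header directly followed by the u32 payload. struct nlattr and NLA_HDRLEN come from the real linux/netlink.h UAPI header; the attribute here is hand-built rather than parsed from a real message.

    #include <linux/netlink.h>   /* struct nlattr, NLA_HDRLEN */
    #include <stdint.h>
    #include <stdio.h>

    /* hand-built TCA_MATCHALL_CLASSID attribute: nlattr header followed by
     * the u32 classid payload, as nla_get_u32() expects to find it */
    struct classid_attr {
        struct nlattr hdr;
        uint32_t classid;
    };

    static uint32_t get_u32_sketch(const struct nlattr *nla)
    {
        /* nla_get_u32(): payload starts NLA_HDRLEN bytes after the header */
        const uint32_t *p = (const void *)((const char *)nla + NLA_HDRLEN);

        return *p;
    }

    int main(void)
    {
        struct classid_attr attr = {
            .hdr.nla_len  = sizeof(attr),
            .hdr.nla_type = 1,          /* TCA_MATCHALL_CLASSID in the UAPI enum */
            .classid      = 0x00010002, /* tc class handle 1:2 */
        };

        printf("classid 0x%x\n", get_u32_sketch(&attr.hdr));
        return 0;
    }
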
188 struct cls_mall_head *head = rtnl_dereference(tp->root); in mall_change() local
197 if (head) in mall_change()
244 *arg = head; in mall_change()
261 struct cls_mall_head *head = rtnl_dereference(tp->root); in mall_delete() local
263 head->deleting = true; in mall_delete()
271 struct cls_mall_head *head = rtnl_dereference(tp->root); in mall_walk() local
276 if (!head || head->deleting) in mall_walk()
278 if (arg->fn(tp, head, arg) < 0) in mall_walk()
287 struct cls_mall_head *head = rtnl_dereference(tp->root); in mall_reoffload() local
292 if (tc_skip_hw(head->flags)) in mall_reoffload()
295 cls_mall.rule = flow_rule_alloc(tcf_exts_num_actions(&head->exts)); in mall_reoffload()
299 tc_cls_common_offload_init(&cls_mall.common, tp, head->flags, extack); in mall_reoffload()
302 cls_mall.cookie = (unsigned long)head; in mall_reoffload()
304 err = tc_setup_flow_action(&cls_mall.rule->action, &head->exts); in mall_reoffload()
307 if (add && tc_skip_sw(head->flags)) { in mall_reoffload()
315 &cls_mall, cb_priv, &head->flags, in mall_reoffload()
316 &head->in_hw_count); in mall_reoffload()
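
mall_reoffload() replays the (single) matchall filter towards a block callback that is being registered or torn down: skip filters marked skip_hw, rebuild the flow rule and actions, call the callback with the head pointer as cookie, and keep head->in_hw_count in step; a failed action translation or callback is only fatal when adding a skip_sw filter, which has to live in hardware. A compact sketch of that replay pattern, with every type and callback invented for illustration:

    #include <stdbool.h>
    #include <stdio.h>

    /* invented per-filter state, standing in for cls_mall_head */
    struct filter_sketch {
        unsigned long cookie;        /* kernel uses (unsigned long)head */
        unsigned int in_hw_count;    /* how many callbacks hold the filter */
    };

    /* invented driver callback: install (add) or remove the filter */
    static int cb_sketch(struct filter_sketch *f, bool add)
    {
        printf("%s filter, cookie %lx\n", add ? "add" : "del", f->cookie);
        return 0;
    }

    /* replay one installed filter to a callback that is coming or going,
     * keeping the offload refcount in step with the outcome */
    static int reoffload_sketch(struct filter_sketch *f, bool add)
    {
        int err = cb_sketch(f, add);

        if (err)
            return err;              /* callback rejected the filter */
        if (add)
            f->in_hw_count++;
        else
            f->in_hw_count--;
        return 0;
    }

    int main(void)
    {
        struct filter_sketch f = { .cookie = 0xdeadbeefUL };

        reoffload_sketch(&f, true);
        reoffload_sketch(&f, false);
        return 0;
    }
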
327 struct cls_mall_head *head, in mall_stats_hw_filter() argument
333 tc_cls_common_offload_init(&cls_mall.common, tp, head->flags, NULL); in mall_stats_hw_filter()
339 tcf_exts_stats_update(&head->exts, cls_mall.stats.bytes, in mall_stats_hw_filter()
350 struct cls_mall_head *head = fh; in mall_dump() local
354 if (!head) in mall_dump()
357 if (!tc_skip_hw(head->flags)) in mall_dump()
358 mall_stats_hw_filter(tp, head, (unsigned long)head); in mall_dump()
360 t->tcm_handle = head->handle; in mall_dump()
366 if (head->res.classid && in mall_dump()
367 nla_put_u32(skb, TCA_MATCHALL_CLASSID, head->res.classid)) in mall_dump()
370 if (head->flags && nla_put_u32(skb, TCA_MATCHALL_FLAGS, head->flags)) in mall_dump()
374 struct tc_matchall_pcnt *pf = per_cpu_ptr(head->pf, cpu); in mall_dump()
384 if (tcf_exts_dump(skb, &head->exts)) in mall_dump()
389 if (tcf_exts_dump_stats(skb, &head->exts) < 0) in mall_dump()
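
The dump path folds the per-CPU hit counters into one figure: the per-CPU loop above reads each CPU's tc_matchall_pcnt slot and accumulates rhit before the total is emitted alongside the handle, classid and flags. The same "update locally, sum at dump time" pattern in plain pthreads, with a fixed CPU count standing in for the possible-CPU mask (build with -pthread):

    #include <pthread.h>
    #include <stdio.h>

    #define NCPUS 4   /* stand-in for the possible-CPU mask */

    /* stand-in for struct tc_matchall_pcnt, one slot per CPU as alloc_percpu()
     * would give the kernel */
    static unsigned long rhit[NCPUS];

    static void *worker(void *arg)
    {
        long cpu = (long)arg;

        for (int i = 0; i < 1000; i++)
            rhit[cpu]++;            /* __this_cpu_inc(head->pf->rhit) analogue */
        return NULL;
    }

    int main(void)
    {
        pthread_t t[NCPUS];
        unsigned long total = 0;

        for (long cpu = 0; cpu < NCPUS; cpu++)
            pthread_create(&t[cpu], NULL, worker, (void *)cpu);
        for (int cpu = 0; cpu < NCPUS; cpu++)
            pthread_join(t[cpu], NULL);

        /* dump side: fold the per-CPU slots into one figure, like the
         * per-CPU loop feeding TCA_MATCHALL_PCNT */
        for (int cpu = 0; cpu < NCPUS; cpu++)
            total += rhit[cpu];
        printf("rhit total: %lu\n", total);
        return 0;
    }
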
402 struct cls_mall_head *head = fh; in mall_bind_class() local
404 if (head && head->res.classid == classid) { in mall_bind_class()
406 __tcf_bind_filter(q, &head->res, base); in mall_bind_class()
408 __tcf_unbind_filter(q, &head->res); in mall_bind_class()
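
mall_bind_class() runs when a class is bound or torn down: only if the filter's stored classid matches the class in question is the result re-bound to the new base or unbound. A trivial sketch of that guard, with illustrative types only:

    #include <stdio.h>

    /* illustrative stand-ins; the kernel passes a qdisc and class handles */
    struct res_sketch {
        unsigned int classid;
        void *class_ptr;            /* the bound class, if any */
    };

    /* mirrors mall_bind_class(): only touch the filter when its classid
     * matches the class being (re)bound */
    static void bind_class_sketch(struct res_sketch *res, unsigned int classid,
                                  void *new_class)
    {
        if (res->classid != classid)
            return;
        res->class_ptr = new_class; /* NULL here means "unbind" */
    }

    int main(void)
    {
        struct res_sketch res = { .classid = 0x10001 };
        int cls;

        bind_class_sketch(&res, 0x10001, &cls);   /* bind */
        bind_class_sketch(&res, 0x10001, NULL);   /* unbind */
        printf("bound: %s\n", res.class_ptr ? "yes" : "no");
        return 0;
    }
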