// SPDX-License-Identifier: GPL-2.0-only
/*
 * Copyright (c) 2008-2009 Patrick McHardy <kaber@trash.net>
 *
 * Development of this code funded by Astaro AG (http://www.astaro.com/)
 */
8 #include <linux/kernel.h>
9 #include <linux/init.h>
10 #include <linux/module.h>
11 #include <linux/netlink.h>
12 #include <linux/netfilter.h>
13 #include <linux/netfilter/nf_tables.h>
14 #include <net/netfilter/nf_tables_core.h>
15 #include <net/netfilter/nf_tables.h>
16 #include <net/netfilter/nf_tables_offload.h>
21 enum nft_bitwise_ops op:8;
28 static void nft_bitwise_eval_bool(u32 *dst, const u32 *src,
29 const struct nft_bitwise *priv)
33 for (i = 0; i < DIV_ROUND_UP(priv->len, sizeof(u32)); i++)
34 dst[i] = (src[i] & priv->mask.data[i]) ^ priv->xor.data[i];
37 static void nft_bitwise_eval_lshift(u32 *dst, const u32 *src,
38 const struct nft_bitwise *priv)
40 u32 shift = priv->data.data[0];
44 for (i = DIV_ROUND_UP(priv->len, sizeof(u32)); i > 0; i--) {
45 dst[i - 1] = (src[i - 1] << shift) | carry;
46 carry = src[i - 1] >> (BITS_PER_TYPE(u32) - shift);
50 static void nft_bitwise_eval_rshift(u32 *dst, const u32 *src,
51 const struct nft_bitwise *priv)
53 u32 shift = priv->data.data[0];
57 for (i = 0; i < DIV_ROUND_UP(priv->len, sizeof(u32)); i++) {
58 dst[i] = carry | (src[i] >> shift);
59 carry = src[i] << (BITS_PER_TYPE(u32) - shift);
63 void nft_bitwise_eval(const struct nft_expr *expr,
64 struct nft_regs *regs, const struct nft_pktinfo *pkt)
66 const struct nft_bitwise *priv = nft_expr_priv(expr);
67 const u32 *src = ®s->data[priv->sreg];
68 u32 *dst = ®s->data[priv->dreg];
71 case NFT_BITWISE_BOOL:
72 nft_bitwise_eval_bool(dst, src, priv);
74 case NFT_BITWISE_LSHIFT:
75 nft_bitwise_eval_lshift(dst, src, priv);
77 case NFT_BITWISE_RSHIFT:
78 nft_bitwise_eval_rshift(dst, src, priv);
83 static const struct nla_policy nft_bitwise_policy[NFTA_BITWISE_MAX + 1] = {
84 [NFTA_BITWISE_SREG] = { .type = NLA_U32 },
85 [NFTA_BITWISE_DREG] = { .type = NLA_U32 },
86 [NFTA_BITWISE_LEN] = { .type = NLA_U32 },
87 [NFTA_BITWISE_MASK] = { .type = NLA_NESTED },
88 [NFTA_BITWISE_XOR] = { .type = NLA_NESTED },
89 [NFTA_BITWISE_OP] = { .type = NLA_U32 },
90 [NFTA_BITWISE_DATA] = { .type = NLA_NESTED },
93 static int nft_bitwise_init_bool(struct nft_bitwise *priv,
94 const struct nlattr *const tb[])
96 struct nft_data_desc mask, xor;
99 if (tb[NFTA_BITWISE_DATA])
102 if (!tb[NFTA_BITWISE_MASK] ||
103 !tb[NFTA_BITWISE_XOR])
106 err = nft_data_init(NULL, &priv->mask, sizeof(priv->mask), &mask,
107 tb[NFTA_BITWISE_MASK]);
110 if (mask.type != NFT_DATA_VALUE || mask.len != priv->len) {
112 goto err_mask_release;
115 err = nft_data_init(NULL, &priv->xor, sizeof(priv->xor), &xor,
116 tb[NFTA_BITWISE_XOR]);
118 goto err_mask_release;
119 if (xor.type != NFT_DATA_VALUE || xor.len != priv->len) {
121 goto err_xor_release;
127 nft_data_release(&priv->xor, xor.type);
129 nft_data_release(&priv->mask, mask.type);
133 static int nft_bitwise_init_shift(struct nft_bitwise *priv,
134 const struct nlattr *const tb[])
136 struct nft_data_desc d;
139 if (tb[NFTA_BITWISE_MASK] ||
140 tb[NFTA_BITWISE_XOR])
143 if (!tb[NFTA_BITWISE_DATA])
146 err = nft_data_init(NULL, &priv->data, sizeof(priv->data), &d,
147 tb[NFTA_BITWISE_DATA]);
150 if (d.type != NFT_DATA_VALUE || d.len != sizeof(u32) ||
151 priv->data.data[0] >= BITS_PER_TYPE(u32)) {
152 nft_data_release(&priv->data, d.type);
159 static int nft_bitwise_init(const struct nft_ctx *ctx,
160 const struct nft_expr *expr,
161 const struct nlattr * const tb[])
163 struct nft_bitwise *priv = nft_expr_priv(expr);
167 err = nft_parse_u32_check(tb[NFTA_BITWISE_LEN], U8_MAX, &len);
173 err = nft_parse_register_load(tb[NFTA_BITWISE_SREG], &priv->sreg,
178 err = nft_parse_register_store(ctx, tb[NFTA_BITWISE_DREG],
179 &priv->dreg, NULL, NFT_DATA_VALUE,
184 if (tb[NFTA_BITWISE_OP]) {
185 priv->op = ntohl(nla_get_be32(tb[NFTA_BITWISE_OP]));
187 case NFT_BITWISE_BOOL:
188 case NFT_BITWISE_LSHIFT:
189 case NFT_BITWISE_RSHIFT:
195 priv->op = NFT_BITWISE_BOOL;
199 case NFT_BITWISE_BOOL:
200 err = nft_bitwise_init_bool(priv, tb);
202 case NFT_BITWISE_LSHIFT:
203 case NFT_BITWISE_RSHIFT:
204 err = nft_bitwise_init_shift(priv, tb);
211 static int nft_bitwise_dump_bool(struct sk_buff *skb,
212 const struct nft_bitwise *priv)
214 if (nft_data_dump(skb, NFTA_BITWISE_MASK, &priv->mask,
215 NFT_DATA_VALUE, priv->len) < 0)
218 if (nft_data_dump(skb, NFTA_BITWISE_XOR, &priv->xor,
219 NFT_DATA_VALUE, priv->len) < 0)
225 static int nft_bitwise_dump_shift(struct sk_buff *skb,
226 const struct nft_bitwise *priv)
228 if (nft_data_dump(skb, NFTA_BITWISE_DATA, &priv->data,
229 NFT_DATA_VALUE, sizeof(u32)) < 0)
234 static int nft_bitwise_dump(struct sk_buff *skb, const struct nft_expr *expr)
236 const struct nft_bitwise *priv = nft_expr_priv(expr);
239 if (nft_dump_register(skb, NFTA_BITWISE_SREG, priv->sreg))
241 if (nft_dump_register(skb, NFTA_BITWISE_DREG, priv->dreg))
243 if (nla_put_be32(skb, NFTA_BITWISE_LEN, htonl(priv->len)))
245 if (nla_put_be32(skb, NFTA_BITWISE_OP, htonl(priv->op)))
249 case NFT_BITWISE_BOOL:
250 err = nft_bitwise_dump_bool(skb, priv);
252 case NFT_BITWISE_LSHIFT:
253 case NFT_BITWISE_RSHIFT:
254 err = nft_bitwise_dump_shift(skb, priv);
261 static struct nft_data zero;
263 static int nft_bitwise_offload(struct nft_offload_ctx *ctx,
264 struct nft_flow_rule *flow,
265 const struct nft_expr *expr)
267 const struct nft_bitwise *priv = nft_expr_priv(expr);
268 struct nft_offload_reg *reg = &ctx->regs[priv->dreg];
270 if (priv->op != NFT_BITWISE_BOOL)
273 if (memcmp(&priv->xor, &zero, sizeof(priv->xor)) ||
274 priv->sreg != priv->dreg || priv->len != reg->len)
277 memcpy(®->mask, &priv->mask, sizeof(priv->mask));
282 static bool nft_bitwise_reduce(struct nft_regs_track *track,
283 const struct nft_expr *expr)
285 const struct nft_bitwise *priv = nft_expr_priv(expr);
286 const struct nft_bitwise *bitwise;
287 unsigned int regcount;
291 if (!track->regs[priv->sreg].selector)
294 bitwise = nft_expr_priv(track->regs[priv->dreg].selector);
295 if (track->regs[priv->sreg].selector == track->regs[priv->dreg].selector &&
296 track->regs[priv->sreg].num_reg == 0 &&
297 track->regs[priv->dreg].bitwise &&
298 track->regs[priv->dreg].bitwise->ops == expr->ops &&
299 priv->sreg == bitwise->sreg &&
300 priv->dreg == bitwise->dreg &&
301 priv->op == bitwise->op &&
302 priv->len == bitwise->len &&
303 !memcmp(&priv->mask, &bitwise->mask, sizeof(priv->mask)) &&
304 !memcmp(&priv->xor, &bitwise->xor, sizeof(priv->xor)) &&
305 !memcmp(&priv->data, &bitwise->data, sizeof(priv->data))) {
310 if (track->regs[priv->sreg].bitwise ||
311 track->regs[priv->sreg].num_reg != 0) {
312 nft_reg_track_cancel(track, priv->dreg, priv->len);
316 if (priv->sreg != priv->dreg) {
317 nft_reg_track_update(track, track->regs[priv->sreg].selector,
318 priv->dreg, priv->len);
322 regcount = DIV_ROUND_UP(priv->len, NFT_REG32_SIZE);
323 for (i = 0; i < regcount; i++, dreg++)
324 track->regs[priv->dreg].bitwise = expr;
329 static const struct nft_expr_ops nft_bitwise_ops = {
330 .type = &nft_bitwise_type,
331 .size = NFT_EXPR_SIZE(sizeof(struct nft_bitwise)),
332 .eval = nft_bitwise_eval,
333 .init = nft_bitwise_init,
334 .dump = nft_bitwise_dump,
335 .reduce = nft_bitwise_reduce,
336 .offload = nft_bitwise_offload,
340 nft_bitwise_extract_u32_data(const struct nlattr * const tb, u32 *out)
342 struct nft_data_desc desc;
343 struct nft_data data;
346 err = nft_data_init(NULL, &data, sizeof(data), &desc, tb);
350 if (desc.type != NFT_DATA_VALUE || desc.len != sizeof(u32)) {
356 nft_data_release(&data, desc.type);
360 static int nft_bitwise_fast_init(const struct nft_ctx *ctx,
361 const struct nft_expr *expr,
362 const struct nlattr * const tb[])
364 struct nft_bitwise_fast_expr *priv = nft_expr_priv(expr);
367 err = nft_parse_register_load(tb[NFTA_BITWISE_SREG], &priv->sreg,
372 err = nft_parse_register_store(ctx, tb[NFTA_BITWISE_DREG], &priv->dreg,
373 NULL, NFT_DATA_VALUE, sizeof(u32));
377 if (tb[NFTA_BITWISE_DATA])
380 if (!tb[NFTA_BITWISE_MASK] ||
381 !tb[NFTA_BITWISE_XOR])
384 err = nft_bitwise_extract_u32_data(tb[NFTA_BITWISE_MASK], &priv->mask);
388 err = nft_bitwise_extract_u32_data(tb[NFTA_BITWISE_XOR], &priv->xor);
396 nft_bitwise_fast_dump(struct sk_buff *skb, const struct nft_expr *expr)
398 const struct nft_bitwise_fast_expr *priv = nft_expr_priv(expr);
399 struct nft_data data;
401 if (nft_dump_register(skb, NFTA_BITWISE_SREG, priv->sreg))
403 if (nft_dump_register(skb, NFTA_BITWISE_DREG, priv->dreg))
405 if (nla_put_be32(skb, NFTA_BITWISE_LEN, htonl(sizeof(u32))))
407 if (nla_put_be32(skb, NFTA_BITWISE_OP, htonl(NFT_BITWISE_BOOL)))
410 data.data[0] = priv->mask;
411 if (nft_data_dump(skb, NFTA_BITWISE_MASK, &data,
412 NFT_DATA_VALUE, sizeof(u32)) < 0)
415 data.data[0] = priv->xor;
416 if (nft_data_dump(skb, NFTA_BITWISE_XOR, &data,
417 NFT_DATA_VALUE, sizeof(u32)) < 0)
423 static int nft_bitwise_fast_offload(struct nft_offload_ctx *ctx,
424 struct nft_flow_rule *flow,
425 const struct nft_expr *expr)
427 const struct nft_bitwise_fast_expr *priv = nft_expr_priv(expr);
428 struct nft_offload_reg *reg = &ctx->regs[priv->dreg];
430 if (priv->xor || priv->sreg != priv->dreg || reg->len != sizeof(u32))
433 reg->mask.data[0] = priv->mask;
437 static bool nft_bitwise_fast_reduce(struct nft_regs_track *track,
438 const struct nft_expr *expr)
440 const struct nft_bitwise_fast_expr *priv = nft_expr_priv(expr);
441 const struct nft_bitwise_fast_expr *bitwise;
443 if (!track->regs[priv->sreg].selector)
446 bitwise = nft_expr_priv(track->regs[priv->dreg].selector);
447 if (track->regs[priv->sreg].selector == track->regs[priv->dreg].selector &&
448 track->regs[priv->dreg].bitwise &&
449 track->regs[priv->dreg].bitwise->ops == expr->ops &&
450 priv->sreg == bitwise->sreg &&
451 priv->dreg == bitwise->dreg &&
452 priv->mask == bitwise->mask &&
453 priv->xor == bitwise->xor) {
458 if (track->regs[priv->sreg].bitwise) {
459 nft_reg_track_cancel(track, priv->dreg, NFT_REG32_SIZE);
463 if (priv->sreg != priv->dreg) {
464 track->regs[priv->dreg].selector =
465 track->regs[priv->sreg].selector;
467 track->regs[priv->dreg].bitwise = expr;
472 const struct nft_expr_ops nft_bitwise_fast_ops = {
473 .type = &nft_bitwise_type,
474 .size = NFT_EXPR_SIZE(sizeof(struct nft_bitwise_fast_expr)),
475 .eval = NULL, /* inlined */
476 .init = nft_bitwise_fast_init,
477 .dump = nft_bitwise_fast_dump,
478 .reduce = nft_bitwise_fast_reduce,
479 .offload = nft_bitwise_fast_offload,
482 static const struct nft_expr_ops *
483 nft_bitwise_select_ops(const struct nft_ctx *ctx,
484 const struct nlattr * const tb[])
489 if (!tb[NFTA_BITWISE_LEN] ||
490 !tb[NFTA_BITWISE_SREG] ||
491 !tb[NFTA_BITWISE_DREG])
492 return ERR_PTR(-EINVAL);
494 err = nft_parse_u32_check(tb[NFTA_BITWISE_LEN], U8_MAX, &len);
498 if (len != sizeof(u32))
499 return &nft_bitwise_ops;
501 if (tb[NFTA_BITWISE_OP] &&
502 ntohl(nla_get_be32(tb[NFTA_BITWISE_OP])) != NFT_BITWISE_BOOL)
503 return &nft_bitwise_ops;
505 return &nft_bitwise_fast_ops;
508 struct nft_expr_type nft_bitwise_type __read_mostly = {
510 .select_ops = nft_bitwise_select_ops,
511 .policy = nft_bitwise_policy,
512 .maxattr = NFTA_BITWISE_MAX,
513 .owner = THIS_MODULE,
516 bool nft_expr_reduce_bitwise(struct nft_regs_track *track,
517 const struct nft_expr *expr)
519 const struct nft_expr *last = track->last;
520 const struct nft_expr *next;
525 next = nft_expr_next(expr);
526 if (next->ops == &nft_bitwise_ops)
527 return nft_bitwise_reduce(track, next);
528 else if (next->ops == &nft_bitwise_fast_ops)
529 return nft_bitwise_fast_reduce(track, next);
533 EXPORT_SYMBOL_GPL(nft_expr_reduce_bitwise);