// SPDX-License-Identifier: GPL-2.0-only
/*
 * Copyright (c) 2008-2009 Patrick McHardy <kaber@trash.net>
 *
 * Development of this code funded by Astaro AG (http://www.astaro.com/)
 */

#include <linux/kernel.h>
#include <linux/init.h>
#include <linux/module.h>
#include <linux/netlink.h>
#include <linux/netfilter.h>
#include <linux/netfilter/nf_tables.h>
#include <net/netfilter/nf_tables_core.h>
#include <net/netfilter/nf_tables.h>
#include <net/netfilter/nf_tables_offload.h>

struct nft_bitwise {
	u8			sreg;
	u8			sreg2;
	u8			dreg;
	enum nft_bitwise_ops	op:8;
	u8			len;
	struct nft_data		mask;
	struct nft_data		xor;
	struct nft_data		data;
};

static void nft_bitwise_eval_mask_xor(u32 *dst, const u32 *src,
				      const struct nft_bitwise *priv)
{
	unsigned int i;

	for (i = 0; i < DIV_ROUND_UP(priv->len, sizeof(u32)); i++)
		dst[i] = (src[i] & priv->mask.data[i]) ^ priv->xor.data[i];
}

static void nft_bitwise_eval_lshift(u32 *dst, const u32 *src,
				    const struct nft_bitwise *priv)
{
	u32 shift = priv->data.data[0];
	unsigned int i;
	u32 carry = 0;

	for (i = DIV_ROUND_UP(priv->len, sizeof(u32)); i > 0; i--) {
		dst[i - 1] = (src[i - 1] << shift) | carry;
		carry = src[i - 1] >> (BITS_PER_TYPE(u32) - shift);
	}
}

static void nft_bitwise_eval_rshift(u32 *dst, const u32 *src,
				    const struct nft_bitwise *priv)
{
	u32 shift = priv->data.data[0];
	unsigned int i;
	u32 carry = 0;

	for (i = 0; i < DIV_ROUND_UP(priv->len, sizeof(u32)); i++) {
		dst[i] = carry | (src[i] >> shift);
		carry = src[i] << (BITS_PER_TYPE(u32) - shift);
	}
}

static void nft_bitwise_eval_and(u32 *dst, const u32 *src, const u32 *src2,
				 const struct nft_bitwise *priv)
{
	unsigned int i, n;

	for (i = 0, n = DIV_ROUND_UP(priv->len, sizeof(u32)); i < n; i++)
		dst[i] = src[i] & src2[i];
}

static void nft_bitwise_eval_or(u32 *dst, const u32 *src, const u32 *src2,
				const struct nft_bitwise *priv)
{
	unsigned int i, n;

	for (i = 0, n = DIV_ROUND_UP(priv->len, sizeof(u32)); i < n; i++)
		dst[i] = src[i] | src2[i];
}

static void nft_bitwise_eval_xor(u32 *dst, const u32 *src, const u32 *src2,
				 const struct nft_bitwise *priv)
{
	unsigned int i, n;

	for (i = 0, n = DIV_ROUND_UP(priv->len, sizeof(u32)); i < n; i++)
		dst[i] = src[i] ^ src2[i];
}
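
/*
 * Evaluation entry point: dispatch to the per-operation helper selected by
 * priv->op. For the boolean AND/OR/XOR operations the second operand is
 * taken from a second source register (sreg2) when one was configured,
 * otherwise from the immediate data loaded at init time.
 */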
void nft_bitwise_eval(const struct nft_expr *expr,
		      struct nft_regs *regs, const struct nft_pktinfo *pkt)
{
	const struct nft_bitwise *priv = nft_expr_priv(expr);
	const u32 *src = &regs->data[priv->sreg], *src2;
	u32 *dst = &regs->data[priv->dreg];

	if (priv->op == NFT_BITWISE_MASK_XOR) {
		nft_bitwise_eval_mask_xor(dst, src, priv);
		return;
	}
	if (priv->op == NFT_BITWISE_LSHIFT) {
		nft_bitwise_eval_lshift(dst, src, priv);
		return;
	}
	if (priv->op == NFT_BITWISE_RSHIFT) {
		nft_bitwise_eval_rshift(dst, src, priv);
		return;
	}

	src2 = priv->sreg2 ? &regs->data[priv->sreg2] : priv->data.data;

	if (priv->op == NFT_BITWISE_AND) {
		nft_bitwise_eval_and(dst, src, src2, priv);
		return;
	}
	if (priv->op == NFT_BITWISE_OR) {
		nft_bitwise_eval_or(dst, src, src2, priv);
		return;
	}
	if (priv->op == NFT_BITWISE_XOR) {
		nft_bitwise_eval_xor(dst, src, src2, priv);
		return;
	}
}

static const struct nla_policy nft_bitwise_policy[NFTA_BITWISE_MAX + 1] = {
	[NFTA_BITWISE_SREG]	= { .type = NLA_U32 },
	[NFTA_BITWISE_SREG2]	= { .type = NLA_U32 },
	[NFTA_BITWISE_DREG]	= { .type = NLA_U32 },
	[NFTA_BITWISE_LEN]	= { .type = NLA_U32 },
	[NFTA_BITWISE_MASK]	= { .type = NLA_NESTED },
	[NFTA_BITWISE_XOR]	= { .type = NLA_NESTED },
	[NFTA_BITWISE_OP]	= NLA_POLICY_MAX(NLA_BE32, 255),
	[NFTA_BITWISE_DATA]	= { .type = NLA_NESTED },
};

static int nft_bitwise_init_mask_xor(struct nft_bitwise *priv,
				     const struct nlattr *const tb[])
{
	struct nft_data_desc mask = {
		.type	= NFT_DATA_VALUE,
		.size	= sizeof(priv->mask),
		.len	= priv->len,
	};
	struct nft_data_desc xor = {
		.type	= NFT_DATA_VALUE,
		.size	= sizeof(priv->xor),
		.len	= priv->len,
	};
	int err;

	if (tb[NFTA_BITWISE_DATA] ||
	    tb[NFTA_BITWISE_SREG2])
		return -EINVAL;

	if (!tb[NFTA_BITWISE_MASK] ||
	    !tb[NFTA_BITWISE_XOR])
		return -EINVAL;

	err = nft_data_init(NULL, &priv->mask, &mask, tb[NFTA_BITWISE_MASK]);
	if (err < 0)
		return err;

	err = nft_data_init(NULL, &priv->xor, &xor, tb[NFTA_BITWISE_XOR]);
	if (err < 0)
		goto err_xor_err;

	return 0;

err_xor_err:
	nft_data_release(&priv->mask, mask.type);

	return err;
}

static int nft_bitwise_init_shift(struct nft_bitwise *priv,
				  const struct nlattr *const tb[])
{
	struct nft_data_desc desc = {
		.type	= NFT_DATA_VALUE,
		.size	= sizeof(priv->data),
		.len	= sizeof(u32),
	};
	int err;

	if (tb[NFTA_BITWISE_MASK] ||
	    tb[NFTA_BITWISE_XOR] ||
	    tb[NFTA_BITWISE_SREG2])
		return -EINVAL;

	if (!tb[NFTA_BITWISE_DATA])
		return -EINVAL;

	err = nft_data_init(NULL, &priv->data, &desc, tb[NFTA_BITWISE_DATA]);
	if (err < 0)
		return err;

	if (priv->data.data[0] >= BITS_PER_TYPE(u32)) {
		nft_data_release(&priv->data, desc.type);
		return -EINVAL;
	}

	return 0;
}

static int nft_bitwise_init_bool(const struct nft_ctx *ctx,
				 struct nft_bitwise *priv,
				 const struct nlattr *const tb[])
{
	int err;

	if (tb[NFTA_BITWISE_MASK] ||
	    tb[NFTA_BITWISE_XOR])
		return -EINVAL;

	if ((!tb[NFTA_BITWISE_DATA] && !tb[NFTA_BITWISE_SREG2]) ||
	    (tb[NFTA_BITWISE_DATA] && tb[NFTA_BITWISE_SREG2]))
		return -EINVAL;

	if (tb[NFTA_BITWISE_DATA]) {
		struct nft_data_desc desc = {
			.type	= NFT_DATA_VALUE,
			.size	= sizeof(priv->data),
			.len	= priv->len,
		};

		err = nft_data_init(NULL, &priv->data, &desc,
				    tb[NFTA_BITWISE_DATA]);
		if (err < 0)
			return err;
	} else {
		err = nft_parse_register_load(ctx, tb[NFTA_BITWISE_SREG2],
					      &priv->sreg2, priv->len);
		if (err < 0)
			return err;
	}

	return 0;
}
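
/*
 * Common init path: parse the LEN, SREG and DREG attributes, validate the
 * requested operation (defaulting to NFT_BITWISE_MASK_XOR when no OP
 * attribute is given) and hand off to the per-operation init helper above.
 */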
static int nft_bitwise_init(const struct nft_ctx *ctx,
			    const struct nft_expr *expr,
			    const struct nlattr * const tb[])
{
	struct nft_bitwise *priv = nft_expr_priv(expr);
	u32 len;
	int err;

	err = nft_parse_u32_check(tb[NFTA_BITWISE_LEN], U8_MAX, &len);
	if (err < 0)
		return err;

	priv->len = len;

	err = nft_parse_register_load(ctx, tb[NFTA_BITWISE_SREG], &priv->sreg,
				      priv->len);
	if (err < 0)
		return err;

	err = nft_parse_register_store(ctx, tb[NFTA_BITWISE_DREG],
				       &priv->dreg, NULL, NFT_DATA_VALUE,
				       priv->len);
	if (err < 0)
		return err;

	if (tb[NFTA_BITWISE_OP]) {
		priv->op = ntohl(nla_get_be32(tb[NFTA_BITWISE_OP]));
		switch (priv->op) {
		case NFT_BITWISE_MASK_XOR:
		case NFT_BITWISE_LSHIFT:
		case NFT_BITWISE_RSHIFT:
		case NFT_BITWISE_AND:
		case NFT_BITWISE_OR:
		case NFT_BITWISE_XOR:
			break;
		default:
			return -EOPNOTSUPP;
		}
	} else {
		priv->op = NFT_BITWISE_MASK_XOR;
	}

	switch (priv->op) {
	case NFT_BITWISE_MASK_XOR:
		err = nft_bitwise_init_mask_xor(priv, tb);
		break;
	case NFT_BITWISE_LSHIFT:
	case NFT_BITWISE_RSHIFT:
		err = nft_bitwise_init_shift(priv, tb);
		break;
	case NFT_BITWISE_AND:
	case NFT_BITWISE_OR:
	case NFT_BITWISE_XOR:
		err = nft_bitwise_init_bool(ctx, priv, tb);
		break;
	}

	return err;
}

static int nft_bitwise_dump_mask_xor(struct sk_buff *skb,
				     const struct nft_bitwise *priv)
{
	if (nft_data_dump(skb, NFTA_BITWISE_MASK, &priv->mask,
			  NFT_DATA_VALUE, priv->len) < 0)
		return -1;

	if (nft_data_dump(skb, NFTA_BITWISE_XOR, &priv->xor,
			  NFT_DATA_VALUE, priv->len) < 0)
		return -1;

	return 0;
}

static int nft_bitwise_dump_shift(struct sk_buff *skb,
				  const struct nft_bitwise *priv)
{
	if (nft_data_dump(skb, NFTA_BITWISE_DATA, &priv->data,
			  NFT_DATA_VALUE, sizeof(u32)) < 0)
		return -1;
	return 0;
}

static int nft_bitwise_dump_bool(struct sk_buff *skb,
				 const struct nft_bitwise *priv)
{
	if (priv->sreg2) {
		if (nft_dump_register(skb, NFTA_BITWISE_SREG2, priv->sreg2))
			return -1;
	} else {
		if (nft_data_dump(skb, NFTA_BITWISE_DATA, &priv->data,
				  NFT_DATA_VALUE, sizeof(u32)) < 0)
			return -1;
	}

	return 0;
}

static int nft_bitwise_dump(struct sk_buff *skb,
			    const struct nft_expr *expr, bool reset)
{
	const struct nft_bitwise *priv = nft_expr_priv(expr);
	int err = 0;

	if (nft_dump_register(skb, NFTA_BITWISE_SREG, priv->sreg))
		return -1;
	if (nft_dump_register(skb, NFTA_BITWISE_DREG, priv->dreg))
		return -1;
	if (nla_put_be32(skb, NFTA_BITWISE_LEN, htonl(priv->len)))
		return -1;
	if (nla_put_be32(skb, NFTA_BITWISE_OP, htonl(priv->op)))
		return -1;

	switch (priv->op) {
	case NFT_BITWISE_MASK_XOR:
		err = nft_bitwise_dump_mask_xor(skb, priv);
		break;
	case NFT_BITWISE_LSHIFT:
	case NFT_BITWISE_RSHIFT:
		err = nft_bitwise_dump_shift(skb, priv);
		break;
	case NFT_BITWISE_AND:
	case NFT_BITWISE_OR:
	case NFT_BITWISE_XOR:
		err = nft_bitwise_dump_bool(skb, priv);
		break;
	}

	return err;
}

static struct nft_data zero;

static int nft_bitwise_offload(struct nft_offload_ctx *ctx,
			       struct nft_flow_rule *flow,
			       const struct nft_expr *expr)
{
	const struct nft_bitwise *priv = nft_expr_priv(expr);
	struct nft_offload_reg *reg = &ctx->regs[priv->dreg];

	if (priv->op != NFT_BITWISE_MASK_XOR)
		return -EOPNOTSUPP;

	if (memcmp(&priv->xor, &zero, sizeof(priv->xor)) ||
	    priv->sreg != priv->dreg || priv->len != reg->len)
		return -EOPNOTSUPP;

	memcpy(&reg->mask, &priv->mask, sizeof(priv->mask));

	return 0;
}
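
/*
 * Register tracking: if the tracked state shows that an identical bitwise
 * expression already produced this destination register from the same,
 * untouched selector, report the expression as redundant (return true).
 * Otherwise cancel or update the tracking for the destination registers
 * and record this expression as the one that last modified them.
 */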
static bool nft_bitwise_reduce(struct nft_regs_track *track,
			       const struct nft_expr *expr)
{
	const struct nft_bitwise *priv = nft_expr_priv(expr);
	const struct nft_bitwise *bitwise;
	unsigned int regcount;
	u8 dreg;
	int i;

	if (!track->regs[priv->sreg].selector)
		return false;

	bitwise = nft_expr_priv(track->regs[priv->dreg].selector);
	if (track->regs[priv->sreg].selector == track->regs[priv->dreg].selector &&
	    track->regs[priv->sreg].num_reg == 0 &&
	    track->regs[priv->dreg].bitwise &&
	    track->regs[priv->dreg].bitwise->ops == expr->ops &&
	    priv->sreg == bitwise->sreg &&
	    priv->sreg2 == bitwise->sreg2 &&
	    priv->dreg == bitwise->dreg &&
	    priv->op == bitwise->op &&
	    priv->len == bitwise->len &&
	    !memcmp(&priv->mask, &bitwise->mask, sizeof(priv->mask)) &&
	    !memcmp(&priv->xor, &bitwise->xor, sizeof(priv->xor)) &&
	    !memcmp(&priv->data, &bitwise->data, sizeof(priv->data))) {
		track->cur = expr;
		return true;
	}

	if (track->regs[priv->sreg].bitwise ||
	    track->regs[priv->sreg].num_reg != 0) {
		nft_reg_track_cancel(track, priv->dreg, priv->len);
		return false;
	}

	if (priv->sreg != priv->dreg) {
		nft_reg_track_update(track, track->regs[priv->sreg].selector,
				     priv->dreg, priv->len);
	}

	dreg = priv->dreg;
	regcount = DIV_ROUND_UP(priv->len, NFT_REG32_SIZE);
	for (i = 0; i < regcount; i++, dreg++)
		track->regs[dreg].bitwise = expr;

	return false;
}

static const struct nft_expr_ops nft_bitwise_ops = {
	.type		= &nft_bitwise_type,
	.size		= NFT_EXPR_SIZE(sizeof(struct nft_bitwise)),
	.eval		= nft_bitwise_eval,
	.init		= nft_bitwise_init,
	.dump		= nft_bitwise_dump,
	.reduce		= nft_bitwise_reduce,
	.offload	= nft_bitwise_offload,
};
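
/*
 * "Fast" variant: a specialisation for the common case of a single 32-bit
 * mask-and-xor operation. It uses the smaller struct nft_bitwise_fast_expr
 * and relies on an inlined evaluation path (nft_bitwise_fast_ops below has
 * .eval = NULL).
 */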
static int
nft_bitwise_extract_u32_data(const struct nlattr * const tb, u32 *out)
{
	struct nft_data data;
	struct nft_data_desc desc = {
		.type	= NFT_DATA_VALUE,
		.size	= sizeof(data),
		.len	= sizeof(u32),
	};
	int err;

	err = nft_data_init(NULL, &data, &desc, tb);
	if (err < 0)
		return err;

	*out = data.data[0];

	return 0;
}

static int nft_bitwise_fast_init(const struct nft_ctx *ctx,
				 const struct nft_expr *expr,
				 const struct nlattr * const tb[])
{
	struct nft_bitwise_fast_expr *priv = nft_expr_priv(expr);
	int err;

	err = nft_parse_register_load(ctx, tb[NFTA_BITWISE_SREG], &priv->sreg,
				      sizeof(u32));
	if (err < 0)
		return err;

	err = nft_parse_register_store(ctx, tb[NFTA_BITWISE_DREG], &priv->dreg,
				       NULL, NFT_DATA_VALUE, sizeof(u32));
	if (err < 0)
		return err;

	if (tb[NFTA_BITWISE_DATA] ||
	    tb[NFTA_BITWISE_SREG2])
		return -EINVAL;

	if (!tb[NFTA_BITWISE_MASK] ||
	    !tb[NFTA_BITWISE_XOR])
		return -EINVAL;

	err = nft_bitwise_extract_u32_data(tb[NFTA_BITWISE_MASK], &priv->mask);
	if (err < 0)
		return err;

	err = nft_bitwise_extract_u32_data(tb[NFTA_BITWISE_XOR], &priv->xor);
	if (err < 0)
		return err;

	return 0;
}

static int
nft_bitwise_fast_dump(struct sk_buff *skb,
		      const struct nft_expr *expr, bool reset)
{
	const struct nft_bitwise_fast_expr *priv = nft_expr_priv(expr);
	struct nft_data data;

	if (nft_dump_register(skb, NFTA_BITWISE_SREG, priv->sreg))
		return -1;
	if (nft_dump_register(skb, NFTA_BITWISE_DREG, priv->dreg))
		return -1;
	if (nla_put_be32(skb, NFTA_BITWISE_LEN, htonl(sizeof(u32))))
		return -1;
	if (nla_put_be32(skb, NFTA_BITWISE_OP, htonl(NFT_BITWISE_MASK_XOR)))
		return -1;

	data.data[0] = priv->mask;
	if (nft_data_dump(skb, NFTA_BITWISE_MASK, &data,
			  NFT_DATA_VALUE, sizeof(u32)) < 0)
		return -1;

	data.data[0] = priv->xor;
	if (nft_data_dump(skb, NFTA_BITWISE_XOR, &data,
			  NFT_DATA_VALUE, sizeof(u32)) < 0)
		return -1;

	return 0;
}

static int nft_bitwise_fast_offload(struct nft_offload_ctx *ctx,
				    struct nft_flow_rule *flow,
				    const struct nft_expr *expr)
{
	const struct nft_bitwise_fast_expr *priv = nft_expr_priv(expr);
	struct nft_offload_reg *reg = &ctx->regs[priv->dreg];

	if (priv->xor || priv->sreg != priv->dreg || reg->len != sizeof(u32))
		return -EOPNOTSUPP;

	reg->mask.data[0] = priv->mask;
	return 0;
}

static bool nft_bitwise_fast_reduce(struct nft_regs_track *track,
				    const struct nft_expr *expr)
{
	const struct nft_bitwise_fast_expr *priv = nft_expr_priv(expr);
	const struct nft_bitwise_fast_expr *bitwise;

	if (!track->regs[priv->sreg].selector)
		return false;

	bitwise = nft_expr_priv(track->regs[priv->dreg].selector);
	if (track->regs[priv->sreg].selector == track->regs[priv->dreg].selector &&
	    track->regs[priv->dreg].bitwise &&
	    track->regs[priv->dreg].bitwise->ops == expr->ops &&
	    priv->sreg == bitwise->sreg &&
	    priv->dreg == bitwise->dreg &&
	    priv->mask == bitwise->mask &&
	    priv->xor == bitwise->xor) {
		track->cur = expr;
		return true;
	}

	if (track->regs[priv->sreg].bitwise) {
		nft_reg_track_cancel(track, priv->dreg, NFT_REG32_SIZE);
		return false;
	}

	if (priv->sreg != priv->dreg) {
		track->regs[priv->dreg].selector =
			track->regs[priv->sreg].selector;
	}
	track->regs[priv->dreg].bitwise = expr;

	return false;
}

const struct nft_expr_ops nft_bitwise_fast_ops = {
	.type		= &nft_bitwise_type,
	.size		= NFT_EXPR_SIZE(sizeof(struct nft_bitwise_fast_expr)),
	.eval		= NULL, /* inlined */
	.init		= nft_bitwise_fast_init,
	.dump		= nft_bitwise_fast_dump,
	.reduce		= nft_bitwise_fast_reduce,
	.offload	= nft_bitwise_fast_offload,
};
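
/*
 * Pick the expression ops at rule creation time: the fast variant is only
 * used for 4-byte operands when the (optional) OP attribute is absent or
 * NFT_BITWISE_MASK_XOR; everything else uses the generic implementation.
 */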
static const struct nft_expr_ops *
nft_bitwise_select_ops(const struct nft_ctx *ctx,
		       const struct nlattr * const tb[])
{
	int err;
	u32 len;

	if (!tb[NFTA_BITWISE_LEN] ||
	    !tb[NFTA_BITWISE_SREG] ||
	    !tb[NFTA_BITWISE_DREG])
		return ERR_PTR(-EINVAL);

	err = nft_parse_u32_check(tb[NFTA_BITWISE_LEN], U8_MAX, &len);
	if (err < 0)
		return ERR_PTR(err);

	if (len != sizeof(u32))
		return &nft_bitwise_ops;

	if (tb[NFTA_BITWISE_OP] &&
	    ntohl(nla_get_be32(tb[NFTA_BITWISE_OP])) != NFT_BITWISE_MASK_XOR)
		return &nft_bitwise_ops;

	return &nft_bitwise_fast_ops;
}

struct nft_expr_type nft_bitwise_type __read_mostly = {
	.name		= "bitwise",
	.select_ops	= nft_bitwise_select_ops,
	.policy		= nft_bitwise_policy,
	.maxattr	= NFTA_BITWISE_MAX,
	.owner		= THIS_MODULE,
};

bool nft_expr_reduce_bitwise(struct nft_regs_track *track,
			     const struct nft_expr *expr)
{
	const struct nft_expr *last = track->last;
	const struct nft_expr *next;

	if (expr == last)
		return false;

	next = nft_expr_next(expr);
	if (next->ops == &nft_bitwise_ops)
		return nft_bitwise_reduce(track, next);
	else if (next->ops == &nft_bitwise_fast_ops)
		return nft_bitwise_fast_reduce(track, next);

	return false;
}
EXPORT_SYMBOL_GPL(nft_expr_reduce_bitwise);