Lines Matching defs:ctl
154 vtw_ctl_t *ctl;
325 idx_encode(vtw_ctl_t *ctl, uint32_t idx)
327 return (idx << ctl->idx_bits) | idx;
331 idx_decode(vtw_ctl_t *ctl, uint32_t bits)
333 uint32_t idx = bits & ctl->idx_mask;
335 if (idx_encode(ctl, idx) == bits)
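
Note: the idx_encode()/idx_decode() pair above stores the entry index twice in one 32-bit word (once in the low idx_bits, once shifted above them), so a decoded value can be validated by re-encoding it. A minimal standalone sketch of that round-trip check; the struct fields and the ~0u failure sentinel are assumptions for this sketch, not taken from the fragments:

    #include <assert.h>
    #include <stdint.h>

    /* Hypothetical stand-in for the relevant vtw_ctl_t fields. */
    struct idx_ctl {
        uint32_t idx_bits;      /* number of significant index bits */
        uint32_t idx_mask;      /* (1 << idx_bits) - 1 */
    };

    /* Store the index in both halves: low idx_bits and the bits above. */
    static uint32_t
    idx_encode(const struct idx_ctl *ctl, uint32_t idx)
    {
        return (idx << ctl->idx_bits) | idx;
    }

    /* Recover the index; re-encode to detect a corrupted or stale value.
     * Returning ~0u on failure is an assumption made here. */
    static uint32_t
    idx_decode(const struct idx_ctl *ctl, uint32_t bits)
    {
        uint32_t idx = bits & ctl->idx_mask;

        return (idx_encode(ctl, idx) == bits) ? idx : ~0u;
    }

    int
    main(void)
    {
        struct idx_ctl ctl = { .idx_bits = 10, .idx_mask = 0x3ff };

        assert(idx_decode(&ctl, idx_encode(&ctl, 37)) == 37);
        /* A flipped bit in the upper copy fails the round-trip. */
        assert(idx_decode(&ctl, idx_encode(&ctl, 37) ^ 0x400) == ~0u);
        return 0;
    }
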
418 vtw_index_v4(vtw_ctl_t *ctl, vtw_v4_t *v4)
420 if (ctl->base.v4 <= v4 && v4 <= ctl->lim.v4)
421 return v4 - ctl->base.v4;
429 vtw_index_v6(vtw_ctl_t *ctl, vtw_v6_t *v6)
431 if (ctl->base.v6 <= v6 && v6 <= ctl->lim.v6)
432 return v6 - ctl->base.v6;
440 vtw_index(vtw_ctl_t *ctl, vtw_t *vtw)
442 if (ctl->clidx)
443 ctl = ctl->ctl;
445 if (ctl->is_v4)
446 return vtw_index_v4(ctl, (vtw_v4_t *)vtw);
448 if (ctl->is_v6)
449 return vtw_index_v6(ctl, (vtw_v6_t *)vtw);
457 vtw_from_index(vtw_ctl_t *ctl, uint32_t idx)
459 if (ctl->clidx)
460 ctl = ctl->ctl;
465 idx = idx_decode(ctl, idx);
469 } else if (ctl->is_v4) {
470 vtw_v4_t *vtw = ctl->base.v4 + idx;
472 return (ctl->base.v4 <= vtw && vtw <= ctl->lim.v4)
474 } else if (ctl->is_v6) {
475 vtw_v6_t *vtw = ctl->base.v6 + idx;
477 return (ctl->base.v6 <= vtw && vtw <= ctl->lim.v6)
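
Note: vtw_index_v4()/_v6() and vtw_from_index() above map between an entry pointer and its array index with plain pointer arithmetic against base, bounds-checked against lim; vtw_from_index() also strips the idx_encode() wrapper via idx_decode() first. A cut-down sketch of the two directions, with a hypothetical entry type and NULL as the assumed failure return:

    #include <assert.h>
    #include <stddef.h>
    #include <stdint.h>

    typedef struct { uint32_t pad; } entry_t;   /* hypothetical entry */

    struct ring_ctl {
        entry_t *base;      /* first entry */
        entry_t *lim;       /* last entry (inclusive) */
    };

    /* Pointer -> index: valid only while the pointer lies in [base, lim]. */
    static uint32_t
    entry_index(const struct ring_ctl *ctl, const entry_t *e)
    {
        assert(ctl->base <= e && e <= ctl->lim);
        return (uint32_t)(e - ctl->base);
    }

    /* Index -> pointer, refusing anything outside the array. */
    static entry_t *
    entry_from_index(const struct ring_ctl *ctl, uint32_t idx)
    {
        if (idx > (uint32_t)(ctl->lim - ctl->base))
            return NULL;
        return ctl->base + idx;
    }

    int
    main(void)
    {
        entry_t pool[8];
        struct ring_ctl ctl = { pool, &pool[7] };

        assert(entry_from_index(&ctl, entry_index(&ctl, &pool[5])) == &pool[5]);
        assert(entry_from_index(&ctl, 42) == NULL);
        return 0;
    }
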
493 vtw_next(vtw_ctl_t *ctl, vtw_t *vtw)
495 if (ctl->is_v4) {
505 if (vtw > ctl->lim.v)
506 vtw = ctl->base.v;
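
Note: vtw_next() above steps to the following entry and wraps from lim back to base, so the entry array behaves as a ring. The same wrap in isolation (entry type and field names are hypothetical):

    #include <stdio.h>

    typedef struct { int payload; } entry_t;    /* hypothetical entry */

    struct ring_ctl { entry_t *base, *lim; };   /* lim is inclusive */

    /* Advance one slot, wrapping past the last entry back to the first. */
    static entry_t *
    ring_next(const struct ring_ctl *ctl, entry_t *e)
    {
        ++e;
        if (e > ctl->lim)
            e = ctl->base;
        return e;
    }

    int
    main(void)
    {
        entry_t pool[4];
        struct ring_ctl ctl = { pool, &pool[3] };

        printf("%td\n", ring_next(&ctl, &pool[3]) - pool);  /* prints 0 */
        return 0;
    }
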
514 vtw_unhash(vtw_ctl_t *ctl, vtw_t *vtw)
516 fatp_ctl_t *fat = ctl->fat;
541 idx = vtw_index(ctl, vtw);
548 KASSERT(fp->tag[slot] == (tag ^ idx_encode(ctl, idx)
552 && fp->tag[slot] == (tag ^ idx_encode(ctl, idx)
620 idx = vtw_index(ctl, vtw);
628 KASSERT(fp->tag[slot] == (tag ^ idx_encode(ctl, idx)
632 && fp->tag[slot] == (tag ^ idx_encode(ctl, idx)
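
Note: the KASSERTs in vtw_unhash() above compare a slot's stored tag against tag ^ idx_encode(ctl, idx), which suggests one 32-bit slot word folds together the hash tag and the self-checking entry index; since XOR is its own inverse, either half can be peeled back off given the other. A tiny sketch of that folding (names and values are hypothetical):

    #include <assert.h>
    #include <stdint.h>

    /* Fold a hash tag and an encoded index into one slot word. */
    static uint32_t
    slot_fold(uint32_t tag, uint32_t encoded_idx)
    {
        return tag ^ encoded_idx;
    }

    int
    main(void)
    {
        uint32_t tag = 0xdeadbeef, enc = 0x00250025;    /* example values */
        uint32_t slot = slot_fold(tag, enc);

        assert((slot ^ tag) == enc);    /* recover the encoded index */
        assert((slot ^ enc) == tag);    /* recover the tag */
        return 0;
    }
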
676 vtw_del(vtw_ctl_t *ctl, vtw_t *vtw)
682 vtw_unhash(ctl, vtw);
687 if (vtw != ctl->oldest.v)
690 --ctl->nalloc;
691 ++ctl->nfree;
696 if (!ctl->nalloc)
697 ctl->oldest.v = 0;
699 ctl->oldest.v = vtw_next(ctl, vtw);
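
Note: the fragments of vtw_del() above suggest only the oldest entry is ever retired: the allocated/free counters are adjusted, and the oldest pointer either advances one ring slot or is cleared once nothing remains allocated. A sketch of that FIFO bookkeeping on a ring (the reduced struct and the early return are assumptions drawn from the visible lines):

    #include <assert.h>
    #include <stddef.h>

    typedef struct { int in_use; } entry_t;     /* hypothetical entry */

    struct ring_ctl {
        entry_t *base, *lim;    /* ring bounds, lim inclusive */
        entry_t *oldest;        /* oldest live entry, NULL when empty */
        unsigned nalloc, nfree;
    };

    static entry_t *
    ring_next(const struct ring_ctl *ctl, entry_t *e)
    {
        return (++e > ctl->lim) ? ctl->base : e;
    }

    /* Retire an entry; only the oldest one is accepted. */
    static void
    ring_del_oldest(struct ring_ctl *ctl, entry_t *e)
    {
        if (e != ctl->oldest)
            return;

        e->in_use = 0;
        --ctl->nalloc;
        ++ctl->nfree;

        /* Either nothing is left, or the next ring slot becomes oldest. */
        ctl->oldest = ctl->nalloc ? ring_next(ctl, e) : NULL;
    }

    int
    main(void)
    {
        entry_t pool[3] = { {1}, {1}, {0} };
        struct ring_ctl ctl = { pool, &pool[2], &pool[0], 2, 1 };

        ring_del_oldest(&ctl, &pool[0]);
        assert(ctl.oldest == &pool[1] && ctl.nalloc == 1);
        ring_del_oldest(&ctl, &pool[1]);
        assert(ctl.oldest == NULL && ctl.nalloc == 0);
        return 0;
    }
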
705 vtw_inshash_v4(vtw_ctl_t *ctl, vtw_t *vtw)
707 uint32_t idx = vtw_index(ctl, vtw);
713 KASSERT(ctl->clidx == vtw->msl_class);
720 vtw->key = fatp_vtw_inshash(ctl->fat, idx, tag, 0, vtw);
722 db_trace(KTR_VTW, (ctl
731 vtw->port_key = fatp_vtw_inshash(ctl->fat, idx, tag, 1, vtw);
733 db_trace(KTR_VTW, (ctl, "vtw: ins %P - %4.4x tag %8.8x key %8.8x"
744 vtw_inshash_v6(vtw_ctl_t *ctl, vtw_t *vtw)
746 uint32_t idx = vtw_index(ctl, vtw);
752 KASSERT(ctl->clidx == vtw->msl_class);
759 vtw->key = fatp_vtw_inshash(ctl->fat, idx, tag, 0, vtw);
762 vtw->port_key = fatp_vtw_inshash(ctl->fat, idx, tag, 1, vtw);
764 db_trace(KTR_VTW, (ctl, "vtw: ins %P - %4.4x tag %8.8x key %8.8x"
773 vtw_lookup_hash_v4(vtw_ctl_t *ctl, uint32_t faddr, uint16_t fport
784 if (!ctl || !ctl->fat)
791 fp = ctl->fat->port[tag & ctl->fat->mask];
794 fp = ctl->fat->hash[tag & ctl->fat->mask];
814 vtw = vtw_from_index(ctl, idx);
835 == fatp_key(ctl->fat, fp, i))
847 , idx_decode(ctl, idx), vtw->key));
862 , fatp_key(ctl->fat, fp, i)
874 if (vtw->key == fatp_key(ctl->fat, fp, i)) {
891 , fatp_key(ctl->fat, fp, i)
892 , idx_decode(ctl, idx)
901 , idx_decode(ctl, idx)
907 fp = fatp_next(ctl->fat, fp);
926 vtw_lookup_hash_v6(vtw_ctl_t *ctl, const struct in6_addr *faddr, uint16_t fport
939 if (!ctl || !ctl->fat)
944 fp = ctl->fat->port[tag & ctl->fat->mask];
947 fp = ctl->fat->hash[tag & ctl->fat->mask];
967 vtw = vtw_from_index(ctl, idx);
974 , idx_decode(ctl, idx)));
986 == fatp_key(ctl->fat, fp, i))
1004 fp = fatp_next(ctl->fat, fp);
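
Note: vtw_inshash_v4()/_v6() above insert each entry under two keys (key for the full tuple, port_key for port-only matching), and vtw_lookup_hash_v4()/_v6() pick a bucket by masking the hash tag, from fat->port[] for port-only lookups or fat->hash[] otherwise, then walk the bucket's chain with fatp_next() checking each slot. A much-simplified chained lookup in the same shape; the table layout below is hypothetical and is not the fatp slot format (real slots carry a folded tag plus the entry's key):

    #include <stdint.h>
    #include <stdio.h>

    #define SLOTS_PER_NODE 4    /* assumed slots per chain node */

    struct bucket_node {
        uint32_t key[SLOTS_PER_NODE];   /* 0 = empty slot */
        void *val[SLOTS_PER_NODE];
        struct bucket_node *next;
    };

    struct table {
        struct bucket_node **hash;  /* full-tuple buckets */
        struct bucket_node **port;  /* port-only buckets */
        uint32_t mask;              /* bucket count - 1 */
    };

    /* Pick a bucket by tag & mask, then walk its chain comparing slots. */
    static void *
    table_lookup(const struct table *t, uint32_t tag, int port_only)
    {
        struct bucket_node *fp =
            (port_only ? t->port : t->hash)[tag & t->mask];

        for (; fp != NULL; fp = fp->next)
            for (int i = 0; i < SLOTS_PER_NODE; ++i)
                if (fp->key[i] == tag)
                    return fp->val[i];
        return NULL;
    }

    int
    main(void)
    {
        static char hit[] = "hit";
        struct bucket_node node = { .key = { 0x1234 }, .val = { hit } };
        struct bucket_node *hashbk[4] = { NULL }, *portbk[4] = { NULL };
        struct table t = { hashbk, portbk, 3 };

        hashbk[0x1234 & 3] = &node;
        printf("%s\n", (char *)table_lookup(&t, 0x1234, 0));    /* hit */
        return 0;
    }
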
1027 vtw_ctl_t *ctl = it->ctl;
1038 it->fp = ctl->fat->port[tag & ctl->fat->mask];
1063 vtw = vtw_from_index(ctl, idx);
1074 && vtw->port_key == fatp_key(ctl->fat, fp, i)
1109 , idx_decode(ctl, idx)
1115 it->fp = fp = fatp_next(ctl->fat, fp);
1141 vtw_ctl_t *ctl = it->ctl;
1152 it->fp = ctl->fat->port[tag & ctl->fat->mask];
1177 vtw = vtw_from_index(ctl, idx);
1190 , i, idx_decode(ctl, idx)
1194 && vtw->port_key == fatp_key(ctl->fat, fp, i)
1202 , idx_decode(ctl, idx), vtw->key));
1237 , lport, idx_decode(ctl, idx)
1243 it->fp = fp = fatp_next(ctl->fat, fp);
1280 vtw_init(fatp_ctl_t *fat, vtw_ctl_t *ctl, const uint32_t n, vtw_t *ctl_base_v)
1285 ctl->base.v = ctl_base_v;
1287 if (ctl->is_v4) {
1288 ctl->lim.v4 = ctl->base.v4 + n - 1;
1289 ctl->alloc.v4 = ctl->base.v4;
1291 ctl->lim.v6 = ctl->base.v6 + n - 1;
1292 ctl->alloc.v6 = ctl->base.v6;
1295 ctl->nfree = n;
1296 ctl->ctl = ctl;
1298 ctl->idx_bits = 32;
1299 for (ctl->idx_mask = ~0; (ctl->idx_mask & (n-1)) == n-1; ) {
1300 ctl->idx_mask >>= 1;
1301 ctl->idx_bits -= 1;
1304 ctl->idx_mask <<= 1;
1305 ctl->idx_mask |= 1;
1306 ctl->idx_bits += 1;
1308 ctl->fat = fat;
1309 fat->vtw = ctl;
1317 base = ctl->base.v;
1322 ctl[i] = ctl[0];
1323 ctl[i].clidx = i;
1325 ctl[i].base.v = base;
1326 ctl[i].alloc = ctl[i].base;
1331 base = vtw_next(ctl, base);
1334 ctl[i].lim.v = base;
1335 base = vtw_next(ctl, base);
1336 ctl[i].nfree = class_n;
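
Note: vtw_init() above derives idx_mask/idx_bits as the smallest all-ones mask covering n-1 (shrink the mask until it no longer covers n-1, then widen it by one bit), and then carves the n entries into equal per-class sub-ranges, each copy of ctl[0] getting its own clidx, base, alloc, and lim. A standalone version of just the mask computation, with a worked value: for n = 1000 it yields idx_mask = 0x3ff, idx_bits = 10.

    #include <assert.h>
    #include <stdint.h>
    #include <stdio.h>

    /* Smallest all-ones mask (and its width) covering indices 0..n-1,
     * following the shrink-then-widen loop in vtw_init(). */
    static void
    index_mask(uint32_t n, uint32_t *maskp, uint32_t *bitsp)
    {
        uint32_t mask = ~(uint32_t)0;
        uint32_t bits = 32;

        while ((mask & (n - 1)) == n - 1) {
            mask >>= 1;
            bits -= 1;
        }
        mask = (mask << 1) | 1;
        bits += 1;

        *maskp = mask;
        *bitsp = bits;
    }

    int
    main(void)
    {
        uint32_t mask, bits;

        index_mask(1000, &mask, &bits);
        assert(mask == 0x3ff && bits == 10);
        printf("mask %#x bits %u\n", mask, bits);
        return 0;
    }
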
1376 vtw_alloc(vtw_ctl_t *ctl)
1380 int avail = ctl ? (ctl->nalloc + ctl->nfree) : 0;
1387 if (!ctl || !ctl->base.v4 || avail <= 0)
1392 while (!ctl->nfree) {
1393 vtw_age(ctl, 0);
1408 , (ctl, "vtw:!none free in class %x %x/%x"
1409 , ctl->clidx
1410 , ctl->nalloc, ctl->nfree));
1416 vtw = ctl->alloc.v;
1418 if (vtw->msl_class != ctl->clidx) {
1422 KASSERT(!vtw->msl_class || !ctl->clidx);
1432 , (ctl, "vtw:!%p class %x!=%x %x:%x%s"
1433 , vtw, vtw->msl_class, ctl->clidx
1442 , (ctl, "vtw:!%p usurped from %x to %x"
1443 , vtw, vtw->msl_class, ctl->clidx));
1445 vtw->msl_class = ctl->clidx;
1455 ctl->alloc.v = vtw_next(ctl, vtw);
1457 --ctl->nfree;
1458 ++ctl->nalloc;
1460 msl = (2 * class_to_msl(ctl->clidx) * 1000) / PR_SLOWHZ; // msec
1476 if (!ctl->oldest.v)
1477 ctl->oldest.v = vtw;
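
Note: vtw_alloc() above hands out the entry at alloc.v, advances alloc with vtw_next() so allocation marches around the ring, updates the counters, records the first allocation as oldest, and computes the 2*MSL lifetime in milliseconds as 2 * class_to_msl(clidx) * 1000 / PR_SLOWHZ. The sketch below assumes class_to_msl() returns the class MSL in PR_SLOWHZ ticks and that PR_SLOWHZ is 2; with a 60-tick class MSL that works out to 60000 ms:

    #include <assert.h>
    #include <stddef.h>

    #define PR_SLOWHZ 2     /* slow-timeout rate, assumed 2 Hz here */

    typedef struct { unsigned msl_msec; } entry_t;  /* hypothetical entry */

    struct ring_ctl {
        entry_t *base, *lim;    /* ring bounds, lim inclusive */
        entry_t *alloc;         /* next entry to hand out */
        entry_t *oldest;        /* oldest live entry, NULL when empty */
        unsigned nalloc, nfree;
        unsigned msl_ticks;     /* per-class MSL in PR_SLOWHZ ticks (assumed) */
    };

    static entry_t *
    ring_next(const struct ring_ctl *ctl, entry_t *e)
    {
        return (++e > ctl->lim) ? ctl->base : e;
    }

    /* Allocate the next ring slot, in the style of the fragments above. */
    static entry_t *
    ring_alloc(struct ring_ctl *ctl)
    {
        entry_t *e;

        if (ctl->nfree == 0)
            return NULL;        /* the real code first tries to age entries */

        e = ctl->alloc;
        ctl->alloc = ring_next(ctl, e);
        --ctl->nfree;
        ++ctl->nalloc;

        /* 2*MSL lifetime in milliseconds. */
        e->msl_msec = (2 * ctl->msl_ticks * 1000) / PR_SLOWHZ;

        if (ctl->oldest == NULL)
            ctl->oldest = e;
        return e;
    }

    int
    main(void)
    {
        entry_t pool[4];
        struct ring_ctl ctl = { pool, &pool[3], pool, NULL, 0, 4, 60 };
        entry_t *e = ring_alloc(&ctl);

        assert(e == &pool[0] && ctl.oldest == e);
        assert(e->msl_msec == 60000);   /* 60 ticks at 2 Hz -> 2*MSL = 60 s */
        return 0;
    }
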
1485 vtw_age(vtw_ctl_t *ctl, struct timeval *_when)
1491 if (!ctl->oldest.v) {
1492 KASSERT(!ctl->nalloc);
1496 for (vtw = ctl->oldest.v; vtw && ctl->nalloc; ) {
1497 if (++maxtries > ctl->nalloc)
1500 if (vtw->msl_class != ctl->clidx) {
1503 , vtw->msl_class, ctl->clidx));
1508 ctl->oldest.v = vtw = vtw_next(ctl, vtw);
1523 , ctl->clidx
1526 , ctl->nalloc
1527 , ctl->nfree));
1532 vtw_del(ctl, vtw);
1533 vtw = ctl->oldest.v;
1536 return ctl->nalloc; // # remaining allocated
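
Note: vtw_age() above walks forward from oldest, skipping entries that belong to another MSL class, deleting entries as it goes, with a maxtries guard bounded by nalloc, and returns the number of entries still allocated. The expiry comparison itself is not among the matched lines, so the test below is a stand-in assumption; the rest mirrors the visible loop shape:

    #include <assert.h>
    #include <stddef.h>

    typedef struct {
        int cls;        /* MSL class of this entry */
        long expire;    /* hypothetical expiry time */
    } entry_t;

    struct ring_ctl {
        entry_t *base, *lim;
        entry_t *oldest;
        unsigned nalloc, nfree;
        int clidx;      /* class this control block manages */
    };

    static entry_t *
    ring_next(const struct ring_ctl *ctl, entry_t *e)
    {
        return (++e > ctl->lim) ? ctl->base : e;
    }

    static void
    ring_del_oldest(struct ring_ctl *ctl, entry_t *e)
    {
        --ctl->nalloc;
        ++ctl->nfree;
        ctl->oldest = ctl->nalloc ? ring_next(ctl, e) : NULL;
    }

    /* Expire entries older than `now`, oldest first; returns entries left. */
    static unsigned
    ring_age(struct ring_ctl *ctl, long now)
    {
        unsigned maxtries = 0;
        entry_t *e;

        if (ctl->oldest == NULL) {
            assert(ctl->nalloc == 0);
            return 0;
        }

        for (e = ctl->oldest; e != NULL && ctl->nalloc != 0; ) {
            if (++maxtries > ctl->nalloc)
                break;

            if (e->cls != ctl->clidx) {
                /* foreign class: just step past it */
                ctl->oldest = e = ring_next(ctl, e);
                continue;
            }
            if (e->expire > now)
                break;      /* oldest not yet due: done */

            ring_del_oldest(ctl, e);
            e = ctl->oldest;
        }
        return ctl->nalloc;
    }

    int
    main(void)
    {
        entry_t pool[3] = { {0, 10}, {0, 20}, {0, 99} };
        struct ring_ctl ctl = { pool, &pool[2], pool, 3, 0, 0 };

        assert(ring_age(&ctl, 25) == 1);    /* first two entries expire */
        assert(ctl.oldest == &pool[2]);
        return 0;
    }
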
1591 it->ctl = &vtw_tcpv4[0];
1602 vtw_export_v4(vtw_ctl_t *ctl, vtw_t *vtw, vestigial_inpcb_t *res)
1608 if (ctl && vtw) {
1609 if (!ctl->clidx && vtw->msl_class)
1610 ctl += vtw->msl_class;
1612 KASSERT(ctl->clidx == vtw->msl_class);
1622 res->ctl = ctl;
1642 if (it->ctl)
1646 it->ctl = 0;
1648 return vtw_export_v4(it->ctl, vtw, res);
1657 vtw_ctl_t *ctl;
1665 vtw = vtw_lookup_hash_v4((ctl = &vtw_tcpv4[0])
1669 return vtw_export_v4(ctl, vtw, res);
1690 it->ctl = &vtw_tcpv6[0];
1701 vtw_export_v6(vtw_ctl_t *ctl, vtw_t *vtw, vestigial_inpcb_t *res)
1707 if (ctl && vtw) {
1708 if (!ctl->clidx && vtw->msl_class)
1709 ctl += vtw->msl_class;
1711 KASSERT(ctl->clidx == vtw->msl_class);
1721 res->ctl = ctl;
1742 if (it->ctl)
1746 it->ctl = 0;
1748 return vtw_export_v6(it->ctl, vtw, res);
1756 vtw_ctl_t *ctl;
1764 vtw = vtw_lookup_hash_v6((ctl = &vtw_tcpv6[0])
1768 return vtw_export_v6(ctl, vtw, res);
1784 vtw_ctl_t *ctl;
1789 ctl = &vtw_tcpv4[0];
1793 ctl = &vtw_tcpv6[0];
1801 *ctlp = ctl;
1811 vtw_ctl_t *ctl;
1820 if (!vtw_select(af, &fat, &ctl))
1824 KASSERT(fat->base != NULL && ctl->base.v != NULL);
1835 sz = (ctl->is_v4 ? sizeof(vtw_v4_t) : sizeof(vtw_v6_t));
1841 vtw_init(fat, ctl, tcp_vtw_entries, ctl_base_v);
1852 vtw_ctl_t *ctl;
1855 if (!vtw_select(af, &fat, &ctl))
1858 if (!fat->base || !ctl->base.v)
1871 return ctl + msl_class;
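
Note: vtw_control() above (and the ctl += vtw->msl_class adjustment in vtw_export_v4()/_v6()) relies on the per-class control blocks sitting in one contiguous array, so selecting a class is just pointer addition on the base element. A tiny illustration of that layout; the class count and field names are assumptions for this sketch:

    #include <assert.h>

    #define NCLASS 4    /* assumed number of MSL classes */

    struct class_ctl {
        int clidx;      /* class index, 0 .. NCLASS-1 */
        unsigned nfree;
    };

    /* Pick the control block for a class by indexing off the base element. */
    static struct class_ctl *
    control_for_class(struct class_ctl *base, int msl_class)
    {
        assert(0 <= msl_class && msl_class < NCLASS);
        return base + msl_class;
    }

    int
    main(void)
    {
        struct class_ctl ctls[NCLASS] = {
            { 0, 10 }, { 1, 10 }, { 2, 10 }, { 3, 10 },
        };

        assert(control_for_class(&ctls[0], 2) == &ctls[2]);
        return 0;
    }
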
1882 vtw_ctl_t *ctl;
1887 ctl = vtw_control(af, tp->t_msl);
1888 if (!ctl)
1895 vtw = vtw_alloc(ctl);
1918 vtw_inshash_v4(ctl, vtw);
1927 (ctl
1933 (ctl
1991 vtw_inshash_v6(ctl, vtw);
1997 KASSERT(vtw_lookup_hash_v6(ctl
2003 (ctl
2062 vtw_ctl_t *ctl;
2073 ctl = vtw_control(AF_INET, class_to_msl(cp->msl_class));
2074 vtw = vtw_alloc(ctl);
2081 vtw_del(vp->ctl, vp->vtw);
2096 vtw_inshash_v4(ctl, vtw);
2110 vtw_ctl_t *ctl;
2123 ctl = vtw_control(AF_INET6, class_to_msl(cp->msl_class));
2124 vtw = vtw_alloc(ctl);
2131 vtw_del(vp->ctl, vp->vtw);
2146 vtw_inshash_v6(ctl, vtw);
2229 vtw_ctl_t *ctl;
2232 ctl = vtw_control(af, msl ? msl : class_to_msl(msl_class));
2233 if (!ctl)
2236 vtw = vtw_alloc(ctl);
2256 vtw_inshash_v4(ctl, vtw);
2274 vtw_inshash_v6(ctl, vtw);
2375 vtw_ctl_t *ctl;
2381 ctl = &vtw_tcpv4[i];
2383 if (!ctl->base.v || ctl->nalloc)
2386 for (n = 0, vtw = ctl->base.v; ; ) {
2388 vtw = vtw_next(ctl, vtw);
2389 if (vtw == ctl->base.v)
2393 , (ctl, "sanity: class %x n %x nfree %x"
2394 , i, n, ctl->nfree));
2396 KASSERT(n == ctl->nfree);
2400 ctl = &vtw_tcpv6[i];
2402 if (!ctl->base.v || ctl->nalloc)
2405 for (n = 0, vtw = ctl->base.v; ; ) {
2407 vtw = vtw_next(ctl, vtw);
2408 if (vtw == ctl->base.v)
2412 , (ctl, "sanity: class %x n %x nfree %x"
2413 , i, n, ctl->nfree));
2414 KASSERT(n == ctl->nfree);
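
Note: the sanity pass above walks each fully-free class ring with vtw_next() until it arrives back at base, counting slots, and asserts the count equals nfree. The same walk in isolation (entry and struct shapes are hypothetical):

    #include <assert.h>

    typedef struct { int pad; } entry_t;    /* hypothetical entry */

    struct ring_ctl { entry_t *base, *lim; unsigned nfree; };

    static entry_t *
    ring_next(const struct ring_ctl *ctl, entry_t *e)
    {
        return (++e > ctl->lim) ? ctl->base : e;
    }

    /* Count slots by walking once around the ring, as the sanity check does. */
    static unsigned
    ring_count(const struct ring_ctl *ctl)
    {
        unsigned n = 0;
        entry_t *e = ctl->base;

        do {
            ++n;
            e = ring_next(ctl, e);
        } while (e != ctl->base);

        return n;
    }

    int
    main(void)
    {
        entry_t pool[5];
        struct ring_ctl ctl = { pool, &pool[4], 5 };

        assert(ring_count(&ctl) == ctl.nfree);
        return 0;
    }
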