Lines matching defs:sn — every reference to the per-node rate-control state, struct sample_node *sn, in the ath(4) "sample" rate-control module

178 struct sample_node *sn = ATH_NODE_SAMPLE(an);
179 const struct txschedule *sched = &sn->sched[rix0];
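
The matches at 178-179 (and again at 901-902 and 929-930) follow the same idiom: the chosen rate index selects one row of the node's multi-rate-retry (MRR) schedule, and that row supplies the fallback rates and try counts for the frame. A minimal standalone sketch of that lookup is below; the r0/t0..r3/t3 layout echoes field names visible in the listing, but the struct and the table contents are invented placeholders, not the driver's real txschedule definitions.

    #include <stdint.h>
    #include <stdio.h>

    /*
     * Simplified stand-in for one row of a multi-rate-retry (MRR)
     * schedule: four (rate index, try count) pairs.
     */
    struct txschedule {
        uint8_t r0, t0;
        uint8_t r1, t1;
        uint8_t r2, t2;
        uint8_t r3, t3;
    };

    /* Placeholder schedule table indexed by rate index (contents invented). */
    static const struct txschedule sched_demo[] = {
        { 0, 4,  0, 4,  0, 4,  0, 4 },  /* rix 0: lowest rate, no fallback */
        { 1, 4,  0, 4,  0, 4,  0, 4 },  /* rix 1: fall back to rix 0 */
        { 2, 4,  1, 4,  0, 4,  0, 4 },  /* rix 2: step down through 1, then 0 */
    };

    int
    main(void)
    {
        int rix = 2;    /* rate index chosen by the rate-control logic */
        const struct txschedule *sched = &sched_demo[rix];

        /* A row like this is copied into the per-frame MRR series. */
        printf("series: %d/%d %d/%d %d/%d %d/%d\n",
            sched->r0, sched->t0, sched->r1, sched->t1,
            sched->r2, sched->t2, sched->r3, sched->t3);
        return (0);
    }
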
317 struct sample_node *sn = ATH_NODE_SAMPLE(an);
325 for (mask = sn->ratemask, rix = 0; mask != 0; mask >>= 1, rix++) {
335 tt = sn->stats[size_bin][rix].average_tx_time;
338 !sn->stats[size_bin][rix].packets_acked))
342 if (sn->stats[size_bin][rix].total_packets > 0) {
343 pct = sn->stats[size_bin][rix].ewma_pct;
349 if (sn->stats[size_bin][rix].successive_failures > 3)
371 sn->stats[size_bin][best_rate_rix].total_packets,
375 sn->stats[size_bin][rix].total_packets,
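
The block at 317-375 is the best-rate scan: walk the node's usable-rate bitmask and keep the candidate with the lowest average transmit time, filtering out rates that have never been acked, whose EWMA success rate is below a threshold, or that are on a run of successive failures. The standalone sketch below reproduces that loop over simplified per-rate statistics; the field names mirror the listing, but the structures, thresholds, and sample data are illustrative stand-ins rather than the driver's own.

    #include <stdint.h>
    #include <stdio.h>

    #define NRATES 8

    struct rate_stats {             /* simplified per-rate, per-size-bin stats */
        int average_tx_time;        /* EWMA airtime per frame, microseconds */
        int ewma_pct;               /* EWMA success rate, tenths of a percent */
        int successive_failures;
        uint64_t total_packets;
        uint64_t packets_acked;
    };

    /*
     * Pick the usable rate with the lowest average_tx_time, skipping rates
     * that have never been acked, rates whose EWMA success is below the
     * threshold, and rates on a run of successive failures.
     */
    static int
    pick_best_rate(uint64_t ratemask, const struct rate_stats *stats,
        int ewma_threshold)
    {
        uint64_t mask;
        int rix, best_rix = -1, best_tt = 0;

        for (mask = ratemask, rix = 0; mask != 0; mask >>= 1, rix++) {
            if ((mask & 1) == 0)
                continue;
            if (stats[rix].average_tx_time == 0 ||
                stats[rix].packets_acked == 0)
                continue;
            if (stats[rix].total_packets > 0 &&
                stats[rix].ewma_pct < ewma_threshold)
                continue;
            if (stats[rix].successive_failures > 3)
                continue;
            if (best_rix == -1 || stats[rix].average_tx_time < best_tt) {
                best_rix = rix;
                best_tt = stats[rix].average_tx_time;
            }
        }
        return (best_rix);
    }

    int
    main(void)
    {
        struct rate_stats stats[NRATES] = {
            [0] = { 1200, 990, 0, 10, 10 },
            [2] = {  600, 950, 0, 20, 19 },
            [4] = {  400, 400, 0, 30,  9 },   /* fast but lossy: filtered by EWMA */
        };

        printf("best rix = %d\n", pick_best_rate(0x15ULL, stats, 900));
        return (0);
    }
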
420 struct sample_node *sn = ATH_NODE_SAMPLE(an);
425 current_rix = sn->current_rix[size_bin];
432 current_tt = sn->stats[size_bin][current_rix].average_tx_time;
434 rix = sn->last_sample_rix[size_bin]+1; /* next sample rate */
435 mask = sn->ratemask &~ ((uint64_t) 1<<current_rix); /* don't sample current rate */
469 if (sn->stats[size_bin][rix].perfect_tx_time > current_tt) {
475 if (sn->stats[size_bin][rix].successive_failures > ssc->max_successive_failures &&
476 ticks - sn->stats[size_bin][rix].last_tx < ssc->stale_failure_timeout) {
513 sn->last_sample_rix[size_bin] = rix;
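
Lines 420-513 pick the next rate to probe: start one past the last sampled rate, mask the current rate out so it is never re-sampled, and skip rates whose best-case (perfect) transmit time already exceeds the current rate's average, as well as rates that have failed repeatedly and recently. A compressed sketch of that walk follows, under the same simplified-struct assumptions; the wrap-around search order and the threshold values are illustrative only.

    #include <stdint.h>
    #include <stdio.h>

    #define NRATES 8

    struct sample_stats {           /* simplified per-rate stats */
        int perfect_tx_time;        /* best-case airtime, microseconds */
        int successive_failures;
        int last_tx;                /* ticks of the last attempt */
    };

    /*
     * Walk the usable-rate mask starting just past the last sampled rate,
     * never re-sampling the current rate, and skip rates that cannot beat
     * the current rate even in the best case, or whose recent history is
     * nothing but failures.
     */
    static int
    pick_sample_rate(uint64_t ratemask, const struct sample_stats *stats,
        int current_rix, int last_sample_rix, int current_tt, int now,
        int max_successive_failures, int stale_failure_timeout)
    {
        uint64_t mask;
        int rix, checked;

        rix = (last_sample_rix + 1) % NRATES;            /* next sample rate */
        mask = ratemask & ~((uint64_t)1 << current_rix); /* don't sample current rate */

        for (checked = 0; checked < NRATES; checked++, rix = (rix + 1) % NRATES) {
            if ((mask & ((uint64_t)1 << rix)) == 0)
                continue;
            /* Even a perfect transmission would be slower: not worth probing. */
            if (stats[rix].perfect_tx_time > current_tt)
                continue;
            /* Failing repeatedly and recently: leave it alone for a while. */
            if (stats[rix].successive_failures > max_successive_failures &&
                now - stats[rix].last_tx < stale_failure_timeout)
                continue;
            return (rix);
        }
        return (-1);    /* nothing worth sampling right now */
    }

    int
    main(void)
    {
        struct sample_stats stats[NRATES] = {
            [1] = { 500, 0, 0 },
            [3] = { 300, 9, 50 },   /* many failures, but stale: eligible again */
            [5] = { 900, 0, 0 },    /* slower than current even when perfect */
        };

        /* current rate 1 averaging 700us, last sample was rate 1, ticks = 100 */
        printf("sample rix = %d\n",
            pick_sample_rate(0x2aULL, stats, 1, 1, 700, 100, 3, 10));
        return (0);
    }
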
552 struct sample_node *sn = ATH_NODE_SAMPLE(an);
562 sn->static_rix = ath_rate_get_static_rix(sc, ni);
564 sn->static_rix = -1;
580 struct sample_node *sn = ATH_NODE_SAMPLE(an);
585 if ((sn->ratemask & ((uint64_t) 1<<rix)) == 0)
597 sn->stats[size_bin][rix].successive_failures == 0) {
621 struct sample_node *sn = ATH_NODE_SAMPLE(an);
627 if ((sn->ratemask & ((uint64_t) 1<<rix)) == 0)
644 sn->stats[size_bin][rix].successive_failures == 0) {
668 struct sample_node *sn = ATH_NODE_SAMPLE(an);
688 if (sn->currates != sc->sc_currates) {
696 if (sn->static_rix != -1) {
697 rix = sn->static_rix;
704 sn->static_rix,
739 average_tx_time = sn->stats[size_bin][best_rix].average_tx_time;
748 if (sn->sample_tt[size_bin] <
750 (sn->packets_since_sample[size_bin]*ssc->sample_rate/100)) {
756 sn->sample_tt[size_bin],
760 dot11rate(rt, sn->current_rix[size_bin]),
761 dot11rate_label(rt, sn->current_rix[size_bin]));
762 if (rix != sn->current_rix[size_bin]) {
763 sn->current_sample_rix[size_bin] = rix;
765 sn->current_sample_rix[size_bin] = -1;
767 sn->packets_since_sample[size_bin] = 0;
770 if (!sn->packets_sent[size_bin] || best_rix == -1) {
779 } else if (sn->packets_sent[size_bin] < 20) {
785 } else if (ticks - ssc->min_switch > sn->ticks_since_switch[size_bin]) {
790 __func__, ticks - ssc->min_switch, sn->ticks_since_switch[size_bin]);
793 (2*average_tx_time < sn->stats[size_bin][sn->current_rix[size_bin]].average_tx_time)) {
799 2 * average_tx_time, sn->stats[size_bin][sn->current_rix[size_bin]].average_tx_time);
802 int cur_rix = sn->current_rix[size_bin];
803 int cur_att = sn->stats[size_bin][cur_rix].average_tx_time;
829 sn->packets_since_sample[size_bin]++;
832 if (best_rix != sn->current_rix[size_bin]) {
839 dot11rate(rt, sn->current_rix[size_bin]),
840 dot11rate_label(rt, sn->current_rix[size_bin]),
841 sn->stats[size_bin][sn->current_rix[size_bin]].average_tx_time,
842 sn->stats[size_bin][sn->current_rix[size_bin]].perfect_tx_time,
843 sn->stats[size_bin][sn->current_rix[size_bin]].ewma_pct,
846 sn->stats[size_bin][best_rix].average_tx_time,
847 sn->stats[size_bin][best_rix].perfect_tx_time,
848 sn->stats[size_bin][best_rix].ewma_pct,
849 sn->packets_since_switch[size_bin],
852 sn->packets_since_switch[size_bin] = 0;
853 sn->current_rix[size_bin] = best_rix;
854 sn->ticks_since_switch[size_bin] = ticks;
862 rix = sn->current_rix[size_bin];
863 sn->packets_since_switch[size_bin]++;
865 *try0 = mrr ? sn->sched[rix].t0 : ATH_TXMAXTRY;
886 sn->packets_sent[size_bin]++;
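
Lines 668-886 are the driver's main rate-selection routine (ath_rate_findrate()): honor a statically pinned rate if there is one, otherwise decide between probing a sample rate and sending at the current best rate, and switch the current rate when the best-known rate changes. The condensed sketch below keeps only that decision skeleton; the sampling-budget test mirrors the sample_tt versus sample_rate comparison at 748-750, but every structure, parameter, and value here is a simplified stand-in rather than the driver's real state.

    #include <stdio.h>

    /* Simplified per-size-bin slice of the node state (stand-in fields). */
    struct bin_state {
        int current_rix;            /* rate used for normal traffic */
        int sample_tt;              /* airtime spent on sampling so far */
        int packets_since_sample;   /* frames sent since the last probe */
        int average_tx_time;        /* average airtime of the current rate */
    };

    /*
     * Decide what to send next: a pinned static rate wins outright; a
     * sample rate is used while probing has consumed less than
     * sample_rate_pct percent of the airtime; otherwise stick with, or
     * switch to, the best-known rate.
     */
    static int
    findrate(struct bin_state *b, int static_rix, int best_rix,
        int sample_rix, int sample_rate_pct)
    {
        if (static_rix != -1)
            return (static_rix);

        /* Sampling budget: compare probe airtime against a percentage of
         * the airtime normal traffic would have used (cf. lines 748-750). */
        if (sample_rix != -1 &&
            b->sample_tt < b->average_tx_time *
            (b->packets_since_sample * sample_rate_pct / 100)) {
            b->packets_since_sample = 0;
            return (sample_rix);
        }

        b->packets_since_sample++;
        if (best_rix != -1 && best_rix != b->current_rix)
            b->current_rix = best_rix;  /* best rate changed: switch */
        return (b->current_rix);
    }

    int
    main(void)
    {
        struct bin_state bin = { 2, 100, 40, 300 };

        /* 100 < 300 * (40 * 10 / 100) = 1200: the probe budget is not
         * exhausted, so the sample rate wins this round. */
        printf("tx rix = %d\n", findrate(&bin, -1, 2, 5, 10));
        return (0);
    }
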
901 struct sample_node *sn = ATH_NODE_SAMPLE(an);
902 const struct txschedule *sched = &sn->sched[rix0];
929 struct sample_node *sn = ATH_NODE_SAMPLE(an);
930 const struct txschedule *sched = &sn->sched[rix];
969 struct sample_node *sn = ATH_NODE_SAMPLE(an);
980 if (!IS_RATE_DEFINED(sn, rix0))
1010 if (sn->stats[size_bin][rix0].total_packets < ssc->smoothing_minpackets) {
1012 int avg_tx = sn->stats[size_bin][rix0].average_tx_time;
1013 int packets = sn->stats[size_bin][rix0].total_packets;
1014 sn->stats[size_bin][rix0].average_tx_time = (tt+(avg_tx*packets))/(packets+nframes);
1017 sn->stats[size_bin][rix0].average_tx_time =
1018 ((sn->stats[size_bin][rix0].average_tx_time * ssc->smoothing_rate) +
1023 sn->stats[size_bin][rix0].successive_failures += nbad;
1025 sn->stats[size_bin][rix0].packets_acked += (nframes - nbad);
1026 sn->stats[size_bin][rix0].successive_failures = 0;
1028 sn->stats[size_bin][rix0].tries += tries;
1029 sn->stats[size_bin][rix0].last_tx = ticks;
1030 sn->stats[size_bin][rix0].total_packets += nframes;
1039 if (sn->stats[size_bin][rix0].total_packets <
1042 int a_pct = (sn->stats[size_bin][rix0].packets_acked * 1000) /
1043 (sn->stats[size_bin][rix0].total_packets);
1044 sn->stats[size_bin][rix0].ewma_pct = a_pct;
1047 sn->stats[size_bin][rix0].ewma_pct =
1048 ((sn->stats[size_bin][rix0].ewma_pct * ssc->smoothing_rate) +
1068 rix0 == sn->current_sample_rix[size_bin] ? "sample" : "mrr",
1072 sn->stats[size_bin][rix0].average_tx_time,
1073 sn->stats[size_bin][rix0].perfect_tx_time,
1076 if (rix0 == sn->current_sample_rix[size_bin]) {
1077 sn->sample_tt[size_bin] = tt;
1078 sn->current_sample_rix[size_bin] = -1;
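
Lines 969-1078 fold a transmit completion into the per-rate statistics in two regimes: while fewer than smoothing_minpackets frames have been counted, the code keeps a plain running average (1010-1014), and after that it switches to an exponentially weighted moving average weighted by smoothing_rate (1017-1018, 1047-1048). ewma_pct is kept in tenths of a percent, which is why the dump at the bottom of the listing prints it as pct/10 and pct%10. A standalone sketch of both update paths, with invented parameter values and the tries/failure bookkeeping omitted:

    #include <stdio.h>

    /* Simplified per-rate stats (stand-ins for the listed fields). */
    struct rate_stats {
        int average_tx_time;    /* microseconds */
        int ewma_pct;           /* success rate, tenths of a percent */
        long total_packets;
        long packets_acked;
    };

    /*
     * Fold one completion (nframes attempted, nbad of them failed, tt
     * microseconds of airtime) into the running statistics.
     */
    static void
    update_stats(struct rate_stats *s, int tt, int nframes, int nbad,
        int smoothing_rate, long smoothing_minpackets)
    {
        if (s->total_packets < smoothing_minpackets) {
            /* Warm-up: plain running average over all frames so far. */
            s->average_tx_time =
                (tt + s->average_tx_time * s->total_packets) /
                (s->total_packets + nframes);
        } else {
            /* Steady state: keep smoothing_rate percent of the old value
             * and blend in the new observation. */
            s->average_tx_time =
                (s->average_tx_time * smoothing_rate +
                tt * (100 - smoothing_rate)) / 100;
        }

        s->packets_acked += nframes - nbad;
        s->total_packets += nframes;

        if (s->total_packets < smoothing_minpackets) {
            /* Warm-up: cumulative ack ratio, in tenths of a percent. */
            s->ewma_pct = (int)(s->packets_acked * 1000 / s->total_packets);
        } else {
            int pct = (nframes - nbad) * 1000 / nframes;

            s->ewma_pct = (s->ewma_pct * smoothing_rate +
                pct * (100 - smoothing_rate)) / 100;
        }
    }

    int
    main(void)
    {
        struct rate_stats s = { 400, 1000, 200, 198 };
        int i;

        /* Three aggregates of 10 frames each, one failure per aggregate. */
        for (i = 0; i < 3; i++)
            update_stats(&s, 450, 10, 1, 95, 100);
        printf("avg_tx=%dus ewma=%d.%d%%\n",
            s.average_tx_time, s.ewma_pct / 10, s.ewma_pct % 10);
        return (0);
    }
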
1097 struct sample_node *sn = ATH_NODE_SAMPLE(an);
1149 if (sn->ratemask == 0) {
1165 if (!IS_RATE_DEFINED(sn, final_rix)) {
1217 if (rc[i].tries && !IS_RATE_DEFINED(sn, rc[i].rix))
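
The tx-complete path around 1097-1217 refuses to learn from a series it does not recognize: if the node has no usable rates yet, or the final rate or any tried rate in the completed MRR chain is not in the ratemask, the completion is dropped rather than fed into the averages. A small sketch of that guard; the rc_entry type and the mask-based IS_RATE_DEFINED variant are hypothetical simplifications of the driver's own definitions.

    #include <stdbool.h>
    #include <stdint.h>
    #include <stdio.h>

    /* Hypothetical mask-based variant of the driver's IS_RATE_DEFINED(). */
    #define IS_RATE_DEFINED(mask, rix) \
        ((rix) >= 0 && ((mask) & ((uint64_t)1 << (rix))) != 0)

    struct rc_entry {   /* hypothetical completed-MRR-series element */
        int rix;
        int tries;
    };

    /*
     * Accept a completion for statistics only when the node has usable
     * rates and the final rate, plus every rate actually tried in the
     * series, is one this node knows about.
     */
    static bool
    series_is_sane(uint64_t ratemask, int final_rix,
        const struct rc_entry *rc, int nseries)
    {
        int i;

        if (ratemask == 0)
            return (false);
        if (!IS_RATE_DEFINED(ratemask, final_rix))
            return (false);
        for (i = 0; i < nseries; i++)
            if (rc[i].tries && !IS_RATE_DEFINED(ratemask, rc[i].rix))
                return (false);
        return (true);
    }

    int
    main(void)
    {
        struct rc_entry rc[4] = { { 5, 2 }, { 3, 2 }, { 0, 4 }, { 0, 0 } };

        printf("sane: %d\n", series_is_sane(0x29ULL, 3, rc, 4));
        return (0);
    }
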
1301 struct sample_node *sn = ATH_NODE_SAMPLE(an);
1310 sn->sched = mrr_schedules[sc->sc_curmode];
1311 KASSERT(sn->sched != NULL,
1314 sn->static_rix = -1;
1317 sn->currates = sc->sc_currates;
1324 sn->ratemask = 0;
1336 sn->ratemask |= (uint64_t) 1<<rix;
1350 sn->ratemask |= (uint64_t) 1<<rix;
1358 for (mask = sn->ratemask, rix = 0; mask != 0; mask >>= 1, rix++) {
1372 sn->packets_sent[y] = 0;
1373 sn->current_sample_rix[y] = -1;
1374 sn->last_sample_rix[y] = 0;
1376 sn->current_rix[y] = ffs(sn->ratemask)-1;
1382 for (rix = 0, mask = sn->ratemask; mask != 0; rix++, mask >>= 1) {
1385 sn->stats[y][rix].successive_failures = 0;
1386 sn->stats[y][rix].tries = 0;
1387 sn->stats[y][rix].total_packets = 0;
1388 sn->stats[y][rix].packets_acked = 0;
1389 sn->stats[y][rix].last_tx = 0;
1390 sn->stats[y][rix].ewma_pct = 0;
1392 sn->stats[y][rix].perfect_tx_time =
1395 sn->stats[y][rix].average_tx_time =
1396 sn->stats[y][rix].perfect_tx_time;
1403 sn->num_rates,
1405 sn->stats[1][0].perfect_tx_time,
1406 DOT11RATE(sn->num_rates-1)/2, DOT11RATE(sn->num_rates-1) % 2 ? ".5" : "",
1407 sn->stats[1][sn->num_rates-1].perfect_tx_time
1411 if (sn->static_rix != -1)
1412 ni->ni_txrate = DOT11RATE(sn->static_rix);
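
Lines 1301-1412 are the node-reset path (ath_rate_ctl_reset()): select the MRR schedule for the current PHY mode, rebuild the usable-rate bitmask from the negotiated rate set, start each size bin at the lowest usable rate, and seed every defined rate's average_tx_time with its perfect_tx_time so untried rates are ranked by their theoretical speed until real data arrives. The sketch below covers only the bitmask and seeding steps; the rate list, bin sizes, and the airtime helper are invented placeholders for the driver's real calculations.

    #include <stdint.h>
    #include <stdio.h>
    #include <string.h>
    #include <strings.h>    /* ffs() */

    #define NRATES  8
    #define NBINS   4       /* packet-size bins */

    struct rate_stats {
        int perfect_tx_time;
        int average_tx_time;
    };

    /* Placeholder for the driver's airtime calculation. */
    static int
    calc_perfect_tx_time(int rix, int size)
    {
        return (size * 8 / (rix + 1));  /* invented, merely monotone in rix */
    }

    int
    main(void)
    {
        static const int bin_size[NBINS] = { 250, 1600, 3000, 64000 };
        static const int negotiated[] = { 0, 2, 4, 7 }; /* example rate indexes */
        struct rate_stats stats[NBINS][NRATES];
        uint64_t ratemask = 0, mask;
        int i, y, rix, current_rix;

        memset(stats, 0, sizeof(stats));

        /* Rebuild the usable-rate bitmask from the negotiated rate set. */
        for (i = 0; i < (int)(sizeof(negotiated) / sizeof(negotiated[0])); i++)
            ratemask |= (uint64_t)1 << negotiated[i];

        /* Start every size bin at the lowest usable rate; ffs() suffices
         * here because the example mask fits in an int. */
        current_rix = ffs((int)ratemask) - 1;

        /* Seed each defined rate's average with its best-case airtime. */
        for (y = 0; y < NBINS; y++) {
            for (rix = 0, mask = ratemask; mask != 0; rix++, mask >>= 1) {
                if ((mask & 1) == 0)
                    continue;
                stats[y][rix].perfect_tx_time =
                    calc_perfect_tx_time(rix, bin_size[y]);
                stats[y][rix].average_tx_time =
                    stats[y][rix].perfect_tx_time;
            }
        }

        printf("ratemask=0x%jx current_rix=%d seed[1][7]=%dus\n",
            (uintmax_t)ratemask, current_rix, stats[1][7].average_tx_time);
        return (0);
    }
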
1433 struct sample_node *sn = ATH_NODE_SAMPLE(an);
1506 error = copyout(sn, rs->buf + o, sizeof(struct sample_node));
1521 struct sample_node *sn = ATH_NODE_SAMPLE(ATH_NODE(ni));
1527 dot11rate(rt, sn->static_rix),
1528 dot11rate_label(rt, sn->static_rix),
1529 (uintmax_t)sn->ratemask);
1532 bin_to_size(y), sn->current_rix[y],
1533 dot11rate(rt, sn->current_rix[y]),
1534 dot11rate_label(rt, sn->current_rix[y]),
1535 sn->packets_since_switch[y], sn->ticks_since_switch[y]);
1538 dot11rate(rt, sn->last_sample_rix[y]),
1539 dot11rate_label(rt, sn->last_sample_rix[y]),
1540 dot11rate(rt, sn->current_sample_rix[y]),
1541 dot11rate_label(rt, sn->current_sample_rix[y]),
1542 sn->packets_sent[y]);
1544 bin_to_size(y), sn->packets_since_sample[y],
1545 sn->sample_tt[y]);
1547 for (mask = sn->ratemask, rix = 0; mask != 0; mask >>= 1, rix++) {
1551 if (sn->stats[y][rix].total_packets == 0)
1556 (uintmax_t) sn->stats[y][rix].total_packets,
1557 (uintmax_t) sn->stats[y][rix].packets_acked,
1558 (int) ((sn->stats[y][rix].packets_acked * 100ULL) /
1559 sn->stats[y][rix].total_packets),
1560 sn->stats[y][rix].ewma_pct / 10,
1561 sn->stats[y][rix].ewma_pct % 10,
1562 (uintmax_t) sn->stats[y][rix].tries,
1563 sn->stats[y][rix].successive_failures,
1564 sn->stats[y][rix].average_tx_time,
1565 ticks - sn->stats[y][rix].last_tx);
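
The per-node dump at 1521-1565 skips rates that have carried no traffic and prints two success figures for each remaining rate: a lifetime percentage recomputed from the raw counters and the EWMA, which is stored in tenths of a percent (hence the pct/10 and pct%10 pair). A two-line illustration of those conversions, with made-up counter values:

    #include <stdint.h>
    #include <stdio.h>

    int
    main(void)
    {
        uint64_t total_packets = 3000000000ULL, packets_acked = 2950000000ULL;
        int ewma_pct = 983;     /* stored in tenths of a percent */

        /* Lifetime ack percentage from the raw counters, then the EWMA
         * printed as whole.tenths percent. */
        printf("lifetime %d%% ewma %d.%d%%\n",
            (int)((packets_acked * 100ULL) / total_packets),
            ewma_pct / 10, ewma_pct % 10);
        return (0);
    }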