/* SPDX-License-Identifier: BSD-3-Clause
 * Copyright(C) 2021 Marvell.
 */

#include "roc_api.h"
#include "roc_priv.h"

int
roc_nix_tm_sq_aura_fc(struct roc_nix_sq *sq, bool enable)
{
	struct npa_cn20k_aq_enq_req *req_cn20k;
	struct npa_aq_enq_req *req;
	struct npa_aq_enq_rsp *rsp;
	uint64_t aura_handle;
	struct npa_lf *lf;
	struct mbox *mbox;
	int rc = -ENOSPC;

	plt_tm_dbg("Setting SQ %u SQB aura FC to %s", sq->qid,
		   enable ? "enable" : "disable");

	lf = idev_npa_obj_get();
	if (!lf)
		return NPA_ERR_DEVICE_NOT_BOUNDED;

	mbox = mbox_get(lf->mbox);
	/* Set/clear sqb aura fc_ena */
	aura_handle = sq->aura_handle;
	if (roc_model_is_cn20k()) {
		req_cn20k = mbox_alloc_msg_npa_cn20k_aq_enq(mbox);
		req = (struct npa_aq_enq_req *)req_cn20k;
	} else {
		req = mbox_alloc_msg_npa_aq_enq(mbox);
	}
	if (req == NULL)
		goto exit;

	req->aura_id = roc_npa_aura_handle_to_aura(aura_handle);
	req->ctype = NPA_AQ_CTYPE_AURA;
	req->op = NPA_AQ_INSTOP_WRITE;
	/* Below is not needed for aura writes but AF driver needs it */
	/* AF will translate to associated poolctx */
	req->aura.pool_addr = req->aura_id;

	req->aura.fc_ena = enable;
	req->aura_mask.fc_ena = 1;
	if (roc_model_is_cn9k() || roc_errata_npa_has_no_fc_stype_ststp()) {
		req->aura.fc_stype = 0x0;      /* STF */
		req->aura_mask.fc_stype = 0x0; /* STF */
	} else {
		req->aura.fc_stype = 0x3;      /* STSTP */
		req->aura_mask.fc_stype = 0x3; /* STSTP */
	}

	rc = mbox_process(mbox);
	if (rc)
		goto exit;

	/* Read back npa aura ctx */
	if (enable) {
		if (roc_model_is_cn20k()) {
			req_cn20k = mbox_alloc_msg_npa_cn20k_aq_enq(mbox);
			req = (struct npa_aq_enq_req *)req_cn20k;
		} else {
			req = mbox_alloc_msg_npa_aq_enq(mbox);
		}
		if (req == NULL) {
			rc = -ENOSPC;
			goto exit;
		}

		req->aura_id = roc_npa_aura_handle_to_aura(aura_handle);
		req->ctype = NPA_AQ_CTYPE_AURA;
		req->op = NPA_AQ_INSTOP_READ;

		rc = mbox_process_msg(mbox, (void *)&rsp);
		if (rc)
			goto exit;

		/* Init when enabled as there might be no triggers */
		*(volatile uint64_t *)sq->fc = rsp->aura.count;
	} else {
		*(volatile uint64_t *)sq->fc = sq->aura_sqb_bufs;
	}
	/* Sync write barrier */
	plt_wmb();
	rc = 0;
exit:
	mbox_put(mbox);
	return rc;
}
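
/*
 * Illustrative caller-side sketch (not part of the driver): SQB aura flow
 * control is typically toggled around SQ start/stop by the layer that owns
 * the struct roc_nix_sq, e.g.:
 *
 *	rc = roc_nix_tm_sq_aura_fc(sq, true);
 *	if (rc)
 *		plt_err("Failed to enable sqb aura fc, rc=%d", rc);
 */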

int
roc_nix_tm_free_resources(struct roc_nix *roc_nix, bool hw_only)
{
	struct nix *nix = roc_nix_to_nix_priv(roc_nix);

	if (nix->tm_flags & NIX_TM_HIERARCHY_ENA)
		return -EBUSY;

	return nix_tm_free_resources(roc_nix, BIT(ROC_NIX_TM_USER), hw_only);
}
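
/*
 * Packet-mode (pps) profiles are expressed to HW in byte units. The helper
 * below multiplies the user rates by 8 and, when the result is still below
 * NIX_TM_MIN_SHAPER_RATE, derives a packet length adjust so the shaper can
 * operate in its supported range. Illustrative arithmetic (example numbers
 * only): with adjust = NIX_TM_MIN_SHAPER_RATE / (8 * min pps rate), the
 * programmed rate grows by a factor of (1 + adjust), pkt_mode_adj grows by
 * adjust, and the burst sizes are scaled by adjust to keep the number of
 * tokens freed per scheduling event consistent.
 */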

static int
nix_tm_adjust_shaper_pps_rate(struct nix_tm_shaper_profile *profile)
{
	uint64_t min_rate = profile->commit.rate;

	if (!profile->pkt_mode)
		return 0;

	profile->pkt_mode_adj = 1;

	if (profile->commit.rate &&
	    (profile->commit.rate < NIX_TM_MIN_SHAPER_PPS_RATE ||
	     profile->commit.rate > NIX_TM_MAX_SHAPER_PPS_RATE))
		return NIX_ERR_TM_INVALID_COMMIT_RATE;

	if (profile->peak.rate &&
	    (profile->peak.rate < NIX_TM_MIN_SHAPER_PPS_RATE ||
	     profile->peak.rate > NIX_TM_MAX_SHAPER_PPS_RATE))
		return NIX_ERR_TM_INVALID_PEAK_RATE;

	if (profile->peak.rate && min_rate > profile->peak.rate)
		min_rate = profile->peak.rate;

	/* Each packet accumulates a single count, whereas HW
	 * considers each unit as a byte, so we need to convert
	 * user pps to bps
	 */
	profile->commit.rate = profile->commit.rate * 8;
	profile->peak.rate = profile->peak.rate * 8;
	min_rate = min_rate * 8;

	if (min_rate && (min_rate < NIX_TM_MIN_SHAPER_RATE)) {
		int adjust = NIX_TM_MIN_SHAPER_RATE / min_rate;

		if (adjust > NIX_TM_LENGTH_ADJUST_MAX)
			return NIX_ERR_TM_SHAPER_PKT_LEN_ADJUST;

		profile->pkt_mode_adj += adjust;
		profile->commit.rate += (adjust * profile->commit.rate);
		profile->peak.rate += (adjust * profile->peak.rate);
		/* Number of tokens freed after scheduling is proportional
		 * to the adjust value
		 */
		profile->commit.size *= adjust;
		profile->peak.size *= adjust;
	}

	return 0;
}

static int
nix_tm_shaper_profile_add(struct roc_nix *roc_nix,
			  struct nix_tm_shaper_profile *profile, int skip_ins)
{
	struct nix *nix = roc_nix_to_nix_priv(roc_nix);
	uint64_t commit_rate, commit_sz;
	uint64_t min_burst, max_burst;
	uint64_t peak_rate, peak_sz;
	uint32_t id;
	int rc;

	id = profile->id;
	rc = nix_tm_adjust_shaper_pps_rate(profile);
	if (rc)
		return rc;

	commit_rate = profile->commit.rate;
	commit_sz = profile->commit.size;
	peak_rate = profile->peak.rate;
	peak_sz = profile->peak.size;

	min_burst = NIX_TM_MIN_SHAPER_BURST;
	max_burst = roc_nix_tm_max_shaper_burst_get();

	if (nix_tm_shaper_profile_search(nix, id) && !skip_ins)
		return NIX_ERR_TM_SHAPER_PROFILE_EXISTS;

	if (profile->pkt_len_adj < NIX_TM_LENGTH_ADJUST_MIN ||
	    profile->pkt_len_adj > NIX_TM_LENGTH_ADJUST_MAX)
		return NIX_ERR_TM_SHAPER_PKT_LEN_ADJUST;

	/* We cannot support both pkt length adjust and pkt mode */
	if (profile->pkt_mode && profile->pkt_len_adj)
		return NIX_ERR_TM_SHAPER_PKT_LEN_ADJUST;

	/* commit rate and burst size can be enabled/disabled */
	if (commit_rate || commit_sz) {
		if (commit_sz < min_burst || commit_sz > max_burst)
			return NIX_ERR_TM_INVALID_COMMIT_SZ;
		else if (!nix_tm_shaper_rate_conv(commit_rate, NULL, NULL, NULL,
						  profile->accuracy))
			return NIX_ERR_TM_INVALID_COMMIT_RATE;
	}

	/* Peak rate and burst size can be enabled/disabled */
	if (peak_sz || peak_rate) {
		if (peak_sz < min_burst || peak_sz > max_burst)
			return NIX_ERR_TM_INVALID_PEAK_SZ;
		else if (!nix_tm_shaper_rate_conv(peak_rate, NULL, NULL, NULL,
						  profile->accuracy))
			return NIX_ERR_TM_INVALID_PEAK_RATE;
	}

	/* If PIR and CIR are requested, PIR should always be larger than CIR */
	if (peak_rate && commit_rate && (commit_rate > peak_rate))
		return NIX_ERR_TM_INVALID_PEAK_RATE;

	if (!skip_ins)
		TAILQ_INSERT_TAIL(&nix->shaper_profile_list, profile, shaper);

	plt_tm_dbg("Added TM shaper profile %u, "
		   " pir %" PRIu64 " , pbs %" PRIu64 ", cir %" PRIu64
		   ", cbs %" PRIu64 " , adj %u, pkt_mode %u",
		   id, profile->peak.rate, profile->peak.size,
		   profile->commit.rate, profile->commit.size,
		   profile->pkt_len_adj, profile->pkt_mode);

	/* Always use PIR for single rate shaping */
	if (!peak_rate && commit_rate) {
		profile->peak.rate = profile->commit.rate;
		profile->peak.size = profile->commit.size;
		profile->commit.rate = 0;
		profile->commit.size = 0;
	}

	/* update min rate */
	nix->tm_rate_min = nix_tm_shaper_profile_rate_min(nix);
	return 0;
}
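
/*
 * Illustrative usage of the public profile API (example values only, units
 * follow the ROC TM convention used elsewhere in this file): the caller
 * fills a persistent struct roc_nix_tm_shaper_profile, e.g. id = 1 with only
 * peak_rate/peak_sz set, and calls roc_nix_tm_shaper_profile_add(). With no
 * commit pair configured, single-rate shaping ends up programmed via PIR
 * only, as handled by the helper above.
 */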

int
roc_nix_tm_shaper_profile_add(struct roc_nix *roc_nix,
			      struct roc_nix_tm_shaper_profile *roc_profile)
{
	struct nix_tm_shaper_profile *profile;

	profile = (struct nix_tm_shaper_profile *)roc_profile->reserved;

	profile->ref_cnt = 0;
	profile->id = roc_profile->id;
	profile->commit.rate = roc_profile->commit_rate;
	profile->peak.rate = roc_profile->peak_rate;
	profile->commit.size = roc_profile->commit_sz;
	profile->peak.size = roc_profile->peak_sz;
	profile->pkt_len_adj = roc_profile->pkt_len_adj;
	profile->pkt_mode = roc_profile->pkt_mode;
	profile->free_fn = roc_profile->free_fn;
	profile->accuracy = roc_profile->accuracy;

	return nix_tm_shaper_profile_add(roc_nix, profile, 0);
}

int
roc_nix_tm_shaper_profile_update(struct roc_nix *roc_nix,
				 struct roc_nix_tm_shaper_profile *roc_profile)
{
	struct nix_tm_shaper_profile *profile;

	profile = (struct nix_tm_shaper_profile *)roc_profile->reserved;

	profile->commit.rate = roc_profile->commit_rate;
	profile->peak.rate = roc_profile->peak_rate;
	profile->commit.size = roc_profile->commit_sz;
	profile->peak.size = roc_profile->peak_sz;
	profile->pkt_len_adj = roc_profile->pkt_len_adj;
	profile->accuracy = roc_profile->accuracy;

	return nix_tm_shaper_profile_add(roc_nix, profile, 1);
}

int
roc_nix_tm_shaper_profile_delete(struct roc_nix *roc_nix, uint32_t id)
{
	struct nix *nix = roc_nix_to_nix_priv(roc_nix);
	struct nix_tm_shaper_profile *profile;

	profile = nix_tm_shaper_profile_search(nix, id);
	if (!profile)
		return NIX_ERR_TM_INVALID_SHAPER_PROFILE;

	if (profile->ref_cnt)
		return NIX_ERR_TM_SHAPER_PROFILE_IN_USE;

	plt_tm_dbg("Removing TM shaper profile %u", id);
	TAILQ_REMOVE(&nix->shaper_profile_list, profile, shaper);
	nix_tm_shaper_profile_free(profile);

	/* update min rate */
	nix->tm_rate_min = nix_tm_shaper_profile_rate_min(nix);
	return 0;
}

int
roc_nix_tm_node_add(struct roc_nix *roc_nix, struct roc_nix_tm_node *roc_node)
{
	struct nix_tm_node *node;

	node = (struct nix_tm_node *)&roc_node->reserved;
	node->id = roc_node->id;
	node->priority = roc_node->priority;
	node->weight = roc_node->weight;
	node->lvl = roc_node->lvl;
	node->parent_id = roc_node->parent_id;
	node->shaper_profile_id = roc_node->shaper_profile_id;
	node->pkt_mode = roc_node->pkt_mode;
	node->pkt_mode_set = roc_node->pkt_mode_set;
	node->free_fn = roc_node->free_fn;
	node->tree = ROC_NIX_TM_USER;
	node->rel_chan = NIX_TM_CHAN_INVALID;

	return nix_tm_node_add(roc_nix, node);
}
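
/*
 * Illustrative sketch of building a user tree with roc_nix_tm_node_add()
 * (IDs and level layout are examples, not requirements): a root node is
 * added first with an invalid parent id, then intermediate scheduler nodes
 * with the root as parent, and finally one leaf per SQ whose node id equals
 * the SQ qid. The nodes land on the ROC_NIX_TM_USER tree and take effect
 * only once roc_nix_tm_hierarchy_enable() is called for that tree.
 */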

int
roc_nix_tm_node_pkt_mode_update(struct roc_nix *roc_nix, uint32_t node_id,
				bool pkt_mode)
{
	struct nix *nix = roc_nix_to_nix_priv(roc_nix);
	struct nix_tm_node *node, *child;
	struct nix_tm_node_list *list;
	int num_children = 0;

	node = nix_tm_node_search(nix, node_id, ROC_NIX_TM_USER);
	if (!node)
		return NIX_ERR_TM_INVALID_NODE;

	if (node->pkt_mode == pkt_mode) {
		node->pkt_mode_set = true;
		return 0;
	}

	/* Check for any existing children; if there are any, we cannot
	 * update the pkt mode, as the children's quantum has already
	 * been taken into account.
	 */
	list = nix_tm_node_list(nix, ROC_NIX_TM_USER);
	TAILQ_FOREACH(child, list, node) {
		if (child->parent == node)
			num_children++;
	}

	/* Cannot update mode if it has children or tree is enabled */
	if ((nix->tm_flags & NIX_TM_HIERARCHY_ENA) && num_children)
		return -EBUSY;

	if (node->pkt_mode_set && num_children)
		return NIX_ERR_TM_PKT_MODE_MISMATCH;

	node->pkt_mode = pkt_mode;
	node->pkt_mode_set = true;

	return 0;
}

int
roc_nix_tm_node_name_get(struct roc_nix *roc_nix, uint32_t node_id, char *buf,
			 size_t buflen)
{
	struct nix *nix = roc_nix_to_nix_priv(roc_nix);
	struct nix_tm_node *node;

	node = nix_tm_node_search(nix, node_id, ROC_NIX_TM_USER);
	if (!node) {
		plt_strlcpy(buf, "???", buflen);
		return NIX_ERR_TM_INVALID_NODE;
	}

	if (node->hw_lvl == NIX_TXSCH_LVL_CNT)
		snprintf(buf, buflen, "SQ_%d", node->id);
	else
		snprintf(buf, buflen, "%s_%d", nix_tm_hwlvl2str(node->hw_lvl),
			 node->hw_id);
	return 0;
}

int
roc_nix_tm_node_delete(struct roc_nix *roc_nix, uint32_t node_id, bool free)
{
	return nix_tm_node_delete(roc_nix, node_id, ROC_NIX_TM_USER, free);
}

int
roc_nix_smq_flush(struct roc_nix *roc_nix)
{
	struct nix *nix = roc_nix_to_nix_priv(roc_nix);
	struct nix_tm_node_list *list;
	enum roc_nix_tm_tree tree;
	struct nix_tm_node *node;
	int rc = 0;

	if (!(nix->tm_flags & NIX_TM_HIERARCHY_ENA))
		return 0;

	tree = nix->tm_tree;
	list = nix_tm_node_list(nix, tree);

	/* XOFF & Flush all SMQ's. HRM mandates
	 * all SQ's empty before SMQ flush is issued.
	 */
	TAILQ_FOREACH(node, list, node) {
		if (node->hw_lvl != NIX_TXSCH_LVL_SMQ)
			continue;
		if (!(node->flags & NIX_TM_NODE_HWRES))
			continue;

		rc = nix_tm_smq_xoff(nix, node, true);
		if (rc) {
			plt_err("Failed to disable smq %u, rc=%d", node->hw_id,
				rc);
			goto exit;
		}
	}

	/* XON all SMQ's */
	TAILQ_FOREACH(node, list, node) {
		if (node->hw_lvl != NIX_TXSCH_LVL_SMQ)
			continue;
		if (!(node->flags & NIX_TM_NODE_HWRES))
			continue;

		rc = nix_tm_smq_xoff(nix, node, false);
		if (rc) {
			plt_err("Failed to enable smq %u, rc=%d", node->hw_id,
				rc);
			goto exit;
		}
	}
exit:
	return rc;
}
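
/*
 * Teardown order used below (summarized for readability): temporarily enable
 * CGX RX so in-flight packets can drain, XON all SMQs, drop per-SQ
 * backpressure, disable and drain every SQ, then XOFF+flush the SMQs and
 * verify via NIX_LF_SQ_OP_STATUS that each SQ really emptied before the
 * hierarchy flag is cleared.
 */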

int
roc_nix_tm_hierarchy_disable(struct roc_nix *roc_nix)
{
	struct nix *nix = roc_nix_to_nix_priv(roc_nix);
	uint16_t sqb_cnt, head_off, tail_off;
	uint16_t sq_cnt = nix->nb_tx_queues;
	struct mbox *mbox = (&nix->dev)->mbox;
	struct nix_tm_node_list *list;
	enum roc_nix_tm_tree tree;
	struct nix_tm_node *node;
	struct roc_nix_sq *sq;
	uint64_t wdata, val;
	uintptr_t regaddr;
	int rc = -1, i;

	if (!(nix->tm_flags & NIX_TM_HIERARCHY_ENA))
		return 0;

	plt_tm_dbg("Disabling hierarchy on %s", nix->pci_dev->name);

	tree = nix->tm_tree;
	list = nix_tm_node_list(nix, tree);

	/* Enable CGX RXTX to drain pkts */
	if (!roc_nix->io_enabled) {
		/* Though this enables both RX MCAM entries and the CGX link,
		 * we assume all the Rx queues were already stopped.
		 */
		mbox_alloc_msg_nix_lf_start_rx(mbox_get(mbox));
		rc = mbox_process(mbox);
		if (rc) {
			mbox_put(mbox);
			plt_err("cgx start failed, rc=%d", rc);
			return rc;
		}
		mbox_put(mbox);
	}

	/* XON all SMQ's */
	TAILQ_FOREACH(node, list, node) {
		if (node->hw_lvl != NIX_TXSCH_LVL_SMQ)
			continue;
		if (!(node->flags & NIX_TM_NODE_HWRES))
			continue;

		rc = nix_tm_smq_xoff(nix, node, false);
		if (rc) {
			plt_err("Failed to enable smq %u, rc=%d", node->hw_id,
				rc);
			goto cleanup;
		}
	}

	/* Disable backpressure, it will be enabled back if needed on
	 * hierarchy enable
	 */
	for (i = 0; i < sq_cnt; i++) {
		sq = nix->sqs[i];
		if (!sq)
			continue;

		rc = nix_tm_bp_config_set(roc_nix, sq->qid, 0, false);
		if (rc && rc != -ENOENT) {
			plt_err("Failed to disable backpressure, rc=%d", rc);
			goto cleanup;
		}
	}

	/* Flush all tx queues */
	for (i = 0; i < sq_cnt; i++) {
		sq = nix->sqs[i];
		if (!sq)
			continue;

		rc = roc_nix_sq_ena_dis(sq, false);
		if (rc) {
			plt_err("Failed to disable sqb aura fc, rc=%d", rc);
			goto cleanup;
		}

		/* Wait for sq entries to be flushed */
		rc = roc_nix_tm_sq_flush_spin(sq);
		if (rc) {
			plt_err("Failed to drain sq, rc=%d", rc);
			goto cleanup;
		}
	}

	/* XOFF & Flush all SMQ's. HRM mandates
	 * all SQ's empty before SMQ flush is issued.
	 */
	TAILQ_FOREACH(node, list, node) {
		if (node->hw_lvl != NIX_TXSCH_LVL_SMQ)
			continue;
		if (!(node->flags & NIX_TM_NODE_HWRES))
			continue;

		rc = nix_tm_smq_xoff(nix, node, true);
		if (rc) {
			plt_err("Failed to disable smq %u, rc=%d", node->hw_id,
				rc);
			goto cleanup;
		}

		node->flags &= ~NIX_TM_NODE_ENABLED;
	}

	/* Verify sanity of all tx queues */
	for (i = 0; i < sq_cnt; i++) {
		sq = nix->sqs[i];
		if (!sq)
			continue;

		wdata = ((uint64_t)sq->qid << 32);
		regaddr = nix->base + NIX_LF_SQ_OP_STATUS;
		val = roc_atomic64_add_nosync(wdata, (int64_t *)regaddr);

		sqb_cnt = val & 0xFFFF;
		head_off = (val >> 20) & 0x3F;
		tail_off = (val >> 28) & 0x3F;

		if (sqb_cnt > 1 || head_off != tail_off ||
		    (*(uint64_t *)sq->fc != sq->aura_sqb_bufs))
			plt_err("Failed to gracefully flush sq %u", sq->qid);
	}

	nix->tm_flags &= ~NIX_TM_HIERARCHY_ENA;
cleanup:
	/* Restore cgx state */
	if (!roc_nix->io_enabled) {
		mbox_alloc_msg_nix_lf_stop_rx(mbox_get(mbox));
		rc |= mbox_process(mbox);
		mbox_put(mbox);
	}
	return rc;
}

int
roc_nix_tm_hierarchy_xmit_enable(struct roc_nix *roc_nix, enum roc_nix_tm_tree tree)
{
	struct nix *nix = roc_nix_to_nix_priv(roc_nix);
	struct nix_tm_node_list *list;
	struct nix_tm_node *node;
	struct roc_nix_sq *sq;
	uint16_t sq_id;
	int rc;

	if (tree >= ROC_NIX_TM_TREE_MAX)
		return NIX_ERR_PARAM;

	list = nix_tm_node_list(nix, tree);

	/* Update SQ Sched Data while SQ is idle */
	TAILQ_FOREACH(node, list, node) {
		if (!nix_tm_is_leaf(nix, node->lvl))
			continue;

		rc = nix_tm_sq_sched_conf(nix, node, false);
		if (rc) {
			plt_err("SQ %u sched update failed, rc=%d", node->id,
				rc);
			return rc;
		}
	}

	/* Finally XON all SMQ's */
	TAILQ_FOREACH(node, list, node) {
		if (node->hw_lvl != NIX_TXSCH_LVL_SMQ)
			continue;

		rc = nix_tm_smq_xoff(nix, node, false);
		if (rc) {
			plt_err("Failed to enable smq %u, rc=%d", node->hw_id,
				rc);
			return rc;
		}
	}

	/* Enable xmit as all the topology is ready */
	TAILQ_FOREACH(node, list, node) {
		if (!nix_tm_is_leaf(nix, node->lvl))
			continue;

		sq_id = node->id;
		sq = nix->sqs[sq_id];

		rc = roc_nix_sq_ena_dis(sq, true);
		if (rc) {
			plt_err("TM sw xon failed on SQ %u, rc=%d", node->id,
				rc);
			return rc;
		}
		node->flags |= NIX_TM_NODE_ENABLED;
	}

	return 0;
}
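
/*
 * Illustrative bring-up order (caller-side sketch): roc_nix_tm_init() to lay
 * out the default tree, optional roc_nix_tm_node_add() calls for a user
 * tree, then roc_nix_tm_hierarchy_enable(roc_nix, tree, xmit_enable). When
 * xmit_enable is false, transmission can be turned on later via
 * roc_nix_tm_hierarchy_xmit_enable() once the SQs are configured.
 */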

int
roc_nix_tm_hierarchy_enable(struct roc_nix *roc_nix, enum roc_nix_tm_tree tree,
			    bool xmit_enable)
{
	struct nix *nix = roc_nix_to_nix_priv(roc_nix);
	struct nix_tm_node_list *list;
	struct nix_tm_node *node;
	uint32_t tree_mask;
	int rc;

	if (tree >= ROC_NIX_TM_TREE_MAX)
		return NIX_ERR_PARAM;

	if (nix->tm_flags & NIX_TM_HIERARCHY_ENA) {
		if (nix->tm_tree != tree)
			return -EBUSY;
		return 0;
	}

	plt_tm_dbg("Enabling hierarchy on %s, xmit_ena %u, tree %u",
		   nix->pci_dev->name, xmit_enable, tree);

	/* Free hw resources of other trees */
	tree_mask = NIX_TM_TREE_MASK_ALL;
	tree_mask &= ~BIT(tree);

	rc = nix_tm_free_resources(roc_nix, tree_mask, true);
	if (rc) {
		plt_err("failed to free resources of other trees, rc=%d", rc);
		return rc;
	}

	/* Update active tree before starting to do anything */
	nix->tm_tree = tree;

	nix_tm_update_parent_info(nix, tree);

	rc = nix_tm_alloc_txschq(nix, tree);
	if (rc) {
		plt_err("TM failed to alloc tm resources=%d", rc);
		return rc;
	}

	rc = nix_tm_assign_resources(nix, tree);
	if (rc) {
		plt_err("TM failed to assign tm resources=%d", rc);
		return rc;
	}

	rc = nix_tm_txsch_reg_config(nix, tree);
	if (rc) {
		plt_err("TM failed to configure sched registers=%d", rc);
		return rc;
	}

	list = nix_tm_node_list(nix, tree);
	/* Mark all non-leaf nodes as enabled */
	TAILQ_FOREACH(node, list, node) {
		if (!nix_tm_is_leaf(nix, node->lvl))
			node->flags |= NIX_TM_NODE_ENABLED;
	}

	if (xmit_enable)
		rc = roc_nix_tm_hierarchy_xmit_enable(roc_nix, tree);

	if (!rc)
		nix->tm_flags |= NIX_TM_HIERARCHY_ENA;
	return rc;
}

int
roc_nix_tm_node_suspend_resume(struct roc_nix *roc_nix, uint32_t node_id,
			       bool suspend)
{
	struct nix *nix = roc_nix_to_nix_priv(roc_nix);
	struct mbox *mbox = (&nix->dev)->mbox;
	struct nix_txschq_config *req;
	struct nix_tm_node *node;
	uint16_t flags;
	int rc;

	node = nix_tm_node_search(nix, node_id, ROC_NIX_TM_USER);
	if (!node)
		return NIX_ERR_TM_INVALID_NODE;

	flags = node->flags;
	flags = suspend ? (flags & ~NIX_TM_NODE_ENABLED) :
			  (flags | NIX_TM_NODE_ENABLED);

	if (node->flags == flags)
		return 0;

	/* send mbox for state change */
	req = mbox_alloc_msg_nix_txschq_cfg(mbox_get(mbox));

	req->lvl = node->hw_lvl;
	req->num_regs =
		nix_tm_sw_xoff_prep(node, suspend, req->reg, req->regval);
	rc = mbox_process(mbox);
	mbox_put(mbox);
	if (!rc)
		node->flags = flags;
	return rc;
}

int
roc_nix_tm_prealloc_res(struct roc_nix *roc_nix, uint8_t lvl,
			uint16_t discontig, uint16_t contig)
{
	struct nix *nix = roc_nix_to_nix_priv(roc_nix);
	struct mbox *mbox = (&nix->dev)->mbox;
	struct nix_txsch_alloc_req *req;
	struct nix_txsch_alloc_rsp *rsp;
	uint8_t hw_lvl;
	int rc = -ENOSPC;

	hw_lvl = nix_tm_lvl2nix(nix, lvl);
	if (hw_lvl == NIX_TXSCH_LVL_CNT)
		return -EINVAL;

	/* Preallocate contiguous */
	if (nix->contig_rsvd[hw_lvl] < contig) {
		req = mbox_alloc_msg_nix_txsch_alloc(mbox_get(mbox));
		if (req == NULL) {
			mbox_put(mbox);
			return rc;
		}
		req->schq_contig[hw_lvl] = contig - nix->contig_rsvd[hw_lvl];

		rc = mbox_process_msg(mbox, (void *)&rsp);
		if (rc) {
			mbox_put(mbox);
			return rc;
		}

		nix_tm_copy_rsp_to_nix(nix, rsp);
		mbox_put(mbox);
	}

	/* Preallocate discontiguous */
	if (nix->discontig_rsvd[hw_lvl] < discontig) {
		req = mbox_alloc_msg_nix_txsch_alloc(mbox_get(mbox));
		if (req == NULL) {
			mbox_put(mbox);
			return -ENOSPC;
		}
		req->schq[hw_lvl] = discontig - nix->discontig_rsvd[hw_lvl];

		rc = mbox_process_msg(mbox, (void *)&rsp);
		if (rc) {
			mbox_put(mbox);
			return rc;
		}

		nix_tm_copy_rsp_to_nix(nix, rsp);
		mbox_put(mbox);
	}

	/* Save thresholds */
	nix->contig_rsvd[hw_lvl] = contig;
	nix->discontig_rsvd[hw_lvl] = discontig;
	/* Release anything present above thresholds */
	nix_tm_release_resources(nix, hw_lvl, true, true);
	nix_tm_release_resources(nix, hw_lvl, false, true);
	return 0;
}
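
/*
 * Illustrative runtime update (caller-side sketch): after adding a new
 * profile, a caller can move a non-leaf node onto it with
 * roc_nix_tm_node_shaper_update(roc_nix, node_id, new_profile_id, false).
 * As implemented below, the node is briefly SW_XOFF'ed while the PIR/CIR
 * registers are rewritten, then re-enabled.
 */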

int
roc_nix_tm_node_shaper_update(struct roc_nix *roc_nix, uint32_t node_id,
			      uint32_t profile_id, bool force_update)
{
	struct nix *nix = roc_nix_to_nix_priv(roc_nix);
	struct nix_tm_shaper_profile *profile = NULL;
	struct mbox *mbox = (&nix->dev)->mbox;
	struct nix_txschq_config *req;
	struct nix_tm_node *node;
	uint8_t k;
	int rc;

	/* Shaper updates valid only for user nodes */
	node = nix_tm_node_search(nix, node_id, ROC_NIX_TM_USER);
	if (!node || nix_tm_is_leaf(nix, node->lvl))
		return NIX_ERR_TM_INVALID_NODE;

	if (profile_id != ROC_NIX_TM_SHAPER_PROFILE_NONE) {
		profile = nix_tm_shaper_profile_search(nix, profile_id);
		if (!profile)
			return NIX_ERR_TM_INVALID_SHAPER_PROFILE;
	}

	/* Pkt mode should match existing node's pkt mode */
	if (profile && profile->pkt_mode != node->pkt_mode)
		return NIX_ERR_TM_PKT_MODE_MISMATCH;

	if ((profile_id == node->shaper_profile_id) && !force_update) {
		return 0;
	} else if (profile_id != node->shaper_profile_id) {
		struct nix_tm_shaper_profile *old;

		/* Find old shaper profile and reduce ref count */
		old = nix_tm_shaper_profile_search(nix,
						   node->shaper_profile_id);
		if (old)
			old->ref_cnt--;

		if (profile)
			profile->ref_cnt++;

		/* Reduce older shaper ref count and increase new one */
		node->shaper_profile_id = profile_id;
	}

	/* Nothing to do if hierarchy not yet enabled */
	if (!(nix->tm_flags & NIX_TM_HIERARCHY_ENA))
		return 0;

	node->flags &= ~NIX_TM_NODE_ENABLED;

	/* Flush the specific node with SW_XOFF */
	req = mbox_alloc_msg_nix_txschq_cfg(mbox_get(mbox));
	req->lvl = node->hw_lvl;
	k = nix_tm_sw_xoff_prep(node, true, req->reg, req->regval);
	req->num_regs = k;

	rc = mbox_process(mbox);
	if (rc) {
		mbox_put(mbox);
		return rc;
	}
	mbox_put(mbox);

	/* Update the PIR/CIR and clear SW XOFF */
	req = mbox_alloc_msg_nix_txschq_cfg(mbox_get(mbox));
	req->lvl = node->hw_lvl;

	k = nix_tm_shaper_reg_prep(node, profile, req->reg, req->regval);

	k += nix_tm_sw_xoff_prep(node, false, &req->reg[k], &req->regval[k]);

	req->num_regs = k;
	rc = mbox_process(mbox);
	mbox_put(mbox);
	if (!rc)
		node->flags |= NIX_TM_NODE_ENABLED;
	return rc;
}

int
roc_nix_tm_node_parent_update(struct roc_nix *roc_nix, uint32_t node_id,
			      uint32_t new_parent_id, uint32_t priority,
			      uint32_t weight)
{
	struct nix *nix = roc_nix_to_nix_priv(roc_nix);
	struct mbox *mbox = (&nix->dev)->mbox;
	struct nix_tm_node *node, *sibling;
	struct nix_tm_node *new_parent;
	struct nix_txschq_config *req;
	struct nix_tm_node_list *list;
	uint8_t k;
	int rc;

	node = nix_tm_node_search(nix, node_id, ROC_NIX_TM_USER);
	if (!node)
		return NIX_ERR_TM_INVALID_NODE;

	/* Parent id valid only for non root nodes */
	if (node->hw_lvl != nix->tm_root_lvl) {
		new_parent =
			nix_tm_node_search(nix, new_parent_id, ROC_NIX_TM_USER);
		if (!new_parent)
			return NIX_ERR_TM_INVALID_PARENT;

		/* Current support is only for dynamic weight update */
		if (node->parent != new_parent || node->priority != priority)
			return NIX_ERR_TM_PARENT_PRIO_UPDATE;
	}

	list = nix_tm_node_list(nix, ROC_NIX_TM_USER);
	/* Skip if no change */
	if (node->weight == weight)
		return 0;

	node->weight = weight;

	/* Nothing to do if hierarchy not yet enabled */
	if (!(nix->tm_flags & NIX_TM_HIERARCHY_ENA))
		return 0;

	/* For leaf nodes, SQ CTX needs update */
	if (nix_tm_is_leaf(nix, node->lvl)) {
		/* Update SQ quantum data on the fly */
		rc = nix_tm_sq_sched_conf(nix, node, true);
		if (rc)
			return NIX_ERR_TM_SQ_UPDATE_FAIL;
	} else {
		/* XOFF Parent node */
		req = mbox_alloc_msg_nix_txschq_cfg(mbox_get(mbox));
		req->lvl = node->parent->hw_lvl;
		req->num_regs = nix_tm_sw_xoff_prep(node->parent, true,
						    req->reg, req->regval);
		rc = mbox_process(mbox);
		mbox_put(mbox);
		if (rc)
			return rc;

		/* XOFF this node and all other siblings */
		req = mbox_alloc_msg_nix_txschq_cfg(mbox_get(mbox));
		req->lvl = node->hw_lvl;

		k = 0;
		TAILQ_FOREACH(sibling, list, node) {
			if (sibling->parent != node->parent)
				continue;
			k += nix_tm_sw_xoff_prep(sibling, true, &req->reg[k], &req->regval[k]);
			if (k >= MAX_REGS_PER_MBOX_MSG) {
				req->num_regs = k;
				rc = mbox_process(mbox);
				mbox_put(mbox);
				if (rc)
					return rc;
				k = 0;
				req = mbox_alloc_msg_nix_txschq_cfg(mbox_get(mbox));
				req->lvl = node->hw_lvl;
			}
		}

		if (k) {
			req->num_regs = k;
			rc = mbox_process(mbox);
			mbox_put(mbox);
			if (rc)
				return rc;
			/* Update new weight for current node */
			req = mbox_alloc_msg_nix_txschq_cfg(mbox_get(mbox));
		}

		req->lvl = node->hw_lvl;
		req->num_regs = nix_tm_sched_reg_prep(nix, node, req->reg, req->regval);
		rc = mbox_process(mbox);
		mbox_put(mbox);
		if (rc)
			return rc;

		/* XON this node and all other siblings */
		req = mbox_alloc_msg_nix_txschq_cfg(mbox_get(mbox));
		req->lvl = node->hw_lvl;

		k = 0;
		TAILQ_FOREACH(sibling, list, node) {
			if (sibling->parent != node->parent)
				continue;
			k += nix_tm_sw_xoff_prep(sibling, false, &req->reg[k], &req->regval[k]);
			if (k >= MAX_REGS_PER_MBOX_MSG) {
				req->num_regs = k;
				rc = mbox_process(mbox);
				mbox_put(mbox);
				if (rc)
					return rc;
				k = 0;
				req = mbox_alloc_msg_nix_txschq_cfg(mbox_get(mbox));
				req->lvl = node->hw_lvl;
			}
		}

		if (k) {
			req->num_regs = k;
			rc = mbox_process(mbox);
			mbox_put(mbox);
			if (rc)
				return rc;
			/* XON Parent node */
			req = mbox_alloc_msg_nix_txschq_cfg(mbox_get(mbox));
		}

		req->lvl = node->parent->hw_lvl;
		req->num_regs = nix_tm_sw_xoff_prep(node->parent, false, req->reg, req->regval);
		rc = mbox_process(mbox);
		mbox_put(mbox);
		if (rc)
			return rc;
	}
	return 0;
}

int
roc_nix_tm_init(struct roc_nix *roc_nix)
{
	struct nix *nix = roc_nix_to_nix_priv(roc_nix);
	uint32_t tree_mask;
	int rc;

	if (nix->tm_flags & NIX_TM_HIERARCHY_ENA) {
		plt_err("Cannot init while existing hierarchy is enabled");
		return -EBUSY;
	}

	/* Free up all user resources already held */
	tree_mask = NIX_TM_TREE_MASK_ALL;
	rc = nix_tm_free_resources(roc_nix, tree_mask, false);
	if (rc) {
		plt_err("Failed to freeup all nodes and resources, rc=%d", rc);
		return rc;
	}

	/* Prepare default tree */
	rc = nix_tm_prepare_default_tree(roc_nix);
	if (rc) {
		plt_err("failed to prepare default tm tree, rc=%d", rc);
		return rc;
	}

	return rc;
}
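
/*
 * Both rate-limit helpers below build a PIR-only profile whose burst is at
 * least ~4us worth of traffic and never smaller than roc_nix_max_pkt_len():
 * burst = (4 * rate) / (1E6 * 8) bytes. Worked example (illustrative): for a
 * 10 Gbps request, rate = 1E10, so the computed burst is 4E10 / 8E6 = 5000
 * bytes.
 */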

int
roc_nix_tm_pfc_rlimit_sq(struct roc_nix *roc_nix, uint16_t qid, uint64_t rate)
{
	struct nix *nix = roc_nix_to_nix_priv(roc_nix);
	struct nix_tm_shaper_profile profile;
	struct mbox *mbox = (&nix->dev)->mbox;
	struct nix_tm_node *node, *parent;
	struct roc_nix_link_info link_info;

	volatile uint64_t *reg, *regval;
	struct nix_txschq_config *req;
	uint64_t tl2_rate = 0;
	uint16_t flags;
	uint8_t k = 0;
	int rc;

	if ((nix->tm_tree != ROC_NIX_TM_PFC) || !(nix->tm_flags & NIX_TM_HIERARCHY_ENA))
		return NIX_ERR_TM_INVALID_TREE;

	node = nix_tm_node_search(nix, qid, nix->tm_tree);

	/* check if we found a valid leaf node */
	if (!node || !nix_tm_is_leaf(nix, node->lvl) || !node->parent ||
	    node->parent->hw_id == NIX_TM_HW_ID_INVALID) {
		return NIX_ERR_TM_INVALID_NODE;
	}

	/* Get the link Speed */
	if (roc_nix_mac_link_info_get(roc_nix, &link_info))
		return -EINVAL;

	if (link_info.status)
		tl2_rate = link_info.speed * (uint64_t)1E6;

	/* Configure TL3 of leaf node with requested rate */
	parent = node->parent;	 /* SMQ/MDQ */
	parent = parent->parent; /* TL4 */
	parent = parent->parent; /* TL3 */
	flags = parent->flags;

	req = mbox_alloc_msg_nix_txschq_cfg(mbox_get(mbox));
	req->lvl = parent->hw_lvl;
	reg = req->reg;
	regval = req->regval;

	if (rate == 0) {
		k += nix_tm_sw_xoff_prep(parent, true, &reg[k], &regval[k]);
		flags &= ~NIX_TM_NODE_ENABLED;
		goto exit;
	}

	if (!(flags & NIX_TM_NODE_ENABLED)) {
		k += nix_tm_sw_xoff_prep(parent, false, &reg[k], &regval[k]);
		flags |= NIX_TM_NODE_ENABLED;
	}

	/* Use only PIR for rate limit */
	memset(&profile, 0, sizeof(profile));
	profile.peak.rate = rate;
	/* Minimum burst of ~4us Bytes of Tx */
	profile.peak.size =
		PLT_MAX((uint64_t)roc_nix_max_pkt_len(roc_nix), (4ul * rate) / ((uint64_t)1E6 * 8));
	if (!nix->tm_rate_min || nix->tm_rate_min > rate)
		nix->tm_rate_min = rate;

	k += nix_tm_shaper_reg_prep(parent, &profile, &reg[k], &regval[k]);
exit:
	req->num_regs = k;
	rc = mbox_process(mbox);
	mbox_put(mbox);
	if (rc)
		return rc;

	parent->flags = flags;

	/* If link is up then configure TL2 with link speed */
	if (tl2_rate && (flags & NIX_TM_NODE_ENABLED)) {
		k = 0;
		parent = parent->parent;
		req = mbox_alloc_msg_nix_txschq_cfg(mbox_get(mbox));
		req->lvl = parent->hw_lvl;
		reg = req->reg;
		regval = req->regval;

		/* Use only PIR for rate limit */
		memset(&profile, 0, sizeof(profile));
		profile.peak.rate = tl2_rate;
		/* Minimum burst of ~4us Bytes of Tx */
		profile.peak.size = PLT_MAX((uint64_t)roc_nix_max_pkt_len(roc_nix),
					    (4ul * tl2_rate) / ((uint64_t)1E6 * 8));
		k += nix_tm_shaper_reg_prep(parent, &profile, &reg[k], &regval[k]);
		req->num_regs = k;
		rc = mbox_process(mbox);
		mbox_put(mbox);
	}
	return rc;
}
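
/*
 * Illustrative usage (caller-side sketch): roc_nix_tm_rlimit_sq(roc_nix,
 * qid, rate) rate-limits a single SQ by shaping its parent MDQ. It expects
 * a non-user tree (e.g. the rate-limit tree) to be active; with rate == 0
 * the MDQ is SW_XOFF'ed, effectively pausing the queue.
 */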

int
roc_nix_tm_rlimit_sq(struct roc_nix *roc_nix, uint16_t qid, uint64_t rate)
{
	struct nix *nix = roc_nix_to_nix_priv(roc_nix);
	struct nix_tm_shaper_profile profile;
	struct mbox *mbox = (&nix->dev)->mbox;
	struct nix_tm_node *node, *parent;

	volatile uint64_t *reg, *regval;
	struct nix_txschq_config *req;
	uint16_t flags;
	uint8_t k = 0;
	int rc;

	if ((nix->tm_tree == ROC_NIX_TM_USER) ||
	    !(nix->tm_flags & NIX_TM_HIERARCHY_ENA)) {
		return NIX_ERR_TM_INVALID_TREE;
	}

	node = nix_tm_node_search(nix, qid, nix->tm_tree);

	/* check if we found a valid leaf node */
	if (!node || !nix_tm_is_leaf(nix, node->lvl) || !node->parent ||
	    node->parent->hw_id == NIX_TM_HW_ID_INVALID) {
		return NIX_ERR_TM_INVALID_NODE;
	}

	parent = node->parent;
	flags = parent->flags;

	req = mbox_alloc_msg_nix_txschq_cfg(mbox_get(mbox));
	req->lvl = NIX_TXSCH_LVL_MDQ;
	reg = req->reg;
	regval = req->regval;

	if (rate == 0) {
		k += nix_tm_sw_xoff_prep(parent, true, &reg[k], &regval[k]);
		flags &= ~NIX_TM_NODE_ENABLED;
		goto exit;
	}

	if (!(flags & NIX_TM_NODE_ENABLED)) {
		k += nix_tm_sw_xoff_prep(parent, false, &reg[k], &regval[k]);
		flags |= NIX_TM_NODE_ENABLED;
	}

	/* Use only PIR for rate limit */
	memset(&profile, 0, sizeof(profile));
	profile.peak.rate = rate;
	/* Minimum burst of ~4us Bytes of Tx */
	profile.peak.size = PLT_MAX((uint64_t)roc_nix_max_pkt_len(roc_nix),
				    (4ul * rate) / ((uint64_t)1E6 * 8));
	if (!nix->tm_rate_min || nix->tm_rate_min > rate)
		nix->tm_rate_min = rate;

	k += nix_tm_shaper_reg_prep(parent, &profile, &reg[k], &regval[k]);
exit:
	req->num_regs = k;
	rc = mbox_process(mbox);
	mbox_put(mbox);
	if (rc)
		return rc;

	parent->flags = flags;
	return 0;
}
int
roc_nix_tm_rlimit_sq(struct roc_nix *roc_nix, uint16_t qid, uint64_t rate)
{
	struct nix *nix = roc_nix_to_nix_priv(roc_nix);
	struct nix_tm_shaper_profile profile;
	struct mbox *mbox = (&nix->dev)->mbox;
	struct nix_tm_node *node, *parent;

	volatile uint64_t *reg, *regval;
	struct nix_txschq_config *req;
	uint16_t flags;
	uint8_t k = 0;
	int rc;

	if ((nix->tm_tree == ROC_NIX_TM_USER) ||
	    !(nix->tm_flags & NIX_TM_HIERARCHY_ENA)) {
		return NIX_ERR_TM_INVALID_TREE;
	}

	node = nix_tm_node_search(nix, qid, nix->tm_tree);

	/* check if we found a valid leaf node */
	if (!node || !nix_tm_is_leaf(nix, node->lvl) || !node->parent ||
	    node->parent->hw_id == NIX_TM_HW_ID_INVALID) {
		return NIX_ERR_TM_INVALID_NODE;
	}

	parent = node->parent;
	flags = parent->flags;

	req = mbox_alloc_msg_nix_txschq_cfg(mbox_get(mbox));
	req->lvl = NIX_TXSCH_LVL_MDQ;
	reg = req->reg;
	regval = req->regval;

	if (rate == 0) {
		k += nix_tm_sw_xoff_prep(parent, true, &reg[k], &regval[k]);
		flags &= ~NIX_TM_NODE_ENABLED;
		goto exit;
	}

	if (!(flags & NIX_TM_NODE_ENABLED)) {
		k += nix_tm_sw_xoff_prep(parent, false, &reg[k], &regval[k]);
		flags |= NIX_TM_NODE_ENABLED;
	}

	/* Use only PIR for rate limit */
	memset(&profile, 0, sizeof(profile));
	profile.peak.rate = rate;
	/* Minimum burst of ~4us Bytes of Tx */
	profile.peak.size = PLT_MAX((uint64_t)roc_nix_max_pkt_len(roc_nix),
				    (4ul * rate) / ((uint64_t)1E6 * 8));
	if (!nix->tm_rate_min || nix->tm_rate_min > rate)
		nix->tm_rate_min = rate;

	k += nix_tm_shaper_reg_prep(parent, &profile, &reg[k], &regval[k]);
exit:
	req->num_regs = k;
	rc = mbox_process(mbox);
	mbox_put(mbox);
	if (rc)
		return rc;

	parent->flags = flags;
	return 0;
}
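
/*
 * Tear down TM state for this NIX: free the software node trees, ask the AF
 * to free all hardware transmit scheduler queues (TXSCHQ_FREE_ALL), clear the
 * per-level scheduler queue bitmaps and reservations, and drop all shaper
 * profiles. Per the in-code comment, the caller is expected to have disabled
 * transmit before invoking this.
 */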
void
roc_nix_tm_fini(struct roc_nix *roc_nix)
{
	struct nix *nix = roc_nix_to_nix_priv(roc_nix);
	struct mbox *mbox = (&nix->dev)->mbox;
	struct nix_txsch_free_req *req;
	uint32_t tree_mask;
	uint8_t hw_lvl;
	int rc;

	/* Xmit is assumed to be disabled */
	/* Free up resources already held */
	tree_mask = NIX_TM_TREE_MASK_ALL;
	rc = nix_tm_free_resources(roc_nix, tree_mask, false);
	if (rc)
		plt_err("Failed to freeup existing nodes or rsrcs, rc=%d", rc);

	/* Free all other hw resources */
	req = mbox_alloc_msg_nix_txsch_free(mbox_get(mbox));
	if (req == NULL) {
		mbox_put(mbox);
		return;
	}

	req->flags = TXSCHQ_FREE_ALL;
	rc = mbox_process(mbox);
	if (rc)
		plt_err("Failed to freeup all res, rc=%d", rc);
	mbox_put(mbox);

	for (hw_lvl = 0; hw_lvl < NIX_TXSCH_LVL_CNT; hw_lvl++) {
		plt_bitmap_reset(nix->schq_bmp[hw_lvl]);
		plt_bitmap_reset(nix->schq_contig_bmp[hw_lvl]);
		nix->contig_rsvd[hw_lvl] = 0;
		nix->discontig_rsvd[hw_lvl] = 0;
	}

	/* Clear shaper profiles */
	nix_tm_clear_shaper_profiles(nix);
	nix->tm_tree = 0;
	nix->tm_flags &= ~NIX_TM_HIERARCHY_ENA;
}
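
/*
 * Query the AF for the currently free transmit scheduler queues and report
 * them per ROC TM level in schq[]. Levels that do not map to a hardware level
 * for this tree are skipped. When nix->is_nix1 is set, the counts are taken
 * from the schq_nix1[] array of the response instead of schq[].
 */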
int
roc_nix_tm_rsrc_count(struct roc_nix *roc_nix, uint16_t schq[ROC_TM_LVL_MAX])
{
	struct nix *nix = roc_nix_to_nix_priv(roc_nix);
	struct mbox *mbox = mbox_get((&nix->dev)->mbox);
	struct free_rsrcs_rsp *rsp;
	uint8_t hw_lvl;
	int rc, i;

	/* Get the current free resources */
	mbox_alloc_msg_free_rsrc_cnt(mbox);
	rc = mbox_process_msg(mbox, (void *)&rsp);
	if (rc)
		goto exit;

	for (i = 0; i < ROC_TM_LVL_MAX; i++) {
		hw_lvl = nix_tm_lvl2nix(nix, i);
		if (hw_lvl == NIX_TXSCH_LVL_CNT)
			continue;

		schq[i] = (nix->is_nix1 ? rsp->schq_nix1[hw_lvl] :
					  rsp->schq[hw_lvl]);
	}

	rc = 0;
exit:
	mbox_put(mbox);
	return rc;
}
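
/*
 * Report the theoretical per-level maximum number of scheduler queues,
 * without consulting the AF. The SMQ/TL4/TL3 limits depend on the SoC family
 * (cn9k, cn10k, or later models); TL2/TL1 depend on whether the caller is a
 * PF, with TL1 available only for a PF-rooted tree.
 */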
void
roc_nix_tm_rsrc_max(bool pf, uint16_t schq[ROC_TM_LVL_MAX])
{
	uint8_t hw_lvl, i;
	uint16_t max;

	for (i = 0; i < ROC_TM_LVL_MAX; i++) {
		hw_lvl = pf ? nix_tm_lvl2nix_tl1_root(i) :
			      nix_tm_lvl2nix_tl2_root(i);

		switch (hw_lvl) {
		case NIX_TXSCH_LVL_SMQ:
			max = (roc_model_is_cn9k() ? NIX_CN9K_TXSCH_LVL_SMQ_MAX :
			       (roc_model_is_cn10k() ? NIX_CN10K_TXSCH_LVL_SMQ_MAX :
							NIX_TXSCH_LVL_SMQ_MAX));
			break;
		case NIX_TXSCH_LVL_TL4:
			max = (roc_model_is_cn9k() ? NIX_CN9K_TXSCH_LVL_TL4_MAX :
			       (roc_model_is_cn10k() ? NIX_CN10K_TXSCH_LVL_TL4_MAX :
							NIX_TXSCH_LVL_TL4_MAX));
			break;
		case NIX_TXSCH_LVL_TL3:
			max = (roc_model_is_cn9k() ? NIX_CN9K_TXSCH_LVL_TL3_MAX :
			       (roc_model_is_cn10k() ? NIX_CN10K_TXSCH_LVL_TL3_MAX :
							NIX_TXSCH_LVL_TL3_MAX));
			break;
		case NIX_TXSCH_LVL_TL2:
			max = pf ? NIX_TXSCH_LVL_TL2_MAX : 1;
			break;
		case NIX_TXSCH_LVL_TL1:
			max = pf ? 1 : 0;
			break;
		default:
			max = 0;
			break;
		}
		schq[i] = max;
	}
}

bool
roc_nix_tm_root_has_sp(struct roc_nix *roc_nix)
{
	struct nix *nix = roc_nix_to_nix_priv(roc_nix);

	if (nix->tm_flags & NIX_TM_TL1_NO_SP)
		return false;
	return true;
}

static inline struct nix *
pf_func_to_nix_get(uint16_t pf_func)
{
	struct roc_nix *roc_nix_tmp = NULL;
	struct roc_nix_list *nix_list;

	nix_list = roc_idev_nix_list_get();
	if (nix_list == NULL)
		return NULL;

	/* Find the NIX of given pf_func */
	TAILQ_FOREACH(roc_nix_tmp, nix_list, next) {
		struct nix *nix = roc_nix_to_nix_priv(roc_nix_tmp);

		if (nix->dev.pf_func == pf_func)
			return nix;
	}

	return NULL;
}
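
/*
 * For every node of the source NIX at the link-config level (TL3, per the
 * in-code comment below), set or clear BIT_ULL(12) in
 * NIX_AF_TL3_TL2X_LINKX_CFG for the destination NIX's TX link. Per that
 * comment this toggles PFC/pause behaviour towards the destination link; the
 * exact bit semantics are hardware-defined and not spelled out here. The
 * call is a no-op when source and destination resolve to the same NIX.
 */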
int
roc_nix_tm_egress_link_cfg_set(struct roc_nix *roc_nix, uint64_t dst_pf_func, bool enable)
{
	struct nix *src_nix = roc_nix_to_nix_priv(roc_nix), *dst_nix;
	struct mbox *mbox = (&src_nix->dev)->mbox;
	struct nix_txschq_config *req = NULL;
	struct nix_tm_node_list *list;
	struct nix_tm_node *node;
	int rc = 0, k;

	dst_nix = pf_func_to_nix_get(dst_pf_func);
	if (!dst_nix)
		return -EINVAL;

	if (dst_nix == src_nix)
		return 0;

	list = nix_tm_node_list(src_nix, src_nix->tm_tree);
	TAILQ_FOREACH(node, list, node) {
		if (node->hw_lvl != src_nix->tm_link_cfg_lvl)
			continue;

		if (!(node->flags & NIX_TM_NODE_HWRES))
			continue;

		/* Allocating TL3 request */
		req = mbox_alloc_msg_nix_txschq_cfg(mbox_get(mbox));
		req->lvl = src_nix->tm_link_cfg_lvl;
		k = 0;

		/* Enable PFC/pause on the identified TL3 */
		req->reg[k] = NIX_AF_TL3_TL2X_LINKX_CFG(node->hw_id, dst_nix->tx_link);
		if (enable)
			req->regval[k] |= BIT_ULL(12);
		else
			req->regval[k] &= ~(BIT_ULL(12));
		req->regval_mask[k] = ~(BIT_ULL(12));
		k++;

		req->num_regs = k;
		rc = mbox_process(mbox);
		mbox_put(mbox);
		if (rc)
			goto err;
	}
err:
	return rc;
}
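
/*
 * Rough usage sketch of the APIs above (hypothetical ordering, error handling
 * elided; hierarchy enable and transmit enable are handled by other ROC TM
 * calls not shown in this section):
 *
 *	roc_nix_tm_init(roc_nix);
 *	... enable the hierarchy and start transmit ...
 *	roc_nix_tm_rlimit_sq(roc_nix, qid, rate_bps);
 *	...
 *	roc_nix_tm_fini(roc_nix);
 */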