/* SPDX-License-Identifier: BSD-3-Clause
 * Copyright(c) 2018 Advanced Micro Devices, Inc. All rights reserved.
 * Copyright(c) 2018 Synopsys, Inc. All rights reserved.
 */

#include "axgbe_ethdev.h"
#include "axgbe_common.h"
#include "axgbe_phy.h"

static void axgbe_an37_clear_interrupts(struct axgbe_port *pdata)
{
	int reg;

	reg = XMDIO_READ(pdata, MDIO_MMD_VEND2, MDIO_VEND2_AN_STAT);
	reg &= ~AXGBE_AN_CL37_INT_MASK;
	XMDIO_WRITE(pdata, MDIO_MMD_VEND2, MDIO_VEND2_AN_STAT, reg);
}

static void axgbe_an37_disable_interrupts(struct axgbe_port *pdata)
{
	int reg;

	reg = XMDIO_READ(pdata, MDIO_MMD_VEND2, MDIO_VEND2_AN_CTRL);
	reg &= ~AXGBE_AN_CL37_INT_MASK;
	XMDIO_WRITE(pdata, MDIO_MMD_VEND2, MDIO_VEND2_AN_CTRL, reg);

	reg = XMDIO_READ(pdata, MDIO_MMD_PCS, MDIO_PCS_DIG_CTRL);
	reg &= ~AXGBE_PCS_CL37_BP;
	XMDIO_WRITE(pdata, MDIO_MMD_PCS, MDIO_PCS_DIG_CTRL, reg);
}

static void axgbe_an73_clear_interrupts(struct axgbe_port *pdata)
{
	XMDIO_WRITE(pdata, MDIO_MMD_AN, MDIO_AN_INT, 0);
}

static void axgbe_an73_disable_interrupts(struct axgbe_port *pdata)
{
	XMDIO_WRITE(pdata, MDIO_MMD_AN, MDIO_AN_INTMASK, 0);
}

static void axgbe_an73_enable_interrupts(struct axgbe_port *pdata)
{
	XMDIO_WRITE(pdata, MDIO_MMD_AN, MDIO_AN_INTMASK,
		    AXGBE_AN_CL73_INT_MASK);
}

static void axgbe_an_enable_interrupts(struct axgbe_port *pdata)
{
	switch (pdata->an_mode) {
	case AXGBE_AN_MODE_CL73:
	case AXGBE_AN_MODE_CL73_REDRV:
		axgbe_an73_enable_interrupts(pdata);
		break;
	case AXGBE_AN_MODE_CL37:
	case AXGBE_AN_MODE_CL37_SGMII:
		PMD_DRV_LOG(ERR, "Unsupported AN_MODE_CL37\n");
		break;
	default:
		break;
	}
}

static void axgbe_an_clear_interrupts_all(struct axgbe_port *pdata)
{
	axgbe_an73_clear_interrupts(pdata);
	axgbe_an37_clear_interrupts(pdata);
}

static void axgbe_an73_enable_kr_training(struct axgbe_port *pdata)
{
	unsigned int reg;

	reg = XMDIO_READ(pdata, MDIO_MMD_PMAPMD, MDIO_PMA_10GBR_PMD_CTRL);

	reg |= AXGBE_KR_TRAINING_ENABLE;
	XMDIO_WRITE(pdata, MDIO_MMD_PMAPMD, MDIO_PMA_10GBR_PMD_CTRL, reg);
}

static void axgbe_an73_disable_kr_training(struct axgbe_port *pdata)
{
	unsigned int reg;

	reg = XMDIO_READ(pdata, MDIO_MMD_PMAPMD, MDIO_PMA_10GBR_PMD_CTRL);

	reg &= ~AXGBE_KR_TRAINING_ENABLE;
	XMDIO_WRITE(pdata, MDIO_MMD_PMAPMD, MDIO_PMA_10GBR_PMD_CTRL, reg);
}

static void axgbe_kr_mode(struct axgbe_port *pdata)
{
	/* Enable KR training */
	axgbe_an73_enable_kr_training(pdata);

	/* Set MAC to 10G speed */
	pdata->hw_if.set_speed(pdata, SPEED_10000);

	/* Call PHY implementation support to complete rate change */
	pdata->phy_if.phy_impl.set_mode(pdata, AXGBE_MODE_KR);
}

static void axgbe_kx_2500_mode(struct axgbe_port *pdata)
{
	/* Disable KR training */
	axgbe_an73_disable_kr_training(pdata);

	/* Set MAC to 2.5G speed */
	pdata->hw_if.set_speed(pdata, SPEED_2500);

	/* Call PHY implementation support to complete rate change */
	pdata->phy_if.phy_impl.set_mode(pdata, AXGBE_MODE_KX_2500);
}

static void axgbe_kx_1000_mode(struct axgbe_port *pdata)
{
	/* Disable KR training */
	axgbe_an73_disable_kr_training(pdata);

	/* Set MAC to 1G speed */
	pdata->hw_if.set_speed(pdata, SPEED_1000);

	/* Call PHY implementation support to complete rate change */
	pdata->phy_if.phy_impl.set_mode(pdata, AXGBE_MODE_KX_1000);
}

static void axgbe_sfi_mode(struct axgbe_port *pdata)
{
	/* If a KR re-driver is present, change to KR mode instead */
	if (pdata->kr_redrv)
		return axgbe_kr_mode(pdata);

	/* Disable KR training */
	axgbe_an73_disable_kr_training(pdata);

	/* Set MAC to 10G speed */
	pdata->hw_if.set_speed(pdata, SPEED_10000);

	/* Call PHY implementation support to complete rate change */
	pdata->phy_if.phy_impl.set_mode(pdata, AXGBE_MODE_SFI);
}

static void axgbe_x_mode(struct axgbe_port *pdata)
{
	/* Disable KR training */
	axgbe_an73_disable_kr_training(pdata);

	/* Set MAC to 1G speed */
	pdata->hw_if.set_speed(pdata, SPEED_1000);

	/* Call PHY implementation support to complete rate change */
	pdata->phy_if.phy_impl.set_mode(pdata, AXGBE_MODE_X);
}

static void axgbe_sgmii_1000_mode(struct axgbe_port *pdata)
{
	/* Disable KR training */
	axgbe_an73_disable_kr_training(pdata);

	/* Set MAC to 1G speed */
	pdata->hw_if.set_speed(pdata, SPEED_1000);

	/* Call PHY implementation support to complete rate change */
	pdata->phy_if.phy_impl.set_mode(pdata, AXGBE_MODE_SGMII_1000);
}

static void axgbe_sgmii_100_mode(struct axgbe_port *pdata)
{
	/* Disable KR training */
	axgbe_an73_disable_kr_training(pdata);

	/* Set MAC to 1G speed */
	pdata->hw_if.set_speed(pdata, SPEED_1000);

	/* Call PHY implementation support to complete rate change */
	pdata->phy_if.phy_impl.set_mode(pdata, AXGBE_MODE_SGMII_100);
}

static enum axgbe_mode axgbe_cur_mode(struct axgbe_port *pdata)
{
	return pdata->phy_if.phy_impl.cur_mode(pdata);
}

static bool axgbe_in_kr_mode(struct axgbe_port *pdata)
{
	return axgbe_cur_mode(pdata) == AXGBE_MODE_KR;
}

static void axgbe_change_mode(struct axgbe_port *pdata,
			      enum axgbe_mode mode)
{
	switch (mode) {
	case AXGBE_MODE_KX_1000:
		axgbe_kx_1000_mode(pdata);
		break;
	case AXGBE_MODE_KX_2500:
		axgbe_kx_2500_mode(pdata);
		break;
	case AXGBE_MODE_KR:
		axgbe_kr_mode(pdata);
		break;
	case AXGBE_MODE_SGMII_100:
		axgbe_sgmii_100_mode(pdata);
		break;
	case AXGBE_MODE_SGMII_1000:
		axgbe_sgmii_1000_mode(pdata);
		break;
	case AXGBE_MODE_X:
		axgbe_x_mode(pdata);
		break;
	case AXGBE_MODE_SFI:
		axgbe_sfi_mode(pdata);
		break;
	case AXGBE_MODE_UNKNOWN:
		break;
	default:
		PMD_DRV_LOG(ERR, "invalid operation mode requested (%u)\n", mode);
	}
}

static void axgbe_switch_mode(struct axgbe_port *pdata)
{
	axgbe_change_mode(pdata, pdata->phy_if.phy_impl.switch_mode(pdata));
}

static void axgbe_set_mode(struct axgbe_port *pdata,
			   enum axgbe_mode mode)
{
	if (mode == axgbe_cur_mode(pdata))
		return;

	axgbe_change_mode(pdata, mode);
}

static bool axgbe_use_mode(struct axgbe_port *pdata,
			   enum axgbe_mode mode)
{
	return pdata->phy_if.phy_impl.use_mode(pdata, mode);
}

static void axgbe_an37_set(struct axgbe_port *pdata, bool enable,
			   bool restart)
{
	unsigned int reg;

	reg = XMDIO_READ(pdata, MDIO_MMD_VEND2, MDIO_CTRL1);
	reg &= ~MDIO_VEND2_CTRL1_AN_ENABLE;

	if (enable)
		reg |= MDIO_VEND2_CTRL1_AN_ENABLE;

	if (restart)
		reg |= MDIO_VEND2_CTRL1_AN_RESTART;

	XMDIO_WRITE(pdata, MDIO_MMD_VEND2, MDIO_CTRL1, reg);
}

static void axgbe_an37_disable(struct axgbe_port *pdata)
{
	axgbe_an37_set(pdata, false, false);
	axgbe_an37_disable_interrupts(pdata);
}

static void axgbe_an73_set(struct axgbe_port *pdata, bool enable,
			   bool restart)
{
	unsigned int reg;

	reg = XMDIO_READ(pdata, MDIO_MMD_AN, MDIO_CTRL1);
	reg &= ~MDIO_AN_CTRL1_ENABLE;

	if (enable)
		reg |= MDIO_AN_CTRL1_ENABLE;

	if (restart)
		reg |= MDIO_AN_CTRL1_RESTART;

	XMDIO_WRITE(pdata, MDIO_MMD_AN, MDIO_CTRL1, reg);
}

static void axgbe_an73_restart(struct axgbe_port *pdata)
{
	axgbe_an73_enable_interrupts(pdata);
	axgbe_an73_set(pdata, true, true);
}

static void axgbe_an73_disable(struct axgbe_port *pdata)
{
	axgbe_an73_set(pdata, false, false);
	axgbe_an73_disable_interrupts(pdata);
	pdata->an_start = 0;
}

static void axgbe_an_restart(struct axgbe_port *pdata)
{
	if (pdata->phy_if.phy_impl.an_pre)
		pdata->phy_if.phy_impl.an_pre(pdata);

	switch (pdata->an_mode) {
	case AXGBE_AN_MODE_CL73:
	case AXGBE_AN_MODE_CL73_REDRV:
		axgbe_an73_restart(pdata);
		break;
	case AXGBE_AN_MODE_CL37:
	case AXGBE_AN_MODE_CL37_SGMII:
		PMD_DRV_LOG(ERR, "Unsupported AN_MODE_CL37\n");
		break;
	default:
		break;
	}
}

static void axgbe_an_disable(struct axgbe_port *pdata)
{
	if (pdata->phy_if.phy_impl.an_post)
		pdata->phy_if.phy_impl.an_post(pdata);

	switch (pdata->an_mode) {
	case AXGBE_AN_MODE_CL73:
	case AXGBE_AN_MODE_CL73_REDRV:
		axgbe_an73_disable(pdata);
		break;
	case AXGBE_AN_MODE_CL37:
	case AXGBE_AN_MODE_CL37_SGMII:
		PMD_DRV_LOG(ERR, "Unsupported AN_MODE_CL37\n");
		break;
	default:
		break;
	}
}

static void axgbe_an_disable_all(struct axgbe_port *pdata)
{
	axgbe_an73_disable(pdata);
	axgbe_an37_disable(pdata);
}

static enum axgbe_an axgbe_an73_tx_training(struct axgbe_port *pdata,
					    enum axgbe_rx *state)
{
	unsigned int ad_reg, lp_reg, reg;

	*state = AXGBE_RX_COMPLETE;

	/* If we're not in KR mode then we're done */
	if (!axgbe_in_kr_mode(pdata))
		return AXGBE_AN_PAGE_RECEIVED;

	/* Enable/Disable FEC */
	ad_reg = XMDIO_READ(pdata, MDIO_MMD_AN, MDIO_AN_ADVERTISE + 2);
	lp_reg = XMDIO_READ(pdata, MDIO_MMD_AN, MDIO_AN_LPA + 2);

	reg = XMDIO_READ(pdata, MDIO_MMD_PMAPMD, MDIO_PMA_10GBR_FECCTRL);
	reg &= ~(MDIO_PMA_10GBR_FECABLE_ABLE | MDIO_PMA_10GBR_FECABLE_ERRABLE);
	if ((ad_reg & 0xc000) && (lp_reg & 0xc000))
		reg |= pdata->fec_ability;
	XMDIO_WRITE(pdata, MDIO_MMD_PMAPMD, MDIO_PMA_10GBR_FECCTRL, reg);

	/* Start KR training */
	reg = XMDIO_READ(pdata, MDIO_MMD_PMAPMD, MDIO_PMA_10GBR_PMD_CTRL);
	if (reg & AXGBE_KR_TRAINING_ENABLE) {
		if (pdata->phy_if.phy_impl.kr_training_pre)
			pdata->phy_if.phy_impl.kr_training_pre(pdata);

		reg |= AXGBE_KR_TRAINING_START;
		XMDIO_WRITE(pdata, MDIO_MMD_PMAPMD, MDIO_PMA_10GBR_PMD_CTRL,
			    reg);

		if (pdata->phy_if.phy_impl.kr_training_post)
			pdata->phy_if.phy_impl.kr_training_post(pdata);
	}

	return AXGBE_AN_PAGE_RECEIVED;
}

static enum axgbe_an axgbe_an73_tx_xnp(struct axgbe_port *pdata,
				       enum axgbe_rx *state)
{
	u16 msg;

	*state = AXGBE_RX_XNP;

	msg = AXGBE_XNP_MCF_NULL_MESSAGE;
	msg |= AXGBE_XNP_MP_FORMATTED;

	XMDIO_WRITE(pdata, MDIO_MMD_AN, MDIO_AN_XNP + 2, 0);
	XMDIO_WRITE(pdata, MDIO_MMD_AN, MDIO_AN_XNP + 1, 0);
	XMDIO_WRITE(pdata, MDIO_MMD_AN, MDIO_AN_XNP, msg);

	return AXGBE_AN_PAGE_RECEIVED;
}
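
/*
 * A base page has been exchanged. Check that the link partner advertises
 * an ability matching the current operating mode (10GBase-KR when in KR
 * mode, 1000Base-KX otherwise); if not, report an incompatible link so a
 * different mode can be tried. Otherwise either start an extended next
 * page exchange or move straight to KR training.
 */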
static enum axgbe_an axgbe_an73_rx_bpa(struct axgbe_port *pdata,
				       enum axgbe_rx *state)
{
	unsigned int link_support;
	unsigned int reg, ad_reg, lp_reg;

	/* Read Base Ability register 2 first */
	reg = XMDIO_READ(pdata, MDIO_MMD_AN, MDIO_AN_LPA + 1);

	/* Check for a supported mode, otherwise restart in a different one */
	link_support = axgbe_in_kr_mode(pdata) ? 0x80 : 0x20;
	if (!(reg & link_support))
		return AXGBE_AN_INCOMPAT_LINK;

	/* Check Extended Next Page support */
	ad_reg = XMDIO_READ(pdata, MDIO_MMD_AN, MDIO_AN_ADVERTISE);
	lp_reg = XMDIO_READ(pdata, MDIO_MMD_AN, MDIO_AN_LPA);

	return ((ad_reg & AXGBE_XNP_NP_EXCHANGE) ||
		(lp_reg & AXGBE_XNP_NP_EXCHANGE))
	       ? axgbe_an73_tx_xnp(pdata, state)
	       : axgbe_an73_tx_training(pdata, state);
}

static enum axgbe_an axgbe_an73_rx_xnp(struct axgbe_port *pdata,
				       enum axgbe_rx *state)
{
	unsigned int ad_reg, lp_reg;

	/* Check Extended Next Page support */
	ad_reg = XMDIO_READ(pdata, MDIO_MMD_AN, MDIO_AN_XNP);
	lp_reg = XMDIO_READ(pdata, MDIO_MMD_AN, MDIO_AN_LPX);

	return ((ad_reg & AXGBE_XNP_NP_EXCHANGE) ||
		(lp_reg & AXGBE_XNP_NP_EXCHANGE))
	       ? axgbe_an73_tx_xnp(pdata, state)
	       : axgbe_an73_tx_training(pdata, state);
}
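
/*
 * Dispatch a received auto-negotiation page to the BPA or XNP handler
 * for the currently active rate (KR or KX). If the exchange has been
 * running longer than AXGBE_AN_MS_TIMEOUT, the per-rate RX state is
 * reset to AXGBE_RX_BPA and the timestamp is restarted.
 */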
static enum axgbe_an axgbe_an73_page_received(struct axgbe_port *pdata)
{
	enum axgbe_rx *state;
	unsigned long an_timeout;
	enum axgbe_an ret;
	unsigned long ticks;

	if (!pdata->an_start) {
		pdata->an_start = rte_get_timer_cycles();
	} else {
		an_timeout = pdata->an_start +
			msecs_to_timer_cycles(AXGBE_AN_MS_TIMEOUT);
		ticks = rte_get_timer_cycles();
		if (time_after(ticks, an_timeout)) {
			/* Auto-negotiation timed out, reset state */
			pdata->kr_state = AXGBE_RX_BPA;
			pdata->kx_state = AXGBE_RX_BPA;

			pdata->an_start = rte_get_timer_cycles();
		}
	}

	state = axgbe_in_kr_mode(pdata) ? &pdata->kr_state
					: &pdata->kx_state;

	switch (*state) {
	case AXGBE_RX_BPA:
		ret = axgbe_an73_rx_bpa(pdata, state);
		break;
	case AXGBE_RX_XNP:
		ret = axgbe_an73_rx_xnp(pdata, state);
		break;
	default:
		ret = AXGBE_AN_ERROR;
	}

	return ret;
}

static enum axgbe_an axgbe_an73_incompat_link(struct axgbe_port *pdata)
{
	/* Be sure we aren't looping trying to negotiate */
	if (axgbe_in_kr_mode(pdata)) {
		pdata->kr_state = AXGBE_RX_ERROR;

		if (!(pdata->phy.advertising & ADVERTISED_1000baseKX_Full) &&
		    !(pdata->phy.advertising & ADVERTISED_2500baseX_Full))
			return AXGBE_AN_NO_LINK;

		if (pdata->kx_state != AXGBE_RX_BPA)
			return AXGBE_AN_NO_LINK;
	} else {
		pdata->kx_state = AXGBE_RX_ERROR;

		if (!(pdata->phy.advertising & ADVERTISED_10000baseKR_Full))
			return AXGBE_AN_NO_LINK;

		if (pdata->kr_state != AXGBE_RX_BPA)
			return AXGBE_AN_NO_LINK;
	}

	axgbe_an_disable(pdata);
	axgbe_switch_mode(pdata);
	axgbe_an_restart(pdata);

	return AXGBE_AN_INCOMPAT_LINK;
}
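
/*
 * Clause 73 auto-negotiation state machine, run from the AN ISR with
 * an_mutex held. Pending interrupt bits in pdata->an_int are consumed
 * one at a time: a received page is handed to axgbe_an73_page_received(),
 * an incompatible link switches to the next mode and restarts AN, and
 * completion latches the result and returns the machine to the ready
 * state before AN interrupts are re-enabled.
 */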
static void axgbe_an73_state_machine(struct axgbe_port *pdata)
{
	enum axgbe_an cur_state = pdata->an_state;

	if (!pdata->an_int)
		return;

next_int:
	if (pdata->an_int & AXGBE_AN_CL73_PG_RCV) {
		pdata->an_state = AXGBE_AN_PAGE_RECEIVED;
		pdata->an_int &= ~AXGBE_AN_CL73_PG_RCV;
	} else if (pdata->an_int & AXGBE_AN_CL73_INC_LINK) {
		pdata->an_state = AXGBE_AN_INCOMPAT_LINK;
		pdata->an_int &= ~AXGBE_AN_CL73_INC_LINK;
	} else if (pdata->an_int & AXGBE_AN_CL73_INT_CMPLT) {
		pdata->an_state = AXGBE_AN_COMPLETE;
		pdata->an_int &= ~AXGBE_AN_CL73_INT_CMPLT;
	} else {
		pdata->an_state = AXGBE_AN_ERROR;
	}

again:
	cur_state = pdata->an_state;

	switch (pdata->an_state) {
	case AXGBE_AN_READY:
		pdata->an_supported = 0;
		break;
	case AXGBE_AN_PAGE_RECEIVED:
		pdata->an_state = axgbe_an73_page_received(pdata);
		pdata->an_supported++;
		break;
	case AXGBE_AN_INCOMPAT_LINK:
		pdata->an_supported = 0;
		pdata->parallel_detect = 0;
		pdata->an_state = axgbe_an73_incompat_link(pdata);
		break;
	case AXGBE_AN_COMPLETE:
		pdata->parallel_detect = pdata->an_supported ? 0 : 1;
		break;
	case AXGBE_AN_NO_LINK:
		break;
	default:
		pdata->an_state = AXGBE_AN_ERROR;
	}

	if (pdata->an_state == AXGBE_AN_NO_LINK) {
		pdata->an_int = 0;
		axgbe_an73_clear_interrupts(pdata);
		pdata->eth_dev->data->dev_link.link_status =
			ETH_LINK_DOWN;
	} else if (pdata->an_state == AXGBE_AN_ERROR) {
		PMD_DRV_LOG(ERR, "error during auto-negotiation, state=%u\n",
			    cur_state);
		pdata->an_int = 0;
		axgbe_an73_clear_interrupts(pdata);
	}

	if (pdata->an_state >= AXGBE_AN_COMPLETE) {
		pdata->an_result = pdata->an_state;
		pdata->an_state = AXGBE_AN_READY;
		pdata->kr_state = AXGBE_RX_BPA;
		pdata->kx_state = AXGBE_RX_BPA;
		pdata->an_start = 0;
		if (pdata->phy_if.phy_impl.an_post)
			pdata->phy_if.phy_impl.an_post(pdata);
	}

	if (cur_state != pdata->an_state)
		goto again;

	if (pdata->an_int)
		goto next_int;

	axgbe_an73_enable_interrupts(pdata);
}

static void axgbe_an73_isr(struct axgbe_port *pdata)
{
	/* Disable AN interrupts */
	axgbe_an73_disable_interrupts(pdata);

	/* Save the interrupt(s) that fired */
	pdata->an_int = XMDIO_READ(pdata, MDIO_MMD_AN, MDIO_AN_INT);

	if (pdata->an_int) {
		/* Clear the interrupt(s) that fired and process them */
		XMDIO_WRITE(pdata, MDIO_MMD_AN, MDIO_AN_INT, ~pdata->an_int);
		pthread_mutex_lock(&pdata->an_mutex);
		axgbe_an73_state_machine(pdata);
		pthread_mutex_unlock(&pdata->an_mutex);
	} else {
		/* Enable AN interrupts */
		axgbe_an73_enable_interrupts(pdata);
	}
}

static void axgbe_an_isr(struct axgbe_port *pdata)
{
	switch (pdata->an_mode) {
	case AXGBE_AN_MODE_CL73:
	case AXGBE_AN_MODE_CL73_REDRV:
		axgbe_an73_isr(pdata);
		break;
	case AXGBE_AN_MODE_CL37:
	case AXGBE_AN_MODE_CL37_SGMII:
		PMD_DRV_LOG(ERR, "AN_MODE_37 not supported\n");
		break;
	default:
		break;
	}
}

static void axgbe_an_combined_isr(struct axgbe_port *pdata)
{
	axgbe_an_isr(pdata);
}
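
/*
 * Program the Clause 73 advertisement words from the modes reported by
 * the PHY implementation. Register 3 carries the FEC ability/request
 * bits (0xc000), register 2 the 10GBase-KR (0x80) and 1000Base-KX /
 * 2500Base-KX (0x20) technology bits, and register 1 the pause (0x400)
 * and asymmetric pause (0x800) bits; next page exchange is not
 * advertised.
 */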
static void axgbe_an73_init(struct axgbe_port *pdata)
{
	unsigned int advertising, reg;

	advertising = pdata->phy_if.phy_impl.an_advertising(pdata);

	/* Set up Advertisement register 3 first */
	reg = XMDIO_READ(pdata, MDIO_MMD_AN, MDIO_AN_ADVERTISE + 2);
	if (advertising & ADVERTISED_10000baseR_FEC)
		reg |= 0xc000;
	else
		reg &= ~0xc000;

	XMDIO_WRITE(pdata, MDIO_MMD_AN, MDIO_AN_ADVERTISE + 2, reg);

	/* Set up Advertisement register 2 next */
	reg = XMDIO_READ(pdata, MDIO_MMD_AN, MDIO_AN_ADVERTISE + 1);
	if (advertising & ADVERTISED_10000baseKR_Full)
		reg |= 0x80;
	else
		reg &= ~0x80;

	if ((advertising & ADVERTISED_1000baseKX_Full) ||
	    (advertising & ADVERTISED_2500baseX_Full))
		reg |= 0x20;
	else
		reg &= ~0x20;

	XMDIO_WRITE(pdata, MDIO_MMD_AN, MDIO_AN_ADVERTISE + 1, reg);

	/* Set up Advertisement register 1 last */
	reg = XMDIO_READ(pdata, MDIO_MMD_AN, MDIO_AN_ADVERTISE);
	if (advertising & ADVERTISED_Pause)
		reg |= 0x400;
	else
		reg &= ~0x400;

	if (advertising & ADVERTISED_Asym_Pause)
		reg |= 0x800;
	else
		reg &= ~0x800;

	/* We don't intend to perform XNP */
	reg &= ~AXGBE_XNP_NP_EXCHANGE;

	XMDIO_WRITE(pdata, MDIO_MMD_AN, MDIO_AN_ADVERTISE, reg);
}

static void axgbe_an_init(struct axgbe_port *pdata)
{
	/* Set up advertisement registers based on current settings */
	pdata->an_mode = pdata->phy_if.phy_impl.an_mode(pdata);
	switch (pdata->an_mode) {
	case AXGBE_AN_MODE_CL73:
	case AXGBE_AN_MODE_CL73_REDRV:
		axgbe_an73_init(pdata);
		break;
	case AXGBE_AN_MODE_CL37:
	case AXGBE_AN_MODE_CL37_SGMII:
		PMD_DRV_LOG(ERR, "Unsupported AN_CL37\n");
		break;
	default:
		break;
	}
}

static void axgbe_phy_adjust_link(struct axgbe_port *pdata)
{
	if (pdata->phy.link) {
		/* Flow control support */
		pdata->pause_autoneg = pdata->phy.pause_autoneg;

		if (pdata->tx_pause != (unsigned int)pdata->phy.tx_pause) {
			pdata->hw_if.config_tx_flow_control(pdata);
			pdata->tx_pause = pdata->phy.tx_pause;
		}

		if (pdata->rx_pause != (unsigned int)pdata->phy.rx_pause) {
			pdata->hw_if.config_rx_flow_control(pdata);
			pdata->rx_pause = pdata->phy.rx_pause;
		}

		/* Speed support */
		if (pdata->phy_speed != pdata->phy.speed)
			pdata->phy_speed = pdata->phy.speed;
		if (pdata->phy_link != pdata->phy.link)
			pdata->phy_link = pdata->phy.link;
	} else if (pdata->phy_link) {
		pdata->phy_link = 0;
		pdata->phy_speed = SPEED_UNKNOWN;
	}
}

static int axgbe_phy_config_fixed(struct axgbe_port *pdata)
{
	enum axgbe_mode mode;

	/* Disable auto-negotiation */
	axgbe_an_disable(pdata);

	/* Set specified mode for specified speed */
	mode = pdata->phy_if.phy_impl.get_mode(pdata, pdata->phy.speed);
	switch (mode) {
	case AXGBE_MODE_KX_1000:
	case AXGBE_MODE_KX_2500:
	case AXGBE_MODE_KR:
	case AXGBE_MODE_SGMII_100:
	case AXGBE_MODE_SGMII_1000:
	case AXGBE_MODE_X:
	case AXGBE_MODE_SFI:
		break;
	case AXGBE_MODE_UNKNOWN:
	default:
		return -EINVAL;
	}

	/* Validate duplex mode */
	if (pdata->phy.duplex != DUPLEX_FULL)
		return -EINVAL;

	axgbe_set_mode(pdata, mode);

	return 0;
}

static int __axgbe_phy_config_aneg(struct axgbe_port *pdata)
{
	int ret;

	axgbe_set_bit(AXGBE_LINK_INIT, &pdata->dev_state);
	pdata->link_check = rte_get_timer_cycles();

	ret = pdata->phy_if.phy_impl.an_config(pdata);
	if (ret)
		return ret;

	if (pdata->phy.autoneg != AUTONEG_ENABLE) {
		ret = axgbe_phy_config_fixed(pdata);
		if (ret || !pdata->kr_redrv)
			return ret;
	}

	/* Disable auto-negotiation interrupt */
	rte_intr_disable(&pdata->pci_dev->intr_handle);

	/* Start auto-negotiation in a supported mode */
	if (axgbe_use_mode(pdata, AXGBE_MODE_KR)) {
		axgbe_set_mode(pdata, AXGBE_MODE_KR);
	} else if (axgbe_use_mode(pdata, AXGBE_MODE_KX_2500)) {
		axgbe_set_mode(pdata, AXGBE_MODE_KX_2500);
	} else if (axgbe_use_mode(pdata, AXGBE_MODE_KX_1000)) {
		axgbe_set_mode(pdata, AXGBE_MODE_KX_1000);
	} else if (axgbe_use_mode(pdata, AXGBE_MODE_SFI)) {
		axgbe_set_mode(pdata, AXGBE_MODE_SFI);
	} else if (axgbe_use_mode(pdata, AXGBE_MODE_X)) {
		axgbe_set_mode(pdata, AXGBE_MODE_X);
	} else if (axgbe_use_mode(pdata, AXGBE_MODE_SGMII_1000)) {
		axgbe_set_mode(pdata, AXGBE_MODE_SGMII_1000);
	} else if (axgbe_use_mode(pdata, AXGBE_MODE_SGMII_100)) {
		axgbe_set_mode(pdata, AXGBE_MODE_SGMII_100);
	} else {
		rte_intr_enable(&pdata->pci_dev->intr_handle);
		return -EINVAL;
	}

	/* Disable and stop any in progress auto-negotiation */
	axgbe_an_disable_all(pdata);

	/* Clear any auto-negotiation interrupts */
	axgbe_an_clear_interrupts_all(pdata);

	pdata->an_result = AXGBE_AN_READY;
	pdata->an_state = AXGBE_AN_READY;
	pdata->kr_state = AXGBE_RX_BPA;
	pdata->kx_state = AXGBE_RX_BPA;

	/* Re-enable auto-negotiation interrupt */
	rte_intr_enable(&pdata->pci_dev->intr_handle);

	axgbe_an_init(pdata);
	axgbe_an_restart(pdata);

	return 0;
}

static int axgbe_phy_config_aneg(struct axgbe_port *pdata)
{
	int ret;

	pthread_mutex_lock(&pdata->an_mutex);

	ret = __axgbe_phy_config_aneg(pdata);
	if (ret)
		axgbe_set_bit(AXGBE_LINK_ERR, &pdata->dev_state);
	else
		axgbe_clear_bit(AXGBE_LINK_ERR, &pdata->dev_state);

	pthread_mutex_unlock(&pdata->an_mutex);

	return ret;
}

static bool axgbe_phy_aneg_done(struct axgbe_port *pdata)
{
	return pdata->an_result == AXGBE_AN_COMPLETE;
}

static void axgbe_check_link_timeout(struct axgbe_port *pdata)
{
	unsigned long link_timeout;
	unsigned long ticks;

	link_timeout = pdata->link_check + (AXGBE_LINK_TIMEOUT *
					    2 * rte_get_timer_hz());
	ticks = rte_get_timer_cycles();
	if (time_after(ticks, link_timeout))
		axgbe_phy_config_aneg(pdata);
}

static enum axgbe_mode axgbe_phy_status_aneg(struct axgbe_port *pdata)
{
	return pdata->phy_if.phy_impl.an_outcome(pdata);
}

static void axgbe_phy_status_result(struct axgbe_port *pdata)
{
	enum axgbe_mode mode;

	pdata->phy.lp_advertising = 0;

	if ((pdata->phy.autoneg != AUTONEG_ENABLE) || pdata->parallel_detect)
		mode = axgbe_cur_mode(pdata);
	else
		mode = axgbe_phy_status_aneg(pdata);

	switch (mode) {
	case AXGBE_MODE_SGMII_100:
		pdata->phy.speed = SPEED_100;
		break;
	case AXGBE_MODE_X:
	case AXGBE_MODE_KX_1000:
	case AXGBE_MODE_SGMII_1000:
		pdata->phy.speed = SPEED_1000;
		break;
	case AXGBE_MODE_KX_2500:
		pdata->phy.speed = SPEED_2500;
		break;
	case AXGBE_MODE_KR:
	case AXGBE_MODE_SFI:
		pdata->phy.speed = SPEED_10000;
		break;
	case AXGBE_MODE_UNKNOWN:
	default:
		pdata->phy.speed = SPEED_UNKNOWN;
	}

	pdata->phy.duplex = DUPLEX_FULL;

	axgbe_set_mode(pdata, mode);
}

static void axgbe_phy_status(struct axgbe_port *pdata)
{
	unsigned int link_aneg;
	int an_restart;

	if (axgbe_test_bit(AXGBE_LINK_ERR, &pdata->dev_state)) {
		pdata->phy.link = 0;
		goto adjust_link;
	}

	link_aneg = (pdata->phy.autoneg == AUTONEG_ENABLE);

	pdata->phy.link = pdata->phy_if.phy_impl.link_status(pdata,
							     &an_restart);
	if (an_restart) {
		axgbe_phy_config_aneg(pdata);
		return;
	}

	if (pdata->phy.link) {
		if (link_aneg && !axgbe_phy_aneg_done(pdata)) {
			axgbe_check_link_timeout(pdata);
			return;
		}
		axgbe_phy_status_result(pdata);
		if (axgbe_test_bit(AXGBE_LINK_INIT, &pdata->dev_state))
			axgbe_clear_bit(AXGBE_LINK_INIT, &pdata->dev_state);
	} else {
		if (axgbe_test_bit(AXGBE_LINK_INIT, &pdata->dev_state)) {
			axgbe_check_link_timeout(pdata);

			if (link_aneg)
				return;
		}
		axgbe_phy_status_result(pdata);
	}

adjust_link:
	axgbe_phy_adjust_link(pdata);
}

static void axgbe_phy_stop(struct axgbe_port *pdata)
{
	if (!pdata->phy_started)
		return;
	/* Indicate the PHY is down */
	pdata->phy_started = 0;
	/* Disable auto-negotiation */
	axgbe_an_disable_all(pdata);
	pdata->phy_if.phy_impl.stop(pdata);
	pdata->phy.link = 0;
	axgbe_phy_adjust_link(pdata);
}
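
/*
 * Bring the PHY up: start the PHY implementation, pick the first usable
 * operating mode in priority order (KR, KX 2.5G, KX 1G, SFI, X,
 * SGMII 1G, SGMII 100M), then initialise and kick off auto-negotiation.
 */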
static int axgbe_phy_start(struct axgbe_port *pdata)
{
	int ret;

	ret = pdata->phy_if.phy_impl.start(pdata);
	if (ret)
		return ret;
	/* Set initial mode - call the mode setting routines
	 * directly to ensure we are properly configured
	 */
	if (axgbe_use_mode(pdata, AXGBE_MODE_KR)) {
		axgbe_kr_mode(pdata);
	} else if (axgbe_use_mode(pdata, AXGBE_MODE_KX_2500)) {
		axgbe_kx_2500_mode(pdata);
	} else if (axgbe_use_mode(pdata, AXGBE_MODE_KX_1000)) {
		axgbe_kx_1000_mode(pdata);
	} else if (axgbe_use_mode(pdata, AXGBE_MODE_SFI)) {
		axgbe_sfi_mode(pdata);
	} else if (axgbe_use_mode(pdata, AXGBE_MODE_X)) {
		axgbe_x_mode(pdata);
	} else if (axgbe_use_mode(pdata, AXGBE_MODE_SGMII_1000)) {
		axgbe_sgmii_1000_mode(pdata);
	} else if (axgbe_use_mode(pdata, AXGBE_MODE_SGMII_100)) {
		axgbe_sgmii_100_mode(pdata);
	} else {
		ret = -EINVAL;
		goto err_stop;
	}
	/* Indicate the PHY is up and running */
	pdata->phy_started = 1;
	axgbe_an_init(pdata);
	axgbe_an_enable_interrupts(pdata);
	return axgbe_phy_config_aneg(pdata);

err_stop:
	pdata->phy_if.phy_impl.stop(pdata);

	return ret;
}

static int axgbe_phy_reset(struct axgbe_port *pdata)
{
	int ret;

	ret = pdata->phy_if.phy_impl.reset(pdata);
	if (ret)
		return ret;

	/* Disable auto-negotiation for now */
	axgbe_an_disable_all(pdata);

	/* Clear auto-negotiation interrupts */
	axgbe_an_clear_interrupts_all(pdata);

	return 0;
}

static int axgbe_phy_best_advertised_speed(struct axgbe_port *pdata)
{
	if (pdata->phy.advertising & ADVERTISED_10000baseKR_Full)
		return SPEED_10000;
	else if (pdata->phy.advertising & ADVERTISED_10000baseT_Full)
		return SPEED_10000;
	else if (pdata->phy.advertising & ADVERTISED_2500baseX_Full)
		return SPEED_2500;
	else if (pdata->phy.advertising & ADVERTISED_1000baseKX_Full)
		return SPEED_1000;
	else if (pdata->phy.advertising & ADVERTISED_1000baseT_Full)
		return SPEED_1000;
	else if (pdata->phy.advertising & ADVERTISED_100baseT_Full)
		return SPEED_100;

	return SPEED_UNKNOWN;
}

static int axgbe_phy_init(struct axgbe_port *pdata)
{
	int ret;

	pdata->mdio_mmd = MDIO_MMD_PCS;

	/* Check for FEC support */
	pdata->fec_ability = XMDIO_READ(pdata, MDIO_MMD_PMAPMD,
					MDIO_PMA_10GBR_FECABLE);
	pdata->fec_ability &= (MDIO_PMA_10GBR_FECABLE_ABLE |
			       MDIO_PMA_10GBR_FECABLE_ERRABLE);

	/* Setup the phy (including supported features) */
	ret = pdata->phy_if.phy_impl.init(pdata);
	if (ret)
		return ret;
	pdata->phy.advertising = pdata->phy.supported;

	pdata->phy.address = 0;

	if (pdata->phy.advertising & ADVERTISED_Autoneg) {
		pdata->phy.autoneg = AUTONEG_ENABLE;
		pdata->phy.speed = SPEED_UNKNOWN;
		pdata->phy.duplex = DUPLEX_UNKNOWN;
	} else {
		pdata->phy.autoneg = AUTONEG_DISABLE;
		pdata->phy.speed = axgbe_phy_best_advertised_speed(pdata);
		pdata->phy.duplex = DUPLEX_FULL;
	}

	pdata->phy.link = 0;

	pdata->phy.pause_autoneg = pdata->pause_autoneg;
	pdata->phy.tx_pause = pdata->tx_pause;
	pdata->phy.rx_pause = pdata->rx_pause;

	/* Fix up Flow Control advertising */
	pdata->phy.advertising &= ~ADVERTISED_Pause;
	pdata->phy.advertising &= ~ADVERTISED_Asym_Pause;

	if (pdata->rx_pause) {
		pdata->phy.advertising |= ADVERTISED_Pause;
		pdata->phy.advertising |= ADVERTISED_Asym_Pause;
	}

	if (pdata->tx_pause)
		pdata->phy.advertising ^= ADVERTISED_Asym_Pause;
	return 0;
}

void axgbe_init_function_ptrs_phy(struct axgbe_phy_if *phy_if)
{
	phy_if->phy_init = axgbe_phy_init;
	phy_if->phy_reset = axgbe_phy_reset;
	phy_if->phy_start = axgbe_phy_start;
	phy_if->phy_stop = axgbe_phy_stop;
	phy_if->phy_status = axgbe_phy_status;
	phy_if->phy_config_aneg = axgbe_phy_config_aneg;
	phy_if->an_isr = axgbe_an_combined_isr;
}