xref: /dpdk/dts/framework/config/__init__.py (revision 3fbb93cff3be23a45fc1ec524f83d001a30df273)
1# SPDX-License-Identifier: BSD-3-Clause
2# Copyright(c) 2010-2021 Intel Corporation
3# Copyright(c) 2022-2023 University of New Hampshire
4# Copyright(c) 2023 PANTHEON.tech s.r.o.
5# Copyright(c) 2024 Arm Limited
6
7"""Testbed configuration and test suite specification.
8
9This package offers classes that hold real-time information about the testbed, hold test run
10configuration describing the tested testbed and a loader function, :func:`load_config`, which loads
11the YAML test run configuration file and validates it against the :class:`Configuration` Pydantic
12model.
13
14The YAML test run configuration file is parsed into a dictionary, parts of which are used throughout
15this package. The allowed keys and types inside this dictionary map directly to the
16:class:`Configuration` model, its fields and sub-models.
17
18The test run configuration has two main sections:
19
20    * The :class:`TestRunConfiguration` which defines what tests are going to be run
21      and how DPDK will be built. It also references the testbed where these tests and DPDK
22      are going to be run,
23    * The nodes of the testbed are defined in the other section,
24      a :class:`list` of :class:`NodeConfiguration` objects.
25
The real-time information about the testbed is supposed to be gathered at runtime.
27
28The classes defined in this package make heavy use of :mod:`pydantic`.
29Nearly all of them are frozen:
30
31    * Frozen makes the object immutable. This enables further optimizations,
32      and makes it thread safe should we ever want to move in that direction.
33"""
34
35import tarfile
36from enum import Enum, auto, unique
37from functools import cached_property
38from pathlib import Path, PurePath
39from typing import TYPE_CHECKING, Annotated, Any, Literal, NamedTuple
40
41import yaml
42from pydantic import (
43    BaseModel,
44    ConfigDict,
45    Field,
46    ValidationError,
47    field_validator,
48    model_validator,
49)
50from typing_extensions import Self
51
52from framework.exception import ConfigurationError
53from framework.utils import REGEX_FOR_PCI_ADDRESS, StrEnum
54
55if TYPE_CHECKING:
56    from framework.test_suite import TestSuiteSpec
57
58
class FrozenModel(BaseModel):
    """A :class:`~pydantic.BaseModel` pre-configured for this package.

    Every subclass is an immutable, strictly-validated model: fields cannot be
    reassigned after creation and unknown input fields are rejected.
    """

    #: Reject any extra fields and make all fields read-only.
    model_config = ConfigDict(extra="forbid", frozen=True)
64
65
@unique
class Architecture(StrEnum):
    r"""The supported architectures of :class:`~framework.testbed_model.node.Node`\s."""

    #: 32-bit x86 (i686).
    i686 = auto()
    #: 64-bit x86.
    x86_64 = auto()
    #: 32-bit x86 build target (distinct from :attr:`i686` — verify exact meaning against usage).
    x86_32 = auto()
    #: 64-bit Arm (AArch64).
    arm64 = auto()
    #: 64-bit little-endian PowerPC.
    ppc64le = auto()
80
81
@unique
class OS(StrEnum):
    r"""The supported operating systems of :class:`~framework.testbed_model.node.Node`\s."""

    #: GNU/Linux.
    linux = auto()
    #: FreeBSD.
    freebsd = auto()
    #: Microsoft Windows.
    windows = auto()
92
93
@unique
class CPUType(StrEnum):
    r"""The supported CPUs of :class:`~framework.testbed_model.node.Node`\s."""

    #: Target the CPU of the build machine itself (no cross-tuning).
    native = auto()
    #: Generic Armv8-A.
    armv8a = auto()
    #: NXP DPAA2 SoCs.
    dpaa2 = auto()
    #: Marvell/Cavium ThunderX.
    thunderx = auto()
    #: AppliedMicro X-Gene 1.
    xgene1 = auto()
108
109
@unique
class Compiler(StrEnum):
    r"""The supported compilers of :class:`~framework.testbed_model.node.Node`\s."""

    #: GNU Compiler Collection.
    gcc = auto()
    #: LLVM Clang.
    clang = auto()
    #: Intel C/C++ Compiler.
    icc = auto()
    #: Microsoft Visual C++.
    msvc = auto()
122
123
@unique
class TrafficGeneratorType(str, Enum):
    """The supported traffic generators.

    Inherits from :class:`str` so members are also plain strings, which lets them
    serve as :class:`~typing.Literal` discriminator values in the traffic
    generator configuration models (see :class:`ScapyTrafficGeneratorConfig`).
    """

    #: The Scapy-based traffic generator.
    SCAPY = "SCAPY"
130
131
class HugepageConfiguration(FrozenModel):
    r"""The hugepage configuration of :class:`~framework.testbed_model.node.Node`\s.

    The hugepage size is not configurable here; the YAML key for this section is
    ``hugepages_2mb`` (see :class:`NodeConfiguration`), which suggests 2 MB pages —
    confirm against the hugepage setup code.
    """

    #: The number of hugepages to allocate.
    number_of: int
    #: If :data:`True`, the hugepages will be configured on the first NUMA node.
    force_first_numa: bool
139
140
class PortConfig(FrozenModel):
    r"""The port configuration of :class:`~framework.testbed_model.node.Node`\s.

    Each port is identified by its PCI address and declares the peer port it is
    physically linked to; the links are cross-checked in
    :meth:`Configuration.validate_ports`.
    """

    #: The PCI address of the port (validated against ``REGEX_FOR_PCI_ADDRESS``).
    pci: str = Field(pattern=REGEX_FOR_PCI_ADDRESS)
    #: The driver that the kernel should bind this device to for DPDK to use it.
    os_driver_for_dpdk: str = Field(examples=["vfio-pci", "mlx5_core"])
    #: The operating system driver name when the operating system controls the port.
    os_driver: str = Field(examples=["i40e", "ice", "mlx5_core"])
    #: The name of the peer node this port is connected to.
    peer_node: str
    #: The PCI address of the peer port connected to this port.
    peer_pci: str = Field(pattern=REGEX_FOR_PCI_ADDRESS)
154
155
class TrafficGeneratorConfig(FrozenModel):
    """A protocol required to define traffic generator types.

    Concrete subclasses (e.g. :class:`ScapyTrafficGeneratorConfig`) narrow `type`
    to a single :class:`~typing.Literal` value so that Pydantic can tell them
    apart inside a tagged union.
    """

    #: The traffic generator type the child class is required to define to be distinguished among
    #: others.
    type: TrafficGeneratorType
162
163
class ScapyTrafficGeneratorConfig(TrafficGeneratorConfig):
    """Scapy traffic generator specific configuration."""

    #: Discriminator value identifying this model in the traffic generator union.
    type: Literal[TrafficGeneratorType.SCAPY]
168
169
#: A union type discriminating traffic generators by the `type` field.
#: Currently only Scapy is supported, so the "union" has a single member.
TrafficGeneratorConfigTypes = Annotated[ScapyTrafficGeneratorConfig, Field(discriminator="type")]

#: Comma-separated list of logical cores to use. An empty string means use all lcores.
#: Accepts single cores and inclusive ranges, e.g. ``"1,2,3,4,5,18-22"`` or ``"10-15"``.
LogicalCores = Annotated[
    str,
    Field(
        examples=["1,2,3,4,5,18-22", "10-15"],
        pattern=r"^(([0-9]+|([0-9]+-[0-9]+))(,([0-9]+|([0-9]+-[0-9]+)))*)?$",
    ),
]
181
182
class NodeConfiguration(FrozenModel):
    r"""The configuration of :class:`~framework.testbed_model.node.Node`\s."""

    #: The name of the :class:`~framework.testbed_model.node.Node`.
    name: str
    #: The hostname of the :class:`~framework.testbed_model.node.Node`. Can also be an IP address.
    hostname: str
    #: The name of the user used to connect to the :class:`~framework.testbed_model.node.Node`.
    user: str
    #: The password of the user. The use of passwords is heavily discouraged, please use SSH keys.
    password: str | None = None
    #: The architecture of the :class:`~framework.testbed_model.node.Node`.
    arch: Architecture
    #: The operating system of the :class:`~framework.testbed_model.node.Node`.
    os: OS
    #: A comma delimited list of logical cores to use when running DPDK. Defaults to core 1 only.
    lcores: LogicalCores = "1"
    #: If :data:`True`, the first logical core won't be used.
    use_first_core: bool = False
    #: An optional hugepage configuration. The YAML key is ``hugepages_2mb``; presumably
    #: 2 MB pages are configured — confirm with the hugepage setup code.
    hugepages: HugepageConfiguration | None = Field(None, alias="hugepages_2mb")
    #: The ports that can be used in testing. At least one port is required.
    ports: list[PortConfig] = Field(min_length=1)
206
207
class SutNodeConfiguration(NodeConfiguration):
    """:class:`~framework.testbed_model.sut_node.SutNode` specific configuration."""

    #: The number of memory channels to use when running DPDK. Defaults to 1.
    memory_channels: int = 1
213
214
class TGNodeConfiguration(NodeConfiguration):
    """:class:`~framework.testbed_model.tg_node.TGNode` specific configuration.

    Distinguished from :class:`SutNodeConfiguration` by the presence of the
    `traffic_generator` field.
    """

    #: The configuration of the traffic generator present on the TG node.
    traffic_generator: TrafficGeneratorConfigTypes
220
221
#: Union type for all the node configuration types. Extra fields are forbidden on these models,
#: so the presence or absence of ``traffic_generator`` determines which member an input matches.
NodeConfigurationTypes = TGNodeConfiguration | SutNodeConfiguration
224
225
def resolve_path(path: Path) -> Path:
    """Return `path` made absolute, with any symlinks resolved.

    Thin wrapper around :meth:`pathlib.Path.resolve`, shaped so it can be reused
    as a Pydantic field validator function.
    """
    resolved = path.resolve()
    return resolved
229
230
class BaseDPDKLocation(FrozenModel):
    """DPDK location base class.

    The path to the DPDK sources and type of location.
    """

    #: Specifies whether to find DPDK on the SUT node or on the local host, respectively
    #: represented by the :class:`RemoteDPDKLocation` and :class:`LocalDPDKLocation` subclasses.
    #: Subclasses pin this to a :class:`~typing.Literal` so it discriminates the location union.
    remote: bool = False
240
241
class LocalDPDKLocation(BaseDPDKLocation):
    """Local DPDK location base class.

    This class is meant to represent any location that is present only locally.
    """

    #: Discriminating literal: a local location is never remote.
    remote: Literal[False] = False
249
250
class LocalDPDKTreeLocation(LocalDPDKLocation):
    """Local DPDK tree location.

    This class makes a distinction from :class:`RemoteDPDKTreeLocation` by enforcing on the fly
    validation.
    """

    #: The path to the DPDK source tree directory on the local host passed as string.
    dpdk_tree: Path

    #: Resolve the local DPDK tree path.
    resolve_dpdk_tree_path = field_validator("dpdk_tree")(resolve_path)

    @model_validator(mode="after")
    def validate_dpdk_tree_path(self) -> Self:
        """Validate the provided DPDK tree path.

        Raises:
            ValueError: If the path does not exist or is not a directory. Pydantic wraps
                this into a :class:`~pydantic.ValidationError` for the caller.
        """
        # Raise explicitly instead of using `assert`: assertions are stripped when
        # Python runs with -O, which would silently skip this validation.
        if not self.dpdk_tree.exists():
            raise ValueError("DPDK tree not found in local filesystem.")
        if not self.dpdk_tree.is_dir():
            raise ValueError("The DPDK tree path must be a directory.")
        return self
270
271
class LocalDPDKTarballLocation(LocalDPDKLocation):
    """Local DPDK tarball location.

    This class makes a distinction from :class:`RemoteDPDKTarballLocation` by enforcing on the fly
    validation.
    """

    #: The path to the DPDK tarball on the local host passed as string.
    tarball: Path

    #: Resolve the local tarball path.
    resolve_tarball_path = field_validator("tarball")(resolve_path)

    @model_validator(mode="after")
    def validate_tarball_path(self) -> Self:
        """Validate the provided tarball.

        Raises:
            ValueError: If the file does not exist or is not a valid tar archive. Pydantic
                wraps this into a :class:`~pydantic.ValidationError` for the caller.
        """
        # Raise explicitly instead of using `assert`: assertions are stripped when
        # Python runs with -O, which would silently skip this validation.
        if not self.tarball.exists():
            raise ValueError("DPDK tarball not found in local filesystem.")
        if not tarfile.is_tarfile(self.tarball):
            raise ValueError("The DPDK tarball must be a valid tar archive.")
        return self
291
292
class RemoteDPDKLocation(BaseDPDKLocation):
    """Remote DPDK location base class.

    This class is meant to represent any location that is present only remotely.
    """

    #: Discriminating literal: a remote location is always remote.
    remote: Literal[True] = True
300
301
class RemoteDPDKTreeLocation(RemoteDPDKLocation):
    """Remote DPDK tree location.

    This class is distinct from :class:`LocalDPDKTreeLocation` which enforces on the fly
    validation; a remote path cannot be checked from the local host.
    """

    #: The path to the DPDK source tree directory on the remote node passed as string.
    #: A :class:`~pathlib.PurePath` is used since the path is never accessed locally.
    dpdk_tree: PurePath
310
311
class RemoteDPDKTarballLocation(RemoteDPDKLocation):
    """Remote DPDK tarball location.

    This class is distinct from :class:`LocalDPDKTarballLocation` which enforces on the fly
    validation; a remote path cannot be checked from the local host.
    """

    #: The path to the DPDK tarball on the remote node passed as string.
    #: A :class:`~pathlib.PurePath` is used since the path is never accessed locally.
    tarball: PurePath
321
322
#: Union type for different DPDK locations. The ``remote`` literal together with the mutually
#: exclusive ``dpdk_tree``/``tarball`` fields (extras are forbidden) selects the matching member.
DPDKLocation = (
    LocalDPDKTreeLocation
    | LocalDPDKTarballLocation
    | RemoteDPDKTreeLocation
    | RemoteDPDKTarballLocation
)
330
331
class BaseDPDKBuildConfiguration(FrozenModel):
    """The base configuration for different types of build.

    The configuration contains the location of the DPDK sources and the configuration
    used for building them.
    """

    #: The location of the DPDK tree.
    dpdk_location: DPDKLocation
340
341
class DPDKPrecompiledBuildConfiguration(BaseDPDKBuildConfiguration):
    """DPDK precompiled build configuration."""

    #: If defined, DPDK has been pre-compiled and the build directory is located in a
    #: subdirectory of the `~dpdk_location.dpdk_tree` or `~dpdk_location.tarball` root directory.
    #: Must be a non-empty string.
    precompiled_build_dir: str = Field(min_length=1)
348
349
class DPDKBuildOptionsConfiguration(FrozenModel):
    """DPDK build options configuration.

    The build options used for building DPDK.
    """

    #: The target architecture to build for.
    arch: Architecture
    #: The target OS to build for.
    os: OS
    #: The target CPU to build for.
    cpu: CPUType
    #: The compiler executable to use.
    compiler: Compiler
    #: This string will be put in front of the compiler when executing the build. Useful for adding
    #: wrapper commands, such as ``ccache``.
    compiler_wrapper: str = ""

    @cached_property
    def name(self) -> str:
        """The name of the build target, derived from the build options.

        Joins architecture, OS, CPU and compiler, e.g. ``arm64-linux-native-gcc``.
        Cached because the options are frozen and the value cannot change.
        """
        return f"{self.arch}-{self.os}-{self.cpu}-{self.compiler}"
372
373
class DPDKUncompiledBuildConfiguration(BaseDPDKBuildConfiguration):
    """DPDK uncompiled build configuration."""

    #: The build options to compile DPDK with.
    build_options: DPDKBuildOptionsConfiguration
379
380
#: Union type for different build configurations. The presence of either
#: ``precompiled_build_dir`` or ``build_options`` (extras are forbidden) selects the member.
DPDKBuildConfiguration = DPDKPrecompiledBuildConfiguration | DPDKUncompiledBuildConfiguration
383
384
class TestSuiteConfig(FrozenModel):
    """Test suite configuration.

    Information about a single test suite to be executed. This can also be represented as a string
    instead of a mapping, example:

    .. code:: yaml

        test_runs:
        - test_suites:
            # As string representation:
            - hello_world # test all of `hello_world`, or
            - hello_world hello_world_single_core # test only `hello_world_single_core`
            # or as model fields:
            - test_suite: hello_world
              test_cases: [hello_world_single_core] # without this field all test cases are run
    """

    #: The name of the test suite module without the starting ``TestSuite_``.
    test_suite_name: str = Field(alias="test_suite")
    #: The names of test cases from this test suite to execute. If empty, all test cases will be
    #: executed.
    test_cases_names: list[str] = Field(default_factory=list, alias="test_cases")

    @cached_property
    def test_suite_spec(self) -> "TestSuiteSpec":
        """The specification of the requested test suite.

        Raises:
            AssertionError: If `test_suite_name` does not match any known test suite module.
        """
        # Imported locally to avoid a circular import at module load time.
        from framework.test_suite import find_by_name

        test_suite_spec = find_by_name(self.test_suite_name)
        assert (
            test_suite_spec is not None
        ), f"{self.test_suite_name} is not a valid test suite module name."
        return test_suite_spec

    @model_validator(mode="before")
    @classmethod
    def convert_from_string(cls, data: Any) -> Any:
        """Convert the string representation of the model into a valid mapping."""
        if isinstance(data, str):
            [test_suite, *test_cases] = data.split()
            return dict(test_suite=test_suite, test_cases=test_cases)
        return data

    @model_validator(mode="after")
    def validate_names(self) -> Self:
        """Validate the supplied test suite and test cases names.

        This validator relies on the cached property `test_suite_spec` to run for the first
        time in this call, therefore triggering the assertions if needed.
        """
        # Materialize the names into a set: the previous lazy `map` iterator was
        # (partially) consumed by each membership test, so any test case name checked
        # after the first one could be falsely reported as invalid.
        available_test_cases = {
            test_case.name for test_case in self.test_suite_spec.class_obj.get_test_cases()
        }
        for requested_test_case in self.test_cases_names:
            assert requested_test_case in available_test_cases, (
                f"{requested_test_case} is not a valid test case "
                f"of test suite {self.test_suite_name}."
            )

        return self
446
447
class TestRunSUTNodeConfiguration(FrozenModel):
    """The SUT node configuration of a test run."""

    #: The SUT node to use in this test run. Must match the ``name`` of a configured SUT node.
    node_name: str
    #: The names of virtual devices to test.
    vdevs: list[str] = Field(default_factory=list)
455
456
class TestRunConfiguration(FrozenModel):
    """The configuration of a test run.

    The configuration contains testbed information, what tests to execute
    and with what DPDK build.
    """

    #: The DPDK configuration used to test. The YAML key is ``dpdk_build``.
    dpdk_config: DPDKBuildConfiguration = Field(alias="dpdk_build")
    #: Whether to run performance tests.
    perf: bool
    #: Whether to run functional tests.
    func: bool
    #: Whether to skip smoke tests.
    skip_smoke_tests: bool = False
    #: The names of test suites and/or test cases to execute. At least one is required.
    test_suites: list[TestSuiteConfig] = Field(min_length=1)
    #: The SUT node configuration to use in this test run.
    system_under_test_node: TestRunSUTNodeConfiguration
    #: The TG node name to use in this test run. Must match the ``name`` of a configured TG node.
    traffic_generator_node: str
    #: The seed to use for pseudo-random generation. Optional.
    random_seed: int | None = None
480
481
class TestRunWithNodesConfiguration(NamedTuple):
    """Tuple containing the configuration of the test run and its associated nodes.

    Built by :attr:`Configuration.test_runs_with_nodes` once the node names referenced
    by a test run have been resolved to actual node configurations.
    """

    #: The test run configuration.
    test_run_config: TestRunConfiguration
    #: The SUT node configuration referenced by the test run.
    sut_node_config: SutNodeConfiguration
    #: The TG node configuration referenced by the test run.
    tg_node_config: TGNodeConfiguration
491
492
class Configuration(FrozenModel):
    """DTS testbed and test configuration."""

    #: Test run configurations. At least one is required.
    test_runs: list[TestRunConfiguration] = Field(min_length=1)
    #: Node configurations. At least one is required.
    nodes: list[NodeConfigurationTypes] = Field(min_length=1)

    @cached_property
    def test_runs_with_nodes(self) -> list[TestRunWithNodesConfiguration]:
        """List of test runs with the associated nodes.

        Resolves the SUT and TG node names of each test run to their node
        configurations, asserting that each name exists and that the matching node
        is of the expected type. The assertions surface as Pydantic validation
        errors via :meth:`validate_test_runs_with_nodes`.
        """
        test_runs_with_nodes = []

        for test_run_no, test_run in enumerate(self.test_runs):
            sut_node_name = test_run.system_under_test_node.node_name
            # The first node with a matching name, or None when there is no match.
            sut_node = next(filter(lambda n: n.name == sut_node_name, self.nodes), None)

            assert sut_node is not None, (
                f"test_runs.{test_run_no}.sut_node_config.node_name "
                f"({test_run.system_under_test_node.node_name}) is not a valid node name"
            )
            assert isinstance(sut_node, SutNodeConfiguration), (
                f"test_runs.{test_run_no}.sut_node_config.node_name is a valid node name, "
                "but it is not a valid SUT node"
            )

            tg_node_name = test_run.traffic_generator_node
            tg_node = next(filter(lambda n: n.name == tg_node_name, self.nodes), None)

            assert tg_node is not None, (
                f"test_runs.{test_run_no}.tg_node_name "
                f"({test_run.traffic_generator_node}) is not a valid node name"
            )
            assert isinstance(tg_node, TGNodeConfiguration), (
                f"test_runs.{test_run_no}.tg_node_name is a valid node name, "
                "but it is not a valid TG node"
            )

            test_runs_with_nodes.append(TestRunWithNodesConfiguration(test_run, sut_node, tg_node))

        return test_runs_with_nodes

    @field_validator("nodes")
    @classmethod
    def validate_node_names(cls, nodes: list[NodeConfiguration]) -> list[NodeConfiguration]:
        """Validate that the node names are unique."""
        # Map each seen name to its index so a clash can report the earlier node.
        nodes_by_name: dict[str, int] = {}
        for node_no, node in enumerate(nodes):
            assert node.name not in nodes_by_name, (
                f"node {node_no} cannot have the same name as node {nodes_by_name[node.name]} "
                f"({node.name})"
            )
            nodes_by_name[node.name] = node_no

        return nodes

    @model_validator(mode="after")
    def validate_ports(self) -> Self:
        """Validate that the ports are all linked to valid ones."""
        # Maps each (node name, port PCI) to False while unlinked, then to the
        # (node index, port index) of the port that claimed it as its peer.
        port_links: dict[tuple[str, str], Literal[False] | tuple[int, int]] = {
            (node.name, port.pci): False for node in self.nodes for port in node.ports
        }

        for node_no, node in enumerate(self.nodes):
            for port_no, port in enumerate(node.ports):
                peer_port_identifier = (port.peer_node, port.peer_pci)
                peer_port = port_links.get(peer_port_identifier, None)
                # The peer must be a configured port...
                assert peer_port is not None, (
                    "invalid peer port specified for " f"nodes.{node_no}.ports.{port_no}"
                )
                # ...and no other port may have claimed it yet.
                assert peer_port is False, (
                    f"the peer port specified for nodes.{node_no}.ports.{port_no} "
                    f"is already linked to nodes.{peer_port[0]}.ports.{peer_port[1]}"
                )
                port_links[peer_port_identifier] = (node_no, port_no)

        return self

    @model_validator(mode="after")
    def validate_test_runs_with_nodes(self) -> Self:
        """Validate the test runs to nodes associations.

        This validator relies on the cached property `test_runs_with_nodes` to run for the first
        time in this call, therefore triggering the assertions if needed.
        """
        # Accessing the property is the whole point; the result itself is unused here.
        if self.test_runs_with_nodes:
            pass
        return self
581
582
def load_config(config_file_path: Path) -> Configuration:
    """Load DTS test run configuration from a file.

    Load the YAML test run configuration file, validate it, and create a test run configuration
    object.

    The YAML test run configuration file is specified in the :option:`--config-file` command line
    argument or the :envvar:`DTS_CFG_FILE` environment variable.

    Args:
        config_file_path: The path to the YAML test run configuration file.

    Returns:
        The parsed test run configuration.

    Raises:
        ConfigurationError: If the supplied configuration file is invalid.
    """
    # YAML streams are Unicode; read the file as UTF-8 explicitly instead of relying
    # on the platform-dependent locale default encoding.
    with open(config_file_path, "r", encoding="utf-8") as f:
        config_data = yaml.safe_load(f)

    try:
        return Configuration.model_validate(config_data)
    except ValidationError as e:
        raise ConfigurationError("failed to load the supplied configuration") from e
608