[1/1] dts: add binding to different drivers to TG node
Checks
Commit Message
From: Jeremy Spewock <jspewock@iol.unh.edu>
The DTS framework in its current state supports binding ports to
different drivers on the SUT node but not the TG node. The TG node
already has the information that it needs about the different drivers
that it has available in the configuration file, but it did not
previously have access to the devbind script, so it did not use that
information for anything.
This patch moves the steps to copy the DPDK tarball into the node class
rather than the SUT node class, and calls this function on the TG node
as well as the SUT. It also moves the driver binding step into the Node
class and triggers the same pattern of binding to ports that existed on
the SUT on the TG.
Bugzilla ID: 1420
Signed-off-by: Jeremy Spewock <jspewock@iol.unh.edu>
---
dts/framework/runner.py | 2 +
dts/framework/testbed_model/node.py | 106 +++++++++++++++++++++++-
dts/framework/testbed_model/sut_node.py | 86 +------------------
3 files changed, 109 insertions(+), 85 deletions(-)
Comments
Makes sense to me!
Reviewed-by: Nicholas Pratte <npratte@iol.unh.edu>
On Mon, Aug 12, 2024 at 1:23 PM <jspewock@iol.unh.edu> wrote:
>
> From: Jeremy Spewock <jspewock@iol.unh.edu>
>
> The DTS framework in its current state supports binding ports to
> different drivers on the SUT node but not the TG node. The TG node
> already has the information that it needs about the different drivers
> that it has available in the configuration file, but it did not
> previously have access to the devbind script, so it did not use that
> information for anything.
>
> This patch moves the steps to copy the DPDK tarball into the node class
> rather than the SUT node class, and calls this function on the TG node
> as well as the SUT. It also moves the driver binding step into the Node
> class and triggers the same pattern of binding to ports that existed on
> the SUT on the TG.
>
> Bugzilla ID: 1420
>
> Signed-off-by: Jeremy Spewock <jspewock@iol.unh.edu>
> ---
> dts/framework/runner.py | 2 +
> dts/framework/testbed_model/node.py | 106 +++++++++++++++++++++++-
> dts/framework/testbed_model/sut_node.py | 86 +------------------
> 3 files changed, 109 insertions(+), 85 deletions(-)
>
> diff --git a/dts/framework/runner.py b/dts/framework/runner.py
> index 6b6f6a05f5..ed9e58b172 100644
> --- a/dts/framework/runner.py
> +++ b/dts/framework/runner.py
> @@ -484,6 +484,7 @@ def _run_build_target(
>
> try:
> sut_node.set_up_build_target(build_target_config)
> + tg_node.set_up_build_target(build_target_config)
> self._result.dpdk_version = sut_node.dpdk_version
> build_target_result.add_build_target_info(sut_node.get_build_target_info())
> build_target_result.update_setup(Result.PASS)
> @@ -498,6 +499,7 @@ def _run_build_target(
> try:
> self._logger.set_stage(DtsStage.build_target_teardown)
> sut_node.tear_down_build_target()
> + tg_node.tear_down_build_target()
> build_target_result.update_teardown(Result.PASS)
> except Exception as e:
> self._logger.exception("Build target teardown failed.")
> diff --git a/dts/framework/testbed_model/node.py b/dts/framework/testbed_model/node.py
> index 12a40170ac..8e6181e424 100644
> --- a/dts/framework/testbed_model/node.py
> +++ b/dts/framework/testbed_model/node.py
> @@ -13,11 +13,19 @@
> The :func:`~Node.skip_setup` decorator can be used without subclassing.
> """
>
> +import os
> +import tarfile
> from abc import ABC
> from ipaddress import IPv4Interface, IPv6Interface
> +from pathlib import PurePath
> from typing import Any, Callable, Union
>
> -from framework.config import OS, NodeConfiguration, TestRunConfiguration
> +from framework.config import (
> + OS,
> + BuildTargetConfiguration,
> + NodeConfiguration,
> + TestRunConfiguration,
> +)
> from framework.exception import ConfigurationError
> from framework.logger import DTSLogger, get_dts_logger
> from framework.settings import SETTINGS
> @@ -58,8 +66,11 @@ class Node(ABC):
> lcores: list[LogicalCore]
> ports: list[Port]
> _logger: DTSLogger
> + _remote_tmp_dir: PurePath
> + __remote_dpdk_dir: PurePath | None
> _other_sessions: list[OSSession]
> _test_run_config: TestRunConfiguration
> + _path_to_devbind_script: PurePath | None
>
> def __init__(self, node_config: NodeConfiguration):
> """Connect to the node and gather info during initialization.
> @@ -88,6 +99,9 @@ def __init__(self, node_config: NodeConfiguration):
>
> self._other_sessions = []
> self._init_ports()
> + self._remote_tmp_dir = self.main_session.get_remote_tmp_dir()
> + self.__remote_dpdk_dir = None
> + self._path_to_devbind_script = None
>
> def _init_ports(self) -> None:
> self.ports = [Port(self.name, port_config) for port_config in self.config.ports]
> @@ -95,6 +109,34 @@ def _init_ports(self) -> None:
> for port in self.ports:
> self.configure_port_state(port)
>
> + def _guess_dpdk_remote_dir(self) -> PurePath:
> + return self.main_session.guess_dpdk_remote_dir(self._remote_tmp_dir)
> +
> + @property
> + def _remote_dpdk_dir(self) -> PurePath:
> + """The remote DPDK dir.
> +
> + This internal property should be set after extracting the DPDK tarball. If it's not set,
> + that implies the DPDK setup step has been skipped, in which case we can guess where
> + a previous build was located.
> + """
> + if self.__remote_dpdk_dir is None:
> + self.__remote_dpdk_dir = self._guess_dpdk_remote_dir()
> + return self.__remote_dpdk_dir
> +
> + @_remote_dpdk_dir.setter
> + def _remote_dpdk_dir(self, value: PurePath) -> None:
> + self.__remote_dpdk_dir = value
> +
> + @property
> + def path_to_devbind_script(self) -> PurePath:
> + """The path to the dpdk-devbind.py script on the node."""
> + if self._path_to_devbind_script is None:
> + self._path_to_devbind_script = self.main_session.join_remote_path(
> + self._remote_dpdk_dir, "usertools", "dpdk-devbind.py"
> + )
> + return self._path_to_devbind_script
> +
> def set_up_test_run(self, test_run_config: TestRunConfiguration) -> None:
> """Test run setup steps.
>
> @@ -114,6 +156,24 @@ def tear_down_test_run(self) -> None:
> Additional steps can be added by extending the method in subclasses with the use of super().
> """
>
> + def set_up_build_target(self, build_target_config: BuildTargetConfiguration) -> None:
> + """Set up DPDK the node and bind ports.
> +
> + DPDK setup includes setting all internals needed for the build, the copying of DPDK tarball
> + and then building DPDK. The drivers are bound to those that DPDK needs.
> +
> + Args:
> + build_target_config: The build target test run configuration according to which
> + the setup steps will be taken.
> + """
> + self._copy_dpdk_tarball()
> + self.bind_ports_to_driver()
> +
> + def tear_down_build_target(self) -> None:
> + """Reset DPDK variables and bind port driver to the OS driver."""
> + self.__remote_dpdk_dir = None
> + self.bind_ports_to_driver(for_dpdk=False)
> +
> def create_session(self, name: str) -> OSSession:
> """Create and return a new OS-aware remote session.
>
> @@ -228,6 +288,50 @@ def skip_setup(func: Callable[..., Any]) -> Callable[..., Any]:
> else:
> return func
>
> + @skip_setup
> + def _copy_dpdk_tarball(self) -> None:
> + """Copy to and extract DPDK tarball on the node."""
> + self._logger.info(f"Copying DPDK tarball to {self.name}.")
> + self.main_session.copy_to(SETTINGS.dpdk_tarball_path, self._remote_tmp_dir)
> +
> + # construct remote tarball path
> + # the basename is the same on local host and on remote Node
> + remote_tarball_path = self.main_session.join_remote_path(
> + self._remote_tmp_dir, os.path.basename(SETTINGS.dpdk_tarball_path)
> + )
> +
> + # construct remote path after extracting
> + with tarfile.open(SETTINGS.dpdk_tarball_path) as dpdk_tar:
> + dpdk_top_dir = dpdk_tar.getnames()[0]
> + self._remote_dpdk_dir = self.main_session.join_remote_path(
> + self._remote_tmp_dir, dpdk_top_dir
> + )
> +
> + self._logger.info(
> + f"Extracting DPDK tarball on {self.name}: "
> + f"'{remote_tarball_path}' into '{self._remote_dpdk_dir}'."
> + )
> + # clean remote path where we're extracting
> + self.main_session.remove_remote_dir(self._remote_dpdk_dir)
> +
> + # then extract to remote path
> + self.main_session.extract_remote_tarball(remote_tarball_path, self._remote_dpdk_dir)
> +
> + def bind_ports_to_driver(self, for_dpdk: bool = True) -> None:
> + """Bind all ports on the node to a driver.
> +
> + Args:
> + for_dpdk: If :data:`True`, binds ports to os_driver_for_dpdk.
> + If :data:`False`, binds to os_driver.
> + """
> + for port in self.ports:
> + driver = port.os_driver_for_dpdk if for_dpdk else port.os_driver
> + self.main_session.send_command(
> + f"{self.path_to_devbind_script} -b {driver} --force {port.pci}",
> + privileged=True,
> + verify=True,
> + )
> +
>
> def create_session(node_config: NodeConfiguration, name: str, logger: DTSLogger) -> OSSession:
> """Factory for OS-aware sessions.
> diff --git a/dts/framework/testbed_model/sut_node.py b/dts/framework/testbed_model/sut_node.py
> index 2855fe0276..f3fd4e2304 100644
> --- a/dts/framework/testbed_model/sut_node.py
> +++ b/dts/framework/testbed_model/sut_node.py
> @@ -13,7 +13,6 @@
>
>
> import os
> -import tarfile
> import time
> from pathlib import PurePath
>
> @@ -26,7 +25,6 @@
> )
> from framework.params.eal import EalParams
> from framework.remote_session.remote_session import CommandResult
> -from framework.settings import SETTINGS
> from framework.utils import MesonArgs
>
> from .node import Node
> @@ -59,14 +57,11 @@ class SutNode(Node):
> dpdk_timestamp: str
> _build_target_config: BuildTargetConfiguration | None
> _env_vars: dict
> - _remote_tmp_dir: PurePath
> - __remote_dpdk_dir: PurePath | None
> _app_compile_timeout: float
> _dpdk_kill_session: OSSession | None
> _dpdk_version: str | None
> _node_info: NodeInfo | None
> _compiler_version: str | None
> - _path_to_devbind_script: PurePath | None
>
> def __init__(self, node_config: SutNodeConfiguration):
> """Extend the constructor with SUT node specifics.
> @@ -79,8 +74,6 @@ def __init__(self, node_config: SutNodeConfiguration):
> self.dpdk_prefix_list = []
> self._build_target_config = None
> self._env_vars = {}
> - self._remote_tmp_dir = self.main_session.get_remote_tmp_dir()
> - self.__remote_dpdk_dir = None
> self._app_compile_timeout = 90
> self._dpdk_kill_session = None
> self.dpdk_timestamp = (
> @@ -89,25 +82,8 @@ def __init__(self, node_config: SutNodeConfiguration):
> self._dpdk_version = None
> self._node_info = None
> self._compiler_version = None
> - self._path_to_devbind_script = None
> self._logger.info(f"Created node: {self.name}")
>
> - @property
> - def _remote_dpdk_dir(self) -> PurePath:
> - """The remote DPDK dir.
> -
> - This internal property should be set after extracting the DPDK tarball. If it's not set,
> - that implies the DPDK setup step has been skipped, in which case we can guess where
> - a previous build was located.
> - """
> - if self.__remote_dpdk_dir is None:
> - self.__remote_dpdk_dir = self._guess_dpdk_remote_dir()
> - return self.__remote_dpdk_dir
> -
> - @_remote_dpdk_dir.setter
> - def _remote_dpdk_dir(self, value: PurePath) -> None:
> - self.__remote_dpdk_dir = value
> -
> @property
> def remote_dpdk_build_dir(self) -> PurePath:
> """The remote DPDK build directory.
> @@ -151,15 +127,6 @@ def compiler_version(self) -> str:
> return ""
> return self._compiler_version
>
> - @property
> - def path_to_devbind_script(self) -> PurePath:
> - """The path to the dpdk-devbind.py script on the node."""
> - if self._path_to_devbind_script is None:
> - self._path_to_devbind_script = self.main_session.join_remote_path(
> - self._remote_dpdk_dir, "usertools", "dpdk-devbind.py"
> - )
> - return self._path_to_devbind_script
> -
> def get_build_target_info(self) -> BuildTargetInfo:
> """Get additional build target information.
>
> @@ -170,9 +137,6 @@ def get_build_target_info(self) -> BuildTargetInfo:
> dpdk_version=self.dpdk_version, compiler_version=self.compiler_version
> )
>
> - def _guess_dpdk_remote_dir(self) -> PurePath:
> - return self.main_session.guess_dpdk_remote_dir(self._remote_tmp_dir)
> -
> def set_up_test_run(self, test_run_config: TestRunConfiguration) -> None:
> """Extend the test run setup with vdev config.
>
> @@ -199,19 +163,17 @@ def set_up_build_target(self, build_target_config: BuildTargetConfiguration) ->
> build_target_config: The build target test run configuration according to which
> the setup steps will be taken.
> """
> + super().set_up_build_target(build_target_config)
> self._configure_build_target(build_target_config)
> - self._copy_dpdk_tarball()
> self._build_dpdk()
> - self.bind_ports_to_driver()
>
> def tear_down_build_target(self) -> None:
> """Reset DPDK variables and bind port driver to the OS driver."""
> + super().tear_down_build_target()
> self._env_vars = {}
> self._build_target_config = None
> - self.__remote_dpdk_dir = None
> self._dpdk_version = None
> self._compiler_version = None
> - self.bind_ports_to_driver(for_dpdk=False)
>
> def _configure_build_target(self, build_target_config: BuildTargetConfiguration) -> None:
> """Populate common environment variables and set build target config."""
> @@ -224,35 +186,6 @@ def _configure_build_target(self, build_target_config: BuildTargetConfiguration)
> f"'{build_target_config.compiler_wrapper} {build_target_config.compiler.name}'"
> ) # fmt: skip
>
> - @Node.skip_setup
> - def _copy_dpdk_tarball(self) -> None:
> - """Copy to and extract DPDK tarball on the SUT node."""
> - self._logger.info("Copying DPDK tarball to SUT.")
> - self.main_session.copy_to(SETTINGS.dpdk_tarball_path, self._remote_tmp_dir)
> -
> - # construct remote tarball path
> - # the basename is the same on local host and on remote Node
> - remote_tarball_path = self.main_session.join_remote_path(
> - self._remote_tmp_dir, os.path.basename(SETTINGS.dpdk_tarball_path)
> - )
> -
> - # construct remote path after extracting
> - with tarfile.open(SETTINGS.dpdk_tarball_path) as dpdk_tar:
> - dpdk_top_dir = dpdk_tar.getnames()[0]
> - self._remote_dpdk_dir = self.main_session.join_remote_path(
> - self._remote_tmp_dir, dpdk_top_dir
> - )
> -
> - self._logger.info(
> - f"Extracting DPDK tarball on SUT: "
> - f"'{remote_tarball_path}' into '{self._remote_dpdk_dir}'."
> - )
> - # clean remote path where we're extracting
> - self.main_session.remove_remote_dir(self._remote_dpdk_dir)
> -
> - # then extract to remote path
> - self.main_session.extract_remote_tarball(remote_tarball_path, self._remote_dpdk_dir)
> -
> @Node.skip_setup
> def _build_dpdk(self) -> None:
> """Build DPDK.
> @@ -335,18 +268,3 @@ def configure_ipv4_forwarding(self, enable: bool) -> None:
> enable: If :data:`True`, enable the forwarding, otherwise disable it.
> """
> self.main_session.configure_ipv4_forwarding(enable)
> -
> - def bind_ports_to_driver(self, for_dpdk: bool = True) -> None:
> - """Bind all ports on the SUT to a driver.
> -
> - Args:
> - for_dpdk: If :data:`True`, binds ports to os_driver_for_dpdk.
> - If :data:`False`, binds to os_driver.
> - """
> - for port in self.ports:
> - driver = port.os_driver_for_dpdk if for_dpdk else port.os_driver
> - self.main_session.send_command(
> - f"{self.path_to_devbind_script} -b {driver} --force {port.pci}",
> - privileged=True,
> - verify=True,
> - )
> --
> 2.45.2
>
On 12. 8. 2024 19:22, jspewock@iol.unh.edu wrote:
> From: Jeremy Spewock <jspewock@iol.unh.edu>
>
> The DTS framework in its current state supports binding ports to
> different drivers on the SUT node but not the TG node. The TG node
> already has the information that it needs about the different drivers
> that it has available in the configuration file, but it did not
> previously have access to the devbind script, so it did not use that
> information for anything.
>
> This patch moves the steps to copy the DPDK tarball into the node class
> rather than the SUT node class, and calls this function on the TG node
> as well as the SUT. It also moves the driver binding step into the Node
> class and triggers the same pattern of binding to ports that existed on
> the SUT on the TG.
>
This is a very inefficient way to do this. We'll have to build DPDK
twice and that's very time consuming. I was thinking in terms of just
copying the script to the TG node and storing its location on the TG
node. We should have access to the script whether DTS is run from the
repository or a tarball.
On Mon, Sep 9, 2024 at 8:16 AM Juraj Linkeš <juraj.linkes@pantheon.tech> wrote:
>
>
>
> On 12. 8. 2024 19:22, jspewock@iol.unh.edu wrote:
> > From: Jeremy Spewock <jspewock@iol.unh.edu>
> >
> > The DTS framework in its current state supports binding ports to
> > different drivers on the SUT node but not the TG node. The TG node
> > already has the information that it needs about the different drivers
> > that it has available in the configuration file, but it did not
> > previously have access to the devbind script, so it did not use that
> > information for anything.
> >
> > This patch moves the steps to copy the DPDK tarball into the node class
> > rather than the SUT node class, and calls this function on the TG node
> > as well as the SUT. It also moves the driver binding step into the Node
> > class and triggers the same pattern of binding to ports that existed on
> > the SUT on the TG.
> >
>
> This is a very inefficient way to do this. We'll have to build DPDK
> twice and that's very time consuming. I was thinking in terms of just
This patch shouldn't be compiling DPDK twice; are you referring to the
process of copying the tarball over and extracting it taking too long?
If so, that makes sense that it takes longer than we need for this one
task. I figured it wouldn't hurt to have the whole DPDK directory
there, and that it could even be potentially useful to have it if the
TG ever needed it. That and it seemed like the most straightforward
way that kept these two set up in a similar way. Extracting the
tarball is obviously pretty quick, so I guess the real question here
is whether it is fine to add the time of one extra SCP of the DPDK
tarball around.
> copying the script to the TG node and storing its location on the TG
> node. We should have access to the script whether DTS is run from the
> repository or a tarball.
We should have access to it regardless, but extracting only that one
script would differ based on whether it is a tarball or a repository
since (I believe, at least) I would have to use the tarfile library to
read and extract only this one file to copy over if it was a tarball.
It would be faster I assume, so if you think it is worth it I could
make the change. Unless you are saying that we wouldn't need to take
the devbind script from the tarball that is passed into the DTS run
and instead assume that we can just go one directory up from `dts/` on
the runner host. That could be an interesting idea which would be
faster, but I wasn't sure if that was something that was fine to do
since (I don't think at least) there is anything that technically ties
you to running from in a DPDK directory other than the docker
container.
>
Looks good to me, and I don't understand it as triggering a second
compile on the TG node.
I wonder, though, whether the docstring for set_up_build_target() makes
it sound like it does.
Reviewed-by: Patrick Robb <probb@iol.unh.edu>
@@ -484,6 +484,7 @@ def _run_build_target(
try:
sut_node.set_up_build_target(build_target_config)
+ tg_node.set_up_build_target(build_target_config)
self._result.dpdk_version = sut_node.dpdk_version
build_target_result.add_build_target_info(sut_node.get_build_target_info())
build_target_result.update_setup(Result.PASS)
@@ -498,6 +499,7 @@ def _run_build_target(
try:
self._logger.set_stage(DtsStage.build_target_teardown)
sut_node.tear_down_build_target()
+ tg_node.tear_down_build_target()
build_target_result.update_teardown(Result.PASS)
except Exception as e:
self._logger.exception("Build target teardown failed.")
@@ -13,11 +13,19 @@
The :func:`~Node.skip_setup` decorator can be used without subclassing.
"""
+import os
+import tarfile
from abc import ABC
from ipaddress import IPv4Interface, IPv6Interface
+from pathlib import PurePath
from typing import Any, Callable, Union
-from framework.config import OS, NodeConfiguration, TestRunConfiguration
+from framework.config import (
+ OS,
+ BuildTargetConfiguration,
+ NodeConfiguration,
+ TestRunConfiguration,
+)
from framework.exception import ConfigurationError
from framework.logger import DTSLogger, get_dts_logger
from framework.settings import SETTINGS
@@ -58,8 +66,11 @@ class Node(ABC):
lcores: list[LogicalCore]
ports: list[Port]
_logger: DTSLogger
+ _remote_tmp_dir: PurePath
+ __remote_dpdk_dir: PurePath | None
_other_sessions: list[OSSession]
_test_run_config: TestRunConfiguration
+ _path_to_devbind_script: PurePath | None
def __init__(self, node_config: NodeConfiguration):
"""Connect to the node and gather info during initialization.
@@ -88,6 +99,9 @@ def __init__(self, node_config: NodeConfiguration):
self._other_sessions = []
self._init_ports()
+ self._remote_tmp_dir = self.main_session.get_remote_tmp_dir()
+ self.__remote_dpdk_dir = None
+ self._path_to_devbind_script = None
def _init_ports(self) -> None:
self.ports = [Port(self.name, port_config) for port_config in self.config.ports]
@@ -95,6 +109,34 @@ def _init_ports(self) -> None:
for port in self.ports:
self.configure_port_state(port)
+ def _guess_dpdk_remote_dir(self) -> PurePath:
+ return self.main_session.guess_dpdk_remote_dir(self._remote_tmp_dir)
+
+ @property
+ def _remote_dpdk_dir(self) -> PurePath:
+ """The remote DPDK dir.
+
+ This internal property should be set after extracting the DPDK tarball. If it's not set,
+ that implies the DPDK setup step has been skipped, in which case we can guess where
+ a previous build was located.
+ """
+ if self.__remote_dpdk_dir is None:
+ self.__remote_dpdk_dir = self._guess_dpdk_remote_dir()
+ return self.__remote_dpdk_dir
+
+ @_remote_dpdk_dir.setter
+ def _remote_dpdk_dir(self, value: PurePath) -> None:
+ self.__remote_dpdk_dir = value
+
+ @property
+ def path_to_devbind_script(self) -> PurePath:
+ """The path to the dpdk-devbind.py script on the node."""
+ if self._path_to_devbind_script is None:
+ self._path_to_devbind_script = self.main_session.join_remote_path(
+ self._remote_dpdk_dir, "usertools", "dpdk-devbind.py"
+ )
+ return self._path_to_devbind_script
+
def set_up_test_run(self, test_run_config: TestRunConfiguration) -> None:
"""Test run setup steps.
@@ -114,6 +156,24 @@ def tear_down_test_run(self) -> None:
Additional steps can be added by extending the method in subclasses with the use of super().
"""
+ def set_up_build_target(self, build_target_config: BuildTargetConfiguration) -> None:
+ """Set up DPDK the node and bind ports.
+
+ DPDK setup includes setting all internals needed for the build, the copying of DPDK tarball
+ and then building DPDK. The drivers are bound to those that DPDK needs.
+
+ Args:
+ build_target_config: The build target test run configuration according to which
+ the setup steps will be taken.
+ """
+ self._copy_dpdk_tarball()
+ self.bind_ports_to_driver()
+
+ def tear_down_build_target(self) -> None:
+ """Reset DPDK variables and bind port driver to the OS driver."""
+ self.__remote_dpdk_dir = None
+ self.bind_ports_to_driver(for_dpdk=False)
+
def create_session(self, name: str) -> OSSession:
"""Create and return a new OS-aware remote session.
@@ -228,6 +288,50 @@ def skip_setup(func: Callable[..., Any]) -> Callable[..., Any]:
else:
return func
+ @skip_setup
+ def _copy_dpdk_tarball(self) -> None:
+ """Copy to and extract DPDK tarball on the node."""
+ self._logger.info(f"Copying DPDK tarball to {self.name}.")
+ self.main_session.copy_to(SETTINGS.dpdk_tarball_path, self._remote_tmp_dir)
+
+ # construct remote tarball path
+ # the basename is the same on local host and on remote Node
+ remote_tarball_path = self.main_session.join_remote_path(
+ self._remote_tmp_dir, os.path.basename(SETTINGS.dpdk_tarball_path)
+ )
+
+ # construct remote path after extracting
+ with tarfile.open(SETTINGS.dpdk_tarball_path) as dpdk_tar:
+ dpdk_top_dir = dpdk_tar.getnames()[0]
+ self._remote_dpdk_dir = self.main_session.join_remote_path(
+ self._remote_tmp_dir, dpdk_top_dir
+ )
+
+ self._logger.info(
+ f"Extracting DPDK tarball on {self.name}: "
+ f"'{remote_tarball_path}' into '{self._remote_dpdk_dir}'."
+ )
+ # clean remote path where we're extracting
+ self.main_session.remove_remote_dir(self._remote_dpdk_dir)
+
+ # then extract to remote path
+ self.main_session.extract_remote_tarball(remote_tarball_path, self._remote_dpdk_dir)
+
+ def bind_ports_to_driver(self, for_dpdk: bool = True) -> None:
+ """Bind all ports on the node to a driver.
+
+ Args:
+ for_dpdk: If :data:`True`, binds ports to os_driver_for_dpdk.
+ If :data:`False`, binds to os_driver.
+ """
+ for port in self.ports:
+ driver = port.os_driver_for_dpdk if for_dpdk else port.os_driver
+ self.main_session.send_command(
+ f"{self.path_to_devbind_script} -b {driver} --force {port.pci}",
+ privileged=True,
+ verify=True,
+ )
+
def create_session(node_config: NodeConfiguration, name: str, logger: DTSLogger) -> OSSession:
"""Factory for OS-aware sessions.
@@ -13,7 +13,6 @@
import os
-import tarfile
import time
from pathlib import PurePath
@@ -26,7 +25,6 @@
)
from framework.params.eal import EalParams
from framework.remote_session.remote_session import CommandResult
-from framework.settings import SETTINGS
from framework.utils import MesonArgs
from .node import Node
@@ -59,14 +57,11 @@ class SutNode(Node):
dpdk_timestamp: str
_build_target_config: BuildTargetConfiguration | None
_env_vars: dict
- _remote_tmp_dir: PurePath
- __remote_dpdk_dir: PurePath | None
_app_compile_timeout: float
_dpdk_kill_session: OSSession | None
_dpdk_version: str | None
_node_info: NodeInfo | None
_compiler_version: str | None
- _path_to_devbind_script: PurePath | None
def __init__(self, node_config: SutNodeConfiguration):
"""Extend the constructor with SUT node specifics.
@@ -79,8 +74,6 @@ def __init__(self, node_config: SutNodeConfiguration):
self.dpdk_prefix_list = []
self._build_target_config = None
self._env_vars = {}
- self._remote_tmp_dir = self.main_session.get_remote_tmp_dir()
- self.__remote_dpdk_dir = None
self._app_compile_timeout = 90
self._dpdk_kill_session = None
self.dpdk_timestamp = (
@@ -89,25 +82,8 @@ def __init__(self, node_config: SutNodeConfiguration):
self._dpdk_version = None
self._node_info = None
self._compiler_version = None
- self._path_to_devbind_script = None
self._logger.info(f"Created node: {self.name}")
- @property
- def _remote_dpdk_dir(self) -> PurePath:
- """The remote DPDK dir.
-
- This internal property should be set after extracting the DPDK tarball. If it's not set,
- that implies the DPDK setup step has been skipped, in which case we can guess where
- a previous build was located.
- """
- if self.__remote_dpdk_dir is None:
- self.__remote_dpdk_dir = self._guess_dpdk_remote_dir()
- return self.__remote_dpdk_dir
-
- @_remote_dpdk_dir.setter
- def _remote_dpdk_dir(self, value: PurePath) -> None:
- self.__remote_dpdk_dir = value
-
@property
def remote_dpdk_build_dir(self) -> PurePath:
"""The remote DPDK build directory.
@@ -151,15 +127,6 @@ def compiler_version(self) -> str:
return ""
return self._compiler_version
- @property
- def path_to_devbind_script(self) -> PurePath:
- """The path to the dpdk-devbind.py script on the node."""
- if self._path_to_devbind_script is None:
- self._path_to_devbind_script = self.main_session.join_remote_path(
- self._remote_dpdk_dir, "usertools", "dpdk-devbind.py"
- )
- return self._path_to_devbind_script
-
def get_build_target_info(self) -> BuildTargetInfo:
"""Get additional build target information.
@@ -170,9 +137,6 @@ def get_build_target_info(self) -> BuildTargetInfo:
dpdk_version=self.dpdk_version, compiler_version=self.compiler_version
)
- def _guess_dpdk_remote_dir(self) -> PurePath:
- return self.main_session.guess_dpdk_remote_dir(self._remote_tmp_dir)
-
def set_up_test_run(self, test_run_config: TestRunConfiguration) -> None:
"""Extend the test run setup with vdev config.
@@ -199,19 +163,17 @@ def set_up_build_target(self, build_target_config: BuildTargetConfiguration) ->
build_target_config: The build target test run configuration according to which
the setup steps will be taken.
"""
+ super().set_up_build_target(build_target_config)
self._configure_build_target(build_target_config)
- self._copy_dpdk_tarball()
self._build_dpdk()
- self.bind_ports_to_driver()
def tear_down_build_target(self) -> None:
"""Reset DPDK variables and bind port driver to the OS driver."""
+ super().tear_down_build_target()
self._env_vars = {}
self._build_target_config = None
- self.__remote_dpdk_dir = None
self._dpdk_version = None
self._compiler_version = None
- self.bind_ports_to_driver(for_dpdk=False)
def _configure_build_target(self, build_target_config: BuildTargetConfiguration) -> None:
"""Populate common environment variables and set build target config."""
@@ -224,35 +186,6 @@ def _configure_build_target(self, build_target_config: BuildTargetConfiguration)
f"'{build_target_config.compiler_wrapper} {build_target_config.compiler.name}'"
) # fmt: skip
- @Node.skip_setup
- def _copy_dpdk_tarball(self) -> None:
- """Copy to and extract DPDK tarball on the SUT node."""
- self._logger.info("Copying DPDK tarball to SUT.")
- self.main_session.copy_to(SETTINGS.dpdk_tarball_path, self._remote_tmp_dir)
-
- # construct remote tarball path
- # the basename is the same on local host and on remote Node
- remote_tarball_path = self.main_session.join_remote_path(
- self._remote_tmp_dir, os.path.basename(SETTINGS.dpdk_tarball_path)
- )
-
- # construct remote path after extracting
- with tarfile.open(SETTINGS.dpdk_tarball_path) as dpdk_tar:
- dpdk_top_dir = dpdk_tar.getnames()[0]
- self._remote_dpdk_dir = self.main_session.join_remote_path(
- self._remote_tmp_dir, dpdk_top_dir
- )
-
- self._logger.info(
- f"Extracting DPDK tarball on SUT: "
- f"'{remote_tarball_path}' into '{self._remote_dpdk_dir}'."
- )
- # clean remote path where we're extracting
- self.main_session.remove_remote_dir(self._remote_dpdk_dir)
-
- # then extract to remote path
- self.main_session.extract_remote_tarball(remote_tarball_path, self._remote_dpdk_dir)
-
@Node.skip_setup
def _build_dpdk(self) -> None:
"""Build DPDK.
@@ -335,18 +268,3 @@ def configure_ipv4_forwarding(self, enable: bool) -> None:
enable: If :data:`True`, enable the forwarding, otherwise disable it.
"""
self.main_session.configure_ipv4_forwarding(enable)
-
- def bind_ports_to_driver(self, for_dpdk: bool = True) -> None:
- """Bind all ports on the SUT to a driver.
-
- Args:
- for_dpdk: If :data:`True`, binds ports to os_driver_for_dpdk.
- If :data:`False`, binds to os_driver.
- """
- for port in self.ports:
- driver = port.os_driver_for_dpdk if for_dpdk else port.os_driver
- self.main_session.send_command(
- f"{self.path_to_devbind_script} -b {driver} --force {port.pci}",
- privileged=True,
- verify=True,
- )