[RFC,v2,6/6] dts: add performance test functions to test suite api

Message ID 20250516201834.626206-7-npratte@iol.unh.edu (mailing list archive)
State New
Delegated to: Paul Szczepanek
Headers
Series Add TREX Traffic Generator to DTS Framework |

Checks

Context Check Description
ci/checkpatch warning coding style issues
ci/iol-marvell-Functional success Functional Testing PASS
ci/iol-mellanox-Performance success Performance Testing PASS
ci/iol-broadcom-Performance success Performance Testing PASS
ci/iol-intel-Functional success Functional Testing PASS
ci/iol-intel-Performance success Performance Testing PASS
ci/iol-abi-testing success Testing PASS
ci/iol-unit-amd64-testing success Testing PASS
ci/iol-unit-arm64-testing success Testing PASS
ci/iol-sample-apps-testing success Testing PASS
ci/iol-compile-amd64-testing success Testing PASS
ci/iol-compile-arm64-testing success Testing PASS
ci/Intel-compilation fail Compilation issues
ci/intel-Testing success Testing PASS
ci/intel-Functional success Functional PASS

Commit Message

Nicholas Pratte May 16, 2025, 8:18 p.m. UTC
Provide functional performance method to run performance tests using a
user-supplied performance traffic generator. The single core performance
test is included, with some basic statistics checks verifying TG packet
transmission rates.

Bugzilla ID: 1697
Signed-off-by: Nicholas Pratte <npratte@iol.unh.edu>
---
 dts/configurations/tests_config.example.yaml |  5 ++
 dts/framework/test_suite.py                  | 27 ++++++++++
 dts/tests/TestSuite_single_core_perf.py      | 56 ++++++++++++++++++++
 3 files changed, 88 insertions(+)
 create mode 100644 dts/tests/TestSuite_single_core_perf.py
  

Comments

Dean Marx May 22, 2025, 5:54 p.m. UTC | #1
Reviewed-by: Dean Marx <dmarx@iol.unh.edu>
  

Patch

diff --git a/dts/configurations/tests_config.example.yaml b/dts/configurations/tests_config.example.yaml
index 38cf7a0dce..d3d867ae18 100644
--- a/dts/configurations/tests_config.example.yaml
+++ b/dts/configurations/tests_config.example.yaml
@@ -2,3 +2,8 @@ 
 
 hello_world:
   msg: A custom hello world to you!
+single_core_perf:
+  tx_rx_descriptors: [128, 512, 2048]
+  frame_sizes: [64, 128]
+  expected_throughput: 40
+  # rate: gbps | mbps
\ No newline at end of file
diff --git a/dts/framework/test_suite.py b/dts/framework/test_suite.py
index 507df508cb..a89faac2d5 100644
--- a/dts/framework/test_suite.py
+++ b/dts/framework/test_suite.py
@@ -38,6 +38,10 @@ 
     CapturingTrafficGenerator,
     PacketFilteringConfig,
 )
+from framework.testbed_model.traffic_generator.performance_traffic_generator import (
+    PerformanceTrafficGenerator,
+    PerformanceTrafficStats,
+)
 
 from .exception import ConfigurationError, InternalError, TestCaseVerifyError
 from .logger import DTSLogger, get_dts_logger
@@ -266,6 +270,26 @@  def send_packets_and_capture(
             duration,
         )
 
+    def assess_performance_by_packet(
+        self, packet: Packet, duration: int = 60
+    ) -> PerformanceTrafficStats:
+        """Send a given packet for a given duration and assess basic performance statistics.
+
+        Send `packet` and assess NIC performance for a given duration, corresponding to the test
+        suite's given topology.
+
+        Args:
+            packet: The packet to send.
+            duration: Performance test duration (in seconds).
+
+        Returns:
+            Performance statistics of the generated test.
+        """
+        assert isinstance(
+            self._ctx.perf_tg, PerformanceTrafficGenerator
+        ), "Cannot run performance tests on non-performance traffic generator."
+        return self._ctx.perf_tg.generate_traffic_and_stats(packet, duration)
+
     def send_packets(
         self,
         packets: list[Packet],
@@ -275,6 +299,9 @@  def send_packets(
         Args:
             packets: Packets to send.
         """
+        assert isinstance(
+            self._ctx.func_tg, CapturingTrafficGenerator
+        ), "Cannot send packets with a non-capturing traffic generator."
         packets = self._adjust_addresses(packets)
         self._ctx.func_tg.send_packets(packets, self._ctx.topology.tg_port_egress)
 
diff --git a/dts/tests/TestSuite_single_core_perf.py b/dts/tests/TestSuite_single_core_perf.py
new file mode 100644
index 0000000000..2e1d3c1ae8
--- /dev/null
+++ b/dts/tests/TestSuite_single_core_perf.py
@@ -0,0 +1,56 @@ 
+"""Single core performance test suite."""
+
+
+from framework.params.testpmd import RXRingParams, TXRingParams
+from framework.remote_session.testpmd_shell import TestPmdShell
+from framework.test_suite import TestSuite, perf_test, BaseConfig
+
+from scapy.layers.inet import IP
+from scapy.layers.l2 import Ether
+from scapy.packet import Raw
+
+class Config(BaseConfig):
+    """Performance test metrics to be compared by real-world results."""
+
+    #: Expected results
+    tx_rx_descriptors: list[int]
+    frame_sizes: list[int]
+    expected_throughput: int
+
+
+class TestSingleCorePerf(TestSuite):
+    """Single core performance test suite."""
+
+    config: Config
+
+    frame_sizes: list[int]
+    tx_rx_descriptors: list[int]
+    expected_throughput: int
+
+
+    def set_up_suite(self):
+        self.frame_sizes = self.config.frame_sizes
+        for frame_size in self.frame_sizes:
+            self.verify(frame_size >= 34,
+                "Provided frame size is too small. (Space needed for Ether()/IP())"
+            )
+        self.tx_rx_descriptors = self.config.tx_rx_descriptors
+        self.expected_throughput = self.config.expected_throughput
+
+    @perf_test
+    def test_perf_nic_single_core(self) -> None:
+        """Prototype test case."""
+        for frame_size in self.frame_sizes:
+            for descriptor_size in self.tx_rx_descriptors:
+                with TestPmdShell(
+                    tx_ring=TXRingParams(descriptors=descriptor_size),
+                    rx_ring=RXRingParams(descriptors=descriptor_size)
+                ) as testpmd:
+                    packet = Ether() / IP() / Raw(load="x" * (frame_size - 14 - 20))
+
+                    testpmd.start()
+                    stats = self.assess_performance_by_packet(packet, duration=60)
+                    self._logger.info(f"Performance statistics: {stats}")
+                    self.verify(
+                        stats.tx_expected_bps == self.expected_throughput, "Expected throughput does not match recorded throughput."
+                    )
\ No newline at end of file