get:
Show a patch.

patch:
Partially update a patch.

put:
Update a patch.
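
For example, the endpoints above can be driven from a script. Below is a minimal sketch, assuming the Python requests library and a Patchwork API token exported as PATCHWORK_TOKEN (the environment variable name and the updated field/value are illustrative, not taken from this page); reads need no authentication, writes do.

import os
import requests

BASE = "https://patches.dpdk.org/api"
PATCH_ID = 134583

# get: show a patch (read-only, no authentication required)
patch = requests.get(f"{BASE}/patches/{PATCH_ID}/")
patch.raise_for_status()
print(patch.json()["name"], patch.json()["state"])

# patch: partially update a patch (writes assume token auth and maintainer rights)
resp = requests.patch(
    f"{BASE}/patches/{PATCH_ID}/",
    headers={"Authorization": f"Token {os.environ['PATCHWORK_TOKEN']}"},
    json={"state": "superseded"},  # illustrative field and value
)
resp.raise_for_status()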

GET /api/patches/134583/?format=api
HTTP 200 OK
Allow: GET, PUT, PATCH, HEAD, OPTIONS
Content-Type: application/json
Vary: Accept

{
    "id": 134583,
    "url": "https://patches.dpdk.org/api/patches/134583/?format=api",
    "web_url": "https://patches.dpdk.org/project/dpdk/patch/20231123151344.162812-18-juraj.linkes@pantheon.tech/",
    "project": {
        "id": 1,
        "url": "https://patches.dpdk.org/api/projects/1/?format=api",
        "name": "DPDK",
        "link_name": "dpdk",
        "list_id": "dev.dpdk.org",
        "list_email": "dev@dpdk.org",
        "web_url": "http://core.dpdk.org",
        "scm_url": "git://dpdk.org/dpdk",
        "webscm_url": "http://git.dpdk.org/dpdk",
        "list_archive_url": "https://inbox.dpdk.org/dev",
        "list_archive_url_format": "https://inbox.dpdk.org/dev/{}",
        "commit_url_format": ""
    },
    "msgid": "<20231123151344.162812-18-juraj.linkes@pantheon.tech>",
    "list_archive_url": "https://inbox.dpdk.org/dev/20231123151344.162812-18-juraj.linkes@pantheon.tech",
    "date": "2023-11-23T15:13:40",
    "name": "[v8,17/21] dts: node docstring update",
    "commit_ref": null,
    "pull_url": null,
    "state": "superseded",
    "archived": true,
    "hash": "cc809196577ad817e4ffb2e3f35d0c7126cc25a2",
    "submitter": {
        "id": 1626,
        "url": "https://patches.dpdk.org/api/people/1626/?format=api",
        "name": "Juraj Linkeš",
        "email": "juraj.linkes@pantheon.tech"
    },
    "delegate": {
        "id": 1,
        "url": "https://patches.dpdk.org/api/users/1/?format=api",
        "username": "tmonjalo",
        "first_name": "Thomas",
        "last_name": "Monjalon",
        "email": "thomas@monjalon.net"
    },
    "mbox": "https://patches.dpdk.org/project/dpdk/patch/20231123151344.162812-18-juraj.linkes@pantheon.tech/mbox/",
    "series": [
        {
            "id": 30375,
            "url": "https://patches.dpdk.org/api/series/30375/?format=api",
            "web_url": "https://patches.dpdk.org/project/dpdk/list/?series=30375",
            "date": "2023-11-23T15:13:23",
            "name": "dts: docstrings update",
            "version": 8,
            "mbox": "https://patches.dpdk.org/series/30375/mbox/"
        }
    ],
    "comments": "https://patches.dpdk.org/api/patches/134583/comments/",
    "check": "success",
    "checks": "https://patches.dpdk.org/api/patches/134583/checks/",
    "tags": {},
    "related": [],
    "headers": {
        "Return-Path": "<dev-bounces@dpdk.org>",
        "X-Original-To": "patchwork@inbox.dpdk.org",
        "Delivered-To": "patchwork@inbox.dpdk.org",
        "Received": [
            "from mails.dpdk.org (mails.dpdk.org [217.70.189.124])\n\tby inbox.dpdk.org (Postfix) with ESMTP id 95DA1433AC;\n\tThu, 23 Nov 2023 16:16:28 +0100 (CET)",
            "from mails.dpdk.org (localhost [127.0.0.1])\n\tby mails.dpdk.org (Postfix) with ESMTP id 4A93D43299;\n\tThu, 23 Nov 2023 16:14:26 +0100 (CET)",
            "from mail-wr1-f54.google.com (mail-wr1-f54.google.com\n [209.85.221.54]) by mails.dpdk.org (Postfix) with ESMTP id 657C342FCD\n for <dev@dpdk.org>; Thu, 23 Nov 2023 16:14:09 +0100 (CET)",
            "by mail-wr1-f54.google.com with SMTP id\n ffacd0b85a97d-3316ad2bee5so545447f8f.1\n for <dev@dpdk.org>; Thu, 23 Nov 2023 07:14:09 -0800 (PST)",
            "from jlinkes-PT-Latitude-5530.. ([84.245.121.10])\n by smtp.gmail.com with ESMTPSA id\n q4-20020adfea04000000b003296b488961sm1870143wrm.31.2023.11.23.07.14.07\n (version=TLS1_3 cipher=TLS_AES_256_GCM_SHA384 bits=256/256);\n Thu, 23 Nov 2023 07:14:08 -0800 (PST)"
        ],
        "DKIM-Signature": "v=1; a=rsa-sha256; c=relaxed/relaxed;\n d=pantheon.tech; s=google; t=1700752449; x=1701357249; darn=dpdk.org;\n h=content-transfer-encoding:mime-version:references:in-reply-to\n :message-id:date:subject:cc:to:from:from:to:cc:subject:date\n :message-id:reply-to;\n bh=vQu+v+3UG/m2JX1k4LoRsjqvWSYETb2xzJAIFf7nUBU=;\n b=sivPSFb0BW+ZstSPBx48N5dct1/aITLz4gnjeYR2jjJnKN5D8Rx4DoTR9IJNQNe6Fx\n 7ASo8XKmH1cRK2PpoOWZ+GG/lggAvqcxWZolendruS94pmPmyEICq1olS6id6OEfVZC6\n I3979p/TXyUHl4ToV/3I/+4ju9AK948Jpi8m2CTf4cPBMHSsZtVGNDAqx8QiW0zbz1D8\n hD+9lHZJNufosbq3eOobvMfSR0D71RKcLnYC9ufn+4U2s1HYn2YFJgh8ENLxXxQqLp7g\n PodqPCJWGnVoojRzmYNe/+U8GQeS+qzZCoNSlmriYfRnrrZ3CJ/MlpvhmP0UucC8lH0W\n 1kcg==",
        "X-Google-DKIM-Signature": "v=1; a=rsa-sha256; c=relaxed/relaxed;\n d=1e100.net; s=20230601; t=1700752449; x=1701357249;\n h=content-transfer-encoding:mime-version:references:in-reply-to\n :message-id:date:subject:cc:to:from:x-gm-message-state:from:to:cc\n :subject:date:message-id:reply-to;\n bh=vQu+v+3UG/m2JX1k4LoRsjqvWSYETb2xzJAIFf7nUBU=;\n b=n1gsIotJcwpECgzi8efsgtFqgFB4M2iEn4JQLF6p3K5y1B+uBSYo336I74dRfwS66T\n cV+0oO07gQOPcQbDkl9LDQgeeV4hxS8xf664Ox5GOYG/x65iUQtQuY5/I8N+FfSB5FaN\n Wmkaf8cFcEFjEA+X5f8P1jXhONlRF7MJh8OR9FH49CUMhD57qfgdpfWdbV4CHVwRvr3W\n BQE2vuUAZsZVryqhKhZOFCjFXLlOYVImc9WneRgKDtFSNAyjeIH2jGERHKx6j1dMicAO\n koA+C3aI6gY6L2RUSayGQnKaO61BCjtthGdGjnVktepCCvoERzUSeNmKrtJ4+qbhmkjC\n 94ug==",
        "X-Gm-Message-State": "AOJu0YzqMNjgU00ugo8L2HAklUIzKzC10lfLCRoyx6qh6h3J1BJG+izW\n 8N8XchKVWKm1vqJECmXxBbgj/05O/G4Ut92hLiF6Lw==",
        "X-Google-Smtp-Source": "\n AGHT+IGWzIvS0u1tj5bMEvL7c96OPArFRSkRbQchAfvtzLm5SEAYzZPUWfjuIDfZt+tKQJb4Ic1jmw==",
        "X-Received": "by 2002:a5d:5749:0:b0:332:e692:a127 with SMTP id\n q9-20020a5d5749000000b00332e692a127mr585291wrw.50.1700752448987;\n Thu, 23 Nov 2023 07:14:08 -0800 (PST)",
        "From": "=?utf-8?q?Juraj_Linke=C5=A1?= <juraj.linkes@pantheon.tech>",
        "To": "thomas@monjalon.net, Honnappa.Nagarahalli@arm.com, jspewock@iol.unh.edu,\n probb@iol.unh.edu, paul.szczepanek@arm.com, yoan.picchi@foss.arm.com,\n Luca.Vizzarro@arm.com",
        "Cc": "dev@dpdk.org, =?utf-8?q?Juraj_Linke=C5=A1?= <juraj.linkes@pantheon.tech>",
        "Subject": "[PATCH v8 17/21] dts: node docstring update",
        "Date": "Thu, 23 Nov 2023 16:13:40 +0100",
        "Message-Id": "<20231123151344.162812-18-juraj.linkes@pantheon.tech>",
        "X-Mailer": "git-send-email 2.34.1",
        "In-Reply-To": "<20231123151344.162812-1-juraj.linkes@pantheon.tech>",
        "References": "<20231115130959.39420-1-juraj.linkes@pantheon.tech>\n <20231123151344.162812-1-juraj.linkes@pantheon.tech>",
        "MIME-Version": "1.0",
        "Content-Type": "text/plain; charset=UTF-8",
        "Content-Transfer-Encoding": "8bit",
        "X-BeenThere": "dev@dpdk.org",
        "X-Mailman-Version": "2.1.29",
        "Precedence": "list",
        "List-Id": "DPDK patches and discussions <dev.dpdk.org>",
        "List-Unsubscribe": "<https://mails.dpdk.org/options/dev>,\n <mailto:dev-request@dpdk.org?subject=unsubscribe>",
        "List-Archive": "<http://mails.dpdk.org/archives/dev/>",
        "List-Post": "<mailto:dev@dpdk.org>",
        "List-Help": "<mailto:dev-request@dpdk.org?subject=help>",
        "List-Subscribe": "<https://mails.dpdk.org/listinfo/dev>,\n <mailto:dev-request@dpdk.org?subject=subscribe>",
        "Errors-To": "dev-bounces@dpdk.org"
    },
    "content": "Format according to the Google format and PEP257, with slight\ndeviations.\n\nSigned-off-by: Juraj Linkeš <juraj.linkes@pantheon.tech>\n---\n dts/framework/testbed_model/node.py | 191 +++++++++++++++++++---------\n 1 file changed, 131 insertions(+), 60 deletions(-)",
    "diff": "diff --git a/dts/framework/testbed_model/node.py b/dts/framework/testbed_model/node.py\nindex b313b5ad54..6eecbdfd6a 100644\n--- a/dts/framework/testbed_model/node.py\n+++ b/dts/framework/testbed_model/node.py\n@@ -3,8 +3,13 @@\n # Copyright(c) 2022-2023 PANTHEON.tech s.r.o.\n # Copyright(c) 2022-2023 University of New Hampshire\n \n-\"\"\"\n-A node is a generic host that DTS connects to and manages.\n+\"\"\"Common functionality for node management.\n+\n+A node is any host/server DTS connects to.\n+\n+The base class, :class:`Node`, provides functionality common to all nodes and is supposed\n+to be extended by subclasses with functionalities specific to each node type.\n+The :func:`~Node.skip_setup` decorator can be used without subclassing.\n \"\"\"\n \n from abc import ABC\n@@ -35,10 +40,22 @@\n \n \n class Node(ABC):\n-    \"\"\"\n-    Basic class for node management. This class implements methods that\n-    manage a node, such as information gathering (of CPU/PCI/NIC) and\n-    environment setup.\n+    \"\"\"The base class for node management.\n+\n+    It shouldn't be instantiated, but rather subclassed.\n+    It implements common methods to manage any node:\n+\n+        * Connection to the node,\n+        * Hugepages setup.\n+\n+    Attributes:\n+        main_session: The primary OS-aware remote session used to communicate with the node.\n+        config: The node configuration.\n+        name: The name of the node.\n+        lcores: The list of logical cores that DTS can use on the node.\n+            It's derived from logical cores present on the node and the test run configuration.\n+        ports: The ports of this node specified in the test run configuration.\n+        virtual_devices: The virtual devices used on the node.\n     \"\"\"\n \n     main_session: OSSession\n@@ -52,6 +69,17 @@ class Node(ABC):\n     virtual_devices: list[VirtualDevice]\n \n     def __init__(self, node_config: NodeConfiguration):\n+        \"\"\"Connect to the node and gather info during initialization.\n+\n+        Extra gathered information:\n+\n+        * The list of available logical CPUs. 
This is then filtered by\n+          the ``lcores`` configuration in the YAML test run configuration file,\n+        * Information about ports from the YAML test run configuration file.\n+\n+        Args:\n+            node_config: The node's test run configuration.\n+        \"\"\"\n         self.config = node_config\n         self.name = node_config.name\n         self._logger = getLogger(self.name)\n@@ -60,7 +88,7 @@ def __init__(self, node_config: NodeConfiguration):\n         self._logger.info(f\"Connected to node: {self.name}\")\n \n         self._get_remote_cpus()\n-        # filter the node lcores according to user config\n+        # filter the node lcores according to the test run configuration\n         self.lcores = LogicalCoreListFilter(\n             self.lcores, LogicalCoreList(self.config.lcores)\n         ).filter()\n@@ -76,9 +104,14 @@ def _init_ports(self) -> None:\n             self.configure_port_state(port)\n \n     def set_up_execution(self, execution_config: ExecutionConfiguration) -> None:\n-        \"\"\"\n-        Perform the execution setup that will be done for each execution\n-        this node is part of.\n+        \"\"\"Execution setup steps.\n+\n+        Configure hugepages and call :meth:`_set_up_execution` where\n+        the rest of the configuration steps (if any) are implemented.\n+\n+        Args:\n+            execution_config: The execution test run configuration according to which\n+                the setup steps will be taken.\n         \"\"\"\n         self._setup_hugepages()\n         self._set_up_execution(execution_config)\n@@ -87,54 +120,70 @@ def set_up_execution(self, execution_config: ExecutionConfiguration) -> None:\n             self.virtual_devices.append(VirtualDevice(vdev))\n \n     def _set_up_execution(self, execution_config: ExecutionConfiguration) -> None:\n-        \"\"\"\n-        This method exists to be optionally overwritten by derived classes and\n-        is not decorated so that the derived class doesn't have to use the decorator.\n+        \"\"\"Optional additional execution setup steps for subclasses.\n+\n+        Subclasses should override this if they need to add additional execution setup steps.\n         \"\"\"\n \n     def tear_down_execution(self) -> None:\n-        \"\"\"\n-        Perform the execution teardown that will be done after each execution\n-        this node is part of concludes.\n+        \"\"\"Execution teardown steps.\n+\n+        There are currently no common execution teardown steps common to all DTS node types.\n         \"\"\"\n         self.virtual_devices = []\n         self._tear_down_execution()\n \n     def _tear_down_execution(self) -> None:\n-        \"\"\"\n-        This method exists to be optionally overwritten by derived classes and\n-        is not decorated so that the derived class doesn't have to use the decorator.\n+        \"\"\"Optional additional execution teardown steps for subclasses.\n+\n+        Subclasses should override this if they need to add additional execution teardown steps.\n         \"\"\"\n \n     def set_up_build_target(self, build_target_config: BuildTargetConfiguration) -> None:\n-        \"\"\"\n-        Perform the build target setup that will be done for each build target\n-        tested on this node.\n+        \"\"\"Build target setup steps.\n+\n+        There are currently no common build target setup steps common to all DTS node types.\n+\n+        Args:\n+            build_target_config: The build target test run configuration according to which\n+    
            the setup steps will be taken.\n         \"\"\"\n         self._set_up_build_target(build_target_config)\n \n     def _set_up_build_target(self, build_target_config: BuildTargetConfiguration) -> None:\n-        \"\"\"\n-        This method exists to be optionally overwritten by derived classes and\n-        is not decorated so that the derived class doesn't have to use the decorator.\n+        \"\"\"Optional additional build target setup steps for subclasses.\n+\n+        Subclasses should override this if they need to add additional build target setup steps.\n         \"\"\"\n \n     def tear_down_build_target(self) -> None:\n-        \"\"\"\n-        Perform the build target teardown that will be done after each build target\n-        tested on this node.\n+        \"\"\"Build target teardown steps.\n+\n+        There are currently no common build target teardown steps common to all DTS node types.\n         \"\"\"\n         self._tear_down_build_target()\n \n     def _tear_down_build_target(self) -> None:\n-        \"\"\"\n-        This method exists to be optionally overwritten by derived classes and\n-        is not decorated so that the derived class doesn't have to use the decorator.\n+        \"\"\"Optional additional build target teardown steps for subclasses.\n+\n+        Subclasses should override this if they need to add additional build target teardown steps.\n         \"\"\"\n \n     def create_session(self, name: str) -> OSSession:\n-        \"\"\"\n-        Create and return a new OSSession tailored to the remote OS.\n+        \"\"\"Create and return a new OS-aware remote session.\n+\n+        The returned session won't be used by the node creating it. The session must be used by\n+        the caller. The session will be maintained for the entire lifecycle of the node object,\n+        at the end of which the session will be cleaned up automatically.\n+\n+        Note:\n+            Any number of these supplementary sessions may be created.\n+\n+        Args:\n+            name: The name of the session.\n+\n+        Returns:\n+            A new OS-aware remote session.\n         \"\"\"\n         session_name = f\"{self.name} {name}\"\n         connection = create_session(\n@@ -152,19 +201,19 @@ def create_interactive_shell(\n         privileged: bool = False,\n         app_args: str = \"\",\n     ) -> InteractiveShellType:\n-        \"\"\"Create a handler for an interactive session.\n+        \"\"\"Factory for interactive session handlers.\n \n-        Instantiate shell_cls according to the remote OS specifics.\n+        Instantiate `shell_cls` according to the remote OS specifics.\n \n         Args:\n             shell_cls: The class of the shell.\n-            timeout: Timeout for reading output from the SSH channel. If you are\n-                reading from the buffer and don't receive any data within the timeout\n-                it will throw an error.\n+            timeout: Timeout for reading output from the SSH channel. 
If you are reading from\n+                the buffer and don't receive any data within the timeout it will throw an error.\n             privileged: Whether to run the shell with administrative privileges.\n             app_args: The arguments to be passed to the application.\n+\n         Returns:\n-            Instance of the desired interactive application.\n+            An instance of the desired interactive application shell.\n         \"\"\"\n         if not shell_cls.dpdk_app:\n             shell_cls.path = self.main_session.join_remote_path(shell_cls.path)\n@@ -181,14 +230,22 @@ def filter_lcores(\n         filter_specifier: LogicalCoreCount | LogicalCoreList,\n         ascending: bool = True,\n     ) -> list[LogicalCore]:\n-        \"\"\"\n-        Filter the LogicalCores found on the Node according to\n-        a LogicalCoreCount or a LogicalCoreList.\n+        \"\"\"Filter the node's logical cores that DTS can use.\n+\n+        Logical cores that DTS can use are the ones that are present on the node, but filtered\n+        according to the test run configuration. The `filter_specifier` will filter cores from\n+        those logical cores.\n+\n+        Args:\n+            filter_specifier: Two different filters can be used, one that specifies the number\n+                of logical cores per core, cores per socket and the number of sockets,\n+                and another one that specifies a logical core list.\n+            ascending: If :data:`True`, use cores with the lowest numerical id first and continue\n+                in ascending order. If :data:`False`, start with the highest id and continue\n+                in descending order. This ordering affects which sockets to consider first as well.\n \n-        If ascending is True, use cores with the lowest numerical id first\n-        and continue in ascending order. If False, start with the highest\n-        id and continue in descending order. This ordering affects which\n-        sockets to consider first as well.\n+        Returns:\n+            The filtered logical cores.\n         \"\"\"\n         self._logger.debug(f\"Filtering {filter_specifier} from {self.lcores}.\")\n         return lcore_filter(\n@@ -198,17 +255,14 @@ def filter_lcores(\n         ).filter()\n \n     def _get_remote_cpus(self) -> None:\n-        \"\"\"\n-        Scan CPUs in the remote OS and store a list of LogicalCores.\n-        \"\"\"\n+        \"\"\"Scan CPUs in the remote OS and store a list of LogicalCores.\"\"\"\n         self._logger.info(\"Getting CPU information.\")\n         self.lcores = self.main_session.get_remote_cpus(self.config.use_first_core)\n \n     def _setup_hugepages(self) -> None:\n-        \"\"\"\n-        Setup hugepages on the Node. 
Different architectures can supply different\n-        amounts of memory for hugepages and numa-based hugepage allocation may need\n-        to be considered.\n+        \"\"\"Setup hugepages on the node.\n+\n+        Configure the hugepages only if they're specified in the node's test run configuration.\n         \"\"\"\n         if self.config.hugepages:\n             self.main_session.setup_hugepages(\n@@ -216,8 +270,11 @@ def _setup_hugepages(self) -> None:\n             )\n \n     def configure_port_state(self, port: Port, enable: bool = True) -> None:\n-        \"\"\"\n-        Enable/disable port.\n+        \"\"\"Enable/disable `port`.\n+\n+        Args:\n+            port: The port to enable/disable.\n+            enable: :data:`True` to enable, :data:`False` to disable.\n         \"\"\"\n         self.main_session.configure_port_state(port, enable)\n \n@@ -227,15 +284,17 @@ def configure_port_ip_address(\n         port: Port,\n         delete: bool = False,\n     ) -> None:\n-        \"\"\"\n-        Configure the IP address of a port on this node.\n+        \"\"\"Add an IP address to `port` on this node.\n+\n+        Args:\n+            address: The IP address with mask in CIDR format. Can be either IPv4 or IPv6.\n+            port: The port to which to add the address.\n+            delete: If :data:`True`, will delete the address from the port instead of adding it.\n         \"\"\"\n         self.main_session.configure_port_ip_address(address, port, delete)\n \n     def close(self) -> None:\n-        \"\"\"\n-        Close all connections and free other resources.\n-        \"\"\"\n+        \"\"\"Close all connections and free other resources.\"\"\"\n         if self.main_session:\n             self.main_session.close()\n         for session in self._other_sessions:\n@@ -244,6 +303,11 @@ def close(self) -> None:\n \n     @staticmethod\n     def skip_setup(func: Callable[..., Any]) -> Callable[..., Any]:\n+        \"\"\"Skip the decorated function.\n+\n+        The :option:`--skip-setup` command line argument and the :envvar:`DTS_SKIP_SETUP`\n+        environment variable enable the decorator.\n+        \"\"\"\n         if SETTINGS.skip_setup:\n             return lambda *args: None\n         else:\n@@ -251,6 +315,13 @@ def skip_setup(func: Callable[..., Any]) -> Callable[..., Any]:\n \n \n def create_session(node_config: NodeConfiguration, name: str, logger: DTSLOG) -> OSSession:\n+    \"\"\"Factory for OS-aware sessions.\n+\n+    Args:\n+        node_config: The test run configuration of the node to connect to.\n+        name: The name of the session.\n+        logger: The logger instance this session will use.\n+    \"\"\"\n     match node_config.os:\n         case OS.linux:\n             return LinuxSession(node_config, name, logger)\n",
    "prefixes": [
        "v8",
        "17/21"
    ]
}
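
The hyperlinked fields in the response above (mbox, comments, checks, series) point at related resources that can be fetched the same way. A short sketch, again assuming the Python requests library and that the linked collections return JSON lists:

import requests

patch = requests.get("https://patches.dpdk.org/api/patches/134583/").json()

# Raw mbox of this single patch (suitable for git am).
mbox = requests.get(patch["mbox"]).text

# Linked collections: review comments and CI check results.
comments = requests.get(patch["comments"]).json()
checks = requests.get(patch["checks"]).json()

# The series entry carries its own mbox covering the whole v8 series.
series_mbox_url = patch["series"][0]["mbox"]

print(patch["check"], len(comments), len(checks), series_mbox_url)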