get:
Show a patch.

patch:
Update a patch.

put:
Update a patch.
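
For example, the patch shown below can be retrieved with a short script. This is a minimal sketch, assuming Python 3 with the third-party "requests" package installed; the URL and field names are taken from the JSON response that follows.

import requests

# Fetch a single patch as JSON from the Patchwork REST API.
response = requests.get(
    "https://patches.dpdk.org/api/patches/129585/",
    headers={"Accept": "application/json"},
    timeout=30,
)
response.raise_for_status()
patch = response.json()

# A few of the fields visible in the response below.
print(patch["name"])   # "[v8,3/6] containers/builder: Dockerfile creation script"
print(patch["state"])  # "new"
print(patch["mbox"])   # URL of the raw mbox for this patch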

GET /api/patches/129585/?format=api
HTTP 200 OK
Allow: GET, PUT, PATCH, HEAD, OPTIONS
Content-Type: application/json
Vary: Accept

{
    "id": 129585,
    "url": "https://patches.dpdk.org/api/patches/129585/?format=api",
    "web_url": "https://patches.dpdk.org/project/ci/patch/20230717210815.29737-4-ahassick@iol.unh.edu/",
    "project": {
        "id": 5,
        "url": "https://patches.dpdk.org/api/projects/5/?format=api",
        "name": "CI",
        "link_name": "ci",
        "list_id": "ci.dpdk.org",
        "list_email": "ci@dpdk.org",
        "web_url": "",
        "scm_url": "git://dpdk.org/tools/dpdk-ci",
        "webscm_url": "https://git.dpdk.org/tools/dpdk-ci/",
        "list_archive_url": "https://inbox.dpdk.org/ci",
        "list_archive_url_format": "https://inbox.dpdk.org/ci/{}",
        "commit_url_format": ""
    },
    "msgid": "<20230717210815.29737-4-ahassick@iol.unh.edu>",
    "list_archive_url": "https://inbox.dpdk.org/ci/20230717210815.29737-4-ahassick@iol.unh.edu",
    "date": "2023-07-17T21:08:12",
    "name": "[v8,3/6] containers/builder: Dockerfile creation script",
    "commit_ref": null,
    "pull_url": null,
    "state": "new",
    "archived": false,
    "hash": "d3167ca1ebd5f8e71fbf4479cd42dbbc0bcf216a",
    "submitter": {
        "id": 3127,
        "url": "https://patches.dpdk.org/api/people/3127/?format=api",
        "name": "Adam Hassick",
        "email": "ahassick@iol.unh.edu"
    },
    "delegate": null,
    "mbox": "https://patches.dpdk.org/project/ci/patch/20230717210815.29737-4-ahassick@iol.unh.edu/mbox/",
    "series": [
        {
            "id": 28957,
            "url": "https://patches.dpdk.org/api/series/28957/?format=api",
            "web_url": "https://patches.dpdk.org/project/ci/list/?series=28957",
            "date": "2023-07-17T21:08:09",
            "name": "Community Lab Containers and Builder Engine",
            "version": 8,
            "mbox": "https://patches.dpdk.org/series/28957/mbox/"
        }
    ],
    "comments": "https://patches.dpdk.org/api/patches/129585/comments/",
    "check": "pending",
    "checks": "https://patches.dpdk.org/api/patches/129585/checks/",
    "tags": {},
    "related": [],
    "headers": {
        "Return-Path": "<ci-bounces@dpdk.org>",
        "X-Original-To": "patchwork@inbox.dpdk.org",
        "Delivered-To": "patchwork@inbox.dpdk.org",
        "Received": [
            "from mails.dpdk.org (mails.dpdk.org [217.70.189.124])\n\tby inbox.dpdk.org (Postfix) with ESMTP id A4DE042E99;\n\tMon, 17 Jul 2023 23:11:40 +0200 (CEST)",
            "from mails.dpdk.org (localhost [127.0.0.1])\n\tby mails.dpdk.org (Postfix) with ESMTP id 9C10C410EF;\n\tMon, 17 Jul 2023 23:11:40 +0200 (CEST)",
            "from mail-qt1-f170.google.com (mail-qt1-f170.google.com\n [209.85.160.170])\n by mails.dpdk.org (Postfix) with ESMTP id 5CDC94068E\n for <ci@dpdk.org>; Mon, 17 Jul 2023 23:11:39 +0200 (CEST)",
            "by mail-qt1-f170.google.com with SMTP id\n d75a77b69052e-403f64ad8a0so1686111cf.2\n for <ci@dpdk.org>; Mon, 17 Jul 2023 14:11:39 -0700 (PDT)",
            "from pogmachine2.loudonlune.net ([216.212.51.182])\n by smtp.gmail.com with ESMTPSA id\n ev10-20020a05622a510a00b003f9e58afea6sm153695qtb.12.2023.07.17.14.11.37\n (version=TLS1_3 cipher=TLS_AES_256_GCM_SHA384 bits=256/256);\n Mon, 17 Jul 2023 14:11:38 -0700 (PDT)"
        ],
        "DKIM-Signature": "v=1; a=rsa-sha256; c=relaxed/relaxed;\n d=iol.unh.edu; s=unh-iol; t=1689628298; x=1692220298;\n h=content-transfer-encoding:mime-version:references:in-reply-to\n :message-id:date:subject:cc:to:from:from:to:cc:subject:date\n :message-id:reply-to;\n bh=IuZkQxCLETL1Mmp0qBYKex8Ye3zuxug74bUyaTfvONk=;\n b=RJC1klABWU1jZIO55O1osoacIkn+anP0W4Z496UwwtKlefp5r0095qy4lTXDKSOAVc\n WUHMT3VTZxCp9TAH+kEiRPjgwDwMcbAGDVM3BuAUd1SLJnVJYu/awK8BT2NVcg+bkbBO\n r1CGJAzKyptJe7Gdf7Y/leqglxxTxELt2JR3E=",
        "X-Google-DKIM-Signature": "v=1; a=rsa-sha256; c=relaxed/relaxed;\n d=1e100.net; s=20221208; t=1689628298; x=1692220298;\n h=content-transfer-encoding:mime-version:references:in-reply-to\n :message-id:date:subject:cc:to:from:x-gm-message-state:from:to:cc\n :subject:date:message-id:reply-to;\n bh=IuZkQxCLETL1Mmp0qBYKex8Ye3zuxug74bUyaTfvONk=;\n b=eCRNQ9iKQePfOZUTGSGLuhTzqLakpalWv4n5B+CXTl7/RlLT7XMd/9m2GCv+YE8oG1\n bKKIRM5f7uG7kyh5aH//PTiBa0syGYOr5OIhu/DeUCo8dxVYgvNbU/ksRpAcljczKk6j\n IcdRX3UidUn90LWfKQ97jq7tPAJ7qagT96NZLS5mK2I7sFbZn9i7UhwumTYbe1V49d8y\n ES3mbEZO3A6Zs54qVMmY7xyBK84cArVFDXPhzvn13BfGEA537sejWWVr5iqhYfcyo7Sm\n /EIvKKGNywjpeFbgssESCCPg4pdwkV4rK+btohxlHsbEZlM/TdkCxhdE1XwcxjAb2Xvt\n stdQ==",
        "X-Gm-Message-State": "ABy/qLZRXTtCXBSY2XRiOPq91mbLBy/Pc0KEpZoR4SyR4PIzovxWgSR9\n a4GG4gAd84OJH45Vb7UjUDxD16gdF509urdGm8MCFX16Ekyz2VajC0u2Zae/rCtW93g3PesVM4W\n 9TOXWdbKGKTs0sFbvdSauxUDLyOtQ1jn96AGiAgcQhVK9UYZBQC/9ykvrzG1zKg==",
        "X-Google-Smtp-Source": "\n APBJJlETDNjPYIOp7b/Z43dCiGiJyqbrfx5Abt0ZM5HOmGIB7aJ6x7YnnzpsVrcxZAVbS+gccWyolw==",
        "X-Received": "by 2002:a05:622a:103:b0:403:38dd:ab5e with SMTP id\n u3-20020a05622a010300b0040338ddab5emr15145651qtw.1.1689628298512;\n Mon, 17 Jul 2023 14:11:38 -0700 (PDT)",
        "From": "Adam Hassick <ahassick@iol.unh.edu>",
        "To": "ci@dpdk.org",
        "Cc": "aconole@redhat.com, alialnu@nvidia.com,\n Owen Hilyard <ohilyard@iol.unh.edu>, Adam Hassick <ahassick@iol.unh.edu>",
        "Subject": "[PATCH v8 3/6] containers/builder: Dockerfile creation script",
        "Date": "Mon, 17 Jul 2023 17:08:12 -0400",
        "Message-ID": "<20230717210815.29737-4-ahassick@iol.unh.edu>",
        "X-Mailer": "git-send-email 2.41.0",
        "In-Reply-To": "<20230717210815.29737-1-ahassick@iol.unh.edu>",
        "References": "<20230717210815.29737-1-ahassick@iol.unh.edu>",
        "MIME-Version": "1.0",
        "Content-Transfer-Encoding": "8bit",
        "X-BeenThere": "ci@dpdk.org",
        "X-Mailman-Version": "2.1.29",
        "Precedence": "list",
        "List-Id": "DPDK CI discussions <ci.dpdk.org>",
        "List-Unsubscribe": "<https://mails.dpdk.org/options/ci>,\n <mailto:ci-request@dpdk.org?subject=unsubscribe>",
        "List-Archive": "<http://mails.dpdk.org/archives/ci/>",
        "List-Post": "<mailto:ci@dpdk.org>",
        "List-Help": "<mailto:ci-request@dpdk.org?subject=help>",
        "List-Subscribe": "<https://mails.dpdk.org/listinfo/ci>,\n <mailto:ci-request@dpdk.org?subject=subscribe>",
        "Errors-To": "ci-bounces@dpdk.org"
    },
    "content": "From: Owen Hilyard <ohilyard@iol.unh.edu>\n\nThis script will template out all of the Dockerfiles based on the\ndefinitions provided in the inventory using the jinja2 templating\nlibrary.\n\nSigned-off-by: Owen Hilyard <ohilyard@iol.unh.edu>\nSigned-off-by: Adam Hassick <ahassick@iol.unh.edu>\n---\n containers/template_engine/make_dockerfile.py | 358 ++++++++++++++++++\n 1 file changed, 358 insertions(+)\n create mode 100755 containers/template_engine/make_dockerfile.py",
    "diff": "diff --git a/containers/template_engine/make_dockerfile.py b/containers/template_engine/make_dockerfile.py\nnew file mode 100755\nindex 0000000..60269a0\n--- /dev/null\n+++ b/containers/template_engine/make_dockerfile.py\n@@ -0,0 +1,358 @@\n+#!/usr/bin/env python3\n+# SPDX-License-Identifier: BSD-3-Clause\n+# Copyright (c) 2022 University of New Hampshire\n+import argparse\n+import json\n+import logging\n+import os\n+import re\n+from dataclasses import dataclass\n+from datetime import datetime\n+import platform\n+from typing import Any, Dict, List, Optional\n+\n+import jsonschema\n+import yaml\n+from jinja2 import Environment, FileSystemLoader, select_autoescape\n+\n+\n+@dataclass(frozen=True)\n+class Options:\n+    on_rhel: bool\n+    fail_on_unbuildable: bool\n+    has_coverity: bool\n+    build_libabigail: bool\n+    build_abi: bool\n+    output_dir: str\n+    registry_hostname: str\n+    host_arch_only: bool\n+    omit_latest: bool\n+    is_builder: bool\n+    date_override: Optional[str]\n+    ninja_workers: Optional[int]\n+\n+\n+def _get_arg_parser() -> argparse.ArgumentParser:\n+    parser = argparse.ArgumentParser(description=\"Makes the dockerfile\")\n+    parser.add_argument(\"--output-dir\", required=True)\n+    parser.add_argument(\n+        \"--rhel\",\n+        action=\"store_true\",\n+        help=\"Overwrite the check for running on RHEL\",\n+        default=False,\n+    )\n+    parser.add_argument(\n+        \"--fail-on-unbuildable\",\n+        action=\"store_true\",\n+        help=\"If any container would not be possible to build, fail and exit with a non-zero exit code.\",\n+        default=False,\n+    )\n+    parser.add_argument(\n+        \"--build-abi\",\n+        action=\"store_true\",\n+        help=\"Whether to build the ABI references into the image. Disabled by \\\n+            default due to producing 10+ GB images. \\\n+            Implies '--build-libabigail'.\",\n+    )\n+    parser.add_argument(\n+        \"--build-libabigail\",\n+        action=\"store_true\",\n+        help=\"Whether to build libabigail from source for distros that do not \\\n+            package it. Implied by '--build-abi'\",\n+    )\n+    parser.add_argument(\n+        \"--host-arch-only\",\n+        action=\"store_true\",\n+        help=\"Only build containers for the architecture of the host system\",\n+    )\n+    parser.add_argument(\n+        \"--omit-latest\",\n+        action=\"store_true\",\n+        help=\"Whether to include the \\\"latest\\\" tag in the generated makefile.\"\n+    )\n+    parser.add_argument(\n+        \"--builder-mode\",\n+        action=\"store_true\",\n+        help=\"Specifies that the makefile is being templated for a builder. \\\n+            This implicitly sets \\\"--host-arch-only\\\" to true and disables making the manifests.\",\n+        default=False\n+    )\n+    parser.add_argument(\n+        \"--date\",\n+        type=str,\n+        help=\"Overrides generation of the timestamp and uses the provided string instead.\"\n+    )\n+    parser.add_argument(\n+        \"--ninja-workers\",\n+        type=int,\n+        help=\"Specifies a number of ninja workers to limit builds to. 
Uses the ninja default when not given.\"\n+    )\n+    parser.add_argument(\n+        \"--coverity\",\n+        action=\"store_true\",\n+        help=\"Whether the Coverity Scan binaries are available for building the Coverity containers.\",\n+        default=False\n+    )\n+    return parser\n+\n+\n+def parse_args() -> Options:\n+    parser = _get_arg_parser()\n+    args = parser.parse_args()\n+\n+    registry_hostname = (\n+        os.environ.get(\"DPDK_CI_CONTAINERS_REGISTRY_HOSTNAME\") or \"localhost\"\n+    )\n+\n+    # In order to to build the ABIs, libabigail must be built from source on\n+    # some platforms\n+    build_libabigail: bool = args.build_libabigail or args.build_abi\n+\n+    opts = Options(\n+        on_rhel=args.rhel,\n+        fail_on_unbuildable=args.fail_on_unbuildable,\n+        build_libabigail=build_libabigail,\n+        build_abi=args.build_abi,\n+        output_dir=args.output_dir,\n+        registry_hostname=registry_hostname,\n+        host_arch_only=args.host_arch_only or args.builder_mode,\n+        omit_latest=args.omit_latest,\n+        is_builder=args.builder_mode,\n+        date_override=args.date,\n+        ninja_workers=args.ninja_workers,\n+        has_coverity=args.coverity\n+    )\n+\n+    logging.info(f\"make_dockerfile.py options: {opts}\")\n+    return opts\n+\n+\n+def running_on_RHEL(options: Options) -> bool:\n+    \"\"\"\n+    RHEL containers can only be built on RHEL, so disable them and emit a\n+    warning if not on RHEL.\n+    \"\"\"\n+    redhat_release_path = \"/etc/redhat-release\"\n+\n+    if os.path.exists(redhat_release_path):\n+        with open(redhat_release_path) as f:\n+            first_line = f.readline()\n+            on_rhel = \"Red Hat Enterprise Linux\" in first_line\n+            if on_rhel:\n+                logging.info(\"Running on RHEL, allowing RHEL containers\")\n+                return True\n+\n+    logging.warning(\"Not on RHEL, disabling RHEL containers\")\n+    assert options is not None, \"Internal state error, OPTIONS should not be None\"\n+\n+    if options.on_rhel:\n+        logging.info(\"Override enabled, enabling RHEL containers\")\n+\n+    return options.on_rhel\n+\n+\n+def get_path_to_parent_directory() -> str:\n+    return os.path.dirname(__file__)\n+\n+\n+def get_raw_inventory():\n+    parent_dir = get_path_to_parent_directory()\n+\n+    schema_path = os.path.join(parent_dir, \"inventory_schema.json\")\n+    inventory_path = os.path.join(parent_dir, \"inventory.yaml\")\n+\n+    inventory: Dict[str, Any]\n+    with open(inventory_path, \"r\") as f:\n+        inventory = yaml.safe_load(f)\n+\n+    schema: Dict[str, Any]\n+    with open(schema_path, \"r\") as f:\n+        schema = json.load(f)\n+\n+    jsonschema.validate(instance=inventory, schema=schema)\n+    return inventory\n+\n+\n+def apply_group_config_to_target(\n+    target: Dict[str, Any],\n+    raw_inventory: Dict[str, Any],\n+    on_rhel: bool,\n+    fail_on_unbuildable: bool,\n+) -> Optional[Dict[str, Any]]:\n+    groups_for_target: List[Dict[str, Any]] = []\n+    groups: List[Dict[str, Any]] = raw_inventory[\"dockerfiles\"][\"groups\"]\n+    group = groups[target[\"group\"]]\n+\n+    target_primary_group = target[\"group\"]\n+\n+    assert isinstance(target_primary_group, str), \"Target group name was not a string\"\n+\n+    requires_rhel = \"rhel\" in target_primary_group.lower()\n+\n+    if requires_rhel and not on_rhel:\n+        logging.warning(\n+            f\"Disabling target {target['name']}, because it must be built on RHEL.\"\n+ 
       )\n+        if fail_on_unbuildable:\n+            raise AssertionError(\n+                f\"Not on RHEL and target {target['name']} must be built on RHEL\"\n+            )\n+\n+        return None\n+\n+    while group[\"parent\"] != \"NONE\":\n+        groups_for_target.append(group)\n+        group = groups[group[\"parent\"]]\n+\n+    groups_for_target.append(group)  # add the \"all\" group\n+    groups_for_target.reverse()  # reverse it so overrides work\n+\n+    target_packages: List[str] = target.get(\"packages\") or []\n+\n+    for group in groups_for_target:\n+        target_packages = [*target_packages, *(group.get(\"packages\") or [])]\n+        target = dict(target, **group)\n+\n+    target[\"packages\"] = target_packages\n+\n+    return target\n+\n+def apply_defaults_to_target(target: Dict[str, Any]) -> Dict[str, Any]:\n+    def default_if_unset(target: Dict[str, Any], key: str, value: Any) -> Dict[str, Any]:\n+        if key not in target:\n+            target[key] = value\n+\n+        return target\n+\n+    target = default_if_unset(target, \"requires_coverity\", False)\n+    target = default_if_unset(target, \"force_disable_abi\", False)\n+    target = default_if_unset(target, \"minimum_dpdk_version\", dict(major=0, minor=0, revision=0))\n+    target = default_if_unset(target, \"extra_information\", {})\n+\n+    return target\n+\n+def get_host_arch() -> str:\n+    machine: str = platform.machine()\n+    match machine:\n+        case \"aarch64\" | \"armv8b\" | \"armv8l\":\n+            return \"linux/arm64\"\n+        case \"ppc64le\":\n+            return \"linux/ppc64le\"\n+        case \"x86_64\" | \"x64\" | \"amd64\":\n+            return \"linux/amd64\"\n+        case arch:\n+            raise ValueError(f\"Unknown arch {arch}\")\n+\n+def process_target(\n+    target: Dict[str, Any],\n+    raw_inventory: Dict[str, Any],\n+    has_coverity: bool,\n+    on_rhel: bool,\n+    fail_on_unbuildable: bool,\n+    host_arch_only: bool,\n+    build_timestamp: str\n+) -> Optional[Dict[str, Any]]:\n+    target = apply_defaults_to_target(target)\n+    # Copy the platforms, for building the manifest list.\n+\n+    # Write the build timestamp.\n+    target[\"extra_information\"].update({\n+        \"build_timestamp\": build_timestamp\n+    })\n+\n+    if (not has_coverity) and target[\"requires_coverity\"]:\n+        print(f\"Disabling {target['name']}. 
Target requires Coverity, and it is not enabled.\")\n+        return None\n+\n+    if host_arch_only:\n+        host_arch = get_host_arch()\n+        if host_arch in target[\"platforms\"]:\n+            target[\"platforms\"] = [host_arch]\n+        else:\n+            return None\n+\n+    return apply_group_config_to_target(\n+        target, raw_inventory, on_rhel, fail_on_unbuildable\n+    )\n+\n+def get_processed_inventory(options: Options, build_timestamp: str) -> Dict[str, Any]:\n+    raw_inventory: Dict[str, Any] = get_raw_inventory()\n+    on_rhel = running_on_RHEL(options)\n+    targets = raw_inventory[\"dockerfiles\"][\"targets\"]\n+    targets = [\n+        process_target(\n+            target, raw_inventory, options.has_coverity, on_rhel, options.fail_on_unbuildable, options.host_arch_only, build_timestamp\n+        )\n+        for target in targets\n+    ]\n+    # remove disabled options\n+    targets = [target for target in targets if target is not None]\n+    raw_inventory[\"dockerfiles\"][\"targets\"] = targets\n+\n+    return raw_inventory\n+\n+\n+def main():\n+    options: Options = parse_args()\n+\n+    env = Environment(\n+        loader=FileSystemLoader(\"templates\"),\n+    )\n+\n+    build_timestamp = datetime.now().strftime(\"%Y-%m-%d-%H-%M-%S\")\n+\n+    inventory = get_processed_inventory(options, build_timestamp)\n+\n+    if options.date_override:\n+        timestamp = options.date_override\n+    else:\n+        timestamp = datetime.now().strftime(\"%Y-%m-%d\")\n+\n+    for target in inventory[\"dockerfiles\"][\"targets\"]:\n+        template = env.get_template(f\"containers/{target['group']}.dockerfile.j2\")\n+        dockerfile_location = os.path.join(\n+            options.output_dir, target[\"name\"] + \".dockerfile\"\n+        )\n+\n+        tags: list[str] = target.get(\"extra_tags\") or []\n+\n+        tags.insert(0, \"$R/$N:$T\")\n+        if not options.omit_latest:\n+            tags.insert(0, \"$R/$N:latest\")\n+        else:\n+            tags = list(filter(lambda x: re.match('^.*:latest$', x) is None, tags))\n+\n+        target[\"tags\"] = tags\n+\n+        rendered_dockerfile = template.render(\n+            timestamp=timestamp,\n+            target=target,\n+            build_libabigail=options.build_libabigail,\n+            build_abi=options.build_abi,\n+            build_timestamp=build_timestamp,\n+            registry_hostname=options.registry_hostname,\n+            ninja_workers=options.ninja_workers,\n+            **inventory,\n+        )\n+        with open(dockerfile_location, \"w\") as output_file:\n+            output_file.write(rendered_dockerfile)\n+\n+    makefile_template = env.get_template(f\"containers.makefile.j2\")\n+    rendered_makefile = makefile_template.render(\n+        timestamp=timestamp,\n+        build_libabigail=options.build_libabigail,\n+        build_abi=options.build_abi,\n+        host_arch_only=options.host_arch_only,\n+        registry_hostname=options.registry_hostname,\n+        is_builder=options.is_builder,\n+        **inventory,\n+    )\n+    makefile_output_path = os.path.join(options.output_dir, \"Makefile\")\n+    with open(makefile_output_path, \"w\") as f:\n+        f.write(rendered_makefile)\n+\n+\n+if __name__ == \"__main__\":\n+    logging.basicConfig()\n+    logging.root.setLevel(0)  # log everything\n+    main()\n",
    "prefixes": [
        "v8",
        "3/6"
    ]
}
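
Continuing the sketch above (same assumptions: the response has been loaded into the "patch" variable via the requests example), the "diff", "id", and "submitter" fields returned for a patch can be used directly, for example to write the unified diff to a local file:

# Save the unified diff carried in the "diff" field to a file on disk.
diff_path = f"{patch['id']}.diff"
with open(diff_path, "w", encoding="utf-8") as out:
    out.write(patch["diff"])

print(f"Wrote {diff_path} (submitted by {patch['submitter']['name']})")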