From 8df279eb9cdb56aaeb978d3f333ee34436e917c0 Mon Sep 17 00:00:00 2001 From: Florian Heilmann Date: Tue, 12 Dec 2023 16:29:27 +0100 Subject: [PATCH] feat: Tons of updates, breaking changes in artifact output, pr_helper --- .github/workflows/test_pr.yml | 7 +- poetry.lock | 166 +++++++++++++++++- pyproject.toml | 8 +- voron_ci/constants.py | 50 ++++-- voron_ci/tools/mod_structure_checker.py | 12 +- voron_ci/tools/readme_generator.py | 6 +- voron_ci/tools/stl_corruption_checker.py | 22 +-- voron_ci/tools/stl_rotation_checker.py | 26 +-- voron_ci/tools/whitespace_checker.py | 8 +- voron_ci/utils/action_summary.py | 2 +- voron_ci/utils/file_helper.py | 11 ++ voron_ci/utils/github_action_helper.py | 96 +++++----- .../{tools => utils}/imagekit_uploader.py | 12 +- voron_ci/utils/pr_helper.py | 146 +++++++++++++++ 14 files changed, 449 insertions(+), 123 deletions(-) rename voron_ci/{tools => utils}/imagekit_uploader.py (94%) create mode 100644 voron_ci/utils/pr_helper.py diff --git a/.github/workflows/test_pr.yml b/.github/workflows/test_pr.yml index 8148f24..da0ad1e 100644 --- a/.github/workflows/test_pr.yml +++ b/.github/workflows/test_pr.yml @@ -4,7 +4,7 @@ on: pull_request: types: [opened, reopened, synchronize] jobs: - test_job: + voron_ci: env: VORON_CI_OUTPUT: workflow_output VORON_CI_STEP_SUMMARY: true @@ -83,6 +83,11 @@ jobs: README_GENERATOR_JSON: false with: args: generate-readme + # Store the PR number + - name: Store PR Number + if: '!cancelled()' + run: | + echo -n ${{ github.event.number }} > ${{ env.VORON_CI_OUTPUT }}/pr_number.txt # Upload Artifact - uses: actions/upload-artifact@65d862660abb392b8c4a3d1195a2108db131dd05 if: '!cancelled()' diff --git a/poetry.lock b/poetry.lock index 1f37038..95ec8b6 100644 --- a/poetry.lock +++ b/poetry.lock @@ -27,6 +27,26 @@ files = [ {file = "admesh-0.98.9.tar.gz", hash = "sha256:2ce7db7dc38b957f0e31de9bf554fdc8bedaf59e41a53dec548c17530ec7f639"}, ] +[[package]] +name = "anyio" +version = "4.1.0" +description = "High level compatibility layer for multiple asynchronous event loop implementations" +optional = false +python-versions = ">=3.8" +files = [ + {file = "anyio-4.1.0-py3-none-any.whl", hash = "sha256:56a415fbc462291813a94528a779597226619c8e78af7de0507333f700011e5f"}, + {file = "anyio-4.1.0.tar.gz", hash = "sha256:5a0bec7085176715be77df87fc66d6c9d70626bd752fcc85f57cdbee5b3760da"}, +] + +[package.dependencies] +idna = ">=2.8" +sniffio = ">=1.1" + +[package.extras] +doc = ["Sphinx (>=7)", "packaging", "sphinx-autodoc-typehints (>=1.2.0)", "sphinx-rtd-theme"] +test = ["anyio[trio]", "coverage[toml] (>=7)", "exceptiongroup (>=1.2.0)", "hypothesis (>=4.0)", "psutil (>=5.9)", "pytest (>=7.0)", "pytest-mock (>=3.6.1)", "trustme", "uvloop (>=0.17)"] +trio = ["trio (>=0.23)"] + [[package]] name = "certifi" version = "2023.7.22" @@ -177,6 +197,29 @@ files = [ [package.dependencies] smmap = ">=3.0.1,<6" +[[package]] +name = "githubkit" +version = "0.10.7" +description = "GitHub SDK for Python" +optional = false +python-versions = ">=3.8,<4.0" +files = [ + {file = "githubkit-0.10.7-py3-none-any.whl", hash = "sha256:c3993e7b79e719e4cace328310e87108c6ea7eacdc9ea94ea00aebd6a856abeb"}, + {file = "githubkit-0.10.7.tar.gz", hash = "sha256:b0a8a42feefad6604fee3faaba01ca0d0d215574f9d4557c1586c1dd926dc1e0"}, +] + +[package.dependencies] +httpx = ">=0.23.0,<1.0.0" +pydantic = ">=1.9.1,<2.0.0" +typing-extensions = ">=4.3.0,<5.0.0" + +[package.extras] +all = ["PyJWT[crypto] (>=2.4.0,<3.0.0)", "anyio (>=3.6.1,<4.0.0)"] +auth = ["PyJWT[crypto] (>=2.4.0,<3.0.0)", 
"anyio (>=3.6.1,<4.0.0)"] +auth-app = ["PyJWT[crypto] (>=2.4.0,<3.0.0)"] +auth-oauth-device = ["anyio (>=3.6.1,<4.0.0)"] +jwt = ["PyJWT[crypto] (>=2.4.0,<3.0.0)"] + [[package]] name = "gitpython" version = "3.1.40" @@ -194,6 +237,62 @@ gitdb = ">=4.0.1,<5" [package.extras] test = ["black", "coverage[toml]", "ddt (>=1.1.1,!=1.4.3)", "mock", "mypy", "pre-commit", "pytest", "pytest-cov", "pytest-instafail", "pytest-subtests", "pytest-sugar"] +[[package]] +name = "h11" +version = "0.14.0" +description = "A pure-Python, bring-your-own-I/O implementation of HTTP/1.1" +optional = false +python-versions = ">=3.7" +files = [ + {file = "h11-0.14.0-py3-none-any.whl", hash = "sha256:e3fe4ac4b851c468cc8363d500db52c2ead036020723024a109d37346efaa761"}, + {file = "h11-0.14.0.tar.gz", hash = "sha256:8f19fbbe99e72420ff35c00b27a34cb9937e902a8b810e2c88300c6f0a3b699d"}, +] + +[[package]] +name = "httpcore" +version = "1.0.2" +description = "A minimal low-level HTTP client." +optional = false +python-versions = ">=3.8" +files = [ + {file = "httpcore-1.0.2-py3-none-any.whl", hash = "sha256:096cc05bca73b8e459a1fc3dcf585148f63e534eae4339559c9b8a8d6399acc7"}, + {file = "httpcore-1.0.2.tar.gz", hash = "sha256:9fc092e4799b26174648e54b74ed5f683132a464e95643b226e00c2ed2fa6535"}, +] + +[package.dependencies] +certifi = "*" +h11 = ">=0.13,<0.15" + +[package.extras] +asyncio = ["anyio (>=4.0,<5.0)"] +http2 = ["h2 (>=3,<5)"] +socks = ["socksio (==1.*)"] +trio = ["trio (>=0.22.0,<0.23.0)"] + +[[package]] +name = "httpx" +version = "0.25.2" +description = "The next generation HTTP client." +optional = false +python-versions = ">=3.8" +files = [ + {file = "httpx-0.25.2-py3-none-any.whl", hash = "sha256:a05d3d052d9b2dfce0e3896636467f8a5342fb2b902c819428e1ac65413ca118"}, + {file = "httpx-0.25.2.tar.gz", hash = "sha256:8b8fcaa0c8ea7b05edd69a094e63a2094c4efcb48129fb757361bc423c0ad9e8"}, +] + +[package.dependencies] +anyio = "*" +certifi = "*" +httpcore = "==1.*" +idna = "*" +sniffio = "*" + +[package.extras] +brotli = ["brotli", "brotlicffi"] +cli = ["click (==8.*)", "pygments (==2.*)", "rich (>=10,<14)"] +http2 = ["h2 (>=3,<5)"] +socks = ["socksio (==1.*)"] + [[package]] name = "idna" version = "3.4" @@ -341,6 +440,58 @@ files = [ {file = "numpy-1.26.2.tar.gz", hash = "sha256:f65738447676ab5777f11e6bbbdb8ce11b785e105f690bc45966574816b6d3ea"}, ] +[[package]] +name = "pydantic" +version = "1.10.13" +description = "Data validation and settings management using python type hints" +optional = false +python-versions = ">=3.7" +files = [ + {file = "pydantic-1.10.13-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:efff03cc7a4f29d9009d1c96ceb1e7a70a65cfe86e89d34e4a5f2ab1e5693737"}, + {file = "pydantic-1.10.13-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:3ecea2b9d80e5333303eeb77e180b90e95eea8f765d08c3d278cd56b00345d01"}, + {file = "pydantic-1.10.13-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:1740068fd8e2ef6eb27a20e5651df000978edce6da6803c2bef0bc74540f9548"}, + {file = "pydantic-1.10.13-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:84bafe2e60b5e78bc64a2941b4c071a4b7404c5c907f5f5a99b0139781e69ed8"}, + {file = "pydantic-1.10.13-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:bc0898c12f8e9c97f6cd44c0ed70d55749eaf783716896960b4ecce2edfd2d69"}, + {file = "pydantic-1.10.13-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:654db58ae399fe6434e55325a2c3e959836bd17a6f6a0b6ca8107ea0571d2e17"}, + {file = 
"pydantic-1.10.13-cp310-cp310-win_amd64.whl", hash = "sha256:75ac15385a3534d887a99c713aa3da88a30fbd6204a5cd0dc4dab3d770b9bd2f"}, + {file = "pydantic-1.10.13-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:c553f6a156deb868ba38a23cf0df886c63492e9257f60a79c0fd8e7173537653"}, + {file = "pydantic-1.10.13-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:5e08865bc6464df8c7d61439ef4439829e3ab62ab1669cddea8dd00cd74b9ffe"}, + {file = "pydantic-1.10.13-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e31647d85a2013d926ce60b84f9dd5300d44535a9941fe825dc349ae1f760df9"}, + {file = "pydantic-1.10.13-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:210ce042e8f6f7c01168b2d84d4c9eb2b009fe7bf572c2266e235edf14bacd80"}, + {file = "pydantic-1.10.13-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:8ae5dd6b721459bfa30805f4c25880e0dd78fc5b5879f9f7a692196ddcb5a580"}, + {file = "pydantic-1.10.13-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:f8e81fc5fb17dae698f52bdd1c4f18b6ca674d7068242b2aff075f588301bbb0"}, + {file = "pydantic-1.10.13-cp311-cp311-win_amd64.whl", hash = "sha256:61d9dce220447fb74f45e73d7ff3b530e25db30192ad8d425166d43c5deb6df0"}, + {file = "pydantic-1.10.13-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:4b03e42ec20286f052490423682016fd80fda830d8e4119f8ab13ec7464c0132"}, + {file = "pydantic-1.10.13-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f59ef915cac80275245824e9d771ee939133be38215555e9dc90c6cb148aaeb5"}, + {file = "pydantic-1.10.13-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:5a1f9f747851338933942db7af7b6ee8268568ef2ed86c4185c6ef4402e80ba8"}, + {file = "pydantic-1.10.13-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:97cce3ae7341f7620a0ba5ef6cf043975cd9d2b81f3aa5f4ea37928269bc1b87"}, + {file = "pydantic-1.10.13-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:854223752ba81e3abf663d685f105c64150873cc6f5d0c01d3e3220bcff7d36f"}, + {file = "pydantic-1.10.13-cp37-cp37m-win_amd64.whl", hash = "sha256:b97c1fac8c49be29486df85968682b0afa77e1b809aff74b83081cc115e52f33"}, + {file = "pydantic-1.10.13-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:c958d053453a1c4b1c2062b05cd42d9d5c8eb67537b8d5a7e3c3032943ecd261"}, + {file = "pydantic-1.10.13-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:4c5370a7edaac06daee3af1c8b1192e305bc102abcbf2a92374b5bc793818599"}, + {file = "pydantic-1.10.13-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7d6f6e7305244bddb4414ba7094ce910560c907bdfa3501e9db1a7fd7eaea127"}, + {file = "pydantic-1.10.13-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:d3a3c792a58e1622667a2837512099eac62490cdfd63bd407993aaf200a4cf1f"}, + {file = "pydantic-1.10.13-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:c636925f38b8db208e09d344c7aa4f29a86bb9947495dd6b6d376ad10334fb78"}, + {file = "pydantic-1.10.13-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:678bcf5591b63cc917100dc50ab6caebe597ac67e8c9ccb75e698f66038ea953"}, + {file = "pydantic-1.10.13-cp38-cp38-win_amd64.whl", hash = "sha256:6cf25c1a65c27923a17b3da28a0bdb99f62ee04230c931d83e888012851f4e7f"}, + {file = "pydantic-1.10.13-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:8ef467901d7a41fa0ca6db9ae3ec0021e3f657ce2c208e98cd511f3161c762c6"}, + {file = "pydantic-1.10.13-cp39-cp39-macosx_11_0_arm64.whl", hash = 
"sha256:968ac42970f57b8344ee08837b62f6ee6f53c33f603547a55571c954a4225691"}, + {file = "pydantic-1.10.13-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9849f031cf8a2f0a928fe885e5a04b08006d6d41876b8bbd2fc68a18f9f2e3fd"}, + {file = "pydantic-1.10.13-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:56e3ff861c3b9c6857579de282ce8baabf443f42ffba355bf070770ed63e11e1"}, + {file = "pydantic-1.10.13-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:9f00790179497767aae6bcdc36355792c79e7bbb20b145ff449700eb076c5f96"}, + {file = "pydantic-1.10.13-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:75b297827b59bc229cac1a23a2f7a4ac0031068e5be0ce385be1462e7e17a35d"}, + {file = "pydantic-1.10.13-cp39-cp39-win_amd64.whl", hash = "sha256:e70ca129d2053fb8b728ee7d1af8e553a928d7e301a311094b8a0501adc8763d"}, + {file = "pydantic-1.10.13-py3-none-any.whl", hash = "sha256:b87326822e71bd5f313e7d3bfdc77ac3247035ac10b0c0618bd99dcf95b1e687"}, + {file = "pydantic-1.10.13.tar.gz", hash = "sha256:32c8b48dcd3b2ac4e78b0ba4af3a2c2eb6048cb75202f0ea7b34feb740efc340"}, +] + +[package.dependencies] +typing-extensions = ">=4.2.0" + +[package.extras] +dotenv = ["python-dotenv (>=0.10.4)"] +email = ["email-validator (>=1.0.3)"] + [[package]] name = "pyyaml" version = "6.0.1" @@ -472,6 +623,17 @@ files = [ {file = "smmap-5.0.1.tar.gz", hash = "sha256:dceeb6c0028fdb6734471eb07c0cd2aae706ccaecab45965ee83f11c8d3b1f62"}, ] +[[package]] +name = "sniffio" +version = "1.3.0" +description = "Sniff out which async library your code is running under" +optional = false +python-versions = ">=3.7" +files = [ + {file = "sniffio-1.3.0-py3-none-any.whl", hash = "sha256:eecefdce1e5bbfb7ad2eeaabf7c1eeb404d7757c379bd1f7e5cce9d8bf425384"}, + {file = "sniffio-1.3.0.tar.gz", hash = "sha256:e60305c5e5d314f5389259b7f22aaa33d8f7dee49763119234af3755c55b9101"}, +] + [[package]] name = "tweaker3" version = "0.0.0" @@ -569,5 +731,5 @@ dev = ["black (>=19.3b0)", "pytest (>=4.6.2)"] [metadata] lock-version = "2.0" -python-versions = ">=3.11" -content-hash = "c285344223f6d231bb089a37e2704a4c195757153f8d7092fc8ae8b5289ef3e2" +python-versions = ">=3.11,<4.0" +content-hash = "845af248434a1b87347695e316b2ed7077993cf6bce892028140c96ee8d4b3ac" diff --git a/pyproject.toml b/pyproject.toml index a3cbd0b..1858f7e 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -7,7 +7,7 @@ license = "GPLv3" readme = "README.md" [tool.poetry.dependencies] -python = ">=3.11" +python = ">=3.11,<4.0" admesh = "^0.98.9" imagekitio = "^3.2.0" tweaker3 = {git = "https://github.com/ChristophSchranz/Tweaker-3.git"} @@ -16,6 +16,7 @@ gitpython = "^3.1.40" requests = "^2.31.0" configargparse = "^1.7" loguru = "^0.7.2" +githubkit = "^0.10.7" [tool.poetry.group.dev.dependencies] @@ -67,7 +68,8 @@ check-stl-corruption = "voron_ci.tools.stl_corruption_checker:main" check-stl-rotation = "voron_ci.tools.stl_rotation_checker:main" check-mod-structure = "voron_ci.tools.mod_structure_checker:main" check-whitespace = "voron_ci.tools.whitespace_checker:main" -upload-images = "voron_ci.tools.imagekit_uploader:main" generate-readme = "voron_ci.tools.readme_generator:main" +upload-images = "voron_ci.utils.imagekit_uploader:main" debug-container = "voron_ci.utils.debug_container:print_container_info" -sanitize-file-list = "voron_ci.utils.github_action_helper:GithubActionHelper.sanitize_file_list" +sanitize-file-list = "voron_ci.utils.file_helper:sanitize_file_list" +set-pr-comment-labels = "voron_ci.utils.pr_helper:main" diff --git 
a/voron_ci/constants.py b/voron_ci/constants.py index 40c698d..4f2f1ee 100644 --- a/voron_ci/constants.py +++ b/voron_ci/constants.py @@ -1,23 +1,39 @@ -from enum import IntEnum, StrEnum +from enum import Enum +from typing import NamedTuple +SUCCESS_LABEL: str = "PR: CI passed" +PR_COMMENT_TAG: str = "" -class ReturnStatus(IntEnum): - SUCCESS = 0 - WARNING = 1 - FAILURE = 2 - EXCEPTION = 3 +class StepResultCodeStr(NamedTuple): + result_code: int + result_str: str -class SummaryStatus(StrEnum): - SUCCESS = "✅ SUCCESS" - WARNING = "⚠️ WARNING" - FAILURE = "❌ FAILURE" - EXCEPTION = "💀 EXCEPTION" +class StepIdName(NamedTuple): + step_id: str + step_name: str + step_pr_label: str -EXTENDED_OUTCOME: dict[ReturnStatus, str] = { - ReturnStatus.SUCCESS: "success", - ReturnStatus.WARNING: "warning", - ReturnStatus.FAILURE: "failure", - ReturnStatus.EXCEPTION: "exception", -} + +class StepResult(StepResultCodeStr, Enum): + SUCCESS = StepResultCodeStr(result_code=0, result_str="✅ SUCCESS") + WARNING = StepResultCodeStr(result_code=1, result_str="⚠️ WARNING") + FAILURE = StepResultCodeStr(result_code=2, result_str="❌ FAILURE") + EXCEPTION = StepResultCodeStr(result_code=3, result_str="💀 EXCEPTION") + + +class StepIdentifier(StepIdName, Enum): + WHITESPACE_CHECK = StepIdName(step_id="whitespace_check", step_name="Whitespace checker", step_pr_label="Issue: Whitespace") + ROTATION_CHECK = StepIdName(step_id="rotation_check", step_name="STL rotation checker", step_pr_label="Issue: STL Rotation") + CORRUPTION_CHECK = StepIdName(step_id="corruption_check", step_name="STL corruption checker", step_pr_label="Issue: STL Corruption") + README_GENERATOR = StepIdName(step_id="readme_generator", step_name="Readme generator", step_pr_label="Issue: Readme") + MOD_STRUCTURE_CHECK = StepIdName(step_id="mod_structure_check", step_name="Mod structure checker", step_pr_label="Issue: Mod Structure") + + +VORONUSERS_PR_COMMENT_SECTIONS: list[StepIdentifier] = [ + StepIdentifier.WHITESPACE_CHECK, + StepIdentifier.MOD_STRUCTURE_CHECK, + StepIdentifier.CORRUPTION_CHECK, + StepIdentifier.ROTATION_CHECK, +] diff --git a/voron_ci/tools/mod_structure_checker.py b/voron_ci/tools/mod_structure_checker.py index 08ef501..01bfa56 100644 --- a/voron_ci/tools/mod_structure_checker.py +++ b/voron_ci/tools/mod_structure_checker.py @@ -6,7 +6,7 @@ import yaml from loguru import logger -from voron_ci.constants import ReturnStatus +from voron_ci.constants import StepIdentifier, StepResult from voron_ci.utils.action_summary import ActionSummaryTable from voron_ci.utils.file_helper import FileHelper from voron_ci.utils.github_action_helper import ActionResult, GithubActionHelper @@ -28,7 +28,7 @@ class ModStructureChecker: def __init__(self: Self, args: configargparse.Namespace) -> None: self.input_dir: Path = Path(Path.cwd(), args.input_dir) self.gh_helper: GithubActionHelper = GithubActionHelper(ignore_warnings=args.ignore_warnings) - self.return_status: ReturnStatus = ReturnStatus.SUCCESS + self.return_status: StepResult = StepResult.SUCCESS self.check_summary: list[list[str]] = [] init_logging(verbose=args.verbose) @@ -42,7 +42,7 @@ def _check_mods(self: Self) -> None: if not Path(mod_folder, ".metadata.yml").exists(): logger.error("Mod '{}' is missing a metadata file!", mod_folder) self.check_summary.append([mod_folder.relative_to(self.input_dir).as_posix(), FileErrors.mod_missing_metadata.value.format(mod_folder)]) - self.return_status = ReturnStatus.FAILURE + self.return_status = StepResult.FAILURE continue metadata: dict[str, Any] = 
yaml.safe_load(Path(mod_folder, ".metadata.yml").read_text()) @@ -55,7 +55,7 @@ def _check_mods(self: Self) -> None: if not Path(mod_folder, file).exists(): logger.error("File '{}' is missing in mod folder '{}'!", file, mod_folder) self.check_summary.append([mod_folder.relative_to(self.input_dir).as_posix(), FileErrors.file_from_metadata_missing.value.format(file)]) - self.return_status = ReturnStatus.FAILURE + self.return_status = StepResult.FAILURE def _check_shallow_files(self: Self) -> None: logger.info("Performing shallow file check") @@ -63,7 +63,7 @@ def _check_shallow_files(self: Self) -> None: for file_folder in files_folders: logger.warning("File '{}' outside mod folder structure!", file_folder) self.check_summary.append([file_folder.relative_to(self.input_dir).as_posix(), FileErrors.file_outside_mod_folder.value.format(file_folder)]) - self.return_status = ReturnStatus.FAILURE + self.return_status = StepResult.FAILURE def run(self: Self) -> None: logger.info("Starting files check in '{}'", str(self.input_dir)) @@ -73,7 +73,7 @@ def run(self: Self) -> None: self.gh_helper.finalize_action( action_result=ActionResult( - action_id="mod_structure_checker", + action_id=StepIdentifier.MOD_STRUCTURE_CHECK.step_id, action_name="Mod structure checker", outcome=self.return_status, summary=ActionSummaryTable( diff --git a/voron_ci/tools/readme_generator.py b/voron_ci/tools/readme_generator.py index b463609..fa2465d 100644 --- a/voron_ci/tools/readme_generator.py +++ b/voron_ci/tools/readme_generator.py @@ -7,7 +7,7 @@ import yaml from loguru import logger -from voron_ci.constants import ReturnStatus +from voron_ci.constants import StepIdentifier, StepResult from voron_ci.utils.action_summary import ActionSummaryTable from voron_ci.utils.github_action_helper import ActionResult, GithubActionHelper from voron_ci.utils.logging import init_logging @@ -88,9 +88,9 @@ def run(self: Self) -> None: self.gh_helper.finalize_action( action_result=ActionResult( - action_id="readme_generator", + action_id=StepIdentifier.README_GENERATOR.step_id, action_name="Readme generator", - outcome=ReturnStatus.SUCCESS, + outcome=StepResult.SUCCESS, summary=ActionSummaryTable( title="Readme preview", columns=["Creator", "Mod title", "Description", "Printer compatibility", "Last Changed"], diff --git a/voron_ci/tools/stl_corruption_checker.py b/voron_ci/tools/stl_corruption_checker.py index 77c0460..5bcec7d 100644 --- a/voron_ci/tools/stl_corruption_checker.py +++ b/voron_ci/tools/stl_corruption_checker.py @@ -7,7 +7,7 @@ from admesh import Stl from loguru import logger -from voron_ci.constants import ReturnStatus, SummaryStatus +from voron_ci.constants import StepIdentifier, StepResult from voron_ci.utils.action_summary import ActionSummaryTable from voron_ci.utils.file_helper import FileHelper from voron_ci.utils.github_action_helper import ActionResult, GithubActionHelper @@ -19,7 +19,7 @@ class STLCorruptionChecker: def __init__(self: Self, args: configargparse.Namespace) -> None: self.input_dir: Path = Path(Path.cwd(), args.input_dir) - self.return_status: ReturnStatus = ReturnStatus.SUCCESS + self.return_status: StepResult = StepResult.SUCCESS self.check_summary: list[list[str]] = [] self.gh_helper: GithubActionHelper = GithubActionHelper(ignore_warnings=args.ignore_warnings) @@ -31,15 +31,15 @@ def run(self: Self) -> None: stl_paths: list[Path] = FileHelper.find_files(directory=self.input_dir, extension="stl", max_files=40) with ThreadPoolExecutor() as pool: - return_statuses: list[ReturnStatus] = 
list(pool.map(self._check_stl, stl_paths)) + return_statuses: list[StepResult] = list(pool.map(self._check_stl, stl_paths)) if return_statuses: self.return_status = max(*return_statuses, self.return_status) else: - self.return_status = ReturnStatus.SUCCESS + self.return_status = StepResult.SUCCESS self.gh_helper.finalize_action( action_result=ActionResult( - action_id="corruption_checker", + action_id=StepIdentifier.CORRUPTION_CHECK.step_id, action_name="STL corruption checker", outcome=self.return_status, summary=ActionSummaryTable( @@ -58,7 +58,7 @@ def _write_fixed_stl_file(self: Self, stl: Stl, path: Path) -> None: self.gh_helper.set_artifact(file_name=path.as_posix(), file_contents=Path(temp_file.name).read_bytes()) temp_file.close() - def _check_stl(self: Self, stl_file_path: Path) -> ReturnStatus: + def _check_stl(self: Self, stl_file_path: Path) -> StepResult: try: stl: Stl = Stl(stl_file_path.as_posix()) stl.repair(verbose_flag=False) @@ -74,7 +74,7 @@ def _check_stl(self: Self, stl_file_path: Path) -> ReturnStatus: self.check_summary.append( [ stl_file_path.name, - SummaryStatus.FAILURE, + StepResult.FAILURE.result_str, str(stl.stats["edges_fixed"]), str(stl.stats["backwards_edges"]), str(stl.stats["degenerate_facets"]), @@ -84,15 +84,15 @@ def _check_stl(self: Self, stl_file_path: Path) -> ReturnStatus: ] ) self._write_fixed_stl_file(stl=stl, path=Path(stl_file_path.relative_to(self.input_dir))) - return ReturnStatus.FAILURE + return StepResult.FAILURE logger.success("STL '{}' OK!", stl_file_path.relative_to(self.input_dir).as_posix()) - return ReturnStatus.SUCCESS + return StepResult.SUCCESS except Exception: # noqa: BLE001 logger.critical("A fatal error occurred while checking '{}'!", stl_file_path.relative_to(self.input_dir).as_posix()) self.check_summary.append( - [stl_file_path.name, SummaryStatus.EXCEPTION, "0", "0", "0", "0", "0", "0"], + [stl_file_path.name, StepResult.EXCEPTION.result_str, "0", "0", "0", "0", "0", "0"], ) - return ReturnStatus.EXCEPTION + return StepResult.EXCEPTION def main() -> None: diff --git a/voron_ci/tools/stl_rotation_checker.py b/voron_ci/tools/stl_rotation_checker.py index 4bc0eec..7156eab 100644 --- a/voron_ci/tools/stl_rotation_checker.py +++ b/voron_ci/tools/stl_rotation_checker.py @@ -14,7 +14,7 @@ from tweaker3.FileHandler import FileHandler from tweaker3.MeshTweaker import Tweak -from voron_ci.constants import ReturnStatus, SummaryStatus +from voron_ci.constants import StepIdentifier, StepResult from voron_ci.utils.action_summary import ActionSummaryTable from voron_ci.utils.file_helper import FileHelper from voron_ci.utils.github_action_helper import ActionResult, GithubActionHelper @@ -30,7 +30,7 @@ def __init__(self: Self, args: configargparse.Namespace) -> None: self.input_dir: Path = Path(Path.cwd(), args.input_dir) self.imagekit_endpoint: str | None = args.imagekit_endpoint if args.imagekit_endpoint else None self.imagekit_subfolder: str = args.imagekit_subfolder - self.return_status: ReturnStatus = ReturnStatus.SUCCESS + self.return_status: StepResult = StepResult.SUCCESS self.check_summary: list[list[str]] = [] self.gh_helper: GithubActionHelper = GithubActionHelper(ignore_warnings=args.ignore_warnings) @@ -105,16 +105,16 @@ def run(self: Self) -> None: stl_paths: list[Path] = FileHelper.find_files(directory=self.input_dir, extension="stl", max_files=40) with ThreadPoolExecutor() as pool: - return_statuses: list[ReturnStatus] = list(pool.map(self._check_stl, stl_paths)) + return_statuses: list[StepResult] = 
list(pool.map(self._check_stl, stl_paths)) if return_statuses: self.return_status = max(*return_statuses, self.return_status) else: - self.return_status = ReturnStatus.SUCCESS + self.return_status = StepResult.SUCCESS self.gh_helper.finalize_action( action_result=ActionResult( - action_id="rotation_checker", + action_id=StepIdentifier.ROTATION_CHECK.step_id, action_name="STL rotation checker", outcome=self.return_status, summary=ActionSummaryTable( @@ -130,13 +130,13 @@ def _write_fixed_stl_file(self: Self, stl: dict[int, Any], opts: Tweak, stl_file file_name=stl_file_path.as_posix(), file_contents=self._get_rotated_stl_bytes(objects=stl, info={0: {"matrix": opts.matrix, "tweaker_stats": opts}}) ) - def _check_stl(self: Self, stl_file_path: Path) -> ReturnStatus: + def _check_stl(self: Self, stl_file_path: Path) -> StepResult: try: mesh_objects: dict[int, Any] = FileHandler().load_mesh(inputfile=stl_file_path.as_posix()) if len(mesh_objects.items()) > 1: logger.warning("File '{}' contains multiple objects and is therefore skipped!", stl_file_path.relative_to(self.input_dir).as_posix()) - self.check_summary.append([stl_file_path.name, SummaryStatus.WARNING, "", ""]) - return ReturnStatus.WARNING + self.check_summary.append([stl_file_path.name, StepResult.WARNING.result_str, "", ""]) + return StepResult.WARNING rotated_mesh: Tweak = Tweak(mesh_objects[0]["mesh"], extended_mode=True, verbose=False, min_volume=True) if rotated_mesh.rotation_angle >= TWEAK_THRESHOLD: @@ -156,19 +156,19 @@ def _check_stl(self: Self, stl_file_path: Path) -> ReturnStatus: self.check_summary.append( [ stl_file_path.name, - SummaryStatus.WARNING, + StepResult.WARNING.result_str, original_image_url, rotated_image_url, ], ) - return ReturnStatus.WARNING + return StepResult.WARNING logger.success("File '{}' OK!", stl_file_path.relative_to(self.input_dir).as_posix()) - return ReturnStatus.SUCCESS + return StepResult.SUCCESS except Exception: # noqa: BLE001 logger.critical("A fatal error occurred while checking {}", stl_file_path.relative_to(self.input_dir).as_posix()) - self.check_summary.append([stl_file_path.name, SummaryStatus.EXCEPTION, "", ""]) - return ReturnStatus.EXCEPTION + self.check_summary.append([stl_file_path.name, StepResult.EXCEPTION.result_str, "", ""]) + return StepResult.EXCEPTION def main() -> None: diff --git a/voron_ci/tools/whitespace_checker.py b/voron_ci/tools/whitespace_checker.py index 003e7ab..f8d2317 100644 --- a/voron_ci/tools/whitespace_checker.py +++ b/voron_ci/tools/whitespace_checker.py @@ -7,7 +7,7 @@ import configargparse from loguru import logger -from voron_ci.constants import ReturnStatus +from voron_ci.constants import StepIdentifier, StepResult from voron_ci.utils.action_summary import ActionSummaryTable from voron_ci.utils.github_action_helper import ActionResult, GithubActionHelper from voron_ci.utils.logging import init_logging @@ -20,7 +20,7 @@ class WhitespaceChecker: def __init__(self: Self, args: configargparse.Namespace) -> None: - self.return_status: ReturnStatus = ReturnStatus.SUCCESS + self.return_status: StepResult = StepResult.SUCCESS self.check_summary: list[list[str]] = [] self.gh_helper: GithubActionHelper = GithubActionHelper(ignore_warnings=args.ignore_warnings) @@ -53,7 +53,7 @@ def _check_for_whitespace(self: Self) -> None: else: logger.error("File '{}' contains whitespace!", input_file) self.check_summary.append([input_file, "This file contains whitespace!"]) - self.return_status = ReturnStatus.FAILURE + self.return_status = StepResult.FAILURE def run(self: 
Self) -> None: logger.info("Starting whitespace check ...") @@ -62,7 +62,7 @@ def run(self: Self) -> None: self.gh_helper.finalize_action( action_result=ActionResult( - action_id="whitespace_check", + action_id=StepIdentifier.WHITESPACE_CHECK.step_id, action_name="Whitespace check", outcome=self.return_status, summary=ActionSummaryTable( diff --git a/voron_ci/utils/action_summary.py b/voron_ci/utils/action_summary.py index 80531a1..48a2b2d 100644 --- a/voron_ci/utils/action_summary.py +++ b/voron_ci/utils/action_summary.py @@ -17,7 +17,7 @@ class ActionSummaryTable(ActionSummary): def to_markdown(self: Self) -> str: return ( - f"## {self.title}\n\n" + f"### {self.title}\n\n" f"\n" f"Result (items: {len(self.rows)})\n\n" f"{self.create_markdown_table(self.columns, self.rows)}\n" diff --git a/voron_ci/utils/file_helper.py b/voron_ci/utils/file_helper.py index e416d62..88c520b 100644 --- a/voron_ci/utils/file_helper.py +++ b/voron_ci/utils/file_helper.py @@ -1,9 +1,12 @@ import itertools +import os from pathlib import Path from typing import Self from loguru import logger +from voron_ci.utils.github_action_helper import GithubActionHelper + class FileHelper: @classmethod @@ -30,3 +33,11 @@ def get_shallow_folders(cls: type[Self], input_dir: Path, max_depth: int, ignore @classmethod def get_all_folders(cls: type[Self], _: Path) -> list[Path]: return [] + + +def sanitize_file_list() -> None: + file_list: list[str] = os.environ.get("FILE_LIST_SANITIZE_INPUT", "").splitlines() + output_file_list: list[str] = [input_file.replace("[", "\\[").replace("]", "\\]") for input_file in file_list] + gh_helper: GithubActionHelper = GithubActionHelper() + gh_helper.set_output_multiline(output={"FILE_LIST_SANITIZE_OUTPUT": output_file_list}) + gh_helper.write_outputs() diff --git a/voron_ci/utils/github_action_helper.py b/voron_ci/utils/github_action_helper.py index 9296848..15cb012 100644 --- a/voron_ci/utils/github_action_helper.py +++ b/voron_ci/utils/github_action_helper.py @@ -3,16 +3,16 @@ import sys import zipfile from dataclasses import dataclass -from http import HTTPStatus from io import BytesIO, StringIO from pathlib import Path from typing import Self import requests from git import InvalidGitRepositoryError, NoSuchPathError, Repo +from githubkit import GitHub, Response from loguru import logger -from voron_ci.constants import EXTENDED_OUTCOME, ReturnStatus +from voron_ci.constants import PR_COMMENT_TAG, StepResult from voron_ci.utils.action_summary import ActionSummary STEP_SUMMARY_ENV_VAR = "GITHUB_STEP_SUMMARY" @@ -26,7 +26,7 @@ class ActionResult: action_id: str action_name: str - outcome: ReturnStatus + outcome: StepResult summary: ActionSummary @@ -76,15 +76,14 @@ def _write_artifacts(self: Self, action_result: ActionResult) -> None: with Path(self.output_path, action_result.action_id, "summary.md").open("w") as f: f.write(action_result.summary.to_markdown()) with Path(self.output_path, action_result.action_id, "outcome.txt").open("w") as f: - f.write(str(action_result.outcome)) + f.write(action_result.outcome.result_str) def finalize_action(self: Self, action_result: ActionResult) -> None: - self.set_output(output={"extended-outcome": EXTENDED_OUTCOME[action_result.outcome]}) self._write_outputs() self._write_step_summary(action_result=action_result) self._write_artifacts(action_result=action_result) - result_ok = ReturnStatus.WARNING if self.ignore_warnings else ReturnStatus.SUCCESS + result_ok = StepResult.WARNING if self.ignore_warnings else StepResult.SUCCESS if action_result.outcome > 
result_ok: logger.error("Error detected while performing action '{}' (result: '{}' > '{}')!", action_result.action_name, action_result.outcome, result_ok) sys.exit(255) @@ -130,51 +129,35 @@ def last_commit_timestamp(cls: type[Self], file_or_directory: Path) -> str: return "" @classmethod - def download_artifact(cls: type[Self], repo: str, workflow_run_id: str, artifact_name: str, target_directory: Path) -> None: - # GitHub API endpoint to get the artifact information - api_url = f"https://api.github.com/repos/{repo}/actions/runs/{workflow_run_id}/artifacts" - headers: dict[str, str] = { - "Authorization": f"token {os.environ['VORON_CI_GITHUB_TOKEN']}", - "Accept": "application/vnd.github.v3+json", - "X-GitHub-Api-Version": "2022-11-28", - } + def set_labels_on_pull_request(cls: type[Self], repo: str, pull_request_number: int, labels: list[str]) -> None: + github = GitHub(os.environ["VORON_CI_GITHUB_TOKEN"]) + github.rest.issues.set_labels(owner=repo.split("/")[0], repo=repo.split("/")[1], issue_number=pull_request_number, labels=labels) - # Make a GET request to fetch artifact details - try: - response = requests.get(api_url, headers=headers, timeout=20) - response.raise_for_status() - except requests.HTTPError: - logger.exception("Failed to fetch artifacts. Status code: {}", response.status_code) - return + @classmethod + def download_artifact(cls: type[Self], repo: str, workflow_run_id: str, artifact_name: str, target_directory: Path) -> None: + github: GitHub = GitHub(os.environ["VORON_CI_GITHUB_TOKEN"]) + response: Response = github.rest.actions.list_workflow_run_artifacts(owner=repo.split("/")[0], repo=repo.split("/")[1], run_id=int(workflow_run_id)) - artifacts = response.json().get("artifacts", []) - artifact_id = None + artifacts: list[dict[str, str]] = response.json().get("artifacts", []) + artifact_id: int = -1 # Find the artifact by name for artifact in artifacts: if artifact["name"] == artifact_name: - artifact_id = artifact["id"] + artifact_id = int(artifact["id"]) break - if artifact_id is None: + if artifact_id == -1: logger.error("Artifact '{}' not found in the workflow run {}", artifact_name, workflow_run_id) return # Download artifact zip file - download_url = f"https://api.github.com/repos/{repo}/actions/artifacts/{artifact_id}/zip" - try: - download_response = requests.get(download_url, headers=headers, timeout=20) - download_response.raise_for_status() - except requests.HTTPError: - logger.exception("Failed to download artifact '{}'", artifact_name) - return - - if download_response.status_code >= HTTPStatus.MULTIPLE_CHOICES: - logger.error("Failed to download artifact '{}'. 
Status code: {}", artifact_name, download_response.status_code) - return + response_download: Response = github.rest.actions.download_artifact( + owner=repo.split("/")[0], repo=repo.split("/")[1], artifact_id=artifact_id, archive_format="zip" + ) # Read the zip file content into memory - zip_content = BytesIO(download_response.content) + zip_content: BytesIO = BytesIO(response_download.content) # Unzip artifact contents into target directory with zipfile.ZipFile(zip_content, "r") as zip_ref: @@ -188,23 +171,24 @@ def download_artifact(cls: type[Self], repo: str, workflow_run_id: str, artifact logger.info("Artifact '{}' downloaded and extracted to '{}' successfully.", artifact_name, target_directory.as_posix()) @classmethod - def set_labels_on_pull_request(cls: type[Self], repo: str, pull_request_number: int, labels: list[str]) -> None: - api_url: str = f"https://api.github.com/repos/{repo}/issues/{pull_request_number}/labels" - headers: dict[str, str] = { - "Authorization": f"token {os.environ['VORON_CI_GITHUB_TOKEN']}", - "Accept": "application/vnd.github.v3+json", - "X-GitHub-Api-Version": "2022-11-28", - } - try: - response: requests.Response = requests.put(api_url, headers=headers, json={"labels": labels}, timeout=10) - response.raise_for_status() - except requests.exceptions.HTTPError: - logger.exception("Failed to set labels on pull request {}", pull_request_number) + def update_or_create_pr_comment(cls: type[Self], repo: str, pull_request_number: int, comment_body: str) -> None: + github: GitHub = GitHub(os.environ["VORON_CI_GITHUB_TOKEN"]) + response: Response = github.rest.issues.list_comments(owner=repo.split("/")[0], repo=repo.split("/")[1], issue_number=pull_request_number) - @classmethod - def sanitize_file_list(cls: type[Self]) -> None: - file_list: list[str] = os.environ.get("FILE_LIST_SANITIZE_INPUT", "").splitlines() - output_file_list: list[str] = [input_file.replace("[", "\\[").replace("]", "\\]") for input_file in file_list] - gh_helper: GithubActionHelper = GithubActionHelper() - gh_helper.set_output_multiline(output={"FILE_LIST_SANITIZE_OUTPUT": output_file_list}) - gh_helper.write_outputs() + existing_comments: list[dict[str, str]] = response.json() + comment_id: int = -1 + + # Find the comment by the author + for existing_comment in existing_comments: + if PR_COMMENT_TAG in existing_comment["body"]: + comment_id = int(existing_comment["id"]) + break + + full_comment = f"{comment_body}\n\n{PR_COMMENT_TAG}\n" + + if comment_id == -1: + # Create a new comment + github.rest.issues.create_comment(owner=repo.split("/")[0], repo=repo.split("/")[1], issue_number=pull_request_number, body=full_comment) + else: + # Update existing comment + github.rest.issues.update_comment(owner=repo.split("/")[0], repo=repo.split("/")[1], comment_id=comment_id, body=full_comment) diff --git a/voron_ci/tools/imagekit_uploader.py b/voron_ci/utils/imagekit_uploader.py similarity index 94% rename from voron_ci/tools/imagekit_uploader.py rename to voron_ci/utils/imagekit_uploader.py index e807241..4fe8eb5 100644 --- a/voron_ci/tools/imagekit_uploader.py +++ b/voron_ci/utils/imagekit_uploader.py @@ -26,7 +26,7 @@ class ImageKitUploader: def __init__(self: Self, args: configargparse.Namespace) -> None: self.artifact_name: str = args.artifact_name self.workflow_run_id: str = args.workflow_run_id - self.fail_on_error: bool = args.fail_on_error + self.ignore_warnings: bool = args.ignore_warnings self.github_repository: str = args.github_repository self.tmp_path: Path = Path() self.image_base_path: 
Path = Path() @@ -49,7 +49,7 @@ def __init__(self: Self, args: configargparse.Namespace) -> None: ) except (KeyError, ValueError): logger.warning("No suitable imagekit credentials were found. Skipping image upload!") - if self.fail_on_error: + if not self.ignore_warnings: sys.exit(255) sys.exit(0) @@ -84,7 +84,7 @@ def run(self: Self) -> None: with ThreadPoolExecutor() as pool: results: Iterator[bool] = pool.map(self.upload_image, images) - if not all(results) and self.fail_on_error: + if not all(results) and not self.ignore_warnings: logger.error("Errors detected during image upload!") sys.exit(255) @@ -141,11 +141,11 @@ def main() -> None: ) parser.add_argument( "-f", - "--fail_on_error", + "--ignore_warnings", required=False, action="store_true", - env_var=f"{ENV_VAR_PREFIX}_FAIL_ON_ERROR", - help="Whether to return an error exit code if one of the STLs is faulty", + env_var=f"{ENV_VAR_PREFIX}_IGNORE_WARNINGS", + help="Whether to ignore warnings and return a success exit code", default=False, ) parser.add_argument( diff --git a/voron_ci/utils/pr_helper.py b/voron_ci/utils/pr_helper.py new file mode 100644 index 0000000..f7ca631 --- /dev/null +++ b/voron_ci/utils/pr_helper.py @@ -0,0 +1,146 @@ +import os +import sys +import tempfile +from pathlib import Path +from typing import Self + +import configargparse +from loguru import logger + +from voron_ci.constants import SUCCESS_LABEL, VORONUSERS_PR_COMMENT_SECTIONS, StepResult +from voron_ci.utils.github_action_helper import GithubActionHelper +from voron_ci.utils.logging import init_logging + +ENV_VAR_PREFIX = "PR_HELPER" + +PREAMBLE = """ Hi, thank you for submitting your PR. +Please find below the results of the automated PR checker: + +""" + +CLOSING_SUCCESS = """ + +Congratulations, all checks have completed successfully! Your PR is now ready for review! + +""" + +CLOSING_FAILURE = """ + +Unfortunately, some checks have failed. Please fix the issues and update your PR. + +""" + +CLOSING_BOT_NOTICE = """ + +I am a 🤖, this comment was generated automatically! + +""" + + +class PrHelper: + def __init__(self: Self, args: configargparse.Namespace) -> None: + self.artifact_name: str = args.artifact_name + self.workflow_run_id: str = args.workflow_run_id + self.github_repository: str = args.github_repository + self.tmp_path: Path = Path() + + self.pr_number: int = -1 + self.summaries: str = "" + self.labels: list[str] = [] + + init_logging(verbose=args.verbose) + + def _parse_artifact(self: Self) -> None: + logger.info("Preparing PR comment ...") + if not Path(self.tmp_path, "pr_number.txt").exists(): + logger.error("Artifact is missing pr_number.txt file!") + sys.exit(255) + self.pr_number = int(Path(self.tmp_path, "pr_number.txt").read_text()) + for pr_step_identifier in VORONUSERS_PR_COMMENT_SECTIONS: + if not ( + Path(self.tmp_path, pr_step_identifier.step_id, "summary.md").exists() + and Path(self.tmp_path, pr_step_identifier.step_id, "outcome.txt").exists() + ): + logger.warning( + "Section '{}' is missing or incomplete in artifact! 
folder: {}, summary: {}, outcome: {}",
+                    pr_step_identifier,
+                    Path(self.tmp_path, pr_step_identifier.step_id).exists(),
+                    Path(self.tmp_path, pr_step_identifier.step_id, "summary.md").exists(),
+                    Path(self.tmp_path, pr_step_identifier.step_id, "outcome.txt").exists(),
+                )
+                continue
+            self.summaries += Path(self.tmp_path, pr_step_identifier.step_id, "summary.md").read_text() + "\n\n"
+            outcome_str: str = Path(self.tmp_path, pr_step_identifier.step_id, "outcome.txt").read_text().strip()
+            outcome: StepResult = next(result for result in StepResult if result.result_str == outcome_str)
+            if outcome > StepResult.SUCCESS:
+                self.labels.append(pr_step_identifier.step_pr_label)
+        if not self.labels:
+            self.labels.append(SUCCESS_LABEL)
+
+    def run(self: Self) -> None:
+        logger.info("Downloading artifact '{}' from workflow '{}'", self.artifact_name, self.workflow_run_id)
+        with tempfile.TemporaryDirectory() as tmpdir:
+            logger.info("Created temporary directory '{}'", tmpdir)
+            self.tmp_path = Path(tmpdir)
+
+            GithubActionHelper.download_artifact(
+                repo=self.github_repository,
+                workflow_run_id=self.workflow_run_id,
+                artifact_name=self.artifact_name,
+                target_directory=self.tmp_path,
+            )
+
+            self._parse_artifact()
+            if self.pr_number > 0:
+                GithubActionHelper.set_labels_on_pull_request(repo=self.github_repository, pull_request_number=self.pr_number, labels=self.labels)
+                GithubActionHelper.update_or_create_pr_comment(repo=self.github_repository, pull_request_number=self.pr_number, comment_body=self.summaries)
+
+
+def main() -> None:
+    parser: configargparse.ArgumentParser = configargparse.ArgumentParser(
+        prog="VoronDesign PR Preparer",
+        description="This tool updates the PR comment and attaches labels for a VoronDesign PR",
+    )
+    parser.add_argument(
+        "-i",
+        "--workflow_run_id",
+        required=True,
+        action="store",
+        type=str,
+        env_var=f"{ENV_VAR_PREFIX}_WORKFLOW_RUN_ID",
+        help="Run ID of the workflow from which to pull the artifact",
+    )
+    parser.add_argument(
+        "-n",
+        "--artifact_name",
+        required=True,
+        action="store",
+        type=str,
+        env_var=f"{ENV_VAR_PREFIX}_ARTIFACT_NAME",
+        help="Name of the artifact to download the CI check results from",
+    )
+    parser.add_argument(
+        "-g",
+        "--github_repository",
+        required=False,
+        action="store",
+        type=str,
+        env_var=f"{ENV_VAR_PREFIX}_GITHUB_REPOSITORY",
+        default=os.environ["GITHUB_REPOSITORY"],
+        help="Repository from which to download the artifact",
+    )
+    parser.add_argument(
+        "-v",
+        "--verbose",
+        required=False,
+        action="store_true",
+        env_var=f"{ENV_VAR_PREFIX}_VERBOSE",
+        help="Print debug output to stdout",
+        default=False,
+    )
+    args: configargparse.Namespace = parser.parse_args()
+    PrHelper(args=args).run()
+
+
+if __name__ == "__main__":
+    main()
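
The set-pr-comment-labels entry point added above is meant to run outside the untrusted pull-request workflow: test_pr.yml only stores pr_number.txt plus the per-step summary.md/outcome.txt files in its uploaded artifact, and a separate workflow_run-triggered job with write permissions downloads that artifact, sets the labels and updates the bot comment. Below is a minimal sketch of such a companion workflow, which is not part of this patch; the triggering workflow name, the artifact name (ci_output) and the "uses:" reference are placeholder assumptions, and the env vars mirror the ones defined in pr_helper.py and github_action_helper.py.

name: pr_post_ci
on:
  workflow_run:
    # Placeholder: must match the `name:` of the PR workflow defined in test_pr.yml
    workflows: ["voron_ci"]
    types: [completed]
jobs:
  post_comment_and_labels:
    runs-on: ubuntu-latest
    permissions:
      pull-requests: write
      issues: write
    steps:
      - name: Update PR comment and labels
        # Placeholder: the same Docker action/container used in test_pr.yml
        uses: VoronDesign/VoronCI@main
        env:
          VORON_CI_GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
          PR_HELPER_WORKFLOW_RUN_ID: ${{ github.event.workflow_run.id }}
          # Placeholder: must match the upload-artifact name used in test_pr.yml
          PR_HELPER_ARTIFACT_NAME: ci_output
        with:
          args: set-pr-comment-labels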