Mirror of https://github.com/armbian/build (synced 2025-09-24 19:47:06 +07:00)
patching: Patching Summary and Patching Failures tables using Python's Rich
- even Rich'er patch output by colorizing certain strings green/yellow/red
- BASE_GIT_TAG now very sneakily also accepts a branch name
- IMPORTANT: this includes a BREAKING CHANGE: patches failing to apply now break the build (fixes #4958)
- also break on legacy `process_patch_file()` failure, remove `EXIT_PATCHING_ERROR`
@@ -30,6 +30,9 @@ function kernel_main_patching_python() {
 "USERPATCHES_PATH=${USERPATCHES_PATH}" # Needed to find the userpatches.
 #"BOARD=" # BOARD is needed for the patchset selection logic; mostly for u-boot. empty for kernel.
 #"TARGET=" # TARGET is need for u-boot's SPI/SATA etc selection logic. empty for kernel
+# For table generation to fit into the screen, or being large when in GHA.
+"COLUMNS=${COLUMNS}"
+"GITHUB_ACTIONS=${GITHUB_ACTIONS}"
 # Needed so git can find the global .gitconfig, and Python can parse the PATH to determine which git to use.
 "PATH=${PATH}"
 "HOME=${HOME}"
@@ -99,8 +99,8 @@ process_patch_file() {
 patch --batch -p1 -N --input="${patch}" --quiet --reject-file=- && { # "-" discards rejects
 display_alert "* $status ${relative_patch}" "" "info"
 } || {
-display_alert "* $status ${relative_patch}" "failed" "wrn"
-[[ $EXIT_PATCHING_ERROR == yes ]] && exit_with_error "Aborting due to" "EXIT_PATCHING_ERROR"
+display_alert "* $status ${relative_patch}" "failed" "err"
+exit_with_error "Patching error, exiting."
 }

 return 0 # short-circuit above, avoid exiting with error
@@ -25,6 +25,9 @@ function uboot_main_patching_python() {
 "BOARD=${BOARD}" # BOARD is needed for the patchset selection logic; mostly for u-boot.
 "TARGET=${target_patchdir}" # TARGET is need for u-boot's SPI/SATA etc selection logic
 "USERPATCHES_PATH=${USERPATCHES_PATH}" # Needed to find the userpatches.
+# For table generation to fit into the screen, or being large when in GHA.
+"COLUMNS=${COLUMNS}"
+"GITHUB_ACTIONS=${GITHUB_ACTIONS}"
 # Needed so git can find the global .gitconfig, and Python can parse the PATH to determine which git to use.
 "PATH=${PATH}"
 "HOME=${HOME}"
@@ -21,6 +21,7 @@ function early_prepare_pip3_dependencies_for_python_tools() {
 "PyYAML==6.0" # for parsing/writing YAML
 "oras==0.1.17" # for OCI stuff in mapper-oci-update
 "Jinja2==3.1.2" # for templating
+"rich==13.4.1" # for rich text formatting
 )
 return 0
 }
@@ -379,8 +379,9 @@ function docker_cli_prepare_launch() {
 # Change the ccache directory to the named volume or bind created. @TODO: this needs more love. it works for Docker, but not sudo
 "--env" "CCACHE_DIR=${DOCKER_ARMBIAN_TARGET_PATH}/cache/ccache"

-# Pass down the TERM
+# Pass down the TERM and the COLUMNS
 "--env" "TERM=${TERM}"
+"--env" "COLUMNS=${COLUMNS}"

 # Pass down the CI env var (GitHub Actions, Jenkins, etc)
 "--env" "CI=${CI}" # All CI's, hopefully
@@ -47,23 +47,21 @@ class SummarizedMarkdownWriter:
 def write(self, text):
 self.contents += text

-# see https://docs.github.com/en/get-started/writing-on-github/working-with-advanced-formatting/organizing-information-with-collapsed-sections
-def get_summarized_markdown(self):
+def validate(self):
 if len(self.title) == 0:
 raise Exception("Markdown Summary Title not set")
 if len(self.summary) == 0:
 raise Exception("Markdown Summary not set")
 if self.contents == "":
 raise Exception("Markdown Contents not set")

+# see https://docs.github.com/en/get-started/writing-on-github/working-with-advanced-formatting/organizing-information-with-collapsed-sections
+def get_summarized_markdown(self):
+self.validate()
 return f"<details><summary>{self.title}: {'; '.join(self.summary)}</summary>\n<p>\n\n{self.contents}\n\n</p></details>\n"

 def get_readme_markdown(self):
-if len(self.title) == 0:
-raise Exception("Markdown Summary Title not set")
-if len(self.summary) == 0:
-raise Exception("Markdown Summary not set")
-if self.contents == "":
-raise Exception("Markdown Contents not set")
+self.validate()
 return f"#### {self.title}: {'; '.join(self.summary)}\n\n{self.contents}\n\n"

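The hunk above deduplicates the three sanity checks into a shared validate() that both renderers call. A minimal standalone sketch of the same pattern (a simplified stand-in with an assumed constructor, not the project's actual class; attribute names mirror the diff):

```python
# Simplified stand-in for the shared-validation pattern shown above.
# Assumption: title is a string, summary a list of strings, contents a string,
# mirroring how the diff uses them ('; '.join(self.summary), etc.).
class MarkdownSummary:
    def __init__(self, title: str):
        self.title = title
        self.summary: list[str] = []
        self.contents = ""

    def write(self, text: str):
        self.contents += text

    def validate(self):
        # Shared by both renderers instead of duplicating the checks.
        if len(self.title) == 0:
            raise Exception("Markdown Summary Title not set")
        if len(self.summary) == 0:
            raise Exception("Markdown Summary not set")
        if self.contents == "":
            raise Exception("Markdown Contents not set")

    def get_summarized_markdown(self) -> str:
        self.validate()
        return f"<details><summary>{self.title}: {'; '.join(self.summary)}</summary>\n<p>\n\n{self.contents}\n\n</p></details>\n"


writer = MarkdownSummary("Patching")
writer.summary.append("42 patches applied")
writer.write("| Patch | Status |\n|---|---|\n")
print(writer.get_summarized_markdown())
```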
@@ -289,6 +289,8 @@ class PatchInPatchFile:
 self.deleted_file_names = []
 self.renamed_file_names_source = [] # The original file names of renamed files
 self.all_file_names_touched = []
+self.rejects: str | None = None
+self.patch_output: str | None = None

 def parse_from_name_email(self, from_str: str) -> tuple["str | None", "str | None"]:
 m = re.match(r'(?P<name>.*)\s*<\s*(?P<email>.*)\s*>', from_str)
@@ -426,7 +428,6 @@ class PatchInPatchFile:
 with open(rejects_file, "r") as f:
 reject_contents = f.read()
 self.rejects = reject_contents
-log.debug(f"Rejects file contents: {reject_contents}")
 # delete it
 os.remove(rejects_file)

@@ -444,15 +445,16 @@ class PatchInPatchFile:
 self.actually_patched_files = parse_patch_stdout_for_files(stdout_output)
 self.apply_patch_date_to_files(working_dir, options)

+# Store the stdout and stderr output
+patch_output = ""
+patch_output += f"{stdout_output}\n" if stdout_output != "" else ""
+patch_output += f"{stderr_output}\n" if stderr_output != "" else ""
+self.patch_output = f"{patch_output}"
+
 # Check if the exit code is not zero and bomb
 if proc.returncode != 0:
-# prefix each line of the stderr_output with "STDERR: ", then join again
-stderr_output = "\n".join([f"STDERR: {line}" for line in stderr_output.splitlines()])
-stderr_output = "\n" + stderr_output if stderr_output != "" else stderr_output
-stdout_output = "\n".join([f"STDOUT: {line}" for line in stdout_output.splitlines()])
-stdout_output = "\n" + stdout_output if stdout_output != "" else stdout_output
 self.problems.append("failed_apply")
-raise Exception(f"Failed to apply patch {self.parent.full_file_path()}:{stderr_output}{stdout_output}")
+raise Exception(f"Failed to apply patch {self.parent.full_file_path()}")

 def commit_changes_to_git(self, repo: git.Repo, add_rebase_tags: bool, split_patches: bool):
 log.info(f"Committing changes to git: {self.parent.relative_dirs_and_base_file_name}")
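The new patch_output field preserves whatever patch(1) printed, so the failure table can show it later instead of stuffing it into the exception message. A self-contained sketch of that capture pattern using subprocess (the helper name, patch file, and working directory are hypothetical; the project's actual invocation wrapper is not part of this hunk):

```python
import subprocess


def apply_patch(patch_file: str, work_dir: str) -> str:
    """Run patch(1), keep its combined output for later display, and fail hard on error."""
    proc = subprocess.run(
        ["patch", "--batch", "-p1", "-N", f"--input={patch_file}"],
        cwd=work_dir,
        capture_output=True,
        text=True,
    )
    stdout_output, stderr_output = proc.stdout, proc.stderr

    # Same accumulation as the hunk above: keep whatever patch(1) said.
    patch_output = ""
    patch_output += f"{stdout_output}\n" if stdout_output != "" else ""
    patch_output += f"{stderr_output}\n" if stderr_output != "" else ""

    if proc.returncode != 0:
        # The exception stays short; the stored output feeds the failure table later.
        raise Exception(f"Failed to apply patch {patch_file}")
    return patch_output


# Example (hypothetical paths):
# output = apply_patch("/tmp/patches/example.patch", "/tmp/kernel-worktree")
```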
@@ -609,6 +611,18 @@ class PatchInPatchFile:
 def markdown_diffstat(self):
 return f"`{self.text_diffstats()}`"

+def text_files(self):
+ret = []
+max_files_shown = 15
+file_names = list(self.patched_file_stats_dict.keys())
+if len(file_names) == 0:
+return "?"
+for file_name in file_names[:max_files_shown]:
+ret.append(f"{file_name}")
+if len(file_names) > max_files_shown:
+ret.append(f"and {len(file_names) - max_files_shown} more")
+return ", ".join(ret)
+
 def markdown_files(self):
 ret = []
 max_files_shown = 15
@@ -623,6 +637,11 @@ class PatchInPatchFile:
 ret.append(f"_and {len(file_names) - max_files_shown} more_")
 return ", ".join(ret)

+def text_author(self):
+if self.from_name:
+return f"{self.from_name.strip()}"
+return "[no Author]"
+
 def markdown_author(self):
 if self.from_name:
 return f"`{self.from_name.strip()}`"
@@ -633,22 +652,53 @@ class PatchInPatchFile:
 return f"_{self.subject}_"
 return "`[no Subject]`"

+def text_subject(self):
+if self.subject:
+return f"{self.subject}"
+return "[no Subject]"
+
 def markdown_link_to_patch(self):
 if self.git_commit_hash is None:
 return ""
 return f"{self.git_commit_hash} "

-def markdown_name(self):
+def markdown_name(self, skip_markdown=False):
 ret = []
+escape = "`" if not skip_markdown else ""
 patch_name = self.parent.relative_dirs_and_base_file_name
 # if the basename includes slashes, split after the last slash, the first part is the directory, second the file
 if "/" in self.parent.relative_dirs_and_base_file_name:
 dir_name, patch_name = self.parent.relative_dirs_and_base_file_name.rsplit("/", 1)
 if dir_name is not None:
-ret.append(f"`[{dir_name}/]`")
-ret.append(f"`{patch_name}`")
+# get only the last part of the dir_name
+dir_name = dir_name.split("/")[-1]
+ret.append(f"{escape}[{dir_name}/]{escape}")
+ret.append(f"{escape}{patch_name}{escape}")
 return " ".join(ret)

+def rich_name_status(self):
+color = "green"
+for problem in self.problems:
+if problem in ["not_mbox", "needs_rebase"]:
+color = "yellow"
+else:
+color = "red"
+# @TODO: once our ansi-haste supports it, use [link url=file://blaaa]
+return f"[bold {color}]{self.markdown_name(skip_markdown=True)}"
+
+def rich_patch_output(self):
+ret = self.patch_output
+color_tags = {
+'green': ['Reversed (or previously applied) patch detected!'],
+'yellow': ['with fuzz', 'offset ', ' hunks ignored', ' hunk ignored'],
+'red': ['hunk FAILED', 'hunks FAILED']
+}
+# use Rich's syntax highlighting to highlight with color
+for color in color_tags:
+for tag in color_tags[color]:
+ret = ret.replace(tag, f"[bold {color}]{tag}[/bold {color}]")
+return ret
+
 def apply_patch_date_to_files(self, working_dir, options):
 # The date applied to the patched files is:
 # 1) The date of the root Makefile
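rich_name_status() and rich_patch_output() colorize by wrapping known substrings in Rich console markup tags. A small standalone illustration of how such markup renders (the sample patch output is invented):

```python
from rich.console import Console

console = Console(color_system="standard", highlight=False)

# Same trick as rich_patch_output(): wrap known substrings in Rich markup tags.
color_tags = {
    "yellow": ["with fuzz", "offset "],
    "red": ["hunk FAILED", "hunks FAILED"],
}
output = "Hunk #1 succeeded at 10 with fuzz 2 (offset 3 lines).\n1 out of 2 hunks FAILED."
for color, tags in color_tags.items():
    for tag in tags:
        output = output.replace(tag, f"[bold {color}]{tag}[/bold {color}]")

# Console.print interprets the markup unless markup=False is passed.
console.print(output)
```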
@@ -10,6 +10,7 @@
 import logging
 import os

+import rich.box
 # Let's use GitPython to query and manipulate the git repo
 from git import Actor
 from git import GitCmdObjectDB
@@ -59,6 +60,9 @@ BOARD = armbian_utils.get_from_env("BOARD")
 TARGET = armbian_utils.get_from_env("TARGET")
 USERPATCHES_PATH = armbian_utils.get_from_env("USERPATCHES_PATH")

+# The exit exception, if any.
+exit_with_exception: "Exception | None" = None
+
 # Some path possibilities
 CONST_PATCH_ROOT_DIRS = []

@@ -214,6 +218,9 @@ if apply_patches_to_git and git_archeology:
 # Now, we need to apply the patches.
 git_repo: "git.Repo | None" = None
 total_patches = len(VALID_PATCHES)
+any_failed_to_apply = False
+failed_to_apply_list = []
+
 if apply_patches:
 log.debug("Cleaning target git directory...")
 git_repo = Repo(GIT_WORK_DIR, odbt=GitCmdObjectDB)
@@ -231,7 +238,16 @@ if apply_patches:
 raise Exception("BASE_GIT_REVISION or BASE_GIT_TAG must be set")
 else:
 log.debug(f"Getting revision of BASE_GIT_TAG={BASE_GIT_TAG}")
+# first, try as a tag:
+try:
 BASE_GIT_REVISION = git_repo.tags[BASE_GIT_TAG].commit.hexsha
+except IndexError:
+# not a tag, try as a branch:
+try:
+BASE_GIT_REVISION = git_repo.branches[BASE_GIT_TAG].commit.hexsha
+except IndexError:
+raise Exception(f"BASE_GIT_TAG={BASE_GIT_TAG} is neither a tag nor a branch")
+
 log.debug(f"Found BASE_GIT_REVISION={BASE_GIT_REVISION} for BASE_GIT_TAG={BASE_GIT_TAG}")

 patching_utils.prepare_clean_git_tree_for_patching(git_repo, BASE_GIT_REVISION, BRANCH_FOR_PATCHES)
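This hunk is what lets BASE_GIT_TAG also name a branch: GitPython's repo.tags and repo.branches lookups raise IndexError for a missing name, so the code falls through from tag to branch. A minimal standalone sketch of the same lookup (repository path and ref name are placeholders):

```python
import git


def resolve_tag_or_branch(repo_path: str, ref_name: str) -> str:
    """Return the commit sha that ref_name points to, trying tags first, then branches."""
    repo = git.Repo(repo_path)
    try:
        return repo.tags[ref_name].commit.hexsha  # IndexError if no such tag
    except IndexError:
        try:
            return repo.branches[ref_name].commit.hexsha  # IndexError if no such branch
        except IndexError:
            raise Exception(f"{ref_name} is neither a tag nor a branch")


# Example (placeholder values):
# sha = resolve_tag_or_branch("/path/to/kernel-worktree", "linux-6.1.y")
```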
@@ -240,8 +256,9 @@ if apply_patches:
 log.info(f"Applying {total_patches} patches {patch_file_desc}...")
 # Grab the date of the root Makefile; that is the minimum date for the patched files.
 root_makefile = os.path.join(GIT_WORK_DIR, "Makefile")
-apply_options["root_makefile_date"] = os.path.getmtime(root_makefile)
-log.debug(f"- Root Makefile '{root_makefile}' date: '{os.path.getmtime(root_makefile)}'")
+root_makefile_mtime = os.path.getmtime(root_makefile)
+apply_options["root_makefile_date"] = root_makefile_mtime
+log.debug(f"- Root Makefile '{root_makefile}' date: '{root_makefile_mtime}'")
 chars_total = len(str(total_patches))
 counter = 0
 for one_patch in VALID_PATCHES:
@@ -255,6 +272,8 @@ if apply_patches:
 one_patch.applied_ok = True
 except Exception as e:
 log.error(f"Problem with {one_patch}: {e}")
+any_failed_to_apply = True
+failed_to_apply_list.append(one_patch)

 if one_patch.applied_ok and apply_patches_to_git:
 committed = one_patch.commit_changes_to_git(git_repo, (not rewrite_patches_in_place), split_patches)
@@ -268,6 +287,11 @@ if apply_patches:
 git_repo, commit_hash)
 one_patch.rewritten_patch = rewritten_patch

+if (not apply_patches_to_git) and (not rewrite_patches_in_place) and any_failed_to_apply:
+log.error(
+f"Failed to apply {len(failed_to_apply_list)} patches: {','.join([failed_patch.__str__() for failed_patch in failed_to_apply_list])}")
+exit_with_exception = Exception(f"Failed to apply {len(failed_to_apply_list)} patches.")
+
 if rewrite_patches_in_place:
 # Now; we need to write the patches to files.
 # loop over the patches, and group them by the parent; the parent is the PatchFileInDir object.
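Together with any_failed_to_apply and exit_with_exception, this gives the script a deferred-failure flow: failures are only recorded while patching, the summary tables still get printed, and only then is the stored exception raised (see the final hunk below). A condensed sketch of that pattern, with illustrative names and stand-in patch data:

```python
# Condensed, illustrative-only version of the deferred-failure flow.
exit_with_exception: "Exception | None" = None
failed_to_apply_list: list[str] = []

for patch_name in ["first.patch", "second.patch"]:  # placeholder patch list
    try:
        raise RuntimeError(f"pretend {patch_name} failed to apply")  # stand-in for the real apply step
    except Exception as e:
        print(f"Problem with {patch_name}: {e}")
        failed_to_apply_list.append(patch_name)

if failed_to_apply_list:
    exit_with_exception = Exception(f"Failed to apply {len(failed_to_apply_list)} patches.")

# ... summary and failure tables would be printed here ...

if exit_with_exception is not None:
    raise exit_with_exception  # the build only breaks after everything has been reported
```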
@@ -348,3 +372,48 @@ if apply_patches_to_git and readme_markdown is not None and git_repo is not None
 )
 log.info(f"Committed changes to git: {commit.hexsha}")
 log.info("Done with summary commit.")
+
+# Use Rich.
+from rich.console import Console
+from rich.table import Table
+from rich.syntax import Syntax
+
+# console width is COLUMNS env var minus 12, or just 160 if GITHUB_ACTIONS env is not empty
+console_width = (int(os.environ.get("COLUMNS", 160)) - 12) if os.environ.get("GITHUB_ACTIONS", "") == "" else 160
+console = Console(color_system="standard", width=console_width, highlight=False)
+
+# Use Rich to print a summary of the patches
+if True:
+summary_table = Table(title=f"Summary of {PATCH_TYPE} patches", show_header=True, show_lines=True, box=rich.box.ROUNDED)
+summary_table.add_column("Patch / Status", overflow="fold", min_width=25, max_width=35)
+summary_table.add_column("Diffstat / files", max_width=35)
+summary_table.add_column("Author / Subject", overflow="ellipsis")
+for one_patch in VALID_PATCHES:
+summary_table.add_row(
+# (one_patch.markdown_name(skip_markdown=True)), # + " " + one_patch.markdown_problems()
+one_patch.rich_name_status(),
+(one_patch.text_diffstats() + " " + one_patch.text_files()),
+(one_patch.text_author() + ": " + one_patch.text_subject())
+)
+console.print(summary_table)
+
+# Use Rich to print a summary of the failed patches and their rejects
+if any_failed_to_apply:
+summary_table = Table(title="Summary of failed patches", show_header=True, show_lines=True, box=rich.box.ROUNDED)
+summary_table.add_column("Patch", overflow="fold", min_width=5, max_width=20)
+summary_table.add_column("Patching output", overflow="fold", min_width=20, max_width=40)
+summary_table.add_column("Rejects")
+for one_patch in failed_to_apply_list:
+reject_compo = "No rejects"
+if one_patch.rejects is not None:
+reject_compo = Syntax(one_patch.rejects, "diff", line_numbers=False, word_wrap=True)
+
+summary_table.add_row(
+one_patch.rich_name_status(),
+one_patch.rich_patch_output(),
+reject_compo
+)
+console.print(summary_table)
+
+if exit_with_exception is not None:
+raise exit_with_exception
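The two tables above can be previewed in isolation. A minimal, self-contained sketch using the same Rich calls; the row data here is invented, whereas the real script feeds it from VALID_PATCHES and failed_to_apply_list:

```python
import os

import rich.box
from rich.console import Console
from rich.syntax import Syntax
from rich.table import Table

# Same sizing rule as the script: COLUMNS minus 12 locally, fixed 160 under GitHub Actions.
console_width = (int(os.environ.get("COLUMNS", 160)) - 12) if os.environ.get("GITHUB_ACTIONS", "") == "" else 160
console = Console(color_system="standard", width=console_width, highlight=False)

summary_table = Table(title="Summary of kernel patches", show_header=True, show_lines=True, box=rich.box.ROUNDED)
summary_table.add_column("Patch / Status", overflow="fold", min_width=25, max_width=35)
summary_table.add_column("Diffstat / files", max_width=35)
summary_table.add_column("Author / Subject", overflow="ellipsis")
summary_table.add_row(
    "[bold green]board-x/ fix-thing.patch",  # invented row data
    "(+10/-2) drivers/foo.c",
    "Jane Doe: fix the thing",
)
console.print(summary_table)

failed_table = Table(title="Summary of failed patches", show_header=True, show_lines=True, box=rich.box.ROUNDED)
failed_table.add_column("Patch", overflow="fold", min_width=5, max_width=20)
failed_table.add_column("Patching output", overflow="fold", min_width=20, max_width=40)
failed_table.add_column("Rejects")
failed_table.add_row(
    "[bold red]board-y/ broken.patch",  # invented row data
    "1 out of 2 hunks [bold red]FAILED[/bold red]",
    Syntax("--- a/file.c\n+++ b/file.c\n@@ -1 +1 @@\n-old\n+new", "diff", line_numbers=False, word_wrap=True),
)
console.print(failed_table)
```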