armbian-next: Python patching delusion, pt4: series & better markdown

- Python patching: hopefully better Markdown: split series dir; detect more status/problems; archeology only for missing Subject
  - Python patching: archeology only for missing Subject:, not description; clarify CLI pushing
  - Python patching: use `{file_base_name}.patch` for archeology as `file_name` might include a dir
- Python patching: fix: don't skip board/target patches for no reason
- Python patching: fix for series.conf patches (sunxi/sunxi64): don't sort; mark as series and show on summary (see the series.conf sketch below)
- Python patching: don't fail if archeology found no commits (but add warning)
- Python patching: CLI command `kernel-patches-to-git` with archeology summary, and git pushing of results & summary
  - Python patching: patches-to-git small fixes, auto-push if it's rpardini
  - Python patching: add `patch-kernel` CLI command
- Python patching: commit README.md & gh-pages workflow when apply_patches_to_git
  - Python patching: hopefully better markdown
- Python patching: `git add` everything all at once, for speed
Ricardo Pardini
2022-12-20 16:17:21 +01:00
parent 2c0e9182ed
commit 97f6836705
7 changed files with 269 additions and 46 deletions
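
The series.conf handling fixed above follows the quilt-style convention: one relative patch path per line, applied strictly in file order (which is why these must not be sorted). A minimal sketch of such a parser, assuming `#` starts a comment and blank lines are ignored; the real implementation is `PatchDir.parse_series_conf` in the diff below:

```python
def parse_series_conf_sketch(series_conf_path: str) -> list[str]:
    """Hypothetical illustration of quilt-style series.conf parsing."""
    patches: list[str] = []
    with open(series_conf_path, "r") as f:
        for line in f:
            line = line.strip()
            if line == "" or line.startswith("#"):
                continue  # comments and blank lines carry no patch
            patches.append(line)  # order is preserved: series files must not be sorted
    return patches
```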

View File

@@ -0,0 +1,62 @@
function cli_patch_kernel_pre_run() {
    declare -g ARMBIAN_COMMAND_REQUIRE_BASIC_DEPS="yes" # Require prepare_host_basic to run before the command.
    # "gimme root on a Linux machine"
    cli_standard_relaunch_docker_or_sudo
}

function cli_patch_kernel_run() {
    display_alert "Patching kernel" "$BRANCH" "info"
    declare -g SYNC_CLOCK=no        # don't waste time syncing the clock
    declare -g JUST_KERNEL=yes      # only for kernel.
    declare -g KERNEL_ONLY=yes      # don't build images
    declare -g PATCHES_TO_GIT=yes   # commit to git.
    declare -g PATCH_ONLY=yes       # stop after patching.
    declare -g DEBUG_PATCHING=yes   # debug patching.
    declare -g GIT_ARCHEOLOGY=yes   # do archeology
    #declare -g REWRITE_PATCHES=yes # rewrite the patches after git committing. Very cheap compared to the rest.
    declare -g KERNEL_CONFIGURE=no  # no menuconfig
    declare -g RELEASE=jammy        # or whatever, not relevant, just fool the configuration
    declare -g SHOW_LOG=yes         # show the log

    prepare_and_config_main_build_single

    declare ymd vendor_lc target_repo_url summary_url
    ymd="$(date +%Y%m%d)"
    # lowercase ${VENDOR} and replace spaces with underscores
    vendor_lc="$(tr '[:upper:]' '[:lower:]' <<< "${VENDOR}" | tr ' ' '_')-next"
    target_branch="${vendor_lc}-${LINUXFAMILY}-${KERNEL_MAJOR_MINOR}-${ymd}${PUSH_BRANCH_POSTFIX:-""}"
    target_repo_url="git@github.com:${PUSH_TO_REPO:-"${PUSH_TO_USER:-"rpardini"}/${PUSH_TO_REPO:-"linux"}"}.git"
    summary_url="https://${PUSH_TO_USER:-"rpardini"}.github.io/${PUSH_TO_REPO:-"linux"}/${target_branch}.html"

    declare -a push_command
    push_command=(git -C "${SRC}/cache/git-bare/kernel" push "--force" "--verbose"
        "${target_repo_url}"
        "kernel-${LINUXFAMILY}-${KERNEL_MAJOR_MINOR}:${target_branch}")

    # Prepare the host and build kernel instead of main_default_build_single
    LOG_SECTION="prepare_host" do_with_logging prepare_host
    compile_kernel # This handles its own logging sections.
    display_alert "Done patching kernel" "${BRANCH} - ${LINUXFAMILY} - ${KERNEL_MAJOR_MINOR}" "cachehit"

    declare do_push="no"
    if git -C "${SRC}" remote get-url origin &> /dev/null; then
        declare src_origin_url
        src_origin_url="$(git -C "${SRC}" remote get-url origin | xargs echo -n)"
        declare prefix="git@github.com:${PUSH_TO_USER:-"rpardini"}/" # @TODO refactor var
        # if the src_origin_url begins with the prefix
        if [[ "${src_origin_url}" == "${prefix}"* ]]; then
            do_push="yes"
        fi
    fi

    display_alert "Git push command: " "${push_command[*]}" "info"
    if [[ "${do_push}" == "yes" ]]; then
        display_alert "Pushing to ${target_branch}" "${target_repo_url}" "info"
        "${push_command[@]}"
        display_alert "Done pushing to ${target_branch}" "${summary_url}" "info"
    fi
    display_alert "Summary URL (after push & gh-pages deploy): " "${summary_url}" "info"
}
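
To illustrate what the branch-name assembly above produces, here is the same computation mirrored in Python. The inputs are hypothetical values, not taken from the commit:

```python
from datetime import date

VENDOR, LINUXFAMILY, KERNEL_MAJOR_MINOR = "Friendly Elec", "rockchip64", "6.1"  # hypothetical

vendor_lc = VENDOR.lower().replace(" ", "_") + "-next"  # "friendly_elec-next"
ymd = date.today().strftime("%Y%m%d")                   # e.g. "20221220"
target_branch = f"{vendor_lc}-{LINUXFAMILY}-{KERNEL_MAJOR_MINOR}-{ymd}"
print(target_branch)  # friendly_elec-next-rockchip64-6.1-20221220
```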

View File

@@ -16,6 +16,8 @@ function armbian_register_commands() {
["configdump"]="config_dump" # idem ["configdump"]="config_dump" # idem
["json-info"]="json_info" # implemented in cli_json_info_pre_run and cli_json_info_run ["json-info"]="json_info" # implemented in cli_json_info_pre_run and cli_json_info_run
["kernel-patches-to-git"]="patch_kernel" # implemented in cli_patch_kernel_pre_run and cli_patch_kernel_run
["build"]="standard_build" # implemented in cli_standard_build_pre_run and cli_standard_build_run ["build"]="standard_build" # implemented in cli_standard_build_pre_run and cli_standard_build_run
["distccd"]="distccd" # implemented in cli_distccd_pre_run and cli_distccd_run ["distccd"]="distccd" # implemented in cli_distccd_pre_run and cli_distccd_run

View File

@@ -3,10 +3,11 @@
 function kernel_main_patching_python() {
     prepare_pip_packages_for_python_tools
-    temp_file_for_output="$(mktemp)" # Get a temporary file for the output.
+    declare patch_debug="${SHOW_DEBUG:-${DEBUG_PATCHING:-"no"}}"
+    declare temp_file_for_output="$(mktemp)" # Get a temporary file for the output.
     # array with all parameters; will be auto-quoted by bash's @Q modifier below
     declare -a params_quoted=(
-        "LOG_DEBUG=${SHOW_DEBUG}" # Logging level for python.
+        "LOG_DEBUG=${patch_debug}" # Logging level for python.
         "SRC=${SRC}" # Armbian root
         "OUTPUT=${temp_file_for_output}" # Output file for the python script.
         "ASSET_LOG_BASE=$(print_current_asset_log_base_file)" # base file name for the asset log; to write .md summaries.
@@ -24,7 +25,11 @@ function kernel_main_patching_python() {
"BASE_GIT_REVISION=${kernel_git_revision}" # The revision we're building/patching. Python will reset and clean to this. "BASE_GIT_REVISION=${kernel_git_revision}" # The revision we're building/patching. Python will reset and clean to this.
"BRANCH_FOR_PATCHES=kernel-${LINUXFAMILY}-${KERNEL_MAJOR_MINOR}" # When applying patches-to-git, use this branch. "BRANCH_FOR_PATCHES=kernel-${LINUXFAMILY}-${KERNEL_MAJOR_MINOR}" # When applying patches-to-git, use this branch.
# Lenience: allow problematic patches to be applied. # Lenience: allow problematic patches to be applied.
"ALLOW_RECREATE_EXISTING_FILES=yes" # Allow patches to recreate files that already exist. "ALLOW_RECREATE_EXISTING_FILES=yes" # Allow patches to recreate files that already exist.
"GIT_ARCHEOLOGY=${GIT_ARCHEOLOGY:-no}" # Allow git to do some archaeology to find the original patch's owners
# Pass the maintainer info, used for commits.
"MAINTAINER_NAME=${MAINTAINER}" # Name of the maintainer
"MAINTAINER_EMAIL=${MAINTAINERMAIL}" # Email of the maintainer
) )
display_alert "Calling Python patching script" "for kernel" "info" display_alert "Calling Python patching script" "for kernel" "info"
run_host_command_logged env -i "${params_quoted[@]@Q}" python3 "${SRC}/lib/tools/patching.py" run_host_command_logged env -i "${params_quoted[@]@Q}" python3 "${SRC}/lib/tools/patching.py"
@@ -42,10 +47,9 @@ function kernel_main_patching() {
#LOG_SECTION="kernel_prepare_patching" do_with_logging do_with_hooks kernel_prepare_patching #LOG_SECTION="kernel_prepare_patching" do_with_logging do_with_hooks kernel_prepare_patching
#LOG_SECTION="kernel_patching" do_with_logging do_with_hooks kernel_patching #LOG_SECTION="kernel_patching" do_with_logging do_with_hooks kernel_patching
# HACK: STOP HERE, for development. # STOP HERE, for cli support for patching tools.
if [[ "${PATCH_ONLY}" == "yes" ]]; then if [[ "${PATCH_ONLY}" == "yes" ]]; then
display_alert "PATCH_ONLY is set, stopping here." "PATCH_ONLY=yes" "info" return 0
exit 0
fi fi
# Interactive!!! # Interactive!!!
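
On the receiving end, `env -i` means patching.py sees only the parameters listed in `params_quoted`. A minimal sketch of the consuming side, assuming plain `os.environ` lookups (the real script goes through `common.armbian_utils` helpers):

```python
import os

# Sketch: read a few of the parameters kernel_main_patching_python() exports.
SRC = os.environ.get("SRC")
GIT_ARCHEOLOGY = os.environ.get("GIT_ARCHEOLOGY", "no") == "yes"
MAINTAINER_NAME = os.environ.get("MAINTAINER_NAME", "")
if SRC is None:
    raise SystemExit("SRC must be passed in the environment")
```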

View File

@@ -36,6 +36,12 @@ function compile_kernel() {
     local version hash pre_patch_version
     kernel_main_patching

+    # Stop after patching;
+    if [[ "${PATCH_ONLY}" == yes ]]; then
+        display_alert "PATCH_ONLY is set, stopping." "PATCH_ONLY=yes and patching success" "cachehit"
+        return 0
+    fi

     local toolchain
     kernel_config_maybe_interactive
@@ -50,7 +56,7 @@ function compile_kernel() {
     rm -f linux-firmware-image-*.deb # remove firmware image packages here - easier than patching ~40 packaging scripts at once
     run_host_command_logged rsync --remove-source-files -r ./*.deb "${DEB_STORAGE}/"

     # kernel build worked; let's clean up the git-bundle cache, since the git-bare cache is proven working.
     kernel_cleanup_bundle_artifacts

View File

@@ -47,3 +47,56 @@ class SummarizedMarkdownWriter:
if self.contents == "": if self.contents == "":
raise Exception("Markdown Contents not set") raise Exception("Markdown Contents not set")
return f"<details><summary>{self.title}: {'; '.join(self.summary)}</summary>\n<p>\n\n{self.contents}\n\n</p></details>\n" return f"<details><summary>{self.title}: {'; '.join(self.summary)}</summary>\n<p>\n\n{self.contents}\n\n</p></details>\n"
def get_readme_markdown(self):
if len(self.title) == 0:
raise Exception("Markdown Summary Title not set")
if len(self.summary) == 0:
raise Exception("Markdown Summary not set")
if self.contents == "":
raise Exception("Markdown Contents not set")
return f"#### {self.title}: {'; '.join(self.summary)}\n\n{self.contents}\n\n"
def get_gh_pages_workflow_script():
return """
name: publish-ghpages
on: push
jobs:
publish:
runs-on: ubuntu-latest
steps:
# Do NOT checkout this. It is a kernel tree and takes a long time, and it's not necessary.
- name: Grab README.md
env:
BRANCH_NAME: ${{ github.head_ref || github.ref_name }}
run: |
curl -s https://raw.githubusercontent.com/${{ github.repository }}/${BRANCH_NAME}/README.md > README.md
ls -la README.md
# install grip via pip, https://github.com/joeyespo/grip; rpardini's fork https://github.com/rpardini/grip
- name: Install grip
run: |
pip3 install https://github.com/rpardini/grip/archive/refs/heads/master.tar.gz
- name: Run grip to gen ${{ github.head_ref || github.ref_name }}
env:
BRANCH_NAME: ${{ github.head_ref || github.ref_name }}
run: |
mkdir -p public
grip README.md --context=${{ github.repository }} --title="${BRANCH_NAME}" --wide --user-content --export "public/${BRANCH_NAME}.html" || true
ls -la public/
- name: Deploy to GitHub Pages (gh-pages branch)
if: success()
uses: crazy-max/ghaction-github-pages@v3
with:
target_branch: gh-pages
build_dir: public
keep_history: true
jekyll: false
env:
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
"""

View File

@@ -40,23 +40,37 @@ class PatchDir:
     def __str__(self) -> str:
         return "<PatchDir: full_dir:'" + str(self.full_dir) + "'>"

-    def find_patch_files(self):
+    def find_series_patch_files(self) -> list["PatchFileInDir"]:
         # do nothing if the self.full_path is not a real, existing, directory
         if not os.path.isdir(self.full_dir):
-            return
+            return []
         # If the directory contains a series.conf file.
+        series_patches: list[PatchFileInDir] = []
         series_conf_path = os.path.join(self.full_dir, "series.conf")
         if os.path.isfile(series_conf_path):
+            counter = 0
             patches_in_series = self.parse_series_conf(series_conf_path)
             for patch_file_name in patches_in_series:
                 patch_file_path = os.path.join(self.full_dir, patch_file_name)
                 if os.path.isfile(patch_file_path):
+                    counter += 1
                     patch_file = PatchFileInDir(patch_file_path, self)
-                    self.patch_files.append(patch_file)
+                    patch_file.from_series = True
+                    patch_file.series_counter = counter
+                    # Fix basename for patches in series.conf
+                    relative_path = os.path.relpath(patch_file_path, self.full_dir)
+                    patch_file.relative_dirs_and_base_file_name = os.path.splitext(relative_path)[0]
+                    series_patches.append(patch_file)
                 else:
                     raise Exception(
                         f"series.conf file {series_conf_path} contains a patch file {patch_file_name} that does not exist")
+        return series_patches

+    def find_files_patch_files(self) -> list["PatchFileInDir"]:
+        # do nothing if the self.full_path is not a real, existing, directory
+        if not os.path.isdir(self.full_dir):
+            return []
         # Find the files in self.full_dir that end in .patch; do not consider subdirectories.
         # Add them to self.patch_files.
@@ -64,6 +78,7 @@ class PatchDir:
             # noinspection PyTypeChecker
             if file.endswith(".patch"):
                 self.patch_files.append(PatchFileInDir(file, self))
+        return self.patch_files

     @staticmethod
     def parse_series_conf(series_conf_path):
@@ -86,7 +101,10 @@ class PatchFileInDir:
     def __init__(self, file_name, patch_dir: PatchDir):
         self.file_name = file_name
         self.patch_dir: PatchDir = patch_dir
-        self.file_base_name = os.path.splitext(self.file_name)[0]
+        self.relative_dirs_and_base_file_name = os.path.splitext(self.file_name)[0]
+        self.file_name_no_ext_no_dirs = os.path.basename(self.relative_dirs_and_base_file_name)
+        self.from_series = False
+        self.series_counter = None

     def __str__(self) -> str:
         desc: str = f"<PatchFileInDir: file_name:'{self.file_name}', dir:{self.patch_dir.__str__()} >"
@@ -279,7 +297,7 @@ class PatchInPatchFile:
     def __str__(self) -> str:
         desc: str = \
-            f"<{self.parent.file_base_name}(:{self.counter}):" + \
+            f"<{self.parent.relative_dirs_and_base_file_name}(:{self.counter}):" + \
             f"{self.one_line_patch_stats()}: {self.from_email}: '{self.subject}' >"
         return desc
@@ -317,6 +335,10 @@ class PatchInPatchFile:
log.warning(f"Patch {self} needs rebase: offset/fuzz used during apply.") log.warning(f"Patch {self} needs rebase: offset/fuzz used during apply.")
self.problems.append("needs_rebase") self.problems.append("needs_rebase")
if "can't find file to patch at input line" in stdout_output:
log.warning(f"Patch {self} needs review: can't find file to patch.")
self.problems.append("missing_file")
# Check if the exit code is not zero and bomb # Check if the exit code is not zero and bomb
if proc.returncode != 0: if proc.returncode != 0:
# prefix each line of the stderr_output with "STDERR: ", then join again # prefix each line of the stderr_output with "STDERR: ", then join again
@@ -324,12 +346,12 @@ class PatchInPatchFile:
stderr_output = "\n" + stderr_output if stderr_output != "" else stderr_output stderr_output = "\n" + stderr_output if stderr_output != "" else stderr_output
stdout_output = "\n".join([f"STDOUT: {line}" for line in stdout_output.splitlines()]) stdout_output = "\n".join([f"STDOUT: {line}" for line in stdout_output.splitlines()])
stdout_output = "\n" + stdout_output if stdout_output != "" else stdout_output stdout_output = "\n" + stdout_output if stdout_output != "" else stdout_output
self.problems.append("failed_to_apply") self.problems.append("failed_apply")
raise Exception( raise Exception(
f"Failed to apply patch {self.parent.full_file_path()}:{stderr_output}{stdout_output}") f"Failed to apply patch {self.parent.full_file_path()}:{stderr_output}{stdout_output}")
def commit_changes_to_git(self, repo: git.Repo, add_rebase_tags: bool): def commit_changes_to_git(self, repo: git.Repo, add_rebase_tags: bool):
log.info(f"Committing changes to git: {self.parent.file_base_name}") log.info(f"Committing changes to git: {self.parent.relative_dirs_and_base_file_name}")
# add all the files that were touched by the patch # add all the files that were touched by the patch
# if the patch failed to parse, this will be an empty list, so we'll just add all changes. # if the patch failed to parse, this will be an empty list, so we'll just add all changes.
add_all_changes_in_git = False add_all_changes_in_git = False
@@ -339,6 +361,7 @@ class PatchInPatchFile:
             raise Exception(
                 f"Patch {self} has no files touched, but is not marked as failed to parse.")
         # add all files to git staging area
+        all_files_to_add: list[str] = []
         for file_name in self.all_file_names_touched:
             log.info(f"Adding file {file_name} to git")
             full_path = os.path.join(repo.working_tree_dir, file_name)
@@ -347,14 +370,16 @@ class PatchInPatchFile:
log.error(f"File '{full_path}' does not exist, but is touched by {self}") log.error(f"File '{full_path}' does not exist, but is touched by {self}")
add_all_changes_in_git = True add_all_changes_in_git = True
else: else:
repo.git.add(file_name) all_files_to_add.append(file_name)
if not add_all_changes_in_git:
repo.git.add("-f", all_files_to_add)
if self.failed_to_parse or add_all_changes_in_git: if self.failed_to_parse or add_all_changes_in_git:
log.warning(f"Rescue: adding all changed files to git for {self}") log.warning(f"Rescue: adding all changed files to git for {self}")
repo.git.add(repo.working_tree_dir) repo.git.add(repo.working_tree_dir)
# commit the changes, using GitPython; show the produced commit hash # commit the changes, using GitPython; show the produced commit hash
commit_message = f"{self.parent.file_base_name}(:{self.counter})\n\nOriginal-Subject: {self.subject}\n{self.desc}" commit_message = f"{self.parent.relative_dirs_and_base_file_name}(:{self.counter})\n\nOriginal-Subject: {self.subject}\n{self.desc}"
if add_rebase_tags: if add_rebase_tags:
commit_message = f"{commit_message}\n{self.patch_rebase_tags_desc()}" commit_message = f"{commit_message}\n{self.patch_rebase_tags_desc()}"
author: git.Actor = git.Actor(self.from_name, self.from_email) author: git.Actor = git.Actor(self.from_name, self.from_email)
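
The batching above relies on GitPython's command wrapper expanding a list argument into multiple CLI arguments, so every touched file lands in a single `git add` process. A small sketch of the pattern, assuming a repo with pending changes:

```python
import git

repo = git.Repo(".")  # assumes the current directory is a git work tree
touched = ["drivers/a.c", "drivers/b.c"]  # hypothetical file list
repo.git.add("-f", touched)  # one `git add -f drivers/a.c drivers/b.c` call, not one per file
```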
@@ -377,7 +402,7 @@ class PatchInPatchFile:
     def patch_rebase_tags_desc(self):
         tags = {}
-        tags["Patch-File"] = self.parent.file_base_name
+        tags["Patch-File"] = self.parent.relative_dirs_and_base_file_name
         tags["Patch-File-Counter"] = self.counter
         tags["Patch-Rel-Directory"] = self.parent.patch_dir.rel_dir
         tags["Patch-Type"] = self.parent.patch_dir.patch_root_dir.patch_type
@@ -396,15 +421,24 @@ class PatchInPatchFile:
return "" return ""
def markdown_problems(self): def markdown_problems(self):
if len(self.problems) == 0:
return ""
ret = [] ret = []
# if it's a patch in a series, add emoji
if self.parent.from_series:
ret.append(f" 📜 ")
if len(self.problems) == 0:
ret.append("")
for problem in self.problems: for problem in self.problems:
if problem in ["not_mbox", "needs_rebase"]: if problem in ["not_mbox", "needs_rebase"]:
# warning emoji # warning emoji
ret.append(f"⚠️{problem}") # normal ret.append(f"⚠️`[{problem}]` ")
else: else:
ret.append(f"**{problem}**") # bold ret.append(f"`[{problem}]` ")
# if it's a user patch, add smiley
if self.parent.patch_dir.patch_root_dir.root_type == "user":
ret.append(" 🫠`[user]` ")
return " ".join(ret) return " ".join(ret)
@@ -413,10 +447,10 @@ class PatchInPatchFile:
     def markdown_files(self):
         ret = []
-        max_files_shown = 5
+        max_files_shown = 15
         # Use the keys of the patch_file_stats_dict which is already sorted by the larger files
         file_names = list(self.patched_file_stats_dict.keys())
-        # if no files were touched, just return an interrobang
+        # if no files were touched, just return an ?
         if len(file_names) == 0:
             return "`?`"
         for file_name in file_names[:max_files_shown]:
@@ -427,13 +461,29 @@ class PatchInPatchFile:
     def markdown_author(self):
         if self.from_name:
-            return f"{self.from_name}"
+            return f"`{self.from_name.strip()}`"
-        return "`?`"
+        return "`[no Author]`"

     def markdown_subject(self):
         if self.subject:
             return f"_{self.subject}_"
-        return "`?`"
+        return "`[no Subject]`"

+    def markdown_link_to_patch(self):
+        if self.git_commit_hash is None:
+            return ""
+        return f"{self.git_commit_hash} "

+    def markdown_name(self):
+        ret = []
+        patch_name = self.parent.relative_dirs_and_base_file_name
+        # if the basename includes slashes, split after the last slash, the first part is the directory, second the file
+        if "/" in self.parent.relative_dirs_and_base_file_name:
+            dir_name, patch_name = self.parent.relative_dirs_and_base_file_name.rsplit("/", 1)
+            if dir_name is not None:
+                ret.append(f"`[{dir_name}/]`")
+        ret.append(f"`{patch_name}`")
+        return " ".join(ret)


 def fix_patch_subject(subject):
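
For example, markdown_name() splits a series-relative name on its last slash; with a hypothetical `patches.megous/ath11k-fix` it renders the directory and file as separate code spans:

```python
relative_dirs_and_base_file_name = "patches.megous/ath11k-fix"  # hypothetical value
dir_name, patch_name = relative_dirs_and_base_file_name.rsplit("/", 1)
print(f"`[{dir_name}/]` `{patch_name}`")  # -> `[patches.megous/]` `ath11k-fix`
```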
@@ -532,9 +582,9 @@ def read_file_as_utf8(file_name: str) -> tuple[str, list[str]]:
 # Extremely Armbian-specific.
 def perform_git_archeology(
         base_armbian_src_dir: str, armbian_git_repo: git.Repo, patch: PatchInPatchFile,
-        bad_archeology_hexshas: list[str], fast: bool):
+        bad_archeology_hexshas: list[str], fast: bool) -> bool:
     log.info(f"Trying to recover description for {patch.parent.file_name}:{patch.counter}")
-    patch_file_name = patch.parent.file_name
+    file_name_for_search = f"{patch.parent.file_name_no_ext_no_dirs}.patch"
     patch_file_paths: list[str] = []

     if fast:
@@ -545,12 +595,12 @@ def perform_git_archeology(
     proc = subprocess.run(
         [
             "find", base_armbian_src_dir,
-            "-name", patch_file_name,
+            "-name", file_name_for_search,
             "-type", "f"
         ],
         cwd=base_armbian_src_dir, stdout=subprocess.PIPE, stderr=subprocess.PIPE, check=True)
     patch_file_paths = proc.stdout.decode("utf-8").splitlines()
-    log.info(f"Found {len(patch_file_paths)} files with name {patch_file_name}")
+    log.info(f"Found {len(patch_file_paths)} files with name {file_name_for_search}")

     all_commits: list = []
     for found_file in patch_file_paths:
         relative_file_path = os.path.relpath(found_file, base_armbian_src_dir)
@@ -571,6 +621,10 @@ def perform_git_archeology(
     unique_commits.sort(key=lambda c: c.committed_datetime)

+    if len(unique_commits) == 0:
+        log.warning(f"Could not find any commits for '{file_name_for_search}'.")
+        return False

     main_suspect: git.Commit = unique_commits[0]
     log.info(f"- Main suspect: {main_suspect}: {main_suspect.message.rstrip()} Author: {main_suspect.author}")
@@ -609,3 +663,4 @@ def perform_git_archeology(
     if patch.from_name is None or patch.from_email is None:
         patch.from_name, patch.from_email = downgrade_to_ascii(
             main_suspect.author.name), main_suspect.author.email
+    return True
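
The archeology pass boils down to asking the armbian/build history which commits ever touched a file with the patch's base name; a simplified sketch of the idea with GitPython (the real function above also de-duplicates commits, filters known-bad hexshas, and downgrades author names to ASCII):

```python
import git

def commits_touching(repo: git.Repo, relative_file_path: str) -> list[git.Commit]:
    """All commits that touched the file, oldest first; an empty list means archeology failed."""
    commits = list(repo.iter_commits(paths=relative_file_path))
    commits.sort(key=lambda c: c.committed_datetime)
    return commits
```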

View File

@@ -1,12 +1,13 @@
 #! /bin/env python3
 import logging
+import os

 # Let's use GitPython to query and manipulate the git repo
-from git import Repo, GitCmdObjectDB, InvalidGitRepositoryError
+from git import Repo, GitCmdObjectDB, InvalidGitRepositoryError, Actor

 import common.armbian_utils as armbian_utils
 import common.patching_utils as patching_utils
-from common.md_asset_log import SummarizedMarkdownWriter
+from common.md_asset_log import SummarizedMarkdownWriter, get_gh_pages_workflow_script

 # Prepare logging
 armbian_utils.setup_logging()
@@ -44,8 +45,9 @@ CONST_PATCH_ROOT_DIRS = []
 for patch_dir_to_apply in PATCH_DIRS_TO_APPLY:
     if USERPATCHES_PATH is not None:
         CONST_PATCH_ROOT_DIRS.append(
-            patching_utils.PatchRootDir(f"{USERPATCHES_PATH}/{PATCH_TYPE}/{patch_dir_to_apply}", "user",
-                                        PATCH_TYPE, USERPATCHES_PATH))
+            patching_utils.PatchRootDir(
+                f"{USERPATCHES_PATH}/{PATCH_TYPE}/{patch_dir_to_apply}", "user", PATCH_TYPE,
+                USERPATCHES_PATH))
     CONST_PATCH_ROOT_DIRS.append(
         patching_utils.PatchRootDir(f"{SRC}/patch/{PATCH_TYPE}/{patch_dir_to_apply}", "core", PATCH_TYPE, SRC))
@@ -58,14 +60,22 @@ if BOARD is not None:
CONST_PATCH_SUB_DIRS.append(patching_utils.PatchSubDir("", "common")) CONST_PATCH_SUB_DIRS.append(patching_utils.PatchSubDir("", "common"))
# Prepare the full list of patch directories to apply # Prepare the full list of patch directories to apply
ALL_DIRS = [] ALL_DIRS: list[patching_utils.PatchDir] = []
for patch_root_dir in CONST_PATCH_ROOT_DIRS: for patch_root_dir in CONST_PATCH_ROOT_DIRS:
for patch_sub_dir in CONST_PATCH_SUB_DIRS: for patch_sub_dir in CONST_PATCH_SUB_DIRS:
ALL_DIRS.append(patching_utils.PatchDir(patch_root_dir, patch_sub_dir, SRC)) ALL_DIRS.append(patching_utils.PatchDir(patch_root_dir, patch_sub_dir, SRC))
SERIES_PATCH_FILES: list[patching_utils.PatchFileInDir] = []
# Now, loop over ALL_DIRS, and find the patch files in each directory # Now, loop over ALL_DIRS, and find the patch files in each directory
for one_dir in ALL_DIRS: for one_dir in ALL_DIRS:
one_dir.find_patch_files() if one_dir.patch_sub_dir.sub_type == "common":
# Handle series; those are directly added to SERIES_PATCH_FILES which is not sorted.
series_patches = one_dir.find_series_patch_files()
if len(series_patches) > 0:
log.debug(f"Directory '{one_dir.full_dir}' contains a series.")
SERIES_PATCH_FILES.extend(series_patches)
# Regular file-based patch files. This adds to the internal list.
one_dir.find_files_patch_files()
# Gather all the PatchFileInDir objects into a single list # Gather all the PatchFileInDir objects into a single list
ALL_DIR_PATCH_FILES: list[patching_utils.PatchFileInDir] = [] ALL_DIR_PATCH_FILES: list[patching_utils.PatchFileInDir] = []
@@ -75,12 +85,14 @@ for one_dir in ALL_DIRS:
 ALL_DIR_PATCH_FILES_BY_NAME: dict[(str, patching_utils.PatchFileInDir)] = {}
 for one_patch_file in ALL_DIR_PATCH_FILES:
+    # Hack: do a single one: DO NOT ENABLE THIS
+    # if one_patch_file.file_name == "board-pbp-add-dp-alt-mode.patch":
     ALL_DIR_PATCH_FILES_BY_NAME[one_patch_file.file_name] = one_patch_file

-# sort the dict by the key (file_name, sans dir...)
-ALL_DIR_PATCH_FILES_BY_NAME = dict(sorted(ALL_DIR_PATCH_FILES_BY_NAME.items()))
+# We need a final, ordered list of patch files to apply.
+# This reflects the order in which we want to apply the patches.
+# For series-based patches, we want to apply the serie'd patches first.
+# The other patches are separately sorted.
+ALL_PATCH_FILES_SORTED = SERIES_PATCH_FILES + list(dict(sorted(ALL_DIR_PATCH_FILES_BY_NAME.items())).values())

 # Now, actually read the patch files.
 # Patch files might be in mailbox format, and in that case contain more than one "patch".
@@ -89,8 +101,8 @@ ALL_DIR_PATCH_FILES_BY_NAME = dict(sorted(ALL_DIR_PATCH_FILES_BY_NAME.items()))
 # If not, just use the whole file as a single patch.
 # We'll store the patches in a list of Patch objects.
 VALID_PATCHES: list[patching_utils.PatchInPatchFile] = []
-for key in ALL_DIR_PATCH_FILES_BY_NAME:
-    patch_file_in_dir: patching_utils.PatchFileInDir = ALL_DIR_PATCH_FILES_BY_NAME[key]
+patch_file_in_dir: patching_utils.PatchFileInDir
+for patch_file_in_dir in ALL_PATCH_FILES_SORTED:
     try:
         patches_from_file = patch_file_in_dir.split_patches_from_file()
         VALID_PATCHES.extend(patches_from_file)
@@ -128,11 +140,14 @@ if apply_patches_to_git and git_archeology:
bad_archeology_hexshas = ["something"] bad_archeology_hexshas = ["something"]
for patch in VALID_PATCHES: for patch in VALID_PATCHES:
if patch.desc is None: if patch.subject is None: # archeology only for patches without subject
patching_utils.perform_git_archeology( archeology_ok = patching_utils.perform_git_archeology(
SRC, armbian_git_repo, patch, bad_archeology_hexshas, fast_archeology) SRC, armbian_git_repo, patch, bad_archeology_hexshas, fast_archeology)
if not archeology_ok:
patch.problems.append("archeology_failed")
# Now, we need to apply the patches. # Now, we need to apply the patches.
git_repo: "git.Repo | None" = None
if apply_patches: if apply_patches:
log.info("Cleaning target git directory...") log.info("Cleaning target git directory...")
git_repo = Repo(GIT_WORK_DIR, odbt=GitCmdObjectDB) git_repo = Repo(GIT_WORK_DIR, odbt=GitCmdObjectDB)
@@ -191,6 +206,7 @@ if apply_patches:
f"it was not applied successfully.") f"it was not applied successfully.")
# Create markdown about the patches # Create markdown about the patches
readme_markdown: "str | None" = None
with SummarizedMarkdownWriter(f"patching_{PATCH_TYPE}.md", f"{PATCH_TYPE} patching") as md: with SummarizedMarkdownWriter(f"patching_{PATCH_TYPE}.md", f"{PATCH_TYPE} patching") as md:
patch_count = 0 patch_count = 0
patches_applied = 0 patches_applied = 0
@@ -201,13 +217,13 @@ with SummarizedMarkdownWriter(f"patching_{PATCH_TYPE}.md", f"{PATCH_TYPE} patchi
     else:
         # Prepare the Markdown table header
         md.write(
-            "| Applied? | Problems | Patch | Diffstat Summary | Files patched | Author | Subject | Link to patch |\n")
+            "| Status | Patch | Diffstat Summary | Files patched | Author / Subject |\n")
         # Markdown table hyphen line and column alignment
-        md.write("| :---: | :---: | :--- | :--- | :--- | :--- | :--- | :--- |\n")
+        md.write("| :---: | :--- | :--- | :--- | :--- |\n")
         for one_patch in VALID_PATCHES:
             # Markdown table row
             md.write(
-                f"| {one_patch.markdown_applied()} | {one_patch.markdown_problems()} | `{one_patch.parent.file_base_name}` | {one_patch.markdown_diffstat()} | {one_patch.markdown_files()} | {one_patch.markdown_author()} | {one_patch.markdown_subject()} | {one_patch.git_commit_hash} |\n")
+                f"| {one_patch.markdown_problems()} | {one_patch.markdown_name()} | {one_patch.markdown_diffstat()} | {one_patch.markdown_link_to_patch()}{one_patch.markdown_files()} | {one_patch.markdown_author()} {one_patch.markdown_subject()} |\n")
             patch_count += 1
             if one_patch.applied_ok:
                 patches_applied += 1
@@ -222,3 +238,28 @@ with SummarizedMarkdownWriter(f"patching_{PATCH_TYPE}.md", f"{PATCH_TYPE} patchi
md.add_summary(f"{patches_with_problems} with problems") md.add_summary(f"{patches_with_problems} with problems")
for problem in problem_by_type: for problem in problem_by_type:
md.add_summary(f"{problem_by_type[problem]} {problem}") md.add_summary(f"{problem_by_type[problem]} {problem}")
# capture the markdown
readme_markdown = md.get_readme_markdown()
# Finally, write the README.md and the GH pages workflow file to the git dir, add them, and commit them.
if apply_patches_to_git and readme_markdown is not None and git_repo is not None:
log.info("Writing README.md and .github/workflows/gh-pages.yml")
with open(os.path.join(GIT_WORK_DIR, "README.md"), 'w') as f:
f.write(readme_markdown)
git_repo.git.add("README.md")
github_workflows_dir = os.path.join(GIT_WORK_DIR, ".github", "workflows")
if not os.path.exists(github_workflows_dir):
os.makedirs(github_workflows_dir)
with open(os.path.join(github_workflows_dir, "publish-ghpages.yaml"), 'w') as f:
f.write(get_gh_pages_workflow_script())
log.info("Committing README.md and .github/workflows/gh-pages.yml")
git_repo.git.add("-f", [".github/workflows/publish-ghpages.yaml", "README.md"])
maintainer_actor: Actor = Actor("Armbian AutoPatcher", "patching@armbian.com")
commit = git_repo.index.commit(
message="Armbian patching summary README",
author=maintainer_actor,
committer=maintainer_actor,
skip_hooks=True
)
log.info(f"Committed changes to git: {commit.hexsha}")
log.info("Done with summary commit.")