mirror of https://github.com/armbian/build
armbian-next: patching.py vs decent logging: better, but not there yet

- mostly downgrading stuff to .debug()
- some stats
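The "some stats" part surfaces as a zero-padded, counted progress line per patch. A minimal standalone sketch of that counter formatting (hypothetical patch names and a stock logging setup, not the actual Armbian code):

import logging

logging.basicConfig(level=logging.INFO, format="%(levelname)s: %(message)s")
log = logging.getLogger("patching")

patches = ["fix-wifi.patch", "board-dts.patch", "general-config.patch"]  # hypothetical
total_patches = len(patches)
chars_total = len(str(total_patches))  # digits needed so counters line up, e.g. 007/120

for counter, one_patch in enumerate(patches, start=1):
    counter_str = str(counter).zfill(chars_total)  # zero-pad to fixed width
    log.info(f"-> {counter_str}/{total_patches}: {one_patch}")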
@@ -129,7 +129,7 @@ class PatchFileInDir:
             f"Armbian Autopatcher <auto.patch@armbian.com>",
             f"[AUTOGEN] {self.relative_dirs_and_base_file_name}", None)
         bare_patch.diff_bytes = contents_bytes
-        log.warning(f"Patch file {self.full_file_path()} is autogenerated.")
+        log.debug(f"Patch file {self.full_file_path()} is autogenerated.")
         return [bare_patch]

     counter: int = 1
@@ -166,7 +166,7 @@ class PatchFileInDir:
             bare_patch = PatchInPatchFile(self, counter, diff, None, None, None, None)
             bare_patch.problems.append("not_mbox")
             bare_patch.problems.extend(read_problems)
-            log.warning(f"Patch file {self.full_file_path()} is not properly mbox-formatted.")
+            log.debug(f"Patch file {self.full_file_path()} is not properly mbox-formatted.")
             return [bare_patch]

         # loop over the emails in the mbox
@@ -282,7 +282,7 @@ class PatchInPatchFile:
         m = re.match(r'(?P<name>.*)\s*<\s*(?P<email>.*)\s*>', from_str)
         if m is None:
             self.problems.append("invalid_author")
-            log.warning(
+            log.debug(
                 f"Failed to parse name and email from: '{from_str}' while parsing patch {self.counter} in file {self.parent.full_file_path()}")
             return downgrade_to_ascii(remove_quotes(from_str)), "unknown-email@domain.tld"
         else:
@@ -305,7 +305,7 @@ class PatchInPatchFile:
         # Hack: don't parse if autogenned; this could also be "don't parse if larger than X megabytes" since
         # large patches cause trouble
         if self.parent.patch_dir.is_autogen_dir:
-            log.warning(
+            log.debug(
                 f"Skipping parsing of auto-generated patch {self.counter} in file {self.parent.full_file_path()}")
             return
         else:
@@ -356,9 +356,14 @@ class PatchInPatchFile:
                 f"Patch file {self.parent.full_file_path()} has no changes. diff is {len(self.diff)} bytes: '{self.diff}'")

     def __str__(self) -> str:
+        return self.str_oneline_around("->", "<-")
+
+    def str_oneline_around(self, prefix, suffix):
+        extra_email = f"{self.from_email}" if self.from_email is not None else ""
+        extra_subject = f":'{self.subject}'" if self.subject is not None else ""
         desc: str = \
-            f"<{self.parent.relative_dirs_and_base_file_name}(:{self.counter}):" + \
-            f"{self.one_line_patch_stats()}: {self.from_email}: '{self.subject}' >"
+            f"{prefix}{self.parent.relative_dirs_and_base_file_name}(:{self.counter}):" + \
+            f"{self.one_line_patch_stats()}:{extra_email}{extra_subject}{suffix}"
         return desc

     def apply_patch(self, working_dir: str, options: dict[str, bool]):
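Extracting str_oneline_around(prefix, suffix) lets each caller pick the decoration around the one-line description: __str__ keeps the arrow style, while the counted progress line added later in this commit passes empty strings. A hedged stand-in (simplified fields, not the real PatchInPatchFile) showing the shape:

class Patch:
    # Simplified stand-in for PatchInPatchFile; name/from_email/subject are assumed fields.
    def __init__(self, name, from_email, subject):
        self.name, self.from_email, self.subject = name, from_email, subject

    def __str__(self) -> str:
        return self.str_oneline_around("->", "<-")  # decorated, for generic printing

    def str_oneline_around(self, prefix, suffix):
        extra_email = f"{self.from_email}" if self.from_email is not None else ""
        extra_subject = f":'{self.subject}'" if self.subject is not None else ""
        return f"{prefix}{self.name}:{extra_email}{extra_subject}{suffix}"


p = Patch("wifi-fix.patch", "dev@example.com", "fix RX path")
print(p)                             # ->wifi-fix.patch:dev@example.com:'fix RX path'<-
print(p.str_oneline_around("", ""))  # bare, for the counted apply-loop log line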
@@ -368,11 +373,12 @@ class PatchInPatchFile:
             full_path = os.path.join(working_dir, would_be_created_file)
             if os.path.exists(full_path):
                 self.problems.append("overwrites")
-                log.warning(
-                    f"File {would_be_created_file} already exists, but patch {self} would re-create it.")
                 if options["allow_recreate_existing_files"]:
-                    log.warning(f"Tolerating recreation in {self} as instructed.")
+                    log.debug(f"Tolerating recreation of {would_be_created_file} in {self} as instructed.")
                     os.remove(full_path)
+                else:
+                    log.warning(
+                        f"File {would_be_created_file} already exists, but patch {self} would re-create it.")

         # Use the 'patch' utility to apply the patch.
         if self.diff_bytes is None:
@@ -401,20 +407,22 @@ class PatchInPatchFile:

         # Check if the rejects exists:
         if os.path.exists(rejects_file):
-            log.warning(f"Rejects file {rejects_file} exists.")
+            log.debug(f"Rejects file {rejects_file} exists.")
             # Show its contents
             with open(rejects_file, "r") as f:
-                log.warning(f"Rejects file contents: {f.read()}")
+                reject_contents = f.read()
+                self.rejects = reject_contents
+                log.debug(f"Rejects file contents: {reject_contents}")
             # delete it
             os.remove(rejects_file)

         # Look at stdout. If it contains:
         if " (offset" in stdout_output or " with fuzz " in stdout_output:
-            log.warning(f"Patch {self} needs rebase: offset/fuzz used during apply.")
+            log.debug(f"Patch {self} needs rebase: offset/fuzz used during apply.")
             self.problems.append("needs_rebase")

         if "can't find file to patch at input line" in stdout_output:
-            log.warning(f"Patch {self} needs review: can't find file to patch.")
+            log.warning(f"Patch {self} needs fixing: can't find file to patch.")
             self.problems.append("missing_file")

         # parse the stdout output for the files actually patched.
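Note the rejects contents are now kept on the object (self.rejects) instead of only being logged, and rebase/fixup detection stays substring matching on patch(1) stdout. A small self-contained sketch of that detection, with hypothetical captured output:

# Hypothetical stdout captured from the 'patch' utility.
stdout_output = "Hunk #1 succeeded at 142 (offset 7 lines).\n"

problems: list[str] = []
if " (offset" in stdout_output or " with fuzz " in stdout_output:
    problems.append("needs_rebase")  # applied, but not where the patch expected
if "can't find file to patch at input line" in stdout_output:
    problems.append("missing_file")  # target file is gone; needs fixing, not rebasing
print(problems)  # ['needs_rebase']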
@@ -635,7 +643,7 @@ class PatchInPatchFile:
             log.debug(f"Patch {self.parent.full_file_path()} is newer than root Makefile, using patch date")
             final_mtime = patch_mtime
         else:
-            log.warn(
+            log.debug(
                 f"Root Makefile is newer than patch '{self.parent.full_file_path()}', using Makefile date")
         # Apply the date to all files that were touched by the patch
         # If the patch parsed OK, avoid trying to touch files the patch deleted.
@@ -650,7 +658,7 @@ class PatchInPatchFile:
             try:
                 os.utime(file_path, (final_mtime, final_mtime))
             except FileNotFoundError:
-                log.error(f"File '{file_path}' not found in patch {self}, can't set mtime.")
+                log.warning(f"File '{file_path}' not found in patch {self}, can't set mtime.")


 def fix_patch_subject(subject):
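The stamping itself is unchanged: os.utime with the chosen date, now merely tolerating files the patch deleted. A runnable sketch, assuming final_mtime was already picked as in the previous hunk:

import os
import time

path = "/tmp/example_patched_file"  # hypothetical file touched by a patch
open(path, "w").close()

final_mtime = time.time() - 3600  # pretend the chosen date is an hour in the past
try:
    os.utime(path, (final_mtime, final_mtime))  # sets (atime, mtime)
except FileNotFoundError:
    pass  # file was deleted by the patch; nothing to stamp
print(time.ctime(os.path.getmtime(path)))  # reflects final_mtime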
@@ -676,20 +684,20 @@ def fix_patch_subject(subject):
 # This is definitely not the right way to do this, but it works for now.
 def prepare_clean_git_tree_for_patching(repo: git.Repo, revision_sha: str, branch_name: str):
     # Let's find the Commit object for the revision_sha
-    log.info("Resetting git tree to revision '%s'", revision_sha)
+    log.debug("Resetting git tree to revision '%s'", revision_sha)
     commit = repo.commit(revision_sha)
     # Lets checkout, detached HEAD, to that Commit
     repo.head.reference = commit
     repo.head.reset(index=True, working_tree=True)
     # Let's create a new branch, and checkout to it, discarding any existing branch
-    log.info("Creating branch '%s'", branch_name)
+    log.debug("Creating branch '%s'", branch_name)
     repo.create_head(branch_name, revision_sha, force=True)
     repo.head.reference = repo.heads[branch_name]
     repo.head.reset(index=True, working_tree=True)
     # Let's remove all the untracked, but not ignored, files from the working copy
     for file in repo.untracked_files:
         full_name = os.path.join(repo.working_tree_dir, file)
-        log.info(f"Removing untracked file '{file}'")
+        log.debug(f"Removing untracked file '{file}'")
         os.remove(full_name)

@@ -740,7 +748,7 @@ def read_file_as_utf8(file_name: str) -> tuple[str, list[str]]:
         return content.decode("utf-8"), []  # no problems if this worked
     except UnicodeDecodeError as ude:
         log.warning(f"File '{file_name}' is not valid utf-8, trying to fix it...: '{ude}'")
-        # If decoding failed, try to decode as iso-8859-1
+        # If decoding failed, try to decode as iso-8859-1 # @TODO: or big5?
         return content.decode("iso-8859-1"), ["invalid_utf8"]  # utf-8 problems

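The decode-with-fallback in read_file_as_utf8 is worth sketching standalone: iso-8859-1 maps every byte, so the fallback cannot raise, and the caller is told the file was not valid utf-8 (function name here is illustrative):

def decode_as_utf8_with_fallback(content: bytes) -> tuple[str, list[str]]:
    try:
        return content.decode("utf-8"), []  # no problems if this worked
    except UnicodeDecodeError:
        return content.decode("iso-8859-1"), ["invalid_utf8"]  # flag the problem


print(decode_as_utf8_with_fallback("café".encode("utf-8")))       # ('café', [])
print(decode_as_utf8_with_fallback("café".encode("iso-8859-1")))  # ('café', ['invalid_utf8'])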
@@ -94,7 +94,7 @@ for patch_file in EXTRA_PATCH_FILES_FIRST:
     driver_dir.is_autogen_dir = True
     PATCH_FILES_FIRST.append(patching_utils.PatchFileInDir(patch_file, driver_dir))

-log.info(f"Found {len(PATCH_FILES_FIRST)} kernel driver patches")
+log.debug(f"Found {len(PATCH_FILES_FIRST)} kernel driver patches.")

 SERIES_PATCH_FILES: list[patching_utils.PatchFileInDir] = []
 # Now, loop over ALL_DIRS, and find the patch files in each directory
@@ -123,7 +123,17 @@ for one_patch_file in ALL_DIR_PATCH_FILES:
 # This reflects the order in which we want to apply the patches.
 # For series-based patches, we want to apply the serie'd patches first.
 # The other patches are separately sorted.
-ALL_PATCH_FILES_SORTED = PATCH_FILES_FIRST + SERIES_PATCH_FILES + list(dict(sorted(ALL_DIR_PATCH_FILES_BY_NAME.items())).values())
+NORMAL_PATCH_FILES = list(dict(sorted(ALL_DIR_PATCH_FILES_BY_NAME.items())).values())
+ALL_PATCH_FILES_SORTED = PATCH_FILES_FIRST + SERIES_PATCH_FILES + NORMAL_PATCH_FILES
+
+patch_counter_desc_arr = []
+if len(PATCH_FILES_FIRST) > 0:
+    patch_counter_desc_arr.append(f"{len(PATCH_FILES_FIRST)} driver patches")
+if len(SERIES_PATCH_FILES) > 0:
+    patch_counter_desc_arr.append(f"{len(SERIES_PATCH_FILES)} patches in series")
+if len(NORMAL_PATCH_FILES) > 0:
+    patch_counter_desc_arr.append(f"{len(NORMAL_PATCH_FILES)} patches in regular, sorted files")
+patch_file_desc = f"from {len(ALL_PATCH_FILES_SORTED)} files of which {', '.join(patch_counter_desc_arr)}"

 # Now, actually read the patch files.
 # Patch files might be in mailbox format, and in that case contain more than one "patch".
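With hypothetical counts, the new patch_file_desc summary composes like this (same logic as the hunk above, reduced to a runnable snippet):

PATCH_FILES_FIRST = ["d1", "d2", "d3"]  # hypothetical driver patch files
SERIES_PATCH_FILES = ["s1"]             # hypothetical series-managed file
NORMAL_PATCH_FILES = ["p1", "p2"]       # hypothetical sorted patch files
ALL_PATCH_FILES_SORTED = PATCH_FILES_FIRST + SERIES_PATCH_FILES + NORMAL_PATCH_FILES

patch_counter_desc_arr = []
if len(PATCH_FILES_FIRST) > 0:
    patch_counter_desc_arr.append(f"{len(PATCH_FILES_FIRST)} driver patches")
if len(SERIES_PATCH_FILES) > 0:
    patch_counter_desc_arr.append(f"{len(SERIES_PATCH_FILES)} patches in series")
if len(NORMAL_PATCH_FILES) > 0:
    patch_counter_desc_arr.append(f"{len(NORMAL_PATCH_FILES)} patches in regular, sorted files")
patch_file_desc = f"from {len(ALL_PATCH_FILES_SORTED)} files of which {', '.join(patch_counter_desc_arr)}"
print(patch_file_desc)
# -> from 6 files of which 3 driver patches, 1 patches in series, 2 patches in regular, sorted files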
@@ -131,7 +141,7 @@ ALL_PATCH_FILES_SORTED = PATCH_FILES_FIRST + SERIES_PATCH_FILES + list(dict(sort
 # We need to read the file, and see if it's a mailbox file; if so, split into multiple patches.
 # If not, just use the whole file as a single patch.
 # We'll store the patches in a list of Patch objects.
-log.info("Splitting patch files into patches")
+log.debug("Splitting patch files into patches")
 VALID_PATCHES: list[patching_utils.PatchInPatchFile] = []
 patch_file_in_dir: patching_utils.PatchFileInDir
 has_critical_split_errors = False
@@ -149,12 +159,12 @@ for patch_file_in_dir in ALL_PATCH_FILES_SORTED:
 if has_critical_split_errors:
     raise Exception("Critical errors found while splitting patches. Please fix the patch files manually.")

-log.info("Done splitting patch files into patches")
+log.debug("Done splitting patch files into patches")

 # Now, some patches might not be mbox-formatted, or somehow else invalid. We can try and recover those.
 # That is only possible if we're applying patches to git.
 # Rebuilding description is only possible if we've the git repo where the patches themselves reside.
-log.info("Parsing patches...")
+log.debug("Parsing patches...")
 has_critical_parse_errors = False
 for patch in VALID_PATCHES:
     try:
@@ -169,7 +179,7 @@ for patch in VALID_PATCHES:
 if has_critical_parse_errors:
     raise Exception("Critical errors found while parsing patches. Please fix the patch files manually.")

-log.info(f"Parsed patches.")
+log.debug(f"Parsed patches.")

 # Now, for patches missing description, try to recover descriptions from the Armbian repo.
 # It might be the SRC is not a git repo (say, when building in Docker), so we need to check.
@@ -191,13 +201,15 @@ if apply_patches_to_git and git_archeology:

 # Now, we need to apply the patches.
 git_repo: "git.Repo | None" = None
+total_patches = len(VALID_PATCHES)
 if apply_patches:
-    log.info("Cleaning target git directory...")
+    log.debug("Cleaning target git directory...")
     git_repo = Repo(GIT_WORK_DIR, odbt=GitCmdObjectDB)

     # Sanity check. It might be we fail to access the repo, or it's not a git repo, etc.
     status = str(git_repo.git.status()).replace("\n", "; ")
-    log.info(f"Git status of '{GIT_WORK_DIR}': '{status}'.")
+    GIT_WORK_DIR_REL_SRC = os.path.relpath(GIT_WORK_DIR, SRC)
+    log.debug(f"Git status of '{GIT_WORK_DIR_REL_SRC}': '{status}'.")

 BRANCH_FOR_PATCHES = armbian_utils.get_from_env_or_bomb("BRANCH_FOR_PATCHES")
 BASE_GIT_REVISION = armbian_utils.get_from_env("BASE_GIT_REVISION")
@@ -212,19 +224,24 @@ if apply_patches:
     patching_utils.prepare_clean_git_tree_for_patching(git_repo, BASE_GIT_REVISION, BRANCH_FOR_PATCHES)

 # Loop over the VALID_PATCHES, and apply them
-log.info(f"- Applying {len(VALID_PATCHES)} patches...")
+log.info(f"Applying {total_patches} patches {patch_file_desc}...")
 # Grab the date of the root Makefile; that is the minimum date for the patched files.
 root_makefile = os.path.join(GIT_WORK_DIR, "Makefile")
 apply_options["root_makefile_date"] = os.path.getmtime(root_makefile)
-log.info(f"- Root Makefile '{root_makefile}' date: '{os.path.getmtime(root_makefile)}'")
+log.debug(f"- Root Makefile '{root_makefile}' date: '{os.path.getmtime(root_makefile)}'")
+chars_total = len(str(total_patches))
+counter = 0
 for one_patch in VALID_PATCHES:
-    log.info(f"Applying patch {one_patch}")
+    counter += 1
+    counter_str = str(counter).zfill(chars_total)
+
+    log.info(f"-> {counter_str}/{total_patches}: {one_patch.str_oneline_around('', '')}")
     one_patch.applied_ok = False
     try:
         one_patch.apply_patch(GIT_WORK_DIR, apply_options)
         one_patch.applied_ok = True
     except Exception as e:
-        log.error(f"Exception while applying patch {one_patch}: {e}", exc_info=True)
+        log.error(f"Problem with {one_patch}: {e}", exc_info=True)

     if one_patch.applied_ok and apply_patches_to_git:
         committed = one_patch.commit_changes_to_git(git_repo, (not rewrite_patches_in_place), split_patches)
@@ -244,7 +261,7 @@ if apply_patches:
 patch_files_by_parent: dict[(patching_utils.PatchFileInDir, list[patching_utils.PatchInPatchFile])] = {}
 for one_patch in VALID_PATCHES:
     if not one_patch.applied_ok:
-        log.warning(f"Skipping patch {one_patch} because it was not applied successfully.")
+        log.warning(f"Skipping patch {one_patch} from rewrite because it was not applied successfully.")
         continue

     if one_patch.parent not in patch_files_by_parent:
@@ -267,7 +284,7 @@ with SummarizedMarkdownWriter(f"patching_{PATCH_TYPE}.md", f"{PATCH_TYPE} patchi
     patches_applied = 0
     patches_with_problems = 0
     problem_by_type: dict[str, int] = {}
-    if len(VALID_PATCHES) == 0:
+    if total_patches == 0:
         md.write(f"- No patches found.\n")
     else:
         # Prepare the Markdown table header
@@ -298,7 +315,7 @@ with SummarizedMarkdownWriter(f"patching_{PATCH_TYPE}.md", f"{PATCH_TYPE} patchi

 # Finally, write the README.md and the GH pages workflow file to the git dir, add them, and commit them.
 if apply_patches_to_git and readme_markdown is not None and git_repo is not None:
-    log.info("Writing README.md and .github/workflows/gh-pages.yml")
+    log.debug("Writing README.md and .github/workflows/gh-pages.yml")
     with open(os.path.join(GIT_WORK_DIR, "README.md"), 'w') as f:
         f.write(readme_markdown)
     git_repo.git.add("README.md")
@@ -307,7 +324,7 @@ if apply_patches_to_git and readme_markdown is not None and git_repo is not None
         os.makedirs(github_workflows_dir)
     with open(os.path.join(github_workflows_dir, "publish-ghpages.yaml"), 'w') as f:
         f.write(get_gh_pages_workflow_script())
-    log.info("Committing README.md and .github/workflows/gh-pages.yml")
+    log.debug("Committing README.md and .github/workflows/gh-pages.yml")
     git_repo.git.add("-f", [".github/workflows/publish-ghpages.yaml", "README.md"])
     maintainer_actor: Actor = Actor("Armbian AutoPatcher", "patching@armbian.com")
     commit = git_repo.index.commit(