Mirror of https://github.com/armbian/build
armbian-next: Python patching delusion pt 5 + EXTRAWIFI patch generator/harness + consistent patch mtime targets
- Python patching: tune some logging all-around
- Python patching: *FINALLY* set the dates on the patched files to `max(patch_date, root_makefile_date)`, thus making lightning-fast rebuilds possible again
- new EXTRAWIFI patch generator harness; Python patching: EXTRA_PATCH_FILES_FIRST
- Python patching: use a temp file for patching rejects; clean it up afterwards
- new EXTRAWIFI patch generator harness; Python: mark driver patches as autogen; don't split or parse them, read them as bytes, apply the bytes directly
- new EXTRAWIFI patch generator harness; somewhat works, but patches are per-family
- Python patching: add `cache/patch` as a mountpoint
- Darwin Docker performance is 20x with namedvolume; the cached patches are very large
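The mtime trick is what restores the fast rebuilds: `make` considers a file dirty when it is newer than its build outputs, so freshly-patched files must not be stamped with "now". A minimal sketch of the rule (the real implementation is `apply_patch_date_to_files()` in the patching_utils.py hunks below; names here are illustrative):

```python
import os

def clamp_patched_file_mtime(patched_file: str, patch_file: str, root_makefile: str) -> None:
    # the patched file gets max(patch_date, root_makefile_date): a stable date,
    # so re-applying unchanged patches doesn't make the whole tree look dirty
    final_mtime = max(os.path.getmtime(patch_file), os.path.getmtime(root_makefile))
    os.utime(patched_file, (final_mtime, final_mtime))
```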
@@ -3,6 +3,9 @@
function kernel_main_patching_python() {
    prepare_pip_packages_for_python_tools

    # outer scope variables
    declare -I kernel_drivers_patch_file kernel_drivers_patch_hash

    declare patch_debug="${SHOW_DEBUG:-${DEBUG_PATCHING:-"no"}}"
    declare temp_file_for_output="$(mktemp)" # Get a temporary file for the output.
    # array with all parameters; will be auto-quoted by bash's @Q modifier below
@@ -13,9 +16,9 @@ function kernel_main_patching_python() {
        "ASSET_LOG_BASE=$(print_current_asset_log_base_file)" # base file name for the asset log; to write .md summaries.
        "PATCH_TYPE=kernel" # or, u-boot, or, atf
        "PATCH_DIRS_TO_APPLY=${KERNELPATCHDIR}" # A space-separated list of directories to apply...
        "BOARD=" # BOARD is needed for the patchset selection logic; mostly for u-boot. empty for kernel.
        "TARGET=" # TARGET is needed for u-boot's SPI/SATA etc selection logic. empty for kernel.
        "USERPATCHES_PATH=${USERPATCHES_PATH}" # Needed to find the userpatches.
        #"BOARD=" # BOARD is needed for the patchset selection logic; mostly for u-boot. empty for kernel.
        #"TARGET=" # TARGET is needed for u-boot's SPI/SATA etc selection logic. empty for kernel.
        # What to do?
        "APPLY_PATCHES=yes" # Apply the patches to the filesystem. Does not imply git committing. If no, still exports the hash.
        "PATCHES_TO_GIT=${PATCHES_TO_GIT:-no}" # Commit to git after applying the patches.
@@ -30,6 +33,9 @@
        # Pass the maintainer info, used for commits.
        "MAINTAINER_NAME=${MAINTAINER}" # Name of the maintainer
        "MAINTAINER_EMAIL=${MAINTAINERMAIL}" # Email of the maintainer
        # Pass in the drivers extra patches and hashes; will be applied _first_, before series.
        "EXTRA_PATCH_FILES_FIRST=${kernel_drivers_patch_file}" # Is a space-separated list.
        "EXTRA_PATCH_HASHES_FIRST=${kernel_drivers_patch_hash}" # Is a space-separated list.
    )
    display_alert "Calling Python patching script" "for kernel" "info"
    run_host_command_logged env -i "${params_quoted[@]@Q}" python3 "${SRC}/lib/tools/patching.py"
@@ -41,6 +47,11 @@
}
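The `env -i` invocation wipes the caller's environment, so `patching.py` sees exactly the parameters listed above and nothing else. On the Python side, required variables are read with `armbian_utils.get_from_env_or_bomb()` (seen in the patching.py hunks below). Its implementation is not part of this diff; presumably it amounts to something like:

```python
import os

def get_from_env_or_bomb(env_name: str) -> str:
    # hypothetical sketch: fail fast when a required variable is missing
    value = os.environ.get(env_name)
    if not value:
        raise Exception(f"Required environment variable '{env_name}' is not set")
    return value
```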

function kernel_main_patching() {
    # kernel_drivers_create_patches will fill the variables below
    declare kernel_drivers_patch_file kernel_drivers_patch_hash
    LOG_SECTION="kernel_drivers_create_patches" do_with_logging do_with_hooks kernel_drivers_create_patches "${kernel_work_dir}" "${kernel_git_revision}"

    # Python patching will git reset to the kernel SHA1 git revision, and remove all untracked files.
    LOG_SECTION="kernel_main_patching_python" do_with_logging do_with_hooks kernel_main_patching_python

    # The old way...

@@ -3,13 +3,6 @@ function compile_kernel() {
    local kernel_work_dir="${SRC}/cache/sources/${LINUXSOURCEDIR}"
    display_alert "Kernel build starting" "${LINUXSOURCEDIR}" "info"

    # Extension hook: fetch_sources_for_kernel_driver
    call_extension_method "fetch_sources_for_kernel_driver" <<- 'FETCH_SOURCES_FOR_KERNEL_DRIVER'
        *fetch external drivers from source, before fetching kernel git sources*
        Do your kernel driver fetching from external sources here.
        `${kernel_work_dir}` is set, but not yet populated with kernel sources.
    FETCH_SOURCES_FOR_KERNEL_DRIVER

    # Prepare the git bare repo for the kernel; shared between all kernel builds
    declare kernel_git_bare_tree
    # alternative # LOG_SECTION="kernel_prepare_bare_repo_from_bundle" do_with_logging_unless_user_terminal do_with_hooks \
@@ -156,7 +149,7 @@ function kernel_build_and_package() {
        install_make_params_quoted+=("${value}")
    done

    display_alert "Building kernel" "${LINUXCONFIG} ${build_targets[*]}" "info"
    display_alert "Building kernel" "${LINUXFAMILY} ${LINUXCONFIG} ${build_targets[*]}" "info"
    fasthash_debug "build"
    make_filter="| grep --line-buffered -v -e 'LD' -e 'AR' -e 'INSTALL' -e 'SIGN' -e 'XZ' " \
        do_with_ccache_statistics \
@@ -164,6 +157,7 @@ function kernel_build_and_package() {
    fasthash_debug "build"

    cd "${kernel_work_dir}" || exit_with_error "Can't cd to kernel_work_dir: ${kernel_work_dir}"
    display_alert "Packaging kernel" "${LINUXFAMILY} ${LINUXCONFIG}" "info"
    prepare_kernel_packaging_debs "${kernel_work_dir}" "${kernel_dest_install_dir}" "${version}" kernel_install_dirs

    display_alert "Kernel built and packaged in" "$((SECONDS - ts)) seconds - ${version}-${LINUXFAMILY}" "info"

lib/functions/compilation/patch/drivers-harness.sh (new file, 140 lines)
@@ -0,0 +1,140 @@
function calculate_hash_for_files() {
    declare -a hashes=()
    for file in "$@"; do
        hash="$(sha256sum "${file}" | cut -d' ' -f1)"
        hashes+=("$hash")
    done
    hash_files="$(echo "${hashes[@]}" | sha256sum | cut -d' ' -f1)" # now, hash the hashes
    hash_files="${hash_files:0:16}" # shorten it to 16 characters
    display_alert "Hash for files:" "$hash_files" "debug"
}
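The "hash the hashes" scheme makes the cache key depend on the content of every input script: touch either file and the key changes, invalidating the cached patch. For clarity, the same idea in Python (not byte-for-byte compatible with the bash above, since `echo | sha256sum` hashes a space-joined, newline-terminated string):

```python
import hashlib

def calculate_hash_for_files(*file_names: str) -> str:
    per_file_hashes = []
    for file_name in file_names:
        with open(file_name, "rb") as f:
            per_file_hashes.append(hashlib.sha256(f.read()).hexdigest())
    # now, hash the hashes; shorten to 16 characters
    return hashlib.sha256(" ".join(per_file_hashes).encode()).hexdigest()[:16]
```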

function kernel_drivers_create_patches() {
    declare kernel_work_dir="${1}"
    declare kernel_git_revision="${2}"
    display_alert "Creating patches for kernel drivers" "version: '${KERNEL_MAJOR_MINOR}' kernel_work_dir:'${kernel_work_dir}'" "info"

    declare hash_files # any changes in these two files will trigger a cache miss.
    calculate_hash_for_files "${SRC}/lib/functions/compilation/patch/drivers_network.sh" "${SRC}/lib/functions/compilation/patch/drivers-harness.sh"

    declare cache_key_base="${KERNEL_MAJOR_MINOR}_${LINUXFAMILY}"
    declare cache_key="${cache_key_base}_${hash_files}"
    display_alert "Cache key base:" "$cache_key_base" "debug"
    display_alert "Cache key:" "$cache_key" "debug"

    declare cache_dir_base="${SRC}/cache/patch/kernel-drivers"
    mkdir -p "${cache_dir_base}"

    declare cache_target_file="${cache_dir_base}/${cache_key}.patch"

    # outer scope variables:
    kernel_drivers_patch_file="${cache_target_file}"
    kernel_drivers_patch_hash="${cache_key}"

    # If the target file exists, we can skip the patch creation.
    if [[ -f "${cache_target_file}" ]]; then
        display_alert "Using cached drivers patch file for ${LINUXFAMILY}-${KERNEL_MAJOR_MINOR}" "${cache_key}" "cachehit"
        return
    fi

    # if it does _not_ exist, first clear the base, so no old patches are left over
    run_host_command_logged rm -fv "${cache_dir_base}/${cache_key_base}*"

    # since it does not exist, go create it. this requires a working tree.
    declare target_patch_file="${cache_target_file}"

    # grab the date of kernel_git_revision into kernel_driver_commit_date; it will be used for the commit later
    declare kernel_driver_commit_date
    kernel_driver_commit_date=$(git -C "$kernel_work_dir" show -s --format=%ci "$kernel_git_revision")
    display_alert "Kernel driver commit date" "$kernel_driver_commit_date" "debug"

    display_alert "Preparing patch for drivers" "version: ${KERNEL_MAJOR_MINOR} kernel_work_dir: ${kernel_work_dir}" "info"

    kernel_drivers_prepare_harness "${kernel_work_dir}" "${kernel_git_revision}"
}

function kernel_drivers_prepare_harness() {
    declare kernel_work_dir="${1}"
    declare kernel_git_revision="${2}"
    declare -I kernel_driver_commit_date target_patch_file # outer scope variables

    declare -a drivers=(
        driver_rtl8152_rtl8153
        driver_rtl8189ES
        driver_rtl8189FS
        driver_rtl8192EU
        driver_rtl8811_rtl8812_rtl8814_rtl8821
        driver_xradio_xr819
        driver_rtl8811CU_rtl8821C
        driver_rtl8188EU_rtl8188ETV
        driver_rtl88x2bu
        driver_rtl88x2cs
        driver_rtl8822cs_bt
        driver_rtl8723DS
        driver_rtl8723DU
        driver_rtl8822BS
    )

    # change cwd to the kernel working dir
    cd "${kernel_work_dir}" || exit_with_error "Failed to change directory to ${kernel_work_dir}"

    #run_host_command_logged git status
    run_host_command_logged git reset --hard "${kernel_git_revision}"
    # git: remove untracked files, but not those in .gitignore
    run_host_command_logged git clean -fd # no -x here

    for driver in "${drivers[@]}"; do
        display_alert "Preparing driver" "${driver}" "info"

        # reset variables used by each driver
        declare version="${KERNEL_MAJOR_MINOR}"
        declare kernel_work_dir="${1}"
        declare kernel_git_revision="${2}"
        # for compatibility with `master`-based code
        declare kerneldir="${kernel_work_dir}"
        declare EXTRAWIFI="yes" # forced! @TODO not really?

        # change cwd to the kernel working dir
        cd "${kernel_work_dir}" || exit_with_error "Failed to change directory to ${kernel_work_dir}"

        # invoke the driver; non-armbian-next code.
        "${driver}"

        # recover from possible cwd changes in the driver code
        cd "${kernel_work_dir}" || exit_with_error "Failed to change directory to ${kernel_work_dir}"
    done

    # git: check if there are modifications
    if [[ -n "$(git status --porcelain)" ]]; then
        display_alert "Drivers have modifications" "exporting patch into ${target_patch_file}" "info"
        export_changes_as_patch_via_git_format_patch
    else
        exit_with_error "Applying drivers didn't produce changes."
    fi
}
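The harness flow above is: reset the work tree to the pristine kernel revision, run each legacy `driver_*` function (which drops driver sources into the tree), then capture every modification as a single commit and export it as one patch. A conceptual sketch of the same flow using GitPython (which patching_utils already uses elsewhere in this commit); `run_drivers` is a hypothetical stand-in for the loop above:

```python
import git

def generate_drivers_patch(kernel_work_dir: str, revision: str, run_drivers) -> str:
    repo = git.Repo(kernel_work_dir)
    repo.git.reset("--hard", revision)  # back to the pristine kernel tree
    repo.git.clean("-fd")               # remove untracked files (no -x: keep .gitignore'd ones)
    run_drivers()                       # mutates the work tree, like the driver_* functions
    repo.git.add(all=True)
    repo.index.commit("drivers patch")  # the real harness also sets author, committer and dates
    return repo.git.format_patch("-1", "--stdout", "--zero-commit")
```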

function export_changes_as_patch_via_git_format_patch() {
    # git: add all modifications
    run_host_command_logged git add . "&>/dev/null"

    # git: commit the changes
    declare -a commit_params=(
        -m "drivers for ${LINUXFAMILY} version ${KERNEL_MAJOR_MINOR}"
        --date="${kernel_driver_commit_date}"
        --author="${MAINTAINER} <${MAINTAINERMAIL}>"
    )
    GIT_COMMITTER_NAME="${MAINTAINER}" GIT_COMMITTER_EMAIL="${MAINTAINERMAIL}" git commit "${commit_params[@]}" &> /dev/null

    # export the commit as a patch; first to a temporary file, then move it to the target location if they're not the same
    declare formatpatch_params=(
        "-1" "--stdout"
        "--unified=3" # force 3 lines of diff context
        "--keep-subject" # do not add the "[PATCH] " prefix to the subject
        "--no-encode-email-headers" # do not encode email headers
        '--signature' "Armbian generated patch from drivers for kernel ${version} and family ${LINUXFAMILY}"
        '--stat=120' # 'wider' stat output; default is 80
        '--stat-graph-width=10' # shorten the diffstat graph part; the default is too long
        "--zero-commit" # output an all-zero hash in each patch's From header instead of the commit's hash
    )
    git format-patch "${formatpatch_params[@]}" > "${target_patch_file}"
}
@@ -16,6 +16,7 @@ function prepare_armbian_mountpoints_description_dict() {
        "cache/sources/linux-kernel-worktree"
        "cache/sources/u-boot-worktree"
        "cache/ccache"
        "cache/patch"
    )

    declare -A -g ARMBIAN_MOUNTPOINTS_DESC_DICT=(
@@ -35,6 +36,7 @@ function prepare_armbian_mountpoints_description_dict() {
        ["cache/sources/linux-kernel-worktree"]="docker_kind_linux=bind docker_kind_darwin=namedvolume" # working tree for kernel builds. huge. contains both sources and the built object files. needs to be local to the container, so it's a volume by default. On Linux, it's a bind-mount by default.
        ["cache/sources/u-boot-worktree"]="docker_kind_linux=bind docker_kind_darwin=namedvolume" # working tree for u-boot. large. contains both sources and the built object files. needs to be local to the container, so it's a volume by default. On Linux, it's a bind-mount by default.
        ["cache/ccache"]="docker_kind_linux=bind docker_kind_darwin=namedvolume" # ccache object store. limited to 5gb by default. needs to be local to the container, so it's a volume by default. On Linux, it's a bind-mount by default.
        ["cache/patch"]="docker_kind_linux=bind docker_kind_darwin=namedvolume" # auto-generated patches (for kernel drivers, etc); large patches, so keep it as local as possible.
    )

    # These, if found, will be removed on `dockerpurge` and other cleanups.

@@ -54,9 +54,9 @@ def setup_logging():
    level = "DEBUG"
    format = "%(message)s"
    styles = {
        'trace': {'color': 'white', },
        'debug': {'color': 'white'},
        'info': {'color': 'white', 'bold': True},
        'trace': {'color': 'white', 'bold': False},
        'debug': {'color': 'white', 'bold': False},
        'info': {'color': 'green', 'bold': True},
        'warning': {'color': 'yellow', 'bold': True},
        'error': {'color': 'red'},
        'critical': {'bold': True, 'color': 'red'}

@@ -5,11 +5,13 @@ import mailbox
import os
import re
import subprocess
import tempfile

import git  # GitPython
from unidecode import unidecode
from unidiff import PatchSet

REGEX_PATCH_FILENAMES = r"^patching file \"(.+)\""
log: logging.Logger = logging.getLogger("patching_utils")


@@ -36,6 +38,7 @@ class PatchDir:
        self.root_type = self.patch_root_dir.root_type
        self.sub_type = self.patch_sub_dir.sub_type
        self.patch_files: list[PatchFileInDir] = []
        self.is_autogen_dir: bool = False

    def __str__(self) -> str:
        return "<PatchDir: full_dir:'" + str(self.full_dir) + "'>"
@@ -117,6 +120,18 @@ class PatchFileInDir:
        return os.path.join(self.patch_dir.rel_dir, self.file_name)

    def split_patches_from_file(self) -> list["PatchInPatchFile"]:
        # Hack: for autogen dirs, we just need to be as fast as possible; don't parse anything.
        if self.patch_dir.is_autogen_dir:
            contents_bytes = read_file_as_bytes(self.full_file_path())
            # @TODO: date?
            bare_patch = PatchInPatchFile(
                self, 1, "", f"Autogenerated patch",
                f"Armbian Autopatcher <auto.patch@armbian.com>",
                f"[AUTOGEN] {self.relative_dirs_and_base_file_name}", None)
            bare_patch.diff_bytes = contents_bytes
            log.warning(f"Patch file {self.full_file_path()} is autogenerated.")
            return [bare_patch]

        counter: int = 1
        mbox: mailbox.mbox = mailbox.mbox(self.full_file_path())
        is_invalid_mbox: bool = False
@@ -199,6 +214,13 @@ def shorten_patched_file_name_for_stats(path):
    return os.path.basename(path)


def parse_patch_stdout_for_files(stdout_output: str):
    # run REGEX_PATCH_FILENAMES on the output; get group 1 (the filename) for each match
    ret: list[str] = re.findall(REGEX_PATCH_FILENAMES, stdout_output, re.MULTILINE)
    # log.debug(f"Found {len(ret)} patched files in patch output: {','.join(ret)}.")
    return ret
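For illustration, here is what `parse_patch_stdout_for_files()` extracts from typical `patch` output; the `--quoting-style=c` flag added to the `patch` invocation further down is what guarantees the quoted filenames this regex expects (the sample output below is made up):

```python
sample_stdout = (
    'patching file "drivers/net/wireless/rtl8189es/Makefile"\n'
    'Hunk #1 succeeded at 7120 (offset 42 lines).\n'
    'patching file "drivers/net/wireless/rtl8189es/core/rtw_ap.c"'
)
print(parse_patch_stdout_for_files(sample_stdout))
# -> ['drivers/net/wireless/rtl8189es/Makefile', 'drivers/net/wireless/rtl8189es/core/rtw_ap.c']
```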

class PatchInPatchFile:

    def __init__(self, parent: PatchFileInDir, counter: int, diff: str, desc, from_hdr, sbj_hdr, date_hdr):
@@ -206,10 +228,12 @@ class PatchInPatchFile:
        self.applied_ok: bool = False
        self.rewritten_patch: str | None = None
        self.git_commit_hash: str | None = None
        self.actually_patched_files: list[str] = []

        self.parent: PatchFileInDir = parent
        self.counter: int = counter
        self.diff: str = diff
        self.diff: str | None = diff
        self.diff_bytes: bytes | None = None

        self.failed_to_parse: bool = False

@@ -254,15 +278,22 @@ class PatchInPatchFile:
        return f"(+{self.total_additions}/-{self.total_deletions})[{', '.join(operations)}]"

    def parse_patch(self):
        # parse the patch, using the unidiff package
        try:
            patch = PatchSet(self.diff, encoding=None)
        except Exception as e:
            self.problems.append("invalid_diff")
            self.failed_to_parse = True
            log.error(
                f"Failed to parse unidiff for file {self.parent.full_file_path()}(:{self.counter}): {str(e).strip()}")
            return  # no point in continuing; the patch is invalid; might be recovered during apply
        # Hack: don't parse if autogenned; this could also be "don't parse if larger than X megabytes", since
        # large patches cause trouble
        if self.parent.patch_dir.is_autogen_dir:
            log.warning(
                f"Skipping parsing of auto-generated patch {self.counter} in file {self.parent.full_file_path()}")
            return
        else:
            # parse the patch, using the unidiff package
            try:
                patch = PatchSet(self.diff, encoding=None)
            except Exception as e:
                self.problems.append("invalid_diff")
                self.failed_to_parse = True
                log.error(
                    f"Failed to parse unidiff for file {self.parent.full_file_path()}(:{self.counter}): {str(e).strip()}")
                return  # no point in continuing; the patch is invalid; might be recovered during apply

        self.total_additions = 0
        self.total_deletions = 0
@@ -315,20 +346,38 @@ class PatchInPatchFile:
            os.remove(full_path)

        # Use the 'patch' utility to apply the patch.
        if self.diff_bytes is None:
            real_input = self.diff.encode("utf-8")
        else:
            real_input = self.diff_bytes

        # create a temporary filename (don't create the file yet: patch will maybe create it)
        rejects_file = tempfile.mktemp()
        # log.debug(f"Rejects file is going to be '{rejects_file}'...")

        proc = subprocess.run(
            ["patch", "--batch", "-p1", "-N", "--reject-file=patching.rejects"],
            ["patch", "--batch", "-p1", "-N", f"--reject-file={rejects_file}", "--quoting-style=c"],
            cwd=working_dir,
            input=self.diff.encode("utf-8"),
            input=real_input,
            stdout=subprocess.PIPE,
            stderr=subprocess.PIPE,
            check=False)
        # read the output of the patch command
        stdout_output = proc.stdout.decode("utf-8").strip()
        stderr_output = proc.stderr.decode("utf-8").strip()
        if stdout_output != "":
            log.debug(f"patch stdout: {stdout_output}")
        if stderr_output != "":
            log.warning(f"patch stderr: {stderr_output}")
        # if stdout_output != "":
        #     log.debug(f"patch stdout: {stdout_output}")
        # if stderr_output != "":
        #     log.warning(f"patch stderr: {stderr_output}")

        # Check if the rejects file exists:
        if os.path.exists(rejects_file):
            log.warning(f"Rejects file {rejects_file} exists.")
            # Show its contents
            with open(rejects_file, "r") as f:
                log.warning(f"Rejects file contents: {f.read()}")
            # delete it
            os.remove(rejects_file)

        # Look at stdout. If it contains:
        if " (offset" in stdout_output or " with fuzz " in stdout_output:
@@ -339,6 +388,11 @@ class PatchInPatchFile:
            log.warning(f"Patch {self} needs review: can't find file to patch.")
            self.problems.append("missing_file")

        # parse the stdout output for the files actually patched.
        if options["set_patch_date"]:
            self.actually_patched_files = parse_patch_stdout_for_files(stdout_output)
            self.apply_patch_date_to_files(working_dir, options)

        # Check if the exit code is not zero, and bomb
        if proc.returncode != 0:
            # prefix each line of the stderr_output with "STDERR: ", then join again
@@ -426,6 +480,9 @@ class PatchInPatchFile:
        if self.parent.from_series:
            ret.append(f" 📜 ")

        if self.parent.patch_dir.is_autogen_dir:
            ret.append(f" 🤖 ")

        if len(self.problems) == 0:
            ret.append("✅ ")

@@ -433,6 +490,8 @@ class PatchInPatchFile:
            if problem in ["not_mbox", "needs_rebase"]:
                # warning emoji
                ret.append(f"⚠️`[{problem}]` ")
            elif problem in ["autogen"]:
                ret.append(f"ℹ️`[{problem}]` ")
            else:
                ret.append(f"❌`[{problem}]` ")

@@ -485,6 +544,26 @@ class PatchInPatchFile:
        ret.append(f"`{patch_name}`")
        return " ".join(ret)

    def apply_patch_date_to_files(self, working_dir, options):
        # The date applied to the patched files is the later of:
        # 1) the date of the root Makefile
        # 2) the date of the patch file
        # i.e. max(patch_date, root_makefile_date).
        patch_mtime = os.path.getmtime(self.parent.full_file_path())
        makefile_mtime = options["root_makefile_date"]
        final_mtime = makefile_mtime
        if patch_mtime > makefile_mtime:
            log.debug(f"Patch {self.parent.full_file_path()} is newer than root Makefile, using patch date")
            final_mtime = patch_mtime
        else:
            log.warn(
                f"Root Makefile is newer than patch '{self.parent.full_file_path()}', using Makefile date")
        # Apply the date to all files that were touched by the patch
        for file_name in self.actually_patched_files:
            # log.debug(f"Setting mtime of '{file_name}' to '{final_mtime}'.")
            file_path = os.path.join(working_dir, file_name)
            os.utime(file_path, (final_mtime, final_mtime))

def fix_patch_subject(subject):
    # replace newlines with one space
@@ -547,10 +626,6 @@ def export_commit_as_patch(repo: git.Repo, commit: str):
    # read the output of the patch command
    stdout_output = proc.stdout.decode("utf-8")
    stderr_output = proc.stderr.decode("utf-8")
    # if stdout_output != "":
    #     print(f"git format-patch stdout: \n{stdout_output}", file=sys.stderr)
    # if stderr_output != "":
    #     print(f"git format-patch stderr: {stderr_output}", file=sys.stderr)
    # Check if the exit code is not zero and bomb
    if proc.returncode != 0:
        raise Exception(f"Failed to export commit {commit} to patch: {stderr_output}")
@@ -574,11 +649,17 @@ def read_file_as_utf8(file_name: str) -> tuple[str, list[str]]:
        content = f.read()  # Read the file as bytes
        try:
            return content.decode("utf-8"), []  # no problems if this worked
        except UnicodeDecodeError:
        except UnicodeDecodeError as ude:
            log.warning(f"File '{file_name}' is not valid utf-8, trying to fix it...: '{ude}'")
            # If decoding failed, try to decode as iso-8859-1
            return content.decode("iso-8859-1"), ["invalid_utf8"]  # utf-8 problems


def read_file_as_bytes(file_name: str) -> bytes:
    with open(file_name, "rb") as f:
        return f.read()  # Read the file as bytes
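The iso-8859-1 fallback can never fail, since every possible byte maps to a code point in that encoding; the `invalid_utf8` marker just flags the patch for attention. A tiny illustration of the two paths:

```python
raw = "Gérard".encode("iso-8859-1")  # 0xE9 for 'é': not a valid utf-8 sequence
try:
    raw.decode("utf-8")              # raises UnicodeDecodeError, taking the fallback path
except UnicodeDecodeError:
    print(raw.decode("iso-8859-1"))  # always succeeds: prints 'Gérard'
```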

# Extremely Armbian-specific.
def perform_git_archeology(
        base_armbian_src_dir: str, armbian_git_repo: git.Repo, patch: PatchInPatchFile,

@@ -16,6 +16,8 @@ log: logging.Logger = logging.getLogger("patching")
# Show the environment variables we've been called with
armbian_utils.show_incoming_environment()

# @TODO: test that "patch --version" is >= 2.7.6 using a subprocess and parsing the output.

# Let's start by reading environment variables.
# Those are always needed, and we should bomb if they're not set.
SRC = armbian_utils.get_from_env_or_bomb("SRC")
@@ -32,7 +34,10 @@ apply_patches_to_git = PATCHES_TO_GIT == "yes"
git_archeology = GIT_ARCHEOLOGY == "yes"
fast_archeology = FAST_ARCHEOLOGY == "yes"
rewrite_patches_in_place = REWRITE_PATCHES == "yes"
apply_options = {"allow_recreate_existing_files": (ALLOW_RECREATE_EXISTING_FILES == "yes")}
apply_options = {
    "allow_recreate_existing_files": (ALLOW_RECREATE_EXISTING_FILES == "yes"),
    "set_patch_date": True,
}
# Those are optional.
GIT_WORK_DIR = armbian_utils.get_from_env("GIT_WORK_DIR")
@@ -42,12 +47,15 @@ USERPATCHES_PATH = armbian_utils.get_from_env("USERPATCHES_PATH")

# Some path possibilities
CONST_PATCH_ROOT_DIRS = []

for patch_dir_to_apply in PATCH_DIRS_TO_APPLY:
    if USERPATCHES_PATH is not None:
        CONST_PATCH_ROOT_DIRS.append(
            patching_utils.PatchRootDir(
                f"{USERPATCHES_PATH}/{PATCH_TYPE}/{patch_dir_to_apply}", "user", PATCH_TYPE,
                USERPATCHES_PATH))

    # regular patchset
    CONST_PATCH_ROOT_DIRS.append(
        patching_utils.PatchRootDir(f"{SRC}/patch/{PATCH_TYPE}/{patch_dir_to_apply}", "core", PATCH_TYPE, SRC))

@@ -65,6 +73,27 @@ for patch_root_dir in CONST_PATCH_ROOT_DIRS:
    for patch_sub_dir in CONST_PATCH_SUB_DIRS:
        ALL_DIRS.append(patching_utils.PatchDir(patch_root_dir, patch_sub_dir, SRC))

PATCH_FILES_FIRST: list[patching_utils.PatchFileInDir] = []
EXTRA_PATCH_FILES_FIRST: list[str] = armbian_utils.parse_env_for_tokens("EXTRA_PATCH_FILES_FIRST")
EXTRA_PATCH_HASHES_FIRST: list[str] = armbian_utils.parse_env_for_tokens("EXTRA_PATCH_HASHES_FIRST")

for patch_file in EXTRA_PATCH_FILES_FIRST:
    # if the file does not exist, bomb.
    if not os.path.isfile(patch_file):
        raise Exception(f"File {patch_file} does not exist.")

    # get the directory name of the file path
    patch_dir = os.path.dirname(patch_file)

    # Fabricate fake dirs...
    driver_root_dir = patching_utils.PatchRootDir(patch_dir, "extra-first", PATCH_TYPE, SRC)
    driver_sub_dir = patching_utils.PatchSubDir("", "extra-first")
    driver_dir = patching_utils.PatchDir(driver_root_dir, driver_sub_dir, SRC)
    driver_dir.is_autogen_dir = True
    PATCH_FILES_FIRST.append(patching_utils.PatchFileInDir(patch_file, driver_dir))

log.info(f"Found {len(PATCH_FILES_FIRST)} kernel driver patches")

SERIES_PATCH_FILES: list[patching_utils.PatchFileInDir] = []
# Now, loop over ALL_DIRS, and find the patch files in each directory
for one_dir in ALL_DIRS:
@@ -92,7 +121,8 @@ for one_patch_file in ALL_DIR_PATCH_FILES:
# This reflects the order in which we want to apply the patches.
# For series-based patches, we want to apply the patches from the series file first.
# The other patches are sorted separately.
ALL_PATCH_FILES_SORTED = SERIES_PATCH_FILES + list(dict(sorted(ALL_DIR_PATCH_FILES_BY_NAME.items())).values())
ALL_PATCH_FILES_SORTED = PATCH_FILES_FIRST + SERIES_PATCH_FILES + \
    list(dict(sorted(ALL_DIR_PATCH_FILES_BY_NAME.items())).values())

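The resulting order is: autogenerated driver patches first, then patches listed in `series` files, then all remaining patch files sorted by name. A toy illustration with made-up names (the real lists hold PatchFileInDir objects, not strings):

```python
PATCH_FILES_FIRST = ["kernel-drivers.patch"]           # autogen, always applied first
SERIES_PATCH_FILES = ["0002-b.patch", "0001-a.patch"]  # order comes from the series file
ALL_DIR_PATCH_FILES_BY_NAME = {"zz.patch": "zz.patch", "aa.patch": "aa.patch"}

order = PATCH_FILES_FIRST + SERIES_PATCH_FILES + \
    list(dict(sorted(ALL_DIR_PATCH_FILES_BY_NAME.items())).values())
print(order)  # ['kernel-drivers.patch', '0002-b.patch', '0001-a.patch', 'aa.patch', 'zz.patch']
```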
# Now, actually read the patch files.
# Patch files might be in mailbox format, and in that case contain more than one "patch".
@@ -100,6 +130,7 @@ ALL_PATCH_FILES_SORTED = SERIES_PATCH_FILES + list(dict(sorted(ALL_DIR_PATCH_FIL
# We need to read the file, and see if it's a mailbox file; if so, split it into multiple patches.
# If not, just use the whole file as a single patch.
# We'll store the patches in a list of Patch objects.
log.info("Splitting patch files into patches")
VALID_PATCHES: list[patching_utils.PatchInPatchFile] = []
patch_file_in_dir: patching_utils.PatchFileInDir
for patch_file_in_dir in ALL_PATCH_FILES_SORTED:
@@ -112,10 +143,12 @@ for patch_file_in_dir in ALL_PATCH_FILES_SORTED:
                f"Can't continue; please fix the patch file {patch_file_in_dir.full_file_path()} manually. Sorry.",
                exc_info=True)
            exit(1)
log.info("Done splitting patch files into patches")

# Now, some patches might not be mbox-formatted, or otherwise invalid. We can try to recover those.
# That is only possible if we're applying patches to git.
# Rebuilding the description is only possible if we have the git repo where the patches themselves reside.
log.info("Parsing patches...")
for patch in VALID_PATCHES:
    try:
        patch.parse_patch()  # this handles diff-level parsing; modifies itself; throws exception if invalid
@@ -165,6 +198,10 @@ if apply_patches:

# Loop over the VALID_PATCHES, and apply them
log.info(f"- Applying {len(VALID_PATCHES)} patches...")
# Grab the date of the root Makefile; that is the minimum date for the patched files.
root_makefile = os.path.join(GIT_WORK_DIR, "Makefile")
apply_options["root_makefile_date"] = os.path.getmtime(root_makefile)
log.info(f"- Root Makefile '{root_makefile}' date: '{os.path.getmtime(root_makefile)}'")
for one_patch in VALID_PATCHES:
    log.info(f"Applying patch {one_patch}")
    one_patch.applied_ok = False