Mirror of https://github.com/armbian/build, synced 2025-09-24 19:47:06 +07:00
armbian-next: artifacts: introduce kernel/u-boot artifacts; git ref2info; "memoizer"; some hashing
- artifacts: u-boot/kernel - pt7 - adapt legacy/artifact versions; use common `capture_rename_legacy_debs_into_artifacts()`
- artifacts: u-boot - pt6: add artifact for u-boot
- use artifact version / reason in actual u-boot .deb if present
- artifacts: kernel - pt5: tune kernel version, refactor
- artifacts: kernel - pt4: squash unrelated bugs that show up; move `prepare_compilation_vars()` to default build
- artifacts: kernel - pt3: drivers+patches+.config hashing
- split file hashing function from drivers-harness; fix it so filenames are relative and sorted; sort from ${SRC}, always
- split prepare_kernel_config_core_or_userpatches() from `kernel_config_initialize()`
- artifacts: kernel - pt2: memoizing git ref2info
- artifacts: kernel - pt1: versioning
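
For orientation, a minimal sketch (not part of the commit) of the call chain the new "artifact" CLI command wires up, based on the functions added below; WHAT selects the artifact and defaults to "kernel":

# cli_artifact_pre_run
#   initialize_artifact "${WHAT:-kernel}"          # resolves the handler via ARMBIAN_ARTIFACTS_TO_HANDLERS_DICT
#     create_artifact_functions                    # eval's artifact_*() wrappers around artifact_kernel_*() / artifact_uboot_*()
#   artifact_cli_adapter_pre_run                   # root / docker relaunch
# cli_artifact_run
#   artifact_cli_adapter_config_prep               # minimal, non-interactive board/family config
#   do_with_default_build obtain_complete_artifact
#     artifact_prepare_version                     # fills artifact_version, artifact_version_reason and the version maps
#     artifact_is_available_in_local_cache / artifact_is_available_in_remote_cache / artifact_obtain_from_remote_cache
#     artifact_build_from_sources                  # falls back to compile_kernel / compile_uboot, then renames the legacy debs
#     artifact_deploy_to_remote_cache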
lib/functions/artifacts/artifacts-registry.sh (new file, 10 lines)
@@ -0,0 +1,10 @@
function armbian_register_artifacts() {
declare -g -A ARMBIAN_ARTIFACTS_TO_HANDLERS_DICT=(
#["firmware"]="firmware"
["kernel"]="kernel"
["u-boot"]="uboot"
["uboot"]="uboot"
)

}
lib/functions/artifacts/kernel.sh (new file, 150 lines)
@@ -0,0 +1,150 @@
function artifact_kernel_cli_adapter_pre_run() {
declare -g ARMBIAN_COMMAND_REQUIRE_BASIC_DEPS="yes" # Require prepare_host_basic to run before the command.

# "gimme root on a Linux machine"
cli_standard_relaunch_docker_or_sudo
}

function artifact_kernel_cli_adapter_config_prep() {
declare KERNEL_ONLY="yes" # @TODO: this is a hack, for the board/family code's benefit...
use_board="yes" prep_conf_main_minimal_ni < /dev/null # no stdin for this, so it bombs if tries to be interactive.
}

function artifact_kernel_prepare_version() {
display_alert "artifact_kernel_XXXXXX" "artifact_kernel_XXXXXX" "warn"
artifact_version="undetermined" # outer scope
artifact_version_reason="undetermined" # outer scope

# Prepare the version, "sans-repos": just the armbian/build repo contents are available.
# It is OK to reach out to the internet for a curl or ls-remote, but not for a git clone.

# - Given KERNELSOURCE and KERNELBRANCH, get:
# - SHA1 of the commit (this is generic... and used for other pkgs)
# - The first 10 lines of the root Makefile at that commit (cached lookup, same SHA1=same Makefile)
# - This gives us the full version plus codename.
# - Make sure this is sane, ref KERNEL_MAJOR_MINOR.
# - Get the drivers patch hash (given LINUXFAMILY and the vX.Z.Y version)
# - Get the kernel patches hash. (could just hash the KERNELPATCHDIR non-disabled contents, or use Python patching proper?)
# - Get the kernel .config hash, composed of
# - KERNELCONFIG? .config hash
# - extensions mechanism, have an array of hashes that is then hashed together.
# - Hash of the relevant lib/ bash sources involved, say compilation-kernel*.sh etc
# All those produce a version string like:
# 6.1.8-<4-digit-SHA1>_<4_digit_drivers>-<4_digit_patches>-<4_digit_config>-<4_digit_libs>
# 6.2-rc5-a0b1-c2d3-e4f5-g6h7-i8j9

debug_var BRANCH
debug_var REVISION
debug_var KERNELSOURCE
debug_var KERNELBRANCH
debug_var LINUXFAMILY
debug_var BOARDFAMILY
debug_var KERNEL_MAJOR_MINOR
debug_var KERNELPATCHDIR

declare short_hash_size=4

declare -A GIT_INFO=([GIT_SOURCE]="${KERNELSOURCE}" [GIT_REF]="${KERNELBRANCH}")
run_memoized GIT_INFO "git2info" memoized_git_ref_to_info "include_makefile_body"
debug_dict GIT_INFO

declare short_sha1="${GIT_INFO[SHA1]:0:${short_hash_size}}"

# get the drivers hash...
declare kernel_drivers_patch_hash
LOG_SECTION="kernel_drivers_create_patches_hash_only" do_with_logging do_with_hooks kernel_drivers_create_patches_hash_only
declare kernel_drivers_hash_short="${kernel_drivers_patch_hash:0:${short_hash_size}}"

# get the kernel patches hash...
# @TODO: why not just delegate this to the python patching, with some "dry-run" / hash-only option?
declare patches_hash="undetermined"
declare hash_files="undetermined"
calculate_hash_for_all_files_in_dirs "${SRC}/patch/kernel/${KERNELPATCHDIR}" "${USERPATCHES_PATH}/kernel/${KERNELPATCHDIR}"
patches_hash="${hash_files}"
declare kernel_patches_hash_short="${patches_hash:0:${short_hash_size}}"

# get the .config hash... also userpatches...
declare kernel_config_source_filename="" # which actual .config was used?
prepare_kernel_config_core_or_userpatches
declare hash_files="undetermined"
calculate_hash_for_files "${kernel_config_source_filename}"
config_hash="${hash_files}"
declare config_hash_short="${config_hash:0:${short_hash_size}}"

# @TODO: get the extensions' .config modifying hashes...
# @TODO: include the compiler version? host release?

# get the hashes of the lib/ bash sources involved...
declare hash_files="undetermined"
calculate_hash_for_files "${SRC}"/lib/functions/compilation/kernel*.sh # maybe also this file, "${SRC}"/lib/functions/artifacts/kernel.sh
declare bash_hash="${hash_files}"
declare bash_hash_short="${bash_hash:0:${short_hash_size}}"

# outer scope
artifact_version="${GIT_INFO[MAKEFILE_VERSION]}-S${short_sha1}-D${kernel_drivers_hash_short}-P${kernel_patches_hash_short}-C${config_hash_short}-B${bash_hash_short}"
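# For illustration only, with hypothetical hash values: the assignment above yields something like
#   artifact_version="6.1.8-S4cc3-D9f2a-P1b7e-C55d0-B8a3c"
# (Makefile version, then short git SHA1, drivers hash, patches hash, .config hash, framework bash hash).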
# @TODO: validate it begins with a digit, and is at max X chars long.

declare -a reasons=(
"version \"${GIT_INFO[MAKEFILE_FULL_VERSION]}\""
"git revision \"${GIT_INFO[SHA1]}\""
"codename \"${GIT_INFO[MAKEFILE_CODENAME]}\""
"drivers hash \"${kernel_drivers_patch_hash}\""
"patches hash \"${patches_hash}\""
".config hash \"${config_hash}\""
"framework bash hash \"${bash_hash}\""
)

artifact_version_reason="${reasons[*]}" # outer scope # @TODO better

# map what "compile_kernel()" will produce - legacy deb names and versions
artifact_map_versions_legacy=(
["linux-image-${BRANCH}-${LINUXFAMILY}"]="${REVISION}"
["linux-dtb-${BRANCH}-${LINUXFAMILY}"]="${REVISION}"
["linux-headers-${BRANCH}-${LINUXFAMILY}"]="${REVISION}"
)

# now, one for each file in the artifact... we've 3 packages produced, all the same version
artifact_map_versions=(
["linux-image-${BRANCH}-${LINUXFAMILY}"]="${artifact_version}"
["linux-dtb-${BRANCH}-${LINUXFAMILY}"]="${artifact_version}"
["linux-headers-${BRANCH}-${LINUXFAMILY}"]="${artifact_version}"
)

return 0
}

function artifact_kernel_is_available_in_local_cache() {
display_alert "artifact_kernel_XXXXXX" "artifact_kernel_XXXXXX" "warn"
# Check if the exact DEB exists on disk (output/debs), nothing else.
# This is more about composing the .deb filename than checking if it exists.
}

function artifact_kernel_is_available_in_remote_cache() {
display_alert "artifact_kernel_XXXXXX" "artifact_kernel_XXXXXX" "warn"
# Check if the DEB can be obtained remotely, eg:
# - in ghcr.io (via ORAS)
# - in an apt repo (via apt-get), eg, Armbian's repo.
# this is only about availability, not download. use HEAD requests / metadata-only pulls
# what about multiple possible OCI endpoints / URLs? try them all?
}

function artifact_kernel_obtain_from_remote_cache() {
display_alert "artifact_kernel_XXXXXX" "artifact_kernel_XXXXXX" "warn"
# Having confirmed it is available remotely, go download it into the local cache.
# is_available_in_local_cache() must return =yes after this.
# could be a good idea to transfer some SHA256 id from "is_available" to "obtain" to avoid overhead? or just do it together?
}

function artifact_kernel_build_from_sources() {
display_alert "artifact_kernel_XXXXXX" "artifact_kernel_XXXXXX" "warn"
# having failed all the cache obtaining, build it from sources.
compile_kernel

capture_rename_legacy_debs_into_artifacts
}

function artifact_kernel_deploy_to_remote_cache() {
display_alert "artifact_kernel_XXXXXX" "artifact_kernel_XXXXXX" "warn"
# having built a new artifact, deploy it to the remote cache.
# consider multiple targets, retries, etc.
}
lib/functions/artifacts/u-boot.sh (new file, 130 lines)
@@ -0,0 +1,130 @@
function artifact_uboot_cli_adapter_pre_run() {
declare -g ARMBIAN_COMMAND_REQUIRE_BASIC_DEPS="yes" # Require prepare_host_basic to run before the command.

# "gimme root on a Linux machine"
cli_standard_relaunch_docker_or_sudo
}

function artifact_uboot_cli_adapter_config_prep() {
declare KERNEL_ONLY="yes" # @TODO: this is a hack, for the board/family code's benefit...
use_board="yes" prep_conf_main_minimal_ni < /dev/null # no stdin for this, so it bombs if tries to be interactive.
}

function artifact_uboot_prepare_version() {
display_alert "artifact_uboot_XXXXXX" "artifact_uboot_XXXXXX" "warn"
artifact_version="undetermined" # outer scope
artifact_version_reason="undetermined" # outer scope

# Prepare the version, "sans-repos": just the armbian/build repo contents are available.
# It is OK to reach out to the internet for a curl or ls-remote, but not for a git clone/fetch.

# - Given BOOTSOURCE and BOOTBRANCH, get:
# - SHA1 of the commit (this is generic... and used for other pkgs)
# - The first 10 lines of the root Makefile at that commit (cached lookup, same SHA1=same Makefile)
# - This gives us the full version plus codename.
# - Get the u-boot patches hash. (could just hash the BOOTPATCHDIR non-disabled contents, or use Python patching proper?)
# - Hash of the relevant lib/ bash sources involved, say compilation/uboot*.sh etc
# All those produce a version string like:
# 2023.11-<4-digit-SHA1>_<4_digit_patches>

debug_var BOOTSOURCE
debug_var BOOTBRANCH
debug_var BOOTPATCHDIR
debug_var BOARD

declare short_hash_size=4

declare -A GIT_INFO=([GIT_SOURCE]="${BOOTSOURCE}" [GIT_REF]="${BOOTBRANCH}")
run_memoized GIT_INFO "git2info" memoized_git_ref_to_info "include_makefile_body"
debug_dict GIT_INFO

declare short_sha1="${GIT_INFO[SHA1]:0:${short_hash_size}}"

# get the uboot patches hash...
# @TODO: why not just delegate this to the python patching, with some "dry-run" / hash-only option?
# @TODO: this is even more grave in case of u-boot: v2022.10 has patches for many boards inside, gotta resolve.
declare patches_hash="undetermined"
declare hash_files="undetermined"
calculate_hash_for_all_files_in_dirs "${SRC}/patch/u-boot/${BOOTPATCHDIR}" "${USERPATCHES_PATH}/u-boot/${BOOTPATCHDIR}"
patches_hash="${hash_files}"
declare uboot_patches_hash_short="${patches_hash:0:${short_hash_size}}"

# get the hashes of the lib/ bash sources involved...
declare hash_files="undetermined"
calculate_hash_for_files "${SRC}"/lib/functions/compilation/uboot*.sh # maybe also this file, "${SRC}"/lib/functions/artifacts/u-boot.sh
declare bash_hash="${hash_files}"
declare bash_hash_short="${bash_hash:0:${short_hash_size}}"

# outer scope
artifact_version="${GIT_INFO[MAKEFILE_VERSION]}-S${short_sha1}-P${uboot_patches_hash_short}-B${bash_hash_short}"
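# For illustration only, with hypothetical hash values: e.g. artifact_version="2022.10-S4deb-P1b7e-B8a3c"
# (u-boot Makefile version, short git SHA1, patches hash, framework bash hash).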
# @TODO: validate it begins with a digit, and is at max X chars long.

declare -a reasons=(
"version \"${GIT_INFO[MAKEFILE_FULL_VERSION]}\""
"git revision \"${GIT_INFO[SHA1]}\""
"patches hash \"${patches_hash}\""
"framework bash hash \"${bash_hash}\""
)

artifact_version_reason="${reasons[*]}" # outer scope # @TODO better

# now, one for each file in the artifact...
artifact_map_versions=(
["u-boot"]="${artifact_version}"
)

# map what "compile_uboot()" will produce - legacy deb names and versions
artifact_map_versions_legacy=(
["linux-u-boot-${BRANCH}-${BOARD}"]="${REVISION}"
)

# now, one for each file in the artifact... single package, so just one entry
artifact_map_versions=(
["linux-u-boot-${BRANCH}-${BOARD}"]="${artifact_version}"
)

return 0
}

function artifact_uboot_is_available_in_local_cache() {
display_alert "artifact_uboot_XXXXXX" "artifact_uboot_XXXXXX" "warn"
# Check if the exact DEB exists on disk (output/debs), nothing else.
# This is more about composing the .deb filename than checking if it exists.
}

function artifact_uboot_is_available_in_remote_cache() {
display_alert "artifact_uboot_XXXXXX" "artifact_uboot_XXXXXX" "warn"
# Check if the DEB can be obtained remotely, eg:
# - in ghcr.io (via ORAS)
# - in an apt repo (via apt-get), eg, Armbian's repo.
# this is only about availability, not download. use HEAD requests / metadata-only pulls
# what about multiple possible OCI endpoints / URLs? try them all?
}

function artifact_uboot_obtain_from_remote_cache() {
display_alert "artifact_uboot_XXXXXX" "artifact_uboot_XXXXXX" "warn"
# Having confirmed it is available remotely, go download it into the local cache.
# is_available_in_local_cache() must return =yes after this.
# could be a good idea to transfer some SHA256 id from "is_available" to "obtain" to avoid overhead? or just do it together?
}

function artifact_uboot_build_from_sources() {
display_alert "artifact_uboot_XXXXXX" "artifact_uboot_XXXXXX" "warn"
# having failed all the cache obtaining, build it from sources.

if [[ -n "${ATFSOURCE}" && "${ATFSOURCE}" != "none" ]]; then
LOG_SECTION="compile_atf" do_with_logging compile_atf
fi

declare uboot_git_revision="not_determined_yet"
LOG_SECTION="uboot_prepare_git" do_with_logging_unless_user_terminal uboot_prepare_git
LOG_SECTION="compile_uboot" do_with_logging compile_uboot

capture_rename_legacy_debs_into_artifacts # has its own logging section
}

function artifact_uboot_deploy_to_remote_cache() {
display_alert "artifact_uboot_XXXXXX" "artifact_uboot_XXXXXX" "warn"
# having built a new artifact, deploy it to the remote cache.
# consider multiple targets, retries, etc.
}
lib/functions/cli/cli-artifact.sh (new file, 109 lines)
@@ -0,0 +1,109 @@
function cli_artifact_pre_run() {
initialize_artifact "${WHAT:-"kernel"}"
# Run the pre run adapter
artifact_cli_adapter_pre_run
}

function cli_artifact_run() {
display_alert "artifact" "${chosen_artifact}" "warn"
display_alert "artifact" "${chosen_artifact} :: ${chosen_artifact_impl}()" "warn"
artifact_cli_adapter_config_prep # only if in cli.

# only if in cli, if not just run it bare, since we'd be already inside do_with_default_build
do_with_default_build obtain_complete_artifact < /dev/null
}

function create_artifact_functions() {
declare -a funcs=(
"cli_adapter_pre_run" "cli_adapter_config_prep"
"prepare_version"
"is_available_in_local_cache" "is_available_in_remote_cache" "obtain_from_remote_cache"
"deploy_to_remote_cache"
"build_from_sources"
)
for func in "${funcs[@]}"; do
declare impl_func="artifact_${chosen_artifact_impl}_${func}"
if [[ $(type -t "${impl_func}") == function ]]; then
declare cmd
cmd="$(
cat <<- ARTIFACT_DEFINITION
function artifact_${func}() {
display_alert "Calling artifact function" "${impl_func}() \$*" "warn"
${impl_func} "\$@"
}
ARTIFACT_DEFINITION
)"
eval "${cmd}"
else
exit_with_error "Missing artifact implementation function '${impl_func}'"
fi
done
}
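# For illustration (not part of the file): with chosen_artifact_impl="kernel", the eval above defines
# wrappers like the following, so generic code can call artifact_prepare_version() regardless of the artifact:
#   function artifact_prepare_version() {
#       display_alert "Calling artifact function" "artifact_kernel_prepare_version() $*" "warn"
#       artifact_kernel_prepare_version "$@"
#   }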
function initialize_artifact() {
declare -g chosen_artifact="${1}"
armbian_register_artifacts
declare -g chosen_artifact_impl="${ARMBIAN_ARTIFACTS_TO_HANDLERS_DICT["${chosen_artifact}"]}"
[[ "x${chosen_artifact_impl}x" == "xx" ]] && exit_with_error "Unknown artifact '${chosen_artifact}'"
display_alert "artifact" "${chosen_artifact} :: ${chosen_artifact_impl}()" "info"
create_artifact_functions
}

function obtain_complete_artifact() {
declare -g artifact_version="undetermined"
declare -g artifact_version_reason="undetermined"
declare -A -g artifact_map_versions=()
declare -A -g artifact_map_versions_legacy=()

# Check if REVISION is set, otherwise exit_with_error
[[ "x${REVISION}x" == "xx" ]] && exit_with_error "REVISION is not set"

artifact_prepare_version
debug_var artifact_version
debug_var artifact_version_reason
debug_dict artifact_map_versions_legacy
debug_dict artifact_map_versions

# @TODO the whole artifact upload/download dance
artifact_is_available_in_local_cache
artifact_is_available_in_remote_cache
artifact_obtain_from_remote_cache

artifact_build_from_sources

artifact_deploy_to_remote_cache
}

# This is meant to be run after config, inside default build.
function build_artifact() {
initialize_artifact "${WHAT:-"kernel"}"
obtain_complete_artifact
}

function capture_rename_legacy_debs_into_artifacts() {
LOG_SECTION="capture_rename_legacy_debs_into_artifacts" do_with_logging capture_rename_legacy_debs_into_artifacts_logged
}

function capture_rename_legacy_debs_into_artifacts_logged() {
# So the deb-building code will consider the artifact_version in its "Version: " field in the .debs.
# But it will produce .deb's with the legacy name. We gotta find and rename them.
# Loop over the artifact_map_versions, and rename the .debs.
debug_dict artifact_map_versions_legacy
debug_dict artifact_map_versions

declare deb_name_base deb_name_full new_name_full legacy_version legacy_base_version
for deb_name_base in "${!artifact_map_versions[@]}"; do
legacy_base_version="${artifact_map_versions_legacy[${deb_name_base}]}"
if [[ -z "${legacy_base_version}" ]]; then
exit_with_error "Legacy base version not found for artifact '${deb_name_base}'"
fi

display_alert "Legacy base version" "${legacy_base_version}" "info"
legacy_version="${legacy_base_version}_${ARCH}" # Arch-specific package; has ARCH at the end.
deb_name_full="${DEST}/debs/${deb_name_base}_${legacy_version}.deb"
new_name_full="${DEST}/debs/${deb_name_base}_${artifact_map_versions[${deb_name_base}]}_${ARCH}.deb"
display_alert "Full legacy deb name" "${deb_name_full}" "info"
display_alert "New artifact deb name" "${new_name_full}" "info"
run_host_command_logged mv -v "${deb_name_full}" "${new_name_full}"
done
}
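# For illustration, with hypothetical BRANCH/LINUXFAMILY/REVISION/ARCH values, the loop above renames e.g.
#   ${DEST}/debs/linux-image-edge-rockchip64_23.02.0-trunk_arm64.deb
#   -> ${DEST}/debs/linux-image-edge-rockchip64_6.1.8-S4cc3-D9f2a-P1b7e-C55d0-B8a3c_arm64.deb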
@@ -27,11 +27,15 @@ function armbian_register_commands() {

# shortcuts, see vars set below. these use the legacy single build, and try to control it via variables
["kernel"]="standard_build"
["kernel-config"]="standard_build"
["u-boot"]="standard_build"
["uboot"]="standard_build"

# external tooling, made easy.
["oras-upload"]="oras" # implemented in cli_oras_pre_run and cli_oras_run; up/down/info are the same, see vars below

# all-around artifact wrapper
["artifact"]="artifact" # implemented in cli_artifact_pre_run and cli_artifact_run

["undecided"]="undecided" # implemented in cli_undecided_pre_run and cli_undecided_run - relaunches either build or docker
)

@@ -48,6 +52,7 @@ function armbian_register_commands() {

["config-dump"]="CONFIG_DEFS_ONLY='yes'"
["configdump"]="CONFIG_DEFS_ONLY='yes'"

["kernel-config"]="KERNEL_ONLY='yes' JUST_KERNEL='yes' KERNEL_IGNORE_DEB='yes' KERNEL_CONFIGURE='yes'"
["kernel"]="KERNEL_ONLY='yes' JUST_KERNEL='yes' KERNEL_IGNORE_DEB='yes' KERNEL_CONFIGURE='no'"
["u-boot"]="KERNEL_ONLY='yes' JUST_UBOOT='yes' UBOOT_IGNORE_DEB='yes'"
["uboot"]="KERNEL_ONLY='yes' JUST_UBOOT='yes' UBOOT_IGNORE_DEB='yes'"
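With these mappings in place the wrapper is reachable from the CLI; a hypothetical invocation, assuming the usual compile.sh command/parameter parsing, would be `./compile.sh artifact WHAT=kernel BOARD=<board> BRANCH=<branch>`.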
@@ -45,6 +45,7 @@ function prepare_distcc_compilation_config() {
display_alert "DISTCC_TARGETS_SEGMENTS" "${DISTCC_TARGETS_SEGMENTS[*]}" "warn"
else
# If not using distcc, just add "$CTHREADS" to the DISTCC_MAKE_J_PARALLEL array.
[[ -z "${CTHREADS}" ]] && exit_with_error "CTHREADS is not set in prepare_distcc_compilation_config"
DISTCC_MAKE_J_PARALLEL=("${CTHREADS}")
fi
@@ -1,3 +1,19 @@
function prepare_kernel_config_core_or_userpatches() {
# LINUXCONFIG is set or exit_with_error
[[ -z "${LINUXCONFIG}" ]] && exit_with_error "LINUXCONFIG not set: '${LINUXCONFIG}'"

if [[ -f $USERPATCHES_PATH/$LINUXCONFIG.config ]]; then
display_alert "Using kernel config provided by user" "userpatches/$LINUXCONFIG.config" "info"
kernel_config_source_filename="${USERPATCHES_PATH}/${LINUXCONFIG}.config"
elif [[ -f "${USERPATCHES_PATH}/config/kernel/${LINUXCONFIG}.config" ]]; then
display_alert "Using kernel config provided by user in config/kernel folder" "config/kernel/${LINUXCONFIG}.config" "info"
kernel_config_source_filename="${USERPATCHES_PATH}/config/kernel/${LINUXCONFIG}.config"
else
display_alert "Using kernel config file" "config/kernel/$LINUXCONFIG.config" "info"
kernel_config_source_filename="${SRC}/config/kernel/${LINUXCONFIG}.config"
fi
}
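# Resolution order, as implemented above: userpatches/${LINUXCONFIG}.config first, then
# userpatches/config/kernel/${LINUXCONFIG}.config, then the bundled ${SRC}/config/kernel/${LINUXCONFIG}.config.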
function kernel_config() {
# check $kernel_work_dir is set and exists, or bail
[[ -z "${kernel_work_dir}" ]] && exit_with_error "kernel_work_dir is not set"
@@ -21,6 +37,7 @@ function kernel_config() {

function kernel_config_initialize() {
display_alert "Configuring kernel" "${LINUXCONFIG}" "info"
cd "${kernel_work_dir}" || exit_with_error "kernel_work_dir does not exist: ${kernel_work_dir}"

# If a `.config` already exists (from previous build), store it, preserving date.
# We will compare the result of the new configuration to it, and if the contents are the same, we'll restore the original date.

@@ -34,28 +51,17 @@ function kernel_config_initialize() {
# copy kernel config from configuration, userpatches
if [[ "${KERNEL_KEEP_CONFIG}" == yes && -f "${DEST}"/config/$LINUXCONFIG.config ]]; then
display_alert "Using previously-exported kernel config" "${DEST}/config/$LINUXCONFIG.config" "info"
run_host_command_logged cp -pv "${DEST}/config/${LINUXCONFIG}.config" .config
run_host_command_logged cp -pv "${DEST}/config/${LINUXCONFIG}.config" "${kernel_work_dir}/.config"
else
# @TODO: rpardini: this is too contrived
if [[ -f $USERPATCHES_PATH/$LINUXCONFIG.config ]]; then
display_alert "Using kernel config provided by user" "userpatches/$LINUXCONFIG.config" "info"
run_host_command_logged cp -pv "${USERPATCHES_PATH}/${LINUXCONFIG}.config" .config
kernel_config_source_filename="${USERPATCHES_PATH}/${LINUXCONFIG}.config"
elif [[ -f "${USERPATCHES_PATH}/config/kernel/${LINUXCONFIG}.config" ]]; then
display_alert "Using kernel config provided by user in config/kernel folder" "config/kernel/${LINUXCONFIG}.config" "info"
run_host_command_logged cp -pv "${USERPATCHES_PATH}/config/kernel/${LINUXCONFIG}.config" .config
kernel_config_source_filename="${USERPATCHES_PATH}/config/kernel/${LINUXCONFIG}.config"
else
display_alert "Using kernel config file" "config/kernel/$LINUXCONFIG.config" "info"
run_host_command_logged cp -pv "${SRC}/config/kernel/${LINUXCONFIG}.config" .config
kernel_config_source_filename="${SRC}/config/kernel/${LINUXCONFIG}.config"
fi
prepare_kernel_config_core_or_userpatches
run_host_command_logged cp -pv "${kernel_config_source_filename}" "${kernel_work_dir}/.config"
fi

# Start by running olddefconfig -- always.
# It "updates" the config, using defaults from Kbuild files in the source tree.
# It is worth noting that on the first run, it builds the tools, so the host-side compiler has to be working,
# regardless of the cross-build toolchain.
cd "${kernel_work_dir}" || exit_with_error "kernel_work_dir does not exist: ${kernel_work_dir}"
run_kernel_make olddefconfig

# Run the core-armbian config modifications here, built-in extensions:
@@ -41,7 +41,18 @@ function prepare_kernel_packaging_debs() {

# Some variables and settings used throughout the script
declare kernel_version_family="${kernel_version}-${LINUXFAMILY}"
declare package_version="${REVISION}"

# Package version. Affects users upgrading from repo!
declare package_version="${REVISION}" # default, "classic" Armbian non-version.
# If we're building an artifact, use the pre-determined artifact version.
if [[ "${artifact_version:-""}" != "" ]]; then
if [[ "${artifact_version}" == "undetermined" ]]; then
exit_with_error "Undetermined artifact version during kernel deb packaging. This is a bug, report it."
fi
display_alert "Using artifact version for kernel package version" "${artifact_version}" "info"
package_version="${artifact_version}"
fi
display_alert "Kernel .deb package version" "${package_version}" "info"

# show incoming tree
#display_alert "Kernel install dir" "incoming from KBUILD make" "debug"

@@ -140,7 +151,9 @@ function create_kernel_deb() {
#display_alert "Package dir" "for package ${package_name}" "debug"
#run_host_command_logged tree -C -h -d --du "${package_directory}"

run_host_command_logged dpkg-deb ${DEB_COMPRESS:+-Z$DEB_COMPRESS} --build "${package_directory}" "${deb_output_dir}" # not KDEB compress, we're not under a Makefile
# @TODO: hmm, why doesn't this use fakeroot_dpkg_deb_build() ?
declare final_deb_filename="${deb_output_dir}/${package_name}_${REVISION}_${ARCH}.deb" # for compatibility with non-artifacts
run_host_command_logged dpkg-deb ${DEB_COMPRESS:+-Z$DEB_COMPRESS} --build "${package_directory}" "${final_deb_filename}" # not KDEB compress, we're not under a Makefile

done_with_temp_dir "${cleanup_id}" # changes cwd to "${SRC}" and fires the cleanup function early
}
@@ -210,7 +223,7 @@ function kernel_package_callback_linux_image() {
Maintainer: ${MAINTAINER} <${MAINTAINERMAIL}>
Section: kernel
Provides: linux-image, linux-image-armbian, armbian-$BRANCH
Description: Linux kernel, armbian version $kernel_version_family $BRANCH
Description: Armbian Linux $BRANCH kernel image ${artifact_version_reason:-"${kernel_version_family}"}
This package contains the Linux kernel, modules and corresponding other
files, kernel_version_family: $kernel_version_family.
CONTROL_FILE

@@ -263,7 +276,7 @@ function kernel_package_callback_linux_dtb() {
Package: ${package_name}
Architecture: ${ARCH}
Provides: linux-dtb, linux-dtb-armbian, armbian-$BRANCH
Description: Armbian Linux DTB, version ${kernel_version_family} $BRANCH
Description: Armbian Linux $BRANCH DTBs ${artifact_version_reason:-"${kernel_version_family}"}
This package contains device blobs from the Linux kernel, version ${kernel_version_family}
CONTROL_FILE

@@ -379,7 +392,7 @@ function kernel_package_callback_linux_headers() {
Architecture: ${ARCH}
Provides: linux-headers, linux-headers-armbian, armbian-$BRANCH
Depends: make, gcc, libc6-dev, bison, flex, libssl-dev, libelf-dev
Description: Linux kernel headers for ${kernel_version_family}
Description: Armbian Linux $BRANCH headers ${artifact_version_reason:-"${kernel_version_family}"}
This package provides kernel header files for ${kernel_version_family}
.
This is useful for DKMS and building of external modules.

@@ -454,7 +467,7 @@ function kernel_package_callback_linux_headers_full_source() {
Architecture: ${ARCH}
Provides: linux-headers, linux-headers-armbian, armbian-$BRANCH
Depends: make, gcc, libc6-dev, bison, flex, libssl-dev, libelf-dev
Description: Linux kernel headers for ${kernel_version_family} - based on full source
Description: Armbian Linux $BRANCH full-source headers ${artifact_version_reason:-"${kernel_version_family}"}
This package provides kernel header files for ${kernel_version_family}
.
This is useful for DKMS and building of external modules.
@@ -1,12 +1,10 @@
function calculate_hash_for_files() {
hash_files="$(sha256sum "${@}" | sha256sum | cut -d' ' -f1)" # hash of hashes
hash_files="${hash_files:0:16}" # shorten it to 16 characters
display_alert "Hash for files:" "$hash_files" "debug"
function kernel_drivers_create_patches_hash_only() {
hash_only="yes" kernel_drivers_create_patches "${@}"
}

function kernel_drivers_create_patches() {
declare kernel_work_dir="${1}"
declare kernel_git_revision="${2}"
kernel_drivers_patch_hash="undetermined" # outer scope
kernel_drivers_patch_file="undetermined" # outer scope

declare hash_files # any changes in these files will trigger a cache miss; also any changes in misc .patch with "wireless" at start or "wifi" anywhere in the name
calculate_hash_for_files "${SRC}/lib/functions/compilation/patch/drivers_network.sh" "${SRC}/lib/functions/compilation/patch/drivers-harness.sh" "${SRC}"/patch/misc/wireless*.patch "${SRC}"/patch/misc/*wifi*.patch

@@ -22,8 +20,16 @@ function kernel_drivers_create_patches() {
declare cache_target_file="${cache_dir_base}/${cache_key}.patch"

# outer scope variables:
kernel_drivers_patch_file="${cache_target_file}"
kernel_drivers_patch_hash="${cache_key}"
kernel_drivers_patch_file="${cache_target_file}" # outer scope
kernel_drivers_patch_hash="${hash_files}" # outer scope

if [[ "${hash_only:-"no"}" == "yes" ]]; then
display_alert "Hash-only kernel driver requested" "$kernel_drivers_patch_hash - returning" "warn"
return 0
fi

declare kernel_work_dir="${1}"
declare kernel_git_revision="${2}"

# If the target file exists, we can skip the patch creation.
if [[ -f "${cache_target_file}" ]]; then
@@ -18,7 +18,10 @@ function compile_uboot_target() {
local uboot_work_dir=""
uboot_work_dir="$(pwd)"

# outer scope variable: uboot_git_revision
# outer scope variable: uboot_git_revision, validate that it is set
if [[ -z "${uboot_git_revision}" ]]; then
exit_with_error "uboot_git_revision is not set"
fi

display_alert "${uboot_prefix} Checking out to clean sources SHA1 ${uboot_git_revision}" "{$BOOTSOURCEDIR} for ${target_make}"
git checkout -f -q "${uboot_git_revision}"

@@ -319,10 +322,22 @@ function compile_uboot() {
$(declare -f setup_write_uboot_platform || true)
EOF

# Package version. Affects users upgrading from repo!
declare package_version="${REVISION}" # default, "classic" Armbian non-version.
# If we're building an artifact, use the pre-determined artifact version.
if [[ "${artifact_version:-""}" != "" ]]; then
if [[ "${artifact_version}" == "undetermined" ]]; then
exit_with_error "Undetermined artifact version during u-boot deb packaging. This is a bug, report it."
fi
display_alert "Using artifact version for u-boot package version" "${artifact_version}" "info"
package_version="${artifact_version}"
fi
display_alert "Das U-Boot .deb package version" "${package_version}" "info"

# set up control file
cat <<- EOF > "$uboottempdir/${uboot_name}/DEBIAN/control"
Package: linux-u-boot-${BOARD}-${BRANCH}
Version: $REVISION
Version: ${package_version}
Architecture: $ARCH
Maintainer: $MAINTAINER <$MAINTAINERMAIL>
Installed-Size: 1

@@ -331,18 +346,20 @@ function compile_uboot() {
Provides: armbian-u-boot
Replaces: armbian-u-boot
Conflicts: armbian-u-boot, u-boot-sunxi
Description: Uboot loader $version
Description: Das U-Boot for ${BOARD} ${artifact_version_reason:-"${version}"}
EOF

# copy config file to the package
# useful for FEL boot with overlayfs_wrapper
[[ -f .config && -n $BOOTCONFIG ]] && cp .config "$uboottempdir/${uboot_name}/usr/lib/u-boot/${BOOTCONFIG}" 2>&1
[[ -f .config && -n $BOOTCONFIG ]] && run_host_command_logged cp .config "$uboottempdir/${uboot_name}/usr/lib/u-boot/${BOOTCONFIG}"
# copy license files from typical locations
[[ -f COPYING ]] && cp COPYING "$uboottempdir/${uboot_name}/usr/lib/u-boot/LICENSE" 2>&1
[[ -f Licenses/README ]] && cp Licenses/README "$uboottempdir/${uboot_name}/usr/lib/u-boot/LICENSE" 2>&1
[[ -n $atftempdir && -f $atftempdir/license.md ]] && cp "${atftempdir}/license.md" "$uboottempdir/${uboot_name}/usr/lib/u-boot/LICENSE.atf" 2>&1
[[ -f COPYING ]] && run_host_command_logged cp COPYING "$uboottempdir/${uboot_name}/usr/lib/u-boot/LICENSE"
[[ -f Licenses/README ]] && run_host_command_logged cp Licenses/README "$uboottempdir/${uboot_name}/usr/lib/u-boot/LICENSE"
[[ -n $atftempdir && -f $atftempdir/license.md ]] && run_host_command_logged cp "${atftempdir}/license.md" "$uboottempdir/${uboot_name}/usr/lib/u-boot/LICENSE.atf"

display_alert "Building u-boot deb" "${uboot_name}.deb"
# Important: this forces the deb to have a specific name, and not be version-dependent...
# This is set to `uboot_name="${CHOSEN_UBOOT}_${REVISION}_${ARCH}"` in outer scope...
display_alert "Building u-boot deb" "(version: ${package_version}) ${uboot_name}.deb"
fakeroot_dpkg_deb_build "$uboottempdir/${uboot_name}" "$uboottempdir/${uboot_name}.deb"
rm -rf "$uboottempdir/${uboot_name}"
[[ -n $atftempdir ]] && rm -rf "${atftempdir}"

@@ -24,6 +24,8 @@ function prepare_compilation_vars() {
Called early, before any compilation work starts.
POST_DETERMINE_CTHREADS

# readonly, global
declare -g -r CTHREADS="${CTHREADS}"

return 0
}
lib/functions/general/git-ref2info.sh (new file, 111 lines)
@@ -0,0 +1,111 @@
# This has... everything: https://git.kernel.org/pub/scm/linux/kernel/git/stable/linux.git/commit/?h=linux-6.1.y
# This has... everything: https://git.kernel.org/pub/scm/linux/kernel/git/stable/linux.git/commit/?h=v6.2-rc5

# get the sha1 of the commit on tag or branch
# git ls-remote --exit-code --symref git://git.kernel.org/pub/scm/linux/kernel/git/stable/linux.git v6.2-rc5
# git ls-remote --exit-code --symref git://git.kernel.org/pub/scm/linux/kernel/git/stable/linux.git v6.2-rc5

# 93f875a8526a291005e7f38478079526c843cbec refs/heads/linux-6.1.y
# 4cc398054ac8efe0ff832c82c7caacbdd992312a refs/tags/v6.2-rc5

# https://git.kernel.org/pub/scm/linux/kernel/git/stable/linux.git/tree/Makefile?h=linux-6.1.y
# plaintext: https://git.kernel.org/pub/scm/linux/kernel/git/stable/linux.git/plain/Makefile?h=4cc398054ac8efe0ff832c82c7caacbdd992312a

function memoized_git_ref_to_info() {
declare -n MEMO_DICT="${1}" # nameref
declare ref_type ref_name
git_parse_ref "${MEMO_DICT[GIT_REF]}"
MEMO_DICT+=(["REF_TYPE"]="${ref_type}")
MEMO_DICT+=(["REF_NAME"]="${ref_name}")

# Get the SHA1 of the commit
declare sha1
sha1="$(git ls-remote --exit-code "${MEMO_DICT[GIT_SOURCE]}" "${ref_name}" | cut -f1)"
MEMO_DICT+=(["SHA1"]="${sha1}")

if [[ "${2}" == "include_makefile_body" ]]; then

function obtain_makefile_body_from_git() {
declare git_source="${1}"
declare sha1="${2}"
makefile_body="undetermined" # outer scope
makefile_url="undetermined" # outer scope
makefile_version="undetermined" # outer scope
makefile_codename="undetermined" # outer scope

declare url="undetermined"
case "${git_source}" in

"git://git.kernel.org/pub/scm/linux/kernel/git/stable/linux.git")
url="https://git.kernel.org/pub/scm/linux/kernel/git/stable/linux.git/plain/Makefile?h=${sha1}"
;;

# @TODO: urgently add support for Google Mirror
# @TODO: china mirror etc.
# @TODO: mirrors might need to be resolved before/during/after this, refactor

"https://github.com/"*)
# parse org/repo from https://github.com/org/repo
declare org_and_repo=""
org_and_repo="$(echo "${git_source}" | cut -d/ -f4-5)"
url="https://raw.githubusercontent.com/${org_and_repo}/${sha1}/Makefile"
;;

"https://source.codeaurora.org/external/imx/linux-imx")
# Random, bizarre stuff here, to keep compatibility with some old stuff
url="https://source.codeaurora.org/external/imx/linux-imx/plain/Makefile?h=${sha1}"
;;

*)
exit_with_error "Unknown git source '${git_source}'"
;;
esac

display_alert "Fetching Makefile via HTTP" "${url}" "warn"
makefile_url="${url}"
makefile_body="$(curl -sL "${url}")"

parse_makefile_version "${makefile_body}"

return 0
}

function parse_makefile_version() {
declare makefile_body="${1}"
makefile_version="undetermined" # outer scope
makefile_codename="undetermined" # outer scope
makefile_full_version="undetermined" # outer scope

local ver=()
ver[0]=$(grep "^VERSION" <(echo "${makefile_body}") | head -1 | awk '{print $(NF)}' | grep -oE '^[[:digit:]]+' || true)
ver[1]=$(grep "^PATCHLEVEL" <(echo "${makefile_body}") | head -1 | awk '{print $(NF)}' | grep -oE '^[[:digit:]]+' || true)
ver[2]=$(grep "^SUBLEVEL" <(echo "${makefile_body}") | head -1 | awk '{print $(NF)}' | grep -oE '^[[:digit:]]+' || true)
ver[3]=$(grep "^EXTRAVERSION" <(echo "${makefile_body}") | head -1 | awk '{print $(NF)}' | grep -oE '^-rc[[:digit:]]+' || true)
makefile_version="${ver[0]:-0}${ver[1]:+.${ver[1]}}${ver[2]:+.${ver[2]}}${ver[3]}"
makefile_full_version="${makefile_version}"
if [[ "${ver[3]}" == "-rc"* ]]; then # contentious: if an "-rc" EXTRAVERSION, don't include the SUBLEVEL
makefile_version="${ver[0]:-0}${ver[1]:+.${ver[1]}}${ver[3]}"
fi

# grab the codename while we're at it
makefile_codename="$(grep "^NAME\ =\ " <(echo "${makefile_body}") | head -1 | cut -d '=' -f 2 | sed -e "s|'||g" | xargs echo -n || true)"
# remove any starting whitespace left
makefile_codename="${makefile_codename#"${makefile_codename%%[![:space:]]*}"}"
# remove any trailing whitespace left
makefile_codename="${makefile_codename%"${makefile_codename##*[![:space:]]}"}"

return 0
}
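# Worked example (hypothetical Makefile contents): VERSION = 6, PATCHLEVEL = 2, SUBLEVEL = 0, EXTRAVERSION = -rc5
# gives makefile_full_version="6.2.0-rc5" and makefile_version="6.2-rc5" (SUBLEVEL dropped for -rc);
# a stable 6.1.8 Makefile with an empty EXTRAVERSION gives "6.1.8" for both.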
display_alert "Fetching Makefile body" "${ref_name}" "warn"
declare makefile_body makefile_url
declare makefile_version makefile_codename makefile_full_version
obtain_makefile_body_from_git "${MEMO_DICT[GIT_SOURCE]}" "${sha1}"
MEMO_DICT+=(["MAKEFILE_URL"]="${makefile_url}")
#MEMO_DICT+=(["MAKEFILE_BODY"]="${makefile_body}") # large, don't store
MEMO_DICT+=(["MAKEFILE_VERSION"]="${makefile_version}")
MEMO_DICT+=(["MAKEFILE_FULL_VERSION"]="${makefile_full_version}")
MEMO_DICT+=(["MAKEFILE_CODENAME"]="${makefile_codename}")
fi

}

@@ -4,7 +4,7 @@
# sets: ref_type=branch|tag|commit, ref_name=xxx|yyyy|zzzz|HEAD
function git_parse_ref() {
declare ref="$1"
[[ -z $ref || ($ref != tag:* && $ref != branch:* && $ref != head && $ref != commit:*) ]] && exit_with_error "Error in configuration"
[[ -z $ref || ($ref != tag:* && $ref != branch:* && $ref != head && $ref != commit:*) ]] && exit_with_error "Error in configuration; git_ref '${ref}' is not valid"
ref_type=${ref%%:*} # outer scope
ref_name=${ref##*:} # outer scope
if [[ $ref_type == head ]]; then
lib/functions/general/hash-files.sh (new file, 50 lines)
@@ -0,0 +1,50 @@
function calculate_hash_for_all_files_in_dirs() {
declare -a dirs_to_hash=("$@")
declare -a files_to_hash=()
for dir in "${dirs_to_hash[@]}"; do
# skip if dir doesn't exist...
if [[ ! -d "${dir}" ]]; then
display_alert "calculate_hash_for_all_files_in_dirs" "skipping non-existent dir \"${dir}\"" "warn"
continue
fi
declare found_files="no"
# shellcheck disable=SC2044 # lets expand... # -L: follow symlinks
for file in $(find -L "${dir}" -type f); do
files_to_hash+=("${file}")
found_files="yes"
done
if [[ "${found_files}" == "no" ]]; then
display_alert "calculate_hash_for_all_files_in_dirs" "skipped empty dir \"${dir}\"" "warn"
fi
done

#display_alert "calculate_hash_for_all_files_in_dirs" "files_to_hash_sorted: ${#files_to_hash_sorted[@]}" "warn"
#display_alert "calculate_hash_for_all_files_in_dirs" "files_to_hash_sorted: ${files_to_hash_sorted[*]}" "warn"

calculate_hash_for_files "${files_to_hash[@]}"
}

function calculate_hash_for_files() {
hash_files="undetermined" # outer scope

# relativize the files to SRC
declare -a files_to_hash=("$@")
declare -a files_to_hash_relativized=()
for file in "${files_to_hash[@]}"; do
# remove the SRC/ from the file name
file="${file#${SRC}/}"
files_to_hash_relativized+=("${file}")
done

# sort the array files_to_hash; use sort and readfile
declare -a files_to_hash_sorted
mapfile -t files_to_hash_sorted < <(for one in "${files_to_hash_relativized[@]}"; do echo "${one}"; done | LC_ALL=C sort -h) # "human" sorting

display_alert "calculate_hash_for_files:" "files_to_hash_sorted: ${files_to_hash_sorted[*]}" "debug"
declare full_hash
full_hash="$(cd "${SRC}" && sha256sum "${files_to_hash_sorted[@]}")"
hash_files="$(echo "${full_hash}" | sha256sum | cut -d' ' -f1)" # hash of hashes
hash_files="${hash_files:0:16}" # shorten it to 16 characters
display_alert "Hash for files:" "$hash_files" "warn"
display_alert "Full hash input for files:" "\n${full_hash}" "warn"
}
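A usage sketch, mirroring how the new kernel artifact code calls into this file (the directory arguments are assumptions); the result is returned through the caller-scoped hash_files variable, computed over ${SRC}-relative, sorted paths and shortened to 16 hex characters:

declare hash_files="undetermined"
calculate_hash_for_all_files_in_dirs "${SRC}/patch/kernel/${KERNELPATCHDIR}" "${USERPATCHES_PATH}/kernel/${KERNELPATCHDIR}"
declare patches_hash="${hash_files}"
declare patches_hash_short="${patches_hash:0:4}"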
lib/functions/general/memoize-cached.sh (new file, 52 lines)
@@ -0,0 +1,52 @@
# This does many tricks. Beware.
# Call:
# run_memoized VAR_NAME cache_id memoized_function_name [function_args]
function run_memoized() {
declare var_n="${1}"
shift
declare cache_id="${1}"
shift
declare memoized_func="${1}"
shift
declare extra_args=("${@}")

# shellcheck disable=SC2178 # nope, that's a nameref.
declare -n MEMO_DICT="${var_n}" # nameref

#display_alert "memoize" "before" "info"
#debug_dict MEMO_DICT

MEMO_DICT+=(["MEMO_TYPE"]="${cache_id}")
declare single_string_input="${cache_id}"
single_string_input="$(declare -p "${var_n}")" # this might use random order...

MEMO_DICT+=(["MEMO_INPUT_HASH"]="$(echo "${var_n}-${single_string_input}--$(declare -f "${memoized_func}")" "${extra_args[@]}" | sha256sum | cut -f1 -d' ')")

declare disk_cache_dir="${SRC}/cache/memoize/${MEMO_DICT[MEMO_TYPE]}"
mkdir -p "${disk_cache_dir}"
declare disk_cache_file="${disk_cache_dir}/${MEMO_DICT[MEMO_INPUT_HASH]}"
if [[ -f "${disk_cache_file}" ]]; then
# @TODO: check expiration; some stuff might want different expiration times, eg, branch vs tag vs commit

display_alert "Using memoized ${var_n} from ${disk_cache_file}" "${MEMO_DICT[MEMO_INPUT]}" "info"
cat "${disk_cache_file}"
# shellcheck disable=SC1090 # yep, I'm sourcing the cache here. produced below.
source "${disk_cache_file}"

#display_alert "after cache hit" "before" "info"
#debug_dict MEMO_DICT
return 0
fi

display_alert "Memoizing ${var_n} to ${disk_cache_file}" "${MEMO_DICT[MEMO_INPUT]}" "info"
# if cache miss, run the memoized_func...
${memoized_func} "${var_n}" "${extra_args[@]}"

# ... and save the output to the cache; twist declare -p's output due to the nameref
declare -p "${var_n}" | sed -e 's|^declare -A ||' > "${disk_cache_file}"

#display_alert "after cache miss" "before" "info"
#debug_dict MEMO_DICT

return 0
}
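A usage sketch, as exercised by the new kernel/u-boot artifact code: the caller pre-seeds an associative array with its inputs, and run_memoized either restores it from ${SRC}/cache/memoize/<cache_id>/ or runs the function and stores the result:

declare -A GIT_INFO=([GIT_SOURCE]="${KERNELSOURCE}" [GIT_REF]="${KERNELBRANCH}")
run_memoized GIT_INFO "git2info" memoized_git_ref_to_info "include_makefile_body"
debug_dict GIT_INFO # now also holds SHA1, MAKEFILE_VERSION, MAKEFILE_FULL_VERSION, MAKEFILE_CODENAME, etc.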
lib/functions/logging/debug-dump.sh (new file, 13 lines)
@@ -0,0 +1,13 @@
function debug_dict() {
local dict_name="$1"
declare -n dict="${dict_name}"
for key in "${!dict[@]}"; do
debug_var "${dict_name}[${key}]"
done
}

function debug_var() {
local varname="$1"
local -a var_val_array=("${!varname}")
display_alert "${gray_color:-}# ${yellow_color:-}${varname}${normal_color:-}=${bright_yellow_color:-}${var_val_array[*]@Q}${ansi_reset_color:-}" "" "info"
}
@@ -18,6 +18,7 @@ function logging_init() {
declare -g bright_red_color="\e[1;31m" red_color="\e[0;31m"
declare -g bright_blue_color="\e[1;34m" blue_color="\e[0;34m"
declare -g bright_magenta_color="\e[1;35m" magenta_color="\e[0;35m"
declare -g bright_yellow_color="\e[1;33m" yellow_color="\e[0;33m"
declare -g ansi_reset_color="\e[0m"
declare -g -i logging_section_counter=0 # -i: integer
declare -g tool_color="${gray_color}" # default to gray... (should be ok on terminals)

@@ -38,9 +38,6 @@ function main_default_build_packages() {
done
fi

# Prepare ccache, cthreads, etc for the build
LOG_SECTION="prepare_compilation_vars" do_with_logging prepare_compilation_vars

if [[ "${do_build_uboot}" == "yes" ]]; then
# Don't build u-boot at all if the BOOTCONFIG is 'none'.
if [[ "${BOOTCONFIG}" != "none" ]]; then

@@ -26,7 +26,7 @@ function main_default_start_build() {
declare -g start_timestamp # global timestamp; read below by main_default_end_build()
start_timestamp=$(date +%s)

### Write config summary
### Write config summary # @TODO: or not? this is a bit useless
LOG_SECTION="config_summary" do_with_logging write_config_summary_output_file

# Check and install dependencies, directory structure and settings

@@ -40,6 +40,9 @@ function main_default_start_build() {
ln -s "/usr/bin/python2" "${BIN_WORK_DIR}/python"
export PATH="${BIN_WORK_DIR}:${PATH}"

# Prepare ccache, cthreads, etc for the build
LOG_SECTION="prepare_compilation_vars" do_with_logging prepare_compilation_vars

return 0
}
@@ -1,6 +1,33 @@
#!/usr/bin/env bash
# This file is/was autogenerated by lib/tools/gen-library.sh; don't modify manually

# no errors tolerated. invoked before each sourced file to make sure.
#set -o pipefail # trace ERR through pipes - will be enabled "soon"
#set -o nounset ## set -u : exit the script if you try to use an uninitialised variable - one day will be enabled
set -o errtrace # trace ERR through - enabled
set -o errexit ## set -e : exit the script if any statement returns a non-true return value - enabled
### lib/functions/artifacts/artifacts-registry.sh
# shellcheck source=lib/functions/artifacts/artifacts-registry.sh
source "${SRC}"/lib/functions/artifacts/artifacts-registry.sh

# no errors tolerated. invoked before each sourced file to make sure.
#set -o pipefail # trace ERR through pipes - will be enabled "soon"
#set -o nounset ## set -u : exit the script if you try to use an uninitialised variable - one day will be enabled
set -o errtrace # trace ERR through - enabled
set -o errexit ## set -e : exit the script if any statement returns a non-true return value - enabled
### lib/functions/artifacts/kernel.sh
# shellcheck source=lib/functions/artifacts/kernel.sh
source "${SRC}"/lib/functions/artifacts/kernel.sh

# no errors tolerated. invoked before each sourced file to make sure.
#set -o pipefail # trace ERR through pipes - will be enabled "soon"
#set -o nounset ## set -u : exit the script if you try to use an uninitialised variable - one day will be enabled
set -o errtrace # trace ERR through - enabled
set -o errexit ## set -e : exit the script if any statement returns a non-true return value - enabled
### lib/functions/artifacts/u-boot.sh
# shellcheck source=lib/functions/artifacts/u-boot.sh
source "${SRC}"/lib/functions/artifacts/u-boot.sh

# no errors tolerated. invoked before each sourced file to make sure.
#set -o pipefail # trace ERR through pipes - will be enabled "soon"
#set -o nounset ## set -u : exit the script if you try to use an uninitialised variable - one day will be enabled

@@ -28,6 +55,15 @@ set -o errexit ## set -e : exit the script if any statement returns a non-true
# shellcheck source=lib/functions/bsp/utils-bsp.sh
source "${SRC}"/lib/functions/bsp/utils-bsp.sh

# no errors tolerated. invoked before each sourced file to make sure.
#set -o pipefail # trace ERR through pipes - will be enabled "soon"
#set -o nounset ## set -u : exit the script if you try to use an uninitialised variable - one day will be enabled
set -o errtrace # trace ERR through - enabled
set -o errexit ## set -e : exit the script if any statement returns a non-true return value - enabled
### lib/functions/cli/cli-artifact.sh
# shellcheck source=lib/functions/cli/cli-artifact.sh
source "${SRC}"/lib/functions/cli/cli-artifact.sh

# no errors tolerated. invoked before each sourced file to make sure.
#set -o pipefail # trace ERR through pipes - will be enabled "soon"
#set -o nounset ## set -u : exit the script if you try to use an uninitialised variable - one day will be enabled

@@ -496,6 +532,15 @@ set -o errexit ## set -e : exit the script if any statement returns a non-true
# shellcheck source=lib/functions/general/extensions.sh
source "${SRC}"/lib/functions/general/extensions.sh

# no errors tolerated. invoked before each sourced file to make sure.
#set -o pipefail # trace ERR through pipes - will be enabled "soon"
#set -o nounset ## set -u : exit the script if you try to use an uninitialised variable - one day will be enabled
set -o errtrace # trace ERR through - enabled
set -o errexit ## set -e : exit the script if any statement returns a non-true return value - enabled
### lib/functions/general/git-ref2info.sh
# shellcheck source=lib/functions/general/git-ref2info.sh
source "${SRC}"/lib/functions/general/git-ref2info.sh

# no errors tolerated. invoked before each sourced file to make sure.
#set -o pipefail # trace ERR through pipes - will be enabled "soon"
#set -o nounset ## set -u : exit the script if you try to use an uninitialised variable - one day will be enabled

@@ -514,6 +559,24 @@ set -o errexit ## set -e : exit the script if any statement returns a non-true
# shellcheck source=lib/functions/general/github-actions.sh
source "${SRC}"/lib/functions/general/github-actions.sh

# no errors tolerated. invoked before each sourced file to make sure.
#set -o pipefail # trace ERR through pipes - will be enabled "soon"
#set -o nounset ## set -u : exit the script if you try to use an uninitialised variable - one day will be enabled
set -o errtrace # trace ERR through - enabled
set -o errexit ## set -e : exit the script if any statement returns a non-true return value - enabled
### lib/functions/general/hash-files.sh
# shellcheck source=lib/functions/general/hash-files.sh
source "${SRC}"/lib/functions/general/hash-files.sh

# no errors tolerated. invoked before each sourced file to make sure.
#set -o pipefail # trace ERR through pipes - will be enabled "soon"
#set -o nounset ## set -u : exit the script if you try to use an uninitialised variable - one day will be enabled
set -o errtrace # trace ERR through - enabled
set -o errexit ## set -e : exit the script if any statement returns a non-true return value - enabled
### lib/functions/general/memoize-cached.sh
# shellcheck source=lib/functions/general/memoize-cached.sh
source "${SRC}"/lib/functions/general/memoize-cached.sh

# no errors tolerated. invoked before each sourced file to make sure.
#set -o pipefail # trace ERR through pipes - will be enabled "soon"
#set -o nounset ## set -u : exit the script if you try to use an uninitialised variable - one day will be enabled

@@ -721,6 +784,15 @@ set -o errexit ## set -e : exit the script if any statement returns a non-true
# shellcheck source=lib/functions/logging/capture.sh
source "${SRC}"/lib/functions/logging/capture.sh

# no errors tolerated. invoked before each sourced file to make sure.
#set -o pipefail # trace ERR through pipes - will be enabled "soon"
#set -o nounset ## set -u : exit the script if you try to use an uninitialised variable - one day will be enabled
set -o errtrace # trace ERR through - enabled
set -o errexit ## set -e : exit the script if any statement returns a non-true return value - enabled
### lib/functions/logging/debug-dump.sh
# shellcheck source=lib/functions/logging/debug-dump.sh
source "${SRC}"/lib/functions/logging/debug-dump.sh

# no errors tolerated. invoked before each sourced file to make sure.
#set -o pipefail # trace ERR through pipes - will be enabled "soon"
#set -o nounset ## set -u : exit the script if you try to use an uninitialised variable - one day will be enabled