Mirror of https://github.com/armbian/build
armbian-next: aggregation.py rewrite; introduce package-lists.sh; usage in core lib
@@ -1,23 +1,8 @@
|
||||
#!/usr/bin/env bash
|
||||
|
||||
create_desktop_package() {
|
||||
display_alert "bsp-desktop: PACKAGE_LIST_DESKTOP" "'${PACKAGE_LIST_DESKTOP}'" "debug"
|
||||
|
||||
# Remove leading and trailing spaces with some bash monstrosity
|
||||
# https://stackoverflow.com/questions/369758/how-to-trim-whitespace-from-a-bash-variable#12973694
|
||||
DEBIAN_RECOMMENDS="${PACKAGE_LIST_DESKTOP#"${PACKAGE_LIST_DESKTOP%%[![:space:]]*}"}"
|
||||
DEBIAN_RECOMMENDS="${DEBIAN_RECOMMENDS%"${DEBIAN_RECOMMENDS##*[![:space:]]}"}"
|
||||
# Replace whitespace characters by commas
|
||||
DEBIAN_RECOMMENDS=${DEBIAN_RECOMMENDS// /,}
|
||||
# Remove other 'spacing characters' (like tabs)
|
||||
DEBIAN_RECOMMENDS=${DEBIAN_RECOMMENDS//[[:space:]]/}
|
||||
|
||||
display_alert "bsp-desktop: DEBIAN_RECOMMENDS" "'${DEBIAN_RECOMMENDS}'" "debug"
|
||||
|
||||
# Replace whitespace characters by commas
|
||||
PACKAGE_LIST_PREDEPENDS=${PACKAGE_LIST_PREDEPENDS// /,}
|
||||
# Remove other 'spacing characters' (like tabs)
|
||||
PACKAGE_LIST_PREDEPENDS=${PACKAGE_LIST_PREDEPENDS//[[:space:]]/}
|
||||
# produced by aggregation.py
|
||||
display_alert "bsp-desktop: AGGREGATED_PACKAGES_DESKTOP_COMMA" "'${AGGREGATED_PACKAGES_DESKTOP_COMMA}'" "debug"
|
||||
|
||||
local destination tmp_dir
|
||||
tmp_dir=$(mktemp -d) # subject to TMPDIR/WORKDIR, so it is protected by the single/common error trap manager for clean-up.
|
||||
@@ -25,8 +10,6 @@ create_desktop_package() {
|
||||
rm -rf "${destination}"
|
||||
mkdir -p "${destination}"/DEBIAN
|
||||
|
||||
display_alert "bsp-desktop: PACKAGE_LIST_PREDEPENDS" "'${PACKAGE_LIST_PREDEPENDS}'" "debug"
|
||||
|
||||
# set up control file
|
||||
cat <<- EOF > "${destination}"/DEBIAN/control
|
||||
Package: ${CHOSEN_DESKTOP}
|
||||
@@ -36,52 +19,34 @@ create_desktop_package() {
|
||||
Installed-Size: 1
|
||||
Section: xorg
|
||||
Priority: optional
|
||||
Recommends: ${DEBIAN_RECOMMENDS//[:space:]+/,}, armbian-bsp-desktop
|
||||
Recommends: ${AGGREGATED_PACKAGES_DESKTOP_COMMA}, armbian-bsp-desktop
|
||||
Provides: ${CHOSEN_DESKTOP}, armbian-${RELEASE}-desktop
|
||||
Conflicts: gdm3
|
||||
Pre-Depends: ${PACKAGE_LIST_PREDEPENDS//[:space:]+/,}
|
||||
Description: Armbian desktop for ${DISTRIBUTION} ${RELEASE}
|
||||
EOF
|
||||
|
||||
# Recreating the DEBIAN/postinst file
|
||||
echo "#!/bin/sh -e" > "${destination}/DEBIAN/postinst"
|
||||
|
||||
local aggregated_content=""
|
||||
aggregate_all_desktop "debian/postinst" $'\n'
|
||||
|
||||
echo "${aggregated_content}" >> "${destination}/DEBIAN/postinst"
|
||||
echo "#!/bin/bash -e" > "${destination}/DEBIAN/postinst"
|
||||
echo "${AGGREGATED_DESKTOP_POSTINST}" >> "${destination}/DEBIAN/postinst"
|
||||
echo "exit 0" >> "${destination}/DEBIAN/postinst"
|
||||
|
||||
chmod 755 "${destination}"/DEBIAN/postinst
|
||||
|
||||
# Armbian create_desktop_package scripts
|
||||
|
||||
unset aggregated_content
|
||||
|
||||
mkdir -p "${destination}"/etc/armbian
|
||||
|
||||
local aggregated_content=""
|
||||
aggregate_all_desktop "armbian/create_desktop_package.sh" $'\n'
|
||||
eval "${aggregated_content}"
|
||||
[[ $? -ne 0 ]] && display_alert "create_desktop_package.sh exec error" "" "wrn"
|
||||
# @TODO: error information? This is very likely to explode....
|
||||
eval "${AGGREGATED_DESKTOP_CREATE_DESKTOP_PACKAGE}"
|
||||
|
||||
display_alert "Building desktop package" "${CHOSEN_DESKTOP}_${REVISION}_all" "info"
|
||||
|
||||
mkdir -p "${DEB_STORAGE}/${RELEASE}"
|
||||
cd "${destination}"
|
||||
cd "${destination}" || exit_with_error "Failed to cd to ${destination}"
|
||||
cd ..
|
||||
fakeroot_dpkg_deb_build "${destination}" "${DEB_STORAGE}/${RELEASE}/${CHOSEN_DESKTOP}_${REVISION}_all.deb"
|
||||
|
||||
unset aggregated_content
|
||||
|
||||
}
|
||||
|
||||
create_bsp_desktop_package() {
|
||||
|
||||
display_alert "Creating board support package for desktop" "${package_name}" "info"
|
||||
|
||||
local package_name="${BSP_DESKTOP_PACKAGE_FULLNAME}"
|
||||
|
||||
local destination tmp_dir
|
||||
tmp_dir=$(mktemp -d) # subject to TMPDIR/WORKDIR, so it is protected by the single/common error trap manager for clean-up.
|
||||
destination=${tmp_dir}/${BOARD}/${BSP_DESKTOP_PACKAGE_FULLNAME}
|
||||
@@ -105,32 +70,18 @@ create_bsp_desktop_package() {
|
||||
EOF
|
||||
|
||||
# Recreating the DEBIAN/postinst file
|
||||
echo "#!/bin/sh -e" > "${destination}/DEBIAN/postinst"
|
||||
|
||||
local aggregated_content=""
|
||||
aggregate_all_desktop "debian/armbian-bsp-desktop/postinst" $'\n'
|
||||
|
||||
echo "${aggregated_content}" >> "${destination}/DEBIAN/postinst"
|
||||
echo "#!/bin/bash -e" > "${destination}/DEBIAN/postinst"
|
||||
echo "${AGGREGATED_DESKTOP_BSP_POSTINST}" >> "${destination}/DEBIAN/postinst"
|
||||
echo "exit 0" >> "${destination}/DEBIAN/postinst"
|
||||
|
||||
chmod 755 "${destination}"/DEBIAN/postinst
|
||||
|
||||
# Armbian create_desktop_package scripts
|
||||
|
||||
unset aggregated_content
|
||||
|
||||
mkdir -p "${destination}"/etc/armbian
|
||||
|
||||
local aggregated_content=""
|
||||
aggregate_all_desktop "debian/armbian-bsp-desktop/prepare.sh" $'\n'
|
||||
eval "${aggregated_content}"
|
||||
[[ $? -ne 0 ]] && display_alert "prepare.sh exec error" "" "wrn" # @TODO: this is a fantasy, error would be thrown in line above
|
||||
# @TODO: error information? This is very likely to explode....
|
||||
eval "${AGGREGATED_DESKTOP_BSP_PREPARE}"
|
||||
|
||||
mkdir -p "${DEB_STORAGE}/${RELEASE}"
|
||||
cd "${destination}"
|
||||
cd "${destination}" || exit_with_error "Failed to cd to ${destination}"
|
||||
cd ..
|
||||
fakeroot_dpkg_deb_build "${destination}" "${DEB_STORAGE}/${RELEASE}/${package_name}.deb"
|
||||
|
||||
unset aggregated_content
|
||||
|
||||
}
|
||||
|
||||
@@ -1,103 +1,59 @@
|
||||
#!/usr/bin/env bash
|
||||
# Expected variables
|
||||
# - aggregated_content
|
||||
# - potential_paths
|
||||
# - separator
|
||||
# Write to variables :
|
||||
# - aggregated_content
|
||||
aggregate_content() {
|
||||
display_alert "Aggregation: aggregate_content" "potential_paths: '${potential_paths}'" "aggregation"
|
||||
for filepath in ${potential_paths}; do
|
||||
if [[ -f "${filepath}" ]]; then
|
||||
display_alert "Aggregation: aggregate_content" "HIT: '${filepath}'" "aggregation"
|
||||
aggregated_content+=$(cat "${filepath}")
|
||||
aggregated_content+="${separator}"
|
||||
fi
|
||||
done
|
||||
}
|
||||
|
||||
get_all_potential_paths() {
|
||||
display_alert "Aggregation: get_all_potential_paths" "${*}" "aggregation"
|
||||
|
||||
local root_dirs="${AGGREGATION_SEARCH_ROOT_ABSOLUTE_DIRS}"
|
||||
local rel_dirs="${1}"
|
||||
local sub_dirs="${2}"
|
||||
local looked_up_subpath="${3}"
|
||||
for root_dir in ${root_dirs}; do
|
||||
for rel_dir in ${rel_dirs}; do
|
||||
for sub_dir in ${sub_dirs}; do
|
||||
potential_paths+="${root_dir}/${rel_dir}/${sub_dir}/${looked_up_subpath} "
|
||||
done
|
||||
done
|
||||
done
|
||||
}
|
||||
|
||||
# Environment variables expected :
|
||||
# - aggregated_content
|
||||
# Arguments :
|
||||
# 1. File to look up in each directory
|
||||
# 2. The separator to add between each concatenated file
|
||||
# 3. Relative directory paths to search under each root dir
# 4. Sub-directory paths appended to each of the ${3} entries
|
||||
#
|
||||
# The function generates every potential path combination leading to the
# looked-up file:
|
||||
# ${AGGREGATION_SEARCH_ROOT_ABSOLUTE_DIRS}/${3}/${4}/${1}
|
||||
# Then it will concatenate the content of all the available files
|
||||
# into ${aggregated_content}
|
||||
#
|
||||
# TODO :
|
||||
# ${4} could be removed by just adding the appropriate paths to ${3}
|
||||
# dynamically for each case
|
||||
# (debootstrap, cli, desktop environments, desktop appgroups, ...)
|
||||
|
||||
aggregate_all_root_rel_sub() {
|
||||
display_alert "Aggregation: aggregate_all_root_rel_sub" "${*}" "aggregation"
|
||||
local separator="${2}"
|
||||
|
||||
local potential_paths=""
|
||||
get_all_potential_paths "${3}" "${4}" "${1}"
|
||||
|
||||
aggregate_content
|
||||
}
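# Example (illustrative) call, using the CLI relative dirs and the "." sub-dir:
#   aggregate_all_root_rel_sub "packages" " " "${CLI_SEARCH_RELATIVE_DIRS}" "."
# This probes paths like ${SRC}/config/cli/${RELEASE}/main/./packages and appends
# the content of every file found to ${aggregated_content}, separated by " ".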
|
||||
|
||||
aggregate_all_debootstrap() {
|
||||
display_alert "Aggregation: aggregate_all_debootstrap" "${*}" "aggregation"
|
||||
local sub_dirs_to_check=". "
|
||||
if [[ ! -z "${SELECTED_CONFIGURATION+x}" ]]; then
|
||||
sub_dirs_to_check+="config_${SELECTED_CONFIGURATION}"
|
||||
fi
|
||||
aggregate_all_root_rel_sub "${1}" "${2}" "${DEBOOTSTRAP_SEARCH_RELATIVE_DIRS}" "${sub_dirs_to_check}"
|
||||
}
|
||||
|
||||
aggregate_all_cli() {
|
||||
display_alert "Aggregation: aggregate_all_cli" "${*}" "aggregation"
|
||||
local sub_dirs_to_check=". "
|
||||
if [[ ! -z "${SELECTED_CONFIGURATION+x}" ]]; then
|
||||
sub_dirs_to_check+="config_${SELECTED_CONFIGURATION}"
|
||||
fi
|
||||
aggregate_all_root_rel_sub "${1}" "${2}" "${CLI_SEARCH_RELATIVE_DIRS}" "${sub_dirs_to_check}"
|
||||
}
|
||||
|
||||
aggregate_all_desktop() {
|
||||
display_alert "Aggregation: aggregate_all_desktop" "${*}" "aggregation"
|
||||
aggregate_all_root_rel_sub "${1}" "${2}" "${DESKTOP_ENVIRONMENTS_SEARCH_RELATIVE_DIRS}" "."
|
||||
aggregate_all_root_rel_sub "${1}" "${2}" "${DESKTOP_APPGROUPS_SEARCH_RELATIVE_DIRS}" "${DESKTOP_APPGROUPS_SELECTED}"
|
||||
}
|
||||
|
||||
one_line() {
|
||||
local aggregate_func_name="${1}"
|
||||
local aggregated_content=""
|
||||
shift 1
|
||||
$aggregate_func_name "${@}"
|
||||
cleanup_list aggregated_content
|
||||
}
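# Example usage (as used in do_main_configuration):
#   DEBOOTSTRAP_LIST="$(one_line aggregate_all_debootstrap "packages" " ")"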
|
||||
|
||||
cleanup_list() {
|
||||
local varname="${1}"
|
||||
local list_to_clean="${!varname}"
|
||||
list_to_clean="${list_to_clean#"${list_to_clean%%[![:space:]]*}"}"
|
||||
list_to_clean="${list_to_clean%"${list_to_clean##*[![:space:]]}"}"
|
||||
echo ${list_to_clean}
}
|
||||
|
||||
function aggregate_all_packages() {
|
||||
# Get a temporary file for the output. This is not WORKDIR yet, since we're still in configuration phase.
|
||||
temp_file_for_aggregation="$(mktemp)"
|
||||
|
||||
# array with all parameters; will be auto-quoted by bash's @Q modifier below
|
||||
declare -a aggregation_params_quoted=(
|
||||
"SRC=${SRC}"
|
||||
"OUTPUT=${temp_file_for_aggregation}"
|
||||
|
||||
# For the main packages, and others; main packages are not mixed with BOARD or DESKTOP packages.
|
||||
# Results:
|
||||
# - AGGREGATED_DEBOOTSTRAP_COMPONENTS
|
||||
# - AGGREGATED_PACKAGES_DEBOOTSTRAP
|
||||
# - AGGREGATED_PACKAGES_ROOTFS
|
||||
# - AGGREGATED_PACKAGES_IMAGE
|
||||
|
||||
"ARCH=${ARCH}"
|
||||
"RELEASE=${RELEASE}"
|
||||
"LINUXFAMILY=${LINUXFAMILY}"
|
||||
"BOARD=${BOARD}"
|
||||
"USERPATCHES_PATH=${USERPATCHES_PATH}"
|
||||
"SELECTED_CONFIGURATION=${SELECTED_CONFIGURATION}"
|
||||
|
||||
# Removals. Will remove from all lists.
|
||||
"REMOVE_PACKAGES=${REMOVE_PACKAGES[*]}"
|
||||
"REMOVE_PACKAGES_REFS=${REMOVE_PACKAGES_REFS[*]}"
|
||||
|
||||
# Extra packages in rootfs (cached)
|
||||
"EXTRA_PACKAGES_ROOTFS=${EXTRA_PACKAGES_ROOTFS[*]}"
|
||||
"EXTRA_PACKAGES_ROOTFS_REFS=${EXTRA_PACKAGES_ROOTFS_REFS[*]}"
|
||||
|
||||
# Extra packages, in image (not cached)
|
||||
"EXTRA_PACKAGES_IMAGE=${EXTRA_PACKAGES_IMAGE[*]}"
|
||||
"EXTRA_PACKAGES_IMAGE_REFS=${EXTRA_PACKAGES_IMAGE_REFS[*]}"
|
||||
|
||||
# Desktop stuff; results are not mixed into main packages. Results in AGGREGATED_PACKAGES_DESKTOP.
|
||||
"BUILD_DESKTOP=${BUILD_DESKTOP}"
|
||||
"DESKTOP_ENVIRONMENT=${DESKTOP_ENVIRONMENT}"
|
||||
"DESKTOP_ENVIRONMENT_CONFIG_NAME=${DESKTOP_ENVIRONMENT_CONFIG_NAME}"
|
||||
"DESKTOP_APPGROUPS_SELECTED=${DESKTOP_APPGROUPS_SELECTED}"
|
||||
|
||||
# Those are processed by Python, but not part of rootfs / main packages; results in AGGREGATED_PACKAGES_IMAGE_INSTALL
|
||||
# These two vars are made readonly after sourcing the board / family config, so can't be used in extensions and such.
|
||||
"PACKAGE_LIST_FAMILY=${PACKAGE_LIST_FAMILY}"
|
||||
"PACKAGE_LIST_BOARD=${PACKAGE_LIST_BOARD}"
|
||||
|
||||
# Those are processed by Python, but not part of rootfs / main packages; results in AGGREGATED_PACKAGES_IMAGE_UNINSTALL
|
||||
# These two vars are made readonly after sourcing the board / family config, so can't be used in extensions and such.
|
||||
"PACKAGE_LIST_BOARD_REMOVE=${PACKAGE_LIST_BOARD_REMOVE}"
|
||||
"PACKAGE_LIST_FAMILY_REMOVE=${PACKAGE_LIST_FAMILY_REMOVE}"
|
||||
)
|
||||
run_host_command_logged env -i "${aggregation_params_quoted[@]@Q}" python3 "${SRC}/lib/tools/aggregation.py"
|
||||
#run_host_command_logged cat "${temp_file_for_aggregation}"
|
||||
# shellcheck disable=SC1090
|
||||
source "${temp_file_for_aggregation}" # SOURCE IT!
|
||||
run_host_command_logged rm "${temp_file_for_aggregation}"
|
||||
}
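# After sourcing, aggregation.py is expected to have defined (see lib/tools/aggregation.py):
#   arrays:  AGGREGATED_PACKAGES_DEBOOTSTRAP, AGGREGATED_PACKAGES_ROOTFS,
#            AGGREGATED_PACKAGES_IMAGE, AGGREGATED_PACKAGES_DESKTOP, AGGREGATED_APT_SOURCES
#   strings: AGGREGATED_ROOTFS_HASH, AGGREGATED_DEBOOTSTRAP_COMPONENTS_COMMA,
#            AGGREGATED_DESKTOP_POSTINST, AGGREGATED_DESKTOP_CREATE_DESKTOP_PACKAGE,
#            AGGREGATED_DESKTOP_BSP_POSTINST, AGGREGATED_DESKTOP_BSP_PREPARE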
|
||||
|
||||
@@ -70,7 +70,6 @@ function desktop_environment_check_if_valid() {
|
||||
function interactive_desktop_main_configuration() {
|
||||
[[ $BUILD_DESKTOP != "yes" ]] && return 0 # Only for desktops.
|
||||
|
||||
# Myy : Once we got a list of selected groups, parse the PACKAGE_LIST inside configuration.sh
|
||||
DESKTOP_ELEMENTS_DIR="${SRC}/config/desktop/${RELEASE}"
|
||||
DESKTOP_CONFIGS_DIR="${DESKTOP_ELEMENTS_DIR}/environments"
|
||||
DESKTOP_CONFIG_PREFIX="config_"
|
||||
|
||||
@@ -11,6 +11,14 @@
|
||||
|
||||
function do_main_configuration() {
|
||||
display_alert "Starting main configuration" "${MOUNT_UUID}" "info"
|
||||
|
||||
# Obsolete stuff, make sure not defined, then make readonly
|
||||
declare -g -r DEBOOTSTRAP_LIST
|
||||
declare -g -r PACKAGE_LIST
|
||||
declare -g -r PACKAGE_LIST_BOARD
|
||||
declare -g -r PACKAGE_LIST_ADDITIONAL
|
||||
declare -g -r PACKAGE_LIST_EXTERNAL
|
||||
declare -g -r PACKAGE_LIST_DESKTOP
|
||||
|
||||
# common options
|
||||
# daily beta build contains date in subrevision
|
||||
@@ -260,6 +268,10 @@ function do_main_configuration() {
|
||||
allowing separate functions for different branches. You're welcome.
|
||||
POST_FAMILY_CONFIG_PER_BRANCH
|
||||
|
||||
# Lets make some variables readonly.
|
||||
# We don't want anything changing them, it's exclusively for family config.
|
||||
declare -g -r PACKAGE_LIST_FAMILY="${PACKAGE_LIST_FAMILY}"
|
||||
declare -g -r PACKAGE_LIST_FAMILY_REMOVE="${PACKAGE_LIST_FAMILY_REMOVE}"
|
||||
|
||||
# A global killswitch for extlinux.
|
||||
if [[ "${SRC_EXTLINUX}" == "yes" ]]; then
|
||||
@@ -290,64 +302,64 @@ function do_main_configuration() {
|
||||
DISTRIBUTION="Debian"
|
||||
fi
|
||||
|
||||
CLI_CONFIG_PATH="${SRC}/config/cli/${RELEASE}"
|
||||
DEBOOTSTRAP_CONFIG_PATH="${CLI_CONFIG_PATH}/debootstrap"
|
||||
|
||||
AGGREGATION_SEARCH_ROOT_ABSOLUTE_DIRS="
|
||||
${SRC}/config
|
||||
${SRC}/config/optional/_any_board/_config
|
||||
${SRC}/config/optional/architectures/${ARCH}/_config
|
||||
${SRC}/config/optional/families/${LINUXFAMILY}/_config
|
||||
${SRC}/config/optional/boards/${BOARD}/_config
|
||||
${USERPATCHES_PATH}
|
||||
"
|
||||
|
||||
DEBOOTSTRAP_SEARCH_RELATIVE_DIRS="
|
||||
cli/_all_distributions/debootstrap
|
||||
cli/${RELEASE}/debootstrap
|
||||
"
|
||||
|
||||
CLI_SEARCH_RELATIVE_DIRS="
|
||||
cli/_all_distributions/main
|
||||
cli/${RELEASE}/main
|
||||
"
|
||||
|
||||
PACKAGES_SEARCH_ROOT_ABSOLUTE_DIRS="
|
||||
${SRC}/packages
|
||||
${SRC}/config/optional/_any_board/_packages
|
||||
${SRC}/config/optional/architectures/${ARCH}/_packages
|
||||
${SRC}/config/optional/families/${LINUXFAMILY}/_packages
|
||||
${SRC}/config/optional/boards/${BOARD}/_packages
|
||||
"
|
||||
|
||||
DESKTOP_ENVIRONMENTS_SEARCH_RELATIVE_DIRS="
|
||||
desktop/_all_distributions/environments/_all_environments
|
||||
desktop/_all_distributions/environments/${DESKTOP_ENVIRONMENT}
|
||||
desktop/_all_distributions/environments/${DESKTOP_ENVIRONMENT}/${DESKTOP_ENVIRONMENT_CONFIG_NAME}
|
||||
desktop/${RELEASE}/environments/_all_environments
|
||||
desktop/${RELEASE}/environments/${DESKTOP_ENVIRONMENT}
|
||||
desktop/${RELEASE}/environments/${DESKTOP_ENVIRONMENT}/${DESKTOP_ENVIRONMENT_CONFIG_NAME}
|
||||
"
|
||||
|
||||
DESKTOP_APPGROUPS_SEARCH_RELATIVE_DIRS="
|
||||
desktop/_all_distributions/appgroups
|
||||
desktop/_all_distributions/environments/${DESKTOP_ENVIRONMENT}/appgroups
|
||||
desktop/${RELEASE}/appgroups
|
||||
desktop/${RELEASE}/environments/${DESKTOP_ENVIRONMENT}/appgroups
|
||||
"
|
||||
|
||||
DEBOOTSTRAP_LIST="$(one_line aggregate_all_debootstrap "packages" " ")"
|
||||
DEBOOTSTRAP_COMPONENTS="$(one_line aggregate_all_debootstrap "components" " ")"
|
||||
DEBOOTSTRAP_COMPONENTS="${DEBOOTSTRAP_COMPONENTS// /,}"
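# e.g. a space-separated "main contrib" becomes "main,contrib" for debootstrap's --components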
|
||||
PACKAGE_LIST="$(one_line aggregate_all_cli "packages" " ")"
|
||||
PACKAGE_LIST_ADDITIONAL="$(one_line aggregate_all_cli "packages.additional" " ")"
|
||||
PACKAGE_LIST_EXTERNAL="$(one_line aggregate_all_cli "packages.external" " ")"
|
||||
|
||||
if [[ $BUILD_DESKTOP == "yes" ]]; then
|
||||
PACKAGE_LIST_DESKTOP+="$(one_line aggregate_all_desktop "packages" " ")"
|
||||
# @TODO: desktop vs packages.external?
|
||||
PACKAGE_LIST_DESKTOP+=" ${PACKAGE_LIST_EXTERNAL}" # external packages are only included in desktop builds
|
||||
fi
|
||||
# CLI_CONFIG_PATH="${SRC}/config/cli/${RELEASE}"
|
||||
# DEBOOTSTRAP_CONFIG_PATH="${CLI_CONFIG_PATH}/debootstrap"
|
||||
#
|
||||
# AGGREGATION_SEARCH_ROOT_ABSOLUTE_DIRS="
|
||||
#${SRC}/config
|
||||
#${SRC}/config/optional/_any_board/_config
|
||||
#${SRC}/config/optional/architectures/${ARCH}/_config
|
||||
#${SRC}/config/optional/families/${LINUXFAMILY}/_config
|
||||
#${SRC}/config/optional/boards/${BOARD}/_config
|
||||
#${USERPATCHES_PATH}
|
||||
#"
|
||||
#
|
||||
# DEBOOTSTRAP_SEARCH_RELATIVE_DIRS="
|
||||
#cli/_all_distributions/debootstrap
|
||||
#cli/${RELEASE}/debootstrap
|
||||
#"
|
||||
#
|
||||
# CLI_SEARCH_RELATIVE_DIRS="
|
||||
#cli/_all_distributions/main
|
||||
#cli/${RELEASE}/main
|
||||
#"
|
||||
#
|
||||
# PACKAGES_SEARCH_ROOT_ABSOLUTE_DIRS="
|
||||
#${SRC}/packages
|
||||
#${SRC}/config/optional/_any_board/_packages
|
||||
#${SRC}/config/optional/architectures/${ARCH}/_packages
|
||||
#${SRC}/config/optional/families/${LINUXFAMILY}/_packages
|
||||
#${SRC}/config/optional/boards/${BOARD}/_packages
|
||||
#"
|
||||
#
|
||||
# DESKTOP_ENVIRONMENTS_SEARCH_RELATIVE_DIRS="
|
||||
#desktop/_all_distributions/environments/_all_environments
|
||||
#desktop/_all_distributions/environments/${DESKTOP_ENVIRONMENT}
|
||||
#desktop/_all_distributions/environments/${DESKTOP_ENVIRONMENT}/${DESKTOP_ENVIRONMENT_CONFIG_NAME}
|
||||
#desktop/${RELEASE}/environments/_all_environments
|
||||
#desktop/${RELEASE}/environments/${DESKTOP_ENVIRONMENT}
|
||||
#desktop/${RELEASE}/environments/${DESKTOP_ENVIRONMENT}/${DESKTOP_ENVIRONMENT_CONFIG_NAME}
|
||||
#"
|
||||
#
|
||||
# DESKTOP_APPGROUPS_SEARCH_RELATIVE_DIRS="
|
||||
#desktop/_all_distributions/appgroups
|
||||
#desktop/_all_distributions/environments/${DESKTOP_ENVIRONMENT}/appgroups
|
||||
#desktop/${RELEASE}/appgroups
|
||||
#desktop/${RELEASE}/environments/${DESKTOP_ENVIRONMENT}/appgroups
|
||||
#"
|
||||
#
|
||||
# DEBOOTSTRAP_LIST="$(one_line aggregate_all_debootstrap "packages" " ")"
|
||||
# DEBOOTSTRAP_COMPONENTS="$(one_line aggregate_all_debootstrap "components" " ")"
|
||||
# DEBOOTSTRAP_COMPONENTS="${DEBOOTSTRAP_COMPONENTS// /,}"
|
||||
# PACKAGE_LIST="$(one_line aggregate_all_cli "packages" " ")"
|
||||
# PACKAGE_LIST_ADDITIONAL="$(one_line aggregate_all_cli "packages.additional" " ")"
|
||||
# PACKAGE_LIST_EXTERNAL="$(one_line aggregate_all_cli "packages.external" " ")"
|
||||
#
|
||||
# if [[ $BUILD_DESKTOP == "yes" ]]; then
|
||||
# PACKAGE_LIST_DESKTOP+="$(one_line aggregate_all_desktop "packages" " ")"
|
||||
# # @TODO: desktop vs packages.external?
|
||||
# PACKAGE_LIST_DESKTOP+=" ${PACKAGE_LIST_EXTERNAL}" # external packages are only included in desktop builds
|
||||
# fi
|
||||
|
||||
DEBIAN_MIRROR='deb.debian.org/debian'
|
||||
DEBIAN_SECURTY='security.debian.org/'
|
||||
@@ -421,51 +433,52 @@ desktop/${RELEASE}/environments/${DESKTOP_ENVIRONMENT}/appgroups
|
||||
|
||||
[[ -n $APT_PROXY_ADDR ]] && display_alert "Using custom apt-cacher-ng address" "$APT_PROXY_ADDR" "info"
|
||||
|
||||
display_alert "Build final package list" "after possible override" "debug"
|
||||
PACKAGE_LIST="$PACKAGE_LIST $PACKAGE_LIST_RELEASE $PACKAGE_LIST_ADDITIONAL"
|
||||
PACKAGE_MAIN_LIST="$(cleanup_list PACKAGE_LIST)"
|
||||
# Time to calculate packages... or is it?
|
||||
aggregate_all_packages
|
||||
|
||||
[[ $BUILD_DESKTOP == yes ]] && PACKAGE_LIST="$PACKAGE_LIST $PACKAGE_LIST_DESKTOP"
|
||||
# @TODO: what is the use of changing PACKAGE_LIST after PACKAGE_MAIN_LIST was set?
|
||||
PACKAGE_LIST="$(cleanup_list PACKAGE_LIST)"
|
||||
# Now, supposedly PACKAGE_LIST_RM is complete by now.
|
||||
|
||||
# remove any packages defined in PACKAGE_LIST_RM in lib.config
|
||||
aggregated_content="${PACKAGE_LIST_RM} "
|
||||
aggregate_all_cli "packages.remove" " "
|
||||
aggregate_all_desktop "packages.remove" " "
|
||||
PACKAGE_LIST_RM="$(cleanup_list aggregated_content)"
|
||||
unset aggregated_content
|
||||
|
||||
aggregated_content=""
|
||||
aggregate_all_cli "packages.uninstall" " "
|
||||
aggregate_all_desktop "packages.uninstall" " "
|
||||
PACKAGE_LIST_UNINSTALL="$(cleanup_list aggregated_content)"
|
||||
unset aggregated_content
|
||||
|
||||
# @TODO: rpardini: this has to stop. refactor this into array or dict-based and stop the madness.
|
||||
if [[ -n $PACKAGE_LIST_RM ]]; then
|
||||
# Turns out that \b can be tricked by dashes.
|
||||
# So if you remove mesa-utils but still want to install "mesa-utils-extra"
|
||||
# a "\b(mesa-utils)\b" filter will convert "mesa-utils-extra" to "-extra".
|
||||
# \W is not tricked by this but consumes the surrounding spaces, so we
|
||||
# replace the occurrence by one space, to avoid sticking the next word to
|
||||
# the previous one after consuming the spaces.
|
||||
DEBOOTSTRAP_LIST=$(sed -r "s/\W($(tr ' ' '|' <<< ${PACKAGE_LIST_RM}))\W/ /g" <<< " ${DEBOOTSTRAP_LIST} ")
|
||||
PACKAGE_LIST=$(sed -r "s/\W($(tr ' ' '|' <<< ${PACKAGE_LIST_RM}))\W/ /g" <<< " ${PACKAGE_LIST} ")
|
||||
PACKAGE_MAIN_LIST=$(sed -r "s/\W($(tr ' ' '|' <<< ${PACKAGE_LIST_RM}))\W/ /g" <<< " ${PACKAGE_MAIN_LIST} ")
|
||||
if [[ $BUILD_DESKTOP == "yes" ]]; then
|
||||
PACKAGE_LIST_DESKTOP=$(sed -r "s/\W($(tr ' ' '|' <<< ${PACKAGE_LIST_RM}))\W/ /g" <<< " ${PACKAGE_LIST_DESKTOP} ")
|
||||
# Removing double spaces... AGAIN, since we might have used a sed on them
|
||||
# Do not quote the variables. This would defeat the trick.
|
||||
PACKAGE_LIST_DESKTOP="$(echo ${PACKAGE_LIST_DESKTOP})"
|
||||
fi
|
||||
|
||||
# Removing double spaces... AGAIN, since we might have used a sed on them
|
||||
# Do not quote the variables. This would defeat the trick.
|
||||
DEBOOTSTRAP_LIST="$(echo ${DEBOOTSTRAP_LIST})"
|
||||
PACKAGE_LIST="$(echo ${PACKAGE_LIST})"
|
||||
PACKAGE_MAIN_LIST="$(echo ${PACKAGE_MAIN_LIST})"
|
||||
fi
|
||||
#display_alert "Build final package list" "after possible override" "debug"
|
||||
#PACKAGE_LIST="$PACKAGE_LIST $PACKAGE_LIST_RELEASE $PACKAGE_LIST_ADDITIONAL"
|
||||
#
|
||||
#[[ $BUILD_DESKTOP == yes ]] && PACKAGE_LIST="$PACKAGE_LIST $PACKAGE_LIST_DESKTOP"
|
||||
#PACKAGE_LIST="$(cleanup_list PACKAGE_LIST)"
|
||||
#
|
||||
## remove any packages defined in PACKAGE_LIST_RM in lib.config
|
||||
#aggregated_content="${PACKAGE_LIST_RM} "
|
||||
#aggregate_all_cli "packages.remove" " "
|
||||
#aggregate_all_desktop "packages.remove" " "
|
||||
#PACKAGE_LIST_RM="$(cleanup_list aggregated_content)"
|
||||
#unset aggregated_content
|
||||
#
|
||||
#aggregated_content=""
|
||||
#aggregate_all_cli "packages.uninstall" " "
|
||||
#aggregate_all_desktop "packages.uninstall" " "
|
||||
#PACKAGE_LIST_UNINSTALL="$(cleanup_list aggregated_content)"
|
||||
#unset aggregated_content
|
||||
#
|
||||
## @TODO: rpardini: this has to stop. refactor this into array or dict-based and stop the madness.
|
||||
#if [[ -n $PACKAGE_LIST_RM ]]; then
|
||||
# # Turns out that \b can be tricked by dashes.
|
||||
# # So if you remove mesa-utils but still want to install "mesa-utils-extra"
|
||||
# # a "\b(mesa-utils)\b" filter will convert "mesa-utils-extra" to "-extra".
|
||||
# # \W is not tricked by this but consumes the surrounding spaces, so we
|
||||
# # replace the occurrence by one space, to avoid sticking the next word to
|
||||
# # the previous one after consuming the spaces.
|
||||
# DEBOOTSTRAP_LIST=$(sed -r "s/\W($(tr ' ' '|' <<< ${PACKAGE_LIST_RM}))\W/ /g" <<< " ${DEBOOTSTRAP_LIST} ")
|
||||
# PACKAGE_LIST=$(sed -r "s/\W($(tr ' ' '|' <<< ${PACKAGE_LIST_RM}))\W/ /g" <<< " ${PACKAGE_LIST} ")
|
||||
# if [[ $BUILD_DESKTOP == "yes" ]]; then
|
||||
# PACKAGE_LIST_DESKTOP=$(sed -r "s/\W($(tr ' ' '|' <<< ${PACKAGE_LIST_RM}))\W/ /g" <<< " ${PACKAGE_LIST_DESKTOP} ")
|
||||
# # Removing double spaces... AGAIN, since we might have used a sed on them
|
||||
# # Do not quote the variables. This would defeat the trick.
|
||||
# PACKAGE_LIST_DESKTOP="$(echo ${PACKAGE_LIST_DESKTOP})"
|
||||
# fi
|
||||
#
|
||||
# # Removing double spaces... AGAIN, since we might have used a sed on them
|
||||
# # Do not quote the variables. This would defeat the trick.
|
||||
# DEBOOTSTRAP_LIST="$(echo ${DEBOOTSTRAP_LIST})"
|
||||
# PACKAGE_LIST="$(echo ${PACKAGE_LIST})"
|
||||
#fi
|
||||
|
||||
# Give the option to configure DNS server used in the chroot during the build process
|
||||
[[ -z $NAMESERVER ]] && NAMESERVER="1.0.0.1" # default is cloudflare alternate
|
||||
|
||||
lib/functions/configuration/package-lists.sh (new file, 44 lines)
@@ -0,0 +1,44 @@
|
||||
function get_caller_reference() {
|
||||
# grab the caller function name, its source file and line number
|
||||
local caller_ref="${FUNCNAME[2]}"
|
||||
local caller_file="${BASH_SOURCE[2]}"
|
||||
local caller_line="${BASH_LINENO[1]}"
|
||||
# the format below must match the parser in parse_env_for_list() in lib/tools/common/aggregation_utils.py
|
||||
declare -g caller_reference="${caller_ref}:${caller_file}:${caller_line}"
|
||||
}
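# Example of the resulting reference (hypothetical caller): "family_tweaks:config/sources/families/myfamily.conf:42"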
|
||||
|
||||
# Adds to the main package list.
|
||||
function add_packages_to_rootfs() {
|
||||
get_caller_reference
|
||||
declare -g -a EXTRA_PACKAGES_ROOTFS=("${EXTRA_PACKAGES_ROOTFS[@]}")
|
||||
declare -g -a EXTRA_PACKAGES_ROOTFS_REFS=("${EXTRA_PACKAGES_ROOTFS_REFS[@]}")
|
||||
for package in "${@}"; do
|
||||
# add package to the list
|
||||
EXTRA_PACKAGES_ROOTFS+=("${package}")
|
||||
EXTRA_PACKAGES_ROOTFS_REFS+=("${caller_reference}")
|
||||
done
|
||||
}
|
||||
|
||||
# Adds to the image package list; they're not cached in the rootfs.
|
||||
function add_packages_to_image() {
|
||||
get_caller_reference
|
||||
declare -g -a EXTRA_PACKAGES_IMAGE=("${EXTRA_PACKAGES_IMAGE[@]}")
|
||||
declare -g -a EXTRA_PACKAGES_IMAGE_REFS=("${EXTRA_PACKAGES_IMAGE_REFS[@]}")
|
||||
for package in "${@}"; do
|
||||
# add package to the list
|
||||
EXTRA_PACKAGES_IMAGE+=("${package}")
|
||||
EXTRA_PACKAGES_IMAGE_REFS+=("${caller_reference}")
|
||||
done
|
||||
}
|
||||
|
||||
# Removes a package from all lists: debootstrap, rootfs, desktop and image.
|
||||
function remove_packages() {
|
||||
get_caller_reference
|
||||
declare -g -a REMOVE_PACKAGES=("${REMOVE_PACKAGES[@]}")
|
||||
declare -g -a REMOVE_PACKAGES_REFS=("${REMOVE_PACKAGES_REFS[@]}")
|
||||
for package in "${@}"; do
|
||||
# add package to the list
|
||||
REMOVE_PACKAGES+=("${package}")
|
||||
REMOVE_PACKAGES_REFS+=("${caller_reference}")
|
||||
done
|
||||
}
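# Example usage from a board/family config or an extension (package names are hypothetical):
#   add_packages_to_rootfs htop             # cached into the rootfs
#   add_packages_to_image my-board-tool     # installed into the image only, not cached
#   remove_packages gdm3                    # removed from all lists (debootstrap, rootfs, desktop, image)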
|
||||
@@ -9,6 +9,10 @@ function chroot_sdcard_apt_get_install_download_only() {
|
||||
chroot_sdcard_apt_get --no-install-recommends --download-only install "$@"
|
||||
}
|
||||
|
||||
function chroot_sdcard_apt_get_install_dry_run() {
|
||||
chroot_sdcard_apt_get --no-install-recommends --dry-run install "$@"
|
||||
}
|
||||
|
||||
function chroot_sdcard_apt_get_remove() {
|
||||
DONT_MAINTAIN_APT_CACHE="yes" chroot_sdcard_apt_get remove "$@"
|
||||
}
|
||||
|
||||
@@ -58,6 +58,11 @@ function prepare_and_config_main_build_single() {
|
||||
LINUXFAMILY="${BOARDFAMILY}" # @TODO: wtf? why? this is (100%?) rewritten by family config!
|
||||
# this sourced the board config. do_main_configuration will source the family file.
|
||||
|
||||
# Lets make some variables readonly.
|
||||
# We don't want anything changing them, it's exclusively for board config.
|
||||
declare -g -r PACKAGE_LIST_BOARD="${PACKAGE_LIST_BOARD}"
|
||||
declare -g -r PACKAGE_LIST_BOARD_REMOVE="${PACKAGE_LIST_BOARD_REMOVE}"
|
||||
|
||||
[[ -z $KERNEL_TARGET ]] && exit_with_error "Board configuration does not define valid kernel config"
|
||||
|
||||
interactive_config_ask_branch
|
||||
|
||||
@@ -1,59 +1,22 @@
|
||||
#!/usr/bin/env bash
|
||||
|
||||
add_apt_sources() {
|
||||
local potential_paths=""
|
||||
local sub_dirs_to_check=". "
|
||||
if [[ ! -z "${SELECTED_CONFIGURATION+x}" ]]; then
|
||||
sub_dirs_to_check+="config_${SELECTED_CONFIGURATION}"
|
||||
fi
|
||||
|
||||
# @TODO: rpardini: The logic here is meant to be evolved over time. Originally, all of this only ran when BUILD_DESKTOP=yes.
|
||||
# Igor had bumped it to run on all builds, but that adds external sources to cli and minimal.
|
||||
# @TODO: attention: this only handles the apt-sources; the packages (names) themselves are aggregated somewhere else
|
||||
[[ "${BUILD_DESKTOP}" == "yes" ]] && get_all_potential_paths "${DEBOOTSTRAP_SEARCH_RELATIVE_DIRS}" "${sub_dirs_to_check}" "sources/apt"
|
||||
[[ "${BUILD_DESKTOP}" == "yes" ]] && get_all_potential_paths "${CLI_SEARCH_RELATIVE_DIRS}" "${sub_dirs_to_check}" "sources/apt"
|
||||
[[ "${BUILD_DESKTOP}" == "yes" ]] && get_all_potential_paths "${DESKTOP_ENVIRONMENTS_SEARCH_RELATIVE_DIRS}" "." "sources/apt"
|
||||
[[ "${BUILD_DESKTOP}" == "yes" ]] && get_all_potential_paths "${DESKTOP_APPGROUPS_SEARCH_RELATIVE_DIRS}" "${DESKTOP_APPGROUPS_SELECTED}" "sources/apt"
|
||||
|
||||
# AGGREGATED_APT_SOURCES and AGGREGATED_APT_SOURCES_DICT are pre-resolved by aggregation.py
|
||||
display_alert "Adding additional apt sources" "add_apt_sources()" "debug"
|
||||
mkdir -p "${SDCARD}"/usr/share/keyrings/
|
||||
|
||||
for apt_sources_dirpath in ${potential_paths}; do
|
||||
if [[ -d "${apt_sources_dirpath}" ]]; then
|
||||
for apt_source_filepath in "${apt_sources_dirpath}/"*.source; do
|
||||
apt_source_filepath=$(echo "${apt_source_filepath}" | sed -re 's/(^.*[^/])\.[^./]*$/\1/')
|
||||
local new_apt_source
|
||||
local apt_source_gpg_filepath
|
||||
local apt_source_gpg_filename
|
||||
local apt_source_filename
|
||||
for apt_source in "${AGGREGATED_APT_SOURCES[@]}"; do
|
||||
apt_source_base="${AGGREGATED_APT_SOURCES_DICT["${apt_source}"]}"
|
||||
apt_source_file="${SRC}/${apt_source_base}.source"
|
||||
gpg_file="${SRC}/${apt_source_base}.gpg"
|
||||
|
||||
new_apt_source="$(cat "${apt_source_filepath}.source")"
|
||||
apt_source_gpg_filepath="${apt_source_filepath}.gpg"
|
||||
apt_source_gpg_filename="$(basename "${apt_source_gpg_filepath}")"
|
||||
apt_source_filename="$(basename "${apt_source_filepath}").list"
|
||||
|
||||
display_alert "Adding APT Source" "${new_apt_source}" "info"
|
||||
|
||||
# @TODO: rpardini, why do PPAs get apt-key and others get keyrings GPG?
|
||||
|
||||
if [[ "${new_apt_source}" == ppa* ]]; then
|
||||
# @TODO: needs software-properties-common installed.
|
||||
chroot_sdcard add-apt-repository -y -n "${new_apt_source}" # -y -> Assume yes, -n -> no apt-get update
|
||||
if [[ -f "${apt_source_gpg_filepath}" ]]; then
|
||||
display_alert "Adding GPG Key" "via apt-key add (deprecated): ${apt_source_gpg_filename}"
|
||||
run_host_command_logged cp -pv "${apt_source_gpg_filepath}" "${SDCARD}/tmp/${apt_source_gpg_filename}"
|
||||
chroot_sdcard apt-key add "/tmp/${apt_source_gpg_filename}"
|
||||
fi
|
||||
else
|
||||
# installation without software-properties-common, sources.list + key.gpg
|
||||
echo "${new_apt_source}" > "${SDCARD}/etc/apt/sources.list.d/${apt_source_filename}"
|
||||
if [[ -f "${apt_source_gpg_filepath}" ]]; then
|
||||
display_alert "Adding GPG Key" "via keyrings: ${apt_source_gpg_filename}"
|
||||
mkdir -p "${SDCARD}"/usr/share/keyrings/
|
||||
run_host_command_logged cp -pv "${apt_source_gpg_filepath}" "${SDCARD}"/usr/share/keyrings/
|
||||
fi
|
||||
fi
|
||||
|
||||
done
|
||||
display_alert "Adding APT Source" "${apt_source}" "info"
|
||||
# installation without software-properties-common, sources.list + key.gpg
|
||||
run_host_command_logged cp -pv "${apt_source_file}" "${SDCARD}/etc/apt/sources.list.d/${apt_source}.list"
|
||||
if [[ -f "${gpg_file}" ]]; then
|
||||
# @TODO good chance to test the key for expiration date, and WARN if < 60 days, and ERROR if < 30 days
|
||||
display_alert "Adding GPG Key" "via keyrings: ${apt_source}.list"
|
||||
run_host_command_logged cp -pv "${gpg_file}" "${SDCARD}/usr/share/keyrings/${apt_source}.gpg"
|
||||
fi
|
||||
done
|
||||
}
|
||||
|
||||
@@ -18,7 +18,7 @@ get_or_create_rootfs_cache_chroot_sdcard() {
|
||||
fi
|
||||
fi
|
||||
|
||||
local packages_hash=$(get_package_list_hash)
|
||||
local packages_hash="${AGGREGATED_ROOTFS_HASH}" # Produced by aggregation.py
|
||||
local packages_hash=${packages_hash:0:8}
|
||||
|
||||
local cache_type="cli"
|
||||
@@ -102,16 +102,17 @@ function create_new_rootfs_cache() {
|
||||
local debootstrap_apt_mirror="http://${APT_PROXY_ADDR:-localhost:3142}/${APT_MIRROR}"
|
||||
acng_check_status_or_restart
|
||||
fi
|
||||
|
||||
# @TODO: one day: https://gitlab.mister-muffin.de/josch/mmdebstrap/src/branch/main/mmdebstrap
|
||||
|
||||
display_alert "Installing base system" "Stage 1/2" "info"
|
||||
cd "${SDCARD}" || exit_with_error "cray-cray about SDCARD" "${SDCARD}" # this will prevent error sh: 0: getcwd() failed
|
||||
|
||||
local -a deboostrap_arguments=(
|
||||
"--variant=minbase" # minimal base variant. go ask Debian about it.
|
||||
"--include=${DEBOOTSTRAP_LIST// /,}" # from aggregation?
|
||||
${PACKAGE_LIST_EXCLUDE:+ --exclude="${PACKAGE_LIST_EXCLUDE// /,}"} # exclude some
|
||||
"--arch=${ARCH}" # the arch
|
||||
"--components=${DEBOOTSTRAP_COMPONENTS}" # from aggregation?
|
||||
"--variant=minbase" # minimal base variant. go ask Debian about it.
|
||||
"--arch=${ARCH}" # the arch
|
||||
"'--include=${AGGREGATED_PACKAGES_DEBOOTSTRAP_COMMA}'" # from aggregation.py
|
||||
"'--components=${AGGREGATED_DEBOOTSTRAP_COMPONENTS_COMMA}'" # from aggregation?
|
||||
)
|
||||
|
||||
# Small detour for local apt caching option.
|
||||
@@ -202,30 +203,37 @@ function create_new_rootfs_cache() {
|
||||
display_alert "Installing the main packages for" "Armbian" "info"
|
||||
export if_error_detail_message="Installation of Armbian main packages for ${BRANCH} ${BOARD} ${RELEASE} ${DESKTOP_APPGROUPS_SELECTED} ${DESKTOP_ENVIRONMENT} ${BUILD_MINIMAL} failed"
|
||||
# First, try to download-only up to 3 times, to work around network/proxy problems.
|
||||
do_with_retries 3 chroot_sdcard_apt_get_install_download_only "$PACKAGE_MAIN_LIST"
|
||||
# AGGREGATED_PACKAGES_ROOTFS is generated by aggregation.py
|
||||
chroot_sdcard_apt_get_install_dry_run "${AGGREGATED_PACKAGES_ROOTFS[@]}"
|
||||
do_with_retries 3 chroot_sdcard_apt_get_install_download_only "${AGGREGATED_PACKAGES_ROOTFS[@]}"
|
||||
|
||||
# Now do the install, all packages should have been downloaded by now
|
||||
chroot_sdcard_apt_get_install "$PACKAGE_MAIN_LIST"
|
||||
chroot_sdcard_apt_get_install "${AGGREGATED_PACKAGES_ROOTFS[@]}"
|
||||
|
||||
if [[ $BUILD_DESKTOP == "yes" ]]; then
|
||||
local apt_desktop_install_flags=""
|
||||
if [[ ! -z ${DESKTOP_APT_FLAGS_SELECTED+x} ]]; then
|
||||
for flag in ${DESKTOP_APT_FLAGS_SELECTED}; do
|
||||
apt_desktop_install_flags+=" --install-${flag}"
|
||||
done
|
||||
else
|
||||
# Myy : Using the previous default option, if the variable isn't defined
|
||||
# And ONLY if it's not defined !
|
||||
apt_desktop_install_flags+=" --no-install-recommends"
|
||||
fi
|
||||
## This is not defined anywhere.... @TODO: remove?
|
||||
#local apt_desktop_install_flags=""
|
||||
#if [[ ! -z ${DESKTOP_APT_FLAGS_SELECTED+x} ]]; then
|
||||
# for flag in ${DESKTOP_APT_FLAGS_SELECTED}; do
|
||||
# apt_desktop_install_flags+=" --install-${flag}"
|
||||
# done
|
||||
#else
|
||||
# # Myy : Using the previous default option, if the variable isn't defined
|
||||
# # And ONLY if it's not defined !
|
||||
# apt_desktop_install_flags+=" --no-install-recommends"
|
||||
#fi
|
||||
|
||||
display_alert "Installing the desktop packages for" "Armbian" "info"
|
||||
|
||||
# dry-run, make sure everything can be installed.
|
||||
chroot_sdcard_apt_get_install_dry_run "${AGGREGATED_PACKAGES_DESKTOP[@]}"
|
||||
|
||||
# Retry download-only 3 times first.
|
||||
do_with_retries 3 chroot_sdcard_apt_get_install_download_only ${apt_desktop_install_flags} $PACKAGE_LIST_DESKTOP
|
||||
do_with_retries 3 chroot_sdcard_apt_get_install_download_only "${AGGREGATED_PACKAGES_DESKTOP[@]}"
|
||||
|
||||
# Then do the actual install.
|
||||
export if_error_detail_message="Installation of Armbian desktop packages for ${BRANCH} ${BOARD} ${RELEASE} ${DESKTOP_APPGROUPS_SELECTED} ${DESKTOP_ENVIRONMENT} ${BUILD_MINIMAL} failed"
|
||||
chroot_sdcard_apt_get install ${apt_desktop_install_flags} $PACKAGE_LIST_DESKTOP
|
||||
chroot_sdcard_apt_get install "${AGGREGATED_PACKAGES_DESKTOP[@]}"
|
||||
fi
|
||||
|
||||
# stage: check md5 sum of installed packages. Just in case.
|
||||
@@ -234,10 +242,12 @@ function create_new_rootfs_cache() {
|
||||
chroot_sdcard debsums --silent
|
||||
|
||||
# Remove packages from packages.uninstall
|
||||
# @TODO: aggregation.py handling of this...
|
||||
display_alert "Uninstall packages" "$PACKAGE_LIST_UNINSTALL" "info"
|
||||
# shellcheck disable=SC2086
|
||||
chroot_sdcard_apt_get purge $PACKAGE_LIST_UNINSTALL
|
||||
|
||||
# @TODO: if we remove with --purge then this is not needed
|
||||
# stage: purge residual packages
|
||||
display_alert "Purging residual packages for" "Armbian" "info"
|
||||
PURGINGPACKAGES=$(chroot $SDCARD /bin/bash -c "dpkg -l | grep \"^rc\" | awk '{print \$2}' | tr \"\n\" \" \"")
|
||||
@@ -306,21 +316,6 @@ function create_new_rootfs_cache() {
|
||||
return 0 # protect against possible future short-circuiting above this
|
||||
}
|
||||
|
||||
# get_package_list_hash
|
||||
#
|
||||
# returns md5 hash for current package list and rootfs cache version
|
||||
|
||||
get_package_list_hash() {
|
||||
local package_arr exclude_arr
|
||||
local list_content
|
||||
read -ra package_arr <<< "${DEBOOTSTRAP_LIST} ${PACKAGE_LIST}"
|
||||
read -ra exclude_arr <<< "${PACKAGE_LIST_EXCLUDE}"
|
||||
(
|
||||
printf "%s\n" "${package_arr[@]}"
|
||||
printf -- "-%s\n" "${exclude_arr[@]}"
|
||||
) | sort -u | md5sum | cut -d' ' -f 1
|
||||
}
|
||||
|
||||
# get_rootfs_cache_list <cache_type> <packages_hash>
|
||||
#
|
||||
# returns a list of versions of all available caches, from remote and local.
|
||||
|
||||
@@ -257,26 +257,18 @@ function install_distribution_agnostic() {
|
||||
display_alert "Updating" "apt package lists"
|
||||
do_with_retries 3 chroot_sdcard_apt_get update
|
||||
|
||||
# install family packages
|
||||
if [[ -n ${PACKAGE_LIST_FAMILY} ]]; then
|
||||
_pkg_list=${PACKAGE_LIST_FAMILY}
|
||||
display_alert "Installing PACKAGE_LIST_FAMILY packages" "${_pkg_list}"
|
||||
# shellcheck disable=SC2086 # we need to expand here. retry 3 times download-only to counter apt-cacher-ng failures.
|
||||
do_with_retries 3 chroot_sdcard_apt_get_install_download_only ${_pkg_list}
|
||||
# install image packages; AGGREGATED_PACKAGES_IMAGE is produced by aggregation.py
|
||||
# and includes the old PACKAGE_LIST_BOARD and PACKAGE_LIST_FAMILY
|
||||
if [[ ${#AGGREGATED_PACKAGES_IMAGE[@]} -gt 0 ]]; then
|
||||
display_alert "Installing AGGREGATED_PACKAGES_IMAGE packages" "${AGGREGATED_PACKAGES_IMAGE[*]}"
|
||||
|
||||
# shellcheck disable=SC2086 # we need to expand here.
|
||||
chroot_sdcard_apt_get_install ${_pkg_list}
|
||||
fi
|
||||
# dry-run, make sure everything can be installed.
|
||||
chroot_sdcard_apt_get_install_dry_run "${AGGREGATED_PACKAGES_IMAGE[@]}"
|
||||
|
||||
# install board packages
|
||||
if [[ -n ${PACKAGE_LIST_BOARD} ]]; then
|
||||
_pkg_list=${PACKAGE_LIST_BOARD}
|
||||
display_alert "Installing PACKAGE_LIST_BOARD packages" "${_pkg_list}"
|
||||
# shellcheck disable=SC2086 # we need to expand here. retry 3 times download-only to counter apt-cacher-ng failures.
|
||||
do_with_retries 3 chroot_sdcard_apt_get_install_download_only ${_pkg_list}
|
||||
# retry 3 times download-only to counter apt-cacher-ng failures.
|
||||
do_with_retries 3 chroot_sdcard_apt_get_install_download_only "${AGGREGATED_PACKAGES_IMAGE[@]}"
|
||||
|
||||
# shellcheck disable=SC2086 # we need to expand.
|
||||
chroot_sdcard_apt_get_install ${_pkg_list}
|
||||
chroot_sdcard_apt_get_install "${AGGREGATED_PACKAGES_IMAGE[@]}"
|
||||
fi
|
||||
|
||||
# remove family packages
|
||||
@@ -286,21 +278,22 @@ function install_distribution_agnostic() {
|
||||
chroot_sdcard_apt_get_remove --auto-remove ${_pkg_list}
|
||||
fi
|
||||
|
||||
# remove board packages. loop over the list to remove, check if they're actually installed, then remove individually.
|
||||
if [[ -n ${PACKAGE_LIST_BOARD_REMOVE} ]]; then
|
||||
_pkg_list=${PACKAGE_LIST_BOARD_REMOVE}
|
||||
declare -a currently_installed_packages
|
||||
# shellcheck disable=SC2207 # I wanna split, thanks.
|
||||
currently_installed_packages=($(chroot_sdcard_with_stdout dpkg-query --show --showformat='${Package} '))
|
||||
for PKG_REMOVE in ${_pkg_list}; do
|
||||
# shellcheck disable=SC2076 # I wanna match literally, thanks.
|
||||
if [[ " ${currently_installed_packages[*]} " =~ " ${PKG_REMOVE} " ]]; then
|
||||
display_alert "Removing PACKAGE_LIST_BOARD_REMOVE package" "${PKG_REMOVE}"
|
||||
chroot_sdcard_apt_get_remove --auto-remove "${PKG_REMOVE}"
|
||||
fi
|
||||
done
|
||||
unset currently_installed_packages
|
||||
fi
|
||||
# @TODO check if this still necessary or not.
|
||||
## remove board packages. loop over the list to remove, check if they're actually installed, then remove individually.
|
||||
#if [[ -n ${PACKAGE_LIST_BOARD_REMOVE} ]]; then
|
||||
# _pkg_list=${PACKAGE_LIST_BOARD_REMOVE}
|
||||
# declare -a currently_installed_packages
|
||||
# # shellcheck disable=SC2207 # I wanna split, thanks.
|
||||
# currently_installed_packages=($(chroot_sdcard_with_stdout dpkg-query --show --showformat='${Package} '))
|
||||
# for PKG_REMOVE in ${_pkg_list}; do
|
||||
# # shellcheck disable=SC2076 # I wanna match literally, thanks.
|
||||
# if [[ " ${currently_installed_packages[*]} " =~ " ${PKG_REMOVE} " ]]; then
|
||||
# display_alert "Removing PACKAGE_LIST_BOARD_REMOVE package" "${PKG_REMOVE}"
|
||||
# chroot_sdcard_apt_get_remove --auto-remove "${PKG_REMOVE}"
|
||||
# fi
|
||||
# done
|
||||
# unset currently_installed_packages
|
||||
#fi
|
||||
|
||||
# install u-boot
|
||||
# @TODO: add install_bootloader() extension method, refactor into u-boot extension
|
||||
|
||||
@@ -289,6 +289,15 @@ set -o errexit ## set -e : exit the script if any statement returns a non-true
|
||||
# shellcheck source=lib/functions/configuration/menu.sh
|
||||
source "${SRC}"/lib/functions/configuration/menu.sh
|
||||
|
||||
# no errors tolerated. invoked before each sourced file to make sure.
|
||||
#set -o pipefail # trace ERR through pipes - will be enabled "soon"
|
||||
#set -o nounset ## set -u : exit the script if you try to use an uninitialised variable - one day will be enabled
|
||||
set -o errtrace # trace ERR through - enabled
|
||||
set -o errexit ## set -e : exit the script if any statement returns a non-true return value - enabled
|
||||
### lib/functions/configuration/package-lists.sh
|
||||
# shellcheck source=lib/functions/configuration/package-lists.sh
|
||||
source "${SRC}"/lib/functions/configuration/package-lists.sh
|
||||
|
||||
# no errors tolerated. invoked before each sourced file to make sure.
|
||||
#set -o pipefail # trace ERR through pipes - will be enabled "soon"
|
||||
#set -o nounset ## set -u : exit the script if you try to use an uninitialised variable - one day will be enabled
|
||||
|
||||
lib/tools/aggregation.py (new file, 194 lines)
@@ -0,0 +1,194 @@
|
||||
#!/usr/bin/env python3
|
||||
|
||||
# Disclaimer: this script was written solely using GitHub Copilot.
|
||||
# I wrote "prompt" comments and the whole thing was generated by Copilot.
|
||||
# Unfortunately I removed most original comments/prompts after code was generated, I should have kept them all in...
|
||||
# I'm not sure if I should be proud or ashamed of this. <-- this was suggested by Copilot too.
|
||||
# -- rpardini, 23/11/2022
|
||||
|
||||
import hashlib
|
||||
import os
|
||||
|
||||
import common.aggregation_utils as util
|
||||
|
||||
# Read SRC from the environment, treat it.
|
||||
armbian_build_directory = util.get_from_env_or_bomb("SRC")
|
||||
if not os.path.isdir(armbian_build_directory):
|
||||
raise Exception("SRC is not a directory")
|
||||
|
||||
# OUTPUT from the environment, treat it.
|
||||
output_file = util.get_from_env_or_bomb("OUTPUT")
|
||||
with open(output_file, "w") as out:
|
||||
out.write("")
|
||||
|
||||
BUILD_DESKTOP = util.yes_or_no_or_bomb(util.get_from_env_or_bomb("BUILD_DESKTOP"))
|
||||
INCLUDE_EXTERNAL_PACKAGES = True
|
||||
ARCH = util.get_from_env_or_bomb("ARCH")
|
||||
DESKTOP_ENVIRONMENT = util.get_from_env("DESKTOP_ENVIRONMENT")
|
||||
DESKTOP_ENVIRONMENT_CONFIG_NAME = util.get_from_env("DESKTOP_ENVIRONMENT_CONFIG_NAME")
|
||||
RELEASE = util.get_from_env_or_bomb("RELEASE") # "kinetic"
|
||||
LINUXFAMILY = util.get_from_env_or_bomb("LINUXFAMILY")
|
||||
BOARD = util.get_from_env_or_bomb("BOARD")
|
||||
USERPATCHES_PATH = util.get_from_env_or_bomb("USERPATCHES_PATH")
|
||||
|
||||
# Show the environment
|
||||
#print("Environment:")
|
||||
#for k, v in os.environ.items():
|
||||
# print("{}={}".format(k, v))
|
||||
|
||||
util.SELECTED_CONFIGURATION = util.get_from_env_or_bomb("SELECTED_CONFIGURATION") # "cli_standard"
|
||||
util.DESKTOP_APPGROUPS_SELECTED = util.parse_env_for_tokens("DESKTOP_APPGROUPS_SELECTED") # ["browsers", "chat"]
|
||||
util.SRC = armbian_build_directory
|
||||
|
||||
util.AGGREGATION_SEARCH_ROOT_ABSOLUTE_DIRS = [
|
||||
f"{armbian_build_directory}/config",
|
||||
f"{armbian_build_directory}/config/optional/_any_board/_config",
|
||||
f"{armbian_build_directory}/config/optional/architectures/{ARCH}/_config",
|
||||
f"{armbian_build_directory}/config/optional/families/{LINUXFAMILY}/_config",
|
||||
f"{armbian_build_directory}/config/optional/boards/{BOARD}/_config",
|
||||
f"{USERPATCHES_PATH}"
|
||||
]
|
||||
|
||||
util.DEBOOTSTRAP_SEARCH_RELATIVE_DIRS = ["cli/_all_distributions/debootstrap", f"cli/{RELEASE}/debootstrap"]
|
||||
util.CLI_SEARCH_RELATIVE_DIRS = ["cli/_all_distributions/main", f"cli/{RELEASE}/main"]
|
||||
|
||||
util.DESKTOP_ENVIRONMENTS_SEARCH_RELATIVE_DIRS = [
|
||||
f"desktop/_all_distributions/environments/_all_environments",
|
||||
f"desktop/_all_distributions/environments/{DESKTOP_ENVIRONMENT}",
|
||||
f"desktop/_all_distributions/environments/{DESKTOP_ENVIRONMENT}/{DESKTOP_ENVIRONMENT_CONFIG_NAME}",
|
||||
f"desktop/{RELEASE}/environments/_all_environments",
|
||||
f"desktop/{RELEASE}/environments/{DESKTOP_ENVIRONMENT}",
|
||||
f"desktop/{RELEASE}/environments/{DESKTOP_ENVIRONMENT}/{DESKTOP_ENVIRONMENT_CONFIG_NAME}"]
|
||||
|
||||
util.DESKTOP_APPGROUPS_SEARCH_RELATIVE_DIRS = [
|
||||
f"desktop/_all_distributions/appgroups",
|
||||
f"desktop/_all_distributions/environments/{DESKTOP_ENVIRONMENT}/appgroups",
|
||||
f"desktop/{RELEASE}/appgroups",
|
||||
f"desktop/{RELEASE}/environments/{DESKTOP_ENVIRONMENT}/appgroups"]
|
||||
|
||||
# Debootstrap.
|
||||
debootstrap_packages = util.aggregate_all_debootstrap("packages")
|
||||
debootstrap_packages_remove = util.aggregate_all_debootstrap("packages.remove")
|
||||
|
||||
# both main and additional result in the same thing, just different filenames.
|
||||
rootfs_packages_main = util.aggregate_all_cli("packages")
|
||||
rootfs_packages_additional = util.aggregate_all_cli("packages.additional")
|
||||
rootfs_packages_external = util.aggregate_all_cli("packages.external") # @TODO: enable/disable this
|
||||
rootfs_packages_all = util.merge_lists(rootfs_packages_main, rootfs_packages_additional, "add")
|
||||
rootfs_packages_all = util.merge_lists(rootfs_packages_all, rootfs_packages_external, "add")
|
||||
rootfs_packages_remove = util.aggregate_all_cli("packages.remove")
|
||||
|
||||
# Desktop environment packages; packages + packages.external
|
||||
desktop_packages_main = util.aggregate_all_desktop("packages")
|
||||
desktop_packages_external = util.aggregate_all_desktop("packages.external")
|
||||
desktop_packages_additional = util.aggregate_all_desktop("packages.additional")
|
||||
desktop_packages_all = util.merge_lists(desktop_packages_main, desktop_packages_external, "add")
|
||||
desktop_packages_all = util.merge_lists(desktop_packages_all, desktop_packages_additional, "add")
|
||||
desktop_packages_remove = util.aggregate_all_desktop("packages.remove")
|
||||
|
||||
env_list_remove = util.parse_env_for_list("REMOVE_PACKAGES")
|
||||
env_list_extra_rootfs = util.parse_env_for_list("EXTRA_PACKAGES_ROOTFS")
|
||||
env_list_extra_image = util.parse_env_for_list("EXTRA_PACKAGES_IMAGE")
|
||||
env_package_list_board = util.parse_env_for_list(
|
||||
"PACKAGE_LIST_BOARD", {"function": "board", "path": "board.conf", "line": 0})
|
||||
env_package_list_family = util.parse_env_for_list(
|
||||
"PACKAGE_LIST_FAMILY", {"function": "family", "path": "family.conf", "line": 0})
|
||||
env_package_list_board_remove = util.parse_env_for_list(
|
||||
"PACKAGE_LIST_BOARD_REMOVE", {"function": "board_remove", "path": "board.conf", "line": 0})
|
||||
env_package_list_family_remove = util.parse_env_for_list(
    "PACKAGE_LIST_FAMILY_REMOVE", {"function": "family_remove", "path": "family.conf", "line": 0})
|
||||
|
||||
# Now calculate the final lists.
|
||||
|
||||
# debootstrap is the aggregated list, minus the packages we want to remove.
|
||||
AGGREGATED_PACKAGES_DEBOOTSTRAP = util.merge_lists(debootstrap_packages, debootstrap_packages_remove, "remove")
|
||||
AGGREGATED_PACKAGES_DEBOOTSTRAP = util.merge_lists(AGGREGATED_PACKAGES_DEBOOTSTRAP, env_list_remove, "remove")
|
||||
|
||||
# components for debootstrap is just the aggregated list; or is it?
|
||||
AGGREGATED_DEBOOTSTRAP_COMPONENTS = util.aggregate_all_debootstrap("components")
|
||||
AGGREGATED_DEBOOTSTRAP_COMPONENTS_COMMA = ','.join(AGGREGATED_DEBOOTSTRAP_COMPONENTS).replace(' ', ',')
|
||||
|
||||
# The rootfs list; add the extras, and remove the removals.
|
||||
AGGREGATED_PACKAGES_ROOTFS = util.merge_lists(rootfs_packages_all, env_list_extra_rootfs, "add")
|
||||
AGGREGATED_PACKAGES_ROOTFS = util.merge_lists(AGGREGATED_PACKAGES_ROOTFS, rootfs_packages_remove, "remove")
|
||||
AGGREGATED_PACKAGES_ROOTFS = util.merge_lists(AGGREGATED_PACKAGES_ROOTFS, env_list_remove, "remove")
|
||||
|
||||
# The desktop list.
|
||||
AGGREGATED_PACKAGES_DESKTOP = util.merge_lists(desktop_packages_all, desktop_packages_remove, "remove")
|
||||
AGGREGATED_PACKAGES_DESKTOP = util.merge_lists(AGGREGATED_PACKAGES_DESKTOP, env_list_remove, "remove")
|
||||
|
||||
# the image list; this comes from env only; apply the removals.
|
||||
AGGREGATED_PACKAGES_IMAGE = util.merge_lists(env_list_extra_image, env_package_list_board, "add")
|
||||
AGGREGATED_PACKAGES_IMAGE = util.merge_lists(AGGREGATED_PACKAGES_IMAGE, env_package_list_family, "add")
|
||||
AGGREGATED_PACKAGES_IMAGE = util.merge_lists(AGGREGATED_PACKAGES_IMAGE, env_package_list_board_remove, "remove")
|
||||
AGGREGATED_PACKAGES_IMAGE = util.merge_lists(AGGREGATED_PACKAGES_IMAGE, env_package_list_family_remove, "remove")
|
||||
AGGREGATED_PACKAGES_IMAGE = util.merge_lists(AGGREGATED_PACKAGES_IMAGE, env_list_remove, "remove")
|
||||
|
||||
# Calculate a md5 hash of the list of packages, so we can use it as a cache key.
|
||||
# Attention: ROOTFS does not include DESKTOP. @TODO have 2 hashes, one for cli, one for cli+desktop.
|
||||
AGGREGATED_ROOTFS_HASH = hashlib.md5(
|
||||
(" ".join(AGGREGATED_PACKAGES_DEBOOTSTRAP) + " ".join(AGGREGATED_PACKAGES_ROOTFS)).encode("utf-8")).hexdigest()
|
||||
|
||||
# We need to aggregate some desktop stuff, which are not package lists, postinst contents and such.
|
||||
# For this case just find the potentials, and for each found, take the whole contents and join via newlines.
|
||||
AGGREGATED_DESKTOP_POSTINST = util.aggregate_all_desktop(
|
||||
"debian/postinst", util.aggregate_simple_contents_potential)
|
||||
AGGREGATED_DESKTOP_CREATE_DESKTOP_PACKAGE = util.aggregate_all_desktop(
|
||||
"armbian/create_desktop_package.sh", util.aggregate_simple_contents_potential)
|
||||
AGGREGATED_DESKTOP_BSP_POSTINST = util.aggregate_all_desktop(
|
||||
"debian/armbian-bsp-desktop/postinst", util.aggregate_simple_contents_potential)
|
||||
AGGREGATED_DESKTOP_BSP_PREPARE = util.aggregate_all_desktop(
|
||||
"debian/armbian-bsp-desktop/prepare.sh", util.aggregate_simple_contents_potential)
|
||||
|
||||
# Aggregate the apt-sources; only done if BUILD_DESKTOP is True, otherwise empty.
|
||||
AGGREGATED_APT_SOURCES = {}
|
||||
if BUILD_DESKTOP:
|
||||
apt_sources_debootstrap = util.aggregate_all_debootstrap("sources/apt", util.aggregate_apt_sources)
|
||||
apt_sources_cli = util.aggregate_all_cli("sources/apt", util.aggregate_apt_sources)
|
||||
apt_sources_desktop = util.aggregate_all_desktop("sources/apt", util.aggregate_apt_sources)
|
||||
AGGREGATED_APT_SOURCES = util.merge_lists(apt_sources_debootstrap, apt_sources_cli, "add")
|
||||
AGGREGATED_APT_SOURCES = util.merge_lists(AGGREGATED_APT_SOURCES, apt_sources_desktop, "add")
|
||||
|
||||
# ----------------------------------------------------------------------------------------------------------------------
|
||||
|
||||
|
||||
with open(output_file, "w") as out:
|
||||
# with sys.stdout as f:
|
||||
out.write("#!/usr/bin/env bash\n")
|
||||
out.write(
|
||||
util.prepare_bash_output_array_for_list(
|
||||
"AGGREGATED_PACKAGES_DEBOOTSTRAP", AGGREGATED_PACKAGES_DEBOOTSTRAP))
|
||||
out.write(util.prepare_bash_output_array_for_list(
|
||||
"AGGREGATED_PACKAGES_ROOTFS", AGGREGATED_PACKAGES_ROOTFS))
|
||||
out.write(util.prepare_bash_output_array_for_list(
|
||||
"AGGREGATED_PACKAGES_IMAGE", AGGREGATED_PACKAGES_IMAGE))
|
||||
out.write(util.prepare_bash_output_array_for_list(
|
||||
"AGGREGATED_PACKAGES_DESKTOP", AGGREGATED_PACKAGES_DESKTOP))
|
||||
|
||||
# The rootfs hash (md5) is used as a cache key.
|
||||
out.write(f"declare -g -r AGGREGATED_ROOTFS_HASH='{AGGREGATED_ROOTFS_HASH}'\n")
|
||||
|
||||
# Special case for components: debootstrap also wants a list of components, comma separated.
|
||||
out.write(
|
||||
f"declare -g -r AGGREGATED_DEBOOTSTRAP_COMPONENTS_COMMA='{AGGREGATED_DEBOOTSTRAP_COMPONENTS_COMMA}'\n")
|
||||
|
||||
# Single string stuff for desktop packages postinst's and preparation. @TODO use functions instead of eval.
|
||||
out.write(util.prepare_bash_output_single_string(
|
||||
"AGGREGATED_DESKTOP_POSTINST", AGGREGATED_DESKTOP_POSTINST))
|
||||
out.write(util.prepare_bash_output_single_string(
|
||||
"AGGREGATED_DESKTOP_CREATE_DESKTOP_PACKAGE", AGGREGATED_DESKTOP_CREATE_DESKTOP_PACKAGE))
|
||||
out.write(util.prepare_bash_output_single_string(
|
||||
"AGGREGATED_DESKTOP_BSP_POSTINST", AGGREGATED_DESKTOP_BSP_POSTINST))
|
||||
out.write(util.prepare_bash_output_single_string(
|
||||
"AGGREGATED_DESKTOP_BSP_PREPARE", AGGREGATED_DESKTOP_BSP_PREPARE))
|
||||
|
||||
# The apt sources.
|
||||
out.write(util.prepare_bash_output_array_for_list(
|
||||
"AGGREGATED_APT_SOURCES", AGGREGATED_APT_SOURCES, util.encode_source_base_path_extra))
|
||||
|
||||
# 2) @TODO: Some removals...
|
||||
|
||||
# aggregate_all_cli "packages.uninstall" " "
|
||||
# aggregate_all_desktop "packages.uninstall" " "
|
||||
# PACKAGE_LIST_UNINSTALL="$(cleanup_list aggregated_content)"
|
||||
# unset aggregated_content
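# For reference: the OUTPUT file written above is plain bash, later sourced by
# aggregate_all_packages(). Assuming prepare_bash_output_array_for_list() emits
# "declare -g" arrays (its exact format lives in aggregation_utils), a fragment
# might look like:
#   declare -g -a AGGREGATED_PACKAGES_ROOTFS=("bash-completion" "bsdmainutils" ...)
#   declare -g -r AGGREGATED_ROOTFS_HASH='0123456789abcdef0123456789abcdef'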
|
||||
lib/tools/common/aggregation_utils.py (new file, 312 lines)
@@ -0,0 +1,312 @@
|
||||
import fnmatch
|
||||
import os
|
||||
|
||||
AGGREGATION_SEARCH_ROOT_ABSOLUTE_DIRS = []
|
||||
DEBOOTSTRAP_SEARCH_RELATIVE_DIRS = []
|
||||
CLI_SEARCH_RELATIVE_DIRS = []
|
||||
DESKTOP_ENVIRONMENTS_SEARCH_RELATIVE_DIRS = []
|
||||
DESKTOP_APPGROUPS_SEARCH_RELATIVE_DIRS = []
|
||||
SELECTED_CONFIGURATION = None
|
||||
DESKTOP_APPGROUPS_SELECTED = []
|
||||
SRC = None
|
||||
|
||||
|
||||
def calculate_potential_paths(root_dirs, relative_dirs, sub_dirs, artifact_file, initial_paths=None):
    # Build the cartesian product of root dirs, relative dirs and sub dirs, and append the artifact
    # file name to each combination; optionally extend a previously-built dict of paths.
    if initial_paths is None:
        potential_paths = {"paths": []}
    else:
        potential_paths = initial_paths
    for root_dir in root_dirs:
        for rel_dir in relative_dirs:
            for sub_dir in sub_dirs:
                looked_for_file = f"{root_dir}/{rel_dir}/{sub_dir}/{artifact_file}"
                # simplify the path, removing any /./ or /../
                potential_paths["paths"].append(os.path.normpath(looked_for_file))
    # print(f"DEBUG Potential paths: {potential_paths['paths']}")
    return potential_paths

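# Hedged illustration (hypothetical directories): calculate_potential_paths(["/src/config", "/src/userpatches"],
# ["cli"], [".", "config_mini"], "packages") would yield something like
# {"paths": ["/src/config/cli/packages", "/src/config/cli/config_mini/packages", ...]};
# process_common_path_for_potentials() then strips the SRC prefix from every entry.

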
def process_common_path_for_potentials(potential_paths):
    # Use SRC as the common prefix (os.path.commonprefix would be an alternative), and strip it
    # from all the potential paths, so further processing works with SRC-relative paths.
    potential_paths["common_path"] = SRC + "/"  # os.path.commonprefix(potential_paths["paths"])
    potential_paths["paths"] = [path[len(potential_paths["common_path"]):] for path in potential_paths["paths"]]
    return potential_paths


def aggregate_packages_from_potential(potential_paths):
    # Read every existing candidate file line by line; each non-empty, non-comment line becomes one
    # aggregated entry, with a list of refs recording where (and via which symlink) it came from.
    aggregation_results = {}  # {"potential_paths": potential_paths}
    for path in potential_paths["paths"]:
        full_path = potential_paths["common_path"] + path
        if not os.path.isfile(full_path):
            # print(f"Skipping {path}, not a file")
            continue

        # Resolve the real path of the file, eliminating symlinks; remove the common prefix again.
        resolved_path = os.path.realpath(full_path)[len(potential_paths["common_path"]):]
        # the path in the debugging information is either just the path, or the symlink indication.
        symlink_to = None if resolved_path == path else resolved_path
        # print(f"Reading {path}")
        with open(full_path, "r") as f:
            line_counter = 0
            for line in f:
                line_counter += 1
                line = line.strip()
                if line == "" or line.startswith("#"):
                    continue
                if line not in aggregation_results:
                    aggregation_results[line] = {"content": line, "refs": []}
                aggregation_results[line]["refs"].append(
                    {"path": path, "line": line_counter, "symlink_to": symlink_to})
    return aggregation_results

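# Hedged illustration of the resulting structure (package name and path are hypothetical):
# {"htop": {"content": "htop", "refs": [{"path": "config/cli/packages", "line": 4, "symlink_to": None}]}}

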
def aggregate_simple_contents_potential(potential_paths):
    # Same candidate-path walk as above, but each existing file is read whole and keyed by its path;
    # presumably used for whole-file snippets (scripts) rather than line-oriented package lists.
    aggregation_results = {}  # {"potential_paths": potential_paths}
    for path in potential_paths["paths"]:
        full_path = potential_paths["common_path"] + path
        if not os.path.isfile(full_path):
            continue

        # Resolve the real path of the file, eliminating symlinks; remove the common prefix again.
        resolved_path = os.path.realpath(full_path)[len(potential_paths["common_path"]):]
        # the path in the debugging information is either just the path, or the symlink indication.
        symlink_to = None if resolved_path == path else resolved_path

        # Read the full contents of the file full_path as a string
        with open(full_path, "r") as f:
            contents = f.read()
        aggregation_results[path] = {"contents": contents, "refs": []}
        aggregation_results[path]["refs"].append({"path": path, "symlink_to": symlink_to})
    return aggregation_results


def find_files_in_directory(directory, glob_pattern):
    # Recursively walk 'directory' and return every file whose name matches the glob pattern.
    files = []
    for root, dir_names, filenames in os.walk(directory):
        for filename in fnmatch.filter(filenames, glob_pattern):
            files.append(os.path.join(root, filename))
    return files


def aggregate_apt_sources(potential_paths):
    # Candidate paths here are directories; every '*.source' file found below them contributes one
    # apt source, keyed by the file name (without extension), with the SRC-relative base path as content.
    aggregation_results = {}  # {"potential_paths": potential_paths}
    for path in potential_paths["paths"]:
        full_path = potential_paths["common_path"] + path
        if not os.path.isdir(full_path):
            continue
        # Resolve the real path of the directory, eliminating symlinks; remove the common prefix again.
        resolved_path = os.path.realpath(full_path)[len(potential_paths["common_path"]):]
        # the path in the debugging information is either just the path, or the symlink indication.
        symlink_to = None if resolved_path == path else resolved_path

        # find *.source in the directory
        files = find_files_in_directory(full_path, "*.source")
        for full_filename in files:
            source_name = os.path.basename(full_filename)[:-len(".source")]
            base_path = os.path.relpath(full_filename[:-len(".source")], SRC)
            if source_name not in aggregation_results:
                aggregation_results[source_name] = {"content": base_path, "refs": []}
            aggregation_results[source_name]["refs"].append({"path": path, "symlink_to": symlink_to})
    return aggregation_results

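# Hedged illustration (hypothetical source file): a file 'config/cli/sources/apt/wireguard.source'
# would produce {"wireguard": {"content": "config/cli/sources/apt/wireguard",
#                              "refs": [{"path": "config/cli/sources/apt", "symlink_to": None}]}}.

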
def remove_common_path_from_refs(merged):
    # Find the common prefix of all absolute ref paths, then strip it, so refs are reported relative.
    all_paths = []
    for item in merged:
        for ref in merged[item]["refs"]:
            if ref["path"].startswith("/"):
                all_paths.append(ref["path"])
    common_path = os.path.commonprefix(all_paths)
    for item in merged:
        for ref in merged[item]["refs"]:
            if ref["path"].startswith("/"):
                # remove the prefix
                ref["path"] = ref["path"][len(common_path):]
    return merged


# Produce a merged list from an environment variable, complete with the references.
def parse_env_for_list(env_name, fixed_ref=None):
    env_list = parse_env_for_tokens(env_name)
    if fixed_ref is None:
        refs = parse_env_for_tokens(env_name + "_REFS")
        # Sanity check: the number of refs should be the same as the number of items in the list.
        if len(env_list) != len(refs):
            raise Exception(f"Expected {len(env_list)} refs for {env_name}, got {len(refs)}")
        # Parse the refs; they are in the form "function:path:line".
        parsed_refs = []
        for ref in refs:
            split = ref.split(":")
            # sanity check, make sure we have 3 parts
            if len(split) != 3:
                raise Exception(f"Expected 3 parts in ref {ref}, got {len(split)}")
            parsed_refs.append({"function": split[0], "path": split[1], "line": split[2]})
    else:
        parsed_refs = [fixed_ref] * len(env_list)
    # Now create a dict; duplicates are eliminated, and their refs merged.
    merged = {}
    for i in range(len(env_list)):
        item = env_list[i]
        if item in merged:
            merged[item]["refs"].append(parsed_refs[i])
        else:
            merged[item] = {"content": item, "refs": [parsed_refs[i]]}
    return remove_common_path_from_refs(merged)

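# Hedged illustration (hypothetical environment): with FOO='nano vim nano' and
# FOO_REFS='main:/src/a.sh:1 main:/src/b.sh:2 extra:/src/c.sh:7', parse_env_for_list("FOO") returns
# two entries, 'nano' carrying two refs and 'vim' carrying one, with the common '/src/' prefix
# stripped from the ref paths.

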
def merge_lists(base, extra, optype="add"):
    # Merge 'extra' into 'base': refs are concatenated, and newly-introduced items get their status
    # set to the given optype. Each ref gets an 'operation' marker ('initial' for base refs,
    # the optype for extra refs).
    merged = {}
    for item in base:
        merged[item] = base[item]
        if "status" not in merged[item]:
            merged[item]["status"] = "added"
        # loop over the refs, and mark them as "initial"
        for ref in merged[item]["refs"]:
            # if the key 'operation' is not present, add it
            if "operation" not in ref:
                ref["operation"] = "initial"
    for item in extra:
        for ref in extra[item]["refs"]:
            # if the key 'operation' is not present, add it
            if "operation" not in ref:
                ref["operation"] = optype
        if item in merged:
            # note: an item already present keeps its existing status; only its refs are extended.
            resulting = base[item]
            resulting["refs"] += extra[item]["refs"]
            merged[item] = resulting
        else:
            merged[item] = extra[item]
            merged[item]["status"] = optype
    return merged

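# Hedged illustration (hypothetical lists): merging {"nano": ...} (base) with {"vim": ...} (extra)
# and optype "add" yields both items; items whose status ends up as "remove" are later filtered out
# by prepare_bash_output_array_for_list() when the bash arrays are emitted.

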
def aggregate_all_debootstrap(artifact, aggregation_function=aggregate_packages_from_potential):
    potential_paths = calculate_potential_paths(
        AGGREGATION_SEARCH_ROOT_ABSOLUTE_DIRS, DEBOOTSTRAP_SEARCH_RELATIVE_DIRS,
        [".", f"config_{SELECTED_CONFIGURATION}"], artifact)
    return aggregation_function(process_common_path_for_potentials(potential_paths))


def aggregate_all_cli(artifact, aggregation_function=aggregate_packages_from_potential):
    potential_paths = calculate_potential_paths(
        AGGREGATION_SEARCH_ROOT_ABSOLUTE_DIRS, CLI_SEARCH_RELATIVE_DIRS,
        [".", f"config_{SELECTED_CONFIGURATION}"], artifact)
    return aggregation_function(process_common_path_for_potentials(potential_paths))


def aggregate_all_desktop(artifact, aggregation_function=aggregate_packages_from_potential):
    # Desktop aggregation accumulates two sets of candidate paths: the desktop environment dirs,
    # plus the dirs of every selected appgroup.
    potential_paths = calculate_potential_paths(
        AGGREGATION_SEARCH_ROOT_ABSOLUTE_DIRS, DESKTOP_ENVIRONMENTS_SEARCH_RELATIVE_DIRS, ["."], artifact)
    potential_paths = calculate_potential_paths(
        AGGREGATION_SEARCH_ROOT_ABSOLUTE_DIRS, DESKTOP_APPGROUPS_SEARCH_RELATIVE_DIRS,
        DESKTOP_APPGROUPS_SELECTED, artifact, potential_paths)
    return aggregation_function(process_common_path_for_potentials(potential_paths))

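# Hedged usage sketch (the first artifact name is hypothetical; the second mirrors the apt-sources
# call made by the aggregation script):
#   pkgs = aggregate_all_cli("packages")
#   sources = aggregate_all_desktop("sources/apt", aggregate_apt_sources)

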
def parse_env_for_tokens(env_name):
    result = []
    # Read the environment; if None, return an empty list.
    val = os.environ.get(env_name, None)
    if val is None:
        return result
    # tokenize val: split on whitespace (including line breaks), trim the tokens, drop empty ones.
    return [token for token in [token.strip() for token in (val.split())] if token != ""]


def get_from_env(env_name):
    value = os.environ.get(env_name, None)
    if value is not None:
        value = value.strip()
    return value


def get_from_env_or_bomb(env_name):
    value = get_from_env(env_name)
    if value is None:
        raise Exception(f"{env_name} environment var not set")
    if value == "":
        raise Exception(f"{env_name} environment var is empty")
    return value


def yes_or_no_or_bomb(value):
    if value == "yes":
        return True
    if value == "no":
        return False
    raise Exception(f"Expected yes or no, got {value}")


def join_refs_for_bash_single_string(refs):
    # Render each ref as 'operation:path:line' (or just the path), append any symlink target,
    # and join them with spaces into one line, used in the _EXPLAIN arrays and the
    # '### START/END Source' markers.
    single_line_refs = []
    for ref in refs:
        if "operation" in ref and "line" in ref:
            one_line = ref["operation"] + ":" + ref["path"] + ":" + str(ref["line"])
        else:
            one_line = ref["path"]
        if "symlink_to" in ref:
            if ref["symlink_to"] is not None:
                one_line += ":symlink->" + ref["symlink_to"]
        single_line_refs.append(one_line)
    return " ".join(single_line_refs)

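# Hedged illustration (hypothetical ref): {"operation": "initial", "path": "config/cli/packages",
# "line": 3, "symlink_to": None} renders as 'initial:config/cli/packages:3'; a symlinked ref gains
# a trailing ':symlink-><target>'.

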
def prepare_bash_output_array_for_list(output_array_name, merged_list, extra_dict_function=None):
    values_list = []
    explain_dict = {}
    extra_dict = {}
    for key in merged_list:
        value = merged_list[key]
        # print(f"key: {key}, value: {value}")
        refs = value["refs"]
        # join the refs into a single explanation string
        refs_str = join_refs_for_bash_single_string(refs)
        explain_dict[key] = refs_str
        if value["status"] != "remove":
            values_list.append(key)
            if extra_dict_function is not None:
                extra_dict[key] = extra_dict_function(value["content"])

    # prepare the values as a bash array definition:
    # quote each value with single quotes, one value per line.
    values_list_bash = "\n".join([f"\t'{value}'" for value in values_list])
    actual_var = f"declare -r -g -a {output_array_name}=(\n{values_list_bash}\n)\n"

    # Some utilities (like debootstrap) want a list that is comma separated.
    # Since doing that in bash is infernal, let's do it here.
    values_list_comma = ",".join(values_list)
    comma_var = f"declare -r -g {output_array_name}_COMMA='{values_list_comma}'\n"

    explain_list_bash = "\n".join([f"\t['{value}']='{explain_dict[value]}'" for value in explain_dict.keys()])
    explain_var = f"declare -r -g -A {output_array_name}_EXPLAIN=(\n{explain_list_bash}\n)\n"

    # @TODO also an array with all the elements in explain; so we can do a for loop over it.
    extra_dict_decl = ""
    if len(extra_dict) > 0:
        extra_list_bash = "\n".join([f"\t['{value}']='{extra_dict[value]}'" for value in extra_dict.keys()])
        extra_dict_decl = f"declare -r -g -A {output_array_name}_DICT=(\n{extra_list_bash}\n)\n"

    final_value = actual_var + "\n" + extra_dict_decl + "\n" + comma_var + "\n" + explain_var
    # print(final_value)
    return final_value

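# Hedged illustration (hypothetical values) of the companion variables emitted for an array named FOO:
#   declare -r -g FOO_COMMA='nano,vim'
#   declare -r -g -A FOO_EXPLAIN=(
#       ['nano']='initial:config/cli/packages:3'
#       ['vim']='add:userpatches/packages:1'
#   )
# (FOO_DICT is also emitted when an extra_dict_function is passed, as done for the apt sources.)

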
def prepare_bash_output_single_string(output_array_name, merged_list):
    # Concatenate the full contents of every aggregated file, wrapped in '### START/END Source'
    # markers, and emit it as a single readonly bash variable fed from a quoted heredoc.
    values_list = []
    for key in merged_list:
        value = merged_list[key]
        refs_str = join_refs_for_bash_single_string(value["refs"])
        # print(f"key: {key}, value: {value}")
        values_list.append(
            "### START Source: " + refs_str + "\n" + value["contents"] + "\n" +
            "### END Source: " + refs_str + "\n\n")

    values_list_bash = "\n".join(values_list)
    return f"declare -g {output_array_name}" + "\n" + (
        f"{output_array_name}=\"$(cat <<-'EOD_{output_array_name}_EOD'\n" +
        f"{values_list_bash}\nEOD_{output_array_name}_EOD\n)\"\n" + "\n"
    ) + f"declare -r -g {output_array_name}" + "\n"

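# Hedged illustration (hypothetical contents; the variable name matches one actually emitted) of the
# generated fragment:
#   declare -g AGGREGATED_DESKTOP_POSTINST
#   AGGREGATED_DESKTOP_POSTINST="$(cat <<-'EOD_AGGREGATED_DESKTOP_POSTINST_EOD'
#   ### START Source: config/desktop/.../postinst
#   ...script snippet...
#   ### END Source: config/desktop/.../postinst
#   EOD_AGGREGATED_DESKTOP_POSTINST_EOD
#   )"
#   declare -r -g AGGREGATED_DESKTOP_POSTINST

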
def encode_source_base_path_extra(contents_dict):
    # Identity helper, passed as extra_dict_function for the apt sources: the 'content' value (the
    # SRC-relative base path of the .source file) is emitted unchanged into the _DICT array.
    return contents_dict