diff --git a/lib/functions/cli/cli-jsoninfo.sh b/lib/functions/cli/cli-jsoninfo.sh new file mode 100644 index 000000000..5be61de0e --- /dev/null +++ b/lib/functions/cli/cli-jsoninfo.sh @@ -0,0 +1,19 @@ +function cli_json_info_pre_run() { + # "gimme root on a Linux machine" + cli_standard_relaunch_docker_or_sudo +} + +function cli_json_info_run() { + display_alert "Generating JSON info" "for all boards; wait" "info" + + # So call a Python launcher. + # @TODO: this works without it right now, since all the python stuff works with no external packages + # - python debian packages hostdeps? (-dev, -pip, virtualenv, etc) + # - run the virtualenv (messy?) + + # The info extractor itself... + run_host_command_logged python3 "${SRC}"/lib/tools/info.py ">" "${SRC}/output/info.json" + + # Also convert output to CSV for easy import into Google Sheets etc + run_host_command_logged python3 "${SRC}"/lib/tools/json2csv.py "<" "${SRC}/output/info.json" ">" "${SRC}/output/info.csv" +} diff --git a/lib/functions/cli/commands.sh b/lib/functions/cli/commands.sh index 16164c7b2..6c13a13da 100644 --- a/lib/functions/cli/commands.sh +++ b/lib/functions/cli/commands.sh @@ -10,11 +10,13 @@ function armbian_register_commands() { ["vagrant"]="vagrant" # thus requires cli_vagrant_pre_run and cli_vagrant_run - ["requirements"]="requirements" # implemented in cli_requirements_pre_run and cli_requirements_run # @TODO + ["requirements"]="requirements" # implemented in cli_requirements_pre_run and cli_requirements_run - ["config-dump"]="config_dump" # implemented in cli_config_dump_pre_run and cli_config_dump_run # @TODO + ["config-dump"]="config_dump" # implemented in cli_config_dump_pre_run and cli_config_dump_run ["configdump"]="config_dump" # idem + ["json-info"]="json_info" # implemented in cli_json_info_pre_run and cli_json_info_run + ["build"]="standard_build" # implemented in cli_standard_build_pre_run and cli_standard_build_run ["undecided"]="undecided" # implemented in 
cli_undecided_pre_run and cli_undecided_run - relaunches either build or docker diff --git a/lib/library-functions.sh b/lib/library-functions.sh index 53e540071..db388dd96 100644 --- a/lib/library-functions.sh +++ b/lib/library-functions.sh @@ -55,6 +55,15 @@ set -o errexit ## set -e : exit the script if any statement returns a non-true # shellcheck source=lib/functions/cli/cli-docker.sh source "${SRC}"/lib/functions/cli/cli-docker.sh +# no errors tolerated. invoked before each sourced file to make sure. +#set -o pipefail # trace ERR through pipes - will be enabled "soon" +#set -o nounset ## set -u : exit the script if you try to use an uninitialised variable - one day will be enabled +set -o errtrace # trace ERR through - enabled +set -o errexit ## set -e : exit the script if any statement returns a non-true return value - enabled +### lib/functions/cli/cli-jsoninfo.sh +# shellcheck source=lib/functions/cli/cli-jsoninfo.sh +source "${SRC}"/lib/functions/cli/cli-jsoninfo.sh + # no errors tolerated. invoked before each sourced file to make sure. 
#set -o pipefail # trace ERR through pipes - will be enabled "soon" #set -o nounset ## set -u : exit the script if you try to use an uninitialised variable - one day will be enabled diff --git a/lib/tools/info.py b/lib/tools/info.py index 21bee6f30..4d4cb9975 100755 --- a/lib/tools/info.py +++ b/lib/tools/info.py @@ -53,7 +53,8 @@ def map_to_armbian_params(map_params): def run_armbian_compile_and_parse(path_to_compile_sh, armbian_src_path, compile_params): - exec_cmd = ([path_to_compile_sh] + map_to_armbian_params(compile_params)) + exec_cmd = ([path_to_compile_sh] + ["config-dump"] + map_to_armbian_params(compile_params)) + # eprint("Running command: '{}' ", exec_cmd) result = None logs = ["Not available"] try: @@ -70,7 +71,7 @@ def run_armbian_compile_and_parse(path_to_compile_sh, armbian_src_path, compile_ except subprocess.CalledProcessError as e: eprint( "Error calling Armbian: params: {}, return code: {}, stderr: {}".format( - compile_params, e.returncode, e.stderr + compile_params, e.returncode, e.stderr.split("\n")[-1] ) ) return {"in": compile_params, "out": {}, "logs": e.stderr.split("\n"), "config_ok": False} @@ -80,7 +81,7 @@ def run_armbian_compile_and_parse(path_to_compile_sh, armbian_src_path, compile_ # parse list, split by newline, remove armbian_src_path logs = armbian_value_parse_list(result.stderr, "\n", armbian_src_path) - # Now parse it with regex-power! + # Now parse it with regex-power! # regex = r"^declare (..) (.*?)=\"(.*?)\"$" # old multiline version regex = r"declare (..) (.*?)=\"(.*?)\"" test_str = result.stdout @@ -151,7 +152,7 @@ def parse_board_file_for_static_info(board_file, board_id): # Parse KERNEL_TARGET line. 
kernel_target_matches = re.findall(r"^(export )?KERNEL_TARGET=\"(.*)\"", "\n".join(file_lines), re.MULTILINE) kernel_targets = kernel_target_matches[0][1].split(",") - eprint("Possible kernel branches for board: ", board_id, " : ", kernel_targets) + # eprint("Possible kernel branches for board: ", board_id, " : ", kernel_targets) return { "BOARD_FILE_HARDWARE_DESC": hw_desc_clean, @@ -161,11 +162,11 @@ def parse_board_file_for_static_info(board_file, board_id): def get_info_for_one_board(board_file, board_name, common_params, board_info): - eprint( - "Getting info for board '{}' branch '{}' in file '{}'".format( - board_name, common_params["BRANCH"], board_file - ) - ) + # eprint( + # "Getting info for board '{}' branch '{}' in file '{}'".format( + # board_name, common_params["BRANCH"], board_file + # ) + # ) # eprint("Running Armbian bash for board '{}'".format(board_name)) try: @@ -194,13 +195,13 @@ if True: raise e # now loop over gathered infos every_info = [] - with concurrent.futures.ProcessPoolExecutor(max_workers=32) as executor: + with concurrent.futures.ProcessPoolExecutor() as executor: # max_workers=32 every_future = [] for board in all_boards.keys(): board_info = info_for_board[board] for possible_branch in board_info["BOARD_POSSIBLE_BRANCHES"]: all_params = common_compile_params | board_compile_params | {"BRANCH": possible_branch} - eprint("Submitting future for board {} with BRANCH={}".format(board, possible_branch)) + # eprint("Submitting future for board {} with BRANCH={}".format(board, possible_branch)) future = executor.submit(get_info_for_one_board, all_boards[board], board, all_params, board_info) every_future.append(future) diff --git a/lib/tools/json2csv.py b/lib/tools/json2csv.py index 68c1decb0..ace41b93d 100755 --- a/lib/tools/json2csv.py +++ b/lib/tools/json2csv.py @@ -38,16 +38,30 @@ for obj in flat: columns = columns_map.keys() -eprint("columns: {}".format(columns_map)) +eprint("columns: {}".format(len(columns))) -eprint("columns: 
{}".format(columns)) +# Now, find the columns of which all values are the same +# and remove them +columns_to_remove = [] +for column in columns: + values = [] + for obj in flat: + value = obj.get(column) + values.append(value) + if len(set(values)) == 1: + columns_to_remove.append(column) + +eprint("columns with all-identical values: {}".format(len(columns_to_remove))) + +# Now actually filter columns, removing columns_to_remove +columns = [column for column in columns if column not in columns_to_remove] import csv -with open('boards_vs_branches.csv', 'w', newline='') as csvfile: - fieldnames = columns - writer = csv.DictWriter(csvfile, fieldnames=fieldnames, extrasaction='ignore') +writer = csv.DictWriter(sys.stdout, fieldnames=columns, extrasaction='ignore') - writer.writeheader() - for obj in flat: - writer.writerow(obj) +writer.writeheader() +for obj in flat: + writer.writerow(obj) + +eprint("Done writing to stdout.")