diff --git a/lib/python/qmk/cli/find.py b/lib/python/qmk/cli/find.py
index 2836eb8a54..f2135bbc16 100644
--- a/lib/python/qmk/cli/find.py
+++ b/lib/python/qmk/cli/find.py
@@ -23,7 +23,7 @@ def find(cli):
     if len(cli.args.filter) == 0 and len(cli.args.print) > 0:
         cli.log.warning('No filters supplied -- keymaps not parsed, unable to print requested values.')

-    targets = search_keymap_targets(cli.args.keymap, cli.args.filter, cli.args.print)
+    targets = search_keymap_targets([('all', cli.config.find.keymap)], cli.args.filter, cli.args.print)

     for keyboard, keymap, print_vals in targets:
         print(f'{keyboard}:{keymap}')
diff --git a/lib/python/qmk/cli/mass_compile.py b/lib/python/qmk/cli/mass_compile.py
index 1227f435e7..06e6e411a7 100755
--- a/lib/python/qmk/cli/mass_compile.py
+++ b/lib/python/qmk/cli/mass_compile.py
@@ -97,6 +97,6 @@ def mass_compile(cli):
     if len(cli.args.builds) > 0:
         targets = search_make_targets(cli.args.builds, cli.args.filter)
     else:
-        targets = search_keymap_targets(cli.args.keymap, cli.args.filter)
+        targets = search_keymap_targets([('all', cli.config.mass_compile.keymap)], cli.args.filter)

-    return mass_compile_targets(targets, cli.args.clean, cli.args.dry_run, cli.args.no_temp, cli.args.parallel, cli.args.env)
+    return mass_compile_targets(targets, cli.args.clean, cli.args.dry_run, cli.config.mass_compile.no_temp, cli.config.mass_compile.parallel, cli.args.env)
diff --git a/lib/python/qmk/search.py b/lib/python/qmk/search.py
index 0b5d489218..a74450ca87 100644
--- a/lib/python/qmk/search.py
+++ b/lib/python/qmk/search.py
@@ -4,12 +4,12 @@ import contextlib
 import functools
 import fnmatch
 import logging
-import multiprocessing
 import re
 from typing import List, Tuple
 from dotty_dict import dotty
 from milc import cli

+from qmk.util import parallel_map
 from qmk.info import keymap_json
 import qmk.keyboard
 import qmk.keymap
@@ -78,17 +78,16 @@ def _expand_keymap_target(keyboard: str, keymap: str, all_keyboards: List[str] =
         all_keyboards = qmk.keyboard.list_keyboards()

     if keyboard == 'all':
-        with multiprocessing.Pool() as pool:
-            if keymap == 'all':
-                cli.log.info('Retrieving list of all keyboards and keymaps...')
-                targets = []
-                for kb in pool.imap_unordered(_all_keymaps, all_keyboards):
-                    targets.extend(kb)
-                return targets
-            else:
-                cli.log.info(f'Retrieving list of keyboards with keymap "{keymap}"...')
-                keyboard_filter = functools.partial(_keymap_exists, keymap=keymap)
-                return [(kb, keymap) for kb in filter(lambda e: e is not None, pool.imap_unordered(keyboard_filter, all_keyboards))]
+        if keymap == 'all':
+            cli.log.info('Retrieving list of all keyboards and keymaps...')
+            targets = []
+            for kb in parallel_map(_all_keymaps, all_keyboards):
+                targets.extend(kb)
+            return targets
+        else:
+            cli.log.info(f'Retrieving list of keyboards with keymap "{keymap}"...')
+            keyboard_filter = functools.partial(_keymap_exists, keymap=keymap)
+            return [(kb, keymap) for kb in filter(lambda e: e is not None, parallel_map(keyboard_filter, all_keyboards))]
     else:
         if keymap == 'all':
             keyboard = qmk.keyboard.resolve_keyboard(keyboard)
@@ -117,8 +116,7 @@ def _filter_keymap_targets(target_list: List[Tuple[str, str]], filters: List[str
         targets = [(kb, km, {}) for kb, km in target_list]
     else:
         cli.log.info('Parsing data for all matching keyboard/keymap combinations...')
-        with multiprocessing.Pool() as pool:
-            valid_keymaps = [(e[0], e[1], dotty(e[2])) for e in pool.imap_unordered(_load_keymap_info, target_list)]
+        valid_keymaps = [(e[0], e[1], dotty(e[2])) for e in parallel_map(_load_keymap_info, target_list)]

         function_re = re.compile(r'^(?P<function>[a-zA-Z]+)\((?P<key>[a-zA-Z0-9_\.]+)(,\s*(?P<value>[^#]+))?\)$')
         equals_re = re.compile(r'^(?P<key>[a-zA-Z0-9_\.]+)\s*=\s*(?P<value>[^#]+)$')
@@ -179,10 +177,10 @@ def _filter_keymap_targets(target_list: List[Tuple[str, str]], filters: List[str
     return targets


-def search_keymap_targets(keymap='default', filters: List[str] = [], print_vals: List[str] = []) -> List[Tuple[str, str, List[Tuple[str, str]]]]:
+def search_keymap_targets(targets: List[Tuple[str, str]] = [('all', 'default')], filters: List[str] = [], print_vals: List[str] = []) -> List[Tuple[str, str, List[Tuple[str, str]]]]:
     """Search for build targets matching the supplied criteria.
     """
-    return list(sorted(_filter_keymap_targets(expand_keymap_targets([('all', keymap)]), filters, print_vals), key=lambda e: (e[0], e[1])))
+    return list(sorted(_filter_keymap_targets(expand_keymap_targets(targets), filters, print_vals), key=lambda e: (e[0], e[1])))


 def search_make_targets(targets: List[str], filters: List[str] = [], print_vals: List[str] = []) -> List[Tuple[str, str, List[Tuple[str, str]]]]:
diff --git a/lib/python/qmk/util.py b/lib/python/qmk/util.py
new file mode 100644
index 0000000000..db7debd578
--- /dev/null
+++ b/lib/python/qmk/util.py
@@ -0,0 +1,56 @@
+"""Utility functions.
+"""
+import contextlib
+import multiprocessing
+
+from milc import cli
+
+
+@contextlib.contextmanager
+def parallelize():
+    """Returns a function that can be used in place of a map() call.
+
+    Attempts to use `mpire`, falling back to `multiprocessing` if it's not
+    available. If parallelization is not requested, returns the original map()
+    function.
+    """
+
+    # Work out if we've already got a config value for parallel searching
+    if cli.config.user.parallel_search is None:
+        parallel_search = True
+    else:
+        parallel_search = cli.config.user.parallel_search
+
+    # Non-parallel searches use `map()`
+    if not parallel_search:
+        yield map
+        return
+
+    # Prefer mpire's `WorkerPool` if it's available
+    with contextlib.suppress(ImportError):
+        from mpire import WorkerPool
+        from mpire.utils import make_single_arguments
+        with WorkerPool() as pool:
+
+            def _worker(func, *args):
+                # Ensure we don't unpack tuples -- mpire's `WorkerPool` tries to do so normally so we tell it not to.
+                for r in pool.imap_unordered(func, make_single_arguments(*args, generator=False), progress_bar=True):
+                    yield r
+
+            yield _worker
+            return
+
+    # Otherwise fall back to multiprocessing's `Pool`
+    with multiprocessing.Pool() as pool:
+        yield pool.imap_unordered
+
+
+def parallel_map(*args, **kwargs):
+    """Effectively runs `map()` but executes it in parallel if necessary.
+    """
+    with parallelize() as map_fn:
+        # This needs to be enclosed in a `list()` as some implementations return
+        # a generator function, which means the scope of the pool is closed off
+        # before the results are returned. Returning a list ensures results are
+        # materialised before any worker pool is shut down.
+        return list(map_fn(*args, **kwargs))
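
For reference, a minimal usage sketch of the new `parallel_map` helper (not part of the patch itself; the `_square` worker below is a hypothetical stand-in for callables such as `_all_keymaps` or `_load_keymap_info` used above). Because the parallel backends drive `imap_unordered`, result ordering is only guaranteed when the `user.parallel_search` config value disables parallelisation:

    from qmk.util import parallel_map

    def _square(x):
        # Any picklable, single-argument callable can act as the worker.
        return x * x

    # Behaves like list(map(_square, range(8))), but may fan out across worker
    # processes via mpire or multiprocessing; results can arrive unordered.
    results = parallel_map(_square, range(8))
    print(sorted(results))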