From f9755f0f3e3c212ba48df7c74661decfdfdc9874 Mon Sep 17 00:00:00 2001
From: Yuanhao Ji
Date: Wed, 12 Feb 2025 10:47:06 +0800
Subject: [PATCH 1/5] add configuration support

---
 userbenchmark/test_bench/run.py | 30 ++++++++++++++++++++++++++++++
 1 file changed, 30 insertions(+)

diff --git a/userbenchmark/test_bench/run.py b/userbenchmark/test_bench/run.py
index 45154ec76c..eda9da5dcd 100644
--- a/userbenchmark/test_bench/run.py
+++ b/userbenchmark/test_bench/run.py
@@ -111,6 +111,34 @@ def generate_model_configs_from_bisect_yaml(
     return result
 
 
+def generate_model_configs_from_yaml(
+    yaml_file: str,
+) -> List[TorchBenchModelConfig]:
+    yaml_file_path = os.path.join(yaml_file)
+    assert os.path.exists(yaml_file_path)
+
+    with open(yaml_file_path, "r") as yf:
+        config_obj = yaml.safe_load(yf)
+    model_set = set(list_models(internal=False))
+    device = config_obj["device"]
+    configs = []
+    for model in model_set:
+        cfg = next(filter(lambda c: c["model"] == model, config_obj["models"]), None)
+        tests = cfg.get("tests", "eval") if cfg is not None else ["eval"]
+        for test in tests:
+            config = TorchBenchModelConfig(
+                name=model,
+                device=device,
+                test=test,
+                batch_size=cfg.get("batch_size", None) if cfg is not None else None,
+                extra_args=[],
+                skip=cfg is not None and cfg.get("skip", False),
+            )
+            print(config)
+            configs.append(config)
+    return configs
+
+
 def init_output_dir(
     configs: List[TorchBenchModelConfig], output_dir: pathlib.Path
 ) -> List[TorchBenchModelConfig]:
@@ -340,6 +368,8 @@ def run(args: List[str]):
     args, extra_args = parse_known_args(args)
     if args.run_bisect:
         configs = generate_model_configs_from_bisect_yaml(args.run_bisect)
+    elif args.config:
+        configs = generate_model_configs_from_yaml(args.config)
     else:
         modelset = set(list_models(internal=(not args.oss)))
         timm_set = set(list_extended_models(suite_name="timm"))

From b5268e92512b4d8106d583f49655e953a1bdc141 Mon Sep 17 00:00:00 2001
From: Yuanhao Ji
Date: Thu, 13 Feb 2025 11:18:47 +0800
Subject: [PATCH 2/5] update

---
 userbenchmark/test_bench/run.py | 3 +--
 1 file changed, 1 insertion(+), 2 deletions(-)

diff --git a/userbenchmark/test_bench/run.py b/userbenchmark/test_bench/run.py
index eda9da5dcd..2498cf1951 100644
--- a/userbenchmark/test_bench/run.py
+++ b/userbenchmark/test_bench/run.py
@@ -131,10 +131,9 @@ def generate_model_configs_from_yaml(
                 device=device,
                 test=test,
                 batch_size=cfg.get("batch_size", None) if cfg is not None else None,
-                extra_args=[],
+                extra_args=cfg.get("extra_args", []) if cfg is not None else [],
                 skip=cfg is not None and cfg.get("skip", False),
             )
-            print(config)
             configs.append(config)
     return configs
 

From e3d73eee79589e16c9e45341b16c64378c5768b1 Mon Sep 17 00:00:00 2001
From: Yuanhao Ji
Date: Thu, 13 Feb 2025 14:56:20 +0800
Subject: [PATCH 3/5] update

---
 userbenchmark/test_bench/run.py | 21 ++++++++++++++++++---
 1 file changed, 18 insertions(+), 3 deletions(-)

diff --git a/userbenchmark/test_bench/run.py b/userbenchmark/test_bench/run.py
index 2498cf1951..c971e98d0a 100644
--- a/userbenchmark/test_bench/run.py
+++ b/userbenchmark/test_bench/run.py
@@ -114,15 +114,30 @@ def generate_model_configs_from_bisect_yaml(
 def generate_model_configs_from_yaml(
     yaml_file: str,
 ) -> List[TorchBenchModelConfig]:
+    """
+    The configuration might look like this:
+
+    devices:
+      - "foo"
+    models:
+      - model: BERT_pytorch
+        batch_size: 1
+
+      - model: yolov3
+        skip: true
+        extra_args:
+          - "--accuracy"
+    """
     yaml_file_path = os.path.join(yaml_file)
     assert os.path.exists(yaml_file_path)
 
     with open(yaml_file_path, "r") as yf:
         config_obj = yaml.safe_load(yf)
-    model_set = set(list_models(internal=False))
-    device = config_obj["device"]
+    devices = config_obj["devices"]
+    model_names = set(list_models(internal=False))
+    cfgs = itertools.product(*[devices, model_names])
     configs = []
-    for model in model_set:
+    for device, model in cfgs:
         cfg = next(filter(lambda c: c["model"] == model, config_obj["models"]), None)
         tests = cfg.get("tests", "eval") if cfg is not None else ["eval"]
         for test in tests:

From a64788b426ca552b010766c06c8de4eb843cbd9e Mon Sep 17 00:00:00 2001
From: Yuanhao Ji
Date: Thu, 13 Feb 2025 15:55:51 +0800
Subject: [PATCH 4/5] update

---
 userbenchmark/test_bench/run.py | 5 +++--
 1 file changed, 3 insertions(+), 2 deletions(-)

diff --git a/userbenchmark/test_bench/run.py b/userbenchmark/test_bench/run.py
index c971e98d0a..d8dc5aa3ab 100644
--- a/userbenchmark/test_bench/run.py
+++ b/userbenchmark/test_bench/run.py
@@ -134,19 +134,20 @@ def generate_model_configs_from_yaml(
     with open(yaml_file_path, "r") as yf:
         config_obj = yaml.safe_load(yf)
     devices = config_obj["devices"]
+    extra_args = config_obj.get("extra_args", [])
     model_names = set(list_models(internal=False))
     cfgs = itertools.product(*[devices, model_names])
     configs = []
     for device, model in cfgs:
         cfg = next(filter(lambda c: c["model"] == model, config_obj["models"]), None)
-        tests = cfg.get("tests", "eval") if cfg is not None else ["eval"]
+        tests = cfg.get("tests", ["eval"]) if cfg is not None else ["eval"]
         for test in tests:
             config = TorchBenchModelConfig(
                 name=model,
                 device=device,
                 test=test,
                 batch_size=cfg.get("batch_size", None) if cfg is not None else None,
-                extra_args=cfg.get("extra_args", []) if cfg is not None else [],
+                extra_args=cfg.get("extra_args", extra_args) if cfg is not None else extra_args,
                 skip=cfg is not None and cfg.get("skip", False),
             )
             configs.append(config)

From 0f4ef7defc900b51c23a88047deecfd8f8282140 Mon Sep 17 00:00:00 2001
From: Yuanhao Ji
Date: Thu, 13 Feb 2025 16:14:59 +0800
Subject: [PATCH 5/5] update

---
 userbenchmark/test_bench/run.py | 22 +++++++++++++++-------
 1 file changed, 15 insertions(+), 7 deletions(-)

diff --git a/userbenchmark/test_bench/run.py b/userbenchmark/test_bench/run.py
index d8dc5aa3ab..a9c894989b 100644
--- a/userbenchmark/test_bench/run.py
+++ b/userbenchmark/test_bench/run.py
@@ -131,24 +131,32 @@ def generate_model_configs_from_yaml(
     yaml_file_path = os.path.join(yaml_file)
     assert os.path.exists(yaml_file_path)
 
+    def _get_val(d: dict, key: str, default_value = None):
+        if d is None:
+            return default_value
+        else:
+            return d.get(key, default_value)
+
     with open(yaml_file_path, "r") as yf:
         config_obj = yaml.safe_load(yf)
-    devices = config_obj["devices"]
-    extra_args = config_obj.get("extra_args", [])
+    devices = _get_val(config_obj, "devices")
+    batch_size = _get_val(config_obj, "batch_size")
+    extra_args = _get_val(config_obj, "extra_args", [])
+
     model_names = set(list_models(internal=False))
     cfgs = itertools.product(*[devices, model_names])
     configs = []
     for device, model in cfgs:
-        cfg = next(filter(lambda c: c["model"] == model, config_obj["models"]), None)
-        tests = cfg.get("tests", ["eval"]) if cfg is not None else ["eval"]
+        model_cfg = next(filter(lambda c: c["model"] == model, config_obj["models"]), None)
+        tests = _get_val(model_cfg, "tests", ["eval"])
         for test in tests:
             config = TorchBenchModelConfig(
                 name=model,
                 device=device,
                 test=test,
-                batch_size=cfg.get("batch_size", None) if cfg is not None else None,
-                extra_args=cfg.get("extra_args", extra_args) if cfg is not None else extra_args,
-                skip=cfg is not None and cfg.get("skip", False),
+                batch_size=_get_val(model_cfg, "batch_size", batch_size),
+                extra_args=_get_val(model_cfg, "extra_args", extra_args),
+                skip=_get_val(model_cfg, "skip", False),
             )
             configs.append(config)
     return configs
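
To make the precedence rules introduced by this series easier to review, below is a minimal, self-contained sketch (not part of the patches) of how the final revision of generate_model_configs_from_yaml resolves settings: the top-level devices, batch_size, and extra_args entries act as global defaults, and a per-model entry may override batch_size, extra_args, or tests, or set skip. The device string, model names, and numeric values in EXAMPLE_YAML are placeholders; only the key names are taken from the code in the patches, and the model list is hard-coded here instead of coming from list_models().

# Illustrative sketch only; mirrors the lookup logic of patch 5, not run.py itself.
import itertools

import yaml

EXAMPLE_YAML = """
devices:
  - "cuda"
batch_size: 8          # global default, may be overridden per model
extra_args: []         # global default, may be overridden per model
models:
  - model: BERT_pytorch
    batch_size: 1      # per-model override
  - model: yolov3
    skip: true
    extra_args:
      - "--accuracy"
"""


def _get_val(d, key, default_value=None):
    # Same fallback behavior as the helper added in patch 5: a model with no
    # entry of its own (d is None) falls back to the supplied default.
    return default_value if d is None else d.get(key, default_value)


config_obj = yaml.safe_load(EXAMPLE_YAML)
devices = _get_val(config_obj, "devices")
batch_size = _get_val(config_obj, "batch_size")
extra_args = _get_val(config_obj, "extra_args", [])

# In run.py the candidate models come from list_models(); hard-coded here so
# the sketch runs without torchbench installed.
model_names = {"BERT_pytorch", "yolov3", "resnet50"}

for device, model in itertools.product(devices, model_names):
    model_cfg = next((c for c in config_obj["models"] if c["model"] == model), None)
    for test in _get_val(model_cfg, "tests", ["eval"]):
        print(
            model,
            device,
            test,
            _get_val(model_cfg, "batch_size", batch_size),
            _get_val(model_cfg, "extra_args", extra_args),
            _get_val(model_cfg, "skip", False),
        )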
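
With the series applied, run.py takes the new path whenever args.config is set. The argument wiring itself is outside these hunks, but assuming the test_bench parser exposes it as --config, an invocation would look roughly like: python run_benchmark.py test_bench --config path/to/config.yaml.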