diff --git a/neural_compressor/common/base_config.py b/neural_compressor/common/base_config.py index 35b0f532738..3f65a2ea9c0 100644 --- a/neural_compressor/common/base_config.py +++ b/neural_compressor/common/base_config.py @@ -377,6 +377,9 @@ def expand(self) -> List[BaseConfig]: if len(tuning_param_list) == 0: config_list = [config] else: + # A `TuningParam` instance with no options would make the Cartesian product empty. + # Filter out the `TuningParam` instances with no options. + tuning_param_list = list(filter(lambda x: len(x.options) > 0, tuning_param_list)) tuning_param_name_lst = [tuning_param.name for tuning_param in tuning_param_list] for params_values in product(*[tuning_param.options for tuning_param in tuning_param_list]): tuning_param_pair = dict(zip(tuning_param_name_lst, params_values)) diff --git a/test/3x/common/test_common.py b/test/3x/common/test_common.py index 4af0e1a276d..90a5db3c315 100644 --- a/test/3x/common/test_common.py +++ b/test/3x/common/test_common.py @@ -277,6 +277,11 @@ def test_config_expand_complex_tunable_type(self): for i in range(len(configs_list)): self.assertEqual(configs_list[i].target_op_type_list, target_op_type_list_options[i]) + def test_config_expand_with_empty_options(self): + configs = FakeAlgoConfig(weight_dtype=["int", "float32"], weight_bits=[]) + configs_list = configs.expand() + self.assertEqual(len(configs_list), 2) + def test_mixed_two_algos(self): model = FakeModel() OP1_NAME = "OP1_NAME"