diff --git a/omniadvisor/src/omniadvisor/interface/config_tuning.py b/omniadvisor/src/omniadvisor/interface/config_tuning.py
index 059f84be1d219b5824dab549e34cbd7eb51e12cb..e69bc36407d20d84076b2939c0205cdfd38a7405 100644
--- a/omniadvisor/src/omniadvisor/interface/config_tuning.py
+++ b/omniadvisor/src/omniadvisor/interface/config_tuning.py
@@ -3,9 +3,10 @@ import signal
 from typing import Optional
 
 from algo.expert.tuning import ExpertTuner
-from algo.iterative.tuning import IterativeTunner
+from algo.iterative.tuning import IterativeTuner
 from algo.native.tuning import NativeTuner
 from algo.transfer.tuning import TransferTuner
+from algo.common.exceptions import InvalidTuningError
 from common.constant import OA_CONF
 from common.exceptions import NoOptimalConfigError, SystemKilledError, TuningPreconditionError
 from omniadvisor.repository.model.load import Load
@@ -72,11 +73,11 @@ def _single_tuning(load: Load, retest_way: str, tuning_method: str) -> None:
     signal.signal(signal.SIGTERM, handler)
     signal.signal(signal.SIGINT, handler)
 
-    next_config, method_extend = _get_next_config(load=load, tuning_method=tuning_method)
-
-    if not next_config:
-        raise NoOptimalConfigError(f'The recommending config of method {tuning_method} is empty,'
-                                   f' please try other tuning methods.')
+    try:
+        next_config, method_extend = _get_next_config(load=load, tuning_method=tuning_method)
+    except InvalidTuningError as e:
+        global_logger.warning(e)
+        raise NoOptimalConfigError(f'The tuning method {tuning_method} is invalid, please try other tuning methods.') from e
 
     # Overlay next_config on top of the user's default_config
     global_logger.info("Load config tuning success, get new config to retest.")
@@ -135,7 +136,7 @@ def _get_next_config(load: Load, tuning_method: str) -> tuple[dict, str]:
     # AI iterative tuning
     if tuning_method == OA_CONF.TuningMethod.iterative:
         global_logger.info("Use AI iterative optimization method for tuning.")
-        tuner = IterativeTunner(tuning_history=tuning_result_history.to_tuning_data_list())
+        tuner = IterativeTuner(tuning_history=tuning_result_history.to_tuning_data_list())
     # Expert rule tuning
     elif tuning_method == OA_CONF.TuningMethod.expert:
         global_logger.info("Use expert rule optimization method for tuning.")
diff --git a/omniadvisor/src/omniadvisor/service/tuning_result/tuning_result.py b/omniadvisor/src/omniadvisor/service/tuning_result/tuning_result.py
index 1b3475a542c8f40d73df9950aa3f0c96b42b819c..f480883f3aba42a285ba813f881f1401095db715 100644
--- a/omniadvisor/src/omniadvisor/service/tuning_result/tuning_result.py
+++ b/omniadvisor/src/omniadvisor/service/tuning_result/tuning_result.py
@@ -187,7 +187,7 @@ class TuningResult:
             config=self.config,
             method=self.method,
             rule=self.method_extend,
-            status=self.status,
+            status=(self.status == OA_CONF.ExecStatus.success),
             runtime=self.runtime,
             trace=trace
         )
diff --git a/omniadvisor/tests/conftest.py b/omniadvisor/tests/conftest.py
index 413cf1eb9866555da8a04ee6074b8c924b3ad162..c56c330cbd6d69cfa213ff03594cfd3c1cdb8e72 100644
--- a/omniadvisor/tests/conftest.py
+++ b/omniadvisor/tests/conftest.py
@@ -30,11 +30,12 @@ def dot_expansion(s):
 
 # All algorithm-related packages are registered here
 to_registers = {
-    'algo.expert.tuning.Trace',
     'algo.common.model.Trace',
-    'algo.expert.tuning.ExpertTuner',
-    'algo.iterative.tuning.IterativeTunner',
     'algo.common.model.TuningData',
+    'algo.common.exceptions.InvalidTuningError',
+    'algo.expert.tuning.ExpertTuner',
+    'algo.expert.tuning.Trace',
+    'algo.iterative.tuning.IterativeTuner',
     'algo.native.tuning.NativeTuner',
     'algo.transfer.tuning.TransferTuner',
 }
diff --git a/omniadvisor/tests/omniadvisor/interface/test_config_tuning.py b/omniadvisor/tests/omniadvisor/interface/test_config_tuning.py
index 6c93ad5575ce6845ed76e7ffe36f6a9b7c146b17..33ba974838d71511c3256c0516faa5e1ebf7be00 100644
--- a/omniadvisor/tests/omniadvisor/interface/test_config_tuning.py
+++ b/omniadvisor/tests/omniadvisor/interface/test_config_tuning.py
@@ -22,7 +22,7 @@ class TestTuning:
 
     @patch('omniadvisor.service.retest_service.float_format')
     @patch('omniadvisor.interface.config_tuning.float_format')
-    @patch('algo.iterative.tuning.IterativeTunner.tune')
+    @patch('algo.iterative.tuning.IterativeTuner.tune')
     @patch('omniadvisor.interface.config_tuning.get_tuning_result_history')
     @patch('omniadvisor.interface.config_tuning.get_tuning_result')
     @patch('omniadvisor.service.retest_service.spark_run')
@@ -61,7 +61,7 @@ class TestTuning:
     @patch('omniadvisor.service.retest_service.float_format')
     @patch('omniadvisor.interface.config_tuning.float_format')
     @patch('omniadvisor.repository.load_repository.LoadRepository.update_best_config')
-    @patch('algo.iterative.tuning.IterativeTunner.tune')
+    @patch('algo.iterative.tuning.IterativeTuner.tune')
     @patch('omniadvisor.interface.config_tuning.remove_tuning_result')
     @patch('omniadvisor.service.retest_service.get_tuning_result')
     @patch('omniadvisor.interface.config_tuning.get_tuning_result_history')
@@ -110,7 +110,7 @@ class TestTuning:
         mock_remove_tuning_result.assert_not_called()
 
     @patch('omniadvisor.interface.config_tuning.remove_tuning_result')
-    @patch('algo.iterative.tuning.IterativeTunner.tune')
+    @patch('algo.iterative.tuning.IterativeTuner.tune')
     @patch('omniadvisor.repository.load_repository.LoadRepository.update_best_config')
     @patch('omniadvisor.interface.config_tuning.get_tuning_result_history')
     @patch('omniadvisor.service.retest_service.spark_run', side_effect=RuntimeError)
@@ -167,7 +167,7 @@ class TestTuning:
         with pytest.raises(TuningPreconditionError, match='Cannot find load id'):
             main()
 
-    @patch('algo.iterative.tuning.IterativeTunner.tune')
+    @patch('algo.iterative.tuning.IterativeTuner.tune')
     @patch('omniadvisor.repository.load_repository.LoadRepository.update_test_config')
     @patch('omniadvisor.interface.config_tuning.get_tuning_result_history')
     @patch('omniadvisor.repository.tuning_record_repository.TuningRecordRepository.create')
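
Note: the behavioral core of this change is that _get_next_config now signals "no recommendation" by raising InvalidTuningError, which _single_tuning converts into the caller-facing NoOptimalConfigError, instead of the old check for an empty config. Below is a minimal self-contained sketch of that flow. The exception names and the except clause mirror the diff; the function bodies and names (single_tuning, the stand-in _get_next_config) are simplified illustrations, not the project's actual code.

    # Illustrative sketch only; mirrors the try/except added in _single_tuning.
    class InvalidTuningError(Exception):
        """Raised by a tuner that cannot produce a recommendation."""

    class NoOptimalConfigError(Exception):
        """Caller-facing error when a tuning method yields no usable config."""

    def _get_next_config(tuning_method: str) -> tuple[dict, str]:
        # Stand-in: a real tuner would consult the tuning history here.
        raise InvalidTuningError(f'method {tuning_method} produced no config')

    def single_tuning(tuning_method: str) -> None:
        try:
            next_config, method_extend = _get_next_config(tuning_method)
        except InvalidTuningError as e:
            # Re-raise as the caller-facing error, chaining with 'from e'
            # so the original cause is preserved in tracebacks.
            raise NoOptimalConfigError(
                f'The tuning method {tuning_method} is invalid,'
                f' please try other tuning methods.') from e

The TuningResult change is independent of this flow: it normalizes the stored status to a boolean, so status is True exactly when self.status equals OA_CONF.ExecStatus.success.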