diff --git a/omniadvisor/script/auto_deploy.sh b/omniadvisor/script/auto_deploy.sh index 943507875112b9357aa77d47b902392ab894a47b..50d8e869267573e0e2a2d3031bc30e240b367599 100644 --- a/omniadvisor/script/auto_deploy.sh +++ b/omniadvisor/script/auto_deploy.sh @@ -1,64 +1,103 @@ +#!/bin/bash + +log() { # 用法:log LEVEL "message" + local level=${1:-INFO}; shift + printf '%s [%s] %s\n' "$(date '+%F %T%z')" "$level" "$*" >&2 +} + +log INFO "即将开始自动部署OmniAdvisor2.0..." +# 获取软件包路径 prefix=$(dirname `dirname $(pwd)`) core_dir=$prefix/BoostKit-omniadvisor_core_2.0.0 frame_dir=$prefix/BoostKit-omniadvisor_2.0.0 -# 合并核心代码与框架代码 -ls -l $frame_dir/src -ls -l $core_dir/src -cp -r $core_dir/src/algo $frame_dir/src/ -cp -r $core_dir/config/* $frame_dir/config/ +log INFO "检查目标路径是否存在..." +# 路径存在性检查 +## 检测软件包路径 +for d in "$core_dir" "$frame_dir"; do + if [[ ! -d "$d" ]]; then + log ERROR "错误:目录不存在 -> $d" + log ERROR "请检查软件包是否有按要求放置到指定位置" + exit 1 + else + log INFO "$d 存在" + fi +done +## 检查SPARK_HOME是否正确配置 if [ -z "${SPARK_HOME}" ]; then # 如果 SPARK_HOME 未设置,打印错误信息并退出 - echo "错误: SPARK_HOME 环境变量未设置!" >&2 - echo "请先配置 SPARK_HOME 环境变量,指向您的 Spark 安装目录" >&2 - echo "例如: " >&2 - echo " export SPARK_HOME=/path/to/your/spark" >&2 - echo "然后将此行添加到您的 ~/.bashrc 或者 ~/.zshrc 文件中" >&2 + log ERROR "错误: SPARK_HOME 环境变量未设置!" + log ERROR "请先配置 SPARK_HOME 环境变量,指向您的 Spark 安装目录" + log ERROR "例如: " + log ERROR "export SPARK_HOME=/path/to/your/spark" + log ERROR "然后将此行添加到您的 ~/.bashrc 或者 ~/.zshrc 文件中" exit 1 # 以非零状态退出 表示错误 +else + log INFO "SPARK_HOME=${SPARK_HOME}环境变量已配置" fi -# 替换spark-submit脚本 -spark_submit_modified=$frame_dir/script/spark-submit -spark_submit_link=$SPARK_HOME/bin/spark-submit -original_spark_submit_file=spark_submit_link +# 合并核心代码与框架代码 +log INFO "将算法模块合并到执行框架目录中..." 
+log INFO "OmniAdvisor2.0框架模块目录: $frame_dir" +log INFO "OmniAdvisor2.0算法模块目录: $core_dir" + +log INFO "执行命令:cp -r $core_dir/src/algo $frame_dir/src/ 合并算法模块" +cp -r $core_dir/src/algo $frame_dir/src/ + +log INFO "执行命令:cp -r $core_dir/config/* $frame_dir/config/ 合并配置文件" +cp -r $core_dir/config/* $frame_dir/config/ + + +# 修改spark_submit_modified的文件权限 用readlink获取真实路径防止提权 +spark_submit_modified=$frame_dir/script/spark-submit +[[ -L "$spark_submit_modified" ]] && { log ERROR "Refuse symlink: $spark_submit_modified 不允许对符号链接进行权限修改"; exit 1; } # 修改spark_submit_modified 中的hijack.pyc路径 hijack_path=$frame_dir/src/hijack.pyc -cat $spark_submit_modified | grep "hijack_path=" +log INFO "开始自动配置${spark_submit_modified}文件中的hijack_path路径..." +log INFO "原始hijack.pyc文件路径:`cat $spark_submit_modified | grep "hijack_path=" | sed 's/^[[:space:]]*//; s/[[:space:]]*$//'`" sed -i "s|hijack_path=\"\"|hijack_path=\"${hijack_path}\"|g" "$spark_submit_modified" -# 显示修改后的路径 -cat $spark_submit_modified | grep "hijack_path=" -chmod +x $spark_submit_modified +log INFO "更新后的hijack.pyc文件路径:`cat $spark_submit_modified | grep "hijack_path=" | sed 's/^[[:space:]]*//; s/[[:space:]]*$//'`" +# 最小化spark_submit_modified的文件权限 不再允许写操作 +log INFO "修改$spark_submit_modified的文件权限为550" +chmod 550 -- "$(readlink -e -- "$spark_submit_modified")" +# 替换spark-submit脚本 +spark_submit_link=$SPARK_HOME/bin/spark-submit +original_spark_submit_file=$spark_submit_link +log INFO "将软件所使用的spark-submit软链接到SPARK_HOME/bin/spark-submit" # 如果spark-submit不存在则直接创建软链接 if [ -L "$spark_submit_link" ]; then - echo -e "\n$spark_submit_link 是一个已存在的软链接" - ls -l $spark_submit_link - echo "即将对软链接进行刷新" + log INFO "${spark_submit_link}是一个已存在的软链接`ls -l $spark_submit_link | cut -d' ' -f10-`" + log INFO "即将对软链接进行刷新..." 
+ original_spark_submit_file=`readlink -f $spark_submit_link` ln -snf $spark_submit_modified $spark_submit_link - echo "将$spark_submit_link链接到$spark_submit_modified" + log INFO "将$spark_submit_link链接到$spark_submit_modified" + # 如果spark-submit存在则备份并重置spark-submit的软连接 elif [ -f "$spark_submit_link" ]; then - echo -e "\n$spark_submit_link 是一个普通文件 进行备份" + log INFO "${spark_submit_link}是一个普通文件,进行备份" cp -r $spark_submit_link $SPARK_HOME/bin/spark-submit.original.backup + original_spark_submit_file="$SPARK_HOME/bin/spark-submit.original.backup" - echo "文件已备份到$original_spark_submit_file" + log INFO "文件已备份到$original_spark_submit_file" + ln -snf $spark_submit_modified $spark_submit_link - echo "将$spark_submit_link链接到$spark_submit_modified" + log INFO "将$spark_submit_link链接到$spark_submit_modified" + else - echo -e "\n$spark_submit_link 不是软连接也不是文件 创建新的软链接" - ln -n $spark_submit_modified $spark_submit_link - echo "将$spark_submit_link链接到$spark_submit_modified" + log INFO "$spark_submit_link 不是软连接也不是文件 创建新的软链接" + + ln -sn $spark_submit_modified $spark_submit_link + log INFO "将$spark_submit_link链接到$spark_submit_modified" fi # 展现修改的成果 -echo -e "\更新后$spark_submit_link的链接情况如下" -ls -l $spark_submit_link -echo -e "\n软链接刷新前原始spark_submit的位置如下" -echo $original_spark_submit_file -echo -e "\n修改后与原始文件的修改差异如下:" -diff $spark_submit_link $original_spark_submit_file +log INFO "更新后$spark_submit_link的链接情况如下:`ls -l $spark_submit_link | cut -d' ' -f10-`" +log INFO "软链接刷新前原始spark_submit的位置如下: $original_spark_submit_file" + +log INFO "Success! OmniAdvisor2.0自动部署完成"
\ No newline at end of file diff --git a/omniadvisor/src/omniadvisor/service/tuning_result/tuning_result_history.py b/omniadvisor/src/omniadvisor/service/tuning_result/tuning_result_history.py index e28be27479af71305f85d7a5690f001c97fe119f..3520538bb621a70d552f22fc9cf495516b445e3e 100644 --- a/omniadvisor/src/omniadvisor/service/tuning_result/tuning_result_history.py +++ b/omniadvisor/src/omniadvisor/service/tuning_result/tuning_result_history.py @@ -95,7 +95,8 @@ class TuningResultHistory: @property def boost_percentage(self): - if not self.user_tuning_result: + # abs(self.user_tuning_result.runtime) <= 1e-12用于保证分母不为0 + if not self.user_tuning_result or abs(self.user_tuning_result.runtime) <= 1e-12: return 0.0 return round( (self.user_tuning_result.runtime - self.best_tuning_result.runtime) / self.user_tuning_result.runtime, 4