From 4c2c149dd5b3d6afdff644c0310f853711e4f22a Mon Sep 17 00:00:00 2001
From: =?UTF-8?q?=E5=BE=90=E8=89=BA=E4=B8=B9?= <53546877+Craven1701@users.noreply.github.com>
Date: Fri, 5 Sep 2025 10:50:56 +0800
Subject: [PATCH 1/3] 1. Rework the auto_deploy output format & minimize file
 permissions 2. Guard against division by zero in tuning_result_history
MIME-Version: 1.0
Content-Type: text/plain; charset=UTF-8
Content-Transfer-Encoding: 8bit

---
 omniadvisor/script/auto_deploy.sh              | 101 ++++++++++++------
 .../tuning_result/tuning_result_history.py     |   3 +-
 2 files changed, 70 insertions(+), 34 deletions(-)

diff --git a/omniadvisor/script/auto_deploy.sh b/omniadvisor/script/auto_deploy.sh
index 943507875..ea5d6824a 100644
--- a/omniadvisor/script/auto_deploy.sh
+++ b/omniadvisor/script/auto_deploy.sh
@@ -1,64 +1,99 @@
+log() { # Usage: log LEVEL "message"
+    local level=${1:-INFO}; shift
+    printf '%s [%s] %s\n' "$(date '+%F %T%z')" "$level" "$*" >&2
+}
+
+log INFO "Starting automatic deployment of OmniAdvisor2.0..."
+# Locate the software package paths
 prefix=$(dirname `dirname $(pwd)`)
 core_dir=$prefix/BoostKit-omniadvisor_core_2.0.0
 frame_dir=$prefix/BoostKit-omniadvisor_2.0.0
 
-# Merge the core code with the framework code
-ls -l $frame_dir/src
-ls -l $core_dir/src
-cp -r $core_dir/src/algo $frame_dir/src/
-cp -r $core_dir/config/* $frame_dir/config/
+log INFO "Checking that the target paths exist..."
+# Path existence checks
+## Check the software package directories
+for d in "$core_dir" "$frame_dir"; do
+    if [[ ! -d "$d" ]]; then
+        log ERROR "Error: directory does not exist -> $d" >&2
+        log ERROR "Please check that the software packages were placed in the required locations" >&2
+        exit 1
+    else
+        log INFO "$d exists"
+    fi
+done
 
+## Check that SPARK_HOME is configured correctly
 if [ -z "${SPARK_HOME}" ]; then
     # If SPARK_HOME is not set, print an error message and exit
-    echo "Error: the SPARK_HOME environment variable is not set!" >&2
-    echo "Please configure SPARK_HOME to point to your Spark installation directory" >&2
-    echo "For example: " >&2
-    echo "  export SPARK_HOME=/path/to/your/spark" >&2
-    echo "Then add this line to your ~/.bashrc or ~/.zshrc file" >&2
+    log ERROR "Error: the SPARK_HOME environment variable is not set!" >&2
+    log ERROR "Please configure SPARK_HOME to point to your Spark installation directory" >&2
+    log ERROR "For example: " >&2
+    log ERROR "export SPARK_HOME=/path/to/your/spark" >&2
+    log ERROR "Then add this line to your ~/.bashrc or ~/.zshrc file" >&2
     exit 1  # Exit with a non-zero status to indicate an error
+else
+    log INFO "SPARK_HOME=${SPARK_HOME} is configured"
 fi
 
-# Replace the spark-submit script
-spark_submit_modified=$frame_dir/script/spark-submit
-spark_submit_link=$SPARK_HOME/bin/spark-submit
-original_spark_submit_file=spark_submit_link
+
+# Merge the core code with the framework code
+log INFO "Merging the algorithm module into the execution framework directory..."
+log INFO "OmniAdvisor2.0 framework module directory: $frame_dir"
+log INFO "OmniAdvisor2.0 algorithm module directory: $core_dir"
+
+log INFO "Running: cp -r $core_dir/src/algo $frame_dir/src/ to merge the algorithm module"
+cp -r $core_dir/src/algo $frame_dir/src/
+
+log INFO "Running: cp -r $core_dir/config/* $frame_dir/config/ to merge the configuration files"
+cp -r $core_dir/config/* $frame_dir/config/
 
 # Set the hijack.pyc path in spark_submit_modified
+spark_submit_modified=$frame_dir/script/spark-submit
 hijack_path=$frame_dir/src/hijack.pyc
-cat $spark_submit_modified | grep "hijack_path="
+log INFO "Configuring the hijack_path in ${spark_submit_modified}..."
+log INFO "原始hijack.pyc文件路径:`cat $spark_submit_modified | grep "hijack_path=" | sed 's/^[[:space:]]*//; s/[[:space:]]*$//'`" sed -i "s|hijack_path=\"\"|hijack_path=\"${hijack_path}\"|g" "$spark_submit_modified" -# 显示修改后的路径 -cat $spark_submit_modified | grep "hijack_path=" -chmod +x $spark_submit_modified +log INFO "更新后的hijack.pyc文件路径:`cat $spark_submit_modified | grep "hijack_path=" | sed 's/^[[:space:]]*//; s/[[:space:]]*$//'`" +# 修改spark_submit_modified的文件权限 用readlink获取真实路径防止提权 +[[ -L "$spark_submit_modified" ]] && { log ERROR "Refuse symlink: $spark_submit_modified" >&2; exit 1; } +chmod 550 -- "$(readlink -e -- "$spark_submit_modified")" +# 替换spark-submit脚本 +spark_submit_link=$SPARK_HOME/bin/spark-submit +original_spark_submit_file=spark_submit_link +log INFO "将软件所使用的spark-submit软链接到SPARK_HOME/bin/spark-submit" # 如果spark-submit不存在则直接创建软链接 if [ -L "$spark_submit_link" ]; then - echo -e "\n$spark_submit_link 是一个已存在的软链接" - ls -l $spark_submit_link - echo "即将对软链接进行刷新" + log INFO "${spark_submit_link}是一个已存在的软链接`ls -l $spark_submit_link`" + log INFO "即将对软链接进行刷新..." + original_spark_submit_file=`readlink -f $spark_submit_link` ln -snf $spark_submit_modified $spark_submit_link - echo "将$spark_submit_link链接到$spark_submit_modified" + log INFO "将$spark_submit_link链接到$spark_submit_modified" + # 如果spark-submit存在则备份并重置spark-submit的软连接 elif [ -f "$spark_submit_link" ]; then - echo -e "\n$spark_submit_link 是一个普通文件 进行备份" + log INFO "${spark_submit_link}是一个普通文件,进行备份" cp -r $spark_submit_link $SPARK_HOME/bin/spark-submit.original.backup + original_spark_submit_file="$SPARK_HOME/bin/spark-submit.original.backup" - echo "文件已备份到$original_spark_submit_file" + log INFO "文件已备份到$original_spark_submit_file" + ln -snf $spark_submit_modified $spark_submit_link - echo "将$spark_submit_link链接到$spark_submit_modified" + log INFO "将$spark_submit_link链接到$spark_submit_modified" + else - echo -e "\n$spark_submit_link 不是软连接也不是文件 创建新的软链接" + log INFO "$spark_submit_link 不是软连接也不是文件 创建新的软链接" + ln -n $spark_submit_modified $spark_submit_link - echo "将$spark_submit_link链接到$spark_submit_modified" + log INFO "将$spark_submit_link链接到$spark_submit_modified" fi # 展现修改的成果 -echo -e "\更新后$spark_submit_link的链接情况如下" -ls -l $spark_submit_link -echo -e "\n软链接刷新前原始spark_submit的位置如下" -echo $original_spark_submit_file -echo -e "\n修改后与原始文件的修改差异如下:" -diff $spark_submit_link $original_spark_submit_file +log INFO "更新后$spark_submit_link的链接情况如下:" +log INFO "`ls -l $spark_submit_link`" +log INFO "软链接刷新前原始spark_submit的位置如下: $original_spark_submit_file" + +log INFO "Success! 
OmniAdvisor2.0自动部署完成" \ No newline at end of file diff --git a/omniadvisor/src/omniadvisor/service/tuning_result/tuning_result_history.py b/omniadvisor/src/omniadvisor/service/tuning_result/tuning_result_history.py index e28be2747..3520538bb 100644 --- a/omniadvisor/src/omniadvisor/service/tuning_result/tuning_result_history.py +++ b/omniadvisor/src/omniadvisor/service/tuning_result/tuning_result_history.py @@ -95,7 +95,8 @@ class TuningResultHistory: @property def boost_percentage(self): - if not self.user_tuning_result: + # abs(self.user_tuning_result.runtime) <= 1e-12用于保证分母不为0 + if not self.user_tuning_result or abs(self.user_tuning_result.runtime) <= 1e-12: return 0.0 return round( (self.user_tuning_result.runtime - self.best_tuning_result.runtime) / self.user_tuning_result.runtime, 4 -- Gitee From f78cf1faa777e879d11469592dacb382e0cff5d6 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?=E5=BE=90=E8=89=BA=E4=B8=B9?= <53546877+Craven1701@users.noreply.github.com> Date: Fri, 5 Sep 2025 11:46:45 +0800 Subject: [PATCH 2/3] =?UTF-8?q?=E8=BD=AF=E9=93=BE=E6=8E=A5=E5=91=BD?= =?UTF-8?q?=E4=BB=A4=E4=BC=98=E5=8C=96=20=E4=BD=BF=E7=94=A8-sn=E5=AE=89?= =?UTF-8?q?=E5=85=A8=E5=88=9B=E5=BB=BA?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- omniadvisor/script/auto_deploy.sh | 18 ++++++++++-------- 1 file changed, 10 insertions(+), 8 deletions(-) diff --git a/omniadvisor/script/auto_deploy.sh b/omniadvisor/script/auto_deploy.sh index ea5d6824a..a95396fa5 100644 --- a/omniadvisor/script/auto_deploy.sh +++ b/omniadvisor/script/auto_deploy.sh @@ -48,17 +48,20 @@ log INFO "执行命令:cp -r $core_dir/config/* $frame_dir/config/ 合并配置 cp -r $core_dir/config/* $frame_dir/config/ -# 修改spark_submit_modified 中的hijack.pyc路径 +# 修改spark_submit_modified的文件权限 用readlink获取真实路径防止提权 spark_submit_modified=$frame_dir/script/spark-submit +[[ -L "$spark_submit_modified" ]] && { log ERROR "Refuse symlink: $spark_submit_modified 不允许对符号链接进行权限修改" >&2; exit 1; } + +# 修改spark_submit_modified 中的hijack.pyc路径 hijack_path=$frame_dir/src/hijack.pyc log INFO "开始自动配置${spark_submit_modified}文件中的hijack_path路径..." log INFO "原始hijack.pyc文件路径:`cat $spark_submit_modified | grep "hijack_path=" | sed 's/^[[:space:]]*//; s/[[:space:]]*$//'`" sed -i "s|hijack_path=\"\"|hijack_path=\"${hijack_path}\"|g" "$spark_submit_modified" log INFO "更新后的hijack.pyc文件路径:`cat $spark_submit_modified | grep "hijack_path=" | sed 's/^[[:space:]]*//; s/[[:space:]]*$//'`" -# 修改spark_submit_modified的文件权限 用readlink获取真实路径防止提权 -[[ -L "$spark_submit_modified" ]] && { log ERROR "Refuse symlink: $spark_submit_modified" >&2; exit 1; } -chmod 550 -- "$(readlink -e -- "$spark_submit_modified")" +# 最小化spark_submit_modified的文件权限 不再允许写操作 +log INFO "修改$spark_submit_modified的文件权限为550" +chmod 550 -- "$(readlink -e -- "$spark_submit_modified")" # 替换spark-submit脚本 spark_submit_link=$SPARK_HOME/bin/spark-submit @@ -66,7 +69,7 @@ original_spark_submit_file=spark_submit_link log INFO "将软件所使用的spark-submit软链接到SPARK_HOME/bin/spark-submit" # 如果spark-submit不存在则直接创建软链接 if [ -L "$spark_submit_link" ]; then - log INFO "${spark_submit_link}是一个已存在的软链接`ls -l $spark_submit_link`" + log INFO "${spark_submit_link}是一个已存在的软链接`ls -l $spark_submit_link | cut -d' ' -f10-`" log INFO "即将对软链接进行刷新..." 
     original_spark_submit_file=`readlink -f $spark_submit_link`
 
@@ -87,13 +90,12 @@ elif [ -f "$spark_submit_link" ]; then
 
 else
     log INFO "$spark_submit_link is neither a symlink nor a regular file; creating a new symlink"
 
-    ln -n $spark_submit_modified $spark_submit_link
+    ln -sn $spark_submit_modified $spark_submit_link
     log INFO "Linking $spark_submit_link to $spark_submit_modified"
 fi
 
 # Show the results of the changes
-log INFO "Updated link status of $spark_submit_link:"
-log INFO "`ls -l $spark_submit_link`"
+log INFO "Updated link status of $spark_submit_link: `ls -l $spark_submit_link | cut -d' ' -f10-`"
 log INFO "Original spark_submit location before the symlink refresh: $original_spark_submit_file"
 
 log INFO "Success! OmniAdvisor2.0 automatic deployment complete"
\ No newline at end of file
-- 
Gitee

From 65982901ac0336884f7ba520441a26c9209c5400 Mon Sep 17 00:00:00 2001
From: =?UTF-8?q?=E5=BE=90=E8=89=BA=E4=B8=B9?= <53546877+Craven1701@users.noreply.github.com>
Date: Fri, 5 Sep 2025 15:23:16 +0800
Subject: [PATCH 3/3] cleancode

---
 omniadvisor/script/auto_deploy.sh | 2 ++
 1 file changed, 2 insertions(+)

diff --git a/omniadvisor/script/auto_deploy.sh b/omniadvisor/script/auto_deploy.sh
index a95396fa5..50d8e8692 100644
--- a/omniadvisor/script/auto_deploy.sh
+++ b/omniadvisor/script/auto_deploy.sh
@@ -1,3 +1,5 @@
+#!/bin/bash
+
 log() { # Usage: log LEVEL "message"
     local level=${1:-INFO}; shift
     printf '%s [%s] %s\n' "$(date '+%F %T%z')" "$level" "$*" >&2
-- 
Gitee
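
For reference, below is a minimal post-deployment sanity check. It is a sketch only, not part of the patch series above; it assumes SPARK_HOME is exported, that it is run from the same working directory as auto_deploy.sh (so the frame_dir derivation matches the script's prefix logic), and that GNU coreutils stat is available. The variable names prefix, link, and target are illustrative.

#!/bin/bash
# Post-deployment sanity check (illustrative sketch, not part of the patches above).
prefix=$(dirname "$(dirname "$(pwd)")")
frame_dir=$prefix/BoostKit-omniadvisor_2.0.0
link=$SPARK_HOME/bin/spark-submit
target=$frame_dir/script/spark-submit

# 1. SPARK_HOME/bin/spark-submit should resolve to the modified script shipped with the framework.
if [[ "$(readlink -f -- "$link")" == "$(readlink -f -- "$target")" ]]; then
    echo "OK: $link -> $target"
else
    echo "FAIL: $link does not point at $target" >&2; exit 1
fi

# 2. hijack_path should no longer be empty after the sed substitution performed by auto_deploy.sh.
if grep -q 'hijack_path=""' "$target"; then
    echo "FAIL: hijack_path is still empty in $target" >&2; exit 1
else
    echo "OK: hijack_path has been filled in"
fi

# 3. The modified script should carry the minimized 550 permissions (GNU stat assumed).
[[ "$(stat -c '%a' -- "$target")" == "550" ]] && echo "OK: permissions are 550" || echo "WARN: unexpected permissions on $target"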