diff --git a/TensorFlow/contrib/cv/SYMNET_ID1292_for_Tensorflow/pip-requirements.txt b/TensorFlow/contrib/cv/SYMNET_ID1292_for_Tensorflow/pip-requirements.txt
index a90c02fe57c8fd8b78226eabfbc114d1f35278a1..4829dd3a1b9f501b323b7dd1b7200891c24bf7a3 100644
--- a/TensorFlow/contrib/cv/SYMNET_ID1292_for_Tensorflow/pip-requirements.txt
+++ b/TensorFlow/contrib/cv/SYMNET_ID1292_for_Tensorflow/pip-requirements.txt
@@ -1,3 +1,3 @@
 torch==1.10.0
-torchvision==0.11.3
+torchvision==0.6.0a0
 numpy==1.21.2
\ No newline at end of file
diff --git a/TensorFlow/contrib/cv/SYMNET_ID1292_for_Tensorflow/run_symnet.py b/TensorFlow/contrib/cv/SYMNET_ID1292_for_Tensorflow/run_symnet.py
index 7556ce8d15a588fefa194326e25ff517bb47be45..def3bc0f21fe2b43e1203c47aa750abb01e00b4c 100644
--- a/TensorFlow/contrib/cv/SYMNET_ID1292_for_Tensorflow/run_symnet.py
+++ b/TensorFlow/contrib/cv/SYMNET_ID1292_for_Tensorflow/run_symnet.py
@@ -180,7 +180,7 @@ if args.ifNPU == True:
     profiling_dir = "/cache/profiling"
     os.makedirs(profiling_dir)
 
-import moxing as mox
+#import moxing as mox
 
 
 ############################################################
@@ -206,7 +206,7 @@ def main():
         sw = SolverWrapper(net, train_dataloader, test_dataloader, args)
         sw.trainval_model(sess, args.epoch)
 
-        mox.file.copy_parallel(profiling_dir, args.train_url)
+        #mox.file.copy_parallel(profiling_dir, args.train_url)
 
 
 ################################################################################
diff --git a/TensorFlow/contrib/cv/SYMNET_ID1292_for_Tensorflow/test/train_full_1p.sh b/TensorFlow/contrib/cv/SYMNET_ID1292_for_Tensorflow/test/train_full_1p.sh
index c663be49d974421d9f32e639709d71a18b35156a..fc25ec4692727da016c22f7ef4a94a2a08ae2a28 100644
--- a/TensorFlow/contrib/cv/SYMNET_ID1292_for_Tensorflow/test/train_full_1p.sh
+++ b/TensorFlow/contrib/cv/SYMNET_ID1292_for_Tensorflow/test/train_full_1p.sh
@@ -7,7 +7,13 @@
 ##########################################################
 # shell脚本所在路径
 cur_path=`echo $(cd $(dirname $0);pwd)`
-
+cd /
+if [ -d ./cache/profiling ]
+then
+    rm -rf ./cache
+fi
+cd ${cur_path}
+#cur_path=`echo $(cd $(dirname $0);pwd)`
 # 判断当前shell是否是performance
 perf_flag=`echo $0 | grep performance | wc -l`
 
@@ -113,13 +119,51 @@ epochs=700
 
 if [ x"${modelarts_flag}" != x ];
 then
-    python3.7 ./run_symnet.py --name UT_best --data UT --epoch=${epochs} --obj_pred UT_obj_lr1e-3_test_ep260.pkl --trained_weight snapshot_epoch_586.ckpt --batchnorm --wordvec onehot --lr 1e-4 --bz=${batch_size} --lambda_cls_attr 1 --lambda_cls_obj 0.5 --lambda_trip 0.5 --lambda_sym 0.01 --lambda_axiom 0.03 --data_url=${data_path} --train_url=${output_path}
-#    python3.7 ./test_symnet.py --name UT_best --data UT --epoch=${epochs} --obj_pred UT_obj_lr1e-3_test_ep260.pkl --wordvec onehot --batchnorm --data_url=${data_path} --train_url=${output_path}
+    echo -------123456--------
+    python3.7 ./run_symnet.py --name UT_best \
+        --data UT \
+        --epoch=${epochs} \
+        --obj_pred UT_obj_lr1e-3_test_ep260.pkl \
+        --trained_weight snapshot_epoch_586.ckpt \
+        --batchnorm \
+        --wordvec onehot \
+        --lr 1e-4 \
+        --bz=${batch_size} \
+        --lambda_cls_attr 1 \
+        --lambda_cls_obj 0.5 \
+        --lambda_trip 0.5 \
+        --lambda_sym 0.01 \
+        --lambda_axiom 0.03 \
+        --data_url=${data_path}/data/ \
+        --train_url=${output_path} 1>${print_log} 2>&1
+#    python3.7 ./test_symnet.py --name UT_best \
+        #--data UT \
+        #--epoch 600 \
+        #--obj_pred UT_lr1e-3_ep140.pkl \
+        #--wordvec onehot \
+        #--batchnorm >>${print_log} 2>&1
 else
-    python3.7 ./run_symnet.py --name UT_best --data UT --epoch${epochs} --obj_pred UT_obj_lr1e-3_test_ep260.pkl --trained_weight snapshot_epoch_586.ckpt --batchnorm --wordvec onehot --lr 1e-4 --bz=${batch_size} --lambda_cls_attr 1 --lambda_cls_obj 0.5 --lambda_trip 0.5 --lambda_sym 0.01 --lambda_axiom 0.03 --data_url=${data_path} --train_url=${output_path} 1>${print_log} 2>&1
-#    python3.7 ./test_symnet.py --name UT_best --data UT --epoch=${epochs} --obj_pred UT_obj_lr1e-3_test_ep260.pkl --wordvec onehot --batchnorm --data_url=${data_path} --train_url=${output_path}
+    echo --------1234567----------
+    python3.7 ./run_symnet.py --name UT_best \
+        --data UT \
+        --epoch=${epochs} \
+        --obj_pred UT_obj_lr1e-3_test_ep260.pkl \
+        --trained_weight snapshot_epoch_586.ckpt \
+        --batchnorm \
+        --wordvec onehot \
+        --lr 1e-4 \
+        --bz=${batch_size} \
+        --lambda_cls_attr 1 \
+        --lambda_cls_obj 0.5 \
+        --lambda_trip 0.5 \
+        --lambda_sym 0.01 \
+        --lambda_axiom 0.03 \
+        --data_url=${data_path}/data \
+        --train_url=${output_path} 1>${print_log} 2>&1
+    fi
+
 # 性能相关数据计算
 StepTime=`grep "sec/step :" ${print_log} | tail -n 20 | awk -F ':' '{print $NF}' | awk '{sum+=$1} END {print sum/NR}'`
 FPS=`awk 'BEGIN{printf "%.2f\n", '${batch_size}'/'${StepTime}'}'`
@@ -184,4 +228,9 @@ echo "ActualFPS = ${FPS}" >> $cur_path/output/$ASCEND_DEVICE_ID/${CaseName}.log
 echo "TrainingTime = ${StepTime}" >> $cur_path/output/$ASCEND_DEVICE_ID/${CaseName}.log
 echo "ActualLoss = ${ActualLoss}" >> $cur_path/output/$ASCEND_DEVICE_ID/${CaseName}.log
 echo "E2ETrainingTime = ${e2e_time}" >> $cur_path/output/$ASCEND_DEVICE_ID/${CaseName}.log
-echo "TrainAccuracy = ${train_accuracy}" >> $cur_path/test/output/$ASCEND_DEVICE_ID/${CaseName}.log
\ No newline at end of file
+echo "TrainAccuracy = ${train_accuracy}" >> $cur_path/output/$ASCEND_DEVICE_ID/${CaseName}.log
+cd /
+if [ -d ./cache/profiling ]
+then
+    rm -rf ./cache
+fi
diff --git a/TensorFlow/contrib/cv/UDCVO_ID2359_for_TensorFlow/test/train_performance_1p.sh b/TensorFlow/contrib/cv/UDCVO_ID2359_for_TensorFlow/test/train_performance_1p.sh
index 4a72627bfe582424001c7c54817da5e5d3238cd4..8b1e18d922a07576e158bff7487fdce5e0d79f3a 100644
--- a/TensorFlow/contrib/cv/UDCVO_ID2359_for_TensorFlow/test/train_performance_1p.sh
+++ b/TensorFlow/contrib/cv/UDCVO_ID2359_for_TensorFlow/test/train_performance_1p.sh
@@ -141,7 +141,7 @@ then
         --rot_param exponential \
         --n_summary 10 \
         --n_checkpoint 50 \
-        --checkpoint_path /home/ma-user/modelarts/outputs/train_url_0/
+        --checkpoint_path /home/ma-user/modelarts/outputs/train_url_0/ >${print_log} 2>&1
 else
     #python3.7 ./LeNet.py --data_path=${data_path} --output_path=${output_path} --steps=${train_steps} > ${print_log}
     python3.7 src/train_voiced.py \