diff --git a/tf_adapter_2.x/npu_device/core/npu_device.cpp b/tf_adapter_2.x/npu_device/core/npu_device.cpp
index f1d6f0f8e7dad5c623bb148175e9078f8eac7540..1849e34ddaaca2606467bf894a82bf41bcc1b547 100644
--- a/tf_adapter_2.x/npu_device/core/npu_device.cpp
+++ b/tf_adapter_2.x/npu_device/core/npu_device.cpp
@@ -808,7 +808,7 @@ void NpuDevice::RunGeGraphAsync(TFE_Context *context, uint64_t graph_id, int num
     if (err_msg.empty()) {
       err_msg = " code:" + std::to_string(s);
     }
-    done(tensorflow::errors::Internal("Graph engine process graph failed: ", err_msg));
+    done(tensorflow::errors::Internal("Graph engine process graph failed:\n", err_msg));
     return;
   } else if (ge_outputs.size() != static_cast<std::size_t>(num_outputs)) {
     done(tensorflow::errors::Internal("Graph engine process graph succeed but output num ", ge_outputs.size(),
diff --git a/tf_adapter_2.x/npu_device/core/npu_micros.h b/tf_adapter_2.x/npu_device/core/npu_micros.h
index cfeeb4ee06241189296b84df1d92f48809a685ce..933ef44a8c51a399b35660ede228f36eb2bc4d20 100644
--- a/tf_adapter_2.x/npu_device/core/npu_micros.h
+++ b/tf_adapter_2.x/npu_device/core/npu_micros.h
@@ -79,7 +79,7 @@
     if (err_msg.empty()) { \
       err_msg = " code:" + std::to_string(_status); \
     } \
-    CTX->status = tensorflow::errors::Internal(PREFIX, ":", err_msg); \
+    CTX->status = tensorflow::errors::Internal(PREFIX, ":\n", err_msg); \
     LOG(ERROR) << CTX->status.ToString(); \
     return; \
   } \
@@ -93,7 +93,7 @@
     if (err_msg.empty()) { \
       err_msg = " code:" + std::to_string(_status); \
     } \
-    (CTX)->status = tensorflow::errors::Internal(PREFIX, ":", err_msg); \
+    (CTX)->status = tensorflow::errors::Internal(PREFIX, ":\n", err_msg); \
     LOG(ERROR) << (CTX)->status.ToString(); \
     return RET; \
   } \
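
Review context, not part of the patch: tensorflow::errors::Internal() concatenates all of its arguments into one Status message, so switching the separator from ":" to ":\n" simply starts the (often multi-line) GE error report on its own line, both in the Status returned through done() and in the LOG(ERROR) output produced by the macros in npu_micros.h. A minimal sketch of the touched error path follows; the helper name ReportGeFailure and the standalone form are illustrative assumptions, not code from this repository.

// Sketch only: mirrors the error path changed by this diff, assuming TF 2.x headers.
#include <string>

#include "tensorflow/core/platform/errors.h"
#include "tensorflow/core/platform/status.h"

tensorflow::Status ReportGeFailure(int ge_status, std::string err_msg) {
  if (err_msg.empty()) {
    // Fall back to the numeric GE return code when no detail text is available.
    err_msg = " code:" + std::to_string(ge_status);
  }
  // Before this patch: "Graph engine process graph failed: <detail>"
  // After this patch:  "Graph engine process graph failed:" followed by <detail> on a new line.
  return tensorflow::errors::Internal("Graph engine process graph failed:\n", err_msg);
}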