diff --git a/torch_npu/profiler/analysis/prof_common_func/_log.py b/torch_npu/profiler/analysis/prof_common_func/_log.py
index 15ba7a80f9d10ed74e1e26a4a5be4ab9190b7ef0..965e28d1a08dabfb6523bbf7b544ec68124f3a1a 100644
--- a/torch_npu/profiler/analysis/prof_common_func/_log.py
+++ b/torch_npu/profiler/analysis/prof_common_func/_log.py
@@ -34,6 +34,7 @@ class ProfilerLogger:
     BACKUP_COUNT = 3
     # logger instance
     _instance = None
+    _pid = None
 
     @classmethod
     def get_instance(cls) -> logging.Logger:
@@ -54,7 +55,9 @@ class ProfilerLogger:
             RuntimeError: If logger initialization fails
         """
         if cls._instance is not None:
-            return
+            if cls._pid == os.getpid():
+                return
+            cls.destroy()
 
         # Create logs directory
         log_dir = os.path.join(output_dir, cls.DEFAULT_LOG_DIR)
@@ -89,6 +92,7 @@ class ProfilerLogger:
         logger.addHandler(file_handler)
 
         cls._instance = logger
+        cls._pid = os.getpid()
         logger.info("Profiler logger initialized at: %s", log_file)
 
     @classmethod
@@ -109,6 +113,16 @@ class ProfilerLogger:
         """Close and cleanup the logger."""
         if cls._instance:
             for handler in cls._instance.handlers[:]:
-                handler.close()
                 cls._instance.removeHandler(handler)
+                if cls._pid == os.getpid():
+                    # Owning process: full close flushes buffers and releases resources.
+                    handler.close()
+                else:
+                    # Forked child: avoid handler.close() — flushing buffers inherited
+                    # from the parent could emit duplicate records; close only the fd.
+                    try:
+                        if hasattr(handler.stream, 'fileno'):
+                            os.close(handler.stream.fileno())
+                    except (OSError, AttributeError, ValueError):
+                        pass
         cls._instance = None