Unable to create log files on a per-minute basis in Python when using multiprocessing


I am unable to get a new log file created every minute in Python when using multiprocessing:

import multiprocessing
import logging.handlers
import time

log_file_name = "test1.log"
logging_level = logging.DEBUG
formatter = logging.Formatter('%(asctime)s %(name)s %(levelname)s %(message)s')
handler = logging.handlers.TimedRotatingFileHandler(log_file_name, when='M', interval=1)
handler.suffix = "%Y-%m-%d"
handler.setFormatter(formatter)
logger = logging.getLogger(log_file_name)
logger.addHandler(handler)
logger.setLevel(logging_level)

def main_1():
    print("main 1")
    logger.info("A Sample Log Statement main_1 ")
    time.sleep(2)
    logger.info("A Sample Log Statement main_1 ")

def main_2():
    print("main 2")
    logger.info("A Sample Log Statement main_2 ")
    time.sleep(2)
    logger.info("A Sample Log Statement main_2 ")

def isReady():
    while True:
        p1 = multiprocessing.Process(target=main_1)
        p2 = multiprocessing.Process(target=main_2)
        p1.start()
        p2.start()
        p1.join()
        p2.join()

if __name__ == "__main__":
    isReady()
**Error:**
PermissionError: [WinError 32] The process cannot access the file because it is being used by another process: 'D:\user2\WorkSapace\MULTI\FileTransfer_dec_06\test1.log' -> 'D:\venkatareddy.m\WorkSapace\Uniper\FileTransfer_dec_06\test1.log.2021-12-13'
Call stack:
  File "<string>", line 1, in <module>
  File "C:\Program Files\Python39\lib\multiprocessing\spawn.py", line 116, in spawn_main
    exitcode = _main(fd, parent_sentinel)
  File "C:\Program Files\Python39\lib\multiprocessing\spawn.py", line 129, in _main
    return self._bootstrap(parent_sentinel)
  File "C:\Program Files\Python39\lib\multiprocessing\process.py", line 315, in _bootstrap
    self.run()
  File "C:\Program Files\Python39\lib\multiprocessing\process.py", line 108, in run
    self._target(*self._args, **self._kwargs)
  File "D:\user2\WorkSapace\MULTI\FileTransfer_dec_06\logging_2_new.py", line 27, in main_2
    logger.info("A Sample Log Statement main_2 ")
Message: 'A Sample Log Statement main_2 '
Arguments: ()
--- Logging error ---
Traceback (most recent call last):

The problem is that the logger uses a threading.RLock to provide thread safety across multiple threads logging to the same file, but that lock cannot really handle multiple processes logging to the same file.
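
To see why (a minimal sketch, not part of the original answer, using only the standard logging and threading modules): a handler creates its lock in createLock() when it is constructed, and that lock is a thread-level RLock. It serializes threads within one process only, and every process ends up with its own handler and therefore its own, unrelated lock.

import logging
import threading

# Minimal sketch: the lock a logging handler creates is a thread-level RLock.
# It only serializes threads inside a single process; another process has its
# own copy of the handler and therefore its own, unrelated lock.
handler = logging.Handler()    # __init__() calls createLock()
print(type(handler.lock))      # <class '_thread.RLock'>
print(isinstance(handler.lock, type(threading.RLock())))    # True (on CPython)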

Probably the best solution is to create a managed logging object that "lives" in a separate address space owned by a subclass of multiprocessing.BaseManager, which starts a thread that waits for method requests on that logging object. What actually gets passed to the worker processes is a proxy object; calling a method on the proxy amounts to a remote method call on the managed logger, and the managed logger then executes those calls single-threaded.

I have modified the code so that it is more likely to fail if there is a problem and so that it actually terminates: the infinite loop is replaced with a fixed number of iterations, and each worker writes 20 log records instead of sleeping between two.

import multiprocessing
from multiprocessing.managers import BaseManager
import logging.handlers

def init_log():
    log_file_name = "test.log"
    logging_level = logging.DEBUG
    formatter = logging.Formatter('%(asctime)s %(name)s %(levelname)s %(message)s')
    handler = logging.handlers.TimedRotatingFileHandler(log_file_name, when='M', interval=1)
    handler.suffix = "%Y-%m-%d"
    handler.setFormatter(formatter)
    logger = logging.getLogger(log_file_name)
    logger.addHandler(handler)
    logger.setLevel(logging_level)
    return logger

class LoggerManager(BaseManager):
    pass

class MyLogger:
    # instances live in the manager's process; workers only hold a proxy
    def __init__(self):
        self.logger = init_log()

    def info(self, *args, **kwargs):
        self.logger.info(*args, **kwargs)

    # define other methods such as debug if required

def main_1(logger):
    print("main 1")
    for _ in range(20):
        logger.info("A Sample Log Statement main_1")

def main_2(logger):
    print("main 2")
    for _ in range(20):
        logger.info("A Sample Log Statement main_2")

def isReady():
    LoggerManager.register('Logger', MyLogger)
    with LoggerManager() as manager:
        my_logger = manager.Logger()    # proxy to the single MyLogger instance
        for _ in range(5):
            p1 = multiprocessing.Process(target=main_1, args=(my_logger,))
            p2 = multiprocessing.Process(target=main_2, args=(my_logger,))
            p1.start()
            p2.start()
            p1.join()
            p2.join()

if __name__ == "__main__":
    isReady()

An alternative is to solve the problem by subclassing logging.handlers.TimedRotatingFileHandler so that it is instantiated with a multiprocessing.RLock instance. The base class's createLock method is then overridden to initialize the lock attribute with that multiprocessing.RLock. Needless to say, all processes must use the same multiprocessing.RLock instance.

import multiprocessing
import logging.handlers

class MyHandler(logging.handlers.TimedRotatingFileHandler):
    def __init__(self, lock, *args, **kwargs):
        self._mp_lock = lock  # for multiprocessing "thread" safety
        super().__init__(*args, **kwargs)

    def createLock(self):
        self.lock = self._mp_lock

def init_log(lock):
    global logger
    log_file_name = "test.log"
    logging_level = logging.DEBUG
    formatter = logging.Formatter('%(asctime)s %(name)s %(levelname)s %(message)s')
    handler = MyHandler(lock, log_file_name, when='M', interval=1)
    handler.suffix = "%Y-%m-%d"
    handler.setFormatter(formatter)
    logger = logging.getLogger(log_file_name)
    logger.addHandler(handler)
    logger.setLevel(logging_level)

def main_1(lock):
    init_log(lock)
    print("main 1")
    for _ in range(20):
        logger.info("A Sample Log Statement main_1")

def main_2(lock):
    init_log(lock)
    print("main 2")
    for _ in range(20):
        logger.info("A Sample Log Statement main_2")

def isReady():
    lock = multiprocessing.RLock()
    for _ in range(5):
        p1 = multiprocessing.Process(target=main_1, args=(lock,))
        p2 = multiprocessing.Process(target=main_2, args=(lock,))
        p1.start()
        p2.start()
        p1.join()
        p2.join()

if __name__ == "__main__":
    isReady()
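
One note on this second approach: each worker calls init_log itself, so every process builds its own handler, but they all share the one multiprocessing.RLock created in isReady and handed over via args=(lock,). On Windows, where child processes are started with the spawn method, the lock has to be passed to the children this way rather than created at module level; otherwise each process would get a different lock and the rotation race would remain.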