Why don't logging and concurrent.futures work together?



I tried the following script, but logging does not work.

import time
import concurrent.futures
import logging
import os

######## Logging ################
logger = logging.getLogger(__name__)
logger.setLevel(logging.INFO)
file_handler = logging.FileHandler(os.path.join('logs', 'pipeline2.log'))
# In case we also have a StreamHandler with level INFO
file_handler.setLevel(logging.INFO)
formatter = logging.Formatter('%(asctime)s:%(levelname)s:%(name)s:%(message)s')
file_handler.setFormatter(formatter)
logger.addHandler(file_handler)

def f(x):
    time.sleep(0.001)  # to visualize the progress
    logger.info(x**2)

def run(f, my_iter):
    """
    https://stackoverflow.com/questions/51601756/use-tqdm-with-concurrent-futures
    """
    import concurrent.futures
    my_iter = list(my_iter)
    l = len(my_iter)
    with concurrent.futures.ThreadPoolExecutor(max_workers=10) as executor:
        futures = {executor.submit(f, arg): arg for arg in my_iter}
        results = {}
        for future in concurrent.futures.as_completed(futures):
            arg = futures[future]
            results[arg] = future.result()

my_iter = range(10000)
run(f, my_iter)

When I run the script, no pipeline2.log file appears in logs. How can I fix this?
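For comparison, here is a minimal baseline that does log to a file from ThreadPoolExecutor workers. This is my own simplified sketch, assuming a plain script run from a terminal; baseline.log is a placeholder written to the current working directory to rule out path problems:

import concurrent.futures
import logging
import time

# basicConfig attaches a FileHandler to the root logger.
logging.basicConfig(
    filename='baseline.log',
    level=logging.INFO,
    format='%(asctime)s:%(levelname)s:%(name)s:%(message)s',
)
logger = logging.getLogger(__name__)

def f(x):
    time.sleep(0.001)
    logger.info(x ** 2)  # logging is thread-safe, so calling it from workers is fine

with concurrent.futures.ThreadPoolExecutor(max_workers=10) as executor:
    list(executor.map(f, range(100)))

If this baseline writes the file but my script does not, the difference is presumably in the handler setup or the path, not in the interaction between logging and futures.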

Edit: as suggested, I removed tqdm from the code (I had assumed it was not interfering), so that the example is easy to reproduce.

Depending on how I run the code, I can reproduce the following cases in which logging does not work.

import time
import concurrent.futures
import logging
import os

logger = logging.getLogger(__name__)
log_path = os.path.join('logs', 'pipeline2.log')

def add_file_handler(log_path):
    logger.setLevel(logging.INFO)
    file_handler = logging.FileHandler(log_path)
    file_handler.setLevel(logging.INFO)
    formatter = logging.Formatter('%(asctime)s:%(levelname)s:%(name)s:%(message)s')
    file_handler.setFormatter(formatter)
    logger.addHandler(file_handler)
    print(f"handler fileno {file_handler.stream.fileno()} added")

def remove_file(filepath):
    print(f"{filepath} exists: {os.path.exists(filepath)}")
    try:
        print(f"{filepath} remove: {os.remove(filepath)}")
    except FileNotFoundError:
        pass

def f(x):
    time.sleep(0.001)  # to visualize the progress
    logger.info(x**2)

def run(f, my_iter):
    """
    https://stackoverflow.com/questions/51601756/use-tqdm-with-concurrent-futures
    """
    import concurrent.futures
    my_iter = list(my_iter)
    l = len(my_iter)
    with concurrent.futures.ThreadPoolExecutor(max_workers=10) as executor:
        futures = {executor.submit(f, arg): arg for arg in my_iter}
        results = {}
        for future in concurrent.futures.as_completed(futures):
            arg = futures[future]
            results[arg] = future.result()

Running

remove_file(log_path)
add_file_handler(log_path)
remove_file(log_path)
my_iter = range(10)
run(f, my_iter)
try:
    with open(log_path) as f:
        print(f.read())
except FileNotFoundError:
    print(f"No such file or directory {log_path}")

prints

logs/pipeline2.log exists: True
logs/pipeline2.log remove: None
handler fileno 58 added
logs/pipeline2.log exists: True
logs/pipeline2.log remove: None
No such file or directory logs/pipeline2.log
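My understanding of this output (assuming a POSIX filesystem) is that the FileHandler keeps the file descriptor it opened, so after os.remove the path is unlinked but the handler keeps writing to the now-unlinked file; nothing reappears at logs/pipeline2.log. A small sketch, independent of logging, that illustrates this behaviour:

import os

# Open a file, then unlink the path while the descriptor stays open.
path = 'unlink_demo.txt'  # hypothetical scratch file
f = open(path, 'w')
os.remove(path)

f.write('still goes to the unlinked file\n')
f.flush()

print(os.path.exists(path))           # False: the path is gone
print(os.fstat(f.fileno()).st_nlink)  # 0: the open file has no remaining links
f.close()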

Then running

add_file_handler(log_path)
my_iter = range(10)
run(f, my_iter)
for handler in logger.handlers:
    print(handler.stream.fileno())
    print(handler.stream.write(f"fileno: {handler.stream.fileno()}\n"))
    print(handler.stream.flush())
try:
    with open(log_path) as f:
        print(f.read())
except FileNotFoundError:
    print(f"No such file or directory {log_path}")

outputs

handler fileno 61 added
58
11
None
61
11
None
2020-10-23 23:31:27,149:INFO:__main__:0
2020-10-23 23:31:27,149:INFO:__main__:1
2020-10-23 23:31:27,157:INFO:__main__:36
2020-10-23 23:31:27,155:INFO:__main__:16
2020-10-23 23:31:27,155:INFO:__main__:25
2020-10-23 23:31:27,156:INFO:__main__:9
2020-10-23 23:31:27,150:INFO:__main__:4
2020-10-23 23:31:27,159:INFO:__main__:64
2020-10-23 23:31:27,159:INFO:__main__:49
2020-10-23 2
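Calling add_file_handler twice leaves two handlers on the logger (filenos 58 and 61 above), one of which still points at the file removed earlier. A guard I now use before re-adding a handler (my own addition, not part of the original script):

import logging

logger = logging.getLogger(__name__)

def reset_file_handlers(logger):
    # Close and detach any FileHandler already attached, so re-running the
    # setup in the same interpreter (e.g. a notebook) does not stack handlers
    # that write to stale file descriptors.
    for handler in list(logger.handlers):
        if isinstance(handler, logging.FileHandler):
            handler.close()
            logger.removeHandler(handler)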

Latest update