Multiprocessing function not stopping



I'm a bioinformatician trying to create a script that monitors the hardware resource usage of my pipeline scripts. A run can take up to 15 hours, so I need something that takes periodic snapshots while the scripts are running.

I decided to run two functions in parallel: a monitoring function, get_stats(), which re-arms itself on a recursive Timer so it takes a snapshot at each interval, and in_command(), which takes a text file containing the os commands used to call the script pipeline and runs them.

My problem is that the get_stats() function does not stop once the other scripts have finished. I have a flag (process_switch) that is supposed to flip once the other function completes and stop get_stats(), but it never does. I'm new to parallel processing.

import psutil
import platform
from datetime import datetime
import multiprocessing
import time
import os
from threading import Timer
import multiprocessing as mp

def get_size(bytes, suffix="B"):
    """
    Scale bytes to its proper format
    e.g:
    1253656 => '1.20MB'
    1253656678 => '1.17GB'
    """
    factor = 1024
    for unit in ["", "K", "M", "G", "T", "P"]:
        if bytes < factor:
            return f"{bytes:.2f}{unit}{suffix}"
        bytes /= factor

def get_stats():
    global df
    global process_switch
    global start_time
    df['time'].append(time.time() - start_time)
    # Get core information
    df['total_cores'].append(psutil.cpu_count(logical=True))
    df['physical_cores'].append(psutil.cpu_count(logical=False))
    cpufreq = psutil.cpu_freq()
    # cpu frequency in MHz
    df['max_frequency'].append(cpufreq.max)
    df['min_frequency'].append(cpufreq.min)
    df['current_frequency'].append(cpufreq.current)
    cpu_core = {}
    for i, percentage in enumerate(psutil.cpu_percent(percpu=True, interval=1)):
        cpu_core[str(i)] = percentage
    df['cpu_core'].append(cpu_core)
    # get ram information
    svmem = psutil.virtual_memory()
    df['total_memory'].append(get_size(svmem.total))
    df['available_memory'].append(get_size(svmem.available))
    df['used_memory'].append(get_size(svmem.used))
    df['percent_memory'].append(svmem.percent)
    # swap memory if it exists
    swap = psutil.swap_memory()
    df['swap_total'].append(get_size(swap.total))
    df['swap_free'].append(get_size(swap.free))
    df['swap_used'].append(get_size(swap.used))
    df['swap_percentage'].append(swap.percent)
    print(df)
    # check to see if the other function has finished and stop this process
    if process_switch == 1:
        t = Timer(20, get_stats)
        t.start()
    else:
        t.stop()

def in_command(file):
    # takes in a text file containing command lines and runs them sequentially
    global process_switch
    f = open(file, 'r')
    f_lines = f.readlines()
    for line in f_lines:
        print(line)
        os.system(line)
    f.close()
    # turn off monitoring software
    process_switch = 0

if __name__ == "__main__":
    df = {'time': [], 'total_cores': [], 'physical_cores': [], 'max_frequency': [],
          'min_frequency': [], 'current_frequency': [], 'cpu_core': [], 'total_memory': [],
          'available_memory': [], 'used_memory': [], 'percent_memory': [], 'swap_total': [],
          'swap_free': [], 'swap_used': [], 'swap_percentage': []}

    process_switch = 1
    start_time = time.time()

    p1 = mp.Process(target=get_stats, args=())
    p2 = mp.Process(target=in_command, args=('text_command.txt',))
    p1.start()
    p2.start()
    p1.join()
    p2.join()
    print(df)
    print('finished')

Thanks, martineau! I didn't know that global variables aren't shared between processes; now that I stop and think it through logically, it makes sense. For anyone else with this problem, I was able to solve it by declaring the shared state with the multiprocessing.Manager() function. There's a good write-up on the issue here: https://blog.ruanbekker.com/blog/2019/02/19/sharing-global-variables-in-python-using-multiprocessing/. Thanks for the help!
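For reference, here is a minimal sketch of that idea rather than the exact fix used in the pipeline: the stop flag is a multiprocessing.Value and the snapshots go into a Manager list, and both are passed to the processes as arguments instead of relying on globals. The snapshot fields are simplified placeholders, and a while loop with time.sleep() stands in for the recursive Timer.

import os
import time
import multiprocessing as mp

import psutil


def get_stats(stats, stop_flag, interval=20):
    # Keep sampling until the command-running process flips the shared flag.
    while not stop_flag.value:
        stats.append({'time': time.time(),
                      'cpu_percent': psutil.cpu_percent(interval=1),
                      'memory_percent': psutil.virtual_memory().percent})
        time.sleep(interval)


def in_command(file, stop_flag):
    # Run each command line from the file sequentially, then signal the monitor.
    with open(file) as f:
        for line in f:
            print(line)
            os.system(line)
    stop_flag.value = 1


if __name__ == "__main__":
    manager = mp.Manager()
    stats = manager.list()        # shared list of snapshot dicts
    stop_flag = mp.Value('i', 0)  # shared integer flag, 0 = keep monitoring

    p1 = mp.Process(target=get_stats, args=(stats, stop_flag))
    p2 = mp.Process(target=in_command, args=('text_command.txt', stop_flag))
    p1.start()
    p2.start()
    p2.join()
    p1.join()
    print(list(stats))
    print('finished')

Because the flag and the list are proxies managed by the parent process, the change made in in_command() is actually visible to get_stats(), which is exactly what the plain globals could not do.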
