让tensorflow在nodejs服务上更快



我已经创建了一个代码来用tensorflow js检测图像上的对象,但它确实很慢。为此,我安装了npmjs包:

@tensorflow/tfjs
@tensorflow-models/coco-ssd
@tensorflow-models/mobilenet
get-image-data

这是我的脚本:

const tf = require('@tensorflow/tfjs')
// Load the binding (CPU computation)
const mobilenet = require('@tensorflow-models/mobilenet');
const cocoSsd = require("@tensorflow-models/coco-ssd");
const events = require('events');
const post_event = new events.EventEmitter();
const start = Date.now()
// for getting the data images
const image = require('get-image-data')
// Load the image from disk, strip the alpha channel (RGBA -> RGB),
// and hand the resulting int32 tensor to the detection pipeline.
image('./img/cup.jpg', async (err, image) => {
    // The original ignored `err`, which would crash later on an
    // undefined `image`; fail loudly instead.
    if (err) {
        console.error('Failed to load image:', err);
        return;
    }
    const numChannels = 3;
    const numPixels = image.width * image.height;
    const values = new Int32Array(numPixels * numChannels);
    // `const` here — the original `pixels = image.data` created an
    // implicit global.
    const pixels = image.data;
    for (let i = 0; i < numPixels; i++) {
        for (let channel = 0; channel < numChannels; ++channel) {
            // Source data is RGBA (4 bytes per pixel); copy only R, G, B.
            values[i * numChannels + channel] = pixels[i * 4 + channel];
        }
    }
    const outShape = [image.height, image.width, numChannels];
    const input = tf.tensor3d(values, outShape, 'int32');
    await load(input);
});
// Run MobileNet classification and COCO-SSD detection on `img` in
// parallel, merge both result lists into `post_predictions`, and emit
// "finish" once both models have reported.
//
// NOTE(review): both models are (re)loaded on every call — that is the
// bulk of the reported latency. Load them once at process startup and
// reuse the loaded instances per image.
const load = async img => {
    console.log("IMG LOADED in ", (Date.now() - start) / 1000, "s");
    // The original `let mobilenet_ = cocossd_ = false` only declared
    // `mobilenet_`; `cocossd_` leaked as an implicit global. Declare
    // both completion flags locally.
    let mobilenet_ = false;
    let cocossd_ = false;
    const post_predictions = [];

    mobilenet.load().then(async model => {
        console.log("mobilenet loaded in ", (Date.now() - start) / 1000, "s");
        model.classify(img).then(async classify_predictions => {
            // `for...of` replaces the original `for(i=0;...)`, which
            // leaked `i` as an implicit global.
            for (const element of classify_predictions) {
                // A className may hold several comma-separated labels
                // (e.g. "cup, mug"); record each with the shared score.
                const each_class = element["className"].split(", ");
                each_class.forEach(this_element => {
                    post_predictions.push([this_element, (element.probability * 100)]);
                });
            }
            post_event.emit("mobilenet");
        }).catch(err => console.error("mobilenet classify failed:", err));
    }).catch(err => console.error("mobilenet load failed:", err));

    cocoSsd.load().then(async model => {
        console.log("cocossd loaded in ", (Date.now() - start) / 1000, "s");
        model.detect(img).then(predictions => {
            for (const this_element of predictions) {
                // Detections are unshifted to the front, as in the
                // original, until the final sort orders everything.
                post_predictions.unshift([this_element.class, (this_element.score * 100)]);
            }
            post_event.emit("cocossd");
        }).catch(err => console.error("cocossd detect failed:", err));
    }).catch(err => console.error("cocossd load failed:", err));

    post_event.on("mobilenet", () => {
        console.log("mobilenet(longest) finished in ", (Date.now() - start) / 1000, "s", post_predictions);
        mobilenet_ = true;
        if (mobilenet_ && cocossd_) {
            post_event.emit("finish");
        }
    }).on("cocossd", () => {
        console.log("cocossd finished in ", (Date.now() - start) / 1000, "s", post_predictions);
        cocossd_ = true;
        if (mobilenet_ && cocossd_) {
            post_event.emit("finish");
        }
    }).on("finish", () => {
        // Highest-confidence predictions first.
        post_predictions.sort((a, b) => {
            return b[1] - a[1];
        });
        console.log("Post in ", (Date.now() - start) / 1000, "s", post_predictions);
    });
};

这是有效的,但当我运行它时,它真的很慢,以下是结果:

IMG LOADED in  0.486 s
cocossd loaded in  6.11 s
cocossd finished in  9.028 s [ [ 'cup', 95.68768739700317 ] ]
mobilenet loaded in  10.845 s
mobilenet(longest) finished in  12.795 s [
[ 'cup', 95.68768739700317 ],
[ 'cup', 69.30274367332458 ],
[ 'espresso', 17.099112272262573 ],
[ 'coffee mug', 13.384920358657837 ]
]
Post in  12.809 s [
[ 'cup', 95.68768739700317 ],
[ 'cup', 69.30274367332458 ],
[ 'espresso', 17.099112272262573 ],
[ 'coffee mug', 13.384920358657837 ]
]

我看过一些视频,他们说 Node.js 版本的 mobilenet 只需要 20ms 就能得到结果。但在我的应用程序上,它需要 10 多秒。也许我做错了什么。有人能帮我解决这个问题吗?

感谢

加载模型需要一些时间。例如,您可以创建一个接收图像并进行对象检测的 express 服务器。启动服务器时预先加载好模型,这样对于每个 API 请求,模型都已经加载完毕,检测就能在几毫秒内完成(但愿如此 :-))。另外,在 Node.js 上应使用原生后端 `@tensorflow/tfjs-node` 代替纯 JS 的 `@tensorflow/tfjs`,推理速度会快一个数量级。

最新更新