Reading a Kafka topic and exposing the data through a REST API for Prometheus to scrape (Node.js)



I am using kafkajs to read data from a Kafka topic, and I want to expose that data through an HTTP endpoint so that Prometheus can scrape it. However, I am unable to expose the topic's data. I wrote a producer and a consumer like this:

Producer.js

// import the `Kafka` instance from the kafkajs library
const { Kafka, logLevel } = require("kafkajs")
const fs = require("fs");
const path = require("path");

// the client ID lets Kafka know who's producing the messages
const clientId = "my-app"
// we can define the list of brokers in the cluster
const brokers = ["localhost:9092"]
// this is the topic to which we want to write messages
const topic = "message-log"

// initialize a new Kafka client and initialize a producer from it
const kafka = new Kafka({
  clientId,
  brokers,
  // logLevel: logLevel.INFO
})
const producer = kafka.producer({})

// we define an async function that writes the contents of metrics.txt to the topic
const produce = async () => {
  await producer.connect()
  try {
    // send a message to the configured topic, using the metrics file as the value
    await producer.send({
      topic,
      acks: 1,
      messages: [
        {
          key: "metrics on premise",
          value: fs.readFileSync(path.join(__dirname, 'metrics.txt'), 'utf8'),
        },
      ],
    })
    // if the message is written successfully, log it
    console.log("writes:  #####################")
  } catch (err) {
    console.error("could not write message " + err)
  }
}

module.exports = produce

Index.js

const produce = require("./produce")
const consume = require("./consume")
const fs = require("fs");
const path = require("path");
const express = require('express')

const app = express()
const port = 3003

app.get('/metrics', async (req, res) => {
  // res.send(fs.readFileSync(path.join(__dirname, 'topic_message.txt'), 'utf8'))
  consume(res).catch(err => {
    console.error("Error in consumer: ", err)
  })
})

app.listen(port, () => {
  console.log(`Example app listening at http://localhost:${port}`)
})

// call the `produce` function and log an error if it occurs
produce().catch((err) => {
  console.error("error in producer: ", err)
})

Below is the consumer, Consumer.js:

const { Kafka, logLevel } = require("kafkajs")
const fs = require("fs");
const path = require("path");

const clientId = "my-app"
const brokers = ["localhost:9092"]
const topic = "message-log"

const kafka = new Kafka({
  clientId,
  brokers,
  // logCreator: customLogger,
  // logLevel: logLevel.DEBUG,
})
const consumer = kafka.consumer({
  groupId: clientId,
  minBytes: 5,
  maxBytes: 1e6,
  // wait for at most 3 seconds before receiving new data
  maxWaitTimeInMs: 3000,
});

const consume = async (res) => {
  // first, we wait for the client to connect and subscribe to the given topic
  let myString = "";
  await consumer.connect()
  await consumer.subscribe({
    topic,
    fromBeginning: true
  })
  await consumer.run({
    // this function is called every time the consumer gets a new message
    eachMessage: ({ message }) => {
      console.log("Message received ###############################################################################");
      res.send(message.value);
    },
  })
  setTimeout(async () => {
    await consumer.disconnect();
  }, 2000);
}

module.exports = consume

When I hit the API, the consumed message is never sent back in the response.
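
For context on why nothing comes back: the handler wires the per-request res into a consumer that delivers messages asynchronously and is disconnected two seconds later, so the response often never gets a message to send, and any later record would trigger res.send again on an already-finished response. A common pattern for a /metrics endpoint is to run one long-lived consumer at startup, cache the latest record, and have the route return the cache. The sketch below is only illustrative (the latestMetrics variable and the single-file layout are not from the original code):

// index.js sketch — start one consumer at boot, cache the newest record,
// and let the scrape endpoint return whatever was cached last.
const express = require("express")
const { Kafka } = require("kafkajs")

const kafka = new Kafka({ clientId: "my-app", brokers: ["localhost:9092"] })
const consumer = kafka.consumer({ groupId: "my-app" })

let latestMetrics = "" // written by the consumer, read by the route

const startConsumer = async () => {
  await consumer.connect()
  await consumer.subscribe({ topic: "message-log", fromBeginning: true })
  await consumer.run({
    eachMessage: async ({ message }) => {
      // message.value is a Buffer (or null for tombstones)
      if (message.value) latestMetrics = message.value.toString()
    },
  })
}

const app = express()
app.get("/metrics", (req, res) => {
  // Prometheus expects the text exposition format; here we simply relay
  // whatever text the producer put on the topic.
  res.type("text/plain").send(latestMetrics)
})

startConsumer().catch((err) => console.error("Error in consumer: ", err))
app.listen(3003, () => console.log("Listening on http://localhost:3003"))

This keeps the Prometheus pull model intact: each scrape just reads whatever the consumer last saw, and the consumer is never tied to an individual HTTP request.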

I'm not sure this is a good approach unless you are scraping via a streaming HTTP response or using websockets (which you are not doing in this code).

If you really want to get Kafka records into Prometheus, push them from the consumer through the Pushgateway rather than relying on a synchronous HTTP scrape.
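
A minimal sketch of that push-based direction, assuming the prom-client package (v14+, where pushAdd returns a promise) and a Pushgateway running at localhost:9091; the metric name and job name are made up for illustration, and mapping your actual record payload onto metrics is up to you:

// push-consumer.js sketch — consume from Kafka and push to a Pushgateway.
const { Kafka } = require("kafkajs")
const client = require("prom-client")

const kafka = new Kafka({ clientId: "my-app", brokers: ["localhost:9092"] })
const consumer = kafka.consumer({ groupId: "my-app" })

const registry = new client.Registry()
// Illustrative metric; derive real gauges/counters from message.value instead.
const consumed = new client.Counter({
  name: "kafka_messages_consumed_total",
  help: "Number of records read from the message-log topic",
  registers: [registry],
})
const gateway = new client.Pushgateway("http://localhost:9091", {}, registry)

const run = async () => {
  await consumer.connect()
  await consumer.subscribe({ topic: "message-log", fromBeginning: true })
  await consumer.run({
    eachMessage: async ({ message }) => {
      consumed.inc()
      // parse message.value and update metrics here, then push the registry;
      // pushing on every record is chatty, so batching may be preferable
      await gateway.pushAdd({ jobName: "kafka-metrics-bridge" })
    },
  })
}

run().catch((err) => console.error("push consumer failed: ", err))

With this, Prometheus scrapes the Pushgateway on its own schedule and the consumer is no longer coupled to an HTTP request/response cycle.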
