js-executor: maxBatchSize
parent 35e2ff99c3
commit 0970ce65b4
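This change replaces the producer's fixed 200 ms flush timer with size- and time-based batching: responses accumulate in batchMessages and go out as one sendBatch call either when maxBatchSize messages have queued or when the linger interval elapses, whichever comes first. The verbose consumer event listeners are commented out, and shutdown now flushes any pending batch before disconnecting the producer.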
@@ -26,6 +26,9 @@ const acks = Number(config.get('kafka.acks'));
 const requestTimeout = Number(config.get('kafka.requestTimeout'));
 const compressionType = (config.get('kafka.requestTimeout') === "gzip") ? CompressionTypes.GZIP : CompressionTypes.None;
 
+const linger = 5; //milliseconds //TODO move to the config
+const maxBatchSize = 10; //max messages in batch //TODO move to the config
+
 let kafkaClient;
 let kafkaAdmin;
 let consumer;
@@ -33,8 +36,8 @@ let producer;
 
 const configEntries = [];
 
-let topicMessages = [];
-let loopSend;
+let batchMessages = [];
+let sendLoopInstance;
 
 function KafkaProducer() {
     this.send = (responseTopic, scriptId, rawResponse, headers) => {
@@ -48,20 +51,34 @@ function KafkaProducer() {
             }]
         };
 
-        topicMessages.push(message);
+        pushMessageToSendLater(message);
         return {};
     }
 }
 
-function sendLoopFunction() {
-    loopSend = setInterval(sendProducerMsg, 200);
+function pushMessageToSendLater(message) {
+    batchMessages.push(message);
+    if (batchMessages.length >= maxBatchSize) {
+        sendMessagesAsBatch();
+        sendLoopWithLinger(); //reset the loop and schedule a new linger window
+    }
 }
 
-function sendProducerMsg() {
-    if (topicMessages.length > 0) {
-        logger.info('sendProducerMsg from queue response, lenght: [%s]', topicMessages.length );
-        const messagesToSend = topicMessages;
-        topicMessages = [];
+function sendLoopWithLinger() {
+    if (sendLoopInstance) {
+        logger.debug("Clear sendLoop scheduler. Starting new send loop with linger [%s]", linger);
+        clearInterval(sendLoopInstance);
+    } else {
+        logger.debug("Starting new send loop with linger [%s]", linger);
+    }
+    sendLoopInstance = setInterval(sendMessagesAsBatch, linger);
+}
+
+function sendMessagesAsBatch() {
+    if (batchMessages.length > 0) {
+        logger.info('sendMessagesAsBatch, length: [%s]', batchMessages.length);
+        const messagesToSend = batchMessages;
+        batchMessages = [];
         producer.sendBatch({
             topicMessages: messagesToSend,
             acks: acks,
@@ -71,13 +88,14 @@ function sendProducerMsg() {
                 logger.info('Response sent to kafka, length: [%s]', messagesToSend.length);
             },
             (err) => {
-                if (err) {
                 logger.error('Failed to send kafka, length: [%s], pending to reprocess msgs', messagesToSend.length);
-                topicMessages = messagesToSend.concat(topicMessages);
+                batchMessages = messagesToSend.concat(batchMessages);
                 logger.error(err.stack);
             }
-            }
         );
 
+    } else {
+        //logger.debug("nothing to send");
     }
 }
 
@@ -156,22 +174,22 @@ function sendProducerMsg() {
     const { REQUEST_QUEUE_SIZE } = producer.events;
     const removeListenerRQS = producer.on(REQUEST_QUEUE_SIZE, e => logger.info(`producer REQUEST_QUEUE_SIZE ${e.payload.broker} size ${e.queueSize}`));
 
-    const removeListeners = {}
-    const { FETCH_START } = consumer.events;
-    removeListeners[FETCH_START] = consumer.on(FETCH_START, e => logger.info(`consumer FETCH_START`));
-    const { FETCH } = consumer.events;
-    removeListeners[FETCH] = consumer.on(FETCH, e => logger.info(`consumer FETCH numberOfBatches ${e.payload.numberOfBatches} duration ${e.payload.duration}`));
-    const { START_BATCH_PROCESS } = consumer.events;
-    removeListeners[START_BATCH_PROCESS] = consumer.on(START_BATCH_PROCESS, e => logger.info(`consumer START_BATCH_PROCESS topic ${e.payload.topic} batchSize ${e.payload.batchSize}`));
-    const { END_BATCH_PROCESS } = consumer.events;
-    removeListeners[END_BATCH_PROCESS] = consumer.on(END_BATCH_PROCESS, e => logger.info(`consumer END_BATCH_PROCESS topic ${e.payload.topic} batchSize ${e.payload.batchSize}`));
-    const { COMMIT_OFFSETS } = consumer.events;
-    removeListeners[COMMIT_OFFSETS] = consumer.on(COMMIT_OFFSETS, e => logger.info(`consumer COMMIT_OFFSETS topics ${e.payload.topics}`));
+    // const removeListeners = {}
+    // const { FETCH_START } = consumer.events;
+    // removeListeners[FETCH_START] = consumer.on(FETCH_START, e => logger.info(`consumer FETCH_START`));
+    // const { FETCH } = consumer.events;
+    // removeListeners[FETCH] = consumer.on(FETCH, e => logger.info(`consumer FETCH numberOfBatches ${e.payload.numberOfBatches} duration ${e.payload.duration}`));
+    // const { START_BATCH_PROCESS } = consumer.events;
+    // removeListeners[START_BATCH_PROCESS] = consumer.on(START_BATCH_PROCESS, e => logger.info(`consumer START_BATCH_PROCESS topic ${e.payload.topic} batchSize ${e.payload.batchSize}`));
+    // const { END_BATCH_PROCESS } = consumer.events;
+    // removeListeners[END_BATCH_PROCESS] = consumer.on(END_BATCH_PROCESS, e => logger.info(`consumer END_BATCH_PROCESS topic ${e.payload.topic} batchSize ${e.payload.batchSize}`));
+    // const { COMMIT_OFFSETS } = consumer.events;
+    // removeListeners[COMMIT_OFFSETS] = consumer.on(COMMIT_OFFSETS, e => logger.info(`consumer COMMIT_OFFSETS topics ${e.payload.topics}`));
 
     const messageProcessor = new JsInvokeMessageProcessor(new KafkaProducer());
     await consumer.connect();
     await producer.connect();
-    sendLoopFunction();
+    sendLoopWithLinger();
     await consumer.subscribe({topic: requestTopic});
 
     logger.info('Started ThingsBoard JavaScript Executor Microservice.');
@@ -254,8 +272,8 @@ async function disconnectProducer() {
     try {
         logger.info('Stopping loop...');
        //TODO: send handle msg
-        clearInterval(loopSend);
-        sendProducerMsg();
+        clearInterval(sendLoopInstance);
+        sendMessagesAsBatch();
         await _producer.disconnect();
         logger.info('Kafka Producer stopped.');
     } catch (e) {
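Taken together, the commit swaps the old fixed 200 ms flush timer for batching on two triggers: a batch is sent as soon as maxBatchSize (10) messages have queued, or when the linger timer (5 ms) fires, whichever comes first, and on a failed send the messages are concatenated back onto the queue for reprocessing. The standalone sketch below illustrates just that trigger logic; createBatcher and its flush callback are hypothetical names standing in for the module's globals and the kafkajs producer.sendBatch call, not part of the commit.

// Minimal sketch of the linger + maxBatchSize pattern. createBatcher and
// flush() are illustrative stand-ins (flush plays the role of sendBatch).
const linger = 5;        // ms to wait before flushing a partial batch
const maxBatchSize = 10; // flush immediately once this many messages queue up

function createBatcher(flush) {
    let batch = [];
    let timer = setInterval(flushNow, linger); // flushNow is hoisted

    function flushNow() {
        if (batch.length === 0) return; // nothing to send
        const toSend = batch;
        batch = [];
        flush(toSend);
    }

    return {
        push(message) {
            batch.push(message);
            if (batch.length >= maxBatchSize) {
                flushNow();
                clearInterval(timer);                  // restart the linger
                timer = setInterval(flushNow, linger); // window, as the commit does
            }
        },
        stop() { // mirrors disconnectProducer(): stop the timer, final flush
            clearInterval(timer);
            flushNow();
        }
    };
}

// Usage: 25 pushes produce two size-triggered flushes of 10 messages each;
// the remaining 5 go out on stop().
const batcher = createBatcher(msgs => console.log('sending', msgs.length));
for (let i = 0; i < 25; i++) batcher.push({ value: 'msg-' + i });
batcher.stop();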