1
0
mirror of https://github.com/redis/node-redis.git synced 2025-08-13 10:02:24 +03:00

feat: add auto pipeline

This commit is contained in:
Ruben Bridgewater
2017-05-28 04:31:37 +02:00
parent 8c63233968
commit 0d53d3dcdf
8 changed files with 79 additions and 121 deletions

View File

@@ -9,30 +9,72 @@ const debug = require('./debug')
// } catch (e) {
// // Fallback
// return (val) => {
// return Buffer.isBuffer(val) || ArrayBuffer.isView(val)
// return Buffer.isBuffer(val) || val instanceof Uint8Array
// }
// }
// })()
// Scratch list that holds the arguments of the command currently being
// serialized; drained (via shift) by the serialization helpers below.
const copy = []
// Reusable CRLF terminator buffer (the RESP line ending).
const RN = Buffer.from('\r\n')
// Count of Buffer arguments seen while normalizing the current command.
let bufferCount = 0
// Presumably accumulates normalization errors — TODO confirm against the
// parts of this file not shown here.
let errors = null
function writeBuffers (client) {
client.fireStrings = false
/**
 * @description Flush all pipelined commands to the network stream.
 *
 * Drains the client's pipeline queue chunk by chunk, then writes the
 * accumulated string cache (if any) and resets it. Records backpressure
 * reported by the last stream.write() in `client.shouldBuffer`, uncorks
 * the stream so the corked data is sent, and clears the pipeline flag.
 * @param {RedisClient} client
 */
function writeToStream (client) {
  const stream = client._stream
  const pending = client._pipelineQueue
  const cached = client._strCache
  let writable = false
  while (pending.length !== 0) {
    writable = stream.write(pending.shift())
  }
  if (cached.length !== 0) {
    writable = stream.write(cached)
    client._strCache = ''
  }
  client.shouldBuffer = writable === false
  stream.uncork()
  client._pipeline = false
}
// TODO: This can be significantly improved!
// We can concat the string instead of using the queue
// in most cases. This improves the performance.
// This approach only works for strings, though.
/**
 * @description Schedule an auto-pipeline flush for the client.
 *
 * On the first call of a tick this corks the stream and defers the
 * actual flush (writeToStream) to the next tick, so all commands issued
 * in the meantime are batched into a single write. Subsequent calls in
 * the same tick are no-ops because the pipeline flag is already set.
 * @param {RedisClient} client
 */
function write (client) {
  if (client._pipeline) {
    return
  }
  client._pipeline = true
  client._stream.cork()
  process.nextTick(writeToStream, client)
}
// TODO: Check if the performance is really increased
// by converting the strings to Buffers.
// At least from Node 8 on it should be better.
// TODO: Consider caching the arg.length buffer
/**
 * @description Queue a command containing Buffer arguments for pipelining.
 *
 * Converts the accumulated string cache (if any) to a Buffer and pushes it
 * onto the pipeline queue first, then pushes the command header string as a
 * Buffer, and finally drains the shared `copy` argument list: each argument
 * is queued as a `$<len>\r\n` size header, the payload, and a CRLF.
 * @param {RedisClient} client
 * @param {string} commandStr - RESP-formatted command prefix
 */
// NOTE(review): this span appears to interleave pre- and post-change lines
// from a diff — `arg` is declared twice (const at the top of the loop, var a
// few lines below) and the client.write() calls duplicate the queue.push()
// path. Reconcile against the upstream commit before relying on this block.
function pipelineBuffers (client, commandStr) {
const queue = client._pipelineQueue
const cache = client._strCache
// Flush the pending string cache into the queue so ordering is preserved.
if (cache !== '') {
queue.push(Buffer.from(cache))
client._strCache = ''
}
queue.push(Buffer.from(commandStr))
// Drain the shared argument list populated by the normalization step.
while (copy.length) {
const arg = copy.shift()
// TODO: Consider to convert the strings to buffers
// This might actually improve the performance at
// least in more modern Node versions
var arg = copy.shift()
if (typeof arg === 'string') {
client.write(`$${Buffer.byteLength(arg)}\r\n${arg}\r\n`)
} else { // buffer
client.write(`$${arg.length}\r\n`)
client.write(arg)
client.write('\r\n')
arg = Buffer.from(arg)
}
// Queue the RESP bulk-string header, the payload, and the CRLF terminator.
queue.push(Buffer.from(`$${arg.length}\r\n`))
queue.push(arg)
queue.push(RN)
debug('sendCommand: buffer send %s bytes', arg.length)
}
}
@@ -46,7 +88,7 @@ function toString (arg) {
for (var i = 0; i < arg.length; i += 1) {
toString(arg[i])
}
} else if (arg && arg.constructor.name === 'Buffer') {
} else if (arg && arg.constructor.name === 'Buffer') { // TODO: check performance
copy.push(arg)
bufferCount++
} else if (typeof arg === 'boolean') { // TODO: Remove this support and use hooks instead
@@ -125,6 +167,7 @@ function normalizeAndWrite (client, command) {
command.bufferArgs = bufferArgs
command.argsLength = len
const queue = client._pipelineQueue
if (bufferArgs === false) {
while (copy.length) {
@@ -132,11 +175,15 @@ function normalizeAndWrite (client, command) {
commandStr += `$${Buffer.byteLength(arg)}\r\n${arg}\r\n`
}
debug('Send %s id %s: %s', client.address, client.connectionId, commandStr)
client.write(commandStr)
client._strCache += commandStr
if (client._strCache.length > 10 * 1024 * 1024) {
queue.push(client._strCache)
client._strCache = ''
}
} else {
client.write(commandStr)
writeBuffers(client)
pipelineBuffers(client, commandStr)
}
write(client)
}
module.exports = normalizeAndWrite