
feat: add auto pipeline

Author: Ruben Bridgewater
Date: 2017-05-28 04:31:37 +02:00
parent 8c63233968
commit 0d53d3dcdf
8 changed files with 79 additions and 121 deletions
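
The commit replaces explicit cork()/uncork() calls with an automatic pipeline: every command written during one event-loop tick is queued and flushed to the socket in a single batch on process.nextTick. A minimal, self-contained sketch of that idea, assuming a reachable Redis on the default port; the helper below is illustrative, not the client's actual internals:

const net = require('net')

// Queue each payload and schedule exactly one flush per event-loop tick.
function createAutoPipeliner (stream) {
  const queue = []
  let scheduled = false
  return function send (chunk) {
    queue.push(chunk)
    if (!scheduled) {
      scheduled = true
      stream.cork()
      process.nextTick(() => {
        while (queue.length) stream.write(queue.shift())
        stream.uncork()
        scheduled = false
      })
    }
  }
}

// Three commands issued in the same tick leave the process as one write.
const socket = net.connect(6379, '127.0.0.1')
const send = createAutoPipeliner(socket)
send('PING\r\n')
send('PING\r\n')
send('PING\r\n')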

View File

@@ -88,7 +88,6 @@ function execTransaction (multi) {
}
const len = queue.length
multi.errors = []
client.cork()
client._multi = true
multi.wantsBuffers = new Array(len)
// Silently ignore this error. We'll receive the error for the exec as well
@@ -100,7 +99,6 @@ function execTransaction (multi) {
}
const main = client.internalSendCommand(new Command('exec', []))
client.uncork()
return Promise.all(promises).then(() => main.then((replies) => multiCallback(multi, replies)).catch((err) => {
err.errors = multi.errors
return Promise.reject(err)
@@ -125,7 +123,6 @@ function execBatch (multi) {
})
}
var error = false
client.cork()
const promises = []
while (queue.length) {
const command = queue.shift()
@@ -134,7 +131,6 @@ function execBatch (multi) {
return e
}))
}
client.uncork()
return Promise.all(promises).then((res) => {
if (error) {
const err = new Errors.RedisError('bla failed')
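
With writes batched per tick automatically, execTransaction() and execBatch() no longer cork and uncork the stream around their command loop; the commands a MULTI queues synchronously land in the same batch anyway. From the caller's perspective nothing changes. A hedged usage sketch, assuming a local Redis and the promise-returning exec() this branch uses:

const redis = require('redis')
const client = redis.createClient() // assumes a server on 127.0.0.1:6379

client.multi()
  .set('foo', 'bar')
  .get('foo')
  .exec()
  .then((replies) => {
    console.log(replies) // e.g. [ 'OK', 'bar' ]; still sent as one pipeline
    return client.quit()
  })
  .catch((err) => console.error(err))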

View File

@@ -6,6 +6,9 @@ const Command = require('./command')
function onConnect (client) {
debug('Stream connected %s id %s', client.address, client.connectionId)
// TODO: Check if the clients prototype and the clients instance have
// fast properties. If that's not the case, make them fast properties
// again!
client.connected = true
client.ready = false
client.emittedEnd = false
@@ -49,22 +52,6 @@ function readyHandler (client) {
debug('readyHandler called %s id %s', client.address, client.connectionId)
client.ready = true
client.cork = () => {
client.pipeline = true
client._stream.cork()
}
client.uncork = () => {
if (client.fireStrings) {
client.writeStrings()
} else {
client.writeBuffers()
}
client.pipeline = false
client.fireStrings = true
// TODO: Consider using next tick here. See https://github.com/NodeRedis/nodeRedis/issues/1033
client._stream.uncork()
}
if (client.selectedDb !== undefined) {
client.internalSendCommand(new Command('select', [client.selectedDb])).catch((err) => {
if (!client.closing) {
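
The ready handler no longer installs per-client cork()/uncork() helpers; the per-tick writer added in the last file of this diff takes over that job. For context, a small sketch of the underlying Node.js stream primitives involved (assumes a reachable server on port 6379):

const net = require('net')
const socket = net.connect(6379, '127.0.0.1')

// cork() holds writes in userland memory; uncork() pushes everything queued so
// far to the socket as one chunk. The new writer automates this around each tick.
socket.cork()
socket.write('PING\r\n')
socket.write('PING\r\n')
process.nextTick(() => socket.uncork())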

View File

@@ -6,7 +6,6 @@ var lazyConnect = function (client) {
lazyConnect = require('./connect')
lazyConnect(client)
}
const noop = () => {}
/**
* @description Try connecting to a server again
@@ -50,10 +49,6 @@ function reconnect (client, why, error) {
debug('Redis connection is gone from %s event.', why)
client.connected = false
client.ready = false
// Deactivate cork to work with the offline queue
client.cork = noop
client.uncork = noop
client.pipeline = false
client.pubSubMode = 0
// since we are collapsing end and close, users don't expect to be called twice
@@ -108,10 +103,13 @@ function reconnect (client, why, error) {
if (client.options.retryUnfulfilledCommands) {
client.offlineQueue.unshift.apply(client.offlineQueue, client.commandQueue.toArray())
client.commandQueue.clear()
} else if (client.commandQueue.length !== 0) {
// TODO: If only the pipelineQueue contains the error we could improve the situation.
// We could postpone writing to the stream until we connected again and fire the commands.
// The commands in the pipelineQueue are also not uncertain. They never left the client.
} else if (client.commandQueue.length !== 0 || client._pipelineQueue.length !== 0) {
client.flushAndError('Redis connection lost and command aborted.', 'UNCERTAIN_STATE', {
error,
queues: ['commandQueue']
queues: ['commandQueue', '_pipelineQueue']
})
}
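
The reconnect path now also fails whatever sits in the new _pipelineQueue when the connection drops, and the added TODO notes those entries are not truly uncertain. A hypothetical helper, purely to illustrate the distinction that comment draws (the property names match the diff, the function itself is not part of the client):

// commandQueue entries were already written to the socket, so after a
// disconnect their outcome is unknown; _pipelineQueue entries never left the
// process and could in principle be replayed instead of rejected.
function pendingWorkAfterDisconnect (client) {
  return {
    uncertain: client.commandQueue.length,
    neverSent: client._pipelineQueue.length
  }
}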

View File

@@ -9,30 +9,72 @@ const debug = require('./debug')
// } catch (e) {
// // Fallback
// return (val) => {
// return Buffer.isBuffer(val) || ArrayBuffer.isView(val)
// return Buffer.isBuffer(val) || val instanceof Uint8Array
// }
// }
// })()
const copy = []
const RN = Buffer.from('\r\n')
var bufferCount = 0
var errors = null
function writeBuffers (client) {
client.fireStrings = false
/**
* @description Pipeline and write all commands to the stream
*
* If the pipelined string exceeds X mb, write it directly to the stream and pipeline the rest again.
* @param {RedisClient} client
*/
function writeToStream (client) {
const stream = client._stream
const queue = client._pipelineQueue
const cache = client._strCache
var buffer = false
while (queue.length) {
buffer = stream.write(queue.shift())
}
if (cache.length !== 0) {
buffer = stream.write(cache)
client._strCache = ''
}
client.shouldBuffer = !buffer
stream.uncork()
client._pipeline = false
}
// TODO: This can be significantly improved!
// We can concat the string instead of using the queue
// in most cases. This improves the performance.
// This can only be used for strings only though.
function write (client) {
if (client._pipeline === false) {
client._stream.cork()
client._pipeline = true
process.nextTick(writeToStream, client)
}
}
// TODO: Check if the performance is really increased
// by converting the strings to Buffers.
// At least from Node 8 on it should be better.
// TODO: Consider caching the arg.length buffer
function pipelineBuffers (client, commandStr) {
const queue = client._pipelineQueue
const cache = client._strCache
if (cache !== '') {
queue.push(Buffer.from(cache))
client._strCache = ''
}
queue.push(Buffer.from(commandStr))
while (copy.length) {
const arg = copy.shift()
// TODO: Consider to convert the strings to buffers
// This might actually improve the performance at
// least in more modern Node versions
var arg = copy.shift()
if (typeof arg === 'string') {
client.write(`$${Buffer.byteLength(arg)}\r\n${arg}\r\n`)
} else { // buffer
client.write(`$${arg.length}\r\n`)
client.write(arg)
client.write('\r\n')
arg = Buffer.from(arg)
}
queue.push(Buffer.from(`$${arg.length}\r\n`))
queue.push(arg)
queue.push(RN)
debug('sendCommand: buffer send %s bytes', arg.length)
}
}
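
pipelineBuffers() first spills any accumulated string cache into the queue as a Buffer, then pushes the command string followed by each argument as separate length-prefixed Buffers. A standalone sketch of the RESP framing this produces, independent of the client internals:

const RN = Buffer.from('\r\n')

// Append one argument in RESP bulk-string form: $<byte length>\r\n<bytes>\r\n
function encodeArg (queue, arg) {
  const buf = typeof arg === 'string' ? Buffer.from(arg) : arg
  queue.push(Buffer.from(`$${buf.length}\r\n`))
  queue.push(buf)
  queue.push(RN)
}

const queue = [Buffer.from('*3\r\n')] // array header: command plus two arguments
encodeArg(queue, 'SET')
encodeArg(queue, 'key')
encodeArg(queue, Buffer.from([0xde, 0xad, 0xbe, 0xef]))
console.log(Buffer.concat(queue))
// *3 $3 SET $3 key $4 <4 raw bytes>, each part terminated by \r\n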
@@ -46,7 +88,7 @@ function toString (arg) {
for (var i = 0; i < arg.length; i += 1) {
toString(arg[i])
}
} else if (arg && arg.constructor.name === 'Buffer') {
} else if (arg && arg.constructor.name === 'Buffer') { // TODO: check performance
copy.push(arg)
bufferCount++
} else if (typeof arg === 'boolean') { // TODO: Remove this support and use hooks instead
@@ -125,6 +167,7 @@ function normalizeAndWrite (client, command) {
command.bufferArgs = bufferArgs
command.argsLength = len
const queue = client._pipelineQueue
if (bufferArgs === false) {
while (copy.length) {
@@ -132,11 +175,15 @@ function normalizeAndWrite (client, command) {
commandStr += `$${Buffer.byteLength(arg)}\r\n${arg}\r\n`
}
debug('Send %s id %s: %s', client.address, client.connectionId, commandStr)
client.write(commandStr)
client._strCache += commandStr
if (client._strCache.length > 10 * 1024 * 1024) {
queue.push(client._strCache)
client._strCache = ''
}
} else {
client.write(commandStr)
writeBuffers(client)
pipelineBuffers(client, commandStr)
}
write(client)
}
module.exports = normalizeAndWrite
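
For pure-string commands, normalizeAndWrite() appends the serialized command to client._strCache and only spills the cache into the pipeline queue once it exceeds 10 MiB; writeToStream() later drains queue and cache with as few stream.write() calls as possible and records backpressure in shouldBuffer. A hedged sketch of that accumulate-then-flush pattern in isolation (the class and its names are illustrative, not the client's API):

const net = require('net')

const TEN_MIB = 10 * 1024 * 1024

class TickPipeline {
  constructor (stream) {
    this.stream = stream
    this.queue = []
    this.cache = ''
    this.shouldBuffer = false
  }

  // Concatenate string payloads; spill oversized batches into the queue early.
  addString (commandStr) {
    this.cache += commandStr
    if (this.cache.length > TEN_MIB) {
      this.queue.push(this.cache)
      this.cache = ''
    }
  }

  // Drain everything accumulated during the tick and note backpressure.
  flush () {
    let ok = true
    while (this.queue.length) ok = this.stream.write(this.queue.shift())
    if (this.cache !== '') {
      ok = this.stream.write(this.cache)
      this.cache = ''
    }
    this.shouldBuffer = !ok
  }
}

// Usage: buffer commands during the tick, flush once afterwards.
const pipeline = new TickPipeline(net.connect(6379, '127.0.0.1'))
pipeline.addString('*1\r\n$4\r\nPING\r\n')
process.nextTick(() => pipeline.flush())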