feat(client): add latency histogram (#3099)
* add latency histogram command, tests (#1955)
committed by GitHub
parent 38bfaa7c90
commit dae47b4820

packages/client/lib/commands/LATENCY_HISTOGRAM.spec.ts (new file, 93 lines)
@@ -0,0 +1,93 @@
import assert from "node:assert/strict";
import testUtils, { GLOBAL } from "../test-utils";
import LATENCY_HISTOGRAM from "./LATENCY_HISTOGRAM";
import { parseArgs } from "./generic-transformers";

describe("LATENCY HISTOGRAM", () => {
  describe("transformArguments", () => {
    it("filtered by command set", () => {
      assert.deepEqual(parseArgs(LATENCY_HISTOGRAM, "set"), [
        "LATENCY",
        "HISTOGRAM",
        "set",
      ]);
    });

    it("unfiltered", () => {
      assert.deepEqual(parseArgs(LATENCY_HISTOGRAM), [
        "LATENCY",
        "HISTOGRAM",
      ]);
    });
  });

  describe("RESP 2", () => {
    testUtils.testWithClient(
      "unfiltered list",
      async (client) => {
        await client.configResetStat();
        await Promise.all([
          client.lPush("push-key", "hello "),
          client.set("set-key", "world!"),
        ]);
        const histogram = await client.latencyHistogram();
        const commands = ["config|resetstat", "set", "lpush"];
        for (const command of commands) {
          assert.equal(typeof histogram[command].calls, "number");
        }
      },
      GLOBAL.SERVERS.OPEN,
    );

    testUtils.testWithClient(
      "filtered by a command list",
      async (client) => {
        await client.configSet("latency-monitor-threshold", "100");
        await client.set("set-key", "hello");
        const histogram = await client.latencyHistogram("set");
        assert.equal(typeof histogram.set.calls, "number");
      },
      GLOBAL.SERVERS.OPEN,
    );
  });

  describe("RESP 3", () => {
    testUtils.testWithClient(
      "unfiltered list",
      async (client) => {
        await client.configResetStat();
        await Promise.all([
          client.lPush("push-key", "hello "),
          client.set("set-key", "world!"),
        ]);
        const histogram = await client.latencyHistogram();
        const commands = ["config|resetstat", "set", "lpush"];
        for (const command of commands) {
          assert.equal(typeof histogram[command].calls, "number");
        }
      },
      {
        ...GLOBAL.SERVERS.OPEN,
        clientOptions: {
          RESP: 3,
        },
      },
    );

    testUtils.testWithClient(
      "filtered by a command list",
      async (client) => {
        await client.configSet("latency-monitor-threshold", "100");
        await client.set("set-key", "hello");
        const histogram = await client.latencyHistogram("set");
        assert.equal(typeof histogram.set.calls, "number");
      },
      {
        ...GLOBAL.SERVERS.OPEN,
        clientOptions: {
          RESP: 3,
        },
      },
    );
  });
});

packages/client/lib/commands/LATENCY_HISTOGRAM.ts (new file, 46 lines)
@@ -0,0 +1,46 @@
import { CommandParser } from '../client/parser';
import { Command } from '../RESP/types';
import { transformTuplesToMap } from './generic-transformers';

type RawHistogram = [string, number, string, number[]];

type Histogram = Record<string, {
  calls: number;
  histogram_usec: Record<string, number>;
}>;

const id = (n: number) => n;

export default {
  CACHEABLE: false,
  IS_READ_ONLY: true,
  /**
   * Constructs the LATENCY HISTOGRAM command
   *
   * @param parser - The command parser
   * @param commands - The list of Redis commands to get histograms for
   * @see https://redis.io/docs/latest/commands/latency-histogram/
   */
  parseCommand(parser: CommandParser, ...commands: string[]) {
    const args = ['LATENCY', 'HISTOGRAM'];
    if (commands.length !== 0) {
      args.push(...commands);
    }
    parser.push(...args);
  },
  transformReply: {
    2: (reply: (string | RawHistogram)[]): Histogram => {
      const result: Histogram = {};
      if (reply.length === 0) return result;
      for (let i = 1; i < reply.length; i += 2) {
        const histogram = reply[i] as RawHistogram;
        result[reply[i - 1] as string] = {
          calls: histogram[1],
          histogram_usec: transformTuplesToMap(histogram[3], id),
        };
      }
      return result;
    },
    3: undefined as unknown as () => Histogram,
  }
} as const satisfies Command;
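For orientation only (not part of this commit): a minimal sketch of what the RESP2 branch of transformReply above produces. The command names and counts in rawReply are made-up sample data shaped like the RawHistogram tuples.

// Hypothetical RESP2 reply for LATENCY HISTOGRAM (sample values, not real server output):
// a flat [commandName, ["calls", <count>, "histogram_usec", [<usec>, <count>, ...]], ...] array.
const rawReply = [
  "set", ["calls", 42, "histogram_usec", [1, 30, 2, 10, 4, 2]],
  "lpush", ["calls", 7, "histogram_usec", [1, 7]],
];

// Feeding this through the RESP2 transform above would yield:
// {
//   set:   { calls: 42, histogram_usec: { "1": 30, "2": 10, "4": 2 } },
//   lpush: { calls: 7,  histogram_usec: { "1": 7 } },
// }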

packages/client/lib/commands/index.ts
@@ -364,6 +364,7 @@ import VREM from './VREM';
import VSETATTR from './VSETATTR';
import VSIM from './VSIM';
import VSIM_WITHSCORES from './VSIM_WITHSCORES';
import LATENCY_HISTOGRAM from './LATENCY_HISTOGRAM';

export {
  CLIENT_KILL_FILTERS,
@@ -722,6 +723,8 @@ export default {
  latencyGraph: LATENCY_GRAPH,
  LATENCY_HISTORY,
  latencyHistory: LATENCY_HISTORY,
  LATENCY_HISTOGRAM,
  latencyHistogram: LATENCY_HISTOGRAM,
  LATENCY_LATEST,
  latencyLatest: LATENCY_LATEST,
  LATENCY_RESET,
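For context (not part of the diff): a rough usage sketch once latencyHistogram is registered on the client. It assumes the package's standard createClient entry point, a Redis server that supports LATENCY HISTOGRAM (7.0+), and a v5-style client; the key names and teardown call are illustrative.

import { createClient } from "redis";

const client = createClient();
await client.connect();

// Unfiltered: one entry per command the server has recorded.
const all = await client.latencyHistogram();

// Filtered: only the histogram for SET.
const setOnly = await client.latencyHistogram("set");
console.log(setOnly.set?.calls, setOnly.set?.histogram_usec);

// Teardown method name assumed for the v5-style client.
await client.close();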